Merge branch 'master' into fix/overflow-invoices
commit 69e8a4308b
@@ -152,7 +152,7 @@
{#if isDisabled && !syncAutomationsEnabled && action.stepId === ActionStepID.COLLECT}
<div class="tag-color">
<Tags>
<Tag icon="LockClosed">Business</Tag>
<Tag icon="LockClosed">Premium</Tag>
</Tags>
</div>
{:else if isDisabled}
@@ -41,7 +41,7 @@
{ label: "False", value: "false" },
]}
/>
{:else if schema.type === "array"}
{:else if schemaHasOptions(schema) && schema.type === "array"}
<Multiselect
bind:value={value[field]}
options={schema.constraints.inclusion}
@@ -77,7 +77,7 @@
on:change={e => onChange(e, field)}
useLabel={false}
/>
{:else if ["string", "number", "bigint", "barcodeqr"].includes(schema.type)}
{:else if ["string", "number", "bigint", "barcodeqr", "array"].includes(schema.type)}
<svelte:component
this={isTestModal ? ModalBindableInput : DrawerBindableInput}
panel={AutomationBindingPanel}
@@ -1,6 +1,9 @@
<script>
import { currentAsset } from "builderStore"
import { findClosestMatchingComponent } from "builderStore/componentUtils"
import { currentAsset, store } from "builderStore"
import {
findClosestMatchingComponent,
findComponent,
} from "builderStore/componentUtils"
import {
getDatasourceForProvider,
getSchemaForDatasource,
@@ -20,8 +23,23 @@
component => component._component.endsWith("/form")
)

const resolveDatasource = (currentAsset, componentInstance, form) => {
if (!form && componentInstance._id != $store.selectedComponentId) {
const block = findComponent(
currentAsset.props,
$store.selectedComponentId
)
const def = store.actions.components.getDefinition(block._component)
return def?.block === true
? getDatasourceForProvider(currentAsset, block)
: {}
} else {
return getDatasourceForProvider(currentAsset, form)
}
}

// Get that form's schema
$: datasource = getDatasourceForProvider($currentAsset, form)
$: datasource = resolveDatasource($currentAsset, componentInstance, form)
$: formSchema = getSchemaForDatasource($currentAsset, datasource)?.schema

// Get the schema for the relationship field that this picker is using
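The hunk above routes datasource lookup through a small helper: when no enclosing form is found and the selected component is a block, the datasource comes from the block itself, otherwise from the form. A simplified, self-contained TypeScript sketch of that fallback idea follows; the types and helpers here are placeholders, not the real builderStore API.

```ts
// Simplified sketch of the datasource fallback used above.
interface Component {
  _id: string
  _component: string
  _children?: Component[]
}

// Depth-first lookup of a component by id (stand-in for findComponent).
function findComponent(root: Component, id: string): Component | undefined {
  if (root._id === id) return root
  for (const child of root._children || []) {
    const match = findComponent(child, id)
    if (match) return match
  }
  return undefined
}

// Prefer the enclosing form as the datasource provider; otherwise fall back
// to the currently selected component if its definition marks it as a block.
function resolveDatasource(
  root: Component,
  selectedId: string,
  form: Component | undefined,
  isBlock: (component: Component) => boolean,
  getDatasource: (component: Component) => object
): object {
  if (form) {
    return getDatasource(form)
  }
  const selected = findComponent(root, selectedId)
  return selected && isBlock(selected) ? getDatasource(selected) : {}
}
```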
@@ -214,7 +214,7 @@
<Heading size="M">Branding</Heading>
{#if !isCloud && !brandingEnabled}
<Tags>
<Tag icon="LockClosed">Business</Tag>
<Tag icon="LockClosed">Premium</Tag>
</Tags>
{/if}
{#if isCloud && !brandingEnabled}
@@ -97,7 +97,7 @@
<Heading size="M">Groups</Heading>
{#if !$licensing.groupsEnabled}
<Tags>
<Tag icon="LockClosed">Business</Tag>
<Tag icon="LockClosed">Enterpise</Tag>
</Tags>
{/if}
</div>
@@ -80,7 +80,7 @@
"koa": "2.13.4",
"koa-body": "4.2.0",
"koa-compress": "4.0.1",
"koa-send": "5.0.0",
"koa-send": "5.0.1",
"koa-useragent": "^4.1.0",
"koa2-ratelimit": "1.1.1",
"lodash": "4.17.21",
@@ -120,6 +120,7 @@
"@types/jest": "29.5.5",
"@types/koa": "2.13.4",
"@types/koa__router": "8.0.8",
"@types/koa-send": "^4.1.6",
"@types/lodash": "4.14.200",
"@types/mssql": "9.1.4",
"@types/node-fetch": "2.6.4",
@ -1,13 +1,10 @@
|
|||
import {
|
||||
DocumentType,
|
||||
generateDatasourceID,
|
||||
getQueryParams,
|
||||
getTableParams,
|
||||
} from "../../db/utils"
|
||||
import { getQueryParams, getTableParams } from "../../db/utils"
|
||||
import { getIntegration } from "../../integrations"
|
||||
import { invalidateDynamicVariables } from "../../threads/utils"
|
||||
import { context, db as dbCore, events } from "@budibase/backend-core"
|
||||
import {
|
||||
BuildSchemaFromSourceRequest,
|
||||
BuildSchemaFromSourceResponse,
|
||||
CreateDatasourceRequest,
|
||||
CreateDatasourceResponse,
|
||||
Datasource,
|
||||
|
@ -22,7 +19,6 @@ import {
|
|||
} from "@budibase/types"
|
||||
import sdk from "../../sdk"
|
||||
import { builderSocket } from "../../websockets"
|
||||
import { setupCreationAuth as googleSetupCreationAuth } from "../../integrations/googlesheets"
|
||||
import { isEqual } from "lodash"
|
||||
|
||||
export async function fetch(ctx: UserCtx) {
|
||||
|
@@ -67,22 +63,16 @@ export async function information(
}
}

export async function buildSchemaFromDb(ctx: UserCtx) {
const db = context.getAppDB()
export async function buildSchemaFromSource(
ctx: UserCtx<BuildSchemaFromSourceRequest, BuildSchemaFromSourceResponse>
) {
const datasourceId = ctx.params.datasourceId
const tablesFilter = ctx.request.body.tablesFilter
const datasource = await sdk.datasources.get(ctx.params.datasourceId)

const { tables, errors } = await sdk.datasources.buildFilteredSchema(
datasource,
const { datasource, errors } = await sdk.datasources.buildSchemaFromSource(
datasourceId,
tablesFilter
)
datasource.entities = tables

setDefaultDisplayColumns(datasource)
const dbResp = await db.put(
sdk.tables.populateExternalTableSchemas(datasource)
)
datasource._rev = dbResp.rev

ctx.body = {
datasource: await sdk.datasources.removeSecretSingle(datasource),
@@ -90,24 +80,6 @@ export async function buildSchemaFromDb(ctx: UserCtx) {
}
}

/**
* Make sure all datasource entities have a display name selected
*/
function setDefaultDisplayColumns(datasource: Datasource) {
//
for (let entity of Object.values(datasource.entities || {})) {
if (entity.primaryDisplay) {
continue
}
const notAutoColumn = Object.values(entity.schema).find(
schema => !schema.autocolumn
)
if (notAutoColumn) {
entity.primaryDisplay = notAutoColumn.name
}
}
}

/**
* Check for variables that have been updated or removed and invalidate them.
*/
@@ -205,54 +177,18 @@ export async function update(ctx: UserCtx<any, UpdateDatasourceResponse>) {
}
}

const preSaveAction: Partial<Record<SourceName, any>> = {
[SourceName.GOOGLE_SHEETS]: async (datasource: Datasource) => {
await googleSetupCreationAuth(datasource.config as any)
},
}

export async function save(
ctx: UserCtx<CreateDatasourceRequest, CreateDatasourceResponse>
) {
const db = context.getAppDB()
const plus = ctx.request.body.datasource.plus
const fetchSchema = ctx.request.body.fetchSchema
const tablesFilter = ctx.request.body.tablesFilter

const datasource = {
_id: generateDatasourceID({ plus }),
...ctx.request.body.datasource,
type: plus ? DocumentType.DATASOURCE_PLUS : DocumentType.DATASOURCE,
}

let errors: Record<string, string> = {}
if (fetchSchema) {
const schema = await sdk.datasources.buildFilteredSchema(
datasource,
tablesFilter
)
datasource.entities = schema.tables
setDefaultDisplayColumns(datasource)
errors = schema.errors
}

if (preSaveAction[datasource.source]) {
await preSaveAction[datasource.source](datasource)
}

const dbResp = await db.put(
sdk.tables.populateExternalTableSchemas(datasource)
)
await events.datasource.created(datasource)
datasource._rev = dbResp.rev

// Drain connection pools when configuration is changed
if (datasource.source) {
const source = await getIntegration(datasource.source)
if (source && source.pool) {
await source.pool.end()
}
}
const {
datasource: datasourceData,
fetchSchema,
tablesFilter,
} = ctx.request.body
const { datasource, errors } = await sdk.datasources.save(datasourceData, {
fetchSchema,
tablesFilter,
})

ctx.body = {
datasource: await sdk.datasources.removeSecretSingle(datasource),
@@ -53,7 +53,7 @@ router
.post(
"/api/datasources/:datasourceId/schema",
authorized(permissions.BUILDER),
datasourceController.buildSchemaFromDb
datasourceController.buildSchemaFromSource
)
.post(
"/api/datasources",
@@ -103,8 +103,7 @@ function typeCoercion(filters: SearchFilters, table: Table) {
return filters
}
for (let key of Object.keys(filters)) {
// @ts-ignore
const searchParam = filters[key]
const searchParam = filters[key as keyof SearchFilters]
if (typeof searchParam === "object") {
for (let [property, value] of Object.entries(searchParam)) {
// We need to strip numerical prefixes here, so that we can look up
@@ -117,7 +116,13 @@ function typeCoercion(filters: SearchFilters, table: Table) {
continue
}
if (column.type === FieldTypes.NUMBER) {
searchParam[property] = parseFloat(value)
if (key === "oneOf") {
searchParam[property] = value
.split(",")
.map(item => parseFloat(item))
} else {
searchParam[property] = parseFloat(value)
}
}
}
}
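The second hunk above changes how numeric filter values are coerced: a `oneOf` filter arrives as a comma-separated string and must become an array of numbers, while every other operator still takes a single parseFloat. A self-contained sketch of that coercion (not the server's typeCoercion function itself):

```ts
// Coerce a numeric filter value the way the hunk above does:
// "oneOf" values are comma-separated lists, everything else is a single number.
function coerceNumericFilterValue(
  operator: string,
  value: string
): number | number[] {
  if (operator === "oneOf") {
    return value.split(",").map(item => parseFloat(item))
  }
  return parseFloat(value)
}

// Example usage:
const ids = coerceNumericFilterValue("oneOf", "523,259") // [523, 259]
const single = coerceNumericFilterValue("equal", "1212") // 1212
```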
@@ -1,4 +1,4 @@
import { context, db as dbCore } from "@budibase/backend-core"
import { context, db as dbCore, events } from "@budibase/backend-core"
import { findHBSBlocks, processObjectSync } from "@budibase/string-templates"
import {
Datasource,
@@ -14,16 +14,22 @@ import {
} from "@budibase/types"
import { cloneDeep } from "lodash/fp"
import { getEnvironmentVariables } from "../../utils"
import { getDefinitions, getDefinition } from "../../../integrations"
import {
getDefinitions,
getDefinition,
getIntegration,
} from "../../../integrations"
import merge from "lodash/merge"
import {
BudibaseInternalDB,
generateDatasourceID,
getDatasourceParams,
getDatasourcePlusParams,
getTableParams,
DocumentType,
} from "../../../db/utils"
import sdk from "../../index"
import datasource from "../../../api/routes/datasource"
import { setupCreationAuth as googleSetupCreationAuth } from "../../../integrations/googlesheets"

const ENV_VAR_PREFIX = "env."

@@ -273,3 +279,75 @@ export async function getExternalDatasources(): Promise<Datasource[]> {

return externalDatasources.rows.map(r => r.doc!)
}

export async function save(
datasource: Datasource,
opts?: { fetchSchema?: boolean; tablesFilter?: string[] }
): Promise<{ datasource: Datasource; errors: Record<string, string> }> {
const db = context.getAppDB()
const plus = datasource.plus

const fetchSchema = opts?.fetchSchema || false
const tablesFilter = opts?.tablesFilter || []

datasource = {
_id: generateDatasourceID({ plus }),
...datasource,
type: plus ? DocumentType.DATASOURCE_PLUS : DocumentType.DATASOURCE,
}

let errors: Record<string, string> = {}
if (fetchSchema) {
const schema = await sdk.datasources.buildFilteredSchema(
datasource,
tablesFilter
)
datasource.entities = schema.tables
setDefaultDisplayColumns(datasource)
errors = schema.errors
}

if (preSaveAction[datasource.source]) {
await preSaveAction[datasource.source](datasource)
}

const dbResp = await db.put(
sdk.tables.populateExternalTableSchemas(datasource)
)
await events.datasource.created(datasource)
datasource._rev = dbResp.rev

// Drain connection pools when configuration is changed
if (datasource.source) {
const source = await getIntegration(datasource.source)
if (source && source.pool) {
await source.pool.end()
}
}

return { datasource, errors }
}

const preSaveAction: Partial<Record<SourceName, any>> = {
[SourceName.GOOGLE_SHEETS]: async (datasource: Datasource) => {
await googleSetupCreationAuth(datasource.config as any)
},
}

/**
* Make sure all datasource entities have a display name selected
*/
export function setDefaultDisplayColumns(datasource: Datasource) {
//
for (let entity of Object.values(datasource.entities || {})) {
if (entity.primaryDisplay) {
continue
}
const notAutoColumn = Object.values(entity.schema).find(
schema => !schema.autocolumn
)
if (notAutoColumn) {
entity.primaryDisplay = notAutoColumn.name
}
}
}
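The setDefaultDisplayColumns helper moved into the SDK above picks a primary display column for every table that lacks one: the first field that is not an auto-generated column wins. A standalone TypeScript sketch of that selection, using simplified table types rather than the real Budibase Datasource type:

```ts
// Simplified sketch of the default display column selection moved into the SDK.
interface FieldSchema {
  name: string
  autocolumn?: boolean
}

interface TableEntity {
  primaryDisplay?: string
  schema: Record<string, FieldSchema>
}

function setDefaultDisplayColumns(entities: Record<string, TableEntity>) {
  for (const entity of Object.values(entities)) {
    if (entity.primaryDisplay) {
      continue
    }
    // The first column that is not auto-generated becomes the display column.
    const notAutoColumn = Object.values(entity.schema).find(
      schema => !schema.autocolumn
    )
    if (notAutoColumn) {
      entity.primaryDisplay = notAutoColumn.name
    }
  }
}
```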
@@ -5,7 +5,9 @@ import {
Schema,
} from "@budibase/types"
import * as datasources from "./datasources"
import tableSdk from "../tables"
import { getIntegration } from "../../../integrations"
import { context } from "@budibase/backend-core"

export async function buildFilteredSchema(
datasource: Datasource,
@@ -60,3 +62,24 @@ export async function getAndMergeDatasource(datasource: Datasource) {
}
return await datasources.enrich(datasource)
}

export async function buildSchemaFromSource(
datasourceId: string,
tablesFilter?: string[]
) {
const db = context.getAppDB()

const datasource = await datasources.get(datasourceId)

const { tables, errors } = await buildFilteredSchema(datasource, tablesFilter)
datasource.entities = tables

datasources.setDefaultDisplayColumns(datasource)
const dbResp = await db.put(tableSdk.populateExternalTableSchemas(datasource))
datasource._rev = dbResp.rev

return {
datasource,
errors,
}
}
@@ -143,100 +143,104 @@ export const buildLuceneQuery = (filter: SearchFilter[]) => {
oneOf: {},
containsAny: {},
}
if (Array.isArray(filter)) {
filter.forEach(expression => {
let { operator, field, type, value, externalType, onEmptyFilter } =
expression
const isHbs =
typeof value === "string" && (value.match(HBS_REGEX) || []).length > 0
// Parse all values into correct types
if (operator === "allOr") {
query.allOr = true

if (!Array.isArray(filter)) {
return query
}

filter.forEach(expression => {
let { operator, field, type, value, externalType, onEmptyFilter } =
expression
const isHbs =
typeof value === "string" && (value.match(HBS_REGEX) || []).length > 0
// Parse all values into correct types
if (operator === "allOr") {
query.allOr = true
return
}
if (onEmptyFilter) {
query.onEmptyFilter = onEmptyFilter
return
}
if (
type === "datetime" &&
!isHbs &&
operator !== "empty" &&
operator !== "notEmpty"
) {
// Ensure date value is a valid date and parse into correct format
if (!value) {
return
}
if (onEmptyFilter) {
query.onEmptyFilter = onEmptyFilter
try {
value = new Date(value).toISOString()
} catch (error) {
return
}
if (
type === "datetime" &&
!isHbs &&
operator !== "empty" &&
operator !== "notEmpty"
}
if (type === "number" && typeof value === "string" && !isHbs) {
if (operator === "oneOf") {
value = value.split(",").map(item => parseFloat(item))
} else {
value = parseFloat(value)
}
}
if (type === "boolean") {
value = `${value}`?.toLowerCase() === "true"
}
if (
["contains", "notContains", "containsAny"].includes(operator) &&
type === "array" &&
typeof value === "string"
) {
value = value.split(",")
}
if (operator.startsWith("range") && query.range) {
const minint =
SqlNumberTypeRangeMap[
externalType as keyof typeof SqlNumberTypeRangeMap
]?.min || Number.MIN_SAFE_INTEGER
const maxint =
SqlNumberTypeRangeMap[
externalType as keyof typeof SqlNumberTypeRangeMap
]?.max || Number.MAX_SAFE_INTEGER
if (!query.range[field]) {
query.range[field] = {
low: type === "number" ? minint : "0000-00-00T00:00:00.000Z",
high: type === "number" ? maxint : "9999-00-00T00:00:00.000Z",
}
}
if ((operator as any) === "rangeLow" && value != null && value !== "") {
query.range[field].low = value
} else if (
(operator as any) === "rangeHigh" &&
value != null &&
value !== ""
) {
// Ensure date value is a valid date and parse into correct format
if (!value) {
return
}
try {
value = new Date(value).toISOString()
} catch (error) {
return
}
}
if (type === "number" && typeof value === "string") {
if (operator === "oneOf") {
value = value.split(",").map(item => parseFloat(item))
} else if (!isHbs) {
value = parseFloat(value)
}
query.range[field].high = value
}
} else if (query[operator] && operator !== "onEmptyFilter") {
if (type === "boolean") {
value = `${value}`?.toLowerCase() === "true"
}
if (
["contains", "notContains", "containsAny"].includes(operator) &&
type === "array" &&
typeof value === "string"
) {
value = value.split(",")
}
if (operator.startsWith("range") && query.range) {
const minint =
SqlNumberTypeRangeMap[
externalType as keyof typeof SqlNumberTypeRangeMap
]?.min || Number.MIN_SAFE_INTEGER
const maxint =
SqlNumberTypeRangeMap[
externalType as keyof typeof SqlNumberTypeRangeMap
]?.max || Number.MAX_SAFE_INTEGER
if (!query.range[field]) {
query.range[field] = {
low: type === "number" ? minint : "0000-00-00T00:00:00.000Z",
high: type === "number" ? maxint : "9999-00-00T00:00:00.000Z",
}
}
if ((operator as any) === "rangeLow" && value != null && value !== "") {
query.range[field].low = value
} else if (
(operator as any) === "rangeHigh" &&
value != null &&
value !== ""
) {
query.range[field].high = value
}
} else if (query[operator] && operator !== "onEmptyFilter") {
if (type === "boolean") {
// Transform boolean filters to cope with null.
// "equals false" needs to be "not equals true"
// "not equals false" needs to be "equals true"
if (operator === "equal" && value === false) {
query.notEqual = query.notEqual || {}
query.notEqual[field] = true
} else if (operator === "notEqual" && value === false) {
query.equal = query.equal || {}
query.equal[field] = true
} else {
query[operator] = query[operator] || {}
query[operator]![field] = value
}
// Transform boolean filters to cope with null.
// "equals false" needs to be "not equals true"
// "not equals false" needs to be "equals true"
if (operator === "equal" && value === false) {
query.notEqual = query.notEqual || {}
query.notEqual[field] = true
} else if (operator === "notEqual" && value === false) {
query.equal = query.equal || {}
query.equal[field] = true
} else {
query[operator] = query[operator] || {}
query[operator]![field] = value
}
} else {
query[operator] = query[operator] || {}
query[operator]![field] = value
}
})
}
}
})

return query
}

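One behaviour that survives the buildLuceneQuery restructure above is the boolean transform: "equals false" is rewritten to "not equals true" (and vice versa) so that rows where the field is null or undefined still behave as false. A minimal standalone sketch of just that transform, using plain objects instead of the real SearchQuery type:

```ts
// Minimal sketch of the boolean filter transform kept in buildLuceneQuery above.
// "equal: false" becomes "notEqual: true" (and vice versa) so null/undefined
// values still match a "false" filter.
type BoolQuery = {
  equal: Record<string, boolean>
  notEqual: Record<string, boolean>
}

function addBooleanFilter(
  query: BoolQuery,
  operator: "equal" | "notEqual",
  field: string,
  value: boolean
) {
  if (operator === "equal" && value === false) {
    query.notEqual[field] = true
  } else if (operator === "notEqual" && value === false) {
    query.equal[field] = true
  } else {
    query[operator][field] = value
  }
}

const query: BoolQuery = { equal: {}, notEqual: {} }
addBooleanFilter(query, "equal", "archived", false)
// query => { equal: {}, notEqual: { archived: true } }
```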
@@ -1,6 +1,11 @@
import { SearchQuery, SearchQueryOperators } from "@budibase/types"
import { runLuceneQuery } from "../filters"
import { expect, describe, it } from "vitest"
import {
SearchQuery,
SearchQueryOperators,
FieldType,
SearchFilter,
} from "@budibase/types"
import { buildLuceneQuery, runLuceneQuery } from "../filters"
import { expect, describe, it, test } from "vitest"

describe("runLuceneQuery", () => {
const docs = [
@@ -167,4 +172,186 @@ describe("runLuceneQuery", () => {
})
expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([2, 3])
})

test.each([[523, 259], "523,259"])(
"should return rows with matches on numeric oneOf filter",
input => {
let query = buildQuery("oneOf", {
customer_id: input,
})
expect(runLuceneQuery(docs, query).map(row => row.customer_id)).toEqual([
259, 523,
])
}
)
})

describe("buildLuceneQuery", () => {
it("should return a basic search query template if the input is not an array", () => {
const filter: any = "NOT_AN_ARRAY"
expect(buildLuceneQuery(filter)).toEqual({
string: {},
fuzzy: {},
range: {},
equal: {},
notEqual: {},
empty: {},
notEmpty: {},
contains: {},
notContains: {},
oneOf: {},
containsAny: {},
})
})

it("should parseFloat if the type is a number, but the value is a numeric string", () => {
const filter: SearchFilter[] = [
{
operator: SearchQueryOperators.EQUAL,
field: "customer_id",
type: FieldType.NUMBER,
value: "1212",
},
{
operator: SearchQueryOperators.ONE_OF,
field: "customer_id",
type: FieldType.NUMBER,
value: "1000,1212,3400",
},
]
expect(buildLuceneQuery(filter)).toEqual({
string: {},
fuzzy: {},
range: {},
equal: {
customer_id: 1212,
},
notEqual: {},
empty: {},
notEmpty: {},
contains: {},
notContains: {},
oneOf: {
customer_id: [1000, 1212, 3400],
},
containsAny: {},
})
})

it("should not parseFloat if the type is a number, but the value is a handlebars binding string", () => {
const filter: SearchFilter[] = [
{
operator: SearchQueryOperators.EQUAL,
field: "customer_id",
type: FieldType.NUMBER,
value: "{{ customer_id }}",
},
{
operator: SearchQueryOperators.ONE_OF,
field: "customer_id",
type: FieldType.NUMBER,
value: "{{ list_of_customer_ids }}",
},
]
expect(buildLuceneQuery(filter)).toEqual({
string: {},
fuzzy: {},
range: {},
equal: {
customer_id: "{{ customer_id }}",
},
notEqual: {},
empty: {},
notEmpty: {},
contains: {},
notContains: {},
oneOf: {
customer_id: "{{ list_of_customer_ids }}",
},
containsAny: {},
})
})

it("should cast string to boolean if the type is boolean", () => {
const filter: SearchFilter[] = [
{
operator: SearchQueryOperators.EQUAL,
field: "a",
type: FieldType.BOOLEAN,
value: "not_true",
},
{
operator: SearchQueryOperators.NOT_EQUAL,
field: "b",
type: FieldType.BOOLEAN,
value: "not_true",
},
{
operator: SearchQueryOperators.EQUAL,
field: "c",
type: FieldType.BOOLEAN,
value: "true",
},
]
expect(buildLuceneQuery(filter)).toEqual({
string: {},
fuzzy: {},
range: {},
equal: {
b: true,
c: true,
},
notEqual: {
a: true,
},
empty: {},
notEmpty: {},
contains: {},
notContains: {},
oneOf: {},
containsAny: {},
})
})

it("should split the string for contains operators", () => {
const filter: SearchFilter[] = [
{
operator: SearchQueryOperators.CONTAINS,
field: "description",
type: FieldType.ARRAY,
value: "Large box,Heavy box,Small box",
},
{
operator: SearchQueryOperators.NOT_CONTAINS,
field: "description",
type: FieldType.ARRAY,
value: "Large box,Heavy box,Small box",
},
{
operator: SearchQueryOperators.CONTAINS_ANY,
field: "description",
type: FieldType.ARRAY,
value: "Large box,Heavy box,Small box",
},
]
expect(buildLuceneQuery(filter)).toEqual({
string: {},
fuzzy: {},
range: {},
equal: {},
notEqual: {},
empty: {},
notEmpty: {},
contains: {
description: ["Large box", "Heavy box", "Small box"],
},
notContains: {
description: ["Large box", "Heavy box", "Small box"],
},
oneOf: {},
containsAny: {
description: ["Large box", "Heavy box", "Small box"],
},
})
})
})
@@ -35,3 +35,12 @@ export interface FetchDatasourceInfoResponse {
export interface UpdateDatasourceRequest extends Datasource {
datasource: Datasource
}

export interface BuildSchemaFromSourceRequest {
tablesFilter?: string[]
}

export interface BuildSchemaFromSourceResponse {
datasource: Datasource
errors: Record<string, string>
}
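For reference, a hypothetical client-side call against the renamed schema endpoint, using the request and response shapes declared just above. The URL path comes from the route change earlier in this diff; the simplified Datasource shape, base URL, and auth handling are assumptions, not part of the commit.

```ts
interface BuildSchemaFromSourceRequest {
  tablesFilter?: string[]
}

interface BuildSchemaFromSourceResponse {
  // Simplified stand-in for the real Datasource document.
  datasource: { _id?: string; entities?: Record<string, unknown> }
  errors: Record<string, string>
}

// Hypothetical helper: POST /api/datasources/:datasourceId/schema with an
// optional tablesFilter, as wired up in the route change above. Auth headers
// and the host are left as placeholders.
async function buildSchemaFromSource(
  datasourceId: string,
  tablesFilter?: string[]
): Promise<BuildSchemaFromSourceResponse> {
  const body: BuildSchemaFromSourceRequest = { tablesFilter }
  const response = await fetch(`/api/datasources/${datasourceId}/schema`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(body),
  })
  if (!response.ok) {
    throw new Error(`Schema build failed: ${response.status}`)
  }
  return (await response.json()) as BuildSchemaFromSourceResponse
}
```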