Merge branch 'master' into fix/automations-ux

deanhannigan 2024-07-11 09:02:38 +01:00 committed by GitHub
commit aa0e77beda
28 changed files with 403 additions and 143 deletions

View File

@@ -108,7 +108,7 @@ jobs:
       - name: Pull testcontainers images
         run: |
           docker pull testcontainers/ryuk:0.5.1 &
-          docker pull budibase/couchdb:v3.2.1-sql &
+          docker pull budibase/couchdb:v3.2.1-sqs &
           docker pull redis &
           wait $(jobs -p)

View File

@@ -1,6 +1,6 @@
 {
   "$schema": "node_modules/lerna/schemas/lerna-schema.json",
-  "version": "2.29.15",
+  "version": "2.29.16",
   "npmClient": "yarn",
   "packages": [
     "packages/*",

View File

@@ -206,3 +206,29 @@ export function pagination<T>(
     nextPage,
   }
 }
+
+export function isSqsEnabledForTenant(): boolean {
+  const tenantId = getTenantId()
+  if (!env.SQS_SEARCH_ENABLE) {
+    return false
+  }
+
+  // This is to guard against the situation in tests where tests pass because
+  // we're not actually using SQS, we're using Lucene and the tests pass due to
+  // parity.
+  if (env.isTest() && env.SQS_SEARCH_ENABLE_TENANTS.length === 0) {
+    throw new Error(
+      "to enable SQS you must specify a list of tenants in the SQS_SEARCH_ENABLE_TENANTS env var"
+    )
+  }
+
+  // Special case to enable all tenants, for testing in QA.
+  if (
+    env.SQS_SEARCH_ENABLE_TENANTS.length === 1 &&
+    env.SQS_SEARCH_ENABLE_TENANTS[0] === "*"
+  ) {
+    return true
+  }
+
+  return env.SQS_SEARCH_ENABLE_TENANTS.includes(tenantId)
+}
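A brief illustrative sketch (not part of the changeset) of how the tenant gating above is driven by the two env vars; the tenant IDs are invented:

  // SQS_SEARCH_ENABLE=1, SQS_SEARCH_ENABLE_TENANTS=tenant-a,tenant-b
  //   -> enabled only when the current tenant context is tenant-a or tenant-b
  // SQS_SEARCH_ENABLE_TENANTS=*
  //   -> enabled for every tenant (the QA escape hatch above)
  import { db as dbCore } from "@budibase/backend-core"

  if (dbCore.isSqsEnabledForTenant()) {
    // route internal row search through SQS/SQLite
  } else {
    // fall back to the existing Lucene search
  }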

View File

@@ -116,6 +116,9 @@ const environment = {
   COUCH_DB_URL: process.env.COUCH_DB_URL || "http://localhost:4005",
   COUCH_DB_SQL_URL: process.env.COUCH_DB_SQL_URL || "http://localhost:4006",
   SQS_SEARCH_ENABLE: process.env.SQS_SEARCH_ENABLE,
+  SQS_SEARCH_ENABLE_TENANTS:
+    process.env.SQS_SEARCH_ENABLE_TENANTS?.split(",") || [],
+  SQS_MIGRATION_ENABLE: process.env.SQS_MIGRATION_ENABLE,
   COUCH_DB_USERNAME: process.env.COUCH_DB_USER,
   COUCH_DB_PASSWORD: process.env.COUCH_DB_PASSWORD,
   GOOGLE_CLIENT_ID: process.env.GOOGLE_CLIENT_ID,

View File

@@ -18,9 +18,10 @@ import {
   CouchFindOptions,
   DatabaseQueryOpts,
   SearchFilters,
-  SearchFilterOperator,
   SearchUsersRequest,
   User,
+  BasicOperator,
+  ArrayOperator,
 } from "@budibase/types"
 import * as context from "../context"
 import { getGlobalDB } from "../context"
@@ -46,9 +47,9 @@ function removeUserPassword(users: User | User[]) {
 export function isSupportedUserSearch(query: SearchFilters) {
   const allowed = [
-    { op: SearchFilterOperator.STRING, key: "email" },
-    { op: SearchFilterOperator.EQUAL, key: "_id" },
-    { op: SearchFilterOperator.ONE_OF, key: "_id" },
+    { op: BasicOperator.STRING, key: "email" },
+    { op: BasicOperator.EQUAL, key: "_id" },
+    { op: ArrayOperator.ONE_OF, key: "_id" },
   ]
   for (let [key, operation] of Object.entries(query)) {
     if (typeof operation !== "object") {

View File

@@ -11,7 +11,7 @@
     Label,
     Multiselect,
   } from "@budibase/bbui"
-  import { FieldType, SearchFilterOperator } from "@budibase/types"
+  import { ArrayOperator, FieldType } from "@budibase/types"
   import { generate } from "shortid"
   import { QueryUtils, Constants } from "@budibase/frontend-core"
   import { getContext } from "svelte"
@@ -268,7 +268,7 @@
         <slot name="binding" {filter} />
       {:else if [FieldType.STRING, FieldType.LONGFORM, FieldType.NUMBER, FieldType.BIGINT, FieldType.FORMULA].includes(filter.type)}
         <Input disabled={filter.noValue} bind:value={filter.value} />
-      {:else if filter.type === FieldType.ARRAY || (filter.type === FieldType.OPTIONS && filter.operator === SearchFilterOperator.ONE_OF)}
+      {:else if filter.type === FieldType.ARRAY || (filter.type === FieldType.OPTIONS && filter.operator === ArrayOperator.ONE_OF)}
         <Multiselect
           disabled={filter.noValue}
           options={getFieldOptions(filter.field)}

View File

@@ -22,6 +22,20 @@ export function isManyToMany(
   return !!(field as ManyToManyRelationshipFieldMetadata).through
 }

+function isCorrectRelationship(
+  relationship: RelationshipsJson,
+  table1: Table,
+  table2: Table,
+  row: Row
+): boolean {
+  const junctionTableId = generateJunctionTableID(table1._id!, table2._id!)
+  const possibleColumns = [
+    `${junctionTableId}.doc1.fieldName`,
+    `${junctionTableId}.doc2.fieldName`,
+  ]
+  return !!possibleColumns.find(col => row[col] === relationship.column)
+}
+
 /**
  * This iterates through the returned rows and works out what elements of the rows
  * actually match up to another row (based on primary keys) - this is pretty specific
@@ -64,7 +78,12 @@ export async function updateRelationshipColumns(
       if (!linked._id) {
         continue
       }
-      columns[relationship.column] = linked
+      if (
+        !opts?.sqs ||
+        isCorrectRelationship(relationship, table, linkedTable, row)
+      ) {
+        columns[relationship.column] = linked
+      }
     }
     for (let [column, related] of Object.entries(columns)) {
       if (!row._id) {
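An illustrative note, with invented IDs, on what the new guard checks: an SQS result row that joined through a junction table also carries that junction's doc1/doc2 fieldName columns, and isCorrectRelationship only keeps a linked row when one of them matches the relationship's column.

  // Hypothetical row shape when two relationships point at the same table;
  // the junction table ID and values are made up for the example.
  const row = {
    name: "test",
    "link_main_related.doc1.fieldName": "related1",
    "link_main_related.doc2.fieldName": "main1",
  }
  // For the "related1" column the check passes, so the linked row is kept; for
  // "related2" it fails, which stops rows from one relationship leaking into
  // the other relationship's column.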

View File

@@ -15,7 +15,7 @@ import { getViews, saveView } from "../view/utils"
 import viewTemplate from "../view/viewBuilder"
 import { cloneDeep } from "lodash/fp"
 import { quotas } from "@budibase/pro"
-import { events, context } from "@budibase/backend-core"
+import { events, context, db as dbCore } from "@budibase/backend-core"
 import {
   AutoFieldSubType,
   ContextUser,
@@ -324,7 +324,7 @@ class TableSaveFunctions {
       importRows: this.importRows,
       user: this.user,
     })
-    if (env.SQS_SEARCH_ENABLE) {
+    if (dbCore.isSqsEnabledForTenant()) {
       await sdk.tables.sqs.addTable(table)
     }
     return table
@@ -518,7 +518,7 @@ export async function internalTableCleanup(table: Table, rows?: Row[]) {
   if (rows) {
     await AttachmentCleanup.tableDelete(table, rows)
   }
-  if (env.SQS_SEARCH_ENABLE) {
+  if (dbCore.isSqsEnabledForTenant()) {
     await sdk.tables.sqs.removeTable(table)
   }
 }

View File

@@ -54,10 +54,13 @@ describe.each([
   let rows: Row[]

   beforeAll(async () => {
+    await config.withCoreEnv({ SQS_SEARCH_ENABLE: "true" }, () => config.init())
     if (isSqs) {
-      envCleanup = config.setEnv({ SQS_SEARCH_ENABLE: "true" })
+      envCleanup = config.setCoreEnv({
+        SQS_SEARCH_ENABLE: "true",
+        SQS_SEARCH_ENABLE_TENANTS: [config.getTenantId()],
+      })
     }
-    await config.init()

     if (config.app?.appId) {
       config.app = await config.api.application.update(config.app?.appId, {
@@ -780,6 +783,32 @@ describe.each([
     it("fails to find nonexistent row", async () => {
       await expectQuery({ oneOf: { name: ["none"] } }).toFindNothing()
     })
+
+    it("can have multiple values for same column", async () => {
+      await expectQuery({
+        oneOf: {
+          name: ["foo", "bar"],
+        },
+      }).toContainExactly([{ name: "foo" }, { name: "bar" }])
+    })
+
+    it("splits comma separated strings", async () => {
+      await expectQuery({
+        oneOf: {
+          // @ts-ignore
+          name: "foo,bar",
+        },
+      }).toContainExactly([{ name: "foo" }, { name: "bar" }])
+    })
+
+    it("trims whitespace", async () => {
+      await expectQuery({
+        oneOf: {
+          // @ts-ignore
+          name: "foo, bar",
+        },
+      }).toContainExactly([{ name: "foo" }, { name: "bar" }])
+    })
   })

   describe("fuzzy", () => {
@@ -1002,6 +1031,32 @@ describe.each([
     it("fails to find nonexistent row", async () => {
       await expectQuery({ oneOf: { age: [2] } }).toFindNothing()
     })
+
+    // I couldn't find a way to make this work in Lucene and given that
+    // we're getting rid of Lucene soon I wasn't inclined to spend time on
+    // it.
+    !isLucene &&
+      it("can convert from a string", async () => {
+        await expectQuery({
+          oneOf: {
+            // @ts-ignore
+            age: "1",
+          },
+        }).toContainExactly([{ age: 1 }])
+      })
+
+    // I couldn't find a way to make this work in Lucene and given that
+    // we're getting rid of Lucene soon I wasn't inclined to spend time on
+    // it.
+    !isLucene &&
+      it("can find multiple values for same column", async () => {
+        await expectQuery({
+          oneOf: {
+            // @ts-ignore
+            age: "1,10",
+          },
+        }).toContainExactly([{ age: 1 }, { age: 10 }])
+      })
   })

   describe("range", () => {
@@ -2085,6 +2140,106 @@ describe.each([
       })
     })

+  isInternal &&
+    describe("relations to same table", () => {
+      let relatedTable: Table, relatedRows: Row[]
+
+      beforeAll(async () => {
+        relatedTable = await createTable(
+          {
+            name: { name: "name", type: FieldType.STRING },
+          },
+          "productCategory"
+        )
+        table = await createTable({
+          name: { name: "name", type: FieldType.STRING },
+          related1: {
+            type: FieldType.LINK,
+            name: "related1",
+            fieldName: "main1",
+            tableId: relatedTable._id!,
+            relationshipType: RelationshipType.MANY_TO_MANY,
+          },
+          related2: {
+            type: FieldType.LINK,
+            name: "related2",
+            fieldName: "main2",
+            tableId: relatedTable._id!,
+            relationshipType: RelationshipType.MANY_TO_MANY,
+          },
+        })
+        relatedRows = await Promise.all([
+          config.api.row.save(relatedTable._id!, { name: "foo" }),
+          config.api.row.save(relatedTable._id!, { name: "bar" }),
+          config.api.row.save(relatedTable._id!, { name: "baz" }),
+          config.api.row.save(relatedTable._id!, { name: "boo" }),
+        ])
+        await Promise.all([
+          config.api.row.save(table._id!, {
+            name: "test",
+            related1: [relatedRows[0]._id!],
+            related2: [relatedRows[1]._id!],
+          }),
+          config.api.row.save(table._id!, {
+            name: "test2",
+            related1: [relatedRows[2]._id!],
+            related2: [relatedRows[3]._id!],
+          }),
+        ])
+      })
+
+      it("should be able to relate to same table", async () => {
+        await expectSearch({
+          query: {},
+        }).toContainExactly([
+          {
+            name: "test",
+            related1: [{ _id: relatedRows[0]._id }],
+            related2: [{ _id: relatedRows[1]._id }],
+          },
+          {
+            name: "test2",
+            related1: [{ _id: relatedRows[2]._id }],
+            related2: [{ _id: relatedRows[3]._id }],
+          },
+        ])
+      })
+
+      isSqs &&
+        it("should be able to filter down to second row with equal", async () => {
+          await expectSearch({
+            query: {
+              equal: {
+                ["related1.name"]: "baz",
+              },
+            },
+          }).toContainExactly([
+            {
+              name: "test2",
+              related1: [{ _id: relatedRows[2]._id }],
+            },
+          ])
+        })
+
+      isSqs &&
+        it("should be able to filter down to first row with not equal", async () => {
+          await expectSearch({
+            query: {
+              notEqual: {
+                ["1:related2.name"]: "bar",
+                ["2:related2.name"]: "baz",
+                ["3:related2.name"]: "boo",
+              },
+            },
+          }).toContainExactly([
+            {
+              name: "test",
+              related1: [{ _id: relatedRows[0]._id }],
+            },
+          ])
+        })
+    })
+
   isInternal &&
     describe("no column error backwards compat", () => {
       beforeAll(async () => {

View File

@@ -86,9 +86,10 @@ describe("/templates", () => {
       async source => {
         const env = {
           SQS_SEARCH_ENABLE: source === "sqs" ? "true" : "false",
+          SQS_SEARCH_ENABLE_TENANTS: [config.getTenantId()],
         }
-        await config.withEnv(env, async () => {
+        await config.withCoreEnv(env, async () => {
           const name = generator.guid().replaceAll("-", "")
           const url = `/${name}`

View File

@@ -9,7 +9,6 @@ import {
   QuotaUsageType,
   Row,
   SaveTableRequest,
-  SearchFilterOperator,
   SortOrder,
   SortType,
   StaticQuotaName,
@@ -19,6 +18,7 @@ import {
   ViewUIFieldMetadata,
   ViewV2,
   SearchResponse,
+  BasicOperator,
 } from "@budibase/types"
 import { generator, mocks } from "@budibase/backend-core/tests"
 import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
@@ -88,10 +88,16 @@ describe.each([
   }

   beforeAll(async () => {
+    await config.withCoreEnv(
+      { SQS_SEARCH_ENABLE: isSqs ? "true" : "false" },
+      () => config.init()
+    )
     if (isSqs) {
-      envCleanup = config.setEnv({ SQS_SEARCH_ENABLE: "true" })
+      envCleanup = config.setCoreEnv({
+        SQS_SEARCH_ENABLE: "true",
+        SQS_SEARCH_ENABLE_TENANTS: [config.getTenantId()],
+      })
     }
-    await config.init()

     if (dsProvider) {
       datasource = await config.createDatasource({
@@ -149,7 +155,7 @@ describe.each([
         primaryDisplay: "id",
         query: [
           {
-            operator: SearchFilterOperator.EQUAL,
+            operator: BasicOperator.EQUAL,
             field: "field",
             value: "value",
           },
@@ -561,7 +567,7 @@ describe.each([
         ...view,
         query: [
           {
-            operator: SearchFilterOperator.EQUAL,
+            operator: BasicOperator.EQUAL,
             field: "newField",
             value: "thatValue",
           },
@@ -589,7 +595,7 @@ describe.each([
         primaryDisplay: "Price",
         query: [
           {
-            operator: SearchFilterOperator.EQUAL,
+            operator: BasicOperator.EQUAL,
             field: generator.word(),
             value: generator.word(),
           },
@@ -673,7 +679,7 @@ describe.each([
         tableId: generator.guid(),
         query: [
           {
-            operator: SearchFilterOperator.EQUAL,
+            operator: BasicOperator.EQUAL,
             field: "newField",
             value: "thatValue",
           },
@@ -1194,7 +1200,7 @@ describe.each([
         name: generator.guid(),
         query: [
           {
-            operator: SearchFilterOperator.EQUAL,
+            operator: BasicOperator.EQUAL,
             field: "two",
             value: "bar2",
           },

View File

@@ -1,6 +1,6 @@
 // This file should never be manually modified, use `yarn add-app-migration` in order to add a new one
-import env from "../environment"
+import { env } from "@budibase/backend-core"
 import { AppMigration } from "."

 import m20240604153647_initial_sqs from "./migrations/20240604153647_initial_sqs"

View File

@@ -1,8 +1,7 @@
-import { context } from "@budibase/backend-core"
+import { context, env } from "@budibase/backend-core"
 import { allLinkDocs } from "../../db/utils"
 import LinkDocumentImpl from "../../db/linkedRows/LinkDocument"
 import sdk from "../../sdk"
-import env from "../../environment"

 const migration = async () => {
   const linkDocs = await allLinkDocs()

View File

@@ -69,11 +69,14 @@ function oldLinkDocument(): Omit<LinkDocument, "tableId"> {
 type SQSEnvVar = "SQS_MIGRATION_ENABLE" | "SQS_SEARCH_ENABLE"

 async function sqsDisabled(envVar: SQSEnvVar, cb: () => Promise<void>) {
-  await config.withEnv({ [envVar]: "" }, cb)
+  await config.withCoreEnv({ [envVar]: "", SQS_SEARCH_ENABLE_TENANTS: [] }, cb)
 }

 async function sqsEnabled(envVar: SQSEnvVar, cb: () => Promise<void>) {
-  await config.withEnv({ [envVar]: "1" }, cb)
+  await config.withCoreEnv(
+    { [envVar]: "1", SQS_SEARCH_ENABLE_TENANTS: [config.getTenantId()] },
+    cb
+  )
 }

 describe.each(["SQS_MIGRATION_ENABLE", "SQS_SEARCH_ENABLE"] as SQSEnvVar[])(

View File

@@ -87,8 +87,6 @@ const environment = {
   SQL_MAX_ROWS: process.env.SQL_MAX_ROWS,
   SQL_LOGGING_ENABLE: process.env.SQL_LOGGING_ENABLE,
   SQL_ALIASING_DISABLE: process.env.SQL_ALIASING_DISABLE,
-  SQS_SEARCH_ENABLE: process.env.SQS_SEARCH_ENABLE,
-  SQS_MIGRATION_ENABLE: process.env.SQS_MIGRATION_ENABLE,
   // flags
   ALLOW_DEV_AUTOMATIONS: process.env.ALLOW_DEV_AUTOMATIONS,
   DISABLE_THREADING: process.env.DISABLE_THREADING,

View File

@@ -2,7 +2,6 @@ import {
   EmptyFilterOption,
   Row,
   RowSearchParams,
-  SearchFilterOperator,
   SearchFilters,
   SearchResponse,
   SortOrder,
@@ -12,11 +11,11 @@ import * as internal from "./search/internal"
 import * as external from "./search/external"
 import { NoEmptyFilterStrings } from "../../../constants"
 import * as sqs from "./search/sqs"
-import env from "../../../environment"
 import { ExportRowsParams, ExportRowsResult } from "./search/types"
 import { dataFilters } from "@budibase/shared-core"
 import sdk from "../../index"
 import { searchInputMapping } from "./search/utils"
+import { db as dbCore } from "@budibase/backend-core"

 export { isValidFilter } from "../../../integrations/utils"
@@ -66,37 +65,12 @@ export function removeEmptyFilters(filters: SearchFilters) {
   return filters
 }

-// The frontend can send single values for array fields sometimes, so to handle
-// this we convert them to arrays at the controller level so that nothing below
-// this has to worry about the non-array values.
-function fixupFilterArrays(filters: SearchFilters) {
-  const arrayFields = [
-    SearchFilterOperator.ONE_OF,
-    SearchFilterOperator.CONTAINS,
-    SearchFilterOperator.NOT_CONTAINS,
-    SearchFilterOperator.CONTAINS_ANY,
-  ]
-  for (const searchField of arrayFields) {
-    const field = filters[searchField]
-    if (field == null) {
-      continue
-    }
-    for (const key of Object.keys(field)) {
-      if (!Array.isArray(field[key])) {
-        field[key] = [field[key]]
-      }
-    }
-  }
-  return filters
-}
-
 export async function search(
   options: RowSearchParams
 ): Promise<SearchResponse<Row>> {
   const isExternalTable = isExternalTableID(options.tableId)
   options.query = removeEmptyFilters(options.query || {})
-  options.query = fixupFilterArrays(options.query)
+  options.query = dataFilters.fixupFilterArrays(options.query)
   if (
     !dataFilters.hasFilters(options.query) &&
     options.query.onEmptyFilter === EmptyFilterOption.RETURN_NONE
@@ -115,7 +89,7 @@ export async function search(
   if (isExternalTable) {
     return external.search(options, table)
-  } else if (env.SQS_SEARCH_ENABLE) {
+  } else if (dbCore.isSqsEnabledForTenant()) {
     return sqs.search(options, table)
   } else {
     return internal.search(options, table)

View File

@@ -5,6 +5,7 @@ import {
   Operation,
   QueryJson,
   RelationshipFieldMetadata,
+  RelationshipsJson,
   Row,
   RowSearchParams,
   SearchFilters,
@@ -30,7 +31,10 @@ import {
   SQLITE_DESIGN_DOC_ID,
   SQS_DATASOURCE_INTERNAL,
 } from "@budibase/backend-core"
-import { CONSTANT_INTERNAL_ROW_COLS } from "../../../../db/utils"
+import {
+  CONSTANT_INTERNAL_ROW_COLS,
+  generateJunctionTableID,
+} from "../../../../db/utils"
 import AliasTables from "../sqlAlias"
 import { outputProcessing } from "../../../../utilities/rowProcessor"
 import pick from "lodash/pick"
@@ -52,28 +56,35 @@ const USER_COLUMN_PREFIX_REGEX = new RegExp(
 function buildInternalFieldList(
   table: Table,
   tables: Table[],
-  opts: { relationships: boolean } = { relationships: true }
+  opts?: { relationships?: RelationshipsJson[] }
 ) {
   let fieldList: string[] = []
+  const addJunctionFields = (relatedTable: Table, fields: string[]) => {
+    fields.forEach(field => {
+      fieldList.push(
+        `${generateJunctionTableID(table._id!, relatedTable._id!)}.${field}`
+      )
+    })
+  }
   fieldList = fieldList.concat(
     CONSTANT_INTERNAL_ROW_COLS.map(col => `${table._id}.${col}`)
   )
   for (let col of Object.values(table.schema)) {
     const isRelationship = col.type === FieldType.LINK
-    if (!opts.relationships && isRelationship) {
+    if (!opts?.relationships && isRelationship) {
       continue
     }
     if (isRelationship) {
       const linkCol = col as RelationshipFieldMetadata
       const relatedTable = tables.find(table => table._id === linkCol.tableId)!
-      fieldList = fieldList.concat(
-        buildInternalFieldList(relatedTable, tables, { relationships: false })
-      )
+      // no relationships provided, don't go more than a layer deep
+      fieldList = fieldList.concat(buildInternalFieldList(relatedTable, tables))
+      addJunctionFields(relatedTable, ["doc1.fieldName", "doc2.fieldName"])
     } else {
       fieldList.push(`${table._id}.${mapToUserColumn(col.name)}`)
     }
   }
-  return fieldList
+  return [...new Set(fieldList)]
 }

 function cleanupFilters(
@@ -165,18 +176,27 @@ function reverseUserColumnMapping(rows: Row[]) {
   })
 }

-function runSqlQuery(json: QueryJson, tables: Table[]): Promise<Row[]>
 function runSqlQuery(
   json: QueryJson,
   tables: Table[],
+  relationships: RelationshipsJson[]
+): Promise<Row[]>
+function runSqlQuery(
+  json: QueryJson,
+  tables: Table[],
+  relationships: RelationshipsJson[],
   opts: { countTotalRows: true }
 ): Promise<number>
 async function runSqlQuery(
   json: QueryJson,
   tables: Table[],
+  relationships: RelationshipsJson[],
   opts?: { countTotalRows?: boolean }
 ) {
-  const alias = new AliasTables(tables.map(table => table.name))
+  const relationshipJunctionTableIds = relationships.map(rel => rel.through!)
+  const alias = new AliasTables(
+    tables.map(table => table.name).concat(relationshipJunctionTableIds)
+  )
   if (opts?.countTotalRows) {
     json.endpoint.operation = Operation.COUNT
   }
@@ -193,8 +213,13 @@ async function runSqlQuery(
   let bindings = query.bindings

   // quick hack for docIds
-  sql = sql.replace(/`doc1`.`rowId`/g, "`doc1.rowId`")
-  sql = sql.replace(/`doc2`.`rowId`/g, "`doc2.rowId`")
+  const fixJunctionDocs = (field: string) =>
+    ["doc1", "doc2"].forEach(doc => {
+      sql = sql.replaceAll(`\`${doc}\`.\`${field}\``, `\`${doc}.${field}\``)
+    })
+  fixJunctionDocs("rowId")
+  fixJunctionDocs("fieldName")

   if (Array.isArray(query)) {
     throw new Error("SQS cannot currently handle multiple queries")
@@ -260,7 +285,7 @@ export async function search(
       columnPrefix: USER_COLUMN_PREFIX,
     },
     resource: {
-      fields: buildInternalFieldList(table, allTables),
+      fields: buildInternalFieldList(table, allTables, { relationships }),
     },
     relationships,
   }
@@ -292,11 +317,11 @@ export async function search(
   try {
     const queries: Promise<Row[] | number>[] = []
-    queries.push(runSqlQuery(request, allTables))
+    queries.push(runSqlQuery(request, allTables, relationships))
     if (options.countRows) {
       // get the total count of rows
       queries.push(
-        runSqlQuery(request, allTables, {
+        runSqlQuery(request, allTables, relationships, {
           countTotalRows: true,
         })
       )
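A hedged aside on the two changes above: the field list now also names the junction table's doc1/doc2 fieldName columns, and the junction table IDs (rel.through) are passed to AliasTables so they get aliased in the generated SQL like ordinary tables. A sketch with shortened, invented IDs:

  // buildInternalFieldList(table, allTables, { relationships }) might produce:
  // [
  //   "ta_main._id", "ta_main._rev", ...,   // constant internal columns
  //   "ta_main.name",                       // mapped user columns
  //   "ta_related.name",                    // one layer of related fields
  //   "link_main_related.doc1.fieldName",   // junction metadata read by
  //   "link_main_related.doc2.fieldName",   //   isCorrectRelationship()
  // ]
  // The closing [...new Set(fieldList)] dedupe matters when two relationships
  // share the same junction table.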

View File

@@ -31,10 +31,17 @@ describe.each([
   let rows: Row[]

   beforeAll(async () => {
+    await config.withCoreEnv(
+      { SQS_SEARCH_ENABLE: isSqs ? "true" : "false" },
+      () => config.init()
+    )
     if (isSqs) {
-      envCleanup = config.setEnv({ SQS_SEARCH_ENABLE: "true" })
+      envCleanup = config.setCoreEnv({
+        SQS_SEARCH_ENABLE: "true",
+        SQS_SEARCH_ENABLE_TENANTS: [config.getTenantId()],
+      })
     }
-    await config.init()

     if (dsProvider) {
       datasource = await config.createDatasource({

View File

@@ -111,7 +111,8 @@ export default class AliasTables {
   aliasField(field: string) {
     const tableNames = this.tableNames
     if (field.includes(".")) {
-      const [tableName, column] = field.split(".")
+      const [tableName, ...rest] = field.split(".")
+      const column = rest.join(".")
       const foundTableName = tableNames.find(name => {
         const idx = tableName.indexOf(name)
         if (idx === -1 || idx > 1) {
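A small illustration (field value invented) of why aliasField now splits on the first dot only: junction fields contain two dots, and the old destructuring dropped everything after the second one.

  const field = "link_main_related.doc1.fieldName" // hypothetical junction field
  const [tableName, ...rest] = field.split(".")
  const column = rest.join(".")
  // tableName === "link_main_related", column === "doc1.fieldName"
  // the previous `const [tableName, column] = field.split(".")` would have
  // left column as just "doc1", losing the fieldName suffix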

View File

@@ -1,4 +1,4 @@
-import { context } from "@budibase/backend-core"
+import { context, db as dbCore, env } from "@budibase/backend-core"
 import { getTableParams } from "../../../db/utils"
 import {
   breakExternalTableId,
@@ -15,7 +15,6 @@ import {
 } from "@budibase/types"
 import datasources from "../datasources"
 import sdk from "../../../sdk"
-import env from "../../../environment"

 export function processTable(table: Table): Table {
   if (!table) {
@@ -34,7 +33,7 @@ export function processTable(table: Table): Table {
     sourceId: table.sourceId || INTERNAL_TABLE_SOURCE_ID,
     sourceType: TableSourceType.INTERNAL,
   }
-  if (env.SQS_SEARCH_ENABLE) {
+  if (dbCore.isSqsEnabledForTenant()) {
     processed.sql = !!env.SQS_SEARCH_ENABLE
   }
   return processed

View File

@@ -176,9 +176,22 @@ export async function addTable(table: Table) {
 export async function removeTable(table: Table) {
   const db = context.getAppDB()
   try {
-    const definition = await db.get<SQLiteDefinition>(SQLITE_DESIGN_DOC_ID)
-    if (definition.sql?.tables?.[table._id!]) {
-      delete definition.sql.tables[table._id!]
+    const [tables, definition] = await Promise.all([
+      tablesSdk.getAllInternalTables(),
+      db.get<SQLiteDefinition>(SQLITE_DESIGN_DOC_ID),
+    ])
+    const tableIds = tables
+      .map(tbl => tbl._id!)
+      .filter(id => !id.includes(table._id!))
+    let cleanup = false
+    for (let tableKey of Object.keys(definition.sql?.tables || {})) {
+      // there are no tables matching anymore
+      if (!tableIds.find(id => tableKey.includes(id))) {
+        delete definition.sql.tables[tableKey]
+        cleanup = true
+      }
+    }
+    if (cleanup) {
       await db.put(definition)
       // make sure SQS is cleaned up, tables removed
       await db.sqlDiskCleanup()

View File

@@ -39,7 +39,9 @@ describe("should be able to re-write attachment URLs", () => {
     }

     const db = dbCore.getDB(config.getAppId())
-    await sdk.backups.updateAttachmentColumns(db.name, db)
+    await config.doInContext(config.getAppId(), () =>
+      sdk.backups.updateAttachmentColumns(db.name, db)
+    )

     return {
       db,

View File

@@ -245,10 +245,10 @@ export default class TestConfiguration {
     }
   }

-  async withEnv(newEnvVars: Partial<typeof env>, f: () => Promise<void>) {
+  async withEnv<T>(newEnvVars: Partial<typeof env>, f: () => Promise<T>) {
     let cleanup = this.setEnv(newEnvVars)
     try {
-      await f()
+      return await f()
     } finally {
       cleanup()
     }
@@ -273,13 +273,13 @@ export default class TestConfiguration {
     }
   }

-  async withCoreEnv(
+  async withCoreEnv<T>(
     newEnvVars: Partial<typeof coreEnv>,
-    f: () => Promise<void>
+    f: () => Promise<T>
   ) {
     let cleanup = this.setCoreEnv(newEnvVars)
     try {
-      await f()
+      return await f()
     } finally {
       cleanup()
     }
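A short usage sketch (illustrative only): with the new <T> generic the helpers return whatever the wrapped callback resolves to, instead of always resolving to void.

  const initResult = await config.withCoreEnv(
    { SQS_SEARCH_ENABLE: "true" },
    () => config.init() // the resolved value is now passed back to the caller
  )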

View File

@@ -6,6 +6,7 @@ import {
   SearchFilter,
   SearchFilters,
   SearchQueryFields,
+  ArrayOperator,
   SearchFilterOperator,
   SortType,
   FieldConstraints,
@@ -14,11 +15,13 @@ import {
   EmptyFilterOption,
   SearchResponse,
   Table,
+  BasicOperator,
+  RangeOperator,
 } from "@budibase/types"
 import dayjs from "dayjs"
 import { OperatorOptions, SqlNumberTypeRangeMap } from "./constants"
 import { deepGet, schema } from "./helpers"
-import _ from "lodash"
+import { isPlainObject, isEmpty } from "lodash"

 const HBS_REGEX = /{{([^{].*?)}}/g
@@ -323,6 +326,32 @@ export const buildQuery = (filter: SearchFilter[]) => {
   return query
 }

+// The frontend can send single values for array fields sometimes, so to handle
+// this we convert them to arrays at the controller level so that nothing below
+// this has to worry about the non-array values.
+export function fixupFilterArrays(filters: SearchFilters) {
+  for (const searchField of Object.values(ArrayOperator)) {
+    const field = filters[searchField]
+    if (field == null || !isPlainObject(field)) {
+      continue
+    }
+
+    for (const key of Object.keys(field)) {
+      if (Array.isArray(field[key])) {
+        continue
+      }
+
+      const value = field[key] as any
+      if (typeof value === "string") {
+        field[key] = value.split(",").map((x: string) => x.trim())
+      } else {
+        field[key] = [value]
+      }
+    }
+  }
+
+  return filters
+}
+
 export const search = (
   docs: Record<string, any>[],
   query: RowSearchParams
@@ -356,6 +385,7 @@ export const runQuery = (docs: Record<string, any>[], query: SearchFilters) => {
   }

   query = cleanupQuery(query)
+  query = fixupFilterArrays(query)

   if (
     !hasFilters(query) &&
@@ -382,7 +412,7 @@ export const runQuery = (docs: Record<string, any>[], query: SearchFilters) => {
   }

   const stringMatch = match(
-    SearchFilterOperator.STRING,
+    BasicOperator.STRING,
     (docValue: any, testValue: any) => {
       if (!(typeof docValue === "string")) {
         return false
@@ -395,7 +425,7 @@ export const runQuery = (docs: Record<string, any>[], query: SearchFilters) => {
   )

   const fuzzyMatch = match(
-    SearchFilterOperator.FUZZY,
+    BasicOperator.FUZZY,
     (docValue: any, testValue: any) => {
       if (!(typeof docValue === "string")) {
         return false
@@ -408,17 +438,17 @@ export const runQuery = (docs: Record<string, any>[], query: SearchFilters) => {
   )

   const rangeMatch = match(
-    SearchFilterOperator.RANGE,
+    RangeOperator.RANGE,
     (docValue: any, testValue: any) => {
       if (docValue == null || docValue === "") {
         return false
       }
-      if (_.isObject(testValue.low) && _.isEmpty(testValue.low)) {
+      if (isPlainObject(testValue.low) && isEmpty(testValue.low)) {
        testValue.low = undefined
      }
-      if (_.isObject(testValue.high) && _.isEmpty(testValue.high)) {
+      if (isPlainObject(testValue.high) && isEmpty(testValue.high)) {
        testValue.high = undefined
      }
@@ -497,11 +527,8 @@ export const runQuery = (docs: Record<string, any>[], query: SearchFilters) => {
     (...args: T): boolean =>
       !f(...args)

-  const equalMatch = match(SearchFilterOperator.EQUAL, _valueMatches)
-  const notEqualMatch = match(
-    SearchFilterOperator.NOT_EQUAL,
-    not(_valueMatches)
-  )
+  const equalMatch = match(BasicOperator.EQUAL, _valueMatches)
+  const notEqualMatch = match(BasicOperator.NOT_EQUAL, not(_valueMatches))

   const _empty = (docValue: any) => {
     if (typeof docValue === "string") {
@@ -516,26 +543,24 @@ export const runQuery = (docs: Record<string, any>[], query: SearchFilters) => {
     return docValue == null
   }

-  const emptyMatch = match(SearchFilterOperator.EMPTY, _empty)
-  const notEmptyMatch = match(SearchFilterOperator.NOT_EMPTY, not(_empty))
+  const emptyMatch = match(BasicOperator.EMPTY, _empty)
+  const notEmptyMatch = match(BasicOperator.NOT_EMPTY, not(_empty))

-  const oneOf = match(
-    SearchFilterOperator.ONE_OF,
-    (docValue: any, testValue: any) => {
-      if (typeof testValue === "string") {
-        testValue = testValue.split(",")
-        if (typeof docValue === "number") {
-          testValue = testValue.map((item: string) => parseFloat(item))
-        }
-      }
-      if (!Array.isArray(testValue)) {
-        return false
-      }
-      return testValue.some(item => _valueMatches(docValue, item))
-    }
-  )
+  const oneOf = match(ArrayOperator.ONE_OF, (docValue: any, testValue: any) => {
+    if (typeof testValue === "string") {
+      testValue = testValue.split(",")
+    }
+
+    if (typeof docValue === "number") {
+      testValue = testValue.map((item: string) => parseFloat(item))
+    }
+
+    if (!Array.isArray(testValue)) {
+      return false
+    }
+
+    return testValue.some(item => _valueMatches(docValue, item))
+  })

   const _contains =
     (f: "some" | "every") => (docValue: any, testValue: any) => {
@@ -562,7 +587,7 @@ export const runQuery = (docs: Record<string, any>[], query: SearchFilters) => {
     }

   const contains = match(
-    SearchFilterOperator.CONTAINS,
+    ArrayOperator.CONTAINS,
     (docValue: any, testValue: any) => {
       if (Array.isArray(testValue) && testValue.length === 0) {
         return true
@@ -571,7 +596,7 @@ export const runQuery = (docs: Record<string, any>[], query: SearchFilters) => {
     }
   )
   const notContains = match(
-    SearchFilterOperator.NOT_CONTAINS,
+    ArrayOperator.NOT_CONTAINS,
     (docValue: any, testValue: any) => {
       // Not sure if this is logically correct, but at the time this code was
       // written the search endpoint behaved this way and we wanted to make this
@@ -582,10 +607,7 @@ export const runQuery = (docs: Record<string, any>[], query: SearchFilters) => {
       return not(_contains("every"))(docValue, testValue)
     }
   )
-  const containsAny = match(
-    SearchFilterOperator.CONTAINS_ANY,
-    _contains("some")
-  )
+  const containsAny = match(ArrayOperator.CONTAINS_ANY, _contains("some"))

   const docMatch = (doc: Record<string, any>) => {
     const filterFunctions = {
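A small usage sketch (illustrative) of the fixupFilterArrays helper that now lives here in shared-core and is also called from the server's search SDK as dataFilters.fixupFilterArrays:

  import { dataFilters } from "@budibase/shared-core"

  const fixed = dataFilters.fixupFilterArrays({
    oneOf: { name: "foo, bar" },  // becomes { name: ["foo", "bar"] }, split and trimmed
    contains: { tags: "urgent" }, // becomes { tags: ["urgent"] }
  })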

View File

@@ -3,20 +3,28 @@ import { Row, Table, DocumentType } from "../documents"
 import { SortOrder, SortType } from "../api"
 import { Knex } from "knex"

-export enum SearchFilterOperator {
-  STRING = "string",
-  FUZZY = "fuzzy",
-  RANGE = "range",
+export enum BasicOperator {
   EQUAL = "equal",
   NOT_EQUAL = "notEqual",
   EMPTY = "empty",
   NOT_EMPTY = "notEmpty",
-  ONE_OF = "oneOf",
+  FUZZY = "fuzzy",
+  STRING = "string",
+}
+
+export enum ArrayOperator {
   CONTAINS = "contains",
   NOT_CONTAINS = "notContains",
   CONTAINS_ANY = "containsAny",
+  ONE_OF = "oneOf",
 }

+export enum RangeOperator {
+  RANGE = "range",
+}
+
+export type SearchFilterOperator = BasicOperator | ArrayOperator | RangeOperator
+
 export enum InternalSearchFilterOperator {
   COMPLEX_ID_OPERATOR = "_complexIdOperator",
 }
@@ -52,17 +60,17 @@ export interface SearchFilters {
   // allows just fuzzy to be or - all the fuzzy/like parameters
   fuzzyOr?: boolean
   onEmptyFilter?: EmptyFilterOption
-  [SearchFilterOperator.STRING]?: BasicFilter<string>
-  [SearchFilterOperator.FUZZY]?: BasicFilter<string>
-  [SearchFilterOperator.RANGE]?: RangeFilter
-  [SearchFilterOperator.EQUAL]?: BasicFilter
-  [SearchFilterOperator.NOT_EQUAL]?: BasicFilter
-  [SearchFilterOperator.EMPTY]?: BasicFilter
-  [SearchFilterOperator.NOT_EMPTY]?: BasicFilter
-  [SearchFilterOperator.ONE_OF]?: ArrayFilter
-  [SearchFilterOperator.CONTAINS]?: ArrayFilter
-  [SearchFilterOperator.NOT_CONTAINS]?: ArrayFilter
-  [SearchFilterOperator.CONTAINS_ANY]?: ArrayFilter
+  [BasicOperator.STRING]?: BasicFilter<string>
+  [BasicOperator.FUZZY]?: BasicFilter<string>
+  [RangeOperator.RANGE]?: RangeFilter
+  [BasicOperator.EQUAL]?: BasicFilter
+  [BasicOperator.NOT_EQUAL]?: BasicFilter
+  [BasicOperator.EMPTY]?: BasicFilter
+  [BasicOperator.NOT_EMPTY]?: BasicFilter
+  [ArrayOperator.ONE_OF]?: ArrayFilter
+  [ArrayOperator.CONTAINS]?: ArrayFilter
+  [ArrayOperator.NOT_CONTAINS]?: ArrayFilter
+  [ArrayOperator.CONTAINS_ANY]?: ArrayFilter
   // specific to SQS/SQLite search on internal tables this can be used
   // to make sure the documents returned are always filtered down to a
   // specific document type (such as just rows)
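An illustrative sketch (not from the diff) of how the split composes: SearchFilterOperator is now a union of the three enums, so existing SearchFilters keys keep working while call sites can narrow to one operator family.

  import { ArrayOperator, BasicOperator, SearchFilters } from "@budibase/types"

  const query: SearchFilters = {
    [BasicOperator.EQUAL]: { name: "foo" },
    [ArrayOperator.ONE_OF]: { _id: ["id-one", "id-two"] },
  }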

View File

@@ -24,7 +24,7 @@ async function isSqsAvailable() {
 }

 async function isSqsMissing() {
-  return env.SQS_SEARCH_ENABLE && !(await isSqsAvailable())
+  return coreEnv.SQS_SEARCH_ENABLE && !(await isSqsAvailable())
 }

 export const fetch = async (ctx: Ctx) => {

View File

@@ -5,8 +5,7 @@ const compress = require("koa-compress")
 import zlib from "zlib"
 import { routes } from "./routes"
 import { middleware as pro, sdk } from "@budibase/pro"
-import { auth, middleware } from "@budibase/backend-core"
-import env from "../environment"
+import { auth, middleware, env } from "@budibase/backend-core"

 if (env.SQS_SEARCH_ENABLE) {
   sdk.auditLogs.useSQLSearch()

View File

@@ -46,7 +46,6 @@ const environment = {
   DISABLE_ACCOUNT_PORTAL: process.env.DISABLE_ACCOUNT_PORTAL,
   SMTP_FALLBACK_ENABLED: process.env.SMTP_FALLBACK_ENABLED,
   DISABLE_DEVELOPER_LICENSE: process.env.DISABLE_DEVELOPER_LICENSE,
-  SQS_SEARCH_ENABLE: process.env.SQS_SEARCH_ENABLE,
   BUDIBASE_ENVIRONMENT: process.env.BUDIBASE_ENVIRONMENT,
   // smtp
   SMTP_USER: process.env.SMTP_USER,