Merge pull request #14835 from Budibase/chore/sqs-always-on

Remove SQS flag
Adria Navarro 2024-11-18 10:53:23 +01:00 committed by GitHub
commit 36f93dd452
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
22 changed files with 2895 additions and 3396 deletions
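Every hunk below follows the same pattern: code paths that were gated behind the SQS feature flag now run unconditionally, the Lucene fallbacks are deleted, and the flag-specific test setup is removed. A minimal TypeScript sketch of that pattern, adapted from the table-save hook changed further down (the function names and import paths here are illustrative, not a verbatim excerpt of the Budibase source):

    import { features } from "@budibase/backend-core"
    import { FeatureFlag, Table } from "@budibase/types"
    import sdk from "../sdk" // illustrative path

    // Before this commit: SQS/SQLite bookkeeping only ran when the flag was enabled.
    async function afterTableSaveBefore(table: Table) {
      if (await features.flags.isEnabled(FeatureFlag.SQS)) {
        await sdk.tables.sqs.addTable(table)
      }
    }

    // After this commit: the flag check is gone and the SQS path always runs.
    async function afterTableSaveAfter(table: Table) {
      await sdk.tables.sqs.addTable(table)
    }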

View File

@ -10,7 +10,6 @@ import {
DatabaseQueryOpts,
DBError,
Document,
FeatureFlag,
isDocument,
RowResponse,
RowValue,
@ -27,7 +26,6 @@ import { SQLITE_DESIGN_DOC_ID } from "../../constants"
import { DDInstrumentedDatabase } from "../instrumentation"
import { checkSlashesInUrl } from "../../helpers"
import { sqlLog } from "../../sql/utils"
import { flags } from "../../features"
const DATABASE_NOT_FOUND = "Database does not exist."
@ -456,10 +454,7 @@ export class DatabaseImpl implements Database {
}
async destroy() {
if (
(await flags.isEnabled(FeatureFlag.SQS)) &&
(await this.exists(SQLITE_DESIGN_DOC_ID))
) {
if (await this.exists(SQLITE_DESIGN_DOC_ID)) {
// delete the design document, then run the cleanup operation
const definition = await this.get<SQLiteDefinition>(SQLITE_DESIGN_DOC_ID)
// remove all tables - save the definition then trigger a cleanup

View File

@ -269,7 +269,6 @@ export class FlagSet<V extends Flag<any>, T extends { [key: string]: V }> {
export const flags = new FlagSet({
[FeatureFlag.DEFAULT_VALUES]: Flag.boolean(true),
[FeatureFlag.AUTOMATION_BRANCHING]: Flag.boolean(true),
[FeatureFlag.SQS]: Flag.boolean(true),
[FeatureFlag.ENRICHED_RELATIONSHIPS]: Flag.boolean(true),
[FeatureFlag.AI_CUSTOM_CONFIGS]: Flag.boolean(true),
[FeatureFlag.BUDIBASE_AI]: Flag.boolean(true),

@ -1 +1 @@
Subproject commit 80770215c6159e4d47f3529fd02e74bc8ad07543
Subproject commit a56696a4af5667617746600fc75fe6a01744b692

View File

@ -15,12 +15,11 @@ import { getViews, saveView } from "../view/utils"
import viewTemplate from "../view/viewBuilder"
import { cloneDeep } from "lodash/fp"
import { quotas } from "@budibase/pro"
import { context, events, features, HTTPError } from "@budibase/backend-core"
import { context, events, HTTPError } from "@budibase/backend-core"
import {
AutoFieldSubType,
Database,
Datasource,
FeatureFlag,
FieldSchema,
FieldType,
NumberFieldMetadata,
@ -336,9 +335,8 @@ class TableSaveFunctions {
importRows: this.importRows,
userId: this.userId,
})
if (await features.flags.isEnabled(FeatureFlag.SQS)) {
await sdk.tables.sqs.addTable(table)
}
return table
}
@ -530,10 +528,9 @@ export async function internalTableCleanup(table: Table, rows?: Row[]) {
if (rows) {
await AttachmentCleanup.tableDelete(table, rows)
}
if (await features.flags.isEnabled(FeatureFlag.SQS)) {
await sdk.tables.sqs.removeTable(table)
}
}
const _TableSaveFunctions = TableSaveFunctions
export { _TableSaveFunctions as TableSaveFunctions }

View File

@ -16,7 +16,7 @@ jest.mock("../../../utilities/redis", () => ({
import { checkBuilderEndpoint } from "./utilities/TestFunctions"
import * as setup from "./utilities"
import { AppStatus } from "../../../db/utils"
import { events, utils, context, features } from "@budibase/backend-core"
import { events, utils, context } from "@budibase/backend-core"
import env from "../../../environment"
import { type App, BuiltinPermissionID } from "@budibase/types"
import tk from "timekeeper"
@ -355,21 +355,6 @@ describe("/applications", () => {
expect(events.app.deleted).toHaveBeenCalledTimes(1)
expect(events.app.unpublished).toHaveBeenCalledTimes(1)
})
it("should be able to delete an app after SQS has been set but app hasn't been migrated", async () => {
const prodAppId = app.appId.replace("_dev", "")
nock("http://localhost:10000")
.delete(`/api/global/roles/${prodAppId}`)
.reply(200, {})
await features.testutils.withFeatureFlags(
"*",
{ SQS: true },
async () => {
await config.api.application.delete(app.appId)
}
)
})
})
describe("POST /api/applications/:appId/duplicate", () => {

View File

@ -19,17 +19,14 @@ import {
import { quotas } from "@budibase/pro"
import {
AIOperationEnum,
AttachmentFieldMetadata,
AutoFieldSubType,
Datasource,
DateFieldMetadata,
DeleteRow,
FieldSchema,
FieldType,
BBReferenceFieldSubType,
FormulaType,
INTERNAL_TABLE_SOURCE_ID,
NumberFieldMetadata,
QuotaUsageType,
RelationshipType,
Row,
@ -90,8 +87,7 @@ async function waitForEvent(
}
describe.each([
["lucene", undefined],
["sqs", undefined],
["internal", undefined],
[DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
[DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
[DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
@ -99,8 +95,6 @@ describe.each([
[DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)],
])("/rows (%s)", (providerType, dsProvider) => {
const isInternal = dsProvider === undefined
const isLucene = providerType === "lucene"
const isSqs = providerType === "sqs"
const isMSSQL = providerType === DatabaseName.SQL_SERVER
const isOracle = providerType === DatabaseName.ORACLE
const config = setup.getConfig()
@ -108,15 +102,9 @@ describe.each([
let table: Table
let datasource: Datasource | undefined
let client: Knex | undefined
let envCleanup: (() => void) | undefined
beforeAll(async () => {
await features.testutils.withFeatureFlags("*", { SQS: true }, () =>
config.init()
)
envCleanup = features.testutils.setFeatureFlags("*", {
SQS: isSqs,
})
await config.init()
if (dsProvider) {
const rawDatasource = await dsProvider
@ -129,9 +117,6 @@ describe.each([
afterAll(async () => {
setup.afterAll()
if (envCleanup) {
envCleanup()
}
})
function saveTableRequest(
@ -381,185 +366,6 @@ describe.each([
expect(ids).toEqual(expect.arrayContaining(sequence))
})
isLucene &&
it("row values are coerced", async () => {
const str: FieldSchema = {
type: FieldType.STRING,
name: "str",
constraints: { type: "string", presence: false },
}
const singleAttachment: FieldSchema = {
type: FieldType.ATTACHMENT_SINGLE,
name: "single attachment",
constraints: { presence: false },
}
const attachmentList: AttachmentFieldMetadata = {
type: FieldType.ATTACHMENTS,
name: "attachments",
constraints: { type: "array", presence: false },
}
const signature: FieldSchema = {
type: FieldType.SIGNATURE_SINGLE,
name: "signature",
constraints: { presence: false },
}
const bool: FieldSchema = {
type: FieldType.BOOLEAN,
name: "boolean",
constraints: { type: "boolean", presence: false },
}
const number: NumberFieldMetadata = {
type: FieldType.NUMBER,
name: "str",
constraints: { type: "number", presence: false },
}
const datetime: DateFieldMetadata = {
type: FieldType.DATETIME,
name: "datetime",
constraints: {
type: "string",
presence: false,
datetime: { earliest: "", latest: "" },
},
}
const arrayField: FieldSchema = {
type: FieldType.ARRAY,
constraints: {
type: JsonFieldSubType.ARRAY,
presence: false,
inclusion: ["One", "Two", "Three"],
},
name: "Sample Tags",
sortable: false,
}
const optsField: FieldSchema = {
name: "Sample Opts",
type: FieldType.OPTIONS,
constraints: {
type: "string",
presence: false,
inclusion: ["Alpha", "Beta", "Gamma"],
},
}
const table = await config.api.table.save(
saveTableRequest({
schema: {
name: str,
stringUndefined: str,
stringNull: str,
stringString: str,
numberEmptyString: number,
numberNull: number,
numberUndefined: number,
numberString: number,
numberNumber: number,
datetimeEmptyString: datetime,
datetimeNull: datetime,
datetimeUndefined: datetime,
datetimeString: datetime,
datetimeDate: datetime,
boolNull: bool,
boolEmpty: bool,
boolUndefined: bool,
boolString: bool,
boolBool: bool,
singleAttachmentNull: singleAttachment,
singleAttachmentUndefined: singleAttachment,
attachmentListNull: attachmentList,
attachmentListUndefined: attachmentList,
attachmentListEmpty: attachmentList,
attachmentListEmptyArrayStr: attachmentList,
signatureNull: signature,
signatureUndefined: signature,
arrayFieldEmptyArrayStr: arrayField,
arrayFieldArrayStrKnown: arrayField,
arrayFieldNull: arrayField,
arrayFieldUndefined: arrayField,
optsFieldEmptyStr: optsField,
optsFieldUndefined: optsField,
optsFieldNull: optsField,
optsFieldStrKnown: optsField,
},
})
)
const datetimeStr = "1984-04-20T00:00:00.000Z"
const row = await config.api.row.save(table._id!, {
name: "Test Row",
stringUndefined: undefined,
stringNull: null,
stringString: "i am a string",
numberEmptyString: "",
numberNull: null,
numberUndefined: undefined,
numberString: "123",
numberNumber: 123,
datetimeEmptyString: "",
datetimeNull: null,
datetimeUndefined: undefined,
datetimeString: datetimeStr,
datetimeDate: new Date(datetimeStr),
boolNull: null,
boolEmpty: "",
boolUndefined: undefined,
boolString: "true",
boolBool: true,
tableId: table._id,
singleAttachmentNull: null,
singleAttachmentUndefined: undefined,
attachmentListNull: null,
attachmentListUndefined: undefined,
attachmentListEmpty: "",
attachmentListEmptyArrayStr: "[]",
signatureNull: null,
signatureUndefined: undefined,
arrayFieldEmptyArrayStr: "[]",
arrayFieldUndefined: undefined,
arrayFieldNull: null,
arrayFieldArrayStrKnown: "['One']",
optsFieldEmptyStr: "",
optsFieldUndefined: undefined,
optsFieldNull: null,
optsFieldStrKnown: "Alpha",
})
expect(row.stringUndefined).toBe(undefined)
expect(row.stringNull).toBe(null)
expect(row.stringString).toBe("i am a string")
expect(row.numberEmptyString).toBe(null)
expect(row.numberNull).toBe(null)
expect(row.numberUndefined).toBe(undefined)
expect(row.numberString).toBe(123)
expect(row.numberNumber).toBe(123)
expect(row.datetimeEmptyString).toBe(null)
expect(row.datetimeNull).toBe(null)
expect(row.datetimeUndefined).toBe(undefined)
expect(row.datetimeString).toBe(new Date(datetimeStr).toISOString())
expect(row.datetimeDate).toBe(new Date(datetimeStr).toISOString())
expect(row.boolNull).toBe(null)
expect(row.boolEmpty).toBe(null)
expect(row.boolUndefined).toBe(undefined)
expect(row.boolString).toBe(true)
expect(row.boolBool).toBe(true)
expect(row.singleAttachmentNull).toEqual(null)
expect(row.singleAttachmentUndefined).toBe(undefined)
expect(row.attachmentListNull).toEqual([])
expect(row.attachmentListUndefined).toBe(undefined)
expect(row.attachmentListEmpty).toEqual([])
expect(row.attachmentListEmptyArrayStr).toEqual([])
expect(row.signatureNull).toEqual(null)
expect(row.signatureUndefined).toBe(undefined)
expect(row.arrayFieldEmptyArrayStr).toEqual([])
expect(row.arrayFieldNull).toEqual([])
expect(row.arrayFieldUndefined).toEqual(undefined)
expect(row.optsFieldEmptyStr).toEqual(null)
expect(row.optsFieldUndefined).toEqual(undefined)
expect(row.optsFieldNull).toEqual(null)
expect(row.arrayFieldArrayStrKnown).toEqual(["One"])
expect(row.optsFieldStrKnown).toEqual("Alpha")
})
isInternal &&
it("doesn't allow creating in user table", async () => {
const response = await config.api.row.save(
@ -1023,7 +829,6 @@ describe.each([
})
})
!isLucene &&
describe("relations to same table", () => {
let relatedRows: Row[]
@ -1224,7 +1029,6 @@ describe.each([
expect(rows).toHaveLength(1)
})
!isLucene &&
describe("relations to same table", () => {
let relatedRows: Row[]
@ -1628,7 +1432,6 @@ describe.each([
expect(res.length).toEqual(2)
})
!isLucene &&
describe("relations to same table", () => {
let relatedRows: Row[]
@ -3422,7 +3225,7 @@ describe.each([
)
})
isSqs &&
isInternal &&
describe("AI fields", () => {
let table: Table

View File

@ -8,7 +8,6 @@ import {
context,
db as dbCore,
docIds,
features,
MAX_VALID_DATE,
MIN_VALID_DATE,
SQLITE_DESIGN_DOC_ID,
@ -64,7 +63,6 @@ jest.mock("@budibase/pro", () => ({
describe.each([
["in-memory", undefined],
["lucene", undefined],
["sqs", undefined],
[DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
[DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
@ -72,15 +70,12 @@ describe.each([
[DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
[DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)],
])("search (%s)", (name, dsProvider) => {
const isSqs = name === "sqs"
const isLucene = name === "lucene"
const isInMemory = name === "in-memory"
const isInternal = isSqs || isLucene || isInMemory
const isInternal = !dsProvider
const isOracle = name === DatabaseName.ORACLE
const isSql = !isInMemory && !isLucene
const isSql = !isInMemory
const config = setup.getConfig()
let envCleanup: (() => void) | undefined
let datasource: Datasource | undefined
let client: Knex | undefined
let tableOrViewId: string
@ -111,12 +106,7 @@ describe.each([
}
beforeAll(async () => {
await features.testutils.withFeatureFlags("*", { SQS: true }, () =>
config.init()
)
envCleanup = features.testutils.setFeatureFlags("*", {
SQS: isSqs,
})
await config.init()
if (config.app?.appId) {
config.app = await config.api.application.update(config.app?.appId, {
@ -140,9 +130,6 @@ describe.each([
afterAll(async () => {
setup.afterAll()
if (envCleanup) {
envCleanup()
}
})
async function createTable(schema?: TableSchema) {
@ -221,11 +208,6 @@ describe.each([
])("from %s", (sourceType, createTableOrView) => {
const isView = sourceType === "view"
if (isView && isLucene) {
// Some tests don't have the expected result in views via lucene, and given that it is getting deprecated, we exclude them from the tests
return
}
class SearchAssertion {
constructor(private readonly query: SearchRowRequest) {}
@ -598,7 +580,6 @@ describe.each([
])
})
!isLucene &&
it("should return all rows matching the session user firstname when logical operator used", async () => {
await expectQuery({
$and: {
@ -1034,7 +1015,6 @@ describe.each([
}).toFindNothing()
})
!isLucene &&
it("ignores low if it's an empty object", async () => {
await expectQuery({
// @ts-ignore
@ -1042,7 +1022,6 @@ describe.each([
}).toContainExactly([{ name: "foo" }, { name: "bar" }])
})
!isLucene &&
it("ignores high if it's an empty object", async () => {
await expectQuery({
// @ts-ignore
@ -1202,10 +1181,6 @@ describe.each([
await expectQuery({ oneOf: { age: [2] } }).toFindNothing()
})
// I couldn't find a way to make this work in Lucene and given that
// we're getting rid of Lucene soon I wasn't inclined to spend time on
// it.
!isLucene &&
it("can convert from a string", async () => {
await expectQuery({
oneOf: {
@ -1215,10 +1190,6 @@ describe.each([
}).toContainExactly([{ age: 1 }])
})
// I couldn't find a way to make this work in Lucene and given that
// we're getting rid of Lucene soon I wasn't inclined to spend time on
// it.
!isLucene &&
it("can find multiple values for same column", async () => {
await expectQuery({
oneOf: {
@ -1648,7 +1619,8 @@ describe.each([
})
})
isSqs &&
isInternal &&
!isInMemory &&
describe("AI Column", () => {
const UNEXISTING_AI_COLUMN = "Real LLM Response"
@ -1879,10 +1851,6 @@ describe.each([
})
})
// Range searches against bigints don't seem to work at all in Lucene, and I
// couldn't figure out why. Given that we're replacing Lucene with SQS,
// we've decided not to spend time on it.
!isLucene &&
describe("range", () => {
it("successfully finds a row", async () => {
await expectQuery({
@ -2016,14 +1984,12 @@ describe.each([
}).toFindNothing()
})
isSqs &&
it("can search using just a low value", async () => {
await expectQuery({
range: { auto: { low: 9 } },
}).toContainExactly([{ auto: 9 }, { auto: 10 }])
})
isSqs &&
it("can search using just a high value", async () => {
await expectQuery({
range: { auto: { high: 2 } },
@ -2031,13 +1997,13 @@ describe.each([
})
})
isSqs &&
describe("sort", () => {
it("sorts ascending", async () => {
await expectSearch({
query: {},
sort: "auto",
sortOrder: SortOrder.ASCENDING,
sortType: SortType.NUMBER,
}).toMatchExactly([
{ auto: 1 },
{ auto: 2 },
@ -2057,6 +2023,7 @@ describe.each([
query: {},
sort: "auto",
sortOrder: SortOrder.DESCENDING,
sortType: SortType.NUMBER,
}).toMatchExactly([
{ auto: 10 },
{ auto: 9 },
@ -2392,8 +2359,6 @@ describe.each([
})
})
// This will never work for Lucene.
!isLucene &&
// It also can't work for in-memory searching because the related table name
// isn't available.
!isInMemory &&
@ -2847,8 +2812,6 @@ describe.each([
})
})
// lucene can't count the total rows
!isLucene &&
describe("row counting", () => {
beforeAll(async () => {
tableOrViewId = await createTableOrView({
@ -3065,9 +3028,7 @@ describe.each([
})
})
// This was never actually supported in Lucene but SQS does support it, so may
// as well have a test for it.
;(isSqs || isInMemory) &&
isInternal &&
describe("space at start of column name", () => {
beforeAll(async () => {
tableOrViewId = await createTableOrView({
@ -3100,7 +3061,7 @@ describe.each([
})
})
isSqs &&
isInternal &&
!isView &&
describe("duplicate columns", () => {
beforeAll(async () => {
@ -3262,7 +3223,6 @@ describe.each([
})
})
!isLucene &&
describe("$and", () => {
beforeAll(async () => {
tableOrViewId = await createTableOrView({
@ -3340,10 +3300,7 @@ describe.each([
await expect(
expectQuery({
$and: {
conditions: [
{ equal: { age: 10 } },
"invalidCondition" as any,
],
conditions: [{ equal: { age: 10 } }, "invalidCondition" as any],
},
}).toFindNothing()
).rejects.toThrow(
@ -3396,7 +3353,6 @@ describe.each([
})
})
!isLucene &&
describe("$or", () => {
beforeAll(async () => {
tableOrViewId = await createTableOrView({
@ -3590,8 +3546,7 @@ describe.each([
})
})
isSql &&
!isSqs &&
!isInternal &&
describe("SQL injection", () => {
const badStrings = [
"1; DROP TABLE %table_name%;",

View File

@ -2,7 +2,6 @@ import * as setup from "./utilities"
import path from "path"
import nock from "nock"
import { generator } from "@budibase/backend-core/tests"
import { features } from "@budibase/backend-core"
interface App {
background: string
@ -82,13 +81,7 @@ describe("/templates", () => {
})
describe("create app from template", () => {
it.each(["sqs", "lucene"])(
`should be able to create an app from a template (%s)`,
async source => {
await features.testutils.withFeatureFlags(
"*",
{ SQS: source === "sqs" },
async () => {
it("should be able to create an app from a template", async () => {
const name = generator.guid().replaceAll("-", "")
const url = `/${name}`
@ -111,19 +104,13 @@ describe("/templates", () => {
expect(agencyProjects.name).toBe("Agency Projects")
expect(users.name).toBe("Users")
const { rows } = await config.api.row.search(
agencyProjects._id!,
{
const { rows } = await config.api.row.search(agencyProjects._id!, {
tableId: agencyProjects._id!,
query: {},
}
)
})
expect(rows).toHaveLength(3)
})
}
)
}
)
})
})
})

View File

@ -43,7 +43,6 @@ import { quotas } from "@budibase/pro"
import { db, roles, features, context } from "@budibase/backend-core"
describe.each([
["lucene", undefined],
["sqs", undefined],
[DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
[DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
@ -52,14 +51,11 @@ describe.each([
[DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)],
])("/v2/views (%s)", (name, dsProvider) => {
const config = setup.getConfig()
const isSqs = name === "sqs"
const isLucene = name === "lucene"
const isInternal = isSqs || isLucene
const isInternal = name === "sqs"
let table: Table
let rawDatasource: Datasource | undefined
let datasource: Datasource | undefined
let envCleanup: (() => void) | undefined
function saveTableRequest(
...overrides: Partial<Omit<SaveTableRequest, "name">>[]
@ -106,13 +102,7 @@ describe.each([
}
beforeAll(async () => {
await features.testutils.withFeatureFlags("*", { SQS: isSqs }, () =>
config.init()
)
envCleanup = features.testutils.setFeatureFlags("*", {
SQS: isSqs,
})
await config.init()
if (dsProvider) {
rawDatasource = await dsProvider
@ -125,9 +115,6 @@ describe.each([
afterAll(async () => {
setup.afterAll()
if (envCleanup) {
envCleanup()
}
})
beforeEach(() => {
@ -855,7 +842,6 @@ describe.each([
})
})
!isLucene &&
it("does not get confused when a calculation field shadows a basic one", async () => {
const table = await config.api.table.save(
saveTableRequest({
@ -1453,7 +1439,6 @@ describe.each([
)
})
!isLucene &&
describe("calculation views", () => {
let table: Table
let view: ViewV2
@ -2293,7 +2278,6 @@ describe.each([
})
})
!isLucene &&
describe("calculation views", () => {
it("should not remove calculation columns when modifying table schema", async () => {
let table = await config.api.table.save(
@ -2721,13 +2705,10 @@ describe.each([
})
})
!isLucene &&
describe("search", () => {
it("returns empty rows from view when no schema is passed", async () => {
const rows = await Promise.all(
Array.from({ length: 10 }, () =>
config.api.row.save(table._id!, {})
)
Array.from({ length: 10 }, () => config.api.row.save(table._id!, {}))
)
const response = await config.api.viewV2.search(view.id)
expect(response.rows).toHaveLength(10)
@ -2864,9 +2845,7 @@ describe.each([
it("respects the limit parameter", async () => {
await Promise.all(
Array.from({ length: 10 }, () =>
config.api.row.save(table._id!, {})
)
Array.from({ length: 10 }, () => config.api.row.save(table._id!, {}))
)
const limit = generator.integer({ min: 1, max: 8 })
const response = await config.api.viewV2.search(view.id, {
@ -2878,9 +2857,7 @@ describe.each([
it("can handle pagination", async () => {
await Promise.all(
Array.from({ length: 10 }, () =>
config.api.row.save(table._id!, {})
)
Array.from({ length: 10 }, () => config.api.row.save(table._id!, {}))
)
const rows = (await config.api.viewV2.search(view.id)).rows
@ -2923,9 +2900,6 @@ describe.each([
hasNextPage: false,
totalRows: 10,
}
if (isLucene) {
expectation.bookmark = expect.anything()
}
expect(page3).toEqual(expectation)
})
@ -3148,9 +3122,7 @@ describe.each([
})
expect(response.rows).toHaveLength(1)
expect(response.rows).toEqual(
expect.arrayContaining([
expect.objectContaining({ _id: three._id }),
])
expect.arrayContaining([expect.objectContaining({ _id: three._id })])
)
})
@ -3211,7 +3183,6 @@ describe.each([
)
})
!isLucene &&
it.each([true, false])(
"can filter a view without a view filter",
async allOr => {
@ -3249,7 +3220,6 @@ describe.each([
}
)
!isLucene &&
it.each([true, false])("cannot bypass a view filter", async allOr => {
await config.api.row.save(table._id!, {
one: "foo",
@ -3455,7 +3425,6 @@ describe.each([
})
})
!isLucene &&
describe("calculations", () => {
let table: Table
let rows: Row[]
@ -3508,10 +3477,7 @@ describe.each([
expect(response.rows).toEqual(
expect.arrayContaining([
expect.objectContaining({
"Quantity Sum": rows.reduce(
(acc, r) => acc + r.quantity,
0
),
"Quantity Sum": rows.reduce((acc, r) => acc + r.quantity, 0),
}),
])
)
@ -3552,9 +3518,7 @@ describe.each([
}
for (const row of response.rows) {
expect(row["Total Price"]).toEqual(
priceByQuantity[row.quantity]
)
expect(row["Total Price"]).toEqual(priceByQuantity[row.quantity])
}
})
@ -3582,10 +3546,7 @@ describe.each([
query: {},
})
function calculate(
type: CalculationType,
numbers: number[]
): number {
function calculate(type: CalculationType, numbers: number[]): number {
switch (type) {
case CalculationType.COUNT:
return numbers.length
@ -3744,12 +3705,9 @@ describe.each([
},
})
const apertureScience = await config.api.row.save(
companies._id!,
{
const apertureScience = await config.api.row.save(companies._id!, {
name: "Aperture Science Laboratories",
}
)
})
const blackMesa = await config.api.row.save(companies._id!, {
name: "Black Mesa",
@ -4075,7 +4033,6 @@ describe.each([
})
})
!isLucene &&
it("should not need required fields to be present", async () => {
const table = await config.api.table.save(
saveTableRequest({
@ -4464,9 +4421,7 @@ describe.each([
}),
expected: () => [
{
users: [
expect.objectContaining({ _id: config.getUser()._id }),
],
users: [expect.objectContaining({ _id: config.getUser()._id })],
},
],
},
@ -4603,9 +4558,7 @@ describe.each([
query: {},
...searchOpts,
})
expect(rows).toEqual(
expected.map(r => expect.objectContaining(r))
)
expect(rows).toEqual(expected.map(r => expect.objectContaining(r)))
}
)
})

View File

@ -1,10 +1,6 @@
import * as setup from "../../../api/routes/tests/utilities"
import { basicTable } from "../../../tests/utilities/structures"
import {
db as dbCore,
features,
SQLITE_DESIGN_DOC_ID,
} from "@budibase/backend-core"
import { db as dbCore, SQLITE_DESIGN_DOC_ID } from "@budibase/backend-core"
import {
LinkDocument,
DocumentType,
@ -70,17 +66,8 @@ function oldLinkDocument(): Omit<LinkDocument, "tableId"> {
}
}
async function sqsDisabled(cb: () => Promise<void>) {
await features.testutils.withFeatureFlags("*", { SQS: false }, cb)
}
async function sqsEnabled(cb: () => Promise<void>) {
await features.testutils.withFeatureFlags("*", { SQS: true }, cb)
}
describe("SQS migration", () => {
beforeAll(async () => {
await sqsDisabled(async () => {
await config.init()
const table = await config.api.table.save(basicTable())
tableId = table._id!
@ -88,7 +75,6 @@ describe("SQS migration", () => {
// old link document
await db.put(oldLinkDocument())
})
})
beforeEach(async () => {
await config.doInTenant(async () => {
@ -101,19 +87,11 @@ describe("SQS migration", () => {
it("test migration runs as expected against an older DB", async () => {
const db = dbCore.getDB(config.appId!)
// confirm nothing exists initially
await sqsDisabled(async () => {
let error: any | undefined
try {
await db.get(SQLITE_DESIGN_DOC_ID)
} catch (err: any) {
error = err
}
expect(error).toBeDefined()
expect(error.status).toBe(404)
})
await sqsEnabled(async () => {
// remove sqlite design doc to simulate it comes from an older installation
const doc = await db.get(SQLITE_DESIGN_DOC_ID)
await db.remove({ _id: doc._id, _rev: doc._rev })
await processMigrations(config.appId!, MIGRATIONS)
const designDoc = await db.get<SQLiteDefinition>(SQLITE_DESIGN_DOC_ID)
expect(designDoc.sql.tables).toBeDefined()
@ -130,9 +108,7 @@ describe("SQS migration", () => {
const { tableId1, tableId2, rowId1, rowId2 } = oldLinkDocInfo()
const linkDoc = await db.get<LinkDocument>(oldLinkDocID())
expect(linkDoc.tableId).toEqual(
generateJunctionTableID(tableId1, tableId2)
)
expect(linkDoc.tableId).toEqual(generateJunctionTableID(tableId1, tableId2))
// should have swapped the documents
expect(linkDoc.doc1.tableId).toEqual(tableId2)
expect(linkDoc.doc1.rowId).toEqual(rowId2)
@ -140,4 +116,3 @@ describe("SQS migration", () => {
expect(linkDoc.doc2.rowId).toEqual(rowId1)
})
})
})

View File

@ -1,11 +1,8 @@
import {
EmptyFilterOption,
FeatureFlag,
LegacyFilter,
LogicalOperator,
Row,
RowSearchParams,
SearchFilterKey,
SearchFilters,
SearchResponse,
SortOrder,
@ -19,7 +16,6 @@ import { ExportRowsParams, ExportRowsResult } from "./search/types"
import { dataFilters } from "@budibase/shared-core"
import sdk from "../../index"
import { checkFilters, searchInputMapping } from "./search/utils"
import { db, features } from "@budibase/backend-core"
import tracer from "dd-trace"
import { getQueryableFields, removeInvalidFilters } from "./queryUtils"
import { enrichSearchContext } from "../../../api/controllers/row/utils"
@ -104,35 +100,6 @@ export async function search(
}
viewQuery = checkFilters(table, viewQuery)
const sqsEnabled = await features.flags.isEnabled(FeatureFlag.SQS)
const supportsLogicalOperators =
isExternalTableID(view.tableId) || sqsEnabled
if (!supportsLogicalOperators) {
// In the unlikely event that a Grouped Filter is in a non-SQS environment
// It needs to be ignored entirely
let queryFilters: LegacyFilter[] = Array.isArray(view.query)
? view.query
: []
const { filters } = dataFilters.splitFiltersArray(queryFilters)
// Extract existing fields
const existingFields = filters.map(filter =>
db.removeKeyNumbering(filter.field)
)
// Carry over filters for unused fields
Object.keys(options.query).forEach(key => {
const operator = key as Exclude<SearchFilterKey, LogicalOperator>
Object.keys(options.query[operator] || {}).forEach(field => {
if (!existingFields.includes(db.removeKeyNumbering(field))) {
viewQuery[operator]![field] = options.query[operator]![field]
}
})
})
options.query = viewQuery
} else {
const conditions = viewQuery ? [viewQuery] : []
options.query = {
$and: {
@ -143,7 +110,6 @@ export async function search(
options.query.onEmptyFilter = viewQuery.onEmptyFilter
}
}
}
options.query = dataFilters.cleanupQuery(options.query)
options.query = dataFilters.fixupFilterArrays(options.query)
@ -170,12 +136,9 @@ export async function search(
if (isExternalTable) {
span?.addTags({ searchType: "external" })
result = await external.search(options, source)
} else if (await features.flags.isEnabled(FeatureFlag.SQS)) {
} else {
span?.addTags({ searchType: "sqs" })
result = await internal.sqs.search(options, source)
} else {
span?.addTags({ searchType: "lucene" })
result = await internal.lucene.search(options, source)
}
span.addTags({

View File

@ -1,3 +1,2 @@
export * as sqs from "./sqs"
export * as lucene from "./lucene"
export * from "./internal"

View File

@ -1,79 +0,0 @@
import { PROTECTED_INTERNAL_COLUMNS } from "@budibase/shared-core"
import { fullSearch, paginatedSearch } from "../utils"
import { InternalTables } from "../../../../../db/utils"
import {
Row,
RowSearchParams,
SearchResponse,
SortType,
Table,
User,
ViewV2,
} from "@budibase/types"
import { getGlobalUsersFromMetadata } from "../../../../../utilities/global"
import { outputProcessing } from "../../../../../utilities/rowProcessor"
import pick from "lodash/pick"
import sdk from "../../../../"
export async function search(
options: RowSearchParams,
source: Table | ViewV2
): Promise<SearchResponse<Row>> {
let table: Table
if (sdk.views.isView(source)) {
table = await sdk.views.getTable(source.id)
} else {
table = source
}
const { paginate, query } = options
const params: RowSearchParams = {
tableId: options.tableId,
viewId: options.viewId,
sort: options.sort,
sortOrder: options.sortOrder,
sortType: options.sortType,
limit: options.limit,
bookmark: options.bookmark,
version: options.version,
disableEscaping: options.disableEscaping,
query: {},
}
if (params.sort && !params.sortType) {
const schema = table.schema
const sortField = schema[params.sort]
params.sortType =
sortField.type === "number" ? SortType.NUMBER : SortType.STRING
}
let response
if (paginate) {
response = await paginatedSearch(query, params)
} else {
response = await fullSearch(query, params)
}
// Enrich search results with relationships
if (response.rows && response.rows.length) {
// enrich with global users if from users table
if (table._id === InternalTables.USER_METADATA) {
response.rows = await getGlobalUsersFromMetadata(response.rows as User[])
}
const visibleFields =
options.fields ||
Object.keys(source.schema || {}).filter(
key => source.schema?.[key].visible !== false
)
const allowedFields = [...visibleFields, ...PROTECTED_INTERNAL_COLUMNS]
response.rows = response.rows.map((r: any) => pick(r, allowedFields))
response.rows = await outputProcessing(source, response.rows, {
squash: true,
})
}
return response
}

View File

@ -10,7 +10,7 @@ import {
import TestConfiguration from "../../../../../tests/utilities/TestConfiguration"
import { search } from "../../../../../sdk/app/rows/search"
import { generator } from "@budibase/backend-core/tests"
import { features } from "@budibase/backend-core"
import {
DatabaseName,
getDatasource,
@ -21,30 +21,20 @@ import { tableForDatasource } from "../../../../../tests/utilities/structures"
// (e.g. limiting searches to returning specific fields). If it's possible to
// test through the API, it should be done there instead.
describe.each([
["lucene", undefined],
["sqs", undefined],
["internal", undefined],
[DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
[DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
[DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
[DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
])("search sdk (%s)", (name, dsProvider) => {
const isSqs = name === "sqs"
const isLucene = name === "lucene"
const isInternal = isLucene || isSqs
const isInternal = name === "internal"
const config = new TestConfiguration()
let envCleanup: (() => void) | undefined
let datasource: Datasource | undefined
let table: Table
beforeAll(async () => {
await features.testutils.withFeatureFlags("*", { SQS: isSqs }, () =>
config.init()
)
envCleanup = features.testutils.setFeatureFlags("*", {
SQS: isSqs,
})
await config.init()
if (dsProvider) {
datasource = await config.createDatasource({
@ -105,9 +95,6 @@ describe.each([
afterAll(async () => {
config.end()
if (envCleanup) {
envCleanup()
}
})
it("querying by fields will always return data attribute columns", async () => {
@ -211,7 +198,6 @@ describe.each([
})
})
!isLucene &&
it.each([
[["id", "name", "age"], 3],
[["name", "age"], 10],

View File

@ -1,4 +1,4 @@
import { context, features } from "@budibase/backend-core"
import { context } from "@budibase/backend-core"
import { getTableParams } from "../../../db/utils"
import {
breakExternalTableId,
@ -12,7 +12,6 @@ import {
TableResponse,
TableSourceType,
TableViewsResponse,
FeatureFlag,
} from "@budibase/types"
import datasources from "../datasources"
import sdk from "../../../sdk"
@ -49,10 +48,7 @@ export async function processTable(table: Table): Promise<Table> {
type: "table",
sourceId: table.sourceId || INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
}
const sqsEnabled = await features.flags.isEnabled(FeatureFlag.SQS)
if (sqsEnabled) {
processed.sql = true
sql: true,
}
return processed
}

View File

@ -3,7 +3,6 @@ import { fixAutoColumnSubType, processFormulas } from "./utils"
import {
cache,
context,
features,
HTTPError,
objectStore,
utils,
@ -19,7 +18,6 @@ import {
Table,
User,
ViewV2,
FeatureFlag,
} from "@budibase/types"
import { cloneDeep } from "lodash/fp"
import {
@ -423,7 +421,6 @@ export async function coreOutputProcessing(
// remove null properties to match internal API
const isExternal = isExternalTableID(table._id!)
if (isExternal || (await features.flags.isEnabled(FeatureFlag.SQS))) {
for (const row of rows) {
for (const key of Object.keys(row)) {
if (row[key] === null) {
@ -465,7 +462,6 @@ export async function coreOutputProcessing(
}
}
}
}
if (!isUserMetadataTable(table._id!)) {
const protectedColumns = isExternal

View File

@ -8,7 +8,7 @@ import {
} from "@budibase/types"
import { outputProcessing } from ".."
import { generator, structures } from "@budibase/backend-core/tests"
import { features } from "@budibase/backend-core"
import * as bbReferenceProcessor from "../bbReferenceProcessor"
import TestConfiguration from "../../../tests/utilities/TestConfiguration"
@ -21,7 +21,6 @@ jest.mock("../bbReferenceProcessor", (): typeof bbReferenceProcessor => ({
describe("rowProcessor - outputProcessing", () => {
const config = new TestConfiguration()
let cleanupFlags: () => void = () => {}
beforeAll(async () => {
await config.init()
@ -33,11 +32,6 @@ describe("rowProcessor - outputProcessing", () => {
beforeEach(() => {
jest.resetAllMocks()
cleanupFlags = features.testutils.setFeatureFlags("*", { SQS: true })
})
afterEach(() => {
cleanupFlags()
})
const processOutputBBReferenceMock =

View File

@ -527,7 +527,12 @@ export function search<T extends Record<string, any>>(
): SearchResponse<T> {
let result = runQuery(docs, query.query)
if (query.sort) {
result = sort(result, query.sort, query.sortOrder || SortOrder.ASCENDING)
result = sort(
result,
query.sort,
query.sortOrder || SortOrder.ASCENDING,
query.sortType
)
}
const totalRows = result.length
if (query.limit) {

View File

@ -12,7 +12,6 @@ import type PouchDB from "pouchdb-find"
export enum SearchIndex {
ROWS = "rows",
AUDIT = "audit",
USER = "user",
}

View File

@ -2,7 +2,6 @@ export enum FeatureFlag {
PER_CREATOR_PER_USER_PRICE = "PER_CREATOR_PER_USER_PRICE",
PER_CREATOR_PER_USER_PRICE_ALERT = "PER_CREATOR_PER_USER_PRICE_ALERT",
AUTOMATION_BRANCHING = "AUTOMATION_BRANCHING",
SQS = "SQS",
AI_CUSTOM_CONFIGS = "AI_CUSTOM_CONFIGS",
DEFAULT_VALUES = "DEFAULT_VALUES",
ENRICHED_RELATIONSHIPS = "ENRICHED_RELATIONSHIPS",

View File

@ -1,6 +1,6 @@
import { Ctx, MaintenanceType, FeatureFlag } from "@budibase/types"
import { Ctx, MaintenanceType } from "@budibase/types"
import env from "../../../environment"
import { env as coreEnv, db as dbCore, features } from "@budibase/backend-core"
import { env as coreEnv, db as dbCore } from "@budibase/backend-core"
import nodeFetch from "node-fetch"
import { helpers } from "@budibase/shared-core"
@ -35,10 +35,7 @@ async function isSqsAvailable() {
}
async function isSqsMissing() {
return (
(await features.flags.isEnabled(FeatureFlag.SQS)) &&
!(await isSqsAvailable())
)
return !(await isSqsAvailable())
}
export const fetch = async (ctx: Ctx) => {

View File

@ -1,5 +1,5 @@
import { mocks, structures } from "@budibase/backend-core/tests"
import { context, events, features } from "@budibase/backend-core"
import { context, events } from "@budibase/backend-core"
import { Event, IdentityType } from "@budibase/types"
import { TestConfiguration } from "../../../../tests"
@ -12,19 +12,14 @@ const BASE_IDENTITY = {
const USER_AUDIT_LOG_COUNT = 3
const APP_ID = "app_1"
describe.each(["lucene", "sql"])("/api/global/auditlogs (%s)", method => {
describe("/api/global/auditlogs (%s)", () => {
const config = new TestConfiguration()
let envCleanup: (() => void) | undefined
beforeAll(async () => {
envCleanup = features.testutils.setFeatureFlags("*", {
SQS: method === "sql",
})
await config.beforeAll()
})
afterAll(async () => {
envCleanup?.()
await config.afterAll()
})