Merge branch 'master' into create-secret-key-once

Sam Rose 2024-11-18 11:10:53 +00:00 committed by GitHub
commit 0a65d7245c
57 changed files with 3313 additions and 3631 deletions


@@ -50,19 +50,6 @@ http {
     ignore_invalid_headers off;
     proxy_buffering off;
-    set $csp_default "default-src 'self'";
-    set $csp_script "script-src 'self' 'unsafe-inline' 'unsafe-eval' https://*.budibase.net https://cdn.budi.live https://js.intercomcdn.com https://widget.intercom.io https://d2l5prqdbvm3op.cloudfront.net https://us-assets.i.posthog.com";
-    set $csp_style "style-src 'self' 'unsafe-inline' https://cdn.jsdelivr.net https://fonts.googleapis.com https://rsms.me https://maxcdn.bootstrapcdn.com";
-    set $csp_object "object-src 'none'";
-    set $csp_base_uri "base-uri 'self'";
-    set $csp_connect "connect-src 'self' https://*.budibase.app https://*.budibaseqa.app https://*.budibase.net https://api-iam.intercom.io https://api-iam.intercom.io https://api-ping.intercom.io https://app.posthog.com https://us.i.posthog.com wss://nexus-websocket-a.intercom.io wss://nexus-websocket-b.intercom.io https://nexus-websocket-a.intercom.io https://nexus-websocket-b.intercom.io https://uploads.intercomcdn.com https://uploads.intercomusercontent.com https://*.amazonaws.com https://*.s3.amazonaws.com https://*.s3.us-east-2.amazonaws.com https://*.s3.us-east-1.amazonaws.com https://*.s3.us-west-1.amazonaws.com https://*.s3.us-west-2.amazonaws.com https://*.s3.af-south-1.amazonaws.com https://*.s3.ap-east-1.amazonaws.com https://*.s3.ap-southeast-3.amazonaws.com https://*.s3.ap-south-1.amazonaws.com https://*.s3.ap-northeast-3.amazonaws.com https://*.s3.ap-northeast-2.amazonaws.com https://*.s3.ap-southeast-1.amazonaws.com https://*.s3.ap-southeast-2.amazonaws.com https://*.s3.ap-northeast-1.amazonaws.com https://*.s3.ca-central-1.amazonaws.com https://*.s3.cn-north-1.amazonaws.com https://*.s3.cn-northwest-1.amazonaws.com https://*.s3.eu-central-1.amazonaws.com https://*.s3.eu-west-1.amazonaws.com https://*.s3.eu-west-2.amazonaws.com https://*.s3.eu-south-1.amazonaws.com https://*.s3.eu-west-3.amazonaws.com https://*.s3.eu-north-1.amazonaws.com https://*.s3.sa-east-1.amazonaws.com https://*.s3.me-south-1.amazonaws.com https://*.s3.us-gov-east-1.amazonaws.com https://*.s3.us-gov-west-1.amazonaws.com https://api.github.com";
-    set $csp_font "font-src 'self' data: https://cdn.jsdelivr.net https://fonts.gstatic.com https://rsms.me https://maxcdn.bootstrapcdn.com https://js.intercomcdn.com https://fonts.intercomcdn.com";
-    set $csp_frame "frame-src 'self' https:";
-    set $csp_img "img-src http: https: data: blob:";
-    set $csp_manifest "manifest-src 'self'";
-    set $csp_media "media-src 'self' https://js.intercomcdn.com https://cdn.budi.live";
-    set $csp_worker "worker-src blob:";

     error_page 502 503 504 /error.html;
     location = /error.html {
         root /usr/share/nginx/html;
@@ -73,7 +60,6 @@ http {
     add_header X-Frame-Options SAMEORIGIN always;
     add_header X-Content-Type-Options nosniff always;
     add_header X-XSS-Protection "1; mode=block" always;
-    add_header Content-Security-Policy "${csp_default}; ${csp_script}; ${csp_style}; ${csp_object}; ${csp_base_uri}; ${csp_connect}; ${csp_font}; ${csp_frame}; ${csp_img}; ${csp_manifest}; ${csp_media}; ${csp_worker};" always;
     add_header Strict-Transport-Security "max-age=31536000; includeSubDomains; preload" always;

     # upstreams
@@ -120,6 +106,12 @@ http {
     location ~ ^/api/(system|admin|global)/ {
       proxy_set_header Host $host;
+      # Enable buffering for potentially large OIDC configs
+      proxy_buffering on;
+      proxy_buffer_size 16k;
+      proxy_buffers 4 32k;
       proxy_pass $worker;
     }


@@ -1,6 +1,6 @@
 {
   "$schema": "node_modules/lerna/schemas/lerna-schema.json",
-  "version": "3.2.0",
+  "version": "3.2.3",
   "npmClient": "yarn",
   "packages": [
     "packages/*",


@@ -1,7 +1,12 @@
 import tk from "timekeeper"
 import _ from "lodash"
-import { DBTestConfiguration, generator, structures } from "../../../tests"
+import {
+  DBTestConfiguration,
+  generator,
+  structures,
+  utils,
+} from "../../../tests"
 import { getDB } from "../../db"
 import {
@@ -10,15 +15,14 @@ import {
   init,
 } from "../docWritethrough"
-import InMemoryQueue from "../../queue/inMemoryQueue"

 const initialTime = Date.now()

 async function waitForQueueCompletion() {
-  const queue: InMemoryQueue = DocWritethroughProcessor.queue as never
-  await queue.waitForCompletion()
+  await utils.queue.processMessages(DocWritethroughProcessor.queue)
 }

+beforeAll(() => utils.queue.useRealQueues())
+
 describe("docWritethrough", () => {
   beforeAll(() => {
     init()
@@ -67,7 +71,7 @@ describe("docWritethrough", () => {
       const patch3 = generatePatchObject(3)
       await docWritethrough.patch(patch3)

-      expect(await db.get(documentId)).toEqual({
+      expect(await db.tryGet(documentId)).toEqual({
         _id: documentId,
         ...patch1,
         ...patch2,
@@ -92,7 +96,7 @@ describe("docWritethrough", () => {
       await waitForQueueCompletion()

-      expect(await db.get(documentId)).toEqual(
+      expect(await db.tryGet(documentId)).toEqual(
         expect.objectContaining({
           _id: documentId,
           ...patch1,
@@ -117,7 +121,7 @@ describe("docWritethrough", () => {
       await waitForQueueCompletion()

       expect(date1).not.toEqual(date2)
-      expect(await db.get(documentId)).toEqual(
+      expect(await db.tryGet(documentId)).toEqual(
         expect.objectContaining({
           createdAt: date1.toISOString(),
           updatedAt: date2.toISOString(),
@@ -135,7 +139,7 @@ describe("docWritethrough", () => {
       await docWritethrough.patch(patch2)

       const keyToOverride = _.sample(Object.keys(patch1))!
-      expect(await db.get(documentId)).toEqual(
+      expect(await db.tryGet(documentId)).toEqual(
         expect.objectContaining({
           [keyToOverride]: patch1[keyToOverride],
         })
@@ -150,7 +154,7 @@ describe("docWritethrough", () => {
       await docWritethrough.patch(patch3)
       await waitForQueueCompletion()

-      expect(await db.get(documentId)).toEqual(
+      expect(await db.tryGet(documentId)).toEqual(
         expect.objectContaining({
           ...patch1,
           ...patch2,
@@ -180,14 +184,14 @@ describe("docWritethrough", () => {
       await secondDocWritethrough.patch(doc2Patch2)
       await waitForQueueCompletion()

-      expect(await db.get(docWritethrough.docId)).toEqual(
+      expect(await db.tryGet(docWritethrough.docId)).toEqual(
         expect.objectContaining({
           ...doc1Patch,
           ...doc1Patch2,
         })
       )

-      expect(await db.get(secondDocWritethrough.docId)).toEqual(
+      expect(await db.tryGet(secondDocWritethrough.docId)).toEqual(
         expect.objectContaining({
           ...doc2Patch,
           ...doc2Patch2,
@@ -203,7 +207,7 @@ describe("docWritethrough", () => {
       await docWritethrough.patch(initialPatch)
       await waitForQueueCompletion()

-      expect(await db.get(documentId)).toEqual(
+      expect(await db.tryGet(documentId)).toEqual(
         expect.objectContaining(initialPatch)
       )
@@ -214,10 +218,10 @@ describe("docWritethrough", () => {
       await docWritethrough.patch(extraPatch)
       await waitForQueueCompletion()

-      expect(await db.get(documentId)).toEqual(
+      expect(await db.tryGet(documentId)).toEqual(
         expect.objectContaining(extraPatch)
       )
-      expect(await db.get(documentId)).not.toEqual(
+      expect(await db.tryGet(documentId)).not.toEqual(
         expect.objectContaining(initialPatch)
       )
     })
@@ -242,7 +246,7 @@ describe("docWritethrough", () => {
       expect(queueMessageSpy).toHaveBeenCalledTimes(5)

       await waitForQueueCompletion()
-      expect(await db.get(documentId)).toEqual(
+      expect(await db.tryGet(documentId)).toEqual(
         expect.objectContaining(patches)
       )
@@ -250,7 +254,7 @@ describe("docWritethrough", () => {
       expect(queueMessageSpy).toHaveBeenCalledTimes(45)

       await waitForQueueCompletion()
-      expect(await db.get(documentId)).toEqual(
+      expect(await db.tryGet(documentId)).toEqual(
         expect.objectContaining(patches)
       )
@@ -258,20 +262,18 @@ describe("docWritethrough", () => {
       expect(queueMessageSpy).toHaveBeenCalledTimes(55)

       await waitForQueueCompletion()
-      expect(await db.get(documentId)).toEqual(
+      expect(await db.tryGet(documentId)).toEqual(
         expect.objectContaining(patches)
       )
     })
   })

-  // This is not yet supported
-  // eslint-disable-next-line jest/no-disabled-tests
-  it.skip("patches will execute in order", async () => {
+  it("patches will execute in order", async () => {
     let incrementalValue = 0
     const keyToOverride = generator.word()
     async function incrementalPatches(count: number) {
       for (let i = 0; i < count; i++) {
-        await docWritethrough.patch({ [keyToOverride]: incrementalValue++ })
+        await docWritethrough.patch({ [keyToOverride]: ++incrementalValue })
       }
     }
@@ -279,13 +281,13 @@ describe("docWritethrough", () => {
     await incrementalPatches(5)

     await waitForQueueCompletion()
-    expect(await db.get(documentId)).toEqual(
+    expect(await db.tryGet(documentId)).toEqual(
       expect.objectContaining({ [keyToOverride]: 5 })
     )

     await incrementalPatches(40)
     await waitForQueueCompletion()
-    expect(await db.get(documentId)).toEqual(
+    expect(await db.tryGet(documentId)).toEqual(
       expect.objectContaining({ [keyToOverride]: 45 })
     )
   })


@@ -10,7 +10,6 @@ import {
   DatabaseQueryOpts,
   DBError,
   Document,
-  FeatureFlag,
   isDocument,
   RowResponse,
   RowValue,
@@ -27,7 +26,6 @@ import { SQLITE_DESIGN_DOC_ID } from "../../constants"
 import { DDInstrumentedDatabase } from "../instrumentation"
 import { checkSlashesInUrl } from "../../helpers"
 import { sqlLog } from "../../sql/utils"
-import { flags } from "../../features"

 const DATABASE_NOT_FOUND = "Database does not exist."
@@ -456,10 +454,7 @@ export class DatabaseImpl implements Database {
   }

   async destroy() {
-    if (
-      (await flags.isEnabled(FeatureFlag.SQS)) &&
-      (await this.exists(SQLITE_DESIGN_DOC_ID))
-    ) {
+    if (await this.exists(SQLITE_DESIGN_DOC_ID)) {
       // delete the design document, then run the cleanup operation
       const definition = await this.get<SQLiteDefinition>(SQLITE_DESIGN_DOC_ID)
       // remove all tables - save the definition then trigger a cleanup


@@ -230,6 +230,10 @@ const environment = {
   OPENAI_API_KEY: process.env.OPENAI_API_KEY,
   MIN_VERSION_WITHOUT_POWER_ROLE:
     process.env.MIN_VERSION_WITHOUT_POWER_ROLE || "3.0.0",
+  DISABLE_CONTENT_SECURITY_POLICY: process.env.DISABLE_CONTENT_SECURITY_POLICY,
+  // stopgap migration strategy until we can ensure backwards compat without unsafe-inline in CSP
+  DISABLE_CSP_UNSAFE_INLINE_SCRIPTS:
+    process.env.DISABLE_CSP_UNSAFE_INLINE_SCRIPTS,
 }

 export function setEnv(newEnvVars: Partial<typeof environment>): () => void {


@@ -269,8 +269,6 @@ export class FlagSet<V extends Flag<any>, T extends { [key: string]: V }> {
 export const flags = new FlagSet({
   [FeatureFlag.DEFAULT_VALUES]: Flag.boolean(true),
   [FeatureFlag.AUTOMATION_BRANCHING]: Flag.boolean(true),
-  [FeatureFlag.SQS]: Flag.boolean(true),
-  [FeatureFlag.ENRICHED_RELATIONSHIPS]: Flag.boolean(true),
   [FeatureFlag.AI_CUSTOM_CONFIGS]: Flag.boolean(true),
   [FeatureFlag.BUDIBASE_AI]: Flag.boolean(true),
 })


@@ -0,0 +1,118 @@
import crypto from "crypto"
import env from "../environment"
const CSP_DIRECTIVES = {
"default-src": ["'self'"],
"script-src": [
"'self'",
"'unsafe-eval'",
"https://*.budibase.net",
"https://cdn.budi.live",
"https://js.intercomcdn.com",
"https://widget.intercom.io",
"https://d2l5prqdbvm3op.cloudfront.net",
"https://us-assets.i.posthog.com",
],
"style-src": [
"'self'",
"'unsafe-inline'",
"https://cdn.jsdelivr.net",
"https://fonts.googleapis.com",
"https://rsms.me",
"https://maxcdn.bootstrapcdn.com",
],
"object-src": ["'none'"],
"base-uri": ["'self'"],
"connect-src": [
"'self'",
"https://*.budibase.app",
"https://*.budibaseqa.app",
"https://*.budibase.net",
"https://api-iam.intercom.io",
"https://api-ping.intercom.io",
"https://app.posthog.com",
"https://us.i.posthog.com",
"wss://nexus-websocket-a.intercom.io",
"wss://nexus-websocket-b.intercom.io",
"https://nexus-websocket-a.intercom.io",
"https://nexus-websocket-b.intercom.io",
"https://uploads.intercomcdn.com",
"https://uploads.intercomusercontent.com",
"https://*.amazonaws.com",
"https://*.s3.amazonaws.com",
"https://*.s3.us-east-2.amazonaws.com",
"https://*.s3.us-east-1.amazonaws.com",
"https://*.s3.us-west-1.amazonaws.com",
"https://*.s3.us-west-2.amazonaws.com",
"https://*.s3.af-south-1.amazonaws.com",
"https://*.s3.ap-east-1.amazonaws.com",
"https://*.s3.ap-south-1.amazonaws.com",
"https://*.s3.ap-northeast-2.amazonaws.com",
"https://*.s3.ap-southeast-1.amazonaws.com",
"https://*.s3.ap-southeast-2.amazonaws.com",
"https://*.s3.ap-northeast-1.amazonaws.com",
"https://*.s3.ca-central-1.amazonaws.com",
"https://*.s3.cn-north-1.amazonaws.com",
"https://*.s3.cn-northwest-1.amazonaws.com",
"https://*.s3.eu-central-1.amazonaws.com",
"https://*.s3.eu-west-1.amazonaws.com",
"https://*.s3.eu-west-2.amazonaws.com",
"https://*.s3.eu-south-1.amazonaws.com",
"https://*.s3.eu-west-3.amazonaws.com",
"https://*.s3.eu-north-1.amazonaws.com",
"https://*.s3.sa-east-1.amazonaws.com",
"https://*.s3.me-south-1.amazonaws.com",
"https://*.s3.us-gov-east-1.amazonaws.com",
"https://*.s3.us-gov-west-1.amazonaws.com",
"https://api.github.com",
],
"font-src": [
"'self'",
"data:",
"https://cdn.jsdelivr.net",
"https://fonts.gstatic.com",
"https://rsms.me",
"https://maxcdn.bootstrapcdn.com",
"https://js.intercomcdn.com",
"https://fonts.intercomcdn.com",
],
"frame-src": ["'self'", "https:"],
"img-src": ["http:", "https:", "data:", "blob:"],
"manifest-src": ["'self'"],
"media-src": [
"'self'",
"https://js.intercomcdn.com",
"https://cdn.budi.live",
],
"worker-src": ["blob:"],
}
export async function contentSecurityPolicy(ctx: any, next: any) {
try {
const nonce = crypto.randomBytes(16).toString("base64")
const directives = { ...CSP_DIRECTIVES }
directives["script-src"] = [
...CSP_DIRECTIVES["script-src"],
`'nonce-${nonce}'`,
]
if (!env.DISABLE_CSP_UNSAFE_INLINE_SCRIPTS) {
directives["script-src"].push("'unsafe-inline'")
}
ctx.state.nonce = nonce
const cspHeader = Object.entries(directives)
.map(([key, sources]) => `${key} ${sources.join(" ")}`)
.join("; ")
ctx.set("Content-Security-Policy", cspHeader)
await next()
} catch (err: any) {
console.error(
`Error occurred in Content-Security-Policy middleware: ${err}`
)
}
}
export default contentSecurityPolicy
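
A minimal sketch of how a server might consume this middleware, assuming it is re-exported as `middleware.csp` from @budibase/backend-core (see the middleware index below) and that the host app is Koa-based; the route body is illustrative only:

import Koa from "koa"
import { middleware } from "@budibase/backend-core"

const app = new Koa()
// mount early so every response carries a Content-Security-Policy header
app.use(middleware.csp)
app.use(async ctx => {
  // the middleware stashes the per-request nonce on ctx.state; inline
  // scripts must echo it to satisfy the script-src directive
  ctx.body = `<script nonce="${ctx.state.nonce}">window.loadBudibase()</script>`
})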


@@ -19,5 +19,6 @@ export { default as pino } from "../logging/pino/middleware"
 export { default as correlation } from "../logging/correlation/middleware"
 export { default as errorHandling } from "./errorHandling"
 export { default as querystringToBody } from "./querystringToBody"
+export { default as csp } from "./contentSecurityPolicy"
 export * as joiValidator from "./joi-validator"
 export { default as ip } from "./ip"


@@ -0,0 +1,75 @@
import crypto from "crypto"
import contentSecurityPolicy from "../contentSecurityPolicy"
jest.mock("crypto", () => ({
randomBytes: jest.fn(),
randomUUID: jest.fn(),
}))
describe("contentSecurityPolicy middleware", () => {
let ctx: any
let next: any
const mockNonce = "mocked/nonce"
beforeEach(() => {
ctx = {
state: {},
set: jest.fn(),
}
next = jest.fn()
// @ts-ignore
crypto.randomBytes.mockReturnValue(Buffer.from(mockNonce, "base64"))
})
afterEach(() => {
jest.clearAllMocks()
})
it("should generate a nonce and set it in the script-src directive", async () => {
await contentSecurityPolicy(ctx, next)
expect(ctx.state.nonce).toBe(mockNonce)
expect(ctx.set).toHaveBeenCalledWith(
"Content-Security-Policy",
expect.stringContaining(
`script-src 'self' 'unsafe-eval' https://*.budibase.net https://cdn.budi.live https://js.intercomcdn.com https://widget.intercom.io https://d2l5prqdbvm3op.cloudfront.net https://us-assets.i.posthog.com 'nonce-${mockNonce}'`
)
)
expect(next).toHaveBeenCalled()
})
it("should include all CSP directives in the header", async () => {
await contentSecurityPolicy(ctx, next)
const cspHeader = ctx.set.mock.calls[0][1]
expect(cspHeader).toContain("default-src 'self'")
expect(cspHeader).toContain("script-src 'self' 'unsafe-eval'")
expect(cspHeader).toContain("style-src 'self' 'unsafe-inline'")
expect(cspHeader).toContain("object-src 'none'")
expect(cspHeader).toContain("base-uri 'self'")
expect(cspHeader).toContain("connect-src 'self'")
expect(cspHeader).toContain("font-src 'self'")
expect(cspHeader).toContain("frame-src 'self'")
expect(cspHeader).toContain("img-src http: https: data: blob:")
expect(cspHeader).toContain("manifest-src 'self'")
expect(cspHeader).toContain("media-src 'self'")
expect(cspHeader).toContain("worker-src blob:")
})
it("should handle errors and log an error message", async () => {
const consoleSpy = jest.spyOn(console, "error").mockImplementation()
const error = new Error("Test error")
// @ts-ignore
crypto.randomBytes.mockImplementation(() => {
throw error
})
await contentSecurityPolicy(ctx, next)
expect(consoleSpy).toHaveBeenCalledWith(
`Error occurred in Content-Security-Policy middleware: ${error}`
)
expect(next).not.toHaveBeenCalled()
consoleSpy.mockRestore()
})
})
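
The mock above works because "mocked/nonce" happens to be a valid base64 string: decoding it to a Buffer and re-encoding reproduces it exactly, so the middleware's randomBytes(16).toString("base64") path yields the literal mockNonce. A quick sketch of that invariant:

const mockNonce = "mocked/nonce"
// 12 base64 characters decode to exactly 9 bytes, which re-encode to the
// same 12 characters with no padding, so ctx.state.nonce === mockNonce
const roundTripped = Buffer.from(mockNonce, "base64").toString("base64")
console.log(roundTripped === mockNonce) // true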


@@ -1,5 +1,5 @@
 import events from "events"
-import { newid, timeout } from "../utils"
+import { newid } from "../utils"
 import { Queue, QueueOptions, JobOptions } from "./queue"

 interface JobMessage {
@@ -184,16 +184,6 @@ class InMemoryQueue implements Partial<Queue> {
     // do nothing
     return this as any
   }
-
-  async waitForCompletion() {
-    do {
-      await timeout(50)
-    } while (this.hasRunningJobs())
-  }
-
-  hasRunningJobs() {
-    return this._addCount > this._runCount
-  }
 }

 export default InMemoryQueue


@@ -15,7 +15,7 @@ const QUEUE_LOCK_MS = Duration.fromMinutes(5).toMs()
 const QUEUE_LOCK_RENEW_INTERNAL_MS = Duration.fromSeconds(30).toMs()
 // cleanup the queue every 60 seconds
 const CLEANUP_PERIOD_MS = Duration.fromSeconds(60).toMs()
-let QUEUES: BullQueue.Queue[] | InMemoryQueue[] = []
+let QUEUES: BullQueue.Queue[] = []
 let cleanupInterval: NodeJS.Timeout

 async function cleanup() {
@@ -45,11 +45,18 @@ export function createQueue<T>(
   if (opts.jobOptions) {
     queueConfig.defaultJobOptions = opts.jobOptions
   }
-  let queue: any
+  let queue: BullQueue.Queue<T>
   if (!env.isTest()) {
     queue = new BullQueue(jobQueue, queueConfig)
+  } else if (
+    process.env.BULL_TEST_REDIS_PORT &&
+    !isNaN(+process.env.BULL_TEST_REDIS_PORT)
+  ) {
+    queue = new BullQueue(jobQueue, {
+      redis: { host: "localhost", port: +process.env.BULL_TEST_REDIS_PORT },
+    })
   } else {
-    queue = new InMemoryQueue(jobQueue, queueConfig)
+    queue = new InMemoryQueue(jobQueue, queueConfig) as any
   }
   addListeners(queue, jobQueue, opts?.removeStalledCb)
   QUEUES.push(queue)


@@ -4,3 +4,4 @@ export { generator } from "./structures"
 export * as testContainerUtils from "./testContainerUtils"
 export * as utils from "./utils"
 export * from "./jestUtils"
+export * as queue from "./queue"


@@ -0,0 +1,9 @@
import { Queue } from "bull"
export async function processMessages(queue: Queue) {
do {
await queue.whenCurrentJobsFinished()
} while (await queue.count())
await queue.whenCurrentJobsFinished()
}
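
The double wait is deliberate: Bull's whenCurrentJobsFinished() only covers jobs already in flight, and a job may enqueue follow-up work, so the helper re-checks count() and waits once more. A sketch of the case a single wait would miss, using a hypothetical queue:

import Bull from "bull"

const queue = new Bull<{ step: number }>("example")
// the first job enqueues a second one; after one whenCurrentJobsFinished()
// that follow-up job may still be waiting, which is why processMessages
// loops until count() reports the queue is empty
queue.process(async job => {
  if (job.data.step === 1) {
    await queue.add({ step: 2 })
  }
})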


@@ -1,4 +1,6 @@
 import { execSync } from "child_process"
+import { cloneDeep } from "lodash"
+import { GenericContainer, StartedTestContainer } from "testcontainers"

 const IPV4_PORT_REGEX = new RegExp(`0\\.0\\.0\\.0:(\\d+)->(\\d+)/tcp`, "g")
@@ -106,3 +108,58 @@ export function setupEnv(...envs: any[]) {
     }
   }
 }
export async function startContainer(container: GenericContainer) {
const imageName = (container as any).imageName.string as string
let key: string = imageName
if (imageName.includes("@sha256")) {
key = imageName.split("@")[0]
}
key = key.replace(/\//g, "-").replace(/:/g, "-")
container = container
.withReuse()
.withLabels({ "com.budibase": "true" })
.withName(`${key}_testcontainer`)
let startedContainer: StartedTestContainer | undefined = undefined
let lastError = undefined
for (let i = 0; i < 10; i++) {
try {
// container.start() is not an idempotent operation, calling `start`
// modifies the internal state of a GenericContainer instance such that
// the hash it uses to determine reuse changes. We need to clone the
// container before calling start to ensure that we're using the same
// reuse hash every time.
const containerCopy = cloneDeep(container)
startedContainer = await containerCopy.start()
lastError = undefined
break
} catch (e: any) {
lastError = e
await new Promise(resolve => setTimeout(resolve, 1000))
}
}
if (!startedContainer) {
if (lastError) {
throw lastError
}
throw new Error(`failed to start container: ${imageName}`)
}
const info = getContainerById(startedContainer.getId())
if (!info) {
throw new Error("Container not found")
}
// Some Docker runtimes, when you expose a port, will bind it to both
// 127.0.0.1 and ::1, so ipv4 and ipv6. The port spaces of ipv4 and ipv6
// addresses are not shared, and testcontainers will sometimes give you back
// the ipv6 port. There's no way to know that this has happened, and if you
// try to then connect to `localhost:port` you may attempt to bind to the v4
// address which could be unbound or even an entirely different container. For
// that reason, we don't use testcontainers' `getExposedPort` function,
// preferring instead our own method that guaranteed v4 ports.
return getExposedV4Ports(info)
}
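
A usage sketch for startContainer, mirroring the Redis helper added later in this commit; the wait strategy and timeout are illustrative values, and the returned array pairs each exposed container port with its IPv4 host port:

import { GenericContainer, Wait } from "testcontainers"

const ports = await startContainer(
  new GenericContainer("redis")
    .withExposedPorts(6379)
    .withWaitStrategy(
      Wait.forSuccessfulCommand("redis-cli").withStartupTimeout(10000)
    )
)
// pick the IPv4 host port bound to the container's 6379
const redisPort = ports.find(p => p.container === 6379)?.host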


@@ -1 +1,2 @@
 export * as time from "./time"
+export * as queue from "./queue"


@@ -0,0 +1,27 @@
import { Queue } from "bull"
import { GenericContainer, Wait } from "testcontainers"
import { startContainer } from "../testContainerUtils"
export async function useRealQueues() {
const ports = await startContainer(
new GenericContainer("redis")
.withExposedPorts(6379)
.withWaitStrategy(
Wait.forSuccessfulCommand(`redis-cli`).withStartupTimeout(10000)
)
)
const port = ports.find(x => x.container === 6379)?.host
if (!port) {
throw new Error("Redis port not found")
}
process.env.BULL_TEST_REDIS_PORT = port.toString()
}
export async function processMessages(queue: Queue) {
do {
await queue.whenCurrentJobsFinished()
} while (await queue.count())
await queue.whenCurrentJobsFinished()
}
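
Taken together with the createQueue change above, the intended test wiring looks roughly like this; the queue handle and import path are illustrative:

import { utils } from "@budibase/backend-core/tests"

beforeAll(async () => {
  // starts (or reuses) a Redis testcontainer and sets BULL_TEST_REDIS_PORT,
  // so createQueue builds a real Bull queue instead of the in-memory one
  await utils.queue.useRealQueues()
})

it("processes queued writes", async () => {
  // ...enqueue work through the code under test...
  await utils.queue.processMessages(someQueue) // drain before asserting
  // ...assert on the resulting state...
})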


@@ -8,6 +8,7 @@
   import Link from "../../Link/Link.svelte"
   import Tag from "../../Tags/Tag.svelte"
   import Tags from "../../Tags/Tags.svelte"
+  import ProgressCircle from "../../ProgressCircle/ProgressCircle.svelte"

   const BYTES_IN_KB = 1000
   const BYTES_IN_MB = 1000000
@@ -39,12 +40,14 @@
     "jfif",
     "webp",
   ]

   const fieldId = id || uuid()
   let selectedImageIdx = 0
   let fileDragged = false
   let selectedUrl
   let fileInput
+  let loading = false

   $: selectedImage = value?.[selectedImageIdx] ?? null
   $: fileCount = value?.length ?? 0
   $: isImage =
@@ -86,10 +89,15 @@
     }
     if (processFiles) {
+      loading = true
+      try {
       const processedFiles = await processFiles(fileList)
       const newValue = [...value, ...processedFiles]
       dispatch("change", newValue)
       selectedImageIdx = newValue.length - 1
+      } finally {
+        loading = false
+      }
     } else {
       dispatch("change", fileList)
     }
@@ -227,7 +235,7 @@
   {#if showDropzone}
     <div
       class="spectrum-Dropzone"
-      class:disabled
+      class:disabled={disabled || loading}
       role="region"
       tabindex="0"
       on:dragover={handleDragOver}
@@ -241,7 +249,7 @@
       id={fieldId}
       {disabled}
       type="file"
-      multiple
+      multiple={maximum !== 1}
       accept={extensions}
       bind:this={fileInput}
       on:change={handleFile}
@@ -339,6 +347,12 @@
         {/if}
       {/if}
     </div>
+    {#if loading}
+      <div class="loading">
+        <ProgressCircle size="M" />
+      </div>
+    {/if}
   </div>
 {/if}
 </div>
@@ -464,6 +478,7 @@
   .spectrum-Dropzone {
     height: 220px;
+    position: relative;
   }
   .compact .spectrum-Dropzone {
     height: 40px;
@@ -488,4 +503,14 @@
   .tag {
     margin-top: 8px;
   }
+  .loading {
+    position: absolute;
+    display: grid;
+    place-items: center;
+    height: 100%;
+    width: 100%;
+    top: 0;
+    left: 0;
+  }
 </style>
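
For context on the try/finally above: processFiles is supplied by the consumer and may reject, so the component must clear `loading` either way. A hypothetical implementation, with the endpoint and return shape assumed purely for illustration:

async function processFiles(fileList) {
  const data = new FormData()
  for (const file of Array.from(fileList)) {
    data.append("file", file)
  }
  const res = await fetch("/api/attachments/process", { method: "POST", body: data })
  if (!res.ok) {
    // the rejection propagates to the component; finally still resets loading
    throw new Error(`upload failed: ${res.status}`)
  }
  return res.json()
}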


@@ -11,7 +11,6 @@
   export let disabledPermissions = []
   export let columns
   export let fromRelationshipField
-  export let canSetRelationshipSchemas

   const { datasource, dispatch } = getContext("grid")
@@ -129,6 +128,8 @@
     }
   })

+  $: hasLinkColumns = columns.some(c => c.schema.type === FieldType.LINK)
+
   async function toggleColumn(column, permission) {
     const visible = permission !== FieldPermissions.HIDDEN
     const readonly = permission === FieldPermissions.READONLY
@@ -184,7 +185,7 @@
           value={columnToPermissionOptions(column)}
           options={column.options}
         />
-        {#if canSetRelationshipSchemas && column.schema.type === FieldType.LINK && columnToPermissionOptions(column) !== FieldPermissions.HIDDEN}
+        {#if column.schema.type === FieldType.LINK && columnToPermissionOptions(column) !== FieldPermissions.HIDDEN}
           <div class="relationship-columns">
             <ActionButton
               on:click={e => {
@@ -203,7 +204,7 @@
   </div>
 </div>

-{#if canSetRelationshipSchemas}
+{#if hasLinkColumns}
   <Popover
     on:close={() => (relationshipFieldName = null)}
     open={relationshipFieldName}


@@ -10,8 +10,6 @@
   import { getContext } from "svelte"
   import { ActionButton } from "@budibase/bbui"
   import ColumnsSettingContent from "./ColumnsSettingContent.svelte"
-  import { isEnabled } from "helpers/featureFlags"
-  import { FeatureFlag } from "@budibase/types"
   import DetailPopover from "components/common/DetailPopover.svelte"

   const { tableColumns, datasource } = getContext("grid")
@@ -46,9 +44,5 @@
     {text}
   </ActionButton>
 </svelte:fragment>
-<ColumnsSettingContent
-  columns={$tableColumns}
-  canSetRelationshipSchemas={isEnabled(FeatureFlag.ENRICHED_RELATIONSHIPS)}
-  {permissions}
-/>
+<ColumnsSettingContent columns={$tableColumns} {permissions} />
 </DetailPopover>


@@ -53,6 +53,7 @@
   on:close={close}
   maxHeight={null}
   resizable
+  minWidth={360}
 >
   <div class="content">
     <slot />
@@ -80,7 +81,6 @@
   }

   .content {
-    width: 300px;
     padding: 20px;
     display: flex;
     flex-direction: column;


@@ -5,6 +5,7 @@ export default class NestedProviderFetch extends DataFetch {
     // Nested providers should already have exposed their own schema
     return {
       schema: datasource?.value?.schema,
+      primaryDisplay: datasource?.value?.primaryDisplay,
     }
   }

@@ -1 +1 @@
-Subproject commit 04bee88597edb1edb88ed299d0597b587f0362ec
+Subproject commit a56696a4af5667617746600fc75fe6a01744b692


@@ -153,7 +153,11 @@ async function createInstance(appId: string, template: AppTemplate) {
   await createAllSearchIndex()

   if (template && template.useTemplate) {
-    await sdk.backups.importApp(appId, db, template)
+    const opts = {
+      importObjStoreContents: true,
+      updateAttachmentColumns: !template.key, // preserve attachments when using Budibase templates
+    }
+    await sdk.backups.importApp(appId, db, template, opts)
   } else {
     // create the users table
     await db.put(USERS_TABLE_SCHEMA)


@@ -209,6 +209,7 @@ export const serveApp = async function (ctx: UserCtx) {
         ? objectStore.getGlobalFileUrl("settings", "logoUrl")
         : "",
       appMigrating: needMigrations,
+      nonce: ctx.state.nonce,
     })

     const appHbs = loadHandlebarsFile(appHbsPath)
     ctx.body = await processString(appHbs, {
@@ -217,6 +218,7 @@ export const serveApp = async function (ctx: UserCtx) {
       css: `:root{${themeVariables}} ${css.code}`,
       appId,
       embedded: bbHeaderEmbed,
+      nonce: ctx.state.nonce,
     })
   } else {
     // just return the app info for jest to assert on
@@ -258,6 +260,7 @@ export const serveBuilderPreview = async function (ctx: Ctx) {
     const previewHbs = loadHandlebarsFile(join(previewLoc, "preview.hbs"))
     ctx.body = await processString(previewHbs, {
       clientLibPath: objectStore.clientLibraryUrl(appId!, appInfo.version),
+      nonce: ctx.state.nonce,
     })
   } else {
     // just return the app info for jest to assert on


@@ -16,6 +16,8 @@
   export let hideDevTools
   export let sideNav
   export let hideFooter
+
+  export let nonce
 </script>

 <svelte:head>
@@ -118,11 +120,11 @@
         <p />
       {/if}
     </div>
-    <script type="application/javascript">
+    <script type="application/javascript" {nonce}>
       window.INIT_TIME = Date.now()
     </script>
     {#if appMigrating}
-      <script type="application/javascript">
+      <script type="application/javascript" {nonce}>
        window.MIGRATING_APP = true
      </script>
    {/if}
@@ -135,7 +137,7 @@
     <script type="application/javascript" src={plugin.jsUrl}></script>
   {/each}
 {/if}
-<script type="application/javascript">
+<script type="application/javascript" {nonce}>
   if (window.loadBudibase) {
     window.loadBudibase()
   } else {


@@ -1,5 +1,5 @@
 <html>
-  <script>
+  <script nonce="{{ nonce }}">
     document.fonts.ready.then(() => {
       window.parent.postMessage({ type: "docLoaded" });
     })
@@ -9,7 +9,7 @@
   <style>{{{css}}}</style>
 </head>
-<script>
+<script nonce="{{ nonce }}">
   window["##BUDIBASE_APP_ID##"] = "{{appId}}"
   window["##BUDIBASE_APP_EMBEDDED##"] = "{{embedded}}"
 </script>


@@ -31,7 +31,7 @@
     }
   </style>
   <script src='{{ clientLibPath }}'></script>
-  <script>
+  <script nonce="{{ nonce }}">
     function receiveMessage(event) {
       if (!event.data) {
         return


@@ -15,12 +15,11 @@ import { getViews, saveView } from "../view/utils"
 import viewTemplate from "../view/viewBuilder"
 import { cloneDeep } from "lodash/fp"
 import { quotas } from "@budibase/pro"
-import { context, events, features, HTTPError } from "@budibase/backend-core"
+import { context, events, HTTPError } from "@budibase/backend-core"
 import {
   AutoFieldSubType,
   Database,
   Datasource,
-  FeatureFlag,
   FieldSchema,
   FieldType,
   NumberFieldMetadata,
@@ -336,9 +335,8 @@ class TableSaveFunctions {
       importRows: this.importRows,
       userId: this.userId,
     })
-    if (await features.flags.isEnabled(FeatureFlag.SQS)) {
-      await sdk.tables.sqs.addTable(table)
-    }
+    await sdk.tables.sqs.addTable(table)
     return table
   }
@@ -530,10 +528,9 @@ export async function internalTableCleanup(table: Table, rows?: Row[]) {
   if (rows) {
     await AttachmentCleanup.tableDelete(table, rows)
   }
-  if (await features.flags.isEnabled(FeatureFlag.SQS)) {
-    await sdk.tables.sqs.removeTable(table)
-  }
+  await sdk.tables.sqs.removeTable(table)
 }

 const _TableSaveFunctions = TableSaveFunctions
 export { _TableSaveFunctions as TableSaveFunctions }


@@ -16,7 +16,7 @@ jest.mock("../../../utilities/redis", () => ({
 import { checkBuilderEndpoint } from "./utilities/TestFunctions"
 import * as setup from "./utilities"
 import { AppStatus } from "../../../db/utils"
-import { events, utils, context, features } from "@budibase/backend-core"
+import { events, utils, context } from "@budibase/backend-core"
 import env from "../../../environment"
 import { type App, BuiltinPermissionID } from "@budibase/types"
 import tk from "timekeeper"
@@ -355,21 +355,6 @@ describe("/applications", () => {
     expect(events.app.deleted).toHaveBeenCalledTimes(1)
     expect(events.app.unpublished).toHaveBeenCalledTimes(1)
   })
-
-  it("should be able to delete an app after SQS has been set but app hasn't been migrated", async () => {
-    const prodAppId = app.appId.replace("_dev", "")
-
-    nock("http://localhost:10000")
-      .delete(`/api/global/roles/${prodAppId}`)
-      .reply(200, {})
-
-    await features.testutils.withFeatureFlags(
-      "*",
-      { SQS: true },
-      async () => {
-        await config.api.application.delete(app.appId)
-      }
-    )
-  })
 })

 describe("POST /api/applications/:appId/duplicate", () => {


@@ -9,27 +9,18 @@ import {
 import tk from "timekeeper"
 import emitter from "../../../../src/events"
 import { outputProcessing } from "../../../utilities/rowProcessor"
-import {
-  context,
-  InternalTable,
-  tenancy,
-  features,
-  utils,
-} from "@budibase/backend-core"
+import { context, InternalTable, tenancy, utils } from "@budibase/backend-core"
 import { quotas } from "@budibase/pro"
 import {
   AIOperationEnum,
-  AttachmentFieldMetadata,
   AutoFieldSubType,
   Datasource,
-  DateFieldMetadata,
   DeleteRow,
   FieldSchema,
   FieldType,
   BBReferenceFieldSubType,
   FormulaType,
   INTERNAL_TABLE_SOURCE_ID,
-  NumberFieldMetadata,
   QuotaUsageType,
   RelationshipType,
   Row,
@@ -90,8 +81,7 @@ async function waitForEvent(
 }

 describe.each([
-  ["lucene", undefined],
-  ["sqs", undefined],
+  ["internal", undefined],
   [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
   [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
   [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
@@ -99,8 +89,6 @@ describe.each([
   [DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)],
 ])("/rows (%s)", (providerType, dsProvider) => {
   const isInternal = dsProvider === undefined
-  const isLucene = providerType === "lucene"
-  const isSqs = providerType === "sqs"
   const isMSSQL = providerType === DatabaseName.SQL_SERVER
   const isOracle = providerType === DatabaseName.ORACLE
   const config = setup.getConfig()
@@ -108,15 +96,9 @@ describe.each([
   let table: Table
   let datasource: Datasource | undefined
   let client: Knex | undefined
-  let envCleanup: (() => void) | undefined

   beforeAll(async () => {
-    await features.testutils.withFeatureFlags("*", { SQS: true }, () =>
-      config.init()
-    )
-    envCleanup = features.testutils.setFeatureFlags("*", {
-      SQS: isSqs,
-    })
+    await config.init()

     if (dsProvider) {
       const rawDatasource = await dsProvider
@@ -129,9 +111,6 @@ describe.each([

   afterAll(async () => {
     setup.afterAll()
-    if (envCleanup) {
-      envCleanup()
-    }
   })

   function saveTableRequest(
@@ -381,185 +360,6 @@ describe.each([
     expect(ids).toEqual(expect.arrayContaining(sequence))
   })
isLucene &&
it("row values are coerced", async () => {
const str: FieldSchema = {
type: FieldType.STRING,
name: "str",
constraints: { type: "string", presence: false },
}
const singleAttachment: FieldSchema = {
type: FieldType.ATTACHMENT_SINGLE,
name: "single attachment",
constraints: { presence: false },
}
const attachmentList: AttachmentFieldMetadata = {
type: FieldType.ATTACHMENTS,
name: "attachments",
constraints: { type: "array", presence: false },
}
const signature: FieldSchema = {
type: FieldType.SIGNATURE_SINGLE,
name: "signature",
constraints: { presence: false },
}
const bool: FieldSchema = {
type: FieldType.BOOLEAN,
name: "boolean",
constraints: { type: "boolean", presence: false },
}
const number: NumberFieldMetadata = {
type: FieldType.NUMBER,
name: "str",
constraints: { type: "number", presence: false },
}
const datetime: DateFieldMetadata = {
type: FieldType.DATETIME,
name: "datetime",
constraints: {
type: "string",
presence: false,
datetime: { earliest: "", latest: "" },
},
}
const arrayField: FieldSchema = {
type: FieldType.ARRAY,
constraints: {
type: JsonFieldSubType.ARRAY,
presence: false,
inclusion: ["One", "Two", "Three"],
},
name: "Sample Tags",
sortable: false,
}
const optsField: FieldSchema = {
name: "Sample Opts",
type: FieldType.OPTIONS,
constraints: {
type: "string",
presence: false,
inclusion: ["Alpha", "Beta", "Gamma"],
},
}
const table = await config.api.table.save(
saveTableRequest({
schema: {
name: str,
stringUndefined: str,
stringNull: str,
stringString: str,
numberEmptyString: number,
numberNull: number,
numberUndefined: number,
numberString: number,
numberNumber: number,
datetimeEmptyString: datetime,
datetimeNull: datetime,
datetimeUndefined: datetime,
datetimeString: datetime,
datetimeDate: datetime,
boolNull: bool,
boolEmpty: bool,
boolUndefined: bool,
boolString: bool,
boolBool: bool,
singleAttachmentNull: singleAttachment,
singleAttachmentUndefined: singleAttachment,
attachmentListNull: attachmentList,
attachmentListUndefined: attachmentList,
attachmentListEmpty: attachmentList,
attachmentListEmptyArrayStr: attachmentList,
signatureNull: signature,
signatureUndefined: signature,
arrayFieldEmptyArrayStr: arrayField,
arrayFieldArrayStrKnown: arrayField,
arrayFieldNull: arrayField,
arrayFieldUndefined: arrayField,
optsFieldEmptyStr: optsField,
optsFieldUndefined: optsField,
optsFieldNull: optsField,
optsFieldStrKnown: optsField,
},
})
)
const datetimeStr = "1984-04-20T00:00:00.000Z"
const row = await config.api.row.save(table._id!, {
name: "Test Row",
stringUndefined: undefined,
stringNull: null,
stringString: "i am a string",
numberEmptyString: "",
numberNull: null,
numberUndefined: undefined,
numberString: "123",
numberNumber: 123,
datetimeEmptyString: "",
datetimeNull: null,
datetimeUndefined: undefined,
datetimeString: datetimeStr,
datetimeDate: new Date(datetimeStr),
boolNull: null,
boolEmpty: "",
boolUndefined: undefined,
boolString: "true",
boolBool: true,
tableId: table._id,
singleAttachmentNull: null,
singleAttachmentUndefined: undefined,
attachmentListNull: null,
attachmentListUndefined: undefined,
attachmentListEmpty: "",
attachmentListEmptyArrayStr: "[]",
signatureNull: null,
signatureUndefined: undefined,
arrayFieldEmptyArrayStr: "[]",
arrayFieldUndefined: undefined,
arrayFieldNull: null,
arrayFieldArrayStrKnown: "['One']",
optsFieldEmptyStr: "",
optsFieldUndefined: undefined,
optsFieldNull: null,
optsFieldStrKnown: "Alpha",
})
expect(row.stringUndefined).toBe(undefined)
expect(row.stringNull).toBe(null)
expect(row.stringString).toBe("i am a string")
expect(row.numberEmptyString).toBe(null)
expect(row.numberNull).toBe(null)
expect(row.numberUndefined).toBe(undefined)
expect(row.numberString).toBe(123)
expect(row.numberNumber).toBe(123)
expect(row.datetimeEmptyString).toBe(null)
expect(row.datetimeNull).toBe(null)
expect(row.datetimeUndefined).toBe(undefined)
expect(row.datetimeString).toBe(new Date(datetimeStr).toISOString())
expect(row.datetimeDate).toBe(new Date(datetimeStr).toISOString())
expect(row.boolNull).toBe(null)
expect(row.boolEmpty).toBe(null)
expect(row.boolUndefined).toBe(undefined)
expect(row.boolString).toBe(true)
expect(row.boolBool).toBe(true)
expect(row.singleAttachmentNull).toEqual(null)
expect(row.singleAttachmentUndefined).toBe(undefined)
expect(row.attachmentListNull).toEqual([])
expect(row.attachmentListUndefined).toBe(undefined)
expect(row.attachmentListEmpty).toEqual([])
expect(row.attachmentListEmptyArrayStr).toEqual([])
expect(row.signatureNull).toEqual(null)
expect(row.signatureUndefined).toBe(undefined)
expect(row.arrayFieldEmptyArrayStr).toEqual([])
expect(row.arrayFieldNull).toEqual([])
expect(row.arrayFieldUndefined).toEqual(undefined)
expect(row.optsFieldEmptyStr).toEqual(null)
expect(row.optsFieldUndefined).toEqual(undefined)
expect(row.optsFieldNull).toEqual(null)
expect(row.arrayFieldArrayStrKnown).toEqual(["One"])
expect(row.optsFieldStrKnown).toEqual("Alpha")
})
   isInternal &&
     it("doesn't allow creating in user table", async () => {
       const response = await config.api.row.save(
@@ -1023,7 +823,6 @@ describe.each([
     })
   })

-  !isLucene &&
   describe("relations to same table", () => {
     let relatedRows: Row[]
@@ -1224,7 +1023,6 @@ describe.each([
     expect(rows).toHaveLength(1)
   })

-  !isLucene &&
   describe("relations to same table", () => {
     let relatedRows: Row[]
@@ -1628,7 +1426,6 @@ describe.each([
     expect(res.length).toEqual(2)
   })

-  !isLucene &&
   describe("relations to same table", () => {
     let relatedRows: Row[]
@@ -3061,13 +2858,7 @@ describe.each([
       let auxData: Row[] = []

-      let flagCleanup: (() => void) | undefined
-
       beforeAll(async () => {
-        flagCleanup = features.testutils.setFeatureFlags("*", {
-          ENRICHED_RELATIONSHIPS: true,
-        })
-
         const aux2Table = await config.api.table.save(saveTableRequest())
         const aux2Data = await config.api.row.save(aux2Table._id!, {})
@@ -3214,10 +3005,6 @@ describe.each([
         viewId = view.id
       })

-      afterAll(() => {
-        flagCleanup?.()
-      })
-
       const testScenarios: [string, (row: Row) => Promise<Row> | Row][] = [
         ["get row", (row: Row) => config.api.row.get(viewId, row._id!)],
         [
@@ -3290,68 +3077,6 @@ describe.each([
         }
       )
it.each(testScenarios)(
"does not enrich relationships when not enabled (via %s)",
async (__, retrieveDelegate) => {
await features.testutils.withFeatureFlags(
"*",
{
ENRICHED_RELATIONSHIPS: false,
},
async () => {
const otherRows = _.sampleSize(auxData, 5)
const row = await config.api.row.save(viewId, {
title: generator.word(),
relWithNoSchema: [otherRows[0]],
relWithEmptySchema: [otherRows[1]],
relWithFullSchema: [otherRows[2]],
relWithHalfSchema: [otherRows[3]],
relWithIllegalSchema: [otherRows[4]],
})
const retrieved = await retrieveDelegate(row)
expect(retrieved).toEqual(
expect.objectContaining({
title: row.title,
relWithNoSchema: [
{
_id: otherRows[0]._id,
primaryDisplay: otherRows[0].name,
},
],
relWithEmptySchema: [
{
_id: otherRows[1]._id,
primaryDisplay: otherRows[1].name,
},
],
relWithFullSchema: [
{
_id: otherRows[2]._id,
primaryDisplay: otherRows[2].name,
},
],
relWithHalfSchema: [
{
_id: otherRows[3]._id,
primaryDisplay: otherRows[3].name,
},
],
relWithIllegalSchema: [
{
_id: otherRows[4]._id,
primaryDisplay: otherRows[4].name,
},
],
})
)
}
)
}
)
      it.each([
        [
          "from table fetch",
@@ -3422,7 +3147,7 @@ describe.each([
       )
     })

-  isSqs &&
+  isInternal &&
     describe("AI fields", () => {
       let table: Table


@@ -8,7 +8,6 @@ import {
   context,
   db as dbCore,
   docIds,
-  features,
   MAX_VALID_DATE,
   MIN_VALID_DATE,
   SQLITE_DESIGN_DOC_ID,
@@ -64,7 +63,6 @@ jest.mock("@budibase/pro", () => ({

 describe.each([
   ["in-memory", undefined],
-  ["lucene", undefined],
   ["sqs", undefined],
   [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
   [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
@@ -72,15 +70,12 @@ describe.each([
   [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
   [DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)],
 ])("search (%s)", (name, dsProvider) => {
-  const isSqs = name === "sqs"
-  const isLucene = name === "lucene"
   const isInMemory = name === "in-memory"
-  const isInternal = isSqs || isLucene || isInMemory
+  const isInternal = !dsProvider
   const isOracle = name === DatabaseName.ORACLE
-  const isSql = !isInMemory && !isLucene
+  const isSql = !isInMemory

   const config = setup.getConfig()
-  let envCleanup: (() => void) | undefined
   let datasource: Datasource | undefined
   let client: Knex | undefined
   let tableOrViewId: string
@@ -111,12 +106,7 @@ describe.each([
   }

   beforeAll(async () => {
-    await features.testutils.withFeatureFlags("*", { SQS: true }, () =>
-      config.init()
-    )
-    envCleanup = features.testutils.setFeatureFlags("*", {
-      SQS: isSqs,
-    })
+    await config.init()

     if (config.app?.appId) {
       config.app = await config.api.application.update(config.app?.appId, {
@@ -140,9 +130,6 @@ describe.each([

   afterAll(async () => {
     setup.afterAll()
-    if (envCleanup) {
-      envCleanup()
-    }
   })

   async function createTable(schema?: TableSchema) {
@@ -221,11 +208,6 @@ describe.each([
   ])("from %s", (sourceType, createTableOrView) => {
     const isView = sourceType === "view"

-    if (isView && isLucene) {
-      // Some tests don't have the expected result in views via lucene, and given that it is getting deprecated, we exclude them from the tests
-      return
-    }
-
     class SearchAssertion {
       constructor(private readonly query: SearchRowRequest) {}
@@ -598,7 +580,6 @@ describe.each([
       ])
     })

-    !isLucene &&
     it("should return all rows matching the session user firstname when logical operator used", async () => {
       await expectQuery({
         $and: {
@@ -1034,7 +1015,6 @@ describe.each([
       }).toFindNothing()
     })

-    !isLucene &&
     it("ignores low if it's an empty object", async () => {
       await expectQuery({
         // @ts-ignore
@@ -1042,7 +1022,6 @@ describe.each([
       }).toContainExactly([{ name: "foo" }, { name: "bar" }])
     })

-    !isLucene &&
     it("ignores high if it's an empty object", async () => {
       await expectQuery({
         // @ts-ignore
@@ -1202,10 +1181,6 @@ describe.each([
       await expectQuery({ oneOf: { age: [2] } }).toFindNothing()
     })

-    // I couldn't find a way to make this work in Lucene and given that
-    // we're getting rid of Lucene soon I wasn't inclined to spend time on
-    // it.
-    !isLucene &&
     it("can convert from a string", async () => {
       await expectQuery({
         oneOf: {
@@ -1215,10 +1190,6 @@ describe.each([
       }).toContainExactly([{ age: 1 }])
     })

-    // I couldn't find a way to make this work in Lucene and given that
-    // we're getting rid of Lucene soon I wasn't inclined to spend time on
-    // it.
-    !isLucene &&
     it("can find multiple values for same column", async () => {
       await expectQuery({
         oneOf: {
@@ -1648,7 +1619,8 @@ describe.each([
       })
     })

-    isSqs &&
+    isInternal &&
+      !isInMemory &&
       describe("AI Column", () => {
         const UNEXISTING_AI_COLUMN = "Real LLM Response"
@@ -1879,10 +1851,6 @@ describe.each([
       })
     })

-    // Range searches against bigints don't seem to work at all in Lucene, and I
-    // couldn't figure out why. Given that we're replacing Lucene with SQS,
-    // we've decided not to spend time on it.
-    !isLucene &&
     describe("range", () => {
       it("successfully finds a row", async () => {
         await expectQuery({
@@ -2016,14 +1984,12 @@ describe.each([
        }).toFindNothing()
      })

-      isSqs &&
       it("can search using just a low value", async () => {
         await expectQuery({
           range: { auto: { low: 9 } },
         }).toContainExactly([{ auto: 9 }, { auto: 10 }])
       })

-      isSqs &&
       it("can search using just a high value", async () => {
         await expectQuery({
           range: { auto: { high: 2 } },
@@ -2031,13 +1997,13 @@ describe.each([
        })
      })

-      isSqs &&
       describe("sort", () => {
         it("sorts ascending", async () => {
           await expectSearch({
             query: {},
             sort: "auto",
             sortOrder: SortOrder.ASCENDING,
+            sortType: SortType.NUMBER,
           }).toMatchExactly([
             { auto: 1 },
             { auto: 2 },
@@ -2057,6 +2023,7 @@ describe.each([
             query: {},
             sort: "auto",
             sortOrder: SortOrder.DESCENDING,
+            sortType: SortType.NUMBER,
           }).toMatchExactly([
             { auto: 10 },
             { auto: 9 },
@ -2392,8 +2359,6 @@ describe.each([
}) })
}) })
// This will never work for Lucene.
!isLucene &&
// It also can't work for in-memory searching because the related table name // It also can't work for in-memory searching because the related table name
// isn't available. // isn't available.
!isInMemory && !isInMemory &&
@ -2847,8 +2812,6 @@ describe.each([
}) })
}) })
// lucene can't count the total rows
!isLucene &&
describe("row counting", () => { describe("row counting", () => {
beforeAll(async () => { beforeAll(async () => {
tableOrViewId = await createTableOrView({ tableOrViewId = await createTableOrView({
@ -3065,9 +3028,7 @@ describe.each([
}) })
}) })
// This was never actually supported in Lucene but SQS does support it, so may isInternal &&
// as well have a test for it.
;(isSqs || isInMemory) &&
describe("space at start of column name", () => { describe("space at start of column name", () => {
beforeAll(async () => { beforeAll(async () => {
tableOrViewId = await createTableOrView({ tableOrViewId = await createTableOrView({
@ -3100,7 +3061,7 @@ describe.each([
}) })
}) })
isSqs && isInternal &&
!isView && !isView &&
describe("duplicate columns", () => { describe("duplicate columns", () => {
beforeAll(async () => { beforeAll(async () => {
@ -3262,7 +3223,6 @@ describe.each([
}) })
}) })
!isLucene &&
describe("$and", () => { describe("$and", () => {
beforeAll(async () => { beforeAll(async () => {
tableOrViewId = await createTableOrView({ tableOrViewId = await createTableOrView({
@ -3340,10 +3300,7 @@ describe.each([
await expect( await expect(
expectQuery({ expectQuery({
$and: { $and: {
conditions: [ conditions: [{ equal: { age: 10 } }, "invalidCondition" as any],
{ equal: { age: 10 } },
"invalidCondition" as any,
],
}, },
}).toFindNothing() }).toFindNothing()
).rejects.toThrow( ).rejects.toThrow(
@ -3396,7 +3353,6 @@ describe.each([
}) })
}) })
!isLucene &&
describe("$or", () => { describe("$or", () => {
beforeAll(async () => { beforeAll(async () => {
tableOrViewId = await createTableOrView({ tableOrViewId = await createTableOrView({
@ -3590,8 +3546,7 @@ describe.each([
}) })
}) })
isSql && !isInternal &&
!isSqs &&
describe("SQL injection", () => { describe("SQL injection", () => {
const badStrings = [ const badStrings = [
"1; DROP TABLE %table_name%;", "1; DROP TABLE %table_name%;",

View File

@ -2,7 +2,6 @@ import * as setup from "./utilities"
import path from "path" import path from "path"
import nock from "nock" import nock from "nock"
import { generator } from "@budibase/backend-core/tests" import { generator } from "@budibase/backend-core/tests"
import { features } from "@budibase/backend-core"
interface App { interface App {
background: string background: string
@ -82,13 +81,7 @@ describe("/templates", () => {
}) })
describe("create app from template", () => { describe("create app from template", () => {
it.each(["sqs", "lucene"])( it("should be able to create an app from a template", async () => {
`should be able to create an app from a template (%s)`,
async source => {
await features.testutils.withFeatureFlags(
"*",
{ SQS: source === "sqs" },
async () => {
const name = generator.guid().replaceAll("-", "") const name = generator.guid().replaceAll("-", "")
const url = `/${name}` const url = `/${name}`
@ -111,19 +104,13 @@ describe("/templates", () => {
expect(agencyProjects.name).toBe("Agency Projects") expect(agencyProjects.name).toBe("Agency Projects")
expect(users.name).toBe("Users") expect(users.name).toBe("Users")
const { rows } = await config.api.row.search( const { rows } = await config.api.row.search(agencyProjects._id!, {
agencyProjects._id!,
{
tableId: agencyProjects._id!, tableId: agencyProjects._id!,
query: {}, query: {},
} })
)
expect(rows).toHaveLength(3) expect(rows).toHaveLength(3)
}) })
} })
)
}
)
}) })
}) })

View File

@ -40,10 +40,9 @@ import { generator, mocks } from "@budibase/backend-core/tests"
import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
import merge from "lodash/merge"
import { quotas } from "@budibase/pro"
import { db, roles, features, context } from "@budibase/backend-core"
import { db, roles, context } from "@budibase/backend-core"
describe.each([
["lucene", undefined],
["sqs", undefined],
[DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
[DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
@ -52,14 +51,11 @@ describe.each([
[DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)],
])("/v2/views (%s)", (name, dsProvider) => {
const config = setup.getConfig()
const isSqs = name === "sqs"
const isLucene = name === "lucene"
const isInternal = isSqs || isLucene
const isInternal = name === "sqs"
let table: Table
let rawDatasource: Datasource | undefined
let datasource: Datasource | undefined
let envCleanup: (() => void) | undefined
function saveTableRequest(
...overrides: Partial<Omit<SaveTableRequest, "name">>[]
@ -106,13 +102,7 @@ describe.each([
}
beforeAll(async () => {
await features.testutils.withFeatureFlags("*", { SQS: isSqs }, () =>
config.init()
)
envCleanup = features.testutils.setFeatureFlags("*", {
SQS: isSqs,
})
await config.init()
if (dsProvider) {
rawDatasource = await dsProvider
@ -125,9 +115,6 @@ describe.each([
afterAll(async () => {
setup.afterAll()
if (envCleanup) {
envCleanup()
}
})
beforeEach(() => {
@ -855,7 +842,6 @@ describe.each([
})
})
!isLucene &&
it("does not get confused when a calculation field shadows a basic one", async () => {
const table = await config.api.table.save(
saveTableRequest({
@ -1453,7 +1439,6 @@ describe.each([
)
})
!isLucene &&
describe("calculation views", () => {
let table: Table
let view: ViewV2
@ -2293,7 +2278,6 @@ describe.each([
})
})
!isLucene &&
describe("calculation views", () => {
it("should not remove calculation columns when modifying table schema", async () => {
let table = await config.api.table.save(
@ -2721,13 +2705,10 @@ describe.each([
})
})
!isLucene &&
describe("search", () => {
it("returns empty rows from view when no schema is passed", async () => {
const rows = await Promise.all(
Array.from({ length: 10 }, () =>
config.api.row.save(table._id!, {})
)
Array.from({ length: 10 }, () => config.api.row.save(table._id!, {}))
)
const response = await config.api.viewV2.search(view.id)
expect(response.rows).toHaveLength(10)
@ -2864,9 +2845,7 @@ describe.each([
it("respects the limit parameter", async () => {
await Promise.all(
Array.from({ length: 10 }, () =>
config.api.row.save(table._id!, {})
)
Array.from({ length: 10 }, () => config.api.row.save(table._id!, {}))
)
const limit = generator.integer({ min: 1, max: 8 })
const response = await config.api.viewV2.search(view.id, {
@ -2878,9 +2857,7 @@ describe.each([
it("can handle pagination", async () => {
await Promise.all(
Array.from({ length: 10 }, () =>
config.api.row.save(table._id!, {})
)
Array.from({ length: 10 }, () => config.api.row.save(table._id!, {}))
)
const rows = (await config.api.viewV2.search(view.id)).rows
@ -2923,9 +2900,6 @@ describe.each([
hasNextPage: false,
totalRows: 10,
}
if (isLucene) {
expectation.bookmark = expect.anything()
}
expect(page3).toEqual(expectation)
})
@ -3148,9 +3122,7 @@ describe.each([
})
expect(response.rows).toHaveLength(1)
expect(response.rows).toEqual(
expect.arrayContaining([
expect.objectContaining({ _id: three._id }),
])
expect.arrayContaining([expect.objectContaining({ _id: three._id })])
)
})
@ -3211,7 +3183,6 @@ describe.each([
)
})
!isLucene &&
it.each([true, false])(
"can filter a view without a view filter",
async allOr => {
@ -3249,7 +3220,6 @@ describe.each([
}
)
!isLucene &&
it.each([true, false])("cannot bypass a view filter", async allOr => {
await config.api.row.save(table._id!, {
one: "foo",
@ -3295,17 +3265,6 @@ describe.each([
})
describe("foreign relationship columns", () => {
let envCleanup: () => void
beforeAll(() => {
envCleanup = features.testutils.setFeatureFlags("*", {
ENRICHED_RELATIONSHIPS: true,
})
})
afterAll(() => {
envCleanup?.()
})
const createMainTable = async (
links: {
name: string
@ -3455,7 +3414,6 @@ describe.each([
})
})
!isLucene &&
describe("calculations", () => {
let table: Table
let rows: Row[]
@ -3508,10 +3466,7 @@ describe.each([
expect(response.rows).toEqual(
expect.arrayContaining([
expect.objectContaining({
"Quantity Sum": rows.reduce(
(acc, r) => acc + r.quantity,
0
),
"Quantity Sum": rows.reduce((acc, r) => acc + r.quantity, 0),
}),
])
)
@ -3552,9 +3507,7 @@ describe.each([
}
for (const row of response.rows) {
expect(row["Total Price"]).toEqual(
priceByQuantity[row.quantity]
)
expect(row["Total Price"]).toEqual(priceByQuantity[row.quantity])
}
})
@ -3582,10 +3535,7 @@ describe.each([
query: {},
})
function calculate(
type: CalculationType,
numbers: number[]
): number {
function calculate(type: CalculationType, numbers: number[]): number {
switch (type) {
case CalculationType.COUNT:
return numbers.length
@ -3744,12 +3694,9 @@ describe.each([
},
})
const apertureScience = await config.api.row.save(
companies._id!,
{
const apertureScience = await config.api.row.save(companies._id!, {
name: "Aperture Science Laboratories",
}
)
})
const blackMesa = await config.api.row.save(companies._id!, {
name: "Black Mesa",
@ -4075,7 +4022,6 @@ describe.each([
})
})
!isLucene &&
it("should not need required fields to be present", async () => {
const table = await config.api.table.save(
saveTableRequest({
@ -4464,9 +4410,7 @@ describe.each([
}),
expected: () => [
{
users: [
expect.objectContaining({ _id: config.getUser()._id }),
],
users: [expect.objectContaining({ _id: config.getUser()._id })],
},
],
},
@ -4603,9 +4547,7 @@ describe.each([
query: {},
...searchOpts,
})
expect(rows).toEqual(
expected.map(r => expect.objectContaining(r))
)
expect(rows).toEqual(expected.map(r => expect.objectContaining(r)))
}
)
})

View File

@ -1,10 +1,6 @@
import * as setup from "../../../api/routes/tests/utilities"
import { basicTable } from "../../../tests/utilities/structures"
import {
db as dbCore,
features,
SQLITE_DESIGN_DOC_ID,
} from "@budibase/backend-core"
import { db as dbCore, SQLITE_DESIGN_DOC_ID } from "@budibase/backend-core"
import {
LinkDocument,
DocumentType,
@ -70,17 +66,8 @@ function oldLinkDocument(): Omit<LinkDocument, "tableId"> {
}
}
async function sqsDisabled(cb: () => Promise<void>) {
await features.testutils.withFeatureFlags("*", { SQS: false }, cb)
}
async function sqsEnabled(cb: () => Promise<void>) {
await features.testutils.withFeatureFlags("*", { SQS: true }, cb)
}
describe("SQS migration", () => {
beforeAll(async () => {
await sqsDisabled(async () => {
await config.init()
const table = await config.api.table.save(basicTable())
tableId = table._id!
@ -88,7 +75,6 @@ describe("SQS migration", () => {
// old link document
await db.put(oldLinkDocument())
})
})
beforeEach(async () => {
await config.doInTenant(async () => {
@ -101,19 +87,11 @@ describe("SQS migration", () => {
it("test migration runs as expected against an older DB", async () => {
const db = dbCore.getDB(config.appId!)
// confirm nothing exists initially
await sqsDisabled(async () => {
let error: any | undefined
try {
await db.get(SQLITE_DESIGN_DOC_ID)
} catch (err: any) {
error = err
}
expect(error).toBeDefined()
expect(error.status).toBe(404)
})
await sqsEnabled(async () => {
// remove the sqlite design doc to simulate an older installation
const doc = await db.get(SQLITE_DESIGN_DOC_ID)
await db.remove({ _id: doc._id, _rev: doc._rev })
await processMigrations(config.appId!, MIGRATIONS)
const designDoc = await db.get<SQLiteDefinition>(SQLITE_DESIGN_DOC_ID)
expect(designDoc.sql.tables).toBeDefined()
@ -130,9 +108,7 @@ describe("SQS migration", () => {
const { tableId1, tableId2, rowId1, rowId2 } = oldLinkDocInfo()
const linkDoc = await db.get<LinkDocument>(oldLinkDocID())
expect(linkDoc.tableId).toEqual(
generateJunctionTableID(tableId1, tableId2)
)
expect(linkDoc.tableId).toEqual(generateJunctionTableID(tableId1, tableId2))
// should have swapped the documents
expect(linkDoc.doc1.tableId).toEqual(tableId2)
expect(linkDoc.doc1.rowId).toEqual(rowId2)
@ -140,4 +116,3 @@ describe("SQS migration", () => {
expect(linkDoc.doc2.rowId).toEqual(rowId1)
})
})
})

View File

@ -14,11 +14,10 @@ import {
coreOutputProcessing,
processFormulas,
} from "../../utilities/rowProcessor"
import { context, features } from "@budibase/backend-core"
import { context } from "@budibase/backend-core"
import {
ContextUser,
EventType,
FeatureFlag,
FieldType,
LinkDocumentValue,
Row,
@ -251,20 +250,14 @@ export async function squashLinks<T = Row[] | Row>(
source: Table | ViewV2,
enriched: T
): Promise<T> {
const allowRelationshipSchemas = await features.flags.isEnabled(
FeatureFlag.ENRICHED_RELATIONSHIPS
)
let viewSchema: ViewV2Schema = {}
if (sdk.views.isView(source)) {
if (helpers.views.isCalculationView(source)) {
return enriched
}
if (allowRelationshipSchemas) {
viewSchema = source.schema || {}
}
}
let table: Table
if (sdk.views.isView(source)) {

View File

@ -6,12 +6,12 @@ import * as mysql from "./mysql"
import * as mssql from "./mssql"
import * as mariadb from "./mariadb"
import * as oracle from "./oracle"
import { GenericContainer, StartedTestContainer } from "testcontainers"
import { testContainerUtils } from "@budibase/backend-core/tests"
import cloneDeep from "lodash/cloneDeep"
export type DatasourceProvider = () => Promise<Datasource>
export const { startContainer } = testContainerUtils
export enum DatabaseName {
POSTGRES = "postgres",
MONGODB = "mongodb",
@ -71,58 +71,3 @@ export async function knexClient(ds: Datasource) {
}
}
}
export async function startContainer(container: GenericContainer) {
const imageName = (container as any).imageName.string as string
let key: string = imageName
if (imageName.includes("@sha256")) {
key = imageName.split("@")[0]
}
key = key.replaceAll("/", "-").replaceAll(":", "-")
container = container
.withReuse()
.withLabels({ "com.budibase": "true" })
.withName(`${key}_testcontainer`)
let startedContainer: StartedTestContainer | undefined = undefined
let lastError = undefined
for (let i = 0; i < 10; i++) {
try {
// container.start() is not an idempotent operation, calling `start`
// modifies the internal state of a GenericContainer instance such that
// the hash it uses to determine reuse changes. We need to clone the
// container before calling start to ensure that we're using the same
// reuse hash every time.
const containerCopy = cloneDeep(container)
startedContainer = await containerCopy.start()
lastError = undefined
break
} catch (e: any) {
lastError = e
await new Promise(resolve => setTimeout(resolve, 1000))
}
}
if (!startedContainer) {
if (lastError) {
throw lastError
}
throw new Error(`failed to start container: ${imageName}`)
}
const info = testContainerUtils.getContainerById(startedContainer.getId())
if (!info) {
throw new Error("Container not found")
}
// Some Docker runtimes, when you expose a port, will bind it to both
// 127.0.0.1 and ::1, so ipv4 and ipv6. The port spaces of ipv4 and ipv6
// addresses are not shared, and testcontainers will sometimes give you back
// the ipv6 port. There's no way to know that this has happened, and if you
// try to then connect to `localhost:port` you may attempt to bind to the v4
// address which could be unbound or even an entirely different container. For
// that reason, we don't use testcontainers' `getExposedPort` function,
// preferring instead our own method that guaranteed v4 ports.
return testContainerUtils.getExposedV4Ports(info)
}

View File

@ -6,7 +6,13 @@ import * as api from "./api"
import * as automations from "./automations"
import { Thread } from "./threads"
import * as redis from "./utilities/redis"
import { events, logging, middleware, timers } from "@budibase/backend-core"
import {
events,
logging,
middleware,
timers,
env as coreEnv,
} from "@budibase/backend-core"
import destroyable from "server-destroy"
import { userAgent } from "koa-useragent"
@ -37,6 +43,9 @@ export default function createKoaApp() {
app.use(middleware.correlation)
app.use(middleware.pino)
app.use(middleware.ip)
if (!coreEnv.DISABLE_CONTENT_SECURITY_POLICY) {
app.use(middleware.csp)
}
app.use(userAgent)
const server = http.createServer(app.callback())
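The `middleware.csp` registered above pairs with the `state: { nonce?: string }` typing added later in this commit. A minimal sketch, assuming a nonce-based policy — names here are illustrative, not the actual backend-core implementation:

import crypto from "crypto"
import { Context, Next } from "koa"

// Illustrative only: mint a fresh nonce per request, expose it on ctx.state
// so templates can stamp it onto inline <script> tags, and emit a CSP header
// that only permits scripts carrying that nonce.
export async function csp(ctx: Context, next: Next) {
  const nonce = crypto.randomBytes(16).toString("base64")
  ctx.state.nonce = nonce
  ctx.set(
    "Content-Security-Policy",
    `default-src 'self'; script-src 'self' 'nonce-${nonce}'`
  )
  await next()
}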

View File

@ -123,6 +123,7 @@ export async function updateWithExport(
// don't need obj store, the existing app already has everything we need
await backups.importApp(devId, tempDb, template, {
importObjStoreContents: false,
updateAttachmentColumns: true,
})
const newMetadata = await getNewAppMetadata(tempDb, appDb)
// get the documents to copy

View File

@ -170,7 +170,10 @@ export async function importApp(
appId: string,
db: Database,
template: TemplateType,
opts: { importObjStoreContents: boolean } = { importObjStoreContents: true }
opts: {
importObjStoreContents: boolean
updateAttachmentColumns: boolean
} = { importObjStoreContents: true, updateAttachmentColumns: true }
) {
let prodAppId = dbCore.getProdAppID(appId)
let dbStream: any
@ -219,7 +222,9 @@ export async function importApp(
if (!ok) {
throw "Error loading database dump from template."
}
if (opts.updateAttachmentColumns) {
await updateAttachmentColumns(prodAppId, db)
}
await updateAutomations(prodAppId, db)
// clear up afterward
if (tmpPath) {
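With the new options object, the defaults preserve the old behaviour; a hedged usage sketch based only on what this diff shows:

// omitting opts keeps both steps enabled, matching the previous behaviour
await importApp(appId, db, template)

// updateWithExport's call: skip object store contents but still rewrite
// attachment columns in the imported documents
await importApp(devId, tempDb, template, {
  importObjStoreContents: false,
  updateAttachmentColumns: true,
})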

View File

@ -1,11 +1,8 @@
import {
EmptyFilterOption,
FeatureFlag,
LegacyFilter,
LogicalOperator,
Row,
RowSearchParams,
SearchFilterKey,
SearchFilters,
SearchResponse,
SortOrder,
@ -19,7 +16,6 @@ import { ExportRowsParams, ExportRowsResult } from "./search/types"
import { dataFilters } from "@budibase/shared-core"
import sdk from "../../index"
import { checkFilters, searchInputMapping } from "./search/utils"
import { db, features } from "@budibase/backend-core"
import tracer from "dd-trace"
import { getQueryableFields, removeInvalidFilters } from "./queryUtils"
import { enrichSearchContext } from "../../../api/controllers/row/utils"
@ -104,35 +100,6 @@ export async function search(
}
viewQuery = checkFilters(table, viewQuery)
const sqsEnabled = await features.flags.isEnabled(FeatureFlag.SQS)
const supportsLogicalOperators =
isExternalTableID(view.tableId) || sqsEnabled
if (!supportsLogicalOperators) {
// In the unlikely event that a Grouped Filter is in a non-SQS environment
// It needs to be ignored entirely
let queryFilters: LegacyFilter[] = Array.isArray(view.query)
? view.query
: []
const { filters } = dataFilters.splitFiltersArray(queryFilters)
// Extract existing fields
const existingFields = filters.map(filter =>
db.removeKeyNumbering(filter.field)
)
// Carry over filters for unused fields
Object.keys(options.query).forEach(key => {
const operator = key as Exclude<SearchFilterKey, LogicalOperator>
Object.keys(options.query[operator] || {}).forEach(field => {
if (!existingFields.includes(db.removeKeyNumbering(field))) {
viewQuery[operator]![field] = options.query[operator]![field]
}
})
})
options.query = viewQuery
} else {
const conditions = viewQuery ? [viewQuery] : []
options.query = {
$and: {
@ -143,7 +110,6 @@ export async function search(
options.query.onEmptyFilter = viewQuery.onEmptyFilter
}
}
}
options.query = dataFilters.cleanupQuery(options.query)
options.query = dataFilters.fixupFilterArrays(options.query)
@ -170,12 +136,9 @@ export async function search(
if (isExternalTable) {
span?.addTags({ searchType: "external" })
result = await external.search(options, source)
} else if (await features.flags.isEnabled(FeatureFlag.SQS)) {
} else {
span?.addTags({ searchType: "sqs" })
result = await internal.sqs.search(options, source)
} else {
span?.addTags({ searchType: "lucene" })
result = await internal.lucene.search(options, source)
}
span.addTags({

View File

@ -1,3 +1,2 @@
export * as sqs from "./sqs"
export * as lucene from "./lucene"
export * from "./internal"

View File

@ -1,79 +0,0 @@
import { PROTECTED_INTERNAL_COLUMNS } from "@budibase/shared-core"
import { fullSearch, paginatedSearch } from "../utils"
import { InternalTables } from "../../../../../db/utils"
import {
Row,
RowSearchParams,
SearchResponse,
SortType,
Table,
User,
ViewV2,
} from "@budibase/types"
import { getGlobalUsersFromMetadata } from "../../../../../utilities/global"
import { outputProcessing } from "../../../../../utilities/rowProcessor"
import pick from "lodash/pick"
import sdk from "../../../../"
export async function search(
options: RowSearchParams,
source: Table | ViewV2
): Promise<SearchResponse<Row>> {
let table: Table
if (sdk.views.isView(source)) {
table = await sdk.views.getTable(source.id)
} else {
table = source
}
const { paginate, query } = options
const params: RowSearchParams = {
tableId: options.tableId,
viewId: options.viewId,
sort: options.sort,
sortOrder: options.sortOrder,
sortType: options.sortType,
limit: options.limit,
bookmark: options.bookmark,
version: options.version,
disableEscaping: options.disableEscaping,
query: {},
}
if (params.sort && !params.sortType) {
const schema = table.schema
const sortField = schema[params.sort]
params.sortType =
sortField.type === "number" ? SortType.NUMBER : SortType.STRING
}
let response
if (paginate) {
response = await paginatedSearch(query, params)
} else {
response = await fullSearch(query, params)
}
// Enrich search results with relationships
if (response.rows && response.rows.length) {
// enrich with global users if from users table
if (table._id === InternalTables.USER_METADATA) {
response.rows = await getGlobalUsersFromMetadata(response.rows as User[])
}
const visibleFields =
options.fields ||
Object.keys(source.schema || {}).filter(
key => source.schema?.[key].visible !== false
)
const allowedFields = [...visibleFields, ...PROTECTED_INTERNAL_COLUMNS]
response.rows = response.rows.map((r: any) => pick(r, allowedFields))
response.rows = await outputProcessing(source, response.rows, {
squash: true,
})
}
return response
}

View File

@ -10,7 +10,7 @@ import {
import TestConfiguration from "../../../../../tests/utilities/TestConfiguration"
import { search } from "../../../../../sdk/app/rows/search"
import { generator } from "@budibase/backend-core/tests"
import { features } from "@budibase/backend-core"
import {
DatabaseName,
getDatasource,
@ -21,30 +21,20 @@ import { tableForDatasource } from "../../../../../tests/utilities/structures"
// (e.g. limiting searches to returning specific fields). If it's possible to
// test through the API, it should be done there instead.
describe.each([
["lucene", undefined],
["sqs", undefined],
["internal", undefined],
[DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
[DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
[DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
[DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
])("search sdk (%s)", (name, dsProvider) => {
const isSqs = name === "sqs"
const isLucene = name === "lucene"
const isInternal = isLucene || isSqs
const isInternal = name === "internal"
const config = new TestConfiguration()
let envCleanup: (() => void) | undefined
let datasource: Datasource | undefined
let table: Table
beforeAll(async () => {
await features.testutils.withFeatureFlags("*", { SQS: isSqs }, () =>
config.init()
)
envCleanup = features.testutils.setFeatureFlags("*", {
SQS: isSqs,
})
await config.init()
if (dsProvider) {
datasource = await config.createDatasource({
@ -105,9 +95,6 @@ describe.each([
afterAll(async () => {
config.end()
if (envCleanup) {
envCleanup()
}
})
it("querying by fields will always return data attribute columns", async () => {
@ -211,7 +198,6 @@ describe.each([
})
})
!isLucene &&
it.each([
[["id", "name", "age"], 3],
[["name", "age"], 10],

View File

@ -1,4 +1,4 @@
import { context, features } from "@budibase/backend-core"
import { context } from "@budibase/backend-core"
import { getTableParams } from "../../../db/utils"
import {
breakExternalTableId,
@ -12,7 +12,6 @@ import {
TableResponse,
TableSourceType,
TableViewsResponse,
FeatureFlag,
} from "@budibase/types"
import datasources from "../datasources"
import sdk from "../../../sdk"
@ -49,10 +48,7 @@ export async function processTable(table: Table): Promise<Table> {
type: "table",
sourceId: table.sourceId || INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
}
const sqsEnabled = await features.flags.isEnabled(FeatureFlag.SQS)
if (sqsEnabled) {
processed.sql = true
}
sql: true,
}
return processed
}
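Since the SQS feature flag is gone, `processTable` now marks every internal table as SQL-backed unconditionally; a small sketch of the observable effect, assuming a saved internal table:

// sketch: no flag check involved any more
const processed = await processTable(table)
console.assert(processed.sql === true) // true for all internal tables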

View File

@ -237,6 +237,7 @@ export default class TestConfiguration {
if (!this) {
return
}

if (this.server) {
this.server.close()
} else {

View File

@ -3,7 +3,6 @@ import { fixAutoColumnSubType, processFormulas } from "./utils"
import {
cache,
context,
features,
HTTPError,
objectStore,
utils,
@ -19,7 +18,6 @@ import {
Table,
User,
ViewV2,
FeatureFlag,
} from "@budibase/types"
import { cloneDeep } from "lodash/fp"
import {
@ -423,7 +421,6 @@ export async function coreOutputProcessing(
// remove null properties to match internal API
const isExternal = isExternalTableID(table._id!)
if (isExternal || (await features.flags.isEnabled(FeatureFlag.SQS))) {
for (const row of rows) {
for (const key of Object.keys(row)) {
if (row[key] === null) {
@ -465,7 +462,6 @@ export async function coreOutputProcessing(
}
}
}
}
if (!isUserMetadataTable(table._id!)) {
const protectedColumns = isExternal

View File

@ -8,7 +8,7 @@ import {
} from "@budibase/types" } from "@budibase/types"
import { outputProcessing } from ".." import { outputProcessing } from ".."
import { generator, structures } from "@budibase/backend-core/tests" import { generator, structures } from "@budibase/backend-core/tests"
import { features } from "@budibase/backend-core"
import * as bbReferenceProcessor from "../bbReferenceProcessor" import * as bbReferenceProcessor from "../bbReferenceProcessor"
import TestConfiguration from "../../../tests/utilities/TestConfiguration" import TestConfiguration from "../../../tests/utilities/TestConfiguration"
@ -21,7 +21,6 @@ jest.mock("../bbReferenceProcessor", (): typeof bbReferenceProcessor => ({
describe("rowProcessor - outputProcessing", () => { describe("rowProcessor - outputProcessing", () => {
const config = new TestConfiguration() const config = new TestConfiguration()
let cleanupFlags: () => void = () => {}
beforeAll(async () => { beforeAll(async () => {
await config.init() await config.init()
@ -33,11 +32,6 @@ describe("rowProcessor - outputProcessing", () => {
beforeEach(() => { beforeEach(() => {
jest.resetAllMocks() jest.resetAllMocks()
cleanupFlags = features.testutils.setFeatureFlags("*", { SQS: true })
})
afterEach(() => {
cleanupFlags()
}) })
const processOutputBBReferenceMock = const processOutputBBReferenceMock =

View File

@ -527,7 +527,12 @@ export function search<T extends Record<string, any>>(
): SearchResponse<T> {
let result = runQuery(docs, query.query)
if (query.sort) {
result = sort(result, query.sort, query.sortOrder || SortOrder.ASCENDING)
result = sort(
result,
query.sort,
query.sortOrder || SortOrder.ASCENDING,
query.sortType
)
}
const totalRows = result.length
if (query.limit) {
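Passing `query.sortType` through matters because JavaScript's default sort compares elements as strings; a quick illustration of why a numeric sort type needs its own comparator:

// default Array.prototype.sort is lexicographic
const lexicographic = [1, 2, 10].sort() // [1, 10, 2]
// a NUMBER sortType must compare numerically instead
const numeric = [1, 2, 10].sort((a, b) => a - b) // [1, 2, 10]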

View File

@ -48,7 +48,7 @@ export function validate(
cronExpression: string
): { valid: false; err: string[] } | { valid: true } {
const result = cronValidate(cronExpression, {
preset: "npm-node-cron",
preset: "npm-cron-schedule",
override: {
useSeconds: false,
},
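The preset swap aligns validation with the five-field expressions the scheduler actually runs. A minimal sketch of cron-validate with the new preset, mirroring the options above:

import cronValidate from "cron-validate"

const result = cronValidate("0 9 * * 1-5", {
  preset: "npm-cron-schedule",
  override: { useSeconds: false },
})
if (!result.isValid()) {
  // getError() returns the list of validation failures
  console.error(result.getError())
}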

View File

@ -12,7 +12,6 @@ import type PouchDB from "pouchdb-find"
export enum SearchIndex {
ROWS = "rows",
AUDIT = "audit",
USER = "user",
}

View File

@ -2,10 +2,9 @@ export enum FeatureFlag {
PER_CREATOR_PER_USER_PRICE = "PER_CREATOR_PER_USER_PRICE",
PER_CREATOR_PER_USER_PRICE_ALERT = "PER_CREATOR_PER_USER_PRICE_ALERT",
AUTOMATION_BRANCHING = "AUTOMATION_BRANCHING",
SQS = "SQS",
AI_CUSTOM_CONFIGS = "AI_CUSTOM_CONFIGS",
DEFAULT_VALUES = "DEFAULT_VALUES",
ENRICHED_RELATIONSHIPS = "ENRICHED_RELATIONSHIPS",
BUDIBASE_AI = "BUDIBASE_AI",
}

View File

@ -48,6 +48,7 @@ export interface Ctx<RequestBody = any, ResponseBody = any> extends Context {
request: BBRequest<RequestBody>
body: ResponseBody
userAgent: UserAgentContext["userAgent"]
state: { nonce?: string }
}
/**
@ -56,6 +57,7 @@ export interface Ctx<RequestBody = any, ResponseBody = any>
export interface UserCtx<RequestBody = any, ResponseBody = any>
extends Ctx<RequestBody, ResponseBody> {
user: ContextUser
state: { nonce?: string }
roleId?: string
eventEmitter?: ContextEmitter
loginMethod?: LoginMethod
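Typing `state.nonce` on both `Ctx` and `UserCtx` lets handlers read the nonce the CSP middleware stored; an illustrative consumer, where the handler name is hypothetical:

// inline scripts must echo the request's nonce to satisfy the policy
export async function serveAppHtml(ctx: UserCtx) {
  ctx.body = `<script nonce="${ctx.state.nonce}">window.BOOTED = true</script>`
}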

View File

@ -1,6 +1,6 @@
import { Ctx, MaintenanceType, FeatureFlag } from "@budibase/types"
import { Ctx, MaintenanceType } from "@budibase/types"
import env from "../../../environment"
import { env as coreEnv, db as dbCore, features } from "@budibase/backend-core"
import { env as coreEnv, db as dbCore } from "@budibase/backend-core"
import nodeFetch from "node-fetch"
import { helpers } from "@budibase/shared-core"
@ -35,10 +35,7 @@ async function isSqsAvailable() {
}
async function isSqsMissing() {
return (
(await features.flags.isEnabled(FeatureFlag.SQS)) &&
!(await isSqsAvailable())
)
return !(await isSqsAvailable())
}
export const fetch = async (ctx: Ctx) => {

View File

@ -1,5 +1,5 @@
import { mocks, structures } from "@budibase/backend-core/tests"
import { context, events, features } from "@budibase/backend-core"
import { context, events } from "@budibase/backend-core"
import { Event, IdentityType } from "@budibase/types"
import { TestConfiguration } from "../../../../tests"
@ -12,19 +12,14 @@ const BASE_IDENTITY = {
const USER_AUDIT_LOG_COUNT = 3
const APP_ID = "app_1"
describe.each(["lucene", "sql"])("/api/global/auditlogs (%s)", method => {
describe("/api/global/auditlogs (%s)", () => {
const config = new TestConfiguration()
let envCleanup: (() => void) | undefined
beforeAll(async () => {
envCleanup = features.testutils.setFeatureFlags("*", {
SQS: method === "sql",
})
await config.beforeAll()
})
afterAll(async () => {
envCleanup?.()
await config.afterAll()
})

View File

@ -56,6 +56,9 @@ app.use(koaSession(app))
app.use(middleware.correlation)
app.use(middleware.pino)
app.use(middleware.ip)
if (!coreEnv.DISABLE_CONTENT_SECURITY_POLICY) {
app.use(middleware.csp)
}
app.use(userAgent)
// authentication

View File

@ -12,7 +12,7 @@ dbConfig.init()
import env from "../environment"
import * as controllers from "./controllers"
const supertest = require("supertest")
import supertest from "supertest"
import { Config } from "../constants"
import {