diff --git a/hosting/nginx.dev.conf b/hosting/nginx.dev.conf
index f0a58a9a98..e3d6d47287 100644
--- a/hosting/nginx.dev.conf
+++ b/hosting/nginx.dev.conf
@@ -45,6 +45,20 @@ http {
     client_max_body_size 50000m;
     ignore_invalid_headers off;
     proxy_buffering off;
+    set $csp_default "default-src 'self'";
+    set $csp_script "script-src 'self' 'unsafe-inline' 'unsafe-eval' https://*.budibase.net https://cdn.budi.live https://js.intercomcdn.com https://widget.intercom.io https://d2l5prqdbvm3op.cloudfront.net https://us-assets.i.posthog.com";
+    set $csp_style "style-src 'self' 'unsafe-inline' https://cdn.jsdelivr.net https://fonts.googleapis.com https://rsms.me https://maxcdn.bootstrapcdn.com";
+    set $csp_object "object-src 'none'";
+    set $csp_base_uri "base-uri 'self'";
+    set $csp_connect "connect-src 'self' https://*.budibase.app https://*.budibaseqa.app https://*.budibase.net https://api-iam.intercom.io https://api-iam.intercom.io https://api-ping.intercom.io https://app.posthog.com https://us.i.posthog.com wss://nexus-websocket-a.intercom.io wss://nexus-websocket-b.intercom.io https://nexus-websocket-a.intercom.io https://nexus-websocket-b.intercom.io https://uploads.intercomcdn.com https://uploads.intercomusercontent.com https://*.amazonaws.com https://*.s3.amazonaws.com https://*.s3.us-east-2.amazonaws.com https://*.s3.us-east-1.amazonaws.com https://*.s3.us-west-1.amazonaws.com https://*.s3.us-west-2.amazonaws.com https://*.s3.af-south-1.amazonaws.com https://*.s3.ap-east-1.amazonaws.com https://*.s3.ap-southeast-3.amazonaws.com https://*.s3.ap-south-1.amazonaws.com https://*.s3.ap-northeast-3.amazonaws.com https://*.s3.ap-northeast-2.amazonaws.com https://*.s3.ap-southeast-1.amazonaws.com https://*.s3.ap-southeast-2.amazonaws.com https://*.s3.ap-northeast-1.amazonaws.com https://*.s3.ca-central-1.amazonaws.com https://*.s3.cn-north-1.amazonaws.com https://*.s3.cn-northwest-1.amazonaws.com https://*.s3.eu-central-1.amazonaws.com https://*.s3.eu-west-1.amazonaws.com https://*.s3.eu-west-2.amazonaws.com https://*.s3.eu-south-1.amazonaws.com https://*.s3.eu-west-3.amazonaws.com https://*.s3.eu-north-1.amazonaws.com https://*.s3.sa-east-1.amazonaws.com https://*.s3.me-south-1.amazonaws.com https://*.s3.us-gov-east-1.amazonaws.com https://*.s3.us-gov-west-1.amazonaws.com https://api.github.com";
+    set $csp_font "font-src 'self' data: https://cdn.jsdelivr.net https://fonts.gstatic.com https://rsms.me https://maxcdn.bootstrapcdn.com https://js.intercomcdn.com https://fonts.intercomcdn.com";
+    set $csp_frame "frame-src 'self' https:";
+    set $csp_img "img-src http: https: data: blob:";
+    set $csp_manifest "manifest-src 'self'";
+    set $csp_media "media-src 'self' https://js.intercomcdn.com https://cdn.budi.live";
+    set $csp_worker "worker-src blob:";
+
+    add_header Content-Security-Policy "${csp_default}; ${csp_style}; ${csp_object}; ${csp_base_uri}; ${csp_connect}; ${csp_font}; ${csp_frame}; ${csp_img}; ${csp_manifest}; ${csp_media}; ${csp_worker};" always;

     error_page 502 503 504 /error.html;
     location = /error.html {
diff --git a/hosting/proxy/nginx.prod.conf b/hosting/proxy/nginx.prod.conf
index f18bae09a4..c5d378afd8 100644
--- a/hosting/proxy/nginx.prod.conf
+++ b/hosting/proxy/nginx.prod.conf
@@ -50,19 +50,6 @@ http {
     ignore_invalid_headers off;
     proxy_buffering off;

-    set $csp_default "default-src 'self'";
-    set $csp_script "script-src 'self' 'unsafe-inline' 'unsafe-eval' https://*.budibase.net https://cdn.budi.live https://js.intercomcdn.com https://widget.intercom.io https://d2l5prqdbvm3op.cloudfront.net https://us-assets.i.posthog.com";
-    set $csp_style "style-src 'self' 'unsafe-inline' https://cdn.jsdelivr.net https://fonts.googleapis.com https://rsms.me https://maxcdn.bootstrapcdn.com";
-    set $csp_object "object-src 'none'";
-    set $csp_base_uri "base-uri 'self'";
-    set $csp_connect "connect-src 'self' https://*.budibase.app https://*.budibaseqa.app https://*.budibase.net https://api-iam.intercom.io https://api-iam.intercom.io https://api-ping.intercom.io https://app.posthog.com https://us.i.posthog.com wss://nexus-websocket-a.intercom.io wss://nexus-websocket-b.intercom.io https://nexus-websocket-a.intercom.io https://nexus-websocket-b.intercom.io https://uploads.intercomcdn.com https://uploads.intercomusercontent.com https://*.amazonaws.com https://*.s3.amazonaws.com https://*.s3.us-east-2.amazonaws.com https://*.s3.us-east-1.amazonaws.com https://*.s3.us-west-1.amazonaws.com https://*.s3.us-west-2.amazonaws.com https://*.s3.af-south-1.amazonaws.com https://*.s3.ap-east-1.amazonaws.com https://*.s3.ap-southeast-3.amazonaws.com https://*.s3.ap-south-1.amazonaws.com https://*.s3.ap-northeast-3.amazonaws.com https://*.s3.ap-northeast-2.amazonaws.com https://*.s3.ap-southeast-1.amazonaws.com https://*.s3.ap-southeast-2.amazonaws.com https://*.s3.ap-northeast-1.amazonaws.com https://*.s3.ca-central-1.amazonaws.com https://*.s3.cn-north-1.amazonaws.com https://*.s3.cn-northwest-1.amazonaws.com https://*.s3.eu-central-1.amazonaws.com https://*.s3.eu-west-1.amazonaws.com https://*.s3.eu-west-2.amazonaws.com https://*.s3.eu-south-1.amazonaws.com https://*.s3.eu-west-3.amazonaws.com https://*.s3.eu-north-1.amazonaws.com https://*.s3.sa-east-1.amazonaws.com https://*.s3.me-south-1.amazonaws.com https://*.s3.us-gov-east-1.amazonaws.com https://*.s3.us-gov-west-1.amazonaws.com https://api.github.com";
-    set $csp_font "font-src 'self' data: https://cdn.jsdelivr.net https://fonts.gstatic.com https://rsms.me https://maxcdn.bootstrapcdn.com https://js.intercomcdn.com https://fonts.intercomcdn.com";
-    set $csp_frame "frame-src 'self' https:";
-    set $csp_img "img-src http: https: data: blob:";
-    set $csp_manifest "manifest-src 'self'";
-    set $csp_media "media-src 'self' https://js.intercomcdn.com https://cdn.budi.live";
-    set $csp_worker "worker-src blob:";
-
     error_page 502 503 504 /error.html;
     location = /error.html {
         root /usr/share/nginx/html;
@@ -73,7 +60,6 @@ http {
     add_header X-Frame-Options SAMEORIGIN always;
     add_header X-Content-Type-Options nosniff always;
     add_header X-XSS-Protection "1; mode=block" always;
-    add_header Content-Security-Policy "${csp_default}; ${csp_script}; ${csp_style}; ${csp_object}; ${csp_base_uri}; ${csp_connect}; ${csp_font}; ${csp_frame}; ${csp_img}; ${csp_manifest}; ${csp_media}; ${csp_worker};" always;
     add_header Strict-Transport-Security "max-age=31536000; includeSubDomains; preload" always;

     # upstreams
@@ -120,6 +106,12 @@ http {
     location ~ ^/api/(system|admin|global)/ {
       proxy_set_header Host $host;
+
+      # Enable buffering for potentially large OIDC configs
+      proxy_buffering on;
+      proxy_buffer_size 16k;
+      proxy_buffers 4 32k;
+
       proxy_pass $worker;
     }
diff --git a/lerna.json b/lerna.json
index 19b603b1cd..582f95b303 100644
--- a/lerna.json
+++ b/lerna.json
@@ -1,6 +1,6 @@
 {
   "$schema": "node_modules/lerna/schemas/lerna-schema.json",
-  "version": "3.2.0",
+  "version": "3.2.3",
   "npmClient": "yarn",
   "packages": [
     "packages/*",
diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts
index 47b3f0672f..b72651e21f 100644
--- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts
+++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts
@@ -1,7 +1,12 @@
 import tk from "timekeeper"
 import _ from "lodash"

-import { DBTestConfiguration, generator, structures } from "../../../tests"
+import {
+  DBTestConfiguration,
+  generator,
+  structures,
+  utils,
+} from "../../../tests"
 import { getDB } from "../../db"

 import {
@@ -10,15 +15,14 @@ import {
   init,
 } from "../docWritethrough"

-import InMemoryQueue from "../../queue/inMemoryQueue"
-
 const initialTime = Date.now()

 async function waitForQueueCompletion() {
-  const queue: InMemoryQueue = DocWritethroughProcessor.queue as never
-  await queue.waitForCompletion()
+  await utils.queue.processMessages(DocWritethroughProcessor.queue)
 }

+beforeAll(() => utils.queue.useRealQueues())
+
 describe("docWritethrough", () => {
   beforeAll(() => {
     init()
@@ -67,7 +71,7 @@ describe("docWritethrough", () => {
       const patch3 = generatePatchObject(3)
       await docWritethrough.patch(patch3)

-      expect(await db.get(documentId)).toEqual({
+      expect(await db.tryGet(documentId)).toEqual({
        _id: documentId,
        ...patch1,
        ...patch2,
@@ -92,7 +96,7 @@ describe("docWritethrough", () => {

      await waitForQueueCompletion()

-      expect(await db.get(documentId)).toEqual(
+      expect(await db.tryGet(documentId)).toEqual(
        expect.objectContaining({
          _id: documentId,
          ...patch1,
@@ -117,7 +121,7 @@ describe("docWritethrough", () => {
      await waitForQueueCompletion()
      expect(date1).not.toEqual(date2)

-      expect(await db.get(documentId)).toEqual(
+      expect(await db.tryGet(documentId)).toEqual(
        expect.objectContaining({
          createdAt: date1.toISOString(),
          updatedAt: date2.toISOString(),
@@ -135,7 +139,7 @@ describe("docWritethrough", () => {
      await docWritethrough.patch(patch2)

      const keyToOverride = _.sample(Object.keys(patch1))!
-      expect(await db.get(documentId)).toEqual(
+      expect(await db.tryGet(documentId)).toEqual(
        expect.objectContaining({
          [keyToOverride]: patch1[keyToOverride],
        })
@@ -150,7 +154,7 @@ describe("docWritethrough", () => {
      await docWritethrough.patch(patch3)
      await waitForQueueCompletion()

-      expect(await db.get(documentId)).toEqual(
+      expect(await db.tryGet(documentId)).toEqual(
        expect.objectContaining({
          ...patch1,
          ...patch2,
@@ -180,14 +184,14 @@ describe("docWritethrough", () => {
      await secondDocWritethrough.patch(doc2Patch2)
      await waitForQueueCompletion()

-      expect(await db.get(docWritethrough.docId)).toEqual(
+      expect(await db.tryGet(docWritethrough.docId)).toEqual(
        expect.objectContaining({
          ...doc1Patch,
          ...doc1Patch2,
        })
      )

-      expect(await db.get(secondDocWritethrough.docId)).toEqual(
+      expect(await db.tryGet(secondDocWritethrough.docId)).toEqual(
        expect.objectContaining({
          ...doc2Patch,
          ...doc2Patch2,
@@ -203,7 +207,7 @@ describe("docWritethrough", () => {
      await docWritethrough.patch(initialPatch)
      await waitForQueueCompletion()

-      expect(await db.get(documentId)).toEqual(
+      expect(await db.tryGet(documentId)).toEqual(
        expect.objectContaining(initialPatch)
      )

@@ -214,10 +218,10 @@ describe("docWritethrough", () => {
      await docWritethrough.patch(extraPatch)
      await waitForQueueCompletion()

-      expect(await db.get(documentId)).toEqual(
+      expect(await db.tryGet(documentId)).toEqual(
        expect.objectContaining(extraPatch)
      )
-      expect(await db.get(documentId)).not.toEqual(
+      expect(await db.tryGet(documentId)).not.toEqual(
        expect.objectContaining(initialPatch)
      )
    })
@@ -242,7 +246,7 @@ describe("docWritethrough", () => {
      expect(queueMessageSpy).toHaveBeenCalledTimes(5)

      await waitForQueueCompletion()
-      expect(await db.get(documentId)).toEqual(
+      expect(await db.tryGet(documentId)).toEqual(
        expect.objectContaining(patches)
      )

@@ -250,7 +254,7 @@
      expect(queueMessageSpy).toHaveBeenCalledTimes(45)

      await waitForQueueCompletion()
-      expect(await db.get(documentId)).toEqual(
+      expect(await db.tryGet(documentId)).toEqual(
        expect.objectContaining(patches)
      )

@@ -258,20 +262,18 @@
      expect(queueMessageSpy).toHaveBeenCalledTimes(55)

      await waitForQueueCompletion()
-      expect(await db.get(documentId)).toEqual(
+      expect(await db.tryGet(documentId)).toEqual(
        expect.objectContaining(patches)
      )
    })
  })

-  // This is not yet supported
-  // eslint-disable-next-line jest/no-disabled-tests
-  it.skip("patches will execute in order", async () => {
+  it("patches will execute in order", async () => {
    let incrementalValue = 0
    const keyToOverride = generator.word()
    async function incrementalPatches(count: number) {
      for (let i = 0; i < count; i++) {
-        await docWritethrough.patch({ [keyToOverride]: incrementalValue++ })
+        await docWritethrough.patch({ [keyToOverride]: ++incrementalValue })
      }
    }
@@ -279,13 +281,13 @@ describe("docWritethrough", () => {
    await incrementalPatches(5)
    await waitForQueueCompletion()
-    expect(await db.get(documentId)).toEqual(
+    expect(await db.tryGet(documentId)).toEqual(
      expect.objectContaining({ [keyToOverride]: 5 })
    )

    await incrementalPatches(40)
    await waitForQueueCompletion()
-    expect(await db.get(documentId)).toEqual(
+    expect(await db.tryGet(documentId)).toEqual(
      expect.objectContaining({ [keyToOverride]: 45 })
    )
  })
diff --git a/packages/backend-core/src/environment.ts b/packages/backend-core/src/environment.ts
index 4cb0a9c731..b2f95210d3 100644
--- a/packages/backend-core/src/environment.ts
+++ b/packages/backend-core/src/environment.ts
@@ -225,6 +225,10 @@ const environment = {
  OPENAI_API_KEY: process.env.OPENAI_API_KEY,
  MIN_VERSION_WITHOUT_POWER_ROLE:
    process.env.MIN_VERSION_WITHOUT_POWER_ROLE || "3.0.0",
+  DISABLE_CONTENT_SECURITY_POLICY: process.env.DISABLE_CONTENT_SECURITY_POLICY,
+  // stopgap migration strategy until we can ensure backwards compat without unsafe-inline in CSP
+  DISABLE_CSP_UNSAFE_INLINE_SCRIPTS:
+    process.env.DISABLE_CSP_UNSAFE_INLINE_SCRIPTS,
 }

 export function setEnv(newEnvVars: Partial): () => void {
diff --git a/packages/backend-core/src/middleware/contentSecurityPolicy.ts b/packages/backend-core/src/middleware/contentSecurityPolicy.ts
new file mode 100644
index 0000000000..e0dfbe6f64
--- /dev/null
+++ b/packages/backend-core/src/middleware/contentSecurityPolicy.ts
@@ -0,0 +1,118 @@
+import crypto from "crypto"
+import env from "../environment"
+
+const CSP_DIRECTIVES = {
+  "default-src": ["'self'"],
+  "script-src": [
+    "'self'",
+    "'unsafe-eval'",
+    "https://*.budibase.net",
+    "https://cdn.budi.live",
+    "https://js.intercomcdn.com",
+    "https://widget.intercom.io",
+    "https://d2l5prqdbvm3op.cloudfront.net",
+    "https://us-assets.i.posthog.com",
+  ],
+  "style-src": [
+    "'self'",
+    "'unsafe-inline'",
+    "https://cdn.jsdelivr.net",
+    "https://fonts.googleapis.com",
+    "https://rsms.me",
+    "https://maxcdn.bootstrapcdn.com",
+  ],
+  "object-src": ["'none'"],
+  "base-uri": ["'self'"],
+  "connect-src": [
+    "'self'",
+    "https://*.budibase.app",
+    "https://*.budibaseqa.app",
+    "https://*.budibase.net",
+    "https://api-iam.intercom.io",
+    "https://api-ping.intercom.io",
+    "https://app.posthog.com",
+    "https://us.i.posthog.com",
+    "wss://nexus-websocket-a.intercom.io",
+    "wss://nexus-websocket-b.intercom.io",
+    "https://nexus-websocket-a.intercom.io",
+    "https://nexus-websocket-b.intercom.io",
+    "https://uploads.intercomcdn.com",
+    "https://uploads.intercomusercontent.com",
+    "https://*.amazonaws.com",
+    "https://*.s3.amazonaws.com",
+    "https://*.s3.us-east-2.amazonaws.com",
+    "https://*.s3.us-east-1.amazonaws.com",
+    "https://*.s3.us-west-1.amazonaws.com",
+    "https://*.s3.us-west-2.amazonaws.com",
+    "https://*.s3.af-south-1.amazonaws.com",
+    "https://*.s3.ap-east-1.amazonaws.com",
+    "https://*.s3.ap-south-1.amazonaws.com",
+    "https://*.s3.ap-northeast-2.amazonaws.com",
+    "https://*.s3.ap-southeast-1.amazonaws.com",
+    "https://*.s3.ap-southeast-2.amazonaws.com",
+    "https://*.s3.ap-northeast-1.amazonaws.com",
+    "https://*.s3.ca-central-1.amazonaws.com",
+    "https://*.s3.cn-north-1.amazonaws.com",
+    "https://*.s3.cn-northwest-1.amazonaws.com",
+    "https://*.s3.eu-central-1.amazonaws.com",
+    "https://*.s3.eu-west-1.amazonaws.com",
+    "https://*.s3.eu-west-2.amazonaws.com",
+    "https://*.s3.eu-south-1.amazonaws.com",
+    "https://*.s3.eu-west-3.amazonaws.com",
+    "https://*.s3.eu-north-1.amazonaws.com",
+    "https://*.s3.sa-east-1.amazonaws.com",
+    "https://*.s3.me-south-1.amazonaws.com",
+    "https://*.s3.us-gov-east-1.amazonaws.com",
+    "https://*.s3.us-gov-west-1.amazonaws.com",
+    "https://api.github.com",
+  ],
+  "font-src": [
+    "'self'",
+    "data:",
+    "https://cdn.jsdelivr.net",
+    "https://fonts.gstatic.com",
+    "https://rsms.me",
+    "https://maxcdn.bootstrapcdn.com",
+    "https://js.intercomcdn.com",
+    "https://fonts.intercomcdn.com",
+  ],
+  "frame-src": ["'self'", "https:"],
+  "img-src": ["http:", "https:", "data:", "blob:"],
+  "manifest-src": ["'self'"],
+  "media-src": [
+    "'self'",
+    "https://js.intercomcdn.com",
+    "https://cdn.budi.live",
+  ],
+  "worker-src": ["blob:"],
+}
+
+export async function contentSecurityPolicy(ctx: any, next: any) {
+  try {
+    const nonce = crypto.randomBytes(16).toString("base64")
+
+    const directives = { ...CSP_DIRECTIVES }
+    directives["script-src"] = [
+      ...CSP_DIRECTIVES["script-src"],
+      `'nonce-${nonce}'`,
+    ]
+
+    if (!env.DISABLE_CSP_UNSAFE_INLINE_SCRIPTS) {
+      directives["script-src"].push("'unsafe-inline'")
+    }
+
+    ctx.state.nonce = nonce
+
+    const cspHeader = Object.entries(directives)
+      .map(([key, sources]) => `${key} ${sources.join(" ")}`)
+      .join("; ")
+    ctx.set("Content-Security-Policy", cspHeader)
+    await next()
+  } catch (err: any) {
+    console.error(
+      `Error occurred in Content-Security-Policy middleware: ${err}`
+    )
+  }
+}
+
+export default contentSecurityPolicy
diff --git a/packages/backend-core/src/middleware/index.ts b/packages/backend-core/src/middleware/index.ts
index 20c2125b13..9ee51db45b 100644
--- a/packages/backend-core/src/middleware/index.ts
+++ b/packages/backend-core/src/middleware/index.ts
@@ -19,5 +19,6 @@ export { default as pino } from "../logging/pino/middleware"
 export { default as correlation } from "../logging/correlation/middleware"
 export { default as errorHandling } from "./errorHandling"
 export { default as querystringToBody } from "./querystringToBody"
+export { default as csp } from "./contentSecurityPolicy"
 export * as joiValidator from "./joi-validator"
 export { default as ip } from "./ip"
diff --git a/packages/backend-core/src/middleware/tests/contentSecurityPolicy.spec.ts b/packages/backend-core/src/middleware/tests/contentSecurityPolicy.spec.ts
new file mode 100644
index 0000000000..0c5838e7fe
--- /dev/null
+++ b/packages/backend-core/src/middleware/tests/contentSecurityPolicy.spec.ts
@@ -0,0 +1,75 @@
+import crypto from "crypto"
+import contentSecurityPolicy from "../contentSecurityPolicy"
+
+jest.mock("crypto", () => ({
+  randomBytes: jest.fn(),
+  randomUUID: jest.fn(),
+}))
+
+describe("contentSecurityPolicy middleware", () => {
+  let ctx: any
+  let next: any
+  const mockNonce = "mocked/nonce"
+
+  beforeEach(() => {
+    ctx = {
+      state: {},
+      set: jest.fn(),
+    }
+    next = jest.fn()
+    // @ts-ignore
+    crypto.randomBytes.mockReturnValue(Buffer.from(mockNonce, "base64"))
+  })
+
+  afterEach(() => {
+    jest.clearAllMocks()
+  })
+
+  it("should generate a nonce and set it in the script-src directive", async () => {
+    await contentSecurityPolicy(ctx, next)
+
+    expect(ctx.state.nonce).toBe(mockNonce)
+    expect(ctx.set).toHaveBeenCalledWith(
+      "Content-Security-Policy",
+      expect.stringContaining(
+        `script-src 'self' 'unsafe-eval' https://*.budibase.net https://cdn.budi.live https://js.intercomcdn.com https://widget.intercom.io https://d2l5prqdbvm3op.cloudfront.net https://us-assets.i.posthog.com 'nonce-${mockNonce}'`
+      )
+    )
+    expect(next).toHaveBeenCalled()
+  })
+
+  it("should include all CSP directives in the header", async () => {
+    await contentSecurityPolicy(ctx, next)
+
+    const cspHeader = ctx.set.mock.calls[0][1]
+    expect(cspHeader).toContain("default-src 'self'")
+    expect(cspHeader).toContain("script-src 'self' 'unsafe-eval'")
+    expect(cspHeader).toContain("style-src 'self' 'unsafe-inline'")
+    expect(cspHeader).toContain("object-src 'none'")
+    expect(cspHeader).toContain("base-uri 'self'")
+    expect(cspHeader).toContain("connect-src 'self'")
+    expect(cspHeader).toContain("font-src 'self'")
+    expect(cspHeader).toContain("frame-src 'self'")
+    expect(cspHeader).toContain("img-src http: https: data: blob:")
+    expect(cspHeader).toContain("manifest-src 'self'")
+    expect(cspHeader).toContain("media-src 'self'")
+    expect(cspHeader).toContain("worker-src blob:")
+  })
+
+  it("should handle errors and log an error message", async () => {
+    const consoleSpy = jest.spyOn(console, "error").mockImplementation()
+    const error = new Error("Test error")
+    // @ts-ignore
+    crypto.randomBytes.mockImplementation(() => {
+      throw error
+    })
+
+    await contentSecurityPolicy(ctx, next)
+
+    expect(consoleSpy).toHaveBeenCalledWith(
+      `Error occurred in Content-Security-Policy middleware: ${error}`
+    )
+    expect(next).not.toHaveBeenCalled()
+    consoleSpy.mockRestore()
+  })
+})
diff --git a/packages/backend-core/src/queue/inMemoryQueue.ts b/packages/backend-core/src/queue/inMemoryQueue.ts
index 1d8544828d..dd8d3daa37 100644
--- a/packages/backend-core/src/queue/inMemoryQueue.ts
+++ b/packages/backend-core/src/queue/inMemoryQueue.ts
@@ -1,5 +1,5 @@
 import events from "events"
-import { newid, timeout } from "../utils"
+import { newid } from "../utils"
 import { Queue, QueueOptions, JobOptions } from "./queue"

 interface JobMessage {
@@ -184,16 +184,6 @@ class InMemoryQueue implements Partial {
    // do nothing
    return this as any
  }
-
-  async waitForCompletion() {
-    do {
-      await timeout(50)
-    } while (this.hasRunningJobs())
-  }
-
-  hasRunningJobs() {
-    return this._addCount > this._runCount
-  }
 }

 export default InMemoryQueue
diff --git a/packages/backend-core/src/queue/queue.ts b/packages/backend-core/src/queue/queue.ts
index f633d0885e..f5d710f02d 100644
--- a/packages/backend-core/src/queue/queue.ts
+++ b/packages/backend-core/src/queue/queue.ts
@@ -15,7 +15,7 @@ const QUEUE_LOCK_MS = Duration.fromMinutes(5).toMs()
 const QUEUE_LOCK_RENEW_INTERNAL_MS = Duration.fromSeconds(30).toMs()
 // cleanup the queue every 60 seconds
 const CLEANUP_PERIOD_MS = Duration.fromSeconds(60).toMs()
-let QUEUES: BullQueue.Queue[] | InMemoryQueue[] = []
+let QUEUES: BullQueue.Queue[] = []
 let cleanupInterval: NodeJS.Timeout

 async function cleanup() {
@@ -45,11 +45,18 @@ export function createQueue(
  if (opts.jobOptions) {
    queueConfig.defaultJobOptions = opts.jobOptions
  }
-  let queue: any
+  let queue: BullQueue.Queue
  if (!env.isTest()) {
    queue = new BullQueue(jobQueue, queueConfig)
+  } else if (
+    process.env.BULL_TEST_REDIS_PORT &&
+    !isNaN(+process.env.BULL_TEST_REDIS_PORT)
+  ) {
+    queue = new BullQueue(jobQueue, {
+      redis: { host: "localhost", port: +process.env.BULL_TEST_REDIS_PORT },
+    })
  } else {
-    queue = new InMemoryQueue(jobQueue, queueConfig)
+    queue = new InMemoryQueue(jobQueue, queueConfig) as any
  }
  addListeners(queue, jobQueue, opts?.removeStalledCb)
  QUEUES.push(queue)
diff --git a/packages/backend-core/tests/core/utilities/index.ts b/packages/backend-core/tests/core/utilities/index.ts
index 787d69be2c..c3d81784c8 100644
--- a/packages/backend-core/tests/core/utilities/index.ts
+++ b/packages/backend-core/tests/core/utilities/index.ts
@@ -4,3 +4,4 @@ export { generator } from "./structures"
 export * as testContainerUtils from "./testContainerUtils"
 export * as utils from "./utils"
 export * from "./jestUtils"
+export * as queue from "./queue"
diff --git a/packages/backend-core/tests/core/utilities/queue.ts b/packages/backend-core/tests/core/utilities/queue.ts
new file mode 100644
index 0000000000..49dd33ca29
--- /dev/null
+++ b/packages/backend-core/tests/core/utilities/queue.ts
@@ -0,0 +1,9 @@
+import { Queue } from "bull"
+
+export async function processMessages(queue: Queue) {
+  do {
+    await queue.whenCurrentJobsFinished()
+  } while (await queue.count())
+
+  await queue.whenCurrentJobsFinished()
+}
diff --git a/packages/backend-core/tests/core/utilities/testContainerUtils.ts b/packages/backend-core/tests/core/utilities/testContainerUtils.ts
index 1a25bb28f4..71d7fa32db 100644
--- a/packages/backend-core/tests/core/utilities/testContainerUtils.ts
+++ b/packages/backend-core/tests/core/utilities/testContainerUtils.ts
@@ -1,4 +1,6 @@
 import { execSync } from "child_process"
+import { cloneDeep } from "lodash"
+import { GenericContainer, StartedTestContainer } from "testcontainers"

 const IPV4_PORT_REGEX = new RegExp(`0\\.0\\.0\\.0:(\\d+)->(\\d+)/tcp`, "g")

@@ -106,3 +108,58 @@ export function setupEnv(...envs: any[]) {
    }
  }
 }
+
+export async function startContainer(container: GenericContainer) {
+  const imageName = (container as any).imageName.string as string
+  let key: string = imageName
+  if (imageName.includes("@sha256")) {
+    key = imageName.split("@")[0]
+  }
+  key = key.replace(/\//g, "-").replace(/:/g, "-")
+
+  container = container
+    .withReuse()
+    .withLabels({ "com.budibase": "true" })
+    .withName(`${key}_testcontainer`)
+
+  let startedContainer: StartedTestContainer | undefined = undefined
+  let lastError = undefined
+  for (let i = 0; i < 10; i++) {
+    try {
+      // container.start() is not an idempotent operation, calling `start`
+      // modifies the internal state of a GenericContainer instance such that
+      // the hash it uses to determine reuse changes. We need to clone the
+      // container before calling start to ensure that we're using the same
+      // reuse hash every time.
+      const containerCopy = cloneDeep(container)
+      startedContainer = await containerCopy.start()
+      lastError = undefined
+      break
+    } catch (e: any) {
+      lastError = e
+      await new Promise(resolve => setTimeout(resolve, 1000))
+    }
+  }
+
+  if (!startedContainer) {
+    if (lastError) {
+      throw lastError
+    }
+    throw new Error(`failed to start container: ${imageName}`)
+  }
+
+  const info = getContainerById(startedContainer.getId())
+  if (!info) {
+    throw new Error("Container not found")
+  }
+
+  // Some Docker runtimes, when you expose a port, will bind it to both
+  // 127.0.0.1 and ::1, so ipv4 and ipv6. The port spaces of ipv4 and ipv6
+  // addresses are not shared, and testcontainers will sometimes give you back
+  // the ipv6 port. There's no way to know that this has happened, and if you
+  // try to then connect to `localhost:port` you may attempt to bind to the v4
+  // address which could be unbound or even an entirely different container. For
+  // that reason, we don't use testcontainers' `getExposedPort` function,
+  // preferring instead our own method that guaranteed v4 ports.
+  return getExposedV4Ports(info)
+}
diff --git a/packages/backend-core/tests/core/utilities/utils/index.ts b/packages/backend-core/tests/core/utilities/utils/index.ts
index 41a249c7e6..3d28189c53 100644
--- a/packages/backend-core/tests/core/utilities/utils/index.ts
+++ b/packages/backend-core/tests/core/utilities/utils/index.ts
@@ -1 +1,2 @@
 export * as time from "./time"
+export * as queue from "./queue"
diff --git a/packages/backend-core/tests/core/utilities/utils/queue.ts b/packages/backend-core/tests/core/utilities/utils/queue.ts
new file mode 100644
index 0000000000..3ad7d6b4b4
--- /dev/null
+++ b/packages/backend-core/tests/core/utilities/utils/queue.ts
@@ -0,0 +1,27 @@
+import { Queue } from "bull"
+import { GenericContainer, Wait } from "testcontainers"
+import { startContainer } from "../testContainerUtils"
+
+export async function useRealQueues() {
+  const ports = await startContainer(
+    new GenericContainer("redis")
+      .withExposedPorts(6379)
+      .withWaitStrategy(
+        Wait.forSuccessfulCommand(`redis-cli`).withStartupTimeout(10000)
+      )
+  )
+
+  const port = ports.find(x => x.container === 6379)?.host
+  if (!port) {
+    throw new Error("Redis port not found")
+  }
+  process.env.BULL_TEST_REDIS_PORT = port.toString()
+}
+
+export async function processMessages(queue: Queue) {
+  do {
+    await queue.whenCurrentJobsFinished()
+  } while (await queue.count())
+
+  await queue.whenCurrentJobsFinished()
+}
diff --git a/packages/bbui/src/Form/Core/Dropzone.svelte b/packages/bbui/src/Form/Core/Dropzone.svelte
index 2922d88e7a..26f1dc86c6 100644
--- a/packages/bbui/src/Form/Core/Dropzone.svelte
+++ b/packages/bbui/src/Form/Core/Dropzone.svelte
@@ -8,6 +8,7 @@
   import Link from "../../Link/Link.svelte"
   import Tag from "../../Tags/Tag.svelte"
   import Tags from "../../Tags/Tags.svelte"
+  import ProgressCircle from "../../ProgressCircle/ProgressCircle.svelte"

   const BYTES_IN_KB = 1000
   const BYTES_IN_MB = 1000000
@@ -39,12 +40,14 @@
     "jfif",
     "webp",
   ]

-  const fieldId = id || uuid()
+
   let selectedImageIdx = 0
   let fileDragged = false
   let selectedUrl
   let fileInput
+  let loading = false
+
   $: selectedImage = value?.[selectedImageIdx] ?? null
   $: fileCount = value?.length ?? 0
   $: isImage =
@@ -86,10 +89,15 @@
     }

     if (processFiles) {
-      const processedFiles = await processFiles(fileList)
-      const newValue = [...value, ...processedFiles]
-      dispatch("change", newValue)
-      selectedImageIdx = newValue.length - 1
+      loading = true
+      try {
+        const processedFiles = await processFiles(fileList)
+        const newValue = [...value, ...processedFiles]
+        dispatch("change", newValue)
+        selectedImageIdx = newValue.length - 1
+      } finally {
+        loading = false
+      }
     } else {
       dispatch("change", fileList)
     }
@@ -227,7 +235,7 @@
   {#if showDropzone}
+      {#if loading}
+        <div class="loading">
+          <ProgressCircle />
+        </div>
+      {/if}
   {/if}
@@ -464,6 +478,7 @@
   .spectrum-Dropzone {
     height: 220px;
+    position: relative;
   }
   .compact .spectrum-Dropzone {
     height: 40px;
@@ -488,4 +503,14 @@
   .tag {
     margin-top: 8px;
   }
+
+  .loading {
+    position: absolute;
+    display: grid;
+    place-items: center;
+    height: 100%;
+    width: 100%;
+    top: 0;
+    left: 0;
+  }
diff --git a/packages/frontend-core/src/components/grid/layout/NewColumnButton.svelte b/packages/frontend-core/src/components/grid/layout/NewColumnButton.svelte
index 261954379b..215fdabd8d 100644
--- a/packages/frontend-core/src/components/grid/layout/NewColumnButton.svelte
+++ b/packages/frontend-core/src/components/grid/layout/NewColumnButton.svelte
@@ -53,6 +53,7 @@
     on:close={close}
     maxHeight={null}
     resizable
+    minWidth={360}
   >
@@ -80,7 +81,6 @@
   }

   .content {
-    width: 300px;
     padding: 20px;
     display: flex;
     flex-direction: column;
diff --git a/packages/frontend-core/src/fetch/NestedProviderFetch.js b/packages/frontend-core/src/fetch/NestedProviderFetch.js
index 01c22b6ba0..0a08b00cb4 100644
--- a/packages/frontend-core/src/fetch/NestedProviderFetch.js
+++ b/packages/frontend-core/src/fetch/NestedProviderFetch.js
@@ -5,6 +5,7 @@ export default class NestedProviderFetch extends DataFetch {
    // Nested providers should already have exposed their own schema
    return {
      schema: datasource?.value?.schema,
+      primaryDisplay: datasource?.value?.primaryDisplay,
    }
  }

diff --git a/packages/server/src/api/controllers/application.ts b/packages/server/src/api/controllers/application.ts
index e7d0ed7ba7..101257c321 100644
--- a/packages/server/src/api/controllers/application.ts
+++ b/packages/server/src/api/controllers/application.ts
@@ -153,7 +153,11 @@ async function createInstance(appId: string, template: AppTemplate) {
  await createAllSearchIndex()

  if (template && template.useTemplate) {
-    await sdk.backups.importApp(appId, db, template)
+    const opts = {
+      importObjStoreContents: true,
+      updateAttachmentColumns: !template.key, // preserve attachments when using Budibase templates
+    }
+    await sdk.backups.importApp(appId, db, template, opts)
  } else {
    // create the users table
    await db.put(USERS_TABLE_SCHEMA)
diff --git a/packages/server/src/api/controllers/static/index.ts b/packages/server/src/api/controllers/static/index.ts
index daf7b9b25c..1bf04e94f0 100644
--- a/packages/server/src/api/controllers/static/index.ts
+++ b/packages/server/src/api/controllers/static/index.ts
@@ -209,6 +209,7 @@ export const serveApp = async function (ctx: UserCtx) {
        ? objectStore.getGlobalFileUrl("settings", "logoUrl")
        : "",
      appMigrating: needMigrations,
+      nonce: ctx.state.nonce,
    })
    const appHbs = loadHandlebarsFile(appHbsPath)
    ctx.body = await processString(appHbs, {
@@ -217,6 +218,7 @@
      css: `:root{${themeVariables}} ${css.code}`,
      appId,
      embedded: bbHeaderEmbed,
+      nonce: ctx.state.nonce,
    })
  } else {
    // just return the app info for jest to assert on
@@ -258,6 +260,7 @@ export const serveBuilderPreview = async function (ctx: Ctx) {
    const previewHbs = loadHandlebarsFile(join(previewLoc, "preview.hbs"))
    ctx.body = await processString(previewHbs, {
      clientLibPath: objectStore.clientLibraryUrl(appId!, appInfo.version),
+      nonce: ctx.state.nonce,
    })
  } else {
    // just return the app info for jest to assert on
diff --git a/packages/server/src/api/controllers/static/templates/BudibaseApp.svelte b/packages/server/src/api/controllers/static/templates/BudibaseApp.svelte
index b4bfbe6660..b88b738f90 100644
--- a/packages/server/src/api/controllers/static/templates/BudibaseApp.svelte
+++ b/packages/server/src/api/controllers/static/templates/BudibaseApp.svelte
@@ -16,6 +16,8 @@
   export let hideDevTools
   export let sideNav
   export let hideFooter
+
+  export let nonce
@@ -118,11 +120,11 @@
     {/if}
-
     {#if appMigrating}
-
     {/if}
@@ -135,7 +137,7 @@
     {/each}
   {/if}
-
diff --git a/packages/server/src/api/controllers/static/templates/preview.hbs b/packages/server/src/api/controllers/static/templates/preview.hbs
index 54b5b1a4e4..87b9ad6ea3 100644
--- a/packages/server/src/api/controllers/static/templates/preview.hbs
+++ b/packages/server/src/api/controllers/static/templates/preview.hbs
@@ -31,7 +31,7 @@
     }
-