diff --git a/hosting/.env b/hosting/.env index 173d409d04..23681f1f57 100644 --- a/hosting/.env +++ b/hosting/.env @@ -19,7 +19,6 @@ MINIO_PORT=4004 COUCH_DB_PORT=4005 COUCH_DB_SQS_PORT=4006 REDIS_PORT=6379 -WATCHTOWER_PORT=6161 BUDIBASE_ENVIRONMENT=PRODUCTION SQL_MAX_ROWS= diff --git a/hosting/docker-compose.build.yaml b/hosting/docker-compose.build.yaml index 1f16baa9e2..057d51a887 100644 --- a/hosting/docker-compose.build.yaml +++ b/hosting/docker-compose.build.yaml @@ -74,7 +74,6 @@ services: - WORKER_UPSTREAM_URL=http://worker-service:4003 - MINIO_UPSTREAM_URL=http://minio-service:9000 - COUCHDB_UPSTREAM_URL=http://couchdb-service:5984 - - WATCHTOWER_UPSTREAM_URL=http://watchtower-service:8080 - RESOLVER=127.0.0.11 depends_on: - minio-service diff --git a/hosting/docker-compose.yaml b/hosting/docker-compose.yaml index c7a22eb2b3..ec24765149 100644 --- a/hosting/docker-compose.yaml +++ b/hosting/docker-compose.yaml @@ -87,7 +87,6 @@ services: - WORKER_UPSTREAM_URL=http://worker-service:4003 - MINIO_UPSTREAM_URL=http://minio-service:9000 - COUCHDB_UPSTREAM_URL=http://couchdb-service:5984 - - WATCHTOWER_UPSTREAM_URL=http://watchtower-service:8080 - RESOLVER=127.0.0.11 depends_on: - minio-service @@ -112,19 +111,6 @@ services: volumes: - redis_data:/data - watchtower-service: - restart: always - image: containrrr/watchtower - volumes: - - /var/run/docker.sock:/var/run/docker.sock - command: --debug --http-api-update bbapps bbworker bbproxy - environment: - - WATCHTOWER_HTTP_API=true - - WATCHTOWER_HTTP_API_TOKEN=budibase - - WATCHTOWER_CLEANUP=true - labels: - - "com.centurylinklabs.watchtower.enable=false" - volumes: couchdb3_data: driver: local diff --git a/hosting/envoy.yaml b/hosting/envoy.yaml deleted file mode 100644 index d9f8384688..0000000000 --- a/hosting/envoy.yaml +++ /dev/null @@ -1,152 +0,0 @@ -static_resources: - listeners: - - name: main_listener - address: - socket_address: { address: 0.0.0.0, port_value: 10000 } - filter_chains: - - filters: - - name: envoy.filters.network.http_connection_manager - typed_config: - "@type": type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager - stat_prefix: ingress - codec_type: auto - route_config: - name: local_route - virtual_hosts: - - name: local_services - domains: ["*"] - routes: - - match: { prefix: "/app/" } - route: - cluster: app-service - prefix_rewrite: "/" - - - match: { path: "/v1/update" } - route: - cluster: watchtower-service - - - match: { prefix: "/builder/" } - route: - cluster: app-service - - - match: { prefix: "/builder" } - route: - cluster: app-service - - - match: { prefix: "/app_" } - route: - cluster: app-service - - # special cases for worker admin (deprecated), global and system API - - match: { prefix: "/api/global/" } - route: - cluster: worker-service - - - match: { prefix: "/api/admin/" } - route: - cluster: worker-service - - - match: { prefix: "/api/system/" } - route: - cluster: worker-service - - - match: { path: "/" } - route: - cluster: app-service - - # special case for when API requests are made, can just forward, not to minio - - match: { prefix: "/api/" } - route: - cluster: app-service - timeout: 120s - - - match: { prefix: "/worker/" } - route: - cluster: worker-service - prefix_rewrite: "/" - - - match: { prefix: "/db/" } - route: - cluster: couchdb-service - prefix_rewrite: "/" - - # minio is on the default route because this works - # best, minio + AWS SDK doesn't handle path proxy - - match: { prefix: "/" } - route: - cluster: minio-service 
- - http_filters: - - name: envoy.filters.http.router - - clusters: - - name: app-service - connect_timeout: 0.25s - type: strict_dns - lb_policy: round_robin - load_assignment: - cluster_name: app-service - endpoints: - - lb_endpoints: - - endpoint: - address: - socket_address: - address: app-service - port_value: 4002 - - - name: minio-service - connect_timeout: 0.25s - type: strict_dns - lb_policy: round_robin - load_assignment: - cluster_name: minio-service - endpoints: - - lb_endpoints: - - endpoint: - address: - socket_address: - address: minio-service - port_value: 9000 - - - name: worker-service - connect_timeout: 0.25s - type: strict_dns - lb_policy: round_robin - load_assignment: - cluster_name: worker-service - endpoints: - - lb_endpoints: - - endpoint: - address: - socket_address: - address: worker-service - port_value: 4003 - - - name: couchdb-service - connect_timeout: 0.25s - type: strict_dns - lb_policy: round_robin - load_assignment: - cluster_name: couchdb-service - endpoints: - - lb_endpoints: - - endpoint: - address: - socket_address: - address: couchdb-service - port_value: 5984 - - - name: watchtower-service - connect_timeout: 0.25s - type: strict_dns - lb_policy: round_robin - load_assignment: - cluster_name: watchtower-service - endpoints: - - lb_endpoints: - - endpoint: - address: - socket_address: - address: watchtower-service - port_value: 8080 - diff --git a/hosting/hosting.properties b/hosting/hosting.properties index 6c1d9e5dbd..f63bb1941a 100644 --- a/hosting/hosting.properties +++ b/hosting/hosting.properties @@ -18,7 +18,6 @@ WORKER_PORT=4003 MINIO_PORT=4004 COUCH_DB_PORT=4005 REDIS_PORT=6379 -WATCHTOWER_PORT=6161 BUDIBASE_ENVIRONMENT=PRODUCTION # An admin user can be automatically created initially if these are set @@ -26,4 +25,4 @@ BB_ADMIN_USER_EMAIL= BB_ADMIN_USER_PASSWORD= # A path that is watched for plugin bundles. 
Any bundles found are imported automatically/ -PLUGINS_DIR= \ No newline at end of file +PLUGINS_DIR= diff --git a/hosting/portainer/template.json b/hosting/portainer/template.json index 29107b674e..4ca5b5e94f 100644 --- a/hosting/portainer/template.json +++ b/hosting/portainer/template.json @@ -78,11 +78,6 @@ "default": "6379", "preset": true }, - { - "name": "WATCHTOWER_PORT", - "default": "6161", - "preset": true - }, { "name": "BUDIBASE_ENVIRONMENT", "default": "PRODUCTION", diff --git a/hosting/proxy/Dockerfile b/hosting/proxy/Dockerfile index 42327be087..9ec458a219 100644 --- a/hosting/proxy/Dockerfile +++ b/hosting/proxy/Dockerfile @@ -22,5 +22,4 @@ ENV APPS_UPSTREAM_URL=http://app-service:4002 ENV WORKER_UPSTREAM_URL=http://worker-service:4003 ENV MINIO_UPSTREAM_URL=http://minio-service:9000 ENV COUCHDB_UPSTREAM_URL=http://couchdb-service:5984 -ENV WATCHTOWER_UPSTREAM_URL=http://watchtower-service:8080 ENV RESOLVER=127.0.0.11 diff --git a/hosting/proxy/nginx.prod.conf b/hosting/proxy/nginx.prod.conf index 59722dac5c..c5d378afd8 100644 --- a/hosting/proxy/nginx.prod.conf +++ b/hosting/proxy/nginx.prod.conf @@ -50,19 +50,6 @@ http { ignore_invalid_headers off; proxy_buffering off; - set $csp_default "default-src 'self'"; - set $csp_script "script-src 'self' 'unsafe-inline' 'unsafe-eval' https://*.budibase.net https://cdn.budi.live https://js.intercomcdn.com https://widget.intercom.io https://d2l5prqdbvm3op.cloudfront.net https://us-assets.i.posthog.com"; - set $csp_style "style-src 'self' 'unsafe-inline' https://cdn.jsdelivr.net https://fonts.googleapis.com https://rsms.me https://maxcdn.bootstrapcdn.com"; - set $csp_object "object-src 'none'"; - set $csp_base_uri "base-uri 'self'"; - set $csp_connect "connect-src 'self' https://*.budibase.app https://*.budibaseqa.app https://*.budibase.net https://api-iam.intercom.io https://api-iam.intercom.io https://api-ping.intercom.io https://app.posthog.com https://us.i.posthog.com wss://nexus-websocket-a.intercom.io wss://nexus-websocket-b.intercom.io https://nexus-websocket-a.intercom.io https://nexus-websocket-b.intercom.io https://uploads.intercomcdn.com https://uploads.intercomusercontent.com https://*.amazonaws.com https://*.s3.amazonaws.com https://*.s3.us-east-2.amazonaws.com https://*.s3.us-east-1.amazonaws.com https://*.s3.us-west-1.amazonaws.com https://*.s3.us-west-2.amazonaws.com https://*.s3.af-south-1.amazonaws.com https://*.s3.ap-east-1.amazonaws.com https://*.s3.ap-southeast-3.amazonaws.com https://*.s3.ap-south-1.amazonaws.com https://*.s3.ap-northeast-3.amazonaws.com https://*.s3.ap-northeast-2.amazonaws.com https://*.s3.ap-southeast-1.amazonaws.com https://*.s3.ap-southeast-2.amazonaws.com https://*.s3.ap-northeast-1.amazonaws.com https://*.s3.ca-central-1.amazonaws.com https://*.s3.cn-north-1.amazonaws.com https://*.s3.cn-northwest-1.amazonaws.com https://*.s3.eu-central-1.amazonaws.com https://*.s3.eu-west-1.amazonaws.com https://*.s3.eu-west-2.amazonaws.com https://*.s3.eu-south-1.amazonaws.com https://*.s3.eu-west-3.amazonaws.com https://*.s3.eu-north-1.amazonaws.com https://*.s3.sa-east-1.amazonaws.com https://*.s3.me-south-1.amazonaws.com https://*.s3.us-gov-east-1.amazonaws.com https://*.s3.us-gov-west-1.amazonaws.com https://api.github.com"; - set $csp_font "font-src 'self' data: https://cdn.jsdelivr.net https://fonts.gstatic.com https://rsms.me https://maxcdn.bootstrapcdn.com https://js.intercomcdn.com https://fonts.intercomcdn.com"; - set $csp_frame "frame-src 'self' https:"; - set $csp_img "img-src http: https: 
data: blob:"; - set $csp_manifest "manifest-src 'self'"; - set $csp_media "media-src 'self' https://js.intercomcdn.com https://cdn.budi.live"; - set $csp_worker "worker-src blob:"; - error_page 502 503 504 /error.html; location = /error.html { root /usr/share/nginx/html; @@ -73,7 +60,6 @@ http { add_header X-Frame-Options SAMEORIGIN always; add_header X-Content-Type-Options nosniff always; add_header X-XSS-Protection "1; mode=block" always; - add_header Content-Security-Policy "${csp_default}; ${csp_script}; ${csp_style}; ${csp_object}; ${csp_base_uri}; ${csp_connect}; ${csp_font}; ${csp_frame}; ${csp_img}; ${csp_manifest}; ${csp_media}; ${csp_worker};" always; add_header Strict-Transport-Security "max-age=31536000; includeSubDomains; preload" always; # upstreams @@ -81,7 +67,6 @@ http { set $worker ${WORKER_UPSTREAM_URL}; set $minio ${MINIO_UPSTREAM_URL}; set $couchdb ${COUCHDB_UPSTREAM_URL}; - set $watchtower ${WATCHTOWER_UPSTREAM_URL}; location /health { access_log off; @@ -107,10 +92,6 @@ http { proxy_pass $apps; } - location = /v1/update { - proxy_pass $watchtower; - } - location ~ ^/(builder|app_) { proxy_http_version 1.1; @@ -125,6 +106,12 @@ http { location ~ ^/api/(system|admin|global)/ { proxy_set_header Host $host; + + # Enable buffering for potentially large OIDC configs + proxy_buffering on; + proxy_buffer_size 16k; + proxy_buffers 4 32k; + proxy_pass $worker; } diff --git a/hosting/scripts/airgapped/airgappedDockerBuild.js b/hosting/scripts/airgapped/airgappedDockerBuild.js index 58bc7c09a9..432ea9a370 100755 --- a/hosting/scripts/airgapped/airgappedDockerBuild.js +++ b/hosting/scripts/airgapped/airgappedDockerBuild.js @@ -12,7 +12,6 @@ let IMAGES = { couch: "ibmcom/couchdb3", curl: "curlimages/curl", redis: "redis", - watchtower: "containrrr/watchtower", } if (IS_SINGLE_IMAGE) { @@ -53,4 +52,4 @@ if (!IS_SINGLE_IMAGE) { copyFile(FILES.ENV) // compress -execSync(`tar -czf bb-airgapped.tar.gz hosting/scripts/bb-airgapped`) \ No newline at end of file +execSync(`tar -czf bb-airgapped.tar.gz hosting/scripts/bb-airgapped`) diff --git a/lerna.json b/lerna.json index 08a9aa5857..582f95b303 100644 --- a/lerna.json +++ b/lerna.json @@ -1,6 +1,6 @@ { "$schema": "node_modules/lerna/schemas/lerna-schema.json", - "version": "3.1.2", + "version": "3.2.3", "npmClient": "yarn", "packages": [ "packages/*", diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts index 47b3f0672f..b72651e21f 100644 --- a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -1,7 +1,12 @@ import tk from "timekeeper" import _ from "lodash" -import { DBTestConfiguration, generator, structures } from "../../../tests" +import { + DBTestConfiguration, + generator, + structures, + utils, +} from "../../../tests" import { getDB } from "../../db" import { @@ -10,15 +15,14 @@ import { init, } from "../docWritethrough" -import InMemoryQueue from "../../queue/inMemoryQueue" - const initialTime = Date.now() async function waitForQueueCompletion() { - const queue: InMemoryQueue = DocWritethroughProcessor.queue as never - await queue.waitForCompletion() + await utils.queue.processMessages(DocWritethroughProcessor.queue) } +beforeAll(() => utils.queue.useRealQueues()) + describe("docWritethrough", () => { beforeAll(() => { init() @@ -67,7 +71,7 @@ describe("docWritethrough", () => { const patch3 = generatePatchObject(3) await docWritethrough.patch(patch3) - 
expect(await db.get(documentId)).toEqual({ + expect(await db.tryGet(documentId)).toEqual({ _id: documentId, ...patch1, ...patch2, @@ -92,7 +96,7 @@ describe("docWritethrough", () => { await waitForQueueCompletion() - expect(await db.get(documentId)).toEqual( + expect(await db.tryGet(documentId)).toEqual( expect.objectContaining({ _id: documentId, ...patch1, @@ -117,7 +121,7 @@ describe("docWritethrough", () => { await waitForQueueCompletion() expect(date1).not.toEqual(date2) - expect(await db.get(documentId)).toEqual( + expect(await db.tryGet(documentId)).toEqual( expect.objectContaining({ createdAt: date1.toISOString(), updatedAt: date2.toISOString(), @@ -135,7 +139,7 @@ describe("docWritethrough", () => { await docWritethrough.patch(patch2) const keyToOverride = _.sample(Object.keys(patch1))! - expect(await db.get(documentId)).toEqual( + expect(await db.tryGet(documentId)).toEqual( expect.objectContaining({ [keyToOverride]: patch1[keyToOverride], }) @@ -150,7 +154,7 @@ describe("docWritethrough", () => { await docWritethrough.patch(patch3) await waitForQueueCompletion() - expect(await db.get(documentId)).toEqual( + expect(await db.tryGet(documentId)).toEqual( expect.objectContaining({ ...patch1, ...patch2, @@ -180,14 +184,14 @@ describe("docWritethrough", () => { await secondDocWritethrough.patch(doc2Patch2) await waitForQueueCompletion() - expect(await db.get(docWritethrough.docId)).toEqual( + expect(await db.tryGet(docWritethrough.docId)).toEqual( expect.objectContaining({ ...doc1Patch, ...doc1Patch2, }) ) - expect(await db.get(secondDocWritethrough.docId)).toEqual( + expect(await db.tryGet(secondDocWritethrough.docId)).toEqual( expect.objectContaining({ ...doc2Patch, ...doc2Patch2, @@ -203,7 +207,7 @@ describe("docWritethrough", () => { await docWritethrough.patch(initialPatch) await waitForQueueCompletion() - expect(await db.get(documentId)).toEqual( + expect(await db.tryGet(documentId)).toEqual( expect.objectContaining(initialPatch) ) @@ -214,10 +218,10 @@ describe("docWritethrough", () => { await docWritethrough.patch(extraPatch) await waitForQueueCompletion() - expect(await db.get(documentId)).toEqual( + expect(await db.tryGet(documentId)).toEqual( expect.objectContaining(extraPatch) ) - expect(await db.get(documentId)).not.toEqual( + expect(await db.tryGet(documentId)).not.toEqual( expect.objectContaining(initialPatch) ) }) @@ -242,7 +246,7 @@ describe("docWritethrough", () => { expect(queueMessageSpy).toHaveBeenCalledTimes(5) await waitForQueueCompletion() - expect(await db.get(documentId)).toEqual( + expect(await db.tryGet(documentId)).toEqual( expect.objectContaining(patches) ) @@ -250,7 +254,7 @@ describe("docWritethrough", () => { expect(queueMessageSpy).toHaveBeenCalledTimes(45) await waitForQueueCompletion() - expect(await db.get(documentId)).toEqual( + expect(await db.tryGet(documentId)).toEqual( expect.objectContaining(patches) ) @@ -258,20 +262,18 @@ describe("docWritethrough", () => { expect(queueMessageSpy).toHaveBeenCalledTimes(55) await waitForQueueCompletion() - expect(await db.get(documentId)).toEqual( + expect(await db.tryGet(documentId)).toEqual( expect.objectContaining(patches) ) }) }) - // This is not yet supported - // eslint-disable-next-line jest/no-disabled-tests - it.skip("patches will execute in order", async () => { + it("patches will execute in order", async () => { let incrementalValue = 0 const keyToOverride = generator.word() async function incrementalPatches(count: number) { for (let i = 0; i < count; i++) { - await docWritethrough.patch({ 
[keyToOverride]: incrementalValue++ }) + await docWritethrough.patch({ [keyToOverride]: ++incrementalValue }) } } @@ -279,13 +281,13 @@ describe("docWritethrough", () => { await incrementalPatches(5) await waitForQueueCompletion() - expect(await db.get(documentId)).toEqual( + expect(await db.tryGet(documentId)).toEqual( expect.objectContaining({ [keyToOverride]: 5 }) ) await incrementalPatches(40) await waitForQueueCompletion() - expect(await db.get(documentId)).toEqual( + expect(await db.tryGet(documentId)).toEqual( expect.objectContaining({ [keyToOverride]: 45 }) ) }) diff --git a/packages/backend-core/src/db/couch/DatabaseImpl.ts b/packages/backend-core/src/db/couch/DatabaseImpl.ts index b807db0ee3..83b9b69d0b 100644 --- a/packages/backend-core/src/db/couch/DatabaseImpl.ts +++ b/packages/backend-core/src/db/couch/DatabaseImpl.ts @@ -10,7 +10,6 @@ import { DatabaseQueryOpts, DBError, Document, - FeatureFlag, isDocument, RowResponse, RowValue, @@ -27,7 +26,6 @@ import { SQLITE_DESIGN_DOC_ID } from "../../constants" import { DDInstrumentedDatabase } from "../instrumentation" import { checkSlashesInUrl } from "../../helpers" import { sqlLog } from "../../sql/utils" -import { flags } from "../../features" const DATABASE_NOT_FOUND = "Database does not exist." @@ -456,10 +454,7 @@ export class DatabaseImpl implements Database { } async destroy() { - if ( - (await flags.isEnabled(FeatureFlag.SQS)) && - (await this.exists(SQLITE_DESIGN_DOC_ID)) - ) { + if (await this.exists(SQLITE_DESIGN_DOC_ID)) { // delete the design document, then run the cleanup operation const definition = await this.get(SQLITE_DESIGN_DOC_ID) // remove all tables - save the definition then trigger a cleanup diff --git a/packages/backend-core/src/environment.ts b/packages/backend-core/src/environment.ts index 4cb0a9c731..b2f95210d3 100644 --- a/packages/backend-core/src/environment.ts +++ b/packages/backend-core/src/environment.ts @@ -225,6 +225,10 @@ const environment = { OPENAI_API_KEY: process.env.OPENAI_API_KEY, MIN_VERSION_WITHOUT_POWER_ROLE: process.env.MIN_VERSION_WITHOUT_POWER_ROLE || "3.0.0", + DISABLE_CONTENT_SECURITY_POLICY: process.env.DISABLE_CONTENT_SECURITY_POLICY, + // stopgap migration strategy until we can ensure backwards compat without unsafe-inline in CSP + DISABLE_CSP_UNSAFE_INLINE_SCRIPTS: + process.env.DISABLE_CSP_UNSAFE_INLINE_SCRIPTS, } export function setEnv(newEnvVars: Partial): () => void { diff --git a/packages/backend-core/src/features/features.ts b/packages/backend-core/src/features/features.ts index 7bba9f23a4..cd84cf7653 100644 --- a/packages/backend-core/src/features/features.ts +++ b/packages/backend-core/src/features/features.ts @@ -269,10 +269,9 @@ export class FlagSet, T extends { [key: string]: V }> { export const flags = new FlagSet({ [FeatureFlag.DEFAULT_VALUES]: Flag.boolean(true), [FeatureFlag.AUTOMATION_BRANCHING]: Flag.boolean(true), - [FeatureFlag.SQS]: Flag.boolean(true), [FeatureFlag.ENRICHED_RELATIONSHIPS]: Flag.boolean(true), - [FeatureFlag.AI_CUSTOM_CONFIGS]: Flag.boolean(env.isDev()), - [FeatureFlag.BUDIBASE_AI]: Flag.boolean(env.isDev()), + [FeatureFlag.AI_CUSTOM_CONFIGS]: Flag.boolean(true), + [FeatureFlag.BUDIBASE_AI]: Flag.boolean(true), }) type UnwrapPromise = T extends Promise ? 
U : T diff --git a/packages/backend-core/src/middleware/contentSecurityPolicy.ts b/packages/backend-core/src/middleware/contentSecurityPolicy.ts new file mode 100644 index 0000000000..e0dfbe6f64 --- /dev/null +++ b/packages/backend-core/src/middleware/contentSecurityPolicy.ts @@ -0,0 +1,118 @@ +import crypto from "crypto" +import env from "../environment" + +const CSP_DIRECTIVES = { + "default-src": ["'self'"], + "script-src": [ + "'self'", + "'unsafe-eval'", + "https://*.budibase.net", + "https://cdn.budi.live", + "https://js.intercomcdn.com", + "https://widget.intercom.io", + "https://d2l5prqdbvm3op.cloudfront.net", + "https://us-assets.i.posthog.com", + ], + "style-src": [ + "'self'", + "'unsafe-inline'", + "https://cdn.jsdelivr.net", + "https://fonts.googleapis.com", + "https://rsms.me", + "https://maxcdn.bootstrapcdn.com", + ], + "object-src": ["'none'"], + "base-uri": ["'self'"], + "connect-src": [ + "'self'", + "https://*.budibase.app", + "https://*.budibaseqa.app", + "https://*.budibase.net", + "https://api-iam.intercom.io", + "https://api-ping.intercom.io", + "https://app.posthog.com", + "https://us.i.posthog.com", + "wss://nexus-websocket-a.intercom.io", + "wss://nexus-websocket-b.intercom.io", + "https://nexus-websocket-a.intercom.io", + "https://nexus-websocket-b.intercom.io", + "https://uploads.intercomcdn.com", + "https://uploads.intercomusercontent.com", + "https://*.amazonaws.com", + "https://*.s3.amazonaws.com", + "https://*.s3.us-east-2.amazonaws.com", + "https://*.s3.us-east-1.amazonaws.com", + "https://*.s3.us-west-1.amazonaws.com", + "https://*.s3.us-west-2.amazonaws.com", + "https://*.s3.af-south-1.amazonaws.com", + "https://*.s3.ap-east-1.amazonaws.com", + "https://*.s3.ap-south-1.amazonaws.com", + "https://*.s3.ap-northeast-2.amazonaws.com", + "https://*.s3.ap-southeast-1.amazonaws.com", + "https://*.s3.ap-southeast-2.amazonaws.com", + "https://*.s3.ap-northeast-1.amazonaws.com", + "https://*.s3.ca-central-1.amazonaws.com", + "https://*.s3.cn-north-1.amazonaws.com", + "https://*.s3.cn-northwest-1.amazonaws.com", + "https://*.s3.eu-central-1.amazonaws.com", + "https://*.s3.eu-west-1.amazonaws.com", + "https://*.s3.eu-west-2.amazonaws.com", + "https://*.s3.eu-south-1.amazonaws.com", + "https://*.s3.eu-west-3.amazonaws.com", + "https://*.s3.eu-north-1.amazonaws.com", + "https://*.s3.sa-east-1.amazonaws.com", + "https://*.s3.me-south-1.amazonaws.com", + "https://*.s3.us-gov-east-1.amazonaws.com", + "https://*.s3.us-gov-west-1.amazonaws.com", + "https://api.github.com", + ], + "font-src": [ + "'self'", + "data:", + "https://cdn.jsdelivr.net", + "https://fonts.gstatic.com", + "https://rsms.me", + "https://maxcdn.bootstrapcdn.com", + "https://js.intercomcdn.com", + "https://fonts.intercomcdn.com", + ], + "frame-src": ["'self'", "https:"], + "img-src": ["http:", "https:", "data:", "blob:"], + "manifest-src": ["'self'"], + "media-src": [ + "'self'", + "https://js.intercomcdn.com", + "https://cdn.budi.live", + ], + "worker-src": ["blob:"], +} + +export async function contentSecurityPolicy(ctx: any, next: any) { + try { + const nonce = crypto.randomBytes(16).toString("base64") + + const directives = { ...CSP_DIRECTIVES } + directives["script-src"] = [ + ...CSP_DIRECTIVES["script-src"], + `'nonce-${nonce}'`, + ] + + if (!env.DISABLE_CSP_UNSAFE_INLINE_SCRIPTS) { + directives["script-src"].push("'unsafe-inline'") + } + + ctx.state.nonce = nonce + + const cspHeader = Object.entries(directives) + .map(([key, sources]) => `${key} ${sources.join(" ")}`) + .join("; ") + 
ctx.set("Content-Security-Policy", cspHeader) + await next() + } catch (err: any) { + console.error( + `Error occurred in Content-Security-Policy middleware: ${err}` + ) + } +} + +export default contentSecurityPolicy diff --git a/packages/backend-core/src/middleware/index.ts b/packages/backend-core/src/middleware/index.ts index 20c2125b13..9ee51db45b 100644 --- a/packages/backend-core/src/middleware/index.ts +++ b/packages/backend-core/src/middleware/index.ts @@ -19,5 +19,6 @@ export { default as pino } from "../logging/pino/middleware" export { default as correlation } from "../logging/correlation/middleware" export { default as errorHandling } from "./errorHandling" export { default as querystringToBody } from "./querystringToBody" +export { default as csp } from "./contentSecurityPolicy" export * as joiValidator from "./joi-validator" export { default as ip } from "./ip" diff --git a/packages/backend-core/src/middleware/tests/contentSecurityPolicy.spec.ts b/packages/backend-core/src/middleware/tests/contentSecurityPolicy.spec.ts new file mode 100644 index 0000000000..0c5838e7fe --- /dev/null +++ b/packages/backend-core/src/middleware/tests/contentSecurityPolicy.spec.ts @@ -0,0 +1,75 @@ +import crypto from "crypto" +import contentSecurityPolicy from "../contentSecurityPolicy" + +jest.mock("crypto", () => ({ + randomBytes: jest.fn(), + randomUUID: jest.fn(), +})) + +describe("contentSecurityPolicy middleware", () => { + let ctx: any + let next: any + const mockNonce = "mocked/nonce" + + beforeEach(() => { + ctx = { + state: {}, + set: jest.fn(), + } + next = jest.fn() + // @ts-ignore + crypto.randomBytes.mockReturnValue(Buffer.from(mockNonce, "base64")) + }) + + afterEach(() => { + jest.clearAllMocks() + }) + + it("should generate a nonce and set it in the script-src directive", async () => { + await contentSecurityPolicy(ctx, next) + + expect(ctx.state.nonce).toBe(mockNonce) + expect(ctx.set).toHaveBeenCalledWith( + "Content-Security-Policy", + expect.stringContaining( + `script-src 'self' 'unsafe-eval' https://*.budibase.net https://cdn.budi.live https://js.intercomcdn.com https://widget.intercom.io https://d2l5prqdbvm3op.cloudfront.net https://us-assets.i.posthog.com 'nonce-${mockNonce}'` + ) + ) + expect(next).toHaveBeenCalled() + }) + + it("should include all CSP directives in the header", async () => { + await contentSecurityPolicy(ctx, next) + + const cspHeader = ctx.set.mock.calls[0][1] + expect(cspHeader).toContain("default-src 'self'") + expect(cspHeader).toContain("script-src 'self' 'unsafe-eval'") + expect(cspHeader).toContain("style-src 'self' 'unsafe-inline'") + expect(cspHeader).toContain("object-src 'none'") + expect(cspHeader).toContain("base-uri 'self'") + expect(cspHeader).toContain("connect-src 'self'") + expect(cspHeader).toContain("font-src 'self'") + expect(cspHeader).toContain("frame-src 'self'") + expect(cspHeader).toContain("img-src http: https: data: blob:") + expect(cspHeader).toContain("manifest-src 'self'") + expect(cspHeader).toContain("media-src 'self'") + expect(cspHeader).toContain("worker-src blob:") + }) + + it("should handle errors and log an error message", async () => { + const consoleSpy = jest.spyOn(console, "error").mockImplementation() + const error = new Error("Test error") + // @ts-ignore + crypto.randomBytes.mockImplementation(() => { + throw error + }) + + await contentSecurityPolicy(ctx, next) + + expect(consoleSpy).toHaveBeenCalledWith( + `Error occurred in Content-Security-Policy middleware: ${error}` + ) + 
expect(next).not.toHaveBeenCalled() + consoleSpy.mockRestore() + }) +}) diff --git a/packages/backend-core/src/queue/inMemoryQueue.ts b/packages/backend-core/src/queue/inMemoryQueue.ts index 62b971f9f5..dd8d3daa37 100644 --- a/packages/backend-core/src/queue/inMemoryQueue.ts +++ b/packages/backend-core/src/queue/inMemoryQueue.ts @@ -1,5 +1,5 @@ import events from "events" -import { newid, timeout } from "../utils" +import { newid } from "../utils" import { Queue, QueueOptions, JobOptions } from "./queue" interface JobMessage { @@ -141,7 +141,7 @@ class InMemoryQueue implements Partial { } else { pushMessage() } - return {} as any + return { id: jobId } as any } /** @@ -184,16 +184,6 @@ class InMemoryQueue implements Partial { // do nothing return this as any } - - async waitForCompletion() { - do { - await timeout(50) - } while (this.hasRunningJobs()) - } - - hasRunningJobs() { - return this._addCount > this._runCount - } } export default InMemoryQueue diff --git a/packages/backend-core/src/queue/queue.ts b/packages/backend-core/src/queue/queue.ts index f633d0885e..f5d710f02d 100644 --- a/packages/backend-core/src/queue/queue.ts +++ b/packages/backend-core/src/queue/queue.ts @@ -15,7 +15,7 @@ const QUEUE_LOCK_MS = Duration.fromMinutes(5).toMs() const QUEUE_LOCK_RENEW_INTERNAL_MS = Duration.fromSeconds(30).toMs() // cleanup the queue every 60 seconds const CLEANUP_PERIOD_MS = Duration.fromSeconds(60).toMs() -let QUEUES: BullQueue.Queue[] | InMemoryQueue[] = [] +let QUEUES: BullQueue.Queue[] = [] let cleanupInterval: NodeJS.Timeout async function cleanup() { @@ -45,11 +45,18 @@ export function createQueue( if (opts.jobOptions) { queueConfig.defaultJobOptions = opts.jobOptions } - let queue: any + let queue: BullQueue.Queue if (!env.isTest()) { queue = new BullQueue(jobQueue, queueConfig) + } else if ( + process.env.BULL_TEST_REDIS_PORT && + !isNaN(+process.env.BULL_TEST_REDIS_PORT) + ) { + queue = new BullQueue(jobQueue, { + redis: { host: "localhost", port: +process.env.BULL_TEST_REDIS_PORT }, + }) } else { - queue = new InMemoryQueue(jobQueue, queueConfig) + queue = new InMemoryQueue(jobQueue, queueConfig) as any } addListeners(queue, jobQueue, opts?.removeStalledCb) QUEUES.push(queue) diff --git a/packages/backend-core/tests/core/utilities/index.ts b/packages/backend-core/tests/core/utilities/index.ts index 787d69be2c..c3d81784c8 100644 --- a/packages/backend-core/tests/core/utilities/index.ts +++ b/packages/backend-core/tests/core/utilities/index.ts @@ -4,3 +4,4 @@ export { generator } from "./structures" export * as testContainerUtils from "./testContainerUtils" export * as utils from "./utils" export * from "./jestUtils" +export * as queue from "./queue" diff --git a/packages/backend-core/tests/core/utilities/queue.ts b/packages/backend-core/tests/core/utilities/queue.ts new file mode 100644 index 0000000000..49dd33ca29 --- /dev/null +++ b/packages/backend-core/tests/core/utilities/queue.ts @@ -0,0 +1,9 @@ +import { Queue } from "bull" + +export async function processMessages(queue: Queue) { + do { + await queue.whenCurrentJobsFinished() + } while (await queue.count()) + + await queue.whenCurrentJobsFinished() +} diff --git a/packages/backend-core/tests/core/utilities/testContainerUtils.ts b/packages/backend-core/tests/core/utilities/testContainerUtils.ts index 1a25bb28f4..71d7fa32db 100644 --- a/packages/backend-core/tests/core/utilities/testContainerUtils.ts +++ b/packages/backend-core/tests/core/utilities/testContainerUtils.ts @@ -1,4 +1,6 @@ import { execSync } from "child_process" 
+import { cloneDeep } from "lodash" +import { GenericContainer, StartedTestContainer } from "testcontainers" const IPV4_PORT_REGEX = new RegExp(`0\\.0\\.0\\.0:(\\d+)->(\\d+)/tcp`, "g") @@ -106,3 +108,58 @@ export function setupEnv(...envs: any[]) { } } } + +export async function startContainer(container: GenericContainer) { + const imageName = (container as any).imageName.string as string + let key: string = imageName + if (imageName.includes("@sha256")) { + key = imageName.split("@")[0] + } + key = key.replace(/\//g, "-").replace(/:/g, "-") + + container = container + .withReuse() + .withLabels({ "com.budibase": "true" }) + .withName(`${key}_testcontainer`) + + let startedContainer: StartedTestContainer | undefined = undefined + let lastError = undefined + for (let i = 0; i < 10; i++) { + try { + // container.start() is not an idempotent operation, calling `start` + // modifies the internal state of a GenericContainer instance such that + // the hash it uses to determine reuse changes. We need to clone the + // container before calling start to ensure that we're using the same + // reuse hash every time. + const containerCopy = cloneDeep(container) + startedContainer = await containerCopy.start() + lastError = undefined + break + } catch (e: any) { + lastError = e + await new Promise(resolve => setTimeout(resolve, 1000)) + } + } + + if (!startedContainer) { + if (lastError) { + throw lastError + } + throw new Error(`failed to start container: ${imageName}`) + } + + const info = getContainerById(startedContainer.getId()) + if (!info) { + throw new Error("Container not found") + } + + // Some Docker runtimes, when you expose a port, will bind it to both + // 127.0.0.1 and ::1, so ipv4 and ipv6. The port spaces of ipv4 and ipv6 + // addresses are not shared, and testcontainers will sometimes give you back + // the ipv6 port. There's no way to know that this has happened, and if you + // try to then connect to `localhost:port` you may attempt to bind to the v4 + // address which could be unbound or even an entirely different container. For + // that reason, we don't use testcontainers' `getExposedPort` function, + // preferring instead our own method that guaranteed v4 ports. 
+ return getExposedV4Ports(info) +} diff --git a/packages/backend-core/tests/core/utilities/utils/index.ts b/packages/backend-core/tests/core/utilities/utils/index.ts index 41a249c7e6..3d28189c53 100644 --- a/packages/backend-core/tests/core/utilities/utils/index.ts +++ b/packages/backend-core/tests/core/utilities/utils/index.ts @@ -1 +1,2 @@ export * as time from "./time" +export * as queue from "./queue" diff --git a/packages/backend-core/tests/core/utilities/utils/queue.ts b/packages/backend-core/tests/core/utilities/utils/queue.ts new file mode 100644 index 0000000000..3ad7d6b4b4 --- /dev/null +++ b/packages/backend-core/tests/core/utilities/utils/queue.ts @@ -0,0 +1,27 @@ +import { Queue } from "bull" +import { GenericContainer, Wait } from "testcontainers" +import { startContainer } from "../testContainerUtils" + +export async function useRealQueues() { + const ports = await startContainer( + new GenericContainer("redis") + .withExposedPorts(6379) + .withWaitStrategy( + Wait.forSuccessfulCommand(`redis-cli`).withStartupTimeout(10000) + ) + ) + + const port = ports.find(x => x.container === 6379)?.host + if (!port) { + throw new Error("Redis port not found") + } + process.env.BULL_TEST_REDIS_PORT = port.toString() +} + +export async function processMessages(queue: Queue) { + do { + await queue.whenCurrentJobsFinished() + } while (await queue.count()) + + await queue.whenCurrentJobsFinished() +} diff --git a/packages/bbui/src/Form/Core/Dropzone.svelte b/packages/bbui/src/Form/Core/Dropzone.svelte index 2922d88e7a..26f1dc86c6 100644 --- a/packages/bbui/src/Form/Core/Dropzone.svelte +++ b/packages/bbui/src/Form/Core/Dropzone.svelte @@ -8,6 +8,7 @@ import Link from "../../Link/Link.svelte" import Tag from "../../Tags/Tag.svelte" import Tags from "../../Tags/Tags.svelte" + import ProgressCircle from "../../ProgressCircle/ProgressCircle.svelte" const BYTES_IN_KB = 1000 const BYTES_IN_MB = 1000000 @@ -39,12 +40,14 @@ "jfif", "webp", ] - const fieldId = id || uuid() + let selectedImageIdx = 0 let fileDragged = false let selectedUrl let fileInput + let loading = false + $: selectedImage = value?.[selectedImageIdx] ?? null $: fileCount = value?.length ?? 0 $: isImage = @@ -86,10 +89,15 @@ } if (processFiles) { - const processedFiles = await processFiles(fileList) - const newValue = [...value, ...processedFiles] - dispatch("change", newValue) - selectedImageIdx = newValue.length - 1 + loading = true + try { + const processedFiles = await processFiles(fileList) + const newValue = [...value, ...processedFiles] + dispatch("change", newValue) + selectedImageIdx = newValue.length - 1 + } finally { + loading = false + } } else { dispatch("change", fileList) } @@ -227,7 +235,7 @@ {#if showDropzone}
+
+    {#if loading}
+      <div class="loading">
+        <ProgressCircle size="M" />
+      </div>
+    {/if}
   </div>
{/if} @@ -464,6 +478,7 @@ .spectrum-Dropzone { height: 220px; + position: relative; } .compact .spectrum-Dropzone { height: 40px; @@ -488,4 +503,14 @@ .tag { margin-top: 8px; } + + .loading { + position: absolute; + display: grid; + place-items: center; + height: 100%; + width: 100%; + top: 0; + left: 0; + } diff --git a/packages/builder/src/pages/builder/portal/settings/version.svelte b/packages/builder/src/pages/builder/portal/settings/version.svelte index c3898b7861..032c077557 100644 --- a/packages/builder/src/pages/builder/portal/settings/version.svelte +++ b/packages/builder/src/pages/builder/portal/settings/version.svelte @@ -4,12 +4,10 @@ Layout, Heading, Body, - Button, Divider, notifications, Label, - Modal, - ModalContent, + Link, } from "@budibase/bbui" import { API } from "api" import { auth, admin } from "stores/portal" @@ -21,8 +19,6 @@ let githubVersion let githubPublishedDate let githubPublishedTime - let needsUpdate = true - let updateModal // Only admins allowed here $: { @@ -31,21 +27,6 @@ } } - async function updateBudibase() { - try { - notifications.info("Updating budibase..") - await fetch("/v1/update", { - headers: { - Authorization: "Bearer budibase", - }, - }) - notifications.success("Your budibase installation is up to date.") - getVersion() - } catch (err) { - notifications.error(`Error installing budibase update ${err}`) - } - } - async function getVersion() { try { version = await API.getBudibaseVersion() @@ -69,13 +50,6 @@ githubPublishedDate = new Date(githubResponse.published_at) githubPublishedTime = githubPublishedDate.toLocaleTimeString() githubPublishedDate = githubPublishedDate.toLocaleDateString() - - //Does Budibase need to be updated? - if (githubVersion === version) { - needsUpdate = false - } else { - needsUpdate = true - } } catch (error) { notifications.error("Error getting the latest Budibase version") githubVersion = null @@ -115,23 +89,15 @@ > -
- + Updating Budibase + To update your self-host installation, follow the docs found here. - - - Are you sure you want to update your budibase installation to the - latest version? - - -
+ {/if} {/if} diff --git a/packages/frontend-core/src/components/grid/layout/NewColumnButton.svelte b/packages/frontend-core/src/components/grid/layout/NewColumnButton.svelte index 261954379b..215fdabd8d 100644 --- a/packages/frontend-core/src/components/grid/layout/NewColumnButton.svelte +++ b/packages/frontend-core/src/components/grid/layout/NewColumnButton.svelte @@ -53,6 +53,7 @@ on:close={close} maxHeight={null} resizable + minWidth={360} >
@@ -80,7 +81,6 @@ } .content { - width: 300px; padding: 20px; display: flex; flex-direction: column; diff --git a/packages/frontend-core/src/fetch/NestedProviderFetch.js b/packages/frontend-core/src/fetch/NestedProviderFetch.js index 01c22b6ba0..0a08b00cb4 100644 --- a/packages/frontend-core/src/fetch/NestedProviderFetch.js +++ b/packages/frontend-core/src/fetch/NestedProviderFetch.js @@ -5,6 +5,7 @@ export default class NestedProviderFetch extends DataFetch { // Nested providers should already have exposed their own schema return { schema: datasource?.value?.schema, + primaryDisplay: datasource?.value?.primaryDisplay, } } diff --git a/packages/pro b/packages/pro index 04bee88597..a56696a4af 160000 --- a/packages/pro +++ b/packages/pro @@ -1 +1 @@ -Subproject commit 04bee88597edb1edb88ed299d0597b587f0362ec +Subproject commit a56696a4af5667617746600fc75fe6a01744b692 diff --git a/packages/server/src/api/controllers/application.ts b/packages/server/src/api/controllers/application.ts index e7d0ed7ba7..101257c321 100644 --- a/packages/server/src/api/controllers/application.ts +++ b/packages/server/src/api/controllers/application.ts @@ -153,7 +153,11 @@ async function createInstance(appId: string, template: AppTemplate) { await createAllSearchIndex() if (template && template.useTemplate) { - await sdk.backups.importApp(appId, db, template) + const opts = { + importObjStoreContents: true, + updateAttachmentColumns: !template.key, // preserve attachments when using Budibase templates + } + await sdk.backups.importApp(appId, db, template, opts) } else { // create the users table await db.put(USERS_TABLE_SCHEMA) diff --git a/packages/server/src/api/controllers/static/index.ts b/packages/server/src/api/controllers/static/index.ts index daf7b9b25c..1bf04e94f0 100644 --- a/packages/server/src/api/controllers/static/index.ts +++ b/packages/server/src/api/controllers/static/index.ts @@ -209,6 +209,7 @@ export const serveApp = async function (ctx: UserCtx) { ? objectStore.getGlobalFileUrl("settings", "logoUrl") : "", appMigrating: needMigrations, + nonce: ctx.state.nonce, }) const appHbs = loadHandlebarsFile(appHbsPath) ctx.body = await processString(appHbs, { @@ -217,6 +218,7 @@ export const serveApp = async function (ctx: UserCtx) { css: `:root{${themeVariables}} ${css.code}`, appId, embedded: bbHeaderEmbed, + nonce: ctx.state.nonce, }) } else { // just return the app info for jest to assert on @@ -258,6 +260,7 @@ export const serveBuilderPreview = async function (ctx: Ctx) { const previewHbs = loadHandlebarsFile(join(previewLoc, "preview.hbs")) ctx.body = await processString(previewHbs, { clientLibPath: objectStore.clientLibraryUrl(appId!, appInfo.version), + nonce: ctx.state.nonce, }) } else { // just return the app info for jest to assert on diff --git a/packages/server/src/api/controllers/static/templates/BudibaseApp.svelte b/packages/server/src/api/controllers/static/templates/BudibaseApp.svelte index b4bfbe6660..b88b738f90 100644 --- a/packages/server/src/api/controllers/static/templates/BudibaseApp.svelte +++ b/packages/server/src/api/controllers/static/templates/BudibaseApp.svelte @@ -16,6 +16,8 @@ export let hideDevTools export let sideNav export let hideFooter + + export let nonce @@ -118,11 +120,11 @@

{/if}

- {#if appMigrating} - {/if} @@ -135,7 +137,7 @@ {/each} {/if} - diff --git a/packages/server/src/api/controllers/static/templates/preview.hbs b/packages/server/src/api/controllers/static/templates/preview.hbs index 54b5b1a4e4..87b9ad6ea3 100644 --- a/packages/server/src/api/controllers/static/templates/preview.hbs +++ b/packages/server/src/api/controllers/static/templates/preview.hbs @@ -31,7 +31,7 @@ } -
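Note on the new CSP middleware: it generates a per-request nonce, stores it on ctx.state, and emits the Content-Security-Policy header built from the directive map. A minimal sketch of how it might be wired into a Koa server and how the nonce reaches rendered markup — the "@budibase/backend-core" middleware namespace, the handler body, and the port are illustrative assumptions, not taken from this diff (only the `csp` export name comes from the middleware index change above):

import Koa from "koa"
import { middleware } from "@budibase/backend-core"

const app = new Koa()

// Register the CSP middleware early so every response carries the header and
// downstream handlers can read the per-request nonce from ctx.state.
app.use(middleware.csp)

app.use(async ctx => {
  // Inline scripts rendered into the page must carry the same nonce to
  // satisfy the script-src directive built by the middleware.
  ctx.body = `<script nonce="${ctx.state.nonce}">window.INIT_TIME = Date.now()</script>`
})

app.listen(4001)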
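Note on the queue test helpers: useRealQueues() and processMessages() replace the removed InMemoryQueue.waitForCompletion in tests, starting (or reusing) a Redis testcontainer and draining a real Bull queue. A rough usage sketch modelled on the docWritethrough spec above; the import paths are relative to that spec and the test body is illustrative:

import { utils } from "../../../tests"
import { DocWritethroughProcessor } from "../docWritethrough"

// Opt this suite into a real Redis-backed Bull queue: useRealQueues() sets
// BULL_TEST_REDIS_PORT, which createQueue() checks before falling back to
// the in-memory queue.
beforeAll(() => utils.queue.useRealQueues())

it("waits for queued writes before asserting", async () => {
  // ... trigger work that lands on DocWritethroughProcessor.queue ...

  // Drain the queue: wait for in-flight jobs, then loop while jobs remain.
  await utils.queue.processMessages(DocWritethroughProcessor.queue)

  // ... database assertions go here ...
})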