Merge branch 'master' into fix/automation-data-test-updates
commit f0973d4afb
@@ -19,7 +19,6 @@ MINIO_PORT=4004
COUCH_DB_PORT=4005
COUCH_DB_SQS_PORT=4006
REDIS_PORT=6379
WATCHTOWER_PORT=6161
BUDIBASE_ENVIRONMENT=PRODUCTION
SQL_MAX_ROWS=

@@ -74,7 +74,6 @@ services:
- WORKER_UPSTREAM_URL=http://worker-service:4003
- MINIO_UPSTREAM_URL=http://minio-service:9000
- COUCHDB_UPSTREAM_URL=http://couchdb-service:5984
- WATCHTOWER_UPSTREAM_URL=http://watchtower-service:8080
- RESOLVER=127.0.0.11
depends_on:
- minio-service

@@ -87,7 +87,6 @@ services:
- WORKER_UPSTREAM_URL=http://worker-service:4003
- MINIO_UPSTREAM_URL=http://minio-service:9000
- COUCHDB_UPSTREAM_URL=http://couchdb-service:5984
- WATCHTOWER_UPSTREAM_URL=http://watchtower-service:8080
- RESOLVER=127.0.0.11
depends_on:
- minio-service

@@ -112,19 +111,6 @@ services:
volumes:
- redis_data:/data

watchtower-service:
restart: always
image: containrrr/watchtower
volumes:
- /var/run/docker.sock:/var/run/docker.sock
command: --debug --http-api-update bbapps bbworker bbproxy
environment:
- WATCHTOWER_HTTP_API=true
- WATCHTOWER_HTTP_API_TOKEN=budibase
- WATCHTOWER_CLEANUP=true
labels:
- "com.centurylinklabs.watchtower.enable=false"

volumes:
couchdb3_data:
driver: local
@@ -1,152 +0,0 @@
static_resources:
listeners:
- name: main_listener
address:
socket_address: { address: 0.0.0.0, port_value: 10000 }
filter_chains:
- filters:
- name: envoy.filters.network.http_connection_manager
typed_config:
"@type": type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager
stat_prefix: ingress
codec_type: auto
route_config:
name: local_route
virtual_hosts:
- name: local_services
domains: ["*"]
routes:
- match: { prefix: "/app/" }
route:
cluster: app-service
prefix_rewrite: "/"

- match: { path: "/v1/update" }
route:
cluster: watchtower-service

- match: { prefix: "/builder/" }
route:
cluster: app-service

- match: { prefix: "/builder" }
route:
cluster: app-service

- match: { prefix: "/app_" }
route:
cluster: app-service

# special cases for worker admin (deprecated), global and system API
- match: { prefix: "/api/global/" }
route:
cluster: worker-service

- match: { prefix: "/api/admin/" }
route:
cluster: worker-service

- match: { prefix: "/api/system/" }
route:
cluster: worker-service

- match: { path: "/" }
route:
cluster: app-service

# special case for when API requests are made, can just forward, not to minio
- match: { prefix: "/api/" }
route:
cluster: app-service
timeout: 120s

- match: { prefix: "/worker/" }
route:
cluster: worker-service
prefix_rewrite: "/"

- match: { prefix: "/db/" }
route:
cluster: couchdb-service
prefix_rewrite: "/"

# minio is on the default route because this works
# best, minio + AWS SDK doesn't handle path proxy
- match: { prefix: "/" }
route:
cluster: minio-service

http_filters:
- name: envoy.filters.http.router

clusters:
- name: app-service
connect_timeout: 0.25s
type: strict_dns
lb_policy: round_robin
load_assignment:
cluster_name: app-service
endpoints:
- lb_endpoints:
- endpoint:
address:
socket_address:
address: app-service
port_value: 4002

- name: minio-service
connect_timeout: 0.25s
type: strict_dns
lb_policy: round_robin
load_assignment:
cluster_name: minio-service
endpoints:
- lb_endpoints:
- endpoint:
address:
socket_address:
address: minio-service
port_value: 9000

- name: worker-service
connect_timeout: 0.25s
type: strict_dns
lb_policy: round_robin
load_assignment:
cluster_name: worker-service
endpoints:
- lb_endpoints:
- endpoint:
address:
socket_address:
address: worker-service
port_value: 4003

- name: couchdb-service
connect_timeout: 0.25s
type: strict_dns
lb_policy: round_robin
load_assignment:
cluster_name: couchdb-service
endpoints:
- lb_endpoints:
- endpoint:
address:
socket_address:
address: couchdb-service
port_value: 5984

- name: watchtower-service
connect_timeout: 0.25s
type: strict_dns
lb_policy: round_robin
load_assignment:
cluster_name: watchtower-service
endpoints:
- lb_endpoints:
- endpoint:
address:
socket_address:
address: watchtower-service
port_value: 8080
@@ -18,7 +18,6 @@ WORKER_PORT=4003
MINIO_PORT=4004
COUCH_DB_PORT=4005
REDIS_PORT=6379
WATCHTOWER_PORT=6161
BUDIBASE_ENVIRONMENT=PRODUCTION

# An admin user can be automatically created initially if these are set
@@ -78,11 +78,6 @@
"default": "6379",
"preset": true
},
{
"name": "WATCHTOWER_PORT",
"default": "6161",
"preset": true
},
{
"name": "BUDIBASE_ENVIRONMENT",
"default": "PRODUCTION",
@@ -22,5 +22,4 @@ ENV APPS_UPSTREAM_URL=http://app-service:4002
ENV WORKER_UPSTREAM_URL=http://worker-service:4003
ENV MINIO_UPSTREAM_URL=http://minio-service:9000
ENV COUCHDB_UPSTREAM_URL=http://couchdb-service:5984
ENV WATCHTOWER_UPSTREAM_URL=http://watchtower-service:8080
ENV RESOLVER=127.0.0.11
@@ -50,19 +50,6 @@ http {
ignore_invalid_headers off;
proxy_buffering off;

set $csp_default "default-src 'self'";
set $csp_script "script-src 'self' 'unsafe-inline' 'unsafe-eval' https://*.budibase.net https://cdn.budi.live https://js.intercomcdn.com https://widget.intercom.io https://d2l5prqdbvm3op.cloudfront.net https://us-assets.i.posthog.com";
set $csp_style "style-src 'self' 'unsafe-inline' https://cdn.jsdelivr.net https://fonts.googleapis.com https://rsms.me https://maxcdn.bootstrapcdn.com";
set $csp_object "object-src 'none'";
set $csp_base_uri "base-uri 'self'";
set $csp_connect "connect-src 'self' https://*.budibase.app https://*.budibaseqa.app https://*.budibase.net https://api-iam.intercom.io https://api-iam.intercom.io https://api-ping.intercom.io https://app.posthog.com https://us.i.posthog.com wss://nexus-websocket-a.intercom.io wss://nexus-websocket-b.intercom.io https://nexus-websocket-a.intercom.io https://nexus-websocket-b.intercom.io https://uploads.intercomcdn.com https://uploads.intercomusercontent.com https://*.amazonaws.com https://*.s3.amazonaws.com https://*.s3.us-east-2.amazonaws.com https://*.s3.us-east-1.amazonaws.com https://*.s3.us-west-1.amazonaws.com https://*.s3.us-west-2.amazonaws.com https://*.s3.af-south-1.amazonaws.com https://*.s3.ap-east-1.amazonaws.com https://*.s3.ap-southeast-3.amazonaws.com https://*.s3.ap-south-1.amazonaws.com https://*.s3.ap-northeast-3.amazonaws.com https://*.s3.ap-northeast-2.amazonaws.com https://*.s3.ap-southeast-1.amazonaws.com https://*.s3.ap-southeast-2.amazonaws.com https://*.s3.ap-northeast-1.amazonaws.com https://*.s3.ca-central-1.amazonaws.com https://*.s3.cn-north-1.amazonaws.com https://*.s3.cn-northwest-1.amazonaws.com https://*.s3.eu-central-1.amazonaws.com https://*.s3.eu-west-1.amazonaws.com https://*.s3.eu-west-2.amazonaws.com https://*.s3.eu-south-1.amazonaws.com https://*.s3.eu-west-3.amazonaws.com https://*.s3.eu-north-1.amazonaws.com https://*.s3.sa-east-1.amazonaws.com https://*.s3.me-south-1.amazonaws.com https://*.s3.us-gov-east-1.amazonaws.com https://*.s3.us-gov-west-1.amazonaws.com https://api.github.com";
set $csp_font "font-src 'self' data: https://cdn.jsdelivr.net https://fonts.gstatic.com https://rsms.me https://maxcdn.bootstrapcdn.com https://js.intercomcdn.com https://fonts.intercomcdn.com";
set $csp_frame "frame-src 'self' https:";
set $csp_img "img-src http: https: data: blob:";
set $csp_manifest "manifest-src 'self'";
set $csp_media "media-src 'self' https://js.intercomcdn.com https://cdn.budi.live";
set $csp_worker "worker-src blob:";

error_page 502 503 504 /error.html;
location = /error.html {
root /usr/share/nginx/html;

@@ -73,7 +60,6 @@ http {
add_header X-Frame-Options SAMEORIGIN always;
add_header X-Content-Type-Options nosniff always;
add_header X-XSS-Protection "1; mode=block" always;
add_header Content-Security-Policy "${csp_default}; ${csp_script}; ${csp_style}; ${csp_object}; ${csp_base_uri}; ${csp_connect}; ${csp_font}; ${csp_frame}; ${csp_img}; ${csp_manifest}; ${csp_media}; ${csp_worker};" always;
add_header Strict-Transport-Security "max-age=31536000; includeSubDomains; preload" always;

# upstreams

@@ -81,7 +67,6 @@ http {
set $worker ${WORKER_UPSTREAM_URL};
set $minio ${MINIO_UPSTREAM_URL};
set $couchdb ${COUCHDB_UPSTREAM_URL};
set $watchtower ${WATCHTOWER_UPSTREAM_URL};

location /health {
access_log off;

@@ -107,10 +92,6 @@ http {
proxy_pass $apps;
}

location = /v1/update {
proxy_pass $watchtower;
}

location ~ ^/(builder|app_) {
proxy_http_version 1.1;

@@ -125,6 +106,12 @@ http {

location ~ ^/api/(system|admin|global)/ {
proxy_set_header Host $host;

# Enable buffering for potentially large OIDC configs
proxy_buffering on;
proxy_buffer_size 16k;
proxy_buffers 4 32k;

proxy_pass $worker;
}
@@ -12,7 +12,6 @@ let IMAGES = {
couch: "ibmcom/couchdb3",
curl: "curlimages/curl",
redis: "redis",
watchtower: "containrrr/watchtower",
}

if (IS_SINGLE_IMAGE) {
@@ -1,6 +1,6 @@
{
"$schema": "node_modules/lerna/schemas/lerna-schema.json",
"version": "3.1.2",
"version": "3.2.3",
"npmClient": "yarn",
"packages": [
"packages/*",
@@ -1,7 +1,12 @@
import tk from "timekeeper"

import _ from "lodash"
import { DBTestConfiguration, generator, structures } from "../../../tests"
import {
DBTestConfiguration,
generator,
structures,
utils,
} from "../../../tests"
import { getDB } from "../../db"

import {

@@ -10,15 +15,14 @@ import {
init,
} from "../docWritethrough"

import InMemoryQueue from "../../queue/inMemoryQueue"

const initialTime = Date.now()

async function waitForQueueCompletion() {
const queue: InMemoryQueue = DocWritethroughProcessor.queue as never
await queue.waitForCompletion()
await utils.queue.processMessages(DocWritethroughProcessor.queue)
}

beforeAll(() => utils.queue.useRealQueues())

describe("docWritethrough", () => {
beforeAll(() => {
init()

@@ -67,7 +71,7 @@ describe("docWritethrough", () => {
const patch3 = generatePatchObject(3)
await docWritethrough.patch(patch3)

expect(await db.get(documentId)).toEqual({
expect(await db.tryGet(documentId)).toEqual({
_id: documentId,
...patch1,
...patch2,

@@ -92,7 +96,7 @@ describe("docWritethrough", () => {

await waitForQueueCompletion()

expect(await db.get(documentId)).toEqual(
expect(await db.tryGet(documentId)).toEqual(
expect.objectContaining({
_id: documentId,
...patch1,

@@ -117,7 +121,7 @@ describe("docWritethrough", () => {
await waitForQueueCompletion()

expect(date1).not.toEqual(date2)
expect(await db.get(documentId)).toEqual(
expect(await db.tryGet(documentId)).toEqual(
expect.objectContaining({
createdAt: date1.toISOString(),
updatedAt: date2.toISOString(),

@@ -135,7 +139,7 @@ describe("docWritethrough", () => {
await docWritethrough.patch(patch2)

const keyToOverride = _.sample(Object.keys(patch1))!
expect(await db.get(documentId)).toEqual(
expect(await db.tryGet(documentId)).toEqual(
expect.objectContaining({
[keyToOverride]: patch1[keyToOverride],
})

@@ -150,7 +154,7 @@ describe("docWritethrough", () => {
await docWritethrough.patch(patch3)
await waitForQueueCompletion()

expect(await db.get(documentId)).toEqual(
expect(await db.tryGet(documentId)).toEqual(
expect.objectContaining({
...patch1,
...patch2,

@@ -180,14 +184,14 @@ describe("docWritethrough", () => {
await secondDocWritethrough.patch(doc2Patch2)
await waitForQueueCompletion()

expect(await db.get(docWritethrough.docId)).toEqual(
expect(await db.tryGet(docWritethrough.docId)).toEqual(
expect.objectContaining({
...doc1Patch,
...doc1Patch2,
})
)

expect(await db.get(secondDocWritethrough.docId)).toEqual(
expect(await db.tryGet(secondDocWritethrough.docId)).toEqual(
expect.objectContaining({
...doc2Patch,
...doc2Patch2,

@@ -203,7 +207,7 @@ describe("docWritethrough", () => {
await docWritethrough.patch(initialPatch)
await waitForQueueCompletion()

expect(await db.get(documentId)).toEqual(
expect(await db.tryGet(documentId)).toEqual(
expect.objectContaining(initialPatch)
)

@@ -214,10 +218,10 @@ describe("docWritethrough", () => {
await docWritethrough.patch(extraPatch)
await waitForQueueCompletion()

expect(await db.get(documentId)).toEqual(
expect(await db.tryGet(documentId)).toEqual(
expect.objectContaining(extraPatch)
)
expect(await db.get(documentId)).not.toEqual(
expect(await db.tryGet(documentId)).not.toEqual(
expect.objectContaining(initialPatch)
)
})

@@ -242,7 +246,7 @@ describe("docWritethrough", () => {
expect(queueMessageSpy).toHaveBeenCalledTimes(5)

await waitForQueueCompletion()
expect(await db.get(documentId)).toEqual(
expect(await db.tryGet(documentId)).toEqual(
expect.objectContaining(patches)
)

@@ -250,7 +254,7 @@ describe("docWritethrough", () => {
expect(queueMessageSpy).toHaveBeenCalledTimes(45)

await waitForQueueCompletion()
expect(await db.get(documentId)).toEqual(
expect(await db.tryGet(documentId)).toEqual(
expect.objectContaining(patches)
)

@@ -258,20 +262,18 @@ describe("docWritethrough", () => {
expect(queueMessageSpy).toHaveBeenCalledTimes(55)

await waitForQueueCompletion()
expect(await db.get(documentId)).toEqual(
expect(await db.tryGet(documentId)).toEqual(
expect.objectContaining(patches)
)
})
})

// This is not yet supported
// eslint-disable-next-line jest/no-disabled-tests
it.skip("patches will execute in order", async () => {
it("patches will execute in order", async () => {
let incrementalValue = 0
const keyToOverride = generator.word()
async function incrementalPatches(count: number) {
for (let i = 0; i < count; i++) {
await docWritethrough.patch({ [keyToOverride]: incrementalValue++ })
await docWritethrough.patch({ [keyToOverride]: ++incrementalValue })
}
}

@@ -279,13 +281,13 @@ describe("docWritethrough", () => {
await incrementalPatches(5)

await waitForQueueCompletion()
expect(await db.get(documentId)).toEqual(
expect(await db.tryGet(documentId)).toEqual(
expect.objectContaining({ [keyToOverride]: 5 })
)

await incrementalPatches(40)
await waitForQueueCompletion()
expect(await db.get(documentId)).toEqual(
expect(await db.tryGet(documentId)).toEqual(
expect.objectContaining({ [keyToOverride]: 45 })
)
})
@@ -10,7 +10,6 @@ import {
DatabaseQueryOpts,
DBError,
Document,
FeatureFlag,
isDocument,
RowResponse,
RowValue,

@@ -27,7 +26,6 @@ import { SQLITE_DESIGN_DOC_ID } from "../../constants"
import { DDInstrumentedDatabase } from "../instrumentation"
import { checkSlashesInUrl } from "../../helpers"
import { sqlLog } from "../../sql/utils"
import { flags } from "../../features"

const DATABASE_NOT_FOUND = "Database does not exist."

@@ -456,10 +454,7 @@ export class DatabaseImpl implements Database {
}

async destroy() {
if (
(await flags.isEnabled(FeatureFlag.SQS)) &&
(await this.exists(SQLITE_DESIGN_DOC_ID))
) {
if (await this.exists(SQLITE_DESIGN_DOC_ID)) {
// delete the design document, then run the cleanup operation
const definition = await this.get<SQLiteDefinition>(SQLITE_DESIGN_DOC_ID)
// remove all tables - save the definition then trigger a cleanup
@@ -225,6 +225,10 @@ const environment = {
OPENAI_API_KEY: process.env.OPENAI_API_KEY,
MIN_VERSION_WITHOUT_POWER_ROLE:
process.env.MIN_VERSION_WITHOUT_POWER_ROLE || "3.0.0",
DISABLE_CONTENT_SECURITY_POLICY: process.env.DISABLE_CONTENT_SECURITY_POLICY,
// stopgap migration strategy until we can ensure backwards compat without unsafe-inline in CSP
DISABLE_CSP_UNSAFE_INLINE_SCRIPTS:
process.env.DISABLE_CSP_UNSAFE_INLINE_SCRIPTS,
}

export function setEnv(newEnvVars: Partial<typeof environment>): () => void {
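A minimal sketch of how a setEnv helper with this signature is typically used in a test, assuming the override value below, which is illustrative and not taken from this commit:

// Temporarily override an environment value; setEnv returns a cleanup
// function that restores the previous state (per its signature above).
const restore = setEnv({ DISABLE_CSP_UNSAFE_INLINE_SCRIPTS: "1" })
try {
  // ...assertions that depend on the stricter CSP behaviour...
} finally {
  restore()
}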
@@ -269,10 +269,9 @@ export class FlagSet<V extends Flag<any>, T extends { [key: string]: V }> {
export const flags = new FlagSet({
[FeatureFlag.DEFAULT_VALUES]: Flag.boolean(true),
[FeatureFlag.AUTOMATION_BRANCHING]: Flag.boolean(true),
[FeatureFlag.SQS]: Flag.boolean(true),
[FeatureFlag.ENRICHED_RELATIONSHIPS]: Flag.boolean(true),
[FeatureFlag.AI_CUSTOM_CONFIGS]: Flag.boolean(env.isDev()),
[FeatureFlag.BUDIBASE_AI]: Flag.boolean(env.isDev()),
[FeatureFlag.AI_CUSTOM_CONFIGS]: Flag.boolean(true),
[FeatureFlag.BUDIBASE_AI]: Flag.boolean(true),
})

type UnwrapPromise<T> = T extends Promise<infer U> ? U : T
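For context, a hedged sketch of how these flags are read at runtime — the import paths are assumptions based on other files in this diff, and the wrapper function is hypothetical:

import { features } from "@budibase/backend-core"
import { FeatureFlag } from "@budibase/types"

// Hypothetical call site: with AI_CUSTOM_CONFIGS now defaulting to true,
// this resolves to true unless the flag is explicitly overridden.
export async function aiConfigsEnabled(): Promise<boolean> {
  return await features.flags.isEnabled(FeatureFlag.AI_CUSTOM_CONFIGS)
}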
@@ -0,0 +1,118 @@
import crypto from "crypto"
import env from "../environment"

const CSP_DIRECTIVES = {
"default-src": ["'self'"],
"script-src": [
"'self'",
"'unsafe-eval'",
"https://*.budibase.net",
"https://cdn.budi.live",
"https://js.intercomcdn.com",
"https://widget.intercom.io",
"https://d2l5prqdbvm3op.cloudfront.net",
"https://us-assets.i.posthog.com",
],
"style-src": [
"'self'",
"'unsafe-inline'",
"https://cdn.jsdelivr.net",
"https://fonts.googleapis.com",
"https://rsms.me",
"https://maxcdn.bootstrapcdn.com",
],
"object-src": ["'none'"],
"base-uri": ["'self'"],
"connect-src": [
"'self'",
"https://*.budibase.app",
"https://*.budibaseqa.app",
"https://*.budibase.net",
"https://api-iam.intercom.io",
"https://api-ping.intercom.io",
"https://app.posthog.com",
"https://us.i.posthog.com",
"wss://nexus-websocket-a.intercom.io",
"wss://nexus-websocket-b.intercom.io",
"https://nexus-websocket-a.intercom.io",
"https://nexus-websocket-b.intercom.io",
"https://uploads.intercomcdn.com",
"https://uploads.intercomusercontent.com",
"https://*.amazonaws.com",
"https://*.s3.amazonaws.com",
"https://*.s3.us-east-2.amazonaws.com",
"https://*.s3.us-east-1.amazonaws.com",
"https://*.s3.us-west-1.amazonaws.com",
"https://*.s3.us-west-2.amazonaws.com",
"https://*.s3.af-south-1.amazonaws.com",
"https://*.s3.ap-east-1.amazonaws.com",
"https://*.s3.ap-south-1.amazonaws.com",
"https://*.s3.ap-northeast-2.amazonaws.com",
"https://*.s3.ap-southeast-1.amazonaws.com",
"https://*.s3.ap-southeast-2.amazonaws.com",
"https://*.s3.ap-northeast-1.amazonaws.com",
"https://*.s3.ca-central-1.amazonaws.com",
"https://*.s3.cn-north-1.amazonaws.com",
"https://*.s3.cn-northwest-1.amazonaws.com",
"https://*.s3.eu-central-1.amazonaws.com",
"https://*.s3.eu-west-1.amazonaws.com",
"https://*.s3.eu-west-2.amazonaws.com",
"https://*.s3.eu-south-1.amazonaws.com",
"https://*.s3.eu-west-3.amazonaws.com",
"https://*.s3.eu-north-1.amazonaws.com",
"https://*.s3.sa-east-1.amazonaws.com",
"https://*.s3.me-south-1.amazonaws.com",
"https://*.s3.us-gov-east-1.amazonaws.com",
"https://*.s3.us-gov-west-1.amazonaws.com",
"https://api.github.com",
],
"font-src": [
"'self'",
"data:",
"https://cdn.jsdelivr.net",
"https://fonts.gstatic.com",
"https://rsms.me",
"https://maxcdn.bootstrapcdn.com",
"https://js.intercomcdn.com",
"https://fonts.intercomcdn.com",
],
"frame-src": ["'self'", "https:"],
"img-src": ["http:", "https:", "data:", "blob:"],
"manifest-src": ["'self'"],
"media-src": [
"'self'",
"https://js.intercomcdn.com",
"https://cdn.budi.live",
],
"worker-src": ["blob:"],
}

export async function contentSecurityPolicy(ctx: any, next: any) {
try {
const nonce = crypto.randomBytes(16).toString("base64")

const directives = { ...CSP_DIRECTIVES }
directives["script-src"] = [
...CSP_DIRECTIVES["script-src"],
`'nonce-${nonce}'`,
]

if (!env.DISABLE_CSP_UNSAFE_INLINE_SCRIPTS) {
directives["script-src"].push("'unsafe-inline'")
}

ctx.state.nonce = nonce

const cspHeader = Object.entries(directives)
.map(([key, sources]) => `${key} ${sources.join(" ")}`)
.join("; ")
ctx.set("Content-Security-Policy", cspHeader)
await next()
} catch (err: any) {
console.error(
`Error occurred in Content-Security-Policy middleware: ${err}`
)
}
}

export default contentSecurityPolicy
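A minimal sketch of wiring a nonce-emitting CSP middleware like the one above into a Koa app — the app setup and port are illustrative; only ctx.state.nonce and the header name come from this commit:

import Koa from "koa"
import csp from "./contentSecurityPolicy"

const app = new Koa()
app.use(csp)

// Inline scripts rendered downstream must echo the per-request nonce, or the
// browser will refuse to run them under the script-src directive.
app.use(async ctx => {
  ctx.body = `<script nonce="${ctx.state.nonce}">window.loadBudibase()</script>`
})

app.listen(4001) // port is illustrative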
@@ -19,5 +19,6 @@ export { default as pino } from "../logging/pino/middleware"
export { default as correlation } from "../logging/correlation/middleware"
export { default as errorHandling } from "./errorHandling"
export { default as querystringToBody } from "./querystringToBody"
export { default as csp } from "./contentSecurityPolicy"
export * as joiValidator from "./joi-validator"
export { default as ip } from "./ip"
@@ -0,0 +1,75 @@
import crypto from "crypto"
import contentSecurityPolicy from "../contentSecurityPolicy"

jest.mock("crypto", () => ({
randomBytes: jest.fn(),
randomUUID: jest.fn(),
}))

describe("contentSecurityPolicy middleware", () => {
let ctx: any
let next: any
const mockNonce = "mocked/nonce"

beforeEach(() => {
ctx = {
state: {},
set: jest.fn(),
}
next = jest.fn()
// @ts-ignore
crypto.randomBytes.mockReturnValue(Buffer.from(mockNonce, "base64"))
})

afterEach(() => {
jest.clearAllMocks()
})

it("should generate a nonce and set it in the script-src directive", async () => {
await contentSecurityPolicy(ctx, next)

expect(ctx.state.nonce).toBe(mockNonce)
expect(ctx.set).toHaveBeenCalledWith(
"Content-Security-Policy",
expect.stringContaining(
`script-src 'self' 'unsafe-eval' https://*.budibase.net https://cdn.budi.live https://js.intercomcdn.com https://widget.intercom.io https://d2l5prqdbvm3op.cloudfront.net https://us-assets.i.posthog.com 'nonce-${mockNonce}'`
)
)
expect(next).toHaveBeenCalled()
})

it("should include all CSP directives in the header", async () => {
await contentSecurityPolicy(ctx, next)

const cspHeader = ctx.set.mock.calls[0][1]
expect(cspHeader).toContain("default-src 'self'")
expect(cspHeader).toContain("script-src 'self' 'unsafe-eval'")
expect(cspHeader).toContain("style-src 'self' 'unsafe-inline'")
expect(cspHeader).toContain("object-src 'none'")
expect(cspHeader).toContain("base-uri 'self'")
expect(cspHeader).toContain("connect-src 'self'")
expect(cspHeader).toContain("font-src 'self'")
expect(cspHeader).toContain("frame-src 'self'")
expect(cspHeader).toContain("img-src http: https: data: blob:")
expect(cspHeader).toContain("manifest-src 'self'")
expect(cspHeader).toContain("media-src 'self'")
expect(cspHeader).toContain("worker-src blob:")
})

it("should handle errors and log an error message", async () => {
const consoleSpy = jest.spyOn(console, "error").mockImplementation()
const error = new Error("Test error")
// @ts-ignore
crypto.randomBytes.mockImplementation(() => {
throw error
})

await contentSecurityPolicy(ctx, next)

expect(consoleSpy).toHaveBeenCalledWith(
`Error occurred in Content-Security-Policy middleware: ${error}`
)
expect(next).not.toHaveBeenCalled()
consoleSpy.mockRestore()
})
})
@@ -1,5 +1,5 @@
import events from "events"
import { newid, timeout } from "../utils"
import { newid } from "../utils"
import { Queue, QueueOptions, JobOptions } from "./queue"

interface JobMessage {

@@ -141,7 +141,7 @@ class InMemoryQueue implements Partial<Queue> {
} else {
pushMessage()
}
return {} as any
return { id: jobId } as any
}

/**

@@ -184,16 +184,6 @@ class InMemoryQueue implements Partial<Queue> {
// do nothing
return this as any
}

async waitForCompletion() {
do {
await timeout(50)
} while (this.hasRunningJobs())
}

hasRunningJobs() {
return this._addCount > this._runCount
}
}

export default InMemoryQueue
@@ -15,7 +15,7 @@ const QUEUE_LOCK_MS = Duration.fromMinutes(5).toMs()
const QUEUE_LOCK_RENEW_INTERNAL_MS = Duration.fromSeconds(30).toMs()
// cleanup the queue every 60 seconds
const CLEANUP_PERIOD_MS = Duration.fromSeconds(60).toMs()
let QUEUES: BullQueue.Queue[] | InMemoryQueue[] = []
let QUEUES: BullQueue.Queue[] = []
let cleanupInterval: NodeJS.Timeout

async function cleanup() {

@@ -45,11 +45,18 @@ export function createQueue<T>(
if (opts.jobOptions) {
queueConfig.defaultJobOptions = opts.jobOptions
}
let queue: any
let queue: BullQueue.Queue<T>
if (!env.isTest()) {
queue = new BullQueue(jobQueue, queueConfig)
} else if (
process.env.BULL_TEST_REDIS_PORT &&
!isNaN(+process.env.BULL_TEST_REDIS_PORT)
) {
queue = new BullQueue(jobQueue, {
redis: { host: "localhost", port: +process.env.BULL_TEST_REDIS_PORT },
})
} else {
queue = new InMemoryQueue(jobQueue, queueConfig)
queue = new InMemoryQueue(jobQueue, queueConfig) as any
}
addListeners(queue, jobQueue, opts?.removeStalledCb)
QUEUES.push(queue)
@@ -4,3 +4,4 @@ export { generator } from "./structures"
export * as testContainerUtils from "./testContainerUtils"
export * as utils from "./utils"
export * from "./jestUtils"
export * as queue from "./queue"
@@ -0,0 +1,9 @@
import { Queue } from "bull"

export async function processMessages(queue: Queue) {
do {
await queue.whenCurrentJobsFinished()
} while (await queue.count())

await queue.whenCurrentJobsFinished()
}
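The loop-plus-final-wait shape matters because processed jobs can themselves enqueue follow-up jobs. A hedged usage sketch — the queue name and Redis URL are illustrative:

import Bull from "bull"
import { processMessages } from "./queue"

const queue = new Bull("example-queue", "redis://127.0.0.1:6379")

async function drainBeforeAsserting() {
  await queue.add({ some: "work" })
  // Blocks until the backlog, including any jobs enqueued by other jobs,
  // has been fully processed.
  await processMessages(queue)
}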
@@ -1,4 +1,6 @@
import { execSync } from "child_process"
import { cloneDeep } from "lodash"
import { GenericContainer, StartedTestContainer } from "testcontainers"

const IPV4_PORT_REGEX = new RegExp(`0\\.0\\.0\\.0:(\\d+)->(\\d+)/tcp`, "g")

@@ -106,3 +108,58 @@ export function setupEnv(...envs: any[]) {
}
}
}

export async function startContainer(container: GenericContainer) {
const imageName = (container as any).imageName.string as string
let key: string = imageName
if (imageName.includes("@sha256")) {
key = imageName.split("@")[0]
}
key = key.replace(/\//g, "-").replace(/:/g, "-")

container = container
.withReuse()
.withLabels({ "com.budibase": "true" })
.withName(`${key}_testcontainer`)

let startedContainer: StartedTestContainer | undefined = undefined
let lastError = undefined
for (let i = 0; i < 10; i++) {
try {
// container.start() is not an idempotent operation, calling `start`
// modifies the internal state of a GenericContainer instance such that
// the hash it uses to determine reuse changes. We need to clone the
// container before calling start to ensure that we're using the same
// reuse hash every time.
const containerCopy = cloneDeep(container)
startedContainer = await containerCopy.start()
lastError = undefined
break
} catch (e: any) {
lastError = e
await new Promise(resolve => setTimeout(resolve, 1000))
}
}

if (!startedContainer) {
if (lastError) {
throw lastError
}
throw new Error(`failed to start container: ${imageName}`)
}

const info = getContainerById(startedContainer.getId())
if (!info) {
throw new Error("Container not found")
}

// Some Docker runtimes, when you expose a port, will bind it to both
// 127.0.0.1 and ::1, so ipv4 and ipv6. The port spaces of ipv4 and ipv6
// addresses are not shared, and testcontainers will sometimes give you back
// the ipv6 port. There's no way to know that this has happened, and if you
// try to then connect to `localhost:port` you may attempt to bind to the v4
// address which could be unbound or even an entirely different container. For
// that reason, we don't use testcontainers' `getExposedPort` function,
// preferring instead our own method that guarantees v4 ports.
return getExposedV4Ports(info)
}
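A hedged usage sketch of the new helper — the image and port mirror the Redis usage later in this diff, while the wrapper function itself is illustrative:

import { GenericContainer } from "testcontainers"
import { startContainer } from "./testContainerUtils"

async function startRedisForTests(): Promise<number> {
  // Reuse-friendly start with retries; returns IPv4 host/container port pairs.
  const ports = await startContainer(
    new GenericContainer("redis").withExposedPorts(6379)
  )
  const hostPort = ports.find(p => p.container === 6379)?.host
  if (!hostPort) {
    throw new Error("Redis port not mapped")
  }
  return hostPort
}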
@@ -1 +1,2 @@
export * as time from "./time"
export * as queue from "./queue"
@@ -0,0 +1,27 @@
import { Queue } from "bull"
import { GenericContainer, Wait } from "testcontainers"
import { startContainer } from "../testContainerUtils"

export async function useRealQueues() {
const ports = await startContainer(
new GenericContainer("redis")
.withExposedPorts(6379)
.withWaitStrategy(
Wait.forSuccessfulCommand(`redis-cli`).withStartupTimeout(10000)
)
)

const port = ports.find(x => x.container === 6379)?.host
if (!port) {
throw new Error("Redis port not found")
}
process.env.BULL_TEST_REDIS_PORT = port.toString()
}

export async function processMessages(queue: Queue) {
do {
await queue.whenCurrentJobsFinished()
} while (await queue.count())

await queue.whenCurrentJobsFinished()
}
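How the two helpers combine in a suite, mirroring the docWritethrough spec changes earlier in this diff — import paths are as used there, and the assertion body is elided:

import { utils } from "../../../tests"
import { DocWritethroughProcessor } from "../docWritethrough"

beforeAll(() => utils.queue.useRealQueues())

it("processes writes through a real Bull queue", async () => {
  // ...enqueue work via the system under test...
  await utils.queue.processMessages(DocWritethroughProcessor.queue)
  // ...assert on the persisted result...
})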
@@ -8,6 +8,7 @@
import Link from "../../Link/Link.svelte"
import Tag from "../../Tags/Tag.svelte"
import Tags from "../../Tags/Tags.svelte"
import ProgressCircle from "../../ProgressCircle/ProgressCircle.svelte"

const BYTES_IN_KB = 1000
const BYTES_IN_MB = 1000000

@@ -39,12 +40,14 @@
"jfif",
"webp",
]

const fieldId = id || uuid()

let selectedImageIdx = 0
let fileDragged = false
let selectedUrl
let fileInput
let loading = false

$: selectedImage = value?.[selectedImageIdx] ?? null
$: fileCount = value?.length ?? 0
$: isImage =

@@ -86,10 +89,15 @@
}

if (processFiles) {
loading = true
try {
const processedFiles = await processFiles(fileList)
const newValue = [...value, ...processedFiles]
dispatch("change", newValue)
selectedImageIdx = newValue.length - 1
} finally {
loading = false
}
} else {
dispatch("change", fileList)
}

@@ -227,7 +235,7 @@
{#if showDropzone}
<div
class="spectrum-Dropzone"
class:disabled
class:disabled={disabled || loading}
role="region"
tabindex="0"
on:dragover={handleDragOver}

@@ -241,7 +249,7 @@
id={fieldId}
{disabled}
type="file"
multiple
multiple={maximum !== 1}
accept={extensions}
bind:this={fileInput}
on:change={handleFile}

@@ -339,6 +347,12 @@
{/if}
{/if}
</div>

{#if loading}
<div class="loading">
<ProgressCircle size="M" />
</div>
{/if}
</div>
{/if}
</div>

@@ -464,6 +478,7 @@

.spectrum-Dropzone {
height: 220px;
position: relative;
}
.compact .spectrum-Dropzone {
height: 40px;

@@ -488,4 +503,14 @@
.tag {
margin-top: 8px;
}

.loading {
position: absolute;
display: grid;
place-items: center;
height: 100%;
width: 100%;
top: 0;
left: 0;
}
</style>
@@ -4,12 +4,10 @@
Layout,
Heading,
Body,
Button,
Divider,
notifications,
Label,
Modal,
ModalContent,
Link,
} from "@budibase/bbui"
import { API } from "api"
import { auth, admin } from "stores/portal"

@@ -21,8 +19,6 @@
let githubVersion
let githubPublishedDate
let githubPublishedTime
let needsUpdate = true
let updateModal

// Only admins allowed here
$: {

@@ -31,21 +27,6 @@
}
}

async function updateBudibase() {
try {
notifications.info("Updating budibase..")
await fetch("/v1/update", {
headers: {
Authorization: "Bearer budibase",
},
})
notifications.success("Your budibase installation is up to date.")
getVersion()
} catch (err) {
notifications.error(`Error installing budibase update ${err}`)
}
}

async function getVersion() {
try {
version = await API.getBudibaseVersion()

@@ -69,13 +50,6 @@
githubPublishedDate = new Date(githubResponse.published_at)
githubPublishedTime = githubPublishedDate.toLocaleTimeString()
githubPublishedDate = githubPublishedDate.toLocaleDateString()

//Does Budibase need to be updated?
if (githubVersion === version) {
needsUpdate = false
} else {
needsUpdate = true
}
} catch (error) {
notifications.error("Error getting the latest Budibase version")
githubVersion = null

@@ -115,23 +89,15 @@
>
</Layout>
<Divider />
<div>
<Button cta on:click={updateModal.show} disabled={!needsUpdate}
>Update Budibase</Button
<Layout noPadding gap="XS">
<Heading>Updating Budibase</Heading>
<Body
>To update your self-host installation, follow the docs found <Link
size="L"
href="https://docs.budibase.com/docs/updating-budibase">here.</Link
></Body
>
<Modal bind:this={updateModal}>
<ModalContent
title="Update Budibase"
confirmText="Update"
onConfirm={updateBudibase}
>
<span
>Are you sure you want to update your budibase installation to the
latest version?</span
>
</ModalContent>
</Modal>
</div>
</Layout>
{/if}
</Layout>
{/if}
@@ -53,6 +53,7 @@
on:close={close}
maxHeight={null}
resizable
minWidth={360}
>
<div class="content">
<slot />

@@ -80,7 +81,6 @@
}

.content {
width: 300px;
padding: 20px;
display: flex;
flex-direction: column;
@@ -5,6 +5,7 @@ export default class NestedProviderFetch extends DataFetch {
// Nested providers should already have exposed their own schema
return {
schema: datasource?.value?.schema,
primaryDisplay: datasource?.value?.primaryDisplay,
}
}
@@ -1 +1 @@
Subproject commit 04bee88597edb1edb88ed299d0597b587f0362ec
Subproject commit a56696a4af5667617746600fc75fe6a01744b692
@@ -153,7 +153,11 @@ async function createInstance(appId: string, template: AppTemplate) {
await createAllSearchIndex()

if (template && template.useTemplate) {
await sdk.backups.importApp(appId, db, template)
const opts = {
importObjStoreContents: true,
updateAttachmentColumns: !template.key, // preserve attachments when using Budibase templates
}
await sdk.backups.importApp(appId, db, template, opts)
} else {
// create the users table
await db.put(USERS_TABLE_SCHEMA)
@@ -209,6 +209,7 @@ export const serveApp = async function (ctx: UserCtx) {
? objectStore.getGlobalFileUrl("settings", "logoUrl")
: "",
appMigrating: needMigrations,
nonce: ctx.state.nonce,
})
const appHbs = loadHandlebarsFile(appHbsPath)
ctx.body = await processString(appHbs, {

@@ -217,6 +218,7 @@ export const serveApp = async function (ctx: UserCtx) {
css: `:root{${themeVariables}} ${css.code}`,
appId,
embedded: bbHeaderEmbed,
nonce: ctx.state.nonce,
})
} else {
// just return the app info for jest to assert on

@@ -258,6 +260,7 @@ export const serveBuilderPreview = async function (ctx: Ctx) {
const previewHbs = loadHandlebarsFile(join(previewLoc, "preview.hbs"))
ctx.body = await processString(previewHbs, {
clientLibPath: objectStore.clientLibraryUrl(appId!, appInfo.version),
nonce: ctx.state.nonce,
})
} else {
// just return the app info for jest to assert on
@@ -16,6 +16,8 @@
export let hideDevTools
export let sideNav
export let hideFooter

export let nonce
</script>

<svelte:head>

@@ -118,11 +120,11 @@
<p />
{/if}
</div>
<script type="application/javascript">
<script type="application/javascript" {nonce}>
window.INIT_TIME = Date.now()
</script>
{#if appMigrating}
<script type="application/javascript">
<script type="application/javascript" {nonce}>
window.MIGRATING_APP = true
</script>
{/if}

@@ -135,7 +137,7 @@
<script type="application/javascript" src={plugin.jsUrl}></script>
{/each}
{/if}
<script type="application/javascript">
<script type="application/javascript" {nonce}>
if (window.loadBudibase) {
window.loadBudibase()
} else {
@@ -1,5 +1,5 @@
<html>
<script>
<script nonce="{{ nonce }}">
document.fonts.ready.then(() => {
window.parent.postMessage({ type: "docLoaded" });
})

@@ -9,7 +9,7 @@
<style>{{{css}}}</style>
</head>

<script>
<script nonce="{{ nonce }}">
window["##BUDIBASE_APP_ID##"] = "{{appId}}"
window["##BUDIBASE_APP_EMBEDDED##"] = "{{embedded}}"
</script>

@@ -31,7 +31,7 @@
}
</style>
<script src='{{ clientLibPath }}'></script>
<script>
<script nonce="{{ nonce }}">
function receiveMessage(event) {
if (!event.data) {
return
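Tying the template changes together: the middleware generates a per-request nonce, the controller passes it into the handlebars context, and each inline script tag echoes it. A self-contained stand-in for that last step, with illustrative names:

// Minimal stand-in for the template rendering above: {{ nonce }} in the
// .hbs files is filled from ctx.state.nonce at request time.
function renderInlineScript(nonce: string, body: string): string {
  return `<script nonce="${nonce}">${body}</script>`
}

// e.g. renderInlineScript(ctx.state.nonce, "window.INIT_TIME = Date.now()")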
@@ -15,12 +15,11 @@ import { getViews, saveView } from "../view/utils"
import viewTemplate from "../view/viewBuilder"
import { cloneDeep } from "lodash/fp"
import { quotas } from "@budibase/pro"
import { context, events, features, HTTPError } from "@budibase/backend-core"
import { context, events, HTTPError } from "@budibase/backend-core"
import {
AutoFieldSubType,
Database,
Datasource,
FeatureFlag,
FieldSchema,
FieldType,
NumberFieldMetadata,

@@ -336,9 +335,8 @@ class TableSaveFunctions {
importRows: this.importRows,
userId: this.userId,
})
if (await features.flags.isEnabled(FeatureFlag.SQS)) {

await sdk.tables.sqs.addTable(table)
}
return table
}

@@ -530,9 +528,8 @@ export async function internalTableCleanup(table: Table, rows?: Row[]) {
if (rows) {
await AttachmentCleanup.tableDelete(table, rows)
}
if (await features.flags.isEnabled(FeatureFlag.SQS)) {

await sdk.tables.sqs.removeTable(table)
}
}

const _TableSaveFunctions = TableSaveFunctions
@@ -16,7 +16,7 @@ jest.mock("../../../utilities/redis", () => ({
import { checkBuilderEndpoint } from "./utilities/TestFunctions"
import * as setup from "./utilities"
import { AppStatus } from "../../../db/utils"
import { events, utils, context, features } from "@budibase/backend-core"
import { events, utils, context } from "@budibase/backend-core"
import env from "../../../environment"
import { type App, BuiltinPermissionID } from "@budibase/types"
import tk from "timekeeper"

@@ -355,21 +355,6 @@ describe("/applications", () => {
expect(events.app.deleted).toHaveBeenCalledTimes(1)
expect(events.app.unpublished).toHaveBeenCalledTimes(1)
})

it("should be able to delete an app after SQS has been set but app hasn't been migrated", async () => {
const prodAppId = app.appId.replace("_dev", "")
nock("http://localhost:10000")
.delete(`/api/global/roles/${prodAppId}`)
.reply(200, {})

await features.testutils.withFeatureFlags(
"*",
{ SQS: true },
async () => {
await config.api.application.delete(app.appId)
}
)
})
})

describe("POST /api/applications/:appId/duplicate", () => {
@@ -19,17 +19,14 @@ import {
import { quotas } from "@budibase/pro"
import {
AIOperationEnum,
AttachmentFieldMetadata,
AutoFieldSubType,
Datasource,
DateFieldMetadata,
DeleteRow,
FieldSchema,
FieldType,
BBReferenceFieldSubType,
FormulaType,
INTERNAL_TABLE_SOURCE_ID,
NumberFieldMetadata,
QuotaUsageType,
RelationshipType,
Row,

@@ -90,8 +87,7 @@ async function waitForEvent(
}

describe.each([
["lucene", undefined],
["sqs", undefined],
["internal", undefined],
[DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
[DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
[DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],

@@ -99,8 +95,6 @@ describe.each([
[DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)],
])("/rows (%s)", (providerType, dsProvider) => {
const isInternal = dsProvider === undefined
const isLucene = providerType === "lucene"
const isSqs = providerType === "sqs"
const isMSSQL = providerType === DatabaseName.SQL_SERVER
const isOracle = providerType === DatabaseName.ORACLE
const config = setup.getConfig()

@@ -108,15 +102,9 @@ describe.each([
let table: Table
let datasource: Datasource | undefined
let client: Knex | undefined
let envCleanup: (() => void) | undefined

beforeAll(async () => {
await features.testutils.withFeatureFlags("*", { SQS: true }, () =>
config.init()
)
envCleanup = features.testutils.setFeatureFlags("*", {
SQS: isSqs,
})
await config.init()

if (dsProvider) {
const rawDatasource = await dsProvider

@@ -129,9 +117,6 @@ describe.each([

afterAll(async () => {
setup.afterAll()
if (envCleanup) {
envCleanup()
}
})

function saveTableRequest(

@@ -381,185 +366,6 @@ describe.each([
expect(ids).toEqual(expect.arrayContaining(sequence))
})

isLucene &&
it("row values are coerced", async () => {
const str: FieldSchema = {
type: FieldType.STRING,
name: "str",
constraints: { type: "string", presence: false },
}
const singleAttachment: FieldSchema = {
type: FieldType.ATTACHMENT_SINGLE,
name: "single attachment",
constraints: { presence: false },
}
const attachmentList: AttachmentFieldMetadata = {
type: FieldType.ATTACHMENTS,
name: "attachments",
constraints: { type: "array", presence: false },
}
const signature: FieldSchema = {
type: FieldType.SIGNATURE_SINGLE,
name: "signature",
constraints: { presence: false },
}
const bool: FieldSchema = {
type: FieldType.BOOLEAN,
name: "boolean",
constraints: { type: "boolean", presence: false },
}
const number: NumberFieldMetadata = {
type: FieldType.NUMBER,
name: "str",
constraints: { type: "number", presence: false },
}
const datetime: DateFieldMetadata = {
type: FieldType.DATETIME,
name: "datetime",
constraints: {
type: "string",
presence: false,
datetime: { earliest: "", latest: "" },
},
}
const arrayField: FieldSchema = {
type: FieldType.ARRAY,
constraints: {
type: JsonFieldSubType.ARRAY,
presence: false,
inclusion: ["One", "Two", "Three"],
},
name: "Sample Tags",
sortable: false,
}
const optsField: FieldSchema = {
name: "Sample Opts",
type: FieldType.OPTIONS,
constraints: {
type: "string",
presence: false,
inclusion: ["Alpha", "Beta", "Gamma"],
},
}
const table = await config.api.table.save(
saveTableRequest({
schema: {
name: str,
stringUndefined: str,
stringNull: str,
stringString: str,
numberEmptyString: number,
numberNull: number,
numberUndefined: number,
numberString: number,
numberNumber: number,
datetimeEmptyString: datetime,
datetimeNull: datetime,
datetimeUndefined: datetime,
datetimeString: datetime,
datetimeDate: datetime,
boolNull: bool,
boolEmpty: bool,
boolUndefined: bool,
boolString: bool,
boolBool: bool,
singleAttachmentNull: singleAttachment,
singleAttachmentUndefined: singleAttachment,
attachmentListNull: attachmentList,
attachmentListUndefined: attachmentList,
attachmentListEmpty: attachmentList,
attachmentListEmptyArrayStr: attachmentList,
signatureNull: signature,
signatureUndefined: signature,
arrayFieldEmptyArrayStr: arrayField,
arrayFieldArrayStrKnown: arrayField,
arrayFieldNull: arrayField,
arrayFieldUndefined: arrayField,
optsFieldEmptyStr: optsField,
optsFieldUndefined: optsField,
optsFieldNull: optsField,
optsFieldStrKnown: optsField,
},
})
)

const datetimeStr = "1984-04-20T00:00:00.000Z"

const row = await config.api.row.save(table._id!, {
name: "Test Row",
stringUndefined: undefined,
stringNull: null,
stringString: "i am a string",
numberEmptyString: "",
numberNull: null,
numberUndefined: undefined,
numberString: "123",
numberNumber: 123,
datetimeEmptyString: "",
datetimeNull: null,
datetimeUndefined: undefined,
datetimeString: datetimeStr,
datetimeDate: new Date(datetimeStr),
boolNull: null,
boolEmpty: "",
boolUndefined: undefined,
boolString: "true",
boolBool: true,
tableId: table._id,
singleAttachmentNull: null,
singleAttachmentUndefined: undefined,
attachmentListNull: null,
attachmentListUndefined: undefined,
attachmentListEmpty: "",
attachmentListEmptyArrayStr: "[]",
signatureNull: null,
signatureUndefined: undefined,
arrayFieldEmptyArrayStr: "[]",
arrayFieldUndefined: undefined,
arrayFieldNull: null,
arrayFieldArrayStrKnown: "['One']",
optsFieldEmptyStr: "",
optsFieldUndefined: undefined,
optsFieldNull: null,
optsFieldStrKnown: "Alpha",
})

expect(row.stringUndefined).toBe(undefined)
expect(row.stringNull).toBe(null)
expect(row.stringString).toBe("i am a string")
expect(row.numberEmptyString).toBe(null)
expect(row.numberNull).toBe(null)
expect(row.numberUndefined).toBe(undefined)
expect(row.numberString).toBe(123)
expect(row.numberNumber).toBe(123)
expect(row.datetimeEmptyString).toBe(null)
expect(row.datetimeNull).toBe(null)
expect(row.datetimeUndefined).toBe(undefined)
expect(row.datetimeString).toBe(new Date(datetimeStr).toISOString())
expect(row.datetimeDate).toBe(new Date(datetimeStr).toISOString())
expect(row.boolNull).toBe(null)
expect(row.boolEmpty).toBe(null)
expect(row.boolUndefined).toBe(undefined)
expect(row.boolString).toBe(true)
expect(row.boolBool).toBe(true)
expect(row.singleAttachmentNull).toEqual(null)
expect(row.singleAttachmentUndefined).toBe(undefined)
expect(row.attachmentListNull).toEqual([])
expect(row.attachmentListUndefined).toBe(undefined)
expect(row.attachmentListEmpty).toEqual([])
expect(row.attachmentListEmptyArrayStr).toEqual([])
expect(row.signatureNull).toEqual(null)
expect(row.signatureUndefined).toBe(undefined)
expect(row.arrayFieldEmptyArrayStr).toEqual([])
expect(row.arrayFieldNull).toEqual([])
expect(row.arrayFieldUndefined).toEqual(undefined)
expect(row.optsFieldEmptyStr).toEqual(null)
expect(row.optsFieldUndefined).toEqual(undefined)
expect(row.optsFieldNull).toEqual(null)
expect(row.arrayFieldArrayStrKnown).toEqual(["One"])
expect(row.optsFieldStrKnown).toEqual("Alpha")
})

isInternal &&
it("doesn't allow creating in user table", async () => {
const response = await config.api.row.save(

@@ -1023,7 +829,6 @@ describe.each([
})
})

!isLucene &&
describe("relations to same table", () => {
let relatedRows: Row[]

@@ -1224,7 +1029,6 @@ describe.each([
expect(rows).toHaveLength(1)
})

!isLucene &&
describe("relations to same table", () => {
let relatedRows: Row[]

@@ -1628,7 +1432,6 @@ describe.each([
expect(res.length).toEqual(2)
})

!isLucene &&
describe("relations to same table", () => {
let relatedRows: Row[]

@@ -3422,7 +3225,7 @@ describe.each([
)
})

isSqs &&
isInternal &&
describe("AI fields", () => {
let table: Table
|
|
|
@@ -8,7 +8,6 @@ import {
  context,
  db as dbCore,
  docIds,
- features,
  MAX_VALID_DATE,
  MIN_VALID_DATE,
  SQLITE_DESIGN_DOC_ID,

@@ -64,7 +63,6 @@ jest.mock("@budibase/pro", () => ({

describe.each([
  ["in-memory", undefined],
- ["lucene", undefined],
  ["sqs", undefined],
  [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
  [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],

@@ -72,15 +70,12 @@ describe.each([
  [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
  [DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)],
])("search (%s)", (name, dsProvider) => {
  const isSqs = name === "sqs"
- const isLucene = name === "lucene"
  const isInMemory = name === "in-memory"
- const isInternal = isSqs || isLucene || isInMemory
+ const isInternal = !dsProvider
  const isOracle = name === DatabaseName.ORACLE
- const isSql = !isInMemory && !isLucene
+ const isSql = !isInMemory
  const config = setup.getConfig()

- let envCleanup: (() => void) | undefined
  let datasource: Datasource | undefined
  let client: Knex | undefined
  let tableOrViewId: string

@@ -111,12 +106,7 @@ describe.each([
  }

  beforeAll(async () => {
-   await features.testutils.withFeatureFlags("*", { SQS: true }, () =>
-     config.init()
-   )
-   envCleanup = features.testutils.setFeatureFlags("*", {
-     SQS: isSqs,
-   })
+   await config.init()

    if (config.app?.appId) {
      config.app = await config.api.application.update(config.app?.appId, {

@@ -140,9 +130,6 @@ describe.each([

  afterAll(async () => {
    setup.afterAll()
-   if (envCleanup) {
-     envCleanup()
-   }
  })

  async function createTable(schema?: TableSchema) {

@@ -221,11 +208,6 @@ describe.each([
  ])("from %s", (sourceType, createTableOrView) => {
    const isView = sourceType === "view"

-   if (isView && isLucene) {
-     // Some tests don't have the expected result in views via lucene, and given that it is getting deprecated, we exclude them from the tests
-     return
-   }

    class SearchAssertion {
      constructor(private readonly query: SearchRowRequest) {}

@@ -598,7 +580,6 @@ describe.each([
      ])
    })

-   !isLucene &&
    it("should return all rows matching the session user firstname when logical operator used", async () => {
      await expectQuery({
        $and: {

@@ -1034,7 +1015,6 @@ describe.each([
      }).toFindNothing()
    })

-   !isLucene &&
    it("ignores low if it's an empty object", async () => {
      await expectQuery({
        // @ts-ignore

@@ -1042,7 +1022,6 @@ describe.each([
      }).toContainExactly([{ name: "foo" }, { name: "bar" }])
    })

-   !isLucene &&
    it("ignores high if it's an empty object", async () => {
      await expectQuery({
        // @ts-ignore

@@ -1202,10 +1181,6 @@ describe.each([
      await expectQuery({ oneOf: { age: [2] } }).toFindNothing()
    })

-   // I couldn't find a way to make this work in Lucene and given that
-   // we're getting rid of Lucene soon I wasn't inclined to spend time on
-   // it.
-   !isLucene &&
    it("can convert from a string", async () => {
      await expectQuery({
        oneOf: {

@@ -1215,10 +1190,6 @@ describe.each([
      }).toContainExactly([{ age: 1 }])
    })

-   // I couldn't find a way to make this work in Lucene and given that
-   // we're getting rid of Lucene soon I wasn't inclined to spend time on
-   // it.
-   !isLucene &&
    it("can find multiple values for same column", async () => {
      await expectQuery({
        oneOf: {

@@ -1648,7 +1619,8 @@ describe.each([
      })
    })

- isSqs &&
+ isInternal &&
+ !isInMemory &&
  describe("AI Column", () => {
    const UNEXISTING_AI_COLUMN = "Real LLM Response"

@@ -1879,10 +1851,6 @@ describe.each([
      })
    })

-   // Range searches against bigints don't seem to work at all in Lucene, and I
-   // couldn't figure out why. Given that we're replacing Lucene with SQS,
-   // we've decided not to spend time on it.
-   !isLucene &&
    describe("range", () => {
      it("successfully finds a row", async () => {
        await expectQuery({

@@ -2016,14 +1984,12 @@ describe.each([
        }).toFindNothing()
      })

-     isSqs &&
      it("can search using just a low value", async () => {
        await expectQuery({
          range: { auto: { low: 9 } },
        }).toContainExactly([{ auto: 9 }, { auto: 10 }])
      })

-     isSqs &&
      it("can search using just a high value", async () => {
        await expectQuery({
          range: { auto: { high: 2 } },

@@ -2031,13 +1997,13 @@ describe.each([
      })
    })

    isSqs &&
    describe("sort", () => {
      it("sorts ascending", async () => {
        await expectSearch({
          query: {},
          sort: "auto",
          sortOrder: SortOrder.ASCENDING,
          sortType: SortType.NUMBER,
        }).toMatchExactly([
          { auto: 1 },
          { auto: 2 },

@@ -2057,6 +2023,7 @@ describe.each([
          query: {},
          sort: "auto",
          sortOrder: SortOrder.DESCENDING,
          sortType: SortType.NUMBER,
        }).toMatchExactly([
          { auto: 10 },
          { auto: 9 },

@@ -2392,8 +2359,6 @@ describe.each([
      })
    })

-   // This will never work for Lucene.
-   !isLucene &&
    // It also can't work for in-memory searching because the related table name
    // isn't available.
    !isInMemory &&

@@ -2847,8 +2812,6 @@ describe.each([
      })
    })

-   // lucene can't count the total rows
-   !isLucene &&
    describe("row counting", () => {
      beforeAll(async () => {
        tableOrViewId = await createTableOrView({

@@ -3065,9 +3028,7 @@ describe.each([
      })
    })

-   // This was never actually supported in Lucene but SQS does support it, so may
-   // as well have a test for it.
-   ;(isSqs || isInMemory) &&
+   isInternal &&
    describe("space at start of column name", () => {
      beforeAll(async () => {
        tableOrViewId = await createTableOrView({

@@ -3100,7 +3061,7 @@ describe.each([
      })
    })

-   isSqs &&
+   isInternal &&
    !isView &&
    describe("duplicate columns", () => {
      beforeAll(async () => {

@@ -3262,7 +3223,6 @@ describe.each([
      })
    })

-   !isLucene &&
    describe("$and", () => {
      beforeAll(async () => {
        tableOrViewId = await createTableOrView({

@@ -3340,10 +3300,7 @@ describe.each([
        await expect(
          expectQuery({
            $and: {
-             conditions: [
-               { equal: { age: 10 } },
-               "invalidCondition" as any,
-             ],
+             conditions: [{ equal: { age: 10 } }, "invalidCondition" as any],
            },
          }).toFindNothing()
        ).rejects.toThrow(

@@ -3396,7 +3353,6 @@ describe.each([
      })
    })

-   !isLucene &&
    describe("$or", () => {
      beforeAll(async () => {
        tableOrViewId = await createTableOrView({

@@ -3590,8 +3546,7 @@ describe.each([
      })
    })

-   isSql &&
-   !isSqs &&
+   !isInternal &&
    describe("SQL injection", () => {
      const badStrings = [
        "1; DROP TABLE %table_name%;",
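
Reviewer note: the `isInternal = !dsProvider` simplification above leans on the shape of the `describe.each` table, where internal engines pass `undefined` as the datasource provider and external ones pass a promider factory. A minimal sketch of that convention, with hypothetical names standing in for Budibase's real helpers:

```ts
type DatasourceProvider = () => Promise<unknown>

// Each tuple is [display name, provider]; internal engines have no provider.
const cases: [string, DatasourceProvider | undefined][] = [
  ["in-memory", undefined],
  ["sqs", undefined],
  ["postgres", async () => ({ host: "localhost" })], // hypothetical provider
]

describe.each(cases)("search (%s)", (name, dsProvider) => {
  // A single truthiness check replaces per-engine flags like isLucene/isSqs.
  const isInternal = !dsProvider

  it("classifies the engine", () => {
    expect(isInternal).toBe(name === "in-memory" || name === "sqs")
  })
})
```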
@@ -2,7 +2,6 @@ import * as setup from "./utilities"
import path from "path"
import nock from "nock"
import { generator } from "@budibase/backend-core/tests"
- import { features } from "@budibase/backend-core"

interface App {
  background: string

@@ -82,13 +81,7 @@ describe("/templates", () => {
  })

  describe("create app from template", () => {
-   it.each(["sqs", "lucene"])(
-     `should be able to create an app from a template (%s)`,
-     async source => {
-       await features.testutils.withFeatureFlags(
-         "*",
-         { SQS: source === "sqs" },
-         async () => {
+   it("should be able to create an app from a template", async () => {
      const name = generator.guid().replaceAll("-", "")
      const url = `/${name}`

@@ -111,19 +104,13 @@ describe("/templates", () => {
      expect(agencyProjects.name).toBe("Agency Projects")
      expect(users.name).toBe("Users")

-     const { rows } = await config.api.row.search(
-       agencyProjects._id!,
-       {
+     const { rows } = await config.api.row.search(agencyProjects._id!, {
        tableId: agencyProjects._id!,
        query: {},
-     }
-     )
+     })

      expect(rows).toHaveLength(3)
    })
-   }
-   )
-   }
-   )
  })
})
})
@@ -43,7 +43,6 @@ import { quotas } from "@budibase/pro"
import { db, roles, features, context } from "@budibase/backend-core"

describe.each([
- ["lucene", undefined],
  ["sqs", undefined],
  [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
  [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],

@@ -52,14 +51,11 @@ describe.each([
  [DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)],
])("/v2/views (%s)", (name, dsProvider) => {
  const config = setup.getConfig()
- const isSqs = name === "sqs"
- const isLucene = name === "lucene"
- const isInternal = isSqs || isLucene
+ const isInternal = name === "sqs"

  let table: Table
  let rawDatasource: Datasource | undefined
  let datasource: Datasource | undefined
- let envCleanup: (() => void) | undefined

  function saveTableRequest(
    ...overrides: Partial<Omit<SaveTableRequest, "name">>[]

@@ -106,13 +102,7 @@ describe.each([
  }

  beforeAll(async () => {
-   await features.testutils.withFeatureFlags("*", { SQS: isSqs }, () =>
-     config.init()
-   )
-
-   envCleanup = features.testutils.setFeatureFlags("*", {
-     SQS: isSqs,
-   })
+   await config.init()

    if (dsProvider) {
      rawDatasource = await dsProvider

@@ -125,9 +115,6 @@ describe.each([

  afterAll(async () => {
    setup.afterAll()
-   if (envCleanup) {
-     envCleanup()
-   }
  })

  beforeEach(() => {

@@ -855,7 +842,6 @@ describe.each([
    })
  })

- !isLucene &&
  it("does not get confused when a calculation field shadows a basic one", async () => {
    const table = await config.api.table.save(
      saveTableRequest({

@@ -1453,7 +1439,6 @@ describe.each([
    )
  })

- !isLucene &&
  describe("calculation views", () => {
    let table: Table
    let view: ViewV2

@@ -2293,7 +2278,6 @@ describe.each([
    })
  })

- !isLucene &&
  describe("calculation views", () => {
    it("should not remove calculation columns when modifying table schema", async () => {
      let table = await config.api.table.save(

@@ -2721,13 +2705,10 @@ describe.each([
    })
  })

- !isLucene &&
  describe("search", () => {
    it("returns empty rows from view when no schema is passed", async () => {
      const rows = await Promise.all(
-       Array.from({ length: 10 }, () =>
-         config.api.row.save(table._id!, {})
-       )
+       Array.from({ length: 10 }, () => config.api.row.save(table._id!, {}))
      )
      const response = await config.api.viewV2.search(view.id)
      expect(response.rows).toHaveLength(10)

@@ -2864,9 +2845,7 @@ describe.each([

    it("respects the limit parameter", async () => {
      await Promise.all(
-       Array.from({ length: 10 }, () =>
-         config.api.row.save(table._id!, {})
-       )
+       Array.from({ length: 10 }, () => config.api.row.save(table._id!, {}))
      )
      const limit = generator.integer({ min: 1, max: 8 })
      const response = await config.api.viewV2.search(view.id, {

@@ -2878,9 +2857,7 @@ describe.each([

    it("can handle pagination", async () => {
      await Promise.all(
-       Array.from({ length: 10 }, () =>
-         config.api.row.save(table._id!, {})
-       )
+       Array.from({ length: 10 }, () => config.api.row.save(table._id!, {}))
      )
      const rows = (await config.api.viewV2.search(view.id)).rows

@@ -2923,9 +2900,6 @@ describe.each([
        hasNextPage: false,
        totalRows: 10,
      }
-     if (isLucene) {
-       expectation.bookmark = expect.anything()
-     }
      expect(page3).toEqual(expectation)
    })

@@ -3148,9 +3122,7 @@ describe.each([
      })
      expect(response.rows).toHaveLength(1)
      expect(response.rows).toEqual(
-       expect.arrayContaining([
-         expect.objectContaining({ _id: three._id }),
-       ])
+       expect.arrayContaining([expect.objectContaining({ _id: three._id })])
      )
    })

@@ -3211,7 +3183,6 @@ describe.each([
      )
    })

-   !isLucene &&
    it.each([true, false])(
      "can filter a view without a view filter",
      async allOr => {

@@ -3249,7 +3220,6 @@ describe.each([
      }
    )

-   !isLucene &&
    it.each([true, false])("cannot bypass a view filter", async allOr => {
      await config.api.row.save(table._id!, {
        one: "foo",

@@ -3455,7 +3425,6 @@ describe.each([
    })
  })

- !isLucene &&
  describe("calculations", () => {
    let table: Table
    let rows: Row[]

@@ -3508,10 +3477,7 @@ describe.each([
      expect(response.rows).toEqual(
        expect.arrayContaining([
          expect.objectContaining({
-           "Quantity Sum": rows.reduce(
-             (acc, r) => acc + r.quantity,
-             0
-           ),
+           "Quantity Sum": rows.reduce((acc, r) => acc + r.quantity, 0),
          }),
        ])
      )

@@ -3552,9 +3518,7 @@ describe.each([
      }

      for (const row of response.rows) {
-       expect(row["Total Price"]).toEqual(
-         priceByQuantity[row.quantity]
-       )
+       expect(row["Total Price"]).toEqual(priceByQuantity[row.quantity])
      }
    })

@@ -3582,10 +3546,7 @@ describe.each([
        query: {},
      })

-     function calculate(
-       type: CalculationType,
-       numbers: number[]
-     ): number {
+     function calculate(type: CalculationType, numbers: number[]): number {
        switch (type) {
          case CalculationType.COUNT:
            return numbers.length

@@ -3744,12 +3705,9 @@ describe.each([
        },
      })

-     const apertureScience = await config.api.row.save(
-       companies._id!,
-       {
+     const apertureScience = await config.api.row.save(companies._id!, {
        name: "Aperture Science Laboratories",
-     }
-     )
+     })

      const blackMesa = await config.api.row.save(companies._id!, {
        name: "Black Mesa",

@@ -4075,7 +4033,6 @@ describe.each([
    })
  })

- !isLucene &&
  it("should not need required fields to be present", async () => {
    const table = await config.api.table.save(
      saveTableRequest({

@@ -4464,9 +4421,7 @@ describe.each([
      }),
      expected: () => [
        {
-         users: [
-           expect.objectContaining({ _id: config.getUser()._id }),
-         ],
+         users: [expect.objectContaining({ _id: config.getUser()._id })],
        },
      ],
    },

@@ -4603,9 +4558,7 @@ describe.each([
        query: {},
        ...searchOpts,
      })
-     expect(rows).toEqual(
-       expected.map(r => expect.objectContaining(r))
-     )
+     expect(rows).toEqual(expected.map(r => expect.objectContaining(r)))
    }
  )
})
@@ -1,10 +1,6 @@
import * as setup from "../../../api/routes/tests/utilities"
import { basicTable } from "../../../tests/utilities/structures"
- import {
-   db as dbCore,
-   features,
-   SQLITE_DESIGN_DOC_ID,
- } from "@budibase/backend-core"
+ import { db as dbCore, SQLITE_DESIGN_DOC_ID } from "@budibase/backend-core"
import {
  LinkDocument,
  DocumentType,

@@ -70,17 +66,8 @@ function oldLinkDocument(): Omit<LinkDocument, "tableId"> {
  }
}

- async function sqsDisabled(cb: () => Promise<void>) {
-   await features.testutils.withFeatureFlags("*", { SQS: false }, cb)
- }
-
- async function sqsEnabled(cb: () => Promise<void>) {
-   await features.testutils.withFeatureFlags("*", { SQS: true }, cb)
- }
-
describe("SQS migration", () => {
  beforeAll(async () => {
-   await sqsDisabled(async () => {
    await config.init()
    const table = await config.api.table.save(basicTable())
    tableId = table._id!

@@ -88,7 +75,6 @@ describe("SQS migration", () => {
    // old link document
    await db.put(oldLinkDocument())
-   })
  })

  beforeEach(async () => {
    await config.doInTenant(async () => {

@@ -101,19 +87,11 @@ describe("SQS migration", () => {

  it("test migration runs as expected against an older DB", async () => {
    const db = dbCore.getDB(config.appId!)
-   // confirm nothing exists initially
-   await sqsDisabled(async () => {
-     let error: any | undefined
-     try {
-       await db.get(SQLITE_DESIGN_DOC_ID)
-     } catch (err: any) {
-       error = err
-     }
-     expect(error).toBeDefined()
-     expect(error.status).toBe(404)
-   })
-
-   await sqsEnabled(async () => {
    // remove sqlite design doc to simulate it comes from an older installation
    const doc = await db.get(SQLITE_DESIGN_DOC_ID)
    await db.remove({ _id: doc._id, _rev: doc._rev })

    await processMigrations(config.appId!, MIGRATIONS)
    const designDoc = await db.get<SQLiteDefinition>(SQLITE_DESIGN_DOC_ID)
    expect(designDoc.sql.tables).toBeDefined()

@@ -130,14 +108,11 @@ describe("SQS migration", () => {

    const { tableId1, tableId2, rowId1, rowId2 } = oldLinkDocInfo()
    const linkDoc = await db.get<LinkDocument>(oldLinkDocID())
-   expect(linkDoc.tableId).toEqual(
-     generateJunctionTableID(tableId1, tableId2)
-   )
+   expect(linkDoc.tableId).toEqual(generateJunctionTableID(tableId1, tableId2))
    // should have swapped the documents
    expect(linkDoc.doc1.tableId).toEqual(tableId2)
    expect(linkDoc.doc1.rowId).toEqual(rowId2)
    expect(linkDoc.doc2.tableId).toEqual(tableId1)
    expect(linkDoc.doc2.rowId).toEqual(rowId1)
  })
- })
})
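
Reviewer note: the deleted try/catch block that asserted a missing design doc can be expressed more directly with jest's `rejects` matcher. A minimal sketch of the equivalent assertion, assuming (as the removed code did) that a PouchDB-style `get` rejects with a `status` property:

```ts
// Minimal sketch: a fake DB whose get() rejects like PouchDB does for missing docs.
const db = {
  get: async (_id: string): Promise<never> => {
    const err = new Error("missing") as Error & { status: number }
    err.status = 404
    throw err
  },
}

it("reports a 404 for a missing design doc", async () => {
  // Equivalent to: try { await db.get(id) } catch (err) { expect(err.status).toBe(404) }
  await expect(db.get("_design/sqlite")).rejects.toMatchObject({ status: 404 })
})
```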
@@ -0,0 +1,49 @@
+ import tk from "timekeeper"
+ import "../../environment"
+ import * as automations from "../index"
+ import * as setup from "./utilities"
+ import { basicCronAutomation } from "../../tests/utilities/structures"
+
+ const initialTime = Date.now()
+ tk.freeze(initialTime)
+
+ const oneMinuteInMs = 60 * 1000
+
+ describe("cron automations", () => {
+   let config = setup.getConfig()
+
+   beforeAll(async () => {
+     await automations.init()
+     await config.init()
+   })
+
+   afterAll(async () => {
+     await automations.shutdown()
+     setup.afterAll()
+   })
+
+   beforeEach(() => {
+     tk.freeze(initialTime)
+   })
+
+   async function travel(ms: number) {
+     tk.travel(Date.now() + ms)
+   }
+
+   it("should initialise the automation timestamp", async () => {
+     const automation = basicCronAutomation(config.appId!, "* * * * *")
+     await config.api.automation.post(automation)
+     await travel(oneMinuteInMs)
+     await config.publish()
+
+     const automationLogs = await config.getAutomationLogs()
+     expect(automationLogs.data).toHaveLength(1)
+     expect(automationLogs.data).toEqual([
+       expect.objectContaining({
+         trigger: expect.objectContaining({
+           outputs: { timestamp: initialTime + oneMinuteInMs },
+         }),
+       }),
+     ])
+   })
+ })
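
Reviewer note: the new cron test relies on `timekeeper` to pin `Date.now()` and then step it forward, so the trigger's recorded timestamp is deterministic. A standalone sketch of that freeze/travel pattern:

```ts
import tk from "timekeeper"

const start = Date.now()
tk.freeze(start) // every Date.now() call now returns `start`

function travel(ms: number) {
  tk.travel(Date.now() + ms) // jump the mocked clock forward; it ticks from there
}

travel(60 * 1000)
console.log(Date.now() - start) // ≈ 60000, independent of real elapsed time
tk.reset() // restore the real clock
```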
@@ -355,6 +355,93 @@ describe("Loop automations", () => {
    expect(results.steps[2].outputs.rows).toHaveLength(expectedRows.length)
  })

+ it("should run an automation with a loop and update row step using stepIds", async () => {
+   const table = await config.createTable({
+     name: "TestTable",
+     type: "table",
+     schema: {
+       name: {
+         name: "name",
+         type: FieldType.STRING,
+         constraints: {
+           presence: true,
+         },
+       },
+       value: {
+         name: "value",
+         type: FieldType.NUMBER,
+         constraints: {
+           presence: true,
+         },
+       },
+     },
+   })
+
+   const rows = [
+     { name: "Row 1", value: 1, tableId: table._id },
+     { name: "Row 2", value: 2, tableId: table._id },
+     { name: "Row 3", value: 3, tableId: table._id },
+   ]
+
+   await config.api.row.bulkImport(table._id!, { rows })
+
+   const builder = createAutomationBuilder({
+     name: "Test Loop and Update Row",
+   })
+
+   const results = await builder
+     .appAction({ fields: {} })
+     .queryRows(
+       {
+         tableId: table._id!,
+       },
+       { stepId: "abc123" }
+     )
+     .loop({
+       option: LoopStepType.ARRAY,
+       binding: "{{ steps.abc123.rows }}",
+     })
+     .updateRow({
+       rowId: "{{ loop.currentItem._id }}",
+       row: {
+         name: "Updated {{ loop.currentItem.name }}",
+         value: "{{ loop.currentItem.value }}",
+         tableId: table._id,
+       },
+       meta: {},
+     })
+     .queryRows({
+       tableId: table._id!,
+     })
+     .run()
+
+   const expectedRows = [
+     { name: "Updated Row 1", value: 1 },
+     { name: "Updated Row 2", value: 2 },
+     { name: "Updated Row 3", value: 3 },
+   ]
+
+   expect(results.steps[1].outputs.items).toEqual(
+     expect.arrayContaining(
+       expectedRows.map(row =>
+         expect.objectContaining({
+           success: true,
+           row: expect.objectContaining(row),
+         })
+       )
+     )
+   )
+
+   expect(results.steps[2].outputs.rows).toEqual(
+     expect.arrayContaining(
+       expectedRows.map(row => expect.objectContaining(row))
+     )
+   )
+
+   expect(results.steps[1].outputs.items).toHaveLength(expectedRows.length)
+   expect(results.steps[2].outputs.rows).toHaveLength(expectedRows.length)
+ })
+
  it("should run an automation with a loop and delete row step", async () => {
    const table = await config.createTable({
      name: "TestTable",
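
Reviewer note: the new test exercises passing an explicit `stepId` so later steps can reference outputs by a stable key (`{{ steps.abc123.rows }}`) rather than by position. A compact sketch of that idea, with hypothetical types standing in for the real automation context:

```ts
// Hypothetical illustration: step outputs are stored under a caller-chosen id,
// so template bindings can look them up by name.
type StepOutputs = Record<string, unknown>

const outputs: Record<string, StepOutputs> = {}

function recordStep(stepId: string, result: StepOutputs) {
  outputs[stepId] = result
}

function resolveBinding(binding: string): unknown {
  // "{{ steps.abc123.rows }}" -> outputs["abc123"]["rows"]
  const match = binding.match(/steps\.(\w+)\.(\w+)/)
  if (!match) return undefined
  const [, stepId, field] = match
  return outputs[stepId]?.[field]
}

recordStep("abc123", { rows: [{ name: "Row 1" }] })
console.log(resolveBinding("{{ steps.abc123.rows }}")) // [{ name: "Row 1" }]
```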
@@ -70,6 +70,10 @@ export async function processEvent(job: AutomationJob) {

  const task = async () => {
    try {
+     if (isCronTrigger(job.data.automation)) {
+       // Requires the timestamp at run time
+       job.data.event.timestamp = Date.now()
+     }
      // need to actually await these so that an error can be captured properly
      console.log("automation running", ...loggingArgs(job))

@@ -210,15 +214,15 @@ export async function enableCronTrigger(appId: any, automation: Automation) {
  }
  // make a job id rather than letting Bull decide, makes it easier to handle on way out
  const jobId = `${appId}_cron_${utils.newid()}`
- const job: any = await automationQueue.add(
+ const job = await automationQueue.add(
    {
      automation,
-     event: { appId, timestamp: Date.now() },
+     event: { appId },
    },
    { repeat: { cron: cronExp }, jobId }
  )
  // Assign cron job ID from bull so we can remove it later if the cron trigger is removed
- trigger.cronJobId = job.id
+ trigger.cronJobId = job.id.toString()
  // can't use getAppDB here as this is likely to be called from dev app,
  // but this call could be for dev app or prod app, need to just use what
  // was passed in
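
Reviewer note: choosing the Bull job id up front, as `enableCronTrigger` does, makes the repeatable job addressable later. A hedged sketch of that pattern (queue name, Redis URL, and payload are placeholders; `getRepeatableJobs` and `removeRepeatableByKey` are standard Bull APIs):

```ts
import Queue from "bull"

async function scheduleAndUnschedule() {
  const queue = new Queue("automationQueue", "redis://localhost:6379")

  // Make a job id rather than letting Bull decide, so it can be found later.
  const jobId = "app_123_cron_abc"
  await queue.add(
    { automation: "placeholder" },
    { repeat: { cron: "* * * * *" }, jobId }
  )

  // Later: locate the repeatable job by our id and remove it.
  const repeatable = await queue.getRepeatableJobs()
  const ours = repeatable.find(job => job.id === jobId)
  if (ours) {
    await queue.removeRepeatableByKey(ours.key)
  }
}
```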
@@ -6,12 +6,12 @@ import * as mysql from "./mysql"
import * as mssql from "./mssql"
import * as mariadb from "./mariadb"
import * as oracle from "./oracle"
- import { GenericContainer, StartedTestContainer } from "testcontainers"
import { testContainerUtils } from "@budibase/backend-core/tests"
- import cloneDeep from "lodash/cloneDeep"

export type DatasourceProvider = () => Promise<Datasource>

+ export const { startContainer } = testContainerUtils
+
export enum DatabaseName {
  POSTGRES = "postgres",
  MONGODB = "mongodb",

@@ -71,58 +71,3 @@ export async function knexClient(ds: Datasource) {
    }
  }
}
-
- export async function startContainer(container: GenericContainer) {
-   const imageName = (container as any).imageName.string as string
-   let key: string = imageName
-   if (imageName.includes("@sha256")) {
-     key = imageName.split("@")[0]
-   }
-   key = key.replaceAll("/", "-").replaceAll(":", "-")
-
-   container = container
-     .withReuse()
-     .withLabels({ "com.budibase": "true" })
-     .withName(`${key}_testcontainer`)
-
-   let startedContainer: StartedTestContainer | undefined = undefined
-   let lastError = undefined
-   for (let i = 0; i < 10; i++) {
-     try {
-       // container.start() is not an idempotent operation, calling `start`
-       // modifies the internal state of a GenericContainer instance such that
-       // the hash it uses to determine reuse changes. We need to clone the
-       // container before calling start to ensure that we're using the same
-       // reuse hash every time.
-       const containerCopy = cloneDeep(container)
-       startedContainer = await containerCopy.start()
-       lastError = undefined
-       break
-     } catch (e: any) {
-       lastError = e
-       await new Promise(resolve => setTimeout(resolve, 1000))
-     }
-   }
-
-   if (!startedContainer) {
-     if (lastError) {
-       throw lastError
-     }
-     throw new Error(`failed to start container: ${imageName}`)
-   }
-
-   const info = testContainerUtils.getContainerById(startedContainer.getId())
-   if (!info) {
-     throw new Error("Container not found")
-   }
-
-   // Some Docker runtimes, when you expose a port, will bind it to both
-   // 127.0.0.1 and ::1, so ipv4 and ipv6. The port spaces of ipv4 and ipv6
-   // addresses are not shared, and testcontainers will sometimes give you back
-   // the ipv6 port. There's no way to know that this has happened, and if you
-   // try to then connect to `localhost:port` you may attempt to bind to the v4
-   // address which could be unbound or even an entirely different container. For
-   // that reason, we don't use testcontainers' `getExposedPort` function,
-   // preferring instead our own method that guaranteed v4 ports.
-   return testContainerUtils.getExposedV4Ports(info)
- }
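
Reviewer note: the removed helper now lives in backend-core as `testContainerUtils.startContainer`, but the core trick it used is worth keeping in mind: `GenericContainer.start()` mutates the instance's reuse hash, so the instance must be cloned before each start attempt. A hedged sketch of just that part:

```ts
import { GenericContainer, StartedTestContainer } from "testcontainers"
import cloneDeep from "lodash/cloneDeep"

async function startWithReuse(
  container: GenericContainer
): Promise<StartedTestContainer> {
  container = container.withReuse().withLabels({ "com.budibase": "true" })

  // start() is not idempotent: it mutates internal state that feeds the
  // reuse hash, so clone before each attempt to keep the hash stable.
  for (let attempt = 0; attempt < 10; attempt++) {
    try {
      return await cloneDeep(container).start()
    } catch (e) {
      await new Promise(resolve => setTimeout(resolve, 1000))
    }
  }
  throw new Error("failed to start container")
}
```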
@@ -6,7 +6,13 @@ import * as api from "./api"
import * as automations from "./automations"
import { Thread } from "./threads"
import * as redis from "./utilities/redis"
- import { events, logging, middleware, timers } from "@budibase/backend-core"
+ import {
+   events,
+   logging,
+   middleware,
+   timers,
+   env as coreEnv,
+ } from "@budibase/backend-core"
import destroyable from "server-destroy"
import { userAgent } from "koa-useragent"

@@ -37,6 +43,9 @@ export default function createKoaApp() {
  app.use(middleware.correlation)
  app.use(middleware.pino)
  app.use(middleware.ip)
+ if (!coreEnv.DISABLE_CONTENT_SECURITY_POLICY) {
+   app.use(middleware.csp)
+ }
  app.use(userAgent)

  const server = http.createServer(app.callback())
@@ -123,6 +123,7 @@ export async function updateWithExport(
  // don't need obj store, the existing app already has everything we need
  await backups.importApp(devId, tempDb, template, {
    importObjStoreContents: false,
+   updateAttachmentColumns: true,
  })
  const newMetadata = await getNewAppMetadata(tempDb, appDb)
  // get the documents to copy
@@ -170,7 +170,10 @@ export async function importApp(
  appId: string,
  db: Database,
  template: TemplateType,
- opts: { importObjStoreContents: boolean } = { importObjStoreContents: true }
+ opts: {
+   importObjStoreContents: boolean
+   updateAttachmentColumns: boolean
+ } = { importObjStoreContents: true, updateAttachmentColumns: true }
) {
  let prodAppId = dbCore.getProdAppID(appId)
  let dbStream: any

@@ -219,7 +222,9 @@ export async function importApp(
  if (!ok) {
    throw "Error loading database dump from template."
  }
+ if (opts.updateAttachmentColumns) {
    await updateAttachmentColumns(prodAppId, db)
+ }
  await updateAutomations(prodAppId, db)
  // clear up afterward
  if (tmpPath) {
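
Reviewer note: the widened `opts` parameter stays backwards compatible because the default supplies both flags. A small sketch of this options-object-with-defaults pattern (the names mirror the diff; the body is illustrative only):

```ts
interface ImportOpts {
  importObjStoreContents: boolean
  updateAttachmentColumns: boolean
}

// Callers that pass nothing get the old behaviour; callers can opt out of
// either step independently, as updateWithExport does.
async function importApp(
  appId: string,
  opts: ImportOpts = { importObjStoreContents: true, updateAttachmentColumns: true }
) {
  if (opts.importObjStoreContents) {
    // ...copy object store contents...
  }
  if (opts.updateAttachmentColumns) {
    // ...rewrite attachment columns...
  }
}

async function main() {
  // e.g. updateWithExport: skip the object store, still fix attachments
  await importApp("app_dev_123", {
    importObjStoreContents: false,
    updateAttachmentColumns: true,
  })
}
```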
@@ -1,11 +1,8 @@
import {
  EmptyFilterOption,
- FeatureFlag,
  LegacyFilter,
  LogicalOperator,
  Row,
  RowSearchParams,
  SearchFilterKey,
  SearchFilters,
  SearchResponse,
  SortOrder,

@@ -19,7 +16,6 @@ import { ExportRowsParams, ExportRowsResult } from "./search/types"
import { dataFilters } from "@budibase/shared-core"
import sdk from "../../index"
import { checkFilters, searchInputMapping } from "./search/utils"
import { db, features } from "@budibase/backend-core"
import tracer from "dd-trace"
import { getQueryableFields, removeInvalidFilters } from "./queryUtils"
import { enrichSearchContext } from "../../../api/controllers/row/utils"

@@ -104,35 +100,6 @@ export async function search(
    }
    viewQuery = checkFilters(table, viewQuery)

-   const sqsEnabled = await features.flags.isEnabled(FeatureFlag.SQS)
-   const supportsLogicalOperators =
-     isExternalTableID(view.tableId) || sqsEnabled
-
-   if (!supportsLogicalOperators) {
-     // In the unlikely event that a Grouped Filter is in a non-SQS environment
-     // It needs to be ignored entirely
-     let queryFilters: LegacyFilter[] = Array.isArray(view.query)
-       ? view.query
-       : []
-
-     const { filters } = dataFilters.splitFiltersArray(queryFilters)
-
-     // Extract existing fields
-     const existingFields = filters.map(filter =>
-       db.removeKeyNumbering(filter.field)
-     )
-
-     // Carry over filters for unused fields
-     Object.keys(options.query).forEach(key => {
-       const operator = key as Exclude<SearchFilterKey, LogicalOperator>
-       Object.keys(options.query[operator] || {}).forEach(field => {
-         if (!existingFields.includes(db.removeKeyNumbering(field))) {
-           viewQuery[operator]![field] = options.query[operator]![field]
-         }
-       })
-     })
-     options.query = viewQuery
-   } else {
    const conditions = viewQuery ? [viewQuery] : []
    options.query = {
      $and: {

@@ -143,7 +110,6 @@ export async function search(
      options.query.onEmptyFilter = viewQuery.onEmptyFilter
    }
- }
  }

  options.query = dataFilters.cleanupQuery(options.query)
  options.query = dataFilters.fixupFilterArrays(options.query)

@@ -170,12 +136,9 @@ export async function search(
  if (isExternalTable) {
    span?.addTags({ searchType: "external" })
    result = await external.search(options, source)
- } else if (await features.flags.isEnabled(FeatureFlag.SQS)) {
+ } else {
    span?.addTags({ searchType: "sqs" })
    result = await internal.sqs.search(options, source)
- } else {
-   span?.addTags({ searchType: "lucene" })
-   result = await internal.lucene.search(options, source)
  }

  span.addTags({
@@ -1,3 +1,2 @@
export * as sqs from "./sqs"
- export * as lucene from "./lucene"
export * from "./internal"
@@ -1,79 +0,0 @@
- import { PROTECTED_INTERNAL_COLUMNS } from "@budibase/shared-core"
- import { fullSearch, paginatedSearch } from "../utils"
- import { InternalTables } from "../../../../../db/utils"
- import {
-   Row,
-   RowSearchParams,
-   SearchResponse,
-   SortType,
-   Table,
-   User,
-   ViewV2,
- } from "@budibase/types"
- import { getGlobalUsersFromMetadata } from "../../../../../utilities/global"
- import { outputProcessing } from "../../../../../utilities/rowProcessor"
- import pick from "lodash/pick"
- import sdk from "../../../../"
-
- export async function search(
-   options: RowSearchParams,
-   source: Table | ViewV2
- ): Promise<SearchResponse<Row>> {
-   let table: Table
-   if (sdk.views.isView(source)) {
-     table = await sdk.views.getTable(source.id)
-   } else {
-     table = source
-   }
-
-   const { paginate, query } = options
-
-   const params: RowSearchParams = {
-     tableId: options.tableId,
-     viewId: options.viewId,
-     sort: options.sort,
-     sortOrder: options.sortOrder,
-     sortType: options.sortType,
-     limit: options.limit,
-     bookmark: options.bookmark,
-     version: options.version,
-     disableEscaping: options.disableEscaping,
-     query: {},
-   }
-
-   if (params.sort && !params.sortType) {
-     const schema = table.schema
-     const sortField = schema[params.sort]
-     params.sortType =
-       sortField.type === "number" ? SortType.NUMBER : SortType.STRING
-   }
-
-   let response
-   if (paginate) {
-     response = await paginatedSearch(query, params)
-   } else {
-     response = await fullSearch(query, params)
-   }
-
-   // Enrich search results with relationships
-   if (response.rows && response.rows.length) {
-     // enrich with global users if from users table
-     if (table._id === InternalTables.USER_METADATA) {
-       response.rows = await getGlobalUsersFromMetadata(response.rows as User[])
-     }
-
-     const visibleFields =
-       options.fields ||
-       Object.keys(source.schema || {}).filter(
-         key => source.schema?.[key].visible !== false
-       )
-     const allowedFields = [...visibleFields, ...PROTECTED_INTERNAL_COLUMNS]
-     response.rows = response.rows.map((r: any) => pick(r, allowedFields))
-
-     response.rows = await outputProcessing(source, response.rows, {
-       squash: true,
-     })
-   }
-
-   return response
- }
@@ -10,7 +10,7 @@ import {
import TestConfiguration from "../../../../../tests/utilities/TestConfiguration"
import { search } from "../../../../../sdk/app/rows/search"
import { generator } from "@budibase/backend-core/tests"
import { features } from "@budibase/backend-core"

import {
  DatabaseName,
  getDatasource,

@@ -21,30 +21,20 @@ import { tableForDatasource } from "../../../../../tests/utilities/structures"
// (e.g. limiting searches to returning specific fields). If it's possible to
// test through the API, it should be done there instead.
describe.each([
- ["lucene", undefined],
- ["sqs", undefined],
+ ["internal", undefined],
  [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
  [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
  [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
  [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
])("search sdk (%s)", (name, dsProvider) => {
- const isSqs = name === "sqs"
- const isLucene = name === "lucene"
- const isInternal = isLucene || isSqs
+ const isInternal = name === "internal"
  const config = new TestConfiguration()

- let envCleanup: (() => void) | undefined
  let datasource: Datasource | undefined
  let table: Table

  beforeAll(async () => {
-   await features.testutils.withFeatureFlags("*", { SQS: isSqs }, () =>
-     config.init()
-   )
-   envCleanup = features.testutils.setFeatureFlags("*", {
-     SQS: isSqs,
-   })
+   await config.init()

    if (dsProvider) {
      datasource = await config.createDatasource({

@@ -105,9 +95,6 @@ describe.each([

  afterAll(async () => {
    config.end()
-   if (envCleanup) {
-     envCleanup()
-   }
  })

  it("querying by fields will always return data attribute columns", async () => {

@@ -211,7 +198,6 @@ describe.each([
    })
  })

- !isLucene &&
  it.each([
    [["id", "name", "age"], 3],
    [["name", "age"], 10],
@@ -1,4 +1,4 @@
- import { context, features } from "@budibase/backend-core"
+ import { context } from "@budibase/backend-core"
import { getTableParams } from "../../../db/utils"
import {
  breakExternalTableId,

@@ -12,7 +12,6 @@ import {
  TableResponse,
  TableSourceType,
  TableViewsResponse,
- FeatureFlag,
} from "@budibase/types"
import datasources from "../datasources"
import sdk from "../../../sdk"

@@ -49,10 +48,7 @@ export async function processTable(table: Table): Promise<Table> {
    type: "table",
    sourceId: table.sourceId || INTERNAL_TABLE_SOURCE_ID,
    sourceType: TableSourceType.INTERNAL,
- }
- const sqsEnabled = await features.flags.isEnabled(FeatureFlag.SQS)
- if (sqsEnabled) {
-   processed.sql = true
+   sql: true,
  }
  return processed
}
@@ -237,6 +237,7 @@ export default class TestConfiguration {
  if (!this) {
    return
  }

  if (this.server) {
    this.server.close()
  } else {
@@ -245,6 +245,38 @@ export function basicAutomation(appId?: string): Automation {
  }
}

+ export function basicCronAutomation(appId: string, cron: string): Automation {
+   const automation: Automation = {
+     name: `Automation ${generator.guid()}`,
+     definition: {
+       trigger: {
+         stepId: AutomationTriggerStepId.CRON,
+         name: "test",
+         tagline: "test",
+         icon: "test",
+         description: "test",
+         type: AutomationStepType.TRIGGER,
+         id: "test",
+         inputs: {
+           cron,
+         },
+         schema: {
+           inputs: {
+             properties: {},
+           },
+           outputs: {
+             properties: {},
+           },
+         },
+       },
+       steps: [],
+     },
+     type: "automation",
+     appId,
+   }
+   return automation
+ }
+
export function serverLogAutomation(appId?: string): Automation {
  return {
    name: "My Automation",
@@ -385,7 +385,7 @@ class Orchestrator {
  stepIdx: number,
  pathIdx?: number
): Promise<number> {
- await processObject(loopStep.inputs, this.context)
+ await processObject(loopStep.inputs, this.processContext(this.context))
  const iterations = getLoopIterations(loopStep)
  let stepToLoopIndex = stepIdx + 1
  let pathStepIdx = (pathIdx || stepIdx) + 1
@@ -3,7 +3,6 @@ import { fixAutoColumnSubType, processFormulas } from "./utils"
import {
  cache,
  context,
- features,
  HTTPError,
  objectStore,
  utils,

@@ -19,7 +18,6 @@ import {
  Table,
  User,
  ViewV2,
- FeatureFlag,
} from "@budibase/types"
import { cloneDeep } from "lodash/fp"
import {

@@ -423,7 +421,6 @@ export async function coreOutputProcessing(

  // remove null properties to match internal API
  const isExternal = isExternalTableID(table._id!)
- if (isExternal || (await features.flags.isEnabled(FeatureFlag.SQS))) {
  for (const row of rows) {
    for (const key of Object.keys(row)) {
      if (row[key] === null) {

@@ -465,7 +462,6 @@ export async function coreOutputProcessing(
      }
    }
  }
- }

  if (!isUserMetadataTable(table._id!)) {
    const protectedColumns = isExternal
@@ -8,7 +8,7 @@ import {
} from "@budibase/types"
import { outputProcessing } from ".."
import { generator, structures } from "@budibase/backend-core/tests"
import { features } from "@budibase/backend-core"

import * as bbReferenceProcessor from "../bbReferenceProcessor"
import TestConfiguration from "../../../tests/utilities/TestConfiguration"

@@ -21,7 +21,6 @@ jest.mock("../bbReferenceProcessor", (): typeof bbReferenceProcessor => ({

describe("rowProcessor - outputProcessing", () => {
  const config = new TestConfiguration()
- let cleanupFlags: () => void = () => {}

  beforeAll(async () => {
    await config.init()

@@ -33,11 +32,6 @@ describe("rowProcessor - outputProcessing", () => {

  beforeEach(() => {
    jest.resetAllMocks()
-   cleanupFlags = features.testutils.setFeatureFlags("*", { SQS: true })
  })

- afterEach(() => {
-   cleanupFlags()
- })
-
  const processOutputBBReferenceMock =
@@ -527,7 +527,12 @@ export function search<T extends Record<string, any>>(
): SearchResponse<T> {
  let result = runQuery(docs, query.query)
  if (query.sort) {
-   result = sort(result, query.sort, query.sortOrder || SortOrder.ASCENDING)
+   result = sort(
+     result,
+     query.sort,
+     query.sortOrder || SortOrder.ASCENDING,
+     query.sortType
+   )
  }
  const totalRows = result.length
  if (query.limit) {
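
Reviewer note: threading `query.sortType` through matters because string and number comparisons order differently. A standalone sketch of why the in-memory sort needs the hint:

```ts
// Lexicographic vs numeric ordering: "10" < "9" as strings, not as numbers.
const values = ["10", "9", "1"]

const asStrings = [...values].sort() // ["1", "10", "9"]
const asNumbers = [...values].sort((a, b) => Number(a) - Number(b)) // ["1", "9", "10"]

console.log(asStrings, asNumbers)
```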
@@ -48,7 +48,7 @@ export function validate(
  cronExpression: string
): { valid: false; err: string[] } | { valid: true } {
  const result = cronValidate(cronExpression, {
-   preset: "npm-node-cron",
+   preset: "npm-cron-schedule",
    override: {
      useSeconds: false,
    },
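
Reviewer note: cron-validate's behaviour is driven entirely by the chosen preset, so the swap to `npm-cron-schedule` aligns validation with the scheduling library's accepted syntax. A hedged usage sketch (the preset name comes from the diff; `isValid`/`getError` are cron-validate's standard result API):

```ts
import cronValidate from "cron-validate"

const result = cronValidate("*/5 * * * *", {
  preset: "npm-cron-schedule", // match the scheduler's accepted syntax
  override: { useSeconds: false },
})

if (!result.isValid()) {
  console.error(result.getError()) // list of reasons the expression failed
}
```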
@@ -14,6 +14,7 @@ export interface AutomationDataEvent {
  row?: Row
  oldRow?: Row
  user?: UserBindings
+ timestamp?: number
}

export interface AutomationData {
@@ -12,7 +12,6 @@ import type PouchDB from "pouchdb-find"

export enum SearchIndex {
- ROWS = "rows",
  AUDIT = "audit",
  USER = "user",
}
@@ -2,7 +2,6 @@ export enum FeatureFlag {
  PER_CREATOR_PER_USER_PRICE = "PER_CREATOR_PER_USER_PRICE",
  PER_CREATOR_PER_USER_PRICE_ALERT = "PER_CREATOR_PER_USER_PRICE_ALERT",
  AUTOMATION_BRANCHING = "AUTOMATION_BRANCHING",
- SQS = "SQS",
  AI_CUSTOM_CONFIGS = "AI_CUSTOM_CONFIGS",
  DEFAULT_VALUES = "DEFAULT_VALUES",
  ENRICHED_RELATIONSHIPS = "ENRICHED_RELATIONSHIPS",
@@ -48,6 +48,7 @@ export interface Ctx<RequestBody = any, ResponseBody = any> extends Context {
  request: BBRequest<RequestBody>
  body: ResponseBody
  userAgent: UserAgentContext["userAgent"]
+ state: { nonce?: string }
}

/**

@@ -56,6 +57,7 @@ export interface Ctx<RequestBody = any, ResponseBody = any> extends Context {
export interface UserCtx<RequestBody = any, ResponseBody = any>
  extends Ctx<RequestBody, ResponseBody> {
  user: ContextUser
+ state: { nonce?: string }
  roleId?: string
  eventEmitter?: ContextEmitter
  loginMethod?: LoginMethod
@@ -1,6 +1,6 @@
- import { Ctx, MaintenanceType, FeatureFlag } from "@budibase/types"
+ import { Ctx, MaintenanceType } from "@budibase/types"
import env from "../../../environment"
- import { env as coreEnv, db as dbCore, features } from "@budibase/backend-core"
+ import { env as coreEnv, db as dbCore } from "@budibase/backend-core"
import nodeFetch from "node-fetch"
import { helpers } from "@budibase/shared-core"

@@ -35,10 +35,7 @@ async function isSqsAvailable() {
}

async function isSqsMissing() {
- return (
-   (await features.flags.isEnabled(FeatureFlag.SQS)) &&
-   !(await isSqsAvailable())
- )
+ return !(await isSqsAvailable())
}

export const fetch = async (ctx: Ctx) => {
@@ -1,5 +1,5 @@
import { mocks, structures } from "@budibase/backend-core/tests"
- import { context, events, features } from "@budibase/backend-core"
+ import { context, events } from "@budibase/backend-core"
import { Event, IdentityType } from "@budibase/types"
import { TestConfiguration } from "../../../../tests"

@@ -12,19 +12,14 @@ const BASE_IDENTITY = {
const USER_AUDIT_LOG_COUNT = 3
const APP_ID = "app_1"

- describe.each(["lucene", "sql"])("/api/global/auditlogs (%s)", method => {
+ describe("/api/global/auditlogs (%s)", () => {
  const config = new TestConfiguration()
- let envCleanup: (() => void) | undefined

  beforeAll(async () => {
-   envCleanup = features.testutils.setFeatureFlags("*", {
-     SQS: method === "sql",
-   })
    await config.beforeAll()
  })

  afterAll(async () => {
-   envCleanup?.()
    await config.afterAll()
  })
@@ -56,6 +56,9 @@ app.use(koaSession(app))
app.use(middleware.correlation)
app.use(middleware.pino)
app.use(middleware.ip)
+ if (!coreEnv.DISABLE_CONTENT_SECURITY_POLICY) {
+   app.use(middleware.csp)
+ }
app.use(userAgent)

// authentication
@@ -12,7 +12,7 @@ dbConfig.init()
import env from "../environment"
import * as controllers from "./controllers"

- const supertest = require("supertest")
+ import supertest from "supertest"

import { Config } from "../constants"
import {