Merge branch 'master' into master

Commit 4671053250, authored by Martin McKeaveney on 2024-11-26 11:14:48 +00:00 and committed via GitHub.
Signed with GPG key ID B5690EEEBB952194 (no known key found for this signature in the database).
192 changed files with 22261 additions and 18853 deletions

@ -9,8 +9,5 @@ packages/server/client
packages/server/coverage
packages/builder/.routify
packages/sdk/sdk
packages/account-portal/packages/server/build
packages/account-portal/packages/ui/.routify
packages/account-portal/packages/ui/build
**/*.ivm.bundle.js
packages/server/build/oldClientVersions/**/**

@ -64,18 +64,15 @@ jobs:
- run: yarn --frozen-lockfile
# Run build all the projects
- name: Build OSS
run: yarn build:oss
- name: Build account portal
run: yarn build:account-portal
if: ${{ env.IS_OSS_CONTRIBUTOR == 'false' }}
- name: Build
run: yarn build
# Check the types of the projects built via esbuild
- name: Check types
run: |
if ${{ env.ONLY_AFFECTED_TASKS }}; then
yarn check:types --since=${{ env.NX_BASE_BRANCH }} --ignore @budibase/account-portal-server
yarn check:types --since=${{ env.NX_BASE_BRANCH }}
else
yarn check:types --ignore @budibase/account-portal-server
yarn check:types
fi
helm-lint:
@ -117,9 +114,11 @@ jobs:
- name: Test
run: |
if ${{ env.ONLY_AFFECTED_TASKS }}; then
yarn test --ignore=@budibase/worker --ignore=@budibase/server --since=${{ env.NX_BASE_BRANCH }}
yarn test -- --ignore=@budibase/worker --ignore=@budibase/server --ignore=@budibase/builder --no-prefix --since=${{ env.NX_BASE_BRANCH }} -- --verbose --reporters=default --reporters=github-actions
yarn test -- --scope=@budibase/builder --since=${{ env.NX_BASE_BRANCH }}
else
yarn test --ignore=@budibase/worker --ignore=@budibase/server
yarn test -- --ignore=@budibase/worker --ignore=@budibase/server --ignore=@budibase/builder --no-prefix -- --verbose --reporters=default --reporters=github-actions
yarn test -- --scope=@budibase/builder --no-prefix
fi
test-worker:
@ -141,13 +140,22 @@ jobs:
- name: Test worker
run: |
if ${{ env.ONLY_AFFECTED_TASKS }}; then
node scripts/run-affected.js --task=test --scope=@budibase/worker --since=${{ env.NX_BASE_BRANCH }}
else
yarn test --scope=@budibase/worker
AFFECTED=$(yarn --silent nx show projects --affected -t test --base=${{ env.NX_BASE_BRANCH }} -p @budibase/worker)
if [ -z "$AFFECTED" ]; then
echo "No affected tests to run"
exit 0
fi
fi
cd packages/worker
yarn test --verbose --reporters=default --reporters=github-actions
test-server:
runs-on: budi-tubby-tornado-quad-core-300gb
runs-on: ubuntu-latest
strategy:
matrix:
datasource:
[mssql, mysql, postgres, mongodb, mariadb, oracle, sqs, none]
steps:
- name: Checkout repo
uses: actions/checkout@v4
@ -170,12 +178,19 @@ jobs:
- name: Pull testcontainers images
run: |
docker pull mcr.microsoft.com/mssql/server@${{ steps.dotenv.outputs.MSSQL_SHA }} &
docker pull mysql@${{ steps.dotenv.outputs.MYSQL_SHA }} &
docker pull postgres@${{ steps.dotenv.outputs.POSTGRES_SHA }} &
docker pull mongo@${{ steps.dotenv.outputs.MONGODB_SHA }} &
docker pull mariadb@${{ steps.dotenv.outputs.MARIADB_SHA }} &
docker pull budibase/oracle-database:23.2-slim-faststart &
if [ "${{ matrix.datasource }}" == "mssql" ]; then
docker pull mcr.microsoft.com/mssql/server@${{ steps.dotenv.outputs.MSSQL_SHA }}
elif [ "${{ matrix.datasource }}" == "mysql" ]; then
docker pull mysql@${{ steps.dotenv.outputs.MYSQL_SHA }}
elif [ "${{ matrix.datasource }}" == "postgres" ]; then
docker pull postgres@${{ steps.dotenv.outputs.POSTGRES_SHA }}
elif [ "${{ matrix.datasource }}" == "mongodb" ]; then
docker pull mongo@${{ steps.dotenv.outputs.MONGODB_SHA }}
elif [ "${{ matrix.datasource }}" == "mariadb" ]; then
docker pull mariadb@${{ steps.dotenv.outputs.MARIADB_SHA }}
elif [ "${{ matrix.datasource }}" == "oracle" ]; then
docker pull budibase/oracle-database:23.2-slim-faststart
fi
docker pull minio/minio &
docker pull redis &
docker pull testcontainers/ryuk:0.5.1 &
@ -186,13 +201,25 @@ jobs:
- run: yarn --frozen-lockfile
- name: Test server
env:
DATASOURCE: ${{ matrix.datasource }}
run: |
if ${{ env.ONLY_AFFECTED_TASKS }}; then
node scripts/run-affected.js --task=test --scope=@budibase/server --since=${{ env.NX_BASE_BRANCH }}
else
yarn test --scope=@budibase/server
AFFECTED=$(yarn --silent nx show projects --affected -t test --base=${{ env.NX_BASE_BRANCH }} -p @budibase/server)
if [ -z "$AFFECTED" ]; then
echo "No affected tests to run"
exit 0
fi
fi
FILTER="./src/tests/filters/datasource-tests.js"
if [ "${{ matrix.datasource }}" == "none" ]; then
FILTER="./src/tests/filters/non-datasource-tests.js"
fi
cd packages/server
yarn test --filter $FILTER --verbose --reporters=default --reporters=github-actions
check-pro-submodule:
runs-on: ubuntu-latest
if: inputs.run_as_oss != true && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase')
@ -252,64 +279,6 @@ jobs:
echo 'All good, the submodule had been merged and setup correctly!'
fi
check-accountportal-submodule:
runs-on: ubuntu-latest
if: inputs.run_as_oss != true && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase')
steps:
- name: Checkout repo and submodules
uses: actions/checkout@v4
with:
submodules: true
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
fetch-depth: 0
- uses: dorny/paths-filter@v3
id: changes
with:
filters: |
src:
- packages/account-portal/**
- if: steps.changes.outputs.src == 'true'
name: Check account portal commit
id: get_accountportal_commits
run: |
cd packages/account-portal
accountportal_commit=$(git rev-parse HEAD)
branch="${{ github.base_ref || github.ref_name }}"
echo "Running on branch '$branch' (base_ref=${{ github.base_ref }}, ref_name=${{ github.head_ref }})"
base_commit=$(git rev-parse origin/master)
if [[ ! -z $base_commit ]]; then
echo "target_branch=$branch"
echo "target_branch=$branch" >> "$GITHUB_OUTPUT"
echo "accountportal_commit=$accountportal_commit"
echo "accountportal_commit=$accountportal_commit" >> "$GITHUB_OUTPUT"
echo "base_commit=$base_commit"
echo "base_commit=$base_commit" >> "$GITHUB_OUTPUT"
else
echo "Nothing to do - branch to branch merge."
fi
- name: Check submodule merged to base branch
if: ${{ steps.get_accountportal_commits.outputs.base_commit != '' }}
uses: actions/github-script@v7
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
const submoduleCommit = '${{ steps.get_accountportal_commits.outputs.accountportal_commit }}';
const baseCommit = '${{ steps.get_accountportal_commits.outputs.base_commit }}';
if (submoduleCommit !== baseCommit) {
console.error('Submodule commit does not match the latest commit on the "${{ steps.get_accountportal_commits.outputs.target_branch }}" branch.');
console.error('Refer to the account portal repo to merge your changes: https://github.com/Budibase/account-portal/blob/master/docs/index.md')
process.exit(1);
} else {
console.log('All good, the submodule had been merged and setup correctly!')
}
check-lockfile:
runs-on: ubuntu-latest
steps:

.gitignore

@ -8,6 +8,7 @@ packages/server/build/oldClientVersions/**/*
packages/builder/src/components/deploy/clientVersions.json
packages/server/src/integrations/tests/utils/*.lock
packages/builder/vite.config.mjs.timestamp*
packages/account-portal
# Logs
logs
@ -110,4 +111,4 @@ budibase-component
budibase-datasource
*.iml
.nx
.nx

.gitmodules

@ -1,6 +1,3 @@
[submodule "packages/pro"]
path = packages/pro
url = git@github.com:Budibase/budibase-pro.git
[submodule "packages/account-portal"]
path = packages/account-portal
url = git@github.com:Budibase/account-portal.git

@ -9,8 +9,4 @@ packages/backend-core/coverage
packages/builder/.routify
packages/sdk/sdk
packages/pro/coverage
packages/account-portal/packages/ui/build
packages/account-portal/packages/ui/.routify
packages/account-portal/packages/server/build
packages/account-portal/packages/server/coverage
**/*.ivm.bundle.js

.vscode/launch.json

@ -20,16 +20,6 @@
"args": ["${workspaceFolder}/packages/worker/src/index.ts"],
"cwd": "${workspaceFolder}/packages/worker"
},
{
"name": "Camunda Worker",
"type": "node",
"request": "launch",
"runtimeArgs": ["--nolazy", "-r", "ts-node/register/transpile-only"],
"args": [
"${workspaceFolder}/packages/account-portal/packages/server/src/v2/run.ts"
],
"cwd": "${workspaceFolder}/packages/account-portal/packages/server"
},
{
"type": "chrome",
"request": "launch",

@ -423,9 +423,9 @@ core-js-pure@^3.20.2:
integrity sha512-12VZfFIu+wyVbBebyHmRTuEE/tZrB4tJToWcwAMcsp3h4+sHR+fMJWbKpYiCRWlhFBq+KNyO8rIV9rTkeVmznQ==
cross-spawn@^7.0.2:
version "7.0.3"
resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6"
integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==
version "7.0.6"
resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.6.tgz#8a58fe78f00dcd70c370451759dfbfaf03e8ee9f"
integrity sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==
dependencies:
path-key "^3.1.0"
shebang-command "^2.0.0"

@ -62,6 +62,7 @@ export default async function setup() {
},
])
.withLabels({ "com.budibase": "true" })
.withTmpFs({ "/data": "rw" })
.withReuse()
.withWaitStrategy(
Wait.forSuccessfulCommand(
@ -72,6 +73,7 @@ export default async function setup() {
const minio = new GenericContainer("minio/minio")
.withExposedPorts(9000)
.withCommand(["server", "/data"])
.withTmpFs({ "/data": "rw" })
.withEnvironment({
MINIO_ACCESS_KEY: "budibase",
MINIO_SECRET_KEY: "budibase",

@ -19,7 +19,6 @@ MINIO_PORT=4004
COUCH_DB_PORT=4005
COUCH_DB_SQS_PORT=4006
REDIS_PORT=6379
WATCHTOWER_PORT=6161
BUDIBASE_ENVIRONMENT=PRODUCTION
SQL_MAX_ROWS=

@ -74,7 +74,6 @@ services:
- WORKER_UPSTREAM_URL=http://worker-service:4003
- MINIO_UPSTREAM_URL=http://minio-service:9000
- COUCHDB_UPSTREAM_URL=http://couchdb-service:5984
- WATCHTOWER_UPSTREAM_URL=http://watchtower-service:8080
- RESOLVER=127.0.0.11
depends_on:
- minio-service

@ -87,7 +87,6 @@ services:
- WORKER_UPSTREAM_URL=http://worker-service:4003
- MINIO_UPSTREAM_URL=http://minio-service:9000
- COUCHDB_UPSTREAM_URL=http://couchdb-service:5984
- WATCHTOWER_UPSTREAM_URL=http://watchtower-service:8080
- RESOLVER=127.0.0.11
depends_on:
- minio-service
@ -112,19 +111,6 @@ services:
volumes:
- redis_data:/data
watchtower-service:
restart: always
image: containrrr/watchtower
volumes:
- /var/run/docker.sock:/var/run/docker.sock
command: --debug --http-api-update bbapps bbworker bbproxy
environment:
- WATCHTOWER_HTTP_API=true
- WATCHTOWER_HTTP_API_TOKEN=budibase
- WATCHTOWER_CLEANUP=true
labels:
- "com.centurylinklabs.watchtower.enable=false"
volumes:
couchdb3_data:
driver: local

@ -1,152 +0,0 @@
static_resources:
listeners:
- name: main_listener
address:
socket_address: { address: 0.0.0.0, port_value: 10000 }
filter_chains:
- filters:
- name: envoy.filters.network.http_connection_manager
typed_config:
"@type": type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager
stat_prefix: ingress
codec_type: auto
route_config:
name: local_route
virtual_hosts:
- name: local_services
domains: ["*"]
routes:
- match: { prefix: "/app/" }
route:
cluster: app-service
prefix_rewrite: "/"
- match: { path: "/v1/update" }
route:
cluster: watchtower-service
- match: { prefix: "/builder/" }
route:
cluster: app-service
- match: { prefix: "/builder" }
route:
cluster: app-service
- match: { prefix: "/app_" }
route:
cluster: app-service
# special cases for worker admin (deprecated), global and system API
- match: { prefix: "/api/global/" }
route:
cluster: worker-service
- match: { prefix: "/api/admin/" }
route:
cluster: worker-service
- match: { prefix: "/api/system/" }
route:
cluster: worker-service
- match: { path: "/" }
route:
cluster: app-service
# special case for when API requests are made, can just forward, not to minio
- match: { prefix: "/api/" }
route:
cluster: app-service
timeout: 120s
- match: { prefix: "/worker/" }
route:
cluster: worker-service
prefix_rewrite: "/"
- match: { prefix: "/db/" }
route:
cluster: couchdb-service
prefix_rewrite: "/"
# minio is on the default route because this works
# best, minio + AWS SDK doesn't handle path proxy
- match: { prefix: "/" }
route:
cluster: minio-service
http_filters:
- name: envoy.filters.http.router
clusters:
- name: app-service
connect_timeout: 0.25s
type: strict_dns
lb_policy: round_robin
load_assignment:
cluster_name: app-service
endpoints:
- lb_endpoints:
- endpoint:
address:
socket_address:
address: app-service
port_value: 4002
- name: minio-service
connect_timeout: 0.25s
type: strict_dns
lb_policy: round_robin
load_assignment:
cluster_name: minio-service
endpoints:
- lb_endpoints:
- endpoint:
address:
socket_address:
address: minio-service
port_value: 9000
- name: worker-service
connect_timeout: 0.25s
type: strict_dns
lb_policy: round_robin
load_assignment:
cluster_name: worker-service
endpoints:
- lb_endpoints:
- endpoint:
address:
socket_address:
address: worker-service
port_value: 4003
- name: couchdb-service
connect_timeout: 0.25s
type: strict_dns
lb_policy: round_robin
load_assignment:
cluster_name: couchdb-service
endpoints:
- lb_endpoints:
- endpoint:
address:
socket_address:
address: couchdb-service
port_value: 5984
- name: watchtower-service
connect_timeout: 0.25s
type: strict_dns
lb_policy: round_robin
load_assignment:
cluster_name: watchtower-service
endpoints:
- lb_endpoints:
- endpoint:
address:
socket_address:
address: watchtower-service
port_value: 8080

@ -18,7 +18,6 @@ WORKER_PORT=4003
MINIO_PORT=4004
COUCH_DB_PORT=4005
REDIS_PORT=6379
WATCHTOWER_PORT=6161
BUDIBASE_ENVIRONMENT=PRODUCTION
# An admin user can be automatically created initially if these are set
@ -26,4 +25,4 @@ BB_ADMIN_USER_EMAIL=
BB_ADMIN_USER_PASSWORD=
# A path that is watched for plugin bundles. Any bundles found are imported automatically/
PLUGINS_DIR=
PLUGINS_DIR=

@ -78,11 +78,6 @@
"default": "6379",
"preset": true
},
{
"name": "WATCHTOWER_PORT",
"default": "6161",
"preset": true
},
{
"name": "BUDIBASE_ENVIRONMENT",
"default": "PRODUCTION",

@ -22,5 +22,4 @@ ENV APPS_UPSTREAM_URL=http://app-service:4002
ENV WORKER_UPSTREAM_URL=http://worker-service:4003
ENV MINIO_UPSTREAM_URL=http://minio-service:9000
ENV COUCHDB_UPSTREAM_URL=http://couchdb-service:5984
ENV WATCHTOWER_UPSTREAM_URL=http://watchtower-service:8080
ENV RESOLVER=127.0.0.11

@ -50,19 +50,6 @@ http {
ignore_invalid_headers off;
proxy_buffering off;
set $csp_default "default-src 'self'";
set $csp_script "script-src 'self' 'unsafe-inline' 'unsafe-eval' https://*.budibase.net https://cdn.budi.live https://js.intercomcdn.com https://widget.intercom.io https://d2l5prqdbvm3op.cloudfront.net https://us-assets.i.posthog.com";
set $csp_style "style-src 'self' 'unsafe-inline' https://cdn.jsdelivr.net https://fonts.googleapis.com https://rsms.me https://maxcdn.bootstrapcdn.com";
set $csp_object "object-src 'none'";
set $csp_base_uri "base-uri 'self'";
set $csp_connect "connect-src 'self' https://*.budibase.app https://*.budibaseqa.app https://*.budibase.net https://api-iam.intercom.io https://api-iam.intercom.io https://api-ping.intercom.io https://app.posthog.com https://us.i.posthog.com wss://nexus-websocket-a.intercom.io wss://nexus-websocket-b.intercom.io https://nexus-websocket-a.intercom.io https://nexus-websocket-b.intercom.io https://uploads.intercomcdn.com https://uploads.intercomusercontent.com https://*.amazonaws.com https://*.s3.amazonaws.com https://*.s3.us-east-2.amazonaws.com https://*.s3.us-east-1.amazonaws.com https://*.s3.us-west-1.amazonaws.com https://*.s3.us-west-2.amazonaws.com https://*.s3.af-south-1.amazonaws.com https://*.s3.ap-east-1.amazonaws.com https://*.s3.ap-southeast-3.amazonaws.com https://*.s3.ap-south-1.amazonaws.com https://*.s3.ap-northeast-3.amazonaws.com https://*.s3.ap-northeast-2.amazonaws.com https://*.s3.ap-southeast-1.amazonaws.com https://*.s3.ap-southeast-2.amazonaws.com https://*.s3.ap-northeast-1.amazonaws.com https://*.s3.ca-central-1.amazonaws.com https://*.s3.cn-north-1.amazonaws.com https://*.s3.cn-northwest-1.amazonaws.com https://*.s3.eu-central-1.amazonaws.com https://*.s3.eu-west-1.amazonaws.com https://*.s3.eu-west-2.amazonaws.com https://*.s3.eu-south-1.amazonaws.com https://*.s3.eu-west-3.amazonaws.com https://*.s3.eu-north-1.amazonaws.com https://*.s3.sa-east-1.amazonaws.com https://*.s3.me-south-1.amazonaws.com https://*.s3.us-gov-east-1.amazonaws.com https://*.s3.us-gov-west-1.amazonaws.com https://api.github.com";
set $csp_font "font-src 'self' data: https://cdn.jsdelivr.net https://fonts.gstatic.com https://rsms.me https://maxcdn.bootstrapcdn.com https://js.intercomcdn.com https://fonts.intercomcdn.com";
set $csp_frame "frame-src 'self' https:";
set $csp_img "img-src http: https: data: blob:";
set $csp_manifest "manifest-src 'self'";
set $csp_media "media-src 'self' https://js.intercomcdn.com https://cdn.budi.live";
set $csp_worker "worker-src blob:";
error_page 502 503 504 /error.html;
location = /error.html {
root /usr/share/nginx/html;
@ -73,7 +60,6 @@ http {
add_header X-Frame-Options SAMEORIGIN always;
add_header X-Content-Type-Options nosniff always;
add_header X-XSS-Protection "1; mode=block" always;
add_header Content-Security-Policy "${csp_default}; ${csp_script}; ${csp_style}; ${csp_object}; ${csp_base_uri}; ${csp_connect}; ${csp_font}; ${csp_frame}; ${csp_img}; ${csp_manifest}; ${csp_media}; ${csp_worker};" always;
add_header Strict-Transport-Security "max-age=31536000; includeSubDomains; preload" always;
# upstreams
@ -81,7 +67,6 @@ http {
set $worker ${WORKER_UPSTREAM_URL};
set $minio ${MINIO_UPSTREAM_URL};
set $couchdb ${COUCHDB_UPSTREAM_URL};
set $watchtower ${WATCHTOWER_UPSTREAM_URL};
location /health {
access_log off;
@ -107,10 +92,6 @@ http {
proxy_pass $apps;
}
location = /v1/update {
proxy_pass $watchtower;
}
location ~ ^/(builder|app_) {
proxy_http_version 1.1;
@ -125,6 +106,12 @@ http {
location ~ ^/api/(system|admin|global)/ {
proxy_set_header Host $host;
# Enable buffering for potentially large OIDC configs
proxy_buffering on;
proxy_buffer_size 16k;
proxy_buffers 4 32k;
proxy_pass $worker;
}

@ -12,7 +12,6 @@ let IMAGES = {
couch: "ibmcom/couchdb3",
curl: "curlimages/curl",
redis: "redis",
watchtower: "containrrr/watchtower",
}
if (IS_SINGLE_IMAGE) {
@ -53,4 +52,4 @@ if (!IS_SINGLE_IMAGE) {
copyFile(FILES.ENV)
// compress
execSync(`tar -czf bb-airgapped.tar.gz hosting/scripts/bb-airgapped`)
execSync(`tar -czf bb-airgapped.tar.gz hosting/scripts/bb-airgapped`)

@ -1,12 +1,7 @@
{
"$schema": "node_modules/lerna/schemas/lerna-schema.json",
"version": "3.1.0",
"version": "3.2.12",
"npmClient": "yarn",
"packages": [
"packages/*",
"!packages/account-portal",
"packages/account-portal/packages/*"
],
"concurrency": 20,
"command": {
"publish": {

@ -2,7 +2,6 @@
"$schema": "./node_modules/nx/schemas/nx-schema.json",
"tasksRunnerOptions": {
"default": {
"runner": "nx-cloud",
"options": {
"cacheableOperations": ["build", "test", "check:types"]
}

@ -9,6 +9,7 @@
"@types/node": "20.10.0",
"@types/proper-lockfile": "^4.1.4",
"@typescript-eslint/parser": "6.9.0",
"depcheck": "^1.4.7",
"esbuild": "^0.18.17",
"esbuild-node-externals": "^1.14.0",
"eslint": "^8.52.0",
@ -24,22 +25,22 @@
"prettier": "2.8.8",
"prettier-plugin-svelte": "^2.3.0",
"proper-lockfile": "^4.1.2",
"svelte": "^4.2.10",
"svelte": "4.2.19",
"svelte-eslint-parser": "^0.33.1",
"typescript": "5.5.2",
"typescript-eslint": "^7.3.1",
"yargs": "^17.7.2"
"yargs": "^17.7.2",
"cross-spawn": "7.0.6"
},
"scripts": {
"get-past-client-version": "node scripts/getPastClientVersion.js",
"setup": "git config submodule.recurse true && git submodule update && node ./hosting/scripts/setup.js && yarn && yarn build && yarn dev",
"build": "DISABLE_V8_COMPILE_CACHE=1 NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream",
"build:apps": "DISABLE_V8_COMPILE_CACHE=1 yarn build --scope @budibase/server --scope @budibase/worker",
"build:oss": "DISABLE_V8_COMPILE_CACHE=1 NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream --ignore @budibase/account-portal-server --ignore @budibase/account-portal-ui",
"build:cli": "yarn build --scope @budibase/cli",
"build:account-portal": "NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream --scope @budibase/account-portal-server --scope @budibase/account-portal-ui",
"build:dev": "lerna run --stream prebuild && yarn nx run-many --target=build --output-style=dynamic --watch --preserveWatchOutput",
"check:types": "lerna run --concurrency 2 check:types --ignore @budibase/account-portal-server",
"check:types": "yarn check:dependencies && lerna run --concurrency 2 check:types",
"check:dependencies": "lerna run --concurrency 2 check:dependencies",
"build:sdk": "lerna run --stream build:sdk",
"deps:circular": "madge packages/server/dist/index.js packages/worker/src/index.ts packages/backend-core/dist/src/index.js packages/cli/src/index.js --circular",
"release": "lerna publish from-package --yes --force-publish --no-git-tag-version --no-push --no-git-reset",
@ -52,15 +53,12 @@
"kill-server": "kill-port 4001 4002",
"kill-accountportal": "kill-port 3001 4003",
"kill-all": "yarn run kill-builder && yarn run kill-server && yarn kill-accountportal",
"dev": "yarn run kill-all && lerna run --parallel prebuild && lerna run --stream dev --ignore=@budibase/account-portal-ui --ignore @budibase/account-portal-server",
"dev:noserver": "yarn run kill-builder && lerna run --stream dev:stack:up --ignore @budibase/account-portal-server && lerna run --stream dev --ignore @budibase/backend-core --ignore @budibase/server --ignore @budibase/worker --ignore=@budibase/account-portal-ui --ignore @budibase/account-portal-server",
"dev": "yarn run kill-all && lerna run --parallel prebuild && lerna run --stream dev",
"dev:noserver": "yarn run kill-builder && lerna run --stream dev:stack:up && lerna run --stream dev --ignore @budibase/backend-core --ignore @budibase/server --ignore @budibase/worker",
"dev:server": "yarn run kill-server && lerna run --stream dev --scope @budibase/worker --scope @budibase/server",
"dev:accountportal": "yarn kill-accountportal && lerna run dev --stream --scope @budibase/account-portal-ui --scope @budibase/account-portal-server",
"dev:camunda": "./scripts/deploy-camunda.sh",
"dev:all": "yarn run kill-all && lerna run --stream dev",
"dev:built": "yarn run kill-all && cd packages/server && yarn dev:stack:up && cd ../../ && lerna run --stream dev:built",
"dev:docker": "./scripts/devDocker.sh",
"test": "lerna run --concurrency 1 --stream test --stream",
"test": "lerna run --concurrency 1 --stream test",
"test:containers:kill": "./scripts/killTestcontainers.sh",
"lint:eslint": "eslint packages --max-warnings=0",
"lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\"",
@ -98,9 +96,7 @@
},
"workspaces": {
"packages": [
"packages/*",
"!packages/account-portal",
"packages/account-portal/packages/*"
"packages/*"
]
},
"resolutions": {
@ -114,7 +110,7 @@
"semver": "7.5.3",
"http-cache-semantics": "4.1.1",
"msgpackr": "1.10.1",
"axios": "1.6.3",
"axios": "1.7.7",
"xml2js": "0.6.2",
"unset-value": "2.0.1",
"passport": "0.6.0",
@ -124,6 +120,5 @@
},
"engines": {
"node": ">=20.0.0 <21.0.0"
},
"dependencies": {}
}
}

@ -1 +0,0 @@
Subproject commit 9bef5d1656b4f3c991447ded6d65b0eba393a140

@ -1,6 +1,4 @@
*
!dist/**/*
dist/tsconfig.build.tsbuildinfo
!package.json
!src/**
!tests/**
!package.json

@ -9,6 +9,13 @@
"./tests": "./dist/tests/index.js",
"./*": "./dist/*.js"
},
"typesVersions": {
"*": {
"tests": [
"dist/tests/index.d.ts"
]
}
},
"author": "Budibase",
"license": "GPL-3.0",
"scripts": {
@ -17,6 +24,7 @@
"build": "tsc -p tsconfig.build.json --paths null && node ./scripts/build.js",
"build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput",
"check:types": "tsc -p tsconfig.json --noEmit --paths null --target es2020",
"check:dependencies": "node ../../scripts/depcheck.js",
"test": "bash scripts/test.sh",
"test:watch": "jest --watchAll"
},
@ -25,17 +33,21 @@
"@budibase/pouchdb-replication-stream": "1.2.11",
"@budibase/shared-core": "0.0.0",
"@budibase/types": "0.0.0",
"@techpass/passport-openidconnect": "0.3.3",
"aws-cloudfront-sign": "3.0.2",
"aws-sdk": "2.1030.0",
"aws-sdk": "2.1692.0",
"bcrypt": "5.1.0",
"bcryptjs": "2.4.3",
"bull": "4.10.1",
"correlation-id": "4.0.0",
"dd-trace": "5.2.0",
"dd-trace": "5.26.0",
"dotenv": "16.0.1",
"google-auth-library": "^8.0.1",
"google-spreadsheet": "npm:@budibase/google-spreadsheet@4.1.5",
"ioredis": "5.3.2",
"joi": "17.6.0",
"jsonwebtoken": "9.0.2",
"knex": "2.4.2",
"koa-passport": "^6.0.0",
"koa-pino-logger": "4.0.0",
"lodash": "4.17.21",
@ -46,17 +58,17 @@
"pino": "8.11.0",
"pino-http": "8.3.3",
"posthog-node": "4.0.1",
"pouchdb": "7.3.0",
"pouchdb-find": "7.2.2",
"pouchdb": "9.0.0",
"pouchdb-find": "9.0.0",
"redlock": "4.2.0",
"rotating-file-stream": "3.1.0",
"sanitize-s3-objectkey": "0.0.1",
"semver": "^7.5.4",
"tar-fs": "2.1.1",
"uuid": "^8.3.2",
"knex": "2.4.2"
"uuid": "^8.3.2"
},
"devDependencies": {
"@jest/types": "^29.6.3",
"@shopify/jest-koa-mocks": "5.1.1",
"@swc/core": "1.3.71",
"@swc/jest": "0.2.27",
@ -64,8 +76,9 @@
"@types/cookies": "0.7.8",
"@types/jest": "29.5.5",
"@types/lodash": "4.14.200",
"@types/node": "^22.9.0",
"@types/node-fetch": "2.6.4",
"@types/pouchdb": "6.4.0",
"@types/pouchdb": "6.4.2",
"@types/redlock": "4.0.7",
"@types/semver": "7.3.7",
"@types/tar-fs": "2.0.1",
@ -74,6 +87,7 @@
"ioredis-mock": "8.9.0",
"jest": "29.7.0",
"jest-serial-runner": "1.2.1",
"nock": "^13.5.6",
"pino-pretty": "10.0.0",
"pouchdb-adapter-memory": "7.2.2",
"testcontainers": "^10.7.2",

@ -1,7 +1,12 @@
import tk from "timekeeper"
import _ from "lodash"
import { DBTestConfiguration, generator, structures } from "../../../tests"
import {
DBTestConfiguration,
generator,
structures,
utils,
} from "../../../tests"
import { getDB } from "../../db"
import {
@ -10,15 +15,14 @@ import {
init,
} from "../docWritethrough"
import InMemoryQueue from "../../queue/inMemoryQueue"
const initialTime = Date.now()
async function waitForQueueCompletion() {
const queue: InMemoryQueue = DocWritethroughProcessor.queue as never
await queue.waitForCompletion()
await utils.queue.processMessages(DocWritethroughProcessor.queue)
}
beforeAll(() => utils.queue.useRealQueues())
describe("docWritethrough", () => {
beforeAll(() => {
init()
@ -67,7 +71,7 @@ describe("docWritethrough", () => {
const patch3 = generatePatchObject(3)
await docWritethrough.patch(patch3)
expect(await db.get(documentId)).toEqual({
expect(await db.tryGet(documentId)).toEqual({
_id: documentId,
...patch1,
...patch2,
@ -92,7 +96,7 @@ describe("docWritethrough", () => {
await waitForQueueCompletion()
expect(await db.get(documentId)).toEqual(
expect(await db.tryGet(documentId)).toEqual(
expect.objectContaining({
_id: documentId,
...patch1,
@ -117,7 +121,7 @@ describe("docWritethrough", () => {
await waitForQueueCompletion()
expect(date1).not.toEqual(date2)
expect(await db.get(documentId)).toEqual(
expect(await db.tryGet(documentId)).toEqual(
expect.objectContaining({
createdAt: date1.toISOString(),
updatedAt: date2.toISOString(),
@ -135,7 +139,7 @@ describe("docWritethrough", () => {
await docWritethrough.patch(patch2)
const keyToOverride = _.sample(Object.keys(patch1))!
expect(await db.get(documentId)).toEqual(
expect(await db.tryGet(documentId)).toEqual(
expect.objectContaining({
[keyToOverride]: patch1[keyToOverride],
})
@ -150,7 +154,7 @@ describe("docWritethrough", () => {
await docWritethrough.patch(patch3)
await waitForQueueCompletion()
expect(await db.get(documentId)).toEqual(
expect(await db.tryGet(documentId)).toEqual(
expect.objectContaining({
...patch1,
...patch2,
@ -180,14 +184,14 @@ describe("docWritethrough", () => {
await secondDocWritethrough.patch(doc2Patch2)
await waitForQueueCompletion()
expect(await db.get(docWritethrough.docId)).toEqual(
expect(await db.tryGet(docWritethrough.docId)).toEqual(
expect.objectContaining({
...doc1Patch,
...doc1Patch2,
})
)
expect(await db.get(secondDocWritethrough.docId)).toEqual(
expect(await db.tryGet(secondDocWritethrough.docId)).toEqual(
expect.objectContaining({
...doc2Patch,
...doc2Patch2,
@ -203,7 +207,7 @@ describe("docWritethrough", () => {
await docWritethrough.patch(initialPatch)
await waitForQueueCompletion()
expect(await db.get(documentId)).toEqual(
expect(await db.tryGet(documentId)).toEqual(
expect.objectContaining(initialPatch)
)
@ -214,10 +218,10 @@ describe("docWritethrough", () => {
await docWritethrough.patch(extraPatch)
await waitForQueueCompletion()
expect(await db.get(documentId)).toEqual(
expect(await db.tryGet(documentId)).toEqual(
expect.objectContaining(extraPatch)
)
expect(await db.get(documentId)).not.toEqual(
expect(await db.tryGet(documentId)).not.toEqual(
expect.objectContaining(initialPatch)
)
})
@ -242,7 +246,7 @@ describe("docWritethrough", () => {
expect(queueMessageSpy).toHaveBeenCalledTimes(5)
await waitForQueueCompletion()
expect(await db.get(documentId)).toEqual(
expect(await db.tryGet(documentId)).toEqual(
expect.objectContaining(patches)
)
@ -250,7 +254,7 @@ describe("docWritethrough", () => {
expect(queueMessageSpy).toHaveBeenCalledTimes(45)
await waitForQueueCompletion()
expect(await db.get(documentId)).toEqual(
expect(await db.tryGet(documentId)).toEqual(
expect.objectContaining(patches)
)
@ -258,20 +262,18 @@ describe("docWritethrough", () => {
expect(queueMessageSpy).toHaveBeenCalledTimes(55)
await waitForQueueCompletion()
expect(await db.get(documentId)).toEqual(
expect(await db.tryGet(documentId)).toEqual(
expect.objectContaining(patches)
)
})
})
// This is not yet supported
// eslint-disable-next-line jest/no-disabled-tests
it.skip("patches will execute in order", async () => {
it("patches will execute in order", async () => {
let incrementalValue = 0
const keyToOverride = generator.word()
async function incrementalPatches(count: number) {
for (let i = 0; i < count; i++) {
await docWritethrough.patch({ [keyToOverride]: incrementalValue++ })
await docWritethrough.patch({ [keyToOverride]: ++incrementalValue })
}
}
@ -279,13 +281,13 @@ describe("docWritethrough", () => {
await incrementalPatches(5)
await waitForQueueCompletion()
expect(await db.get(documentId)).toEqual(
expect(await db.tryGet(documentId)).toEqual(
expect.objectContaining({ [keyToOverride]: 5 })
)
await incrementalPatches(40)
await waitForQueueCompletion()
expect(await db.get(documentId)).toEqual(
expect(await db.tryGet(documentId)).toEqual(
expect.objectContaining({ [keyToOverride]: 45 })
)
})
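
The assertions in this file move from db.get to db.tryGet, which resolves to undefined for a missing document instead of rejecting with a 404. A minimal TypeScript sketch of that contract, assuming an implementation along these lines (not the actual Budibase code):

async function tryGet<T>(
  db: { get<T>(id: string): Promise<T> },
  id: string
): Promise<T | undefined> {
  try {
    return await db.get<T>(id)
  } catch (err: any) {
    // Missing documents surface as 404s from CouchDB; treat them as "not found".
    if (err.statusCode === 404) {
      return undefined
    }
    throw err
  }
}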

@ -10,7 +10,6 @@ import {
DatabaseQueryOpts,
DBError,
Document,
FeatureFlag,
isDocument,
RowResponse,
RowValue,
@ -27,7 +26,6 @@ import { SQLITE_DESIGN_DOC_ID } from "../../constants"
import { DDInstrumentedDatabase } from "../instrumentation"
import { checkSlashesInUrl } from "../../helpers"
import { sqlLog } from "../../sql/utils"
import { flags } from "../../features"
const DATABASE_NOT_FOUND = "Database does not exist."
@ -192,7 +190,7 @@ export class DatabaseImpl implements Database {
}
}
private async performCall<T>(call: DBCallback<T>): Promise<any> {
private async performCall<T>(call: DBCallback<T>): Promise<T> {
const db = this.getDb()
const fnc = await call(db)
try {
@ -456,10 +454,7 @@ export class DatabaseImpl implements Database {
}
async destroy() {
if (
(await flags.isEnabled(FeatureFlag.SQS)) &&
(await this.exists(SQLITE_DESIGN_DOC_ID))
) {
if (await this.exists(SQLITE_DESIGN_DOC_ID)) {
// delete the design document, then run the cleanup operation
const definition = await this.get<SQLiteDefinition>(SQLITE_DESIGN_DOC_ID)
// remove all tables - save the definition then trigger a cleanup
@ -472,7 +467,7 @@ export class DatabaseImpl implements Database {
} catch (err: any) {
// didn't exist, don't worry
if (err.statusCode === 404) {
return
return { ok: true }
} else {
throw new CouchDBError(err.message, err)
}

@ -27,7 +27,7 @@ export class DDInstrumentedDatabase implements Database {
exists(docId?: string): Promise<boolean> {
return tracer.trace("db.exists", span => {
span?.addTags({ db_name: this.name, doc_id: docId })
span.addTags({ db_name: this.name, doc_id: docId })
if (docId) {
return this.db.exists(docId)
}
@ -37,15 +37,17 @@ export class DDInstrumentedDatabase implements Database {
get<T extends Document>(id?: string | undefined): Promise<T> {
return tracer.trace("db.get", span => {
span?.addTags({ db_name: this.name, doc_id: id })
span.addTags({ db_name: this.name, doc_id: id })
return this.db.get(id)
})
}
tryGet<T extends Document>(id?: string | undefined): Promise<T | undefined> {
return tracer.trace("db.tryGet", span => {
span?.addTags({ db_name: this.name, doc_id: id })
return this.db.tryGet(id)
return tracer.trace("db.tryGet", async span => {
span.addTags({ db_name: this.name, doc_id: id })
const doc = await this.db.tryGet<T>(id)
span.addTags({ doc_found: doc !== undefined })
return doc
})
}
@ -53,13 +55,15 @@ export class DDInstrumentedDatabase implements Database {
ids: string[],
opts?: { allowMissing?: boolean | undefined } | undefined
): Promise<T[]> {
return tracer.trace("db.getMultiple", span => {
span?.addTags({
return tracer.trace("db.getMultiple", async span => {
span.addTags({
db_name: this.name,
num_docs: ids.length,
allow_missing: opts?.allowMissing,
})
return this.db.getMultiple(ids, opts)
const docs = await this.db.getMultiple<T>(ids, opts)
span.addTags({ num_docs_found: docs.length })
return docs
})
}
@ -69,12 +73,14 @@ export class DDInstrumentedDatabase implements Database {
idOrDoc: string | Document,
rev?: string
): Promise<DocumentDestroyResponse> {
return tracer.trace("db.remove", span => {
span?.addTags({ db_name: this.name, doc_id: idOrDoc })
return tracer.trace("db.remove", async span => {
span.addTags({ db_name: this.name, doc_id: idOrDoc, rev })
const isDocument = typeof idOrDoc === "object"
const id = isDocument ? idOrDoc._id! : idOrDoc
rev = isDocument ? idOrDoc._rev : rev
return this.db.remove(id, rev)
const resp = await this.db.remove(id, rev)
span.addTags({ ok: resp.ok })
return resp
})
}
@ -83,7 +89,11 @@ export class DDInstrumentedDatabase implements Database {
opts?: { silenceErrors?: boolean }
): Promise<void> {
return tracer.trace("db.bulkRemove", span => {
span?.addTags({ db_name: this.name, num_docs: documents.length })
span.addTags({
db_name: this.name,
num_docs: documents.length,
silence_errors: opts?.silenceErrors,
})
return this.db.bulkRemove(documents, opts)
})
}
@ -92,15 +102,21 @@ export class DDInstrumentedDatabase implements Database {
document: AnyDocument,
opts?: DatabasePutOpts | undefined
): Promise<DocumentInsertResponse> {
return tracer.trace("db.put", span => {
span?.addTags({ db_name: this.name, doc_id: document._id })
return this.db.put(document, opts)
return tracer.trace("db.put", async span => {
span.addTags({
db_name: this.name,
doc_id: document._id,
force: opts?.force,
})
const resp = await this.db.put(document, opts)
span.addTags({ ok: resp.ok })
return resp
})
}
bulkDocs(documents: AnyDocument[]): Promise<DocumentBulkResponse[]> {
return tracer.trace("db.bulkDocs", span => {
span?.addTags({ db_name: this.name, num_docs: documents.length })
span.addTags({ db_name: this.name, num_docs: documents.length })
return this.db.bulkDocs(documents)
})
}
@ -108,9 +124,15 @@ export class DDInstrumentedDatabase implements Database {
allDocs<T extends Document | RowValue>(
params: DatabaseQueryOpts
): Promise<AllDocsResponse<T>> {
return tracer.trace("db.allDocs", span => {
span?.addTags({ db_name: this.name })
return this.db.allDocs(params)
return tracer.trace("db.allDocs", async span => {
span.addTags({ db_name: this.name, ...params })
const resp = await this.db.allDocs<T>(params)
span.addTags({
total_rows: resp.total_rows,
rows_length: resp.rows.length,
offset: resp.offset,
})
return resp
})
}
@ -118,57 +140,75 @@ export class DDInstrumentedDatabase implements Database {
viewName: string,
params: DatabaseQueryOpts
): Promise<AllDocsResponse<T>> {
return tracer.trace("db.query", span => {
span?.addTags({ db_name: this.name, view_name: viewName })
return this.db.query(viewName, params)
return tracer.trace("db.query", async span => {
span.addTags({ db_name: this.name, view_name: viewName, ...params })
const resp = await this.db.query<T>(viewName, params)
span.addTags({
total_rows: resp.total_rows,
rows_length: resp.rows.length,
offset: resp.offset,
})
return resp
})
}
destroy(): Promise<void | OkResponse> {
return tracer.trace("db.destroy", span => {
span?.addTags({ db_name: this.name })
return this.db.destroy()
destroy(): Promise<OkResponse> {
return tracer.trace("db.destroy", async span => {
span.addTags({ db_name: this.name })
const resp = await this.db.destroy()
span.addTags({ ok: resp.ok })
return resp
})
}
compact(): Promise<void | OkResponse> {
return tracer.trace("db.compact", span => {
span?.addTags({ db_name: this.name })
return this.db.compact()
compact(): Promise<OkResponse> {
return tracer.trace("db.compact", async span => {
span.addTags({ db_name: this.name })
const resp = await this.db.compact()
span.addTags({ ok: resp.ok })
return resp
})
}
dump(stream: Writable, opts?: DatabaseDumpOpts | undefined): Promise<any> {
return tracer.trace("db.dump", span => {
span?.addTags({ db_name: this.name })
span.addTags({
db_name: this.name,
batch_limit: opts?.batch_limit,
batch_size: opts?.batch_size,
style: opts?.style,
timeout: opts?.timeout,
num_doc_ids: opts?.doc_ids?.length,
view: opts?.view,
})
return this.db.dump(stream, opts)
})
}
load(...args: any[]): Promise<any> {
return tracer.trace("db.load", span => {
span?.addTags({ db_name: this.name })
span.addTags({ db_name: this.name, num_args: args.length })
return this.db.load(...args)
})
}
createIndex(...args: any[]): Promise<any> {
return tracer.trace("db.createIndex", span => {
span?.addTags({ db_name: this.name })
span.addTags({ db_name: this.name, num_args: args.length })
return this.db.createIndex(...args)
})
}
deleteIndex(...args: any[]): Promise<any> {
return tracer.trace("db.deleteIndex", span => {
span?.addTags({ db_name: this.name })
span.addTags({ db_name: this.name, num_args: args.length })
return this.db.deleteIndex(...args)
})
}
getIndexes(...args: any[]): Promise<any> {
return tracer.trace("db.getIndexes", span => {
span?.addTags({ db_name: this.name })
span.addTags({ db_name: this.name, num_args: args.length })
return this.db.getIndexes(...args)
})
}
@ -177,22 +217,27 @@ export class DDInstrumentedDatabase implements Database {
sql: string,
parameters?: SqlQueryBinding
): Promise<T[]> {
return tracer.trace("db.sql", span => {
span?.addTags({ db_name: this.name })
return this.db.sql(sql, parameters)
return tracer.trace("db.sql", async span => {
span.addTags({ db_name: this.name, num_bindings: parameters?.length })
const resp = await this.db.sql<T>(sql, parameters)
span.addTags({ num_rows: resp.length })
return resp
})
}
sqlPurgeDocument(docIds: string[] | string): Promise<void> {
return tracer.trace("db.sqlPurgeDocument", span => {
span?.addTags({ db_name: this.name })
span.addTags({
db_name: this.name,
num_docs: Array.isArray(docIds) ? docIds.length : 1,
})
return this.db.sqlPurgeDocument(docIds)
})
}
sqlDiskCleanup(): Promise<void> {
return tracer.trace("db.sqlDiskCleanup", span => {
span?.addTags({ db_name: this.name })
span.addTags({ db_name: this.name })
return this.db.sqlDiskCleanup()
})
}
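
The changes in this file follow a consistent pattern: make the traced callback async, tag the inputs before the call, then tag result metadata (row counts, ok flags, document counts) once the promise resolves. A generic sketch of that pattern, assuming dd-trace's tracer.trace API as it is used above:

import tracer from "dd-trace"

// Run an async DB call inside a span, tagging inputs up front and outputs on success.
function traced<T>(
  name: string,
  inputTags: Record<string, any>,
  fn: () => Promise<T>,
  outputTags: (result: T) => Record<string, any>
): Promise<T> {
  return tracer.trace(name, async span => {
    span.addTags(inputTags)
    const result = await fn()
    span.addTags(outputTags(result))
    return result
  })
}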

@ -1,6 +1,7 @@
import { existsSync, readFileSync } from "fs"
import { ServiceType } from "@budibase/types"
import { cloneDeep } from "lodash"
import { createSecretKey } from "crypto"
function isTest() {
return isJest()
@ -18,6 +19,12 @@ function isDev() {
return process.env.NODE_ENV !== "production"
}
function parseIntSafe(number?: string) {
if (number) {
return parseInt(number)
}
}
let LOADED = false
if (!LOADED && isDev() && !isTest()) {
require("dotenv").config()
@ -126,8 +133,12 @@ const environment = {
},
BUDIBASE_ENVIRONMENT: process.env.BUDIBASE_ENVIRONMENT,
JS_BCRYPT: process.env.JS_BCRYPT,
JWT_SECRET: process.env.JWT_SECRET,
JWT_SECRET_FALLBACK: process.env.JWT_SECRET_FALLBACK,
JWT_SECRET: process.env.JWT_SECRET
? createSecretKey(Buffer.from(process.env.JWT_SECRET))
: undefined,
JWT_SECRET_FALLBACK: process.env.JWT_SECRET_FALLBACK
? createSecretKey(Buffer.from(process.env.JWT_SECRET_FALLBACK))
: undefined,
ENCRYPTION_KEY: process.env.ENCRYPTION_KEY,
API_ENCRYPTION_KEY: getAPIEncryptionKey(),
COUCH_DB_URL: process.env.COUCH_DB_URL || "http://localhost:4005",
@ -225,6 +236,8 @@ const environment = {
OPENAI_API_KEY: process.env.OPENAI_API_KEY,
MIN_VERSION_WITHOUT_POWER_ROLE:
process.env.MIN_VERSION_WITHOUT_POWER_ROLE || "3.0.0",
DISABLE_CONTENT_SECURITY_POLICY: process.env.DISABLE_CONTENT_SECURITY_POLICY,
BSON_BUFFER_SIZE: parseIntSafe(process.env.BSON_BUFFER_SIZE),
}
export function setEnv(newEnvVars: Partial<typeof environment>): () => void {
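
JWT_SECRET and JWT_SECRET_FALLBACK are now loaded as KeyObjects via Node's crypto.createSecretKey rather than kept as raw strings. A small sketch of what that means for consumers, using plain Node crypto (the secret value below is illustrative only); code that still needs the string form can export it, as the encryption test further down does:

import { createSecretKey, createHmac } from "crypto"

// Illustrative value: in the real config this comes from process.env.JWT_SECRET.
const jwtSecret = createSecretKey(Buffer.from("example-jwt-secret"))

// A KeyObject can be handed straight to Node's crypto primitives...
const signature = createHmac("sha256", jwtSecret).update("payload").digest("hex")

// ...and the raw string can still be recovered where needed.
const raw = jwtSecret.export().toString()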

@ -267,12 +267,10 @@ export class FlagSet<V extends Flag<any>, T extends { [key: string]: V }> {
// All of the machinery in this file is to make sure that flags have their
// default values set correctly and their types flow through the system.
export const flags = new FlagSet({
[FeatureFlag.DEFAULT_VALUES]: Flag.boolean(env.isDev()),
[FeatureFlag.AUTOMATION_BRANCHING]: Flag.boolean(env.isDev()),
[FeatureFlag.SQS]: Flag.boolean(true),
[FeatureFlag.AI_CUSTOM_CONFIGS]: Flag.boolean(env.isDev()),
[FeatureFlag.ENRICHED_RELATIONSHIPS]: Flag.boolean(env.isDev()),
[FeatureFlag.BUDIBASE_AI]: Flag.boolean(env.isDev()),
[FeatureFlag.DEFAULT_VALUES]: Flag.boolean(true),
[FeatureFlag.AUTOMATION_BRANCHING]: Flag.boolean(true),
[FeatureFlag.AI_CUSTOM_CONFIGS]: Flag.boolean(true),
[FeatureFlag.BUDIBASE_AI]: Flag.boolean(true),
})
type UnwrapPromise<T> = T extends Promise<infer U> ? U : T

@ -0,0 +1,113 @@
import crypto from "crypto"
const CSP_DIRECTIVES = {
"default-src": ["'self'"],
"script-src": [
"'self'",
"'unsafe-eval'",
"https://*.budibase.net",
"https://cdn.budi.live",
"https://js.intercomcdn.com",
"https://widget.intercom.io",
"https://d2l5prqdbvm3op.cloudfront.net",
"https://us-assets.i.posthog.com",
],
"style-src": [
"'self'",
"'unsafe-inline'",
"https://cdn.jsdelivr.net",
"https://fonts.googleapis.com",
"https://rsms.me",
"https://maxcdn.bootstrapcdn.com",
],
"object-src": ["'none'"],
"base-uri": ["'self'"],
"connect-src": [
"'self'",
"https://*.budibase.app",
"https://*.budibaseqa.app",
"https://*.budibase.net",
"https://api-iam.intercom.io",
"https://api-ping.intercom.io",
"https://app.posthog.com",
"https://us.i.posthog.com",
"wss://nexus-websocket-a.intercom.io",
"wss://nexus-websocket-b.intercom.io",
"https://nexus-websocket-a.intercom.io",
"https://nexus-websocket-b.intercom.io",
"https://uploads.intercomcdn.com",
"https://uploads.intercomusercontent.com",
"https://*.amazonaws.com",
"https://*.s3.amazonaws.com",
"https://*.s3.us-east-2.amazonaws.com",
"https://*.s3.us-east-1.amazonaws.com",
"https://*.s3.us-west-1.amazonaws.com",
"https://*.s3.us-west-2.amazonaws.com",
"https://*.s3.af-south-1.amazonaws.com",
"https://*.s3.ap-east-1.amazonaws.com",
"https://*.s3.ap-south-1.amazonaws.com",
"https://*.s3.ap-northeast-2.amazonaws.com",
"https://*.s3.ap-southeast-1.amazonaws.com",
"https://*.s3.ap-southeast-2.amazonaws.com",
"https://*.s3.ap-northeast-1.amazonaws.com",
"https://*.s3.ca-central-1.amazonaws.com",
"https://*.s3.cn-north-1.amazonaws.com",
"https://*.s3.cn-northwest-1.amazonaws.com",
"https://*.s3.eu-central-1.amazonaws.com",
"https://*.s3.eu-west-1.amazonaws.com",
"https://*.s3.eu-west-2.amazonaws.com",
"https://*.s3.eu-south-1.amazonaws.com",
"https://*.s3.eu-west-3.amazonaws.com",
"https://*.s3.eu-north-1.amazonaws.com",
"https://*.s3.sa-east-1.amazonaws.com",
"https://*.s3.me-south-1.amazonaws.com",
"https://*.s3.us-gov-east-1.amazonaws.com",
"https://*.s3.us-gov-west-1.amazonaws.com",
"https://api.github.com",
],
"font-src": [
"'self'",
"data:",
"https://cdn.jsdelivr.net",
"https://fonts.gstatic.com",
"https://rsms.me",
"https://maxcdn.bootstrapcdn.com",
"https://js.intercomcdn.com",
"https://fonts.intercomcdn.com",
],
"frame-src": ["'self'", "https:"],
"img-src": ["http:", "https:", "data:", "blob:"],
"manifest-src": ["'self'"],
"media-src": [
"'self'",
"https://js.intercomcdn.com",
"https://cdn.budi.live",
],
"worker-src": ["blob:"],
}
export async function contentSecurityPolicy(ctx: any, next: any) {
try {
const nonce = crypto.randomBytes(16).toString("base64")
const directives = { ...CSP_DIRECTIVES }
directives["script-src"] = [
...CSP_DIRECTIVES["script-src"],
`'nonce-${nonce}'`,
]
ctx.state.nonce = nonce
const cspHeader = Object.entries(directives)
.map(([key, sources]) => `${key} ${sources.join(" ")}`)
.join("; ")
ctx.set("Content-Security-Policy", cspHeader)
await next()
} catch (err: any) {
console.error(
`Error occurred in Content-Security-Policy middleware: ${err}`
)
}
}
export default contentSecurityPolicy
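
A minimal usage sketch for the middleware above on a Koa app, applying the per-request nonce to an inline script (the app and response body here are illustrative, not taken from the Budibase codebase):

import Koa from "koa"
import contentSecurityPolicy from "./contentSecurityPolicy"

const app = new Koa()
app.use(contentSecurityPolicy)

app.use(async ctx => {
  // Inline scripts must carry the request's nonce to satisfy the script-src directive.
  ctx.body = `<script nonce="${ctx.state.nonce}">console.log("ok")</script>`
})

app.listen(3000)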

@ -19,5 +19,6 @@ export { default as pino } from "../logging/pino/middleware"
export { default as correlation } from "../logging/correlation/middleware"
export { default as errorHandling } from "./errorHandling"
export { default as querystringToBody } from "./querystringToBody"
export { default as csp } from "./contentSecurityPolicy"
export * as joiValidator from "./joi-validator"
export { default as ip } from "./ip"

@ -0,0 +1,75 @@
import crypto from "crypto"
import contentSecurityPolicy from "../contentSecurityPolicy"
jest.mock("crypto", () => ({
randomBytes: jest.fn(),
randomUUID: jest.fn(),
}))
describe("contentSecurityPolicy middleware", () => {
let ctx: any
let next: any
const mockNonce = "mocked/nonce"
beforeEach(() => {
ctx = {
state: {},
set: jest.fn(),
}
next = jest.fn()
// @ts-ignore
crypto.randomBytes.mockReturnValue(Buffer.from(mockNonce, "base64"))
})
afterEach(() => {
jest.clearAllMocks()
})
it("should generate a nonce and set it in the script-src directive", async () => {
await contentSecurityPolicy(ctx, next)
expect(ctx.state.nonce).toBe(mockNonce)
expect(ctx.set).toHaveBeenCalledWith(
"Content-Security-Policy",
expect.stringContaining(
`script-src 'self' 'unsafe-eval' https://*.budibase.net https://cdn.budi.live https://js.intercomcdn.com https://widget.intercom.io https://d2l5prqdbvm3op.cloudfront.net https://us-assets.i.posthog.com 'nonce-${mockNonce}'`
)
)
expect(next).toHaveBeenCalled()
})
it("should include all CSP directives in the header", async () => {
await contentSecurityPolicy(ctx, next)
const cspHeader = ctx.set.mock.calls[0][1]
expect(cspHeader).toContain("default-src 'self'")
expect(cspHeader).toContain("script-src 'self' 'unsafe-eval'")
expect(cspHeader).toContain("style-src 'self' 'unsafe-inline'")
expect(cspHeader).toContain("object-src 'none'")
expect(cspHeader).toContain("base-uri 'self'")
expect(cspHeader).toContain("connect-src 'self'")
expect(cspHeader).toContain("font-src 'self'")
expect(cspHeader).toContain("frame-src 'self'")
expect(cspHeader).toContain("img-src http: https: data: blob:")
expect(cspHeader).toContain("manifest-src 'self'")
expect(cspHeader).toContain("media-src 'self'")
expect(cspHeader).toContain("worker-src blob:")
})
it("should handle errors and log an error message", async () => {
const consoleSpy = jest.spyOn(console, "error").mockImplementation()
const error = new Error("Test error")
// @ts-ignore
crypto.randomBytes.mockImplementation(() => {
throw error
})
await contentSecurityPolicy(ctx, next)
expect(consoleSpy).toHaveBeenCalledWith(
`Error occurred in Content-Security-Policy middleware: ${error}`
)
expect(next).not.toHaveBeenCalled()
consoleSpy.mockRestore()
})
})

@ -1,5 +1,5 @@
import events from "events"
import { newid, timeout } from "../utils"
import { newid } from "../utils"
import { Queue, QueueOptions, JobOptions } from "./queue"
interface JobMessage {
@ -141,7 +141,7 @@ class InMemoryQueue implements Partial<Queue> {
} else {
pushMessage()
}
return {} as any
return { id: jobId } as any
}
/**
@ -184,16 +184,6 @@ class InMemoryQueue implements Partial<Queue> {
// do nothing
return this as any
}
async waitForCompletion() {
do {
await timeout(50)
} while (this.hasRunningJobs())
}
hasRunningJobs() {
return this._addCount > this._runCount
}
}
export default InMemoryQueue

@ -15,7 +15,7 @@ const QUEUE_LOCK_MS = Duration.fromMinutes(5).toMs()
const QUEUE_LOCK_RENEW_INTERNAL_MS = Duration.fromSeconds(30).toMs()
// cleanup the queue every 60 seconds
const CLEANUP_PERIOD_MS = Duration.fromSeconds(60).toMs()
let QUEUES: BullQueue.Queue[] | InMemoryQueue[] = []
let QUEUES: BullQueue.Queue[] = []
let cleanupInterval: NodeJS.Timeout
async function cleanup() {
@ -45,11 +45,18 @@ export function createQueue<T>(
if (opts.jobOptions) {
queueConfig.defaultJobOptions = opts.jobOptions
}
let queue: any
let queue: BullQueue.Queue<T>
if (!env.isTest()) {
queue = new BullQueue(jobQueue, queueConfig)
} else if (
process.env.BULL_TEST_REDIS_PORT &&
!isNaN(+process.env.BULL_TEST_REDIS_PORT)
) {
queue = new BullQueue(jobQueue, {
redis: { host: "localhost", port: +process.env.BULL_TEST_REDIS_PORT },
})
} else {
queue = new InMemoryQueue(jobQueue, queueConfig)
queue = new InMemoryQueue(jobQueue, queueConfig) as any
}
addListeners(queue, jobQueue, opts?.removeStalledCb)
QUEUES.push(queue)

@ -4,7 +4,7 @@ import env from "../../environment"
describe("encryption", () => {
it("should throw an error if API encryption key is not set", () => {
const jwt = getSecret(SecretOption.API)
expect(jwt).toBe(env.JWT_SECRET)
expect(jwt).toBe(env.JWT_SECRET?.export().toString())
})
it("should throw an error if encryption key is not set", () => {

@ -4,3 +4,4 @@ export { generator } from "./structures"
export * as testContainerUtils from "./testContainerUtils"
export * as utils from "./utils"
export * from "./jestUtils"
export * as queue from "./queue"

@ -0,0 +1,9 @@
import { Queue } from "bull"
export async function processMessages(queue: Queue) {
do {
await queue.whenCurrentJobsFinished()
} while (await queue.count())
await queue.whenCurrentJobsFinished()
}
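
A usage sketch for processMessages in a Jest test, assuming a real Bull queue is reachable (the queue name, Redis URL, and import path are illustrative):

import Queue from "bull"
import { processMessages } from "./queue"

it("drains pending jobs before asserting", async () => {
  const queue = new Queue("test-queue", "redis://127.0.0.1:6379")
  await queue.add({ docId: "doc_1" })

  // Block until everything enqueued so far has been picked up and completed.
  await processMessages(queue)

  expect(await queue.count()).toBe(0)
})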

@ -1,4 +1,6 @@
import { execSync } from "child_process"
import { cloneDeep } from "lodash"
import { GenericContainer, StartedTestContainer } from "testcontainers"
const IPV4_PORT_REGEX = new RegExp(`0\\.0\\.0\\.0:(\\d+)->(\\d+)/tcp`, "g")
@ -106,3 +108,58 @@ export function setupEnv(...envs: any[]) {
}
}
}
export async function startContainer(container: GenericContainer) {
const imageName = (container as any).imageName.string as string
let key: string = imageName
if (imageName.includes("@sha256")) {
key = imageName.split("@")[0]
}
key = key.replace(/\//g, "-").replace(/:/g, "-")
container = container
.withReuse()
.withLabels({ "com.budibase": "true" })
.withName(`${key}_testcontainer`)
let startedContainer: StartedTestContainer | undefined = undefined
let lastError = undefined
for (let i = 0; i < 10; i++) {
try {
// container.start() is not an idempotent operation, calling `start`
// modifies the internal state of a GenericContainer instance such that
// the hash it uses to determine reuse changes. We need to clone the
// container before calling start to ensure that we're using the same
// reuse hash every time.
const containerCopy = cloneDeep(container)
startedContainer = await containerCopy.start()
lastError = undefined
break
} catch (e: any) {
lastError = e
await new Promise(resolve => setTimeout(resolve, 1000))
}
}
if (!startedContainer) {
if (lastError) {
throw lastError
}
throw new Error(`failed to start container: ${imageName}`)
}
const info = getContainerById(startedContainer.getId())
if (!info) {
throw new Error("Container not found")
}
// Some Docker runtimes, when you expose a port, will bind it to both
// 127.0.0.1 and ::1, so ipv4 and ipv6. The port spaces of ipv4 and ipv6
// addresses are not shared, and testcontainers will sometimes give you back
// the ipv6 port. There's no way to know that this has happened, and if you
// try to then connect to `localhost:port` you may attempt to bind to the v4
// address which could be unbound or even an entirely different container. For
// that reason, we don't use testcontainers' `getExposedPort` function,
// preferring instead our own method that guaranteed v4 ports.
return getExposedV4Ports(info)
}

@ -1 +1,2 @@
export * as time from "./time"
export * as queue from "./queue"

@ -0,0 +1,27 @@
import { Queue } from "bull"
import { GenericContainer, Wait } from "testcontainers"
import { startContainer } from "../testContainerUtils"
export async function useRealQueues() {
const ports = await startContainer(
new GenericContainer("redis")
.withExposedPorts(6379)
.withWaitStrategy(
Wait.forSuccessfulCommand(`redis-cli`).withStartupTimeout(10000)
)
)
const port = ports.find(x => x.container === 6379)?.host
if (!port) {
throw new Error("Redis port not found")
}
process.env.BULL_TEST_REDIS_PORT = port.toString()
}
export async function processMessages(queue: Queue) {
do {
await queue.whenCurrentJobsFinished()
} while (await queue.count())
await queue.whenCurrentJobsFinished()
}

@ -81,6 +81,7 @@
"@spectrum-css/typography": "3.0.1",
"@spectrum-css/underlay": "2.0.9",
"@spectrum-css/vars": "3.0.1",
"atrament": "^4.3.0",
"dayjs": "^1.10.8",
"easymde": "^2.16.1",
"svelte-dnd-action": "^0.9.8",

@ -8,6 +8,7 @@
import Link from "../../Link/Link.svelte"
import Tag from "../../Tags/Tag.svelte"
import Tags from "../../Tags/Tags.svelte"
import ProgressCircle from "../../ProgressCircle/ProgressCircle.svelte"
const BYTES_IN_KB = 1000
const BYTES_IN_MB = 1000000
@ -39,12 +40,14 @@
"jfif",
"webp",
]
const fieldId = id || uuid()
let selectedImageIdx = 0
let fileDragged = false
let selectedUrl
let fileInput
let loading = false
$: selectedImage = value?.[selectedImageIdx] ?? null
$: fileCount = value?.length ?? 0
$: isImage =
@ -86,10 +89,15 @@
}
if (processFiles) {
const processedFiles = await processFiles(fileList)
const newValue = [...value, ...processedFiles]
dispatch("change", newValue)
selectedImageIdx = newValue.length - 1
loading = true
try {
const processedFiles = await processFiles(fileList)
const newValue = [...value, ...processedFiles]
dispatch("change", newValue)
selectedImageIdx = newValue.length - 1
} finally {
loading = false
}
} else {
dispatch("change", fileList)
}
@ -227,7 +235,7 @@
{#if showDropzone}
<div
class="spectrum-Dropzone"
class:disabled
class:disabled={disabled || loading}
role="region"
tabindex="0"
on:dragover={handleDragOver}
@ -241,7 +249,7 @@
id={fieldId}
{disabled}
type="file"
multiple
multiple={maximum !== 1}
accept={extensions}
bind:this={fileInput}
on:change={handleFile}
@ -339,6 +347,12 @@
{/if}
{/if}
</div>
{#if loading}
<div class="loading">
<ProgressCircle size="M" />
</div>
{/if}
</div>
{/if}
</div>
@ -464,6 +478,7 @@
.spectrum-Dropzone {
height: 220px;
position: relative;
}
.compact .spectrum-Dropzone {
height: 40px;
@ -488,4 +503,14 @@
.tag {
margin-top: 8px;
}
.loading {
position: absolute;
display: grid;
place-items: center;
height: 100%;
width: 100%;
top: 0;
left: 0;
}
</style>

@ -1,4 +1,5 @@
<script>
import { tick } from "svelte"
import {
ModalContent,
TextArea,
@ -8,7 +9,6 @@
import { automationStore, selectedAutomation } from "stores/builder"
import AutomationBlockSetup from "../../SetupPanel/AutomationBlockSetup.svelte"
import { cloneDeep } from "lodash/fp"
import { memo } from "@budibase/frontend-core"
import { AutomationEventType } from "@budibase/types"
let failedParse = null
@ -63,8 +63,7 @@
return true
}
const memoTestData = memo(parseTestData($selectedAutomation.data.testData))
$: memoTestData.set(parseTestData($selectedAutomation.data.testData))
$: testData = testData || parseTestData($selectedAutomation.data.testData)
$: {
// clone the trigger so we're not mutating the reference
@ -83,7 +82,7 @@
$: isError =
!isTriggerValid(trigger) ||
!(trigger.schema.outputs.required || []).every(
required => $memoTestData?.[required] || required !== "row"
required => testData?.[required] || required !== "row"
)
function parseTestJSON(e) {
@ -110,11 +109,10 @@
}
const testAutomation = async () => {
// Ensure testData reactiveness is processed
await tick()
try {
await automationStore.actions.test(
$selectedAutomation.data,
$memoTestData
)
await automationStore.actions.test($selectedAutomation.data, testData)
$automationStore.showTestPanel = true
} catch (error) {
notifications.error(error)
@ -152,7 +150,7 @@
{#if selectedValues}
<div class="tab-content-padding">
<AutomationBlockSetup
testData={$memoTestData}
bind:testData
{schemaProperties}
isTestModal
block={trigger}

View File

@ -503,7 +503,15 @@
row: { "Active": true, "Order Id" : 14, ... }
})
*/
const onChange = Utils.sequential(async update => {
const onChange = async update => {
if (isTestModal) {
testData = update
}
updateAutomation(update)
}
const updateAutomation = Utils.sequential(async update => {
const request = cloneDeep(update)
// Process app trigger updates
if (isTrigger && !isTestModal) {

View File

@ -11,7 +11,6 @@
export let disabledPermissions = []
export let columns
export let fromRelationshipField
export let canSetRelationshipSchemas
const { datasource, dispatch } = getContext("grid")
@ -129,6 +128,8 @@
}
})
$: hasLinkColumns = columns.some(c => c.schema.type === FieldType.LINK)
async function toggleColumn(column, permission) {
const visible = permission !== FieldPermissions.HIDDEN
const readonly = permission === FieldPermissions.READONLY
@ -184,7 +185,7 @@
value={columnToPermissionOptions(column)}
options={column.options}
/>
{#if canSetRelationshipSchemas && column.schema.type === FieldType.LINK && columnToPermissionOptions(column) !== FieldPermissions.HIDDEN}
{#if column.schema.type === FieldType.LINK && columnToPermissionOptions(column) !== FieldPermissions.HIDDEN}
<div class="relationship-columns">
<ActionButton
on:click={e => {
@ -203,7 +204,7 @@
</div>
</div>
{#if canSetRelationshipSchemas}
{#if hasLinkColumns}
<Popover
on:close={() => (relationshipFieldName = null)}
open={relationshipFieldName}

View File

@ -10,8 +10,6 @@
import { getContext } from "svelte"
import { ActionButton } from "@budibase/bbui"
import ColumnsSettingContent from "./ColumnsSettingContent.svelte"
import { isEnabled } from "helpers/featureFlags"
import { FeatureFlag } from "@budibase/types"
import DetailPopover from "components/common/DetailPopover.svelte"
const { tableColumns, datasource } = getContext("grid")
@ -46,9 +44,5 @@
{text}
</ActionButton>
</svelte:fragment>
<ColumnsSettingContent
columns={$tableColumns}
canSetRelationshipSchemas={isEnabled(FeatureFlag.ENRICHED_RELATIONSHIPS)}
{permissions}
/>
<ColumnsSettingContent columns={$tableColumns} {permissions} />
</DetailPopover>

View File

@ -371,6 +371,7 @@
delete editableColumn.relationshipType
delete editableColumn.formulaType
delete editableColumn.constraints
delete editableColumn.responseType
// Add in defaults and initial definition
const definition = fieldDefinitions[type?.toUpperCase()]
@ -386,6 +387,7 @@
editableColumn.relationshipType = RelationshipType.MANY_TO_MANY
} else if (editableColumn.type === FieldType.FORMULA) {
editableColumn.formulaType = "dynamic"
editableColumn.responseType = field.responseType || FIELDS.STRING.type
}
}
@ -767,6 +769,25 @@
</div>
</div>
{/if}
<div class="split-label">
<div class="label-length">
<Label size="M">Response Type</Label>
</div>
<div class="input-length">
<Select
bind:value={editableColumn.responseType}
options={[
FIELDS.STRING,
FIELDS.NUMBER,
FIELDS.BOOLEAN,
FIELDS.DATETIME,
]}
getOptionLabel={option => option.name}
getOptionValue={option => option.type}
tooltip="Formulas by default will return a string - however if you need another type the response can be coerced."
/>
</div>
</div>
<div class="split-label">
<div class="label-length">
<Label size="M">Formula</Label>

View File

@ -84,8 +84,8 @@
on:mouseleave
on:click={onClick}
on:contextmenu
ondragover="return false"
ondragenter="return false"
on:dragover={e => e.preventDefault()}
on:dragenter={e => e.preventDefault()}
{id}
{style}
{draggable}

View File

@ -68,8 +68,8 @@
on:scroll
bind:this={scrollRef}
on:drop={onDrop}
ondragover="return false"
ondragenter="return false"
on:dragover={e => e.preventDefault()}
on:dragenter={e => e.preventDefault()}
>
<slot />
</div>

View File

@ -4,12 +4,10 @@
Layout,
Heading,
Body,
Button,
Divider,
notifications,
Label,
Modal,
ModalContent,
Link,
} from "@budibase/bbui"
import { API } from "api"
import { auth, admin } from "stores/portal"
@ -21,8 +19,6 @@
let githubVersion
let githubPublishedDate
let githubPublishedTime
let needsUpdate = true
let updateModal
// Only admins allowed here
$: {
@ -31,21 +27,6 @@
}
}
async function updateBudibase() {
try {
notifications.info("Updating budibase..")
await fetch("/v1/update", {
headers: {
Authorization: "Bearer budibase",
},
})
notifications.success("Your budibase installation is up to date.")
getVersion()
} catch (err) {
notifications.error(`Error installing budibase update ${err}`)
}
}
async function getVersion() {
try {
version = await API.getBudibaseVersion()
@ -69,13 +50,6 @@
githubPublishedDate = new Date(githubResponse.published_at)
githubPublishedTime = githubPublishedDate.toLocaleTimeString()
githubPublishedDate = githubPublishedDate.toLocaleDateString()
//Does Budibase need to be updated?
if (githubVersion === version) {
needsUpdate = false
} else {
needsUpdate = true
}
} catch (error) {
notifications.error("Error getting the latest Budibase version")
githubVersion = null
@ -115,23 +89,15 @@
>
</Layout>
<Divider />
<div>
<Button cta on:click={updateModal.show} disabled={!needsUpdate}
>Update Budibase</Button
<Layout noPadding gap="XS">
<Heading>Updating Budibase</Heading>
<Body
>To update your self-host installation, follow the docs found <Link
size="L"
href="https://docs.budibase.com/docs/updating-budibase">here.</Link
></Body
>
<Modal bind:this={updateModal}>
<ModalContent
title="Update Budibase"
confirmText="Update"
onConfirm={updateBudibase}
>
<span
>Are you sure you want to update your budibase installation to the
latest version?</span
>
</ModalContent>
</Modal>
</div>
</Layout>
{/if}
</Layout>
{/if}

View File

@ -33,8 +33,7 @@
"sanitize-html": "^2.13.0",
"screenfull": "^6.0.1",
"shortid": "^2.2.15",
"svelte-spa-router": "^4.0.1",
"atrament": "^4.3.0"
"svelte-spa-router": "^4.0.1"
},
"devDependencies": {
"@rollup/plugin-alias": "^5.1.0",

View File

@ -1,5 +1,7 @@
import { makePropSafe as safe } from "@budibase/string-templates"
import { API } from "../api/index.js"
import { UILogicalOperator } from "@budibase/types"
import { OnEmptyFilter } from "@budibase/frontend-core/src/constants.js"
// Map of data types to component types for search fields inside blocks
const schemaComponentMap = {
@ -60,7 +62,11 @@ export const enrichSearchColumns = async (searchColumns, schema) => {
* @param formId the ID of the form containing the search fields
*/
export const enrichFilter = (filter, columns, formId) => {
let enrichedFilter = [...(filter || [])]
if (!columns?.length) {
return filter
}
let newFilters = []
columns?.forEach(column => {
const safePath = column.name.split(".").map(safe).join(".")
const stringType = column.type === "string" || column.type === "formula"
@ -69,7 +75,7 @@ export const enrichFilter = (filter, columns, formId) => {
// For dates, use a range of the entire day selected
if (dateType) {
enrichedFilter.push({
newFilters.push({
field: column.name,
type: column.type,
operator: "rangeLow",
@ -79,7 +85,7 @@ export const enrichFilter = (filter, columns, formId) => {
const format = "YYYY-MM-DDTHH:mm:ss.SSSZ"
let hbs = `{{ date (add (date ${binding} "x") 86399999) "${format}" }}`
hbs = `{{#if ${binding} }}${hbs}{{/if}}`
enrichedFilter.push({
newFilters.push({
field: column.name,
type: column.type,
operator: "rangeHigh",
@ -90,7 +96,7 @@ export const enrichFilter = (filter, columns, formId) => {
// For other fields, do an exact match
else {
enrichedFilter.push({
newFilters.push({
field: column.name,
type: column.type,
operator: stringType ? "string" : "equal",
@ -99,5 +105,16 @@ export const enrichFilter = (filter, columns, formId) => {
})
}
})
return enrichedFilter
return {
logicalOperator: UILogicalOperator.ALL,
onEmptyFilter: OnEmptyFilter.RETURN_ALL,
groups: [
...(filter?.groups || []),
{
logicalOperator: UILogicalOperator.ALL,
filters: newFilters,
},
],
}
}
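For illustration, here is a minimal sketch (not part of the commit) of the wrapper shape the rewritten enrichFilter now returns: the column-derived filters built in the loop above are collected into a single ALL group and appended after any groups on the incoming filter. The "name" entry and its binding value are hypothetical, and the enums reuse the imports added at the top of this file.
const exampleEnrichedFilter = {
  logicalOperator: UILogicalOperator.ALL,
  onEmptyFilter: OnEmptyFilter.RETURN_ALL,
  groups: [
    // ...groups from the incoming filter, if any, are spread in first
    {
      logicalOperator: UILogicalOperator.ALL,
      filters: [
        // one entry per search column; text columns use the "string" operator
        {
          field: "name",
          type: "string",
          operator: "string",
          value: "{{ binding }}", // placeholder - the real binding is built from the form ID and column path
        },
      ],
    },
  ],
}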

View File

@ -1,5 +1,21 @@
<script>
import TextCell from "./TextCell.svelte"
import DateCell from "./DateCell.svelte"
import NumberCell from "./NumberCell.svelte"
import BooleanCell from "./BooleanCell.svelte"
import { FieldType } from "@budibase/types"
export let schema
$: responseType = schema.responseType
</script>
<TextCell {...$$props} readonly />
{#if responseType === FieldType.NUMBER}
<NumberCell {...$$props} readonly />
{:else if responseType === FieldType.BOOLEAN}
<BooleanCell {...$$props} readonly />
{:else if responseType === FieldType.DATETIME}
<DateCell {...$$props} readonly />
{:else}
<TextCell {...$$props} readonly />
{/if}

View File

@ -53,6 +53,7 @@
on:close={close}
maxHeight={null}
resizable
minWidth={360}
>
<div class="content">
<slot />
@ -80,7 +81,6 @@
}
.content {
width: 300px;
padding: 20px;
display: flex;
flex-direction: column;

View File

@ -5,6 +5,7 @@ export default class NestedProviderFetch extends DataFetch {
// Nested providers should already have exposed their own schema
return {
schema: datasource?.value?.schema,
primaryDisplay: datasource?.value?.primaryDisplay,
}
}

@ -1 +1 @@
Subproject commit 04bee88597edb1edb88ed299d0597b587f0362ec
Subproject commit 25dd40ee12b048307b558ebcedb36548d6e042cd

View File

@ -13,6 +13,7 @@
"build": "node ./scripts/build.js",
"postbuild": "copyfiles -f ../client/dist/budibase-client.js ../client/manifest.json client && copyfiles -f ../../yarn.lock ./dist/",
"check:types": "tsc -p tsconfig.json --noEmit --paths null --target es2020",
"check:dependencies": "node ../../scripts/depcheck.js",
"build:isolated-vm-lib:snippets": "esbuild --minify --bundle src/jsRunner/bundles/snippets.ts --outfile=src/jsRunner/bundles/snippets.ivm.bundle.js --platform=node --format=iife --global-name=snippets",
"build:isolated-vm-lib:string-templates": "esbuild --minify --bundle src/jsRunner/bundles/index-helpers.ts --outfile=src/jsRunner/bundles/index-helpers.ivm.bundle.js --platform=node --format=iife --external:handlebars --global-name=helpers",
"build:isolated-vm-lib:bson": "esbuild --minify --bundle src/jsRunner/bundles/bsonPackage.ts --outfile=src/jsRunner/bundles/bson.ivm.bundle.js --platform=node --format=iife --global-name=bson",
@ -49,9 +50,11 @@
"license": "GPL-3.0",
"dependencies": {
"@apidevtools/swagger-parser": "10.0.3",
"@azure/msal-node": "^2.5.1",
"@budibase/backend-core": "0.0.0",
"@budibase/client": "0.0.0",
"@budibase/frontend-core": "0.0.0",
"@budibase/nano": "10.1.5",
"@budibase/pro": "0.0.0",
"@budibase/shared-core": "0.0.0",
"@budibase/string-templates": "0.0.0",
@ -60,15 +63,17 @@
"@bull-board/koa": "5.10.2",
"@elastic/elasticsearch": "7.10.0",
"@google-cloud/firestore": "7.8.0",
"@koa/router": "8.0.8",
"@koa/cors": "5.0.0",
"@koa/router": "13.1.0",
"@socket.io/redis-adapter": "^8.2.1",
"@types/xml2js": "^0.4.14",
"airtable": "0.12.2",
"arangojs": "7.2.0",
"archiver": "7.0.1",
"aws-sdk": "2.1030.0",
"aws-sdk": "2.1692.0",
"bcrypt": "5.1.0",
"bcryptjs": "2.4.3",
"bson": "^6.9.0",
"buffer": "6.0.3",
"bull": "4.10.1",
"chokidar": "3.5.3",
@ -76,17 +81,20 @@
"cookies": "0.8.0",
"csvtojson": "2.0.10",
"curlconverter": "3.21.0",
"dd-trace": "5.2.0",
"dayjs": "^1.10.8",
"dd-trace": "5.26.0",
"dotenv": "8.2.0",
"form-data": "4.0.0",
"global-agent": "3.0.0",
"google-auth-library": "^8.0.1",
"google-spreadsheet": "npm:@budibase/google-spreadsheet@4.1.5",
"ioredis": "5.3.2",
"isolated-vm": "^4.7.2",
"jimp": "0.22.12",
"jimp": "1.1.4",
"joi": "17.6.0",
"js-yaml": "4.1.0",
"jsonschema": "1.4.0",
"jsonwebtoken": "9.0.2",
"knex": "2.4.2",
"koa": "2.13.4",
"koa-body": "4.2.0",
@ -97,7 +105,7 @@
"lodash": "4.17.21",
"memorystream": "0.3.1",
"mongodb": "6.7.0",
"mssql": "10.0.1",
"mssql": "11.0.1",
"mysql2": "3.9.8",
"node-fetch": "2.6.7",
"object-sizeof": "2.6.1",
@ -105,24 +113,28 @@
"openapi-types": "9.3.1",
"oracledb": "6.5.1",
"pg": "8.10.0",
"pouchdb": "7.3.0",
"pouchdb": "9.0.0",
"pouchdb-all-dbs": "1.1.1",
"pouchdb-find": "7.2.2",
"pouchdb-find": "9.0.0",
"redis": "4",
"semver": "^7.5.4",
"serialize-error": "^7.0.1",
"server-destroy": "1.0.1",
"snowflake-promise": "^4.5.0",
"socket.io": "4.7.5",
"snowflake-sdk": "^1.15.0",
"socket.io": "4.8.1",
"svelte": "^4.2.10",
"tar": "6.2.1",
"tmp": "0.2.3",
"to-json-schema": "0.2.5",
"uuid": "^8.3.2",
"validate.js": "0.13.1",
"worker-farm": "1.7.0",
"xml2js": "0.5.0"
"xml2js": "0.6.2"
},
"devDependencies": {
"@babel/core": "^7.22.5",
"@babel/preset-env": "7.16.11",
"@jest/types": "^29.6.3",
"@swc/core": "1.3.71",
"@swc/jest": "0.2.27",
"@types/archiver": "6.0.2",
@ -130,19 +142,24 @@
"@types/jest": "29.5.5",
"@types/koa": "2.13.4",
"@types/koa-send": "^4.1.6",
"@types/koa__router": "8.0.8",
"@types/koa__cors": "5.0.0",
"@types/koa__router": "12.0.4",
"@types/lodash": "4.14.200",
"@types/mssql": "9.1.4",
"@types/mssql": "9.1.5",
"@types/node": "^22.9.0",
"@types/node-fetch": "2.6.4",
"@types/oracledb": "6.5.1",
"@types/pg": "8.6.6",
"@types/pouchdb": "6.4.2",
"@types/server-destroy": "1.0.1",
"@types/supertest": "2.0.14",
"@types/tar": "6.1.5",
"@types/tmp": "0.2.6",
"@types/uuid": "8.3.4",
"chance": "^1.1.12",
"copyfiles": "2.4.1",
"docker-compose": "0.23.17",
"ioredis-mock": "8.9.0",
"jest": "29.7.0",
"jest-extended": "^4.0.2",
"jest-openapi": "0.14.2",

View File

@ -1,12 +1,12 @@
#!/bin/bash
set -e
set -ex
if [[ -n $CI ]]
then
export NODE_OPTIONS="--max-old-space-size=4096 --no-node-snapshot $NODE_OPTIONS"
jest --coverage --maxWorkers=4 --forceExit --workerIdleMemoryLimit=2000MB --bail $@
jest --coverage --maxWorkers=4 --forceExit --workerIdleMemoryLimit=2000MB --bail "$@"
else
# --maxWorkers performs better in development
export NODE_OPTIONS="--no-node-snapshot $NODE_OPTIONS"
jest --coverage --maxWorkers=2 --forceExit $@
jest --coverage --maxWorkers=2 --forceExit "$@"
fi

File diff suppressed because it is too large

View File

@ -23,6 +23,13 @@ components:
description: The ID of the table which this request is targeting.
schema:
type: string
viewId:
in: path
name: viewId
required: true
description: The ID of the view which this request is targeting.
schema:
type: string
rowId:
in: path
name: rowId
@ -36,7 +43,7 @@ components:
required: true
description: The ID of the app which this request is targeting.
schema:
default: "{{ appId }}"
default: "{{appId}}"
type: string
appIdUrl:
in: path
@ -44,7 +51,7 @@ components:
required: true
description: The ID of the app which this request is targeting.
schema:
default: "{{ appId }}"
default: "{{appId}}"
type: string
queryId:
in: path
@ -442,6 +449,74 @@ components:
# TYPE budibase_quota_limit_automations gauge
budibase_quota_limit_automations 9007199254740991
view:
value:
data:
name: peopleView
tableId: ta_896a325f7e8147d2a2cda93c5d236511
schema:
name:
visible: true
readonly: false
order: 1
width: 300
age:
visible: true
readonly: true
order: 2
width: 200
salary:
visible: false
readonly: false
query:
logicalOperator: all
onEmptyFilter: none
groups:
- logicalOperator: any
filters:
- operator: string
field: name
value: John
- operator: range
field: age
value:
low: 18
high: 100
primaryDisplay: name
views:
value:
data:
- name: peopleView
tableId: ta_896a325f7e8147d2a2cda93c5d236511
schema:
name:
visible: true
readonly: false
order: 1
width: 300
age:
visible: true
readonly: true
order: 2
width: 200
salary:
visible: false
readonly: false
query:
logicalOperator: all
onEmptyFilter: none
groups:
- logicalOperator: any
filters:
- operator: string
field: name
value: John
- operator: range
field: age
value:
low: 18
high: 100
primaryDisplay: name
securitySchemes:
ApiKeyAuth:
type: apiKey
@ -761,7 +836,6 @@ components:
enum:
- static
- dynamic
- ai
description: Defines whether this is a static or dynamic formula.
- type: object
properties:
@ -931,7 +1005,6 @@ components:
enum:
- static
- dynamic
- ai
description: Defines whether this is a static or dynamic formula.
- type: object
properties:
@ -1108,7 +1181,6 @@ components:
enum:
- static
- dynamic
- ai
description: Defines whether this is a static or dynamic formula.
- type: object
properties:
@ -1704,6 +1776,644 @@ components:
- userIds
required:
- data
view:
description: The view to be created/updated.
type: object
required:
- name
- schema
- tableId
properties:
name:
description: The name of the view.
type: string
tableId:
description: The ID of the table this view is based on.
type: string
type:
description: The type of view - standard (empty value) or calculation.
type: string
enum:
- calculation
primaryDisplay:
type: string
description: A column used to display rows from this view - usually used when
rendered in tables.
query:
description: Search parameters for view
type: object
required: []
properties:
logicalOperator:
description: When using groups this defines whether all of the filters must
match, or only one of them.
type: string
enum:
- all
- any
onEmptyFilter:
description: If no filters match, should the view return all rows, or no rows.
type: string
enum:
- all
- none
groups:
description: A grouping of filters to be applied.
type: array
items:
type: object
properties:
logicalOperator:
description: When using groups this defines whether all of the filters must
match, or only one of them.
type: string
enum:
- all
- any
filters:
description: A list of filters to apply
type: array
items:
type: object
properties:
operator:
type: string
description: The type of search operation which is being performed.
enum:
- equal
- notEqual
- empty
- notEmpty
- fuzzy
- string
- contains
- notContains
- containsAny
- oneOf
- range
field:
type: string
description: The field in the view to perform the search on.
value:
description: The value to search for - the type will depend on the operator in
use.
oneOf:
- type: string
- type: number
- type: boolean
- type: object
- type: array
groups:
description: A grouping of filters to be applied.
type: array
items:
type: object
properties:
logicalOperator:
description: When using groups this defines whether all of the filters must
match, or only one of them.
type: string
enum:
- all
- any
filters:
description: A list of filters to apply
type: array
items:
type: object
properties:
operator:
type: string
description: The type of search operation which is being performed.
enum:
- equal
- notEqual
- empty
- notEmpty
- fuzzy
- string
- contains
- notContains
- containsAny
- oneOf
- range
field:
type: string
description: The field in the view to perform the search on.
value:
description: The value to search for - the type will depend on the operator in
use.
oneOf:
- type: string
- type: number
- type: boolean
- type: object
- type: array
sort:
type: object
required:
- field
properties:
field:
type: string
description: The field from the table/view schema to sort on.
order:
type: string
description: The order in which to sort.
enum:
- ascending
- descending
type:
type: string
description: The type of sort to perform (by number, or alphabetically).
enum:
- string
- number
schema:
type: object
additionalProperties:
oneOf:
- type: object
properties:
visible:
type: boolean
description: Defines whether the column is visible or not - rows
retrieved/updated through this view will not be able to
access it.
readonly:
type: boolean
description: "When used in combination with 'visible: true' the column will be
visible in row responses but cannot be updated."
order:
type: integer
description: A number defining where the column shows up in tables, lowest being
first.
width:
type: integer
description: A width for the column, defined in pixels - this affects rendering
in tables.
column:
type: array
description: If this is a relationship column, we can set the columns we wish to
include
items:
type: object
properties:
readonly:
type: boolean
- type: object
properties:
calculationType:
type: string
description: This column should be built from a calculation, specifying a type
and field. It is important to note when a calculation is
configured all non-calculation columns will be used for
grouping.
enum:
- sum
- avg
- count
- min
- max
field:
type: string
description: The field from the table to perform the calculation on.
distinct:
type: boolean
description: Can be used in tandem with the count calculation type, to count
unique entries.
viewOutput:
type: object
properties:
data:
description: The view to be created/updated.
type: object
required:
- name
- schema
- tableId
- id
properties:
name:
description: The name of the view.
type: string
tableId:
description: The ID of the table this view is based on.
type: string
type:
description: The type of view - standard (empty value) or calculation.
type: string
enum:
- calculation
primaryDisplay:
type: string
description: A column used to display rows from this view - usually used when
rendered in tables.
query:
description: Search parameters for view
type: object
required: []
properties:
logicalOperator:
description: When using groups this defines whether all of the filters must
match, or only one of them.
type: string
enum:
- all
- any
onEmptyFilter:
description: If no filters match, should the view return all rows, or no rows.
type: string
enum:
- all
- none
groups:
description: A grouping of filters to be applied.
type: array
items:
type: object
properties:
logicalOperator:
description: When using groups this defines whether all of the filters must
match, or only one of them.
type: string
enum:
- all
- any
filters:
description: A list of filters to apply
type: array
items:
type: object
properties:
operator:
type: string
description: The type of search operation which is being performed.
enum:
- equal
- notEqual
- empty
- notEmpty
- fuzzy
- string
- contains
- notContains
- containsAny
- oneOf
- range
field:
type: string
description: The field in the view to perform the search on.
value:
description: The value to search for - the type will depend on the operator in
use.
oneOf:
- type: string
- type: number
- type: boolean
- type: object
- type: array
groups:
description: A grouping of filters to be applied.
type: array
items:
type: object
properties:
logicalOperator:
description: When using groups this defines whether all of the filters must
match, or only one of them.
type: string
enum:
- all
- any
filters:
description: A list of filters to apply
type: array
items:
type: object
properties:
operator:
type: string
description: The type of search operation which is being performed.
enum:
- equal
- notEqual
- empty
- notEmpty
- fuzzy
- string
- contains
- notContains
- containsAny
- oneOf
- range
field:
type: string
description: The field in the view to perform the search on.
value:
description: The value to search for - the type will depend on the operator in
use.
oneOf:
- type: string
- type: number
- type: boolean
- type: object
- type: array
sort:
type: object
required:
- field
properties:
field:
type: string
description: The field from the table/view schema to sort on.
order:
type: string
description: The order in which to sort.
enum:
- ascending
- descending
type:
type: string
description: The type of sort to perform (by number, or alphabetically).
enum:
- string
- number
schema:
type: object
additionalProperties:
oneOf:
- type: object
properties:
visible:
type: boolean
description: Defines whether the column is visible or not - rows
retrieved/updated through this view will not be able
to access it.
readonly:
type: boolean
description: "When used in combination with 'visible: true' the column will be
visible in row responses but cannot be updated."
order:
type: integer
description: A number defining where the column shows up in tables, lowest being
first.
width:
type: integer
description: A width for the column, defined in pixels - this affects rendering
in tables.
column:
type: array
description: If this is a relationship column, we can set the columns we wish to
include
items:
type: object
properties:
readonly:
type: boolean
- type: object
properties:
calculationType:
type: string
description: This column should be built from a calculation, specifying a type
and field. It is important to note when a calculation
is configured all non-calculation columns will be used
for grouping.
enum:
- sum
- avg
- count
- min
- max
field:
type: string
description: The field from the table to perform the calculation on.
distinct:
type: boolean
description: Can be used in tandem with the count calculation type, to count
unique entries.
id:
description: The ID of the view.
type: string
required:
- data
viewSearch:
type: object
properties:
data:
type: array
items:
description: The view to be created/updated.
type: object
required:
- name
- schema
- tableId
- id
properties:
name:
description: The name of the view.
type: string
tableId:
description: The ID of the table this view is based on.
type: string
type:
description: The type of view - standard (empty value) or calculation.
type: string
enum:
- calculation
primaryDisplay:
type: string
description: A column used to display rows from this view - usually used when
rendered in tables.
query:
description: Search parameters for view
type: object
required: []
properties:
logicalOperator:
description: When using groups this defines whether all of the filters must
match, or only one of them.
type: string
enum:
- all
- any
onEmptyFilter:
description: If no filters match, should the view return all rows, or no rows.
type: string
enum:
- all
- none
groups:
description: A grouping of filters to be applied.
type: array
items:
type: object
properties:
logicalOperator:
description: When using groups this defines whether all of the filters must
match, or only one of them.
type: string
enum:
- all
- any
filters:
description: A list of filters to apply
type: array
items:
type: object
properties:
operator:
type: string
description: The type of search operation which is being performed.
enum:
- equal
- notEqual
- empty
- notEmpty
- fuzzy
- string
- contains
- notContains
- containsAny
- oneOf
- range
field:
type: string
description: The field in the view to perform the search on.
value:
description: The value to search for - the type will depend on the operator in
use.
oneOf:
- type: string
- type: number
- type: boolean
- type: object
- type: array
groups:
description: A grouping of filters to be applied.
type: array
items:
type: object
properties:
logicalOperator:
description: When using groups this defines whether all of the filters must
match, or only one of them.
type: string
enum:
- all
- any
filters:
description: A list of filters to apply
type: array
items:
type: object
properties:
operator:
type: string
description: The type of search operation which is being performed.
enum:
- equal
- notEqual
- empty
- notEmpty
- fuzzy
- string
- contains
- notContains
- containsAny
- oneOf
- range
field:
type: string
description: The field in the view to perform the search on.
value:
description: The value to search for - the type will depend on the operator in
use.
oneOf:
- type: string
- type: number
- type: boolean
- type: object
- type: array
sort:
type: object
required:
- field
properties:
field:
type: string
description: The field from the table/view schema to sort on.
order:
type: string
description: The order in which to sort.
enum:
- ascending
- descending
type:
type: string
description: The type of sort to perform (by number, or alphabetically).
enum:
- string
- number
schema:
type: object
additionalProperties:
oneOf:
- type: object
properties:
visible:
type: boolean
description: Defines whether the column is visible or not - rows
retrieved/updated through this view will not be able
to access it.
readonly:
type: boolean
description: "When used in combination with 'visible: true' the column will be
visible in row responses but cannot be updated."
order:
type: integer
description: A number defining where the column shows up in tables, lowest being
first.
width:
type: integer
description: A width for the column, defined in pixels - this affects rendering
in tables.
column:
type: array
description: If this is a relationship column, we can set the columns we wish to
include
items:
type: object
properties:
readonly:
type: boolean
- type: object
properties:
calculationType:
type: string
description: This column should be built from a calculation, specifying a type
and field. It is important to note when a
calculation is configured all non-calculation
columns will be used for grouping.
enum:
- sum
- avg
- count
- min
- max
field:
type: string
description: The field from the table to perform the calculation on.
distinct:
type: boolean
description: Can be used in tandem with the count calculation type, to count
unique entries.
id:
description: The ID of the view.
type: string
required:
- data
security:
- ApiKeyAuth: []
paths:
@ -2136,6 +2846,32 @@ paths:
examples:
search:
$ref: "#/components/examples/rows"
"/views/{viewId}/rows/search":
post:
operationId: rowViewSearch
summary: Search for rows in a view
tags:
- rows
parameters:
- $ref: "#/components/parameters/viewId"
- $ref: "#/components/parameters/appId"
requestBody:
required: true
content:
application/json:
schema:
$ref: "#/components/schemas/rowSearch"
responses:
"200":
description: The response will contain an array of rows that match the search
parameters.
content:
application/json:
schema:
$ref: "#/components/schemas/searchOutput"
examples:
search:
$ref: "#/components/examples/rows"
/tables:
post:
operationId: tableCreate
@ -2359,4 +3095,123 @@ paths:
examples:
users:
$ref: "#/components/examples/users"
/views:
post:
operationId: viewCreate
summary: Create a view
description: Create a view; this can be against an internal or external table.
tags:
- views
parameters:
- $ref: "#/components/parameters/appId"
requestBody:
content:
application/json:
schema:
$ref: "#/components/schemas/view"
examples:
view:
$ref: "#/components/examples/view"
responses:
"200":
description: Returns the created view, including the ID which has been generated
for it.
content:
application/json:
schema:
$ref: "#/components/schemas/viewOutput"
examples:
view:
$ref: "#/components/examples/view"
"/views/{viewId}":
put:
operationId: viewUpdate
summary: Update a view
description: Update a view; this can be against an internal or external table.
tags:
- views
parameters:
- $ref: "#/components/parameters/viewId"
- $ref: "#/components/parameters/appId"
requestBody:
content:
application/json:
schema:
$ref: "#/components/schemas/view"
examples:
view:
$ref: "#/components/examples/view"
responses:
"200":
description: Returns the updated view.
content:
application/json:
schema:
$ref: "#/components/schemas/viewOutput"
examples:
view:
$ref: "#/components/examples/view"
delete:
operationId: viewDestroy
summary: Delete a view
description: Delete a view; this can be against an internal or external table.
tags:
- views
parameters:
- $ref: "#/components/parameters/viewId"
- $ref: "#/components/parameters/appId"
responses:
"200":
description: Returns the deleted view.
content:
application/json:
schema:
$ref: "#/components/schemas/viewOutput"
examples:
view:
$ref: "#/components/examples/view"
get:
operationId: viewGetById
summary: Retrieve a view
description: Look up a view; this could be internal or external.
tags:
- views
parameters:
- $ref: "#/components/parameters/viewId"
- $ref: "#/components/parameters/appId"
responses:
"200":
description: Returns the retrieved view.
content:
application/json:
schema:
$ref: "#/components/schemas/viewOutput"
examples:
view:
$ref: "#/components/examples/view"
/views/search:
post:
operationId: viewSearch
summary: Search for views
description: Based on view properties (currently only name) search for views.
tags:
- views
parameters:
- $ref: "#/components/parameters/appId"
requestBody:
required: true
content:
application/json:
schema:
$ref: "#/components/schemas/nameSearch"
responses:
"200":
description: Returns the found views, based on the search parameters.
content:
application/json:
schema:
$ref: "#/components/schemas/viewSearch"
examples:
views:
$ref: "#/components/examples/views"
tags: []
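As a usage illustration for the new endpoints above (not part of the commit), the following TypeScript sketch searches for a view and then queries its rows through the public API. The host and the header names are assumptions based on common Budibase public API conventions; adjust them for your deployment.
async function demoViewSearch() {
  const HOST = "https://budibase.example.com/api/public/v1" // hypothetical host
  const headers = {
    "Content-Type": "application/json",
    "x-budibase-api-key": "<api key>", // assumed auth header name
    "x-budibase-app-id": "<app id>", // assumed app scoping header
  }

  // Find the view by name via POST /views/search
  const views = await fetch(`${HOST}/views/search`, {
    method: "POST",
    headers,
    body: JSON.stringify({ name: "peopleView" }),
  }).then(r => r.json())

  // Search rows through the view via POST /views/{viewId}/rows/search
  const rows = await fetch(`${HOST}/views/${views.data[0].id}/rows/search`, {
    method: "POST",
    headers,
    body: JSON.stringify({
      query: { string: { name: "John" } }, // prefix match on the "name" column
      limit: 10,
    }),
  }).then(r => r.json())

  return rows.data
}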

View File

@ -8,6 +8,16 @@ export const tableId = {
},
}
export const viewId = {
in: "path",
name: "viewId",
required: true,
description: "The ID of the view which this request is targeting.",
schema: {
type: "string",
},
}
export const rowId = {
in: "path",
name: "rowId",

View File

@ -6,6 +6,7 @@ import user from "./user"
import metrics from "./metrics"
import misc from "./misc"
import roles from "./roles"
import view from "./view"
export const examples = {
...application.getExamples(),
@ -16,6 +17,7 @@ export const examples = {
...misc.getExamples(),
...metrics.getExamples(),
...roles.getExamples(),
...view.getExamples(),
}
export const schemas = {
@ -26,4 +28,5 @@ export const schemas = {
...user.getSchemas(),
...misc.getSchemas(),
...roles.getSchemas(),
...view.getSchemas(),
}

View File

@ -1,99 +1,101 @@
import { object } from "./utils"
import Resource from "./utils/Resource"
export const searchSchema = {
type: "object",
properties: {
allOr: {
type: "boolean",
description:
"Specifies that a row should be returned if it satisfies any of the specified options, rather than requiring it to fulfill all the search parameters. This defaults to false, meaning AND logic will be used.",
},
string: {
type: "object",
example: {
columnName1: "value",
columnName2: "value",
},
description:
"A map of field name to the string to search for, this will look for rows that have a value starting with the string value.",
additionalProperties: {
type: "string",
description: "The value to search for in the column.",
},
},
fuzzy: {
type: "object",
description:
"Searches for a sub-string within a string column, e.g. searching for 'dib' will match 'Budibase'.",
},
range: {
type: "object",
description:
'Searches within a range; the value must be an object with a "low" and "high" property.',
example: {
columnName1: {
low: 10,
high: 20,
},
},
},
equal: {
type: "object",
description:
"Searches for rows that have a column value that is exactly the value set.",
},
notEqual: {
type: "object",
description:
"Searches for any row which does not contain the specified column value.",
},
empty: {
type: "object",
description:
"Searches for rows which do not contain the specified column. The object should simply contain keys of the column names, these can map to any value.",
example: {
columnName1: "",
},
},
notEmpty: {
type: "object",
description: "Searches for rows which have the specified column.",
},
oneOf: {
type: "object",
description:
"Searches for rows which have a column value that is any of the specified values. The format of this must be columnName -> [value1, value2].",
},
contains: {
type: "object",
description:
"Searches for a value, or set of values in array column types (such as a multi-select). If an array of search options is provided then it must match all.",
example: {
arrayColumn: ["a", "b"],
},
},
notContains: {
type: "object",
description:
"The logical inverse of contains. Only works on array column types. If an array of values is passed, the row must not match any of them to be returned in the response.",
example: {
arrayColumn: ["a", "b"],
},
},
containsAny: {
type: "object",
description:
"As with the contains search, only works on array column types and searches for any of the provided values when given an array.",
example: {
arrayColumn: ["a", "b"],
},
},
},
}
export default new Resource().setSchemas({
rowSearch: object(
{
query: {
type: "object",
properties: {
allOr: {
type: "boolean",
description:
"Specifies that a row should be returned if it satisfies any of the specified options, rather than requiring it to fulfill all the search parameters. This defaults to false, meaning AND logic will be used.",
},
string: {
type: "object",
example: {
columnName1: "value",
columnName2: "value",
},
description:
"A map of field name to the string to search for, this will look for rows that have a value starting with the string value.",
additionalProperties: {
type: "string",
description: "The value to search for in the column.",
},
},
fuzzy: {
type: "object",
description:
"Searches for a sub-string within a string column, e.g. searching for 'dib' will match 'Budibase'.",
},
range: {
type: "object",
description:
'Searches within a range, the format of this must be in the format of an object with a "low" and "high" property.',
example: {
columnName1: {
low: 10,
high: 20,
},
},
},
equal: {
type: "object",
description:
"Searches for rows that have a column value that is exactly the value set.",
},
notEqual: {
type: "object",
description:
"Searches for any row which does not contain the specified column value.",
},
empty: {
type: "object",
description:
"Searches for rows which do not contain the specified column. The object should simply contain keys of the column names, these can map to any value.",
example: {
columnName1: "",
},
},
notEmpty: {
type: "object",
description: "Searches for rows which have the specified column.",
},
oneOf: {
type: "object",
description:
"Searches for rows which have a column value that is any of the specified values. The format of this must be columnName -> [value1, value2].",
},
contains: {
type: "object",
description:
"Searches for a value, or set of values in array column types (such as a multi-select). If an array of search options is provided then it must match all.",
example: {
arrayColumn: ["a", "b"],
},
},
notContains: {
type: "object",
description:
"The logical inverse of contains. Only works on array column types. If an array of values is passed, the row must not match any of them to be returned in the response.",
example: {
arrayColumn: ["a", "b"],
},
},
containsAny: {
type: "object",
description:
"As with the contains search, only works on array column types and searches for any of the provided values when given an array.",
example: {
arrayColumn: ["a", "b"],
},
},
},
},
query: searchSchema,
paginate: {
type: "boolean",
description: "Enables pagination, by default this is disabled.",

View File

@ -0,0 +1,274 @@
import { object } from "./utils"
import Resource from "./utils/Resource"
import {
ArrayOperator,
BasicOperator,
CalculationType,
RangeOperator,
SortOrder,
SortType,
} from "@budibase/types"
import { cloneDeep } from "lodash"
const view = {
name: "peopleView",
tableId: "ta_896a325f7e8147d2a2cda93c5d236511",
schema: {
name: {
visible: true,
readonly: false,
order: 1,
width: 300,
},
age: {
visible: true,
readonly: true,
order: 2,
width: 200,
},
salary: {
visible: false,
readonly: false,
},
},
query: {
logicalOperator: "all",
onEmptyFilter: "none",
groups: [
{
logicalOperator: "any",
filters: [
{ operator: "string", field: "name", value: "John" },
{ operator: "range", field: "age", value: { low: 18, high: 100 } },
],
},
],
},
primaryDisplay: "name",
}
const baseColumnDef = {
visible: {
type: "boolean",
description:
"Defines whether the column is visible or not - rows retrieved/updated through this view will not be able to access it.",
},
readonly: {
type: "boolean",
description:
"When used in combination with 'visible: true' the column will be visible in row responses but cannot be updated.",
},
order: {
type: "integer",
description:
"A number defining where the column shows up in tables, lowest being first.",
},
width: {
type: "integer",
description:
"A width for the column, defined in pixels - this affects rendering in tables.",
},
column: {
type: "array",
description:
"If this is a relationship column, we can set the columns we wish to include",
items: {
type: "object",
properties: {
readonly: {
type: "boolean",
},
},
},
},
}
const logicalOperator = {
description:
"When using groups this defines whether all of the filters must match, or only one of them.",
type: "string",
enum: ["all", "any"],
}
const filterGroup = {
description: "A grouping of filters to be applied.",
type: "array",
items: {
type: "object",
properties: {
logicalOperator,
filters: {
description: "A list of filters to apply",
type: "array",
items: {
type: "object",
properties: {
operator: {
type: "string",
description:
"The type of search operation which is being performed.",
enum: [
...Object.values(BasicOperator),
...Object.values(ArrayOperator),
...Object.values(RangeOperator),
],
},
field: {
type: "string",
description: "The field in the view to perform the search on.",
},
value: {
description:
"The value to search for - the type will depend on the operator in use.",
oneOf: [
{ type: "string" },
{ type: "number" },
{ type: "boolean" },
{ type: "object" },
{ type: "array" },
],
},
},
},
},
},
},
}
// have to clone to avoid an infinitely recursive structure - we can't represent this easily
const layeredFilterGroup: any = cloneDeep(filterGroup)
layeredFilterGroup.items.properties.groups = filterGroup
const viewQuerySchema = {
description: "Search parameters for view",
type: "object",
required: [],
properties: {
logicalOperator,
onEmptyFilter: {
description:
"If no filters match, should the view return all rows, or no rows.",
type: "string",
enum: ["all", "none"],
},
groups: layeredFilterGroup,
},
}
const viewSchema = {
description: "The view to be created/updated.",
type: "object",
required: ["name", "schema", "tableId"],
properties: {
name: {
description: "The name of the view.",
type: "string",
},
tableId: {
description: "The ID of the table this view is based on.",
type: "string",
},
type: {
description: "The type of view - standard (empty value) or calculation.",
type: "string",
enum: ["calculation"],
},
primaryDisplay: {
type: "string",
description:
"A column used to display rows from this view - usually used when rendered in tables.",
},
query: viewQuerySchema,
sort: {
type: "object",
required: ["field"],
properties: {
field: {
type: "string",
description: "The field from the table/view schema to sort on.",
},
order: {
type: "string",
description: "The order in which to sort.",
enum: Object.values(SortOrder),
},
type: {
type: "string",
description:
"The type of sort to perform (by number, or by alphabetically).",
enum: Object.values(SortType),
},
},
},
schema: {
type: "object",
additionalProperties: {
oneOf: [
{
type: "object",
properties: baseColumnDef,
},
{
type: "object",
properties: {
calculationType: {
type: "string",
description:
"This column should be built from a calculation, specifying a type and field. It is important to note when a calculation is configured all non-calculation columns will be used for grouping.",
enum: Object.values(CalculationType),
},
field: {
type: "string",
description:
"The field from the table to perform the calculation on.",
},
distinct: {
type: "boolean",
description:
"Can be used in tandem with the count calculation type, to count unique entries.",
},
},
},
],
},
},
},
}
const viewOutputSchema = {
...viewSchema,
properties: {
...viewSchema.properties,
id: {
description: "The ID of the view.",
type: "string",
},
},
required: [...viewSchema.required, "id"],
}
export default new Resource()
.setExamples({
view: {
value: {
data: view,
},
},
views: {
value: {
data: [view],
},
},
})
.setSchemas({
view: viewSchema,
viewOutput: object({
data: viewOutputSchema,
}),
viewSearch: object({
data: {
type: "array",
items: viewOutputSchema,
},
}),
})

View File

@ -153,7 +153,11 @@ async function createInstance(appId: string, template: AppTemplate) {
await createAllSearchIndex()
if (template && template.useTemplate) {
await sdk.backups.importApp(appId, db, template)
const opts = {
importObjStoreContents: true,
updateAttachmentColumns: !template.key, // preserve attachments when using Budibase templates
}
await sdk.backups.importApp(appId, db, template, opts)
} else {
// create the users table
await db.put(USERS_TABLE_SCHEMA)

View File

@ -1,6 +1,7 @@
import { Application } from "./types"
import { RequiredKeys } from "@budibase/types"
function application(body: any): Application {
function application(body: any): RequiredKeys<Application> {
let app = body?.application ? body.application : body
return {
_id: app.appId,

View File

@ -3,6 +3,7 @@ import applications from "./applications"
import users from "./users"
import rows from "./rows"
import queries from "./queries"
import views from "./views"
export default {
...tables,
@ -10,4 +11,5 @@ export default {
...users,
...rows,
...queries,
...views,
}

View File

@ -1,6 +1,7 @@
import { Query, ExecuteQuery } from "./types"
import { RequiredKeys } from "@budibase/types"
function query(body: any): Query {
function query(body: any): RequiredKeys<Query> {
return {
_id: body._id,
datasourceId: body.datasourceId,

View File

@ -1,6 +1,7 @@
import { Row, RowSearch } from "./types"
import { RequiredKeys } from "@budibase/types"
function row(body: any): Row {
function row(body: any): RequiredKeys<Row> {
delete body._rev
// have to input everything, since structure unknown
return {

View File

@ -1,6 +1,7 @@
import { Table } from "./types"
import { RequiredKeys } from "@budibase/types"
function table(body: any): Table {
function table(body: any): RequiredKeys<Table> {
return {
_id: body._id,
name: body.name,

View File

@ -9,6 +9,9 @@ export type CreateApplicationParams = components["schemas"]["application"]
export type Table = components["schemas"]["tableOutput"]["data"]
export type CreateTableParams = components["schemas"]["table"]
export type View = components["schemas"]["viewOutput"]["data"]
export type CreateViewParams = components["schemas"]["view"]
export type Row = components["schemas"]["rowOutput"]["data"]
export type RowSearch = components["schemas"]["searchOutput"]
export type CreateRowParams = components["schemas"]["row"]

View File

@ -1,6 +1,7 @@
import { User } from "./types"
import { RequiredKeys } from "@budibase/types"
function user(body: any): User {
function user(body: any): RequiredKeys<User> {
return {
_id: body._id,
email: body.email,

View File

@ -0,0 +1,32 @@
import { View } from "./types"
import { ViewV2, Ctx, RequiredKeys } from "@budibase/types"
import { dataFilters } from "@budibase/shared-core"
function view(body: ViewV2): RequiredKeys<View> {
return {
id: body.id,
tableId: body.tableId,
type: body.type,
name: body.name,
schema: body.schema!,
primaryDisplay: body.primaryDisplay,
query: dataFilters.buildQuery(body.query),
sort: body.sort,
}
}
function mapView(ctx: Ctx<{ data: ViewV2 }>): { data: View } {
return {
data: view(ctx.body.data),
}
}
function mapViews(ctx: Ctx<{ data: ViewV2[] }>): { data: View[] } {
const views = ctx.body.data.map((body: ViewV2) => view(body))
return { data: views }
}
export default {
mapView,
mapViews,
}

View File

@ -22,13 +22,13 @@ export function fixRow(row: Row, params: any) {
return row
}
export async function search(ctx: UserCtx, next: Next) {
function buildSearchRequestBody(ctx: UserCtx) {
let { sort, paginate, bookmark, limit, query } = ctx.request.body
// update the body to the correct format of the internal search
if (!sort) {
sort = {}
}
ctx.request.body = {
return {
sort: sort.column,
sortType: sort.type,
sortOrder: sort.order,
@ -37,10 +37,23 @@ export async function search(ctx: UserCtx, next: Next) {
limit,
query,
}
}
export async function search(ctx: UserCtx, next: Next) {
ctx.request.body = buildSearchRequestBody(ctx)
await rowController.search(ctx)
await next()
}
export async function viewSearch(ctx: UserCtx, next: Next) {
ctx.request.body = buildSearchRequestBody(ctx)
ctx.params = {
viewId: ctx.params.viewId,
}
await rowController.views.searchView(ctx)
await next()
}
export async function create(ctx: UserCtx, next: Next) {
ctx.request.body = fixRow(ctx.request.body, ctx.params)
await rowController.save(ctx)
@ -79,4 +92,5 @@ export default {
update,
destroy,
search,
viewSearch,
}
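To make the mapping above concrete, here is a small illustrative sketch (values are hypothetical) of how a public search body is flattened by buildSearchRequestBody into the internal search request passed to the row controller.
// Public API body: sort is a nested object
const publicBody = {
  sort: { column: "name", type: "string", order: "ascending" },
  paginate: true,
  limit: 10,
  query: { string: { name: "John" } },
}

// After buildSearchRequestBody: sort fields are flattened for the internal search
const internalBody = {
  sort: "name", // sort.column
  sortType: "string", // sort.type
  sortOrder: "ascending", // sort.order
  paginate: true,
  bookmark: undefined,
  limit: 10,
  query: { string: { name: "John" } },
}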

View File

@ -0,0 +1,95 @@
import { search as stringSearch } from "./utils"
import * as controller from "../view"
import { ViewV2, UserCtx, UISearchFilter, PublicAPIView } from "@budibase/types"
import { Next } from "koa"
import { merge } from "lodash"
function viewRequest(view: PublicAPIView, params?: { viewId: string }) {
const viewV2: ViewV2 = view
if (!viewV2) {
return viewV2
}
if (params?.viewId) {
viewV2.id = params.viewId
}
if (!view.query) {
viewV2.query = {}
} else {
// public API only has one form of query
viewV2.queryUI = viewV2.query as UISearchFilter
}
viewV2.version = 2
return viewV2
}
function viewResponse(view: ViewV2): PublicAPIView {
// remove our internal structure - always unnecessary
delete view.query
return {
...view,
query: view.queryUI,
}
}
function viewsResponse(views: ViewV2[]): PublicAPIView[] {
return views.map(viewResponse)
}
export async function search(ctx: UserCtx, next: Next) {
const { name } = ctx.request.body
await controller.v2.fetch(ctx)
ctx.body.data = viewsResponse(stringSearch(ctx.body.data, name))
await next()
}
export async function create(ctx: UserCtx, next: Next) {
ctx = merge(ctx, {
request: {
body: viewRequest(ctx.request.body),
},
})
await controller.v2.create(ctx)
ctx.body.data = viewResponse(ctx.body.data)
await next()
}
export async function read(ctx: UserCtx, next: Next) {
ctx = merge(ctx, {
params: {
viewId: ctx.params.viewId,
},
})
await controller.v2.get(ctx)
ctx.body.data = viewResponse(ctx.body.data)
await next()
}
export async function update(ctx: UserCtx, next: Next) {
const viewId = ctx.params.viewId
ctx = merge(ctx, {
request: {
body: {
data: viewRequest(ctx.request.body, { viewId }),
},
},
params: {
viewId,
},
})
await controller.v2.update(ctx)
ctx.body.data = viewResponse(ctx.body.data)
await next()
}
export async function destroy(ctx: UserCtx, next: Next) {
await controller.v2.remove(ctx)
await next()
}
export default {
create,
read,
update,
destroy,
search,
}

View File

@ -4,7 +4,7 @@ import { URL } from "url"
const curlconverter = require("curlconverter")
const parseCurl = (data: string): any => {
const parseCurl = (data: string): Promise<any> => {
const curlJson = curlconverter.toJsonString(data)
return JSON.parse(curlJson)
}
@ -53,8 +53,7 @@ export class Curl extends ImportSource {
isSupported = async (data: string): Promise<boolean> => {
try {
const curl = parseCurl(data)
this.curl = curl
this.curl = parseCurl(data)
} catch (err) {
return false
}

View File

@ -23,6 +23,7 @@ import {
} from "@budibase/types"
import { ValidQueryNameRegex, utils as JsonUtils } from "@budibase/shared-core"
import { findHBSBlocks } from "@budibase/string-templates"
import { ObjectId } from "mongodb"
const Runner = new Thread(ThreadType.QUERY, {
timeoutMs: env.QUERY_THREAD_TIMEOUT,
@ -223,6 +224,8 @@ export async function preview(
} else {
fieldMetadata = makeQuerySchema(FieldType.ARRAY, key)
}
} else if (field instanceof ObjectId) {
fieldMetadata = makeQuerySchema(FieldType.STRING, key)
} else {
fieldMetadata = makeQuerySchema(FieldType.JSON, key)
}

View File

@ -50,6 +50,7 @@ export async function searchView(
result.rows.forEach(r => (r._viewId = view.id))
ctx.body = result
}
function getSortOptions(request: SearchViewRowRequest, view: ViewV2) {
if (request.sort) {
return {

View File

@ -209,6 +209,7 @@ export const serveApp = async function (ctx: UserCtx) {
? objectStore.getGlobalFileUrl("settings", "logoUrl")
: "",
appMigrating: needMigrations,
nonce: ctx.state.nonce,
})
const appHbs = loadHandlebarsFile(appHbsPath)
ctx.body = await processString(appHbs, {
@ -217,6 +218,7 @@ export const serveApp = async function (ctx: UserCtx) {
css: `:root{${themeVariables}} ${css.code}`,
appId,
embedded: bbHeaderEmbed,
nonce: ctx.state.nonce,
})
} else {
// just return the app info for jest to assert on
@ -258,6 +260,7 @@ export const serveBuilderPreview = async function (ctx: Ctx) {
const previewHbs = loadHandlebarsFile(join(previewLoc, "preview.hbs"))
ctx.body = await processString(previewHbs, {
clientLibPath: objectStore.clientLibraryUrl(appId!, appInfo.version),
nonce: ctx.state.nonce,
})
} else {
// just return the app info for jest to assert on

View File

@ -16,6 +16,8 @@
export let hideDevTools
export let sideNav
export let hideFooter
export let nonce
</script>
<svelte:head>
@ -118,11 +120,11 @@
<p />
{/if}
</div>
<script type="application/javascript">
<script type="application/javascript" {nonce}>
window.INIT_TIME = Date.now()
</script>
{#if appMigrating}
<script type="application/javascript">
<script type="application/javascript" {nonce}>
window.MIGRATING_APP = true
</script>
{/if}
@ -135,7 +137,7 @@
<script type="application/javascript" src={plugin.jsUrl}></script>
{/each}
{/if}
<script type="application/javascript">
<script type="application/javascript" {nonce}>
if (window.loadBudibase) {
window.loadBudibase()
} else {

View File

@ -1,5 +1,5 @@
<html>
<script>
<script nonce="{{ nonce }}">
document.fonts.ready.then(() => {
window.parent.postMessage({ type: "docLoaded" });
})
@ -9,7 +9,7 @@
<style>{{{css}}}</style>
</head>
<script>
<script nonce="{{ nonce }}">
window["##BUDIBASE_APP_ID##"] = "{{appId}}"
window["##BUDIBASE_APP_EMBEDDED##"] = "{{embedded}}"
</script>
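The nonce rendered into these templates supports a Content-Security-Policy flow; the middleware that generates it is not part of this diff, so the following is only an illustrative Koa sketch of the general pattern (function name and policy string are assumptions).
import { randomBytes } from "crypto"
import type { Context, Next } from "koa"

export async function cspNonce(ctx: Context, next: Next) {
  // Per-request nonce, exposed on ctx.state so templates can render it as {{ nonce }}
  const nonce = randomBytes(16).toString("base64")
  ctx.state.nonce = nonce
  // Allow only inline scripts carrying this nonce (illustrative policy)
  ctx.set("Content-Security-Policy", `script-src 'self' 'nonce-${nonce}'`)
  await next()
}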

View File

@ -31,7 +31,7 @@
}
</style>
<script src='{{ clientLibPath }}'></script>
<script>
<script nonce="{{ nonce }}">
function receiveMessage(event) {
if (!event.data) {
return

View File

@ -15,12 +15,11 @@ import { getViews, saveView } from "../view/utils"
import viewTemplate from "../view/viewBuilder"
import { cloneDeep } from "lodash/fp"
import { quotas } from "@budibase/pro"
import { context, events, features, HTTPError } from "@budibase/backend-core"
import { context, events, HTTPError } from "@budibase/backend-core"
import {
AutoFieldSubType,
Database,
Datasource,
FeatureFlag,
FieldSchema,
FieldType,
NumberFieldMetadata,
@ -336,9 +335,8 @@ class TableSaveFunctions {
importRows: this.importRows,
userId: this.userId,
})
if (await features.flags.isEnabled(FeatureFlag.SQS)) {
await sdk.tables.sqs.addTable(table)
}
await sdk.tables.sqs.addTable(table)
return table
}
@ -530,9 +528,8 @@ export async function internalTableCleanup(table: Table, rows?: Row[]) {
if (rows) {
await AttachmentCleanup.tableDelete(table, rows)
}
if (await features.flags.isEnabled(FeatureFlag.SQS)) {
await sdk.tables.sqs.removeTable(table)
}
await sdk.tables.sqs.removeTable(table)
}
const _TableSaveFunctions = TableSaveFunctions

View File

@ -12,6 +12,7 @@ import {
RelationSchemaField,
ViewFieldMetadata,
CalculationType,
ViewFetchResponseEnriched,
CountDistinctCalculationFieldMetadata,
CountCalculationFieldMetadata,
} from "@budibase/types"
@ -125,6 +126,12 @@ export async function get(ctx: Ctx<void, ViewResponseEnriched>) {
}
}
export async function fetch(ctx: Ctx<void, ViewFetchResponseEnriched>) {
ctx.body = {
data: await sdk.views.getAllEnriched(),
}
}
export async function create(ctx: Ctx<CreateViewRequest, ViewResponse>) {
const view = ctx.request.body
const { tableId } = view

View File

@ -4,19 +4,21 @@ import queryEndpoints from "./queries"
import tableEndpoints from "./tables"
import rowEndpoints from "./rows"
import userEndpoints from "./users"
import viewEndpoints from "./views"
import roleEndpoints from "./roles"
import authorized from "../../../middleware/authorized"
import publicApi from "../../../middleware/publicApi"
import { paramResource, paramSubResource } from "../../../middleware/resourceId"
import { PermissionType, PermissionLevel } from "@budibase/types"
import { PermissionLevel, PermissionType } from "@budibase/types"
import { CtxFn } from "./utils/Endpoint"
import mapperMiddleware from "./middleware/mapper"
import env from "../../../environment"
import { middleware, redis } from "@budibase/backend-core"
import { SelectableDatabase } from "@budibase/backend-core/src/redis/utils"
import cors from "@koa/cors"
// below imports don't have declaration files
const Router = require("@koa/router")
const { RateLimit, Stores } = require("koa2-ratelimit")
import { middleware, redis } from "@budibase/backend-core"
import { SelectableDatabase } from "@budibase/backend-core/src/redis/utils"
interface KoaRateLimitOptions {
socket: {
@ -81,6 +83,7 @@ const publicRouter = new Router({
if (limiter && !env.isDev()) {
publicRouter.use(limiter)
}
publicRouter.use(cors())
function addMiddleware(
endpoints: any,
@ -149,6 +152,7 @@ applyAdminRoutes(metricEndpoints)
applyAdminRoutes(roleEndpoints)
applyRoutes(appEndpoints, PermissionType.APP, "appId")
applyRoutes(tableEndpoints, PermissionType.TABLE, "tableId")
applyRoutes(viewEndpoints, PermissionType.VIEW, "viewId")
applyRoutes(userEndpoints, PermissionType.USER, "userId")
applyRoutes(queryEndpoints, PermissionType.QUERY, "queryId")
// needs to be applied last for routing purposes, don't override other endpoints

View File

@ -1,9 +1,10 @@
import { Ctx } from "@budibase/types"
import mapping from "../../../controllers/public/mapping"
enum Resources {
enum Resource {
APPLICATION = "applications",
TABLES = "tables",
VIEWS = "views",
ROWS = "rows",
USERS = "users",
QUERIES = "queries",
@ -15,7 +16,7 @@ function isAttachment(ctx: Ctx) {
}
function isArrayResponse(ctx: Ctx) {
return ctx.url.endsWith(Resources.SEARCH) || Array.isArray(ctx.body)
return ctx.url.endsWith(Resource.SEARCH) || Array.isArray(ctx.body)
}
function noResponse(ctx: Ctx) {
@ -38,6 +39,14 @@ function processTables(ctx: Ctx) {
}
}
function processViews(ctx: Ctx) {
if (isArrayResponse(ctx)) {
return mapping.mapViews(ctx)
} else {
return mapping.mapView(ctx)
}
}
function processRows(ctx: Ctx) {
if (isArrayResponse(ctx)) {
return mapping.mapRowSearch(ctx)
@ -71,20 +80,27 @@ export default async (ctx: Ctx, next: any) => {
let body = {}
switch (urlParts[0]) {
case Resources.APPLICATION:
case Resource.APPLICATION:
body = processApplications(ctx)
break
case Resources.TABLES:
if (urlParts[2] === Resources.ROWS) {
case Resource.TABLES:
if (urlParts[2] === Resource.ROWS) {
body = processRows(ctx)
} else {
body = processTables(ctx)
}
break
case Resources.USERS:
case Resource.VIEWS:
if (urlParts[2] === Resource.ROWS) {
body = processRows(ctx)
} else {
body = processViews(ctx)
}
break
case Resource.USERS:
body = processUsers(ctx)
break
case Resources.QUERIES:
case Resource.QUERIES:
body = processQueries(ctx)
break
}
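To make the dispatch concrete: the first URL segment selects the resource, and a "rows" sub-resource always wins over the parent resource. A standalone sketch of that logic follows - the splitting of ctx.url into segments is assumed here; only the case-to-processor mapping is taken from the switch above.

// Illustration of the mapper's dispatch; not the middleware itself.
type Processor = "rows" | "views" | "tables" | "users" | "queries" | "applications"

function pickProcessor(urlParts: string[]): Processor | undefined {
  switch (urlParts[0]) {
    case "applications":
      return "applications"
    case "tables":
    case "views":
      // A rows sub-resource routes to the row processor for both tables and views.
      return urlParts[2] === "rows" ? "rows" : (urlParts[0] as Processor)
    case "users":
      return "users"
    case "queries":
      return "queries"
    default:
      return undefined
  }
}

// pickProcessor(["views", "search"])                 -> "views"
// pickProcessor(["views", "v_1", "rows", "search"])  -> "rows"
// pickProcessor(["tables", "ta_1", "rows", "search"]) -> "rows"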

View File

@ -1,4 +1,4 @@
import controller from "../../controllers/public/rows"
import controller, { viewSearch } from "../../controllers/public/rows"
import Endpoint from "./utils/Endpoint"
import { externalSearchValidator } from "../utils/validators"
@ -168,4 +168,40 @@ read.push(
).addMiddleware(externalSearchValidator())
)
/**
* @openapi
* /views/{viewId}/rows/search:
* post:
* operationId: rowViewSearch
* summary: Search for rows in a view
* tags:
* - rows
* parameters:
* - $ref: '#/components/parameters/viewId'
* - $ref: '#/components/parameters/appId'
* requestBody:
* required: true
* content:
* application/json:
* schema:
* $ref: '#/components/schemas/rowSearch'
* responses:
* 200:
* description: The response will contain an array of rows that match the search parameters.
* content:
* application/json:
* schema:
* $ref: '#/components/schemas/searchOutput'
* examples:
* search:
* $ref: '#/components/examples/rows'
*/
read.push(
new Endpoint(
"post",
"/views/:viewId/rows/search",
controller.viewSearch
).addMiddleware(externalSearchValidator())
)
export default { read, write }
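A hedged sketch of calling the new endpoint from outside Budibase. The base path and header names follow the existing public API conventions and should be treated as assumptions here; the request and response bodies match the OpenAPI block above.

// Sketch of searching rows through a view via the public API.
// Host, base path and header names are assumptions; adjust for your deployment.
async function searchViewRows(viewId: string, apiKey: string, appId: string) {
  const res = await fetch(
    `https://budibase.example.com/api/public/v1/views/${viewId}/rows/search`,
    {
      method: "POST",
      headers: {
        "x-budibase-api-key": apiKey,
        "x-budibase-app-id": appId,
        "content-type": "application/json",
      },
      // An empty query returns all rows visible through the view.
      body: JSON.stringify({ query: {} }),
    }
  )
  const { data } = (await res.json()) as { data: Record<string, any>[] }
  return data
}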

View File

@ -1,13 +1,24 @@
import { User, Table, SearchFilters, Row } from "@budibase/types"
import {
User,
Table,
SearchFilters,
Row,
ViewV2Schema,
ViewV2,
ViewV2Type,
PublicAPIView,
} from "@budibase/types"
import { HttpMethod, MakeRequestResponse, generateMakeRequest } from "./utils"
import TestConfiguration from "../../../../tests/utilities/TestConfiguration"
import { Expectations } from "../../../../tests/utilities/api/base"
type RequestOpts = { internal?: boolean; appId?: string }
type Response<T> = { data: T }
export interface PublicAPIExpectations {
status?: number
body?: Record<string, any>
headers?: Record<string, string>
}
export class PublicAPIRequest {
@ -15,6 +26,7 @@ export class PublicAPIRequest {
private appId: string | undefined
tables: PublicTableAPI
views: PublicViewAPI
rows: PublicRowAPI
apiKey: string
@ -28,6 +40,7 @@ export class PublicAPIRequest {
this.appId = appId
this.tables = new PublicTableAPI(this)
this.rows = new PublicRowAPI(this)
this.views = new PublicViewAPI(this)
}
static async init(config: TestConfiguration, user: User, opts?: RequestOpts) {
@ -59,6 +72,12 @@ export class PublicAPIRequest {
if (expectations?.body) {
expect(res.body).toEqual(expectations?.body)
}
if (expectations?.headers) {
for (let [header, value] of Object.entries(expectations.headers)) {
const found = res.headers[header]
expect(found?.toLowerCase()).toEqual(value)
}
}
return res.body
}
}
@ -73,9 +92,16 @@ export class PublicTableAPI {
async create(
table: Table,
expectations?: PublicAPIExpectations
): Promise<{ data: Table }> {
): Promise<Response<Table>> {
return this.request.send("post", "/tables", table, expectations)
}
async search(
name: string,
expectations?: PublicAPIExpectations
): Promise<Response<Table[]>> {
return this.request.send("post", "/tables/search", { name }, expectations)
}
}
export class PublicRowAPI {
@ -85,11 +111,24 @@ export class PublicRowAPI {
this.request = request
}
async create(
tableId: string,
row: Row,
expectations?: PublicAPIExpectations
): Promise<Response<Row>> {
return this.request.send(
"post",
`/tables/${tableId}/rows`,
row,
expectations
)
}
async search(
tableId: string,
query: SearchFilters,
expectations?: PublicAPIExpectations
): Promise<{ data: Row[] }> {
): Promise<Response<Row[]>> {
return this.request.send(
"post",
`/tables/${tableId}/rows/search`,
@ -99,4 +138,75 @@ export class PublicRowAPI {
expectations
)
}
async viewSearch(
viewId: string,
query: SearchFilters,
expectations?: PublicAPIExpectations
): Promise<Response<Row[]>> {
return this.request.send(
"post",
`/views/${viewId}/rows/search`,
{
query,
},
expectations
)
}
}
export class PublicViewAPI {
request: PublicAPIRequest
constructor(request: PublicAPIRequest) {
this.request = request
}
async create(
view: Omit<PublicAPIView, "id" | "version">,
expectations?: PublicAPIExpectations
): Promise<Response<PublicAPIView>> {
return this.request.send("post", "/views", view, expectations)
}
async update(
viewId: string,
view: Omit<PublicAPIView, "id" | "version">,
expectations?: PublicAPIExpectations
): Promise<Response<PublicAPIView>> {
return this.request.send("put", `/views/${viewId}`, view, expectations)
}
async destroy(
viewId: string,
expectations?: PublicAPIExpectations
): Promise<void> {
return this.request.send(
"delete",
`/views/${viewId}`,
undefined,
expectations
)
}
async find(
viewId: string,
expectations?: PublicAPIExpectations
): Promise<Response<PublicAPIView>> {
return this.request.send("get", `/views/${viewId}`, undefined, expectations)
}
async search(
viewName: string,
expectations?: PublicAPIExpectations
): Promise<Response<PublicAPIView[]>> {
return this.request.send(
"post",
"/views/search",
{
name: viewName,
},
expectations
)
}
}
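Roughly how this helper is used in the specs that follow; config is an initialised TestConfiguration, and the table ID and view body are placeholders.

// Sketch of the PublicAPIRequest helper in use.
const request = await PublicAPIRequest.init(config, await config.globalUser())

// Create a view through the public API and assert the expected status.
const view = await request.views.create(
  { name: "My view", tableId: "ta_123", query: {}, schema: {} },
  { status: 201 }
)

// Search rows through the view; responses come back wrapped in { data }.
const rows = await request.rows.viewSearch(view.data.id, {})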

View File

@ -0,0 +1,21 @@
import * as setup from "../../tests/utilities"
import { PublicAPIRequest } from "./Request"
describe("check public API security", () => {
const config = setup.getConfig()
let request: PublicAPIRequest
beforeAll(async () => {
await config.init()
request = await PublicAPIRequest.init(config, await config.globalUser())
})
it("should have Access-Control-Allow-Origin set to *", async () => {
await request.tables.search("", {
status: 200,
headers: {
"access-control-allow-origin": "*",
},
})
})
})
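This is driven by the cors() middleware added to the public router earlier in this diff: every public API response now carries Access-Control-Allow-Origin: *, so a browser app on another origin can call it directly. A sketch, with the base path and header names assumed from the existing public API conventions:

// Cross-origin call to the public API from a browser; header names assumed.
const res = await fetch(
  "https://budibase.example.com/api/public/v1/tables/search",
  {
    method: "POST",
    headers: {
      "x-budibase-api-key": "<api key>",
      "x-budibase-app-id": "<app id>",
      "content-type": "application/json",
    },
    body: JSON.stringify({ name: "" }),
  }
)
console.log(res.headers.get("access-control-allow-origin")) // "*"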

View File

@ -0,0 +1,95 @@
import * as setup from "../../tests/utilities"
import { basicTable } from "../../../../tests/utilities/structures"
import { BasicOperator, Table, UILogicalOperator } from "@budibase/types"
import { PublicAPIRequest } from "./Request"
import { generator } from "@budibase/backend-core/tests"
describe("check public API security", () => {
const config = setup.getConfig()
let request: PublicAPIRequest, table: Table
beforeAll(async () => {
await config.init()
request = await PublicAPIRequest.init(config, await config.globalUser())
table = (await request.tables.create(basicTable())).data
})
function baseView() {
return {
name: generator.word(),
tableId: table._id!,
query: {},
schema: {
name: {
readonly: true,
visible: true,
},
},
}
}
it("should be able to create a view", async () => {
await request.views.create(baseView(), { status: 201 })
})
it("should be able to update a view", async () => {
const view = await request.views.create(baseView(), { status: 201 })
const response = await request.views.update(view.data.id, {
...view.data,
name: "new name",
})
expect(response.data.name).toEqual("new name")
})
it("should be able to search views", async () => {
const viewName = "view to search for"
const view = await request.views.create(
{
...baseView(),
name: viewName,
},
{ status: 201 }
)
const results = await request.views.search(viewName, {
status: 200,
})
expect(results.data.length).toEqual(1)
expect(results.data[0].id).toEqual(view.data.id)
})
it("should be able to delete a view", async () => {
const view = await request.views.create(baseView(), { status: 201 })
const result = await request.views.destroy(view.data.id, { status: 204 })
expect(result).toBeDefined()
})
it("should be able to search rows through a view", async () => {
const row1 = await request.rows.create(
table._id!,
{ name: "hello world" },
{ status: 200 }
)
await request.rows.create(table._id!, { name: "foo bar" }, { status: 200 })
const response = await request.views.create(
{
...baseView(),
query: {
logicalOperator: UILogicalOperator.ANY,
groups: [
{
filters: [
{
operator: BasicOperator.STRING,
field: "name",
value: "hello",
},
],
},
],
},
},
{ status: 201 }
)
const results = await request.rows.viewSearch(response.data.id, {})
expect(results.data.length).toEqual(1)
})
})

View File

@ -0,0 +1,165 @@
import controller from "../../controllers/public/views"
import Endpoint from "./utils/Endpoint"
import { viewValidator, nameValidator } from "../utils/validators"
const read = [],
write = []
/**
* @openapi
* /views:
* post:
* operationId: viewCreate
* summary: Create a view
* description: Create a view; this can be against an internal or external table.
* tags:
* - views
* parameters:
* - $ref: '#/components/parameters/appId'
* requestBody:
* content:
* application/json:
* schema:
* $ref: '#/components/schemas/view'
* examples:
* view:
* $ref: '#/components/examples/view'
* responses:
* 200:
* description: Returns the created view, including the ID which has been generated for it.
* content:
* application/json:
* schema:
* $ref: '#/components/schemas/viewOutput'
* examples:
* view:
* $ref: '#/components/examples/view'
*/
write.push(
new Endpoint("post", "/views", controller.create).addMiddleware(
viewValidator()
)
)
/**
* @openapi
* /views/{viewId}:
* put:
* operationId: viewUpdate
* summary: Update a view
* description: Update a view; this can be against an internal or external table.
* tags:
* - views
* parameters:
* - $ref: '#/components/parameters/viewId'
* - $ref: '#/components/parameters/appId'
* requestBody:
* content:
* application/json:
* schema:
* $ref: '#/components/schemas/view'
* examples:
* view:
* $ref: '#/components/examples/view'
* responses:
* 200:
* description: Returns the updated view.
* content:
* application/json:
* schema:
* $ref: '#/components/schemas/viewOutput'
* examples:
* view:
* $ref: '#/components/examples/view'
*/
write.push(
new Endpoint("put", "/views/:viewId", controller.update).addMiddleware(
viewValidator()
)
)
/**
* @openapi
* /views/{viewId}:
* delete:
* operationId: viewDestroy
* summary: Delete a view
* description: Delete a view; this can be against an internal or external table.
* tags:
* - views
* parameters:
* - $ref: '#/components/parameters/viewId'
* - $ref: '#/components/parameters/appId'
* responses:
* 200:
* description: Returns the deleted view.
* content:
* application/json:
* schema:
* $ref: '#/components/schemas/viewOutput'
* examples:
* view:
* $ref: '#/components/examples/view'
*/
write.push(new Endpoint("delete", "/views/:viewId", controller.destroy))
/**
* @openapi
* /views/{viewId}:
* get:
* operationId: viewGetById
* summary: Retrieve a view
* description: Look up a view; this could be internal or external.
* tags:
* - views
* parameters:
* - $ref: '#/components/parameters/viewId'
* - $ref: '#/components/parameters/appId'
* responses:
* 200:
* description: Returns the retrieved view.
* content:
* application/json:
* schema:
* $ref: '#/components/schemas/viewOutput'
* examples:
* view:
* $ref: '#/components/examples/view'
*/
read.push(new Endpoint("get", "/views/:viewId", controller.read))
/**
* @openapi
* /views/search:
* post:
* operationId: viewSearch
* summary: Search for views
* description: Search for views based on view properties (currently only the name).
* tags:
* - views
* parameters:
* - $ref: '#/components/parameters/appId'
* requestBody:
* required: true
* content:
* application/json:
* schema:
* $ref: '#/components/schemas/nameSearch'
* responses:
* 200:
* description: Returns the found views, based on the search parameters.
* content:
* application/json:
* schema:
* $ref: '#/components/schemas/viewSearch'
* examples:
* views:
* $ref: '#/components/examples/views'
*/
read.push(
new Endpoint("post", "/views/search", controller.search).addMiddleware(
nameValidator()
)
)
export default { read, write }
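A sketch of exercising the create endpoint documented above. The host and header names are assumptions based on the existing public API conventions, and the view body mirrors the schema referenced in the spec.

// Hypothetical call to the public view creation endpoint.
async function createView(apiKey: string, appId: string, tableId: string) {
  const res = await fetch("https://budibase.example.com/api/public/v1/views", {
    method: "POST",
    headers: {
      "x-budibase-api-key": apiKey,
      "x-budibase-app-id": appId,
      "content-type": "application/json",
    },
    body: JSON.stringify({
      name: "Active projects",
      tableId,
      query: {},
      schema: { name: { visible: true, readonly: true } },
    }),
  })
  // The created view (including its generated ID) comes back in { data }.
  const { data } = await res.json()
  return data
}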

View File

@ -16,7 +16,7 @@ jest.mock("../../../utilities/redis", () => ({
import { checkBuilderEndpoint } from "./utilities/TestFunctions"
import * as setup from "./utilities"
import { AppStatus } from "../../../db/utils"
import { events, utils, context, features } from "@budibase/backend-core"
import { events, utils, context } from "@budibase/backend-core"
import env from "../../../environment"
import { type App, BuiltinPermissionID } from "@budibase/types"
import tk from "timekeeper"
@ -355,21 +355,6 @@ describe("/applications", () => {
expect(events.app.deleted).toHaveBeenCalledTimes(1)
expect(events.app.unpublished).toHaveBeenCalledTimes(1)
})
it("should be able to delete an app after SQS has been set but app hasn't been migrated", async () => {
const prodAppId = app.appId.replace("_dev", "")
nock("http://localhost:10000")
.delete(`/api/global/roles/${prodAppId}`)
.reply(200, {})
await features.testutils.withFeatureFlags(
"*",
{ SQS: true },
async () => {
await config.api.application.delete(app.appId)
}
)
})
})
describe("POST /api/applications/:appId/duplicate", () => {

View File

@ -19,8 +19,7 @@ import {
} from "@budibase/types"
import {
DatabaseName,
getDatasource,
knexClient,
datasourceDescribe,
} from "../../../integrations/tests/utils"
import { tableForDatasource } from "../../../tests/utilities/structures"
import nock from "nock"
@ -69,7 +68,7 @@ describe("/datasources", () => {
{
status: 500,
body: {
message: "No datasource implementation found.",
message: 'No datasource implementation found called: "invalid"',
},
}
)
@ -163,21 +162,26 @@ describe("/datasources", () => {
})
})
})
})
describe.each([
[DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
[DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
[DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
[DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
[DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)],
])("%s", (_, dsProvider) => {
const descriptions = datasourceDescribe({
exclude: [DatabaseName.MONGODB, DatabaseName.SQS],
})
if (descriptions.length) {
describe.each(descriptions)("$dbName", ({ config, dsProvider }) => {
let datasource: Datasource
let rawDatasource: Datasource
let client: Knex
beforeEach(async () => {
rawDatasource = await dsProvider
datasource = await config.api.datasource.create(rawDatasource)
client = await knexClient(rawDatasource)
const ds = await dsProvider()
rawDatasource = ds.rawDatasource!
datasource = ds.datasource!
client = ds.client!
jest.clearAllMocks()
nock.cleanAll()
})
describe("get", () => {
@ -492,4 +496,4 @@ describe("/datasources", () => {
})
})
})
})
}
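The same datasourceDescribe pattern recurs in the other migrated specs in this commit. A minimal sketch of a new spec written against it, with an illustrative table name; the guard on descriptions.length is what keeps Jest from failing when the datasource matrix excludes everything for the current job.

import { Knex } from "knex"
import {
  DatabaseName,
  datasourceDescribe,
} from "../../../integrations/tests/utils"

const descriptions = datasourceDescribe({
  exclude: [DatabaseName.MONGODB, DatabaseName.SQS],
})

if (descriptions.length) {
  describe.each(descriptions)("$dbName", ({ dsProvider }) => {
    let client: Knex

    beforeEach(async () => {
      const ds = await dsProvider()
      client = ds.client!
    })

    it("can talk to the datasource directly", async () => {
      // Illustrative table name - real specs build their tables via the API.
      await client.schema.createTable("example", (t: Knex.CreateTableBuilder) => {
        t.string("name")
      })
      expect(await client("example").select("*")).toEqual([])
    })
  })
}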

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -9,15 +9,20 @@ import {
import { automations } from "@budibase/pro"
import {
CreateRowActionRequest,
Datasource,
DocumentType,
PermissionLevel,
RowActionResponse,
Table,
TableRowActions,
} from "@budibase/types"
import * as setup from "./utilities"
import { generator, mocks } from "@budibase/backend-core/tests"
import { Expectations } from "../../../tests/utilities/api/base"
import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
import {
DatabaseName,
datasourceDescribe,
} from "../../../integrations/tests/utils"
import { generateRowActionsID } from "../../../db/utils"
const expectAutomationId = () =>
@ -969,36 +974,38 @@ describe("/rowsActions", () => {
status: 200,
})
})
})
})
it.each([
[
"internal",
async () => {
await config.newTenant()
const descriptions = datasourceDescribe({
only: [DatabaseName.SQS, DatabaseName.POSTGRES],
})
if (descriptions.length) {
describe.each(descriptions)(
"row actions ($dbName)",
({ config, dsProvider, isInternal }) => {
let datasource: Datasource | undefined
beforeAll(async () => {
const ds = await dsProvider()
datasource = ds.datasource
})
async function getTable(): Promise<Table> {
if (isInternal) {
await config.api.application.addSampleData(config.getAppId())
const tables = await config.api.table.fetch()
const table = tables.find(
t => t.sourceId === DEFAULT_BB_DATASOURCE_ID
)!
return table
},
],
[
"external",
async () => {
await config.newTenant()
const ds = await config.createDatasource({
datasource: await getDatasource(DatabaseName.POSTGRES),
})
return tables.find(t => t.sourceId === DEFAULT_BB_DATASOURCE_ID)!
} else {
const table = await config.api.table.save(
setup.structures.tableForDatasource(ds)
setup.structures.tableForDatasource(datasource!)
)
return table
},
],
])(
"should delete all the row actions (and automations) for its tables when a datasource is deleted",
async (_, getTable) => {
}
}
it("should delete all the row actions (and automations) for its tables when a datasource is deleted", async () => {
async function getRowActionsFromDb(tableId: string) {
return await context.doInAppContext(config.getAppId(), async () => {
const db = context.getAppDB()
@ -1032,7 +1039,7 @@ describe("/rowsActions", () => {
expect(automationsResp.automations).toHaveLength(0)
expect(await getRowActionsFromDb(tableId)).toBeUndefined()
}
)
})
})
})
}
)
}

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -2,7 +2,6 @@ import * as setup from "./utilities"
import path from "path"
import nock from "nock"
import { generator } from "@budibase/backend-core/tests"
import { features } from "@budibase/backend-core"
interface App {
background: string
@ -82,48 +81,36 @@ describe("/templates", () => {
})
describe("create app from template", () => {
it.each(["sqs", "lucene"])(
`should be able to create an app from a template (%s)`,
async source => {
await features.testutils.withFeatureFlags(
"*",
{ SQS: source === "sqs" },
async () => {
const name = generator.guid().replaceAll("-", "")
const url = `/${name}`
it("should be able to create an app from a template", async () => {
const name = generator.guid().replaceAll("-", "")
const url = `/${name}`
const app = await config.api.application.create({
name,
url,
useTemplate: "true",
templateName: "Agency Client Portal",
templateKey: "app/agency-client-portal",
})
expect(app.name).toBe(name)
expect(app.url).toBe(url)
const app = await config.api.application.create({
name,
url,
useTemplate: "true",
templateName: "Agency Client Portal",
templateKey: "app/agency-client-portal",
})
expect(app.name).toBe(name)
expect(app.url).toBe(url)
await config.withApp(app, async () => {
const tables = await config.api.table.fetch()
expect(tables).toHaveLength(2)
await config.withApp(app, async () => {
const tables = await config.api.table.fetch()
expect(tables).toHaveLength(2)
tables.sort((a, b) => a.name.localeCompare(b.name))
const [agencyProjects, users] = tables
expect(agencyProjects.name).toBe("Agency Projects")
expect(users.name).toBe("Users")
tables.sort((a, b) => a.name.localeCompare(b.name))
const [agencyProjects, users] = tables
expect(agencyProjects.name).toBe("Agency Projects")
expect(users.name).toBe("Users")
const { rows } = await config.api.row.search(
agencyProjects._id!,
{
tableId: agencyProjects._id!,
query: {},
}
)
const { rows } = await config.api.row.search(agencyProjects._id!, {
tableId: agencyProjects._id!,
query: {},
})
expect(rows).toHaveLength(3)
})
}
)
}
)
expect(rows).toHaveLength(3)
})
})
})
})

File diff suppressed because it is too large

Some files were not shown because too many files have changed in this diff