diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index 6ace2303d9..854bc2e6dc 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -2,10 +2,11 @@ name: Bug report about: Create a report to help us improve title: '' -labels: bug +labels: bug, linear assignees: '' --- + **Checklist** - [ ] I have searched budibase discussions and github issues to check if my issue already exists diff --git a/.github/workflows/budibase_ci.yml b/.github/workflows/budibase_ci.yml index e0263546ff..c64adb010f 100644 --- a/.github/workflows/budibase_ci.yml +++ b/.github/workflows/budibase_ci.yml @@ -10,7 +10,7 @@ on: pull_request: branches: - master - - develop + - develop workflow_dispatch: env: @@ -64,6 +64,20 @@ jobs: name: codecov-umbrella verbose: true + test-pro: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Use Node.js 14.x + uses: actions/setup-node@v1 + with: + node-version: 14.x + - name: Install Pro + run: yarn install:pro $BRANCH $BASE_BRANCH + - run: yarn + - run: yarn bootstrap + - run: yarn test:pro + integration-test: runs-on: ubuntu-latest services: diff --git a/.github/workflows/deploy-cloud.yaml b/.github/workflows/deploy-cloud.yaml index 644eb5f1be..fa80da846f 100644 --- a/.github/workflows/deploy-cloud.yaml +++ b/.github/workflows/deploy-cloud.yaml @@ -22,7 +22,7 @@ jobs: - name: Pull values.yaml from budibase-infra run: | - curl -H "Authorization: token ${{ secrets.GH_PERSONAL_TOKEN }}" \ + curl -H "Authorization: token ${{ secrets.GH_ACCESS_TOKEN }}" \ -H 'Accept: application/vnd.github.v3.raw' \ -o values.production.yaml \ -L https://api.github.com/repos/budibase/budibase-infra/contents/kubernetes/values.yaml diff --git a/.github/workflows/deploy-preprod.yml b/.github/workflows/deploy-preprod.yml index cef47636ee..803dd6af52 100644 --- a/.github/workflows/deploy-preprod.yml +++ b/.github/workflows/deploy-preprod.yml @@ -1,18 +1,16 @@ -name: Budibase Deploy Preprod - +name: "deploy-preprod" on: - workflow_dispatch: - -env: - INTERCOM_TOKEN: ${{ secrets.INTERCOM_TOKEN }} - SENTRY_DSN: ${{ secrets.SENTRY_DSN }} + workflow_dispatch: + workflow_call: jobs: - release: + deploy-to-legacy-preprod-env: runs-on: ubuntu-latest - steps: - uses: actions/checkout@v2 + - name: 'Get Previous tag' + id: previoustag + uses: "WyriHaximus/github-action-get-previous-tag@v1" - name: Configure AWS Credentials uses: aws-actions/configure-aws-credentials@v1 @@ -21,23 +19,16 @@ jobs: aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} aws-region: eu-west-1 - - - name: Get the latest budibase release version - id: version - run: | - release_version=$(cat lerna.json | jq -r '.version') - echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV - - name: Pull values.yaml from budibase-infra - run: | - curl -H "Authorization: token ${{ secrets.GH_PERSONAL_TOKEN }}" \ + run: | + curl -H "Authorization: token ${{ secrets.GH_ACCESS_TOKEN }}" \ -H 'Accept: application/vnd.github.v3.raw' \ -o values.preprod.yaml \ -L https://api.github.com/repos/budibase/budibase-infra/contents/kubernetes/budibase-preprod/values.yaml wc -l values.preprod.yaml - name: Deploy to Preprod Environment - uses: glopezep/helm@v1.7.1 + uses: budibase/helm@v1.8.0 with: release: budibase-preprod namespace: budibase @@ -46,7 +37,7 @@ jobs: helm: helm3 values: | globals: - appVersion: v${{ env.RELEASE_VERSION }} + appVersion: ${{ steps.previoustag.outputs.tag }} ingress: enabled: true nginx: true @@ -61,5 +52,5 @@ jobs: uses: 
tsickert/discord-webhook@v4.0.0 with: webhook-url: ${{ secrets.PROD_DEPLOY_WEBHOOK_URL }} - content: "Preprod Deployment Complete: ${{ env.RELEASE_VERSION }} deployed to Budibase Pre-prod." - embed-title: ${{ env.RELEASE_VERSION }} + content: "Preprod Deployment Complete: ${{ steps.previoustag.outputs.tag }} deployed to Budibase Pre-prod." + embed-title: ${{ steps.previoustag.outputs.tag }} \ No newline at end of file diff --git a/.github/workflows/deploy-release.yml b/.github/workflows/deploy-release.yml deleted file mode 100644 index cff26fd7c8..0000000000 --- a/.github/workflows/deploy-release.yml +++ /dev/null @@ -1,88 +0,0 @@ -name: Budibase Deploy Release - -on: - workflow_dispatch: - -jobs: - release: - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v2 - - - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v1 - with: - aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - aws-region: eu-west-1 - - - name: Fail if branch is not develop - if: github.ref != 'refs/heads/develop' - run: | - echo "Ref is not develop, you must run this job from develop." - exit 1 - - - name: Get the latest budibase release version - id: version - run: | - release_version=$(cat lerna.json | jq -r '.version') - echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV - - - name: Pull values.yaml from budibase-infra - run: | - curl -H "Authorization: token ${{ secrets.GH_PERSONAL_TOKEN }}" \ - -H 'Accept: application/vnd.github.v3.raw' \ - -o values.release.yaml \ - -L https://api.github.com/repos/budibase/budibase-infra/contents/kubernetes/budibase-release/values.yaml - wc -l values.release.yaml - - - name: Deploy to Release Environment - uses: glopezep/helm@v1.7.1 - with: - release: budibase-release - namespace: budibase - chart: charts/budibase - token: ${{ github.token }} - helm: helm3 - values: | - globals: - appVersion: develop - ingress: - enabled: true - nginx: true - value-files: >- - [ - "values.release.yaml" - ] - env: - KUBECONFIG_FILE: '${{ secrets.RELEASE_KUBECONFIG }}' - - - name: Re roll app-service - uses: actions-hub/kubectl@master - env: - KUBE_CONFIG: ${{ secrets.RELEASE_KUBECONFIG_BASE64 }} - with: - args: rollout restart deployment app-service -n budibase - - - name: Re roll proxy-service - uses: actions-hub/kubectl@master - env: - KUBE_CONFIG: ${{ secrets.RELEASE_KUBECONFIG_BASE64 }} - with: - args: rollout restart deployment proxy-service -n budibase - - - name: Re roll worker-service - uses: actions-hub/kubectl@master - env: - KUBE_CONFIG: ${{ secrets.RELEASE_KUBECONFIG_BASE64 }} - with: - args: rollout restart deployment worker-service -n budibase - - - - name: Discord Webhook Action - uses: tsickert/discord-webhook@v4.0.0 - with: - webhook-url: ${{ secrets.PROD_DEPLOY_WEBHOOK_URL }} - content: "Release Env Deployment Complete: ${{ env.RELEASE_VERSION }} deployed to Budibase Release Env." 
- embed-title: ${{ env.RELEASE_VERSION }} diff --git a/.github/workflows/release-develop.yml b/.github/workflows/release-develop.yml index e986179cfc..68c949447c 100644 --- a/.github/workflows/release-develop.yml +++ b/.github/workflows/release-develop.yml @@ -117,4 +117,4 @@ jobs: with: repository: budibase/budibase-deploys event: budicloud-qa-deploy - github_pat: ${{ secrets.GH_ACCESS_TOKEN }} \ No newline at end of file + github_pat: ${{ secrets.GH_ACCESS_TOKEN }} diff --git a/.github/workflows/release.yml b/.github/workflows/release-master.yml similarity index 53% rename from .github/workflows/release.yml rename to .github/workflows/release-master.yml index 2a28150891..3ae265fa21 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release-master.yml @@ -35,9 +35,8 @@ env: PERSONAL_ACCESS_TOKEN : ${{ secrets.PERSONAL_ACCESS_TOKEN }} jobs: - release: + release-images: runs-on: ubuntu-latest - steps: - name: Fail if branch is not master if: github.ref != 'refs/heads/master' @@ -57,14 +56,6 @@ jobs: - run: yarn lint - run: yarn build - run: yarn build:sdk - - run: yarn test - - - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v1 - with: - aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - aws-region: eu-west-1 - name: Publish budibase packages to NPM env: @@ -90,46 +81,63 @@ jobs: DOCKER_USER: ${{ secrets.DOCKER_USERNAME }} DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }} BUDIBASE_RELEASE_VERSION: ${{ steps.previoustag.outputs.tag }} - - - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v1 - with: - aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - aws-region: eu-west-1 - - name: Pull values.yaml from budibase-infra - run: | - curl -H "Authorization: token ${{ secrets.GH_PERSONAL_TOKEN }}" \ - -H 'Accept: application/vnd.github.v3.raw' \ - -o values.preprod.yaml \ - -L https://api.github.com/repos/budibase/budibase-infra/contents/kubernetes/budibase-preprod/values.yaml - wc -l values.preprod.yaml + release-helm-chart: + needs: [release-images] + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Setup Helm + uses: azure/setup-helm@v1 + id: helm-install - - name: Deploy to Preprod Environment - uses: glopezep/helm@v1.7.1 - with: - release: budibase-preprod - namespace: budibase - chart: charts/budibase - token: ${{ github.token }} - helm: helm3 - values: | - globals: - appVersion: ${{ steps.previoustag.outputs.tag }} - ingress: - enabled: true - nginx: true - value-files: >- - [ - "values.preprod.yaml" - ] + - name: 'Get Previous tag' + id: previoustag + uses: "WyriHaximus/github-action-get-previous-tag@v1" + + # due to helm repo index issue: https://github.com/helm/helm/issues/7363 + # we need to create new package in a different dir, merge the index and move the package back + - name: Build and release helm chart + run: | + git config user.name "Budibase Helm Bot" + git config user.email "<>" + git reset --hard + git pull + mkdir sync + echo "Packaging chart to sync dir" + helm package charts/budibase --version 0.0.0-master --app-version "$RELEASE_VERSION" --destination sync + echo "Packaging successful" + git checkout gh-pages + echo "Indexing helm repo" + helm repo index --merge docs/index.yaml sync + mv -f sync/* docs + rm -rf sync + echo "Pushing new helm release" + git add -A + git commit -m "Helm Release: ${{ env.RELEASE_VERSION }}" + git push env: - 
KUBECONFIG_FILE: '${{ secrets.PREPROD_KUBECONFIG }}' + RELEASE_VERSION: ${{ steps.previoustag.outputs.tag }} - - name: Discord Webhook Action - uses: tsickert/discord-webhook@v4.0.0 + deploy-to-legacy-preprod-env: + needs: [release-images] + uses: ./.github/workflows/deploy-preprod.yml + secrets: inherit + + # Trigger deploy to new EKS preprod environment + trigger-deploy-to-preprod-env: + needs: [release-helm-chart] + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: 'Get Previous tag' + id: previoustag + uses: "WyriHaximus/github-action-get-previous-tag@v1" + + - uses: passeidireto/trigger-external-workflow-action@main + env: + PAYLOAD_VERSION: ${{ steps.previoustag.outputs.tag }} with: - webhook-url: ${{ secrets.PROD_DEPLOY_WEBHOOK_URL }} - content: "Preprod Deployment Complete: ${{ steps.previoustag.outputs.tag }} deployed to Budibase Pre-prod." - embed-title: ${{ steps.previoustag.outputs.tag }} + repository: budibase/budibase-deploys + event: budicloud-preprod-deploy + github_pat: ${{ secrets.GH_ACCESS_TOKEN }} diff --git a/.github/workflows/release-selfhost.yml b/.github/workflows/release-selfhost.yml index 12fb8f5a9d..f5a2f643c3 100644 --- a/.github/workflows/release-selfhost.yml +++ b/.github/workflows/release-selfhost.yml @@ -16,9 +16,13 @@ jobs: - uses: actions/checkout@v2 with: - node-version: 14.x fetch_depth: 0 + - name: Use Node.js 14.x + uses: actions/setup-node@v1 + with: + node-version: 14.x + - name: Get the latest budibase release version id: version run: | diff --git a/.github/workflows/deploy-single-image.yml b/.github/workflows/release-singleimage.yml similarity index 100% rename from .github/workflows/deploy-single-image.yml rename to .github/workflows/release-singleimage.yml diff --git a/.github/workflows/smoke_test.yaml b/.github/workflows/smoke_test.yaml deleted file mode 100644 index 3fd61cd9c5..0000000000 --- a/.github/workflows/smoke_test.yaml +++ /dev/null @@ -1,31 +0,0 @@ -name: Budibase Nightly Tests - -on: - workflow_dispatch: - schedule: - - cron: "0 5 * * *" # every day at 5AM - -jobs: - nightly: - runs-on: [self-hosted, qa] - - steps: - - uses: actions/checkout@v2 - - name: Use Node.js 14.x - uses: actions/setup-node@v1 - with: - node-version: 14.x - - name: QA Core Integration Tests - run: | - cd qa-core - yarn - yarn api:test:ci - env: - BUDIBASE_HOST: budicloud.qa.budibase.net - BUDIBASE_ACCOUNTS_URL: https://account-portal.budicloud.qa.budibase.net - - - name: Cypress Discord Notify - run: yarn test:notify - env: - WEBHOOK_URL: ${{ secrets.BUDI_QA_WEBHOOK }} - GITHUB_RUN_URL: $GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID \ No newline at end of file diff --git a/charts/budibase/templates/proxy-service-deployment.yaml b/charts/budibase/templates/proxy-service-deployment.yaml index e422df8db3..0dea38fcbd 100644 --- a/charts/budibase/templates/proxy-service-deployment.yaml +++ b/charts/budibase/templates/proxy-service-deployment.yaml @@ -51,6 +51,14 @@ spec: value: {{ tpl .Values.services.proxy.upstreams.minio . | quote }} - name: COUCHDB_UPSTREAM_URL value: {{ .Values.services.couchdb.url | default (tpl .Values.services.proxy.upstreams.couchdb .) 
| quote }} + {{ if .Values.services.proxy.proxyRateLimitWebhooksPerSecond }} + - name: PROXY_RATE_LIMIT_WEBHOOKS_PER_SECOND + value: {{ .Values.services.proxy.proxyRateLimitWebhooksPerSecond | quote }} + {{ end }} + {{ if .Values.services.proxy.proxyRateLimitApiPerSecond }} + - name: PROXY_RATE_LIMIT_API_PER_SECOND + value: {{ .Values.services.proxy.proxyRateLimitApiPerSecond | quote }} + {{ end }} - name: RESOLVER {{ if .Values.services.proxy.resolver }} value: {{ .Values.services.proxy.resolver }} diff --git a/charts/budibase/values.yaml b/charts/budibase/values.yaml index dd75b2daa3..536af8560f 100644 --- a/charts/budibase/values.yaml +++ b/charts/budibase/values.yaml @@ -245,7 +245,7 @@ couchdb: ## The CouchDB image image: repository: couchdb - tag: 3.2.1 + tag: 3.1.1 pullPolicy: IfNotPresent ## Experimental integration with Lucene-powered fulltext search diff --git a/docs/DEV-SETUP-DEBIAN.md b/docs/DEV-SETUP-DEBIAN.md index 9edd8286cb..cfd7eebf47 100644 --- a/docs/DEV-SETUP-DEBIAN.md +++ b/docs/DEV-SETUP-DEBIAN.md @@ -52,4 +52,14 @@ So this command will actually run the application in dev mode. It creates .env f The dev version will be available on port 10000 i.e. -http://127.0.0.1:10000/builder/admin \ No newline at end of file +http://127.0.0.1:10000/builder/admin + +### File descriptor issues with Vite and Chrome in Linux +If your dev environment stalls forever, with some network requests stuck in flight, it's likely that Chrome is trying to open more file descriptors than your system allows. +To fix this, apply the following tweaks. + +Debian based distros: +Add `* - nofile 65536` to `/etc/security/limits.conf`. + +Arch: +Add `DefaultLimitNOFILE=65536` to `/etc/systemd/system.conf`. \ No newline at end of file diff --git a/hosting/docker-compose.dev.yaml b/hosting/docker-compose.dev.yaml index 7d8198db73..394f5ac256 100644 --- a/hosting/docker-compose.dev.yaml +++ b/hosting/docker-compose.dev.yaml @@ -6,8 +6,7 @@ services: minio-service: container_name: budi-minio-dev restart: on-failure - # Last version that supports the "fs" backend - image: minio/minio:RELEASE.2022-10-24T18-35-07Z + image: minio/minio volumes: - minio_data:/data ports: @@ -69,4 +68,4 @@ volumes: minio_data: driver: local redis_data: - driver: local \ No newline at end of file + driver: local diff --git a/hosting/docker-compose.test.yaml b/hosting/docker-compose.test.yaml index dfd78621c5..f059173d2d 100644 --- a/hosting/docker-compose.test.yaml +++ b/hosting/docker-compose.test.yaml @@ -8,8 +8,8 @@ services: # Last version that supports the "fs" backend image: minio/minio:RELEASE.2022-10-24T18-35-07Z ports: - - 9000 - - 9001 + - "9000" + - "9001" environment: MINIO_ACCESS_KEY: ${MINIO_ACCESS_KEY} MINIO_SECRET_KEY: ${MINIO_SECRET_KEY} @@ -28,9 +28,9 @@ services: - COUCHDB_PASSWORD=${COUCH_DB_PASSWORD} - COUCHDB_USER=${COUCH_DB_USER} ports: - - 5984 - - 4369 - - 9100 + - "5984" + - "4369" + - "9100" healthcheck: test: ["CMD", "curl", "-f", "http://localhost:5984/_up"] interval: 30s @@ -42,6 +42,6 @@ services: image: redis command: redis-server --requirepass ${REDIS_PASSWORD} ports: - - 6379 + - "6379" healthcheck: - test: ["CMD", "redis-cli", "ping"] + test: ["CMD", "redis-cli", "ping"] \ No newline at end of file diff --git a/hosting/proxy/nginx.prod.conf b/hosting/proxy/nginx.prod.conf index 4d8b3466bf..8954106feb 100644 --- a/hosting/proxy/nginx.prod.conf +++ b/hosting/proxy/nginx.prod.conf @@ -55,12 +55,12 @@ http { set $csp_style "style-src 'self' 'unsafe-inline' https://cdn.jsdelivr.net 
https://fonts.googleapis.com https://rsms.me https://maxcdn.bootstrapcdn.com"; set $csp_object "object-src 'none'"; set $csp_base_uri "base-uri 'self'"; - set $csp_connect "connect-src 'self' https://*.budibase.net https://api-iam.intercom.io https://api-iam.intercom.io https://api-ping.intercom.io https://app.posthog.com wss://nexus-websocket-a.intercom.io wss://nexus-websocket-b.intercom.io https://nexus-websocket-a.intercom.io https://nexus-websocket-b.intercom.io https://uploads.intercomcdn.com https://uploads.intercomusercontent.com https://*.s3.*.amazonaws.com https://s3.*.amazonaws.com https://api.github.com"; + set $csp_connect "connect-src 'self' https://*.budibase.net https://api-iam.intercom.io https://api-iam.intercom.io https://api-ping.intercom.io https://app.posthog.com wss://nexus-websocket-a.intercom.io wss://nexus-websocket-b.intercom.io https://nexus-websocket-a.intercom.io https://nexus-websocket-b.intercom.io https://uploads.intercomcdn.com https://uploads.intercomusercontent.com https://*.s3.amazonaws.com https://*.s3.us-east-2.amazonaws.com https://*.s3.us-east-1.amazonaws.com https://*.s3.us-west-1.amazonaws.com https://*.s3.us-west-2.amazonaws.com https://*.s3.af-south-1.amazonaws.com https://*.s3.ap-east-1.amazonaws.com https://*.s3.ap-southeast-3.amazonaws.com https://*.s3.ap-south-1.amazonaws.com https://*.s3.ap-northeast-3.amazonaws.com https://*.s3.ap-northeast-2.amazonaws.com https://*.s3.ap-southeast-1.amazonaws.com https://*.s3.ap-southeast-2.amazonaws.com https://*.s3.ap-northeast-1.amazonaws.com https://*.s3.ca-central-1.amazonaws.com https://*.s3.cn-north-1.amazonaws.com https://*.s3.cn-northwest-1.amazonaws.com https://*.s3.eu-central-1.amazonaws.com https://*.s3.eu-west-1.amazonaws.com https://*.s3.eu-west-2.amazonaws.com https://*.s3.eu-south-1.amazonaws.com https://*.s3.eu-west-3.amazonaws.com https://*.s3.eu-north-1.amazonaws.com https://*.s3.sa-east-1.amazonaws.com https://*.s3.me-south-1.amazonaws.com https://*.s3.us-gov-east-1.amazonaws.com https://*.s3.us-gov-west-1.amazonaws.com https://api.github.com"; set $csp_font "font-src 'self' data: https://cdn.jsdelivr.net https://fonts.gstatic.com https://rsms.me https://maxcdn.bootstrapcdn.com https://js.intercomcdn.com https://fonts.intercomcdn.com"; set $csp_frame "frame-src 'self' https:"; set $csp_img "img-src http: https: data: blob:"; set $csp_manifest "manifest-src 'self'"; - set $csp_media "media-src 'self' https://js.intercomcdn.com"; + set $csp_media "media-src 'self' https://js.intercomcdn.com https://cdn.budi.live"; set $csp_worker "worker-src 'none'"; error_page 502 503 504 /error.html; diff --git a/lerna.json b/lerna.json index 5145222dfa..4fb904f81f 100644 --- a/lerna.json +++ b/lerna.json @@ -1,5 +1,5 @@ { - "version": "2.3.18-alpha.12", + "version": "2.4.12-alpha.0", "npmClient": "yarn", "packages": [ "packages/*" diff --git a/package.json b/package.json index 3ead7d5553..815e470916 100644 --- a/package.json +++ b/package.json @@ -13,7 +13,7 @@ "js-yaml": "^4.1.0", "kill-port": "^1.6.1", "lerna": "3.14.1", - "madge": "^5.0.1", + "madge": "^6.0.0", "prettier": "^2.3.1", "prettier-plugin-svelte": "^2.3.0", "rimraf": "^3.0.2", @@ -44,7 +44,7 @@ "dev": "yarn run kill-all && lerna link && lerna run --parallel dev:builder --concurrency 1", "dev:noserver": "yarn run kill-builder && lerna link && lerna run dev:stack:up && lerna run --parallel dev:builder --concurrency 1 --ignore @budibase/backend-core --ignore @budibase/server --ignore @budibase/worker", "dev:server": "yarn run kill-server && 
lerna run --parallel dev:builder --concurrency 1 --scope @budibase/backend-core --scope @budibase/worker --scope @budibase/server", - "test": "lerna run test && yarn test:pro", + "test": "lerna run test", "test:pro": "bash scripts/pro/test.sh", "lint:eslint": "eslint packages && eslint qa-core", "lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\" && prettier --check \"qa-core/**/*.{js,ts,svelte}\"", @@ -84,4 +84,4 @@ "install:pro": "bash scripts/pro/install.sh", "dep:clean": "yarn clean && yarn bootstrap" } -} \ No newline at end of file +} diff --git a/packages/backend-core/package.json b/packages/backend-core/package.json index 428d785a44..fff4040c22 100644 --- a/packages/backend-core/package.json +++ b/packages/backend-core/package.json @@ -1,6 +1,6 @@ { "name": "@budibase/backend-core", - "version": "2.3.18-alpha.12", + "version": "2.4.12-alpha.0", "description": "Budibase backend core libraries used in server and worker", "main": "dist/src/index.js", "types": "dist/src/index.d.ts", @@ -22,9 +22,9 @@ "test:watch": "jest --watchAll" }, "dependencies": { - "@budibase/nano": "10.1.1", + "@budibase/nano": "10.1.2", "@budibase/pouchdb-replication-stream": "1.2.10", - "@budibase/types": "2.3.18-alpha.12", + "@budibase/types": "2.4.12-alpha.0", "@shopify/jest-koa-mocks": "5.0.1", "@techpass/passport-openidconnect": "0.3.2", "aws-cloudfront-sign": "2.2.0", diff --git a/packages/backend-core/src/auth/auth.ts b/packages/backend-core/src/auth/auth.ts index bee245a3ae..7e6fe4bcee 100644 --- a/packages/backend-core/src/auth/auth.ts +++ b/packages/backend-core/src/auth/auth.ts @@ -2,25 +2,35 @@ const _passport = require("koa-passport") const LocalStrategy = require("passport-local").Strategy const JwtStrategy = require("passport-jwt").Strategy import { getGlobalDB } from "../context" -const refresh = require("passport-oauth2-refresh") -import { Config, Cookie } from "../constants" -import { getScopedConfig } from "../db" +import { Cookie } from "../constants" import { getSessionsForUser, invalidateSessions } from "../security/sessions" import { + authenticated, + csrf, + google, jwt as jwtPassport, local, - authenticated, - tenancy, - csrf, oidc, - google, + tenancy, } from "../middleware" +import * as userCache from "../cache/user" import { invalidateUser } from "../cache/user" -import { PlatformLogoutOpts, User } from "@budibase/types" +import { + ConfigType, + GoogleInnerConfig, + OIDCInnerConfig, + PlatformLogoutOpts, + SSOProviderType, + User, +} from "@budibase/types" import { logAlert } from "../logging" import * as events from "../events" -import * as userCache from "../cache/user" +import * as configs from "../configs" import { clearCookie, getCookie } from "../utils" +import { ssoSaveUserNoOp } from "../middleware/passport/sso/sso" +import env from "../environment" + +const refresh = require("passport-oauth2-refresh") export { auditLog, authError, @@ -33,7 +43,6 @@ export { google, oidc, } from "../middleware" -import { ssoSaveUserNoOp } from "../middleware/passport/sso/sso" export const buildAuthMiddleware = authenticated export const buildTenancyMiddleware = tenancy export const buildCsrfMiddleware = csrf @@ -44,7 +53,7 @@ export const jwt = require("jsonwebtoken") _passport.use(new LocalStrategy(local.options, local.authenticate)) if (jwtPassport.options.secretOrKey) { _passport.use(new JwtStrategy(jwtPassport.options, jwtPassport.authenticate)) -} else { +} else if (!env.DISABLE_JWT_WARNING) { logAlert("No JWT Secret 
supplied, cannot configure JWT strategy") } @@ -63,11 +72,10 @@ _passport.deserializeUser(async (user: User, done: any) => { }) async function refreshOIDCAccessToken( - db: any, - chosenConfig: any, + chosenConfig: OIDCInnerConfig, refreshToken: string -) { - const callbackUrl = await oidc.getCallbackUrl(db, chosenConfig) +): Promise { + const callbackUrl = await oidc.getCallbackUrl() let enrichedConfig: any let strategy: any @@ -90,7 +98,7 @@ async function refreshOIDCAccessToken( return new Promise(resolve => { refresh.requestNewAccessToken( - Config.OIDC, + ConfigType.OIDC, refreshToken, (err: any, accessToken: string, refreshToken: any, params: any) => { resolve({ err, accessToken, refreshToken, params }) @@ -100,11 +108,10 @@ async function refreshOIDCAccessToken( } async function refreshGoogleAccessToken( - db: any, - config: any, + config: GoogleInnerConfig, refreshToken: any -) { - let callbackUrl = await google.getCallbackUrl(db, config) +): Promise { + let callbackUrl = await google.getCallbackUrl(config) let strategy try { @@ -124,7 +131,7 @@ async function refreshGoogleAccessToken( return new Promise(resolve => { refresh.requestNewAccessToken( - Config.GOOGLE, + ConfigType.GOOGLE, refreshToken, (err: any, accessToken: string, refreshToken: string, params: any) => { resolve({ err, accessToken, refreshToken, params }) @@ -133,41 +140,37 @@ async function refreshGoogleAccessToken( }) } +interface RefreshResponse { + err?: { + data?: string + } + accessToken?: string + refreshToken?: string + params?: any +} + export async function refreshOAuthToken( refreshToken: string, - configType: string, - configId: string -) { - const db = getGlobalDB() - - const config = await getScopedConfig(db, { - type: configType, - group: {}, - }) - - let chosenConfig = {} - let refreshResponse - if (configType === Config.OIDC) { - // configId - retrieved from cookie. 
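
Usage sketch for the refactored refreshOAuthToken shown just below. Only the new signature, SSOProviderType and the RefreshResponse shape come from this diff; the import paths and the renewSession wrapper are illustrative assumptions.

import { SSOProviderType } from "@budibase/types"
import { refreshOAuthToken } from "./auth/auth" // assumed path: packages/backend-core/src/auth/auth.ts

async function renewSession(refreshToken: string, oidcConfigId?: string) {
  // OIDC callers must supply the inner config id (previously read from a cookie);
  // Google resolves its config internally.
  const providerType = oidcConfigId
    ? SSOProviderType.OIDC
    : SSOProviderType.GOOGLE
  const response = await refreshOAuthToken(refreshToken, providerType, oidcConfigId)
  if (response.err) {
    throw new Error(`OAuth token refresh failed: ${response.err.data}`)
  }
  return {
    accessToken: response.accessToken,
    refreshToken: response.refreshToken,
  }
}
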
- chosenConfig = config.configs.filter((c: any) => c.uuid === configId)[0] - if (!chosenConfig) { - throw new Error("Invalid OIDC configuration") - } - refreshResponse = await refreshOIDCAccessToken( - db, - chosenConfig, - refreshToken - ) - } else { - chosenConfig = config - refreshResponse = await refreshGoogleAccessToken( - db, - chosenConfig, - refreshToken - ) + providerType: SSOProviderType, + configId?: string +): Promise { + switch (providerType) { + case SSOProviderType.OIDC: + if (!configId) { + return { err: { data: "OIDC config id not provided" } } + } + const oidcConfig = await configs.getOIDCConfigById(configId) + if (!oidcConfig) { + return { err: { data: "OIDC configuration not found" } } + } + return refreshOIDCAccessToken(oidcConfig, refreshToken) + case SSOProviderType.GOOGLE: + let googleConfig = await configs.getGoogleConfig() + if (!googleConfig) { + return { err: { data: "Google configuration not found" } } + } + return refreshGoogleAccessToken(googleConfig, refreshToken) } - - return refreshResponse } // TODO: Refactor to use user save function instead to prevent the need for @@ -225,6 +228,6 @@ export async function platformLogout(opts: PlatformLogoutOpts) { const sessionIds = sessions.map(({ sessionId }) => sessionId) await invalidateSessions(userId, { sessionIds, reason: "logout" }) - await events.auth.logout() + await events.auth.logout(ctx.user?.email) await userCache.invalidateUser(userId) } diff --git a/packages/backend-core/src/cache/appMetadata.ts b/packages/backend-core/src/cache/appMetadata.ts index d24c4a3140..5b66c356d3 100644 --- a/packages/backend-core/src/cache/appMetadata.ts +++ b/packages/backend-core/src/cache/appMetadata.ts @@ -1,6 +1,6 @@ import { getAppClient } from "../redis/init" import { doWithDB, DocumentType } from "../db" -import { Database } from "@budibase/types" +import { Database, App } from "@budibase/types" const AppState = { INVALID: "invalid", @@ -65,7 +65,7 @@ export async function getAppMetadata(appId: string) { if (isInvalid(metadata)) { throw { status: 404, message: "No app metadata found" } } - return metadata + return metadata as App } /** diff --git a/packages/backend-core/src/cache/tests/writethrough.spec.ts b/packages/backend-core/src/cache/tests/writethrough.spec.ts index d346788121..a34f05e881 100644 --- a/packages/backend-core/src/cache/tests/writethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/writethrough.spec.ts @@ -1,10 +1,13 @@ -import { structures, DBTestConfiguration } from "../../../tests" +import { + structures, + DBTestConfiguration, + expectFunctionWasCalledTimesWith, +} from "../../../tests" import { Writethrough } from "../writethrough" import { getDB } from "../../db" import tk from "timekeeper" -const START_DATE = Date.now() -tk.freeze(START_DATE) +tk.freeze(Date.now()) const DELAY = 5000 @@ -17,34 +20,99 @@ describe("writethrough", () => { const writethrough = new Writethrough(db, DELAY) const writethrough2 = new Writethrough(db2, DELAY) + const docId = structures.uuid() + + beforeEach(() => { + jest.clearAllMocks() + }) + describe("put", () => { - let first: any + let current: any it("should be able to store, will go to DB", async () => { await config.doInTenant(async () => { - const response = await writethrough.put({ _id: "test", value: 1 }) + const response = await writethrough.put({ + _id: docId, + value: 1, + }) const output = await db.get(response.id) - first = output + current = output expect(output.value).toBe(1) }) }) it("second put shouldn't update DB", async () => { await 
config.doInTenant(async () => { - const response = await writethrough.put({ ...first, value: 2 }) + const response = await writethrough.put({ ...current, value: 2 }) const output = await db.get(response.id) - expect(first._rev).toBe(output._rev) + expect(current._rev).toBe(output._rev) expect(output.value).toBe(1) }) }) it("should put it again after delay period", async () => { await config.doInTenant(async () => { - tk.freeze(START_DATE + DELAY + 1) - const response = await writethrough.put({ ...first, value: 3 }) + tk.freeze(Date.now() + DELAY + 1) + const response = await writethrough.put({ ...current, value: 3 }) const output = await db.get(response.id) - expect(response.rev).not.toBe(first._rev) + expect(response.rev).not.toBe(current._rev) expect(output.value).toBe(3) + + current = output + }) + }) + + it("should handle parallel DB updates ignoring conflicts", async () => { + await config.doInTenant(async () => { + tk.freeze(Date.now() + DELAY + 1) + const responses = await Promise.all([ + writethrough.put({ ...current, value: 4 }), + writethrough.put({ ...current, value: 4 }), + writethrough.put({ ...current, value: 4 }), + ]) + + const newRev = responses.map(x => x.rev).find(x => x !== current._rev) + expect(newRev).toBeDefined() + expect(responses.map(x => x.rev)).toEqual( + expect.arrayContaining([current._rev, current._rev, newRev]) + ) + expectFunctionWasCalledTimesWith( + console.warn, + 2, + "bb-warn: Ignoring redlock conflict in write-through cache" + ) + + const output = await db.get(current._id) + expect(output.value).toBe(4) + expect(output._rev).toBe(newRev) + + current = output + }) + }) + + it("should handle updates with documents falling behind", async () => { + await config.doInTenant(async () => { + tk.freeze(Date.now() + DELAY + 1) + + const id = structures.uuid() + await writethrough.put({ _id: id, value: 1 }) + const doc = await writethrough.get(id) + + // Updating document + tk.freeze(Date.now() + DELAY + 1) + await writethrough.put({ ...doc, value: 2 }) + + // Update with the old rev value + tk.freeze(Date.now() + DELAY + 1) + const res = await writethrough.put({ + ...doc, + value: 3, + }) + expect(res.ok).toBe(true) + + const output = await db.get(id) + expect(output.value).toBe(3) + expect(output._rev).toBe(res.rev) }) }) }) @@ -52,8 +120,8 @@ describe("writethrough", () => { describe("get", () => { it("should be able to retrieve", async () => { await config.doInTenant(async () => { - const response = await writethrough.get("test") - expect(response.value).toBe(3) + const response = await writethrough.get(docId) + expect(response.value).toBe(4) }) }) }) diff --git a/packages/backend-core/src/cache/writethrough.ts b/packages/backend-core/src/cache/writethrough.ts index dc889d5b18..a3b1ecc08d 100644 --- a/packages/backend-core/src/cache/writethrough.ts +++ b/packages/backend-core/src/cache/writethrough.ts @@ -1,7 +1,8 @@ import BaseCache from "./base" import { getWritethroughClient } from "../redis/init" import { logWarn } from "../logging" -import { Database } from "@budibase/types" +import { Database, Document, LockName, LockType } from "@budibase/types" +import * as locks from "../redis/redlockImpl" const DEFAULT_WRITE_RATE_MS = 10000 let CACHE: BaseCache | null = null @@ -27,44 +28,62 @@ function makeCacheItem(doc: any, lastWrite: number | null = null): CacheItem { return { doc, lastWrite: lastWrite || Date.now() } } -export async function put( +async function put( db: Database, - doc: any, + doc: Document, writeRateMs: number = DEFAULT_WRITE_RATE_MS ) { 
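
A minimal sketch of the write-through behaviour these tests exercise. The relative imports mirror the spec above; the database name is hypothetical and the tenant-context plumbing (config.doInTenant in the tests) is omitted for brevity.

import { Writethrough } from "../writethrough"
import { getDB } from "../../db"

const WRITE_RATE_MS = 10000 // DEFAULT_WRITE_RATE_MS in writethrough.ts

async function demo() {
  const db = getDB("example-db") // hypothetical database name
  const cache = new Writethrough(db, WRITE_RATE_MS)

  // First put persists to CouchDB immediately.
  const first = await cache.put({ _id: "doc-1", value: 1 })

  // Further puts inside the write window only update the cache; the stored
  // revision stays the same until WRITE_RATE_MS has elapsed, and parallel
  // writers are now serialised by the TRY_ONCE redlock.
  await cache.put({ _id: "doc-1", _rev: first.rev, value: 2 })

  // Reads come from the cache, falling back to the DB on a miss.
  return cache.get("doc-1")
}
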
const cache = await getCache() const key = doc._id - let cacheItem: CacheItem | undefined = await cache.get(makeCacheKey(db, key)) + let cacheItem: CacheItem | undefined + if (key) { + cacheItem = await cache.get(makeCacheKey(db, key)) + } const updateDb = !cacheItem || cacheItem.lastWrite < Date.now() - writeRateMs let output = doc if (updateDb) { - const writeDb = async (toWrite: any) => { - // doc should contain the _id and _rev - const response = await db.put(toWrite) - output = { - ...doc, - _id: response.id, - _rev: response.rev, - } - } - try { - await writeDb(doc) - } catch (err: any) { - if (err.status !== 409) { - throw err - } else { - // Swallow 409s but log them - logWarn(`Ignoring conflict in write-through cache`) + const lockResponse = await locks.doWithLock( + { + type: LockType.TRY_ONCE, + name: LockName.PERSIST_WRITETHROUGH, + resource: key, + ttl: 1000, + }, + async () => { + const writeDb = async (toWrite: any) => { + // doc should contain the _id and _rev + const response = await db.put(toWrite, { force: true }) + output = { + ...doc, + _id: response.id, + _rev: response.rev, + } + } + try { + await writeDb(doc) + } catch (err: any) { + if (err.status !== 409) { + throw err + } else { + // Swallow 409s but log them + logWarn(`Ignoring conflict in write-through cache`) + } + } } + ) + if (!lockResponse.executed) { + logWarn(`Ignoring redlock conflict in write-through cache`) } } // if we are updating the DB then need to set the lastWrite to now cacheItem = makeCacheItem(output, updateDb ? null : cacheItem?.lastWrite) - await cache.store(makeCacheKey(db, key), cacheItem) + if (output._id) { + await cache.store(makeCacheKey(db, output._id), cacheItem) + } return { ok: true, id: output._id, rev: output._rev } } -export async function get(db: Database, id: string): Promise { +async function get(db: Database, id: string): Promise { const cache = await getCache() const cacheKey = makeCacheKey(db, id) let cacheItem: CacheItem = await cache.get(cacheKey) @@ -76,11 +95,7 @@ export async function get(db: Database, id: string): Promise { return cacheItem.doc } -export async function remove( - db: Database, - docOrId: any, - rev?: any -): Promise { +async function remove(db: Database, docOrId: any, rev?: any): Promise { const cache = await getCache() if (!docOrId) { throw new Error("No ID/Rev provided.") diff --git a/packages/backend-core/src/configs/configs.ts b/packages/backend-core/src/configs/configs.ts new file mode 100644 index 0000000000..b461497747 --- /dev/null +++ b/packages/backend-core/src/configs/configs.ts @@ -0,0 +1,244 @@ +import { + Config, + ConfigType, + GoogleConfig, + GoogleInnerConfig, + OIDCConfig, + OIDCInnerConfig, + SettingsConfig, + SettingsInnerConfig, + SMTPConfig, + SMTPInnerConfig, +} from "@budibase/types" +import { DocumentType, SEPARATOR } from "../constants" +import { CacheKey, TTL, withCache } from "../cache" +import * as context from "../context" +import env from "../environment" +import environment from "../environment" + +// UTILS + +/** + * Generates a new configuration ID. + * @returns {string} The new configuration ID which the config doc can be stored under. 
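
Usage sketch for the helpers this new module defines further down. It assumes the caller is already inside a tenant context (context.doInTenant, as in the new configs.spec.ts); the wrapper function itself is illustrative.

import { ConfigType } from "@budibase/types"
import * as configs from "../configs" // import style used by the new tests

async function describeTenantConfig() {
  // env var first, tenant-aware cloud URL, or the settings doc in self-host
  const platformUrl = await configs.getPlatformUrl({ tenantAware: true })

  // raw config doc fetch - resolves to undefined if it has never been saved
  const settings = await configs.getConfig(ConfigType.SETTINGS)

  // provider configs fall back to env vars where sensible
  const google = await configs.getGoogleDatasourceConfig()
  const smtp = await configs.getSMTPConfig(false) // false = not an automation

  return { platformUrl, hasSettings: !!settings, google, smtp }
}
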
+ */ +export function generateConfigID(type: ConfigType) { + return `${DocumentType.CONFIG}${SEPARATOR}${type}` +} + +export async function getConfig( + type: ConfigType +): Promise { + const db = context.getGlobalDB() + try { + // await to catch error + const config = (await db.get(generateConfigID(type))) as T + return config + } catch (e: any) { + if (e.status === 404) { + return + } + throw e + } +} + +export async function save( + config: Config +): Promise<{ id: string; rev: string }> { + const db = context.getGlobalDB() + return db.put(config) +} + +// SETTINGS + +export async function getSettingsConfigDoc(): Promise { + let config = await getConfig(ConfigType.SETTINGS) + + if (!config) { + config = { + _id: generateConfigID(ConfigType.SETTINGS), + type: ConfigType.SETTINGS, + config: {}, + } + } + + // overridden fields + config.config.platformUrl = await getPlatformUrl({ + tenantAware: true, + config: config.config, + }) + config.config.analyticsEnabled = await analyticsEnabled({ + config: config.config, + }) + + return config +} + +export async function getSettingsConfig(): Promise { + return (await getSettingsConfigDoc()).config +} + +export async function getPlatformUrl( + opts: { tenantAware: boolean; config?: SettingsInnerConfig } = { + tenantAware: true, + } +) { + let platformUrl = env.PLATFORM_URL || "http://localhost:10000" + + if (!env.SELF_HOSTED && env.MULTI_TENANCY && opts.tenantAware) { + // cloud and multi tenant - add the tenant to the default platform url + const tenantId = context.getTenantId() + if (!platformUrl.includes("localhost:")) { + platformUrl = platformUrl.replace("://", `://${tenantId}.`) + } + } else if (env.SELF_HOSTED) { + const config = opts?.config + ? opts.config + : // direct to db to prevent infinite loop + (await getConfig(ConfigType.SETTINGS))?.config + if (config?.platformUrl) { + platformUrl = config.platformUrl + } + } + + return platformUrl +} + +export const analyticsEnabled = async (opts?: { + config?: SettingsInnerConfig +}) => { + // cloud - always use the environment variable + if (!env.SELF_HOSTED) { + return !!env.ENABLE_ANALYTICS + } + + // self host - prefer the settings doc + // use cache as events have high throughput + const enabledInDB = await withCache( + CacheKey.ANALYTICS_ENABLED, + TTL.ONE_DAY, + async () => { + const config = opts?.config + ? 
opts.config + : // direct to db to prevent infinite loop + (await getConfig(ConfigType.SETTINGS))?.config + + // need to do explicit checks in case the field is not set + if (config?.analyticsEnabled === false) { + return false + } else if (config?.analyticsEnabled === true) { + return true + } + } + ) + + if (enabledInDB !== undefined) { + return enabledInDB + } + + // fallback to the environment variable + // explicitly check for 0 or false here, undefined or otherwise is treated as true + const envEnabled: any = env.ENABLE_ANALYTICS + if (envEnabled === 0 || envEnabled === false) { + return false + } else { + return true + } +} + +// GOOGLE + +async function getGoogleConfigDoc(): Promise { + return await getConfig(ConfigType.GOOGLE) +} + +export async function getGoogleConfig(): Promise< + GoogleInnerConfig | undefined +> { + const config = await getGoogleConfigDoc() + return config?.config +} + +export async function getGoogleDatasourceConfig(): Promise< + GoogleInnerConfig | undefined +> { + if (!env.SELF_HOSTED) { + // always use the env vars in cloud + return getDefaultGoogleConfig() + } + + // prefer the config in self-host + let config = await getGoogleConfig() + + // fallback to env vars + if (!config || !config.activated) { + config = getDefaultGoogleConfig() + } + + return config +} + +export function getDefaultGoogleConfig(): GoogleInnerConfig | undefined { + if (environment.GOOGLE_CLIENT_ID && environment.GOOGLE_CLIENT_SECRET) { + return { + clientID: environment.GOOGLE_CLIENT_ID!, + clientSecret: environment.GOOGLE_CLIENT_SECRET!, + activated: true, + } + } +} + +// OIDC + +async function getOIDCConfigDoc(): Promise { + return getConfig(ConfigType.OIDC) +} + +export async function getOIDCConfig(): Promise { + const config = (await getOIDCConfigDoc())?.config + // default to the 0th config + return config?.configs && config.configs[0] +} + +/** + * @param configId The config id of the inner config to retrieve + */ +export async function getOIDCConfigById( + configId: string +): Promise { + const config = (await getConfig(ConfigType.OIDC))?.config + return config && config.configs.filter((c: any) => c.uuid === configId)[0] +} + +// SMTP + +export async function getSMTPConfigDoc(): Promise { + return getConfig(ConfigType.SMTP) +} + +export async function getSMTPConfig( + isAutomation?: boolean +): Promise { + const config = await getSMTPConfigDoc() + if (config) { + return config.config + } + + // always allow fallback in self host + // in cloud don't allow for automations + const allowFallback = env.SELF_HOSTED || !isAutomation + + // Use an SMTP fallback configuration from env variables + if (env.SMTP_FALLBACK_ENABLED && allowFallback) { + return { + port: env.SMTP_PORT, + host: env.SMTP_HOST!, + secure: false, + from: env.SMTP_FROM_ADDRESS!, + auth: { + user: env.SMTP_USER!, + pass: env.SMTP_PASSWORD!, + }, + } + } +} diff --git a/packages/backend-core/src/configs/index.ts b/packages/backend-core/src/configs/index.ts new file mode 100644 index 0000000000..783f22a0b9 --- /dev/null +++ b/packages/backend-core/src/configs/index.ts @@ -0,0 +1 @@ +export * from "./configs" diff --git a/packages/backend-core/src/configs/tests/configs.spec.ts b/packages/backend-core/src/configs/tests/configs.spec.ts new file mode 100644 index 0000000000..079f2ab681 --- /dev/null +++ b/packages/backend-core/src/configs/tests/configs.spec.ts @@ -0,0 +1,116 @@ +import { DBTestConfiguration, generator, testEnv } from "../../../tests" +import { ConfigType } from "@budibase/types" +import env from 
"../../environment" +import * as configs from "../configs" + +const DEFAULT_URL = "http://localhost:10000" +const ENV_URL = "http://env.com" + +describe("configs", () => { + const config = new DBTestConfiguration() + + const setDbPlatformUrl = async (dbUrl: string) => { + const settingsConfig = { + _id: configs.generateConfigID(ConfigType.SETTINGS), + type: ConfigType.SETTINGS, + config: { + platformUrl: dbUrl, + }, + } + await configs.save(settingsConfig) + } + + beforeEach(async () => { + config.newTenant() + }) + + describe("getPlatformUrl", () => { + describe("self host", () => { + beforeEach(async () => { + testEnv.selfHosted() + }) + + it("gets the default url", async () => { + await config.doInTenant(async () => { + const url = await configs.getPlatformUrl() + expect(url).toBe(DEFAULT_URL) + }) + }) + + it("gets the platform url from the environment", async () => { + await config.doInTenant(async () => { + env._set("PLATFORM_URL", ENV_URL) + const url = await configs.getPlatformUrl() + expect(url).toBe(ENV_URL) + }) + }) + + it("gets the platform url from the database", async () => { + await config.doInTenant(async () => { + const dbUrl = generator.url() + await setDbPlatformUrl(dbUrl) + const url = await configs.getPlatformUrl() + expect(url).toBe(dbUrl) + }) + }) + }) + + describe("cloud", () => { + function getTenantAwareUrl() { + return `http://${config.tenantId}.env.com` + } + + beforeEach(async () => { + testEnv.cloudHosted() + testEnv.multiTenant() + + env._set("PLATFORM_URL", ENV_URL) + }) + + it("gets the platform url from the environment without tenancy", async () => { + await config.doInTenant(async () => { + const url = await configs.getPlatformUrl({ tenantAware: false }) + expect(url).toBe(ENV_URL) + }) + }) + + it("gets the platform url from the environment with tenancy", async () => { + await config.doInTenant(async () => { + const url = await configs.getPlatformUrl() + expect(url).toBe(getTenantAwareUrl()) + }) + }) + + it("never gets the platform url from the database", async () => { + await config.doInTenant(async () => { + await setDbPlatformUrl(generator.url()) + const url = await configs.getPlatformUrl() + expect(url).toBe(getTenantAwareUrl()) + }) + }) + }) + }) + + describe("getSettingsConfig", () => { + beforeAll(async () => { + testEnv.selfHosted() + env._set("PLATFORM_URL", "") + }) + + it("returns the platform url with an existing config", async () => { + await config.doInTenant(async () => { + const dbUrl = generator.url() + await setDbPlatformUrl(dbUrl) + const config = await configs.getSettingsConfig() + expect(config.platformUrl).toBe(dbUrl) + }) + }) + + it("returns the platform url without an existing config", async () => { + await config.doInTenant(async () => { + const config = await configs.getSettingsConfig() + expect(config.platformUrl).toBe(DEFAULT_URL) + }) + }) + }) +}) diff --git a/packages/backend-core/src/constants/db.ts b/packages/backend-core/src/constants/db.ts index f7d15b3880..d41098c405 100644 --- a/packages/backend-core/src/constants/db.ts +++ b/packages/backend-core/src/constants/db.ts @@ -68,6 +68,7 @@ export enum DocumentType { MEM_VIEW = "view", USER_FLAG = "flag", AUTOMATION_METADATA = "meta_au", + AUDIT_LOG = "al", } export const StaticDatabases = { @@ -88,6 +89,9 @@ export const StaticDatabases = { install: "install", }, }, + AUDIT_LOGS: { + name: "audit-logs", + }, } export const APP_PREFIX = DocumentType.APP + SEPARATOR diff --git a/packages/backend-core/src/constants/misc.ts b/packages/backend-core/src/constants/misc.ts 
index 0bf3df4094..e25c90575f 100644 --- a/packages/backend-core/src/constants/misc.ts +++ b/packages/backend-core/src/constants/misc.ts @@ -41,5 +41,6 @@ export enum Config { OIDC_LOGOS = "logos_oidc", } +export const MIN_VALID_DATE = new Date(-2147483647000) export const MAX_VALID_DATE = new Date(2147483647000) export const DEFAULT_TENANT_ID = "default" diff --git a/packages/backend-core/src/context/Context.ts b/packages/backend-core/src/context/Context.ts index 02b7713764..d29b6935a8 100644 --- a/packages/backend-core/src/context/Context.ts +++ b/packages/backend-core/src/context/Context.ts @@ -1,5 +1,5 @@ import { AsyncLocalStorage } from "async_hooks" -import { ContextMap } from "./mainContext" +import { ContextMap } from "./types" export default class Context { static storage = new AsyncLocalStorage() diff --git a/packages/backend-core/src/context/identity.ts b/packages/backend-core/src/context/identity.ts index 648dd1b5fd..84de3b68c9 100644 --- a/packages/backend-core/src/context/identity.ts +++ b/packages/backend-core/src/context/identity.ts @@ -5,6 +5,8 @@ import { isCloudAccount, Account, AccountUserContext, + UserContext, + Ctx, } from "@budibase/types" import * as context from "." @@ -16,15 +18,22 @@ export function doInIdentityContext(identity: IdentityContext, task: any) { return context.doInIdentityContext(identity, task) } -export function doInUserContext(user: User, task: any) { - const userContext: any = { +// used in server/worker +export function doInUserContext(user: User, ctx: Ctx, task: any) { + const userContext: UserContext = { ...user, _id: user._id as string, type: IdentityType.USER, + hostInfo: { + ipAddress: ctx.request.ip, + // filled in by koa-useragent package + userAgent: ctx.userAgent._agent.source, + }, } return doInIdentityContext(userContext, task) } +// used in account portal export function doInAccountContext(account: Account, task: any) { const _id = getAccountUserId(account) const tenantId = account.tenantId diff --git a/packages/backend-core/src/context/mainContext.ts b/packages/backend-core/src/context/mainContext.ts index 9884d25d5a..02ba16aa8c 100644 --- a/packages/backend-core/src/context/mainContext.ts +++ b/packages/backend-core/src/context/mainContext.ts @@ -11,13 +11,7 @@ import { DEFAULT_TENANT_ID, } from "../constants" import { Database, IdentityContext } from "@budibase/types" - -export type ContextMap = { - tenantId?: string - appId?: string - identity?: IdentityContext - environmentVariables?: Record -} +import { ContextMap } from "./types" let TEST_APP_ID: string | null = null @@ -30,14 +24,23 @@ export function getGlobalDBName(tenantId?: string) { return baseGlobalDBName(tenantId) } -export function baseGlobalDBName(tenantId: string | undefined | null) { - let dbName - if (!tenantId || tenantId === DEFAULT_TENANT_ID) { - dbName = StaticDatabases.GLOBAL.name - } else { - dbName = `${tenantId}${SEPARATOR}${StaticDatabases.GLOBAL.name}` +export function getAuditLogDBName(tenantId?: string) { + if (!tenantId) { + tenantId = getTenantId() + } + if (tenantId === DEFAULT_TENANT_ID) { + return StaticDatabases.AUDIT_LOGS.name + } else { + return `${tenantId}${SEPARATOR}${StaticDatabases.AUDIT_LOGS.name}` + } +} + +export function baseGlobalDBName(tenantId: string | undefined | null) { + if (!tenantId || tenantId === DEFAULT_TENANT_ID) { + return StaticDatabases.GLOBAL.name + } else { + return `${tenantId}${SEPARATOR}${StaticDatabases.GLOBAL.name}` } - return dbName } export function isMultiTenant() { @@ -228,6 +231,13 @@ export function 
getGlobalDB(): Database { return getDB(baseGlobalDBName(context?.tenantId)) } +export function getAuditLogsDB(): Database { + if (!getTenantId()) { + throw new Error("No tenant ID found - cannot open audit log DB") + } + return getDB(getAuditLogDBName()) +} + /** * Gets the app database based on whatever the request * contained, dev or prod. diff --git a/packages/backend-core/src/context/types.ts b/packages/backend-core/src/context/types.ts new file mode 100644 index 0000000000..78197ed528 --- /dev/null +++ b/packages/backend-core/src/context/types.ts @@ -0,0 +1,9 @@ +import { IdentityContext } from "@budibase/types" + +// keep this out of Budibase types, don't want to expose context info +export type ContextMap = { + tenantId?: string + appId?: string + identity?: IdentityContext + environmentVariables?: Record +} diff --git a/packages/backend-core/src/db/index.ts b/packages/backend-core/src/db/index.ts index 0d9f75fa18..a569b17b36 100644 --- a/packages/backend-core/src/db/index.ts +++ b/packages/backend-core/src/db/index.ts @@ -7,3 +7,4 @@ export { default as Replication } from "./Replication" // exports to support old export structure export * from "../constants/db" export { getGlobalDBName, baseGlobalDBName } from "../context" +export * from "./lucene" diff --git a/packages/backend-core/src/db/lucene.ts b/packages/backend-core/src/db/lucene.ts new file mode 100644 index 0000000000..cba2f0138a --- /dev/null +++ b/packages/backend-core/src/db/lucene.ts @@ -0,0 +1,624 @@ +import fetch from "node-fetch" +import { getCouchInfo } from "./couch" +import { SearchFilters, Row } from "@budibase/types" + +const QUERY_START_REGEX = /\d[0-9]*:/g + +interface SearchResponse { + rows: T[] | any[] + bookmark: string +} + +interface PaginatedSearchResponse extends SearchResponse { + hasNextPage: boolean +} + +export type SearchParams = { + tableId?: string + sort?: string + sortOrder?: string + sortType?: string + limit?: number + bookmark?: string + version?: string + indexer?: () => Promise + disableEscaping?: boolean + rows?: T | Row[] +} + +export function removeKeyNumbering(key: any): string { + if (typeof key === "string" && key.match(QUERY_START_REGEX) != null) { + const parts = key.split(":") + // remove the number + parts.shift() + return parts.join(":") + } else { + return key + } +} + +/** + * Class to build lucene query URLs. + * Optionally takes a base lucene query object. 
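
A sketch of driving the QueryBuilder defined below. The builder is re-exported from src/db/index.ts in this diff; the database and index names here ("app-db", "rows") are illustrative assumptions.

import { QueryBuilder } from "../db"

async function findActiveRows(appDbName: string) {
  const page = await new QueryBuilder(appDbName, "rows")
    .addEqual("status", "active")
    .addString("name", "jo") // compiled to a name:jo* prefix match
    .setSort("name")
    .setSortOrder("ascending")
    .setLimit(25)
    .run()

  // page.rows holds the documents, page.bookmark the cursor for the next page
  return page
}
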
+ */ +export class QueryBuilder { + dbName: string + index: string + query: SearchFilters + limit: number + sort?: string + bookmark?: string + sortOrder: string + sortType: string + includeDocs: boolean + version?: string + indexBuilder?: () => Promise + noEscaping = false + + constructor(dbName: string, index: string, base?: SearchFilters) { + this.dbName = dbName + this.index = index + this.query = { + allOr: false, + string: {}, + fuzzy: {}, + range: {}, + equal: {}, + notEqual: {}, + empty: {}, + notEmpty: {}, + oneOf: {}, + contains: {}, + notContains: {}, + containsAny: {}, + ...base, + } + this.limit = 50 + this.sortOrder = "ascending" + this.sortType = "string" + this.includeDocs = true + } + + disableEscaping() { + this.noEscaping = true + return this + } + + setIndexBuilder(builderFn: () => Promise) { + this.indexBuilder = builderFn + return this + } + + setVersion(version?: string) { + if (version != null) { + this.version = version + } + return this + } + + setTable(tableId: string) { + this.query.equal!.tableId = tableId + return this + } + + setLimit(limit?: number) { + if (limit != null) { + this.limit = limit + } + return this + } + + setSort(sort?: string) { + if (sort != null) { + this.sort = sort + } + return this + } + + setSortOrder(sortOrder?: string) { + if (sortOrder != null) { + this.sortOrder = sortOrder + } + return this + } + + setSortType(sortType?: string) { + if (sortType != null) { + this.sortType = sortType + } + return this + } + + setBookmark(bookmark?: string) { + if (bookmark != null) { + this.bookmark = bookmark + } + return this + } + + excludeDocs() { + this.includeDocs = false + return this + } + + addString(key: string, partial: string) { + this.query.string![key] = partial + return this + } + + addFuzzy(key: string, fuzzy: string) { + this.query.fuzzy![key] = fuzzy + return this + } + + addRange(key: string, low: string | number, high: string | number) { + this.query.range![key] = { + low, + high, + } + return this + } + + addEqual(key: string, value: any) { + this.query.equal![key] = value + return this + } + + addNotEqual(key: string, value: any) { + this.query.notEqual![key] = value + return this + } + + addEmpty(key: string, value: any) { + this.query.empty![key] = value + return this + } + + addNotEmpty(key: string, value: any) { + this.query.notEmpty![key] = value + return this + } + + addOneOf(key: string, value: any) { + this.query.oneOf![key] = value + return this + } + + addContains(key: string, value: any) { + this.query.contains![key] = value + return this + } + + addNotContains(key: string, value: any) { + this.query.notContains![key] = value + return this + } + + addContainsAny(key: string, value: any) { + this.query.containsAny![key] = value + return this + } + + handleSpaces(input: string) { + if (this.noEscaping) { + return input + } else { + return input.replace(/ /g, "_") + } + } + + /** + * Preprocesses a value before going into a lucene search. + * Transforms strings to lowercase and wraps strings and bools in quotes. + * @param value The value to process + * @param options The preprocess options + * @returns {string|*} + */ + preprocess(value: any, { escape, lowercase, wrap, type }: any = {}) { + const hasVersion = !!this.version + // Determine if type needs wrapped + const originalType = typeof value + // Convert to lowercase + if (value && lowercase) { + value = value.toLowerCase ? 
value.toLowerCase() : value + } + // Escape characters + if (!this.noEscaping && escape && originalType === "string") { + value = `${value}`.replace(/[ #+\-&|!(){}\]^"~*?:\\]/g, "\\$&") + } + + // Wrap in quotes + if (originalType === "string" && !isNaN(value) && !type) { + value = `"${value}"` + } else if (hasVersion && wrap) { + value = originalType === "number" ? value : `"${value}"` + } + return value + } + + buildSearchQuery() { + const builder = this + let allOr = this.query && this.query.allOr + let query = allOr ? "" : "*:*" + const allPreProcessingOpts = { escape: true, lowercase: true, wrap: true } + let tableId + if (this.query.equal!.tableId) { + tableId = this.query.equal!.tableId + delete this.query.equal!.tableId + } + + const equal = (key: string, value: any) => { + // 0 evaluates to false, which means we would return all rows if we don't check it + if (!value && value !== 0) { + return null + } + return `${key}:${builder.preprocess(value, allPreProcessingOpts)}` + } + + const contains = (key: string, value: any, mode = "AND") => { + if (Array.isArray(value) && value.length === 0) { + return null + } + if (!Array.isArray(value)) { + return `${key}:${value}` + } + let statement = `${builder.preprocess(value[0], { escape: true })}` + for (let i = 1; i < value.length; i++) { + statement += ` ${mode} ${builder.preprocess(value[i], { + escape: true, + })}` + } + return `${key}:(${statement})` + } + + const notContains = (key: string, value: any) => { + // @ts-ignore + const allPrefix = allOr === "" ? "*:* AND" : "" + return allPrefix + "NOT " + contains(key, value) + } + + const containsAny = (key: string, value: any) => { + return contains(key, value, "OR") + } + + const oneOf = (key: string, value: any) => { + if (!Array.isArray(value)) { + if (typeof value === "string") { + value = value.split(",") + } else { + return "" + } + } + let orStatement = `${builder.preprocess(value[0], allPreProcessingOpts)}` + for (let i = 1; i < value.length; i++) { + orStatement += ` OR ${builder.preprocess( + value[i], + allPreProcessingOpts + )}` + } + return `${key}:(${orStatement})` + } + + function build(structure: any, queryFn: any) { + for (let [key, value] of Object.entries(structure)) { + // check for new format - remove numbering if needed + key = removeKeyNumbering(key) + key = builder.preprocess(builder.handleSpaces(key), { + escape: true, + }) + const expression = queryFn(key, value) + if (expression == null) { + continue + } + if (query.length > 0) { + query += ` ${allOr ? 
"OR" : "AND"} ` + } + query += expression + } + } + + // Construct the actual lucene search query string from JSON structure + if (this.query.string) { + build(this.query.string, (key: string, value: any) => { + if (!value) { + return null + } + value = builder.preprocess(value, { + escape: true, + lowercase: true, + type: "string", + }) + return `${key}:${value}*` + }) + } + if (this.query.range) { + build(this.query.range, (key: string, value: any) => { + if (!value) { + return null + } + if (value.low == null || value.low === "") { + return null + } + if (value.high == null || value.high === "") { + return null + } + const low = builder.preprocess(value.low, allPreProcessingOpts) + const high = builder.preprocess(value.high, allPreProcessingOpts) + return `${key}:[${low} TO ${high}]` + }) + } + if (this.query.fuzzy) { + build(this.query.fuzzy, (key: string, value: any) => { + if (!value) { + return null + } + value = builder.preprocess(value, { + escape: true, + lowercase: true, + type: "fuzzy", + }) + return `${key}:${value}~` + }) + } + if (this.query.equal) { + build(this.query.equal, equal) + } + if (this.query.notEqual) { + build(this.query.notEqual, (key: string, value: any) => { + if (!value) { + return null + } + return `!${key}:${builder.preprocess(value, allPreProcessingOpts)}` + }) + } + if (this.query.empty) { + build(this.query.empty, (key: string) => `!${key}:["" TO *]`) + } + if (this.query.notEmpty) { + build(this.query.notEmpty, (key: string) => `${key}:["" TO *]`) + } + if (this.query.oneOf) { + build(this.query.oneOf, oneOf) + } + if (this.query.contains) { + build(this.query.contains, contains) + } + if (this.query.notContains) { + build(this.query.notContains, notContains) + } + if (this.query.containsAny) { + build(this.query.containsAny, containsAny) + } + // make sure table ID is always added as an AND + if (tableId) { + query = `(${query})` + allOr = false + build({ tableId }, equal) + } + return query + } + + buildSearchBody() { + let body: any = { + q: this.buildSearchQuery(), + limit: Math.min(this.limit, 200), + include_docs: this.includeDocs, + } + if (this.bookmark) { + body.bookmark = this.bookmark + } + if (this.sort) { + const order = this.sortOrder === "descending" ? "-" : "" + const type = `<${this.sortType}>` + body.sort = `${order}${this.handleSpaces(this.sort)}${type}` + } + return body + } + + async run() { + const { url, cookie } = getCouchInfo() + const fullPath = `${url}/${this.dbName}/_design/database/_search/${this.index}` + const body = this.buildSearchBody() + try { + return await runQuery(fullPath, body, cookie) + } catch (err: any) { + if (err.status === 404 && this.indexBuilder) { + await this.indexBuilder() + return await runQuery(fullPath, body, cookie) + } else { + throw err + } + } + } +} + +/** + * Executes a lucene search query. 
+ * @param url The query URL + * @param body The request body defining search criteria + * @param cookie The auth cookie for CouchDB + * @returns {Promise<{rows: []}>} + */ +async function runQuery( + url: string, + body: any, + cookie: string +): Promise> { + const response = await fetch(url, { + body: JSON.stringify(body), + method: "POST", + headers: { + Authorization: cookie, + }, + }) + + if (response.status === 404) { + throw response + } + const json = await response.json() + + let output: any = { + rows: [], + } + if (json.rows != null && json.rows.length > 0) { + output.rows = json.rows.map((row: any) => row.doc) + } + if (json.bookmark) { + output.bookmark = json.bookmark + } + return output +} + +/** + * Gets round the fixed limit of 200 results from a query by fetching as many + * pages as required and concatenating the results. This recursively operates + * until enough results have been found. + * @param dbName {string} Which database to run a lucene query on + * @param index {string} Which search index to utilise + * @param query {object} The JSON query structure + * @param params {object} The search params including: + * tableId {string} The table ID to search + * sort {string} The sort column + * sortOrder {string} The sort order ("ascending" or "descending") + * sortType {string} Whether to treat sortable values as strings or + * numbers. ("string" or "number") + * limit {number} The number of results to fetch + * bookmark {string|null} Current bookmark in the recursive search + * rows {array|null} Current results in the recursive search + * @returns {Promise<*[]|*>} + */ +async function recursiveSearch( + dbName: string, + index: string, + query: any, + params: any +): Promise { + const bookmark = params.bookmark + const rows = params.rows || [] + if (rows.length >= params.limit) { + return rows + } + let pageSize = 200 + if (rows.length > params.limit - 200) { + pageSize = params.limit - rows.length + } + const page = await new QueryBuilder(dbName, index, query) + .setVersion(params.version) + .setTable(params.tableId) + .setBookmark(bookmark) + .setLimit(pageSize) + .setSort(params.sort) + .setSortOrder(params.sortOrder) + .setSortType(params.sortType) + .run() + if (!page.rows.length) { + return rows + } + if (page.rows.length < 200) { + return [...rows, ...page.rows] + } + const newParams = { + ...params, + bookmark: page.bookmark, + rows: [...rows, ...page.rows], + } + return await recursiveSearch(dbName, index, query, newParams) +} + +/** + * Performs a paginated search. A bookmark will be returned to allow the next + * page to be fetched. There is a max limit of 200 results per page in a + * paginated search. + * @param dbName {string} Which database to run a lucene query on + * @param index {string} Which search index to utilise + * @param query {object} The JSON query structure + * @param params {object} The search params including: + * tableId {string} The table ID to search + * sort {string} The sort column + * sortOrder {string} The sort order ("ascending" or "descending") + * sortType {string} Whether to treat sortable values as strings or + * numbers.
("string" or "number") + * limit {number} The desired page size + * bookmark {string} The bookmark to resume from + * @returns {Promise<{hasNextPage: boolean, rows: *[]}>} + */ +export async function paginatedSearch( + dbName: string, + index: string, + query: SearchFilters, + params: SearchParams +) { + let limit = params.limit + if (limit == null || isNaN(limit) || limit < 0) { + limit = 50 + } + limit = Math.min(limit, 200) + const search = new QueryBuilder(dbName, index, query) + if (params.version) { + search.setVersion(params.version) + } + if (params.tableId) { + search.setTable(params.tableId) + } + if (params.sort) { + search + .setSort(params.sort) + .setSortOrder(params.sortOrder) + .setSortType(params.sortType) + } + if (params.indexer) { + search.setIndexBuilder(params.indexer) + } + if (params.disableEscaping) { + search.disableEscaping() + } + const searchResults = await search + .setBookmark(params.bookmark) + .setLimit(limit) + .run() + + // Try fetching 1 row in the next page to see if another page of results + // exists or not + search.setBookmark(searchResults.bookmark).setLimit(1) + if (params.tableId) { + search.setTable(params.tableId) + } + const nextResults = await search.run() + + return { + ...searchResults, + hasNextPage: nextResults.rows && nextResults.rows.length > 0, + } +} + +/** + * Performs a full search, fetching multiple pages if required to return the + * desired amount of results. There is a limit of 1000 results to avoid + * heavy performance hits, and to avoid client components breaking from + * handling too much data. + * @param dbName {string} Which database to run a lucene query on + * @param index {string} Which search index to utilise + * @param query {object} The JSON query structure + * @param params {object} The search params including: + * tableId {string} The table ID to search + * sort {string} The sort column + * sortOrder {string} The sort order ("ascending" or "descending") + * sortType {string} Whether to treat sortable values as strings or + * numbers. 
("string" or "number") + * limit {number} The desired number of results + * @returns {Promise<{rows: *}>} + */ +export async function fullSearch( + dbName: string, + index: string, + query: SearchFilters, + params: SearchParams +) { + let limit = params.limit + if (limit == null || isNaN(limit) || limit < 0) { + limit = 1000 + } + params.limit = Math.min(limit, 1000) + const rows = await recursiveSearch(dbName, index, query, params) + return { rows } +} diff --git a/packages/backend-core/src/db/tests/lucene.spec.ts b/packages/backend-core/src/db/tests/lucene.spec.ts new file mode 100644 index 0000000000..23b01e18df --- /dev/null +++ b/packages/backend-core/src/db/tests/lucene.spec.ts @@ -0,0 +1,161 @@ +import { newid } from "../../newid" +import { getDB } from "../db" +import { Database } from "@budibase/types" +import { QueryBuilder, paginatedSearch, fullSearch } from "../lucene" + +const INDEX_NAME = "main" + +const index = `function(doc) { + let props = ["property", "number"] + for (let key of props) { + if (doc[key]) { + index(key, doc[key]) + } + } +}` + +describe("lucene", () => { + let db: Database, dbName: string + + beforeAll(async () => { + dbName = `db-${newid()}` + // create the DB for testing + db = getDB(dbName) + await db.put({ _id: newid(), property: "word" }) + await db.put({ _id: newid(), property: "word2" }) + await db.put({ _id: newid(), property: "word3", number: 1 }) + }) + + it("should be able to create a lucene index", async () => { + const response = await db.put({ + _id: "_design/database", + indexes: { + [INDEX_NAME]: { + index: index, + analyzer: "standard", + }, + }, + }) + expect(response.ok).toBe(true) + }) + + describe("query builder", () => { + it("should be able to perform a basic query", async () => { + const builder = new QueryBuilder(dbName, INDEX_NAME) + builder.setSort("property") + builder.setSortOrder("desc") + builder.setSortType("string") + const resp = await builder.run() + expect(resp.rows.length).toBe(3) + }) + + it("should handle limits", async () => { + const builder = new QueryBuilder(dbName, INDEX_NAME) + builder.setLimit(1) + const resp = await builder.run() + expect(resp.rows.length).toBe(1) + }) + + it("should be able to perform a string search", async () => { + const builder = new QueryBuilder(dbName, INDEX_NAME) + builder.addString("property", "wo") + const resp = await builder.run() + expect(resp.rows.length).toBe(3) + }) + + it("should be able to perform a range search", async () => { + const builder = new QueryBuilder(dbName, INDEX_NAME) + builder.addRange("number", 0, 1) + const resp = await builder.run() + expect(resp.rows.length).toBe(1) + }) + + it("should be able to perform an equal search", async () => { + const builder = new QueryBuilder(dbName, INDEX_NAME) + builder.addEqual("property", "word2") + const resp = await builder.run() + expect(resp.rows.length).toBe(1) + }) + + it("should be able to perform a not equal search", async () => { + const builder = new QueryBuilder(dbName, INDEX_NAME) + builder.addNotEqual("property", "word2") + const resp = await builder.run() + expect(resp.rows.length).toBe(2) + }) + + it("should be able to perform an empty search", async () => { + const builder = new QueryBuilder(dbName, INDEX_NAME) + builder.addEmpty("number", true) + const resp = await builder.run() + expect(resp.rows.length).toBe(2) + }) + + it("should be able to perform a not empty search", async () => { + const builder = new QueryBuilder(dbName, INDEX_NAME) + builder.addNotEmpty("number", true) + const resp = await 
builder.run() + expect(resp.rows.length).toBe(1) + }) + + it("should be able to perform a one of search", async () => { + const builder = new QueryBuilder(dbName, INDEX_NAME) + builder.addOneOf("property", ["word", "word2"]) + const resp = await builder.run() + expect(resp.rows.length).toBe(2) + }) + + it("should be able to perform a contains search", async () => { + const builder = new QueryBuilder(dbName, INDEX_NAME) + builder.addContains("property", ["word"]) + const resp = await builder.run() + expect(resp.rows.length).toBe(1) + }) + + it("should be able to perform a not contains search", async () => { + const builder = new QueryBuilder(dbName, INDEX_NAME) + builder.addNotContains("property", ["word2"]) + const resp = await builder.run() + expect(resp.rows.length).toBe(2) + }) + }) + + describe("paginated search", () => { + it("should be able to perform a paginated search", async () => { + const page = await paginatedSearch( + dbName, + INDEX_NAME, + { + string: { + property: "wo", + }, + }, + { + limit: 1, + sort: "property", + sortType: "string", + sortOrder: "desc", + } + ) + expect(page.rows.length).toBe(1) + expect(page.hasNextPage).toBe(true) + expect(page.bookmark).toBeDefined() + }) + }) + + describe("full search", () => { + it("should be able to perform a full search", async () => { + const page = await fullSearch( + dbName, + INDEX_NAME, + { + string: { + property: "wo", + }, + }, + {} + ) + expect(page.rows.length).toBe(3) + }) + }) +}) diff --git a/packages/backend-core/src/db/tests/utils.spec.ts b/packages/backend-core/src/db/tests/utils.spec.ts index 7bdca5ae8b..138457c65e 100644 --- a/packages/backend-core/src/db/tests/utils.spec.ts +++ b/packages/backend-core/src/db/tests/utils.spec.ts @@ -1,19 +1,13 @@ -import { generator, DBTestConfiguration, testEnv } from "../../../tests" import { getDevelopmentAppID, getProdAppID, isDevAppID, isProdAppID, } from "../conversions" -import { generateAppID, getPlatformUrl, getScopedConfig } from "../utils" -import * as context from "../../context" -import { Config } from "../../constants" -import env from "../../environment" +import { generateAppID } from "../utils" describe("utils", () => { - const config = new DBTestConfiguration() - - describe("app ID manipulation", () => { + describe("generateAppID", () => { function getID() { const appId = generateAppID() const split = appId.split("_") @@ -66,127 +60,4 @@ describe("utils", () => { expect(isProdAppID(devAppId)).toEqual(false) }) }) - - const DEFAULT_URL = "http://localhost:10000" - const ENV_URL = "http://env.com" - - const setDbPlatformUrl = async (dbUrl: string) => { - const db = context.getGlobalDB() - await db.put({ - _id: "config_settings", - type: Config.SETTINGS, - config: { - platformUrl: dbUrl, - }, - }) - } - - const clearSettingsConfig = async () => { - await config.doInTenant(async () => { - const db = context.getGlobalDB() - try { - const config = await db.get("config_settings") - await db.remove("config_settings", config._rev) - } catch (e: any) { - if (e.status !== 404) { - throw e - } - } - }) - } - - describe("getPlatformUrl", () => { - describe("self host", () => { - beforeEach(async () => { - testEnv.selfHosted() - await clearSettingsConfig() - }) - - it("gets the default url", async () => { - await config.doInTenant(async () => { - const url = await getPlatformUrl() - expect(url).toBe(DEFAULT_URL) - }) - }) - - it("gets the platform url from the environment", async () => { - await config.doInTenant(async () => { - env._set("PLATFORM_URL", ENV_URL) - const url = 
await getPlatformUrl() - expect(url).toBe(ENV_URL) - }) - }) - - it("gets the platform url from the database", async () => { - await config.doInTenant(async () => { - const dbUrl = generator.url() - await setDbPlatformUrl(dbUrl) - const url = await getPlatformUrl() - expect(url).toBe(dbUrl) - }) - }) - }) - - describe("cloud", () => { - const TENANT_AWARE_URL = `http://${config.tenantId}.env.com` - - beforeEach(async () => { - testEnv.cloudHosted() - testEnv.multiTenant() - - env._set("PLATFORM_URL", ENV_URL) - await clearSettingsConfig() - }) - - it("gets the platform url from the environment without tenancy", async () => { - await config.doInTenant(async () => { - const url = await getPlatformUrl({ tenantAware: false }) - expect(url).toBe(ENV_URL) - }) - }) - - it("gets the platform url from the environment with tenancy", async () => { - await config.doInTenant(async () => { - const url = await getPlatformUrl() - expect(url).toBe(TENANT_AWARE_URL) - }) - }) - - it("never gets the platform url from the database", async () => { - await config.doInTenant(async () => { - await setDbPlatformUrl(generator.url()) - const url = await getPlatformUrl() - expect(url).toBe(TENANT_AWARE_URL) - }) - }) - }) - }) - - describe("getScopedConfig", () => { - describe("settings config", () => { - beforeEach(async () => { - env._set("SELF_HOSTED", 1) - env._set("PLATFORM_URL", "") - await clearSettingsConfig() - }) - - it("returns the platform url with an existing config", async () => { - await config.doInTenant(async () => { - const dbUrl = generator.url() - await setDbPlatformUrl(dbUrl) - const db = context.getGlobalDB() - const config = await getScopedConfig(db, { type: Config.SETTINGS }) - expect(config.platformUrl).toBe(dbUrl) - }) - }) - - it("returns the platform url without an existing config", async () => { - await config.doInTenant(async () => { - const db = context.getGlobalDB() - const config = await getScopedConfig(db, { type: Config.SETTINGS }) - expect(config.platformUrl).toBe(DEFAULT_URL) - }) - }) - }) - }) }) diff --git a/packages/backend-core/src/db/utils.ts b/packages/backend-core/src/db/utils.ts index 233d044eaa..76c52d08ad 100644 --- a/packages/backend-core/src/db/utils.ts +++ b/packages/backend-core/src/db/utils.ts @@ -9,12 +9,11 @@ import { InternalTable, APP_PREFIX, } from "../constants" -import { getTenantId, getGlobalDB, getGlobalDBName } from "../context" +import { getTenantId, getGlobalDBName } from "../context" import { doWithDB, directCouchAllDbs } from "./db" import { getAppMetadata } from "../cache/appMetadata" import { isDevApp, isDevAppID, getProdAppID } from "./conversions" -import * as events from "../events" -import { App, Database, ConfigType, isSettingsConfig } from "@budibase/types" +import { App, Database } from "@budibase/types" /** * Generates a new app ID. @@ -366,6 +365,16 @@ export async function getAllApps({ } } +export async function getAppsByIDs(appIds: string[]) { + const settled = await Promise.allSettled( + appIds.map(appId => getAppMetadata(appId)) + ) + // have to list the apps which exist, some may have been deleted + return settled + .filter(promise => promise.status === "fulfilled") + .map(promise => (promise as PromiseFulfilledResult).value) +} + /** * Utility function for getAllApps but filters to production apps only. 
*/ @@ -382,6 +391,16 @@ export async function getDevAppIDs() { return apps.filter((id: any) => isDevAppID(id)) } +export function isSameAppID( + appId1: string | undefined, + appId2: string | undefined +) { + if (appId1 == undefined || appId2 == undefined) { + return false + } + return getProdAppID(appId1) === getProdAppID(appId2) +} + export async function dbExists(dbName: any) { return doWithDB( dbName, @@ -392,32 +411,6 @@ export async function dbExists(dbName: any) { ) } -/** - * Generates a new configuration ID. - * @returns {string} The new configuration ID which the config doc can be stored under. - */ -export const generateConfigID = ({ type, workspace, user }: any) => { - const scope = [type, workspace, user].filter(Boolean).join(SEPARATOR) - - return `${DocumentType.CONFIG}${SEPARATOR}${scope}` -} - -/** - * Gets parameters for retrieving configurations. - */ -export const getConfigParams = ( - { type, workspace, user }: any, - otherProps = {} -) => { - const scope = [type, workspace, user].filter(Boolean).join(SEPARATOR) - - return { - ...otherProps, - startkey: `${DocumentType.CONFIG}${SEPARATOR}${scope}`, - endkey: `${DocumentType.CONFIG}${SEPARATOR}${scope}${UNICODE_MAX}`, - } -} - /** * Generates a new dev info document ID - this is scoped to a user. * @returns {string} The new dev info ID which info for dev (like api key) can be stored under. @@ -441,109 +434,6 @@ export const getPluginParams = (pluginId?: string | null, otherProps = {}) => { return getDocParams(DocumentType.PLUGIN, pluginId, otherProps) } -/** - * Returns the most granular configuration document from the DB based on the type, workspace and userID passed. - * @param {Object} db - db instance to query - * @param {Object} scopes - the type, workspace and userID scopes of the configuration. - * @returns The most granular configuration document based on the scope. 
- */ -export const getScopedFullConfig = async function ( - db: any, - { type, user, workspace }: any -) { - const response = await db.allDocs( - getConfigParams( - { type, user, workspace }, - { - include_docs: true, - } - ) - ) - - function determineScore(row: any) { - const config = row.doc - - // Config is specific to a user and a workspace - if (config._id.includes(generateConfigID({ type, user, workspace }))) { - return 4 - } else if (config._id.includes(generateConfigID({ type, user }))) { - // Config is specific to a user only - return 3 - } else if (config._id.includes(generateConfigID({ type, workspace }))) { - // Config is specific to a workspace only - return 2 - } else if (config._id.includes(generateConfigID({ type }))) { - // Config is specific to a type only - return 1 - } - return 0 - } - - // Find the config with the most granular scope based on context - let scopedConfig = response.rows.sort( - (a: any, b: any) => determineScore(a) - determineScore(b) - )[0] - - // custom logic for settings doc - if (type === ConfigType.SETTINGS) { - if (!scopedConfig || !scopedConfig.doc) { - // defaults - scopedConfig = { - doc: { - _id: generateConfigID({ type, user, workspace }), - type: ConfigType.SETTINGS, - config: { - platformUrl: await getPlatformUrl({ tenantAware: true }), - analyticsEnabled: await events.analytics.enabled(), - }, - }, - } - } - - // will always be true - use assertion function to get type access - if (isSettingsConfig(scopedConfig.doc)) { - // overrides affected by environment - scopedConfig.doc.config.platformUrl = await getPlatformUrl({ - tenantAware: true, - }) - scopedConfig.doc.config.analyticsEnabled = - await events.analytics.enabled() - } - } - - return scopedConfig && scopedConfig.doc -} - -export const getPlatformUrl = async (opts = { tenantAware: true }) => { - let platformUrl = env.PLATFORM_URL || "http://localhost:10000" - - if (!env.SELF_HOSTED && env.MULTI_TENANCY && opts.tenantAware) { - // cloud and multi tenant - add the tenant to the default platform url - const tenantId = getTenantId() - if (!platformUrl.includes("localhost:")) { - platformUrl = platformUrl.replace("://", `://${tenantId}.`) - } - } else if (env.SELF_HOSTED) { - const db = getGlobalDB() - // get the doc directly instead of with getScopedConfig to prevent loop - let settings - try { - settings = await db.get(generateConfigID({ type: ConfigType.SETTINGS })) - } catch (e: any) { - if (e.status !== 404) { - throw e - } - } - - // self hosted - check for platform url override - if (settings && settings.config && settings.config.platformUrl) { - platformUrl = settings.config.platformUrl - } - } - - return platformUrl -} - export function pagination( data: any[], pageSize: number, @@ -577,8 +467,3 @@ export function pagination( nextPage, } } - -export async function getScopedConfig(db: any, params: any) { - const configDoc = await getScopedFullConfig(db, params) - return configDoc && configDoc.config ? 
configDoc.config : configDoc -} diff --git a/packages/backend-core/src/environment.ts b/packages/backend-core/src/environment.ts index ed7a161160..8dc2cce487 100644 --- a/packages/backend-core/src/environment.ts +++ b/packages/backend-core/src/environment.ts @@ -28,6 +28,8 @@ const DefaultBucketName = { PLUGINS: "plugins", } +const selfHosted = !!parseInt(process.env.SELF_HOSTED || "") + const environment = { isTest, isJest, @@ -58,7 +60,7 @@ const environment = { process.env.ACCOUNT_PORTAL_URL || "https://account.budibase.app", ACCOUNT_PORTAL_API_KEY: process.env.ACCOUNT_PORTAL_API_KEY || "", DISABLE_ACCOUNT_PORTAL: process.env.DISABLE_ACCOUNT_PORTAL, - SELF_HOSTED: !!parseInt(process.env.SELF_HOSTED || ""), + SELF_HOSTED: selfHosted, COOKIE_DOMAIN: process.env.COOKIE_DOMAIN, PLATFORM_URL: process.env.PLATFORM_URL || "", POSTHOG_TOKEN: process.env.POSTHOG_TOKEN, @@ -84,6 +86,23 @@ const environment = { DEPLOYMENT_ENVIRONMENT: process.env.DEPLOYMENT_ENVIRONMENT || "docker-compose", ENABLE_4XX_HTTP_LOGGING: process.env.ENABLE_4XX_HTTP_LOGGING || true, + ENABLE_AUDIT_LOG_IP_ADDR: process.env.ENABLE_AUDIT_LOG_IP_ADDR, + // smtp + SMTP_FALLBACK_ENABLED: process.env.SMTP_FALLBACK_ENABLED, + SMTP_USER: process.env.SMTP_USER, + SMTP_PASSWORD: process.env.SMTP_PASSWORD, + SMTP_HOST: process.env.SMTP_HOST, + SMTP_PORT: parseInt(process.env.SMTP_PORT || ""), + SMTP_FROM_ADDRESS: process.env.SMTP_FROM_ADDRESS, + DISABLE_JWT_WARNING: process.env.DISABLE_JWT_WARNING, + /** + * Enable to allow an admin user to login using a password. + * This can be useful to prevent lockout when configuring SSO. + * However, this should be turned OFF by default for security purposes. + */ + ENABLE_SSO_MAINTENANCE_MODE: selfHosted + ? process.env.ENABLE_SSO_MAINTENANCE_MODE + : false, _set(key: any, value: any) { process.env[key] = value // @ts-ignore diff --git a/packages/backend-core/src/events/analytics.ts b/packages/backend-core/src/events/analytics.ts index 7fbc6d9c2b..dcfd6d5104 100644 --- a/packages/backend-core/src/events/analytics.ts +++ b/packages/backend-core/src/events/analytics.ts @@ -1,55 +1,6 @@ -import env from "../environment" -import * as context from "../context" -import * as dbUtils from "../db/utils" -import { Config } from "../constants" -import { withCache, TTL, CacheKey } from "../cache" +import * as configs from "../configs" +// wrapper utility function export const enabled = async () => { - // cloud - always use the environment variable - if (!env.SELF_HOSTED) { - return !!env.ENABLE_ANALYTICS - } - - // self host - prefer the settings doc - // use cache as events have high throughput - const enabledInDB = await withCache( - CacheKey.ANALYTICS_ENABLED, - TTL.ONE_DAY, - async () => { - const settings = await getSettingsDoc() - - // need to do explicit checks in case the field is not set - if (settings?.config?.analyticsEnabled === false) { - return false - } else if (settings?.config?.analyticsEnabled === true) { - return true - } - } - ) - - if (enabledInDB !== undefined) { - return enabledInDB - } - - // fallback to the environment variable - // explicitly check for 0 or false here, undefined or otherwise is treated as true - const envEnabled: any = env.ENABLE_ANALYTICS - if (envEnabled === 0 || envEnabled === false) { - return false - } else { - return true - } -} - -const getSettingsDoc = async () => { - const db = context.getGlobalDB() - let settings - try { - settings = await db.get(dbUtils.generateConfigID({ type: Config.SETTINGS })) - } catch (e: any) { - if (e.status !== 404) { - throw 
e - } - } - return settings + return configs.analyticsEnabled() } diff --git a/packages/backend-core/src/events/events.ts b/packages/backend-core/src/events/events.ts index 01928221a0..c2f7cf66ec 100644 --- a/packages/backend-core/src/events/events.ts +++ b/packages/backend-core/src/events/events.ts @@ -1,4 +1,4 @@ -import { Event } from "@budibase/types" +import { Event, AuditedEventFriendlyName } from "@budibase/types" import { processors } from "./processors" import identification from "./identification" import * as backfill from "./backfill" diff --git a/packages/backend-core/src/events/identification.ts b/packages/backend-core/src/events/identification.ts index 7cade9e14b..9534fb293d 100644 --- a/packages/backend-core/src/events/identification.ts +++ b/packages/backend-core/src/events/identification.ts @@ -10,7 +10,6 @@ import { isCloudAccount, isSSOAccount, TenantGroup, - SettingsConfig, CloudAccount, UserIdentity, InstallationGroup, @@ -19,10 +18,9 @@ import { isSSOUser, } from "@budibase/types" import { processors } from "./processors" -import * as dbUtils from "../db/utils" -import { Config } from "../constants" import { newid } from "../utils" import * as installation from "../installation" +import * as configs from "../configs" import { withCache, TTL, CacheKey } from "../cache/generic" const pkg = require("../../package.json") @@ -89,6 +87,7 @@ const getCurrentIdentity = async (): Promise => { installationId, tenantId, environment, + hostInfo: userContext.hostInfo, } } else { throw new Error("Unknown identity type") @@ -270,9 +269,7 @@ const getUniqueTenantId = async (tenantId: string): Promise => { return context.doInTenant(tenantId, () => { return withCache(CacheKey.UNIQUE_TENANT_ID, TTL.ONE_DAY, async () => { const db = context.getGlobalDB() - const config: SettingsConfig = await dbUtils.getScopedFullConfig(db, { - type: Config.SETTINGS, - }) + const config = await configs.getSettingsConfigDoc() let uniqueTenantId: string if (config.config.uniqueTenantId) { diff --git a/packages/backend-core/src/events/processors/AuditLogsProcessor.ts b/packages/backend-core/src/events/processors/AuditLogsProcessor.ts new file mode 100644 index 0000000000..94b4e1b09f --- /dev/null +++ b/packages/backend-core/src/events/processors/AuditLogsProcessor.ts @@ -0,0 +1,93 @@ +import { + Event, + Identity, + Group, + IdentityType, + AuditLogQueueEvent, + AuditLogFn, + HostInfo, +} from "@budibase/types" +import { EventProcessor } from "./types" +import { getAppId, doInTenant, getTenantId } from "../../context" +import BullQueue from "bull" +import { createQueue, JobQueue } from "../../queue" +import { isAudited } from "../../utils" +import env from "../../environment" + +export default class AuditLogsProcessor implements EventProcessor { + static auditLogsEnabled = false + static auditLogQueue: BullQueue.Queue + + // can't use constructor as need to return promise + static init(fn: AuditLogFn) { + AuditLogsProcessor.auditLogsEnabled = true + const writeAuditLogs = fn + AuditLogsProcessor.auditLogQueue = createQueue( + JobQueue.AUDIT_LOG + ) + return AuditLogsProcessor.auditLogQueue.process(async job => { + return doInTenant(job.data.tenantId, async () => { + let properties = job.data.properties + if (properties.audited) { + properties = { + ...properties, + ...properties.audited, + } + delete properties.audited + } + + // this feature is disabled by default due to privacy requirements + // in some countries - available as env var in-case it is desired + // in self host deployments + let hostInfo: 
HostInfo | undefined = {} + if (env.ENABLE_AUDIT_LOG_IP_ADDR) { + hostInfo = job.data.opts.hostInfo + } + + await writeAuditLogs(job.data.event, properties, { + userId: job.data.opts.userId, + timestamp: job.data.opts.timestamp, + appId: job.data.opts.appId, + hostInfo, + }) + }) + }) + } + + async processEvent( + event: Event, + identity: Identity, + properties: any, + timestamp?: string + ): Promise { + if (AuditLogsProcessor.auditLogsEnabled && isAudited(event)) { + // only audit log actual events, don't include backfills + const userId = + identity.type === IdentityType.USER ? identity.id : undefined + // add to the event queue, rather than just writing immediately + await AuditLogsProcessor.auditLogQueue.add({ + event, + properties, + opts: { + userId, + timestamp, + appId: getAppId(), + hostInfo: identity.hostInfo, + }, + tenantId: getTenantId(), + }) + } + } + + async identify(identity: Identity, timestamp?: string | number) { + // no-op + } + + async identifyGroup(group: Group, timestamp?: string | number) { + // no-op + } + + shutdown(): void { + AuditLogsProcessor.auditLogQueue?.close() + } +} diff --git a/packages/backend-core/src/events/processors/index.ts b/packages/backend-core/src/events/processors/index.ts index 0e75f050db..6646764e47 100644 --- a/packages/backend-core/src/events/processors/index.ts +++ b/packages/backend-core/src/events/processors/index.ts @@ -1,8 +1,19 @@ import AnalyticsProcessor from "./AnalyticsProcessor" import LoggingProcessor from "./LoggingProcessor" +import AuditLogsProcessor from "./AuditLogsProcessor" import Processors from "./Processors" +import { AuditLogFn } from "@budibase/types" export const analyticsProcessor = new AnalyticsProcessor() const loggingProcessor = new LoggingProcessor() +const auditLogsProcessor = new AuditLogsProcessor() -export const processors = new Processors([analyticsProcessor, loggingProcessor]) +export function init(auditingFn: AuditLogFn) { + return AuditLogsProcessor.init(auditingFn) +} + +export const processors = new Processors([ + analyticsProcessor, + loggingProcessor, + auditLogsProcessor, +]) diff --git a/packages/backend-core/src/events/processors/posthog/PosthogProcessor.ts b/packages/backend-core/src/events/processors/posthog/PosthogProcessor.ts index 593e5ff082..0dbe70d543 100644 --- a/packages/backend-core/src/events/processors/posthog/PosthogProcessor.ts +++ b/packages/backend-core/src/events/processors/posthog/PosthogProcessor.ts @@ -47,6 +47,8 @@ export default class PosthogProcessor implements EventProcessor { return } + properties = this.clearPIIProperties(properties) + properties.version = pkg.version properties.service = env.SERVICE properties.environment = identity.environment @@ -79,6 +81,16 @@ export default class PosthogProcessor implements EventProcessor { this.posthog.capture(payload) } + clearPIIProperties(properties: any) { + if (properties.email) { + delete properties.email + } + if (properties.audited) { + delete properties.audited + } + return properties + } + async identify(identity: Identity, timestamp?: string | number) { const payload: any = { distinctId: identity.id, properties: identity } if (timestamp) { diff --git a/packages/backend-core/src/events/processors/posthog/tests/PosthogProcessor.spec.ts b/packages/backend-core/src/events/processors/posthog/tests/PosthogProcessor.spec.ts index 2c1340d36e..8df4e40bcf 100644 --- a/packages/backend-core/src/events/processors/posthog/tests/PosthogProcessor.spec.ts +++ 
b/packages/backend-core/src/events/processors/posthog/tests/PosthogProcessor.spec.ts @@ -49,6 +49,25 @@ describe("PosthogProcessor", () => { expect(processor.posthog.capture).toHaveBeenCalledTimes(0) }) + it("removes audited information", async () => { + const processor = new PosthogProcessor("test") + + const identity = newIdentity() + const properties = { + email: "test", + audited: { + name: "test", + }, + } + + await processor.processEvent(Event.USER_CREATED, identity, properties) + expect(processor.posthog.capture).toHaveBeenCalled() + // @ts-ignore + const call = processor.posthog.capture.mock.calls[0][0] + expect(call.properties.audited).toBeUndefined() + expect(call.properties.email).toBeUndefined() + }) + describe("rate limiting", () => { it("sends daily event once in same day", async () => { const processor = new PosthogProcessor("test") diff --git a/packages/backend-core/src/events/publishers/app.ts b/packages/backend-core/src/events/publishers/app.ts index 90da21f3f5..d08d59b5f1 100644 --- a/packages/backend-core/src/events/publishers/app.ts +++ b/packages/backend-core/src/events/publishers/app.ts @@ -19,6 +19,9 @@ const created = async (app: App, timestamp?: string | number) => { const properties: AppCreatedEvent = { appId: app.appId, version: app.version, + audited: { + name: app.name, + }, } await publishEvent(Event.APP_CREATED, properties, timestamp) } @@ -27,6 +30,9 @@ async function updated(app: App) { const properties: AppUpdatedEvent = { appId: app.appId, version: app.version, + audited: { + name: app.name, + }, } await publishEvent(Event.APP_UPDATED, properties) } @@ -34,6 +40,9 @@ async function updated(app: App) { async function deleted(app: App) { const properties: AppDeletedEvent = { appId: app.appId, + audited: { + name: app.name, + }, } await publishEvent(Event.APP_DELETED, properties) } @@ -41,6 +50,9 @@ async function deleted(app: App) { async function published(app: App, timestamp?: string | number) { const properties: AppPublishedEvent = { appId: app.appId, + audited: { + name: app.name, + }, } await publishEvent(Event.APP_PUBLISHED, properties, timestamp) } @@ -48,6 +60,9 @@ async function published(app: App, timestamp?: string | number) { async function unpublished(app: App) { const properties: AppUnpublishedEvent = { appId: app.appId, + audited: { + name: app.name, + }, } await publishEvent(Event.APP_UNPUBLISHED, properties) } @@ -55,6 +70,9 @@ async function unpublished(app: App) { async function fileImported(app: App) { const properties: AppFileImportedEvent = { appId: app.appId, + audited: { + name: app.name, + }, } await publishEvent(Event.APP_FILE_IMPORTED, properties) } @@ -63,6 +81,9 @@ async function templateImported(app: App, templateKey: string) { const properties: AppTemplateImportedEvent = { appId: app.appId, templateKey, + audited: { + name: app.name, + }, } await publishEvent(Event.APP_TEMPLATE_IMPORTED, properties) } @@ -76,6 +97,9 @@ async function versionUpdated( appId: app.appId, currentVersion, updatedToVersion, + audited: { + name: app.name, + }, } await publishEvent(Event.APP_VERSION_UPDATED, properties) } @@ -89,6 +113,9 @@ async function versionReverted( appId: app.appId, currentVersion, revertedToVersion, + audited: { + name: app.name, + }, } await publishEvent(Event.APP_VERSION_REVERTED, properties) } @@ -96,6 +123,9 @@ async function versionReverted( async function reverted(app: App) { const properties: AppRevertedEvent = { appId: app.appId, + audited: { + name: app.name, + }, } await publishEvent(Event.APP_REVERTED, properties) } 
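For context on the `audited` blocks being added to these publisher payloads: the new AuditLogsProcessor merges `properties.audited` into the stored audit-log entry, while PosthogProcessor.clearPIIProperties deletes it (along with `email`) before analytics capture, so these human-readable identifiers are kept out of analytics. A minimal sketch of the shape, with illustrative names (AuditedAppEvent and buildAppEventProperties are not part of this changeset):

// Simplified illustration of the audited-properties pattern used above.
type AuditedAppEvent = {
  appId: string
  audited?: {
    name?: string
  }
}

function buildAppEventProperties(app: { appId: string; name: string }): AuditedAppEvent {
  return {
    appId: app.appId,
    audited: {
      // kept when the event is written to the audit log,
      // stripped by clearPIIProperties before PostHog capture
      name: app.name,
    },
  }
}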
@@ -103,6 +133,9 @@ async function reverted(app: App) { async function exported(app: App) { const properties: AppExportedEvent = { appId: app.appId, + audited: { + name: app.name, + }, } await publishEvent(Event.APP_EXPORTED, properties) } diff --git a/packages/backend-core/src/events/publishers/auditLog.ts b/packages/backend-core/src/events/publishers/auditLog.ts new file mode 100644 index 0000000000..7cfb76147a --- /dev/null +++ b/packages/backend-core/src/events/publishers/auditLog.ts @@ -0,0 +1,26 @@ +import { + Event, + AuditLogSearchParams, + AuditLogFilteredEvent, + AuditLogDownloadedEvent, +} from "@budibase/types" +import { publishEvent } from "../events" + +async function filtered(search: AuditLogSearchParams) { + const properties: AuditLogFilteredEvent = { + filters: search, + } + await publishEvent(Event.AUDIT_LOGS_FILTERED, properties) +} + +async function downloaded(search: AuditLogSearchParams) { + const properties: AuditLogDownloadedEvent = { + filters: search, + } + await publishEvent(Event.AUDIT_LOGS_DOWNLOADED, properties) +} + +export default { + filtered, + downloaded, +} diff --git a/packages/backend-core/src/events/publishers/auth.ts b/packages/backend-core/src/events/publishers/auth.ts index 4436045599..e275d2dbb0 100644 --- a/packages/backend-core/src/events/publishers/auth.ts +++ b/packages/backend-core/src/events/publishers/auth.ts @@ -12,19 +12,25 @@ import { } from "@budibase/types" import { identification } from ".." -async function login(source: LoginSource) { +async function login(source: LoginSource, email: string) { const identity = await identification.getCurrentIdentity() const properties: LoginEvent = { userId: identity.id, source, + audited: { + email, + }, } await publishEvent(Event.AUTH_LOGIN, properties) } -async function logout() { +async function logout(email?: string) { const identity = await identification.getCurrentIdentity() const properties: LogoutEvent = { userId: identity.id, + audited: { + email, + }, } await publishEvent(Event.AUTH_LOGOUT, properties) } diff --git a/packages/backend-core/src/events/publishers/automation.ts b/packages/backend-core/src/events/publishers/automation.ts index 6eb36ab067..419d4136bd 100644 --- a/packages/backend-core/src/events/publishers/automation.ts +++ b/packages/backend-core/src/events/publishers/automation.ts @@ -18,6 +18,9 @@ async function created(automation: Automation, timestamp?: string | number) { automationId: automation._id as string, triggerId: automation.definition?.trigger?.id, triggerType: automation.definition?.trigger?.stepId, + audited: { + name: automation.name, + }, } await publishEvent(Event.AUTOMATION_CREATED, properties, timestamp) } @@ -38,6 +41,9 @@ async function deleted(automation: Automation) { automationId: automation._id as string, triggerId: automation.definition?.trigger?.id, triggerType: automation.definition?.trigger?.stepId, + audited: { + name: automation.name, + }, } await publishEvent(Event.AUTOMATION_DELETED, properties) } @@ -71,6 +77,9 @@ async function stepCreated( triggerType: automation.definition?.trigger?.stepId, stepId: step.id!, stepType: step.stepId, + audited: { + name: automation.name, + }, } await publishEvent(Event.AUTOMATION_STEP_CREATED, properties, timestamp) } @@ -83,6 +92,9 @@ async function stepDeleted(automation: Automation, step: AutomationStep) { triggerType: automation.definition?.trigger?.stepId, stepId: step.id!, stepType: step.stepId, + audited: { + name: automation.name, + }, } await publishEvent(Event.AUTOMATION_STEP_DELETED, properties) } 
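The new auditLog publisher introduced above follows the same pattern as the other publishers and is re-exported from the publishers index below, so services can report audit-log searches and downloads through the shared events module. A rough usage sketch, assuming the standard backend-core entry point (the wrapper name recordAuditLogSearch is illustrative only):

import { events } from "@budibase/backend-core"
import { AuditLogSearchParams } from "@budibase/types"

// Publish an AUDIT_LOGS_FILTERED event for the given search filters;
// events.auditLog.downloaded(search) follows the same shape for exports.
export async function recordAuditLogSearch(search: AuditLogSearchParams) {
  await events.auditLog.filtered(search)
}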
diff --git a/packages/backend-core/src/events/publishers/backup.ts b/packages/backend-core/src/events/publishers/backup.ts index 12263fe1ff..d7d87f09f1 100644 --- a/packages/backend-core/src/events/publishers/backup.ts +++ b/packages/backend-core/src/events/publishers/backup.ts @@ -13,6 +13,7 @@ async function appBackupRestored(backup: AppBackup) { appId: backup.appId, restoreId: backup._id!, backupCreatedAt: backup.timestamp, + name: backup.name as string, } await publishEvent(Event.APP_BACKUP_RESTORED, properties) @@ -22,13 +23,15 @@ async function appBackupTriggered( appId: string, backupId: string, type: AppBackupType, - trigger: AppBackupTrigger + trigger: AppBackupTrigger, + name: string ) { const properties: AppBackupTriggeredEvent = { appId: appId, backupId, type, trigger, + name, } await publishEvent(Event.APP_BACKUP_TRIGGERED, properties) } diff --git a/packages/backend-core/src/events/publishers/group.ts b/packages/backend-core/src/events/publishers/group.ts index d79920562b..a000b880a2 100644 --- a/packages/backend-core/src/events/publishers/group.ts +++ b/packages/backend-core/src/events/publishers/group.ts @@ -8,12 +8,16 @@ import { GroupUsersAddedEvent, GroupUsersDeletedEvent, GroupAddedOnboardingEvent, + GroupPermissionsEditedEvent, UserGroupRoles, } from "@budibase/types" async function created(group: UserGroup, timestamp?: number) { const properties: GroupCreatedEvent = { groupId: group._id as string, + audited: { + name: group.name, + }, } await publishEvent(Event.USER_GROUP_CREATED, properties, timestamp) } @@ -21,6 +25,9 @@ async function created(group: UserGroup, timestamp?: number) { async function updated(group: UserGroup) { const properties: GroupUpdatedEvent = { groupId: group._id as string, + audited: { + name: group.name, + }, } await publishEvent(Event.USER_GROUP_UPDATED, properties) } @@ -28,6 +35,9 @@ async function updated(group: UserGroup) { async function deleted(group: UserGroup) { const properties: GroupDeletedEvent = { groupId: group._id as string, + audited: { + name: group.name, + }, } await publishEvent(Event.USER_GROUP_DELETED, properties) } @@ -36,6 +46,9 @@ async function usersAdded(count: number, group: UserGroup) { const properties: GroupUsersAddedEvent = { count, groupId: group._id as string, + audited: { + name: group.name, + }, } await publishEvent(Event.USER_GROUP_USERS_ADDED, properties) } @@ -44,6 +57,9 @@ async function usersDeleted(count: number, group: UserGroup) { const properties: GroupUsersDeletedEvent = { count, groupId: group._id as string, + audited: { + name: group.name, + }, } await publishEvent(Event.USER_GROUP_USERS_REMOVED, properties) } @@ -56,9 +72,13 @@ async function createdOnboarding(groupId: string) { await publishEvent(Event.USER_GROUP_ONBOARDING, properties) } -async function permissionsEdited(roles: UserGroupRoles) { - const properties: UserGroupRoles = { - ...roles, +async function permissionsEdited(group: UserGroup) { + const properties: GroupPermissionsEditedEvent = { + permissions: group.roles!, + groupId: group._id as string, + audited: { + name: group.name, + }, } await publishEvent(Event.USER_GROUP_PERMISSIONS_EDITED, properties) } diff --git a/packages/backend-core/src/events/publishers/index.ts b/packages/backend-core/src/events/publishers/index.ts index 34e47b2990..87a34bf3f1 100644 --- a/packages/backend-core/src/events/publishers/index.ts +++ b/packages/backend-core/src/events/publishers/index.ts @@ -21,3 +21,4 @@ export { default as group } from "./group" export { default as plugin } from "./plugin" 
export { default as backup } from "./backup" export { default as environmentVariable } from "./environmentVariable" +export { default as auditLog } from "./auditLog" diff --git a/packages/backend-core/src/events/publishers/screen.ts b/packages/backend-core/src/events/publishers/screen.ts index 27264b5847..df486029e8 100644 --- a/packages/backend-core/src/events/publishers/screen.ts +++ b/packages/backend-core/src/events/publishers/screen.ts @@ -11,6 +11,9 @@ async function created(screen: Screen, timestamp?: string | number) { layoutId: screen.layoutId, screenId: screen._id as string, roleId: screen.routing.roleId, + audited: { + name: screen.routing?.route, + }, } await publishEvent(Event.SCREEN_CREATED, properties, timestamp) } @@ -20,6 +23,9 @@ async function deleted(screen: Screen) { layoutId: screen.layoutId, screenId: screen._id as string, roleId: screen.routing.roleId, + audited: { + name: screen.routing?.route, + }, } await publishEvent(Event.SCREEN_DELETED, properties) } diff --git a/packages/backend-core/src/events/publishers/table.ts b/packages/backend-core/src/events/publishers/table.ts index d50f4df0e1..dc3200291a 100644 --- a/packages/backend-core/src/events/publishers/table.ts +++ b/packages/backend-core/src/events/publishers/table.ts @@ -13,6 +13,9 @@ import { async function created(table: Table, timestamp?: string | number) { const properties: TableCreatedEvent = { tableId: table._id as string, + audited: { + name: table.name, + }, } await publishEvent(Event.TABLE_CREATED, properties, timestamp) } @@ -20,6 +23,9 @@ async function created(table: Table, timestamp?: string | number) { async function updated(table: Table) { const properties: TableUpdatedEvent = { tableId: table._id as string, + audited: { + name: table.name, + }, } await publishEvent(Event.TABLE_UPDATED, properties) } @@ -27,6 +33,9 @@ async function updated(table: Table) { async function deleted(table: Table) { const properties: TableDeletedEvent = { tableId: table._id as string, + audited: { + name: table.name, + }, } await publishEvent(Event.TABLE_DELETED, properties) } @@ -35,6 +44,9 @@ async function exported(table: Table, format: TableExportFormat) { const properties: TableExportedEvent = { tableId: table._id as string, format, + audited: { + name: table.name, + }, } await publishEvent(Event.TABLE_EXPORTED, properties) } @@ -42,6 +54,9 @@ async function exported(table: Table, format: TableExportFormat) { async function imported(table: Table) { const properties: TableImportedEvent = { tableId: table._id as string, + audited: { + name: table.name, + }, } await publishEvent(Event.TABLE_IMPORTED, properties) } diff --git a/packages/backend-core/src/events/publishers/user.ts b/packages/backend-core/src/events/publishers/user.ts index 1fe50149b5..8dbc494d1e 100644 --- a/packages/backend-core/src/events/publishers/user.ts +++ b/packages/backend-core/src/events/publishers/user.ts @@ -19,6 +19,9 @@ import { async function created(user: User, timestamp?: number) { const properties: UserCreatedEvent = { userId: user._id as string, + audited: { + email: user.email, + }, } await publishEvent(Event.USER_CREATED, properties, timestamp) } @@ -26,6 +29,9 @@ async function created(user: User, timestamp?: number) { async function updated(user: User) { const properties: UserUpdatedEvent = { userId: user._id as string, + audited: { + email: user.email, + }, } await publishEvent(Event.USER_UPDATED, properties) } @@ -33,6 +39,9 @@ async function updated(user: User) { async function deleted(user: User) { const properties: 
UserDeletedEvent = { userId: user._id as string, + audited: { + email: user.email, + }, } await publishEvent(Event.USER_DELETED, properties) } @@ -40,6 +49,9 @@ async function deleted(user: User) { export async function onboardingComplete(user: User) { const properties: UserOnboardingEvent = { userId: user._id as string, + audited: { + email: user.email, + }, } await publishEvent(Event.USER_ONBOARDING_COMPLETE, properties) } @@ -49,6 +61,9 @@ export async function onboardingComplete(user: User) { async function permissionAdminAssigned(user: User, timestamp?: number) { const properties: UserPermissionAssignedEvent = { userId: user._id as string, + audited: { + email: user.email, + }, } await publishEvent( Event.USER_PERMISSION_ADMIN_ASSIGNED, @@ -60,6 +75,9 @@ async function permissionAdminAssigned(user: User, timestamp?: number) { async function permissionAdminRemoved(user: User) { const properties: UserPermissionRemovedEvent = { userId: user._id as string, + audited: { + email: user.email, + }, } await publishEvent(Event.USER_PERMISSION_ADMIN_REMOVED, properties) } @@ -67,6 +85,9 @@ async function permissionAdminRemoved(user: User) { async function permissionBuilderAssigned(user: User, timestamp?: number) { const properties: UserPermissionAssignedEvent = { userId: user._id as string, + audited: { + email: user.email, + }, } await publishEvent( Event.USER_PERMISSION_BUILDER_ASSIGNED, @@ -78,20 +99,30 @@ async function permissionBuilderAssigned(user: User, timestamp?: number) { async function permissionBuilderRemoved(user: User) { const properties: UserPermissionRemovedEvent = { userId: user._id as string, + audited: { + email: user.email, + }, } await publishEvent(Event.USER_PERMISSION_BUILDER_REMOVED, properties) } // INVITE -async function invited() { - const properties: UserInvitedEvent = {} +async function invited(email: string) { + const properties: UserInvitedEvent = { + audited: { + email, + }, + } await publishEvent(Event.USER_INVITED, properties) } async function inviteAccepted(user: User) { const properties: UserInviteAcceptedEvent = { userId: user._id as string, + audited: { + email: user.email, + }, } await publishEvent(Event.USER_INVITED_ACCEPTED, properties) } @@ -101,6 +132,9 @@ async function inviteAccepted(user: User) { async function passwordForceReset(user: User) { const properties: UserPasswordForceResetEvent = { userId: user._id as string, + audited: { + email: user.email, + }, } await publishEvent(Event.USER_PASSWORD_FORCE_RESET, properties) } @@ -108,6 +142,9 @@ async function passwordForceReset(user: User) { async function passwordUpdated(user: User) { const properties: UserPasswordUpdatedEvent = { userId: user._id as string, + audited: { + email: user.email, + }, } await publishEvent(Event.USER_PASSWORD_UPDATED, properties) } @@ -115,6 +152,9 @@ async function passwordUpdated(user: User) { async function passwordResetRequested(user: User) { const properties: UserPasswordResetRequestedEvent = { userId: user._id as string, + audited: { + email: user.email, + }, } await publishEvent(Event.USER_PASSWORD_RESET_REQUESTED, properties) } @@ -122,6 +162,9 @@ async function passwordResetRequested(user: User) { async function passwordReset(user: User) { const properties: UserPasswordResetEvent = { userId: user._id as string, + audited: { + email: user.email, + }, } await publishEvent(Event.USER_PASSWORD_RESET, properties) } diff --git a/packages/backend-core/src/index.ts b/packages/backend-core/src/index.ts index d507d8175f..48569548e3 100644 --- 
a/packages/backend-core/src/index.ts +++ b/packages/backend-core/src/index.ts @@ -1,3 +1,4 @@ +export * as configs from "./configs" export * as events from "./events" export * as migrations from "./migrations" export * as users from "./users" @@ -20,11 +21,11 @@ export * as context from "./context" export * as cache from "./cache" export * as objectStore from "./objectStore" export * as redis from "./redis" -export * as locks from "./redis/redlock" +export * as locks from "./redis/redlockImpl" export * as utils from "./utils" export * as errors from "./errors" export { default as env } from "./environment" - +export { SearchParams } from "./db" // Add context to tenancy for backwards compatibility // only do this for external usages to prevent internal // circular dependencies diff --git a/packages/backend-core/src/middleware/authenticated.ts b/packages/backend-core/src/middleware/authenticated.ts index 4bb2aaba76..0708581570 100644 --- a/packages/backend-core/src/middleware/authenticated.ts +++ b/packages/backend-core/src/middleware/authenticated.ts @@ -8,7 +8,7 @@ import { getGlobalDB, doInTenant } from "../context" import { decrypt } from "../security/encryption" import * as identity from "../context/identity" import env from "../environment" -import { BBContext, EndpointMatcher } from "@budibase/types" +import { Ctx, EndpointMatcher } from "@budibase/types" const ONE_MINUTE = env.SESSION_UPDATE_PERIOD ? parseInt(env.SESSION_UPDATE_PERIOD) @@ -73,7 +73,7 @@ export default function ( } ) { const noAuthOptions = noAuthPatterns ? buildMatcherRegex(noAuthPatterns) : [] - return async (ctx: BBContext | any, next: any) => { + return async (ctx: Ctx | any, next: any) => { let publicEndpoint = false const version = ctx.request.headers[Header.API_VER] // the path is not authenticated @@ -115,7 +115,8 @@ export default function ( authenticated = true } catch (err: any) { authenticated = false - console.error("Auth Error", err?.message || err) + console.error(`Auth Error: ${err.message}`) + console.error(err) // remove the cookie as the user does not exist anymore clearCookie(ctx, Cookie.Auth) } @@ -148,12 +149,13 @@ export default function ( finalise(ctx, { authenticated, user, internal, version, publicEndpoint }) if (user && user.email) { - return identity.doInUserContext(user, next) + return identity.doInUserContext(user, ctx, next) } else { return next() } } catch (err: any) { - console.error("Auth Error", err?.message || err) + console.error(`Auth Error: ${err.message}`) + console.error(err) // invalid token, clear the cookie if (err && err.name === "JsonWebTokenError") { clearCookie(ctx, Cookie.Auth) diff --git a/packages/backend-core/src/middleware/errorHandling.ts b/packages/backend-core/src/middleware/errorHandling.ts index 5ac70c33e5..36aff2cdbc 100644 --- a/packages/backend-core/src/middleware/errorHandling.ts +++ b/packages/backend-core/src/middleware/errorHandling.ts @@ -11,6 +11,7 @@ export async function errorHandling(ctx: any, next: any) { if (status > 499 || env.ENABLE_4XX_HTTP_LOGGING) { ctx.log.error(err) + console.trace(err) } const error = errors.getPublicError(err) diff --git a/packages/backend-core/src/middleware/index.ts b/packages/backend-core/src/middleware/index.ts index de609f9a3e..addeac6a1a 100644 --- a/packages/backend-core/src/middleware/index.ts +++ b/packages/backend-core/src/middleware/index.ts @@ -17,4 +17,5 @@ export { default as builderOrAdmin } from "./builderOrAdmin" export { default as builderOnly } from "./builderOnly" export { default as logging } from 
"./logging" export { default as errorHandling } from "./errorHandling" +export { default as querystringToBody } from "./querystringToBody" export * as joiValidator from "./joi-validator" diff --git a/packages/backend-core/src/middleware/passport/datasource/google.ts b/packages/backend-core/src/middleware/passport/datasource/google.ts index 112f8d2096..32451cb8d2 100644 --- a/packages/backend-core/src/middleware/passport/datasource/google.ts +++ b/packages/backend-core/src/middleware/passport/datasource/google.ts @@ -1,9 +1,8 @@ import * as google from "../sso/google" -import { Cookie, Config } from "../../../constants" +import { Cookie } from "../../../constants" import { clearCookie, getCookie } from "../../../utils" -import { getScopedConfig, getPlatformUrl, doWithDB } from "../../../db" -import environment from "../../../environment" -import { getGlobalDB } from "../../../context" +import { doWithDB } from "../../../db" +import * as configs from "../../../configs" import { BBContext, Database, SSOProfile } from "@budibase/types" import { ssoSaveUserNoOp } from "../sso/sso" const GoogleStrategy = require("passport-google-oauth").OAuth2Strategy @@ -13,18 +12,12 @@ type Passport = { } async function fetchGoogleCreds() { - // try and get the config from the tenant - const db = getGlobalDB() - const googleConfig = await getScopedConfig(db, { - type: Config.GOOGLE, - }) - // or fall back to env variables - return ( - googleConfig || { - clientID: environment.GOOGLE_CLIENT_ID, - clientSecret: environment.GOOGLE_CLIENT_SECRET, - } - ) + let config = await configs.getGoogleDatasourceConfig() + + if (!config) { + throw new Error("No google configuration found") + } + return config } export async function preAuth( @@ -34,7 +27,7 @@ export async function preAuth( ) { // get the relevant config const googleConfig = await fetchGoogleCreds() - const platformUrl = await getPlatformUrl({ tenantAware: false }) + const platformUrl = await configs.getPlatformUrl({ tenantAware: false }) let callbackUrl = `${platformUrl}/api/global/auth/datasource/google/callback` const strategy = await google.strategyFactory( @@ -61,7 +54,7 @@ export async function postAuth( ) { // get the relevant config const config = await fetchGoogleCreds() - const platformUrl = await getPlatformUrl({ tenantAware: false }) + const platformUrl = await configs.getPlatformUrl({ tenantAware: false }) let callbackUrl = `${platformUrl}/api/global/auth/datasource/google/callback` const authStateCookie = getCookie(ctx, Cookie.DatasourceAuth) diff --git a/packages/backend-core/src/middleware/passport/sso/google.ts b/packages/backend-core/src/middleware/passport/sso/google.ts index d26d7d6a8d..ad7593e63d 100644 --- a/packages/backend-core/src/middleware/passport/sso/google.ts +++ b/packages/backend-core/src/middleware/passport/sso/google.ts @@ -2,12 +2,11 @@ import { ssoCallbackUrl } from "../utils" import * as sso from "./sso" import { ConfigType, - GoogleConfig, - Database, SSOProfile, SSOAuthDetails, SSOProviderType, SaveSSOUserFunction, + GoogleInnerConfig, } from "@budibase/types" const GoogleStrategy = require("passport-google-oauth").OAuth2Strategy @@ -45,7 +44,7 @@ export function buildVerifyFn(saveUserFn: SaveSSOUserFunction) { * @returns Dynamically configured Passport Google Strategy */ export async function strategyFactory( - config: GoogleConfig["config"], + config: GoogleInnerConfig, callbackUrl: string, saveUserFn: SaveSSOUserFunction ) { @@ -73,9 +72,6 @@ export async function strategyFactory( } } -export async function 
getCallbackUrl( - db: Database, - config: { callbackURL?: string } -) { - return ssoCallbackUrl(db, config, ConfigType.GOOGLE) +export async function getCallbackUrl(config: GoogleInnerConfig) { + return ssoCallbackUrl(ConfigType.GOOGLE, config) } diff --git a/packages/backend-core/src/middleware/passport/sso/oidc.ts b/packages/backend-core/src/middleware/passport/sso/oidc.ts index 1fb44b84a3..b6d5eb52e9 100644 --- a/packages/backend-core/src/middleware/passport/sso/oidc.ts +++ b/packages/backend-core/src/middleware/passport/sso/oidc.ts @@ -4,7 +4,6 @@ import { ssoCallbackUrl } from "../utils" import { ConfigType, OIDCInnerConfig, - Database, SSOProfile, OIDCStrategyConfiguration, SSOAuthDetails, @@ -157,9 +156,6 @@ export async function fetchStrategyConfig( } } -export async function getCallbackUrl( - db: Database, - config: { callbackURL?: string } -) { - return ssoCallbackUrl(db, config, ConfigType.OIDC) +export async function getCallbackUrl() { + return ssoCallbackUrl(ConfigType.OIDC) } diff --git a/packages/backend-core/src/middleware/passport/utils.ts b/packages/backend-core/src/middleware/passport/utils.ts index 6eb3bc29d1..7e0d3863a0 100644 --- a/packages/backend-core/src/middleware/passport/utils.ts +++ b/packages/backend-core/src/middleware/passport/utils.ts @@ -1,6 +1,6 @@ -import { isMultiTenant, getTenantId } from "../../context" -import { getScopedConfig } from "../../db" -import { ConfigType, Database } from "@budibase/types" +import { getTenantId, isMultiTenant } from "../../context" +import * as configs from "../../configs" +import { ConfigType, GoogleInnerConfig } from "@budibase/types" /** * Utility to handle authentication errors. @@ -19,17 +19,14 @@ export function authError(done: Function, message: string, err?: any) { } export async function ssoCallbackUrl( - db: Database, - config?: { callbackURL?: string }, - type?: ConfigType + type: ConfigType, + config?: GoogleInnerConfig ) { // incase there is a callback URL from before - if (config && config.callbackURL) { - return config.callbackURL + if (config && (config as GoogleInnerConfig).callbackURL) { + return (config as GoogleInnerConfig).callbackURL as string } - const publicConfig = await getScopedConfig(db, { - type: ConfigType.SETTINGS, - }) + const settingsConfig = await configs.getSettingsConfig() let callbackUrl = `/api/global/auth` if (isMultiTenant()) { @@ -37,5 +34,5 @@ export async function ssoCallbackUrl( } callbackUrl += `/${type}/callback` - return `${publicConfig.platformUrl}${callbackUrl}` + return `${settingsConfig.platformUrl}${callbackUrl}` } diff --git a/packages/backend-core/src/middleware/querystringToBody.ts b/packages/backend-core/src/middleware/querystringToBody.ts new file mode 100644 index 0000000000..b6f109231a --- /dev/null +++ b/packages/backend-core/src/middleware/querystringToBody.ts @@ -0,0 +1,28 @@ +import { Ctx } from "@budibase/types" + +/** + * Expects a standard "query" query string property which is the JSON body + * of the request, which has to be sent via query string due to the requirement + * of making an endpoint a GET request e.g. downloading a file stream. + */ +export default function (ctx: Ctx, next: any) { + const queryString = ctx.request.query?.query as string | undefined + if (ctx.request.method.toLowerCase() !== "get") { + ctx.throw( + 500, + "Query to download middleware can only be used for get requests." 
+ ) + } + if (!queryString) { + return next() + } + const decoded = decodeURIComponent(queryString) + let json + try { + json = JSON.parse(decoded) + } catch (err) { + return next() + } + ctx.request.body = json + return next() +} diff --git a/packages/backend-core/src/migrations/migrations.ts b/packages/backend-core/src/migrations/migrations.ts index 2e3524775f..ab72091d56 100644 --- a/packages/backend-core/src/migrations/migrations.ts +++ b/packages/backend-core/src/migrations/migrations.ts @@ -87,6 +87,7 @@ export const runMigration = async ( const lengthStatement = length > 1 ? `[${count}/${length}]` : "" const db = getDB(dbName) + try { const doc = await getMigrationsDoc(db) diff --git a/packages/backend-core/src/platform/tenants.ts b/packages/backend-core/src/platform/tenants.ts index b9f946a735..b6bc3410d8 100644 --- a/packages/backend-core/src/platform/tenants.ts +++ b/packages/backend-core/src/platform/tenants.ts @@ -1,7 +1,7 @@ import { StaticDatabases } from "../constants" import { getPlatformDB } from "./platformDb" import { LockName, LockOptions, LockType, Tenants } from "@budibase/types" -import * as locks from "../redis/redlock" +import * as locks from "../redis/redlockImpl" const TENANT_DOC = StaticDatabases.PLATFORM_INFO.docs.tenants diff --git a/packages/backend-core/src/queue/constants.ts b/packages/backend-core/src/queue/constants.ts index e8323dacb8..9261ed1176 100644 --- a/packages/backend-core/src/queue/constants.ts +++ b/packages/backend-core/src/queue/constants.ts @@ -1,4 +1,5 @@ export enum JobQueue { AUTOMATION = "automationQueue", APP_BACKUP = "appBackupQueue", + AUDIT_LOG = "auditLogQueue", } diff --git a/packages/backend-core/src/queue/queue.ts b/packages/backend-core/src/queue/queue.ts index 8e1fc1fbf3..c57ebafb1f 100644 --- a/packages/backend-core/src/queue/queue.ts +++ b/packages/backend-core/src/queue/queue.ts @@ -40,8 +40,10 @@ export function createQueue( } export async function shutdown() { - if (QUEUES.length) { + if (cleanupInterval) { clearInterval(cleanupInterval) + } + if (QUEUES.length) { for (let queue of QUEUES) { await queue.close() } diff --git a/packages/backend-core/src/redis/index.ts b/packages/backend-core/src/redis/index.ts index 5bf2c65c39..6585d6e4fa 100644 --- a/packages/backend-core/src/redis/index.ts +++ b/packages/backend-core/src/redis/index.ts @@ -3,4 +3,4 @@ export { default as Client } from "./redis" export * as utils from "./utils" export * as clients from "./init" -export * as locks from "./redlock" +export * as locks from "./redlockImpl" diff --git a/packages/backend-core/src/redis/redlock.ts b/packages/backend-core/src/redis/redlockImpl.ts similarity index 83% rename from packages/backend-core/src/redis/redlock.ts rename to packages/backend-core/src/redis/redlockImpl.ts index 136d7f5d33..5e71488689 100644 --- a/packages/backend-core/src/redis/redlock.ts +++ b/packages/backend-core/src/redis/redlockImpl.ts @@ -24,7 +24,7 @@ const getClient = async (type: LockType): Promise => { } } -export const OPTIONS = { +const OPTIONS = { TRY_ONCE: { // immediately throws an error if the lock is already held retryCount: 0, @@ -56,14 +56,29 @@ export const OPTIONS = { }, } -export const newRedlock = async (opts: Options = {}) => { +const newRedlock = async (opts: Options = {}) => { let options = { ...OPTIONS.DEFAULT, ...opts } const redisWrapper = await getLockClient() const client = redisWrapper.getClient() return new Redlock([client], options) } -export const doWithLock = async (opts: LockOptions, task: any) => { +type 
SuccessfulRedlockExecution = { + executed: true + result: T +} +type UnsuccessfulRedlockExecution = { + executed: false +} + +type RedlockExecution = + | SuccessfulRedlockExecution + | UnsuccessfulRedlockExecution + +export const doWithLock = async ( + opts: LockOptions, + task: () => Promise +): Promise> => { const redlock = await getClient(opts.type) let lock try { @@ -73,8 +88,8 @@ export const doWithLock = async (opts: LockOptions, task: any) => { let name: string = `lock:${prefix}_${opts.name}` // add additional unique name if required - if (opts.nameSuffix) { - name = name + `_${opts.nameSuffix}` + if (opts.resource) { + name = name + `_${opts.resource}` } // create the lock @@ -83,7 +98,7 @@ export const doWithLock = async (opts: LockOptions, task: any) => { // perform locked task // need to await to ensure completion before unlocking const result = await task() - return result + return { executed: true, result } } catch (e: any) { console.warn("lock error") // lock limit exceeded @@ -92,7 +107,7 @@ export const doWithLock = async (opts: LockOptions, task: any) => { // don't throw for try-once locks, they will always error // due to retry count (0) exceeded console.warn(e) - return + return { executed: false } } else { console.error(e) throw e diff --git a/packages/backend-core/src/users.ts b/packages/backend-core/src/users.ts index ef76af390d..dfc544c3ed 100644 --- a/packages/backend-core/src/users.ts +++ b/packages/backend-core/src/users.ts @@ -5,19 +5,56 @@ import { generateAppUserID, queryGlobalView, UNICODE_MAX, + DocumentType, + SEPARATOR, + directCouchFind, } from "./db" import { BulkDocsResponse, User } from "@budibase/types" import { getGlobalDB } from "./context" import * as context from "./context" -export const bulkGetGlobalUsersById = async (userIds: string[]) => { +type GetOpts = { cleanup?: boolean } + +function removeUserPassword(users: User | User[]) { + if (Array.isArray(users)) { + return users.map(user => { + if (user) { + delete user.password + return user + } + }) + } else if (users) { + delete users.password + return users + } + return users +} + +export const bulkGetGlobalUsersById = async ( + userIds: string[], + opts?: GetOpts +) => { const db = getGlobalDB() - return ( + let users = ( await db.allDocs({ keys: userIds, include_docs: true, }) ).rows.map(row => row.doc) as User[] + if (opts?.cleanup) { + users = removeUserPassword(users) as User[] + } + return users +} + +export const getAllUserIds = async () => { + const db = getGlobalDB() + const startKey = `${DocumentType.USER}${SEPARATOR}` + const response = await db.allDocs({ + startkey: startKey, + endkey: `${startKey}${UNICODE_MAX}`, + }) + return response.rows.map(row => row.id) } export const bulkUpdateGlobalUsers = async (users: User[]) => { @@ -25,18 +62,22 @@ export const bulkUpdateGlobalUsers = async (users: User[]) => { return (await db.bulkDocs(users)) as BulkDocsResponse } -export async function getById(id: string): Promise { +export async function getById(id: string, opts?: GetOpts): Promise { const db = context.getGlobalDB() - return db.get(id) + let user = await db.get(id) + if (opts?.cleanup) { + user = removeUserPassword(user) + } + return user } /** * Given an email address this will use a view to search through * all the users to find one with this email address. - * @param {string} email the email to lookup the user by. 
*/ export const getGlobalUserByEmail = async ( - email: String + email: String, + opts?: GetOpts ): Promise => { if (email == null) { throw "Must supply an email address to view" @@ -52,10 +93,19 @@ export const getGlobalUserByEmail = async ( throw new Error(`Multiple users found with email address: ${email}`) } - return response + let user = response as User + if (opts?.cleanup) { + user = removeUserPassword(user) as User + } + + return user } -export const searchGlobalUsersByApp = async (appId: any, opts: any) => { +export const searchGlobalUsersByApp = async ( + appId: any, + opts: any, + getOpts?: GetOpts +) => { if (typeof appId !== "string") { throw new Error("Must provide a string based app ID") } @@ -64,10 +114,54 @@ export const searchGlobalUsersByApp = async (appId: any, opts: any) => { }) params.startkey = opts && opts.startkey ? opts.startkey : params.startkey let response = await queryGlobalView(ViewName.USER_BY_APP, params) + if (!response) { response = [] } - return Array.isArray(response) ? response : [response] + let users: User[] = Array.isArray(response) ? response : [response] + if (getOpts?.cleanup) { + users = removeUserPassword(users) as User[] + } + return users +} + +/* + Return any user who potentially has access to the application + Admins, developers and app users with the explicitly role. +*/ +export const searchGlobalUsersByAppAccess = async (appId: any, opts: any) => { + const roleSelector = `roles.${appId}` + + let orQuery: any[] = [ + { + "builder.global": true, + }, + { + "admin.global": true, + }, + ] + + if (appId) { + const roleCheck = { + [roleSelector]: { + $exists: true, + }, + } + orQuery.push(roleCheck) + } + + let searchOptions = { + selector: { + $or: orQuery, + _id: { + $regex: "^us_", + }, + }, + limit: opts?.limit || 50, + } + + const resp = await directCouchFind(context.getGlobalDBName(), searchOptions) + return resp?.rows } export const getGlobalUserByAppPage = (appId: string, user: User) => { @@ -80,7 +174,11 @@ export const getGlobalUserByAppPage = (appId: string, user: User) => { /** * Performs a starts with search on the global email view. */ -export const searchGlobalUsersByEmail = async (email: string, opts: any) => { +export const searchGlobalUsersByEmail = async ( + email: string, + opts: any, + getOpts?: GetOpts +) => { if (typeof email !== "string") { throw new Error("Must provide a string to search by") } @@ -95,5 +193,9 @@ export const searchGlobalUsersByEmail = async (email: string, opts: any) => { if (!response) { response = [] } - return Array.isArray(response) ? response : [response] + let users: User[] = Array.isArray(response) ? 
response : [response] + if (getOpts?.cleanup) { + users = removeUserPassword(users) as User[] + } + return users } diff --git a/packages/backend-core/src/utils/utils.ts b/packages/backend-core/src/utils/utils.ts index 3731e134ad..3efd40ca80 100644 --- a/packages/backend-core/src/utils/utils.ts +++ b/packages/backend-core/src/utils/utils.ts @@ -10,7 +10,13 @@ import { import env from "../environment" import * as tenancy from "../tenancy" import * as context from "../context" -import { App, Ctx, TenantResolutionStrategy } from "@budibase/types" +import { + App, + AuditedEventFriendlyName, + Ctx, + Event, + TenantResolutionStrategy, +} from "@budibase/types" import { SetOption } from "cookies" const jwt = require("jsonwebtoken") @@ -217,3 +223,7 @@ export async function getBuildersCount() { export function timeout(timeMs: number) { return new Promise(resolve => setTimeout(resolve, timeMs)) } + +export function isAudited(event: Event) { + return !!AuditedEventFriendlyName[event] +} diff --git a/packages/backend-core/tests/utilities/index.ts b/packages/backend-core/tests/utilities/index.ts index efe014908b..1c73216d76 100644 --- a/packages/backend-core/tests/utilities/index.ts +++ b/packages/backend-core/tests/utilities/index.ts @@ -4,4 +4,6 @@ export { generator } from "./structures" export * as testEnv from "./testEnv" export * as testContainerUtils from "./testContainerUtils" +export * from "./jestUtils" + export { default as DBTestConfiguration } from "./DBTestConfiguration" diff --git a/packages/backend-core/tests/utilities/jestUtils.ts b/packages/backend-core/tests/utilities/jestUtils.ts new file mode 100644 index 0000000000..d84eac548c --- /dev/null +++ b/packages/backend-core/tests/utilities/jestUtils.ts @@ -0,0 +1,9 @@ +export function expectFunctionWasCalledTimesWith( + jestFunction: any, + times: number, + argument: any +) { + expect( + jestFunction.mock.calls.filter((call: any) => call[0] === argument).length + ).toBe(times) +} diff --git a/packages/backend-core/tests/utilities/mocks/licenses.ts b/packages/backend-core/tests/utilities/mocks/licenses.ts index e374612f5f..2ca41616e4 100644 --- a/packages/backend-core/tests/utilities/mocks/licenses.ts +++ b/packages/backend-core/tests/utilities/mocks/licenses.ts @@ -70,6 +70,10 @@ export const useBackups = () => { return useFeature(Feature.APP_BACKUPS) } +export const useEnforceableSSO = () => { + return useFeature(Feature.ENFORCEABLE_SSO) +} + export const useGroups = () => { return useFeature(Feature.USER_GROUPS) } @@ -78,6 +82,10 @@ export const useEnvironmentVariables = () => { return useFeature(Feature.ENVIRONMENT_VARIABLES) } +export const useAuditLogs = () => { + return useFeature(Feature.AUDIT_LOGS) +} + // QUOTAS export const setAutomationLogsQuota = (value: number) => { diff --git a/packages/backend-core/tests/utilities/structures/accounts.ts b/packages/backend-core/tests/utilities/structures/accounts.ts index 6bfeedf196..62a9ac19d1 100644 --- a/packages/backend-core/tests/utilities/structures/accounts.ts +++ b/packages/backend-core/tests/utilities/structures/accounts.ts @@ -8,6 +8,8 @@ import { CloudAccount, Hosting, SSOAccount, + CreateAccount, + CreatePassswordAccount, } from "@budibase/types" import _ from "lodash" @@ -29,6 +31,10 @@ export const account = (): Account => { } } +export function selfHostAccount() { + return account() +} + export const cloudAccount = (): CloudAccount => { return { ...account(), @@ -47,9 +53,9 @@ function provider(): AccountSSOProvider { return _.sample(Object.values(AccountSSOProvider)) as 
AccountSSOProvider } -export function ssoAccount(): SSOAccount { +export function ssoAccount(account: Account = cloudAccount()): SSOAccount { return { - ...cloudAccount(), + ...account, authType: AuthType.SSO, oauth2: { accessToken: generator.string(), @@ -61,3 +67,49 @@ export function ssoAccount(): SSOAccount { thirdPartyProfile: {}, } } + +export const cloudCreateAccount: CreatePassswordAccount = { + email: "cloud@budibase.com", + tenantId: "cloud", + hosting: Hosting.CLOUD, + authType: AuthType.PASSWORD, + password: "Password123!", + tenantName: "cloud", + name: "Budi Armstrong", + size: "10+", + profession: "Software Engineer", +} + +export const cloudSSOCreateAccount: CreateAccount = { + email: "cloud-sso@budibase.com", + tenantId: "cloud-sso", + hosting: Hosting.CLOUD, + authType: AuthType.SSO, + tenantName: "cloudsso", + name: "Budi Armstrong", + size: "10+", + profession: "Software Engineer", +} + +export const selfCreateAccount: CreatePassswordAccount = { + email: "self@budibase.com", + tenantId: "self", + hosting: Hosting.SELF, + authType: AuthType.PASSWORD, + password: "Password123!", + tenantName: "self", + name: "Budi Armstrong", + size: "10+", + profession: "Software Engineer", +} + +export const selfSSOCreateAccount: CreateAccount = { + email: "self-sso@budibase.com", + tenantId: "self-sso", + hosting: Hosting.SELF, + authType: AuthType.SSO, + tenantName: "selfsso", + name: "Budi Armstrong", + size: "10+", + profession: "Software Engineer", +} diff --git a/packages/backend-core/tests/utilities/structures/db.ts b/packages/backend-core/tests/utilities/structures/db.ts index e25b707cb9..f4a677e777 100644 --- a/packages/backend-core/tests/utilities/structures/db.ts +++ b/packages/backend-core/tests/utilities/structures/db.ts @@ -1,5 +1,12 @@ +import { structures } from ".." 
import { newid } from "../../../src/newid" export function id() { return `db_${newid()}` } + +export function rev() { + return `${structures.generator.character({ + numeric: true, + })}-${structures.uuid().replace(/-/, "")}` +} diff --git a/packages/backend-core/tests/utilities/structures/generator.ts b/packages/backend-core/tests/utilities/structures/generator.ts new file mode 100644 index 0000000000..51567b152e --- /dev/null +++ b/packages/backend-core/tests/utilities/structures/generator.ts @@ -0,0 +1,2 @@ +import Chance from "chance" +export const generator = new Chance() diff --git a/packages/backend-core/tests/utilities/structures/index.ts b/packages/backend-core/tests/utilities/structures/index.ts index d0073ba851..ca77f476d0 100644 --- a/packages/backend-core/tests/utilities/structures/index.ts +++ b/packages/backend-core/tests/utilities/structures/index.ts @@ -1,8 +1,4 @@ export * from "./common" - -import Chance from "chance" -export const generator = new Chance() - export * as accounts from "./accounts" export * as apps from "./apps" export * as db from "./db" @@ -12,3 +8,4 @@ export * as plugins from "./plugins" export * as sso from "./sso" export * as tenant from "./tenants" export * as users from "./users" +export { generator } from "./generator" diff --git a/packages/backend-core/tests/utilities/structures/shared.ts b/packages/backend-core/tests/utilities/structures/shared.ts new file mode 100644 index 0000000000..de0e19486c --- /dev/null +++ b/packages/backend-core/tests/utilities/structures/shared.ts @@ -0,0 +1,19 @@ +import { User } from "@budibase/types" +import { generator } from "./generator" +import { uuid } from "./common" + +export const newEmail = () => { + return `${uuid()}@test.com` +} + +export const user = (userProps?: any): User => { + return { + email: newEmail(), + password: "test", + roles: { app_test: "admin" }, + firstName: generator.first(), + lastName: generator.last(), + pictureUrl: "http://test.com", + ...userProps, + } +} diff --git a/packages/backend-core/tests/utilities/structures/sso.ts b/packages/backend-core/tests/utilities/structures/sso.ts index ad5e8e87ef..7413fa3c09 100644 --- a/packages/backend-core/tests/utilities/structures/sso.ts +++ b/packages/backend-core/tests/utilities/structures/sso.ts @@ -1,6 +1,7 @@ import { GoogleInnerConfig, JwtClaims, + OAuth2, OIDCInnerConfig, OIDCWellKnownConfig, SSOAuthDetails, @@ -8,8 +9,40 @@ import { SSOProviderType, User, } from "@budibase/types" -import { uuid, generator, users, email } from "./index" +import { generator } from "./generator" +import { uuid, email } from "./common" +import * as shared from "./shared" import _ from "lodash" +import { user } from "./shared" + +export function OAuth(): OAuth2 { + return { + refreshToken: generator.string(), + accessToken: generator.string(), + } +} + +export function authDetails(userDoc?: User): SSOAuthDetails { + if (!userDoc) { + userDoc = user() + } + + const userId = userDoc._id || uuid() + const provider = generator.string() + + const profile = ssoProfile(userDoc) + profile.provider = provider + profile.id = userId + + return { + email: userDoc.email, + oauth2: OAuth(), + profile, + provider, + providerType: providerType(), + userId, + } +} export function providerType(): SSOProviderType { return _.sample(Object.values(SSOProviderType)) as SSOProviderType @@ -17,7 +50,7 @@ export function providerType(): SSOProviderType { export function ssoProfile(user?: User): SSOProfile { if (!user) { - user = users.user() + user = shared.user() } return { id: 
user._id!, @@ -33,31 +66,6 @@ export function ssoProfile(user?: User): SSOProfile { } } -export function authDetails(user?: User): SSOAuthDetails { - if (!user) { - user = users.user() - } - - const userId = user._id || uuid() - const provider = generator.string() - - const profile = ssoProfile(user) - profile.provider = provider - profile.id = userId - - return { - email: user.email, - oauth2: { - refreshToken: generator.string(), - accessToken: generator.string(), - }, - profile, - provider, - providerType: providerType(), - userId, - } -} - // OIDC export function oidcConfig(): OIDCInnerConfig { @@ -69,6 +77,7 @@ export function oidcConfig(): OIDCInnerConfig { configUrl: "http://someconfigurl", clientID: generator.string(), clientSecret: generator.string(), + scopes: [], } } diff --git a/packages/backend-core/tests/utilities/structures/users.ts b/packages/backend-core/tests/utilities/structures/users.ts index 332c27ca12..7a6b4f0d80 100644 --- a/packages/backend-core/tests/utilities/structures/users.ts +++ b/packages/backend-core/tests/utilities/structures/users.ts @@ -1,29 +1,13 @@ -import { generator } from "../" import { AdminUser, BuilderUser, SSOAuthDetails, SSOUser, - User, } from "@budibase/types" -import { v4 as uuid } from "uuid" -import * as sso from "./sso" +import { user } from "./shared" +import { authDetails } from "./sso" -export const newEmail = () => { - return `${uuid()}@test.com` -} - -export const user = (userProps?: any): User => { - return { - email: newEmail(), - password: "test", - roles: { app_test: "admin" }, - firstName: generator.first(), - lastName: generator.last(), - pictureUrl: "http://test.com", - ...userProps, - } -} +export { user, newEmail } from "./shared" export const adminUser = (userProps?: any): AdminUser => { return { @@ -53,7 +37,7 @@ export function ssoUser( delete base.password if (!opts.details) { - opts.details = sso.authDetails(base) + opts.details = authDetails(base) } return { diff --git a/packages/backend-core/tests/utilities/testContainerUtils.ts b/packages/backend-core/tests/utilities/testContainerUtils.ts index 11c5fca806..f6c702f7ef 100644 --- a/packages/backend-core/tests/utilities/testContainerUtils.ts +++ b/packages/backend-core/tests/utilities/testContainerUtils.ts @@ -1,3 +1,31 @@ +import { execSync } from "child_process" + +let dockerPsResult: string | undefined + +function formatDockerPsResult(serverName: string, port: number) { + const lines = dockerPsResult?.split("\n") + let first = true + if (!lines) { + return null + } + for (let line of lines) { + if (first) { + first = false + continue + } + let toLookFor = serverName.split("-service")[0] + if (!line.includes(toLookFor)) { + continue + } + const regex = new RegExp(`0.0.0.0:([0-9]*)->${port}`, "g") + const found = line.match(regex) + if (found) { + return found[0].split(":")[1].split("->")[0] + } + } + return null +} + function getTestContainerSettings( serverName: string, key: string @@ -14,10 +42,22 @@ function getTestContainerSettings( } function getContainerInfo(containerName: string, port: number) { - const assignedPort = getTestContainerSettings( + let assignedPort = getTestContainerSettings( containerName.toUpperCase(), `PORT_${port}` ) + if (!dockerPsResult) { + try { + const outputBuffer = execSync("docker ps") + dockerPsResult = outputBuffer.toString("utf8") + } catch (err) { + //no-op + } + } + const possiblePort = formatDockerPsResult(containerName, port) + if (possiblePort) { + assignedPort = possiblePort + } const host = 
getTestContainerSettings(containerName.toUpperCase(), "IP") return { port: assignedPort, @@ -39,12 +79,15 @@ function getRedisConfig() { } export function setupEnv(...envs: any[]) { + const couch = getCouchConfig(), + minio = getMinioConfig(), + redis = getRedisConfig() const configs = [ - { key: "COUCH_DB_PORT", value: getCouchConfig().port }, - { key: "COUCH_DB_URL", value: getCouchConfig().url }, - { key: "MINIO_PORT", value: getMinioConfig().port }, - { key: "MINIO_URL", value: getMinioConfig().url }, - { key: "REDIS_URL", value: getRedisConfig().url }, + { key: "COUCH_DB_PORT", value: couch.port }, + { key: "COUCH_DB_URL", value: couch.url }, + { key: "MINIO_PORT", value: minio.port }, + { key: "MINIO_URL", value: minio.url }, + { key: "REDIS_URL", value: redis.url }, ] for (const config of configs.filter(x => !!x.value)) { diff --git a/packages/backend-core/yarn.lock b/packages/backend-core/yarn.lock index 5f8edb3df6..91c5c6c9f3 100644 --- a/packages/backend-core/yarn.lock +++ b/packages/backend-core/yarn.lock @@ -475,10 +475,10 @@ resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== -"@budibase/nano@10.1.1": - version "10.1.1" - resolved "https://registry.yarnpkg.com/@budibase/nano/-/nano-10.1.1.tgz#36ccda4d9bb64b5ee14dd2b27a295b40739b1038" - integrity sha512-kbMIzMkjVtl+xI0UPwVU0/pn8/ccxTyfzwBz6Z+ZiN2oUSb0fJCe0qwA6o8dxwSa8nZu4MbGAeMJl3CJndmWtA== +"@budibase/nano@10.1.2": + version "10.1.2" + resolved "https://registry.yarnpkg.com/@budibase/nano/-/nano-10.1.2.tgz#10fae5a1ab39be6a81261f40e7b7ec6d21cbdd4a" + integrity sha512-1w+YN2n/M5aZ9hBKCP4NEjdQbT8BfCLRizkdvm0Je665eEHw3aE1hvo8mon9Ro9QuDdxj1DfDMMFnym6/QUwpQ== dependencies: "@types/tough-cookie" "^4.0.2" axios "^1.1.3" diff --git a/packages/bbui/package.json b/packages/bbui/package.json index b697a532ad..05e826eb81 100644 --- a/packages/bbui/package.json +++ b/packages/bbui/package.json @@ -1,7 +1,7 @@ { "name": "@budibase/bbui", "description": "A UI solution used in the different Budibase projects.", - "version": "2.3.18-alpha.12", + "version": "2.4.12-alpha.0", "license": "MPL-2.0", "svelte": "src/index.js", "module": "dist/bbui.es.js", @@ -38,7 +38,8 @@ ], "dependencies": { "@adobe/spectrum-css-workflow-icons": "1.2.1", - "@budibase/string-templates": "2.3.18-alpha.12", + "@budibase/shared-core": "2.4.12-alpha.0", + "@budibase/string-templates": "2.4.12-alpha.0", "@spectrum-css/accordion": "3.0.24", "@spectrum-css/actionbutton": "1.0.1", "@spectrum-css/actiongroup": "1.0.1", diff --git a/packages/bbui/src/ActionButton/ActionButton.svelte b/packages/bbui/src/ActionButton/ActionButton.svelte index 663128160f..60c8bec80b 100644 --- a/packages/bbui/src/ActionButton/ActionButton.svelte +++ b/packages/bbui/src/ActionButton/ActionButton.svelte @@ -1,6 +1,9 @@ - + + diff --git a/packages/bbui/src/Actions/position_dropdown.js b/packages/bbui/src/Actions/position_dropdown.js index abc7188985..ecbb5747c4 100644 --- a/packages/bbui/src/Actions/position_dropdown.js +++ b/packages/bbui/src/Actions/position_dropdown.js @@ -31,6 +31,7 @@ export default function positionDropdown(element, opts) { styles.top = anchorBounds.top } else if (window.innerHeight - anchorBounds.bottom < 100) { styles.top = anchorBounds.top - elementBounds.height - offset + styles.maxHeight = 240 } else { styles.top = anchorBounds.bottom + offset styles.maxHeight = window.innerHeight - 
anchorBounds.bottom - 20 diff --git a/packages/bbui/src/Form/Core/Multiselect.svelte b/packages/bbui/src/Form/Core/Multiselect.svelte index d6c4dc23ac..1d04c210f4 100644 --- a/packages/bbui/src/Form/Core/Multiselect.svelte +++ b/packages/bbui/src/Form/Core/Multiselect.svelte @@ -14,6 +14,9 @@ export let autocomplete = false export let sort = false export let autoWidth = false + export let fetchTerm = null + export let useFetch = false + export let customPopoverHeight const dispatch = createEventDispatcher() @@ -83,10 +86,13 @@ {options} isPlaceholder={!value?.length} {autocomplete} + bind:fetchTerm + {useFetch} {isOptionSelected} {getOptionLabel} {getOptionValue} onSelectOption={toggleOption} {sort} {autoWidth} + {customPopoverHeight} /> diff --git a/packages/bbui/src/Form/Core/Picker.svelte b/packages/bbui/src/Form/Core/Picker.svelte index 32cfcf3310..bd575600b1 100644 --- a/packages/bbui/src/Form/Core/Picker.svelte +++ b/packages/bbui/src/Form/Core/Picker.svelte @@ -24,6 +24,7 @@ export let getOptionLabel = option => option export let getOptionValue = option => option export let getOptionIcon = () => null + export let useOptionIconImage = false export let getOptionColour = () => null export let open = false export let readonly = false @@ -31,6 +32,11 @@ export let autoWidth = false export let autocomplete = false export let sort = false + export let fetchTerm = null + export let useFetch = false + export let customPopoverHeight + export let align = "left" + export let footer = null const dispatch = createEventDispatcher() @@ -71,7 +77,7 @@ } const getFilteredOptions = (options, term, getLabel) => { - if (autocomplete && term) { + if (autocomplete && term && !fetchTerm) { const lowerCaseTerm = term.toLowerCase() return options.filter(option => { return `${getLabel(option)}`.toLowerCase().includes(lowerCaseTerm) @@ -130,12 +136,13 @@ (open = false)} useAnchorWidth={!autoWidth} maxWidth={autoWidth ? 400 : null} + customHeight={customPopoverHeight} >
{#if autocomplete} (searchTerm = event.detail)} + value={useFetch ? fetchTerm : searchTerm} + on:change={event => + useFetch ? (fetchTerm = event.detail) : (searchTerm = event.detail)} {disabled} placeholder="Search" /> @@ -183,7 +191,16 @@ > {#if getOptionIcon(option, idx)} - + {#if useOptionIconImage} + icon + {:else} + + {/if} {/if} {#if getOptionColour(option, idx)} @@ -205,6 +222,12 @@ {/each} {/if} + + {#if footer} + + {/if}
@@ -247,7 +270,7 @@ } .popover-content.auto-width .spectrum-Menu-itemLabel { white-space: nowrap; - overflow: hidden; + overflow: none; text-overflow: ellipsis; } .popover-content:not(.auto-width) .spectrum-Menu-itemLabel { @@ -281,4 +304,11 @@ .popover-content :global(.spectrum-Search .spectrum-Textfield-icon) { top: 9px; } + + .footer { + padding: 4px 12px 12px 12px; + font-style: italic; + max-width: 170px; + font-size: 12px; + } diff --git a/packages/bbui/src/Form/Core/Select.svelte b/packages/bbui/src/Form/Core/Select.svelte index 721083e3a6..af45c1d9ff 100644 --- a/packages/bbui/src/Form/Core/Select.svelte +++ b/packages/bbui/src/Form/Core/Select.svelte @@ -11,6 +11,7 @@ export let getOptionLabel = option => option export let getOptionValue = option => option export let getOptionIcon = () => null + export let useOptionIconImage = false export let getOptionColour = () => null export let isOptionEnabled export let readonly = false @@ -18,6 +19,8 @@ export let autoWidth = false export let autocomplete = false export let sort = false + export let align + export let footer = null const dispatch = createEventDispatcher() @@ -41,7 +44,7 @@ const getFieldText = (value, options, placeholder) => { // Always use placeholder if no value if (value == null || value === "") { - return placeholder || "Choose an option" + return placeholder !== false ? "Choose an option" : "" } return getFieldAttribute(getOptionLabel, value, options) @@ -66,15 +69,18 @@ {fieldColour} {options} {autoWidth} + {align} + {footer} {getOptionLabel} {getOptionValue} {getOptionIcon} + {useOptionIconImage} {getOptionColour} {isOptionEnabled} {autocomplete} {sort} isPlaceholder={value == null || value === ""} - placeholderOption={placeholder} + placeholderOption={placeholder === false ? 
null : placeholder} isOptionSelected={option => option === value} onSelectOption={selectOption} /> diff --git a/packages/bbui/src/Form/Multiselect.svelte b/packages/bbui/src/Form/Multiselect.svelte index 7bcf22aa06..185eb7069b 100644 --- a/packages/bbui/src/Form/Multiselect.svelte +++ b/packages/bbui/src/Form/Multiselect.svelte @@ -15,6 +15,11 @@ export let getOptionValue = option => option export let sort = false export let autoWidth = false + export let autocomplete = false + export let fetchTerm = null + export let useFetch = false + export let customPopoverHeight + const dispatch = createEventDispatcher() const onChange = e => { value = e.detail @@ -34,6 +39,10 @@ {getOptionLabel} {getOptionValue} {autoWidth} + {autocomplete} + {customPopoverHeight} + bind:fetchTerm + {useFetch} on:change={onChange} on:click /> diff --git a/packages/bbui/src/Form/Select.svelte b/packages/bbui/src/Form/Select.svelte index 69126e648d..e87496652d 100644 --- a/packages/bbui/src/Form/Select.svelte +++ b/packages/bbui/src/Form/Select.svelte @@ -14,12 +14,17 @@ export let getOptionLabel = option => extractProperty(option, "label") export let getOptionValue = option => extractProperty(option, "value") export let getOptionIcon = option => option?.icon + export let useOptionIconImage = false export let getOptionColour = option => option?.colour export let isOptionEnabled export let quiet = false export let autoWidth = false export let sort = false export let tooltip = "" + export let autocomplete = false + export let customPopoverHeight + export let align + export let footer = null const dispatch = createEventDispatcher() const onChange = e => { @@ -46,11 +51,16 @@ {placeholder} {autoWidth} {sort} + {align} + {footer} {getOptionLabel} {getOptionValue} {getOptionIcon} {getOptionColour} + {useOptionIconImage} {isOptionEnabled} + {autocomplete} + {customPopoverHeight} on:change={onChange} on:click /> diff --git a/packages/bbui/src/Modal/Modal.svelte b/packages/bbui/src/Modal/Modal.svelte index 45081356c1..f56ef1187f 100644 --- a/packages/bbui/src/Modal/Modal.svelte +++ b/packages/bbui/src/Modal/Modal.svelte @@ -29,6 +29,14 @@ visible = false } + export function toggle() { + if (visible) { + hide() + } else { + show() + } + } + export function cancel() { if (!visible) { return @@ -61,7 +69,7 @@ } } - setContext(Context.Modal, { show, hide, cancel }) + setContext(Context.Modal, { show, hide, toggle, cancel }) onMount(() => { document.addEventListener("keydown", handleKey) diff --git a/packages/bbui/src/Popover/Popover.svelte b/packages/bbui/src/Popover/Popover.svelte index 8f6ef06591..081e3a34df 100644 --- a/packages/bbui/src/Popover/Popover.svelte +++ b/packages/bbui/src/Popover/Popover.svelte @@ -18,6 +18,7 @@ export let useAnchorWidth = false export let dismissible = true export let offset = 5 + export let customHeight $: target = portalTarget || getContext(Context.PopoverRoot) || ".spectrum" @@ -74,6 +75,7 @@ on:keydown={handleEscape} class="spectrum-Popover is-open" role="presentation" + style="height: {customHeight}" transition:fly|local={{ y: -20, duration: 200 }} > diff --git a/packages/bbui/src/helpers.js b/packages/bbui/src/helpers.js index f2246fbb49..32030322d9 100644 --- a/packages/bbui/src/helpers.js +++ b/packages/bbui/src/helpers.js @@ -1,3 +1,6 @@ +import { helpers } from "@budibase/shared-core" +export const deepGet = helpers.deepGet + /** * Generates a DOM safe UUID. * Starting with a letter is important to make it DOM safe. 
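The helpers.js hunk above swaps the local deepGet for the @budibase/shared-core implementation, and the old copy is deleted in the next hunk. A small sketch of the behaviour the removed JSDoc documents, assuming the shared-core version keeps the same semantics:

```ts
// Assumes @budibase/shared-core's deepGet matches the helper deleted below:
// dot-path lookup, exact dotted keys win, and falsy input short-circuits to null.
import { helpers } from "@budibase/shared-core"

helpers.deepGet({ a: { b: "bar" } }, "a.b") // "bar" – dot syntax walks nested objects
helpers.deepGet({ "a.b": "foo", a: { b: "bar" } }, "a.b") // "foo" – exact dotted key wins
helpers.deepGet(null, "a.b") // null – no object to read from
```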
@@ -41,30 +44,6 @@ export const hashString = string => { return hash.toString() } -/** - * Gets a key within an object. The key supports dot syntax for retrieving deep - * fields - e.g. "a.b.c". - * Exact matches of keys with dots in them take precedence over nested keys of - * the same path - e.g. getting "a.b" from { "a.b": "foo", a: { b: "bar" } } - * will return "foo" over "bar". - * @param obj the object - * @param key the key - * @return {*|null} the value or null if a value was not found for this key - */ -export const deepGet = (obj, key) => { - if (!obj || !key) { - return null - } - if (Object.prototype.hasOwnProperty.call(obj, key)) { - return obj[key] - } - const split = key.split(".") - for (let i = 0; i < split.length; i++) { - obj = obj?.[split[i]] - } - return obj -} - /** * Sets a key within an object. The key supports dot syntax for retrieving deep * fields - e.g. "a.b.c". diff --git a/packages/builder/package.json b/packages/builder/package.json index b3afb4de2a..7d8e2a8ae9 100644 --- a/packages/builder/package.json +++ b/packages/builder/package.json @@ -1,6 +1,6 @@ { "name": "@budibase/builder", - "version": "2.3.18-alpha.12", + "version": "2.4.12-alpha.0", "license": "GPL-3.0", "private": true, "scripts": { @@ -58,10 +58,11 @@ } }, "dependencies": { - "@budibase/bbui": "2.3.18-alpha.12", - "@budibase/client": "2.3.18-alpha.12", - "@budibase/frontend-core": "2.3.18-alpha.12", - "@budibase/string-templates": "2.3.18-alpha.12", + "@budibase/bbui": "2.4.12-alpha.0", + "@budibase/client": "2.4.12-alpha.0", + "@budibase/frontend-core": "2.4.12-alpha.0", + "@budibase/shared-core": "2.4.12-alpha.0", + "@budibase/string-templates": "2.4.12-alpha.0", "@fortawesome/fontawesome-svg-core": "^6.2.1", "@fortawesome/free-brands-svg-icons": "^6.2.1", "@fortawesome/free-solid-svg-icons": "^6.2.1", diff --git a/packages/builder/src/builderStore/store/frontend.js b/packages/builder/src/builderStore/store/frontend.js index d58a2d5b9e..51f88add27 100644 --- a/packages/builder/src/builderStore/store/frontend.js +++ b/packages/builder/src/builderStore/store/frontend.js @@ -72,6 +72,8 @@ const INITIAL_FRONTEND_STATE = { // onboarding onboarding: false, tourNodes: null, + + builderSidePanel: false, } export const getFrontendStore = () => { diff --git a/packages/builder/src/components/automation/AutomationBuilder/TestDisplay.svelte b/packages/builder/src/components/automation/AutomationBuilder/TestDisplay.svelte index d74eab3622..043844b6d2 100644 --- a/packages/builder/src/components/automation/AutomationBuilder/TestDisplay.svelte +++ b/packages/builder/src/components/automation/AutomationBuilder/TestDisplay.svelte @@ -73,14 +73,14 @@