Merge branch 'develop' into license-key-mgmt

Mateus Badan de Pieri 2023-03-10 12:29:10 +00:00
commit d8ce0713aa
350 changed files with 12028 additions and 3705 deletions


@ -2,10 +2,11 @@
 name: Bug report
 about: Create a report to help us improve
 title: ''
-labels: bug
+labels: bug, linear
 assignees: ''
 ---
 **Checklist**
 - [ ] I have searched budibase discussions and github issues to check if my issue already exists


@ -10,7 +10,7 @@ on:
 pull_request:
 branches:
 - master
 - develop
 workflow_dispatch:
 env:
@ -64,6 +64,20 @@ jobs:
 name: codecov-umbrella
 verbose: true
test-pro:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Use Node.js 14.x
uses: actions/setup-node@v1
with:
node-version: 14.x
- name: Install Pro
run: yarn install:pro $BRANCH $BASE_BRANCH
- run: yarn
- run: yarn bootstrap
- run: yarn test:pro
 integration-test:
 runs-on: ubuntu-latest
 services:


@ -22,7 +22,7 @@ jobs:
 - name: Pull values.yaml from budibase-infra
 run: |
-curl -H "Authorization: token ${{ secrets.GH_PERSONAL_TOKEN }}" \
+curl -H "Authorization: token ${{ secrets.GH_ACCESS_TOKEN }}" \
 -H 'Accept: application/vnd.github.v3.raw' \
 -o values.production.yaml \
 -L https://api.github.com/repos/budibase/budibase-infra/contents/kubernetes/values.yaml


@ -1,18 +1,16 @@
-name: Budibase Deploy Preprod
+name: "deploy-preprod"
 on:
 workflow_dispatch:
+workflow_call:
-env:
-INTERCOM_TOKEN: ${{ secrets.INTERCOM_TOKEN }}
-SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
 jobs:
-release:
+deploy-to-legacy-preprod-env:
 runs-on: ubuntu-latest
 steps:
 - uses: actions/checkout@v2
+- name: 'Get Previous tag'
+id: previoustag
+uses: "WyriHaximus/github-action-get-previous-tag@v1"
 - name: Configure AWS Credentials
 uses: aws-actions/configure-aws-credentials@v1
@ -21,23 +19,16 @@ jobs:
 aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
 aws-region: eu-west-1
-- name: Get the latest budibase release version
-id: version
-run: |
-release_version=$(cat lerna.json | jq -r '.version')
-echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
 - name: Pull values.yaml from budibase-infra
 run: |
-curl -H "Authorization: token ${{ secrets.GH_PERSONAL_TOKEN }}" \
+curl -H "Authorization: token ${{ secrets.GH_ACCESS_TOKEN }}" \
 -H 'Accept: application/vnd.github.v3.raw' \
 -o values.preprod.yaml \
 -L https://api.github.com/repos/budibase/budibase-infra/contents/kubernetes/budibase-preprod/values.yaml
 wc -l values.preprod.yaml
 - name: Deploy to Preprod Environment
-uses: glopezep/helm@v1.7.1
+uses: budibase/helm@v1.8.0
 with:
 release: budibase-preprod
 namespace: budibase
@ -46,7 +37,7 @@ jobs:
 helm: helm3
 values: |
 globals:
-appVersion: v${{ env.RELEASE_VERSION }}
+appVersion: ${{ steps.previoustag.outputs.tag }}
 ingress:
 enabled: true
 nginx: true
@ -61,5 +52,5 @@ jobs:
 uses: tsickert/discord-webhook@v4.0.0
 with:
 webhook-url: ${{ secrets.PROD_DEPLOY_WEBHOOK_URL }}
-content: "Preprod Deployment Complete: ${{ env.RELEASE_VERSION }} deployed to Budibase Pre-prod."
+content: "Preprod Deployment Complete: ${{ steps.previoustag.outputs.tag }} deployed to Budibase Pre-prod."
-embed-title: ${{ env.RELEASE_VERSION }}
+embed-title: ${{ steps.previoustag.outputs.tag }}


@ -1,88 +0,0 @@
name: Budibase Deploy Release
on:
workflow_dispatch:
jobs:
release:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: eu-west-1
- name: Fail if branch is not develop
if: github.ref != 'refs/heads/develop'
run: |
echo "Ref is not develop, you must run this job from develop."
exit 1
- name: Get the latest budibase release version
id: version
run: |
release_version=$(cat lerna.json | jq -r '.version')
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
- name: Pull values.yaml from budibase-infra
run: |
curl -H "Authorization: token ${{ secrets.GH_PERSONAL_TOKEN }}" \
-H 'Accept: application/vnd.github.v3.raw' \
-o values.release.yaml \
-L https://api.github.com/repos/budibase/budibase-infra/contents/kubernetes/budibase-release/values.yaml
wc -l values.release.yaml
- name: Deploy to Release Environment
uses: glopezep/helm@v1.7.1
with:
release: budibase-release
namespace: budibase
chart: charts/budibase
token: ${{ github.token }}
helm: helm3
values: |
globals:
appVersion: develop
ingress:
enabled: true
nginx: true
value-files: >-
[
"values.release.yaml"
]
env:
KUBECONFIG_FILE: '${{ secrets.RELEASE_KUBECONFIG }}'
- name: Re roll app-service
uses: actions-hub/kubectl@master
env:
KUBE_CONFIG: ${{ secrets.RELEASE_KUBECONFIG_BASE64 }}
with:
args: rollout restart deployment app-service -n budibase
- name: Re roll proxy-service
uses: actions-hub/kubectl@master
env:
KUBE_CONFIG: ${{ secrets.RELEASE_KUBECONFIG_BASE64 }}
with:
args: rollout restart deployment proxy-service -n budibase
- name: Re roll worker-service
uses: actions-hub/kubectl@master
env:
KUBE_CONFIG: ${{ secrets.RELEASE_KUBECONFIG_BASE64 }}
with:
args: rollout restart deployment worker-service -n budibase
- name: Discord Webhook Action
uses: tsickert/discord-webhook@v4.0.0
with:
webhook-url: ${{ secrets.PROD_DEPLOY_WEBHOOK_URL }}
content: "Release Env Deployment Complete: ${{ env.RELEASE_VERSION }} deployed to Budibase Release Env."
embed-title: ${{ env.RELEASE_VERSION }}


@ -117,4 +117,4 @@ jobs:
 with:
 repository: budibase/budibase-deploys
 event: budicloud-qa-deploy
 github_pat: ${{ secrets.GH_ACCESS_TOKEN }}


@ -35,9 +35,8 @@ env:
 PERSONAL_ACCESS_TOKEN : ${{ secrets.PERSONAL_ACCESS_TOKEN }}
 jobs:
-release:
+release-images:
 runs-on: ubuntu-latest
 steps:
 - name: Fail if branch is not master
 if: github.ref != 'refs/heads/master'
@ -57,14 +56,6 @@ jobs:
 - run: yarn lint
 - run: yarn build
 - run: yarn build:sdk
-- run: yarn test
-- name: Configure AWS Credentials
-uses: aws-actions/configure-aws-credentials@v1
-with:
-aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
-aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
-aws-region: eu-west-1
 - name: Publish budibase packages to NPM
 env:
@ -90,46 +81,63 @@ jobs:
 DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
 DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
 BUDIBASE_RELEASE_VERSION: ${{ steps.previoustag.outputs.tag }}
-- name: Configure AWS Credentials
-uses: aws-actions/configure-aws-credentials@v1
-with:
-aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
-aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
-aws-region: eu-west-1
-- name: Pull values.yaml from budibase-infra
-run: |
-curl -H "Authorization: token ${{ secrets.GH_PERSONAL_TOKEN }}" \
--H 'Accept: application/vnd.github.v3.raw' \
--o values.preprod.yaml \
--L https://api.github.com/repos/budibase/budibase-infra/contents/kubernetes/budibase-preprod/values.yaml
-wc -l values.preprod.yaml
-- name: Deploy to Preprod Environment
-uses: glopezep/helm@v1.7.1
-with:
-release: budibase-preprod
-namespace: budibase
-chart: charts/budibase
-token: ${{ github.token }}
-helm: helm3
-values: |
-globals:
-appVersion: ${{ steps.previoustag.outputs.tag }}
-ingress:
-enabled: true
-nginx: true
-value-files: >-
-[
-"values.preprod.yaml"
-]
-env:
-KUBECONFIG_FILE: '${{ secrets.PREPROD_KUBECONFIG }}'
-- name: Discord Webhook Action
-uses: tsickert/discord-webhook@v4.0.0
-with:
-webhook-url: ${{ secrets.PROD_DEPLOY_WEBHOOK_URL }}
-content: "Preprod Deployment Complete: ${{ steps.previoustag.outputs.tag }} deployed to Budibase Pre-prod."
-embed-title: ${{ steps.previoustag.outputs.tag }}
+release-helm-chart:
+needs: [release-images]
+runs-on: ubuntu-latest
+steps:
+- uses: actions/checkout@v2
+- name: Setup Helm
+uses: azure/setup-helm@v1
+id: helm-install
+- name: 'Get Previous tag'
+id: previoustag
+uses: "WyriHaximus/github-action-get-previous-tag@v1"
+# due to helm repo index issue: https://github.com/helm/helm/issues/7363
+# we need to create new package in a different dir, merge the index and move the package back
+- name: Build and release helm chart
+run: |
+git config user.name "Budibase Helm Bot"
+git config user.email "<>"
+git reset --hard
+git pull
+mkdir sync
+echo "Packaging chart to sync dir"
+helm package charts/budibase --version 0.0.0-master --app-version "$RELEASE_VERSION" --destination sync
+echo "Packaging successful"
+git checkout gh-pages
+echo "Indexing helm repo"
+helm repo index --merge docs/index.yaml sync
+mv -f sync/* docs
+rm -rf sync
+echo "Pushing new helm release"
+git add -A
+git commit -m "Helm Release: ${{ env.RELEASE_VERSION }}"
+git push
+env:
+RELEASE_VERSION: ${{ steps.previoustag.outputs.tag }}
+deploy-to-legacy-preprod-env:
+needs: [release-images]
+uses: ./.github/workflows/deploy-preprod.yml
+secrets: inherit
+# Trigger deploy to new EKS preprod environment
+trigger-deploy-to-preprod-env:
+needs: [release-helm-chart]
+runs-on: ubuntu-latest
+steps:
+- uses: actions/checkout@v2
+- name: 'Get Previous tag'
+id: previoustag
+uses: "WyriHaximus/github-action-get-previous-tag@v1"
+- uses: passeidireto/trigger-external-workflow-action@main
+env:
+PAYLOAD_VERSION: ${{ steps.previoustag.outputs.tag }}
+with:
+repository: budibase/budibase-deploys
+event: budicloud-preprod-deploy
+github_pat: ${{ secrets.GH_ACCESS_TOKEN }}


@ -16,9 +16,13 @@ jobs:
 - uses: actions/checkout@v2
 with:
-node-version: 14.x
 fetch_depth: 0
+- name: Use Node.js 14.x
+uses: actions/setup-node@v1
+with:
+node-version: 14.x
 - name: Get the latest budibase release version
 id: version
 run: |


@ -1,31 +0,0 @@
name: Budibase Nightly Tests
on:
workflow_dispatch:
schedule:
- cron: "0 5 * * *" # every day at 5AM
jobs:
nightly:
runs-on: [self-hosted, qa]
steps:
- uses: actions/checkout@v2
- name: Use Node.js 14.x
uses: actions/setup-node@v1
with:
node-version: 14.x
- name: QA Core Integration Tests
run: |
cd qa-core
yarn
yarn api:test:ci
env:
BUDIBASE_HOST: budicloud.qa.budibase.net
BUDIBASE_ACCOUNTS_URL: https://account-portal.budicloud.qa.budibase.net
- name: Cypress Discord Notify
run: yarn test:notify
env:
WEBHOOK_URL: ${{ secrets.BUDI_QA_WEBHOOK }}
GITHUB_RUN_URL: $GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID


@ -51,6 +51,14 @@ spec:
 value: {{ tpl .Values.services.proxy.upstreams.minio . | quote }}
 - name: COUCHDB_UPSTREAM_URL
 value: {{ .Values.services.couchdb.url | default (tpl .Values.services.proxy.upstreams.couchdb .) | quote }}
{{ if .Values.services.proxy.proxyRateLimitWebhooksPerSecond }}
- name: PROXY_RATE_LIMIT_WEBHOOKS_PER_SECOND
value: {{ .Values.services.proxy.proxyRateLimitWebhooksPerSecond | quote }}
{{ end }}
{{ if .Values.services.proxy.proxyRateLimitApiPerSecond }}
- name: PROXY_RATE_LIMIT_API_PER_SECOND
value: {{ .Values.services.proxy.proxyRateLimitApiPerSecond | quote }}
{{ end }}
 - name: RESOLVER
 {{ if .Values.services.proxy.resolver }}
 value: {{ .Values.services.proxy.resolver }}


@ -245,7 +245,7 @@ couchdb:
 ## The CouchDB image
 image:
 repository: couchdb
-tag: 3.2.1
+tag: 3.1.1
 pullPolicy: IfNotPresent
 ## Experimental integration with Lucene-powered fulltext search


@ -52,4 +52,14 @@ So this command will actually run the application in dev mode. It creates .env f
 The dev version will be available on port 10000 i.e.
 http://127.0.0.1:10000/builder/admin
### File descriptor issues with Vite and Chrome in Linux
If your dev environment stalls forever, with some network requests stuck in flight, it's likely that Chrome is trying to open more file descriptors than your system allows.
To fix this, apply the following tweaks.
Debian based distros:
Add `* - nofile 65536` to `/etc/security/limits.conf`.
Arch:
Add `DefaultLimitNOFILE=65536` to `/etc/systemd/system.conf`.
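A quick way to confirm the new limit has taken effect (assuming a standard login shell, after logging out and back in or rebooting):

```bash
# Print the soft limit on open file descriptors for the current shell.
# After applying one of the tweaks above it should report 65536.
ulimit -n
```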


@ -6,8 +6,7 @@ services:
 minio-service:
 container_name: budi-minio-dev
 restart: on-failure
-# Last version that supports the "fs" backend
-image: minio/minio:RELEASE.2022-10-24T18-35-07Z
+image: minio/minio
 volumes:
 - minio_data:/data
 ports:
@ -69,4 +68,4 @@ volumes:
 minio_data:
 driver: local
 redis_data:
 driver: local


@ -8,8 +8,8 @@ services:
 # Last version that supports the "fs" backend
 image: minio/minio:RELEASE.2022-10-24T18-35-07Z
 ports:
-- 9000
-- 9001
+- "9000"
+- "9001"
 environment:
 MINIO_ACCESS_KEY: ${MINIO_ACCESS_KEY}
 MINIO_SECRET_KEY: ${MINIO_SECRET_KEY}
@ -28,9 +28,9 @@ services:
 - COUCHDB_PASSWORD=${COUCH_DB_PASSWORD}
 - COUCHDB_USER=${COUCH_DB_USER}
 ports:
-- 5984
-- 4369
-- 9100
+- "5984"
+- "4369"
+- "9100"
 healthcheck:
 test: ["CMD", "curl", "-f", "http://localhost:5984/_up"]
 interval: 30s
@ -42,6 +42,6 @@ services:
 image: redis
 command: redis-server --requirepass ${REDIS_PASSWORD}
 ports:
-- 6379
+- "6379"
 healthcheck:
 test: ["CMD", "redis-cli", "ping"]


@ -55,12 +55,12 @@ http {
 set $csp_style "style-src 'self' 'unsafe-inline' https://cdn.jsdelivr.net https://fonts.googleapis.com https://rsms.me https://maxcdn.bootstrapcdn.com";
 set $csp_object "object-src 'none'";
 set $csp_base_uri "base-uri 'self'";
-set $csp_connect "connect-src 'self' https://*.budibase.net https://api-iam.intercom.io https://api-iam.intercom.io https://api-ping.intercom.io https://app.posthog.com wss://nexus-websocket-a.intercom.io wss://nexus-websocket-b.intercom.io https://nexus-websocket-a.intercom.io https://nexus-websocket-b.intercom.io https://uploads.intercomcdn.com https://uploads.intercomusercontent.com https://*.s3.*.amazonaws.com https://s3.*.amazonaws.com https://api.github.com";
+set $csp_connect "connect-src 'self' https://*.budibase.net https://api-iam.intercom.io https://api-iam.intercom.io https://api-ping.intercom.io https://app.posthog.com wss://nexus-websocket-a.intercom.io wss://nexus-websocket-b.intercom.io https://nexus-websocket-a.intercom.io https://nexus-websocket-b.intercom.io https://uploads.intercomcdn.com https://uploads.intercomusercontent.com https://*.s3.amazonaws.com https://*.s3.us-east-2.amazonaws.com https://*.s3.us-east-1.amazonaws.com https://*.s3.us-west-1.amazonaws.com https://*.s3.us-west-2.amazonaws.com https://*.s3.af-south-1.amazonaws.com https://*.s3.ap-east-1.amazonaws.com https://*.s3.ap-southeast-3.amazonaws.com https://*.s3.ap-south-1.amazonaws.com https://*.s3.ap-northeast-3.amazonaws.com https://*.s3.ap-northeast-2.amazonaws.com https://*.s3.ap-southeast-1.amazonaws.com https://*.s3.ap-southeast-2.amazonaws.com https://*.s3.ap-northeast-1.amazonaws.com https://*.s3.ca-central-1.amazonaws.com https://*.s3.cn-north-1.amazonaws.com https://*.s3.cn-northwest-1.amazonaws.com https://*.s3.eu-central-1.amazonaws.com https://*.s3.eu-west-1.amazonaws.com https://*.s3.eu-west-2.amazonaws.com https://*.s3.eu-south-1.amazonaws.com https://*.s3.eu-west-3.amazonaws.com https://*.s3.eu-north-1.amazonaws.com https://*.s3.sa-east-1.amazonaws.com https://*.s3.me-south-1.amazonaws.com https://*.s3.us-gov-east-1.amazonaws.com https://*.s3.us-gov-west-1.amazonaws.com https://api.github.com";
 set $csp_font "font-src 'self' data: https://cdn.jsdelivr.net https://fonts.gstatic.com https://rsms.me https://maxcdn.bootstrapcdn.com https://js.intercomcdn.com https://fonts.intercomcdn.com";
 set $csp_frame "frame-src 'self' https:";
 set $csp_img "img-src http: https: data: blob:";
 set $csp_manifest "manifest-src 'self'";
-set $csp_media "media-src 'self' https://js.intercomcdn.com";
+set $csp_media "media-src 'self' https://js.intercomcdn.com https://cdn.budi.live";
 set $csp_worker "worker-src 'none'";
 error_page 502 503 504 /error.html;


@ -1,5 +1,5 @@
 {
-"version": "2.3.18-alpha.12",
+"version": "2.4.12-alpha.0",
 "npmClient": "yarn",
 "packages": [
 "packages/*"


@ -13,7 +13,7 @@
"js-yaml": "^4.1.0", "js-yaml": "^4.1.0",
"kill-port": "^1.6.1", "kill-port": "^1.6.1",
"lerna": "3.14.1", "lerna": "3.14.1",
"madge": "^5.0.1", "madge": "^6.0.0",
"prettier": "^2.3.1", "prettier": "^2.3.1",
"prettier-plugin-svelte": "^2.3.0", "prettier-plugin-svelte": "^2.3.0",
"rimraf": "^3.0.2", "rimraf": "^3.0.2",
@ -44,7 +44,7 @@
"dev": "yarn run kill-all && lerna link && lerna run --parallel dev:builder --concurrency 1", "dev": "yarn run kill-all && lerna link && lerna run --parallel dev:builder --concurrency 1",
"dev:noserver": "yarn run kill-builder && lerna link && lerna run dev:stack:up && lerna run --parallel dev:builder --concurrency 1 --ignore @budibase/backend-core --ignore @budibase/server --ignore @budibase/worker", "dev:noserver": "yarn run kill-builder && lerna link && lerna run dev:stack:up && lerna run --parallel dev:builder --concurrency 1 --ignore @budibase/backend-core --ignore @budibase/server --ignore @budibase/worker",
"dev:server": "yarn run kill-server && lerna run --parallel dev:builder --concurrency 1 --scope @budibase/backend-core --scope @budibase/worker --scope @budibase/server", "dev:server": "yarn run kill-server && lerna run --parallel dev:builder --concurrency 1 --scope @budibase/backend-core --scope @budibase/worker --scope @budibase/server",
"test": "lerna run test && yarn test:pro", "test": "lerna run test",
"test:pro": "bash scripts/pro/test.sh", "test:pro": "bash scripts/pro/test.sh",
"lint:eslint": "eslint packages && eslint qa-core", "lint:eslint": "eslint packages && eslint qa-core",
"lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\" && prettier --check \"qa-core/**/*.{js,ts,svelte}\"", "lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\" && prettier --check \"qa-core/**/*.{js,ts,svelte}\"",
@ -84,4 +84,4 @@
"install:pro": "bash scripts/pro/install.sh", "install:pro": "bash scripts/pro/install.sh",
"dep:clean": "yarn clean && yarn bootstrap" "dep:clean": "yarn clean && yarn bootstrap"
} }
} }


@ -1,6 +1,6 @@
 {
 "name": "@budibase/backend-core",
-"version": "2.3.18-alpha.12",
+"version": "2.4.12-alpha.0",
 "description": "Budibase backend core libraries used in server and worker",
 "main": "dist/src/index.js",
 "types": "dist/src/index.d.ts",
@ -22,9 +22,9 @@
"test:watch": "jest --watchAll" "test:watch": "jest --watchAll"
}, },
"dependencies": { "dependencies": {
"@budibase/nano": "10.1.1", "@budibase/nano": "10.1.2",
"@budibase/pouchdb-replication-stream": "1.2.10", "@budibase/pouchdb-replication-stream": "1.2.10",
"@budibase/types": "2.3.18-alpha.12", "@budibase/types": "2.4.12-alpha.0",
"@shopify/jest-koa-mocks": "5.0.1", "@shopify/jest-koa-mocks": "5.0.1",
"@techpass/passport-openidconnect": "0.3.2", "@techpass/passport-openidconnect": "0.3.2",
"aws-cloudfront-sign": "2.2.0", "aws-cloudfront-sign": "2.2.0",


@ -2,25 +2,35 @@ const _passport = require("koa-passport")
 const LocalStrategy = require("passport-local").Strategy
 const JwtStrategy = require("passport-jwt").Strategy
 import { getGlobalDB } from "../context"
-const refresh = require("passport-oauth2-refresh")
-import { Config, Cookie } from "../constants"
-import { getScopedConfig } from "../db"
+import { Cookie } from "../constants"
 import { getSessionsForUser, invalidateSessions } from "../security/sessions"
 import {
+authenticated,
+csrf,
+google,
 jwt as jwtPassport,
 local,
-authenticated,
-tenancy,
-csrf,
 oidc,
-google,
+tenancy,
 } from "../middleware"
+import * as userCache from "../cache/user"
 import { invalidateUser } from "../cache/user"
-import { PlatformLogoutOpts, User } from "@budibase/types"
+import {
+ConfigType,
+GoogleInnerConfig,
+OIDCInnerConfig,
+PlatformLogoutOpts,
+SSOProviderType,
+User,
+} from "@budibase/types"
 import { logAlert } from "../logging"
 import * as events from "../events"
-import * as userCache from "../cache/user"
+import * as configs from "../configs"
 import { clearCookie, getCookie } from "../utils"
+import { ssoSaveUserNoOp } from "../middleware/passport/sso/sso"
+import env from "../environment"
+const refresh = require("passport-oauth2-refresh")
 export {
 auditLog,
 authError,
@ -33,7 +43,6 @@ export {
 google,
 oidc,
 } from "../middleware"
-import { ssoSaveUserNoOp } from "../middleware/passport/sso/sso"
 export const buildAuthMiddleware = authenticated
 export const buildTenancyMiddleware = tenancy
 export const buildCsrfMiddleware = csrf
@ -44,7 +53,7 @@ export const jwt = require("jsonwebtoken")
 _passport.use(new LocalStrategy(local.options, local.authenticate))
 if (jwtPassport.options.secretOrKey) {
 _passport.use(new JwtStrategy(jwtPassport.options, jwtPassport.authenticate))
-} else {
+} else if (!env.DISABLE_JWT_WARNING) {
 logAlert("No JWT Secret supplied, cannot configure JWT strategy")
 }
@ -63,11 +72,10 @@ _passport.deserializeUser(async (user: User, done: any) => {
 })
 async function refreshOIDCAccessToken(
-db: any,
-chosenConfig: any,
+chosenConfig: OIDCInnerConfig,
 refreshToken: string
-) {
+): Promise<RefreshResponse> {
-const callbackUrl = await oidc.getCallbackUrl(db, chosenConfig)
+const callbackUrl = await oidc.getCallbackUrl()
 let enrichedConfig: any
 let strategy: any
@ -90,7 +98,7 @@ async function refreshOIDCAccessToken(
 return new Promise(resolve => {
 refresh.requestNewAccessToken(
-Config.OIDC,
+ConfigType.OIDC,
 refreshToken,
 (err: any, accessToken: string, refreshToken: any, params: any) => {
 resolve({ err, accessToken, refreshToken, params })
@ -100,11 +108,10 @@ async function refreshOIDCAccessToken(
 }
 async function refreshGoogleAccessToken(
-db: any,
-config: any,
+config: GoogleInnerConfig,
 refreshToken: any
-) {
+): Promise<RefreshResponse> {
-let callbackUrl = await google.getCallbackUrl(db, config)
+let callbackUrl = await google.getCallbackUrl(config)
 let strategy
 try {
@ -124,7 +131,7 @@ async function refreshGoogleAccessToken(
 return new Promise(resolve => {
 refresh.requestNewAccessToken(
-Config.GOOGLE,
+ConfigType.GOOGLE,
 refreshToken,
 (err: any, accessToken: string, refreshToken: string, params: any) => {
 resolve({ err, accessToken, refreshToken, params })
@ -133,41 +140,37 @@ async function refreshGoogleAccessToken(
 })
 }
+interface RefreshResponse {
+err?: {
+data?: string
+}
+accessToken?: string
+refreshToken?: string
+params?: any
+}
 export async function refreshOAuthToken(
 refreshToken: string,
-configType: string,
-configId: string
-) {
+providerType: SSOProviderType,
+configId?: string
+): Promise<RefreshResponse> {
-const db = getGlobalDB()
-const config = await getScopedConfig(db, {
-type: configType,
-group: {},
-})
-let chosenConfig = {}
-let refreshResponse
-if (configType === Config.OIDC) {
-// configId - retrieved from cookie.
-chosenConfig = config.configs.filter((c: any) => c.uuid === configId)[0]
-if (!chosenConfig) {
-throw new Error("Invalid OIDC configuration")
-}
-refreshResponse = await refreshOIDCAccessToken(
-db,
-chosenConfig,
-refreshToken
-)
-} else {
-chosenConfig = config
-refreshResponse = await refreshGoogleAccessToken(
-db,
-chosenConfig,
-refreshToken
-)
-}
-return refreshResponse
+switch (providerType) {
+case SSOProviderType.OIDC:
+if (!configId) {
+return { err: { data: "OIDC config id not provided" } }
+}
+const oidcConfig = await configs.getOIDCConfigById(configId)
+if (!oidcConfig) {
+return { err: { data: "OIDC configuration not found" } }
+}
+return refreshOIDCAccessToken(oidcConfig, refreshToken)
+case SSOProviderType.GOOGLE:
+let googleConfig = await configs.getGoogleConfig()
+if (!googleConfig) {
+return { err: { data: "Google configuration not found" } }
+}
+return refreshGoogleAccessToken(googleConfig, refreshToken)
+}
 }
// TODO: Refactor to use user save function instead to prevent the need for // TODO: Refactor to use user save function instead to prevent the need for
@ -225,6 +228,6 @@ export async function platformLogout(opts: PlatformLogoutOpts) {
 const sessionIds = sessions.map(({ sessionId }) => sessionId)
 await invalidateSessions(userId, { sessionIds, reason: "logout" })
-await events.auth.logout()
+await events.auth.logout(ctx.user?.email)
 await userCache.invalidateUser(userId)
 }


@ -1,6 +1,6 @@
 import { getAppClient } from "../redis/init"
 import { doWithDB, DocumentType } from "../db"
-import { Database } from "@budibase/types"
+import { Database, App } from "@budibase/types"
 const AppState = {
 INVALID: "invalid",
@ -65,7 +65,7 @@ export async function getAppMetadata(appId: string) {
 if (isInvalid(metadata)) {
 throw { status: 404, message: "No app metadata found" }
 }
-return metadata
+return metadata as App
 }
 /**


@ -1,10 +1,13 @@
-import { structures, DBTestConfiguration } from "../../../tests"
+import {
+structures,
+DBTestConfiguration,
+expectFunctionWasCalledTimesWith,
+} from "../../../tests"
 import { Writethrough } from "../writethrough"
 import { getDB } from "../../db"
 import tk from "timekeeper"
-const START_DATE = Date.now()
-tk.freeze(START_DATE)
+tk.freeze(Date.now())
 const DELAY = 5000
@ -17,34 +20,99 @@ describe("writethrough", () => {
 const writethrough = new Writethrough(db, DELAY)
 const writethrough2 = new Writethrough(db2, DELAY)
+const docId = structures.uuid()
+beforeEach(() => {
+jest.clearAllMocks()
+})
 describe("put", () => {
-let first: any
+let current: any
 it("should be able to store, will go to DB", async () => {
 await config.doInTenant(async () => {
-const response = await writethrough.put({ _id: "test", value: 1 })
+const response = await writethrough.put({
+_id: docId,
+value: 1,
+})
 const output = await db.get(response.id)
-first = output
+current = output
 expect(output.value).toBe(1)
 })
 })
 it("second put shouldn't update DB", async () => {
 await config.doInTenant(async () => {
-const response = await writethrough.put({ ...first, value: 2 })
+const response = await writethrough.put({ ...current, value: 2 })
 const output = await db.get(response.id)
-expect(first._rev).toBe(output._rev)
+expect(current._rev).toBe(output._rev)
 expect(output.value).toBe(1)
 })
 })
 it("should put it again after delay period", async () => {
 await config.doInTenant(async () => {
-tk.freeze(START_DATE + DELAY + 1)
+tk.freeze(Date.now() + DELAY + 1)
-const response = await writethrough.put({ ...first, value: 3 })
+const response = await writethrough.put({ ...current, value: 3 })
 const output = await db.get(response.id)
-expect(response.rev).not.toBe(first._rev)
+expect(response.rev).not.toBe(current._rev)
 expect(output.value).toBe(3)
+current = output
})
})
it("should handle parallel DB updates ignoring conflicts", async () => {
await config.doInTenant(async () => {
tk.freeze(Date.now() + DELAY + 1)
const responses = await Promise.all([
writethrough.put({ ...current, value: 4 }),
writethrough.put({ ...current, value: 4 }),
writethrough.put({ ...current, value: 4 }),
])
const newRev = responses.map(x => x.rev).find(x => x !== current._rev)
expect(newRev).toBeDefined()
expect(responses.map(x => x.rev)).toEqual(
expect.arrayContaining([current._rev, current._rev, newRev])
)
expectFunctionWasCalledTimesWith(
console.warn,
2,
"bb-warn: Ignoring redlock conflict in write-through cache"
)
const output = await db.get(current._id)
expect(output.value).toBe(4)
expect(output._rev).toBe(newRev)
current = output
})
})
it("should handle updates with documents falling behind", async () => {
await config.doInTenant(async () => {
tk.freeze(Date.now() + DELAY + 1)
const id = structures.uuid()
await writethrough.put({ _id: id, value: 1 })
const doc = await writethrough.get(id)
// Updating document
tk.freeze(Date.now() + DELAY + 1)
await writethrough.put({ ...doc, value: 2 })
// Update with the old rev value
tk.freeze(Date.now() + DELAY + 1)
const res = await writethrough.put({
...doc,
value: 3,
})
expect(res.ok).toBe(true)
const output = await db.get(id)
expect(output.value).toBe(3)
expect(output._rev).toBe(res.rev)
 })
 })
 })
@ -52,8 +120,8 @@ describe("writethrough", () => {
describe("get", () => { describe("get", () => {
it("should be able to retrieve", async () => { it("should be able to retrieve", async () => {
await config.doInTenant(async () => { await config.doInTenant(async () => {
const response = await writethrough.get("test") const response = await writethrough.get(docId)
expect(response.value).toBe(3) expect(response.value).toBe(4)
}) })
}) })
}) })


@ -1,7 +1,8 @@
 import BaseCache from "./base"
 import { getWritethroughClient } from "../redis/init"
 import { logWarn } from "../logging"
-import { Database } from "@budibase/types"
+import { Database, Document, LockName, LockType } from "@budibase/types"
+import * as locks from "../redis/redlockImpl"
 const DEFAULT_WRITE_RATE_MS = 10000
 let CACHE: BaseCache | null = null
@ -27,44 +28,62 @@ function makeCacheItem(doc: any, lastWrite: number | null = null): CacheItem {
 return { doc, lastWrite: lastWrite || Date.now() }
 }
-export async function put(
+async function put(
 db: Database,
-doc: any,
+doc: Document,
 writeRateMs: number = DEFAULT_WRITE_RATE_MS
 ) {
 const cache = await getCache()
 const key = doc._id
-let cacheItem: CacheItem | undefined = await cache.get(makeCacheKey(db, key))
+let cacheItem: CacheItem | undefined
+if (key) {
+cacheItem = await cache.get(makeCacheKey(db, key))
+}
 const updateDb = !cacheItem || cacheItem.lastWrite < Date.now() - writeRateMs
 let output = doc
 if (updateDb) {
-const writeDb = async (toWrite: any) => {
-// doc should contain the _id and _rev
-const response = await db.put(toWrite)
-output = {
-...doc,
-_id: response.id,
-_rev: response.rev,
-}
-}
-try {
-await writeDb(doc)
-} catch (err: any) {
-if (err.status !== 409) {
-throw err
-} else {
-// Swallow 409s but log them
-logWarn(`Ignoring conflict in write-through cache`)
-}
-}
+const lockResponse = await locks.doWithLock(
+{
+type: LockType.TRY_ONCE,
+name: LockName.PERSIST_WRITETHROUGH,
+resource: key,
+ttl: 1000,
+},
+async () => {
+const writeDb = async (toWrite: any) => {
+// doc should contain the _id and _rev
+const response = await db.put(toWrite, { force: true })
+output = {
+...doc,
+_id: response.id,
+_rev: response.rev,
+}
+}
+try {
+await writeDb(doc)
+} catch (err: any) {
+if (err.status !== 409) {
+throw err
+} else {
+// Swallow 409s but log them
+logWarn(`Ignoring conflict in write-through cache`)
+}
+}
+}
+)
+if (!lockResponse.executed) {
+logWarn(`Ignoring redlock conflict in write-through cache`)
+}
 }
 // if we are updating the DB then need to set the lastWrite to now
 cacheItem = makeCacheItem(output, updateDb ? null : cacheItem?.lastWrite)
-await cache.store(makeCacheKey(db, key), cacheItem)
+if (output._id) {
+await cache.store(makeCacheKey(db, output._id), cacheItem)
+}
 return { ok: true, id: output._id, rev: output._rev }
 }
-export async function get(db: Database, id: string): Promise<any> {
+async function get(db: Database, id: string): Promise<any> {
 const cache = await getCache()
 const cacheKey = makeCacheKey(db, id)
 let cacheItem: CacheItem = await cache.get(cacheKey)
@ -76,11 +95,7 @@ export async function get(db: Database, id: string): Promise<any> {
 return cacheItem.doc
 }
-export async function remove(
-db: Database,
-docOrId: any,
-rev?: any
-): Promise<void> {
+async function remove(db: Database, docOrId: any, rev?: any): Promise<void> {
 const cache = await getCache()
 if (!docOrId) {
 throw new Error("No ID/Rev provided.")


@ -0,0 +1,244 @@
import {
Config,
ConfigType,
GoogleConfig,
GoogleInnerConfig,
OIDCConfig,
OIDCInnerConfig,
SettingsConfig,
SettingsInnerConfig,
SMTPConfig,
SMTPInnerConfig,
} from "@budibase/types"
import { DocumentType, SEPARATOR } from "../constants"
import { CacheKey, TTL, withCache } from "../cache"
import * as context from "../context"
import env from "../environment"
import environment from "../environment"
// UTILS
/**
* Generates a new configuration ID.
* @returns {string} The new configuration ID which the config doc can be stored under.
*/
export function generateConfigID(type: ConfigType) {
return `${DocumentType.CONFIG}${SEPARATOR}${type}`
}
export async function getConfig<T extends Config>(
type: ConfigType
): Promise<T | undefined> {
const db = context.getGlobalDB()
try {
// await to catch error
const config = (await db.get(generateConfigID(type))) as T
return config
} catch (e: any) {
if (e.status === 404) {
return
}
throw e
}
}
export async function save(
config: Config
): Promise<{ id: string; rev: string }> {
const db = context.getGlobalDB()
return db.put(config)
}
// SETTINGS
export async function getSettingsConfigDoc(): Promise<SettingsConfig> {
let config = await getConfig<SettingsConfig>(ConfigType.SETTINGS)
if (!config) {
config = {
_id: generateConfigID(ConfigType.SETTINGS),
type: ConfigType.SETTINGS,
config: {},
}
}
// overridden fields
config.config.platformUrl = await getPlatformUrl({
tenantAware: true,
config: config.config,
})
config.config.analyticsEnabled = await analyticsEnabled({
config: config.config,
})
return config
}
export async function getSettingsConfig(): Promise<SettingsInnerConfig> {
return (await getSettingsConfigDoc()).config
}
export async function getPlatformUrl(
opts: { tenantAware: boolean; config?: SettingsInnerConfig } = {
tenantAware: true,
}
) {
let platformUrl = env.PLATFORM_URL || "http://localhost:10000"
if (!env.SELF_HOSTED && env.MULTI_TENANCY && opts.tenantAware) {
// cloud and multi tenant - add the tenant to the default platform url
const tenantId = context.getTenantId()
if (!platformUrl.includes("localhost:")) {
platformUrl = platformUrl.replace("://", `://${tenantId}.`)
}
} else if (env.SELF_HOSTED) {
const config = opts?.config
? opts.config
: // direct to db to prevent infinite loop
(await getConfig<SettingsConfig>(ConfigType.SETTINGS))?.config
if (config?.platformUrl) {
platformUrl = config.platformUrl
}
}
return platformUrl
}
export const analyticsEnabled = async (opts?: {
config?: SettingsInnerConfig
}) => {
// cloud - always use the environment variable
if (!env.SELF_HOSTED) {
return !!env.ENABLE_ANALYTICS
}
// self host - prefer the settings doc
// use cache as events have high throughput
const enabledInDB = await withCache(
CacheKey.ANALYTICS_ENABLED,
TTL.ONE_DAY,
async () => {
const config = opts?.config
? opts.config
: // direct to db to prevent infinite loop
(await getConfig<SettingsConfig>(ConfigType.SETTINGS))?.config
// need to do explicit checks in case the field is not set
if (config?.analyticsEnabled === false) {
return false
} else if (config?.analyticsEnabled === true) {
return true
}
}
)
if (enabledInDB !== undefined) {
return enabledInDB
}
// fallback to the environment variable
// explicitly check for 0 or false here, undefined or otherwise is treated as true
const envEnabled: any = env.ENABLE_ANALYTICS
if (envEnabled === 0 || envEnabled === false) {
return false
} else {
return true
}
}
// GOOGLE
async function getGoogleConfigDoc(): Promise<GoogleConfig | undefined> {
return await getConfig<GoogleConfig>(ConfigType.GOOGLE)
}
export async function getGoogleConfig(): Promise<
GoogleInnerConfig | undefined
> {
const config = await getGoogleConfigDoc()
return config?.config
}
export async function getGoogleDatasourceConfig(): Promise<
GoogleInnerConfig | undefined
> {
if (!env.SELF_HOSTED) {
// always use the env vars in cloud
return getDefaultGoogleConfig()
}
// prefer the config in self-host
let config = await getGoogleConfig()
// fallback to env vars
if (!config || !config.activated) {
config = getDefaultGoogleConfig()
}
return config
}
export function getDefaultGoogleConfig(): GoogleInnerConfig | undefined {
if (environment.GOOGLE_CLIENT_ID && environment.GOOGLE_CLIENT_SECRET) {
return {
clientID: environment.GOOGLE_CLIENT_ID!,
clientSecret: environment.GOOGLE_CLIENT_SECRET!,
activated: true,
}
}
}
// OIDC
async function getOIDCConfigDoc(): Promise<OIDCConfig | undefined> {
return getConfig<OIDCConfig>(ConfigType.OIDC)
}
export async function getOIDCConfig(): Promise<OIDCInnerConfig | undefined> {
const config = (await getOIDCConfigDoc())?.config
// default to the 0th config
return config?.configs && config.configs[0]
}
/**
* @param configId The config id of the inner config to retrieve
*/
export async function getOIDCConfigById(
configId: string
): Promise<OIDCInnerConfig | undefined> {
const config = (await getConfig<OIDCConfig>(ConfigType.OIDC))?.config
return config && config.configs.filter((c: any) => c.uuid === configId)[0]
}
// SMTP
export async function getSMTPConfigDoc(): Promise<SMTPConfig | undefined> {
return getConfig<SMTPConfig>(ConfigType.SMTP)
}
export async function getSMTPConfig(
isAutomation?: boolean
): Promise<SMTPInnerConfig | undefined> {
const config = await getSMTPConfigDoc()
if (config) {
return config.config
}
// always allow fallback in self host
// in cloud don't allow for automations
const allowFallback = env.SELF_HOSTED || !isAutomation
// Use an SMTP fallback configuration from env variables
if (env.SMTP_FALLBACK_ENABLED && allowFallback) {
return {
port: env.SMTP_PORT,
host: env.SMTP_HOST!,
secure: false,
from: env.SMTP_FROM_ADDRESS!,
auth: {
user: env.SMTP_USER!,
pass: env.SMTP_PASSWORD!,
},
}
}
}


@ -0,0 +1 @@
export * from "./configs"


@ -0,0 +1,116 @@
import { DBTestConfiguration, generator, testEnv } from "../../../tests"
import { ConfigType } from "@budibase/types"
import env from "../../environment"
import * as configs from "../configs"
const DEFAULT_URL = "http://localhost:10000"
const ENV_URL = "http://env.com"
describe("configs", () => {
const config = new DBTestConfiguration()
const setDbPlatformUrl = async (dbUrl: string) => {
const settingsConfig = {
_id: configs.generateConfigID(ConfigType.SETTINGS),
type: ConfigType.SETTINGS,
config: {
platformUrl: dbUrl,
},
}
await configs.save(settingsConfig)
}
beforeEach(async () => {
config.newTenant()
})
describe("getPlatformUrl", () => {
describe("self host", () => {
beforeEach(async () => {
testEnv.selfHosted()
})
it("gets the default url", async () => {
await config.doInTenant(async () => {
const url = await configs.getPlatformUrl()
expect(url).toBe(DEFAULT_URL)
})
})
it("gets the platform url from the environment", async () => {
await config.doInTenant(async () => {
env._set("PLATFORM_URL", ENV_URL)
const url = await configs.getPlatformUrl()
expect(url).toBe(ENV_URL)
})
})
it("gets the platform url from the database", async () => {
await config.doInTenant(async () => {
const dbUrl = generator.url()
await setDbPlatformUrl(dbUrl)
const url = await configs.getPlatformUrl()
expect(url).toBe(dbUrl)
})
})
})
describe("cloud", () => {
function getTenantAwareUrl() {
return `http://${config.tenantId}.env.com`
}
beforeEach(async () => {
testEnv.cloudHosted()
testEnv.multiTenant()
env._set("PLATFORM_URL", ENV_URL)
})
it("gets the platform url from the environment without tenancy", async () => {
await config.doInTenant(async () => {
const url = await configs.getPlatformUrl({ tenantAware: false })
expect(url).toBe(ENV_URL)
})
})
it("gets the platform url from the environment with tenancy", async () => {
await config.doInTenant(async () => {
const url = await configs.getPlatformUrl()
expect(url).toBe(getTenantAwareUrl())
})
})
it("never gets the platform url from the database", async () => {
await config.doInTenant(async () => {
await setDbPlatformUrl(generator.url())
const url = await configs.getPlatformUrl()
expect(url).toBe(getTenantAwareUrl())
})
})
})
})
describe("getSettingsConfig", () => {
beforeAll(async () => {
testEnv.selfHosted()
env._set("PLATFORM_URL", "")
})
it("returns the platform url with an existing config", async () => {
await config.doInTenant(async () => {
const dbUrl = generator.url()
await setDbPlatformUrl(dbUrl)
const config = await configs.getSettingsConfig()
expect(config.platformUrl).toBe(dbUrl)
})
})
it("returns the platform url without an existing config", async () => {
await config.doInTenant(async () => {
const config = await configs.getSettingsConfig()
expect(config.platformUrl).toBe(DEFAULT_URL)
})
})
})
})


@ -68,6 +68,7 @@ export enum DocumentType {
 MEM_VIEW = "view",
 USER_FLAG = "flag",
 AUTOMATION_METADATA = "meta_au",
+AUDIT_LOG = "al",
 }
 export const StaticDatabases = {
@ -88,6 +89,9 @@ export const StaticDatabases = {
 install: "install",
 },
 },
+AUDIT_LOGS: {
+name: "audit-logs",
+},
 }
 export const APP_PREFIX = DocumentType.APP + SEPARATOR


@ -41,5 +41,6 @@ export enum Config {
 OIDC_LOGOS = "logos_oidc",
 }
+export const MIN_VALID_DATE = new Date(-2147483647000)
 export const MAX_VALID_DATE = new Date(2147483647000)
 export const DEFAULT_TENANT_ID = "default"


@ -1,5 +1,5 @@
 import { AsyncLocalStorage } from "async_hooks"
-import { ContextMap } from "./mainContext"
+import { ContextMap } from "./types"
 export default class Context {
 static storage = new AsyncLocalStorage<ContextMap>()


@ -5,6 +5,8 @@ import {
 isCloudAccount,
 Account,
 AccountUserContext,
+UserContext,
+Ctx,
 } from "@budibase/types"
 import * as context from "."
@ -16,15 +18,22 @@ export function doInIdentityContext(identity: IdentityContext, task: any) {
 return context.doInIdentityContext(identity, task)
 }
+// used in server/worker
-export function doInUserContext(user: User, task: any) {
-const userContext: any = {
+export function doInUserContext(user: User, ctx: Ctx, task: any) {
+const userContext: UserContext = {
 ...user,
 _id: user._id as string,
 type: IdentityType.USER,
+hostInfo: {
+ipAddress: ctx.request.ip,
+// filled in by koa-useragent package
+userAgent: ctx.userAgent._agent.source,
+},
 }
 return doInIdentityContext(userContext, task)
 }
+// used in account portal
 export function doInAccountContext(account: Account, task: any) {
 const _id = getAccountUserId(account)
 const tenantId = account.tenantId


@ -11,13 +11,7 @@ import {
 DEFAULT_TENANT_ID,
 } from "../constants"
 import { Database, IdentityContext } from "@budibase/types"
+import { ContextMap } from "./types"
-export type ContextMap = {
-tenantId?: string
-appId?: string
-identity?: IdentityContext
-environmentVariables?: Record<string, string>
-}
 let TEST_APP_ID: string | null = null
@ -30,14 +24,23 @@ export function getGlobalDBName(tenantId?: string) {
 return baseGlobalDBName(tenantId)
 }
-export function baseGlobalDBName(tenantId: string | undefined | null) {
-let dbName
-if (!tenantId || tenantId === DEFAULT_TENANT_ID) {
-dbName = StaticDatabases.GLOBAL.name
-} else {
-dbName = `${tenantId}${SEPARATOR}${StaticDatabases.GLOBAL.name}`
-}
-return dbName
+export function getAuditLogDBName(tenantId?: string) {
+if (!tenantId) {
+tenantId = getTenantId()
+}
+if (tenantId === DEFAULT_TENANT_ID) {
+return StaticDatabases.AUDIT_LOGS.name
+} else {
+return `${tenantId}${SEPARATOR}${StaticDatabases.AUDIT_LOGS.name}`
+}
+}
+export function baseGlobalDBName(tenantId: string | undefined | null) {
+if (!tenantId || tenantId === DEFAULT_TENANT_ID) {
+return StaticDatabases.GLOBAL.name
+} else {
+return `${tenantId}${SEPARATOR}${StaticDatabases.GLOBAL.name}`
+}
 }
 export function isMultiTenant() {
@ -228,6 +231,13 @@ export function getGlobalDB(): Database {
 return getDB(baseGlobalDBName(context?.tenantId))
 }
export function getAuditLogsDB(): Database {
if (!getTenantId()) {
throw new Error("No tenant ID found - cannot open audit log DB")
}
return getDB(getAuditLogDBName())
}
 /**
 * Gets the app database based on whatever the request
 * contained, dev or prod.


@ -0,0 +1,9 @@
import { IdentityContext } from "@budibase/types"
// keep this out of Budibase types, don't want to expose context info
export type ContextMap = {
tenantId?: string
appId?: string
identity?: IdentityContext
environmentVariables?: Record<string, string>
}


@ -7,3 +7,4 @@ export { default as Replication } from "./Replication"
 // exports to support old export structure
 export * from "../constants/db"
 export { getGlobalDBName, baseGlobalDBName } from "../context"
export * from "./lucene"


@ -0,0 +1,624 @@
import fetch from "node-fetch"
import { getCouchInfo } from "./couch"
import { SearchFilters, Row } from "@budibase/types"
const QUERY_START_REGEX = /\d[0-9]*:/g
interface SearchResponse<T> {
rows: T[] | any[]
bookmark: string
}
interface PaginatedSearchResponse<T> extends SearchResponse<T> {
hasNextPage: boolean
}
export type SearchParams<T> = {
tableId?: string
sort?: string
sortOrder?: string
sortType?: string
limit?: number
bookmark?: string
version?: string
indexer?: () => Promise<any>
disableEscaping?: boolean
rows?: T | Row[]
}
export function removeKeyNumbering(key: any): string {
if (typeof key === "string" && key.match(QUERY_START_REGEX) != null) {
const parts = key.split(":")
// remove the number
parts.shift()
return parts.join(":")
} else {
return key
}
}
/**
* Class to build lucene query URLs.
* Optionally takes a base lucene query object.
*/
export class QueryBuilder<T> {
dbName: string
index: string
query: SearchFilters
limit: number
sort?: string
bookmark?: string
sortOrder: string
sortType: string
includeDocs: boolean
version?: string
indexBuilder?: () => Promise<any>
noEscaping = false
constructor(dbName: string, index: string, base?: SearchFilters) {
this.dbName = dbName
this.index = index
this.query = {
allOr: false,
string: {},
fuzzy: {},
range: {},
equal: {},
notEqual: {},
empty: {},
notEmpty: {},
oneOf: {},
contains: {},
notContains: {},
containsAny: {},
...base,
}
this.limit = 50
this.sortOrder = "ascending"
this.sortType = "string"
this.includeDocs = true
}
disableEscaping() {
this.noEscaping = true
return this
}
setIndexBuilder(builderFn: () => Promise<any>) {
this.indexBuilder = builderFn
return this
}
setVersion(version?: string) {
if (version != null) {
this.version = version
}
return this
}
setTable(tableId: string) {
this.query.equal!.tableId = tableId
return this
}
setLimit(limit?: number) {
if (limit != null) {
this.limit = limit
}
return this
}
setSort(sort?: string) {
if (sort != null) {
this.sort = sort
}
return this
}
setSortOrder(sortOrder?: string) {
if (sortOrder != null) {
this.sortOrder = sortOrder
}
return this
}
setSortType(sortType?: string) {
if (sortType != null) {
this.sortType = sortType
}
return this
}
setBookmark(bookmark?: string) {
if (bookmark != null) {
this.bookmark = bookmark
}
return this
}
excludeDocs() {
this.includeDocs = false
return this
}
addString(key: string, partial: string) {
this.query.string![key] = partial
return this
}
addFuzzy(key: string, fuzzy: string) {
this.query.fuzzy![key] = fuzzy
return this
}
addRange(key: string, low: string | number, high: string | number) {
this.query.range![key] = {
low,
high,
}
return this
}
addEqual(key: string, value: any) {
this.query.equal![key] = value
return this
}
addNotEqual(key: string, value: any) {
this.query.notEqual![key] = value
return this
}
addEmpty(key: string, value: any) {
this.query.empty![key] = value
return this
}
addNotEmpty(key: string, value: any) {
this.query.notEmpty![key] = value
return this
}
addOneOf(key: string, value: any) {
this.query.oneOf![key] = value
return this
}
addContains(key: string, value: any) {
this.query.contains![key] = value
return this
}
addNotContains(key: string, value: any) {
this.query.notContains![key] = value
return this
}
addContainsAny(key: string, value: any) {
this.query.containsAny![key] = value
return this
}
handleSpaces(input: string) {
if (this.noEscaping) {
return input
} else {
return input.replace(/ /g, "_")
}
}
/**
* Preprocesses a value before going into a lucene search.
* Transforms strings to lowercase and wraps strings and bools in quotes.
* @param value The value to process
* @param options The preprocess options
* @returns {string|*}
*/
preprocess(value: any, { escape, lowercase, wrap, type }: any = {}) {
const hasVersion = !!this.version
// Determine if type needs wrapped
const originalType = typeof value
// Convert to lowercase
if (value && lowercase) {
value = value.toLowerCase ? value.toLowerCase() : value
}
// Escape characters
if (!this.noEscaping && escape && originalType === "string") {
value = `${value}`.replace(/[ #+\-&|!(){}\]^"~*?:\\]/g, "\\$&")
}
// Wrap in quotes
if (originalType === "string" && !isNaN(value) && !type) {
value = `"${value}"`
} else if (hasVersion && wrap) {
value = originalType === "number" ? value : `"${value}"`
}
return value
}
buildSearchQuery() {
const builder = this
let allOr = this.query && this.query.allOr
let query = allOr ? "" : "*:*"
const allPreProcessingOpts = { escape: true, lowercase: true, wrap: true }
let tableId
if (this.query.equal!.tableId) {
tableId = this.query.equal!.tableId
delete this.query.equal!.tableId
}
const equal = (key: string, value: any) => {
// 0 evaluates to false, which means we would return all rows if we don't check it
if (!value && value !== 0) {
return null
}
return `${key}:${builder.preprocess(value, allPreProcessingOpts)}`
}
const contains = (key: string, value: any, mode = "AND") => {
if (Array.isArray(value) && value.length === 0) {
return null
}
if (!Array.isArray(value)) {
return `${key}:${value}`
}
let statement = `${builder.preprocess(value[0], { escape: true })}`
for (let i = 1; i < value.length; i++) {
statement += ` ${mode} ${builder.preprocess(value[i], {
escape: true,
})}`
}
return `${key}:(${statement})`
}
const notContains = (key: string, value: any) => {
// @ts-ignore
const allPrefix = allOr === "" ? "*:* AND" : ""
return allPrefix + "NOT " + contains(key, value)
}
const containsAny = (key: string, value: any) => {
return contains(key, value, "OR")
}
const oneOf = (key: string, value: any) => {
if (!Array.isArray(value)) {
if (typeof value === "string") {
value = value.split(",")
} else {
return ""
}
}
let orStatement = `${builder.preprocess(value[0], allPreProcessingOpts)}`
for (let i = 1; i < value.length; i++) {
orStatement += ` OR ${builder.preprocess(
value[i],
allPreProcessingOpts
)}`
}
return `${key}:(${orStatement})`
}
function build(structure: any, queryFn: any) {
for (let [key, value] of Object.entries(structure)) {
// check for new format - remove numbering if needed
key = removeKeyNumbering(key)
key = builder.preprocess(builder.handleSpaces(key), {
escape: true,
})
const expression = queryFn(key, value)
if (expression == null) {
continue
}
if (query.length > 0) {
query += ` ${allOr ? "OR" : "AND"} `
}
query += expression
}
}
// Construct the actual lucene search query string from JSON structure
if (this.query.string) {
build(this.query.string, (key: string, value: any) => {
if (!value) {
return null
}
value = builder.preprocess(value, {
escape: true,
lowercase: true,
type: "string",
})
return `${key}:${value}*`
})
}
if (this.query.range) {
build(this.query.range, (key: string, value: any) => {
if (!value) {
return null
}
if (value.low == null || value.low === "") {
return null
}
if (value.high == null || value.high === "") {
return null
}
const low = builder.preprocess(value.low, allPreProcessingOpts)
const high = builder.preprocess(value.high, allPreProcessingOpts)
return `${key}:[${low} TO ${high}]`
})
}
if (this.query.fuzzy) {
build(this.query.fuzzy, (key: string, value: any) => {
if (!value) {
return null
}
value = builder.preprocess(value, {
escape: true,
lowercase: true,
type: "fuzzy",
})
return `${key}:${value}~`
})
}
if (this.query.equal) {
build(this.query.equal, equal)
}
if (this.query.notEqual) {
build(this.query.notEqual, (key: string, value: any) => {
if (!value) {
return null
}
return `!${key}:${builder.preprocess(value, allPreProcessingOpts)}`
})
}
if (this.query.empty) {
build(this.query.empty, (key: string) => `!${key}:["" TO *]`)
}
if (this.query.notEmpty) {
build(this.query.notEmpty, (key: string) => `${key}:["" TO *]`)
}
if (this.query.oneOf) {
build(this.query.oneOf, oneOf)
}
if (this.query.contains) {
build(this.query.contains, contains)
}
if (this.query.notContains) {
build(this.query.notContains, notContains)
}
if (this.query.containsAny) {
build(this.query.containsAny, containsAny)
}
// make sure table ID is always added as an AND
if (tableId) {
query = `(${query})`
allOr = false
build({ tableId }, equal)
}
return query
}
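/*
 * Illustrative example (assumes no version is set and escaping is left on;
 * "ta_users" is a hypothetical table ID):
 *   new QueryBuilder(dbName, index, { equal: { name: "word" } })
 *     .setTable("ta_users")
 *     .buildSearchQuery()
 * returns the lucene string:
 *   (*:* AND name:word) AND tableId:ta_users
 */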
buildSearchBody() {
let body: any = {
q: this.buildSearchQuery(),
limit: Math.min(this.limit, 200),
include_docs: this.includeDocs,
}
if (this.bookmark) {
body.bookmark = this.bookmark
}
if (this.sort) {
const order = this.sortOrder === "descending" ? "-" : ""
const type = `<${this.sortType}>`
body.sort = `${order}${this.handleSpaces(this.sort)}${type}`
}
return body
}
async run() {
const { url, cookie } = getCouchInfo()
const fullPath = `${url}/${this.dbName}/_design/database/_search/${this.index}`
const body = this.buildSearchBody()
try {
return await runQuery<T>(fullPath, body, cookie)
} catch (err: any) {
if (err.status === 404 && this.indexBuilder) {
await this.indexBuilder()
return await runQuery<T>(fullPath, body, cookie)
} else {
throw err
}
}
}
}
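/*
 * Illustrative usage sketch (hypothetical database name "app-db", index
 * "main" and table ID "ta_users"): chaining the builder and running the
 * search.
 *
 *   const response = await new QueryBuilder("app-db", "main")
 *     .setTable("ta_users")
 *     .setLimit(10)
 *     .setSort("name")
 *     .addString("name", "bud")
 *     .run()
 *
 * This POSTs { q: '(*:* AND name:bud*) AND tableId:ta_users', limit: 10,
 * include_docs: true, sort: "name<string>" } to the index's _search endpoint
 * and resolves to { rows: [...] } plus a bookmark when one is returned.
 */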
/**
* Executes a lucene search query.
* @param url The query URL
* @param body The request body defining search criteria
* @param cookie The auth cookie for CouchDB
* @returns {Promise<{rows: []}>}
*/
async function runQuery<T>(
url: string,
body: any,
cookie: string
): Promise<SearchResponse<T>> {
const response = await fetch(url, {
body: JSON.stringify(body),
method: "POST",
headers: {
Authorization: cookie,
},
})
if (response.status === 404) {
throw response
}
const json = await response.json()
let output: any = {
rows: [],
}
if (json.rows != null && json.rows.length > 0) {
output.rows = json.rows.map((row: any) => row.doc)
}
if (json.bookmark) {
output.bookmark = json.bookmark
}
return output
}
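/*
 * Illustrative response handling (shape taken from the code above): a CouchDB
 * search response such as
 *   { rows: [{ doc: { _id: "a", property: "word" } }], bookmark: "..." }
 * is flattened by runQuery to
 *   { rows: [{ _id: "a", property: "word" }], bookmark: "..." }
 */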
/**
* Gets round the fixed limit of 200 results from a query by fetching as many
* pages as required and concatenating the results. This recursively operates
* until enough results have been found.
* @param dbName {string} Which database to run a lucene query on
* @param index {string} Which search index to utilise
* @param query {object} The JSON query structure
* @param params {object} The search params including:
* tableId {string} The table ID to search
* sort {string} The sort column
* sortOrder {string} The sort order ("ascending" or "descending")
* sortType {string} Whether to treat sortable values as strings or
* numbers. ("string" or "number")
* limit {number} The number of results to fetch
* bookmark {string|null} Current bookmark in the recursive search
* rows {array|null} Current results in the recursive search
* @returns {Promise<*[]|*>}
*/
async function recursiveSearch<T>(
dbName: string,
index: string,
query: any,
params: any
): Promise<any> {
const bookmark = params.bookmark
const rows = params.rows || []
if (rows.length >= params.limit) {
return rows
}
let pageSize = 200
if (rows.length > params.limit - 200) {
pageSize = params.limit - rows.length
}
const page = await new QueryBuilder<T>(dbName, index, query)
.setVersion(params.version)
.setTable(params.tableId)
.setBookmark(bookmark)
.setLimit(pageSize)
.setSort(params.sort)
.setSortOrder(params.sortOrder)
.setSortType(params.sortType)
.run()
if (!page.rows.length) {
return rows
}
if (page.rows.length < 200) {
return [...rows, ...page.rows]
}
const newParams = {
...params,
bookmark: page.bookmark,
rows: [...rows, ...page.rows],
}
return await recursiveSearch(dbName, index, query, newParams)
}
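/*
 * Illustrative paging arithmetic (not part of the original file): with
 * params.limit = 450 the pages requested are 200, 200 and then 50 rows, and
 * the recursion stops early whenever a page comes back with fewer than 200
 * results.
 */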
/**
* Performs a paginated search. A bookmark will be returned to allow the next
 * page to be fetched. There is a maximum of 200 results per page in a
* paginated search.
* @param dbName {string} Which database to run a lucene query on
* @param index {string} Which search index to utilise
* @param query {object} The JSON query structure
* @param params {object} The search params including:
* tableId {string} The table ID to search
* sort {string} The sort column
* sortOrder {string} The sort order ("ascending" or "descending")
* sortType {string} Whether to treat sortable values as strings or
* numbers. ("string" or "number")
* limit {number} The desired page size
* bookmark {string} The bookmark to resume from
* @returns {Promise<{hasNextPage: boolean, rows: *[]}>}
*/
export async function paginatedSearch<T>(
dbName: string,
index: string,
query: SearchFilters,
params: SearchParams<T>
) {
let limit = params.limit
if (limit == null || isNaN(limit) || limit < 0) {
limit = 50
}
limit = Math.min(limit, 200)
const search = new QueryBuilder<T>(dbName, index, query)
if (params.version) {
search.setVersion(params.version)
}
if (params.tableId) {
search.setTable(params.tableId)
}
if (params.sort) {
search
.setSort(params.sort)
.setSortOrder(params.sortOrder)
.setSortType(params.sortType)
}
if (params.indexer) {
search.setIndexBuilder(params.indexer)
}
if (params.disableEscaping) {
search.disableEscaping()
}
const searchResults = await search
.setBookmark(params.bookmark)
.setLimit(limit)
.run()
// Try fetching 1 row in the next page to see if another page of results
// exists or not
search.setBookmark(searchResults.bookmark).setLimit(1)
if (params.tableId) {
search.setTable(params.tableId)
}
const nextResults = await search.run()
return {
...searchResults,
hasNextPage: nextResults.rows && nextResults.rows.length > 0,
}
}
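/*
 * Illustrative usage sketch (hypothetical database "app-db", index "main" and
 * table ID "ta_users"):
 *
 *   const page = await paginatedSearch("app-db", "main",
 *     { string: { name: "bud" } },
 *     { tableId: "ta_users", limit: 25, sort: "name",
 *       sortOrder: "ascending", sortType: "string" }
 *   )
 *
 * page.rows holds up to 25 matching documents, page.bookmark can be passed
 * back as params.bookmark to fetch the next page, and page.hasNextPage
 * indicates whether a further page exists.
 */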
/**
* Performs a full search, fetching multiple pages if required to return the
* desired amount of results. There is a limit of 1000 results to avoid
* heavy performance hits, and to avoid client components breaking from
* handling too much data.
* @param dbName {string} Which database to run a lucene query on
* @param index {string} Which search index to utilise
* @param query {object} The JSON query structure
* @param params {object} The search params including:
* tableId {string} The table ID to search
* sort {string} The sort column
* sortOrder {string} The sort order ("ascending" or "descending")
* sortType {string} Whether to treat sortable values as strings or
* numbers. ("string" or "number")
* limit {number} The desired number of results
* @returns {Promise<{rows: *}>}
*/
export async function fullSearch<T>(
dbName: string,
index: string,
query: SearchFilters,
params: SearchParams<T>
) {
let limit = params.limit
if (limit == null || isNaN(limit) || limit < 0) {
limit = 1000
}
params.limit = Math.min(limit, 1000)
const rows = await recursiveSearch<T>(dbName, index, query, params)
return { rows }
}
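/*
 * Illustrative usage sketch (same hypothetical database, index and table as
 * above; "status" is a hypothetical field): fetch every matching row, capped
 * at 1000 results.
 *
 *   const { rows } = await fullSearch("app-db", "main",
 *     { equal: { status: "active" } },
 *     { tableId: "ta_users", limit: 500 }
 *   )
 */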

View File

@ -0,0 +1,161 @@
import { newid } from "../../newid"
import { getDB } from "../db"
import { Database } from "@budibase/types"
import { QueryBuilder, paginatedSearch, fullSearch } from "../lucene"
const INDEX_NAME = "main"
const index = `function(doc) {
let props = ["property", "number"]
for (let key of props) {
if (doc[key]) {
index(key, doc[key])
}
}
}`
describe("lucene", () => {
let db: Database, dbName: string
beforeAll(async () => {
dbName = `db-${newid()}`
// create the DB for testing
db = getDB(dbName)
await db.put({ _id: newid(), property: "word" })
await db.put({ _id: newid(), property: "word2" })
await db.put({ _id: newid(), property: "word3", number: 1 })
})
it("should be able to create a lucene index", async () => {
const response = await db.put({
_id: "_design/database",
indexes: {
[INDEX_NAME]: {
index: index,
analyzer: "standard",
},
},
})
expect(response.ok).toBe(true)
})
describe("query builder", () => {
it("should be able to perform a basic query", async () => {
const builder = new QueryBuilder(dbName, INDEX_NAME)
builder.setSort("property")
builder.setSortOrder("desc")
builder.setSortType("string")
const resp = await builder.run()
expect(resp.rows.length).toBe(3)
})
it("should handle limits", async () => {
const builder = new QueryBuilder(dbName, INDEX_NAME)
builder.setLimit(1)
const resp = await builder.run()
expect(resp.rows.length).toBe(1)
})
it("should be able to perform a string search", async () => {
const builder = new QueryBuilder(dbName, INDEX_NAME)
builder.addString("property", "wo")
const resp = await builder.run()
expect(resp.rows.length).toBe(3)
})
it("should be able to perform a range search", async () => {
const builder = new QueryBuilder(dbName, INDEX_NAME)
builder.addRange("number", 0, 1)
const resp = await builder.run()
expect(resp.rows.length).toBe(1)
})
it("should be able to perform an equal search", async () => {
const builder = new QueryBuilder(dbName, INDEX_NAME)
builder.addEqual("property", "word2")
const resp = await builder.run()
expect(resp.rows.length).toBe(1)
})
it("should be able to perform a not equal search", async () => {
const builder = new QueryBuilder(dbName, INDEX_NAME)
builder.addNotEqual("property", "word2")
const resp = await builder.run()
expect(resp.rows.length).toBe(2)
})
it("should be able to perform an empty search", async () => {
const builder = new QueryBuilder(dbName, INDEX_NAME)
builder.addEmpty("number", true)
const resp = await builder.run()
expect(resp.rows.length).toBe(2)
})
it("should be able to perform a not empty search", async () => {
const builder = new QueryBuilder(dbName, INDEX_NAME)
builder.addNotEmpty("number", true)
const resp = await builder.run()
expect(resp.rows.length).toBe(1)
})
it("should be able to perform a one of search", async () => {
const builder = new QueryBuilder(dbName, INDEX_NAME)
builder.addOneOf("property", ["word", "word2"])
const resp = await builder.run()
expect(resp.rows.length).toBe(2)
})
it("should be able to perform a contains search", async () => {
const builder = new QueryBuilder(dbName, INDEX_NAME)
builder.addContains("property", ["word"])
const resp = await builder.run()
expect(resp.rows.length).toBe(1)
})
it("should be able to perform a not contains search", async () => {
const builder = new QueryBuilder(dbName, INDEX_NAME)
builder.addNotContains("property", ["word2"])
const resp = await builder.run()
expect(resp.rows.length).toBe(2)
})
})
describe("paginated search", () => {
it("should be able to perform a paginated search", async () => {
const page = await paginatedSearch(
dbName,
INDEX_NAME,
{
string: {
property: "wo",
},
},
{
limit: 1,
sort: "property",
sortType: "string",
sortOrder: "desc",
}
)
expect(page.rows.length).toBe(1)
expect(page.hasNextPage).toBe(true)
expect(page.bookmark).toBeDefined()
})
})
describe("full search", () => {
it("should be able to perform a full search", async () => {
const page = await fullSearch(
dbName,
INDEX_NAME,
{
string: {
property: "wo",
},
},
{}
)
expect(page.rows.length).toBe(3)
})
})
})

View File

@ -1,19 +1,13 @@
import { generator, DBTestConfiguration, testEnv } from "../../../tests"
import { import {
getDevelopmentAppID, getDevelopmentAppID,
getProdAppID, getProdAppID,
isDevAppID, isDevAppID,
isProdAppID, isProdAppID,
} from "../conversions" } from "../conversions"
import { generateAppID, getPlatformUrl, getScopedConfig } from "../utils" import { generateAppID } from "../utils"
import * as context from "../../context"
import { Config } from "../../constants"
import env from "../../environment"
describe("utils", () => { describe("utils", () => {
const config = new DBTestConfiguration() describe("generateAppID", () => {
describe("app ID manipulation", () => {
function getID() { function getID() {
const appId = generateAppID() const appId = generateAppID()
const split = appId.split("_") const split = appId.split("_")
@ -66,127 +60,4 @@ describe("utils", () => {
expect(isProdAppID(devAppId)).toEqual(false) expect(isProdAppID(devAppId)).toEqual(false)
}) })
}) })
const DEFAULT_URL = "http://localhost:10000"
const ENV_URL = "http://env.com"
const setDbPlatformUrl = async (dbUrl: string) => {
const db = context.getGlobalDB()
await db.put({
_id: "config_settings",
type: Config.SETTINGS,
config: {
platformUrl: dbUrl,
},
})
}
const clearSettingsConfig = async () => {
await config.doInTenant(async () => {
const db = context.getGlobalDB()
try {
const config = await db.get("config_settings")
await db.remove("config_settings", config._rev)
} catch (e: any) {
if (e.status !== 404) {
throw e
}
}
})
}
describe("getPlatformUrl", () => {
describe("self host", () => {
beforeEach(async () => {
testEnv.selfHosted()
await clearSettingsConfig()
})
it("gets the default url", async () => {
await config.doInTenant(async () => {
const url = await getPlatformUrl()
expect(url).toBe(DEFAULT_URL)
})
})
it("gets the platform url from the environment", async () => {
await config.doInTenant(async () => {
env._set("PLATFORM_URL", ENV_URL)
const url = await getPlatformUrl()
expect(url).toBe(ENV_URL)
})
})
it("gets the platform url from the database", async () => {
await config.doInTenant(async () => {
const dbUrl = generator.url()
await setDbPlatformUrl(dbUrl)
const url = await getPlatformUrl()
expect(url).toBe(dbUrl)
})
})
})
describe("cloud", () => {
const TENANT_AWARE_URL = `http://${config.tenantId}.env.com`
beforeEach(async () => {
testEnv.cloudHosted()
testEnv.multiTenant()
env._set("PLATFORM_URL", ENV_URL)
await clearSettingsConfig()
})
it("gets the platform url from the environment without tenancy", async () => {
await config.doInTenant(async () => {
const url = await getPlatformUrl({ tenantAware: false })
expect(url).toBe(ENV_URL)
})
})
it("gets the platform url from the environment with tenancy", async () => {
await config.doInTenant(async () => {
const url = await getPlatformUrl()
expect(url).toBe(TENANT_AWARE_URL)
})
})
it("never gets the platform url from the database", async () => {
await config.doInTenant(async () => {
await setDbPlatformUrl(generator.url())
const url = await getPlatformUrl()
expect(url).toBe(TENANT_AWARE_URL)
})
})
})
})
describe("getScopedConfig", () => {
describe("settings config", () => {
beforeEach(async () => {
env._set("SELF_HOSTED", 1)
env._set("PLATFORM_URL", "")
await clearSettingsConfig()
})
it("returns the platform url with an existing config", async () => {
await config.doInTenant(async () => {
const dbUrl = generator.url()
await setDbPlatformUrl(dbUrl)
const db = context.getGlobalDB()
const config = await getScopedConfig(db, { type: Config.SETTINGS })
expect(config.platformUrl).toBe(dbUrl)
})
})
it("returns the platform url without an existing config", async () => {
await config.doInTenant(async () => {
const db = context.getGlobalDB()
const config = await getScopedConfig(db, { type: Config.SETTINGS })
expect(config.platformUrl).toBe(DEFAULT_URL)
})
})
})
})
}) })

View File

@ -9,12 +9,11 @@ import {
InternalTable, InternalTable,
APP_PREFIX, APP_PREFIX,
} from "../constants" } from "../constants"
import { getTenantId, getGlobalDB, getGlobalDBName } from "../context" import { getTenantId, getGlobalDBName } from "../context"
import { doWithDB, directCouchAllDbs } from "./db" import { doWithDB, directCouchAllDbs } from "./db"
import { getAppMetadata } from "../cache/appMetadata" import { getAppMetadata } from "../cache/appMetadata"
import { isDevApp, isDevAppID, getProdAppID } from "./conversions" import { isDevApp, isDevAppID, getProdAppID } from "./conversions"
import * as events from "../events" import { App, Database } from "@budibase/types"
import { App, Database, ConfigType, isSettingsConfig } from "@budibase/types"
/** /**
* Generates a new app ID. * Generates a new app ID.
@ -366,6 +365,16 @@ export async function getAllApps({
} }
} }
export async function getAppsByIDs(appIds: string[]) {
const settled = await Promise.allSettled(
appIds.map(appId => getAppMetadata(appId))
)
// have to list the apps which exist, some may have been deleted
return settled
.filter(promise => promise.status === "fulfilled")
.map(promise => (promise as PromiseFulfilledResult<App>).value)
}
/** /**
* Utility function for getAllApps but filters to production apps only. * Utility function for getAllApps but filters to production apps only.
*/ */
@ -382,6 +391,16 @@ export async function getDevAppIDs() {
return apps.filter((id: any) => isDevAppID(id)) return apps.filter((id: any) => isDevAppID(id))
} }
export function isSameAppID(
appId1: string | undefined,
appId2: string | undefined
) {
if (appId1 == undefined || appId2 == undefined) {
return false
}
return getProdAppID(appId1) === getProdAppID(appId2)
}
export async function dbExists(dbName: any) { export async function dbExists(dbName: any) {
return doWithDB( return doWithDB(
dbName, dbName,
@ -392,32 +411,6 @@ export async function dbExists(dbName: any) {
) )
} }
/**
* Generates a new configuration ID.
* @returns {string} The new configuration ID which the config doc can be stored under.
*/
export const generateConfigID = ({ type, workspace, user }: any) => {
const scope = [type, workspace, user].filter(Boolean).join(SEPARATOR)
return `${DocumentType.CONFIG}${SEPARATOR}${scope}`
}
/**
* Gets parameters for retrieving configurations.
*/
export const getConfigParams = (
{ type, workspace, user }: any,
otherProps = {}
) => {
const scope = [type, workspace, user].filter(Boolean).join(SEPARATOR)
return {
...otherProps,
startkey: `${DocumentType.CONFIG}${SEPARATOR}${scope}`,
endkey: `${DocumentType.CONFIG}${SEPARATOR}${scope}${UNICODE_MAX}`,
}
}
/** /**
* Generates a new dev info document ID - this is scoped to a user. * Generates a new dev info document ID - this is scoped to a user.
* @returns {string} The new dev info ID which info for dev (like api key) can be stored under. * @returns {string} The new dev info ID which info for dev (like api key) can be stored under.
@ -441,109 +434,6 @@ export const getPluginParams = (pluginId?: string | null, otherProps = {}) => {
return getDocParams(DocumentType.PLUGIN, pluginId, otherProps) return getDocParams(DocumentType.PLUGIN, pluginId, otherProps)
} }
/**
* Returns the most granular configuration document from the DB based on the type, workspace and userID passed.
* @param {Object} db - db instance to query
* @param {Object} scopes - the type, workspace and userID scopes of the configuration.
* @returns The most granular configuration document based on the scope.
*/
export const getScopedFullConfig = async function (
db: any,
{ type, user, workspace }: any
) {
const response = await db.allDocs(
getConfigParams(
{ type, user, workspace },
{
include_docs: true,
}
)
)
function determineScore(row: any) {
const config = row.doc
// Config is specific to a user and a workspace
if (config._id.includes(generateConfigID({ type, user, workspace }))) {
return 4
} else if (config._id.includes(generateConfigID({ type, user }))) {
// Config is specific to a user only
return 3
} else if (config._id.includes(generateConfigID({ type, workspace }))) {
// Config is specific to a workspace only
return 2
} else if (config._id.includes(generateConfigID({ type }))) {
// Config is specific to a type only
return 1
}
return 0
}
// Find the config with the most granular scope based on context
let scopedConfig = response.rows.sort(
(a: any, b: any) => determineScore(a) - determineScore(b)
)[0]
// custom logic for settings doc
if (type === ConfigType.SETTINGS) {
if (!scopedConfig || !scopedConfig.doc) {
// defaults
scopedConfig = {
doc: {
_id: generateConfigID({ type, user, workspace }),
type: ConfigType.SETTINGS,
config: {
platformUrl: await getPlatformUrl({ tenantAware: true }),
analyticsEnabled: await events.analytics.enabled(),
},
},
}
}
// will always be true - use assertion function to get type access
if (isSettingsConfig(scopedConfig.doc)) {
// overrides affected by environment
scopedConfig.doc.config.platformUrl = await getPlatformUrl({
tenantAware: true,
})
scopedConfig.doc.config.analyticsEnabled =
await events.analytics.enabled()
}
}
return scopedConfig && scopedConfig.doc
}
export const getPlatformUrl = async (opts = { tenantAware: true }) => {
let platformUrl = env.PLATFORM_URL || "http://localhost:10000"
if (!env.SELF_HOSTED && env.MULTI_TENANCY && opts.tenantAware) {
// cloud and multi tenant - add the tenant to the default platform url
const tenantId = getTenantId()
if (!platformUrl.includes("localhost:")) {
platformUrl = platformUrl.replace("://", `://${tenantId}.`)
}
} else if (env.SELF_HOSTED) {
const db = getGlobalDB()
// get the doc directly instead of with getScopedConfig to prevent loop
let settings
try {
settings = await db.get(generateConfigID({ type: ConfigType.SETTINGS }))
} catch (e: any) {
if (e.status !== 404) {
throw e
}
}
// self hosted - check for platform url override
if (settings && settings.config && settings.config.platformUrl) {
platformUrl = settings.config.platformUrl
}
}
return platformUrl
}
export function pagination( export function pagination(
data: any[], data: any[],
pageSize: number, pageSize: number,
@ -577,8 +467,3 @@ export function pagination(
nextPage, nextPage,
} }
} }
export async function getScopedConfig(db: any, params: any) {
const configDoc = await getScopedFullConfig(db, params)
return configDoc && configDoc.config ? configDoc.config : configDoc
}

View File

@ -28,6 +28,8 @@ const DefaultBucketName = {
   PLUGINS: "plugins",
 }
 
+const selfHosted = !!parseInt(process.env.SELF_HOSTED || "")
+
 const environment = {
   isTest,
   isJest,
@ -58,7 +60,7 @@
     process.env.ACCOUNT_PORTAL_URL || "https://account.budibase.app",
   ACCOUNT_PORTAL_API_KEY: process.env.ACCOUNT_PORTAL_API_KEY || "",
   DISABLE_ACCOUNT_PORTAL: process.env.DISABLE_ACCOUNT_PORTAL,
-  SELF_HOSTED: !!parseInt(process.env.SELF_HOSTED || ""),
+  SELF_HOSTED: selfHosted,
   COOKIE_DOMAIN: process.env.COOKIE_DOMAIN,
   PLATFORM_URL: process.env.PLATFORM_URL || "",
   POSTHOG_TOKEN: process.env.POSTHOG_TOKEN,
@ -84,6 +86,23 @@
   DEPLOYMENT_ENVIRONMENT:
     process.env.DEPLOYMENT_ENVIRONMENT || "docker-compose",
   ENABLE_4XX_HTTP_LOGGING: process.env.ENABLE_4XX_HTTP_LOGGING || true,
+  ENABLE_AUDIT_LOG_IP_ADDR: process.env.ENABLE_AUDIT_LOG_IP_ADDR,
+  // smtp
+  SMTP_FALLBACK_ENABLED: process.env.SMTP_FALLBACK_ENABLED,
+  SMTP_USER: process.env.SMTP_USER,
+  SMTP_PASSWORD: process.env.SMTP_PASSWORD,
+  SMTP_HOST: process.env.SMTP_HOST,
+  SMTP_PORT: parseInt(process.env.SMTP_PORT || ""),
+  SMTP_FROM_ADDRESS: process.env.SMTP_FROM_ADDRESS,
+  DISABLE_JWT_WARNING: process.env.DISABLE_JWT_WARNING,
+  /**
+   * Enable to allow an admin user to login using a password.
+   * This can be useful to prevent lockout when configuring SSO.
+   * However, this should be turned OFF by default for security purposes.
+   */
+  ENABLE_SSO_MAINTENANCE_MODE: selfHosted
+    ? process.env.ENABLE_SSO_MAINTENANCE_MODE
+    : false,
   _set(key: any, value: any) {
     process.env[key] = value
     // @ts-ignore

View File

@ -1,55 +1,6 @@
import env from "../environment" import * as configs from "../configs"
import * as context from "../context"
import * as dbUtils from "../db/utils"
import { Config } from "../constants"
import { withCache, TTL, CacheKey } from "../cache"
// wrapper utility function
export const enabled = async () => { export const enabled = async () => {
// cloud - always use the environment variable return configs.analyticsEnabled()
if (!env.SELF_HOSTED) {
return !!env.ENABLE_ANALYTICS
}
// self host - prefer the settings doc
// use cache as events have high throughput
const enabledInDB = await withCache(
CacheKey.ANALYTICS_ENABLED,
TTL.ONE_DAY,
async () => {
const settings = await getSettingsDoc()
// need to do explicit checks in case the field is not set
if (settings?.config?.analyticsEnabled === false) {
return false
} else if (settings?.config?.analyticsEnabled === true) {
return true
}
}
)
if (enabledInDB !== undefined) {
return enabledInDB
}
// fallback to the environment variable
// explicitly check for 0 or false here, undefined or otherwise is treated as true
const envEnabled: any = env.ENABLE_ANALYTICS
if (envEnabled === 0 || envEnabled === false) {
return false
} else {
return true
}
}
const getSettingsDoc = async () => {
const db = context.getGlobalDB()
let settings
try {
settings = await db.get(dbUtils.generateConfigID({ type: Config.SETTINGS }))
} catch (e: any) {
if (e.status !== 404) {
throw e
}
}
return settings
} }

View File

@ -1,4 +1,4 @@
-import { Event } from "@budibase/types"
+import { Event, AuditedEventFriendlyName } from "@budibase/types"
 import { processors } from "./processors"
 import identification from "./identification"
 import * as backfill from "./backfill"

View File

@ -10,7 +10,6 @@ import {
isCloudAccount, isCloudAccount,
isSSOAccount, isSSOAccount,
TenantGroup, TenantGroup,
SettingsConfig,
CloudAccount, CloudAccount,
UserIdentity, UserIdentity,
InstallationGroup, InstallationGroup,
@ -19,10 +18,9 @@ import {
isSSOUser, isSSOUser,
} from "@budibase/types" } from "@budibase/types"
import { processors } from "./processors" import { processors } from "./processors"
import * as dbUtils from "../db/utils"
import { Config } from "../constants"
import { newid } from "../utils" import { newid } from "../utils"
import * as installation from "../installation" import * as installation from "../installation"
import * as configs from "../configs"
import { withCache, TTL, CacheKey } from "../cache/generic" import { withCache, TTL, CacheKey } from "../cache/generic"
const pkg = require("../../package.json") const pkg = require("../../package.json")
@ -89,6 +87,7 @@ const getCurrentIdentity = async (): Promise<Identity> => {
installationId, installationId,
tenantId, tenantId,
environment, environment,
hostInfo: userContext.hostInfo,
} }
} else { } else {
throw new Error("Unknown identity type") throw new Error("Unknown identity type")
@ -270,9 +269,7 @@ const getUniqueTenantId = async (tenantId: string): Promise<string> => {
return context.doInTenant(tenantId, () => { return context.doInTenant(tenantId, () => {
return withCache(CacheKey.UNIQUE_TENANT_ID, TTL.ONE_DAY, async () => { return withCache(CacheKey.UNIQUE_TENANT_ID, TTL.ONE_DAY, async () => {
const db = context.getGlobalDB() const db = context.getGlobalDB()
const config: SettingsConfig = await dbUtils.getScopedFullConfig(db, { const config = await configs.getSettingsConfigDoc()
type: Config.SETTINGS,
})
let uniqueTenantId: string let uniqueTenantId: string
if (config.config.uniqueTenantId) { if (config.config.uniqueTenantId) {

View File

@ -0,0 +1,93 @@
import {
Event,
Identity,
Group,
IdentityType,
AuditLogQueueEvent,
AuditLogFn,
HostInfo,
} from "@budibase/types"
import { EventProcessor } from "./types"
import { getAppId, doInTenant, getTenantId } from "../../context"
import BullQueue from "bull"
import { createQueue, JobQueue } from "../../queue"
import { isAudited } from "../../utils"
import env from "../../environment"
export default class AuditLogsProcessor implements EventProcessor {
static auditLogsEnabled = false
static auditLogQueue: BullQueue.Queue<AuditLogQueueEvent>
// can't use the constructor as we need to return a promise
static init(fn: AuditLogFn) {
AuditLogsProcessor.auditLogsEnabled = true
const writeAuditLogs = fn
AuditLogsProcessor.auditLogQueue = createQueue<AuditLogQueueEvent>(
JobQueue.AUDIT_LOG
)
return AuditLogsProcessor.auditLogQueue.process(async job => {
return doInTenant(job.data.tenantId, async () => {
let properties = job.data.properties
if (properties.audited) {
properties = {
...properties,
...properties.audited,
}
delete properties.audited
}
// this feature is disabled by default due to privacy requirements
// in some countries - available as an env var in case it is desired
// in self-hosted deployments
let hostInfo: HostInfo | undefined = {}
if (env.ENABLE_AUDIT_LOG_IP_ADDR) {
hostInfo = job.data.opts.hostInfo
}
await writeAuditLogs(job.data.event, properties, {
userId: job.data.opts.userId,
timestamp: job.data.opts.timestamp,
appId: job.data.opts.appId,
hostInfo,
})
})
})
}
async processEvent(
event: Event,
identity: Identity,
properties: any,
timestamp?: string
): Promise<void> {
if (AuditLogsProcessor.auditLogsEnabled && isAudited(event)) {
// only audit log actual events, don't include backfills
const userId =
identity.type === IdentityType.USER ? identity.id : undefined
// add to the event queue, rather than just writing immediately
await AuditLogsProcessor.auditLogQueue.add({
event,
properties,
opts: {
userId,
timestamp,
appId: getAppId(),
hostInfo: identity.hostInfo,
},
tenantId: getTenantId(),
})
}
}
async identify(identity: Identity, timestamp?: string | number) {
// no-op
}
async identifyGroup(group: Group, timestamp?: string | number) {
// no-op
}
shutdown(): void {
AuditLogsProcessor.auditLogQueue?.close()
}
}
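/*
 * Illustrative wiring sketch (writeAuditLog is a hypothetical AuditLogFn
 * supplied by the host application, not part of this module):
 *
 *   const writeAuditLog: AuditLogFn = async (event, properties, opts) => {
 *     // persist the audit log entry, e.g. to the tenant's audit log database
 *   }
 *   AuditLogsProcessor.init(writeAuditLog)
 *
 * Once initialised, every audited event published while audit logging is
 * enabled is queued and then written with the user, app and (optionally)
 * host information captured above.
 */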

View File

@ -1,8 +1,19 @@
 import AnalyticsProcessor from "./AnalyticsProcessor"
 import LoggingProcessor from "./LoggingProcessor"
+import AuditLogsProcessor from "./AuditLogsProcessor"
 import Processors from "./Processors"
+import { AuditLogFn } from "@budibase/types"
 
 export const analyticsProcessor = new AnalyticsProcessor()
 const loggingProcessor = new LoggingProcessor()
+const auditLogsProcessor = new AuditLogsProcessor()
 
-export const processors = new Processors([analyticsProcessor, loggingProcessor])
+export function init(auditingFn: AuditLogFn) {
+  return AuditLogsProcessor.init(auditingFn)
+}
+
+export const processors = new Processors([
+  analyticsProcessor,
+  loggingProcessor,
+  auditLogsProcessor,
+])

View File

@ -47,6 +47,8 @@ export default class PosthogProcessor implements EventProcessor {
       return
     }
 
+    properties = this.clearPIIProperties(properties)
+
     properties.version = pkg.version
     properties.service = env.SERVICE
     properties.environment = identity.environment
@ -79,6 +81,16 @@
     this.posthog.capture(payload)
   }
 
+  clearPIIProperties(properties: any) {
+    if (properties.email) {
+      delete properties.email
+    }
+    if (properties.audited) {
+      delete properties.audited
+    }
+    return properties
+  }
+
   async identify(identity: Identity, timestamp?: string | number) {
     const payload: any = { distinctId: identity.id, properties: identity }
     if (timestamp) {

View File

@ -49,6 +49,25 @@ describe("PosthogProcessor", () => {
     expect(processor.posthog.capture).toHaveBeenCalledTimes(0)
   })
 
+  it("removes audited information", async () => {
+    const processor = new PosthogProcessor("test")
+    const identity = newIdentity()
+
+    const properties = {
+      email: "test",
+      audited: {
+        name: "test",
+      },
+    }
+
+    await processor.processEvent(Event.USER_CREATED, identity, properties)
+    expect(processor.posthog.capture).toHaveBeenCalled()
+    // @ts-ignore
+    const call = processor.posthog.capture.mock.calls[0][0]
+    expect(call.properties.audited).toBeUndefined()
+    expect(call.properties.email).toBeUndefined()
+  })
+
   describe("rate limiting", () => {
     it("sends daily event once in same day", async () => {
       const processor = new PosthogProcessor("test")

View File

@ -19,6 +19,9 @@ const created = async (app: App, timestamp?: string | number) => {
const properties: AppCreatedEvent = { const properties: AppCreatedEvent = {
appId: app.appId, appId: app.appId,
version: app.version, version: app.version,
audited: {
name: app.name,
},
} }
await publishEvent(Event.APP_CREATED, properties, timestamp) await publishEvent(Event.APP_CREATED, properties, timestamp)
} }
@ -27,6 +30,9 @@ async function updated(app: App) {
const properties: AppUpdatedEvent = { const properties: AppUpdatedEvent = {
appId: app.appId, appId: app.appId,
version: app.version, version: app.version,
audited: {
name: app.name,
},
} }
await publishEvent(Event.APP_UPDATED, properties) await publishEvent(Event.APP_UPDATED, properties)
} }
@ -34,6 +40,9 @@ async function updated(app: App) {
async function deleted(app: App) { async function deleted(app: App) {
const properties: AppDeletedEvent = { const properties: AppDeletedEvent = {
appId: app.appId, appId: app.appId,
audited: {
name: app.name,
},
} }
await publishEvent(Event.APP_DELETED, properties) await publishEvent(Event.APP_DELETED, properties)
} }
@ -41,6 +50,9 @@ async function deleted(app: App) {
async function published(app: App, timestamp?: string | number) { async function published(app: App, timestamp?: string | number) {
const properties: AppPublishedEvent = { const properties: AppPublishedEvent = {
appId: app.appId, appId: app.appId,
audited: {
name: app.name,
},
} }
await publishEvent(Event.APP_PUBLISHED, properties, timestamp) await publishEvent(Event.APP_PUBLISHED, properties, timestamp)
} }
@ -48,6 +60,9 @@ async function published(app: App, timestamp?: string | number) {
async function unpublished(app: App) { async function unpublished(app: App) {
const properties: AppUnpublishedEvent = { const properties: AppUnpublishedEvent = {
appId: app.appId, appId: app.appId,
audited: {
name: app.name,
},
} }
await publishEvent(Event.APP_UNPUBLISHED, properties) await publishEvent(Event.APP_UNPUBLISHED, properties)
} }
@ -55,6 +70,9 @@ async function unpublished(app: App) {
async function fileImported(app: App) { async function fileImported(app: App) {
const properties: AppFileImportedEvent = { const properties: AppFileImportedEvent = {
appId: app.appId, appId: app.appId,
audited: {
name: app.name,
},
} }
await publishEvent(Event.APP_FILE_IMPORTED, properties) await publishEvent(Event.APP_FILE_IMPORTED, properties)
} }
@ -63,6 +81,9 @@ async function templateImported(app: App, templateKey: string) {
const properties: AppTemplateImportedEvent = { const properties: AppTemplateImportedEvent = {
appId: app.appId, appId: app.appId,
templateKey, templateKey,
audited: {
name: app.name,
},
} }
await publishEvent(Event.APP_TEMPLATE_IMPORTED, properties) await publishEvent(Event.APP_TEMPLATE_IMPORTED, properties)
} }
@ -76,6 +97,9 @@ async function versionUpdated(
appId: app.appId, appId: app.appId,
currentVersion, currentVersion,
updatedToVersion, updatedToVersion,
audited: {
name: app.name,
},
} }
await publishEvent(Event.APP_VERSION_UPDATED, properties) await publishEvent(Event.APP_VERSION_UPDATED, properties)
} }
@ -89,6 +113,9 @@ async function versionReverted(
appId: app.appId, appId: app.appId,
currentVersion, currentVersion,
revertedToVersion, revertedToVersion,
audited: {
name: app.name,
},
} }
await publishEvent(Event.APP_VERSION_REVERTED, properties) await publishEvent(Event.APP_VERSION_REVERTED, properties)
} }
@ -96,6 +123,9 @@ async function versionReverted(
async function reverted(app: App) { async function reverted(app: App) {
const properties: AppRevertedEvent = { const properties: AppRevertedEvent = {
appId: app.appId, appId: app.appId,
audited: {
name: app.name,
},
} }
await publishEvent(Event.APP_REVERTED, properties) await publishEvent(Event.APP_REVERTED, properties)
} }
@ -103,6 +133,9 @@ async function reverted(app: App) {
async function exported(app: App) { async function exported(app: App) {
const properties: AppExportedEvent = { const properties: AppExportedEvent = {
appId: app.appId, appId: app.appId,
audited: {
name: app.name,
},
} }
await publishEvent(Event.APP_EXPORTED, properties) await publishEvent(Event.APP_EXPORTED, properties)
} }

View File

@ -0,0 +1,26 @@
import {
Event,
AuditLogSearchParams,
AuditLogFilteredEvent,
AuditLogDownloadedEvent,
} from "@budibase/types"
import { publishEvent } from "../events"
async function filtered(search: AuditLogSearchParams) {
const properties: AuditLogFilteredEvent = {
filters: search,
}
await publishEvent(Event.AUDIT_LOGS_FILTERED, properties)
}
async function downloaded(search: AuditLogSearchParams) {
const properties: AuditLogDownloadedEvent = {
filters: search,
}
await publishEvent(Event.AUDIT_LOGS_DOWNLOADED, properties)
}
export default {
filtered,
downloaded,
}

View File

@ -12,19 +12,25 @@ import {
 } from "@budibase/types"
 import { identification } from ".."
 
-async function login(source: LoginSource) {
+async function login(source: LoginSource, email: string) {
   const identity = await identification.getCurrentIdentity()
   const properties: LoginEvent = {
     userId: identity.id,
     source,
+    audited: {
+      email,
+    },
   }
   await publishEvent(Event.AUTH_LOGIN, properties)
 }
 
-async function logout() {
+async function logout(email?: string) {
   const identity = await identification.getCurrentIdentity()
   const properties: LogoutEvent = {
     userId: identity.id,
+    audited: {
+      email,
+    },
   }
   await publishEvent(Event.AUTH_LOGOUT, properties)
 }

View File

@ -18,6 +18,9 @@ async function created(automation: Automation, timestamp?: string | number) {
automationId: automation._id as string, automationId: automation._id as string,
triggerId: automation.definition?.trigger?.id, triggerId: automation.definition?.trigger?.id,
triggerType: automation.definition?.trigger?.stepId, triggerType: automation.definition?.trigger?.stepId,
audited: {
name: automation.name,
},
} }
await publishEvent(Event.AUTOMATION_CREATED, properties, timestamp) await publishEvent(Event.AUTOMATION_CREATED, properties, timestamp)
} }
@ -38,6 +41,9 @@ async function deleted(automation: Automation) {
automationId: automation._id as string, automationId: automation._id as string,
triggerId: automation.definition?.trigger?.id, triggerId: automation.definition?.trigger?.id,
triggerType: automation.definition?.trigger?.stepId, triggerType: automation.definition?.trigger?.stepId,
audited: {
name: automation.name,
},
} }
await publishEvent(Event.AUTOMATION_DELETED, properties) await publishEvent(Event.AUTOMATION_DELETED, properties)
} }
@ -71,6 +77,9 @@ async function stepCreated(
triggerType: automation.definition?.trigger?.stepId, triggerType: automation.definition?.trigger?.stepId,
stepId: step.id!, stepId: step.id!,
stepType: step.stepId, stepType: step.stepId,
audited: {
name: automation.name,
},
} }
await publishEvent(Event.AUTOMATION_STEP_CREATED, properties, timestamp) await publishEvent(Event.AUTOMATION_STEP_CREATED, properties, timestamp)
} }
@ -83,6 +92,9 @@ async function stepDeleted(automation: Automation, step: AutomationStep) {
triggerType: automation.definition?.trigger?.stepId, triggerType: automation.definition?.trigger?.stepId,
stepId: step.id!, stepId: step.id!,
stepType: step.stepId, stepType: step.stepId,
audited: {
name: automation.name,
},
} }
await publishEvent(Event.AUTOMATION_STEP_DELETED, properties) await publishEvent(Event.AUTOMATION_STEP_DELETED, properties)
} }

View File

@ -13,6 +13,7 @@ async function appBackupRestored(backup: AppBackup) {
     appId: backup.appId,
     restoreId: backup._id!,
     backupCreatedAt: backup.timestamp,
+    name: backup.name as string,
   }
 
   await publishEvent(Event.APP_BACKUP_RESTORED, properties)
@ -22,13 +23,15 @@
   appId: string,
   backupId: string,
   type: AppBackupType,
-  trigger: AppBackupTrigger
+  trigger: AppBackupTrigger,
+  name: string
 ) {
   const properties: AppBackupTriggeredEvent = {
     appId: appId,
     backupId,
     type,
     trigger,
+    name,
   }
   await publishEvent(Event.APP_BACKUP_TRIGGERED, properties)
 }

View File

@ -8,12 +8,16 @@ import {
GroupUsersAddedEvent, GroupUsersAddedEvent,
GroupUsersDeletedEvent, GroupUsersDeletedEvent,
GroupAddedOnboardingEvent, GroupAddedOnboardingEvent,
GroupPermissionsEditedEvent,
UserGroupRoles, UserGroupRoles,
} from "@budibase/types" } from "@budibase/types"
async function created(group: UserGroup, timestamp?: number) { async function created(group: UserGroup, timestamp?: number) {
const properties: GroupCreatedEvent = { const properties: GroupCreatedEvent = {
groupId: group._id as string, groupId: group._id as string,
audited: {
name: group.name,
},
} }
await publishEvent(Event.USER_GROUP_CREATED, properties, timestamp) await publishEvent(Event.USER_GROUP_CREATED, properties, timestamp)
} }
@ -21,6 +25,9 @@ async function created(group: UserGroup, timestamp?: number) {
async function updated(group: UserGroup) { async function updated(group: UserGroup) {
const properties: GroupUpdatedEvent = { const properties: GroupUpdatedEvent = {
groupId: group._id as string, groupId: group._id as string,
audited: {
name: group.name,
},
} }
await publishEvent(Event.USER_GROUP_UPDATED, properties) await publishEvent(Event.USER_GROUP_UPDATED, properties)
} }
@ -28,6 +35,9 @@ async function updated(group: UserGroup) {
async function deleted(group: UserGroup) { async function deleted(group: UserGroup) {
const properties: GroupDeletedEvent = { const properties: GroupDeletedEvent = {
groupId: group._id as string, groupId: group._id as string,
audited: {
name: group.name,
},
} }
await publishEvent(Event.USER_GROUP_DELETED, properties) await publishEvent(Event.USER_GROUP_DELETED, properties)
} }
@ -36,6 +46,9 @@ async function usersAdded(count: number, group: UserGroup) {
const properties: GroupUsersAddedEvent = { const properties: GroupUsersAddedEvent = {
count, count,
groupId: group._id as string, groupId: group._id as string,
audited: {
name: group.name,
},
} }
await publishEvent(Event.USER_GROUP_USERS_ADDED, properties) await publishEvent(Event.USER_GROUP_USERS_ADDED, properties)
} }
@ -44,6 +57,9 @@ async function usersDeleted(count: number, group: UserGroup) {
const properties: GroupUsersDeletedEvent = { const properties: GroupUsersDeletedEvent = {
count, count,
groupId: group._id as string, groupId: group._id as string,
audited: {
name: group.name,
},
} }
await publishEvent(Event.USER_GROUP_USERS_REMOVED, properties) await publishEvent(Event.USER_GROUP_USERS_REMOVED, properties)
} }
@ -56,9 +72,13 @@ async function createdOnboarding(groupId: string) {
await publishEvent(Event.USER_GROUP_ONBOARDING, properties) await publishEvent(Event.USER_GROUP_ONBOARDING, properties)
} }
async function permissionsEdited(roles: UserGroupRoles) { async function permissionsEdited(group: UserGroup) {
const properties: UserGroupRoles = { const properties: GroupPermissionsEditedEvent = {
...roles, permissions: group.roles!,
groupId: group._id as string,
audited: {
name: group.name,
},
} }
await publishEvent(Event.USER_GROUP_PERMISSIONS_EDITED, properties) await publishEvent(Event.USER_GROUP_PERMISSIONS_EDITED, properties)
} }

View File

@ -21,3 +21,4 @@ export { default as group } from "./group"
 export { default as plugin } from "./plugin"
 export { default as backup } from "./backup"
 export { default as environmentVariable } from "./environmentVariable"
+export { default as auditLog } from "./auditLog"

View File

@ -11,6 +11,9 @@ async function created(screen: Screen, timestamp?: string | number) {
     layoutId: screen.layoutId,
     screenId: screen._id as string,
     roleId: screen.routing.roleId,
+    audited: {
+      name: screen.routing?.route,
+    },
   }
   await publishEvent(Event.SCREEN_CREATED, properties, timestamp)
 }
@ -20,6 +23,9 @@ async function deleted(screen: Screen) {
     layoutId: screen.layoutId,
     screenId: screen._id as string,
     roleId: screen.routing.roleId,
+    audited: {
+      name: screen.routing?.route,
+    },
   }
   await publishEvent(Event.SCREEN_DELETED, properties)
 }

View File

@ -13,6 +13,9 @@ import {
async function created(table: Table, timestamp?: string | number) { async function created(table: Table, timestamp?: string | number) {
const properties: TableCreatedEvent = { const properties: TableCreatedEvent = {
tableId: table._id as string, tableId: table._id as string,
audited: {
name: table.name,
},
} }
await publishEvent(Event.TABLE_CREATED, properties, timestamp) await publishEvent(Event.TABLE_CREATED, properties, timestamp)
} }
@ -20,6 +23,9 @@ async function created(table: Table, timestamp?: string | number) {
async function updated(table: Table) { async function updated(table: Table) {
const properties: TableUpdatedEvent = { const properties: TableUpdatedEvent = {
tableId: table._id as string, tableId: table._id as string,
audited: {
name: table.name,
},
} }
await publishEvent(Event.TABLE_UPDATED, properties) await publishEvent(Event.TABLE_UPDATED, properties)
} }
@ -27,6 +33,9 @@ async function updated(table: Table) {
async function deleted(table: Table) { async function deleted(table: Table) {
const properties: TableDeletedEvent = { const properties: TableDeletedEvent = {
tableId: table._id as string, tableId: table._id as string,
audited: {
name: table.name,
},
} }
await publishEvent(Event.TABLE_DELETED, properties) await publishEvent(Event.TABLE_DELETED, properties)
} }
@ -35,6 +44,9 @@ async function exported(table: Table, format: TableExportFormat) {
const properties: TableExportedEvent = { const properties: TableExportedEvent = {
tableId: table._id as string, tableId: table._id as string,
format, format,
audited: {
name: table.name,
},
} }
await publishEvent(Event.TABLE_EXPORTED, properties) await publishEvent(Event.TABLE_EXPORTED, properties)
} }
@ -42,6 +54,9 @@ async function exported(table: Table, format: TableExportFormat) {
async function imported(table: Table) { async function imported(table: Table) {
const properties: TableImportedEvent = { const properties: TableImportedEvent = {
tableId: table._id as string, tableId: table._id as string,
audited: {
name: table.name,
},
} }
await publishEvent(Event.TABLE_IMPORTED, properties) await publishEvent(Event.TABLE_IMPORTED, properties)
} }

View File

@ -19,6 +19,9 @@ import {
async function created(user: User, timestamp?: number) { async function created(user: User, timestamp?: number) {
const properties: UserCreatedEvent = { const properties: UserCreatedEvent = {
userId: user._id as string, userId: user._id as string,
audited: {
email: user.email,
},
} }
await publishEvent(Event.USER_CREATED, properties, timestamp) await publishEvent(Event.USER_CREATED, properties, timestamp)
} }
@ -26,6 +29,9 @@ async function created(user: User, timestamp?: number) {
async function updated(user: User) { async function updated(user: User) {
const properties: UserUpdatedEvent = { const properties: UserUpdatedEvent = {
userId: user._id as string, userId: user._id as string,
audited: {
email: user.email,
},
} }
await publishEvent(Event.USER_UPDATED, properties) await publishEvent(Event.USER_UPDATED, properties)
} }
@ -33,6 +39,9 @@ async function updated(user: User) {
async function deleted(user: User) { async function deleted(user: User) {
const properties: UserDeletedEvent = { const properties: UserDeletedEvent = {
userId: user._id as string, userId: user._id as string,
audited: {
email: user.email,
},
} }
await publishEvent(Event.USER_DELETED, properties) await publishEvent(Event.USER_DELETED, properties)
} }
@ -40,6 +49,9 @@ async function deleted(user: User) {
export async function onboardingComplete(user: User) { export async function onboardingComplete(user: User) {
const properties: UserOnboardingEvent = { const properties: UserOnboardingEvent = {
userId: user._id as string, userId: user._id as string,
audited: {
email: user.email,
},
} }
await publishEvent(Event.USER_ONBOARDING_COMPLETE, properties) await publishEvent(Event.USER_ONBOARDING_COMPLETE, properties)
} }
@ -49,6 +61,9 @@ export async function onboardingComplete(user: User) {
async function permissionAdminAssigned(user: User, timestamp?: number) { async function permissionAdminAssigned(user: User, timestamp?: number) {
const properties: UserPermissionAssignedEvent = { const properties: UserPermissionAssignedEvent = {
userId: user._id as string, userId: user._id as string,
audited: {
email: user.email,
},
} }
await publishEvent( await publishEvent(
Event.USER_PERMISSION_ADMIN_ASSIGNED, Event.USER_PERMISSION_ADMIN_ASSIGNED,
@ -60,6 +75,9 @@ async function permissionAdminAssigned(user: User, timestamp?: number) {
async function permissionAdminRemoved(user: User) { async function permissionAdminRemoved(user: User) {
const properties: UserPermissionRemovedEvent = { const properties: UserPermissionRemovedEvent = {
userId: user._id as string, userId: user._id as string,
audited: {
email: user.email,
},
} }
await publishEvent(Event.USER_PERMISSION_ADMIN_REMOVED, properties) await publishEvent(Event.USER_PERMISSION_ADMIN_REMOVED, properties)
} }
@ -67,6 +85,9 @@ async function permissionAdminRemoved(user: User) {
async function permissionBuilderAssigned(user: User, timestamp?: number) { async function permissionBuilderAssigned(user: User, timestamp?: number) {
const properties: UserPermissionAssignedEvent = { const properties: UserPermissionAssignedEvent = {
userId: user._id as string, userId: user._id as string,
audited: {
email: user.email,
},
} }
await publishEvent( await publishEvent(
Event.USER_PERMISSION_BUILDER_ASSIGNED, Event.USER_PERMISSION_BUILDER_ASSIGNED,
@ -78,20 +99,30 @@ async function permissionBuilderAssigned(user: User, timestamp?: number) {
async function permissionBuilderRemoved(user: User) { async function permissionBuilderRemoved(user: User) {
const properties: UserPermissionRemovedEvent = { const properties: UserPermissionRemovedEvent = {
userId: user._id as string, userId: user._id as string,
audited: {
email: user.email,
},
} }
await publishEvent(Event.USER_PERMISSION_BUILDER_REMOVED, properties) await publishEvent(Event.USER_PERMISSION_BUILDER_REMOVED, properties)
} }
// INVITE // INVITE
async function invited() { async function invited(email: string) {
const properties: UserInvitedEvent = {} const properties: UserInvitedEvent = {
audited: {
email,
},
}
await publishEvent(Event.USER_INVITED, properties) await publishEvent(Event.USER_INVITED, properties)
} }
async function inviteAccepted(user: User) { async function inviteAccepted(user: User) {
const properties: UserInviteAcceptedEvent = { const properties: UserInviteAcceptedEvent = {
userId: user._id as string, userId: user._id as string,
audited: {
email: user.email,
},
} }
await publishEvent(Event.USER_INVITED_ACCEPTED, properties) await publishEvent(Event.USER_INVITED_ACCEPTED, properties)
} }
@ -101,6 +132,9 @@ async function inviteAccepted(user: User) {
async function passwordForceReset(user: User) { async function passwordForceReset(user: User) {
const properties: UserPasswordForceResetEvent = { const properties: UserPasswordForceResetEvent = {
userId: user._id as string, userId: user._id as string,
audited: {
email: user.email,
},
} }
await publishEvent(Event.USER_PASSWORD_FORCE_RESET, properties) await publishEvent(Event.USER_PASSWORD_FORCE_RESET, properties)
} }
@ -108,6 +142,9 @@ async function passwordForceReset(user: User) {
async function passwordUpdated(user: User) { async function passwordUpdated(user: User) {
const properties: UserPasswordUpdatedEvent = { const properties: UserPasswordUpdatedEvent = {
userId: user._id as string, userId: user._id as string,
audited: {
email: user.email,
},
} }
await publishEvent(Event.USER_PASSWORD_UPDATED, properties) await publishEvent(Event.USER_PASSWORD_UPDATED, properties)
} }
@ -115,6 +152,9 @@ async function passwordUpdated(user: User) {
async function passwordResetRequested(user: User) { async function passwordResetRequested(user: User) {
const properties: UserPasswordResetRequestedEvent = { const properties: UserPasswordResetRequestedEvent = {
userId: user._id as string, userId: user._id as string,
audited: {
email: user.email,
},
} }
await publishEvent(Event.USER_PASSWORD_RESET_REQUESTED, properties) await publishEvent(Event.USER_PASSWORD_RESET_REQUESTED, properties)
} }
@ -122,6 +162,9 @@ async function passwordResetRequested(user: User) {
async function passwordReset(user: User) { async function passwordReset(user: User) {
const properties: UserPasswordResetEvent = { const properties: UserPasswordResetEvent = {
userId: user._id as string, userId: user._id as string,
audited: {
email: user.email,
},
} }
await publishEvent(Event.USER_PASSWORD_RESET, properties) await publishEvent(Event.USER_PASSWORD_RESET, properties)
} }

View File

@ -1,3 +1,4 @@
+export * as configs from "./configs"
 export * as events from "./events"
 export * as migrations from "./migrations"
 export * as users from "./users"
@ -20,11 +21,11 @@ export * as context from "./context"
export * as cache from "./cache" export * as cache from "./cache"
export * as objectStore from "./objectStore" export * as objectStore from "./objectStore"
export * as redis from "./redis" export * as redis from "./redis"
export * as locks from "./redis/redlock" export * as locks from "./redis/redlockImpl"
export * as utils from "./utils" export * as utils from "./utils"
export * as errors from "./errors" export * as errors from "./errors"
export { default as env } from "./environment" export { default as env } from "./environment"
export { SearchParams } from "./db"
// Add context to tenancy for backwards compatibility // Add context to tenancy for backwards compatibility
// only do this for external usages to prevent internal // only do this for external usages to prevent internal
// circular dependencies // circular dependencies

View File

@ -8,7 +8,7 @@ import { getGlobalDB, doInTenant } from "../context"
import { decrypt } from "../security/encryption" import { decrypt } from "../security/encryption"
import * as identity from "../context/identity" import * as identity from "../context/identity"
import env from "../environment" import env from "../environment"
import { BBContext, EndpointMatcher } from "@budibase/types" import { Ctx, EndpointMatcher } from "@budibase/types"
const ONE_MINUTE = env.SESSION_UPDATE_PERIOD const ONE_MINUTE = env.SESSION_UPDATE_PERIOD
? parseInt(env.SESSION_UPDATE_PERIOD) ? parseInt(env.SESSION_UPDATE_PERIOD)
@ -73,7 +73,7 @@ export default function (
} }
) { ) {
const noAuthOptions = noAuthPatterns ? buildMatcherRegex(noAuthPatterns) : [] const noAuthOptions = noAuthPatterns ? buildMatcherRegex(noAuthPatterns) : []
return async (ctx: BBContext | any, next: any) => { return async (ctx: Ctx | any, next: any) => {
let publicEndpoint = false let publicEndpoint = false
const version = ctx.request.headers[Header.API_VER] const version = ctx.request.headers[Header.API_VER]
// the path is not authenticated // the path is not authenticated
@ -115,7 +115,8 @@ export default function (
authenticated = true authenticated = true
} catch (err: any) { } catch (err: any) {
authenticated = false authenticated = false
console.error("Auth Error", err?.message || err) console.error(`Auth Error: ${err.message}`)
console.error(err)
// remove the cookie as the user does not exist anymore // remove the cookie as the user does not exist anymore
clearCookie(ctx, Cookie.Auth) clearCookie(ctx, Cookie.Auth)
} }
@ -148,12 +149,13 @@ export default function (
finalise(ctx, { authenticated, user, internal, version, publicEndpoint }) finalise(ctx, { authenticated, user, internal, version, publicEndpoint })
if (user && user.email) { if (user && user.email) {
return identity.doInUserContext(user, next) return identity.doInUserContext(user, ctx, next)
} else { } else {
return next() return next()
} }
} catch (err: any) { } catch (err: any) {
console.error("Auth Error", err?.message || err) console.error(`Auth Error: ${err.message}`)
console.error(err)
// invalid token, clear the cookie // invalid token, clear the cookie
if (err && err.name === "JsonWebTokenError") { if (err && err.name === "JsonWebTokenError") {
clearCookie(ctx, Cookie.Auth) clearCookie(ctx, Cookie.Auth)

View File

@ -11,6 +11,7 @@ export async function errorHandling(ctx: any, next: any) {
if (status > 499 || env.ENABLE_4XX_HTTP_LOGGING) { if (status > 499 || env.ENABLE_4XX_HTTP_LOGGING) {
ctx.log.error(err) ctx.log.error(err)
console.trace(err)
} }
const error = errors.getPublicError(err) const error = errors.getPublicError(err)

View File

@ -17,4 +17,5 @@ export { default as builderOrAdmin } from "./builderOrAdmin"
export { default as builderOnly } from "./builderOnly" export { default as builderOnly } from "./builderOnly"
export { default as logging } from "./logging" export { default as logging } from "./logging"
export { default as errorHandling } from "./errorHandling" export { default as errorHandling } from "./errorHandling"
export { default as querystringToBody } from "./querystringToBody"
export * as joiValidator from "./joi-validator" export * as joiValidator from "./joi-validator"

View File

@ -1,9 +1,8 @@
import * as google from "../sso/google" import * as google from "../sso/google"
import { Cookie, Config } from "../../../constants" import { Cookie } from "../../../constants"
import { clearCookie, getCookie } from "../../../utils" import { clearCookie, getCookie } from "../../../utils"
import { getScopedConfig, getPlatformUrl, doWithDB } from "../../../db" import { doWithDB } from "../../../db"
import environment from "../../../environment" import * as configs from "../../../configs"
import { getGlobalDB } from "../../../context"
import { BBContext, Database, SSOProfile } from "@budibase/types" import { BBContext, Database, SSOProfile } from "@budibase/types"
import { ssoSaveUserNoOp } from "../sso/sso" import { ssoSaveUserNoOp } from "../sso/sso"
const GoogleStrategy = require("passport-google-oauth").OAuth2Strategy const GoogleStrategy = require("passport-google-oauth").OAuth2Strategy
@ -13,18 +12,12 @@ type Passport = {
} }
async function fetchGoogleCreds() { async function fetchGoogleCreds() {
// try and get the config from the tenant let config = await configs.getGoogleDatasourceConfig()
const db = getGlobalDB()
const googleConfig = await getScopedConfig(db, { if (!config) {
type: Config.GOOGLE, throw new Error("No google configuration found")
}) }
// or fall back to env variables return config
return (
googleConfig || {
clientID: environment.GOOGLE_CLIENT_ID,
clientSecret: environment.GOOGLE_CLIENT_SECRET,
}
)
} }
export async function preAuth( export async function preAuth(
@ -34,7 +27,7 @@ export async function preAuth(
) { ) {
// get the relevant config // get the relevant config
const googleConfig = await fetchGoogleCreds() const googleConfig = await fetchGoogleCreds()
const platformUrl = await getPlatformUrl({ tenantAware: false }) const platformUrl = await configs.getPlatformUrl({ tenantAware: false })
let callbackUrl = `${platformUrl}/api/global/auth/datasource/google/callback` let callbackUrl = `${platformUrl}/api/global/auth/datasource/google/callback`
const strategy = await google.strategyFactory( const strategy = await google.strategyFactory(
@ -61,7 +54,7 @@ export async function postAuth(
) { ) {
// get the relevant config // get the relevant config
const config = await fetchGoogleCreds() const config = await fetchGoogleCreds()
const platformUrl = await getPlatformUrl({ tenantAware: false }) const platformUrl = await configs.getPlatformUrl({ tenantAware: false })
let callbackUrl = `${platformUrl}/api/global/auth/datasource/google/callback` let callbackUrl = `${platformUrl}/api/global/auth/datasource/google/callback`
const authStateCookie = getCookie(ctx, Cookie.DatasourceAuth) const authStateCookie = getCookie(ctx, Cookie.DatasourceAuth)

View File

@ -2,12 +2,11 @@ import { ssoCallbackUrl } from "../utils"
import * as sso from "./sso" import * as sso from "./sso"
import { import {
ConfigType, ConfigType,
GoogleConfig,
Database,
SSOProfile, SSOProfile,
SSOAuthDetails, SSOAuthDetails,
SSOProviderType, SSOProviderType,
SaveSSOUserFunction, SaveSSOUserFunction,
GoogleInnerConfig,
} from "@budibase/types" } from "@budibase/types"
const GoogleStrategy = require("passport-google-oauth").OAuth2Strategy const GoogleStrategy = require("passport-google-oauth").OAuth2Strategy
@ -45,7 +44,7 @@ export function buildVerifyFn(saveUserFn: SaveSSOUserFunction) {
* @returns Dynamically configured Passport Google Strategy * @returns Dynamically configured Passport Google Strategy
*/ */
export async function strategyFactory( export async function strategyFactory(
config: GoogleConfig["config"], config: GoogleInnerConfig,
callbackUrl: string, callbackUrl: string,
saveUserFn: SaveSSOUserFunction saveUserFn: SaveSSOUserFunction
) { ) {
@ -73,9 +72,6 @@ export async function strategyFactory(
} }
} }
export async function getCallbackUrl( export async function getCallbackUrl(config: GoogleInnerConfig) {
db: Database, return ssoCallbackUrl(ConfigType.GOOGLE, config)
config: { callbackURL?: string }
) {
return ssoCallbackUrl(db, config, ConfigType.GOOGLE)
} }

View File

@ -4,7 +4,6 @@ import { ssoCallbackUrl } from "../utils"
import { import {
ConfigType, ConfigType,
OIDCInnerConfig, OIDCInnerConfig,
Database,
SSOProfile, SSOProfile,
OIDCStrategyConfiguration, OIDCStrategyConfiguration,
SSOAuthDetails, SSOAuthDetails,
@ -157,9 +156,6 @@ export async function fetchStrategyConfig(
} }
} }
export async function getCallbackUrl( export async function getCallbackUrl() {
db: Database, return ssoCallbackUrl(ConfigType.OIDC)
config: { callbackURL?: string }
) {
return ssoCallbackUrl(db, config, ConfigType.OIDC)
} }

View File

@ -1,6 +1,6 @@
import { isMultiTenant, getTenantId } from "../../context" import { getTenantId, isMultiTenant } from "../../context"
import { getScopedConfig } from "../../db" import * as configs from "../../configs"
import { ConfigType, Database } from "@budibase/types" import { ConfigType, GoogleInnerConfig } from "@budibase/types"
/** /**
* Utility to handle authentication errors. * Utility to handle authentication errors.
@ -19,17 +19,14 @@ export function authError(done: Function, message: string, err?: any) {
} }
export async function ssoCallbackUrl( export async function ssoCallbackUrl(
db: Database, type: ConfigType,
config?: { callbackURL?: string }, config?: GoogleInnerConfig
type?: ConfigType
) { ) {
// in case there is a callback URL from before // in case there is a callback URL from before
if (config && config.callbackURL) { if (config && (config as GoogleInnerConfig).callbackURL) {
return config.callbackURL return (config as GoogleInnerConfig).callbackURL as string
} }
const publicConfig = await getScopedConfig(db, { const settingsConfig = await configs.getSettingsConfig()
type: ConfigType.SETTINGS,
})
let callbackUrl = `/api/global/auth` let callbackUrl = `/api/global/auth`
if (isMultiTenant()) { if (isMultiTenant()) {
@ -37,5 +34,5 @@ export async function ssoCallbackUrl(
} }
callbackUrl += `/${type}/callback` callbackUrl += `/${type}/callback`
return `${publicConfig.platformUrl}${callbackUrl}` return `${settingsConfig.platformUrl}${callbackUrl}`
} }
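For reference, a rough sketch of how the reworked ssoCallbackUrl is now called. The ConfigType value, explicit callbackURL and the `as any` cast (standing in for the rest of GoogleInnerConfig) are illustrative; the import path mirrors the one used by the strategies above, and in single-tenant mode the result resolves to settingsConfig.platformUrl plus /api/global/auth/<type>/callback.

import { ConfigType } from "@budibase/types"
import { ssoCallbackUrl } from "../utils" // same relative path the google/oidc strategies use above

// no explicit callbackURL: the URL is derived from the settings config, e.g.
// "https://budibase.example.com/api/global/auth/google/callback" in single-tenant mode
const googleCallback = await ssoCallbackUrl(ConfigType.GOOGLE)

// an explicit callbackURL on the Google config short-circuits the lookup entirely
const custom = await ssoCallbackUrl(ConfigType.GOOGLE, {
  callbackURL: "https://sso.example.com/custom/callback",
} as any) // only callbackURL matters here; the remaining GoogleInnerConfig fields are omitted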

View File

@ -0,0 +1,28 @@
import { Ctx } from "@budibase/types"
/**
* Expects a standard "query" query string property which contains the JSON body
* of the request; the body has to be sent via the query string because the
* endpoint must be a GET request, e.g. when downloading a file stream.
*/
export default function (ctx: Ctx, next: any) {
const queryString = ctx.request.query?.query as string | undefined
if (ctx.request.method.toLowerCase() !== "get") {
ctx.throw(
500,
"Query to download middleware can only be used for get requests."
)
}
if (!queryString) {
return next()
}
const decoded = decodeURIComponent(queryString)
let json
try {
json = JSON.parse(decoded)
} catch (err) {
return next()
}
ctx.request.body = json
return next()
}
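A usage sketch for the new middleware: the client serialises the JSON body into a "query" parameter on a GET request and the middleware decodes it into ctx.request.body. The route, the Koa/router setup and the payload field are illustrative assumptions; only querystringToBody itself comes from the file above.

import Koa from "koa"
import Router from "@koa/router"
import querystringToBody from "./querystringToBody" // illustrative path; also re-exported from the middleware index above

const app = new Koa()
const router = new Router()

// client side the request looks like:
//   GET /api/export?query=%7B%22tableId%22%3A%22ta_123%22%7D   (encodeURIComponent(JSON.stringify({ tableId: "ta_123" })))
router.get("/api/export", querystringToBody as any, async ctx => {
  // populated by the middleware from the decoded "query" parameter
  const { tableId } = ((ctx.request as any).body || {}) as { tableId?: string }
  ctx.body = { exporting: tableId }
})

app.use(router.routes())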

View File

@ -87,6 +87,7 @@ export const runMigration = async (
const lengthStatement = length > 1 ? `[${count}/${length}]` : "" const lengthStatement = length > 1 ? `[${count}/${length}]` : ""
const db = getDB(dbName) const db = getDB(dbName)
try { try {
const doc = await getMigrationsDoc(db) const doc = await getMigrationsDoc(db)

View File

@ -1,7 +1,7 @@
import { StaticDatabases } from "../constants" import { StaticDatabases } from "../constants"
import { getPlatformDB } from "./platformDb" import { getPlatformDB } from "./platformDb"
import { LockName, LockOptions, LockType, Tenants } from "@budibase/types" import { LockName, LockOptions, LockType, Tenants } from "@budibase/types"
import * as locks from "../redis/redlock" import * as locks from "../redis/redlockImpl"
const TENANT_DOC = StaticDatabases.PLATFORM_INFO.docs.tenants const TENANT_DOC = StaticDatabases.PLATFORM_INFO.docs.tenants

View File

@ -1,4 +1,5 @@
export enum JobQueue { export enum JobQueue {
AUTOMATION = "automationQueue", AUTOMATION = "automationQueue",
APP_BACKUP = "appBackupQueue", APP_BACKUP = "appBackupQueue",
AUDIT_LOG = "auditLogQueue",
} }

View File

@ -40,8 +40,10 @@ export function createQueue<T>(
} }
export async function shutdown() { export async function shutdown() {
if (QUEUES.length) { if (cleanupInterval) {
clearInterval(cleanupInterval) clearInterval(cleanupInterval)
}
if (QUEUES.length) {
for (let queue of QUEUES) { for (let queue of QUEUES) {
await queue.close() await queue.close()
} }

View File

@ -3,4 +3,4 @@
export { default as Client } from "./redis" export { default as Client } from "./redis"
export * as utils from "./utils" export * as utils from "./utils"
export * as clients from "./init" export * as clients from "./init"
export * as locks from "./redlock" export * as locks from "./redlockImpl"

View File

@ -24,7 +24,7 @@ const getClient = async (type: LockType): Promise<Redlock> => {
} }
} }
export const OPTIONS = { const OPTIONS = {
TRY_ONCE: { TRY_ONCE: {
// immediately throws an error if the lock is already held // immediately throws an error if the lock is already held
retryCount: 0, retryCount: 0,
@ -56,14 +56,29 @@ export const OPTIONS = {
}, },
} }
export const newRedlock = async (opts: Options = {}) => { const newRedlock = async (opts: Options = {}) => {
let options = { ...OPTIONS.DEFAULT, ...opts } let options = { ...OPTIONS.DEFAULT, ...opts }
const redisWrapper = await getLockClient() const redisWrapper = await getLockClient()
const client = redisWrapper.getClient() const client = redisWrapper.getClient()
return new Redlock([client], options) return new Redlock([client], options)
} }
export const doWithLock = async (opts: LockOptions, task: any) => { type SuccessfulRedlockExecution<T> = {
executed: true
result: T
}
type UnsuccessfulRedlockExecution = {
executed: false
}
type RedlockExecution<T> =
| SuccessfulRedlockExecution<T>
| UnsuccessfulRedlockExecution
export const doWithLock = async <T>(
opts: LockOptions,
task: () => Promise<T>
): Promise<RedlockExecution<T>> => {
const redlock = await getClient(opts.type) const redlock = await getClient(opts.type)
let lock let lock
try { try {
@ -73,8 +88,8 @@ export const doWithLock = async (opts: LockOptions, task: any) => {
let name: string = `lock:${prefix}_${opts.name}` let name: string = `lock:${prefix}_${opts.name}`
// add additional unique name if required // add additional unique name if required
if (opts.nameSuffix) { if (opts.resource) {
name = name + `_${opts.nameSuffix}` name = name + `_${opts.resource}`
} }
// create the lock // create the lock
@ -83,7 +98,7 @@ export const doWithLock = async (opts: LockOptions, task: any) => {
// perform locked task // perform locked task
// need to await to ensure completion before unlocking // need to await to ensure completion before unlocking
const result = await task() const result = await task()
return result return { executed: true, result }
} catch (e: any) { } catch (e: any) {
console.warn("lock error") console.warn("lock error")
// lock limit exceeded // lock limit exceeded
@ -92,7 +107,7 @@ export const doWithLock = async (opts: LockOptions, task: any) => {
// don't throw for try-once locks, they will always error // don't throw for try-once locks, they will always error
// due to retry count (0) exceeded // due to retry count (0) exceeded
console.warn(e) console.warn(e)
return return { executed: false }
} else { } else {
console.error(e) console.error(e)
throw e throw e
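For callers, the practical change is that doWithLock now resolves to a typed result rather than the task's value (or undefined when a try-once lock is already held). A consumption sketch, assuming the locks re-export shown in the index above; the lock name, tenant id and in particular the ttl option are illustrative and not visible in this hunk.

import { locks } from "@budibase/backend-core" // re-exported from ./redis/redlockImpl above
import { LockName, LockType } from "@budibase/types"

async function migrateTenant(tenantId: string) {
  const lockResult = await locks.doWithLock(
    {
      type: LockType.TRY_ONCE, // try-once locks no longer resolve to undefined on contention
      name: LockName.MIGRATIONS, // illustrative lock name
      resource: tenantId, // was "nameSuffix" before this change
      ttl: 60000, // assumed option, not shown in the hunk above
    } as any,
    async () => {
      // work that must not run concurrently goes here
      return "done"
    }
  )

  if (!lockResult.executed) {
    console.log("another process already holds the lock, skipping")
    return
  }
  return lockResult.result // typed as string via the new RedlockExecution<T>
}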

View File

@ -5,19 +5,56 @@ import {
generateAppUserID, generateAppUserID,
queryGlobalView, queryGlobalView,
UNICODE_MAX, UNICODE_MAX,
DocumentType,
SEPARATOR,
directCouchFind,
} from "./db" } from "./db"
import { BulkDocsResponse, User } from "@budibase/types" import { BulkDocsResponse, User } from "@budibase/types"
import { getGlobalDB } from "./context" import { getGlobalDB } from "./context"
import * as context from "./context" import * as context from "./context"
export const bulkGetGlobalUsersById = async (userIds: string[]) => { type GetOpts = { cleanup?: boolean }
function removeUserPassword(users: User | User[]) {
if (Array.isArray(users)) {
return users.map(user => {
if (user) {
delete user.password
return user
}
})
} else if (users) {
delete users.password
return users
}
return users
}
export const bulkGetGlobalUsersById = async (
userIds: string[],
opts?: GetOpts
) => {
const db = getGlobalDB() const db = getGlobalDB()
return ( let users = (
await db.allDocs({ await db.allDocs({
keys: userIds, keys: userIds,
include_docs: true, include_docs: true,
}) })
).rows.map(row => row.doc) as User[] ).rows.map(row => row.doc) as User[]
if (opts?.cleanup) {
users = removeUserPassword(users) as User[]
}
return users
}
export const getAllUserIds = async () => {
const db = getGlobalDB()
const startKey = `${DocumentType.USER}${SEPARATOR}`
const response = await db.allDocs({
startkey: startKey,
endkey: `${startKey}${UNICODE_MAX}`,
})
return response.rows.map(row => row.id)
} }
export const bulkUpdateGlobalUsers = async (users: User[]) => { export const bulkUpdateGlobalUsers = async (users: User[]) => {
@ -25,18 +62,22 @@ export const bulkUpdateGlobalUsers = async (users: User[]) => {
return (await db.bulkDocs(users)) as BulkDocsResponse return (await db.bulkDocs(users)) as BulkDocsResponse
} }
export async function getById(id: string): Promise<User> { export async function getById(id: string, opts?: GetOpts): Promise<User> {
const db = context.getGlobalDB() const db = context.getGlobalDB()
return db.get(id) let user = await db.get(id)
if (opts?.cleanup) {
user = removeUserPassword(user)
}
return user
} }
/** /**
* Given an email address this will use a view to search through * Given an email address this will use a view to search through
* all the users to find one with this email address. * all the users to find one with this email address.
* @param {string} email the email to lookup the user by.
*/ */
export const getGlobalUserByEmail = async ( export const getGlobalUserByEmail = async (
email: String email: String,
opts?: GetOpts
): Promise<User | undefined> => { ): Promise<User | undefined> => {
if (email == null) { if (email == null) {
throw "Must supply an email address to view" throw "Must supply an email address to view"
@ -52,10 +93,19 @@ export const getGlobalUserByEmail = async (
throw new Error(`Multiple users found with email address: ${email}`) throw new Error(`Multiple users found with email address: ${email}`)
} }
return response let user = response as User
if (opts?.cleanup) {
user = removeUserPassword(user) as User
}
return user
} }
export const searchGlobalUsersByApp = async (appId: any, opts: any) => { export const searchGlobalUsersByApp = async (
appId: any,
opts: any,
getOpts?: GetOpts
) => {
if (typeof appId !== "string") { if (typeof appId !== "string") {
throw new Error("Must provide a string based app ID") throw new Error("Must provide a string based app ID")
} }
@ -64,10 +114,54 @@ export const searchGlobalUsersByApp = async (appId: any, opts: any) => {
}) })
params.startkey = opts && opts.startkey ? opts.startkey : params.startkey params.startkey = opts && opts.startkey ? opts.startkey : params.startkey
let response = await queryGlobalView(ViewName.USER_BY_APP, params) let response = await queryGlobalView(ViewName.USER_BY_APP, params)
if (!response) { if (!response) {
response = [] response = []
} }
return Array.isArray(response) ? response : [response] let users: User[] = Array.isArray(response) ? response : [response]
if (getOpts?.cleanup) {
users = removeUserPassword(users) as User[]
}
return users
}
/*
Return any user who potentially has access to the application
Admins, developers and app users with the explicit role.
*/
export const searchGlobalUsersByAppAccess = async (appId: any, opts: any) => {
const roleSelector = `roles.${appId}`
let orQuery: any[] = [
{
"builder.global": true,
},
{
"admin.global": true,
},
]
if (appId) {
const roleCheck = {
[roleSelector]: {
$exists: true,
},
}
orQuery.push(roleCheck)
}
let searchOptions = {
selector: {
$or: orQuery,
_id: {
$regex: "^us_",
},
},
limit: opts?.limit || 50,
}
const resp = await directCouchFind(context.getGlobalDBName(), searchOptions)
return resp?.rows
} }
export const getGlobalUserByAppPage = (appId: string, user: User) => { export const getGlobalUserByAppPage = (appId: string, user: User) => {
@ -80,7 +174,11 @@ export const getGlobalUserByAppPage = (appId: string, user: User) => {
/** /**
* Performs a starts with search on the global email view. * Performs a starts with search on the global email view.
*/ */
export const searchGlobalUsersByEmail = async (email: string, opts: any) => { export const searchGlobalUsersByEmail = async (
email: string,
opts: any,
getOpts?: GetOpts
) => {
if (typeof email !== "string") { if (typeof email !== "string") {
throw new Error("Must provide a string to search by") throw new Error("Must provide a string to search by")
} }
@ -95,5 +193,9 @@ export const searchGlobalUsersByEmail = async (email: string, opts: any) => {
if (!response) { if (!response) {
response = [] response = []
} }
return Array.isArray(response) ? response : [response] let users: User[] = Array.isArray(response) ? response : [response]
if (getOpts?.cleanup) {
users = removeUserPassword(users) as User[]
}
return users
} }
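A quick sketch of the new GetOpts flag: passing { cleanup: true } removes the password field from whatever is returned, and the search helpers take it as a separate trailing argument. The package import and the example ids/emails are assumptions; the functions and the option come from the hunk above.

import { users } from "@budibase/backend-core" // re-exported via `export * as users from "./users"` above

// without cleanup the raw CouchDB doc (including the hashed password) comes back
const raw = await users.getById("us_123")

// with cleanup the password field is stripped before returning
const safe = await users.getById("us_123", { cleanup: true })
const byEmail = await users.getGlobalUserByEmail("person@example.com", { cleanup: true })

// search helpers keep their existing opts argument and take the cleanup flag last
const appUsers = await users.searchGlobalUsersByApp("app_dev_123", {}, { cleanup: true })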

View File

@ -10,7 +10,13 @@ import {
import env from "../environment" import env from "../environment"
import * as tenancy from "../tenancy" import * as tenancy from "../tenancy"
import * as context from "../context" import * as context from "../context"
import { App, Ctx, TenantResolutionStrategy } from "@budibase/types" import {
App,
AuditedEventFriendlyName,
Ctx,
Event,
TenantResolutionStrategy,
} from "@budibase/types"
import { SetOption } from "cookies" import { SetOption } from "cookies"
const jwt = require("jsonwebtoken") const jwt = require("jsonwebtoken")
@ -217,3 +223,7 @@ export async function getBuildersCount() {
export function timeout(timeMs: number) { export function timeout(timeMs: number) {
return new Promise(resolve => setTimeout(resolve, timeMs)) return new Promise(resolve => setTimeout(resolve, timeMs))
} }
export function isAudited(event: Event) {
return !!AuditedEventFriendlyName[event]
}
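isAudited is a simple lookup: an event counts as audited only when AuditedEventFriendlyName has an entry for it. A minimal sketch, assuming the utils re-export from the backend-core index; the branch body is illustrative.

import { Event } from "@budibase/types"
import { utils } from "@budibase/backend-core" // `export * as utils from "./utils"` in the index above

if (utils.isAudited(Event.USER_INVITED)) {
  // the event has a friendly name, so an audit log entry can be written for it (illustrative)
}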

View File

@ -4,4 +4,6 @@ export { generator } from "./structures"
export * as testEnv from "./testEnv" export * as testEnv from "./testEnv"
export * as testContainerUtils from "./testContainerUtils" export * as testContainerUtils from "./testContainerUtils"
export * from "./jestUtils"
export { default as DBTestConfiguration } from "./DBTestConfiguration" export { default as DBTestConfiguration } from "./DBTestConfiguration"

View File

@ -0,0 +1,9 @@
export function expectFunctionWasCalledTimesWith(
jestFunction: any,
times: number,
argument: any
) {
expect(
jestFunction.mock.calls.filter((call: any) => call[0] === argument).length
).toBe(times)
}
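Usage of the new jest helper, as a minimal sketch; the mocked function and its arguments are invented for illustration and the package path is an assumption (the helper is re-exported from the tests index above).

import { expectFunctionWasCalledTimesWith } from "@budibase/backend-core/tests" // assumed path

const sendEmail = jest.fn()
sendEmail("a@example.com")
sendEmail("b@example.com")
sendEmail("a@example.com")

// passes: the mock was called with "a@example.com" as its first argument exactly twice
expectFunctionWasCalledTimesWith(sendEmail, 2, "a@example.com")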

View File

@ -70,6 +70,10 @@ export const useBackups = () => {
return useFeature(Feature.APP_BACKUPS) return useFeature(Feature.APP_BACKUPS)
} }
export const useEnforceableSSO = () => {
return useFeature(Feature.ENFORCEABLE_SSO)
}
export const useGroups = () => { export const useGroups = () => {
return useFeature(Feature.USER_GROUPS) return useFeature(Feature.USER_GROUPS)
} }
@ -78,6 +82,10 @@ export const useEnvironmentVariables = () => {
return useFeature(Feature.ENVIRONMENT_VARIABLES) return useFeature(Feature.ENVIRONMENT_VARIABLES)
} }
export const useAuditLogs = () => {
return useFeature(Feature.AUDIT_LOGS)
}
// QUOTAS // QUOTAS
export const setAutomationLogsQuota = (value: number) => { export const setAutomationLogsQuota = (value: number) => {

View File

@ -8,6 +8,8 @@ import {
CloudAccount, CloudAccount,
Hosting, Hosting,
SSOAccount, SSOAccount,
CreateAccount,
CreatePassswordAccount,
} from "@budibase/types" } from "@budibase/types"
import _ from "lodash" import _ from "lodash"
@ -29,6 +31,10 @@ export const account = (): Account => {
} }
} }
export function selfHostAccount() {
return account()
}
export const cloudAccount = (): CloudAccount => { export const cloudAccount = (): CloudAccount => {
return { return {
...account(), ...account(),
@ -47,9 +53,9 @@ function provider(): AccountSSOProvider {
return _.sample(Object.values(AccountSSOProvider)) as AccountSSOProvider return _.sample(Object.values(AccountSSOProvider)) as AccountSSOProvider
} }
export function ssoAccount(): SSOAccount { export function ssoAccount(account: Account = cloudAccount()): SSOAccount {
return { return {
...cloudAccount(), ...account,
authType: AuthType.SSO, authType: AuthType.SSO,
oauth2: { oauth2: {
accessToken: generator.string(), accessToken: generator.string(),
@ -61,3 +67,49 @@ export function ssoAccount(): SSOAccount {
thirdPartyProfile: {}, thirdPartyProfile: {},
} }
} }
export const cloudCreateAccount: CreatePassswordAccount = {
email: "cloud@budibase.com",
tenantId: "cloud",
hosting: Hosting.CLOUD,
authType: AuthType.PASSWORD,
password: "Password123!",
tenantName: "cloud",
name: "Budi Armstrong",
size: "10+",
profession: "Software Engineer",
}
export const cloudSSOCreateAccount: CreateAccount = {
email: "cloud-sso@budibase.com",
tenantId: "cloud-sso",
hosting: Hosting.CLOUD,
authType: AuthType.SSO,
tenantName: "cloudsso",
name: "Budi Armstrong",
size: "10+",
profession: "Software Engineer",
}
export const selfCreateAccount: CreatePassswordAccount = {
email: "self@budibase.com",
tenantId: "self",
hosting: Hosting.SELF,
authType: AuthType.PASSWORD,
password: "Password123!",
tenantName: "self",
name: "Budi Armstrong",
size: "10+",
profession: "Software Engineer",
}
export const selfSSOCreateAccount: CreateAccount = {
email: "self-sso@budibase.com",
tenantId: "self-sso",
hosting: Hosting.SELF,
authType: AuthType.SSO,
tenantName: "selfsso",
name: "Budi Armstrong",
size: "10+",
profession: "Software Engineer",
}

View File

@ -1,5 +1,12 @@
import { structures } from ".."
import { newid } from "../../../src/newid" import { newid } from "../../../src/newid"
export function id() { export function id() {
return `db_${newid()}` return `db_${newid()}`
} }
export function rev() {
return `${structures.generator.character({
numeric: true,
})}-${structures.uuid().replace(/-/, "")}`
}

View File

@ -0,0 +1,2 @@
import Chance from "chance"
export const generator = new Chance()

View File

@ -1,8 +1,4 @@
export * from "./common" export * from "./common"
import Chance from "chance"
export const generator = new Chance()
export * as accounts from "./accounts" export * as accounts from "./accounts"
export * as apps from "./apps" export * as apps from "./apps"
export * as db from "./db" export * as db from "./db"
@ -12,3 +8,4 @@ export * as plugins from "./plugins"
export * as sso from "./sso" export * as sso from "./sso"
export * as tenant from "./tenants" export * as tenant from "./tenants"
export * as users from "./users" export * as users from "./users"
export { generator } from "./generator"

View File

@ -0,0 +1,19 @@
import { User } from "@budibase/types"
import { generator } from "./generator"
import { uuid } from "./common"
export const newEmail = () => {
return `${uuid()}@test.com`
}
export const user = (userProps?: any): User => {
return {
email: newEmail(),
password: "test",
roles: { app_test: "admin" },
firstName: generator.first(),
lastName: generator.last(),
pictureUrl: "http://test.com",
...userProps,
}
}

View File

@ -1,6 +1,7 @@
import { import {
GoogleInnerConfig, GoogleInnerConfig,
JwtClaims, JwtClaims,
OAuth2,
OIDCInnerConfig, OIDCInnerConfig,
OIDCWellKnownConfig, OIDCWellKnownConfig,
SSOAuthDetails, SSOAuthDetails,
@ -8,8 +9,40 @@ import {
SSOProviderType, SSOProviderType,
User, User,
} from "@budibase/types" } from "@budibase/types"
import { uuid, generator, users, email } from "./index" import { generator } from "./generator"
import { uuid, email } from "./common"
import * as shared from "./shared"
import _ from "lodash" import _ from "lodash"
import { user } from "./shared"
export function OAuth(): OAuth2 {
return {
refreshToken: generator.string(),
accessToken: generator.string(),
}
}
export function authDetails(userDoc?: User): SSOAuthDetails {
if (!userDoc) {
userDoc = user()
}
const userId = userDoc._id || uuid()
const provider = generator.string()
const profile = ssoProfile(userDoc)
profile.provider = provider
profile.id = userId
return {
email: userDoc.email,
oauth2: OAuth(),
profile,
provider,
providerType: providerType(),
userId,
}
}
export function providerType(): SSOProviderType { export function providerType(): SSOProviderType {
return _.sample(Object.values(SSOProviderType)) as SSOProviderType return _.sample(Object.values(SSOProviderType)) as SSOProviderType
@ -17,7 +50,7 @@ export function providerType(): SSOProviderType {
export function ssoProfile(user?: User): SSOProfile { export function ssoProfile(user?: User): SSOProfile {
if (!user) { if (!user) {
user = users.user() user = shared.user()
} }
return { return {
id: user._id!, id: user._id!,
@ -33,31 +66,6 @@ export function ssoProfile(user?: User): SSOProfile {
} }
} }
export function authDetails(user?: User): SSOAuthDetails {
if (!user) {
user = users.user()
}
const userId = user._id || uuid()
const provider = generator.string()
const profile = ssoProfile(user)
profile.provider = provider
profile.id = userId
return {
email: user.email,
oauth2: {
refreshToken: generator.string(),
accessToken: generator.string(),
},
profile,
provider,
providerType: providerType(),
userId,
}
}
// OIDC // OIDC
export function oidcConfig(): OIDCInnerConfig { export function oidcConfig(): OIDCInnerConfig {
@ -69,6 +77,7 @@ export function oidcConfig(): OIDCInnerConfig {
configUrl: "http://someconfigurl", configUrl: "http://someconfigurl",
clientID: generator.string(), clientID: generator.string(),
clientSecret: generator.string(), clientSecret: generator.string(),
scopes: [],
} }
} }

View File

@ -1,29 +1,13 @@
import { generator } from "../"
import { import {
AdminUser, AdminUser,
BuilderUser, BuilderUser,
SSOAuthDetails, SSOAuthDetails,
SSOUser, SSOUser,
User,
} from "@budibase/types" } from "@budibase/types"
import { v4 as uuid } from "uuid" import { user } from "./shared"
import * as sso from "./sso" import { authDetails } from "./sso"
export const newEmail = () => { export { user, newEmail } from "./shared"
return `${uuid()}@test.com`
}
export const user = (userProps?: any): User => {
return {
email: newEmail(),
password: "test",
roles: { app_test: "admin" },
firstName: generator.first(),
lastName: generator.last(),
pictureUrl: "http://test.com",
...userProps,
}
}
export const adminUser = (userProps?: any): AdminUser => { export const adminUser = (userProps?: any): AdminUser => {
return { return {
@ -53,7 +37,7 @@ export function ssoUser(
delete base.password delete base.password
if (!opts.details) { if (!opts.details) {
opts.details = sso.authDetails(base) opts.details = authDetails(base)
} }
return { return {

View File

@ -1,3 +1,31 @@
import { execSync } from "child_process"
let dockerPsResult: string | undefined
function formatDockerPsResult(serverName: string, port: number) {
const lines = dockerPsResult?.split("\n")
let first = true
if (!lines) {
return null
}
for (let line of lines) {
if (first) {
first = false
continue
}
let toLookFor = serverName.split("-service")[0]
if (!line.includes(toLookFor)) {
continue
}
const regex = new RegExp(`0.0.0.0:([0-9]*)->${port}`, "g")
const found = line.match(regex)
if (found) {
return found[0].split(":")[1].split("->")[0]
}
}
return null
}
function getTestContainerSettings( function getTestContainerSettings(
serverName: string, serverName: string,
key: string key: string
@ -14,10 +42,22 @@ function getTestContainerSettings(
} }
function getContainerInfo(containerName: string, port: number) { function getContainerInfo(containerName: string, port: number) {
const assignedPort = getTestContainerSettings( let assignedPort = getTestContainerSettings(
containerName.toUpperCase(), containerName.toUpperCase(),
`PORT_${port}` `PORT_${port}`
) )
if (!dockerPsResult) {
try {
const outputBuffer = execSync("docker ps")
dockerPsResult = outputBuffer.toString("utf8")
} catch (err) {
//no-op
}
}
const possiblePort = formatDockerPsResult(containerName, port)
if (possiblePort) {
assignedPort = possiblePort
}
const host = getTestContainerSettings(containerName.toUpperCase(), "IP") const host = getTestContainerSettings(containerName.toUpperCase(), "IP")
return { return {
port: assignedPort, port: assignedPort,
@ -39,12 +79,15 @@ function getRedisConfig() {
} }
export function setupEnv(...envs: any[]) { export function setupEnv(...envs: any[]) {
const couch = getCouchConfig(),
minio = getMinioConfig(),
redis = getRedisConfig()
const configs = [ const configs = [
{ key: "COUCH_DB_PORT", value: getCouchConfig().port }, { key: "COUCH_DB_PORT", value: couch.port },
{ key: "COUCH_DB_URL", value: getCouchConfig().url }, { key: "COUCH_DB_URL", value: couch.url },
{ key: "MINIO_PORT", value: getMinioConfig().port }, { key: "MINIO_PORT", value: minio.port },
{ key: "MINIO_URL", value: getMinioConfig().url }, { key: "MINIO_URL", value: minio.url },
{ key: "REDIS_URL", value: getRedisConfig().url }, { key: "REDIS_URL", value: redis.url },
] ]
for (const config of configs.filter(x => !!x.value)) { for (const config of configs.filter(x => !!x.value)) {
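To make the docker ps fallback concrete, a small sketch of the extraction formatDockerPsResult performs; the container line is an invented example of docker ps output and 5984 is CouchDB's default port.

// invented docker ps line for a CouchDB test container (not taken from the repo):
const line =
  'abc123  ibmcom/couchdb3  "tini -- ..."  Up 2 minutes  0.0.0.0:49162->5984/tcp  couchdb-service'
const port = 5984

// the same regex-based extraction the new fallback applies when the env variables are missing
const found = line.match(new RegExp(`0.0.0.0:([0-9]*)->${port}`, "g"))
const hostPort = found ? found[0].split(":")[1].split("->")[0] : null
console.log(hostPort) // "49162" - the randomly mapped host port fed into COUCH_DB_PORT / COUCH_DB_URL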

View File

@ -475,10 +475,10 @@
resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39"
integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==
"@budibase/nano@10.1.1": "@budibase/nano@10.1.2":
version "10.1.1" version "10.1.2"
resolved "https://registry.yarnpkg.com/@budibase/nano/-/nano-10.1.1.tgz#36ccda4d9bb64b5ee14dd2b27a295b40739b1038" resolved "https://registry.yarnpkg.com/@budibase/nano/-/nano-10.1.2.tgz#10fae5a1ab39be6a81261f40e7b7ec6d21cbdd4a"
integrity sha512-kbMIzMkjVtl+xI0UPwVU0/pn8/ccxTyfzwBz6Z+ZiN2oUSb0fJCe0qwA6o8dxwSa8nZu4MbGAeMJl3CJndmWtA== integrity sha512-1w+YN2n/M5aZ9hBKCP4NEjdQbT8BfCLRizkdvm0Je665eEHw3aE1hvo8mon9Ro9QuDdxj1DfDMMFnym6/QUwpQ==
dependencies: dependencies:
"@types/tough-cookie" "^4.0.2" "@types/tough-cookie" "^4.0.2"
axios "^1.1.3" axios "^1.1.3"

View File

@ -1,7 +1,7 @@
{ {
"name": "@budibase/bbui", "name": "@budibase/bbui",
"description": "A UI solution used in the different Budibase projects.", "description": "A UI solution used in the different Budibase projects.",
"version": "2.3.18-alpha.12", "version": "2.4.12-alpha.0",
"license": "MPL-2.0", "license": "MPL-2.0",
"svelte": "src/index.js", "svelte": "src/index.js",
"module": "dist/bbui.es.js", "module": "dist/bbui.es.js",
@ -38,7 +38,8 @@
], ],
"dependencies": { "dependencies": {
"@adobe/spectrum-css-workflow-icons": "1.2.1", "@adobe/spectrum-css-workflow-icons": "1.2.1",
"@budibase/string-templates": "2.3.18-alpha.12", "@budibase/shared-core": "2.4.12-alpha.0",
"@budibase/string-templates": "2.4.12-alpha.0",
"@spectrum-css/accordion": "3.0.24", "@spectrum-css/accordion": "3.0.24",
"@spectrum-css/actionbutton": "1.0.1", "@spectrum-css/actionbutton": "1.0.1",
"@spectrum-css/actiongroup": "1.0.1", "@spectrum-css/actiongroup": "1.0.1",

View File

@ -1,6 +1,9 @@
<script> <script>
import "@spectrum-css/actionbutton/dist/index-vars.css" import "@spectrum-css/actionbutton/dist/index-vars.css"
import { createEventDispatcher } from "svelte" import { createEventDispatcher } from "svelte"
import Tooltip from "../Tooltip/Tooltip.svelte"
import { fade } from "svelte/transition"
const dispatch = createEventDispatcher() const dispatch = createEventDispatcher()
export let quiet = false export let quiet = false
@ -13,6 +16,9 @@
export let active = false export let active = false
export let fullWidth = false export let fullWidth = false
export let noPadding = false export let noPadding = false
export let tooltip = ""
let showTooltip = false
function longPress(element) { function longPress(element) {
if (!longPressable) return if (!longPressable) return
@ -35,42 +41,54 @@
} }
</script> </script>
<button <span
use:longPress class="btn-wrap"
class:spectrum-ActionButton--quiet={quiet} on:mouseover={() => (showTooltip = true)}
class:spectrum-ActionButton--emphasized={emphasized} on:mouseleave={() => (showTooltip = false)}
class:is-selected={selected} on:focus={() => (showTooltip = true)}
class:noPadding
class:fullWidth
class="spectrum-ActionButton spectrum-ActionButton--size{size}"
class:active
{disabled}
on:longPress
on:click|preventDefault
> >
{#if longPressable} <button
<svg use:longPress
class="spectrum-Icon spectrum-UIIcon-CornerTriangle100 spectrum-ActionButton-hold" class:spectrum-ActionButton--quiet={quiet}
focusable="false" class:spectrum-ActionButton--emphasized={emphasized}
aria-hidden="true" class:is-selected={selected}
> class:noPadding
<use xlink:href="#spectrum-css-icon-CornerTriangle100" /> class:fullWidth
</svg> class="spectrum-ActionButton spectrum-ActionButton--size{size}"
{/if} class:active
{#if icon} {disabled}
<svg on:longPress
class="spectrum-Icon spectrum-Icon--size{size}" on:click|preventDefault
focusable="false" >
aria-hidden="true" {#if longPressable}
aria-label={icon} <svg
> class="spectrum-Icon spectrum-UIIcon-CornerTriangle100 spectrum-ActionButton-hold"
<use xlink:href="#spectrum-icon-18-{icon}" /> focusable="false"
</svg> aria-hidden="true"
{/if} >
{#if $$slots} <use xlink:href="#spectrum-css-icon-CornerTriangle100" />
<span class="spectrum-ActionButton-label"><slot /></span> </svg>
{/if} {/if}
</button> {#if icon}
<svg
class="spectrum-Icon spectrum-Icon--size{size}"
focusable="false"
aria-hidden="true"
aria-label={icon}
>
<use xlink:href="#spectrum-icon-18-{icon}" />
</svg>
{/if}
{#if $$slots}
<span class="spectrum-ActionButton-label"><slot /></span>
{/if}
{#if tooltip && showTooltip}
<div class="tooltip" in:fade={{ duration: 130, delay: 250 }}>
<Tooltip textWrapping direction="bottom" text={tooltip} />
</div>
{/if}
</button>
</span>
<style> <style>
.fullWidth { .fullWidth {
@ -95,7 +113,20 @@
.spectrum-ActionButton--quiet { .spectrum-ActionButton--quiet {
padding: 0 8px; padding: 0 8px;
} }
.spectrum-ActionButton--quiet.is-selected {
color: var(--spectrum-global-color-gray-900);
}
.is-selected:not(.emphasized) .spectrum-Icon { .is-selected:not(.emphasized) .spectrum-Icon {
color: var(--spectrum-global-color-gray-900); color: var(--spectrum-global-color-gray-900);
} }
.tooltip {
position: absolute;
pointer-events: none;
left: 50%;
top: calc(100% + 4px);
width: 100vw;
max-width: 150px;
transform: translateX(-50%);
text-align: center;
}
</style> </style>

View File

@ -31,6 +31,7 @@ export default function positionDropdown(element, opts) {
styles.top = anchorBounds.top styles.top = anchorBounds.top
} else if (window.innerHeight - anchorBounds.bottom < 100) { } else if (window.innerHeight - anchorBounds.bottom < 100) {
styles.top = anchorBounds.top - elementBounds.height - offset styles.top = anchorBounds.top - elementBounds.height - offset
styles.maxHeight = 240
} else { } else {
styles.top = anchorBounds.bottom + offset styles.top = anchorBounds.bottom + offset
styles.maxHeight = window.innerHeight - anchorBounds.bottom - 20 styles.maxHeight = window.innerHeight - anchorBounds.bottom - 20

View File

@ -14,6 +14,9 @@
export let autocomplete = false export let autocomplete = false
export let sort = false export let sort = false
export let autoWidth = false export let autoWidth = false
export let fetchTerm = null
export let useFetch = false
export let customPopoverHeight
const dispatch = createEventDispatcher() const dispatch = createEventDispatcher()
@ -83,10 +86,13 @@
{options} {options}
isPlaceholder={!value?.length} isPlaceholder={!value?.length}
{autocomplete} {autocomplete}
bind:fetchTerm
{useFetch}
{isOptionSelected} {isOptionSelected}
{getOptionLabel} {getOptionLabel}
{getOptionValue} {getOptionValue}
onSelectOption={toggleOption} onSelectOption={toggleOption}
{sort} {sort}
{autoWidth} {autoWidth}
{customPopoverHeight}
/> />

View File

@ -24,6 +24,7 @@
export let getOptionLabel = option => option export let getOptionLabel = option => option
export let getOptionValue = option => option export let getOptionValue = option => option
export let getOptionIcon = () => null export let getOptionIcon = () => null
export let useOptionIconImage = false
export let getOptionColour = () => null export let getOptionColour = () => null
export let open = false export let open = false
export let readonly = false export let readonly = false
@ -31,6 +32,11 @@
export let autoWidth = false export let autoWidth = false
export let autocomplete = false export let autocomplete = false
export let sort = false export let sort = false
export let fetchTerm = null
export let useFetch = false
export let customPopoverHeight
export let align = "left"
export let footer = null
const dispatch = createEventDispatcher() const dispatch = createEventDispatcher()
@ -71,7 +77,7 @@
} }
const getFilteredOptions = (options, term, getLabel) => { const getFilteredOptions = (options, term, getLabel) => {
if (autocomplete && term) { if (autocomplete && term && !fetchTerm) {
const lowerCaseTerm = term.toLowerCase() const lowerCaseTerm = term.toLowerCase()
return options.filter(option => { return options.filter(option => {
return `${getLabel(option)}`.toLowerCase().includes(lowerCaseTerm) return `${getLabel(option)}`.toLowerCase().includes(lowerCaseTerm)
@ -130,12 +136,13 @@
<Popover <Popover
anchor={button} anchor={button}
align="left" align={align || "left"}
bind:this={popover} bind:this={popover}
{open} {open}
on:close={() => (open = false)} on:close={() => (open = false)}
useAnchorWidth={!autoWidth} useAnchorWidth={!autoWidth}
maxWidth={autoWidth ? 400 : null} maxWidth={autoWidth ? 400 : null}
customHeight={customPopoverHeight}
> >
<div <div
class="popover-content" class="popover-content"
@ -144,8 +151,9 @@
> >
{#if autocomplete} {#if autocomplete}
<Search <Search
value={searchTerm} value={useFetch ? fetchTerm : searchTerm}
on:change={event => (searchTerm = event.detail)} on:change={event =>
useFetch ? (fetchTerm = event.detail) : (searchTerm = event.detail)}
{disabled} {disabled}
placeholder="Search" placeholder="Search"
/> />
@ -183,7 +191,16 @@
> >
{#if getOptionIcon(option, idx)} {#if getOptionIcon(option, idx)}
<span class="option-extra icon"> <span class="option-extra icon">
<Icon size="S" name={getOptionIcon(option, idx)} /> {#if useOptionIconImage}
<img
src={getOptionIcon(option, idx)}
alt="icon"
width="15"
height="15"
/>
{:else}
<Icon size="S" name={getOptionIcon(option, idx)} />
{/if}
</span> </span>
{/if} {/if}
{#if getOptionColour(option, idx)} {#if getOptionColour(option, idx)}
@ -205,6 +222,12 @@
{/each} {/each}
{/if} {/if}
</ul> </ul>
{#if footer}
<div class="footer">
{footer}
</div>
{/if}
</div> </div>
</Popover> </Popover>
@ -247,7 +270,7 @@
} }
.popover-content.auto-width .spectrum-Menu-itemLabel { .popover-content.auto-width .spectrum-Menu-itemLabel {
white-space: nowrap; white-space: nowrap;
overflow: hidden; overflow: none;
text-overflow: ellipsis; text-overflow: ellipsis;
} }
.popover-content:not(.auto-width) .spectrum-Menu-itemLabel { .popover-content:not(.auto-width) .spectrum-Menu-itemLabel {
@ -281,4 +304,11 @@
.popover-content :global(.spectrum-Search .spectrum-Textfield-icon) { .popover-content :global(.spectrum-Search .spectrum-Textfield-icon) {
top: 9px; top: 9px;
} }
.footer {
padding: 4px 12px 12px 12px;
font-style: italic;
max-width: 170px;
font-size: 12px;
}
</style> </style>

View File

@ -11,6 +11,7 @@
export let getOptionLabel = option => option export let getOptionLabel = option => option
export let getOptionValue = option => option export let getOptionValue = option => option
export let getOptionIcon = () => null export let getOptionIcon = () => null
export let useOptionIconImage = false
export let getOptionColour = () => null export let getOptionColour = () => null
export let isOptionEnabled export let isOptionEnabled
export let readonly = false export let readonly = false
@ -18,6 +19,8 @@
export let autoWidth = false export let autoWidth = false
export let autocomplete = false export let autocomplete = false
export let sort = false export let sort = false
export let align
export let footer = null
const dispatch = createEventDispatcher() const dispatch = createEventDispatcher()
@ -41,7 +44,7 @@
const getFieldText = (value, options, placeholder) => { const getFieldText = (value, options, placeholder) => {
// Always use placeholder if no value // Always use placeholder if no value
if (value == null || value === "") { if (value == null || value === "") {
return placeholder || "Choose an option" return placeholder !== false ? "Choose an option" : ""
} }
return getFieldAttribute(getOptionLabel, value, options) return getFieldAttribute(getOptionLabel, value, options)
@ -66,15 +69,18 @@
{fieldColour} {fieldColour}
{options} {options}
{autoWidth} {autoWidth}
{align}
{footer}
{getOptionLabel} {getOptionLabel}
{getOptionValue} {getOptionValue}
{getOptionIcon} {getOptionIcon}
{useOptionIconImage}
{getOptionColour} {getOptionColour}
{isOptionEnabled} {isOptionEnabled}
{autocomplete} {autocomplete}
{sort} {sort}
isPlaceholder={value == null || value === ""} isPlaceholder={value == null || value === ""}
placeholderOption={placeholder} placeholderOption={placeholder === false ? null : placeholder}
isOptionSelected={option => option === value} isOptionSelected={option => option === value}
onSelectOption={selectOption} onSelectOption={selectOption}
/> />

View File

@ -15,6 +15,11 @@
export let getOptionValue = option => option export let getOptionValue = option => option
export let sort = false export let sort = false
export let autoWidth = false export let autoWidth = false
export let autocomplete = false
export let fetchTerm = null
export let useFetch = false
export let customPopoverHeight
const dispatch = createEventDispatcher() const dispatch = createEventDispatcher()
const onChange = e => { const onChange = e => {
value = e.detail value = e.detail
@ -34,6 +39,10 @@
{getOptionLabel} {getOptionLabel}
{getOptionValue} {getOptionValue}
{autoWidth} {autoWidth}
{autocomplete}
{customPopoverHeight}
bind:fetchTerm
{useFetch}
on:change={onChange} on:change={onChange}
on:click on:click
/> />

View File

@ -14,12 +14,17 @@
export let getOptionLabel = option => extractProperty(option, "label") export let getOptionLabel = option => extractProperty(option, "label")
export let getOptionValue = option => extractProperty(option, "value") export let getOptionValue = option => extractProperty(option, "value")
export let getOptionIcon = option => option?.icon export let getOptionIcon = option => option?.icon
export let useOptionIconImage = false
export let getOptionColour = option => option?.colour export let getOptionColour = option => option?.colour
export let isOptionEnabled export let isOptionEnabled
export let quiet = false export let quiet = false
export let autoWidth = false export let autoWidth = false
export let sort = false export let sort = false
export let tooltip = "" export let tooltip = ""
export let autocomplete = false
export let customPopoverHeight
export let align
export let footer = null
const dispatch = createEventDispatcher() const dispatch = createEventDispatcher()
const onChange = e => { const onChange = e => {
@ -46,11 +51,16 @@
{placeholder} {placeholder}
{autoWidth} {autoWidth}
{sort} {sort}
{align}
{footer}
{getOptionLabel} {getOptionLabel}
{getOptionValue} {getOptionValue}
{getOptionIcon} {getOptionIcon}
{getOptionColour} {getOptionColour}
{useOptionIconImage}
{isOptionEnabled} {isOptionEnabled}
{autocomplete}
{customPopoverHeight}
on:change={onChange} on:change={onChange}
on:click on:click
/> />

View File

@ -29,6 +29,14 @@
visible = false visible = false
} }
export function toggle() {
if (visible) {
hide()
} else {
show()
}
}
export function cancel() { export function cancel() {
if (!visible) { if (!visible) {
return return
@ -61,7 +69,7 @@
} }
} }
setContext(Context.Modal, { show, hide, cancel }) setContext(Context.Modal, { show, hide, toggle, cancel })
onMount(() => { onMount(() => {
document.addEventListener("keydown", handleKey) document.addEventListener("keydown", handleKey)

View File

@ -18,6 +18,7 @@
export let useAnchorWidth = false export let useAnchorWidth = false
export let dismissible = true export let dismissible = true
export let offset = 5 export let offset = 5
export let customHeight
$: target = portalTarget || getContext(Context.PopoverRoot) || ".spectrum" $: target = portalTarget || getContext(Context.PopoverRoot) || ".spectrum"
@ -74,6 +75,7 @@
on:keydown={handleEscape} on:keydown={handleEscape}
class="spectrum-Popover is-open" class="spectrum-Popover is-open"
role="presentation" role="presentation"
style="height: {customHeight}"
transition:fly|local={{ y: -20, duration: 200 }} transition:fly|local={{ y: -20, duration: 200 }}
> >
<slot /> <slot />

View File

@ -1,3 +1,6 @@
import { helpers } from "@budibase/shared-core"
export const deepGet = helpers.deepGet
/** /**
* Generates a DOM safe UUID. * Generates a DOM safe UUID.
* Starting with a letter is important to make it DOM safe. * Starting with a letter is important to make it DOM safe.
@ -41,30 +44,6 @@ export const hashString = string => {
return hash.toString() return hash.toString()
} }
/**
* Gets a key within an object. The key supports dot syntax for retrieving deep
* fields - e.g. "a.b.c".
* Exact matches of keys with dots in them take precedence over nested keys of
* the same path - e.g. getting "a.b" from { "a.b": "foo", a: { b: "bar" } }
* will return "foo" over "bar".
* @param obj the object
* @param key the key
* @return {*|null} the value or null if a value was not found for this key
*/
export const deepGet = (obj, key) => {
if (!obj || !key) {
return null
}
if (Object.prototype.hasOwnProperty.call(obj, key)) {
return obj[key]
}
const split = key.split(".")
for (let i = 0; i < split.length; i++) {
obj = obj?.[split[i]]
}
return obj
}
/** /**
* Sets a key within an object. The key supports dot syntax for retrieving deep * Sets a key within an object. The key supports dot syntax for retrieving deep
* fields - e.g. "a.b.c". * fields - e.g. "a.b.c".
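The helper itself is unchanged, just sourced from @budibase/shared-core now. As a reminder of the precedence rule the removed docstring described, a few illustrative calls:

import { helpers } from "@budibase/shared-core"

console.log(helpers.deepGet({ a: { b: { c: 42 } } }, "a.b.c")) // 42 - dot syntax walks nested objects
console.log(helpers.deepGet({ "a.b": "foo", a: { b: "bar" } }, "a.b")) // "foo" - an exact dotted key wins over the nested path
console.log(helpers.deepGet(null, "a.b")) // null - a missing object or key returns null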

View File

@ -1,6 +1,6 @@
{ {
"name": "@budibase/builder", "name": "@budibase/builder",
"version": "2.3.18-alpha.12", "version": "2.4.12-alpha.0",
"license": "GPL-3.0", "license": "GPL-3.0",
"private": true, "private": true,
"scripts": { "scripts": {
@ -58,10 +58,11 @@
} }
}, },
"dependencies": { "dependencies": {
"@budibase/bbui": "2.3.18-alpha.12", "@budibase/bbui": "2.4.12-alpha.0",
"@budibase/client": "2.3.18-alpha.12", "@budibase/client": "2.4.12-alpha.0",
"@budibase/frontend-core": "2.3.18-alpha.12", "@budibase/frontend-core": "2.4.12-alpha.0",
"@budibase/string-templates": "2.3.18-alpha.12", "@budibase/shared-core": "2.4.12-alpha.0",
"@budibase/string-templates": "2.4.12-alpha.0",
"@fortawesome/fontawesome-svg-core": "^6.2.1", "@fortawesome/fontawesome-svg-core": "^6.2.1",
"@fortawesome/free-brands-svg-icons": "^6.2.1", "@fortawesome/free-brands-svg-icons": "^6.2.1",
"@fortawesome/free-solid-svg-icons": "^6.2.1", "@fortawesome/free-solid-svg-icons": "^6.2.1",

View File

@ -72,6 +72,8 @@ const INITIAL_FRONTEND_STATE = {
// onboarding // onboarding
onboarding: false, onboarding: false,
tourNodes: null, tourNodes: null,
builderSidePanel: false,
} }
export const getFrontendStore = () => { export const getFrontendStore = () => {

View File

@ -73,14 +73,14 @@
<Tabs noHorizPadding selected="Input"> <Tabs noHorizPadding selected="Input">
<Tab title="Input"> <Tab title="Input">
<TextArea <TextArea
minHeight="80px" minHeight="160px"
disabled disabled
value={textArea(filteredResults?.[idx]?.inputs, "No input")} value={textArea(filteredResults?.[idx]?.inputs, "No input")}
/> />
</Tab> </Tab>
<Tab title="Output"> <Tab title="Output">
<TextArea <TextArea
minHeight="100px" minHeight="160px"
disabled disabled
value={textArea(filteredResults?.[idx]?.outputs, "No output")} value={textArea(filteredResults?.[idx]?.outputs, "No output")}
/> />
@ -98,8 +98,9 @@
<style> <style>
.container { .container {
padding: 0 30px 0 30px; padding: 0 30px 30px 30px;
height: 100%; height: 100%;
overflow: auto;
} }
.tabs { .tabs {

View File

@ -192,13 +192,13 @@
editableColumn.name = originalName editableColumn.name = originalName
} }
function deleteColumn() { async function deleteColumn() {
try { try {
editableColumn.name = deleteColName editableColumn.name = deleteColName
if (editableColumn.name === $tables.selected.primaryDisplay) { if (editableColumn.name === $tables.selected.primaryDisplay) {
notifications.error("You cannot delete the display column") notifications.error("You cannot delete the display column")
} else { } else {
tables.deleteField(editableColumn) await tables.deleteField(editableColumn)
notifications.success(`Column ${editableColumn.name} deleted.`) notifications.success(`Column ${editableColumn.name} deleted.`)
confirmDeleteDialog.hide() confirmDeleteDialog.hide()
hide() hide()

View File

@ -0,0 +1,333 @@
<script>
import {
Context,
Icon,
Input,
ModalContent,
Detail,
notifications,
} from "@budibase/bbui"
import { API } from "api"
import { goto } from "@roxi/routify"
import {
store,
sortedScreens,
automationStore,
themeStore,
} from "builderStore"
import { datasources, queries, tables, views } from "stores/backend"
import { getContext } from "svelte"
import { Constants } from "@budibase/frontend-core"
const modalContext = getContext(Context.Modal)
const commands = [
{
type: "Access",
name: "Invite users and manage app access",
description: "",
icon: "User",
action: () =>
store.update(state => ({ ...state, builderSidePanel: true })),
},
{
type: "Navigate",
name: "Portal",
description: "",
icon: "Compass",
action: () => $goto("../../portal"),
},
{
type: "Navigate",
name: "Data",
description: "",
icon: "Compass",
action: () => $goto("./data"),
},
{
type: "Navigate",
name: "Design",
description: "",
icon: "Compass",
action: () => $goto("./design"),
},
{
type: "Navigate",
name: "Automations",
description: "",
icon: "Compass",
action: () => $goto("./automate"),
},
{
type: "Publish",
name: "App",
description: "Deploy your application",
icon: "Box",
action: deployApp,
},
{
type: "Preview",
name: "App",
description: "",
icon: "Play",
action: () => window.open(`/${$store.appId}`),
},
{
type: "Preview",
name: "Published App",
icon: "Play",
action: () => window.open(`/app${$store.url}`),
},
{
type: "Support",
name: "Raise Github Discussion",
icon: "Help",
action: () =>
window.open(`https://github.com/Budibase/budibase/discussions/new`),
},
{
type: "Support",
name: "Raise A Bug",
icon: "Bug",
action: () =>
window.open(
`https://github.com/Budibase/budibase/issues/new?assignees=&labels=bug&template=bug_report.md&title=`
),
},
...$datasources?.list.map(datasource => ({
type: "Datasource",
name: `${datasource.name}`,
icon: "Data",
action: () => $goto(`./data/datasource/${datasource._id}`),
})),
...$tables?.list.map(table => ({
type: "Table",
name: table.name,
icon: "Table",
action: () => $goto(`./data/table/${table._id}`),
})),
...$views?.list.map(view => ({
type: "View",
name: view.name,
icon: "Remove",
action: () => $goto(`./data/view/${view.name}`),
})),
...$queries?.list.map(query => ({
type: "Query",
name: query.name,
icon: "SQLQuery",
action: () => $goto(`./data/query/${query._id}`),
})),
...$sortedScreens.map(screen => ({
type: "Screen",
name: screen.routing.route,
icon: "WebPage",
action: () => $goto(`./design/${screen._id}/components`),
})),
...$automationStore?.automations.map(automation => ({
type: "Automation",
name: automation.name,
icon: "ShareAndroid",
action: () => $goto(`./automate/${automation._id}`),
})),
...Constants.Themes.map(theme => ({
type: "Change Builder Theme",
name: theme.name,
icon: "ColorPalette",
action: () =>
themeStore.update(state => {
state.theme = theme.class
return state
}),
})),
]
let search
let selected = null
$: enrichedCommands = commands.map(cmd => ({
...cmd,
searchValue: `${cmd.type} ${cmd.name}`.toLowerCase(),
}))
$: results = filterResults(enrichedCommands, search)
$: categories = groupResults(results)
const filterResults = (commands, search) => {
if (!search) {
selected = null
return commands
}
selected = 0
search = search.toLowerCase()
return commands
.filter(cmd => cmd.searchValue.includes(search))
.map((cmd, idx) => ({
...cmd,
idx,
}))
}
const groupResults = results => {
let categories = {}
results?.forEach(result => {
if (!categories[result.type]) {
categories[result.type] = []
}
categories[result.type].push(result)
})
return Object.entries(categories)
}
const onKeyDown = e => {
if (e.key === "ArrowDown") {
e.preventDefault()
if (selected === null) {
selected = 0
return
}
if (selected < results.length - 1) {
selected += 1
}
} else if (e.key === "ArrowUp") {
e.preventDefault()
if (selected === null) {
selected = results.length - 1
return
}
if (selected > 0) {
selected -= 1
}
} else if (e.key === "Enter") {
if (selected == null) {
return
}
runAction(results[selected])
} else if (e.key === "Escape") {
modalContext.hide()
}
}
async function deployApp() {
try {
await API.deployAppChanges()
notifications.success("Application published successfully")
} catch (error) {
notifications.error("Error publishing app")
}
}
const runAction = command => {
if (!command) {
return
}
command.action()
modalContext.hide()
}
</script>
<svelte:window on:keydown={onKeyDown} />
<ModalContent
size="L"
showCancelButton={false}
showConfirmButton={false}
showCloseIcon={false}
>
<div class="content">
<div class="title">
<Icon size="XL" name="Search" />
<Input bind:value={search} quiet placeholder="Search for command" />
</div>
<div class="commands">
{#each categories as [name, results], catIdx}
<div class="category">
<Detail>{name}</Detail>
<div class="options">
{#each results as command, cmdIdx}
<div
class="command"
on:click={() => runAction(command)}
class:selected={command.idx === selected}
>
<Icon size="M" name={command.icon} />
<strong>{command.type}:&nbsp;</strong>
<div class="name">
{command.name}
</div>
</div>
{/each}
</div>
</div>
{/each}
</div>
</div>
</ModalContent>
<style>
.content {
margin: -40px;
overflow: hidden;
}
.title {
display: flex;
flex-direction: row;
justify-content: flex-start;
align-items: center;
padding: var(--spacing-xl) var(--spacing-xl) var(--spacing-l)
var(--spacing-xl);
border-bottom: var(--border-dark);
gap: var(--spacing-m);
border-bottom-width: 2px;
}
.title :global(.spectrum-Textfield-input) {
border-bottom: none;
font-size: 20px;
}
.commands {
height: 378px;
overflow: scroll;
}
.category {
padding: var(--spacing-m) var(--spacing-xl);
border-bottom: var(--border-light);
}
.category:last-of-type {
border-bottom: none;
}
.category :global(.spectrum-Detail) {
color: var(--spectrum-global-color-gray-600);
}
.options {
padding-top: var(--spacing-m);
margin: 0 calc(-1 * var(--spacing-xl));
}
.command {
display: flex;
flex-direction: row;
justify-content: flex-start;
align-items: center;
padding: var(--spacing-s) var(--spacing-xl);
cursor: pointer;
overflow: hidden;
transition: color 130ms ease-out, background-color 130ms ease-out;
}
.command:hover,
.selected {
color: var(--spectrum-global-color-gray-900);
background-color: var(--spectrum-global-color-gray-300);
}
.command strong {
margin-left: var(--spacing-m);
}
.name {
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
}
footer {
display: flex;
justify-content: center;
}
</style>

Some files were not shown because too many files have changed in this diff.