Merge branch 'develop' of github.com:Budibase/budibase into jonny/api-metrics
commit c51b9fb4b3
@@ -10,8 +10,7 @@ on:
  pull_request:
    branches:
      - master
      - develop
      - release
      - develop
  workflow_dispatch:

env:
@@ -20,9 +19,67 @@ env:
  PERSONAL_ACCESS_TOKEN : ${{ secrets.PERSONAL_ACCESS_TOKEN }}

jobs:
  lint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Use Node.js 14.x
        uses: actions/setup-node@v1
        with:
          node-version: 14.x
      - run: yarn
      - run: yarn lint

  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Use Node.js 14.x
        uses: actions/setup-node@v1
        with:
          node-version: 14.x
      - name: Install Pro
        run: yarn install:pro $BRANCH $BASE_BRANCH
      - run: yarn
      - run: yarn bootstrap
      - run: yarn build

  test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Use Node.js 14.x
        uses: actions/setup-node@v1
        with:
          node-version: 14.x
      - name: Install Pro
        run: yarn install:pro $BRANCH $BASE_BRANCH
      - run: yarn
      - run: yarn bootstrap
      - run: yarn test
      - uses: codecov/codecov-action@v1
        with:
          token: ${{ secrets.CODECOV_TOKEN }} # not required for public repos
          files: ./packages/server/coverage/clover.xml,./packages/worker/coverage/clover.xml,./packages/backend-core/coverage/clover.xml
          name: codecov-umbrella
          verbose: true

  test-pro:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Use Node.js 14.x
        uses: actions/setup-node@v1
        with:
          node-version: 14.x
      - name: Install Pro
        run: yarn install:pro $BRANCH $BASE_BRANCH
      - run: yarn
      - run: yarn bootstrap
      - run: yarn test:pro

  integration-test:
    runs-on: ubuntu-latest
    services:
      couchdb:
        image: ibmcom/couchdb3
@@ -31,39 +88,18 @@ jobs:
          COUCHDB_USER: budibase
        ports:
          - 4567:5984

    strategy:
      matrix:
        node-version: [14.x]

    steps:
      - uses: actions/checkout@v2

      - name: Use Node.js ${{ matrix.node-version }}
        uses: actions/setup-node@v1
        with:
          node-version: ${{ matrix.node-version }}

      - name: Install Pro
        run: yarn install:pro $BRANCH $BASE_BRANCH

      - run: yarn
      - run: yarn bootstrap
      - run: yarn lint
      - run: yarn build
      - run: yarn test
        env:
          CI: true
          name: Budibase CI
      - uses: codecov/codecov-action@v1
        with:
          token: ${{ secrets.CODECOV_TOKEN }} # not required for public repos
          files: ./packages/server/coverage/clover.xml,./packages/worker/coverage/clover.xml,./packages/backend-core/coverage/clover.xml
          name: codecov-umbrella
          verbose: true

      - name: QA Core Integration Tests
        run: |
          cd qa-core
          yarn
          yarn api:test:ci
      - uses: actions/checkout@v2
      - name: Use Node.js 14.x
        uses: actions/setup-node@v1
        with:
          node-version: 14.x
      - name: Install Pro
        run: yarn install:pro $BRANCH $BASE_BRANCH
      - run: yarn
      - run: yarn bootstrap
      - run: yarn build
      - run: |
          cd qa-core
          yarn
          yarn api:test:ci
@@ -37,7 +37,7 @@ jobs:
          wc -l values.preprod.yaml

      - name: Deploy to Preprod Environment
        uses: glopezep/helm@v1.7.1
        uses: budibase/helm@v1.8.0
        with:
          release: budibase-preprod
          namespace: budibase
@@ -38,7 +38,7 @@ jobs:
          wc -l values.release.yaml

      - name: Deploy to Release Environment
        uses: glopezep/helm@v1.7.1
        uses: budibase/helm@v1.8.0
        with:
          release: budibase-release
          namespace: budibase
@@ -45,10 +45,9 @@ jobs:

      - run: yarn
      - run: yarn bootstrap
      - run: yarn lint
      - run: yarn build
      - run: yarn build:sdk
      - run: yarn test
      # - run: yarn test

      - name: Publish budibase packages to NPM
        env:
@@ -69,83 +68,6 @@ jobs:
      DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
      DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}

  deploy-to-release-env:
    needs: [release-images]
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v2

      - name: Get the current budibase release version
        id: version
        run: |
          release_version=$(cat lerna.json | jq -r '.version')
          echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV

      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v1
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: eu-west-1

      - name: Pull values.yaml from budibase-infra
        run: |
          curl -H "Authorization: token ${{ secrets.GH_PERSONAL_TOKEN }}" \
            -H 'Accept: application/vnd.github.v3.raw' \
            -o values.release.yaml \
            -L https://api.github.com/repos/budibase/budibase-infra/contents/kubernetes/budibase-release/values.yaml
          wc -l values.release.yaml

      - name: Deploy to Release Environment
        uses: glopezep/helm@v1.7.1
        with:
          release: budibase-release
          namespace: budibase
          chart: charts/budibase
          token: ${{ github.token }}
          helm: helm3
          values: |
            globals:
              appVersion: develop
            ingress:
              enabled: true
              nginx: true
          value-files: >-
            [
              "values.release.yaml"
            ]
        env:
          KUBECONFIG_FILE: '${{ secrets.RELEASE_KUBECONFIG }}'

      - name: Re roll app-service
        uses: actions-hub/kubectl@master
        env:
          KUBE_CONFIG: ${{ secrets.RELEASE_KUBECONFIG_BASE64 }}
        with:
          args: rollout restart deployment app-service -n budibase

      - name: Re roll proxy-service
        uses: actions-hub/kubectl@master
        env:
          KUBE_CONFIG: ${{ secrets.RELEASE_KUBECONFIG_BASE64 }}
        with:
          args: rollout restart deployment proxy-service -n budibase

      - name: Re roll worker-service
        uses: actions-hub/kubectl@master
        env:
          KUBE_CONFIG: ${{ secrets.RELEASE_KUBECONFIG_BASE64 }}
        with:
          args: rollout restart deployment worker-service -n budibase

      - name: Discord Webhook Action
        uses: tsickert/discord-webhook@v4.0.0
        with:
          webhook-url: ${{ secrets.PROD_DEPLOY_WEBHOOK_URL }}
          content: "Release Env Deployment Complete: ${{ env.RELEASE_VERSION }} deployed to Budibase Release Env."
          embed-title: ${{ env.RELEASE_VERSION }}

  release-helm-chart:
    needs: [release-images]
    runs-on: ubuntu-latest
@@ -194,5 +116,5 @@ jobs:
          PAYLOAD_VERSION: ${{ env.RELEASE_VERSION }}
        with:
          repository: budibase/budibase-deploys
          event: deploy-budibase-develop-to-qa
          event: budicloud-qa-deploy
          github_pat: ${{ secrets.GH_ACCESS_TOKEN }}
@@ -16,9 +16,13 @@ jobs:

      - uses: actions/checkout@v2
        with:
          node-version: 14.x
          fetch_depth: 0

      - name: Use Node.js 14.x
        uses: actions/setup-node@v1
        with:
          node-version: 14.x

      - name: Get the latest budibase release version
        id: version
        run: |
@@ -107,7 +107,7 @@ jobs:
          wc -l values.preprod.yaml

      - name: Deploy to Preprod Environment
        uses: glopezep/helm@v1.7.1
        uses: budibase/helm@v1.8.0
        with:
          release: budibase-preprod
          namespace: budibase
@@ -7,7 +7,7 @@ on:

jobs:
  nightly:
    runs-on: ubuntu-latest
    runs-on: [self-hosted, qa]

    steps:
      - uses: actions/checkout@v2
@@ -15,30 +15,17 @@ jobs:
        uses: actions/setup-node@v1
        with:
          node-version: 14.x
      - run: yarn
      - run: yarn bootstrap
      - run: yarn build
      - name: Pull from budibase-infra
      - name: QA Core Integration Tests
        run: |
          curl -H "Authorization: token ${{ secrets.GH_PERSONAL_TOKEN }}" \
            -H 'Accept: application/vnd.github.v3.raw' \
            -o
            -L
          wc -l

      - uses: actions/upload-artifact@v3
        with:
          name: Test Reports
          path:
          cd qa-core
          yarn
          yarn api:test:ci
        env:
          BUDIBASE_HOST: budicloud.qa.budibase.net
          BUDIBASE_ACCOUNTS_URL: https://account-portal.budicloud.qa.budibase.net

      # TODO: enable once running in QA test env
      # - name: Configure AWS Credentials
      #   uses: aws-actions/configure-aws-credentials@v1
      #   with:
      #     aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
      #     aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      #     aws-region: eu-west-1

      # - name: Upload test results HTML
      #   uses: aws-actions/configure-aws-credentials@v1
      #   run: aws s3 cp packages/builder/cypress/reports/testReport.html s3://{{ secrets.BUDI_QA_REPORTS_BUCKET_NAME }}/$GITHUB_RUN_ID/index.html
      - name: Cypress Discord Notify
        run: yarn test:notify
        env:
          WEBHOOK_URL: ${{ secrets.BUDI_QA_WEBHOOK }}
          GITHUB_RUN_URL: $GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID
@@ -1,4 +1,2 @@
#!/bin/sh
. "$(dirname "$0")/_/husky.sh"

yarn run lint
@@ -1,2 +1,2 @@
nodejs 14.19.3
python 3.11.1
python 3.10.0
@ -51,6 +51,14 @@ spec:
|
|||
value: {{ tpl .Values.services.proxy.upstreams.minio . | quote }}
|
||||
- name: COUCHDB_UPSTREAM_URL
|
||||
value: {{ .Values.services.couchdb.url | default (tpl .Values.services.proxy.upstreams.couchdb .) | quote }}
|
||||
{{ if .Values.services.proxy.proxyRateLimitWebhooksPerSecond }}
|
||||
- name: PROXY_RATE_LIMIT_WEBHOOKS_PER_SECOND
|
||||
value: {{ .Values.services.proxy.proxyRateLimitWebhooksPerSecond | quote }}
|
||||
{{ end }}
|
||||
{{ if .Values.services.proxy.proxyRateLimitApiPerSecond }}
|
||||
- name: PROXY_RATE_LIMIT_API_PER_SECOND
|
||||
value: {{ .Values.services.proxy.proxyRateLimitApiPerSecond | quote }}
|
||||
{{ end }}
|
||||
- name: RESOLVER
|
||||
{{ if .Values.services.proxy.resolver }}
|
||||
value: {{ .Values.services.proxy.resolver }}
|
||||
|
|
|
@@ -245,7 +245,7 @@ couchdb:
  ## The CouchDB image
  image:
    repository: couchdb
    tag: 3.2.1
    tag: 3.1.1
    pullPolicy: IfNotPresent

  ## Experimental integration with Lucene-powered fulltext search
@@ -8,8 +8,8 @@ services:
    # Last version that supports the "fs" backend
    image: minio/minio:RELEASE.2022-10-24T18-35-07Z
    ports:
      - 9000
      - 9001
      - "9000"
      - "9001"
    environment:
      MINIO_ACCESS_KEY: ${MINIO_ACCESS_KEY}
      MINIO_SECRET_KEY: ${MINIO_SECRET_KEY}
@@ -28,9 +28,9 @@ services:
      - COUCHDB_PASSWORD=${COUCH_DB_PASSWORD}
      - COUCHDB_USER=${COUCH_DB_USER}
    ports:
      - 5984
      - 4369
      - 9100
      - "5984"
      - "4369"
      - "9100"
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:5984/_up"]
      interval: 30s
@@ -42,6 +42,6 @@ services:
    image: redis
    command: redis-server --requirepass ${REDIS_PASSWORD}
    ports:
      - 6379
      - "6379"
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      test: ["CMD", "redis-cli", "ping"]
@ -55,7 +55,7 @@ http {
|
|||
set $csp_style "style-src 'self' 'unsafe-inline' https://cdn.jsdelivr.net https://fonts.googleapis.com https://rsms.me https://maxcdn.bootstrapcdn.com";
|
||||
set $csp_object "object-src 'none'";
|
||||
set $csp_base_uri "base-uri 'self'";
|
||||
set $csp_connect "connect-src 'self' https://*.budibase.net https://api-iam.intercom.io https://api-iam.intercom.io https://api-ping.intercom.io https://app.posthog.com wss://nexus-websocket-a.intercom.io wss://nexus-websocket-b.intercom.io https://nexus-websocket-a.intercom.io https://nexus-websocket-b.intercom.io https://uploads.intercomcdn.com https://uploads.intercomusercontent.com https://*.s3.amazonaws.com https://*.s3.us-east-2.amazonaws.com https://*.s3.us-east-1.amazonaws.com https://*.s3.us-west-1.amazonaws.com https://*.s3.us-west-2.amazonaws.com https://*.s3.af-south-1.amazonaws.com https://*.s3.ap-east-1.amazonaws.com https://*.s3.ap-southeast-3.amazonaws.com https://*.s3.ap-south-1.amazonaws.com https://*.s3.ap-northeast-3.amazonaws.com https://*.s3.ap-northeast-2.amazonaws.com https://*.s3.ap-southeast-1.amazonaws.com https://*.s3.ap-southeast-2.amazonaws.com https://*.s3.ap-northeast-1.amazonaws.com https://*.s3.ca-central-1.amazonaws.com https://*.s3.cn-north-1.amazonaws.com https://*.s3.cn-northwest-1.amazonaws.com https://*.s3.eu-central-1.amazonaws.com https://*.s3.eu-west-1.amazonaws.com https://*.s3.eu-west-2.amazonaws.com https://*.s3.eu-south-1.amazonaws.com https://*.s3.eu-west-3.amazonaws.com https://*.s3.eu-north-1.amazonaws.com https://*.s3.sa-east-1.amazonaws.com https://*.s3.me-south-1.amazonaws.com https://*.s3.us-gov-east-1.amazonaws.com https://*.s3.us-gov-west-1.amazonaws.com https://api.github.com";
|
||||
set $csp_connect "connect-src 'self' https://*.budibase.net https://api-iam.intercom.io https://api-iam.intercom.io https://api-ping.intercom.io https://app.posthog.com wss://nexus-websocket-a.intercom.io wss://nexus-websocket-b.intercom.io https://nexus-websocket-a.intercom.io https://nexus-websocket-b.intercom.io https://uploads.intercomcdn.com https://uploads.intercomusercontent.com https://*.s3.*.amazonaws.com https://s3.*.amazonaws.com https://api.github.com";
|
||||
set $csp_font "font-src 'self' data: https://cdn.jsdelivr.net https://fonts.gstatic.com https://rsms.me https://maxcdn.bootstrapcdn.com https://js.intercomcdn.com https://fonts.intercomcdn.com";
|
||||
set $csp_frame "frame-src 'self' https:";
|
||||
set $csp_img "img-src http: https: data: blob:";
|
||||
|
|
|
@@ -1,5 +1,5 @@
{
  "version": "2.3.16",
  "version": "2.3.21-alpha.1",
  "npmClient": "yarn",
  "packages": [
    "packages/*"
@ -13,7 +13,7 @@
|
|||
"js-yaml": "^4.1.0",
|
||||
"kill-port": "^1.6.1",
|
||||
"lerna": "3.14.1",
|
||||
"madge": "^5.0.1",
|
||||
"madge": "^6.0.0",
|
||||
"prettier": "^2.3.1",
|
||||
"prettier-plugin-svelte": "^2.3.0",
|
||||
"rimraf": "^3.0.2",
|
||||
|
@ -44,7 +44,7 @@
|
|||
"dev": "yarn run kill-all && lerna link && lerna run --parallel dev:builder --concurrency 1",
|
||||
"dev:noserver": "yarn run kill-builder && lerna link && lerna run dev:stack:up && lerna run --parallel dev:builder --concurrency 1 --ignore @budibase/backend-core --ignore @budibase/server --ignore @budibase/worker",
|
||||
"dev:server": "yarn run kill-server && lerna run --parallel dev:builder --concurrency 1 --scope @budibase/backend-core --scope @budibase/worker --scope @budibase/server",
|
||||
"test": "lerna run test && yarn test:pro",
|
||||
"test": "lerna run test",
|
||||
"test:pro": "bash scripts/pro/test.sh",
|
||||
"lint:eslint": "eslint packages && eslint qa-core",
|
||||
"lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\" && prettier --check \"qa-core/**/*.{js,ts,svelte}\"",
|
||||
|
@ -84,4 +84,4 @@
|
|||
"install:pro": "bash scripts/pro/install.sh",
|
||||
"dep:clean": "yarn clean && yarn bootstrap"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -9,15 +9,9 @@ const baseConfig: Config.InitialProjectOptions = {
|
|||
transform: {
|
||||
"^.+\\.ts?$": "@swc/jest",
|
||||
},
|
||||
}
|
||||
|
||||
if (!process.env.CI) {
|
||||
// use sources when not in CI
|
||||
baseConfig.moduleNameMapper = {
|
||||
moduleNameMapper: {
|
||||
"@budibase/types": "<rootDir>/../types/src",
|
||||
}
|
||||
} else {
|
||||
console.log("Running tests with compiled dependency sources")
|
||||
},
|
||||
}
|
||||
|
||||
const config: Config.InitialOptions = {
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "@budibase/backend-core",
|
||||
"version": "2.3.16",
|
||||
"version": "2.3.21-alpha.1",
|
||||
"description": "Budibase backend core libraries used in server and worker",
|
||||
"main": "dist/src/index.js",
|
||||
"types": "dist/src/index.d.ts",
|
||||
|
@ -18,13 +18,13 @@
|
|||
"build:pro": "../../scripts/pro/build.sh",
|
||||
"postbuild": "yarn run build:pro",
|
||||
"build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput",
|
||||
"test": "jest --coverage --maxWorkers=2",
|
||||
"test": "bash scripts/test.sh",
|
||||
"test:watch": "jest --watchAll"
|
||||
},
|
||||
"dependencies": {
|
||||
"@budibase/nano": "10.1.1",
|
||||
"@budibase/nano": "10.1.2",
|
||||
"@budibase/pouchdb-replication-stream": "1.2.10",
|
||||
"@budibase/types": "^2.3.16",
|
||||
"@budibase/types": "2.3.21-alpha.1",
|
||||
"@shopify/jest-koa-mocks": "5.0.1",
|
||||
"@techpass/passport-openidconnect": "0.3.2",
|
||||
"aws-cloudfront-sign": "2.2.0",
|
||||
|
@ -62,7 +62,7 @@
|
|||
"@trendyol/jest-testcontainers": "^2.1.1",
|
||||
"@types/chance": "1.1.3",
|
||||
"@types/ioredis": "4.28.0",
|
||||
"@types/jest": "27.5.1",
|
||||
"@types/jest": "28.1.1",
|
||||
"@types/koa": "2.13.4",
|
||||
"@types/koa-pino-logger": "3.0.0",
|
||||
"@types/lodash": "4.14.180",
|
||||
|
|
|
@ -0,0 +1,12 @@
|
|||
#!/bin/bash
|
||||
|
||||
if [[ -n $CI ]]
|
||||
then
|
||||
# --runInBand performs better in ci where resources are limited
|
||||
echo "jest --coverage --runInBand"
|
||||
jest --coverage --runInBand
|
||||
else
|
||||
# --maxWorkers performs better in development
|
||||
echo "jest --coverage"
|
||||
jest --coverage
|
||||
fi
|
|
@ -1,13 +1,24 @@
|
|||
import API from "./api"
|
||||
import env from "../environment"
|
||||
import { Header } from "../constants"
|
||||
import { CloudAccount } from "@budibase/types"
|
||||
import { CloudAccount, HealthStatusResponse } from "@budibase/types"
|
||||
|
||||
const api = new API(env.ACCOUNT_PORTAL_URL)
|
||||
|
||||
/**
|
||||
* This client is intended to be used in a cloud hosted deploy only.
|
||||
* Rather than relying on each consumer to perform the necessary environmental checks
|
||||
* we use the following check to exit early with a undefined response which should be
|
||||
* handled by the caller.
|
||||
*/
|
||||
const EXIT_EARLY = env.SELF_HOSTED || env.DISABLE_ACCOUNT_PORTAL
|
||||
|
||||
export const getAccount = async (
|
||||
email: string
|
||||
): Promise<CloudAccount | undefined> => {
|
||||
if (EXIT_EARLY) {
|
||||
return
|
||||
}
|
||||
const payload = {
|
||||
email,
|
||||
}
|
||||
|
@ -29,6 +40,9 @@ export const getAccount = async (
|
|||
export const getAccountByTenantId = async (
|
||||
tenantId: string
|
||||
): Promise<CloudAccount | undefined> => {
|
||||
if (EXIT_EARLY) {
|
||||
return
|
||||
}
|
||||
const payload = {
|
||||
tenantId,
|
||||
}
|
||||
|
@ -47,7 +61,12 @@ export const getAccountByTenantId = async (
|
|||
return json[0]
|
||||
}
|
||||
|
||||
export const getStatus = async () => {
|
||||
export const getStatus = async (): Promise<
|
||||
HealthStatusResponse | undefined
|
||||
> => {
|
||||
if (EXIT_EARLY) {
|
||||
return
|
||||
}
|
||||
const response = await api.get(`/api/status`, {
|
||||
headers: {
|
||||
[Header.API_KEY]: env.ACCOUNT_PORTAL_API_KEY,
|
|
@ -0,0 +1 @@
|
|||
export * from "./accounts"
|
|
@ -1,22 +1,36 @@
|
|||
const _passport = require("koa-passport")
|
||||
const LocalStrategy = require("passport-local").Strategy
|
||||
const JwtStrategy = require("passport-jwt").Strategy
|
||||
import { getGlobalDB } from "../tenancy"
|
||||
const refresh = require("passport-oauth2-refresh")
|
||||
import { Config } from "../constants"
|
||||
import { getScopedConfig } from "../db"
|
||||
import { getGlobalDB } from "../context"
|
||||
import { Cookie } from "../constants"
|
||||
import { getSessionsForUser, invalidateSessions } from "../security/sessions"
|
||||
import {
|
||||
authenticated,
|
||||
csrf,
|
||||
google,
|
||||
jwt as jwtPassport,
|
||||
local,
|
||||
authenticated,
|
||||
tenancy,
|
||||
csrf,
|
||||
oidc,
|
||||
google,
|
||||
tenancy,
|
||||
} from "../middleware"
|
||||
import * as userCache from "../cache/user"
|
||||
import { invalidateUser } from "../cache/user"
|
||||
import { User } from "@budibase/types"
|
||||
import {
|
||||
ConfigType,
|
||||
GoogleInnerConfig,
|
||||
OIDCInnerConfig,
|
||||
PlatformLogoutOpts,
|
||||
SSOProviderType,
|
||||
User,
|
||||
} from "@budibase/types"
|
||||
import { logAlert } from "../logging"
|
||||
import * as events from "../events"
|
||||
import * as configs from "../configs"
|
||||
import { clearCookie, getCookie } from "../utils"
|
||||
import { ssoSaveUserNoOp } from "../middleware/passport/sso/sso"
|
||||
import env from "../environment"
|
||||
|
||||
const refresh = require("passport-oauth2-refresh")
|
||||
export {
|
||||
auditLog,
|
||||
authError,
|
||||
|
@ -39,7 +53,7 @@ export const jwt = require("jsonwebtoken")
|
|||
_passport.use(new LocalStrategy(local.options, local.authenticate))
|
||||
if (jwtPassport.options.secretOrKey) {
|
||||
_passport.use(new JwtStrategy(jwtPassport.options, jwtPassport.authenticate))
|
||||
} else {
|
||||
} else if (!env.DISABLE_JWT_WARNING) {
|
||||
logAlert("No JWT Secret supplied, cannot configure JWT strategy")
|
||||
}
|
||||
|
||||
|
@ -58,11 +72,10 @@ _passport.deserializeUser(async (user: User, done: any) => {
|
|||
})
|
||||
|
||||
async function refreshOIDCAccessToken(
|
||||
db: any,
|
||||
chosenConfig: any,
|
||||
chosenConfig: OIDCInnerConfig,
|
||||
refreshToken: string
|
||||
) {
|
||||
const callbackUrl = await oidc.getCallbackUrl(db, chosenConfig)
|
||||
): Promise<RefreshResponse> {
|
||||
const callbackUrl = await oidc.getCallbackUrl()
|
||||
let enrichedConfig: any
|
||||
let strategy: any
|
||||
|
||||
|
@ -71,7 +84,7 @@ async function refreshOIDCAccessToken(
|
|||
if (!enrichedConfig) {
|
||||
throw new Error("OIDC Config contents invalid")
|
||||
}
|
||||
strategy = await oidc.strategyFactory(enrichedConfig)
|
||||
strategy = await oidc.strategyFactory(enrichedConfig, ssoSaveUserNoOp)
|
||||
} catch (err) {
|
||||
console.error(err)
|
||||
throw new Error("Could not refresh OAuth Token")
|
||||
|
@ -85,7 +98,7 @@ async function refreshOIDCAccessToken(
|
|||
|
||||
return new Promise(resolve => {
|
||||
refresh.requestNewAccessToken(
|
||||
Config.OIDC,
|
||||
ConfigType.OIDC,
|
||||
refreshToken,
|
||||
(err: any, accessToken: string, refreshToken: any, params: any) => {
|
||||
resolve({ err, accessToken, refreshToken, params })
|
||||
|
@ -95,15 +108,18 @@ async function refreshOIDCAccessToken(
|
|||
}
|
||||
|
||||
async function refreshGoogleAccessToken(
|
||||
db: any,
|
||||
config: any,
|
||||
config: GoogleInnerConfig,
|
||||
refreshToken: any
|
||||
) {
|
||||
let callbackUrl = await google.getCallbackUrl(db, config)
|
||||
): Promise<RefreshResponse> {
|
||||
let callbackUrl = await google.getCallbackUrl(config)
|
||||
|
||||
let strategy
|
||||
try {
|
||||
strategy = await google.strategyFactory(config, callbackUrl)
|
||||
strategy = await google.strategyFactory(
|
||||
config,
|
||||
callbackUrl,
|
||||
ssoSaveUserNoOp
|
||||
)
|
||||
} catch (err: any) {
|
||||
console.error(err)
|
||||
throw new Error(
|
||||
|
@ -115,7 +131,7 @@ async function refreshGoogleAccessToken(
|
|||
|
||||
return new Promise(resolve => {
|
||||
refresh.requestNewAccessToken(
|
||||
Config.GOOGLE,
|
||||
ConfigType.GOOGLE,
|
||||
refreshToken,
|
||||
(err: any, accessToken: string, refreshToken: string, params: any) => {
|
||||
resolve({ err, accessToken, refreshToken, params })
|
||||
|
@ -124,43 +140,41 @@ async function refreshGoogleAccessToken(
|
|||
})
|
||||
}
|
||||
|
||||
export async function refreshOAuthToken(
|
||||
refreshToken: string,
|
||||
configType: string,
|
||||
configId: string
|
||||
) {
|
||||
const db = getGlobalDB()
|
||||
|
||||
const config = await getScopedConfig(db, {
|
||||
type: configType,
|
||||
group: {},
|
||||
})
|
||||
|
||||
let chosenConfig = {}
|
||||
let refreshResponse
|
||||
if (configType === Config.OIDC) {
|
||||
// configId - retrieved from cookie.
|
||||
chosenConfig = config.configs.filter((c: any) => c.uuid === configId)[0]
|
||||
if (!chosenConfig) {
|
||||
throw new Error("Invalid OIDC configuration")
|
||||
}
|
||||
refreshResponse = await refreshOIDCAccessToken(
|
||||
db,
|
||||
chosenConfig,
|
||||
refreshToken
|
||||
)
|
||||
} else {
|
||||
chosenConfig = config
|
||||
refreshResponse = await refreshGoogleAccessToken(
|
||||
db,
|
||||
chosenConfig,
|
||||
refreshToken
|
||||
)
|
||||
interface RefreshResponse {
|
||||
err?: {
|
||||
data?: string
|
||||
}
|
||||
|
||||
return refreshResponse
|
||||
accessToken?: string
|
||||
refreshToken?: string
|
||||
params?: any
|
||||
}
|
||||
|
||||
export async function refreshOAuthToken(
|
||||
refreshToken: string,
|
||||
providerType: SSOProviderType,
|
||||
configId?: string
|
||||
): Promise<RefreshResponse> {
|
||||
switch (providerType) {
|
||||
case SSOProviderType.OIDC:
|
||||
if (!configId) {
|
||||
return { err: { data: "OIDC config id not provided" } }
|
||||
}
|
||||
const oidcConfig = await configs.getOIDCConfigById(configId)
|
||||
if (!oidcConfig) {
|
||||
return { err: { data: "OIDC configuration not found" } }
|
||||
}
|
||||
return refreshOIDCAccessToken(oidcConfig, refreshToken)
|
||||
case SSOProviderType.GOOGLE:
|
||||
let googleConfig = await configs.getGoogleConfig()
|
||||
if (!googleConfig) {
|
||||
return { err: { data: "Google configuration not found" } }
|
||||
}
|
||||
return refreshGoogleAccessToken(googleConfig, refreshToken)
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: Refactor to use user save function instead to prevent the need for
|
||||
// manually saving and invalidating on callback
|
||||
export async function updateUserOAuth(userId: string, oAuthConfig: any) {
|
||||
const details = {
|
||||
accessToken: oAuthConfig.accessToken,
|
||||
|
@ -188,3 +202,32 @@ export async function updateUserOAuth(userId: string, oAuthConfig: any) {
|
|||
console.error("Could not update OAuth details for current user", e)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Logs a user out from budibase. Re-used across account portal and builder.
|
||||
*/
|
||||
export async function platformLogout(opts: PlatformLogoutOpts) {
|
||||
const ctx = opts.ctx
|
||||
const userId = opts.userId
|
||||
const keepActiveSession = opts.keepActiveSession
|
||||
|
||||
if (!ctx) throw new Error("Koa context must be supplied to logout.")
|
||||
|
||||
const currentSession = getCookie(ctx, Cookie.Auth)
|
||||
let sessions = await getSessionsForUser(userId)
|
||||
|
||||
if (keepActiveSession) {
|
||||
sessions = sessions.filter(
|
||||
session => session.sessionId !== currentSession.sessionId
|
||||
)
|
||||
} else {
|
||||
// clear cookies
|
||||
clearCookie(ctx, Cookie.Auth)
|
||||
clearCookie(ctx, Cookie.CurrentApp)
|
||||
}
|
||||
|
||||
const sessionIds = sessions.map(({ sessionId }) => sessionId)
|
||||
await invalidateSessions(userId, { sessionIds, reason: "logout" })
|
||||
await events.auth.logout(ctx.user?.email)
|
||||
await userCache.invalidateUser(userId)
|
||||
}
|
||||
|
|
|
@ -0,0 +1,13 @@
|
|||
import { structures, testEnv } from "../../../tests"
|
||||
import * as auth from "../auth"
|
||||
import * as events from "../../events"
|
||||
|
||||
describe("platformLogout", () => {
|
||||
it("should call platform logout", async () => {
|
||||
await testEnv.withTenant(async () => {
|
||||
const ctx = structures.koa.newContext()
|
||||
await auth.platformLogout({ ctx, userId: "test" })
|
||||
expect(events.auth.logout).toBeCalledTimes(1)
|
||||
})
|
||||
})
|
||||
})
|
|
@ -1,6 +1,6 @@
|
|||
import { getAppClient } from "../redis/init"
|
||||
import { doWithDB, DocumentType } from "../db"
|
||||
import { Database } from "@budibase/types"
|
||||
import { Database, App } from "@budibase/types"
|
||||
|
||||
const AppState = {
|
||||
INVALID: "invalid",
|
||||
|
@ -65,7 +65,7 @@ export async function getAppMetadata(appId: string) {
|
|||
if (isInvalid(metadata)) {
|
||||
throw { status: 404, message: "No app metadata found" }
|
||||
}
|
||||
return metadata
|
||||
return metadata as App
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -1,61 +0,0 @@
|
|||
require("../../../tests")
|
||||
const { Writethrough } = require("../writethrough")
|
||||
const { getDB } = require("../../db")
|
||||
const tk = require("timekeeper")
|
||||
const { structures } = require("../../../tests")
|
||||
|
||||
const START_DATE = Date.now()
|
||||
tk.freeze(START_DATE)
|
||||
|
||||
|
||||
const DELAY = 5000
|
||||
|
||||
const db = getDB(structures.db.id())
|
||||
const db2 = getDB(structures.db.id())
|
||||
const writethrough = new Writethrough(db, DELAY), writethrough2 = new Writethrough(db2, DELAY)
|
||||
|
||||
describe("writethrough", () => {
|
||||
describe("put", () => {
|
||||
let first
|
||||
it("should be able to store, will go to DB", async () => {
|
||||
const response = await writethrough.put({ _id: "test", value: 1 })
|
||||
const output = await db.get(response.id)
|
||||
first = output
|
||||
expect(output.value).toBe(1)
|
||||
})
|
||||
|
||||
it("second put shouldn't update DB", async () => {
|
||||
const response = await writethrough.put({ ...first, value: 2 })
|
||||
const output = await db.get(response.id)
|
||||
expect(first._rev).toBe(output._rev)
|
||||
expect(output.value).toBe(1)
|
||||
})
|
||||
|
||||
it("should put it again after delay period", async () => {
|
||||
tk.freeze(START_DATE + DELAY + 1)
|
||||
const response = await writethrough.put({ ...first, value: 3 })
|
||||
const output = await db.get(response.id)
|
||||
expect(response.rev).not.toBe(first._rev)
|
||||
expect(output.value).toBe(3)
|
||||
})
|
||||
})
|
||||
|
||||
describe("get", () => {
|
||||
it("should be able to retrieve", async () => {
|
||||
const response = await writethrough.get("test")
|
||||
expect(response.value).toBe(3)
|
||||
})
|
||||
})
|
||||
|
||||
describe("same doc, different databases (tenancy)", () => {
|
||||
it("should be able to two different databases", async () => {
|
||||
const resp1 = await writethrough.put({ _id: "db1", value: "first" })
|
||||
const resp2 = await writethrough2.put({ _id: "db1", value: "second" })
|
||||
expect(resp1.rev).toBeDefined()
|
||||
expect(resp2.rev).toBeDefined()
|
||||
expect((await db.get("db1")).value).toBe("first")
|
||||
expect((await db2.get("db1")).value).toBe("second")
|
||||
})
|
||||
})
|
||||
})
|
||||
|
|
@ -0,0 +1,73 @@
|
|||
import { structures, DBTestConfiguration } from "../../../tests"
|
||||
import { Writethrough } from "../writethrough"
|
||||
import { getDB } from "../../db"
|
||||
import tk from "timekeeper"
|
||||
|
||||
const START_DATE = Date.now()
|
||||
tk.freeze(START_DATE)
|
||||
|
||||
const DELAY = 5000
|
||||
|
||||
describe("writethrough", () => {
|
||||
const config = new DBTestConfiguration()
|
||||
|
||||
const db = getDB(structures.db.id())
|
||||
const db2 = getDB(structures.db.id())
|
||||
|
||||
const writethrough = new Writethrough(db, DELAY)
|
||||
const writethrough2 = new Writethrough(db2, DELAY)
|
||||
|
||||
describe("put", () => {
|
||||
let first: any
|
||||
|
||||
it("should be able to store, will go to DB", async () => {
|
||||
await config.doInTenant(async () => {
|
||||
const response = await writethrough.put({ _id: "test", value: 1 })
|
||||
const output = await db.get(response.id)
|
||||
first = output
|
||||
expect(output.value).toBe(1)
|
||||
})
|
||||
})
|
||||
|
||||
it("second put shouldn't update DB", async () => {
|
||||
await config.doInTenant(async () => {
|
||||
const response = await writethrough.put({ ...first, value: 2 })
|
||||
const output = await db.get(response.id)
|
||||
expect(first._rev).toBe(output._rev)
|
||||
expect(output.value).toBe(1)
|
||||
})
|
||||
})
|
||||
|
||||
it("should put it again after delay period", async () => {
|
||||
await config.doInTenant(async () => {
|
||||
tk.freeze(START_DATE + DELAY + 1)
|
||||
const response = await writethrough.put({ ...first, value: 3 })
|
||||
const output = await db.get(response.id)
|
||||
expect(response.rev).not.toBe(first._rev)
|
||||
expect(output.value).toBe(3)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe("get", () => {
|
||||
it("should be able to retrieve", async () => {
|
||||
await config.doInTenant(async () => {
|
||||
const response = await writethrough.get("test")
|
||||
expect(response.value).toBe(3)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe("same doc, different databases (tenancy)", () => {
|
||||
it("should be able to two different databases", async () => {
|
||||
await config.doInTenant(async () => {
|
||||
const resp1 = await writethrough.put({ _id: "db1", value: "first" })
|
||||
const resp2 = await writethrough2.put({ _id: "db1", value: "second" })
|
||||
expect(resp1.rev).toBeDefined()
|
||||
expect(resp2.rev).toBeDefined()
|
||||
expect((await db.get("db1")).value).toBe("first")
|
||||
expect((await db2.get("db1")).value).toBe("second")
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
|
@ -1,8 +1,9 @@
|
|||
import * as redis from "../redis/init"
|
||||
import { getTenantId, lookupTenantId, doWithGlobalDB } from "../tenancy"
|
||||
import * as tenancy from "../tenancy"
|
||||
import * as context from "../context"
|
||||
import * as platform from "../platform"
|
||||
import env from "../environment"
|
||||
import * as accounts from "../cloud/accounts"
|
||||
import { Database } from "@budibase/types"
|
||||
import * as accounts from "../accounts"
|
||||
|
||||
const EXPIRY_SECONDS = 3600
|
||||
|
||||
|
@ -10,7 +11,8 @@ const EXPIRY_SECONDS = 3600
|
|||
* The default populate user function
|
||||
*/
|
||||
async function populateFromDB(userId: string, tenantId: string) {
|
||||
const user = await doWithGlobalDB(tenantId, (db: Database) => db.get(userId))
|
||||
const db = tenancy.getTenantDB(tenantId)
|
||||
const user = await db.get(userId)
|
||||
user.budibaseAccess = true
|
||||
if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) {
|
||||
const account = await accounts.getAccount(user.email)
|
||||
|
@ -42,9 +44,9 @@ export async function getUser(
|
|||
}
|
||||
if (!tenantId) {
|
||||
try {
|
||||
tenantId = getTenantId()
|
||||
tenantId = context.getTenantId()
|
||||
} catch (err) {
|
||||
tenantId = await lookupTenantId(userId)
|
||||
tenantId = await platform.users.lookupTenantId(userId)
|
||||
}
|
||||
}
|
||||
const client = await redis.getUserClient()
|
||||
|
|
|
@ -0,0 +1,244 @@
|
|||
import {
|
||||
Config,
|
||||
ConfigType,
|
||||
GoogleConfig,
|
||||
GoogleInnerConfig,
|
||||
OIDCConfig,
|
||||
OIDCInnerConfig,
|
||||
SettingsConfig,
|
||||
SettingsInnerConfig,
|
||||
SMTPConfig,
|
||||
SMTPInnerConfig,
|
||||
} from "@budibase/types"
|
||||
import { DocumentType, SEPARATOR } from "../constants"
|
||||
import { CacheKey, TTL, withCache } from "../cache"
|
||||
import * as context from "../context"
|
||||
import env from "../environment"
|
||||
import environment from "../environment"
|
||||
|
||||
// UTILS
|
||||
|
||||
/**
|
||||
* Generates a new configuration ID.
|
||||
* @returns {string} The new configuration ID which the config doc can be stored under.
|
||||
*/
|
||||
export function generateConfigID(type: ConfigType) {
|
||||
return `${DocumentType.CONFIG}${SEPARATOR}${type}`
|
||||
}
|
||||
|
||||
export async function getConfig<T extends Config>(
|
||||
type: ConfigType
|
||||
): Promise<T | undefined> {
|
||||
const db = context.getGlobalDB()
|
||||
try {
|
||||
// await to catch error
|
||||
const config = (await db.get(generateConfigID(type))) as T
|
||||
return config
|
||||
} catch (e: any) {
|
||||
if (e.status === 404) {
|
||||
return
|
||||
}
|
||||
throw e
|
||||
}
|
||||
}
|
||||
|
||||
export async function save(
|
||||
config: Config
|
||||
): Promise<{ id: string; rev: string }> {
|
||||
const db = context.getGlobalDB()
|
||||
return db.put(config)
|
||||
}
|
||||
|
||||
// SETTINGS
|
||||
|
||||
export async function getSettingsConfigDoc(): Promise<SettingsConfig> {
|
||||
let config = await getConfig<SettingsConfig>(ConfigType.SETTINGS)
|
||||
|
||||
if (!config) {
|
||||
config = {
|
||||
_id: generateConfigID(ConfigType.SETTINGS),
|
||||
type: ConfigType.SETTINGS,
|
||||
config: {},
|
||||
}
|
||||
}
|
||||
|
||||
// overridden fields
|
||||
config.config.platformUrl = await getPlatformUrl({
|
||||
tenantAware: true,
|
||||
config: config.config,
|
||||
})
|
||||
config.config.analyticsEnabled = await analyticsEnabled({
|
||||
config: config.config,
|
||||
})
|
||||
|
||||
return config
|
||||
}
|
||||
|
||||
export async function getSettingsConfig(): Promise<SettingsInnerConfig> {
|
||||
return (await getSettingsConfigDoc()).config
|
||||
}
|
||||
|
||||
export async function getPlatformUrl(
|
||||
opts: { tenantAware: boolean; config?: SettingsInnerConfig } = {
|
||||
tenantAware: true,
|
||||
}
|
||||
) {
|
||||
let platformUrl = env.PLATFORM_URL || "http://localhost:10000"
|
||||
|
||||
if (!env.SELF_HOSTED && env.MULTI_TENANCY && opts.tenantAware) {
|
||||
// cloud and multi tenant - add the tenant to the default platform url
|
||||
const tenantId = context.getTenantId()
|
||||
if (!platformUrl.includes("localhost:")) {
|
||||
platformUrl = platformUrl.replace("://", `://${tenantId}.`)
|
||||
}
|
||||
} else if (env.SELF_HOSTED) {
|
||||
const config = opts?.config
|
||||
? opts.config
|
||||
: // direct to db to prevent infinite loop
|
||||
(await getConfig<SettingsConfig>(ConfigType.SETTINGS))?.config
|
||||
if (config?.platformUrl) {
|
||||
platformUrl = config.platformUrl
|
||||
}
|
||||
}
|
||||
|
||||
return platformUrl
|
||||
}
|
||||
|
||||
export const analyticsEnabled = async (opts?: {
|
||||
config?: SettingsInnerConfig
|
||||
}) => {
|
||||
// cloud - always use the environment variable
|
||||
if (!env.SELF_HOSTED) {
|
||||
return !!env.ENABLE_ANALYTICS
|
||||
}
|
||||
|
||||
// self host - prefer the settings doc
|
||||
// use cache as events have high throughput
|
||||
const enabledInDB = await withCache(
|
||||
CacheKey.ANALYTICS_ENABLED,
|
||||
TTL.ONE_DAY,
|
||||
async () => {
|
||||
const config = opts?.config
|
||||
? opts.config
|
||||
: // direct to db to prevent infinite loop
|
||||
(await getConfig<SettingsConfig>(ConfigType.SETTINGS))?.config
|
||||
|
||||
// need to do explicit checks in case the field is not set
|
||||
if (config?.analyticsEnabled === false) {
|
||||
return false
|
||||
} else if (config?.analyticsEnabled === true) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
if (enabledInDB !== undefined) {
|
||||
return enabledInDB
|
||||
}
|
||||
|
||||
// fallback to the environment variable
|
||||
// explicitly check for 0 or false here, undefined or otherwise is treated as true
|
||||
const envEnabled: any = env.ENABLE_ANALYTICS
|
||||
if (envEnabled === 0 || envEnabled === false) {
|
||||
return false
|
||||
} else {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
// GOOGLE
|
||||
|
||||
async function getGoogleConfigDoc(): Promise<GoogleConfig | undefined> {
|
||||
return await getConfig<GoogleConfig>(ConfigType.GOOGLE)
|
||||
}
|
||||
|
||||
export async function getGoogleConfig(): Promise<
|
||||
GoogleInnerConfig | undefined
|
||||
> {
|
||||
const config = await getGoogleConfigDoc()
|
||||
return config?.config
|
||||
}
|
||||
|
||||
export async function getGoogleDatasourceConfig(): Promise<
|
||||
GoogleInnerConfig | undefined
|
||||
> {
|
||||
if (!env.SELF_HOSTED) {
|
||||
// always use the env vars in cloud
|
||||
return getDefaultGoogleConfig()
|
||||
}
|
||||
|
||||
// prefer the config in self-host
|
||||
let config = await getGoogleConfig()
|
||||
|
||||
// fallback to env vars
|
||||
if (!config || !config.activated) {
|
||||
config = getDefaultGoogleConfig()
|
||||
}
|
||||
|
||||
return config
|
||||
}
|
||||
|
||||
export function getDefaultGoogleConfig(): GoogleInnerConfig | undefined {
|
||||
if (environment.GOOGLE_CLIENT_ID && environment.GOOGLE_CLIENT_SECRET) {
|
||||
return {
|
||||
clientID: environment.GOOGLE_CLIENT_ID!,
|
||||
clientSecret: environment.GOOGLE_CLIENT_SECRET!,
|
||||
activated: true,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// OIDC
|
||||
|
||||
async function getOIDCConfigDoc(): Promise<OIDCConfig | undefined> {
|
||||
return getConfig<OIDCConfig>(ConfigType.OIDC)
|
||||
}
|
||||
|
||||
export async function getOIDCConfig(): Promise<OIDCInnerConfig | undefined> {
|
||||
const config = (await getOIDCConfigDoc())?.config
|
||||
// default to the 0th config
|
||||
return config?.configs && config.configs[0]
|
||||
}
|
||||
|
||||
/**
|
||||
* @param configId The config id of the inner config to retrieve
|
||||
*/
|
||||
export async function getOIDCConfigById(
|
||||
configId: string
|
||||
): Promise<OIDCInnerConfig | undefined> {
|
||||
const config = (await getConfig<OIDCConfig>(ConfigType.OIDC))?.config
|
||||
return config && config.configs.filter((c: any) => c.uuid === configId)[0]
|
||||
}
|
||||
|
||||
// SMTP
|
||||
|
||||
export async function getSMTPConfigDoc(): Promise<SMTPConfig | undefined> {
|
||||
return getConfig<SMTPConfig>(ConfigType.SMTP)
|
||||
}
|
||||
|
||||
export async function getSMTPConfig(
|
||||
isAutomation?: boolean
|
||||
): Promise<SMTPInnerConfig | undefined> {
|
||||
const config = await getSMTPConfigDoc()
|
||||
if (config) {
|
||||
return config.config
|
||||
}
|
||||
|
||||
// always allow fallback in self host
|
||||
// in cloud don't allow for automations
|
||||
const allowFallback = env.SELF_HOSTED || !isAutomation
|
||||
|
||||
// Use an SMTP fallback configuration from env variables
|
||||
if (env.SMTP_FALLBACK_ENABLED && allowFallback) {
|
||||
return {
|
||||
port: env.SMTP_PORT,
|
||||
host: env.SMTP_HOST!,
|
||||
secure: false,
|
||||
from: env.SMTP_FROM_ADDRESS!,
|
||||
auth: {
|
||||
user: env.SMTP_USER!,
|
||||
pass: env.SMTP_PASSWORD!,
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1 @@
|
|||
export * from "./configs"
|
|
@ -0,0 +1,116 @@
|
|||
import { DBTestConfiguration, generator, testEnv } from "../../../tests"
|
||||
import { ConfigType } from "@budibase/types"
|
||||
import env from "../../environment"
|
||||
import * as configs from "../configs"
|
||||
|
||||
const DEFAULT_URL = "http://localhost:10000"
|
||||
const ENV_URL = "http://env.com"
|
||||
|
||||
describe("configs", () => {
|
||||
const config = new DBTestConfiguration()
|
||||
|
||||
const setDbPlatformUrl = async (dbUrl: string) => {
|
||||
const settingsConfig = {
|
||||
_id: configs.generateConfigID(ConfigType.SETTINGS),
|
||||
type: ConfigType.SETTINGS,
|
||||
config: {
|
||||
platformUrl: dbUrl,
|
||||
},
|
||||
}
|
||||
await configs.save(settingsConfig)
|
||||
}
|
||||
|
||||
beforeEach(async () => {
|
||||
config.newTenant()
|
||||
})
|
||||
|
||||
describe("getPlatformUrl", () => {
|
||||
describe("self host", () => {
|
||||
beforeEach(async () => {
|
||||
testEnv.selfHosted()
|
||||
})
|
||||
|
||||
it("gets the default url", async () => {
|
||||
await config.doInTenant(async () => {
|
||||
const url = await configs.getPlatformUrl()
|
||||
expect(url).toBe(DEFAULT_URL)
|
||||
})
|
||||
})
|
||||
|
||||
it("gets the platform url from the environment", async () => {
|
||||
await config.doInTenant(async () => {
|
||||
env._set("PLATFORM_URL", ENV_URL)
|
||||
const url = await configs.getPlatformUrl()
|
||||
expect(url).toBe(ENV_URL)
|
||||
})
|
||||
})
|
||||
|
||||
it("gets the platform url from the database", async () => {
|
||||
await config.doInTenant(async () => {
|
||||
const dbUrl = generator.url()
|
||||
await setDbPlatformUrl(dbUrl)
|
||||
const url = await configs.getPlatformUrl()
|
||||
expect(url).toBe(dbUrl)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe("cloud", () => {
|
||||
function getTenantAwareUrl() {
|
||||
return `http://${config.tenantId}.env.com`
|
||||
}
|
||||
|
||||
beforeEach(async () => {
|
||||
testEnv.cloudHosted()
|
||||
testEnv.multiTenant()
|
||||
|
||||
env._set("PLATFORM_URL", ENV_URL)
|
||||
})
|
||||
|
||||
it("gets the platform url from the environment without tenancy", async () => {
|
||||
await config.doInTenant(async () => {
|
||||
const url = await configs.getPlatformUrl({ tenantAware: false })
|
||||
expect(url).toBe(ENV_URL)
|
||||
})
|
||||
})
|
||||
|
||||
it("gets the platform url from the environment with tenancy", async () => {
|
||||
await config.doInTenant(async () => {
|
||||
const url = await configs.getPlatformUrl()
|
||||
expect(url).toBe(getTenantAwareUrl())
|
||||
})
|
||||
})
|
||||
|
||||
it("never gets the platform url from the database", async () => {
|
||||
await config.doInTenant(async () => {
|
||||
await setDbPlatformUrl(generator.url())
|
||||
const url = await configs.getPlatformUrl()
|
||||
expect(url).toBe(getTenantAwareUrl())
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe("getSettingsConfig", () => {
|
||||
beforeAll(async () => {
|
||||
testEnv.selfHosted()
|
||||
env._set("PLATFORM_URL", "")
|
||||
})
|
||||
|
||||
it("returns the platform url with an existing config", async () => {
|
||||
await config.doInTenant(async () => {
|
||||
const dbUrl = generator.url()
|
||||
await setDbPlatformUrl(dbUrl)
|
||||
const config = await configs.getSettingsConfig()
|
||||
expect(config.platformUrl).toBe(dbUrl)
|
||||
})
|
||||
})
|
||||
|
||||
it("returns the platform url without an existing config", async () => {
|
||||
await config.doInTenant(async () => {
|
||||
const config = await configs.getSettingsConfig()
|
||||
expect(config.platformUrl).toBe(DEFAULT_URL)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
|
@ -68,6 +68,7 @@ export enum DocumentType {
|
|||
MEM_VIEW = "view",
|
||||
USER_FLAG = "flag",
|
||||
AUTOMATION_METADATA = "meta_au",
|
||||
AUDIT_LOG = "al",
|
||||
}
|
||||
|
||||
export const StaticDatabases = {
|
||||
|
@ -88,6 +89,9 @@ export const StaticDatabases = {
|
|||
install: "install",
|
||||
},
|
||||
},
|
||||
AUDIT_LOGS: {
|
||||
name: "audit-logs",
|
||||
},
|
||||
}
|
||||
|
||||
export const APP_PREFIX = DocumentType.APP + SEPARATOR
|
||||
|
|
|
@ -41,5 +41,6 @@ export enum Config {
|
|||
OIDC_LOGOS = "logos_oidc",
|
||||
}
|
||||
|
||||
export const MIN_VALID_DATE = new Date(-2147483647000)
|
||||
export const MAX_VALID_DATE = new Date(2147483647000)
|
||||
export const DEFAULT_TENANT_ID = "default"
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
import { AsyncLocalStorage } from "async_hooks"
|
||||
import { ContextMap } from "./mainContext"
|
||||
import { ContextMap } from "./types"
|
||||
|
||||
export default class Context {
|
||||
static storage = new AsyncLocalStorage<ContextMap>()
|
||||
|
|
|
@ -1,108 +0,0 @@
|
|||
import {
|
||||
getGlobalUserParams,
|
||||
getAllApps,
|
||||
doWithDB,
|
||||
StaticDatabases,
|
||||
} from "../db"
|
||||
import { doWithGlobalDB } from "../tenancy"
|
||||
import { App, Tenants, User, Database } from "@budibase/types"
|
||||
|
||||
const TENANT_DOC = StaticDatabases.PLATFORM_INFO.docs.tenants
|
||||
const PLATFORM_INFO_DB = StaticDatabases.PLATFORM_INFO.name
|
||||
|
||||
async function removeTenantFromInfoDB(tenantId: string) {
|
||||
try {
|
||||
await doWithDB(PLATFORM_INFO_DB, async (infoDb: Database) => {
|
||||
const tenants = (await infoDb.get(TENANT_DOC)) as Tenants
|
||||
tenants.tenantIds = tenants.tenantIds.filter(id => id !== tenantId)
|
||||
|
||||
await infoDb.put(tenants)
|
||||
})
|
||||
} catch (err) {
|
||||
console.error(`Error removing tenant ${tenantId} from info db`, err)
|
||||
throw err
|
||||
}
|
||||
}
|
||||
|
||||
export async function removeUserFromInfoDB(dbUser: User) {
|
||||
await doWithDB(PLATFORM_INFO_DB, async (infoDb: Database) => {
|
||||
const keys = [dbUser._id!, dbUser.email]
|
||||
const userDocs = await infoDb.allDocs({
|
||||
keys,
|
||||
include_docs: true,
|
||||
})
|
||||
const toDelete = userDocs.rows.map((row: any) => {
|
||||
return {
|
||||
...row.doc,
|
||||
_deleted: true,
|
||||
}
|
||||
})
|
||||
await infoDb.bulkDocs(toDelete)
|
||||
})
|
||||
}
|
||||
|
||||
async function removeUsersFromInfoDB(tenantId: string) {
|
||||
return doWithGlobalDB(tenantId, async (db: any) => {
|
||||
try {
|
||||
const allUsers = await db.allDocs(
|
||||
getGlobalUserParams(null, {
|
||||
include_docs: true,
|
||||
})
|
||||
)
|
||||
await doWithDB(PLATFORM_INFO_DB, async (infoDb: any) => {
|
||||
const allEmails = allUsers.rows.map((row: any) => row.doc.email)
|
||||
// get the id docs
|
||||
let keys = allUsers.rows.map((row: any) => row.id)
|
||||
// and the email docs
|
||||
keys = keys.concat(allEmails)
|
||||
// retrieve the docs and delete them
|
||||
const userDocs = await infoDb.allDocs({
|
||||
keys,
|
||||
include_docs: true,
|
||||
})
|
||||
const toDelete = userDocs.rows.map((row: any) => {
|
||||
return {
|
||||
...row.doc,
|
||||
_deleted: true,
|
||||
}
|
||||
})
|
||||
await infoDb.bulkDocs(toDelete)
|
||||
})
|
||||
} catch (err) {
|
||||
console.error(`Error removing tenant ${tenantId} users from info db`, err)
|
||||
throw err
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
async function removeGlobalDB(tenantId: string) {
|
||||
return doWithGlobalDB(tenantId, async (db: Database) => {
|
||||
try {
|
||||
await db.destroy()
|
||||
} catch (err) {
|
||||
console.error(`Error removing tenant ${tenantId} users from info db`, err)
|
||||
throw err
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
async function removeTenantApps(tenantId: string) {
|
||||
try {
|
||||
const apps = (await getAllApps({ all: true })) as App[]
|
||||
const destroyPromises = apps.map(app =>
|
||||
doWithDB(app.appId, (db: Database) => db.destroy())
|
||||
)
|
||||
await Promise.allSettled(destroyPromises)
|
||||
} catch (err) {
|
||||
console.error(`Error removing tenant ${tenantId} apps`, err)
|
||||
throw err
|
||||
}
|
||||
}
|
||||
|
||||
// can't live in tenancy package due to circular dependency on db/utils
|
||||
export async function deleteTenant(tenantId: string) {
|
||||
await removeTenantFromInfoDB(tenantId)
|
||||
await removeUsersFromInfoDB(tenantId)
|
||||
await removeGlobalDB(tenantId)
|
||||
await removeTenantApps(tenantId)
|
||||
}
|
|
@ -5,6 +5,8 @@ import {
|
|||
isCloudAccount,
|
||||
Account,
|
||||
AccountUserContext,
|
||||
UserContext,
|
||||
Ctx,
|
||||
} from "@budibase/types"
|
||||
import * as context from "."
|
||||
|
||||
|
@ -16,15 +18,22 @@ export function doInIdentityContext(identity: IdentityContext, task: any) {
|
|||
return context.doInIdentityContext(identity, task)
|
||||
}
|
||||
|
||||
export function doInUserContext(user: User, task: any) {
|
||||
const userContext: any = {
|
||||
// used in server/worker
|
||||
export function doInUserContext(user: User, ctx: Ctx, task: any) {
|
||||
const userContext: UserContext = {
|
||||
...user,
|
||||
_id: user._id as string,
|
||||
type: IdentityType.USER,
|
||||
hostInfo: {
|
||||
ipAddress: ctx.request.ip,
|
||||
// filled in by koa-useragent package
|
||||
userAgent: ctx.userAgent._agent.source,
|
||||
},
|
||||
}
|
||||
return doInIdentityContext(userContext, task)
|
||||
}
|
||||
|
||||
// used in account portal
|
||||
export function doInAccountContext(account: Account, task: any) {
|
||||
const _id = getAccountUserId(account)
|
||||
const tenantId = account.tenantId
|
||||
|
|
|
@ -11,13 +11,7 @@ import {
|
|||
DEFAULT_TENANT_ID,
|
||||
} from "../constants"
|
||||
import { Database, IdentityContext } from "@budibase/types"
|
||||
|
||||
export type ContextMap = {
|
||||
tenantId?: string
|
||||
appId?: string
|
||||
identity?: IdentityContext
|
||||
environmentVariables?: Record<string, string>
|
||||
}
|
||||
import { ContextMap } from "./types"
|
||||
|
||||
let TEST_APP_ID: string | null = null
|
||||
|
||||
|
@ -30,14 +24,23 @@ export function getGlobalDBName(tenantId?: string) {
|
|||
return baseGlobalDBName(tenantId)
|
||||
}
|
||||
|
||||
export function baseGlobalDBName(tenantId: string | undefined | null) {
|
||||
let dbName
|
||||
if (!tenantId || tenantId === DEFAULT_TENANT_ID) {
|
||||
dbName = StaticDatabases.GLOBAL.name
|
||||
} else {
|
||||
dbName = `${tenantId}${SEPARATOR}${StaticDatabases.GLOBAL.name}`
|
||||
export function getAuditLogDBName(tenantId?: string) {
|
||||
if (!tenantId) {
|
||||
tenantId = getTenantId()
|
||||
}
|
||||
if (tenantId === DEFAULT_TENANT_ID) {
|
||||
return StaticDatabases.AUDIT_LOGS.name
|
||||
} else {
|
||||
return `${tenantId}${SEPARATOR}${StaticDatabases.AUDIT_LOGS.name}`
|
||||
}
|
||||
}
|
||||
|
||||
export function baseGlobalDBName(tenantId: string | undefined | null) {
|
||||
if (!tenantId || tenantId === DEFAULT_TENANT_ID) {
|
||||
return StaticDatabases.GLOBAL.name
|
||||
} else {
|
||||
return `${tenantId}${SEPARATOR}${StaticDatabases.GLOBAL.name}`
|
||||
}
|
||||
return dbName
|
||||
}
|
||||
|
||||
export function isMultiTenant() {
|
||||
|
@ -228,6 +231,13 @@ export function getGlobalDB(): Database {
|
|||
return getDB(baseGlobalDBName(context?.tenantId))
|
||||
}
|
||||
|
||||
export function getAuditLogsDB(): Database {
|
||||
if (!getTenantId()) {
|
||||
throw new Error("No tenant ID found - cannot open audit log DB")
|
||||
}
|
||||
return getDB(getAuditLogDBName())
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the app database based on whatever the request
|
||||
* contained, dev or prod.
|
||||
|
|
|
@ -1,11 +1,14 @@
|
|||
require("../../../tests")
|
||||
import { testEnv } from "../../../tests"
|
||||
const context = require("../")
|
||||
const { DEFAULT_TENANT_ID } = require("../../constants")
|
||||
import env from "../../environment"
|
||||
|
||||
describe("context", () => {
|
||||
describe("doInTenant", () => {
|
||||
describe("single-tenancy", () => {
|
||||
beforeAll(() => {
|
||||
testEnv.singleTenant()
|
||||
})
|
||||
|
||||
it("defaults to the default tenant", () => {
|
||||
const tenantId = context.getTenantId()
|
||||
expect(tenantId).toBe(DEFAULT_TENANT_ID)
|
||||
|
@ -20,8 +23,8 @@ describe("context", () => {
|
|||
})
|
||||
|
||||
describe("multi-tenancy", () => {
|
||||
beforeEach(() => {
|
||||
env._set("MULTI_TENANCY", 1)
|
||||
beforeAll(() => {
|
||||
testEnv.multiTenant()
|
||||
})
|
||||
|
||||
it("fails when no tenant id is set", () => {
|
||||
|
|
|
@ -0,0 +1,9 @@
|
|||
import { IdentityContext } from "@budibase/types"
|
||||
|
||||
// keep this out of Budibase types, don't want to expose context info
|
||||
export type ContextMap = {
|
||||
tenantId?: string
|
||||
appId?: string
|
||||
identity?: IdentityContext
|
||||
environmentVariables?: Record<string, string>
|
||||
}
|
|
@ -1,7 +1,6 @@
|
|||
import env from "../environment"
|
||||
import { directCouchQuery, getPouchDB } from "./couch"
|
||||
import { directCouchQuery, DatabaseImpl } from "./couch"
|
||||
import { CouchFindOptions, Database } from "@budibase/types"
|
||||
import { DatabaseImpl } from "../db"
|
||||
|
||||
const dbList = new Set()
|
||||
|
||||
|
|
|
@ -7,3 +7,4 @@ export { default as Replication } from "./Replication"
|
|||
// exports to support old export structure
|
||||
export * from "../constants/db"
|
||||
export { getGlobalDBName, baseGlobalDBName } from "../context"
|
||||
export * from "./lucene"
|
||||
|
|
|
@ -0,0 +1,624 @@
|
|||
import fetch from "node-fetch"
|
||||
import { getCouchInfo } from "./couch"
|
||||
import { SearchFilters, Row } from "@budibase/types"
|
||||
|
||||
const QUERY_START_REGEX = /\d[0-9]*:/g
|
||||
|
||||
interface SearchResponse<T> {
|
||||
rows: T[] | any[]
|
||||
bookmark: string
|
||||
}
|
||||
|
||||
interface PaginatedSearchResponse<T> extends SearchResponse<T> {
|
||||
hasNextPage: boolean
|
||||
}
|
||||
|
||||
export type SearchParams<T> = {
|
||||
tableId?: string
|
||||
sort?: string
|
||||
sortOrder?: string
|
||||
sortType?: string
|
||||
limit?: number
|
||||
bookmark?: string
|
||||
version?: string
|
||||
indexer?: () => Promise<any>
|
||||
disableEscaping?: boolean
|
||||
rows?: T | Row[]
|
||||
}
|
||||
|
||||
export function removeKeyNumbering(key: any): string {
|
||||
if (typeof key === "string" && key.match(QUERY_START_REGEX) != null) {
|
||||
const parts = key.split(":")
|
||||
// remove the number
|
||||
parts.shift()
|
||||
return parts.join(":")
|
||||
} else {
|
||||
return key
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Class to build lucene query URLs.
|
||||
* Optionally takes a base lucene query object.
|
||||
*/
|
||||
export class QueryBuilder<T> {
|
||||
dbName: string
|
||||
index: string
|
||||
query: SearchFilters
|
||||
limit: number
|
||||
sort?: string
|
||||
bookmark?: string
|
||||
sortOrder: string
|
||||
sortType: string
|
||||
includeDocs: boolean
|
||||
version?: string
|
||||
indexBuilder?: () => Promise<any>
|
||||
noEscaping = false
|
||||
|
||||
constructor(dbName: string, index: string, base?: SearchFilters) {
|
||||
this.dbName = dbName
|
||||
this.index = index
|
||||
this.query = {
|
||||
allOr: false,
|
||||
string: {},
|
||||
fuzzy: {},
|
||||
range: {},
|
||||
equal: {},
|
||||
notEqual: {},
|
||||
empty: {},
|
||||
notEmpty: {},
|
||||
oneOf: {},
|
||||
contains: {},
|
||||
notContains: {},
|
||||
containsAny: {},
|
||||
...base,
|
||||
}
|
||||
this.limit = 50
|
||||
this.sortOrder = "ascending"
|
||||
this.sortType = "string"
|
||||
this.includeDocs = true
|
||||
}
|
||||
|
||||
disableEscaping() {
|
||||
this.noEscaping = true
|
||||
return this
|
||||
}
|
||||
|
||||
setIndexBuilder(builderFn: () => Promise<any>) {
|
||||
this.indexBuilder = builderFn
|
||||
return this
|
||||
}
|
||||
|
||||
setVersion(version?: string) {
|
||||
if (version != null) {
|
||||
this.version = version
|
||||
}
|
||||
return this
|
||||
}
|
||||
|
||||
setTable(tableId: string) {
|
||||
this.query.equal!.tableId = tableId
|
||||
return this
|
||||
}
|
||||
|
||||
setLimit(limit?: number) {
|
||||
if (limit != null) {
|
||||
this.limit = limit
|
||||
}
|
||||
return this
|
||||
}
|
||||
|
||||
setSort(sort?: string) {
|
||||
if (sort != null) {
|
||||
this.sort = sort
|
||||
}
|
||||
return this
|
||||
}
|
||||
|
||||
setSortOrder(sortOrder?: string) {
|
||||
if (sortOrder != null) {
|
||||
this.sortOrder = sortOrder
|
||||
}
|
||||
return this
|
||||
}
|
||||
|
||||
setSortType(sortType?: string) {
|
||||
if (sortType != null) {
|
||||
this.sortType = sortType
|
||||
}
|
||||
return this
|
||||
}
|
||||
|
||||
setBookmark(bookmark?: string) {
|
||||
if (bookmark != null) {
|
||||
this.bookmark = bookmark
|
||||
}
|
||||
return this
|
||||
}
|
||||
|
||||
excludeDocs() {
|
||||
this.includeDocs = false
|
||||
return this
|
||||
}
|
||||
|
||||
addString(key: string, partial: string) {
|
||||
this.query.string![key] = partial
|
||||
return this
|
||||
}
|
||||
|
||||
addFuzzy(key: string, fuzzy: string) {
|
||||
this.query.fuzzy![key] = fuzzy
|
||||
return this
|
||||
}
|
||||
|
||||
addRange(key: string, low: string | number, high: string | number) {
|
||||
this.query.range![key] = {
|
||||
low,
|
||||
high,
|
||||
}
|
||||
return this
|
||||
}
|
||||
|
||||
addEqual(key: string, value: any) {
|
||||
this.query.equal![key] = value
|
||||
return this
|
||||
}
|
||||
|
||||
addNotEqual(key: string, value: any) {
|
||||
this.query.notEqual![key] = value
|
||||
return this
|
||||
}
|
||||
|
||||
addEmpty(key: string, value: any) {
|
||||
this.query.empty![key] = value
|
||||
return this
|
||||
}
|
||||
|
||||
addNotEmpty(key: string, value: any) {
|
||||
this.query.notEmpty![key] = value
|
||||
return this
|
||||
}
|
||||
|
||||
addOneOf(key: string, value: any) {
|
||||
this.query.oneOf![key] = value
|
||||
return this
|
||||
}
|
||||
|
||||
addContains(key: string, value: any) {
|
||||
this.query.contains![key] = value
|
||||
return this
|
||||
}
|
||||
|
||||
addNotContains(key: string, value: any) {
|
||||
this.query.notContains![key] = value
|
||||
return this
|
||||
}
|
||||
|
||||
addContainsAny(key: string, value: any) {
|
||||
this.query.containsAny![key] = value
|
||||
return this
|
||||
}
|
||||
|
||||
handleSpaces(input: string) {
|
||||
if (this.noEscaping) {
|
||||
return input
|
||||
} else {
|
||||
return input.replace(/ /g, "_")
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Preprocesses a value before going into a lucene search.
|
||||
* Transforms strings to lowercase and wraps strings and bools in quotes.
|
||||
* @param value The value to process
|
||||
* @param options The preprocess options
|
||||
* @returns {string|*}
|
||||
*/
|
||||
preprocess(value: any, { escape, lowercase, wrap, type }: any = {}) {
|
||||
const hasVersion = !!this.version
|
||||
// Determine if type needs wrapped
|
||||
const originalType = typeof value
|
||||
// Convert to lowercase
|
||||
if (value && lowercase) {
|
||||
value = value.toLowerCase ? value.toLowerCase() : value
|
||||
}
|
||||
// Escape characters
|
||||
if (!this.noEscaping && escape && originalType === "string") {
|
||||
value = `${value}`.replace(/[ #+\-&|!(){}\]^"~*?:\\]/g, "\\$&")
|
||||
}
|
||||
|
||||
// Wrap in quotes
|
||||
if (originalType === "string" && !isNaN(value) && !type) {
|
||||
value = `"${value}"`
|
||||
} else if (hasVersion && wrap) {
|
||||
value = originalType === "number" ? value : `"${value}"`
|
||||
}
|
||||
return value
|
||||
}
|
||||
|
||||
buildSearchQuery() {
|
||||
const builder = this
|
||||
let allOr = this.query && this.query.allOr
|
||||
let query = allOr ? "" : "*:*"
|
||||
const allPreProcessingOpts = { escape: true, lowercase: true, wrap: true }
|
||||
let tableId
|
||||
if (this.query.equal!.tableId) {
|
||||
tableId = this.query.equal!.tableId
|
||||
delete this.query.equal!.tableId
|
||||
}
|
||||
|
||||
const equal = (key: string, value: any) => {
|
||||
// 0 evaluates to false, which means we would return all rows if we don't check it
|
||||
if (!value && value !== 0) {
|
||||
return null
|
||||
}
|
||||
return `${key}:${builder.preprocess(value, allPreProcessingOpts)}`
|
||||
}
|
||||
|
||||
const contains = (key: string, value: any, mode = "AND") => {
|
||||
if (Array.isArray(value) && value.length === 0) {
|
||||
return null
|
||||
}
|
||||
if (!Array.isArray(value)) {
|
||||
return `${key}:${value}`
|
||||
}
|
||||
let statement = `${builder.preprocess(value[0], { escape: true })}`
|
||||
for (let i = 1; i < value.length; i++) {
|
||||
statement += ` ${mode} ${builder.preprocess(value[i], {
|
||||
escape: true,
|
||||
})}`
|
||||
}
|
||||
return `${key}:(${statement})`
|
||||
}
|
||||
|
||||
const notContains = (key: string, value: any) => {
|
||||
// @ts-ignore
|
||||
const allPrefix = allOr === "" ? "*:* AND" : ""
|
||||
return allPrefix + "NOT " + contains(key, value)
|
||||
}
|
||||
|
||||
const containsAny = (key: string, value: any) => {
|
||||
return contains(key, value, "OR")
|
||||
}
|
||||
|
||||
const oneOf = (key: string, value: any) => {
|
||||
if (!Array.isArray(value)) {
|
||||
if (typeof value === "string") {
|
||||
value = value.split(",")
|
||||
} else {
|
||||
return ""
|
||||
}
|
||||
}
|
||||
let orStatement = `${builder.preprocess(value[0], allPreProcessingOpts)}`
|
||||
for (let i = 1; i < value.length; i++) {
|
||||
orStatement += ` OR ${builder.preprocess(
|
||||
value[i],
|
||||
allPreProcessingOpts
|
||||
)}`
|
||||
}
|
||||
return `${key}:(${orStatement})`
|
||||
}
|
||||
|
||||
function build(structure: any, queryFn: any) {
|
||||
for (let [key, value] of Object.entries(structure)) {
|
||||
// check for new format - remove numbering if needed
|
||||
key = removeKeyNumbering(key)
|
||||
key = builder.preprocess(builder.handleSpaces(key), {
|
||||
escape: true,
|
||||
})
|
||||
const expression = queryFn(key, value)
|
||||
if (expression == null) {
|
||||
continue
|
||||
}
|
||||
if (query.length > 0) {
|
||||
query += ` ${allOr ? "OR" : "AND"} `
|
||||
}
|
||||
query += expression
|
||||
}
|
||||
}
|
||||
|
||||
// Construct the actual lucene search query string from JSON structure
|
||||
if (this.query.string) {
|
||||
build(this.query.string, (key: string, value: any) => {
|
||||
if (!value) {
|
||||
return null
|
||||
}
|
||||
value = builder.preprocess(value, {
|
||||
escape: true,
|
||||
lowercase: true,
|
||||
type: "string",
|
||||
})
|
||||
return `${key}:${value}*`
|
||||
})
|
||||
}
|
||||
if (this.query.range) {
|
||||
build(this.query.range, (key: string, value: any) => {
|
||||
if (!value) {
|
||||
return null
|
||||
}
|
||||
if (value.low == null || value.low === "") {
|
||||
return null
|
||||
}
|
||||
if (value.high == null || value.high === "") {
|
||||
return null
|
||||
}
|
||||
const low = builder.preprocess(value.low, allPreProcessingOpts)
|
||||
const high = builder.preprocess(value.high, allPreProcessingOpts)
|
||||
return `${key}:[${low} TO ${high}]`
|
||||
})
|
||||
}
|
||||
if (this.query.fuzzy) {
|
||||
build(this.query.fuzzy, (key: string, value: any) => {
|
||||
if (!value) {
|
||||
return null
|
||||
}
|
||||
value = builder.preprocess(value, {
|
||||
escape: true,
|
||||
lowercase: true,
|
||||
type: "fuzzy",
|
||||
})
|
||||
return `${key}:${value}~`
|
||||
})
|
||||
}
|
||||
if (this.query.equal) {
|
||||
build(this.query.equal, equal)
|
||||
}
|
||||
if (this.query.notEqual) {
|
||||
build(this.query.notEqual, (key: string, value: any) => {
|
||||
if (!value) {
|
||||
return null
|
||||
}
|
||||
return `!${key}:${builder.preprocess(value, allPreProcessingOpts)}`
|
||||
})
|
||||
}
|
||||
if (this.query.empty) {
|
||||
build(this.query.empty, (key: string) => `!${key}:["" TO *]`)
|
||||
}
|
||||
if (this.query.notEmpty) {
|
||||
build(this.query.notEmpty, (key: string) => `${key}:["" TO *]`)
|
||||
}
|
||||
if (this.query.oneOf) {
|
||||
build(this.query.oneOf, oneOf)
|
||||
}
|
||||
if (this.query.contains) {
|
||||
build(this.query.contains, contains)
|
||||
}
|
||||
if (this.query.notContains) {
|
||||
build(this.query.notContains, notContains)
|
||||
}
|
||||
if (this.query.containsAny) {
|
||||
build(this.query.containsAny, containsAny)
|
||||
}
|
||||
// make sure table ID is always added as an AND
|
||||
if (tableId) {
|
||||
query = `(${query})`
|
||||
allOr = false
|
||||
build({ tableId }, equal)
|
||||
}
|
||||
return query
|
||||
}
|
||||
|
||||
buildSearchBody() {
|
||||
let body: any = {
|
||||
q: this.buildSearchQuery(),
|
||||
limit: Math.min(this.limit, 200),
|
||||
include_docs: this.includeDocs,
|
||||
}
|
||||
if (this.bookmark) {
|
||||
body.bookmark = this.bookmark
|
||||
}
|
||||
if (this.sort) {
|
||||
const order = this.sortOrder === "descending" ? "-" : ""
|
||||
const type = `<${this.sortType}>`
|
||||
body.sort = `${order}${this.handleSpaces(this.sort)}${type}`
|
||||
}
|
||||
return body
|
||||
}
|
||||
|
||||
async run() {
|
||||
const { url, cookie } = getCouchInfo()
|
||||
const fullPath = `${url}/${this.dbName}/_design/database/_search/${this.index}`
|
||||
const body = this.buildSearchBody()
|
||||
try {
|
||||
return await runQuery<T>(fullPath, body, cookie)
|
||||
} catch (err: any) {
|
||||
if (err.status === 404 && this.indexBuilder) {
|
||||
await this.indexBuilder()
|
||||
return await runQuery<T>(fullPath, body, cookie)
|
||||
} else {
|
||||
throw err
|
||||
}
|
||||
}
|
||||
}
|
||||
}
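As a usage sketch (the database name, index name and field values here are placeholders, not anything taken from this commit), the builder is chained and then executed with run() inside an async context:

const builder = new QueryBuilder<Row>("app_dev_example", "main")
  .setTable("ta_users")      // always applied as an AND alongside the other filters
  .addString("name", "jo")   // prefix match on the indexed name field
  .setLimit(25)
  .setSort("name")
  .setSortOrder("ascending")
const { rows, bookmark } = await builder.run()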
|
||||
|
||||
/**
|
||||
* Executes a lucene search query.
|
||||
* @param url The query URL
|
||||
* @param body The request body defining search criteria
|
||||
* @param cookie The auth cookie for CouchDB
|
||||
* @returns {Promise<{rows: []}>}
|
||||
*/
|
||||
async function runQuery<T>(
|
||||
url: string,
|
||||
body: any,
|
||||
cookie: string
|
||||
): Promise<SearchResponse<T>> {
|
||||
const response = await fetch(url, {
|
||||
body: JSON.stringify(body),
|
||||
method: "POST",
|
||||
headers: {
|
||||
Authorization: cookie,
|
||||
},
|
||||
})
|
||||
|
||||
if (response.status === 404) {
|
||||
throw response
|
||||
}
|
||||
const json = await response.json()
|
||||
|
||||
let output: any = {
|
||||
rows: [],
|
||||
}
|
||||
if (json.rows != null && json.rows.length > 0) {
|
||||
output.rows = json.rows.map((row: any) => row.doc)
|
||||
}
|
||||
if (json.bookmark) {
|
||||
output.bookmark = json.bookmark
|
||||
}
|
||||
return output
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets round the fixed limit of 200 results from a query by fetching as many
|
||||
* pages as required and concatenating the results. This recursively operates
|
||||
* until enough results have been found.
|
||||
* @param dbName {string} Which database to run a lucene query on
|
||||
* @param index {string} Which search index to utilise
|
||||
* @param query {object} The JSON query structure
|
||||
* @param params {object} The search params including:
|
||||
* tableId {string} The table ID to search
|
||||
* sort {string} The sort column
|
||||
* sortOrder {string} The sort order ("ascending" or "descending")
|
||||
* sortType {string} Whether to treat sortable values as strings or
|
||||
* numbers. ("string" or "number")
|
||||
* limit {number} The number of results to fetch
|
||||
* bookmark {string|null} Current bookmark in the recursive search
|
||||
* rows {array|null} Current results in the recursive search
|
||||
* @returns {Promise<*[]|*>}
|
||||
*/
|
||||
async function recursiveSearch<T>(
|
||||
dbName: string,
|
||||
index: string,
|
||||
query: any,
|
||||
params: any
|
||||
): Promise<any> {
|
||||
const bookmark = params.bookmark
|
||||
const rows = params.rows || []
|
||||
if (rows.length >= params.limit) {
|
||||
return rows
|
||||
}
|
||||
let pageSize = 200
|
||||
if (rows.length > params.limit - 200) {
|
||||
pageSize = params.limit - rows.length
|
||||
}
|
||||
const page = await new QueryBuilder<T>(dbName, index, query)
|
||||
.setVersion(params.version)
|
||||
.setTable(params.tableId)
|
||||
.setBookmark(bookmark)
|
||||
.setLimit(pageSize)
|
||||
.setSort(params.sort)
|
||||
.setSortOrder(params.sortOrder)
|
||||
.setSortType(params.sortType)
|
||||
.run()
|
||||
if (!page.rows.length) {
|
||||
return rows
|
||||
}
|
||||
if (page.rows.length < 200) {
|
||||
return [...rows, ...page.rows]
|
||||
}
|
||||
const newParams = {
|
||||
...params,
|
||||
bookmark: page.bookmark,
|
||||
rows: [...rows, ...page.rows],
|
||||
}
|
||||
return await recursiveSearch(dbName, index, query, newParams)
|
||||
}
|
||||
|
||||
/**
|
||||
* Performs a paginated search. A bookmark will be returned to allow the next
|
||||
* page to be fetched. There is a max limit of 200 results per page in a
|
||||
* paginated search.
|
||||
* @param dbName {string} Which database to run a lucene query on
|
||||
* @param index {string} Which search index to utilise
|
||||
* @param query {object} The JSON query structure
|
||||
* @param params {object} The search params including:
|
||||
* tableId {string} The table ID to search
|
||||
* sort {string} The sort column
|
||||
* sortOrder {string} The sort order ("ascending" or "descending")
|
||||
* sortType {string} Whether to treat sortable values as strings or
|
||||
* numbers. ("string" or "number")
|
||||
* limit {number} The desired page size
|
||||
* bookmark {string} The bookmark to resume from
|
||||
* @returns {Promise<{hasNextPage: boolean, rows: *[]}>}
|
||||
*/
|
||||
export async function paginatedSearch<T>(
|
||||
dbName: string,
|
||||
index: string,
|
||||
query: SearchFilters,
|
||||
params: SearchParams<T>
|
||||
) {
|
||||
let limit = params.limit
|
||||
if (limit == null || isNaN(limit) || limit < 0) {
|
||||
limit = 50
|
||||
}
|
||||
limit = Math.min(limit, 200)
|
||||
const search = new QueryBuilder<T>(dbName, index, query)
|
||||
if (params.version) {
|
||||
search.setVersion(params.version)
|
||||
}
|
||||
if (params.tableId) {
|
||||
search.setTable(params.tableId)
|
||||
}
|
||||
if (params.sort) {
|
||||
search
|
||||
.setSort(params.sort)
|
||||
.setSortOrder(params.sortOrder)
|
||||
.setSortType(params.sortType)
|
||||
}
|
||||
if (params.indexer) {
|
||||
search.setIndexBuilder(params.indexer)
|
||||
}
|
||||
if (params.disableEscaping) {
|
||||
search.disableEscaping()
|
||||
}
|
||||
const searchResults = await search
|
||||
.setBookmark(params.bookmark)
|
||||
.setLimit(limit)
|
||||
.run()
|
||||
|
||||
// Try fetching 1 row in the next page to see if another page of results
|
||||
// exists or not
|
||||
search.setBookmark(searchResults.bookmark).setLimit(1)
|
||||
if (params.tableId) {
|
||||
search.setTable(params.tableId)
|
||||
}
|
||||
const nextResults = await search.run()
|
||||
|
||||
return {
|
||||
...searchResults,
|
||||
hasNextPage: nextResults.rows && nextResults.rows.length > 0,
|
||||
}
|
||||
}
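A hedged sketch of how a caller might page through results using the bookmark and hasNextPage flag - the names, query shape and per-page handler are illustrative only:

let bookmark: string | undefined
let hasNextPage = true
while (hasNextPage) {
  const page = await paginatedSearch<Row>(
    "app_dev_example",
    "main",
    { string: { name: "jo" } },
    { limit: 50, bookmark, sort: "name", sortType: "string", sortOrder: "ascending" }
  )
  handleRows(page.rows) // hypothetical per-page handler
  bookmark = page.bookmark
  hasNextPage = page.hasNextPage
}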

/**
 * Performs a full search, fetching multiple pages if required to return the
 * desired amount of results. There is a limit of 1000 results to avoid
 * heavy performance hits, and to avoid client components breaking from
 * handling too much data.
 * @param dbName {string} Which database to run a lucene query on
 * @param index {string} Which search index to utilise
 * @param query {object} The JSON query structure
 * @param params {object} The search params including:
 *   tableId {string} The table ID to search
 *   sort {string} The sort column
 *   sortOrder {string} The sort order ("ascending" or "descending")
 *   sortType {string} Whether to treat sortable values as strings or
 *     numbers. ("string" or "number")
 *   limit {number} The desired number of results
 * @returns {Promise<{rows: *}>}
 */
export async function fullSearch<T>(
  dbName: string,
  index: string,
  query: SearchFilters,
  params: SearchParams<T>
) {
  let limit = params.limit
  if (limit == null || isNaN(limit) || limit < 0) {
    limit = 1000
  }
  params.limit = Math.min(limit, 1000)
  const rows = await recursiveSearch<T>(dbName, index, query, params)
  return { rows }
}

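And the one-shot equivalent, which pages internally and caps results at 1000 - the names and query are again placeholders:

const { rows } = await fullSearch<Row>(
  "app_dev_example",
  "main",
  { equal: { status: "active" } },
  { limit: 500 }
)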
@ -0,0 +1,161 @@
|
|||
import { newid } from "../../newid"
|
||||
import { getDB } from "../db"
|
||||
import { Database } from "@budibase/types"
|
||||
import { QueryBuilder, paginatedSearch, fullSearch } from "../lucene"
|
||||
|
||||
const INDEX_NAME = "main"
|
||||
|
||||
const index = `function(doc) {
|
||||
let props = ["property", "number"]
|
||||
for (let key of props) {
|
||||
if (doc[key]) {
|
||||
index(key, doc[key])
|
||||
}
|
||||
}
|
||||
}`
|
||||
|
||||
describe("lucene", () => {
|
||||
let db: Database, dbName: string
|
||||
|
||||
beforeAll(async () => {
|
||||
dbName = `db-${newid()}`
|
||||
// create the DB for testing
|
||||
db = getDB(dbName)
|
||||
await db.put({ _id: newid(), property: "word" })
|
||||
await db.put({ _id: newid(), property: "word2" })
|
||||
await db.put({ _id: newid(), property: "word3", number: 1 })
|
||||
})
|
||||
|
||||
it("should be able to create a lucene index", async () => {
|
||||
const response = await db.put({
|
||||
_id: "_design/database",
|
||||
indexes: {
|
||||
[INDEX_NAME]: {
|
||||
index: index,
|
||||
analyzer: "standard",
|
||||
},
|
||||
},
|
||||
})
|
||||
expect(response.ok).toBe(true)
|
||||
})
|
||||
|
||||
describe("query builder", () => {
|
||||
it("should be able to perform a basic query", async () => {
|
||||
const builder = new QueryBuilder(dbName, INDEX_NAME)
|
||||
builder.setSort("property")
|
||||
builder.setSortOrder("desc")
|
||||
builder.setSortType("string")
|
||||
const resp = await builder.run()
|
||||
expect(resp.rows.length).toBe(3)
|
||||
})
|
||||
|
||||
it("should handle limits", async () => {
|
||||
const builder = new QueryBuilder(dbName, INDEX_NAME)
|
||||
builder.setLimit(1)
|
||||
const resp = await builder.run()
|
||||
expect(resp.rows.length).toBe(1)
|
||||
})
|
||||
|
||||
it("should be able to perform a string search", async () => {
|
||||
const builder = new QueryBuilder(dbName, INDEX_NAME)
|
||||
builder.addString("property", "wo")
|
||||
const resp = await builder.run()
|
||||
expect(resp.rows.length).toBe(3)
|
||||
})
|
||||
|
||||
it("should be able to perform a range search", async () => {
|
||||
const builder = new QueryBuilder(dbName, INDEX_NAME)
|
||||
builder.addRange("number", 0, 1)
|
||||
const resp = await builder.run()
|
||||
expect(resp.rows.length).toBe(1)
|
||||
})
|
||||
|
||||
it("should be able to perform an equal search", async () => {
|
||||
const builder = new QueryBuilder(dbName, INDEX_NAME)
|
||||
builder.addEqual("property", "word2")
|
||||
const resp = await builder.run()
|
||||
expect(resp.rows.length).toBe(1)
|
||||
})
|
||||
|
||||
it("should be able to perform a not equal search", async () => {
|
||||
const builder = new QueryBuilder(dbName, INDEX_NAME)
|
||||
builder.addNotEqual("property", "word2")
|
||||
const resp = await builder.run()
|
||||
expect(resp.rows.length).toBe(2)
|
||||
})
|
||||
|
||||
it("should be able to perform an empty search", async () => {
|
||||
const builder = new QueryBuilder(dbName, INDEX_NAME)
|
||||
builder.addEmpty("number", true)
|
||||
const resp = await builder.run()
|
||||
expect(resp.rows.length).toBe(2)
|
||||
})
|
||||
|
||||
it("should be able to perform a not empty search", async () => {
|
||||
const builder = new QueryBuilder(dbName, INDEX_NAME)
|
||||
builder.addNotEmpty("number", true)
|
||||
const resp = await builder.run()
|
||||
expect(resp.rows.length).toBe(1)
|
||||
})
|
||||
|
||||
it("should be able to perform a one of search", async () => {
|
||||
const builder = new QueryBuilder(dbName, INDEX_NAME)
|
||||
builder.addOneOf("property", ["word", "word2"])
|
||||
const resp = await builder.run()
|
||||
expect(resp.rows.length).toBe(2)
|
||||
})
|
||||
|
||||
it("should be able to perform a contains search", async () => {
|
||||
const builder = new QueryBuilder(dbName, INDEX_NAME)
|
||||
builder.addContains("property", ["word"])
|
||||
const resp = await builder.run()
|
||||
expect(resp.rows.length).toBe(1)
|
||||
})
|
||||
|
||||
it("should be able to perform a not contains search", async () => {
|
||||
const builder = new QueryBuilder(dbName, INDEX_NAME)
|
||||
builder.addNotContains("property", ["word2"])
|
||||
const resp = await builder.run()
|
||||
expect(resp.rows.length).toBe(2)
|
||||
})
|
||||
})
|
||||
|
||||
describe("paginated search", () => {
|
||||
it("should be able to perform a paginated search", async () => {
|
||||
const page = await paginatedSearch(
|
||||
dbName,
|
||||
INDEX_NAME,
|
||||
{
|
||||
string: {
|
||||
property: "wo",
|
||||
},
|
||||
},
|
||||
{
|
||||
limit: 1,
|
||||
sort: "property",
|
||||
sortType: "string",
|
||||
sortOrder: "desc",
|
||||
}
|
||||
)
|
||||
expect(page.rows.length).toBe(1)
|
||||
expect(page.hasNextPage).toBe(true)
|
||||
expect(page.bookmark).toBeDefined()
|
||||
})
|
||||
})
|
||||
|
||||
describe("full search", () => {
|
||||
it("should be able to perform a full search", async () => {
|
||||
const page = await fullSearch(
|
||||
dbName,
|
||||
INDEX_NAME,
|
||||
{
|
||||
string: {
|
||||
property: "wo",
|
||||
},
|
||||
},
|
||||
{}
|
||||
)
|
||||
expect(page.rows.length).toBe(3)
|
||||
})
|
||||
})
|
||||
})
|
|
@ -1,190 +0,0 @@
|
|||
require("../../../tests")
|
||||
const {
|
||||
getDevelopmentAppID,
|
||||
getProdAppID,
|
||||
isDevAppID,
|
||||
isProdAppID,
|
||||
} = require("../conversions")
|
||||
const { generateAppID, getPlatformUrl, getScopedConfig } = require("../utils")
|
||||
const tenancy = require("../../tenancy")
|
||||
const { Config, DEFAULT_TENANT_ID } = require("../../constants")
|
||||
import { generator } from "../../../tests"
|
||||
import env from "../../environment"
|
||||
|
||||
describe("utils", () => {
|
||||
describe("app ID manipulation", () => {
|
||||
function getID() {
|
||||
const appId = generateAppID()
|
||||
const split = appId.split("_")
|
||||
const uuid = split[split.length - 1]
|
||||
const devAppId = `app_dev_${uuid}`
|
||||
return { appId, devAppId, split, uuid }
|
||||
}
|
||||
|
||||
it("should be able to generate a new app ID", () => {
|
||||
expect(generateAppID().startsWith("app_")).toEqual(true)
|
||||
})
|
||||
|
||||
it("should be able to convert a production app ID to development", () => {
|
||||
const { appId, uuid } = getID()
|
||||
expect(getDevelopmentAppID(appId)).toEqual(`app_dev_${uuid}`)
|
||||
})
|
||||
|
||||
it("should be able to convert a development app ID to development", () => {
|
||||
const { devAppId, uuid } = getID()
|
||||
expect(getDevelopmentAppID(devAppId)).toEqual(`app_dev_${uuid}`)
|
||||
})
|
||||
|
||||
it("should be able to convert a development ID to a production", () => {
|
||||
const { devAppId, uuid } = getID()
|
||||
expect(getProdAppID(devAppId)).toEqual(`app_${uuid}`)
|
||||
})
|
||||
|
||||
it("should be able to convert a production ID to production", () => {
|
||||
const { appId, uuid } = getID()
|
||||
expect(getProdAppID(appId)).toEqual(`app_${uuid}`)
|
||||
})
|
||||
|
||||
it("should be able to confirm dev app ID is development", () => {
|
||||
const { devAppId } = getID()
|
||||
expect(isDevAppID(devAppId)).toEqual(true)
|
||||
})
|
||||
|
||||
it("should be able to confirm prod app ID is not development", () => {
|
||||
const { appId } = getID()
|
||||
expect(isDevAppID(appId)).toEqual(false)
|
||||
})
|
||||
|
||||
it("should be able to confirm prod app ID is prod", () => {
|
||||
const { appId } = getID()
|
||||
expect(isProdAppID(appId)).toEqual(true)
|
||||
})
|
||||
|
||||
it("should be able to confirm dev app ID is not prod", () => {
|
||||
const { devAppId } = getID()
|
||||
expect(isProdAppID(devAppId)).toEqual(false)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
const DEFAULT_URL = "http://localhost:10000"
|
||||
const ENV_URL = "http://env.com"
|
||||
|
||||
const setDbPlatformUrl = async (dbUrl: string) => {
|
||||
const db = tenancy.getGlobalDB()
|
||||
await db.put({
|
||||
_id: "config_settings",
|
||||
type: Config.SETTINGS,
|
||||
config: {
|
||||
platformUrl: dbUrl,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
const clearSettingsConfig = async () => {
|
||||
await tenancy.doInTenant(DEFAULT_TENANT_ID, async () => {
|
||||
const db = tenancy.getGlobalDB()
|
||||
try {
|
||||
const config = await db.get("config_settings")
|
||||
await db.remove("config_settings", config._rev)
|
||||
} catch (e: any) {
|
||||
if (e.status !== 404) {
|
||||
throw e
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
describe("getPlatformUrl", () => {
|
||||
describe("self host", () => {
|
||||
beforeEach(async () => {
|
||||
env._set("SELF_HOST", 1)
|
||||
await clearSettingsConfig()
|
||||
})
|
||||
|
||||
it("gets the default url", async () => {
|
||||
await tenancy.doInTenant(null, async () => {
|
||||
const url = await getPlatformUrl()
|
||||
expect(url).toBe(DEFAULT_URL)
|
||||
})
|
||||
})
|
||||
|
||||
it("gets the platform url from the environment", async () => {
|
||||
await tenancy.doInTenant(null, async () => {
|
||||
env._set("PLATFORM_URL", ENV_URL)
|
||||
const url = await getPlatformUrl()
|
||||
expect(url).toBe(ENV_URL)
|
||||
})
|
||||
})
|
||||
|
||||
it("gets the platform url from the database", async () => {
|
||||
await tenancy.doInTenant(null, async () => {
|
||||
const dbUrl = generator.url()
|
||||
await setDbPlatformUrl(dbUrl)
|
||||
const url = await getPlatformUrl()
|
||||
expect(url).toBe(dbUrl)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe("cloud", () => {
|
||||
const TENANT_AWARE_URL = "http://default.env.com"
|
||||
|
||||
beforeEach(async () => {
|
||||
env._set("SELF_HOSTED", 0)
|
||||
env._set("MULTI_TENANCY", 1)
|
||||
env._set("PLATFORM_URL", ENV_URL)
|
||||
await clearSettingsConfig()
|
||||
})
|
||||
|
||||
it("gets the platform url from the environment without tenancy", async () => {
|
||||
await tenancy.doInTenant(DEFAULT_TENANT_ID, async () => {
|
||||
const url = await getPlatformUrl({ tenantAware: false })
|
||||
expect(url).toBe(ENV_URL)
|
||||
})
|
||||
})
|
||||
|
||||
it("gets the platform url from the environment with tenancy", async () => {
|
||||
await tenancy.doInTenant(DEFAULT_TENANT_ID, async () => {
|
||||
const url = await getPlatformUrl()
|
||||
expect(url).toBe(TENANT_AWARE_URL)
|
||||
})
|
||||
})
|
||||
|
||||
it("never gets the platform url from the database", async () => {
|
||||
await tenancy.doInTenant(DEFAULT_TENANT_ID, async () => {
|
||||
await setDbPlatformUrl(generator.url())
|
||||
const url = await getPlatformUrl()
|
||||
expect(url).toBe(TENANT_AWARE_URL)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe("getScopedConfig", () => {
|
||||
describe("settings config", () => {
|
||||
beforeEach(async () => {
|
||||
env._set("SELF_HOSTED", 1)
|
||||
env._set("PLATFORM_URL", "")
|
||||
await clearSettingsConfig()
|
||||
})
|
||||
|
||||
it("returns the platform url with an existing config", async () => {
|
||||
await tenancy.doInTenant(DEFAULT_TENANT_ID, async () => {
|
||||
const dbUrl = generator.url()
|
||||
await setDbPlatformUrl(dbUrl)
|
||||
const db = tenancy.getGlobalDB()
|
||||
const config = await getScopedConfig(db, { type: Config.SETTINGS })
|
||||
expect(config.platformUrl).toBe(dbUrl)
|
||||
})
|
||||
})
|
||||
|
||||
it("returns the platform url without an existing config", async () => {
|
||||
await tenancy.doInTenant(DEFAULT_TENANT_ID, async () => {
|
||||
const db = tenancy.getGlobalDB()
|
||||
const config = await getScopedConfig(db, { type: Config.SETTINGS })
|
||||
expect(config.platformUrl).toBe(DEFAULT_URL)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
|
@ -0,0 +1,63 @@
|
|||
import {
|
||||
getDevelopmentAppID,
|
||||
getProdAppID,
|
||||
isDevAppID,
|
||||
isProdAppID,
|
||||
} from "../conversions"
|
||||
import { generateAppID } from "../utils"
|
||||
|
||||
describe("utils", () => {
|
||||
describe("generateAppID", () => {
|
||||
function getID() {
|
||||
const appId = generateAppID()
|
||||
const split = appId.split("_")
|
||||
const uuid = split[split.length - 1]
|
||||
const devAppId = `app_dev_${uuid}`
|
||||
return { appId, devAppId, split, uuid }
|
||||
}
|
||||
|
||||
it("should be able to generate a new app ID", () => {
|
||||
expect(generateAppID().startsWith("app_")).toEqual(true)
|
||||
})
|
||||
|
||||
it("should be able to convert a production app ID to development", () => {
|
||||
const { appId, uuid } = getID()
|
||||
expect(getDevelopmentAppID(appId)).toEqual(`app_dev_${uuid}`)
|
||||
})
|
||||
|
||||
it("should be able to convert a development app ID to development", () => {
|
||||
const { devAppId, uuid } = getID()
|
||||
expect(getDevelopmentAppID(devAppId)).toEqual(`app_dev_${uuid}`)
|
||||
})
|
||||
|
||||
it("should be able to convert a development ID to a production", () => {
|
||||
const { devAppId, uuid } = getID()
|
||||
expect(getProdAppID(devAppId)).toEqual(`app_${uuid}`)
|
||||
})
|
||||
|
||||
it("should be able to convert a production ID to production", () => {
|
||||
const { appId, uuid } = getID()
|
||||
expect(getProdAppID(appId)).toEqual(`app_${uuid}`)
|
||||
})
|
||||
|
||||
it("should be able to confirm dev app ID is development", () => {
|
||||
const { devAppId } = getID()
|
||||
expect(isDevAppID(devAppId)).toEqual(true)
|
||||
})
|
||||
|
||||
it("should be able to confirm prod app ID is not development", () => {
|
||||
const { appId } = getID()
|
||||
expect(isDevAppID(appId)).toEqual(false)
|
||||
})
|
||||
|
||||
it("should be able to confirm prod app ID is prod", () => {
|
||||
const { appId } = getID()
|
||||
expect(isProdAppID(appId)).toEqual(true)
|
||||
})
|
||||
|
||||
it("should be able to confirm dev app ID is not prod", () => {
|
||||
const { devAppId } = getID()
|
||||
expect(isProdAppID(devAppId)).toEqual(false)
|
||||
})
|
||||
})
|
||||
})
|
|
@@ -9,12 +9,11 @@ import {
  InternalTable,
  APP_PREFIX,
} from "../constants"
import { getTenantId, getGlobalDB, getGlobalDBName } from "../context"
import { getTenantId, getGlobalDBName } from "../context"
import { doWithDB, directCouchAllDbs } from "./db"
import { getAppMetadata } from "../cache/appMetadata"
import { isDevApp, isDevAppID, getProdAppID } from "./conversions"
import * as events from "../events"
import { App, Database, ConfigType, isSettingsConfig } from "@budibase/types"
import { App, Database } from "@budibase/types"

/**
 * Generates a new app ID.
@@ -366,6 +365,16 @@ export async function getAllApps({
  }
}

export async function getAppsByIDs(appIds: string[]) {
  const settled = await Promise.allSettled(
    appIds.map(appId => getAppMetadata(appId))
  )
  // have to list the apps which exist, some may have been deleted
  return settled
    .filter(promise => promise.status === "fulfilled")
    .map(promise => (promise as PromiseFulfilledResult<App>).value)
}

/**
 * Utility function for getAllApps but filters to production apps only.
 */
@@ -382,6 +391,16 @@ export async function getDevAppIDs() {
  return apps.filter((id: any) => isDevAppID(id))
}

export function isSameAppID(
  appId1: string | undefined,
  appId2: string | undefined
) {
  if (appId1 == undefined || appId2 == undefined) {
    return false
  }
  return getProdAppID(appId1) === getProdAppID(appId2)
}
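A couple of illustrative calls (the IDs are invented) showing that dev and prod variants of the same app compare as equal:

isSameAppID("app_dev_abc123", "app_abc123") // true - both normalise to the prod ID
isSameAppID("app_abc123", "app_abc123")     // true
isSameAppID("app_dev_abc123", undefined)    // false - a missing ID never matches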

export async function dbExists(dbName: any) {
  return doWithDB(
    dbName,
@ -392,32 +411,6 @@ export async function dbExists(dbName: any) {
|
|||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a new configuration ID.
|
||||
* @returns {string} The new configuration ID which the config doc can be stored under.
|
||||
*/
|
||||
export const generateConfigID = ({ type, workspace, user }: any) => {
|
||||
const scope = [type, workspace, user].filter(Boolean).join(SEPARATOR)
|
||||
|
||||
return `${DocumentType.CONFIG}${SEPARATOR}${scope}`
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets parameters for retrieving configurations.
|
||||
*/
|
||||
export const getConfigParams = (
|
||||
{ type, workspace, user }: any,
|
||||
otherProps = {}
|
||||
) => {
|
||||
const scope = [type, workspace, user].filter(Boolean).join(SEPARATOR)
|
||||
|
||||
return {
|
||||
...otherProps,
|
||||
startkey: `${DocumentType.CONFIG}${SEPARATOR}${scope}`,
|
||||
endkey: `${DocumentType.CONFIG}${SEPARATOR}${scope}${UNICODE_MAX}`,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a new dev info document ID - this is scoped to a user.
|
||||
* @returns {string} The new dev info ID which info for dev (like api key) can be stored under.
|
||||
|
@ -441,109 +434,6 @@ export const getPluginParams = (pluginId?: string | null, otherProps = {}) => {
|
|||
return getDocParams(DocumentType.PLUGIN, pluginId, otherProps)
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the most granular configuration document from the DB based on the type, workspace and userID passed.
|
||||
* @param {Object} db - db instance to query
|
||||
* @param {Object} scopes - the type, workspace and userID scopes of the configuration.
|
||||
* @returns The most granular configuration document based on the scope.
|
||||
*/
|
||||
export const getScopedFullConfig = async function (
|
||||
db: any,
|
||||
{ type, user, workspace }: any
|
||||
) {
|
||||
const response = await db.allDocs(
|
||||
getConfigParams(
|
||||
{ type, user, workspace },
|
||||
{
|
||||
include_docs: true,
|
||||
}
|
||||
)
|
||||
)
|
||||
|
||||
function determineScore(row: any) {
|
||||
const config = row.doc
|
||||
|
||||
// Config is specific to a user and a workspace
|
||||
if (config._id.includes(generateConfigID({ type, user, workspace }))) {
|
||||
return 4
|
||||
} else if (config._id.includes(generateConfigID({ type, user }))) {
|
||||
// Config is specific to a user only
|
||||
return 3
|
||||
} else if (config._id.includes(generateConfigID({ type, workspace }))) {
|
||||
// Config is specific to a workspace only
|
||||
return 2
|
||||
} else if (config._id.includes(generateConfigID({ type }))) {
|
||||
// Config is specific to a type only
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
// Find the config with the most granular scope based on context
|
||||
let scopedConfig = response.rows.sort(
|
||||
(a: any, b: any) => determineScore(a) - determineScore(b)
|
||||
)[0]
|
||||
|
||||
// custom logic for settings doc
|
||||
if (type === ConfigType.SETTINGS) {
|
||||
if (!scopedConfig || !scopedConfig.doc) {
|
||||
// defaults
|
||||
scopedConfig = {
|
||||
doc: {
|
||||
_id: generateConfigID({ type, user, workspace }),
|
||||
type: ConfigType.SETTINGS,
|
||||
config: {
|
||||
platformUrl: await getPlatformUrl({ tenantAware: true }),
|
||||
analyticsEnabled: await events.analytics.enabled(),
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// will always be true - use assertion function to get type access
|
||||
if (isSettingsConfig(scopedConfig.doc)) {
|
||||
// overrides affected by environment
|
||||
scopedConfig.doc.config.platformUrl = await getPlatformUrl({
|
||||
tenantAware: true,
|
||||
})
|
||||
scopedConfig.doc.config.analyticsEnabled =
|
||||
await events.analytics.enabled()
|
||||
}
|
||||
}
|
||||
|
||||
return scopedConfig && scopedConfig.doc
|
||||
}
|
||||
|
||||
export const getPlatformUrl = async (opts = { tenantAware: true }) => {
|
||||
let platformUrl = env.PLATFORM_URL || "http://localhost:10000"
|
||||
|
||||
if (!env.SELF_HOSTED && env.MULTI_TENANCY && opts.tenantAware) {
|
||||
// cloud and multi tenant - add the tenant to the default platform url
|
||||
const tenantId = getTenantId()
|
||||
if (!platformUrl.includes("localhost:")) {
|
||||
platformUrl = platformUrl.replace("://", `://${tenantId}.`)
|
||||
}
|
||||
} else if (env.SELF_HOSTED) {
|
||||
const db = getGlobalDB()
|
||||
// get the doc directly instead of with getScopedConfig to prevent loop
|
||||
let settings
|
||||
try {
|
||||
settings = await db.get(generateConfigID({ type: ConfigType.SETTINGS }))
|
||||
} catch (e: any) {
|
||||
if (e.status !== 404) {
|
||||
throw e
|
||||
}
|
||||
}
|
||||
|
||||
// self hosted - check for platform url override
|
||||
if (settings && settings.config && settings.config.platformUrl) {
|
||||
platformUrl = settings.config.platformUrl
|
||||
}
|
||||
}
|
||||
|
||||
return platformUrl
|
||||
}
|
||||
|
||||
export function pagination(
|
||||
data: any[],
|
||||
pageSize: number,
|
||||
|
@@ -577,8 +467,3 @@ export function pagination(
    nextPage,
  }
}

export async function getScopedConfig(db: any, params: any) {
  const configDoc = await getScopedFullConfig(db, params)
  return configDoc && configDoc.config ? configDoc.config : configDoc
}
@@ -1,13 +1,14 @@
import {
  DocumentType,
  ViewName,
  DeprecatedViews,
  DocumentType,
  SEPARATOR,
  StaticDatabases,
  ViewName,
} from "../constants"
import { getGlobalDB } from "../context"
import { doWithDB } from "./"
import { Database, DatabaseQueryOpts } from "@budibase/types"
import env from "../environment"

const DESIGN_DB = "_design/database"

@ -69,17 +70,6 @@ export const createNewUserEmailView = async () => {
|
|||
await createView(db, viewJs, ViewName.USER_BY_EMAIL)
|
||||
}
|
||||
|
||||
export const createAccountEmailView = async () => {
|
||||
const viewJs = `function(doc) {
|
||||
if (doc._id.startsWith("${DocumentType.ACCOUNT_METADATA}${SEPARATOR}")) {
|
||||
emit(doc.email.toLowerCase(), doc._id)
|
||||
}
|
||||
}`
|
||||
await doWithDB(StaticDatabases.PLATFORM_INFO.name, async (db: Database) => {
|
||||
await createView(db, viewJs, ViewName.ACCOUNT_BY_EMAIL)
|
||||
})
|
||||
}
|
||||
|
||||
export const createUserAppView = async () => {
|
||||
const db = getGlobalDB()
|
||||
const viewJs = `function(doc) {
|
||||
|
@ -113,17 +103,6 @@ export const createUserBuildersView = async () => {
|
|||
await createView(db, viewJs, ViewName.USER_BY_BUILDERS)
|
||||
}
|
||||
|
||||
export const createPlatformUserView = async () => {
|
||||
const viewJs = `function(doc) {
|
||||
if (doc.tenantId) {
|
||||
emit(doc._id.toLowerCase(), doc._id)
|
||||
}
|
||||
}`
|
||||
await doWithDB(StaticDatabases.PLATFORM_INFO.name, async (db: Database) => {
|
||||
await createView(db, viewJs, ViewName.PLATFORM_USERS_LOWERCASE)
|
||||
})
|
||||
}
|
||||
|
||||
export interface QueryViewOptions {
|
||||
arrayResponse?: boolean
|
||||
}
|
||||
|
@@ -162,13 +141,48 @@ export const queryView = async <T>(
  }
}

// PLATFORM

async function createPlatformView(viewJs: string, viewName: ViewName) {
  try {
    await doWithDB(StaticDatabases.PLATFORM_INFO.name, async (db: Database) => {
      await createView(db, viewJs, viewName)
    })
  } catch (e: any) {
    if (e.status === 409 && env.isTest()) {
      // multiple tests can try to initialise platforms views
      // at once - safe to exit on conflict
      return
    }
    throw e
  }
}

export const createPlatformAccountEmailView = async () => {
  const viewJs = `function(doc) {
    if (doc._id.startsWith("${DocumentType.ACCOUNT_METADATA}${SEPARATOR}")) {
      emit(doc.email.toLowerCase(), doc._id)
    }
  }`
  await createPlatformView(viewJs, ViewName.ACCOUNT_BY_EMAIL)
}

export const createPlatformUserView = async () => {
  const viewJs = `function(doc) {
    if (doc.tenantId) {
      emit(doc._id.toLowerCase(), doc._id)
    }
  }`
  await createPlatformView(viewJs, ViewName.PLATFORM_USERS_LOWERCASE)
}

export const queryPlatformView = async <T>(
  viewName: ViewName,
  params: DatabaseQueryOpts,
  opts?: QueryViewOptions
): Promise<T[] | T | undefined> => {
  const CreateFuncByName: any = {
    [ViewName.ACCOUNT_BY_EMAIL]: createAccountEmailView,
    [ViewName.ACCOUNT_BY_EMAIL]: createPlatformAccountEmailView,
    [ViewName.PLATFORM_USERS_LOWERCASE]: createPlatformUserView,
  }

@@ -28,6 +28,8 @@ const DefaultBucketName = {
  PLUGINS: "plugins",
}

const selfHosted = !!parseInt(process.env.SELF_HOSTED || "")

const environment = {
  isTest,
  isJest,
@@ -44,8 +46,9 @@ const environment = {
  GOOGLE_CLIENT_ID: process.env.GOOGLE_CLIENT_ID,
  GOOGLE_CLIENT_SECRET: process.env.GOOGLE_CLIENT_SECRET,
  SALT_ROUNDS: process.env.SALT_ROUNDS,
  REDIS_URL: process.env.REDIS_URL,
  REDIS_PASSWORD: process.env.REDIS_PASSWORD,
  REDIS_URL: process.env.REDIS_URL || "localhost:6379",
  REDIS_PASSWORD: process.env.REDIS_PASSWORD || "budibase",
  MOCK_REDIS: process.env.MOCK_REDIS,
  MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY,
  MINIO_SECRET_KEY: process.env.MINIO_SECRET_KEY,
  AWS_REGION: process.env.AWS_REGION,
@@ -57,7 +60,7 @@ const environment = {
    process.env.ACCOUNT_PORTAL_URL || "https://account.budibase.app",
  ACCOUNT_PORTAL_API_KEY: process.env.ACCOUNT_PORTAL_API_KEY || "",
  DISABLE_ACCOUNT_PORTAL: process.env.DISABLE_ACCOUNT_PORTAL,
  SELF_HOSTED: !!parseInt(process.env.SELF_HOSTED || ""),
  SELF_HOSTED: selfHosted,
  COOKIE_DOMAIN: process.env.COOKIE_DOMAIN,
  PLATFORM_URL: process.env.PLATFORM_URL || "",
  POSTHOG_TOKEN: process.env.POSTHOG_TOKEN,
@@ -82,6 +85,24 @@ const environment = {
  SESSION_UPDATE_PERIOD: process.env.SESSION_UPDATE_PERIOD,
  DEPLOYMENT_ENVIRONMENT:
    process.env.DEPLOYMENT_ENVIRONMENT || "docker-compose",
  ENABLE_4XX_HTTP_LOGGING: process.env.ENABLE_4XX_HTTP_LOGGING || true,
  ENABLE_AUDIT_LOG_IP_ADDR: process.env.ENABLE_AUDIT_LOG_IP_ADDR,
  // smtp
  SMTP_FALLBACK_ENABLED: process.env.SMTP_FALLBACK_ENABLED,
  SMTP_USER: process.env.SMTP_USER,
  SMTP_PASSWORD: process.env.SMTP_PASSWORD,
  SMTP_HOST: process.env.SMTP_HOST,
  SMTP_PORT: parseInt(process.env.SMTP_PORT || ""),
  SMTP_FROM_ADDRESS: process.env.SMTP_FROM_ADDRESS,
  DISABLE_JWT_WARNING: process.env.DISABLE_JWT_WARNING,
  /**
   * Enable to allow an admin user to login using a password.
   * This can be useful to prevent lockout when configuring SSO.
   * However, this should be turned OFF by default for security purposes.
   */
  ENABLE_SSO_MAINTENANCE_MODE: selfHosted
    ? process.env.ENABLE_SSO_MAINTENANCE_MODE
    : false,
  _set(key: any, value: any) {
    process.env[key] = value
    // @ts-ignore
@ -1,55 +1,6 @@
|
|||
import env from "../environment"
|
||||
import * as tenancy from "../tenancy"
|
||||
import * as dbUtils from "../db/utils"
|
||||
import { Config } from "../constants"
|
||||
import { withCache, TTL, CacheKey } from "../cache"
|
||||
import * as configs from "../configs"
|
||||
|
||||
// wrapper utility function
|
||||
export const enabled = async () => {
|
||||
// cloud - always use the environment variable
|
||||
if (!env.SELF_HOSTED) {
|
||||
return !!env.ENABLE_ANALYTICS
|
||||
}
|
||||
|
||||
// self host - prefer the settings doc
|
||||
// use cache as events have high throughput
|
||||
const enabledInDB = await withCache(
|
||||
CacheKey.ANALYTICS_ENABLED,
|
||||
TTL.ONE_DAY,
|
||||
async () => {
|
||||
const settings = await getSettingsDoc()
|
||||
|
||||
// need to do explicit checks in case the field is not set
|
||||
if (settings?.config?.analyticsEnabled === false) {
|
||||
return false
|
||||
} else if (settings?.config?.analyticsEnabled === true) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
if (enabledInDB !== undefined) {
|
||||
return enabledInDB
|
||||
}
|
||||
|
||||
// fallback to the environment variable
|
||||
// explicitly check for 0 or false here, undefined or otherwise is treated as true
|
||||
const envEnabled: any = env.ENABLE_ANALYTICS
|
||||
if (envEnabled === 0 || envEnabled === false) {
|
||||
return false
|
||||
} else {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
const getSettingsDoc = async () => {
|
||||
const db = tenancy.getGlobalDB()
|
||||
let settings
|
||||
try {
|
||||
settings = await db.get(dbUtils.generateConfigID({ type: Config.SETTINGS }))
|
||||
} catch (e: any) {
|
||||
if (e.status !== 404) {
|
||||
throw e
|
||||
}
|
||||
}
|
||||
return settings
|
||||
return configs.analyticsEnabled()
|
||||
}
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
import { Event } from "@budibase/types"
|
||||
import { Event, AuditedEventFriendlyName } from "@budibase/types"
|
||||
import { processors } from "./processors"
|
||||
import identification from "./identification"
|
||||
import * as backfill from "./backfill"
|
||||
|
|
|
@ -10,18 +10,17 @@ import {
|
|||
isCloudAccount,
|
||||
isSSOAccount,
|
||||
TenantGroup,
|
||||
SettingsConfig,
|
||||
CloudAccount,
|
||||
UserIdentity,
|
||||
InstallationGroup,
|
||||
UserContext,
|
||||
Group,
|
||||
isSSOUser,
|
||||
} from "@budibase/types"
|
||||
import { processors } from "./processors"
|
||||
import * as dbUtils from "../db/utils"
|
||||
import { Config } from "../constants"
|
||||
import { newid } from "../utils"
|
||||
import * as installation from "../installation"
|
||||
import * as configs from "../configs"
|
||||
import { withCache, TTL, CacheKey } from "../cache/generic"
|
||||
|
||||
const pkg = require("../../package.json")
|
||||
|
@ -88,6 +87,7 @@ const getCurrentIdentity = async (): Promise<Identity> => {
|
|||
installationId,
|
||||
tenantId,
|
||||
environment,
|
||||
hostInfo: userContext.hostInfo,
|
||||
}
|
||||
} else {
|
||||
throw new Error("Unknown identity type")
|
||||
|
@ -166,7 +166,10 @@ const identifyUser = async (
|
|||
const type = IdentityType.USER
|
||||
let builder = user.builder?.global || false
|
||||
let admin = user.admin?.global || false
|
||||
let providerType = user.providerType
|
||||
let providerType
|
||||
if (isSSOUser(user)) {
|
||||
providerType = user.providerType
|
||||
}
|
||||
const accountHolder = account?.budibaseUserId === user._id || false
|
||||
const verified =
|
||||
account && account?.budibaseUserId === user._id ? account.verified : false
|
||||
|
@ -266,9 +269,7 @@ const getUniqueTenantId = async (tenantId: string): Promise<string> => {
|
|||
return context.doInTenant(tenantId, () => {
|
||||
return withCache(CacheKey.UNIQUE_TENANT_ID, TTL.ONE_DAY, async () => {
|
||||
const db = context.getGlobalDB()
|
||||
const config: SettingsConfig = await dbUtils.getScopedFullConfig(db, {
|
||||
type: Config.SETTINGS,
|
||||
})
|
||||
const config = await configs.getSettingsConfigDoc()
|
||||
|
||||
let uniqueTenantId: string
|
||||
if (config.config.uniqueTenantId) {
|
||||
|
|
|
@@ -0,0 +1,93 @@
import {
  Event,
  Identity,
  Group,
  IdentityType,
  AuditLogQueueEvent,
  AuditLogFn,
  HostInfo,
} from "@budibase/types"
import { EventProcessor } from "./types"
import { getAppId, doInTenant, getTenantId } from "../../context"
import BullQueue from "bull"
import { createQueue, JobQueue } from "../../queue"
import { isAudited } from "../../utils"
import env from "../../environment"

export default class AuditLogsProcessor implements EventProcessor {
  static auditLogsEnabled = false
  static auditLogQueue: BullQueue.Queue<AuditLogQueueEvent>

  // can't use constructor as need to return promise
  static init(fn: AuditLogFn) {
    AuditLogsProcessor.auditLogsEnabled = true
    const writeAuditLogs = fn
    AuditLogsProcessor.auditLogQueue = createQueue<AuditLogQueueEvent>(
      JobQueue.AUDIT_LOG
    )
    return AuditLogsProcessor.auditLogQueue.process(async job => {
      return doInTenant(job.data.tenantId, async () => {
        let properties = job.data.properties
        if (properties.audited) {
          properties = {
            ...properties,
            ...properties.audited,
          }
          delete properties.audited
        }

        // this feature is disabled by default due to privacy requirements
        // in some countries - available as env var in-case it is desired
        // in self host deployments
        let hostInfo: HostInfo | undefined = {}
        if (env.ENABLE_AUDIT_LOG_IP_ADDR) {
          hostInfo = job.data.opts.hostInfo
        }

        await writeAuditLogs(job.data.event, properties, {
          userId: job.data.opts.userId,
          timestamp: job.data.opts.timestamp,
          appId: job.data.opts.appId,
          hostInfo,
        })
      })
    })
  }

  async processEvent(
    event: Event,
    identity: Identity,
    properties: any,
    timestamp?: string
  ): Promise<void> {
    if (AuditLogsProcessor.auditLogsEnabled && isAudited(event)) {
      // only audit log actual events, don't include backfills
      const userId =
        identity.type === IdentityType.USER ? identity.id : undefined
      // add to the event queue, rather than just writing immediately
      await AuditLogsProcessor.auditLogQueue.add({
        event,
        properties,
        opts: {
          userId,
          timestamp,
          appId: getAppId(),
          hostInfo: identity.hostInfo,
        },
        tenantId: getTenantId(),
      })
    }
  }

  async identify(identity: Identity, timestamp?: string | number) {
    // no-op
  }

  async identifyGroup(group: Group, timestamp?: string | number) {
    // no-op
  }

  shutdown(): void {
    AuditLogsProcessor.auditLogQueue?.close()
  }
}
@@ -1,8 +1,19 @@
import AnalyticsProcessor from "./AnalyticsProcessor"
import LoggingProcessor from "./LoggingProcessor"
import AuditLogsProcessor from "./AuditLogsProcessor"
import Processors from "./Processors"
import { AuditLogFn } from "@budibase/types"

export const analyticsProcessor = new AnalyticsProcessor()
const loggingProcessor = new LoggingProcessor()
const auditLogsProcessor = new AuditLogsProcessor()

export const processors = new Processors([analyticsProcessor, loggingProcessor])
export function init(auditingFn: AuditLogFn) {
  return AuditLogsProcessor.init(auditingFn)
}

export const processors = new Processors([
  analyticsProcessor,
  loggingProcessor,
  auditLogsProcessor,
])
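To show how this is intended to be wired up, a minimal sketch of registering an audit log writer at startup, assuming AuditLogFn takes the event, properties and write options seen in the processor above; the writer body is a stand-in, not the real implementation shipped in the pro package:

const writeAuditLog: AuditLogFn = async (event, properties, opts) => {
  // e.g. persist the event into the tenant's audit log DB (getAuditLogsDB())
}
// registers the Bull queue consumer so audited events are written asynchronously
init(writeAuditLog)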
@ -47,6 +47,8 @@ export default class PosthogProcessor implements EventProcessor {
|
|||
return
|
||||
}
|
||||
|
||||
properties = this.clearPIIProperties(properties)
|
||||
|
||||
properties.version = pkg.version
|
||||
properties.service = env.SERVICE
|
||||
properties.environment = identity.environment
|
||||
|
@ -79,6 +81,16 @@ export default class PosthogProcessor implements EventProcessor {
|
|||
this.posthog.capture(payload)
|
||||
}
|
||||
|
||||
clearPIIProperties(properties: any) {
|
||||
if (properties.email) {
|
||||
delete properties.email
|
||||
}
|
||||
if (properties.audited) {
|
||||
delete properties.audited
|
||||
}
|
||||
return properties
|
||||
}
|
||||
|
||||
async identify(identity: Identity, timestamp?: string | number) {
|
||||
const payload: any = { distinctId: identity.id, properties: identity }
|
||||
if (timestamp) {
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
import "../../../../../tests"
|
||||
import { testEnv } from "../../../../../tests"
|
||||
import PosthogProcessor from "../PosthogProcessor"
|
||||
import { Event, IdentityType, Hosting } from "@budibase/types"
|
||||
const tk = require("timekeeper")
|
||||
|
@ -16,6 +16,10 @@ const newIdentity = () => {
|
|||
}
|
||||
|
||||
describe("PosthogProcessor", () => {
|
||||
beforeAll(() => {
|
||||
testEnv.singleTenant()
|
||||
})
|
||||
|
||||
beforeEach(async () => {
|
||||
jest.clearAllMocks()
|
||||
await cache.bustCache(
|
||||
|
@ -45,6 +49,25 @@ describe("PosthogProcessor", () => {
|
|||
expect(processor.posthog.capture).toHaveBeenCalledTimes(0)
|
||||
})
|
||||
|
||||
it("removes audited information", async () => {
|
||||
const processor = new PosthogProcessor("test")
|
||||
|
||||
const identity = newIdentity()
|
||||
const properties = {
|
||||
email: "test",
|
||||
audited: {
|
||||
name: "test",
|
||||
},
|
||||
}
|
||||
|
||||
await processor.processEvent(Event.USER_CREATED, identity, properties)
|
||||
expect(processor.posthog.capture).toHaveBeenCalled()
|
||||
// @ts-ignore
|
||||
const call = processor.posthog.capture.mock.calls[0][0]
|
||||
expect(call.properties.audited).toBeUndefined()
|
||||
expect(call.properties.email).toBeUndefined()
|
||||
})
|
||||
|
||||
describe("rate limiting", () => {
|
||||
it("sends daily event once in same day", async () => {
|
||||
const processor = new PosthogProcessor("test")
|
||||
|
|
|
@ -19,6 +19,9 @@ const created = async (app: App, timestamp?: string | number) => {
|
|||
const properties: AppCreatedEvent = {
|
||||
appId: app.appId,
|
||||
version: app.version,
|
||||
audited: {
|
||||
name: app.name,
|
||||
},
|
||||
}
|
||||
await publishEvent(Event.APP_CREATED, properties, timestamp)
|
||||
}
|
||||
|
@ -27,6 +30,9 @@ async function updated(app: App) {
|
|||
const properties: AppUpdatedEvent = {
|
||||
appId: app.appId,
|
||||
version: app.version,
|
||||
audited: {
|
||||
name: app.name,
|
||||
},
|
||||
}
|
||||
await publishEvent(Event.APP_UPDATED, properties)
|
||||
}
|
||||
|
@ -34,6 +40,9 @@ async function updated(app: App) {
|
|||
async function deleted(app: App) {
|
||||
const properties: AppDeletedEvent = {
|
||||
appId: app.appId,
|
||||
audited: {
|
||||
name: app.name,
|
||||
},
|
||||
}
|
||||
await publishEvent(Event.APP_DELETED, properties)
|
||||
}
|
||||
|
@ -41,6 +50,9 @@ async function deleted(app: App) {
|
|||
async function published(app: App, timestamp?: string | number) {
|
||||
const properties: AppPublishedEvent = {
|
||||
appId: app.appId,
|
||||
audited: {
|
||||
name: app.name,
|
||||
},
|
||||
}
|
||||
await publishEvent(Event.APP_PUBLISHED, properties, timestamp)
|
||||
}
|
||||
|
@ -48,6 +60,9 @@ async function published(app: App, timestamp?: string | number) {
|
|||
async function unpublished(app: App) {
|
||||
const properties: AppUnpublishedEvent = {
|
||||
appId: app.appId,
|
||||
audited: {
|
||||
name: app.name,
|
||||
},
|
||||
}
|
||||
await publishEvent(Event.APP_UNPUBLISHED, properties)
|
||||
}
|
||||
|
@ -55,6 +70,9 @@ async function unpublished(app: App) {
|
|||
async function fileImported(app: App) {
|
||||
const properties: AppFileImportedEvent = {
|
||||
appId: app.appId,
|
||||
audited: {
|
||||
name: app.name,
|
||||
},
|
||||
}
|
||||
await publishEvent(Event.APP_FILE_IMPORTED, properties)
|
||||
}
|
||||
|
@ -63,6 +81,9 @@ async function templateImported(app: App, templateKey: string) {
|
|||
const properties: AppTemplateImportedEvent = {
|
||||
appId: app.appId,
|
||||
templateKey,
|
||||
audited: {
|
||||
name: app.name,
|
||||
},
|
||||
}
|
||||
await publishEvent(Event.APP_TEMPLATE_IMPORTED, properties)
|
||||
}
|
||||
|
@ -76,6 +97,9 @@ async function versionUpdated(
|
|||
appId: app.appId,
|
||||
currentVersion,
|
||||
updatedToVersion,
|
||||
audited: {
|
||||
name: app.name,
|
||||
},
|
||||
}
|
||||
await publishEvent(Event.APP_VERSION_UPDATED, properties)
|
||||
}
|
||||
|
@ -89,6 +113,9 @@ async function versionReverted(
|
|||
appId: app.appId,
|
||||
currentVersion,
|
||||
revertedToVersion,
|
||||
audited: {
|
||||
name: app.name,
|
||||
},
|
||||
}
|
||||
await publishEvent(Event.APP_VERSION_REVERTED, properties)
|
||||
}
|
||||
|
@ -96,6 +123,9 @@ async function versionReverted(
|
|||
async function reverted(app: App) {
|
||||
const properties: AppRevertedEvent = {
|
||||
appId: app.appId,
|
||||
audited: {
|
||||
name: app.name,
|
||||
},
|
||||
}
|
||||
await publishEvent(Event.APP_REVERTED, properties)
|
||||
}
|
||||
|
@ -103,6 +133,9 @@ async function reverted(app: App) {
|
|||
async function exported(app: App) {
|
||||
const properties: AppExportedEvent = {
|
||||
appId: app.appId,
|
||||
audited: {
|
||||
name: app.name,
|
||||
},
|
||||
}
|
||||
await publishEvent(Event.APP_EXPORTED, properties)
|
||||
}
|
||||
|
|
|
@@ -0,0 +1,26 @@
import {
  Event,
  AuditLogSearchParams,
  AuditLogFilteredEvent,
  AuditLogDownloadedEvent,
} from "@budibase/types"
import { publishEvent } from "../events"

async function filtered(search: AuditLogSearchParams) {
  const properties: AuditLogFilteredEvent = {
    filters: search,
  }
  await publishEvent(Event.AUDIT_LOGS_FILTERED, properties)
}

async function downloaded(search: AuditLogSearchParams) {
  const properties: AuditLogDownloadedEvent = {
    filters: search,
  }
  await publishEvent(Event.AUDIT_LOGS_DOWNLOADED, properties)
}

export default {
  filtered,
  downloaded,
}
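A hypothetical call site for the new audit log event publishers, assuming a worker controller that performs the search before recording the event:

import { events } from "@budibase/backend-core"

// filters shape is assumed to match AuditLogSearchParams
export async function searchAuditLogs(ctx: any) {
  const filters = ctx.request.body
  // ...run the actual audit log search here...
  await events.auditLog.filtered(filters)
  ctx.body = { results: [] } // placeholder
}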
@@ -12,19 +12,25 @@ import {
} from "@budibase/types"
import { identification } from ".."

async function login(source: LoginSource) {
async function login(source: LoginSource, email: string) {
  const identity = await identification.getCurrentIdentity()
  const properties: LoginEvent = {
    userId: identity.id,
    source,
    audited: {
      email,
    },
  }
  await publishEvent(Event.AUTH_LOGIN, properties)
}

async function logout() {
async function logout(email?: string) {
  const identity = await identification.getCurrentIdentity()
  const properties: LogoutEvent = {
    userId: identity.id,
    audited: {
      email,
    },
  }
  await publishEvent(Event.AUTH_LOGOUT, properties)
}
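With the signature change above, callers now pass the user's email so it lands in the audited section of the auth events. A hedged sketch of the call sites (the surrounding controller wiring is an assumption):

import { events } from "@budibase/backend-core"
import { LoginSource } from "@budibase/types"

async function recordLogin(source: LoginSource, email: string) {
  // email travels only in the audited section of the event
  await events.auth.login(source, email)
}

async function recordLogout(email?: string) {
  await events.auth.logout(email)
}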
@ -18,6 +18,9 @@ async function created(automation: Automation, timestamp?: string | number) {
|
|||
automationId: automation._id as string,
|
||||
triggerId: automation.definition?.trigger?.id,
|
||||
triggerType: automation.definition?.trigger?.stepId,
|
||||
audited: {
|
||||
name: automation.name,
|
||||
},
|
||||
}
|
||||
await publishEvent(Event.AUTOMATION_CREATED, properties, timestamp)
|
||||
}
|
||||
|
@ -38,6 +41,9 @@ async function deleted(automation: Automation) {
|
|||
automationId: automation._id as string,
|
||||
triggerId: automation.definition?.trigger?.id,
|
||||
triggerType: automation.definition?.trigger?.stepId,
|
||||
audited: {
|
||||
name: automation.name,
|
||||
},
|
||||
}
|
||||
await publishEvent(Event.AUTOMATION_DELETED, properties)
|
||||
}
|
||||
|
@ -71,6 +77,9 @@ async function stepCreated(
|
|||
triggerType: automation.definition?.trigger?.stepId,
|
||||
stepId: step.id!,
|
||||
stepType: step.stepId,
|
||||
audited: {
|
||||
name: automation.name,
|
||||
},
|
||||
}
|
||||
await publishEvent(Event.AUTOMATION_STEP_CREATED, properties, timestamp)
|
||||
}
|
||||
|
@ -83,6 +92,9 @@ async function stepDeleted(automation: Automation, step: AutomationStep) {
|
|||
triggerType: automation.definition?.trigger?.stepId,
|
||||
stepId: step.id!,
|
||||
stepType: step.stepId,
|
||||
audited: {
|
||||
name: automation.name,
|
||||
},
|
||||
}
|
||||
await publishEvent(Event.AUTOMATION_STEP_DELETED, properties)
|
||||
}
|
||||
|
|
|
@ -13,6 +13,7 @@ async function appBackupRestored(backup: AppBackup) {
|
|||
appId: backup.appId,
|
||||
restoreId: backup._id!,
|
||||
backupCreatedAt: backup.timestamp,
|
||||
name: backup.name as string,
|
||||
}
|
||||
|
||||
await publishEvent(Event.APP_BACKUP_RESTORED, properties)
|
||||
|
@ -22,13 +23,15 @@ async function appBackupTriggered(
|
|||
appId: string,
|
||||
backupId: string,
|
||||
type: AppBackupType,
|
||||
trigger: AppBackupTrigger
|
||||
trigger: AppBackupTrigger,
|
||||
name: string
|
||||
) {
|
||||
const properties: AppBackupTriggeredEvent = {
|
||||
appId: appId,
|
||||
backupId,
|
||||
type,
|
||||
trigger,
|
||||
name,
|
||||
}
|
||||
await publishEvent(Event.APP_BACKUP_TRIGGERED, properties)
|
||||
}
|
||||
|
|
|
@ -8,12 +8,16 @@ import {
|
|||
GroupUsersAddedEvent,
|
||||
GroupUsersDeletedEvent,
|
||||
GroupAddedOnboardingEvent,
|
||||
GroupPermissionsEditedEvent,
|
||||
UserGroupRoles,
|
||||
} from "@budibase/types"
|
||||
|
||||
async function created(group: UserGroup, timestamp?: number) {
|
||||
const properties: GroupCreatedEvent = {
|
||||
groupId: group._id as string,
|
||||
audited: {
|
||||
name: group.name,
|
||||
},
|
||||
}
|
||||
await publishEvent(Event.USER_GROUP_CREATED, properties, timestamp)
|
||||
}
|
||||
|
@ -21,6 +25,9 @@ async function created(group: UserGroup, timestamp?: number) {
|
|||
async function updated(group: UserGroup) {
|
||||
const properties: GroupUpdatedEvent = {
|
||||
groupId: group._id as string,
|
||||
audited: {
|
||||
name: group.name,
|
||||
},
|
||||
}
|
||||
await publishEvent(Event.USER_GROUP_UPDATED, properties)
|
||||
}
|
||||
|
@ -28,6 +35,9 @@ async function updated(group: UserGroup) {
|
|||
async function deleted(group: UserGroup) {
|
||||
const properties: GroupDeletedEvent = {
|
||||
groupId: group._id as string,
|
||||
audited: {
|
||||
name: group.name,
|
||||
},
|
||||
}
|
||||
await publishEvent(Event.USER_GROUP_DELETED, properties)
|
||||
}
|
||||
|
@ -36,6 +46,9 @@ async function usersAdded(count: number, group: UserGroup) {
|
|||
const properties: GroupUsersAddedEvent = {
|
||||
count,
|
||||
groupId: group._id as string,
|
||||
audited: {
|
||||
name: group.name,
|
||||
},
|
||||
}
|
||||
await publishEvent(Event.USER_GROUP_USERS_ADDED, properties)
|
||||
}
|
||||
|
@ -44,6 +57,9 @@ async function usersDeleted(count: number, group: UserGroup) {
|
|||
const properties: GroupUsersDeletedEvent = {
|
||||
count,
|
||||
groupId: group._id as string,
|
||||
audited: {
|
||||
name: group.name,
|
||||
},
|
||||
}
|
||||
await publishEvent(Event.USER_GROUP_USERS_REMOVED, properties)
|
||||
}
|
||||
|
@@ -56,9 +72,13 @@ async function createdOnboarding(groupId: string) {
  await publishEvent(Event.USER_GROUP_ONBOARDING, properties)
}

async function permissionsEdited(roles: UserGroupRoles) {
  const properties: UserGroupRoles = {
    ...roles,
async function permissionsEdited(group: UserGroup) {
  const properties: GroupPermissionsEditedEvent = {
    permissions: group.roles!,
    groupId: group._id as string,
    audited: {
      name: group.name,
    },
  }
  await publishEvent(Event.USER_GROUP_PERMISSIONS_EDITED, properties)
}

@@ -21,3 +21,4 @@ export { default as group } from "./group"
export { default as plugin } from "./plugin"
export { default as backup } from "./backup"
export { default as environmentVariable } from "./environmentVariable"
export { default as auditLog } from "./auditLog"
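permissionsEdited now receives the whole group rather than a bare roles map, so the event can carry the group id and audited name. A hypothetical call site (the controller shape is an assumption):

import { events } from "@budibase/backend-core"
import { UserGroup } from "@budibase/types"

async function saveGroupRoles(group: UserGroup) {
  // ...persist group.roles first...
  await events.group.permissionsEdited(group)
}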
@ -11,6 +11,9 @@ async function created(screen: Screen, timestamp?: string | number) {
|
|||
layoutId: screen.layoutId,
|
||||
screenId: screen._id as string,
|
||||
roleId: screen.routing.roleId,
|
||||
audited: {
|
||||
name: screen.routing?.route,
|
||||
},
|
||||
}
|
||||
await publishEvent(Event.SCREEN_CREATED, properties, timestamp)
|
||||
}
|
||||
|
@ -20,6 +23,9 @@ async function deleted(screen: Screen) {
|
|||
layoutId: screen.layoutId,
|
||||
screenId: screen._id as string,
|
||||
roleId: screen.routing.roleId,
|
||||
audited: {
|
||||
name: screen.routing?.route,
|
||||
},
|
||||
}
|
||||
await publishEvent(Event.SCREEN_DELETED, properties)
|
||||
}
|
||||
|
|
|
@ -13,6 +13,9 @@ import {
|
|||
async function created(table: Table, timestamp?: string | number) {
|
||||
const properties: TableCreatedEvent = {
|
||||
tableId: table._id as string,
|
||||
audited: {
|
||||
name: table.name,
|
||||
},
|
||||
}
|
||||
await publishEvent(Event.TABLE_CREATED, properties, timestamp)
|
||||
}
|
||||
|
@ -20,6 +23,9 @@ async function created(table: Table, timestamp?: string | number) {
|
|||
async function updated(table: Table) {
|
||||
const properties: TableUpdatedEvent = {
|
||||
tableId: table._id as string,
|
||||
audited: {
|
||||
name: table.name,
|
||||
},
|
||||
}
|
||||
await publishEvent(Event.TABLE_UPDATED, properties)
|
||||
}
|
||||
|
@ -27,6 +33,9 @@ async function updated(table: Table) {
|
|||
async function deleted(table: Table) {
|
||||
const properties: TableDeletedEvent = {
|
||||
tableId: table._id as string,
|
||||
audited: {
|
||||
name: table.name,
|
||||
},
|
||||
}
|
||||
await publishEvent(Event.TABLE_DELETED, properties)
|
||||
}
|
||||
|
@ -35,6 +44,9 @@ async function exported(table: Table, format: TableExportFormat) {
|
|||
const properties: TableExportedEvent = {
|
||||
tableId: table._id as string,
|
||||
format,
|
||||
audited: {
|
||||
name: table.name,
|
||||
},
|
||||
}
|
||||
await publishEvent(Event.TABLE_EXPORTED, properties)
|
||||
}
|
||||
|
@ -42,6 +54,9 @@ async function exported(table: Table, format: TableExportFormat) {
|
|||
async function imported(table: Table) {
|
||||
const properties: TableImportedEvent = {
|
||||
tableId: table._id as string,
|
||||
audited: {
|
||||
name: table.name,
|
||||
},
|
||||
}
|
||||
await publishEvent(Event.TABLE_IMPORTED, properties)
|
||||
}
|
||||
|
|
|
@ -19,6 +19,9 @@ import {
|
|||
async function created(user: User, timestamp?: number) {
|
||||
const properties: UserCreatedEvent = {
|
||||
userId: user._id as string,
|
||||
audited: {
|
||||
email: user.email,
|
||||
},
|
||||
}
|
||||
await publishEvent(Event.USER_CREATED, properties, timestamp)
|
||||
}
|
||||
|
@ -26,6 +29,9 @@ async function created(user: User, timestamp?: number) {
|
|||
async function updated(user: User) {
|
||||
const properties: UserUpdatedEvent = {
|
||||
userId: user._id as string,
|
||||
audited: {
|
||||
email: user.email,
|
||||
},
|
||||
}
|
||||
await publishEvent(Event.USER_UPDATED, properties)
|
||||
}
|
||||
|
@ -33,6 +39,9 @@ async function updated(user: User) {
|
|||
async function deleted(user: User) {
|
||||
const properties: UserDeletedEvent = {
|
||||
userId: user._id as string,
|
||||
audited: {
|
||||
email: user.email,
|
||||
},
|
||||
}
|
||||
await publishEvent(Event.USER_DELETED, properties)
|
||||
}
|
||||
|
@ -40,6 +49,9 @@ async function deleted(user: User) {
|
|||
export async function onboardingComplete(user: User) {
|
||||
const properties: UserOnboardingEvent = {
|
||||
userId: user._id as string,
|
||||
audited: {
|
||||
email: user.email,
|
||||
},
|
||||
}
|
||||
await publishEvent(Event.USER_ONBOARDING_COMPLETE, properties)
|
||||
}
|
||||
|
@ -49,6 +61,9 @@ export async function onboardingComplete(user: User) {
|
|||
async function permissionAdminAssigned(user: User, timestamp?: number) {
|
||||
const properties: UserPermissionAssignedEvent = {
|
||||
userId: user._id as string,
|
||||
audited: {
|
||||
email: user.email,
|
||||
},
|
||||
}
|
||||
await publishEvent(
|
||||
Event.USER_PERMISSION_ADMIN_ASSIGNED,
|
||||
|
@ -60,6 +75,9 @@ async function permissionAdminAssigned(user: User, timestamp?: number) {
|
|||
async function permissionAdminRemoved(user: User) {
|
||||
const properties: UserPermissionRemovedEvent = {
|
||||
userId: user._id as string,
|
||||
audited: {
|
||||
email: user.email,
|
||||
},
|
||||
}
|
||||
await publishEvent(Event.USER_PERMISSION_ADMIN_REMOVED, properties)
|
||||
}
|
||||
|
@ -67,6 +85,9 @@ async function permissionAdminRemoved(user: User) {
|
|||
async function permissionBuilderAssigned(user: User, timestamp?: number) {
|
||||
const properties: UserPermissionAssignedEvent = {
|
||||
userId: user._id as string,
|
||||
audited: {
|
||||
email: user.email,
|
||||
},
|
||||
}
|
||||
await publishEvent(
|
||||
Event.USER_PERMISSION_BUILDER_ASSIGNED,
|
||||
|
@ -78,20 +99,30 @@ async function permissionBuilderAssigned(user: User, timestamp?: number) {
|
|||
async function permissionBuilderRemoved(user: User) {
|
||||
const properties: UserPermissionRemovedEvent = {
|
||||
userId: user._id as string,
|
||||
audited: {
|
||||
email: user.email,
|
||||
},
|
||||
}
|
||||
await publishEvent(Event.USER_PERMISSION_BUILDER_REMOVED, properties)
|
||||
}
|
||||
|
||||
// INVITE
|
||||
|
||||
async function invited() {
|
||||
const properties: UserInvitedEvent = {}
|
||||
async function invited(email: string) {
|
||||
const properties: UserInvitedEvent = {
|
||||
audited: {
|
||||
email,
|
||||
},
|
||||
}
|
||||
await publishEvent(Event.USER_INVITED, properties)
|
||||
}
|
||||
|
||||
async function inviteAccepted(user: User) {
|
||||
const properties: UserInviteAcceptedEvent = {
|
||||
userId: user._id as string,
|
||||
audited: {
|
||||
email: user.email,
|
||||
},
|
||||
}
|
||||
await publishEvent(Event.USER_INVITED_ACCEPTED, properties)
|
||||
}
|
||||
|
@ -101,6 +132,9 @@ async function inviteAccepted(user: User) {
|
|||
async function passwordForceReset(user: User) {
|
||||
const properties: UserPasswordForceResetEvent = {
|
||||
userId: user._id as string,
|
||||
audited: {
|
||||
email: user.email,
|
||||
},
|
||||
}
|
||||
await publishEvent(Event.USER_PASSWORD_FORCE_RESET, properties)
|
||||
}
|
||||
|
@ -108,6 +142,9 @@ async function passwordForceReset(user: User) {
|
|||
async function passwordUpdated(user: User) {
|
||||
const properties: UserPasswordUpdatedEvent = {
|
||||
userId: user._id as string,
|
||||
audited: {
|
||||
email: user.email,
|
||||
},
|
||||
}
|
||||
await publishEvent(Event.USER_PASSWORD_UPDATED, properties)
|
||||
}
|
||||
|
@ -115,6 +152,9 @@ async function passwordUpdated(user: User) {
|
|||
async function passwordResetRequested(user: User) {
|
||||
const properties: UserPasswordResetRequestedEvent = {
|
||||
userId: user._id as string,
|
||||
audited: {
|
||||
email: user.email,
|
||||
},
|
||||
}
|
||||
await publishEvent(Event.USER_PASSWORD_RESET_REQUESTED, properties)
|
||||
}
|
||||
|
@ -122,6 +162,9 @@ async function passwordResetRequested(user: User) {
|
|||
async function passwordReset(user: User) {
|
||||
const properties: UserPasswordResetEvent = {
|
||||
userId: user._id as string,
|
||||
audited: {
|
||||
email: user.email,
|
||||
},
|
||||
}
|
||||
await publishEvent(Event.USER_PASSWORD_RESET, properties)
|
||||
}
|
||||
|
|
|
@@ -1,5 +1,5 @@
import env from "../environment"
import * as tenancy from "../tenancy"
import * as context from "../context"

/**
 * Read the TENANT_FEATURE_FLAGS env var and return an array of features flags for each tenant.

@@ -28,7 +28,7 @@ export function buildFeatureFlags() {
}

export function isEnabled(featureFlag: string) {
  const tenantId = tenancy.getTenantId()
  const tenantId = context.getTenantId()
  const flags = getTenantFeatureFlags(tenantId)
  return flags.includes(featureFlag)
}
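isEnabled now resolves the tenant through the context module rather than tenancy. A small usage sketch - the flag name is an assumption:

import { featureFlags } from "@budibase/backend-core"

function betaEnabledForCurrentTenant(): boolean {
  // true when the current tenant lists the flag in TENANT_FEATURE_FLAGS
  return featureFlags.isEnabled("SOME_BETA_FLAG")
}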
@@ -1,14 +1,14 @@
export * as configs from "./configs"
export * as events from "./events"
export * as migrations from "./migrations"
export * as users from "./users"
export * as roles from "./security/roles"
export * as permissions from "./security/permissions"
export * as accounts from "./cloud/accounts"
export * as accounts from "./accounts"
export * as installation from "./installation"
export * as tenancy from "./tenancy"
export * as featureFlags from "./featureFlags"
export * as sessions from "./security/sessions"
export * as deprovisioning from "./context/deprovision"
export * as platform from "./platform"
export * as auth from "./auth"
export * as constants from "./constants"
export * as logging from "./logging"

@@ -21,9 +21,20 @@ export * as context from "./context"
export * as cache from "./cache"
export * as objectStore from "./objectStore"
export * as redis from "./redis"
export * as locks from "./redis/redlockImpl"
export * as utils from "./utils"
export * as errors from "./errors"
export { default as env } from "./environment"
export { SearchParams } from "./db"
// Add context to tenancy for backwards compatibility
// only do this for external usages to prevent internal
// circular dependencies
import * as context from "./context"
import * as _tenancy from "./tenancy"
export const tenancy = {
  ..._tenancy,
  ...context,
}

// expose error classes directly
export * from "./errors"

@@ -31,10 +42,6 @@ export * from "./errors"
// expose constants directly
export * from "./constants"

// expose inner locks from redis directly
import * as redis from "./redis"
export const locks = redis.redlock

// expose package init function
import * as db from "./db"
export const init = (opts: any = {}) => {
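The spread above keeps old external call sites working while internal code moves to the context module. A sketch of what that compatibility layer allows (illustrative only):

import { tenancy, context } from "@budibase/backend-core"

function currentTenant() {
  // legacy callers keep using tenancy...
  const viaTenancy = tenancy.getTenantId()
  // ...new code goes straight to context; both resolve the same id
  const viaContext = context.getTenantId()
  return viaTenancy === viaContext
}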
@@ -4,11 +4,11 @@ import { getUser } from "../cache/user"
import { getSession, updateSessionTTL } from "../security/sessions"
import { buildMatcherRegex, matches } from "./matchers"
import { SEPARATOR, queryGlobalView, ViewName } from "../db"
import { getGlobalDB, doInTenant } from "../tenancy"
import { getGlobalDB, doInTenant } from "../context"
import { decrypt } from "../security/encryption"
import * as identity from "../context/identity"
import env from "../environment"
import { BBContext, EndpointMatcher } from "@budibase/types"
import { Ctx, EndpointMatcher } from "@budibase/types"

const ONE_MINUTE = env.SESSION_UPDATE_PERIOD
  ? parseInt(env.SESSION_UPDATE_PERIOD)

@@ -73,7 +73,7 @@ export default function (
  }
) {
  const noAuthOptions = noAuthPatterns ? buildMatcherRegex(noAuthPatterns) : []
  return async (ctx: BBContext | any, next: any) => {
  return async (ctx: Ctx | any, next: any) => {
    let publicEndpoint = false
    const version = ctx.request.headers[Header.API_VER]
    // the path is not authenticated

@@ -115,7 +115,8 @@ export default function (
        authenticated = true
      } catch (err: any) {
        authenticated = false
        console.error("Auth Error", err?.message || err)
        console.error(`Auth Error: ${err.message}`)
        console.error(err)
        // remove the cookie as the user does not exist anymore
        clearCookie(ctx, Cookie.Auth)
      }

@@ -148,12 +149,13 @@ export default function (
      finalise(ctx, { authenticated, user, internal, version, publicEndpoint })

      if (user && user.email) {
        return identity.doInUserContext(user, next)
        return identity.doInUserContext(user, ctx, next)
      } else {
        return next()
      }
    } catch (err: any) {
      console.error("Auth Error", err?.message || err)
      console.error(`Auth Error: ${err.message}`)
      console.error(err)
      // invalid token, clear the cookie
      if (err && err.name === "JsonWebTokenError") {
        clearCookie(ctx, Cookie.Auth)
@@ -0,0 +1,29 @@
import { APIError } from "@budibase/types"
import * as errors from "../errors"
import env from "../environment"

export async function errorHandling(ctx: any, next: any) {
  try {
    await next()
  } catch (err: any) {
    const status = err.status || err.statusCode || 500
    ctx.status = status

    if (status > 499 || env.ENABLE_4XX_HTTP_LOGGING) {
      ctx.log.error(err)
      console.trace(err)
    }

    const error = errors.getPublicError(err)
    const body: APIError = {
      message: err.message,
      status: status,
      validationErrors: err.validation,
      error,
    }

    ctx.body = body
  }
}

export default errorHandling
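A hypothetical registration of the new error handling middleware on a Koa app - the middleware export path is an assumption:

import Koa from "koa"
import { middleware } from "@budibase/backend-core"

const app = new Koa()
// register early so thrown errors become structured APIError bodies
app.use(middleware.errorHandling)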
@@ -1,7 +1,7 @@
export * as jwt from "./passport/jwt"
export * as local from "./passport/local"
export * as google from "./passport/google"
export * as oidc from "./passport/oidc"
export * as google from "./passport/sso/google"
export * as oidc from "./passport/sso/oidc"
import * as datasourceGoogle from "./passport/datasource/google"
export const datasource = {
  google: datasourceGoogle,

@@ -16,4 +16,6 @@ export { default as adminOnly } from "./adminOnly"
export { default as builderOrAdmin } from "./builderOrAdmin"
export { default as builderOnly } from "./builderOnly"
export { default as logging } from "./logging"
export { default as errorHandling } from "./errorHandling"
export { default as querystringToBody } from "./querystringToBody"
export * as joiValidator from "./joi-validator"
@ -1,10 +1,10 @@
|
|||
import * as google from "../google"
|
||||
import { Cookie, Config } from "../../../constants"
|
||||
import * as google from "../sso/google"
|
||||
import { Cookie } from "../../../constants"
|
||||
import { clearCookie, getCookie } from "../../../utils"
|
||||
import { getScopedConfig, getPlatformUrl, doWithDB } from "../../../db"
|
||||
import environment from "../../../environment"
|
||||
import { getGlobalDB } from "../../../tenancy"
|
||||
import { doWithDB } from "../../../db"
|
||||
import * as configs from "../../../configs"
|
||||
import { BBContext, Database, SSOProfile } from "@budibase/types"
|
||||
import { ssoSaveUserNoOp } from "../sso/sso"
|
||||
const GoogleStrategy = require("passport-google-oauth").OAuth2Strategy
|
||||
|
||||
type Passport = {
|
||||
|
@ -12,18 +12,12 @@ type Passport = {
|
|||
}
|
||||
|
||||
async function fetchGoogleCreds() {
|
||||
// try and get the config from the tenant
|
||||
const db = getGlobalDB()
|
||||
const googleConfig = await getScopedConfig(db, {
|
||||
type: Config.GOOGLE,
|
||||
})
|
||||
// or fall back to env variables
|
||||
return (
|
||||
googleConfig || {
|
||||
clientID: environment.GOOGLE_CLIENT_ID,
|
||||
clientSecret: environment.GOOGLE_CLIENT_SECRET,
|
||||
}
|
||||
)
|
||||
let config = await configs.getGoogleDatasourceConfig()
|
||||
|
||||
if (!config) {
|
||||
throw new Error("No google configuration found")
|
||||
}
|
||||
return config
|
||||
}
|
||||
|
||||
export async function preAuth(
|
||||
|
@ -33,10 +27,14 @@ export async function preAuth(
|
|||
) {
|
||||
// get the relevant config
|
||||
const googleConfig = await fetchGoogleCreds()
|
||||
const platformUrl = await getPlatformUrl({ tenantAware: false })
|
||||
const platformUrl = await configs.getPlatformUrl({ tenantAware: false })
|
||||
|
||||
let callbackUrl = `${platformUrl}/api/global/auth/datasource/google/callback`
|
||||
const strategy = await google.strategyFactory(googleConfig, callbackUrl)
|
||||
const strategy = await google.strategyFactory(
|
||||
googleConfig,
|
||||
callbackUrl,
|
||||
ssoSaveUserNoOp
|
||||
)
|
||||
|
||||
if (!ctx.query.appId || !ctx.query.datasourceId) {
|
||||
ctx.throw(400, "appId and datasourceId query params not present.")
|
||||
|
@ -56,7 +54,7 @@ export async function postAuth(
|
|||
) {
|
||||
// get the relevant config
|
||||
const config = await fetchGoogleCreds()
|
||||
const platformUrl = await getPlatformUrl({ tenantAware: false })
|
||||
const platformUrl = await configs.getPlatformUrl({ tenantAware: false })
|
||||
|
||||
let callbackUrl = `${platformUrl}/api/global/auth/datasource/google/callback`
|
||||
const authStateCookie = getCookie(ctx, Cookie.DatasourceAuth)
|
||||
|
|
|
@ -1,15 +1,10 @@
|
|||
import { UserStatus } from "../../constants"
|
||||
import { compare, newid } from "../../utils"
|
||||
import env from "../../environment"
|
||||
import { compare } from "../../utils"
|
||||
import * as users from "../../users"
|
||||
import { authError } from "./utils"
|
||||
import { createASession } from "../../security/sessions"
|
||||
import { getTenantId } from "../../tenancy"
|
||||
import { BBContext } from "@budibase/types"
|
||||
const jwt = require("jsonwebtoken")
|
||||
|
||||
const INVALID_ERR = "Invalid credentials"
|
||||
const SSO_NO_PASSWORD = "SSO user does not have a password set"
|
||||
const EXPIRED = "This account has expired. Please reset your password"
|
||||
|
||||
export const options = {
|
||||
|
@ -35,50 +30,25 @@ export async function authenticate(
|
|||
|
||||
const dbUser = await users.getGlobalUserByEmail(email)
|
||||
if (dbUser == null) {
|
||||
return authError(done, `User not found: [${email}]`)
|
||||
}
|
||||
|
||||
// check that the user is currently inactive, if this is the case throw invalid
|
||||
if (dbUser.status === UserStatus.INACTIVE) {
|
||||
console.info(`user=${email} could not be found`)
|
||||
return authError(done, INVALID_ERR)
|
||||
}
|
||||
|
||||
// check that the user has a stored password before proceeding
|
||||
if (!dbUser.password) {
|
||||
if (
|
||||
(dbUser.account && dbUser.account.authType === "sso") || // root account sso
|
||||
dbUser.thirdPartyProfile // internal sso
|
||||
) {
|
||||
return authError(done, SSO_NO_PASSWORD)
|
||||
}
|
||||
if (dbUser.status === UserStatus.INACTIVE) {
|
||||
console.info(`user=${email} is inactive`, dbUser)
|
||||
return authError(done, INVALID_ERR)
|
||||
}
|
||||
|
||||
console.error("Non SSO usser has no password set", dbUser)
|
||||
if (!dbUser.password) {
|
||||
console.info(`user=${email} has no password set`, dbUser)
|
||||
return authError(done, EXPIRED)
|
||||
}
|
||||
|
||||
// authenticate
|
||||
if (await compare(password, dbUser.password)) {
|
||||
const sessionId = newid()
|
||||
const tenantId = getTenantId()
|
||||
|
||||
await createASession(dbUser._id!, { sessionId, tenantId })
|
||||
|
||||
const token = jwt.sign(
|
||||
{
|
||||
userId: dbUser._id,
|
||||
sessionId,
|
||||
tenantId,
|
||||
},
|
||||
env.JWT_SECRET
|
||||
)
|
||||
// Remove users password in payload
|
||||
delete dbUser.password
|
||||
|
||||
return done(null, {
|
||||
...dbUser,
|
||||
token,
|
||||
})
|
||||
} else {
|
||||
if (!(await compare(password, dbUser.password))) {
|
||||
return authError(done, INVALID_ERR)
|
||||
}
|
||||
|
||||
// intentionally remove the users password in payload
|
||||
delete dbUser.password
|
||||
return done(null, dbUser)
|
||||
}
|
||||
|
|
|
@ -1,18 +1,25 @@
|
|||
import { ssoCallbackUrl } from "./utils"
|
||||
import { authenticateThirdParty, SaveUserFunction } from "./third-party-common"
|
||||
import { ConfigType, GoogleConfig, Database, SSOProfile } from "@budibase/types"
|
||||
import { ssoCallbackUrl } from "../utils"
|
||||
import * as sso from "./sso"
|
||||
import {
|
||||
ConfigType,
|
||||
SSOProfile,
|
||||
SSOAuthDetails,
|
||||
SSOProviderType,
|
||||
SaveSSOUserFunction,
|
||||
GoogleInnerConfig,
|
||||
} from "@budibase/types"
|
||||
const GoogleStrategy = require("passport-google-oauth").OAuth2Strategy
|
||||
|
||||
export function buildVerifyFn(saveUserFn?: SaveUserFunction) {
|
||||
export function buildVerifyFn(saveUserFn: SaveSSOUserFunction) {
|
||||
return (
|
||||
accessToken: string,
|
||||
refreshToken: string,
|
||||
profile: SSOProfile,
|
||||
done: Function
|
||||
) => {
|
||||
const thirdPartyUser = {
|
||||
provider: profile.provider, // should always be 'google'
|
||||
providerType: "google",
|
||||
const details: SSOAuthDetails = {
|
||||
provider: "google",
|
||||
providerType: SSOProviderType.GOOGLE,
|
||||
userId: profile.id,
|
||||
profile: profile,
|
||||
email: profile._json.email,
|
||||
|
@ -22,8 +29,8 @@ export function buildVerifyFn(saveUserFn?: SaveUserFunction) {
|
|||
},
|
||||
}
|
||||
|
||||
return authenticateThirdParty(
|
||||
thirdPartyUser,
|
||||
return sso.authenticate(
|
||||
details,
|
||||
true, // require local accounts to exist
|
||||
done,
|
||||
saveUserFn
|
||||
|
@ -37,9 +44,9 @@ export function buildVerifyFn(saveUserFn?: SaveUserFunction) {
|
|||
* @returns Dynamically configured Passport Google Strategy
|
||||
*/
|
||||
export async function strategyFactory(
|
||||
config: GoogleConfig["config"],
|
||||
config: GoogleInnerConfig,
|
||||
callbackUrl: string,
|
||||
saveUserFn?: SaveUserFunction
|
||||
saveUserFn: SaveSSOUserFunction
|
||||
) {
|
||||
try {
|
||||
const { clientID, clientSecret } = config
|
||||
|
@ -65,9 +72,6 @@ export async function strategyFactory(
|
|||
}
|
||||
}
|
||||
|
||||
export async function getCallbackUrl(
|
||||
db: Database,
|
||||
config: { callbackURL?: string }
|
||||
) {
|
||||
return ssoCallbackUrl(db, config, ConfigType.GOOGLE)
|
||||
export async function getCallbackUrl(config: GoogleInnerConfig) {
|
||||
return ssoCallbackUrl(ConfigType.GOOGLE, config)
|
||||
}
|
|
@ -1,22 +1,19 @@
|
|||
import fetch from "node-fetch"
|
||||
import { authenticateThirdParty, SaveUserFunction } from "./third-party-common"
|
||||
import { ssoCallbackUrl } from "./utils"
|
||||
import * as sso from "./sso"
|
||||
import { ssoCallbackUrl } from "../utils"
|
||||
import {
|
||||
ConfigType,
|
||||
OIDCInnerCfg,
|
||||
Database,
|
||||
OIDCInnerConfig,
|
||||
SSOProfile,
|
||||
ThirdPartyUser,
|
||||
OIDCConfiguration,
|
||||
OIDCStrategyConfiguration,
|
||||
SSOAuthDetails,
|
||||
SSOProviderType,
|
||||
JwtClaims,
|
||||
SaveSSOUserFunction,
|
||||
} from "@budibase/types"
|
||||
const OIDCStrategy = require("@techpass/passport-openidconnect").Strategy
|
||||
|
||||
type JwtClaims = {
|
||||
preferred_username: string
|
||||
email: string
|
||||
}
|
||||
|
||||
export function buildVerifyFn(saveUserFn?: SaveUserFunction) {
|
||||
export function buildVerifyFn(saveUserFn: SaveSSOUserFunction) {
|
||||
/**
|
||||
* @param {*} issuer The identity provider base URL
|
||||
* @param {*} sub The user ID
|
||||
|
@ -39,10 +36,10 @@ export function buildVerifyFn(saveUserFn?: SaveUserFunction) {
|
|||
params: any,
|
||||
done: Function
|
||||
) => {
|
||||
const thirdPartyUser: ThirdPartyUser = {
|
||||
const details: SSOAuthDetails = {
|
||||
// store the issuer info to enable sync in future
|
||||
provider: issuer,
|
||||
providerType: "oidc",
|
||||
providerType: SSOProviderType.OIDC,
|
||||
userId: profile.id,
|
||||
profile: profile,
|
||||
email: getEmail(profile, jwtClaims),
|
||||
|
@ -52,8 +49,8 @@ export function buildVerifyFn(saveUserFn?: SaveUserFunction) {
|
|||
},
|
||||
}
|
||||
|
||||
return authenticateThirdParty(
|
||||
thirdPartyUser,
|
||||
return sso.authenticate(
|
||||
details,
|
||||
false, // don't require local accounts to exist
|
||||
done,
|
||||
saveUserFn
|
||||
|
@ -104,8 +101,8 @@ function validEmail(value: string) {
|
|||
* @returns Dynamically configured Passport OIDC Strategy
|
||||
*/
|
||||
export async function strategyFactory(
|
||||
config: OIDCConfiguration,
|
||||
saveUserFn?: SaveUserFunction
|
||||
config: OIDCStrategyConfiguration,
|
||||
saveUserFn: SaveSSOUserFunction
|
||||
) {
|
||||
try {
|
||||
const verify = buildVerifyFn(saveUserFn)
|
||||
|
@ -119,14 +116,14 @@ export async function strategyFactory(
|
|||
}
|
||||
|
||||
export async function fetchStrategyConfig(
|
||||
enrichedConfig: OIDCInnerCfg,
|
||||
oidcConfig: OIDCInnerConfig,
|
||||
callbackUrl?: string
|
||||
): Promise<OIDCConfiguration> {
|
||||
): Promise<OIDCStrategyConfiguration> {
|
||||
try {
|
||||
const { clientID, clientSecret, configUrl } = enrichedConfig
|
||||
const { clientID, clientSecret, configUrl } = oidcConfig
|
||||
|
||||
if (!clientID || !clientSecret || !callbackUrl || !configUrl) {
|
||||
//check for remote config and all required elements
|
||||
// check for remote config and all required elements
|
||||
throw new Error(
|
||||
"Configuration invalid. Must contain clientID, clientSecret, callbackUrl and configUrl"
|
||||
)
|
||||
|
@ -159,9 +156,6 @@ export async function fetchStrategyConfig(
|
|||
}
|
||||
}
|
||||
|
||||
export async function getCallbackUrl(
|
||||
db: Database,
|
||||
config: { callbackURL?: string }
|
||||
) {
|
||||
return ssoCallbackUrl(db, config, ConfigType.OIDC)
|
||||
export async function getCallbackUrl() {
|
||||
return ssoCallbackUrl(ConfigType.OIDC)
|
||||
}
|
|
@@ -0,0 +1,165 @@
import { generateGlobalUserID } from "../../../db"
import { authError } from "../utils"
import * as users from "../../../users"
import * as context from "../../../context"
import fetch from "node-fetch"
import {
  SaveSSOUserFunction,
  SaveUserOpts,
  SSOAuthDetails,
  SSOUser,
  User,
} from "@budibase/types"

// no-op function for user save
// - this allows datasource auth and access token refresh to work correctly
// - prefer no-op over an optional argument to ensure function is provided to login flows
export const ssoSaveUserNoOp: SaveSSOUserFunction = (
  user: SSOUser,
  opts: SaveUserOpts
) => Promise.resolve(user)

/**
 * Common authentication logic for third parties. e.g. OAuth, OIDC.
 */
export async function authenticate(
  details: SSOAuthDetails,
  requireLocalAccount: boolean = true,
  done: any,
  saveUserFn: SaveSSOUserFunction
) {
  if (!saveUserFn) {
    throw new Error("Save user function must be provided")
  }
  if (!details.userId) {
    return authError(done, "sso user id required")
  }
  if (!details.email) {
    return authError(done, "sso user email required")
  }

  // use the third party id
  const userId = generateGlobalUserID(details.userId)

  let dbUser: User | undefined

  // try to load by id
  try {
    dbUser = await users.getById(userId)
  } catch (err: any) {
    // abort when not 404 error
    if (!err.status || err.status !== 404) {
      return authError(
        done,
        "Unexpected error when retrieving existing user",
        err
      )
    }
  }

  // fallback to loading by email
  if (!dbUser) {
    dbUser = await users.getGlobalUserByEmail(details.email)
  }

  // exit early if there is still no user and auto creation is disabled
  if (!dbUser && requireLocalAccount) {
    return authError(
      done,
      "Email does not yet exist. You must set up your local budibase account first."
    )
  }

  // first time creation
  if (!dbUser) {
    // setup a blank user using the third party id
    dbUser = {
      _id: userId,
      email: details.email,
      roles: {},
      tenantId: context.getTenantId(),
    }
  }

  let ssoUser = await syncUser(dbUser, details)
  // never prompt for password reset
  ssoUser.forceResetPassword = false

  try {
    // don't try to re-save any existing password
    delete ssoUser.password
    // create or sync the user
    ssoUser = (await saveUserFn(ssoUser, {
      hashPassword: false,
      requirePassword: false,
    })) as SSOUser
  } catch (err: any) {
    return authError(done, "Error saving user", err)
  }

  return done(null, ssoUser)
}

async function getProfilePictureUrl(user: User, details: SSOAuthDetails) {
  const pictureUrl = details.profile?._json.picture
  if (pictureUrl) {
    const response = await fetch(pictureUrl)
    if (response.status === 200) {
      const type = response.headers.get("content-type") as string
      if (type.startsWith("image/")) {
        return pictureUrl
      }
    }
  }
}

/**
 * @returns a user that has been sync'd with third party information
 */
async function syncUser(user: User, details: SSOAuthDetails): Promise<SSOUser> {
  let firstName
  let lastName
  let pictureUrl
  let oauth2
  let thirdPartyProfile

  if (details.profile) {
    const profile = details.profile

    if (profile.name) {
      const name = profile.name
      // first name
      if (name.givenName) {
        firstName = name.givenName
      }
      // last name
      if (name.familyName) {
        lastName = name.familyName
      }
    }

    pictureUrl = await getProfilePictureUrl(user, details)

    thirdPartyProfile = {
      ...profile._json,
    }
  }

  // oauth tokens for future use
  if (details.oauth2) {
    oauth2 = {
      ...details.oauth2,
    }
  }

  return {
    ...user,
    provider: details.provider,
    providerType: details.providerType,
    firstName,
    lastName,
    thirdPartyProfile,
    pictureUrl,
    oauth2,
  }
}
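A hedged sketch of how a passport verify callback can delegate to the shared authenticate function above; the stand-in save function is an assumption (real flows pass the worker's user save):

import * as sso from "./sso"
import { SaveSSOUserFunction, SSOAuthDetails } from "@budibase/types"

// stand-in save function for illustration only
const saveUser: SaveSSOUserFunction = (user, _opts) => Promise.resolve(user)

export function verify(details: SSOAuthDetails, done: Function) {
  // require an existing local account, mirroring the Google datasource flow
  return sso.authenticate(details, true, done, saveUser)
}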
@ -0,0 +1,67 @@
|
|||
import { generator, structures } from "../../../../../tests"
|
||||
import { SSOProviderType } from "@budibase/types"
|
||||
|
||||
jest.mock("passport-google-oauth")
|
||||
const mockStrategy = require("passport-google-oauth").OAuth2Strategy
|
||||
|
||||
jest.mock("../sso")
|
||||
import * as _sso from "../sso"
|
||||
const sso = jest.mocked(_sso)
|
||||
|
||||
const mockSaveUserFn = jest.fn()
|
||||
const mockDone = jest.fn()
|
||||
|
||||
import * as google from "../google"
|
||||
|
||||
describe("google", () => {
|
||||
describe("strategyFactory", () => {
|
||||
const googleConfig = structures.sso.googleConfig()
|
||||
const callbackUrl = generator.url()
|
||||
|
||||
it("should create successfully create a google strategy", async () => {
|
||||
await google.strategyFactory(googleConfig, callbackUrl, mockSaveUserFn)
|
||||
|
||||
const expectedOptions = {
|
||||
clientID: googleConfig.clientID,
|
||||
clientSecret: googleConfig.clientSecret,
|
||||
callbackURL: callbackUrl,
|
||||
}
|
||||
|
||||
expect(mockStrategy).toHaveBeenCalledWith(
|
||||
expectedOptions,
|
||||
expect.anything()
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe("authenticate", () => {
|
||||
const details = structures.sso.authDetails()
|
||||
details.provider = "google"
|
||||
details.providerType = SSOProviderType.GOOGLE
|
||||
|
||||
const profile = details.profile!
|
||||
profile.provider = "google"
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks()
|
||||
})
|
||||
|
||||
it("delegates authentication to third party common", async () => {
|
||||
const authenticate = await google.buildVerifyFn(mockSaveUserFn)
|
||||
|
||||
await authenticate(
|
||||
details.oauth2.accessToken,
|
||||
details.oauth2.refreshToken!,
|
||||
profile,
|
||||
mockDone
|
||||
)
|
||||
|
||||
expect(sso.authenticate).toHaveBeenCalledWith(
|
||||
details,
|
||||
true,
|
||||
mockDone,
|
||||
mockSaveUserFn
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
|
@ -0,0 +1,152 @@
|
|||
import { generator, mocks, structures } from "../../../../../tests"
|
||||
import {
|
||||
JwtClaims,
|
||||
OIDCInnerConfig,
|
||||
SSOAuthDetails,
|
||||
SSOProviderType,
|
||||
} from "@budibase/types"
|
||||
import * as _sso from "../sso"
|
||||
import * as oidc from "../oidc"
|
||||
|
||||
jest.mock("@techpass/passport-openidconnect")
|
||||
const mockStrategy = require("@techpass/passport-openidconnect").Strategy
|
||||
|
||||
jest.mock("../sso")
|
||||
const sso = jest.mocked(_sso)
|
||||
|
||||
const mockSaveUser = jest.fn()
|
||||
const mockDone = jest.fn()
|
||||
|
||||
describe("oidc", () => {
|
||||
const callbackUrl = generator.url()
|
||||
const oidcConfig: OIDCInnerConfig = structures.sso.oidcConfig()
|
||||
const wellKnownConfig = structures.sso.oidcWellKnownConfig()
|
||||
|
||||
function mockRetrieveWellKnownConfig() {
|
||||
// mock the request to retrieve the oidc configuration
|
||||
mocks.fetch.mockReturnValue({
|
||||
ok: true,
|
||||
json: () => wellKnownConfig,
|
||||
})
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
mockRetrieveWellKnownConfig()
|
||||
})
|
||||
|
||||
describe("strategyFactory", () => {
|
||||
it("should create successfully create an oidc strategy", async () => {
|
||||
const strategyConfiguration = await oidc.fetchStrategyConfig(
|
||||
oidcConfig,
|
||||
callbackUrl
|
||||
)
|
||||
await oidc.strategyFactory(strategyConfiguration, mockSaveUser)
|
||||
|
||||
expect(mocks.fetch).toHaveBeenCalledWith(oidcConfig.configUrl)
|
||||
|
||||
const expectedOptions = {
|
||||
issuer: wellKnownConfig.issuer,
|
||||
authorizationURL: wellKnownConfig.authorization_endpoint,
|
||||
tokenURL: wellKnownConfig.token_endpoint,
|
||||
userInfoURL: wellKnownConfig.userinfo_endpoint,
|
||||
clientID: oidcConfig.clientID,
|
||||
clientSecret: oidcConfig.clientSecret,
|
||||
callbackURL: callbackUrl,
|
||||
}
|
||||
expect(mockStrategy).toHaveBeenCalledWith(
|
||||
expectedOptions,
|
||||
expect.anything()
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe("authenticate", () => {
|
||||
const details: SSOAuthDetails = structures.sso.authDetails()
|
||||
details.providerType = SSOProviderType.OIDC
|
||||
const profile = details.profile!
|
||||
const issuer = profile.provider
|
||||
|
||||
const sub = generator.string()
|
||||
const idToken = generator.string()
|
||||
const params = {}
|
||||
|
||||
let authenticateFn: any
|
||||
let jwtClaims: JwtClaims
|
||||
|
||||
beforeEach(async () => {
|
||||
jest.clearAllMocks()
|
||||
authenticateFn = await oidc.buildVerifyFn(mockSaveUser)
|
||||
})
|
||||
|
||||
async function authenticate() {
|
||||
await authenticateFn(
|
||||
issuer,
|
||||
sub,
|
||||
profile,
|
||||
jwtClaims,
|
||||
details.oauth2.accessToken,
|
||||
details.oauth2.refreshToken,
|
||||
idToken,
|
||||
params,
|
||||
mockDone
|
||||
)
|
||||
}
|
||||
|
||||
it("passes auth details to sso module", async () => {
|
||||
await authenticate()
|
||||
|
||||
expect(sso.authenticate).toHaveBeenCalledWith(
|
||||
details,
|
||||
false,
|
||||
mockDone,
|
||||
mockSaveUser
|
||||
)
|
||||
})
|
||||
|
||||
it("uses JWT email to get email", async () => {
|
||||
delete profile._json.email
|
||||
|
||||
jwtClaims = {
|
||||
email: details.email,
|
||||
}
|
||||
|
||||
await authenticate()
|
||||
|
||||
expect(sso.authenticate).toHaveBeenCalledWith(
|
||||
details,
|
||||
false,
|
||||
mockDone,
|
||||
mockSaveUser
|
||||
)
|
||||
})
|
||||
|
||||
it("uses JWT username to get email", async () => {
|
||||
delete profile._json.email
|
||||
|
||||
jwtClaims = {
|
||||
email: details.email,
|
||||
}
|
||||
|
||||
await authenticate()
|
||||
|
||||
expect(sso.authenticate).toHaveBeenCalledWith(
|
||||
details,
|
||||
false,
|
||||
mockDone,
|
||||
mockSaveUser
|
||||
)
|
||||
})
|
||||
|
||||
it("uses JWT invalid username to get email", async () => {
|
||||
delete profile._json.email
|
||||
|
||||
jwtClaims = {
|
||||
preferred_username: "invalidUsername",
|
||||
}
|
||||
|
||||
await expect(authenticate()).rejects.toThrow(
|
||||
"Could not determine user email from profile"
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
|
@ -0,0 +1,196 @@
|
|||
import { structures, testEnv, mocks } from "../../../../../tests"
|
||||
import { SSOAuthDetails, User } from "@budibase/types"
|
||||
|
||||
import { HTTPError } from "../../../../errors"
|
||||
import * as sso from "../sso"
|
||||
import * as context from "../../../../context"
|
||||
|
||||
const mockDone = jest.fn()
|
||||
const mockSaveUser = jest.fn()
|
||||
|
||||
jest.mock("../../../../users")
|
||||
import * as _users from "../../../../users"
|
||||
const users = jest.mocked(_users)
|
||||
|
||||
const getErrorMessage = () => {
|
||||
return mockDone.mock.calls[0][2].message
|
||||
}
|
||||
|
||||
describe("sso", () => {
|
||||
describe("authenticate", () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks()
|
||||
testEnv.singleTenant()
|
||||
})
|
||||
|
||||
describe("validation", () => {
|
||||
const testValidation = async (
|
||||
details: SSOAuthDetails,
|
||||
message: string
|
||||
) => {
|
||||
await sso.authenticate(details, false, mockDone, mockSaveUser)
|
||||
|
||||
expect(mockDone.mock.calls.length).toBe(1)
|
||||
expect(getErrorMessage()).toContain(message)
|
||||
}
|
||||
|
||||
it("user id fails", async () => {
|
||||
const details = structures.sso.authDetails()
|
||||
details.userId = undefined!
|
||||
|
||||
await testValidation(details, "sso user id required")
|
||||
})
|
||||
|
||||
it("email fails", async () => {
|
||||
const details = structures.sso.authDetails()
|
||||
details.email = undefined!
|
||||
|
||||
await testValidation(details, "sso user email required")
|
||||
})
|
||||
})
|
||||
|
||||
function mockGetProfilePicture() {
|
||||
mocks.fetch.mockReturnValueOnce(
|
||||
Promise.resolve({
|
||||
status: 200,
|
||||
headers: { get: () => "image/" },
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
describe("when the user doesn't exist", () => {
|
||||
let user: User
|
||||
let details: SSOAuthDetails
|
||||
|
||||
beforeEach(() => {
|
||||
users.getById.mockImplementationOnce(() => {
|
||||
throw new HTTPError("", 404)
|
||||
})
|
||||
mockGetProfilePicture()
|
||||
|
||||
user = structures.users.user()
|
||||
delete user._rev
|
||||
delete user._id
|
||||
|
||||
details = structures.sso.authDetails(user)
|
||||
details.userId = structures.uuid()
|
||||
})
|
||||
|
||||
describe("when a local account is required", () => {
|
||||
it("returns an error message", async () => {
|
||||
const details = structures.sso.authDetails()
|
||||
|
||||
await sso.authenticate(details, true, mockDone, mockSaveUser)
|
||||
|
||||
expect(mockDone.mock.calls.length).toBe(1)
|
||||
expect(getErrorMessage()).toContain(
|
||||
"Email does not yet exist. You must set up your local budibase account first."
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe("when a local account isn't required", () => {
|
||||
it("creates and authenticates the user", async () => {
|
||||
const ssoUser = structures.users.ssoUser({ user, details })
|
||||
mockSaveUser.mockReturnValueOnce(ssoUser)
|
||||
|
||||
await sso.authenticate(details, false, mockDone, mockSaveUser)
|
||||
|
||||
// default roles for new user
|
||||
ssoUser.roles = {}
|
||||
|
||||
// modified external id to match user format
|
||||
ssoUser._id = "us_" + details.userId
|
||||
|
||||
// new sso user won't have a password
|
||||
delete ssoUser.password
|
||||
|
||||
// new user isn't saved with rev
|
||||
delete ssoUser._rev
|
||||
|
||||
// tenant id added
|
||||
ssoUser.tenantId = context.getTenantId()
|
||||
|
||||
expect(mockSaveUser).toBeCalledWith(ssoUser, {
|
||||
hashPassword: false,
|
||||
requirePassword: false,
|
||||
})
|
||||
expect(mockDone).toBeCalledWith(null, ssoUser)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe("when the user exists", () => {
|
||||
let existingUser: User
|
||||
let details: SSOAuthDetails
|
||||
|
||||
beforeEach(() => {
|
||||
existingUser = structures.users.user()
|
||||
existingUser._id = structures.uuid()
|
||||
details = structures.sso.authDetails(existingUser)
|
||||
mockGetProfilePicture()
|
||||
})
|
||||
|
||||
describe("exists by email", () => {
|
||||
beforeEach(() => {
|
||||
users.getById.mockImplementationOnce(() => {
|
||||
throw new HTTPError("", 404)
|
||||
})
|
||||
users.getGlobalUserByEmail.mockReturnValueOnce(
|
||||
Promise.resolve(existingUser)
|
||||
)
|
||||
})
|
||||
|
||||
it("syncs and authenticates the user", async () => {
|
||||
const ssoUser = structures.users.ssoUser({
|
||||
user: existingUser,
|
||||
details,
|
||||
})
|
||||
mockSaveUser.mockReturnValueOnce(ssoUser)
|
||||
|
||||
await sso.authenticate(details, true, mockDone, mockSaveUser)
|
||||
|
||||
// roles preserved
|
||||
ssoUser.roles = existingUser.roles
|
||||
|
||||
// existing id preserved
|
||||
ssoUser._id = existingUser._id
|
||||
|
||||
expect(mockSaveUser).toBeCalledWith(ssoUser, {
|
||||
hashPassword: false,
|
||||
requirePassword: false,
|
||||
})
|
||||
expect(mockDone).toBeCalledWith(null, ssoUser)
|
||||
})
|
||||
})
|
||||
|
||||
describe("exists by id", () => {
|
||||
beforeEach(() => {
|
||||
users.getById.mockReturnValueOnce(Promise.resolve(existingUser))
|
||||
})
|
||||
|
||||
it("syncs and authenticates the user", async () => {
|
||||
const ssoUser = structures.users.ssoUser({
|
||||
user: existingUser,
|
||||
details,
|
||||
})
|
||||
mockSaveUser.mockReturnValueOnce(ssoUser)
|
||||
|
||||
await sso.authenticate(details, true, mockDone, mockSaveUser)
|
||||
|
||||
// roles preserved
|
||||
ssoUser.roles = existingUser.roles
|
||||
|
||||
// existing id preserved
|
||||
ssoUser._id = existingUser._id
|
||||
|
||||
expect(mockSaveUser).toBeCalledWith(ssoUser, {
|
||||
hashPassword: false,
|
||||
requirePassword: false,
|
||||
})
|
||||
expect(mockDone).toBeCalledWith(null, ssoUser)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
|
@ -1,79 +0,0 @@
|
|||
// Mock data
|
||||
|
||||
const { data } = require("./utilities/mock-data")
|
||||
|
||||
const TENANT_ID = "default"
|
||||
|
||||
const googleConfig = {
|
||||
clientID: data.clientID,
|
||||
clientSecret: data.clientSecret,
|
||||
}
|
||||
|
||||
const profile = {
|
||||
id: "mockId",
|
||||
_json: {
|
||||
email : data.email
|
||||
},
|
||||
provider: "google"
|
||||
}
|
||||
|
||||
const user = data.buildThirdPartyUser("google", "google", profile)
|
||||
|
||||
describe("google", () => {
|
||||
describe("strategyFactory", () => {
|
||||
// mock passport strategy factory
|
||||
jest.mock("passport-google-oauth")
|
||||
const mockStrategy = require("passport-google-oauth").OAuth2Strategy
|
||||
|
||||
it("should create successfully create a google strategy", async () => {
|
||||
const google = require("../google")
|
||||
|
||||
const callbackUrl = `/api/global/auth/${TENANT_ID}/google/callback`
|
||||
await google.strategyFactory(googleConfig, callbackUrl)
|
||||
|
||||
const expectedOptions = {
|
||||
clientID: googleConfig.clientID,
|
||||
clientSecret: googleConfig.clientSecret,
|
||||
callbackURL: callbackUrl,
|
||||
}
|
||||
|
||||
expect(mockStrategy).toHaveBeenCalledWith(
|
||||
expectedOptions,
|
||||
expect.anything()
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe("authenticate", () => {
|
||||
afterEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
// mock third party common authentication
|
||||
jest.mock("../third-party-common")
|
||||
const authenticateThirdParty = require("../third-party-common").authenticateThirdParty
|
||||
|
||||
// mock the passport callback
|
||||
const mockDone = jest.fn()
|
||||
|
||||
it("delegates authentication to third party common", async () => {
|
||||
const google = require("../google")
|
||||
const mockSaveUserFn = jest.fn()
|
||||
const authenticate = await google.buildVerifyFn(mockSaveUserFn)
|
||||
|
||||
await authenticate(
|
||||
data.accessToken,
|
||||
data.refreshToken,
|
||||
profile,
|
||||
mockDone
|
||||
)
|
||||
|
||||
expect(authenticateThirdParty).toHaveBeenCalledWith(
|
||||
user,
|
||||
true,
|
||||
mockDone,
|
||||
mockSaveUserFn)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
|
@ -1,144 +0,0 @@
|
|||
// Mock data
|
||||
const mockFetch = require("node-fetch")
|
||||
const { data } = require("./utilities/mock-data")
|
||||
const issuer = "mockIssuer"
|
||||
const sub = "mockSub"
|
||||
const profile = {
|
||||
id: "mockId",
|
||||
_json: {
|
||||
email : data.email
|
||||
}
|
||||
}
|
||||
let jwtClaims = {}
|
||||
const idToken = "mockIdToken"
|
||||
const params = {}
|
||||
|
||||
const callbackUrl = "http://somecallbackurl"
|
||||
|
||||
// response from .well-known/openid-configuration
|
||||
const oidcConfigUrlResponse = {
|
||||
issuer: issuer,
|
||||
authorization_endpoint: "mockAuthorizationEndpoint",
|
||||
token_endpoint: "mockTokenEndpoint",
|
||||
userinfo_endpoint: "mockUserInfoEndpoint"
|
||||
}
|
||||
|
||||
const oidcConfig = {
|
||||
configUrl: "http://someconfigurl",
|
||||
clientID: data.clientID,
|
||||
clientSecret: data.clientSecret,
|
||||
}
|
||||
|
||||
const user = data.buildThirdPartyUser(issuer, "oidc", profile)
|
||||
|
||||
describe("oidc", () => {
|
||||
describe("strategyFactory", () => {
|
||||
// mock passport strategy factory
|
||||
jest.mock("@techpass/passport-openidconnect")
|
||||
const mockStrategy = require("@techpass/passport-openidconnect").Strategy
|
||||
|
||||
// mock the request to retrieve the oidc configuration
|
||||
mockFetch.mockReturnValue({
|
||||
ok: true,
|
||||
json: () => oidcConfigUrlResponse
|
||||
})
|
||||
|
||||
it("should create successfully create an oidc strategy", async () => {
|
||||
const oidc = require("../oidc")
|
||||
const enrichedConfig = await oidc.fetchStrategyConfig(oidcConfig, callbackUrl)
|
||||
await oidc.strategyFactory(enrichedConfig, callbackUrl)
|
||||
|
||||
expect(mockFetch).toHaveBeenCalledWith(oidcConfig.configUrl)
|
||||
|
||||
const expectedOptions = {
|
||||
issuer: oidcConfigUrlResponse.issuer,
|
||||
authorizationURL: oidcConfigUrlResponse.authorization_endpoint,
|
||||
tokenURL: oidcConfigUrlResponse.token_endpoint,
|
||||
userInfoURL: oidcConfigUrlResponse.userinfo_endpoint,
|
||||
clientID: oidcConfig.clientID,
|
||||
clientSecret: oidcConfig.clientSecret,
|
||||
callbackURL: callbackUrl,
|
||||
}
|
||||
expect(mockStrategy).toHaveBeenCalledWith(
|
||||
expectedOptions,
|
||||
expect.anything()
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe("authenticate", () => {
|
||||
afterEach(() => {
|
||||
jest.clearAllMocks()
|
||||
});
|
||||
|
||||
// mock third party common authentication
|
||||
jest.mock("../third-party-common")
|
||||
const authenticateThirdParty = require("../third-party-common").authenticateThirdParty
|
||||
|
||||
// mock the passport callback
|
||||
const mockDone = jest.fn()
|
||||
const mockSaveUserFn = jest.fn()
|
||||
|
||||
async function doAuthenticate() {
|
||||
const oidc = require("../oidc")
|
||||
const authenticate = await oidc.buildVerifyFn(mockSaveUserFn)
|
||||
|
||||
await authenticate(
|
||||
issuer,
|
||||
sub,
|
||||
profile,
|
||||
jwtClaims,
|
||||
data.accessToken,
|
||||
data.refreshToken,
|
||||
idToken,
|
||||
params,
|
||||
mockDone
|
||||
)
|
||||
}
|
||||
|
||||
async function doTest() {
|
||||
await doAuthenticate()
|
||||
|
||||
expect(authenticateThirdParty).toHaveBeenCalledWith(
|
||||
user,
|
||||
false,
|
||||
mockDone,
|
||||
mockSaveUserFn,
|
||||
)
|
||||
}
|
||||
|
||||
it("delegates authentication to third party common", async () => {
|
||||
await doTest()
|
||||
})
|
||||
|
||||
it("uses JWT email to get email", async () => {
|
||||
delete profile._json.email
|
||||
jwtClaims = {
|
||||
email : "mock@budibase.com"
|
||||
}
|
||||
|
||||
await doTest()
|
||||
})
|
||||
|
||||
it("uses JWT username to get email", async () => {
|
||||
delete profile._json.email
|
||||
jwtClaims = {
|
||||
preferred_username : "mock@budibase.com"
|
||||
}
|
||||
|
||||
await doTest()
|
||||
})
|
||||
|
||||
it("uses JWT invalid username to get email", async () => {
|
||||
delete profile._json.email
|
||||
|
||||
jwtClaims = {
|
||||
preferred_username : "invalidUsername"
|
||||
}
|
||||
|
||||
await expect(doAuthenticate()).rejects.toThrow("Could not determine user email from profile");
|
||||
})
|
||||
|
||||
})
|
||||
})
|
||||
|
|
@@ -1,178 +0,0 @@
require("../../../../tests")
const { authenticateThirdParty } = require("../third-party-common")
const { data } = require("./utilities/mock-data")
const { DEFAULT_TENANT_ID } = require("../../../constants")

const { generateGlobalUserID } = require("../../../db/utils")
const { newid } = require("../../../utils")
const { doWithGlobalDB, doInTenant } = require("../../../tenancy")

const done = jest.fn()

const getErrorMessage = () => {
  return done.mock.calls[0][2].message
}

const saveUser = async (user) => {
  return doWithGlobalDB(DEFAULT_TENANT_ID, async db => {
    return await db.put(user)
  })
}

function authenticate(user, requireLocal, saveFn) {
  return doInTenant(DEFAULT_TENANT_ID, () => {
    return authenticateThirdParty(user, requireLocal, done, saveFn)
  })
}

describe("third party common", () => {
  describe("authenticateThirdParty", () => {
    let thirdPartyUser

    beforeEach(() => {
      thirdPartyUser = data.buildThirdPartyUser()
    })

    afterEach(async () => {
      return doWithGlobalDB(DEFAULT_TENANT_ID, async db => {
        jest.clearAllMocks()
        await db.destroy()
      })
    })

    describe("validation", () => {
      const testValidation = async (message) => {
        await authenticate(thirdPartyUser, false, saveUser)
        expect(done.mock.calls.length).toBe(1)
        expect(getErrorMessage()).toContain(message)
      }

      it("provider fails", async () => {
        delete thirdPartyUser.provider
        await testValidation("third party user provider required")
      })

      it("user id fails", async () => {
        delete thirdPartyUser.userId
        await testValidation("third party user id required")
      })

      it("email fails", async () => {
        delete thirdPartyUser.email
        await testValidation("third party user email required")
      })
    })

    const expectUserIsAuthenticated = () => {
      const user = done.mock.calls[0][1]
      expect(user).toBeDefined()
      expect(user._id).toBeDefined()
      expect(user._rev).toBeDefined()
      expect(user.token).toBeDefined()
      return user
    }

    const expectUserIsSynced = (user, thirdPartyUser) => {
      expect(user.provider).toBe(thirdPartyUser.provider)
      expect(user.firstName).toBe(thirdPartyUser.profile.name.givenName)
      expect(user.lastName).toBe(thirdPartyUser.profile.name.familyName)
      expect(user.thirdPartyProfile).toStrictEqual(thirdPartyUser.profile._json)
      expect(user.oauth2).toStrictEqual(thirdPartyUser.oauth2)
    }

    describe("when the user doesn't exist", () => {
      describe("when a local account is required", () => {
        it("returns an error message", async () => {
          await authenticate(thirdPartyUser, true, saveUser)
          expect(done.mock.calls.length).toBe(1)
          expect(getErrorMessage()).toContain("Email does not yet exist. You must set up your local budibase account first.")
        })
      })

      describe("when a local account isn't required", () => {
        it("creates and authenticates the user", async () => {
          await authenticate(thirdPartyUser, false, saveUser)
          const user = expectUserIsAuthenticated()
          expectUserIsSynced(user, thirdPartyUser)
          expect(user.roles).toStrictEqual({})
        })
      })
    })

    describe("when the user exists", () => {
      let dbUser
      let id
      let email

      const createUser = async () => {
        return doWithGlobalDB(DEFAULT_TENANT_ID, async db => {
          dbUser = {
            _id: id,
            email: email,
          }
          const response = await db.put(dbUser)
          dbUser._rev = response.rev
          return dbUser
        })
      }

      const expectUserIsUpdated = (user) => {
        // id is unchanged
        expect(user._id).toBe(id)
        // user is updated
        expect(user._rev).not.toBe(dbUser._rev)
      }

      describe("exists by email", () => {
        beforeEach(async () => {
          id = generateGlobalUserID(newid()) // random id
          email = thirdPartyUser.email // matching email
          await createUser()
        })

        it("syncs and authenticates the user", async () => {
          await authenticate(thirdPartyUser, true, saveUser)

          const user = expectUserIsAuthenticated()
          expectUserIsSynced(user, thirdPartyUser)
          expectUserIsUpdated(user)
        })
      })

      describe("exists by email with different casing", () => {
        beforeEach(async () => {
          id = generateGlobalUserID(newid()) // random id
          email = thirdPartyUser.email.toUpperCase() // matching email except for casing
          await createUser()
        })

        it("syncs and authenticates the user", async () => {
          await authenticate(thirdPartyUser, true, saveUser)

          const user = expectUserIsAuthenticated()
          expectUserIsSynced(user, thirdPartyUser)
          expectUserIsUpdated(user)
          expect(user.email).toBe(thirdPartyUser.email.toUpperCase())
        })
      })

      describe("exists by id", () => {
        beforeEach(async () => {
          id = generateGlobalUserID(thirdPartyUser.userId) // matching id
          email = "test@test.com" // random email
          await createUser()
        })

        it("syncs and authenticates the user", async () => {
          await authenticate(thirdPartyUser, true, saveUser)

          const user = expectUserIsAuthenticated()
          expectUserIsSynced(user, thirdPartyUser)
          expectUserIsUpdated(user)
        })
      })
    })
  })
})

@@ -1,54 +0,0 @@
// Mock Data

const mockClientID = "mockClientID"
const mockClientSecret = "mockClientSecret"

const mockEmail = "mock@budibase.com"
const mockAccessToken = "mockAccessToken"
const mockRefreshToken = "mockRefreshToken"

const mockProvider = "mockProvider"
const mockProviderType = "mockProviderType"

const mockProfile = {
  id: "mockId",
  name: {
    givenName: "mockGivenName",
    familyName: "mockFamilyName",
  },
  _json: {
    email: mockEmail,
  },
}

const buildOauth2 = (
  accessToken = mockAccessToken,
  refreshToken = mockRefreshToken
) => ({
  accessToken: accessToken,
  refreshToken: refreshToken,
})

const buildThirdPartyUser = (
  provider = mockProvider,
  providerType = mockProviderType,
  profile = mockProfile,
  email = mockEmail,
  oauth2 = buildOauth2()
) => ({
  provider: provider,
  providerType: providerType,
  userId: profile.id,
  profile: profile,
  email: email,
  oauth2: oauth2,
})

exports.data = {
  clientID: mockClientID,
  clientSecret: mockClientSecret,
  email: mockEmail,
  accessToken: mockAccessToken,
  refreshToken: mockRefreshToken,
  buildThirdPartyUser,
}

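For orientation, the factory above is typically consumed from a spec roughly as in this sketch (the override value is illustrative, not taken from the diff):

const { data } = require("./utilities/mock-data")

const defaultUser = data.buildThirdPartyUser()        // all mock defaults
const googleUser = data.buildThirdPartyUser("google") // only the provider overridden
// providerType, profile, email and oauth2 keep their mock defaults
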
@@ -1,177 +0,0 @@
import env from "../../environment"
import { generateGlobalUserID } from "../../db"
import { authError } from "./utils"
import { newid } from "../../utils"
import { createASession } from "../../security/sessions"
import * as users from "../../users"
import { getGlobalDB, getTenantId } from "../../tenancy"
import fetch from "node-fetch"
import { ThirdPartyUser } from "@budibase/types"
const jwt = require("jsonwebtoken")

type SaveUserOpts = {
  requirePassword?: boolean
  hashPassword?: boolean
  currentUserId?: string
}

export type SaveUserFunction = (
  user: ThirdPartyUser,
  opts: SaveUserOpts
) => Promise<any>

/**
 * Common authentication logic for third parties. e.g. OAuth, OIDC.
 */
export async function authenticateThirdParty(
  thirdPartyUser: ThirdPartyUser,
  requireLocalAccount: boolean = true,
  done: Function,
  saveUserFn?: SaveUserFunction
) {
  if (!saveUserFn) {
    throw new Error("Save user function must be provided")
  }
  if (!thirdPartyUser.provider) {
    return authError(done, "third party user provider required")
  }
  if (!thirdPartyUser.userId) {
    return authError(done, "third party user id required")
  }
  if (!thirdPartyUser.email) {
    return authError(done, "third party user email required")
  }

  // use the third party id
  const userId = generateGlobalUserID(thirdPartyUser.userId)
  const db = getGlobalDB()

  let dbUser

  // try to load by id
  try {
    dbUser = await db.get(userId)
  } catch (err: any) {
    // abort when not 404 error
    if (!err.status || err.status !== 404) {
      return authError(
        done,
        "Unexpected error when retrieving existing user",
        err
      )
    }
  }

  // fallback to loading by email
  if (!dbUser) {
    dbUser = await users.getGlobalUserByEmail(thirdPartyUser.email)
  }

  // exit early if there is still no user and auto creation is disabled
  if (!dbUser && requireLocalAccount) {
    return authError(
      done,
      "Email does not yet exist. You must set up your local budibase account first."
    )
  }

  // first time creation
  if (!dbUser) {
    // setup a blank user using the third party id
    dbUser = {
      _id: userId,
      email: thirdPartyUser.email,
      roles: {},
    }
  }

  dbUser = await syncUser(dbUser, thirdPartyUser)

  // never prompt for password reset
  dbUser.forceResetPassword = false

  // create or sync the user
  try {
    await saveUserFn(dbUser, { hashPassword: false, requirePassword: false })
  } catch (err: any) {
    return authError(done, "Error saving user", err)
  }

  // now that we're sure user exists, load them from the db
  dbUser = await db.get(dbUser._id)

  // authenticate
  const sessionId = newid()
  const tenantId = getTenantId()
  await createASession(dbUser._id, { sessionId, tenantId })

  dbUser.token = jwt.sign(
    {
      userId: dbUser._id,
      sessionId,
    },
    env.JWT_SECRET
  )

  return done(null, dbUser)
}

async function syncProfilePicture(
  user: ThirdPartyUser,
  thirdPartyUser: ThirdPartyUser
) {
  const pictureUrl = thirdPartyUser.profile?._json.picture
  if (pictureUrl) {
    const response = await fetch(pictureUrl)

    if (response.status === 200) {
      const type = response.headers.get("content-type") as string
      if (type.startsWith("image/")) {
        user.pictureUrl = pictureUrl
      }
    }
  }

  return user
}

/**
 * @returns a user that has been sync'd with third party information
 */
async function syncUser(user: ThirdPartyUser, thirdPartyUser: ThirdPartyUser) {
  // provider
  user.provider = thirdPartyUser.provider
  user.providerType = thirdPartyUser.providerType

  if (thirdPartyUser.profile) {
    const profile = thirdPartyUser.profile

    if (profile.name) {
      const name = profile.name
      // first name
      if (name.givenName) {
        user.firstName = name.givenName
      }
      // last name
      if (name.familyName) {
        user.lastName = name.familyName
      }
    }

    user = await syncProfilePicture(user, thirdPartyUser)

    // profile
    user.thirdPartyProfile = {
      ...profile._json,
    }
  }

  // oauth tokens for future use
  if (thirdPartyUser.oauth2) {
    user.oauth2 = {
      ...thirdPartyUser.oauth2,
    }
  }

  return user
}

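A minimal sketch of how a passport-style verify callback could delegate to authenticateThirdParty; the saveUser implementation and surrounding variables are assumptions for illustration, not part of this change:

// illustrative usage only
const done = (err: any, user?: any) => {
  // err is populated via authError on failure, user is the synced db user on success
}
// saveUser is assumed to persist the user document (see the SaveUserFunction type above)
await authenticateThirdParty(thirdPartyUser, false /* requireLocalAccount */, done, saveUser)
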
@@ -1,6 +1,6 @@
import { isMultiTenant, getTenantId } from "../../tenancy"
import { getScopedConfig } from "../../db"
import { ConfigType, Database, Config } from "@budibase/types"
import { getTenantId, isMultiTenant } from "../../context"
import * as configs from "../../configs"
import { ConfigType, GoogleInnerConfig } from "@budibase/types"

/**
 * Utility to handle authentication errors.

@@ -19,17 +14,14 @@ export function authError(done: Function, message: string, err?: any) {
}

export async function ssoCallbackUrl(
db: Database,
config?: { callbackURL?: string },
type?: ConfigType
type: ConfigType,
config?: GoogleInnerConfig
) {
// incase there is a callback URL from before
if (config && config.callbackURL) {
return config.callbackURL
if (config && (config as GoogleInnerConfig).callbackURL) {
return (config as GoogleInnerConfig).callbackURL as string
}
const publicConfig = await getScopedConfig(db, {
type: ConfigType.SETTINGS,
})
const settingsConfig = await configs.getSettingsConfig()

let callbackUrl = `/api/global/auth`
if (isMultiTenant()) {

@@ -37,5 +34,5 @@ export async function ssoCallbackUrl(
}
callbackUrl += `/${type}/callback`

return `${publicConfig.platformUrl}${callbackUrl}`
return `${settingsConfig.platformUrl}${callbackUrl}`
}

@@ -0,0 +1,28 @@
import { Ctx } from "@budibase/types"

/**
 * Expects a standard "query" query string property which is the JSON body
 * of the request, which has to be sent via query string due to the requirement
 * of making an endpoint a GET request e.g. downloading a file stream.
 */
export default function (ctx: Ctx, next: any) {
  const queryString = ctx.request.query?.query as string | undefined
  if (ctx.request.method.toLowerCase() !== "get") {
    ctx.throw(
      500,
      "Query to download middleware can only be used for get requests."
    )
  }
  if (!queryString) {
    return next()
  }
  const decoded = decodeURIComponent(queryString)
  let json
  try {
    json = JSON.parse(decoded)
  } catch (err) {
    return next()
  }
  ctx.request.body = json
  return next()
}

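Since the middleware only acts on GET requests carrying a `query` parameter, a caller would build the URL along these lines (the endpoint path and payload are illustrative assumptions):

const payload = { appId: "app_123", excludeRows: true }
const url = `/api/export?query=${encodeURIComponent(JSON.stringify(payload))}`
// the middleware decodes the parameter, JSON.parses it and assigns the result to ctx.request.body
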
@@ -1,4 +1,5 @@
import { doInTenant, getTenantIDFromCtx } from "../tenancy"
import { doInTenant } from "../context"
import { getTenantIDFromCtx } from "../tenancy"
import { buildMatcherRegex, matches } from "./matchers"
import { Header } from "../constants"
import {

@@ -4,10 +4,10 @@ import {
StaticDatabases,
getAllApps,
getGlobalDBName,
doWithDB,
getDB,
} from "../db"
import environment from "../environment"
import { doInTenant, getTenantIds, getTenantId } from "../tenancy"
import * as platform from "../platform"
import * as context from "../context"
import { DEFINITIONS } from "."
import {

@@ -47,7 +47,7 @@ export const runMigration = async (
const migrationType = migration.type
let tenantId: string | undefined
if (migrationType !== MigrationType.INSTALLATION) {
tenantId = getTenantId()
tenantId = context.getTenantId()
}
const migrationName = migration.name
const silent = migration.silent

@@ -86,66 +86,66 @@ export const runMigration = async (
count++
const lengthStatement = length > 1 ? `[${count}/${length}]` : ""

await doWithDB(dbName, async (db: any) => {
try {
const doc = await getMigrationsDoc(db)
const db = getDB(dbName)

// the migration has already been run
if (doc[migrationName]) {
// check for force
if (
options.force &&
options.force[migrationType] &&
options.force[migrationType].includes(migrationName)
) {
log(
`[Tenant: ${tenantId}] [Migration: ${migrationName}] [DB: ${dbName}] Forcing`
)
} else {
// no force, exit
return
}
}
try {
const doc = await getMigrationsDoc(db)

// check if the migration is not a no-op
if (!options.noOp) {
// the migration has already been run
if (doc[migrationName]) {
// check for force
if (
options.force &&
options.force[migrationType] &&
options.force[migrationType].includes(migrationName)
) {
log(
`[Tenant: ${tenantId}] [Migration: ${migrationName}] [DB: ${dbName}] Running ${lengthStatement}`
)

if (migration.preventRetry) {
// eagerly set the completion date
// so that we never run this migration twice even upon failure
doc[migrationName] = Date.now()
const response = await db.put(doc)
doc._rev = response.rev
}

// run the migration
if (migrationType === MigrationType.APP) {
await context.doInAppContext(db.name, async () => {
await migration.fn(db)
})
} else {
await migration.fn(db)
}

log(
`[Tenant: ${tenantId}] [Migration: ${migrationName}] [DB: ${dbName}] Complete`
`[Tenant: ${tenantId}] [Migration: ${migrationName}] [DB: ${dbName}] Forcing`
)
} else {
// no force, exit
return
}

// mark as complete
doc[migrationName] = Date.now()
await db.put(doc)
} catch (err) {
console.error(
`[Tenant: ${tenantId}] [Migration: ${migrationName}] [DB: ${dbName}] Error: `,
err
)
throw err
}
})

// check if the migration is not a no-op
if (!options.noOp) {
log(
`[Tenant: ${tenantId}] [Migration: ${migrationName}] [DB: ${dbName}] Running ${lengthStatement}`
)

if (migration.preventRetry) {
// eagerly set the completion date
// so that we never run this migration twice even upon failure
doc[migrationName] = Date.now()
const response = await db.put(doc)
doc._rev = response.rev
}

// run the migration
if (migrationType === MigrationType.APP) {
await context.doInAppContext(db.name, async () => {
await migration.fn(db)
})
} else {
await migration.fn(db)
}

log(
`[Tenant: ${tenantId}] [Migration: ${migrationName}] [DB: ${dbName}] Complete`
)
}

// mark as complete
doc[migrationName] = Date.now()
await db.put(doc)
} catch (err) {
console.error(
`[Tenant: ${tenantId}] [Migration: ${migrationName}] [DB: ${dbName}] Error: `,
err
)
throw err
}
}
}

@@ -160,7 +160,7 @@ export const runMigrations = async (
tenantIds = [options.noOp.tenantId]
} else if (!options.tenantIds || !options.tenantIds.length) {
// run for all tenants
tenantIds = await getTenantIds()
tenantIds = await platform.tenants.getTenantIds()
} else {
tenantIds = options.tenantIds
}

@@ -185,7 +185,10 @@ export const runMigrations = async (
// for all migrations
for (const migration of migrations) {
// run the migration
await doInTenant(tenantId, () => runMigration(migration, options))
await context.doInTenant(
tenantId,
async () => await runMigration(migration, options)
)
}
}
console.log("Migrations complete")

@@ -1,57 +0,0 @@
require("../../../tests")
const { runMigrations, getMigrationsDoc } = require("../index")
const { getGlobalDBName, getDB } = require("../../db")

const { structures, testEnv } = require("../../../tests")
testEnv.multiTenant()

let db

describe("migrations", () => {

  const migrationFunction = jest.fn()

  const MIGRATIONS = [{
    type: "global",
    name: "test",
    fn: migrationFunction
  }]

  let tenantId

  beforeEach(() => {
    tenantId = structures.tenant.id()
    db = getDB(getGlobalDBName(tenantId))
  })

  afterEach(async () => {
    jest.clearAllMocks()
    await db.destroy()
  })

  const migrate = () => {
    return runMigrations(MIGRATIONS, { tenantIds: [tenantId] })
  }

  it("should run a new migration", async () => {
    await migrate()
    expect(migrationFunction).toHaveBeenCalled()
    const doc = await getMigrationsDoc(db)
    expect(doc.test).toBeDefined()
  })

  it("should match snapshot", async () => {
    await migrate()
    const doc = await getMigrationsDoc(db)
    expect(doc).toMatchSnapshot()
  })

  it("should skip a previously run migration", async () => {
    await migrate()
    const previousMigrationTime = await getMigrationsDoc(db).test
    await migrate()
    const currentMigrationTime = await getMigrationsDoc(db).test
    expect(migrationFunction).toHaveBeenCalledTimes(1)
    expect(currentMigrationTime).toBe(previousMigrationTime)
  })
})

@@ -0,0 +1,64 @@
import { testEnv, DBTestConfiguration } from "../../../tests"
import * as migrations from "../index"
import * as context from "../../context"
import { MigrationType } from "@budibase/types"

testEnv.multiTenant()

describe("migrations", () => {
  const config = new DBTestConfiguration()

  const migrationFunction = jest.fn()

  const MIGRATIONS = [
    {
      type: MigrationType.GLOBAL,
      name: "test" as any,
      fn: migrationFunction,
    },
  ]

  beforeEach(() => {
    config.newTenant()
  })

  afterEach(async () => {
    jest.clearAllMocks()
  })

  const migrate = () => {
    return migrations.runMigrations(MIGRATIONS, {
      tenantIds: [config.tenantId],
    })
  }

  it("should run a new migration", async () => {
    await config.doInTenant(async () => {
      await migrate()
      expect(migrationFunction).toHaveBeenCalled()
      const db = context.getGlobalDB()
      const doc = await migrations.getMigrationsDoc(db)
      expect(doc.test).toBeDefined()
    })
  })

  it("should match snapshot", async () => {
    await config.doInTenant(async () => {
      await migrate()
      const doc = await migrations.getMigrationsDoc(context.getGlobalDB())
      expect(doc).toMatchSnapshot()
    })
  })

  it("should skip a previously run migration", async () => {
    await config.doInTenant(async () => {
      const db = context.getGlobalDB()
      await migrate()
      const previousDoc = await migrations.getMigrationsDoc(db)
      await migrate()
      const currentDoc = await migrations.getMigrationsDoc(db)
      expect(migrationFunction).toHaveBeenCalledTimes(1)
      expect(currentDoc.test).toBe(previousDoc.test)
    })
  })
})

@@ -1,5 +1,5 @@
import env from "../../environment"
import * as tenancy from "../../tenancy"
import * as context from "../../context"
import * as objectStore from "../objectStore"
import * as cloudfront from "../cloudfront"

@@ -22,7 +22,7 @@ export const getGlobalFileUrl = (type: string, name: string, etag?: string) => {
export const getGlobalFileS3Key = (type: string, name: string) => {
let file = `${type}/${name}`
if (env.MULTI_TENANCY) {
const tenantId = tenancy.getTenantId()
const tenantId = context.getTenantId()
file = `${tenantId}/${file}`
}
return file

@@ -1,6 +1,6 @@
import env from "../../environment"
import * as objectStore from "../objectStore"
import * as tenancy from "../../tenancy"
import * as context from "../../context"
import * as cloudfront from "../cloudfront"
import { Plugin } from "@budibase/types"

@@ -61,7 +61,7 @@ const getPluginS3Key = (plugin: Plugin, fileName: string) => {
export const getPluginS3Dir = (pluginName: string) => {
let s3Key = `${pluginName}`
if (env.MULTI_TENANCY) {
const tenantId = tenancy.getTenantId()
const tenantId = context.getTenantId()
s3Key = `${tenantId}/${s3Key}`
}
if (env.CLOUDFRONT_CDN) {

@@ -0,0 +1,3 @@
export * as users from "./users"
export * as tenants from "./tenants"
export * from "./platformDb"

@@ -0,0 +1,6 @@
import { StaticDatabases } from "../constants"
import { getDB } from "../db/db"

export function getPlatformDB() {
  return getDB(StaticDatabases.PLATFORM_INFO.name)
}

@@ -0,0 +1,101 @@
import { StaticDatabases } from "../constants"
import { getPlatformDB } from "./platformDb"
import { LockName, LockOptions, LockType, Tenants } from "@budibase/types"
import * as locks from "../redis/redlockImpl"

const TENANT_DOC = StaticDatabases.PLATFORM_INFO.docs.tenants

export const tenacyLockOptions: LockOptions = {
  type: LockType.DEFAULT,
  name: LockName.UPDATE_TENANTS_DOC,
  ttl: 10 * 1000, // auto expire after 10 seconds
  systemLock: true,
}

// READ

export async function getTenantIds(): Promise<string[]> {
  const tenants = await getTenants()
  return tenants.tenantIds
}

async function getTenants(): Promise<Tenants> {
  const db = getPlatformDB()
  let tenants: Tenants

  try {
    tenants = await db.get(TENANT_DOC)
  } catch (e: any) {
    // doesn't exist yet - create
    if (e.status === 404) {
      tenants = await createTenantsDoc()
    } else {
      throw e
    }
  }

  return tenants
}

export async function exists(tenantId: string) {
  const tenants = await getTenants()
  return tenants.tenantIds.indexOf(tenantId) !== -1
}

// CREATE / UPDATE

function newTenantsDoc(): Tenants {
  return {
    _id: TENANT_DOC,
    tenantIds: [],
  }
}

async function createTenantsDoc(): Promise<Tenants> {
  const db = getPlatformDB()
  let tenants = newTenantsDoc()

  try {
    const response = await db.put(tenants)
    tenants._rev = response.rev
  } catch (e: any) {
    // don't throw 409 is doc has already been created
    if (e.status === 409) {
      return db.get(TENANT_DOC)
    }
    throw e
  }

  return tenants
}

export async function addTenant(tenantId: string) {
  const db = getPlatformDB()

  // use a lock as tenant creation is conflict prone
  await locks.doWithLock(tenacyLockOptions, async () => {
    const tenants = await getTenants()

    // write the new tenant if it doesn't already exist
    if (tenants.tenantIds.indexOf(tenantId) === -1) {
      tenants.tenantIds.push(tenantId)
      await db.put(tenants)
    }
  })
}

// DELETE

export async function removeTenant(tenantId: string) {
  try {
    await locks.doWithLock(tenacyLockOptions, async () => {
      const db = getPlatformDB()
      const tenants = await getTenants()
      tenants.tenantIds = tenants.tenantIds.filter(id => id !== tenantId)
      await db.put(tenants)
    })
  } catch (err) {
    console.error(`Error removing tenant ${tenantId} from info db`, err)
    throw err
  }
}

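Because addTenant takes the redlock before reading and writing the tenants doc, callers can register tenants concurrently without conflicting; a usage sketch with illustrative tenant ids:

await addTenant("tenant-a")
if (await exists("tenant-a")) {
  const ids = await getTenantIds() // includes "tenant-a"
}
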
@@ -0,0 +1,25 @@
import { DBTestConfiguration, structures } from "../../../tests"
import * as tenants from "../tenants"

describe("tenants", () => {
  const config = new DBTestConfiguration()

  describe("addTenant", () => {
    it("concurrently adds multiple tenants safely", async () => {
      const tenant1 = structures.tenant.id()
      const tenant2 = structures.tenant.id()
      const tenant3 = structures.tenant.id()

      await Promise.all([
        tenants.addTenant(tenant1),
        tenants.addTenant(tenant2),
        tenants.addTenant(tenant3),
      ])

      const tenantIds = await tenants.getTenantIds()
      expect(tenantIds.includes(tenant1)).toBe(true)
      expect(tenantIds.includes(tenant2)).toBe(true)
      expect(tenantIds.includes(tenant3)).toBe(true)
    })
  })
})

@@ -0,0 +1,90 @@
import { getPlatformDB } from "./platformDb"
import { DEFAULT_TENANT_ID } from "../constants"
import env from "../environment"
import {
  PlatformUser,
  PlatformUserByEmail,
  PlatformUserById,
  User,
} from "@budibase/types"

// READ

export async function lookupTenantId(userId: string) {
  if (!env.MULTI_TENANCY) {
    return DEFAULT_TENANT_ID
  }

  const user = await getUserDoc(userId)
  return user.tenantId
}

async function getUserDoc(emailOrId: string): Promise<PlatformUser> {
  const db = getPlatformDB()
  return db.get(emailOrId)
}

// CREATE

function newUserIdDoc(id: string, tenantId: string): PlatformUserById {
  return {
    _id: id,
    tenantId,
  }
}

function newUserEmailDoc(
  userId: string,
  email: string,
  tenantId: string
): PlatformUserByEmail {
  return {
    _id: email,
    userId,
    tenantId,
  }
}

/**
 * Add a new user id or email doc if it doesn't exist.
 */
async function addUserDoc(emailOrId: string, newDocFn: () => PlatformUser) {
  const db = getPlatformDB()
  let user: PlatformUser

  try {
    await db.get(emailOrId)
  } catch (e: any) {
    if (e.status === 404) {
      user = newDocFn()
      await db.put(user)
    } else {
      throw e
    }
  }
}

export async function addUser(tenantId: string, userId: string, email: string) {
  await Promise.all([
    addUserDoc(userId, () => newUserIdDoc(userId, tenantId)),
    addUserDoc(email, () => newUserEmailDoc(userId, email, tenantId)),
  ])
}

// DELETE

export async function removeUser(user: User) {
  const db = getPlatformDB()
  const keys = [user._id!, user.email]
  const userDocs = await db.allDocs({
    keys,
    include_docs: true,
  })
  const toDelete = userDocs.rows.map((row: any) => {
    return {
      ...row.doc,
      _deleted: true,
    }
  })
  await db.bulkDocs(toDelete)
}

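The two docs written by addUser allow a later lookup from either the user id or the email; a sketch of the intended call pattern with illustrative values:

await addUser("tenant-a", "us_1234", "person@example.com")
const tenantId = await lookupTenantId("us_1234") // "tenant-a" when MULTI_TENANCY is enabled
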
@@ -1,4 +1,5 @@
export enum JobQueue {
  AUTOMATION = "automationQueue",
  APP_BACKUP = "appBackupQueue",
  AUDIT_LOG = "auditLogQueue",
}

@@ -4,7 +4,6 @@ import { JobQueue } from "./constants"
import InMemoryQueue from "./inMemoryQueue"
import BullQueue from "bull"
import { addListeners, StalledFn } from "./listeners"
const { opts: redisOpts, redisProtocolUrl } = getRedisOptions()

const CLEANUP_PERIOD_MS = 60 * 1000
let QUEUES: BullQueue.Queue[] | InMemoryQueue[] = []

@@ -20,6 +19,7 @@ export function createQueue<T>(
jobQueue: JobQueue,
opts: { removeStalledCb?: StalledFn } = {}
): BullQueue.Queue<T> {
const { opts: redisOpts, redisProtocolUrl } = getRedisOptions()
const queueConfig: any = redisProtocolUrl || { redis: redisOpts }
let queue: any
if (!env.isTest()) {

@@ -40,8 +40,10 @@ export function createQueue<T>(
}

export async function shutdown() {
if (QUEUES.length) {
if (cleanupInterval) {
clearInterval(cleanupInterval)
}
if (QUEUES.length) {
for (let queue of QUEUES) {
await queue.close()
}

@@ -3,4 +3,4 @@
export { default as Client } from "./redis"
export * as utils from "./utils"
export * as clients from "./init"
export * as redlock from "./redlock"
export * as locks from "./redlockImpl"

@@ -20,13 +20,17 @@ async function init() {
).init()
}

process.on("exit", async () => {
export async function shutdown() {
if (userClient) await userClient.finish()
if (sessionClient) await sessionClient.finish()
if (appClient) await appClient.finish()
if (cacheClient) await cacheClient.finish()
if (writethroughClient) await writethroughClient.finish()
if (lockClient) await lockClient.finish()
}

process.on("exit", async () => {
  await shutdown()
})

export async function getUserClient() {
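
Together with the queue shutdown above, this gives a process an explicit teardown path instead of relying only on the exit hook; a hedged sketch (the module paths are assumptions for illustration):

import * as queue from "../queue/queue"
import * as redis from "../redis/init"

async function gracefulShutdown() {
  await queue.shutdown() // closes the Bull queues and clears the cleanup interval
  await redis.shutdown() // finishes each redis client that was initialised
}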