Merge remote-tracking branch 'origin/master' into fix/update-csp-s3

Peter Clement 2023-02-16 10:11:39 +00:00
commit 3033190a47
1450 changed files with 54351 additions and 33700 deletions


@@ -3,6 +3,8 @@ public
dist
packages/server/builder
packages/server/coverage
packages/worker/coverage
packages/backend-core/coverage
packages/server/client
packages/builder/.routify
packages/builder/cypress/support/queryLevelTransformerFunction.js


@@ -6,6 +6,8 @@ labels: bug
assignees: ''
---
**Checklist**
- [ ] I have searched budibase discussions and github issues to check if my issue already exists
**Hosting**
<!-- Delete as appropriate -->


@@ -58,7 +58,7 @@ jobs:
- uses: codecov/codecov-action@v1
with:
token: ${{ secrets.CODECOV_TOKEN }} # not required for public repos
files: ./packages/server/coverage/clover.xml
files: ./packages/server/coverage/clover.xml,./packages/worker/coverage/clover.xml,./packages/backend-core/coverage/clover.xml
name: codecov-umbrella
verbose: true


@@ -38,17 +38,6 @@ jobs:
fi
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
- name: Tag and release Proxy service docker image
run: |
docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
yarn build:docker:proxy:prod
docker tag proxy-service budibase/proxy:$PROD_TAG
docker push budibase/proxy:$PROD_TAG
env:
DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
PROD_TAG: k8s
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v1
with:


@@ -28,17 +28,6 @@ jobs:
release_version=$(cat lerna.json | jq -r '.version')
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
- name: Tag and release Proxy service docker image
run: |
docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
yarn build:docker:proxy:preprod
docker tag proxy-service budibase/proxy:$PREPROD_TAG
docker push budibase/proxy:$PREPROD_TAG
env:
DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
PREPROD_TAG: k8s-preprod
- name: Pull values.yaml from budibase-infra
run: |
curl -H "Authorization: token ${{ secrets.GH_PERSONAL_TOKEN }}" \


@@ -29,17 +29,6 @@ jobs:
release_version=$(cat lerna.json | jq -r '.version')
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
- name: Tag and release Proxy service docker image
run: |
docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
yarn build:docker:proxy:release
docker tag proxy-service budibase/proxy:$RELEASE_TAG
docker push budibase/proxy:$RELEASE_TAG
env:
DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
RELEASE_TAG: k8s-release
- name: Pull values.yaml from budibase-infra
run: |
curl -H "Authorization: token ${{ secrets.GH_PERSONAL_TOKEN }}" \


@@ -26,7 +26,7 @@ env:
FEATURE_PREVIEW_URL: https://budirelease.live
jobs:
release:
release-images:
runs-on: ubuntu-latest
steps:
@@ -44,19 +44,12 @@ jobs:
run: yarn install:pro develop
- run: yarn
- run: yarn bootstrap
- run: yarn bootstrap
- run: yarn lint
- run: yarn build
- run: yarn build:sdk
- run: yarn test
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: eu-west-1
- name: Publish budibase packages to NPM
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
@@ -76,22 +69,25 @@ jobs:
DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
- name: Get the latest budibase release version
deploy-to-release-env:
needs: [release-images]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Get the current budibase release version
id: version
run: |
run: |
release_version=$(cat lerna.json | jq -r '.version')
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
- name: Tag and release Proxy service docker image
run: |
docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
yarn build:docker:proxy:release
docker tag proxy-service budibase/proxy:$RELEASE_TAG
docker push budibase/proxy:$RELEASE_TAG
env:
DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
RELEASE_TAG: k8s-release
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: eu-west-1
- name: Pull values.yaml from budibase-infra
run: |
@@ -149,3 +145,54 @@ jobs:
webhook-url: ${{ secrets.PROD_DEPLOY_WEBHOOK_URL }}
content: "Release Env Deployment Complete: ${{ env.RELEASE_VERSION }} deployed to Budibase Release Env."
embed-title: ${{ env.RELEASE_VERSION }}
release-helm-chart:
needs: [release-images]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Setup Helm
uses: azure/setup-helm@v1
id: helm-install
# due to helm repo index issue: https://github.com/helm/helm/issues/7363
# we need to create new package in a different dir, merge the index and move the package back
- name: Build and release helm chart
run: |
git config user.name "Budibase Helm Bot"
git config user.email "<>"
git reset --hard
git pull
mkdir sync
echo "Packaging chart to sync dir"
helm package charts/budibase --version 0.0.0-develop --app-version develop --destination sync
echo "Packaging successful"
git checkout gh-pages
echo "Indexing helm repo"
helm repo index --merge docs/index.yaml sync
mv -f sync/* docs
rm -rf sync
echo "Pushing new helm release"
git add -A
git commit -m "Helm Release: develop"
git push
trigger-deploy-to-qa-env:
needs: [release-helm-chart]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Get the current budibase release version
id: version
run: |
release_version=$(cat lerna.json | jq -r '.version')
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
- uses: passeidireto/trigger-external-workflow-action@main
env:
PAYLOAD_VERSION: ${{ env.RELEASE_VERSION }}
with:
repository: budibase/budibase-deploys
event: deploy-budibase-develop-to-qa
github_pat: ${{ secrets.GH_ACCESS_TOKEN }}
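The dispatch step above is equivalent to a plain repository_dispatch call against the GitHub API. A hedged sketch of the same trigger with curl (the client_payload key simply mirrors the PAYLOAD_VERSION env var above; how the private budibase-deploys repo consumes it is an assumption):

```
# Hypothetical manual equivalent of the trigger-external-workflow-action step:
# fire a repository_dispatch event that a workflow in budibase-deploys can
# listen for with `on: repository_dispatch: types: [deploy-budibase-develop-to-qa]`.
curl -X POST \
  -H "Authorization: token $GH_ACCESS_TOKEN" \
  -H "Accept: application/vnd.github+json" \
  https://api.github.com/repos/budibase/budibase-deploys/dispatches \
  -d "{\"event_type\":\"deploy-budibase-develop-to-qa\",\"client_payload\":{\"PAYLOAD_VERSION\":\"$RELEASE_VERSION\"}}"
```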


@@ -67,16 +67,24 @@ jobs:
uses: azure/setup-helm@v1
id: helm-install
# due to helm repo index issue: https://github.com/helm/helm/issues/7363
# we need to create new package in a different dir, merge the index and move the package back
- name: Build and release helm chart
run: |
git config user.name "Budibase Helm Bot"
git config user.email "<>"
git reset --hard
git pull
helm package charts/budibase
mkdir sync
echo "Packaging chart to sync dir"
helm package charts/budibase --version "$RELEASE_VERSION" --app-version "$RELEASE_VERSION" --destination sync
echo "Packaging successful"
git checkout gh-pages
mv *.tgz docs
helm repo index docs
echo "Indexing helm repo"
helm repo index --merge docs/index.yaml sync
mv -f sync/* docs
rm -rf sync
echo "Pushing new helm release"
git add -A
git commit -m "Helm Release: ${{ env.RELEASE_VERSION }}"
git push


@@ -98,17 +98,6 @@ jobs:
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: eu-west-1
- name: Tag and release Proxy service docker image
run: |
docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
yarn build:docker:proxy:preprod
docker tag proxy-service budibase/proxy:$PREPROD_TAG
docker push budibase/proxy:$PREPROD_TAG
env:
DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
PREPROD_TAG: k8s-preprod
- name: Pull values.yaml from budibase-infra
run: |
curl -H "Authorization: token ${{ secrets.GH_PERSONAL_TOKEN }}" \


@@ -18,30 +18,18 @@ jobs:
- run: yarn
- run: yarn bootstrap
- run: yarn build
- name: Pull cypress.env.yaml from budibase-infra
- name: Pull from budibase-infra
run: |
curl -H "Authorization: token ${{ secrets.GH_PERSONAL_TOKEN }}" \
-H 'Accept: application/vnd.github.v3.raw' \
-o packages/builder/cypress.env.json \
-L https://api.github.com/repos/budibase/budibase-infra/contents/test/cypress.env.json
wc -l packages/builder/cypress.env.json
- name: Cypress run
id: cypress
continue-on-error: true
uses: cypress-io/github-action@v2
with:
record: true
install: false
tag: nightly
command: yarn test:e2e:ci:record
env:
CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
-o
-L
wc -l
- uses: actions/upload-artifact@v3
with:
name: Test Reports
path: packages/builder/cypress/reports/testReport.html
path:
# TODO: enable once running in QA test env
# - name: Configure AWS Credentials
@@ -54,11 +42,3 @@ jobs:
# - name: Upload test results HTML
# uses: aws-actions/configure-aws-credentials@v1
# run: aws s3 cp packages/builder/cypress/reports/testReport.html s3://{{ secrets.BUDI_QA_REPORTS_BUCKET_NAME }}/$GITHUB_RUN_ID/index.html
- name: Cypress Discord Notify
run: yarn test:e2e:ci:notify
env:
CYPRESS_WEBHOOK_URL: ${{ secrets.BUDI_QA_WEBHOOK }}
CYPRESS_OUTCOME: ${{ steps.cypress.outcome }}
CYPRESS_DASHBOARD_URL: ${{ steps.cypress.outputs.dashboardUrl }}
GITHUB_RUN_URL: $GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID

.gitignore vendored

@@ -4,6 +4,7 @@ builder/*
packages/server/runtime_apps/
.idea/
bb-airgapped.tar.gz
*.iml
# Logs
logs
@@ -65,8 +66,6 @@ typings/
.env
!qa-core/.env
!hosting/.env
hosting/.generated-nginx.dev.conf
hosting/proxy/.generated-nginx.prod.conf
# parcel-bundler cache (https://parceljs.org/)
.cache
@@ -104,5 +103,9 @@ stats.html
# TypeScript cache
*.tsbuildinfo
# plugins
budibase-component
budibase-datasource
*.iml

.nvmrc Normal file

@@ -0,0 +1 @@
v14.19.3


@@ -4,6 +4,8 @@ dist
packages/builder/src/components/design/AppPreview/CurrentItemPreview.svelte
packages/server/builder
packages/server/coverage
packages/worker/coverage
packages/backend-core/coverage
packages/server/client
packages/server/src/definitions/openapi.ts
packages/builder/.routify

.python-version Normal file

@@ -0,0 +1 @@
3.11.1

.tool-versions Normal file

@@ -0,0 +1,2 @@
nodejs 14.19.3
python 3.11.1
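With .nvmrc, .python-version and .tool-versions in place, version managers can pick up the pinned runtimes automatically. An illustrative one-liner for each (assumes the asdf nodejs and python plugins are already added):

```
# asdf reads .tool-versions from the repo root (nodejs 14.19.3, python 3.11.1);
# nvm reads .nvmrc the same way.
cd budibase
asdf install
nvm use
```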

.vscode/extensions.json vendored Normal file

@@ -0,0 +1,6 @@
{
"recommendations": [
"esbenp.prettier-vscode",
"svelte.svelte-vscode"
]
}

.vscode/settings.json vendored

@@ -1,19 +1,28 @@
{
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
"source.fixAll": true
},
"editor.defaultFormatter": "svelte.svelte-vscode",
"[json]": {
"editor.defaultFormatter": "vscode.json-language-features"
},
"[javascript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"debug.javascript.terminalOptions": {
"skipFiles": [
"${workspaceFolder}/packages/backend-core/node_modules/**",
"<node_internals>/**"
]
},
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
"source.fixAll": true
},
"editor.defaultFormatter": "esbenp.prettier-vscode",
"[json]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[javascript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"debug.javascript.terminalOptions": {
"skipFiles": [
"${workspaceFolder}/packages/backend-core/node_modules/**",
"<node_internals>/**"
]
},
"[typescript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[dockercompose]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[svelte]": {
"editor.defaultFormatter": "svelte.svelte-vscode"
}
}

artifacthub-repo.yml Normal file

@@ -0,0 +1,10 @@
# Artifact Hub repository metadata file
# This file is used to verify ownership of the budibase Helm chart repo
# so that we appear as a verified owner on artifacthub.io
repositoryID: a7536764-e72e-4961-87d8-efe7c8dedfa3
owners: # (optional, used to claim repository ownership)
- name: Martin
email: budimaster@budibase.com
- name: DevOps
email: devops@budibase.com


@@ -11,11 +11,13 @@ sources:
- https://github.com/Budibase/budibase
- https://budibase.com
type: application
version: 0.2.11
appVersion: 1.0.214
# populates on packaging
version: 0.0.0
# populates on packaging
appVersion: 0.0.0
dependencies:
- name: couchdb
version: 3.6.1
version: 3.3.4
repository: https://apache.github.io/couchdb-helm
condition: services.couchdb.enabled
- name: ingress-nginx


@@ -14,6 +14,9 @@ metadata:
alb.ingress.kubernetes.io/listen-ports: '[{"HTTP": 80}, {"HTTPS":443}]'
alb.ingress.kubernetes.io/certificate-arn: {{ .Values.ingress.certificateArn }}
{{- end }}
{{- if .Values.ingress.securityGroups }}
alb.ingress.kubernetes.io/security-groups: {{ .Values.ingress.securityGroups }}
{{- end }}
spec:
rules:
- http:


@@ -20,6 +20,9 @@ spec:
annotations:
kompose.cmd: kompose convert
kompose.version: 1.21.0 (992df58d8)
{{ if .Values.services.apps.annotations }}
{{- toYaml .Values.services.apps.annotations | indent 8 -}}
{{ end }}
creationTimestamp: null
labels:
io.kompose.service: app-service
@@ -60,12 +63,12 @@ spec:
secretKeyRef:
name: {{ template "budibase.fullname" . }}
key: jwtSecret
- name: LOG_LEVEL
value: {{ .Values.services.apps.logLevel | default "info" | quote }}
{{ if .Values.services.objectStore.region }}
- name: AWS_REGION
value: {{ .Values.services.objectStore.region }}
{{ end }}
- name: MINIO_ENABLED
value: {{ .Values.services.objectStore.minio | quote }}
- name: MINIO_ACCESS_KEY
valueFrom:
secretKeyRef:
@@ -76,10 +79,22 @@ spec:
secretKeyRef:
name: {{ template "budibase.fullname" . }}
key: objectStoreSecret
- name: CLOUDFRONT_CDN
value: {{ .Values.services.objectStore.cloudfront.cdn | quote }}
- name: CLOUDFRONT_PUBLIC_KEY_ID
value: {{ .Values.services.objectStore.cloudfront.publicKeyId | quote }}
- name: CLOUDFRONT_PRIVATE_KEY_64
value: {{ .Values.services.objectStore.cloudfront.privateKey64 | quote }}
- name: MINIO_URL
value: {{ .Values.services.objectStore.url }}
- name: PLUGIN_BUCKET_NAME
value: {{ .Values.services.objectStore.pluginBucketName | default "plugins" | quote }}
value: {{ .Values.services.objectStore.pluginBucketName | quote }}
- name: APPS_BUCKET_NAME
value: {{ .Values.services.objectStore.appsBucketName | quote }}
- name: GLOBAL_BUCKET_NAME
value: {{ .Values.services.objectStore.globalBucketName | quote }}
- name: BACKUPS_BUCKET_NAME
value: {{ .Values.services.objectStore.backupsBucketName | quote }}
- name: PORT
value: {{ .Values.services.apps.port | quote }}
{{ if .Values.services.worker.publicApiRateLimitPerSecond }}
@@ -124,6 +139,8 @@ spec:
value: {{ .Values.globals.automationMaxIterations | quote }}
- name: TENANT_FEATURE_FLAGS
value: {{ .Values.globals.tenantFeatureFlags | quote }}
- name: ENCRYPTION_KEY
value: {{ .Values.globals.bbEncryptionKey | quote }}
{{ if .Values.globals.bbAdminUserEmail }}
- name: BB_ADMIN_USER_EMAIL
value: {{ .Values.globals.bbAdminUserEmail | quote }}
@@ -152,6 +169,24 @@ spec:
- name: ELASTIC_APM_SERVER_URL
value: {{ .Values.globals.elasticApmServerUrl | quote }}
{{ end }}
{{ if .Values.globals.globalAgentHttpProxy }}
- name: GLOBAL_AGENT_HTTP_PROXY
value: {{ .Values.globals.globalAgentHttpProxy | quote }}
{{ end }}
{{ if .Values.globals.globalAgentHttpsProxy }}
- name: GLOBAL_AGENT_HTTPS_PROXY
value: {{ .Values.globals.globalAgentHttpsProxy | quote }}
{{ end }}
{{ if .Values.globals.globalAgentNoProxy }}
- name: GLOBAL_AGENT_NO_PROXY
value: {{ .Values.globals.globalAgentNoProxy | quote }}
{{ end }}
- name: CDN_URL
value: {{ .Values.globals.cdnUrl }}
{{ if .Values.services.tlsRejectUnauthorized }}
- name: NODE_TLS_REJECT_UNAUTHORIZED
value: {{ .Values.services.tlsRejectUnauthorized }}
{{ end }}
image: budibase/apps:{{ .Values.globals.appVersion }}
imagePullPolicy: Always


@@ -42,6 +42,7 @@ spec:
secretKeyRef:
name: {{ template "budibase.fullname" . }}
key: objectStoreSecret
image: minio/minio
imagePullPolicy: ""
livenessProbe:


@@ -20,16 +20,34 @@ spec:
annotations:
kompose.cmd: kompose convert
kompose.version: 1.21.0 (992df58d8)
{{ if .Values.services.proxy.annotations }}
{{- toYaml .Values.services.proxy.annotations | indent 8 -}}
{{ end }}
creationTimestamp: null
labels:
app.kubernetes.io/name: budibase-proxy
spec:
containers:
- image: budibase/proxy:{{ .Values.services.proxy.tag | default "k8s" }}
- image: budibase/proxy:{{ .Values.globals.appVersion }}
imagePullPolicy: Always
name: proxy-service
ports:
- containerPort: {{ .Values.services.proxy.port }}
env:
- name: APPS_UPSTREAM_URL
value: {{ tpl .Values.services.proxy.upstreams.apps . | quote }}
- name: WORKER_UPSTREAM_URL
value: {{ tpl .Values.services.proxy.upstreams.worker . | quote }}
- name: MINIO_UPSTREAM_URL
value: {{ tpl .Values.services.proxy.upstreams.minio . | quote }}
- name: COUCHDB_UPSTREAM_URL
value: {{ .Values.services.couchdb.url | default (tpl .Values.services.proxy.upstreams.couchdb .) | quote }}
- name: RESOLVER
{{ if .Values.services.proxy.resolver }}
value: {{ .Values.services.proxy.resolver }}
{{ else }}
value: kube-dns.kube-system.svc.{{ .Values.services.dns }}
{{ end }}
{{ with .Values.services.proxy.resources }}
resources:
{{- toYaml . | nindent 10 }}


@@ -60,5 +60,6 @@ spec:
- name: redis-data
persistentVolumeClaim:
claimName: redis-data
status: {}
{{- end }}


@@ -21,6 +21,9 @@ spec:
annotations:
kompose.cmd: kompose convert
kompose.version: 1.21.0 (992df58d8)
{{ if .Values.services.worker.annotations }}
{{- toYaml .Values.services.worker.annotations | indent 8 -}}
{{ end }}
creationTimestamp: null
labels:
io.kompose.service: worker-service
@@ -65,6 +68,8 @@ spec:
- name: AWS_REGION
value: {{ .Values.services.objectStore.region }}
{{ end }}
- name: MINIO_ENABLED
value: {{ .Values.services.objectStore.minio | quote }}
- name: MINIO_ACCESS_KEY
valueFrom:
secretKeyRef:
@@ -77,8 +82,20 @@ spec:
key: objectStoreSecret
- name: MINIO_URL
value: {{ .Values.services.objectStore.url }}
- name: CLOUDFRONT_CDN
value: {{ .Values.services.objectStore.cloudfront.cdn | quote }}
- name: CLOUDFRONT_PUBLIC_KEY_ID
value: {{ .Values.services.objectStore.cloudfront.publicKeyId | quote }}
- name: CLOUDFRONT_PRIVATE_KEY_64
value: {{ .Values.services.objectStore.cloudfront.privateKey64 | quote }}
- name: PLUGIN_BUCKET_NAME
value: {{ .Values.services.objectStore.pluginBucketName | default "plugins" | quote }}
value: {{ .Values.services.objectStore.pluginBucketName | quote }}
- name: APPS_BUCKET_NAME
value: {{ .Values.services.objectStore.appsBucketName | quote }}
- name: GLOBAL_BUCKET_NAME
value: {{ .Values.services.objectStore.globalBucketName | quote }}
- name: BACKUPS_BUCKET_NAME
value: {{ .Values.services.objectStore.backupsBucketName | quote }}
- name: PORT
value: {{ .Values.services.worker.port | quote }}
- name: MULTI_TENANCY
@@ -129,6 +146,8 @@ spec:
value: {{ .Values.globals.google.secret | quote }}
- name: TENANT_FEATURE_FLAGS
value: {{ .Values.globals.tenantFeatureFlags | quote }}
- name: ENCRYPTION_KEY
value: {{ .Values.globals.bbEncryptionKey | quote }}
{{ if .Values.globals.elasticApmEnabled }}
- name: ELASTIC_APM_ENABLED
value: {{ .Values.globals.elasticApmEnabled | quote }}
@@ -141,6 +160,24 @@ spec:
- name: ELASTIC_APM_SERVER_URL
value: {{ .Values.globals.elasticApmServerUrl | quote }}
{{ end }}
{{ if .Values.globals.globalAgentHttpProxy }}
- name: GLOBAL_AGENT_HTTP_PROXY
value: {{ .Values.globals.globalAgentHttpProxy | quote }}
{{ end }}
{{ if .Values.globals.globalAgentHttpsProxy }}
- name: GLOBAL_AGENT_HTTPS_PROXY
value: {{ .Values.globals.globalAgentHttpsProxy | quote }}
{{ end }}
{{ if .Values.globals.globalAgentNoProxy }}
- name: GLOBAL_AGENT_NO_PROXY
value: {{ .Values.globals.globalAgentNoProxy | quote }}
{{ end }}
- name: CDN_URL
value: {{ .Values.globals.cdnUrl }}
{{ if .Values.services.tlsRejectUnauthorized }}
- name: NODE_TLS_REJECT_UNAUTHORIZED
value: {{ .Values.services.tlsRejectUnauthorized }}
{{ end }}
image: budibase/worker:{{ .Values.globals.appVersion }}
imagePullPolicy: Always


@@ -76,7 +76,7 @@ affinity: {}
globals:
appVersion: "latest"
budibaseEnv: PRODUCTION
tenantFeatureFlags: "*:LICENSING,*:USER_GROUPS"
tenantFeatureFlags: "*:LICENSING,*:USER_GROUPS,*:ONBOARDING_TOUR"
enableAnalytics: "1"
sentryDSN: ""
posthogToken: "phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU"
@@ -98,6 +98,7 @@ globals:
# if createSecrets is set to false, you can hard-code your secrets here
internalApiKey: ""
jwtSecret: ""
cdnUrl: ""
smtp:
enabled: false
@@ -105,15 +106,28 @@ globals:
# elasticApmEnabled:
# elasticApmSecretToken:
# elasticApmServerUrl:
# globalAgentHttpProxy:
# globalAgentHttpsProxy:
# globalAgentNoProxy:
services:
budibaseVersion: latest
dns: cluster.local
# tlsRejectUnauthorized: 0
proxy:
port: 10000
replicaCount: 1
upstreams:
apps: 'http://app-service.{{ .Release.Namespace }}.svc.{{ .Values.services.dns }}:{{ .Values.services.apps.port }}'
worker: 'http://worker-service.{{ .Release.Namespace }}.svc.{{ .Values.services.dns }}:{{ .Values.services.worker.port }}'
minio: 'http://minio-service.{{ .Release.Namespace }}.svc.{{ .Values.services.dns }}:{{ .Values.services.objectStore.port }}'
couchdb: 'http://{{ .Release.Name }}-svc-couchdb:{{ .Values.services.couchdb.port }}'
resources: {}
# annotations:
# co.elastic.logs/module: nginx
# co.elastic.logs/fileset.stdout: access
# co.elastic.logs/fileset.stderr: error
apps:
port: 4002
@@ -121,11 +135,20 @@ services:
logLevel: info
resources: {}
# nodeDebug: "" # set the value of NODE_DEBUG
# annotations:
# co.elastic.logs/multiline.type: pattern
# co.elastic.logs/multiline.pattern: '^[[:space:]]'
# co.elastic.logs/multiline.negate: false
# co.elastic.logs/multiline.match: after
worker:
port: 4003
replicaCount: 1
resources: {}
# annotations:
# co.elastic.logs/multiline.type: pattern
# co.elastic.logs/multiline.pattern: '^[[:space:]]'
# co.elastic.logs/multiline.negate: false
# co.elastic.logs/multiline.match: after
couchdb:
enabled: true
@@ -156,6 +179,7 @@ services:
resources: {}
objectStore:
# Set to false if using another object store such as S3
minio: true
browser: true
port: 9000
@@ -171,6 +195,13 @@ services:
## set, choosing the default provisioner.
storageClass: ""
resources: {}
cloudfront:
# Set the url of a distribution to enable cloudfront
cdn: ""
# ID of public key stored in cloudfront
publicKeyId: ""
# Base64 encoded private key for the above public key
privateKey64: ""
# Override values in couchDB subchart
couchdb:

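The new objectStore.cloudfront block above feeds the CLOUDFRONT_* environment variables added to the apps and worker deployments. A hedged sketch of enabling it at install time — the chart alias budibase/budibase, the distribution URL and the key ID are all placeholders:

```
# Hypothetical values: substitute a real CloudFront distribution URL, the ID
# of the public key uploaded to CloudFront, and its private key base64-encoded
# (base64 -w0 is the GNU coreutils flag; use -b 0 on macOS).
helm upgrade --install budibase budibase/budibase \
  --set services.objectStore.cloudfront.cdn="https://d1234example.cloudfront.net" \
  --set services.objectStore.cloudfront.publicKeyId="K2EXAMPLEKEYID" \
  --set services.objectStore.cloudfront.privateKey64="$(base64 -w0 private_key.pem)"
```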

@@ -9,7 +9,6 @@ From opening a bug report to creating a pull request: every contribution is appr
- [Glossary of Terms](#glossary-of-terms)
- [Contributing to Budibase](#contributing-to-budibase)
## Not Sure Where to Start?
Budibase is a low-code web application builder that creates svelte-based web applications.
@@ -22,7 +21,7 @@ Budibase is a monorepo managed by [lerna](https://github.com/lerna/lerna). Lerna
- **packages/server** - The budibase server. This [Koa](https://koajs.com/) app is responsible for serving the JS for the builder and budibase apps, as well as providing the API for interaction with the database and file system.
- **packages/worker** - This [Koa](https://koajs.com/) app is responsible for providing global apis for managing your budibase installation. Authentication, Users, Email, Org and Auth configs are all provided by the worker.
- **packages/worker** - This [Koa](https://koajs.com/) app is responsible for providing global apis for managing your budibase installation. Authentication, Users, Email, Org and Auth configs are all provided by the worker.
## Contributor License Agreement (CLA)
@@ -45,7 +44,7 @@ A client represents a single budibase customer. Each budibase client will have 1
### App
A client can have one or more budibase applications. Budibase applications would be things like "Developer Inventory Management" or "Goat Herder CRM". Think of a budibase application as a tree.
A client can have one or more budibase applications. Budibase applications would be things like "Developer Inventory Management" or "Goat Herder CRM". Think of a budibase application as a tree.
### Database
@@ -73,28 +72,55 @@ A component is the basic frontend building block of a budibase app.
### Component Library
Component libraries are collections of components as well as the definition of their props contained in a file called `components.json`.
Component libraries are collections of components as well as the definition of their props contained in a file called `components.json`.
## Contributing to Budibase
* Please maintain the existing code style.
- Please maintain the existing code style.
* Please try to keep your commits small and focused.
- Please try to keep your commits small and focused.
* Please write tests.
- Please write tests.
* If the project diverges from your branch, please rebase instead of merging. This makes the commit graph easier to read.
- If the project diverges from your branch, please rebase instead of merging. This makes the commit graph easier to read.
* Once your work is completed, please raise a PR against the `develop` branch with some information about what has changed and why.
- Once your work is completed, please raise a PR against the `develop` branch with some information about what has changed and why.
### Getting Started For Contributors
#### 1. Prerequisites
NodeJS Version `14.x.x`
#### 1. Prerequisites
*yarn -* `npm install -g yarn`
- NodeJS version `14.x.x`
- Python version `3.x`
*jest* - `npm install -g jest`
### Using asdf (recommended)
asdf is a version manager that can manage multiple runtime dependencies.
You can install them by following any of the steps described below:
- Install using script (only for mac users):
`./scripts/install-contributor-dependencies.sh`
- Or, manually:
- Installation steps: https://asdf-vm.com/guide/getting-started.html
- asdf plugin add nodejs
- asdf plugin add python
- npm install -g yarn
### Using NVM and pyenv
- NVM:
- Install: https://github.com/nvm-sh/nvm#installing-and-updating
- Setup: `nvm use`
- Pyenv:
- Install: https://github.com/pyenv/pyenv#installation
- Setup: `pyenv install -v 3.7.2`
- _yarn -_ `npm install -g yarn`
#### 2. Clone this repository
@@ -102,7 +128,7 @@ NodeJS Version `14.x.x`
then `cd ` into your local copy.
#### 3. Install and Build
#### 3. Install and Build
| **NOTE**: On Windows, all yarn commands must be executed on a bash shell (e.g. git bash)
@@ -134,9 +160,9 @@ This will enable watch mode for both the builder app, server, client library and
#### 5. Debugging using VS Code
To debug the budibase server and worker a VS Code launch configuration has been provided.
To debug the budibase server and worker a VS Code launch configuration has been provided.
Visit the debug window and select `Budibase Server` or `Budibase Worker` to debug the respective component.
Visit the debug window and select `Budibase Server` or `Budibase Worker` to debug the respective component.
Alternatively to start both components simultaneously select `Start Budibase`.
In addition to the above, the remaining budibase components may be run in dev mode using: `yarn dev:noserver`.
@@ -156,11 +182,11 @@ For the backend we run [Redis](https://redis.io/), [CouchDB](https://couchdb.apa
When you are running locally, budibase stores data on disk using docker volumes. The volumes and the types of data associated with each are:
- `redis_data`
- `redis_data`
- Sessions, email tokens
- `couchdb3_data`
- `couchdb3_data`
- Global and app databases
- `minio_data`
- `minio_data`
- App manifest, budibase client, static assets
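For reference, the volumes above can be inspected with standard docker commands, e.g. to find where the CouchDB data lives on disk (volume names may carry a compose project prefix such as `hosting_`):

```
# List the budibase dev volumes and print the couchdb mountpoint
docker volume ls | grep -E 'redis_data|couchdb3_data|minio_data'
docker volume inspect couchdb3_data --format '{{ .Mountpoint }}'
```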
### Development Modes
@@ -172,34 +198,42 @@ A combination of environment variables controls the mode budibase runs in.
Yarn commands can be used to mimic the different modes as described in the sections below:
#### Self Hosted
The default mode. A single tenant installation with no usage restrictions.
The default mode. A single tenant installation with no usage restrictions.
To enable this mode, use:
```
yarn mode:self
```
#### Cloud
The cloud mode, with account portal turned off.
The cloud mode, with account portal turned off.
To enable this mode, use:
```
yarn mode:cloud
```
#### Cloud & Account
The cloud mode, with account portal turned on. This is a replica of the mode that runs at https://budibase.app
#### Cloud & Account
The cloud mode, with account portal turned on. This is a replica of the mode that runs at https://budibase.app
To enable this mode, use:
```
yarn mode:account
```
### CI
An overview of the CI pipelines can be found [here](../.github/workflows/README.md)
An overview of the CI pipelines can be found [here](../.github/workflows/README.md)
### Pro
@budibase/pro is the closed source package that supports licensed features in budibase. By default the package will be pulled from NPM and will not normally need to be touched in local development. If you require to update code inside the pro package it can be cloned to the same root level as budibase, e.g.
@budibase/pro is the closed source package that supports licensed features in budibase. By default the package will be pulled from NPM and will not normally need to be touched in local development. If you require to update code inside the pro package it can be cloned to the same root level as budibase, e.g.
```
.
@@ -207,13 +241,14 @@ yarn mode:account
|_ budibase-pro
```
Note that only budibase maintainers will be able to access the pro repo.
Note that only budibase maintainers will be able to access the pro repo.
The `yarn bootstrap` command can be used to replace the NPM supplied dependency with the local source aware version. This is achieved using the `yarn link` command. To see specifically how dependencies are linked see [scripts/link-dependencies.sh](../scripts/link-dependencies.sh). The same link script is used to link dependencies to account-portal in local dev.
The `yarn bootstrap` command can be used to replace the NPM supplied dependency with the local source aware version. This is achieved using the `yarn link` command. To see specifically how dependencies are linked see [scripts/link-dependencies.sh](../scripts/link-dependencies.sh). The same link script is used to link dependencies to account-portal in local dev.
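In essence the link step performs something like the following — a simplified sketch with assumed paths; see link-dependencies.sh for the real logic:

```
# Register the local pro package, then point budibase's server package at it
# instead of the copy pulled from NPM. Paths here are illustrative.
cd ../budibase-pro/packages/pro
yarn link
cd ../../../budibase/packages/server
yarn link @budibase/pro
```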
### Troubleshooting
Sometimes, things go wrong. This can be due to incompatible updates on the budibase platform. To clear down your development environment and start again follow **Step 6. Cleanup**, then proceed from **Step 3. Install and Build** in the setup guide above to create a fresh Budibase installation.
### Running tests
#### End-to-end Tests
@@ -226,12 +261,11 @@ yarn test:e2e
Or if you are in the builder you can run `yarn cy:test`.
### Other Useful Information
* The contributors are listed in [AUTHORS.md](https://github.com/Budibase/budibase/blob/master/.github/AUTHORS.md) (add yourself).
- The contributors are listed in [AUTHORS.md](https://github.com/Budibase/budibase/blob/master/.github/AUTHORS.md) (add yourself).
* This project uses a modified version of the MPLv2 license, see [LICENSE](https://github.com/budibase/server/blob/master/LICENSE).
- This project uses a modified version of the MPLv2 license, see [LICENSE](https://github.com/budibase/server/blob/master/LICENSE).
* We use the [C4 (Collective Code Construction Contract)](https://rfc.zeromq.org/spec:42/C4/) process for contributions.
- We use the [C4 (Collective Code Construction Contract)](https://rfc.zeromq.org/spec:42/C4/) process for contributions.
Please read this if you are unfamiliar with it.


@@ -1,12 +1,15 @@
## Dev Environment on Debian 11
### Install Node
### Install NVM & Node 14
NVM documentation: https://github.com/nvm-sh/nvm#installing-and-updating
Budibase requires a recent version of node (14+):
Install NVM
```
curl -sL https://deb.nodesource.com/setup_16.x | sudo bash -
apt -y install nodejs
node -v
curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.0/install.sh | bash
```
Install Node 14
```
nvm install 14
```
### Install npm requirements
@@ -31,7 +34,7 @@ This setup process was tested on Debian 11 (bullseye) with version numbers show
- Docker: 20.10.5
- Docker-Compose: 1.29.2
- Node: v16.15.1
- Node: v14.20.1
- Yarn: 1.22.19
- Lerna: 5.1.4


@@ -11,7 +11,7 @@ through brew.
### Install Node
Budibase requires a recent version of node (14+):
Budibase requires a recent version of node 14:
```
brew install node npm
node -v
@@ -38,7 +38,7 @@ This setup process was tested on Mac OSX 12 (Monterey) with version numbers show
- Docker: 20.10.14
- Docker-Compose: 2.6.0
- Node: 18.3.0
- Node: 14.20.1
- Yarn: 1.22.19
- Lerna: 5.1.4
@@ -59,4 +59,7 @@ The dev version will be available on port 10000 i.e.
http://127.0.0.1:10000/builder/admin
| **NOTE**: If you are working on a M1 Apple Silicon, you will need to uncomment `# platform: linux/amd64` line in
[hosting/docker-compose-dev.yaml](../hosting/docker-compose.dev.yaml)
[hosting/docker-compose-dev.yaml](../hosting/docker-compose.dev.yaml)
### Troubleshooting
If there are errors with the `yarn setup` command, you can try installing nvm and node 14. This is the same as the instructions for Debian 11.

docs/DEV-SETUP-WINDOWS.md Normal file

@@ -0,0 +1,81 @@
## Dev Environment on Windows 10/11 (WSL2)
### Install WSL with Ubuntu LTS
Enable WSL 2 on Windows 10/11 for docker support.
```
wsl --set-default-version 2
```
Install Ubuntu LTS.
```
wsl --install Ubuntu
```
Or follow the instructions here:
https://learn.microsoft.com/en-us/windows/wsl/install
### Install Docker on Windows
Download the installer from Docker and install it.
Check this URL for more detailed instructions:
https://docs.docker.com/desktop/install/windows-install/
You should follow the next steps from within the Ubuntu terminal.
### Install NVM & Node 14
NVM documentation: https://github.com/nvm-sh/nvm#installing-and-updating
Install NVM
```
curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.0/install.sh | bash
```
Install Node 14
```
nvm install 14
```
### Install npm requirements
```
npm install -g yarn jest lerna
```
### Clone the repo
```
git clone https://github.com/Budibase/budibase.git
```
### Check Versions
This setup process was tested on Windows 11 with the version numbers shown below. Your mileage may vary using anything else.
- Docker: 20.10.7
- Docker-Compose: 2.10.2
- Node: v14.20.1
- Yarn: 1.22.19
- Lerna: 5.5.4
### Build
```
cd budibase
yarn setup
```
The yarn setup command runs several build steps i.e.
```
node ./hosting/scripts/setup.js && yarn && yarn bootstrap && yarn build && yarn dev
```
So this command will actually run the application in dev mode. It creates .env files under `./packages/server` and `./packages/worker` and runs docker containers for each service via docker-compose.
The dev version will be available on port 10000 i.e.
http://127.0.0.1:10000/builder/admin
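A quick smoke test after `yarn setup`, using nothing budibase-specific (container names follow the budi- prefix used by the dev docker-compose file):

```
# Confirm the dev containers are running and the proxy answers on port 10000
docker ps --filter "name=budi-" --format "table {{.Names}}\t{{.Status}}"
curl -I http://127.0.0.1:10000
```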
### Working with the code
Here are the instructions to work on the application from within Visual Studio Code (in Windows) through the WSL. All the commands and files are within the Ubuntu system and it should run as if you were working on a Linux machine.
https://code.visualstudio.com/docs/remote/wsl
Note you will be able to run the application from within the WSL terminal and access it from a browser in Windows.

File diff suppressed because it is too large


@@ -6,7 +6,8 @@ services:
minio-service:
container_name: budi-minio-dev
restart: on-failure
image: minio/minio
# Last version that supports the "fs" backend
image: minio/minio:RELEASE.2022-10-24T18-35-07Z
volumes:
- minio_data:/data
ports:
@@ -25,9 +26,9 @@ services:
proxy-service:
container_name: budi-nginx-dev
restart: on-failure
image: nginx:latest
image: budibase/proxy:latest
volumes:
- ./.generated-nginx.dev.conf:/etc/nginx/nginx.conf
- ./nginx.dev.conf:/etc/nginx/templates/nginx.conf.template
- ./proxy/error.html:/usr/share/nginx/html/error.html
ports:
- "${MAIN_PORT}:10000"
@@ -36,28 +37,21 @@ services:
- couchdb-service
extra_hosts:
- "host.docker.internal:host-gateway"
environment:
- PROXY_ADDRESS=host.docker.internal
couchdb-service:
# platform: linux/amd64
container_name: budi-couchdb-dev
container_name: budi-couchdb3-dev
restart: on-failure
image: ibmcom/couchdb3
image: budibase/couchdb
environment:
- COUCHDB_PASSWORD=${COUCH_DB_PASSWORD}
- COUCHDB_USER=${COUCH_DB_USER}
ports:
- "${COUCH_DB_PORT}:5984"
volumes:
- couchdb3_data:/opt/couchdb/data
couch-init:
container_name: budi-couchdb-init-dev
image: curlimages/curl
environment:
PUT_CALL: "curl -u ${COUCH_DB_USER}:${COUCH_DB_PASSWORD} -X PUT couchdb-service:5984"
depends_on:
- couchdb-service
command: ["sh","-c","sleep 10 && $${PUT_CALL}/_users && $${PUT_CALL}/_replicator; fg;"]
- couchdb_data:/data
redis-service:
container_name: budi-redis-dev
@@ -70,9 +64,9 @@ services:
- redis_data:/data
volumes:
couchdb3_data:
couchdb_data:
driver: local
minio_data:
driver: local
redis_data:
driver: local
driver: local


@@ -82,6 +82,12 @@ services:
environment:
- PROXY_RATE_LIMIT_WEBHOOKS_PER_SECOND=10
- PROXY_RATE_LIMIT_API_PER_SECOND=20
- APPS_UPSTREAM_URL=http://app-service:4002
- WORKER_UPSTREAM_URL=http://worker-service:4003
- MINIO_UPSTREAM_URL=http://minio-service:9000
- COUCHDB_UPSTREAM_URL=http://couchdb-service:5984
- WATCHTOWER_UPSTREAM_URL=http://watchtower-service:8080
- RESOLVER=127.0.0.11
depends_on:
- minio-service
- worker-service

hosting/nginx.dev.conf Normal file

@@ -0,0 +1,215 @@
user nginx;
error_log /var/log/nginx/error.log debug;
pid /var/run/nginx.pid;
worker_processes auto;
worker_rlimit_nofile 33282;
events {
worker_connections 1024;
}
http {
include /etc/nginx/mime.types;
default_type application/octet-stream;
proxy_set_header Host $host;
log_format main '$remote_addr - $remote_user [$time_local] "$request" '
'$status $body_bytes_sent "$http_referer" '
'"$http_user_agent" "$http_x_forwarded_for" '
'response_time=$upstream_response_time proxy_host=$proxy_host upstream_addr=$upstream_addr';
access_log /var/log/nginx/access.log main;
map $http_upgrade $connection_upgrade {
default "upgrade";
}
upstream app-service {
server ${PROXY_ADDRESS}:4001;
keepalive 32;
}
upstream worker-service {
server ${PROXY_ADDRESS}:4002;
keepalive 32;
}
upstream builder {
server ${PROXY_ADDRESS}:3000;
keepalive 32;
}
server {
listen 10000 default_server;
server_name _;
client_max_body_size 1000m;
ignore_invalid_headers off;
proxy_buffering off;
error_page 502 503 504 /error.html;
location = /error.html {
root /usr/share/nginx/html;
internal;
}
location /db/ {
proxy_pass http://couchdb-service:5984;
rewrite ^/db/(.*)$ /$1 break;
}
location ~ ^/api/(system|admin|global)/ {
proxy_read_timeout 120s;
proxy_connect_timeout 120s;
proxy_send_timeout 120s;
proxy_http_version 1.1;
proxy_set_header Host $host;
proxy_set_header Connection "";
proxy_pass http://worker-service;
}
location /api/backups/ {
proxy_read_timeout 1800s;
proxy_connect_timeout 1800s;
proxy_send_timeout 1800s;
proxy_pass http://app-service;
proxy_http_version 1.1;
proxy_set_header Connection "";
}
location /api/ {
proxy_read_timeout 120s;
proxy_connect_timeout 120s;
proxy_send_timeout 120s;
proxy_http_version 1.1;
proxy_set_header Host $host;
proxy_set_header Connection "";
proxy_pass http://app-service;
}
location = / {
proxy_read_timeout 120s;
proxy_connect_timeout 120s;
proxy_send_timeout 120s;
proxy_http_version 1.1;
proxy_set_header Host $host;
proxy_set_header Connection "";
proxy_pass http://app-service;
}
location /app_ {
proxy_read_timeout 120s;
proxy_connect_timeout 120s;
proxy_send_timeout 120s;
proxy_http_version 1.1;
proxy_set_header Host $host;
proxy_set_header Connection "";
proxy_pass http://app-service;
}
location /app {
proxy_read_timeout 120s;
proxy_connect_timeout 120s;
proxy_send_timeout 120s;
proxy_http_version 1.1;
proxy_set_header Host $host;
proxy_set_header Connection "";
proxy_pass http://app-service;
}
location /builder {
proxy_read_timeout 120s;
proxy_connect_timeout 120s;
proxy_send_timeout 120s;
proxy_http_version 1.1;
proxy_set_header Host $host;
proxy_set_header Connection "";
proxy_pass http://builder;
rewrite ^/builder(.*)$ /builder/$1 break;
}
location /builder/ {
proxy_http_version 1.1;
proxy_set_header Host $host;
proxy_set_header Connection $connection_upgrade;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_read_timeout 120s;
proxy_connect_timeout 120s;
proxy_send_timeout 120s;
proxy_pass http://builder;
}
location /vite/ {
proxy_pass http://builder;
proxy_read_timeout 120s;
proxy_connect_timeout 120s;
proxy_send_timeout 120s;
rewrite ^/vite(.*)$ /$1 break;
}
location /socket/ {
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection 'upgrade';
proxy_set_header Host $host;
proxy_cache_bypass $http_upgrade;
proxy_pass http://app-service;
}
location / {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Host $http_host;
proxy_connect_timeout 300;
proxy_http_version 1.1;
proxy_set_header Connection "";
chunked_transfer_encoding off;
proxy_pass http://minio-service:9000;
}
location /files/signed/ {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
# IMPORTANT: Signed urls will inspect the host header of the request.
# Normally a signed url will need to be generated with a specified client host in mind.
# To support dynamic hosts, e.g. some unknown self-hosted installation url,
# use a predefined host header. The host 'minio-service' is also used at the time of url signing.
proxy_set_header Host minio-service;
proxy_connect_timeout 300;
proxy_http_version 1.1;
proxy_set_header Connection "";
chunked_transfer_encoding off;
proxy_pass http://minio-service:9000;
rewrite ^/files/signed/(.*)$ /$1 break;
}
client_header_timeout 60;
client_body_timeout 60;
keepalive_timeout 60;
gzip off;
gzip_comp_level 4;
}
}


@@ -1,117 +0,0 @@
user nginx;
error_log /var/log/nginx/error.log debug;
pid /var/run/nginx.pid;
worker_processes auto;
worker_rlimit_nofile 33282;
events {
worker_connections 1024;
}
http {
include /etc/nginx/mime.types;
default_type application/octet-stream;
proxy_set_header Host $host;
log_format main '$remote_addr - $remote_user [$time_local] "$request" '
'$status $body_bytes_sent "$http_referer" '
'"$http_user_agent" "$http_x_forwarded_for" '
'response_time=$upstream_response_time proxy_host=$proxy_host upstream_addr=$upstream_addr';
access_log /var/log/nginx/access.log main;
map $http_upgrade $connection_upgrade {
default "upgrade";
}
server {
listen 10000 default_server;
server_name _;
client_max_body_size 1000m;
ignore_invalid_headers off;
proxy_buffering off;
error_page 502 503 504 /error.html;
location = /error.html {
root /usr/share/nginx/html;
internal;
}
location /db/ {
proxy_pass http://couchdb-service:5984;
rewrite ^/db/(.*)$ /$1 break;
}
location ~ ^/api/(system|admin|global)/ {
proxy_pass http://{{ address }}:4002;
}
location /api/ {
proxy_read_timeout 120s;
proxy_connect_timeout 120s;
proxy_send_timeout 120s;
proxy_pass http://{{ address }}:4001;
}
location = / {
proxy_pass http://{{ address }}:4001;
}
location /app_ {
proxy_pass http://{{ address }}:4001;
}
location /app {
proxy_pass http://{{ address }}:4001;
}
location /builder {
proxy_pass http://{{ address }}:3000;
rewrite ^/builder(.*)$ /builder/$1 break;
}
location /builder/ {
proxy_pass http://{{ address }}:3000;
proxy_http_version 1.1;
proxy_set_header Connection $connection_upgrade;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
}
location /vite/ {
proxy_pass http://{{ address }}:3000;
rewrite ^/vite(.*)$ /$1 break;
}
location /socket/ {
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection 'upgrade';
proxy_set_header Host $host;
proxy_cache_bypass $http_upgrade;
proxy_pass http://{{ address }}:4001;
}
location / {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Host $http_host;
proxy_connect_timeout 300;
proxy_http_version 1.1;
proxy_set_header Connection "";
chunked_transfer_encoding off;
proxy_pass http://minio-service:9000;
}
client_header_timeout 60;
client_body_timeout 60;
keepalive_timeout 60;
gzip off;
gzip_comp_level 4;
}
}


@@ -0,0 +1,24 @@
#!/bin/sh
# vim:sw=4:ts=4:et
set -e
ME=$(basename $0)
NGINX_CONF_FILE="/etc/nginx/nginx.conf"
DEFAULT_CONF_FILE="/etc/nginx/conf.d/default.conf"
# check if we have ipv6 available
if [ ! -f "/proc/net/if_inet6" ]; then
# ipv6 not available so delete lines from nginx conf
if [ -f "$NGINX_CONF_FILE" ]; then
sed -i '/listen \[::\]/d' $NGINX_CONF_FILE
fi
if [ -f "$DEFAULT_CONF_FILE" ]; then
sed -i '/listen \[::\]/d' $DEFAULT_CONF_FILE
fi
echo "$ME: info: ipv6 not available so delete lines from nginx conf"
else
echo "$ME: info: ipv6 is available so no need to delete lines from nginx conf"
fi
exit 0


@@ -0,0 +1,24 @@
#!/bin/sh
# vim:sw=4:ts=4:et
set -e
ME=$(basename $0)
NGINX_CONF_FILE="/etc/nginx/nginx.conf"
DEFAULT_CONF_FILE="/etc/nginx/conf.d/default.conf"
# check if we have ipv6 available
if [ ! -f "/proc/net/if_inet6" ]; then
# ipv6 not available so delete lines from nginx conf
if [ -f "$NGINX_CONF_FILE" ]; then
sed -i '/listen \[::\]/d' $NGINX_CONF_FILE
fi
if [ -f "$DEFAULT_CONF_FILE" ]; then
sed -i '/listen \[::\]/d' $DEFAULT_CONF_FILE
fi
echo "$ME: info: ipv6 not available so delete lines from nginx conf"
else
echo "$ME: info: ipv6 is available so no need to delete lines from nginx conf"
fi
exit 0


@@ -4,11 +4,23 @@ FROM nginx:latest
# use the default nginx behaviour for *.template files which are processed with envsubst
# override the output dir to output directly to /etc/nginx instead of /etc/nginx/conf.d
ENV NGINX_ENVSUBST_OUTPUT_DIR=/etc/nginx
COPY .generated-nginx.prod.conf /etc/nginx/templates/nginx.conf.template
COPY nginx.prod.conf /etc/nginx/templates/nginx.conf.template
# IPv6 removal needs to happen after envsubst
RUN rm -rf /docker-entrypoint.d/10-listen-on-ipv6-by-default.sh
COPY 80-listen-on-ipv6-by-default.sh /docker-entrypoint.d/80-listen-on-ipv6-by-default.sh
RUN chmod +x /docker-entrypoint.d/80-listen-on-ipv6-by-default.sh
# Error handling
COPY error.html /usr/share/nginx/html/error.html
# Default environment
ENV PROXY_RATE_LIMIT_WEBHOOKS_PER_SECOND=10
ENV PROXY_RATE_LIMIT_API_PER_SECOND=20
ENV PROXY_RATE_LIMIT_API_PER_SECOND=20
# Use docker-compose values as defaults for backwards compatibility
ENV APPS_UPSTREAM_URL=http://app-service:4002
ENV WORKER_UPSTREAM_URL=http://worker-service:4003
ENV MINIO_UPSTREAM_URL=http://minio-service:9000
ENV COUCHDB_UPSTREAM_URL=http://couchdb-service:5984
ENV WATCHTOWER_UPSTREAM_URL=http://watchtower-service:8080
ENV RESOLVER=127.0.0.11
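Since the upstreams are now plain environment variables rendered into the nginx config by the image's envsubst templating, the proxy can be repointed at different services without a rebuild. A hedged sketch with placeholder hostnames:

```
# Override the Dockerfile ENV defaults at runtime; each variable maps to an
# upstream in nginx.prod.conf. The *.internal hostnames are illustrative.
docker run --rm -p 10000:10000 \
  -e APPS_UPSTREAM_URL=http://apps.internal:4002 \
  -e WORKER_UPSTREAM_URL=http://worker.internal:4003 \
  -e MINIO_UPSTREAM_URL=http://minio.internal:9000 \
  -e COUCHDB_UPSTREAM_URL=http://couchdb.internal:5984 \
  -e RESOLVER=1.1.1.1 \
  budibase/proxy:latest
```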


@@ -23,7 +23,7 @@ http {
tcp_nodelay on;
server_tokens off;
types_hash_max_size 2048;
resolver {{ resolver }} valid=10s ipv6=off;
resolver ${RESOLVER} valid=10s ipv6=off;
# buffering
client_header_buffer_size 1k;
@@ -51,11 +51,11 @@ http {
proxy_buffering off;
set $csp_default "default-src 'self'";
set $csp_script "script-src 'self' 'unsafe-inline' 'unsafe-eval' https://cdn.budi.live https://js.intercomcdn.com https://widget.intercom.io";
set $csp_script "script-src 'self' 'unsafe-inline' 'unsafe-eval' https://*.budibase.net https://cdn.budi.live https://js.intercomcdn.com https://widget.intercom.io";
set $csp_style "style-src 'self' 'unsafe-inline' https://cdn.jsdelivr.net https://fonts.googleapis.com https://rsms.me https://maxcdn.bootstrapcdn.com";
set $csp_object "object-src 'none'";
set $csp_base_uri "base-uri 'self'";
set $csp_connect "connect-src 'self' https://api-iam.intercom.io https://api-iam.intercom.io https://api-ping.intercom.io https://app.posthog.com wss://nexus-websocket-a.intercom.io wss://nexus-websocket-b.intercom.io https://nexus-websocket-a.intercom.io https://nexus-websocket-b.intercom.io https://uploads.intercomcdn.com https://uploads.intercomusercontent.com https://*.s3.amazonaws.com https://*.s3.us-east-2.amazonaws.com https://*.s3.us-east-1.amazonaws.com https://*.s3.us-west-1.amazonaws.com https://*.s3.us-west-2.amazonaws.com https://*.s3.af-south-1.amazonaws.com https://*.s3.ap-east-1.amazonaws.com https://*.s3.ap-southeast-3.amazonaws.com https://*.s3.ap-south-1.amazonaws.com https://*.s3.ap-northeast-3.amazonaws.com https://*.s3.ap-northeast-2.amazonaws.com https://*.s3.ap-southeast-1.amazonaws.com https://*.s3.ap-southeast-2.amazonaws.com https://*.s3.ap-northeast-1.amazonaws.com https://*.s3.ca-central-1.amazonaws.com https://*.s3.cn-north-1.amazonaws.com https://*.s3.cn-northwest-1.amazonaws.com https://*.s3.eu-central-1.amazonaws.com https://*.s3.eu-west-1.amazonaws.com https://*.s3.eu-west-2.amazonaws.com https://*.s3.eu-south-1.amazonaws.com https://*.s3.eu-west-3.amazonaws.com https://*.s3.eu-north-1.amazonaws.com https://*.s3.sa-east-1.amazonaws.com https://s3.sa-east-1.amazonaws.com https://*.s3.me-south-1.amazonaws.com https://*.s3.us-gov-east-1.amazonaws.com https://*.s3.us-gov-west-1.amazonaws.com";
set $csp_connect "connect-src 'self' https://*.budibase.net https://api-iam.intercom.io https://api-iam.intercom.io https://api-ping.intercom.io https://app.posthog.com wss://nexus-websocket-a.intercom.io wss://nexus-websocket-b.intercom.io https://nexus-websocket-a.intercom.io https://nexus-websocket-b.intercom.io https://uploads.intercomcdn.com https://uploads.intercomusercontent.com https://*.s3.amazonaws.com https://*.s3.us-east-2.amazonaws.com https://*.s3.us-east-1.amazonaws.com https://*.s3.us-west-1.amazonaws.com https://*.s3.us-west-2.amazonaws.com https://*.s3.af-south-1.amazonaws.com https://*.s3.ap-east-1.amazonaws.com https://*.s3.ap-southeast-3.amazonaws.com https://*.s3.ap-south-1.amazonaws.com https://*.s3.ap-northeast-3.amazonaws.com https://*.s3.ap-northeast-2.amazonaws.com https://*.s3.ap-southeast-1.amazonaws.com https://*.s3.ap-southeast-2.amazonaws.com https://*.s3.ap-northeast-1.amazonaws.com https://*.s3.ca-central-1.amazonaws.com https://*.s3.cn-north-1.amazonaws.com https://*.s3.cn-northwest-1.amazonaws.com https://*.s3.eu-central-1.amazonaws.com https://*.s3.eu-west-1.amazonaws.com https://*.s3.eu-west-2.amazonaws.com https://*.s3.eu-south-1.amazonaws.com https://*.s3.eu-west-3.amazonaws.com https://*.s3.eu-north-1.amazonaws.com https://*.s3.sa-east-1.amazonaws.com https://*.s3.me-south-1.amazonaws.com https://*.s3.us-gov-east-1.amazonaws.com https://*.s3.us-gov-west-1.amazonaws.com https://api.github.com";
set $csp_font "font-src 'self' data: https://cdn.jsdelivr.net https://fonts.gstatic.com https://rsms.me https://maxcdn.bootstrapcdn.com https://js.intercomcdn.com https://fonts.intercomcdn.com";
set $csp_frame "frame-src 'self' https:";
set $csp_img "img-src http: https: data: blob:";
@@ -76,46 +76,65 @@ http {
add_header Content-Security-Policy "${csp_default}; ${csp_script}; ${csp_style}; ${csp_object}; ${csp_base_uri}; ${csp_connect}; ${csp_font}; ${csp_frame}; ${csp_img}; ${csp_manifest}; ${csp_media}; ${csp_worker};" always;
# upstreams
set $apps {{ apps }};
set $worker {{ worker }};
set $minio {{ minio }};
set $couchdb {{ couchdb }};
{{#if watchtower}}
set $watchtower {{ watchtower }};
{{/if}}
set $apps ${APPS_UPSTREAM_URL};
set $worker ${WORKER_UPSTREAM_URL};
set $minio ${MINIO_UPSTREAM_URL};
set $couchdb ${COUCHDB_UPSTREAM_URL};
set $watchtower ${WATCHTOWER_UPSTREAM_URL};
location /app {
proxy_pass http://$apps:4002;
proxy_pass $apps;
}
location = / {
proxy_pass http://$apps:4002;
proxy_pass $apps;
}
{{#if watchtower}}
location = /v1/update {
proxy_pass http://$watchtower:8080;
proxy_pass $watchtower;
}
{{/if}}
location ~ ^/(builder|app_) {
proxy_http_version 1.1;
proxy_set_header Connection $connection_upgrade;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_pass http://$apps:4002;
proxy_set_header Host $host;
proxy_pass $apps;
}
location ~ ^/api/(system|admin|global)/ {
proxy_pass http://$worker:4003;
proxy_set_header Host $host;
proxy_pass $worker;
}
location /worker/ {
proxy_pass http://$worker:4003;
proxy_set_header Host $host;
proxy_pass $worker;
rewrite ^/worker/(.*)$ /$1 break;
}
location /api/backups/ {
# calls to export apps are limited
limit_req zone=ratelimit burst=20 nodelay;
# 1800s timeout for app export requests
proxy_read_timeout 1800s;
proxy_connect_timeout 1800s;
proxy_send_timeout 1800s;
proxy_http_version 1.1;
proxy_set_header Connection $connection_upgrade;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_pass $apps;
}
location /api/ {
# calls to the API are rate limited with bursting
limit_req zone=ratelimit burst=20 nodelay;
@@ -130,8 +149,9 @@ http {
proxy_set_header Upgrade $http_upgrade;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header Host $host;
proxy_pass http://$apps:4002;
proxy_pass $apps;
}
location /api/webhooks/ {
@@ -149,12 +169,13 @@ http {
proxy_set_header Upgrade $http_upgrade;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header Host $host;
proxy_pass http://$apps:4002;
proxy_pass $apps;
}
location /db/ {
proxy_pass http://$couchdb:5984;
proxy_pass $couchdb;
rewrite ^/db/(.*)$ /$1 break;
}
@@ -164,19 +185,41 @@ http {
proxy_set_header Connection 'upgrade';
proxy_set_header Host $host;
proxy_cache_bypass $http_upgrade;
proxy_pass http://$apps:4002;
proxy_pass $apps;
}
location / {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Host $http_host;
proxy_connect_timeout 300;
proxy_http_version 1.1;
proxy_set_header Connection "";
chunked_transfer_encoding off;
proxy_pass http://$minio:9000;
proxy_pass $minio;
}
location /files/signed/ {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
# IMPORTANT: Signed urls will inspect the host header of the request.
# Normally a signed url will need to be generated with a specified client host in mind.
# To support dynamic hosts, e.g. some unknown self-hosted installation url,
# use a predefined host header. The host 'minio-service' is also used at the time of url signing.
proxy_set_header Host minio-service;
proxy_connect_timeout 300;
proxy_http_version 1.1;
proxy_set_header Connection "";
chunked_transfer_encoding off;
proxy_pass $minio;
rewrite ^/files/signed/(.*)$ /$1 break;
}
client_header_timeout 60;


@@ -4,6 +4,7 @@ echo ${TARGETBUILD} > /buildtarget.txt
if [[ "${TARGETBUILD}" = "aas" ]]; then
# Azure AppService uses /home for persistent data & SSH on port 2222
DATA_DIR=/home
WEBSITES_ENABLE_APP_SERVICE_STORAGE=true
mkdir -p $DATA_DIR/{search,minio,couch}
mkdir -p $DATA_DIR/couch/{dbs,views}
chown -R couchdb:couchdb $DATA_DIR/couch/


@@ -19,8 +19,8 @@ ADD packages/worker .
RUN node /pinVersions.js && yarn && yarn build && /cleanup.sh
FROM couchdb:3.2.1
# TARGETARCH can be amd64 or arm e.g. docker build --build-arg TARGETARCH=amd64
ARG TARGETARCH=amd64
ARG TARGETARCH
ENV TARGETARCH $TARGETARCH
#TARGETBUILD can be set to single (for single docker image) or aas (for azure app service)
# e.g. docker build --build-arg TARGETBUILD=aas ....
ARG TARGETBUILD=single
@@ -61,7 +61,8 @@ ADD hosting/single/nginx/nginx.conf /etc/nginx
ADD hosting/single/nginx/nginx-default-site.conf /etc/nginx/sites-enabled/default
RUN mkdir -p /var/log/nginx && \
touch /var/log/nginx/error.log && \
touch /var/run/nginx.pid
touch /var/run/nginx.pid && \
usermod -a -G tty www-data
WORKDIR /
RUN mkdir -p scripts/integrations/oracle


@@ -2,7 +2,8 @@ server {
listen 80 default_server;
listen [::]:80 default_server;
server_name _;
error_log /dev/stderr warn;
access_log /dev/stdout main;
client_max_body_size 1000m;
ignore_invalid_headers off;
proxy_buffering off;
@@ -43,6 +44,24 @@ server {
rewrite ^/worker/(.*)$ /$1 break;
}
location /api/backups/ {
# calls to export apps are limited
limit_req zone=ratelimit burst=20 nodelay;
# 1800s timeout for app export requests
proxy_read_timeout 1800s;
proxy_connect_timeout 1800s;
proxy_send_timeout 1800s;
proxy_http_version 1.1;
proxy_set_header Connection $connection_upgrade;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_pass http://127.0.0.1:4001;
}
location /api/ {
# calls to the API are rate limited with bursting
limit_req zone=ratelimit burst=20 nodelay;
@@ -76,15 +95,37 @@ server {
}
location / {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Host $http_host;
proxy_connect_timeout 300;
proxy_http_version 1.1;
proxy_set_header Connection "";
chunked_transfer_encoding off;
proxy_pass http://127.0.0.1:9000;
proxy_connect_timeout 300;
proxy_http_version 1.1;
proxy_set_header Connection "";
chunked_transfer_encoding off;
proxy_pass http://127.0.0.1:9000;
}
location /files/signed/ {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
# IMPORTANT: Signed URL validation inspects the Host header of the request.
# Normally a signed URL is generated with a specific client host in mind.
# To support dynamic hosts, e.g. an unknown self-hosted installation URL,
# use a predefined host header. The host 'minio-service' is also used at the time of URL signing.
proxy_set_header Host minio-service;
proxy_connect_timeout 300;
proxy_http_version 1.1;
proxy_set_header Connection "";
chunked_transfer_encoding off;
proxy_pass http://127.0.0.1:9000;
rewrite ^/files/signed/(.*)$ /$1 break;
}
client_header_timeout 60;

View File

@ -1,5 +1,5 @@
user www-data www-data;
error_log /var/log/nginx/error.log;
error_log /dev/stderr warn;
pid /var/run/nginx.pid;
worker_processes auto;
worker_rlimit_nofile 8192;

View File

@ -10,7 +10,7 @@ declare -a DOCKER_VARS=("APP_PORT" "APPS_URL" "ARCHITECTURE" "BUDIBASE_ENVIRONME
[[ -z "${MINIO_URL}" ]] && export MINIO_URL=http://localhost:9000
[[ -z "${NODE_ENV}" ]] && export NODE_ENV=production
[[ -z "${POSTHOG_TOKEN}" ]] && export POSTHOG_TOKEN=phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU
[[ -z "${TENANT_FEATURE_FLAGS}" ]] && export TENANT_FEATURE_FLAGS="*:LICENSING,*:USER_GROUPS"
[[ -z "${TENANT_FEATURE_FLAGS}" ]] && export TENANT_FEATURE_FLAGS="*:LICENSING,*:USER_GROUPS,*:ONBOARDING_TOUR"
[[ -z "${ACCOUNT_PORTAL_URL}" ]] && export ACCOUNT_PORTAL_URL=https://account.budibase.app
[[ -z "${REDIS_URL}" ]] && export REDIS_URL=localhost:6379
[[ -z "${SELF_HOSTED}" ]] && export SELF_HOSTED=1
@ -18,13 +18,24 @@ declare -a DOCKER_VARS=("APP_PORT" "APPS_URL" "ARCHITECTURE" "BUDIBASE_ENVIRONME
[[ -z "${WORKER_URL}" ]] && export WORKER_URL=http://localhost:4002
[[ -z "${APPS_URL}" ]] && export APPS_URL=http://localhost:4001
# export CUSTOM_DOMAIN=budi001.custom.com
# Azure App Service customisations
if [[ "${TARGETBUILD}" = "aas" ]]; then
DATA_DIR=/home
WEBSITES_ENABLE_APP_SERVICE_STORAGE=true
/etc/init.d/ssh start
else
DATA_DIR=${DATA_DIR:-/data}
fi
mkdir -p ${DATA_DIR}
# Mount NFS or GCP Filestore if env vars exist for it
if [[ ! -z ${FILESHARE_IP} && ! -z ${FILESHARE_NAME} ]]; then
echo "Mounting NFS share"
apt update && apt install -y nfs-common nfs-kernel-server
echo "Mount file share ${FILESHARE_IP}:/${FILESHARE_NAME} to ${DATA_DIR}"
mount -o nolock ${FILESHARE_IP}:/${FILESHARE_NAME} ${DATA_DIR}
echo "Mounting result: $?"
fi
if [ -f "${DATA_DIR}/.env" ]; then
# Read in the .env file and export the variables
@ -65,9 +76,9 @@ mkdir -p ${DATA_DIR}/couch/{dbs,views}
mkdir -p ${DATA_DIR}/minio
mkdir -p ${DATA_DIR}/search
chown -R couchdb:couchdb ${DATA_DIR}/couch
redis-server --requirepass $REDIS_PASSWORD &
/opt/clouseau/bin/clouseau &
/minio/minio server ${DATA_DIR}/minio &
redis-server --requirepass $REDIS_PASSWORD > /dev/stdout 2>&1 &
/opt/clouseau/bin/clouseau > /dev/stdout 2>&1 &
/minio/minio server --console-address ":9001" ${DATA_DIR}/minio > /dev/stdout 2>&1 &
/docker-entrypoint.sh /opt/couchdb/bin/couchdb &
/etc/init.d/nginx restart
if [[ ! -z "${CUSTOM_DOMAIN}" ]]; then
@ -76,16 +87,18 @@ if [[ ! -z "${CUSTOM_DOMAIN}" ]]; then
chmod +x /etc/cron.d/certificate-renew
# Request the certbot certificate
/app/letsencrypt/certificate-request.sh ${CUSTOM_DOMAIN}
/etc/init.d/nginx restart
fi
/etc/init.d/nginx restart
pushd app
pm2 start --name app "yarn run:docker"
pm2 start -l /dev/stdout --name app "yarn run:docker"
popd
pushd worker
pm2 start --name worker "yarn run:docker"
pm2 start -l /dev/stdout --name worker "yarn run:docker"
popd
sleep 10
echo "curl to couchdb endpoints"
curl -X PUT ${COUCH_DB_URL}/_users
curl -X PUT ${COUCH_DB_URL}/_replicator
echo "end of runner.sh, sleeping ..."
sleep infinity

View File

@ -1,5 +1,5 @@
{
"version": "2.0.22",
"version": "2.3.16",
"npmClient": "yarn",
"packages": [
"packages/*"

View File

@ -3,8 +3,7 @@
"private": true,
"devDependencies": {
"@rollup/plugin-json": "^4.0.2",
"@types/mongodb": "3.6.3",
"@typescript-eslint/parser": "4.28.0",
"@typescript-eslint/parser": "5.45.0",
"babel-eslint": "^10.0.3",
"eslint": "^7.28.0",
"eslint-plugin-cypress": "^2.11.3",
@ -19,13 +18,14 @@
"rimraf": "^3.0.2",
"rollup-plugin-replace": "^2.2.0",
"svelte": "^3.38.2",
"typescript": "4.5.5"
"typescript": "4.7.3"
},
"scripts": {
"setup": "node ./hosting/scripts/setup.js && yarn && yarn bootstrap && yarn build && yarn dev",
"bootstrap": "lerna bootstrap && lerna link && ./scripts/link-dependencies.sh",
"build": "lerna run build",
"build:dev": "lerna run prebuild && tsc --build --watch --preserveWatchOutput",
"build:backend": "lerna run build --ignore @budibase/client --ignore @budibase/bbui --ignore @budibase/builder --ignore @budibase/cli",
"build:sdk": "lerna run build:sdk",
"deps:circular": "madge packages/server/dist/index.js packages/worker/src/index.ts packages/backend-core/dist/src/index.js packages/cli/src/index.js --circular",
"release": "lerna publish ${RELEASE_VERSION_TYPE:-patch} --yes --force-publish && yarn release:pro",
@ -45,26 +45,18 @@
"dev:server": "yarn run kill-server && lerna run --parallel dev:builder --concurrency 1 --scope @budibase/backend-core --scope @budibase/worker --scope @budibase/server",
"test": "lerna run test && yarn test:pro",
"test:pro": "bash scripts/pro/test.sh",
"lint:eslint": "eslint packages",
"lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\"",
"lint:eslint": "eslint packages && eslint qa-core",
"lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\" && prettier --check \"qa-core/**/*.{js,ts,svelte}\"",
"lint": "yarn run lint:eslint && yarn run lint:prettier",
"lint:fix:eslint": "eslint --fix packages qa-core",
"lint:fix:prettier": "prettier --write \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\" && prettier --write \"qa-core/**/*.{js,ts,svelte}\"",
"lint:fix": "yarn run lint:fix:prettier && yarn run lint:fix:eslint",
"test:e2e": "lerna run cy:test --stream",
"test:e2e:ci": "lerna run cy:ci --stream",
"test:e2e:ci:record": "lerna run cy:ci:record --stream",
"test:e2e:ci:notify": "lerna run cy:ci:notify",
"build:specs": "lerna run specs",
"build:docker": "lerna run build:docker && npm run build:docker:proxy:compose && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh $BUDIBASE_RELEASE_VERSION && cd -",
"build:docker": "lerna run build:docker && npm run build:docker:proxy && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh $BUDIBASE_RELEASE_VERSION && cd -",
"build:docker:pre": "lerna run build && lerna run predocker",
"build:docker:proxy": "docker build hosting/proxy -t proxy-service",
"build:docker:proxy:compose": "node scripts/proxy/generateProxyConfig compose && npm run build:docker:proxy",
"build:docker:proxy:preprod": "node scripts/proxy/generateProxyConfig preprod && npm run build:docker:proxy",
"build:docker:proxy:release": "node scripts/proxy/generateProxyConfig release && npm run build:docker:proxy",
"build:docker:proxy:prod": "node scripts/proxy/generateProxyConfig prod && npm run build:docker:proxy",
"build:docker:selfhost": "lerna run build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh latest && cd -",
"build:docker:develop": "node scripts/pinVersions && lerna run build:docker && npm run build:docker:proxy:compose && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh develop && cd -",
"build:docker:develop": "node scripts/pinVersions && lerna run build:docker && npm run build:docker:proxy && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh develop && cd -",
"build:docker:airgap": "node hosting/scripts/airgapped/airgappedDockerBuild",
"build:digitalocean": "cd hosting/digitalocean && ./build.sh && cd -",
"build:docker:single:multiarch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/single/Dockerfile -t budibase:latest .",
@ -76,8 +68,8 @@
"env:multi:disable": "lerna run env:multi:disable",
"env:selfhost:enable": "lerna run env:selfhost:enable",
"env:selfhost:disable": "lerna run env:selfhost:disable",
"env:localdomain:enable": "lerna run env:localdomain:enable",
"env:localdomain:disable": "lerna run env:localdomain:disable",
"env:localdomain:enable": "./scripts/localdomain.sh enable",
"env:localdomain:disable": "./scripts/localdomain.sh disable",
"env:account:enable": "lerna run env:account:enable",
"env:account:disable": "lerna run env:account:disable",
"mode:self": "yarn env:selfhost:enable && yarn env:multi:disable && yarn env:account:disable",

View File

@ -0,0 +1,18 @@
const mockS3 = {
headBucket: jest.fn().mockReturnThis(),
deleteObject: jest.fn().mockReturnThis(),
deleteObjects: jest.fn().mockReturnThis(),
createBucket: jest.fn().mockReturnThis(),
listObject: jest.fn().mockReturnThis(),
getSignedUrl: jest.fn((operation: string, params: any) => {
return `http://s3.example.com/${params.Bucket}/${params.Key}`
}),
promise: jest.fn().mockReturnThis(),
catch: jest.fn(),
}
const AWS = {
S3: jest.fn(() => mockS3),
}
export default AWS
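A sketch of how a test might consume this manual mock, assuming the file sits in a root-level __mocks__ directory, where Jest applies it automatically for node_modules packages; the explicit jest.mock call just makes that visible. Bucket and key are placeholders.

import AWS from "aws-sdk" // resolves to the manual mock above

jest.mock("aws-sdk")

test("getSignedUrl returns the mocked URL", () => {
  const s3 = new AWS.S3()
  const url = s3.getSignedUrl("getObject", { Bucket: "my-bucket", Key: "my-key" })
  expect(url).toBe("http://s3.example.com/my-bucket/my-key")
})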

View File

@ -1 +0,0 @@
module.exports = require("./src/cloud/accounts")

View File

@ -1 +0,0 @@
module.exports = require("./src/auth")

View File

@ -1,9 +0,0 @@
const generic = require("./src/cache/generic")
module.exports = {
user: require("./src/cache/user"),
app: require("./src/cache/appMetadata"),
writethrough: require("./src/cache/writethrough"),
...generic,
cache: generic,
}

View File

@ -1 +0,0 @@
module.exports = require("./src/constants")

View File

@ -1,22 +0,0 @@
const {
getAppDB,
getDevAppDB,
getProdAppDB,
getAppId,
updateAppId,
doInAppContext,
doInTenant,
} = require("./src/context")
const identity = require("./src/context/identity")
module.exports = {
getAppDB,
getDevAppDB,
getProdAppDB,
getAppId,
updateAppId,
doInAppContext,
doInTenant,
identity,
}

View File

@ -1,7 +0,0 @@
module.exports = {
...require("./src/db/utils"),
...require("./src/db/constants"),
...require("./src/db"),
...require("./src/db/views"),
...require("./src/db/pouch"),
}

View File

@ -1 +0,0 @@
module.exports = require("./src/context/deprovision")

View File

@ -1 +0,0 @@
module.exports = require("./src/security/encryption")

View File

@ -0,0 +1,24 @@
import { Config } from "@jest/types"
const config: Config.InitialOptions = {
preset: "ts-jest",
testEnvironment: "node",
setupFiles: ["./tests/jestSetup.ts"],
collectCoverageFrom: ["src/**/*.{js,ts}"],
coverageReporters: ["lcov", "json", "clover"],
transform: {
"^.+\\.ts?$": "@swc/jest",
},
}
if (!process.env.CI) {
// use sources when not in CI
config.moduleNameMapper = {
"@budibase/types": "<rootDir>/../types/src",
"^axios.*$": "<rootDir>/node_modules/axios/lib/axios.js",
}
} else {
console.log("Running tests with compiled dependency sources")
}
export default config

View File

@ -1 +0,0 @@
module.exports = require("./src/logging")

View File

@ -1 +0,0 @@
module.exports = require("./src/middleware")

View File

@ -1 +0,0 @@
module.exports = require("./src/migrations")

View File

@ -1,4 +0,0 @@
module.exports = {
...require("./src/objectStore"),
...require("./src/objectStore/utils"),
}

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/backend-core",
"version": "2.0.22",
"version": "2.3.16",
"description": "Budibase backend core libraries used in server and worker",
"main": "dist/src/index.js",
"types": "dist/src/index.d.ts",
@ -15,27 +15,32 @@
"prebuild": "rimraf dist/",
"prepack": "cp package.json dist",
"build": "tsc -p tsconfig.build.json",
"build:pro": "../../scripts/pro/build.sh",
"postbuild": "yarn run build:pro",
"build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput",
"test": "jest",
"test": "jest --coverage --maxWorkers=2",
"test:watch": "jest --watchAll"
},
"dependencies": {
"@budibase/types": "^2.0.22",
"@budibase/nano": "10.1.1",
"@budibase/types": "^2.3.16",
"@shopify/jest-koa-mocks": "5.0.1",
"@techpass/passport-openidconnect": "0.3.2",
"aws-cloudfront-sign": "2.2.0",
"aws-sdk": "2.1030.0",
"bcrypt": "5.0.1",
"bcryptjs": "2.4.3",
"bull": "4.10.1",
"correlation-id": "4.0.0",
"dotenv": "16.0.1",
"emitter-listener": "1.1.2",
"ioredis": "4.28.0",
"joi": "17.6.0",
"jsonwebtoken": "8.5.1",
"jsonwebtoken": "9.0.0",
"koa-passport": "4.1.4",
"lodash": "4.17.21",
"lodash.isarguments": "3.1.0",
"node-fetch": "2.6.7",
"passport-google-auth": "1.0.2",
"passport-google-oauth": "2.0.0",
"passport-jwt": "4.0.0",
"passport-local": "1.0.0",
@ -51,34 +56,33 @@
"uuid": "8.3.2",
"zlib": "1.0.5"
},
"jest": {
"preset": "ts-jest",
"testEnvironment": "node",
"moduleNameMapper": {
"@budibase/types": "<rootDir>/../types/src"
},
"setupFiles": [
"./scripts/jestSetup.ts"
]
},
"devDependencies": {
"@swc/core": "^1.3.25",
"@swc/jest": "^0.2.24",
"@types/chance": "1.1.3",
"@types/ioredis": "4.28.0",
"@types/jest": "27.5.1",
"@types/koa": "2.0.52",
"@types/koa": "2.13.4",
"@types/koa-pino-logger": "3.0.0",
"@types/lodash": "4.14.180",
"@types/node": "14.18.20",
"@types/node-fetch": "2.6.1",
"@types/pino-http": "5.8.1",
"@types/pouchdb": "6.4.0",
"@types/redlock": "4.0.3",
"@types/semver": "7.3.7",
"@types/tar-fs": "2.0.1",
"@types/uuid": "8.3.4",
"chance": "1.1.8",
"ioredis-mock": "5.8.0",
"jest": "27.5.1",
"koa": "2.7.0",
"jest": "28.1.1",
"koa": "2.13.4",
"nodemon": "2.0.16",
"pouchdb-adapter-memory": "7.2.2",
"timekeeper": "2.2.0",
"ts-jest": "27.1.5",
"ts-jest": "28.0.4",
"ts-node": "10.8.1",
"tsconfig-paths": "4.0.0",
"typescript": "4.7.3"
},
"gitHead": "d1836a898cab3f8ab80ee6d8f42be1a9eed7dcdc"

View File

@ -1 +0,0 @@
module.exports = require("./src/security/permissions")

View File

@ -1,3 +0,0 @@
module.exports = {
...require("./src/plugin"),
}

View File

@ -0,0 +1 @@
export * from "./src/plugin"

View File

@ -1,5 +0,0 @@
module.exports = {
Client: require("./src/redis"),
utils: require("./src/redis/utils"),
clients: require("./src/redis/init"),
}

View File

@ -1 +0,0 @@
module.exports = require("./src/security/roles")

View File

@ -1,12 +0,0 @@
import env from "../src/environment"
import { mocks } from "../tests/utilities"
// mock all dates to 2020-01-01T00:00:00.000Z
// use tk.reset() to use real dates in individual tests
import tk from "timekeeper"
tk.freeze(mocks.date.MOCK_DATE)
env._set("SELF_HOSTED", "1")
env._set("NODE_ENV", "jest")
env._set("JWT_SECRET", "test-jwtsecret")
env._set("LOG_LEVEL", "silent")

View File

@ -1 +0,0 @@
module.exports = require("./src/security/sessions")

View File

@ -1,16 +1,14 @@
const passport = require("koa-passport")
const _passport = require("koa-passport")
const LocalStrategy = require("passport-local").Strategy
const JwtStrategy = require("passport-jwt").Strategy
import { getGlobalDB } from "./tenancy"
import { getGlobalDB } from "../tenancy"
const refresh = require("passport-oauth2-refresh")
import { Configs } from "./constants"
import { getScopedConfig } from "./db/utils"
import { Config } from "../constants"
import { getScopedConfig } from "../db"
import {
jwt,
jwt as jwtPassport,
local,
authenticated,
google,
oidc,
auditLog,
tenancy,
authError,
@ -21,17 +19,41 @@ import {
builderOnly,
builderOrAdmin,
joiValidator,
} from "./middleware"
import { invalidateUser } from "./cache/user"
oidc,
google,
} from "../middleware"
import { invalidateUser } from "../cache/user"
import { User } from "@budibase/types"
import { logAlert } from "../logging"
export {
auditLog,
authError,
internalApi,
ssoCallbackUrl,
adminOnly,
builderOnly,
builderOrAdmin,
joiValidator,
google,
oidc,
} from "../middleware"
export const buildAuthMiddleware = authenticated
export const buildTenancyMiddleware = tenancy
export const buildCsrfMiddleware = csrf
export const passport = _passport
export const jwt = require("jsonwebtoken")
// Strategies
passport.use(new LocalStrategy(local.options, local.authenticate))
passport.use(new JwtStrategy(jwt.options, jwt.authenticate))
_passport.use(new LocalStrategy(local.options, local.authenticate))
if (jwtPassport.options.secretOrKey) {
_passport.use(new JwtStrategy(jwtPassport.options, jwtPassport.authenticate))
} else {
logAlert("No JWT Secret supplied, cannot configure JWT strategy")
}
passport.serializeUser((user: User, done: any) => done(null, user))
_passport.serializeUser((user: User, done: any) => done(null, user))
passport.deserializeUser(async (user: User, done: any) => {
_passport.deserializeUser(async (user: User, done: any) => {
const db = getGlobalDB()
try {
@ -71,7 +93,7 @@ async function refreshOIDCAccessToken(
return new Promise(resolve => {
refresh.requestNewAccessToken(
Configs.OIDC,
Config.OIDC,
refreshToken,
(err: any, accessToken: string, refreshToken: any, params: any) => {
resolve({ err, accessToken, refreshToken, params })
@ -101,7 +123,7 @@ async function refreshGoogleAccessToken(
return new Promise(resolve => {
refresh.requestNewAccessToken(
Configs.GOOGLE,
Config.GOOGLE,
refreshToken,
(err: any, accessToken: string, refreshToken: string, params: any) => {
resolve({ err, accessToken, refreshToken, params })
@ -110,7 +132,7 @@ async function refreshGoogleAccessToken(
})
}
async function refreshOAuthToken(
export async function refreshOAuthToken(
refreshToken: string,
configType: string,
configId: string
@ -124,7 +146,7 @@ async function refreshOAuthToken(
let chosenConfig = {}
let refreshResponse
if (configType === Configs.OIDC) {
if (configType === Config.OIDC) {
// configId - retrieved from cookie.
chosenConfig = config.configs.filter((c: any) => c.uuid === configId)[0]
if (!chosenConfig) {
@ -147,7 +169,7 @@ async function refreshOAuthToken(
return refreshResponse
}
async function updateUserOAuth(userId: string, oAuthConfig: any) {
export async function updateUserOAuth(userId: string, oAuthConfig: any) {
const details = {
accessToken: oAuthConfig.accessToken,
refreshToken: oAuthConfig.refreshToken,
@ -174,23 +196,3 @@ async function updateUserOAuth(userId: string, oAuthConfig: any) {
console.error("Could not update OAuth details for current user", e)
}
}
export = {
buildAuthMiddleware: authenticated,
passport,
google,
oidc,
jwt: require("jsonwebtoken"),
buildTenancyMiddleware: tenancy,
auditLog,
authError,
buildCsrfMiddleware: csrf,
internalApi,
refreshOAuthToken,
updateUserOAuth,
ssoCallbackUrl,
adminOnly,
builderOnly,
builderOrAdmin,
joiValidator,
}
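With refreshOAuthToken and updateUserOAuth promoted to named exports, callers can renew and persist a token directly. A hedged sketch; everything except those two exports and Config is illustrative, including the import paths.

import { refreshOAuthToken, updateUserOAuth } from "../auth"
import { Config } from "../constants"

async function renewOidcToken(userId: string, refreshToken: string, configId: string) {
  const response = await refreshOAuthToken(refreshToken, Config.OIDC, configId)
  if (!response.err) {
    // the response carries accessToken/refreshToken, which updateUserOAuth persists
    await updateUserOAuth(userId, response)
  }
}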

View File

@ -0,0 +1 @@
export * from "./auth"

View File

@ -1,6 +1,6 @@
const redis = require("../redis/init")
const { doWithDB } = require("../db")
const { DocumentType } = require("../db/constants")
import { getAppClient } from "../redis/init"
import { doWithDB, DocumentType } from "../db"
import { Database } from "@budibase/types"
const AppState = {
INVALID: "invalid",
@ -10,17 +10,17 @@ const EXPIRY_SECONDS = 3600
/**
* The default populate app metadata function
*/
const populateFromDB = async appId => {
async function populateFromDB(appId: string) {
return doWithDB(
appId,
db => {
(db: Database) => {
return db.get(DocumentType.APP_METADATA)
},
{ skip_setup: true }
)
}
const isInvalid = metadata => {
function isInvalid(metadata?: { state: string }) {
return !metadata || metadata.state === AppState.INVALID
}
@ -31,15 +31,15 @@ const isInvalid = metadata => {
* @param {string} appId the id of the app to get metadata from.
* @returns {object} the app metadata.
*/
exports.getAppMetadata = async appId => {
const client = await redis.getAppClient()
export async function getAppMetadata(appId: string) {
const client = await getAppClient()
// try cache
let metadata = await client.get(appId)
if (!metadata) {
let expiry = EXPIRY_SECONDS
let expiry: number | undefined = EXPIRY_SECONDS
try {
metadata = await populateFromDB(appId)
} catch (err) {
} catch (err: any) {
// app DB left around, but no metadata, it is invalid
if (err && err.status === 404) {
metadata = { state: AppState.INVALID }
@ -74,11 +74,11 @@ exports.getAppMetadata = async appId => {
* @param newMetadata {object|undefined} optional - can simply provide the new metadata to update with.
* @return {Promise<void>} will respond with success when cache is updated.
*/
exports.invalidateAppMetadata = async (appId, newMetadata = null) => {
export async function invalidateAppMetadata(appId: string, newMetadata?: any) {
if (!appId) {
throw "Cannot invalidate if no app ID provided."
}
const client = await redis.getAppClient()
const client = await getAppClient()
await client.delete(appId)
if (newMetadata) {
await client.store(appId, newMetadata, EXPIRY_SECONDS)

View File

@ -1,16 +1,16 @@
import { getTenantId } from "../../context"
import redis from "../../redis/init"
import RedisWrapper from "../../redis"
import * as redis from "../../redis/init"
import { Client } from "../../redis"
function generateTenantKey(key: string) {
const tenantId = getTenantId()
return `${key}:${tenantId}`
}
export = class BaseCache {
client: RedisWrapper | undefined
export default class BaseCache {
client: Client | undefined
constructor(client: RedisWrapper | undefined = undefined) {
constructor(client: Client | undefined = undefined) {
this.client = client
}

View File

@ -1,30 +0,0 @@
const BaseCache = require("./base")
const GENERIC = new BaseCache()
exports.CacheKeys = {
CHECKLIST: "checklist",
INSTALLATION: "installation",
ANALYTICS_ENABLED: "analyticsEnabled",
UNIQUE_TENANT_ID: "uniqueTenantId",
EVENTS: "events",
BACKFILL_METADATA: "backfillMetadata",
EVENTS_RATE_LIMIT: "eventsRateLimit",
}
exports.TTL = {
ONE_MINUTE: 60,
ONE_HOUR: 3600,
ONE_DAY: 86400,
}
function performExport(funcName) {
return (...args) => GENERIC[funcName](...args)
}
exports.keys = performExport("keys")
exports.get = performExport("get")
exports.store = performExport("store")
exports.delete = performExport("delete")
exports.withCache = performExport("withCache")
exports.bustCache = performExport("bustCache")

View File

@ -0,0 +1,30 @@
const BaseCache = require("./base")
const GENERIC = new BaseCache.default()
export enum CacheKey {
CHECKLIST = "checklist",
INSTALLATION = "installation",
ANALYTICS_ENABLED = "analyticsEnabled",
UNIQUE_TENANT_ID = "uniqueTenantId",
EVENTS = "events",
BACKFILL_METADATA = "backfillMetadata",
EVENTS_RATE_LIMIT = "eventsRateLimit",
}
export enum TTL {
ONE_MINUTE = 60,
ONE_HOUR = 3600,
ONE_DAY = 86400,
}
function performExport(funcName: string) {
return (...args: any) => GENERIC[funcName](...args)
}
export const keys = performExport("keys")
export const get = performExport("get")
export const store = performExport("store")
export const destroy = performExport("delete")
export const withCache = performExport("withCache")
export const bustCache = performExport("bustCache")

View File

@ -0,0 +1,5 @@
export * as generic from "./generic"
export * as user from "./user"
export * as app from "./appMetadata"
export * as writethrough from "./writethrough"
export * from "./generic"

View File

@ -1,6 +1,6 @@
require("../../../tests/utilities/TestConfiguration")
require("../../../tests")
const { Writethrough } = require("../writethrough")
const { dangerousGetDB } = require("../../db")
const { getDB } = require("../../db")
const tk = require("timekeeper")
const START_DATE = Date.now()
@ -8,8 +8,8 @@ tk.freeze(START_DATE)
const DELAY = 5000
const db = dangerousGetDB("test")
const db2 = dangerousGetDB("test2")
const db = getDB("test")
const db2 = getDB("test2")
const writethrough = new Writethrough(db, DELAY), writethrough2 = new Writethrough(db2, DELAY)
describe("writethrough", () => {

View File

@ -1,15 +1,16 @@
const redis = require("../redis/init")
const { getTenantId, lookupTenantId, doWithGlobalDB } = require("../tenancy")
const env = require("../environment")
const accounts = require("../cloud/accounts")
import * as redis from "../redis/init"
import { getTenantId, lookupTenantId, doWithGlobalDB } from "../tenancy"
import env from "../environment"
import * as accounts from "../cloud/accounts"
import { Database } from "@budibase/types"
const EXPIRY_SECONDS = 3600
/**
* The default populate user function
*/
const populateFromDB = async (userId, tenantId) => {
const user = await doWithGlobalDB(tenantId, db => db.get(userId))
async function populateFromDB(userId: string, tenantId: string) {
const user = await doWithGlobalDB(tenantId, (db: Database) => db.get(userId))
user.budibaseAccess = true
if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) {
const account = await accounts.getAccount(user.email)
@ -31,7 +32,11 @@ const populateFromDB = async (userId, tenantId) => {
* @param {*} populateUser function to provide the user for re-caching. Defaults to the CouchDB lookup.
* @returns
*/
exports.getUser = async (userId, tenantId = null, populateUser = null) => {
export async function getUser(
userId: string,
tenantId?: string,
populateUser?: any
) {
if (!populateUser) {
populateUser = populateFromDB
}
@ -47,7 +52,7 @@ exports.getUser = async (userId, tenantId = null, populateUser = null) => {
let user = await client.get(userId)
if (!user) {
user = await populateUser(userId, tenantId)
client.store(userId, user, EXPIRY_SECONDS)
await client.store(userId, user, EXPIRY_SECONDS)
}
if (user && !user.tenantId && tenantId) {
// make sure the tenant ID is always correct/set
@ -56,7 +61,7 @@ exports.getUser = async (userId, tenantId = null, populateUser = null) => {
return user
}
exports.invalidateUser = async userId => {
export async function invalidateUser(userId: string) {
const client = await redis.getUserClient()
await client.delete(userId)
}
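Because the populate function is injectable, tests can bypass CouchDB entirely. A hedged sketch; the stub and IDs are illustrative.

import { getUser } from "./user"

// Stand-in for the CouchDB lookup
async function fakePopulate(userId: string) {
  return { _id: userId, email: "user@example.com", budibaseAccess: true }
}

async function example() {
  // the third argument overrides populateFromDB
  return getUser("us_123", "default", fakePopulate)
}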

View File

@ -1,6 +1,7 @@
import BaseCache from "./base"
import { getWritethroughClient } from "../redis/init"
import { logWarn } from "../logging"
import { Database } from "@budibase/types"
const DEFAULT_WRITE_RATE_MS = 10000
let CACHE: BaseCache | null = null
@ -18,7 +19,7 @@ async function getCache() {
return CACHE
}
function makeCacheKey(db: PouchDB.Database, key: string) {
function makeCacheKey(db: Database, key: string) {
return db.name + key
}
@ -27,7 +28,7 @@ function makeCacheItem(doc: any, lastWrite: number | null = null): CacheItem {
}
export async function put(
db: PouchDB.Database,
db: Database,
doc: any,
writeRateMs: number = DEFAULT_WRITE_RATE_MS
) {
@ -63,7 +64,7 @@ export async function put(
return { ok: true, id: output._id, rev: output._rev }
}
export async function get(db: PouchDB.Database, id: string): Promise<any> {
export async function get(db: Database, id: string): Promise<any> {
const cache = await getCache()
const cacheKey = makeCacheKey(db, id)
let cacheItem: CacheItem = await cache.get(cacheKey)
@ -76,7 +77,7 @@ export async function get(db: PouchDB.Database, id: string): Promise<any> {
}
export async function remove(
db: PouchDB.Database,
db: Database,
docOrId: any,
rev?: any
): Promise<void> {
@ -94,13 +95,10 @@ export async function remove(
}
export class Writethrough {
db: PouchDB.Database
db: Database
writeRateMs: number
constructor(
db: PouchDB.Database,
writeRateMs: number = DEFAULT_WRITE_RATE_MS
) {
constructor(db: Database, writeRateMs: number = DEFAULT_WRITE_RATE_MS) {
this.db = db
this.writeRateMs = writeRateMs
}
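A minimal sketch of the module-level helpers, in the spirit of the test above; the database name is a placeholder.

import { put, get } from "./writethrough"
import { getDB } from "../db"

const db = getDB("example-db")

async function saveAndRead(doc: any) {
  const { id } = await put(db, doc, 10000) // writes are buffered per the writeRateMs window
  return get(db, id)
}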

View File

@ -1,6 +1,6 @@
import API from "./api"
import env from "../environment"
import { Headers } from "../constants"
import { Header } from "../constants"
import { CloudAccount } from "@budibase/types"
const api = new API(env.ACCOUNT_PORTAL_URL)
@ -14,7 +14,7 @@ export const getAccount = async (
const response = await api.post(`/api/accounts/search`, {
body: payload,
headers: {
[Headers.API_KEY]: env.ACCOUNT_PORTAL_API_KEY,
[Header.API_KEY]: env.ACCOUNT_PORTAL_API_KEY,
},
})
@ -35,7 +35,7 @@ export const getAccountByTenantId = async (
const response = await api.post(`/api/accounts/search`, {
body: payload,
headers: {
[Headers.API_KEY]: env.ACCOUNT_PORTAL_API_KEY,
[Header.API_KEY]: env.ACCOUNT_PORTAL_API_KEY,
},
})
@ -50,7 +50,7 @@ export const getAccountByTenantId = async (
export const getStatus = async () => {
const response = await api.get(`/api/status`, {
headers: {
[Headers.API_KEY]: env.ACCOUNT_PORTAL_API_KEY,
[Header.API_KEY]: env.ACCOUNT_PORTAL_API_KEY,
},
})
const json = await response.json()

View File

@ -1,42 +0,0 @@
const fetch = require("node-fetch")
class API {
constructor(host) {
this.host = host
}
apiCall =
method =>
async (url = "", options = {}) => {
if (!options.headers) {
options.headers = {}
}
if (!options.headers["Content-Type"]) {
options.headers = {
"Content-Type": "application/json",
Accept: "application/json",
...options.headers,
}
}
let json = options.headers["Content-Type"] === "application/json"
const requestOptions = {
method: method,
body: json ? JSON.stringify(options.body) : options.body,
headers: options.headers,
// TODO: See if this is necessary
credentials: "include",
}
return await fetch(`${this.host}${url}`, requestOptions)
}
post = this.apiCall("POST")
get = this.apiCall("GET")
patch = this.apiCall("PATCH")
del = this.apiCall("DELETE")
put = this.apiCall("PUT")
}
module.exports = API

View File

@ -0,0 +1,59 @@
import fetch from "node-fetch"
import * as logging from "../logging"
export default class API {
host: string
constructor(host: string) {
this.host = host
}
async apiCall(method: string, url: string, options: any = {}) {
if (!options.headers) {
options.headers = {}
}
if (!options.headers["Content-Type"]) {
options.headers = {
"Content-Type": "application/json",
Accept: "application/json",
...options.headers,
}
}
let json = options.headers["Content-Type"] === "application/json"
// add x-budibase-correlation-id header
logging.correlation.setHeader(options.headers)
const requestOptions = {
method: method,
body: json ? JSON.stringify(options.body) : options.body,
headers: options.headers,
// TODO: See if this is necessary
credentials: "include",
}
return await fetch(`${this.host}${url}`, requestOptions)
}
async post(url: string, options?: any) {
return this.apiCall("POST", url, options)
}
async get(url: string, options?: any) {
return this.apiCall("GET", url, options)
}
async patch(url: string, options?: any) {
return this.apiCall("PATCH", url, options)
}
async del(url: string, options?: any) {
return this.apiCall("DELETE", url, options)
}
async put(url: string, options?: any) {
return this.apiCall("PUT", url, options)
}
}
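Usage sketch for the rewritten client; every call now also stamps the x-budibase-correlation-id header via logging.correlation.setHeader. The host matches the accounts client's default portal URL; the route is illustrative.

import API from "./api"

const api = new API("https://account.budibase.app")

async function fetchStatus() {
  const response = await api.get("/api/status", {})
  return response.json()
}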

View File

@ -1,650 +0,0 @@
const util = require("util")
const assert = require("assert")
const wrapEmitter = require("emitter-listener")
const async_hooks = require("async_hooks")
const CONTEXTS_SYMBOL = "cls@contexts"
const ERROR_SYMBOL = "error@context"
const DEBUG_CLS_HOOKED = process.env.DEBUG_CLS_HOOKED
let currentUid = -1
module.exports = {
getNamespace: getNamespace,
createNamespace: createNamespace,
destroyNamespace: destroyNamespace,
reset: reset,
ERROR_SYMBOL: ERROR_SYMBOL,
}
function Namespace(name) {
this.name = name
// changed in 2.7: no default context
this.active = null
this._set = []
this.id = null
this._contexts = new Map()
this._indent = 0
this._hook = null
}
Namespace.prototype.set = function set(key, value) {
if (!this.active) {
throw new Error(
"No context available. ns.run() or ns.bind() must be called first."
)
}
this.active[key] = value
if (DEBUG_CLS_HOOKED) {
const indentStr = " ".repeat(this._indent < 0 ? 0 : this._indent)
debug2(
indentStr +
"CONTEXT-SET KEY:" +
key +
"=" +
value +
" in ns:" +
this.name +
" currentUid:" +
currentUid +
" active:" +
util.inspect(this.active, { showHidden: true, depth: 2, colors: true })
)
}
return value
}
Namespace.prototype.get = function get(key) {
if (!this.active) {
if (DEBUG_CLS_HOOKED) {
const asyncHooksCurrentId = async_hooks.currentId()
const triggerId = async_hooks.triggerAsyncId()
const indentStr = " ".repeat(this._indent < 0 ? 0 : this._indent)
debug2(
`${indentStr}CONTEXT-GETTING KEY NO ACTIVE NS: (${this.name}) ${key}=undefined currentUid:${currentUid} asyncHooksCurrentId:${asyncHooksCurrentId} triggerId:${triggerId} len:${this._set.length}`
)
}
return undefined
}
if (DEBUG_CLS_HOOKED) {
const asyncHooksCurrentId = async_hooks.executionAsyncId()
const triggerId = async_hooks.triggerAsyncId()
const indentStr = " ".repeat(this._indent < 0 ? 0 : this._indent)
debug2(
indentStr +
"CONTEXT-GETTING KEY:" +
key +
"=" +
this.active[key] +
" (" +
this.name +
") currentUid:" +
currentUid +
" active:" +
util.inspect(this.active, { showHidden: true, depth: 2, colors: true })
)
debug2(
`${indentStr}CONTEXT-GETTING KEY: (${this.name}) ${key}=${
this.active[key]
} currentUid:${currentUid} asyncHooksCurrentId:${asyncHooksCurrentId} triggerId:${triggerId} len:${
this._set.length
} active:${util.inspect(this.active)}`
)
}
return this.active[key]
}
Namespace.prototype.createContext = function createContext() {
// Prototype inherit existing context if created a new child context within existing context.
let context = Object.create(this.active ? this.active : Object.prototype)
context._ns_name = this.name
context.id = currentUid
if (DEBUG_CLS_HOOKED) {
const asyncHooksCurrentId = async_hooks.executionAsyncId()
const triggerId = async_hooks.triggerAsyncId()
const indentStr = " ".repeat(this._indent < 0 ? 0 : this._indent)
debug2(
`${indentStr}CONTEXT-CREATED Context: (${
this.name
}) currentUid:${currentUid} asyncHooksCurrentId:${asyncHooksCurrentId} triggerId:${triggerId} len:${
this._set.length
} context:${util.inspect(context, {
showHidden: true,
depth: 2,
colors: true,
})}`
)
}
return context
}
Namespace.prototype.run = function run(fn) {
let context = this.createContext()
this.enter(context)
try {
if (DEBUG_CLS_HOOKED) {
const triggerId = async_hooks.triggerAsyncId()
const asyncHooksCurrentId = async_hooks.executionAsyncId()
const indentStr = " ".repeat(this._indent < 0 ? 0 : this._indent)
debug2(
`${indentStr}CONTEXT-RUN BEGIN: (${
this.name
}) currentUid:${currentUid} triggerId:${triggerId} asyncHooksCurrentId:${asyncHooksCurrentId} len:${
this._set.length
} context:${util.inspect(context)}`
)
}
fn(context)
return context
} catch (exception) {
if (exception) {
exception[ERROR_SYMBOL] = context
}
throw exception
} finally {
if (DEBUG_CLS_HOOKED) {
const triggerId = async_hooks.triggerAsyncId()
const asyncHooksCurrentId = async_hooks.executionAsyncId()
const indentStr = " ".repeat(this._indent < 0 ? 0 : this._indent)
debug2(
`${indentStr}CONTEXT-RUN END: (${
this.name
}) currentUid:${currentUid} triggerId:${triggerId} asyncHooksCurrentId:${asyncHooksCurrentId} len:${
this._set.length
} ${util.inspect(context)}`
)
}
this.exit(context)
}
}
Namespace.prototype.runAndReturn = function runAndReturn(fn) {
let value
this.run(function (context) {
value = fn(context)
})
return value
}
/**
* Uses global Promise and assumes Promise is cls friendly or wrapped already.
* @param {function} fn
* @returns {*}
*/
Namespace.prototype.runPromise = function runPromise(fn) {
let context = this.createContext()
this.enter(context)
let promise = fn(context)
if (!promise || !promise.then || !promise.catch) {
throw new Error("fn must return a promise.")
}
if (DEBUG_CLS_HOOKED) {
debug2(
"CONTEXT-runPromise BEFORE: (" +
this.name +
") currentUid:" +
currentUid +
" len:" +
this._set.length +
" " +
util.inspect(context)
)
}
return promise
.then(result => {
if (DEBUG_CLS_HOOKED) {
debug2(
"CONTEXT-runPromise AFTER then: (" +
this.name +
") currentUid:" +
currentUid +
" len:" +
this._set.length +
" " +
util.inspect(context)
)
}
this.exit(context)
return result
})
.catch(err => {
err[ERROR_SYMBOL] = context
if (DEBUG_CLS_HOOKED) {
debug2(
"CONTEXT-runPromise AFTER catch: (" +
this.name +
") currentUid:" +
currentUid +
" len:" +
this._set.length +
" " +
util.inspect(context)
)
}
this.exit(context)
throw err
})
}
Namespace.prototype.bind = function bindFactory(fn, context) {
if (!context) {
if (!this.active) {
context = this.createContext()
} else {
context = this.active
}
}
let self = this
return function clsBind() {
self.enter(context)
try {
return fn.apply(this, arguments)
} catch (exception) {
if (exception) {
exception[ERROR_SYMBOL] = context
}
throw exception
} finally {
self.exit(context)
}
}
}
Namespace.prototype.enter = function enter(context) {
assert.ok(context, "context must be provided for entering")
if (DEBUG_CLS_HOOKED) {
const asyncHooksCurrentId = async_hooks.executionAsyncId()
const triggerId = async_hooks.triggerAsyncId()
const indentStr = " ".repeat(this._indent < 0 ? 0 : this._indent)
debug2(
`${indentStr}CONTEXT-ENTER: (${
this.name
}) currentUid:${currentUid} triggerId:${triggerId} asyncHooksCurrentId:${asyncHooksCurrentId} len:${
this._set.length
} ${util.inspect(context)}`
)
}
this._set.push(this.active)
this.active = context
}
Namespace.prototype.exit = function exit(context) {
assert.ok(context, "context must be provided for exiting")
if (DEBUG_CLS_HOOKED) {
const asyncHooksCurrentId = async_hooks.executionAsyncId()
const triggerId = async_hooks.triggerAsyncId()
const indentStr = " ".repeat(this._indent < 0 ? 0 : this._indent)
debug2(
`${indentStr}CONTEXT-EXIT: (${
this.name
}) currentUid:${currentUid} triggerId:${triggerId} asyncHooksCurrentId:${asyncHooksCurrentId} len:${
this._set.length
} ${util.inspect(context)}`
)
}
// Fast path for most exits that are at the top of the stack
if (this.active === context) {
assert.ok(this._set.length, "can't remove top context")
this.active = this._set.pop()
return
}
// Fast search in the stack using lastIndexOf
let index = this._set.lastIndexOf(context)
if (index < 0) {
if (DEBUG_CLS_HOOKED) {
debug2(
"??ERROR?? context exiting but not entered - ignoring: " +
util.inspect(context)
)
}
assert.ok(
index >= 0,
"context not currently entered; can't exit. \n" +
util.inspect(this) +
"\n" +
util.inspect(context)
)
} else {
assert.ok(index, "can't remove top context")
this._set.splice(index, 1)
}
}
Namespace.prototype.bindEmitter = function bindEmitter(emitter) {
assert.ok(
emitter.on && emitter.addListener && emitter.emit,
"can only bind real EEs"
)
let namespace = this
let thisSymbol = "context@" + this.name
// Capture the context active at the time the emitter is bound.
function attach(listener) {
if (!listener) {
return
}
if (!listener[CONTEXTS_SYMBOL]) {
listener[CONTEXTS_SYMBOL] = Object.create(null)
}
listener[CONTEXTS_SYMBOL][thisSymbol] = {
namespace: namespace,
context: namespace.active,
}
}
// At emit time, bind the listener within the correct context.
function bind(unwrapped) {
if (!(unwrapped && unwrapped[CONTEXTS_SYMBOL])) {
return unwrapped
}
let wrapped = unwrapped
let unwrappedContexts = unwrapped[CONTEXTS_SYMBOL]
Object.keys(unwrappedContexts).forEach(function (name) {
let thunk = unwrappedContexts[name]
wrapped = thunk.namespace.bind(wrapped, thunk.context)
})
return wrapped
}
wrapEmitter(emitter, attach, bind)
}
/**
* If an error comes out of a namespace, it will have a context attached to it.
* This function knows how to find it.
*
* @param {Error} exception Possibly annotated error.
*/
Namespace.prototype.fromException = function fromException(exception) {
return exception[ERROR_SYMBOL]
}
function getNamespace(name) {
return process.namespaces[name]
}
function createNamespace(name) {
assert.ok(name, "namespace must be given a name.")
if (DEBUG_CLS_HOOKED) {
debug2(`NS-CREATING NAMESPACE (${name})`)
}
let namespace = new Namespace(name)
namespace.id = currentUid
const hook = async_hooks.createHook({
init(asyncId, type, triggerId, resource) {
currentUid = async_hooks.executionAsyncId()
//CHAIN Parent's Context onto child if none exists. This is needed to pass net-events.spec
// let initContext = namespace.active;
// if(!initContext && triggerId) {
// let parentContext = namespace._contexts.get(triggerId);
// if (parentContext) {
// namespace.active = parentContext;
// namespace._contexts.set(currentUid, parentContext);
// if (DEBUG_CLS_HOOKED) {
// const indentStr = ' '.repeat(namespace._indent < 0 ? 0 : namespace._indent);
// debug2(`${indentStr}INIT [${type}] (${name}) WITH PARENT CONTEXT asyncId:${asyncId} currentUid:${currentUid} triggerId:${triggerId} active:${util.inspect(namespace.active, true)} resource:${resource}`);
// }
// } else if (DEBUG_CLS_HOOKED) {
// const indentStr = ' '.repeat(namespace._indent < 0 ? 0 : namespace._indent);
// debug2(`${indentStr}INIT [${type}] (${name}) MISSING CONTEXT asyncId:${asyncId} currentUid:${currentUid} triggerId:${triggerId} active:${util.inspect(namespace.active, true)} resource:${resource}`);
// }
// }else {
// namespace._contexts.set(currentUid, namespace.active);
// if (DEBUG_CLS_HOOKED) {
// const indentStr = ' '.repeat(namespace._indent < 0 ? 0 : namespace._indent);
// debug2(`${indentStr}INIT [${type}] (${name}) asyncId:${asyncId} currentUid:${currentUid} triggerId:${triggerId} active:${util.inspect(namespace.active, true)} resource:${resource}`);
// }
// }
if (namespace.active) {
namespace._contexts.set(asyncId, namespace.active)
if (DEBUG_CLS_HOOKED) {
const indentStr = " ".repeat(
namespace._indent < 0 ? 0 : namespace._indent
)
debug2(
`${indentStr}INIT [${type}] (${name}) asyncId:${asyncId} currentUid:${currentUid} triggerId:${triggerId} active:${util.inspect(
namespace.active,
{ showHidden: true, depth: 2, colors: true }
)} resource:${resource}`
)
}
} else if (currentUid === 0) {
// CurrentId will be 0 when triggered from C++. Promise events
// https://github.com/nodejs/node/blob/master/doc/api/async_hooks.md#triggerid
const triggerId = async_hooks.triggerAsyncId()
const triggerIdContext = namespace._contexts.get(triggerId)
if (triggerIdContext) {
namespace._contexts.set(asyncId, triggerIdContext)
if (DEBUG_CLS_HOOKED) {
const indentStr = " ".repeat(
namespace._indent < 0 ? 0 : namespace._indent
)
debug2(
`${indentStr}INIT USING CONTEXT FROM TRIGGERID [${type}] (${name}) asyncId:${asyncId} currentUid:${currentUid} triggerId:${triggerId} active:${util.inspect(
namespace.active,
{ showHidden: true, depth: 2, colors: true }
)} resource:${resource}`
)
}
} else if (DEBUG_CLS_HOOKED) {
const indentStr = " ".repeat(
namespace._indent < 0 ? 0 : namespace._indent
)
debug2(
`${indentStr}INIT MISSING CONTEXT [${type}] (${name}) asyncId:${asyncId} currentUid:${currentUid} triggerId:${triggerId} active:${util.inspect(
namespace.active,
{ showHidden: true, depth: 2, colors: true }
)} resource:${resource}`
)
}
}
if (DEBUG_CLS_HOOKED && type === "PROMISE") {
debug2(util.inspect(resource, { showHidden: true }))
const parentId = resource.parentId
const indentStr = " ".repeat(
namespace._indent < 0 ? 0 : namespace._indent
)
debug2(
`${indentStr}INIT RESOURCE-PROMISE [${type}] (${name}) parentId:${parentId} asyncId:${asyncId} currentUid:${currentUid} triggerId:${triggerId} active:${util.inspect(
namespace.active,
{ showHidden: true, depth: 2, colors: true }
)} resource:${resource}`
)
}
},
before(asyncId) {
currentUid = async_hooks.executionAsyncId()
let context
/*
if(currentUid === 0){
// CurrentId will be 0 when triggered from C++. Promise events
// https://github.com/nodejs/node/blob/master/doc/api/async_hooks.md#triggerid
//const triggerId = async_hooks.triggerAsyncId();
context = namespace._contexts.get(asyncId); // || namespace._contexts.get(triggerId);
}else{
context = namespace._contexts.get(currentUid);
}
*/
//HACK to work with promises until they are fixed in node > 8.1.1
context =
namespace._contexts.get(asyncId) || namespace._contexts.get(currentUid)
if (context) {
if (DEBUG_CLS_HOOKED) {
const triggerId = async_hooks.triggerAsyncId()
const indentStr = " ".repeat(
namespace._indent < 0 ? 0 : namespace._indent
)
debug2(
`${indentStr}BEFORE (${name}) asyncId:${asyncId} currentUid:${currentUid} triggerId:${triggerId} active:${util.inspect(
namespace.active,
{ showHidden: true, depth: 2, colors: true }
)} context:${util.inspect(context)}`
)
namespace._indent += 2
}
namespace.enter(context)
} else if (DEBUG_CLS_HOOKED) {
const triggerId = async_hooks.triggerAsyncId()
const indentStr = " ".repeat(
namespace._indent < 0 ? 0 : namespace._indent
)
debug2(
`${indentStr}BEFORE MISSING CONTEXT (${name}) asyncId:${asyncId} currentUid:${currentUid} triggerId:${triggerId} active:${util.inspect(
namespace.active,
{ showHidden: true, depth: 2, colors: true }
)} namespace._contexts:${util.inspect(namespace._contexts, {
showHidden: true,
depth: 2,
colors: true,
})}`
)
namespace._indent += 2
}
},
after(asyncId) {
currentUid = async_hooks.executionAsyncId()
let context // = namespace._contexts.get(currentUid);
/*
if(currentUid === 0){
// CurrentId will be 0 when triggered from C++. Promise events
// https://github.com/nodejs/node/blob/master/doc/api/async_hooks.md#triggerid
//const triggerId = async_hooks.triggerAsyncId();
context = namespace._contexts.get(asyncId); // || namespace._contexts.get(triggerId);
}else{
context = namespace._contexts.get(currentUid);
}
*/
//HACK to work with promises until they are fixed in node > 8.1.1
context =
namespace._contexts.get(asyncId) || namespace._contexts.get(currentUid)
if (context) {
if (DEBUG_CLS_HOOKED) {
const triggerId = async_hooks.triggerAsyncId()
namespace._indent -= 2
const indentStr = " ".repeat(
namespace._indent < 0 ? 0 : namespace._indent
)
debug2(
`${indentStr}AFTER (${name}) asyncId:${asyncId} currentUid:${currentUid} triggerId:${triggerId} active:${util.inspect(
namespace.active,
{ showHidden: true, depth: 2, colors: true }
)} context:${util.inspect(context)}`
)
}
namespace.exit(context)
} else if (DEBUG_CLS_HOOKED) {
const triggerId = async_hooks.triggerAsyncId()
namespace._indent -= 2
const indentStr = " ".repeat(
namespace._indent < 0 ? 0 : namespace._indent
)
debug2(
`${indentStr}AFTER MISSING CONTEXT (${name}) asyncId:${asyncId} currentUid:${currentUid} triggerId:${triggerId} active:${util.inspect(
namespace.active,
{ showHidden: true, depth: 2, colors: true }
)} context:${util.inspect(context)}`
)
}
},
destroy(asyncId) {
currentUid = async_hooks.executionAsyncId()
if (DEBUG_CLS_HOOKED) {
const triggerId = async_hooks.triggerAsyncId()
const indentStr = " ".repeat(
namespace._indent < 0 ? 0 : namespace._indent
)
debug2(
`${indentStr}DESTROY (${name}) currentUid:${currentUid} asyncId:${asyncId} triggerId:${triggerId} active:${util.inspect(
namespace.active,
{ showHidden: true, depth: 2, colors: true }
)} context:${util.inspect(namespace._contexts.get(currentUid))}`
)
}
namespace._contexts.delete(asyncId)
},
})
hook.enable()
namespace._hook = hook
process.namespaces[name] = namespace
return namespace
}
function destroyNamespace(name) {
let namespace = getNamespace(name)
assert.ok(namespace, "can't delete nonexistent namespace! \"" + name + '"')
assert.ok(
namespace.id,
"don't assign to process.namespaces directly! " + util.inspect(namespace)
)
namespace._hook.disable()
namespace._contexts = null
process.namespaces[name] = null
}
function reset() {
// must unregister async listeners
if (process.namespaces) {
Object.keys(process.namespaces).forEach(function (name) {
destroyNamespace(name)
})
}
process.namespaces = Object.create(null)
}
process.namespaces = process.namespaces || {}
//const fs = require('fs');
function debug2(...args) {
if (DEBUG_CLS_HOOKED) {
//fs.writeSync(1, `${util.format(...args)}\n`);
process._rawDebug(`${util.format(...args)}`)
}
}
/*function getFunctionName(fn) {
if (!fn) {
return fn;
}
if (typeof fn === 'function') {
if (fn.name) {
return fn.name;
}
return (fn.toString().trim().match(/^function\s*([^\s(]+)/) || [])[1];
} else if (fn.constructor && fn.constructor.name) {
return fn.constructor.name;
}
}*/

View File

@ -1,44 +0,0 @@
exports.UserStatus = {
ACTIVE: "active",
INACTIVE: "inactive",
}
exports.Cookies = {
CurrentApp: "budibase:currentapp",
Auth: "budibase:auth",
Init: "budibase:init",
ACCOUNT_RETURN_URL: "budibase:account:returnurl",
DatasourceAuth: "budibase:datasourceauth",
OIDC_CONFIG: "budibase:oidc:config",
}
exports.Headers = {
API_KEY: "x-budibase-api-key",
LICENSE_KEY: "x-budibase-license-key",
API_VER: "x-budibase-api-version",
APP_ID: "x-budibase-app-id",
TYPE: "x-budibase-type",
PREVIEW_ROLE: "x-budibase-role",
TENANT_ID: "x-budibase-tenant-id",
TOKEN: "x-budibase-token",
CSRF_TOKEN: "x-csrf-token",
}
exports.GlobalRoles = {
OWNER: "owner",
ADMIN: "admin",
BUILDER: "builder",
WORKSPACE_MANAGER: "workspace_manager",
}
exports.Configs = {
SETTINGS: "settings",
ACCOUNT: "account",
SMTP: "smtp",
GOOGLE: "google",
OIDC: "oidc",
OIDC_LOGOS: "logos_oidc",
}
exports.MAX_VALID_DATE = new Date(2147483647000)
exports.DEFAULT_TENANT_ID = "default"

View File

@ -21,6 +21,7 @@ export enum ViewName {
ACCOUNT_BY_EMAIL = "account_by_email",
PLATFORM_USERS_LOWERCASE = "platform_users_lowercase",
USER_BY_GROUP = "by_group_user",
APP_BACKUP_BY_TRIGGER = "by_trigger",
}
export const DeprecatedViews = {
@ -30,6 +31,10 @@ export const DeprecatedViews = {
],
}
export enum InternalTable {
USER_METADATA = "ta_users",
}
export enum DocumentType {
USER = "us",
GROUP = "gr",
@ -46,6 +51,23 @@ export enum DocumentType {
AUTOMATION_LOG = "log_au",
ACCOUNT_METADATA = "acc_metadata",
PLUGIN = "plg",
DATASOURCE = "datasource",
DATASOURCE_PLUS = "datasource_plus",
APP_BACKUP = "backup",
TABLE = "ta",
ROW = "ro",
AUTOMATION = "au",
LINK = "li",
WEBHOOK = "wh",
INSTANCE = "inst",
LAYOUT = "layout",
SCREEN = "screen",
QUERY = "query",
DEPLOYMENTS = "deployments",
METADATA = "metadata",
MEM_VIEW = "view",
USER_FLAG = "flag",
AUTOMATION_METADATA = "meta_au",
}
export const StaticDatabases = {
@ -55,6 +77,7 @@ export const StaticDatabases = {
apiKeys: "apikeys",
usageQuota: "usage_quota",
licenseInfo: "license_info",
environmentVariables: "environmentvariables",
},
},
// contains information about tenancy and so on
@ -70,3 +93,4 @@ export const StaticDatabases = {
export const APP_PREFIX = DocumentType.APP + SEPARATOR
export const APP_DEV = DocumentType.APP_DEV + SEPARATOR
export const APP_DEV_PREFIX = APP_DEV
export const BUDIBASE_DATASOURCE_TYPE = "budibase"
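These type prefixes combine with SEPARATOR to form document IDs; InternalTable.USER_METADATA above ("ta_users") is just the TABLE prefix plus "users", which suggests SEPARATOR is "_". A sketch; the helper, ID, and import path are illustrative:

import { DocumentType, SEPARATOR } from "./constants"

function docId(type: DocumentType, id: string) {
  return `${type}${SEPARATOR}${id}`
}

const tableDocId = docId(DocumentType.TABLE, "1b2c3d") // "ta_1b2c3d", assuming SEPARATOR is "_"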

View File

@ -0,0 +1,2 @@
export * from "./db"
export * from "./misc"

View File

@ -0,0 +1,45 @@
export enum UserStatus {
ACTIVE = "active",
INACTIVE = "inactive",
}
export enum Cookie {
CurrentApp = "budibase:currentapp",
Auth = "budibase:auth",
Init = "budibase:init",
ACCOUNT_RETURN_URL = "budibase:account:returnurl",
DatasourceAuth = "budibase:datasourceauth",
OIDC_CONFIG = "budibase:oidc:config",
}
export enum Header {
API_KEY = "x-budibase-api-key",
LICENSE_KEY = "x-budibase-license-key",
API_VER = "x-budibase-api-version",
APP_ID = "x-budibase-app-id",
TYPE = "x-budibase-type",
PREVIEW_ROLE = "x-budibase-role",
TENANT_ID = "x-budibase-tenant-id",
TOKEN = "x-budibase-token",
CSRF_TOKEN = "x-csrf-token",
CORRELATION_ID = "x-budibase-correlation-id",
}
export enum GlobalRole {
OWNER = "owner",
ADMIN = "admin",
BUILDER = "builder",
WORKSPACE_MANAGER = "workspace_manager",
}
export enum Config {
SETTINGS = "settings",
ACCOUNT = "account",
SMTP = "smtp",
GOOGLE = "google",
OIDC = "oidc",
OIDC_LOGOS = "logos_oidc",
}
export const MAX_VALID_DATE = new Date(2147483647000)
export const DEFAULT_TENANT_ID = "default"

View File

@ -0,0 +1,14 @@
import { AsyncLocalStorage } from "async_hooks"
import { ContextMap } from "./mainContext"
export default class Context {
static storage = new AsyncLocalStorage<ContextMap>()
static run(context: ContextMap, func: any) {
return Context.storage.run(context, () => func())
}
static get(): ContextMap {
return Context.storage.getStore() as ContextMap
}
}
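This class replaces the deleted cls-hooked namespace machinery with Node's built-in AsyncLocalStorage. A hedged sketch of the run/get round trip; the import paths and the ContextMap field shown are illustrative.

import Context from "./Context"
import { ContextMap } from "./mainContext"

async function withTenant(tenantId: string, task: () => Promise<any>) {
  const context = { tenantId } as ContextMap // illustrative shape
  return Context.run(context, async () => {
    // the store is visible anywhere down the async call chain, no explicit passing
    const current = Context.get()
    console.log(current?.tenantId)
    return task()
  })
}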

View File

@ -1,47 +0,0 @@
const cls = require("../clshooked")
const { newid } = require("../hashing")
const REQUEST_ID_KEY = "requestId"
const MAIN_CTX = cls.createNamespace("main")
function getContextStorage(namespace) {
if (namespace && namespace.active) {
let contextData = namespace.active
delete contextData.id
delete contextData._ns_name
return contextData
}
return {}
}
class FunctionContext {
static run(callback) {
return MAIN_CTX.runAndReturn(async () => {
const namespaceId = newid()
MAIN_CTX.set(REQUEST_ID_KEY, namespaceId)
const namespace = cls.createNamespace(namespaceId)
let response = await namespace.runAndReturn(callback)
cls.destroyNamespace(namespaceId)
return response
})
}
static setOnContext(key, value) {
const namespaceId = MAIN_CTX.get(REQUEST_ID_KEY)
const namespace = cls.getNamespace(namespaceId)
namespace.set(key, value)
}
static getFromContext(key) {
const namespaceId = MAIN_CTX.get(REQUEST_ID_KEY)
const namespace = cls.getNamespace(namespaceId)
const context = getContextStorage(namespace)
if (context) {
return context[key]
} else {
return null
}
}
}
module.exports = FunctionContext

View File

@ -1,17 +0,0 @@
export enum ContextKey {
TENANT_ID = "tenantId",
GLOBAL_DB = "globalDb",
APP_ID = "appId",
IDENTITY = "identity",
// whatever the request app DB was
CURRENT_DB = "currentDb",
// get the prod app DB from the request
PROD_DB = "prodDb",
// get the dev app DB from the request
DEV_DB = "devDb",
DB_OPTS = "dbOpts",
// check if something else is using the context, don't close DB
TENANCY_IN_USE = "tenancyInUse",
APP_IN_USE = "appInUse",
IDENTITY_IN_USE = "identityInUse",
}

View File

@ -1,15 +1,19 @@
const { getGlobalUserParams, getAllApps } = require("../db/utils")
const { doWithDB } = require("../db")
const { doWithGlobalDB } = require("../tenancy")
const { StaticDatabases } = require("../db/constants")
import {
getGlobalUserParams,
getAllApps,
doWithDB,
StaticDatabases,
} from "../db"
import { doWithGlobalDB } from "../tenancy"
import { App, Tenants, User, Database } from "@budibase/types"
const TENANT_DOC = StaticDatabases.PLATFORM_INFO.docs.tenants
const PLATFORM_INFO_DB = StaticDatabases.PLATFORM_INFO.name
const removeTenantFromInfoDB = async tenantId => {
async function removeTenantFromInfoDB(tenantId: string) {
try {
await doWithDB(PLATFORM_INFO_DB, async infoDb => {
let tenants = await infoDb.get(TENANT_DOC)
await doWithDB(PLATFORM_INFO_DB, async (infoDb: Database) => {
const tenants = (await infoDb.get(TENANT_DOC)) as Tenants
tenants.tenantIds = tenants.tenantIds.filter(id => id !== tenantId)
await infoDb.put(tenants)
@ -20,14 +24,14 @@ const removeTenantFromInfoDB = async tenantId => {
}
}
exports.removeUserFromInfoDB = async dbUser => {
await doWithDB(PLATFORM_INFO_DB, async infoDb => {
const keys = [dbUser._id, dbUser.email]
export async function removeUserFromInfoDB(dbUser: User) {
await doWithDB(PLATFORM_INFO_DB, async (infoDb: Database) => {
const keys = [dbUser._id!, dbUser.email]
const userDocs = await infoDb.allDocs({
keys,
include_docs: true,
})
const toDelete = userDocs.rows.map(row => {
const toDelete = userDocs.rows.map((row: any) => {
return {
...row.doc,
_deleted: true,
@ -37,18 +41,18 @@ exports.removeUserFromInfoDB = async dbUser => {
})
}
const removeUsersFromInfoDB = async tenantId => {
return doWithGlobalDB(tenantId, async db => {
async function removeUsersFromInfoDB(tenantId: string) {
return doWithGlobalDB(tenantId, async (db: any) => {
try {
const allUsers = await db.allDocs(
getGlobalUserParams(null, {
include_docs: true,
})
)
await doWithDB(PLATFORM_INFO_DB, async infoDb => {
const allEmails = allUsers.rows.map(row => row.doc.email)
await doWithDB(PLATFORM_INFO_DB, async (infoDb: any) => {
const allEmails = allUsers.rows.map((row: any) => row.doc.email)
// get the id docs
let keys = allUsers.rows.map(row => row.id)
let keys = allUsers.rows.map((row: any) => row.id)
// and the email docs
keys = keys.concat(allEmails)
// retrieve the docs and delete them
@ -56,7 +60,7 @@ const removeUsersFromInfoDB = async tenantId => {
keys,
include_docs: true,
})
const toDelete = userDocs.rows.map(row => {
const toDelete = userDocs.rows.map((row: any) => {
return {
...row.doc,
_deleted: true,
@ -71,8 +75,8 @@ const removeUsersFromInfoDB = async tenantId => {
})
}
const removeGlobalDB = async tenantId => {
return doWithGlobalDB(tenantId, async db => {
async function removeGlobalDB(tenantId: string) {
return doWithGlobalDB(tenantId, async (db: Database) => {
try {
await db.destroy()
} catch (err) {
@ -82,11 +86,11 @@ const removeGlobalDB = async tenantId => {
})
}
const removeTenantApps = async tenantId => {
async function removeTenantApps(tenantId: string) {
try {
const apps = await getAllApps({ all: true })
const apps = (await getAllApps({ all: true })) as App[]
const destroyPromises = apps.map(app =>
doWithDB(app.appId, db => db.destroy())
doWithDB(app.appId, (db: Database) => db.destroy())
)
await Promise.allSettled(destroyPromises)
} catch (err) {
@ -96,7 +100,7 @@ const removeTenantApps = async tenantId => {
}
// can't live in tenancy package due to circular dependency on db/utils
exports.deleteTenant = async tenantId => {
export async function deleteTenant(tenantId: string) {
await removeTenantFromInfoDB(tenantId)
await removeUsersFromInfoDB(tenantId)
await removeGlobalDB(tenantId)

View File

@ -2,23 +2,22 @@ import {
IdentityContext,
IdentityType,
User,
UserContext,
isCloudAccount,
Account,
AccountUserContext,
} from "@budibase/types"
import * as context from "."
export const getIdentity = (): IdentityContext | undefined => {
export function getIdentity(): IdentityContext | undefined {
return context.getIdentity()
}
export const doInIdentityContext = (identity: IdentityContext, task: any) => {
export function doInIdentityContext(identity: IdentityContext, task: any) {
return context.doInIdentityContext(identity, task)
}
export const doInUserContext = (user: User, task: any) => {
const userContext: UserContext = {
export function doInUserContext(user: User, task: any) {
const userContext: any = {
...user,
_id: user._id as string,
type: IdentityType.USER,
@@ -26,7 +25,7 @@ export const doInUserContext = (user: User, task: any) => {
return doInIdentityContext(userContext, task)
}
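// A usage sketch with a hypothetical user document: the task runs inside an
// identity context typed as a USER, so downstream calls can recover it:
//   await doInUserContext(user, async () => {
//     const identity = getIdentity() // identity?.type === IdentityType.USER
//   })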
export const doInAccountContext = (account: Account, task: any) => {
export function doInAccountContext(account: Account, task: any) {
const _id = getAccountUserId(account)
const tenantId = account.tenantId
const accountContext: AccountUserContext = {
@@ -38,12 +37,12 @@ export const doInAccountContext = (account: Account, task: any) => {
return doInIdentityContext(accountContext, task)
}
export const getAccountUserId = (account: Account) => {
export function getAccountUserId(account: Account) {
let userId: string
if (isCloudAccount(account)) {
userId = account.budibaseUserId
} else {
// use account id as user id for self hosting
// use account id as user id for self-hosting
userId = account.accountId
}
return userId

View File

@@ -1,255 +1,3 @@
import env from "../environment"
import { SEPARATOR, DocumentType } from "../db/constants"
import cls from "./FunctionContext"
import { dangerousGetDB, closeDB } from "../db"
import { baseGlobalDBName } from "../db/tenancy"
import { IdentityContext } from "@budibase/types"
import { DEFAULT_TENANT_ID as _DEFAULT_TENANT_ID } from "../constants"
import { ContextKey } from "./constants"
import {
updateUsing,
closeWithUsing,
setAppTenantId,
setIdentity,
closeAppDBs,
getContextDB,
} from "./utils"
export const DEFAULT_TENANT_ID = _DEFAULT_TENANT_ID
// some test cases call functions directly, so we need to
// store an app ID to pretend there is a context
let TEST_APP_ID: string | null = null
export const closeTenancy = async () => {
let db
try {
if (env.USE_COUCH) {
db = getGlobalDB()
}
} catch (err) {
// no DB found - skip closing
return
}
await closeDB(db)
// clear from context now that database is closed/task is finished
cls.setOnContext(ContextKey.TENANT_ID, null)
cls.setOnContext(ContextKey.GLOBAL_DB, null)
}
// export const isDefaultTenant = () => {
// return getTenantId() === DEFAULT_TENANT_ID
// }
export const isMultiTenant = () => {
return env.MULTI_TENANCY
}
/**
* Given an app ID this will attempt to retrieve the tenant ID from it.
* @return {null|string} The tenant ID found within the app ID.
*/
export const getTenantIDFromAppID = (appId: string) => {
if (!appId) {
return null
}
const split = appId.split(SEPARATOR)
const hasDev = split[1] === DocumentType.DEV
if ((hasDev && split.length === 3) || (!hasDev && split.length === 2)) {
return null
}
if (hasDev) {
return split[2]
} else {
return split[1]
}
}
// used for automations, API endpoints should always be in context already
export const doInTenant = (tenantId: string | null, task: any) => {
// make sure default always selected in single tenancy
if (!env.MULTI_TENANCY) {
tenantId = tenantId || DEFAULT_TENANT_ID
}
// the internal function exists so that we can re-use an existing
// context - we don't want to close the DB of a parent context
async function internal(opts = { existing: false }) {
// set the tenant id + global db if this is a new context
if (!opts.existing) {
updateTenantId(tenantId)
}
try {
// invoke the task
return await task()
} finally {
await closeWithUsing(ContextKey.TENANCY_IN_USE, () => {
return closeTenancy()
})
}
}
const existing = cls.getFromContext(ContextKey.TENANT_ID) === tenantId
return updateUsing(ContextKey.TENANCY_IN_USE, existing, internal)
}
export const doInAppContext = (appId: string, task: any) => {
if (!appId) {
throw new Error("appId is required")
}
const identity = getIdentity()
// the internal function exists so that we can re-use an existing
// context - we don't want to close the DB of a parent context
async function internal(opts = { existing: false }) {
// set the app tenant id
if (!opts.existing) {
setAppTenantId(appId)
}
// set the app ID
cls.setOnContext(ContextKey.APP_ID, appId)
// preserve the identity
if (identity) {
setIdentity(identity)
}
try {
// invoke the task
return await task()
} finally {
await closeWithUsing(ContextKey.APP_IN_USE, async () => {
await closeAppDBs()
await closeTenancy()
})
}
}
const existing = cls.getFromContext(ContextKey.APP_ID) === appId
return updateUsing(ContextKey.APP_IN_USE, existing, internal)
}
export const doInIdentityContext = (identity: IdentityContext, task: any) => {
if (!identity) {
throw new Error("identity is required")
}
async function internal(opts = { existing: false }) {
if (!opts.existing) {
cls.setOnContext(ContextKey.IDENTITY, identity)
// set the tenant so that doInTenant will preserve identity
if (identity.tenantId) {
updateTenantId(identity.tenantId)
}
}
try {
// invoke the task
return await task()
} finally {
await closeWithUsing(ContextKey.IDENTITY_IN_USE, async () => {
setIdentity(null)
await closeTenancy()
})
}
}
const existing = cls.getFromContext(ContextKey.IDENTITY)
return updateUsing(ContextKey.IDENTITY_IN_USE, existing, internal)
}
export const getIdentity = (): IdentityContext | undefined => {
try {
return cls.getFromContext(ContextKey.IDENTITY)
} catch (e) {
// do nothing - identity is not in context
}
}
export const updateTenantId = (tenantId: string | null) => {
cls.setOnContext(ContextKey.TENANT_ID, tenantId)
if (env.USE_COUCH) {
setGlobalDB(tenantId)
}
}
export const updateAppId = async (appId: string) => {
try {
// have to close first, before removing the databases from context
await closeAppDBs()
cls.setOnContext(ContextKey.APP_ID, appId)
} catch (err) {
if (env.isTest()) {
TEST_APP_ID = appId
} else {
throw err
}
}
}
export const setGlobalDB = (tenantId: string | null) => {
const dbName = baseGlobalDBName(tenantId)
const db = dangerousGetDB(dbName)
cls.setOnContext(ContextKey.GLOBAL_DB, db)
return db
}
export const getGlobalDB = () => {
const db = cls.getFromContext(ContextKey.GLOBAL_DB)
if (!db) {
throw new Error("Global DB not found")
}
return db
}
export const isTenantIdSet = () => {
const tenantId = cls.getFromContext(ContextKey.TENANT_ID)
return !!tenantId
}
export const getTenantId = () => {
if (!isMultiTenant()) {
return DEFAULT_TENANT_ID
}
const tenantId = cls.getFromContext(ContextKey.TENANT_ID)
if (!tenantId) {
throw new Error("Tenant id not found")
}
return tenantId
}
export const getAppId = () => {
const foundId = cls.getFromContext(ContextKey.APP_ID)
if (!foundId && env.isTest() && TEST_APP_ID) {
return TEST_APP_ID
} else {
return foundId
}
}
export const isTenancyEnabled = () => {
return env.MULTI_TENANCY
}
/**
* Opens the app database based on whatever the request
* contained, dev or prod.
*/
export const getAppDB = (opts?: any) => {
return getContextDB(ContextKey.CURRENT_DB, opts)
}
/**
* This specifically gets the prod app DB: if the request
* contained a development app ID, this will open the prod one.
*/
export const getProdAppDB = (opts?: any) => {
return getContextDB(ContextKey.PROD_DB, opts)
}
/**
* This specifically gets the dev app DB: if the request
* contained a prod app ID, this will open the dev one.
*/
export const getDevAppDB = (opts?: any) => {
return getContextDB(ContextKey.DEV_DB, opts)
}
export { DEFAULT_TENANT_ID } from "../constants"
export * as identity from "./identity"
export * from "./mainContext"

View File

@@ -0,0 +1,262 @@
// some test cases call functions directly, so we need to
// store an app ID to pretend there is a context
import env from "../environment"
import Context from "./Context"
import * as conversions from "../db/conversions"
import { getDB } from "../db/db"
import {
DocumentType,
SEPARATOR,
StaticDatabases,
DEFAULT_TENANT_ID,
} from "../constants"
import { Database, IdentityContext } from "@budibase/types"
export type ContextMap = {
tenantId?: string
appId?: string
identity?: IdentityContext
environmentVariables?: Record<string, string>
}
let TEST_APP_ID: string | null = null
export function getGlobalDBName(tenantId?: string) {
// the tenant ID can be set externally, for example in the user API
// where new tenants are being created
if (!tenantId) {
tenantId = getTenantId()
}
return baseGlobalDBName(tenantId)
}
export function baseGlobalDBName(tenantId: string | undefined | null) {
let dbName
if (!tenantId || tenantId === DEFAULT_TENANT_ID) {
dbName = StaticDatabases.GLOBAL.name
} else {
dbName = `${tenantId}${SEPARATOR}${StaticDatabases.GLOBAL.name}`
}
return dbName
}
export function isMultiTenant() {
return env.MULTI_TENANCY
}
export function isTenantIdSet() {
const context = Context.get()
return !!context?.tenantId
}
export function isTenancyEnabled() {
return env.MULTI_TENANCY
}
/**
* Given an app ID this will attempt to retrieve the tenant ID from it.
* @return {string|undefined} The tenant ID found within the app ID.
*/
export function getTenantIDFromAppID(appId: string) {
if (!appId) {
return undefined
}
if (!isMultiTenant()) {
return DEFAULT_TENANT_ID
}
const split = appId.split(SEPARATOR)
const hasDev = split[1] === DocumentType.DEV
if ((hasDev && split.length === 3) || (!hasDev && split.length === 2)) {
return undefined
}
if (hasDev) {
return split[2]
} else {
return split[1]
}
}
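// A sketch of the ID shapes this parser handles (the tenant and UUID
// segments are made-up examples), assuming multi tenancy is enabled:
//   getTenantIDFromAppID("app_dev_tenant1_96a7fe0b") === "tenant1"
//   getTenantIDFromAppID("app_tenant1_96a7fe0b")     === "tenant1"
//   getTenantIDFromAppID("app_dev_96a7fe0b")         === undefined
//   getTenantIDFromAppID("app_96a7fe0b")             === undefined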
function updateContext(updates: ContextMap): ContextMap {
let context: ContextMap
try {
context = Context.get()
} catch (err) {
// no context, start empty
context = {}
}
context = {
...context,
...updates,
}
return context
}
async function newContext(updates: ContextMap, task: any) {
// see if there is already a context set up
let context: ContextMap = updateContext(updates)
return Context.run(context, task)
}
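// Because updateContext() spreads the parent map before applying updates,
// nested contexts inherit any outer values they don't override - a sketch
// with made-up names, assuming multi tenancy:
//   await doInTenant("tenant1", () =>
//     doInEnvironmentContext({ KEY: "value" }, () => getTenantId()) // "tenant1"
//   )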
export async function doInContext(appId: string, task: any): Promise<any> {
const tenantId = getTenantIDFromAppID(appId)
return newContext(
{
tenantId,
appId,
},
task
)
}
export async function doInTenant(
tenantId: string | null,
task: any
): Promise<any> {
// make sure default always selected in single tenancy
if (!env.MULTI_TENANCY) {
tenantId = tenantId || DEFAULT_TENANT_ID
}
const updates = tenantId ? { tenantId } : {}
return newContext(updates, task)
}
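// Usage sketch (the tenant name is a made-up example): a null tenant ID
// falls back to DEFAULT_TENANT_ID in single tenancy, while a real one
// scopes everything inside the task:
//   await doInTenant("tenant1", async () => {
//     const db = getGlobalDB() // "tenant1_global-db"
//   })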
export async function doInAppContext(
appId: string | null,
task: any
): Promise<any> {
if (!appId && !env.isTest()) {
throw new Error("appId is required")
}
let updates: ContextMap
if (!appId) {
updates = { appId: "" }
} else {
const tenantId = getTenantIDFromAppID(appId)
updates = { appId }
if (tenantId) {
updates.tenantId = tenantId
}
}
return newContext(updates, task)
}
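// Usage sketch (the app ID is a made-up example): the tenant ID is derived
// from the app ID itself, so callers only need to pass the app:
//   await doInAppContext("app_dev_tenant1_96a7fe0b", async () => {
//     getAppId()    // "app_dev_tenant1_96a7fe0b"
//     getTenantId() // "tenant1", assuming multi tenancy
//   })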
export async function doInIdentityContext(
identity: IdentityContext,
task: any
): Promise<any> {
if (!identity) {
throw new Error("identity is required")
}
const context: ContextMap = {
identity,
}
if (identity.tenantId) {
context.tenantId = identity.tenantId
}
return newContext(context, task)
}
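// Usage sketch with a hypothetical identity: setting an identity also pins
// its tenant, so both getIdentity() and getTenantId() work inside the task:
//   const identity = { _id: "us_abc", type: IdentityType.USER, tenantId: "tenant1" }
//   await doInIdentityContext(identity, () => getIdentity())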
export function getIdentity(): IdentityContext | undefined {
try {
const context = Context.get()
return context?.identity
} catch (e) {
// do nothing - identity is not in context
}
}
export function getTenantId(): string {
if (!isMultiTenant()) {
return DEFAULT_TENANT_ID
}
const context = Context.get()
const tenantId = context?.tenantId
if (!tenantId) {
throw new Error("Tenant id not found")
}
return tenantId
}
export function getAppId(): string | undefined {
const context = Context.get()
const foundId = context?.appId
if (!foundId && env.isTest() && TEST_APP_ID) {
return TEST_APP_ID
} else {
return foundId
}
}
export const getProdAppId = () => {
const appId = getAppId()
if (!appId) {
throw new Error("Could not get appId")
}
return conversions.getProdAppID(appId)
}
export function doInEnvironmentContext(
values: Record<string, string>,
task: any
) {
if (!values) {
throw new Error("Must supply environment variables.")
}
const updates = {
environmentVariables: values,
}
return newContext(updates, task)
}
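// Usage sketch (the variable name is a made-up example): values passed in
// become visible to the task via getEnvironmentVariables():
//   await doInEnvironmentContext({ MY_API_KEY: "..." }, () =>
//     getEnvironmentVariables() // { MY_API_KEY: "..." }
//   )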
export function getEnvironmentVariables() {
const context = Context.get()
if (!context.environmentVariables) {
return null
} else {
return context.environmentVariables
}
}
export function getGlobalDB(): Database {
const context = Context.get()
if (!context || (env.MULTI_TENANCY && !context.tenantId)) {
throw new Error("Global DB not found")
}
return getDB(baseGlobalDBName(context?.tenantId))
}
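// A sketch of the name derivation above: the default tenant maps to the
// shared "global-db", while any other tenant gets a prefixed copy, e.g.
// baseGlobalDBName("tenant1") === "tenant1_global-db" (made-up tenant).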
/**
* Gets the app database based on whatever the request
* contained, dev or prod.
*/
export function getAppDB(opts?: any): Database {
const appId = getAppId()
return getDB(appId, opts)
}
/**
* This specifically gets the prod app DB: if the request
* contained a development app ID, this will get the prod one.
*/
export function getProdAppDB(opts?: any): Database {
const appId = getAppId()
if (!appId) {
throw new Error("Unable to retrieve prod DB - no app ID.")
}
return getDB(conversions.getProdAppID(appId), opts)
}
/**
* This specifically gets the dev app DB: if the request
* contained a prod app ID, this will get the dev one.
*/
export function getDevAppDB(opts?: any): Database {
const appId = getAppId()
if (!appId) {
throw new Error("Unable to retrieve dev DB - no app ID.")
}
return getDB(conversions.getDevelopmentAppID(appId), opts)
}
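// A sketch tying the three helpers together (the IDs are made-up examples):
// inside a context for "app_dev_tenant1_96a7fe0b",
//   getAppDB()     opens "app_dev_tenant1_96a7fe0b"
//   getProdAppDB() opens "app_tenant1_96a7fe0b"
// and getDevAppDB() performs the reverse conversion from a prod context.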

View File

@@ -1,18 +1,9 @@
import "../../../tests/utilities/TestConfiguration"
import * as context from ".."
import { DEFAULT_TENANT_ID } from "../../constants"
require("../../../tests")
const context = require("../")
const { DEFAULT_TENANT_ID } = require("../../constants")
import env from "../../environment"
// must use require to spy index file exports due to known issue in jest
const dbUtils = require("../../db")
jest.spyOn(dbUtils, "closeDB")
jest.spyOn(dbUtils, "dangerousGetDB")
describe("context", () => {
beforeEach(() => {
jest.clearAllMocks()
})
describe("doInTenant", () => {
describe("single-tenancy", () => {
it("defaults to the default tenant", () => {
@@ -25,8 +16,6 @@ describe("context", () => {
const db = context.getGlobalDB()
expect(db.name).toBe("global-db")
})
expect(dbUtils.dangerousGetDB).toHaveBeenCalledTimes(1)
expect(dbUtils.closeDB).toHaveBeenCalledTimes(1)
})
})
@@ -37,10 +26,10 @@
it("fails when no tenant id is set", () => {
const test = () => {
let error
let error: any
try {
context.getTenantId()
} catch (e: any) {
} catch (e) {
error = e
}
expect(error.message).toBe("Tenant id not found")
@@ -56,10 +45,10 @@
it("fails when no tenant db is set", () => {
const test = () => {
let error
let error: any
try {
context.getGlobalDB()
} catch (e: any) {
} catch (e) {
error = e
}
expect(error.message).toBe("Global DB not found")
@@ -85,8 +74,6 @@ describe("context", () => {
const db = context.getGlobalDB()
expect(db.name).toBe("test_global-db")
})
expect(dbUtils.dangerousGetDB).toHaveBeenCalledTimes(1)
expect(dbUtils.closeDB).toHaveBeenCalledTimes(1)
})
it("sets the tenant id when nested with same tenant id", async () => {
@@ -121,10 +108,6 @@
})
})
})
// only 1 db is opened and closed
expect(dbUtils.dangerousGetDB).toHaveBeenCalledTimes(1)
expect(dbUtils.closeDB).toHaveBeenCalledTimes(1)
})
it("sets different tenant id inside another context", () => {

View File

@@ -1,109 +0,0 @@
import {
DEFAULT_TENANT_ID,
getAppId,
getTenantIDFromAppID,
updateTenantId,
} from "./index"
import cls from "./FunctionContext"
import { IdentityContext } from "@budibase/types"
import { ContextKey } from "./constants"
import { dangerousGetDB, closeDB } from "../db"
import { isEqual } from "lodash"
import { getDevelopmentAppID, getProdAppID } from "../db/conversions"
import env from "../environment"
export async function updateUsing(
usingKey: string,
existing: boolean,
internal: (opts: { existing: boolean }) => Promise<any>
) {
const using = cls.getFromContext(usingKey)
if (using && existing) {
cls.setOnContext(usingKey, using + 1)
return internal({ existing: true })
} else {
return cls.run(async () => {
cls.setOnContext(usingKey, 1)
return internal({ existing: false })
})
}
}
export async function closeWithUsing(
usingKey: string,
closeFn: () => Promise<any>
) {
const using = cls.getFromContext(usingKey)
if (!using || using <= 1) {
await closeFn()
} else {
cls.setOnContext(usingKey, using - 1)
}
}
export const setAppTenantId = (appId: string) => {
const appTenantId = getTenantIDFromAppID(appId) || DEFAULT_TENANT_ID
updateTenantId(appTenantId)
}
export const setIdentity = (identity: IdentityContext | null) => {
cls.setOnContext(ContextKey.IDENTITY, identity)
}
// this function makes sure the PouchDB objects are closed and
// fully deleted when finished - this protects against memory leaks
export async function closeAppDBs() {
const dbKeys = [ContextKey.CURRENT_DB, ContextKey.PROD_DB, ContextKey.DEV_DB]
for (let dbKey of dbKeys) {
const db = cls.getFromContext(dbKey)
if (!db) {
continue
}
await closeDB(db)
// clear the DB from context, in case someone tries to use it again
cls.setOnContext(dbKey, null)
}
// clear the app ID now that the databases are closed
if (cls.getFromContext(ContextKey.APP_ID)) {
cls.setOnContext(ContextKey.APP_ID, null)
}
if (cls.getFromContext(ContextKey.DB_OPTS)) {
cls.setOnContext(ContextKey.DB_OPTS, null)
}
}
export function getContextDB(key: string, opts: any) {
const dbOptsKey = `${key}${ContextKey.DB_OPTS}`
let storedOpts = cls.getFromContext(dbOptsKey)
let db = cls.getFromContext(key)
if (db && isEqual(opts, storedOpts)) {
return db
}
const appId = getAppId()
let toUseAppId
switch (key) {
case ContextKey.CURRENT_DB:
toUseAppId = appId
break
case ContextKey.PROD_DB:
toUseAppId = getProdAppID(appId)
break
case ContextKey.DEV_DB:
toUseAppId = getDevelopmentAppID(appId)
break
}
db = dangerousGetDB(toUseAppId, opts)
try {
cls.setOnContext(key, db)
if (opts) {
cls.setOnContext(dbOptsKey, opts)
}
} catch (err) {
if (!env.isTest()) {
throw err
}
}
return db
}

View File

@@ -1,5 +1,5 @@
import { dangerousGetDB, closeDB } from "."
import { DocumentType } from "./constants"
import { getPouchDB, closePouchDB } from "./couch"
import { DocumentType } from "../constants"
class Replication {
source: any
@@ -12,12 +12,12 @@ class Replication {
* @param {String} target - the DB you want to replicate to, or rollback from
*/
constructor({ source, target }: any) {
this.source = dangerousGetDB(source)
this.target = dangerousGetDB(target)
this.source = getPouchDB(source)
this.target = getPouchDB(target)
}
close() {
return Promise.all([closeDB(this.source), closeDB(this.target)])
return Promise.all([closePouchDB(this.source), closePouchDB(this.target)])
}
promisify(operation: any, opts = {}) {
@@ -68,7 +68,7 @@ class Replication {
async rollback() {
await this.target.destroy()
// Recreate the DB again
this.target = dangerousGetDB(this.target.name)
this.target = getPouchDB(this.target.name)
// take the opportunity to remove deleted tombstones
await this.replicate()
}
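// A usage sketch with made-up DB names: replicate a dev app DB over its
// prod counterpart, then close the underlying PouchDB handles:
//   const replication = new Replication({
//     source: "app_dev_tenant1_96a7fe0b",
//     target: "app_tenant1_96a7fe0b",
//   })
//   await replication.replicate()
//   await replication.close()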

View File

@@ -1,32 +1,33 @@
import { APP_DEV_PREFIX, APP_PREFIX } from "../constants"
import { App } from "@budibase/types"
const NO_APP_ERROR = "No app provided"
const { APP_DEV_PREFIX, APP_PREFIX } = require("./constants")
exports.isDevAppID = appId => {
export function isDevAppID(appId?: string) {
if (!appId) {
throw NO_APP_ERROR
}
return appId.startsWith(APP_DEV_PREFIX)
}
exports.isProdAppID = appId => {
export function isProdAppID(appId?: string) {
if (!appId) {
throw NO_APP_ERROR
}
return appId.startsWith(APP_PREFIX) && !exports.isDevAppID(appId)
return appId.startsWith(APP_PREFIX) && !isDevAppID(appId)
}
exports.isDevApp = app => {
export function isDevApp(app: App) {
if (!app) {
throw NO_APP_ERROR
}
return exports.isDevAppID(app.appId)
return isDevAppID(app.appId)
}
/**
* Generates a development app ID from a real app ID.
* @returns {string} the dev app ID which can be used for the dev database.
*/
exports.getDevelopmentAppID = appId => {
export function getDevelopmentAppID(appId: string) {
if (!appId || appId.startsWith(APP_DEV_PREFIX)) {
return appId
}
@@ -36,12 +37,12 @@ exports.getDevelopmentAppID = appId => {
const rest = split.join(APP_PREFIX)
return `${APP_DEV_PREFIX}${rest}`
}
exports.getDevAppID = exports.getDevelopmentAppID
export const getDevAppID = getDevelopmentAppID
/**
* Convert a development app ID to a deployed app ID.
*/
exports.getProdAppID = appId => {
export function getProdAppID(appId: string) {
if (!appId || !appId.startsWith(APP_DEV_PREFIX)) {
return appId
}
@@ -52,7 +53,7 @@ exports.getProdAppID = appId => {
return `${APP_PREFIX}${rest}`
}
exports.extractAppUUID = id => {
export function extractAppUUID(id: string) {
const split = id?.split("_") || []
return split.length ? split[split.length - 1] : null
}
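// A round-trip sketch (the UUID is a made-up example) - the two conversions
// are inverses, and each is a no-op on an ID already in its target form:
//   getDevelopmentAppID("app_96a7fe0b") === "app_dev_96a7fe0b"
//   getProdAppID("app_dev_96a7fe0b")    === "app_96a7fe0b"
//   extractAppUUID("app_dev_96a7fe0b")  === "96a7fe0b"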

View File

@@ -0,0 +1,225 @@
import Nano from "@budibase/nano"
import {
AllDocsResponse,
AnyDocument,
Database,
DatabaseOpts,
DatabaseQueryOpts,
DatabasePutOpts,
DatabaseCreateIndexOpts,
DatabaseDeleteIndexOpts,
Document,
isDocument,
} from "@budibase/types"
import { getCouchInfo } from "./connections"
import { directCouchCall } from "./utils"
import { getPouchDB } from "./pouchDB"
import { WriteStream, ReadStream } from "fs"
import { newid } from "../../newid"
function buildNano(couchInfo: { url: string; cookie: string }) {
return Nano({
url: couchInfo.url,
requestDefaults: {
headers: {
Authorization: couchInfo.cookie,
},
},
parseUrl: false,
})
}
export function DatabaseWithConnection(
dbName: string,
connection: string,
opts?: DatabaseOpts
) {
if (!connection) {
throw new Error("Must provide connection details")
}
return new DatabaseImpl(dbName, opts, connection)
}
export class DatabaseImpl implements Database {
public readonly name: string
private static nano: Nano.ServerScope
private readonly instanceNano?: Nano.ServerScope
private readonly pouchOpts: DatabaseOpts
constructor(dbName?: string, opts?: DatabaseOpts, connection?: string) {
if (dbName == null) {
throw new Error("Database name cannot be undefined.")
}
this.name = dbName
this.pouchOpts = opts || {}
if (connection) {
const couchInfo = getCouchInfo(connection)
this.instanceNano = buildNano(couchInfo)
}
if (!DatabaseImpl.nano) {
DatabaseImpl.init()
}
}
static init() {
const couchInfo = getCouchInfo()
DatabaseImpl.nano = buildNano(couchInfo)
}
async exists() {
let response = await directCouchCall(`/${this.name}`, "HEAD")
return response.status === 200
}
private nano() {
return this.instanceNano || DatabaseImpl.nano
}
async checkSetup() {
let shouldCreate = !this.pouchOpts?.skip_setup
// check the DB exists in a lightweight fashion
let exists = await this.exists()
if (!shouldCreate && !exists) {
throw new Error("DB does not exist")
}
if (!exists) {
await this.nano().db.create(this.name)
}
return this.nano().db.use(this.name)
}
private async updateOutput(fnc: any) {
try {
return await fnc()
} catch (err: any) {
if (err.statusCode) {
err.status = err.statusCode
}
throw err
}
}
async get<T>(id?: string): Promise<T | any> {
const db = await this.checkSetup()
if (!id) {
throw new Error("Unable to get doc without a valid _id.")
}
return this.updateOutput(() => db.get(id))
}
async remove(idOrDoc: string | Document, rev?: string) {
const db = await this.checkSetup()
let _id: string
let _rev: string
if (isDocument(idOrDoc)) {
_id = idOrDoc._id!
_rev = idOrDoc._rev!
} else {
_id = idOrDoc
_rev = rev!
}
if (!_id || !_rev) {
throw new Error("Unable to remove doc without a valid _id and _rev.")
}
return this.updateOutput(() => db.destroy(_id, _rev))
}
async post(document: AnyDocument, opts?: DatabasePutOpts) {
if (!document._id) {
document._id = newid()
}
return this.put(document, opts)
}
async put(document: AnyDocument, opts?: DatabasePutOpts) {
if (!document._id) {
throw new Error("Cannot store document without _id field.")
}
const db = await this.checkSetup()
if (!document.createdAt) {
document.createdAt = new Date().toISOString()
}
document.updatedAt = new Date().toISOString()
if (opts?.force && document._id) {
try {
const existing = await this.get(document._id)
if (existing) {
document._rev = existing._rev
}
} catch (err: any) {
if (err.status !== 404) {
throw err
}
}
}
return this.updateOutput(() => db.insert(document))
}
async bulkDocs(documents: AnyDocument[]) {
const db = await this.checkSetup()
return this.updateOutput(() => db.bulk({ docs: documents }))
}
async allDocs<T>(params: DatabaseQueryOpts): Promise<AllDocsResponse<T>> {
const db = await this.checkSetup()
return this.updateOutput(() => db.list(params))
}
async query<T>(
viewName: string,
params: DatabaseQueryOpts
): Promise<AllDocsResponse<T>> {
const db = await this.checkSetup()
const [database, view] = viewName.split("/")
return this.updateOutput(() => db.view(database, view, params))
}
async destroy() {
try {
await this.nano().db.destroy(this.name)
} catch (err: any) {
// didn't exist, don't worry
if (err.statusCode === 404) {
return
} else {
throw { ...err, status: err.statusCode }
}
}
}
async compact() {
const db = await this.checkSetup()
return this.updateOutput(() => db.compact())
}
// All the functions below are infrequently called - we just utilise PouchDB
// for them, as it implements them better than we can
async dump(stream: WriteStream, opts?: { filter?: any }) {
const pouch = getPouchDB(this.name)
// @ts-ignore
return pouch.dump(stream, opts)
}
async load(stream: ReadStream) {
const pouch = getPouchDB(this.name)
// @ts-ignore
return pouch.load(stream)
}
async createIndex(opts: DatabaseCreateIndexOpts) {
const pouch = getPouchDB(this.name)
return pouch.createIndex(opts)
}
async deleteIndex(opts: DatabaseDeleteIndexOpts) {
const pouch = getPouchDB(this.name)
return pouch.deleteIndex(opts)
}
async getIndexes() {
const pouch = getPouchDB(this.name)
return pouch.getIndexes()
}
}
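// A usage sketch (the DB name is a made-up example): checkSetup() lazily
// creates the database on first use unless skip_setup is set, and the
// force option pre-loads the existing _rev so the write replaces the
// current revision instead of conflicting:
//   const db = new DatabaseImpl("app_96a7fe0b")
//   await db.put({ _id: "doc1", name: "first" })
//   await db.put({ _id: "doc1", name: "second" }, { force: true })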

View File

@@ -1,7 +1,39 @@
const PouchDB = require("pouchdb")
const env = require("../environment")
import env from "../../environment"
exports.getUrlInfo = (url = env.COUCH_DB_URL) => {
export const getCouchInfo = (connection?: string) => {
const urlInfo = getUrlInfo(connection)
let username
let password
if (env.COUCH_DB_USERNAME) {
// set from env
username = env.COUCH_DB_USERNAME
} else if (urlInfo.auth.username) {
// set from url
username = urlInfo.auth.username
} else if (!env.isTest()) {
throw new Error("CouchDB username not set")
}
if (env.COUCH_DB_PASSWORD) {
// set from env
password = env.COUCH_DB_PASSWORD
} else if (urlInfo.auth.password) {
// set from url
password = urlInfo.auth.password
} else if (!env.isTest()) {
throw new Error("CouchDB password not set")
}
const authCookie = Buffer.from(`${username}:${password}`).toString("base64")
return {
url: urlInfo.url!,
auth: {
username: username,
password: password,
},
cookie: `Basic ${authCookie}`,
}
}
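// A resolution-order sketch (the values are made-up examples): credentials
// from the environment win over ones embedded in the connection URL, and
// outside of tests a missing value throws. Given the URL
// "http://admin:secret@localhost:5984" and no env overrides, this returns
// auth { username: "admin", password: "secret" } and the matching
// "Basic <base64>" cookie.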
export const getUrlInfo = (url = env.COUCH_DB_URL) => {
let cleanUrl, username, password, host
if (url) {
// Ensure the URL starts with a protocol
@@ -43,82 +75,3 @@ exports.getUrlInfo = (url = env.COUCH_DB_URL) => {
},
}
}
exports.getCouchInfo = () => {
const urlInfo = exports.getUrlInfo()
let username
let password
if (env.COUCH_DB_USERNAME) {
// set from env
username = env.COUCH_DB_USERNAME
} else if (urlInfo.auth.username) {
// set from url
username = urlInfo.auth.username
} else if (!env.isTest()) {
throw new Error("CouchDB username not set")
}
if (env.COUCH_DB_PASSWORD) {
// set from env
password = env.COUCH_DB_PASSWORD
} else if (urlInfo.auth.password) {
// set from url
password = urlInfo.auth.password
} else if (!env.isTest()) {
throw new Error("CouchDB password not set")
}
const authCookie = Buffer.from(`${username}:${password}`).toString("base64")
return {
url: urlInfo.url,
auth: {
username: username,
password: password,
},
cookie: `Basic ${authCookie}`,
}
}
/**
* Return a constructor for PouchDB.
* This should be rarely used outside of the main application config.
* Exposed for exceptional cases such as in-memory views.
*/
exports.getPouch = (opts = {}) => {
let { url, cookie } = exports.getCouchInfo()
let POUCH_DB_DEFAULTS = {
prefix: url,
fetch: (url, opts) => {
// use a specific authorization cookie - be very explicit about how we authenticate
opts.headers.set("Authorization", cookie)
return PouchDB.fetch(url, opts)
},
}
if (opts.inMemory) {
const inMemory = require("pouchdb-adapter-memory")
PouchDB.plugin(inMemory)
POUCH_DB_DEFAULTS = {
prefix: undefined,
adapter: "memory",
}
}
if (opts.onDisk) {
POUCH_DB_DEFAULTS = {
prefix: undefined,
adapter: "leveldb",
}
}
if (opts.replication) {
const replicationStream = require("pouchdb-replication-stream")
PouchDB.plugin(replicationStream.plugin)
PouchDB.adapter("writableStream", replicationStream.adapters.writableStream)
}
if (opts.find) {
const find = require("pouchdb-find")
PouchDB.plugin(find)
}
return PouchDB.defaults(POUCH_DB_DEFAULTS)
}

View File

@@ -0,0 +1,4 @@
export * from "./connections"
export * from "./DatabaseImpl"
export * from "./utils"
export { init, getPouch, getPouchDB, closePouchDB } from "./pouchDB"

Some files were not shown because too many files have changed in this diff.