Merge branch 'develop' of github.com:Budibase/budibase into fix/charset-encoding

Michael Drury 2022-11-29 11:57:45 +00:00
commit bcf2aadbaa
1310 changed files with 59621 additions and 27028 deletions

View File

@ -162,6 +162,7 @@
"translation"
]
},
{
"login": "mslourens",
"name": "Maurits Lourens",
"avatar_url": "https://avatars.githubusercontent.com/u/1907152?v=4",

View File

@ -3,8 +3,11 @@ public
dist
packages/server/builder
packages/server/coverage
packages/worker/coverage
packages/backend-core/coverage
packages/server/client
packages/builder/.routify
packages/builder/cypress/support/queryLevelTransformerFunction.js
packages/builder/cypress/support/queryLevelTransformerFunctionWithData.js
packages/builder/cypress/reports
packages/sdk/sdk

.github/ISSUE_TEMPLATE/epic.md vendored Normal file
View File

@ -0,0 +1,24 @@
---
name: Epic
about: Plan a new project
title: ''
labels: epic
assignees: ''
---
## Description
Brief summary of what this Epic is, whether it's a larger project, goal, or user story. Describe the job to be done, which persona this Epic is mainly for, or if multiple, break it down by user and job story.
## Spec
Link to the Confluence spec
## Teams and Stakeholders
Describe who needs to be kept up-to-date about this Epic, included in discussions, or updated along the way. Stakeholders can be both in Product/Engineering, as well as other teams like Customer Success who might want to keep customers updated on the Epic project.
## Workflow
- [ ] Spec Created and pasted above
- [ ] Product Review
- [ ] Designs created
- [ ] Individual Tasks created and assigned to Epic

View File

@ -23,6 +23,15 @@ jobs:
build:
runs-on: ubuntu-latest
services:
couchdb:
image: ibmcom/couchdb3
env:
COUCHDB_PASSWORD: budibase
COUCHDB_USER: budibase
ports:
- 4567:5984
strategy:
matrix:
node-version: [14.x]
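The CouchDB service container is published to the runner on host port 4567, so tests can reach it at `localhost:4567` with the credentials above. A hypothetical sanity check (not part of the workflow) from a runner shell:

```
# assumes the ibmcom/couchdb3 service from the workflow is up,
# mapped 4567:5984 with the budibase/budibase credentials
curl -s http://budibase:budibase@localhost:4567/
```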
@ -49,13 +58,12 @@ jobs:
- uses: codecov/codecov-action@v1
with:
token: ${{ secrets.CODECOV_TOKEN }} # not required for public repos
files: ./packages/server/coverage/clover.xml
files: ./packages/server/coverage/clover.xml,./packages/worker/coverage/clover.xml,./packages/backend-core/coverage/clover.xml
name: codecov-umbrella
verbose: true
# TODO: parallelise this
- name: Cypress run
uses: cypress-io/github-action@v2
with:
install: false
command: yarn test:e2e:ci
- name: QA Core Integration Tests
run: |
cd qa-core
yarn
yarn api:test:ci

View File

@ -69,6 +69,28 @@ jobs:
env:
KUBECONFIG_FILE: '${{ secrets.RELEASE_KUBECONFIG }}'
- name: Re roll app-service
uses: actions-hub/kubectl@master
env:
KUBE_CONFIG: ${{ secrets.RELEASE_KUBECONFIG_BASE64 }}
with:
args: rollout restart deployment app-service -n budibase
- name: Re roll proxy-service
uses: actions-hub/kubectl@master
env:
KUBE_CONFIG: ${{ secrets.RELEASE_KUBECONFIG_BASE64 }}
with:
args: rollout restart deployment proxy-service -n budibase
- name: Re roll worker-service
uses: actions-hub/kubectl@master
env:
KUBE_CONFIG: ${{ secrets.RELEASE_KUBECONFIG_BASE64 }}
with:
args: rollout restart deployment worker-service -n budibase
- name: Discord Webhook Action
uses: tsickert/discord-webhook@v4.0.0
with:

View File

@ -4,8 +4,6 @@ on:
workflow_dispatch:
env:
BASE_BRANCH: ${{ github.event.pull_request.base.ref}}
BRANCH: ${{ github.event.pull_request.head.ref }}
CI: true
PERSONAL_ACCESS_TOKEN : ${{ secrets.PERSONAL_ACCESS_TOKEN }}
REGISTRY_URL: registry.hub.docker.com
@ -17,6 +15,11 @@ jobs:
matrix:
node-version: [14.x]
steps:
- name: Fail if branch is not master
if: github.ref != 'refs/heads/master'
run: |
echo "Ref is not master, you must run this job from master."
exit 1
- name: "Checkout"
uses: actions/checkout@v2
- name: Use Node.js ${{ matrix.node-version }}
@ -28,8 +31,6 @@ jobs:
- name: Setup Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v1
- name: Install Pro
run: yarn install:pro $BRANCH $BASE_BRANCH
- name: Run Yarn
run: yarn
- name: Run Yarn Bootstrap

View File

@ -18,8 +18,9 @@ on:
workflow_dispatch:
env:
# Posthog token used by ui at build time
POSTHOG_TOKEN: phc_uDYOfnFt6wAbBAXkC6STjcrTpAFiWIhqgFcsC1UVO5F
# Posthog token used by ui at build time
# disable unless needed for testing
# POSTHOG_TOKEN: phc_uDYOfnFt6wAbBAXkC6STjcrTpAFiWIhqgFcsC1UVO5F
INTERCOM_TOKEN: ${{ secrets.INTERCOM_TOKEN }}
PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
FEATURE_PREVIEW_URL: https://budirelease.live
@ -45,7 +46,8 @@ jobs:
- run: yarn
- run: yarn bootstrap
- run: yarn lint
- run: yarn build
- run: yarn build:sdk
- run: yarn test
- name: Configure AWS Credentials
@ -119,6 +121,27 @@ jobs:
]
env:
KUBECONFIG_FILE: '${{ secrets.RELEASE_KUBECONFIG }}'
- name: Re roll app-service
uses: actions-hub/kubectl@master
env:
KUBE_CONFIG: ${{ secrets.RELEASE_KUBECONFIG_BASE64 }}
with:
args: rollout restart deployment app-service -n budibase
- name: Re roll proxy-service
uses: actions-hub/kubectl@master
env:
KUBE_CONFIG: ${{ secrets.RELEASE_KUBECONFIG_BASE64 }}
with:
args: rollout restart deployment proxy-service -n budibase
- name: Re roll worker-service
uses: actions-hub/kubectl@master
env:
KUBE_CONFIG: ${{ secrets.RELEASE_KUBECONFIG_BASE64 }}
with:
args: rollout restart deployment worker-service -n budibase
- name: Discord Webhook Action
uses: tsickert/discord-webhook@v4.0.0

View File

@ -8,19 +8,28 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Fail if branch is not master
if: github.ref != 'refs/heads/master'
run: |
echo "Ref is not master, you must run this job from master."
exit 1
- uses: actions/checkout@v2
with:
node-version: 14.x
fetch_depth: 0
- name: Get the latest budibase release version
id: version
run: |
release_version=$(cat lerna.json | jq -r '.version')
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
- name: Tag and release Docker images (Self Host)
run: |
docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
# Get latest release version
release_version=$(cat lerna.json | jq -r '.version')
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
release_tag=v$release_version
release_tag=v${{ env.RELEASE_VERSION }}
# Pull apps and worker images
docker pull budibase/apps:$release_tag
@ -40,13 +49,12 @@ jobs:
DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
SELFHOST_TAG: latest
- name: Build CLI executables
- name: Bootstrap and build (CLI)
run: |
pushd packages/cli
yarn
yarn bootstrap
yarn build
popd
- name: Build OpenAPI spec
run: |
@ -93,4 +101,4 @@ jobs:
with:
webhook-url: ${{ secrets.PROD_DEPLOY_WEBHOOK_URL }}
content: "Self Host Deployment Complete: ${{ env.RELEASE_VERSION }} deployed to Self Host."
embed-title: ${{ env.RELEASE_VERSION }}

View File

@ -29,7 +29,7 @@ on:
env:
# Posthog token used by ui at build time
POSTHOG_TOKEN: phc_fg5I3nDOf6oJVMHSaycEhpPdlgS8rzXG2r6F2IpxCHS
POSTHOG_TOKEN: phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU
INTERCOM_TOKEN: ${{ secrets.INTERCOM_TOKEN }}
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
PERSONAL_ACCESS_TOKEN : ${{ secrets.PERSONAL_ACCESS_TOKEN }}
@ -56,6 +56,7 @@ jobs:
- run: yarn bootstrap
- run: yarn lint
- run: yarn build
- run: yarn build:sdk
- run: yarn test
- name: Configure AWS Credentials

View File

@ -1,4 +1,4 @@
name: Budibase Smoke Test
name: Budibase Nightly Tests
on:
workflow_dispatch:
@ -6,7 +6,7 @@ on:
- cron: "0 5 * * *" # every day at 5AM
jobs:
release:
nightly:
runs-on: ubuntu-latest
steps:
@ -43,6 +43,18 @@ jobs:
name: Test Reports
path: packages/builder/cypress/reports/testReport.html
# TODO: enable once running in QA test env
# - name: Configure AWS Credentials
# uses: aws-actions/configure-aws-credentials@v1
# with:
# aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
# aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
# aws-region: eu-west-1
# - name: Upload test results HTML
# uses: aws-actions/configure-aws-credentials@v1
# run: aws s3 cp packages/builder/cypress/reports/testReport.html s3://{{ secrets.BUDI_QA_REPORTS_BUCKET_NAME }}/$GITHUB_RUN_ID/index.html
- name: Cypress Discord Notify
run: yarn test:e2e:ci:notify
env:

.gitignore vendored
View File

@ -63,6 +63,7 @@ typings/
# dotenv environment variables file
.env
!qa-core/.env
!hosting/.env
hosting/.generated-nginx.dev.conf
hosting/proxy/.generated-nginx.prod.conf
@ -102,4 +103,6 @@ packages/builder/cypress/reports
stats.html
# TypeScript cache
*.tsbuildinfo
budibase-component
budibase-datasource

View File

@ -4,8 +4,11 @@ dist
packages/builder/src/components/design/AppPreview/CurrentItemPreview.svelte
packages/server/builder
packages/server/coverage
packages/worker/coverage
packages/backend-core/coverage
packages/server/client
packages/server/src/definitions/openapi.ts
packages/builder/.routify
packages/builder/cypress/support/queryLevelTransformerFunction.js
packages/builder/cypress/support/queryLevelTransformerFunctionWithData.js
packages/sdk/sdk

View File

@ -4,7 +4,7 @@
"singleQuote": false,
"trailingComma": "es5",
"arrowParens": "avoid",
"jsxBracketSameLine": false,
"bracketSameLine": false,
"plugins": ["prettier-plugin-svelte"],
"svelteSortOrder": "options-scripts-markup-styles"
}

View File

@ -65,7 +65,7 @@ Budibase is open-source - licensed as GPL v3. This should fill you with confiden
<br /><br />
### Load data or start from scratch
Budibase pulls in data from multiple sources, including MongoDB, CouchDB, PostgreSQL, MySQL, Airtable, S3, DynamoDB, or a REST API. And unlike other platforms, with Budibase you can start from scratch and create business apps with no data sources. [Request new data sources](https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas).
Budibase pulls in data from multiple sources, including MongoDB, CouchDB, PostgreSQL, MySQL, Airtable, S3, DynamoDB, or a REST API. And unlike other platforms, with Budibase you can start from scratch and create business apps with no datasources. [Request new datasources](https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas).
<p align="center">
<img alt="Budibase data" src="https://res.cloudinary.com/daog6scxm/image/upload/v1636970242/Out%20of%20beta%20launch/data_n1tlhf.png">

View File

@ -60,8 +60,6 @@ spec:
secretKeyRef:
name: {{ template "budibase.fullname" . }}
key: jwtSecret
- name: LOG_LEVEL
value: {{ .Values.services.apps.logLevel | default "info" | quote }}
{{ if .Values.services.objectStore.region }}
- name: AWS_REGION
value: {{ .Values.services.objectStore.region }}
@ -78,6 +76,14 @@ spec:
key: objectStoreSecret
- name: MINIO_URL
value: {{ .Values.services.objectStore.url }}
- name: PLUGIN_BUCKET_NAME
value: {{ .Values.services.objectStore.pluginBucketName | quote }}
- name: APPS_BUCKET_NAME
value: {{ .Values.services.objectStore.appsBucketName | quote }}
- name: GLOBAL_CLOUD_BUCKET_NAME
value: {{ .Values.services.objectStore.globalBucketName | quote }}
- name: BACKUPS_BUCKET_NAME
value: {{ .Values.services.objectStore.backupsBucketName | quote }}
- name: PORT
value: {{ .Values.services.apps.port | quote }}
{{ if .Values.services.worker.publicApiRateLimitPerSecond }}
@ -124,12 +130,34 @@ spec:
value: {{ .Values.globals.tenantFeatureFlags | quote }}
{{ if .Values.globals.bbAdminUserEmail }}
- name: BB_ADMIN_USER_EMAIL
value: { { .Values.globals.bbAdminUserEmail | quote } }
value: {{ .Values.globals.bbAdminUserEmail | quote }}
{{ end }}
{{ if .Values.globals.bbAdminUserPassword }}
- name: BB_ADMIN_USER_PASSWORD
value: { { .Values.globals.bbAdminUserPassword | quote } }
value: {{ .Values.globals.bbAdminUserPassword | quote }}
{{ end }}
{{ if .Values.globals.pluginsDir }}
- name: PLUGINS_DIR
value: {{ .Values.globals.pluginsDir | quote }}
{{ end }}
{{ if .Values.services.apps.nodeDebug }}
- name: NODE_DEBUG
value: {{ .Values.services.apps.nodeDebug | quote }}
{{ end }}
{{ if .Values.globals.elasticApmEnabled }}
- name: ELASTIC_APM_ENABLED
value: {{ .Values.globals.elasticApmEnabled | quote }}
{{ end }}
{{ if .Values.globals.elasticApmSecretToken }}
- name: ELASTIC_APM_SECRET_TOKEN
value: {{ .Values.globals.elasticApmSecretToken | quote }}
{{ end }}
{{ if .Values.globals.elasticApmServerUrl }}
- name: ELASTIC_APM_SERVER_URL
value: {{ .Values.globals.elasticApmServerUrl | quote }}
{{ end }}
- name: CDN_URL
value: {{ .Values.globals.cdnUrl }}
image: budibase/apps:{{ .Values.globals.appVersion }}
imagePullPolicy: Always
@ -142,7 +170,10 @@ spec:
name: bbapps
ports:
- containerPort: {{ .Values.services.apps.port }}
resources: {}
{{ with .Values.services.apps.resources }}
resources:
{{- toYaml . | nindent 10 }}
{{ end }}
{{- with .Values.affinity }}
affinity:
{{- toYaml . | nindent 8 }}

View File

@ -38,7 +38,10 @@ spec:
image: redgeoff/replicate-couchdb-cluster
imagePullPolicy: Always
name: couchdb-backup
resources: {}
{{ with .Values.services.couchdb.backup.resources }}
resources:
{{- toYaml . | nindent 10 }}
{{ end }}
{{- with .Values.affinity }}
affinity:
{{- toYaml . | nindent 8 }}

View File

@ -56,7 +56,10 @@ spec:
name: minio-service
ports:
- containerPort: {{ .Values.services.objectStore.port }}
resources: {}
{{ with .Values.services.objectStore.resources }}
resources:
{{- toYaml . | nindent 10 }}
{{ end }}
volumeMounts:
- mountPath: /data
name: minio-data

View File

@ -30,7 +30,10 @@ spec:
name: proxy-service
ports:
- containerPort: {{ .Values.services.proxy.port }}
resources: {}
{{ with .Values.services.proxy.resources }}
resources:
{{- toYaml . | nindent 10 }}
{{ end }}
volumeMounts:
{{- with .Values.affinity }}
affinity:

View File

@ -35,7 +35,10 @@ spec:
name: redis-service
ports:
- containerPort: {{ .Values.services.redis.port }}
resources: {}
{{ with .Values.services.redis.resources }}
resources:
{{- toYaml . | nindent 10 }}
{{ end }}
volumeMounts:
- mountPath: /data
name: redis-data

View File

@ -27,6 +27,8 @@ spec:
spec:
containers:
- env:
- name: BUDIBASE_ENVIRONMENT
value: {{ .Values.globals.budibaseEnv }}
- name: DEPLOYMENT_ENVIRONMENT
value: "kubernetes"
- name: CLUSTER_PORT
@ -75,6 +77,14 @@ spec:
key: objectStoreSecret
- name: MINIO_URL
value: {{ .Values.services.objectStore.url }}
- name: PLUGIN_BUCKET_NAME
value: {{ .Values.services.objectStore.pluginBucketName | quote }}
- name: APPS_BUCKET_NAME
value: {{ .Values.services.objectStore.appsBucketName | quote }}
- name: GLOBAL_CLOUD_BUCKET_NAME
value: {{ .Values.services.objectStore.globalBucketName | quote }}
- name: BACKUPS_BUCKET_NAME
value: {{ .Values.services.objectStore.backupsBucketName | quote }}
- name: PORT
value: {{ .Values.services.worker.port | quote }}
- name: MULTI_TENANCY
@ -125,6 +135,21 @@ spec:
value: {{ .Values.globals.google.secret | quote }}
- name: TENANT_FEATURE_FLAGS
value: {{ .Values.globals.tenantFeatureFlags | quote }}
{{ if .Values.globals.elasticApmEnabled }}
- name: ELASTIC_APM_ENABLED
value: {{ .Values.globals.elasticApmEnabled | quote }}
{{ end }}
{{ if .Values.globals.elasticApmSecretToken }}
- name: ELASTIC_APM_SECRET_TOKEN
value: {{ .Values.globals.elasticApmSecretToken | quote }}
{{ end }}
{{ if .Values.globals.elasticApmServerUrl }}
- name: ELASTIC_APM_SERVER_URL
value: {{ .Values.globals.elasticApmServerUrl | quote }}
{{ end }}
- name: CDN_URL
value: {{ .Values.globals.cdnUrl }}
image: budibase/worker:{{ .Values.globals.appVersion }}
imagePullPolicy: Always
livenessProbe:
@ -136,7 +161,10 @@ spec:
name: bbworker
ports:
- containerPort: {{ .Values.services.worker.port }}
resources: {}
{{ with .Values.services.worker.resources }}
resources:
{{- toYaml . | nindent 10 }}
{{ end }}
{{- with .Values.affinity }}
affinity:
{{- toYaml . | nindent 8 }}

View File

@ -60,19 +60,6 @@ ingress:
port:
number: 10000
resources:
{}
# We usually recommend not to specify default resources and to leave this as a conscious
# choice for the user. This also increases chances charts run on environments with little
# resources, such as Minikube. If you do want to specify resources, uncomment the following
# lines, adjust them as necessary, and remove the curly braces after 'resources:'.
# limits:
# cpu: 100m
# memory: 128Mi
# requests:
# cpu: 100m
# memory: 128Mi
autoscaling:
enabled: false
minReplicas: 1
@ -89,9 +76,10 @@ affinity: {}
globals:
appVersion: "latest"
budibaseEnv: PRODUCTION
tenantFeatureFlags: "*:LICENSING,*:USER_GROUPS"
enableAnalytics: "1"
sentryDSN: ""
posthogToken: "phc_fg5I3nDOf6oJVMHSaycEhpPdlgS8rzXG2r6F2IpxCHS"
posthogToken: "phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU"
logLevel: info
selfHosted: "1" # set to 0 for budibase cloud environment, set to 1 for self-hosted setup
multiTenancy: "0" # set to 0 to disable multiple orgs, set to 1 to enable multiple orgs
@ -110,10 +98,15 @@ globals:
# if createSecrets is set to false, you can hard-code your secrets here
internalApiKey: ""
jwtSecret: ""
cdnUrl: ""
smtp:
enabled: false
# elasticApmEnabled:
# elasticApmSecretToken:
# elasticApmServerUrl:
services:
budibaseVersion: latest
dns: cluster.local
@ -121,15 +114,19 @@ services:
proxy:
port: 10000
replicaCount: 1
resources: {}
apps:
port: 4002
replicaCount: 1
logLevel: info
resources: {}
# nodeDebug: "" # set the value of NODE_DEBUG
worker:
port: 4003
replicaCount: 1
resources: {}
couchdb:
enabled: true
@ -143,6 +140,7 @@ services:
target: ""
# backup interval in seconds
interval: ""
resources: {}
redis:
enabled: true # disable if using external redis
@ -156,6 +154,7 @@ services:
## If undefined (the default) or set to null, no storageClassName spec is
## set, choosing the default provisioner.
storageClass: ""
resources: {}
objectStore:
minio: true
@ -172,6 +171,7 @@ services:
## If undefined (the default) or set to null, no storageClassName spec is
## set, choosing the default provisioner.
storageClass: ""
resources: {}
# Override values in couchDB subchart
couchdb:

View File

@ -1,12 +1,15 @@
## Dev Environment on Debian 11
### Install Node
### Install NVM & Node 14
NVM documentation: https://github.com/nvm-sh/nvm#installing-and-updating
Budibase requires a recent version of node (14+):
Install NVM
```
curl -sL https://deb.nodesource.com/setup_16.x | sudo bash -
apt -y install nodejs
node -v
curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.0/install.sh | bash
```
Install Node 14
```
nvm install 14
```
### Install npm requirements
@ -31,7 +34,7 @@ This setup process was tested on Debian 11 (bullseye) with version numbers show
- Docker: 20.10.5
- Docker-Compose: 1.29.2
- Node: v16.15.1
- Node: v14.20.1
- Yarn: 1.22.19
- Lerna: 5.1.4

View File

@ -11,7 +11,7 @@ through brew.
### Install Node
Budibase requires a recent version of node (14+):
Budibase requires node 14:
```
brew install node npm
node -v
@ -38,7 +38,7 @@ This setup process was tested on Mac OSX 12 (Monterey) with version numbers show
- Docker: 20.10.14
- Docker-Compose: 2.6.0
- Node: 18.3.0
- Node: 14.20.1
- Yarn: 1.22.19
- Lerna: 5.1.4
@ -59,4 +59,7 @@ The dev version will be available on port 10000 i.e.
http://127.0.0.1:10000/builder/admin
| **NOTE**: If you are working on an M1 Apple Silicon machine, you will need to uncomment the `# platform: linux/amd64` line in
[hosting/docker-compose-dev.yaml](../hosting/docker-compose.dev.yaml)
### Troubleshooting
If there are errors with the `yarn setup` command, you can try installing nvm and node 14. This is the same as the instructions for Debian 11.
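Concretely, the Debian 11 instructions boil down to:

```
# install nvm (see https://github.com/nvm-sh/nvm#installing-and-updating)
curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.0/install.sh | bash
# then install node 14
nvm install 14
```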

docs/DEV-SETUP-WINDOWS.md Normal file
View File

@ -0,0 +1,81 @@
## Dev Environment on Windows 10/11 (WSL2)
### Install WSL with Ubuntu LTS
Enable WSL 2 on Windows 10/11 for docker support.
```
wsl --set-default-version 2
```
Install Ubuntu LTS.
```
wsl --install Ubuntu
```
Or follow the instructions here:
https://learn.microsoft.com/en-us/windows/wsl/install
### Install Docker in Windows
Download the installer from Docker and install it.
Check this URL for more detailed instructions:
https://docs.docker.com/desktop/install/windows-install/
You should follow the next steps from within the Ubuntu terminal.
### Install NVM & Node 14
NVM documentation: https://github.com/nvm-sh/nvm#installing-and-updating
Install NVM
```
curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.0/install.sh | bash
```
Install Node 14
```
nvm install 14
```
### Install npm requirements
```
npm install -g yarn jest lerna
```
### Clone the repo
```
git clone https://github.com/Budibase/budibase.git
```
### Check Versions
This setup process was tested on Windows 11 with version numbers shown below. Your mileage may vary using anything else.
- Docker: 20.10.7
- Docker-Compose: 2.10.2
- Node: v14.20.1
- Yarn: 1.22.19
- Lerna: 5.5.4
### Build
```
cd budibase
yarn setup
```
The yarn setup command runs several build steps i.e.
```
node ./hosting/scripts/setup.js && yarn && yarn bootstrap && yarn build && yarn dev
```
So this command will actually run the application in dev mode. It creates .env files under `./packages/server` and `./packages/worker` and runs docker containers for each service via docker-compose.
The dev version will be available on port 10000 i.e.
http://127.0.0.1:10000/builder/admin
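As a quick sanity check once `yarn setup` finishes (hypothetical commands, assuming Docker Desktop's WSL integration is enabled):

```
# the service containers started via docker-compose should be listed
docker ps
# the builder should answer on the proxy port
curl -s -o /dev/null -w "%{http_code}\n" http://127.0.0.1:10000/builder/admin
```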
### Working with the code
Here are the instructions to work on the application from within Visual Studio Code (in Windows) through the WSL. All the commands and files are within the Ubuntu system and it should run as if you were working on a Linux machine.
https://code.visualstudio.com/docs/remote/wsl
Note you will be able to run the application from within the WSL terminal, and access it from a browser in Windows.
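For example, with VS Code and the Remote - WSL extension installed, the repo can be opened straight from the Ubuntu terminal:

```
cd budibase
# launches VS Code in Windows, attached to the WSL filesystem
code .
```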

View File

@ -348,7 +348,7 @@ export interface paths {
}
}
responses: {
/** Returns the created table, including the ID which has been generated for it. This can be internal or external data sources. */
/** Returns the created table, including the ID which has been generated for it. This can be internal or external datasources. */
200: {
content: {
"application/json": components["schemas"]["tableOutput"]
@ -959,7 +959,7 @@ export interface components {
query: {
/** @description The ID of the query. */
_id: string
/** @description The ID of the data source the query belongs to. */
/** @description The ID of the datasource the query belongs to. */
datasourceId?: string
/** @description The bindings which are required to perform this query. */
parameters?: string[]
@ -983,7 +983,7 @@ export interface components {
data: {
/** @description The ID of the query. */
_id: string
/** @description The ID of the data source the query belongs to. */
/** @description The ID of the datasource the query belongs to. */
datasourceId?: string
/** @description The bindings which are required to perform this query. */
parameters?: string[]

View File

@ -11,8 +11,8 @@
"dependencies": {
"bulma": "^0.9.3",
"next": "12.1.0",
"node-fetch": "^3.2.2",
"node-sass": "^7.0.1",
"node-fetch": "^3.2.10",
"sass": "^1.52.3",
"react": "17.0.2",
"react-dom": "17.0.2",
"react-notifications-component": "^3.4.1"
@ -24,4 +24,4 @@
"eslint-config-next": "12.1.0",
"typescript": "4.6.2"
}
}

View File

@ -2020,10 +2020,10 @@ node-domexception@^1.0.0:
resolved "https://registry.yarnpkg.com/node-domexception/-/node-domexception-1.0.0.tgz#6888db46a1f71c0b76b3f7555016b63fe64766e5"
integrity sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==
node-fetch@^3.2.2:
version "3.2.2"
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-3.2.2.tgz#16d33fbe32ca7c6ca1ca8ba5dfea1dd885c59f04"
integrity sha512-Cwhq1JFIoon15wcIkFzubVNFE5GvXGV82pKf4knXXjvGmn7RJKcypeuqcVNZMGDZsAFWyIRya/anwAJr7TWJ7w==
node-fetch@^3.2.10:
version "3.2.10"
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-3.2.10.tgz#e8347f94b54ae18b57c9c049ef641cef398a85c8"
integrity sha512-MhuzNwdURnZ1Cp4XTazr69K0BTizsBroX7Zx3UgDSVcZYKF/6p0CBe4EUb/hLqmzVhl0UpYfgRljQ4yxE+iCxA==
dependencies:
data-uri-to-buffer "^4.0.0"
fetch-blob "^3.1.4"

View File

@ -22,4 +22,7 @@ BUDIBASE_ENVIRONMENT=PRODUCTION
# An admin user can be automatically created initially if these are set
BB_ADMIN_USER_EMAIL=
BB_ADMIN_USER_PASSWORD=
# A path that is watched for plugin bundles. Any bundles found are imported automatically.
PLUGINS_DIR=
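For illustration, one way this could be wired up in the self-host compose setup (paths are illustrative; `/plugins` matches the commented volume mount in the compose diff below, and `my-plugin.tar.gz` stands in for a real plugin bundle):

```
# point PLUGINS_DIR at the path mounted into the app container
echo "PLUGINS_DIR=/plugins" >> hosting/.env
# any bundle dropped into the mounted host folder is then picked up automatically
cp my-plugin.tar.gz /some/path/to/plugins/
```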

View File

@ -25,9 +25,12 @@ services:
REDIS_PASSWORD: ${REDIS_PASSWORD}
BB_ADMIN_USER_EMAIL: ${BB_ADMIN_USER_EMAIL}
BB_ADMIN_USER_PASSWORD: ${BB_ADMIN_USER_PASSWORD}
PLUGINS_DIR: ${PLUGINS_DIR}
depends_on:
- worker-service
- redis-service
# volumes:
# - /some/path/to/plugins:/plugins
worker-service:
restart: unless-stopped
@ -76,6 +79,9 @@ services:
- "${MAIN_PORT}:10000"
container_name: bbproxy
image: budibase/proxy
environment:
- PROXY_RATE_LIMIT_WEBHOOKS_PER_SECOND=10
- PROXY_RATE_LIMIT_API_PER_SECOND=20
depends_on:
- minio-service
- worker-service

View File

@ -22,4 +22,7 @@ BUDIBASE_ENVIRONMENT=PRODUCTION
# An admin user can be automatically created initially if these are set
BB_ADMIN_USER_EMAIL=
BB_ADMIN_USER_PASSWORD=
# A path that is watched for plugin bundles. Any bundles found are imported automatically.
PLUGINS_DIR=

View File

@ -15,12 +15,30 @@ http {
log_format main '$remote_addr - $remote_user [$time_local] "$request" '
'$status $body_bytes_sent "$http_referer" '
'"$http_user_agent" "$http_x_forwarded_for"';
'"$http_user_agent" "$http_x_forwarded_for" '
'response_time=$upstream_response_time proxy_host=$proxy_host upstream_addr=$upstream_addr';
access_log /var/log/nginx/access.log main;
map $http_upgrade $connection_upgrade {
default "upgrade";
}
upstream app-service {
server {{address}}:4001;
keepalive 32;
}
upstream worker-service {
server {{address}}:4002;
keepalive 32;
}
upstream builder {
server {{address}}:3000;
keepalive 32;
}
server {
listen 10000 default_server;
server_name _;
@ -40,41 +58,118 @@ http {
}
location ~ ^/api/(system|admin|global)/ {
proxy_pass http://{{ address }}:4002;
proxy_read_timeout 120s;
proxy_connect_timeout 120s;
proxy_send_timeout 120s;
proxy_http_version 1.1;
proxy_set_header Host $host;
proxy_set_header Connection "";
proxy_pass http://worker-service;
}
location /api/backups/ {
proxy_read_timeout 1800s;
proxy_connect_timeout 1800s;
proxy_send_timeout 1800s;
proxy_pass http://app-service;
proxy_http_version 1.1;
proxy_set_header Connection "";
}
location /api/ {
proxy_read_timeout 120s;
proxy_connect_timeout 120s;
proxy_send_timeout 120s;
proxy_pass http://{{ address }}:4001;
proxy_send_timeout 120s;
proxy_http_version 1.1;
proxy_set_header Host $host;
proxy_set_header Connection "";
proxy_pass http://app-service;
}
location = / {
proxy_pass http://{{ address }}:4001;
proxy_read_timeout 120s;
proxy_connect_timeout 120s;
proxy_send_timeout 120s;
proxy_http_version 1.1;
proxy_set_header Host $host;
proxy_set_header Connection "";
proxy_pass http://app-service;
}
location /app_ {
proxy_pass http://{{ address }}:4001;
proxy_read_timeout 120s;
proxy_connect_timeout 120s;
proxy_send_timeout 120s;
proxy_http_version 1.1;
proxy_set_header Host $host;
proxy_set_header Connection "";
proxy_pass http://app-service;
}
location /app {
proxy_pass http://{{ address }}:4001;
proxy_read_timeout 120s;
proxy_connect_timeout 120s;
proxy_send_timeout 120s;
proxy_http_version 1.1;
proxy_set_header Host $host;
proxy_set_header Connection "";
proxy_pass http://app-service;
}
location /builder {
proxy_pass http://{{ address }}:3000;
proxy_read_timeout 120s;
proxy_connect_timeout 120s;
proxy_send_timeout 120s;
proxy_http_version 1.1;
proxy_set_header Host $host;
proxy_set_header Connection "";
proxy_pass http://builder;
rewrite ^/builder(.*)$ /builder/$1 break;
}
location /builder/ {
proxy_pass http://{{ address }}:3000;
proxy_http_version 1.1;
proxy_set_header Host $host;
proxy_set_header Connection $connection_upgrade;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_read_timeout 120s;
proxy_connect_timeout 120s;
proxy_send_timeout 120s;
proxy_pass http://builder;
}
location /vite/ {
proxy_pass http://builder;
proxy_read_timeout 120s;
proxy_connect_timeout 120s;
proxy_send_timeout 120s;
rewrite ^/vite(.*)$ /$1 break;
}
location /socket/ {
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection 'upgrade';
proxy_set_header Host $host;
proxy_cache_bypass $http_upgrade;
proxy_pass http://app-service;
}
location / {

View File

@ -9,7 +9,11 @@ events {
}
http {
limit_req_zone $binary_remote_addr zone=ratelimit:10m rate=20r/s;
# rate limiting
limit_req_status 429;
limit_req_zone $binary_remote_addr zone=ratelimit:10m rate=${PROXY_RATE_LIMIT_API_PER_SECOND}r/s;
limit_req_zone $binary_remote_addr zone=webhooks:10m rate=${PROXY_RATE_LIMIT_WEBHOOKS_PER_SECOND}r/s;
include /etc/nginx/mime.types;
default_type application/octet-stream;
proxy_set_header Host $host;
@ -29,7 +33,10 @@ http {
log_format main '$remote_addr - $remote_user [$time_local] "$request" '
'$status $body_bytes_sent "$http_referer" '
'"$http_user_agent" "$http_x_forwarded_for"';
'"$http_user_agent" "$http_x_forwarded_for" '
'response_time=$upstream_response_time proxy_host=$proxy_host upstream_addr=$upstream_addr';
access_log /var/log/nginx/access.log main;
map $http_upgrade $connection_upgrade {
default "upgrade";
@ -44,11 +51,11 @@ http {
proxy_buffering off;
set $csp_default "default-src 'self'";
set $csp_script "script-src 'self' 'unsafe-inline' 'unsafe-eval' https://cdn.budi.live https://js.intercomcdn.com https://widget.intercom.io";
set $csp_script "script-src 'self' 'unsafe-inline' 'unsafe-eval' https://*.budibase.net https://cdn.budi.live https://js.intercomcdn.com https://widget.intercom.io";
set $csp_style "style-src 'self' 'unsafe-inline' https://cdn.jsdelivr.net https://fonts.googleapis.com https://rsms.me https://maxcdn.bootstrapcdn.com";
set $csp_object "object-src 'none'";
set $csp_base_uri "base-uri 'self'";
set $csp_connect "connect-src 'self' https://api-iam.intercom.io https://api-iam.intercom.io https://api-ping.intercom.io https://app.posthog.com wss://nexus-websocket-a.intercom.io wss://nexus-websocket-b.intercom.io https://nexus-websocket-a.intercom.io https://nexus-websocket-b.intercom.io https://uploads.intercomcdn.com https://uploads.intercomusercontent.com https://*.s3.amazonaws.com https://*.s3.us-east-2.amazonaws.com https://*.s3.us-east-1.amazonaws.com https://*.s3.us-west-1.amazonaws.com https://*.s3.us-west-2.amazonaws.com https://*.s3.af-south-1.amazonaws.com https://*.s3.ap-east-1.amazonaws.com https://*.s3.ap-southeast-3.amazonaws.com https://*.s3.ap-south-1.amazonaws.com https://*.s3.ap-northeast-3.amazonaws.com https://*.s3.ap-northeast-2.amazonaws.com https://*.s3.ap-southeast-1.amazonaws.com https://*.s3.ap-southeast-2.amazonaws.com https://*.s3.ap-northeast-1.amazonaws.com https://*.s3.ca-central-1.amazonaws.com https://*.s3.cn-north-1.amazonaws.com https://*.s3.cn-northwest-1.amazonaws.com https://*.s3.eu-central-1.amazonaws.com https://*.s3.eu-west-1.amazonaws.com https://*.s3.eu-west-2.amazonaws.com https://*.s3.eu-south-1.amazonaws.com https://*.s3.eu-west-3.amazonaws.com https://*.s3.eu-north-1.amazonaws.com https://*.s3.sa-east-1.amazonaws.com https://*.s3.me-south-1.amazonaws.com https://*.s3.us-gov-east-1.amazonaws.com https://*.s3.us-gov-west-1.amazonaws.com";
set $csp_connect "connect-src 'self' https://*.budibase.net https://api-iam.intercom.io https://api-iam.intercom.io https://api-ping.intercom.io https://app.posthog.com wss://nexus-websocket-a.intercom.io wss://nexus-websocket-b.intercom.io https://nexus-websocket-a.intercom.io https://nexus-websocket-b.intercom.io https://uploads.intercomcdn.com https://uploads.intercomusercontent.com https://*.s3.amazonaws.com https://*.s3.us-east-2.amazonaws.com https://*.s3.us-east-1.amazonaws.com https://*.s3.us-west-1.amazonaws.com https://*.s3.us-west-2.amazonaws.com https://*.s3.af-south-1.amazonaws.com https://*.s3.ap-east-1.amazonaws.com https://*.s3.ap-southeast-3.amazonaws.com https://*.s3.ap-south-1.amazonaws.com https://*.s3.ap-northeast-3.amazonaws.com https://*.s3.ap-northeast-2.amazonaws.com https://*.s3.ap-southeast-1.amazonaws.com https://*.s3.ap-southeast-2.amazonaws.com https://*.s3.ap-northeast-1.amazonaws.com https://*.s3.ca-central-1.amazonaws.com https://*.s3.cn-north-1.amazonaws.com https://*.s3.cn-northwest-1.amazonaws.com https://*.s3.eu-central-1.amazonaws.com https://*.s3.eu-west-1.amazonaws.com https://*.s3.eu-west-2.amazonaws.com https://*.s3.eu-south-1.amazonaws.com https://*.s3.eu-west-3.amazonaws.com https://*.s3.eu-north-1.amazonaws.com https://*.s3.sa-east-1.amazonaws.com https://*.s3.me-south-1.amazonaws.com https://*.s3.us-gov-east-1.amazonaws.com https://*.s3.us-gov-west-1.amazonaws.com";
set $csp_font "font-src 'self' data: https://cdn.jsdelivr.net https://fonts.gstatic.com https://rsms.me https://maxcdn.bootstrapcdn.com https://js.intercomcdn.com https://fonts.intercomcdn.com";
set $csp_frame "frame-src 'self' https:";
set $csp_img "img-src http: https: data: blob:";
@ -90,24 +97,50 @@ http {
proxy_pass http://$watchtower:8080;
}
{{/if}}
location ~ ^/(builder|app_) {
proxy_http_version 1.1;
proxy_set_header Connection $connection_upgrade;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header Host $host;
proxy_pass http://$apps:4002;
}
location ~ ^/api/(system|admin|global)/ {
proxy_set_header Host $host;
proxy_pass http://$worker:4003;
}
location /worker/ {
proxy_set_header Host $host;
proxy_pass http://$worker:4003;
rewrite ^/worker/(.*)$ /$1 break;
}
location /api/backups/ {
# calls to export apps are limited
limit_req zone=ratelimit burst=20 nodelay;
# 1800s timeout for app export requests
proxy_read_timeout 1800s;
proxy_connect_timeout 1800s;
proxy_send_timeout 1800s;
proxy_http_version 1.1;
proxy_set_header Connection $connection_upgrade;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_pass http://$apps:4002;
}
location /api/ {
# calls to the API are rate limited with bursting
limit_req zone=ratelimit burst=20 nodelay;
@ -122,6 +155,27 @@ http {
proxy_set_header Upgrade $http_upgrade;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header Host $host;
proxy_pass http://$apps:4002;
}
location /api/webhooks/ {
# calls to webhooks are rate limited
limit_req zone=webhooks nodelay;
# Rest of configuration copied from /api/ location above
# 120s timeout on API requests
proxy_read_timeout 120s;
proxy_connect_timeout 120s;
proxy_send_timeout 120s;
proxy_http_version 1.1;
proxy_set_header Connection $connection_upgrade;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header Host $host;
proxy_pass http://$apps:4002;
}
@ -131,15 +185,26 @@ http {
rewrite ^/db/(.*)$ /$1 break;
}
location /socket/ {
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection 'upgrade';
proxy_set_header Host $host;
proxy_cache_bypass $http_upgrade;
proxy_pass http://$apps:4002;
}
location / {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Host $http_host;
proxy_connect_timeout 300;
proxy_http_version 1.1;
proxy_set_header Connection "";
chunked_transfer_encoding off;
proxy_pass http://$minio:9000;
}
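A rough way to see the new limits and `limit_req_status 429` in action against a local stack (illustrative only; the path and request count are arbitrary):

```
# burst past the 20 r/s API allowance plus its burst of 20;
# later requests in the same second should return 429
for i in $(seq 1 40); do
  curl -s -o /dev/null -w "%{http_code}\n" http://localhost:10000/api/
done
```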

View File

@ -0,0 +1,24 @@
#!/bin/sh
# vim:sw=4:ts=4:et
set -e
ME=$(basename $0)
NGINX_CONF_FILE="/etc/nginx/nginx.conf"
DEFAULT_CONF_FILE="/etc/nginx/conf.d/default.conf"
# check if we have ipv6 available
if [ ! -f "/proc/net/if_inet6" ]; then
# ipv6 not available so delete lines from nginx conf
if [ -f "$NGINX_CONF_FILE" ]; then
sed -i '/listen \[::\]/d' $NGINX_CONF_FILE
fi
if [ -f "$DEFAULT_CONF_FILE" ]; then
sed -i '/listen \[::\]/d' $DEFAULT_CONF_FILE
fi
echo "$ME: info: ipv6 not available so delete lines from nginx conf"
else
echo "$ME: info: ipv6 is available so no need to delete lines from nginx conf"
fi
exit 0

View File

@ -0,0 +1,24 @@
#!/bin/sh
# vim:sw=4:ts=4:et
set -e
ME=$(basename $0)
NGINX_CONF_FILE="/etc/nginx/nginx.conf"
DEFAULT_CONF_FILE="/etc/nginx/conf.d/default.conf"
# check if we have ipv6 available
if [ ! -f "/proc/net/if_inet6" ]; then
# ipv6 not available so delete lines from nginx conf
if [ -f "$NGINX_CONF_FILE" ]; then
sed -i '/listen \[::\]/d' $NGINX_CONF_FILE
fi
if [ -f "$DEFAULT_CONF_FILE" ]; then
sed -i '/listen \[::\]/d' $DEFAULT_CONF_FILE
fi
echo "$ME: info: ipv6 not available so delete lines from nginx conf"
else
echo "$ME: info: ipv6 is available so no need to delete lines from nginx conf"
fi
exit 0

View File

@ -1,3 +1,19 @@
FROM nginx:latest
COPY .generated-nginx.prod.conf /etc/nginx/nginx.conf
COPY error.html /usr/share/nginx/html/error.html
# nginx.conf
# use the default nginx behaviour for *.template files which are processed with envsubst
# override the output dir to output directly to /etc/nginx instead of /etc/nginx/conf.d
ENV NGINX_ENVSUBST_OUTPUT_DIR=/etc/nginx
COPY .generated-nginx.prod.conf /etc/nginx/templates/nginx.conf.template
# IPv6 removal needs to happen after envsubst
RUN rm -rf /docker-entrypoint.d/10-listen-on-ipv6-by-default.sh
COPY 80-listen-on-ipv6-by-default.sh /docker-entrypoint.d/80-listen-on-ipv6-by-default.sh
RUN chmod +x /docker-entrypoint.d/80-listen-on-ipv6-by-default.sh
# Error handling
COPY error.html /usr/share/nginx/html/error.html
# Default environment
ENV PROXY_RATE_LIMIT_WEBHOOKS_PER_SECOND=10
ENV PROXY_RATE_LIMIT_API_PER_SECOND=20
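Because these are ordinary environment variables consumed by nginx's envsubst template step, the limits can be tuned per deployment without rebuilding the image; a hypothetical override:

```
docker run -e PROXY_RATE_LIMIT_API_PER_SECOND=50 \
  -e PROXY_RATE_LIMIT_WEBHOOKS_PER_SECOND=20 \
  budibase/proxy
```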

View File

@ -3,15 +3,22 @@
echo ${TARGETBUILD} > /buildtarget.txt
if [[ "${TARGETBUILD}" = "aas" ]]; then
# Azure AppService uses /home for persistent data & SSH on port 2222
mkdir -p /home/{search,minio,couch}
mkdir -p /home/couch/{dbs,views}
chown -R couchdb:couchdb /home/couch/
DATA_DIR=/home
WEBSITES_ENABLE_APP_SERVICE_STORAGE=true
mkdir -p $DATA_DIR/{search,minio,couch}
mkdir -p $DATA_DIR/couch/{dbs,views}
chown -R couchdb:couchdb $DATA_DIR/couch/
apt update
apt-get install -y openssh-server
sed -i 's#dir=/opt/couchdb/data/search#dir=/home/search#' /opt/clouseau/clouseau.ini
sed -i 's#/minio/minio server /minio &#/minio/minio server /home/minio &#' /runner.sh
sed -i 's#database_dir = ./data#database_dir = /home/couch/dbs#' /opt/couchdb/etc/default.ini
sed -i 's#view_index_dir = ./data#view_index_dir = /home/couch/views#' /opt/couchdb/etc/default.ini
sed -i "s/#Port 22/Port 2222/" /etc/ssh/sshd_config
echo "root:Docker!" | chpasswd
mkdir -p /tmp
chmod +x /tmp/ssh_setup.sh \
&& (sleep 1;/tmp/ssh_setup.sh 2>&1 > /dev/null)
cp /etc/sshd_config /etc/ssh/sshd_config
/etc/init.d/ssh restart
fi
sed -i "s#DATA_DIR#/home#g" /opt/clouseau/clouseau.ini
sed -i "s#DATA_DIR#/home#g" /opt/couchdb/etc/local.ini
else
sed -i "s#DATA_DIR#/data#g" /opt/clouseau/clouseau.ini
sed -i "s#DATA_DIR#/data#g" /opt/couchdb/etc/local.ini
fi

View File

@ -19,32 +19,18 @@ ADD packages/worker .
RUN node /pinVersions.js && yarn && yarn build && /cleanup.sh
FROM couchdb:3.2.1
# TARGETARCH can be amd64 or arm e.g. docker build --build-arg TARGETARCH=amd64
ARG TARGETARCH amd64
ARG TARGETARCH
ENV TARGETARCH $TARGETARCH
#TARGETBUILD can be set to single (for single docker image) or aas (for azure app service)
# e.g. docker build --build-arg TARGETBUILD=aas ....
ARG TARGETBUILD single
ARG TARGETBUILD=single
ENV TARGETBUILD $TARGETBUILD
COPY --from=build /app /app
COPY --from=build /worker /worker
ENV \
APP_PORT=4001 \
ARCHITECTURE=amd \
BUDIBASE_ENVIRONMENT=PRODUCTION \
CLUSTER_PORT=80 \
# CUSTOM_DOMAIN=budi001.custom.com \
DEPLOYMENT_ENVIRONMENT=docker \
MINIO_URL=http://localhost:9000 \
POSTHOG_TOKEN=phc_fg5I3nDOf6oJVMHSaycEhpPdlgS8rzXG2r6F2IpxCHS \
REDIS_URL=localhost:6379 \
SELF_HOSTED=1 \
TARGETBUILD=$TARGETBUILD \
WORKER_PORT=4002 \
WORKER_URL=http://localhost:4002 \
APPS_URL=http://localhost:4001
# ENV CUSTOM_DOMAIN=budi001.custom.com \
# See runner.sh for Env Vars
# These secret env variables are generated by the runner at startup
# their values can be overridden by the user; they will be written
# to the .env file in the /data directory for use later on
@ -114,7 +100,10 @@ RUN chmod +x ./healthcheck.sh
ADD hosting/scripts/build-target-paths.sh .
RUN chmod +x ./build-target-paths.sh
# Script below sets the path for storing data based on $DATA_DIR
# For Azure App Service install SSH & point data locations to /home
ADD hosting/single/ssh/sshd_config /etc/
ADD hosting/single/ssh/ssh_setup.sh /tmp
RUN /build-target-paths.sh
# cleanup cache
@ -122,6 +111,8 @@ RUN yarn cache clean -f
EXPOSE 80
EXPOSE 443
# Expose port 2222 for SSH on Azure App Service build
EXPOSE 2222
VOLUME /data
# setup letsencrypt certificate

View File

@ -7,7 +7,7 @@ name=clouseau@127.0.0.1
cookie=monster
; the path where you would like to store the search index files
dir=/data/search
dir=DATA_DIR/search
; the number of search indexes that can be open simultaneously
max_indexes_open=500

View File

@ -1,5 +1,5 @@
; CouchDB Configuration Settings
[couchdb]
database_dir = /data/couch/dbs
view_index_dir = /data/couch/views
database_dir = DATA_DIR/couch/dbs
view_index_dir = DATA_DIR/couch/views

View File

@ -3,6 +3,11 @@ healthy=true
if [ -f "/data/.env" ]; then
export $(cat /data/.env | xargs)
elif [ -f "/home/.env" ]; then
export $(cat /home/.env | xargs)
else
echo "No .env file found"
healthy=false
fi
if [[ $(curl -Lfk -s -w "%{http_code}\n" http://localhost/ -o /dev/null) -ne 200 ]]; then

View File

@ -43,6 +43,24 @@ server {
rewrite ^/worker/(.*)$ /$1 break;
}
location /api/backups/ {
# calls to export apps are limited
limit_req zone=ratelimit burst=20 nodelay;
# 1800s timeout for app export requests
proxy_read_timeout 1800s;
proxy_connect_timeout 1800s;
proxy_send_timeout 1800s;
proxy_http_version 1.1;
proxy_set_header Connection $connection_upgrade;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_pass http://127.0.0.1:4001;
}
location /api/ {
# calls to the API are rate limited with bursting
limit_req zone=ratelimit burst=20 nodelay;
@ -66,6 +84,15 @@ server {
rewrite ^/db/(.*)$ /$1 break;
}
location /socket/ {
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection 'upgrade';
proxy_set_header Host $host;
proxy_cache_bypass $http_upgrade;
proxy_pass http://127.0.0.1:4001;
}
location / {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;

View File

@ -1,9 +1,47 @@
#!/bin/bash
declare -a ENV_VARS=("COUCHDB_USER" "COUCHDB_PASSWORD" "MINIO_ACCESS_KEY" "MINIO_SECRET_KEY" "INTERNAL_API_KEY" "JWT_SECRET" "REDIS_PASSWORD")
if [ -f "/data/.env" ]; then
export $(cat /data/.env | xargs)
declare -a ENV_VARS=("COUCHDB_USER" "COUCHDB_PASSWORD" "DATA_DIR" "MINIO_ACCESS_KEY" "MINIO_SECRET_KEY" "INTERNAL_API_KEY" "JWT_SECRET" "REDIS_PASSWORD")
declare -a DOCKER_VARS=("APP_PORT" "APPS_URL" "ARCHITECTURE" "BUDIBASE_ENVIRONMENT" "CLUSTER_PORT" "DEPLOYMENT_ENVIRONMENT" "MINIO_URL" "NODE_ENV" "POSTHOG_TOKEN" "REDIS_URL" "SELF_HOSTED" "WORKER_PORT" "WORKER_URL" "TENANT_FEATURE_FLAGS" "ACCOUNT_PORTAL_URL")
# Check the env vars set in Dockerfile have come through, AAS seems to drop them
[[ -z "${APP_PORT}" ]] && export APP_PORT=4001
[[ -z "${ARCHITECTURE}" ]] && export ARCHITECTURE=amd
[[ -z "${BUDIBASE_ENVIRONMENT}" ]] && export BUDIBASE_ENVIRONMENT=PRODUCTION
[[ -z "${CLUSTER_PORT}" ]] && export CLUSTER_PORT=80
[[ -z "${DEPLOYMENT_ENVIRONMENT}" ]] && export DEPLOYMENT_ENVIRONMENT=docker
[[ -z "${MINIO_URL}" ]] && export MINIO_URL=http://localhost:9000
[[ -z "${NODE_ENV}" ]] && export NODE_ENV=production
[[ -z "${POSTHOG_TOKEN}" ]] && export POSTHOG_TOKEN=phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU
[[ -z "${TENANT_FEATURE_FLAGS}" ]] && export TENANT_FEATURE_FLAGS="*:LICENSING,*:USER_GROUPS"
[[ -z "${ACCOUNT_PORTAL_URL}" ]] && export ACCOUNT_PORTAL_URL=https://account.budibase.app
[[ -z "${REDIS_URL}" ]] && export REDIS_URL=localhost:6379
[[ -z "${SELF_HOSTED}" ]] && export SELF_HOSTED=1
[[ -z "${WORKER_PORT}" ]] && export WORKER_PORT=4002
[[ -z "${WORKER_URL}" ]] && export WORKER_URL=http://localhost:4002
[[ -z "${APPS_URL}" ]] && export APPS_URL=http://localhost:4001
# export CUSTOM_DOMAIN=budi001.custom.com
# Azure App Service customisations
if [[ "${TARGETBUILD}" = "aas" ]]; then
DATA_DIR=/home
WEBSITES_ENABLE_APP_SERVICE_STORAGE=true
/etc/init.d/ssh start
else
DATA_DIR=${DATA_DIR:-/data}
fi
# first randomise any unset environment variables
mkdir -p ${DATA_DIR}
# Mount NFS or GCP Filestore if env vars exist for it
if [[ ! -z ${FILESHARE_IP} && ! -z ${FILESHARE_NAME} ]]; then
echo "Mounting NFS share"
apt update && apt install -y nfs-common nfs-kernel-server
echo "Mount file share ${FILESHARE_IP}:/${FILESHARE_NAME} to ${DATA_DIR}"
mount -o nolock ${FILESHARE_IP}:/${FILESHARE_NAME} ${DATA_DIR}
echo "Mounting result: $?"
fi
if [ -f "${DATA_DIR}/.env" ]; then
# Read in the .env file and export the variables
for LINE in $(cat ${DATA_DIR}/.env); do export $LINE; done
fi
# randomise any unset environment variables
for ENV_VAR in "${ENV_VARS[@]}"
do
temp=$(eval "echo \$$ENV_VAR")
@ -14,21 +52,33 @@ done
if [[ -z "${COUCH_DB_URL}" ]]; then
export COUCH_DB_URL=http://$COUCHDB_USER:$COUCHDB_PASSWORD@localhost:5984
fi
if [ ! -f "/data/.env" ]; then
touch /data/.env
if [ ! -f "${DATA_DIR}/.env" ]; then
touch ${DATA_DIR}/.env
for ENV_VAR in "${ENV_VARS[@]}"
do
temp=$(eval "echo \$$ENV_VAR")
echo "$ENV_VAR=$temp" >> /data/.env
echo "$ENV_VAR=$temp" >> ${DATA_DIR}/.env
done
for ENV_VAR in "${DOCKER_VARS[@]}"
do
temp=$(eval "echo \$$ENV_VAR")
echo "$ENV_VAR=$temp" >> ${DATA_DIR}/.env
done
echo "COUCH_DB_URL=${COUCH_DB_URL}" >> ${DATA_DIR}/.env
fi
# Read in the .env file and export the variables
for LINE in $(cat ${DATA_DIR}/.env); do export $LINE; done
ln -s ${DATA_DIR}/.env /app/.env
ln -s ${DATA_DIR}/.env /worker/.env
# make these directories in runner, incase of mount
mkdir -p /data/couch/{dbs,views} /home/couch/{dbs,views}
chown -R couchdb:couchdb /data/couch /home/couch
redis-server --requirepass $REDIS_PASSWORD &
/opt/clouseau/bin/clouseau &
/minio/minio server /data/minio &
mkdir -p ${DATA_DIR}/couch/{dbs,views}
mkdir -p ${DATA_DIR}/minio
mkdir -p ${DATA_DIR}/search
chown -R couchdb:couchdb ${DATA_DIR}/couch
redis-server --requirepass $REDIS_PASSWORD > /dev/stdout 2>&1 &
/opt/clouseau/bin/clouseau > /dev/stdout 2>&1 &
/minio/minio server ${DATA_DIR}/minio > /dev/stdout 2>&1 &
/docker-entrypoint.sh /opt/couchdb/bin/couchdb &
/etc/init.d/nginx restart
if [[ ! -z "${CUSTOM_DOMAIN}" ]]; then
@ -37,16 +87,18 @@ if [[ ! -z "${CUSTOM_DOMAIN}" ]]; then
chmod +x /etc/cron.d/certificate-renew
# Request the certbot certificate
/app/letsencrypt/certificate-request.sh ${CUSTOM_DOMAIN}
/etc/init.d/nginx restart
fi
/etc/init.d/nginx restart
pushd app
pm2 start --name app "yarn run:docker"
pm2 start -l /dev/stdout --name app "yarn run:docker"
popd
pushd worker
pm2 start --name worker "yarn run:docker"
pm2 start -l /dev/stdout --name worker "yarn run:docker"
popd
sleep 10
echo "curl to couchdb endpoints"
curl -X PUT ${COUCH_DB_URL}/_users
curl -X PUT ${COUCH_DB_URL}/_replicator
echo "end of runner.sh, sleeping ..."
sleep infinity

View File

@ -0,0 +1,8 @@
#!/bin/sh
ssh-keygen -A
#prepare run dir
if [ ! -d "/var/run/sshd" ]; then
mkdir -p /var/run/sshd
fi

View File

@ -0,0 +1,12 @@
Port 2222
ListenAddress 0.0.0.0
LoginGraceTime 180
X11Forwarding yes
Ciphers aes128-cbc,3des-cbc,aes256-cbc,aes128-ctr,aes192-ctr,aes256-ctr
MACs hmac-sha1,hmac-sha1-96
StrictModes yes
SyslogFacility DAEMON
PasswordAuthentication yes
PermitEmptyPasswords no
PermitRootLogin yes
Subsystem sftp internal-sftp

View File

@ -1,5 +1,5 @@
{
"version": "1.2.20",
"version": "2.1.40-alpha.0",
"npmClient": "yarn",
"packages": [
"packages/*"

View File

@ -3,7 +3,6 @@
"private": true,
"devDependencies": {
"@rollup/plugin-json": "^4.0.2",
"@types/mongodb": "3.6.3",
"@typescript-eslint/parser": "4.28.0",
"babel-eslint": "^10.0.3",
"eslint": "^7.28.0",
@ -13,18 +12,21 @@
"js-yaml": "^4.1.0",
"kill-port": "^1.6.1",
"lerna": "3.14.1",
"madge": "^5.0.1",
"prettier": "^2.3.1",
"prettier-plugin-svelte": "^2.3.0",
"rimraf": "^3.0.2",
"rollup-plugin-replace": "^2.2.0",
"svelte": "^3.38.2",
"typescript": "4.5.5"
"typescript": "4.7.3"
},
"scripts": {
"setup": "node ./hosting/scripts/setup.js && yarn && yarn bootstrap && yarn build && yarn dev",
"bootstrap": "lerna bootstrap && lerna link && ./scripts/link-dependencies.sh",
"build": "lerna run build",
"build:dev": "lerna run prebuild && tsc --build --watch --preserveWatchOutput",
"build:sdk": "lerna run build:sdk",
"deps:circular": "madge packages/server/dist/index.js packages/worker/src/index.ts packages/backend-core/dist/src/index.js packages/cli/src/index.js --circular",
"release": "lerna publish ${RELEASE_VERSION_TYPE:-patch} --yes --force-publish && yarn release:pro",
"release:develop": "lerna publish prerelease --yes --force-publish --dist-tag develop --exact && yarn release:pro:develop",
"release:pro": "bash scripts/pro/release.sh",
@ -45,8 +47,8 @@
"lint:eslint": "eslint packages",
"lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\"",
"lint": "yarn run lint:eslint && yarn run lint:prettier",
"lint:fix:eslint": "eslint --fix packages",
"lint:fix:prettier": "prettier --write \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\"",
"lint:fix:eslint": "eslint --fix packages qa-core",
"lint:fix:prettier": "prettier --write \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\" && prettier --write \"qa-core/**/*.{js,ts,svelte}\"",
"lint:fix": "yarn run lint:fix:prettier && yarn run lint:fix:eslint",
"test:e2e": "lerna run cy:test --stream",
"test:e2e:ci": "lerna run cy:ci --stream",
@ -73,8 +75,8 @@
"env:multi:disable": "lerna run env:multi:disable",
"env:selfhost:enable": "lerna run env:selfhost:enable",
"env:selfhost:disable": "lerna run env:selfhost:disable",
"env:localdomain:enable": "lerna run env:localdomain:enable",
"env:localdomain:disable": "lerna run env:localdomain:disable",
"env:localdomain:enable": "./scripts/localdomain.sh enable",
"env:localdomain:disable": "./scripts/localdomain.sh disable",
"env:account:enable": "lerna run env:account:enable",
"env:account:disable": "lerna run env:account:disable",
"mode:self": "yarn env:selfhost:enable && yarn env:multi:disable && yarn env:account:disable",

View File

@ -0,0 +1,15 @@
const mockS3 = {
headBucket: jest.fn().mockReturnThis(),
deleteObject: jest.fn().mockReturnThis(),
deleteObjects: jest.fn().mockReturnThis(),
createBucket: jest.fn().mockReturnThis(),
listObjects: jest.fn().mockReturnThis(),
promise: jest.fn().mockReturnThis(),
catch: jest.fn(),
}
const AWS = {
S3: jest.fn(() => mockS3),
}
export default AWS

View File

@ -1 +0,0 @@
module.exports = require("./src/cloud/accounts")

View File

@ -1 +0,0 @@
module.exports = require("./src/auth")

View File

@ -1,9 +0,0 @@
const generic = require("./src/cache/generic")
module.exports = {
user: require("./src/cache/user"),
app: require("./src/cache/appMetadata"),
writethrough: require("./src/cache/writethrough"),
...generic,
cache: generic,
}

View File

@ -1 +0,0 @@
module.exports = require("./src/constants")

View File

@ -1,22 +0,0 @@
const {
getAppDB,
getDevAppDB,
getProdAppDB,
getAppId,
updateAppId,
doInAppContext,
doInTenant,
} = require("./src/context")
const identity = require("./src/context/identity")
module.exports = {
getAppDB,
getDevAppDB,
getProdAppDB,
getAppId,
updateAppId,
doInAppContext,
doInTenant,
identity,
}

View File

@ -1,7 +0,0 @@
module.exports = {
...require("./src/db/utils"),
...require("./src/db/constants"),
...require("./src/db"),
...require("./src/db/views"),
...require("./src/db/pouch"),
}

View File

@ -1 +0,0 @@
module.exports = require("./src/context/deprovision")

View File

@ -1 +0,0 @@
module.exports = require("./src/security/encryption")

View File

@ -0,0 +1,21 @@
import { Config } from "@jest/types"
const config: Config.InitialOptions = {
preset: "ts-jest",
testEnvironment: "node",
setupFiles: ["./tests/jestSetup.ts"],
collectCoverageFrom: ["src/**/*.{js,ts}"],
coverageReporters: ["lcov", "json", "clover"],
}
if (!process.env.CI) {
// use sources when not in CI
config.moduleNameMapper = {
"@budibase/types": "<rootDir>/../types/src",
"^axios.*$": "<rootDir>/node_modules/axios/lib/axios.js",
}
} else {
console.log("Running tests with compiled dependency sources")
}
export default config

View File

@ -1 +0,0 @@
module.exports = require("./src/logging")

View File

@ -1 +0,0 @@
module.exports = require("./src/middleware")

View File

@ -1 +0,0 @@
module.exports = require("./src/migrations")

View File

@ -1,4 +0,0 @@
module.exports = {
...require("./src/objectStore"),
...require("./src/objectStore/utils"),
}

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/backend-core",
"version": "1.2.20",
"version": "2.1.40-alpha.0",
"description": "Budibase backend core libraries used in server and worker",
"main": "dist/src/index.js",
"types": "dist/src/index.d.ts",
@ -16,23 +16,27 @@
"prepack": "cp package.json dist",
"build": "tsc -p tsconfig.build.json",
"build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput",
"test": "jest",
"test": "jest --coverage",
"test:watch": "jest --watchAll"
},
"dependencies": {
"@budibase/types": "^1.2.20",
"@budibase/types": "2.1.40-alpha.0",
"@shopify/jest-koa-mocks": "5.0.1",
"@techpass/passport-openidconnect": "0.3.2",
"aws-sdk": "2.1030.0",
"bcrypt": "5.0.1",
"bcryptjs": "2.4.3",
"bull": "4.10.1",
"dotenv": "16.0.1",
"emitter-listener": "1.1.2",
"ioredis": "4.28.0",
"joi": "17.6.0",
"jsonwebtoken": "8.5.1",
"koa-passport": "4.1.4",
"lodash": "4.17.21",
"lodash.isarguments": "3.1.0",
"nano": "^10.1.0",
"node-fetch": "2.6.7",
"passport-google-auth": "1.0.2",
"passport-google-oauth": "2.0.0",
"passport-jwt": "4.0.0",
"passport-local": "1.0.0",
@ -48,20 +52,11 @@
"uuid": "8.3.2",
"zlib": "1.0.5"
},
"jest": {
"preset": "ts-jest",
"testEnvironment": "node",
"moduleNameMapper": {
"@budibase/types": "<rootDir>/../types/src"
},
"setupFiles": [
"./scripts/jestSetup.ts"
]
},
"devDependencies": {
"@shopify/jest-koa-mocks": "3.1.5",
"@types/chance": "1.1.3",
"@types/ioredis": "4.28.0",
"@types/jest": "27.5.1",
"@types/koa": "2.0.52",
"@types/koa": "2.13.4",
"@types/lodash": "4.14.180",
"@types/node": "14.18.20",
"@types/node-fetch": "2.6.1",
@ -70,13 +65,16 @@
"@types/semver": "7.3.7",
"@types/tar-fs": "2.0.1",
"@types/uuid": "8.3.4",
"chance": "1.1.3",
"ioredis-mock": "5.8.0",
"jest": "27.5.1",
"koa": "2.7.0",
"jest": "28.1.1",
"koa": "2.13.4",
"nodemon": "2.0.16",
"pouchdb-adapter-memory": "7.2.2",
"timekeeper": "2.2.0",
"ts-jest": "27.1.5",
"ts-jest": "28.0.4",
"ts-node": "10.8.1",
"tsconfig-paths": "4.0.0",
"typescript": "4.7.3"
},
"gitHead": "d1836a898cab3f8ab80ee6d8f42be1a9eed7dcdc"

View File

@ -1 +0,0 @@
module.exports = require("./src/security/permissions")

View File

@ -1,5 +0,0 @@
module.exports = {
Client: require("./src/redis"),
utils: require("./src/redis/utils"),
clients: require("./src/redis/init"),
}

View File

@ -1 +0,0 @@
module.exports = require("./src/security/roles")

View File

@ -1,12 +0,0 @@
import env from "../src/environment"
import { mocks } from "../tests/utilities"
// mock all dates to 2020-01-01T00:00:00.000Z
// use tk.reset() to use real dates in individual tests
import tk from "timekeeper"
tk.freeze(mocks.date.MOCK_DATE)
env._set("SELF_HOSTED", "1")
env._set("NODE_ENV", "jest")
env._set("JWT_SECRET", "test-jwtsecret")
env._set("LOG_LEVEL", "silent")

View File

@ -1 +0,0 @@
module.exports = require("./src/security/sessions")

View File

@ -1,19 +1,16 @@
const passport = require("koa-passport")
const _passport = require("koa-passport")
const LocalStrategy = require("passport-local").Strategy
const JwtStrategy = require("passport-jwt").Strategy
const { getGlobalDB } = require("./tenancy")
import { getGlobalDB } from "../tenancy"
const refresh = require("passport-oauth2-refresh")
const { Configs } = require("./constants")
const { getScopedConfig } = require("./db/utils")
const {
jwt,
import { Config } from "../constants"
import { getScopedConfig } from "../db"
import {
jwt as jwtPassport,
local,
authenticated,
google,
oidc,
auditLog,
tenancy,
appTenancy,
authError,
ssoCallbackUrl,
csrf,
@ -22,32 +19,60 @@ const {
builderOnly,
builderOrAdmin,
joiValidator,
} = require("./middleware")
const { invalidateUser } = require("./cache/user")
oidc,
google,
} from "../middleware"
import { invalidateUser } from "../cache/user"
import { User } from "@budibase/types"
import { logAlert } from "../logging"
export {
auditLog,
authError,
internalApi,
ssoCallbackUrl,
adminOnly,
builderOnly,
builderOrAdmin,
joiValidator,
google,
oidc,
} from "../middleware"
export const buildAuthMiddleware = authenticated
export const buildTenancyMiddleware = tenancy
export const buildCsrfMiddleware = csrf
export const passport = _passport
export const jwt = require("jsonwebtoken")
// Strategies
passport.use(new LocalStrategy(local.options, local.authenticate))
passport.use(new JwtStrategy(jwt.options, jwt.authenticate))
_passport.use(new LocalStrategy(local.options, local.authenticate))
if (jwtPassport.options.secretOrKey) {
_passport.use(new JwtStrategy(jwtPassport.options, jwtPassport.authenticate))
} else {
logAlert("No JWT Secret supplied, cannot configure JWT strategy")
}
passport.serializeUser((user, done) => done(null, user))
_passport.serializeUser((user: User, done: any) => done(null, user))
passport.deserializeUser(async (user, done) => {
_passport.deserializeUser(async (user: User, done: any) => {
const db = getGlobalDB()
try {
const user = await db.get(user._id)
return done(null, user)
const dbUser = await db.get(user._id)
return done(null, dbUser)
} catch (err) {
console.error(`User not found`, err)
return done(null, false, { message: "User not found" })
}
})
async function refreshOIDCAccessToken(db, chosenConfig, refreshToken) {
async function refreshOIDCAccessToken(
db: any,
chosenConfig: any,
refreshToken: string
) {
const callbackUrl = await oidc.getCallbackUrl(db, chosenConfig)
let enrichedConfig
let strategy
let enrichedConfig: any
let strategy: any
try {
enrichedConfig = await oidc.fetchStrategyConfig(chosenConfig, callbackUrl)
@ -68,40 +93,50 @@ async function refreshOIDCAccessToken(db, chosenConfig, refreshToken) {
return new Promise(resolve => {
refresh.requestNewAccessToken(
Configs.OIDC,
Config.OIDC,
refreshToken,
(err, accessToken, refreshToken, params) => {
(err: any, accessToken: string, refreshToken: any, params: any) => {
resolve({ err, accessToken, refreshToken, params })
}
)
})
}
async function refreshGoogleAccessToken(db, config, refreshToken) {
async function refreshGoogleAccessToken(
db: any,
config: any,
refreshToken: any
) {
let callbackUrl = await google.getCallbackUrl(db, config)
let strategy
try {
strategy = await google.strategyFactory(config, callbackUrl)
} catch (err) {
} catch (err: any) {
console.error(err)
throw new Error("Error constructing OIDC refresh strategy", err)
throw new Error(
`Error constructing OIDC refresh strategy: message=${err.message}`
)
}
refresh.use(strategy)
return new Promise(resolve => {
refresh.requestNewAccessToken(
Configs.GOOGLE,
Config.GOOGLE,
refreshToken,
(err, accessToken, refreshToken, params) => {
(err: any, accessToken: string, refreshToken: string, params: any) => {
resolve({ err, accessToken, refreshToken, params })
}
)
})
}
async function refreshOAuthToken(refreshToken, configType, configId) {
export async function refreshOAuthToken(
refreshToken: string,
configType: string,
configId: string
) {
const db = getGlobalDB()
const config = await getScopedConfig(db, {
@ -111,9 +146,9 @@ async function refreshOAuthToken(refreshToken, configType, configId) {
let chosenConfig = {}
let refreshResponse
if (configType === Configs.OIDC) {
if (configType === Config.OIDC) {
// configId - retrieved from cookie.
chosenConfig = config.configs.filter(c => c.uuid === configId)[0]
chosenConfig = config.configs.filter((c: any) => c.uuid === configId)[0]
if (!chosenConfig) {
throw new Error("Invalid OIDC configuration")
}
@ -134,7 +169,7 @@ async function refreshOAuthToken(refreshToken, configType, configId) {
return refreshResponse
}
async function updateUserOAuth(userId, oAuthConfig) {
export async function updateUserOAuth(userId: string, oAuthConfig: any) {
const details = {
accessToken: oAuthConfig.accessToken,
refreshToken: oAuthConfig.refreshToken,
@ -161,24 +196,3 @@ async function updateUserOAuth(userId, oAuthConfig) {
console.error("Could not update OAuth details for current user", e)
}
}
module.exports = {
buildAuthMiddleware: authenticated,
passport,
google,
oidc,
jwt: require("jsonwebtoken"),
buildTenancyMiddleware: tenancy,
buildAppTenancyMiddleware: appTenancy,
auditLog,
authError,
buildCsrfMiddleware: csrf,
internalApi,
refreshOAuthToken,
updateUserOAuth,
ssoCallbackUrl,
adminOnly,
builderOnly,
builderOrAdmin,
joiValidator,
}
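
The module.exports block above is the removed CommonJS surface; the same functions are now named exports. A minimal sketch of chaining the two OAuth helpers, assuming this import path and that a tenant context is already active (refreshOAuthToken reads the global DB from context); the "google" literal mirrors Config.GOOGLE and the empty configId only matters for OIDC lookups.

// hedged sketch: import path assumed, must run inside a tenant context
import { refreshOAuthToken, updateUserOAuth } from "@budibase/backend-core"

async function renewGoogleToken(userId: string, refreshToken: string) {
  // the third argument (configId) is only used to pick an OIDC config
  const response: any = await refreshOAuthToken(refreshToken, "google", "")
  if (!response.err) {
    await updateUserOAuth(userId, {
      accessToken: response.accessToken,
      refreshToken: response.refreshToken,
    })
  }
  return response
}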

View File

@ -0,0 +1 @@
export * from "./auth"

View File

@ -1,6 +1,6 @@
const redis = require("../redis/init")
const { doWithDB } = require("../db")
const { DocumentTypes } = require("../db/constants")
import { getAppClient } from "../redis/init"
import { doWithDB, DocumentType } from "../db"
import { Database } from "@budibase/types"
const AppState = {
INVALID: "invalid",
@ -10,17 +10,17 @@ const EXPIRY_SECONDS = 3600
/**
* The default populate app metadata function
*/
const populateFromDB = async appId => {
async function populateFromDB(appId: string) {
return doWithDB(
appId,
db => {
return db.get(DocumentTypes.APP_METADATA)
(db: Database) => {
return db.get(DocumentType.APP_METADATA)
},
{ skip_setup: true }
)
}
const isInvalid = metadata => {
function isInvalid(metadata?: { state: string }) {
return !metadata || metadata.state === AppState.INVALID
}
@ -31,15 +31,15 @@ const isInvalid = metadata => {
* @param {string} appId the id of the app to get metadata from.
* @returns {object} the app metadata.
*/
exports.getAppMetadata = async appId => {
const client = await redis.getAppClient()
export async function getAppMetadata(appId: string) {
const client = await getAppClient()
// try cache
let metadata = await client.get(appId)
if (!metadata) {
let expiry = EXPIRY_SECONDS
let expiry: number | undefined = EXPIRY_SECONDS
try {
metadata = await populateFromDB(appId)
} catch (err) {
} catch (err: any) {
// the app DB was left around without metadata, so treat it as invalid
if (err && err.status === 404) {
metadata = { state: AppState.INVALID }
@ -74,11 +74,11 @@ exports.getAppMetadata = async appId => {
* @param newMetadata {object|undefined} optional - can simply provide the new metadata to update with.
* @return {Promise<void>} will respond with success when cache is updated.
*/
exports.invalidateAppMetadata = async (appId, newMetadata = null) => {
export async function invalidateAppMetadata(appId: string, newMetadata?: any) {
if (!appId) {
throw "Cannot invalidate if no app ID provided."
}
const client = await redis.getAppClient()
const client = await getAppClient()
await client.delete(appId)
if (newMetadata) {
await client.store(appId, newMetadata, EXPIRY_SECONDS)
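
A short usage sketch for the converted cache (relative import path assumed): getAppMetadata falls back to CouchDB on a miss, and invalidateAppMetadata can re-prime Redis with the fresh document.

import { getAppMetadata, invalidateAppMetadata } from "../cache/appMetadata"

async function renameApp(appId: string, name: string) {
  const metadata: any = await getAppMetadata(appId)
  const updated = { ...metadata, name }
  // persisting `updated` to the app DB is omitted here
  await invalidateAppMetadata(appId, updated) // stores the new copy in Redis
  return updated
}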

View File

@ -1,6 +1,6 @@
import { getTenantId } from "../../context"
import redis from "../../redis/init"
import RedisWrapper from "../../redis"
import * as redis from "../../redis/init"
import { Client } from "../../redis"
function generateTenantKey(key: string) {
const tenantId = getTenantId()
@ -8,9 +8,9 @@ function generateTenantKey(key: string) {
}
export = class BaseCache {
client: RedisWrapper | undefined
client: Client | undefined
constructor(client: RedisWrapper | undefined = undefined) {
constructor(client: Client | undefined = undefined) {
this.client = client
}

View File

@ -1,29 +0,0 @@
const BaseCache = require("./base")
const GENERIC = new BaseCache()
exports.CacheKeys = {
CHECKLIST: "checklist",
INSTALLATION: "installation",
ANALYTICS_ENABLED: "analyticsEnabled",
UNIQUE_TENANT_ID: "uniqueTenantId",
EVENTS: "events",
BACKFILL_METADATA: "backfillMetadata",
}
exports.TTL = {
ONE_MINUTE: 60,
ONE_HOUR: 3600,
ONE_DAY: 86400,
}
function performExport(funcName) {
return (...args) => GENERIC[funcName](...args)
}
exports.keys = performExport("keys")
exports.get = performExport("get")
exports.store = performExport("store")
exports.delete = performExport("delete")
exports.withCache = performExport("withCache")
exports.bustCache = performExport("bustCache")

View File

@ -0,0 +1,30 @@
const BaseCache = require("./base")
const GENERIC = new BaseCache()
export enum CacheKey {
CHECKLIST = "checklist",
INSTALLATION = "installation",
ANALYTICS_ENABLED = "analyticsEnabled",
UNIQUE_TENANT_ID = "uniqueTenantId",
EVENTS = "events",
BACKFILL_METADATA = "backfillMetadata",
EVENTS_RATE_LIMIT = "eventsRateLimit",
}
export enum TTL {
ONE_MINUTE = 60,
ONE_HOUR = 3600,
ONE_DAY = 86400,
}
function performExport(funcName: string) {
return (...args: any) => GENERIC[funcName](...args)
}
export const keys = performExport("keys")
export const get = performExport("get")
export const store = performExport("store")
export const destroy = performExport("delete")
export const withCache = performExport("withCache")
export const bustCache = performExport("bustCache")

View File

@ -0,0 +1,5 @@
export * as generic from "./generic"
export * as user from "./user"
export * as app from "./appMetadata"
export * as writethrough from "./writethrough"
export * from "./generic"

View File

@ -1,6 +1,6 @@
require("../../../tests/utilities/TestConfiguration")
require("../../../tests")
const { Writethrough } = require("../writethrough")
const { dangerousGetDB } = require("../../db")
const { getDB } = require("../../db")
const tk = require("timekeeper")
const START_DATE = Date.now()
@ -8,8 +8,8 @@ tk.freeze(START_DATE)
const DELAY = 5000
const db = dangerousGetDB("test")
const db2 = dangerousGetDB("test2")
const db = getDB("test")
const db2 = getDB("test2")
const writethrough = new Writethrough(db, DELAY)
const writethrough2 = new Writethrough(db2, DELAY)
describe("writethrough", () => {

View File

@ -1,15 +1,16 @@
const redis = require("../redis/init")
const { getTenantId, lookupTenantId, doWithGlobalDB } = require("../tenancy")
const env = require("../environment")
const accounts = require("../cloud/accounts")
import * as redis from "../redis/init"
import { getTenantId, lookupTenantId, doWithGlobalDB } from "../tenancy"
import env from "../environment"
import * as accounts from "../cloud/accounts"
import { Database } from "@budibase/types"
const EXPIRY_SECONDS = 3600
/**
* The default populate user function
*/
const populateFromDB = async (userId, tenantId) => {
const user = await doWithGlobalDB(tenantId, db => db.get(userId))
async function populateFromDB(userId: string, tenantId: string) {
const user = await doWithGlobalDB(tenantId, (db: Database) => db.get(userId))
user.budibaseAccess = true
if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) {
const account = await accounts.getAccount(user.email)
@ -31,7 +32,11 @@ const populateFromDB = async (userId, tenantId) => {
* @param {*} populateUser function to provide the user for re-caching, defaults to CouchDB
* @returns
*/
exports.getUser = async (userId, tenantId = null, populateUser = null) => {
export async function getUser(
userId: string,
tenantId?: string,
populateUser?: any
) {
if (!populateUser) {
populateUser = populateFromDB
}
@ -47,7 +52,7 @@ exports.getUser = async (userId, tenantId = null, populateUser = null) => {
let user = await client.get(userId)
if (!user) {
user = await populateUser(userId, tenantId)
client.store(userId, user, EXPIRY_SECONDS)
await client.store(userId, user, EXPIRY_SECONDS)
}
if (user && !user.tenantId && tenantId) {
// make sure the tenant ID is always correct/set
@ -56,7 +61,7 @@ exports.getUser = async (userId, tenantId = null, populateUser = null) => {
return user
}
exports.invalidateUser = async userId => {
export async function invalidateUser(userId: string) {
const client = await redis.getUserClient()
await client.delete(userId)
}
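
A hedged example of the converted user cache: tests can pass a custom populate function so a cache miss never reaches CouchDB. The import path and stub user shape are assumptions.

import { getUser, invalidateUser } from "../cache/user"

async function fetchStubUser(userId: string, tenantId: string) {
  const populate = async () => ({ _id: userId, email: "stub@example.com" })
  const user = await getUser(userId, tenantId, populate)
  await invalidateUser(userId) // drop the cached copy again
  return user
}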

View File

@ -1,6 +1,7 @@
import BaseCache from "./base"
import { getWritethroughClient } from "../redis/init"
import { logWarn } from "../logging"
import { Database } from "@budibase/types"
const DEFAULT_WRITE_RATE_MS = 10000
let CACHE: BaseCache | null = null
@ -18,7 +19,7 @@ async function getCache() {
return CACHE
}
function makeCacheKey(db: PouchDB.Database, key: string) {
function makeCacheKey(db: Database, key: string) {
return db.name + key
}
@ -27,7 +28,7 @@ function makeCacheItem(doc: any, lastWrite: number | null = null): CacheItem {
}
export async function put(
db: PouchDB.Database,
db: Database,
doc: any,
writeRateMs: number = DEFAULT_WRITE_RATE_MS
) {
@ -63,7 +64,7 @@ export async function put(
return { ok: true, id: output._id, rev: output._rev }
}
export async function get(db: PouchDB.Database, id: string): Promise<any> {
export async function get(db: Database, id: string): Promise<any> {
const cache = await getCache()
const cacheKey = makeCacheKey(db, id)
let cacheItem: CacheItem = await cache.get(cacheKey)
@ -76,7 +77,7 @@ export async function get(db: PouchDB.Database, id: string): Promise<any> {
}
export async function remove(
db: PouchDB.Database,
db: Database,
docOrId: any,
rev?: any
): Promise<void> {
@ -94,13 +95,10 @@ export async function remove(
}
export class Writethrough {
db: PouchDB.Database
db: Database
writeRateMs: number
constructor(
db: PouchDB.Database,
writeRateMs: number = DEFAULT_WRITE_RATE_MS
) {
constructor(db: Database, writeRateMs: number = DEFAULT_WRITE_RATE_MS) {
this.db = db
this.writeRateMs = writeRateMs
}
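
A sketch of the write-through pattern these functions implement (import paths assumed): put lands in Redis immediately and only flushes to CouchDB once writeRateMs has elapsed since the last write.

import { getDB } from "../db"
import { put, get } from "../cache/writethrough"

async function recordHeartbeat() {
  const db = getDB("monitoring") // hypothetical database name
  await put(db, { _id: "heartbeat", lastSeen: Date.now() }, 10000)
  return get(db, "heartbeat") // served from the cache while fresh
}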

View File

@ -1,6 +1,6 @@
import API from "./api"
import env from "../environment"
import { Headers } from "../constants"
import { Header } from "../constants"
import { CloudAccount } from "@budibase/types"
const api = new API(env.ACCOUNT_PORTAL_URL)
@ -14,7 +14,7 @@ export const getAccount = async (
const response = await api.post(`/api/accounts/search`, {
body: payload,
headers: {
[Headers.API_KEY]: env.ACCOUNT_PORTAL_API_KEY,
[Header.API_KEY]: env.ACCOUNT_PORTAL_API_KEY,
},
})
@ -35,7 +35,7 @@ export const getAccountByTenantId = async (
const response = await api.post(`/api/accounts/search`, {
body: payload,
headers: {
[Headers.API_KEY]: env.ACCOUNT_PORTAL_API_KEY,
[Header.API_KEY]: env.ACCOUNT_PORTAL_API_KEY,
},
})
@ -50,7 +50,7 @@ export const getAccountByTenantId = async (
export const getStatus = async () => {
const response = await api.get(`/api/status`, {
headers: {
[Headers.API_KEY]: env.ACCOUNT_PORTAL_API_KEY,
[Header.API_KEY]: env.ACCOUNT_PORTAL_API_KEY,
},
})
const json = await response.json()

View File

@ -1,42 +0,0 @@
const fetch = require("node-fetch")
class API {
constructor(host) {
this.host = host
}
apiCall =
method =>
async (url = "", options = {}) => {
if (!options.headers) {
options.headers = {}
}
if (!options.headers["Content-Type"]) {
options.headers = {
"Content-Type": "application/json",
Accept: "application/json",
...options.headers,
}
}
let json = options.headers["Content-Type"] === "application/json"
const requestOptions = {
method: method,
body: json ? JSON.stringify(options.body) : options.body,
headers: options.headers,
// TODO: See if this is necessary
credentials: "include",
}
return await fetch(`${this.host}${url}`, requestOptions)
}
post = this.apiCall("POST")
get = this.apiCall("GET")
patch = this.apiCall("PATCH")
del = this.apiCall("DELETE")
put = this.apiCall("PUT")
}
module.exports = API

View File

@ -0,0 +1,55 @@
import fetch from "node-fetch"
export = class API {
host: string
constructor(host: string) {
this.host = host
}
async apiCall(method: string, url: string, options: any = {}) {
if (!options.headers) {
options.headers = {}
}
if (!options.headers["Content-Type"]) {
options.headers = {
"Content-Type": "application/json",
Accept: "application/json",
...options.headers,
}
}
let json = options.headers["Content-Type"] === "application/json"
const requestOptions = {
method: method,
body: json ? JSON.stringify(options.body) : options.body,
headers: options.headers,
// TODO: See if this is necessary
credentials: "include",
}
return await fetch(`${this.host}${url}`, requestOptions)
}
async post(url: string, options?: any) {
return this.apiCall("POST", url, options)
}
async get(url: string, options?: any) {
return this.apiCall("GET", url, options)
}
async patch(url: string, options?: any) {
return this.apiCall("PATCH", url, options)
}
async del(url: string, options?: any) {
return this.apiCall("DELETE", url, options)
}
async put(url: string, options?: any) {
return this.apiCall("PUT", url, options)
}
}
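
Usage mirrors the calls in the accounts client above; the host below is a placeholder. Bodies are JSON-encoded automatically unless a different Content-Type is supplied.

import API from "./api"

async function checkStatus() {
  const api = new API("https://portal.example.com")
  const response = await api.get("/api/status")
  return response.json()
}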

View File

@ -1,650 +0,0 @@
const util = require("util")
const assert = require("assert")
const wrapEmitter = require("emitter-listener")
const async_hooks = require("async_hooks")
const CONTEXTS_SYMBOL = "cls@contexts"
const ERROR_SYMBOL = "error@context"
const DEBUG_CLS_HOOKED = process.env.DEBUG_CLS_HOOKED
let currentUid = -1
module.exports = {
getNamespace: getNamespace,
createNamespace: createNamespace,
destroyNamespace: destroyNamespace,
reset: reset,
ERROR_SYMBOL: ERROR_SYMBOL,
}
function Namespace(name) {
this.name = name
// changed in 2.7: no default context
this.active = null
this._set = []
this.id = null
this._contexts = new Map()
this._indent = 0
this._hook = null
}
Namespace.prototype.set = function set(key, value) {
if (!this.active) {
throw new Error(
"No context available. ns.run() or ns.bind() must be called first."
)
}
this.active[key] = value
if (DEBUG_CLS_HOOKED) {
const indentStr = " ".repeat(this._indent < 0 ? 0 : this._indent)
debug2(
indentStr +
"CONTEXT-SET KEY:" +
key +
"=" +
value +
" in ns:" +
this.name +
" currentUid:" +
currentUid +
" active:" +
util.inspect(this.active, { showHidden: true, depth: 2, colors: true })
)
}
return value
}
Namespace.prototype.get = function get(key) {
if (!this.active) {
if (DEBUG_CLS_HOOKED) {
const asyncHooksCurrentId = async_hooks.currentId()
const triggerId = async_hooks.triggerAsyncId()
const indentStr = " ".repeat(this._indent < 0 ? 0 : this._indent)
debug2(
`${indentStr}CONTEXT-GETTING KEY NO ACTIVE NS: (${this.name}) ${key}=undefined currentUid:${currentUid} asyncHooksCurrentId:${asyncHooksCurrentId} triggerId:${triggerId} len:${this._set.length}`
)
}
return undefined
}
if (DEBUG_CLS_HOOKED) {
const asyncHooksCurrentId = async_hooks.executionAsyncId()
const triggerId = async_hooks.triggerAsyncId()
const indentStr = " ".repeat(this._indent < 0 ? 0 : this._indent)
debug2(
indentStr +
"CONTEXT-GETTING KEY:" +
key +
"=" +
this.active[key] +
" (" +
this.name +
") currentUid:" +
currentUid +
" active:" +
util.inspect(this.active, { showHidden: true, depth: 2, colors: true })
)
debug2(
`${indentStr}CONTEXT-GETTING KEY: (${this.name}) ${key}=${
this.active[key]
} currentUid:${currentUid} asyncHooksCurrentId:${asyncHooksCurrentId} triggerId:${triggerId} len:${
this._set.length
} active:${util.inspect(this.active)}`
)
}
return this.active[key]
}
Namespace.prototype.createContext = function createContext() {
// Prototype inherit existing context if created a new child context within existing context.
let context = Object.create(this.active ? this.active : Object.prototype)
context._ns_name = this.name
context.id = currentUid
if (DEBUG_CLS_HOOKED) {
const asyncHooksCurrentId = async_hooks.executionAsyncId()
const triggerId = async_hooks.triggerAsyncId()
const indentStr = " ".repeat(this._indent < 0 ? 0 : this._indent)
debug2(
`${indentStr}CONTEXT-CREATED Context: (${
this.name
}) currentUid:${currentUid} asyncHooksCurrentId:${asyncHooksCurrentId} triggerId:${triggerId} len:${
this._set.length
} context:${util.inspect(context, {
showHidden: true,
depth: 2,
colors: true,
})}`
)
}
return context
}
Namespace.prototype.run = function run(fn) {
let context = this.createContext()
this.enter(context)
try {
if (DEBUG_CLS_HOOKED) {
const triggerId = async_hooks.triggerAsyncId()
const asyncHooksCurrentId = async_hooks.executionAsyncId()
const indentStr = " ".repeat(this._indent < 0 ? 0 : this._indent)
debug2(
`${indentStr}CONTEXT-RUN BEGIN: (${
this.name
}) currentUid:${currentUid} triggerId:${triggerId} asyncHooksCurrentId:${asyncHooksCurrentId} len:${
this._set.length
} context:${util.inspect(context)}`
)
}
fn(context)
return context
} catch (exception) {
if (exception) {
exception[ERROR_SYMBOL] = context
}
throw exception
} finally {
if (DEBUG_CLS_HOOKED) {
const triggerId = async_hooks.triggerAsyncId()
const asyncHooksCurrentId = async_hooks.executionAsyncId()
const indentStr = " ".repeat(this._indent < 0 ? 0 : this._indent)
debug2(
`${indentStr}CONTEXT-RUN END: (${
this.name
}) currentUid:${currentUid} triggerId:${triggerId} asyncHooksCurrentId:${asyncHooksCurrentId} len:${
this._set.length
} ${util.inspect(context)}`
)
}
this.exit(context)
}
}
Namespace.prototype.runAndReturn = function runAndReturn(fn) {
let value
this.run(function (context) {
value = fn(context)
})
return value
}
/**
* Uses global Promise and assumes Promise is cls friendly or wrapped already.
* @param {function} fn
* @returns {*}
*/
Namespace.prototype.runPromise = function runPromise(fn) {
let context = this.createContext()
this.enter(context)
let promise = fn(context)
if (!promise || !promise.then || !promise.catch) {
throw new Error("fn must return a promise.")
}
if (DEBUG_CLS_HOOKED) {
debug2(
"CONTEXT-runPromise BEFORE: (" +
this.name +
") currentUid:" +
currentUid +
" len:" +
this._set.length +
" " +
util.inspect(context)
)
}
return promise
.then(result => {
if (DEBUG_CLS_HOOKED) {
debug2(
"CONTEXT-runPromise AFTER then: (" +
this.name +
") currentUid:" +
currentUid +
" len:" +
this._set.length +
" " +
util.inspect(context)
)
}
this.exit(context)
return result
})
.catch(err => {
err[ERROR_SYMBOL] = context
if (DEBUG_CLS_HOOKED) {
debug2(
"CONTEXT-runPromise AFTER catch: (" +
this.name +
") currentUid:" +
currentUid +
" len:" +
this._set.length +
" " +
util.inspect(context)
)
}
this.exit(context)
throw err
})
}
Namespace.prototype.bind = function bindFactory(fn, context) {
if (!context) {
if (!this.active) {
context = this.createContext()
} else {
context = this.active
}
}
let self = this
return function clsBind() {
self.enter(context)
try {
return fn.apply(this, arguments)
} catch (exception) {
if (exception) {
exception[ERROR_SYMBOL] = context
}
throw exception
} finally {
self.exit(context)
}
}
}
Namespace.prototype.enter = function enter(context) {
assert.ok(context, "context must be provided for entering")
if (DEBUG_CLS_HOOKED) {
const asyncHooksCurrentId = async_hooks.executionAsyncId()
const triggerId = async_hooks.triggerAsyncId()
const indentStr = " ".repeat(this._indent < 0 ? 0 : this._indent)
debug2(
`${indentStr}CONTEXT-ENTER: (${
this.name
}) currentUid:${currentUid} triggerId:${triggerId} asyncHooksCurrentId:${asyncHooksCurrentId} len:${
this._set.length
} ${util.inspect(context)}`
)
}
this._set.push(this.active)
this.active = context
}
Namespace.prototype.exit = function exit(context) {
assert.ok(context, "context must be provided for exiting")
if (DEBUG_CLS_HOOKED) {
const asyncHooksCurrentId = async_hooks.executionAsyncId()
const triggerId = async_hooks.triggerAsyncId()
const indentStr = " ".repeat(this._indent < 0 ? 0 : this._indent)
debug2(
`${indentStr}CONTEXT-EXIT: (${
this.name
}) currentUid:${currentUid} triggerId:${triggerId} asyncHooksCurrentId:${asyncHooksCurrentId} len:${
this._set.length
} ${util.inspect(context)}`
)
}
// Fast path for most exits that are at the top of the stack
if (this.active === context) {
assert.ok(this._set.length, "can't remove top context")
this.active = this._set.pop()
return
}
// Fast search in the stack using lastIndexOf
let index = this._set.lastIndexOf(context)
if (index < 0) {
if (DEBUG_CLS_HOOKED) {
debug2(
"??ERROR?? context exiting but not entered - ignoring: " +
util.inspect(context)
)
}
assert.ok(
index >= 0,
"context not currently entered; can't exit. \n" +
util.inspect(this) +
"\n" +
util.inspect(context)
)
} else {
assert.ok(index, "can't remove top context")
this._set.splice(index, 1)
}
}
Namespace.prototype.bindEmitter = function bindEmitter(emitter) {
assert.ok(
emitter.on && emitter.addListener && emitter.emit,
"can only bind real EEs"
)
let namespace = this
let thisSymbol = "context@" + this.name
// Capture the context active at the time the emitter is bound.
function attach(listener) {
if (!listener) {
return
}
if (!listener[CONTEXTS_SYMBOL]) {
listener[CONTEXTS_SYMBOL] = Object.create(null)
}
listener[CONTEXTS_SYMBOL][thisSymbol] = {
namespace: namespace,
context: namespace.active,
}
}
// At emit time, bind the listener within the correct context.
function bind(unwrapped) {
if (!(unwrapped && unwrapped[CONTEXTS_SYMBOL])) {
return unwrapped
}
let wrapped = unwrapped
let unwrappedContexts = unwrapped[CONTEXTS_SYMBOL]
Object.keys(unwrappedContexts).forEach(function (name) {
let thunk = unwrappedContexts[name]
wrapped = thunk.namespace.bind(wrapped, thunk.context)
})
return wrapped
}
wrapEmitter(emitter, attach, bind)
}
/**
* If an error comes out of a namespace, it will have a context attached to it.
* This function knows how to find it.
*
* @param {Error} exception Possibly annotated error.
*/
Namespace.prototype.fromException = function fromException(exception) {
return exception[ERROR_SYMBOL]
}
function getNamespace(name) {
return process.namespaces[name]
}
function createNamespace(name) {
assert.ok(name, "namespace must be given a name.")
if (DEBUG_CLS_HOOKED) {
debug2(`NS-CREATING NAMESPACE (${name})`)
}
let namespace = new Namespace(name)
namespace.id = currentUid
const hook = async_hooks.createHook({
init(asyncId, type, triggerId, resource) {
currentUid = async_hooks.executionAsyncId()
//CHAIN Parent's Context onto child if none exists. This is needed to pass net-events.spec
// let initContext = namespace.active;
// if(!initContext && triggerId) {
// let parentContext = namespace._contexts.get(triggerId);
// if (parentContext) {
// namespace.active = parentContext;
// namespace._contexts.set(currentUid, parentContext);
// if (DEBUG_CLS_HOOKED) {
// const indentStr = ' '.repeat(namespace._indent < 0 ? 0 : namespace._indent);
// debug2(`${indentStr}INIT [${type}] (${name}) WITH PARENT CONTEXT asyncId:${asyncId} currentUid:${currentUid} triggerId:${triggerId} active:${util.inspect(namespace.active, true)} resource:${resource}`);
// }
// } else if (DEBUG_CLS_HOOKED) {
// const indentStr = ' '.repeat(namespace._indent < 0 ? 0 : namespace._indent);
// debug2(`${indentStr}INIT [${type}] (${name}) MISSING CONTEXT asyncId:${asyncId} currentUid:${currentUid} triggerId:${triggerId} active:${util.inspect(namespace.active, true)} resource:${resource}`);
// }
// }else {
// namespace._contexts.set(currentUid, namespace.active);
// if (DEBUG_CLS_HOOKED) {
// const indentStr = ' '.repeat(namespace._indent < 0 ? 0 : namespace._indent);
// debug2(`${indentStr}INIT [${type}] (${name}) asyncId:${asyncId} currentUid:${currentUid} triggerId:${triggerId} active:${util.inspect(namespace.active, true)} resource:${resource}`);
// }
// }
if (namespace.active) {
namespace._contexts.set(asyncId, namespace.active)
if (DEBUG_CLS_HOOKED) {
const indentStr = " ".repeat(
namespace._indent < 0 ? 0 : namespace._indent
)
debug2(
`${indentStr}INIT [${type}] (${name}) asyncId:${asyncId} currentUid:${currentUid} triggerId:${triggerId} active:${util.inspect(
namespace.active,
{ showHidden: true, depth: 2, colors: true }
)} resource:${resource}`
)
}
} else if (currentUid === 0) {
// CurrentId will be 0 when triggered from C++. Promise events
// https://github.com/nodejs/node/blob/master/doc/api/async_hooks.md#triggerid
const triggerId = async_hooks.triggerAsyncId()
const triggerIdContext = namespace._contexts.get(triggerId)
if (triggerIdContext) {
namespace._contexts.set(asyncId, triggerIdContext)
if (DEBUG_CLS_HOOKED) {
const indentStr = " ".repeat(
namespace._indent < 0 ? 0 : namespace._indent
)
debug2(
`${indentStr}INIT USING CONTEXT FROM TRIGGERID [${type}] (${name}) asyncId:${asyncId} currentUid:${currentUid} triggerId:${triggerId} active:${util.inspect(
namespace.active,
{ showHidden: true, depth: 2, colors: true }
)} resource:${resource}`
)
}
} else if (DEBUG_CLS_HOOKED) {
const indentStr = " ".repeat(
namespace._indent < 0 ? 0 : namespace._indent
)
debug2(
`${indentStr}INIT MISSING CONTEXT [${type}] (${name}) asyncId:${asyncId} currentUid:${currentUid} triggerId:${triggerId} active:${util.inspect(
namespace.active,
{ showHidden: true, depth: 2, colors: true }
)} resource:${resource}`
)
}
}
if (DEBUG_CLS_HOOKED && type === "PROMISE") {
debug2(util.inspect(resource, { showHidden: true }))
const parentId = resource.parentId
const indentStr = " ".repeat(
namespace._indent < 0 ? 0 : namespace._indent
)
debug2(
`${indentStr}INIT RESOURCE-PROMISE [${type}] (${name}) parentId:${parentId} asyncId:${asyncId} currentUid:${currentUid} triggerId:${triggerId} active:${util.inspect(
namespace.active,
{ showHidden: true, depth: 2, colors: true }
)} resource:${resource}`
)
}
},
before(asyncId) {
currentUid = async_hooks.executionAsyncId()
let context
/*
if(currentUid === 0){
// CurrentId will be 0 when triggered from C++. Promise events
// https://github.com/nodejs/node/blob/master/doc/api/async_hooks.md#triggerid
//const triggerId = async_hooks.triggerAsyncId();
context = namespace._contexts.get(asyncId); // || namespace._contexts.get(triggerId);
}else{
context = namespace._contexts.get(currentUid);
}
*/
//HACK to work with promises until they are fixed in node > 8.1.1
context =
namespace._contexts.get(asyncId) || namespace._contexts.get(currentUid)
if (context) {
if (DEBUG_CLS_HOOKED) {
const triggerId = async_hooks.triggerAsyncId()
const indentStr = " ".repeat(
namespace._indent < 0 ? 0 : namespace._indent
)
debug2(
`${indentStr}BEFORE (${name}) asyncId:${asyncId} currentUid:${currentUid} triggerId:${triggerId} active:${util.inspect(
namespace.active,
{ showHidden: true, depth: 2, colors: true }
)} context:${util.inspect(context)}`
)
namespace._indent += 2
}
namespace.enter(context)
} else if (DEBUG_CLS_HOOKED) {
const triggerId = async_hooks.triggerAsyncId()
const indentStr = " ".repeat(
namespace._indent < 0 ? 0 : namespace._indent
)
debug2(
`${indentStr}BEFORE MISSING CONTEXT (${name}) asyncId:${asyncId} currentUid:${currentUid} triggerId:${triggerId} active:${util.inspect(
namespace.active,
{ showHidden: true, depth: 2, colors: true }
)} namespace._contexts:${util.inspect(namespace._contexts, {
showHidden: true,
depth: 2,
colors: true,
})}`
)
namespace._indent += 2
}
},
after(asyncId) {
currentUid = async_hooks.executionAsyncId()
let context // = namespace._contexts.get(currentUid);
/*
if(currentUid === 0){
// CurrentId will be 0 when triggered from C++. Promise events
// https://github.com/nodejs/node/blob/master/doc/api/async_hooks.md#triggerid
//const triggerId = async_hooks.triggerAsyncId();
context = namespace._contexts.get(asyncId); // || namespace._contexts.get(triggerId);
}else{
context = namespace._contexts.get(currentUid);
}
*/
//HACK to work with promises until they are fixed in node > 8.1.1
context =
namespace._contexts.get(asyncId) || namespace._contexts.get(currentUid)
if (context) {
if (DEBUG_CLS_HOOKED) {
const triggerId = async_hooks.triggerAsyncId()
namespace._indent -= 2
const indentStr = " ".repeat(
namespace._indent < 0 ? 0 : namespace._indent
)
debug2(
`${indentStr}AFTER (${name}) asyncId:${asyncId} currentUid:${currentUid} triggerId:${triggerId} active:${util.inspect(
namespace.active,
{ showHidden: true, depth: 2, colors: true }
)} context:${util.inspect(context)}`
)
}
namespace.exit(context)
} else if (DEBUG_CLS_HOOKED) {
const triggerId = async_hooks.triggerAsyncId()
namespace._indent -= 2
const indentStr = " ".repeat(
namespace._indent < 0 ? 0 : namespace._indent
)
debug2(
`${indentStr}AFTER MISSING CONTEXT (${name}) asyncId:${asyncId} currentUid:${currentUid} triggerId:${triggerId} active:${util.inspect(
namespace.active,
{ showHidden: true, depth: 2, colors: true }
)} context:${util.inspect(context)}`
)
}
},
destroy(asyncId) {
currentUid = async_hooks.executionAsyncId()
if (DEBUG_CLS_HOOKED) {
const triggerId = async_hooks.triggerAsyncId()
const indentStr = " ".repeat(
namespace._indent < 0 ? 0 : namespace._indent
)
debug2(
`${indentStr}DESTROY (${name}) currentUid:${currentUid} asyncId:${asyncId} triggerId:${triggerId} active:${util.inspect(
namespace.active,
{ showHidden: true, depth: 2, colors: true }
)} context:${util.inspect(namespace._contexts.get(currentUid))}`
)
}
namespace._contexts.delete(asyncId)
},
})
hook.enable()
namespace._hook = hook
process.namespaces[name] = namespace
return namespace
}
function destroyNamespace(name) {
let namespace = getNamespace(name)
assert.ok(namespace, "can't delete nonexistent namespace! \"" + name + '"')
assert.ok(
namespace.id,
"don't assign to process.namespaces directly! " + util.inspect(namespace)
)
namespace._hook.disable()
namespace._contexts = null
process.namespaces[name] = null
}
function reset() {
// must unregister async listeners
if (process.namespaces) {
Object.keys(process.namespaces).forEach(function (name) {
destroyNamespace(name)
})
}
process.namespaces = Object.create(null)
}
process.namespaces = process.namespaces || {}
//const fs = require('fs');
function debug2(...args) {
if (DEBUG_CLS_HOOKED) {
//fs.writeSync(1, `${util.format(...args)}\n`);
process._rawDebug(`${util.format(...args)}`)
}
}
/*function getFunctionName(fn) {
if (!fn) {
return fn;
}
if (typeof fn === 'function') {
if (fn.name) {
return fn.name;
}
return (fn.toString().trim().match(/^function\s*([^\s(]+)/) || [])[1];
} else if (fn.constructor && fn.constructor.name) {
return fn.constructor.name;
}
}*/

View File

@ -1,43 +0,0 @@
exports.UserStatus = {
ACTIVE: "active",
INACTIVE: "inactive",
}
exports.Cookies = {
CurrentApp: "budibase:currentapp",
Auth: "budibase:auth",
Init: "budibase:init",
DatasourceAuth: "budibase:datasourceauth",
OIDC_CONFIG: "budibase:oidc:config",
}
exports.Headers = {
API_KEY: "x-budibase-api-key",
LICENSE_KEY: "x-budibase-license-key",
API_VER: "x-budibase-api-version",
APP_ID: "x-budibase-app-id",
TYPE: "x-budibase-type",
PREVIEW_ROLE: "x-budibase-role",
TENANT_ID: "x-budibase-tenant-id",
TOKEN: "x-budibase-token",
CSRF_TOKEN: "x-csrf-token",
}
exports.GlobalRoles = {
OWNER: "owner",
ADMIN: "admin",
BUILDER: "builder",
WORKSPACE_MANAGER: "workspace_manager",
}
exports.Configs = {
SETTINGS: "settings",
ACCOUNT: "account",
SMTP: "smtp",
GOOGLE: "google",
OIDC: "oidc",
OIDC_LOGOS: "logos_oidc",
}
exports.MAX_VALID_DATE = new Date(2147483647000)
exports.DEFAULT_TENANT_ID = "default"

View File

@ -4,13 +4,13 @@ export const UNICODE_MAX = "\ufff0"
/**
* Can be used to create a few different forms of querying a view.
*/
export enum AutomationViewModes {
export enum AutomationViewMode {
ALL = "all",
AUTOMATION = "automation",
STATUS = "status",
}
export enum ViewNames {
export enum ViewName {
USER_BY_APP = "by_app",
USER_BY_EMAIL = "by_email2",
BY_API_KEY = "by_api_key",
@ -18,16 +18,24 @@ export enum ViewNames {
LINK = "by_link",
ROUTING = "screen_routes",
AUTOMATION_LOGS = "automation_logs",
ACCOUNT_BY_EMAIL = "account_by_email",
PLATFORM_USERS_LOWERCASE = "platform_users_lowercase",
USER_BY_GROUP = "by_group_user",
APP_BACKUP_BY_TRIGGER = "by_trigger",
}
export const DeprecatedViews = {
[ViewNames.USER_BY_EMAIL]: [
[ViewName.USER_BY_EMAIL]: [
// removed due to inaccuracy in view doc filter logic
"by_email",
],
}
export enum DocumentTypes {
export enum InternalTable {
USER_METADATA = "ta_users",
}
export enum DocumentType {
USER = "us",
GROUP = "gr",
WORKSPACE = "workspace",
@ -41,6 +49,25 @@ export enum DocumentTypes {
MIGRATIONS = "migrations",
DEV_INFO = "devinfo",
AUTOMATION_LOG = "log_au",
ACCOUNT_METADATA = "acc_metadata",
PLUGIN = "plg",
DATASOURCE = "datasource",
DATASOURCE_PLUS = "datasource_plus",
APP_BACKUP = "backup",
TABLE = "ta",
ROW = "ro",
AUTOMATION = "au",
LINK = "li",
WEBHOOK = "wh",
INSTANCE = "inst",
LAYOUT = "layout",
SCREEN = "screen",
QUERY = "query",
DEPLOYMENTS = "deployments",
METADATA = "metadata",
MEM_VIEW = "view",
USER_FLAG = "flag",
AUTOMATION_METADATA = "meta_au",
}
export const StaticDatabases = {
@ -62,6 +89,7 @@ export const StaticDatabases = {
},
}
export const APP_PREFIX = exports.DocumentTypes.APP + exports.SEPARATOR
export const APP_DEV = exports.DocumentTypes.APP_DEV + exports.SEPARATOR
export const APP_PREFIX = DocumentType.APP + SEPARATOR
export const APP_DEV = DocumentType.APP_DEV + SEPARATOR
export const APP_DEV_PREFIX = APP_DEV
export const BUDIBASE_DATASOURCE_TYPE = "budibase"
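
For illustration, how these constants compose into document IDs, assuming SEPARATOR is the underscore used throughout Budibase IDs (a sketch, not taken from this diff):

import { DocumentType, SEPARATOR, APP_PREFIX } from "../constants"

const userDocId = `${DocumentType.USER}${SEPARATOR}1b2c3d` // e.g. "us_1b2c3d"
const prodAppId = `${APP_PREFIX}4e5f6a` // e.g. "app_4e5f6a" if APP is "app"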

View File

@ -0,0 +1,2 @@
export * from "./db"
export * from "./misc"

View File

@ -0,0 +1,44 @@
export enum UserStatus {
ACTIVE = "active",
INACTIVE = "inactive",
}
export enum Cookie {
CurrentApp = "budibase:currentapp",
Auth = "budibase:auth",
Init = "budibase:init",
ACCOUNT_RETURN_URL = "budibase:account:returnurl",
DatasourceAuth = "budibase:datasourceauth",
OIDC_CONFIG = "budibase:oidc:config",
}
export enum Header {
API_KEY = "x-budibase-api-key",
LICENSE_KEY = "x-budibase-license-key",
API_VER = "x-budibase-api-version",
APP_ID = "x-budibase-app-id",
TYPE = "x-budibase-type",
PREVIEW_ROLE = "x-budibase-role",
TENANT_ID = "x-budibase-tenant-id",
TOKEN = "x-budibase-token",
CSRF_TOKEN = "x-csrf-token",
}
export enum GlobalRole {
OWNER = "owner",
ADMIN = "admin",
BUILDER = "builder",
WORKSPACE_MANAGER = "workspace_manager",
}
export enum Config {
SETTINGS = "settings",
ACCOUNT = "account",
SMTP = "smtp",
GOOGLE = "google",
OIDC = "oidc",
OIDC_LOGOS = "logos_oidc",
}
export const MAX_VALID_DATE = new Date(2147483647000)
export const DEFAULT_TENANT_ID = "default"
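
The plural Headers/Configs objects become singular enums. A small sketch of attaching the tenant header to an outbound request, following the pattern used in the accounts client; the URL is a placeholder.

import fetch from "node-fetch"
import { Header } from "../constants"

async function pingWorker(tenantId: string) {
  return fetch("http://worker.example.com/api/system/status", {
    headers: { [Header.TENANT_ID]: tenantId },
  })
}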

View File

@ -0,0 +1,17 @@
import { AsyncLocalStorage } from "async_hooks"
export default class Context {
static storage = new AsyncLocalStorage<Record<string, any>>()
static run(context: Record<string, any>, func: any) {
return Context.storage.run(context, () => func())
}
static get(): Record<string, any> {
return Context.storage.getStore() as Record<string, any>
}
static set(context: Record<string, any>) {
Context.storage.enterWith(context)
}
}
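
This AsyncLocalStorage wrapper replaces the cls-hooked machinery removed elsewhere in this commit. A demonstration sketch: values stored for one async chain are invisible to parallel chains, which is the property the context module relies on.

import Context from "./Context"

async function handleRequest(tenantId: string) {
  return Context.run({ tenantId }, async () => {
    await new Promise(resolve => setTimeout(resolve, 10))
    return Context.get().tenantId // still this chain's value
  })
}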

View File

@ -1,47 +0,0 @@
const cls = require("../clshooked")
const { newid } = require("../hashing")
const REQUEST_ID_KEY = "requestId"
const MAIN_CTX = cls.createNamespace("main")
function getContextStorage(namespace) {
if (namespace && namespace.active) {
let contextData = namespace.active
delete contextData.id
delete contextData._ns_name
return contextData
}
return {}
}
class FunctionContext {
static run(callback) {
return MAIN_CTX.runAndReturn(async () => {
const namespaceId = newid()
MAIN_CTX.set(REQUEST_ID_KEY, namespaceId)
const namespace = cls.createNamespace(namespaceId)
let response = await namespace.runAndReturn(callback)
cls.destroyNamespace(namespaceId)
return response
})
}
static setOnContext(key, value) {
const namespaceId = MAIN_CTX.get(REQUEST_ID_KEY)
const namespace = cls.getNamespace(namespaceId)
namespace.set(key, value)
}
static getFromContext(key) {
const namespaceId = MAIN_CTX.get(REQUEST_ID_KEY)
const namespace = cls.getNamespace(namespaceId)
const context = getContextStorage(namespace)
if (context) {
return context[key]
} else {
return null
}
}
}
module.exports = FunctionContext

View File

@ -1,17 +0,0 @@
export enum ContextKeys {
TENANT_ID = "tenantId",
GLOBAL_DB = "globalDb",
APP_ID = "appId",
IDENTITY = "identity",
// whatever the request app DB was
CURRENT_DB = "currentDb",
// get the prod app DB from the request
PROD_DB = "prodDb",
// get the dev app DB from the request
DEV_DB = "devDb",
DB_OPTS = "dbOpts",
// check if something else is using the context, don't close DB
TENANCY_IN_USE = "tenancyInUse",
APP_IN_USE = "appInUse",
IDENTITY_IN_USE = "identityInUse",
}

View File

@ -1,15 +1,19 @@
const { getGlobalUserParams, getAllApps } = require("../db/utils")
const { doWithDB } = require("../db")
const { doWithGlobalDB } = require("../tenancy")
const { StaticDatabases } = require("../db/constants")
import {
getGlobalUserParams,
getAllApps,
doWithDB,
StaticDatabases,
} from "../db"
import { doWithGlobalDB } from "../tenancy"
import { App, Tenants, User, Database } from "@budibase/types"
const TENANT_DOC = StaticDatabases.PLATFORM_INFO.docs.tenants
const PLATFORM_INFO_DB = StaticDatabases.PLATFORM_INFO.name
const removeTenantFromInfoDB = async tenantId => {
async function removeTenantFromInfoDB(tenantId: string) {
try {
await doWithDB(PLATFORM_INFO_DB, async infoDb => {
let tenants = await infoDb.get(TENANT_DOC)
await doWithDB(PLATFORM_INFO_DB, async (infoDb: Database) => {
const tenants = (await infoDb.get(TENANT_DOC)) as Tenants
tenants.tenantIds = tenants.tenantIds.filter(id => id !== tenantId)
await infoDb.put(tenants)
@ -20,14 +24,14 @@ const removeTenantFromInfoDB = async tenantId => {
}
}
exports.removeUserFromInfoDB = async dbUser => {
await doWithDB(PLATFORM_INFO_DB, async infoDb => {
const keys = [dbUser._id, dbUser.email]
export async function removeUserFromInfoDB(dbUser: User) {
await doWithDB(PLATFORM_INFO_DB, async (infoDb: Database) => {
const keys = [dbUser._id!, dbUser.email]
const userDocs = await infoDb.allDocs({
keys,
include_docs: true,
})
const toDelete = userDocs.rows.map(row => {
const toDelete = userDocs.rows.map((row: any) => {
return {
...row.doc,
_deleted: true,
@ -37,18 +41,18 @@ exports.removeUserFromInfoDB = async dbUser => {
})
}
const removeUsersFromInfoDB = async tenantId => {
return doWithGlobalDB(tenantId, async db => {
async function removeUsersFromInfoDB(tenantId: string) {
return doWithGlobalDB(tenantId, async (db: any) => {
try {
const allUsers = await db.allDocs(
getGlobalUserParams(null, {
include_docs: true,
})
)
await doWithDB(PLATFORM_INFO_DB, async infoDb => {
const allEmails = allUsers.rows.map(row => row.doc.email)
await doWithDB(PLATFORM_INFO_DB, async (infoDb: any) => {
const allEmails = allUsers.rows.map((row: any) => row.doc.email)
// get the id docs
let keys = allUsers.rows.map(row => row.id)
let keys = allUsers.rows.map((row: any) => row.id)
// and the email docs
keys = keys.concat(allEmails)
// retrieve the docs and delete them
@ -56,7 +60,7 @@ const removeUsersFromInfoDB = async tenantId => {
keys,
include_docs: true,
})
const toDelete = userDocs.rows.map(row => {
const toDelete = userDocs.rows.map((row: any) => {
return {
...row.doc,
_deleted: true,
@ -71,8 +75,8 @@ const removeUsersFromInfoDB = async tenantId => {
})
}
const removeGlobalDB = async tenantId => {
return doWithGlobalDB(tenantId, async db => {
async function removeGlobalDB(tenantId: string) {
return doWithGlobalDB(tenantId, async (db: Database) => {
try {
await db.destroy()
} catch (err) {
@ -82,11 +86,11 @@ const removeGlobalDB = async tenantId => {
})
}
const removeTenantApps = async tenantId => {
async function removeTenantApps(tenantId: string) {
try {
const apps = await getAllApps({ all: true })
const apps = (await getAllApps({ all: true })) as App[]
const destroyPromises = apps.map(app =>
doWithDB(app.appId, db => db.destroy())
doWithDB(app.appId, (db: Database) => db.destroy())
)
await Promise.allSettled(destroyPromises)
} catch (err) {
@ -96,7 +100,7 @@ const removeTenantApps = async tenantId => {
}
// can't live in tenancy package due to circular dependency on db/utils
exports.deleteTenant = async tenantId => {
export async function deleteTenant(tenantId: string) {
await removeTenantFromInfoDB(tenantId)
await removeUsersFromInfoDB(tenantId)
await removeGlobalDB(tenantId)
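await removeTenantApps(tenantId)
}

deleteTenant runs the removal steps in order: platform info doc, user info docs, the tenant's global DB, then its apps. A hedged invocation sketch; the import path is hypothetical, since the comment notes this cannot live in the tenancy package.

import { deleteTenant } from "../context/deprovision" // hypothetical path

async function offboardTenant(tenantId: string) {
  await deleteTenant(tenantId)
  console.log(`tenant ${tenantId} removed`)
}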

View File

@ -2,23 +2,22 @@ import {
IdentityContext,
IdentityType,
User,
UserContext,
isCloudAccount,
Account,
AccountUserContext,
} from "@budibase/types"
import * as context from "."
export const getIdentity = (): IdentityContext | undefined => {
export function getIdentity(): IdentityContext | undefined {
return context.getIdentity()
}
export const doInIdentityContext = (identity: IdentityContext, task: any) => {
export function doInIdentityContext(identity: IdentityContext, task: any) {
return context.doInIdentityContext(identity, task)
}
export const doInUserContext = (user: User, task: any) => {
const userContext: UserContext = {
export function doInUserContext(user: User, task: any) {
const userContext: any = {
...user,
_id: user._id as string,
type: IdentityType.USER,
@ -26,7 +25,7 @@ export const doInUserContext = (user: User, task: any) => {
return doInIdentityContext(userContext, task)
}
export const doInAccountContext = (account: Account, task: any) => {
export function doInAccountContext(account: Account, task: any) {
const _id = getAccountUserId(account)
const tenantId = account.tenantId
const accountContext: AccountUserContext = {
@ -38,12 +37,12 @@ export const doInAccountContext = (account: Account, task: any) => {
return doInIdentityContext(accountContext, task)
}
export const getAccountUserId = (account: Account) => {
export function getAccountUserId(account: Account) {
let userId: string
if (isCloudAccount(account)) {
userId = account.budibaseUserId
} else {
// use account id as user id for self hosting
// use account id as user id for self-hosting
userId = account.accountId
}
return userId
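}

A hedged example of the identity helpers: run a task as a given user so downstream code can read the identity back out of context. The relative import path and minimal call shape are assumptions.

import { User } from "@budibase/types"
import * as identity from "../context/identity"

async function asUser(user: User) {
  return identity.doInUserContext(user, async () => {
    return identity.getIdentity()?._id // read back from context
  })
}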

View File

@ -1,251 +1,3 @@
import env from "../environment"
import { SEPARATOR, DocumentTypes } from "../db/constants"
import cls from "./FunctionContext"
import { dangerousGetDB, closeDB } from "../db"
import { baseGlobalDBName } from "../tenancy/utils"
import { IdentityContext } from "@budibase/types"
import { DEFAULT_TENANT_ID as _DEFAULT_TENANT_ID } from "../constants"
import { ContextKeys } from "./constants"
import {
updateUsing,
closeWithUsing,
setAppTenantId,
setIdentity,
closeAppDBs,
getContextDB,
} from "./utils"
export const DEFAULT_TENANT_ID = _DEFAULT_TENANT_ID
// some test cases call functions directly, need to
// store an app ID to pretend there is a context
let TEST_APP_ID: string | null = null
export const closeTenancy = async () => {
let db
try {
if (env.USE_COUCH) {
db = getGlobalDB()
}
} catch (err) {
// no DB found - skip closing
return
}
await closeDB(db)
// clear from context now that database is closed/task is finished
cls.setOnContext(ContextKeys.TENANT_ID, null)
cls.setOnContext(ContextKeys.GLOBAL_DB, null)
}
// export const isDefaultTenant = () => {
// return getTenantId() === DEFAULT_TENANT_ID
// }
export const isMultiTenant = () => {
return env.MULTI_TENANCY
}
/**
* Given an app ID this will attempt to retrieve the tenant ID from it.
* @return {null|string} The tenant ID found within the app ID.
*/
export const getTenantIDFromAppID = (appId: string) => {
if (!appId) {
return null
}
const split = appId.split(SEPARATOR)
const hasDev = split[1] === DocumentTypes.DEV
if ((hasDev && split.length === 3) || (!hasDev && split.length === 2)) {
return null
}
if (hasDev) {
return split[2]
} else {
return split[1]
}
}
// used for automations, API endpoints should always be in context already
export const doInTenant = (tenantId: string | null, task: any) => {
// make sure default always selected in single tenancy
if (!env.MULTI_TENANCY) {
tenantId = tenantId || DEFAULT_TENANT_ID
}
// the internal function is so that we can re-use an existing
// context - don't want to close DB on a parent context
async function internal(opts = { existing: false }) {
// set the tenant id + global db if this is a new context
if (!opts.existing) {
updateTenantId(tenantId)
}
try {
// invoke the task
return await task()
} finally {
await closeWithUsing(ContextKeys.TENANCY_IN_USE, () => {
return closeTenancy()
})
}
}
const existing = cls.getFromContext(ContextKeys.TENANT_ID) === tenantId
return updateUsing(ContextKeys.TENANCY_IN_USE, existing, internal)
}
export const doInAppContext = (appId: string, task: any) => {
if (!appId) {
throw new Error("appId is required")
}
const identity = getIdentity()
// the internal function is so that we can re-use an existing
// context - don't want to close DB on a parent context
async function internal(opts = { existing: false }) {
// set the app tenant id
if (!opts.existing) {
setAppTenantId(appId)
}
// set the app ID
cls.setOnContext(ContextKeys.APP_ID, appId)
// preserve the identity
if (identity) {
setIdentity(identity)
}
try {
// invoke the task
return await task()
} finally {
await closeWithUsing(ContextKeys.APP_IN_USE, async () => {
await closeAppDBs()
await closeTenancy()
})
}
}
const existing = cls.getFromContext(ContextKeys.APP_ID) === appId
return updateUsing(ContextKeys.APP_IN_USE, existing, internal)
}
export const doInIdentityContext = (identity: IdentityContext, task: any) => {
if (!identity) {
throw new Error("identity is required")
}
async function internal(opts = { existing: false }) {
if (!opts.existing) {
cls.setOnContext(ContextKeys.IDENTITY, identity)
// set the tenant so that doInTenant will preserve identity
if (identity.tenantId) {
updateTenantId(identity.tenantId)
}
}
try {
// invoke the task
return await task()
} finally {
await closeWithUsing(ContextKeys.IDENTITY_IN_USE, async () => {
setIdentity(null)
await closeTenancy()
})
}
}
const existing = cls.getFromContext(ContextKeys.IDENTITY)
return updateUsing(ContextKeys.IDENTITY_IN_USE, existing, internal)
}
export const getIdentity = (): IdentityContext | undefined => {
try {
return cls.getFromContext(ContextKeys.IDENTITY)
} catch (e) {
// do nothing - identity is not in context
}
}
export const updateTenantId = (tenantId: string | null) => {
cls.setOnContext(ContextKeys.TENANT_ID, tenantId)
if (env.USE_COUCH) {
setGlobalDB(tenantId)
}
}
export const updateAppId = async (appId: string) => {
try {
// have to close first, before removing the databases from context
await closeAppDBs()
cls.setOnContext(ContextKeys.APP_ID, appId)
} catch (err) {
if (env.isTest()) {
TEST_APP_ID = appId
} else {
throw err
}
}
}
export const setGlobalDB = (tenantId: string | null) => {
const dbName = baseGlobalDBName(tenantId)
const db = dangerousGetDB(dbName)
cls.setOnContext(ContextKeys.GLOBAL_DB, db)
return db
}
export const getGlobalDB = () => {
const db = cls.getFromContext(ContextKeys.GLOBAL_DB)
if (!db) {
throw new Error("Global DB not found")
}
return db
}
export const isTenantIdSet = () => {
const tenantId = cls.getFromContext(ContextKeys.TENANT_ID)
return !!tenantId
}
export const getTenantId = () => {
if (!isMultiTenant()) {
return DEFAULT_TENANT_ID
}
const tenantId = cls.getFromContext(ContextKeys.TENANT_ID)
if (!tenantId) {
throw new Error("Tenant id not found")
}
return tenantId
}
export const getAppId = () => {
const foundId = cls.getFromContext(ContextKeys.APP_ID)
if (!foundId && env.isTest() && TEST_APP_ID) {
return TEST_APP_ID
} else {
return foundId
}
}
/**
* Opens the app database based on whatever the request
* contained, dev or prod.
*/
export const getAppDB = (opts?: any) => {
return getContextDB(ContextKeys.CURRENT_DB, opts)
}
/**
* This specifically gets the prod app ID, if the request
* contained a development app ID, this will open the prod one.
*/
export const getProdAppDB = (opts?: any) => {
return getContextDB(ContextKeys.PROD_DB, opts)
}
/**
* This specifically gets the dev app ID, if the request
* contained a prod app ID, this will open the dev one.
*/
export const getDevAppDB = (opts?: any) => {
return getContextDB(ContextKeys.DEV_DB, opts)
}
export { DEFAULT_TENANT_ID } from "../constants"
export * as identity from "./identity"
export * from "./mainContext"

View File

@ -0,0 +1,245 @@
// some test cases call functions directly, need to
// store an app ID to pretend there is a context
import env from "../environment"
import Context from "./Context"
import { getDevelopmentAppID, getProdAppID } from "../db/conversions"
import { getDB } from "../db/db"
import {
DocumentType,
SEPARATOR,
StaticDatabases,
DEFAULT_TENANT_ID,
} from "../constants"
import { Database, IdentityContext } from "@budibase/types"
export type ContextMap = {
tenantId?: string
appId?: string
identity?: IdentityContext
}
let TEST_APP_ID: string | null = null
export function getGlobalDBName(tenantId?: string) {
// the tenant ID can be set externally, for example in the user API
// when new tenants are being created
if (!tenantId) {
tenantId = getTenantId()
}
return baseGlobalDBName(tenantId)
}
export function baseGlobalDBName(tenantId: string | undefined | null) {
let dbName
if (!tenantId || tenantId === DEFAULT_TENANT_ID) {
dbName = StaticDatabases.GLOBAL.name
} else {
dbName = `${tenantId}${SEPARATOR}${StaticDatabases.GLOBAL.name}`
}
return dbName
}
export function isMultiTenant() {
return env.MULTI_TENANCY
}
export function isTenantIdSet() {
const context = Context.get()
return !!context?.tenantId
}
export function isTenancyEnabled() {
return env.MULTI_TENANCY
}
/**
* Given an app ID this will attempt to retrieve the tenant ID from it.
* @return {null|string} The tenant ID found within the app ID.
*/
export function getTenantIDFromAppID(appId: string) {
if (!appId) {
return undefined
}
if (!isMultiTenant()) {
return DEFAULT_TENANT_ID
}
const split = appId.split(SEPARATOR)
const hasDev = split[1] === DocumentType.DEV
if ((hasDev && split.length === 3) || (!hasDev && split.length === 2)) {
return undefined
}
if (hasDev) {
return split[2]
} else {
return split[1]
}
}
function updateContext(updates: ContextMap) {
let context: ContextMap
try {
context = Context.get()
} catch (err) {
// no context, start empty
context = {}
}
context = {
...context,
...updates,
}
return context
}
async function newContext(updates: ContextMap, task: any) {
// see if there already is a context setup
let context: ContextMap = updateContext(updates)
return Context.run(context, task)
}
export async function doInContext(appId: string, task: any): Promise<any> {
const tenantId = getTenantIDFromAppID(appId)
return newContext(
{
tenantId,
appId,
},
task
)
}
export async function doInTenant(
tenantId: string | null,
task: any
): Promise<any> {
// make sure default always selected in single tenancy
if (!env.MULTI_TENANCY) {
tenantId = tenantId || DEFAULT_TENANT_ID
}
const updates = tenantId ? { tenantId } : {}
return newContext(updates, task)
}
export async function doInAppContext(appId: string, task: any): Promise<any> {
if (!appId) {
throw new Error("appId is required")
}
const tenantId = getTenantIDFromAppID(appId)
const updates: ContextMap = { appId }
if (tenantId) {
updates.tenantId = tenantId
}
return newContext(updates, task)
}
export async function doInIdentityContext(
identity: IdentityContext,
task: any
): Promise<any> {
if (!identity) {
throw new Error("identity is required")
}
const context: ContextMap = {
identity,
}
if (identity.tenantId) {
context.tenantId = identity.tenantId
}
return newContext(context, task)
}
export function getIdentity(): IdentityContext | undefined {
try {
const context = Context.get()
return context?.identity
} catch (e) {
// do nothing - identity is not in context
}
}
export function getTenantId(): string {
if (!isMultiTenant()) {
return DEFAULT_TENANT_ID
}
const context = Context.get()
const tenantId = context?.tenantId
if (!tenantId) {
throw new Error("Tenant id not found")
}
return tenantId
}
export function getAppId(): string | undefined {
const context = Context.get()
const foundId = context?.appId
if (!foundId && env.isTest() && TEST_APP_ID) {
return TEST_APP_ID
} else {
return foundId
}
}
export function updateTenantId(tenantId?: string) {
let context: ContextMap = updateContext({
tenantId,
})
Context.set(context)
}
export function updateAppId(appId: string) {
let context: ContextMap = updateContext({
appId,
})
try {
Context.set(context)
} catch (err) {
if (env.isTest()) {
TEST_APP_ID = appId
} else {
throw err
}
}
}
export function getGlobalDB(): Database {
const context = Context.get()
if (!context || (env.MULTI_TENANCY && !context.tenantId)) {
throw new Error("Global DB not found")
}
return getDB(baseGlobalDBName(context?.tenantId))
}
/**
* Gets the app database based on whatever the request
* contained, dev or prod.
*/
export function getAppDB(opts?: any): Database {
const appId = getAppId()
return getDB(appId, opts)
}
/**
* This specifically gets the prod app ID, if the request
* contained a development app ID, this will get the prod one.
*/
export function getProdAppDB(opts?: any): Database {
const appId = getAppId()
if (!appId) {
throw new Error("Unable to retrieve prod DB - no app ID.")
}
return getDB(getProdAppID(appId), opts)
}
/**
* This specifically gets the dev app ID, if the request
* contained a prod app ID, this will get the dev one.
*/
export function getDevAppDB(opts?: any): Database {
const appId = getAppId()
if (!appId) {
throw new Error("Unable to retrieve dev DB - no app ID.")
}
return getDB(getDevelopmentAppID(appId), opts)
}
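
Tying the new module together: doInTenant seeds the AsyncLocalStorage map and getGlobalDB derives the database name from it, so nothing needs closing afterwards, unlike the PouchDB-based version this replaces. A usage sketch (the allDocs options shown are an assumption):

import { doInTenant, getGlobalDB, getTenantId } from "../context"

async function countGlobalDocs(tenantId: string) {
  return doInTenant(tenantId, async () => {
    const db = getGlobalDB() // "<tenantId>_global-db" for non-default tenants
    const response = await db.allDocs({ include_docs: false })
    console.log(`tenant ${getTenantId()}: ${response.rows.length} docs`)
    return response.rows.length
  })
}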

View File

@ -1,18 +1,9 @@
import "../../../tests/utilities/TestConfiguration"
import * as context from ".."
import { DEFAULT_TENANT_ID } from "../../constants"
import env from "../../environment"
// must use require to spy index file exports due to known issue in jest
const dbUtils = require("../../db")
jest.spyOn(dbUtils, "closeDB")
jest.spyOn(dbUtils, "dangerousGetDB")
require("../../../tests")
const context = require("../")
const { DEFAULT_TENANT_ID } = require("../../constants")
const env = require("../../environment")
describe("context", () => {
beforeEach(() => {
jest.clearAllMocks()
})
describe("doInTenant", () => {
describe("single-tenancy", () => {
it("defaults to the default tenant", () => {
@ -25,8 +16,6 @@ describe("context", () => {
const db = context.getGlobalDB()
expect(db.name).toBe("global-db")
})
expect(dbUtils.dangerousGetDB).toHaveBeenCalledTimes(1)
expect(dbUtils.closeDB).toHaveBeenCalledTimes(1)
})
})
@ -40,7 +29,7 @@ describe("context", () => {
let error
try {
context.getTenantId()
} catch (e: any) {
} catch (e) {
error = e
}
expect(error.message).toBe("Tenant id not found")
@ -59,7 +48,7 @@ describe("context", () => {
let error
try {
context.getGlobalDB()
} catch (e: any) {
} catch (e) {
error = e
}
expect(error.message).toBe("Global DB not found")
@ -85,8 +74,6 @@ describe("context", () => {
const db = context.getGlobalDB()
expect(db.name).toBe("test_global-db")
})
expect(dbUtils.dangerousGetDB).toHaveBeenCalledTimes(1)
expect(dbUtils.closeDB).toHaveBeenCalledTimes(1)
})
it("sets the tenant id when nested with same tenant id", async () => {
@ -121,10 +108,6 @@ describe("context", () => {
})
})
})
// only 1 db is opened and closed
expect(dbUtils.dangerousGetDB).toHaveBeenCalledTimes(1)
expect(dbUtils.closeDB).toHaveBeenCalledTimes(1)
})
it("sets different tenant id inside another context", () => {

View File

@ -1,113 +0,0 @@
import {
DEFAULT_TENANT_ID,
getAppId,
getTenantIDFromAppID,
updateTenantId,
} from "./index"
import cls from "./FunctionContext"
import { IdentityContext } from "@budibase/types"
import { ContextKeys } from "./constants"
import { dangerousGetDB, closeDB } from "../db"
import { isEqual } from "lodash"
import { getDevelopmentAppID, getProdAppID } from "../db/conversions"
import env from "../environment"
export async function updateUsing(
usingKey: string,
existing: boolean,
internal: (opts: { existing: boolean }) => Promise<any>
) {
const using = cls.getFromContext(usingKey)
if (using && existing) {
cls.setOnContext(usingKey, using + 1)
return internal({ existing: true })
} else {
return cls.run(async () => {
cls.setOnContext(usingKey, 1)
return internal({ existing: false })
})
}
}
export async function closeWithUsing(
usingKey: string,
closeFn: () => Promise<any>
) {
const using = cls.getFromContext(usingKey)
if (!using || using <= 1) {
await closeFn()
} else {
cls.setOnContext(usingKey, using - 1)
}
}
export const setAppTenantId = (appId: string) => {
const appTenantId = getTenantIDFromAppID(appId) || DEFAULT_TENANT_ID
updateTenantId(appTenantId)
}
export const setIdentity = (identity: IdentityContext | null) => {
cls.setOnContext(ContextKeys.IDENTITY, identity)
}
// this function makes sure the PouchDB objects are closed and
// fully deleted when finished - this protects against memory leaks
export async function closeAppDBs() {
const dbKeys = [
ContextKeys.CURRENT_DB,
ContextKeys.PROD_DB,
ContextKeys.DEV_DB,
]
for (let dbKey of dbKeys) {
const db = cls.getFromContext(dbKey)
if (!db) {
continue
}
await closeDB(db)
// clear the DB from context, in case someone tries to use it again
cls.setOnContext(dbKey, null)
}
// clear the app ID now that the databases are closed
if (cls.getFromContext(ContextKeys.APP_ID)) {
cls.setOnContext(ContextKeys.APP_ID, null)
}
if (cls.getFromContext(ContextKeys.DB_OPTS)) {
cls.setOnContext(ContextKeys.DB_OPTS, null)
}
}
export function getContextDB(key: string, opts: any) {
const dbOptsKey = `${key}${ContextKeys.DB_OPTS}`
let storedOpts = cls.getFromContext(dbOptsKey)
let db = cls.getFromContext(key)
if (db && isEqual(opts, storedOpts)) {
return db
}
const appId = getAppId()
let toUseAppId
switch (key) {
case ContextKeys.CURRENT_DB:
toUseAppId = appId
break
case ContextKeys.PROD_DB:
toUseAppId = getProdAppID(appId)
break
case ContextKeys.DEV_DB:
toUseAppId = getDevelopmentAppID(appId)
break
}
db = dangerousGetDB(toUseAppId, opts)
try {
cls.setOnContext(key, db)
if (opts) {
cls.setOnContext(dbOptsKey, opts)
}
} catch (err) {
if (!env.isTest()) {
throw err
}
}
return db
}
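A hedged sketch of how the reference-counting pair above is intended to compose (the key name is hypothetical): updateUsing opens or re-enters a CLS context and bumps a counter, and closeWithUsing only runs the close function once the outermost caller finishes:
const USING_KEY = "usingAppContext" // hypothetical key
await updateUsing(USING_KEY, true, async ({ existing }) => {
  // existing === true means we re-entered an outer context with the same key
  try {
    // ... work with the context DBs here ...
  } finally {
    // decrements the counter, only actually closing on the outermost exit
    await closeWithUsing(USING_KEY, closeAppDBs)
  }
})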

View File

@ -1,4 +1,5 @@
import { dangerousGetDB, closeDB } from "."
import { getPouchDB, closePouchDB } from "./couch"
import { DocumentType } from "../constants"
class Replication {
source: any
@ -11,12 +12,12 @@ class Replication {
* @param {String} target - the DB you want to replicate to, or rollback from
*/
constructor({ source, target }: any) {
this.source = dangerousGetDB(source)
this.target = dangerousGetDB(target)
this.source = getPouchDB(source)
this.target = getPouchDB(target)
}
close() {
return Promise.all([closeDB(this.source), closeDB(this.target)])
return Promise.all([closePouchDB(this.source), closePouchDB(this.target)])
}
promisify(operation: any, opts = {}) {
@ -53,13 +54,22 @@ class Replication {
return this.replication
}
appReplicateOpts() {
return {
filter: (doc: any) => {
return doc._id !== DocumentType.APP_METADATA
},
}
}
/**
* Rollback the target DB back to the state of the source DB
*/
async rollback() {
await this.target.destroy()
// Recreate the DB again
this.target = dangerousGetDB(this.target.name)
this.target = getPouchDB(this.target.name)
// take the opportunity to remove deleted tombstones
await this.replicate()
}
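For reference, a short usage sketch of this class as wired up after the change, with hypothetical app IDs; rollback destroys the target and replicates the source back into it:
const replication = new Replication({
  source: "app_dev_a1b2c3", // hypothetical dev app ID
  target: "app_a1b2c3", // hypothetical prod app ID
})
await replication.rollback() // reset the prod DB back to the dev state
await replication.close() // always release both PouchDB handles when done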

View File

@ -1,32 +1,33 @@
import { APP_DEV_PREFIX, APP_PREFIX } from "../constants"
import { App } from "@budibase/types"
const NO_APP_ERROR = "No app provided"
const { APP_DEV_PREFIX, APP_PREFIX } = require("./constants")
exports.isDevAppID = appId => {
export function isDevAppID(appId?: string) {
if (!appId) {
throw new Error(NO_APP_ERROR)
}
return appId.startsWith(APP_DEV_PREFIX)
}
exports.isProdAppID = appId => {
export function isProdAppID(appId?: string) {
if (!appId) {
throw new Error(NO_APP_ERROR)
}
return appId.startsWith(APP_PREFIX) && !exports.isDevAppID(appId)
return appId.startsWith(APP_PREFIX) && !isDevAppID(appId)
}
exports.isDevApp = app => {
export function isDevApp(app: App) {
if (!app) {
throw new Error(NO_APP_ERROR)
}
return exports.isDevAppID(app.appId)
return isDevAppID(app.appId)
}
/**
 * Generates a development app ID from a real app ID.
 * @returns {string} the dev app ID, which can be used for the dev database.
 */
exports.getDevelopmentAppID = appId => {
export function getDevelopmentAppID(appId: string) {
if (!appId || appId.startsWith(APP_DEV_PREFIX)) {
return appId
}
@ -36,11 +37,12 @@ exports.getDevelopmentAppID = appId => {
const rest = split.join(APP_PREFIX)
return `${APP_DEV_PREFIX}${rest}`
}
export const getDevAppID = getDevelopmentAppID
/**
* Convert a development app ID to a deployed app ID.
*/
exports.getProdAppID = appId => {
export function getProdAppID(appId: string) {
if (!appId || !appId.startsWith(APP_DEV_PREFIX)) {
return appId
}
@ -51,7 +53,7 @@ exports.getProdAppID = appId => {
return `${APP_PREFIX}${rest}`
}
exports.extractAppUUID = id => {
export function extractAppUUID(id: string) {
const split = id?.split("_") || []
return split.length ? split[split.length - 1] : null
}
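A quick sketch of the conversions in action, assuming the conventional prefix values APP_PREFIX = "app_" and APP_DEV_PREFIX = "app_dev_" (IDs illustrative):
getDevelopmentAppID("app_a1b2c3") // => "app_dev_a1b2c3"
getProdAppID("app_dev_a1b2c3") // => "app_a1b2c3"
isDevAppID("app_dev_a1b2c3") // => true
isProdAppID("app_dev_a1b2c3") // => false
extractAppUUID("app_dev_a1b2c3") // => "a1b2c3"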

View File

@ -0,0 +1,193 @@
import Nano from "nano"
import {
AllDocsResponse,
AnyDocument,
Database,
DatabaseOpts,
DatabaseQueryOpts,
DatabasePutOpts,
DatabaseCreateIndexOpts,
DatabaseDeleteIndexOpts,
Document,
isDocument,
} from "@budibase/types"
import { getCouchInfo } from "./connections"
import { directCouchCall } from "./utils"
import { getPouchDB } from "./pouchDB"
import { WriteStream, ReadStream } from "fs"
export class DatabaseImpl implements Database {
public readonly name: string
private static nano: Nano.ServerScope
private readonly pouchOpts: DatabaseOpts
constructor(dbName?: string, opts?: DatabaseOpts) {
if (dbName == null) {
throw new Error("Database name cannot be undefined.")
}
this.name = dbName
this.pouchOpts = opts || {}
if (!DatabaseImpl.nano) {
DatabaseImpl.init()
}
}
static init() {
const couchInfo = getCouchInfo()
DatabaseImpl.nano = Nano({
url: couchInfo.url,
requestDefaults: {
headers: {
Authorization: couchInfo.cookie,
},
},
parseUrl: false,
})
}
async exists() {
let response = await directCouchCall(`/${this.name}`, "HEAD")
return response.status === 200
}
async checkSetup() {
let shouldCreate = !this.pouchOpts?.skip_setup
// check exists in a lightweight fashion
let exists = await this.exists()
if (!shouldCreate && !exists) {
throw new Error("DB does not exist")
}
if (!exists) {
await DatabaseImpl.nano.db.create(this.name)
}
return DatabaseImpl.nano.db.use(this.name)
}
private async updateOutput(fnc: any) {
try {
return await fnc()
} catch (err: any) {
if (err.statusCode) {
err.status = err.statusCode
}
throw err
}
}
async get<T>(id?: string): Promise<T | any> {
const db = await this.checkSetup()
if (!id) {
throw new Error("Unable to get doc without a valid _id.")
}
return this.updateOutput(() => db.get(id))
}
async remove(idOrDoc: string | Document, rev?: string) {
const db = await this.checkSetup()
let _id: string
let _rev: string
if (isDocument(idOrDoc)) {
_id = idOrDoc._id!
_rev = idOrDoc._rev!
} else {
_id = idOrDoc
_rev = rev!
}
if (!_id || !_rev) {
throw new Error("Unable to remove doc without a valid _id and _rev.")
}
return this.updateOutput(() => db.destroy(_id, _rev))
}
async put(document: AnyDocument, opts?: DatabasePutOpts) {
if (!document._id) {
throw new Error("Cannot store document without _id field.")
}
const db = await this.checkSetup()
if (!document.createdAt) {
document.createdAt = new Date().toISOString()
}
document.updatedAt = new Date().toISOString()
if (opts?.force && document._id) {
try {
const existing = await this.get(document._id)
if (existing) {
document._rev = existing._rev
}
} catch (err: any) {
if (err.status !== 404) {
throw err
}
}
}
return this.updateOutput(() => db.insert(document))
}
async bulkDocs(documents: AnyDocument[]) {
const db = await this.checkSetup()
return this.updateOutput(() => db.bulk({ docs: documents }))
}
async allDocs<T>(params: DatabaseQueryOpts): Promise<AllDocsResponse<T>> {
const db = await this.checkSetup()
return this.updateOutput(() => db.list(params))
}
async query<T>(
viewName: string,
params: DatabaseQueryOpts
): Promise<AllDocsResponse<T>> {
const db = await this.checkSetup()
const [database, view] = viewName.split("/")
return this.updateOutput(() => db.view(database, view, params))
}
async destroy() {
try {
await DatabaseImpl.nano.db.destroy(this.name)
} catch (err: any) {
// didn't exist, don't worry
if (err.statusCode === 404) {
return
} else {
throw { ...err, status: err.statusCode }
}
}
}
async compact() {
const db = await this.checkSetup()
return this.updateOutput(() => db.compact())
}
// All the functions below are called infrequently, so just utilise PouchDB
// for them, as it implements them better than we can
async dump(stream: WriteStream, opts?: { filter?: any }) {
const pouch = getPouchDB(this.name)
// @ts-ignore
return pouch.dump(stream, opts)
}
async load(stream: ReadStream) {
const pouch = getPouchDB(this.name)
// @ts-ignore
return pouch.load(stream)
}
async createIndex(opts: DatabaseCreateIndexOpts) {
const pouch = getPouchDB(this.name)
return pouch.createIndex(opts)
}
async deleteIndex(opts: DatabaseDeleteIndexOpts) {
const pouch = getPouchDB(this.name)
return pouch.deleteIndex(opts)
}
async getIndexes() {
const pouch = getPouchDB(this.name)
return pouch.getIndexes()
}
}
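A minimal sketch of the new class in use (DB and document names hypothetical); note that put stamps createdAt/updatedAt, and that the force option fetches the current _rev before writing:
const db = new DatabaseImpl("app_dev_a1b2c3")
await db.put({ _id: "doc_1", name: "example" }) // creates the DB lazily via checkSetup
const doc = await db.get<{ name: string }>("doc_1")
await db.put({ _id: "doc_1", name: "updated" }, { force: true }) // overwrite latest _rev
const all = await db.allDocs({ include_docs: true })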

View File

@ -1,7 +1,39 @@
const PouchDB = require("pouchdb")
const env = require("../environment")
import env from "../../environment"
exports.getUrlInfo = (url = env.COUCH_DB_URL) => {
export const getCouchInfo = () => {
const urlInfo = getUrlInfo()
let username
let password
if (env.COUCH_DB_USERNAME) {
// set from env
username = env.COUCH_DB_USERNAME
} else if (urlInfo.auth.username) {
// set from url
username = urlInfo.auth.username
} else if (!env.isTest()) {
throw new Error("CouchDB username not set")
}
if (env.COUCH_DB_PASSWORD) {
// set from env
password = env.COUCH_DB_PASSWORD
} else if (urlInfo.auth.password) {
// set from url
password = urlInfo.auth.password
} else if (!env.isTest()) {
throw new Error("CouchDB password not set")
}
const authCookie = Buffer.from(`${username}:${password}`).toString("base64")
return {
url: urlInfo.url!,
auth: {
username: username,
password: password,
},
cookie: `Basic ${authCookie}`,
}
}
export const getUrlInfo = (url = env.COUCH_DB_URL) => {
let cleanUrl, username, password, host
if (url) {
// Ensure the URL starts with a protocol
@ -43,82 +75,3 @@ exports.getUrlInfo = (url = env.COUCH_DB_URL) => {
},
}
}
exports.getCouchInfo = () => {
const urlInfo = exports.getUrlInfo()
let username
let password
if (env.COUCH_DB_USERNAME) {
// set from env
username = env.COUCH_DB_USERNAME
} else if (urlInfo.auth.username) {
// set from url
username = urlInfo.auth.username
} else if (!env.isTest()) {
throw new Error("CouchDB username not set")
}
if (env.COUCH_DB_PASSWORD) {
// set from env
password = env.COUCH_DB_PASSWORD
} else if (urlInfo.auth.password) {
// set from url
password = urlInfo.auth.password
} else if (!env.isTest()) {
throw new Error("CouchDB password not set")
}
const authCookie = Buffer.from(`${username}:${password}`).toString("base64")
return {
url: urlInfo.url,
auth: {
username: username,
password: password,
},
cookie: `Basic ${authCookie}`,
}
}
/**
* Return a constructor for PouchDB.
* This should rarely be used outside of the main application config.
* Exposed for exceptional cases such as in-memory views.
*/
exports.getPouch = (opts = {}) => {
let { url, cookie } = exports.getCouchInfo()
let POUCH_DB_DEFAULTS = {
prefix: url,
fetch: (url, opts) => {
// use a specific authorization cookie - be very explicit about how we authenticate
opts.headers.set("Authorization", cookie)
return PouchDB.fetch(url, opts)
},
}
if (opts.inMemory) {
const inMemory = require("pouchdb-adapter-memory")
PouchDB.plugin(inMemory)
POUCH_DB_DEFAULTS = {
prefix: undefined,
adapter: "memory",
}
}
if (opts.onDisk) {
POUCH_DB_DEFAULTS = {
prefix: undefined,
adapter: "leveldb",
}
}
if (opts.replication) {
const replicationStream = require("pouchdb-replication-stream")
PouchDB.plugin(replicationStream.plugin)
PouchDB.adapter("writableStream", replicationStream.adapters.writableStream)
}
if (opts.find) {
const find = require("pouchdb-find")
PouchDB.plugin(find)
}
return PouchDB.defaults(POUCH_DB_DEFAULTS)
}
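A sketch of the expected parsing behaviour, using a hypothetical connection string; credentials resolve from the env vars first and fall back to the URL:
const info = getUrlInfo("http://admin:secret@localhost:5984")
// info.url => "http://localhost:5984"
// info.auth => { username: "admin", password: "secret" }
const couch = getCouchInfo()
// couch.cookie => "Basic " + base64("admin:secret"), sent as the Authorization header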

View File

@ -0,0 +1,4 @@
export * from "./connections"
export * from "./DatabaseImpl"
export * from "./utils"
export { init, getPouch, getPouchDB, closePouchDB } from "./pouchDB"

View File

@ -0,0 +1,97 @@
import PouchDB from "pouchdb"
import env from "../../environment"
import { PouchOptions } from "@budibase/types"
import { getCouchInfo } from "./connections"
let Pouch: any
let initialised = false
/**
* Return a constructor for PouchDB.
* This should rarely be used outside of the main application config.
* Exposed for exceptional cases such as in-memory views.
*/
export const getPouch = (opts: PouchOptions = {}) => {
let { url, cookie } = getCouchInfo()
let POUCH_DB_DEFAULTS = {
prefix: url,
fetch: (url: string, opts: any) => {
// use a specific authorization cookie - be very explicit about how we authenticate
opts.headers.set("Authorization", cookie)
return PouchDB.fetch(url, opts)
},
}
if (opts.inMemory) {
const inMemory = require("pouchdb-adapter-memory")
PouchDB.plugin(inMemory)
POUCH_DB_DEFAULTS = {
// @ts-ignore
adapter: "memory",
}
}
if (opts.onDisk) {
POUCH_DB_DEFAULTS = {
// @ts-ignore
adapter: "leveldb",
}
}
if (opts.replication) {
const replicationStream = require("pouchdb-replication-stream")
PouchDB.plugin(replicationStream.plugin)
// @ts-ignore
PouchDB.adapter("writableStream", replicationStream.adapters.writableStream)
}
if (opts.find) {
const find = require("pouchdb-find")
PouchDB.plugin(find)
}
return PouchDB.defaults(POUCH_DB_DEFAULTS)
}
export function init(opts?: PouchOptions) {
Pouch = getPouch(opts)
initialised = true
}
const checkInitialised = () => {
if (!initialised) {
throw new Error("init has not been called")
}
}
export function getPouchDB(dbName: string, opts?: any): PouchDB.Database {
checkInitialised()
const db = new Pouch(dbName, opts)
const dbPut = db.put
db.put = async (doc: any, options = {}) => {
if (!doc.createdAt) {
doc.createdAt = new Date().toISOString()
}
doc.updatedAt = new Date().toISOString()
return dbPut(doc, options)
}
db.exists = async () => {
const info = await db.info()
return !info.error
}
return db
}
// if you have called getPouchDB, use this function to close
// the databases you've opened once finished
export async function closePouchDB(db: PouchDB.Database) {
if (!db || env.isTest()) {
return
}
try {
// specifically await so that if there is an error, it can be ignored
return await db.close()
} catch (err) {
// ignore error, already closed
}
}
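The intended lifecycle is: call init once at startup, open handles with getPouchDB, and close each one with closePouchDB when finished; a minimal sketch (DB name hypothetical):
init({ find: true })
const db = getPouchDB("app_dev_a1b2c3")
await db.put({ _id: "doc_1" }) // the wrapper stamps createdAt/updatedAt
// @ts-ignore - exists() is patched onto the instance by getPouchDB above
const exists = await db.exists()
await closePouchDB(db)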

View File

@ -0,0 +1,36 @@
import { getCouchInfo } from "./connections"
import fetch from "node-fetch"
import { checkSlashesInUrl } from "../../helpers"
export async function directCouchCall(
path: string,
method: string = "GET",
body?: any
) {
let { url, cookie } = getCouchInfo()
const couchUrl = `${url}/${path}`
const params: any = {
method: method,
headers: {
Authorization: cookie,
},
}
if (body && method !== "GET") {
params.body = JSON.stringify(body)
params.headers["Content-Type"] = "application/json"
}
return await fetch(checkSlashesInUrl(encodeURI(couchUrl)), params)
}
export async function directCouchQuery(
path: string,
method: string = "GET",
body?: any
) {
const response = await directCouchCall(path, method, body)
if (response.status < 300) {
return await response.json()
} else {
throw "Cannot connect to CouchDB instance"
}
}
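A short usage sketch: directCouchQuery parses JSON on success and throws otherwise, while directCouchCall returns the raw response (paths illustrative):
const dbs = await directCouchQuery("_all_dbs") // e.g. ["app_a1b2c3", ...]
const res = await directCouchCall("/app_dev_a1b2c3", "HEAD")
const exists = res.status === 200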

Some files were not shown because too many files have changed in this diff