Merge branch 'develop' into feature/s3-bucket-connector

Mel O'Hagan 2022-10-26 16:37:11 +01:00
commit 4661c083e2
844 changed files with 36117 additions and 14016 deletions

View File

@ -7,4 +7,5 @@ packages/server/client
packages/builder/.routify
packages/builder/cypress/support/queryLevelTransformerFunction.js
packages/builder/cypress/support/queryLevelTransformerFunctionWithData.js
packages/builder/cypress/reports
packages/sdk/sdk

24
.github/ISSUE_TEMPLATE/epic.md vendored Normal file
View File

@ -0,0 +1,24 @@
---
name: Epic
about: Plan a new project
title: ''
labels: epic
assignees: ''
---
## Description
Brief summary of what this Epic is, whether it's a larger project, goal, or user story. Describe the job to be done and which persona this Epic is mainly for; if there are multiple, break it down by user and job story.
## Spec
Link to confluence spec
## Teams and Stakeholders
Describe who needs to be kept up-to-date about this Epic, included in discussions, or updated along the way. Stakeholders can be in Product/Engineering, as well as in other teams like Customer Success who might want to keep customers updated on the Epic project.
## Workflow
- [ ] Spec Created and pasted above
- [ ] Product Review
- [ ] Designs created
- [ ] Individual Tasks created and assigned to Epic

View File

@ -23,6 +23,15 @@ jobs:
build:
runs-on: ubuntu-latest
services:
couchdb:
image: ibmcom/couchdb3
env:
COUCHDB_PASSWORD: budibase
COUCHDB_USER: budibase
ports:
- 4567:5984
strategy:
matrix:
node-version: [14.x]
@ -53,9 +62,8 @@ jobs:
name: codecov-umbrella
verbose: true
# TODO: parallelise this
- name: Cypress run
uses: cypress-io/github-action@v2
with:
install: false
command: yarn test:e2e:ci
- name: QA Core Integration Tests
run: |
cd qa-core
yarn
yarn api:test:ci

View File

@ -4,8 +4,6 @@ on:
workflow_dispatch:
env:
BASE_BRANCH: ${{ github.event.pull_request.base.ref}}
BRANCH: ${{ github.event.pull_request.head.ref }}
CI: true
PERSONAL_ACCESS_TOKEN : ${{ secrets.PERSONAL_ACCESS_TOKEN }}
REGISTRY_URL: registry.hub.docker.com
@ -17,6 +15,11 @@ jobs:
matrix:
node-version: [14.x]
steps:
- name: Fail if branch is not master
if: github.ref != 'refs/heads/master'
run: |
echo "Ref is not master, you must run this job from master."
exit 1
- name: "Checkout"
uses: actions/checkout@v2
- name: Use Node.js ${{ matrix.node-version }}
@ -28,8 +31,6 @@ jobs:
- name: Setup Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v1
- name: Install Pro
run: yarn install:pro $BRANCH $BASE_BRANCH
- name: Run Yarn
run: yarn
- name: Run Yarn Bootstrap

View File

@ -46,7 +46,8 @@ jobs:
- run: yarn
- run: yarn bootstrap
- run: yarn lint
- run: yarn build
- run: yarn build:sdk
- run: yarn test
- name: Configure AWS Credentials

View File

@ -3,10 +3,6 @@ name: Budibase Release Selfhost
on:
workflow_dispatch:
env:
BRANCH: ${{ github.event.pull_request.head.ref }}
BASE_BRANCH: ${{ github.event.pull_request.base.ref}}
jobs:
release:
runs-on: ubuntu-latest
@ -54,9 +50,6 @@ jobs:
DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
SELFHOST_TAG: latest
- name: Install Pro
run: yarn install:pro $BRANCH $BASE_BRANCH
- name: Bootstrap and build (CLI)
run: |
yarn

View File

@ -56,6 +56,7 @@ jobs:
- run: yarn bootstrap
- run: yarn lint
- run: yarn build
- run: yarn build:sdk
- run: yarn test
- name: Configure AWS Credentials

5
.gitignore vendored
View File

@ -63,6 +63,7 @@ typings/
# dotenv environment variables file
.env
!qa-core/.env
!hosting/.env
hosting/.generated-nginx.dev.conf
hosting/proxy/.generated-nginx.prod.conf
@ -102,4 +103,6 @@ packages/builder/cypress/reports
stats.html
# TypeScript cache
*.tsbuildinfo
budibase-component
budibase-datasource

View File

@ -9,3 +9,4 @@ packages/server/src/definitions/openapi.ts
packages/builder/.routify
packages/builder/cypress/support/queryLevelTransformerFunction.js
packages/builder/cypress/support/queryLevelTransformerFunctionWithData.js
packages/sdk/sdk

View File

@ -65,7 +65,7 @@ Budibase is open-source - licensed as GPL v3. This should fill you with confiden
<br /><br />
### Load data or start from scratch
Budibase pulls in data from multiple sources, including MongoDB, CouchDB, PostgreSQL, MySQL, Airtable, S3, DynamoDB, or a REST API. And unlike other platforms, with Budibase you can start from scratch and create business apps with no data sources. [Request new data sources](https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas).
Budibase pulls in data from multiple sources, including MongoDB, CouchDB, PostgreSQL, MySQL, Airtable, S3, DynamoDB, or a REST API. And unlike other platforms, with Budibase you can start from scratch and create business apps with no datasources. [Request new datasources](https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas).
<p align="center">
<img alt="Budibase data" src="https://res.cloudinary.com/daog6scxm/image/upload/v1636970242/Out%20of%20beta%20launch/data_n1tlhf.png">

View File

@ -78,6 +78,12 @@ spec:
key: objectStoreSecret
- name: MINIO_URL
value: {{ .Values.services.objectStore.url }}
- name: PLUGIN_BUCKET_NAME
value: {{ .Values.services.objectStore.pluginBucketName | quote }}
- name: APPS_BUCKET_NAME
value: {{ .Values.services.objectStore.appsBucketName | quote }}
- name: GLOBAL_CLOUD_BUCKET_NAME
value: {{ .Values.services.objectStore.globalBucketName | quote }}
- name: PORT
value: {{ .Values.services.apps.port | quote }}
{{ if .Values.services.worker.publicApiRateLimitPerSecond }}
@ -124,11 +130,31 @@ spec:
value: {{ .Values.globals.tenantFeatureFlags | quote }}
{{ if .Values.globals.bbAdminUserEmail }}
- name: BB_ADMIN_USER_EMAIL
value: { { .Values.globals.bbAdminUserEmail | quote } }
value: {{ .Values.globals.bbAdminUserEmail | quote }}
{{ end }}
{{ if .Values.globals.bbAdminUserPassword }}
- name: BB_ADMIN_USER_PASSWORD
value: { { .Values.globals.bbAdminUserPassword | quote } }
value: {{ .Values.globals.bbAdminUserPassword | quote }}
{{ end }}
{{ if .Values.globals.pluginsDir }}
- name: PLUGINS_DIR
value: {{ .Values.globals.pluginsDir | quote }}
{{ end }}
{{ if .Values.services.apps.nodeDebug }}
- name: NODE_DEBUG
value: {{ .Values.services.apps.nodeDebug | quote }}
{{ end }}
{{ if .Values.globals.elasticApmEnabled }}
- name: ELASTIC_APM_ENABLED
value: {{ .Values.globals.elasticApmEnabled | quote }}
{{ end }}
{{ if .Values.globals.elasticApmSecretToken }}
- name: ELASTIC_APM_SECRET_TOKEN
value: {{ .Values.globals.elasticApmSecretToken | quote }}
{{ end }}
{{ if .Values.globals.elasticApmServerUrl }}
- name: ELASTIC_APM_SERVER_URL
value: {{ .Values.globals.elasticApmServerUrl | quote }}
{{ end }}
image: budibase/apps:{{ .Values.globals.appVersion }}
@ -142,7 +168,10 @@ spec:
name: bbapps
ports:
- containerPort: {{ .Values.services.apps.port }}
resources: {}
{{ with .Values.services.apps.resources }}
resources:
{{- toYaml . | nindent 10 }}
{{ end }}
{{- with .Values.affinity }}
affinity:
{{- toYaml . | nindent 8 }}

View File

@ -38,7 +38,10 @@ spec:
image: redgeoff/replicate-couchdb-cluster
imagePullPolicy: Always
name: couchdb-backup
resources: {}
{{ with .Values.services.couchdb.backup.resources }}
resources:
{{- toYaml . | nindent 10 }}
{{ end }}
{{- with .Values.affinity }}
affinity:
{{- toYaml . | nindent 8 }}

View File

@ -56,7 +56,10 @@ spec:
name: minio-service
ports:
- containerPort: {{ .Values.services.objectStore.port }}
resources: {}
{{ with .Values.services.objectStore.resources }}
resources:
{{- toYaml . | nindent 10 }}
{{ end }}
volumeMounts:
- mountPath: /data
name: minio-data

View File

@ -30,7 +30,10 @@ spec:
name: proxy-service
ports:
- containerPort: {{ .Values.services.proxy.port }}
resources: {}
{{ with .Values.services.proxy.resources }}
resources:
{{- toYaml . | nindent 10 }}
{{ end }}
volumeMounts:
{{- with .Values.affinity }}
affinity:

View File

@ -35,7 +35,10 @@ spec:
name: redis-service
ports:
- containerPort: {{ .Values.services.redis.port }}
resources: {}
{{ with .Values.services.redis.resources }}
resources:
{{- toYaml . | nindent 10 }}
{{ end }}
volumeMounts:
- mountPath: /data
name: redis-data

View File

@ -27,6 +27,8 @@ spec:
spec:
containers:
- env:
- name: BUDIBASE_ENVIRONMENT
value: {{ .Values.globals.budibaseEnv }}
- name: DEPLOYMENT_ENVIRONMENT
value: "kubernetes"
- name: CLUSTER_PORT
@ -75,6 +77,12 @@ spec:
key: objectStoreSecret
- name: MINIO_URL
value: {{ .Values.services.objectStore.url }}
- name: PLUGIN_BUCKET_NAME
value: {{ .Values.services.objectStore.pluginBucketName | quote }}
- name: APPS_BUCKET_NAME
value: {{ .Values.services.objectStore.appsBucketName | quote }}
- name: GLOBAL_CLOUD_BUCKET_NAME
value: {{ .Values.services.objectStore.globalBucketName | quote }}
- name: PORT
value: {{ .Values.services.worker.port | quote }}
- name: MULTI_TENANCY
@ -125,6 +133,19 @@ spec:
value: {{ .Values.globals.google.secret | quote }}
- name: TENANT_FEATURE_FLAGS
value: {{ .Values.globals.tenantFeatureFlags | quote }}
{{ if .Values.globals.elasticApmEnabled }}
- name: ELASTIC_APM_ENABLED
value: {{ .Values.globals.elasticApmEnabled | quote }}
{{ end }}
{{ if .Values.globals.elasticApmSecretToken }}
- name: ELASTIC_APM_SECRET_TOKEN
value: {{ .Values.globals.elasticApmSecretToken | quote }}
{{ end }}
{{ if .Values.globals.elasticApmServerUrl }}
- name: ELASTIC_APM_SERVER_URL
value: {{ .Values.globals.elasticApmServerUrl | quote }}
{{ end }}
image: budibase/worker:{{ .Values.globals.appVersion }}
imagePullPolicy: Always
livenessProbe:
@ -136,7 +157,10 @@ spec:
name: bbworker
ports:
- containerPort: {{ .Values.services.worker.port }}
resources: {}
{{ with .Values.services.worker.resources }}
resources:
{{- toYaml . | nindent 10 }}
{{ end }}
{{- with .Values.affinity }}
affinity:
{{- toYaml . | nindent 8 }}

View File

@ -60,19 +60,6 @@ ingress:
port:
number: 10000
resources:
{}
# We usually recommend not to specify default resources and to leave this as a conscious
# choice for the user. This also increases chances charts run on environments with little
# resources, such as Minikube. If you do want to specify resources, uncomment the following
# lines, adjust them as necessary, and remove the curly braces after 'resources:'.
# limits:
# cpu: 100m
# memory: 128Mi
# requests:
# cpu: 100m
# memory: 128Mi
autoscaling:
enabled: false
minReplicas: 1
@ -89,6 +76,7 @@ affinity: {}
globals:
appVersion: "latest"
budibaseEnv: PRODUCTION
tenantFeatureFlags: "*:LICENSING,*:USER_GROUPS"
enableAnalytics: "1"
sentryDSN: ""
posthogToken: "phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU"
@ -114,6 +102,10 @@ globals:
smtp:
enabled: false
# elasticApmEnabled:
# elasticApmSecretToken:
# elasticApmServerUrl:
services:
budibaseVersion: latest
dns: cluster.local
@ -121,15 +113,19 @@ services:
proxy:
port: 10000
replicaCount: 1
resources: {}
apps:
port: 4002
replicaCount: 1
logLevel: info
resources: {}
# nodeDebug: "" # set the value of NODE_DEBUG
worker:
port: 4003
replicaCount: 1
resources: {}
couchdb:
enabled: true
@ -143,6 +139,7 @@ services:
target: ""
# backup interval in seconds
interval: ""
resources: {}
redis:
enabled: true # disable if using external redis
@ -156,6 +153,7 @@ services:
## If undefined (the default) or set to null, no storageClassName spec is
## set, choosing the default provisioner.
storageClass: ""
resources: {}
objectStore:
minio: true
@ -172,6 +170,7 @@ services:
## If undefined (the default) or set to null, no storageClassName spec is
## set, choosing the default provisioner.
storageClass: ""
resources: {}
# Override values in couchDB subchart
couchdb:

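The chart templates above now read per-service `resources` blocks (e.g. `.Values.services.apps.resources`) instead of the removed top-level default. As a minimal sketch, assuming a user-supplied override file (file name and values are illustrative), requests and limits can be set per service like so:
```yaml
# values-override.yaml (hypothetical) -- passed via `helm install -f values-override.yaml`
services:
  apps:
    resources:
      requests:
        cpu: 100m
        memory: 128Mi
      limits:
        cpu: 200m
        memory: 256Mi
```
Each block is rendered verbatim into the matching deployment via `{{- toYaml . | nindent 10 }}`, and the `resources:` key is omitted entirely when the value is left as `resources: {}`.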
View File

@ -1,12 +1,15 @@
## Dev Environment on Debian 11
### Install Node
### Install NVM & Node 14
NVM documentation: https://github.com/nvm-sh/nvm#installing-and-updating
Budibase requires a recent version of node (14+):
Install NVM
```
curl -sL https://deb.nodesource.com/setup_16.x | sudo bash -
apt -y install nodejs
node -v
curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.0/install.sh | bash
```
Install Node 14
```
nvm install 14
```
### Install npm requirements
@ -31,7 +34,7 @@ This setup process was tested on Debian 11 (bullseye) with version numbers show
- Docker: 20.10.5
- Docker-Compose: 1.29.2
- Node: v16.15.1
- Node: v14.20.1
- Yarn: 1.22.19
- Lerna: 5.1.4

View File

@ -11,7 +11,7 @@ through brew.
### Install Node
Budibase requires a recent version of node (14+):
Budibase requires node version 14:
```
brew install node npm
node -v
@ -38,7 +38,7 @@ This setup process was tested on Mac OSX 12 (Monterey) with version numbers show
- Docker: 20.10.14
- Docker-Compose: 2.6.0
- Node: 18.3.0
- Node: 14.20.1
- Yarn: 1.22.19
- Lerna: 5.1.4
@ -59,4 +59,7 @@ The dev version will be available on port 10000 i.e.
http://127.0.0.1:10000/builder/admin
| **NOTE**: If you are working on an M1 Apple Silicon Mac, you will need to uncomment the `# platform: linux/amd64` line in
[hosting/docker-compose-dev.yaml](../hosting/docker-compose.dev.yaml)
### Troubleshooting
If there are errors with the `yarn setup` command, you can try installing nvm and Node 14, the same as in the Debian 11 instructions; the commands are repeated below.
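For reference, these are the same commands used in the Debian 11 section:
```
curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.0/install.sh | bash
nvm install 14
```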

81
docs/DEV-SETUP-WINDOWS.md Normal file
View File

@ -0,0 +1,81 @@
## Dev Environment on Windows 10/11 (WSL2)
### Install WSL with Ubuntu LTS
Enable WSL 2 on Windows 10/11 for docker support.
```
wsl --set-default-version 2
```
Install Ubuntu LTS.
```
wsl --install Ubuntu
```
Or follow the instruction here:
https://learn.microsoft.com/en-us/windows/wsl/install
### Install Docker in Windows
Download the installer from Docker and install it.
Check this URL for more detailed instructions:
https://docs.docker.com/desktop/install/windows-install/
You should follow the next steps from within the Ubuntu terminal.
### Install NVM & Node 14
NVM documentation: https://github.com/nvm-sh/nvm#installing-and-updating
Install NVM
```
curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.0/install.sh | bash
```
Install Node 14
```
nvm install 14
```
### Install npm requirements
```
npm install -g yarn jest lerna
```
### Clone the repo
```
git clone https://github.com/Budibase/budibase.git
```
### Check Versions
This setup process was tested on Windows 11 with the version numbers shown below. Your mileage may vary using anything else.
- Docker: 20.10.7
- Docker-Compose: 2.10.2
- Node: v14.20.1
- Yarn: 1.22.19
- Lerna: 5.5.4
### Build
```
cd budibase
yarn setup
```
The `yarn setup` command runs several build steps, i.e.
```
node ./hosting/scripts/setup.js && yarn && yarn bootstrap && yarn build && yarn dev
```
So this command will actually run the application in dev mode. It creates `.env` files under `./packages/server` and `./packages/worker`, and runs Docker containers for each service via docker-compose.
The dev version will be available on port 10000 i.e.
http://127.0.0.1:10000/builder/admin
### Working with the code
Here are the instructions for working on the application from within Visual Studio Code (in Windows) through WSL. All the commands and files are within the Ubuntu system, and everything should run as if you were working on a Linux machine.
https://code.visualstudio.com/docs/remote/wsl
Note that you will be able to run the application from within the WSL terminal and access it from a browser in Windows.

View File

@ -348,7 +348,7 @@ export interface paths {
}
}
responses: {
/** Returns the created table, including the ID which has been generated for it. This can be internal or external data sources. */
/** Returns the created table, including the ID which has been generated for it. This can be internal or external datasources. */
200: {
content: {
"application/json": components["schemas"]["tableOutput"]
@ -959,7 +959,7 @@ export interface components {
query: {
/** @description The ID of the query. */
_id: string
/** @description The ID of the data source the query belongs to. */
/** @description The ID of the datasource the query belongs to. */
datasourceId?: string
/** @description The bindings which are required to perform this query. */
parameters?: string[]
@ -983,7 +983,7 @@ export interface components {
data: {
/** @description The ID of the query. */
_id: string
/** @description The ID of the data source the query belongs to. */
/** @description The ID of the datasource the query belongs to. */
datasourceId?: string
/** @description The bindings which are required to perform this query. */
parameters?: string[]

View File

@ -11,8 +11,8 @@
"dependencies": {
"bulma": "^0.9.3",
"next": "12.1.0",
"node-fetch": "^3.2.2",
"node-sass": "^7.0.1",
"node-fetch": "^3.2.10",
"sass": "^1.52.3",
"react": "17.0.2",
"react-dom": "17.0.2",
"react-notifications-component": "^3.4.1"
@ -24,4 +24,4 @@
"eslint-config-next": "12.1.0",
"typescript": "4.6.2"
}
}

View File

@ -2020,10 +2020,10 @@ node-domexception@^1.0.0:
resolved "https://registry.yarnpkg.com/node-domexception/-/node-domexception-1.0.0.tgz#6888db46a1f71c0b76b3f7555016b63fe64766e5"
integrity sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==
node-fetch@^3.2.2:
version "3.2.2"
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-3.2.2.tgz#16d33fbe32ca7c6ca1ca8ba5dfea1dd885c59f04"
integrity sha512-Cwhq1JFIoon15wcIkFzubVNFE5GvXGV82pKf4knXXjvGmn7RJKcypeuqcVNZMGDZsAFWyIRya/anwAJr7TWJ7w==
node-fetch@^3.2.10:
version "3.2.10"
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-3.2.10.tgz#e8347f94b54ae18b57c9c049ef641cef398a85c8"
integrity sha512-MhuzNwdURnZ1Cp4XTazr69K0BTizsBroX7Zx3UgDSVcZYKF/6p0CBe4EUb/hLqmzVhl0UpYfgRljQ4yxE+iCxA==
dependencies:
data-uri-to-buffer "^4.0.0"
fetch-blob "^3.1.4"

View File

@ -22,4 +22,7 @@ BUDIBASE_ENVIRONMENT=PRODUCTION
# An admin user can be automatically created initially if these are set
BB_ADMIN_USER_EMAIL=
BB_ADMIN_USER_PASSWORD=
# A path that is watched for plugin bundles. Any bundles found are imported automatically.
PLUGINS_DIR=

View File

@ -25,9 +25,12 @@ services:
REDIS_PASSWORD: ${REDIS_PASSWORD}
BB_ADMIN_USER_EMAIL: ${BB_ADMIN_USER_EMAIL}
BB_ADMIN_USER_PASSWORD: ${BB_ADMIN_USER_PASSWORD}
PLUGINS_DIR: ${PLUGINS_DIR}
depends_on:
- worker-service
- redis-service
# volumes:
# - /some/path/to/plugins:/plugins
worker-service:
restart: unless-stopped
@ -78,6 +81,7 @@ services:
image: budibase/proxy
environment:
- PROXY_RATE_LIMIT_WEBHOOKS_PER_SECOND=10
- PROXY_RATE_LIMIT_API_PER_SECOND=20
depends_on:
- minio-service
- worker-service

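As a sketch of how the plugin settings above fit together (the host path is the placeholder from the commented volume, and it's assumed that `PLUGINS_DIR` matches the container-side mount target), the directory is passed through from the `.env` file and mounted by uncommenting the volume:
```yaml
# excerpt from the compose file above -- illustrative only
app-service:
  environment:
    PLUGINS_DIR: ${PLUGINS_DIR}  # e.g. PLUGINS_DIR=/plugins in .env
  volumes:
    - /some/path/to/plugins:/plugins
```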
View File

@ -22,4 +22,7 @@ BUDIBASE_ENVIRONMENT=PRODUCTION
# An admin user can be automatically created initially if these are set
BB_ADMIN_USER_EMAIL=
BB_ADMIN_USER_PASSWORD=
# A path that is watched for plugin bundles. Any bundles found are imported automatically.
PLUGINS_DIR=

View File

@ -15,12 +15,30 @@ http {
log_format main '$remote_addr - $remote_user [$time_local] "$request" '
'$status $body_bytes_sent "$http_referer" '
'"$http_user_agent" "$http_x_forwarded_for"';
'"$http_user_agent" "$http_x_forwarded_for" '
'response_time=$upstream_response_time proxy_host=$proxy_host upstream_addr=$upstream_addr';
access_log /var/log/nginx/access.log main;
map $http_upgrade $connection_upgrade {
default "upgrade";
}
upstream app-service {
server {{address}}:4001;
keepalive 32;
}
upstream worker-service {
server {{address}}:4002;
keepalive 32;
}
upstream builder {
server {{address}}:3000;
keepalive 32;
}
server {
listen 10000 default_server;
server_name _;
@ -40,45 +58,88 @@ http {
}
location ~ ^/api/(system|admin|global)/ {
proxy_pass http://{{ address }}:4002;
proxy_pass http://worker-service;
proxy_read_timeout 120s;
proxy_connect_timeout 120s;
proxy_send_timeout 120s;
proxy_http_version 1.1;
proxy_set_header Connection "";
}
location /api/ {
proxy_read_timeout 120s;
proxy_connect_timeout 120s;
proxy_send_timeout 120s;
proxy_pass http://{{ address }}:4001;
proxy_pass http://app-service;
proxy_http_version 1.1;
proxy_set_header Connection "";
}
location = / {
proxy_pass http://{{ address }}:4001;
proxy_pass http://app-service;
proxy_read_timeout 120s;
proxy_connect_timeout 120s;
proxy_send_timeout 120s;
proxy_http_version 1.1;
proxy_set_header Connection "";
}
location /app_ {
proxy_pass http://{{ address }}:4001;
proxy_pass http://app-service;
proxy_read_timeout 120s;
proxy_connect_timeout 120s;
proxy_send_timeout 120s;
proxy_http_version 1.1;
proxy_set_header Connection "";
}
location /app {
proxy_pass http://{{ address }}:4001;
}
location /preview {
proxy_pass http://{{ address }}:4001;
proxy_pass http://app-service;
proxy_read_timeout 120s;
proxy_connect_timeout 120s;
proxy_send_timeout 120s;
proxy_http_version 1.1;
proxy_set_header Connection "";
}
location /builder {
proxy_pass http://{{ address }}:3000;
proxy_pass http://builder;
proxy_read_timeout 120s;
proxy_connect_timeout 120s;
proxy_send_timeout 120s;
proxy_http_version 1.1;
proxy_set_header Connection "";
rewrite ^/builder(.*)$ /builder/$1 break;
}
location /builder/ {
proxy_pass http://{{ address }}:3000;
proxy_pass http://builder;
proxy_http_version 1.1;
proxy_set_header Connection $connection_upgrade;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_read_timeout 120s;
proxy_connect_timeout 120s;
proxy_send_timeout 120s;
}
location /vite/ {
proxy_pass http://builder;
proxy_read_timeout 120s;
proxy_connect_timeout 120s;
proxy_send_timeout 120s;
rewrite ^/vite(.*)$ /$1 break;
}
location /socket/ {
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection 'upgrade';
proxy_set_header Host $host;
proxy_cache_bypass $http_upgrade;
proxy_pass http://app-service;
}
location / {

View File

@ -11,7 +11,7 @@ events {
http {
# rate limiting
limit_req_status 429;
limit_req_zone $binary_remote_addr zone=ratelimit:10m rate=20r/s;
limit_req_zone $binary_remote_addr zone=ratelimit:10m rate=${PROXY_RATE_LIMIT_API_PER_SECOND}r/s;
limit_req_zone $binary_remote_addr zone=webhooks:10m rate=${PROXY_RATE_LIMIT_WEBHOOKS_PER_SECOND}r/s;
include /etc/nginx/mime.types;
@ -33,7 +33,10 @@ http {
log_format main '$remote_addr - $remote_user [$time_local] "$request" '
'$status $body_bytes_sent "$http_referer" '
'"$http_user_agent" "$http_x_forwarded_for"';
'"$http_user_agent" "$http_x_forwarded_for" '
'response_time=$upstream_response_time proxy_host=$proxy_host upstream_addr=$upstream_addr';
access_log /var/log/nginx/access.log main;
map $http_upgrade $connection_upgrade {
default "upgrade";
@ -85,10 +88,6 @@ http {
proxy_pass http://$apps:4002;
}
location /preview {
proxy_pass http://$apps:4002;
}
location = / {
proxy_pass http://$apps:4002;
}
@ -159,6 +158,15 @@ http {
rewrite ^/db/(.*)$ /$1 break;
}
location /socket/ {
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection 'upgrade';
proxy_set_header Host $host;
proxy_cache_bypass $http_upgrade;
proxy_pass http://$apps:4002;
}
location / {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;

View File

@ -0,0 +1,24 @@
#!/bin/sh
# vim:sw=4:ts=4:et
set -e
ME=$(basename $0)
NGINX_CONF_FILE="/etc/nginx/nginx.conf"
DEFAULT_CONF_FILE="/etc/nginx/conf.d/default.conf"
# check if we have ipv6 available
if [ ! -f "/proc/net/if_inet6" ]; then
# ipv6 not available so delete lines from nginx conf
if [ -f "$NGINX_CONF_FILE" ]; then
sed -i '/listen \[::\]/d' $NGINX_CONF_FILE
fi
if [ -f "$DEFAULT_CONF_FILE" ]; then
sed -i '/listen \[::\]/d' $DEFAULT_CONF_FILE
fi
echo "$ME: info: ipv6 not available so delete lines from nginx conf"
else
echo "$ME: info: ipv6 is available so no need to delete lines from nginx conf"
fi
exit 0

View File

@ -0,0 +1,24 @@
#!/bin/sh
# vim:sw=4:ts=4:et
set -e
ME=$(basename $0)
NGINX_CONF_FILE="/etc/nginx/nginx.conf"
DEFAULT_CONF_FILE="/etc/nginx/conf.d/default.conf"
# check if we have ipv6 available
if [ ! -f "/proc/net/if_inet6" ]; then
# ipv6 not available so delete lines from nginx conf
if [ -f "$NGINX_CONF_FILE" ]; then
sed -i '/listen \[::\]/d' $NGINX_CONF_FILE
fi
if [ -f "$DEFAULT_CONF_FILE" ]; then
sed -i '/listen \[::\]/d' $DEFAULT_CONF_FILE
fi
echo "$ME: info: ipv6 not available so delete lines from nginx conf"
else
echo "$ME: info: ipv6 is available so no need to delete lines from nginx conf"
fi
exit 0

View File

@ -6,8 +6,14 @@ FROM nginx:latest
ENV NGINX_ENVSUBST_OUTPUT_DIR=/etc/nginx
COPY .generated-nginx.prod.conf /etc/nginx/templates/nginx.conf.template
# IPv6 removal needs to happen after envsubst
RUN rm -rf /docker-entrypoint.d/10-listen-on-ipv6-by-default.sh
COPY 80-listen-on-ipv6-by-default.sh /docker-entrypoint.d/80-listen-on-ipv6-by-default.sh
RUN chmod +x /docker-entrypoint.d/80-listen-on-ipv6-by-default.sh
# Error handling
COPY error.html /usr/share/nginx/html/error.html
# Default environment
ENV PROXY_RATE_LIMIT_WEBHOOKS_PER_SECOND=10
ENV PROXY_RATE_LIMIT_API_PER_SECOND=20
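A sketch of overriding these defaults at container start (the values are illustrative; the nginx template above substitutes them via envsubst):
```
docker run -d \
  -e PROXY_RATE_LIMIT_API_PER_SECOND=50 \
  -e PROXY_RATE_LIMIT_WEBHOOKS_PER_SECOND=20 \
  budibase/proxy
```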

View File

@ -4,17 +4,21 @@ echo ${TARGETBUILD} > /buildtarget.txt
if [[ "${TARGETBUILD}" = "aas" ]]; then
# Azure AppService uses /home for persistent data & SSH on port 2222
DATA_DIR=/home
mkdir -p $DATA_DIR/{search,minio,couchdb}
mkdir -p $DATA_DIR/couchdb/{dbs,views}
chown -R couchdb:couchdb $DATA_DIR/couchdb/
WEBSITES_ENABLE_APP_SERVICE_STORAGE=true
mkdir -p $DATA_DIR/{search,minio,couch}
mkdir -p $DATA_DIR/couch/{dbs,views}
chown -R couchdb:couchdb $DATA_DIR/couch/
apt update
apt-get install -y openssh-server
sed -i "s/#Port 22/Port 2222/" /etc/ssh/sshd_config
echo "root:Docker!" | chpasswd
mkdir -p /tmp
chmod +x /tmp/ssh_setup.sh \
&& (sleep 1;/tmp/ssh_setup.sh 2>&1 > /dev/null)
cp /etc/sshd_config /etc/ssh/sshd_config
/etc/init.d/ssh restart
sed -i "s#DATA_DIR#/home#g" /opt/clouseau/clouseau.ini
sed -i "s#DATA_DIR#/home#g" /opt/couchdb/etc/local.ini
else
sed -i "s#DATA_DIR#/data#g" /opt/clouseau/clouseau.ini
sed -i "s#DATA_DIR#/data#g" /opt/couchdb/etc/local.ini
fi

View File

@ -19,8 +19,8 @@ ADD packages/worker .
RUN node /pinVersions.js && yarn && yarn build && /cleanup.sh
FROM couchdb:3.2.1
# TARGETARCH can be amd64 or arm e.g. docker build --build-arg TARGETARCH=amd64
ARG TARGETARCH=amd64
ARG TARGETARCH
ENV TARGETARCH $TARGETARCH
#TARGETBUILD can be set to single (for single docker image) or aas (for azure app service)
# e.g. docker build --build-arg TARGETBUILD=aas ....
ARG TARGETBUILD=single
@ -29,23 +29,8 @@ ENV TARGETBUILD $TARGETBUILD
COPY --from=build /app /app
COPY --from=build /worker /worker
ENV \
APP_PORT=4001 \
ARCHITECTURE=amd \
BUDIBASE_ENVIRONMENT=PRODUCTION \
CLUSTER_PORT=80 \
# CUSTOM_DOMAIN=budi001.custom.com \
DATA_DIR=/data \
DEPLOYMENT_ENVIRONMENT=docker \
MINIO_URL=http://localhost:9000 \
POSTHOG_TOKEN=phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU \
REDIS_URL=localhost:6379 \
SELF_HOSTED=1 \
TARGETBUILD=$TARGETBUILD \
WORKER_PORT=4002 \
WORKER_URL=http://localhost:4002 \
APPS_URL=http://localhost:4001
# ENV CUSTOM_DOMAIN=budi001.custom.com \
# See runner.sh for Env Vars
# These secret env variables are generated by the runner at startup
# their values can be overriden by the user, they will be written
# to the .env file in the /data directory for use later on
@ -117,6 +102,8 @@ RUN chmod +x ./build-target-paths.sh
# Script below sets the path for storing data based on $DATA_DIR
# For Azure App Service install SSH & point data locations to /home
ADD hosting/single/ssh/sshd_config /etc/
ADD hosting/single/ssh/ssh_setup.sh /tmp
RUN /build-target-paths.sh
# cleanup cache
@ -124,6 +111,8 @@ RUN yarn cache clean -f
EXPOSE 80
EXPOSE 443
# Expose port 2222 for SSH on Azure App Service build
EXPOSE 2222
VOLUME /data
# setup letsencrypt certificate

View File

@ -1,5 +1,5 @@
; CouchDB Configuration Settings
[couchdb]
database_dir = DATA_DIR/couchdb/dbs
view_index_dir = DATA_DIR/couchdb/views
database_dir = DATA_DIR/couch/dbs
view_index_dir = DATA_DIR/couch/views

View File

@ -66,6 +66,15 @@ server {
rewrite ^/db/(.*)$ /$1 break;
}
location /socket/ {
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection 'upgrade';
proxy_set_header Host $host;
proxy_cache_bypass $http_upgrade;
proxy_pass http://127.0.0.1:4001;
}
location / {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;

View File

@ -1,18 +1,45 @@
#!/bin/bash
declare -a ENV_VARS=("COUCHDB_USER" "COUCHDB_PASSWORD" "DATA_DIR" "MINIO_ACCESS_KEY" "MINIO_SECRET_KEY" "INTERNAL_API_KEY" "JWT_SECRET" "REDIS_PASSWORD")
declare -a DOCKER_VARS=("APP_PORT" "APPS_URL" "ARCHITECTURE" "BUDIBASE_ENVIRONMENT" "CLUSTER_PORT" "DEPLOYMENT_ENVIRONMENT" "MINIO_URL" "NODE_ENV" "POSTHOG_TOKEN" "REDIS_URL" "SELF_HOSTED" "WORKER_PORT" "WORKER_URL" "TENANT_FEATURE_FLAGS" "ACCOUNT_PORTAL_URL")
# Check the env vars set in Dockerfile have come through, AAS seems to drop them
[[ -z "${APP_PORT}" ]] && export APP_PORT=4001
[[ -z "${ARCHITECTURE}" ]] && export ARCHITECTURE=amd
[[ -z "${BUDIBASE_ENVIRONMENT}" ]] && export BUDIBASE_ENVIRONMENT=PRODUCTION
[[ -z "${CLUSTER_PORT}" ]] && export CLUSTER_PORT=80
[[ -z "${DEPLOYMENT_ENVIRONMENT}" ]] && export DEPLOYMENT_ENVIRONMENT=docker
[[ -z "${MINIO_URL}" ]] && export MINIO_URL=http://localhost:9000
[[ -z "${NODE_ENV}" ]] && export NODE_ENV=production
[[ -z "${POSTHOG_TOKEN}" ]] && export POSTHOG_TOKEN=phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU
[[ -z "${TENANT_FEATURE_FLAGS}" ]] && export TENANT_FEATURE_FLAGS="*:LICENSING,*:USER_GROUPS"
[[ -z "${ACCOUNT_PORTAL_URL}" ]] && export ACCOUNT_PORTAL_URL=https://account.budibase.app
[[ -z "${REDIS_URL}" ]] && export REDIS_URL=localhost:6379
[[ -z "${SELF_HOSTED}" ]] && export SELF_HOSTED=1
[[ -z "${WORKER_PORT}" ]] && export WORKER_PORT=4002
[[ -z "${WORKER_URL}" ]] && export WORKER_URL=http://localhost:4002
[[ -z "${APPS_URL}" ]] && export APPS_URL=http://localhost:4001
# export CUSTOM_DOMAIN=budi001.custom.com
# Azure App Service customisations
if [[ "${TARGETBUILD}" = "aas" ]]; then
DATA_DIR=/home
WEBSITES_ENABLE_APP_SERVICE_STORAGE=true
/etc/init.d/ssh start
else
DATA_DIR=${DATA_DIR:-/data}
fi
if [ -f "${DATA_DIR}/.env" ]; then
export $(cat ${DATA_DIR}/.env | xargs)
# Mount NFS or GCP Filestore if env vars exist for it
if [[ -n ${FILESHARE_IP} && -n ${FILESHARE_NAME} ]]; then
echo "Mount file share ${FILESHARE_IP}:/${FILESHARE_NAME} to ${DATA_DIR}"
mount -o nolock ${FILESHARE_IP}:/${FILESHARE_NAME} ${DATA_DIR}
echo "Mounting completed."
fi
# first randomise any unset environment variables
if [ -f "${DATA_DIR}/.env" ]; then
# Read in the .env file and export the variables
for LINE in $(cat ${DATA_DIR}/.env); do export $LINE; done
fi
# randomise any unset environment variables
for ENV_VAR in "${ENV_VARS[@]}"
do
temp=$(eval "echo \$$ENV_VAR")
@ -30,16 +57,23 @@ if [ ! -f "${DATA_DIR}/.env" ]; then
temp=$(eval "echo \$$ENV_VAR")
echo "$ENV_VAR=$temp" >> ${DATA_DIR}/.env
done
for ENV_VAR in "${DOCKER_VARS[@]}"
do
temp=$(eval "echo \$$ENV_VAR")
echo "$ENV_VAR=$temp" >> ${DATA_DIR}/.env
done
echo "COUCH_DB_URL=${COUCH_DB_URL}" >> ${DATA_DIR}/.env
fi
export COUCH_DB_URL=http://$COUCHDB_USER:$COUCHDB_PASSWORD@localhost:5984
# Read in the .env file and export the variables
for LINE in $(cat ${DATA_DIR}/.env); do export $LINE; done
ln -s ${DATA_DIR}/.env /app/.env
ln -s ${DATA_DIR}/.env /worker/.env
# make these directories in runner, incase of mount
mkdir -p ${DATA_DIR}/couchdb/{dbs,views}
mkdir -p ${DATA_DIR}/couch/{dbs,views}
mkdir -p ${DATA_DIR}/minio
mkdir -p ${DATA_DIR}/search
chown -R couchdb:couchdb ${DATA_DIR}/couchdb
chown -R couchdb:couchdb ${DATA_DIR}/couch
redis-server --requirepass $REDIS_PASSWORD &
/opt/clouseau/bin/clouseau &
/minio/minio server ${DATA_DIR}/minio &

View File

@ -0,0 +1,8 @@
#!/bin/sh
ssh-keygen -A
#prepare run dir
if [ ! -d "/var/run/sshd" ]; then
mkdir -p /var/run/sshd
fi

View File

@ -0,0 +1,12 @@
Port 2222
ListenAddress 0.0.0.0
LoginGraceTime 180
X11Forwarding yes
Ciphers aes128-cbc,3des-cbc,aes256-cbc,aes128-ctr,aes192-ctr,aes256-ctr
MACs hmac-sha1,hmac-sha1-96
StrictModes yes
SyslogFacility DAEMON
PasswordAuthentication yes
PermitEmptyPasswords no
PermitRootLogin yes
Subsystem sftp internal-sftp

View File

@ -1,5 +1,5 @@
{
"version": "1.2.44-alpha.10",
"version": "2.0.34-alpha.4",
"npmClient": "yarn",
"packages": [
"packages/*"

View File

@ -3,7 +3,6 @@
"private": true,
"devDependencies": {
"@rollup/plugin-json": "^4.0.2",
"@types/mongodb": "3.6.3",
"@typescript-eslint/parser": "4.28.0",
"babel-eslint": "^10.0.3",
"eslint": "^7.28.0",
@ -13,6 +12,7 @@
"js-yaml": "^4.1.0",
"kill-port": "^1.6.1",
"lerna": "3.14.1",
"madge": "^5.0.1",
"prettier": "^2.3.1",
"prettier-plugin-svelte": "^2.3.0",
"rimraf": "^3.0.2",
@ -25,6 +25,8 @@
"bootstrap": "lerna bootstrap && lerna link && ./scripts/link-dependencies.sh",
"build": "lerna run build",
"build:dev": "lerna run prebuild && tsc --build --watch --preserveWatchOutput",
"build:sdk": "lerna run build:sdk",
"deps:circular": "madge packages/server/dist/index.js packages/worker/src/index.ts packages/backend-core/dist/src/index.js packages/cli/src/index.js --circular",
"release": "lerna publish ${RELEASE_VERSION_TYPE:-patch} --yes --force-publish && yarn release:pro",
"release:develop": "lerna publish prerelease --yes --force-publish --dist-tag develop --exact && yarn release:pro:develop",
"release:pro": "bash scripts/pro/release.sh",
@ -45,8 +47,8 @@
"lint:eslint": "eslint packages",
"lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\"",
"lint": "yarn run lint:eslint && yarn run lint:prettier",
"lint:fix:eslint": "eslint --fix packages",
"lint:fix:prettier": "prettier --write \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\"",
"lint:fix:eslint": "eslint --fix packages qa-core",
"lint:fix:prettier": "prettier --write \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\" && prettier --write \"qa-core/**/*.{js,ts,svelte}\"",
"lint:fix": "yarn run lint:fix:prettier && yarn run lint:fix:eslint",
"test:e2e": "lerna run cy:test --stream",
"test:e2e:ci": "lerna run cy:ci --stream",

View File

@ -6,6 +6,7 @@ const {
updateAppId,
doInAppContext,
doInTenant,
doInContext,
} = require("./src/context")
const identity = require("./src/context/identity")
@ -19,4 +20,5 @@ module.exports = {
doInAppContext,
doInTenant,
identity,
doInContext,
}

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/backend-core",
"version": "1.2.44-alpha.10",
"version": "2.0.34-alpha.4",
"description": "Budibase backend core libraries used in server and worker",
"main": "dist/src/index.js",
"types": "dist/src/index.d.ts",
@ -20,10 +20,13 @@
"test:watch": "jest --watchAll"
},
"dependencies": {
"@budibase/types": "1.2.44-alpha.10",
"@budibase/types": "2.0.34-alpha.4",
"@shopify/jest-koa-mocks": "5.0.1",
"@techpass/passport-openidconnect": "0.3.2",
"aws-sdk": "2.1030.0",
"bcrypt": "5.0.1",
"bcryptjs": "2.4.3",
"bull": "4.10.1",
"dotenv": "16.0.1",
"emitter-listener": "1.1.2",
"ioredis": "4.28.0",
@ -60,7 +63,8 @@
]
},
"devDependencies": {
"@shopify/jest-koa-mocks": "3.1.5",
"@types/chance": "1.1.3",
"@types/ioredis": "4.28.0",
"@types/jest": "27.5.1",
"@types/koa": "2.0.52",
"@types/lodash": "4.14.180",
@ -71,6 +75,7 @@
"@types/semver": "7.3.7",
"@types/tar-fs": "2.0.1",
"@types/uuid": "8.3.4",
"chance": "1.1.3",
"ioredis-mock": "5.8.0",
"jest": "27.5.1",
"koa": "2.7.0",

View File

@ -0,0 +1,3 @@
module.exports = {
...require("./src/plugin"),
}

View File

@ -1,11 +1,11 @@
const passport = require("koa-passport")
const LocalStrategy = require("passport-local").Strategy
const JwtStrategy = require("passport-jwt").Strategy
const { getGlobalDB } = require("./tenancy")
import { getGlobalDB } from "./tenancy"
const refresh = require("passport-oauth2-refresh")
const { Configs } = require("./constants")
const { getScopedConfig } = require("./db/utils")
const {
import { Configs } from "./constants"
import { getScopedConfig } from "./db/utils"
import {
jwt,
local,
authenticated,
@ -13,7 +13,6 @@ const {
oidc,
auditLog,
tenancy,
appTenancy,
authError,
ssoCallbackUrl,
csrf,
@ -22,32 +21,36 @@ const {
builderOnly,
builderOrAdmin,
joiValidator,
} = require("./middleware")
const { invalidateUser } = require("./cache/user")
} from "./middleware"
import { invalidateUser } from "./cache/user"
import { User } from "@budibase/types"
// Strategies
passport.use(new LocalStrategy(local.options, local.authenticate))
passport.use(new JwtStrategy(jwt.options, jwt.authenticate))
passport.serializeUser((user, done) => done(null, user))
passport.serializeUser((user: User, done: any) => done(null, user))
passport.deserializeUser(async (user, done) => {
passport.deserializeUser(async (user: User, done: any) => {
const db = getGlobalDB()
try {
const user = await db.get(user._id)
return done(null, user)
const dbUser = await db.get(user._id)
return done(null, dbUser)
} catch (err) {
console.error(`User not found`, err)
return done(null, false, { message: "User not found" })
}
})
async function refreshOIDCAccessToken(db, chosenConfig, refreshToken) {
async function refreshOIDCAccessToken(
db: any,
chosenConfig: any,
refreshToken: string
) {
const callbackUrl = await oidc.getCallbackUrl(db, chosenConfig)
let enrichedConfig
let strategy
let enrichedConfig: any
let strategy: any
try {
enrichedConfig = await oidc.fetchStrategyConfig(chosenConfig, callbackUrl)
@ -70,22 +73,28 @@ async function refreshOIDCAccessToken(db, chosenConfig, refreshToken) {
refresh.requestNewAccessToken(
Configs.OIDC,
refreshToken,
(err, accessToken, refreshToken, params) => {
(err: any, accessToken: string, refreshToken: any, params: any) => {
resolve({ err, accessToken, refreshToken, params })
}
)
})
}
async function refreshGoogleAccessToken(db, config, refreshToken) {
async function refreshGoogleAccessToken(
db: any,
config: any,
refreshToken: any
) {
let callbackUrl = await google.getCallbackUrl(db, config)
let strategy
try {
strategy = await google.strategyFactory(config, callbackUrl)
} catch (err) {
} catch (err: any) {
console.error(err)
throw new Error("Error constructing OIDC refresh strategy", err)
throw new Error(
`Error constructing OIDC refresh strategy: message=${err.message}`
)
}
refresh.use(strategy)
@ -94,14 +103,18 @@ async function refreshGoogleAccessToken(db, config, refreshToken) {
refresh.requestNewAccessToken(
Configs.GOOGLE,
refreshToken,
(err, accessToken, refreshToken, params) => {
(err: any, accessToken: string, refreshToken: string, params: any) => {
resolve({ err, accessToken, refreshToken, params })
}
)
})
}
async function refreshOAuthToken(refreshToken, configType, configId) {
async function refreshOAuthToken(
refreshToken: string,
configType: string,
configId: string
) {
const db = getGlobalDB()
const config = await getScopedConfig(db, {
@ -113,7 +126,7 @@ async function refreshOAuthToken(refreshToken, configType, configId) {
let refreshResponse
if (configType === Configs.OIDC) {
// configId - retrieved from cookie.
chosenConfig = config.configs.filter(c => c.uuid === configId)[0]
chosenConfig = config.configs.filter((c: any) => c.uuid === configId)[0]
if (!chosenConfig) {
throw new Error("Invalid OIDC configuration")
}
@ -134,7 +147,7 @@ async function refreshOAuthToken(refreshToken, configType, configId) {
return refreshResponse
}
async function updateUserOAuth(userId, oAuthConfig) {
async function updateUserOAuth(userId: string, oAuthConfig: any) {
const details = {
accessToken: oAuthConfig.accessToken,
refreshToken: oAuthConfig.refreshToken,
@ -162,14 +175,13 @@ async function updateUserOAuth(userId, oAuthConfig) {
}
}
module.exports = {
export = {
buildAuthMiddleware: authenticated,
passport,
google,
oidc,
jwt: require("jsonwebtoken"),
buildTenancyMiddleware: tenancy,
buildAppTenancyMiddleware: appTenancy,
auditLog,
authError,
buildCsrfMiddleware: csrf,

View File

@ -1,6 +1,7 @@
import BaseCache from "./base"
import { getWritethroughClient } from "../redis/init"
import { logWarn } from "../logging"
import PouchDB from "pouchdb"
const DEFAULT_WRITE_RATE_MS = 10000
let CACHE: BaseCache | null = null

View File

@ -7,6 +7,7 @@ exports.Cookies = {
CurrentApp: "budibase:currentapp",
Auth: "budibase:auth",
Init: "budibase:init",
ACCOUNT_RETURN_URL: "budibase:account:returnurl",
DatasourceAuth: "budibase:datasourceauth",
OIDC_CONFIG: "budibase:oidc:config",
}

View File

@ -2,7 +2,7 @@ import env from "../environment"
import { SEPARATOR, DocumentType } from "../db/constants"
import cls from "./FunctionContext"
import { dangerousGetDB, closeDB } from "../db"
import { baseGlobalDBName } from "../tenancy/utils"
import { baseGlobalDBName } from "../db/tenancy"
import { IdentityContext } from "@budibase/types"
import { DEFAULT_TENANT_ID as _DEFAULT_TENANT_ID } from "../constants"
import { ContextKey } from "./constants"
@ -53,6 +53,9 @@ export const getTenantIDFromAppID = (appId: string) => {
if (!appId) {
return null
}
if (!isMultiTenant()) {
return DEFAULT_TENANT_ID
}
const split = appId.split(SEPARATOR)
const hasDev = split[1] === DocumentType.DEV
if ((hasDev && split.length === 3) || (!hasDev && split.length === 2)) {
@ -65,7 +68,16 @@ export const getTenantIDFromAppID = (appId: string) => {
}
}
// used for automations, API endpoints should always be in context already
export const doInContext = async (appId: string, task: any) => {
// gets the tenant ID from the app ID
const tenantId = getTenantIDFromAppID(appId)
return doInTenant(tenantId, async () => {
return doInAppContext(appId, async () => {
return task()
})
})
}
export const doInTenant = (tenantId: string | null, task: any) => {
// make sure default always selected in single tenancy
if (!env.MULTI_TENANCY) {
@ -226,6 +238,10 @@ export const getAppId = () => {
}
}
export const isTenancyEnabled = () => {
return env.MULTI_TENANCY
}
/**
* Opens the app database based on whatever the request
* contained, dev or prod.

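A minimal usage sketch for the new `doInContext` helper, assuming the package-level re-export shown earlier (the app ID and task body are hypothetical):
```js
const { doInContext } = require("@budibase/backend-core/context")

// derives the tenant ID from the app ID, then runs the task inside
// both tenant and app context (doInTenant + doInAppContext internally)
await doInContext("app_dev_example", async () => {
  // ...automation step that needs app/tenant context
})
```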
View File

@ -1,4 +1,5 @@
import { dangerousGetDB, closeDB } from "."
import { DocumentType } from "./constants"
class Replication {
source: any
@ -53,6 +54,14 @@ class Replication {
return this.replication
}
appReplicateOpts() {
return {
filter: (doc: any) => {
return doc._id !== DocumentType.APP_METADATA
},
}
}
/**
* Rollback the target DB back to the state of the source DB
*/
@ -60,6 +69,7 @@ class Replication {
await this.target.destroy()
// Recreate the DB again
this.target = dangerousGetDB(this.target.name)
// take the opportunity to remove deleted tombstones
await this.replicate()
}

View File

@ -18,6 +18,10 @@ export enum ViewName {
LINK = "by_link",
ROUTING = "screen_routes",
AUTOMATION_LOGS = "automation_logs",
ACCOUNT_BY_EMAIL = "account_by_email",
PLATFORM_USERS_LOWERCASE = "platform_users_lowercase",
USER_BY_GROUP = "by_group_user",
APP_BACKUP_BY_TRIGGER = "by_trigger",
}
export const DeprecatedViews = {
@ -27,6 +31,10 @@ export const DeprecatedViews = {
],
}
export enum InternalTable {
USER_METADATA = "ta_users",
}
export enum DocumentType {
USER = "us",
GROUP = "gr",
@ -41,6 +49,25 @@ export enum DocumentType {
MIGRATIONS = "migrations",
DEV_INFO = "devinfo",
AUTOMATION_LOG = "log_au",
ACCOUNT_METADATA = "acc_metadata",
PLUGIN = "plg",
DATASOURCE = "datasource",
DATASOURCE_PLUS = "datasource_plus",
APP_BACKUP = "backup",
TABLE = "ta",
ROW = "ro",
AUTOMATION = "au",
LINK = "li",
WEBHOOK = "wh",
INSTANCE = "inst",
LAYOUT = "layout",
SCREEN = "screen",
QUERY = "query",
DEPLOYMENTS = "deployments",
METADATA = "metadata",
MEM_VIEW = "view",
USER_FLAG = "flag",
AUTOMATION_METADATA = "meta_au",
}
export const StaticDatabases = {

View File

@ -36,6 +36,7 @@ exports.getDevelopmentAppID = appId => {
const rest = split.join(APP_PREFIX)
return `${APP_DEV_PREFIX}${rest}`
}
exports.getDevAppID = exports.getDevelopmentAppID
/**
* Convert a development app ID to a deployed app ID.

View File

@ -1,91 +0,0 @@
const pouch = require("./pouch")
const env = require("../environment")
const openDbs = []
let PouchDB
let initialised = false
const dbList = new Set()
if (env.MEMORY_LEAK_CHECK) {
setInterval(() => {
console.log("--- OPEN DBS ---")
console.log(openDbs)
}, 5000)
}
const put =
dbPut =>
async (doc, options = {}) => {
if (!doc.createdAt) {
doc.createdAt = new Date().toISOString()
}
doc.updatedAt = new Date().toISOString()
return dbPut(doc, options)
}
const checkInitialised = () => {
if (!initialised) {
throw new Error("init has not been called")
}
}
exports.init = opts => {
PouchDB = pouch.getPouch(opts)
initialised = true
}
// NOTE: THIS IS A DANGEROUS FUNCTION - USE WITH CAUTION
// this function is prone to leaks, should only be used
// in situations that using the function doWithDB does not work
exports.dangerousGetDB = (dbName, opts) => {
checkInitialised()
if (env.isTest()) {
dbList.add(dbName)
}
const db = new PouchDB(dbName, opts)
if (env.MEMORY_LEAK_CHECK) {
openDbs.push(db.name)
}
const dbPut = db.put
db.put = put(dbPut)
return db
}
// use this function if you have called dangerousGetDB - close
// the databases you've opened once finished
exports.closeDB = async db => {
if (!db || env.isTest()) {
return
}
if (env.MEMORY_LEAK_CHECK) {
openDbs.splice(openDbs.indexOf(db.name), 1)
}
try {
// specifically await so that if there is an error, it can be ignored
return await db.close()
} catch (err) {
// ignore error, already closed
}
}
// we have to use a callback for this so that we can close
// the DB when we're done, without this manual requests would
// need to close the database when done with it to avoid memory leaks
exports.doWithDB = async (dbName, cb, opts = {}) => {
const db = exports.dangerousGetDB(dbName, opts)
// need this to be async so that we can correctly close DB after all
// async operations have been completed
try {
return await cb(db)
} finally {
await exports.closeDB(db)
}
}
exports.allDbs = () => {
if (!env.isTest()) {
throw new Error("Cannot be used outside test environment.")
}
checkInitialised()
return [...dbList]
}

View File

@ -0,0 +1,133 @@
import * as pouch from "./pouch"
import env from "../environment"
import { checkSlashesInUrl } from "../helpers"
import fetch from "node-fetch"
import { PouchOptions, CouchFindOptions } from "@budibase/types"
import PouchDB from "pouchdb"
const openDbs: string[] = []
let Pouch: any
let initialised = false
const dbList = new Set()
if (env.MEMORY_LEAK_CHECK) {
setInterval(() => {
console.log("--- OPEN DBS ---")
console.log(openDbs)
}, 5000)
}
const put =
(dbPut: any) =>
async (doc: any, options = {}) => {
if (!doc.createdAt) {
doc.createdAt = new Date().toISOString()
}
doc.updatedAt = new Date().toISOString()
return dbPut(doc, options)
}
const checkInitialised = () => {
if (!initialised) {
throw new Error("init has not been called")
}
}
export async function init(opts?: PouchOptions) {
Pouch = pouch.getPouch(opts)
initialised = true
}
// NOTE: THIS IS A DANGEROUS FUNCTION - USE WITH CAUTION
// this function is prone to leaks, should only be used
// in situations that using the function doWithDB does not work
export function dangerousGetDB(dbName: string, opts?: any): PouchDB.Database {
checkInitialised()
if (env.isTest()) {
dbList.add(dbName)
}
const db = new Pouch(dbName, opts)
if (env.MEMORY_LEAK_CHECK) {
openDbs.push(db.name)
}
const dbPut = db.put
db.put = put(dbPut)
return db
}
// use this function if you have called dangerousGetDB - close
// the databases you've opened once finished
export async function closeDB(db: PouchDB.Database) {
if (!db || env.isTest()) {
return
}
if (env.MEMORY_LEAK_CHECK) {
openDbs.splice(openDbs.indexOf(db.name), 1)
}
try {
// specifically await so that if there is an error, it can be ignored
return await db.close()
} catch (err) {
// ignore error, already closed
}
}
// we have to use a callback for this so that we can close
// the DB when we're done, without this manual requests would
// need to close the database when done with it to avoid memory leaks
export async function doWithDB(dbName: string, cb: any, opts = {}) {
const db = dangerousGetDB(dbName, opts)
// need this to be async so that we can correctly close DB after all
// async operations have been completed
try {
return await cb(db)
} finally {
await closeDB(db)
}
}
export function allDbs() {
if (!env.isTest()) {
throw new Error("Cannot be used outside test environment.")
}
checkInitialised()
return [...dbList]
}
export async function directCouchQuery(
path: string,
method: string = "GET",
body?: any
) {
let { url, cookie } = pouch.getCouchInfo()
const couchUrl = `${url}/${path}`
const params: any = {
method: method,
headers: {
Authorization: cookie,
},
}
if (body && method !== "GET") {
params.body = JSON.stringify(body)
params.headers["Content-Type"] = "application/json"
}
const response = await fetch(checkSlashesInUrl(encodeURI(couchUrl)), params)
if (response.status < 300) {
return await response.json()
} else {
throw "Cannot connect to CouchDB instance"
}
}
export async function directCouchAllDbs(queryString?: string) {
let couchPath = "/_all_dbs"
if (queryString) {
couchPath += `?${queryString}`
}
return await directCouchQuery(couchPath)
}
export async function directCouchFind(dbName: string, opts: CouchFindOptions) {
const json = await directCouchQuery(`${dbName}/_find`, "POST", opts)
return { rows: json.docs, bookmark: json.bookmark }
}
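A usage sketch for the new direct-CouchDB helpers (the database name and selector are placeholders, and `CouchFindOptions` is assumed to mirror Mango `/_find` options, as the `_find` POST above suggests):
```ts
import { directCouchFind } from "./db" // path within backend-core assumed

const { rows, bookmark } = await directCouchFind("some_db_name", {
  selector: { type: "row" },
  limit: 10,
})
// rows: matching docs; bookmark: pass back in to page through results
```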

View File

@ -1,7 +1,7 @@
const PouchDB = require("pouchdb")
const env = require("../environment")
import PouchDB from "pouchdb"
import env from "../environment"
exports.getUrlInfo = (url = env.COUCH_DB_URL) => {
export const getUrlInfo = (url = env.COUCH_DB_URL) => {
let cleanUrl, username, password, host
if (url) {
// Ensure the URL starts with a protocol
@ -44,8 +44,8 @@ exports.getUrlInfo = (url = env.COUCH_DB_URL) => {
}
}
exports.getCouchInfo = () => {
const urlInfo = exports.getUrlInfo()
export const getCouchInfo = () => {
const urlInfo = getUrlInfo()
let username
let password
if (env.COUCH_DB_USERNAME) {
@ -82,11 +82,11 @@ exports.getCouchInfo = () => {
* This should be rarely used outside of the main application config.
* Exposed for exceptional cases such as in-memory views.
*/
exports.getPouch = (opts = {}) => {
let { url, cookie } = exports.getCouchInfo()
export const getPouch = (opts: any = {}) => {
let { url, cookie } = getCouchInfo()
let POUCH_DB_DEFAULTS = {
prefix: url,
fetch: (url, opts) => {
fetch: (url: string, opts: any) => {
// use a specific authorization cookie - be very explicit about how we authenticate
opts.headers.set("Authorization", cookie)
return PouchDB.fetch(url, opts)
@ -98,6 +98,7 @@ exports.getPouch = (opts = {}) => {
PouchDB.plugin(inMemory)
POUCH_DB_DEFAULTS = {
prefix: undefined,
// @ts-ignore
adapter: "memory",
}
}
@ -105,6 +106,7 @@ exports.getPouch = (opts = {}) => {
if (opts.onDisk) {
POUCH_DB_DEFAULTS = {
prefix: undefined,
// @ts-ignore
adapter: "leveldb",
}
}
@ -112,6 +114,7 @@ exports.getPouch = (opts = {}) => {
if (opts.replication) {
const replicationStream = require("pouchdb-replication-stream")
PouchDB.plugin(replicationStream.plugin)
// @ts-ignore
PouchDB.adapter("writableStream", replicationStream.adapters.writableStream)
}

View File

@ -0,0 +1,22 @@
import { DEFAULT_TENANT_ID } from "../constants"
import { StaticDatabases, SEPARATOR } from "./constants"
import { getTenantId } from "../context"
export const getGlobalDBName = (tenantId?: string) => {
// tenant ID can be set externally, for example user API where
// new tenants are being created, this may be the case
if (!tenantId) {
tenantId = getTenantId()
}
return baseGlobalDBName(tenantId)
}
export const baseGlobalDBName = (tenantId: string | undefined | null) => {
let dbName
if (!tenantId || tenantId === DEFAULT_TENANT_ID) {
dbName = StaticDatabases.GLOBAL.name
} else {
dbName = `${tenantId}${SEPARATOR}${StaticDatabases.GLOBAL.name}`
}
return dbName
}

View File

@ -1,13 +1,17 @@
import { newid } from "../hashing"
import { DEFAULT_TENANT_ID, Configs } from "../constants"
import env from "../environment"
import { SEPARATOR, DocumentType, UNICODE_MAX, ViewName } from "./constants"
import { getTenantId, getGlobalDBName, getGlobalDB } from "../tenancy"
import fetch from "node-fetch"
import { doWithDB, allDbs } from "./index"
import { getCouchInfo } from "./pouch"
import {
SEPARATOR,
DocumentType,
UNICODE_MAX,
ViewName,
InternalTable,
} from "./constants"
import { getTenantId, getGlobalDB } from "../context"
import { getGlobalDBName } from "./tenancy"
import { doWithDB, allDbs, directCouchAllDbs } from "./index"
import { getAppMetadata } from "../cache/appMetadata"
import { checkSlashesInUrl } from "../helpers"
import { isDevApp, isDevAppID, getProdAppID } from "./conversions"
import { APP_PREFIX } from "./constants"
import * as events from "../events"
@ -15,6 +19,7 @@ import * as events from "../events"
export * from "./constants"
export * from "./conversions"
export { default as Replication } from "./Replication"
export * from "./tenancy"
/**
* Generates a new app ID.
@ -41,8 +46,8 @@ export const generateAppID = (tenantId = null) => {
* @returns {object} Parameters which can then be used with an allDocs request.
*/
export function getDocParams(
docType: any,
docId: any = null,
docType: string,
docId?: string | null,
otherProps: any = {}
) {
if (docId == null) {
@ -55,6 +60,28 @@ export function getDocParams(
}
}
/**
* Gets the DB allDocs/query params for retrieving a row.
* @param {string|null} tableId The table in which the rows have been stored.
* @param {string|null} rowId The ID of the row which is being specifically queried for. This can be
* left null to get all the rows in the table.
* @param {object} otherProps Any other properties to add to the request.
* @returns {object} Parameters which can then be used with an allDocs request.
*/
export function getRowParams(
tableId?: string | null,
rowId?: string | null,
otherProps = {}
) {
if (tableId == null) {
return getDocParams(DocumentType.ROW, null, otherProps)
}
const endOfKey = rowId == null ? `${tableId}${SEPARATOR}` : rowId
return getDocParams(DocumentType.ROW, endOfKey, otherProps)
}
/**
* Retrieve the correct index for a view based on default design DB.
*/
@ -62,6 +89,39 @@ export function getQueryIndex(viewName: ViewName) {
return `database/${viewName}`
}
/**
* Gets a new row ID for the specified table.
* @param {string} tableId The table which the row is being created for.
* @param {string|null} id If an ID is to be used then the UUID can be substituted for this.
* @returns {string} The new ID which a row doc can be stored under.
*/
export function generateRowID(tableId: string, id?: string) {
id = id || newid()
return `${DocumentType.ROW}${SEPARATOR}${tableId}${SEPARATOR}${id}`
}
/**
* Check if a given ID is that of a table.
* @returns {boolean}
*/
export const isTableId = (id: string) => {
// this includes datasource plus tables
return (
id &&
(id.startsWith(`${DocumentType.TABLE}${SEPARATOR}`) ||
id.startsWith(`${DocumentType.DATASOURCE_PLUS}${SEPARATOR}`))
)
}
/**
* Check if a given ID is that of a datasource or datasource plus.
* @returns {boolean}
*/
export const isDatasourceId = (id: string) => {
// this covers both datasources and datasource plus
return id && id.startsWith(`${DocumentType.DATASOURCE}${SEPARATOR}`)
}
/**
* Generates a new workspace ID.
* @returns {string} The new workspace ID which the workspace doc can be stored under.
@ -107,6 +167,33 @@ export function getGlobalUserParams(globalId: any, otherProps: any = {}) {
}
}
/**
* Gets parameters for retrieving users, this is a utility function for the getDocParams function.
*/
export function getUserMetadataParams(userId?: string, otherProps = {}) {
return getRowParams(InternalTable.USER_METADATA, userId, otherProps)
}
/**
* Generates a new user ID based on the passed in global ID.
* @param {string} globalId The ID of the global user.
* @returns {string} The new user ID which the user doc can be stored under.
*/
export function generateUserMetadataID(globalId: string) {
return generateRowID(InternalTable.USER_METADATA, globalId)
}
/**
* Breaks up the ID to get the global ID.
*/
export function getGlobalIDFromUserMetadataID(id: string) {
const prefix = `${DocumentType.ROW}${SEPARATOR}${InternalTable.USER_METADATA}${SEPARATOR}`
if (!id || !id.includes(prefix)) {
return id
}
return id.split(prefix)[1]
}
export function getUsersByAppParams(appId: any, otherProps: any = {}) {
const prodAppId = getProdAppID(appId)
return {
@ -167,9 +254,9 @@ export function getRoleParams(roleId = null, otherProps = {}) {
return getDocParams(DocumentType.ROLE, roleId, otherProps)
}
export function getStartEndKeyURL(base: any, baseKey: any, tenantId = null) {
export function getStartEndKeyURL(baseKey: any, tenantId = null) {
const tenancy = tenantId ? `${SEPARATOR}${tenantId}` : ""
return `${base}?startkey="${baseKey}${tenancy}"&endkey="${baseKey}${tenancy}${UNICODE_MAX}"`
return `startkey="${baseKey}${tenancy}"&endkey="${baseKey}${tenancy}${UNICODE_MAX}"`
}
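// --- Output sketch (editor's illustration): after this change the helper returns
// only the query string, so callers supply their own base URL. Prefix/separator
// values shown are assumptions.
//   getStartEndKeyURL(DocumentType.APP, "tenant1")
//   // => 'startkey="app_tenant1"&endkey="app_tenant1<UNICODE_MAX>"'
//   const url = `${couchUrl}/_all_dbs?${getStartEndKeyURL(DocumentType.APP, "tenant1")}`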
/**
@ -185,22 +272,10 @@ export async function getAllDbs(opts = { efficient: false }) {
return allDbs()
}
let dbs: any[] = []
let { url, cookie } = getCouchInfo()
async function addDbs(couchUrl: string) {
const response = await fetch(checkSlashesInUrl(encodeURI(couchUrl)), {
method: "GET",
headers: {
Authorization: cookie,
},
})
if (response.status === 200) {
let json = await response.json()
dbs = dbs.concat(json)
} else {
throw "Cannot connect to CouchDB instance"
}
async function addDbs(queryString?: string) {
const json = await directCouchAllDbs(queryString)
dbs = dbs.concat(json)
}
let couchUrl = `${url}/_all_dbs`
let tenantId = getTenantId()
if (!env.MULTI_TENANCY || (!efficient && tenantId === DEFAULT_TENANT_ID)) {
// just get all DBs when:
@ -208,12 +283,12 @@ export async function getAllDbs(opts = { efficient: false }) {
// - default tenant
// - apps dbs don't contain tenant id
// - non-default tenant dbs are filtered out application side in getAllApps
await addDbs(couchUrl)
await addDbs()
} else {
// get prod apps
await addDbs(getStartEndKeyURL(couchUrl, DocumentType.APP, tenantId))
await addDbs(getStartEndKeyURL(DocumentType.APP, tenantId))
// get dev apps
await addDbs(getStartEndKeyURL(couchUrl, DocumentType.APP_DEV, tenantId))
await addDbs(getStartEndKeyURL(DocumentType.APP_DEV, tenantId))
// add global db name
dbs.push(getGlobalDBName(tenantId))
}
@ -254,7 +329,16 @@ export async function getAllApps({ dev, all, idsOnly, efficient }: any = {}) {
return false
})
if (idsOnly) {
return appDbNames
const devAppIds = appDbNames.filter(appId => isDevAppID(appId))
const prodAppIds = appDbNames.filter(appId => !isDevAppID(appId))
switch (dev) {
case true:
return devAppIds
case false:
return prodAppIds
default:
return appDbNames
}
}
const appPromises = appDbNames.map((app: any) =>
// skip setup otherwise databases could be re-created
@ -357,6 +441,21 @@ export const generateDevInfoID = (userId: any) => {
return `${DocumentType.DEV_INFO}${SEPARATOR}${userId}`
}
/**
* Generates a new plugin ID - to be used in the global DB.
* @returns {string} The new plugin ID which a plugin metadata document can be stored under.
*/
export const generatePluginID = (name: string) => {
return `${DocumentType.PLUGIN}${SEPARATOR}${name}`
}
/**
 * Gets parameters for retrieving plugins; this is a utility wrapper around getDocParams.
*/
export const getPluginParams = (pluginId?: string | null, otherProps = {}) => {
return getDocParams(DocumentType.PLUGIN, pluginId, otherProps)
}
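// --- Usage sketch (editor's illustration): pairing the two plugin helpers; the
// plugin name and the "plugin" prefix value are assumptions.
//   generatePluginID("my-datasource") // => "plugin_my-datasource"
//   getPluginParams(null, { include_docs: true }) // params to list all plugins via allDocs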
/**
* Returns the most granular configuration document from the DB based on the type, workspace and userID passed.
* @param {Object} db - db instance to query

View File

@ -1,158 +0,0 @@
const {
DocumentType,
ViewName,
DeprecatedViews,
SEPARATOR,
} = require("./utils")
const { getGlobalDB } = require("../tenancy")
const DESIGN_DB = "_design/database"
function DesignDoc() {
return {
_id: DESIGN_DB,
// view collation information, read before writing any complex views:
// https://docs.couchdb.org/en/master/ddocs/views/collation.html#collation-specification
views: {},
}
}
async function removeDeprecated(db, viewName) {
if (!DeprecatedViews[viewName]) {
return
}
try {
const designDoc = await db.get(DESIGN_DB)
for (let deprecatedNames of DeprecatedViews[viewName]) {
delete designDoc.views[deprecatedNames]
}
await db.put(designDoc)
} catch (err) {
// doesn't exist, ignore
}
}
exports.createNewUserEmailView = async () => {
const db = getGlobalDB()
let designDoc
try {
designDoc = await db.get(DESIGN_DB)
} catch (err) {
// no design doc, make one
designDoc = DesignDoc()
}
const view = {
// if using variables in a map function, they need to be injected before use
map: `function(doc) {
if (doc._id.startsWith("${DocumentType.USER}${SEPARATOR}")) {
emit(doc.email.toLowerCase(), doc._id)
}
}`,
}
designDoc.views = {
...designDoc.views,
[ViewName.USER_BY_EMAIL]: view,
}
await db.put(designDoc)
}
exports.createUserAppView = async () => {
const db = getGlobalDB()
let designDoc
try {
designDoc = await db.get("_design/database")
} catch (err) {
// no design doc, make one
designDoc = DesignDoc()
}
const view = {
// if using variables in a map function, they need to be injected before use
map: `function(doc) {
if (doc._id.startsWith("${DocumentType.USER}${SEPARATOR}") && doc.roles) {
for (let prodAppId of Object.keys(doc.roles)) {
let emitted = prodAppId + "${SEPARATOR}" + doc._id
emit(emitted, null)
}
}
}`,
}
designDoc.views = {
...designDoc.views,
[ViewName.USER_BY_APP]: view,
}
await db.put(designDoc)
}
exports.createApiKeyView = async () => {
const db = getGlobalDB()
let designDoc
try {
designDoc = await db.get("_design/database")
} catch (err) {
designDoc = DesignDoc()
}
const view = {
map: `function(doc) {
if (doc._id.startsWith("${DocumentType.DEV_INFO}") && doc.apiKey) {
emit(doc.apiKey, doc.userId)
}
}`,
}
designDoc.views = {
...designDoc.views,
[ViewName.BY_API_KEY]: view,
}
await db.put(designDoc)
}
exports.createUserBuildersView = async () => {
const db = getGlobalDB()
let designDoc
try {
designDoc = await db.get("_design/database")
} catch (err) {
// no design doc, make one
designDoc = DesignDoc()
}
const view = {
map: `function(doc) {
if (doc.builder && doc.builder.global === true) {
emit(doc._id, doc._id)
}
}`,
}
designDoc.views = {
...designDoc.views,
[ViewName.USER_BY_BUILDERS]: view,
}
await db.put(designDoc)
}
exports.queryGlobalView = async (viewName, params, db = null) => {
const CreateFuncByName = {
[ViewName.USER_BY_EMAIL]: exports.createNewUserEmailView,
[ViewName.BY_API_KEY]: exports.createApiKeyView,
[ViewName.USER_BY_BUILDERS]: exports.createUserBuildersView,
[ViewName.USER_BY_APP]: exports.createUserAppView,
}
// can pass DB in if working with something specific
if (!db) {
db = getGlobalDB()
}
try {
let response = (await db.query(`database/${viewName}`, params)).rows
response = response.map(resp =>
params.include_docs ? resp.doc : resp.value
)
return response.length <= 1 ? response[0] : response
} catch (err) {
if (err != null && err.name === "not_found") {
const createFunc = CreateFuncByName[viewName]
await removeDeprecated(db, viewName)
await createFunc()
return exports.queryGlobalView(viewName, params)
} else {
throw err
}
}
}

View File

@ -0,0 +1,199 @@
import { DocumentType, ViewName, DeprecatedViews, SEPARATOR } from "./utils"
import { getGlobalDB } from "../context"
import PouchDB from "pouchdb"
import { StaticDatabases } from "./constants"
import { doWithDB } from "./"
const DESIGN_DB = "_design/database"
function DesignDoc() {
return {
_id: DESIGN_DB,
// view collation information, read before writing any complex views:
// https://docs.couchdb.org/en/master/ddocs/views/collation.html#collation-specification
views: {},
}
}
interface DesignDocument {
views: any
}
async function removeDeprecated(db: PouchDB.Database, viewName: ViewName) {
// @ts-ignore
if (!DeprecatedViews[viewName]) {
return
}
try {
const designDoc = await db.get<DesignDocument>(DESIGN_DB)
// @ts-ignore
for (let deprecatedNames of DeprecatedViews[viewName]) {
delete designDoc.views[deprecatedNames]
}
await db.put(designDoc)
} catch (err) {
// doesn't exist, ignore
}
}
export async function createView(db: any, viewJs: string, viewName: string) {
let designDoc
try {
designDoc = (await db.get(DESIGN_DB)) as DesignDocument
} catch (err) {
// no design doc, make one
designDoc = DesignDoc()
}
const view = {
map: viewJs,
}
designDoc.views = {
...designDoc.views,
[viewName]: view,
}
await db.put(designDoc)
}
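// --- Usage sketch (editor's illustration): any custom view can now go through
// createView; the db handle, map function, and view name here are hypothetical.
//   const viewJs = `function(doc) {
//     if (doc.tenantId) {
//       emit(doc.tenantId, doc._id)
//     }
//   }`
//   await createView(db, viewJs, "by_tenant")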
export const createNewUserEmailView = async () => {
const db = getGlobalDB()
const viewJs = `function(doc) {
if (doc._id.startsWith("${DocumentType.USER}${SEPARATOR}")) {
emit(doc.email.toLowerCase(), doc._id)
}
}`
await createView(db, viewJs, ViewName.USER_BY_EMAIL)
}
export const createAccountEmailView = async () => {
const viewJs = `function(doc) {
if (doc._id.startsWith("${DocumentType.ACCOUNT_METADATA}${SEPARATOR}")) {
emit(doc.email.toLowerCase(), doc._id)
}
}`
await doWithDB(
StaticDatabases.PLATFORM_INFO.name,
async (db: PouchDB.Database) => {
await createView(db, viewJs, ViewName.ACCOUNT_BY_EMAIL)
}
)
}
export const createUserAppView = async () => {
const db = getGlobalDB() as PouchDB.Database
const viewJs = `function(doc) {
if (doc._id.startsWith("${DocumentType.USER}${SEPARATOR}") && doc.roles) {
for (let prodAppId of Object.keys(doc.roles)) {
let emitted = prodAppId + "${SEPARATOR}" + doc._id
emit(emitted, null)
}
}
}`
await createView(db, viewJs, ViewName.USER_BY_APP)
}
export const createApiKeyView = async () => {
const db = getGlobalDB()
const viewJs = `function(doc) {
if (doc._id.startsWith("${DocumentType.DEV_INFO}") && doc.apiKey) {
emit(doc.apiKey, doc.userId)
}
}`
await createView(db, viewJs, ViewName.BY_API_KEY)
}
export const createUserBuildersView = async () => {
const db = getGlobalDB()
const viewJs = `function(doc) {
if (doc.builder && doc.builder.global === true) {
emit(doc._id, doc._id)
}
}`
await createView(db, viewJs, ViewName.USER_BY_BUILDERS)
}
export const createPlatformUserView = async () => {
const viewJs = `function(doc) {
if (doc.tenantId) {
emit(doc._id.toLowerCase(), doc._id)
}
}`
await doWithDB(
StaticDatabases.PLATFORM_INFO.name,
async (db: PouchDB.Database) => {
await createView(db, viewJs, ViewName.PLATFORM_USERS_LOWERCASE)
}
)
}
export interface QueryViewOptions {
arrayResponse?: boolean
}
export const queryView = async <T>(
viewName: ViewName,
params: PouchDB.Query.Options<T, T>,
db: PouchDB.Database,
createFunc: any,
opts?: QueryViewOptions
): Promise<T[] | T | undefined> => {
try {
let response = await db.query<T, T>(`database/${viewName}`, params)
const rows = response.rows
const docs = rows.map(row => (params.include_docs ? row.doc : row.value))
// if arrayResponse has been requested, always return array regardless of length
if (opts?.arrayResponse) {
return docs
} else {
// return the single document if there is only one
return docs.length <= 1 ? docs[0] : docs
}
} catch (err: any) {
if (err != null && err.name === "not_found") {
await removeDeprecated(db, viewName)
await createFunc()
return queryView(viewName, params, db, createFunc, opts)
} else {
throw err
}
}
}
export const queryPlatformView = async <T>(
viewName: ViewName,
params: PouchDB.Query.Options<T, T>,
opts?: QueryViewOptions
): Promise<T[] | T | undefined> => {
const CreateFuncByName: any = {
[ViewName.ACCOUNT_BY_EMAIL]: createAccountEmailView,
[ViewName.PLATFORM_USERS_LOWERCASE]: createPlatformUserView,
}
return doWithDB(
StaticDatabases.PLATFORM_INFO.name,
async (db: PouchDB.Database) => {
const createFn = CreateFuncByName[viewName]
return queryView(viewName, params, db, createFn, opts)
}
)
}
export const queryGlobalView = async <T>(
viewName: ViewName,
params: PouchDB.Query.Options<T, T>,
db?: PouchDB.Database,
opts?: QueryViewOptions
): Promise<T[] | T | undefined> => {
const CreateFuncByName: any = {
[ViewName.USER_BY_EMAIL]: createNewUserEmailView,
[ViewName.BY_API_KEY]: createApiKeyView,
[ViewName.USER_BY_BUILDERS]: createUserBuildersView,
[ViewName.USER_BY_APP]: createUserAppView,
}
// can pass DB in if working with something specific
if (!db) {
db = getGlobalDB() as PouchDB.Database
}
const createFn = CreateFuncByName[viewName]
return queryView(viewName, params, db, createFn, opts)
}
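// --- Usage sketch (editor's illustration): querying the email view through the
// typed wrapper. The User import and the email variable are assumptions.
//   import { User } from "@budibase/types"
//   const user = await queryGlobalView<User>(ViewName.USER_BY_EMAIL, {
//     key: email.toLowerCase(),
//     include_docs: true,
//   })
//   // one match => User, several => User[], none => undefined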

View File

@ -16,9 +16,19 @@ if (!LOADED && isDev() && !isTest()) {
LOADED = true
}
const DefaultBucketName = {
BACKUPS: "backups",
APPS: "prod-budi-app-assets",
TEMPLATES: "templates",
GLOBAL: "global",
CLOUD: "prod-budi-tenant-uploads",
PLUGINS: "plugins",
}
const env = {
isTest,
isDev,
JS_BCRYPT: process.env.JS_BCRYPT,
JWT_SECRET: process.env.JWT_SECRET,
COUCH_DB_URL: process.env.COUCH_DB_URL || "http://localhost:4005",
COUCH_DB_USERNAME: process.env.COUCH_DB_USER,
@ -36,7 +46,7 @@ const env = {
MULTI_TENANCY: process.env.MULTI_TENANCY,
ACCOUNT_PORTAL_URL:
process.env.ACCOUNT_PORTAL_URL || "https://account.budibase.app",
ACCOUNT_PORTAL_API_KEY: process.env.ACCOUNT_PORTAL_API_KEY,
ACCOUNT_PORTAL_API_KEY: process.env.ACCOUNT_PORTAL_API_KEY || "",
DISABLE_ACCOUNT_PORTAL: process.env.DISABLE_ACCOUNT_PORTAL,
SELF_HOSTED: !!parseInt(process.env.SELF_HOSTED || ""),
COOKIE_DOMAIN: process.env.COOKIE_DOMAIN,
@ -44,12 +54,17 @@ const env = {
POSTHOG_TOKEN: process.env.POSTHOG_TOKEN,
ENABLE_ANALYTICS: process.env.ENABLE_ANALYTICS,
TENANT_FEATURE_FLAGS: process.env.TENANT_FEATURE_FLAGS,
BACKUPS_BUCKET_NAME: process.env.BACKUPS_BUCKET_NAME || "backups",
APPS_BUCKET_NAME: process.env.APPS_BUCKET_NAME || "prod-budi-app-assets",
TEMPLATES_BUCKET_NAME: process.env.TEMPLATES_BUCKET_NAME || "templates",
GLOBAL_BUCKET_NAME: process.env.GLOBAL_BUCKET_NAME || "global",
BACKUPS_BUCKET_NAME:
process.env.BACKUPS_BUCKET_NAME || DefaultBucketName.BACKUPS,
APPS_BUCKET_NAME: process.env.APPS_BUCKET_NAME || DefaultBucketName.APPS,
TEMPLATES_BUCKET_NAME:
process.env.TEMPLATES_BUCKET_NAME || DefaultBucketName.TEMPLATES,
GLOBAL_BUCKET_NAME:
process.env.GLOBAL_BUCKET_NAME || DefaultBucketName.GLOBAL,
GLOBAL_CLOUD_BUCKET_NAME:
process.env.GLOBAL_CLOUD_BUCKET_NAME || "prod-budi-tenant-uploads",
process.env.GLOBAL_CLOUD_BUCKET_NAME || DefaultBucketName.CLOUD,
PLUGIN_BUCKET_NAME:
process.env.PLUGIN_BUCKET_NAME || DefaultBucketName.PLUGINS,
USE_COUCH: process.env.USE_COUCH || true,
DISABLE_DEVELOPER_LICENSE: process.env.DISABLE_DEVELOPER_LICENSE,
DEFAULT_LICENSE: process.env.DEFAULT_LICENSE,

View File

@ -1,11 +0,0 @@
class BudibaseError extends Error {
constructor(message, code, type) {
super(message)
this.code = code
this.type = type
}
}
module.exports = {
BudibaseError,
}

View File

@ -0,0 +1,10 @@
export class BudibaseError extends Error {
code: string
type: string
constructor(message: string, code: string, type: string) {
super(message)
this.code = code
this.type = type
}
}

View File

@ -1,11 +0,0 @@
const { BudibaseError } = require("./base")
class GenericError extends BudibaseError {
constructor(message, code, type) {
super(message, code, type ? type : "generic")
}
}
module.exports = {
GenericError,
}

View File

@ -0,0 +1,7 @@
import { BudibaseError } from "./base"
export class GenericError extends BudibaseError {
constructor(message: string, code: string, type: string) {
super(message, code, type ? type : "generic")
}
}

View File

@ -1,12 +0,0 @@
const { GenericError } = require("./generic")
class HTTPError extends GenericError {
constructor(message, httpStatus, code = "http", type = "generic") {
super(message, code, type)
this.status = httpStatus
}
}
module.exports = {
HTTPError,
}

View File

@ -0,0 +1,15 @@
import { GenericError } from "./generic"
export class HTTPError extends GenericError {
status: number
constructor(
message: string,
httpStatus: number,
code = "http",
type = "generic"
) {
super(message, code, type)
this.status = httpStatus
}
}

View File

@ -1,5 +1,6 @@
const http = require("./http")
const licensing = require("./licensing")
import { HTTPError } from "./http"
import { UsageLimitError, FeatureDisabledError } from "./licensing"
import * as licensing from "./licensing"
const codes = {
...licensing.codes,
@ -11,7 +12,7 @@ const context = {
...licensing.context,
}
const getPublicError = err => {
const getPublicError = (err: any) => {
let error
if (err.code || err.type) {
// add generic error information
@ -32,13 +33,15 @@ const getPublicError = err => {
return error
}
module.exports = {
const pkg = {
codes,
types,
errors: {
UsageLimitError: licensing.UsageLimitError,
FeatureDisabledError: licensing.FeatureDisabledError,
HTTPError: http.HTTPError,
UsageLimitError,
FeatureDisabledError,
HTTPError,
},
getPublicError,
}
export = pkg

View File

@ -1,43 +0,0 @@
const { HTTPError } = require("./http")
const type = "license_error"
const codes = {
USAGE_LIMIT_EXCEEDED: "usage_limit_exceeded",
FEATURE_DISABLED: "feature_disabled",
}
const context = {
[codes.USAGE_LIMIT_EXCEEDED]: err => {
return {
limitName: err.limitName,
}
},
[codes.FEATURE_DISABLED]: err => {
return {
featureName: err.featureName,
}
},
}
class UsageLimitError extends HTTPError {
constructor(message, limitName) {
super(message, 400, codes.USAGE_LIMIT_EXCEEDED, type)
this.limitName = limitName
}
}
class FeatureDisabledError extends HTTPError {
constructor(message, featureName) {
super(message, 400, codes.FEATURE_DISABLED, type)
this.featureName = featureName
}
}
module.exports = {
type,
codes,
context,
UsageLimitError,
FeatureDisabledError,
}

View File

@ -0,0 +1,39 @@
import { HTTPError } from "./http"
export const type = "license_error"
export const codes = {
USAGE_LIMIT_EXCEEDED: "usage_limit_exceeded",
FEATURE_DISABLED: "feature_disabled",
}
export const context = {
[codes.USAGE_LIMIT_EXCEEDED]: (err: any) => {
return {
limitName: err.limitName,
}
},
[codes.FEATURE_DISABLED]: (err: any) => {
return {
featureName: err.featureName,
}
},
}
export class UsageLimitError extends HTTPError {
limitName: string
constructor(message: string, limitName: string) {
super(message, 400, codes.USAGE_LIMIT_EXCEEDED, type)
this.limitName = limitName
}
}
export class FeatureDisabledError extends HTTPError {
featureName: string
constructor(message: string, featureName: string) {
super(message, 400, codes.FEATURE_DISABLED, type)
this.featureName = featureName
}
}
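// --- Usage sketch (editor's illustration): how these errors carry context through
// to getPublicError; the message and limit name are hypothetical.
//   try {
//     throw new UsageLimitError("Row limit exceeded", "rows")
//   } catch (err: any) {
//     err.status             // 400
//     err.code               // codes.USAGE_LIMIT_EXCEEDED
//     context[err.code](err) // { limitName: "rows" }
//   }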

View File

@ -8,4 +8,5 @@ import { processors } from "./processors"
export const shutdown = () => {
processors.shutdown()
console.log("Events shutdown")
}

View File

@ -23,9 +23,11 @@ export default class LoggingProcessor implements EventProcessor {
return
}
let timestampString = getTimestampString(timestamp)
console.log(
`[audit] [tenant=${identity.tenantId}] [identityType=${identity.type}] [identity=${identity.id}] ${timestampString} ${event} `
)
let message = `[audit] [tenant=${identity.tenantId}] [identityType=${identity.type}] [identity=${identity.id}] ${timestampString} ${event} `
if (env.isDev()) {
message = message + `[debug: [properties=${JSON.stringify(properties)}] ]`
}
console.log(message)
}
async identify(identity: Identity, timestamp?: string | number) {

View File

@ -0,0 +1,12 @@
import { AppBackup, AppBackupRestoreEvent, Event } from "@budibase/types"
import { publishEvent } from "../events"
export async function appBackupRestored(backup: AppBackup) {
const properties: AppBackupRestoreEvent = {
appId: backup.appId,
backupName: backup.name!,
backupCreatedAt: backup.timestamp,
}
await publishEvent(Event.APP_BACKUP_RESTORED, properties)
}

View File

@ -5,8 +5,15 @@ import {
DatasourceCreatedEvent,
DatasourceUpdatedEvent,
DatasourceDeletedEvent,
SourceName,
} from "@budibase/types"
function isCustom(datasource: Datasource) {
const sources = Object.values(SourceName)
// if not in the base source list, then it must be custom
return !sources.includes(datasource.source)
}
export async function created(
datasource: Datasource,
timestamp?: string | number
@ -14,6 +21,7 @@ export async function created(
const properties: DatasourceCreatedEvent = {
datasourceId: datasource._id as string,
source: datasource.source,
custom: isCustom(datasource),
}
await publishEvent(Event.DATASOURCE_CREATED, properties, timestamp)
}
@ -22,6 +30,7 @@ export async function updated(datasource: Datasource) {
const properties: DatasourceUpdatedEvent = {
datasourceId: datasource._id as string,
source: datasource.source,
custom: isCustom(datasource),
}
await publishEvent(Event.DATASOURCE_UPDATED, properties)
}
@ -30,6 +39,7 @@ export async function deleted(datasource: Datasource) {
const properties: DatasourceDeletedEvent = {
datasourceId: datasource._id as string,
source: datasource.source,
custom: isCustom(datasource),
}
await publishEvent(Event.DATASOURCE_DELETED, properties)
}

View File

@ -40,9 +40,9 @@ export async function usersAdded(count: number, group: UserGroup) {
await publishEvent(Event.USER_GROUP_USERS_ADDED, properties)
}
export async function usersDeleted(emails: string[], group: UserGroup) {
export async function usersDeleted(count: number, group: UserGroup) {
const properties: GroupUsersDeletedEvent = {
count: emails.length,
count,
groupId: group._id as string,
}
await publishEvent(Event.USER_GROUP_USERS_REMOVED, properties)

View File

@ -18,3 +18,5 @@ export * as view from "./view"
export * as installation from "./installation"
export * as backfill from "./backfill"
export * as group from "./group"
export * as plugin from "./plugin"
export * as backup from "./backup"

View File

@ -1,27 +1,78 @@
import { publishEvent } from "../events"
import {
Event,
License,
LicenseActivatedEvent,
LicenseDowngradedEvent,
LicenseUpdatedEvent,
LicenseUpgradedEvent,
LicensePlanChangedEvent,
LicenseTierChangedEvent,
PlanType,
Account,
LicensePortalOpenedEvent,
LicenseCheckoutSuccessEvent,
LicenseCheckoutOpenedEvent,
LicensePaymentFailedEvent,
LicensePaymentRecoveredEvent,
} from "@budibase/types"
// TODO
export async function updgraded(license: License) {
const properties: LicenseUpgradedEvent = {}
await publishEvent(Event.LICENSE_UPGRADED, properties)
export async function tierChanged(account: Account, from: number, to: number) {
const properties: LicenseTierChangedEvent = {
accountId: account.accountId,
to,
from,
}
await publishEvent(Event.LICENSE_TIER_CHANGED, properties)
}
// TODO
export async function downgraded(license: License) {
const properties: LicenseDowngradedEvent = {}
await publishEvent(Event.LICENSE_DOWNGRADED, properties)
export async function planChanged(
account: Account,
from: PlanType,
to: PlanType
) {
const properties: LicensePlanChangedEvent = {
accountId: account.accountId,
to,
from,
}
await publishEvent(Event.LICENSE_PLAN_CHANGED, properties)
}
// TODO
export async function activated(license: License) {
const properties: LicenseActivatedEvent = {}
export async function activated(account: Account) {
const properties: LicenseActivatedEvent = {
accountId: account.accountId,
}
await publishEvent(Event.LICENSE_ACTIVATED, properties)
}
export async function checkoutOpened(account: Account) {
const properties: LicenseCheckoutOpenedEvent = {
accountId: account.accountId,
}
await publishEvent(Event.LICENSE_CHECKOUT_OPENED, properties)
}
export async function checkoutSuccess(account: Account) {
const properties: LicenseCheckoutSuccessEvent = {
accountId: account.accountId,
}
await publishEvent(Event.LICENSE_CHECKOUT_SUCCESS, properties)
}
export async function portalOpened(account: Account) {
const properties: LicensePortalOpenedEvent = {
accountId: account.accountId,
}
await publishEvent(Event.LICENSE_PORTAL_OPENED, properties)
}
export async function paymentFailed(account: Account) {
const properties: LicensePaymentFailedEvent = {
accountId: account.accountId,
}
await publishEvent(Event.LICENSE_PAYMENT_FAILED, properties)
}
export async function paymentRecovered(account: Account) {
const properties: LicensePaymentRecoveredEvent = {
accountId: account.accountId,
}
await publishEvent(Event.LICENSE_PAYMENT_RECOVERED, properties)
}

View File

@ -0,0 +1,41 @@
import { publishEvent } from "../events"
import {
Event,
Plugin,
PluginDeletedEvent,
PluginImportedEvent,
PluginInitEvent,
} from "@budibase/types"
export async function init(plugin: Plugin) {
const properties: PluginInitEvent = {
type: plugin.schema.type,
name: plugin.name,
description: plugin.description,
version: plugin.version,
}
await publishEvent(Event.PLUGIN_INIT, properties)
}
export async function imported(plugin: Plugin) {
const properties: PluginImportedEvent = {
pluginId: plugin._id as string,
type: plugin.schema.type,
source: plugin.source,
name: plugin.name,
description: plugin.description,
version: plugin.version,
}
await publishEvent(Event.PLUGIN_IMPORTED, properties)
}
export async function deleted(plugin: Plugin) {
const properties: PluginDeletedEvent = {
pluginId: plugin._id as string,
type: plugin.schema.type,
name: plugin.name,
description: plugin.description,
version: plugin.version,
}
await publishEvent(Event.PLUGIN_DELETED, properties)
}

View File

@ -31,23 +31,29 @@ const TENANT_FEATURE_FLAGS = getFeatureFlags()
exports.isEnabled = featureFlag => {
const tenantId = tenancy.getTenantId()
return (
TENANT_FEATURE_FLAGS &&
TENANT_FEATURE_FLAGS[tenantId] &&
TENANT_FEATURE_FLAGS[tenantId].includes(featureFlag)
)
const flags = exports.getTenantFeatureFlags(tenantId)
return flags.includes(featureFlag)
}
exports.getTenantFeatureFlags = tenantId => {
if (TENANT_FEATURE_FLAGS && TENANT_FEATURE_FLAGS[tenantId]) {
return TENANT_FEATURE_FLAGS[tenantId]
const flags = []
if (TENANT_FEATURE_FLAGS) {
const globalFlags = TENANT_FEATURE_FLAGS["*"]
const tenantFlags = TENANT_FEATURE_FLAGS[tenantId]
if (globalFlags) {
flags.push(...globalFlags)
}
if (tenantFlags) {
flags.push(...tenantFlags)
}
}
return []
return flags
}
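// --- Shape sketch (editor's illustration): with this change, flags under the "*"
// key apply to every tenant. The parsed map below is an assumed example; the
// actual TENANT_FEATURE_FLAGS env parsing lives in getFeatureFlags.
//   // { "*": ["LICENSING"], "tenant1": ["USER_GROUPS"] }
//   getTenantFeatureFlags("tenant1") // => ["LICENSING", "USER_GROUPS"]
//   getTenantFeatureFlags("tenant2") // => ["LICENSING"]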
exports.FeatureFlag = {
exports.TenantFeatureFlag = {
LICENSING: "LICENSING",
GOOGLE_SHEETS: "GOOGLE_SHEETS",
USER_GROUPS: "USER_GROUPS",

View File

@ -1,5 +1,5 @@
const bcrypt = require("bcrypt")
const env = require("./environment")
const bcrypt = env.JS_BCRYPT ? require("bcryptjs") : require("bcrypt")
const { v4 } = require("uuid")
const SALT_ROUNDS = env.SALT_ROUNDS || 10
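// --- Sketch (editor's illustration): bcrypt and bcryptjs share the same promise
// API, so the swap is transparent to callers; an assumed hash helper:
//   exports.hash = async data => {
//     const salt = await bcrypt.genSalt(SALT_ROUNDS)
//     return bcrypt.hash(data, salt)
//   }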

View File

@ -1,5 +1,4 @@
import errors from "./errors"
const errorClasses = errors.errors
import * as events from "./events"
import * as migrations from "./migrations"
@ -15,8 +14,12 @@ import deprovisioning from "./context/deprovision"
import auth from "./auth"
import constants from "./constants"
import * as dbConstants from "./db/constants"
import logging from "./logging"
import * as logging from "./logging"
import pino from "./pino"
import * as middleware from "./middleware"
import plugins from "./plugin"
import encryption from "./security/encryption"
import * as queue from "./queue"
// mimic the outer package exports
import * as db from "./pkg/db"
@ -35,6 +38,7 @@ const core = {
db,
...dbConstants,
redis,
locks: redis.redlock,
objectStore,
utils,
users,
@ -55,8 +59,12 @@ const core = {
errors,
logging,
roles,
plugins,
...pino,
...errorClasses,
middleware,
encryption,
queue,
}
export = core

View File

@ -65,7 +65,7 @@ async function checkApiKey(apiKey: string, populateUser?: Function) {
* The tenancy modules should not be used here and it should be assumed that the tenancy context
* has not yet been populated.
*/
module.exports = (
export = (
noAuthPatterns = [],
opts: { publicAllowed: boolean; populateUser?: Function } = {
publicAllowed: false,
@ -106,6 +106,7 @@ module.exports = (
user = await getUser(userId, session.tenantId)
}
user.csrfToken = session.csrfToken
if (session?.lastAccessedAt < timeMinusOneMinute()) {
// make sure we denote that the session is still in use
await updateSessionTTL(session)

View File

@ -13,7 +13,8 @@ const adminOnly = require("./adminOnly")
const builderOrAdmin = require("./builderOrAdmin")
const builderOnly = require("./builderOnly")
const joiValidator = require("./joi-validator")
module.exports = {
const pkg = {
google,
oidc,
jwt,
@ -33,3 +34,5 @@ module.exports = {
builderOrAdmin,
joiValidator,
}
export = pkg

View File

@ -13,10 +13,13 @@ function validate(schema, property) {
params = ctx.request[property]
}
schema = schema.append({
createdAt: Joi.any().optional(),
updatedAt: Joi.any().optional(),
})
// not all schemas have the append property e.g. array schemas
if (schema.append) {
schema = schema.append({
createdAt: Joi.any().optional(),
updatedAt: Joi.any().optional(),
})
}
const { error } = schema.validate(params)
if (error) {

View File

@ -11,20 +11,12 @@ export const DEFINITIONS: MigrationDefinition[] = [
},
{
type: MigrationType.GLOBAL,
name: MigrationName.QUOTAS_1,
name: MigrationName.SYNC_QUOTAS,
},
{
type: MigrationType.APP,
name: MigrationName.APP_URLS,
},
{
type: MigrationType.GLOBAL,
name: MigrationName.DEVELOPER_QUOTA,
},
{
type: MigrationType.GLOBAL,
name: MigrationName.PUBLISHED_APP_QUOTA,
},
{
type: MigrationType.APP,
name: MigrationName.EVENT_APP_BACKFILL,

View File

@ -3,12 +3,8 @@ import { doWithDB } from "../db"
import { DocumentType, StaticDatabases } from "../db/constants"
import { getAllApps } from "../db/utils"
import environment from "../environment"
import {
doInTenant,
getTenantIds,
getGlobalDBName,
getTenantId,
} from "../tenancy"
import { doInTenant, getTenantIds, getTenantId } from "../tenancy"
import { getGlobalDBName } from "../db/tenancy"
import * as context from "../context"
import { DEFINITIONS } from "."
import {

View File

@ -18,11 +18,16 @@ const STATE = {
bucketCreationPromises: {},
}
type ListParams = {
ContinuationToken?: string
}
const CONTENT_TYPE_MAP: any = {
html: "text/html",
css: "text/css",
js: "application/javascript",
json: "application/json",
gz: "application/gzip",
}
const STRING_CONTENT_TYPES = [
CONTENT_TYPE_MAP.html,
@ -32,16 +37,16 @@ const STRING_CONTENT_TYPES = [
]
// does normal sanitization and then swaps dev apps to apps
export function sanitizeKey(input: any) {
export function sanitizeKey(input: string) {
return sanitize(sanitizeBucket(input)).replace(/\\/g, "/")
}
// simply handles the dev app to app conversion
export function sanitizeBucket(input: any) {
export function sanitizeBucket(input: string) {
return input.replace(new RegExp(APP_DEV_PREFIX, "g"), APP_PREFIX)
}
function publicPolicy(bucketName: any) {
function publicPolicy(bucketName: string) {
return {
Version: "2012-10-17",
Statement: [
@ -57,7 +62,11 @@ function publicPolicy(bucketName: any) {
}
}
const PUBLIC_BUCKETS = [ObjectStoreBuckets.APPS, ObjectStoreBuckets.GLOBAL]
const PUBLIC_BUCKETS = [
ObjectStoreBuckets.APPS,
ObjectStoreBuckets.GLOBAL,
ObjectStoreBuckets.PLUGINS,
]
/**
* Gets a connection to the object store using the S3 SDK.
@ -65,16 +74,14 @@ const PUBLIC_BUCKETS = [ObjectStoreBuckets.APPS, ObjectStoreBuckets.GLOBAL]
* @return {Object} an S3 object store object, check S3 Nodejs SDK for usage.
* @constructor
*/
export const ObjectStore = (bucket: any) => {
AWS.config.update({
accessKeyId: env.MINIO_ACCESS_KEY,
secretAccessKey: env.MINIO_SECRET_KEY,
region: env.AWS_REGION,
})
export const ObjectStore = (bucket: string) => {
const config: any = {
s3ForcePathStyle: true,
signatureVersion: "v4",
apiVersion: "2006-03-01",
accessKeyId: env.MINIO_ACCESS_KEY,
secretAccessKey: env.MINIO_SECRET_KEY,
region: env.AWS_REGION,
}
if (bucket) {
config.params = {
@ -91,7 +98,7 @@ export const ObjectStore = (bucket: any) => {
* Given an object store and a bucket name this will make sure the bucket exists,
* if it does not exist then it will create it.
*/
export const makeSureBucketExists = async (client: any, bucketName: any) => {
export const makeSureBucketExists = async (client: any, bucketName: string) => {
bucketName = sanitizeBucket(bucketName)
try {
await client
@ -143,7 +150,7 @@ export const upload = async ({
type,
metadata,
}: any) => {
const extension = [...filename.split(".")].pop()
const extension = filename.split(".").pop()
const fileBytes = fs.readFileSync(path)
const objectStore = ObjectStore(bucketName)
@ -166,14 +173,27 @@ export const upload = async ({
* through to the object store.
*/
export const streamUpload = async (
bucketName: any,
filename: any,
bucketName: string,
filename: string,
stream: any,
extra = {}
) => {
const objectStore = ObjectStore(bucketName)
await makeSureBucketExists(objectStore, bucketName)
// Set content type for certain known extensions
if (filename?.endsWith(".js")) {
extra = {
...extra,
ContentType: "application/javascript",
}
} else if (filename?.endsWith(".svg")) {
extra = {
...extra,
ContentType: "image",
}
}
const params = {
Bucket: sanitizeBucket(bucketName),
Key: sanitizeKey(filename),
@ -187,7 +207,7 @@ export const streamUpload = async (
 * Retrieves the contents of a file from the object store; if it is a known content type it
* will be converted, otherwise it will be returned as a buffer stream.
*/
export const retrieve = async (bucketName: any, filepath: any) => {
export const retrieve = async (bucketName: string, filepath: string) => {
const objectStore = ObjectStore(bucketName)
const params = {
Bucket: sanitizeBucket(bucketName),
@ -202,10 +222,38 @@ export const retrieve = async (bucketName: any, filepath: any) => {
}
}
export const listAllObjects = async (bucketName: string, path: string) => {
const objectStore = ObjectStore(bucketName)
const list = (params: ListParams = {}) => {
return objectStore
.listObjectsV2({
...params,
Bucket: sanitizeBucket(bucketName),
Prefix: sanitizeKey(path),
})
.promise()
}
let isTruncated = false,
token,
objects: AWS.S3.Types.Object[] = []
do {
let params: ListParams = {}
if (token) {
params.ContinuationToken = token
}
const response = await list(params)
if (response.Contents) {
objects = objects.concat(response.Contents)
}
isTruncated = !!response.IsTruncated
// advance the pagination cursor, otherwise a truncated listing loops forever
token = response.NextContinuationToken
} while (isTruncated)
return objects
}
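// --- Usage sketch (editor's illustration): pagination via ContinuationToken is
// handled internally; the bucket and prefix values are hypothetical.
//   const objects = await listAllObjects("prod-budi-app-assets", "app_123/")
//   const keys = objects.map(obj => obj.Key)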
/**
* Same as retrieval function but puts to a temporary file.
*/
export const retrieveToTmp = async (bucketName: any, filepath: any) => {
export const retrieveToTmp = async (bucketName: string, filepath: string) => {
bucketName = sanitizeBucket(bucketName)
filepath = sanitizeKey(filepath)
const data = await retrieve(bucketName, filepath)
@ -214,10 +262,31 @@ export const retrieveToTmp = async (bucketName: any, filepath: any) => {
return outputPath
}
export const retrieveDirectory = async (bucketName: string, path: string) => {
let writePath = join(budibaseTempDir(), v4())
fs.mkdirSync(writePath)
const objects = await listAllObjects(bucketName, path)
let fullObjects = await Promise.all(
objects.map(obj => retrieve(bucketName, obj.Key!))
)
let count = 0
for (let obj of objects) {
const filename = obj.Key!
const data = fullObjects[count++]
const possiblePath = filename.split("/")
if (possiblePath.length > 1) {
const dirs = possiblePath.slice(0, possiblePath.length - 1)
fs.mkdirSync(join(writePath, ...dirs), { recursive: true })
}
fs.writeFileSync(join(writePath, ...possiblePath), data)
}
return writePath
}
/**
* Delete a single file.
*/
export const deleteFile = async (bucketName: any, filepath: any) => {
export const deleteFile = async (bucketName: string, filepath: string) => {
const objectStore = ObjectStore(bucketName)
await makeSureBucketExists(objectStore, bucketName)
const params = {
@ -227,7 +296,7 @@ export const deleteFile = async (bucketName: any, filepath: any) => {
return objectStore.deleteObject(params)
}
export const deleteFiles = async (bucketName: any, filepaths: any) => {
export const deleteFiles = async (bucketName: string, filepaths: string[]) => {
const objectStore = ObjectStore(bucketName)
await makeSureBucketExists(objectStore, bucketName)
const params = {
@ -243,8 +312,8 @@ export const deleteFiles = async (bucketName: any, filepaths: any) => {
* Delete a path, including everything within.
*/
export const deleteFolder = async (
bucketName: any,
folder: any
bucketName: string,
folder: string
): Promise<any> => {
bucketName = sanitizeBucket(bucketName)
folder = sanitizeKey(folder)
@ -277,9 +346,9 @@ export const deleteFolder = async (
}
export const uploadDirectory = async (
bucketName: any,
localPath: any,
bucketPath: any
bucketName: string,
localPath: string,
bucketPath: string
) => {
bucketName = sanitizeBucket(bucketName)
let uploads = []
@ -297,9 +366,13 @@ export const uploadDirectory = async (
return files
}
exports.downloadTarballDirect = async (url: string, path: string) => {
exports.downloadTarballDirect = async (
url: string,
path: string,
headers = {}
) => {
path = sanitizeKey(path)
const response = await fetch(url)
const response = await fetch(url, { headers })
if (!response.ok) {
throw new Error(`unexpected response ${response.statusText}`)
}
@ -307,7 +380,11 @@ exports.downloadTarballDirect = async (url: string, path: string) => {
await streamPipeline(response.body, zlib.Unzip(), tar.extract(path))
}
export const downloadTarball = async (url: any, bucketName: any, path: any) => {
export const downloadTarball = async (
url: string,
bucketName: string,
path: string
) => {
bucketName = sanitizeBucket(bucketName)
path = sanitizeKey(path)
const response = await fetch(url)

View File

@ -1,15 +1,27 @@
const { join } = require("path")
const { tmpdir } = require("os")
const fs = require("fs")
const env = require("../environment")
/****************************************************
 * NOTE: When adding a new bucket - make *
 * sure that S3 usages (like budibase-infra) *
* have been updated to have a unique bucket name. *
****************************************************/
exports.ObjectStoreBuckets = {
BACKUPS: env.BACKUPS_BUCKET_NAME,
APPS: env.APPS_BUCKET_NAME,
TEMPLATES: env.TEMPLATES_BUCKET_NAME,
GLOBAL: env.GLOBAL_BUCKET_NAME,
GLOBAL_CLOUD: env.GLOBAL_CLOUD_BUCKET_NAME,
PLUGINS: env.PLUGIN_BUCKET_NAME,
}
const bbTmp = join(tmpdir(), ".budibase")
if (!fs.existsSync(bbTmp)) {
fs.mkdirSync(bbTmp)
}
exports.budibaseTempDir = function () {
return join(tmpdir(), ".budibase")
return bbTmp
}

View File

@ -8,6 +8,7 @@ import {
updateAppId,
doInAppContext,
doInTenant,
doInContext,
} from "../context"
import * as identity from "../context/identity"
@ -20,5 +21,6 @@ export = {
updateAppId,
doInAppContext,
doInTenant,
doInContext,
identity,
}

View File

@ -3,9 +3,11 @@
import Client from "../redis"
import utils from "../redis/utils"
import clients from "../redis/init"
import * as redlock from "../redis/redlock"
export = {
Client,
utils,
clients,
redlock,
}

View File

@ -0,0 +1,7 @@
import * as utils from "./utils"
const pkg = {
...utils,
}
export = pkg

View File

@ -0,0 +1,99 @@
const {
DatasourceFieldType,
QueryType,
PluginType,
} = require("@budibase/types")
const joi = require("joi")
const DATASOURCE_TYPES = [
"Relational",
"Non-relational",
"Spreadsheet",
"Object store",
"Graph",
"API",
]
function runJoi(validator, schema) {
const { error } = validator.validate(schema)
if (error) {
throw error
}
}
function validateComponent(schema) {
const validator = joi.object({
type: joi.string().allow("component").required(),
metadata: joi.object().unknown(true).required(),
hash: joi.string().optional(),
version: joi.string().optional(),
schema: joi
.object({
name: joi.string().required(),
settings: joi.array().items(joi.object().unknown(true)).required(),
})
.unknown(true),
})
runJoi(validator, schema)
}
function validateDatasource(schema) {
const fieldValidator = joi.object({
type: joi
.string()
.allow(...Object.values(DatasourceFieldType))
.required(),
required: joi.boolean().required(),
default: joi.any(),
display: joi.string(),
})
const queryValidator = joi
.object({
type: joi.string().allow(...Object.values(QueryType)),
fields: joi.object().pattern(joi.string(), fieldValidator),
})
.required()
const validator = joi.object({
type: joi.string().allow("datasource").required(),
metadata: joi.object().unknown(true).required(),
hash: joi.string().optional(),
version: joi.string().optional(),
schema: joi.object({
docs: joi.string(),
friendlyName: joi.string().required(),
type: joi.string().allow(...DATASOURCE_TYPES),
description: joi.string().required(),
datasource: joi.object().pattern(joi.string(), fieldValidator).required(),
query: joi
.object()
.pattern(joi.string(), queryValidator)
.unknown(true)
.required(),
extra: joi.object().pattern(
joi.string(),
joi.object({
type: joi.string().required(),
displayName: joi.string().required(),
required: joi.boolean(),
data: joi.object(),
})
),
}),
})
runJoi(validator, schema)
}
exports.validate = schema => {
switch (schema?.type) {
case PluginType.COMPONENT:
validateComponent(schema)
break
case PluginType.DATASOURCE:
validateDatasource(schema)
break
default:
throw new Error(`Unknown plugin type - check schema.json: ${schema.type}`)
}
}

View File

@ -0,0 +1,4 @@
export enum JobQueue {
AUTOMATION = "automationQueue",
APP_BACKUP = "appBackupQueue",
}

View File

@ -0,0 +1,127 @@
import events from "events"
/**
* Bull works with a Job wrapper around all messages that contains a lot more information about
 * the state of the message; this object constructor implements the same schema of Bull jobs
* for the sake of maintaining API consistency.
* @param {string} queue The name of the queue which the message will be carried on.
* @param {object} message The JSON message which will be passed back to the consumer.
* @returns {Object} A new job which can now be put onto the queue, this is mostly an
* internal structure so that an in memory queue can be easily swapped for a Bull queue.
*/
function newJob(queue: string, message: any) {
return {
timestamp: Date.now(),
queue: queue,
data: message,
}
}
/**
* This is designed to replicate Bull (https://github.com/OptimalBits/bull) in memory as a sort of mock.
* It is relatively simple, using an event emitter internally to register when messages are available
 * to the consumers - it can support many inputs and many consumers.
*/
class InMemoryQueue {
_name: string
_opts?: any
_messages: any[]
_emitter: events.EventEmitter
/**
 * The constructor for the queue, exactly the same as Bull's.
* @param {string} name The name of the queue which is being configured.
* @param {object|null} opts This is not used by the in memory queue as there is no real use
* case when in memory, but is the same API as Bull
*/
constructor(name: string, opts = null) {
this._name = name
this._opts = opts
this._messages = []
this._emitter = new events.EventEmitter()
}
/**
* Same callback API as Bull, each callback passed to this will consume messages as they are
* available. Please note this is a queue service, not a notification service, so each
* consumer will receive different messages.
* @param {function<object>} func The callback function which will return a "Job", the same
 * as the Bull API; within this job the property "data" contains the JSON message. Please
* note this is incredibly limited compared to Bull as in reality the Job would contain
* a lot more information about the queue and current status of Bull cluster.
*/
process(func: any) {
this._emitter.on("message", async () => {
if (this._messages.length <= 0) {
return
}
let msg = this._messages.shift()
let resp = func(msg)
if (resp.then != null) {
await resp
}
})
}
// simply puts a message to the queue and emits to the queue for processing
/**
* Simple function to replicate the add message functionality of Bull, putting
* a new message on the queue. This then emits an event which will be used to
* return the message to a consumer (if one is attached).
* @param {object} msg A message to be transported over the queue, this should be
* a JSON message as this is required by Bull.
* @param {boolean} repeat serves no purpose for the import queue.
*/
// eslint-disable-next-line no-unused-vars
add(msg: any, repeat: boolean) {
if (typeof msg !== "object") {
throw new Error("Queue only supports carrying JSON.")
}
this._messages.push(newJob(this._name, msg))
this._emitter.emit("message")
}
/**
* replicating the close function from bull, which waits for jobs to finish.
*/
async close() {
return []
}
/**
 * This removes a cron which has been scheduled; this is part of the Bull API.
* @param {string} cronJobId The cron which is to be removed.
*/
removeRepeatableByKey(cronJobId: string) {
// TODO: implement for testing
console.log(cronJobId)
}
/**
* Implemented for tests
*/
getRepeatableJobs() {
return []
}
// eslint-disable-next-line no-unused-vars
removeJobs(pattern: string) {
// no-op
}
/**
* Implemented for tests
*/
async clean() {
return []
}
async getJob() {
return {}
}
on() {
// do nothing
}
}
export = InMemoryQueue
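// --- Usage sketch (editor's illustration): the mock is a drop-in for the subset
// of the Bull API used here; the queue name and payload are hypothetical.
//   const queue = new InMemoryQueue("testQueue")
//   queue.process(async job => console.log(job.data)) // => { hello: "world" }
//   queue.add({ hello: "world" }, false)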

View File

@ -0,0 +1,2 @@
export * from "./queue"
export * from "./constants"

View File

@ -0,0 +1,101 @@
import { Job, JobId, Queue } from "bull"
import { JobQueue } from "./constants"
export type StalledFn = (job: Job) => Promise<void>
export function addListeners(
queue: Queue,
jobQueue: JobQueue,
removeStalledCb?: StalledFn
) {
logging(queue, jobQueue)
if (removeStalledCb) {
handleStalled(queue, removeStalledCb)
}
}
function handleStalled(queue: Queue, removeStalledCb?: StalledFn) {
queue.on("stalled", async (job: Job) => {
if (removeStalledCb) {
await removeStalledCb(job)
} else if (job.opts.repeat) {
const jobId = job.id
const repeatJobs = await queue.getRepeatableJobs()
for (let repeatJob of repeatJobs) {
if (repeatJob.id === jobId) {
await queue.removeRepeatableByKey(repeatJob.key)
}
}
console.log(`jobId=${jobId} disabled`)
}
})
}
function logging(queue: Queue, jobQueue: JobQueue) {
let eventType: string
switch (jobQueue) {
case JobQueue.AUTOMATION:
eventType = "automation-event"
break
case JobQueue.APP_BACKUP:
eventType = "app-backup-event"
break
}
if (process.env.NODE_DEBUG?.includes("bull")) {
queue
.on("error", (error: any) => {
// An error occurred.
console.error(`${eventType}=error error=${JSON.stringify(error)}`)
})
.on("waiting", (jobId: JobId) => {
// A Job is waiting to be processed as soon as a worker is idling.
console.log(`${eventType}=waiting jobId=${jobId}`)
})
.on("active", (job: Job, jobPromise: any) => {
// A job has started. You can use jobPromise.cancel() to abort it.
console.log(`${eventType}=active jobId=${job.id}`)
})
.on("stalled", (job: Job) => {
// A job has been marked as stalled. This is useful for debugging job
// workers that crash or pause the event loop.
console.error(
`${eventType}=stalled jobId=${job.id} job=${JSON.stringify(job)}`
)
})
.on("progress", (job: Job, progress: any) => {
// A job's progress was updated!
console.log(
`${eventType}=progress jobId=${job.id} progress=${progress}`
)
})
.on("completed", (job: Job, result) => {
// A job successfully completed with a `result`.
console.log(`${eventType}=completed jobId=${job.id} result=${result}`)
})
.on("failed", (job, err: any) => {
// A job failed with reason `err`!
console.log(`${eventType}=failed jobId=${job.id} error=${err}`)
})
.on("paused", () => {
// The queue has been paused.
console.log(`${eventType}=paused`)
})
.on("resumed", (job: Job) => {
// The queue has been resumed.
console.log(`${eventType}=paused jobId=${job.id}`)
})
.on("cleaned", (jobs: Job[], type: string) => {
// Old jobs have been cleaned from the queue. `jobs` is an array of cleaned
// jobs, and `type` is the type of jobs cleaned.
console.log(`${eventType}=cleaned length=${jobs.length} type=${type}`)
})
.on("drained", () => {
// Emitted every time the queue has processed all the waiting jobs (even if there can be some delayed jobs not yet processed)
console.log(`${eventType}=drained`)
})
.on("removed", (job: Job) => {
// A job successfully removed.
console.log(`${eventType}=removed jobId=${job.id}`)
})
}
}

View File

@ -0,0 +1,51 @@
import env from "../environment"
import { getRedisOptions } from "../redis/utils"
import { JobQueue } from "./constants"
import InMemoryQueue from "./inMemoryQueue"
import BullQueue from "bull"
import { addListeners, StalledFn } from "./listeners"
const { opts: redisOpts, redisProtocolUrl } = getRedisOptions()
const CLEANUP_PERIOD_MS = 60 * 1000
let QUEUES: BullQueue.Queue[] | InMemoryQueue[] = []
let cleanupInterval: NodeJS.Timeout
async function cleanup() {
for (let queue of QUEUES) {
await queue.clean(CLEANUP_PERIOD_MS, "completed")
}
}
export function createQueue<T>(
jobQueue: JobQueue,
opts: { removeStalledCb?: StalledFn } = {}
): BullQueue.Queue<T> {
const queueConfig: any = redisProtocolUrl || { redis: redisOpts }
let queue: any
if (!env.isTest()) {
queue = new BullQueue(jobQueue, queueConfig)
} else {
queue = new InMemoryQueue(jobQueue, queueConfig)
}
addListeners(queue, jobQueue, opts?.removeStalledCb)
QUEUES.push(queue)
if (!cleanupInterval) {
cleanupInterval = setInterval(cleanup, CLEANUP_PERIOD_MS)
// fire off an initial cleanup
cleanup().catch(err => {
console.error(`Unable to cleanup automation queue initially - ${err}`)
})
}
return queue
}
exports.shutdown = async () => {
if (QUEUES.length) {
clearInterval(cleanupInterval)
for (let queue of QUEUES) {
await queue.close()
}
QUEUES = []
}
console.log("Queues shutdown")
}
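// --- Usage sketch (editor's illustration): creating and using a typed queue;
// the payload type and appId are assumptions.
//   type BackupJob = { appId: string }
//   const backupQueue = createQueue<BackupJob>(JobQueue.APP_BACKUP)
//   await backupQueue.add({ appId: "app_123" })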

View File

@ -214,6 +214,34 @@ export = class RedisWrapper {
}
}
async bulkGet(keys: string[]) {
const db = this._db
if (keys.length === 0) {
return {}
}
const prefixedKeys = keys.map(key => addDbPrefix(db, key))
let response = await this.getClient().mget(prefixedKeys)
if (Array.isArray(response)) {
let final: any = {}
let count = 0
for (let result of response) {
if (result) {
let parsed
try {
parsed = JSON.parse(result)
} catch (err) {
parsed = result
}
final[keys[count]] = parsed
}
count++
}
return final
} else {
throw new Error(`Invalid response: ${response}`)
}
}
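// --- Usage sketch (editor's illustration): values are JSON-parsed where possible
// and missing keys are omitted from the result; keys here are hypothetical.
//   await client.store("user:1", { name: "a" })
//   await client.bulkGet(["user:1", "user:2"]) // => { "user:1": { name: "a" } }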
async store(key: string, value: any, expirySeconds: number | null = null) {
const db = this._db
if (typeof value === "object") {

View File

@ -1,27 +1,23 @@
const Client = require("./index")
const utils = require("./utils")
const { getRedlock } = require("./redlock")
let userClient, sessionClient, appClient, cacheClient, writethroughClient
let migrationsRedlock
// turn retry off so that only one instance can ever hold the lock
const migrationsRedlockConfig = { retryCount: 0 }
let userClient,
sessionClient,
appClient,
cacheClient,
writethroughClient,
lockClient
async function init() {
userClient = await new Client(utils.Databases.USER_CACHE).init()
sessionClient = await new Client(utils.Databases.SESSIONS).init()
appClient = await new Client(utils.Databases.APP_METADATA).init()
cacheClient = await new Client(utils.Databases.GENERIC_CACHE).init()
lockClient = await new Client(utils.Databases.LOCKS).init()
writethroughClient = await new Client(
utils.Databases.WRITE_THROUGH,
utils.SelectableDatabases.WRITE_THROUGH
).init()
// pass the underlying ioredis client to redlock
migrationsRedlock = getRedlock(
cacheClient.getClient(),
migrationsRedlockConfig
)
}
process.on("exit", async () => {
@ -30,6 +26,7 @@ process.on("exit", async () => {
if (appClient) await appClient.finish()
if (cacheClient) await cacheClient.finish()
if (writethroughClient) await writethroughClient.finish()
if (lockClient) await lockClient.finish()
})
module.exports = {
@ -63,10 +60,10 @@ module.exports = {
}
return writethroughClient
},
getMigrationsRedlock: async () => {
if (!migrationsRedlock) {
getLockClient: async () => {
if (!lockClient) {
await init()
}
return migrationsRedlock
return lockClient
},
}

View File

@ -1,14 +1,37 @@
import Redlock from "redlock"
import Redlock, { Options } from "redlock"
import { getLockClient } from "./init"
import { LockOptions, LockType } from "@budibase/types"
import * as tenancy from "../tenancy"
export const getRedlock = (redisClient: any, opts = { retryCount: 10 }) => {
return new Redlock([redisClient], {
let noRetryRedlock: Redlock | undefined
const getClient = async (type: LockType): Promise<Redlock> => {
switch (type) {
case LockType.TRY_ONCE: {
if (!noRetryRedlock) {
noRetryRedlock = await newRedlock(OPTIONS.TRY_ONCE)
}
return noRetryRedlock
}
default: {
throw new Error(`Could not get redlock client: ${type}`)
}
}
}
export const OPTIONS = {
TRY_ONCE: {
// immediately throws an error if the lock is already held
retryCount: 0,
},
DEFAULT: {
// the expected clock drift; for more details
// see http://redis.io/topics/distlock
driftFactor: 0.01, // multiplied by lock ttl to determine drift time
// the max number of times Redlock will attempt
// to lock a resource before erroring
retryCount: opts.retryCount,
retryCount: 10,
// the time in ms between attempts
retryDelay: 200, // time in ms
@ -16,6 +39,50 @@ export const getRedlock = (redisClient: any, opts = { retryCount: 10 }) => {
// the max time in ms randomly added to retries
// to improve performance under high contention
// see https://www.awsarchitectureblog.com/2015/03/backoff.html
retryJitter: 200, // time in ms
})
retryJitter: 100, // time in ms
},
}
export const newRedlock = async (opts: Options = {}) => {
let options = { ...OPTIONS.DEFAULT, ...opts }
const redisWrapper = await getLockClient()
const client = redisWrapper.getClient()
return new Redlock([client], options)
}
export const doWithLock = async (opts: LockOptions, task: any) => {
const redlock = await getClient(opts.type)
let lock
try {
// acquire lock
let name: string
if (opts.systemLock) {
name = opts.name
} else {
name = `${tenancy.getTenantId()}_${opts.name}`
}
if (opts.nameSuffix) {
name = name + `_${opts.nameSuffix}`
}
lock = await redlock.lock(name, opts.ttl)
// perform locked task - await it so the lock is not released until the task settles
return await task()
} catch (e: any) {
// lock limit exceeded
if (e.name === "LockError") {
if (opts.type === LockType.TRY_ONCE) {
// don't throw for try-once locks, they will always error
// due to retry count (0) exceeded
return
} else {
throw e
}
} else {
throw e
}
} finally {
if (lock) {
await lock.unlock()
}
}
}
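// --- Usage sketch (editor's illustration): TRY_ONCE resolves to undefined
// instead of throwing when the lock is already held; the lock name, ttl, and
// task below are assumptions.
//   const result = await doWithLock(
//     { type: LockType.TRY_ONCE, name: "migrations", ttl: 60000, systemLock: true },
//     async () => runMigrations() // hypothetical task
//   )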

View File

@ -28,6 +28,7 @@ exports.Databases = {
LICENSES: "license",
GENERIC_CACHE: "data_cache",
WRITE_THROUGH: "writeThrough",
LOCKS: "locks",
}
/**

View File

@ -78,7 +78,7 @@ function isBuiltin(role) {
*/
exports.builtinRoleToNumber = id => {
const builtins = exports.getBuiltinRoles()
const MAX = Object.values(BUILTIN_IDS).length + 1
const MAX = Object.values(builtins).length + 1
if (id === BUILTIN_IDS.ADMIN || id === BUILTIN_IDS.BUILDER) {
return MAX
}
@ -94,6 +94,22 @@ exports.builtinRoleToNumber = id => {
return count
}
/**
* Converts any role to a number, but has to be async to get the roles from db.
*/
exports.roleToNumber = async id => {
if (exports.isBuiltin(id)) {
return exports.builtinRoleToNumber(id)
}
const hierarchy = await exports.getUserRoleHierarchy(id)
for (let role of hierarchy) {
if (isBuiltin(role.inherits)) {
return exports.builtinRoleToNumber(role.inherits) + 1
}
}
return 0
}
/**
* Returns whichever builtin roleID is lower.
*/
@ -172,7 +188,7 @@ async function getAllUserRoles(userRoleId) {
* to determine if a user can access something that requires a specific role.
* @param {string} userRoleId The user's role ID, this can be found in their access token.
* @param {object} opts Various options, such as whether to only retrieve the IDs (default true).
* @returns {Promise<string[]>} returns an ordered array of the roles, with the first being their
* @returns {Promise<string[]|object[]>} returns an ordered array of the roles, with the first being their
* highest level of access and the last being the lowest level.
*/
exports.getUserRoleHierarchy = async (userRoleId, opts = { idOnly: true }) => {

View File

@ -2,18 +2,12 @@ const redis = require("../redis/init")
const { v4: uuidv4 } = require("uuid")
const { logWarn } = require("../logging")
const env = require("../environment")
interface Session {
key: string
userId: string
sessionId: string
lastAccessedAt: string
createdAt: string
csrfToken?: string
value: string
}
type SessionKey = { key: string }[]
import {
Session,
ScannedSession,
SessionKey,
CreateSession,
} from "@budibase/types"
// a week in seconds
const EXPIRY_SECONDS = 86400 * 7
@ -22,14 +16,14 @@ function makeSessionID(userId: string, sessionId: string) {
return `${userId}/${sessionId}`
}
export async function getSessionsForUser(userId: string) {
export async function getSessionsForUser(userId: string): Promise<Session[]> {
if (!userId) {
console.trace("Cannot get sessions for undefined userId")
return []
}
const client = await redis.getSessionClient()
const sessions = await client.scan(userId)
return sessions.map((session: Session) => session.value)
const sessions: ScannedSession[] = await client.scan(userId)
return sessions.map(session => session.value)
}
export async function invalidateSessions(
@ -39,33 +33,32 @@ export async function invalidateSessions(
try {
const reason = opts?.reason || "unknown"
let sessionIds: string[] = opts.sessionIds || []
let sessions: SessionKey
let sessionKeys: SessionKey[]
// If no sessionIds, get all the sessions for the user
if (sessionIds.length === 0) {
sessions = await getSessionsForUser(userId)
sessions.forEach(
(session: any) =>
(session.key = makeSessionID(session.userId, session.sessionId))
)
const sessions = await getSessionsForUser(userId)
sessionKeys = sessions.map(session => ({
key: makeSessionID(session.userId, session.sessionId),
}))
} else {
// use the passed array of sessionIds
sessionIds = Array.isArray(sessionIds) ? sessionIds : [sessionIds]
sessions = sessionIds.map((sessionId: string) => ({
sessionKeys = sessionIds.map(sessionId => ({
key: makeSessionID(userId, sessionId),
}))
}
if (sessions && sessions.length > 0) {
if (sessionKeys && sessionKeys.length > 0) {
const client = await redis.getSessionClient()
const promises = []
for (let session of sessions) {
promises.push(client.delete(session.key))
for (let sessionKey of sessionKeys) {
promises.push(client.delete(sessionKey.key))
}
if (!env.isTest()) {
logWarn(
`Invalidating sessions for ${userId} (reason: ${reason}) - ${sessions
.map(session => session.key)
`Invalidating sessions for ${userId} (reason: ${reason}) - ${sessionKeys
.map(sessionKey => sessionKey.key)
.join(", ")}`
)
}
@ -76,22 +69,26 @@ export async function invalidateSessions(
}
}
export async function createASession(userId: string, session: Session) {
export async function createASession(
userId: string,
createSession: CreateSession
) {
// invalidate all other sessions
await invalidateSessions(userId, { reason: "creation" })
const client = await redis.getSessionClient()
const sessionId = session.sessionId
if (!session.csrfToken) {
session.csrfToken = uuidv4()
}
session = {
...session,
const sessionId = createSession.sessionId
const csrfToken = createSession.csrfToken ? createSession.csrfToken : uuidv4()
const key = makeSessionID(userId, sessionId)
const session: Session = {
...createSession,
csrfToken,
createdAt: new Date().toISOString(),
lastAccessedAt: new Date().toISOString(),
userId,
}
await client.store(makeSessionID(userId, sessionId), session, EXPIRY_SECONDS)
await client.store(key, session, EXPIRY_SECONDS)
}
export async function updateSessionTTL(session: Session) {
@ -106,7 +103,10 @@ export async function endSession(userId: string, sessionId: string) {
await client.delete(makeSessionID(userId, sessionId))
}
export async function getSession(userId: string, sessionId: string) {
export async function getSession(
userId: string,
sessionId: string
): Promise<Session> {
if (!userId || !sessionId) {
throw new Error(`Invalid session details - ${userId} - ${sessionId}`)
}

Some files were not shown because too many files have changed in this diff.