diff --git a/.eslintignore b/.eslintignore
index 54824be5c7..579bd55947 100644
--- a/.eslintignore
+++ b/.eslintignore
@@ -7,4 +7,5 @@ packages/server/client
packages/builder/.routify
packages/builder/cypress/support/queryLevelTransformerFunction.js
packages/builder/cypress/support/queryLevelTransformerFunctionWithData.js
-packages/builder/cypress/reports
\ No newline at end of file
+packages/builder/cypress/reports
+packages/sdk/sdk
\ No newline at end of file
diff --git a/.github/ISSUE_TEMPLATE/epic.md b/.github/ISSUE_TEMPLATE/epic.md
new file mode 100644
index 0000000000..b8cf652125
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/epic.md
@@ -0,0 +1,24 @@
+---
+name: Epic
+about: Plan a new project
+title: ''
+labels: epic
+assignees: ''
+
+---
+
+## Description
+Brief summary of what this Epic is, whether it's a larger project, goal, or user story. Describe the job to be done and which persona this Epic is mainly for, or if there are multiple, break it down by user and job story.
+
+## Spec
+Link to Confluence spec
+
+## Teams and Stakeholders
+Describe who needs to be kept up-to-date about this Epic, included in discussions, or updated along the way. Stakeholders can be in Product/Engineering as well as other teams, like Customer Success, who might want to keep customers updated on the Epic project.
+
+
+## Workflow
+- [ ] Spec Created and pasted above
+- [ ] Product Review
+- [ ] Designs created
+- [ ] Individual Tasks created and assigned to Epic
diff --git a/.github/workflows/budibase_ci.yml b/.github/workflows/budibase_ci.yml
index e940e6fa10..475bd4f66a 100644
--- a/.github/workflows/budibase_ci.yml
+++ b/.github/workflows/budibase_ci.yml
@@ -23,6 +23,15 @@ jobs:
build:
runs-on: ubuntu-latest
+ services:
+ couchdb:
+ image: ibmcom/couchdb3
+ env:
+ COUCHDB_PASSWORD: budibase
+ COUCHDB_USER: budibase
+ ports:
+ - 4567:5984
+
strategy:
matrix:
node-version: [14.x]
@@ -53,9 +62,8 @@ jobs:
name: codecov-umbrella
verbose: true
- # TODO: parallelise this
- - name: Cypress run
- uses: cypress-io/github-action@v2
- with:
- install: false
- command: yarn test:e2e:ci
+ - name: QA Core Integration Tests
+ run: |
+ cd qa-core
+ yarn
+ yarn api:test:ci
\ No newline at end of file
diff --git a/.github/workflows/deploy-single-image.yml b/.github/workflows/deploy-single-image.yml
index 8bf8f232c5..cd16574eea 100644
--- a/.github/workflows/deploy-single-image.yml
+++ b/.github/workflows/deploy-single-image.yml
@@ -4,8 +4,6 @@ on:
workflow_dispatch:
env:
- BASE_BRANCH: ${{ github.event.pull_request.base.ref}}
- BRANCH: ${{ github.event.pull_request.head.ref }}
CI: true
PERSONAL_ACCESS_TOKEN : ${{ secrets.PERSONAL_ACCESS_TOKEN }}
REGISTRY_URL: registry.hub.docker.com
@@ -17,6 +15,11 @@ jobs:
matrix:
node-version: [14.x]
steps:
+ - name: Fail if branch is not master
+ if: github.ref != 'refs/heads/master'
+ run: |
+ echo "Ref is not master, you must run this job from master."
+ exit 1
- name: "Checkout"
uses: actions/checkout@v2
- name: Use Node.js ${{ matrix.node-version }}
@@ -28,8 +31,6 @@ jobs:
- name: Setup Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v1
- - name: Install Pro
- run: yarn install:pro $BRANCH $BASE_BRANCH
- name: Run Yarn
run: yarn
- name: Run Yarn Bootstrap
diff --git a/.github/workflows/release-develop.yml b/.github/workflows/release-develop.yml
index 57e65c734e..21c74851e1 100644
--- a/.github/workflows/release-develop.yml
+++ b/.github/workflows/release-develop.yml
@@ -46,7 +46,8 @@ jobs:
- run: yarn
- run: yarn bootstrap
- run: yarn lint
- - run: yarn build
+ - run: yarn build
+ - run: yarn build:sdk
- run: yarn test
- name: Configure AWS Credentials
diff --git a/.github/workflows/release-selfhost.yml b/.github/workflows/release-selfhost.yml
index da064f3e32..d78180fdc7 100644
--- a/.github/workflows/release-selfhost.yml
+++ b/.github/workflows/release-selfhost.yml
@@ -3,10 +3,6 @@ name: Budibase Release Selfhost
on:
workflow_dispatch:
-env:
- BRANCH: ${{ github.event.pull_request.head.ref }}
- BASE_BRANCH: ${{ github.event.pull_request.base.ref}}
-
jobs:
release:
runs-on: ubuntu-latest
@@ -54,9 +50,6 @@ jobs:
DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
SELFHOST_TAG: latest
- - name: Install Pro
- run: yarn install:pro $BRANCH $BASE_BRANCH
-
- name: Bootstrap and build (CLI)
run: |
yarn
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 961082e1ef..de288dd7db 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -56,6 +56,7 @@ jobs:
- run: yarn bootstrap
- run: yarn lint
- run: yarn build
+ - run: yarn build:sdk
- run: yarn test
- name: Configure AWS Credentials
diff --git a/.gitignore b/.gitignore
index 32c6faf980..e1d3e6db0e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -63,6 +63,7 @@ typings/
# dotenv environment variables file
.env
+!qa-core/.env
!hosting/.env
hosting/.generated-nginx.dev.conf
hosting/proxy/.generated-nginx.prod.conf
diff --git a/.prettierignore b/.prettierignore
index bbeff65da7..3a381d255e 100644
--- a/.prettierignore
+++ b/.prettierignore
@@ -9,3 +9,4 @@ packages/server/src/definitions/openapi.ts
packages/builder/.routify
packages/builder/cypress/support/queryLevelTransformerFunction.js
packages/builder/cypress/support/queryLevelTransformerFunctionWithData.js
+packages/sdk/sdk
\ No newline at end of file
diff --git a/README.md b/README.md
index 1dec1737da..bd38610566 100644
--- a/README.md
+++ b/README.md
@@ -65,7 +65,7 @@ Budibase is open-source - licensed as GPL v3. This should fill you with confiden
### Load data or start from scratch
-Budibase pulls in data from multiple sources, including MongoDB, CouchDB, PostgreSQL, MySQL, Airtable, S3, DynamoDB, or a REST API. And unlike other platforms, with Budibase you can start from scratch and create business apps with no data sources. [Request new data sources](https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas).
+Budibase pulls in data from multiple sources, including MongoDB, CouchDB, PostgreSQL, MySQL, Airtable, S3, DynamoDB, or a REST API. And unlike other platforms, with Budibase you can start from scratch and create business apps with no datasources. [Request new datasources](https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas).
diff --git a/charts/budibase/templates/app-service-deployment.yaml b/charts/budibase/templates/app-service-deployment.yaml
index 6517133a58..f72d1aef03 100644
--- a/charts/budibase/templates/app-service-deployment.yaml
+++ b/charts/budibase/templates/app-service-deployment.yaml
@@ -78,6 +78,8 @@ spec:
key: objectStoreSecret
- name: MINIO_URL
value: {{ .Values.services.objectStore.url }}
+ - name: PLUGIN_BUCKET_NAME
+ value: {{ .Values.services.objectStore.pluginBucketName | default "plugins" | quote }}
- name: PORT
value: {{ .Values.services.apps.port | quote }}
{{ if .Values.services.worker.publicApiRateLimitPerSecond }}
diff --git a/charts/budibase/templates/worker-service-deployment.yaml b/charts/budibase/templates/worker-service-deployment.yaml
index 902e9ac03d..b1c6110d95 100644
--- a/charts/budibase/templates/worker-service-deployment.yaml
+++ b/charts/budibase/templates/worker-service-deployment.yaml
@@ -77,6 +77,8 @@ spec:
key: objectStoreSecret
- name: MINIO_URL
value: {{ .Values.services.objectStore.url }}
+ - name: PLUGIN_BUCKET_NAME
+ value: {{ .Values.services.objectStore.pluginBucketName | default "plugins" | quote }}
- name: PORT
value: {{ .Values.services.worker.port | quote }}
- name: MULTI_TENANCY
diff --git a/charts/budibase/values.yaml b/charts/budibase/values.yaml
index a15504d58c..5c4004cb57 100644
--- a/charts/budibase/values.yaml
+++ b/charts/budibase/values.yaml
@@ -76,6 +76,7 @@ affinity: {}
globals:
appVersion: "latest"
budibaseEnv: PRODUCTION
+ tenantFeatureFlags: "*:LICENSING,*:USER_GROUPS"
enableAnalytics: "1"
sentryDSN: ""
posthogToken: "phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU"
diff --git a/docs/DEV-SETUP-DEBIAN.md b/docs/DEV-SETUP-DEBIAN.md
index 88a124708c..9edd8286cb 100644
--- a/docs/DEV-SETUP-DEBIAN.md
+++ b/docs/DEV-SETUP-DEBIAN.md
@@ -1,12 +1,15 @@
## Dev Environment on Debian 11
-### Install Node
+### Install NVM & Node 14
+NVM documentation: https://github.com/nvm-sh/nvm#installing-and-updating
-Budibase requires a recent version of node (14+):
+Install NVM
```
-curl -sL https://deb.nodesource.com/setup_16.x | sudo bash -
-apt -y install nodejs
-node -v
+curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.0/install.sh | bash
+```
+Install Node 14
+```
+nvm install 14
```
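+You can verify the installed version with:
+```
+node -v
+```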
### Install npm requirements
@@ -31,7 +34,7 @@ This setup process was tested on Debian 11 (bullseye) with version numbers show
- Docker: 20.10.5
- Docker-Compose: 1.29.2
-- Node: v16.15.1
+- Node: v14.20.1
- Yarn: 1.22.19
- Lerna: 5.1.4
diff --git a/docs/DEV-SETUP-MACOSX.md b/docs/DEV-SETUP-MACOSX.md
index c5990e58da..d9e2dcad6a 100644
--- a/docs/DEV-SETUP-MACOSX.md
+++ b/docs/DEV-SETUP-MACOSX.md
@@ -11,7 +11,7 @@ through brew.
### Install Node
-Budibase requires a recent version of node (14+):
+Budibase requires Node 14:
```
brew install node npm
node -v
@@ -38,7 +38,7 @@ This setup process was tested on Mac OSX 12 (Monterey) with version numbers show
- Docker: 20.10.14
- Docker-Compose: 2.6.0
-- Node: 18.3.0
+- Node: 14.20.1
- Yarn: 1.22.19
- Lerna: 5.1.4
@@ -59,4 +59,7 @@ The dev version will be available on port 10000 i.e.
http://127.0.0.1:10000/builder/admin
| **NOTE**: If you are working on a M1 Apple Silicon, you will need to uncomment `# platform: linux/amd64` line in
-[hosting/docker-compose-dev.yaml](../hosting/docker-compose.dev.yaml)
\ No newline at end of file
+[hosting/docker-compose-dev.yaml](../hosting/docker-compose.dev.yaml)
+
+### Troubleshooting
+If the `yarn setup` command fails, try installing NVM and Node 14, following the same instructions as for Debian 11.
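+
+For reference, the NVM commands from the Debian guide are:
+```
+curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.0/install.sh | bash
+nvm install 14
+```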
diff --git a/docs/DEV-SETUP-WINDOWS.md b/docs/DEV-SETUP-WINDOWS.md
new file mode 100644
index 0000000000..c5608b7567
--- /dev/null
+++ b/docs/DEV-SETUP-WINDOWS.md
@@ -0,0 +1,81 @@
+## Dev Environment on Windows 10/11 (WSL2)
+
+
+### Install WSL with Ubuntu LTS
+
+Enable WSL 2 on Windows 10/11 for Docker support.
+```
+wsl --set-default-version 2
+```
+Install Ubuntu LTS.
+```
+wsl --install Ubuntu
+```
+
+Or follow the instructions here:
+https://learn.microsoft.com/en-us/windows/wsl/install
+
+### Install Docker in Windows
+Download the installer from Docker and install it.
+
+Check this URL for more detailed instructions:
+https://docs.docker.com/desktop/install/windows-install/
+
+You should follow the next steps from within the Ubuntu terminal.
+
+### Install NVM & Node 14
+NVM documentation: https://github.com/nvm-sh/nvm#installing-and-updating
+
+Install NVM
+```
+curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.0/install.sh | bash
+```
+Install Node 14
+```
+nvm install 14
+```
+
+
+### Install npm requirements
+
+```
+npm install -g yarn jest lerna
+```
+
+### Clone the repo
+```
+git clone https://github.com/Budibase/budibase.git
+```
+
+### Check Versions
+
+This setup process was tested on Windows 11 with version numbers shown below. Your mileage may vary using anything else.
+
+- Docker: 20.10.7
+- Docker-Compose: 2.10.2
+- Node: v14.20.1
+- Yarn: 1.22.19
+- Lerna: 5.5.4
+
+### Build
+
+```
+cd budibase
+yarn setup
+```
+The yarn setup command runs several build steps i.e.
+```
+node ./hosting/scripts/setup.js && yarn && yarn bootstrap && yarn build && yarn dev
+```
+So this command will actually run the application in dev mode. It creates .env files under `./packages/server` and `./packages/worker` and runs docker containers for each service via docker-compose.
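+
+To confirm the service containers are up, you can list them from the Ubuntu terminal (assuming Docker Desktop's WSL integration is enabled):
+```
+docker ps
+```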
+
+The dev version will be available on port 10000 i.e.
+
+http://127.0.0.1:10000/builder/admin
+
+### Working with the code
+Here are the instructions for working on the application from within Visual Studio Code (in Windows) through WSL. All the commands and files are within the Ubuntu system, and it should run as if you were working on a Linux machine.
+
+https://code.visualstudio.com/docs/remote/wsl
+
+Note you will be able to run the application from within the WSL terminal and you will be able to access the application from a browser in Windows.
\ No newline at end of file
diff --git a/examples/nextjs-api-sales/definitions/openapi.ts b/examples/nextjs-api-sales/definitions/openapi.ts
index 4f4ad45fc6..7f7f6befec 100644
--- a/examples/nextjs-api-sales/definitions/openapi.ts
+++ b/examples/nextjs-api-sales/definitions/openapi.ts
@@ -348,7 +348,7 @@ export interface paths {
}
}
responses: {
- /** Returns the created table, including the ID which has been generated for it. This can be internal or external data sources. */
+ /** Returns the created table, including the ID which has been generated for it. This can be internal or external datasources. */
200: {
content: {
"application/json": components["schemas"]["tableOutput"]
@@ -959,7 +959,7 @@ export interface components {
query: {
/** @description The ID of the query. */
_id: string
- /** @description The ID of the data source the query belongs to. */
+ /** @description The ID of the datasource the query belongs to. */
datasourceId?: string
/** @description The bindings which are required to perform this query. */
parameters?: string[]
@@ -983,7 +983,7 @@ export interface components {
data: {
/** @description The ID of the query. */
_id: string
- /** @description The ID of the data source the query belongs to. */
+ /** @description The ID of the datasource the query belongs to. */
datasourceId?: string
/** @description The bindings which are required to perform this query. */
parameters?: string[]
diff --git a/examples/nextjs-api-sales/package.json b/examples/nextjs-api-sales/package.json
index 777d07f968..41ce52e952 100644
--- a/examples/nextjs-api-sales/package.json
+++ b/examples/nextjs-api-sales/package.json
@@ -12,7 +12,7 @@
"bulma": "^0.9.3",
"next": "12.1.0",
"node-fetch": "^3.2.10",
- "node-sass": "^7.0.1",
+ "sass": "^1.52.3",
"react": "17.0.2",
"react-dom": "17.0.2",
"react-notifications-component": "^3.4.1"
@@ -24,4 +24,4 @@
"eslint-config-next": "12.1.0",
"typescript": "4.6.2"
}
-}
+}
\ No newline at end of file
diff --git a/hosting/nginx.dev.conf.hbs b/hosting/nginx.dev.conf.hbs
index 430ea75398..39a8dc52af 100644
--- a/hosting/nginx.dev.conf.hbs
+++ b/hosting/nginx.dev.conf.hbs
@@ -24,6 +24,21 @@ http {
default "upgrade";
}
+ upstream app-service {
+ server {{address}}:4001;
+ keepalive 32;
+ }
+
+ upstream worker-service {
+ server {{address}}:4002;
+ keepalive 32;
+ }
+
+ upstream builder {
+ server {{address}}:3000;
+ keepalive 32;
+ }
+
server {
listen 10000 default_server;
server_name _;
@@ -43,50 +58,88 @@ http {
}
location ~ ^/api/(system|admin|global)/ {
- proxy_pass http://{{ address }}:4002;
+ proxy_pass http://worker-service;
+ proxy_read_timeout 120s;
+ proxy_connect_timeout 120s;
+ proxy_send_timeout 120s;
+ proxy_http_version 1.1;
+ proxy_set_header Connection "";
}
location /api/ {
proxy_read_timeout 120s;
proxy_connect_timeout 120s;
proxy_send_timeout 120s;
- proxy_pass http://{{ address }}:4001;
+ proxy_pass http://app-service;
+ proxy_http_version 1.1;
+ proxy_set_header Connection "";
}
location = / {
- proxy_pass http://{{ address }}:4001;
+ proxy_pass http://app-service;
+ proxy_read_timeout 120s;
+ proxy_connect_timeout 120s;
+ proxy_send_timeout 120s;
+ proxy_http_version 1.1;
+ proxy_set_header Connection "";
}
location /app_ {
- proxy_pass http://{{ address }}:4001;
+ proxy_pass http://app-service;
+ proxy_read_timeout 120s;
+ proxy_connect_timeout 120s;
+ proxy_send_timeout 120s;
+ proxy_http_version 1.1;
+ proxy_set_header Connection "";
}
location /app {
- proxy_pass http://{{ address }}:4001;
- }
-
- location /preview {
- proxy_pass http://{{ address }}:4001;
+ proxy_pass http://app-service;
+ proxy_read_timeout 120s;
+ proxy_connect_timeout 120s;
+ proxy_send_timeout 120s;
+ proxy_http_version 1.1;
+ proxy_set_header Connection "";
}
location /builder {
- proxy_pass http://{{ address }}:3000;
+ proxy_pass http://builder;
+ proxy_read_timeout 120s;
+ proxy_connect_timeout 120s;
+ proxy_send_timeout 120s;
+ proxy_http_version 1.1;
+ proxy_set_header Connection "";
rewrite ^/builder(.*)$ /builder/$1 break;
}
location /builder/ {
- proxy_pass http://{{ address }}:3000;
+ proxy_pass http://builder;
proxy_http_version 1.1;
proxy_set_header Connection $connection_upgrade;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+ proxy_read_timeout 120s;
+ proxy_connect_timeout 120s;
+ proxy_send_timeout 120s;
}
- location /vite {
- proxy_pass http://{{ address }}:3000;
- rewrite ^/vite(.*)$ /$1 break;
+ location /vite/ {
+ proxy_pass http://builder;
+ proxy_read_timeout 120s;
+ proxy_connect_timeout 120s;
+ proxy_send_timeout 120s;
+ rewrite ^/vite(.*)$ /$1 break;
+ }
+
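+    # websocket connections need the Upgrade and Connection headers set before being proxied to the app service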
+ location /socket/ {
+ proxy_http_version 1.1;
+ proxy_set_header Upgrade $http_upgrade;
+ proxy_set_header Connection 'upgrade';
+ proxy_set_header Host $host;
+ proxy_cache_bypass $http_upgrade;
+ proxy_pass http://app-service;
}
location / {
diff --git a/hosting/nginx.prod.conf.hbs b/hosting/nginx.prod.conf.hbs
index 0ff986d0a7..f3202ad4a4 100644
--- a/hosting/nginx.prod.conf.hbs
+++ b/hosting/nginx.prod.conf.hbs
@@ -88,10 +88,6 @@ http {
proxy_pass http://$apps:4002;
}
- location /preview {
- proxy_pass http://$apps:4002;
- }
-
location = / {
proxy_pass http://$apps:4002;
}
@@ -162,6 +158,15 @@ http {
rewrite ^/db/(.*)$ /$1 break;
}
+ location /socket/ {
+ proxy_http_version 1.1;
+ proxy_set_header Upgrade $http_upgrade;
+ proxy_set_header Connection 'upgrade';
+ proxy_set_header Host $host;
+ proxy_cache_bypass $http_upgrade;
+ proxy_pass http://$apps:4002;
+ }
+
location / {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
diff --git a/hosting/proxy/10-listen-on-ipv6-by-default.sh b/hosting/proxy/10-listen-on-ipv6-by-default.sh
new file mode 100644
index 0000000000..e2e89388a9
--- /dev/null
+++ b/hosting/proxy/10-listen-on-ipv6-by-default.sh
@@ -0,0 +1,24 @@
+#!/bin/sh
+# vim:sw=4:ts=4:et
+
+set -e
+
+ME=$(basename $0)
+NGINX_CONF_FILE="/etc/nginx/nginx.conf"
+DEFAULT_CONF_FILE="/etc/nginx/conf.d/default.conf"
+
+# check if we have ipv6 available
+if [ ! -f "/proc/net/if_inet6" ]; then
+ # ipv6 not available so delete lines from nginx conf
+ if [ -f "$NGINX_CONF_FILE" ]; then
+ sed -i '/listen \[::\]/d' $NGINX_CONF_FILE
+ fi
+ if [ -f "$DEFAULT_CONF_FILE" ]; then
+ sed -i '/listen \[::\]/d' $DEFAULT_CONF_FILE
+ fi
+ echo "$ME: info: ipv6 not available so delete lines from nginx conf"
+else
+ echo "$ME: info: ipv6 is available so no need to delete lines from nginx conf"
+fi
+
+exit 0
diff --git a/hosting/proxy/Dockerfile b/hosting/proxy/Dockerfile
index 298762aaf1..5fd0dc7d11 100644
--- a/hosting/proxy/Dockerfile
+++ b/hosting/proxy/Dockerfile
@@ -5,7 +5,7 @@ FROM nginx:latest
# override the output dir to output directly to /etc/nginx instead of /etc/nginx/conf.d
ENV NGINX_ENVSUBST_OUTPUT_DIR=/etc/nginx
COPY .generated-nginx.prod.conf /etc/nginx/templates/nginx.conf.template
-
+COPY 10-listen-on-ipv6-by-default.sh /docker-entrypoint.d/10-listen-on-ipv6-by-default.sh
# Error handling
COPY error.html /usr/share/nginx/html/error.html
diff --git a/hosting/scripts/build-target-paths.sh b/hosting/scripts/build-target-paths.sh
index ee314c1ce4..67e1765ca8 100644
--- a/hosting/scripts/build-target-paths.sh
+++ b/hosting/scripts/build-target-paths.sh
@@ -4,17 +4,21 @@ echo ${TARGETBUILD} > /buildtarget.txt
if [[ "${TARGETBUILD}" = "aas" ]]; then
# Azure AppService uses /home for persisent data & SSH on port 2222
DATA_DIR=/home
- mkdir -p $DATA_DIR/{search,minio,couchdb}
- mkdir -p $DATA_DIR/couchdb/{dbs,views}
- chown -R couchdb:couchdb $DATA_DIR/couchdb/
+ WEBSITES_ENABLE_APP_SERVICE_STORAGE=true
+ mkdir -p $DATA_DIR/{search,minio,couch}
+ mkdir -p $DATA_DIR/couch/{dbs,views}
+ chown -R couchdb:couchdb $DATA_DIR/couch/
apt update
apt-get install -y openssh-server
- sed -i "s/#Port 22/Port 2222/" /etc/ssh/sshd_config
+ echo "root:Docker!" | chpasswd
+ mkdir -p /tmp
+ chmod +x /tmp/ssh_setup.sh \
+ && (sleep 1;/tmp/ssh_setup.sh 2>&1 > /dev/null)
+ cp /etc/sshd_config /etc/ssh/sshd_config
/etc/init.d/ssh restart
sed -i "s#DATA_DIR#/home#g" /opt/clouseau/clouseau.ini
sed -i "s#DATA_DIR#/home#g" /opt/couchdb/etc/local.ini
else
sed -i "s#DATA_DIR#/data#g" /opt/clouseau/clouseau.ini
sed -i "s#DATA_DIR#/data#g" /opt/couchdb/etc/local.ini
-
fi
\ No newline at end of file
diff --git a/hosting/single/Dockerfile b/hosting/single/Dockerfile
index 476a6e5e94..58796f0362 100644
--- a/hosting/single/Dockerfile
+++ b/hosting/single/Dockerfile
@@ -19,8 +19,8 @@ ADD packages/worker .
RUN node /pinVersions.js && yarn && yarn build && /cleanup.sh
FROM couchdb:3.2.1
-# TARGETARCH can be amd64 or arm e.g. docker build --build-arg TARGETARCH=amd64
-ARG TARGETARCH=amd64
+ARG TARGETARCH
+ENV TARGETARCH $TARGETARCH
#TARGETBUILD can be set to single (for single docker image) or aas (for azure app service)
# e.g. docker build --build-arg TARGETBUILD=aas ....
ARG TARGETBUILD=single
@@ -29,23 +29,8 @@ ENV TARGETBUILD $TARGETBUILD
COPY --from=build /app /app
COPY --from=build /worker /worker
-ENV \
- APP_PORT=4001 \
- ARCHITECTURE=amd \
- BUDIBASE_ENVIRONMENT=PRODUCTION \
- CLUSTER_PORT=80 \
- # CUSTOM_DOMAIN=budi001.custom.com \
- DATA_DIR=/data \
- DEPLOYMENT_ENVIRONMENT=docker \
- MINIO_URL=http://localhost:9000 \
- POSTHOG_TOKEN=phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU \
- REDIS_URL=localhost:6379 \
- SELF_HOSTED=1 \
- TARGETBUILD=$TARGETBUILD \
- WORKER_PORT=4002 \
- WORKER_URL=http://localhost:4002 \
- APPS_URL=http://localhost:4001
-
+# ENV CUSTOM_DOMAIN=budi001.custom.com \
+# See runner.sh for Env Vars
# These secret env variables are generated by the runner at startup
# their values can be overriden by the user, they will be written
# to the .env file in the /data directory for use later on
@@ -117,6 +102,8 @@ RUN chmod +x ./build-target-paths.sh
# Script below sets the path for storing data based on $DATA_DIR
# For Azure App Service install SSH & point data locations to /home
+ADD hosting/single/ssh/sshd_config /etc/
+ADD hosting/single/ssh/ssh_setup.sh /tmp
RUN /build-target-paths.sh
# cleanup cache
@@ -124,6 +111,8 @@ RUN yarn cache clean -f
EXPOSE 80
EXPOSE 443
+# Expose port 2222 for SSH on Azure App Service build
+EXPOSE 2222
VOLUME /data
# setup letsencrypt certificate
diff --git a/hosting/single/couch/local.ini b/hosting/single/couch/local.ini
index 35f0383dfc..266c0d4b60 100644
--- a/hosting/single/couch/local.ini
+++ b/hosting/single/couch/local.ini
@@ -1,5 +1,5 @@
; CouchDB Configuration Settings
[couchdb]
-database_dir = DATA_DIR/couchdb/dbs
-view_index_dir = DATA_DIR/couchdb/views
+database_dir = DATA_DIR/couch/dbs
+view_index_dir = DATA_DIR/couch/views
diff --git a/hosting/single/nginx/nginx-default-site.conf b/hosting/single/nginx/nginx-default-site.conf
index c0d80a0185..bd89e21251 100644
--- a/hosting/single/nginx/nginx-default-site.conf
+++ b/hosting/single/nginx/nginx-default-site.conf
@@ -66,6 +66,15 @@ server {
rewrite ^/db/(.*)$ /$1 break;
}
+ location /socket/ {
+ proxy_http_version 1.1;
+ proxy_set_header Upgrade $http_upgrade;
+ proxy_set_header Connection 'upgrade';
+ proxy_set_header Host $host;
+ proxy_cache_bypass $http_upgrade;
+ proxy_pass http://127.0.0.1:4001;
+ }
+
location / {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
diff --git a/hosting/single/runner.sh b/hosting/single/runner.sh
index 09387343ba..6770d27ee0 100644
--- a/hosting/single/runner.sh
+++ b/hosting/single/runner.sh
@@ -1,18 +1,37 @@
#!/bin/bash
declare -a ENV_VARS=("COUCHDB_USER" "COUCHDB_PASSWORD" "DATA_DIR" "MINIO_ACCESS_KEY" "MINIO_SECRET_KEY" "INTERNAL_API_KEY" "JWT_SECRET" "REDIS_PASSWORD")
-
+declare -a DOCKER_VARS=("APP_PORT" "APPS_URL" "ARCHITECTURE" "BUDIBASE_ENVIRONMENT" "CLUSTER_PORT" "DEPLOYMENT_ENVIRONMENT" "MINIO_URL" "NODE_ENV" "POSTHOG_TOKEN" "REDIS_URL" "SELF_HOSTED" "WORKER_PORT" "WORKER_URL" "TENANT_FEATURE_FLAGS" "ACCOUNT_PORTAL_URL")
+# Check the env vars set in Dockerfile have come through, AAS seems to drop them
+[[ -z "${APP_PORT}" ]] && export APP_PORT=4001
+[[ -z "${ARCHITECTURE}" ]] && export ARCHITECTURE=amd
+[[ -z "${BUDIBASE_ENVIRONMENT}" ]] && export BUDIBASE_ENVIRONMENT=PRODUCTION
+[[ -z "${CLUSTER_PORT}" ]] && export CLUSTER_PORT=80
+[[ -z "${DEPLOYMENT_ENVIRONMENT}" ]] && export DEPLOYMENT_ENVIRONMENT=docker
+[[ -z "${MINIO_URL}" ]] && export MINIO_URL=http://localhost:9000
+[[ -z "${NODE_ENV}" ]] && export NODE_ENV=production
+[[ -z "${POSTHOG_TOKEN}" ]] && export POSTHOG_TOKEN=phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU
+[[ -z "${TENANT_FEATURE_FLAGS}" ]] && export TENANT_FEATURE_FLAGS="*:LICENSING,*:USER_GROUPS"
+[[ -z "${ACCOUNT_PORTAL_URL}" ]] && export ACCOUNT_PORTAL_URL=https://account.budibase.app
+[[ -z "${REDIS_URL}" ]] && export REDIS_URL=localhost:6379
+[[ -z "${SELF_HOSTED}" ]] && export SELF_HOSTED=1
+[[ -z "${WORKER_PORT}" ]] && export WORKER_PORT=4002
+[[ -z "${WORKER_URL}" ]] && export WORKER_URL=http://localhost:4002
+[[ -z "${APPS_URL}" ]] && export APPS_URL=http://localhost:4001
+# export CUSTOM_DOMAIN=budi001.custom.com
# Azure App Service customisations
if [[ "${TARGETBUILD}" = "aas" ]]; then
DATA_DIR=/home
+ WEBSITES_ENABLE_APP_SERVICE_STORAGE=true
/etc/init.d/ssh start
else
DATA_DIR=${DATA_DIR:-/data}
fi
if [ -f "${DATA_DIR}/.env" ]; then
- export $(cat ${DATA_DIR}/.env | xargs)
+ # Read in the .env file and export the variables
+ for LINE in $(cat ${DATA_DIR}/.env); do export $LINE; done
fi
-# first randomise any unset environment variables
+# randomise any unset environment variables
for ENV_VAR in "${ENV_VARS[@]}"
do
temp=$(eval "echo \$$ENV_VAR")
@@ -30,16 +49,23 @@ if [ ! -f "${DATA_DIR}/.env" ]; then
temp=$(eval "echo \$$ENV_VAR")
echo "$ENV_VAR=$temp" >> ${DATA_DIR}/.env
done
+ for ENV_VAR in "${DOCKER_VARS[@]}"
+ do
+ temp=$(eval "echo \$$ENV_VAR")
+ echo "$ENV_VAR=$temp" >> ${DATA_DIR}/.env
+ done
echo "COUCH_DB_URL=${COUCH_DB_URL}" >> ${DATA_DIR}/.env
fi
-export COUCH_DB_URL=http://$COUCHDB_USER:$COUCHDB_PASSWORD@localhost:5984
-
+# Read in the .env file and export the variables
+for LINE in $(cat ${DATA_DIR}/.env); do export $LINE; done
+ln -s ${DATA_DIR}/.env /app/.env
+ln -s ${DATA_DIR}/.env /worker/.env
# make these directories in runner, incase of mount
-mkdir -p ${DATA_DIR}/couchdb/{dbs,views}
+mkdir -p ${DATA_DIR}/couch/{dbs,views}
mkdir -p ${DATA_DIR}/minio
mkdir -p ${DATA_DIR}/search
-chown -R couchdb:couchdb ${DATA_DIR}/couchdb
+chown -R couchdb:couchdb ${DATA_DIR}/couch
redis-server --requirepass $REDIS_PASSWORD &
/opt/clouseau/bin/clouseau &
/minio/minio server ${DATA_DIR}/minio &
diff --git a/hosting/single/ssh/ssh_setup.sh b/hosting/single/ssh/ssh_setup.sh
new file mode 100644
index 0000000000..0af0b6d7ad
--- /dev/null
+++ b/hosting/single/ssh/ssh_setup.sh
@@ -0,0 +1,8 @@
+#!/bin/sh
+
+ssh-keygen -A
+
+#prepare run dir
+if [ ! -d "/var/run/sshd" ]; then
+ mkdir -p /var/run/sshd
+fi
\ No newline at end of file
diff --git a/hosting/single/ssh/sshd_config b/hosting/single/ssh/sshd_config
new file mode 100644
index 0000000000..7eb5df953a
--- /dev/null
+++ b/hosting/single/ssh/sshd_config
@@ -0,0 +1,12 @@
+Port 2222
+ListenAddress 0.0.0.0
+LoginGraceTime 180
+X11Forwarding yes
+Ciphers aes128-cbc,3des-cbc,aes256-cbc,aes128-ctr,aes192-ctr,aes256-ctr
+MACs hmac-sha1,hmac-sha1-96
+StrictModes yes
+SyslogFacility DAEMON
+PasswordAuthentication yes
+PermitEmptyPasswords no
+PermitRootLogin yes
+Subsystem sftp internal-sftp
diff --git a/lerna.json b/lerna.json
index bee30cbc57..5600db4cf3 100644
--- a/lerna.json
+++ b/lerna.json
@@ -1,5 +1,5 @@
{
- "version": "1.3.12-alpha.3",
+ "version": "2.0.30-alpha.12",
"npmClient": "yarn",
"packages": [
"packages/*"
diff --git a/package.json b/package.json
index 4c24e0025b..7733a6df95 100644
--- a/package.json
+++ b/package.json
@@ -3,7 +3,6 @@
"private": true,
"devDependencies": {
"@rollup/plugin-json": "^4.0.2",
- "@types/mongodb": "3.6.3",
"@typescript-eslint/parser": "4.28.0",
"babel-eslint": "^10.0.3",
"eslint": "^7.28.0",
@@ -13,6 +12,7 @@
"js-yaml": "^4.1.0",
"kill-port": "^1.6.1",
"lerna": "3.14.1",
+ "madge": "^5.0.1",
"prettier": "^2.3.1",
"prettier-plugin-svelte": "^2.3.0",
"rimraf": "^3.0.2",
@@ -25,6 +25,8 @@
"bootstrap": "lerna bootstrap && lerna link && ./scripts/link-dependencies.sh",
"build": "lerna run build",
"build:dev": "lerna run prebuild && tsc --build --watch --preserveWatchOutput",
+ "build:sdk": "lerna run build:sdk",
+ "deps:circular": "madge packages/server/dist/index.js packages/worker/src/index.ts packages/backend-core/dist/src/index.js packages/cli/src/index.js --circular",
"release": "lerna publish ${RELEASE_VERSION_TYPE:-patch} --yes --force-publish && yarn release:pro",
"release:develop": "lerna publish prerelease --yes --force-publish --dist-tag develop --exact && yarn release:pro:develop",
"release:pro": "bash scripts/pro/release.sh",
@@ -45,8 +47,8 @@
"lint:eslint": "eslint packages",
"lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\"",
"lint": "yarn run lint:eslint && yarn run lint:prettier",
- "lint:fix:eslint": "eslint --fix packages",
- "lint:fix:prettier": "prettier --write \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\"",
+ "lint:fix:eslint": "eslint --fix packages qa-core",
+ "lint:fix:prettier": "prettier --write \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\" && prettier --write \"qa-core/**/*.{js,ts,svelte}\"",
"lint:fix": "yarn run lint:fix:prettier && yarn run lint:fix:eslint",
"test:e2e": "lerna run cy:test --stream",
"test:e2e:ci": "lerna run cy:ci --stream",
diff --git a/packages/backend-core/context.js b/packages/backend-core/context.js
index aaa0f56f92..c6fa87a337 100644
--- a/packages/backend-core/context.js
+++ b/packages/backend-core/context.js
@@ -6,6 +6,7 @@ const {
updateAppId,
doInAppContext,
doInTenant,
+ doInContext,
} = require("./src/context")
const identity = require("./src/context/identity")
@@ -19,4 +20,5 @@ module.exports = {
doInAppContext,
doInTenant,
identity,
+ doInContext,
}
diff --git a/packages/backend-core/package.json b/packages/backend-core/package.json
index 3f5cb33241..2e358f07b5 100644
--- a/packages/backend-core/package.json
+++ b/packages/backend-core/package.json
@@ -1,6 +1,6 @@
{
"name": "@budibase/backend-core",
- "version": "1.3.12-alpha.3",
+ "version": "2.0.30-alpha.12",
"description": "Budibase backend core libraries used in server and worker",
"main": "dist/src/index.js",
"types": "dist/src/index.d.ts",
@@ -20,11 +20,12 @@
"test:watch": "jest --watchAll"
},
"dependencies": {
- "@budibase/types": "1.3.12-alpha.3",
+ "@budibase/types": "2.0.30-alpha.12",
"@shopify/jest-koa-mocks": "5.0.1",
"@techpass/passport-openidconnect": "0.3.2",
"aws-sdk": "2.1030.0",
"bcrypt": "5.0.1",
+ "bcryptjs": "2.4.3",
"dotenv": "16.0.1",
"emitter-listener": "1.1.2",
"ioredis": "4.28.0",
@@ -61,6 +62,7 @@
]
},
"devDependencies": {
+ "@types/chance": "1.1.3",
"@types/jest": "27.5.1",
"@types/koa": "2.0.52",
"@types/lodash": "4.14.180",
@@ -71,6 +73,7 @@
"@types/semver": "7.3.7",
"@types/tar-fs": "2.0.1",
"@types/uuid": "8.3.4",
+ "chance": "1.1.3",
"ioredis-mock": "5.8.0",
"jest": "27.5.1",
"koa": "2.7.0",
diff --git a/packages/backend-core/plugins.js b/packages/backend-core/plugins.js
new file mode 100644
index 0000000000..018e214dcb
--- /dev/null
+++ b/packages/backend-core/plugins.js
@@ -0,0 +1,3 @@
+module.exports = {
+ ...require("./src/plugin"),
+}
diff --git a/packages/backend-core/src/constants.js b/packages/backend-core/src/constants.js
index 172e66e603..44c271a4f8 100644
--- a/packages/backend-core/src/constants.js
+++ b/packages/backend-core/src/constants.js
@@ -7,6 +7,7 @@ exports.Cookies = {
CurrentApp: "budibase:currentapp",
Auth: "budibase:auth",
Init: "budibase:init",
+ ACCOUNT_RETURN_URL: "budibase:account:returnurl",
DatasourceAuth: "budibase:datasourceauth",
OIDC_CONFIG: "budibase:oidc:config",
}
diff --git a/packages/backend-core/src/context/index.ts b/packages/backend-core/src/context/index.ts
index 78ce764d55..35eeee608b 100644
--- a/packages/backend-core/src/context/index.ts
+++ b/packages/backend-core/src/context/index.ts
@@ -2,7 +2,7 @@ import env from "../environment"
import { SEPARATOR, DocumentType } from "../db/constants"
import cls from "./FunctionContext"
import { dangerousGetDB, closeDB } from "../db"
-import { baseGlobalDBName } from "../tenancy/utils"
+import { baseGlobalDBName } from "../db/tenancy"
import { IdentityContext } from "@budibase/types"
import { DEFAULT_TENANT_ID as _DEFAULT_TENANT_ID } from "../constants"
import { ContextKey } from "./constants"
@@ -65,7 +65,16 @@ export const getTenantIDFromAppID = (appId: string) => {
}
}
-// used for automations, API endpoints should always be in context already
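+// enters the app's tenant context and then the app context before running the task - used by automations and other code running outside an API request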
+export const doInContext = async (appId: string, task: any) => {
+ // gets the tenant ID from the app ID
+ const tenantId = getTenantIDFromAppID(appId)
+ return doInTenant(tenantId, async () => {
+ return doInAppContext(appId, async () => {
+ return task()
+ })
+ })
+}
+
export const doInTenant = (tenantId: string | null, task: any) => {
// make sure default always selected in single tenancy
if (!env.MULTI_TENANCY) {
@@ -226,6 +235,10 @@ export const getAppId = () => {
}
}
+export const isTenancyEnabled = () => {
+ return env.MULTI_TENANCY
+}
+
/**
* Opens the app database based on whatever the request
* contained, dev or prod.
diff --git a/packages/backend-core/src/db/Replication.ts b/packages/backend-core/src/db/Replication.ts
index b46f6072be..e0bd3c7a43 100644
--- a/packages/backend-core/src/db/Replication.ts
+++ b/packages/backend-core/src/db/Replication.ts
@@ -1,4 +1,5 @@
import { dangerousGetDB, closeDB } from "."
+import { DocumentType } from "./constants"
class Replication {
source: any
@@ -53,6 +54,14 @@ class Replication {
return this.replication
}
+ appReplicateOpts() {
+ return {
+ filter: (doc: any) => {
+ return doc._id !== DocumentType.APP_METADATA
+ },
+ }
+ }
+
/**
* Rollback the target DB back to the state of the source DB
*/
@@ -60,6 +69,7 @@ class Replication {
await this.target.destroy()
// Recreate the DB again
this.target = dangerousGetDB(this.target.name)
+ // take the opportunity to remove deleted tombstones
await this.replicate()
}
diff --git a/packages/backend-core/src/db/constants.ts b/packages/backend-core/src/db/constants.ts
index fd464ba5fb..a61e8a2af2 100644
--- a/packages/backend-core/src/db/constants.ts
+++ b/packages/backend-core/src/db/constants.ts
@@ -19,6 +19,8 @@ export enum ViewName {
ROUTING = "screen_routes",
AUTOMATION_LOGS = "automation_logs",
ACCOUNT_BY_EMAIL = "account_by_email",
+ PLATFORM_USERS_LOWERCASE = "platform_users_lowercase",
+ USER_BY_GROUP = "by_group_user",
}
export const DeprecatedViews = {
@@ -43,6 +45,10 @@ export enum DocumentType {
DEV_INFO = "devinfo",
AUTOMATION_LOG = "log_au",
ACCOUNT_METADATA = "acc_metadata",
+ PLUGIN = "plg",
+ TABLE = "ta",
+ DATASOURCE = "datasource",
+ DATASOURCE_PLUS = "datasource_plus",
}
export const StaticDatabases = {
diff --git a/packages/backend-core/src/db/conversions.js b/packages/backend-core/src/db/conversions.js
index 90c04e9251..5b1a785ecc 100644
--- a/packages/backend-core/src/db/conversions.js
+++ b/packages/backend-core/src/db/conversions.js
@@ -36,6 +36,7 @@ exports.getDevelopmentAppID = appId => {
const rest = split.join(APP_PREFIX)
return `${APP_DEV_PREFIX}${rest}`
}
+exports.getDevAppID = exports.getDevelopmentAppID
/**
* Convert a development app ID to a deployed app ID.
diff --git a/packages/backend-core/src/db/tenancy.ts b/packages/backend-core/src/db/tenancy.ts
new file mode 100644
index 0000000000..d920f7cd41
--- /dev/null
+++ b/packages/backend-core/src/db/tenancy.ts
@@ -0,0 +1,22 @@
+import { DEFAULT_TENANT_ID } from "../constants"
+import { StaticDatabases, SEPARATOR } from "./constants"
+import { getTenantId } from "../context"
+
+export const getGlobalDBName = (tenantId?: string) => {
+ // tenant ID can be set externally, for example user API where
+ // new tenants are being created, this may be the case
+ if (!tenantId) {
+ tenantId = getTenantId()
+ }
+ return baseGlobalDBName(tenantId)
+}
+
+export const baseGlobalDBName = (tenantId: string | undefined | null) => {
+ let dbName
+ if (!tenantId || tenantId === DEFAULT_TENANT_ID) {
+ dbName = StaticDatabases.GLOBAL.name
+ } else {
+ dbName = `${tenantId}${SEPARATOR}${StaticDatabases.GLOBAL.name}`
+ }
+ return dbName
+}
diff --git a/packages/backend-core/src/db/utils.ts b/packages/backend-core/src/db/utils.ts
index 321ebd7f58..1c4be7e366 100644
--- a/packages/backend-core/src/db/utils.ts
+++ b/packages/backend-core/src/db/utils.ts
@@ -2,7 +2,8 @@ import { newid } from "../hashing"
import { DEFAULT_TENANT_ID, Configs } from "../constants"
import env from "../environment"
import { SEPARATOR, DocumentType, UNICODE_MAX, ViewName } from "./constants"
-import { getTenantId, getGlobalDBName, getGlobalDB } from "../tenancy"
+import { getTenantId, getGlobalDB } from "../context"
+import { getGlobalDBName } from "./tenancy"
import fetch from "node-fetch"
import { doWithDB, allDbs } from "./index"
import { getCouchInfo } from "./pouch"
@@ -15,6 +16,7 @@ import * as events from "../events"
export * from "./constants"
export * from "./conversions"
export { default as Replication } from "./Replication"
+export * from "./tenancy"
/**
* Generates a new app ID.
@@ -62,6 +64,28 @@ export function getQueryIndex(viewName: ViewName) {
return `database/${viewName}`
}
+/**
+ * Check if a given ID is that of a table.
+ * @returns {boolean}
+ */
+export const isTableId = (id: string) => {
+ // this includes datasource plus tables
+ return (
+ id &&
+ (id.startsWith(`${DocumentType.TABLE}${SEPARATOR}`) ||
+ id.startsWith(`${DocumentType.DATASOURCE_PLUS}${SEPARATOR}`))
+ )
+}
+
+/**
+ * Check if a given ID is that of a datasource or datasource plus.
+ * @returns {boolean}
+ */
+export const isDatasourceId = (id: string) => {
+ // this covers both datasources and datasource plus
+ return id && id.startsWith(`${DocumentType.DATASOURCE}${SEPARATOR}`)
+}
+
/**
* Generates a new workspace ID.
* @returns {string} The new workspace ID which the workspace doc can be stored under.
@@ -254,7 +278,16 @@ export async function getAllApps({ dev, all, idsOnly, efficient }: any = {}) {
return false
})
if (idsOnly) {
- return appDbNames
+ const devAppIds = appDbNames.filter(appId => isDevAppID(appId))
+ const prodAppIds = appDbNames.filter(appId => !isDevAppID(appId))
+ switch (dev) {
+ case true:
+ return devAppIds
+ case false:
+ return prodAppIds
+ default:
+ return appDbNames
+ }
}
const appPromises = appDbNames.map((app: any) =>
// skip setup otherwise databases could be re-created
@@ -357,6 +390,21 @@ export const generateDevInfoID = (userId: any) => {
return `${DocumentType.DEV_INFO}${SEPARATOR}${userId}`
}
+/**
+ * Generates a new plugin ID - to be used in the global DB.
+ * @returns {string} The new plugin ID which a plugin metadata document can be stored under.
+ */
+export const generatePluginID = (name: string) => {
+ return `${DocumentType.PLUGIN}${SEPARATOR}${name}`
+}
+
+/**
+ * Gets parameters for retrieving plugins, this is a utility function for the getDocParams function.
+ */
+export const getPluginParams = (pluginId?: string | null, otherProps = {}) => {
+ return getDocParams(DocumentType.PLUGIN, pluginId, otherProps)
+}
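+// e.g. getPluginParams() with no ID gives params that retrieve every plugin document,
+// while generatePluginID("name") gives the ID for a single plugin's metadata doc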
+
/**
* Returns the most granular configuration document from the DB based on the type, workspace and userID passed.
* @param {Object} db - db instance to query
diff --git a/packages/backend-core/src/db/views.js b/packages/backend-core/src/db/views.js
deleted file mode 100644
index b2562bdc71..0000000000
--- a/packages/backend-core/src/db/views.js
+++ /dev/null
@@ -1,203 +0,0 @@
-const {
- DocumentType,
- ViewName,
- DeprecatedViews,
- SEPARATOR,
-} = require("./utils")
-const { getGlobalDB } = require("../tenancy")
-const { StaticDatabases } = require("./constants")
-const { doWithDB } = require("./")
-
-const DESIGN_DB = "_design/database"
-
-function DesignDoc() {
- return {
- _id: DESIGN_DB,
- // view collation information, read before writing any complex views:
- // https://docs.couchdb.org/en/master/ddocs/views/collation.html#collation-specification
- views: {},
- }
-}
-
-async function removeDeprecated(db, viewName) {
- if (!DeprecatedViews[viewName]) {
- return
- }
- try {
- const designDoc = await db.get(DESIGN_DB)
- for (let deprecatedNames of DeprecatedViews[viewName]) {
- delete designDoc.views[deprecatedNames]
- }
- await db.put(designDoc)
- } catch (err) {
- // doesn't exist, ignore
- }
-}
-
-exports.createNewUserEmailView = async () => {
- const db = getGlobalDB()
- let designDoc
- try {
- designDoc = await db.get(DESIGN_DB)
- } catch (err) {
- // no design doc, make one
- designDoc = DesignDoc()
- }
- const view = {
- // if using variables in a map function need to inject them before use
- map: `function(doc) {
- if (doc._id.startsWith("${DocumentType.USER}${SEPARATOR}")) {
- emit(doc.email.toLowerCase(), doc._id)
- }
- }`,
- }
- designDoc.views = {
- ...designDoc.views,
- [ViewName.USER_BY_EMAIL]: view,
- }
- await db.put(designDoc)
-}
-
-exports.createAccountEmailView = async () => {
- await doWithDB(StaticDatabases.PLATFORM_INFO.name, async db => {
- let designDoc
- try {
- designDoc = await db.get(DESIGN_DB)
- } catch (err) {
- // no design doc, make one
- designDoc = DesignDoc()
- }
- const view = {
- // if using variables in a map function need to inject them before use
- map: `function(doc) {
- if (doc._id.startsWith("${DocumentType.ACCOUNT_METADATA}${SEPARATOR}")) {
- emit(doc.email.toLowerCase(), doc._id)
- }
- }`,
- }
- designDoc.views = {
- ...designDoc.views,
- [ViewName.ACCOUNT_BY_EMAIL]: view,
- }
- await db.put(designDoc)
- })
-}
-
-exports.createUserAppView = async () => {
- const db = getGlobalDB()
- let designDoc
- try {
- designDoc = await db.get("_design/database")
- } catch (err) {
- // no design doc, make one
- designDoc = DesignDoc()
- }
- const view = {
- // if using variables in a map function need to inject them before use
- map: `function(doc) {
- if (doc._id.startsWith("${DocumentType.USER}${SEPARATOR}") && doc.roles) {
- for (let prodAppId of Object.keys(doc.roles)) {
- let emitted = prodAppId + "${SEPARATOR}" + doc._id
- emit(emitted, null)
- }
- }
- }`,
- }
- designDoc.views = {
- ...designDoc.views,
- [ViewName.USER_BY_APP]: view,
- }
- await db.put(designDoc)
-}
-
-exports.createApiKeyView = async () => {
- const db = getGlobalDB()
- let designDoc
- try {
- designDoc = await db.get("_design/database")
- } catch (err) {
- designDoc = DesignDoc()
- }
- const view = {
- map: `function(doc) {
- if (doc._id.startsWith("${DocumentType.DEV_INFO}") && doc.apiKey) {
- emit(doc.apiKey, doc.userId)
- }
- }`,
- }
- designDoc.views = {
- ...designDoc.views,
- [ViewName.BY_API_KEY]: view,
- }
- await db.put(designDoc)
-}
-
-exports.createUserBuildersView = async () => {
- const db = getGlobalDB()
- let designDoc
- try {
- designDoc = await db.get("_design/database")
- } catch (err) {
- // no design doc, make one
- designDoc = DesignDoc()
- }
- const view = {
- map: `function(doc) {
- if (doc.builder && doc.builder.global === true) {
- emit(doc._id, doc._id)
- }
- }`,
- }
- designDoc.views = {
- ...designDoc.views,
- [ViewName.USER_BY_BUILDERS]: view,
- }
- await db.put(designDoc)
-}
-
-exports.queryView = async (viewName, params, db, CreateFuncByName) => {
- try {
- let response = (await db.query(`database/${viewName}`, params)).rows
- response = response.map(resp =>
- params.include_docs ? resp.doc : resp.value
- )
- if (params.arrayResponse) {
- return response
- } else {
- return response.length <= 1 ? response[0] : response
- }
- } catch (err) {
- if (err != null && err.name === "not_found") {
- const createFunc = CreateFuncByName[viewName]
- await removeDeprecated(db, viewName)
- await createFunc()
- return exports.queryView(viewName, params, db, CreateFuncByName)
- } else {
- throw err
- }
- }
-}
-
-exports.queryPlatformView = async (viewName, params) => {
- const CreateFuncByName = {
- [ViewName.ACCOUNT_BY_EMAIL]: exports.createAccountEmailView,
- }
-
- return doWithDB(StaticDatabases.PLATFORM_INFO.name, async db => {
- return exports.queryView(viewName, params, db, CreateFuncByName)
- })
-}
-
-exports.queryGlobalView = async (viewName, params, db = null) => {
- const CreateFuncByName = {
- [ViewName.USER_BY_EMAIL]: exports.createNewUserEmailView,
- [ViewName.BY_API_KEY]: exports.createApiKeyView,
- [ViewName.USER_BY_BUILDERS]: exports.createUserBuildersView,
- [ViewName.USER_BY_APP]: exports.createUserAppView,
- }
- // can pass DB in if working with something specific
- if (!db) {
- db = getGlobalDB()
- }
- return exports.queryView(viewName, params, db, CreateFuncByName)
-}
diff --git a/packages/backend-core/src/db/views.ts b/packages/backend-core/src/db/views.ts
new file mode 100644
index 0000000000..f0fff918fc
--- /dev/null
+++ b/packages/backend-core/src/db/views.ts
@@ -0,0 +1,199 @@
+import { DocumentType, ViewName, DeprecatedViews, SEPARATOR } from "./utils"
+import { getGlobalDB } from "../context"
+import PouchDB from "pouchdb"
+import { StaticDatabases } from "./constants"
+import { doWithDB } from "./"
+
+const DESIGN_DB = "_design/database"
+
+function DesignDoc() {
+ return {
+ _id: DESIGN_DB,
+ // view collation information, read before writing any complex views:
+ // https://docs.couchdb.org/en/master/ddocs/views/collation.html#collation-specification
+ views: {},
+ }
+}
+
+interface DesignDocument {
+ views: any
+}
+
+async function removeDeprecated(db: PouchDB.Database, viewName: ViewName) {
+ // @ts-ignore
+ if (!DeprecatedViews[viewName]) {
+ return
+ }
+ try {
+ const designDoc = await db.get(DESIGN_DB)
+ // @ts-ignore
+ for (let deprecatedNames of DeprecatedViews[viewName]) {
+ delete designDoc.views[deprecatedNames]
+ }
+ await db.put(designDoc)
+ } catch (err) {
+ // doesn't exist, ignore
+ }
+}
+
+export async function createView(db: any, viewJs: string, viewName: string) {
+ let designDoc
+ try {
+ designDoc = (await db.get(DESIGN_DB)) as DesignDocument
+ } catch (err) {
+ // no design doc, make one
+ designDoc = DesignDoc()
+ }
+ const view = {
+ map: viewJs,
+ }
+ designDoc.views = {
+ ...designDoc.views,
+ [viewName]: view,
+ }
+ await db.put(designDoc)
+}
+
+export const createNewUserEmailView = async () => {
+ const db = getGlobalDB()
+ const viewJs = `function(doc) {
+ if (doc._id.startsWith("${DocumentType.USER}${SEPARATOR}")) {
+ emit(doc.email.toLowerCase(), doc._id)
+ }
+ }`
+ await createView(db, viewJs, ViewName.USER_BY_EMAIL)
+}
+
+export const createAccountEmailView = async () => {
+ const viewJs = `function(doc) {
+ if (doc._id.startsWith("${DocumentType.ACCOUNT_METADATA}${SEPARATOR}")) {
+ emit(doc.email.toLowerCase(), doc._id)
+ }
+ }`
+ await doWithDB(
+ StaticDatabases.PLATFORM_INFO.name,
+ async (db: PouchDB.Database) => {
+ await createView(db, viewJs, ViewName.ACCOUNT_BY_EMAIL)
+ }
+ )
+}
+
+export const createUserAppView = async () => {
+ const db = getGlobalDB() as PouchDB.Database
+ const viewJs = `function(doc) {
+ if (doc._id.startsWith("${DocumentType.USER}${SEPARATOR}") && doc.roles) {
+ for (let prodAppId of Object.keys(doc.roles)) {
+ let emitted = prodAppId + "${SEPARATOR}" + doc._id
+ emit(emitted, null)
+ }
+ }
+ }`
+ await createView(db, viewJs, ViewName.USER_BY_APP)
+}
+
+export const createApiKeyView = async () => {
+ const db = getGlobalDB()
+ const viewJs = `function(doc) {
+ if (doc._id.startsWith("${DocumentType.DEV_INFO}") && doc.apiKey) {
+ emit(doc.apiKey, doc.userId)
+ }
+ }`
+ await createView(db, viewJs, ViewName.BY_API_KEY)
+}
+
+export const createUserBuildersView = async () => {
+ const db = getGlobalDB()
+ const viewJs = `function(doc) {
+ if (doc.builder && doc.builder.global === true) {
+ emit(doc._id, doc._id)
+ }
+ }`
+ await createView(db, viewJs, ViewName.USER_BY_BUILDERS)
+}
+
+export const createPlatformUserView = async () => {
+ const viewJs = `function(doc) {
+ if (doc.tenantId) {
+ emit(doc._id.toLowerCase(), doc._id)
+ }
+ }`
+ await doWithDB(
+ StaticDatabases.PLATFORM_INFO.name,
+ async (db: PouchDB.Database) => {
+ await createView(db, viewJs, ViewName.PLATFORM_USERS_LOWERCASE)
+ }
+ )
+}
+
+export interface QueryViewOptions {
+ arrayResponse?: boolean
+}
+
+export const queryView = async (
+ viewName: ViewName,
+ params: PouchDB.Query.Options,
+ db: PouchDB.Database,
+ createFunc: any,
+ opts?: QueryViewOptions
+): Promise<any> => {
+ try {
+ let response = await db.query(`database/${viewName}`, params)
+ const rows = response.rows
+ const docs = rows.map(row => (params.include_docs ? row.doc : row.value))
+
+ // if arrayResponse has been requested, always return array regardless of length
+ if (opts?.arrayResponse) {
+ return docs
+ } else {
+ // return the single document if there is only one
+ return docs.length <= 1 ? docs[0] : docs
+ }
+ } catch (err: any) {
+ if (err != null && err.name === "not_found") {
+ await removeDeprecated(db, viewName)
+ await createFunc()
+ return queryView(viewName, params, db, createFunc, opts)
+ } else {
+ throw err
+ }
+ }
+}
+
+export const queryPlatformView = async (
+ viewName: ViewName,
+ params: PouchDB.Query.Options,
+ opts?: QueryViewOptions
+): Promise<any> => {
+ const CreateFuncByName: any = {
+ [ViewName.ACCOUNT_BY_EMAIL]: createAccountEmailView,
+ [ViewName.PLATFORM_USERS_LOWERCASE]: createPlatformUserView,
+ }
+
+ return doWithDB(
+ StaticDatabases.PLATFORM_INFO.name,
+ async (db: PouchDB.Database) => {
+ const createFn = CreateFuncByName[viewName]
+ return queryView(viewName, params, db, createFn, opts)
+ }
+ )
+}
+
+export const queryGlobalView = async (
+ viewName: ViewName,
+ params: PouchDB.Query.Options,
+ db?: PouchDB.Database,
+ opts?: QueryViewOptions
+): Promise<any> => {
+ const CreateFuncByName: any = {
+ [ViewName.USER_BY_EMAIL]: createNewUserEmailView,
+ [ViewName.BY_API_KEY]: createApiKeyView,
+ [ViewName.USER_BY_BUILDERS]: createUserBuildersView,
+ [ViewName.USER_BY_APP]: createUserAppView,
+ }
+ // can pass DB in if working with something specific
+ if (!db) {
+ db = getGlobalDB() as PouchDB.Database
+ }
+ const createFn = CreateFuncByName[viewName]
+ return queryView(viewName, params, db, createFn, opts)
+}
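+
+// Illustrative usage - look up a user document by email via the global view:
+//   const user = await queryGlobalView(ViewName.USER_BY_EMAIL, {
+//     key: email.toLowerCase(),
+//     include_docs: true,
+//   })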
diff --git a/packages/backend-core/src/environment.ts b/packages/backend-core/src/environment.ts
index 04d09d2eb7..6e2ac94be9 100644
--- a/packages/backend-core/src/environment.ts
+++ b/packages/backend-core/src/environment.ts
@@ -16,9 +16,19 @@ if (!LOADED && isDev() && !isTest()) {
LOADED = true
}
+const DefaultBucketName = {
+ BACKUPS: "backups",
+ APPS: "prod-budi-app-assets",
+ TEMPLATES: "templates",
+ GLOBAL: "global",
+ CLOUD: "prod-budi-tenant-uploads",
+ PLUGINS: "plugins",
+}
+
const env = {
isTest,
isDev,
+ JS_BCRYPT: process.env.JS_BCRYPT,
JWT_SECRET: process.env.JWT_SECRET,
COUCH_DB_URL: process.env.COUCH_DB_URL || "http://localhost:4005",
COUCH_DB_USERNAME: process.env.COUCH_DB_USER,
@@ -36,7 +46,7 @@ const env = {
MULTI_TENANCY: process.env.MULTI_TENANCY,
ACCOUNT_PORTAL_URL:
process.env.ACCOUNT_PORTAL_URL || "https://account.budibase.app",
- ACCOUNT_PORTAL_API_KEY: process.env.ACCOUNT_PORTAL_API_KEY,
+ ACCOUNT_PORTAL_API_KEY: process.env.ACCOUNT_PORTAL_API_KEY || "",
DISABLE_ACCOUNT_PORTAL: process.env.DISABLE_ACCOUNT_PORTAL,
SELF_HOSTED: !!parseInt(process.env.SELF_HOSTED || ""),
COOKIE_DOMAIN: process.env.COOKIE_DOMAIN,
@@ -44,13 +54,17 @@ const env = {
POSTHOG_TOKEN: process.env.POSTHOG_TOKEN,
ENABLE_ANALYTICS: process.env.ENABLE_ANALYTICS,
TENANT_FEATURE_FLAGS: process.env.TENANT_FEATURE_FLAGS,
- BACKUPS_BUCKET_NAME: process.env.BACKUPS_BUCKET_NAME || "backups",
- APPS_BUCKET_NAME: process.env.APPS_BUCKET_NAME || "prod-budi-app-assets",
- TEMPLATES_BUCKET_NAME: process.env.TEMPLATES_BUCKET_NAME || "templates",
- GLOBAL_BUCKET_NAME: process.env.GLOBAL_BUCKET_NAME || "global",
+ BACKUPS_BUCKET_NAME:
+ process.env.BACKUPS_BUCKET_NAME || DefaultBucketName.BACKUPS,
+ APPS_BUCKET_NAME: process.env.APPS_BUCKET_NAME || DefaultBucketName.APPS,
+ TEMPLATES_BUCKET_NAME:
+ process.env.TEMPLATES_BUCKET_NAME || DefaultBucketName.TEMPLATES,
+ GLOBAL_BUCKET_NAME:
+ process.env.GLOBAL_BUCKET_NAME || DefaultBucketName.GLOBAL,
GLOBAL_CLOUD_BUCKET_NAME:
- process.env.GLOBAL_CLOUD_BUCKET_NAME || "prod-budi-tenant-uploads",
- PLUGIN_BUCKET_NAME: process.env.PLUGIN_BUCKET_NAME || "plugins",
+ process.env.GLOBAL_CLOUD_BUCKET_NAME || DefaultBucketName.CLOUD,
+ PLUGIN_BUCKET_NAME:
+ process.env.PLUGIN_BUCKET_NAME || DefaultBucketName.PLUGINS,
USE_COUCH: process.env.USE_COUCH || true,
DISABLE_DEVELOPER_LICENSE: process.env.DISABLE_DEVELOPER_LICENSE,
DEFAULT_LICENSE: process.env.DEFAULT_LICENSE,
diff --git a/packages/backend-core/src/errors/base.js b/packages/backend-core/src/errors/base.js
deleted file mode 100644
index 7cb0c0fc23..0000000000
--- a/packages/backend-core/src/errors/base.js
+++ /dev/null
@@ -1,11 +0,0 @@
-class BudibaseError extends Error {
- constructor(message, code, type) {
- super(message)
- this.code = code
- this.type = type
- }
-}
-
-module.exports = {
- BudibaseError,
-}
diff --git a/packages/backend-core/src/errors/base.ts b/packages/backend-core/src/errors/base.ts
new file mode 100644
index 0000000000..801dcf168d
--- /dev/null
+++ b/packages/backend-core/src/errors/base.ts
@@ -0,0 +1,10 @@
+export class BudibaseError extends Error {
+ code: string
+ type: string
+
+ constructor(message: string, code: string, type: string) {
+ super(message)
+ this.code = code
+ this.type = type
+ }
+}
diff --git a/packages/backend-core/src/errors/generic.js b/packages/backend-core/src/errors/generic.js
deleted file mode 100644
index 5c7661f035..0000000000
--- a/packages/backend-core/src/errors/generic.js
+++ /dev/null
@@ -1,11 +0,0 @@
-const { BudibaseError } = require("./base")
-
-class GenericError extends BudibaseError {
- constructor(message, code, type) {
- super(message, code, type ? type : "generic")
- }
-}
-
-module.exports = {
- GenericError,
-}
diff --git a/packages/backend-core/src/errors/generic.ts b/packages/backend-core/src/errors/generic.ts
new file mode 100644
index 0000000000..71b3352438
--- /dev/null
+++ b/packages/backend-core/src/errors/generic.ts
@@ -0,0 +1,7 @@
+import { BudibaseError } from "./base"
+
+export class GenericError extends BudibaseError {
+ constructor(message: string, code: string, type: string) {
+ super(message, code, type ? type : "generic")
+ }
+}
diff --git a/packages/backend-core/src/errors/http.js b/packages/backend-core/src/errors/http.js
deleted file mode 100644
index 8e7cab4638..0000000000
--- a/packages/backend-core/src/errors/http.js
+++ /dev/null
@@ -1,12 +0,0 @@
-const { GenericError } = require("./generic")
-
-class HTTPError extends GenericError {
- constructor(message, httpStatus, code = "http", type = "generic") {
- super(message, code, type)
- this.status = httpStatus
- }
-}
-
-module.exports = {
- HTTPError,
-}
diff --git a/packages/backend-core/src/errors/http.ts b/packages/backend-core/src/errors/http.ts
new file mode 100644
index 0000000000..182e009f58
--- /dev/null
+++ b/packages/backend-core/src/errors/http.ts
@@ -0,0 +1,15 @@
+import { GenericError } from "./generic"
+
+export class HTTPError extends GenericError {
+ status: number
+
+ constructor(
+ message: string,
+ httpStatus: number,
+ code = "http",
+ type = "generic"
+ ) {
+ super(message, code, type)
+ this.status = httpStatus
+ }
+}
diff --git a/packages/backend-core/src/errors/index.js b/packages/backend-core/src/errors/index.ts
similarity index 65%
rename from packages/backend-core/src/errors/index.js
rename to packages/backend-core/src/errors/index.ts
index 31ffd739a0..be6657093d 100644
--- a/packages/backend-core/src/errors/index.js
+++ b/packages/backend-core/src/errors/index.ts
@@ -1,5 +1,6 @@
-const http = require("./http")
-const licensing = require("./licensing")
+import { HTTPError } from "./http"
+import { UsageLimitError, FeatureDisabledError } from "./licensing"
+import * as licensing from "./licensing"
const codes = {
...licensing.codes,
@@ -11,7 +12,7 @@ const context = {
...licensing.context,
}
-const getPublicError = err => {
+const getPublicError = (err: any) => {
let error
if (err.code || err.type) {
// add generic error information
@@ -32,13 +33,15 @@ const getPublicError = err => {
return error
}
-module.exports = {
+const pkg = {
codes,
types,
errors: {
- UsageLimitError: licensing.UsageLimitError,
- FeatureDisabledError: licensing.FeatureDisabledError,
- HTTPError: http.HTTPError,
+ UsageLimitError,
+ FeatureDisabledError,
+ HTTPError,
},
getPublicError,
}
+
+export = pkg
diff --git a/packages/backend-core/src/errors/licensing.js b/packages/backend-core/src/errors/licensing.js
deleted file mode 100644
index 85d207ac35..0000000000
--- a/packages/backend-core/src/errors/licensing.js
+++ /dev/null
@@ -1,43 +0,0 @@
-const { HTTPError } = require("./http")
-
-const type = "license_error"
-
-const codes = {
- USAGE_LIMIT_EXCEEDED: "usage_limit_exceeded",
- FEATURE_DISABLED: "feature_disabled",
-}
-
-const context = {
- [codes.USAGE_LIMIT_EXCEEDED]: err => {
- return {
- limitName: err.limitName,
- }
- },
- [codes.FEATURE_DISABLED]: err => {
- return {
- featureName: err.featureName,
- }
- },
-}
-
-class UsageLimitError extends HTTPError {
- constructor(message, limitName) {
- super(message, 400, codes.USAGE_LIMIT_EXCEEDED, type)
- this.limitName = limitName
- }
-}
-
-class FeatureDisabledError extends HTTPError {
- constructor(message, featureName) {
- super(message, 400, codes.FEATURE_DISABLED, type)
- this.featureName = featureName
- }
-}
-
-module.exports = {
- type,
- codes,
- context,
- UsageLimitError,
- FeatureDisabledError,
-}
diff --git a/packages/backend-core/src/errors/licensing.ts b/packages/backend-core/src/errors/licensing.ts
new file mode 100644
index 0000000000..7ffcefa167
--- /dev/null
+++ b/packages/backend-core/src/errors/licensing.ts
@@ -0,0 +1,39 @@
+import { HTTPError } from "./http"
+
+export const type = "license_error"
+
+export const codes = {
+ USAGE_LIMIT_EXCEEDED: "usage_limit_exceeded",
+ FEATURE_DISABLED: "feature_disabled",
+}
+
+export const context = {
+ [codes.USAGE_LIMIT_EXCEEDED]: (err: any) => {
+ return {
+ limitName: err.limitName,
+ }
+ },
+ [codes.FEATURE_DISABLED]: (err: any) => {
+ return {
+ featureName: err.featureName,
+ }
+ },
+}
+
+export class UsageLimitError extends HTTPError {
+ limitName: string
+
+ constructor(message: string, limitName: string) {
+ super(message, 400, codes.USAGE_LIMIT_EXCEEDED, type)
+ this.limitName = limitName
+ }
+}
+
+export class FeatureDisabledError extends HTTPError {
+ featureName: string
+
+ constructor(message: string, featureName: string) {
+ super(message, 400, codes.FEATURE_DISABLED, type)
+ this.featureName = featureName
+ }
+}
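
The error classes converted to TypeScript above compose in layers: `GenericError` carries a code and type, `HTTPError` adds a status, and the licensing errors pin the code/type to the licensing constants. A minimal sketch of raising and inspecting one of them, based only on the fields shown in this diff; the quota check and limit name are illustrative placeholders.

```ts
import { UsageLimitError } from "./licensing"

// illustrative only: raise a licensing error when a quota check fails
function checkRowQuota(used: number, max: number) {
  if (used >= max) {
    // carries status 400, code "usage_limit_exceeded", type "license_error"
    throw new UsageLimitError("Row quota exceeded", "rows")
  }
}

try {
  checkRowQuota(100, 100)
} catch (err: any) {
  if (err instanceof UsageLimitError) {
    console.log(err.status, err.code, err.type, err.limitName)
    // -> 400 "usage_limit_exceeded" "license_error" "rows"
  }
}
```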
diff --git a/packages/backend-core/src/events/processors/LoggingProcessor.ts b/packages/backend-core/src/events/processors/LoggingProcessor.ts
index a517fba09c..d41a82fbb4 100644
--- a/packages/backend-core/src/events/processors/LoggingProcessor.ts
+++ b/packages/backend-core/src/events/processors/LoggingProcessor.ts
@@ -23,9 +23,11 @@ export default class LoggingProcessor implements EventProcessor {
return
}
let timestampString = getTimestampString(timestamp)
- console.log(
- `[audit] [tenant=${identity.tenantId}] [identityType=${identity.type}] [identity=${identity.id}] ${timestampString} ${event} `
- )
+ let message = `[audit] [tenant=${identity.tenantId}] [identityType=${identity.type}] [identity=${identity.id}] ${timestampString} ${event} `
+ if (env.isDev()) {
+ message = message + `[debug: [properties=${JSON.stringify(properties)}] ]`
+ }
+ console.log(message)
}
async identify(identity: Identity, timestamp?: string | number) {
diff --git a/packages/backend-core/src/events/publishers/datasource.ts b/packages/backend-core/src/events/publishers/datasource.ts
index 3cd68033fc..d3ea7402f9 100644
--- a/packages/backend-core/src/events/publishers/datasource.ts
+++ b/packages/backend-core/src/events/publishers/datasource.ts
@@ -5,8 +5,15 @@ import {
DatasourceCreatedEvent,
DatasourceUpdatedEvent,
DatasourceDeletedEvent,
+ SourceName,
} from "@budibase/types"
+function isCustom(datasource: Datasource) {
+ const sources = Object.values(SourceName)
+ // if not in the base source list, then it must be custom
+ return !sources.includes(datasource.source)
+}
+
export async function created(
datasource: Datasource,
timestamp?: string | number
@@ -14,6 +21,7 @@ export async function created(
const properties: DatasourceCreatedEvent = {
datasourceId: datasource._id as string,
source: datasource.source,
+ custom: isCustom(datasource),
}
await publishEvent(Event.DATASOURCE_CREATED, properties, timestamp)
}
@@ -22,6 +30,7 @@ export async function updated(datasource: Datasource) {
const properties: DatasourceUpdatedEvent = {
datasourceId: datasource._id as string,
source: datasource.source,
+ custom: isCustom(datasource),
}
await publishEvent(Event.DATASOURCE_UPDATED, properties)
}
@@ -30,6 +39,7 @@ export async function deleted(datasource: Datasource) {
const properties: DatasourceDeletedEvent = {
datasourceId: datasource._id as string,
source: datasource.source,
+ custom: isCustom(datasource),
}
await publishEvent(Event.DATASOURCE_DELETED, properties)
}
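
The new `isCustom` helper above flags any datasource whose `source` is not in the built-in `SourceName` enum, so plugin-provided datasources are reported with `custom: true` on create/update/delete events. A rough sketch of the check in isolation; the enum member and the plugin source string below are assumptions for illustration.

```ts
import { Datasource, SourceName } from "@budibase/types"

// mirrors the isCustom check above: anything outside the base enum is custom
const isCustom = (datasource: Datasource) =>
  !Object.values(SourceName).includes(datasource.source)

// assumed shapes for illustration only
const postgres = { source: SourceName.POSTGRES } as Datasource
const plugin = { source: "my-custom-plugin" } as unknown as Datasource

console.log(isCustom(postgres)) // false - built-in source
console.log(isCustom(plugin)) // true  - unknown source, so reported as custom
```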
diff --git a/packages/backend-core/src/events/publishers/group.ts b/packages/backend-core/src/events/publishers/group.ts
index d300873725..b4fd0d1469 100644
--- a/packages/backend-core/src/events/publishers/group.ts
+++ b/packages/backend-core/src/events/publishers/group.ts
@@ -40,9 +40,9 @@ export async function usersAdded(count: number, group: UserGroup) {
await publishEvent(Event.USER_GROUP_USERS_ADDED, properties)
}
-export async function usersDeleted(emails: string[], group: UserGroup) {
+export async function usersDeleted(count: number, group: UserGroup) {
const properties: GroupUsersDeletedEvent = {
- count: emails.length,
+ count,
groupId: group._id as string,
}
await publishEvent(Event.USER_GROUP_USERS_REMOVED, properties)
diff --git a/packages/backend-core/src/events/publishers/index.ts b/packages/backend-core/src/events/publishers/index.ts
index 57fd0bf8e2..6fe42c4bda 100644
--- a/packages/backend-core/src/events/publishers/index.ts
+++ b/packages/backend-core/src/events/publishers/index.ts
@@ -18,3 +18,4 @@ export * as view from "./view"
export * as installation from "./installation"
export * as backfill from "./backfill"
export * as group from "./group"
+export * as plugin from "./plugin"
diff --git a/packages/backend-core/src/events/publishers/license.ts b/packages/backend-core/src/events/publishers/license.ts
index 1adc71652e..84472e408f 100644
--- a/packages/backend-core/src/events/publishers/license.ts
+++ b/packages/backend-core/src/events/publishers/license.ts
@@ -1,27 +1,78 @@
import { publishEvent } from "../events"
import {
Event,
- License,
LicenseActivatedEvent,
- LicenseDowngradedEvent,
- LicenseUpdatedEvent,
- LicenseUpgradedEvent,
+ LicensePlanChangedEvent,
+ LicenseTierChangedEvent,
+ PlanType,
+ Account,
+ LicensePortalOpenedEvent,
+ LicenseCheckoutSuccessEvent,
+ LicenseCheckoutOpenedEvent,
+ LicensePaymentFailedEvent,
+ LicensePaymentRecoveredEvent,
} from "@budibase/types"
-// TODO
-export async function updgraded(license: License) {
- const properties: LicenseUpgradedEvent = {}
- await publishEvent(Event.LICENSE_UPGRADED, properties)
+export async function tierChanged(account: Account, from: number, to: number) {
+ const properties: LicenseTierChangedEvent = {
+ accountId: account.accountId,
+ to,
+ from,
+ }
+ await publishEvent(Event.LICENSE_TIER_CHANGED, properties)
}
-// TODO
-export async function downgraded(license: License) {
- const properties: LicenseDowngradedEvent = {}
- await publishEvent(Event.LICENSE_DOWNGRADED, properties)
+export async function planChanged(
+ account: Account,
+ from: PlanType,
+ to: PlanType
+) {
+ const properties: LicensePlanChangedEvent = {
+ accountId: account.accountId,
+ to,
+ from,
+ }
+ await publishEvent(Event.LICENSE_PLAN_CHANGED, properties)
}
-// TODO
-export async function activated(license: License) {
- const properties: LicenseActivatedEvent = {}
+export async function activated(account: Account) {
+ const properties: LicenseActivatedEvent = {
+ accountId: account.accountId,
+ }
await publishEvent(Event.LICENSE_ACTIVATED, properties)
}
+
+export async function checkoutOpened(account: Account) {
+ const properties: LicenseCheckoutOpenedEvent = {
+ accountId: account.accountId,
+ }
+ await publishEvent(Event.LICENSE_CHECKOUT_OPENED, properties)
+}
+
+export async function checkoutSuccess(account: Account) {
+ const properties: LicenseCheckoutSuccessEvent = {
+ accountId: account.accountId,
+ }
+ await publishEvent(Event.LICENSE_CHECKOUT_SUCCESS, properties)
+}
+
+export async function portalOpened(account: Account) {
+ const properties: LicensePortalOpenedEvent = {
+ accountId: account.accountId,
+ }
+ await publishEvent(Event.LICENSE_PORTAL_OPENED, properties)
+}
+
+export async function paymentFailed(account: Account) {
+ const properties: LicensePaymentFailedEvent = {
+ accountId: account.accountId,
+ }
+ await publishEvent(Event.LICENSE_PAYMENT_FAILED, properties)
+}
+
+export async function paymentRecovered(account: Account) {
+ const properties: LicensePaymentRecoveredEvent = {
+ accountId: account.accountId,
+ }
+ await publishEvent(Event.LICENSE_PAYMENT_RECOVERED, properties)
+}
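
Each new publisher above follows the same shape: build a typed properties object from the account and publish the matching event. A small usage sketch; the account object is a minimal stand-in and the tier numbers are placeholders.

```ts
import * as licenseEvents from "./license"
import { Account } from "@budibase/types"

// a minimal stand-in - real Account documents carry many more fields
const account = { accountId: "acc_123" } as Account

async function example() {
  // publishes LICENSE_TIER_CHANGED with { accountId, from, to }
  await licenseEvents.tierChanged(account, 1, 2)

  // publishes LICENSE_CHECKOUT_OPENED with { accountId }
  await licenseEvents.checkoutOpened(account)
}
```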
diff --git a/packages/backend-core/src/events/publishers/plugin.ts b/packages/backend-core/src/events/publishers/plugin.ts
new file mode 100644
index 0000000000..4e4d87cf56
--- /dev/null
+++ b/packages/backend-core/src/events/publishers/plugin.ts
@@ -0,0 +1,41 @@
+import { publishEvent } from "../events"
+import {
+ Event,
+ Plugin,
+ PluginDeletedEvent,
+ PluginImportedEvent,
+ PluginInitEvent,
+} from "@budibase/types"
+
+export async function init(plugin: Plugin) {
+ const properties: PluginInitEvent = {
+ type: plugin.schema.type,
+ name: plugin.name,
+ description: plugin.description,
+ version: plugin.version,
+ }
+ await publishEvent(Event.PLUGIN_INIT, properties)
+}
+
+export async function imported(plugin: Plugin) {
+ const properties: PluginImportedEvent = {
+ pluginId: plugin._id as string,
+ type: plugin.schema.type,
+ source: plugin.source,
+ name: plugin.name,
+ description: plugin.description,
+ version: plugin.version,
+ }
+ await publishEvent(Event.PLUGIN_IMPORTED, properties)
+}
+
+export async function deleted(plugin: Plugin) {
+ const properties: PluginDeletedEvent = {
+ pluginId: plugin._id as string,
+ type: plugin.schema.type,
+ name: plugin.name,
+ description: plugin.description,
+ version: plugin.version,
+ }
+ await publishEvent(Event.PLUGIN_DELETED, properties)
+}
diff --git a/packages/backend-core/src/featureFlags/index.js b/packages/backend-core/src/featureFlags/index.js
index 103ac4df59..8a8162d0ba 100644
--- a/packages/backend-core/src/featureFlags/index.js
+++ b/packages/backend-core/src/featureFlags/index.js
@@ -31,23 +31,29 @@ const TENANT_FEATURE_FLAGS = getFeatureFlags()
exports.isEnabled = featureFlag => {
const tenantId = tenancy.getTenantId()
-
- return (
- TENANT_FEATURE_FLAGS &&
- TENANT_FEATURE_FLAGS[tenantId] &&
- TENANT_FEATURE_FLAGS[tenantId].includes(featureFlag)
- )
+ const flags = exports.getTenantFeatureFlags(tenantId)
+ return flags.includes(featureFlag)
}
exports.getTenantFeatureFlags = tenantId => {
- if (TENANT_FEATURE_FLAGS && TENANT_FEATURE_FLAGS[tenantId]) {
- return TENANT_FEATURE_FLAGS[tenantId]
+ const flags = []
+
+ if (TENANT_FEATURE_FLAGS) {
+ const globalFlags = TENANT_FEATURE_FLAGS["*"]
+ const tenantFlags = TENANT_FEATURE_FLAGS[tenantId]
+
+ if (globalFlags) {
+ flags.push(...globalFlags)
+ }
+ if (tenantFlags) {
+ flags.push(...tenantFlags)
+ }
}
- return []
+ return flags
}
-exports.FeatureFlag = {
+exports.TenantFeatureFlag = {
LICENSING: "LICENSING",
GOOGLE_SHEETS: "GOOGLE_SHEETS",
USER_GROUPS: "USER_GROUPS",
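
The rewrite above makes `getTenantFeatureFlags` merge global flags (keyed by `"*"`) with tenant-specific ones, and `isEnabled` now goes through that merged list. A small sketch of the merge behaviour, using a hard-coded flag map rather than the real `TENANT_FEATURE_FLAGS` environment parsing, which is left out here.

```ts
// stand-in for the parsed TENANT_FEATURE_FLAGS structure used above
const TENANT_FEATURE_FLAGS: Record<string, string[]> = {
  "*": ["LICENSING"], // applies to every tenant
  tenant1: ["USER_GROUPS"], // applies to tenant1 only
}

function getTenantFeatureFlags(tenantId: string): string[] {
  const flags: string[] = []
  const globalFlags = TENANT_FEATURE_FLAGS["*"]
  const tenantFlags = TENANT_FEATURE_FLAGS[tenantId]
  if (globalFlags) flags.push(...globalFlags)
  if (tenantFlags) flags.push(...tenantFlags)
  return flags
}

console.log(getTenantFeatureFlags("tenant1")) // ["LICENSING", "USER_GROUPS"]
console.log(getTenantFeatureFlags("tenant2")) // ["LICENSING"]
```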
diff --git a/packages/backend-core/src/hashing.js b/packages/backend-core/src/hashing.js
index 45abe2f9bd..7524e66043 100644
--- a/packages/backend-core/src/hashing.js
+++ b/packages/backend-core/src/hashing.js
@@ -1,5 +1,5 @@
-const bcrypt = require("bcrypt")
const env = require("./environment")
+const bcrypt = env.JS_BCRYPT ? require("bcryptjs") : require("bcrypt")
const { v4 } = require("uuid")
const SALT_ROUNDS = env.SALT_ROUNDS || 10
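
The change above selects the pure-JS `bcryptjs` implementation when `JS_BCRYPT` is set, and keeps the native `bcrypt` binding otherwise. A hedged sketch of why the swap is transparent: both libraries expose promise-returning `hash`/`compare` calls with the same arguments, so callers are unaffected.

```ts
// the env switch above means either implementation can back these calls,
// because bcrypt and bcryptjs expose compatible hash/compare APIs
const bcrypt = process.env.JS_BCRYPT ? require("bcryptjs") : require("bcrypt")

const SALT_ROUNDS = 10

async function example(password: string) {
  const hashed: string = await bcrypt.hash(password, SALT_ROUNDS)
  const matches: boolean = await bcrypt.compare(password, hashed)
  return matches // true
}
```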
diff --git a/packages/backend-core/src/index.ts b/packages/backend-core/src/index.ts
index 74e79e7b95..42cad17620 100644
--- a/packages/backend-core/src/index.ts
+++ b/packages/backend-core/src/index.ts
@@ -1,5 +1,4 @@
import errors from "./errors"
-
const errorClasses = errors.errors
import * as events from "./events"
import * as migrations from "./migrations"
@@ -15,9 +14,11 @@ import deprovisioning from "./context/deprovision"
import auth from "./auth"
import constants from "./constants"
import * as dbConstants from "./db/constants"
-import logging from "./logging"
+import * as logging from "./logging"
import pino from "./pino"
import * as middleware from "./middleware"
+import plugins from "./plugin"
+import encryption from "./security/encryption"
// mimic the outer package exports
import * as db from "./pkg/db"
@@ -36,6 +37,7 @@ const core = {
db,
...dbConstants,
redis,
+ locks: redis.redlock,
objectStore,
utils,
users,
@@ -56,9 +58,11 @@ const core = {
errors,
logging,
roles,
+ plugins,
...pino,
...errorClasses,
middleware,
+ encryption,
}
export = core
diff --git a/packages/backend-core/src/middleware/authenticated.ts b/packages/backend-core/src/middleware/authenticated.ts
index 062070785d..a3c6b67cde 100644
--- a/packages/backend-core/src/middleware/authenticated.ts
+++ b/packages/backend-core/src/middleware/authenticated.ts
@@ -106,6 +106,7 @@ export = (
user = await getUser(userId, session.tenantId)
}
user.csrfToken = session.csrfToken
+
if (session?.lastAccessedAt < timeMinusOneMinute()) {
// make sure we denote that the session is still in use
await updateSessionTTL(session)
diff --git a/packages/backend-core/src/migrations/definitions.ts b/packages/backend-core/src/migrations/definitions.ts
index 34ec0f0cad..6eba56ab43 100644
--- a/packages/backend-core/src/migrations/definitions.ts
+++ b/packages/backend-core/src/migrations/definitions.ts
@@ -11,20 +11,12 @@ export const DEFINITIONS: MigrationDefinition[] = [
},
{
type: MigrationType.GLOBAL,
- name: MigrationName.QUOTAS_1,
+ name: MigrationName.SYNC_QUOTAS,
},
{
type: MigrationType.APP,
name: MigrationName.APP_URLS,
},
- {
- type: MigrationType.GLOBAL,
- name: MigrationName.DEVELOPER_QUOTA,
- },
- {
- type: MigrationType.GLOBAL,
- name: MigrationName.PUBLISHED_APP_QUOTA,
- },
{
type: MigrationType.APP,
name: MigrationName.EVENT_APP_BACKFILL,
diff --git a/packages/backend-core/src/migrations/migrations.ts b/packages/backend-core/src/migrations/migrations.ts
index ca238ff80e..90a12acec2 100644
--- a/packages/backend-core/src/migrations/migrations.ts
+++ b/packages/backend-core/src/migrations/migrations.ts
@@ -3,12 +3,8 @@ import { doWithDB } from "../db"
import { DocumentType, StaticDatabases } from "../db/constants"
import { getAllApps } from "../db/utils"
import environment from "../environment"
-import {
- doInTenant,
- getTenantIds,
- getGlobalDBName,
- getTenantId,
-} from "../tenancy"
+import { doInTenant, getTenantIds, getTenantId } from "../tenancy"
+import { getGlobalDBName } from "../db/tenancy"
import * as context from "../context"
import { DEFINITIONS } from "."
import {
diff --git a/packages/backend-core/src/objectStore/index.ts b/packages/backend-core/src/objectStore/index.ts
index a9f7981844..17e002cc49 100644
--- a/packages/backend-core/src/objectStore/index.ts
+++ b/packages/backend-core/src/objectStore/index.ts
@@ -182,6 +182,11 @@ export const streamUpload = async (
...extra,
ContentType: "application/javascript",
}
+ } else if (filename?.endsWith(".svg")) {
+ extra = {
+ ...extra,
+ ContentType: "image",
+ }
}
const params = {
@@ -307,9 +312,13 @@ export const uploadDirectory = async (
return files
}
-exports.downloadTarballDirect = async (url: string, path: string) => {
+exports.downloadTarballDirect = async (
+ url: string,
+ path: string,
+ headers = {}
+) => {
path = sanitizeKey(path)
- const response = await fetch(url)
+ const response = await fetch(url, { headers })
if (!response.ok) {
throw new Error(`unexpected response ${response.statusText}`)
}
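
`downloadTarballDirect` now accepts an optional headers object that is forwarded straight to `fetch`, which allows authenticated downloads (for example, pulling a plugin tarball from a private URL). A usage sketch; the import path, URL, token, and destination path are placeholders.

```ts
// assumed import path - use however objectStore is exposed in your build
const objectStore = require("@budibase/backend-core/objectStore")

async function fetchPrivateTarball(token: string) {
  // the headers object is passed directly to fetch by the change above
  await objectStore.downloadTarballDirect(
    "https://example.com/plugins/my-plugin.tar.gz", // placeholder URL
    "plugins/my-plugin", // destination path, sanitised internally
    { Authorization: `Bearer ${token}` }
  )
}
```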
diff --git a/packages/backend-core/src/objectStore/utils.js b/packages/backend-core/src/objectStore/utils.js
index acc1b9904e..9cf4f5f70e 100644
--- a/packages/backend-core/src/objectStore/utils.js
+++ b/packages/backend-core/src/objectStore/utils.js
@@ -2,6 +2,11 @@ const { join } = require("path")
const { tmpdir } = require("os")
const env = require("../environment")
+/****************************************************
+ * NOTE: When adding a new bucket - make *
+ * sure that S3 usages (like budibase-infra) *
+ * have been updated to have a unique bucket name. *
+ ****************************************************/
exports.ObjectStoreBuckets = {
BACKUPS: env.BACKUPS_BUCKET_NAME,
APPS: env.APPS_BUCKET_NAME,
diff --git a/packages/backend-core/src/pkg/context.ts b/packages/backend-core/src/pkg/context.ts
index 5caa82ab0c..4915cc6e41 100644
--- a/packages/backend-core/src/pkg/context.ts
+++ b/packages/backend-core/src/pkg/context.ts
@@ -8,6 +8,7 @@ import {
updateAppId,
doInAppContext,
doInTenant,
+ doInContext,
} from "../context"
import * as identity from "../context/identity"
@@ -20,5 +21,6 @@ export = {
updateAppId,
doInAppContext,
doInTenant,
+ doInContext,
identity,
}
diff --git a/packages/backend-core/src/pkg/redis.ts b/packages/backend-core/src/pkg/redis.ts
index 65ab186d9a..297c2b54f4 100644
--- a/packages/backend-core/src/pkg/redis.ts
+++ b/packages/backend-core/src/pkg/redis.ts
@@ -3,9 +3,11 @@
import Client from "../redis"
import utils from "../redis/utils"
import clients from "../redis/init"
+import * as redlock from "../redis/redlock"
export = {
Client,
utils,
clients,
+ redlock,
}
diff --git a/packages/backend-core/src/plugin/index.ts b/packages/backend-core/src/plugin/index.ts
new file mode 100644
index 0000000000..a6d1853007
--- /dev/null
+++ b/packages/backend-core/src/plugin/index.ts
@@ -0,0 +1,7 @@
+import * as utils from "./utils"
+
+const pkg = {
+ ...utils,
+}
+
+export = pkg
diff --git a/packages/cli/src/plugins/validate.js b/packages/backend-core/src/plugin/utils.js
similarity index 80%
rename from packages/cli/src/plugins/validate.js
rename to packages/backend-core/src/plugin/utils.js
index a6b4555cbd..60a40f3a76 100644
--- a/packages/cli/src/plugins/validate.js
+++ b/packages/backend-core/src/plugin/utils.js
@@ -1,5 +1,8 @@
-const { PluginTypes } = require("./constants")
-const { DatasourceFieldType, QueryType } = require("@budibase/types")
+const {
+ DatasourceFieldType,
+ QueryType,
+ PluginType,
+} = require("@budibase/types")
const joi = require("joi")
const DATASOURCE_TYPES = [
@@ -64,25 +67,30 @@ function validateDatasource(schema) {
description: joi.string().required(),
datasource: joi.object().pattern(joi.string(), fieldValidator).required(),
query: joi
- .object({
- create: queryValidator,
- read: queryValidator,
- update: queryValidator,
- delete: queryValidator,
- })
+ .object()
+ .pattern(joi.string(), queryValidator)
.unknown(true)
.required(),
+ extra: joi.object().pattern(
+ joi.string(),
+ joi.object({
+ type: joi.string().required(),
+ displayName: joi.string().required(),
+ required: joi.boolean(),
+ data: joi.object(),
+ })
+ ),
}),
})
runJoi(validator, schema)
}
exports.validate = schema => {
- switch (schema.type) {
- case PluginTypes.COMPONENT:
+ switch (schema?.type) {
+ case PluginType.COMPONENT:
validateComponent(schema)
break
- case PluginTypes.DATASOURCE:
+ case PluginType.DATASOURCE:
validateDatasource(schema)
break
default:
diff --git a/packages/backend-core/src/redis/index.ts b/packages/backend-core/src/redis/index.ts
index 206110366f..8a15320ff3 100644
--- a/packages/backend-core/src/redis/index.ts
+++ b/packages/backend-core/src/redis/index.ts
@@ -214,6 +214,34 @@ export = class RedisWrapper {
}
}
+ async bulkGet(keys: string[]) {
+ const db = this._db
+ if (keys.length === 0) {
+ return {}
+ }
+ const prefixedKeys = keys.map(key => addDbPrefix(db, key))
+ let response = await this.getClient().mget(prefixedKeys)
+ if (Array.isArray(response)) {
+ let final: any = {}
+ let count = 0
+ for (let result of response) {
+ if (result) {
+ let parsed
+ try {
+ parsed = JSON.parse(result)
+ } catch (err) {
+ parsed = result
+ }
+ final[keys[count]] = parsed
+ }
+ count++
+ }
+ return final
+ } else {
+ throw new Error(`Invalid response: ${response}`)
+ }
+ }
+
async store(key: string, value: any, expirySeconds: number | null = null) {
const db = this._db
if (typeof value === "object") {
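
The new `bulkGet` above issues a single `MGET` for a list of keys, JSON-parses each hit, and returns a key-to-value map, silently skipping keys with no value. A short sketch of how it pairs with the existing `store`, assuming a client built the same way as in `redis/init.js`.

```ts
const Client = require("./index")
const utils = require("./utils")

async function example() {
  const cache = await new Client(utils.Databases.GENERIC_CACHE).init()

  await cache.store("user:1", { name: "Ada" })
  await cache.store("user:2", { name: "Grace" })

  // one MGET under the hood; missing keys are simply absent from the result
  const found = await cache.bulkGet(["user:1", "user:2", "user:3"])
  // -> { "user:1": { name: "Ada" }, "user:2": { name: "Grace" } }
  return found
}
```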
diff --git a/packages/backend-core/src/redis/init.js b/packages/backend-core/src/redis/init.js
index 8e5d10f838..3150ef2c1c 100644
--- a/packages/backend-core/src/redis/init.js
+++ b/packages/backend-core/src/redis/init.js
@@ -1,27 +1,23 @@
const Client = require("./index")
const utils = require("./utils")
-const { getRedlock } = require("./redlock")
-let userClient, sessionClient, appClient, cacheClient, writethroughClient
-let migrationsRedlock
-
-// turn retry off so that only one instance can ever hold the lock
-const migrationsRedlockConfig = { retryCount: 0 }
+let userClient,
+ sessionClient,
+ appClient,
+ cacheClient,
+ writethroughClient,
+ lockClient
async function init() {
userClient = await new Client(utils.Databases.USER_CACHE).init()
sessionClient = await new Client(utils.Databases.SESSIONS).init()
appClient = await new Client(utils.Databases.APP_METADATA).init()
cacheClient = await new Client(utils.Databases.GENERIC_CACHE).init()
+ lockClient = await new Client(utils.Databases.LOCKS).init()
writethroughClient = await new Client(
utils.Databases.WRITE_THROUGH,
utils.SelectableDatabases.WRITE_THROUGH
).init()
- // pass the underlying ioredis client to redlock
- migrationsRedlock = getRedlock(
- cacheClient.getClient(),
- migrationsRedlockConfig
- )
}
process.on("exit", async () => {
@@ -30,6 +26,7 @@ process.on("exit", async () => {
if (appClient) await appClient.finish()
if (cacheClient) await cacheClient.finish()
if (writethroughClient) await writethroughClient.finish()
+ if (lockClient) await lockClient.finish()
})
module.exports = {
@@ -63,10 +60,10 @@ module.exports = {
}
return writethroughClient
},
- getMigrationsRedlock: async () => {
- if (!migrationsRedlock) {
+ getLockClient: async () => {
+ if (!lockClient) {
await init()
}
- return migrationsRedlock
+ return lockClient
},
}
diff --git a/packages/backend-core/src/redis/redlock.ts b/packages/backend-core/src/redis/redlock.ts
index beef375b55..abb13b2534 100644
--- a/packages/backend-core/src/redis/redlock.ts
+++ b/packages/backend-core/src/redis/redlock.ts
@@ -1,14 +1,37 @@
-import Redlock from "redlock"
+import Redlock, { Options } from "redlock"
+import { getLockClient } from "./init"
+import { LockOptions, LockType } from "@budibase/types"
+import * as tenancy from "../tenancy"
-export const getRedlock = (redisClient: any, opts = { retryCount: 10 }) => {
- return new Redlock([redisClient], {
+let noRetryRedlock: Redlock | undefined
+
+const getClient = async (type: LockType): Promise<Redlock> => {
+ switch (type) {
+ case LockType.TRY_ONCE: {
+ if (!noRetryRedlock) {
+ noRetryRedlock = await newRedlock(OPTIONS.TRY_ONCE)
+ }
+ return noRetryRedlock
+ }
+ default: {
+ throw new Error(`Could not get redlock client: ${type}`)
+ }
+ }
+}
+
+export const OPTIONS = {
+ TRY_ONCE: {
+ // immediately throws an error if the lock is already held
+ retryCount: 0,
+ },
+ DEFAULT: {
// the expected clock drift; for more details
// see http://redis.io/topics/distlock
driftFactor: 0.01, // multiplied by lock ttl to determine drift time
// the max number of times Redlock will attempt
// to lock a resource before erroring
- retryCount: opts.retryCount,
+ retryCount: 10,
// the time in ms between attempts
retryDelay: 200, // time in ms
@@ -16,6 +39,45 @@ export const getRedlock = (redisClient: any, opts = { retryCount: 10 }) => {
// the max time in ms randomly added to retries
// to improve performance under high contention
// see https://www.awsarchitectureblog.com/2015/03/backoff.html
- retryJitter: 200, // time in ms
- })
+ retryJitter: 100, // time in ms
+ },
+}
+
+export const newRedlock = async (opts: Options = {}) => {
+ let options = { ...OPTIONS.DEFAULT, ...opts }
+ const redisWrapper = await getLockClient()
+ const client = redisWrapper.getClient()
+ return new Redlock([client], options)
+}
+
+export const doWithLock = async (opts: LockOptions, task: any) => {
+ const redlock = await getClient(opts.type)
+ let lock
+ try {
+ // acquire lock
+ let name: string = `${tenancy.getTenantId()}_${opts.name}`
+ if (opts.nameSuffix) {
+ name = name + `_${opts.nameSuffix}`
+ }
+ lock = await redlock.lock(name, opts.ttl)
+ // perform locked task
+ return task()
+ } catch (e: any) {
+ // lock limit exceeded
+ if (e.name === "LockError") {
+ if (opts.type === LockType.TRY_ONCE) {
+ // don't throw for try-once locks, they will always error
+ // due to retry count (0) exceeded
+ return
+ } else {
+ throw e
+ }
+ } else {
+ throw e
+ }
+ } finally {
+ if (lock) {
+ await lock.unlock()
+ }
+ }
}
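
`doWithLock` above acquires a tenant-scoped redlock, runs the task, and always releases the lock in the `finally` block; for `TRY_ONCE` locks it swallows the `LockError`, so only one instance ever runs the task. A usage sketch, assuming `LockOptions` carries the `type`, `name`, and `ttl` fields the implementation reads.

```ts
import { doWithLock } from "./redlock"
import { LockType } from "@budibase/types"

async function runMigrationsOnce() {
  // if another instance already holds the lock, doWithLock returns undefined
  // instead of throwing, because retryCount is 0 for TRY_ONCE locks
  return doWithLock(
    { type: LockType.TRY_ONCE, name: "migrations", ttl: 60 * 1000 },
    async () => {
      // ... perform the work that must not run concurrently ...
      return "migrated"
    }
  )
}
```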
diff --git a/packages/backend-core/src/redis/utils.js b/packages/backend-core/src/redis/utils.js
index 90b3561f31..af719197b5 100644
--- a/packages/backend-core/src/redis/utils.js
+++ b/packages/backend-core/src/redis/utils.js
@@ -28,6 +28,7 @@ exports.Databases = {
LICENSES: "license",
GENERIC_CACHE: "data_cache",
WRITE_THROUGH: "writeThrough",
+ LOCKS: "locks",
}
/**
diff --git a/packages/backend-core/src/security/roles.js b/packages/backend-core/src/security/roles.js
index 983aebf676..33c9123b63 100644
--- a/packages/backend-core/src/security/roles.js
+++ b/packages/backend-core/src/security/roles.js
@@ -78,7 +78,7 @@ function isBuiltin(role) {
*/
exports.builtinRoleToNumber = id => {
const builtins = exports.getBuiltinRoles()
- const MAX = Object.values(BUILTIN_IDS).length + 1
+ const MAX = Object.values(builtins).length + 1
if (id === BUILTIN_IDS.ADMIN || id === BUILTIN_IDS.BUILDER) {
return MAX
}
@@ -94,6 +94,22 @@ exports.builtinRoleToNumber = id => {
return count
}
+/**
+ * Converts any role to a number, but has to be async to get the roles from db.
+ */
+exports.roleToNumber = async id => {
+ if (exports.isBuiltin(id)) {
+ return exports.builtinRoleToNumber(id)
+ }
+ const hierarchy = await exports.getUserRoleHierarchy(id)
+ for (let role of hierarchy) {
+ if (isBuiltin(role.inherits)) {
+ return exports.builtinRoleToNumber(role.inherits) + 1
+ }
+ }
+ return 0
+}
+
/**
* Returns whichever builtin roleID is lower.
*/
@@ -172,7 +188,7 @@ async function getAllUserRoles(userRoleId) {
* to determine if a user can access something that requires a specific role.
* @param {string} userRoleId The user's role ID, this can be found in their access token.
* @param {object} opts Various options, such as whether to only retrieve the IDs (default true).
- * @returns {Promise} returns an ordered array of the roles, with the first being their
+ * @returns {Promise} returns an ordered array of the roles, with the first being their
* highest level of access and the last being the lowest level.
*/
exports.getUserRoleHierarchy = async (userRoleId, opts = { idOnly: true }) => {
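
The new `roleToNumber` above lets custom roles be compared numerically against the builtin hierarchy: builtins resolve directly, a custom role resolves to one more than the builtin it inherits from, and anything unresolvable falls back to 0. A brief sketch of the intent; the custom role ID is a placeholder and the call must run in an app context so the role hierarchy can be read from the DB.

```ts
const roles = require("./roles")

async function example() {
  // builtin roles map straight onto the builtin ordering
  const basic = roles.builtinRoleToNumber("BASIC")

  // a custom role inheriting from a builtin ranks just above that builtin
  // (placeholder ID - custom roles are stored per app)
  const custom = await roles.roleToNumber("role_custom_sales")

  return { basic, custom }
}
```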
diff --git a/packages/backend-core/src/security/sessions.ts b/packages/backend-core/src/security/sessions.ts
index f621b99dc2..33230afc60 100644
--- a/packages/backend-core/src/security/sessions.ts
+++ b/packages/backend-core/src/security/sessions.ts
@@ -2,28 +2,12 @@ const redis = require("../redis/init")
const { v4: uuidv4 } = require("uuid")
const { logWarn } = require("../logging")
const env = require("../environment")
-
-interface CreateSession {
- sessionId: string
- tenantId: string
- csrfToken?: string
-}
-
-interface Session extends CreateSession {
- userId: string
- lastAccessedAt: string
- createdAt: string
- // make optional attributes required
- csrfToken: string
-}
-
-interface SessionKey {
- key: string
-}
-
-interface ScannedSession {
- value: Session
-}
+import {
+ Session,
+ ScannedSession,
+ SessionKey,
+ CreateSession,
+} from "@budibase/types"
// a week in seconds
const EXPIRY_SECONDS = 86400 * 7
diff --git a/packages/backend-core/src/tenancy/tenancy.ts b/packages/backend-core/src/tenancy/tenancy.ts
index 041f694d34..ad5c6b5287 100644
--- a/packages/backend-core/src/tenancy/tenancy.ts
+++ b/packages/backend-core/src/tenancy/tenancy.ts
@@ -1,6 +1,7 @@
import { doWithDB } from "../db"
-import { StaticDatabases } from "../db/constants"
-import { baseGlobalDBName } from "./utils"
+import { queryPlatformView } from "../db/views"
+import { StaticDatabases, ViewName } from "../db/constants"
+import { getGlobalDBName } from "../db/tenancy"
import {
getTenantId,
DEFAULT_TENANT_ID,
@@ -8,6 +9,7 @@ import {
getTenantIDFromAppID,
} from "../context"
import env from "../environment"
+import { PlatformUser } from "@budibase/types"
const TENANT_DOC = StaticDatabases.PLATFORM_INFO.docs.tenants
const PLATFORM_INFO_DB = StaticDatabases.PLATFORM_INFO.name
@@ -87,15 +89,6 @@ export const tryAddTenant = async (
})
}
-export const getGlobalDBName = (tenantId?: string) => {
- // tenant ID can be set externally, for example user API where
- // new tenants are being created, this may be the case
- if (!tenantId) {
- tenantId = getTenantId()
- }
- return baseGlobalDBName(tenantId)
-}
-
export const doWithGlobalDB = (tenantId: string, cb: any) => {
return doWithDB(getGlobalDBName(tenantId), cb)
}
@@ -116,17 +109,19 @@ export const lookupTenantId = async (userId: string) => {
}
// lookup, could be email or userId, either will return a doc
-export const getTenantUser = async (identifier: string) => {
- return doWithDB(PLATFORM_INFO_DB, async (db: any) => {
- try {
- return await db.get(identifier)
- } catch (err) {
- return null
- }
- })
+export const getTenantUser = async (
+ identifier: string
+): Promise<PlatformUser | null> => {
+ // use the view here and allow to find anyone regardless of casing
+ // Use lowercase to ensure email login is case insensitive
+ const response = queryPlatformView(ViewName.PLATFORM_USERS_LOWERCASE, {
+ keys: [identifier.toLowerCase()],
+ include_docs: true,
+ }) as Promise<PlatformUser>
+ return response
}
-export const isUserInAppTenant = (appId: string, user: any) => {
+export const isUserInAppTenant = (appId: string, user?: any) => {
let userTenantId
if (user) {
userTenantId = user.tenantId || DEFAULT_TENANT_ID
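
`getTenantUser` now resolves platform users through the lowercase platform-users view, so email lookups are case-insensitive and return a typed `PlatformUser` rather than a raw `db.get` result. A small usage sketch; the email handling shown is the only behaviour taken from this diff.

```ts
import { getTenantUser } from "./tenancy"

async function findTenantForLogin(email: string) {
  // "Jane@Example.com" and "jane@example.com" now hit the same view row
  const platformUser = await getTenantUser(email)
  if (!platformUser) {
    return undefined
  }
  return platformUser.tenantId
}
```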
diff --git a/packages/backend-core/src/tenancy/utils.js b/packages/backend-core/src/tenancy/utils.js
deleted file mode 100644
index 70a965ddb7..0000000000
--- a/packages/backend-core/src/tenancy/utils.js
+++ /dev/null
@@ -1,12 +0,0 @@
-const { DEFAULT_TENANT_ID } = require("../constants")
-const { StaticDatabases, SEPARATOR } = require("../db/constants")
-
-exports.baseGlobalDBName = tenantId => {
- let dbName
- if (!tenantId || tenantId === DEFAULT_TENANT_ID) {
- dbName = StaticDatabases.GLOBAL.name
- } else {
- dbName = `${tenantId}${SEPARATOR}${StaticDatabases.GLOBAL.name}`
- }
- return dbName
-}
diff --git a/packages/backend-core/src/users.js b/packages/backend-core/src/users.js
deleted file mode 100644
index 81bf28bb46..0000000000
--- a/packages/backend-core/src/users.js
+++ /dev/null
@@ -1,67 +0,0 @@
-const {
- ViewName,
- getUsersByAppParams,
- getProdAppID,
- generateAppUserID,
-} = require("./db/utils")
-const { queryGlobalView } = require("./db/views")
-const { UNICODE_MAX } = require("./db/constants")
-
-/**
- * Given an email address this will use a view to search through
- * all the users to find one with this email address.
- * @param {string} email the email to lookup the user by.
- */
-exports.getGlobalUserByEmail = async email => {
- if (email == null) {
- throw "Must supply an email address to view"
- }
-
- return await queryGlobalView(ViewName.USER_BY_EMAIL, {
- key: email.toLowerCase(),
- include_docs: true,
- })
-}
-
-exports.searchGlobalUsersByApp = async (appId, opts) => {
- if (typeof appId !== "string") {
- throw new Error("Must provide a string based app ID")
- }
- const params = getUsersByAppParams(appId, {
- include_docs: true,
- })
- params.startkey = opts && opts.startkey ? opts.startkey : params.startkey
- let response = await queryGlobalView(ViewName.USER_BY_APP, params)
- if (!response) {
- response = []
- }
- return Array.isArray(response) ? response : [response]
-}
-
-exports.getGlobalUserByAppPage = (appId, user) => {
- if (!user) {
- return
- }
- return generateAppUserID(getProdAppID(appId), user._id)
-}
-
-/**
- * Performs a starts with search on the global email view.
- */
-exports.searchGlobalUsersByEmail = async (email, opts) => {
- if (typeof email !== "string") {
- throw new Error("Must provide a string to search by")
- }
- const lcEmail = email.toLowerCase()
- // handle if passing up startkey for pagination
- const startkey = opts && opts.startkey ? opts.startkey : lcEmail
- let response = await queryGlobalView(ViewName.USER_BY_EMAIL, {
- ...opts,
- startkey,
- endkey: `${lcEmail}${UNICODE_MAX}`,
- })
- if (!response) {
- response = []
- }
- return Array.isArray(response) ? response : [response]
-}
diff --git a/packages/backend-core/src/users.ts b/packages/backend-core/src/users.ts
new file mode 100644
index 0000000000..44f04749c9
--- /dev/null
+++ b/packages/backend-core/src/users.ts
@@ -0,0 +1,94 @@
+import {
+ ViewName,
+ getUsersByAppParams,
+ getProdAppID,
+ generateAppUserID,
+} from "./db/utils"
+import { queryGlobalView } from "./db/views"
+import { UNICODE_MAX } from "./db/constants"
+import { BulkDocsResponse, User } from "@budibase/types"
+import { getGlobalDB } from "./context"
+import PouchDB from "pouchdb"
+
+export const bulkGetGlobalUsersById = async (userIds: string[]) => {
+ const db = getGlobalDB() as PouchDB.Database
+ return (
+ await db.allDocs({
+ keys: userIds,
+ include_docs: true,
+ })
+ ).rows.map(row => row.doc) as User[]
+}
+
+export const bulkUpdateGlobalUsers = async (users: User[]) => {
+ const db = getGlobalDB() as PouchDB.Database
+ return (await db.bulkDocs(users)) as BulkDocsResponse
+}
+
+/**
+ * Given an email address this will use a view to search through
+ * all the users to find one with this email address.
+ * @param {string} email the email to lookup the user by.
+ */
+export const getGlobalUserByEmail = async (
+ email: String
+): Promise<User | undefined> => {
+ if (email == null) {
+ throw "Must supply an email address to view"
+ }
+
+ const response = await queryGlobalView(ViewName.USER_BY_EMAIL, {
+ key: email.toLowerCase(),
+ include_docs: true,
+ })
+
+ if (Array.isArray(response)) {
+ // shouldn't be able to happen, but need to handle just in case
+ throw new Error(`Multiple users found with email address: ${email}`)
+ }
+
+ return response
+}
+
+export const searchGlobalUsersByApp = async (appId: any, opts: any) => {
+ if (typeof appId !== "string") {
+ throw new Error("Must provide a string based app ID")
+ }
+ const params = getUsersByAppParams(appId, {
+ include_docs: true,
+ })
+ params.startkey = opts && opts.startkey ? opts.startkey : params.startkey
+ let response = await queryGlobalView(ViewName.USER_BY_APP, params)
+ if (!response) {
+ response = []
+ }
+ return Array.isArray(response) ? response : [response]
+}
+
+export const getGlobalUserByAppPage = (appId: string, user: User) => {
+ if (!user) {
+ return
+ }
+ return generateAppUserID(getProdAppID(appId), user._id!)
+}
+
+/**
+ * Performs a starts with search on the global email view.
+ */
+export const searchGlobalUsersByEmail = async (email: string, opts: any) => {
+ if (typeof email !== "string") {
+ throw new Error("Must provide a string to search by")
+ }
+ const lcEmail = email.toLowerCase()
+ // handle if passing up startkey for pagination
+ const startkey = opts && opts.startkey ? opts.startkey : lcEmail
+ let response = await queryGlobalView(ViewName.USER_BY_EMAIL, {
+ ...opts,
+ startkey,
+ endkey: `${lcEmail}${UNICODE_MAX}`,
+ })
+ if (!response) {
+ response = []
+ }
+ return Array.isArray(response) ? response : [response]
+}
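
The users module is now TypeScript and gains two bulk helpers: `bulkGetGlobalUsersById` reads users via a single `allDocs` call, and `bulkUpdateGlobalUsers` writes them back with `bulkDocs`. A sketch of a read-modify-write pass over a set of users; the IDs and the field being updated are placeholders, and the calls must run inside a global DB context.

```ts
import { bulkGetGlobalUsersById, bulkUpdateGlobalUsers } from "./users"

async function deactivateUsers(userIds: string[]) {
  // single allDocs call for all IDs
  const users = await bulkGetGlobalUsersById(userIds)

  const updated = users.map(user => ({
    ...user,
    status: "inactive", // placeholder field for illustration
  }))

  // single bulkDocs call; returns the usual per-doc ok/rev/error entries
  return bulkUpdateGlobalUsers(updated)
}
```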
diff --git a/packages/backend-core/src/utils.js b/packages/backend-core/src/utils.js
index 0587267e9a..6b59c7cb72 100644
--- a/packages/backend-core/src/utils.js
+++ b/packages/backend-core/src/utils.js
@@ -42,6 +42,18 @@ async function resolveAppUrl(ctx) {
return app && app.appId ? app.appId : undefined
}
+exports.isServingApp = ctx => {
+ // dev app
+ if (ctx.path.startsWith(`/${APP_PREFIX}`)) {
+ return true
+ }
+ // prod app
+ if (ctx.path.startsWith(PROD_APP_PREFIX)) {
+ return true
+ }
+ return false
+}
+
/**
* Given a request tries to find the appId, which can be located in various places
* @param {object} ctx The main request body to look through.
diff --git a/packages/backend-core/tests/utilities/structures/accounts.ts b/packages/backend-core/tests/utilities/structures/accounts.ts
new file mode 100644
index 0000000000..5d23962575
--- /dev/null
+++ b/packages/backend-core/tests/utilities/structures/accounts.ts
@@ -0,0 +1,23 @@
+import { generator, uuid } from "."
+import { AuthType, CloudAccount, Hosting } from "@budibase/types"
+import * as db from "../../../src/db/utils"
+
+export const cloudAccount = (): CloudAccount => {
+ return {
+ accountId: uuid(),
+ createdAt: Date.now(),
+ verified: true,
+ verificationSent: true,
+ tier: "",
+ email: generator.email(),
+ tenantId: generator.word(),
+ hosting: Hosting.CLOUD,
+ authType: AuthType.PASSWORD,
+ password: generator.word(),
+ tenantName: generator.word(),
+ name: generator.name(),
+ size: "10+",
+ profession: "Software Engineer",
+ budibaseUserId: db.generateGlobalUserID(),
+ }
+}
diff --git a/packages/backend-core/tests/utilities/structures/common.ts b/packages/backend-core/tests/utilities/structures/common.ts
new file mode 100644
index 0000000000..51ae220254
--- /dev/null
+++ b/packages/backend-core/tests/utilities/structures/common.ts
@@ -0,0 +1 @@
+export { v4 as uuid } from "uuid"
diff --git a/packages/backend-core/tests/utilities/structures/index.ts b/packages/backend-core/tests/utilities/structures/index.ts
index 12b6ab7ad6..68064b9715 100644
--- a/packages/backend-core/tests/utilities/structures/index.ts
+++ b/packages/backend-core/tests/utilities/structures/index.ts
@@ -1 +1,8 @@
+export * from "./common"
+
+import Chance from "chance"
+export const generator = new Chance()
+
export * as koa from "./koa"
+export * as accounts from "./accounts"
+export * as licenses from "./licenses"
diff --git a/packages/backend-core/tests/utilities/structures/licenses.ts b/packages/backend-core/tests/utilities/structures/licenses.ts
new file mode 100644
index 0000000000..a541e91860
--- /dev/null
+++ b/packages/backend-core/tests/utilities/structures/licenses.ts
@@ -0,0 +1,18 @@
+import { AccountPlan, License, PlanType, Quotas } from "@budibase/types"
+
+const newPlan = (type: PlanType = PlanType.FREE): AccountPlan => {
+ return {
+ type,
+ }
+}
+
+export const newLicense = (opts: {
+ quotas: Quotas
+ planType?: PlanType
+}): License => {
+ return {
+ features: [],
+ quotas: opts.quotas,
+ plan: newPlan(opts.planType),
+ }
+}
diff --git a/packages/backend-core/yarn.lock b/packages/backend-core/yarn.lock
index 22c17a9444..6bc9b63728 100644
--- a/packages/backend-core/yarn.lock
+++ b/packages/backend-core/yarn.lock
@@ -663,6 +663,11 @@
"@types/connect" "*"
"@types/node" "*"
+"@types/chance@1.1.3":
+ version "1.1.3"
+ resolved "https://registry.yarnpkg.com/@types/chance/-/chance-1.1.3.tgz#d19fe9391288d60fdccd87632bfc9ab2b4523fea"
+ integrity sha512-X6c6ghhe4/sQh4XzcZWSFaTAUOda38GQHmq9BUanYkOE/EO7ZrkazwKmtsj3xzTjkLWmwULE++23g3d3CCWaWw==
+
"@types/connect@*":
version "3.4.35"
resolved "https://registry.yarnpkg.com/@types/connect/-/connect-3.4.35.tgz#5fcf6ae445e4021d1fc2219a4873cc73a3bb2ad1"
@@ -1377,6 +1382,11 @@ bcrypt@5.0.1:
"@mapbox/node-pre-gyp" "^1.0.0"
node-addon-api "^3.1.0"
+bcryptjs@2.4.3:
+ version "2.4.3"
+ resolved "https://registry.yarnpkg.com/bcryptjs/-/bcryptjs-2.4.3.tgz#9ab5627b93e60621ff7cdac5da9733027df1d0cb"
+ integrity sha512-V/Hy/X9Vt7f3BbPJEi8BdVFMByHi+jNXrYkW3huaybV/kQ0KJg0Y6PkEMbn+zeT+i+SiKZ/HMqJGIIt4LZDqNQ==
+
binary-extensions@^2.0.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d"
@@ -1550,6 +1560,11 @@ chalk@^4.0.0, chalk@^4.1.0:
ansi-styles "^4.1.0"
supports-color "^7.1.0"
+chance@1.1.3:
+ version "1.1.3"
+ resolved "https://registry.yarnpkg.com/chance/-/chance-1.1.3.tgz#414f08634ee479c7a316b569050ea20751b82dd3"
+ integrity sha512-XeJsdoVAzDb1WRPRuMBesRSiWpW1uNTo5Fd7mYxPJsAfgX71+jfuCOHOdbyBz2uAUZ8TwKcXgWk3DMedFfJkbg==
+
char-regex@^1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/char-regex/-/char-regex-1.0.2.tgz#d744358226217f981ed58f479b1d6bcc29545dcf"
diff --git a/packages/bbui/package.json b/packages/bbui/package.json
index c9e35848f3..65da2c2cea 100644
--- a/packages/bbui/package.json
+++ b/packages/bbui/package.json
@@ -1,7 +1,7 @@
{
"name": "@budibase/bbui",
"description": "A UI solution used in the different Budibase projects.",
- "version": "1.3.12-alpha.3",
+ "version": "2.0.30-alpha.12",
"license": "MPL-2.0",
"svelte": "src/index.js",
"module": "dist/bbui.es.js",
@@ -38,7 +38,7 @@
],
"dependencies": {
"@adobe/spectrum-css-workflow-icons": "^1.2.1",
- "@budibase/string-templates": "1.3.12-alpha.3",
+ "@budibase/string-templates": "2.0.30-alpha.12",
"@spectrum-css/actionbutton": "^1.0.1",
"@spectrum-css/actiongroup": "^1.0.1",
"@spectrum-css/avatar": "^3.0.2",
diff --git a/packages/bbui/src/Actions/click_outside.js b/packages/bbui/src/Actions/click_outside.js
index 49a15d36a3..7fd2879071 100644
--- a/packages/bbui/src/Actions/click_outside.js
+++ b/packages/bbui/src/Actions/click_outside.js
@@ -1,18 +1,18 @@
export default function clickOutside(element, callbackFunction) {
function onClick(event) {
if (!element.contains(event.target)) {
- callbackFunction()
+ callbackFunction(event)
}
}
- document.body.addEventListener("mousedown", onClick, true)
+ document.body.addEventListener("click", onClick, true)
return {
update(newCallbackFunction) {
callbackFunction = newCallbackFunction
},
destroy() {
- document.body.removeEventListener("mousedown", onClick, true)
+ document.body.removeEventListener("click", onClick, true)
},
}
}
diff --git a/packages/bbui/src/Banner/BannerDisplay.svelte b/packages/bbui/src/Banner/BannerDisplay.svelte
index aad742b1bd..9ea2eaf2ec 100644
--- a/packages/bbui/src/Banner/BannerDisplay.svelte
+++ b/packages/bbui/src/Banner/BannerDisplay.svelte
@@ -4,22 +4,32 @@
import { banner } from "../Stores/banner"
import Banner from "./Banner.svelte"
import { fly } from "svelte/transition"
+ import TooltipWrapper from "../Tooltip/TooltipWrapper.svelte"
- {#if $banner.message}
+ {#each $banner.messages as message}
+ on:change={() => {
+ if (message.onChange) {
+ message.onChange()
+ }
+ }}
+ showCloseButton={typeof message.showCloseButton === "boolean"
+ ? message.showCloseButton
+ : true}
>
- {$banner.message}
+
+ {message.message}
+
- {/if}
+ {/each}
diff --git a/packages/bbui/src/ColorPicker/ColorPicker.svelte b/packages/bbui/src/ColorPicker/ColorPicker.svelte
index 52a877415b..331de38371 100644
--- a/packages/bbui/src/ColorPicker/ColorPicker.svelte
+++ b/packages/bbui/src/ColorPicker/ColorPicker.svelte
@@ -119,6 +119,13 @@
return "var(--spectrum-global-color-static-gray-900)"
}
+
+ const handleOutsideClick = event => {
+ if (open) {
+ event.stopPropagation()
+ open = false
+ }
+ }
@@ -131,7 +138,7 @@
{#if open}
- use:clickOutside={() => (open = false)}
+ use:clickOutside={handleOutsideClick}
transition:fly={{ y: -20, duration: 200 }}
class="spectrum-Popover spectrum-Popover--bottom spectrum-Picker-popover is-open"
class:spectrum-Popover--align-right={alignRight}
diff --git a/packages/bbui/src/Drawer/Drawer.svelte b/packages/bbui/src/Drawer/Drawer.svelte
index e1880d0ed4..43729cd794 100644
--- a/packages/bbui/src/Drawer/Drawer.svelte
+++ b/packages/bbui/src/Drawer/Drawer.svelte
@@ -78,7 +78,7 @@
bottom: 0;
background: var(--background);
border-top: var(--border-light);
- z-index: 2;
+ z-index: 3;
}
.fillWidth {
diff --git a/packages/bbui/src/Form/Core/Dropzone.svelte b/packages/bbui/src/Form/Core/Dropzone.svelte
index 3102972d1e..51f6eef6f9 100644
--- a/packages/bbui/src/Form/Core/Dropzone.svelte
+++ b/packages/bbui/src/Form/Core/Dropzone.svelte
@@ -65,6 +65,9 @@
}
}
+ $: showDropzone =
+ (!maximum || (maximum && value?.length < maximum)) && !disabled
+
async function processFileList(fileList) {
if (
handleFileTooLarge &&
@@ -211,7 +214,7 @@
{/each}
{/if}
{/if}
- {#if !maximum || (maximum && value?.length < maximum)}
+ {#if showDropzone}
false
+ export let isOptionEnabled = () => true
export let onSelectOption = () => {}
export let getOptionLabel = option => option
export let getOptionValue = option => option
@@ -84,7 +85,7 @@
class:is-invalid={!!error}
class:is-open={open}
aria-haspopup="listbox"
- on:mousedown={onClick}
+ on:click={onClick}
>
{#if fieldIcon}