diff --git a/.eslintignore b/.eslintignore
index 54824be5c7..579bd55947 100644
--- a/.eslintignore
+++ b/.eslintignore
@@ -7,4 +7,5 @@ packages/server/client
packages/builder/.routify
packages/builder/cypress/support/queryLevelTransformerFunction.js
packages/builder/cypress/support/queryLevelTransformerFunctionWithData.js
-packages/builder/cypress/reports
\ No newline at end of file
+packages/builder/cypress/reports
+packages/sdk/sdk
\ No newline at end of file
diff --git a/.github/ISSUE_TEMPLATE/epic.md b/.github/ISSUE_TEMPLATE/epic.md
new file mode 100644
index 0000000000..b8cf652125
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/epic.md
@@ -0,0 +1,24 @@
+---
+name: Epic
+about: Plan a new project
+title: ''
+labels: epic
+assignees: ''
+
+---
+
+## Description
+Brief summary of what this Epic is, whether it's a larger project, goal, or user story. Describe the job to be done and which persona this Epic is mainly for; if there are multiple, break it down by user and job story.
+
+## Spec
+Link to Confluence spec
+
+## Teams and Stakeholders
+Describe who needs to be kept up to date about this Epic, included in discussions, or updated along the way. Stakeholders can be in Product/Engineering, as well as other teams such as Customer Success, who might want to keep customers updated on the Epic project.
+
+
+## Workflow
+- [ ] Spec created and pasted above
+- [ ] Product Review
+- [ ] Designs created
+- [ ] Individual Tasks created and assigned to Epic
diff --git a/.github/workflows/budibase_ci.yml b/.github/workflows/budibase_ci.yml
index e940e6fa10..475bd4f66a 100644
--- a/.github/workflows/budibase_ci.yml
+++ b/.github/workflows/budibase_ci.yml
@@ -23,6 +23,15 @@ jobs:
build:
runs-on: ubuntu-latest
+ services:
+ couchdb:
+ image: ibmcom/couchdb3
+ env:
+ COUCHDB_PASSWORD: budibase
+ COUCHDB_USER: budibase
+ ports:
+ - 4567:5984
+
strategy:
matrix:
node-version: [14.x]
@@ -53,9 +62,8 @@ jobs:
name: codecov-umbrella
verbose: true
- # TODO: parallelise this
- - name: Cypress run
- uses: cypress-io/github-action@v2
- with:
- install: false
- command: yarn test:e2e:ci
+ - name: QA Core Integration Tests
+ run: |
+ cd qa-core
+ yarn
+ yarn api:test:ci
\ No newline at end of file
diff --git a/.github/workflows/deploy-single-image.yml b/.github/workflows/deploy-single-image.yml
index 8bf8f232c5..cd16574eea 100644
--- a/.github/workflows/deploy-single-image.yml
+++ b/.github/workflows/deploy-single-image.yml
@@ -4,8 +4,6 @@ on:
workflow_dispatch:
env:
- BASE_BRANCH: ${{ github.event.pull_request.base.ref}}
- BRANCH: ${{ github.event.pull_request.head.ref }}
CI: true
PERSONAL_ACCESS_TOKEN : ${{ secrets.PERSONAL_ACCESS_TOKEN }}
REGISTRY_URL: registry.hub.docker.com
@@ -17,6 +15,11 @@ jobs:
matrix:
node-version: [14.x]
steps:
+ - name: Fail if branch is not master
+ if: github.ref != 'refs/heads/master'
+ run: |
+ echo "Ref is not master, you must run this job from master."
+ exit 1
- name: "Checkout"
uses: actions/checkout@v2
- name: Use Node.js ${{ matrix.node-version }}
@@ -28,8 +31,6 @@ jobs:
- name: Setup Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v1
- - name: Install Pro
- run: yarn install:pro $BRANCH $BASE_BRANCH
- name: Run Yarn
run: yarn
- name: Run Yarn Bootstrap
diff --git a/.github/workflows/release-develop.yml b/.github/workflows/release-develop.yml
index 57e65c734e..21c74851e1 100644
--- a/.github/workflows/release-develop.yml
+++ b/.github/workflows/release-develop.yml
@@ -46,7 +46,8 @@ jobs:
- run: yarn
- run: yarn bootstrap
- run: yarn lint
- - run: yarn build
+ - run: yarn build
+ - run: yarn build:sdk
- run: yarn test
- name: Configure AWS Credentials
diff --git a/.github/workflows/release-selfhost.yml b/.github/workflows/release-selfhost.yml
index da064f3e32..d78180fdc7 100644
--- a/.github/workflows/release-selfhost.yml
+++ b/.github/workflows/release-selfhost.yml
@@ -3,10 +3,6 @@ name: Budibase Release Selfhost
on:
workflow_dispatch:
-env:
- BRANCH: ${{ github.event.pull_request.head.ref }}
- BASE_BRANCH: ${{ github.event.pull_request.base.ref}}
-
jobs:
release:
runs-on: ubuntu-latest
@@ -54,9 +50,6 @@ jobs:
DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
SELFHOST_TAG: latest
- - name: Install Pro
- run: yarn install:pro $BRANCH $BASE_BRANCH
-
- name: Bootstrap and build (CLI)
run: |
yarn
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 961082e1ef..de288dd7db 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -56,6 +56,7 @@ jobs:
- run: yarn bootstrap
- run: yarn lint
- run: yarn build
+ - run: yarn build:sdk
- run: yarn test
- name: Configure AWS Credentials
diff --git a/.gitignore b/.gitignore
index f063e2224f..e1d3e6db0e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -63,6 +63,7 @@ typings/
# dotenv environment variables file
.env
+!qa-core/.env
!hosting/.env
hosting/.generated-nginx.dev.conf
hosting/proxy/.generated-nginx.prod.conf
@@ -102,4 +103,6 @@ packages/builder/cypress/reports
stats.html
# TypeScript cache
-*.tsbuildinfo
\ No newline at end of file
+*.tsbuildinfo
+budibase-component
+budibase-datasource
diff --git a/.prettierignore b/.prettierignore
index bbeff65da7..3a381d255e 100644
--- a/.prettierignore
+++ b/.prettierignore
@@ -9,3 +9,4 @@ packages/server/src/definitions/openapi.ts
packages/builder/.routify
packages/builder/cypress/support/queryLevelTransformerFunction.js
packages/builder/cypress/support/queryLevelTransformerFunctionWithData.js
+packages/sdk/sdk
\ No newline at end of file
diff --git a/.prettierrc.json b/.prettierrc.json
index 39654fd9f9..dae5906124 100644
--- a/.prettierrc.json
+++ b/.prettierrc.json
@@ -4,7 +4,7 @@
"singleQuote": false,
"trailingComma": "es5",
"arrowParens": "avoid",
- "jsxBracketSameLine": false,
+ "bracketSameLine": false,
"plugins": ["prettier-plugin-svelte"],
"svelteSortOrder": "options-scripts-markup-styles"
}
diff --git a/README.md b/README.md
index 1dec1737da..bd38610566 100644
--- a/README.md
+++ b/README.md
@@ -65,7 +65,7 @@ Budibase is open-source - licensed as GPL v3. This should fill you with confiden
### Load data or start from scratch
-Budibase pulls in data from multiple sources, including MongoDB, CouchDB, PostgreSQL, MySQL, Airtable, S3, DynamoDB, or a REST API. And unlike other platforms, with Budibase you can start from scratch and create business apps with no data sources. [Request new data sources](https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas).
+Budibase pulls in data from multiple sources, including MongoDB, CouchDB, PostgreSQL, MySQL, Airtable, S3, DynamoDB, or a REST API. And unlike other platforms, with Budibase you can start from scratch and create business apps with no datasources. [Request new datasources](https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas).
diff --git a/charts/budibase/templates/app-service-deployment.yaml b/charts/budibase/templates/app-service-deployment.yaml
index fd46e77647..f72d1aef03 100644
--- a/charts/budibase/templates/app-service-deployment.yaml
+++ b/charts/budibase/templates/app-service-deployment.yaml
@@ -78,6 +78,8 @@ spec:
key: objectStoreSecret
- name: MINIO_URL
value: {{ .Values.services.objectStore.url }}
+ - name: PLUGIN_BUCKET_NAME
+ value: {{ .Values.services.objectStore.pluginBucketName | default "plugins" | quote }}
- name: PORT
value: {{ .Values.services.apps.port | quote }}
{{ if .Values.services.worker.publicApiRateLimitPerSecond }}
@@ -124,11 +126,31 @@ spec:
value: {{ .Values.globals.tenantFeatureFlags | quote }}
{{ if .Values.globals.bbAdminUserEmail }}
- name: BB_ADMIN_USER_EMAIL
- value: { { .Values.globals.bbAdminUserEmail | quote } }
+ value: {{ .Values.globals.bbAdminUserEmail | quote }}
{{ end }}
{{ if .Values.globals.bbAdminUserPassword }}
- name: BB_ADMIN_USER_PASSWORD
- value: { { .Values.globals.bbAdminUserPassword | quote } }
+ value: {{ .Values.globals.bbAdminUserPassword | quote }}
+ {{ end }}
+ {{ if .Values.globals.pluginsDir }}
+ - name: PLUGINS_DIR
+ value: {{ .Values.globals.pluginsDir | quote }}
+ {{ end }}
+ {{ if .Values.services.apps.nodeDebug }}
+ - name: NODE_DEBUG
+ value: {{ .Values.services.apps.nodeDebug | quote }}
+ {{ end }}
+ {{ if .Values.globals.elasticApmEnabled }}
+ - name: ELASTIC_APM_ENABLED
+ value: {{ .Values.globals.elasticApmEnabled | quote }}
+ {{ end }}
+ {{ if .Values.globals.elasticApmSecretToken }}
+ - name: ELASTIC_APM_SECRET_TOKEN
+ value: {{ .Values.globals.elasticApmSecretToken | quote }}
+ {{ end }}
+ {{ if .Values.globals.elasticApmServerUrl }}
+ - name: ELASTIC_APM_SERVER_URL
+ value: {{ .Values.globals.elasticApmServerUrl | quote }}
{{ end }}
image: budibase/apps:{{ .Values.globals.appVersion }}
@@ -142,7 +164,10 @@ spec:
name: bbapps
ports:
- containerPort: {{ .Values.services.apps.port }}
- resources: {}
+ {{ with .Values.services.apps.resources }}
+ resources:
+ {{- toYaml . | nindent 10 }}
+ {{ end }}
{{- with .Values.affinity }}
affinity:
{{- toYaml . | nindent 8 }}
diff --git a/charts/budibase/templates/couchdb-backup.yaml b/charts/budibase/templates/couchdb-backup.yaml
index ae062475ce..68e5eab617 100644
--- a/charts/budibase/templates/couchdb-backup.yaml
+++ b/charts/budibase/templates/couchdb-backup.yaml
@@ -38,7 +38,10 @@ spec:
image: redgeoff/replicate-couchdb-cluster
imagePullPolicy: Always
name: couchdb-backup
- resources: {}
+ {{ with .Values.services.couchdb.backup.resources }}
+ resources:
+ {{- toYaml . | nindent 10 }}
+ {{ end }}
{{- with .Values.affinity }}
affinity:
{{- toYaml . | nindent 8 }}
diff --git a/charts/budibase/templates/minio-service-deployment.yaml b/charts/budibase/templates/minio-service-deployment.yaml
index 103f9e3ed2..144dbe539a 100644
--- a/charts/budibase/templates/minio-service-deployment.yaml
+++ b/charts/budibase/templates/minio-service-deployment.yaml
@@ -56,7 +56,10 @@ spec:
name: minio-service
ports:
- containerPort: {{ .Values.services.objectStore.port }}
- resources: {}
+ {{ with .Values.services.objectStore.resources }}
+ resources:
+ {{- toYaml . | nindent 10 }}
+ {{ end }}
volumeMounts:
- mountPath: /data
name: minio-data
diff --git a/charts/budibase/templates/proxy-service-deployment.yaml b/charts/budibase/templates/proxy-service-deployment.yaml
index 505a46f1e8..5588022032 100644
--- a/charts/budibase/templates/proxy-service-deployment.yaml
+++ b/charts/budibase/templates/proxy-service-deployment.yaml
@@ -30,7 +30,10 @@ spec:
name: proxy-service
ports:
- containerPort: {{ .Values.services.proxy.port }}
- resources: {}
+ {{ with .Values.services.proxy.resources }}
+ resources:
+ {{- toYaml . | nindent 10 }}
+ {{ end }}
volumeMounts:
{{- with .Values.affinity }}
affinity:
diff --git a/charts/budibase/templates/redis-service-deployment.yaml b/charts/budibase/templates/redis-service-deployment.yaml
index 6e09346cad..d94e4d70f8 100644
--- a/charts/budibase/templates/redis-service-deployment.yaml
+++ b/charts/budibase/templates/redis-service-deployment.yaml
@@ -35,7 +35,10 @@ spec:
name: redis-service
ports:
- containerPort: {{ .Values.services.redis.port }}
- resources: {}
+ {{ with .Values.services.redis.resources }}
+ resources:
+ {{- toYaml . | nindent 10 }}
+ {{ end }}
volumeMounts:
- mountPath: /data
name: redis-data
diff --git a/charts/budibase/templates/worker-service-deployment.yaml b/charts/budibase/templates/worker-service-deployment.yaml
index 918dab427b..b1c6110d95 100644
--- a/charts/budibase/templates/worker-service-deployment.yaml
+++ b/charts/budibase/templates/worker-service-deployment.yaml
@@ -27,6 +27,8 @@ spec:
spec:
containers:
- env:
+ - name: BUDIBASE_ENVIRONMENT
+ value: {{ .Values.globals.budibaseEnv }}
- name: DEPLOYMENT_ENVIRONMENT
value: "kubernetes"
- name: CLUSTER_PORT
@@ -75,6 +77,8 @@ spec:
key: objectStoreSecret
- name: MINIO_URL
value: {{ .Values.services.objectStore.url }}
+ - name: PLUGIN_BUCKET_NAME
+ value: {{ .Values.services.objectStore.pluginBucketName | default "plugins" | quote }}
- name: PORT
value: {{ .Values.services.worker.port | quote }}
- name: MULTI_TENANCY
@@ -125,6 +129,19 @@ spec:
value: {{ .Values.globals.google.secret | quote }}
- name: TENANT_FEATURE_FLAGS
value: {{ .Values.globals.tenantFeatureFlags | quote }}
+ {{ if .Values.globals.elasticApmEnabled }}
+ - name: ELASTIC_APM_ENABLED
+ value: {{ .Values.globals.elasticApmEnabled | quote }}
+ {{ end }}
+ {{ if .Values.globals.elasticApmSecretToken }}
+ - name: ELASTIC_APM_SECRET_TOKEN
+ value: {{ .Values.globals.elasticApmSecretToken | quote }}
+ {{ end }}
+ {{ if .Values.globals.elasticApmServerUrl }}
+ - name: ELASTIC_APM_SERVER_URL
+ value: {{ .Values.globals.elasticApmServerUrl | quote }}
+ {{ end }}
+
image: budibase/worker:{{ .Values.globals.appVersion }}
imagePullPolicy: Always
livenessProbe:
@@ -136,7 +153,10 @@ spec:
name: bbworker
ports:
- containerPort: {{ .Values.services.worker.port }}
- resources: {}
+ {{ with .Values.services.worker.resources }}
+ resources:
+ {{- toYaml . | nindent 10 }}
+ {{ end }}
{{- with .Values.affinity }}
affinity:
{{- toYaml . | nindent 8 }}
diff --git a/charts/budibase/values.yaml b/charts/budibase/values.yaml
index 404e92c70f..5c4004cb57 100644
--- a/charts/budibase/values.yaml
+++ b/charts/budibase/values.yaml
@@ -60,19 +60,6 @@ ingress:
port:
number: 10000
-resources:
- {}
- # We usually recommend not to specify default resources and to leave this as a conscious
- # choice for the user. This also increases chances charts run on environments with little
- # resources, such as Minikube. If you do want to specify resources, uncomment the following
- # lines, adjust them as necessary, and remove the curly braces after 'resources:'.
- # limits:
- # cpu: 100m
- # memory: 128Mi
- # requests:
- # cpu: 100m
- # memory: 128Mi
-
autoscaling:
enabled: false
minReplicas: 1
@@ -89,6 +76,7 @@ affinity: {}
globals:
appVersion: "latest"
budibaseEnv: PRODUCTION
+ tenantFeatureFlags: "*:LICENSING,*:USER_GROUPS"
enableAnalytics: "1"
sentryDSN: ""
posthogToken: "phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU"
@@ -114,6 +102,10 @@ globals:
smtp:
enabled: false
+# elasticApmEnabled:
+# elasticApmSecretToken:
+# elasticApmServerUrl:
+
services:
budibaseVersion: latest
dns: cluster.local
@@ -121,15 +113,19 @@ services:
proxy:
port: 10000
replicaCount: 1
+ resources: {}
apps:
port: 4002
replicaCount: 1
logLevel: info
+ resources: {}
+# nodeDebug: "" # set the value of NODE_DEBUG
worker:
port: 4003
replicaCount: 1
+ resources: {}
couchdb:
enabled: true
@@ -143,6 +139,7 @@ services:
target: ""
# backup interval in seconds
interval: ""
+ resources: {}
redis:
enabled: true # disable if using external redis
@@ -156,6 +153,7 @@ services:
## If undefined (the default) or set to null, no storageClassName spec is
## set, choosing the default provisioner.
storageClass: ""
+ resources: {}
objectStore:
minio: true
@@ -172,6 +170,7 @@ services:
## If undefined (the default) or set to null, no storageClassName spec is
## set, choosing the default provisioner.
storageClass: ""
+ resources: {}
# Override values in couchDB subchart
couchdb:
diff --git a/examples/nextjs-api-sales/definitions/openapi.ts b/examples/nextjs-api-sales/definitions/openapi.ts
index 4f4ad45fc6..7f7f6befec 100644
--- a/examples/nextjs-api-sales/definitions/openapi.ts
+++ b/examples/nextjs-api-sales/definitions/openapi.ts
@@ -348,7 +348,7 @@ export interface paths {
}
}
responses: {
- /** Returns the created table, including the ID which has been generated for it. This can be internal or external data sources. */
+ /** Returns the created table, including the ID which has been generated for it. This can be internal or external datasources. */
200: {
content: {
"application/json": components["schemas"]["tableOutput"]
@@ -959,7 +959,7 @@ export interface components {
query: {
/** @description The ID of the query. */
_id: string
- /** @description The ID of the data source the query belongs to. */
+ /** @description The ID of the datasource the query belongs to. */
datasourceId?: string
/** @description The bindings which are required to perform this query. */
parameters?: string[]
@@ -983,7 +983,7 @@ export interface components {
data: {
/** @description The ID of the query. */
_id: string
- /** @description The ID of the data source the query belongs to. */
+ /** @description The ID of the datasource the query belongs to. */
datasourceId?: string
/** @description The bindings which are required to perform this query. */
parameters?: string[]
diff --git a/examples/nextjs-api-sales/package.json b/examples/nextjs-api-sales/package.json
index 6d75c85f01..41ce52e952 100644
--- a/examples/nextjs-api-sales/package.json
+++ b/examples/nextjs-api-sales/package.json
@@ -11,8 +11,8 @@
"dependencies": {
"bulma": "^0.9.3",
"next": "12.1.0",
- "node-fetch": "^3.2.2",
- "node-sass": "^7.0.1",
+ "node-fetch": "^3.2.10",
+ "sass": "^1.52.3",
"react": "17.0.2",
"react-dom": "17.0.2",
"react-notifications-component": "^3.4.1"
@@ -24,4 +24,4 @@
"eslint-config-next": "12.1.0",
"typescript": "4.6.2"
}
-}
+}
\ No newline at end of file
diff --git a/examples/nextjs-api-sales/yarn.lock b/examples/nextjs-api-sales/yarn.lock
index 52c89967b2..f47fb84e33 100644
--- a/examples/nextjs-api-sales/yarn.lock
+++ b/examples/nextjs-api-sales/yarn.lock
@@ -2020,10 +2020,10 @@ node-domexception@^1.0.0:
resolved "https://registry.yarnpkg.com/node-domexception/-/node-domexception-1.0.0.tgz#6888db46a1f71c0b76b3f7555016b63fe64766e5"
integrity sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==
-node-fetch@^3.2.2:
- version "3.2.2"
- resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-3.2.2.tgz#16d33fbe32ca7c6ca1ca8ba5dfea1dd885c59f04"
- integrity sha512-Cwhq1JFIoon15wcIkFzubVNFE5GvXGV82pKf4knXXjvGmn7RJKcypeuqcVNZMGDZsAFWyIRya/anwAJr7TWJ7w==
+node-fetch@^3.2.10:
+ version "3.2.10"
+ resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-3.2.10.tgz#e8347f94b54ae18b57c9c049ef641cef398a85c8"
+ integrity sha512-MhuzNwdURnZ1Cp4XTazr69K0BTizsBroX7Zx3UgDSVcZYKF/6p0CBe4EUb/hLqmzVhl0UpYfgRljQ4yxE+iCxA==
dependencies:
data-uri-to-buffer "^4.0.0"
fetch-blob "^3.1.4"
diff --git a/hosting/.env b/hosting/.env
index 11dd661bf1..c5638a266f 100644
--- a/hosting/.env
+++ b/hosting/.env
@@ -22,4 +22,7 @@ BUDIBASE_ENVIRONMENT=PRODUCTION
# An admin user can be automatically created initially if these are set
BB_ADMIN_USER_EMAIL=
-BB_ADMIN_USER_PASSWORD=
\ No newline at end of file
+BB_ADMIN_USER_PASSWORD=
+
+# A path that is watched for plugin bundles. Any bundles found are imported automatically.
+PLUGINS_DIR=
\ No newline at end of file
diff --git a/hosting/docker-compose.yaml b/hosting/docker-compose.yaml
index 7d3e6960dc..5b2adc2665 100644
--- a/hosting/docker-compose.yaml
+++ b/hosting/docker-compose.yaml
@@ -25,9 +25,12 @@ services:
REDIS_PASSWORD: ${REDIS_PASSWORD}
BB_ADMIN_USER_EMAIL: ${BB_ADMIN_USER_EMAIL}
BB_ADMIN_USER_PASSWORD: ${BB_ADMIN_USER_PASSWORD}
+ PLUGINS_DIR: ${PLUGINS_DIR}
depends_on:
- worker-service
- redis-service
+# volumes:
+# - /some/path/to/plugins:/plugins
worker-service:
restart: unless-stopped
@@ -78,6 +81,7 @@ services:
image: budibase/proxy
environment:
- PROXY_RATE_LIMIT_WEBHOOKS_PER_SECOND=10
+ - PROXY_RATE_LIMIT_API_PER_SECOND=20
depends_on:
- minio-service
- worker-service
diff --git a/hosting/hosting.properties b/hosting/hosting.properties
index 11dd661bf1..c5638a266f 100644
--- a/hosting/hosting.properties
+++ b/hosting/hosting.properties
@@ -22,4 +22,7 @@ BUDIBASE_ENVIRONMENT=PRODUCTION
# An admin user can be automatically created initially if these are set
BB_ADMIN_USER_EMAIL=
-BB_ADMIN_USER_PASSWORD=
\ No newline at end of file
+BB_ADMIN_USER_PASSWORD=
+
+# A path that is watched for plugin bundles. Any bundles found are imported automatically.
+PLUGINS_DIR=
\ No newline at end of file
diff --git a/hosting/nginx.dev.conf.hbs b/hosting/nginx.dev.conf.hbs
index e08516c9d3..14c32b1bba 100644
--- a/hosting/nginx.dev.conf.hbs
+++ b/hosting/nginx.dev.conf.hbs
@@ -15,7 +15,10 @@ http {
log_format main '$remote_addr - $remote_user [$time_local] "$request" '
'$status $body_bytes_sent "$http_referer" '
- '"$http_user_agent" "$http_x_forwarded_for"';
+ '"$http_user_agent" "$http_x_forwarded_for" '
+ 'response_time=$upstream_response_time proxy_host=$proxy_host upstream_addr=$upstream_addr';
+
+ access_log /var/log/nginx/access.log main;
map $http_upgrade $connection_upgrade {
default "upgrade";
@@ -62,10 +65,6 @@ http {
proxy_pass http://{{ address }}:4001;
}
- location /preview {
- proxy_pass http://{{ address }}:4001;
- }
-
location /builder {
proxy_pass http://{{ address }}:3000;
rewrite ^/builder(.*)$ /builder/$1 break;
@@ -81,6 +80,20 @@ http {
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
}
+ location /vite/ {
+ proxy_pass http://{{ address }}:3000;
+ rewrite ^/vite(.*)$ /$1 break;
+ }
+
+ location /socket/ {
+ proxy_http_version 1.1;
+ proxy_set_header Upgrade $http_upgrade;
+ proxy_set_header Connection 'upgrade';
+ proxy_set_header Host $host;
+ proxy_cache_bypass $http_upgrade;
+ proxy_pass http://{{ address }}:4001;
+ }
+
location / {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
diff --git a/hosting/nginx.prod.conf.hbs b/hosting/nginx.prod.conf.hbs
index eaff214187..f3202ad4a4 100644
--- a/hosting/nginx.prod.conf.hbs
+++ b/hosting/nginx.prod.conf.hbs
@@ -11,7 +11,7 @@ events {
http {
# rate limiting
limit_req_status 429;
- limit_req_zone $binary_remote_addr zone=ratelimit:10m rate=20r/s;
+ limit_req_zone $binary_remote_addr zone=ratelimit:10m rate=${PROXY_RATE_LIMIT_API_PER_SECOND}r/s;
limit_req_zone $binary_remote_addr zone=webhooks:10m rate=${PROXY_RATE_LIMIT_WEBHOOKS_PER_SECOND}r/s;
include /etc/nginx/mime.types;
@@ -33,7 +33,10 @@ http {
log_format main '$remote_addr - $remote_user [$time_local] "$request" '
'$status $body_bytes_sent "$http_referer" '
- '"$http_user_agent" "$http_x_forwarded_for"';
+ '"$http_user_agent" "$http_x_forwarded_for" '
+ 'response_time=$upstream_response_time proxy_host=$proxy_host upstream_addr=$upstream_addr';
+
+ access_log /var/log/nginx/access.log main;
map $http_upgrade $connection_upgrade {
default "upgrade";
@@ -85,10 +88,6 @@ http {
proxy_pass http://$apps:4002;
}
- location /preview {
- proxy_pass http://$apps:4002;
- }
-
location = / {
proxy_pass http://$apps:4002;
}
@@ -159,6 +158,15 @@ http {
rewrite ^/db/(.*)$ /$1 break;
}
+ location /socket/ {
+ proxy_http_version 1.1;
+ proxy_set_header Upgrade $http_upgrade;
+ proxy_set_header Connection 'upgrade';
+ proxy_set_header Host $host;
+ proxy_cache_bypass $http_upgrade;
+ proxy_pass http://$apps:4002;
+ }
+
location / {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
diff --git a/hosting/proxy/Dockerfile b/hosting/proxy/Dockerfile
index d9b33e3e9a..298762aaf1 100644
--- a/hosting/proxy/Dockerfile
+++ b/hosting/proxy/Dockerfile
@@ -10,4 +10,5 @@ COPY .generated-nginx.prod.conf /etc/nginx/templates/nginx.conf.template
COPY error.html /usr/share/nginx/html/error.html
# Default environment
-ENV PROXY_RATE_LIMIT_WEBHOOKS_PER_SECOND=10
\ No newline at end of file
+ENV PROXY_RATE_LIMIT_WEBHOOKS_PER_SECOND=10
+ENV PROXY_RATE_LIMIT_API_PER_SECOND=20
\ No newline at end of file
diff --git a/hosting/scripts/build-target-paths.sh b/hosting/scripts/build-target-paths.sh
index ee314c1ce4..67e1765ca8 100644
--- a/hosting/scripts/build-target-paths.sh
+++ b/hosting/scripts/build-target-paths.sh
@@ -4,17 +4,21 @@ echo ${TARGETBUILD} > /buildtarget.txt
if [[ "${TARGETBUILD}" = "aas" ]]; then
# Azure AppService uses /home for persisent data & SSH on port 2222
DATA_DIR=/home
- mkdir -p $DATA_DIR/{search,minio,couchdb}
- mkdir -p $DATA_DIR/couchdb/{dbs,views}
- chown -R couchdb:couchdb $DATA_DIR/couchdb/
+ WEBSITES_ENABLE_APP_SERVICE_STORAGE=true
+ mkdir -p $DATA_DIR/{search,minio,couch}
+ mkdir -p $DATA_DIR/couch/{dbs,views}
+ chown -R couchdb:couchdb $DATA_DIR/couch/
apt update
apt-get install -y openssh-server
- sed -i "s/#Port 22/Port 2222/" /etc/ssh/sshd_config
+ echo "root:Docker!" | chpasswd
+ mkdir -p /tmp
+ chmod +x /tmp/ssh_setup.sh \
+ && (sleep 1;/tmp/ssh_setup.sh 2>&1 > /dev/null)
+ cp /etc/sshd_config /etc/ssh/sshd_config
/etc/init.d/ssh restart
sed -i "s#DATA_DIR#/home#g" /opt/clouseau/clouseau.ini
sed -i "s#DATA_DIR#/home#g" /opt/couchdb/etc/local.ini
else
sed -i "s#DATA_DIR#/data#g" /opt/clouseau/clouseau.ini
sed -i "s#DATA_DIR#/data#g" /opt/couchdb/etc/local.ini
-
fi
\ No newline at end of file
diff --git a/hosting/single/Dockerfile b/hosting/single/Dockerfile
index 476a6e5e94..f34290f627 100644
--- a/hosting/single/Dockerfile
+++ b/hosting/single/Dockerfile
@@ -29,23 +29,8 @@ ENV TARGETBUILD $TARGETBUILD
COPY --from=build /app /app
COPY --from=build /worker /worker
-ENV \
- APP_PORT=4001 \
- ARCHITECTURE=amd \
- BUDIBASE_ENVIRONMENT=PRODUCTION \
- CLUSTER_PORT=80 \
- # CUSTOM_DOMAIN=budi001.custom.com \
- DATA_DIR=/data \
- DEPLOYMENT_ENVIRONMENT=docker \
- MINIO_URL=http://localhost:9000 \
- POSTHOG_TOKEN=phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU \
- REDIS_URL=localhost:6379 \
- SELF_HOSTED=1 \
- TARGETBUILD=$TARGETBUILD \
- WORKER_PORT=4002 \
- WORKER_URL=http://localhost:4002 \
- APPS_URL=http://localhost:4001
-
+# ENV CUSTOM_DOMAIN=budi001.custom.com \
+# See runner.sh for Env Vars
# These secret env variables are generated by the runner at startup
# their values can be overriden by the user, they will be written
# to the .env file in the /data directory for use later on
@@ -117,6 +102,8 @@ RUN chmod +x ./build-target-paths.sh
# Script below sets the path for storing data based on $DATA_DIR
# For Azure App Service install SSH & point data locations to /home
+ADD hosting/single/ssh/sshd_config /etc/
+ADD hosting/single/ssh/ssh_setup.sh /tmp
RUN /build-target-paths.sh
# cleanup cache
@@ -124,6 +111,8 @@ RUN yarn cache clean -f
EXPOSE 80
EXPOSE 443
+# Expose port 2222 for SSH on Azure App Service build
+EXPOSE 2222
VOLUME /data
# setup letsencrypt certificate
diff --git a/hosting/single/couch/local.ini b/hosting/single/couch/local.ini
index 35f0383dfc..266c0d4b60 100644
--- a/hosting/single/couch/local.ini
+++ b/hosting/single/couch/local.ini
@@ -1,5 +1,5 @@
; CouchDB Configuration Settings
[couchdb]
-database_dir = DATA_DIR/couchdb/dbs
-view_index_dir = DATA_DIR/couchdb/views
+database_dir = DATA_DIR/couch/dbs
+view_index_dir = DATA_DIR/couch/views
diff --git a/hosting/single/nginx/nginx-default-site.conf b/hosting/single/nginx/nginx-default-site.conf
index c0d80a0185..bd89e21251 100644
--- a/hosting/single/nginx/nginx-default-site.conf
+++ b/hosting/single/nginx/nginx-default-site.conf
@@ -66,6 +66,15 @@ server {
rewrite ^/db/(.*)$ /$1 break;
}
+ location /socket/ {
+ proxy_http_version 1.1;
+ proxy_set_header Upgrade $http_upgrade;
+ proxy_set_header Connection 'upgrade';
+ proxy_set_header Host $host;
+ proxy_cache_bypass $http_upgrade;
+ proxy_pass http://127.0.0.1:4001;
+ }
+
location / {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
diff --git a/hosting/single/runner.sh b/hosting/single/runner.sh
index 09387343ba..6770d27ee0 100644
--- a/hosting/single/runner.sh
+++ b/hosting/single/runner.sh
@@ -1,18 +1,37 @@
#!/bin/bash
declare -a ENV_VARS=("COUCHDB_USER" "COUCHDB_PASSWORD" "DATA_DIR" "MINIO_ACCESS_KEY" "MINIO_SECRET_KEY" "INTERNAL_API_KEY" "JWT_SECRET" "REDIS_PASSWORD")
-
+declare -a DOCKER_VARS=("APP_PORT" "APPS_URL" "ARCHITECTURE" "BUDIBASE_ENVIRONMENT" "CLUSTER_PORT" "DEPLOYMENT_ENVIRONMENT" "MINIO_URL" "NODE_ENV" "POSTHOG_TOKEN" "REDIS_URL" "SELF_HOSTED" "WORKER_PORT" "WORKER_URL" "TENANT_FEATURE_FLAGS" "ACCOUNT_PORTAL_URL")
+# Check that the env vars set in the Dockerfile have come through; AAS seems to drop them
+[[ -z "${APP_PORT}" ]] && export APP_PORT=4001
+[[ -z "${ARCHITECTURE}" ]] && export ARCHITECTURE=amd
+[[ -z "${BUDIBASE_ENVIRONMENT}" ]] && export BUDIBASE_ENVIRONMENT=PRODUCTION
+[[ -z "${CLUSTER_PORT}" ]] && export CLUSTER_PORT=80
+[[ -z "${DEPLOYMENT_ENVIRONMENT}" ]] && export DEPLOYMENT_ENVIRONMENT=docker
+[[ -z "${MINIO_URL}" ]] && export MINIO_URL=http://localhost:9000
+[[ -z "${NODE_ENV}" ]] && export NODE_ENV=production
+[[ -z "${POSTHOG_TOKEN}" ]] && export POSTHOG_TOKEN=phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU
+[[ -z "${TENANT_FEATURE_FLAGS}" ]] && export TENANT_FEATURE_FLAGS="*:LICENSING,*:USER_GROUPS"
+[[ -z "${ACCOUNT_PORTAL_URL}" ]] && export ACCOUNT_PORTAL_URL=https://account.budibase.app
+[[ -z "${REDIS_URL}" ]] && export REDIS_URL=localhost:6379
+[[ -z "${SELF_HOSTED}" ]] && export SELF_HOSTED=1
+[[ -z "${WORKER_PORT}" ]] && export WORKER_PORT=4002
+[[ -z "${WORKER_URL}" ]] && export WORKER_URL=http://localhost:4002
+[[ -z "${APPS_URL}" ]] && export APPS_URL=http://localhost:4001
+# export CUSTOM_DOMAIN=budi001.custom.com
# Azure App Service customisations
if [[ "${TARGETBUILD}" = "aas" ]]; then
DATA_DIR=/home
+ WEBSITES_ENABLE_APP_SERVICE_STORAGE=true
/etc/init.d/ssh start
else
DATA_DIR=${DATA_DIR:-/data}
fi
if [ -f "${DATA_DIR}/.env" ]; then
- export $(cat ${DATA_DIR}/.env | xargs)
+ # Read in the .env file and export the variables
+ for LINE in $(cat ${DATA_DIR}/.env); do export $LINE; done
fi
-# first randomise any unset environment variables
+# randomise any unset environment variables
for ENV_VAR in "${ENV_VARS[@]}"
do
temp=$(eval "echo \$$ENV_VAR")
@@ -30,16 +49,23 @@ if [ ! -f "${DATA_DIR}/.env" ]; then
temp=$(eval "echo \$$ENV_VAR")
echo "$ENV_VAR=$temp" >> ${DATA_DIR}/.env
done
+ for ENV_VAR in "${DOCKER_VARS[@]}"
+ do
+ temp=$(eval "echo \$$ENV_VAR")
+ echo "$ENV_VAR=$temp" >> ${DATA_DIR}/.env
+ done
echo "COUCH_DB_URL=${COUCH_DB_URL}" >> ${DATA_DIR}/.env
fi
-export COUCH_DB_URL=http://$COUCHDB_USER:$COUCHDB_PASSWORD@localhost:5984
-
+# Read in the .env file and export the variables
+for LINE in $(cat ${DATA_DIR}/.env); do export $LINE; done
+ln -s ${DATA_DIR}/.env /app/.env
+ln -s ${DATA_DIR}/.env /worker/.env
# make these directories in runner, incase of mount
-mkdir -p ${DATA_DIR}/couchdb/{dbs,views}
+mkdir -p ${DATA_DIR}/couch/{dbs,views}
mkdir -p ${DATA_DIR}/minio
mkdir -p ${DATA_DIR}/search
-chown -R couchdb:couchdb ${DATA_DIR}/couchdb
+chown -R couchdb:couchdb ${DATA_DIR}/couch
redis-server --requirepass $REDIS_PASSWORD &
/opt/clouseau/bin/clouseau &
/minio/minio server ${DATA_DIR}/minio &
diff --git a/hosting/single/ssh/ssh_setup.sh b/hosting/single/ssh/ssh_setup.sh
new file mode 100644
index 0000000000..0af0b6d7ad
--- /dev/null
+++ b/hosting/single/ssh/ssh_setup.sh
@@ -0,0 +1,8 @@
+#!/bin/sh
+
+ssh-keygen -A
+
+# prepare run dir
+if [ ! -d "/var/run/sshd" ]; then
+ mkdir -p /var/run/sshd
+fi
\ No newline at end of file
diff --git a/hosting/single/ssh/sshd_config b/hosting/single/ssh/sshd_config
new file mode 100644
index 0000000000..7eb5df953a
--- /dev/null
+++ b/hosting/single/ssh/sshd_config
@@ -0,0 +1,12 @@
+Port 2222
+ListenAddress 0.0.0.0
+LoginGraceTime 180
+X11Forwarding yes
+Ciphers aes128-cbc,3des-cbc,aes256-cbc,aes128-ctr,aes192-ctr,aes256-ctr
+MACs hmac-sha1,hmac-sha1-96
+StrictModes yes
+SyslogFacility DAEMON
+PasswordAuthentication yes
+PermitEmptyPasswords no
+PermitRootLogin yes
+Subsystem sftp internal-sftp
diff --git a/lerna.json b/lerna.json
index f85789bb81..4e08b98680 100644
--- a/lerna.json
+++ b/lerna.json
@@ -1,5 +1,5 @@
{
- "version": "1.2.44-alpha.7",
+ "version": "2.0.14-alpha.2",
"npmClient": "yarn",
"packages": [
"packages/*"
diff --git a/package.json b/package.json
index d5cc637fc2..7733a6df95 100644
--- a/package.json
+++ b/package.json
@@ -12,6 +12,7 @@
"js-yaml": "^4.1.0",
"kill-port": "^1.6.1",
"lerna": "3.14.1",
+ "madge": "^5.0.1",
"prettier": "^2.3.1",
"prettier-plugin-svelte": "^2.3.0",
"rimraf": "^3.0.2",
@@ -24,6 +25,8 @@
"bootstrap": "lerna bootstrap && lerna link && ./scripts/link-dependencies.sh",
"build": "lerna run build",
"build:dev": "lerna run prebuild && tsc --build --watch --preserveWatchOutput",
+ "build:sdk": "lerna run build:sdk",
+ "deps:circular": "madge packages/server/dist/index.js packages/worker/src/index.ts packages/backend-core/dist/src/index.js packages/cli/src/index.js --circular",
"release": "lerna publish ${RELEASE_VERSION_TYPE:-patch} --yes --force-publish && yarn release:pro",
"release:develop": "lerna publish prerelease --yes --force-publish --dist-tag develop --exact && yarn release:pro:develop",
"release:pro": "bash scripts/pro/release.sh",
@@ -44,8 +47,8 @@
"lint:eslint": "eslint packages",
"lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\"",
"lint": "yarn run lint:eslint && yarn run lint:prettier",
- "lint:fix:eslint": "eslint --fix packages",
- "lint:fix:prettier": "prettier --write \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\"",
+ "lint:fix:eslint": "eslint --fix packages qa-core",
+ "lint:fix:prettier": "prettier --write \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\" && prettier --write \"qa-core/**/*.{js,ts,svelte}\"",
"lint:fix": "yarn run lint:fix:prettier && yarn run lint:fix:eslint",
"test:e2e": "lerna run cy:test --stream",
"test:e2e:ci": "lerna run cy:ci --stream",
diff --git a/packages/backend-core/context.js b/packages/backend-core/context.js
index aaa0f56f92..c6fa87a337 100644
--- a/packages/backend-core/context.js
+++ b/packages/backend-core/context.js
@@ -6,6 +6,7 @@ const {
updateAppId,
doInAppContext,
doInTenant,
+ doInContext,
} = require("./src/context")
const identity = require("./src/context/identity")
@@ -19,4 +20,5 @@ module.exports = {
doInAppContext,
doInTenant,
identity,
+ doInContext,
}
diff --git a/packages/backend-core/package.json b/packages/backend-core/package.json
index 43efa43efb..75cd8ff374 100644
--- a/packages/backend-core/package.json
+++ b/packages/backend-core/package.json
@@ -1,6 +1,6 @@
{
"name": "@budibase/backend-core",
- "version": "1.2.44-alpha.7",
+ "version": "2.0.14-alpha.2",
"description": "Budibase backend core libraries used in server and worker",
"main": "dist/src/index.js",
"types": "dist/src/index.d.ts",
@@ -20,10 +20,12 @@
"test:watch": "jest --watchAll"
},
"dependencies": {
- "@budibase/types": "1.2.44-alpha.7",
+ "@budibase/types": "2.0.14-alpha.2",
+ "@shopify/jest-koa-mocks": "5.0.1",
"@techpass/passport-openidconnect": "0.3.2",
"aws-sdk": "2.1030.0",
"bcrypt": "5.0.1",
+ "bcryptjs": "2.4.3",
"dotenv": "16.0.1",
"emitter-listener": "1.1.2",
"ioredis": "4.28.0",
@@ -60,7 +62,6 @@
]
},
"devDependencies": {
- "@shopify/jest-koa-mocks": "3.1.5",
"@types/jest": "27.5.1",
"@types/koa": "2.0.52",
"@types/lodash": "4.14.180",
diff --git a/packages/backend-core/plugins.js b/packages/backend-core/plugins.js
new file mode 100644
index 0000000000..018e214dcb
--- /dev/null
+++ b/packages/backend-core/plugins.js
@@ -0,0 +1,3 @@
+module.exports = {
+ ...require("./src/plugin"),
+}
diff --git a/packages/backend-core/src/auth.js b/packages/backend-core/src/auth.ts
similarity index 73%
rename from packages/backend-core/src/auth.js
rename to packages/backend-core/src/auth.ts
index d39b8426fb..23873b84e7 100644
--- a/packages/backend-core/src/auth.js
+++ b/packages/backend-core/src/auth.ts
@@ -1,11 +1,11 @@
const passport = require("koa-passport")
const LocalStrategy = require("passport-local").Strategy
const JwtStrategy = require("passport-jwt").Strategy
-const { getGlobalDB } = require("./tenancy")
+import { getGlobalDB } from "./tenancy"
const refresh = require("passport-oauth2-refresh")
-const { Configs } = require("./constants")
-const { getScopedConfig } = require("./db/utils")
-const {
+import { Configs } from "./constants"
+import { getScopedConfig } from "./db/utils"
+import {
jwt,
local,
authenticated,
@@ -13,7 +13,6 @@ const {
oidc,
auditLog,
tenancy,
- appTenancy,
authError,
ssoCallbackUrl,
csrf,
@@ -22,32 +21,36 @@ const {
builderOnly,
builderOrAdmin,
joiValidator,
-} = require("./middleware")
-
-const { invalidateUser } = require("./cache/user")
+} from "./middleware"
+import { invalidateUser } from "./cache/user"
+import { User } from "@budibase/types"
// Strategies
passport.use(new LocalStrategy(local.options, local.authenticate))
passport.use(new JwtStrategy(jwt.options, jwt.authenticate))
-passport.serializeUser((user, done) => done(null, user))
+passport.serializeUser((user: User, done: any) => done(null, user))
-passport.deserializeUser(async (user, done) => {
+passport.deserializeUser(async (user: User, done: any) => {
const db = getGlobalDB()
try {
- const user = await db.get(user._id)
- return done(null, user)
+ const dbUser = await db.get(user._id)
+ return done(null, dbUser)
} catch (err) {
console.error(`User not found`, err)
return done(null, false, { message: "User not found" })
}
})
-async function refreshOIDCAccessToken(db, chosenConfig, refreshToken) {
+async function refreshOIDCAccessToken(
+ db: any,
+ chosenConfig: any,
+ refreshToken: string
+) {
const callbackUrl = await oidc.getCallbackUrl(db, chosenConfig)
- let enrichedConfig
- let strategy
+ let enrichedConfig: any
+ let strategy: any
try {
enrichedConfig = await oidc.fetchStrategyConfig(chosenConfig, callbackUrl)
@@ -70,22 +73,28 @@ async function refreshOIDCAccessToken(db, chosenConfig, refreshToken) {
refresh.requestNewAccessToken(
Configs.OIDC,
refreshToken,
- (err, accessToken, refreshToken, params) => {
+ (err: any, accessToken: string, refreshToken: any, params: any) => {
resolve({ err, accessToken, refreshToken, params })
}
)
})
}
-async function refreshGoogleAccessToken(db, config, refreshToken) {
+async function refreshGoogleAccessToken(
+ db: any,
+ config: any,
+ refreshToken: any
+) {
let callbackUrl = await google.getCallbackUrl(db, config)
let strategy
try {
strategy = await google.strategyFactory(config, callbackUrl)
- } catch (err) {
+ } catch (err: any) {
console.error(err)
- throw new Error("Error constructing OIDC refresh strategy", err)
+ throw new Error(
+ `Error constructing OIDC refresh strategy: message=${err.message}`
+ )
}
refresh.use(strategy)
@@ -94,14 +103,18 @@ async function refreshGoogleAccessToken(db, config, refreshToken) {
refresh.requestNewAccessToken(
Configs.GOOGLE,
refreshToken,
- (err, accessToken, refreshToken, params) => {
+ (err: any, accessToken: string, refreshToken: string, params: any) => {
resolve({ err, accessToken, refreshToken, params })
}
)
})
}
-async function refreshOAuthToken(refreshToken, configType, configId) {
+async function refreshOAuthToken(
+ refreshToken: string,
+ configType: string,
+ configId: string
+) {
const db = getGlobalDB()
const config = await getScopedConfig(db, {
@@ -113,7 +126,7 @@ async function refreshOAuthToken(refreshToken, configType, configId) {
let refreshResponse
if (configType === Configs.OIDC) {
// configId - retrieved from cookie.
- chosenConfig = config.configs.filter(c => c.uuid === configId)[0]
+ chosenConfig = config.configs.filter((c: any) => c.uuid === configId)[0]
if (!chosenConfig) {
throw new Error("Invalid OIDC configuration")
}
@@ -134,7 +147,7 @@ async function refreshOAuthToken(refreshToken, configType, configId) {
return refreshResponse
}
-async function updateUserOAuth(userId, oAuthConfig) {
+async function updateUserOAuth(userId: string, oAuthConfig: any) {
const details = {
accessToken: oAuthConfig.accessToken,
refreshToken: oAuthConfig.refreshToken,
@@ -162,14 +175,13 @@ async function updateUserOAuth(userId, oAuthConfig) {
}
}
-module.exports = {
+export = {
buildAuthMiddleware: authenticated,
passport,
google,
oidc,
jwt: require("jsonwebtoken"),
buildTenancyMiddleware: tenancy,
- buildAppTenancyMiddleware: appTenancy,
auditLog,
authError,
buildCsrfMiddleware: csrf,
diff --git a/packages/backend-core/src/constants.js b/packages/backend-core/src/constants.js
index 172e66e603..44c271a4f8 100644
--- a/packages/backend-core/src/constants.js
+++ b/packages/backend-core/src/constants.js
@@ -7,6 +7,7 @@ exports.Cookies = {
CurrentApp: "budibase:currentapp",
Auth: "budibase:auth",
Init: "budibase:init",
+ ACCOUNT_RETURN_URL: "budibase:account:returnurl",
DatasourceAuth: "budibase:datasourceauth",
OIDC_CONFIG: "budibase:oidc:config",
}
diff --git a/packages/backend-core/src/context/index.ts b/packages/backend-core/src/context/index.ts
index 78ce764d55..35eeee608b 100644
--- a/packages/backend-core/src/context/index.ts
+++ b/packages/backend-core/src/context/index.ts
@@ -2,7 +2,7 @@ import env from "../environment"
import { SEPARATOR, DocumentType } from "../db/constants"
import cls from "./FunctionContext"
import { dangerousGetDB, closeDB } from "../db"
-import { baseGlobalDBName } from "../tenancy/utils"
+import { baseGlobalDBName } from "../db/tenancy"
import { IdentityContext } from "@budibase/types"
import { DEFAULT_TENANT_ID as _DEFAULT_TENANT_ID } from "../constants"
import { ContextKey } from "./constants"
@@ -65,7 +65,16 @@ export const getTenantIDFromAppID = (appId: string) => {
}
}
-// used for automations, API endpoints should always be in context already
+export const doInContext = async (appId: string, task: any) => {
+ // gets the tenant ID from the app ID
+ const tenantId = getTenantIDFromAppID(appId)
+ return doInTenant(tenantId, async () => {
+ return doInAppContext(appId, async () => {
+ return task()
+ })
+ })
+}
+
export const doInTenant = (tenantId: string | null, task: any) => {
// make sure default always selected in single tenancy
if (!env.MULTI_TENANCY) {
@@ -226,6 +235,10 @@ export const getAppId = () => {
}
}
+export const isTenancyEnabled = () => {
+ return env.MULTI_TENANCY
+}
+
/**
* Opens the app database based on whatever the request
* contained, dev or prod.
diff --git a/packages/backend-core/src/db/Replication.ts b/packages/backend-core/src/db/Replication.ts
index b46f6072be..e0bd3c7a43 100644
--- a/packages/backend-core/src/db/Replication.ts
+++ b/packages/backend-core/src/db/Replication.ts
@@ -1,4 +1,5 @@
import { dangerousGetDB, closeDB } from "."
+import { DocumentType } from "./constants"
class Replication {
source: any
@@ -53,6 +54,14 @@ class Replication {
return this.replication
}
+ appReplicateOpts() {
+ return {
+ filter: (doc: any) => {
+ return doc._id !== DocumentType.APP_METADATA
+ },
+ }
+ }
+
/**
* Rollback the target DB back to the state of the source DB
*/
@@ -60,6 +69,7 @@ class Replication {
await this.target.destroy()
// Recreate the DB again
this.target = dangerousGetDB(this.target.name)
+ // take the opportunity to remove deleted tombstones
await this.replicate()
}
diff --git a/packages/backend-core/src/db/constants.ts b/packages/backend-core/src/db/constants.ts
index 460476da24..a61e8a2af2 100644
--- a/packages/backend-core/src/db/constants.ts
+++ b/packages/backend-core/src/db/constants.ts
@@ -18,6 +18,9 @@ export enum ViewName {
LINK = "by_link",
ROUTING = "screen_routes",
AUTOMATION_LOGS = "automation_logs",
+ ACCOUNT_BY_EMAIL = "account_by_email",
+ PLATFORM_USERS_LOWERCASE = "platform_users_lowercase",
+ USER_BY_GROUP = "by_group_user",
}
export const DeprecatedViews = {
@@ -41,6 +44,11 @@ export enum DocumentType {
MIGRATIONS = "migrations",
DEV_INFO = "devinfo",
AUTOMATION_LOG = "log_au",
+ ACCOUNT_METADATA = "acc_metadata",
+ PLUGIN = "plg",
+ TABLE = "ta",
+ DATASOURCE = "datasource",
+ DATASOURCE_PLUS = "datasource_plus",
}
export const StaticDatabases = {
diff --git a/packages/backend-core/src/db/conversions.js b/packages/backend-core/src/db/conversions.js
index 90c04e9251..5b1a785ecc 100644
--- a/packages/backend-core/src/db/conversions.js
+++ b/packages/backend-core/src/db/conversions.js
@@ -36,6 +36,7 @@ exports.getDevelopmentAppID = appId => {
const rest = split.join(APP_PREFIX)
return `${APP_DEV_PREFIX}${rest}`
}
+exports.getDevAppID = exports.getDevelopmentAppID
/**
* Convert a development app ID to a deployed app ID.
diff --git a/packages/backend-core/src/db/tenancy.ts b/packages/backend-core/src/db/tenancy.ts
new file mode 100644
index 0000000000..d920f7cd41
--- /dev/null
+++ b/packages/backend-core/src/db/tenancy.ts
@@ -0,0 +1,22 @@
+import { DEFAULT_TENANT_ID } from "../constants"
+import { StaticDatabases, SEPARATOR } from "./constants"
+import { getTenantId } from "../context"
+
+export const getGlobalDBName = (tenantId?: string) => {
+  // the tenant ID can be set externally, for example by the user API
+  // when new tenants are being created, in which case it is passed in
+ if (!tenantId) {
+ tenantId = getTenantId()
+ }
+ return baseGlobalDBName(tenantId)
+}
+
+export const baseGlobalDBName = (tenantId: string | undefined | null) => {
+ let dbName
+ if (!tenantId || tenantId === DEFAULT_TENANT_ID) {
+ dbName = StaticDatabases.GLOBAL.name
+ } else {
+ dbName = `${tenantId}${SEPARATOR}${StaticDatabases.GLOBAL.name}`
+ }
+ return dbName
+}
diff --git a/packages/backend-core/src/db/utils.ts b/packages/backend-core/src/db/utils.ts
index 321ebd7f58..1c4be7e366 100644
--- a/packages/backend-core/src/db/utils.ts
+++ b/packages/backend-core/src/db/utils.ts
@@ -2,7 +2,8 @@ import { newid } from "../hashing"
import { DEFAULT_TENANT_ID, Configs } from "../constants"
import env from "../environment"
import { SEPARATOR, DocumentType, UNICODE_MAX, ViewName } from "./constants"
-import { getTenantId, getGlobalDBName, getGlobalDB } from "../tenancy"
+import { getTenantId, getGlobalDB } from "../context"
+import { getGlobalDBName } from "./tenancy"
import fetch from "node-fetch"
import { doWithDB, allDbs } from "./index"
import { getCouchInfo } from "./pouch"
@@ -15,6 +16,7 @@ import * as events from "../events"
export * from "./constants"
export * from "./conversions"
export { default as Replication } from "./Replication"
+export * from "./tenancy"
/**
* Generates a new app ID.
@@ -62,6 +64,28 @@ export function getQueryIndex(viewName: ViewName) {
return `database/${viewName}`
}
+/**
+ * Check if a given ID is that of a table.
+ * @returns {boolean}
+ */
+export const isTableId = (id: string) => {
+ // this includes datasource plus tables
+ return (
+ id &&
+ (id.startsWith(`${DocumentType.TABLE}${SEPARATOR}`) ||
+ id.startsWith(`${DocumentType.DATASOURCE_PLUS}${SEPARATOR}`))
+ )
+}
+
+/**
+ * Check if a given ID is that of a datasource or datasource plus.
+ * @returns {boolean}
+ */
+export const isDatasourceId = (id: string) => {
+ // this covers both datasources and datasource plus
+ return id && id.startsWith(`${DocumentType.DATASOURCE}${SEPARATOR}`)
+}
+
/**
* Generates a new workspace ID.
* @returns {string} The new workspace ID which the workspace doc can be stored under.
@@ -254,7 +278,16 @@ export async function getAllApps({ dev, all, idsOnly, efficient }: any = {}) {
return false
})
if (idsOnly) {
- return appDbNames
+ const devAppIds = appDbNames.filter(appId => isDevAppID(appId))
+ const prodAppIds = appDbNames.filter(appId => !isDevAppID(appId))
+ switch (dev) {
+ case true:
+ return devAppIds
+ case false:
+ return prodAppIds
+ default:
+ return appDbNames
+ }
}
const appPromises = appDbNames.map((app: any) =>
// skip setup otherwise databases could be re-created
@@ -357,6 +390,21 @@ export const generateDevInfoID = (userId: any) => {
return `${DocumentType.DEV_INFO}${SEPARATOR}${userId}`
}
+/**
+ * Generates a new plugin ID - to be used in the global DB.
+ * @returns {string} The new plugin ID which a plugin metadata document can be stored under.
+ */
+export const generatePluginID = (name: string) => {
+ return `${DocumentType.PLUGIN}${SEPARATOR}${name}`
+}
+
+/**
+ * Gets parameters for retrieving plugins; this is a utility function for the getDocParams function.
+ */
+export const getPluginParams = (pluginId?: string | null, otherProps = {}) => {
+ return getDocParams(DocumentType.PLUGIN, pluginId, otherProps)
+}
+
/**
* Returns the most granular configuration document from the DB based on the type, workspace and userID passed.
* @param {Object} db - db instance to query
diff --git a/packages/backend-core/src/db/views.js b/packages/backend-core/src/db/views.js
deleted file mode 100644
index 3a45611a8f..0000000000
--- a/packages/backend-core/src/db/views.js
+++ /dev/null
@@ -1,158 +0,0 @@
-const {
- DocumentType,
- ViewName,
- DeprecatedViews,
- SEPARATOR,
-} = require("./utils")
-const { getGlobalDB } = require("../tenancy")
-
-const DESIGN_DB = "_design/database"
-
-function DesignDoc() {
- return {
- _id: DESIGN_DB,
- // view collation information, read before writing any complex views:
- // https://docs.couchdb.org/en/master/ddocs/views/collation.html#collation-specification
- views: {},
- }
-}
-
-async function removeDeprecated(db, viewName) {
- if (!DeprecatedViews[viewName]) {
- return
- }
- try {
- const designDoc = await db.get(DESIGN_DB)
- for (let deprecatedNames of DeprecatedViews[viewName]) {
- delete designDoc.views[deprecatedNames]
- }
- await db.put(designDoc)
- } catch (err) {
- // doesn't exist, ignore
- }
-}
-
-exports.createNewUserEmailView = async () => {
- const db = getGlobalDB()
- let designDoc
- try {
- designDoc = await db.get(DESIGN_DB)
- } catch (err) {
- // no design doc, make one
- designDoc = DesignDoc()
- }
- const view = {
- // if using variables in a map function need to inject them before use
- map: `function(doc) {
- if (doc._id.startsWith("${DocumentType.USER}${SEPARATOR}")) {
- emit(doc.email.toLowerCase(), doc._id)
- }
- }`,
- }
- designDoc.views = {
- ...designDoc.views,
- [ViewName.USER_BY_EMAIL]: view,
- }
- await db.put(designDoc)
-}
-
-exports.createUserAppView = async () => {
- const db = getGlobalDB()
- let designDoc
- try {
- designDoc = await db.get("_design/database")
- } catch (err) {
- // no design doc, make one
- designDoc = DesignDoc()
- }
- const view = {
- // if using variables in a map function need to inject them before use
- map: `function(doc) {
- if (doc._id.startsWith("${DocumentType.USER}${SEPARATOR}") && doc.roles) {
- for (let prodAppId of Object.keys(doc.roles)) {
- let emitted = prodAppId + "${SEPARATOR}" + doc._id
- emit(emitted, null)
- }
- }
- }`,
- }
- designDoc.views = {
- ...designDoc.views,
- [ViewName.USER_BY_APP]: view,
- }
- await db.put(designDoc)
-}
-
-exports.createApiKeyView = async () => {
- const db = getGlobalDB()
- let designDoc
- try {
- designDoc = await db.get("_design/database")
- } catch (err) {
- designDoc = DesignDoc()
- }
- const view = {
- map: `function(doc) {
- if (doc._id.startsWith("${DocumentType.DEV_INFO}") && doc.apiKey) {
- emit(doc.apiKey, doc.userId)
- }
- }`,
- }
- designDoc.views = {
- ...designDoc.views,
- [ViewName.BY_API_KEY]: view,
- }
- await db.put(designDoc)
-}
-
-exports.createUserBuildersView = async () => {
- const db = getGlobalDB()
- let designDoc
- try {
- designDoc = await db.get("_design/database")
- } catch (err) {
- // no design doc, make one
- designDoc = DesignDoc()
- }
- const view = {
- map: `function(doc) {
- if (doc.builder && doc.builder.global === true) {
- emit(doc._id, doc._id)
- }
- }`,
- }
- designDoc.views = {
- ...designDoc.views,
- [ViewName.USER_BY_BUILDERS]: view,
- }
- await db.put(designDoc)
-}
-
-exports.queryGlobalView = async (viewName, params, db = null) => {
- const CreateFuncByName = {
- [ViewName.USER_BY_EMAIL]: exports.createNewUserEmailView,
- [ViewName.BY_API_KEY]: exports.createApiKeyView,
- [ViewName.USER_BY_BUILDERS]: exports.createUserBuildersView,
- [ViewName.USER_BY_APP]: exports.createUserAppView,
- }
- // can pass DB in if working with something specific
- if (!db) {
- db = getGlobalDB()
- }
- try {
- let response = (await db.query(`database/${viewName}`, params)).rows
- response = response.map(resp =>
- params.include_docs ? resp.doc : resp.value
- )
- return response.length <= 1 ? response[0] : response
- } catch (err) {
- if (err != null && err.name === "not_found") {
- const createFunc = CreateFuncByName[viewName]
- await removeDeprecated(db, viewName)
- await createFunc()
- return exports.queryGlobalView(viewName, params)
- } else {
- throw err
- }
- }
-}
diff --git a/packages/backend-core/src/db/views.ts b/packages/backend-core/src/db/views.ts
new file mode 100644
index 0000000000..f0fff918fc
--- /dev/null
+++ b/packages/backend-core/src/db/views.ts
@@ -0,0 +1,199 @@
+import { DocumentType, ViewName, DeprecatedViews, SEPARATOR } from "./utils"
+import { getGlobalDB } from "../context"
+import PouchDB from "pouchdb"
+import { StaticDatabases } from "./constants"
+import { doWithDB } from "./"
+
+const DESIGN_DB = "_design/database"
+
+function DesignDoc() {
+ return {
+ _id: DESIGN_DB,
+ // view collation information, read before writing any complex views:
+ // https://docs.couchdb.org/en/master/ddocs/views/collation.html#collation-specification
+ views: {},
+ }
+}
+
+interface DesignDocument {
+ views: any
+}
+
+async function removeDeprecated(db: PouchDB.Database, viewName: ViewName) {
+ // @ts-ignore
+ if (!DeprecatedViews[viewName]) {
+ return
+ }
+ try {
+ const designDoc = await db.get