Merge branch 'develop' of github.com:Budibase/budibase into cheeks-lab-day-portal-redesign
This commit is contained in:
commit 33ac401046

@@ -3,6 +3,8 @@ public
dist
packages/server/builder
packages/server/coverage
packages/worker/coverage
packages/backend-core/coverage
packages/server/client
packages/builder/.routify
packages/builder/cypress/support/queryLevelTransformerFunction.js
@@ -58,7 +58,7 @@ jobs:
 - uses: codecov/codecov-action@v1
   with:
     token: ${{ secrets.CODECOV_TOKEN }} # not required for public repos
-    files: ./packages/server/coverage/clover.xml
+    files: ./packages/server/coverage/clover.xml,./packages/worker/coverage/clover.xml,./packages/backend-core/coverage/clover.xml
     name: codecov-umbrella
     verbose: true
@@ -106,3 +106,5 @@ stats.html
*.tsbuildinfo
budibase-component
budibase-datasource

*.iml

@@ -4,6 +4,8 @@ dist
packages/builder/src/components/design/AppPreview/CurrentItemPreview.svelte
packages/server/builder
packages/server/coverage
packages/worker/coverage
packages/backend-core/coverage
packages/server/client
packages/server/src/definitions/openapi.ts
packages/builder/.routify
@@ -8,7 +8,7 @@
 "editor.defaultFormatter": "vscode.json-language-features"
 },
 "[javascript]": {
-"editor.defaultFormatter": "esbenp.prettier-vscode"
+"editor.defaultFormatter": "vscode.typescript-language-features"
 },
 "debug.javascript.terminalOptions": {
 "skipFiles": [

@@ -16,4 +16,7 @@
 "<node_internals>/**"
 ]
 },
+"[typescript]": {
+"editor.defaultFormatter": "vscode.typescript-language-features"
+},
 }
@@ -0,0 +1,10 @@
# Artifact Hub repository metadata file
# This file is used to verify ownership of the budibase Helm chart repo
# so that we appear as a verified owner on artifacthub.io

repositoryID: a7536764-e72e-4961-87d8-efe7c8dedfa3
owners: # (optional, used to claim repository ownership)
  - name: Martin
    email: budimaster@budibase.com
  - name: DevOps
    email: devops@budibase.com
@@ -4,6 +4,9 @@ metadata:
annotations:
kompose.cmd: kompose convert
kompose.version: 1.21.0 (992df58d8)
{{ if .Values.globals.logAnnotations }}
{{ toYaml .Values.globals.logAnnotations | indent 4 }}
{{ end }}
creationTimestamp: null
labels:
io.kompose.service: app-service

@@ -60,8 +63,6 @@ spec:
secretKeyRef:
name: {{ template "budibase.fullname" . }}
key: jwtSecret
- name: LOG_LEVEL
value: {{ .Values.services.apps.logLevel | default "info" | quote }}
{{ if .Values.services.objectStore.region }}
- name: AWS_REGION
value: {{ .Values.services.objectStore.region }}

@@ -84,6 +85,8 @@ spec:
value: {{ .Values.services.objectStore.appsBucketName | quote }}
- name: GLOBAL_CLOUD_BUCKET_NAME
value: {{ .Values.services.objectStore.globalBucketName | quote }}
- name: BACKUPS_BUCKET_NAME
value: {{ .Values.services.objectStore.backupsBucketName | quote }}
- name: PORT
value: {{ .Values.services.apps.port | quote }}
{{ if .Values.services.worker.publicApiRateLimitPerSecond }}

@@ -156,6 +159,24 @@ spec:
- name: ELASTIC_APM_SERVER_URL
value: {{ .Values.globals.elasticApmServerUrl | quote }}
{{ end }}
{{ if .Values.globals.globalAgentHttpProxy }}
- name: GLOBAL_AGENT_HTTP_PROXY
value: {{ .Values.globals.globalAgentHttpProxy | quote }}
{{ end }}
{{ if .Values.globals.globalAgentHttpsProxy }}
- name: GLOBAL_AGENT_HTTPS_PROXY
value: {{ .Values.globals.globalAgentHttpsProxy | quote }}
{{ end }}
{{ if .Values.globals.globalAgentNoProxy }}
- name: GLOBAL_AGENT_NO_PROXY
value: {{ .Values.globals.globalAgentNoProxy | quote }}
{{ end }}
- name: CDN_URL
value: {{ .Values.globals.cdnUrl }}
{{ if .Values.services.tlsRejectUnauthorized }}
- name: NODE_TLS_REJECT_UNAUTHORIZED
value: {{ .Values.services.tlsRejectUnauthorized }}
{{ end }}

image: budibase/apps:{{ .Values.globals.appVersion }}
imagePullPolicy: Always
@@ -42,6 +42,7 @@ spec:
secretKeyRef:
name: {{ template "budibase.fullname" . }}
key: objectStoreSecret

image: minio/minio
imagePullPolicy: ""
livenessProbe:
@@ -4,6 +4,9 @@ metadata:
annotations:
kompose.cmd: kompose convert
kompose.version: 1.21.0 (992df58d8)
{{ if .Values.globals.logAnnotations }}
{{ toYaml .Values.globals.logAnnotations | indent 4 }}
{{ end }}
creationTimestamp: null
labels:
app.kubernetes.io/name: budibase-proxy
@@ -60,5 +60,6 @@ spec:
- name: redis-data
persistentVolumeClaim:
claimName: redis-data

status: {}
{{- end }}
@@ -4,6 +4,9 @@ metadata:
annotations:
kompose.cmd: kompose convert
kompose.version: 1.21.0 (992df58d8)
{{ if .Values.globals.logAnnotations }}
{{ toYaml .Values.globals.logAnnotations | indent 4 }}
{{ end }}
creationTimestamp: null
labels:
io.kompose.service: worker-service

@@ -83,6 +86,8 @@ spec:
value: {{ .Values.services.objectStore.appsBucketName | quote }}
- name: GLOBAL_CLOUD_BUCKET_NAME
value: {{ .Values.services.objectStore.globalBucketName | quote }}
- name: BACKUPS_BUCKET_NAME
value: {{ .Values.services.objectStore.backupsBucketName | quote }}
- name: PORT
value: {{ .Values.services.worker.port | quote }}
- name: MULTI_TENANCY

@@ -145,6 +150,24 @@ spec:
- name: ELASTIC_APM_SERVER_URL
value: {{ .Values.globals.elasticApmServerUrl | quote }}
{{ end }}
{{ if .Values.globals.globalAgentHttpProxy }}
- name: GLOBAL_AGENT_HTTP_PROXY
value: {{ .Values.globals.globalAgentHttpProxy | quote }}
{{ end }}
{{ if .Values.globals.globalAgentHttpsProxy }}
- name: GLOBAL_AGENT_HTTPS_PROXY
value: {{ .Values.globals.globalAgentHttpsProxy | quote }}
{{ end }}
{{ if .Values.globals.globalAgentNoProxy }}
- name: GLOBAL_AGENT_NO_PROXY
value: {{ .Values.globals.globalAgentNoProxy | quote }}
{{ end }}
- name: CDN_URL
value: {{ .Values.globals.cdnUrl }}
{{ if .Values.services.tlsRejectUnauthorized }}
- name: NODE_TLS_REJECT_UNAUTHORIZED
value: {{ .Values.services.tlsRejectUnauthorized }}
{{ end }}

image: budibase/worker:{{ .Values.globals.appVersion }}
imagePullPolicy: Always
@@ -22,6 +22,12 @@ serviceAccount:

podAnnotations: {}

# logAnnotations:
# co.elastic.logs/multiline.type: pattern
# co.elastic.logs/multiline.pattern: '^[[:space:]]'
# co.elastic.logs/multiline.negate: false
# co.elastic.logs/multiline.match: after

podSecurityContext:
{}
# fsGroup: 2000

@@ -98,6 +104,7 @@ globals:
# if createSecrets is set to false, you can hard-code your secrets here
internalApiKey: ""
jwtSecret: ""
cdnUrl: ""

smtp:
enabled: false

@@ -105,10 +112,14 @@ globals:
# elasticApmEnabled:
# elasticApmSecretToken:
# elasticApmServerUrl:
# globalAgentHttpProxy:
# globalAgentHttpsProxy:
# globalAgentNoProxy:

services:
budibaseVersion: latest
dns: cluster.local
# tlsRejectUnauthorized: 0

proxy:
port: 10000
@@ -58,71 +58,101 @@ http {
|
|||
}
|
||||
|
||||
location ~ ^/api/(system|admin|global)/ {
|
||||
proxy_pass http://worker-service;
|
||||
proxy_read_timeout 120s;
|
||||
proxy_connect_timeout 120s;
|
||||
proxy_send_timeout 120s;
|
||||
proxy_http_version 1.1;
|
||||
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header Connection "";
|
||||
|
||||
proxy_pass http://worker-service;
|
||||
}
|
||||
|
||||
location /api/backups/ {
|
||||
proxy_read_timeout 1800s;
|
||||
proxy_connect_timeout 1800s;
|
||||
proxy_send_timeout 1800s;
|
||||
proxy_pass http://app-service;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Connection "";
|
||||
}
|
||||
|
||||
location /api/ {
|
||||
proxy_read_timeout 120s;
|
||||
proxy_connect_timeout 120s;
|
||||
proxy_send_timeout 120s;
|
||||
proxy_pass http://app-service;
|
||||
proxy_send_timeout 120s;
|
||||
proxy_http_version 1.1;
|
||||
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header Connection "";
|
||||
|
||||
proxy_pass http://app-service;
|
||||
}
|
||||
|
||||
location = / {
|
||||
proxy_pass http://app-service;
|
||||
proxy_read_timeout 120s;
|
||||
proxy_connect_timeout 120s;
|
||||
proxy_send_timeout 120s;
|
||||
proxy_http_version 1.1;
|
||||
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header Connection "";
|
||||
|
||||
proxy_pass http://app-service;
|
||||
}
|
||||
|
||||
location /app_ {
|
||||
proxy_pass http://app-service;
|
||||
proxy_read_timeout 120s;
|
||||
proxy_connect_timeout 120s;
|
||||
proxy_send_timeout 120s;
|
||||
proxy_http_version 1.1;
|
||||
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header Connection "";
|
||||
|
||||
proxy_pass http://app-service;
|
||||
}
|
||||
|
||||
location /app {
|
||||
proxy_pass http://app-service;
|
||||
proxy_read_timeout 120s;
|
||||
proxy_connect_timeout 120s;
|
||||
proxy_send_timeout 120s;
|
||||
proxy_http_version 1.1;
|
||||
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header Connection "";
|
||||
|
||||
proxy_pass http://app-service;
|
||||
}
|
||||
|
||||
location /builder {
|
||||
proxy_pass http://builder;
|
||||
proxy_read_timeout 120s;
|
||||
proxy_connect_timeout 120s;
|
||||
proxy_send_timeout 120s;
|
||||
proxy_http_version 1.1;
|
||||
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header Connection "";
|
||||
|
||||
proxy_pass http://builder;
|
||||
rewrite ^/builder(.*)$ /builder/$1 break;
|
||||
}
|
||||
|
||||
location /builder/ {
|
||||
proxy_pass http://builder;
|
||||
|
||||
proxy_http_version 1.1;
|
||||
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header Connection $connection_upgrade;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
|
||||
proxy_read_timeout 120s;
|
||||
proxy_connect_timeout 120s;
|
||||
proxy_send_timeout 120s;
|
||||
|
||||
proxy_pass http://builder;
|
||||
}
|
||||
|
||||
location /vite/ {
|
||||
|
|
|
@@ -51,11 +51,11 @@ http {
|
|||
proxy_buffering off;
|
||||
|
||||
set $csp_default "default-src 'self'";
|
||||
-set $csp_script "script-src 'self' 'unsafe-inline' 'unsafe-eval' https://cdn.budi.live https://js.intercomcdn.com https://widget.intercom.io";
+set $csp_script "script-src 'self' 'unsafe-inline' 'unsafe-eval' https://*.budibase.net https://cdn.budi.live https://js.intercomcdn.com https://widget.intercom.io";
|
||||
set $csp_style "style-src 'self' 'unsafe-inline' https://cdn.jsdelivr.net https://fonts.googleapis.com https://rsms.me https://maxcdn.bootstrapcdn.com";
|
||||
set $csp_object "object-src 'none'";
|
||||
set $csp_base_uri "base-uri 'self'";
|
||||
set $csp_connect "connect-src 'self' https://api-iam.intercom.io https://api-iam.intercom.io https://api-ping.intercom.io https://app.posthog.com wss://nexus-websocket-a.intercom.io wss://nexus-websocket-b.intercom.io https://nexus-websocket-a.intercom.io https://nexus-websocket-b.intercom.io https://uploads.intercomcdn.com https://uploads.intercomusercontent.com https://*.s3.amazonaws.com https://*.s3.us-east-2.amazonaws.com https://*.s3.us-east-1.amazonaws.com https://*.s3.us-west-1.amazonaws.com https://*.s3.us-west-2.amazonaws.com https://*.s3.af-south-1.amazonaws.com https://*.s3.ap-east-1.amazonaws.com https://*.s3.ap-southeast-3.amazonaws.com https://*.s3.ap-south-1.amazonaws.com https://*.s3.ap-northeast-3.amazonaws.com https://*.s3.ap-northeast-2.amazonaws.com https://*.s3.ap-southeast-1.amazonaws.com https://*.s3.ap-southeast-2.amazonaws.com https://*.s3.ap-northeast-1.amazonaws.com https://*.s3.ca-central-1.amazonaws.com https://*.s3.cn-north-1.amazonaws.com https://*.s3.cn-northwest-1.amazonaws.com https://*.s3.eu-central-1.amazonaws.com https://*.s3.eu-west-1.amazonaws.com https://*.s3.eu-west-2.amazonaws.com https://*.s3.eu-south-1.amazonaws.com https://*.s3.eu-west-3.amazonaws.com https://*.s3.eu-north-1.amazonaws.com https://*.s3.sa-east-1.amazonaws.com https://*.s3.me-south-1.amazonaws.com https://*.s3.us-gov-east-1.amazonaws.com https://*.s3.us-gov-west-1.amazonaws.com";
|
||||
set $csp_connect "connect-src 'self' https://*.budibase.net https://api-iam.intercom.io https://api-iam.intercom.io https://api-ping.intercom.io https://app.posthog.com wss://nexus-websocket-a.intercom.io wss://nexus-websocket-b.intercom.io https://nexus-websocket-a.intercom.io https://nexus-websocket-b.intercom.io https://uploads.intercomcdn.com https://uploads.intercomusercontent.com https://*.s3.amazonaws.com https://*.s3.us-east-2.amazonaws.com https://*.s3.us-east-1.amazonaws.com https://*.s3.us-west-1.amazonaws.com https://*.s3.us-west-2.amazonaws.com https://*.s3.af-south-1.amazonaws.com https://*.s3.ap-east-1.amazonaws.com https://*.s3.ap-southeast-3.amazonaws.com https://*.s3.ap-south-1.amazonaws.com https://*.s3.ap-northeast-3.amazonaws.com https://*.s3.ap-northeast-2.amazonaws.com https://*.s3.ap-southeast-1.amazonaws.com https://*.s3.ap-southeast-2.amazonaws.com https://*.s3.ap-northeast-1.amazonaws.com https://*.s3.ca-central-1.amazonaws.com https://*.s3.cn-north-1.amazonaws.com https://*.s3.cn-northwest-1.amazonaws.com https://*.s3.eu-central-1.amazonaws.com https://*.s3.eu-west-1.amazonaws.com https://*.s3.eu-west-2.amazonaws.com https://*.s3.eu-south-1.amazonaws.com https://*.s3.eu-west-3.amazonaws.com https://*.s3.eu-north-1.amazonaws.com https://*.s3.sa-east-1.amazonaws.com https://*.s3.me-south-1.amazonaws.com https://*.s3.us-gov-east-1.amazonaws.com https://*.s3.us-gov-west-1.amazonaws.com";
|
||||
set $csp_font "font-src 'self' data: https://cdn.jsdelivr.net https://fonts.gstatic.com https://rsms.me https://maxcdn.bootstrapcdn.com https://js.intercomcdn.com https://fonts.intercomcdn.com";
|
||||
set $csp_frame "frame-src 'self' https:";
|
||||
set $csp_img "img-src http: https: data: blob:";
|
||||
|
@@ -100,22 +100,47 @@ http {
|
|||
|
||||
location ~ ^/(builder|app_) {
|
||||
proxy_http_version 1.1;
|
||||
|
||||
proxy_set_header Connection $connection_upgrade;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header Host $host;
|
||||
|
||||
proxy_pass http://$apps:4002;
|
||||
}
|
||||
|
||||
location ~ ^/api/(system|admin|global)/ {
|
||||
proxy_set_header Host $host;
|
||||
|
||||
proxy_pass http://$worker:4003;
|
||||
}
|
||||
|
||||
location /worker/ {
|
||||
proxy_set_header Host $host;
|
||||
|
||||
proxy_pass http://$worker:4003;
|
||||
rewrite ^/worker/(.*)$ /$1 break;
|
||||
}
|
||||
|
||||
location /api/backups/ {
|
||||
# calls to export apps are limited
|
||||
limit_req zone=ratelimit burst=20 nodelay;
|
||||
|
||||
# 1800s timeout for app export requests
|
||||
proxy_read_timeout 1800s;
|
||||
proxy_connect_timeout 1800s;
|
||||
proxy_send_timeout 1800s;
|
||||
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Connection $connection_upgrade;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
|
||||
proxy_pass http://$apps:4002;
|
||||
}
|
||||
|
||||
location /api/ {
|
||||
# calls to the API are rate limited with bursting
|
||||
limit_req zone=ratelimit burst=20 nodelay;
|
||||
|
@@ -130,6 +155,7 @@ http {
|
|||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header Host $host;
|
||||
|
||||
proxy_pass http://$apps:4002;
|
||||
}
|
||||
|
@@ -149,6 +175,7 @@ http {
|
|||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header Host $host;
|
||||
|
||||
proxy_pass http://$apps:4002;
|
||||
}
|
||||
|
@@ -171,11 +198,13 @@ http {
|
|||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_set_header Host $http_host;
|
||||
|
||||
proxy_connect_timeout 300;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Connection "";
|
||||
chunked_transfer_encoding off;
|
||||
|
||||
proxy_pass http://$minio:9000;
|
||||
}
|
||||
|
||||
|
|
|
@@ -61,7 +61,8 @@ ADD hosting/single/nginx/nginx.conf /etc/nginx
 ADD hosting/single/nginx/nginx-default-site.conf /etc/nginx/sites-enabled/default
 RUN mkdir -p /var/log/nginx && \
 touch /var/log/nginx/error.log && \
-touch /var/run/nginx.pid
+touch /var/run/nginx.pid && \
+usermod -a -G tty www-data

 WORKDIR /
 RUN mkdir -p scripts/integrations/oracle
@@ -2,7 +2,8 @@ server {
|
|||
listen 80 default_server;
|
||||
listen [::]:80 default_server;
|
||||
server_name _;
|
||||
|
||||
error_log /dev/stderr warn;
|
||||
access_log /dev/stdout main;
|
||||
client_max_body_size 1000m;
|
||||
ignore_invalid_headers off;
|
||||
proxy_buffering off;
|
||||
|
@@ -43,6 +44,24 @@ server {
|
|||
rewrite ^/worker/(.*)$ /$1 break;
|
||||
}
|
||||
|
||||
location /api/backups/ {
|
||||
# calls to export apps are limited
|
||||
limit_req zone=ratelimit burst=20 nodelay;
|
||||
|
||||
# 1800s timeout for app export requests
|
||||
proxy_read_timeout 1800s;
|
||||
proxy_connect_timeout 1800s;
|
||||
proxy_send_timeout 1800s;
|
||||
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Connection $connection_upgrade;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
|
||||
proxy_pass http://127.0.0.1:4001;
|
||||
}
|
||||
|
||||
location /api/ {
|
||||
# calls to the API are rate limited with bursting
|
||||
limit_req zone=ratelimit burst=20 nodelay;
|
||||
|
|
|
@@ -1,5 +1,5 @@
 user www-data www-data;
-error_log /var/log/nginx/error.log;
+error_log /dev/stderr warn;
 pid /var/run/nginx.pid;
 worker_processes auto;
 worker_rlimit_nofile 8192;
@@ -27,12 +27,14 @@ if [[ "${TARGETBUILD}" = "aas" ]]; then
 else
 DATA_DIR=${DATA_DIR:-/data}
 fi

 mkdir -p ${DATA_DIR}
 # Mount NFS or GCP Filestore if env vars exist for it
-if [[ -z ${FILESHARE_IP} && -z ${FILESHARE_NAME} ]]; then
+if [[ ! -z ${FILESHARE_IP} && ! -z ${FILESHARE_NAME} ]]; then
 echo "Mounting NFS share"
 apt update && apt install -y nfs-common nfs-kernel-server
 echo "Mount file share ${FILESHARE_IP}:/${FILESHARE_NAME} to ${DATA_DIR}"
 mount -o nolock ${FILESHARE_IP}:/${FILESHARE_NAME} ${DATA_DIR}
 echo "Mounting completed."
 echo "Mounting result: $?"
 fi

 if [ -f "${DATA_DIR}/.env" ]; then

@@ -74,9 +76,9 @@ mkdir -p ${DATA_DIR}/couch/{dbs,views}
 mkdir -p ${DATA_DIR}/minio
 mkdir -p ${DATA_DIR}/search
 chown -R couchdb:couchdb ${DATA_DIR}/couch
-redis-server --requirepass $REDIS_PASSWORD &
-/opt/clouseau/bin/clouseau &
-/minio/minio server ${DATA_DIR}/minio &
+redis-server --requirepass $REDIS_PASSWORD > /dev/stdout 2>&1 &
+/opt/clouseau/bin/clouseau > /dev/stdout 2>&1 &
+/minio/minio server ${DATA_DIR}/minio > /dev/stdout 2>&1 &
 /docker-entrypoint.sh /opt/couchdb/bin/couchdb &
 /etc/init.d/nginx restart
 if [[ ! -z "${CUSTOM_DOMAIN}" ]]; then

@@ -85,16 +87,18 @@ if [[ ! -z "${CUSTOM_DOMAIN}" ]]; then
 chmod +x /etc/cron.d/certificate-renew
 # Request the certbot certificate
 /app/letsencrypt/certificate-request.sh ${CUSTOM_DOMAIN}
 /etc/init.d/nginx restart
 fi

 /etc/init.d/nginx restart
 pushd app
-pm2 start --name app "yarn run:docker"
+pm2 start -l /dev/stdout --name app "yarn run:docker"
 popd
 pushd worker
-pm2 start --name worker "yarn run:docker"
+pm2 start -l /dev/stdout --name worker "yarn run:docker"
 popd
 sleep 10
 echo "curl to couchdb endpoints"
 curl -X PUT ${COUCH_DB_URL}/_users
 curl -X PUT ${COUCH_DB_URL}/_replicator
 echo "end of runner.sh, sleeping ..."
 sleep infinity
@@ -1,5 +1,5 @@
 {
-"version": "2.0.34-alpha.5",
+"version": "2.1.46-alpha.6",
 "npmClient": "yarn",
 "packages": [
 "packages/*"
|
package.json
@@ -3,7 +3,7 @@
 "private": true,
 "devDependencies": {
 "@rollup/plugin-json": "^4.0.2",
-"@typescript-eslint/parser": "4.28.0",
+"@typescript-eslint/parser": "5.45.0",
 "babel-eslint": "^10.0.3",
 "eslint": "^7.28.0",
 "eslint-plugin-cypress": "^2.11.3",

@@ -18,7 +18,7 @@
 "rimraf": "^3.0.2",
 "rollup-plugin-replace": "^2.2.0",
 "svelte": "^3.38.2",
-"typescript": "4.5.5"
+"typescript": "4.7.3"
 },
 "scripts": {
 "setup": "node ./hosting/scripts/setup.js && yarn && yarn bootstrap && yarn build && yarn dev",

@@ -75,8 +75,8 @@
 "env:multi:disable": "lerna run env:multi:disable",
 "env:selfhost:enable": "lerna run env:selfhost:enable",
 "env:selfhost:disable": "lerna run env:selfhost:disable",
-"env:localdomain:enable": "lerna run env:localdomain:enable",
-"env:localdomain:disable": "lerna run env:localdomain:disable",
+"env:localdomain:enable": "./scripts/localdomain.sh enable",
+"env:localdomain:disable": "./scripts/localdomain.sh disable",
 "env:account:enable": "lerna run env:account:enable",
 "env:account:disable": "lerna run env:account:disable",
 "mode:self": "yarn env:selfhost:enable && yarn env:multi:disable && yarn env:account:disable",

@@ -87,4 +87,4 @@
 "install:pro": "bash scripts/pro/install.sh",
 "dep:clean": "yarn clean && yarn bootstrap"
 }
 }
 }
@@ -0,0 +1,15 @@
const mockS3 = {
  headBucket: jest.fn().mockReturnThis(),
  deleteObject: jest.fn().mockReturnThis(),
  deleteObjects: jest.fn().mockReturnThis(),
  createBucket: jest.fn().mockReturnThis(),
  listObjects: jest.fn().mockReturnThis(),
  promise: jest.fn().mockReturnThis(),
  catch: jest.fn(),
}

const AWS = {
  S3: jest.fn(() => mockS3),
}

export default AWS
@@ -1 +0,0 @@
module.exports = require("./src/cloud/accounts")

@@ -1 +0,0 @@
module.exports = require("./src/auth")

@@ -1,9 +0,0 @@
const generic = require("./src/cache/generic")

module.exports = {
  user: require("./src/cache/user"),
  app: require("./src/cache/appMetadata"),
  writethrough: require("./src/cache/writethrough"),
  ...generic,
  cache: generic,
}

@@ -1 +0,0 @@
module.exports = require("./src/constants")

@@ -1,24 +0,0 @@
const {
  getAppDB,
  getDevAppDB,
  getProdAppDB,
  getAppId,
  updateAppId,
  doInAppContext,
  doInTenant,
  doInContext,
} = require("./src/context")

const identity = require("./src/context/identity")

module.exports = {
  getAppDB,
  getDevAppDB,
  getProdAppDB,
  getAppId,
  updateAppId,
  doInAppContext,
  doInTenant,
  identity,
  doInContext,
}

@@ -1,7 +0,0 @@
module.exports = {
  ...require("./src/db/utils"),
  ...require("./src/db/constants"),
  ...require("./src/db"),
  ...require("./src/db/views"),
  ...require("./src/db/pouch"),
}

@@ -1 +0,0 @@
module.exports = require("./src/context/deprovision")

@@ -1 +0,0 @@
module.exports = require("./src/security/encryption")
@@ -0,0 +1,21 @@
import { Config } from "@jest/types"

const config: Config.InitialOptions = {
  preset: "ts-jest",
  testEnvironment: "node",
  setupFiles: ["./tests/jestSetup.ts"],
  collectCoverageFrom: ["src/**/*.{js,ts}"],
  coverageReporters: ["lcov", "json", "clover"],
}

if (!process.env.CI) {
  // use sources when not in CI
  config.moduleNameMapper = {
    "@budibase/types": "<rootDir>/../types/src",
    "^axios.*$": "<rootDir>/node_modules/axios/lib/axios.js",
  }
} else {
  console.log("Running tests with compiled dependency sources")
}

export default config
@@ -1 +0,0 @@
module.exports = require("./src/logging")

@@ -1 +0,0 @@
module.exports = require("./src/middleware")

@@ -1 +0,0 @@
module.exports = require("./src/migrations")

@@ -1,4 +0,0 @@
module.exports = {
  ...require("./src/objectStore"),
  ...require("./src/objectStore/utils"),
}
@@ -1,6 +1,6 @@
 {
 "name": "@budibase/backend-core",
-"version": "2.0.34-alpha.5",
+"version": "2.1.46-alpha.6",
 "description": "Budibase backend core libraries used in server and worker",
 "main": "dist/src/index.js",
 "types": "dist/src/index.d.ts",

@@ -16,11 +16,11 @@
 "prepack": "cp package.json dist",
 "build": "tsc -p tsconfig.build.json",
 "build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput",
-"test": "jest",
+"test": "jest --coverage --maxWorkers=2",
 "test:watch": "jest --watchAll"
 },
 "dependencies": {
-"@budibase/types": "2.0.34-alpha.5",
+"@budibase/types": "2.1.46-alpha.6",
 "@shopify/jest-koa-mocks": "5.0.1",
 "@techpass/passport-openidconnect": "0.3.2",
 "aws-sdk": "2.1030.0",

@@ -35,8 +35,8 @@
 "koa-passport": "4.1.4",
 "lodash": "4.17.21",
 "lodash.isarguments": "3.1.0",
 "nano": "^10.1.0",
 "node-fetch": "2.6.7",
 "passport-google-auth": "1.0.2",
 "passport-google-oauth": "2.0.0",
 "passport-jwt": "4.0.0",
 "passport-local": "1.0.0",

@@ -52,21 +52,11 @@
 "uuid": "8.3.2",
 "zlib": "1.0.5"
 },
-"jest": {
-"preset": "ts-jest",
-"testEnvironment": "node",
-"moduleNameMapper": {
-"@budibase/types": "<rootDir>/../types/src"
-},
-"setupFiles": [
-"./scripts/jestSetup.ts"
-]
-},
 "devDependencies": {
 "@types/chance": "1.1.3",
 "@types/ioredis": "4.28.0",
 "@types/jest": "27.5.1",
-"@types/koa": "2.0.52",
+"@types/koa": "2.13.4",
 "@types/lodash": "4.14.180",
 "@types/node": "14.18.20",
 "@types/node-fetch": "2.6.1",

@@ -77,12 +67,14 @@
 "@types/uuid": "8.3.4",
 "chance": "1.1.3",
 "ioredis-mock": "5.8.0",
-"jest": "27.5.1",
-"koa": "2.7.0",
+"jest": "28.1.1",
+"koa": "2.13.4",
 "nodemon": "2.0.16",
 "pouchdb-adapter-memory": "7.2.2",
 "timekeeper": "2.2.0",
-"ts-jest": "27.1.5",
+"ts-jest": "28.0.4",
 "ts-node": "10.8.1",
 "tsconfig-paths": "4.0.0",
 "typescript": "4.7.3"
 },
 "gitHead": "d1836a898cab3f8ab80ee6d8f42be1a9eed7dcdc"
@@ -1 +0,0 @@
module.exports = require("./src/security/permissions")

@@ -1,3 +0,0 @@
module.exports = {
  ...require("./src/plugin"),
}

@@ -0,0 +1 @@
export * from "./src/plugin"

@@ -1,5 +0,0 @@
module.exports = {
  Client: require("./src/redis"),
  utils: require("./src/redis/utils"),
  clients: require("./src/redis/init"),
}

@@ -1 +0,0 @@
module.exports = require("./src/security/roles")

@@ -1,12 +0,0 @@
import env from "../src/environment"
import { mocks } from "../tests/utilities"

// mock all dates to 2020-01-01T00:00:00.000Z
// use tk.reset() to use real dates in individual tests
import tk from "timekeeper"
tk.freeze(mocks.date.MOCK_DATE)

env._set("SELF_HOSTED", "1")
env._set("NODE_ENV", "jest")
env._set("JWT_SECRET", "test-jwtsecret")
env._set("LOG_LEVEL", "silent")

@@ -1 +0,0 @@
module.exports = require("./src/security/sessions")
@@ -1,16 +1,14 @@
|
|||
const passport = require("koa-passport")
|
||||
const _passport = require("koa-passport")
|
||||
const LocalStrategy = require("passport-local").Strategy
|
||||
const JwtStrategy = require("passport-jwt").Strategy
|
||||
import { getGlobalDB } from "./tenancy"
|
||||
import { getGlobalDB } from "../tenancy"
|
||||
const refresh = require("passport-oauth2-refresh")
|
||||
import { Configs } from "./constants"
|
||||
import { getScopedConfig } from "./db/utils"
|
||||
import { Config } from "../constants"
|
||||
import { getScopedConfig } from "../db"
|
||||
import {
|
||||
jwt,
|
||||
jwt as jwtPassport,
|
||||
local,
|
||||
authenticated,
|
||||
google,
|
||||
oidc,
|
||||
auditLog,
|
||||
tenancy,
|
||||
authError,
|
||||
|
@ -21,17 +19,41 @@ import {
|
|||
builderOnly,
|
||||
builderOrAdmin,
|
||||
joiValidator,
|
||||
} from "./middleware"
|
||||
import { invalidateUser } from "./cache/user"
|
||||
oidc,
|
||||
google,
|
||||
} from "../middleware"
|
||||
import { invalidateUser } from "../cache/user"
|
||||
import { User } from "@budibase/types"
|
||||
import { logAlert } from "../logging"
|
||||
export {
|
||||
auditLog,
|
||||
authError,
|
||||
internalApi,
|
||||
ssoCallbackUrl,
|
||||
adminOnly,
|
||||
builderOnly,
|
||||
builderOrAdmin,
|
||||
joiValidator,
|
||||
google,
|
||||
oidc,
|
||||
} from "../middleware"
|
||||
export const buildAuthMiddleware = authenticated
|
||||
export const buildTenancyMiddleware = tenancy
|
||||
export const buildCsrfMiddleware = csrf
|
||||
export const passport = _passport
|
||||
export const jwt = require("jsonwebtoken")
|
||||
|
||||
// Strategies
|
||||
passport.use(new LocalStrategy(local.options, local.authenticate))
|
||||
passport.use(new JwtStrategy(jwt.options, jwt.authenticate))
|
||||
_passport.use(new LocalStrategy(local.options, local.authenticate))
|
||||
if (jwtPassport.options.secretOrKey) {
|
||||
_passport.use(new JwtStrategy(jwtPassport.options, jwtPassport.authenticate))
|
||||
} else {
|
||||
logAlert("No JWT Secret supplied, cannot configure JWT strategy")
|
||||
}
|
||||
|
||||
passport.serializeUser((user: User, done: any) => done(null, user))
|
||||
_passport.serializeUser((user: User, done: any) => done(null, user))
|
||||
|
||||
passport.deserializeUser(async (user: User, done: any) => {
|
||||
_passport.deserializeUser(async (user: User, done: any) => {
|
||||
const db = getGlobalDB()
|
||||
|
||||
try {
|
||||
|
@ -71,7 +93,7 @@ async function refreshOIDCAccessToken(
|
|||
|
||||
return new Promise(resolve => {
|
||||
refresh.requestNewAccessToken(
|
||||
Configs.OIDC,
|
||||
Config.OIDC,
|
||||
refreshToken,
|
||||
(err: any, accessToken: string, refreshToken: any, params: any) => {
|
||||
resolve({ err, accessToken, refreshToken, params })
|
||||
|
@ -101,7 +123,7 @@ async function refreshGoogleAccessToken(
|
|||
|
||||
return new Promise(resolve => {
|
||||
refresh.requestNewAccessToken(
|
||||
Configs.GOOGLE,
|
||||
Config.GOOGLE,
|
||||
refreshToken,
|
||||
(err: any, accessToken: string, refreshToken: string, params: any) => {
|
||||
resolve({ err, accessToken, refreshToken, params })
|
||||
|
@ -110,7 +132,7 @@ async function refreshGoogleAccessToken(
|
|||
})
|
||||
}
|
||||
|
||||
async function refreshOAuthToken(
|
||||
export async function refreshOAuthToken(
|
||||
refreshToken: string,
|
||||
configType: string,
|
||||
configId: string
|
||||
|
@ -124,7 +146,7 @@ async function refreshOAuthToken(
|
|||
|
||||
let chosenConfig = {}
|
||||
let refreshResponse
|
||||
if (configType === Configs.OIDC) {
|
||||
if (configType === Config.OIDC) {
|
||||
// configId - retrieved from cookie.
|
||||
chosenConfig = config.configs.filter((c: any) => c.uuid === configId)[0]
|
||||
if (!chosenConfig) {
|
||||
|
@ -147,7 +169,7 @@ async function refreshOAuthToken(
|
|||
return refreshResponse
|
||||
}
|
||||
|
||||
async function updateUserOAuth(userId: string, oAuthConfig: any) {
|
||||
export async function updateUserOAuth(userId: string, oAuthConfig: any) {
|
||||
const details = {
|
||||
accessToken: oAuthConfig.accessToken,
|
||||
refreshToken: oAuthConfig.refreshToken,
|
||||
|
@ -174,23 +196,3 @@ async function updateUserOAuth(userId: string, oAuthConfig: any) {
|
|||
console.error("Could not update OAuth details for current user", e)
|
||||
}
|
||||
}
|
||||
|
||||
export = {
|
||||
buildAuthMiddleware: authenticated,
|
||||
passport,
|
||||
google,
|
||||
oidc,
|
||||
jwt: require("jsonwebtoken"),
|
||||
buildTenancyMiddleware: tenancy,
|
||||
auditLog,
|
||||
authError,
|
||||
buildCsrfMiddleware: csrf,
|
||||
internalApi,
|
||||
refreshOAuthToken,
|
||||
updateUserOAuth,
|
||||
ssoCallbackUrl,
|
||||
adminOnly,
|
||||
builderOnly,
|
||||
builderOrAdmin,
|
||||
joiValidator,
|
||||
}
|
|
@@ -0,0 +1 @@
export * from "./auth"
@@ -1,6 +1,6 @@
|
|||
const redis = require("../redis/init")
|
||||
const { doWithDB } = require("../db")
|
||||
const { DocumentType } = require("../db/constants")
|
||||
import { getAppClient } from "../redis/init"
|
||||
import { doWithDB, DocumentType } from "../db"
|
||||
import { Database } from "@budibase/types"
|
||||
|
||||
const AppState = {
|
||||
INVALID: "invalid",
|
||||
|
@ -10,17 +10,17 @@ const EXPIRY_SECONDS = 3600
|
|||
/**
|
||||
* The default populate app metadata function
|
||||
*/
|
||||
const populateFromDB = async appId => {
|
||||
async function populateFromDB(appId: string) {
|
||||
return doWithDB(
|
||||
appId,
|
||||
db => {
|
||||
(db: Database) => {
|
||||
return db.get(DocumentType.APP_METADATA)
|
||||
},
|
||||
{ skip_setup: true }
|
||||
)
|
||||
}
|
||||
|
||||
const isInvalid = metadata => {
|
||||
function isInvalid(metadata?: { state: string }) {
|
||||
return !metadata || metadata.state === AppState.INVALID
|
||||
}
|
||||
|
||||
|
@ -31,15 +31,15 @@ const isInvalid = metadata => {
|
|||
* @param {string} appId the id of the app to get metadata from.
|
||||
* @returns {object} the app metadata.
|
||||
*/
|
||||
exports.getAppMetadata = async appId => {
|
||||
const client = await redis.getAppClient()
|
||||
export async function getAppMetadata(appId: string) {
|
||||
const client = await getAppClient()
|
||||
// try cache
|
||||
let metadata = await client.get(appId)
|
||||
if (!metadata) {
|
||||
let expiry = EXPIRY_SECONDS
|
||||
let expiry: number | undefined = EXPIRY_SECONDS
|
||||
try {
|
||||
metadata = await populateFromDB(appId)
|
||||
} catch (err) {
|
||||
} catch (err: any) {
|
||||
// app DB left around, but no metadata, it is invalid
|
||||
if (err && err.status === 404) {
|
||||
metadata = { state: AppState.INVALID }
|
||||
|
@ -74,11 +74,11 @@ exports.getAppMetadata = async appId => {
|
|||
* @param newMetadata {object|undefined} optional - can simply provide the new metadata to update with.
|
||||
* @return {Promise<void>} will respond with success when cache is updated.
|
||||
*/
|
||||
exports.invalidateAppMetadata = async (appId, newMetadata = null) => {
|
||||
export async function invalidateAppMetadata(appId: string, newMetadata?: any) {
|
||||
if (!appId) {
|
||||
throw "Cannot invalidate if no app ID provided."
|
||||
}
|
||||
const client = await redis.getAppClient()
|
||||
const client = await getAppClient()
|
||||
await client.delete(appId)
|
||||
if (newMetadata) {
|
||||
await client.store(appId, newMetadata, EXPIRY_SECONDS)
|
|
@@ -1,6 +1,6 @@
|
|||
import { getTenantId } from "../../context"
|
||||
import redis from "../../redis/init"
|
||||
import RedisWrapper from "../../redis"
|
||||
import * as redis from "../../redis/init"
|
||||
import { Client } from "../../redis"
|
||||
|
||||
function generateTenantKey(key: string) {
|
||||
const tenantId = getTenantId()
|
||||
|
@ -8,9 +8,9 @@ function generateTenantKey(key: string) {
|
|||
}
|
||||
|
||||
export = class BaseCache {
|
||||
client: RedisWrapper | undefined
|
||||
client: Client | undefined
|
||||
|
||||
constructor(client: RedisWrapper | undefined = undefined) {
|
||||
constructor(client: Client | undefined = undefined) {
|
||||
this.client = client
|
||||
}
|
||||
|
||||
|
|
|
@@ -1,30 +0,0 @@
|
|||
const BaseCache = require("./base")
|
||||
|
||||
const GENERIC = new BaseCache()
|
||||
|
||||
exports.CacheKeys = {
|
||||
CHECKLIST: "checklist",
|
||||
INSTALLATION: "installation",
|
||||
ANALYTICS_ENABLED: "analyticsEnabled",
|
||||
UNIQUE_TENANT_ID: "uniqueTenantId",
|
||||
EVENTS: "events",
|
||||
BACKFILL_METADATA: "backfillMetadata",
|
||||
EVENTS_RATE_LIMIT: "eventsRateLimit",
|
||||
}
|
||||
|
||||
exports.TTL = {
|
||||
ONE_MINUTE: 600,
|
||||
ONE_HOUR: 3600,
|
||||
ONE_DAY: 86400,
|
||||
}
|
||||
|
||||
function performExport(funcName) {
|
||||
return (...args) => GENERIC[funcName](...args)
|
||||
}
|
||||
|
||||
exports.keys = performExport("keys")
|
||||
exports.get = performExport("get")
|
||||
exports.store = performExport("store")
|
||||
exports.delete = performExport("delete")
|
||||
exports.withCache = performExport("withCache")
|
||||
exports.bustCache = performExport("bustCache")
|
|
@@ -0,0 +1,30 @@
|
|||
const BaseCache = require("./base")
|
||||
|
||||
const GENERIC = new BaseCache()
|
||||
|
||||
export enum CacheKey {
|
||||
CHECKLIST = "checklist",
|
||||
INSTALLATION = "installation",
|
||||
ANALYTICS_ENABLED = "analyticsEnabled",
|
||||
UNIQUE_TENANT_ID = "uniqueTenantId",
|
||||
EVENTS = "events",
|
||||
BACKFILL_METADATA = "backfillMetadata",
|
||||
EVENTS_RATE_LIMIT = "eventsRateLimit",
|
||||
}
|
||||
|
||||
export enum TTL {
|
||||
ONE_MINUTE = 600,
|
||||
ONE_HOUR = 3600,
|
||||
ONE_DAY = 86400,
|
||||
}
|
||||
|
||||
function performExport(funcName: string) {
|
||||
return (...args: any) => GENERIC[funcName](...args)
|
||||
}
|
||||
|
||||
export const keys = performExport("keys")
|
||||
export const get = performExport("get")
|
||||
export const store = performExport("store")
|
||||
export const destroy = performExport("delete")
|
||||
export const withCache = performExport("withCache")
|
||||
export const bustCache = performExport("bustCache")
|
|
@ -0,0 +1,5 @@
|
|||
export * as generic from "./generic"
|
||||
export * as user from "./user"
|
||||
export * as app from "./appMetadata"
|
||||
export * as writethrough from "./writethrough"
|
||||
export * from "./generic"
|
|
@@ -1,6 +1,6 @@
|
|||
require("../../../tests/utilities/TestConfiguration")
|
||||
require("../../../tests")
|
||||
const { Writethrough } = require("../writethrough")
|
||||
const { dangerousGetDB } = require("../../db")
|
||||
const { getDB } = require("../../db")
|
||||
const tk = require("timekeeper")
|
||||
|
||||
const START_DATE = Date.now()
|
||||
|
@ -8,8 +8,8 @@ tk.freeze(START_DATE)
|
|||
|
||||
const DELAY = 5000
|
||||
|
||||
const db = dangerousGetDB("test")
|
||||
const db2 = dangerousGetDB("test2")
|
||||
const db = getDB("test")
|
||||
const db2 = getDB("test2")
|
||||
const writethrough = new Writethrough(db, DELAY), writethrough2 = new Writethrough(db2, DELAY)
|
||||
|
||||
describe("writethrough", () => {
|
||||
|
|
|
@@ -1,15 +1,16 @@
|
|||
const redis = require("../redis/init")
|
||||
const { getTenantId, lookupTenantId, doWithGlobalDB } = require("../tenancy")
|
||||
const env = require("../environment")
|
||||
const accounts = require("../cloud/accounts")
|
||||
import * as redis from "../redis/init"
|
||||
import { getTenantId, lookupTenantId, doWithGlobalDB } from "../tenancy"
|
||||
import env from "../environment"
|
||||
import * as accounts from "../cloud/accounts"
|
||||
import { Database } from "@budibase/types"
|
||||
|
||||
const EXPIRY_SECONDS = 3600
|
||||
|
||||
/**
|
||||
* The default populate user function
|
||||
*/
|
||||
const populateFromDB = async (userId, tenantId) => {
|
||||
const user = await doWithGlobalDB(tenantId, db => db.get(userId))
|
||||
async function populateFromDB(userId: string, tenantId: string) {
|
||||
const user = await doWithGlobalDB(tenantId, (db: Database) => db.get(userId))
|
||||
user.budibaseAccess = true
|
||||
if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) {
|
||||
const account = await accounts.getAccount(user.email)
|
||||
|
@ -31,7 +32,11 @@ const populateFromDB = async (userId, tenantId) => {
|
|||
* @param {*} populateUser function to provide the user for re-caching. default to couch db
|
||||
* @returns
|
||||
*/
|
||||
exports.getUser = async (userId, tenantId = null, populateUser = null) => {
|
||||
export async function getUser(
|
||||
userId: string,
|
||||
tenantId?: string,
|
||||
populateUser?: any
|
||||
) {
|
||||
if (!populateUser) {
|
||||
populateUser = populateFromDB
|
||||
}
|
||||
|
@ -47,7 +52,7 @@ exports.getUser = async (userId, tenantId = null, populateUser = null) => {
|
|||
let user = await client.get(userId)
|
||||
if (!user) {
|
||||
user = await populateUser(userId, tenantId)
|
||||
client.store(userId, user, EXPIRY_SECONDS)
|
||||
await client.store(userId, user, EXPIRY_SECONDS)
|
||||
}
|
||||
if (user && !user.tenantId && tenantId) {
|
||||
// make sure the tenant ID is always correct/set
|
||||
|
@ -56,7 +61,7 @@ exports.getUser = async (userId, tenantId = null, populateUser = null) => {
|
|||
return user
|
||||
}
|
||||
|
||||
exports.invalidateUser = async userId => {
|
||||
export async function invalidateUser(userId: string) {
|
||||
const client = await redis.getUserClient()
|
||||
await client.delete(userId)
|
||||
}
|
|
@@ -1,7 +1,7 @@
|
|||
import BaseCache from "./base"
|
||||
import { getWritethroughClient } from "../redis/init"
|
||||
import { logWarn } from "../logging"
|
||||
import PouchDB from "pouchdb"
|
||||
import { Database } from "@budibase/types"
|
||||
|
||||
const DEFAULT_WRITE_RATE_MS = 10000
|
||||
let CACHE: BaseCache | null = null
|
||||
|
@ -19,7 +19,7 @@ async function getCache() {
|
|||
return CACHE
|
||||
}
|
||||
|
||||
function makeCacheKey(db: PouchDB.Database, key: string) {
|
||||
function makeCacheKey(db: Database, key: string) {
|
||||
return db.name + key
|
||||
}
|
||||
|
||||
|
@ -28,7 +28,7 @@ function makeCacheItem(doc: any, lastWrite: number | null = null): CacheItem {
|
|||
}
|
||||
|
||||
export async function put(
|
||||
db: PouchDB.Database,
|
||||
db: Database,
|
||||
doc: any,
|
||||
writeRateMs: number = DEFAULT_WRITE_RATE_MS
|
||||
) {
|
||||
|
@ -64,7 +64,7 @@ export async function put(
|
|||
return { ok: true, id: output._id, rev: output._rev }
|
||||
}
|
||||
|
||||
export async function get(db: PouchDB.Database, id: string): Promise<any> {
|
||||
export async function get(db: Database, id: string): Promise<any> {
|
||||
const cache = await getCache()
|
||||
const cacheKey = makeCacheKey(db, id)
|
||||
let cacheItem: CacheItem = await cache.get(cacheKey)
|
||||
|
@ -77,7 +77,7 @@ export async function get(db: PouchDB.Database, id: string): Promise<any> {
|
|||
}
|
||||
|
||||
export async function remove(
|
||||
db: PouchDB.Database,
|
||||
db: Database,
|
||||
docOrId: any,
|
||||
rev?: any
|
||||
): Promise<void> {
|
||||
|
@ -95,13 +95,10 @@ export async function remove(
|
|||
}
|
||||
|
||||
export class Writethrough {
|
||||
db: PouchDB.Database
|
||||
db: Database
|
||||
writeRateMs: number
|
||||
|
||||
constructor(
|
||||
db: PouchDB.Database,
|
||||
writeRateMs: number = DEFAULT_WRITE_RATE_MS
|
||||
) {
|
||||
constructor(db: Database, writeRateMs: number = DEFAULT_WRITE_RATE_MS) {
|
||||
this.db = db
|
||||
this.writeRateMs = writeRateMs
|
||||
}
|
||||
|
|
|
@@ -1,6 +1,6 @@
|
|||
import API from "./api"
|
||||
import env from "../environment"
|
||||
import { Headers } from "../constants"
|
||||
import { Header } from "../constants"
|
||||
import { CloudAccount } from "@budibase/types"
|
||||
|
||||
const api = new API(env.ACCOUNT_PORTAL_URL)
|
||||
|
@ -14,7 +14,7 @@ export const getAccount = async (
|
|||
const response = await api.post(`/api/accounts/search`, {
|
||||
body: payload,
|
||||
headers: {
|
||||
[Headers.API_KEY]: env.ACCOUNT_PORTAL_API_KEY,
|
||||
[Header.API_KEY]: env.ACCOUNT_PORTAL_API_KEY,
|
||||
},
|
||||
})
|
||||
|
||||
|
@ -35,7 +35,7 @@ export const getAccountByTenantId = async (
|
|||
const response = await api.post(`/api/accounts/search`, {
|
||||
body: payload,
|
||||
headers: {
|
||||
[Headers.API_KEY]: env.ACCOUNT_PORTAL_API_KEY,
|
||||
[Header.API_KEY]: env.ACCOUNT_PORTAL_API_KEY,
|
||||
},
|
||||
})
|
||||
|
||||
|
@ -50,7 +50,7 @@ export const getAccountByTenantId = async (
|
|||
export const getStatus = async () => {
|
||||
const response = await api.get(`/api/status`, {
|
||||
headers: {
|
||||
[Headers.API_KEY]: env.ACCOUNT_PORTAL_API_KEY,
|
||||
[Header.API_KEY]: env.ACCOUNT_PORTAL_API_KEY,
|
||||
},
|
||||
})
|
||||
const json = await response.json()
|
||||
|
|
|
@@ -1,42 +0,0 @@
|
|||
const fetch = require("node-fetch")
|
||||
class API {
|
||||
constructor(host) {
|
||||
this.host = host
|
||||
}
|
||||
|
||||
apiCall =
|
||||
method =>
|
||||
async (url = "", options = {}) => {
|
||||
if (!options.headers) {
|
||||
options.headers = {}
|
||||
}
|
||||
|
||||
if (!options.headers["Content-Type"]) {
|
||||
options.headers = {
|
||||
"Content-Type": "application/json",
|
||||
Accept: "application/json",
|
||||
...options.headers,
|
||||
}
|
||||
}
|
||||
|
||||
let json = options.headers["Content-Type"] === "application/json"
|
||||
|
||||
const requestOptions = {
|
||||
method: method,
|
||||
body: json ? JSON.stringify(options.body) : options.body,
|
||||
headers: options.headers,
|
||||
// TODO: See if this is necessary
|
||||
credentials: "include",
|
||||
}
|
||||
|
||||
return await fetch(`${this.host}${url}`, requestOptions)
|
||||
}
|
||||
|
||||
post = this.apiCall("POST")
|
||||
get = this.apiCall("GET")
|
||||
patch = this.apiCall("PATCH")
|
||||
del = this.apiCall("DELETE")
|
||||
put = this.apiCall("PUT")
|
||||
}
|
||||
|
||||
module.exports = API
|
|
@@ -0,0 +1,55 @@
|
|||
import fetch from "node-fetch"
|
||||
|
||||
export = class API {
|
||||
host: string
|
||||
|
||||
constructor(host: string) {
|
||||
this.host = host
|
||||
}
|
||||
|
||||
async apiCall(method: string, url: string, options?: any) {
|
||||
if (!options.headers) {
|
||||
options.headers = {}
|
||||
}
|
||||
|
||||
if (!options.headers["Content-Type"]) {
|
||||
options.headers = {
|
||||
"Content-Type": "application/json",
|
||||
Accept: "application/json",
|
||||
...options.headers,
|
||||
}
|
||||
}
|
||||
|
||||
let json = options.headers["Content-Type"] === "application/json"
|
||||
|
||||
const requestOptions = {
|
||||
method: method,
|
||||
body: json ? JSON.stringify(options.body) : options.body,
|
||||
headers: options.headers,
|
||||
// TODO: See if this is necessary
|
||||
credentials: "include",
|
||||
}
|
||||
|
||||
return await fetch(`${this.host}${url}`, requestOptions)
|
||||
}
|
||||
|
||||
async post(url: string, options?: any) {
|
||||
return this.apiCall("POST", url, options)
|
||||
}
|
||||
|
||||
async get(url: string, options?: any) {
|
||||
return this.apiCall("GET", url, options)
|
||||
}
|
||||
|
||||
async patch(url: string, options?: any) {
|
||||
return this.apiCall("PATCH", url, options)
|
||||
}
|
||||
|
||||
async del(url: string, options?: any) {
|
||||
return this.apiCall("DELETE", url, options)
|
||||
}
|
||||
|
||||
async put(url: string, options?: any) {
|
||||
return this.apiCall("PUT", url, options)
|
||||
}
|
||||
}
|
|
@@ -1,650 +0,0 @@
|
|||
const util = require("util")
|
||||
const assert = require("assert")
|
||||
const wrapEmitter = require("emitter-listener")
|
||||
const async_hooks = require("async_hooks")
|
||||
|
||||
const CONTEXTS_SYMBOL = "cls@contexts"
|
||||
const ERROR_SYMBOL = "error@context"
|
||||
|
||||
const DEBUG_CLS_HOOKED = process.env.DEBUG_CLS_HOOKED
|
||||
|
||||
let currentUid = -1
|
||||
|
||||
module.exports = {
|
||||
getNamespace: getNamespace,
|
||||
createNamespace: createNamespace,
|
||||
destroyNamespace: destroyNamespace,
|
||||
reset: reset,
|
||||
ERROR_SYMBOL: ERROR_SYMBOL,
|
||||
}
|
||||
|
||||
function Namespace(name) {
|
||||
this.name = name
|
||||
// changed in 2.7: no default context
|
||||
this.active = null
|
||||
this._set = []
|
||||
this.id = null
|
||||
this._contexts = new Map()
|
||||
this._indent = 0
|
||||
this._hook = null
|
||||
}
|
||||
|
||||
Namespace.prototype.set = function set(key, value) {
|
||||
if (!this.active) {
|
||||
throw new Error(
|
||||
"No context available. ns.run() or ns.bind() must be called first."
|
||||
)
|
||||
}
|
||||
|
||||
this.active[key] = value
|
||||
|
||||
if (DEBUG_CLS_HOOKED) {
|
||||
const indentStr = " ".repeat(this._indent < 0 ? 0 : this._indent)
|
||||
debug2(
|
||||
indentStr +
|
||||
"CONTEXT-SET KEY:" +
|
||||
key +
|
||||
"=" +
|
||||
value +
|
||||
" in ns:" +
|
||||
this.name +
|
||||
" currentUid:" +
|
||||
currentUid +
|
||||
" active:" +
|
||||
util.inspect(this.active, { showHidden: true, depth: 2, colors: true })
|
||||
)
|
||||
}
|
||||
|
||||
return value
|
||||
}
|
||||
|
||||
Namespace.prototype.get = function get(key) {
|
||||
if (!this.active) {
|
||||
if (DEBUG_CLS_HOOKED) {
|
||||
const asyncHooksCurrentId = async_hooks.currentId()
|
||||
const triggerId = async_hooks.triggerAsyncId()
|
||||
const indentStr = " ".repeat(this._indent < 0 ? 0 : this._indent)
|
||||
debug2(
|
||||
`${indentStr}CONTEXT-GETTING KEY NO ACTIVE NS: (${this.name}) ${key}=undefined currentUid:${currentUid} asyncHooksCurrentId:${asyncHooksCurrentId} triggerId:${triggerId} len:${this._set.length}`
|
||||
)
|
||||
}
|
||||
return undefined
|
||||
}
|
||||
if (DEBUG_CLS_HOOKED) {
|
||||
const asyncHooksCurrentId = async_hooks.executionAsyncId()
|
||||
const triggerId = async_hooks.triggerAsyncId()
|
||||
const indentStr = " ".repeat(this._indent < 0 ? 0 : this._indent)
|
||||
debug2(
|
||||
indentStr +
|
||||
"CONTEXT-GETTING KEY:" +
|
||||
key +
|
||||
"=" +
|
||||
this.active[key] +
|
||||
" (" +
|
||||
this.name +
|
||||
") currentUid:" +
|
||||
currentUid +
|
||||
" active:" +
|
||||
util.inspect(this.active, { showHidden: true, depth: 2, colors: true })
|
||||
)
|
||||
debug2(
|
||||
`${indentStr}CONTEXT-GETTING KEY: (${this.name}) ${key}=${
|
||||
this.active[key]
|
||||
} currentUid:${currentUid} asyncHooksCurrentId:${asyncHooksCurrentId} triggerId:${triggerId} len:${
|
||||
this._set.length
|
||||
} active:${util.inspect(this.active)}`
|
||||
)
|
||||
}
|
||||
return this.active[key]
|
||||
}
|
||||
|
||||
Namespace.prototype.createContext = function createContext() {
|
||||
// Prototype inherit existing context if created a new child context within existing context.
|
||||
let context = Object.create(this.active ? this.active : Object.prototype)
|
||||
context._ns_name = this.name
|
||||
context.id = currentUid
|
||||
|
||||
if (DEBUG_CLS_HOOKED) {
|
||||
const asyncHooksCurrentId = async_hooks.executionAsyncId()
|
||||
const triggerId = async_hooks.triggerAsyncId()
|
||||
const indentStr = " ".repeat(this._indent < 0 ? 0 : this._indent)
|
||||
debug2(
|
||||
`${indentStr}CONTEXT-CREATED Context: (${
|
||||
this.name
|
||||
}) currentUid:${currentUid} asyncHooksCurrentId:${asyncHooksCurrentId} triggerId:${triggerId} len:${
|
||||
this._set.length
|
||||
} context:${util.inspect(context, {
|
||||
showHidden: true,
|
||||
depth: 2,
|
||||
colors: true,
|
||||
})}`
|
||||
)
|
||||
}
|
||||
|
||||
return context
|
||||
}
|
||||
|
||||
Namespace.prototype.run = function run(fn) {
|
||||
let context = this.createContext()
|
||||
this.enter(context)
|
||||
|
||||
try {
|
||||
if (DEBUG_CLS_HOOKED) {
|
||||
const triggerId = async_hooks.triggerAsyncId()
|
||||
const asyncHooksCurrentId = async_hooks.executionAsyncId()
|
||||
const indentStr = " ".repeat(this._indent < 0 ? 0 : this._indent)
|
||||
debug2(
|
||||
`${indentStr}CONTEXT-RUN BEGIN: (${
|
||||
this.name
|
||||
}) currentUid:${currentUid} triggerId:${triggerId} asyncHooksCurrentId:${asyncHooksCurrentId} len:${
|
||||
this._set.length
|
||||
} context:${util.inspect(context)}`
|
||||
)
|
||||
}
|
||||
fn(context)
|
||||
return context
|
||||
} catch (exception) {
|
||||
if (exception) {
|
||||
exception[ERROR_SYMBOL] = context
|
||||
}
|
||||
throw exception
|
||||
} finally {
|
||||
if (DEBUG_CLS_HOOKED) {
|
||||
const triggerId = async_hooks.triggerAsyncId()
|
||||
const asyncHooksCurrentId = async_hooks.executionAsyncId()
|
||||
const indentStr = " ".repeat(this._indent < 0 ? 0 : this._indent)
|
||||
debug2(
|
||||
`${indentStr}CONTEXT-RUN END: (${
|
||||
this.name
|
||||
}) currentUid:${currentUid} triggerId:${triggerId} asyncHooksCurrentId:${asyncHooksCurrentId} len:${
|
||||
this._set.length
|
||||
} ${util.inspect(context)}`
|
||||
)
|
||||
}
|
||||
this.exit(context)
|
||||
}
|
||||
}
|
||||
|
||||
Namespace.prototype.runAndReturn = function runAndReturn(fn) {
|
||||
let value
|
||||
this.run(function (context) {
|
||||
value = fn(context)
|
||||
})
|
||||
return value
|
||||
}
|
||||
|
||||
/**
|
||||
* Uses global Promise and assumes Promise is cls friendly or wrapped already.
|
||||
* @param {function} fn
|
||||
* @returns {*}
|
||||
*/
|
||||
Namespace.prototype.runPromise = function runPromise(fn) {
|
||||
let context = this.createContext()
|
||||
this.enter(context)
|
||||
|
||||
let promise = fn(context)
|
||||
if (!promise || !promise.then || !promise.catch) {
|
||||
throw new Error("fn must return a promise.")
|
||||
}
|
||||
|
||||
if (DEBUG_CLS_HOOKED) {
|
||||
debug2(
|
||||
"CONTEXT-runPromise BEFORE: (" +
|
||||
this.name +
|
||||
") currentUid:" +
|
||||
currentUid +
|
||||
" len:" +
|
||||
this._set.length +
|
||||
" " +
|
||||
util.inspect(context)
|
||||
)
|
||||
}
|
||||
|
||||
return promise
|
||||
.then(result => {
|
||||
if (DEBUG_CLS_HOOKED) {
|
||||
debug2(
|
||||
"CONTEXT-runPromise AFTER then: (" +
|
||||
this.name +
|
||||
") currentUid:" +
|
||||
currentUid +
|
||||
" len:" +
|
||||
this._set.length +
|
||||
" " +
|
||||
util.inspect(context)
|
||||
)
|
||||
}
|
||||
this.exit(context)
|
||||
return result
|
||||
})
|
||||
.catch(err => {
|
||||
err[ERROR_SYMBOL] = context
|
||||
if (DEBUG_CLS_HOOKED) {
|
||||
debug2(
|
||||
"CONTEXT-runPromise AFTER catch: (" +
|
||||
this.name +
|
||||
") currentUid:" +
|
||||
currentUid +
|
||||
" len:" +
|
||||
this._set.length +
|
||||
" " +
|
||||
util.inspect(context)
|
||||
)
|
||||
}
|
||||
this.exit(context)
|
||||
throw err
|
||||
})
|
||||
}
|
||||
|
||||
Namespace.prototype.bind = function bindFactory(fn, context) {
|
||||
if (!context) {
|
||||
if (!this.active) {
|
||||
context = this.createContext()
|
||||
} else {
|
||||
context = this.active
|
||||
}
|
||||
}
|
||||
|
||||
let self = this
|
||||
return function clsBind() {
|
||||
self.enter(context)
|
||||
try {
|
||||
return fn.apply(this, arguments)
|
||||
} catch (exception) {
|
||||
if (exception) {
|
||||
exception[ERROR_SYMBOL] = context
|
||||
}
|
||||
throw exception
|
||||
} finally {
|
||||
self.exit(context)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Namespace.prototype.enter = function enter(context) {
|
||||
assert.ok(context, "context must be provided for entering")
|
||||
if (DEBUG_CLS_HOOKED) {
|
||||
const asyncHooksCurrentId = async_hooks.executionAsyncId()
|
||||
const triggerId = async_hooks.triggerAsyncId()
|
||||
const indentStr = " ".repeat(this._indent < 0 ? 0 : this._indent)
|
||||
debug2(
|
||||
`${indentStr}CONTEXT-ENTER: (${
|
||||
this.name
|
||||
}) currentUid:${currentUid} triggerId:${triggerId} asyncHooksCurrentId:${asyncHooksCurrentId} len:${
|
||||
this._set.length
|
||||
} ${util.inspect(context)}`
|
||||
)
|
||||
}
|
||||
|
||||
this._set.push(this.active)
|
||||
this.active = context
|
||||
}
|
||||
|
||||
Namespace.prototype.exit = function exit(context) {
|
||||
assert.ok(context, "context must be provided for exiting")
|
||||
if (DEBUG_CLS_HOOKED) {
|
||||
const asyncHooksCurrentId = async_hooks.executionAsyncId()
|
||||
const triggerId = async_hooks.triggerAsyncId()
|
||||
const indentStr = " ".repeat(this._indent < 0 ? 0 : this._indent)
|
||||
debug2(
|
||||
`${indentStr}CONTEXT-EXIT: (${
|
||||
this.name
|
||||
}) currentUid:${currentUid} triggerId:${triggerId} asyncHooksCurrentId:${asyncHooksCurrentId} len:${
|
||||
this._set.length
|
||||
} ${util.inspect(context)}`
|
||||
)
|
||||
}
|
||||
|
||||
// Fast path for most exits that are at the top of the stack
|
||||
if (this.active === context) {
|
||||
assert.ok(this._set.length, "can't remove top context")
|
||||
this.active = this._set.pop()
|
||||
return
|
||||
}
|
||||
|
||||
// Fast search in the stack using lastIndexOf
|
||||
let index = this._set.lastIndexOf(context)
|
||||
|
||||
if (index < 0) {
|
||||
if (DEBUG_CLS_HOOKED) {
|
||||
debug2(
|
||||
"??ERROR?? context exiting but not entered - ignoring: " +
|
||||
util.inspect(context)
|
||||
)
|
||||
}
|
||||
assert.ok(
|
||||
index >= 0,
|
||||
"context not currently entered; can't exit. \n" +
|
||||
util.inspect(this) +
|
||||
"\n" +
|
||||
util.inspect(context)
|
||||
)
|
||||
} else {
|
||||
assert.ok(index, "can't remove top context")
|
||||
this._set.splice(index, 1)
|
||||
}
|
||||
}
|
||||
|
||||
Namespace.prototype.bindEmitter = function bindEmitter(emitter) {
|
||||
assert.ok(
|
||||
emitter.on && emitter.addListener && emitter.emit,
|
||||
"can only bind real EEs"
|
||||
)
|
||||
|
||||
let namespace = this
|
||||
let thisSymbol = "context@" + this.name
|
||||
|
||||
// Capture the context active at the time the emitter is bound.
|
||||
function attach(listener) {
|
||||
if (!listener) {
|
||||
return
|
||||
}
|
||||
if (!listener[CONTEXTS_SYMBOL]) {
|
||||
listener[CONTEXTS_SYMBOL] = Object.create(null)
|
||||
}
|
||||
|
||||
listener[CONTEXTS_SYMBOL][thisSymbol] = {
|
||||
namespace: namespace,
|
||||
context: namespace.active,
|
||||
}
|
||||
}
|
||||
|
||||
// At emit time, bind the listener within the correct context.
|
||||
function bind(unwrapped) {
|
||||
if (!(unwrapped && unwrapped[CONTEXTS_SYMBOL])) {
|
||||
return unwrapped
|
||||
}
|
||||
|
||||
let wrapped = unwrapped
|
||||
let unwrappedContexts = unwrapped[CONTEXTS_SYMBOL]
|
||||
Object.keys(unwrappedContexts).forEach(function (name) {
|
||||
let thunk = unwrappedContexts[name]
|
||||
wrapped = thunk.namespace.bind(wrapped, thunk.context)
|
||||
})
|
||||
return wrapped
|
||||
}
|
||||
|
||||
wrapEmitter(emitter, attach, bind)
|
||||
}
|
||||
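// Illustrative usage sketch for bindEmitter (caller-side code, hypothetical
// names): listeners are bound to whichever context was active when they were
// attached, so emits from outside still see the original values.
function exampleBindEmitter() {
  const EventEmitter = require("events")
  const ns = createNamespace("example-bind-emitter")
  const emitter = new EventEmitter()
  ns.bindEmitter(emitter)
  ns.run(() => {
    ns.set("requestId", "abc123")
    emitter.on("work", () => {
      ns.get("requestId") // "abc123" - listener runs in the attach-time context
    })
  })
  emitter.emit("work") // emitted outside the run, context still applies
}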
|
||||
/**
|
||||
* If an error comes out of a namespace, it will have a context attached to it.
|
||||
* This function knows how to find it.
|
||||
*
|
||||
* @param {Error} exception Possibly annotated error.
|
||||
*/
|
||||
Namespace.prototype.fromException = function fromException(exception) {
|
||||
return exception[ERROR_SYMBOL]
|
||||
}
|
||||
|
||||
function getNamespace(name) {
|
||||
return process.namespaces[name]
|
||||
}
|
||||
|
||||
function createNamespace(name) {
|
||||
assert.ok(name, "namespace must be given a name.")
|
||||
|
||||
if (DEBUG_CLS_HOOKED) {
|
||||
debug2(`NS-CREATING NAMESPACE (${name})`)
|
||||
}
|
||||
let namespace = new Namespace(name)
|
||||
namespace.id = currentUid
|
||||
|
||||
const hook = async_hooks.createHook({
|
||||
init(asyncId, type, triggerId, resource) {
|
||||
currentUid = async_hooks.executionAsyncId()
|
||||
|
||||
//CHAIN Parent's Context onto child if none exists. This is needed to pass net-events.spec
|
||||
// let initContext = namespace.active;
|
||||
// if(!initContext && triggerId) {
|
||||
// let parentContext = namespace._contexts.get(triggerId);
|
||||
// if (parentContext) {
|
||||
// namespace.active = parentContext;
|
||||
// namespace._contexts.set(currentUid, parentContext);
|
||||
// if (DEBUG_CLS_HOOKED) {
|
||||
// const indentStr = ' '.repeat(namespace._indent < 0 ? 0 : namespace._indent);
|
||||
// debug2(`${indentStr}INIT [${type}] (${name}) WITH PARENT CONTEXT asyncId:${asyncId} currentUid:${currentUid} triggerId:${triggerId} active:${util.inspect(namespace.active, true)} resource:${resource}`);
|
||||
// }
|
||||
// } else if (DEBUG_CLS_HOOKED) {
|
||||
// const indentStr = ' '.repeat(namespace._indent < 0 ? 0 : namespace._indent);
|
||||
// debug2(`${indentStr}INIT [${type}] (${name}) MISSING CONTEXT asyncId:${asyncId} currentUid:${currentUid} triggerId:${triggerId} active:${util.inspect(namespace.active, true)} resource:${resource}`);
|
||||
// }
|
||||
// }else {
|
||||
// namespace._contexts.set(currentUid, namespace.active);
|
||||
// if (DEBUG_CLS_HOOKED) {
|
||||
// const indentStr = ' '.repeat(namespace._indent < 0 ? 0 : namespace._indent);
|
||||
// debug2(`${indentStr}INIT [${type}] (${name}) asyncId:${asyncId} currentUid:${currentUid} triggerId:${triggerId} active:${util.inspect(namespace.active, true)} resource:${resource}`);
|
||||
// }
|
||||
// }
|
||||
if (namespace.active) {
|
||||
namespace._contexts.set(asyncId, namespace.active)
|
||||
|
||||
if (DEBUG_CLS_HOOKED) {
|
||||
const indentStr = " ".repeat(
|
||||
namespace._indent < 0 ? 0 : namespace._indent
|
||||
)
|
||||
debug2(
|
||||
`${indentStr}INIT [${type}] (${name}) asyncId:${asyncId} currentUid:${currentUid} triggerId:${triggerId} active:${util.inspect(
|
||||
namespace.active,
|
||||
{ showHidden: true, depth: 2, colors: true }
|
||||
)} resource:${resource}`
|
||||
)
|
||||
}
|
||||
} else if (currentUid === 0) {
|
||||
// CurrentId will be 0 when triggered from C++. Promise events
|
||||
// https://github.com/nodejs/node/blob/master/doc/api/async_hooks.md#triggerid
|
||||
const triggerId = async_hooks.triggerAsyncId()
|
||||
const triggerIdContext = namespace._contexts.get(triggerId)
|
||||
if (triggerIdContext) {
|
||||
namespace._contexts.set(asyncId, triggerIdContext)
|
||||
if (DEBUG_CLS_HOOKED) {
|
||||
const indentStr = " ".repeat(
|
||||
namespace._indent < 0 ? 0 : namespace._indent
|
||||
)
|
||||
debug2(
|
||||
`${indentStr}INIT USING CONTEXT FROM TRIGGERID [${type}] (${name}) asyncId:${asyncId} currentUid:${currentUid} triggerId:${triggerId} active:${util.inspect(
|
||||
namespace.active,
|
||||
{ showHidden: true, depth: 2, colors: true }
|
||||
)} resource:${resource}`
|
||||
)
|
||||
}
|
||||
} else if (DEBUG_CLS_HOOKED) {
|
||||
const indentStr = " ".repeat(
|
||||
namespace._indent < 0 ? 0 : namespace._indent
|
||||
)
|
||||
debug2(
|
||||
`${indentStr}INIT MISSING CONTEXT [${type}] (${name}) asyncId:${asyncId} currentUid:${currentUid} triggerId:${triggerId} active:${util.inspect(
|
||||
namespace.active,
|
||||
{ showHidden: true, depth: 2, colors: true }
|
||||
)} resource:${resource}`
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
if (DEBUG_CLS_HOOKED && type === "PROMISE") {
|
||||
debug2(util.inspect(resource, { showHidden: true }))
|
||||
const parentId = resource.parentId
|
||||
const indentStr = " ".repeat(
|
||||
namespace._indent < 0 ? 0 : namespace._indent
|
||||
)
|
||||
debug2(
|
||||
`${indentStr}INIT RESOURCE-PROMISE [${type}] (${name}) parentId:${parentId} asyncId:${asyncId} currentUid:${currentUid} triggerId:${triggerId} active:${util.inspect(
|
||||
namespace.active,
|
||||
{ showHidden: true, depth: 2, colors: true }
|
||||
)} resource:${resource}`
|
||||
)
|
||||
}
|
||||
},
|
||||
before(asyncId) {
|
||||
currentUid = async_hooks.executionAsyncId()
|
||||
let context
|
||||
|
||||
/*
|
||||
if(currentUid === 0){
|
||||
// CurrentId will be 0 when triggered from C++. Promise events
|
||||
// https://github.com/nodejs/node/blob/master/doc/api/async_hooks.md#triggerid
|
||||
//const triggerId = async_hooks.triggerAsyncId();
|
||||
context = namespace._contexts.get(asyncId); // || namespace._contexts.get(triggerId);
|
||||
}else{
|
||||
context = namespace._contexts.get(currentUid);
|
||||
}
|
||||
*/
|
||||
|
||||
//HACK to work with promises until they are fixed in node > 8.1.1
|
||||
context =
|
||||
namespace._contexts.get(asyncId) || namespace._contexts.get(currentUid)
|
||||
|
||||
if (context) {
|
||||
if (DEBUG_CLS_HOOKED) {
|
||||
const triggerId = async_hooks.triggerAsyncId()
|
||||
const indentStr = " ".repeat(
|
||||
namespace._indent < 0 ? 0 : namespace._indent
|
||||
)
|
||||
debug2(
|
||||
`${indentStr}BEFORE (${name}) asyncId:${asyncId} currentUid:${currentUid} triggerId:${triggerId} active:${util.inspect(
|
||||
namespace.active,
|
||||
{ showHidden: true, depth: 2, colors: true }
|
||||
)} context:${util.inspect(context)}`
|
||||
)
|
||||
namespace._indent += 2
|
||||
}
|
||||
|
||||
namespace.enter(context)
|
||||
} else if (DEBUG_CLS_HOOKED) {
|
||||
const triggerId = async_hooks.triggerAsyncId()
|
||||
const indentStr = " ".repeat(
|
||||
namespace._indent < 0 ? 0 : namespace._indent
|
||||
)
|
||||
debug2(
|
||||
`${indentStr}BEFORE MISSING CONTEXT (${name}) asyncId:${asyncId} currentUid:${currentUid} triggerId:${triggerId} active:${util.inspect(
|
||||
namespace.active,
|
||||
{ showHidden: true, depth: 2, colors: true }
|
||||
)} namespace._contexts:${util.inspect(namespace._contexts, {
|
||||
showHidden: true,
|
||||
depth: 2,
|
||||
colors: true,
|
||||
})}`
|
||||
)
|
||||
namespace._indent += 2
|
||||
}
|
||||
},
|
||||
after(asyncId) {
|
||||
currentUid = async_hooks.executionAsyncId()
|
||||
let context // = namespace._contexts.get(currentUid);
|
||||
/*
|
||||
if(currentUid === 0){
|
||||
// CurrentId will be 0 when triggered from C++. Promise events
|
||||
// https://github.com/nodejs/node/blob/master/doc/api/async_hooks.md#triggerid
|
||||
//const triggerId = async_hooks.triggerAsyncId();
|
||||
context = namespace._contexts.get(asyncId); // || namespace._contexts.get(triggerId);
|
||||
}else{
|
||||
context = namespace._contexts.get(currentUid);
|
||||
}
|
||||
*/
|
||||
//HACK to work with promises until they are fixed in node > 8.1.1
|
||||
context =
|
||||
namespace._contexts.get(asyncId) || namespace._contexts.get(currentUid)
|
||||
|
||||
if (context) {
|
||||
if (DEBUG_CLS_HOOKED) {
|
||||
const triggerId = async_hooks.triggerAsyncId()
|
||||
namespace._indent -= 2
|
||||
const indentStr = " ".repeat(
|
||||
namespace._indent < 0 ? 0 : namespace._indent
|
||||
)
|
||||
debug2(
|
||||
`${indentStr}AFTER (${name}) asyncId:${asyncId} currentUid:${currentUid} triggerId:${triggerId} active:${util.inspect(
|
||||
namespace.active,
|
||||
{ showHidden: true, depth: 2, colors: true }
|
||||
)} context:${util.inspect(context)}`
|
||||
)
|
||||
}
|
||||
|
||||
namespace.exit(context)
|
||||
} else if (DEBUG_CLS_HOOKED) {
|
||||
const triggerId = async_hooks.triggerAsyncId()
|
||||
namespace._indent -= 2
|
||||
const indentStr = " ".repeat(
|
||||
namespace._indent < 0 ? 0 : namespace._indent
|
||||
)
|
||||
debug2(
|
||||
`${indentStr}AFTER MISSING CONTEXT (${name}) asyncId:${asyncId} currentUid:${currentUid} triggerId:${triggerId} active:${util.inspect(
|
||||
namespace.active,
|
||||
{ showHidden: true, depth: 2, colors: true }
|
||||
)} context:${util.inspect(context)}`
|
||||
)
|
||||
}
|
||||
},
|
||||
destroy(asyncId) {
|
||||
currentUid = async_hooks.executionAsyncId()
|
||||
if (DEBUG_CLS_HOOKED) {
|
||||
const triggerId = async_hooks.triggerAsyncId()
|
||||
const indentStr = " ".repeat(
|
||||
namespace._indent < 0 ? 0 : namespace._indent
|
||||
)
|
||||
debug2(
|
||||
`${indentStr}DESTROY (${name}) currentUid:${currentUid} asyncId:${asyncId} triggerId:${triggerId} active:${util.inspect(
|
||||
namespace.active,
|
||||
{ showHidden: true, depth: 2, colors: true }
|
||||
)} context:${util.inspect(namespace._contexts.get(currentUid))}`
|
||||
)
|
||||
}
|
||||
|
||||
namespace._contexts.delete(asyncId)
|
||||
},
|
||||
})
|
||||
|
||||
hook.enable()
|
||||
namespace._hook = hook
|
||||
|
||||
process.namespaces[name] = namespace
|
||||
return namespace
|
||||
}
|
||||
|
||||
function destroyNamespace(name) {
|
||||
let namespace = getNamespace(name)
|
||||
|
||||
assert.ok(namespace, "can't delete nonexistent namespace! \"" + name + '"')
|
||||
assert.ok(
|
||||
namespace.id,
|
||||
"don't assign to process.namespaces directly! " + util.inspect(namespace)
|
||||
)
|
||||
|
||||
namespace._hook.disable()
|
||||
namespace._contexts = null
|
||||
process.namespaces[name] = null
|
||||
}
|
||||
|
||||
function reset() {
|
||||
// must unregister async listeners
|
||||
if (process.namespaces) {
|
||||
Object.keys(process.namespaces).forEach(function (name) {
|
||||
destroyNamespace(name)
|
||||
})
|
||||
}
|
||||
process.namespaces = Object.create(null)
|
||||
}
|
||||
|
||||
process.namespaces = process.namespaces || {}
|
||||
|
||||
//const fs = require('fs');
|
||||
function debug2(...args) {
|
||||
if (DEBUG_CLS_HOOKED) {
|
||||
//fs.writeSync(1, `${util.format(...args)}\n`);
|
||||
process._rawDebug(`${util.format(...args)}`)
|
||||
}
|
||||
}
|
||||
|
||||
/*function getFunctionName(fn) {
|
||||
if (!fn) {
|
||||
return fn;
|
||||
}
|
||||
if (typeof fn === 'function') {
|
||||
if (fn.name) {
|
||||
return fn.name;
|
||||
}
|
||||
return (fn.toString().trim().match(/^function\s*([^\s(]+)/) || [])[1];
|
||||
} else if (fn.constructor && fn.constructor.name) {
|
||||
return fn.constructor.name;
|
||||
}
|
||||
}*/
|
|
@ -1,44 +0,0 @@
|
|||
exports.UserStatus = {
|
||||
ACTIVE: "active",
|
||||
INACTIVE: "inactive",
|
||||
}
|
||||
|
||||
exports.Cookies = {
|
||||
CurrentApp: "budibase:currentapp",
|
||||
Auth: "budibase:auth",
|
||||
Init: "budibase:init",
|
||||
ACCOUNT_RETURN_URL: "budibase:account:returnurl",
|
||||
DatasourceAuth: "budibase:datasourceauth",
|
||||
OIDC_CONFIG: "budibase:oidc:config",
|
||||
}
|
||||
|
||||
exports.Headers = {
|
||||
API_KEY: "x-budibase-api-key",
|
||||
LICENSE_KEY: "x-budibase-license-key",
|
||||
API_VER: "x-budibase-api-version",
|
||||
APP_ID: "x-budibase-app-id",
|
||||
TYPE: "x-budibase-type",
|
||||
PREVIEW_ROLE: "x-budibase-role",
|
||||
TENANT_ID: "x-budibase-tenant-id",
|
||||
TOKEN: "x-budibase-token",
|
||||
CSRF_TOKEN: "x-csrf-token",
|
||||
}
|
||||
|
||||
exports.GlobalRoles = {
|
||||
OWNER: "owner",
|
||||
ADMIN: "admin",
|
||||
BUILDER: "builder",
|
||||
WORKSPACE_MANAGER: "workspace_manager",
|
||||
}
|
||||
|
||||
exports.Configs = {
|
||||
SETTINGS: "settings",
|
||||
ACCOUNT: "account",
|
||||
SMTP: "smtp",
|
||||
GOOGLE: "google",
|
||||
OIDC: "oidc",
|
||||
OIDC_LOGOS: "logos_oidc",
|
||||
}
|
||||
|
||||
exports.MAX_VALID_DATE = new Date(2147483647000)
|
||||
exports.DEFAULT_TENANT_ID = "default"
|
|
@ -92,3 +92,4 @@ export const StaticDatabases = {
|
|||
export const APP_PREFIX = DocumentType.APP + SEPARATOR
|
||||
export const APP_DEV = DocumentType.APP_DEV + SEPARATOR
|
||||
export const APP_DEV_PREFIX = APP_DEV
|
||||
export const BUDIBASE_DATASOURCE_TYPE = "budibase"
|
|
@ -0,0 +1,2 @@
|
|||
export * from "./db"
|
||||
export * from "./misc"
|
|
@ -0,0 +1,44 @@
|
|||
export enum UserStatus {
|
||||
ACTIVE = "active",
|
||||
INACTIVE = "inactive",
|
||||
}
|
||||
|
||||
export enum Cookie {
|
||||
CurrentApp = "budibase:currentapp",
|
||||
Auth = "budibase:auth",
|
||||
Init = "budibase:init",
|
||||
ACCOUNT_RETURN_URL = "budibase:account:returnurl",
|
||||
DatasourceAuth = "budibase:datasourceauth",
|
||||
OIDC_CONFIG = "budibase:oidc:config",
|
||||
}
|
||||
|
||||
export enum Header {
|
||||
API_KEY = "x-budibase-api-key",
|
||||
LICENSE_KEY = "x-budibase-license-key",
|
||||
API_VER = "x-budibase-api-version",
|
||||
APP_ID = "x-budibase-app-id",
|
||||
TYPE = "x-budibase-type",
|
||||
PREVIEW_ROLE = "x-budibase-role",
|
||||
TENANT_ID = "x-budibase-tenant-id",
|
||||
TOKEN = "x-budibase-token",
|
||||
CSRF_TOKEN = "x-csrf-token",
|
||||
}
|
||||
|
||||
export enum GlobalRole {
|
||||
OWNER = "owner",
|
||||
ADMIN = "admin",
|
||||
BUILDER = "builder",
|
||||
WORKSPACE_MANAGER = "workspace_manager",
|
||||
}
|
||||
|
||||
export enum Config {
|
||||
SETTINGS = "settings",
|
||||
ACCOUNT = "account",
|
||||
SMTP = "smtp",
|
||||
GOOGLE = "google",
|
||||
OIDC = "oidc",
|
||||
OIDC_LOGOS = "logos_oidc",
|
||||
}
|
||||
|
||||
export const MAX_VALID_DATE = new Date(2147483647000)
|
||||
export const DEFAULT_TENANT_ID = "default"
|
|
@ -0,0 +1,17 @@
import { AsyncLocalStorage } from "async_hooks"

export default class Context {
  static storage = new AsyncLocalStorage<Record<string, any>>()

  static run(context: Record<string, any>, func: any) {
    return Context.storage.run(context, () => func())
  }

  static get(): Record<string, any> {
    return Context.storage.getStore() as Record<string, any>
  }

  static set(context: Record<string, any>) {
    Context.storage.enterWith(context)
  }
}
|
|
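// Illustrative usage sketch for the new AsyncLocalStorage-backed Context
// (caller-side code, hypothetical values): values passed to run() are visible
// to everything awaited inside the callback, with no manual enter/exit.
import Context from "./Context"

async function exampleContextRun() {
  return Context.run({ tenantId: "default" }, async () => {
    const current = Context.get() // { tenantId: "default" }
    return current.tenantId
  })
}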
@ -1,47 +0,0 @@
|
|||
const cls = require("../clshooked")
|
||||
const { newid } = require("../hashing")
|
||||
|
||||
const REQUEST_ID_KEY = "requestId"
|
||||
const MAIN_CTX = cls.createNamespace("main")
|
||||
|
||||
function getContextStorage(namespace) {
|
||||
if (namespace && namespace.active) {
|
||||
let contextData = namespace.active
|
||||
delete contextData.id
|
||||
delete contextData._ns_name
|
||||
return contextData
|
||||
}
|
||||
return {}
|
||||
}
|
||||
|
||||
class FunctionContext {
|
||||
static run(callback) {
|
||||
return MAIN_CTX.runAndReturn(async () => {
|
||||
const namespaceId = newid()
|
||||
MAIN_CTX.set(REQUEST_ID_KEY, namespaceId)
|
||||
const namespace = cls.createNamespace(namespaceId)
|
||||
let response = await namespace.runAndReturn(callback)
|
||||
cls.destroyNamespace(namespaceId)
|
||||
return response
|
||||
})
|
||||
}
|
||||
|
||||
static setOnContext(key, value) {
|
||||
const namespaceId = MAIN_CTX.get(REQUEST_ID_KEY)
|
||||
const namespace = cls.getNamespace(namespaceId)
|
||||
namespace.set(key, value)
|
||||
}
|
||||
|
||||
static getFromContext(key) {
|
||||
const namespaceId = MAIN_CTX.get(REQUEST_ID_KEY)
|
||||
const namespace = cls.getNamespace(namespaceId)
|
||||
const context = getContextStorage(namespace)
|
||||
if (context) {
|
||||
return context[key]
|
||||
} else {
|
||||
return null
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = FunctionContext
|
|
@ -1,17 +0,0 @@
|
|||
export enum ContextKey {
|
||||
TENANT_ID = "tenantId",
|
||||
GLOBAL_DB = "globalDb",
|
||||
APP_ID = "appId",
|
||||
IDENTITY = "identity",
|
||||
// whatever the request app DB was
|
||||
CURRENT_DB = "currentDb",
|
||||
// get the prod app DB from the request
|
||||
PROD_DB = "prodDb",
|
||||
// get the dev app DB from the request
|
||||
DEV_DB = "devDb",
|
||||
DB_OPTS = "dbOpts",
|
||||
// check if something else is using the context, don't close DB
|
||||
TENANCY_IN_USE = "tenancyInUse",
|
||||
APP_IN_USE = "appInUse",
|
||||
IDENTITY_IN_USE = "identityInUse",
|
||||
}
|
|
@ -1,15 +1,19 @@
|
|||
const { getGlobalUserParams, getAllApps } = require("../db/utils")
|
||||
const { doWithDB } = require("../db")
|
||||
const { doWithGlobalDB } = require("../tenancy")
|
||||
const { StaticDatabases } = require("../db/constants")
|
||||
import {
|
||||
getGlobalUserParams,
|
||||
getAllApps,
|
||||
doWithDB,
|
||||
StaticDatabases,
|
||||
} from "../db"
|
||||
import { doWithGlobalDB } from "../tenancy"
|
||||
import { App, Tenants, User, Database } from "@budibase/types"
|
||||
|
||||
const TENANT_DOC = StaticDatabases.PLATFORM_INFO.docs.tenants
|
||||
const PLATFORM_INFO_DB = StaticDatabases.PLATFORM_INFO.name
|
||||
|
||||
const removeTenantFromInfoDB = async tenantId => {
|
||||
async function removeTenantFromInfoDB(tenantId: string) {
|
||||
try {
|
||||
await doWithDB(PLATFORM_INFO_DB, async infoDb => {
|
||||
let tenants = await infoDb.get(TENANT_DOC)
|
||||
await doWithDB(PLATFORM_INFO_DB, async (infoDb: Database) => {
|
||||
const tenants = (await infoDb.get(TENANT_DOC)) as Tenants
|
||||
tenants.tenantIds = tenants.tenantIds.filter(id => id !== tenantId)
|
||||
|
||||
await infoDb.put(tenants)
|
||||
|
@ -20,14 +24,14 @@ const removeTenantFromInfoDB = async tenantId => {
|
|||
}
|
||||
}
|
||||
|
||||
exports.removeUserFromInfoDB = async dbUser => {
|
||||
await doWithDB(PLATFORM_INFO_DB, async infoDb => {
|
||||
const keys = [dbUser._id, dbUser.email]
|
||||
export async function removeUserFromInfoDB(dbUser: User) {
|
||||
await doWithDB(PLATFORM_INFO_DB, async (infoDb: Database) => {
|
||||
const keys = [dbUser._id!, dbUser.email]
|
||||
const userDocs = await infoDb.allDocs({
|
||||
keys,
|
||||
include_docs: true,
|
||||
})
|
||||
const toDelete = userDocs.rows.map(row => {
|
||||
const toDelete = userDocs.rows.map((row: any) => {
|
||||
return {
|
||||
...row.doc,
|
||||
_deleted: true,
|
||||
|
@ -37,18 +41,18 @@ exports.removeUserFromInfoDB = async dbUser => {
|
|||
})
|
||||
}
|
||||
|
||||
const removeUsersFromInfoDB = async tenantId => {
|
||||
return doWithGlobalDB(tenantId, async db => {
|
||||
async function removeUsersFromInfoDB(tenantId: string) {
|
||||
return doWithGlobalDB(tenantId, async (db: any) => {
|
||||
try {
|
||||
const allUsers = await db.allDocs(
|
||||
getGlobalUserParams(null, {
|
||||
include_docs: true,
|
||||
})
|
||||
)
|
||||
await doWithDB(PLATFORM_INFO_DB, async infoDb => {
|
||||
const allEmails = allUsers.rows.map(row => row.doc.email)
|
||||
await doWithDB(PLATFORM_INFO_DB, async (infoDb: any) => {
|
||||
const allEmails = allUsers.rows.map((row: any) => row.doc.email)
|
||||
// get the id docs
|
||||
let keys = allUsers.rows.map(row => row.id)
|
||||
let keys = allUsers.rows.map((row: any) => row.id)
|
||||
// and the email docs
|
||||
keys = keys.concat(allEmails)
|
||||
// retrieve the docs and delete them
|
||||
|
@ -56,7 +60,7 @@ const removeUsersFromInfoDB = async tenantId => {
|
|||
keys,
|
||||
include_docs: true,
|
||||
})
|
||||
const toDelete = userDocs.rows.map(row => {
|
||||
const toDelete = userDocs.rows.map((row: any) => {
|
||||
return {
|
||||
...row.doc,
|
||||
_deleted: true,
|
||||
|
@ -71,8 +75,8 @@ const removeUsersFromInfoDB = async tenantId => {
|
|||
})
|
||||
}
|
||||
|
||||
const removeGlobalDB = async tenantId => {
|
||||
return doWithGlobalDB(tenantId, async db => {
|
||||
async function removeGlobalDB(tenantId: string) {
|
||||
return doWithGlobalDB(tenantId, async (db: Database) => {
|
||||
try {
|
||||
await db.destroy()
|
||||
} catch (err) {
|
||||
|
@ -82,11 +86,11 @@ const removeGlobalDB = async tenantId => {
|
|||
})
|
||||
}
|
||||
|
||||
const removeTenantApps = async tenantId => {
|
||||
async function removeTenantApps(tenantId: string) {
|
||||
try {
|
||||
const apps = await getAllApps({ all: true })
|
||||
const apps = (await getAllApps({ all: true })) as App[]
|
||||
const destroyPromises = apps.map(app =>
|
||||
doWithDB(app.appId, db => db.destroy())
|
||||
doWithDB(app.appId, (db: Database) => db.destroy())
|
||||
)
|
||||
await Promise.allSettled(destroyPromises)
|
||||
} catch (err) {
|
||||
|
@ -96,7 +100,7 @@ const removeTenantApps = async tenantId => {
|
|||
}
|
||||
|
||||
// can't live in tenancy package due to circular dependency on db/utils
|
||||
exports.deleteTenant = async tenantId => {
|
||||
export async function deleteTenant(tenantId: string) {
|
||||
await removeTenantFromInfoDB(tenantId)
|
||||
await removeUsersFromInfoDB(tenantId)
|
||||
await removeGlobalDB(tenantId)
|
|
@ -2,23 +2,22 @@ import {
|
|||
IdentityContext,
|
||||
IdentityType,
|
||||
User,
|
||||
UserContext,
|
||||
isCloudAccount,
|
||||
Account,
|
||||
AccountUserContext,
|
||||
} from "@budibase/types"
|
||||
import * as context from "."
|
||||
|
||||
export const getIdentity = (): IdentityContext | undefined => {
|
||||
export function getIdentity(): IdentityContext | undefined {
|
||||
return context.getIdentity()
|
||||
}
|
||||
|
||||
export const doInIdentityContext = (identity: IdentityContext, task: any) => {
|
||||
export function doInIdentityContext(identity: IdentityContext, task: any) {
|
||||
return context.doInIdentityContext(identity, task)
|
||||
}
|
||||
|
||||
export const doInUserContext = (user: User, task: any) => {
|
||||
const userContext: UserContext = {
|
||||
export function doInUserContext(user: User, task: any) {
|
||||
const userContext: any = {
|
||||
...user,
|
||||
_id: user._id as string,
|
||||
type: IdentityType.USER,
|
||||
|
@ -26,7 +25,7 @@ export const doInUserContext = (user: User, task: any) => {
|
|||
return doInIdentityContext(userContext, task)
|
||||
}
|
||||
|
||||
export const doInAccountContext = (account: Account, task: any) => {
|
||||
export function doInAccountContext(account: Account, task: any) {
|
||||
const _id = getAccountUserId(account)
|
||||
const tenantId = account.tenantId
|
||||
const accountContext: AccountUserContext = {
|
||||
|
@ -38,12 +37,12 @@ export const doInAccountContext = (account: Account, task: any) => {
|
|||
return doInIdentityContext(accountContext, task)
|
||||
}
|
||||
|
||||
export const getAccountUserId = (account: Account) => {
|
||||
export function getAccountUserId(account: Account) {
|
||||
let userId: string
|
||||
if (isCloudAccount(account)) {
|
||||
userId = account.budibaseUserId
|
||||
} else {
|
||||
// use account id as user id for self hosting
|
||||
// use account id as user id for self-hosting
|
||||
userId = account.accountId
|
||||
}
|
||||
return userId
|
||||
|
|
|
@ -1,267 +1,3 @@
|
|||
import env from "../environment"
|
||||
import { SEPARATOR, DocumentType } from "../db/constants"
|
||||
import cls from "./FunctionContext"
|
||||
import { dangerousGetDB, closeDB } from "../db"
|
||||
import { baseGlobalDBName } from "../db/tenancy"
|
||||
import { IdentityContext } from "@budibase/types"
|
||||
import { DEFAULT_TENANT_ID as _DEFAULT_TENANT_ID } from "../constants"
|
||||
import { ContextKey } from "./constants"
|
||||
import {
|
||||
updateUsing,
|
||||
closeWithUsing,
|
||||
setAppTenantId,
|
||||
setIdentity,
|
||||
closeAppDBs,
|
||||
getContextDB,
|
||||
} from "./utils"
|
||||
|
||||
export const DEFAULT_TENANT_ID = _DEFAULT_TENANT_ID
|
||||
|
||||
// some test cases call functions directly, need to
|
||||
// store an app ID to pretend there is a context
|
||||
let TEST_APP_ID: string | null = null
|
||||
|
||||
export const closeTenancy = async () => {
|
||||
let db
|
||||
try {
|
||||
if (env.USE_COUCH) {
|
||||
db = getGlobalDB()
|
||||
}
|
||||
} catch (err) {
|
||||
// no DB found - skip closing
|
||||
return
|
||||
}
|
||||
await closeDB(db)
|
||||
// clear from context now that database is closed/task is finished
|
||||
cls.setOnContext(ContextKey.TENANT_ID, null)
|
||||
cls.setOnContext(ContextKey.GLOBAL_DB, null)
|
||||
}
|
||||
|
||||
// export const isDefaultTenant = () => {
|
||||
// return getTenantId() === DEFAULT_TENANT_ID
|
||||
// }
|
||||
|
||||
export const isMultiTenant = () => {
|
||||
return env.MULTI_TENANCY
|
||||
}
|
||||
|
||||
/**
|
||||
* Given an app ID this will attempt to retrieve the tenant ID from it.
|
||||
* @return {null|string} The tenant ID found within the app ID.
|
||||
*/
|
||||
export const getTenantIDFromAppID = (appId: string) => {
|
||||
if (!appId) {
|
||||
return null
|
||||
}
|
||||
if (!isMultiTenant()) {
|
||||
return DEFAULT_TENANT_ID
|
||||
}
|
||||
const split = appId.split(SEPARATOR)
|
||||
const hasDev = split[1] === DocumentType.DEV
|
||||
if ((hasDev && split.length === 3) || (!hasDev && split.length === 2)) {
|
||||
return null
|
||||
}
|
||||
if (hasDev) {
|
||||
return split[2]
|
||||
} else {
|
||||
return split[1]
|
||||
}
|
||||
}
|
||||
|
||||
export const doInContext = async (appId: string, task: any) => {
|
||||
// gets the tenant ID from the app ID
|
||||
const tenantId = getTenantIDFromAppID(appId)
|
||||
return doInTenant(tenantId, async () => {
|
||||
return doInAppContext(appId, async () => {
|
||||
return task()
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
export const doInTenant = (tenantId: string | null, task: any) => {
|
||||
// make sure default always selected in single tenancy
|
||||
if (!env.MULTI_TENANCY) {
|
||||
tenantId = tenantId || DEFAULT_TENANT_ID
|
||||
}
|
||||
// the internal function is so that we can re-use an existing
|
||||
// context - don't want to close DB on a parent context
|
||||
async function internal(opts = { existing: false }) {
|
||||
// set the tenant id + global db if this is a new context
|
||||
if (!opts.existing) {
|
||||
updateTenantId(tenantId)
|
||||
}
|
||||
|
||||
try {
|
||||
// invoke the task
|
||||
return await task()
|
||||
} finally {
|
||||
await closeWithUsing(ContextKey.TENANCY_IN_USE, () => {
|
||||
return closeTenancy()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const existing = cls.getFromContext(ContextKey.TENANT_ID) === tenantId
|
||||
return updateUsing(ContextKey.TENANCY_IN_USE, existing, internal)
|
||||
}
|
||||
|
||||
export const doInAppContext = (appId: string, task: any) => {
|
||||
if (!appId) {
|
||||
throw new Error("appId is required")
|
||||
}
|
||||
|
||||
const identity = getIdentity()
|
||||
|
||||
// the internal function is so that we can re-use an existing
|
||||
// context - don't want to close DB on a parent context
|
||||
async function internal(opts = { existing: false }) {
|
||||
// set the app tenant id
|
||||
if (!opts.existing) {
|
||||
setAppTenantId(appId)
|
||||
}
|
||||
// set the app ID
|
||||
cls.setOnContext(ContextKey.APP_ID, appId)
|
||||
|
||||
// preserve the identity
|
||||
if (identity) {
|
||||
setIdentity(identity)
|
||||
}
|
||||
try {
|
||||
// invoke the task
|
||||
return await task()
|
||||
} finally {
|
||||
await closeWithUsing(ContextKey.APP_IN_USE, async () => {
|
||||
await closeAppDBs()
|
||||
await closeTenancy()
|
||||
})
|
||||
}
|
||||
}
|
||||
const existing = cls.getFromContext(ContextKey.APP_ID) === appId
|
||||
return updateUsing(ContextKey.APP_IN_USE, existing, internal)
|
||||
}
|
||||
|
||||
export const doInIdentityContext = (identity: IdentityContext, task: any) => {
|
||||
if (!identity) {
|
||||
throw new Error("identity is required")
|
||||
}
|
||||
|
||||
async function internal(opts = { existing: false }) {
|
||||
if (!opts.existing) {
|
||||
cls.setOnContext(ContextKey.IDENTITY, identity)
|
||||
// set the tenant so that doInTenant will preserve identity
|
||||
if (identity.tenantId) {
|
||||
updateTenantId(identity.tenantId)
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
// invoke the task
|
||||
return await task()
|
||||
} finally {
|
||||
await closeWithUsing(ContextKey.IDENTITY_IN_USE, async () => {
|
||||
setIdentity(null)
|
||||
await closeTenancy()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const existing = cls.getFromContext(ContextKey.IDENTITY)
|
||||
return updateUsing(ContextKey.IDENTITY_IN_USE, existing, internal)
|
||||
}
|
||||
|
||||
export const getIdentity = (): IdentityContext | undefined => {
|
||||
try {
|
||||
return cls.getFromContext(ContextKey.IDENTITY)
|
||||
} catch (e) {
|
||||
// do nothing - identity is not in context
|
||||
}
|
||||
}
|
||||
|
||||
export const updateTenantId = (tenantId: string | null) => {
|
||||
cls.setOnContext(ContextKey.TENANT_ID, tenantId)
|
||||
if (env.USE_COUCH) {
|
||||
setGlobalDB(tenantId)
|
||||
}
|
||||
}
|
||||
|
||||
export const updateAppId = async (appId: string) => {
|
||||
try {
|
||||
// have to close first, before removing the databases from context
|
||||
await closeAppDBs()
|
||||
cls.setOnContext(ContextKey.APP_ID, appId)
|
||||
} catch (err) {
|
||||
if (env.isTest()) {
|
||||
TEST_APP_ID = appId
|
||||
} else {
|
||||
throw err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const setGlobalDB = (tenantId: string | null) => {
|
||||
const dbName = baseGlobalDBName(tenantId)
|
||||
const db = dangerousGetDB(dbName)
|
||||
cls.setOnContext(ContextKey.GLOBAL_DB, db)
|
||||
return db
|
||||
}
|
||||
|
||||
export const getGlobalDB = () => {
|
||||
const db = cls.getFromContext(ContextKey.GLOBAL_DB)
|
||||
if (!db) {
|
||||
throw new Error("Global DB not found")
|
||||
}
|
||||
return db
|
||||
}
|
||||
|
||||
export const isTenantIdSet = () => {
|
||||
const tenantId = cls.getFromContext(ContextKey.TENANT_ID)
|
||||
return !!tenantId
|
||||
}
|
||||
|
||||
export const getTenantId = () => {
|
||||
if (!isMultiTenant()) {
|
||||
return DEFAULT_TENANT_ID
|
||||
}
|
||||
const tenantId = cls.getFromContext(ContextKey.TENANT_ID)
|
||||
if (!tenantId) {
|
||||
throw new Error("Tenant id not found")
|
||||
}
|
||||
return tenantId
|
||||
}
|
||||
|
||||
export const getAppId = () => {
|
||||
const foundId = cls.getFromContext(ContextKey.APP_ID)
|
||||
if (!foundId && env.isTest() && TEST_APP_ID) {
|
||||
return TEST_APP_ID
|
||||
} else {
|
||||
return foundId
|
||||
}
|
||||
}
|
||||
|
||||
export const isTenancyEnabled = () => {
|
||||
return env.MULTI_TENANCY
|
||||
}
|
||||
|
||||
/**
|
||||
* Opens the app database based on whatever the request
|
||||
* contained, dev or prod.
|
||||
*/
|
||||
export const getAppDB = (opts?: any) => {
|
||||
return getContextDB(ContextKey.CURRENT_DB, opts)
|
||||
}
|
||||
|
||||
/**
|
||||
* This specifically gets the prod app ID, if the request
|
||||
* contained a development app ID, this will open the prod one.
|
||||
*/
|
||||
export const getProdAppDB = (opts?: any) => {
|
||||
return getContextDB(ContextKey.PROD_DB, opts)
|
||||
}
|
||||
|
||||
/**
|
||||
* This specifically gets the dev app ID, if the request
|
||||
* contained a prod app ID, this will open the dev one.
|
||||
*/
|
||||
export const getDevAppDB = (opts?: any) => {
|
||||
return getContextDB(ContextKey.DEV_DB, opts)
|
||||
}
|
||||
export { DEFAULT_TENANT_ID } from "../constants"
|
||||
export * as identity from "./identity"
|
||||
export * from "./mainContext"
|
||||
|
|
|
@ -0,0 +1,245 @@
|
|||
// some test cases call functions directly, need to
|
||||
// store an app ID to pretend there is a context
|
||||
import env from "../environment"
|
||||
import Context from "./Context"
|
||||
import { getDevelopmentAppID, getProdAppID } from "../db/conversions"
|
||||
import { getDB } from "../db/db"
|
||||
import {
|
||||
DocumentType,
|
||||
SEPARATOR,
|
||||
StaticDatabases,
|
||||
DEFAULT_TENANT_ID,
|
||||
} from "../constants"
|
||||
import { Database, IdentityContext } from "@budibase/types"
|
||||
|
||||
export type ContextMap = {
|
||||
tenantId?: string
|
||||
appId?: string
|
||||
identity?: IdentityContext
|
||||
}
|
||||
|
||||
let TEST_APP_ID: string | null = null
|
||||
|
||||
export function getGlobalDBName(tenantId?: string) {
|
||||
// the tenant ID can be set externally, for example via the user API where
|
||||
// new tenants are being created
|
||||
if (!tenantId) {
|
||||
tenantId = getTenantId()
|
||||
}
|
||||
return baseGlobalDBName(tenantId)
|
||||
}
|
||||
|
||||
export function baseGlobalDBName(tenantId: string | undefined | null) {
|
||||
let dbName
|
||||
if (!tenantId || tenantId === DEFAULT_TENANT_ID) {
|
||||
dbName = StaticDatabases.GLOBAL.name
|
||||
} else {
|
||||
dbName = `${tenantId}${SEPARATOR}${StaticDatabases.GLOBAL.name}`
|
||||
}
|
||||
return dbName
|
||||
}
|
||||
|
||||
export function isMultiTenant() {
|
||||
return env.MULTI_TENANCY
|
||||
}
|
||||
|
||||
export function isTenantIdSet() {
|
||||
const context = Context.get()
|
||||
return !!context?.tenantId
|
||||
}
|
||||
|
||||
export function isTenancyEnabled() {
|
||||
return env.MULTI_TENANCY
|
||||
}
|
||||
|
||||
/**
|
||||
* Given an app ID this will attempt to retrieve the tenant ID from it.
|
||||
* @return {null|string} The tenant ID found within the app ID.
|
||||
*/
|
||||
export function getTenantIDFromAppID(appId: string) {
|
||||
if (!appId) {
|
||||
return undefined
|
||||
}
|
||||
if (!isMultiTenant()) {
|
||||
return DEFAULT_TENANT_ID
|
||||
}
|
||||
const split = appId.split(SEPARATOR)
|
||||
const hasDev = split[1] === DocumentType.DEV
|
||||
if ((hasDev && split.length === 3) || (!hasDev && split.length === 2)) {
|
||||
return undefined
|
||||
}
|
||||
if (hasDev) {
|
||||
return split[2]
|
||||
} else {
|
||||
return split[1]
|
||||
}
|
||||
}
|
||||
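// Illustrative behaviour sketch for getTenantIDFromAppID (hypothetical app
// IDs, assuming MULTI_TENANCY is enabled): the tenant segment sits after the
// "app"/"app_dev" prefix, and IDs without one yield undefined.
function exampleTenantFromAppID() {
  return {
    prod: getTenantIDFromAppID("app_tenant1_abc123"), // "tenant1"
    dev: getTenantIDFromAppID("app_dev_tenant1_abc123"), // "tenant1"
    none: getTenantIDFromAppID("app_abc123"), // undefined - no tenant segment
  }
}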
|
||||
function updateContext(updates: ContextMap) {
|
||||
let context: ContextMap
|
||||
try {
|
||||
context = Context.get()
|
||||
} catch (err) {
|
||||
// no context, start empty
|
||||
context = {}
|
||||
}
|
||||
context = {
|
||||
...context,
|
||||
...updates,
|
||||
}
|
||||
return context
|
||||
}
|
||||
|
||||
async function newContext(updates: ContextMap, task: any) {
|
||||
// see if there is already a context set up
|
||||
let context: ContextMap = updateContext(updates)
|
||||
return Context.run(context, task)
|
||||
}
|
||||
|
||||
export async function doInContext(appId: string, task: any): Promise<any> {
|
||||
const tenantId = getTenantIDFromAppID(appId)
|
||||
return newContext(
|
||||
{
|
||||
tenantId,
|
||||
appId,
|
||||
},
|
||||
task
|
||||
)
|
||||
}
|
||||
|
||||
export async function doInTenant(
|
||||
tenantId: string | null,
|
||||
task: any
|
||||
): Promise<any> {
|
||||
// make sure default always selected in single tenancy
|
||||
if (!env.MULTI_TENANCY) {
|
||||
tenantId = tenantId || DEFAULT_TENANT_ID
|
||||
}
|
||||
|
||||
const updates = tenantId ? { tenantId } : {}
|
||||
return newContext(updates, task)
|
||||
}
|
||||
|
||||
export async function doInAppContext(appId: string, task: any): Promise<any> {
|
||||
if (!appId) {
|
||||
throw new Error("appId is required")
|
||||
}
|
||||
|
||||
const tenantId = getTenantIDFromAppID(appId)
|
||||
const updates: ContextMap = { appId }
|
||||
if (tenantId) {
|
||||
updates.tenantId = tenantId
|
||||
}
|
||||
return newContext(updates, task)
|
||||
}
|
||||
|
||||
export async function doInIdentityContext(
|
||||
identity: IdentityContext,
|
||||
task: any
|
||||
): Promise<any> {
|
||||
if (!identity) {
|
||||
throw new Error("identity is required")
|
||||
}
|
||||
|
||||
const context: ContextMap = {
|
||||
identity,
|
||||
}
|
||||
if (identity.tenantId) {
|
||||
context.tenantId = identity.tenantId
|
||||
}
|
||||
return newContext(context, task)
|
||||
}
|
||||
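// Illustrative usage sketch for the context wrappers (caller-side code;
// tenant and app IDs are hypothetical): each wrapper merges its updates into
// a copy of the current context map and runs the task inside it.
async function exampleDoInContexts() {
  return doInTenant("tenant1", async () => {
    // tenantId is now resolvable via getTenantId()
    return doInAppContext("app_tenant1_abc123", async () => {
      // both tenantId and appId are set for everything awaited in here
      return { tenantId: getTenantId(), appId: getAppId() }
    })
  })
}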
|
||||
export function getIdentity(): IdentityContext | undefined {
|
||||
try {
|
||||
const context = Context.get()
|
||||
return context?.identity
|
||||
} catch (e) {
|
||||
// do nothing - identity is not in context
|
||||
}
|
||||
}
|
||||
|
||||
export function getTenantId(): string {
|
||||
if (!isMultiTenant()) {
|
||||
return DEFAULT_TENANT_ID
|
||||
}
|
||||
const context = Context.get()
|
||||
const tenantId = context?.tenantId
|
||||
if (!tenantId) {
|
||||
throw new Error("Tenant id not found")
|
||||
}
|
||||
return tenantId
|
||||
}
|
||||
|
||||
export function getAppId(): string | undefined {
|
||||
const context = Context.get()
|
||||
const foundId = context?.appId
|
||||
if (!foundId && env.isTest() && TEST_APP_ID) {
|
||||
return TEST_APP_ID
|
||||
} else {
|
||||
return foundId
|
||||
}
|
||||
}
|
||||
|
||||
export function updateTenantId(tenantId?: string) {
|
||||
let context: ContextMap = updateContext({
|
||||
tenantId,
|
||||
})
|
||||
Context.set(context)
|
||||
}
|
||||
|
||||
export function updateAppId(appId: string) {
|
||||
let context: ContextMap = updateContext({
|
||||
appId,
|
||||
})
|
||||
try {
|
||||
Context.set(context)
|
||||
} catch (err) {
|
||||
if (env.isTest()) {
|
||||
TEST_APP_ID = appId
|
||||
} else {
|
||||
throw err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export function getGlobalDB(): Database {
|
||||
const context = Context.get()
|
||||
if (!context || (env.MULTI_TENANCY && !context.tenantId)) {
|
||||
throw new Error("Global DB not found")
|
||||
}
|
||||
return getDB(baseGlobalDBName(context?.tenantId))
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the app database based on whatever the request
|
||||
* contained, dev or prod.
|
||||
*/
|
||||
export function getAppDB(opts?: any): Database {
|
||||
const appId = getAppId()
|
||||
return getDB(appId, opts)
|
||||
}
|
||||
|
||||
/**
|
||||
* This specifically gets the prod app ID, if the request
|
||||
* contained a development app ID, this will get the prod one.
|
||||
*/
|
||||
export function getProdAppDB(opts?: any): Database {
|
||||
const appId = getAppId()
|
||||
if (!appId) {
|
||||
throw new Error("Unable to retrieve prod DB - no app ID.")
|
||||
}
|
||||
return getDB(getProdAppID(appId), opts)
|
||||
}
|
||||
|
||||
/**
|
||||
* This specifically gets the dev app ID, if the request
|
||||
* contained a prod app ID, this will get the dev one.
|
||||
*/
|
||||
export function getDevAppDB(opts?: any): Database {
|
||||
const appId = getAppId()
|
||||
if (!appId) {
|
||||
throw new Error("Unable to retrieve dev DB - no app ID.")
|
||||
}
|
||||
return getDB(getDevelopmentAppID(appId), opts)
|
||||
}
|
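// Illustrative usage sketch for the DB getters (caller-side code, hypothetical
// app ID): they read whatever IDs the surrounding context holds, so they only
// make sense inside one of the doIn*Context wrappers above.
async function exampleContextDBs() {
  return doInAppContext("app_dev_tenant1_abc123", async () => {
    const globalDb = getGlobalDB() // tenant1's global DB
    const appDb = getAppDB() // the dev DB named by the context app ID
    const prodDb = getProdAppDB() // the prod variant of the same app
    return { global: globalDb.name, dev: appDb.name, prod: prodDb.name }
  })
}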
|
@ -1,18 +1,9 @@
|
|||
import "../../../tests/utilities/TestConfiguration"
|
||||
import * as context from ".."
|
||||
import { DEFAULT_TENANT_ID } from "../../constants"
|
||||
import env from "../../environment"
|
||||
|
||||
// must use require to spy index file exports due to known issue in jest
|
||||
const dbUtils = require("../../db")
|
||||
jest.spyOn(dbUtils, "closeDB")
|
||||
jest.spyOn(dbUtils, "dangerousGetDB")
|
||||
require("../../../tests")
|
||||
const context = require("../")
|
||||
const { DEFAULT_TENANT_ID } = require("../../constants")
|
||||
const env = require("../../environment")
|
||||
|
||||
describe("context", () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks()
|
||||
})
|
||||
|
||||
describe("doInTenant", () => {
|
||||
describe("single-tenancy", () => {
|
||||
it("defaults to the default tenant", () => {
|
||||
|
@ -25,8 +16,6 @@ describe("context", () => {
|
|||
const db = context.getGlobalDB()
|
||||
expect(db.name).toBe("global-db")
|
||||
})
|
||||
expect(dbUtils.dangerousGetDB).toHaveBeenCalledTimes(1)
|
||||
expect(dbUtils.closeDB).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
})
|
||||
|
||||
|
@ -40,7 +29,7 @@ describe("context", () => {
|
|||
let error
|
||||
try {
|
||||
context.getTenantId()
|
||||
} catch (e: any) {
|
||||
} catch (e) {
|
||||
error = e
|
||||
}
|
||||
expect(error.message).toBe("Tenant id not found")
|
||||
|
@ -59,7 +48,7 @@ describe("context", () => {
|
|||
let error
|
||||
try {
|
||||
context.getGlobalDB()
|
||||
} catch (e: any) {
|
||||
} catch (e) {
|
||||
error = e
|
||||
}
|
||||
expect(error.message).toBe("Global DB not found")
|
||||
|
@ -85,8 +74,6 @@ describe("context", () => {
|
|||
const db = context.getGlobalDB()
|
||||
expect(db.name).toBe("test_global-db")
|
||||
})
|
||||
expect(dbUtils.dangerousGetDB).toHaveBeenCalledTimes(1)
|
||||
expect(dbUtils.closeDB).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
it("sets the tenant id when nested with same tenant id", async () => {
|
||||
|
@ -121,10 +108,6 @@ describe("context", () => {
|
|||
})
|
||||
})
|
||||
})
|
||||
|
||||
// only 1 db is opened and closed
|
||||
expect(dbUtils.dangerousGetDB).toHaveBeenCalledTimes(1)
|
||||
expect(dbUtils.closeDB).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
it("sets different tenant id inside another context", () => {
|
|
@ -1,109 +0,0 @@
|
|||
import {
|
||||
DEFAULT_TENANT_ID,
|
||||
getAppId,
|
||||
getTenantIDFromAppID,
|
||||
updateTenantId,
|
||||
} from "./index"
|
||||
import cls from "./FunctionContext"
|
||||
import { IdentityContext } from "@budibase/types"
|
||||
import { ContextKey } from "./constants"
|
||||
import { dangerousGetDB, closeDB } from "../db"
|
||||
import { isEqual } from "lodash"
|
||||
import { getDevelopmentAppID, getProdAppID } from "../db/conversions"
|
||||
import env from "../environment"
|
||||
|
||||
export async function updateUsing(
|
||||
usingKey: string,
|
||||
existing: boolean,
|
||||
internal: (opts: { existing: boolean }) => Promise<any>
|
||||
) {
|
||||
const using = cls.getFromContext(usingKey)
|
||||
if (using && existing) {
|
||||
cls.setOnContext(usingKey, using + 1)
|
||||
return internal({ existing: true })
|
||||
} else {
|
||||
return cls.run(async () => {
|
||||
cls.setOnContext(usingKey, 1)
|
||||
return internal({ existing: false })
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
export async function closeWithUsing(
|
||||
usingKey: string,
|
||||
closeFn: () => Promise<any>
|
||||
) {
|
||||
const using = cls.getFromContext(usingKey)
|
||||
if (!using || using <= 1) {
|
||||
await closeFn()
|
||||
} else {
|
||||
cls.setOnContext(usingKey, using - 1)
|
||||
}
|
||||
}
|
||||
|
||||
export const setAppTenantId = (appId: string) => {
|
||||
const appTenantId = getTenantIDFromAppID(appId) || DEFAULT_TENANT_ID
|
||||
updateTenantId(appTenantId)
|
||||
}
|
||||
|
||||
export const setIdentity = (identity: IdentityContext | null) => {
|
||||
cls.setOnContext(ContextKey.IDENTITY, identity)
|
||||
}
|
||||
|
||||
// this function makes sure the PouchDB objects are closed and
|
||||
// fully deleted when finished - this protects against memory leaks
|
||||
export async function closeAppDBs() {
|
||||
const dbKeys = [ContextKey.CURRENT_DB, ContextKey.PROD_DB, ContextKey.DEV_DB]
|
||||
for (let dbKey of dbKeys) {
|
||||
const db = cls.getFromContext(dbKey)
|
||||
if (!db) {
|
||||
continue
|
||||
}
|
||||
await closeDB(db)
|
||||
// clear the DB from context, in case someone tries to use it again
|
||||
cls.setOnContext(dbKey, null)
|
||||
}
|
||||
// clear the app ID now that the databases are closed
|
||||
if (cls.getFromContext(ContextKey.APP_ID)) {
|
||||
cls.setOnContext(ContextKey.APP_ID, null)
|
||||
}
|
||||
if (cls.getFromContext(ContextKey.DB_OPTS)) {
|
||||
cls.setOnContext(ContextKey.DB_OPTS, null)
|
||||
}
|
||||
}
|
||||
|
||||
export function getContextDB(key: string, opts: any) {
|
||||
const dbOptsKey = `${key}${ContextKey.DB_OPTS}`
|
||||
let storedOpts = cls.getFromContext(dbOptsKey)
|
||||
let db = cls.getFromContext(key)
|
||||
if (db && isEqual(opts, storedOpts)) {
|
||||
return db
|
||||
}
|
||||
|
||||
const appId = getAppId()
|
||||
let toUseAppId
|
||||
|
||||
switch (key) {
|
||||
case ContextKey.CURRENT_DB:
|
||||
toUseAppId = appId
|
||||
break
|
||||
case ContextKey.PROD_DB:
|
||||
toUseAppId = getProdAppID(appId)
|
||||
break
|
||||
case ContextKey.DEV_DB:
|
||||
toUseAppId = getDevelopmentAppID(appId)
|
||||
break
|
||||
}
|
||||
db = dangerousGetDB(toUseAppId, opts)
|
||||
try {
|
||||
cls.setOnContext(key, db)
|
||||
if (opts) {
|
||||
cls.setOnContext(dbOptsKey, opts)
|
||||
}
|
||||
} catch (err) {
|
||||
if (!env.isTest()) {
|
||||
throw err
|
||||
}
|
||||
}
|
||||
return db
|
||||
}
|
|
@ -1,5 +1,5 @@
|
|||
import { dangerousGetDB, closeDB } from "."
|
||||
import { DocumentType } from "./constants"
|
||||
import { getPouchDB, closePouchDB } from "./couch"
|
||||
import { DocumentType } from "../constants"
|
||||
|
||||
class Replication {
|
||||
source: any
|
||||
|
@ -12,12 +12,12 @@ class Replication {
|
|||
* @param {String} target - the DB you want to replicate to, or rollback from
|
||||
*/
|
||||
constructor({ source, target }: any) {
|
||||
this.source = dangerousGetDB(source)
|
||||
this.target = dangerousGetDB(target)
|
||||
this.source = getPouchDB(source)
|
||||
this.target = getPouchDB(target)
|
||||
}
|
||||
|
||||
close() {
|
||||
return Promise.all([closeDB(this.source), closeDB(this.target)])
|
||||
return Promise.all([closePouchDB(this.source), closePouchDB(this.target)])
|
||||
}
|
||||
|
||||
promisify(operation: any, opts = {}) {
|
||||
|
@ -68,7 +68,7 @@ class Replication {
|
|||
async rollback() {
|
||||
await this.target.destroy()
|
||||
// Recreate the DB again
|
||||
this.target = dangerousGetDB(this.target.name)
|
||||
this.target = getPouchDB(this.target.name)
|
||||
// take the opportunity to remove deleted tombstones
|
||||
await this.replicate()
|
||||
}
|
||||
|
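// Illustrative usage sketch for Replication (caller-side code, hypothetical
// DB names): copy a dev app database into its prod counterpart, then close
// both PouchDB handles.
async function exampleReplication() {
  const replication = new Replication({
    source: "app_dev_tenant1_abc123",
    target: "app_tenant1_abc123",
  })
  try {
    await replication.replicate() // copy everything from source into target
  } finally {
    await replication.close()
  }
}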
|
|
@ -1,32 +1,33 @@
|
|||
import { APP_DEV_PREFIX, APP_PREFIX } from "../constants"
|
||||
import { App } from "@budibase/types"
|
||||
const NO_APP_ERROR = "No app provided"
|
||||
const { APP_DEV_PREFIX, APP_PREFIX } = require("./constants")
|
||||
|
||||
exports.isDevAppID = appId => {
|
||||
export function isDevAppID(appId?: string) {
|
||||
if (!appId) {
|
||||
throw NO_APP_ERROR
|
||||
}
|
||||
return appId.startsWith(APP_DEV_PREFIX)
|
||||
}
|
||||
|
||||
exports.isProdAppID = appId => {
|
||||
export function isProdAppID(appId?: string) {
|
||||
if (!appId) {
|
||||
throw NO_APP_ERROR
|
||||
}
|
||||
return appId.startsWith(APP_PREFIX) && !exports.isDevAppID(appId)
|
||||
return appId.startsWith(APP_PREFIX) && !isDevAppID(appId)
|
||||
}
|
||||
|
||||
exports.isDevApp = app => {
|
||||
export function isDevApp(app: App) {
|
||||
if (!app) {
|
||||
throw NO_APP_ERROR
|
||||
}
|
||||
return exports.isDevAppID(app.appId)
|
||||
return isDevAppID(app.appId)
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a development app ID from a real app ID.
|
||||
* @returns {string} The dev app ID, which can be used for the dev database.
|
||||
*/
|
||||
exports.getDevelopmentAppID = appId => {
|
||||
export function getDevelopmentAppID(appId: string) {
|
||||
if (!appId || appId.startsWith(APP_DEV_PREFIX)) {
|
||||
return appId
|
||||
}
|
||||
|
@ -36,12 +37,12 @@ exports.getDevelopmentAppID = appId => {
|
|||
const rest = split.join(APP_PREFIX)
|
||||
return `${APP_DEV_PREFIX}${rest}`
|
||||
}
|
||||
exports.getDevAppID = exports.getDevelopmentAppID
|
||||
export const getDevAppID = getDevelopmentAppID
|
||||
|
||||
/**
|
||||
* Convert a development app ID to a deployed app ID.
|
||||
*/
|
||||
exports.getProdAppID = appId => {
|
||||
export function getProdAppID(appId: string) {
|
||||
if (!appId || !appId.startsWith(APP_DEV_PREFIX)) {
|
||||
return appId
|
||||
}
|
||||
|
@ -52,7 +53,7 @@ exports.getProdAppID = appId => {
|
|||
return `${APP_PREFIX}${rest}`
|
||||
}
|
||||
|
||||
exports.extractAppUUID = id => {
|
||||
export function extractAppUUID(id: string) {
|
||||
const split = id?.split("_") || []
|
||||
return split.length ? split[split.length - 1] : null
|
||||
}
|
|
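// Illustrative behaviour sketch for the conversions (hypothetical IDs):
// getDevelopmentAppID and getProdAppID are inverses, and extractAppUUID
// returns the trailing UUID segment.
function exampleConversions() {
  const prodId = "app_abc123"
  const devId = getDevelopmentAppID(prodId) // "app_dev_abc123"
  return {
    roundTrip: getProdAppID(devId) === prodId, // true
    uuid: extractAppUUID(devId), // "abc123"
  }
}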
@ -0,0 +1,193 @@
|
|||
import Nano from "nano"
|
||||
import {
|
||||
AllDocsResponse,
|
||||
AnyDocument,
|
||||
Database,
|
||||
DatabaseOpts,
|
||||
DatabaseQueryOpts,
|
||||
DatabasePutOpts,
|
||||
DatabaseCreateIndexOpts,
|
||||
DatabaseDeleteIndexOpts,
|
||||
Document,
|
||||
isDocument,
|
||||
} from "@budibase/types"
|
||||
import { getCouchInfo } from "./connections"
|
||||
import { directCouchCall } from "./utils"
|
||||
import { getPouchDB } from "./pouchDB"
|
||||
import { WriteStream, ReadStream } from "fs"
|
||||
|
||||
export class DatabaseImpl implements Database {
|
||||
public readonly name: string
|
||||
private static nano: Nano.ServerScope
|
||||
private readonly pouchOpts: DatabaseOpts
|
||||
|
||||
constructor(dbName?: string, opts?: DatabaseOpts) {
|
||||
if (dbName == null) {
|
||||
throw new Error("Database name cannot be undefined.")
|
||||
}
|
||||
this.name = dbName
|
||||
this.pouchOpts = opts || {}
|
||||
if (!DatabaseImpl.nano) {
|
||||
DatabaseImpl.init()
|
||||
}
|
||||
}
|
||||
|
||||
static init() {
|
||||
const couchInfo = getCouchInfo()
|
||||
DatabaseImpl.nano = Nano({
|
||||
url: couchInfo.url,
|
||||
requestDefaults: {
|
||||
headers: {
|
||||
Authorization: couchInfo.cookie,
|
||||
},
|
||||
},
|
||||
parseUrl: false,
|
||||
})
|
||||
}
|
||||
|
||||
async exists() {
|
||||
let response = await directCouchCall(`/${this.name}`, "HEAD")
|
||||
return response.status === 200
|
||||
}
|
||||
|
||||
async checkSetup() {
|
||||
let shouldCreate = !this.pouchOpts?.skip_setup
|
||||
// check exists in a lightweight fashion
|
||||
let exists = await this.exists()
|
||||
if (!shouldCreate && !exists) {
|
||||
throw new Error("DB does not exist")
|
||||
}
|
||||
if (!exists) {
|
||||
await DatabaseImpl.nano.db.create(this.name)
|
||||
}
|
||||
return DatabaseImpl.nano.db.use(this.name)
|
||||
}
|
||||
|
||||
private async updateOutput(fnc: any) {
|
||||
try {
|
||||
return await fnc()
|
||||
} catch (err: any) {
|
||||
if (err.statusCode) {
|
||||
err.status = err.statusCode
|
||||
}
|
||||
throw err
|
||||
}
|
||||
}
|
||||
|
||||
async get<T>(id?: string): Promise<T | any> {
|
||||
const db = await this.checkSetup()
|
||||
if (!id) {
|
||||
throw new Error("Unable to get doc without a valid _id.")
|
||||
}
|
||||
return this.updateOutput(() => db.get(id))
|
||||
}
|
||||
|
||||
async remove(idOrDoc: string | Document, rev?: string) {
|
||||
const db = await this.checkSetup()
|
||||
let _id: string
|
||||
let _rev: string
|
||||
|
||||
if (isDocument(idOrDoc)) {
|
||||
_id = idOrDoc._id!
|
||||
_rev = idOrDoc._rev!
|
||||
} else {
|
||||
_id = idOrDoc
|
||||
_rev = rev!
|
||||
}
|
||||
|
||||
if (!_id || !_rev) {
|
||||
throw new Error("Unable to remove doc without a valid _id and _rev.")
|
||||
}
|
||||
return this.updateOutput(() => db.destroy(_id, _rev))
|
||||
}
|
||||
|
||||
async put(document: AnyDocument, opts?: DatabasePutOpts) {
|
||||
if (!document._id) {
|
||||
throw new Error("Cannot store document without _id field.")
|
||||
}
|
||||
const db = await this.checkSetup()
|
||||
if (!document.createdAt) {
|
||||
document.createdAt = new Date().toISOString()
|
||||
}
|
||||
document.updatedAt = new Date().toISOString()
|
||||
if (opts?.force && document._id) {
|
||||
try {
|
||||
const existing = await this.get(document._id)
|
||||
if (existing) {
|
||||
document._rev = existing._rev
|
||||
}
|
||||
} catch (err: any) {
|
||||
if (err.status !== 404) {
|
||||
throw err
|
||||
}
|
||||
}
|
||||
}
|
||||
return this.updateOutput(() => db.insert(document))
|
||||
}
|
||||
|
||||
async bulkDocs(documents: AnyDocument[]) {
|
||||
const db = await this.checkSetup()
|
||||
return this.updateOutput(() => db.bulk({ docs: documents }))
|
||||
}
|
||||
|
||||
async allDocs<T>(params: DatabaseQueryOpts): Promise<AllDocsResponse<T>> {
|
||||
const db = await this.checkSetup()
|
||||
return this.updateOutput(() => db.list(params))
|
||||
}
|
||||
|
||||
async query<T>(
|
||||
viewName: string,
|
||||
params: DatabaseQueryOpts
|
||||
): Promise<AllDocsResponse<T>> {
|
||||
const db = await this.checkSetup()
|
||||
const [database, view] = viewName.split("/")
|
||||
return this.updateOutput(() => db.view(database, view, params))
|
||||
}
|
||||
|
||||
async destroy() {
|
||||
try {
|
||||
await DatabaseImpl.nano.db.destroy(this.name)
|
||||
} catch (err: any) {
|
||||
// didn't exist, don't worry
|
||||
if (err.statusCode === 404) {
|
||||
return
|
||||
} else {
|
||||
throw { ...err, status: err.statusCode }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async compact() {
|
||||
const db = await this.checkSetup()
|
||||
return this.updateOutput(() => db.compact())
|
||||
}
|
||||
|
||||
// All the below functions are infrequently called, so just utilise PouchDB
|
||||
// for them as it implements them better than we can
|
||||
async dump(stream: WriteStream, opts?: { filter?: any }) {
|
||||
const pouch = getPouchDB(this.name)
|
||||
// @ts-ignore
|
||||
return pouch.dump(stream, opts)
|
||||
}
|
||||
|
||||
async load(stream: ReadStream) {
|
||||
const pouch = getPouchDB(this.name)
|
||||
// @ts-ignore
|
||||
return pouch.load(stream)
|
||||
}
|
||||
|
||||
async createIndex(opts: DatabaseCreateIndexOpts) {
|
||||
const pouch = getPouchDB(this.name)
|
||||
return pouch.createIndex(opts)
|
||||
}
|
||||
|
||||
async deleteIndex(opts: DatabaseDeleteIndexOpts) {
|
||||
const pouch = getPouchDB(this.name)
|
||||
return pouch.deleteIndex(opts)
|
||||
}
|
||||
|
||||
async getIndexes() {
|
||||
const pouch = getPouchDB(this.name)
|
||||
return pouch.getIndexes()
|
||||
}
|
||||
}
|
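// Illustrative usage sketch for DatabaseImpl (caller-side code, hypothetical
// DB name and document): checkSetup() creates the database on first use
// unless skip_setup was passed in the constructor options.
async function exampleDatabaseImpl() {
  const db = new DatabaseImpl("example-db")
  await db.put({ _id: "doc_1", name: "example" })
  const doc = await db.get<{ name: string }>("doc_1")
  const all = await db.allDocs({ include_docs: true })
  return { doc, total: all.rows.length }
}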
|
@ -1,5 +1,37 @@
|
|||
import PouchDB from "pouchdb"
|
||||
import env from "../environment"
|
||||
import env from "../../environment"
|
||||
|
||||
export const getCouchInfo = () => {
|
||||
const urlInfo = getUrlInfo()
|
||||
let username
|
||||
let password
|
||||
if (env.COUCH_DB_USERNAME) {
|
||||
// set from env
|
||||
username = env.COUCH_DB_USERNAME
|
||||
} else if (urlInfo.auth.username) {
|
||||
// set from url
|
||||
username = urlInfo.auth.username
|
||||
} else if (!env.isTest()) {
|
||||
throw new Error("CouchDB username not set")
|
||||
}
|
||||
if (env.COUCH_DB_PASSWORD) {
|
||||
// set from env
|
||||
password = env.COUCH_DB_PASSWORD
|
||||
} else if (urlInfo.auth.password) {
|
||||
// set from url
|
||||
password = urlInfo.auth.password
|
||||
} else if (!env.isTest()) {
|
||||
throw new Error("CouchDB password not set")
|
||||
}
|
||||
const authCookie = Buffer.from(`${username}:${password}`).toString("base64")
|
||||
return {
|
||||
url: urlInfo.url!,
|
||||
auth: {
|
||||
username: username,
|
||||
password: password,
|
||||
},
|
||||
cookie: `Basic ${authCookie}`,
|
||||
}
|
||||
}
|
||||
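// Illustrative sketch of the getCouchInfo result (hypothetical credentials):
// the username/password come from COUCH_DB_USERNAME/COUCH_DB_PASSWORD or from
// credentials embedded in COUCH_DB_URL, and a basic auth cookie is pre-built.
function exampleCouchInfo() {
  // e.g. COUCH_DB_URL=http://admin:password@localhost:5984
  const { url, auth, cookie } = getCouchInfo()
  // url    -> the CouchDB base URL
  // auth   -> { username: "admin", password: "password" }
  // cookie -> "Basic " + base64("admin:password")
  return { url, auth, cookie }
}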
|
||||
export const getUrlInfo = (url = env.COUCH_DB_URL) => {
|
||||
let cleanUrl, username, password, host
|
||||
|
@ -43,85 +75,3 @@ export const getUrlInfo = (url = env.COUCH_DB_URL) => {
|
|||
},
|
||||
}
|
||||
}
|
||||
|
||||
export const getCouchInfo = () => {
|
||||
const urlInfo = getUrlInfo()
|
||||
let username
|
||||
let password
|
||||
if (env.COUCH_DB_USERNAME) {
|
||||
// set from env
|
||||
username = env.COUCH_DB_USERNAME
|
||||
} else if (urlInfo.auth.username) {
|
||||
// set from url
|
||||
username = urlInfo.auth.username
|
||||
} else if (!env.isTest()) {
|
||||
throw new Error("CouchDB username not set")
|
||||
}
|
||||
if (env.COUCH_DB_PASSWORD) {
|
||||
// set from env
|
||||
password = env.COUCH_DB_PASSWORD
|
||||
} else if (urlInfo.auth.password) {
|
||||
// set from url
|
||||
password = urlInfo.auth.password
|
||||
} else if (!env.isTest()) {
|
||||
throw new Error("CouchDB password not set")
|
||||
}
|
||||
const authCookie = Buffer.from(`${username}:${password}`).toString("base64")
|
||||
return {
|
||||
url: urlInfo.url,
|
||||
auth: {
|
||||
username: username,
|
||||
password: password,
|
||||
},
|
||||
cookie: `Basic ${authCookie}`,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a constructor for PouchDB.
|
||||
* This should be rarely used outside of the main application config.
|
||||
* Exposed for exceptional cases such as in-memory views.
|
||||
*/
|
||||
export const getPouch = (opts: any = {}) => {
|
||||
let { url, cookie } = getCouchInfo()
|
||||
let POUCH_DB_DEFAULTS = {
|
||||
prefix: url,
|
||||
fetch: (url: string, opts: any) => {
|
||||
// use a specific authorization cookie - be very explicit about how we authenticate
|
||||
opts.headers.set("Authorization", cookie)
|
||||
return PouchDB.fetch(url, opts)
|
||||
},
|
||||
}
|
||||
|
||||
if (opts.inMemory) {
|
||||
const inMemory = require("pouchdb-adapter-memory")
|
||||
PouchDB.plugin(inMemory)
|
||||
POUCH_DB_DEFAULTS = {
|
||||
prefix: undefined,
|
||||
// @ts-ignore
|
||||
adapter: "memory",
|
||||
}
|
||||
}
|
||||
|
||||
if (opts.onDisk) {
|
||||
POUCH_DB_DEFAULTS = {
|
||||
prefix: undefined,
|
||||
// @ts-ignore
|
||||
adapter: "leveldb",
|
||||
}
|
||||
}
|
||||
|
||||
if (opts.replication) {
|
||||
const replicationStream = require("pouchdb-replication-stream")
|
||||
PouchDB.plugin(replicationStream.plugin)
|
||||
// @ts-ignore
|
||||
PouchDB.adapter("writableStream", replicationStream.adapters.writableStream)
|
||||
}
|
||||
|
||||
if (opts.find) {
|
||||
const find = require("pouchdb-find")
|
||||
PouchDB.plugin(find)
|
||||
}
|
||||
|
||||
return PouchDB.defaults(POUCH_DB_DEFAULTS)
|
||||
}
|
|
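A small sketch of what getCouchInfo() yields once credentials have been resolved from the environment or the URL; the concrete credential values below are illustrative only.

import { getCouchInfo } from "./connections"         // assumed relative import

// e.g. with COUCH_DB_USERNAME=admin and COUCH_DB_PASSWORD=password set:
const { url, auth, cookie } = getCouchInfo()
// auth   -> { username: "admin", password: "password" }
// cookie -> "Basic YWRtaW46cGFzc3dvcmQ=" (base64 of "admin:password"),
//           ready to be used as an Authorization header value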
@ -0,0 +1,4 @@
|
|||
export * from "./connections"
|
||||
export * from "./DatabaseImpl"
|
||||
export * from "./utils"
|
||||
export { init, getPouch, getPouchDB, closePouchDB } from "./pouchDB"
|
|
@ -0,0 +1,97 @@
|
|||
import PouchDB from "pouchdb"
|
||||
import env from "../../environment"
|
||||
import { PouchOptions } from "@budibase/types"
|
||||
import { getCouchInfo } from "./connections"
|
||||
|
||||
let Pouch: any
|
||||
let initialised = false
|
||||
|
||||
/**
|
||||
* Return a constructor for PouchDB.
|
||||
* This should be rarely used outside of the main application config.
|
||||
* Exposed for exceptional cases such as in-memory views.
|
||||
*/
|
||||
export const getPouch = (opts: PouchOptions = {}) => {
|
||||
let { url, cookie } = getCouchInfo()
|
||||
let POUCH_DB_DEFAULTS = {
|
||||
prefix: url,
|
||||
fetch: (url: string, opts: any) => {
|
||||
// use a specific authorization cookie - be very explicit about how we authenticate
|
||||
opts.headers.set("Authorization", cookie)
|
||||
return PouchDB.fetch(url, opts)
|
||||
},
|
||||
}
|
||||
|
||||
if (opts.inMemory) {
|
||||
const inMemory = require("pouchdb-adapter-memory")
|
||||
PouchDB.plugin(inMemory)
|
||||
POUCH_DB_DEFAULTS = {
|
||||
// @ts-ignore
|
||||
adapter: "memory",
|
||||
}
|
||||
}
|
||||
|
||||
if (opts.onDisk) {
|
||||
POUCH_DB_DEFAULTS = {
|
||||
// @ts-ignore
|
||||
adapter: "leveldb",
|
||||
}
|
||||
}
|
||||
|
||||
if (opts.replication) {
|
||||
const replicationStream = require("pouchdb-replication-stream")
|
||||
PouchDB.plugin(replicationStream.plugin)
|
||||
// @ts-ignore
|
||||
PouchDB.adapter("writableStream", replicationStream.adapters.writableStream)
|
||||
}
|
||||
|
||||
if (opts.find) {
|
||||
const find = require("pouchdb-find")
|
||||
PouchDB.plugin(find)
|
||||
}
|
||||
|
||||
return PouchDB.defaults(POUCH_DB_DEFAULTS)
|
||||
}
|
||||
|
||||
export function init(opts?: PouchOptions) {
|
||||
Pouch = getPouch(opts)
|
||||
initialised = true
|
||||
}
|
||||
|
||||
const checkInitialised = () => {
|
||||
if (!initialised) {
|
||||
throw new Error("init has not been called")
|
||||
}
|
||||
}
|
||||
|
||||
export function getPouchDB(dbName: string, opts?: any): PouchDB.Database {
|
||||
checkInitialised()
|
||||
const db = new Pouch(dbName, opts)
|
||||
const dbPut = db.put
|
||||
db.put = async (doc: any, options = {}) => {
|
||||
if (!doc.createdAt) {
|
||||
doc.createdAt = new Date().toISOString()
|
||||
}
|
||||
doc.updatedAt = new Date().toISOString()
|
||||
return dbPut(doc, options)
|
||||
}
|
||||
db.exists = async () => {
|
||||
const info = await db.info()
|
||||
return !info.error
|
||||
}
|
||||
return db
|
||||
}
|
||||
|
||||
// use this function if you have called getPouchDB - close
|
||||
// the databases you've opened once finished
|
||||
export async function closePouchDB(db: PouchDB.Database) {
|
||||
if (!db || env.isTest()) {
|
||||
return
|
||||
}
|
||||
try {
|
||||
// specifically await so that if there is an error, it can be ignored
|
||||
return await db.close()
|
||||
} catch (err) {
|
||||
// ignore error, already closed
|
||||
}
|
||||
}
|
|
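The PouchDB wrapper above requires init() to run before any database is opened. A minimal sketch, assuming the in-memory adapter and an illustrative database name:

import { init, getPouchDB, closePouchDB } from "./pouchDB"   // assumed relative import

async function example() {
  init({ inMemory: true })                  // must run once before getPouchDB()
  const db = getPouchDB("example-db")       // hypothetical database name
  await db.put({ _id: "doc1" })             // the wrapper stamps createdAt/updatedAt
  const doc = await db.get("doc1")
  await closePouchDB(db)                    // close when finished to avoid leaking handles
  return doc
}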
@ -0,0 +1,36 @@
|
|||
import { getCouchInfo } from "./connections"
|
||||
import fetch from "node-fetch"
|
||||
import { checkSlashesInUrl } from "../../helpers"
|
||||
|
||||
export async function directCouchCall(
|
||||
path: string,
|
||||
method: string = "GET",
|
||||
body?: any
|
||||
) {
|
||||
let { url, cookie } = getCouchInfo()
|
||||
const couchUrl = `${url}/${path}`
|
||||
const params: any = {
|
||||
method: method,
|
||||
headers: {
|
||||
Authorization: cookie,
|
||||
},
|
||||
}
|
||||
if (body && method !== "GET") {
|
||||
params.body = JSON.stringify(body)
|
||||
params.headers["Content-Type"] = "application/json"
|
||||
}
|
||||
return await fetch(checkSlashesInUrl(encodeURI(couchUrl)), params)
|
||||
}
|
||||
|
||||
export async function directCouchQuery(
|
||||
path: string,
|
||||
method: string = "GET",
|
||||
body?: any
|
||||
) {
|
||||
const response = await directCouchCall(path, method, body)
|
||||
if (response.status < 300) {
|
||||
return await response.json()
|
||||
} else {
|
||||
throw "Cannot connect to CouchDB instance"
|
||||
}
|
||||
}
|
|
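A sketch of hitting CouchDB directly through the helpers above, bypassing PouchDB and nano; the path is a standard CouchDB endpoint, not something defined in this diff, and the import path is assumed.

import { directCouchQuery } from "./couch"            // assumed relative import

// fetch the first few documents of a database straight from CouchDB's HTTP API
async function peek(dbName: string) {
  return directCouchQuery(`${dbName}/_all_docs?limit=5&include_docs=true`)
}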
@ -0,0 +1,46 @@
|
|||
import env from "../environment"
|
||||
import { directCouchQuery, getPouchDB } from "./couch"
|
||||
import { CouchFindOptions, Database } from "@budibase/types"
|
||||
import { DatabaseImpl } from "../db"
|
||||
|
||||
const dbList = new Set()
|
||||
|
||||
export function getDB(dbName?: string, opts?: any): Database {
|
||||
// TODO: once using the test image, need to remove this
|
||||
if (env.isTest()) {
|
||||
dbList.add(dbName)
|
||||
// @ts-ignore
|
||||
return getPouchDB(dbName, opts)
|
||||
}
|
||||
return new DatabaseImpl(dbName, opts)
|
||||
}
|
||||
|
||||
// we have to use a callback for this so that we can close
|
||||
// the DB when we're done; without this, manual requests would
|
||||
// need to close the database when done with it to avoid memory leaks
|
||||
export async function doWithDB(dbName: string, cb: any, opts = {}) {
|
||||
const db = getDB(dbName, opts)
|
||||
// need this to be async so that we can correctly close DB after all
|
||||
// async operations have been completed
|
||||
return await cb(db)
|
||||
}
|
||||
|
||||
export function allDbs() {
|
||||
if (!env.isTest()) {
|
||||
throw new Error("Cannot be used outside test environment.")
|
||||
}
|
||||
return [...dbList]
|
||||
}
|
||||
|
||||
export async function directCouchAllDbs(queryString?: string) {
|
||||
let couchPath = "/_all_dbs"
|
||||
if (queryString) {
|
||||
couchPath += `?${queryString}`
|
||||
}
|
||||
return await directCouchQuery(couchPath)
|
||||
}
|
||||
|
||||
export async function directCouchFind(dbName: string, opts: CouchFindOptions) {
|
||||
const json = await directCouchQuery(`${dbName}/_find`, "POST", opts)
|
||||
return { rows: json.docs, bookmark: json.bookmark }
|
||||
}
|
|
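doWithDB keeps the old callback-style API on top of getDB above, passing the callback's return value straight through. A minimal sketch with an illustrative database name and an assumed import path:

import { doWithDB } from "./db"                        // assumed relative import
import { Database } from "@budibase/types"

async function countDocs() {
  return doWithDB("example-db", async (db: Database) => {
    const response = await db.allDocs({ include_docs: false })
    return response.rows.length
  })
}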
@ -1,133 +1,9 @@
|
|||
import * as pouch from "./pouch"
|
||||
import env from "../environment"
|
||||
import { checkSlashesInUrl } from "../helpers"
|
||||
import fetch from "node-fetch"
|
||||
import { PouchOptions, CouchFindOptions } from "@budibase/types"
|
||||
import PouchDB from "pouchdb"
|
||||
|
||||
const openDbs: string[] = []
|
||||
let Pouch: any
|
||||
let initialised = false
|
||||
const dbList = new Set()
|
||||
|
||||
if (env.MEMORY_LEAK_CHECK) {
|
||||
setInterval(() => {
|
||||
console.log("--- OPEN DBS ---")
|
||||
console.log(openDbs)
|
||||
}, 5000)
|
||||
}
|
||||
|
||||
const put =
|
||||
(dbPut: any) =>
|
||||
async (doc: any, options = {}) => {
|
||||
if (!doc.createdAt) {
|
||||
doc.createdAt = new Date().toISOString()
|
||||
}
|
||||
doc.updatedAt = new Date().toISOString()
|
||||
return dbPut(doc, options)
|
||||
}
|
||||
|
||||
const checkInitialised = () => {
|
||||
if (!initialised) {
|
||||
throw new Error("init has not been called")
|
||||
}
|
||||
}
|
||||
|
||||
export async function init(opts?: PouchOptions) {
|
||||
Pouch = pouch.getPouch(opts)
|
||||
initialised = true
|
||||
}
|
||||
|
||||
// NOTE: THIS IS A DANGEROUS FUNCTION - USE WITH CAUTION
|
||||
// this function is prone to leaks, should only be used
|
||||
// in situations that using the function doWithDB does not work
|
||||
export function dangerousGetDB(dbName: string, opts?: any): PouchDB.Database {
|
||||
checkInitialised()
|
||||
if (env.isTest()) {
|
||||
dbList.add(dbName)
|
||||
}
|
||||
const db = new Pouch(dbName, opts)
|
||||
if (env.MEMORY_LEAK_CHECK) {
|
||||
openDbs.push(db.name)
|
||||
}
|
||||
const dbPut = db.put
|
||||
db.put = put(dbPut)
|
||||
return db
|
||||
}
|
||||
|
||||
// use this function if you have called dangerousGetDB - close
|
||||
// the databases you've opened once finished
|
||||
export async function closeDB(db: PouchDB.Database) {
|
||||
if (!db || env.isTest()) {
|
||||
return
|
||||
}
|
||||
if (env.MEMORY_LEAK_CHECK) {
|
||||
openDbs.splice(openDbs.indexOf(db.name), 1)
|
||||
}
|
||||
try {
|
||||
// specifically await so that if there is an error, it can be ignored
|
||||
return await db.close()
|
||||
} catch (err) {
|
||||
// ignore error, already closed
|
||||
}
|
||||
}
|
||||
|
||||
// we have to use a callback for this so that we can close
|
||||
// the DB when we're done, without this manual requests would
|
||||
// need to close the database when done with it to avoid memory leaks
|
||||
export async function doWithDB(dbName: string, cb: any, opts = {}) {
|
||||
const db = dangerousGetDB(dbName, opts)
|
||||
// need this to be async so that we can correctly close DB after all
|
||||
// async operations have been completed
|
||||
try {
|
||||
return await cb(db)
|
||||
} finally {
|
||||
await closeDB(db)
|
||||
}
|
||||
}
|
||||
|
||||
export function allDbs() {
|
||||
if (!env.isTest()) {
|
||||
throw new Error("Cannot be used outside test environment.")
|
||||
}
|
||||
checkInitialised()
|
||||
return [...dbList]
|
||||
}
|
||||
|
||||
export async function directCouchQuery(
|
||||
path: string,
|
||||
method: string = "GET",
|
||||
body?: any
|
||||
) {
|
||||
let { url, cookie } = pouch.getCouchInfo()
|
||||
const couchUrl = `${url}/${path}`
|
||||
const params: any = {
|
||||
method: method,
|
||||
headers: {
|
||||
Authorization: cookie,
|
||||
},
|
||||
}
|
||||
if (body && method !== "GET") {
|
||||
params.body = JSON.stringify(body)
|
||||
params.headers["Content-Type"] = "application/json"
|
||||
}
|
||||
const response = await fetch(checkSlashesInUrl(encodeURI(couchUrl)), params)
|
||||
if (response.status < 300) {
|
||||
return await response.json()
|
||||
} else {
|
||||
throw "Cannot connect to CouchDB instance"
|
||||
}
|
||||
}
|
||||
|
||||
export async function directCouchAllDbs(queryString?: string) {
|
||||
let couchPath = "/_all_dbs"
|
||||
if (queryString) {
|
||||
couchPath += `?${queryString}`
|
||||
}
|
||||
return await directCouchQuery(couchPath)
|
||||
}
|
||||
|
||||
export async function directCouchFind(dbName: string, opts: CouchFindOptions) {
|
||||
const json = await directCouchQuery(`${dbName}/_find`, "POST", opts)
|
||||
return { rows: json.docs, bookmark: json.bookmark }
|
||||
}
|
||||
export * from "./couch"
|
||||
export * from "./db"
|
||||
export * from "./utils"
|
||||
export * from "./views"
|
||||
export * from "./conversions"
|
||||
export { default as Replication } from "./Replication"
|
||||
// exports to support old export structure
|
||||
export * from "../constants/db"
|
||||
export { getGlobalDBName, baseGlobalDBName } from "../context"
|
||||
|
|
|
@ -1,22 +0,0 @@
|
|||
import { DEFAULT_TENANT_ID } from "../constants"
|
||||
import { StaticDatabases, SEPARATOR } from "./constants"
|
||||
import { getTenantId } from "../context"
|
||||
|
||||
export const getGlobalDBName = (tenantId?: string) => {
|
||||
// tenant ID can be set externally, for example user API where
|
||||
// new tenants are being created, this may be the case
|
||||
if (!tenantId) {
|
||||
tenantId = getTenantId()
|
||||
}
|
||||
return baseGlobalDBName(tenantId)
|
||||
}
|
||||
|
||||
export const baseGlobalDBName = (tenantId: string | undefined | null) => {
|
||||
let dbName
|
||||
if (!tenantId || tenantId === DEFAULT_TENANT_ID) {
|
||||
dbName = StaticDatabases.GLOBAL.name
|
||||
} else {
|
||||
dbName = `${tenantId}${SEPARATOR}${StaticDatabases.GLOBAL.name}`
|
||||
}
|
||||
return dbName
|
||||
}
|
|
@ -1,11 +1,11 @@
|
|||
require("../../../tests/utilities/TestConfiguration")
|
||||
const { dangerousGetDB } = require("../")
|
||||
require("../../../tests")
|
||||
const { getDB } = require("../")
|
||||
|
||||
describe("db", () => {
|
||||
|
||||
describe("getDB", () => {
|
||||
it("returns a db", async () => {
|
||||
const db = dangerousGetDB("test")
|
||||
const db = getDB("test")
|
||||
expect(db).toBeDefined()
|
||||
expect(db._adapter).toBe("memory")
|
||||
expect(db.prefix).toBe("_pouch_")
|
||||
|
@ -13,7 +13,7 @@ describe("db", () => {
|
|||
})
|
||||
|
||||
it("uses the custom put function", async () => {
|
||||
const db = dangerousGetDB("test")
|
||||
const db = getDB("test")
|
||||
let doc = { _id: "test" }
|
||||
await db.put(doc)
|
||||
doc = await db.get(doc._id)
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
require("../../../tests/utilities/TestConfiguration")
|
||||
const getUrlInfo = require("../pouch").getUrlInfo
|
||||
require("../../../tests")
|
||||
const getUrlInfo = require("../couch").getUrlInfo
|
||||
|
||||
describe("pouch", () => {
|
||||
describe("Couch DB URL parsing", () => {
|
||||
|
|
|
@ -1,15 +1,17 @@
|
|||
require("../../../tests/utilities/TestConfiguration");
|
||||
require("../../../tests")
|
||||
const {
|
||||
generateAppID,
|
||||
getDevelopmentAppID,
|
||||
getProdAppID,
|
||||
isDevAppID,
|
||||
isProdAppID,
|
||||
} = require("../conversions")
|
||||
const {
|
||||
generateAppID,
|
||||
getPlatformUrl,
|
||||
getScopedConfig
|
||||
} = require("../utils")
|
||||
const tenancy = require("../../tenancy");
|
||||
const { Configs, DEFAULT_TENANT_ID } = require("../../constants");
|
||||
const tenancy = require("../../tenancy")
|
||||
const { Config, DEFAULT_TENANT_ID } = require("../../constants")
|
||||
const env = require("../../environment")
|
||||
|
||||
describe("utils", () => {
|
||||
|
@ -77,7 +79,7 @@ const setDbPlatformUrl = async () => {
|
|||
const db = tenancy.getGlobalDB()
|
||||
db.put({
|
||||
_id: "config_settings",
|
||||
type: Configs.SETTINGS,
|
||||
type: Config.SETTINGS,
|
||||
config: {
|
||||
platformUrl: DB_URL
|
||||
}
|
||||
|
@ -178,7 +180,7 @@ describe("getScopedConfig", () => {
|
|||
await tenancy.doInTenant(DEFAULT_TENANT_ID, async () => {
|
||||
await setDbPlatformUrl()
|
||||
const db = tenancy.getGlobalDB()
|
||||
const config = await getScopedConfig(db, { type: Configs.SETTINGS })
|
||||
const config = await getScopedConfig(db, { type: Config.SETTINGS })
|
||||
expect(config.platformUrl).toBe(DB_URL)
|
||||
})
|
||||
})
|
||||
|
@ -186,7 +188,7 @@ describe("getScopedConfig", () => {
|
|||
it("returns the platform url without an existing config", async () => {
|
||||
await tenancy.doInTenant(DEFAULT_TENANT_ID, async () => {
|
||||
const db = tenancy.getGlobalDB()
|
||||
const config = await getScopedConfig(db, { type: Configs.SETTINGS })
|
||||
const config = await getScopedConfig(db, { type: Config.SETTINGS })
|
||||
expect(config.platformUrl).toBe(DEFAULT_URL)
|
||||
})
|
||||
})
|
||||
|
|
|
@ -1,31 +1,26 @@
|
|||
import { newid } from "../hashing"
|
||||
import { DEFAULT_TENANT_ID, Configs } from "../constants"
|
||||
import { newid } from "../newid"
|
||||
import env from "../environment"
|
||||
import {
|
||||
DEFAULT_TENANT_ID,
|
||||
SEPARATOR,
|
||||
DocumentType,
|
||||
UNICODE_MAX,
|
||||
ViewName,
|
||||
InternalTable,
|
||||
} from "./constants"
|
||||
import { getTenantId, getGlobalDB } from "../context"
|
||||
import { getGlobalDBName } from "./tenancy"
|
||||
import { doWithDB, allDbs, directCouchAllDbs } from "./index"
|
||||
APP_PREFIX,
|
||||
} from "../constants"
|
||||
import { getTenantId, getGlobalDB, getGlobalDBName } from "../context"
|
||||
import { doWithDB, allDbs, directCouchAllDbs } from "./db"
|
||||
import { getAppMetadata } from "../cache/appMetadata"
|
||||
import { isDevApp, isDevAppID, getProdAppID } from "./conversions"
|
||||
import { APP_PREFIX } from "./constants"
|
||||
import * as events from "../events"
|
||||
|
||||
export * from "./constants"
|
||||
export * from "./conversions"
|
||||
export { default as Replication } from "./Replication"
|
||||
export * from "./tenancy"
|
||||
import { App, Database, ConfigType } from "@budibase/types"
|
||||
|
||||
/**
|
||||
* Generates a new app ID.
|
||||
* @returns {string} The new app ID which the app doc can be stored under.
|
||||
*/
|
||||
export const generateAppID = (tenantId = null) => {
|
||||
export const generateAppID = (tenantId?: string | null) => {
|
||||
let id = APP_PREFIX
|
||||
if (tenantId) {
|
||||
id += `${tenantId}${SEPARATOR}`
|
||||
|
@ -170,7 +165,7 @@ export function getGlobalUserParams(globalId: any, otherProps: any = {}) {
|
|||
/**
|
||||
* Gets parameters for retrieving users, this is a utility function for the getDocParams function.
|
||||
*/
|
||||
export function getUserMetadataParams(userId?: string, otherProps = {}) {
|
||||
export function getUserMetadataParams(userId?: string | null, otherProps = {}) {
|
||||
return getRowParams(InternalTable.USER_METADATA, userId, otherProps)
|
||||
}
|
||||
|
||||
|
@ -243,18 +238,18 @@ export function getTemplateParams(
|
|||
* Generates a new role ID.
|
||||
* @returns {string} The new role ID which the role doc can be stored under.
|
||||
*/
|
||||
export function generateRoleID(id: any) {
|
||||
export function generateRoleID(id?: any) {
|
||||
return `${DocumentType.ROLE}${SEPARATOR}${id || newid()}`
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets parameters for retrieving a role, this is a utility function for the getDocParams function.
|
||||
*/
|
||||
export function getRoleParams(roleId = null, otherProps = {}) {
|
||||
export function getRoleParams(roleId?: string | null, otherProps = {}) {
|
||||
return getDocParams(DocumentType.ROLE, roleId, otherProps)
|
||||
}
|
||||
|
||||
export function getStartEndKeyURL(baseKey: any, tenantId = null) {
|
||||
export function getStartEndKeyURL(baseKey: any, tenantId?: string) {
|
||||
const tenancy = tenantId ? `${SEPARATOR}${tenantId}` : ""
|
||||
return `startkey="${baseKey}${tenancy}"&endkey="${baseKey}${tenancy}${UNICODE_MAX}"`
|
||||
}
|
||||
|
@ -301,7 +296,12 @@ export async function getAllDbs(opts = { efficient: false }) {
|
|||
*
|
||||
* @return {Promise<object[]>} returns the app information document stored in each app database.
|
||||
*/
|
||||
export async function getAllApps({ dev, all, idsOnly, efficient }: any = {}) {
|
||||
export async function getAllApps({
|
||||
dev,
|
||||
all,
|
||||
idsOnly,
|
||||
efficient,
|
||||
}: any = {}): Promise<App[] | string[]> {
|
||||
let tenantId = getTenantId()
|
||||
if (!env.MULTI_TENANCY && !tenantId) {
|
||||
tenantId = DEFAULT_TENANT_ID
|
||||
|
@ -373,35 +373,23 @@ export async function getAllApps({ dev, all, idsOnly, efficient }: any = {}) {
|
|||
* Utility function for getAllApps but filters to production apps only.
|
||||
*/
|
||||
export async function getProdAppIDs() {
|
||||
return (await getAllApps({ idsOnly: true })).filter(
|
||||
(id: any) => !isDevAppID(id)
|
||||
)
|
||||
const apps = (await getAllApps({ idsOnly: true })) as string[]
|
||||
return apps.filter((id: any) => !isDevAppID(id))
|
||||
}
|
||||
|
||||
/**
|
||||
* Utility function for the inverse of above.
|
||||
*/
|
||||
export async function getDevAppIDs() {
|
||||
return (await getAllApps({ idsOnly: true })).filter((id: any) =>
|
||||
isDevAppID(id)
|
||||
)
|
||||
const apps = (await getAllApps({ idsOnly: true })) as string[]
|
||||
return apps.filter((id: any) => isDevAppID(id))
|
||||
}
|
||||
|
||||
export async function dbExists(dbName: any) {
|
||||
let exists = false
|
||||
return doWithDB(
|
||||
dbName,
|
||||
async (db: any) => {
|
||||
try {
|
||||
// check if database exists
|
||||
const info = await db.info()
|
||||
if (info && !info.error) {
|
||||
exists = true
|
||||
}
|
||||
} catch (err) {
|
||||
exists = false
|
||||
}
|
||||
return exists
|
||||
async (db: Database) => {
|
||||
return await db.exists()
|
||||
},
|
||||
{ skip_setup: true }
|
||||
)
|
||||
|
@ -500,7 +488,7 @@ export const getScopedFullConfig = async function (
|
|||
)[0]
|
||||
|
||||
// custom logic for settings doc
|
||||
if (type === Configs.SETTINGS) {
|
||||
if (type === ConfigType.SETTINGS) {
|
||||
if (scopedConfig && scopedConfig.doc) {
|
||||
// overrides affected by environment variables
|
||||
scopedConfig.doc.config.platformUrl = await getPlatformUrl({
|
||||
|
@ -539,7 +527,7 @@ export const getPlatformUrl = async (opts = { tenantAware: true }) => {
|
|||
// get the doc directly instead of with getScopedConfig to prevent loop
|
||||
let settings
|
||||
try {
|
||||
settings = await db.get(generateConfigID({ type: Configs.SETTINGS }))
|
||||
settings = await db.get(generateConfigID({ type: ConfigType.SETTINGS }))
|
||||
} catch (e: any) {
|
||||
if (e.status !== 404) {
|
||||
throw e
|
||||
|
|
|
@ -1,8 +1,13 @@
|
|||
import { DocumentType, ViewName, DeprecatedViews, SEPARATOR } from "./utils"
|
||||
import {
|
||||
DocumentType,
|
||||
ViewName,
|
||||
DeprecatedViews,
|
||||
SEPARATOR,
|
||||
StaticDatabases,
|
||||
} from "../constants"
|
||||
import { getGlobalDB } from "../context"
|
||||
import PouchDB from "pouchdb"
|
||||
import { StaticDatabases } from "./constants"
|
||||
import { doWithDB } from "./"
|
||||
import { Database, DatabaseQueryOpts } from "@budibase/types"
|
||||
|
||||
const DESIGN_DB = "_design/database"
|
||||
|
||||
|
@ -19,7 +24,7 @@ interface DesignDocument {
|
|||
views: any
|
||||
}
|
||||
|
||||
async function removeDeprecated(db: PouchDB.Database, viewName: ViewName) {
|
||||
async function removeDeprecated(db: Database, viewName: ViewName) {
|
||||
// @ts-ignore
|
||||
if (!DeprecatedViews[viewName]) {
|
||||
return
|
||||
|
@ -70,16 +75,13 @@ export const createAccountEmailView = async () => {
|
|||
emit(doc.email.toLowerCase(), doc._id)
|
||||
}
|
||||
}`
|
||||
await doWithDB(
|
||||
StaticDatabases.PLATFORM_INFO.name,
|
||||
async (db: PouchDB.Database) => {
|
||||
await createView(db, viewJs, ViewName.ACCOUNT_BY_EMAIL)
|
||||
}
|
||||
)
|
||||
await doWithDB(StaticDatabases.PLATFORM_INFO.name, async (db: Database) => {
|
||||
await createView(db, viewJs, ViewName.ACCOUNT_BY_EMAIL)
|
||||
})
|
||||
}
|
||||
|
||||
export const createUserAppView = async () => {
|
||||
const db = getGlobalDB() as PouchDB.Database
|
||||
const db = getGlobalDB()
|
||||
const viewJs = `function(doc) {
|
||||
if (doc._id.startsWith("${DocumentType.USER}${SEPARATOR}") && doc.roles) {
|
||||
for (let prodAppId of Object.keys(doc.roles)) {
|
||||
|
@ -117,12 +119,9 @@ export const createPlatformUserView = async () => {
|
|||
emit(doc._id.toLowerCase(), doc._id)
|
||||
}
|
||||
}`
|
||||
await doWithDB(
|
||||
StaticDatabases.PLATFORM_INFO.name,
|
||||
async (db: PouchDB.Database) => {
|
||||
await createView(db, viewJs, ViewName.PLATFORM_USERS_LOWERCASE)
|
||||
}
|
||||
)
|
||||
await doWithDB(StaticDatabases.PLATFORM_INFO.name, async (db: Database) => {
|
||||
await createView(db, viewJs, ViewName.PLATFORM_USERS_LOWERCASE)
|
||||
})
|
||||
}
|
||||
|
||||
export interface QueryViewOptions {
|
||||
|
@ -131,25 +130,29 @@ export interface QueryViewOptions {
|
|||
|
||||
export const queryView = async <T>(
|
||||
viewName: ViewName,
|
||||
params: PouchDB.Query.Options<T, T>,
|
||||
db: PouchDB.Database,
|
||||
params: DatabaseQueryOpts,
|
||||
db: Database,
|
||||
createFunc: any,
|
||||
opts?: QueryViewOptions
|
||||
): Promise<T[] | T | undefined> => {
|
||||
try {
|
||||
let response = await db.query<T, T>(`database/${viewName}`, params)
|
||||
let response = await db.query<T>(`database/${viewName}`, params)
|
||||
const rows = response.rows
|
||||
const docs = rows.map(row => (params.include_docs ? row.doc : row.value))
|
||||
const docs = rows.map((row: any) =>
|
||||
params.include_docs ? row.doc : row.value
|
||||
)
|
||||
|
||||
// if arrayResponse has been requested, always return array regardless of length
|
||||
if (opts?.arrayResponse) {
|
||||
return docs
|
||||
return docs as T[]
|
||||
} else {
|
||||
// return the single document if there is only one
|
||||
return docs.length <= 1 ? docs[0] : docs
|
||||
return docs.length <= 1 ? (docs[0] as T) : (docs as T[])
|
||||
}
|
||||
} catch (err: any) {
|
||||
if (err != null && err.name === "not_found") {
|
||||
const pouchNotFound = err && err.name === "not_found"
|
||||
const couchNotFound = err && err.status === 404
|
||||
if (pouchNotFound || couchNotFound) {
|
||||
await removeDeprecated(db, viewName)
|
||||
await createFunc()
|
||||
return queryView(viewName, params, db, createFunc, opts)
|
||||
|
@ -161,7 +164,7 @@ export const queryView = async <T>(
|
|||
|
||||
export const queryPlatformView = async <T>(
|
||||
viewName: ViewName,
|
||||
params: PouchDB.Query.Options<T, T>,
|
||||
params: DatabaseQueryOpts,
|
||||
opts?: QueryViewOptions
|
||||
): Promise<T[] | T | undefined> => {
|
||||
const CreateFuncByName: any = {
|
||||
|
@ -169,19 +172,16 @@ export const queryPlatformView = async <T>(
|
|||
[ViewName.PLATFORM_USERS_LOWERCASE]: createPlatformUserView,
|
||||
}
|
||||
|
||||
return doWithDB(
|
||||
StaticDatabases.PLATFORM_INFO.name,
|
||||
async (db: PouchDB.Database) => {
|
||||
const createFn = CreateFuncByName[viewName]
|
||||
return queryView(viewName, params, db, createFn, opts)
|
||||
}
|
||||
)
|
||||
return doWithDB(StaticDatabases.PLATFORM_INFO.name, async (db: Database) => {
|
||||
const createFn = CreateFuncByName[viewName]
|
||||
return queryView(viewName, params, db, createFn, opts)
|
||||
})
|
||||
}
|
||||
|
||||
export const queryGlobalView = async <T>(
|
||||
viewName: ViewName,
|
||||
params: PouchDB.Query.Options<T, T>,
|
||||
db?: PouchDB.Database,
|
||||
params: DatabaseQueryOpts,
|
||||
db?: Database,
|
||||
opts?: QueryViewOptions
|
||||
): Promise<T[] | T | undefined> => {
|
||||
const CreateFuncByName: any = {
|
||||
|
@ -192,8 +192,8 @@ export const queryGlobalView = async <T>(
|
|||
}
|
||||
// can pass DB in if working with something specific
|
||||
if (!db) {
|
||||
db = getGlobalDB() as PouchDB.Database
|
||||
db = getGlobalDB()
|
||||
}
|
||||
const createFn = CreateFuncByName[viewName]
|
||||
return queryView(viewName, params, db, createFn, opts)
|
||||
return queryView(viewName, params, db!, createFn, opts)
|
||||
}
|
||||
|
|
|
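A sketch of how the rewritten view helpers get called; the view name, import paths and document shape here are assumptions for illustration, not taken from this diff.

import { queryGlobalView } from "../db"                // assumed relative import
import { ViewName } from "../constants"                // assumed enum member below
import { DatabaseQueryOpts } from "@budibase/types"

// look a user up by email via the global design doc; on a 404 the view is recreated, as shown above
async function findByEmail(email: string) {
  const params: DatabaseQueryOpts = { key: email.toLowerCase(), include_docs: true }
  return queryGlobalView<any>(ViewName.USER_BY_EMAIL, params)
}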
@ -1,9 +1,13 @@
|
|||
function isTest() {
|
||||
return (
|
||||
process.env.NODE_ENV === "jest" ||
|
||||
process.env.NODE_ENV === "cypress" ||
|
||||
process.env.JEST_WORKER_ID != null
|
||||
)
|
||||
return isCypress() || isJest()
|
||||
}
|
||||
|
||||
function isJest() {
|
||||
return !!(process.env.NODE_ENV === "jest" || process.env.JEST_WORKER_ID)
|
||||
}
|
||||
|
||||
function isCypress() {
|
||||
return process.env.NODE_ENV === "cypress"
|
||||
}
|
||||
|
||||
function isDev() {
|
||||
|
@ -25,8 +29,9 @@ const DefaultBucketName = {
|
|||
PLUGINS: "plugins",
|
||||
}
|
||||
|
||||
const env = {
|
||||
const environment = {
|
||||
isTest,
|
||||
isJest,
|
||||
isDev,
|
||||
JS_BCRYPT: process.env.JS_BCRYPT,
|
||||
JWT_SECRET: process.env.JWT_SECRET,
|
||||
|
@ -69,24 +74,24 @@ const env = {
|
|||
DISABLE_DEVELOPER_LICENSE: process.env.DISABLE_DEVELOPER_LICENSE,
|
||||
DEFAULT_LICENSE: process.env.DEFAULT_LICENSE,
|
||||
SERVICE: process.env.SERVICE || "budibase",
|
||||
MEMORY_LEAK_CHECK: process.env.MEMORY_LEAK_CHECK || false,
|
||||
LOG_LEVEL: process.env.LOG_LEVEL,
|
||||
SESSION_UPDATE_PERIOD: process.env.SESSION_UPDATE_PERIOD,
|
||||
DEPLOYMENT_ENVIRONMENT:
|
||||
process.env.DEPLOYMENT_ENVIRONMENT || "docker-compose",
|
||||
_set(key: any, value: any) {
|
||||
process.env[key] = value
|
||||
module.exports[key] = value
|
||||
// @ts-ignore
|
||||
environment[key] = value
|
||||
},
|
||||
}
|
||||
|
||||
// clean up any environment variable edge cases
|
||||
for (let [key, value] of Object.entries(env)) {
|
||||
for (let [key, value] of Object.entries(environment)) {
|
||||
// handle the edge case of "0" to disable an environment variable
|
||||
if (value === "0") {
|
||||
// @ts-ignore
|
||||
env[key] = 0
|
||||
environment[key] = 0
|
||||
}
|
||||
}
|
||||
|
||||
export = env
|
||||
export = environment
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
import env from "../environment"
|
||||
import tenancy from "../tenancy"
|
||||
import * as tenancy from "../tenancy"
|
||||
import * as dbUtils from "../db/utils"
|
||||
import { Configs } from "../constants"
|
||||
import { withCache, TTL, CacheKeys } from "../cache/generic"
|
||||
import { Config } from "../constants"
|
||||
import { withCache, TTL, CacheKey } from "../cache"
|
||||
|
||||
export const enabled = async () => {
|
||||
// cloud - always use the environment variable
|
||||
|
@ -13,7 +13,7 @@ export const enabled = async () => {
|
|||
// self host - prefer the settings doc
|
||||
// use cache as events have high throughput
|
||||
const enabledInDB = await withCache(
|
||||
CacheKeys.ANALYTICS_ENABLED,
|
||||
CacheKey.ANALYTICS_ENABLED,
|
||||
TTL.ONE_DAY,
|
||||
async () => {
|
||||
const settings = await getSettingsDoc()
|
||||
|
@ -45,9 +45,7 @@ const getSettingsDoc = async () => {
|
|||
const db = tenancy.getGlobalDB()
|
||||
let settings
|
||||
try {
|
||||
settings = await db.get(
|
||||
dbUtils.generateConfigID({ type: Configs.SETTINGS })
|
||||
)
|
||||
settings = await db.get(dbUtils.generateConfigID({ type: Config.SETTINGS }))
|
||||
} catch (e: any) {
|
||||
if (e.status !== 404) {
|
||||
throw e
|
||||
|
|
|
@ -21,7 +21,7 @@ import {
|
|||
AppCreatedEvent,
|
||||
} from "@budibase/types"
|
||||
import * as context from "../context"
|
||||
import { CacheKeys } from "../cache/generic"
|
||||
import { CacheKey } from "../cache/generic"
|
||||
import * as cache from "../cache/generic"
|
||||
|
||||
// LIFECYCLE
|
||||
|
@ -48,18 +48,18 @@ export const end = async () => {
|
|||
// CRUD
|
||||
|
||||
const getBackfillMetadata = async (): Promise<BackfillMetadata | null> => {
|
||||
return cache.get(CacheKeys.BACKFILL_METADATA)
|
||||
return cache.get(CacheKey.BACKFILL_METADATA)
|
||||
}
|
||||
|
||||
const saveBackfillMetadata = async (
|
||||
backfill: BackfillMetadata
|
||||
): Promise<void> => {
|
||||
// no TTL - deleted by backfill
|
||||
return cache.store(CacheKeys.BACKFILL_METADATA, backfill)
|
||||
return cache.store(CacheKey.BACKFILL_METADATA, backfill)
|
||||
}
|
||||
|
||||
const deleteBackfillMetadata = async (): Promise<void> => {
|
||||
await cache.delete(CacheKeys.BACKFILL_METADATA)
|
||||
await cache.destroy(CacheKey.BACKFILL_METADATA)
|
||||
}
|
||||
|
||||
const clearEvents = async () => {
|
||||
|
@ -70,7 +70,7 @@ const clearEvents = async () => {
|
|||
for (const key of keys) {
|
||||
// delete each key
|
||||
// don't use tenancy, already in the key
|
||||
await cache.delete(key, { useTenancy: false })
|
||||
await cache.destroy(key, { useTenancy: false })
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -167,7 +167,7 @@ const getEventKey = (event?: Event, properties?: any) => {
|
|||
|
||||
const tenantId = context.getTenantId()
|
||||
if (event) {
|
||||
eventKey = `${CacheKeys.EVENTS}:${tenantId}:${event}`
|
||||
eventKey = `${CacheKey.EVENTS}:${tenantId}:${event}`
|
||||
|
||||
// use some properties to make the key more unique
|
||||
const custom = CUSTOM_PROPERTY_SUFFIX[event]
|
||||
|
@ -176,7 +176,7 @@ const getEventKey = (event?: Event, properties?: any) => {
|
|||
eventKey = `${eventKey}:${suffix}`
|
||||
}
|
||||
} else {
|
||||
eventKey = `${CacheKeys.EVENTS}:${tenantId}:*`
|
||||
eventKey = `${CacheKey.EVENTS}:${tenantId}:*`
|
||||
}
|
||||
|
||||
return eventKey
|
||||
|
|
|
@ -19,10 +19,10 @@ import {
|
|||
} from "@budibase/types"
|
||||
import { processors } from "./processors"
|
||||
import * as dbUtils from "../db/utils"
|
||||
import { Configs } from "../constants"
|
||||
import * as hashing from "../hashing"
|
||||
import { Config } from "../constants"
|
||||
import { newid } from "../utils"
|
||||
import * as installation from "../installation"
|
||||
import { withCache, TTL, CacheKeys } from "../cache/generic"
|
||||
import { withCache, TTL, CacheKey } from "../cache/generic"
|
||||
|
||||
const pkg = require("../../package.json")
|
||||
|
||||
|
@ -270,17 +270,17 @@ const getEventTenantId = async (tenantId: string): Promise<string> => {
|
|||
const getUniqueTenantId = async (tenantId: string): Promise<string> => {
|
||||
// make sure this tenantId always matches the tenantId in context
|
||||
return context.doInTenant(tenantId, () => {
|
||||
return withCache(CacheKeys.UNIQUE_TENANT_ID, TTL.ONE_DAY, async () => {
|
||||
return withCache(CacheKey.UNIQUE_TENANT_ID, TTL.ONE_DAY, async () => {
|
||||
const db = context.getGlobalDB()
|
||||
const config: SettingsConfig = await dbUtils.getScopedFullConfig(db, {
|
||||
type: Configs.SETTINGS,
|
||||
type: Config.SETTINGS,
|
||||
})
|
||||
|
||||
let uniqueTenantId: string
|
||||
if (config.config.uniqueTenantId) {
|
||||
return config.config.uniqueTenantId
|
||||
} else {
|
||||
uniqueTenantId = `${hashing.newid()}_${tenantId}`
|
||||
uniqueTenantId = `${newid()}_${tenantId}`
|
||||
config.config.uniqueTenantId = uniqueTenantId
|
||||
await db.put(config)
|
||||
return uniqueTenantId
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
import { Event } from "@budibase/types"
|
||||
import { CacheKeys, TTL } from "../../../cache/generic"
|
||||
import { CacheKey, TTL } from "../../../cache/generic"
|
||||
import * as cache from "../../../cache/generic"
|
||||
import * as context from "../../../context"
|
||||
|
||||
|
@ -74,7 +74,7 @@ export const limited = async (event: Event): Promise<boolean> => {
|
|||
}
|
||||
|
||||
const eventKey = (event: RateLimitedEvent) => {
|
||||
let key = `${CacheKeys.EVENTS_RATE_LIMIT}:${event}`
|
||||
let key = `${CacheKey.EVENTS_RATE_LIMIT}:${event}`
|
||||
if (isPerApp(event)) {
|
||||
key = key + ":" + context.getAppId()
|
||||
}
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
import "../../../../../tests/utilities/TestConfiguration"
|
||||
import "../../../../../tests"
|
||||
import PosthogProcessor from "../PosthogProcessor"
|
||||
import { Event, IdentityType, Hosting } from "@budibase/types"
|
||||
const tk = require("timekeeper")
|
||||
import * as cache from "../../../../cache/generic"
|
||||
import { CacheKeys } from "../../../../cache/generic"
|
||||
import { CacheKey } from "../../../../cache/generic"
|
||||
import * as context from "../../../../context"
|
||||
|
||||
const newIdentity = () => {
|
||||
|
@ -19,7 +19,7 @@ describe("PosthogProcessor", () => {
|
|||
beforeEach(async () => {
|
||||
jest.clearAllMocks()
|
||||
await cache.bustCache(
|
||||
`${CacheKeys.EVENTS_RATE_LIMIT}:${Event.SERVED_BUILDER}`
|
||||
`${CacheKey.EVENTS_RATE_LIMIT}:${Event.SERVED_BUILDER}`
|
||||
)
|
||||
})
|
||||
|
||||
|
@ -89,7 +89,7 @@ describe("PosthogProcessor", () => {
|
|||
await processor.processEvent(Event.SERVED_BUILDER, identity, properties)
|
||||
|
||||
await cache.bustCache(
|
||||
`${CacheKeys.EVENTS_RATE_LIMIT}:${Event.SERVED_BUILDER}`
|
||||
`${CacheKey.EVENTS_RATE_LIMIT}:${Event.SERVED_BUILDER}`
|
||||
)
|
||||
|
||||
tk.freeze(new Date(2022, 0, 1, 14, 0))
|
||||
|
|
|
@ -72,7 +72,7 @@ export async function stepCreated(
|
|||
automationId: automation._id as string,
|
||||
triggerId: automation.definition?.trigger?.id,
|
||||
triggerType: automation.definition?.trigger?.stepId,
|
||||
stepId: step.id,
|
||||
stepId: step.id!,
|
||||
stepType: step.stepId,
|
||||
}
|
||||
await publishEvent(Event.AUTOMATION_STEP_CREATED, properties, timestamp)
|
||||
|
@ -87,7 +87,7 @@ export async function stepDeleted(
|
|||
automationId: automation._id as string,
|
||||
triggerId: automation.definition?.trigger?.id,
|
||||
triggerType: automation.definition?.trigger?.stepId,
|
||||
stepId: step.id,
|
||||
stepId: step.id!,
|
||||
stepType: step.stepId,
|
||||
}
|
||||
await publishEvent(Event.AUTOMATION_STEP_DELETED, properties)
|
||||
|
|
|
@ -1,12 +1,34 @@
|
|||
import { AppBackup, AppBackupRestoreEvent, Event } from "@budibase/types"
|
||||
import {
|
||||
AppBackup,
|
||||
AppBackupRestoreEvent,
|
||||
AppBackupTriggeredEvent,
|
||||
AppBackupTrigger,
|
||||
AppBackupType,
|
||||
Event,
|
||||
} from "@budibase/types"
|
||||
import { publishEvent } from "../events"
|
||||
|
||||
export async function appBackupRestored(backup: AppBackup) {
|
||||
const properties: AppBackupRestoreEvent = {
|
||||
appId: backup.appId,
|
||||
backupName: backup.name!,
|
||||
restoreId: backup._id!,
|
||||
backupCreatedAt: backup.timestamp,
|
||||
}
|
||||
|
||||
await publishEvent(Event.APP_BACKUP_RESTORED, properties)
|
||||
}
|
||||
|
||||
export async function appBackupTriggered(
|
||||
appId: string,
|
||||
backupId: string,
|
||||
type: AppBackupType,
|
||||
trigger: AppBackupTrigger
|
||||
) {
|
||||
const properties: AppBackupTriggeredEvent = {
|
||||
appId: appId,
|
||||
backupId,
|
||||
type,
|
||||
trigger,
|
||||
}
|
||||
await publishEvent(Event.APP_BACKUP_TRIGGERED, properties)
|
||||
}
|
||||
|
|
|
@ -1,17 +1,17 @@
|
|||
const env = require("../environment")
|
||||
const tenancy = require("../tenancy")
|
||||
import env from "../environment"
|
||||
import * as tenancy from "../tenancy"
|
||||
|
||||
/**
|
||||
* Read the TENANT_FEATURE_FLAGS env var and return an array of feature flags for each tenant.
|
||||
* The env var is formatted as:
|
||||
* tenant1:feature1:feature2,tenant2:feature1
|
||||
*/
|
||||
const getFeatureFlags = () => {
|
||||
function getFeatureFlags() {
|
||||
if (!env.TENANT_FEATURE_FLAGS) {
|
||||
return
|
||||
}
|
||||
|
||||
const tenantFeatureFlags = {}
|
||||
const tenantFeatureFlags: Record<string, string[]> = {}
|
||||
|
||||
env.TENANT_FEATURE_FLAGS.split(",").forEach(tenantToFeatures => {
|
||||
const [tenantId, ...features] = tenantToFeatures.split(":")
|
||||
|
@ -29,13 +29,13 @@ const getFeatureFlags = () => {
|
|||
|
||||
const TENANT_FEATURE_FLAGS = getFeatureFlags()
|
||||
|
||||
exports.isEnabled = featureFlag => {
|
||||
export function isEnabled(featureFlag: string) {
|
||||
const tenantId = tenancy.getTenantId()
|
||||
const flags = exports.getTenantFeatureFlags(tenantId)
|
||||
const flags = getTenantFeatureFlags(tenantId)
|
||||
return flags.includes(featureFlag)
|
||||
}
|
||||
|
||||
exports.getTenantFeatureFlags = tenantId => {
|
||||
export function getTenantFeatureFlags(tenantId: string) {
|
||||
const flags = []
|
||||
|
||||
if (TENANT_FEATURE_FLAGS) {
|
||||
|
@ -53,8 +53,8 @@ exports.getTenantFeatureFlags = tenantId => {
|
|||
return flags
|
||||
}
|
||||
|
||||
exports.TenantFeatureFlag = {
|
||||
LICENSING: "LICENSING",
|
||||
GOOGLE_SHEETS: "GOOGLE_SHEETS",
|
||||
USER_GROUPS: "USER_GROUPS",
|
||||
export enum TenantFeatureFlag {
|
||||
LICENSING = "LICENSING",
|
||||
GOOGLE_SHEETS = "GOOGLE_SHEETS",
|
||||
USER_GROUPS = "USER_GROUPS",
|
||||
}
|
|
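An illustrative reading of the env var format documented above; the tenant IDs, the chosen flags and the import path are made up for the example.

// TENANT_FEATURE_FLAGS="tenant1:LICENSING:USER_GROUPS,tenant2:GOOGLE_SHEETS"
import { getTenantFeatureFlags, isEnabled, TenantFeatureFlag } from "../featureFlags"  // assumed path

getTenantFeatureFlags("tenant1")             // => ["LICENSING", "USER_GROUPS"]
getTenantFeatureFlags("tenant2")             // => ["GOOGLE_SHEETS"]
// isEnabled() resolves the tenant from the current tenancy context:
isEnabled(TenantFeatureFlag.GOOGLE_SHEETS)   // true only while running as tenant2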
@ -4,6 +4,6 @@
|
|||
* @param {string} url The URL to test and remove any extra double slashes.
|
||||
* @return {string} The updated url.
|
||||
*/
|
||||
exports.checkSlashesInUrl = url => {
|
||||
export function checkSlashesInUrl(url: string) {
|
||||
return url.replace(/(https?:\/\/)|(\/)+/g, "$1$2")
|
||||
}
|
|
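A quick illustration of what the regex above does; the URL is made up and the import path is assumed.

import { checkSlashesInUrl } from "../helpers"         // assumed relative import

// duplicate slashes collapse, while the protocol's "//" is preserved
checkSlashesInUrl("http://localhost:10000//api//global/users")
// => "http://localhost:10000/api/global/users"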
@ -4,30 +4,28 @@ import * as events from "./events"
|
|||
import * as migrations from "./migrations"
|
||||
import * as users from "./users"
|
||||
import * as roles from "./security/roles"
|
||||
import * as permissions from "./security/permissions"
|
||||
import * as accounts from "./cloud/accounts"
|
||||
import * as installation from "./installation"
|
||||
import env from "./environment"
|
||||
import tenancy from "./tenancy"
|
||||
import featureFlags from "./featureFlags"
|
||||
import * as tenancy from "./tenancy"
|
||||
import * as featureFlags from "./featureFlags"
|
||||
import * as sessions from "./security/sessions"
|
||||
import deprovisioning from "./context/deprovision"
|
||||
import auth from "./auth"
|
||||
import constants from "./constants"
|
||||
import * as dbConstants from "./db/constants"
|
||||
import * as deprovisioning from "./context/deprovision"
|
||||
import * as auth from "./auth"
|
||||
import * as constants from "./constants"
|
||||
import * as logging from "./logging"
|
||||
import pino from "./pino"
|
||||
import * as pino from "./pino"
|
||||
import * as middleware from "./middleware"
|
||||
import plugins from "./plugin"
|
||||
import encryption from "./security/encryption"
|
||||
import * as plugins from "./plugin"
|
||||
import * as encryption from "./security/encryption"
|
||||
import * as queue from "./queue"
|
||||
|
||||
// mimic the outer package exports
|
||||
import * as db from "./pkg/db"
|
||||
import * as objectStore from "./pkg/objectStore"
|
||||
import * as utils from "./pkg/utils"
|
||||
import redis from "./pkg/redis"
|
||||
import cache from "./pkg/cache"
|
||||
import context from "./pkg/context"
|
||||
import * as db from "./db"
|
||||
import * as context from "./context"
|
||||
import * as cache from "./cache"
|
||||
import * as objectStore from "./objectStore"
|
||||
import * as redis from "./redis"
|
||||
import * as utils from "./utils"
|
||||
|
||||
const init = (opts: any = {}) => {
|
||||
db.init(opts.db)
|
||||
|
@ -36,7 +34,7 @@ const init = (opts: any = {}) => {
|
|||
const core = {
|
||||
init,
|
||||
db,
|
||||
...dbConstants,
|
||||
...constants,
|
||||
redis,
|
||||
locks: redis.redlock,
|
||||
objectStore,
|
||||
|
@ -45,7 +43,6 @@ const core = {
|
|||
cache,
|
||||
auth,
|
||||
constants,
|
||||
...constants,
|
||||
migrations,
|
||||
env,
|
||||
accounts,
|
||||
|
@ -65,6 +62,7 @@ const core = {
|
|||
middleware,
|
||||
encryption,
|
||||
queue,
|
||||
permissions,
|
||||
}
|
||||
|
||||
export = core
|
||||
|
|
|
@ -1,16 +1,16 @@
|
|||
import * as hashing from "./hashing"
|
||||
import { newid } from "./utils"
|
||||
import * as events from "./events"
|
||||
import { StaticDatabases } from "./db/constants"
|
||||
import { StaticDatabases } from "./db"
|
||||
import { doWithDB } from "./db"
|
||||
import { Installation, IdentityType } from "@budibase/types"
|
||||
import * as context from "./context"
|
||||
import semver from "semver"
|
||||
import { bustCache, withCache, TTL, CacheKeys } from "./cache/generic"
|
||||
import { bustCache, withCache, TTL, CacheKey } from "./cache/generic"
|
||||
|
||||
const pkg = require("../package.json")
|
||||
|
||||
export const getInstall = async (): Promise<Installation> => {
|
||||
return withCache(CacheKeys.INSTALLATION, TTL.ONE_DAY, getInstallFromDB, {
|
||||
return withCache(CacheKey.INSTALLATION, TTL.ONE_DAY, getInstallFromDB, {
|
||||
useTenancy: false,
|
||||
})
|
||||
}
|
||||
|
@ -28,7 +28,7 @@ const getInstallFromDB = async (): Promise<Installation> => {
|
|||
if (e.status === 404) {
|
||||
install = {
|
||||
_id: StaticDatabases.PLATFORM_INFO.docs.install,
|
||||
installId: hashing.newid(),
|
||||
installId: newid(),
|
||||
version: pkg.version,
|
||||
}
|
||||
const resp = await platformDb.put(install)
|
||||
|
@ -50,7 +50,7 @@ const updateVersion = async (version: string): Promise<boolean> => {
|
|||
const install = await getInstall()
|
||||
install.version = version
|
||||
await platformDb.put(install)
|
||||
await bustCache(CacheKeys.INSTALLATION)
|
||||
await bustCache(CacheKey.INSTALLATION)
|
||||
}
|
||||
)
|
||||
} catch (e: any) {
|
||||
|
|
|
@ -1,4 +1,6 @@
|
|||
module.exports = async (ctx, next) => {
|
||||
import { BBContext } from "@budibase/types"
|
||||
|
||||
export = async (ctx: BBContext, next: any) => {
|
||||
if (
|
||||
!ctx.internal &&
|
||||
(!ctx.user || !ctx.user.admin || !ctx.user.admin.global)
|
|
@ -1,4 +0,0 @@
|
|||
module.exports = async (ctx, next) => {
|
||||
// Placeholder for audit log middleware
|
||||
return next()
|
||||
}
|
|
@ -0,0 +1,6 @@
|
|||
import { BBContext } from "@budibase/types"
|
||||
|
||||
export = async (ctx: BBContext | any, next: any) => {
|
||||
// Placeholder for audit log middleware
|
||||
return next()
|
||||
}
|
|
@ -1,17 +1,18 @@
|
|||
import { Cookies, Headers } from "../constants"
|
||||
import { Cookie, Header } from "../constants"
|
||||
import { getCookie, clearCookie, openJwt } from "../utils"
|
||||
import { getUser } from "../cache/user"
|
||||
import { getSession, updateSessionTTL } from "../security/sessions"
|
||||
import { buildMatcherRegex, matches } from "./matchers"
|
||||
import { SEPARATOR } from "../db/constants"
|
||||
import { ViewName } from "../db/utils"
|
||||
import { queryGlobalView } from "../db/views"
|
||||
import { SEPARATOR, queryGlobalView, ViewName } from "../db"
|
||||
import { getGlobalDB, doInTenant } from "../tenancy"
|
||||
import { decrypt } from "../security/encryption"
|
||||
const identity = require("../context/identity")
|
||||
const env = require("../environment")
|
||||
import * as identity from "../context/identity"
|
||||
import env from "../environment"
|
||||
import { BBContext, EndpointMatcher } from "@budibase/types"
|
||||
|
||||
const ONE_MINUTE = env.SESSION_UPDATE_PERIOD || 60 * 1000
|
||||
const ONE_MINUTE = env.SESSION_UPDATE_PERIOD
|
||||
? parseInt(env.SESSION_UPDATE_PERIOD)
|
||||
: 60 * 1000
|
||||
|
||||
interface FinaliseOpts {
|
||||
authenticated?: boolean
|
||||
|
@ -42,13 +43,13 @@ async function checkApiKey(apiKey: string, populateUser?: Function) {
|
|||
return doInTenant(tenantId, async () => {
|
||||
const db = getGlobalDB()
|
||||
// api key is encrypted in the database
|
||||
const userId = await queryGlobalView(
|
||||
const userId = (await queryGlobalView(
|
||||
ViewName.BY_API_KEY,
|
||||
{
|
||||
key: apiKey,
|
||||
},
|
||||
db
|
||||
)
|
||||
)) as string
|
||||
if (userId) {
|
||||
return {
|
||||
valid: true,
|
||||
|
@ -65,16 +66,16 @@ async function checkApiKey(apiKey: string, populateUser?: Function) {
|
|||
* The tenancy modules should not be used here and it should be assumed that the tenancy context
|
||||
* has not yet been populated.
|
||||
*/
|
||||
export = (
|
||||
noAuthPatterns = [],
|
||||
opts: { publicAllowed: boolean; populateUser?: Function } = {
|
||||
export = function (
|
||||
noAuthPatterns: EndpointMatcher[] = [],
|
||||
opts: { publicAllowed?: boolean; populateUser?: Function } = {
|
||||
publicAllowed: false,
|
||||
}
|
||||
) => {
|
||||
) {
|
||||
const noAuthOptions = noAuthPatterns ? buildMatcherRegex(noAuthPatterns) : []
|
||||
return async (ctx: any, next: any) => {
|
||||
return async (ctx: BBContext | any, next: any) => {
|
||||
let publicEndpoint = false
|
||||
const version = ctx.request.headers[Headers.API_VER]
|
||||
const version = ctx.request.headers[Header.API_VER]
|
||||
// the path is not authenticated
|
||||
const found = matches(ctx, noAuthOptions)
|
||||
if (found) {
|
||||
|
@ -82,10 +83,10 @@ export = (
|
|||
}
|
||||
try {
|
||||
// check the actual user is authenticated first, try header or cookie
|
||||
const headerToken = ctx.request.headers[Headers.TOKEN]
|
||||
const authCookie = getCookie(ctx, Cookies.Auth) || openJwt(headerToken)
|
||||
const apiKey = ctx.request.headers[Headers.API_KEY]
|
||||
const tenantId = ctx.request.headers[Headers.TENANT_ID]
|
||||
const headerToken = ctx.request.headers[Header.TOKEN]
|
||||
const authCookie = getCookie(ctx, Cookie.Auth) || openJwt(headerToken)
|
||||
const apiKey = ctx.request.headers[Header.API_KEY]
|
||||
const tenantId = ctx.request.headers[Header.TENANT_ID]
|
||||
let authenticated = false,
|
||||
user = null,
|
||||
internal = false
|
||||
|
@ -116,7 +117,7 @@ export = (
|
|||
authenticated = false
|
||||
console.error("Auth Error", err?.message || err)
|
||||
// remove the cookie as the user does not exist anymore
|
||||
clearCookie(ctx, Cookies.Auth)
|
||||
clearCookie(ctx, Cookie.Auth)
|
||||
}
|
||||
}
|
||||
// this is an internal request, no user made it
|
||||
|
@ -140,7 +141,7 @@ export = (
|
|||
delete user.password
|
||||
}
|
||||
// be explicit
|
||||
if (authenticated !== true) {
|
||||
if (!authenticated) {
|
||||
authenticated = false
|
||||
}
|
||||
// isAuthenticated is a function, so use a variable to be able to check authed state
|
||||
|
@ -152,9 +153,10 @@ export = (
|
|||
return next()
|
||||
}
|
||||
} catch (err: any) {
|
||||
console.error("Auth Error", err?.message || err)
|
||||
// invalid token, clear the cookie
|
||||
if (err && err.name === "JsonWebTokenError") {
|
||||
clearCookie(ctx, Cookies.Auth)
|
||||
clearCookie(ctx, Cookie.Auth)
|
||||
}
|
||||
// allow configuring for public access
|
||||
if ((opts && opts.publicAllowed) || publicEndpoint) {
|
||||
|
|