Merge branch 'develop' of github.com:Budibase/budibase into group-fixes

mike12345567 2022-09-16 17:02:08 +01:00
commit 1be71933ca
632 changed files with 24160 additions and 10558 deletions

View File

@ -162,6 +162,7 @@
"translation"
]
},
{
"login": "mslourens",
"name": "Maurits Lourens",
"avatar_url": "https://avatars.githubusercontent.com/u/1907152?v=4",

View File

@ -59,3 +59,9 @@ jobs:
with:
install: false
command: yarn test:e2e:ci
- name: QA Core Integration Tests
run: |
cd qa-core
yarn
yarn api:test:ci
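
The same suite can be reproduced locally before pushing (a sketch; assumes a checkout containing qa-core and yarn installed):

    cd qa-core
    yarn               # install qa-core dependencies
    yarn api:test:ci   # run the API integration tests in CI mode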

View File

@ -69,6 +69,28 @@ jobs:
env:
KUBECONFIG_FILE: '${{ secrets.RELEASE_KUBECONFIG }}'
- name: Re roll app-service
uses: actions-hub/kubectl@master
env:
KUBE_CONFIG: ${{ secrets.RELEASE_KUBECONFIG_BASE64 }}
with:
args: rollout restart deployment app-service -n budibase
- name: Re roll proxy-service
uses: actions-hub/kubectl@master
env:
KUBE_CONFIG: ${{ secrets.RELEASE_KUBECONFIG_BASE64 }}
with:
args: rollout restart deployment proxy-service -n budibase
- name: Re roll worker-service
uses: actions-hub/kubectl@master
env:
KUBE_CONFIG: ${{ secrets.RELEASE_KUBECONFIG_BASE64 }}
with:
args: rollout restart deployment worker-service -n budibase
- name: Discord Webhook Action
uses: tsickert/discord-webhook@v4.0.0
with:

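Each re-roll step above is a thin wrapper around kubectl; run by hand it amounts to the following (a sketch, assuming a kubeconfig with access to the budibase namespace):

    # restart the pods behind each deployment without changing its spec
    kubectl rollout restart deployment app-service -n budibase
    kubectl rollout restart deployment proxy-service -n budibase
    kubectl rollout restart deployment worker-service -n budibase
    # optionally wait for the new pods to become ready
    kubectl rollout status deployment app-service -n budibase
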
View File

@ -18,8 +18,9 @@ on:
workflow_dispatch:
env:
# Posthog token used by ui at build time
POSTHOG_TOKEN: phc_uDYOfnFt6wAbBAXkC6STjcrTpAFiWIhqgFcsC1UVO5F
# Posthog token used by ui at build time
# disable unless needed for testing
# POSTHOG_TOKEN: phc_uDYOfnFt6wAbBAXkC6STjcrTpAFiWIhqgFcsC1UVO5F
INTERCOM_TOKEN: ${{ secrets.INTERCOM_TOKEN }}
PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
FEATURE_PREVIEW_URL: https://budirelease.live
@ -119,6 +120,27 @@ jobs:
]
env:
KUBECONFIG_FILE: '${{ secrets.RELEASE_KUBECONFIG }}'
- name: Re roll app-service
uses: actions-hub/kubectl@master
env:
KUBE_CONFIG: ${{ secrets.RELEASE_KUBECONFIG_BASE64 }}
with:
args: rollout restart deployment app-service -n budibase
- name: Re roll proxy-service
uses: actions-hub/kubectl@master
env:
KUBE_CONFIG: ${{ secrets.RELEASE_KUBECONFIG_BASE64 }}
with:
args: rollout restart deployment proxy-service -n budibase
- name: Re roll worker-service
uses: actions-hub/kubectl@master
env:
KUBE_CONFIG: ${{ secrets.RELEASE_KUBECONFIG_BASE64 }}
with:
args: rollout restart deployment worker-service -n budibase
- name: Discord Webhook Action
uses: tsickert/discord-webhook@v4.0.0

View File

@ -3,24 +3,37 @@ name: Budibase Release Selfhost
on:
workflow_dispatch:
env:
BRANCH: ${{ github.event.pull_request.head.ref }}
BASE_BRANCH: ${{ github.event.pull_request.base.ref}}
jobs:
release:
runs-on: ubuntu-latest
steps:
- name: Fail if branch is not master
if: github.ref != 'refs/heads/master'
run: |
echo "Ref is not master, you must run this job from master."
exit 1
- uses: actions/checkout@v2
with:
fetch-depth: 0
- name: Get the latest budibase release version
id: version
run: |
release_version=$(cat lerna.json | jq -r '.version')
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
- name: Tag and release Docker images (Self Host)
run: |
docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
# Get latest release version
release_version=$(cat lerna.json | jq -r '.version')
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
release_tag=v$release_version
release_tag=v${{ env.RELEASE_VERSION }}
# Pull apps and worker images
docker pull budibase/apps:$release_tag
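
The version lookup and tagging above can be reproduced locally (a sketch; assumes jq is installed and lerna.json sits in the repository root):

    release_version=$(jq -r '.version' lerna.json)
    release_tag="v$release_version"
    docker pull "budibase/apps:$release_tag"   # pull the image tagged for this release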
@ -40,13 +53,15 @@ jobs:
DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
SELFHOST_TAG: latest
- name: Build CLI executables
- name: Install Pro
run: yarn install:pro $BRANCH $BASE_BRANCH
- name: Bootstrap and build (CLI)
run: |
pushd packages/cli
yarn
yarn bootstrap
yarn build
popd
- name: Build OpenAPI spec
run: |
@ -93,4 +108,4 @@ jobs:
with:
webhook-url: ${{ secrets.PROD_DEPLOY_WEBHOOK_URL }}
content: "Self Host Deployment Complete: ${{ env.RELEASE_VERSION }} deployed to Self Host."
embed-title: ${{ env.RELEASE_VERSION }}
embed-title: ${{ env.RELEASE_VERSION }}

View File

@ -29,7 +29,7 @@ on:
env:
# Posthog token used by ui at build time
POSTHOG_TOKEN: phc_fg5I3nDOf6oJVMHSaycEhpPdlgS8rzXG2r6F2IpxCHS
POSTHOG_TOKEN: phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU
INTERCOM_TOKEN: ${{ secrets.INTERCOM_TOKEN }}
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
PERSONAL_ACCESS_TOKEN : ${{ secrets.PERSONAL_ACCESS_TOKEN }}

View File

@ -1,4 +1,4 @@
name: Budibase Smoke Test
name: Budibase Nightly Tests
on:
workflow_dispatch:
@ -6,7 +6,7 @@ on:
- cron: "0 5 * * *" # every day at 5AM
jobs:
release:
nightly:
runs-on: ubuntu-latest
steps:
@ -43,6 +43,18 @@ jobs:
name: Test Reports
path: packages/builder/cypress/reports/testReport.html
# TODO: enable once running in QA test env
# - name: Configure AWS Credentials
# uses: aws-actions/configure-aws-credentials@v1
# with:
# aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
# aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
# aws-region: eu-west-1
# - name: Upload test results HTML
# run: aws s3 cp packages/builder/cypress/reports/testReport.html s3://${{ secrets.BUDI_QA_REPORTS_BUCKET_NAME }}/$GITHUB_RUN_ID/index.html
- name: Cypress Discord Notify
run: yarn test:e2e:ci:notify
env:

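Once enabled, the commented upload step boils down to a single CLI call (a sketch; BUCKET_NAME stands in for the BUDI_QA_REPORTS_BUCKET_NAME secret and credentials come from the configure-aws-credentials step):

    aws s3 cp packages/builder/cypress/reports/testReport.html \
      "s3://$BUCKET_NAME/$GITHUB_RUN_ID/index.html"   # BUCKET_NAME is a placeholder
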
.gitignore vendored
View File

@ -63,6 +63,7 @@ typings/
# dotenv environment variables file
.env
!qa-core/.env
!hosting/.env
hosting/.generated-nginx.dev.conf
hosting/proxy/.generated-nginx.prod.conf
@ -102,4 +103,6 @@ packages/builder/cypress/reports
stats.html
# TypeScript cache
*.tsbuildinfo
*.tsbuildinfo
budibase-component
budibase-datasource

View File

@ -8,4 +8,4 @@ packages/server/client
packages/server/src/definitions/openapi.ts
packages/builder/.routify
packages/builder/cypress/support/queryLevelTransformerFunction.js
packages/builder/cypress/support/queryLevelTransformerFunctionWithData.js
packages/builder/cypress/support/queryLevelTransformerFunctionWithData.js

View File

@ -4,7 +4,7 @@
"singleQuote": false,
"trailingComma": "es5",
"arrowParens": "avoid",
"jsxBracketSameLine": false,
"bracketSameLine": false,
"plugins": ["prettier-plugin-svelte"],
"svelteSortOrder": "options-scripts-markup-styles"
}

View File

@ -65,7 +65,7 @@ Budibase is open-source - licensed as GPL v3. This should fill you with confiden
<br /><br />
### Load data or start from scratch
Budibase pulls in data from multiple sources, including MongoDB, CouchDB, PostgreSQL, MySQL, Airtable, S3, DynamoDB, or a REST API. And unlike other platforms, with Budibase you can start from scratch and create business apps with no data sources. [Request new data sources](https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas).
Budibase pulls in data from multiple sources, including MongoDB, CouchDB, PostgreSQL, MySQL, Airtable, S3, DynamoDB, or a REST API. And unlike other platforms, with Budibase you can start from scratch and create business apps with no datasources. [Request new datasources](https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas).
<p align="center">
<img alt="Budibase data" src="https://res.cloudinary.com/daog6scxm/image/upload/v1636970242/Out%20of%20beta%20launch/data_n1tlhf.png">

View File

@ -124,11 +124,31 @@ spec:
value: {{ .Values.globals.tenantFeatureFlags | quote }}
{{ if .Values.globals.bbAdminUserEmail }}
- name: BB_ADMIN_USER_EMAIL
value: { { .Values.globals.bbAdminUserEmail | quote } }
value: {{ .Values.globals.bbAdminUserEmail | quote }}
{{ end }}
{{ if .Values.globals.bbAdminUserPassword }}
- name: BB_ADMIN_USER_PASSWORD
value: { { .Values.globals.bbAdminUserPassword | quote } }
value: {{ .Values.globals.bbAdminUserPassword | quote }}
{{ end }}
{{ if .Values.globals.pluginsDir }}
- name: PLUGINS_DIR
value: {{ .Values.globals.pluginsDir | quote }}
{{ end }}
{{ if .Values.services.apps.nodeDebug }}
- name: NODE_DEBUG
value: {{ .Values.services.apps.nodeDebug | quote }}
{{ end }}
{{ if .Values.globals.elasticApmEnabled }}
- name: ELASTIC_APM_ENABLED
value: {{ .Values.globals.elasticApmEnabled | quote }}
{{ end }}
{{ if .Values.globals.elasticApmSecretToken }}
- name: ELASTIC_APM_SECRET_TOKEN
value: {{ .Values.globals.elasticApmSecretToken | quote }}
{{ end }}
{{ if .Values.globals.elasticApmServerUrl }}
- name: ELASTIC_APM_SERVER_URL
value: {{ .Values.globals.elasticApmServerUrl | quote }}
{{ end }}
image: budibase/apps:{{ .Values.globals.appVersion }}
@ -142,7 +162,10 @@ spec:
name: bbapps
ports:
- containerPort: {{ .Values.services.apps.port }}
resources: {}
{{ with .Values.services.apps.resources }}
resources:
{{- toYaml . | nindent 10 }}
{{ end }}
{{- with .Values.affinity }}
affinity:
{{- toYaml . | nindent 8 }}
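
With the resources block now templated from values (here and in the worker, proxy, minio, couchdb-backup and redis deployments below), limits can be supplied per service at install time. A sketch; the release name and chart path are assumptions:

    helm upgrade --install budibase ./charts/budibase \
      --set services.apps.resources.requests.cpu=100m \
      --set services.apps.resources.requests.memory=128Mi \
      --set services.apps.resources.limits.cpu=500m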

View File

@ -38,7 +38,10 @@ spec:
image: redgeoff/replicate-couchdb-cluster
imagePullPolicy: Always
name: couchdb-backup
resources: {}
{{ with .Values.services.couchdb.backup.resources }}
resources:
{{- toYaml . | nindent 10 }}
{{ end }}
{{- with .Values.affinity }}
affinity:
{{- toYaml . | nindent 8 }}

View File

@ -56,7 +56,10 @@ spec:
name: minio-service
ports:
- containerPort: {{ .Values.services.objectStore.port }}
resources: {}
{{ with .Values.services.objectStore.resources }}
resources:
{{- toYaml . | nindent 10 }}
{{ end }}
volumeMounts:
- mountPath: /data
name: minio-data

View File

@ -30,7 +30,10 @@ spec:
name: proxy-service
ports:
- containerPort: {{ .Values.services.proxy.port }}
resources: {}
{{ with .Values.services.proxy.resources }}
resources:
{{- toYaml . | nindent 10 }}
{{ end }}
volumeMounts:
{{- with .Values.affinity }}
affinity:

View File

@ -35,7 +35,10 @@ spec:
name: redis-service
ports:
- containerPort: {{ .Values.services.redis.port }}
resources: {}
{{ with .Values.services.redis.resources }}
resources:
{{- toYaml . | nindent 10 }}
{{ end }}
volumeMounts:
- mountPath: /data
name: redis-data

View File

@ -27,6 +27,8 @@ spec:
spec:
containers:
- env:
- name: BUDIBASE_ENVIRONMENT
value: {{ .Values.globals.budibaseEnv }}
- name: DEPLOYMENT_ENVIRONMENT
value: "kubernetes"
- name: CLUSTER_PORT
@ -125,6 +127,19 @@ spec:
value: {{ .Values.globals.google.secret | quote }}
- name: TENANT_FEATURE_FLAGS
value: {{ .Values.globals.tenantFeatureFlags | quote }}
{{ if .Values.globals.elasticApmEnabled }}
- name: ELASTIC_APM_ENABLED
value: {{ .Values.globals.elasticApmEnabled | quote }}
{{ end }}
{{ if .Values.globals.elasticApmSecretToken }}
- name: ELASTIC_APM_SECRET_TOKEN
value: {{ .Values.globals.elasticApmSecretToken | quote }}
{{ end }}
{{ if .Values.globals.elasticApmServerUrl }}
- name: ELASTIC_APM_SERVER_URL
value: {{ .Values.globals.elasticApmServerUrl | quote }}
{{ end }}
image: budibase/worker:{{ .Values.globals.appVersion }}
imagePullPolicy: Always
livenessProbe:
@ -136,7 +151,10 @@ spec:
name: bbworker
ports:
- containerPort: {{ .Values.services.worker.port }}
resources: {}
{{ with .Values.services.worker.resources }}
resources:
{{- toYaml . | nindent 10 }}
{{ end }}
{{- with .Values.affinity }}
affinity:
{{- toYaml . | nindent 8 }}

View File

@ -60,19 +60,6 @@ ingress:
port:
number: 10000
resources:
{}
# We usually recommend not to specify default resources and to leave this as a conscious
# choice for the user. This also increases chances charts run on environments with little
# resources, such as Minikube. If you do want to specify resources, uncomment the following
# lines, adjust them as necessary, and remove the curly braces after 'resources:'.
# limits:
# cpu: 100m
# memory: 128Mi
# requests:
# cpu: 100m
# memory: 128Mi
autoscaling:
enabled: false
minReplicas: 1
@ -91,7 +78,7 @@ globals:
budibaseEnv: PRODUCTION
enableAnalytics: "1"
sentryDSN: ""
posthogToken: "phc_fg5I3nDOf6oJVMHSaycEhpPdlgS8rzXG2r6F2IpxCHS"
posthogToken: "phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU"
logLevel: info
selfHosted: "1" # set to 0 for budibase cloud environment, set to 1 for self-hosted setup
multiTenancy: "0" # set to 0 to disable multiple orgs, set to 1 to enable multiple orgs
@ -114,6 +101,10 @@ globals:
smtp:
enabled: false
# elasticApmEnabled:
# elasticApmSecretToken:
# elasticApmServerUrl:
services:
budibaseVersion: latest
dns: cluster.local
@ -121,15 +112,19 @@ services:
proxy:
port: 10000
replicaCount: 1
resources: {}
apps:
port: 4002
replicaCount: 1
logLevel: info
resources: {}
# nodeDebug: "" # set the value of NODE_DEBUG
worker:
port: 4003
replicaCount: 1
resources: {}
couchdb:
enabled: true
@ -143,6 +138,7 @@ services:
target: ""
# backup interval in seconds
interval: ""
resources: {}
redis:
enabled: true # disable if using external redis
@ -156,6 +152,7 @@ services:
## If undefined (the default) or set to null, no storageClassName spec is
## set, choosing the default provisioner.
storageClass: ""
resources: {}
objectStore:
minio: true
@ -172,6 +169,7 @@ services:
## If undefined (the default) or set to null, no storageClassName spec is
## set, choosing the default provisioner.
storageClass: ""
resources: {}
# Override values in couchDB subchart
couchdb:

View File

@ -348,7 +348,7 @@ export interface paths {
}
}
responses: {
/** Returns the created table, including the ID which has been generated for it. This can be internal or external data sources. */
/** Returns the created table, including the ID which has been generated for it. This can be internal or external datasources. */
200: {
content: {
"application/json": components["schemas"]["tableOutput"]
@ -959,7 +959,7 @@ export interface components {
query: {
/** @description The ID of the query. */
_id: string
/** @description The ID of the data source the query belongs to. */
/** @description The ID of the datasource the query belongs to. */
datasourceId?: string
/** @description The bindings which are required to perform this query. */
parameters?: string[]
@ -983,7 +983,7 @@ export interface components {
data: {
/** @description The ID of the query. */
_id: string
/** @description The ID of the data source the query belongs to. */
/** @description The ID of the datasource the query belongs to. */
datasourceId?: string
/** @description The bindings which are required to perform this query. */
parameters?: string[]

View File

@ -11,8 +11,8 @@
"dependencies": {
"bulma": "^0.9.3",
"next": "12.1.0",
"node-fetch": "^3.2.2",
"node-sass": "^7.0.1",
"node-fetch": "^3.2.10",
"sass": "^1.52.3",
"react": "17.0.2",
"react-dom": "17.0.2",
"react-notifications-component": "^3.4.1"
@ -24,4 +24,4 @@
"eslint-config-next": "12.1.0",
"typescript": "4.6.2"
}
}
}

View File

@ -2020,10 +2020,10 @@ node-domexception@^1.0.0:
resolved "https://registry.yarnpkg.com/node-domexception/-/node-domexception-1.0.0.tgz#6888db46a1f71c0b76b3f7555016b63fe64766e5"
integrity sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==
node-fetch@^3.2.2:
version "3.2.2"
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-3.2.2.tgz#16d33fbe32ca7c6ca1ca8ba5dfea1dd885c59f04"
integrity sha512-Cwhq1JFIoon15wcIkFzubVNFE5GvXGV82pKf4knXXjvGmn7RJKcypeuqcVNZMGDZsAFWyIRya/anwAJr7TWJ7w==
node-fetch@^3.2.10:
version "3.2.10"
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-3.2.10.tgz#e8347f94b54ae18b57c9c049ef641cef398a85c8"
integrity sha512-MhuzNwdURnZ1Cp4XTazr69K0BTizsBroX7Zx3UgDSVcZYKF/6p0CBe4EUb/hLqmzVhl0UpYfgRljQ4yxE+iCxA==
dependencies:
data-uri-to-buffer "^4.0.0"
fetch-blob "^3.1.4"

View File

@ -22,4 +22,7 @@ BUDIBASE_ENVIRONMENT=PRODUCTION
# An admin user can be automatically created initially if these are set
BB_ADMIN_USER_EMAIL=
BB_ADMIN_USER_PASSWORD=
BB_ADMIN_USER_PASSWORD=
# A path that is watched for plugin bundles. Any bundles found are imported automatically.
PLUGINS_DIR=

View File

@ -25,9 +25,12 @@ services:
REDIS_PASSWORD: ${REDIS_PASSWORD}
BB_ADMIN_USER_EMAIL: ${BB_ADMIN_USER_EMAIL}
BB_ADMIN_USER_PASSWORD: ${BB_ADMIN_USER_PASSWORD}
PLUGINS_DIR: ${PLUGINS_DIR}
depends_on:
- worker-service
- redis-service
# volumes:
# - /some/path/to/plugins:/plugins
worker-service:
restart: unless-stopped
@ -76,6 +79,9 @@ services:
- "${MAIN_PORT}:10000"
container_name: bbproxy
image: budibase/proxy
environment:
- PROXY_RATE_LIMIT_WEBHOOKS_PER_SECOND=10
- PROXY_RATE_LIMIT_API_PER_SECOND=20
depends_on:
- minio-service
- worker-service
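
To use the new PLUGINS_DIR passthrough, point the variable at the in-container path and mount a host folder there (a sketch; /plugins and the host path are placeholders):

    # .env (read by docker compose for variable substitution)
    echo "PLUGINS_DIR=/plugins" >> .env
    # then uncomment the volumes entry above so the host folder lands at /plugins
    #   - /some/path/to/plugins:/plugins
    docker compose up -d app-service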

View File

@ -22,4 +22,7 @@ BUDIBASE_ENVIRONMENT=PRODUCTION
# An admin user can be automatically created initially if these are set
BB_ADMIN_USER_EMAIL=
BB_ADMIN_USER_PASSWORD=
BB_ADMIN_USER_PASSWORD=
# A path that is watched for plugin bundles. Any bundles found are imported automatically.
PLUGINS_DIR=

View File

@ -15,7 +15,10 @@ http {
log_format main '$remote_addr - $remote_user [$time_local] "$request" '
'$status $body_bytes_sent "$http_referer" '
'"$http_user_agent" "$http_x_forwarded_for"';
'"$http_user_agent" "$http_x_forwarded_for" '
'response_time=$upstream_response_time proxy_host=$proxy_host upstream_addr=$upstream_addr';
access_log /var/log/nginx/access.log main;
map $http_upgrade $connection_upgrade {
default "upgrade";
@ -77,6 +80,20 @@ http {
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
}
location /vite/ {
proxy_pass http://{{ address }}:3000;
rewrite ^/vite(.*)$ /$1 break;
}
location /socket/ {
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection 'upgrade';
proxy_set_header Host $host;
proxy_cache_bypass $http_upgrade;
proxy_pass http://{{ address }}:4001;
}
location / {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
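
The new /socket/ location can be smoke-tested by sending the upgrade headers it forwards (a sketch, assuming the dev proxy on port 10000; a websocket-capable upstream answers 101 Switching Protocols):

    curl -i -N \
      -H "Connection: Upgrade" \
      -H "Upgrade: websocket" \
      -H "Sec-WebSocket-Version: 13" \
      -H "Sec-WebSocket-Key: $(openssl rand -base64 16)" \
      http://localhost:10000/socket/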

View File

@ -9,7 +9,11 @@ events {
}
http {
limit_req_zone $binary_remote_addr zone=ratelimit:10m rate=20r/s;
# rate limiting
limit_req_status 429;
limit_req_zone $binary_remote_addr zone=ratelimit:10m rate=${PROXY_RATE_LIMIT_API_PER_SECOND}r/s;
limit_req_zone $binary_remote_addr zone=webhooks:10m rate=${PROXY_RATE_LIMIT_WEBHOOKS_PER_SECOND}r/s;
include /etc/nginx/mime.types;
default_type application/octet-stream;
proxy_set_header Host $host;
@ -29,7 +33,10 @@ http {
log_format main '$remote_addr - $remote_user [$time_local] "$request" '
'$status $body_bytes_sent "$http_referer" '
'"$http_user_agent" "$http_x_forwarded_for"';
'"$http_user_agent" "$http_x_forwarded_for" '
'response_time=$upstream_response_time proxy_host=$proxy_host upstream_addr=$upstream_addr';
access_log /var/log/nginx/access.log main;
map $http_upgrade $connection_upgrade {
default "upgrade";
@ -90,6 +97,7 @@ http {
proxy_pass http://$watchtower:8080;
}
{{/if}}
location ~ ^/(builder|app_) {
proxy_http_version 1.1;
proxy_set_header Connection $connection_upgrade;
@ -126,11 +134,39 @@ http {
proxy_pass http://$apps:4002;
}
location /api/webhooks/ {
# calls to webhooks are rate limited
limit_req zone=webhooks nodelay;
# Rest of configuration copied from /api/ location above
# 120s timeout on API requests
proxy_read_timeout 120s;
proxy_connect_timeout 120s;
proxy_send_timeout 120s;
proxy_http_version 1.1;
proxy_set_header Connection $connection_upgrade;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_pass http://$apps:4002;
}
location /db/ {
proxy_pass http://$couchdb:5984;
rewrite ^/db/(.*)$ /$1 break;
}
location /socket/ {
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection 'upgrade';
proxy_set_header Host $host;
proxy_cache_bypass $http_upgrade;
proxy_pass http://$apps:4002;
}
location / {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
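
The webhook zone above can be exercised by bursting past the configured rate and counting status codes (a sketch; the endpoint path is a placeholder):

    for i in $(seq 1 50); do
      curl -s -o /dev/null -w "%{http_code}\n" http://localhost:10000/api/webhooks/test
    done | sort | uniq -c   # requests beyond the limit return 429 (limit_req_status)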

View File

@ -1,3 +1,14 @@
FROM nginx:latest
COPY .generated-nginx.prod.conf /etc/nginx/nginx.conf
COPY error.html /usr/share/nginx/html/error.html
# nginx.conf
# use the default nginx behaviour for *.template files which are processed with envsubst
# override the output dir to output directly to /etc/nginx instead of /etc/nginx/conf.d
ENV NGINX_ENVSUBST_OUTPUT_DIR=/etc/nginx
COPY .generated-nginx.prod.conf /etc/nginx/templates/nginx.conf.template
# Error handling
COPY error.html /usr/share/nginx/html/error.html
# Default environment
ENV PROXY_RATE_LIMIT_WEBHOOKS_PER_SECOND=10
ENV PROXY_RATE_LIMIT_API_PER_SECOND=20
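
The template mechanism relied on here is stock nginx image behaviour: the entrypoint runs envsubst over every *.template file, writing to NGINX_ENVSUBST_OUTPUT_DIR. A minimal sketch of the equivalent manual step:

    export PROXY_RATE_LIMIT_API_PER_SECOND=20
    export PROXY_RATE_LIMIT_WEBHOOKS_PER_SECOND=10
    envsubst '$PROXY_RATE_LIMIT_API_PER_SECOND $PROXY_RATE_LIMIT_WEBHOOKS_PER_SECOND' \
      < nginx.conf.template > /etc/nginx/nginx.conf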

View File

@ -3,15 +3,21 @@
echo ${TARGETBUILD} > /buildtarget.txt
if [[ "${TARGETBUILD}" = "aas" ]]; then
# Azure AppService uses /home for persistent data & SSH on port 2222
mkdir -p /home/{search,minio,couch}
mkdir -p /home/couch/{dbs,views}
chown -R couchdb:couchdb /home/couch/
DATA_DIR=/home
mkdir -p $DATA_DIR/{search,minio,couch}
mkdir -p $DATA_DIR/couch/{dbs,views}
chown -R couchdb:couchdb $DATA_DIR/couch/
apt update
apt-get install -y openssh-server
sed -i 's#dir=/opt/couchdb/data/search#dir=/home/search#' /opt/clouseau/clouseau.ini
sed -i 's#/minio/minio server /minio &#/minio/minio server /home/minio &#' /runner.sh
sed -i 's#database_dir = ./data#database_dir = /home/couch/dbs#' /opt/couchdb/etc/default.ini
sed -i 's#view_index_dir = ./data#view_index_dir = /home/couch/views#' /opt/couchdb/etc/default.ini
sed -i "s/#Port 22/Port 2222/" /etc/ssh/sshd_config
echo "root:Docker!" | chpasswd
mkdir -p /tmp
chmod +x /tmp/ssh_setup.sh \
&& (sleep 1;/tmp/ssh_setup.sh 2>&1 > /dev/null)
cp /etc/sshd_config /etc/ssh/sshd_config
/etc/init.d/ssh restart
fi
sed -i "s#DATA_DIR#/home#g" /opt/clouseau/clouseau.ini
sed -i "s#DATA_DIR#/home#g" /opt/couchdb/etc/local.ini
else
sed -i "s#DATA_DIR#/data#g" /opt/clouseau/clouseau.ini
sed -i "s#DATA_DIR#/data#g" /opt/couchdb/etc/local.ini
fi
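
The DATA_DIR placeholder written into clouseau.ini and local.ini (see the following hunks) is resolved by these sed calls; the effect, sketched for the single-image default:

    DATA_DIR=/data
    sed -i "s#DATA_DIR#${DATA_DIR}#g" /opt/clouseau/clouseau.ini   # dir=DATA_DIR/search -> dir=/data/search
    sed -i "s#DATA_DIR#${DATA_DIR}#g" /opt/couchdb/etc/local.ini   # database_dir = DATA_DIR/couch/dbs -> /data/couch/dbs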

View File

@ -20,31 +20,17 @@ RUN node /pinVersions.js && yarn && yarn build && /cleanup.sh
FROM couchdb:3.2.1
# TARGETARCH can be amd64 or arm e.g. docker build --build-arg TARGETARCH=amd64
ARG TARGETARCH amd64
ARG TARGETARCH=amd64
#TARGETBUILD can be set to single (for single docker image) or aas (for azure app service)
# e.g. docker build --build-arg TARGETBUILD=aas ....
ARG TARGETBUILD single
ARG TARGETBUILD=single
ENV TARGETBUILD $TARGETBUILD
COPY --from=build /app /app
COPY --from=build /worker /worker
ENV \
APP_PORT=4001 \
ARCHITECTURE=amd \
BUDIBASE_ENVIRONMENT=PRODUCTION \
CLUSTER_PORT=80 \
# CUSTOM_DOMAIN=budi001.custom.com \
DEPLOYMENT_ENVIRONMENT=docker \
MINIO_URL=http://localhost:9000 \
POSTHOG_TOKEN=phc_fg5I3nDOf6oJVMHSaycEhpPdlgS8rzXG2r6F2IpxCHS \
REDIS_URL=localhost:6379 \
SELF_HOSTED=1 \
TARGETBUILD=$TARGETBUILD \
WORKER_PORT=4002 \
WORKER_URL=http://localhost:4002 \
APPS_URL=http://localhost:4001
# ENV CUSTOM_DOMAIN=budi001.custom.com \
# See runner.sh for Env Vars
# These secret env variables are generated by the runner at startup
# their values can be overridden by the user; they will be written
# to the .env file in the /data directory for use later on
@ -114,7 +100,10 @@ RUN chmod +x ./healthcheck.sh
ADD hosting/scripts/build-target-paths.sh .
RUN chmod +x ./build-target-paths.sh
# Script below sets the path for storing data based on $DATA_DIR
# For Azure App Service install SSH & point data locations to /home
ADD hosting/single/ssh/sshd_config /etc/
ADD hosting/single/ssh/ssh_setup.sh /tmp
RUN /build-target-paths.sh
# cleanup cache
@ -122,6 +111,8 @@ RUN yarn cache clean -f
EXPOSE 80
EXPOSE 443
# Expose port 2222 for SSH on Azure App Service build
EXPOSE 2222
VOLUME /data
# setup letsencrypt certificate
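
Both build arguments now use valid ARG syntax and can be overridden at build time, e.g. for the Azure App Service variant (a sketch; the image tag is a placeholder):

    docker build \
      --build-arg TARGETARCH=amd64 \
      --build-arg TARGETBUILD=aas \
      -t budibase-single:aas .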

View File

@ -7,7 +7,7 @@ name=clouseau@127.0.0.1
cookie=monster
; the path where you would like to store the search index files
dir=/data/search
dir=DATA_DIR/search
; the number of search indexes that can be open simultaneously
max_indexes_open=500

View File

@ -1,5 +1,5 @@
; CouchDB Configuration Settings
[couchdb]
database_dir = /data/couch/dbs
view_index_dir = /data/couch/views
database_dir = DATA_DIR/couch/dbs
view_index_dir = DATA_DIR/couch/views

View File

@ -3,6 +3,11 @@ healthy=true
if [ -f "/data/.env" ]; then
export $(cat /data/.env | xargs)
elif [ -f "/home/.env" ]; then
export $(cat /home/.env | xargs)
else
echo "No .env file found"
healthy=false
fi
if [[ $(curl -Lfk -s -w "%{http_code}\n" http://localhost/ -o /dev/null) -ne 200 ]]; then
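
The export $(cat ... | xargs) idiom above breaks on values containing spaces or quotes; an equivalent, quoting-safe sketch of the same guard:

    if [ -f "/data/.env" ]; then
      set -a; . /data/.env; set +a   # auto-export every variable the file defines
    elif [ -f "/home/.env" ]; then
      set -a; . /home/.env; set +a
    else
      echo "No .env file found"
      healthy=false
    fi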

View File

@ -66,6 +66,15 @@ server {
rewrite ^/db/(.*)$ /$1 break;
}
location /socket/ {
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection 'upgrade';
proxy_set_header Host $host;
proxy_cache_bypass $http_upgrade;
proxy_pass http://127.0.0.1:4001;
}
location / {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;

View File

@ -1,9 +1,34 @@
#!/bin/bash
declare -a ENV_VARS=("COUCHDB_USER" "COUCHDB_PASSWORD" "MINIO_ACCESS_KEY" "MINIO_SECRET_KEY" "INTERNAL_API_KEY" "JWT_SECRET" "REDIS_PASSWORD")
if [ -f "/data/.env" ]; then
export $(cat /data/.env | xargs)
declare -a ENV_VARS=("COUCHDB_USER" "COUCHDB_PASSWORD" "DATA_DIR" "MINIO_ACCESS_KEY" "MINIO_SECRET_KEY" "INTERNAL_API_KEY" "JWT_SECRET" "REDIS_PASSWORD")
declare -a DOCKER_VARS=("APP_PORT" "APPS_URL" "ARCHITECTURE" "BUDIBASE_ENVIRONMENT" "CLUSTER_PORT" "DEPLOYMENT_ENVIRONMENT" "MINIO_URL" "NODE_ENV" "POSTHOG_TOKEN" "REDIS_URL" "SELF_HOSTED" "WORKER_PORT" "WORKER_URL")
# Check the env vars set in Dockerfile have come through, AAS seems to drop them
[[ -z "${APP_PORT}" ]] && export APP_PORT=4001
[[ -z "${ARCHITECTURE}" ]] && export ARCHITECTURE=amd
[[ -z "${BUDIBASE_ENVIRONMENT}" ]] && export BUDIBASE_ENVIRONMENT=PRODUCTION
[[ -z "${CLUSTER_PORT}" ]] && export CLUSTER_PORT=80
[[ -z "${DEPLOYMENT_ENVIRONMENT}" ]] && export DEPLOYMENT_ENVIRONMENT=docker
[[ -z "${MINIO_URL}" ]] && export MINIO_URL=http://localhost:9000
[[ -z "${NODE_ENV}" ]] && export NODE_ENV=production
[[ -z "${POSTHOG_TOKEN}" ]] && export POSTHOG_TOKEN=phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU
[[ -z "${REDIS_URL}" ]] && export REDIS_URL=localhost:6379
[[ -z "${SELF_HOSTED}" ]] && export SELF_HOSTED=1
[[ -z "${WORKER_PORT}" ]] && export WORKER_PORT=4002
[[ -z "${WORKER_URL}" ]] && export WORKER_URL=http://localhost:4002
[[ -z "${APPS_URL}" ]] && export APPS_URL=http://localhost:4001
# export CUSTOM_DOMAIN=budi001.custom.com
# Azure App Service customisations
if [[ "${TARGETBUILD}" = "aas" ]]; then
DATA_DIR=/home
/etc/init.d/ssh start
else
DATA_DIR=${DATA_DIR:-/data}
fi
# first randomise any unset environment variables
if [ -f "${DATA_DIR}/.env" ]; then
# Read in the .env file and export the variables
for LINE in $(cat ${DATA_DIR}/.env); do export $LINE; done
fi
# randomise any unset environment variables
for ENV_VAR in "${ENV_VARS[@]}"
do
temp=$(eval "echo \$$ENV_VAR")
@ -14,21 +39,33 @@ done
if [[ -z "${COUCH_DB_URL}" ]]; then
export COUCH_DB_URL=http://$COUCHDB_USER:$COUCHDB_PASSWORD@localhost:5984
fi
if [ ! -f "/data/.env" ]; then
touch /data/.env
if [ ! -f "${DATA_DIR}/.env" ]; then
touch ${DATA_DIR}/.env
for ENV_VAR in "${ENV_VARS[@]}"
do
temp=$(eval "echo \$$ENV_VAR")
echo "$ENV_VAR=$temp" >> /data/.env
echo "$ENV_VAR=$temp" >> ${DATA_DIR}/.env
done
for ENV_VAR in "${DOCKER_VARS[@]}"
do
temp=$(eval "echo \$$ENV_VAR")
echo "$ENV_VAR=$temp" >> ${DATA_DIR}/.env
done
echo "COUCH_DB_URL=${COUCH_DB_URL}" >> ${DATA_DIR}/.env
fi
# Read in the .env file and export the variables
for LINE in $(cat ${DATA_DIR}/.env); do export $LINE; done
ln -s ${DATA_DIR}/.env /app/.env
ln -s ${DATA_DIR}/.env /worker/.env
# make these directories in runner, in case of mount
mkdir -p /data/couch/{dbs,views} /home/couch/{dbs,views}
chown -R couchdb:couchdb /data/couch /home/couch
mkdir -p ${DATA_DIR}/couch/{dbs,views}
mkdir -p ${DATA_DIR}/minio
mkdir -p ${DATA_DIR}/search
chown -R couchdb:couchdb ${DATA_DIR}/couch
redis-server --requirepass $REDIS_PASSWORD &
/opt/clouseau/bin/clouseau &
/minio/minio server /data/minio &
/minio/minio server ${DATA_DIR}/minio &
/docker-entrypoint.sh /opt/couchdb/bin/couchdb &
/etc/init.d/nginx restart
if [[ ! -z "${CUSTOM_DOMAIN}" ]]; then

View File

@ -0,0 +1,8 @@
#!/bin/sh
ssh-keygen -A
#prepare run dir
if [ ! -d "/var/run/sshd" ]; then
mkdir -p /var/run/sshd
fi

View File

@ -0,0 +1,12 @@
Port 2222
ListenAddress 0.0.0.0
LoginGraceTime 180
X11Forwarding yes
Ciphers aes128-cbc,3des-cbc,aes256-cbc,aes128-ctr,aes192-ctr,aes256-ctr
MACs hmac-sha1,hmac-sha1-96
StrictModes yes
SyslogFacility DAEMON
PasswordAuthentication yes
PermitEmptyPasswords no
PermitRootLogin yes
Subsystem sftp internal-sftp

View File

@ -1,5 +1,5 @@
{
"version": "1.2.14",
"version": "1.3.20",
"npmClient": "yarn",
"packages": [
"packages/*"

View File

@ -45,8 +45,8 @@
"lint:eslint": "eslint packages",
"lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\"",
"lint": "yarn run lint:eslint && yarn run lint:prettier",
"lint:fix:eslint": "eslint --fix packages",
"lint:fix:prettier": "prettier --write \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\"",
"lint:fix:eslint": "eslint --fix packages qa-core",
"lint:fix:prettier": "prettier --write \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\" && prettier --write \"qa-core/**/*.{js,ts,svelte}\"",
"lint:fix": "yarn run lint:fix:prettier && yarn run lint:fix:eslint",
"test:e2e": "lerna run cy:test --stream",
"test:e2e:ci": "lerna run cy:ci --stream",

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/backend-core",
"version": "1.2.14",
"version": "1.3.20",
"description": "Budibase backend core libraries used in server and worker",
"main": "dist/src/index.js",
"types": "dist/src/index.d.ts",
@ -20,13 +20,16 @@
"test:watch": "jest --watchAll"
},
"dependencies": {
"@budibase/types": "^1.2.14",
"@budibase/types": "1.3.20",
"@shopify/jest-koa-mocks": "5.0.1",
"@techpass/passport-openidconnect": "0.3.2",
"aws-sdk": "2.1030.0",
"bcrypt": "5.0.1",
"bcryptjs": "2.4.3",
"dotenv": "16.0.1",
"emitter-listener": "1.1.2",
"ioredis": "4.28.0",
"joi": "17.6.0",
"jsonwebtoken": "8.5.1",
"koa-passport": "4.1.4",
"lodash": "4.17.21",
@ -59,7 +62,6 @@
]
},
"devDependencies": {
"@shopify/jest-koa-mocks": "3.1.5",
"@types/jest": "27.5.1",
"@types/koa": "2.0.52",
"@types/lodash": "4.14.180",

View File

@ -0,0 +1,3 @@
module.exports = {
...require("./src/plugin"),
}

View File

@ -1,11 +1,11 @@
const passport = require("koa-passport")
const LocalStrategy = require("passport-local").Strategy
const JwtStrategy = require("passport-jwt").Strategy
const { getGlobalDB } = require("./tenancy")
import { getGlobalDB } from "./tenancy"
const refresh = require("passport-oauth2-refresh")
const { Configs } = require("./constants")
const { getScopedConfig } = require("./db/utils")
const {
import { Configs } from "./constants"
import { getScopedConfig } from "./db/utils"
import {
jwt,
local,
authenticated,
@ -13,7 +13,6 @@ const {
oidc,
auditLog,
tenancy,
appTenancy,
authError,
ssoCallbackUrl,
csrf,
@ -22,32 +21,36 @@ const {
builderOnly,
builderOrAdmin,
joiValidator,
} = require("./middleware")
const { invalidateUser } = require("./cache/user")
} from "./middleware"
import { invalidateUser } from "./cache/user"
import { User } from "@budibase/types"
// Strategies
passport.use(new LocalStrategy(local.options, local.authenticate))
passport.use(new JwtStrategy(jwt.options, jwt.authenticate))
passport.serializeUser((user, done) => done(null, user))
passport.serializeUser((user: User, done: any) => done(null, user))
passport.deserializeUser(async (user, done) => {
passport.deserializeUser(async (user: User, done: any) => {
const db = getGlobalDB()
try {
const user = await db.get(user._id)
return done(null, user)
const dbUser = await db.get(user._id)
return done(null, dbUser)
} catch (err) {
console.error(`User not found`, err)
return done(null, false, { message: "User not found" })
}
})
async function refreshOIDCAccessToken(db, chosenConfig, refreshToken) {
async function refreshOIDCAccessToken(
db: any,
chosenConfig: any,
refreshToken: string
) {
const callbackUrl = await oidc.getCallbackUrl(db, chosenConfig)
let enrichedConfig
let strategy
let enrichedConfig: any
let strategy: any
try {
enrichedConfig = await oidc.fetchStrategyConfig(chosenConfig, callbackUrl)
@ -70,22 +73,28 @@ async function refreshOIDCAccessToken(db, chosenConfig, refreshToken) {
refresh.requestNewAccessToken(
Configs.OIDC,
refreshToken,
(err, accessToken, refreshToken, params) => {
(err: any, accessToken: string, refreshToken: any, params: any) => {
resolve({ err, accessToken, refreshToken, params })
}
)
})
}
async function refreshGoogleAccessToken(db, config, refreshToken) {
async function refreshGoogleAccessToken(
db: any,
config: any,
refreshToken: any
) {
let callbackUrl = await google.getCallbackUrl(db, config)
let strategy
try {
strategy = await google.strategyFactory(config, callbackUrl)
} catch (err) {
} catch (err: any) {
console.error(err)
throw new Error("Error constructing OIDC refresh strategy", err)
throw new Error(
`Error constructing OIDC refresh strategy: message=${err.message}`
)
}
refresh.use(strategy)
@ -94,14 +103,18 @@ async function refreshGoogleAccessToken(db, config, refreshToken) {
refresh.requestNewAccessToken(
Configs.GOOGLE,
refreshToken,
(err, accessToken, refreshToken, params) => {
(err: any, accessToken: string, refreshToken: string, params: any) => {
resolve({ err, accessToken, refreshToken, params })
}
)
})
}
async function refreshOAuthToken(refreshToken, configType, configId) {
async function refreshOAuthToken(
refreshToken: string,
configType: string,
configId: string
) {
const db = getGlobalDB()
const config = await getScopedConfig(db, {
@ -113,7 +126,7 @@ async function refreshOAuthToken(refreshToken, configType, configId) {
let refreshResponse
if (configType === Configs.OIDC) {
// configId - retrieved from cookie.
chosenConfig = config.configs.filter(c => c.uuid === configId)[0]
chosenConfig = config.configs.filter((c: any) => c.uuid === configId)[0]
if (!chosenConfig) {
throw new Error("Invalid OIDC configuration")
}
@ -134,7 +147,7 @@ async function refreshOAuthToken(refreshToken, configType, configId) {
return refreshResponse
}
async function updateUserOAuth(userId, oAuthConfig) {
async function updateUserOAuth(userId: string, oAuthConfig: any) {
const details = {
accessToken: oAuthConfig.accessToken,
refreshToken: oAuthConfig.refreshToken,
@ -162,14 +175,13 @@ async function updateUserOAuth(userId, oAuthConfig) {
}
}
module.exports = {
export = {
buildAuthMiddleware: authenticated,
passport,
google,
oidc,
jwt: require("jsonwebtoken"),
buildTenancyMiddleware: tenancy,
buildAppTenancyMiddleware: appTenancy,
auditLog,
authError,
buildCsrfMiddleware: csrf,

View File

@ -1,6 +1,6 @@
const redis = require("../redis/init")
const { doWithDB } = require("../db")
const { DocumentTypes } = require("../db/constants")
const { DocumentType } = require("../db/constants")
const AppState = {
INVALID: "invalid",
@ -14,7 +14,7 @@ const populateFromDB = async appId => {
return doWithDB(
appId,
db => {
return db.get(DocumentTypes.APP_METADATA)
return db.get(DocumentType.APP_METADATA)
},
{ skip_setup: true }
)

View File

@ -9,6 +9,7 @@ exports.CacheKeys = {
UNIQUE_TENANT_ID: "uniqueTenantId",
EVENTS: "events",
BACKFILL_METADATA: "backfillMetadata",
EVENTS_RATE_LIMIT: "eventsRateLimit",
}
exports.TTL = {

View File

@ -7,6 +7,7 @@ exports.Cookies = {
CurrentApp: "budibase:currentapp",
Auth: "budibase:auth",
Init: "budibase:init",
ACCOUNT_RETURN_URL: "budibase:account:returnurl",
DatasourceAuth: "budibase:datasourceauth",
OIDC_CONFIG: "budibase:oidc:config",
}

View File

@ -1,4 +1,4 @@
export enum ContextKeys {
export enum ContextKey {
TENANT_ID = "tenantId",
GLOBAL_DB = "globalDb",
APP_ID = "appId",

View File

@ -1,11 +1,11 @@
import env from "../environment"
import { SEPARATOR, DocumentTypes } from "../db/constants"
import { SEPARATOR, DocumentType } from "../db/constants"
import cls from "./FunctionContext"
import { dangerousGetDB, closeDB } from "../db"
import { baseGlobalDBName } from "../tenancy/utils"
import { IdentityContext } from "@budibase/types"
import { DEFAULT_TENANT_ID as _DEFAULT_TENANT_ID } from "../constants"
import { ContextKeys } from "./constants"
import { ContextKey } from "./constants"
import {
updateUsing,
closeWithUsing,
@ -33,8 +33,8 @@ export const closeTenancy = async () => {
}
await closeDB(db)
// clear from context now that database is closed/task is finished
cls.setOnContext(ContextKeys.TENANT_ID, null)
cls.setOnContext(ContextKeys.GLOBAL_DB, null)
cls.setOnContext(ContextKey.TENANT_ID, null)
cls.setOnContext(ContextKey.GLOBAL_DB, null)
}
// export const isDefaultTenant = () => {
@ -54,7 +54,7 @@ export const getTenantIDFromAppID = (appId: string) => {
return null
}
const split = appId.split(SEPARATOR)
const hasDev = split[1] === DocumentTypes.DEV
const hasDev = split[1] === DocumentType.DEV
if ((hasDev && split.length === 3) || (!hasDev && split.length === 2)) {
return null
}
@ -83,14 +83,14 @@ export const doInTenant = (tenantId: string | null, task: any) => {
// invoke the task
return await task()
} finally {
await closeWithUsing(ContextKeys.TENANCY_IN_USE, () => {
await closeWithUsing(ContextKey.TENANCY_IN_USE, () => {
return closeTenancy()
})
}
}
const existing = cls.getFromContext(ContextKeys.TENANT_ID) === tenantId
return updateUsing(ContextKeys.TENANCY_IN_USE, existing, internal)
const existing = cls.getFromContext(ContextKey.TENANT_ID) === tenantId
return updateUsing(ContextKey.TENANCY_IN_USE, existing, internal)
}
export const doInAppContext = (appId: string, task: any) => {
@ -108,7 +108,7 @@ export const doInAppContext = (appId: string, task: any) => {
setAppTenantId(appId)
}
// set the app ID
cls.setOnContext(ContextKeys.APP_ID, appId)
cls.setOnContext(ContextKey.APP_ID, appId)
// preserve the identity
if (identity) {
@ -118,14 +118,14 @@ export const doInAppContext = (appId: string, task: any) => {
// invoke the task
return await task()
} finally {
await closeWithUsing(ContextKeys.APP_IN_USE, async () => {
await closeWithUsing(ContextKey.APP_IN_USE, async () => {
await closeAppDBs()
await closeTenancy()
})
}
}
const existing = cls.getFromContext(ContextKeys.APP_ID) === appId
return updateUsing(ContextKeys.APP_IN_USE, existing, internal)
const existing = cls.getFromContext(ContextKey.APP_ID) === appId
return updateUsing(ContextKey.APP_IN_USE, existing, internal)
}
export const doInIdentityContext = (identity: IdentityContext, task: any) => {
@ -135,7 +135,7 @@ export const doInIdentityContext = (identity: IdentityContext, task: any) => {
async function internal(opts = { existing: false }) {
if (!opts.existing) {
cls.setOnContext(ContextKeys.IDENTITY, identity)
cls.setOnContext(ContextKey.IDENTITY, identity)
// set the tenant so that doInTenant will preserve identity
if (identity.tenantId) {
updateTenantId(identity.tenantId)
@ -146,27 +146,27 @@ export const doInIdentityContext = (identity: IdentityContext, task: any) => {
// invoke the task
return await task()
} finally {
await closeWithUsing(ContextKeys.IDENTITY_IN_USE, async () => {
await closeWithUsing(ContextKey.IDENTITY_IN_USE, async () => {
setIdentity(null)
await closeTenancy()
})
}
}
const existing = cls.getFromContext(ContextKeys.IDENTITY)
return updateUsing(ContextKeys.IDENTITY_IN_USE, existing, internal)
const existing = cls.getFromContext(ContextKey.IDENTITY)
return updateUsing(ContextKey.IDENTITY_IN_USE, existing, internal)
}
export const getIdentity = (): IdentityContext | undefined => {
try {
return cls.getFromContext(ContextKeys.IDENTITY)
return cls.getFromContext(ContextKey.IDENTITY)
} catch (e) {
// do nothing - identity is not in context
}
}
export const updateTenantId = (tenantId: string | null) => {
cls.setOnContext(ContextKeys.TENANT_ID, tenantId)
cls.setOnContext(ContextKey.TENANT_ID, tenantId)
if (env.USE_COUCH) {
setGlobalDB(tenantId)
}
@ -176,7 +176,7 @@ export const updateAppId = async (appId: string) => {
try {
// have to close first, before removing the databases from context
await closeAppDBs()
cls.setOnContext(ContextKeys.APP_ID, appId)
cls.setOnContext(ContextKey.APP_ID, appId)
} catch (err) {
if (env.isTest()) {
TEST_APP_ID = appId
@ -189,12 +189,12 @@ export const updateAppId = async (appId: string) => {
export const setGlobalDB = (tenantId: string | null) => {
const dbName = baseGlobalDBName(tenantId)
const db = dangerousGetDB(dbName)
cls.setOnContext(ContextKeys.GLOBAL_DB, db)
cls.setOnContext(ContextKey.GLOBAL_DB, db)
return db
}
export const getGlobalDB = () => {
const db = cls.getFromContext(ContextKeys.GLOBAL_DB)
const db = cls.getFromContext(ContextKey.GLOBAL_DB)
if (!db) {
throw new Error("Global DB not found")
}
@ -202,7 +202,7 @@ export const getGlobalDB = () => {
}
export const isTenantIdSet = () => {
const tenantId = cls.getFromContext(ContextKeys.TENANT_ID)
const tenantId = cls.getFromContext(ContextKey.TENANT_ID)
return !!tenantId
}
@ -210,7 +210,7 @@ export const getTenantId = () => {
if (!isMultiTenant()) {
return DEFAULT_TENANT_ID
}
const tenantId = cls.getFromContext(ContextKeys.TENANT_ID)
const tenantId = cls.getFromContext(ContextKey.TENANT_ID)
if (!tenantId) {
throw new Error("Tenant id not found")
}
@ -218,7 +218,7 @@ export const getTenantId = () => {
}
export const getAppId = () => {
const foundId = cls.getFromContext(ContextKeys.APP_ID)
const foundId = cls.getFromContext(ContextKey.APP_ID)
if (!foundId && env.isTest() && TEST_APP_ID) {
return TEST_APP_ID
} else {
@ -231,7 +231,7 @@ export const getAppId = () => {
* contained, dev or prod.
*/
export const getAppDB = (opts?: any) => {
return getContextDB(ContextKeys.CURRENT_DB, opts)
return getContextDB(ContextKey.CURRENT_DB, opts)
}
/**
@ -239,7 +239,7 @@ export const getAppDB = (opts?: any) => {
* contained a development app ID, this will open the prod one.
*/
export const getProdAppDB = (opts?: any) => {
return getContextDB(ContextKeys.PROD_DB, opts)
return getContextDB(ContextKey.PROD_DB, opts)
}
/**
@ -247,5 +247,5 @@ export const getProdAppDB = (opts?: any) => {
* contained a prod app ID, this will open the dev one.
*/
export const getDevAppDB = (opts?: any) => {
return getContextDB(ContextKeys.DEV_DB, opts)
return getContextDB(ContextKey.DEV_DB, opts)
}

View File

@ -6,7 +6,7 @@ import {
} from "./index"
import cls from "./FunctionContext"
import { IdentityContext } from "@budibase/types"
import { ContextKeys } from "./constants"
import { ContextKey } from "./constants"
import { dangerousGetDB, closeDB } from "../db"
import { isEqual } from "lodash"
import { getDevelopmentAppID, getProdAppID } from "../db/conversions"
@ -47,17 +47,13 @@ export const setAppTenantId = (appId: string) => {
}
export const setIdentity = (identity: IdentityContext | null) => {
cls.setOnContext(ContextKeys.IDENTITY, identity)
cls.setOnContext(ContextKey.IDENTITY, identity)
}
// this function makes sure the PouchDB objects are closed and
// fully deleted when finished - this protects against memory leaks
export async function closeAppDBs() {
const dbKeys = [
ContextKeys.CURRENT_DB,
ContextKeys.PROD_DB,
ContextKeys.DEV_DB,
]
const dbKeys = [ContextKey.CURRENT_DB, ContextKey.PROD_DB, ContextKey.DEV_DB]
for (let dbKey of dbKeys) {
const db = cls.getFromContext(dbKey)
if (!db) {
@ -68,16 +64,16 @@ export async function closeAppDBs() {
cls.setOnContext(dbKey, null)
}
// clear the app ID now that the databases are closed
if (cls.getFromContext(ContextKeys.APP_ID)) {
cls.setOnContext(ContextKeys.APP_ID, null)
if (cls.getFromContext(ContextKey.APP_ID)) {
cls.setOnContext(ContextKey.APP_ID, null)
}
if (cls.getFromContext(ContextKeys.DB_OPTS)) {
cls.setOnContext(ContextKeys.DB_OPTS, null)
if (cls.getFromContext(ContextKey.DB_OPTS)) {
cls.setOnContext(ContextKey.DB_OPTS, null)
}
}
export function getContextDB(key: string, opts: any) {
const dbOptsKey = `${key}${ContextKeys.DB_OPTS}`
const dbOptsKey = `${key}${ContextKey.DB_OPTS}`
let storedOpts = cls.getFromContext(dbOptsKey)
let db = cls.getFromContext(key)
if (db && isEqual(opts, storedOpts)) {
@ -88,13 +84,13 @@ export function getContextDB(key: string, opts: any) {
let toUseAppId
switch (key) {
case ContextKeys.CURRENT_DB:
case ContextKey.CURRENT_DB:
toUseAppId = appId
break
case ContextKeys.PROD_DB:
case ContextKey.PROD_DB:
toUseAppId = getProdAppID(appId)
break
case ContextKeys.DEV_DB:
case ContextKey.DEV_DB:
toUseAppId = getDevelopmentAppID(appId)
break
}

View File

@ -1,4 +1,5 @@
import { dangerousGetDB, closeDB } from "."
import { DocumentType } from "./constants"
class Replication {
source: any
@ -53,6 +54,14 @@ class Replication {
return this.replication
}
appReplicateOpts() {
return {
filter: (doc: any) => {
return doc._id !== DocumentType.APP_METADATA
},
}
}
/**
* Rollback the target DB back to the state of the source DB
*/
@ -60,6 +69,7 @@ class Replication {
await this.target.destroy()
// Recreate the DB again
this.target = dangerousGetDB(this.target.name)
// take the opportunity to remove deleted tombstones
await this.replicate()
}

View File

@ -4,13 +4,13 @@ export const UNICODE_MAX = "\ufff0"
/**
* Can be used to create a few different forms of querying a view.
*/
export enum AutomationViewModes {
export enum AutomationViewMode {
ALL = "all",
AUTOMATION = "automation",
STATUS = "status",
}
export enum ViewNames {
export enum ViewName {
USER_BY_APP = "by_app",
USER_BY_EMAIL = "by_email2",
BY_API_KEY = "by_api_key",
@ -18,16 +18,18 @@ export enum ViewNames {
LINK = "by_link",
ROUTING = "screen_routes",
AUTOMATION_LOGS = "automation_logs",
ACCOUNT_BY_EMAIL = "account_by_email",
PLATFORM_USERS_LOWERCASE = "platform_users_lowercase",
}
export const DeprecatedViews = {
[ViewNames.USER_BY_EMAIL]: [
[ViewName.USER_BY_EMAIL]: [
// removed due to inaccuracy in view doc filter logic
"by_email",
],
}
export enum DocumentTypes {
export enum DocumentType {
USER = "us",
GROUP = "gr",
WORKSPACE = "workspace",
@ -41,6 +43,7 @@ export enum DocumentTypes {
MIGRATIONS = "migrations",
DEV_INFO = "devinfo",
AUTOMATION_LOG = "log_au",
ACCOUNT_METADATA = "acc_metadata",
}
export const StaticDatabases = {
@ -62,6 +65,6 @@ export const StaticDatabases = {
},
}
export const APP_PREFIX = exports.DocumentTypes.APP + exports.SEPARATOR
export const APP_DEV = exports.DocumentTypes.APP_DEV + exports.SEPARATOR
export const APP_PREFIX = DocumentType.APP + SEPARATOR
export const APP_DEV = DocumentType.APP_DEV + SEPARATOR
export const APP_DEV_PREFIX = APP_DEV

View File

@ -1,8 +1,9 @@
import { newid } from "../hashing"
import { DEFAULT_TENANT_ID, Configs } from "../constants"
import env from "../environment"
import { SEPARATOR, DocumentTypes, UNICODE_MAX, ViewNames } from "./constants"
import { getTenantId, getGlobalDBName, getGlobalDB } from "../tenancy"
import { SEPARATOR, DocumentType, UNICODE_MAX, ViewName } from "./constants"
import { getTenantId, getGlobalDB } from "../context"
import { getGlobalDBName } from "../tenancy/utils"
import fetch from "node-fetch"
import { doWithDB, allDbs } from "./index"
import { getCouchInfo } from "./pouch"
@ -58,7 +59,7 @@ export function getDocParams(
/**
* Retrieve the correct index for a view based on default design DB.
*/
export function getQueryIndex(viewName: ViewNames) {
export function getQueryIndex(viewName: ViewName) {
return `database/${viewName}`
}
@ -67,7 +68,7 @@ export function getQueryIndex(viewName: ViewNames) {
* @returns {string} The new workspace ID which the workspace doc can be stored under.
*/
export function generateWorkspaceID() {
return `${DocumentTypes.WORKSPACE}${SEPARATOR}${newid()}`
return `${DocumentType.WORKSPACE}${SEPARATOR}${newid()}`
}
/**
@ -76,8 +77,8 @@ export function generateWorkspaceID() {
export function getWorkspaceParams(id = "", otherProps = {}) {
return {
...otherProps,
startkey: `${DocumentTypes.WORKSPACE}${SEPARATOR}${id}`,
endkey: `${DocumentTypes.WORKSPACE}${SEPARATOR}${id}${UNICODE_MAX}`,
startkey: `${DocumentType.WORKSPACE}${SEPARATOR}${id}`,
endkey: `${DocumentType.WORKSPACE}${SEPARATOR}${id}${UNICODE_MAX}`,
}
}
@ -86,7 +87,7 @@ export function getWorkspaceParams(id = "", otherProps = {}) {
* @returns {string} The new user ID which the user doc can be stored under.
*/
export function generateGlobalUserID(id?: any) {
return `${DocumentTypes.USER}${SEPARATOR}${id || newid()}`
return `${DocumentType.USER}${SEPARATOR}${id || newid()}`
}
/**
@ -102,8 +103,8 @@ export function getGlobalUserParams(globalId: any, otherProps: any = {}) {
// need to include this incase pagination
startkey: startkey
? startkey
: `${DocumentTypes.USER}${SEPARATOR}${globalId}`,
endkey: `${DocumentTypes.USER}${SEPARATOR}${globalId}${UNICODE_MAX}`,
: `${DocumentType.USER}${SEPARATOR}${globalId}`,
endkey: `${DocumentType.USER}${SEPARATOR}${globalId}${UNICODE_MAX}`,
}
}
@ -121,7 +122,7 @@ export function getUsersByAppParams(appId: any, otherProps: any = {}) {
* @param ownerId The owner/user of the template, this could be global or a workspace level.
*/
export function generateTemplateID(ownerId: any) {
return `${DocumentTypes.TEMPLATE}${SEPARATOR}${ownerId}${SEPARATOR}${newid()}`
return `${DocumentType.TEMPLATE}${SEPARATOR}${ownerId}${SEPARATOR}${newid()}`
}
export function generateAppUserID(prodAppId: string, userId: string) {
@ -143,7 +144,7 @@ export function getTemplateParams(
if (templateId) {
final = templateId
} else {
final = `${DocumentTypes.TEMPLATE}${SEPARATOR}${ownerId}${SEPARATOR}`
final = `${DocumentType.TEMPLATE}${SEPARATOR}${ownerId}${SEPARATOR}`
}
return {
...otherProps,
@ -157,14 +158,14 @@ export function getTemplateParams(
* @returns {string} The new role ID which the role doc can be stored under.
*/
export function generateRoleID(id: any) {
return `${DocumentTypes.ROLE}${SEPARATOR}${id || newid()}`
return `${DocumentType.ROLE}${SEPARATOR}${id || newid()}`
}
/**
* Gets parameters for retrieving a role, this is a utility function for the getDocParams function.
*/
export function getRoleParams(roleId = null, otherProps = {}) {
return getDocParams(DocumentTypes.ROLE, roleId, otherProps)
return getDocParams(DocumentType.ROLE, roleId, otherProps)
}
export function getStartEndKeyURL(base: any, baseKey: any, tenantId = null) {
@ -211,9 +212,9 @@ export async function getAllDbs(opts = { efficient: false }) {
await addDbs(couchUrl)
} else {
// get prod apps
await addDbs(getStartEndKeyURL(couchUrl, DocumentTypes.APP, tenantId))
await addDbs(getStartEndKeyURL(couchUrl, DocumentType.APP, tenantId))
// get dev apps
await addDbs(getStartEndKeyURL(couchUrl, DocumentTypes.APP_DEV, tenantId))
await addDbs(getStartEndKeyURL(couchUrl, DocumentType.APP_DEV, tenantId))
// add global db name
dbs.push(getGlobalDBName(tenantId))
}
@ -233,14 +234,18 @@ export async function getAllApps({ dev, all, idsOnly, efficient }: any = {}) {
}
let dbs = await getAllDbs({ efficient })
const appDbNames = dbs.filter((dbName: any) => {
if (env.isTest() && !dbName) {
return false
}
const split = dbName.split(SEPARATOR)
// it is an app, check the tenantId
if (split[0] === DocumentTypes.APP) {
if (split[0] === DocumentType.APP) {
// tenantId is always right before the UUID
const possibleTenantId = split[split.length - 2]
const noTenantId =
split.length === 2 || possibleTenantId === DocumentTypes.DEV
split.length === 2 || possibleTenantId === DocumentType.DEV
return (
(tenantId === DEFAULT_TENANT_ID && noTenantId) ||
@ -250,7 +255,16 @@ export async function getAllApps({ dev, all, idsOnly, efficient }: any = {}) {
return false
})
if (idsOnly) {
return appDbNames
const devAppIds = appDbNames.filter(appId => isDevAppID(appId))
const prodAppIds = appDbNames.filter(appId => !isDevAppID(appId))
switch (dev) {
case true:
return devAppIds
case false:
return prodAppIds
default:
return appDbNames
}
}
const appPromises = appDbNames.map((app: any) =>
// skip setup otherwise databases could be re-created
@ -326,7 +340,7 @@ export async function dbExists(dbName: any) {
export const generateConfigID = ({ type, workspace, user }: any) => {
const scope = [type, workspace, user].filter(Boolean).join(SEPARATOR)
return `${DocumentTypes.CONFIG}${SEPARATOR}${scope}`
return `${DocumentType.CONFIG}${SEPARATOR}${scope}`
}
/**
@ -340,8 +354,8 @@ export const getConfigParams = (
return {
...otherProps,
startkey: `${DocumentTypes.CONFIG}${SEPARATOR}${scope}`,
endkey: `${DocumentTypes.CONFIG}${SEPARATOR}${scope}${UNICODE_MAX}`,
startkey: `${DocumentType.CONFIG}${SEPARATOR}${scope}`,
endkey: `${DocumentType.CONFIG}${SEPARATOR}${scope}${UNICODE_MAX}`,
}
}
@ -350,7 +364,7 @@ export const getConfigParams = (
* @returns {string} The new dev info ID which info for dev (like api key) can be stored under.
*/
export const generateDevInfoID = (userId: any) => {
return `${DocumentTypes.DEV_INFO}${SEPARATOR}${userId}`
return `${DocumentType.DEV_INFO}${SEPARATOR}${userId}`
}
/**

View File

@ -1,158 +0,0 @@
const {
DocumentTypes,
ViewNames,
DeprecatedViews,
SEPARATOR,
} = require("./utils")
const { getGlobalDB } = require("../tenancy")
const DESIGN_DB = "_design/database"
function DesignDoc() {
return {
_id: DESIGN_DB,
// view collation information, read before writing any complex views:
// https://docs.couchdb.org/en/master/ddocs/views/collation.html#collation-specification
views: {},
}
}
async function removeDeprecated(db, viewName) {
if (!DeprecatedViews[viewName]) {
return
}
try {
const designDoc = await db.get(DESIGN_DB)
for (let deprecatedNames of DeprecatedViews[viewName]) {
delete designDoc.views[deprecatedNames]
}
await db.put(designDoc)
} catch (err) {
// doesn't exist, ignore
}
}
exports.createNewUserEmailView = async () => {
const db = getGlobalDB()
let designDoc
try {
designDoc = await db.get(DESIGN_DB)
} catch (err) {
// no design doc, make one
designDoc = DesignDoc()
}
const view = {
// if using variables in a map function need to inject them before use
map: `function(doc) {
if (doc._id.startsWith("${DocumentTypes.USER}${SEPARATOR}")) {
emit(doc.email.toLowerCase(), doc._id)
}
}`,
}
designDoc.views = {
...designDoc.views,
[ViewNames.USER_BY_EMAIL]: view,
}
await db.put(designDoc)
}
exports.createUserAppView = async () => {
const db = getGlobalDB()
let designDoc
try {
designDoc = await db.get("_design/database")
} catch (err) {
// no design doc, make one
designDoc = DesignDoc()
}
const view = {
// if using variables in a map function need to inject them before use
map: `function(doc) {
if (doc._id.startsWith("${DocumentTypes.USER}${SEPARATOR}") && doc.roles) {
for (let prodAppId of Object.keys(doc.roles)) {
let emitted = prodAppId + "${SEPARATOR}" + doc._id
emit(emitted, null)
}
}
}`,
}
designDoc.views = {
...designDoc.views,
[ViewNames.USER_BY_APP]: view,
}
await db.put(designDoc)
}
exports.createApiKeyView = async () => {
const db = getGlobalDB()
let designDoc
try {
designDoc = await db.get("_design/database")
} catch (err) {
designDoc = DesignDoc()
}
const view = {
map: `function(doc) {
if (doc._id.startsWith("${DocumentTypes.DEV_INFO}") && doc.apiKey) {
emit(doc.apiKey, doc.userId)
}
}`,
}
designDoc.views = {
...designDoc.views,
[ViewNames.BY_API_KEY]: view,
}
await db.put(designDoc)
}
exports.createUserBuildersView = async () => {
const db = getGlobalDB()
let designDoc
try {
designDoc = await db.get("_design/database")
} catch (err) {
// no design doc, make one
designDoc = DesignDoc()
}
const view = {
map: `function(doc) {
if (doc.builder && doc.builder.global === true) {
emit(doc._id, doc._id)
}
}`,
}
designDoc.views = {
...designDoc.views,
[ViewNames.USER_BY_BUILDERS]: view,
}
await db.put(designDoc)
}
exports.queryGlobalView = async (viewName, params, db = null) => {
const CreateFuncByName = {
[ViewNames.USER_BY_EMAIL]: exports.createNewUserEmailView,
[ViewNames.BY_API_KEY]: exports.createApiKeyView,
[ViewNames.USER_BY_BUILDERS]: exports.createUserBuildersView,
[ViewNames.USER_BY_APP]: exports.createUserAppView,
}
// can pass DB in if working with something specific
if (!db) {
db = getGlobalDB()
}
try {
let response = (await db.query(`database/${viewName}`, params)).rows
response = response.map(resp =>
params.include_docs ? resp.doc : resp.value
)
return response.length <= 1 ? response[0] : response
} catch (err) {
if (err != null && err.name === "not_found") {
const createFunc = CreateFuncByName[viewName]
await removeDeprecated(db, viewName)
await createFunc()
return exports.queryGlobalView(viewName, params)
} else {
throw err
}
}
}

View File

@ -0,0 +1,261 @@
import { DocumentType, ViewName, DeprecatedViews, SEPARATOR } from "./utils"
import { getGlobalDB } from "../context"
import PouchDB from "pouchdb"
import { StaticDatabases } from "./constants"
import { doWithDB } from "./"
const DESIGN_DB = "_design/database"
function DesignDoc() {
return {
_id: DESIGN_DB,
// view collation information, read before writing any complex views:
// https://docs.couchdb.org/en/master/ddocs/views/collation.html#collation-specification
views: {},
}
}
interface DesignDocument {
views: any
}
async function removeDeprecated(db: PouchDB.Database, viewName: ViewName) {
// @ts-ignore
if (!DeprecatedViews[viewName]) {
return
}
try {
const designDoc = await db.get<DesignDocument>(DESIGN_DB)
// @ts-ignore
for (let deprecatedNames of DeprecatedViews[viewName]) {
delete designDoc.views[deprecatedNames]
}
await db.put(designDoc)
} catch (err) {
// doesn't exist, ignore
}
}
export const createNewUserEmailView = async () => {
const db = getGlobalDB()
let designDoc
try {
designDoc = await db.get(DESIGN_DB)
} catch (err) {
// no design doc, make one
designDoc = DesignDoc()
}
const view = {
// if using variables in a map function, they need to be injected before use
map: `function(doc) {
if (doc._id.startsWith("${DocumentType.USER}${SEPARATOR}")) {
emit(doc.email.toLowerCase(), doc._id)
}
}`,
}
designDoc.views = {
...designDoc.views,
[ViewName.USER_BY_EMAIL]: view,
}
await db.put(designDoc)
}
export const createAccountEmailView = async () => {
await doWithDB(
StaticDatabases.PLATFORM_INFO.name,
async (db: PouchDB.Database) => {
let designDoc
try {
designDoc = await db.get<DesignDocument>(DESIGN_DB)
} catch (err) {
// no design doc, make one
designDoc = DesignDoc()
}
const view = {
// if using variables in a map function, they need to be injected before use
map: `function(doc) {
if (doc._id.startsWith("${DocumentType.ACCOUNT_METADATA}${SEPARATOR}")) {
emit(doc.email.toLowerCase(), doc._id)
}
}`,
}
designDoc.views = {
...designDoc.views,
[ViewName.ACCOUNT_BY_EMAIL]: view,
}
await db.put(designDoc)
}
)
}
export const createUserAppView = async () => {
const db = getGlobalDB() as PouchDB.Database
let designDoc
try {
designDoc = await db.get<DesignDocument>("_design/database")
} catch (err) {
// no design doc, make one
designDoc = DesignDoc()
}
const view = {
// if using variables in a map function, they need to be injected before use
map: `function(doc) {
if (doc._id.startsWith("${DocumentType.USER}${SEPARATOR}") && doc.roles) {
for (let prodAppId of Object.keys(doc.roles)) {
let emitted = prodAppId + "${SEPARATOR}" + doc._id
emit(emitted, null)
}
}
}`,
}
designDoc.views = {
...designDoc.views,
[ViewName.USER_BY_APP]: view,
}
await db.put(designDoc)
}
export const createApiKeyView = async () => {
const db = getGlobalDB()
let designDoc
try {
designDoc = await db.get("_design/database")
} catch (err) {
designDoc = DesignDoc()
}
const view = {
map: `function(doc) {
if (doc._id.startsWith("${DocumentType.DEV_INFO}") && doc.apiKey) {
emit(doc.apiKey, doc.userId)
}
}`,
}
designDoc.views = {
...designDoc.views,
[ViewName.BY_API_KEY]: view,
}
await db.put(designDoc)
}
export const createUserBuildersView = async () => {
const db = getGlobalDB()
let designDoc
try {
designDoc = await db.get("_design/database")
} catch (err) {
// no design doc, make one
designDoc = DesignDoc()
}
const view = {
map: `function(doc) {
if (doc.builder && doc.builder.global === true) {
emit(doc._id, doc._id)
}
}`,
}
designDoc.views = {
...designDoc.views,
[ViewName.USER_BY_BUILDERS]: view,
}
await db.put(designDoc)
}
export const createPlatformUserView = async () => {
await doWithDB(
StaticDatabases.PLATFORM_INFO.name,
async (db: PouchDB.Database) => {
let designDoc
try {
designDoc = await db.get<DesignDocument>(DESIGN_DB)
} catch (err) {
// no design doc, make one
designDoc = DesignDoc()
}
const view = {
// if using variables in a map function, they need to be injected before use
map: `function(doc) {
if (doc.tenantId) {
emit(doc._id.toLowerCase(), doc._id)
}
}`,
}
designDoc.views = {
...designDoc.views,
[ViewName.PLATFORM_USERS_LOWERCASE]: view,
}
await db.put(designDoc)
}
)
}
export interface QueryViewOptions {
arrayResponse?: boolean
}
export const queryView = async <T>(
viewName: ViewName,
params: PouchDB.Query.Options<T, T>,
db: PouchDB.Database,
CreateFuncByName: any,
opts?: QueryViewOptions
): Promise<T[] | T | undefined> => {
try {
let response = await db.query<T, T>(`database/${viewName}`, params)
const rows = response.rows
const docs = rows.map(row => (params.include_docs ? row.doc : row.value))
// if arrayResponse has been requested, always return array regardless of length
if (opts?.arrayResponse) {
return docs
} else {
// return the single document if there is only one
return docs.length <= 1 ? docs[0] : docs
}
} catch (err: any) {
if (err != null && err.name === "not_found") {
const createFunc = CreateFuncByName[viewName]
await removeDeprecated(db, viewName)
await createFunc()
return queryView(viewName, params, db, CreateFuncByName, opts)
} else {
throw err
}
}
}
export const queryPlatformView = async <T>(
viewName: ViewName,
params: PouchDB.Query.Options<T, T>,
opts?: QueryViewOptions
): Promise<T[] | T | undefined> => {
const CreateFuncByName = {
[ViewName.ACCOUNT_BY_EMAIL]: createAccountEmailView,
[ViewName.PLATFORM_USERS_LOWERCASE]: createPlatformUserView,
}
return doWithDB(
StaticDatabases.PLATFORM_INFO.name,
async (db: PouchDB.Database) => {
return queryView(viewName, params, db, CreateFuncByName, opts)
}
)
}
export const queryGlobalView = async <T>(
viewName: ViewName,
params: PouchDB.Query.Options<T, T>,
db?: PouchDB.Database,
opts?: QueryViewOptions
): Promise<T[] | T | undefined> => {
const CreateFuncByName = {
[ViewName.USER_BY_EMAIL]: createNewUserEmailView,
[ViewName.BY_API_KEY]: createApiKeyView,
[ViewName.USER_BY_BUILDERS]: createUserBuildersView,
[ViewName.USER_BY_APP]: createUserAppView,
}
// can pass DB in if working with something specific
if (!db) {
db = getGlobalDB() as PouchDB.Database
}
return queryView(viewName, params, db, CreateFuncByName, opts)
}

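A usage sketch of the lazy-creation flow above (import paths assumed, and a global DB context must already be set): the first query fails with not_found, the view is created, deprecated variants are removed, and the query retries.

import { queryGlobalView } from "./views"
import { ViewName } from "./utils"
import { User } from "@budibase/types"

// a single matching row with include_docs returns the doc itself
const user = await queryGlobalView<User>(ViewName.USER_BY_EMAIL, {
  key: "jo@example.com".toLowerCase(),
  include_docs: true,
})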
View File

@ -19,6 +19,7 @@ if (!LOADED && isDev() && !isTest()) {
const env = {
isTest,
isDev,
JS_BCRYPT: process.env.JS_BCRYPT,
JWT_SECRET: process.env.JWT_SECRET,
COUCH_DB_URL: process.env.COUCH_DB_URL || "http://localhost:4005",
COUCH_DB_USERNAME: process.env.COUCH_DB_USER,
@ -36,7 +37,7 @@ const env = {
MULTI_TENANCY: process.env.MULTI_TENANCY,
ACCOUNT_PORTAL_URL:
process.env.ACCOUNT_PORTAL_URL || "https://account.budibase.app",
ACCOUNT_PORTAL_API_KEY: process.env.ACCOUNT_PORTAL_API_KEY,
ACCOUNT_PORTAL_API_KEY: process.env.ACCOUNT_PORTAL_API_KEY || "",
DISABLE_ACCOUNT_PORTAL: process.env.DISABLE_ACCOUNT_PORTAL,
SELF_HOSTED: !!parseInt(process.env.SELF_HOSTED || ""),
COOKIE_DOMAIN: process.env.COOKIE_DOMAIN,
@ -50,12 +51,14 @@ const env = {
GLOBAL_BUCKET_NAME: process.env.GLOBAL_BUCKET_NAME || "global",
GLOBAL_CLOUD_BUCKET_NAME:
process.env.GLOBAL_CLOUD_BUCKET_NAME || "prod-budi-tenant-uploads",
PLUGIN_BUCKET_NAME: process.env.PLUGIN_BUCKET_NAME || "plugins",
USE_COUCH: process.env.USE_COUCH || true,
DISABLE_DEVELOPER_LICENSE: process.env.DISABLE_DEVELOPER_LICENSE,
DEFAULT_LICENSE: process.env.DEFAULT_LICENSE,
SERVICE: process.env.SERVICE || "budibase",
MEMORY_LEAK_CHECK: process.env.MEMORY_LEAK_CHECK || false,
LOG_LEVEL: process.env.LOG_LEVEL,
SESSION_UPDATE_PERIOD: process.env.SESSION_UPDATE_PERIOD,
DEPLOYMENT_ENVIRONMENT:
process.env.DEPLOYMENT_ENVIRONMENT || "docker-compose",
_set(key: any, value: any) {

View File

@ -1,11 +0,0 @@
class BudibaseError extends Error {
constructor(message, code, type) {
super(message)
this.code = code
this.type = type
}
}
module.exports = {
BudibaseError,
}

View File

@ -0,0 +1,10 @@
export class BudibaseError extends Error {
code: string
type: string
constructor(message: string, code: string, type: string) {
super(message)
this.code = code
this.type = type
}
}

View File

@ -1,11 +0,0 @@
const { BudibaseError } = require("./base")
class GenericError extends BudibaseError {
constructor(message, code, type) {
super(message, code, type ? type : "generic")
}
}
module.exports = {
GenericError,
}

View File

@ -0,0 +1,7 @@
import { BudibaseError } from "./base"
export class GenericError extends BudibaseError {
constructor(message: string, code: string, type: string) {
super(message, code, type ? type : "generic")
}
}

View File

@ -1,12 +0,0 @@
const { GenericError } = require("./generic")
class HTTPError extends GenericError {
constructor(message, httpStatus, code = "http", type = "generic") {
super(message, code, type)
this.status = httpStatus
}
}
module.exports = {
HTTPError,
}

View File

@ -0,0 +1,15 @@
import { GenericError } from "./generic"
export class HTTPError extends GenericError {
status: number
constructor(
message: string,
httpStatus: number,
code = "http",
type = "generic"
) {
super(message, code, type)
this.status = httpStatus
}
}

View File

@ -1,5 +1,6 @@
const http = require("./http")
const licensing = require("./licensing")
import { HTTPError } from "./http"
import { UsageLimitError, FeatureDisabledError } from "./licensing"
import * as licensing from "./licensing"
const codes = {
...licensing.codes,
@ -11,7 +12,7 @@ const context = {
...licensing.context,
}
const getPublicError = err => {
const getPublicError = (err: any) => {
let error
if (err.code || err.type) {
// add generic error information
@ -32,13 +33,15 @@ const getPublicError = err => {
return error
}
module.exports = {
const pkg = {
codes,
types,
errors: {
UsageLimitError: licensing.UsageLimitError,
FeatureDisabledError: licensing.FeatureDisabledError,
HTTPError: http.HTTPError,
UsageLimitError,
FeatureDisabledError,
HTTPError,
},
getPublicError,
}
export = pkg

View File

@ -1,43 +0,0 @@
const { HTTPError } = require("./http")
const type = "license_error"
const codes = {
USAGE_LIMIT_EXCEEDED: "usage_limit_exceeded",
FEATURE_DISABLED: "feature_disabled",
}
const context = {
[codes.USAGE_LIMIT_EXCEEDED]: err => {
return {
limitName: err.limitName,
}
},
[codes.FEATURE_DISABLED]: err => {
return {
featureName: err.featureName,
}
},
}
class UsageLimitError extends HTTPError {
constructor(message, limitName) {
super(message, 400, codes.USAGE_LIMIT_EXCEEDED, type)
this.limitName = limitName
}
}
class FeatureDisabledError extends HTTPError {
constructor(message, featureName) {
super(message, 400, codes.FEATURE_DISABLED, type)
this.featureName = featureName
}
}
module.exports = {
type,
codes,
context,
UsageLimitError,
FeatureDisabledError,
}

View File

@ -0,0 +1,39 @@
import { HTTPError } from "./http"
export const type = "license_error"
export const codes = {
USAGE_LIMIT_EXCEEDED: "usage_limit_exceeded",
FEATURE_DISABLED: "feature_disabled",
}
export const context = {
[codes.USAGE_LIMIT_EXCEEDED]: (err: any) => {
return {
limitName: err.limitName,
}
},
[codes.FEATURE_DISABLED]: (err: any) => {
return {
featureName: err.featureName,
}
},
}
export class UsageLimitError extends HTTPError {
limitName: string
constructor(message: string, limitName: string) {
super(message, 400, codes.USAGE_LIMIT_EXCEEDED, type)
this.limitName = limitName
}
}
export class FeatureDisabledError extends HTTPError {
featureName: string
constructor(message: string, featureName: string) {
super(message, 400, codes.FEATURE_DISABLED, type)
this.featureName = featureName
}
}

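A usage sketch of the classes above, showing how the code and context mappings line up:

try {
  throw new UsageLimitError("Monthly row quota exceeded", "rows")
} catch (err: any) {
  err.status // 400
  err.code === codes.USAGE_LIMIT_EXCEEDED // true
  // the context map extracts the public payload for this code
  context[codes.USAGE_LIMIT_EXCEEDED](err) // { limitName: "rows" }
}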
View File

@ -8,4 +8,5 @@ import { processors } from "./processors"
export const shutdown = () => {
processors.shutdown()
console.log("Events shutdown")
}

View File

@ -2,7 +2,7 @@ import { Event, Identity, Group, IdentityType } from "@budibase/types"
import { EventProcessor } from "./types"
import env from "../../environment"
import * as analytics from "../analytics"
import PosthogProcessor from "./PosthogProcessor"
import PosthogProcessor from "./posthog"
/**
* Events that are always captured.
@ -32,7 +32,7 @@ export default class AnalyticsProcessor implements EventProcessor {
return
}
if (this.posthog) {
this.posthog.processEvent(event, identity, properties, timestamp)
await this.posthog.processEvent(event, identity, properties, timestamp)
}
}
@ -45,14 +45,14 @@ export default class AnalyticsProcessor implements EventProcessor {
return
}
if (this.posthog) {
this.posthog.identify(identity, timestamp)
await this.posthog.identify(identity, timestamp)
}
}
async identifyGroup(group: Group, timestamp?: string | number) {
// Group identifications (tenant and installation) are always on
if (this.posthog) {
this.posthog.identifyGroup(group, timestamp)
await this.posthog.identifyGroup(group, timestamp)
}
}

View File

@ -1,9 +1,26 @@
import PostHog from "posthog-node"
import { Event, Identity, Group, BaseEvent } from "@budibase/types"
import { EventProcessor } from "./types"
import env from "../../environment"
import * as context from "../../context"
const pkg = require("../../../package.json")
import { EventProcessor } from "../types"
import env from "../../../environment"
import * as context from "../../../context"
import * as rateLimiting from "./rateLimiting"
const pkg = require("../../../../package.json")
const EXCLUDED_EVENTS: Event[] = [
Event.USER_UPDATED,
Event.EMAIL_SMTP_UPDATED,
Event.AUTH_SSO_UPDATED,
Event.APP_UPDATED,
Event.ROLE_UPDATED,
Event.DATASOURCE_UPDATED,
Event.QUERY_UPDATED,
Event.TABLE_UPDATED,
Event.VIEW_UPDATED,
Event.VIEW_FILTER_UPDATED,
Event.VIEW_CALCULATION_UPDATED,
Event.AUTOMATION_TRIGGER_UPDATED,
Event.USER_GROUP_UPDATED,
]
export default class PosthogProcessor implements EventProcessor {
posthog: PostHog
@ -21,6 +38,15 @@ export default class PosthogProcessor implements EventProcessor {
properties: BaseEvent,
timestamp?: string | number
): Promise<void> {
// don't send excluded events
if (EXCLUDED_EVENTS.includes(event)) {
return
}
if (await rateLimiting.limited(event)) {
return
}
properties.version = pkg.version
properties.service = env.SERVICE
properties.environment = identity.environment

View File

@ -0,0 +1,2 @@
import PosthogProcessor from "./PosthogProcessor"
export default PosthogProcessor

View File

@ -0,0 +1,106 @@
import { Event } from "@budibase/types"
import { CacheKeys, TTL } from "../../../cache/generic"
import * as cache from "../../../cache/generic"
import * as context from "../../../context"
type RateLimitedEvent =
| Event.SERVED_BUILDER
| Event.SERVED_APP_PREVIEW
| Event.SERVED_APP
const isRateLimited = (event: Event): event is RateLimitedEvent => {
return (
event === Event.SERVED_BUILDER ||
event === Event.SERVED_APP_PREVIEW ||
event === Event.SERVED_APP
)
}
const isPerApp = (event: RateLimitedEvent) => {
return event === Event.SERVED_APP_PREVIEW || event === Event.SERVED_APP
}
interface EventProperties {
timestamp: number
}
enum RateLimit {
CALENDAR_DAY = "calendarDay",
}
const RATE_LIMITS = {
[Event.SERVED_APP]: RateLimit.CALENDAR_DAY,
[Event.SERVED_APP_PREVIEW]: RateLimit.CALENDAR_DAY,
[Event.SERVED_BUILDER]: RateLimit.CALENDAR_DAY,
}
/**
* Check if this event should be sent right now
* Return false to signal the event SHOULD be sent
* Return true to signal the event should NOT be sent
*/
export const limited = async (event: Event): Promise<boolean> => {
// not a rate limited event -- send
if (!isRateLimited(event)) {
return false
}
const cachedEvent = await readEvent(event)
if (cachedEvent) {
const timestamp = new Date(cachedEvent.timestamp)
const limit = RATE_LIMITS[event]
switch (limit) {
case RateLimit.CALENDAR_DAY: {
// get midnight at the start of the next day for the timestamp
timestamp.setDate(timestamp.getDate() + 1)
timestamp.setHours(0, 0, 0, 0)
// if we have passed the threshold into the next day
if (Date.now() > timestamp.getTime()) {
// update the timestamp in the event -- send
await recordEvent(event, { timestamp: Date.now() })
return false
} else {
// still within the limited period -- don't send
return true
}
}
}
} else {
// no event present i.e. expired -- send
await recordEvent(event, { timestamp: Date.now() })
return false
}
}
const eventKey = (event: RateLimitedEvent) => {
let key = `${CacheKeys.EVENTS_RATE_LIMIT}:${event}`
if (isPerApp(event)) {
key = key + ":" + context.getAppId()
}
return key
}
const readEvent = async (
event: RateLimitedEvent
): Promise<EventProperties | undefined> => {
const key = eventKey(event)
const result = await cache.get(key)
return result as EventProperties
}
const recordEvent = async (
event: RateLimitedEvent,
properties: EventProperties
) => {
const key = eventKey(event)
const limit = RATE_LIMITS[event]
let ttl
switch (limit) {
case RateLimit.CALENDAR_DAY: {
ttl = TTL.ONE_DAY
}
}
await cache.store(key, properties, ttl)
}

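A behavioural sketch of the limiter above. SERVED_APP is keyed per app, so this assumes it runs inside an app context (e.g. context.doInAppContext):

// first serve of the calendar day: timestamp recorded in the cache, not limited
await limited(Event.SERVED_APP) // => false
// later serves the same day fall before the next-midnight threshold and are suppressed
await limited(Event.SERVED_APP) // => true
// after midnight (or once the one-day cache TTL expires) the event is sent again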
View File

@ -0,0 +1,145 @@
import "../../../../../tests/utilities/TestConfiguration"
import PosthogProcessor from "../PosthogProcessor"
import { Event, IdentityType, Hosting } from "@budibase/types"
const tk = require("timekeeper")
import * as cache from "../../../../cache/generic"
import { CacheKeys } from "../../../../cache/generic"
import * as context from "../../../../context"
const newIdentity = () => {
return {
id: "test",
type: IdentityType.USER,
hosting: Hosting.SELF,
environment: "test",
}
}
describe("PosthogProcessor", () => {
beforeEach(async () => {
jest.clearAllMocks()
await cache.bustCache(
`${CacheKeys.EVENTS_RATE_LIMIT}:${Event.SERVED_BUILDER}`
)
})
describe("processEvent", () => {
it("processes event", async () => {
const processor = new PosthogProcessor("test")
const identity = newIdentity()
const properties = {}
await processor.processEvent(Event.APP_CREATED, identity, properties)
expect(processor.posthog.capture).toHaveBeenCalledTimes(1)
})
it("honours exclusions", async () => {
const processor = new PosthogProcessor("test")
const identity = newIdentity()
const properties = {}
await processor.processEvent(Event.AUTH_SSO_UPDATED, identity, properties)
expect(processor.posthog.capture).toHaveBeenCalledTimes(0)
})
describe("rate limiting", () => {
it("sends daily event once in same day", async () => {
const processor = new PosthogProcessor("test")
const identity = newIdentity()
const properties = {}
tk.freeze(new Date(2022, 0, 1, 14, 0))
await processor.processEvent(Event.SERVED_BUILDER, identity, properties)
// go forward one hour
tk.freeze(new Date(2022, 0, 1, 15, 0))
await processor.processEvent(Event.SERVED_BUILDER, identity, properties)
expect(processor.posthog.capture).toHaveBeenCalledTimes(1)
})
it("sends daily event once per unique day", async () => {
const processor = new PosthogProcessor("test")
const identity = newIdentity()
const properties = {}
tk.freeze(new Date(2022, 0, 1, 14, 0))
await processor.processEvent(Event.SERVED_BUILDER, identity, properties)
// go forward into next day
tk.freeze(new Date(2022, 0, 2, 9, 0))
await processor.processEvent(Event.SERVED_BUILDER, identity, properties)
// go forward into next day
tk.freeze(new Date(2022, 0, 3, 5, 0))
await processor.processEvent(Event.SERVED_BUILDER, identity, properties)
// go forward one hour
tk.freeze(new Date(2022, 0, 3, 6, 0))
await processor.processEvent(Event.SERVED_BUILDER, identity, properties)
expect(processor.posthog.capture).toHaveBeenCalledTimes(3)
})
it("sends event again after cache expires", async () => {
const processor = new PosthogProcessor("test")
const identity = newIdentity()
const properties = {}
tk.freeze(new Date(2022, 0, 1, 14, 0))
await processor.processEvent(Event.SERVED_BUILDER, identity, properties)
await cache.bustCache(
`${CacheKeys.EVENTS_RATE_LIMIT}:${Event.SERVED_BUILDER}`
)
tk.freeze(new Date(2022, 0, 1, 14, 0))
await processor.processEvent(Event.SERVED_BUILDER, identity, properties)
expect(processor.posthog.capture).toHaveBeenCalledTimes(2)
})
it("sends per app events once per day per app", async () => {
const processor = new PosthogProcessor("test")
const identity = newIdentity()
const properties = {}
const runAppEvents = async (appId: string) => {
await context.doInAppContext(appId, async () => {
tk.freeze(new Date(2022, 0, 1, 14, 0))
await processor.processEvent(Event.SERVED_APP, identity, properties)
await processor.processEvent(
Event.SERVED_APP_PREVIEW,
identity,
properties
)
// go forward one hour - should be ignored
tk.freeze(new Date(2022, 0, 1, 15, 0))
await processor.processEvent(Event.SERVED_APP, identity, properties)
await processor.processEvent(
Event.SERVED_APP_PREVIEW,
identity,
properties
)
// go forward into next day
tk.freeze(new Date(2022, 0, 2, 9, 0))
await processor.processEvent(Event.SERVED_APP, identity, properties)
await processor.processEvent(
Event.SERVED_APP_PREVIEW,
identity,
properties
)
})
}
await runAppEvents("app_1")
expect(processor.posthog.capture).toHaveBeenCalledTimes(4)
await runAppEvents("app_2")
expect(processor.posthog.capture).toHaveBeenCalledTimes(8)
})
})
})
})

View File

@ -5,8 +5,15 @@ import {
DatasourceCreatedEvent,
DatasourceUpdatedEvent,
DatasourceDeletedEvent,
SourceName,
} from "@budibase/types"
function isCustom(datasource: Datasource) {
const sources = Object.values(SourceName)
// if not in the base source list, then it must be custom
return !sources.includes(datasource.source)
}
export async function created(
datasource: Datasource,
timestamp?: string | number
@ -14,6 +21,7 @@ export async function created(
const properties: DatasourceCreatedEvent = {
datasourceId: datasource._id as string,
source: datasource.source,
custom: isCustom(datasource),
}
await publishEvent(Event.DATASOURCE_CREATED, properties, timestamp)
}
@ -22,6 +30,7 @@ export async function updated(datasource: Datasource) {
const properties: DatasourceUpdatedEvent = {
datasourceId: datasource._id as string,
source: datasource.source,
custom: isCustom(datasource),
}
await publishEvent(Event.DATASOURCE_UPDATED, properties)
}
@ -30,6 +39,7 @@ export async function deleted(datasource: Datasource) {
const properties: DatasourceDeletedEvent = {
datasourceId: datasource._id as string,
source: datasource.source,
custom: isCustom(datasource),
}
await publishEvent(Event.DATASOURCE_DELETED, properties)
}

View File

@ -18,3 +18,4 @@ export * as view from "./view"
export * as installation from "./installation"
export * as backfill from "./backfill"
export * as group from "./group"
export * as plugin from "./plugin"

View File

@ -20,12 +20,6 @@ export async function downgraded(license: License) {
await publishEvent(Event.LICENSE_DOWNGRADED, properties)
}
// TODO
export async function updated(license: License) {
const properties: LicenseUpdatedEvent = {}
await publishEvent(Event.LICENSE_UPDATED, properties)
}
// TODO
export async function activated(license: License) {
const properties: LicenseActivatedEvent = {}

View File

@ -0,0 +1,41 @@
import { publishEvent } from "../events"
import {
Event,
Plugin,
PluginDeletedEvent,
PluginImportedEvent,
PluginInitEvent,
} from "@budibase/types"
export async function init(plugin: Plugin) {
const properties: PluginInitEvent = {
type: plugin.schema.type,
name: plugin.name,
description: plugin.description,
version: plugin.version,
}
await publishEvent(Event.PLUGIN_INIT, properties)
}
export async function imported(plugin: Plugin) {
const properties: PluginImportedEvent = {
pluginId: plugin._id as string,
type: plugin.schema.type,
source: plugin.source,
name: plugin.name,
description: plugin.description,
version: plugin.version,
}
await publishEvent(Event.PLUGIN_IMPORTED, properties)
}
export async function deleted(plugin: Plugin) {
const properties: PluginDeletedEvent = {
pluginId: plugin._id as string,
type: plugin.schema.type,
name: plugin.name,
description: plugin.description,
version: plugin.version,
}
await publishEvent(Event.PLUGIN_DELETED, properties)
}

View File

@ -7,22 +7,26 @@ import {
AppServedEvent,
} from "@budibase/types"
export async function servedBuilder() {
const properties: BuilderServedEvent = {}
export async function servedBuilder(timezone: string) {
const properties: BuilderServedEvent = {
timezone,
}
await publishEvent(Event.SERVED_BUILDER, properties)
}
export async function servedApp(app: App) {
export async function servedApp(app: App, timezone: string) {
const properties: AppServedEvent = {
appVersion: app.version,
timezone,
}
await publishEvent(Event.SERVED_APP, properties)
}
export async function servedAppPreview(app: App) {
export async function servedAppPreview(app: App, timezone: string) {
const properties: AppPreviewServedEvent = {
appId: app.appId,
appVersion: app.version,
timezone,
}
await publishEvent(Event.SERVED_APP_PREVIEW, properties)
}

View File

@ -31,20 +31,26 @@ const TENANT_FEATURE_FLAGS = getFeatureFlags()
exports.isEnabled = featureFlag => {
const tenantId = tenancy.getTenantId()
return (
TENANT_FEATURE_FLAGS &&
TENANT_FEATURE_FLAGS[tenantId] &&
TENANT_FEATURE_FLAGS[tenantId].includes(featureFlag)
)
const flags = exports.getTenantFeatureFlags(tenantId)
return flags.includes(featureFlag)
}
exports.getTenantFeatureFlags = tenantId => {
if (TENANT_FEATURE_FLAGS && TENANT_FEATURE_FLAGS[tenantId]) {
return TENANT_FEATURE_FLAGS[tenantId]
const flags = []
if (TENANT_FEATURE_FLAGS) {
const globalFlags = TENANT_FEATURE_FLAGS["*"]
const tenantFlags = TENANT_FEATURE_FLAGS[tenantId]
if (globalFlags) {
flags.push(...globalFlags)
}
if (tenantFlags) {
flags.push(...tenantFlags)
}
}
return []
return flags
}
exports.FeatureFlag = {

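A sketch of the merge behaviour above, assuming TENANT_FEATURE_FLAGS has been parsed into an object keyed by tenant ID (flag names hypothetical):

// TENANT_FEATURE_FLAGS = { "*": ["LICENSING"], tenant1: ["GROUPS"] }
getTenantFeatureFlags("tenant1") // => ["LICENSING", "GROUPS"]
getTenantFeatureFlags("tenant2") // => ["LICENSING"]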
View File

@ -1,5 +1,5 @@
const bcrypt = require("bcrypt")
const env = require("./environment")
const bcrypt = env.JS_BCRYPT ? require("bcryptjs") : require("bcrypt")
const { v4 } = require("uuid")
const SALT_ROUNDS = env.SALT_ROUNDS || 10

View File

@ -9,13 +9,15 @@ import * as installation from "./installation"
import env from "./environment"
import tenancy from "./tenancy"
import featureFlags from "./featureFlags"
import sessions from "./security/sessions"
import * as sessions from "./security/sessions"
import deprovisioning from "./context/deprovision"
import auth from "./auth"
import constants from "./constants"
import * as dbConstants from "./db/constants"
import logging from "./logging"
import * as logging from "./logging"
import pino from "./pino"
import * as middleware from "./middleware"
import plugins from "./plugin"
// mimic the outer package exports
import * as db from "./pkg/db"
@ -54,8 +56,10 @@ const core = {
errors,
logging,
roles,
plugins,
...pino,
...errorClasses,
middleware,
}
export = core

View File

@ -1,28 +1,39 @@
const { Cookies, Headers } = require("../constants")
const { getCookie, clearCookie, openJwt } = require("../utils")
const { getUser } = require("../cache/user")
const { getSession, updateSessionTTL } = require("../security/sessions")
const { buildMatcherRegex, matches } = require("./matchers")
const env = require("../environment")
const { SEPARATOR } = require("../db/constants")
const { ViewNames } = require("../db/utils")
const { queryGlobalView } = require("../db/views")
const { getGlobalDB, doInTenant } = require("../tenancy")
const { decrypt } = require("../security/encryption")
import { Cookies, Headers } from "../constants"
import { getCookie, clearCookie, openJwt } from "../utils"
import { getUser } from "../cache/user"
import { getSession, updateSessionTTL } from "../security/sessions"
import { buildMatcherRegex, matches } from "./matchers"
import { SEPARATOR } from "../db/constants"
import { ViewName } from "../db/utils"
import { queryGlobalView } from "../db/views"
import { getGlobalDB, doInTenant } from "../tenancy"
import { decrypt } from "../security/encryption"
const identity = require("../context/identity")
const env = require("../environment")
function finalise(
ctx,
{ authenticated, user, internal, version, publicEndpoint } = {}
) {
ctx.publicEndpoint = publicEndpoint || false
ctx.isAuthenticated = authenticated || false
ctx.user = user
ctx.internal = internal || false
ctx.version = version
const ONE_MINUTE = env.SESSION_UPDATE_PERIOD || 60 * 1000
interface FinaliseOpts {
authenticated?: boolean
internal?: boolean
publicEndpoint?: boolean
version?: string
user?: any
}
async function checkApiKey(apiKey, populateUser) {
function timeMinusOneMinute() {
return new Date(Date.now() - ONE_MINUTE).toISOString()
}
function finalise(ctx: any, opts: FinaliseOpts = {}) {
ctx.publicEndpoint = opts.publicEndpoint || false
ctx.isAuthenticated = opts.authenticated || false
ctx.user = opts.user
ctx.internal = opts.internal || false
ctx.version = opts.version
}
async function checkApiKey(apiKey: string, populateUser?: Function) {
if (apiKey === env.INTERNAL_API_KEY) {
return { valid: true }
}
@ -32,7 +43,7 @@ async function checkApiKey(apiKey, populateUser) {
const db = getGlobalDB()
// api key is encrypted in the database
const userId = await queryGlobalView(
ViewNames.BY_API_KEY,
ViewName.BY_API_KEY,
{
key: apiKey,
},
@ -54,12 +65,14 @@ async function checkApiKey(apiKey, populateUser) {
* The tenancy modules should not be used here and it should be assumed that the tenancy context
* has not yet been populated.
*/
module.exports = (
export = (
noAuthPatterns = [],
opts = { publicAllowed: false, populateUser: null }
opts: { publicAllowed: boolean; populateUser?: Function } = {
publicAllowed: false,
}
) => {
const noAuthOptions = noAuthPatterns ? buildMatcherRegex(noAuthPatterns) : []
return async (ctx, next) => {
return async (ctx: any, next: any) => {
let publicEndpoint = false
const version = ctx.request.headers[Headers.API_VER]
// the path is not authenticated
@ -71,45 +84,41 @@ module.exports = (
// check the actual user is authenticated first, try header or cookie
const headerToken = ctx.request.headers[Headers.TOKEN]
const authCookie = getCookie(ctx, Cookies.Auth) || openJwt(headerToken)
const apiKey = ctx.request.headers[Headers.API_KEY]
const tenantId = ctx.request.headers[Headers.TENANT_ID]
let authenticated = false,
user = null,
internal = false
if (authCookie) {
let error = null
if (authCookie && !apiKey) {
const sessionId = authCookie.sessionId
const userId = authCookie.userId
const session = await getSession(userId, sessionId)
if (!session) {
error = `Session not found - ${userId} - ${sessionId}`
} else {
try {
if (opts && opts.populateUser) {
user = await getUser(
userId,
session.tenantId,
opts.populateUser(ctx)
)
} else {
user = await getUser(userId, session.tenantId)
}
user.csrfToken = session.csrfToken
authenticated = true
} catch (err) {
error = err
let session
try {
// getting the session handles error checking (e.g. whether the session exists)
session = await getSession(userId, sessionId)
if (opts && opts.populateUser) {
user = await getUser(
userId,
session.tenantId,
opts.populateUser(ctx)
)
} else {
user = await getUser(userId, session.tenantId)
}
}
if (error) {
console.error("Auth Error", error)
user.csrfToken = session.csrfToken
if (session?.lastAccessedAt < timeMinusOneMinute()) {
// make sure we denote that the session is still in use
await updateSessionTTL(session)
}
authenticated = true
} catch (err: any) {
authenticated = false
console.error("Auth Error", err?.message || err)
// remove the cookie as the user does not exist anymore
clearCookie(ctx, Cookies.Auth)
} else {
// make sure we denote that the session is still in use
await updateSessionTTL(session)
}
}
const apiKey = ctx.request.headers[Headers.API_KEY]
const tenantId = ctx.request.headers[Headers.TENANT_ID]
// this is an internal request, no user made it
if (!authenticated && apiKey) {
const populateUser = opts.populateUser ? opts.populateUser(ctx) : null
@ -142,7 +151,7 @@ module.exports = (
} else {
return next()
}
} catch (err) {
} catch (err: any) {
// invalid token, clear the cookie
if (err && err.name === "JsonWebTokenError") {
clearCookie(ctx, Cookies.Auth)

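A minimal sketch of mounting the middleware in a Koa app (import path assumed). An empty pattern list with publicAllowed: false means every route needs a valid session cookie or API key:

import Koa from "koa"
// path assumed -- use however backend-core exposes its middleware
import authenticated from "@budibase/backend-core/middleware/authenticated"

const app = new Koa()
app.use(authenticated([], { publicAllowed: false }))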
View File

@ -13,7 +13,8 @@ const adminOnly = require("./adminOnly")
const builderOrAdmin = require("./builderOrAdmin")
const builderOnly = require("./builderOnly")
const joiValidator = require("./joi-validator")
module.exports = {
const pkg = {
google,
oidc,
jwt,
@ -33,3 +34,5 @@ module.exports = {
builderOrAdmin,
joiValidator,
}
export = pkg

View File

@ -1,3 +1,5 @@
const Joi = require("joi")
function validate(schema, property) {
// Return a Koa middleware function
return (ctx, next) => {
@ -10,6 +12,15 @@ function validate(schema, property) {
} else if (ctx.request[property] != null) {
params = ctx.request[property]
}
// not all schemas have the append property e.g. array schemas
if (schema.append) {
schema = schema.append({
createdAt: Joi.any().optional(),
updatedAt: Joi.any().optional(),
})
}
const { error } = schema.validate(params)
if (error) {
ctx.throw(400, `Invalid ${property} - ${error.message}`)

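A usage sketch of the factory above (the router and the module's export bindings are assumed); the append step is what lets documents that have picked up createdAt/updatedAt metadata still validate:

const Joi = require("joi")

// middleware validating ctx.request.body against the schema
const bodyValidator = validate(
  Joi.object({ name: Joi.string().required() }),
  "body"
)
router.post("/api/things", bodyValidator, (ctx: any) => (ctx.body = ctx.request.body))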
View File

@ -17,14 +17,6 @@ export const DEFINITIONS: MigrationDefinition[] = [
type: MigrationType.APP,
name: MigrationName.APP_URLS,
},
{
type: MigrationType.GLOBAL,
name: MigrationName.DEVELOPER_QUOTA,
},
{
type: MigrationType.GLOBAL,
name: MigrationName.PUBLISHED_APP_QUOTA,
},
{
type: MigrationType.APP,
name: MigrationName.EVENT_APP_BACKFILL,

View File

@ -1,6 +1,6 @@
import { DEFAULT_TENANT_ID } from "../constants"
import { doWithDB } from "../db"
import { DocumentTypes, StaticDatabases } from "../db/constants"
import { DocumentType, StaticDatabases } from "../db/constants"
import { getAllApps } from "../db/utils"
import environment from "../environment"
import {
@ -21,10 +21,10 @@ import {
export const getMigrationsDoc = async (db: any) => {
// get the migrations doc
try {
return await db.get(DocumentTypes.MIGRATIONS)
return await db.get(DocumentType.MIGRATIONS)
} catch (err: any) {
if (err.status && err.status === 404) {
return { _id: DocumentTypes.MIGRATIONS }
return { _id: DocumentType.MIGRATIONS }
} else {
console.error(err)
throw err

View File

@ -57,7 +57,11 @@ function publicPolicy(bucketName: any) {
}
}
const PUBLIC_BUCKETS = [ObjectStoreBuckets.APPS, ObjectStoreBuckets.GLOBAL]
const PUBLIC_BUCKETS = [
ObjectStoreBuckets.APPS,
ObjectStoreBuckets.GLOBAL,
ObjectStoreBuckets.PLUGINS,
]
/**
* Gets a connection to the object store using the S3 SDK.
@ -66,15 +70,13 @@ const PUBLIC_BUCKETS = [ObjectStoreBuckets.APPS, ObjectStoreBuckets.GLOBAL]
* @constructor
*/
export const ObjectStore = (bucket: any) => {
AWS.config.update({
accessKeyId: env.MINIO_ACCESS_KEY,
secretAccessKey: env.MINIO_SECRET_KEY,
region: env.AWS_REGION,
})
const config: any = {
s3ForcePathStyle: true,
signatureVersion: "v4",
apiVersion: "2006-03-01",
accessKeyId: env.MINIO_ACCESS_KEY,
secretAccessKey: env.MINIO_SECRET_KEY,
region: env.AWS_REGION,
}
if (bucket) {
config.params = {
@ -174,6 +176,14 @@ export const streamUpload = async (
const objectStore = ObjectStore(bucketName)
await makeSureBucketExists(objectStore, bucketName)
// Set content type for certain known extensions
if (filename?.endsWith(".js")) {
extra = {
...extra,
ContentType: "application/javascript",
}
}
const params = {
Bucket: sanitizeBucket(bucketName),
Key: sanitizeKey(filename),
@ -297,9 +307,13 @@ export const uploadDirectory = async (
return files
}
exports.downloadTarballDirect = async (url: string, path: string) => {
exports.downloadTarballDirect = async (
url: string,
path: string,
headers = {}
) => {
path = sanitizeKey(path)
const response = await fetch(url)
const response = await fetch(url, { headers })
if (!response.ok) {
throw new Error(`unexpected response ${response.statusText}`)
}

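A sketch of the new headers parameter, e.g. pulling a plugin tarball from a private host (URL, key and token hypothetical):

await downloadTarballDirect(
  "https://plugins.example.com/my-plugin.tar.gz",
  "plugins/my-plugin",
  { Authorization: `Bearer ${token}` }
)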
View File

@ -8,6 +8,7 @@ exports.ObjectStoreBuckets = {
TEMPLATES: env.TEMPLATES_BUCKET_NAME,
GLOBAL: env.GLOBAL_BUCKET_NAME,
GLOBAL_CLOUD: env.GLOBAL_CLOUD_BUCKET_NAME,
PLUGINS: env.PLUGIN_BUCKET_NAME,
}
exports.budibaseTempDir = function () {

View File

@ -0,0 +1,7 @@
import * as utils from "./utils"
const pkg = {
...utils,
}
export = pkg

View File

@ -0,0 +1,94 @@
const {
DatasourceFieldType,
QueryType,
PluginType,
} = require("@budibase/types")
const joi = require("joi")
const DATASOURCE_TYPES = [
"Relational",
"Non-relational",
"Spreadsheet",
"Object store",
"Graph",
"API",
]
function runJoi(validator, schema) {
const { error } = validator.validate(schema)
if (error) {
throw error
}
}
function validateComponent(schema) {
const validator = joi.object({
type: joi.string().allow("component").required(),
metadata: joi.object().unknown(true).required(),
hash: joi.string().optional(),
version: joi.string().optional(),
schema: joi
.object({
name: joi.string().required(),
settings: joi.array().items(joi.object().unknown(true)).required(),
})
.unknown(true),
})
runJoi(validator, schema)
}
function validateDatasource(schema) {
const fieldValidator = joi.object({
type: joi
.string()
.allow(...Object.values(DatasourceFieldType))
.required(),
required: joi.boolean().required(),
default: joi.any(),
display: joi.string(),
})
const queryValidator = joi
.object({
type: joi.string().allow(...Object.values(QueryType)),
fields: joi.object().pattern(joi.string(), fieldValidator),
})
.required()
const validator = joi.object({
type: joi.string().allow("datasource").required(),
metadata: joi.object().unknown(true).required(),
hash: joi.string().optional(),
version: joi.string().optional(),
schema: joi.object({
docs: joi.string(),
friendlyName: joi.string().required(),
type: joi.string().allow(...DATASOURCE_TYPES),
description: joi.string().required(),
datasource: joi.object().pattern(joi.string(), fieldValidator).required(),
query: joi
.object({
create: queryValidator,
read: queryValidator,
update: queryValidator,
delete: queryValidator,
})
.unknown(true)
.required(),
}),
})
runJoi(validator, schema)
}
exports.validate = schema => {
switch (schema?.type) {
case PluginType.COMPONENT:
validateComponent(schema)
break
case PluginType.DATASOURCE:
validateDatasource(schema)
break
default:
throw new Error(`Unknown plugin type - check schema.json: ${schema.type}`)
}
}

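A usage sketch of the validator (module path assumed); an invalid shape throws the underlying joi error rather than returning it:

const { validate } = require("./validation")

validate({
  type: "component",
  metadata: {},
  schema: {
    name: "budibase-chart", // hypothetical component name
    settings: [],
  },
})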
View File

@ -3,7 +3,7 @@ const { BUILTIN_PERMISSION_IDS, PermissionLevels } = require("./permissions")
const {
generateRoleID,
getRoleParams,
DocumentTypes,
DocumentType,
SEPARATOR,
} = require("../db/utils")
const { getAppDB } = require("../context")
@ -338,7 +338,7 @@ class AccessController {
* Adds the "role_" for builtin role IDs which are to be written to the DB (for permissions).
*/
exports.getDBRoleID = roleId => {
if (roleId.startsWith(DocumentTypes.ROLE)) {
if (roleId.startsWith(DocumentType.ROLE)) {
return roleId
}
return generateRoleID(roleId)
@ -349,8 +349,8 @@ exports.getDBRoleID = roleId => {
*/
exports.getExternalRoleID = roleId => {
// for built in roles we want to remove the DB role ID element (role_)
if (roleId.startsWith(DocumentTypes.ROLE) && isBuiltin(roleId)) {
return roleId.split(`${DocumentTypes.ROLE}${SEPARATOR}`)[1]
if (roleId.startsWith(DocumentType.ROLE) && isBuiltin(roleId)) {
return roleId.split(`${DocumentType.ROLE}${SEPARATOR}`)[1]
}
return roleId
}

View File

@ -1,106 +0,0 @@
const redis = require("../redis/init")
const { v4: uuidv4 } = require("uuid")
const { logWarn } = require("../logging")
const env = require("../environment")
// a week in seconds
const EXPIRY_SECONDS = 86400 * 7
async function getSessionsForUser(userId) {
const client = await redis.getSessionClient()
const sessions = await client.scan(userId)
return sessions.map(session => session.value)
}
function makeSessionID(userId, sessionId) {
return `${userId}/${sessionId}`
}
async function invalidateSessions(userId, sessionIds = null) {
try {
let sessions = []
// If no sessionIds, get all the sessions for the user
if (!sessionIds) {
sessions = await getSessionsForUser(userId)
sessions.forEach(
session =>
(session.key = makeSessionID(session.userId, session.sessionId))
)
} else {
// use the passed array of sessionIds
sessions = Array.isArray(sessionIds) ? sessionIds : [sessionIds]
sessions = sessions.map(sessionId => ({
key: makeSessionID(userId, sessionId),
}))
}
if (sessions && sessions.length > 0) {
const client = await redis.getSessionClient()
const promises = []
for (let session of sessions) {
promises.push(client.delete(session.key))
}
if (!env.isTest()) {
logWarn(
`Invalidating sessions for ${userId} - ${sessions
.map(session => session.key)
.join(", ")}`
)
}
await Promise.all(promises)
}
} catch (err) {
console.error(`Error invalidating sessions: ${err}`)
}
}
exports.createASession = async (userId, session) => {
// invalidate all other sessions
await invalidateSessions(userId)
const client = await redis.getSessionClient()
const sessionId = session.sessionId
if (!session.csrfToken) {
session.csrfToken = uuidv4()
}
session = {
createdAt: new Date().toISOString(),
lastAccessedAt: new Date().toISOString(),
...session,
userId,
}
await client.store(makeSessionID(userId, sessionId), session, EXPIRY_SECONDS)
}
exports.updateSessionTTL = async session => {
const client = await redis.getSessionClient()
const key = makeSessionID(session.userId, session.sessionId)
session.lastAccessedAt = new Date().toISOString()
await client.store(key, session, EXPIRY_SECONDS)
}
exports.endSession = async (userId, sessionId) => {
const client = await redis.getSessionClient()
await client.delete(makeSessionID(userId, sessionId))
}
exports.getSession = async (userId, sessionId) => {
try {
const client = await redis.getSessionClient()
return client.get(makeSessionID(userId, sessionId))
} catch (err) {
// if can't get session don't error, just don't return anything
console.error(err)
return null
}
}
exports.getAllSessions = async () => {
const client = await redis.getSessionClient()
const sessions = await client.scan()
return sessions.map(session => session.value)
}
exports.getUserSessions = getSessionsForUser
exports.invalidateSessions = invalidateSessions

View File

@ -0,0 +1,119 @@
const redis = require("../redis/init")
const { v4: uuidv4 } = require("uuid")
const { logWarn } = require("../logging")
const env = require("../environment")
import {
Session,
ScannedSession,
SessionKey,
CreateSession,
} from "@budibase/types"
// a week in seconds
const EXPIRY_SECONDS = 86400 * 7
function makeSessionID(userId: string, sessionId: string) {
return `${userId}/${sessionId}`
}
export async function getSessionsForUser(userId: string): Promise<Session[]> {
if (!userId) {
console.trace("Cannot get sessions for undefined userId")
return []
}
const client = await redis.getSessionClient()
const sessions: ScannedSession[] = await client.scan(userId)
return sessions.map(session => session.value)
}
export async function invalidateSessions(
userId: string,
opts: { sessionIds?: string[]; reason?: string } = {}
) {
try {
const reason = opts?.reason || "unknown"
let sessionIds: string[] = opts.sessionIds || []
let sessionKeys: SessionKey[]
// If no sessionIds, get all the sessions for the user
if (sessionIds.length === 0) {
const sessions = await getSessionsForUser(userId)
sessionKeys = sessions.map(session => ({
key: makeSessionID(session.userId, session.sessionId),
}))
} else {
// use the passed array of sessionIds
sessionIds = Array.isArray(sessionIds) ? sessionIds : [sessionIds]
sessionKeys = sessionIds.map(sessionId => ({
key: makeSessionID(userId, sessionId),
}))
}
if (sessionKeys && sessionKeys.length > 0) {
const client = await redis.getSessionClient()
const promises = []
for (let sessionKey of sessionKeys) {
promises.push(client.delete(sessionKey.key))
}
if (!env.isTest()) {
logWarn(
`Invalidating sessions for ${userId} (reason: ${reason}) - ${sessionKeys
.map(sessionKey => sessionKey.key)
.join(", ")}`
)
}
await Promise.all(promises)
}
} catch (err) {
console.error(`Error invalidating sessions: ${err}`)
}
}
export async function createASession(
userId: string,
createSession: CreateSession
) {
// invalidate all other sessions
await invalidateSessions(userId, { reason: "creation" })
const client = await redis.getSessionClient()
const sessionId = createSession.sessionId
const csrfToken = createSession.csrfToken ? createSession.csrfToken : uuidv4()
const key = makeSessionID(userId, sessionId)
const session: Session = {
...createSession,
csrfToken,
createdAt: new Date().toISOString(),
lastAccessedAt: new Date().toISOString(),
userId,
}
await client.store(key, session, EXPIRY_SECONDS)
}
export async function updateSessionTTL(session: Session) {
const client = await redis.getSessionClient()
const key = makeSessionID(session.userId, session.sessionId)
session.lastAccessedAt = new Date().toISOString()
await client.store(key, session, EXPIRY_SECONDS)
}
export async function endSession(userId: string, sessionId: string) {
const client = await redis.getSessionClient()
await client.delete(makeSessionID(userId, sessionId))
}
export async function getSession(
userId: string,
sessionId: string
): Promise<Session> {
if (!userId || !sessionId) {
throw new Error(`Invalid session details - ${userId} - ${sessionId}`)
}
const client = await redis.getSessionClient()
const session = await client.get(makeSessionID(userId, sessionId))
if (!session) {
throw new Error(`Session not found - ${userId} - ${sessionId}`)
}
return session
}

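A usage sketch of the API above (CreateSession fields assumed from how sessions are read back elsewhere in this diff):

// invalidates any existing sessions for the user, then stores the new one
// under "<userId>/<sessionId>" with a one-week TTL
await createASession("us_123", { sessionId: "sess_abc", tenantId: "default" })

// throws if the session is missing or has expired out of redis
const session = await getSession("us_123", "sess_abc")

// bulk invalidation, with the reason included in the warning log
await invalidateSessions("us_123", { reason: "password-reset" })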
View File

@ -0,0 +1,12 @@
import * as sessions from "../sessions"
describe("sessions", () => {
describe("getSessionsForUser", () => {
it("returns empty when user is undefined", async () => {
// @ts-ignore - allow the undefined to be passed
const results = await sessions.getSessionsForUser(undefined)
expect(results).toStrictEqual([])
})
})
})

View File

@ -1,9 +1,11 @@
import * as context from "../context"
import * as tenancy from "./tenancy"
import * as utils from "./utils"
const pkg = {
...context,
...tenancy,
...utils,
}
export = pkg

View File

@ -1,6 +1,7 @@
import { doWithDB } from "../db"
import { StaticDatabases } from "../db/constants"
import { baseGlobalDBName } from "./utils"
import { queryPlatformView } from "../db/views"
import { StaticDatabases, ViewName } from "../db/constants"
import { getGlobalDBName } from "./utils"
import {
getTenantId,
DEFAULT_TENANT_ID,
@ -8,6 +9,7 @@ import {
getTenantIDFromAppID,
} from "../context"
import env from "../environment"
import { PlatformUser, PlatformUserByEmail } from "@budibase/types"
const TENANT_DOC = StaticDatabases.PLATFORM_INFO.docs.tenants
const PLATFORM_INFO_DB = StaticDatabases.PLATFORM_INFO.name
@ -87,15 +89,6 @@ export const tryAddTenant = async (
})
}
export const getGlobalDBName = (tenantId?: string) => {
// tenant ID can be set externally, for example user API where
// new tenants are being created, this may be the case
if (!tenantId) {
tenantId = getTenantId()
}
return baseGlobalDBName(tenantId)
}
export const doWithGlobalDB = (tenantId: string, cb: any) => {
return doWithDB(getGlobalDBName(tenantId), cb)
}
@ -116,14 +109,16 @@ export const lookupTenantId = async (userId: string) => {
}
// lookup, could be email or userId, either will return a doc
export const getTenantUser = async (identifier: string) => {
return doWithDB(PLATFORM_INFO_DB, async (db: any) => {
try {
return await db.get(identifier)
} catch (err) {
return null
}
})
export const getTenantUser = async (
identifier: string
): Promise<PlatformUser | null> => {
// use the view so the lookup works regardless of casing -- keys are lowercased,
// keeping email login case insensitive
const response = queryPlatformView(ViewName.PLATFORM_USERS_LOWERCASE, {
keys: [identifier.toLowerCase()],
include_docs: true,
}) as Promise<PlatformUser>
return response
}
export const isUserInAppTenant = (appId: string, user: any) => {

View File

@ -1,12 +0,0 @@
const { DEFAULT_TENANT_ID } = require("../constants")
const { StaticDatabases, SEPARATOR } = require("../db/constants")
exports.baseGlobalDBName = tenantId => {
let dbName
if (!tenantId || tenantId === DEFAULT_TENANT_ID) {
dbName = StaticDatabases.GLOBAL.name
} else {
dbName = `${tenantId}${SEPARATOR}${StaticDatabases.GLOBAL.name}`
}
return dbName
}

View File

@ -0,0 +1,22 @@
import { DEFAULT_TENANT_ID } from "../constants"
import { StaticDatabases, SEPARATOR } from "../db/constants"
import { getTenantId } from "../context"
export const getGlobalDBName = (tenantId?: string) => {
// tenant ID can be set externally, for example user API where
// new tenants are being created, this may be the case
if (!tenantId) {
tenantId = getTenantId()
}
return baseGlobalDBName(tenantId)
}
export const baseGlobalDBName = (tenantId: string | undefined | null) => {
let dbName
if (!tenantId || tenantId === DEFAULT_TENANT_ID) {
dbName = StaticDatabases.GLOBAL.name
} else {
dbName = `${tenantId}${SEPARATOR}${StaticDatabases.GLOBAL.name}`
}
return dbName
}

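For reference, the naming scheme the two helpers implement:

// the default tenant maps to the shared global DB
baseGlobalDBName(DEFAULT_TENANT_ID) // => StaticDatabases.GLOBAL.name
// any other tenant gets a prefixed DB name
baseGlobalDBName("tenant1") // => `tenant1${SEPARATOR}${StaticDatabases.GLOBAL.name}`
// getGlobalDBName() with no argument falls back to the tenant ID in the current context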
View File

@ -1,30 +1,39 @@
const {
ViewNames,
import {
ViewName,
getUsersByAppParams,
getProdAppID,
generateAppUserID,
} = require("./db/utils")
const { queryGlobalView } = require("./db/views")
const { UNICODE_MAX } = require("./db/constants")
} from "./db/utils"
import { queryGlobalView } from "./db/views"
import { UNICODE_MAX } from "./db/constants"
import { User } from "@budibase/types"
/**
* Given an email address this will use a view to search through
* all the users to find one with this email address.
* @param {string} email the email to lookup the user by.
* @return {Promise<object|null>}
*/
exports.getGlobalUserByEmail = async email => {
export const getGlobalUserByEmail = async (
email: string
): Promise<User | undefined> => {
if (email == null) {
throw "Must supply an email address to view"
}
return await queryGlobalView(ViewNames.USER_BY_EMAIL, {
const response = await queryGlobalView<User>(ViewName.USER_BY_EMAIL, {
key: email.toLowerCase(),
include_docs: true,
})
if (Array.isArray(response)) {
// shouldn't be possible, but handle it just in case
throw new Error(`Multiple users found with email address: ${email}`)
}
return response
}
exports.searchGlobalUsersByApp = async (appId, opts) => {
export const searchGlobalUsersByApp = async (appId: any, opts: any) => {
if (typeof appId !== "string") {
throw new Error("Must provide a string based app ID")
}
@ -32,31 +41,31 @@ exports.searchGlobalUsersByApp = async (appId, opts) => {
include_docs: true,
})
params.startkey = opts && opts.startkey ? opts.startkey : params.startkey
let response = await queryGlobalView(ViewNames.USER_BY_APP, params)
let response = await queryGlobalView(ViewName.USER_BY_APP, params)
if (!response) {
response = []
}
return Array.isArray(response) ? response : [response]
}
exports.getGlobalUserByAppPage = (appId, user) => {
export const getGlobalUserByAppPage = (appId: string, user: User) => {
if (!user) {
return
}
return generateAppUserID(getProdAppID(appId), user._id)
return generateAppUserID(getProdAppID(appId), user._id!)
}
/**
* Performs a starts with search on the global email view.
*/
exports.searchGlobalUsersByEmail = async (email, opts) => {
export const searchGlobalUsersByEmail = async (email: string, opts: any) => {
if (typeof email !== "string") {
throw new Error("Must provide a string to search by")
}
const lcEmail = email.toLowerCase()
// handle if passing up startkey for pagination
const startkey = opts && opts.startkey ? opts.startkey : lcEmail
let response = await queryGlobalView(ViewNames.USER_BY_EMAIL, {
let response = await queryGlobalView<User>(ViewName.USER_BY_EMAIL, {
...opts,
startkey,
endkey: `${lcEmail}${UNICODE_MAX}`,

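A sketch of the prefix search above, which scans the email view from the lowercased term up to term + UNICODE_MAX (options shape assumed):

// returns users whose email starts with "jo", case-insensitively;
// pass the last returned key back as startkey to fetch the next page
const users = await searchGlobalUsersByEmail("jo", { include_docs: true, limit: 10 })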
View File

@ -1,20 +1,18 @@
const {
DocumentTypes,
SEPARATOR,
ViewNames,
getAllApps,
} = require("./db/utils")
const { DocumentType, SEPARATOR, ViewName, getAllApps } = require("./db/utils")
const jwt = require("jsonwebtoken")
const { options } = require("./middleware/passport/jwt")
const { queryGlobalView } = require("./db/views")
const { Headers, Cookies, MAX_VALID_DATE } = require("./constants")
const env = require("./environment")
const userCache = require("./cache/user")
const { getUserSessions, invalidateSessions } = require("./security/sessions")
const {
getSessionsForUser,
invalidateSessions,
} = require("./security/sessions")
const events = require("./events")
const tenancy = require("./tenancy")
const APP_PREFIX = DocumentTypes.APP + SEPARATOR
const APP_PREFIX = DocumentType.APP + SEPARATOR
const PROD_APP_PREFIX = "/app/"
function confirmAppId(possibleAppId) {
@ -44,6 +42,18 @@ async function resolveAppUrl(ctx) {
return app && app.appId ? app.appId : undefined
}
exports.isServingApp = ctx => {
// dev app
if (ctx.path.startsWith(`/${APP_PREFIX}`)) {
return true
}
// prod app
if (ctx.path.startsWith(PROD_APP_PREFIX)) {
return true
}
return false
}
/**
* Given a request tries to find the appId, which can be located in various places
* @param {object} ctx The main request body to look through.
@ -151,7 +161,7 @@ exports.isClient = ctx => {
}
const getBuilders = async () => {
const builders = await queryGlobalView(ViewNames.USER_BY_BUILDERS, {
const builders = await queryGlobalView(ViewName.USER_BY_BUILDERS, {
include_docs: false,
})
@ -178,7 +188,7 @@ exports.platformLogout = async ({ ctx, userId, keepActiveSession }) => {
if (!ctx) throw new Error("Koa context must be supplied to logout.")
const currentSession = exports.getCookie(ctx, Cookies.Auth)
let sessions = await getUserSessions(userId)
let sessions = await getSessionsForUser(userId)
if (keepActiveSession) {
sessions = sessions.filter(
@ -190,10 +200,8 @@ exports.platformLogout = async ({ ctx, userId, keepActiveSession }) => {
exports.clearCookie(ctx, Cookies.CurrentApp)
}
await invalidateSessions(
userId,
sessions.map(({ sessionId }) => sessionId)
)
const sessionIds = sessions.map(({ sessionId }) => sessionId)
await invalidateSessions(userId, { sessionIds, reason: "logout" })
await events.auth.logout()
await userCache.invalidateUser(userId)
}

View File

@ -0,0 +1,7 @@
export const getAccount = jest.fn()
export const getAccountByTenantId = jest.fn()
jest.mock("../../../src/cloud/accounts", () => ({
getAccount,
getAccountByTenantId,
}))

View File

@ -1,2 +0,0 @@
exports.MOCK_DATE = new Date("2020-01-01T00:00:00.000Z")
exports.MOCK_DATE_TIMESTAMP = 1577836800000

View File

@ -0,0 +1,2 @@
export const MOCK_DATE = new Date("2020-01-01T00:00:00.000Z")
export const MOCK_DATE_TIMESTAMP = 1577836800000

View File

@ -1,7 +0,0 @@
const events = require("./events")
const date = require("./date")
module.exports = {
date,
events,
}

Some files were not shown because too many files have changed in this diff