commit
4459f1b915
|
@ -56,6 +56,7 @@ jobs:
|
||||||
run: yarn install:pro $BRANCH $BASE_BRANCH
|
run: yarn install:pro $BRANCH $BASE_BRANCH
|
||||||
- run: yarn
|
- run: yarn
|
||||||
- run: yarn bootstrap
|
- run: yarn bootstrap
|
||||||
|
- run: yarn build
|
||||||
- run: yarn test
|
- run: yarn test
|
||||||
- uses: codecov/codecov-action@v3
|
- uses: codecov/codecov-action@v3
|
||||||
with:
|
with:
|
||||||
|
@ -77,28 +78,21 @@ jobs:
|
||||||
- run: yarn bootstrap
|
- run: yarn bootstrap
|
||||||
- run: yarn test:pro
|
- run: yarn test:pro
|
||||||
|
|
||||||
# integration-test:
|
integration-test:
|
||||||
# runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
# services:
|
steps:
|
||||||
# couchdb:
|
- uses: actions/checkout@v2
|
||||||
# image: ibmcom/couchdb3
|
- name: Use Node.js 14.x
|
||||||
# env:
|
uses: actions/setup-node@v1
|
||||||
# COUCHDB_PASSWORD: budibase
|
with:
|
||||||
# COUCHDB_USER: budibase
|
node-version: 14.x
|
||||||
# ports:
|
- name: Install Pro
|
||||||
# - 4567:5984
|
run: yarn install:pro $BRANCH $BASE_BRANCH
|
||||||
# steps:
|
- run: yarn && yarn bootstrap && yarn build
|
||||||
# - uses: actions/checkout@v2
|
- run: |
|
||||||
# - name: Use Node.js 14.x
|
cd qa-core
|
||||||
# uses: actions/setup-node@v1
|
yarn setup
|
||||||
# with:
|
yarn test:ci
|
||||||
# node-version: 14.x
|
env:
|
||||||
# - name: Install Pro
|
BB_ADMIN_USER_EMAIL: admin
|
||||||
# run: yarn install:pro $BRANCH $BASE_BRANCH
|
BB_ADMIN_USER_PASSWORD: admin
|
||||||
# - run: yarn
|
|
||||||
# - run: yarn bootstrap
|
|
||||||
# - run: yarn build
|
|
||||||
# - run: |
|
|
||||||
# cd qa-core
|
|
||||||
# yarn
|
|
||||||
# yarn api:test:ci
|
|
||||||
|
|
|
@ -62,7 +62,6 @@ jobs:
|
||||||
- name: Build/release Docker images
|
- name: Build/release Docker images
|
||||||
run: |
|
run: |
|
||||||
docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
|
docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
|
||||||
yarn build
|
|
||||||
yarn build:docker:develop
|
yarn build:docker:develop
|
||||||
env:
|
env:
|
||||||
DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
|
DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
|
||||||
|
|
|
@ -75,7 +75,6 @@ jobs:
|
||||||
- name: Build/release Docker images
|
- name: Build/release Docker images
|
||||||
run: |
|
run: |
|
||||||
docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
|
docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
|
||||||
yarn build
|
|
||||||
yarn build:docker
|
yarn build:docker
|
||||||
env:
|
env:
|
||||||
DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
|
DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
|
||||||
|
@ -107,7 +106,7 @@ jobs:
|
||||||
git pull
|
git pull
|
||||||
mkdir sync
|
mkdir sync
|
||||||
echo "Packaging chart to sync dir"
|
echo "Packaging chart to sync dir"
|
||||||
helm package charts/budibase --version 0.0.0-master --app-version "$RELEASE_VERSION" --destination sync
|
helm package charts/budibase --version 0.0.0-master --app-version v"$RELEASE_VERSION" --destination sync
|
||||||
echo "Packaging successful"
|
echo "Packaging successful"
|
||||||
git checkout gh-pages
|
git checkout gh-pages
|
||||||
echo "Indexing helm repo"
|
echo "Indexing helm repo"
|
||||||
|
|
|
@ -0,0 +1,14 @@
|
||||||
|
node_modules
|
||||||
|
**/node_modules
|
||||||
|
|
||||||
|
**/dist/
|
||||||
|
**/.routify/
|
||||||
|
|
||||||
|
**/coverage/
|
||||||
|
**/yarn-error.log
|
||||||
|
|
||||||
|
**/prebuilds/
|
||||||
|
**/build/
|
||||||
|
|
||||||
|
packages/server/builder/*
|
||||||
|
packages/server/client/*
|
|
@ -1 +1 @@
|
||||||
3.11.1
|
3.10.0
|
|
@ -1,2 +1,2 @@
|
||||||
nodejs 14.19.3
|
nodejs 14.20.1
|
||||||
python 3.10.0
|
python 3.10.0
|
34
README.md
34
README.md
|
@ -216,35 +216,9 @@ If you are having issues between updates of the builder, please use the guide [h
|
||||||
|
|
||||||
Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/docs/en/emoji-key)):
|
Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/docs/en/emoji-key)):
|
||||||
|
|
||||||
<!-- ALL-CONTRIBUTORS-LIST:START - Do not remove or modify this section -->
|
|
||||||
<!-- prettier-ignore-start -->
|
|
||||||
<!-- markdownlint-disable -->
|
|
||||||
<table>
|
|
||||||
<tr>
|
|
||||||
<td align="center"><a href="http://martinmck.com"><img src="https://avatars1.githubusercontent.com/u/11256663?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Martin McKeaveney</b></sub></a><br /><a href="https://github.com/Budibase/budibase/commits?author=shogunpurple" title="Code">💻</a> <a href="https://github.com/Budibase/budibase/commits?author=shogunpurple" title="Documentation">📖</a> <a href="https://github.com/Budibase/budibase/commits?author=shogunpurple" title="Tests">⚠️</a> <a href="#infra-shogunpurple" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a></td>
|
|
||||||
<td align="center"><a href="http://www.michaeldrury.co.uk/"><img src="https://avatars2.githubusercontent.com/u/4407001?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Michael Drury</b></sub></a><br /><a href="https://github.com/Budibase/budibase/commits?author=mike12345567" title="Documentation">📖</a> <a href="https://github.com/Budibase/budibase/commits?author=mike12345567" title="Code">💻</a> <a href="https://github.com/Budibase/budibase/commits?author=mike12345567" title="Tests">⚠️</a> <a href="#infra-mike12345567" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a></td>
|
|
||||||
<td align="center"><a href="https://github.com/aptkingston"><img src="https://avatars3.githubusercontent.com/u/9075550?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Andrew Kingston</b></sub></a><br /><a href="https://github.com/Budibase/budibase/commits?author=aptkingston" title="Documentation">📖</a> <a href="https://github.com/Budibase/budibase/commits?author=aptkingston" title="Code">💻</a> <a href="https://github.com/Budibase/budibase/commits?author=aptkingston" title="Tests">⚠️</a> <a href="#design-aptkingston" title="Design">🎨</a></td>
|
|
||||||
<td align="center"><a href="https://budibase.com/"><img src="https://avatars3.githubusercontent.com/u/3524181?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Michael Shanks</b></sub></a><br /><a href="https://github.com/Budibase/budibase/commits?author=mjashanks" title="Documentation">📖</a> <a href="https://github.com/Budibase/budibase/commits?author=mjashanks" title="Code">💻</a> <a href="https://github.com/Budibase/budibase/commits?author=mjashanks" title="Tests">⚠️</a></td>
|
|
||||||
<td align="center"><a href="https://github.com/kevmodrome"><img src="https://avatars3.githubusercontent.com/u/534488?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Kevin Åberg Kultalahti</b></sub></a><br /><a href="https://github.com/Budibase/budibase/commits?author=kevmodrome" title="Documentation">📖</a> <a href="https://github.com/Budibase/budibase/commits?author=kevmodrome" title="Code">💻</a> <a href="https://github.com/Budibase/budibase/commits?author=kevmodrome" title="Tests">⚠️</a></td>
|
|
||||||
<td align="center"><a href="https://www.budibase.com/"><img src="https://avatars2.githubusercontent.com/u/49767913?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Joe</b></sub></a><br /><a href="https://github.com/Budibase/budibase/commits?author=joebudi" title="Documentation">📖</a> <a href="https://github.com/Budibase/budibase/commits?author=joebudi" title="Code">💻</a> <a href="#content-joebudi" title="Content">🖋</a> <a href="#design-joebudi" title="Design">🎨</a></td>
|
|
||||||
<td align="center"><a href="https://github.com/Rory-Powell"><img src="https://avatars.githubusercontent.com/u/8755148?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Rory Powell</b></sub></a><br /><a href="https://github.com/Budibase/budibase/commits?author=Rory-Powell" title="Code">💻</a> <a href="https://github.com/Budibase/budibase/commits?author=Rory-Powell" title="Documentation">📖</a> <a href="https://github.com/Budibase/budibase/commits?author=Rory-Powell" title="Tests">⚠️</a></td>
|
|
||||||
</tr>
|
|
||||||
<tr>
|
|
||||||
<td align="center"><a href="https://github.com/PClmnt"><img src="https://avatars.githubusercontent.com/u/5665926?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Peter Clement</b></sub></a><br /><a href="https://github.com/Budibase/budibase/commits?author=PClmnt" title="Code">💻</a> <a href="https://github.com/Budibase/budibase/commits?author=PClmnt" title="Documentation">📖</a> <a href="https://github.com/Budibase/budibase/commits?author=PClmnt" title="Tests">⚠️</a></td>
|
|
||||||
<td align="center"><a href="https://github.com/Conor-Mack"><img src="https://avatars1.githubusercontent.com/u/36074859?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Conor_Mack</b></sub></a><br /><a href="https://github.com/Budibase/budibase/commits?author=Conor-Mack" title="Code">💻</a> <a href="https://github.com/Budibase/budibase/commits?author=Conor-Mack" title="Tests">⚠️</a></td>
|
|
||||||
<td align="center"><a href="https://github.com/pngwn"><img src="https://avatars1.githubusercontent.com/u/12937446?v=4?s=100" width="100px;" alt=""/><br /><sub><b>pngwn</b></sub></a><br /><a href="https://github.com/Budibase/budibase/commits?author=pngwn" title="Code">💻</a> <a href="https://github.com/Budibase/budibase/commits?author=pngwn" title="Tests">⚠️</a></td>
|
|
||||||
<td align="center"><a href="https://github.com/HugoLd"><img src="https://avatars0.githubusercontent.com/u/26521848?v=4?s=100" width="100px;" alt=""/><br /><sub><b>HugoLd</b></sub></a><br /><a href="https://github.com/Budibase/budibase/commits?author=HugoLd" title="Code">💻</a></td>
|
|
||||||
<td align="center"><a href="https://github.com/victoriasloan"><img src="https://avatars.githubusercontent.com/u/9913651?v=4?s=100" width="100px;" alt=""/><br /><sub><b>victoriasloan</b></sub></a><br /><a href="https://github.com/Budibase/budibase/commits?author=victoriasloan" title="Code">💻</a></td>
|
|
||||||
<td align="center"><a href="https://github.com/yashank09"><img src="https://avatars.githubusercontent.com/u/37672190?v=4?s=100" width="100px;" alt=""/><br /><sub><b>yashank09</b></sub></a><br /><a href="https://github.com/Budibase/budibase/commits?author=yashank09" title="Code">💻</a></td>
|
|
||||||
<td align="center"><a href="https://github.com/SOVLOOKUP"><img src="https://avatars.githubusercontent.com/u/53158137?v=4?s=100" width="100px;" alt=""/><br /><sub><b>SOVLOOKUP</b></sub></a><br /><a href="https://github.com/Budibase/budibase/commits?author=SOVLOOKUP" title="Code">💻</a></td>
|
|
||||||
<td align="center"><a href="https://github.com/seoulaja"><img src="https://avatars.githubusercontent.com/u/15101654?v=4?s=100" width="100px;" alt=""/><br /><sub><b>seoulaja</b></sub></a><br /><a href="#translation-seoulaja" title="Translation">🌍</a></td>
|
|
||||||
<td align="center"><a href="https://github.com/mslourens"><img src="https://avatars.githubusercontent.com/u/1907152?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Maurits Lourens</b></sub></a><br /><a href="https://github.com/Budibase/budibase/commits?author=mslourens" title="Tests">⚠️</a> <a href="https://github.com/Budibase/budibase/commits?author=mslourens" title="Code">💻</a></td>
|
|
||||||
</tr>
|
|
||||||
</table>
|
|
||||||
|
|
||||||
<!-- markdownlint-restore -->
|
<a href="https://github.com/Budibase/budibase/graphs/contributors">
|
||||||
<!-- prettier-ignore-end -->
|
<img src="https://contrib.rocks/image?repo=Budibase/budibase" />
|
||||||
|
</a>
|
||||||
|
|
||||||
<!-- ALL-CONTRIBUTORS-LIST:END -->
|
Made with [contrib.rocks](https://contrib.rocks).
|
||||||
|
|
||||||
This project follows the [all-contributors](https://github.com/all-contributors/all-contributors) specification. Contributions of any kind welcome!
|
|
||||||
|
|
|
@ -14,6 +14,9 @@ metadata:
|
||||||
alb.ingress.kubernetes.io/listen-ports: '[{"HTTP": 80}, {"HTTPS":443}]'
|
alb.ingress.kubernetes.io/listen-ports: '[{"HTTP": 80}, {"HTTPS":443}]'
|
||||||
alb.ingress.kubernetes.io/certificate-arn: {{ .Values.ingress.certificateArn }}
|
alb.ingress.kubernetes.io/certificate-arn: {{ .Values.ingress.certificateArn }}
|
||||||
{{- end }}
|
{{- end }}
|
||||||
|
{{- if .Values.ingress.sslPolicy }}
|
||||||
|
alb.ingress.kubernetes.io/actions.ssl-policy: {{ .Values.ingress.sslPolicy }}
|
||||||
|
{{- end }}
|
||||||
{{- if .Values.ingress.securityGroups }}
|
{{- if .Values.ingress.securityGroups }}
|
||||||
alb.ingress.kubernetes.io/security-groups: {{ .Values.ingress.securityGroups }}
|
alb.ingress.kubernetes.io/security-groups: {{ .Values.ingress.securityGroups }}
|
||||||
{{- end }}
|
{{- end }}
|
||||||
|
|
|
@ -64,6 +64,8 @@ spec:
|
||||||
value: {{ .Values.globals.enableAnalytics | quote }}
|
value: {{ .Values.globals.enableAnalytics | quote }}
|
||||||
- name: API_ENCRYPTION_KEY
|
- name: API_ENCRYPTION_KEY
|
||||||
value: {{ .Values.globals.apiEncryptionKey | quote }}
|
value: {{ .Values.globals.apiEncryptionKey | quote }}
|
||||||
|
- name: HTTP_LOGGING
|
||||||
|
value: {{ .Values.services.apps.httpLogging | quote }}
|
||||||
- name: INTERNAL_API_KEY
|
- name: INTERNAL_API_KEY
|
||||||
valueFrom:
|
valueFrom:
|
||||||
secretKeyRef:
|
secretKeyRef:
|
||||||
|
@ -119,7 +121,7 @@ spec:
|
||||||
- name: MULTI_TENANCY
|
- name: MULTI_TENANCY
|
||||||
value: {{ .Values.globals.multiTenancy | quote }}
|
value: {{ .Values.globals.multiTenancy | quote }}
|
||||||
- name: LOG_LEVEL
|
- name: LOG_LEVEL
|
||||||
value: {{ default "info" .Values.services.apps.logLevel | quote }}
|
value: {{ .Values.services.apps.logLevel | quote }}
|
||||||
- name: REDIS_PASSWORD
|
- name: REDIS_PASSWORD
|
||||||
value: {{ .Values.services.redis.password }}
|
value: {{ .Values.services.redis.password }}
|
||||||
- name: REDIS_URL
|
- name: REDIS_URL
|
||||||
|
@ -180,18 +182,6 @@ spec:
|
||||||
- name: DD_APM_DD_URL
|
- name: DD_APM_DD_URL
|
||||||
value: https://trace.agent.datadoghq.eu
|
value: https://trace.agent.datadoghq.eu
|
||||||
{{ end }}
|
{{ end }}
|
||||||
{{ if .Values.globals.elasticApmEnabled }}
|
|
||||||
- name: ELASTIC_APM_ENABLED
|
|
||||||
value: {{ .Values.globals.elasticApmEnabled | quote }}
|
|
||||||
{{ end }}
|
|
||||||
{{ if .Values.globals.elasticApmSecretToken }}
|
|
||||||
- name: ELASTIC_APM_SECRET_TOKEN
|
|
||||||
value: {{ .Values.globals.elasticApmSecretToken | quote }}
|
|
||||||
{{ end }}
|
|
||||||
{{ if .Values.globals.elasticApmServerUrl }}
|
|
||||||
- name: ELASTIC_APM_SERVER_URL
|
|
||||||
value: {{ .Values.globals.elasticApmServerUrl | quote }}
|
|
||||||
{{ end }}
|
|
||||||
{{ if .Values.globals.globalAgentHttpProxy }}
|
{{ if .Values.globals.globalAgentHttpProxy }}
|
||||||
- name: GLOBAL_AGENT_HTTP_PROXY
|
- name: GLOBAL_AGENT_HTTP_PROXY
|
||||||
value: {{ .Values.globals.globalAgentHttpProxy | quote }}
|
value: {{ .Values.globals.globalAgentHttpProxy | quote }}
|
||||||
|
@ -209,7 +199,7 @@ spec:
|
||||||
value: {{ .Values.services.tlsRejectUnauthorized }}
|
value: {{ .Values.services.tlsRejectUnauthorized }}
|
||||||
{{ end }}
|
{{ end }}
|
||||||
|
|
||||||
image: budibase/apps:{{ .Values.globals.appVersion }}
|
image: budibase/apps:{{ .Values.globals.appVersion | default .Chart.AppVersion }}
|
||||||
imagePullPolicy: Always
|
imagePullPolicy: Always
|
||||||
livenessProbe:
|
livenessProbe:
|
||||||
httpGet:
|
httpGet:
|
||||||
|
|
|
@ -37,7 +37,7 @@ spec:
|
||||||
{{ end }}
|
{{ end }}
|
||||||
spec:
|
spec:
|
||||||
containers:
|
containers:
|
||||||
- image: budibase/proxy:{{ .Values.globals.appVersion }}
|
- image: budibase/proxy:{{ .Values.globals.appVersion | default .Chart.AppVersion }}
|
||||||
imagePullPolicy: Always
|
imagePullPolicy: Always
|
||||||
name: proxy-service
|
name: proxy-service
|
||||||
ports:
|
ports:
|
||||||
|
|
|
@ -64,6 +64,8 @@ spec:
|
||||||
{{ end }}
|
{{ end }}
|
||||||
- name: API_ENCRYPTION_KEY
|
- name: API_ENCRYPTION_KEY
|
||||||
value: {{ .Values.globals.apiEncryptionKey | quote }}
|
value: {{ .Values.globals.apiEncryptionKey | quote }}
|
||||||
|
- name: HTTP_LOGGING
|
||||||
|
value: {{ .Values.services.worker.httpLogging | quote }}
|
||||||
- name: INTERNAL_API_KEY
|
- name: INTERNAL_API_KEY
|
||||||
valueFrom:
|
valueFrom:
|
||||||
secretKeyRef:
|
secretKeyRef:
|
||||||
|
@ -115,7 +117,7 @@ spec:
|
||||||
- name: MULTI_TENANCY
|
- name: MULTI_TENANCY
|
||||||
value: {{ .Values.globals.multiTenancy | quote }}
|
value: {{ .Values.globals.multiTenancy | quote }}
|
||||||
- name: LOG_LEVEL
|
- name: LOG_LEVEL
|
||||||
value: {{ default "info" .Values.services.worker.logLevel | quote }}
|
value: {{ .Values.services.worker.logLevel | quote }}
|
||||||
- name: REDIS_PASSWORD
|
- name: REDIS_PASSWORD
|
||||||
value: {{ .Values.services.redis.password | quote }}
|
value: {{ .Values.services.redis.password | quote }}
|
||||||
- name: REDIS_URL
|
- name: REDIS_URL
|
||||||
|
@ -170,18 +172,6 @@ spec:
|
||||||
- name: DD_APM_DD_URL
|
- name: DD_APM_DD_URL
|
||||||
value: https://trace.agent.datadoghq.eu
|
value: https://trace.agent.datadoghq.eu
|
||||||
{{ end }}
|
{{ end }}
|
||||||
{{ if .Values.globals.elasticApmEnabled }}
|
|
||||||
- name: ELASTIC_APM_ENABLED
|
|
||||||
value: {{ .Values.globals.elasticApmEnabled | quote }}
|
|
||||||
{{ end }}
|
|
||||||
{{ if .Values.globals.elasticApmSecretToken }}
|
|
||||||
- name: ELASTIC_APM_SECRET_TOKEN
|
|
||||||
value: {{ .Values.globals.elasticApmSecretToken | quote }}
|
|
||||||
{{ end }}
|
|
||||||
{{ if .Values.globals.elasticApmServerUrl }}
|
|
||||||
- name: ELASTIC_APM_SERVER_URL
|
|
||||||
value: {{ .Values.globals.elasticApmServerUrl | quote }}
|
|
||||||
{{ end }}
|
|
||||||
{{ if .Values.globals.globalAgentHttpProxy }}
|
{{ if .Values.globals.globalAgentHttpProxy }}
|
||||||
- name: GLOBAL_AGENT_HTTP_PROXY
|
- name: GLOBAL_AGENT_HTTP_PROXY
|
||||||
value: {{ .Values.globals.globalAgentHttpProxy | quote }}
|
value: {{ .Values.globals.globalAgentHttpProxy | quote }}
|
||||||
|
@ -198,8 +188,7 @@ spec:
|
||||||
- name: NODE_TLS_REJECT_UNAUTHORIZED
|
- name: NODE_TLS_REJECT_UNAUTHORIZED
|
||||||
value: {{ .Values.services.tlsRejectUnauthorized }}
|
value: {{ .Values.services.tlsRejectUnauthorized }}
|
||||||
{{ end }}
|
{{ end }}
|
||||||
|
image: budibase/worker:{{ .Values.globals.appVersion | default .Chart.AppVersion }}
|
||||||
image: budibase/worker:{{ .Values.globals.appVersion }}
|
|
||||||
imagePullPolicy: Always
|
imagePullPolicy: Always
|
||||||
livenessProbe:
|
livenessProbe:
|
||||||
httpGet:
|
httpGet:
|
||||||
|
|
|
@ -74,13 +74,12 @@ tolerations: []
|
||||||
affinity: {}
|
affinity: {}
|
||||||
|
|
||||||
globals:
|
globals:
|
||||||
appVersion: "latest"
|
appVersion: "" # Use as an override to .Chart.AppVersion
|
||||||
budibaseEnv: PRODUCTION
|
budibaseEnv: PRODUCTION
|
||||||
tenantFeatureFlags: "*:LICENSING,*:USER_GROUPS,*:ONBOARDING_TOUR"
|
tenantFeatureFlags: "*:LICENSING,*:USER_GROUPS,*:ONBOARDING_TOUR"
|
||||||
enableAnalytics: "1"
|
enableAnalytics: "1"
|
||||||
sentryDSN: ""
|
sentryDSN: ""
|
||||||
posthogToken: "phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU"
|
posthogToken: "phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU"
|
||||||
logLevel: info
|
|
||||||
selfHosted: "1" # set to 0 for budibase cloud environment, set to 1 for self-hosted setup
|
selfHosted: "1" # set to 0 for budibase cloud environment, set to 1 for self-hosted setup
|
||||||
multiTenancy: "0" # set to 0 to disable multiple orgs, set to 1 to enable multiple orgs
|
multiTenancy: "0" # set to 0 to disable multiple orgs, set to 1 to enable multiple orgs
|
||||||
accountPortalUrl: ""
|
accountPortalUrl: ""
|
||||||
|
@ -107,9 +106,6 @@ globals:
|
||||||
smtp:
|
smtp:
|
||||||
enabled: false
|
enabled: false
|
||||||
|
|
||||||
# elasticApmEnabled:
|
|
||||||
# elasticApmSecretToken:
|
|
||||||
# elasticApmServerUrl:
|
|
||||||
# globalAgentHttpProxy:
|
# globalAgentHttpProxy:
|
||||||
# globalAgentHttpsProxy:
|
# globalAgentHttpsProxy:
|
||||||
# globalAgentNoProxy:
|
# globalAgentNoProxy:
|
||||||
|
@ -137,6 +133,7 @@ services:
|
||||||
port: 4002
|
port: 4002
|
||||||
replicaCount: 1
|
replicaCount: 1
|
||||||
logLevel: info
|
logLevel: info
|
||||||
|
httpLogging: 1
|
||||||
resources: {}
|
resources: {}
|
||||||
# nodeDebug: "" # set the value of NODE_DEBUG
|
# nodeDebug: "" # set the value of NODE_DEBUG
|
||||||
# annotations:
|
# annotations:
|
||||||
|
@ -147,6 +144,8 @@ services:
|
||||||
worker:
|
worker:
|
||||||
port: 4003
|
port: 4003
|
||||||
replicaCount: 1
|
replicaCount: 1
|
||||||
|
logLevel: info
|
||||||
|
httpLogging: 1
|
||||||
resources: {}
|
resources: {}
|
||||||
# annotations:
|
# annotations:
|
||||||
# co.elastic.logs/multiline.type: pattern
|
# co.elastic.logs/multiline.type: pattern
|
||||||
|
|
|
@ -61,5 +61,18 @@ http://127.0.0.1:10000/builder/admin
|
||||||
| **NOTE**: If you are working on a M1 Apple Silicon, you will need to uncomment `# platform: linux/amd64` line in
|
| **NOTE**: If you are working on a M1 Apple Silicon, you will need to uncomment `# platform: linux/amd64` line in
|
||||||
[hosting/docker-compose-dev.yaml](../hosting/docker-compose.dev.yaml)
|
[hosting/docker-compose-dev.yaml](../hosting/docker-compose.dev.yaml)
|
||||||
|
|
||||||
### Troubleshooting
|
### Troubleshootings
|
||||||
If there are errors with the `yarn setup` command, you can try installing nvm and node 14. This is the same as the instructions for Debian 11.
|
|
||||||
|
#### Yarn setup errors
|
||||||
|
|
||||||
|
If there are errors with the `yarn setup` command, you can try installing nvm and node 14. This is the same as the instructions for Debian 11.
|
||||||
|
|
||||||
|
#### Node 14.20.1 not supported for arm64
|
||||||
|
|
||||||
|
If you are working with M1 or M2 Mac and trying the Node installation via `nvm`, probably you will find the error `curl: (22) The requested URL returned error: 404`.
|
||||||
|
|
||||||
|
Version `v14.20.1` is not supported for arm64; in order to use it, you can switch the CPU architecture for this by the following command:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
arch -x86_64 zsh #Run this before nvm install
|
||||||
|
```
|
||||||
|
|
10
lerna.json
10
lerna.json
|
@ -1,9 +1,8 @@
|
||||||
{
|
{
|
||||||
"version": "2.5.9",
|
"version": "2.5.10-alpha.1",
|
||||||
"npmClient": "yarn",
|
"npmClient": "yarn",
|
||||||
"packages": [
|
"useWorkspaces": true,
|
||||||
"packages/*"
|
"packages": ["packages/*"],
|
||||||
],
|
|
||||||
"command": {
|
"command": {
|
||||||
"publish": {
|
"publish": {
|
||||||
"ignoreChanges": [
|
"ignoreChanges": [
|
||||||
|
@ -13,6 +12,9 @@
|
||||||
"# We ignore every JSON file, except for built-in-modules, built-ins and plugins defined in babel-preset-env/data.",
|
"# We ignore every JSON file, except for built-in-modules, built-ins and plugins defined in babel-preset-env/data.",
|
||||||
"@(!(built-in-modules|built-ins|plugins|package)).json"
|
"@(!(built-in-modules|built-ins|plugins|package)).json"
|
||||||
]
|
]
|
||||||
|
},
|
||||||
|
"run": {
|
||||||
|
"loadEnvFiles": false
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -0,0 +1,10 @@
|
||||||
|
{
|
||||||
|
"tasksRunnerOptions": {
|
||||||
|
"default": {
|
||||||
|
"runner": "nx/tasks-runners/default",
|
||||||
|
"options": {
|
||||||
|
"cacheableOperations": ["build", "test"]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
54
package.json
54
package.json
|
@ -3,7 +3,6 @@
|
||||||
"private": true,
|
"private": true,
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@rollup/plugin-json": "^4.0.2",
|
"@rollup/plugin-json": "^4.0.2",
|
||||||
"@types/supertest": "^2.0.12",
|
|
||||||
"@typescript-eslint/parser": "5.45.0",
|
"@typescript-eslint/parser": "5.45.0",
|
||||||
"babel-eslint": "^10.0.3",
|
"babel-eslint": "^10.0.3",
|
||||||
"eslint": "^7.28.0",
|
"eslint": "^7.28.0",
|
||||||
|
@ -12,7 +11,7 @@
|
||||||
"husky": "^7.0.1",
|
"husky": "^7.0.1",
|
||||||
"js-yaml": "^4.1.0",
|
"js-yaml": "^4.1.0",
|
||||||
"kill-port": "^1.6.1",
|
"kill-port": "^1.6.1",
|
||||||
"lerna": "3.14.1",
|
"lerna": "^6.6.1",
|
||||||
"madge": "^6.0.0",
|
"madge": "^6.0.0",
|
||||||
"prettier": "^2.3.1",
|
"prettier": "^2.3.1",
|
||||||
"prettier-plugin-svelte": "^2.3.0",
|
"prettier-plugin-svelte": "^2.3.0",
|
||||||
|
@ -23,11 +22,12 @@
|
||||||
},
|
},
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"setup": "node ./hosting/scripts/setup.js && yarn && yarn bootstrap && yarn build && yarn dev",
|
"setup": "node ./hosting/scripts/setup.js && yarn && yarn bootstrap && yarn build && yarn dev",
|
||||||
"bootstrap": "lerna bootstrap && lerna link && ./scripts/link-dependencies.sh",
|
"bootstrap": "lerna link && ./scripts/link-dependencies.sh",
|
||||||
"build": "lerna run build",
|
"build": "lerna run --stream build",
|
||||||
"build:dev": "lerna run prebuild && tsc --build --watch --preserveWatchOutput",
|
"build:dev": "lerna run --stream prebuild && tsc --build --watch --preserveWatchOutput",
|
||||||
"build:backend": "lerna run build --ignore @budibase/client --ignore @budibase/bbui --ignore @budibase/builder --ignore @budibase/cli",
|
"backend:bootstrap": "./scripts/scopeBackend.sh && yarn run bootstrap",
|
||||||
"build:sdk": "lerna run build:sdk",
|
"backend:build": "./scripts/scopeBackend.sh 'lerna run --stream build'",
|
||||||
|
"build:sdk": "lerna run --stream build:sdk",
|
||||||
"deps:circular": "madge packages/server/dist/index.js packages/worker/src/index.ts packages/backend-core/dist/src/index.js packages/cli/src/index.js --circular",
|
"deps:circular": "madge packages/server/dist/index.js packages/worker/src/index.ts packages/backend-core/dist/src/index.js packages/cli/src/index.js --circular",
|
||||||
"release": "lerna publish ${RELEASE_VERSION_TYPE:-patch} --yes --force-publish && yarn release:pro",
|
"release": "lerna publish ${RELEASE_VERSION_TYPE:-patch} --yes --force-publish && yarn release:pro",
|
||||||
"release:develop": "lerna publish prerelease --yes --force-publish --dist-tag develop --exact && yarn release:pro:develop",
|
"release:develop": "lerna publish prerelease --yes --force-publish --dist-tag develop --exact && yarn release:pro:develop",
|
||||||
|
@ -36,15 +36,16 @@
|
||||||
"restore": "yarn run clean && yarn run bootstrap && yarn run build",
|
"restore": "yarn run clean && yarn run bootstrap && yarn run build",
|
||||||
"nuke": "yarn run nuke:packages && yarn run nuke:docker",
|
"nuke": "yarn run nuke:packages && yarn run nuke:docker",
|
||||||
"nuke:packages": "yarn run restore",
|
"nuke:packages": "yarn run restore",
|
||||||
"nuke:docker": "lerna run --parallel dev:stack:nuke",
|
"nuke:docker": "lerna run --stream --parallel dev:stack:nuke",
|
||||||
"clean": "lerna clean",
|
"clean": "lerna clean",
|
||||||
"kill-builder": "kill-port 3000",
|
"kill-builder": "kill-port 3000",
|
||||||
"kill-server": "kill-port 4001 4002",
|
"kill-server": "kill-port 4001 4002",
|
||||||
"kill-all": "yarn run kill-builder && yarn run kill-server",
|
"kill-all": "yarn run kill-builder && yarn run kill-server",
|
||||||
"dev": "yarn run kill-all && lerna link && lerna run --parallel dev:builder --concurrency 1",
|
"dev": "yarn run kill-all && lerna link && lerna run --stream --parallel dev:builder --concurrency 1 --stream",
|
||||||
"dev:noserver": "yarn run kill-builder && lerna link && lerna run dev:stack:up && lerna run --parallel dev:builder --concurrency 1 --ignore @budibase/backend-core --ignore @budibase/server --ignore @budibase/worker",
|
"dev:noserver": "yarn run kill-builder && lerna link && lerna run --stream dev:stack:up && lerna run --stream --parallel dev:builder --concurrency 1 --ignore @budibase/backend-core --ignore @budibase/server --ignore @budibase/worker",
|
||||||
"dev:server": "yarn run kill-server && lerna run --parallel dev:builder --concurrency 1 --scope @budibase/backend-core --scope @budibase/worker --scope @budibase/server",
|
"dev:server": "yarn run kill-server && lerna run --stream --parallel dev:builder --concurrency 1 --scope @budibase/backend-core --scope @budibase/worker --scope @budibase/server",
|
||||||
"test": "lerna run test --stream",
|
"dev:built": "yarn run kill-all && cd packages/server && yarn dev:stack:up && cd ../../ && lerna run --stream --parallel dev:built",
|
||||||
|
"test": "lerna run --stream test --stream",
|
||||||
"test:pro": "bash scripts/pro/test.sh",
|
"test:pro": "bash scripts/pro/test.sh",
|
||||||
"lint:eslint": "eslint packages && eslint qa-core",
|
"lint:eslint": "eslint packages && eslint qa-core",
|
||||||
"lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\" && prettier --check \"qa-core/**/*.{js,ts,svelte}\"",
|
"lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\" && prettier --check \"qa-core/**/*.{js,ts,svelte}\"",
|
||||||
|
@ -52,12 +53,12 @@
|
||||||
"lint:fix:eslint": "eslint --fix packages qa-core",
|
"lint:fix:eslint": "eslint --fix packages qa-core",
|
||||||
"lint:fix:prettier": "prettier --write \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\" && prettier --write \"qa-core/**/*.{js,ts,svelte}\"",
|
"lint:fix:prettier": "prettier --write \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\" && prettier --write \"qa-core/**/*.{js,ts,svelte}\"",
|
||||||
"lint:fix": "yarn run lint:fix:prettier && yarn run lint:fix:eslint",
|
"lint:fix": "yarn run lint:fix:prettier && yarn run lint:fix:eslint",
|
||||||
"build:specs": "lerna run specs",
|
"build:specs": "lerna run --stream specs",
|
||||||
"build:docker": "lerna run build:docker && npm run build:docker:proxy && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh $BUDIBASE_RELEASE_VERSION && cd -",
|
"build:docker": "lerna run --stream build:docker && npm run build:docker:proxy && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh $BUDIBASE_RELEASE_VERSION && cd -",
|
||||||
"build:docker:pre": "lerna run build && lerna run predocker",
|
"build:docker:pre": "lerna run --stream build && lerna run --stream predocker",
|
||||||
"build:docker:proxy": "docker build hosting/proxy -t proxy-service",
|
"build:docker:proxy": "docker build hosting/proxy -t proxy-service",
|
||||||
"build:docker:selfhost": "lerna run build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh latest && cd -",
|
"build:docker:selfhost": "lerna run --stream build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh latest && cd -",
|
||||||
"build:docker:develop": "node scripts/pinVersions && lerna run build:docker && npm run build:docker:proxy && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh develop && cd -",
|
"build:docker:develop": "node scripts/pinVersions && lerna run --stream build:docker && npm run build:docker:proxy && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh develop && cd -",
|
||||||
"build:docker:airgap": "node hosting/scripts/airgapped/airgappedDockerBuild",
|
"build:docker:airgap": "node hosting/scripts/airgapped/airgappedDockerBuild",
|
||||||
"build:digitalocean": "cd hosting/digitalocean && ./build.sh && cd -",
|
"build:digitalocean": "cd hosting/digitalocean && ./build.sh && cd -",
|
||||||
"build:docker:single:multiarch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/single/Dockerfile -t budibase:latest .",
|
"build:docker:single:multiarch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/single/Dockerfile -t budibase:latest .",
|
||||||
|
@ -66,16 +67,16 @@
|
||||||
"build:docker:dependencies": "docker build -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest ./hosting",
|
"build:docker:dependencies": "docker build -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest ./hosting",
|
||||||
"publish:docker:couch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/couchdb/Dockerfile -t budibase/couchdb:latest -t budibase/couchdb:v3.2.1 --push ./hosting/couchdb",
|
"publish:docker:couch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/couchdb/Dockerfile -t budibase/couchdb:latest -t budibase/couchdb:v3.2.1 --push ./hosting/couchdb",
|
||||||
"publish:docker:dependencies": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest -t budibase/dependencies:v3.2.1 --push ./hosting",
|
"publish:docker:dependencies": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest -t budibase/dependencies:v3.2.1 --push ./hosting",
|
||||||
"build:docs": "lerna run build:docs",
|
"build:docs": "lerna run --stream build:docs",
|
||||||
"release:helm": "node scripts/releaseHelmChart",
|
"release:helm": "node scripts/releaseHelmChart",
|
||||||
"env:multi:enable": "lerna run env:multi:enable",
|
"env:multi:enable": "lerna run --stream env:multi:enable",
|
||||||
"env:multi:disable": "lerna run env:multi:disable",
|
"env:multi:disable": "lerna run --stream env:multi:disable",
|
||||||
"env:selfhost:enable": "lerna run env:selfhost:enable",
|
"env:selfhost:enable": "lerna run --stream env:selfhost:enable",
|
||||||
"env:selfhost:disable": "lerna run env:selfhost:disable",
|
"env:selfhost:disable": "lerna run --stream env:selfhost:disable",
|
||||||
"env:localdomain:enable": "./scripts/localdomain.sh enable",
|
"env:localdomain:enable": "./scripts/localdomain.sh enable",
|
||||||
"env:localdomain:disable": "./scripts/localdomain.sh disable",
|
"env:localdomain:disable": "./scripts/localdomain.sh disable",
|
||||||
"env:account:enable": "lerna run env:account:enable",
|
"env:account:enable": "lerna run --stream env:account:enable",
|
||||||
"env:account:disable": "lerna run env:account:disable",
|
"env:account:disable": "lerna run --stream env:account:disable",
|
||||||
"mode:self": "yarn env:selfhost:enable && yarn env:multi:disable && yarn env:account:disable",
|
"mode:self": "yarn env:selfhost:enable && yarn env:multi:disable && yarn env:account:disable",
|
||||||
"mode:cloud": "yarn env:selfhost:disable && yarn env:multi:enable && yarn env:account:disable",
|
"mode:cloud": "yarn env:selfhost:disable && yarn env:multi:enable && yarn env:account:disable",
|
||||||
"mode:account": "yarn mode:cloud && yarn env:account:enable",
|
"mode:account": "yarn mode:cloud && yarn env:account:enable",
|
||||||
|
@ -83,5 +84,10 @@
|
||||||
"postinstall": "husky install",
|
"postinstall": "husky install",
|
||||||
"install:pro": "bash scripts/pro/install.sh",
|
"install:pro": "bash scripts/pro/install.sh",
|
||||||
"dep:clean": "yarn clean && yarn bootstrap"
|
"dep:clean": "yarn clean && yarn bootstrap"
|
||||||
|
},
|
||||||
|
"workspaces": {
|
||||||
|
"packages": [
|
||||||
|
"packages/*"
|
||||||
|
]
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
{
|
{
|
||||||
"name": "@budibase/backend-core",
|
"name": "@budibase/backend-core",
|
||||||
"version": "2.5.9",
|
"version": "2.5.10-alpha.1",
|
||||||
"description": "Budibase backend core libraries used in server and worker",
|
"description": "Budibase backend core libraries used in server and worker",
|
||||||
"main": "dist/src/index.js",
|
"main": "dist/src/index.js",
|
||||||
"types": "dist/src/index.d.ts",
|
"types": "dist/src/index.d.ts",
|
||||||
|
@ -24,7 +24,7 @@
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@budibase/nano": "10.1.2",
|
"@budibase/nano": "10.1.2",
|
||||||
"@budibase/pouchdb-replication-stream": "1.2.10",
|
"@budibase/pouchdb-replication-stream": "1.2.10",
|
||||||
"@budibase/types": "^2.5.9",
|
"@budibase/types": "2.5.10-alpha.1",
|
||||||
"@shopify/jest-koa-mocks": "5.0.1",
|
"@shopify/jest-koa-mocks": "5.0.1",
|
||||||
"@techpass/passport-openidconnect": "0.3.2",
|
"@techpass/passport-openidconnect": "0.3.2",
|
||||||
"aws-cloudfront-sign": "2.2.0",
|
"aws-cloudfront-sign": "2.2.0",
|
||||||
|
@ -39,6 +39,7 @@
|
||||||
"joi": "17.6.0",
|
"joi": "17.6.0",
|
||||||
"jsonwebtoken": "9.0.0",
|
"jsonwebtoken": "9.0.0",
|
||||||
"koa-passport": "4.1.4",
|
"koa-passport": "4.1.4",
|
||||||
|
"koa-pino-logger": "4.0.0",
|
||||||
"lodash": "4.17.21",
|
"lodash": "4.17.21",
|
||||||
"lodash.isarguments": "3.1.0",
|
"lodash.isarguments": "3.1.0",
|
||||||
"node-fetch": "2.6.7",
|
"node-fetch": "2.6.7",
|
||||||
|
@ -46,6 +47,8 @@
|
||||||
"passport-jwt": "4.0.0",
|
"passport-jwt": "4.0.0",
|
||||||
"passport-local": "1.0.0",
|
"passport-local": "1.0.0",
|
||||||
"passport-oauth2-refresh": "^2.1.0",
|
"passport-oauth2-refresh": "^2.1.0",
|
||||||
|
"pino": "8.11.0",
|
||||||
|
"pino-http": "8.3.3",
|
||||||
"posthog-node": "1.3.0",
|
"posthog-node": "1.3.0",
|
||||||
"pouchdb": "7.3.0",
|
"pouchdb": "7.3.0",
|
||||||
"pouchdb-find": "7.2.2",
|
"pouchdb-find": "7.2.2",
|
||||||
|
@ -53,22 +56,20 @@
|
||||||
"sanitize-s3-objectkey": "0.0.1",
|
"sanitize-s3-objectkey": "0.0.1",
|
||||||
"semver": "7.3.7",
|
"semver": "7.3.7",
|
||||||
"tar-fs": "2.1.1",
|
"tar-fs": "2.1.1",
|
||||||
"uuid": "8.3.2",
|
"uuid": "8.3.2"
|
||||||
"zlib": "1.0.5"
|
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
|
"@jest/test-sequencer": "29.5.0",
|
||||||
"@swc/core": "^1.3.25",
|
"@swc/core": "^1.3.25",
|
||||||
"@swc/jest": "^0.2.24",
|
"@swc/jest": "^0.2.24",
|
||||||
"@trendyol/jest-testcontainers": "^2.1.1",
|
"@trendyol/jest-testcontainers": "^2.1.1",
|
||||||
"@types/chance": "1.1.3",
|
"@types/chance": "1.1.3",
|
||||||
"@types/ioredis": "4.28.0",
|
"@types/ioredis": "4.28.0",
|
||||||
"@types/jest": "28.1.1",
|
"@types/jest": "29.5.0",
|
||||||
"@types/koa": "2.13.4",
|
"@types/koa": "2.13.4",
|
||||||
"@types/koa-pino-logger": "3.0.0",
|
|
||||||
"@types/lodash": "4.14.180",
|
"@types/lodash": "4.14.180",
|
||||||
"@types/node": "14.18.20",
|
"@types/node": "14.18.20",
|
||||||
"@types/node-fetch": "2.6.1",
|
"@types/node-fetch": "2.6.1",
|
||||||
"@types/pino-http": "5.8.1",
|
|
||||||
"@types/pouchdb": "6.4.0",
|
"@types/pouchdb": "6.4.0",
|
||||||
"@types/redlock": "4.0.3",
|
"@types/redlock": "4.0.3",
|
||||||
"@types/semver": "7.3.7",
|
"@types/semver": "7.3.7",
|
||||||
|
@ -76,13 +77,15 @@
|
||||||
"@types/uuid": "8.3.4",
|
"@types/uuid": "8.3.4",
|
||||||
"chance": "1.1.8",
|
"chance": "1.1.8",
|
||||||
"ioredis-mock": "5.8.0",
|
"ioredis-mock": "5.8.0",
|
||||||
"jest": "28.1.1",
|
"jest": "29.5.0",
|
||||||
|
"jest-environment-node": "29.5.0",
|
||||||
"jest-serial-runner": "^1.2.1",
|
"jest-serial-runner": "^1.2.1",
|
||||||
"koa": "2.13.4",
|
"koa": "2.13.4",
|
||||||
"nodemon": "2.0.16",
|
"nodemon": "2.0.16",
|
||||||
|
"pino-pretty": "10.0.0",
|
||||||
"pouchdb-adapter-memory": "7.2.2",
|
"pouchdb-adapter-memory": "7.2.2",
|
||||||
"timekeeper": "2.2.0",
|
"timekeeper": "2.2.0",
|
||||||
"ts-jest": "28.0.4",
|
"ts-jest": "29.0.5",
|
||||||
"ts-node": "10.8.1",
|
"ts-node": "10.8.1",
|
||||||
"tsconfig-paths": "4.0.0",
|
"tsconfig-paths": "4.0.0",
|
||||||
"typescript": "4.7.3"
|
"typescript": "4.7.3"
|
||||||
|
|
|
@ -1,4 +1,5 @@
|
||||||
import { structures, testEnv } from "../../../tests"
|
import { structures } from "../../../tests"
|
||||||
|
import { testEnv } from "../../../tests/extra"
|
||||||
import * as auth from "../auth"
|
import * as auth from "../auth"
|
||||||
import * as events from "../../events"
|
import * as events from "../../events"
|
||||||
|
|
||||||
|
|
|
@ -1,7 +1,8 @@
|
||||||
|
import { DBTestConfiguration } from "../../../tests/extra"
|
||||||
import {
|
import {
|
||||||
structures,
|
structures,
|
||||||
DBTestConfiguration,
|
|
||||||
expectFunctionWasCalledTimesWith,
|
expectFunctionWasCalledTimesWith,
|
||||||
|
mocks,
|
||||||
} from "../../../tests"
|
} from "../../../tests"
|
||||||
import { Writethrough } from "../writethrough"
|
import { Writethrough } from "../writethrough"
|
||||||
import { getDB } from "../../db"
|
import { getDB } from "../../db"
|
||||||
|
@ -77,9 +78,9 @@ describe("writethrough", () => {
|
||||||
expect.arrayContaining([current._rev, current._rev, newRev])
|
expect.arrayContaining([current._rev, current._rev, newRev])
|
||||||
)
|
)
|
||||||
expectFunctionWasCalledTimesWith(
|
expectFunctionWasCalledTimesWith(
|
||||||
console.warn,
|
mocks.alerts.logWarn,
|
||||||
2,
|
2,
|
||||||
"bb-warn: Ignoring redlock conflict in write-through cache"
|
"Ignoring redlock conflict in write-through cache"
|
||||||
)
|
)
|
||||||
|
|
||||||
const output = await db.get(current._id)
|
const output = await db.get(current._id)
|
||||||
|
|
|
@ -5,6 +5,8 @@ import {
|
||||||
GoogleInnerConfig,
|
GoogleInnerConfig,
|
||||||
OIDCConfig,
|
OIDCConfig,
|
||||||
OIDCInnerConfig,
|
OIDCInnerConfig,
|
||||||
|
SCIMConfig,
|
||||||
|
SCIMInnerConfig,
|
||||||
SettingsConfig,
|
SettingsConfig,
|
||||||
SettingsInnerConfig,
|
SettingsInnerConfig,
|
||||||
SMTPConfig,
|
SMTPConfig,
|
||||||
|
@ -241,3 +243,10 @@ export async function getSMTPConfig(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// SCIM
|
||||||
|
|
||||||
|
export async function getSCIMConfig(): Promise<SCIMInnerConfig | undefined> {
|
||||||
|
const config = await getConfig<SCIMConfig>(ConfigType.SCIM)
|
||||||
|
return config?.config
|
||||||
|
}
|
||||||
|
|
|
@ -1,9 +1,5 @@
|
||||||
import {
|
import { generator, structures } from "../../../tests"
|
||||||
DBTestConfiguration,
|
import { DBTestConfiguration, testEnv } from "../../../tests/extra"
|
||||||
generator,
|
|
||||||
testEnv,
|
|
||||||
structures,
|
|
||||||
} from "../../../tests"
|
|
||||||
import { ConfigType } from "@budibase/types"
|
import { ConfigType } from "@budibase/types"
|
||||||
import env from "../../environment"
|
import env from "../../environment"
|
||||||
import * as configs from "../configs"
|
import * as configs from "../configs"
|
||||||
|
|
|
@ -14,6 +14,7 @@ export enum ViewName {
|
||||||
USER_BY_APP = "by_app",
|
USER_BY_APP = "by_app",
|
||||||
USER_BY_EMAIL = "by_email2",
|
USER_BY_EMAIL = "by_email2",
|
||||||
BY_API_KEY = "by_api_key",
|
BY_API_KEY = "by_api_key",
|
||||||
|
/** @deprecated - could be deleted */
|
||||||
USER_BY_BUILDERS = "by_builders",
|
USER_BY_BUILDERS = "by_builders",
|
||||||
LINK = "by_link",
|
LINK = "by_link",
|
||||||
ROUTING = "screen_routes",
|
ROUTING = "screen_routes",
|
||||||
|
|
|
@ -22,6 +22,7 @@ export enum Header {
|
||||||
TOKEN = "x-budibase-token",
|
TOKEN = "x-budibase-token",
|
||||||
CSRF_TOKEN = "x-csrf-token",
|
CSRF_TOKEN = "x-csrf-token",
|
||||||
CORRELATION_ID = "x-budibase-correlation-id",
|
CORRELATION_ID = "x-budibase-correlation-id",
|
||||||
|
AUTHORIZATION = "authorization",
|
||||||
}
|
}
|
||||||
|
|
||||||
export enum GlobalRole {
|
export enum GlobalRole {
|
||||||
|
@ -38,6 +39,7 @@ export enum Config {
|
||||||
GOOGLE = "google",
|
GOOGLE = "google",
|
||||||
OIDC = "oidc",
|
OIDC = "oidc",
|
||||||
OIDC_LOGOS = "logos_oidc",
|
OIDC_LOGOS = "logos_oidc",
|
||||||
|
SCIM = "scim",
|
||||||
}
|
}
|
||||||
|
|
||||||
export const MIN_VALID_DATE = new Date(-2147483647000)
|
export const MIN_VALID_DATE = new Date(-2147483647000)
|
||||||
|
|
|
@ -2,7 +2,7 @@
|
||||||
// store an app ID to pretend there is a context
|
// store an app ID to pretend there is a context
|
||||||
import env from "../environment"
|
import env from "../environment"
|
||||||
import Context from "./Context"
|
import Context from "./Context"
|
||||||
import * as conversions from "../db/conversions"
|
import * as conversions from "../docIds/conversions"
|
||||||
import { getDB } from "../db/db"
|
import { getDB } from "../db/db"
|
||||||
import {
|
import {
|
||||||
DocumentType,
|
DocumentType,
|
||||||
|
@ -43,8 +43,12 @@ export function baseGlobalDBName(tenantId: string | undefined | null) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export function getPlatformURL() {
|
||||||
|
return env.PLATFORM_URL
|
||||||
|
}
|
||||||
|
|
||||||
export function isMultiTenant() {
|
export function isMultiTenant() {
|
||||||
return env.MULTI_TENANCY
|
return !!env.MULTI_TENANCY
|
||||||
}
|
}
|
||||||
|
|
||||||
export function isTenantIdSet() {
|
export function isTenantIdSet() {
|
||||||
|
@ -111,10 +115,10 @@ export async function doInContext(appId: string, task: any): Promise<any> {
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function doInTenant(
|
export async function doInTenant<T>(
|
||||||
tenantId: string | null,
|
tenantId: string | null,
|
||||||
task: any
|
task: () => T
|
||||||
): Promise<any> {
|
): Promise<T> {
|
||||||
// make sure default always selected in single tenancy
|
// make sure default always selected in single tenancy
|
||||||
if (!env.MULTI_TENANCY) {
|
if (!env.MULTI_TENANCY) {
|
||||||
tenantId = tenantId || DEFAULT_TENANT_ID
|
tenantId = tenantId || DEFAULT_TENANT_ID
|
||||||
|
@ -214,6 +218,13 @@ export function doInEnvironmentContext(
|
||||||
return newContext(updates, task)
|
return newContext(updates, task)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export function doInScimContext(task: any) {
|
||||||
|
const updates: ContextMap = {
|
||||||
|
isScim: true,
|
||||||
|
}
|
||||||
|
return newContext(updates, task)
|
||||||
|
}
|
||||||
|
|
||||||
export function getEnvironmentVariables() {
|
export function getEnvironmentVariables() {
|
||||||
const context = Context.get()
|
const context = Context.get()
|
||||||
if (!context.environmentVariables) {
|
if (!context.environmentVariables) {
|
||||||
|
@ -270,3 +281,9 @@ export function getDevAppDB(opts?: any): Database {
|
||||||
}
|
}
|
||||||
return getDB(conversions.getDevelopmentAppID(appId), opts)
|
return getDB(conversions.getDevelopmentAppID(appId), opts)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export function isScim(): boolean {
|
||||||
|
const context = Context.get()
|
||||||
|
const scimCall = context?.isScim
|
||||||
|
return !!scimCall
|
||||||
|
}
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
import { testEnv } from "../../../tests"
|
import { testEnv } from "../../../tests/extra"
|
||||||
const context = require("../")
|
import * as context from "../"
|
||||||
const { DEFAULT_TENANT_ID } = require("../../constants")
|
import { DEFAULT_TENANT_ID } from "../../constants"
|
||||||
|
|
||||||
describe("context", () => {
|
describe("context", () => {
|
||||||
describe("doInTenant", () => {
|
describe("doInTenant", () => {
|
||||||
|
@ -131,4 +131,17 @@ describe("context", () => {
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
|
describe("doInScimContext", () => {
|
||||||
|
it("returns true when set", () => {
|
||||||
|
context.doInScimContext(() => {
|
||||||
|
const isScim = context.isScim()
|
||||||
|
expect(isScim).toBe(true)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
it("returns false when not set", () => {
|
||||||
|
const isScim = context.isScim()
|
||||||
|
expect(isScim).toBe(false)
|
||||||
|
})
|
||||||
|
})
|
||||||
})
|
})
|
||||||
|
|
|
@ -6,4 +6,5 @@ export type ContextMap = {
|
||||||
appId?: string
|
appId?: string
|
||||||
identity?: IdentityContext
|
identity?: IdentityContext
|
||||||
environmentVariables?: Record<string, string>
|
environmentVariables?: Record<string, string>
|
||||||
|
isScim?: boolean
|
||||||
}
|
}
|
||||||
|
|
|
@ -15,7 +15,7 @@ import { getCouchInfo } from "./connections"
|
||||||
import { directCouchCall } from "./utils"
|
import { directCouchCall } from "./utils"
|
||||||
import { getPouchDB } from "./pouchDB"
|
import { getPouchDB } from "./pouchDB"
|
||||||
import { WriteStream, ReadStream } from "fs"
|
import { WriteStream, ReadStream } from "fs"
|
||||||
import { newid } from "../../newid"
|
import { newid } from "../../docIds/newid"
|
||||||
|
|
||||||
function buildNano(couchInfo: { url: string; cookie: string }) {
|
function buildNano(couchInfo: { url: string; cookie: string }) {
|
||||||
return Nano({
|
return Nano({
|
||||||
|
|
|
@ -2,9 +2,10 @@ export * from "./couch"
|
||||||
export * from "./db"
|
export * from "./db"
|
||||||
export * from "./utils"
|
export * from "./utils"
|
||||||
export * from "./views"
|
export * from "./views"
|
||||||
export * from "./conversions"
|
export * from "../docIds/conversions"
|
||||||
export { default as Replication } from "./Replication"
|
export { default as Replication } from "./Replication"
|
||||||
// exports to support old export structure
|
// exports to support old export structure
|
||||||
export * from "../constants/db"
|
export * from "../constants/db"
|
||||||
export { getGlobalDBName, baseGlobalDBName } from "../context"
|
export { getGlobalDBName, baseGlobalDBName } from "../context"
|
||||||
export * from "./lucene"
|
export * from "./lucene"
|
||||||
|
export * as searchIndexes from "./searchIndexes"
|
||||||
|
|
|
@ -1,12 +1,14 @@
|
||||||
import fetch from "node-fetch"
|
import fetch from "node-fetch"
|
||||||
import { getCouchInfo } from "./couch"
|
import { getCouchInfo } from "./couch"
|
||||||
import { SearchFilters, Row } from "@budibase/types"
|
import { SearchFilters, Row } from "@budibase/types"
|
||||||
|
import { createUserIndex } from "./searchIndexes/searchIndexes"
|
||||||
|
|
||||||
const QUERY_START_REGEX = /\d[0-9]*:/g
|
const QUERY_START_REGEX = /\d[0-9]*:/g
|
||||||
|
|
||||||
interface SearchResponse<T> {
|
interface SearchResponse<T> {
|
||||||
rows: T[] | any[]
|
rows: T[] | any[]
|
||||||
bookmark: string
|
bookmark?: string
|
||||||
|
totalRows: number
|
||||||
}
|
}
|
||||||
|
|
||||||
interface PaginatedSearchResponse<T> extends SearchResponse<T> {
|
interface PaginatedSearchResponse<T> extends SearchResponse<T> {
|
||||||
|
@ -42,23 +44,26 @@ export function removeKeyNumbering(key: any): string {
|
||||||
* Optionally takes a base lucene query object.
|
* Optionally takes a base lucene query object.
|
||||||
*/
|
*/
|
||||||
export class QueryBuilder<T> {
|
export class QueryBuilder<T> {
|
||||||
dbName: string
|
#dbName: string
|
||||||
index: string
|
#index: string
|
||||||
query: SearchFilters
|
#query: SearchFilters
|
||||||
limit: number
|
#limit: number
|
||||||
sort?: string
|
#sort?: string
|
||||||
bookmark?: string
|
#bookmark?: string
|
||||||
sortOrder: string
|
#sortOrder: string
|
||||||
sortType: string
|
#sortType: string
|
||||||
includeDocs: boolean
|
#includeDocs: boolean
|
||||||
version?: string
|
#version?: string
|
||||||
indexBuilder?: () => Promise<any>
|
#indexBuilder?: () => Promise<any>
|
||||||
noEscaping = false
|
#noEscaping = false
|
||||||
|
#skip?: number
|
||||||
|
|
||||||
|
static readonly maxLimit = 200
|
||||||
|
|
||||||
constructor(dbName: string, index: string, base?: SearchFilters) {
|
constructor(dbName: string, index: string, base?: SearchFilters) {
|
||||||
this.dbName = dbName
|
this.#dbName = dbName
|
||||||
this.index = index
|
this.#index = index
|
||||||
this.query = {
|
this.#query = {
|
||||||
allOr: false,
|
allOr: false,
|
||||||
string: {},
|
string: {},
|
||||||
fuzzy: {},
|
fuzzy: {},
|
||||||
|
@ -73,86 +78,96 @@ export class QueryBuilder<T> {
|
||||||
containsAny: {},
|
containsAny: {},
|
||||||
...base,
|
...base,
|
||||||
}
|
}
|
||||||
this.limit = 50
|
this.#limit = 50
|
||||||
this.sortOrder = "ascending"
|
this.#sortOrder = "ascending"
|
||||||
this.sortType = "string"
|
this.#sortType = "string"
|
||||||
this.includeDocs = true
|
this.#includeDocs = true
|
||||||
}
|
}
|
||||||
|
|
||||||
disableEscaping() {
|
disableEscaping() {
|
||||||
this.noEscaping = true
|
this.#noEscaping = true
|
||||||
return this
|
return this
|
||||||
}
|
}
|
||||||
|
|
||||||
setIndexBuilder(builderFn: () => Promise<any>) {
|
setIndexBuilder(builderFn: () => Promise<any>) {
|
||||||
this.indexBuilder = builderFn
|
this.#indexBuilder = builderFn
|
||||||
return this
|
return this
|
||||||
}
|
}
|
||||||
|
|
||||||
setVersion(version?: string) {
|
setVersion(version?: string) {
|
||||||
if (version != null) {
|
if (version != null) {
|
||||||
this.version = version
|
this.#version = version
|
||||||
}
|
}
|
||||||
return this
|
return this
|
||||||
}
|
}
|
||||||
|
|
||||||
setTable(tableId: string) {
|
setTable(tableId: string) {
|
||||||
this.query.equal!.tableId = tableId
|
this.#query.equal!.tableId = tableId
|
||||||
return this
|
return this
|
||||||
}
|
}
|
||||||
|
|
||||||
setLimit(limit?: number) {
|
setLimit(limit?: number) {
|
||||||
if (limit != null) {
|
if (limit != null) {
|
||||||
this.limit = limit
|
this.#limit = limit
|
||||||
}
|
}
|
||||||
return this
|
return this
|
||||||
}
|
}
|
||||||
|
|
||||||
setSort(sort?: string) {
|
setSort(sort?: string) {
|
||||||
if (sort != null) {
|
if (sort != null) {
|
||||||
this.sort = sort
|
this.#sort = sort
|
||||||
}
|
}
|
||||||
return this
|
return this
|
||||||
}
|
}
|
||||||
|
|
||||||
setSortOrder(sortOrder?: string) {
|
setSortOrder(sortOrder?: string) {
|
||||||
if (sortOrder != null) {
|
if (sortOrder != null) {
|
||||||
this.sortOrder = sortOrder
|
this.#sortOrder = sortOrder
|
||||||
}
|
}
|
||||||
return this
|
return this
|
||||||
}
|
}
|
||||||
|
|
||||||
setSortType(sortType?: string) {
|
setSortType(sortType?: string) {
|
||||||
if (sortType != null) {
|
if (sortType != null) {
|
||||||
this.sortType = sortType
|
this.#sortType = sortType
|
||||||
}
|
}
|
||||||
return this
|
return this
|
||||||
}
|
}
|
||||||
|
|
||||||
setBookmark(bookmark?: string) {
|
setBookmark(bookmark?: string) {
|
||||||
if (bookmark != null) {
|
if (bookmark != null) {
|
||||||
this.bookmark = bookmark
|
this.#bookmark = bookmark
|
||||||
}
|
}
|
||||||
return this
|
return this
|
||||||
}
|
}
|
||||||
|
|
||||||
|
setSkip(skip: number | undefined) {
|
||||||
|
this.#skip = skip
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
excludeDocs() {
|
excludeDocs() {
|
||||||
this.includeDocs = false
|
this.#includeDocs = false
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
includeDocs() {
|
||||||
|
this.#includeDocs = true
|
||||||
return this
|
return this
|
||||||
}
|
}
|
||||||
|
|
||||||
addString(key: string, partial: string) {
|
addString(key: string, partial: string) {
|
||||||
this.query.string![key] = partial
|
this.#query.string![key] = partial
|
||||||
return this
|
return this
|
||||||
}
|
}
|
||||||
|
|
||||||
addFuzzy(key: string, fuzzy: string) {
|
addFuzzy(key: string, fuzzy: string) {
|
||||||
this.query.fuzzy![key] = fuzzy
|
this.#query.fuzzy![key] = fuzzy
|
||||||
return this
|
return this
|
||||||
}
|
}
|
||||||
|
|
||||||
addRange(key: string, low: string | number, high: string | number) {
|
addRange(key: string, low: string | number, high: string | number) {
|
||||||
this.query.range![key] = {
|
this.#query.range![key] = {
|
||||||
low,
|
low,
|
||||||
high,
|
high,
|
||||||
}
|
}
|
||||||
|
@ -160,51 +175,51 @@ export class QueryBuilder<T> {
|
||||||
}
|
}
|
||||||
|
|
||||||
addEqual(key: string, value: any) {
|
addEqual(key: string, value: any) {
|
||||||
this.query.equal![key] = value
|
this.#query.equal![key] = value
|
||||||
return this
|
return this
|
||||||
}
|
}
|
||||||
|
|
||||||
addNotEqual(key: string, value: any) {
|
addNotEqual(key: string, value: any) {
|
||||||
this.query.notEqual![key] = value
|
this.#query.notEqual![key] = value
|
||||||
return this
|
return this
|
||||||
}
|
}
|
||||||
|
|
||||||
addEmpty(key: string, value: any) {
|
addEmpty(key: string, value: any) {
|
||||||
this.query.empty![key] = value
|
this.#query.empty![key] = value
|
||||||
return this
|
return this
|
||||||
}
|
}
|
||||||
|
|
||||||
addNotEmpty(key: string, value: any) {
|
addNotEmpty(key: string, value: any) {
|
||||||
this.query.notEmpty![key] = value
|
this.#query.notEmpty![key] = value
|
||||||
return this
|
return this
|
||||||
}
|
}
|
||||||
|
|
||||||
addOneOf(key: string, value: any) {
|
addOneOf(key: string, value: any) {
|
||||||
this.query.oneOf![key] = value
|
this.#query.oneOf![key] = value
|
||||||
return this
|
return this
|
||||||
}
|
}
|
||||||
|
|
||||||
addContains(key: string, value: any) {
|
addContains(key: string, value: any) {
|
||||||
this.query.contains![key] = value
|
this.#query.contains![key] = value
|
||||||
return this
|
return this
|
||||||
}
|
}
|
||||||
|
|
||||||
addNotContains(key: string, value: any) {
|
addNotContains(key: string, value: any) {
|
||||||
this.query.notContains![key] = value
|
this.#query.notContains![key] = value
|
||||||
return this
|
return this
|
||||||
}
|
}
|
||||||
|
|
||||||
addContainsAny(key: string, value: any) {
|
addContainsAny(key: string, value: any) {
|
||||||
this.query.containsAny![key] = value
|
this.#query.containsAny![key] = value
|
||||||
return this
|
return this
|
||||||
}
|
}
|
||||||
|
|
||||||
setAllOr() {
|
setAllOr() {
|
||||||
this.query.allOr = true
|
this.#query.allOr = true
|
||||||
}
|
}
|
||||||
|
|
||||||
handleSpaces(input: string) {
|
handleSpaces(input: string) {
|
||||||
if (this.noEscaping) {
|
if (this.#noEscaping) {
|
||||||
return input
|
return input
|
||||||
} else {
|
} else {
|
||||||
return input.replace(/ /g, "_")
|
return input.replace(/ /g, "_")
|
||||||
|
@@ -219,7 +234,7 @@ export class QueryBuilder<T> {
    * @returns {string|*}
    */
   preprocess(value: any, { escape, lowercase, wrap, type }: any = {}) {
-    const hasVersion = !!this.version
+    const hasVersion = !!this.#version
     // Determine if type needs wrapped
     const originalType = typeof value
     // Convert to lowercase
@@ -227,8 +242,8 @@ export class QueryBuilder<T> {
       value = value.toLowerCase ? value.toLowerCase() : value
     }
     // Escape characters
-    if (!this.noEscaping && escape && originalType === "string") {
-      value = `${value}`.replace(/[ #+\-&|!(){}\]^"~*?:\\]/g, "\\$&")
+    if (!this.#noEscaping && escape && originalType === "string") {
+      value = `${value}`.replace(/[ \/#+\-&|!(){}\]^"~*?:\\]/g, "\\$&")
     }
 
     // Wrap in quotes
@@ -242,7 +257,7 @@ export class QueryBuilder<T> {
 
   isMultiCondition() {
     let count = 0
-    for (let filters of Object.values(this.query)) {
+    for (let filters of Object.values(this.#query)) {
       // not contains is one massive filter in allOr mode
       if (typeof filters === "object") {
         count += Object.keys(filters).length
@@ -272,13 +287,13 @@ export class QueryBuilder<T> {
 
   buildSearchQuery() {
     const builder = this
-    let allOr = this.query && this.query.allOr
+    let allOr = this.#query && this.#query.allOr
     let query = allOr ? "" : "*:*"
     const allPreProcessingOpts = { escape: true, lowercase: true, wrap: true }
     let tableId
-    if (this.query.equal!.tableId) {
-      tableId = this.query.equal!.tableId
-      delete this.query.equal!.tableId
+    if (this.#query.equal!.tableId) {
+      tableId = this.#query.equal!.tableId
+      delete this.#query.equal!.tableId
     }
 
     const equal = (key: string, value: any) => {
@@ -305,6 +320,18 @@ export class QueryBuilder<T> {
       return `${key}:(${statement})`
     }
 
+    const fuzzy = (key: string, value: any) => {
+      if (!value) {
+        return null
+      }
+      value = builder.preprocess(value, {
+        escape: true,
+        lowercase: true,
+        type: "fuzzy",
+      })
+      return `${key}:/.*${value}.*/`
+    }
+
     const notContains = (key: string, value: any) => {
       const allPrefix = allOr ? "*:* AND " : ""
       const mode = allOr ? "AND" : undefined
@@ -363,8 +390,8 @@ export class QueryBuilder<T> {
     }
 
     // Construct the actual lucene search query string from JSON structure
-    if (this.query.string) {
-      build(this.query.string, (key: string, value: any) => {
+    if (this.#query.string) {
+      build(this.#query.string, (key: string, value: any) => {
         if (!value) {
           return null
         }
@@ -376,8 +403,8 @@ export class QueryBuilder<T> {
         return `${key}:${value}*`
       })
     }
-    if (this.query.range) {
-      build(this.query.range, (key: string, value: any) => {
+    if (this.#query.range) {
+      build(this.#query.range, (key: string, value: any) => {
         if (!value) {
           return null
         }
@@ -392,47 +419,37 @@ export class QueryBuilder<T> {
         return `${key}:[${low} TO ${high}]`
       })
     }
-    if (this.query.fuzzy) {
-      build(this.query.fuzzy, (key: string, value: any) => {
-        if (!value) {
-          return null
-        }
-        value = builder.preprocess(value, {
-          escape: true,
-          lowercase: true,
-          type: "fuzzy",
-        })
-        return `${key}:${value}~`
-      })
+    if (this.#query.fuzzy) {
+      build(this.#query.fuzzy, fuzzy)
     }
-    if (this.query.equal) {
-      build(this.query.equal, equal)
+    if (this.#query.equal) {
+      build(this.#query.equal, equal)
     }
-    if (this.query.notEqual) {
-      build(this.query.notEqual, (key: string, value: any) => {
+    if (this.#query.notEqual) {
+      build(this.#query.notEqual, (key: string, value: any) => {
         if (!value) {
           return null
         }
         return `!${key}:${builder.preprocess(value, allPreProcessingOpts)}`
       })
     }
-    if (this.query.empty) {
-      build(this.query.empty, (key: string) => `(*:* -${key}:["" TO *])`)
+    if (this.#query.empty) {
+      build(this.#query.empty, (key: string) => `(*:* -${key}:["" TO *])`)
     }
-    if (this.query.notEmpty) {
-      build(this.query.notEmpty, (key: string) => `${key}:["" TO *]`)
+    if (this.#query.notEmpty) {
+      build(this.#query.notEmpty, (key: string) => `${key}:["" TO *]`)
     }
-    if (this.query.oneOf) {
-      build(this.query.oneOf, oneOf)
+    if (this.#query.oneOf) {
+      build(this.#query.oneOf, oneOf)
     }
-    if (this.query.contains) {
-      build(this.query.contains, contains)
+    if (this.#query.contains) {
+      build(this.#query.contains, contains)
     }
-    if (this.query.notContains) {
-      build(this.compressFilters(this.query.notContains), notContains)
+    if (this.#query.notContains) {
+      build(this.compressFilters(this.#query.notContains), notContains)
     }
-    if (this.query.containsAny) {
-      build(this.query.containsAny, containsAny)
+    if (this.#query.containsAny) {
+      build(this.#query.containsAny, containsAny)
     }
     // make sure table ID is always added as an AND
     if (tableId) {
@@ -446,29 +463,65 @@ export class QueryBuilder<T> {
   buildSearchBody() {
     let body: any = {
       q: this.buildSearchQuery(),
-      limit: Math.min(this.limit, 200),
-      include_docs: this.includeDocs,
+      limit: Math.min(this.#limit, QueryBuilder.maxLimit),
+      include_docs: this.#includeDocs,
     }
-    if (this.bookmark) {
-      body.bookmark = this.bookmark
+    if (this.#bookmark) {
+      body.bookmark = this.#bookmark
     }
-    if (this.sort) {
-      const order = this.sortOrder === "descending" ? "-" : ""
-      const type = `<${this.sortType}>`
-      body.sort = `${order}${this.handleSpaces(this.sort)}${type}`
+    if (this.#sort) {
+      const order = this.#sortOrder === "descending" ? "-" : ""
+      const type = `<${this.#sortType}>`
+      body.sort = `${order}${this.handleSpaces(this.#sort)}${type}`
     }
     return body
   }
 
   async run() {
+    if (this.#skip) {
+      await this.#skipItems(this.#skip)
+    }
+    return await this.#execute()
+  }
+
+  /**
+   * Lucene queries do not support pagination and use bookmarks instead.
+   * For the given builder, walk through pages using bookmarks until the desired
+   * page has been met.
+   */
+  async #skipItems(skip: number) {
+    // Lucene does not support pagination.
+    // Handle pagination by finding the right bookmark
+    const prevIncludeDocs = this.#includeDocs
+    const prevLimit = this.#limit
+
+    this.excludeDocs()
+    let skipRemaining = skip
+    let iterationFetched = 0
+    do {
+      const toSkip = Math.min(QueryBuilder.maxLimit, skipRemaining)
+      this.setLimit(toSkip)
+      const { bookmark, rows } = await this.#execute()
+      this.setBookmark(bookmark)
+      iterationFetched = rows.length
+      skipRemaining -= rows.length
+    } while (skipRemaining > 0 && iterationFetched > 0)
+
+    this.#includeDocs = prevIncludeDocs
+    this.#limit = prevLimit
+  }
+
+  async #execute() {
     const { url, cookie } = getCouchInfo()
-    const fullPath = `${url}/${this.dbName}/_design/database/_search/${this.index}`
+    const fullPath = `${url}/${this.#dbName}/_design/database/_search/${
+      this.#index
+    }`
     const body = this.buildSearchBody()
     try {
       return await runQuery<T>(fullPath, body, cookie)
     } catch (err: any) {
-      if (err.status === 404 && this.indexBuilder) {
-        await this.indexBuilder()
+      if (err.status === 404 && this.#indexBuilder) {
+        await this.#indexBuilder()
         return await runQuery<T>(fullPath, body, cookie)
       } else {
         throw err
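The run()/#skipItems flow above pages through Lucene bookmarks before the final query executes. A minimal usage sketch of the new skip support follows; the database and index names are illustrative, not taken from the diff:

import { QueryBuilder } from "../lucene"

// Fetch rows 100-109 of an index, sorted by _id.
// setSkip() walks earlier pages via bookmarks; setLimit() caps the final page.
async function fetchPage(dbName: string, indexName: string) {
  const builder = new QueryBuilder<{ _id: string }>(dbName, indexName)
  builder.setSort("_id")
  builder.setSkip(100)
  builder.setLimit(10)
  const { rows, bookmark } = await builder.run()
  return { rows, bookmark }
}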
@@ -502,8 +555,9 @@ async function runQuery<T>(
   }
   const json = await response.json()
 
-  let output: any = {
+  let output: SearchResponse<T> = {
     rows: [],
+    totalRows: 0,
   }
   if (json.rows != null && json.rows.length > 0) {
     output.rows = json.rows.map((row: any) => row.doc)
@@ -511,6 +565,9 @@ async function runQuery<T>(
   if (json.bookmark) {
     output.bookmark = json.bookmark
   }
+  if (json.total_rows) {
+    output.totalRows = json.total_rows
+  }
   return output
 }
@@ -543,8 +600,8 @@ async function recursiveSearch<T>(
   if (rows.length >= params.limit) {
     return rows
   }
-  let pageSize = 200
-  if (rows.length > params.limit - 200) {
+  let pageSize = QueryBuilder.maxLimit
+  if (rows.length > params.limit - QueryBuilder.maxLimit) {
     pageSize = params.limit - rows.length
   }
   const page = await new QueryBuilder<T>(dbName, index, query)
@@ -559,7 +616,7 @@ async function recursiveSearch<T>(
   if (!page.rows.length) {
     return rows
   }
-  if (page.rows.length < 200) {
+  if (page.rows.length < QueryBuilder.maxLimit) {
     return [...rows, ...page.rows]
   }
   const newParams = {
@@ -597,7 +654,7 @@ export async function paginatedSearch<T>(
   if (limit == null || isNaN(limit) || limit < 0) {
     limit = 50
   }
-  limit = Math.min(limit, 200)
+  limit = Math.min(limit, QueryBuilder.maxLimit)
   const search = new QueryBuilder<T>(dbName, index, query)
   if (params.version) {
     search.setVersion(params.version)
@@ -0,0 +1 @@
+export * from "./searchIndexes"
@@ -0,0 +1,62 @@
+import { User, SearchIndex } from "@budibase/types"
+import { getGlobalDB } from "../../context"
+
+export async function createUserIndex() {
+  const db = getGlobalDB()
+  let designDoc
+  try {
+    designDoc = await db.get("_design/database")
+  } catch (err: any) {
+    if (err.status === 404) {
+      designDoc = { _id: "_design/database" }
+    }
+  }
+
+  const fn = function (user: User) {
+    if (user._id && !user._id.startsWith("us_")) {
+      return
+    }
+    const ignoredFields = [
+      "_id",
+      "_rev",
+      "password",
+      "account",
+      "license",
+      "budibaseAccess",
+      "accountPortalAccess",
+      "csrfToken",
+    ]
+
+    function idx(input: Record<string, any>, prev?: string) {
+      for (let key of Object.keys(input)) {
+        if (ignoredFields.includes(key)) {
+          continue
+        }
+        let idxKey = prev != null ? `${prev}.${key}` : key
+        if (typeof input[key] === "string") {
+          // eslint-disable-next-line no-undef
+          // @ts-ignore
+          index(idxKey, input[key].toLowerCase(), { facet: true })
+        } else if (typeof input[key] !== "object") {
+          // eslint-disable-next-line no-undef
+          // @ts-ignore
+          index(idxKey, input[key], { facet: true })
+        } else {
+          idx(input[key], idxKey)
+        }
+      }
+    }
+    idx(user)
+  }
+
+  designDoc.indexes = {
+    [SearchIndex.USER]: {
+      index: fn.toString(),
+      analyzer: {
+        default: "keyword",
+        name: "perfield",
+      },
+    },
+  }
+  await db.put(designDoc)
+}
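The fn above is serialised into the design document and executed by CouchDB's search engine, where index(...) is a globally provided function. As a rough, standalone illustration of the key flattening the recursive idx() walk performs (this sketch only collects keys, it is not the stored function):

// Standalone sketch: turn nested document fields into dotted index keys.
function flattenKeys(input: Record<string, any>, prev?: string): string[] {
  const ignored = ["_id", "_rev", "password"]
  const keys: string[] = []
  for (const key of Object.keys(input)) {
    if (ignored.includes(key)) {
      continue
    }
    const idxKey = prev != null ? `${prev}.${key}` : key
    if (typeof input[key] === "object" && input[key] !== null) {
      keys.push(...flattenKeys(input[key], idxKey))
    } else {
      keys.push(idxKey)
    }
  }
  return keys
}

// flattenKeys({ email: "a@b.com", roles: { app_123: "ADMIN" } })
// => ["email", "roles.app_123"]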
@@ -1,4 +1,4 @@
-import { newid } from "../../newid"
+import { newid } from "../../docIds/newid"
 import { getDB } from "../db"
 import { Database } from "@budibase/types"
 import { QueryBuilder, paginatedSearch, fullSearch } from "../lucene"
@@ -136,6 +136,106 @@ describe("lucene", () => {
     const resp = await builder.run()
     expect(resp.rows.length).toBe(2)
   })
+
+  describe("skip", () => {
+    const skipDbName = `db-${newid()}`
+    let docs: {
+      _id: string
+      property: string
+      array: string[]
+    }[]
+
+    beforeAll(async () => {
+      const db = getDB(skipDbName)
+
+      docs = Array(QueryBuilder.maxLimit * 2.5)
+        .fill(0)
+        .map((_, i) => ({
+          _id: i.toString().padStart(3, "0"),
+          property: `value_${i.toString().padStart(3, "0")}`,
+          array: [],
+        }))
+      await db.bulkDocs(docs)
+
+      await db.put({
+        _id: "_design/database",
+        indexes: {
+          [INDEX_NAME]: {
+            index: index,
+            analyzer: "standard",
+          },
+        },
+      })
+    })
+
+    it("should be able to apply skip", async () => {
+      const builder = new QueryBuilder(skipDbName, INDEX_NAME)
+      const firstResponse = await builder.run()
+      builder.setSkip(40)
+      const secondResponse = await builder.run()
+
+      // Return the default limit
+      expect(firstResponse.rows.length).toBe(50)
+      expect(secondResponse.rows.length).toBe(50)
+
+      // Should have the expected overlap
+      expect(firstResponse.rows.slice(40)).toEqual(
+        secondResponse.rows.slice(0, 10)
+      )
+    })
+
+    it("should handle limits", async () => {
+      const builder = new QueryBuilder(skipDbName, INDEX_NAME)
+      builder.setLimit(10)
+      builder.setSkip(50)
+      builder.setSort("_id")
+
+      const resp = await builder.run()
+      expect(resp.rows.length).toBe(10)
+      expect(resp.rows).toEqual(
+        docs.slice(50, 60).map(expect.objectContaining)
+      )
+    })
+
+    it("should be able to skip searching through multiple responses", async () => {
+      const builder = new QueryBuilder(skipDbName, INDEX_NAME)
+      // Skipping 2 max limits plus a little bit more
+      const skip = QueryBuilder.maxLimit * 2 + 37
+      builder.setSkip(skip)
+      builder.setSort("_id")
+      const resp = await builder.run()
+
+      expect(resp.rows.length).toBe(50)
+      expect(resp.rows).toEqual(
+        docs.slice(skip, skip + resp.rows.length).map(expect.objectContaining)
+      )
+    })
+
+    it("should not return results if skipping all docs", async () => {
+      const builder = new QueryBuilder(skipDbName, INDEX_NAME)
+      // Skipping 2 max limits plus a little bit more
+      const skip = docs.length + 1
+      builder.setSkip(skip)
+
+      const resp = await builder.run()
+
+      expect(resp.rows.length).toBe(0)
+    })
+
+    it("skip should respect with filters", async () => {
+      const builder = new QueryBuilder(skipDbName, INDEX_NAME)
+      builder.setLimit(10)
+      builder.setSkip(50)
+      builder.addString("property", "value_1")
+      builder.setSort("property")
+
+      const resp = await builder.run()
+      expect(resp.rows.length).toBe(10)
+      expect(resp.rows).toEqual(
+        docs.slice(150, 160).map(expect.objectContaining)
+      )
+    })
+  })
 })
 
 describe("paginated search", () => {
@@ -3,7 +3,7 @@ import {
   getProdAppID,
   isDevAppID,
   isProdAppID,
-} from "../conversions"
+} from "../../docIds/conversions"
 import { generateAppID } from "../utils"
 
 describe("utils", () => {
@@ -1,257 +1,12 @@
-import { newid } from "../newid"
 import env from "../environment"
-import {
-  DEFAULT_TENANT_ID,
-  SEPARATOR,
-  DocumentType,
-  UNICODE_MAX,
-  ViewName,
-  InternalTable,
-  APP_PREFIX,
-} from "../constants"
+import { DEFAULT_TENANT_ID, SEPARATOR, DocumentType } from "../constants"
 import { getTenantId, getGlobalDBName } from "../context"
 import { doWithDB, directCouchAllDbs } from "./db"
 import { getAppMetadata } from "../cache/appMetadata"
-import { isDevApp, isDevAppID, getProdAppID } from "./conversions"
+import { isDevApp, isDevAppID, getProdAppID } from "../docIds/conversions"
 import { App, Database } from "@budibase/types"
+import { getStartEndKeyURL } from "../docIds"
+export * from "../docIds"
 
-/**
- * Generates a new app ID.
- * @returns {string} The new app ID which the app doc can be stored under.
- */
-export const generateAppID = (tenantId?: string | null) => {
-  let id = APP_PREFIX
-  if (tenantId) {
-    id += `${tenantId}${SEPARATOR}`
-  }
-  return `${id}${newid()}`
-}
-
-/**
- * If creating DB allDocs/query params with only a single top level ID this can be used, this
- * is usually the case as most of our docs are top level e.g. tables, automations, users and so on.
- * More complex cases such as link docs and rows which have multiple levels of IDs that their
- * ID consists of need their own functions to build the allDocs parameters.
- * @param {string} docType The type of document which input params are being built for, e.g. user,
- * link, app, table and so on.
- * @param {string|null} docId The ID of the document minus its type - this is only needed if looking
- * for a singular document.
- * @param {object} otherProps Add any other properties onto the request, e.g. include_docs.
- * @returns {object} Parameters which can then be used with an allDocs request.
- */
-export function getDocParams(
-  docType: string,
-  docId?: string | null,
-  otherProps: any = {}
-) {
-  if (docId == null) {
-    docId = ""
-  }
-  return {
-    ...otherProps,
-    startkey: `${docType}${SEPARATOR}${docId}`,
-    endkey: `${docType}${SEPARATOR}${docId}${UNICODE_MAX}`,
-  }
-}
-
-/**
- * Gets the DB allDocs/query params for retrieving a row.
- * @param {string|null} tableId The table in which the rows have been stored.
- * @param {string|null} rowId The ID of the row which is being specifically queried for. This can be
- * left null to get all the rows in the table.
- * @param {object} otherProps Any other properties to add to the request.
- * @returns {object} Parameters which can then be used with an allDocs request.
- */
-export function getRowParams(
-  tableId?: string | null,
-  rowId?: string | null,
-  otherProps = {}
-) {
-  if (tableId == null) {
-    return getDocParams(DocumentType.ROW, null, otherProps)
-  }
-
-  const endOfKey = rowId == null ? `${tableId}${SEPARATOR}` : rowId
-
-  return getDocParams(DocumentType.ROW, endOfKey, otherProps)
-}
-
-/**
- * Retrieve the correct index for a view based on default design DB.
- */
-export function getQueryIndex(viewName: ViewName) {
-  return `database/${viewName}`
-}
-
-/**
- * Gets a new row ID for the specified table.
- * @param {string} tableId The table which the row is being created for.
- * @param {string|null} id If an ID is to be used then the UUID can be substituted for this.
- * @returns {string} The new ID which a row doc can be stored under.
- */
-export function generateRowID(tableId: string, id?: string) {
-  id = id || newid()
-  return `${DocumentType.ROW}${SEPARATOR}${tableId}${SEPARATOR}${id}`
-}
-
-/**
- * Check if a given ID is that of a table.
- * @returns {boolean}
- */
-export const isTableId = (id: string) => {
-  // this includes datasource plus tables
-  return (
-    id &&
-    (id.startsWith(`${DocumentType.TABLE}${SEPARATOR}`) ||
-      id.startsWith(`${DocumentType.DATASOURCE_PLUS}${SEPARATOR}`))
-  )
-}
-
-/**
- * Check if a given ID is that of a datasource or datasource plus.
- * @returns {boolean}
- */
-export const isDatasourceId = (id: string) => {
-  // this covers both datasources and datasource plus
-  return id && id.startsWith(`${DocumentType.DATASOURCE}${SEPARATOR}`)
-}
-
-/**
- * Generates a new workspace ID.
- * @returns {string} The new workspace ID which the workspace doc can be stored under.
- */
-export function generateWorkspaceID() {
-  return `${DocumentType.WORKSPACE}${SEPARATOR}${newid()}`
-}
-
-/**
- * Gets parameters for retrieving workspaces.
- */
-export function getWorkspaceParams(id = "", otherProps = {}) {
-  return {
-    ...otherProps,
-    startkey: `${DocumentType.WORKSPACE}${SEPARATOR}${id}`,
-    endkey: `${DocumentType.WORKSPACE}${SEPARATOR}${id}${UNICODE_MAX}`,
-  }
-}
-
-/**
- * Generates a new global user ID.
- * @returns {string} The new user ID which the user doc can be stored under.
- */
-export function generateGlobalUserID(id?: any) {
-  return `${DocumentType.USER}${SEPARATOR}${id || newid()}`
-}
-
-/**
- * Gets parameters for retrieving users.
- */
-export function getGlobalUserParams(globalId: any, otherProps: any = {}) {
-  if (!globalId) {
-    globalId = ""
-  }
-  const startkey = otherProps?.startkey
-  return {
-    ...otherProps,
-    // need to include this incase pagination
-    startkey: startkey
-      ? startkey
-      : `${DocumentType.USER}${SEPARATOR}${globalId}`,
-    endkey: `${DocumentType.USER}${SEPARATOR}${globalId}${UNICODE_MAX}`,
-  }
-}
-
-/**
- * Gets parameters for retrieving users, this is a utility function for the getDocParams function.
- */
-export function getUserMetadataParams(userId?: string | null, otherProps = {}) {
-  return getRowParams(InternalTable.USER_METADATA, userId, otherProps)
-}
-
-/**
- * Generates a new user ID based on the passed in global ID.
- * @param {string} globalId The ID of the global user.
- * @returns {string} The new user ID which the user doc can be stored under.
- */
-export function generateUserMetadataID(globalId: string) {
-  return generateRowID(InternalTable.USER_METADATA, globalId)
-}
-
-/**
- * Breaks up the ID to get the global ID.
- */
-export function getGlobalIDFromUserMetadataID(id: string) {
-  const prefix = `${DocumentType.ROW}${SEPARATOR}${InternalTable.USER_METADATA}${SEPARATOR}`
-  if (!id || !id.includes(prefix)) {
-    return id
-  }
-  return id.split(prefix)[1]
-}
-
-export function getUsersByAppParams(appId: any, otherProps: any = {}) {
-  const prodAppId = getProdAppID(appId)
-  return {
-    ...otherProps,
-    startkey: prodAppId,
-    endkey: `${prodAppId}${UNICODE_MAX}`,
-  }
-}
-
-/**
- * Generates a template ID.
- * @param ownerId The owner/user of the template, this could be global or a workspace level.
- */
-export function generateTemplateID(ownerId: any) {
-  return `${DocumentType.TEMPLATE}${SEPARATOR}${ownerId}${SEPARATOR}${newid()}`
-}
-
-export function generateAppUserID(prodAppId: string, userId: string) {
-  return `${prodAppId}${SEPARATOR}${userId}`
-}
-
-/**
- * Gets parameters for retrieving templates. Owner ID must be specified, either global or a workspace level.
- */
-export function getTemplateParams(
-  ownerId: any,
-  templateId: any,
-  otherProps = {}
-) {
-  if (!templateId) {
-    templateId = ""
-  }
-  let final
-  if (templateId) {
-    final = templateId
-  } else {
-    final = `${DocumentType.TEMPLATE}${SEPARATOR}${ownerId}${SEPARATOR}`
-  }
-  return {
-    ...otherProps,
-    startkey: final,
-    endkey: `${final}${UNICODE_MAX}`,
-  }
-}
-
-/**
- * Generates a new role ID.
- * @returns {string} The new role ID which the role doc can be stored under.
- */
-export function generateRoleID(id?: any) {
-  return `${DocumentType.ROLE}${SEPARATOR}${id || newid()}`
-}
-
-/**
- * Gets parameters for retrieving a role, this is a utility function for the getDocParams function.
- */
-export function getRoleParams(roleId?: string | null, otherProps = {}) {
-  return getDocParams(DocumentType.ROLE, roleId, otherProps)
-}
-
-export function getStartEndKeyURL(baseKey: any, tenantId?: string) {
-  const tenancy = tenantId ? `${SEPARATOR}${tenantId}` : ""
-  return `startkey="${baseKey}${tenancy}"&endkey="${baseKey}${tenancy}${UNICODE_MAX}"`
-}
-
 /**
  * if in production this will use the CouchDB _all_dbs call to retrieve a list of databases. If testing
@@ -411,31 +166,8 @@ export async function dbExists(dbName: any) {
   )
 }
 
-/**
- * Generates a new dev info document ID - this is scoped to a user.
- * @returns {string} The new dev info ID which info for dev (like api key) can be stored under.
- */
-export const generateDevInfoID = (userId: any) => {
-  return `${DocumentType.DEV_INFO}${SEPARATOR}${userId}`
-}
-
-/**
- * Generates a new plugin ID - to be used in the global DB.
- * @returns {string} The new plugin ID which a plugin metadata document can be stored under.
- */
-export const generatePluginID = (name: string) => {
-  return `${DocumentType.PLUGIN}${SEPARATOR}${name}`
-}
-
-/**
- * Gets parameters for retrieving automations, this is a utility function for the getDocParams function.
- */
-export const getPluginParams = (pluginId?: string | null, otherProps = {}) => {
-  return getDocParams(DocumentType.PLUGIN, pluginId, otherProps)
-}
-
-export function pagination(
-  data: any[],
+export function pagination<T>(
+  data: T[],
   pageSize: number,
   {
     paginate,
@@ -444,7 +176,7 @@ export function pagination(
   }: {
     paginate: boolean
     property: string
-    getKey?: (doc: any) => string | undefined
+    getKey?: (doc: T) => string | undefined
   } = {
     paginate: true,
     property: "_id",
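The pagination helper's body sits outside this hunk; only its signature is changed here. As a sketch of what the new generic parameter gives a caller (document type, data, and the import path are assumed for illustration):

import { pagination } from "../db/utils" // path assumed

interface UserDoc {
  _id: string
  email: string
}

// With pagination<T>, getKey is checked against the document type instead of `any`.
function firstUserPage(allUsers: UserDoc[]) {
  return pagination<UserDoc>(allUsers, 50, {
    paginate: true,
    property: "_id",
    getKey: doc => doc._id, // doc is typed as UserDoc
  })
}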
@@ -7,7 +7,7 @@ import {
 } from "../constants"
 import { getGlobalDB } from "../context"
 import { doWithDB } from "./"
-import { Database, DatabaseQueryOpts } from "@budibase/types"
+import { AllDocsResponse, Database, DatabaseQueryOpts } from "@budibase/types"
 import env from "../environment"
 
 const DESIGN_DB = "_design/database"
@@ -42,7 +42,11 @@ async function removeDeprecated(db: Database, viewName: ViewName) {
   }
 }
 
-export async function createView(db: any, viewJs: string, viewName: string) {
+export async function createView(
+  db: any,
+  viewJs: string,
+  viewName: string
+): Promise<void> {
   let designDoc
   try {
     designDoc = (await db.get(DESIGN_DB)) as DesignDocument
@@ -57,7 +61,15 @@ export async function createView(db: any, viewJs: string, viewName: string) {
     ...designDoc.views,
     [viewName]: view,
   }
-  await db.put(designDoc)
+  try {
+    await db.put(designDoc)
+  } catch (err: any) {
+    if (err.status === 409) {
+      return await createView(db, viewJs, viewName)
+    } else {
+      throw err
+    }
+  }
 }
 
 export const createNewUserEmailView = async () => {
@@ -107,6 +119,34 @@ export interface QueryViewOptions {
   arrayResponse?: boolean
 }
 
+export async function queryViewRaw<T>(
+  viewName: ViewName,
+  params: DatabaseQueryOpts,
+  db: Database,
+  createFunc: any,
+  opts?: QueryViewOptions
+): Promise<AllDocsResponse<T>> {
+  try {
+    const response = await db.query<T>(`database/${viewName}`, params)
+    // await to catch error
+    return response
+  } catch (err: any) {
+    const pouchNotFound = err && err.name === "not_found"
+    const couchNotFound = err && err.status === 404
+    if (pouchNotFound || couchNotFound) {
+      await removeDeprecated(db, viewName)
+      await createFunc()
+      return queryViewRaw(viewName, params, db, createFunc, opts)
+    } else if (err.status === 409) {
+      // can happen when multiple queries occur at once, view couldn't be created
+      // other design docs being updated, re-run
+      return queryViewRaw(viewName, params, db, createFunc, opts)
+    } else {
+      throw err
+    }
+  }
+}
+
 export const queryView = async <T>(
   viewName: ViewName,
   params: DatabaseQueryOpts,
@@ -114,30 +154,18 @@ export const queryView = async <T>(
   createFunc: any,
   opts?: QueryViewOptions
 ): Promise<T[] | T | undefined> => {
-  try {
-    let response = await db.query<T>(`database/${viewName}`, params)
-    const rows = response.rows
-    const docs = rows.map((row: any) =>
-      params.include_docs ? row.doc : row.value
-    )
+  const response = await queryViewRaw<T>(viewName, params, db, createFunc, opts)
+  const rows = response.rows
+  const docs = rows.map((row: any) =>
+    params.include_docs ? row.doc : row.value
+  )
 
-    // if arrayResponse has been requested, always return array regardless of length
-    if (opts?.arrayResponse) {
-      return docs as T[]
-    } else {
-      // return the single document if there is only one
-      return docs.length <= 1 ? (docs[0] as T) : (docs as T[])
-    }
-  } catch (err: any) {
-    const pouchNotFound = err && err.name === "not_found"
-    const couchNotFound = err && err.status === 404
-    if (pouchNotFound || couchNotFound) {
-      await removeDeprecated(db, viewName)
-      await createFunc()
-      return queryView(viewName, params, db, createFunc, opts)
-    } else {
-      throw err
-    }
+  // if arrayResponse has been requested, always return array regardless of length
+  if (opts?.arrayResponse) {
+    return docs as T[]
+  } else {
+    // return the single document if there is only one
+    return docs.length <= 1 ? (docs[0] as T) : (docs as T[])
   }
 }
@@ -192,18 +220,19 @@ export const queryPlatformView = async <T>(
   })
 }
 
+const CreateFuncByName: any = {
+  [ViewName.USER_BY_EMAIL]: createNewUserEmailView,
+  [ViewName.BY_API_KEY]: createApiKeyView,
+  [ViewName.USER_BY_BUILDERS]: createUserBuildersView,
+  [ViewName.USER_BY_APP]: createUserAppView,
+}
+
 export const queryGlobalView = async <T>(
   viewName: ViewName,
   params: DatabaseQueryOpts,
   db?: Database,
   opts?: QueryViewOptions
 ): Promise<T[] | T | undefined> => {
-  const CreateFuncByName: any = {
-    [ViewName.USER_BY_EMAIL]: createNewUserEmailView,
-    [ViewName.BY_API_KEY]: createApiKeyView,
-    [ViewName.USER_BY_BUILDERS]: createUserBuildersView,
-    [ViewName.USER_BY_APP]: createUserAppView,
-  }
   // can pass DB in if working with something specific
   if (!db) {
     db = getGlobalDB()
@@ -211,3 +240,13 @@ export const queryGlobalView = async <T>(
   const createFn = CreateFuncByName[viewName]
   return queryView(viewName, params, db!, createFn, opts)
 }
+
+export async function queryGlobalViewRaw<T>(
+  viewName: ViewName,
+  params: DatabaseQueryOpts,
+  opts?: QueryViewOptions
+) {
+  const db = getGlobalDB()
+  const createFn = CreateFuncByName[viewName]
+  return queryViewRaw<T>(viewName, params, db, createFn, opts)
+}
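queryViewRaw above recovers from two failure modes: a missing view (404 / not_found) triggers a rebuild and retry, and a design-document write conflict (409) triggers a plain retry. A generic, standalone sketch of that same recovery pattern, with the query and rebuild steps passed in as callbacks:

// Sketch of the rebuild-and-retry pattern used above; assumes rebuildView()
// eventually succeeds, as the real code does.
async function withViewRetry<T>(
  query: () => Promise<T>,
  rebuildView: () => Promise<void>
): Promise<T> {
  try {
    return await query()
  } catch (err: any) {
    if (err?.status === 404 || err?.name === "not_found") {
      await rebuildView()
      return withViewRetry(query, rebuildView)
    } else if (err?.status === 409) {
      // concurrent design-doc update - just run the query again
      return withViewRetry(query, rebuildView)
    }
    throw err
  }
}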
@@ -0,0 +1,102 @@
+import {
+  APP_PREFIX,
+  DocumentType,
+  InternalTable,
+  SEPARATOR,
+} from "../constants"
+import { newid } from "./newid"
+
+/**
+ * Generates a new app ID.
+ * @returns {string} The new app ID which the app doc can be stored under.
+ */
+export const generateAppID = (tenantId?: string | null) => {
+  let id = APP_PREFIX
+  if (tenantId) {
+    id += `${tenantId}${SEPARATOR}`
+  }
+  return `${id}${newid()}`
+}
+
+/**
+ * Gets a new row ID for the specified table.
+ * @param {string} tableId The table which the row is being created for.
+ * @param {string|null} id If an ID is to be used then the UUID can be substituted for this.
+ * @returns {string} The new ID which a row doc can be stored under.
+ */
+export function generateRowID(tableId: string, id?: string) {
+  id = id || newid()
+  return `${DocumentType.ROW}${SEPARATOR}${tableId}${SEPARATOR}${id}`
+}
+
+/**
+ * Generates a new workspace ID.
+ * @returns {string} The new workspace ID which the workspace doc can be stored under.
+ */
+export function generateWorkspaceID() {
+  return `${DocumentType.WORKSPACE}${SEPARATOR}${newid()}`
+}
+
+/**
+ * Generates a new global user ID.
+ * @returns {string} The new user ID which the user doc can be stored under.
+ */
+export function generateGlobalUserID(id?: any) {
+  return `${DocumentType.USER}${SEPARATOR}${id || newid()}`
+}
+
+/**
+ * Generates a new user ID based on the passed in global ID.
+ * @param {string} globalId The ID of the global user.
+ * @returns {string} The new user ID which the user doc can be stored under.
+ */
+export function generateUserMetadataID(globalId: string) {
+  return generateRowID(InternalTable.USER_METADATA, globalId)
+}
+
+/**
+ * Breaks up the ID to get the global ID.
+ */
+export function getGlobalIDFromUserMetadataID(id: string) {
+  const prefix = `${DocumentType.ROW}${SEPARATOR}${InternalTable.USER_METADATA}${SEPARATOR}`
+  if (!id || !id.includes(prefix)) {
+    return id
+  }
+  return id.split(prefix)[1]
+}
+
+/**
+ * Generates a template ID.
+ * @param ownerId The owner/user of the template, this could be global or a workspace level.
+ */
+export function generateTemplateID(ownerId: any) {
+  return `${DocumentType.TEMPLATE}${SEPARATOR}${ownerId}${SEPARATOR}${newid()}`
+}
+
+export function generateAppUserID(prodAppId: string, userId: string) {
+  return `${prodAppId}${SEPARATOR}${userId}`
+}
+
+/**
+ * Generates a new role ID.
+ * @returns {string} The new role ID which the role doc can be stored under.
+ */
+export function generateRoleID(id?: any) {
+  return `${DocumentType.ROLE}${SEPARATOR}${id || newid()}`
+}
+
+/**
+ * Generates a new dev info document ID - this is scoped to a user.
+ * @returns {string} The new dev info ID which info for dev (like api key) can be stored under.
+ */
+export const generateDevInfoID = (userId: any) => {
+  return `${DocumentType.DEV_INFO}${SEPARATOR}${userId}`
+}
+
+/**
+ * Generates a new plugin ID - to be used in the global DB.
+ * @returns {string} The new plugin ID which a plugin metadata document can be stored under.
+ */
+export const generatePluginID = (name: string) => {
+  return `${DocumentType.PLUGIN}${SEPARATOR}${name}`
+}
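A quick usage sketch of the relocated ID generators; the import path and example prefixes are assumptions based on the constants referenced above (the random suffix comes from newid()):

import { generateAppID, generateRowID, generateGlobalUserID } from "../docIds" // path assumed

const appId = generateAppID("tenant1") // roughly app_tenant1_<random id>
const rowId = generateRowID("ta_users") // DocumentType.ROW prefix + table ID + random id
const userId = generateGlobalUserID() // DocumentType.USER prefix + random id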
@@ -0,0 +1,2 @@
+export * from "./ids"
+export * from "./params"
@@ -0,0 +1,174 @@
+import {
+  DocumentType,
+  InternalTable,
+  SEPARATOR,
+  UNICODE_MAX,
+  ViewName,
+} from "../constants"
+import { getProdAppID } from "./conversions"
+
+/**
+ * If creating DB allDocs/query params with only a single top level ID this can be used, this
+ * is usually the case as most of our docs are top level e.g. tables, automations, users and so on.
+ * More complex cases such as link docs and rows which have multiple levels of IDs that their
+ * ID consists of need their own functions to build the allDocs parameters.
+ * @param {string} docType The type of document which input params are being built for, e.g. user,
+ * link, app, table and so on.
+ * @param {string|null} docId The ID of the document minus its type - this is only needed if looking
+ * for a singular document.
+ * @param {object} otherProps Add any other properties onto the request, e.g. include_docs.
+ * @returns {object} Parameters which can then be used with an allDocs request.
+ */
+export function getDocParams(
+  docType: string,
+  docId?: string | null,
+  otherProps: any = {}
+) {
+  if (docId == null) {
+    docId = ""
+  }
+  return {
+    ...otherProps,
+    startkey: `${docType}${SEPARATOR}${docId}`,
+    endkey: `${docType}${SEPARATOR}${docId}${UNICODE_MAX}`,
+  }
+}
+
+/**
+ * Gets the DB allDocs/query params for retrieving a row.
+ * @param {string|null} tableId The table in which the rows have been stored.
+ * @param {string|null} rowId The ID of the row which is being specifically queried for. This can be
+ * left null to get all the rows in the table.
+ * @param {object} otherProps Any other properties to add to the request.
+ * @returns {object} Parameters which can then be used with an allDocs request.
+ */
+export function getRowParams(
+  tableId?: string | null,
+  rowId?: string | null,
+  otherProps = {}
+) {
+  if (tableId == null) {
+    return getDocParams(DocumentType.ROW, null, otherProps)
+  }
+
+  const endOfKey = rowId == null ? `${tableId}${SEPARATOR}` : rowId
+
+  return getDocParams(DocumentType.ROW, endOfKey, otherProps)
+}
+
+/**
+ * Retrieve the correct index for a view based on default design DB.
+ */
+export function getQueryIndex(viewName: ViewName) {
+  return `database/${viewName}`
+}
+
+/**
+ * Check if a given ID is that of a table.
+ * @returns {boolean}
+ */
+export const isTableId = (id: string) => {
+  // this includes datasource plus tables
+  return (
+    id &&
+    (id.startsWith(`${DocumentType.TABLE}${SEPARATOR}`) ||
+      id.startsWith(`${DocumentType.DATASOURCE_PLUS}${SEPARATOR}`))
+  )
+}
+
+/**
+ * Check if a given ID is that of a datasource or datasource plus.
+ * @returns {boolean}
+ */
+export const isDatasourceId = (id: string) => {
+  // this covers both datasources and datasource plus
+  return id && id.startsWith(`${DocumentType.DATASOURCE}${SEPARATOR}`)
+}
+
+/**
+ * Gets parameters for retrieving workspaces.
+ */
+export function getWorkspaceParams(id = "", otherProps = {}) {
+  return {
+    ...otherProps,
+    startkey: `${DocumentType.WORKSPACE}${SEPARATOR}${id}`,
+    endkey: `${DocumentType.WORKSPACE}${SEPARATOR}${id}${UNICODE_MAX}`,
+  }
+}
+
+/**
+ * Gets parameters for retrieving users.
+ */
+export function getGlobalUserParams(globalId: any, otherProps: any = {}) {
+  if (!globalId) {
+    globalId = ""
+  }
+  const startkey = otherProps?.startkey
+  return {
+    ...otherProps,
+    // need to include this incase pagination
+    startkey: startkey
+      ? startkey
+      : `${DocumentType.USER}${SEPARATOR}${globalId}`,
+    endkey: `${DocumentType.USER}${SEPARATOR}${globalId}${UNICODE_MAX}`,
+  }
+}
+
+/**
+ * Gets parameters for retrieving users, this is a utility function for the getDocParams function.
+ */
+export function getUserMetadataParams(userId?: string | null, otherProps = {}) {
+  return getRowParams(InternalTable.USER_METADATA, userId, otherProps)
+}
+
+export function getUsersByAppParams(appId: any, otherProps: any = {}) {
+  const prodAppId = getProdAppID(appId)
+  return {
+    ...otherProps,
+    startkey: prodAppId,
+    endkey: `${prodAppId}${UNICODE_MAX}`,
+  }
+}
+
+/**
+ * Gets parameters for retrieving templates. Owner ID must be specified, either global or a workspace level.
+ */
+export function getTemplateParams(
+  ownerId: any,
+  templateId: any,
+  otherProps = {}
+) {
+  if (!templateId) {
+    templateId = ""
+  }
+  let final
+  if (templateId) {
+    final = templateId
+  } else {
+    final = `${DocumentType.TEMPLATE}${SEPARATOR}${ownerId}${SEPARATOR}`
+  }
+  return {
+    ...otherProps,
+    startkey: final,
+    endkey: `${final}${UNICODE_MAX}`,
+  }
+}
+
+/**
+ * Gets parameters for retrieving a role, this is a utility function for the getDocParams function.
+ */
+export function getRoleParams(roleId?: string | null, otherProps = {}) {
+  return getDocParams(DocumentType.ROLE, roleId, otherProps)
+}
+
+export function getStartEndKeyURL(baseKey: any, tenantId?: string) {
+  const tenancy = tenantId ? `${SEPARATOR}${tenantId}` : ""
+  return `startkey="${baseKey}${tenancy}"&endkey="${baseKey}${tenancy}${UNICODE_MAX}"`
+}
+
+/**
+ * Gets parameters for retrieving automations, this is a utility function for the getDocParams function.
+ */
+export const getPluginParams = (pluginId?: string | null, otherProps = {}) => {
+  return getDocParams(DocumentType.PLUGIN, pluginId, otherProps)
+}
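These param builders feed CouchDB allDocs-style range queries. A hedged usage sketch, assuming the Database wrapper used elsewhere in this package and an assumed import path:

import { Database } from "@budibase/types"
import { getRoleParams } from "../docIds" // path assumed

// Fetch every role document, including bodies: getRoleParams expands to a
// startkey/endkey range over the role document-type prefix.
async function fetchRoles(db: Database) {
  const params = getRoleParams(null, { include_docs: true })
  const response = await db.allDocs(params)
  return response.rows.map((row: any) => row.doc)
}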
@@ -0,0 +1,29 @@
+import { asyncEventQueue, init as initQueue } from "../events/asyncEvents"
+import {
+  ProcessorMap,
+  default as DocumentUpdateProcessor,
+} from "../events/processors/async/DocumentUpdateProcessor"
+
+let processingPromise: Promise<void>
+let documentProcessor: DocumentUpdateProcessor
+
+export function init(processors: ProcessorMap) {
+  if (!asyncEventQueue) {
+    initQueue()
+  }
+  if (!documentProcessor) {
+    documentProcessor = new DocumentUpdateProcessor(processors)
+  }
+  // if not processing in this instance, kick it off
+  if (!processingPromise) {
+    processingPromise = asyncEventQueue.process(async job => {
+      const { event, identity, properties, timestamp } = job.data
+      await documentProcessor.processEvent(
+        event,
+        identity,
+        properties,
+        timestamp
+      )
+    })
+  }
+}
@@ -1,3 +1,5 @@
+import { existsSync, readFileSync } from "fs"
+
 function isTest() {
   return isCypress() || isJest()
 }
@@ -36,6 +38,44 @@ function getAPIEncryptionKey() {
     : process.env.JWT_SECRET // fallback to the JWT_SECRET used historically
 }
 
+function httpLogging() {
+  if (process.env.HTTP_LOGGING === undefined) {
+    // on by default unless otherwise specified
+    return true
+  }
+
+  return process.env.HTTP_LOGGING
+}
+
+function findVersion() {
+  function findFileInAncestors(
+    fileName: string,
+    currentDir: string
+  ): string | null {
+    const filePath = `${currentDir}/${fileName}`
+    if (existsSync(filePath)) {
+      return filePath
+    }
+
+    const parentDir = `${currentDir}/..`
+    if (parentDir === currentDir) {
+      // reached root directory
+      return null
+    }
+
+    return findFileInAncestors(fileName, parentDir)
+  }
+
+  try {
+    const packageJsonFile = findFileInAncestors("package.json", process.cwd())
+    const content = readFileSync(packageJsonFile!, "utf-8")
+    const version = JSON.parse(content).version
+    return version
+  } catch {
+    throw new Error("Cannot find a valid version in its package.json")
+  }
+}
+
 const environment = {
   isTest,
   isJest,
@@ -90,11 +130,11 @@ const environment = {
   USE_COUCH: process.env.USE_COUCH || true,
   DEFAULT_LICENSE: process.env.DEFAULT_LICENSE,
   SERVICE: process.env.SERVICE || "budibase",
-  LOG_LEVEL: process.env.LOG_LEVEL,
+  LOG_LEVEL: process.env.LOG_LEVEL || "info",
   SESSION_UPDATE_PERIOD: process.env.SESSION_UPDATE_PERIOD,
   DEPLOYMENT_ENVIRONMENT:
     process.env.DEPLOYMENT_ENVIRONMENT || "docker-compose",
-  ENABLE_4XX_HTTP_LOGGING: process.env.ENABLE_4XX_HTTP_LOGGING || true,
+  HTTP_LOGGING: httpLogging(),
   ENABLE_AUDIT_LOG_IP_ADDR: process.env.ENABLE_AUDIT_LOG_IP_ADDR,
   // smtp
   SMTP_FALLBACK_ENABLED: process.env.SMTP_FALLBACK_ENABLED,
@@ -113,6 +153,8 @@ const environment = {
   ENABLE_SSO_MAINTENANCE_MODE: selfHosted
     ? process.env.ENABLE_SSO_MAINTENANCE_MODE
    : false,
+  VERSION: findVersion(),
+  DISABLE_PINO_LOGGER: process.env.DISABLE_PINO_LOGGER,
   _set(key: any, value: any) {
     process.env[key] = value
     // @ts-ignore
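findVersion() above recurses towards the filesystem root looking for the nearest package.json and reads its version field. The same walk written iteratively, as a sketch only, using path helpers rather than string concatenation:

import { existsSync, readFileSync } from "fs"
import { dirname, join } from "path"

function findNearestPackageVersion(startDir: string): string | null {
  let dir = startDir
  // walk upwards until the directory stops changing, i.e. the filesystem root
  for (;;) {
    const candidate = join(dir, "package.json")
    if (existsSync(candidate)) {
      return JSON.parse(readFileSync(candidate, "utf-8")).version ?? null
    }
    const parent = dirname(dir)
    if (parent === dir) {
      return null
    }
    dir = parent
  }
}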
@@ -97,3 +97,11 @@ export class InvalidAPIKeyError extends BudibaseError {
     )
   }
 }
+
+// USERS
+
+export class EmailUnavailableError extends Error {
+  constructor(email: string) {
+    super(`Email already in use: '${email}'`)
+  }
+}
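A small sketch of how the new error class might be used by a user-save path and caught further up; the surrounding function and response handling are illustrative only:

function assertEmailAvailable(alreadyExists: boolean, email: string) {
  if (alreadyExists) {
    throw new EmailUnavailableError(email)
  }
}

try {
  assertEmailAvailable(true, "a@b.com")
} catch (err) {
  if (err instanceof EmailUnavailableError) {
    // map to a conflict / validation response at the API layer
  }
}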
@@ -0,0 +1,2 @@
+export * from "./queue"
+export * from "./publisher"
@@ -0,0 +1,12 @@
+import { AsyncEvents } from "@budibase/types"
+import { EventPayload, asyncEventQueue, init } from "./queue"
+
+export async function publishAsyncEvent(payload: EventPayload) {
+  if (!asyncEventQueue) {
+    init()
+  }
+  const { event, identity } = payload
+  if (AsyncEvents.indexOf(event) !== -1 && identity.tenantId) {
+    await asyncEventQueue.add(payload)
+  }
+}
@@ -0,0 +1,22 @@
+import BullQueue from "bull"
+import { createQueue, JobQueue } from "../../queue"
+import { Event, Identity } from "@budibase/types"
+
+export interface EventPayload {
+  event: Event
+  identity: Identity
+  properties: any
+  timestamp?: string | number
+}
+
+export let asyncEventQueue: BullQueue.Queue
+
+export function init() {
+  asyncEventQueue = createQueue<EventPayload>(JobQueue.SYSTEM_EVENT_QUEUE)
+}
+
+export async function shutdown() {
+  if (asyncEventQueue) {
+    await asyncEventQueue.close()
+  }
+}
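The queue module wraps a Bull queue for system events. A stripped-down sketch of the producer/consumer pair it enables, using Bull directly and assuming a reachable Redis instance on the default port (queue name and payload are illustrative):

import BullQueue from "bull"

const queue = new BullQueue("budibase-system-events")

// producer side
export async function publishExample() {
  await queue.add({ event: "user:created", properties: { userId: "us_123" } })
}

// consumer side: job.data carries whatever payload the producer added
queue.process(async job => {
  console.log("processing", job.data.event)
})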
@@ -0,0 +1,56 @@
+import {
+  Event,
+  UserCreatedEvent,
+  UserUpdatedEvent,
+  UserDeletedEvent,
+  UserPermissionAssignedEvent,
+  UserPermissionRemovedEvent,
+  GroupCreatedEvent,
+  GroupUpdatedEvent,
+  GroupDeletedEvent,
+  GroupUsersAddedEvent,
+  GroupUsersDeletedEvent,
+  GroupPermissionsEditedEvent,
+} from "@budibase/types"
+
+const getEventProperties: Record<
+  string,
+  (properties: any) => string | undefined
+> = {
+  [Event.USER_CREATED]: (properties: UserCreatedEvent) => properties.userId,
+  [Event.USER_UPDATED]: (properties: UserUpdatedEvent) => properties.userId,
+  [Event.USER_DELETED]: (properties: UserDeletedEvent) => properties.userId,
+  [Event.USER_PERMISSION_ADMIN_ASSIGNED]: (
+    properties: UserPermissionAssignedEvent
+  ) => properties.userId,
+  [Event.USER_PERMISSION_ADMIN_REMOVED]: (
+    properties: UserPermissionRemovedEvent
+  ) => properties.userId,
+  [Event.USER_PERMISSION_BUILDER_ASSIGNED]: (
+    properties: UserPermissionAssignedEvent
+  ) => properties.userId,
+  [Event.USER_PERMISSION_BUILDER_REMOVED]: (
+    properties: UserPermissionRemovedEvent
+  ) => properties.userId,
+  [Event.USER_GROUP_CREATED]: (properties: GroupCreatedEvent) =>
+    properties.groupId,
+  [Event.USER_GROUP_UPDATED]: (properties: GroupUpdatedEvent) =>
+    properties.groupId,
+  [Event.USER_GROUP_DELETED]: (properties: GroupDeletedEvent) =>
+    properties.groupId,
+  [Event.USER_GROUP_USERS_ADDED]: (properties: GroupUsersAddedEvent) =>
+    properties.groupId,
+  [Event.USER_GROUP_USERS_REMOVED]: (properties: GroupUsersDeletedEvent) =>
+    properties.groupId,
+  [Event.USER_GROUP_PERMISSIONS_EDITED]: (
+    properties: GroupPermissionsEditedEvent
+  ) => properties.groupId,
+}
+
+export function getDocumentId(event: Event, properties: any) {
+  const extractor = getEventProperties[event]
+  if (!extractor) {
+    throw new Error("Event does not have a method of document ID extraction")
+  }
+  return extractor(properties)
+}
@@ -1,7 +1,8 @@
-import { Event, AuditedEventFriendlyName } from "@budibase/types"
+import { Event } from "@budibase/types"
 import { processors } from "./processors"
 import identification from "./identification"
 import * as backfill from "./backfill"
+import { publishAsyncEvent } from "./asyncEvents"

 export const publishEvent = async (
   event: Event,
@@ -14,6 +15,14 @@ export const publishEvent = async (
   const backfilling = await backfill.isBackfillingEvent(event)
   // no backfill - send the event and exit
   if (!backfilling) {
+    // send off async events if required
+    await publishAsyncEvent({
+      event,
+      identity,
+      properties,
+      timestamp,
+    })
+    // now handle the main sync event processing pipeline
     await processors.processEvent(event, identity, properties, timestamp)
     return
   }

@@ -23,8 +23,6 @@ import * as installation from "../installation"
 import * as configs from "../configs"
 import { withCache, TTL, CacheKey } from "../cache/generic"
-
-const pkg = require("../../package.json")

 /**
  * An identity can be:
  * - account user (Self host)
@@ -65,6 +63,7 @@ const getCurrentIdentity = async (): Promise<Identity> => {
       hosting,
       installationId,
       tenantId,
+      realTenantId: context.getTenantId(),
       environment,
     }
   } else if (identityType === IdentityType.USER) {
@@ -101,7 +100,7 @@ const identifyInstallationGroup = async (
   const id = installId
   const type = IdentityType.INSTALLATION
   const hosting = getHostingFromEnv()
-  const version = pkg.version
+  const version = env.VERSION
   const environment = getDeploymentEnvironment()

   const group: InstallationGroup = {
@@ -305,4 +304,5 @@ export default {
   identify,
   identifyGroup,
   getInstallationId,
+  getUniqueTenantId,
 }

@@ -6,6 +6,8 @@ export * as backfillCache from "./backfill"

 import { processors } from "./processors"

+export function initAsyncEvents() {}
+
 export const shutdown = () => {
   processors.shutdown()
   console.log("Events shutdown")

@@ -2,14 +2,6 @@ import { Event, Identity, Group } from "@budibase/types"
 import { EventProcessor } from "./types"
 import env from "../../environment"

-const getTimestampString = (timestamp?: string | number) => {
-  let timestampString = ""
-  if (timestamp) {
-    timestampString = `[timestamp=${new Date(timestamp).toISOString()}]`
-  }
-  return timestampString
-}
-
 const skipLogging = env.SELF_HOSTED && !env.isDev()

 export default class LoggingProcessor implements EventProcessor {
@@ -22,32 +14,21 @@ export default class LoggingProcessor implements EventProcessor {
     if (skipLogging) {
       return
     }
-    let timestampString = getTimestampString(timestamp)
-    let message = `[audit] [identityType=${identity.type}] ${timestampString} ${event} `
-    if (env.isDev()) {
-      message = message + `[debug: [properties=${JSON.stringify(properties)}] ]`
-    }
-    console.log(message)
+    console.log(`[audit] [identityType=${identity.type}] ${event}`, properties)
   }

   async identify(identity: Identity, timestamp?: string | number) {
     if (skipLogging) {
       return
     }
-    let timestampString = getTimestampString(timestamp)
-    console.log(
-      `[audit] [${JSON.stringify(identity)}] ${timestampString} identified`
-    )
+    console.log(`[audit] identified`, identity)
   }

   async identifyGroup(group: Group, timestamp?: string | number) {
     if (skipLogging) {
       return
     }
-    let timestampString = getTimestampString(timestamp)
-    console.log(
-      `[audit] [${JSON.stringify(group)}] ${timestampString} group identified`
-    )
+    console.log(`[audit] group identified`, group)
   }

   shutdown(): void {

@@ -25,7 +25,9 @@ export default class Processor implements EventProcessor {
     timestamp?: string | number
   ): Promise<void> {
     for (const eventProcessor of this.processors) {
-      await eventProcessor.identify(identity, timestamp)
+      if (eventProcessor.identify) {
+        await eventProcessor.identify(identity, timestamp)
+      }
     }
   }

@@ -34,13 +36,17 @@ export default class Processor implements EventProcessor {
     timestamp?: string | number
   ): Promise<void> {
     for (const eventProcessor of this.processors) {
-      await eventProcessor.identifyGroup(identity, timestamp)
+      if (eventProcessor.identifyGroup) {
+        await eventProcessor.identifyGroup(identity, timestamp)
+      }
     }
   }

   shutdown() {
     for (const eventProcessor of this.processors) {
-      eventProcessor.shutdown()
+      if (eventProcessor.shutdown) {
+        eventProcessor.shutdown()
+      }
     }
   }
 }

@@ -0,0 +1,43 @@
+import { EventProcessor } from "../types"
+import { Event, Identity, DocUpdateEvent } from "@budibase/types"
+import { doInTenant } from "../../../context"
+import { getDocumentId } from "../../documentId"
+import { shutdown } from "../../asyncEvents"
+
+export type Processor = (update: DocUpdateEvent) => Promise<void>
+export type ProcessorMap = { events: Event[]; processor: Processor }[]
+
+export default class DocumentUpdateProcessor implements EventProcessor {
+  processors: ProcessorMap = []
+
+  constructor(processors: ProcessorMap) {
+    this.processors = processors
+  }
+
+  async processEvent(
+    event: Event,
+    identity: Identity,
+    properties: any,
+    timestamp?: string | number
+  ) {
+    const tenantId = identity.realTenantId
+    const docId = getDocumentId(event, properties)
+    if (!tenantId || !docId) {
+      return
+    }
+    for (let { events, processor } of this.processors) {
+      if (events.includes(event)) {
+        await doInTenant(tenantId, async () => {
+          await processor({
+            id: docId,
+            tenantId,
+          })
+        })
+      }
+    }
+  }
+
+  shutdown() {
+    return shutdown()
+  }
+}

@@ -4,7 +4,6 @@ import { EventProcessor } from "../types"
 import env from "../../../environment"
 import * as context from "../../../context"
 import * as rateLimiting from "./rateLimiting"
-const pkg = require("../../../../package.json")

 const EXCLUDED_EVENTS: Event[] = [
   Event.USER_UPDATED,
@@ -49,7 +48,7 @@ export default class PosthogProcessor implements EventProcessor {

     properties = this.clearPIIProperties(properties)

-    properties.version = pkg.version
+    properties.version = env.VERSION
     properties.service = env.SERVICE
     properties.environment = identity.environment
     properties.hosting = identity.hosting

@@ -1,4 +1,4 @@
-import { testEnv } from "../../../../../tests"
+import { testEnv } from "../../../../../tests/extra"
 import PosthogProcessor from "../PosthogProcessor"
 import { Event, IdentityType, Hosting } from "@budibase/types"
 const tk = require("timekeeper")

@@ -1,18 +1 @@
-import { Event, Identity, Group } from "@budibase/types"
-
-export enum EventProcessorType {
-  POSTHOG = "posthog",
-  LOGGING = "logging",
-}
-
-export interface EventProcessor {
-  processEvent(
-    event: Event,
-    identity: Identity,
-    properties: any,
-    timestamp?: string | number
-  ): Promise<void>
-  identify(identity: Identity, timestamp?: string | number): Promise<void>
-  identifyGroup(group: Group, timestamp?: string | number): Promise<void>
-  shutdown(): void
-}
+export { EventProcessor } from "@budibase/types"

@@ -9,12 +9,13 @@ import {
   GroupUsersDeletedEvent,
   GroupAddedOnboardingEvent,
   GroupPermissionsEditedEvent,
-  UserGroupRoles,
 } from "@budibase/types"
+import { isScim } from "../../context"

 async function created(group: UserGroup, timestamp?: number) {
   const properties: GroupCreatedEvent = {
     groupId: group._id as string,
+    viaScim: isScim(),
     audited: {
       name: group.name,
     },
@@ -25,6 +26,7 @@ async function created(group: UserGroup, timestamp?: number) {
 async function updated(group: UserGroup) {
   const properties: GroupUpdatedEvent = {
     groupId: group._id as string,
+    viaScim: isScim(),
     audited: {
       name: group.name,
     },
@@ -35,6 +37,7 @@ async function updated(group: UserGroup) {
 async function deleted(group: UserGroup) {
   const properties: GroupDeletedEvent = {
     groupId: group._id as string,
+    viaScim: isScim(),
     audited: {
       name: group.name,
     },
@@ -46,6 +49,7 @@ async function usersAdded(count: number, group: UserGroup) {
   const properties: GroupUsersAddedEvent = {
     count,
     groupId: group._id as string,
+    viaScim: isScim(),
     audited: {
       name: group.name,
     },
@@ -57,6 +61,7 @@ async function usersDeleted(count: number, group: UserGroup) {
   const properties: GroupUsersDeletedEvent = {
     count,
     groupId: group._id as string,
+    viaScim: isScim(),
     audited: {
       name: group.name,
     },

@@ -3,7 +3,6 @@ import {
   Event,
   LicenseActivatedEvent,
   LicensePlanChangedEvent,
-  LicenseTierChangedEvent,
   PlanType,
   Account,
   LicensePortalOpenedEvent,
@@ -11,22 +10,23 @@ import {
   LicenseCheckoutOpenedEvent,
   LicensePaymentFailedEvent,
   LicensePaymentRecoveredEvent,
+  PriceDuration,
 } from "@budibase/types"

-async function tierChanged(account: Account, from: number, to: number) {
-  const properties: LicenseTierChangedEvent = {
-    accountId: account.accountId,
-    to,
-    from,
+async function planChanged(
+  account: Account,
+  opts: {
+    from: PlanType
+    to: PlanType
+    fromQuantity: number | undefined
+    toQuantity: number | undefined
+    fromDuration: PriceDuration | undefined
+    toDuration: PriceDuration | undefined
   }
-  await publishEvent(Event.LICENSE_TIER_CHANGED, properties)
-}
-
-async function planChanged(account: Account, from: PlanType, to: PlanType) {
+) {
   const properties: LicensePlanChangedEvent = {
     accountId: account.accountId,
-    to,
-    from,
+    ...opts,
   }
   await publishEvent(Event.LICENSE_PLAN_CHANGED, properties)
 }
@@ -74,7 +74,6 @@ async function paymentRecovered(account: Account) {
 }

 export default {
-  tierChanged,
   planChanged,
   activated,
   checkoutOpened,

@@ -15,10 +15,12 @@ import {
   UserUpdatedEvent,
   UserOnboardingEvent,
 } from "@budibase/types"
+import { isScim } from "../../context"

 async function created(user: User, timestamp?: number) {
   const properties: UserCreatedEvent = {
     userId: user._id as string,
+    viaScim: isScim(),
     audited: {
       email: user.email,
     },
@@ -29,6 +31,7 @@ async function created(user: User, timestamp?: number) {
 async function updated(user: User) {
   const properties: UserUpdatedEvent = {
     userId: user._id as string,
+    viaScim: isScim(),
     audited: {
       email: user.email,
     },
@@ -39,6 +42,7 @@ async function updated(user: User) {
 async function deleted(user: User) {
   const properties: UserDeletedEvent = {
     userId: user._id as string,
+    viaScim: isScim(),
     audited: {
       email: user.email,
     },

@@ -27,6 +27,7 @@ export * as errors from "./errors"
 export * as timers from "./timers"
 export { default as env } from "./environment"
 export * as blacklist from "./blacklist"
+export * as docUpdates from "./docUpdates"
 export { SearchParams } from "./db"
 // Add context to tenancy for backwards compatibility
 // only do this for external usages to prevent internal

@@ -6,8 +6,7 @@ import { Installation, IdentityType, Database } from "@budibase/types"
 import * as context from "./context"
 import semver from "semver"
 import { bustCache, withCache, TTL, CacheKey } from "./cache/generic"
-const pkg = require("../package.json")
+import environment from "./environment"

 export const getInstall = async (): Promise<Installation> => {
   return withCache(CacheKey.INSTALLATION, TTL.ONE_DAY, getInstallFromDB, {
@@ -18,7 +17,7 @@ async function createInstallDoc(platformDb: Database) {
   const install: Installation = {
     _id: StaticDatabases.PLATFORM_INFO.docs.install,
     installId: newid(),
-    version: pkg.version,
+    version: environment.VERSION,
   }
   try {
     const resp = await platformDb.put(install)
@@ -33,7 +32,7 @@ async function createInstallDoc(platformDb: Database) {
   }
 }

-const getInstallFromDB = async (): Promise<Installation> => {
+export const getInstallFromDB = async (): Promise<Installation> => {
   return doWithDB(
     StaticDatabases.PLATFORM_INFO.name,
     async (platformDb: any) => {
@@ -80,7 +79,7 @@ export const checkInstallVersion = async (): Promise<void> => {
   const install = await getInstall()

   const currentVersion = install.version
-  const newVersion = pkg.version
+  const newVersion = environment.VERSION

   if (currentVersion !== newVersion) {
     const isUpgrade = semver.gt(newVersion, currentVersion)

@@ -1,60 +0,0 @@
-import { Header } from "./constants"
-import env from "./environment"
-const correlator = require("correlation-id")
-import { Options } from "pino-http"
-import { IncomingMessage } from "http"
-
-const NonErrors = ["AccountError"]
-
-function isSuppressed(e?: any) {
-  return e && e["suppressAlert"]
-}
-
-export function logAlert(message: string, e?: any) {
-  if (e && NonErrors.includes(e.name) && isSuppressed(e)) {
-    return
-  }
-  let errorJson = ""
-  if (e) {
-    errorJson = ": " + JSON.stringify(e, Object.getOwnPropertyNames(e))
-  }
-  console.error(`bb-alert: ${message} ${errorJson}`)
-}
-
-export function logAlertWithInfo(
-  message: string,
-  db: string,
-  id: string,
-  error: any
-) {
-  message = `${message} - db: ${db} - doc: ${id} - error: `
-  logAlert(message, error)
-}
-
-export function logWarn(message: string) {
-  console.warn(`bb-warn: ${message}`)
-}
-
-export function pinoSettings(): Options {
-  return {
-    prettyPrint: {
-      levelFirst: true,
-    },
-    genReqId: correlator.getId,
-    level: env.LOG_LEVEL || "error",
-    autoLogging: {
-      ignore: (req: IncomingMessage) => !!req.url?.includes("/health"),
-    },
-  }
-}
-
-const setCorrelationHeader = (headers: any) => {
-  const correlationId = correlator.getId()
-  if (correlationId) {
-    headers[Header.CORRELATION_ID] = correlationId
-  }
-}
-
-export const correlation = {
-  setHeader: setCorrelationHeader,
-}

@@ -0,0 +1,26 @@
+const NonErrors = ["AccountError"]
+
+function isSuppressed(e?: any) {
+  return e && e["suppressAlert"]
+}
+
+export function logAlert(message: string, e?: any) {
+  if (e && NonErrors.includes(e.name) && isSuppressed(e)) {
+    return
+  }
+  console.error(`bb-alert: ${message}`, e)
+}
+
+export function logAlertWithInfo(
+  message: string,
+  db: string,
+  id: string,
+  error: any
+) {
+  message = `${message} - db: ${db} - doc: ${id} - error: `
+  logAlert(message, error)
+}
+
+export function logWarn(message: string) {
+  console.warn(`bb-warn: ${message}`)
+}

@@ -0,0 +1,13 @@
+import { Header } from "../../constants"
+const correlator = require("correlation-id")
+
+export const setHeader = (headers: any) => {
+  const correlationId = correlator.getId()
+  if (correlationId) {
+    headers[Header.CORRELATION_ID] = correlationId
+  }
+}
+
+export function getId() {
+  return correlator.getId()
+}

@@ -0,0 +1 @@
+export * from "./correlation"

@@ -0,0 +1,17 @@
+import { Header } from "../../constants"
+import { v4 as uuid } from "uuid"
+const correlator = require("correlation-id")
+
+const correlation = (ctx: any, next: any) => {
+  // use the provided correlation id header if present
+  let correlationId = ctx.headers[Header.CORRELATION_ID]
+  if (!correlationId) {
+    correlationId = uuid()
+  }
+
+  return correlator.withId(correlationId, () => {
+    return next()
+  })
+}
+
+export default correlation

@@ -0,0 +1,6 @@
+export * as correlation from "./correlation/correlation"
+export { logger } from "./pino/logger"
+export * from "./alerts"
+
+// turn off or on context logging i.e. tenantId, appId etc
+export let LOG_CONTEXT = true

@@ -0,0 +1,173 @@
+import env from "../../environment"
+import pino, { LoggerOptions } from "pino"
+import * as context from "../../context"
+import * as correlation from "../correlation"
+import { IdentityType } from "@budibase/types"
+import { LOG_CONTEXT } from "../index"
+
+// LOGGER
+
+let pinoInstance: pino.Logger | undefined
+if (!env.DISABLE_PINO_LOGGER) {
+  const pinoOptions: LoggerOptions = {
+    level: env.LOG_LEVEL,
+    formatters: {
+      level: label => {
+        return { level: label.toUpperCase() }
+      },
+      bindings: () => {
+        return {}
+      },
+    },
+    timestamp: () => `,"timestamp":"${new Date(Date.now()).toISOString()}"`,
+  }
+
+  if (env.isDev()) {
+    pinoOptions.transport = {
+      target: "pino-pretty",
+      options: {
+        singleLine: true,
+      },
+    }
+  }
+
+  pinoInstance = pino(pinoOptions)
+
+  // CONSOLE OVERRIDES
+
+  interface MergingObject {
+    objects?: any[]
+    tenantId?: string
+    appId?: string
+    identityId?: string
+    identityType?: IdentityType
+    correlationId?: string
+    err?: Error
+  }
+
+  function isPlainObject(obj: any) {
+    return typeof obj === "object" && obj !== null && !(obj instanceof Error)
+  }
+
+  function isError(obj: any) {
+    return obj instanceof Error
+  }
+
+  function isMessage(obj: any) {
+    return typeof obj === "string"
+  }
+
+  /**
+   * Backwards compatibility between console logging statements
+   * and pino logging requirements.
+   */
+  function getLogParams(args: any[]): [MergingObject, string] {
+    let error = undefined
+    let objects: any[] = []
+    let message = ""
+
+    args.forEach(arg => {
+      if (isMessage(arg)) {
+        message = `${message} ${arg}`.trimStart()
+      }
+      if (isPlainObject(arg)) {
+        objects.push(arg)
+      }
+      if (isError(arg)) {
+        error = arg
+      }
+    })
+
+    const identity = getIdentity()
+
+    let contextObject = {}
+
+    if (LOG_CONTEXT) {
+      contextObject = {
+        tenantId: getTenantId(),
+        appId: getAppId(),
+        identityId: identity?._id,
+        identityType: identity?.type,
+        correlationId: correlation.getId(),
+      }
+    }
+
+    const mergingObject = {
+      objects: objects.length ? objects : undefined,
+      err: error,
+      ...contextObject,
+    }
+
+    return [mergingObject, message]
+  }
+
+  console.log = (...arg: any[]) => {
+    const [obj, msg] = getLogParams(arg)
+    pinoInstance?.info(obj, msg)
+  }
+  console.info = (...arg: any[]) => {
+    const [obj, msg] = getLogParams(arg)
+    pinoInstance?.info(obj, msg)
+  }
+  console.warn = (...arg: any[]) => {
+    const [obj, msg] = getLogParams(arg)
+    pinoInstance?.warn(obj, msg)
+  }
+  console.error = (...arg: any[]) => {
+    const [obj, msg] = getLogParams(arg)
+    pinoInstance?.error(obj, msg)
+  }
+
+  /**
+   * custom trace impl - this resembles the node trace behaviour rather
+   * than traditional trace logging
+   * @param arg
+   */
+  console.trace = (...arg: any[]) => {
+    const [obj, msg] = getLogParams(arg)
+    if (!obj.err) {
+      // to get stack trace
+      obj.err = new Error()
+    }
+    pinoInstance?.trace(obj, msg)
+  }
+
+  console.debug = (...arg: any) => {
+    const [obj, msg] = getLogParams(arg)
+    pinoInstance?.debug(obj, msg)
+  }
+
+  // CONTEXT
+
+  const getTenantId = () => {
+    let tenantId
+    try {
+      tenantId = context.getTenantId()
+    } catch (e: any) {
+      // do nothing
+    }
+    return tenantId
+  }
+
+  const getAppId = () => {
+    let appId
+    try {
+      appId = context.getAppId()
+    } catch (e) {
+      // do nothing
+    }
+    return appId
+  }
+
+  const getIdentity = () => {
+    let identity
+    try {
+      identity = context.getIdentity()
+    } catch (e) {
+      // do nothing
+    }
+    return identity
+  }
+}
+
+export const logger = pinoInstance

@@ -0,0 +1,45 @@
+import env from "../../environment"
+import { logger } from "./logger"
+import { IncomingMessage } from "http"
+const pino = require("koa-pino-logger")
+import { Options } from "pino-http"
+import { Ctx } from "@budibase/types"
+const correlator = require("correlation-id")
+
+export function pinoSettings(): Options {
+  return {
+    logger,
+    genReqId: correlator.getId,
+    autoLogging: {
+      ignore: (req: IncomingMessage) => !!req.url?.includes("/health"),
+    },
+    serializers: {
+      req: req => {
+        return {
+          method: req.method,
+          url: req.url,
+          correlationId: req.id,
+        }
+      },
+      res: res => {
+        return {
+          status: res.statusCode,
+        }
+      },
+    },
+  }
+}
+
+function getMiddleware() {
+  if (env.HTTP_LOGGING) {
+    return pino(pinoSettings())
+  } else {
+    return (ctx: Ctx, next: any) => {
+      return next()
+    }
+  }
+}
+
+const pinoMiddleware = getMiddleware()
+
+export default pinoMiddleware

@@ -44,7 +44,7 @@ async function checkApiKey(apiKey: string, populateUser?: Function) {
   // check both the primary and the fallback internal api keys
   // this allows for rotation
   if (isValidInternalAPIKey(apiKey)) {
-    return { valid: true }
+    return { valid: true, user: undefined }
   }
   const decrypted = decrypt(apiKey)
   const tenantId = decrypted.split(SEPARATOR)[0]
@@ -96,9 +96,15 @@ export default function (
     }
     try {
      // check the actual user is authenticated first, try header or cookie
-      const headerToken = ctx.request.headers[Header.TOKEN]
+      let headerToken = ctx.request.headers[Header.TOKEN]
+
       const authCookie = getCookie(ctx, Cookie.Auth) || openJwt(headerToken)
-      const apiKey = ctx.request.headers[Header.API_KEY]
+      let apiKey = ctx.request.headers[Header.API_KEY]
+
+      if (!apiKey && ctx.request.headers[Header.AUTHORIZATION]) {
+        apiKey = ctx.request.headers[Header.AUTHORIZATION].split(" ")[1]
+      }
+
       const tenantId = ctx.request.headers[Header.TENANT_ID]
       let authenticated = false,
         user = null,
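
Not part of the commit: a rough sketch of what the Authorization fallback above enables for API clients. The URL and port are placeholder assumptions; only the "Bearer <key>" shape follows from the middleware change.

import fetch from "node-fetch"

// previously the key had to be sent in the dedicated API key header;
// the middleware now also accepts the second token of the Authorization
// header, i.e. "Authorization: Bearer <apiKey>"
export async function listTablesWithBearerKey(apiKey: string) {
  return fetch("http://localhost:10000/api/public/v1/tables", {
    headers: { Authorization: `Bearer ${apiKey}` },
  })
}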
@@ -1,6 +1,5 @@
 import { APIError } from "@budibase/types"
 import * as errors from "../errors"
-import env from "../environment"

 export async function errorHandling(ctx: any, next: any) {
   try {
@@ -9,9 +8,10 @@ export async function errorHandling(ctx: any, next: any) {
     const status = err.status || err.statusCode || 500
     ctx.status = status

-    if (status > 499 || env.ENABLE_4XX_HTTP_LOGGING) {
-      ctx.log.error(err)
-      console.trace(err)
+    if (status >= 400 && status < 500) {
+      console.warn(err)
+    } else {
+      console.error(err)
     }

     const error = errors.getPublicError(err)

@@ -14,7 +14,8 @@ export { default as csrf } from "./csrf"
 export { default as adminOnly } from "./adminOnly"
 export { default as builderOrAdmin } from "./builderOrAdmin"
 export { default as builderOnly } from "./builderOnly"
-export { default as logging } from "./logging"
+export { default as pino } from "../logging/pino/middleware"
+export { default as correlation } from "../logging/correlation/middleware"
 export { default as errorHandling } from "./errorHandling"
 export { default as querystringToBody } from "./querystringToBody"
 export * as joiValidator from "./joi-validator"

@@ -1,90 +0,0 @@
-const correlator = require("correlation-id")
-import { Header } from "../constants"
-import { v4 as uuid } from "uuid"
-import * as context from "../context"
-
-const debug = console.warn
-const trace = console.trace
-const log = console.log
-const info = console.info
-const warn = console.warn
-const error = console.error
-
-const getTenantId = () => {
-  let tenantId
-  try {
-    tenantId = context.getTenantId()
-  } catch (e: any) {
-    // do nothing
-  }
-  return tenantId
-}
-
-const getAppId = () => {
-  let appId
-  try {
-    appId = context.getAppId()
-  } catch (e) {
-    // do nothing
-  }
-  return appId
-}
-
-const getIdentityId = () => {
-  let identityId
-  try {
-    const identity = context.getIdentity()
-    identityId = identity?._id
-  } catch (e) {
-    // do nothing
-  }
-  return identityId
-}
-
-const print = (fn: any, data: any[]) => {
-  let message = ""
-
-  const correlationId = correlator.getId()
-  if (correlationId) {
-    message = message + `[correlationId=${correlator.getId()}]`
-  }
-
-  const tenantId = getTenantId()
-  if (tenantId) {
-    message = message + ` [tenantId=${tenantId}]`
-  }
-
-  const appId = getAppId()
-  if (appId) {
-    message = message + ` [appId=${appId}]`
-  }
-
-  const identityId = getIdentityId()
-  if (identityId) {
-    message = message + ` [identityId=${identityId}]`
-  }
-
-  if (!process.env.CI) {
-    fn(message, data)
-  }
-}
-
-const logging = (ctx: any, next: any) => {
-  // use the provided correlation id header if present
-  let correlationId = ctx.headers[Header.CORRELATION_ID]
-  if (!correlationId) {
-    correlationId = uuid()
-  }
-
-  return correlator.withId(correlationId, () => {
-    console.debug = data => print(debug, data)
-    console.trace = data => print(trace, data)
-    console.log = data => print(log, data)
-    console.info = data => print(info, data)
-    console.warn = data => print(warn, data)
-    console.error = data => print(error, data)
-    return next()
-  })
-}
-
-export default logging

@@ -1,6 +1,7 @@
 import fetch from "node-fetch"
 import * as sso from "./sso"
 import { ssoCallbackUrl } from "../utils"
+import { validEmail } from "../../../utils"
 import {
   ConfigType,
   OIDCInnerConfig,
@@ -11,6 +12,7 @@ import {
   JwtClaims,
   SaveSSOUserFunction,
 } from "@budibase/types"
+
 const OIDCStrategy = require("@techpass/passport-openidconnect").Strategy

 export function buildVerifyFn(saveUserFn: SaveSSOUserFunction) {
@@ -86,15 +88,6 @@ function getEmail(profile: SSOProfile, jwtClaims: JwtClaims) {
   )
 }

-function validEmail(value: string) {
-  return (
-    value &&
-    !!value.match(
-      /^(([^<>()[\]\\.,;:\s@"]+(\.[^<>()[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/
-    )
-  )
-}
-
 /**
  * Create an instance of the oidc passport strategy. This wrapper fetches the configuration
  * from couchDB rather than environment variables, using this factory is necessary for dynamically configuring passport.

@@ -1,4 +1,5 @@
-import { structures, testEnv, mocks } from "../../../../../tests"
+import { structures, mocks } from "../../../../../tests"
+import { testEnv } from "../../../../../tests/extra"
 import { SSOAuthDetails, User } from "@budibase/types"

 import { HTTPError } from "../../../../errors"

@@ -99,9 +99,7 @@ export const runMigration = async (
       options.force[migrationType] &&
       options.force[migrationType].includes(migrationName)
     ) {
-      log(
-        `[Tenant: ${tenantId}] [Migration: ${migrationName}] [DB: ${dbName}] Forcing`
-      )
+      log(`[Migration: ${migrationName}] [DB: ${dbName}] Forcing`)
     } else {
       // no force, exit
       return
@@ -111,7 +109,7 @@ export const runMigration = async (
     // check if the migration is not a no-op
     if (!options.noOp) {
       log(
-        `[Tenant: ${tenantId}] [Migration: ${migrationName}] [DB: ${dbName}] Running ${lengthStatement}`
+        `[Migration: ${migrationName}] [DB: ${dbName}] Running ${lengthStatement}`
       )

       if (migration.preventRetry) {
@@ -131,9 +129,7 @@ export const runMigration = async (
         await migration.fn(db)
       }

-      log(
-        `[Tenant: ${tenantId}] [Migration: ${migrationName}] [DB: ${dbName}] Complete`
-      )
+      log(`[Migration: ${migrationName}] [DB: ${dbName}] Complete`)
     }

     // mark as complete
@@ -141,7 +137,7 @@ export const runMigration = async (
     await db.put(doc)
   } catch (err) {
     console.error(
-      `[Tenant: ${tenantId}] [Migration: ${migrationName}] [DB: ${dbName}] Error: `,
+      `[Migration: ${migrationName}] [DB: ${dbName}] Error: `,
       err
     )
     throw err

@@ -1,7 +1,7 @@
 // Jest Snapshot v1, https://goo.gl/fbAQLP

 exports[`migrations should match snapshot 1`] = `
-Object {
+{
   "_id": "migrations",
   "_rev": "1-2f64479842a0513aa8b97f356b0b9127",
   "createdAt": "2020-01-01T00:00:00.000Z",

@@ -1,4 +1,4 @@
-import { testEnv, DBTestConfiguration } from "../../../tests"
+import { testEnv, DBTestConfiguration } from "../../../tests/extra"
 import * as migrations from "../index"
 import * as context from "../../context"
 import { MigrationType } from "@budibase/types"

@@ -1,6 +1,6 @@
 import * as app from "../app"
 import { getAppFileUrl } from "../app"
-import { testEnv } from "../../../../tests"
+import { testEnv } from "../../../../tests/extra"

 describe("app", () => {
   beforeEach(() => {

@@ -1,5 +1,5 @@
 import * as global from "../global"
-import { testEnv } from "../../../../tests"
+import { testEnv } from "../../../../tests/extra"

 describe("global", () => {
   describe("getGlobalFileUrl", () => {

@@ -1,5 +1,6 @@
 import * as plugins from "../plugins"
-import { structures, testEnv } from "../../../../tests"
+import { structures } from "../../../../tests"
+import { testEnv } from "../../../../tests/extra"

 describe("plugins", () => {
   describe("enrichPluginURLs", () => {

@@ -3,7 +3,7 @@ import AWS from "aws-sdk"
 import stream from "stream"
 import fetch from "node-fetch"
 import tar from "tar-fs"
-const zlib = require("zlib")
+import zlib from "zlib"
 import { promisify } from "util"
 import { join } from "path"
 import fs from "fs"
@@ -415,7 +415,7 @@ export const downloadTarballDirect = async (
     throw new Error(`unexpected response ${response.statusText}`)
   }

-  await streamPipeline(response.body, zlib.Unzip(), tar.extract(path))
+  await streamPipeline(response.body, zlib.createUnzip(), tar.extract(path))
 }

 export const downloadTarball = async (
@@ -431,7 +431,7 @@ export const downloadTarball = async (
   }

   const tmpPath = join(budibaseTempDir(), path)
-  await streamPipeline(response.body, zlib.Unzip(), tar.extract(tmpPath))
+  await streamPipeline(response.body, zlib.createUnzip(), tar.extract(tmpPath))
   if (!env.isTest() && env.SELF_HOSTED) {
     await uploadDirectory(bucketName, tmpPath, path)
   }

@@ -1,4 +1,5 @@
-import { DBTestConfiguration, structures } from "../../../tests"
+import { structures } from "../../../tests"
+import { DBTestConfiguration } from "../../../tests/extra"
 import * as tenants from "../tenants"

 describe("tenants", () => {

@@ -0,0 +1,83 @@
+import { validate } from "../utils"
+import fetch from "node-fetch"
+import { PluginType } from "@budibase/types"
+
+const repoUrl =
+  "https://raw.githubusercontent.com/Budibase/budibase-skeleton/master"
+const automationLink = `${repoUrl}/automation/schema.json.hbs`
+const componentLink = `${repoUrl}/component/schema.json.hbs`
+const datasourceLink = `${repoUrl}/datasource/schema.json.hbs`
+
+async function getSchema(link: string) {
+  const response = await fetch(link)
+  if (response.status > 300) {
+    return
+  }
+  const text = await response.text()
+  return JSON.parse(text)
+}
+
+async function runTest(opts: { link?: string; schema?: any }) {
+  let error
+  try {
+    let schema = opts.schema
+    if (opts.link) {
+      schema = await getSchema(opts.link)
+    }
+    validate(schema)
+  } catch (err) {
+    error = err
+  }
+  return error
+}
+
+describe("it should be able to validate an automation schema", () => {
+  it("should return automation skeleton schema is valid", async () => {
+    const error = await runTest({ link: automationLink })
+    expect(error).toBeUndefined()
+  })
+
+  it("should fail given invalid automation schema", async () => {
+    const error = await runTest({
+      schema: {
+        type: PluginType.AUTOMATION,
+        schema: {},
+      },
+    })
+    expect(error).toBeDefined()
+  })
+})
+
+describe("it should be able to validate a component schema", () => {
+  it("should return component skeleton schema is valid", async () => {
+    const error = await runTest({ link: componentLink })
+    expect(error).toBeUndefined()
+  })
+
+  it("should fail given invalid component schema", async () => {
+    const error = await runTest({
+      schema: {
+        type: PluginType.COMPONENT,
+        schema: {},
+      },
+    })
+    expect(error).toBeDefined()
+  })
+})
+
+describe("it should be able to validate a datasource schema", () => {
+  it("should return datasource skeleton schema is valid", async () => {
+    const error = await runTest({ link: datasourceLink })
+    expect(error).toBeUndefined()
+  })
+
+  it("should fail given invalid datasource schema", async () => {
+    const error = await runTest({
+      schema: {
+        type: PluginType.DATASOURCE,
+        schema: {},
+      },
+    })
+    expect(error).toBeDefined()
+  })
+})

@@ -1,4 +1,12 @@
-import { DatasourceFieldType, QueryType, PluginType } from "@budibase/types"
+import {
+  DatasourceFieldType,
+  QueryType,
+  PluginType,
+  AutomationStepType,
+  AutomationStepIdArray,
+  AutomationIOType,
+  AutomationCustomIOType,
+} from "@budibase/types"
 import joi from "joi"

 const DATASOURCE_TYPES = [
@@ -19,7 +27,7 @@ function runJoi(validator: joi.Schema, schema: any) {

 function validateComponent(schema: any) {
   const validator = joi.object({
-    type: joi.string().allow("component").required(),
+    type: joi.string().allow(PluginType.COMPONENT).required(),
     metadata: joi.object().unknown(true).required(),
     hash: joi.string().optional(),
     version: joi.string().optional(),
@@ -53,7 +61,7 @@ function validateDatasource(schema: any) {
     .required()

   const validator = joi.object({
-    type: joi.string().allow("datasource").required(),
+    type: joi.string().allow(PluginType.DATASOURCE).required(),
     metadata: joi.object().unknown(true).required(),
     hash: joi.string().optional(),
     version: joi.string().optional(),
@@ -82,6 +90,55 @@ function validateDatasource(schema: any) {
   runJoi(validator, schema)
 }

+function validateAutomation(schema: any) {
+  const basePropsValidator = joi.object().pattern(joi.string(), {
+    type: joi
+      .string()
+      .allow(...Object.values(AutomationIOType))
+      .required(),
+    customType: joi.string().allow(...Object.values(AutomationCustomIOType)),
+    title: joi.string(),
+    description: joi.string(),
+    enum: joi.array().items(joi.string()),
+    pretty: joi.array().items(joi.string()),
+  })
+  const stepSchemaValidator = joi
+    .object({
+      properties: basePropsValidator,
+      required: joi.array().items(joi.string()),
+    })
+    .concat(basePropsValidator)
+    .required()
+  const validator = joi.object({
+    type: joi.string().allow(PluginType.AUTOMATION).required(),
+    metadata: joi.object().unknown(true).required(),
+    hash: joi.string().optional(),
+    version: joi.string().optional(),
+    schema: joi.object({
+      name: joi.string().required(),
+      tagline: joi.string().required(),
+      icon: joi.string().required(),
+      description: joi.string().required(),
+      type: joi
+        .string()
+        .allow(AutomationStepType.ACTION, AutomationStepType.LOGIC)
+        .required(),
+      stepId: joi
+        .string()
+        .disallow(...AutomationStepIdArray)
+        .required(),
+      inputs: joi.object().optional(),
+      schema: joi
+        .object({
+          inputs: stepSchemaValidator,
+          outputs: stepSchemaValidator,
+        })
+        .required(),
+    }),
+  })
+  runJoi(validator, schema)
+}
+
 export function validate(schema: any) {
   switch (schema?.type) {
     case PluginType.COMPONENT:
@@ -90,6 +147,9 @@ export function validate(schema: any) {
     case PluginType.DATASOURCE:
       validateDatasource(schema)
       break
+    case PluginType.AUTOMATION:
+      validateAutomation(schema)
+      break
     default:
       throw new Error(`Unknown plugin type - check schema.json: ${schema.type}`)
   }

@@ -2,4 +2,5 @@ export enum JobQueue {
   AUTOMATION = "automationQueue",
   APP_BACKUP = "appBackupQueue",
   AUDIT_LOG = "auditLogQueue",
+  SYSTEM_EVENT_QUEUE = "systemEventQueue",
 }

@@ -1,10 +1,16 @@
-import Redlock, { Options } from "redlock"
+import Redlock from "redlock"
 import { getLockClient } from "./init"
 import { LockOptions, LockType } from "@budibase/types"
 import * as context from "../context"
 import env from "../environment"

-const getClient = async (type: LockType): Promise<Redlock> => {
+const getClient = async (
+  type: LockType,
+  opts?: Redlock.Options
+): Promise<Redlock> => {
+  if (type === LockType.CUSTOM) {
+    return newRedlock(opts)
+  }
   if (env.isTest() && type !== LockType.TRY_ONCE) {
     return newRedlock(OPTIONS.TEST)
   }
@@ -56,7 +62,7 @@ const OPTIONS = {
   },
 }

-const newRedlock = async (opts: Options = {}) => {
+const newRedlock = async (opts: Redlock.Options = {}) => {
   let options = { ...OPTIONS.DEFAULT, ...opts }
   const redisWrapper = await getLockClient()
   const client = redisWrapper.getClient()

@@ -12,7 +12,7 @@ export enum SecretOption {
   ENCRYPTION = "encryption",
 }
 
-function getSecret(secretOption: SecretOption): string {
+export function getSecret(secretOption: SecretOption): string {
   let secret, secretName
   switch (secretOption) {
     case SecretOption.ENCRYPTION:

@@ -24,7 +24,7 @@ export enum PermissionType {
   QUERY = "query",
 }
 
-class Permission {
+export class Permission {
   type: PermissionType
   level: PermissionLevel

@@ -34,7 +34,7 @@ class Permission {
   }
 }
 
-function levelToNumber(perm: PermissionLevel) {
+export function levelToNumber(perm: PermissionLevel) {
   switch (perm) {
     // not everything has execute privileges
     case PermissionLevel.EXECUTE:

@@ -55,7 +55,7 @@ function levelToNumber(perm: PermissionLevel) {
  * @param {string} userPermLevel The permission level of the user.
  * @return {string[]} All the permission levels this user is allowed to carry out.
  */
-function getAllowedLevels(userPermLevel: PermissionLevel) {
+export function getAllowedLevels(userPermLevel: PermissionLevel): string[] {
   switch (userPermLevel) {
     case PermissionLevel.EXECUTE:
       return [PermissionLevel.EXECUTE]

@@ -64,9 +64,9 @@ function getAllowedLevels(userPermLevel: PermissionLevel) {
     case PermissionLevel.WRITE:
     case PermissionLevel.ADMIN:
       return [
+        PermissionLevel.EXECUTE,
         PermissionLevel.READ,
         PermissionLevel.WRITE,
-        PermissionLevel.EXECUTE,
       ]
     default:
       return []

@@ -81,7 +81,7 @@ export enum BuiltinPermissionID {
   POWER = "power",
 }
 
-const BUILTIN_PERMISSIONS = {
+export const BUILTIN_PERMISSIONS = {
   PUBLIC: {
     _id: BuiltinPermissionID.PUBLIC,
     name: "Public",

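The only behavioural effect of the reorder in getAllowedLevels above is that the returned array now lists levels from lowest to highest, e.g.:

getAllowedLevels(PermissionLevel.WRITE)
// before: ["read", "write", "execute"]
// after:  ["execute", "read", "write"]
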
@@ -0,0 +1,31 @@
+import { encrypt, decrypt, SecretOption, getSecret } from "../encryption"
+import env from "../../environment"
+
+describe("encryption", () => {
+  it("should throw an error if API encryption key is not set", () => {
+    const jwt = getSecret(SecretOption.API)
+    expect(jwt).toBe(env.JWT_SECRET)
+  })
+
+  it("should throw an error if encryption key is not set", () => {
+    expect(() => getSecret(SecretOption.ENCRYPTION)).toThrow(
+      'Secret "ENCRYPTION_KEY" has not been set in environment.'
+    )
+  })
+
+  it("should encrypt and decrypt a string using API encryption key", () => {
+    env._set("API_ENCRYPTION_KEY", "api_secret")
+    const plaintext = "budibase"
+    const apiEncrypted = encrypt(plaintext, SecretOption.API)
+    const decrypted = decrypt(apiEncrypted, SecretOption.API)
+    expect(decrypted).toEqual(plaintext)
+  })
+
+  it("should encrypt and decrypt a string using encryption key", () => {
+    env._set("ENCRYPTION_KEY", "normal_secret")
+    const plaintext = "budibase"
+    const encryptionEncrypted = encrypt(plaintext, SecretOption.ENCRYPTION)
+    const decrypted = decrypt(encryptionEncrypted, SecretOption.ENCRYPTION)
+    expect(decrypted).toEqual(plaintext)
+  })
+})

@@ -0,0 +1,145 @@
+import { cloneDeep } from "lodash"
+import * as permissions from "../permissions"
+import { BUILTIN_ROLE_IDS } from "../roles"
+
+describe("levelToNumber", () => {
+  it("should return 0 for EXECUTE", () => {
+    expect(permissions.levelToNumber(permissions.PermissionLevel.EXECUTE)).toBe(
+      0
+    )
+  })
+
+  it("should return 1 for READ", () => {
+    expect(permissions.levelToNumber(permissions.PermissionLevel.READ)).toBe(1)
+  })
+
+  it("should return 2 for WRITE", () => {
+    expect(permissions.levelToNumber(permissions.PermissionLevel.WRITE)).toBe(2)
+  })
+
+  it("should return 3 for ADMIN", () => {
+    expect(permissions.levelToNumber(permissions.PermissionLevel.ADMIN)).toBe(3)
+  })
+
+  it("should return -1 for an unknown permission level", () => {
+    expect(
+      permissions.levelToNumber("unknown" as permissions.PermissionLevel)
+    ).toBe(-1)
+  })
+})
+describe("getAllowedLevels", () => {
+  it('should return ["execute"] for EXECUTE', () => {
+    expect(
+      permissions.getAllowedLevels(permissions.PermissionLevel.EXECUTE)
+    ).toEqual([permissions.PermissionLevel.EXECUTE])
+  })
+
+  it('should return ["execute", "read"] for READ', () => {
+    expect(
+      permissions.getAllowedLevels(permissions.PermissionLevel.READ)
+    ).toEqual([
+      permissions.PermissionLevel.EXECUTE,
+      permissions.PermissionLevel.READ,
+    ])
+  })
+
+  it('should return ["execute", "read", "write"] for WRITE', () => {
+    expect(
+      permissions.getAllowedLevels(permissions.PermissionLevel.WRITE)
+    ).toEqual([
+      permissions.PermissionLevel.EXECUTE,
+      permissions.PermissionLevel.READ,
+      permissions.PermissionLevel.WRITE,
+    ])
+  })
+
+  it('should return ["execute", "read", "write"] for ADMIN', () => {
+    expect(
+      permissions.getAllowedLevels(permissions.PermissionLevel.ADMIN)
+    ).toEqual([
+      permissions.PermissionLevel.EXECUTE,
+      permissions.PermissionLevel.READ,
+      permissions.PermissionLevel.WRITE,
+    ])
+  })
+
+  it("should return [] for an unknown permission level", () => {
+    expect(
+      permissions.getAllowedLevels("unknown" as permissions.PermissionLevel)
+    ).toEqual([])
+  })
+})
+
+describe("doesHaveBasePermission", () => {
+  it("should return true if base permission has the required level", () => {
+    const permType = permissions.PermissionType.USER
+    const permLevel = permissions.PermissionLevel.READ
+    const rolesHierarchy = [
+      {
+        roleId: BUILTIN_ROLE_IDS.ADMIN,
+        permissionId: permissions.BuiltinPermissionID.ADMIN,
+      },
+    ]
+    expect(
+      permissions.doesHaveBasePermission(permType, permLevel, rolesHierarchy)
+    ).toBe(true)
+  })
+
+  it("should return false if base permission does not have the required level", () => {
+    const permType = permissions.PermissionType.APP
+    const permLevel = permissions.PermissionLevel.READ
+    const rolesHierarchy = [
+      {
+        roleId: BUILTIN_ROLE_IDS.PUBLIC,
+        permissionId: permissions.BuiltinPermissionID.PUBLIC,
+      },
+    ]
+    expect(
+      permissions.doesHaveBasePermission(permType, permLevel, rolesHierarchy)
+    ).toBe(false)
+  })
+})
+
+describe("isPermissionLevelHigherThanRead", () => {
+  it("should return true if level is higher than read", () => {
+    expect(
+      permissions.isPermissionLevelHigherThanRead(
+        permissions.PermissionLevel.WRITE
+      )
+    ).toBe(true)
+  })
+
+  it("should return false if level is read or lower", () => {
+    expect(
+      permissions.isPermissionLevelHigherThanRead(
+        permissions.PermissionLevel.READ
+      )
+    ).toBe(false)
+  })
+})
+
+describe("getBuiltinPermissions", () => {
+  it("returns a clone of the builtin permissions", () => {
+    const builtins = permissions.getBuiltinPermissions()
+    expect(builtins).toEqual(cloneDeep(permissions.BUILTIN_PERMISSIONS))
+    expect(builtins).not.toBe(permissions.BUILTIN_PERMISSIONS)
+  })
+})
+
+describe("getBuiltinPermissionByID", () => {
+  it("returns correct permission object for valid ID", () => {
+    const expectedPermission = {
+      _id: permissions.BuiltinPermissionID.PUBLIC,
+      name: "Public",
+      permissions: [
+        new permissions.Permission(
+          permissions.PermissionType.WEBHOOK,
+          permissions.PermissionLevel.EXECUTE
+        ),
+      ],
+    }
+    expect(permissions.getBuiltinPermissionByID("public")).toEqual(
+      expectedPermission
+    )
+  })
+})

@@ -3,8 +3,8 @@ import {
   getTenantId,
   getTenantIDFromAppID,
   isMultiTenant,
+  getPlatformURL,
 } from "../context"
-import env from "../environment"
 import {
   BBContext,
   TenantResolutionStrategy,

@@ -93,7 +93,7 @@ export const getTenantIDFromCtx = (
   // subdomain
   if (isAllowed(TenantResolutionStrategy.SUBDOMAIN)) {
     // e.g. budibase.app or local.com:10000
-    const platformHost = new URL(env.PLATFORM_URL).host.split(":")[0]
+    const platformHost = new URL(getPlatformURL()).host.split(":")[0]
     // e.g. tenant.budibase.app or tenant.local.com
     const requestHost = ctx.host
     // parse the tenant id from the difference

@@ -0,0 +1,184 @@
+import { TenantResolutionStrategy } from "@budibase/types"
+import { addTenantToUrl, isUserInAppTenant, getTenantIDFromCtx } from "../"
+import { isMultiTenant, getTenantIDFromAppID } from "../../context"
+
+jest.mock("../../context", () => ({
+  getTenantId: jest.fn(() => "budibase"),
+  isMultiTenant: jest.fn(() => true),
+  getTenantIDFromAppID: jest.fn(),
+  getPlatformURL: jest.fn(() => "https://app.com"),
+  DEFAULT_TENANT_ID: "default",
+}))
+
+const mockedIsMultiTenant = isMultiTenant as jest.MockedFunction<
+  typeof isMultiTenant
+>
+const mockedGetTenantIDFromAppID = getTenantIDFromAppID as jest.MockedFunction<
+  typeof getTenantIDFromAppID
+>
+
+describe("addTenantToUrl", () => {
+  it("should append tenantId parameter to the URL", () => {
+    const url = "https://budibase.com"
+    const expectedUrl = "https://budibase.com?tenantId=budibase"
+    expect(addTenantToUrl(url)).toEqual(expectedUrl)
+  })
+
+  it("should append tenantId parameter to the URL query string", () => {
+    const url = "https://budibase.com?var=test"
+    const expectedUrl = "https://budibase.com?var=test&tenantId=budibase"
+    expect(addTenantToUrl(url)).toEqual(expectedUrl)
+  })
+
+  it("should not append tenantId parameter to the URL if isMultiTenant is false", () => {
+    mockedIsMultiTenant.mockImplementation(() => false)
+
+    const url = "https://budibase.com"
+    const expectedUrl = "https://budibase.com"
+    expect(addTenantToUrl(url)).toEqual(expectedUrl)
+  })
+})
+
+describe("isUserInAppTenant", () => {
+  mockedGetTenantIDFromAppID.mockImplementation(() => "budibase")
+  const mockUser = { tenantId: "budibase" }
+
+  it("returns true if user tenant ID matches app tenant ID", () => {
+    const appId = "app-budibase"
+    const result = isUserInAppTenant(appId, mockUser)
+    expect(result).toBe(true)
+  })
+
+  it("uses default tenant ID if user is not provided", () => {
+    const appId = "app-budibase"
+    const result = isUserInAppTenant(appId)
+    expect(result).toBe(true)
+  })
+
+  it("uses default tenant ID if app tenant ID is not found", () => {
+    const appId = "not-budibase-app"
+    const result = isUserInAppTenant(appId, mockUser)
+    expect(result).toBe(true)
+  })
+
+  it("returns false if user tenant ID does not match app tenant ID", () => {
+    const appId = "app-budibase"
+    mockedGetTenantIDFromAppID.mockImplementation(() => "not-budibase")
+    const result = isUserInAppTenant(appId, mockUser)
+    expect(result).toBe(false)
+  })
+})
+
+let mockOpts: any = {}
+function createCtx(opts: {
+  originalUrl?: string
+  headers?: Record<string, string>
+  qsTenantId?: string
+  userTenantId?: string
+  host?: string
+  path?: string
+}) {
+  const createdCtx: any = {
+    originalUrl: opts.originalUrl || "budibase.com",
+    matched: [{ name: "name" }],
+    throw: jest.fn(),
+    request: { headers: {} },
+  }
+  if (opts.headers) {
+    createdCtx.request.headers = opts.headers
+  }
+  if (opts.qsTenantId) {
+    createdCtx.request.query = { tenantId: opts.qsTenantId }
+  }
+  if (opts.userTenantId) {
+    createdCtx.user = { tenantId: opts.userTenantId }
+  }
+  if (opts.host) {
+    createdCtx.host = opts.host
+  }
+  if (opts.path) {
+    createdCtx.matched = [
+      {
+        paramNames: [{ name: "tenantId" }],
+        params: () => ({ tenantId: opts.path }),
+        captures: jest.fn(),
+      },
+    ]
+  }
+
+  return createdCtx as any
+}
+
+describe("getTenantIDFromCtx", () => {
+  describe("when tenant can be found", () => {
+    it("returns the tenant ID from the user object", () => {
+      mockedIsMultiTenant.mockImplementation(() => true)
+      const ctx = createCtx({ userTenantId: "budibase" })
+      expect(getTenantIDFromCtx(ctx, mockOpts)).toEqual("budibase")
+    })
+
+    it("returns the tenant ID from the header", () => {
+      mockedIsMultiTenant.mockImplementation(() => true)
+      const ctx = createCtx({ headers: { "x-budibase-tenant-id": "budibase" } })
+      mockOpts = { includeStrategies: [TenantResolutionStrategy.HEADER] }
+      expect(getTenantIDFromCtx(ctx, mockOpts)).toEqual("budibase")
+    })
+
+    it("returns the tenant ID from the query param", () => {
+      mockedIsMultiTenant.mockImplementation(() => true)
+      mockOpts = { includeStrategies: [TenantResolutionStrategy.QUERY] }
+      const ctx = createCtx({ qsTenantId: "budibase" })
+      expect(getTenantIDFromCtx(ctx, mockOpts)).toEqual("budibase")
+    })
+
+    it("returns the tenant ID from the subdomain", () => {
+      mockedIsMultiTenant.mockImplementation(() => true)
+      const ctx = createCtx({ host: "bb.app.com" })
+      mockOpts = { includeStrategies: [TenantResolutionStrategy.SUBDOMAIN] }
+      expect(getTenantIDFromCtx(ctx, mockOpts)).toEqual("bb")
+    })
+
+    it("returns the tenant ID from the path", () => {
+      mockedIsMultiTenant.mockImplementation(() => true)
+      const ctx = createCtx({ path: "bb" })
+      mockOpts = { includeStrategies: [TenantResolutionStrategy.PATH] }
+      expect(getTenantIDFromCtx(ctx, mockOpts)).toEqual("bb")
+    })
+  })
+
+  describe("when tenant cannot be found", () => {
+    it("throws a 403 error if allowNoTenant is false", () => {
+      const ctx = createCtx({})
+      mockOpts = {
+        allowNoTenant: false,
+        excludeStrategies: [
+          TenantResolutionStrategy.QUERY,
+          TenantResolutionStrategy.SUBDOMAIN,
+          TenantResolutionStrategy.PATH,
+        ],
+      }
+      expect(getTenantIDFromCtx(ctx, mockOpts)).toBeNull()
+      expect(ctx.throw).toBeCalledTimes(1)
+      expect(ctx.throw).toBeCalledWith(403, "Tenant id not set")
+    })
+
+    it("returns null if allowNoTenant is true", () => {
+      const ctx = createCtx({})
+      mockOpts = {
+        allowNoTenant: true,
+        excludeStrategies: [
+          TenantResolutionStrategy.QUERY,
+          TenantResolutionStrategy.SUBDOMAIN,
+          TenantResolutionStrategy.PATH,
+        ],
+      }
+      expect(getTenantIDFromCtx(ctx, mockOpts)).toBeNull()
+    })
+  })
+
+  it("returns the default tenant ID when isMultiTenant() returns false", () => {
+    mockedIsMultiTenant.mockImplementation(() => false)
+    const ctx = createCtx({})
+    expect(getTenantIDFromCtx(ctx, mockOpts)).toEqual("default")
+  })
+})

@@ -1,15 +1,18 @@
 import {
-  ViewName,
-  getUsersByAppParams,
-  getProdAppID,
-  generateAppUserID,
-  queryGlobalView,
-  UNICODE_MAX,
-  DocumentType,
-  SEPARATOR,
   directCouchFind,
+  DocumentType,
+  generateAppUserID,
+  getGlobalUserParams,
+  getProdAppID,
+  getUsersByAppParams,
+  pagination,
+  queryGlobalView,
+  queryGlobalViewRaw,
+  SEPARATOR,
+  UNICODE_MAX,
+  ViewName,
 } from "./db"
-import { BulkDocsResponse, User } from "@budibase/types"
+import { BulkDocsResponse, SearchUsersRequest, User } from "@budibase/types"
 import { getGlobalDB } from "./context"
 import * as context from "./context"
 
@@ -199,3 +202,49 @@ export const searchGlobalUsersByEmail = async (
   }
   return users
 }
+
+const PAGE_LIMIT = 8
+export const paginatedUsers = async ({
+  page,
+  email,
+  appId,
+}: SearchUsersRequest = {}) => {
+  const db = getGlobalDB()
+  // get one extra document, to have the next page
+  const opts: any = {
+    include_docs: true,
+    limit: PAGE_LIMIT + 1,
+  }
+  // add a startkey if the page was specified (anchor)
+  if (page) {
+    opts.startkey = page
+  }
+  // property specifies what to use for the page/anchor
+  let userList: User[],
+    property = "_id",
+    getKey
+  if (appId) {
+    userList = await searchGlobalUsersByApp(appId, opts)
+    getKey = (doc: any) => getGlobalUserByAppPage(appId, doc)
+  } else if (email) {
+    userList = await searchGlobalUsersByEmail(email, opts)
+    property = "email"
+  } else {
+    // no search, query allDocs
+    const response = await db.allDocs(getGlobalUserParams(null, opts))
+    userList = response.rows.map((row: any) => row.doc)
+  }
+  return pagination(userList, PAGE_LIMIT, {
+    paginate: true,
+    property,
+    getKey,
+  })
+}
+
+export async function getUserCount() {
+  const response = await queryGlobalViewRaw(ViewName.USER_BY_EMAIL, {
+    limit: 0, // to be as fast as possible - we just want the total rows count
+    include_docs: false,
+  })
+  return response.total_rows
+}

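A caller of the new helper might look roughly like this; the shape of the object returned by pagination() ({ data, hasNextPage, nextPage }) is an assumption, and the appId value is made up.

// Sketch only — return shape of pagination() assumed, appId illustrative.
const firstPage = await paginatedUsers({ appId: "app_dev_example" })
console.log(firstPage.data.length) // at most PAGE_LIMIT (8) users per page
if (firstPage.hasNextPage) {
  // feed the anchor back in as `page` to fetch the next block of users
  const secondPage = await paginatedUsers({ page: firstPage.nextPage })
}
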
@@ -1,5 +1,5 @@
 import env from "../environment"
-export * from "../newid"
+export * from "../docIds/newid"
 const bcrypt = env.JS_BCRYPT ? require("bcryptjs") : require("bcrypt")
 
 const SALT_ROUNDS = env.SALT_ROUNDS || 10

@@ -1,2 +1,3 @@
 export * from "./hashing"
 export * from "./utils"
+export * from "./stringUtils"

@@ -0,0 +1,8 @@
+export function validEmail(value: string) {
+  return (
+    value &&
+    !!value.match(
+      /^(([^<>()[\]\\.,;:\s@"]+(\.[^<>()[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/
+    )
+  )
+}

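Usage sketch for the helper added above; the sample inputs are illustrative only.

validEmail("user@example.com") // true
validEmail("not-an-email")     // false
validEmail("")                 // "" — the && short-circuit returns the falsy input, not false

Note that the helper is truthy/falsy rather than strictly boolean: an empty string comes back as-is because of the `value &&` short-circuit.
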
@@ -1,4 +1,5 @@
-import { structures, DBTestConfiguration } from "../../../tests"
+import { structures } from "../../../tests"
+import { DBTestConfiguration } from "../../../tests/extra"
 import * as utils from "../../utils"
 import * as db from "../../db"
 import { Header } from "../../constants"

@@ -46,8 +46,9 @@ export async function resolveAppUrl(ctx: Ctx) {
   }
 
   // search prod apps for a url that matches
-  const apps: App[] = await context.doInTenant(tenantId, () =>
-    getAllApps({ dev: false })
+  const apps: App[] = await context.doInTenant(
+    tenantId,
+    () => getAllApps({ dev: false }) as Promise<App[]>
   )
   const app = apps.filter(
     a => a.url && a.url.toLowerCase() === possibleAppUrl

@@ -221,27 +222,6 @@ export function isClient(ctx: Ctx) {
   return ctx.headers[Header.TYPE] === "client"
 }
 
-async function getBuilders() {
-  const builders = await queryGlobalView(ViewName.USER_BY_BUILDERS, {
-    include_docs: false,
-  })
-
-  if (!builders) {
-    return []
-  }
-
-  if (Array.isArray(builders)) {
-    return builders
-  } else {
-    return [builders]
-  }
-}
-
-export async function getBuildersCount() {
-  const builders = await getBuilders()
-  return builders.length
-}
-
 export function timeout(timeMs: number) {
   return new Promise(resolve => setTimeout(resolve, timeMs))
 }
Some files were not shown because too many files have changed in this diff.