Merge remote-tracking branch 'origin/develop' into feature/automation-log-filter-by-license

Dean 2022-09-28 11:39:22 +01:00
commit e033af8e87
776 changed files with 28890 additions and 13287 deletions

View File

@ -162,6 +162,7 @@
"translation"
]
},
{
"login": "mslourens",
"name": "Maurits Lourens",
"avatar_url": "https://avatars.githubusercontent.com/u/1907152?v=4",

View File

@ -7,4 +7,5 @@ packages/server/client
packages/builder/.routify
packages/builder/cypress/support/queryLevelTransformerFunction.js
packages/builder/cypress/support/queryLevelTransformerFunctionWithData.js
packages/builder/cypress/reports
packages/builder/cypress/reports
packages/sdk/sdk

24
.github/ISSUE_TEMPLATE/epic.md vendored Normal file
View File

@ -0,0 +1,24 @@
---
name: Epic
about: Plan a new project
title: ''
labels: epic
assignees: ''
---
## Description
Brief summary of what this Epic is, whether it's a larger project, goal, or user story. Describe the job to be done, which persona this Epic is mainly for, or if it spans multiple personas, break it down by user and job story.
## Spec
Link to confluence spec
## Teams and Stakeholders
Describe who needs to be kept up-to-date about this Epic, included in discussions, or updated along the way. Stakeholders can be in Product/Engineering, as well as other teams like Customer Success who might want to keep customers updated on the Epic project.
## Workflow
- [ ] Spec Created and pasted above
- [ ] Product Review
- [ ] Designs created
- [ ] Individual Tasks created and assigned to Epic

View File

@ -119,6 +119,8 @@ This job is responsible for deploying to our production, cloud kubernetes enviro
## Pro
| **NOTE**: When developing for both pro / budibase repositories, your branch names need to match, or else the correct pro branch won't be used within your CI job.
### Installing Pro
The pro package is always installed from source in our CI jobs.
@ -132,7 +134,7 @@ This is done to prevent pro needing to be published prior to CI runs in budibase
- backend-core lives in the monorepo, so it can't be released independently to be used in pro
- therefore the only option is to pull pro from source and release it as a part of the monorepo release, as if it were a mono package
The install is performed using the same steps as local development, via the `yarn bootstrap` command, see the [Contributing Guide#Pro](../CONTRIBUTING.md#pro)
The install is performed using the same steps as local development, via the `yarn bootstrap` command, see the [Contributing Guide#Pro](../../docs/CONTRIBUTING.md#pro)
The branch to install pro from can vary depending on the ref of the commit that triggered the budibase CI job. This is done so that branches which have changes in both the monorepo and the pro repo can have their CI pass successfully.
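A minimal sketch of how that branch selection could work (illustrative only; the pro repository URL and the fallback order are assumptions, not the actual `install:pro` script):

```bash
#!/bin/bash
# Illustrative sketch: prefer a pro branch that matches the triggering branch,
# fall back to the PR base branch, then to develop.
BRANCH="$1"        # branch that triggered the CI job
BASE_BRANCH="$2"   # PR base branch, if any
PRO_REPO="https://github.com/Budibase/budibase-pro.git"  # assumed URL

if git ls-remote --exit-code --heads "$PRO_REPO" "$BRANCH" >/dev/null 2>&1; then
  PRO_BRANCH="$BRANCH"
elif [[ -n "$BASE_BRANCH" ]]; then
  PRO_BRANCH="$BASE_BRANCH"
else
  PRO_BRANCH="develop"
fi

echo "Installing pro from branch: $PRO_BRANCH"
```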

View File

@ -23,6 +23,15 @@ jobs:
build:
runs-on: ubuntu-latest
services:
couchdb:
image: ibmcom/couchdb3
env:
COUCHDB_PASSWORD: budibase
COUCHDB_USER: budibase
ports:
- 4567:5984
strategy:
matrix:
node-version: [14.x]
@ -53,9 +62,8 @@ jobs:
name: codecov-umbrella
verbose: true
# TODO: parallelise this
- name: Cypress run
uses: cypress-io/github-action@v2
with:
install: false
command: yarn test:e2e:ci
- name: QA Core Integration Tests
run: |
cd qa-core
yarn
yarn api:test:ci

View File

@ -69,6 +69,28 @@ jobs:
env:
KUBECONFIG_FILE: '${{ secrets.RELEASE_KUBECONFIG }}'
- name: Re roll app-service
uses: actions-hub/kubectl@master
env:
KUBE_CONFIG: ${{ secrets.RELEASE_KUBECONFIG_BASE64 }}
with:
args: rollout restart deployment app-service -n budibase
- name: Re roll proxy-service
uses: actions-hub/kubectl@master
env:
KUBE_CONFIG: ${{ secrets.RELEASE_KUBECONFIG_BASE64 }}
with:
args: rollout restart deployment proxy-service -n budibase
- name: Re roll worker-service
uses: actions-hub/kubectl@master
env:
KUBE_CONFIG: ${{ secrets.RELEASE_KUBECONFIG_BASE64 }}
with:
args: rollout restart deployment worker-service -n budibase
- name: Discord Webhook Action
uses: tsickert/discord-webhook@v4.0.0
with:

View File

@ -4,8 +4,6 @@ on:
workflow_dispatch:
env:
BASE_BRANCH: ${{ github.event.pull_request.base.ref}}
BRANCH: ${{ github.event.pull_request.head.ref }}
CI: true
PERSONAL_ACCESS_TOKEN : ${{ secrets.PERSONAL_ACCESS_TOKEN }}
REGISTRY_URL: registry.hub.docker.com
@ -17,6 +15,11 @@ jobs:
matrix:
node-version: [14.x]
steps:
- name: Fail if branch is not master
if: github.ref != 'refs/heads/master'
run: |
echo "Ref is not master, you must run this job from master."
exit 1
- name: "Checkout"
uses: actions/checkout@v2
- name: Use Node.js ${{ matrix.node-version }}
@ -28,8 +31,6 @@ jobs:
- name: Setup Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v1
- name: Install Pro
run: yarn install:pro $BRANCH $BASE_BRANCH
- name: Run Yarn
run: yarn
- name: Run Yarn Bootstrap

View File

@ -18,8 +18,9 @@ on:
workflow_dispatch:
env:
# Posthog token used by ui at build time
POSTHOG_TOKEN: phc_uDYOfnFt6wAbBAXkC6STjcrTpAFiWIhqgFcsC1UVO5F
# Posthog token used by ui at build time
# disable unless needed for testing
# POSTHOG_TOKEN: phc_uDYOfnFt6wAbBAXkC6STjcrTpAFiWIhqgFcsC1UVO5F
INTERCOM_TOKEN: ${{ secrets.INTERCOM_TOKEN }}
PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
FEATURE_PREVIEW_URL: https://budirelease.live
@ -45,7 +46,8 @@ jobs:
- run: yarn
- run: yarn bootstrap
- run: yarn lint
- run: yarn build
- run: yarn build
- run: yarn build:sdk
- run: yarn test
- name: Configure AWS Credentials
@ -119,6 +121,27 @@ jobs:
]
env:
KUBECONFIG_FILE: '${{ secrets.RELEASE_KUBECONFIG }}'
- name: Re roll app-service
uses: actions-hub/kubectl@master
env:
KUBE_CONFIG: ${{ secrets.RELEASE_KUBECONFIG_BASE64 }}
with:
args: rollout restart deployment app-service -n budibase
- name: Re roll proxy-service
uses: actions-hub/kubectl@master
env:
KUBE_CONFIG: ${{ secrets.RELEASE_KUBECONFIG_BASE64 }}
with:
args: rollout restart deployment proxy-service -n budibase
- name: Re roll worker-service
uses: actions-hub/kubectl@master
env:
KUBE_CONFIG: ${{ secrets.RELEASE_KUBECONFIG_BASE64 }}
with:
args: rollout restart deployment worker-service -n budibase
- name: Discord Webhook Action
uses: tsickert/discord-webhook@v4.0.0

View File

@ -8,19 +8,28 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Fail if branch is not master
if: github.ref != 'refs/heads/master'
run: |
echo "Ref is not master, you must run this job from master."
exit 1
- uses: actions/checkout@v2
with:
node-version: 14.x
fetch_depth: 0
- name: Get the latest budibase release version
id: version
run: |
release_version=$(cat lerna.json | jq -r '.version')
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
- name: Tag and release Docker images (Self Host)
run: |
docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
# Get latest release version
release_version=$(cat lerna.json | jq -r '.version')
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
release_tag=v$release_version
release_tag=v${{ env.RELEASE_VERSION }}
# Pull apps and worker images
docker pull budibase/apps:$release_tag
@ -40,13 +49,12 @@ jobs:
DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
SELFHOST_TAG: latest
- name: Build CLI executables
- name: Bootstrap and build (CLI)
run: |
pushd packages/cli
yarn
yarn bootstrap
yarn build
popd
- name: Build OpenAPI spec
run: |
@ -93,4 +101,4 @@ jobs:
with:
webhook-url: ${{ secrets.PROD_DEPLOY_WEBHOOK_URL }}
content: "Self Host Deployment Complete: ${{ env.RELEASE_VERSION }} deployed to Self Host."
embed-title: ${{ env.RELEASE_VERSION }}
embed-title: ${{ env.RELEASE_VERSION }}

View File

@ -29,7 +29,7 @@ on:
env:
# Posthog token used by ui at build time
POSTHOG_TOKEN: phc_fg5I3nDOf6oJVMHSaycEhpPdlgS8rzXG2r6F2IpxCHS
POSTHOG_TOKEN: phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU
INTERCOM_TOKEN: ${{ secrets.INTERCOM_TOKEN }}
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
PERSONAL_ACCESS_TOKEN : ${{ secrets.PERSONAL_ACCESS_TOKEN }}
@ -56,6 +56,7 @@ jobs:
- run: yarn bootstrap
- run: yarn lint
- run: yarn build
- run: yarn build:sdk
- run: yarn test
- name: Configure AWS Credentials

View File

@ -1,4 +1,4 @@
name: Budibase Smoke Test
name: Budibase Nightly Tests
on:
workflow_dispatch:
@ -6,7 +6,7 @@ on:
- cron: "0 5 * * *" # every day at 5AM
jobs:
release:
nightly:
runs-on: ubuntu-latest
steps:
@ -43,6 +43,18 @@ jobs:
name: Test Reports
path: packages/builder/cypress/reports/testReport.html
# TODO: enable once running in QA test env
# - name: Configure AWS Credentials
# uses: aws-actions/configure-aws-credentials@v1
# with:
# aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
# aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
# aws-region: eu-west-1
# - name: Upload test results HTML
# uses: aws-actions/configure-aws-credentials@v1
# run: aws s3 cp packages/builder/cypress/reports/testReport.html s3://{{ secrets.BUDI_QA_REPORTS_BUCKET_NAME }}/$GITHUB_RUN_ID/index.html
- name: Cypress Discord Notify
run: yarn test:e2e:ci:notify
env:

5
.gitignore vendored
View File

@ -63,6 +63,7 @@ typings/
# dotenv environment variables file
.env
!qa-core/.env
!hosting/.env
hosting/.generated-nginx.dev.conf
hosting/proxy/.generated-nginx.prod.conf
@ -102,4 +103,6 @@ packages/builder/cypress/reports
stats.html
# TypeScript cache
*.tsbuildinfo
*.tsbuildinfo
budibase-component
budibase-datasource

View File

@ -9,3 +9,4 @@ packages/server/src/definitions/openapi.ts
packages/builder/.routify
packages/builder/cypress/support/queryLevelTransformerFunction.js
packages/builder/cypress/support/queryLevelTransformerFunctionWithData.js
packages/sdk/sdk

View File

@ -4,7 +4,7 @@
"singleQuote": false,
"trailingComma": "es5",
"arrowParens": "avoid",
"jsxBracketSameLine": false,
"bracketSameLine": false,
"plugins": ["prettier-plugin-svelte"],
"svelteSortOrder": "options-scripts-markup-styles"
}

View File

@ -65,7 +65,7 @@ Budibase is open-source - licensed as GPL v3. This should fill you with confiden
<br /><br />
### Load data or start from scratch
Budibase pulls in data from multiple sources, including MongoDB, CouchDB, PostgreSQL, MySQL, Airtable, S3, DynamoDB, or a REST API. And unlike other platforms, with Budibase you can start from scratch and create business apps with no data sources. [Request new data sources](https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas).
Budibase pulls in data from multiple sources, including MongoDB, CouchDB, PostgreSQL, MySQL, Airtable, S3, DynamoDB, or a REST API. And unlike other platforms, with Budibase you can start from scratch and create business apps with no datasources. [Request new datasources](https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas).
<p align="center">
<img alt="Budibase data" src="https://res.cloudinary.com/daog6scxm/image/upload/v1636970242/Out%20of%20beta%20launch/data_n1tlhf.png">
@ -169,7 +169,7 @@ If you have a question or would like to talk with other Budibase users and join
## ❗ Code of conduct
Budibase is dedicated to providing a welcoming, diverse, and harassment-free experience for everyone. We expect everyone in the Budibase community to abide by our [**Code of Conduct**](https://github.com/Budibase/budibase/blob/HEAD/.github/CODE_OF_CONDUCT.md). Please read it.
Budibase is dedicated to providing a welcoming, diverse, and harassment-free experience for everyone. We expect everyone in the Budibase community to abide by our [**Code of Conduct**](https://github.com/Budibase/budibase/blob/HEAD/docs/CODE_OF_CONDUCT.md). Please read it.
<br />

View File

@ -78,6 +78,8 @@ spec:
key: objectStoreSecret
- name: MINIO_URL
value: {{ .Values.services.objectStore.url }}
- name: PLUGIN_BUCKET_NAME
value: {{ .Values.services.objectStore.pluginBucketName | default "plugins" | quote }}
- name: PORT
value: {{ .Values.services.apps.port | quote }}
{{ if .Values.services.worker.publicApiRateLimitPerSecond }}
@ -124,11 +126,31 @@ spec:
value: {{ .Values.globals.tenantFeatureFlags | quote }}
{{ if .Values.globals.bbAdminUserEmail }}
- name: BB_ADMIN_USER_EMAIL
value: { { .Values.globals.bbAdminUserEmail | quote } }
value: {{ .Values.globals.bbAdminUserEmail | quote }}
{{ end }}
{{ if .Values.globals.bbAdminUserPassword }}
- name: BB_ADMIN_USER_PASSWORD
value: { { .Values.globals.bbAdminUserPassword | quote } }
value: {{ .Values.globals.bbAdminUserPassword | quote }}
{{ end }}
{{ if .Values.globals.pluginsDir }}
- name: PLUGINS_DIR
value: {{ .Values.globals.pluginsDir | quote }}
{{ end }}
{{ if .Values.services.apps.nodeDebug }}
- name: NODE_DEBUG
value: {{ .Values.services.apps.nodeDebug | quote }}
{{ end }}
{{ if .Values.globals.elasticApmEnabled }}
- name: ELASTIC_APM_ENABLED
value: {{ .Values.globals.elasticApmEnabled | quote }}
{{ end }}
{{ if .Values.globals.elasticApmSecretToken }}
- name: ELASTIC_APM_SECRET_TOKEN
value: {{ .Values.globals.elasticApmSecretToken | quote }}
{{ end }}
{{ if .Values.globals.elasticApmServerUrl }}
- name: ELASTIC_APM_SERVER_URL
value: {{ .Values.globals.elasticApmServerUrl | quote }}
{{ end }}
image: budibase/apps:{{ .Values.globals.appVersion }}
@ -142,7 +164,10 @@ spec:
name: bbapps
ports:
- containerPort: {{ .Values.services.apps.port }}
resources: {}
{{ with .Values.services.apps.resources }}
resources:
{{- toYaml . | nindent 10 }}
{{ end }}
{{- with .Values.affinity }}
affinity:
{{- toYaml . | nindent 8 }}

View File

@ -38,7 +38,10 @@ spec:
image: redgeoff/replicate-couchdb-cluster
imagePullPolicy: Always
name: couchdb-backup
resources: {}
{{ with .Values.services.couchdb.backup.resources }}
resources:
{{- toYaml . | nindent 10 }}
{{ end }}
{{- with .Values.affinity }}
affinity:
{{- toYaml . | nindent 8 }}

View File

@ -56,7 +56,10 @@ spec:
name: minio-service
ports:
- containerPort: {{ .Values.services.objectStore.port }}
resources: {}
{{ with .Values.services.objectStore.resources }}
resources:
{{- toYaml . | nindent 10 }}
{{ end }}
volumeMounts:
- mountPath: /data
name: minio-data

View File

@ -30,7 +30,10 @@ spec:
name: proxy-service
ports:
- containerPort: {{ .Values.services.proxy.port }}
resources: {}
{{ with .Values.services.proxy.resources }}
resources:
{{- toYaml . | nindent 10 }}
{{ end }}
volumeMounts:
{{- with .Values.affinity }}
affinity:

View File

@ -35,7 +35,10 @@ spec:
name: redis-service
ports:
- containerPort: {{ .Values.services.redis.port }}
resources: {}
{{ with .Values.services.redis.resources }}
resources:
{{- toYaml . | nindent 10 }}
{{ end }}
volumeMounts:
- mountPath: /data
name: redis-data

View File

@ -27,6 +27,8 @@ spec:
spec:
containers:
- env:
- name: BUDIBASE_ENVIRONMENT
value: {{ .Values.globals.budibaseEnv }}
- name: DEPLOYMENT_ENVIRONMENT
value: "kubernetes"
- name: CLUSTER_PORT
@ -75,6 +77,8 @@ spec:
key: objectStoreSecret
- name: MINIO_URL
value: {{ .Values.services.objectStore.url }}
- name: PLUGIN_BUCKET_NAME
value: {{ .Values.services.objectStore.pluginBucketName | default "plugins" | quote }}
- name: PORT
value: {{ .Values.services.worker.port | quote }}
- name: MULTI_TENANCY
@ -125,6 +129,19 @@ spec:
value: {{ .Values.globals.google.secret | quote }}
- name: TENANT_FEATURE_FLAGS
value: {{ .Values.globals.tenantFeatureFlags | quote }}
{{ if .Values.globals.elasticApmEnabled }}
- name: ELASTIC_APM_ENABLED
value: {{ .Values.globals.elasticApmEnabled | quote }}
{{ end }}
{{ if .Values.globals.elasticApmSecretToken }}
- name: ELASTIC_APM_SECRET_TOKEN
value: {{ .Values.globals.elasticApmSecretToken | quote }}
{{ end }}
{{ if .Values.globals.elasticApmServerUrl }}
- name: ELASTIC_APM_SERVER_URL
value: {{ .Values.globals.elasticApmServerUrl | quote }}
{{ end }}
image: budibase/worker:{{ .Values.globals.appVersion }}
imagePullPolicy: Always
livenessProbe:
@ -136,7 +153,10 @@ spec:
name: bbworker
ports:
- containerPort: {{ .Values.services.worker.port }}
resources: {}
{{ with .Values.services.worker.resources }}
resources:
{{- toYaml . | nindent 10 }}
{{ end }}
{{- with .Values.affinity }}
affinity:
{{- toYaml . | nindent 8 }}

View File

@ -60,19 +60,6 @@ ingress:
port:
number: 10000
resources:
{}
# We usually recommend not to specify default resources and to leave this as a conscious
# choice for the user. This also increases chances charts run on environments with little
# resources, such as Minikube. If you do want to specify resources, uncomment the following
# lines, adjust them as necessary, and remove the curly braces after 'resources:'.
# limits:
# cpu: 100m
# memory: 128Mi
# requests:
# cpu: 100m
# memory: 128Mi
autoscaling:
enabled: false
minReplicas: 1
@ -89,9 +76,10 @@ affinity: {}
globals:
appVersion: "latest"
budibaseEnv: PRODUCTION
tenantFeatureFlags: "*:LICENSING,*:USER_GROUPS"
enableAnalytics: "1"
sentryDSN: ""
posthogToken: "phc_fg5I3nDOf6oJVMHSaycEhpPdlgS8rzXG2r6F2IpxCHS"
posthogToken: "phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU"
logLevel: info
selfHosted: "1" # set to 0 for budibase cloud environment, set to 1 for self-hosted setup
multiTenancy: "0" # set to 0 to disable multiple orgs, set to 1 to enable multiple orgs
@ -114,6 +102,10 @@ globals:
smtp:
enabled: false
# elasticApmEnabled:
# elasticApmSecretToken:
# elasticApmServerUrl:
services:
budibaseVersion: latest
dns: cluster.local
@ -121,15 +113,19 @@ services:
proxy:
port: 10000
replicaCount: 1
resources: {}
apps:
port: 4002
replicaCount: 1
logLevel: info
resources: {}
# nodeDebug: "" # set the value of NODE_DEBUG
worker:
port: 4003
replicaCount: 1
resources: {}
couchdb:
enabled: true
@ -143,6 +139,7 @@ services:
target: ""
# backup interval in seconds
interval: ""
resources: {}
redis:
enabled: true # disable if using external redis
@ -156,6 +153,7 @@ services:
## If undefined (the default) or set to null, no storageClassName spec is
## set, choosing the default provisioner.
storageClass: ""
resources: {}
objectStore:
minio: true
@ -172,6 +170,7 @@ services:
## If undefined (the default) or set to null, no storageClassName spec is
## set, choosing the default provisioner.
storageClass: ""
resources: {}
# Override values in couchDB subchart
couchdb:
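The chart now exposes an empty `resources: {}` stub for each service, which can be overridden at deploy time. A hedged sketch using the helm CLI (the release name, chart path, and limit values are assumptions):

```bash
# Illustrative only: set requests/limits for the apps service at install/upgrade time
helm upgrade --install budibase ./charts/budibase \
  --namespace budibase \
  --set services.apps.resources.requests.cpu=100m \
  --set services.apps.resources.requests.memory=128Mi \
  --set services.apps.resources.limits.cpu=500m \
  --set services.apps.resources.limits.memory=512Mi
```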

View File

@ -4,10 +4,10 @@ From opening a bug report to creating a pull request: every contribution is appr
## Table of contents
- [Quick start](#quick-start)
- [Status](#status)
- [What's included](#whats-included)
- [Bugs and feature requests](#bugs-and-feature-requests)
- [Where to start](#not-sure-where-to-start)
- [Contributor Licence Agreement](#contributor-license-agreement-cla)
- [Glossary of Terms](#glossary-of-terms)
- [Contributing to Budibase](#contributing-to-budibase)
## Not Sure Where to Start?
@ -32,6 +32,9 @@ All contributors must sign an [Individual Contributor License Agreement](https:/
If contributing on behalf of your company, your company must sign a [Corporate Contributor License Agreement](https://github.com/budibase/budibase/blob/next/.github/cla/corporate-cla.md). If so, please contact us via community@budibase.com.
If for any reason your first contribution is in a PR created by another contributor, please just add a comment to the PR
with the following text to agree to our CLA: "I have read the CLA Document and I hereby sign the CLA".
## Glossary of Terms
To understand the budibase API, it can be helpful to understand the top level entities that make up Budibase.
@ -162,7 +165,10 @@ When you are running locally, budibase stores data on disk using docker volumes.
### Development Modes
A combination of environment variables controls the mode budibase runs in.
A combination of environment variables controls the mode budibase runs in.
| **NOTE**: You need to clear your browser cookies when you change between different modes.
Yarn commands can be used to mimic the different modes as described in the sections below:
#### Self Hosted
@ -189,7 +195,7 @@ To enable this mode, use:
yarn mode:account
```
### CI
An overview of the CI pipelines can be found [here](./workflows/README.md)
An overview of the CI pipelines can be found [here](../.github/workflows/README.md)
### Pro

View File

@ -4,6 +4,11 @@
Install instructions [here](https://brew.sh/)
| **NOTE**: If you are working on an M1 Apple Silicon Mac running Z shell, you may need to add the
`eval $(/opt/homebrew/bin/brew shellenv)` line to your `.zshrc`. This lets zsh find the apps you install
through brew.
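For example, a one-off way to add that line (a convenience snippet, not taken from the setup doc itself):

```bash
# Append brew's shell environment setup to zsh and reload the config
echo 'eval "$(/opt/homebrew/bin/brew shellenv)"' >> ~/.zshrc
source ~/.zshrc
```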
### Install Node
Budibase requires a recent version of node (14+):
@ -51,4 +56,7 @@ So this command will actually run the application in dev mode. It creates .env f
The dev version will be available on port 10000 i.e.
http://127.0.0.1:10000/builder/admin
http://127.0.0.1:10000/builder/admin
| **NOTE**: If you are working on an M1 Apple Silicon Mac, you will need to uncomment the `# platform: linux/amd64` line in
[hosting/docker-compose.dev.yaml](../hosting/docker-compose.dev.yaml)
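If preferred, the uncomment can be done from the terminal; a small sketch assuming BSD sed (macOS) and that the comment text matches exactly:

```bash
# Remove the leading "# " from the platform line in the dev compose file
sed -i '' 's|# platform: linux/amd64|platform: linux/amd64|' hosting/docker-compose.dev.yaml
```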

View File

@ -348,7 +348,7 @@ export interface paths {
}
}
responses: {
/** Returns the created table, including the ID which has been generated for it. This can be internal or external data sources. */
/** Returns the created table, including the ID which has been generated for it. This can be internal or external datasources. */
200: {
content: {
"application/json": components["schemas"]["tableOutput"]
@ -959,7 +959,7 @@ export interface components {
query: {
/** @description The ID of the query. */
_id: string
/** @description The ID of the data source the query belongs to. */
/** @description The ID of the datasource the query belongs to. */
datasourceId?: string
/** @description The bindings which are required to perform this query. */
parameters?: string[]
@ -983,7 +983,7 @@ export interface components {
data: {
/** @description The ID of the query. */
_id: string
/** @description The ID of the data source the query belongs to. */
/** @description The ID of the datasource the query belongs to. */
datasourceId?: string
/** @description The bindings which are required to perform this query. */
parameters?: string[]

View File

@ -11,8 +11,8 @@
"dependencies": {
"bulma": "^0.9.3",
"next": "12.1.0",
"node-fetch": "^3.2.2",
"node-sass": "^7.0.1",
"node-fetch": "^3.2.10",
"sass": "^1.52.3",
"react": "17.0.2",
"react-dom": "17.0.2",
"react-notifications-component": "^3.4.1"
@ -24,4 +24,4 @@
"eslint-config-next": "12.1.0",
"typescript": "4.6.2"
}
}
}

View File

@ -2020,10 +2020,10 @@ node-domexception@^1.0.0:
resolved "https://registry.yarnpkg.com/node-domexception/-/node-domexception-1.0.0.tgz#6888db46a1f71c0b76b3f7555016b63fe64766e5"
integrity sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==
node-fetch@^3.2.2:
version "3.2.2"
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-3.2.2.tgz#16d33fbe32ca7c6ca1ca8ba5dfea1dd885c59f04"
integrity sha512-Cwhq1JFIoon15wcIkFzubVNFE5GvXGV82pKf4knXXjvGmn7RJKcypeuqcVNZMGDZsAFWyIRya/anwAJr7TWJ7w==
node-fetch@^3.2.10:
version "3.2.10"
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-3.2.10.tgz#e8347f94b54ae18b57c9c049ef641cef398a85c8"
integrity sha512-MhuzNwdURnZ1Cp4XTazr69K0BTizsBroX7Zx3UgDSVcZYKF/6p0CBe4EUb/hLqmzVhl0UpYfgRljQ4yxE+iCxA==
dependencies:
data-uri-to-buffer "^4.0.0"
fetch-blob "^3.1.4"

View File

@ -22,4 +22,7 @@ BUDIBASE_ENVIRONMENT=PRODUCTION
# An admin user can be automatically created initially if these are set
BB_ADMIN_USER_EMAIL=
BB_ADMIN_USER_PASSWORD=
BB_ADMIN_USER_PASSWORD=
# A path that is watched for plugin bundles. Any bundles found are imported automatically.
PLUGINS_DIR=
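As an illustration, a filled-in fragment might look like this (values are made up):

```bash
# Illustrative values only
BB_ADMIN_USER_EMAIL=admin@example.com
BB_ADMIN_USER_PASSWORD=change-me
# Host path watched for plugin bundles
PLUGINS_DIR=/home/user/budibase-plugins
```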

View File

@ -25,9 +25,12 @@ services:
REDIS_PASSWORD: ${REDIS_PASSWORD}
BB_ADMIN_USER_EMAIL: ${BB_ADMIN_USER_EMAIL}
BB_ADMIN_USER_PASSWORD: ${BB_ADMIN_USER_PASSWORD}
PLUGINS_DIR: ${PLUGINS_DIR}
depends_on:
- worker-service
- redis-service
# volumes:
# - /some/path/to/plugins:/plugins
worker-service:
restart: unless-stopped
@ -76,6 +79,9 @@ services:
- "${MAIN_PORT}:10000"
container_name: bbproxy
image: budibase/proxy
environment:
- PROXY_RATE_LIMIT_WEBHOOKS_PER_SECOND=10
- PROXY_RATE_LIMIT_API_PER_SECOND=20
depends_on:
- minio-service
- worker-service

View File

@ -22,4 +22,7 @@ BUDIBASE_ENVIRONMENT=PRODUCTION
# An admin user can be automatically created initially if these are set
BB_ADMIN_USER_EMAIL=
BB_ADMIN_USER_PASSWORD=
BB_ADMIN_USER_PASSWORD=
# A path that is watched for plugin bundles. Any bundles found are imported automatically.
PLUGINS_DIR=

View File

@ -15,7 +15,10 @@ http {
log_format main '$remote_addr - $remote_user [$time_local] "$request" '
'$status $body_bytes_sent "$http_referer" '
'"$http_user_agent" "$http_x_forwarded_for"';
'"$http_user_agent" "$http_x_forwarded_for" '
'response_time=$upstream_response_time proxy_host=$proxy_host upstream_addr=$upstream_addr';
access_log /var/log/nginx/access.log main;
map $http_upgrade $connection_upgrade {
default "upgrade";
@ -77,6 +80,20 @@ http {
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
}
location /vite/ {
proxy_pass http://{{ address }}:3000;
rewrite ^/vite(.*)$ /$1 break;
}
location /socket/ {
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection 'upgrade';
proxy_set_header Host $host;
proxy_cache_bypass $http_upgrade;
proxy_pass http://{{ address }}:4001;
}
location / {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;

View File

@ -9,7 +9,11 @@ events {
}
http {
limit_req_zone $binary_remote_addr zone=ratelimit:10m rate=20r/s;
# rate limiting
limit_req_status 429;
limit_req_zone $binary_remote_addr zone=ratelimit:10m rate=${PROXY_RATE_LIMIT_API_PER_SECOND}r/s;
limit_req_zone $binary_remote_addr zone=webhooks:10m rate=${PROXY_RATE_LIMIT_WEBHOOKS_PER_SECOND}r/s;
include /etc/nginx/mime.types;
default_type application/octet-stream;
proxy_set_header Host $host;
@ -29,7 +33,10 @@ http {
log_format main '$remote_addr - $remote_user [$time_local] "$request" '
'$status $body_bytes_sent "$http_referer" '
'"$http_user_agent" "$http_x_forwarded_for"';
'"$http_user_agent" "$http_x_forwarded_for" '
'response_time=$upstream_response_time proxy_host=$proxy_host upstream_addr=$upstream_addr';
access_log /var/log/nginx/access.log main;
map $http_upgrade $connection_upgrade {
default "upgrade";
@ -90,6 +97,7 @@ http {
proxy_pass http://$watchtower:8080;
}
{{/if}}
location ~ ^/(builder|app_) {
proxy_http_version 1.1;
proxy_set_header Connection $connection_upgrade;
@ -126,11 +134,39 @@ http {
proxy_pass http://$apps:4002;
}
location /api/webhooks/ {
# calls to webhooks are rate limited
limit_req zone=webhooks nodelay;
# Rest of configuration copied from /api/ location above
# 120s timeout on API requests
proxy_read_timeout 120s;
proxy_connect_timeout 120s;
proxy_send_timeout 120s;
proxy_http_version 1.1;
proxy_set_header Connection $connection_upgrade;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_pass http://$apps:4002;
}
location /db/ {
proxy_pass http://$couchdb:5984;
rewrite ^/db/(.*)$ /$1 break;
}
location /socket/ {
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection 'upgrade';
proxy_set_header Host $host;
proxy_cache_bypass $http_upgrade;
proxy_pass http://$apps:4002;
}
location / {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;

View File

@ -1,3 +1,14 @@
FROM nginx:latest
COPY .generated-nginx.prod.conf /etc/nginx/nginx.conf
COPY error.html /usr/share/nginx/html/error.html
# nginx.conf
# use the default nginx behaviour for *.template files which are processed with envsubst
# override the output dir to output directly to /etc/nginx instead of /etc/nginx/conf.d
ENV NGINX_ENVSUBST_OUTPUT_DIR=/etc/nginx
COPY .generated-nginx.prod.conf /etc/nginx/templates/nginx.conf.template
# Error handling
COPY error.html /usr/share/nginx/html/error.html
# Default environment
ENV PROXY_RATE_LIMIT_WEBHOOKS_PER_SECOND=10
ENV PROXY_RATE_LIMIT_API_PER_SECOND=20
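Because the nginx config is now rendered from a template with envsubst, these limits can be changed per container at start-up; a hedged example (port mapping and the chosen values are illustrative):

```bash
# Run the proxy image with higher rate limits than the baked-in defaults
docker run -d \
  -e PROXY_RATE_LIMIT_API_PER_SECOND=50 \
  -e PROXY_RATE_LIMIT_WEBHOOKS_PER_SECOND=20 \
  -p 10000:10000 \
  budibase/proxy
```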

View File

@ -3,15 +3,21 @@
echo ${TARGETBUILD} > /buildtarget.txt
if [[ "${TARGETBUILD}" = "aas" ]]; then
# Azure AppService uses /home for persistent data & SSH on port 2222
mkdir -p /home/{search,minio,couch}
mkdir -p /home/couch/{dbs,views}
chown -R couchdb:couchdb /home/couch/
DATA_DIR=/home
mkdir -p $DATA_DIR/{search,minio,couch}
mkdir -p $DATA_DIR/couch/{dbs,views}
chown -R couchdb:couchdb $DATA_DIR/couch/
apt update
apt-get install -y openssh-server
sed -i 's#dir=/opt/couchdb/data/search#dir=/home/search#' /opt/clouseau/clouseau.ini
sed -i 's#/minio/minio server /minio &#/minio/minio server /home/minio &#' /runner.sh
sed -i 's#database_dir = ./data#database_dir = /home/couch/dbs#' /opt/couchdb/etc/default.ini
sed -i 's#view_index_dir = ./data#view_index_dir = /home/couch/views#' /opt/couchdb/etc/default.ini
sed -i "s/#Port 22/Port 2222/" /etc/ssh/sshd_config
echo "root:Docker!" | chpasswd
mkdir -p /tmp
chmod +x /tmp/ssh_setup.sh \
&& (sleep 1;/tmp/ssh_setup.sh 2>&1 > /dev/null)
cp /etc/sshd_config /etc/ssh/sshd_config
/etc/init.d/ssh restart
fi
sed -i "s#DATA_DIR#/home#g" /opt/clouseau/clouseau.ini
sed -i "s#DATA_DIR#/home#g" /opt/couchdb/etc/local.ini
else
sed -i "s#DATA_DIR#/data#g" /opt/clouseau/clouseau.ini
sed -i "s#DATA_DIR#/data#g" /opt/couchdb/etc/local.ini
fi

View File

@ -20,31 +20,17 @@ RUN node /pinVersions.js && yarn && yarn build && /cleanup.sh
FROM couchdb:3.2.1
# TARGETARCH can be amd64 or arm e.g. docker build --build-arg TARGETARCH=amd64
ARG TARGETARCH amd64
ARG TARGETARCH=amd64
#TARGETBUILD can be set to single (for single docker image) or aas (for azure app service)
# e.g. docker build --build-arg TARGETBUILD=aas ....
ARG TARGETBUILD single
ARG TARGETBUILD=single
ENV TARGETBUILD $TARGETBUILD
COPY --from=build /app /app
COPY --from=build /worker /worker
ENV \
APP_PORT=4001 \
ARCHITECTURE=amd \
BUDIBASE_ENVIRONMENT=PRODUCTION \
CLUSTER_PORT=80 \
# CUSTOM_DOMAIN=budi001.custom.com \
DEPLOYMENT_ENVIRONMENT=docker \
MINIO_URL=http://localhost:9000 \
POSTHOG_TOKEN=phc_fg5I3nDOf6oJVMHSaycEhpPdlgS8rzXG2r6F2IpxCHS \
REDIS_URL=localhost:6379 \
SELF_HOSTED=1 \
TARGETBUILD=$TARGETBUILD \
WORKER_PORT=4002 \
WORKER_URL=http://localhost:4002 \
APPS_URL=http://localhost:4001
# ENV CUSTOM_DOMAIN=budi001.custom.com \
# See runner.sh for Env Vars
# These secret env variables are generated by the runner at startup
# their values can be overridden by the user, they will be written
# to the .env file in the /data directory for use later on
@ -114,7 +100,10 @@ RUN chmod +x ./healthcheck.sh
ADD hosting/scripts/build-target-paths.sh .
RUN chmod +x ./build-target-paths.sh
# Script below sets the path for storing data based on $DATA_DIR
# For Azure App Service install SSH & point data locations to /home
ADD hosting/single/ssh/sshd_config /etc/
ADD hosting/single/ssh/ssh_setup.sh /tmp
RUN /build-target-paths.sh
# cleanup cache
@ -122,6 +111,8 @@ RUN yarn cache clean -f
EXPOSE 80
EXPOSE 443
# Expose port 2222 for SSH on Azure App Service build
EXPOSE 2222
VOLUME /data
# setup letsencrypt certificate

View File

@ -7,7 +7,7 @@ name=clouseau@127.0.0.1
cookie=monster
; the path where you would like to store the search index files
dir=/data/search
dir=DATA_DIR/search
; the number of search indexes that can be open simultaneously
max_indexes_open=500

View File

@ -1,5 +1,5 @@
; CouchDB Configuration Settings
[couchdb]
database_dir = /data/couch/dbs
view_index_dir = /data/couch/views
database_dir = DATA_DIR/couch/dbs
view_index_dir = DATA_DIR/couch/views

View File

@ -3,6 +3,11 @@ healthy=true
if [ -f "/data/.env" ]; then
export $(cat /data/.env | xargs)
elif [ -f "/home/.env" ]; then
export $(cat /home/.env | xargs)
else
echo "No .env file found"
healthy=false
fi
if [[ $(curl -Lfk -s -w "%{http_code}\n" http://localhost/ -o /dev/null) -ne 200 ]]; then

View File

@ -66,6 +66,15 @@ server {
rewrite ^/db/(.*)$ /$1 break;
}
location /socket/ {
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection 'upgrade';
proxy_set_header Host $host;
proxy_cache_bypass $http_upgrade;
proxy_pass http://127.0.0.1:4001;
}
location / {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;

View File

@ -1,9 +1,36 @@
#!/bin/bash
declare -a ENV_VARS=("COUCHDB_USER" "COUCHDB_PASSWORD" "MINIO_ACCESS_KEY" "MINIO_SECRET_KEY" "INTERNAL_API_KEY" "JWT_SECRET" "REDIS_PASSWORD")
if [ -f "/data/.env" ]; then
export $(cat /data/.env | xargs)
declare -a ENV_VARS=("COUCHDB_USER" "COUCHDB_PASSWORD" "DATA_DIR" "MINIO_ACCESS_KEY" "MINIO_SECRET_KEY" "INTERNAL_API_KEY" "JWT_SECRET" "REDIS_PASSWORD")
declare -a DOCKER_VARS=("APP_PORT" "APPS_URL" "ARCHITECTURE" "BUDIBASE_ENVIRONMENT" "CLUSTER_PORT" "DEPLOYMENT_ENVIRONMENT" "MINIO_URL" "NODE_ENV" "POSTHOG_TOKEN" "REDIS_URL" "SELF_HOSTED" "WORKER_PORT" "WORKER_URL" "TENANT_FEATURE_FLAGS" "ACCOUNT_PORTAL_URL")
# Check the env vars set in Dockerfile have come through, AAS seems to drop them
[[ -z "${APP_PORT}" ]] && export APP_PORT=4001
[[ -z "${ARCHITECTURE}" ]] && export ARCHITECTURE=amd
[[ -z "${BUDIBASE_ENVIRONMENT}" ]] && export BUDIBASE_ENVIRONMENT=PRODUCTION
[[ -z "${CLUSTER_PORT}" ]] && export CLUSTER_PORT=80
[[ -z "${DEPLOYMENT_ENVIRONMENT}" ]] && export DEPLOYMENT_ENVIRONMENT=docker
[[ -z "${MINIO_URL}" ]] && export MINIO_URL=http://localhost:9000
[[ -z "${NODE_ENV}" ]] && export NODE_ENV=production
[[ -z "${POSTHOG_TOKEN}" ]] && export POSTHOG_TOKEN=phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU
[[ -z "${TENANT_FEATURE_FLAGS}" ]] && export TENANT_FEATURE_FLAGS="*:LICENSING,*:USER_GROUPS"
[[ -z "${ACCOUNT_PORTAL_URL}" ]] && export ACCOUNT_PORTAL_URL=https://account.budibase.app
[[ -z "${REDIS_URL}" ]] && export REDIS_URL=localhost:6379
[[ -z "${SELF_HOSTED}" ]] && export SELF_HOSTED=1
[[ -z "${WORKER_PORT}" ]] && export WORKER_PORT=4002
[[ -z "${WORKER_URL}" ]] && export WORKER_URL=http://localhost:4002
[[ -z "${APPS_URL}" ]] && export APPS_URL=http://localhost:4001
# export CUSTOM_DOMAIN=budi001.custom.com
# Azure App Service customisations
if [[ "${TARGETBUILD}" = "aas" ]]; then
DATA_DIR=/home
/etc/init.d/ssh start
else
DATA_DIR=${DATA_DIR:-/data}
fi
# first randomise any unset environment variables
if [ -f "${DATA_DIR}/.env" ]; then
# Read in the .env file and export the variables
for LINE in $(cat ${DATA_DIR}/.env); do export $LINE; done
fi
# randomise any unset environment variables
for ENV_VAR in "${ENV_VARS[@]}"
do
temp=$(eval "echo \$$ENV_VAR")
@ -14,21 +41,33 @@ done
if [[ -z "${COUCH_DB_URL}" ]]; then
export COUCH_DB_URL=http://$COUCHDB_USER:$COUCHDB_PASSWORD@localhost:5984
fi
if [ ! -f "/data/.env" ]; then
touch /data/.env
if [ ! -f "${DATA_DIR}/.env" ]; then
touch ${DATA_DIR}/.env
for ENV_VAR in "${ENV_VARS[@]}"
do
temp=$(eval "echo \$$ENV_VAR")
echo "$ENV_VAR=$temp" >> /data/.env
echo "$ENV_VAR=$temp" >> ${DATA_DIR}/.env
done
for ENV_VAR in "${DOCKER_VARS[@]}"
do
temp=$(eval "echo \$$ENV_VAR")
echo "$ENV_VAR=$temp" >> ${DATA_DIR}/.env
done
echo "COUCH_DB_URL=${COUCH_DB_URL}" >> ${DATA_DIR}/.env
fi
# Read in the .env file and export the variables
for LINE in $(cat ${DATA_DIR}/.env); do export $LINE; done
ln -s ${DATA_DIR}/.env /app/.env
ln -s ${DATA_DIR}/.env /worker/.env
# make these directories in runner, in case of mount
mkdir -p /data/couch/{dbs,views} /home/couch/{dbs,views}
chown -R couchdb:couchdb /data/couch /home/couch
mkdir -p ${DATA_DIR}/couch/{dbs,views}
mkdir -p ${DATA_DIR}/minio
mkdir -p ${DATA_DIR}/search
chown -R couchdb:couchdb ${DATA_DIR}/couch
redis-server --requirepass $REDIS_PASSWORD &
/opt/clouseau/bin/clouseau &
/minio/minio server /data/minio &
/minio/minio server ${DATA_DIR}/minio &
/docker-entrypoint.sh /opt/couchdb/bin/couchdb &
/etc/init.d/nginx restart
if [[ ! -z "${CUSTOM_DOMAIN}" ]]; then

View File

@ -0,0 +1,8 @@
#!/bin/sh
ssh-keygen -A
#prepare run dir
if [ ! -d "/var/run/sshd" ]; then
mkdir -p /var/run/sshd
fi

View File

@ -0,0 +1,12 @@
Port 2222
ListenAddress 0.0.0.0
LoginGraceTime 180
X11Forwarding yes
Ciphers aes128-cbc,3des-cbc,aes256-cbc,aes128-ctr,aes192-ctr,aes256-ctr
MACs hmac-sha1,hmac-sha1-96
StrictModes yes
SyslogFacility DAEMON
PasswordAuthentication yes
PermitEmptyPasswords no
PermitRootLogin yes
Subsystem sftp internal-sftp

View File

@ -8,10 +8,11 @@
</h1>
<h3 align="center">
Construye herramientas empresariales personalizadas en cuestión de minutos y en su propia infraestructura.
Construye herramientas empresariales personalizadas en cuestión de minutos y en tu propia infraestructura.
</h3>
<p align="center">
Budibase es una plataforma de código bajo de código abierto, que ayuda a desarrolladores y profesionales de TI a crear, automatizar y enviar aplicaciones empresariales personalizadas en cuestión de minutos y en su propia infraestructura
Budibase es una plataforma low code de código abierto, que ayuda a desarrolladores y profesionales de TI a crear y
automatizar aplicaciones personalizadas en cuestión de minutos
</p>
<h3 align="center">
@ -20,7 +21,7 @@
<p align="center">
<img src="https://i.imgur.com/tPQHruf.png">
<img alt="Budibase design ui" src="https://res.cloudinary.com/daog6scxm/image/upload/v1633524049/ui/design-ui-wide-mobile_gdaveq.jpg">
</p>
<p align="center">
@ -30,9 +31,6 @@
<a href="https://github.com/Budibase/budibase/releases">
<img alt="GitHub release (latest by date)" src="https://img.shields.io/github/v/release/Budibase/budibase">
</a>
<a href="https://discord.gg/rCYayfe">
<img alt="Discord" src="https://img.shields.io/discord/733030666647765003">
</a>
<a href="https://twitter.com/intent/follow?screen_name=budibase">
<img src="https://img.shields.io/twitter/follow/budibase?style=social" alt="Follow @budibase" />
</a>
@ -43,130 +41,213 @@
</p>
<h3 align="center">
<a href="https://portal.budi.live/signup">Sign-up</a>
<a href="https://account.budibase.app/register">Comenzar con Budibase en la nube</a>
<span> · </span>
<a href="https://docs.budibase.com">Docs</a>
<a href="https://docs.budibase.com/docs/hosting-methods">Comenzar con Docker, K8s, DO</a>
<span> · </span>
<a href="https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas">Feature request</a>
<a href="https://docs.budibase.com/docs">Documentaciones</a>
<span> · </span>
<a href="https://github.com/Budibase/budibase/issues">Report a bug</a>
<a href="https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas">Pedir una funcionalidad</a>
<span> · </span>
Support: <a href="https://github.com/Budibase/budibase/discussions">Discussions</a>
<span> & </span>
<a href="https://discord.gg/rCYayfe">Discord</a>
<a href="https://github.com/Budibase/budibase/issues">Reportar un error</a>
<span> · </span>
Support: <a href="https://github.com/Budibase/budibase/discussions">Comunidad</a>
</h3>
<br /><br />
## ✨ Caracteristicas
## ✨ Features
When other platforms chose the closed source route, we decided to go open source. When other platforms chose cloud builders, we decided a local builder offered the better developer experience. We like to do things differently at Budibase.
### Construir aplicaciones reales
Con Budibase podras construir aplicaciones de pagina unica de gran rendimiento. Ademas, puedes hacerlas con un diseño
adaptativo para darles a tus usuarios una gran experiencia.
<br /><br />
- **Build and ship real software.** Unlike other platforms, with Budibase you build and ship single page applications. Budibase applications have performance baked in and can be designed responsively, providing your users with a great experience.
### Codigo abierto y ampliable
Budibase es de codigo abierto con licencia GPL v3. Puedes ampliarlo o modificarlo para adaptarlo a tus necesidades y preferencias.
- **Open source and extensable.** Budibase is open-source. The builder is licensed AGPL v3, the server is GPL v3, and the client is MPL. This should fill you with confidence that Budibase will always be around. You can also code against Budibase or fork it and make changes as you please, providing a developer-friendly experience.
De esta manera proveemos una buena experiencia para el desarrollador asi como establecemos la confianza de que Budibase siempre estara funcional.
<br /><br />
- **Load data or start from scratch.** Budibase pulls in data from multiple sources, including MongoDB, CouchDB, PostgreSQL, mySQL, Airtable, Google Sheets, S3, DyanmoDB, or a REST API. And unlike other platforms, with Budibase you can start from scratch and create business apps with no data sources. [Request new data sources](https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas).
### Cargar informacion o empezar desde cero
Budibase permite importar datos desde multiples fuentes, entre las que estan incluidas: MongoDB, CouchDB, PostgreSQL, MySQL,
Airtable, S3, DynamoDB o API REST.
- **Design and build apps with powerful pre-made components.** Budibase comes out of the box with beautifully designed, powerful components which you can use like building blocks to build your UI. We also expose a lot of your favourite CSS styling options so you can go that extra creative mile. [Request new components](https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas).
- **Automate processes, integrate with other tools, and connect to webhooks.** Save time by automating manual processes and workflows. From connecting to webhooks, to automating emails, simply tell Budibase what to do and let it work for you. You can easily [create new automations for Budibase here](https://github.com/Budibase/automations) or [request new integrations here](https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas).
- **Cloud hosting and self-hosting.** Users can self-host (see below), or host their apps with Budibase. Currently, our cloud hosting offering is limited to the free tier but we aim to change this in the future. For heavy usage, we advise users to self-host.
O si lo prefieres, con Budibase puedes empezar desde cero y construir tus propias aplicaciones
sin necesidad de herramientas externas.
[Sugerir fuente de datos](https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas).
<p align="center">
<img alt="Budibase design ui" src="https://imgur.com/v8m6v3q.png">
<img alt="Budibase data" src="https://res.cloudinary.com/daog6scxm/image/upload/v1636970242/Out%20of%20beta%20launch/data_n1tlhf.png">
</p>
<br /><br />
### Diseña y construye aplicaciones con componentes profesionales prediseñados
## ⌛ Status
- [x] Alpha: We are demoing Budibase to users and receiving feedback
- [x] Private Beta: We are testing Budibase with a closed set of customers
- [x] Public Beta: Anyone can [sign-up and use Budibase](https://portal.budi.live/signup).
- [ ] Official Launch
Budibase incorpora componentes profesionales prediseñados que podras usar de manera facil e intuitiva
como bloques de construccion para la interfaz de tu aplicacion.
Watch "releases" of this repo to get notified of major updates, and give the star button a click whilst you're there.
Tambien mostramos gran parte del CSS para que puedas adaptar los componentes a tus diseños.
[Sugerir componente](https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas).
<p align="center">
<img src="https://i.imgur.com/cJpgqm8.png">
<img alt="Budibase design" src="https://res.cloudinary.com/daog6scxm/image/upload/v1636970243/Out%20of%20beta%20launch/design-like-a-pro_qhlfeu.gif">
</p>
<br /><br />
### Stargazers over time
### Procesos automatizados, integra tu aplicacion con otras herramientas y conectala a eventos webhook
Ahorra tiempo automatizando flujos de trabajo y procesos manuales. Podras desde conectar eventos webhook hasta automatizar emails,
simplemente dile a Budibase que hacer y deja que el haga el trabajo por ti.
[Crear nuevos procesos automatizados](https://github.com/Budibase/automations) o [Sugerir proceso automatizado](https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas).
<p align="center">
<img alt="Budibase automations" src="https://res.cloudinary.com/daog6scxm/image/upload/v1636970486/Out%20of%20beta%20launch/automation_riro7u.png">
</p>
<br /><br />
### Tus herramientas favoritas
Budibase integra un gran numero de herramientas que te permitiran construir tus aplicaciones ajustandose a tus preferencias.
<p align="center">
<img alt="Budibase integrations" src="https://res.cloudinary.com/daog6scxm/image/upload/v1636970242/Out%20of%20beta%20launch/integrations_kc7dqt.png">
</p>
<br /><br />
### Un paraiso para administradores
Puedes albergar Budibase en tu propia infraestructura y gestionar globalmente usuarios, incorporaciones, SMTP, aplicaciones,
grupos, diseños de temas, etc.
Tambien puedes gestionar los usuarios y grupos, o delegar en personas asignadas para ello, desde nuestra aplicacion sin
mucho esfuerzo.
Budibase is made to scale. With Budibase, you can self-host on your own infrastructure and globally manage users, onboarding, SMTP, apps, groups, theming and more. You can also provide users/groups with an app portal and disseminate user-management to the group manager.
- Video Promocional: https://youtu.be/xoljVpty_Kw
<br />
---
<br />
## Budibase API Publica
Como todo lo que construimos en Budibase, nuestra nueva API publica es facil de usar, flexible e introduce nuevas posibilidades de ampliacion
del sistema. Budibase API ofrece:
- Uso de Budibase como backend
- Interoperabilidad
#### Documentacion
Puedes aprender mas acerca de Budibase API en los siguientes documentos:
- [Documentacion general](https://docs.budibase.com/docs/public-api) : Como obtener tu clave para la API, usar Insomnia y Postman
- [API Interactiva](https://docs.budibase.com/reference/post_applications) : Aprende como trabajar con la API
#### Guias
- [Construye una aplicacion con Budibase y Next.js](https://budibase.com/blog/building-a-crud-app-with-budibase-and-next.js/)
<p align="center">
<img alt="Budibase data" src="https://res.cloudinary.com/daog6scxm/image/upload/v1647858558/Feb%20release/Start_building_with_Budibase_s_API_3_rhlzhv.png">
</p>
<br /><br />
<br /><br /><br />
## 🏁 Comenzar con Budibase
Puedes alojar Budibase en tu propia infraestructura con Docker, Kubernetes o Digital Ocean; o usa Budibase en la nube si
quieres empezar a crear tus aplicaciones rapidamente y sin ningun tipo de preocupacion.
### [Comenzar con Budibase self-hosting](https://docs.budibase.com/docs/hosting-methods)
- [Docker - single ARM compatible image](https://docs.budibase.com/docs/docker)
- [Docker Compose](https://docs.budibase.com/docs/docker-compose)
- [Kubernetes](https://docs.budibase.com/docs/kubernetes-k8s)
- [Digital Ocean](https://docs.budibase.com/docs/digitalocean)
- [Portainer](https://docs.budibase.com/docs/portainer)
### [Comenzar con Budibase en la nube](https://budibase.com)
<br /><br />
## 🎓 Aprende a usar Budibase
Aqui tienes la [documentacion de Budibase](https://docs.budibase.com/docs).
<br />
<br /><br />
## 💬 Comunidad
Te invitamos a que te unas a nuestra comunidad de Budibase, alli podras hacer las preguntas que quieras, ayudar a otras
personas o tener una charla entretenida con otros usuarios de Budibase.
[Acceder a la comunidad de Budibase](https://github.com/Budibase/budibase/discussions)
<br /><br /><br />
## ❗ Codigo de conducta
Budibase presta especial atencion en acoger a personas de toda diversidad y ofrecer un entorno de respeto mutuo. Asi mismo
esperamos lo mismo de nuestra comunidad, por favor lee el
[**Codigo de conducta**](https://github.com/Budibase/budibase/blob/HEAD/.github/CODE_OF_CONDUCT.md).
<br />
<br /><br />
## 🙌 Contribuir en Budibase
Desde comunicar un bug a solventar un error en el codigo, toda contribucion es apreciada y bienvenida. Si estas planeando
implementar una nueva funcionalidad o realizar un cambio en la API, por favor crea un [nuevo mensaje aqui](https://github.com/Budibase/budibase/issues),
de esta manera nos encargaremos que tu trabajo no sea en vano.
Aqui tienes instrucciones de como configurar tu entorno Budibase para [Debian](https://github.com/Budibase/budibase/tree/HEAD/docs/DEV-SETUP-DEBIAN.md)
y [MacOSX](https://github.com/Budibase/budibase/tree/HEAD/docs/DEV-SETUP-MACOSX.md)
### No estas seguro por donde empezar?
Un buen lugar para empezar a contribuir con nosotros es [aqui](https://github.com/Budibase/budibase/projects/22).
### Organizacion del repositorio
Budibase es un repositorio unico gestionado por Lerna. Lerna construye y publica los paquetes de Budibase sincronizandolos
cada vez que se realiza un cambio. A rasgos generales, estos son los paquetes que conforman Budibase:
- [packages/builder](https://github.com/Budibase/budibase/tree/HEAD/packages/builder) - contiene el codigo del builder de la parte cliente, esta es una aplicacion svelte.
- [packages/client](https://github.com/Budibase/budibase/tree/HEAD/packages/client) - Este modulo se ejecuta en el browser y es el responsable de leer definiciones JSON y crear aplicaciones web en el momento.
- [packages/server](https://github.com/Budibase/budibase/tree/HEAD/packages/server) - La parte servidor de Budibase. Esta aplicacion Koa es responsable de suministrar lo necesario al builder para asi generar las aplicaciones Budibase. Tambien provee una API para interaccionar con la base de datos y el almacenamiento de ficheros.
Para mas informacion, por favor lee el siguiente documento [CONTRIBUTING.md](https://github.com/Budibase/budibase/blob/HEAD/docs/CONTRIBUTING.md)
<br /><br />
## 📝 Licencia
Budibase es open-source, licenciado como [GPL v3](https://www.gnu.org/licenses/gpl-3.0.en.html). El cliente y las librerias
de componentes estan licenciadas como [MPL](https://directory.fsf.org/wiki/License:MPL-2.0) - de esta manera, puedes licenciar
como tu quieras las aplicaciones que construyas.
<br /><br />
## ⭐ Historia de nuestros Stargazers
[![Stargazers over time](https://starchart.cc/Budibase/budibase.svg)](https://starchart.cc/Budibase/budibase)
If you are having issues between updates of the builder, please use the guide [here](https://github.com/Budibase/budibase/blob/HEAD/.github/CONTRIBUTING.md#troubleshooting) to clear down your environment.
Si estas teniendo problemas con el builder despues de actualizar, por favor [lee esta guia](https://github.com/Budibase/budibase/blob/HEAD/docs/CONTRIBUTING.md#troubleshooting) para limpiar tu entorno.
<br /><br />
## 🏁 Getting Started with Budibase
## Contribuidores ✨
The Budibase builder runs in Electron, on Mac, PC and Linux. Follow the steps below to get started:
- [ ] [Sign-up to Budibase](https://portal.budi.live/signup)
- [ ] Create a username and password
- [ ] Copy your API key
- [ ] Download Budibase
- [ ] Open Budibase and enter your API key
[Here is a guided tutorial](https://docs.budibase.com/tutorial/tutorial-signing-up) if you need extra help.
## 🤖 Self-hosting
Budibase wants to make sure anyone can use the tools we develop and we know a lot of people need to be able to host the apps they make on their own systems - that is why we've decided to try and make self hosting as easy as possible!
Currently, you can host your apps using Docker or Digital Ocean. The documentation for self-hosting can be found [here](https://docs.budibase.com/docs/hosting-methods).
[![Deploy to DO](https://www.deploytodo.com/do-btn-blue.svg)](https://cloud.digitalocean.com/droplets/new?onboarding_origin=marketplace&i=09038e&fleetUuid=bb04f9c8-1de8-4687-b2ae-1d5177a0535b&appId=77729671&type=applications&size=s-4vcpu-8gb&region=nyc1&refcode=0caaa6085a82&image=budibase-20-04)
## 🎓 Learning Budibase
The Budibase [documentation lives here](https://docs.budibase.com).
You can also follow a quick tutorial on [how to build a CRM with Budibase](https://docs.budibase.com/tutorial/tutorial-introduction)
## Roadmap
Checkout our [Public Roadmap](https://github.com/Budibase/budibase/projects/10). If you would like to discuss some of the items on the roadmap, please feel to reach out on [Discord](https://discord.gg/rCYayfe), or via [Github discussions](https://github.com/Budibase/budibase/discussions)
## ❗ Code of Conduct
Budibase is dedicated to providing a welcoming, diverse, and harrassment-free experience for everyone. We expect everyone in the Budibase community to abide by our [**Code of Conduct**](https://github.com/Budibase/budibase/blob/HEAD/.github/CODE_OF_CONDUCT.md). Please read it.
## 🙌 Contributing to Budibase
From opening a bug report to creating a pull request: every contribution is appreciated and welcomed. If you're planning to implement a new feature or change the API please create an issue first. This way we can ensure your work is not in vain.
### Not Sure Where to Start?
A good place to start contributing, is the [First time issues project](https://github.com/Budibase/budibase/projects/22).
### How the repository is organized
Budibase is a monorepo managed by lerna. Lerna manages the building and publishing of the budibase packages. At a high level, here are the packages that make up Budibase.
- [packages/builder](https://github.com/Budibase/budibase/tree/HEAD/packages/builder) - contains code for the budibase builder client side svelte application.
- [packages/client](https://github.com/Budibase/budibase/tree/HEAD/packages/client) - A module that runs in the browser responsible for reading JSON definition and creating living, breathing web apps from it.
- [packages/server](https://github.com/Budibase/budibase/tree/HEAD/packages/server) - The budibase server. This Koa app is responsible for serving the JS for the builder and budibase apps, as well as providing the API for interaction with the database and file system.
For more information, see [CONTRIBUTING.md](https://github.com/Budibase/budibase/blob/HEAD/.github/CONTRIBUTING.md)
## 📝 License
Budibase is open-source. The builder is licensed [AGPL v3](https://www.gnu.org/licenses/agpl-3.0.en.html), the server is licensed [GPL v3](https://www.gnu.org/licenses/gpl-3.0.en.html), and the client is licensed [MPL](https://directory.fsf.org/wiki/License:MPL-2.0).
## 💬 Get in touch
If you have a question or would like to talk with other Budibase users, please hop over to [Github discussions](https://github.com/Budibase/budibase/discussions) or join our Discord server:
[Discord chatroom](https://discord.gg/rCYayfe)
![Discord Shield](https://discordapp.com/api/guilds/733030666647765003/widget.png?style=shield)
## Contributors ✨
Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/docs/en/emoji-key)):
Queremos prestar un especial agradecimiento a nuestra maravillosa gente ([emoji key](https://allcontributors.org/docs/en/emoji-key)):
<!-- ALL-CONTRIBUTORS-LIST:START - Do not remove or modify this section -->
<!-- prettier-ignore-start -->
@ -179,14 +260,18 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
<td align="center"><a href="https://budibase.com/"><img src="https://avatars3.githubusercontent.com/u/3524181?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Michael Shanks</b></sub></a><br /><a href="https://github.com/Budibase/budibase/commits?author=mjashanks" title="Documentation">📖</a> <a href="https://github.com/Budibase/budibase/commits?author=mjashanks" title="Code">💻</a> <a href="https://github.com/Budibase/budibase/commits?author=mjashanks" title="Tests">⚠️</a></td>
<td align="center"><a href="https://github.com/kevmodrome"><img src="https://avatars3.githubusercontent.com/u/534488?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Kevin Åberg Kultalahti</b></sub></a><br /><a href="https://github.com/Budibase/budibase/commits?author=kevmodrome" title="Documentation">📖</a> <a href="https://github.com/Budibase/budibase/commits?author=kevmodrome" title="Code">💻</a> <a href="https://github.com/Budibase/budibase/commits?author=kevmodrome" title="Tests">⚠️</a></td>
<td align="center"><a href="https://www.budibase.com/"><img src="https://avatars2.githubusercontent.com/u/49767913?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Joe</b></sub></a><br /><a href="https://github.com/Budibase/budibase/commits?author=joebudi" title="Documentation">📖</a> <a href="https://github.com/Budibase/budibase/commits?author=joebudi" title="Code">💻</a> <a href="#content-joebudi" title="Content">🖋</a> <a href="#design-joebudi" title="Design">🎨</a></td>
<td align="center"><a href="https://github.com/Conor-Mack"><img src="https://avatars1.githubusercontent.com/u/36074859?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Conor_Mack</b></sub></a><br /><a href="https://github.com/Budibase/budibase/commits?author=Conor-Mack" title="Code">💻</a> <a href="https://github.com/Budibase/budibase/commits?author=Conor-Mack" title="Tests">⚠️</a></td>
<td align="center"><a href="https://github.com/Rory-Powell"><img src="https://avatars.githubusercontent.com/u/8755148?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Rory Powell</b></sub></a><br /><a href="https://github.com/Budibase/budibase/commits?author=Rory-Powell" title="Code">💻</a> <a href="https://github.com/Budibase/budibase/commits?author=Rory-Powell" title="Documentation">📖</a> <a href="https://github.com/Budibase/budibase/commits?author=Rory-Powell" title="Tests">⚠️</a></td>
</tr>
<tr>
<td align="center"><a href="https://github.com/PClmnt"><img src="https://avatars.githubusercontent.com/u/5665926?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Peter Clement</b></sub></a><br /><a href="https://github.com/Budibase/budibase/commits?author=PClmnt" title="Code">💻</a> <a href="https://github.com/Budibase/budibase/commits?author=PClmnt" title="Documentation">📖</a> <a href="https://github.com/Budibase/budibase/commits?author=PClmnt" title="Tests">⚠️</a></td>
<td align="center"><a href="https://github.com/Conor-Mack"><img src="https://avatars1.githubusercontent.com/u/36074859?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Conor_Mack</b></sub></a><br /><a href="https://github.com/Budibase/budibase/commits?author=Conor-Mack" title="Code">💻</a> <a href="https://github.com/Budibase/budibase/commits?author=Conor-Mack" title="Tests">⚠️</a></td>
<td align="center"><a href="https://github.com/pngwn"><img src="https://avatars1.githubusercontent.com/u/12937446?v=4?s=100" width="100px;" alt=""/><br /><sub><b>pngwn</b></sub></a><br /><a href="https://github.com/Budibase/budibase/commits?author=pngwn" title="Code">💻</a> <a href="https://github.com/Budibase/budibase/commits?author=pngwn" title="Tests">⚠️</a></td>
<td align="center"><a href="https://github.com/HugoLd"><img src="https://avatars0.githubusercontent.com/u/26521848?v=4?s=100" width="100px;" alt=""/><br /><sub><b>HugoLd</b></sub></a><br /><a href="https://github.com/Budibase/budibase/commits?author=HugoLd" title="Code">💻</a></td>
<td align="center"><a href="https://github.com/victoriasloan"><img src="https://avatars.githubusercontent.com/u/9913651?v=4?s=100" width="100px;" alt=""/><br /><sub><b>victoriasloan</b></sub></a><br /><a href="https://github.com/Budibase/budibase/commits?author=victoriasloan" title="Code">💻</a></td>
<td align="center"><a href="https://github.com/yashank09"><img src="https://avatars.githubusercontent.com/u/37672190?v=4?s=100" width="100px;" alt=""/><br /><sub><b>yashank09</b></sub></a><br /><a href="https://github.com/Budibase/budibase/commits?author=yashank09" title="Code">💻</a></td>
<td align="center"><a href="https://github.com/SOVLOOKUP"><img src="https://avatars.githubusercontent.com/u/53158137?v=4?s=100" width="100px;" alt=""/><br /><sub><b>SOVLOOKUP</b></sub></a><br /><a href="https://github.com/Budibase/budibase/commits?author=SOVLOOKUP" title="Code">💻</a></td>
<td align="center"><a href="https://github.com/seoulaja"><img src="https://avatars.githubusercontent.com/u/15101654?v=4?s=100" width="100px;" alt=""/><br /><sub><b>seoulaja</b></sub></a><br /><a href="#translation-seoulaja" title="Translation">🌍</a></td>
<td align="center"><a href="https://github.com/mslourens"><img src="https://avatars.githubusercontent.com/u/1907152?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Maurits Lourens</b></sub></a><br /><a href="https://github.com/Budibase/budibase/commits?author=mslourens" title="Tests">⚠️</a> <a href="https://github.com/Budibase/budibase/commits?author=mslourens" title="Code">💻</a></td>
</tr>
</table>
@ -195,4 +280,5 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
<!-- ALL-CONTRIBUTORS-LIST:END -->
This project follows the [all-contributors](https://github.com/all-contributors/all-contributors) specification. Contributions of any kind welcome!
This project follows the [all-contributors](https://github.com/all-contributors/all-contributors) specification.
Contributions of any kind are appreciated!

View File

@ -1,5 +1,5 @@
{
"version": "1.1.33-alpha.4",
"version": "1.4.18-alpha.1",
"npmClient": "yarn",
"packages": [
"packages/*"

View File

@ -13,6 +13,7 @@
"js-yaml": "^4.1.0",
"kill-port": "^1.6.1",
"lerna": "3.14.1",
"madge": "^5.0.1",
"prettier": "^2.3.1",
"prettier-plugin-svelte": "^2.3.0",
"rimraf": "^3.0.2",
@ -25,6 +26,8 @@
"bootstrap": "lerna bootstrap && lerna link && ./scripts/link-dependencies.sh",
"build": "lerna run build",
"build:dev": "lerna run prebuild && tsc --build --watch --preserveWatchOutput",
"build:sdk": "lerna run build:sdk",
"deps:circular": "madge packages/server/dist/index.js packages/worker/src/index.ts packages/backend-core/dist/src/index.js packages/cli/src/index.js --circular",
"release": "lerna publish ${RELEASE_VERSION_TYPE:-patch} --yes --force-publish && yarn release:pro",
"release:develop": "lerna publish prerelease --yes --force-publish --dist-tag develop --exact && yarn release:pro:develop",
"release:pro": "bash scripts/pro/release.sh",
@ -45,8 +48,8 @@
"lint:eslint": "eslint packages",
"lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\"",
"lint": "yarn run lint:eslint && yarn run lint:prettier",
"lint:fix:eslint": "eslint --fix packages",
"lint:fix:prettier": "prettier --write \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\"",
"lint:fix:eslint": "eslint --fix packages qa-core",
"lint:fix:prettier": "prettier --write \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\" && prettier --write \"qa-core/**/*.{js,ts,svelte}\"",
"lint:fix": "yarn run lint:fix:prettier && yarn run lint:fix:eslint",
"test:e2e": "lerna run cy:test --stream",
"test:e2e:ci": "lerna run cy:ci --stream",

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/backend-core",
"version": "1.1.33-alpha.4",
"version": "1.4.18-alpha.1",
"description": "Budibase backend core libraries used in server and worker",
"main": "dist/src/index.js",
"types": "dist/src/index.d.ts",
@ -20,10 +20,12 @@
"test:watch": "jest --watchAll"
},
"dependencies": {
"@budibase/types": "1.1.33-alpha.4",
"@budibase/types": "1.4.18-alpha.1",
"@shopify/jest-koa-mocks": "5.0.1",
"@techpass/passport-openidconnect": "0.3.2",
"aws-sdk": "2.1030.0",
"bcrypt": "5.0.1",
"bcryptjs": "2.4.3",
"dotenv": "16.0.1",
"emitter-listener": "1.1.2",
"ioredis": "4.28.0",
@ -60,7 +62,6 @@
]
},
"devDependencies": {
"@shopify/jest-koa-mocks": "3.1.5",
"@types/jest": "27.5.1",
"@types/koa": "2.0.52",
"@types/lodash": "4.14.180",

View File

@ -0,0 +1,3 @@
module.exports = {
...require("./src/plugin"),
}

View File

@ -1,11 +1,11 @@
const passport = require("koa-passport")
const LocalStrategy = require("passport-local").Strategy
const JwtStrategy = require("passport-jwt").Strategy
const { getGlobalDB } = require("./tenancy")
import { getGlobalDB } from "./tenancy"
const refresh = require("passport-oauth2-refresh")
const { Configs } = require("./constants")
const { getScopedConfig } = require("./db/utils")
const {
import { Configs } from "./constants"
import { getScopedConfig } from "./db/utils"
import {
jwt,
local,
authenticated,
@ -13,39 +13,44 @@ const {
oidc,
auditLog,
tenancy,
appTenancy,
authError,
ssoCallbackUrl,
csrf,
internalApi,
adminOnly,
builderOnly,
builderOrAdmin,
joiValidator,
} = require("./middleware")
const { invalidateUser } = require("./cache/user")
} from "./middleware"
import { invalidateUser } from "./cache/user"
import { User } from "@budibase/types"
// Strategies
passport.use(new LocalStrategy(local.options, local.authenticate))
passport.use(new JwtStrategy(jwt.options, jwt.authenticate))
passport.serializeUser((user, done) => done(null, user))
passport.serializeUser((user: User, done: any) => done(null, user))
passport.deserializeUser(async (user, done) => {
passport.deserializeUser(async (user: User, done: any) => {
const db = getGlobalDB()
try {
const user = await db.get(user._id)
return done(null, user)
const dbUser = await db.get(user._id)
return done(null, dbUser)
} catch (err) {
console.error(`User not found`, err)
return done(null, false, { message: "User not found" })
}
})
async function refreshOIDCAccessToken(db, chosenConfig, refreshToken) {
async function refreshOIDCAccessToken(
db: any,
chosenConfig: any,
refreshToken: string
) {
const callbackUrl = await oidc.getCallbackUrl(db, chosenConfig)
let enrichedConfig
let strategy
let enrichedConfig: any
let strategy: any
try {
enrichedConfig = await oidc.fetchStrategyConfig(chosenConfig, callbackUrl)
@ -68,22 +73,28 @@ async function refreshOIDCAccessToken(db, chosenConfig, refreshToken) {
refresh.requestNewAccessToken(
Configs.OIDC,
refreshToken,
(err, accessToken, refreshToken, params) => {
(err: any, accessToken: string, refreshToken: any, params: any) => {
resolve({ err, accessToken, refreshToken, params })
}
)
})
}
async function refreshGoogleAccessToken(db, config, refreshToken) {
async function refreshGoogleAccessToken(
db: any,
config: any,
refreshToken: any
) {
let callbackUrl = await google.getCallbackUrl(db, config)
let strategy
try {
strategy = await google.strategyFactory(config, callbackUrl)
} catch (err) {
} catch (err: any) {
console.error(err)
throw new Error("Error constructing OIDC refresh strategy", err)
throw new Error(
`Error constructing OIDC refresh strategy: message=${err.message}`
)
}
refresh.use(strategy)
@ -92,14 +103,18 @@ async function refreshGoogleAccessToken(db, config, refreshToken) {
refresh.requestNewAccessToken(
Configs.GOOGLE,
refreshToken,
(err, accessToken, refreshToken, params) => {
(err: any, accessToken: string, refreshToken: string, params: any) => {
resolve({ err, accessToken, refreshToken, params })
}
)
})
}
async function refreshOAuthToken(refreshToken, configType, configId) {
async function refreshOAuthToken(
refreshToken: string,
configType: string,
configId: string
) {
const db = getGlobalDB()
const config = await getScopedConfig(db, {
@ -111,7 +126,7 @@ async function refreshOAuthToken(refreshToken, configType, configId) {
let refreshResponse
if (configType === Configs.OIDC) {
// configId - retrieved from cookie.
chosenConfig = config.configs.filter(c => c.uuid === configId)[0]
chosenConfig = config.configs.filter((c: any) => c.uuid === configId)[0]
if (!chosenConfig) {
throw new Error("Invalid OIDC configuration")
}
@ -132,7 +147,7 @@ async function refreshOAuthToken(refreshToken, configType, configId) {
return refreshResponse
}
async function updateUserOAuth(userId, oAuthConfig) {
async function updateUserOAuth(userId: string, oAuthConfig: any) {
const details = {
accessToken: oAuthConfig.accessToken,
refreshToken: oAuthConfig.refreshToken,
@ -160,14 +175,13 @@ async function updateUserOAuth(userId, oAuthConfig) {
}
}
module.exports = {
export = {
buildAuthMiddleware: authenticated,
passport,
google,
oidc,
jwt: require("jsonwebtoken"),
buildTenancyMiddleware: tenancy,
buildAppTenancyMiddleware: appTenancy,
auditLog,
authError,
buildCsrfMiddleware: csrf,
@ -176,5 +190,7 @@ module.exports = {
updateUserOAuth,
ssoCallbackUrl,
adminOnly,
builderOnly,
builderOrAdmin,
joiValidator,
}

View File

@ -1,6 +1,6 @@
const redis = require("../redis/init")
const { doWithDB } = require("../db")
const { DocumentTypes } = require("../db/constants")
const { DocumentType } = require("../db/constants")
const AppState = {
INVALID: "invalid",
@ -14,7 +14,7 @@ const populateFromDB = async appId => {
return doWithDB(
appId,
db => {
return db.get(DocumentTypes.APP_METADATA)
return db.get(DocumentType.APP_METADATA)
},
{ skip_setup: true }
)

View File

@ -9,6 +9,7 @@ exports.CacheKeys = {
UNIQUE_TENANT_ID: "uniqueTenantId",
EVENTS: "events",
BACKFILL_METADATA: "backfillMetadata",
EVENTS_RATE_LIMIT: "eventsRateLimit",
}
exports.TTL = {

View File

@ -7,6 +7,7 @@ exports.Cookies = {
CurrentApp: "budibase:currentapp",
Auth: "budibase:auth",
Init: "budibase:init",
ACCOUNT_RETURN_URL: "budibase:account:returnurl",
DatasourceAuth: "budibase:datasourceauth",
OIDC_CONFIG: "budibase:oidc:config",
}

View File

@ -1,4 +1,4 @@
export enum ContextKeys {
export enum ContextKey {
TENANT_ID = "tenantId",
GLOBAL_DB = "globalDb",
APP_ID = "appId",

View File

@ -1,11 +1,11 @@
import env from "../environment"
import { SEPARATOR, DocumentTypes } from "../db/constants"
import { SEPARATOR, DocumentType } from "../db/constants"
import cls from "./FunctionContext"
import { dangerousGetDB, closeDB } from "../db"
import { baseGlobalDBName } from "../tenancy/utils"
import { baseGlobalDBName } from "../db/tenancy"
import { IdentityContext } from "@budibase/types"
import { DEFAULT_TENANT_ID as _DEFAULT_TENANT_ID } from "../constants"
import { ContextKeys } from "./constants"
import { ContextKey } from "./constants"
import {
updateUsing,
closeWithUsing,
@ -33,8 +33,8 @@ export const closeTenancy = async () => {
}
await closeDB(db)
// clear from context now that database is closed/task is finished
cls.setOnContext(ContextKeys.TENANT_ID, null)
cls.setOnContext(ContextKeys.GLOBAL_DB, null)
cls.setOnContext(ContextKey.TENANT_ID, null)
cls.setOnContext(ContextKey.GLOBAL_DB, null)
}
// export const isDefaultTenant = () => {
@ -54,7 +54,7 @@ export const getTenantIDFromAppID = (appId: string) => {
return null
}
const split = appId.split(SEPARATOR)
const hasDev = split[1] === DocumentTypes.DEV
const hasDev = split[1] === DocumentType.DEV
if ((hasDev && split.length === 3) || (!hasDev && split.length === 2)) {
return null
}
@ -83,14 +83,14 @@ export const doInTenant = (tenantId: string | null, task: any) => {
// invoke the task
return await task()
} finally {
await closeWithUsing(ContextKeys.TENANCY_IN_USE, () => {
await closeWithUsing(ContextKey.TENANCY_IN_USE, () => {
return closeTenancy()
})
}
}
const existing = cls.getFromContext(ContextKeys.TENANT_ID) === tenantId
return updateUsing(ContextKeys.TENANCY_IN_USE, existing, internal)
const existing = cls.getFromContext(ContextKey.TENANT_ID) === tenantId
return updateUsing(ContextKey.TENANCY_IN_USE, existing, internal)
}
export const doInAppContext = (appId: string, task: any) => {
@ -108,7 +108,7 @@ export const doInAppContext = (appId: string, task: any) => {
setAppTenantId(appId)
}
// set the app ID
cls.setOnContext(ContextKeys.APP_ID, appId)
cls.setOnContext(ContextKey.APP_ID, appId)
// preserve the identity
if (identity) {
@ -118,14 +118,14 @@ export const doInAppContext = (appId: string, task: any) => {
// invoke the task
return await task()
} finally {
await closeWithUsing(ContextKeys.APP_IN_USE, async () => {
await closeWithUsing(ContextKey.APP_IN_USE, async () => {
await closeAppDBs()
await closeTenancy()
})
}
}
const existing = cls.getFromContext(ContextKeys.APP_ID) === appId
return updateUsing(ContextKeys.APP_IN_USE, existing, internal)
const existing = cls.getFromContext(ContextKey.APP_ID) === appId
return updateUsing(ContextKey.APP_IN_USE, existing, internal)
}
export const doInIdentityContext = (identity: IdentityContext, task: any) => {
@ -135,7 +135,7 @@ export const doInIdentityContext = (identity: IdentityContext, task: any) => {
async function internal(opts = { existing: false }) {
if (!opts.existing) {
cls.setOnContext(ContextKeys.IDENTITY, identity)
cls.setOnContext(ContextKey.IDENTITY, identity)
// set the tenant so that doInTenant will preserve identity
if (identity.tenantId) {
updateTenantId(identity.tenantId)
@ -146,27 +146,27 @@ export const doInIdentityContext = (identity: IdentityContext, task: any) => {
// invoke the task
return await task()
} finally {
await closeWithUsing(ContextKeys.IDENTITY_IN_USE, async () => {
await closeWithUsing(ContextKey.IDENTITY_IN_USE, async () => {
setIdentity(null)
await closeTenancy()
})
}
}
const existing = cls.getFromContext(ContextKeys.IDENTITY)
return updateUsing(ContextKeys.IDENTITY_IN_USE, existing, internal)
const existing = cls.getFromContext(ContextKey.IDENTITY)
return updateUsing(ContextKey.IDENTITY_IN_USE, existing, internal)
}
export const getIdentity = (): IdentityContext | undefined => {
try {
return cls.getFromContext(ContextKeys.IDENTITY)
return cls.getFromContext(ContextKey.IDENTITY)
} catch (e) {
// do nothing - identity is not in context
}
}
export const updateTenantId = (tenantId: string | null) => {
cls.setOnContext(ContextKeys.TENANT_ID, tenantId)
cls.setOnContext(ContextKey.TENANT_ID, tenantId)
if (env.USE_COUCH) {
setGlobalDB(tenantId)
}
@ -176,7 +176,7 @@ export const updateAppId = async (appId: string) => {
try {
// have to close first, before removing the databases from context
await closeAppDBs()
cls.setOnContext(ContextKeys.APP_ID, appId)
cls.setOnContext(ContextKey.APP_ID, appId)
} catch (err) {
if (env.isTest()) {
TEST_APP_ID = appId
@ -189,12 +189,12 @@ export const updateAppId = async (appId: string) => {
export const setGlobalDB = (tenantId: string | null) => {
const dbName = baseGlobalDBName(tenantId)
const db = dangerousGetDB(dbName)
cls.setOnContext(ContextKeys.GLOBAL_DB, db)
cls.setOnContext(ContextKey.GLOBAL_DB, db)
return db
}
export const getGlobalDB = () => {
const db = cls.getFromContext(ContextKeys.GLOBAL_DB)
const db = cls.getFromContext(ContextKey.GLOBAL_DB)
if (!db) {
throw new Error("Global DB not found")
}
@ -202,7 +202,7 @@ export const getGlobalDB = () => {
}
export const isTenantIdSet = () => {
const tenantId = cls.getFromContext(ContextKeys.TENANT_ID)
const tenantId = cls.getFromContext(ContextKey.TENANT_ID)
return !!tenantId
}
@ -210,7 +210,7 @@ export const getTenantId = () => {
if (!isMultiTenant()) {
return DEFAULT_TENANT_ID
}
const tenantId = cls.getFromContext(ContextKeys.TENANT_ID)
const tenantId = cls.getFromContext(ContextKey.TENANT_ID)
if (!tenantId) {
throw new Error("Tenant id not found")
}
@ -218,7 +218,7 @@ export const getTenantId = () => {
}
export const getAppId = () => {
const foundId = cls.getFromContext(ContextKeys.APP_ID)
const foundId = cls.getFromContext(ContextKey.APP_ID)
if (!foundId && env.isTest() && TEST_APP_ID) {
return TEST_APP_ID
} else {
@ -226,12 +226,16 @@ export const getAppId = () => {
}
}
export const isTenancyEnabled = () => {
return env.MULTI_TENANCY
}
/**
* Opens the app database based on whatever the request
* contained, dev or prod.
*/
export const getAppDB = (opts?: any) => {
return getContextDB(ContextKeys.CURRENT_DB, opts)
return getContextDB(ContextKey.CURRENT_DB, opts)
}
/**
@ -239,7 +243,7 @@ export const getAppDB = (opts?: any) => {
* contained a development app ID, this will open the prod one.
*/
export const getProdAppDB = (opts?: any) => {
return getContextDB(ContextKeys.PROD_DB, opts)
return getContextDB(ContextKey.PROD_DB, opts)
}
/**
@ -247,5 +251,5 @@ export const getProdAppDB = (opts?: any) => {
* contained a prod app ID, this will open the dev one.
*/
export const getDevAppDB = (opts?: any) => {
return getContextDB(ContextKeys.DEV_DB, opts)
return getContextDB(ContextKey.DEV_DB, opts)
}

View File

@ -6,7 +6,7 @@ import {
} from "./index"
import cls from "./FunctionContext"
import { IdentityContext } from "@budibase/types"
import { ContextKeys } from "./constants"
import { ContextKey } from "./constants"
import { dangerousGetDB, closeDB } from "../db"
import { isEqual } from "lodash"
import { getDevelopmentAppID, getProdAppID } from "../db/conversions"
@ -47,17 +47,13 @@ export const setAppTenantId = (appId: string) => {
}
export const setIdentity = (identity: IdentityContext | null) => {
cls.setOnContext(ContextKeys.IDENTITY, identity)
cls.setOnContext(ContextKey.IDENTITY, identity)
}
// this function makes sure the PouchDB objects are closed and
// fully deleted when finished - this protects against memory leaks
export async function closeAppDBs() {
const dbKeys = [
ContextKeys.CURRENT_DB,
ContextKeys.PROD_DB,
ContextKeys.DEV_DB,
]
const dbKeys = [ContextKey.CURRENT_DB, ContextKey.PROD_DB, ContextKey.DEV_DB]
for (let dbKey of dbKeys) {
const db = cls.getFromContext(dbKey)
if (!db) {
@ -68,16 +64,16 @@ export async function closeAppDBs() {
cls.setOnContext(dbKey, null)
}
// clear the app ID now that the databases are closed
if (cls.getFromContext(ContextKeys.APP_ID)) {
cls.setOnContext(ContextKeys.APP_ID, null)
if (cls.getFromContext(ContextKey.APP_ID)) {
cls.setOnContext(ContextKey.APP_ID, null)
}
if (cls.getFromContext(ContextKeys.DB_OPTS)) {
cls.setOnContext(ContextKeys.DB_OPTS, null)
if (cls.getFromContext(ContextKey.DB_OPTS)) {
cls.setOnContext(ContextKey.DB_OPTS, null)
}
}
export function getContextDB(key: string, opts: any) {
const dbOptsKey = `${key}${ContextKeys.DB_OPTS}`
const dbOptsKey = `${key}${ContextKey.DB_OPTS}`
let storedOpts = cls.getFromContext(dbOptsKey)
let db = cls.getFromContext(key)
if (db && isEqual(opts, storedOpts)) {
@ -88,13 +84,13 @@ export function getContextDB(key: string, opts: any) {
let toUseAppId
switch (key) {
case ContextKeys.CURRENT_DB:
case ContextKey.CURRENT_DB:
toUseAppId = appId
break
case ContextKeys.PROD_DB:
case ContextKey.PROD_DB:
toUseAppId = getProdAppID(appId)
break
case ContextKeys.DEV_DB:
case ContextKey.DEV_DB:
toUseAppId = getDevelopmentAppID(appId)
break
}

View File

@ -1,4 +1,5 @@
import { dangerousGetDB, closeDB } from "."
import { DocumentType } from "./constants"
class Replication {
source: any
@ -53,6 +54,14 @@ class Replication {
return this.replication
}
appReplicateOpts() {
return {
filter: (doc: any) => {
return doc._id !== DocumentType.APP_METADATA
},
}
}
/**
* Rollback the target DB back to the state of the source DB
*/
@ -60,6 +69,7 @@ class Replication {
await this.target.destroy()
// Recreate the DB again
this.target = dangerousGetDB(this.target.name)
// take the opportunity to remove deleted tombstones
await this.replicate()
}

View File

@ -4,13 +4,13 @@ export const UNICODE_MAX = "\ufff0"
/**
* Can be used to create a few different forms of querying a view.
*/
export enum AutomationViewModes {
export enum AutomationViewMode {
ALL = "all",
AUTOMATION = "automation",
STATUS = "status",
}
export enum ViewNames {
export enum ViewName {
USER_BY_APP = "by_app",
USER_BY_EMAIL = "by_email2",
BY_API_KEY = "by_api_key",
@ -18,16 +18,19 @@ export enum ViewNames {
LINK = "by_link",
ROUTING = "screen_routes",
AUTOMATION_LOGS = "automation_logs",
ACCOUNT_BY_EMAIL = "account_by_email",
PLATFORM_USERS_LOWERCASE = "platform_users_lowercase",
USER_BY_GROUP = "by_group_user",
}
export const DeprecatedViews = {
[ViewNames.USER_BY_EMAIL]: [
[ViewName.USER_BY_EMAIL]: [
// removed due to inaccuracy in view doc filter logic
"by_email",
],
}
export enum DocumentTypes {
export enum DocumentType {
USER = "us",
GROUP = "gr",
WORKSPACE = "workspace",
@ -41,6 +44,8 @@ export enum DocumentTypes {
MIGRATIONS = "migrations",
DEV_INFO = "devinfo",
AUTOMATION_LOG = "log_au",
ACCOUNT_METADATA = "acc_metadata",
PLUGIN = "plg",
}
export const StaticDatabases = {
@ -62,6 +67,6 @@ export const StaticDatabases = {
},
}
export const APP_PREFIX = exports.DocumentTypes.APP + exports.SEPARATOR
export const APP_DEV = exports.DocumentTypes.APP_DEV + exports.SEPARATOR
export const APP_PREFIX = DocumentType.APP + SEPARATOR
export const APP_DEV = DocumentType.APP_DEV + SEPARATOR
export const APP_DEV_PREFIX = APP_DEV

View File

@ -0,0 +1,22 @@
import { DEFAULT_TENANT_ID } from "../constants"
import { StaticDatabases, SEPARATOR } from "./constants"
import { getTenantId } from "../context"
export const getGlobalDBName = (tenantId?: string) => {
// the tenant ID can be set externally, for example in the user API
// where new tenants are being created
if (!tenantId) {
tenantId = getTenantId()
}
return baseGlobalDBName(tenantId)
}
export const baseGlobalDBName = (tenantId: string | undefined | null) => {
let dbName
if (!tenantId || tenantId === DEFAULT_TENANT_ID) {
dbName = StaticDatabases.GLOBAL.name
} else {
dbName = `${tenantId}${SEPARATOR}${StaticDatabases.GLOBAL.name}`
}
return dbName
}

View File

@ -1,8 +1,9 @@
import { newid } from "../hashing"
import { DEFAULT_TENANT_ID, Configs } from "../constants"
import env from "../environment"
import { SEPARATOR, DocumentTypes, UNICODE_MAX, ViewNames } from "./constants"
import { getTenantId, getGlobalDBName, getGlobalDB } from "../tenancy"
import { SEPARATOR, DocumentType, UNICODE_MAX, ViewName } from "./constants"
import { getTenantId, getGlobalDB } from "../context"
import { getGlobalDBName } from "./tenancy"
import fetch from "node-fetch"
import { doWithDB, allDbs } from "./index"
import { getCouchInfo } from "./pouch"
@ -15,6 +16,7 @@ import * as events from "../events"
export * from "./constants"
export * from "./conversions"
export { default as Replication } from "./Replication"
export * from "./tenancy"
/**
* Generates a new app ID.
@ -58,7 +60,7 @@ export function getDocParams(
/**
* Retrieve the correct index for a view based on default design DB.
*/
export function getQueryIndex(viewName: ViewNames) {
export function getQueryIndex(viewName: ViewName) {
return `database/${viewName}`
}
@ -67,7 +69,7 @@ export function getQueryIndex(viewName: ViewNames) {
* @returns {string} The new workspace ID which the workspace doc can be stored under.
*/
export function generateWorkspaceID() {
return `${DocumentTypes.WORKSPACE}${SEPARATOR}${newid()}`
return `${DocumentType.WORKSPACE}${SEPARATOR}${newid()}`
}
/**
@ -76,8 +78,8 @@ export function generateWorkspaceID() {
export function getWorkspaceParams(id = "", otherProps = {}) {
return {
...otherProps,
startkey: `${DocumentTypes.WORKSPACE}${SEPARATOR}${id}`,
endkey: `${DocumentTypes.WORKSPACE}${SEPARATOR}${id}${UNICODE_MAX}`,
startkey: `${DocumentType.WORKSPACE}${SEPARATOR}${id}`,
endkey: `${DocumentType.WORKSPACE}${SEPARATOR}${id}${UNICODE_MAX}`,
}
}
@ -86,7 +88,7 @@ export function getWorkspaceParams(id = "", otherProps = {}) {
* @returns {string} The new user ID which the user doc can be stored under.
*/
export function generateGlobalUserID(id?: any) {
return `${DocumentTypes.USER}${SEPARATOR}${id || newid()}`
return `${DocumentType.USER}${SEPARATOR}${id || newid()}`
}
/**
@ -102,8 +104,8 @@ export function getGlobalUserParams(globalId: any, otherProps: any = {}) {
// need to include this in case of pagination
startkey: startkey
? startkey
: `${DocumentTypes.USER}${SEPARATOR}${globalId}`,
endkey: `${DocumentTypes.USER}${SEPARATOR}${globalId}${UNICODE_MAX}`,
: `${DocumentType.USER}${SEPARATOR}${globalId}`,
endkey: `${DocumentType.USER}${SEPARATOR}${globalId}${UNICODE_MAX}`,
}
}
@ -121,7 +123,7 @@ export function getUsersByAppParams(appId: any, otherProps: any = {}) {
* @param ownerId The owner/user of the template, this could be global or a workspace level.
*/
export function generateTemplateID(ownerId: any) {
return `${DocumentTypes.TEMPLATE}${SEPARATOR}${ownerId}${SEPARATOR}${newid()}`
return `${DocumentType.TEMPLATE}${SEPARATOR}${ownerId}${SEPARATOR}${newid()}`
}
export function generateAppUserID(prodAppId: string, userId: string) {
@ -143,7 +145,7 @@ export function getTemplateParams(
if (templateId) {
final = templateId
} else {
final = `${DocumentTypes.TEMPLATE}${SEPARATOR}${ownerId}${SEPARATOR}`
final = `${DocumentType.TEMPLATE}${SEPARATOR}${ownerId}${SEPARATOR}`
}
return {
...otherProps,
@ -157,14 +159,14 @@ export function getTemplateParams(
* @returns {string} The new role ID which the role doc can be stored under.
*/
export function generateRoleID(id: any) {
return `${DocumentTypes.ROLE}${SEPARATOR}${id || newid()}`
return `${DocumentType.ROLE}${SEPARATOR}${id || newid()}`
}
/**
* Gets parameters for retrieving a role, this is a utility function for the getDocParams function.
*/
export function getRoleParams(roleId = null, otherProps = {}) {
return getDocParams(DocumentTypes.ROLE, roleId, otherProps)
return getDocParams(DocumentType.ROLE, roleId, otherProps)
}
export function getStartEndKeyURL(base: any, baseKey: any, tenantId = null) {
@ -211,9 +213,9 @@ export async function getAllDbs(opts = { efficient: false }) {
await addDbs(couchUrl)
} else {
// get prod apps
await addDbs(getStartEndKeyURL(couchUrl, DocumentTypes.APP, tenantId))
await addDbs(getStartEndKeyURL(couchUrl, DocumentType.APP, tenantId))
// get dev apps
await addDbs(getStartEndKeyURL(couchUrl, DocumentTypes.APP_DEV, tenantId))
await addDbs(getStartEndKeyURL(couchUrl, DocumentType.APP_DEV, tenantId))
// add global db name
dbs.push(getGlobalDBName(tenantId))
}
@ -233,14 +235,18 @@ export async function getAllApps({ dev, all, idsOnly, efficient }: any = {}) {
}
let dbs = await getAllDbs({ efficient })
const appDbNames = dbs.filter((dbName: any) => {
if (env.isTest() && !dbName) {
return false
}
const split = dbName.split(SEPARATOR)
// it is an app, check the tenantId
if (split[0] === DocumentTypes.APP) {
if (split[0] === DocumentType.APP) {
// tenantId is always right before the UUID
const possibleTenantId = split[split.length - 2]
const noTenantId =
split.length === 2 || possibleTenantId === DocumentTypes.DEV
split.length === 2 || possibleTenantId === DocumentType.DEV
return (
(tenantId === DEFAULT_TENANT_ID && noTenantId) ||
@ -250,7 +256,16 @@ export async function getAllApps({ dev, all, idsOnly, efficient }: any = {}) {
return false
})
if (idsOnly) {
return appDbNames
const devAppIds = appDbNames.filter(appId => isDevAppID(appId))
const prodAppIds = appDbNames.filter(appId => !isDevAppID(appId))
switch (dev) {
case true:
return devAppIds
case false:
return prodAppIds
default:
return appDbNames
}
}
const appPromises = appDbNames.map((app: any) =>
// skip setup otherwise databases could be re-created
@ -326,7 +341,7 @@ export async function dbExists(dbName: any) {
export const generateConfigID = ({ type, workspace, user }: any) => {
const scope = [type, workspace, user].filter(Boolean).join(SEPARATOR)
return `${DocumentTypes.CONFIG}${SEPARATOR}${scope}`
return `${DocumentType.CONFIG}${SEPARATOR}${scope}`
}
/**
@ -340,8 +355,8 @@ export const getConfigParams = (
return {
...otherProps,
startkey: `${DocumentTypes.CONFIG}${SEPARATOR}${scope}`,
endkey: `${DocumentTypes.CONFIG}${SEPARATOR}${scope}${UNICODE_MAX}`,
startkey: `${DocumentType.CONFIG}${SEPARATOR}${scope}`,
endkey: `${DocumentType.CONFIG}${SEPARATOR}${scope}${UNICODE_MAX}`,
}
}
@ -350,7 +365,22 @@ export const getConfigParams = (
* @returns {string} The new dev info ID which info for dev (like api key) can be stored under.
*/
export const generateDevInfoID = (userId: any) => {
return `${DocumentTypes.DEV_INFO}${SEPARATOR}${userId}`
return `${DocumentType.DEV_INFO}${SEPARATOR}${userId}`
}
/**
* Generates a new plugin ID - to be used in the global DB.
* @returns {string} The new plugin ID which a plugin metadata document can be stored under.
*/
export const generatePluginID = (name: string) => {
return `${DocumentType.PLUGIN}${SEPARATOR}${name}`
}
/**
* Gets parameters for retrieving plugins, this is a utility function for the getDocParams function.
*/
export const getPluginParams = (pluginId?: string | null, otherProps = {}) => {
return getDocParams(DocumentType.PLUGIN, pluginId, otherProps)
}
/**

View File

@ -1,158 +0,0 @@
const {
DocumentTypes,
ViewNames,
DeprecatedViews,
SEPARATOR,
} = require("./utils")
const { getGlobalDB } = require("../tenancy")
const DESIGN_DB = "_design/database"
function DesignDoc() {
return {
_id: DESIGN_DB,
// view collation information, read before writing any complex views:
// https://docs.couchdb.org/en/master/ddocs/views/collation.html#collation-specification
views: {},
}
}
async function removeDeprecated(db, viewName) {
if (!DeprecatedViews[viewName]) {
return
}
try {
const designDoc = await db.get(DESIGN_DB)
for (let deprecatedNames of DeprecatedViews[viewName]) {
delete designDoc.views[deprecatedNames]
}
await db.put(designDoc)
} catch (err) {
// doesn't exist, ignore
}
}
exports.createNewUserEmailView = async () => {
const db = getGlobalDB()
let designDoc
try {
designDoc = await db.get(DESIGN_DB)
} catch (err) {
// no design doc, make one
designDoc = DesignDoc()
}
const view = {
// if using variables in a map function need to inject them before use
map: `function(doc) {
if (doc._id.startsWith("${DocumentTypes.USER}${SEPARATOR}")) {
emit(doc.email.toLowerCase(), doc._id)
}
}`,
}
designDoc.views = {
...designDoc.views,
[ViewNames.USER_BY_EMAIL]: view,
}
await db.put(designDoc)
}
exports.createUserAppView = async () => {
const db = getGlobalDB()
let designDoc
try {
designDoc = await db.get("_design/database")
} catch (err) {
// no design doc, make one
designDoc = DesignDoc()
}
const view = {
// if using variables in a map function need to inject them before use
map: `function(doc) {
if (doc._id.startsWith("${DocumentTypes.USER}${SEPARATOR}") && doc.roles) {
for (let prodAppId of Object.keys(doc.roles)) {
let emitted = prodAppId + "${SEPARATOR}" + doc._id
emit(emitted, null)
}
}
}`,
}
designDoc.views = {
...designDoc.views,
[ViewNames.USER_BY_APP]: view,
}
await db.put(designDoc)
}
exports.createApiKeyView = async () => {
const db = getGlobalDB()
let designDoc
try {
designDoc = await db.get("_design/database")
} catch (err) {
designDoc = DesignDoc()
}
const view = {
map: `function(doc) {
if (doc._id.startsWith("${DocumentTypes.DEV_INFO}") && doc.apiKey) {
emit(doc.apiKey, doc.userId)
}
}`,
}
designDoc.views = {
...designDoc.views,
[ViewNames.BY_API_KEY]: view,
}
await db.put(designDoc)
}
exports.createUserBuildersView = async () => {
const db = getGlobalDB()
let designDoc
try {
designDoc = await db.get("_design/database")
} catch (err) {
// no design doc, make one
designDoc = DesignDoc()
}
const view = {
map: `function(doc) {
if (doc.builder && doc.builder.global === true) {
emit(doc._id, doc._id)
}
}`,
}
designDoc.views = {
...designDoc.views,
[ViewNames.USER_BY_BUILDERS]: view,
}
await db.put(designDoc)
}
exports.queryGlobalView = async (viewName, params, db = null) => {
const CreateFuncByName = {
[ViewNames.USER_BY_EMAIL]: exports.createNewUserEmailView,
[ViewNames.BY_API_KEY]: exports.createApiKeyView,
[ViewNames.USER_BY_BUILDERS]: exports.createUserBuildersView,
[ViewNames.USER_BY_APP]: exports.createUserAppView,
}
// can pass DB in if working with something specific
if (!db) {
db = getGlobalDB()
}
try {
let response = (await db.query(`database/${viewName}`, params)).rows
response = response.map(resp =>
params.include_docs ? resp.doc : resp.value
)
return response.length <= 1 ? response[0] : response
} catch (err) {
if (err != null && err.name === "not_found") {
const createFunc = CreateFuncByName[viewName]
await removeDeprecated(db, viewName)
await createFunc()
return exports.queryGlobalView(viewName, params)
} else {
throw err
}
}
}

View File

@ -0,0 +1,199 @@
import { DocumentType, ViewName, DeprecatedViews, SEPARATOR } from "./utils"
import { getGlobalDB } from "../context"
import PouchDB from "pouchdb"
import { StaticDatabases } from "./constants"
import { doWithDB } from "./"
const DESIGN_DB = "_design/database"
function DesignDoc() {
return {
_id: DESIGN_DB,
// view collation information, read before writing any complex views:
// https://docs.couchdb.org/en/master/ddocs/views/collation.html#collation-specification
views: {},
}
}
interface DesignDocument {
views: any
}
async function removeDeprecated(db: PouchDB.Database, viewName: ViewName) {
// @ts-ignore
if (!DeprecatedViews[viewName]) {
return
}
try {
const designDoc = await db.get<DesignDocument>(DESIGN_DB)
// @ts-ignore
for (let deprecatedNames of DeprecatedViews[viewName]) {
delete designDoc.views[deprecatedNames]
}
await db.put(designDoc)
} catch (err) {
// doesn't exist, ignore
}
}
export async function createView(db: any, viewJs: string, viewName: string) {
let designDoc
try {
designDoc = (await db.get(DESIGN_DB)) as DesignDocument
} catch (err) {
// no design doc, make one
designDoc = DesignDoc()
}
const view = {
map: viewJs,
}
designDoc.views = {
...designDoc.views,
[viewName]: view,
}
await db.put(designDoc)
}
export const createNewUserEmailView = async () => {
const db = getGlobalDB()
const viewJs = `function(doc) {
if (doc._id.startsWith("${DocumentType.USER}${SEPARATOR}")) {
emit(doc.email.toLowerCase(), doc._id)
}
}`
await createView(db, viewJs, ViewName.USER_BY_EMAIL)
}
export const createAccountEmailView = async () => {
const viewJs = `function(doc) {
if (doc._id.startsWith("${DocumentType.ACCOUNT_METADATA}${SEPARATOR}")) {
emit(doc.email.toLowerCase(), doc._id)
}
}`
await doWithDB(
StaticDatabases.PLATFORM_INFO.name,
async (db: PouchDB.Database) => {
await createView(db, viewJs, ViewName.ACCOUNT_BY_EMAIL)
}
)
}
export const createUserAppView = async () => {
const db = getGlobalDB() as PouchDB.Database
const viewJs = `function(doc) {
if (doc._id.startsWith("${DocumentType.USER}${SEPARATOR}") && doc.roles) {
for (let prodAppId of Object.keys(doc.roles)) {
let emitted = prodAppId + "${SEPARATOR}" + doc._id
emit(emitted, null)
}
}
}`
await createView(db, viewJs, ViewName.USER_BY_APP)
}
export const createApiKeyView = async () => {
const db = getGlobalDB()
const viewJs = `function(doc) {
if (doc._id.startsWith("${DocumentType.DEV_INFO}") && doc.apiKey) {
emit(doc.apiKey, doc.userId)
}
}`
await createView(db, viewJs, ViewName.BY_API_KEY)
}
export const createUserBuildersView = async () => {
const db = getGlobalDB()
const viewJs = `function(doc) {
if (doc.builder && doc.builder.global === true) {
emit(doc._id, doc._id)
}
}`
await createView(db, viewJs, ViewName.USER_BY_BUILDERS)
}
export const createPlatformUserView = async () => {
const viewJs = `function(doc) {
if (doc.tenantId) {
emit(doc._id.toLowerCase(), doc._id)
}
}`
await doWithDB(
StaticDatabases.PLATFORM_INFO.name,
async (db: PouchDB.Database) => {
await createView(db, viewJs, ViewName.PLATFORM_USERS_LOWERCASE)
}
)
}
export interface QueryViewOptions {
arrayResponse?: boolean
}
export const queryView = async <T>(
viewName: ViewName,
params: PouchDB.Query.Options<T, T>,
db: PouchDB.Database,
createFunc: any,
opts?: QueryViewOptions
): Promise<T[] | T | undefined> => {
try {
let response = await db.query<T, T>(`database/${viewName}`, params)
const rows = response.rows
const docs = rows.map(row => (params.include_docs ? row.doc : row.value))
// if arrayResponse has been requested, always return array regardless of length
if (opts?.arrayResponse) {
return docs
} else {
// return the single document if there is only one
return docs.length <= 1 ? docs[0] : docs
}
} catch (err: any) {
if (err != null && err.name === "not_found") {
await removeDeprecated(db, viewName)
await createFunc()
return queryView(viewName, params, db, createFunc, opts)
} else {
throw err
}
}
}
export const queryPlatformView = async <T>(
viewName: ViewName,
params: PouchDB.Query.Options<T, T>,
opts?: QueryViewOptions
): Promise<T[] | T | undefined> => {
const CreateFuncByName: any = {
[ViewName.ACCOUNT_BY_EMAIL]: createAccountEmailView,
[ViewName.PLATFORM_USERS_LOWERCASE]: createPlatformUserView,
}
return doWithDB(
StaticDatabases.PLATFORM_INFO.name,
async (db: PouchDB.Database) => {
const createFn = CreateFuncByName[viewName]
return queryView(viewName, params, db, createFn, opts)
}
)
}
export const queryGlobalView = async <T>(
viewName: ViewName,
params: PouchDB.Query.Options<T, T>,
db?: PouchDB.Database,
opts?: QueryViewOptions
): Promise<T[] | T | undefined> => {
const CreateFuncByName: any = {
[ViewName.USER_BY_EMAIL]: createNewUserEmailView,
[ViewName.BY_API_KEY]: createApiKeyView,
[ViewName.USER_BY_BUILDERS]: createUserBuildersView,
[ViewName.USER_BY_APP]: createUserAppView,
}
// can pass DB in if working with something specific
if (!db) {
db = getGlobalDB() as PouchDB.Database
}
const createFn = CreateFuncByName[viewName]
return queryView(viewName, params, db, createFn, opts)
}

View File

@ -16,9 +16,19 @@ if (!LOADED && isDev() && !isTest()) {
LOADED = true
}
const DefaultBucketName = {
BACKUPS: "backups",
APPS: "prod-budi-app-assets",
TEMPLATES: "templates",
GLOBAL: "global",
CLOUD: "prod-budi-tenant-uploads",
PLUGINS: "plugins",
}
const env = {
isTest,
isDev,
JS_BCRYPT: process.env.JS_BCRYPT,
JWT_SECRET: process.env.JWT_SECRET,
COUCH_DB_URL: process.env.COUCH_DB_URL || "http://localhost:4005",
COUCH_DB_USERNAME: process.env.COUCH_DB_USER,
@ -36,7 +46,7 @@ const env = {
MULTI_TENANCY: process.env.MULTI_TENANCY,
ACCOUNT_PORTAL_URL:
process.env.ACCOUNT_PORTAL_URL || "https://account.budibase.app",
ACCOUNT_PORTAL_API_KEY: process.env.ACCOUNT_PORTAL_API_KEY,
ACCOUNT_PORTAL_API_KEY: process.env.ACCOUNT_PORTAL_API_KEY || "",
DISABLE_ACCOUNT_PORTAL: process.env.DISABLE_ACCOUNT_PORTAL,
SELF_HOSTED: !!parseInt(process.env.SELF_HOSTED || ""),
COOKIE_DOMAIN: process.env.COOKIE_DOMAIN,
@ -44,17 +54,24 @@ const env = {
POSTHOG_TOKEN: process.env.POSTHOG_TOKEN,
ENABLE_ANALYTICS: process.env.ENABLE_ANALYTICS,
TENANT_FEATURE_FLAGS: process.env.TENANT_FEATURE_FLAGS,
BACKUPS_BUCKET_NAME: process.env.BACKUPS_BUCKET_NAME || "backups",
APPS_BUCKET_NAME: process.env.APPS_BUCKET_NAME || "prod-budi-app-assets",
TEMPLATES_BUCKET_NAME: process.env.TEMPLATES_BUCKET_NAME || "templates",
GLOBAL_BUCKET_NAME: process.env.GLOBAL_BUCKET_NAME || "global",
BACKUPS_BUCKET_NAME:
process.env.BACKUPS_BUCKET_NAME || DefaultBucketName.BACKUPS,
APPS_BUCKET_NAME: process.env.APPS_BUCKET_NAME || DefaultBucketName.APPS,
TEMPLATES_BUCKET_NAME:
process.env.TEMPLATES_BUCKET_NAME || DefaultBucketName.TEMPLATES,
GLOBAL_BUCKET_NAME:
process.env.GLOBAL_BUCKET_NAME || DefaultBucketName.GLOBAL,
GLOBAL_CLOUD_BUCKET_NAME:
process.env.GLOBAL_CLOUD_BUCKET_NAME || "prod-budi-tenant-uploads",
process.env.GLOBAL_CLOUD_BUCKET_NAME || DefaultBucketName.CLOUD,
PLUGIN_BUCKET_NAME:
process.env.PLUGIN_BUCKET_NAME || DefaultBucketName.PLUGINS,
USE_COUCH: process.env.USE_COUCH || true,
DISABLE_DEVELOPER_LICENSE: process.env.DISABLE_DEVELOPER_LICENSE,
DEFAULT_LICENSE: process.env.DEFAULT_LICENSE,
SERVICE: process.env.SERVICE || "budibase",
MEMORY_LEAK_CHECK: process.env.MEMORY_LEAK_CHECK || false,
LOG_LEVEL: process.env.LOG_LEVEL,
SESSION_UPDATE_PERIOD: process.env.SESSION_UPDATE_PERIOD,
DEPLOYMENT_ENVIRONMENT:
process.env.DEPLOYMENT_ENVIRONMENT || "docker-compose",
_set(key: any, value: any) {

View File

@ -1,11 +0,0 @@
class BudibaseError extends Error {
constructor(message, code, type) {
super(message)
this.code = code
this.type = type
}
}
module.exports = {
BudibaseError,
}

View File

@ -0,0 +1,10 @@
export class BudibaseError extends Error {
code: string
type: string
constructor(message: string, code: string, type: string) {
super(message)
this.code = code
this.type = type
}
}

View File

@ -1,11 +0,0 @@
const { BudibaseError } = require("./base")
class GenericError extends BudibaseError {
constructor(message, code, type) {
super(message, code, type ? type : "generic")
}
}
module.exports = {
GenericError,
}

View File

@ -0,0 +1,7 @@
import { BudibaseError } from "./base"
export class GenericError extends BudibaseError {
constructor(message: string, code: string, type: string) {
super(message, code, type ? type : "generic")
}
}

View File

@ -1,12 +0,0 @@
const { GenericError } = require("./generic")
class HTTPError extends GenericError {
constructor(message, httpStatus, code = "http", type = "generic") {
super(message, code, type)
this.status = httpStatus
}
}
module.exports = {
HTTPError,
}

View File

@ -0,0 +1,15 @@
import { GenericError } from "./generic"
export class HTTPError extends GenericError {
status: number
constructor(
message: string,
httpStatus: number,
code = "http",
type = "generic"
) {
super(message, code, type)
this.status = httpStatus
}
}

View File

@ -1,5 +1,6 @@
const http = require("./http")
const licensing = require("./licensing")
import { HTTPError } from "./http"
import { UsageLimitError, FeatureDisabledError } from "./licensing"
import * as licensing from "./licensing"
const codes = {
...licensing.codes,
@ -11,7 +12,7 @@ const context = {
...licensing.context,
}
const getPublicError = err => {
const getPublicError = (err: any) => {
let error
if (err.code || err.type) {
// add generic error information
@ -32,13 +33,15 @@ const getPublicError = err => {
return error
}
module.exports = {
const pkg = {
codes,
types,
errors: {
UsageLimitError: licensing.UsageLimitError,
FeatureDisabledError: licensing.FeatureDisabledError,
HTTPError: http.HTTPError,
UsageLimitError,
FeatureDisabledError,
HTTPError,
},
getPublicError,
}
export = pkg

View File

@ -1,43 +0,0 @@
const { HTTPError } = require("./http")
const type = "license_error"
const codes = {
USAGE_LIMIT_EXCEEDED: "usage_limit_exceeded",
FEATURE_DISABLED: "feature_disabled",
}
const context = {
[codes.USAGE_LIMIT_EXCEEDED]: err => {
return {
limitName: err.limitName,
}
},
[codes.FEATURE_DISABLED]: err => {
return {
featureName: err.featureName,
}
},
}
class UsageLimitError extends HTTPError {
constructor(message, limitName) {
super(message, 400, codes.USAGE_LIMIT_EXCEEDED, type)
this.limitName = limitName
}
}
class FeatureDisabledError extends HTTPError {
constructor(message, featureName) {
super(message, 400, codes.FEATURE_DISABLED, type)
this.featureName = featureName
}
}
module.exports = {
type,
codes,
context,
UsageLimitError,
FeatureDisabledError,
}

View File

@ -0,0 +1,39 @@
import { HTTPError } from "./http"
export const type = "license_error"
export const codes = {
USAGE_LIMIT_EXCEEDED: "usage_limit_exceeded",
FEATURE_DISABLED: "feature_disabled",
}
export const context = {
[codes.USAGE_LIMIT_EXCEEDED]: (err: any) => {
return {
limitName: err.limitName,
}
},
[codes.FEATURE_DISABLED]: (err: any) => {
return {
featureName: err.featureName,
}
},
}
export class UsageLimitError extends HTTPError {
limitName: string
constructor(message: string, limitName: string) {
super(message, 400, codes.USAGE_LIMIT_EXCEEDED, type)
this.limitName = limitName
}
}
export class FeatureDisabledError extends HTTPError {
featureName: string
constructor(message: string, featureName: string) {
super(message, 400, codes.FEATURE_DISABLED, type)
this.featureName = featureName
}
}

View File

@ -8,4 +8,5 @@ import { processors } from "./processors"
export const shutdown = () => {
processors.shutdown()
console.log("Events shutdown")
}

View File

@ -2,7 +2,7 @@ import { Event, Identity, Group, IdentityType } from "@budibase/types"
import { EventProcessor } from "./types"
import env from "../../environment"
import * as analytics from "../analytics"
import PosthogProcessor from "./PosthogProcessor"
import PosthogProcessor from "./posthog"
/**
* Events that are always captured.
@ -32,7 +32,7 @@ export default class AnalyticsProcessor implements EventProcessor {
return
}
if (this.posthog) {
this.posthog.processEvent(event, identity, properties, timestamp)
await this.posthog.processEvent(event, identity, properties, timestamp)
}
}
@ -45,14 +45,14 @@ export default class AnalyticsProcessor implements EventProcessor {
return
}
if (this.posthog) {
this.posthog.identify(identity, timestamp)
await this.posthog.identify(identity, timestamp)
}
}
async identifyGroup(group: Group, timestamp?: string | number) {
// Group identifications (tenant and installation) always on
if (this.posthog) {
this.posthog.identifyGroup(group, timestamp)
await this.posthog.identifyGroup(group, timestamp)
}
}

View File

@ -23,9 +23,11 @@ export default class LoggingProcessor implements EventProcessor {
return
}
let timestampString = getTimestampString(timestamp)
console.log(
`[audit] [tenant=${identity.tenantId}] [identityType=${identity.type}] [identity=${identity.id}] ${timestampString} ${event} `
)
let message = `[audit] [tenant=${identity.tenantId}] [identityType=${identity.type}] [identity=${identity.id}] ${timestampString} ${event} `
if (env.isDev()) {
message = message + `[debug: [properties=${JSON.stringify(properties)}] ]`
}
console.log(message)
}
async identify(identity: Identity, timestamp?: string | number) {

View File

@ -1,9 +1,10 @@
import PostHog from "posthog-node"
import { Event, Identity, Group, BaseEvent } from "@budibase/types"
import { EventProcessor } from "./types"
import env from "../../environment"
import * as context from "../../context"
const pkg = require("../../../package.json")
import { EventProcessor } from "../types"
import env from "../../../environment"
import * as context from "../../../context"
import * as rateLimiting from "./rateLimiting"
const pkg = require("../../../../package.json")
const EXCLUDED_EVENTS: Event[] = [
Event.USER_UPDATED,
@ -42,6 +43,10 @@ export default class PosthogProcessor implements EventProcessor {
return
}
if (await rateLimiting.limited(event)) {
return
}
properties.version = pkg.version
properties.service = env.SERVICE
properties.environment = identity.environment

View File

@ -0,0 +1,2 @@
import PosthogProcessor from "./PosthogProcessor"
export default PosthogProcessor

View File

@ -0,0 +1,106 @@
import { Event } from "@budibase/types"
import { CacheKeys, TTL } from "../../../cache/generic"
import * as cache from "../../../cache/generic"
import * as context from "../../../context"
type RateLimitedEvent =
| Event.SERVED_BUILDER
| Event.SERVED_APP_PREVIEW
| Event.SERVED_APP
const isRateLimited = (event: Event): event is RateLimitedEvent => {
return (
event === Event.SERVED_BUILDER ||
event === Event.SERVED_APP_PREVIEW ||
event === Event.SERVED_APP
)
}
const isPerApp = (event: RateLimitedEvent) => {
return event === Event.SERVED_APP_PREVIEW || event === Event.SERVED_APP
}
interface EventProperties {
timestamp: number
}
enum RateLimit {
CALENDAR_DAY = "calendarDay",
}
const RATE_LIMITS = {
[Event.SERVED_APP]: RateLimit.CALENDAR_DAY,
[Event.SERVED_APP_PREVIEW]: RateLimit.CALENDAR_DAY,
[Event.SERVED_BUILDER]: RateLimit.CALENDAR_DAY,
}
/**
* Check if this event should be sent right now
* Return false to signal the event SHOULD be sent
* Return true to signal the event should NOT be sent
*/
export const limited = async (event: Event): Promise<boolean> => {
// not a rate limited event -- send
if (!isRateLimited(event)) {
return false
}
const cachedEvent = await readEvent(event)
if (cachedEvent) {
const timestamp = new Date(cachedEvent.timestamp)
const limit = RATE_LIMITS[event]
switch (limit) {
case RateLimit.CALENDAR_DAY: {
// get midnight at the start of the next day for the timestamp
timestamp.setDate(timestamp.getDate() + 1)
timestamp.setHours(0, 0, 0, 0)
// if we have passed the threshold into the next day
if (Date.now() > timestamp.getTime()) {
// update the timestamp in the event -- send
await recordEvent(event, { timestamp: Date.now() })
return false
} else {
// still within the limited period -- don't send
return true
}
}
}
} else {
// no event present i.e. expired -- send
await recordEvent(event, { timestamp: Date.now() })
return false
}
}
const eventKey = (event: RateLimitedEvent) => {
let key = `${CacheKeys.EVENTS_RATE_LIMIT}:${event}`
if (isPerApp(event)) {
key = key + ":" + context.getAppId()
}
return key
}
const readEvent = async (
event: RateLimitedEvent
): Promise<EventProperties | undefined> => {
const key = eventKey(event)
const result = await cache.get(key)
return result as EventProperties
}
const recordEvent = async (
event: RateLimitedEvent,
properties: EventProperties
) => {
const key = eventKey(event)
const limit = RATE_LIMITS[event]
let ttl
switch (limit) {
case RateLimit.CALENDAR_DAY: {
ttl = TTL.ONE_DAY
}
}
await cache.store(key, properties, ttl)
}

View File

@ -0,0 +1,145 @@
import "../../../../../tests/utilities/TestConfiguration"
import PosthogProcessor from "../PosthogProcessor"
import { Event, IdentityType, Hosting } from "@budibase/types"
const tk = require("timekeeper")
import * as cache from "../../../../cache/generic"
import { CacheKeys } from "../../../../cache/generic"
import * as context from "../../../../context"
const newIdentity = () => {
return {
id: "test",
type: IdentityType.USER,
hosting: Hosting.SELF,
environment: "test",
}
}
describe("PosthogProcessor", () => {
beforeEach(async () => {
jest.clearAllMocks()
await cache.bustCache(
`${CacheKeys.EVENTS_RATE_LIMIT}:${Event.SERVED_BUILDER}`
)
})
describe("processEvent", () => {
it("processes event", async () => {
const processor = new PosthogProcessor("test")
const identity = newIdentity()
const properties = {}
await processor.processEvent(Event.APP_CREATED, identity, properties)
expect(processor.posthog.capture).toHaveBeenCalledTimes(1)
})
it("honours exclusions", async () => {
const processor = new PosthogProcessor("test")
const identity = newIdentity()
const properties = {}
await processor.processEvent(Event.AUTH_SSO_UPDATED, identity, properties)
expect(processor.posthog.capture).toHaveBeenCalledTimes(0)
})
describe("rate limiting", () => {
it("sends daily event once in same day", async () => {
const processor = new PosthogProcessor("test")
const identity = newIdentity()
const properties = {}
tk.freeze(new Date(2022, 0, 1, 14, 0))
await processor.processEvent(Event.SERVED_BUILDER, identity, properties)
// go forward one hour
tk.freeze(new Date(2022, 0, 1, 15, 0))
await processor.processEvent(Event.SERVED_BUILDER, identity, properties)
expect(processor.posthog.capture).toHaveBeenCalledTimes(1)
})
it("sends daily event once per unique day", async () => {
const processor = new PosthogProcessor("test")
const identity = newIdentity()
const properties = {}
tk.freeze(new Date(2022, 0, 1, 14, 0))
await processor.processEvent(Event.SERVED_BUILDER, identity, properties)
// go forward into next day
tk.freeze(new Date(2022, 0, 2, 9, 0))
await processor.processEvent(Event.SERVED_BUILDER, identity, properties)
// go forward into next day
tk.freeze(new Date(2022, 0, 3, 5, 0))
await processor.processEvent(Event.SERVED_BUILDER, identity, properties)
// go forward one hour
tk.freeze(new Date(2022, 0, 3, 6, 0))
await processor.processEvent(Event.SERVED_BUILDER, identity, properties)
expect(processor.posthog.capture).toHaveBeenCalledTimes(3)
})
it("sends event again after cache expires", async () => {
const processor = new PosthogProcessor("test")
const identity = newIdentity()
const properties = {}
tk.freeze(new Date(2022, 0, 1, 14, 0))
await processor.processEvent(Event.SERVED_BUILDER, identity, properties)
await cache.bustCache(
`${CacheKeys.EVENTS_RATE_LIMIT}:${Event.SERVED_BUILDER}`
)
tk.freeze(new Date(2022, 0, 1, 14, 0))
await processor.processEvent(Event.SERVED_BUILDER, identity, properties)
expect(processor.posthog.capture).toHaveBeenCalledTimes(2)
})
it("sends per app events once per day per app", async () => {
const processor = new PosthogProcessor("test")
const identity = newIdentity()
const properties = {}
const runAppEvents = async (appId: string) => {
await context.doInAppContext(appId, async () => {
tk.freeze(new Date(2022, 0, 1, 14, 0))
await processor.processEvent(Event.SERVED_APP, identity, properties)
await processor.processEvent(
Event.SERVED_APP_PREVIEW,
identity,
properties
)
// go forward one hour - should be ignored
tk.freeze(new Date(2022, 0, 1, 15, 0))
await processor.processEvent(Event.SERVED_APP, identity, properties)
await processor.processEvent(
Event.SERVED_APP_PREVIEW,
identity,
properties
)
// go forward into next day
tk.freeze(new Date(2022, 0, 2, 9, 0))
await processor.processEvent(Event.SERVED_APP, identity, properties)
await processor.processEvent(
Event.SERVED_APP_PREVIEW,
identity,
properties
)
})
}
await runAppEvents("app_1")
expect(processor.posthog.capture).toHaveBeenCalledTimes(4)
await runAppEvents("app_2")
expect(processor.posthog.capture).toHaveBeenCalledTimes(8)
})
})
})
})

View File

@ -1,40 +0,0 @@
import PosthogProcessor from "../PosthogProcessor"
import { Event, IdentityType, Hosting } from "@budibase/types"
const newIdentity = () => {
return {
id: "test",
type: IdentityType.USER,
hosting: Hosting.SELF,
environment: "test",
}
}
describe("PosthogProcessor", () => {
beforeEach(() => {
jest.clearAllMocks()
})
describe("processEvent", () => {
it("processes event", () => {
const processor = new PosthogProcessor("test")
const identity = newIdentity()
const properties = {}
processor.processEvent(Event.APP_CREATED, identity, properties)
expect(processor.posthog.capture).toHaveBeenCalledTimes(1)
})
it("honours exclusions", () => {
const processor = new PosthogProcessor("test")
const identity = newIdentity()
const properties = {}
processor.processEvent(Event.AUTH_SSO_UPDATED, identity, properties)
expect(processor.posthog.capture).toHaveBeenCalledTimes(0)
})
})
})

View File

@ -5,8 +5,15 @@ import {
DatasourceCreatedEvent,
DatasourceUpdatedEvent,
DatasourceDeletedEvent,
SourceName,
} from "@budibase/types"
function isCustom(datasource: Datasource) {
const sources = Object.values(SourceName)
// if not in the base source list, then it must be custom
return !sources.includes(datasource.source)
}
export async function created(
datasource: Datasource,
timestamp?: string | number
@ -14,6 +21,7 @@ export async function created(
const properties: DatasourceCreatedEvent = {
datasourceId: datasource._id as string,
source: datasource.source,
custom: isCustom(datasource),
}
await publishEvent(Event.DATASOURCE_CREATED, properties, timestamp)
}
@ -22,6 +30,7 @@ export async function updated(datasource: Datasource) {
const properties: DatasourceUpdatedEvent = {
datasourceId: datasource._id as string,
source: datasource.source,
custom: isCustom(datasource),
}
await publishEvent(Event.DATASOURCE_UPDATED, properties)
}
@ -30,6 +39,7 @@ export async function deleted(datasource: Datasource) {
const properties: DatasourceDeletedEvent = {
datasourceId: datasource._id as string,
source: datasource.source,
custom: isCustom(datasource),
}
await publishEvent(Event.DATASOURCE_DELETED, properties)
}

View File

@ -40,9 +40,9 @@ export async function usersAdded(count: number, group: UserGroup) {
await publishEvent(Event.USER_GROUP_USERS_ADDED, properties)
}
export async function usersDeleted(emails: string[], group: UserGroup) {
export async function usersDeleted(count: number, group: UserGroup) {
const properties: GroupUsersDeletedEvent = {
count: emails.length,
count,
groupId: group._id as string,
}
await publishEvent(Event.USER_GROUP_USERS_REMOVED, properties)

View File

@ -18,3 +18,4 @@ export * as view from "./view"
export * as installation from "./installation"
export * as backfill from "./backfill"
export * as group from "./group"
export * as plugin from "./plugin"

View File

@ -1,27 +1,78 @@
import { publishEvent } from "../events"
import {
Event,
License,
LicenseActivatedEvent,
LicenseDowngradedEvent,
LicenseUpdatedEvent,
LicenseUpgradedEvent,
LicensePlanChangedEvent,
LicenseTierChangedEvent,
PlanType,
Account,
LicensePortalOpenedEvent,
LicenseCheckoutSuccessEvent,
LicenseCheckoutOpenedEvent,
LicensePaymentFailedEvent,
LicensePaymentRecoveredEvent,
} from "@budibase/types"
// TODO
export async function updgraded(license: License) {
const properties: LicenseUpgradedEvent = {}
await publishEvent(Event.LICENSE_UPGRADED, properties)
export async function tierChanged(account: Account, from: number, to: number) {
const properties: LicenseTierChangedEvent = {
accountId: account.accountId,
to,
from,
}
await publishEvent(Event.LICENSE_TIER_CHANGED, properties)
}
// TODO
export async function downgraded(license: License) {
const properties: LicenseDowngradedEvent = {}
await publishEvent(Event.LICENSE_DOWNGRADED, properties)
export async function planChanged(
account: Account,
from: PlanType,
to: PlanType
) {
const properties: LicensePlanChangedEvent = {
accountId: account.accountId,
to,
from,
}
await publishEvent(Event.LICENSE_PLAN_CHANGED, properties)
}
// TODO
export async function activated(license: License) {
const properties: LicenseActivatedEvent = {}
export async function activated(account: Account) {
const properties: LicenseActivatedEvent = {
accountId: account.accountId,
}
await publishEvent(Event.LICENSE_ACTIVATED, properties)
}
export async function checkoutOpened(account: Account) {
const properties: LicenseCheckoutOpenedEvent = {
accountId: account.accountId,
}
await publishEvent(Event.LICENSE_CHECKOUT_OPENED, properties)
}
export async function checkoutSuccess(account: Account) {
const properties: LicenseCheckoutSuccessEvent = {
accountId: account.accountId,
}
await publishEvent(Event.LICENSE_CHECKOUT_SUCCESS, properties)
}
export async function portalOpened(account: Account) {
const properties: LicensePortalOpenedEvent = {
accountId: account.accountId,
}
await publishEvent(Event.LICENSE_PORTAL_OPENED, properties)
}
export async function paymentFailed(account: Account) {
const properties: LicensePaymentFailedEvent = {
accountId: account.accountId,
}
await publishEvent(Event.LICENSE_PAYMENT_FAILED, properties)
}
export async function paymentRecovered(account: Account) {
const properties: LicensePaymentRecoveredEvent = {
accountId: account.accountId,
}
await publishEvent(Event.LICENSE_PAYMENT_RECOVERED, properties)
}

View File

@ -0,0 +1,41 @@
import { publishEvent } from "../events"
import {
Event,
Plugin,
PluginDeletedEvent,
PluginImportedEvent,
PluginInitEvent,
} from "@budibase/types"
export async function init(plugin: Plugin) {
const properties: PluginInitEvent = {
type: plugin.schema.type,
name: plugin.name,
description: plugin.description,
version: plugin.version,
}
await publishEvent(Event.PLUGIN_INIT, properties)
}
export async function imported(plugin: Plugin) {
const properties: PluginImportedEvent = {
pluginId: plugin._id as string,
type: plugin.schema.type,
source: plugin.source,
name: plugin.name,
description: plugin.description,
version: plugin.version,
}
await publishEvent(Event.PLUGIN_IMPORTED, properties)
}
export async function deleted(plugin: Plugin) {
const properties: PluginDeletedEvent = {
pluginId: plugin._id as string,
type: plugin.schema.type,
name: plugin.name,
description: plugin.description,
version: plugin.version,
}
await publishEvent(Event.PLUGIN_DELETED, properties)
}

View File

@ -7,22 +7,26 @@ import {
AppServedEvent,
} from "@budibase/types"
export async function servedBuilder() {
const properties: BuilderServedEvent = {}
export async function servedBuilder(timezone: string) {
const properties: BuilderServedEvent = {
timezone,
}
await publishEvent(Event.SERVED_BUILDER, properties)
}
export async function servedApp(app: App) {
export async function servedApp(app: App, timezone: string) {
const properties: AppServedEvent = {
appVersion: app.version,
timezone,
}
await publishEvent(Event.SERVED_APP, properties)
}
export async function servedAppPreview(app: App) {
export async function servedAppPreview(app: App, timezone: string) {
const properties: AppPreviewServedEvent = {
appId: app.appId,
appVersion: app.version,
timezone,
}
await publishEvent(Event.SERVED_APP_PREVIEW, properties)
}

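servedBuilder, servedApp and servedAppPreview now take a timezone string that is recorded on the event. A minimal sketch of a call site, assuming the caller resolves the IANA timezone via the standard Intl API (the real call sites are elsewhere in this commit and not shown here):

// Sketch only: resolving and passing the new timezone argument.
// `app` and direct access to these helpers are assumed for illustration.
async function recordServed(app: App) {
  const timezone = Intl.DateTimeFormat().resolvedOptions().timeZone // e.g. "Europe/London"
  await servedApp(app, timezone)
  await servedAppPreview(app, timezone)
}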
View File

@ -31,23 +31,29 @@ const TENANT_FEATURE_FLAGS = getFeatureFlags()
exports.isEnabled = featureFlag => {
const tenantId = tenancy.getTenantId()
return (
TENANT_FEATURE_FLAGS &&
TENANT_FEATURE_FLAGS[tenantId] &&
TENANT_FEATURE_FLAGS[tenantId].includes(featureFlag)
)
const flags = exports.getTenantFeatureFlags(tenantId)
return flags.includes(featureFlag)
}
exports.getTenantFeatureFlags = tenantId => {
if (TENANT_FEATURE_FLAGS && TENANT_FEATURE_FLAGS[tenantId]) {
return TENANT_FEATURE_FLAGS[tenantId]
const flags = []
if (TENANT_FEATURE_FLAGS) {
const globalFlags = TENANT_FEATURE_FLAGS["*"]
const tenantFlags = TENANT_FEATURE_FLAGS[tenantId]
if (globalFlags) {
flags.push(...globalFlags)
}
if (tenantFlags) {
flags.push(...tenantFlags)
}
}
return []
return flags
}
exports.FeatureFlag = {
exports.TenantFeatureFlag = {
LICENSING: "LICENSING",
GOOGLE_SHEETS: "GOOGLE_SHEETS",
USER_GROUPS: "USER_GROUPS",

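getTenantFeatureFlags now merges flags declared for the wildcard "*" tenant with flags declared for the specific tenant, rather than returning only the tenant-specific list. A rough illustration, assuming TENANT_FEATURE_FLAGS has already been parsed into a per-tenant map (the exact env string format is handled by getFeatureFlags() and is not shown here):

// Illustrative values only - tenant IDs and flags are made up:
//   TENANT_FEATURE_FLAGS => { "*": ["LICENSING"], "tenant-a": ["USER_GROUPS"] }
//
//   getTenantFeatureFlags("tenant-a") // ["LICENSING", "USER_GROUPS"]
//   getTenantFeatureFlags("tenant-b") // ["LICENSING"]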
View File

@ -1,5 +1,5 @@
const bcrypt = require("bcrypt")
const env = require("./environment")
const bcrypt = env.JS_BCRYPT ? require("bcryptjs") : require("bcrypt")
const { v4 } = require("uuid")
const SALT_ROUNDS = env.SALT_ROUNDS || 10

View File

@ -9,12 +9,16 @@ import * as installation from "./installation"
import env from "./environment"
import tenancy from "./tenancy"
import featureFlags from "./featureFlags"
import sessions from "./security/sessions"
import * as sessions from "./security/sessions"
import deprovisioning from "./context/deprovision"
import auth from "./auth"
import constants from "./constants"
import * as dbConstants from "./db/constants"
import logging from "./logging"
import * as logging from "./logging"
import pino from "./pino"
import * as middleware from "./middleware"
import plugins from "./plugin"
import encryption from "./security/encryption"
// mimic the outer package exports
import * as db from "./pkg/db"
@ -53,7 +57,11 @@ const core = {
errors,
logging,
roles,
plugins,
...pino,
...errorClasses,
middleware,
encryption,
}
export = core

View File

@ -1,28 +1,39 @@
const { Cookies, Headers } = require("../constants")
const { getCookie, clearCookie, openJwt } = require("../utils")
const { getUser } = require("../cache/user")
const { getSession, updateSessionTTL } = require("../security/sessions")
const { buildMatcherRegex, matches } = require("./matchers")
const env = require("../environment")
const { SEPARATOR } = require("../db/constants")
const { ViewNames } = require("../db/utils")
const { queryGlobalView } = require("../db/views")
const { getGlobalDB, doInTenant } = require("../tenancy")
const { decrypt } = require("../security/encryption")
import { Cookies, Headers } from "../constants"
import { getCookie, clearCookie, openJwt } from "../utils"
import { getUser } from "../cache/user"
import { getSession, updateSessionTTL } from "../security/sessions"
import { buildMatcherRegex, matches } from "./matchers"
import { SEPARATOR } from "../db/constants"
import { ViewName } from "../db/utils"
import { queryGlobalView } from "../db/views"
import { getGlobalDB, doInTenant } from "../tenancy"
import { decrypt } from "../security/encryption"
const identity = require("../context/identity")
const env = require("../environment")
function finalise(
ctx,
{ authenticated, user, internal, version, publicEndpoint } = {}
) {
ctx.publicEndpoint = publicEndpoint || false
ctx.isAuthenticated = authenticated || false
ctx.user = user
ctx.internal = internal || false
ctx.version = version
const ONE_MINUTE = env.SESSION_UPDATE_PERIOD || 60 * 1000
interface FinaliseOpts {
authenticated?: boolean
internal?: boolean
publicEndpoint?: boolean
version?: string
user?: any
}
async function checkApiKey(apiKey, populateUser) {
function timeMinusOneMinute() {
return new Date(Date.now() - ONE_MINUTE).toISOString()
}
function finalise(ctx: any, opts: FinaliseOpts = {}) {
ctx.publicEndpoint = opts.publicEndpoint || false
ctx.isAuthenticated = opts.authenticated || false
ctx.user = opts.user
ctx.internal = opts.internal || false
ctx.version = opts.version
}
async function checkApiKey(apiKey: string, populateUser?: Function) {
if (apiKey === env.INTERNAL_API_KEY) {
return { valid: true }
}
@ -32,7 +43,7 @@ async function checkApiKey(apiKey, populateUser) {
const db = getGlobalDB()
// api key is encrypted in the database
const userId = await queryGlobalView(
ViewNames.BY_API_KEY,
ViewName.BY_API_KEY,
{
key: apiKey,
},
@ -54,12 +65,14 @@ async function checkApiKey(apiKey, populateUser) {
* The tenancy modules should not be used here and it should be assumed that the tenancy context
* has not yet been populated.
*/
module.exports = (
export = (
noAuthPatterns = [],
opts = { publicAllowed: false, populateUser: null }
opts: { publicAllowed: boolean; populateUser?: Function } = {
publicAllowed: false,
}
) => {
const noAuthOptions = noAuthPatterns ? buildMatcherRegex(noAuthPatterns) : []
return async (ctx, next) => {
return async (ctx: any, next: any) => {
let publicEndpoint = false
const version = ctx.request.headers[Headers.API_VER]
// the path is not authenticated
@ -71,45 +84,41 @@ module.exports = (
// check the actual user is authenticated first, try header or cookie
const headerToken = ctx.request.headers[Headers.TOKEN]
const authCookie = getCookie(ctx, Cookies.Auth) || openJwt(headerToken)
const apiKey = ctx.request.headers[Headers.API_KEY]
const tenantId = ctx.request.headers[Headers.TENANT_ID]
let authenticated = false,
user = null,
internal = false
if (authCookie) {
let error = null
if (authCookie && !apiKey) {
const sessionId = authCookie.sessionId
const userId = authCookie.userId
const session = await getSession(userId, sessionId)
if (!session) {
error = "No session found"
} else {
try {
if (opts && opts.populateUser) {
user = await getUser(
userId,
session.tenantId,
opts.populateUser(ctx)
)
} else {
user = await getUser(userId, session.tenantId)
}
user.csrfToken = session.csrfToken
authenticated = true
} catch (err) {
error = err
let session
try {
// getting session handles error checking (if session exists etc)
session = await getSession(userId, sessionId)
if (opts && opts.populateUser) {
user = await getUser(
userId,
session.tenantId,
opts.populateUser(ctx)
)
} else {
user = await getUser(userId, session.tenantId)
}
}
if (error) {
console.error("Auth Error", error)
user.csrfToken = session.csrfToken
if (session?.lastAccessedAt < timeMinusOneMinute()) {
// make sure we denote that the session is still in use
await updateSessionTTL(session)
}
authenticated = true
} catch (err: any) {
authenticated = false
console.error("Auth Error", err?.message || err)
// remove the cookie as the user does not exist anymore
clearCookie(ctx, Cookies.Auth)
} else {
// make sure we denote that the session is still in use
await updateSessionTTL(session)
}
}
const apiKey = ctx.request.headers[Headers.API_KEY]
const tenantId = ctx.request.headers[Headers.TENANT_ID]
// this is an internal request, no user made it
if (!authenticated && apiKey) {
const populateUser = opts.populateUser ? opts.populateUser(ctx) : null
@ -142,7 +151,7 @@ module.exports = (
} else {
return next()
}
} catch (err) {
} catch (err: any) {
// invalid token, clear the cookie
if (err && err.name === "JsonWebTokenError") {
clearCookie(ctx, Cookies.Auth)

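The auth middleware factory is now TypeScript and only refreshes the session TTL when the session was last accessed more than a minute ago, rather than on every request. A minimal mounting sketch - the import path, export name and string pattern format are assumptions, not shown in this diff:

// Sketch only: mounting the factory above on a Koa app.
import Koa from "koa"
import authenticated from "./authenticated" // hypothetical path

const app = new Koa()

// requests matching these patterns skip authentication entirely
app.use(
  authenticated(["/api/system/status", "/api/global/users/init"], {
    publicAllowed: true,
  })
)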
View File

@ -10,8 +10,11 @@ const internalApi = require("./internalApi")
const datasourceGoogle = require("./passport/datasource/google")
const csrf = require("./csrf")
const adminOnly = require("./adminOnly")
const builderOrAdmin = require("./builderOrAdmin")
const builderOnly = require("./builderOnly")
const joiValidator = require("./joi-validator")
module.exports = {
const pkg = {
google,
oidc,
jwt,
@ -27,5 +30,9 @@ module.exports = {
},
csrf,
adminOnly,
builderOnly,
builderOrAdmin,
joiValidator,
}
export = pkg

View File

@ -13,10 +13,13 @@ function validate(schema, property) {
params = ctx.request[property]
}
schema = schema.append({
createdAt: Joi.any().optional(),
updatedAt: Joi.any().optional(),
})
// not all schemas have the append property e.g. array schemas
if (schema.append) {
schema = schema.append({
createdAt: Joi.any().optional(),
updatedAt: Joi.any().optional(),
})
}
const { error } = schema.validate(params)
if (error) {

View File

@ -17,14 +17,6 @@ export const DEFINITIONS: MigrationDefinition[] = [
type: MigrationType.APP,
name: MigrationName.APP_URLS,
},
{
type: MigrationType.GLOBAL,
name: MigrationName.DEVELOPER_QUOTA,
},
{
type: MigrationType.GLOBAL,
name: MigrationName.PUBLISHED_APP_QUOTA,
},
{
type: MigrationType.APP,
name: MigrationName.EVENT_APP_BACKFILL,
@ -37,4 +29,12 @@ export const DEFINITIONS: MigrationDefinition[] = [
type: MigrationType.INSTALLATION,
name: MigrationName.EVENT_INSTALLATION_BACKFILL,
},
{
type: MigrationType.GLOBAL,
name: MigrationName.GLOBAL_INFO_SYNC_USERS,
},
{
type: MigrationType.GLOBAL,
name: MigrationName.PLUGIN_COUNT,
},
]

View File

@ -1,14 +1,10 @@
import { DEFAULT_TENANT_ID } from "../constants"
import { doWithDB } from "../db"
import { DocumentTypes, StaticDatabases } from "../db/constants"
import { DocumentType, StaticDatabases } from "../db/constants"
import { getAllApps } from "../db/utils"
import environment from "../environment"
import {
doInTenant,
getTenantIds,
getGlobalDBName,
getTenantId,
} from "../tenancy"
import { doInTenant, getTenantIds, getTenantId } from "../tenancy"
import { getGlobalDBName } from "../db/tenancy"
import * as context from "../context"
import { DEFINITIONS } from "."
import {
@ -21,10 +17,10 @@ import {
export const getMigrationsDoc = async (db: any) => {
// get the migrations doc
try {
return await db.get(DocumentTypes.MIGRATIONS)
return await db.get(DocumentType.MIGRATIONS)
} catch (err: any) {
if (err.status && err.status === 404) {
return { _id: DocumentTypes.MIGRATIONS }
return { _id: DocumentType.MIGRATIONS }
} else {
console.error(err)
throw err

View File

@ -57,7 +57,11 @@ function publicPolicy(bucketName: any) {
}
}
const PUBLIC_BUCKETS = [ObjectStoreBuckets.APPS, ObjectStoreBuckets.GLOBAL]
const PUBLIC_BUCKETS = [
ObjectStoreBuckets.APPS,
ObjectStoreBuckets.GLOBAL,
ObjectStoreBuckets.PLUGINS,
]
/**
* Gets a connection to the object store using the S3 SDK.
@ -66,15 +70,13 @@ const PUBLIC_BUCKETS = [ObjectStoreBuckets.APPS, ObjectStoreBuckets.GLOBAL]
* @constructor
*/
export const ObjectStore = (bucket: any) => {
AWS.config.update({
accessKeyId: env.MINIO_ACCESS_KEY,
secretAccessKey: env.MINIO_SECRET_KEY,
region: env.AWS_REGION,
})
const config: any = {
s3ForcePathStyle: true,
signatureVersion: "v4",
apiVersion: "2006-03-01",
accessKeyId: env.MINIO_ACCESS_KEY,
secretAccessKey: env.MINIO_SECRET_KEY,
region: env.AWS_REGION,
}
if (bucket) {
config.params = {
@ -174,6 +176,14 @@ export const streamUpload = async (
const objectStore = ObjectStore(bucketName)
await makeSureBucketExists(objectStore, bucketName)
// Set content type for certain known extensions
if (filename?.endsWith(".js")) {
extra = {
...extra,
ContentType: "application/javascript",
}
}
const params = {
Bucket: sanitizeBucket(bucketName),
Key: sanitizeKey(filename),
@ -297,9 +307,13 @@ export const uploadDirectory = async (
return files
}
exports.downloadTarballDirect = async (url: string, path: string) => {
exports.downloadTarballDirect = async (
url: string,
path: string,
headers = {}
) => {
path = sanitizeKey(path)
const response = await fetch(url)
const response = await fetch(url, { headers })
if (!response.ok) {
throw new Error(`unexpected response ${response.statusText}`)
}

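streamUpload now forces an application/javascript content type for .js uploads, and downloadTarballDirect accepts optional request headers so that authenticated URLs (for example private plugin releases) can be fetched. A small usage sketch - the import path, URL and token below are placeholders:

// Sketch only: passing auth headers through the new third parameter.
import { downloadTarballDirect } from "./objectStore" // hypothetical path

async function fetchPrivateTarball(token: string) {
  await downloadTarballDirect(
    "https://example.com/releases/plugin.tar.gz",
    "plugin-downloads/my-plugin",
    { Authorization: `Bearer ${token}` }
  )
}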
View File

@ -2,12 +2,18 @@ const { join } = require("path")
const { tmpdir } = require("os")
const env = require("../environment")
/****************************************************
* NOTE: When adding a new bucket - make *
* sure that S3 usages (like budibase-infra) *
* have been updated to have a unique bucket name. *
****************************************************/
exports.ObjectStoreBuckets = {
BACKUPS: env.BACKUPS_BUCKET_NAME,
APPS: env.APPS_BUCKET_NAME,
TEMPLATES: env.TEMPLATES_BUCKET_NAME,
GLOBAL: env.GLOBAL_BUCKET_NAME,
GLOBAL_CLOUD: env.GLOBAL_CLOUD_BUCKET_NAME,
PLUGINS: env.PLUGIN_BUCKET_NAME,
}
exports.budibaseTempDir = function () {

View File

@ -0,0 +1,11 @@
const env = require("./environment")
exports.pinoSettings = () => ({
prettyPrint: {
levelFirst: true,
},
level: env.LOG_LEVEL || "error",
autoLogging: {
ignore: req => req.url.includes("/health"),
},
})

View File

@ -0,0 +1,7 @@
import * as utils from "./utils"
const pkg = {
...utils,
}
export = pkg

View File

@ -0,0 +1,103 @@
const {
DatasourceFieldType,
QueryType,
PluginType,
} = require("@budibase/types")
const joi = require("joi")
const DATASOURCE_TYPES = [
"Relational",
"Non-relational",
"Spreadsheet",
"Object store",
"Graph",
"API",
]
function runJoi(validator, schema) {
const { error } = validator.validate(schema)
if (error) {
throw error
}
}
function validateComponent(schema) {
const validator = joi.object({
type: joi.string().allow("component").required(),
metadata: joi.object().unknown(true).required(),
hash: joi.string().optional(),
version: joi.string().optional(),
schema: joi
.object({
name: joi.string().required(),
settings: joi.array().items(joi.object().unknown(true)).required(),
})
.unknown(true),
})
runJoi(validator, schema)
}
function validateDatasource(schema) {
const fieldValidator = joi.object({
type: joi
.string()
.allow(...Object.values(DatasourceFieldType))
.required(),
required: joi.boolean().required(),
default: joi.any(),
display: joi.string(),
})
const queryValidator = joi
.object({
type: joi.string().allow(...Object.values(QueryType)),
fields: joi.object().pattern(joi.string(), fieldValidator),
})
.required()
const validator = joi.object({
type: joi.string().allow("datasource").required(),
metadata: joi.object().unknown(true).required(),
hash: joi.string().optional(),
version: joi.string().optional(),
schema: joi.object({
docs: joi.string(),
friendlyName: joi.string().required(),
type: joi.string().allow(...DATASOURCE_TYPES),
description: joi.string().required(),
datasource: joi.object().pattern(joi.string(), fieldValidator).required(),
query: joi
.object({
create: queryValidator,
read: queryValidator,
update: queryValidator,
delete: queryValidator,
})
.unknown(true)
.required(),
extra: joi.object().pattern(
joi.string(),
joi.object({
type: joi.string().required(),
displayName: joi.string().required(),
required: joi.boolean(),
data: joi.object(),
})
),
}),
})
runJoi(validator, schema)
}
exports.validate = schema => {
switch (schema?.type) {
case PluginType.COMPONENT:
validateComponent(schema)
break
case PluginType.DATASOURCE:
validateDatasource(schema)
break
default:
throw new Error(`Unknown plugin type - check schema.json: ${schema.type}`)
}
}

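The validator above defines the minimum shape of a plugin's schema.json. For reference, an illustrative datasource schema that should pass validateDatasource - the concrete field and query type strings ("string", "password", "fields") are assumptions based on the enums in @budibase/types:

// Illustrative only - a minimal datasource plugin schema for the validator above.
const exampleSchema = {
  type: "datasource",
  metadata: {},
  version: "1.0.0",
  schema: {
    friendlyName: "Example API",
    type: "API",
    description: "An example datasource plugin",
    datasource: {
      url: { type: "string", required: true },
      apiKey: { type: "password", required: true },
    },
    query: {
      create: { type: "fields", fields: {} },
      read: { type: "fields", fields: {} },
      update: { type: "fields", fields: {} },
      delete: { type: "fields", fields: {} },
    },
  },
}
// exports.validate(exampleSchema) completes without throwing for this shape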
View File

@ -3,7 +3,7 @@ const { BUILTIN_PERMISSION_IDS, PermissionLevels } = require("./permissions")
const {
generateRoleID,
getRoleParams,
DocumentTypes,
DocumentType,
SEPARATOR,
} = require("../db/utils")
const { getAppDB } = require("../context")
@ -78,7 +78,7 @@ function isBuiltin(role) {
*/
exports.builtinRoleToNumber = id => {
const builtins = exports.getBuiltinRoles()
const MAX = Object.values(BUILTIN_IDS).length + 1
const MAX = Object.values(builtins).length + 1
if (id === BUILTIN_IDS.ADMIN || id === BUILTIN_IDS.BUILDER) {
return MAX
}
@ -94,6 +94,22 @@ exports.builtinRoleToNumber = id => {
return count
}
/**
* Converts any role to a number, but has to be async to get the roles from db.
*/
exports.roleToNumber = async id => {
if (exports.isBuiltin(id)) {
return exports.builtinRoleToNumber(id)
}
const hierarchy = await exports.getUserRoleHierarchy(id)
for (let role of hierarchy) {
if (isBuiltin(role.inherits)) {
return exports.builtinRoleToNumber(role.inherits) + 1
}
}
return 0
}
/**
* Returns whichever builtin roleID is lower.
*/
@ -172,7 +188,7 @@ async function getAllUserRoles(userRoleId) {
* to determine if a user can access something that requires a specific role.
* @param {string} userRoleId The user's role ID, this can be found in their access token.
* @param {object} opts Various options, such as whether to only retrieve the IDs (default true).
* @returns {Promise<string[]>} returns an ordered array of the roles, with the first being their
* @returns {Promise<string[]|object[]>} returns an ordered array of the roles, with the first being their
* highest level of access and the last being the lowest level.
*/
exports.getUserRoleHierarchy = async (userRoleId, opts = { idOnly: true }) => {
@ -203,15 +219,24 @@ exports.getAllRoles = async appId => {
if (appId) {
return doWithDB(appId, internal)
} else {
return internal(getAppDB())
let appDB
try {
appDB = getAppDB()
} catch (error) {
// We don't have any apps, so we'll just use the built-in roles
}
return internal(appDB)
}
async function internal(db) {
const body = await db.allDocs(
getRoleParams(null, {
include_docs: true,
})
)
let roles = body.rows.map(row => row.doc)
let roles = []
if (db) {
const body = await db.allDocs(
getRoleParams(null, {
include_docs: true,
})
)
roles = body.rows.map(row => row.doc)
}
const builtinRoles = exports.getBuiltinRoles()
// need to combine builtin with any DB record of them (for sake of permissions)
@ -329,7 +354,7 @@ class AccessController {
* Adds the "role_" for builtin role IDs which are to be written to the DB (for permissions).
*/
exports.getDBRoleID = roleId => {
if (roleId.startsWith(DocumentTypes.ROLE)) {
if (roleId.startsWith(DocumentType.ROLE)) {
return roleId
}
return generateRoleID(roleId)
@ -340,8 +365,8 @@ exports.getDBRoleID = roleId => {
*/
exports.getExternalRoleID = roleId => {
// for built in roles we want to remove the DB role ID element (role_)
if (roleId.startsWith(DocumentTypes.ROLE) && isBuiltin(roleId)) {
return roleId.split(`${DocumentTypes.ROLE}${SEPARATOR}`)[1]
if (roleId.startsWith(DocumentType.ROLE) && isBuiltin(roleId)) {
return roleId.split(`${DocumentType.ROLE}${SEPARATOR}`)[1]
}
return roleId
}

Some files were not shown because too many files have changed in this diff