Merge branch 'master' of github.com:Budibase/budibase into feature/multistep-form-block

commit 16f98dd8f7

@ -1,194 +0,0 @@
{
  "files": [
    "README.md"
  ],
  "imageSize": 100,
  "commit": false,
  "contributors": [
    {
      "login": "shogunpurple",
      "name": "Martin McKeaveney",
      "avatar_url": "https://avatars1.githubusercontent.com/u/11256663?v=4",
      "profile": "http://martinmck.com",
      "contributions": [
        "code",
        "doc",
        "test",
        "infra"
      ]
    },
    {
      "login": "mike12345567",
      "name": "Michael Drury",
      "avatar_url": "https://avatars2.githubusercontent.com/u/4407001?v=4",
      "profile": "http://www.michaeldrury.co.uk/",
      "contributions": [
        "doc",
        "code",
        "test",
        "infra"
      ]
    },
    {
      "login": "aptkingston",
      "name": "Andrew Kingston",
      "avatar_url": "https://avatars3.githubusercontent.com/u/9075550?v=4",
      "profile": "https://github.com/aptkingston",
      "contributions": [
        "doc",
        "code",
        "test",
        "design"
      ]
    },
    {
      "login": "mjashanks",
      "name": "Michael Shanks",
      "avatar_url": "https://avatars3.githubusercontent.com/u/3524181?v=4",
      "profile": "https://budibase.com/",
      "contributions": [
        "doc",
        "code",
        "test"
      ]
    },
    {
      "login": "kevmodrome",
      "name": "Kevin Åberg Kultalahti",
      "avatar_url": "https://avatars3.githubusercontent.com/u/534488?v=4",
      "profile": "https://github.com/kevmodrome",
      "contributions": [
        "doc",
        "code",
        "test"
      ]
    },
    {
      "login": "joebudi",
      "name": "Joe",
      "avatar_url": "https://avatars2.githubusercontent.com/u/49767913?v=4",
      "profile": "https://www.budibase.com/",
      "contributions": [
        "doc",
        "code",
        "content",
        "design"
      ]
    },
    {
      "login": "Rory-Powell",
      "name": "Rory Powell",
      "avatar_url": "https://avatars.githubusercontent.com/u/8755148?v=4",
      "profile": "https://github.com/Rory-Powell",
      "contributions": [
        "code",
        "doc",
        "test"
      ]
    },
    {
      "login": "PClmnt",
      "name": "Peter Clement",
      "avatar_url": "https://avatars.githubusercontent.com/u/5665926?v=4",
      "profile": "https://github.com/PClmnt",
      "contributions": [
        "code",
        "doc",
        "test"
      ]
    },
    {
      "login": "Conor-Mack",
      "name": "Conor_Mack",
      "avatar_url": "https://avatars1.githubusercontent.com/u/36074859?v=4",
      "profile": "https://github.com/Conor-Mack",
      "contributions": [
        "code",
        "test"
      ]
    },
    {
      "login": "pngwn",
      "name": "pngwn",
      "avatar_url": "https://avatars1.githubusercontent.com/u/12937446?v=4",
      "profile": "https://github.com/pngwn",
      "contributions": [
        "code",
        "test"
      ]
    },
    {
      "login": "HugoLd",
      "name": "HugoLd",
      "avatar_url": "https://avatars0.githubusercontent.com/u/26521848?v=4",
      "profile": "https://github.com/HugoLd",
      "contributions": [
        "code"
      ]
    },
    {
      "login": "victoriasloan",
      "name": "victoriasloan",
      "avatar_url": "https://avatars.githubusercontent.com/u/9913651?v=4",
      "profile": "https://github.com/victoriasloan",
      "contributions": [
        "code"
      ]
    },
    {
      "login": "yashank09",
      "name": "yashank09",
      "avatar_url": "https://avatars.githubusercontent.com/u/37672190?v=4",
      "profile": "https://github.com/yashank09",
      "contributions": [
        "code"
      ]
    },
    {
      "login": "SOVLOOKUP",
      "name": "SOVLOOKUP",
      "avatar_url": "https://avatars.githubusercontent.com/u/53158137?v=4",
      "profile": "https://github.com/SOVLOOKUP",
      "contributions": [
        "code"
      ]
    },
    {
      "login": "seoulaja",
      "name": "seoulaja",
      "avatar_url": "https://avatars.githubusercontent.com/u/15101654?v=4",
      "profile": "https://github.com/seoulaja",
      "contributions": [
        "translation"
      ]
    },
    {
      "login": "mslourens",
      "name": "Maurits Lourens",
      "avatar_url": "https://avatars.githubusercontent.com/u/1907152?v=4",
      "profile": "https://github.com/mslourens",
      "contributions": [
        "test",
        "code"
      ]
    },
    {
      "login": "Rory-Powell",
      "name": "Rory Powell",
      "avatar_url": "https://avatars.githubusercontent.com/u/8755148?v=4",
      "profile": "https://github.com/Rory-Powell",
      "contributions": [
        "infra",
        "test",
        "code"
      ]
    }
  ],
  "contributorsPerLine": 7,
  "projectName": "budibase",
  "projectOwner": "Budibase",
  "repoType": "github",
  "repoHost": "https://github.com",
  "skipCi": true,
  "commitConvention": "none"
}

@ -8,3 +8,6 @@ packages/backend-core/coverage
packages/server/client
packages/builder/.routify
packages/sdk/sdk
packages/account-portal/packages/server/build
packages/account-portal/packages/ui/.routify
packages/account-portal/packages/ui/build

@ -1,139 +1,45 @@
# Budibase CI Pipelines

Welcome to the budibase CI pipelines directory. This document details what each of the CI pipelines are for, and come common combinations.
Welcome to the Budibase CI pipelines directory. This document details what each of the CI pipelines is for, and some common combinations.

## All CI Pipelines

### Note

- When running workflow dispatch jobs, ensure you always run them off the `master` branch. It defaults to `develop`, so double-check before running any jobs. The exception to this case is the `deploy-release` job, which requires the develop branch.

### Standard CI Build Job (budibase_ci.yml)

Triggers:

- PR or push to develop
- PR or push to master

The standard CI Build job is what runs when you raise a PR to develop or master.
The standard CI Build job is what runs when you raise a PR to master.

- Installs all dependencies
- Builds the project
- Runs the unit tests
- Generates test coverage metrics with codecov
- Runs the integration tests
- Checks that the pro and account portal submodules are pointing to the latest master head

### Release Develop Job (release-develop.yml)
### Release Job (tag-release.yml)

Triggers:

- Push to develop
- Manually triggered

The job responsible for building, tagging and pushing docker images out to the test and release environments.
This job is responsible for building and pushing all the production services, packages and images. This is done via [budibase-deploys](https://github.com/Budibase/budibase-deploys/actions/workflows/release.yml).

- Installs all dependencies
- Builds the project
- Runs the unit tests
- Publishes the budibase JS packages under a prerelease tag to NPM
- Builds, tags and pushes docker images under the `develop` tag to docker hub

An input is required, indicating if the new version will be a `patch`, `minor` or `major` bump.
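
For example, with the GitHub CLI a release can be dispatched along these lines (a sketch only; the `versioning` input name is an assumption for illustration, so check `tag-release.yml` for the real `workflow_dispatch` input name):

```bash
# Dispatch the release workflow from master, requesting a patch bump.
# NOTE: the "versioning" input name is hypothetical - confirm it in tag-release.yml.
gh workflow run tag-release.yml --ref master -f versioning=patch

# Watch the run that was just kicked off.
gh run watch "$(gh run list --workflow=tag-release.yml --limit 1 --json databaseId --jq '.[0].databaseId')"
```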

These images will then be pulled by the test and release environments, updating the latest automatically. Discord notifications are sent to the #infra channel when this occurs.

### Release Job (release.yml)

Triggers:

- Push to master

This job is responsible for building and pushing the latest code to NPM and docker hub, so that it can be deployed.

- Installs all dependencies
- Builds the project
- Runs the unit tests
- Publishes the budibase JS packages under a release tag to NPM (always incremented by patch versions)
- Builds, tags and pushes docker images under the `v.x.x.x` (the tag of the NPM release) tag to docker hub

### Release Selfhost Job (release-selfhost.yml)

Triggers:

- Manual Workflow Dispatch Trigger

This job is responsible for delivering the latest version of budibase to those that are self-hosting.

This job relies on the release job having run first, so that the latest image has been pushed to dockerhub. The job will then pull the latest version from `lerna.json` and try to find an image in dockerhub corresponding to that version; a sketch of this flow is shown after the list below. For example, if the version in `lerna.json` is `1.0.0`:

- Pull the images for all budibase services tagged `v1.0.0` from dockerhub
- Tag these images as `latest`
- Push them back to dockerhub. This now means anyone who pulls `latest` (self-hosters using docker-compose) will get the latest version.
- Build and release the budibase helm chart for kubernetes users
- Perform a github release with the latest version. You can see previous releases here: https://github.com/Budibase/budibase/releases
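
In essence, the retagging step amounts to something like the following (a simplified sketch for a single image, assuming the version read from `lerna.json` is `1.0.0`; the real job does this for every budibase service image):

```bash
# Minimal sketch of the selfhost release flow for one service image.
version="1.0.0"                      # assumed to have been read from lerna.json
docker pull "budibase/apps:v${version}"                        # pull the versioned image
docker tag "budibase/apps:v${version}" "budibase/apps:latest"  # retag it as latest
docker push "budibase/apps:latest"   # self-hosters pulling :latest now get this version
```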

### Deploy Release (deploy-release.yml)

Triggers:

- Manual Workflow Dispatch Trigger

This job is responsible for deploying to our release, cloud kubernetes environment. You must run the release job first, to ensure that the latest images have been built and pushed to docker hub. After kicking off this job, the following will occur:

- Checks out the release branch
- Pulls the latest `values.yaml` from budibase-infra, a private repo containing Budibase's infrastructure configuration
- Gets the latest budibase version from `lerna.json`, if it hasn't been specified in the workflow when you kicked it off
- Configures AWS Credentials
- Deploys the helm chart in the budibase repo to our preproduction EKS cluster, injecting the `values.yaml` we pulled from budibase-infra
- Fires off a discord webhook in the #infra channel to show that the deployment completed successfully.

### Deploy Preprod (deploy-preprod.yml)

Triggers:

- Manual Workflow Dispatch Trigger

This job is responsible for deploying to our preprod, cloud kubernetes environment. You must run the release job first, to ensure that the latest images have been built and pushed to docker hub. After kicking off this job, the following will occur:

- Checks out the master branch
- Pulls the latest `values.yaml` from budibase-infra, a private repo containing Budibase's infrastructure configuration
- Gets the latest budibase version from `lerna.json`, if it hasn't been specified in the workflow when you kicked it off
- Configures AWS Credentials
- Deploys the helm chart in the budibase repo to our preprod EKS cluster, injecting the `values.yaml` we pulled from budibase-infra
- Fires off a discord webhook in the #infra channel to show that the deployment completed successfully.

### Deploy Production (deploy-cloud.yml)

Triggers:

- Manual Workflow Dispatch Trigger

This job is responsible for deploying to our production, cloud kubernetes environment. You must run the release job first, to ensure that the latest images have been built and pushed to docker hub. You can also manually enter a version number for this job, so you can perform rollbacks or upgrade to a specific version. After kicking off this job, the following will occur:

- Checks out the master branch
- Pulls the latest `values.yaml` from budibase-infra, a private repo containing Budibase's infrastructure configuration
- Gets the latest budibase version from `lerna.json`, if it hasn't been specified in the workflow when you kicked it off
- Configures AWS Credentials
- Deploys the helm chart in the budibase repo to our production EKS cluster, injecting the `values.yaml` we pulled from budibase-infra
- Fires off a discord webhook in the #infra channel to show that the deployment completed successfully.

More documentation can be found here: https://budibase.atlassian.net/wiki/spaces/DEVOPS/pages/347930625/Production+release

## Common Workflows

### Deploy Changes to Production (Release)

- Merge `develop` into `master`
- Wait for budibase CI job and release job to run
- Run cloud deploy job
- Run release selfhost job

### Deploy Changes to Production (Hotfix)

- Branch off `master`
- Perform your hotfix
- Merge back into `master`
- Wait for budibase CI job and release job to run
- Run cloud deploy job
- Run release selfhost job
- Merge your changes into `master`
- Run `tag-release.yml`
- Check the progress in [budibase-deploys](https://github.com/Budibase/budibase-deploys/actions/workflows/release.yml)

### Rollback A Bad Cloud Deployment

- Kick off cloud deploy job
- Ensure you are running off master
- Enter the version number of the last known good version of budibase. For example `1.0.0`

Rollback documentation can be found here:
https://budibase.atlassian.net/wiki/spaces/DEVOPS/pages/347930625/Production+release#Rollback
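
In practice a rollback is just a dispatch of the cloud deploy job with a pinned version, along these lines (sketch only; the `version` input name is an assumption, so check the workflow file for the real input name):

```bash
# Roll production back to a known-good version.
# NOTE: the "version" input name is hypothetical - confirm it in the workflow file.
gh workflow run deploy-cloud.yml --ref master -f version=1.0.0
```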

@ -246,7 +246,57 @@ jobs:

          if (submoduleCommit !== baseCommit) {
            console.error('Submodule commit does not match the latest commit on the "${{ steps.get_pro_commits.outputs.target_branch }}" branch.');
            console.error('Refer to the pro repo to merge your changes: https://github.com/Budibase/budibase-pro/blob/develop/docs/getting_started.md')
            console.error('Refer to the pro repo to merge your changes: https://github.com/Budibase/budibase-pro/blob/master/docs/getting_started.md')
            process.exit(1);
          } else {
            console.log('All good, the submodule has been merged and set up correctly!')
          }

  check-accountportal-submodule:
    runs-on: ubuntu-latest
    if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase'
    steps:
      - name: Checkout repo and submodules
        uses: actions/checkout@v3
        with:
          submodules: true
          token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
          fetch-depth: 0

      - name: Check account portal commit
        id: get_accountportal_commits
        run: |
          cd packages/account-portal
          accountportal_commit=$(git rev-parse HEAD)

          branch="${{ github.base_ref || github.ref_name }}"
          echo "Running on branch '$branch' (base_ref=${{ github.base_ref }}, head_ref=${{ github.head_ref }})"

          base_commit=$(git rev-parse origin/master)

          if [[ ! -z $base_commit ]]; then
            echo "target_branch=$branch"
            echo "target_branch=$branch" >> "$GITHUB_OUTPUT"
            echo "accountportal_commit=$accountportal_commit"
            echo "accountportal_commit=$accountportal_commit" >> "$GITHUB_OUTPUT"
            echo "base_commit=$base_commit"
            echo "base_commit=$base_commit" >> "$GITHUB_OUTPUT"
          else
            echo "Nothing to do - branch to branch merge."
          fi

      - name: Check submodule merged to base branch
        if: ${{ steps.get_accountportal_commits.outputs.base_commit != '' }}
        uses: actions/github-script@v4
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
            const submoduleCommit = '${{ steps.get_accountportal_commits.outputs.accountportal_commit }}';
            const baseCommit = '${{ steps.get_accountportal_commits.outputs.base_commit }}';

            if (submoduleCommit !== baseCommit) {
              console.error('Submodule commit does not match the latest commit on the "${{ steps.get_accountportal_commits.outputs.target_branch }}" branch.');
              console.error('Refer to the account portal repo to merge your changes: https://github.com/Budibase/account-portal/blob/master/docs/index.md')
              process.exit(1);
            } else {
              console.log('All good, the submodule has been merged and set up correctly!')
            }

@ -2,9 +2,7 @@ name: close-featurebranch

on:
  pull_request:
    types: [closed]
    branches:
      - master
    types: [closed, unlabeled]
  workflow_dispatch:
    inputs:
      BRANCH:
@ -14,6 +12,9 @@ on:

jobs:
  release:
    if: |
      (github.event.action == 'closed' && contains(github.event.pull_request.labels.*.name, 'feature-branch')) ||
      github.event.label.name == 'feature-branch'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3

@ -2,12 +2,19 @@ name: deploy-featurebranch

on:
  pull_request:
    branches:
      - master
    types: [
        labeled,
        # default types below (https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request)
        opened,
        synchronize,
        reopened,
      ]

jobs:
  release:
    if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase'
    if: |
      (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase') &&
      contains(github.event.pull_request.labels.*.name, 'feature-branch')
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3

@ -0,0 +1,46 @@
name: Forced release
concurrency:
  group: tag-release
  cancel-in-progress: false

on:
  workflow_dispatch:

jobs:
  ensure-is-master-tag:
    name: Ensure is a master tag
    runs-on: qa-arc-runner-set
    steps:
      - name: Checkout monorepo
        uses: actions/checkout@v4
        with:
          token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
          fetch-tags: true
          fetch-depth: 0

      - name: Fail if ref is not a tag
        run: |
          if ! git show-ref -q --verify "refs/tags/${{ github.ref_name }}" 2>/dev/null; then
            echo "'${{ github.ref_name }}' is not a valid tag."
            exit 1
          fi
      - name: Fail if tag is not in master
        run: |
          if ! git merge-base --is-ancestor ${{ github.ref_name }} origin/master; then
            echo "Tag is not in master. Release can only execute tags that are present on the master branch"
            exit 1
          fi

  trigger-release:
    needs: [ensure-is-master-tag]
    runs-on: ubuntu-latest
    steps:
      - uses: peter-evans/repository-dispatch@v2
        with:
          repository: budibase/budibase-deploys
          event-type: release-prod
          token: ${{ secrets.GH_ACCESS_TOKEN }}
          client-payload: |-
            {
              "TAG": "${{ github.ref_name }}"
            }

@ -1,4 +1,3 @@
builder/*
.data/
.temp/
packages/server/runtime_apps/

@ -41,8 +40,11 @@ bower_components
build/Release

# Dependency directories
/node_modules/
jspm_packages/
*.min.js
*.map
node_modules/
dist/

# TypeScript v1 declaration files
typings/

@ -1,3 +1,6 @@
[submodule "packages/pro"]
	path = packages/pro
	url = git@github.com:Budibase/budibase-pro.git
[submodule "packages/account-portal"]
	path = packages/account-portal
	url = git@github.com:Budibase/account-portal.git

@ -8,4 +8,7 @@ packages/worker/coverage
packages/backend-core/coverage
packages/builder/.routify
packages/sdk/sdk
packages/pro/coverage
packages/pro/coverage
packages/account-portal/packages/ui/build
packages/account-portal/packages/ui/.routify
packages/account-portal/packages/server/build

@ -1,7 +1,7 @@
{
  "editor.formatOnSave": true,
  "editor.codeActionsOnSave": {
    "source.fixAll": true
    "source.fixAll": "explicit"
  },
  "editor.defaultFormatter": "esbenp.prettier-vscode",
  "[json]": {

LICENSE
@ -1,7 +1,9 @@
Copyright 2019-2021, Budibase Ltd.
Copyright 2019-2023, Budibase Ltd.

Each Budibase package has its own license, please check the license file in each package.

You can consider Budibase to be GPLv3 licensed overall.

The apps that you build with Budibase do not package any GPLv3 licensed code, thus do not fall under those restrictions.

Budibase ships with Structured Query Server, by The Neighbourhoodie Software GmbH. The license for this can be found at ./SQS_LICENSE

@ -0,0 +1,31 @@
FORM OF CUSTOMER LICENCE

Budibase hereby grants the Customer a worldwide, royalty free, non-exclusive,
perpetual (for the lifetime of the intellectual property rights contained in the Product)
right and title to utilise the binary code of the The Neighbourhoodie Software GmbH
Structured Query Server software product (Product) for its own internal business
purposes (the Purpose) only (the Licence). The Product has the function of bringing a
CouchDB database (NoSQL database) into an SQL database form (SQLite) and thereby
making it usable for complex queries - which originally could only be displayed in an
SQL database. By indexing in SQLite and a server that is tailored to it, the Product
enables the use of CouchDB with SQL queries.
The Licence shall not permit sub-licensing, resale or transfer of the Product to third
parties, other than sub-licensing to the Customer’s direct contractors for the purposes
of utilizing the Product as contemplated above.
The Licence shall not permit the adaptation, modification, decompilation, reverse
engineering or similar activities with respect to the Product.
This licence is granted to the Customer only, although Customer and its Affiliates’
employees, servants and agents shall be entitled to utilize the Product within the scope
of the Licence for the Customer’s Purpose only.
Reproduction is not permitted to users, except for reproductions that are necessary for
the use of the product under the licence described above. These conditions apply to the
product regardless of the form in which we make the product available and on which
devices it is installed and/or with which devices it is ultimately used. Depending on the
product variant or intended use, certain technical requirements in the IT infrastructure
must be satisfied as a prerequisite for use.
The law of the Northern Ireland applies exclusively to this licence, and the courts of
Northern Ireland shall have exclusive jurisdiction, save that we reserve a right to sue
you in the jurisdiction in which you are based. The application of the UN Sales
Convention (CISG) is excluded.
The invalidity of any part of this licence does not affect the validity of the remaining
regulations.

@ -157,6 +157,17 @@ $ helm install --create-namespace --namespace budibase budibase . -f values.yaml
| services.apps.replicaCount | int | `1` | The number of apps replicas to run. |
| services.apps.resources | object | `{}` | The resources to use for apps pods. See <https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/> for more information on how to set these. |
| services.apps.startupProbe | object | HTTP health checks. | Startup probe configuration for apps pods. You shouldn't need to change this, but if you want to you can find more information here: <https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/> |
| services.automationWorkers.autoscaling.enabled | bool | `false` | Whether to enable horizontal pod autoscaling for the automation worker service. |
| services.automationWorkers.autoscaling.maxReplicas | int | `10` | |
| services.automationWorkers.autoscaling.minReplicas | int | `1` | |
| services.automationWorkers.autoscaling.targetCPUUtilizationPercentage | int | `80` | Target CPU utilization percentage for the automation worker service. Note that for autoscaling to work, you will need to have metrics-server configured, and resources set for the automation worker pods. |
| services.automationWorkers.enabled | bool | `true` | Whether or not to enable the automation worker service. If you disable this, automations will be processed by the apps service. |
| services.automationWorkers.livenessProbe | object | HTTP health checks. | Liveness probe configuration for automation worker pods. You shouldn't need to change this, but if you want to you can find more information here: <https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/> |
| services.automationWorkers.logLevel | string | `"info"` | The log level for the automation worker service. |
| services.automationWorkers.readinessProbe | object | HTTP health checks. | Readiness probe configuration for automation worker pods. You shouldn't need to change this, but if you want to you can find more information here: <https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/> |
| services.automationWorkers.replicaCount | int | `1` | The number of automation worker replicas to run. |
| services.automationWorkers.resources | object | `{}` | The resources to use for automation worker pods. See <https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/> for more information on how to set these. |
| services.automationWorkers.startupProbe | object | HTTP health checks. | Startup probe configuration for automation worker pods. You shouldn't need to change this, but if you want to you can find more information here: <https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/> |
| services.couchdb.backup.enabled | bool | `false` | Whether or not to enable periodic CouchDB backups. This works by replicating to another CouchDB instance. |
| services.couchdb.backup.interval | string | `""` | Backup interval in seconds |
| services.couchdb.backup.resources | object | `{}` | The resources to use for CouchDB backup pods. See <https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/> for more information on how to set these. |

@ -192,7 +192,14 @@ spec:
            - name: NODE_TLS_REJECT_UNAUTHORIZED
              value: {{ .Values.services.tlsRejectUnauthorized }}
            {{ end }}

            {{- if .Values.services.automationWorkers.enabled }}
            - name: APP_FEATURES
              value: "api"
            {{- end }}
            {{- range .Values.services.apps.extraEnv }}
            - name: {{ .name }}
              value: {{ .value | quote }}
            {{- end }}
          image: budibase/apps:{{ .Values.globals.appVersion | default .Chart.AppVersion }}
          imagePullPolicy: Always
          {{- if .Values.services.apps.startupProbe }}

@ -0,0 +1,248 @@
{{- if .Values.services.automationWorkers.enabled }}
apiVersion: apps/v1
kind: Deployment
metadata:
  annotations:
  {{ if .Values.services.automationWorkers.deploymentAnnotations }}
  {{- toYaml .Values.services.automationWorkers.deploymentAnnotations | indent 4 -}}
  {{ end }}
  labels:
    io.kompose.service: automation-worker-service
  {{ if .Values.services.automationWorkers.deploymentLabels }}
  {{- toYaml .Values.services.automationWorkers.deploymentLabels | indent 4 -}}
  {{ end }}
  name: automation-worker-service
spec:
  replicas: {{ .Values.services.automationWorkers.replicaCount }}
  selector:
    matchLabels:
      io.kompose.service: automation-worker-service
  strategy:
    type: RollingUpdate
  template:
    metadata:
      annotations:
      {{ if .Values.services.automationWorkers.templateAnnotations }}
      {{- toYaml .Values.services.automationWorkers.templateAnnotations | indent 8 -}}
      {{ end }}
      labels:
        io.kompose.service: automation-worker-service
      {{ if .Values.services.automationWorkers.templateLabels }}
      {{- toYaml .Values.services.automationWorkers.templateLabels | indent 8 -}}
      {{ end }}
    spec:
      containers:
        - env:
            - name: BUDIBASE_ENVIRONMENT
              value: {{ .Values.globals.budibaseEnv }}
            - name: DEPLOYMENT_ENVIRONMENT
              value: "kubernetes"
            - name: COUCH_DB_URL
              {{ if .Values.services.couchdb.url }}
              value: {{ .Values.services.couchdb.url }}
              {{ else }}
              value: http://{{ .Release.Name }}-svc-couchdb:{{ .Values.services.couchdb.port }}
              {{ end }}
            {{ if .Values.services.couchdb.enabled }}
            - name: COUCH_DB_USER
              valueFrom:
                secretKeyRef:
                  name: {{ template "couchdb.fullname" . }}
                  key: adminUsername
            - name: COUCH_DB_PASSWORD
              valueFrom:
                secretKeyRef:
                  name: {{ template "couchdb.fullname" . }}
                  key: adminPassword
            {{ end }}
            - name: ENABLE_ANALYTICS
              value: {{ .Values.globals.enableAnalytics | quote }}
            - name: API_ENCRYPTION_KEY
              value: {{ .Values.globals.apiEncryptionKey | quote }}
            - name: HTTP_LOGGING
              value: {{ .Values.services.automationWorkers.httpLogging | quote }}
            - name: INTERNAL_API_KEY
              valueFrom:
                secretKeyRef:
                  name: {{ template "budibase.fullname" . }}
                  key: internalApiKey
            - name: INTERNAL_API_KEY_FALLBACK
              value: {{ .Values.globals.internalApiKeyFallback | quote }}
            - name: JWT_SECRET
              valueFrom:
                secretKeyRef:
                  name: {{ template "budibase.fullname" . }}
                  key: jwtSecret
            - name: JWT_SECRET_FALLBACK
              value: {{ .Values.globals.jwtSecretFallback | quote }}
            {{ if .Values.services.objectStore.region }}
            - name: AWS_REGION
              value: {{ .Values.services.objectStore.region }}
            {{ end }}
            - name: MINIO_ENABLED
              value: {{ .Values.services.objectStore.minio | quote }}
            - name: MINIO_ACCESS_KEY
              valueFrom:
                secretKeyRef:
                  name: {{ template "budibase.fullname" . }}
                  key: objectStoreAccess
            - name: MINIO_SECRET_KEY
              valueFrom:
                secretKeyRef:
                  name: {{ template "budibase.fullname" . }}
                  key: objectStoreSecret
            - name: CLOUDFRONT_CDN
              value: {{ .Values.services.objectStore.cloudfront.cdn | quote }}
            - name: CLOUDFRONT_PUBLIC_KEY_ID
              value: {{ .Values.services.objectStore.cloudfront.publicKeyId | quote }}
            - name: CLOUDFRONT_PRIVATE_KEY_64
              value: {{ .Values.services.objectStore.cloudfront.privateKey64 | quote }}
            - name: MINIO_URL
              value: {{ .Values.services.objectStore.url }}
            - name: PLUGIN_BUCKET_NAME
              value: {{ .Values.services.objectStore.pluginBucketName | quote }}
            - name: APPS_BUCKET_NAME
              value: {{ .Values.services.objectStore.appsBucketName | quote }}
            - name: GLOBAL_BUCKET_NAME
              value: {{ .Values.services.objectStore.globalBucketName | quote }}
            - name: BACKUPS_BUCKET_NAME
              value: {{ .Values.services.objectStore.backupsBucketName | quote }}
            - name: PORT
              value: {{ .Values.services.automationWorkers.port | quote }}
            {{ if .Values.services.worker.publicApiRateLimitPerSecond }}
            - name: API_REQ_LIMIT_PER_SEC
              value: {{ .Values.globals.automationWorkers.publicApiRateLimitPerSecond | quote }}
            {{ end }}
            - name: MULTI_TENANCY
              value: {{ .Values.globals.multiTenancy | quote }}
            - name: OFFLINE_MODE
              value: {{ .Values.globals.offlineMode | quote }}
            - name: LOG_LEVEL
              value: {{ .Values.services.automationWorkers.logLevel | quote }}
            - name: REDIS_PASSWORD
              value: {{ .Values.services.redis.password }}
            - name: REDIS_URL
              {{ if .Values.services.redis.url }}
              value: {{ .Values.services.redis.url }}
              {{ else }}
              value: redis-service:{{ .Values.services.redis.port }}
              {{ end }}
            - name: SELF_HOSTED
              value: {{ .Values.globals.selfHosted | quote }}
            - name: POSTHOG_TOKEN
              value: {{ .Values.globals.posthogToken | quote }}
            - name: WORKER_URL
              value: http://worker-service:{{ .Values.services.worker.port }}
            - name: PLATFORM_URL
              value: {{ .Values.globals.platformUrl | quote }}
            - name: ACCOUNT_PORTAL_URL
              value: {{ .Values.globals.accountPortalUrl | quote }}
            - name: ACCOUNT_PORTAL_API_KEY
              value: {{ .Values.globals.accountPortalApiKey | quote }}
            - name: COOKIE_DOMAIN
              value: {{ .Values.globals.cookieDomain | quote }}
            - name: HTTP_MIGRATIONS
              value: {{ .Values.globals.httpMigrations | quote }}
            - name: GOOGLE_CLIENT_ID
              value: {{ .Values.globals.google.clientId | quote }}
            - name: GOOGLE_CLIENT_SECRET
              value: {{ .Values.globals.google.secret | quote }}
            - name: AUTOMATION_MAX_ITERATIONS
              value: {{ .Values.globals.automationMaxIterations | quote }}
            - name: TENANT_FEATURE_FLAGS
              value: {{ .Values.globals.tenantFeatureFlags | quote }}
            - name: ENCRYPTION_KEY
              value: {{ .Values.globals.bbEncryptionKey | quote }}
            {{ if .Values.globals.bbAdminUserEmail }}
            - name: BB_ADMIN_USER_EMAIL
              value: {{ .Values.globals.bbAdminUserEmail | quote }}
            {{ end }}
            {{ if .Values.globals.bbAdminUserPassword }}
            - name: BB_ADMIN_USER_PASSWORD
              value: {{ .Values.globals.bbAdminUserPassword | quote }}
            {{ end }}
            {{ if .Values.globals.pluginsDir }}
            - name: PLUGINS_DIR
              value: {{ .Values.globals.pluginsDir | quote }}
            {{ end }}
            {{ if .Values.services.automationWorkers.nodeDebug }}
            - name: NODE_DEBUG
              value: {{ .Values.services.automationWorkers.nodeDebug | quote }}
            {{ end }}
            {{ if .Values.globals.datadogApmEnabled }}
            - name: DD_LOGS_INJECTION
              value: {{ .Values.globals.datadogApmEnabled | quote }}
            - name: DD_APM_ENABLED
              value: {{ .Values.globals.datadogApmEnabled | quote }}
            - name: DD_APM_DD_URL
              value: https://trace.agent.datadoghq.eu
            {{ end }}
            {{ if .Values.globals.globalAgentHttpProxy }}
            - name: GLOBAL_AGENT_HTTP_PROXY
              value: {{ .Values.globals.globalAgentHttpProxy | quote }}
            {{ end }}
            {{ if .Values.globals.globalAgentHttpsProxy }}
            - name: GLOBAL_AGENT_HTTPS_PROXY
              value: {{ .Values.globals.globalAgentHttpsProxy | quote }}
            {{ end }}
            {{ if .Values.globals.globalAgentNoProxy }}
            - name: GLOBAL_AGENT_NO_PROXY
              value: {{ .Values.globals.globalAgentNoProxy | quote }}
            {{ end }}
            {{ if .Values.services.tlsRejectUnauthorized }}
            - name: NODE_TLS_REJECT_UNAUTHORIZED
              value: {{ .Values.services.tlsRejectUnauthorized }}
            {{ end }}
            - name: APP_FEATURES
              value: "automations"
            {{- range .Values.services.automationWorkers.extraEnv }}
            - name: {{ .name }}
              value: {{ .value | quote }}
            {{- end }}

          image: budibase/apps:{{ .Values.globals.appVersion | default .Chart.AppVersion }}
          imagePullPolicy: Always
          {{- if .Values.services.automationWorkers.startupProbe }}
          {{- with .Values.services.automationWorkers.startupProbe }}
          startupProbe:
            {{- toYaml . | nindent 10 }}
          {{- end }}
          {{- end }}
          {{- if .Values.services.automationWorkers.livenessProbe }}
          {{- with .Values.services.automationWorkers.livenessProbe }}
          livenessProbe:
            {{- toYaml . | nindent 10 }}
          {{- end }}
          {{- end }}
          {{- if .Values.services.automationWorkers.readinessProbe }}
          {{- with .Values.services.automationWorkers.readinessProbe }}
          readinessProbe:
            {{- toYaml . | nindent 10 }}
          {{- end }}
          {{- end }}
          name: bbautomationworker
          ports:
            - containerPort: {{ .Values.services.automationWorkers.port }}
          {{ with .Values.services.automationWorkers.resources }}
          resources:
            {{- toYaml . | nindent 10 }}
          {{ end }}
      {{- with .Values.affinity }}
      affinity:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      {{- with .Values.tolerations }}
      tolerations:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      {{ if .Values.schedulerName }}
      schedulerName: {{ .Values.schedulerName | quote }}
      {{ end }}
      {{ if .Values.imagePullSecrets }}
      imagePullSecrets:
        {{- toYaml .Values.imagePullSecrets | nindent 6 }}
      {{ end }}
      restartPolicy: Always
      serviceAccountName: ""
status: {}
{{- end }}

@ -0,0 +1,32 @@
{{- if .Values.services.automationWorkers.autoscaling.enabled }}
apiVersion: {{ ternary "autoscaling/v2" "autoscaling/v2beta2" (.Capabilities.APIVersions.Has "autoscaling/v2") }}
kind: HorizontalPodAutoscaler
metadata:
  name: {{ include "budibase.fullname" . }}-apps
  labels:
    {{- include "budibase.labels" . | nindent 4 }}
spec:
  scaleTargetRef:
    apiVersion: apps/v1
    kind: Deployment
    name: automation-worker-service
  minReplicas: {{ .Values.services.automationWorkers.autoscaling.minReplicas }}
  maxReplicas: {{ .Values.services.automationWorkers.autoscaling.maxReplicas }}
  metrics:
    {{- if .Values.services.automationWorkers.autoscaling.targetCPUUtilizationPercentage }}
    - type: Resource
      resource:
        name: cpu
        target:
          type: Utilization
          averageUtilization: {{ .Values.services.automationWorkers.autoscaling.targetCPUUtilizationPercentage }}
    {{- end }}
    {{- if .Values.services.automationWorkers.autoscaling.targetMemoryUtilizationPercentage }}
    - type: Resource
      resource:
        name: memory
        target:
          type: Utilization
          averageUtilization: {{ .Values.services.automationWorkers.autoscaling.targetMemoryUtilizationPercentage }}
    {{- end }}
{{- end }}

@ -182,6 +182,10 @@ spec:
            - name: NODE_TLS_REJECT_UNAUTHORIZED
              value: {{ .Values.services.tlsRejectUnauthorized }}
            {{ end }}
            {{- range .Values.services.worker.extraEnv }}
            - name: {{ .name }}
              value: {{ .value | quote }}
            {{- end }}
          image: budibase/worker:{{ .Values.globals.appVersion | default .Chart.AppVersion }}
          imagePullPolicy: Always
          {{- if .Values.services.worker.startupProbe }}

@ -220,6 +220,9 @@ services:
    # <https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/>
    # for more information on how to set these.
    resources: {}
    # -- Extra environment variables to set for apps pods. Takes a list of
    # name=value pairs.
    extraEnv: []
    # -- Startup probe configuration for apps pods. You shouldn't need to
    # change this, but if you want to you can find more information here:
    # <https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/>

@ -272,6 +275,78 @@
    # and resources set for the apps pods.
    targetCPUUtilizationPercentage: 80

  automationWorkers:
    # -- Whether or not to enable the automation worker service. If you disable this,
    # automations will be processed by the apps service.
    enabled: true
    # @ignore (you shouldn't need to change this)
    port: 4002
    # -- The number of automation worker replicas to run.
    replicaCount: 1
    # -- The log level for the automation worker service.
    logLevel: info
    # -- The resources to use for automation worker pods. See
    # <https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/>
    # for more information on how to set these.
    resources: {}
    # -- Extra environment variables to set for automation worker pods. Takes a list of
    # name=value pairs.
    extraEnv: []
    # -- Startup probe configuration for automation worker pods. You shouldn't
    # need to change this, but if you want to you can find more information
    # here:
    # <https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/>
    # @default -- HTTP health checks.
    startupProbe:
      # @ignore
      httpGet:
        path: /health
        port: 4002
        scheme: HTTP
      # @ignore
      failureThreshold: 30
      # @ignore
      periodSeconds: 3
    # -- Readiness probe configuration for automation worker pods. You shouldn't
    # need to change this, but if you want to you can find more information
    # here:
    # <https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/>
    # @default -- HTTP health checks.
    readinessProbe:
      # @ignore
      httpGet:
        path: /health
        port: 4002
        scheme: HTTP
      # @ignore
      periodSeconds: 3
      # @ignore
      failureThreshold: 1
    # -- Liveness probe configuration for automation worker pods. You shouldn't
    # need to change this, but if you want to you can find more information
    # here:
    # <https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/>
    # @default -- HTTP health checks.
    livenessProbe:
      # @ignore
      httpGet:
        path: /health
        port: 4002
        scheme: HTTP
      # @ignore
      failureThreshold: 3
      # @ignore
      periodSeconds: 30
    autoscaling:
      # -- Whether to enable horizontal pod autoscaling for the automation worker service.
      enabled: false
      minReplicas: 1
      maxReplicas: 10
      # -- Target CPU utilization percentage for the automation worker service.
      # Note that for autoscaling to work, you will need to have metrics-server
      # configured, and resources set for the automation worker pods.
      targetCPUUtilizationPercentage: 80

  worker:
    # @ignore (you shouldn't need to change this)
    port: 4003

@ -285,6 +360,9 @@ services:
    # <https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/>
    # for more information on how to set these.
    resources: {}
    # -- Extra environment variables to set for worker pods. Takes a list of
    # name=value pairs.
    extraEnv: []
    # -- Startup probe configuration for worker pods. You shouldn't need to
    # change this, but if you want to you can find more information here:
    # <https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/>

@ -84,7 +84,7 @@ Component libraries are collections of components as well as the definition of t

- If the project diverges from your branch, please rebase instead of merging. This makes the commit graph easier to read.

- Once your work is completed, please raise a PR against the `develop` branch with some information about what has changed and why.
- Once your work is completed, please raise a PR against the `master` branch with some information about what has changed and why.

### Getting Started For Contributors

|
|||
cd packages/pro
|
||||
# get the base branch you are working from (same as monorepo)
|
||||
git fetch
|
||||
git checkout <develop | master>
|
||||
git checkout master
|
||||
# create a branch, named the same as the branch in your monorepo
|
||||
git checkout -b <some branch>
|
||||
... make changes
|
||||
|
|
|
@ -22,6 +22,6 @@
      "@types/react": "17.0.39",
      "eslint": "8.10.0",
      "eslint-config-next": "12.1.0",
      "typescript": "4.6.2"
      "typescript": "5.2.2"
    }
  }
}

@ -37,10 +37,11 @@ elif [[ "${TARGETBUILD}" = "docker-compose" ]]; then
  # image.
  sed -i "s#^database_dir.*\$##g" /opt/couchdb/etc/local.ini
  sed -i "s#^view_index_dir.*\$##g" /opt/couchdb/etc/local.ini
  sed -i "s#^dir=.*\$#dir=/opt/couchdb/data#g" /opt/clouseau/clouseau.ini
elif [[ -n $KUBERNETES_SERVICE_HOST ]]; then
  # In Kubernetes the directory /opt/couchdb/data has a persistent volume
  # mount for storing database data.
  sed -i "s#DATA_DIR#/opt/couchdb/data#g" /opt/clouseau/clouseau.ini
  sed -i "s#^dir=.*\$#dir=/opt/couchdb/data#g" /opt/clouseau/clouseau.ini

  # We remove the database_dir and view_index_dir settings from the local.ini
  # in Kubernetes because it will default to /opt/couchdb/data which is what

@ -57,7 +57,6 @@ services:
    depends_on:
      - redis-service
      - minio-service
      - couch-init

  minio-service:
    restart: unless-stopped

@ -70,7 +69,7 @@ services:
      MINIO_BROWSER: "off"
    command: server /data --console-address ":9001"
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"]
      test: "timeout 5s bash -c ':> /dev/tcp/127.0.0.1/9000' || exit 1"
      interval: 30s
      timeout: 20s
      retries: 3
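
The new healthcheck drops the dependency on `curl` being present in the MinIO image by using bash's built-in `/dev/tcp` pseudo-device, which succeeds as soon as a TCP connection to the port can be opened. The same probe can be tried by hand (assuming MinIO is listening on port 9000 locally):

```bash
# Exits 0 if something accepts TCP connections on 127.0.0.1:9000.
timeout 5s bash -c ':> /dev/tcp/127.0.0.1/9000' && echo "port open" || echo "port closed"
```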

@ -98,26 +97,15 @@ services:

  couchdb-service:
    restart: unless-stopped
    image: ibmcom/couchdb3
    image: budibase/couchdb
    pull_policy: always
    environment:
      - COUCHDB_PASSWORD=${COUCH_DB_PASSWORD}
      - COUCHDB_USER=${COUCH_DB_USER}
      - TARGETBUILD=docker-compose
    volumes:
      - couchdb3_data:/opt/couchdb/data

  couch-init:
    image: curlimages/curl
    environment:
      PUT_CALL: "curl -u ${COUCH_DB_USER}:${COUCH_DB_PASSWORD} -X PUT couchdb-service:5984"
    depends_on:
      - couchdb-service
    command:
      [
        "sh",
        "-c",
        "sleep 10 && $${PUT_CALL}/_users && $${PUT_CALL}/_replicator; fg;",
      ]

  redis-service:
    restart: unless-stopped
    image: redis

@ -257,6 +257,7 @@ http {

    access_log off;
    allow 127.0.0.1;
    allow 10.0.0.0/8;
    deny all;

    location /nginx_status {

@ -1,10 +1,13 @@
{
  "version": "2.13.35",
  "version": "2.13.41",
  "npmClient": "yarn",
  "packages": [
    "packages/*"
    "packages/*",
    "!packages/account-portal",
    "packages/account-portal/packages/*"
  ],
  "useNx": true,
  "concurrency": 20,
  "command": {
    "publish": {
      "ignoreChanges": [

package.json
@ -6,25 +6,25 @@
    "@babel/eslint-parser": "^7.22.5",
    "@babel/preset-env": "^7.22.5",
    "@esbuild-plugins/tsconfig-paths": "^0.1.2",
    "@typescript-eslint/parser": "6.7.2",
    "@typescript-eslint/parser": "6.9.0",
    "esbuild": "^0.18.17",
    "esbuild-node-externals": "^1.8.0",
    "eslint": "^8.44.0",
    "eslint": "^8.52.0",
    "eslint-plugin-import": "^2.29.0",
    "eslint-plugin-local-rules": "^2.0.0",
    "eslint-plugin-svelte": "^2.32.2",
    "eslint-plugin-svelte": "^2.34.0",
    "husky": "^8.0.3",
    "kill-port": "^1.6.1",
    "lerna": "7.1.1",
    "madge": "^6.0.0",
    "minimist": "^1.2.8",
    "nx": "16.4.3",
    "nx-cloud": "16.0.5",
    "prettier": "2.8.8",
    "prettier-plugin-svelte": "^2.3.0",
    "svelte": "3.49.0",
    "svelte-eslint-parser": "^0.32.0",
    "typescript": "5.2.2"
    "svelte-eslint-parser": "^0.33.1",
    "typescript": "5.2.2",
    "yargs": "^17.7.2"
  },
  "scripts": {
    "preinstall": "node scripts/syncProPackage.js",

@ -39,13 +39,16 @@
    "nuke": "yarn run nuke:packages && yarn run nuke:docker",
    "nuke:packages": "yarn run restore",
    "nuke:docker": "lerna run --stream dev:stack:nuke",
    "clean": "lerna clean -y",
    "clean": "lerna clean -y && echo Cleaning top level node modules 🧹 && rm -rf ./node_modules && echo Done! 🚀",
    "kill-builder": "kill-port 3000",
    "kill-server": "kill-port 4001 4002",
    "kill-all": "yarn run kill-builder && yarn run kill-server",
    "dev": "yarn run kill-all && lerna run --parallel prebuild && lerna run --stream dev:builder",
    "dev:noserver": "yarn run kill-builder && lerna run --stream dev:stack:up && lerna run --stream dev:builder --ignore @budibase/backend-core --ignore @budibase/server --ignore @budibase/worker",
    "dev:server": "yarn run kill-server && lerna run --stream dev:builder --scope @budibase/worker --scope @budibase/server",
    "kill-accountportal": "kill-port 3001 4003",
    "kill-all": "yarn run kill-builder && yarn run kill-server && yarn kill-accountportal",
    "dev": "yarn run kill-all && lerna run --parallel prebuild && lerna run --stream dev --ignore=@budibase/account-portal-ui --ignore @budibase/account-portal-server",
    "dev:noserver": "yarn run kill-builder && lerna run --stream dev:stack:up && lerna run --stream dev --ignore @budibase/backend-core --ignore @budibase/server --ignore @budibase/worker --ignore=@budibase/account-portal-ui --ignore @budibase/account-portal-server",
    "dev:server": "yarn run kill-server && lerna run --stream dev --scope @budibase/worker --scope @budibase/server",
    "dev:accountportal": "yarn kill-accountportal && lerna run dev --stream --scope @budibase/account-portal-ui --scope @budibase/account-portal-server",
    "dev:all": "yarn run kill-all && lerna run --stream dev",
    "dev:built": "yarn run kill-all && cd packages/server && yarn dev:stack:up && cd ../../ && lerna run --stream dev:built",
    "dev:docker": "yarn build --scope @budibase/server --scope @budibase/worker && docker-compose -f hosting/docker-compose.build.yaml -f hosting/docker-compose.dev.yaml --env-file hosting/.env up --build --scale proxy-service=0",
    "test": "lerna run --stream test --stream",

@ -79,11 +82,14 @@
    "security:audit": "node scripts/audit.js",
    "postinstall": "husky install",
    "submodules:load": "git submodule init && git submodule update && yarn",
    "submodules:unload": "git submodule deinit --all && yarn"
    "submodules:unload": "git submodule deinit --all && yarn",
    "add-app-migration": "node scripts/add-app-migration.js --title"
  },
  "workspaces": {
    "packages": [
      "packages/*"
      "packages/*",
      "!packages/account-portal",
      "packages/account-portal/packages/*"
    ]
  },
  "resolutions": {

@ -0,0 +1 @@
Subproject commit a0b13270c36dd188e2a953d026b4560a1208008e

@ -21,7 +21,7 @@
    "test:watch": "jest --watchAll"
  },
  "dependencies": {
    "@budibase/nano": "10.1.3",
    "@budibase/nano": "10.1.4",
    "@budibase/pouchdb-replication-stream": "1.2.10",
    "@budibase/shared-core": "0.0.0",
    "@budibase/types": "0.0.0",

@ -73,8 +73,8 @@
    "@types/uuid": "8.3.4",
    "chance": "1.1.8",
    "ioredis-mock": "8.9.0",
    "jest": "29.6.2",
    "jest-environment-node": "29.6.2",
    "jest": "29.7.0",
    "jest-environment-node": "29.7.0",
    "jest-serial-runner": "1.2.1",
    "pino-pretty": "10.0.0",
    "pouchdb-adapter-memory": "7.2.2",

@ -1,15 +1,16 @@
import { DBTestConfiguration } from "../../../tests/extra"
import {
  structures,
  expectFunctionWasCalledTimesWith,
  mocks,
} from "../../../tests"
import { structures } from "../../../tests"
import { Writethrough } from "../writethrough"
import { getDB } from "../../db"
import { Document } from "@budibase/types"
import tk from "timekeeper"

tk.freeze(Date.now())

interface ValueDoc extends Document {
  value: any
}

const DELAY = 5000

describe("writethrough", () => {

@ -117,7 +118,7 @@ describe("writethrough", () => {
  describe("get", () => {
    it("should be able to retrieve", async () => {
      await config.doInTenant(async () => {
        const response = await writethrough.get(docId)
        const response = await writethrough.get<ValueDoc>(docId)
        expect(response.value).toBe(4)
      })
    })

@ -7,7 +7,7 @@ import * as locks from "../redis/redlockImpl"
const DEFAULT_WRITE_RATE_MS = 10000
let CACHE: BaseCache | null = null

interface CacheItem {
interface CacheItem<T extends Document> {
  doc: any
  lastWrite: number
}

@ -24,7 +24,10 @@ function makeCacheKey(db: Database, key: string) {
  return db.name + key
}

function makeCacheItem(doc: any, lastWrite: number | null = null): CacheItem {
function makeCacheItem<T extends Document>(
  doc: T,
  lastWrite: number | null = null
): CacheItem<T> {
  return { doc, lastWrite: lastWrite || Date.now() }
}

@ -35,7 +38,7 @@ async function put(
) {
  const cache = await getCache()
  const key = doc._id
  let cacheItem: CacheItem | undefined
  let cacheItem: CacheItem<any> | undefined
  if (key) {
    cacheItem = await cache.get(makeCacheKey(db, key))
  }

@ -84,12 +87,12 @@ async function put(
  return { ok: true, id: output._id, rev: output._rev }
}

async function get(db: Database, id: string): Promise<any> {
async function get<T extends Document>(db: Database, id: string): Promise<T> {
  const cache = await getCache()
  const cacheKey = makeCacheKey(db, id)
  let cacheItem: CacheItem = await cache.get(cacheKey)
  let cacheItem: CacheItem<T> = await cache.get(cacheKey)
  if (!cacheItem) {
    const doc = await db.get(id)
    const doc = await db.get<T>(id)
    cacheItem = makeCacheItem(doc)
    await cache.store(cacheKey, cacheItem)
  }

@ -123,8 +126,8 @@ export class Writethrough {
    return put(this.db, doc, writeRateMs)
  }

  async get(id: string) {
    return get(this.db, id)
  async get<T extends Document>(id: string) {
    return get<T>(this.db, id)
  }

  async remove(docOrId: any, rev?: any) {

@ -11,24 +11,7 @@ export enum Cookie {
  OIDC_CONFIG = "budibase:oidc:config",
}

export enum Header {
  API_KEY = "x-budibase-api-key",
  LICENSE_KEY = "x-budibase-license-key",
  API_VER = "x-budibase-api-version",
  APP_ID = "x-budibase-app-id",
  SESSION_ID = "x-budibase-session-id",
  TYPE = "x-budibase-type",
  PREVIEW_ROLE = "x-budibase-role",
  TENANT_ID = "x-budibase-tenant-id",
  VERIFICATION_CODE = "x-budibase-verification-code",
  RETURN_VERIFICATION_CODE = "x-budibase-return-verification-code",
  RESET_PASSWORD_CODE = "x-budibase-reset-password-code",
  RETURN_RESET_PASSWORD_CODE = "x-budibase-return-reset-password-code",
  TOKEN = "x-budibase-token",
  CSRF_TOKEN = "x-csrf-token",
  CORRELATION_ID = "x-budibase-correlation-id",
  AUTHORIZATION = "authorization",
}
export { Header } from "@budibase/shared-core"

export enum GlobalRole {
  OWNER = "owner",

@ -107,6 +107,7 @@ const environment = {
  ENCRYPTION_KEY: process.env.ENCRYPTION_KEY,
  API_ENCRYPTION_KEY: getAPIEncryptionKey(),
  COUCH_DB_URL: process.env.COUCH_DB_URL || "http://localhost:4005",
  COUCH_DB_SQL_URL: process.env.COUCH_DB_SQL_URL || "http://localhost:4984",
  COUCH_DB_USERNAME: process.env.COUCH_DB_USER,
  COUCH_DB_PASSWORD: process.env.COUCH_DB_PASSWORD,
  GOOGLE_CLIENT_ID: process.env.GOOGLE_CLIENT_ID,

@@ -260,12 +260,12 @@ export async function listAllObjects(bucketName: string, path: string) {
 }
 
 /**
- * Generate a presigned url with a default TTL of 36 hours
+ * Generate a presigned url with a default TTL of 1 hour
  */
 export function getPresignedUrl(
   bucketName: string,
   key: string,
-  durationSeconds: number = 129600
+  durationSeconds: number = 3600
 ) {
   const objectStore = ObjectStore(bucketName, { presigning: true })
   const params = {

@@ -3,4 +3,5 @@ export enum JobQueue {
   APP_BACKUP = "appBackupQueue",
   AUDIT_LOG = "auditLogQueue",
   SYSTEM_EVENT_QUEUE = "systemEventQueue",
+  APP_MIGRATION = "appMigration",
 }

@@ -68,6 +68,10 @@ class InMemoryQueue {
     })
   }
 
+  async isReady() {
+    return true
+  }
+
   // simply puts a message to the queue and emits to the queue for processing
   /**
    * Simple function to replicate the add message functionality of Bull, putting

@@ -87,6 +87,7 @@ enum QueueEventType {
   APP_BACKUP_EVENT = "app-backup-event",
   AUDIT_LOG_EVENT = "audit-log-event",
   SYSTEM_EVENT = "system-event",
+  APP_MIGRATION = "app-migration",
 }
 
 const EventTypeMap: { [key in JobQueue]: QueueEventType } = {
@@ -94,6 +95,7 @@ const EventTypeMap: { [key in JobQueue]: QueueEventType } = {
   [JobQueue.APP_BACKUP]: QueueEventType.APP_BACKUP_EVENT,
   [JobQueue.AUDIT_LOG]: QueueEventType.AUDIT_LOG_EVENT,
   [JobQueue.SYSTEM_EVENT_QUEUE]: QueueEventType.SYSTEM_EVENT,
+  [JobQueue.APP_MIGRATION]: QueueEventType.APP_MIGRATION,
 }
 
 function logging(queue: Queue, jobQueue: JobQueue) {

@@ -137,7 +137,6 @@ export async function doWithLock<T>(
     const result = await task()
     return { executed: true, result }
   } catch (e: any) {
-    logWarn(`lock type: ${opts.type} error`, e)
     // lock limit exceeded
     if (e.name === "LockError") {
       if (opts.type === LockType.TRY_ONCE) {

@@ -96,7 +96,7 @@ export async function getAppIdFromCtx(ctx: Ctx) {
   }
 
   // look in the path
-  const pathId = parseAppIdFromUrl(ctx.path)
+  const pathId = parseAppIdFromUrlPath(ctx.path)
   if (!appId && pathId) {
     appId = confirmAppId(pathId)
   }
@@ -116,18 +116,21 @@ export async function getAppIdFromCtx(ctx: Ctx) {
   // referer header is present from a builder redirect
   const referer = ctx.request.headers.referer
   if (!appId && referer?.includes(BUILDER_APP_PREFIX)) {
-    const refererId = parseAppIdFromUrl(ctx.request.headers.referer)
+    const refererId = parseAppIdFromUrlPath(ctx.request.headers.referer)
     appId = confirmAppId(refererId)
   }
 
   return appId
 }
 
-function parseAppIdFromUrl(url?: string) {
+function parseAppIdFromUrlPath(url?: string) {
   if (!url) {
     return
   }
-  return url.split("/").find(subPath => subPath.startsWith(APP_PREFIX))
+  return url
+    .split("?")[0] // Remove any possible query string
+    .split("/")
+    .find(subPath => subPath.startsWith(APP_PREFIX))
 }
 
 /**

@@ -6,7 +6,7 @@
   "scripts": {
     "build": "routify -b && vite build --emptyOutDir",
     "start": "routify -c rollup",
-    "dev:builder": "routify -c dev:vite",
+    "dev": "routify -c dev:vite",
     "dev:vite": "vite --host 0.0.0.0",
     "rollup": "rollup -c -w",
     "test": "vitest run",
@@ -61,9 +61,9 @@
     "@codemirror/theme-one-dark": "^6.1.2",
     "@codemirror/view": "^6.11.2",
     "@fontsource/source-sans-pro": "^5.0.3",
-    "@fortawesome/fontawesome-svg-core": "^6.2.1",
-    "@fortawesome/free-brands-svg-icons": "^6.2.1",
-    "@fortawesome/free-solid-svg-icons": "^6.2.1",
+    "@fortawesome/fontawesome-svg-core": "^6.4.2",
+    "@fortawesome/free-brands-svg-icons": "^6.4.2",
+    "@fortawesome/free-solid-svg-icons": "^6.4.2",
     "@spectrum-css/page": "^3.0.1",
     "@spectrum-css/vars": "^3.0.1",
     "codemirror": "^5.59.0",
@@ -78,25 +78,24 @@
     "svelte-dnd-action": "^0.9.8",
     "svelte-loading-spinners": "^0.1.1",
     "svelte-portal": "1.0.0",
-    "yup": "0.29.2"
+    "yup": "^0.32.11"
   },
   "devDependencies": {
     "@babel/core": "^7.12.14",
     "@babel/plugin-transform-runtime": "^7.13.10",
     "@babel/preset-env": "^7.13.12",
     "@rollup/plugin-replace": "^5.0.3",
     "@roxi/routify": "2.18.12",
-    "@sveltejs/vite-plugin-svelte": "1.0.1",
+    "@sveltejs/vite-plugin-svelte": "1.4.0",
    "@testing-library/jest-dom": "5.17.0",
    "@testing-library/svelte": "^3.2.2",
-    "babel-jest": "29.6.2",
+    "babel-jest": "^29.6.2",
    "identity-obj-proxy": "^3.0.0",
-    "jest": "29.6.2",
+    "jest": "29.7.0",
    "jsdom": "^21.1.1",
    "ncp": "^2.0.0",
-    "svelte": "^3.48.0",
+    "svelte": "^3.49.0",
    "svelte-jester": "^1.3.2",
-    "vite": "^4.4.11",
+    "vite": "^4.5.0",
    "vite-plugin-static-copy": "^0.17.0",
    "vitest": "^0.29.2"
   },
@@ -115,7 +114,7 @@
         }
       ]
     },
-    "dev:builder": {
+    "dev": {
       "dependsOn": [
         {
           "projects": [

@@ -1,14 +1,19 @@
 <script>
   import { ActionButton, notifications } from "@budibase/bbui"
   import CreateEditRelationshipModal from "../../Datasources/CreateEditRelationshipModal.svelte"
-  import { datasources } from "../../../../stores/backend"
+  import {
+    datasources,
+    tables as tablesStore,
+  } from "../../../../stores/backend"
   import { createEventDispatcher } from "svelte"
 
   export let table
   const dispatch = createEventDispatcher()
 
   $: datasource = findDatasource(table?._id)
-  $: tables = datasource?.plus ? Object.values(datasource?.entities || {}) : []
+  $: tables = datasource?.plus
+    ? $tablesStore.list.filter(tbl => tbl.sourceId === datasource._id)
+    : []
 
   let modal
 
@@ -28,7 +33,12 @@
   }
 
   const onError = err => {
-    notifications.error(`Error saving relationship info: ${err}`)
+    if (err.err) {
+      err = err.err
+    }
+    notifications.error(
+      `Error saving relationship info: ${err?.message || JSON.stringify(err)}`
+    )
   }
 </script>
 

@@ -85,6 +85,7 @@
   let relationshipTableIdSecondary = null
 
+  let table = $tables.selected
 
   let confirmDeleteDialog
   let savingColumn
   let deleteColName
@@ -171,7 +172,7 @@
       }
     }
   }
-  if (!savingColumn) {
+  if (!savingColumn && !originalName) {
     let highestNumber = 0
     Object.keys(table.schema).forEach(columnName => {
       const columnNumber = extractColumnNumber(columnName)

@@ -15,7 +15,8 @@
 
   let modal
 
-  $: tables = Object.values(datasource.entities)
+  $: tables =
+    $tablesStore.list.filter(tbl => tbl.sourceId === datasource._id) || []
   $: relationships = getRelationships(tables)
 
   function getRelationships(tables) {
@@ -43,14 +44,16 @@
       })
     })
 
-    return Object.values(relatedColumns).map(({ from, to, through }) => {
-      return {
-        tables: `${from.tableName} ${through ? "↔" : "→"} ${to.tableName}`,
-        columns: `${from.name} to ${to.name}`,
-        from,
-        to,
-      }
-    })
+    return Object.values(relatedColumns)
+      .filter(({ from, to }) => from && to)
+      .map(({ from, to, through }) => {
+        return {
+          tables: `${from.tableName} ${through ? "↔" : "→"} ${to.tableName}`,
+          columns: `${from.name} to ${to.name}`,
+          from,
+          to,
+        }
+      })
   }
 
   const handleRowClick = ({ detail }) => {

@@ -125,7 +125,6 @@
 
   // Handler for DatasourceModal confirmation, move to screen access select
   const confirmScreenDatasources = async ({ templates }) => {
-    console.log(templates)
    selectedTemplates = templates
    screenAccessRoleModal.show()
   }

@@ -1,6 +1,5 @@
 <script>
   import {
-    banner,
     Heading,
     Layout,
     Button,
@@ -11,7 +10,6 @@
     Notification,
     Body,
     Search,
-    BANNER_TYPES,
   } from "@budibase/bbui"
   import Spinner from "components/common/Spinner.svelte"
   import CreateAppModal from "components/start/CreateAppModal.svelte"
@@ -200,20 +198,6 @@
       if (usersLimitLockAction) {
         usersLimitLockAction()
       }
-      if (!$admin.isDev) {
-        await banner.show({
-          messages: [
-            {
-              message:
-                "We've updated our pricing - see our website to learn more.",
-              type: BANNER_TYPES.NEUTRAL,
-              extraButtonText: "Learn More",
-              extraButtonAction: () =>
-                window.open("https://budibase.com/pricing"),
-            },
-          ],
-        })
-      }
     } catch (error) {
       notifications.error("Error getting init info")
     }

@@ -134,6 +134,10 @@ export default defineConfig(({ mode }) => {
         find: "@budibase/shared-core",
         replacement: path.resolve("../shared-core/src"),
       },
+      {
+        find: "@budibase/bbui",
+        replacement: path.resolve("../bbui/src"),
+      },
     ],
   },
 }

@@ -52,11 +52,11 @@
     "yaml": "^2.1.1"
   },
   "devDependencies": {
-    "@types/jest": "29.5.3",
+    "@types/jest": "29.5.5",
    "@types/node-fetch": "2.6.4",
    "@types/pouchdb": "^6.4.0",
    "renamer": "^4.0.0",
-    "ts-node": "^10.9.1",
+    "ts-node": "10.8.1",
    "typescript": "5.2.2"
   }
 }

@@ -16,7 +16,7 @@
   },
   "scripts": {
     "build": "rollup -c",
-    "dev:builder": "rollup -cw"
+    "dev": "rollup -cw"
   },
   "dependencies": {
     "@budibase/bbui": "0.0.0",
@@ -78,7 +78,7 @@
         }
       ]
     },
-    "dev:builder": {
+    "dev": {
       "dependsOn": [
         {
           "projects": [

@@ -9,8 +9,8 @@
     "@budibase/bbui": "0.0.0",
     "@budibase/shared-core": "0.0.0",
     "dayjs": "^1.10.8",
-    "lodash": "^4.17.21",
+    "lodash": "4.17.21",
    "socket.io-client": "^4.6.1",
-    "svelte": "^3.46.2"
+    "svelte": "^3.49.0"
   }
 }

@@ -1,4 +1,5 @@
 import { Helpers } from "@budibase/bbui"
+import { Header } from "@budibase/shared-core"
 import { ApiVersion } from "../constants"
 import { buildAnalyticsEndpoints } from "./analytics"
 import { buildAppEndpoints } from "./app"
@@ -62,6 +63,11 @@ const defaultAPIClientConfig = {
    * invoked before the actual JS error is thrown up the stack.
    */
   onError: null,
+
+  /**
+   * A function can be passed to be called when an API call returns info about a migration running for a specific app
+   */
+  onMigrationDetected: null,
 }
 
 /**
@@ -133,9 +139,9 @@ export const createAPIClient = config => {
 
     // Build headers
     let headers = { Accept: "application/json" }
-    headers["x-budibase-session-id"] = APISessionID
+    headers[Header.SESSION_ID] = APISessionID
     if (!external) {
-      headers["x-budibase-api-version"] = ApiVersion
+      headers[Header.API_VER] = ApiVersion
     }
     if (json) {
       headers["Content-Type"] = "application/json"
@@ -170,6 +176,7 @@ export const createAPIClient = config => {
 
     // Handle response
     if (response.status >= 200 && response.status < 400) {
+      handleMigrations(response)
       try {
         if (parseResponse) {
           return await parseResponse(response)
@@ -186,7 +193,18 @@ export const createAPIClient = config => {
       }
     }
 
+  const handleMigrations = response => {
+    if (!config.onMigrationDetected) {
+      return
+    }
+    const migration = response.headers.get(Header.MIGRATING_APP)
+
+    if (migration) {
+      config.onMigrationDetected(migration)
+    }
+  }
+
   // Performs an API call to the server and caches the response.
   // Future invocation for this URL will return the cached result instead of
   // hitting the server again.
   const makeCachedApiCall = async params => {
@@ -242,7 +260,7 @@ export const createAPIClient = config => {
     getAppID: () => {
       let headers = {}
       config?.attachHeaders(headers)
-      return headers?.["x-budibase-app-id"]
+      return headers?.[Header.APP_ID]
     },
   }
 

@@ -1 +1 @@
-Subproject commit 1037b032d49244678204704d1bca779a29e395eb
+Subproject commit 992486c10044a7495496b97bdf5f454d4020bfba

@@ -0,0 +1,14 @@
+# Budibase server project
+
+This project contains all the server-specific logic required to run a Budibase app.
+
+## App migrations
+
+A migration system exists to modify existing apps when breaking changes are introduced. These migrations run on app startup (whether triggered from the client or the builder side), blocking access until they have been applied successfully.
+
+### Create a new migration
+
+To add a new migration:
+
+1. Run `yarn add-app-migration [title]`
+2. Write your code in the newly created file

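For context, per the `AppMigration` type added elsewhere in this commit, a migration only needs an `id` and an async `func`; the file name and export below are hypothetical, shown purely as a sketch of the expected shape.

```ts
// Hypothetical example: 20231211120000_rename_field.ts (generated by `yarn add-app-migration`)
// The timestamp-prefixed id drives execution order, and func should be
// idempotent - rerunning it must leave the app unchanged.
import { AppMigration } from "."

export const migration: AppMigration = {
  id: "20231211120000_rename_field",
  func: async () => {
    // apply the breaking-change fix within the current app context here
  },
}
```
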
@@ -23,7 +23,7 @@
     "dev:stack:up": "node scripts/dev/manage.js up",
     "dev:stack:down": "node scripts/dev/manage.js down",
     "dev:stack:nuke": "node scripts/dev/manage.js nuke",
-    "dev:builder": "yarn run dev:stack:up && nodemon",
+    "dev": "yarn run dev:stack:up && nodemon",
    "dev:built": "yarn run dev:stack:up && yarn run run:docker",
    "specs": "ts-node specs/generate.ts && openapi-typescript specs/openapi.yaml --output src/definitions/openapi.ts",
    "initialise": "node scripts/initialise.js",
@@ -111,7 +111,6 @@
     "xml2js": "0.5.0"
   },
   "devDependencies": {
     "@babel/core": "7.17.4",
     "@babel/preset-env": "7.16.11",
     "@swc/core": "1.3.71",
     "@swc/jest": "0.2.27",
@@ -128,20 +127,20 @@
     "@types/oracledb": "5.2.2",
     "@types/pg": "8.6.6",
     "@types/server-destroy": "1.0.1",
-    "@types/supertest": "2.0.12",
+    "@types/supertest": "2.0.14",
    "@types/tar": "6.1.5",
    "apidoc": "0.50.4",
    "copyfiles": "2.4.1",
    "docker-compose": "0.23.17",
-    "jest": "29.6.2",
+    "jest": "29.7.0",
    "jest-openapi": "0.14.2",
-    "jest-runner": "29.6.2",
+    "jest-runner": "29.7.0",
    "jest-serial-runner": "1.2.1",
    "nodemon": "2.0.15",
    "openapi-typescript": "5.2.0",
    "path-to-regexp": "6.2.0",
    "rimraf": "3.0.2",
-    "supertest": "6.2.2",
+    "supertest": "6.3.3",
    "swagger-jsdoc": "6.1.0",
    "timekeeper": "2.2.0",
    "ts-node": "10.8.1",
@@ -155,7 +154,7 @@
   },
   "nx": {
     "targets": {
-      "dev:builder": {
+      "dev": {
         "dependsOn": [
           {
             "comment": "Required for pro usage when submodule not loaded",

@@ -1,7 +1,8 @@
 #!/usr/bin/env node
 const compose = require("docker-compose")
 const path = require("path")
-const fs = require("fs")
+const { parsed: existingConfig } = require("dotenv").config()
+const updateDotEnv = require("update-dotenv")
 
 // This script wraps docker-compose allowing you to manage your dev infrastructure with simple commands.
 const CONFIG = {
@@ -17,44 +18,41 @@ const Commands = {
 }
 
 async function init() {
-  const envFilePath = path.join(process.cwd(), ".env")
-  if (!fs.existsSync(envFilePath)) {
-    const envFileJson = {
-      PORT: 4001,
-      MINIO_URL: "http://localhost:4004",
-      COUCH_DB_URL: "http://budibase:budibase@localhost:4005",
-      REDIS_URL: "localhost:6379",
-      WORKER_URL: "http://localhost:4002",
-      INTERNAL_API_KEY: "budibase",
-      ACCOUNT_PORTAL_URL: "http://localhost:10001",
-      ACCOUNT_PORTAL_API_KEY: "budibase",
-      JWT_SECRET: "testsecret",
-      ENCRYPTION_KEY: "testsecret",
-      REDIS_PASSWORD: "budibase",
-      MINIO_ACCESS_KEY: "budibase",
-      MINIO_SECRET_KEY: "budibase",
-      COUCH_DB_PASSWORD: "budibase",
-      COUCH_DB_USER: "budibase",
-      SELF_HOSTED: 1,
-      DISABLE_ACCOUNT_PORTAL: 1,
-      MULTI_TENANCY: "",
-      DISABLE_THREADING: 1,
-      SERVICE: "app-service",
-      DEPLOYMENT_ENVIRONMENT: "development",
-      BB_ADMIN_USER_EMAIL: "",
-      BB_ADMIN_USER_PASSWORD: "",
-      PLUGINS_DIR: "",
-      TENANT_FEATURE_FLAGS: "*:LICENSING,*:USER_GROUPS,*:ONBOARDING_TOUR",
-      HTTP_MIGRATIONS: "0",
-      HTTP_LOGGING: "0",
-      VERSION: "0.0.0+local",
-    }
-    let envFile = ""
-    Object.keys(envFileJson).forEach(key => {
-      envFile += `${key}=${envFileJson[key]}\n`
-    })
-    fs.writeFileSync(envFilePath, envFile)
-  }
+  let config = {
+    PORT: "4001",
+    MINIO_URL: "http://localhost:4004",
+    COUCH_DB_URL: "http://budibase:budibase@localhost:4005",
+    REDIS_URL: "localhost:6379",
+    WORKER_URL: "http://localhost:4002",
+    INTERNAL_API_KEY: "budibase",
+    ACCOUNT_PORTAL_URL: "http://localhost:10001",
+    ACCOUNT_PORTAL_API_KEY: "budibase",
+    PLATFORM_URL: "http://localhost:10000",
+    JWT_SECRET: "testsecret",
+    ENCRYPTION_KEY: "testsecret",
+    REDIS_PASSWORD: "budibase",
+    MINIO_ACCESS_KEY: "budibase",
+    MINIO_SECRET_KEY: "budibase",
+    COUCH_DB_PASSWORD: "budibase",
+    COUCH_DB_USER: "budibase",
+    SELF_HOSTED: "1",
+    DISABLE_ACCOUNT_PORTAL: "1",
+    MULTI_TENANCY: "",
+    DISABLE_THREADING: "1",
+    SERVICE: "app-service",
+    DEPLOYMENT_ENVIRONMENT: "development",
+    BB_ADMIN_USER_EMAIL: "",
+    BB_ADMIN_USER_PASSWORD: "",
+    PLUGINS_DIR: "",
+    TENANT_FEATURE_FLAGS: "*:LICENSING,*:USER_GROUPS,*:ONBOARDING_TOUR",
+    HTTP_MIGRATIONS: "0",
+    HTTP_LOGGING: "0",
+    VERSION: "0.0.0+local",
+  }
+
+  config = { ...config, ...existingConfig }
+
+  await updateDotEnv(config)
 }
 
 async function up() {

@@ -52,6 +52,7 @@ import { BASE_LAYOUT_PROP_IDS } from "../../constants/layouts"
 import sdk from "../../sdk"
 import { builderSocket } from "../../websockets"
 import { sdk as sharedCoreSDK } from "@budibase/shared-core"
+import * as appMigrations from "../../appMigrations"
 
 // utility function, need to do away with this
 async function getLayouts() {
@@ -336,6 +337,12 @@ async function performAppCreate(ctx: UserCtx) {
       await createApp(appId)
     }
 
+    // Initialise the app migration version as the latest one
+    await appMigrations.updateAppMigrationMetadata({
+      appId,
+      version: appMigrations.getLatestMigrationId(),
+    })
+
     await cache.app.invalidateAppMetadata(appId, newApplication)
     return newApplication
   })

@@ -26,6 +26,7 @@ import {
 import sdk from "../../sdk"
 import { builderSocket } from "../../websockets"
 import { setupCreationAuth as googleSetupCreationAuth } from "../../integrations/googlesheets"
+import { isEqual } from "lodash"
 
 async function getConnector(
   datasource: Datasource
@@ -198,19 +199,20 @@ async function invalidateVariables(
 export async function update(ctx: UserCtx<any, UpdateDatasourceResponse>) {
   const db = context.getAppDB()
   const datasourceId = ctx.params.datasourceId
-  let datasource = await sdk.datasources.get(datasourceId)
-  const auth = datasource.config?.auth
-  await invalidateVariables(datasource, ctx.request.body)
+  const baseDatasource = await sdk.datasources.get(datasourceId)
+  const auth = baseDatasource.config?.auth
+  await invalidateVariables(baseDatasource, ctx.request.body)
 
-  const isBudibaseSource = datasource.type === dbCore.BUDIBASE_DATASOURCE_TYPE
+  const isBudibaseSource =
+    baseDatasource.type === dbCore.BUDIBASE_DATASOURCE_TYPE
 
   const dataSourceBody = isBudibaseSource
     ? { name: ctx.request.body?.name }
     : ctx.request.body
 
-  datasource = {
-    ...datasource,
-    ...sdk.datasources.mergeConfigs(dataSourceBody, datasource),
+  let datasource: Datasource = {
+    ...baseDatasource,
+    ...sdk.datasources.mergeConfigs(dataSourceBody, baseDatasource),
   }
   if (auth && !ctx.request.body.auth) {
     // don't strip auth config from DB
@@ -245,6 +247,15 @@ export async function update(ctx: UserCtx<any, UpdateDatasourceResponse>) {
     datasource: await sdk.datasources.removeSecretSingle(datasource),
   }
   builderSocket?.emitDatasourceUpdate(ctx, datasource)
+  // send table updates if they have occurred
+  if (datasource.entities) {
+    for (let table of Object.values(datasource.entities)) {
+      const oldTable = baseDatasource.entities?.[table.name]
+      if (!oldTable || !isEqual(oldTable, table)) {
+        builderSocket?.emitTableUpdate(ctx, table, { includeOriginator: true })
+      }
+    }
+  }
 }
 
 const preSaveAction: Partial<Record<SourceName, any>> = {

@@ -1,14 +1,34 @@
+import { context } from "@budibase/backend-core"
 import { migrate as migrationImpl, MIGRATIONS } from "../../migrations"
-import { BBContext } from "@budibase/types"
+import { Ctx } from "@budibase/types"
+import {
+  getAppMigrationVersion,
+  getLatestMigrationId,
+} from "../../appMigrations"
 
-export async function migrate(ctx: BBContext) {
+export async function migrate(ctx: Ctx) {
   const options = ctx.request.body
   // don't await as can take a while, just return
   migrationImpl(options)
   ctx.status = 200
 }
 
-export async function fetchDefinitions(ctx: BBContext) {
+export async function fetchDefinitions(ctx: Ctx) {
   ctx.body = MIGRATIONS
   ctx.status = 200
 }
+
+export async function getMigrationStatus(ctx: Ctx) {
+  const appId = context.getAppId()
+
+  if (!appId) {
+    ctx.throw("AppId could not be found")
+  }
+
+  const latestAppliedMigration = await getAppMigrationVersion(appId)
+
+  const migrated = latestAppliedMigration === getLatestMigrationId()
+
+  ctx.body = { migrated }
+  ctx.status = 200
+}

@@ -2,7 +2,7 @@ import * as linkRows from "../../../db/linkedRows"
 import { generateRowID, InternalTables } from "../../../db/utils"
 import * as userController from "../user"
 import {
-  cleanupAttachments,
+  AttachmentCleanup,
   inputProcessing,
   outputProcessing,
 } from "../../../utilities/rowProcessor"
@@ -79,7 +79,7 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
     table,
   })) as Row
   // check if any attachments removed
-  await cleanupAttachments(table, { oldRow, row })
+  await AttachmentCleanup.rowUpdate(table, { row, oldRow })
 
   if (isUserTable) {
     // the row has been updated, need to put it into the ctx
@@ -119,7 +119,7 @@ export async function save(ctx: UserCtx) {
     throw { validation: validateResult.errors }
   }
 
-  // make sure link rows are up to date
+  // make sure link rows are up-to-date
   row = (await linkRows.updateLinks({
     eventType: linkRows.EventType.ROW_SAVE,
     row,
@@ -165,7 +165,7 @@ export async function destroy(ctx: UserCtx) {
     tableId,
   })
   // remove any attachments that were on the row from object storage
-  await cleanupAttachments(table, { row })
+  await AttachmentCleanup.rowDelete(table, [row])
   // remove any static formula
   await updateRelatedFormula(table, row)
 
@@ -216,7 +216,7 @@ export async function bulkDestroy(ctx: UserCtx) {
     await db.bulkDocs(processedRows.map(row => ({ ...row, _deleted: true })))
   }
   // remove any attachments that were on the rows from object storage
-  await cleanupAttachments(table, { rows: processedRows })
+  await AttachmentCleanup.rowDelete(table, processedRows)
   await updateRelatedFormula(table, processedRows)
   await Promise.all(updates)
   return { response: { ok: true }, rows: processedRows }

@@ -11,7 +11,7 @@ import {
 } from "../../../constants"
 import {
   inputProcessing,
-  cleanupAttachments,
+  AttachmentCleanup,
 } from "../../../utilities/rowProcessor"
 import { getViews, saveView } from "../view/utils"
 import viewTemplate from "../view/viewBuilder"
@@ -82,7 +82,10 @@ export async function checkForColumnUpdates(
   })
 
   // cleanup any attachments from object storage for deleted attachment columns
-  await cleanupAttachments(updatedTable, { oldTable, rows: rawRows })
+  await AttachmentCleanup.tableUpdate(updatedTable, rawRows, {
+    oldTable,
+    rename: columnRename,
+  })
   // Update views
   await checkForViewUpdates(updatedTable, deletedColumns, columnRename)
 }

@@ -4,59 +4,75 @@ import currentApp from "../middleware/currentapp"
 import zlib from "zlib"
 import { mainRoutes, staticRoutes, publicRoutes } from "./routes"
 import { middleware as pro } from "@budibase/pro"
+import { apiEnabled, automationsEnabled } from "../features"
+import migrations from "../middleware/appMigrations"
+import { automationQueue } from "../automations"
 
 export { shutdown } from "./routes/public"
 const compress = require("koa-compress")
 
 export const router: Router = new Router()
 
-router.get("/health", ctx => (ctx.status = 200))
+router.get("/health", async ctx => {
+  if (automationsEnabled()) {
+    if (!(await automationQueue.isReady())) {
+      ctx.status = 503
+      return
+    }
+  }
+  ctx.status = 200
+})
 router.get("/version", ctx => (ctx.body = envCore.VERSION))
 
 router.use(middleware.errorHandling)
 
-router
-  .use(
-    compress({
-      threshold: 2048,
-      gzip: {
-        flush: zlib.constants.Z_SYNC_FLUSH,
-      },
-      deflate: {
-        flush: zlib.constants.Z_SYNC_FLUSH,
-      },
-      br: false,
-    })
-  )
-  // re-direct before any middlewares occur
-  .redirect("/", "/builder")
-  .use(
-    auth.buildAuthMiddleware([], {
-      publicAllowed: true,
-    })
-  )
-  // nothing in the server should allow query string tenants
-  // the server can be public anywhere, so nowhere should throw errors
-  // if the tenancy has not been set, it'll have to be discovered at application layer
-  .use(
-    auth.buildTenancyMiddleware([], [], {
-      noTenancyRequired: true,
-    })
-  )
-  .use(pro.licensing())
-  // @ts-ignore
-  .use(currentApp)
-  .use(auth.auditLog)
+// only add the routes if they are enabled
+if (apiEnabled()) {
+  router
+    .use(
+      compress({
+        threshold: 2048,
+        gzip: {
+          flush: zlib.constants.Z_SYNC_FLUSH,
+        },
+        deflate: {
+          flush: zlib.constants.Z_SYNC_FLUSH,
+        },
+        br: false,
+      })
+    )
+    // re-direct before any middlewares occur
+    .redirect("/", "/builder")
+    .use(
+      auth.buildAuthMiddleware([], {
+        publicAllowed: true,
+      })
+    )
+    // nothing in the server should allow query string tenants
+    // the server can be public anywhere, so nowhere should throw errors
+    // if the tenancy has not been set, it'll have to be discovered at application layer
+    .use(
+      auth.buildTenancyMiddleware([], [], {
+        noTenancyRequired: true,
+      })
+    )
+    .use(pro.licensing())
+    // @ts-ignore
+    .use(currentApp)
+    .use(auth.auditLog)
+    // @ts-ignore
+    .use(migrations)
 
-// authenticated routes
-for (let route of mainRoutes) {
-  router.use(route.routes())
-  router.use(route.allowedMethods())
+  // authenticated routes
+  for (let route of mainRoutes) {
+    router.use(route.routes())
+    router.use(route.allowedMethods())
+  }
+
+  router.use(publicRoutes.routes())
+  router.use(publicRoutes.allowedMethods())
+
+  // WARNING - static routes will catch everything else after them this must be last
+  router.use(staticRoutes.routes())
+  router.use(staticRoutes.allowedMethods())
 }
-
-router.use(publicRoutes.routes())
-router.use(publicRoutes.allowedMethods())
-
-// WARNING - static routes will catch everything else after them this must be last
-router.use(staticRoutes.routes())
-router.use(staticRoutes.allowedMethods())

@@ -11,4 +11,6 @@ router
     auth.internalApi,
     migrationsController.fetchDefinitions
   )
+  .get("/api/migrations/status", migrationsController.getMigrationStatus)
+
 export default router

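Given the `getMigrationStatus` handler above returns a `{ migrated }` body, a caller could poll this route to wait out a blocking migration. The endpoint path and response shape come from this diff; the helper itself is a hypothetical sketch (authentication headers and error handling omitted).

```ts
// Hypothetical polling helper for the new status route.
async function waitForMigrations(appUrl: string): Promise<void> {
  for (;;) {
    const res = await fetch(`${appUrl}/api/migrations/status`)
    const { migrated } = (await res.json()) as { migrated: boolean }
    if (migrated) {
      return
    }
    // back off briefly before asking again
    await new Promise(resolve => setTimeout(resolve, 1000))
  }
}
```
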
@@ -9,7 +9,6 @@ import { ServiceType } from "@budibase/types"
 import { env as coreEnv } from "@budibase/backend-core"
 
 coreEnv._set("SERVICE_TYPE", ServiceType.APPS)
-import { apiEnabled } from "./features"
 import createKoaApp from "./koa"
 import Koa from "koa"
 import { Server } from "http"
@@ -18,12 +17,9 @@ import { startup } from "./startup"
 let app: Koa, server: Server
 
 async function start() {
-  // if API disabled, could run automations instead
-  if (apiEnabled()) {
-    const koa = createKoaApp()
-    app = koa.app
-    server = koa.server
-  }
+  const koa = createKoaApp()
+  app = koa.app
+  server = koa.server
   // startup includes automation runner - if enabled
   await startup(app, server)
 }

@@ -0,0 +1,90 @@
+import { Duration, cache, context, db, env } from "@budibase/backend-core"
+import { Database, DocumentType, Document } from "@budibase/types"
+
+export interface AppMigrationDoc extends Document {
+  version: string
+  history: Record<string, { runAt: string }>
+}
+
+const EXPIRY_SECONDS = Duration.fromDays(1).toSeconds()
+
+async function getFromDB(appId: string) {
+  return db.doWithDB(
+    appId,
+    (db: Database) => {
+      return db.get<AppMigrationDoc>(DocumentType.APP_MIGRATION_METADATA)
+    },
+    { skip_setup: true }
+  )
+}
+
+const getCacheKey = (appId: string) => `appmigrations_${env.VERSION}_${appId}`
+
+export async function getAppMigrationVersion(appId: string): Promise<string> {
+  const cacheKey = getCacheKey(appId)
+
+  let metadata: AppMigrationDoc | undefined = await cache.get(cacheKey)
+
+  // We don't want to cache in dev, in order to be able to tweak it
+  if (metadata && !env.isDev()) {
+    return metadata.version
+  }
+
+  let version
+  try {
+    metadata = await getFromDB(appId)
+    version = metadata.version
+  } catch (err: any) {
+    if (err.status !== 404) {
+      throw err
+    }
+
+    version = ""
+  }
+
+  await cache.store(cacheKey, version, EXPIRY_SECONDS)
+
+  return version
+}
+
+export async function updateAppMigrationMetadata({
+  appId,
+  version,
+}: {
+  appId: string
+  version: string
+}): Promise<void> {
+  const db = context.getAppDB()
+
+  let appMigrationDoc: AppMigrationDoc
+
+  try {
+    appMigrationDoc = await getFromDB(appId)
+  } catch (err: any) {
+    if (err.status !== 404) {
+      throw err
+    }
+
+    appMigrationDoc = {
+      _id: DocumentType.APP_MIGRATION_METADATA,
+      version: "",
+      history: {},
+    }
+    await db.put(appMigrationDoc)
+    appMigrationDoc = await getFromDB(appId)
+  }
+
+  const updatedMigrationDoc: AppMigrationDoc = {
+    ...appMigrationDoc,
+    version: version || "",
+    history: {
+      ...appMigrationDoc.history,
+      [version]: { runAt: new Date().toISOString() },
+    },
+  }
+  await db.put(updatedMigrationDoc)
+
+  const cacheKey = getCacheKey(appId)
+
+  await cache.destroy(cacheKey)
+}

@@ -0,0 +1,46 @@
+import queue from "./queue"
+import { Next } from "koa"
+import { getAppMigrationVersion } from "./appMigrationMetadata"
+import { MIGRATIONS } from "./migrations"
+import { UserCtx } from "@budibase/types"
+import { Header } from "@budibase/backend-core"
+
+export * from "./appMigrationMetadata"
+
+export type AppMigration = {
+  id: string
+  func: () => Promise<void>
+}
+
+export const getLatestMigrationId = () =>
+  MIGRATIONS.map(m => m.id)
+    .sort()
+    .reverse()[0]
+
+const getTimestamp = (versionId: string) => versionId?.split("_")[0]
+
+export async function checkMissingMigrations(
+  ctx: UserCtx,
+  next: Next,
+  appId: string
+) {
+  const currentVersion = await getAppMigrationVersion(appId)
+  const latestMigration = getLatestMigrationId()
+
+  if (getTimestamp(currentVersion) < getTimestamp(latestMigration)) {
+    await queue.add(
+      {
+        appId,
+      },
+      {
+        jobId: `${appId}_${latestMigration}`,
+        removeOnComplete: true,
+        removeOnFail: true,
+      }
+    )
+
+    ctx.response.set(Header.MIGRATING_APP, appId)
+  }
+
+  return next()
+}

@@ -0,0 +1,7 @@
+// This file should never be manually modified, use `yarn add-app-migration` in order to add a new one
+
+import { AppMigration } from "."
+
+export const MIGRATIONS: AppMigration[] = [
+  // Migrations will be executed sorted by id
+]

@@ -0,0 +1,58 @@
+import { context, locks } from "@budibase/backend-core"
+import { LockName, LockType } from "@budibase/types"
+
+import {
+  getAppMigrationVersion,
+  updateAppMigrationMetadata,
+} from "./appMigrationMetadata"
+import { AppMigration } from "."
+
+export async function processMigrations(
+  appId: string,
+  migrations: AppMigration[]
+) {
+  console.log(`Processing app migration for "${appId}"`)
+
+  await locks.doWithLock(
+    {
+      name: LockName.APP_MIGRATION,
+      type: LockType.AUTO_EXTEND,
+      resource: appId,
+    },
+    async () => {
+      await context.doInAppMigrationContext(appId, async () => {
+        let currentVersion = await getAppMigrationVersion(appId)
+
+        const pendingMigrations = migrations
+          .filter(m => m.id > currentVersion)
+          .sort((a, b) => a.id.localeCompare(b.id))
+
+        const migrationIds = migrations.map(m => m.id).sort()
+
+        let index = 0
+        for (const { id, func } of pendingMigrations) {
+          const expectedMigration =
+            migrationIds[migrationIds.indexOf(currentVersion) + 1]
+
+          if (expectedMigration !== id) {
+            throw `Migration ${id} could not run, update for "${id}" is running but ${expectedMigration} is expected`
+          }
+
+          const counter = `(${++index}/${pendingMigrations.length})`
+          console.info(`Running migration ${id}... ${counter}`, {
+            migrationId: id,
+            appId,
+          })
+          await func()
+          await updateAppMigrationMetadata({
+            appId,
+            version: id,
+          })
+          currentVersion = id
+        }
+      })
+    }
+  )
+
+  console.log(`App migration for "${appId}" processed`)
+}

@@ -0,0 +1,15 @@
+import { queue } from "@budibase/backend-core"
+import { Job } from "bull"
+import { MIGRATIONS } from "./migrations"
+import { processMigrations } from "./migrationsProcessor"
+
+const appMigrationQueue = queue.createQueue(queue.JobQueue.APP_MIGRATION)
+appMigrationQueue.process(processMessage)
+
+async function processMessage(job: Job) {
+  const { appId } = job.data
+
+  await processMigrations(appId, MIGRATIONS)
+}
+
+export default appMigrationQueue

@@ -0,0 +1,25 @@
+import { context } from "@budibase/backend-core"
+import * as setup from "../../api/routes/tests/utilities"
+import * as migrations from "../migrations"
+
+describe("migration integrity", () => {
+  // These tests check that each migration is "idempotent".
+  // We should be able to rerun any migration, with any rerun not modifying anything. The code should be aware that the migration already ran.
+  it("each migration can rerun safely", async () => {
+    const config = setup.getConfig()
+    await config.init()
+
+    await config.doInContext(config.getAppId(), async () => {
+      const db = context.getAppDB()
+      for (const migration of migrations.MIGRATIONS) {
+        await migration.func()
+        const docs = await db.allDocs({ include_docs: true })
+
+        await migration.func()
+        const latestDocs = await db.allDocs({ include_docs: true })
+
+        expect(docs).toEqual(latestDocs)
+      }
+    })
+  })
+})

@@ -0,0 +1,53 @@
+import { Header } from "@budibase/backend-core"
+import * as setup from "../../api/routes/tests/utilities"
+import * as migrations from "../migrations"
+import { getAppMigrationVersion } from "../appMigrationMetadata"
+
+jest.mock<typeof migrations>("../migrations", () => ({
+  MIGRATIONS: [
+    {
+      id: "20231211101320_test",
+      func: async () => {},
+    },
+  ],
+}))
+
+describe("migrations", () => {
+  it("new apps are created with the latest app migration version set", async () => {
+    const config = setup.getConfig()
+    await config.init()
+
+    await config.doInContext(config.getAppId(), async () => {
+      const migrationVersion = await getAppMigrationVersion(config.getAppId())
+
+      expect(migrationVersion).toEqual("20231211101320_test")
+    })
+  })
+
+  it("accessing an app that has no pending migrations will not attach the migrating header", async () => {
+    const config = setup.getConfig()
+    await config.init()
+
+    const appId = config.getAppId()
+
+    const response = await config.api.application.getRaw(appId)
+
+    expect(response.headers[Header.MIGRATING_APP]).toBeUndefined()
+  })
+
+  it("accessing an app that has pending migrations will attach the migrating header", async () => {
+    const config = setup.getConfig()
+    await config.init()
+
+    const appId = config.getAppId()
+
+    migrations.MIGRATIONS.push({
+      id: "20231211105812_new-test",
+      func: async () => {},
+    })
+
+    const response = await config.api.application.getRaw(appId)
+
+    expect(response.headers[Header.MIGRATING_APP]).toEqual(appId)
+  })
+})

@@ -0,0 +1,52 @@
+import * as setup from "../../api/routes/tests/utilities"
+import { processMigrations } from "../migrationsProcessor"
+import { getAppMigrationVersion } from "../appMigrationMetadata"
+import { context } from "@budibase/backend-core"
+import { AppMigration } from ".."
+
+const futureTimestamp = `20500101174029`
+
+describe("migrationsProcessor", () => {
+  it("running migrations will update the latest applied migration", async () => {
+    const testMigrations: AppMigration[] = [
+      { id: `${futureTimestamp}_123`, func: async () => {} },
+      { id: `${futureTimestamp}_124`, func: async () => {} },
+      { id: `${futureTimestamp}_125`, func: async () => {} },
+    ]
+
+    const config = setup.getConfig()
+    await config.init()
+
+    const appId = config.getAppId()
+
+    await config.doInContext(appId, () =>
+      processMigrations(appId, testMigrations)
+    )
+
+    expect(
+      await config.doInContext(appId, () => getAppMigrationVersion(appId))
+    ).toBe(`${futureTimestamp}_125`)
+  })
+
+  it("no context can be initialised within a migration", async () => {
+    const testMigrations: AppMigration[] = [
+      {
+        id: `${futureTimestamp}_123`,
+        func: async () => {
+          await context.doInAppMigrationContext("any", () => {})
+        },
+      },
+    ]
+
+    const config = setup.getConfig()
+    await config.init()
+
+    const appId = config.getAppId()
+
+    await expect(
+      config.doInContext(appId, () => processMigrations(appId, testMigrations))
+    ).rejects.toThrowError(
+      "The context cannot be changed, a migration is currently running"
+    )
+  })
+})

@@ -99,9 +99,15 @@ export async function getLinkDocuments(args: {
 }
 
 export function getUniqueByProp(array: any[], prop: string) {
-  return array.filter((obj, pos, arr) => {
-    return arr.map(mapObj => mapObj[prop]).indexOf(obj[prop]) === pos
-  })
+  const seen = new Set()
+  const filteredArray = []
+  for (const item of array) {
+    if (!seen.has(item[prop])) {
+      seen.add(item[prop])
+      filteredArray.push(item)
+    }
+  }
+  return filteredArray
 }
 
 export function getLinkedTableIDs(table: Table): string[] {

@@ -88,6 +88,7 @@ const environment = {
   },
   TOP_LEVEL_PATH:
     process.env.TOP_LEVEL_PATH || process.env.SERVER_TOP_LEVEL_PATH,
+  APP_MIGRATION_TIMEOUT: parseIntSafe(process.env.APP_MIGRATION_TIMEOUT),
 }
 
 // threading can cause memory issues with node-ts in development

@@ -22,3 +22,10 @@ export function automationsEnabled() {
 export function apiEnabled() {
   return featureList.includes(AppFeature.API)
 }
+
+export function printFeatures() {
+  if (!env.APP_FEATURES) {
+    return
+  }
+  console.log(`**** APP FEATURES SET: ${featureList.join(", ")} ****`)
+}

@@ -6,13 +6,19 @@ import {
   QueryJson,
   RenameColumn,
   Table,
+  FieldType,
 } from "@budibase/types"
 import { breakExternalTableId } from "../utils"
 import SchemaBuilder = Knex.SchemaBuilder
 import CreateTableBuilder = Knex.CreateTableBuilder
-import { FieldTypes, RelationshipType } from "../../constants"
+import { RelationshipType } from "../../constants"
 import { utils } from "@budibase/shared-core"
 
+function isIgnoredType(type: FieldType) {
+  const ignored = [FieldType.LINK, FieldType.FORMULA]
+  return ignored.indexOf(type) !== -1
+}
+
 function generateSchema(
   schema: CreateTableBuilder,
   table: Table,
@@ -47,13 +53,13 @@ function generateSchema(
       continue
     }
     switch (column.type) {
-      case FieldTypes.STRING:
-      case FieldTypes.OPTIONS:
-      case FieldTypes.LONGFORM:
-      case FieldTypes.BARCODEQR:
+      case FieldType.STRING:
+      case FieldType.OPTIONS:
+      case FieldType.LONGFORM:
+      case FieldType.BARCODEQR:
         schema.text(key)
         break
-      case FieldTypes.BB_REFERENCE:
+      case FieldType.BB_REFERENCE:
         const subtype = column.subtype as FieldSubtype
         switch (subtype) {
           case FieldSubtype.USER:
@@ -66,7 +72,7 @@ function generateSchema(
             throw utils.unreachable(subtype)
         }
         break
-      case FieldTypes.NUMBER:
+      case FieldType.NUMBER:
         // if meta is specified then this is a junction table entry
         if (column.meta && column.meta.toKey && column.meta.toTable) {
           const { toKey, toTable } = column.meta
@@ -76,21 +82,21 @@ function generateSchema(
           schema.float(key)
         }
         break
-      case FieldTypes.BIGINT:
+      case FieldType.BIGINT:
         schema.bigint(key)
         break
-      case FieldTypes.BOOLEAN:
+      case FieldType.BOOLEAN:
         schema.boolean(key)
         break
-      case FieldTypes.DATETIME:
+      case FieldType.DATETIME:
         schema.datetime(key, {
           useTz: !column.ignoreTimezones,
         })
         break
-      case FieldTypes.ARRAY:
+      case FieldType.ARRAY:
         schema.json(key)
         break
-      case FieldTypes.LINK:
+      case FieldType.LINK:
         // this side of the relationship doesn't need any SQL work
         if (
           column.relationshipType !== RelationshipType.MANY_TO_ONE &&
@@ -121,22 +127,18 @@ function generateSchema(
     }
   }
 
-  if (renamed) {
+  const oldType = renamed ? oldTable?.schema[renamed.old].type : undefined
+  if (renamed && oldType && !isIgnoredType(oldType)) {
     schema.renameColumn(renamed.old, renamed.updated)
   }
 
   // need to check if any columns have been deleted
   if (oldTable) {
-    const deletedColumns = Object.entries(oldTable.schema)
-      .filter(
-        ([key, schema]) =>
-          schema.type !== FieldTypes.LINK &&
-          schema.type !== FieldTypes.FORMULA &&
-          table.schema[key] == null
-      )
-      .map(([key]) => key)
-    deletedColumns.forEach(key => {
-      if (renamed?.old === key) {
+    const deletedColumns = Object.entries(oldTable.schema).filter(
+      ([key, column]) => isIgnoredType(column.type) && table.schema[key] == null
+    )
+    deletedColumns.forEach(([key, column]) => {
+      if (renamed?.old === key || isIgnoredType(column.type)) {
         return
       }
       if (oldTable.constrained && oldTable.constrained.indexOf(key) !== -1) {

@@ -39,7 +39,7 @@ describe("Google Sheets Integration", () => {
   let cleanupEnv: () => void
 
   beforeAll(() => {
-    cleanupEnv = config.setEnv({
+    cleanupEnv = config.setCoreEnv({
       GOOGLE_CLIENT_ID: "test",
       GOOGLE_CLIENT_SECRET: "test",
     })

@@ -0,0 +1,12 @@
+import { UserCtx } from "@budibase/types"
+import { checkMissingMigrations } from "../appMigrations"
+
+export default async (ctx: UserCtx, next: any) => {
+  const { appId } = ctx
+
+  if (!appId) {
+    return next()
+  }
+
+  return checkMissingMigrations(ctx, next, appId)
+}

@@ -19,11 +19,10 @@ import { context } from "@budibase/backend-core"
 import { getTable } from "../getters"
 import { checkAutoColumns } from "./utils"
 import * as viewsSdk from "../../views"
 import sdk from "../../../index"
 import { getRowParams } from "../../../../db/utils"
 import { quotas } from "@budibase/pro"
 import env from "../../../../environment"
-import { cleanupAttachments } from "../../../../utilities/rowProcessor"
+import { AttachmentCleanup } from "../../../../utilities/rowProcessor"
 
 export async function save(
   table: Table,
@@ -164,9 +163,10 @@ export async function destroy(table: Table) {
   await runStaticFormulaChecks(table, {
     deletion: true,
   })
-  await cleanupAttachments(table, {
-    rows: rowsData.rows.map((row: any) => row.doc),
-  })
+  await AttachmentCleanup.tableDelete(
+    table,
+    rowsData.rows.map((row: any) => row.doc)
+  )
 
   return { table }
 }

@@ -19,11 +19,14 @@ import * as pro from "@budibase/pro"
 import * as api from "./api"
 import sdk from "./sdk"
 import { initialise as initialiseWebsockets } from "./websockets"
-import { automationsEnabled } from "./features"
+import { automationsEnabled, printFeatures } from "./features"
+import Koa from "koa"
+import { Server } from "http"
+import { AddressInfo } from "net"
 
 let STARTUP_RAN = false
 
-async function initRoutes(app: any) {
+async function initRoutes(app: Koa) {
   if (!env.isTest()) {
     const plugin = await bullboard.init()
     app.use(plugin)
@@ -48,27 +51,31 @@ async function initPro() {
   })
 }
 
-function shutdown(server?: any) {
+function shutdown(server?: Server) {
   if (server) {
     server.close()
     server.destroy()
   }
 }
 
-export async function startup(app?: any, server?: any) {
+export async function startup(app?: Koa, server?: Server) {
   if (STARTUP_RAN) {
     return
   }
+  printFeatures()
   STARTUP_RAN = true
-  if (server && !env.CLUSTER_MODE) {
+  if (app && server && !env.CLUSTER_MODE) {
     console.log(`Budibase running on ${JSON.stringify(server.address())}`)
-    env._set("PORT", server.address().port)
+    const address = server.address() as AddressInfo
+    env._set("PORT", address.port)
   }
   eventEmitter.emitPort(env.PORT)
   fileSystem.init()
   await redis.init()
   eventInit()
-  initialiseWebsockets(app, server)
+  if (app && server) {
+    initialiseWebsockets(app, server)
+  }
 
   // run migrations on startup if not done via http
   // not recommended in a clustered environment

@@ -17,7 +17,6 @@ import {
   basicWebhook,
 } from "./structures"
 import {
-  auth,
   cache,
   constants,
   context,
@@ -218,20 +217,45 @@ class TestConfiguration {
    */
   setEnv(newEnvVars: Partial<typeof env>): () => void {
     const oldEnv = cloneDeep(env)
-    const oldCoreEnv = cloneDeep(coreEnv)
 
     let key: keyof typeof newEnvVars
     for (key in newEnvVars) {
       env._set(key, newEnvVars[key])
-      coreEnv._set(key, newEnvVars[key])
     }
 
     return () => {
       for (const [key, value] of Object.entries(oldEnv)) {
         env._set(key, value)
       }
-
-      for (const [key, value] of Object.entries(oldCoreEnv)) {
-        coreEnv._set(key, value)
-      }
     }
   }
 
+  async withCoreEnv(
+    newEnvVars: Partial<typeof coreEnv>,
+    f: () => Promise<void>
+  ) {
+    let cleanup = this.setCoreEnv(newEnvVars)
+    try {
+      await f()
+    } finally {
+      cleanup()
+    }
+  }
+
+  /*
+   * Sets the environment variables to the given values and returns a function
+   * that can be called to reset the environment variables to their original values.
+   */
+  setCoreEnv(newEnvVars: Partial<typeof coreEnv>): () => void {
+    const oldEnv = cloneDeep(env)
+
+    let key: keyof typeof newEnvVars
+    for (key in newEnvVars) {
+      coreEnv._set(key, newEnvVars[key])
+    }
+
+    return () => {
+      for (const [key, value] of Object.entries(oldEnv)) {
+        coreEnv._set(key, value)
+      }
    }

@@ -1,3 +1,4 @@
+import { Response } from "supertest"
 import { App } from "@budibase/types"
 import TestConfiguration from "../TestConfiguration"
 import { TestAPI } from "./base"
@@ -7,12 +8,17 @@ export class ApplicationAPI extends TestAPI {
     super(config)
   }
 
-  get = async (appId: string): Promise<App> => {
+  getRaw = async (appId: string): Promise<Response> => {
     const result = await this.request
       .get(`/api/applications/${appId}/appPackage`)
       .set(this.config.defaultHeaders())
       .expect("Content-Type", /json/)
       .expect(200)
+    return result
+  }
+
+  get = async (appId: string): Promise<App> => {
+    const result = await this.getRaw(appId)
     return result.body.application as App
   }
 }

@@ -0,0 +1,99 @@
+import { FieldTypes, ObjectStoreBuckets } from "../../constants"
+import { context, db as dbCore, objectStore } from "@budibase/backend-core"
+import { RenameColumn, Row, RowAttachment, Table } from "@budibase/types"
+
+export class AttachmentCleanup {
+  static async coreCleanup(fileListFn: () => string[]): Promise<void> {
+    const appId = context.getAppId()
+    if (!dbCore.isProdAppID(appId)) {
+      const prodAppId = dbCore.getProdAppID(appId!)
+      // if prod exists, then don't allow deleting
+      const exists = await dbCore.dbExists(prodAppId)
+      if (exists) {
+        return
+      }
+    }
+    const files = fileListFn()
+    if (files.length > 0) {
+      await objectStore.deleteFiles(ObjectStoreBuckets.APPS, files)
+    }
+  }
+
+  private static async tableChange(
+    table: Table,
+    rows: Row[],
+    opts: { oldTable?: Table; rename?: RenameColumn; deleting?: boolean }
+  ) {
+    return AttachmentCleanup.coreCleanup(() => {
+      let files: string[] = []
+      const tableSchema = opts.oldTable?.schema || table.schema
+      for (let [key, schema] of Object.entries(tableSchema)) {
+        if (schema.type !== FieldTypes.ATTACHMENT) {
+          continue
+        }
+        const columnRemoved = opts.oldTable && !table.schema[key]
+        const renaming = opts.rename?.old === key
+        // old table had this column, new table doesn't - delete it
+        if ((columnRemoved && !renaming) || opts.deleting) {
+          rows.forEach(row => {
+            files = files.concat(
+              row[key].map((attachment: any) => attachment.key)
+            )
+          })
+        }
+      }
+      return files
+    })
+  }
+
+  static async tableDelete(table: Table, rows: Row[]) {
+    return AttachmentCleanup.tableChange(table, rows, { deleting: true })
+  }
+
+  static async tableUpdate(
+    table: Table,
+    rows: Row[],
+    opts: { oldTable?: Table; rename?: RenameColumn }
+  ) {
+    return AttachmentCleanup.tableChange(table, rows, opts)
+  }
+
+  static async rowDelete(table: Table, rows: Row[]) {
+    return AttachmentCleanup.coreCleanup(() => {
+      let files: string[] = []
+      for (let [key, schema] of Object.entries(table.schema)) {
+        if (schema.type !== FieldTypes.ATTACHMENT) {
+          continue
+        }
+        rows.forEach(row => {
+          files = files.concat(
+            row[key].map((attachment: any) => attachment.key)
+          )
+        })
+      }
+      return files
+    })
+  }
+
+  static rowUpdate(table: Table, opts: { row: Row; oldRow: Row }) {
+    return AttachmentCleanup.coreCleanup(() => {
+      let files: string[] = []
+      for (let [key, schema] of Object.entries(table.schema)) {
+        if (schema.type !== FieldTypes.ATTACHMENT) {
+          continue
+        }
+        const oldKeys =
+          opts.oldRow[key]?.map(
+            (attachment: RowAttachment) => attachment.key
+          ) || []
+        const newKeys =
+          opts.row[key]?.map((attachment: RowAttachment) => attachment.key) ||
+          []
+        files = files.concat(
+          oldKeys.filter((key: string) => newKeys.indexOf(key) === -1)
+        )
+      }
+      return files
+    })
+  }
+}

@@ -1,16 +1,7 @@
 import * as linkRows from "../../db/linkedRows"
-import {
-  FieldTypes,
-  AutoFieldSubTypes,
-  ObjectStoreBuckets,
-} from "../../constants"
+import { FieldTypes, AutoFieldSubTypes } from "../../constants"
 import { processFormulas, fixAutoColumnSubType } from "./utils"
-import {
-  context,
-  db as dbCore,
-  objectStore,
-  utils,
-} from "@budibase/backend-core"
+import { objectStore, utils } from "@budibase/backend-core"
 import { InternalTables } from "../../db/utils"
 import { TYPE_TRANSFORM_MAP } from "./map"
 import { FieldSubtype, Row, RowAttachment, Table } from "@budibase/types"
@@ -22,6 +13,7 @@ import {
 import { isExternalTableID } from "../../integrations/utils"
 
 export * from "./utils"
+export * from "./attachments"
 
 type AutoColumnProcessingOpts = {
   reprocessing?: boolean
@@ -30,27 +22,6 @@ type AutoColumnProcessingOpts = {
 
 const BASE_AUTO_ID = 1
 
-/**
- * Given the old state of the row and the new one after an update, this will
- * find the keys that have been removed in the updated row.
- */
-function getRemovedAttachmentKeys(
-  oldRow: Row,
-  row: Row,
-  attachmentKey: string
-) {
-  if (!oldRow[attachmentKey]) {
-    return []
-  }
-  const oldKeys = oldRow[attachmentKey].map((attachment: any) => attachment.key)
-  // no attachments in new row, all removed
-  if (!row[attachmentKey]) {
-    return oldKeys
-  }
-  const newKeys = row[attachmentKey].map((attachment: any) => attachment.key)
-  return oldKeys.filter((key: string) => newKeys.indexOf(key) === -1)
-}
-
 /**
  * This will update any auto columns that are found on the row/table with the correct information based on
  * time now and the current logged in user making the request.
@@ -288,59 +259,3 @@ export async function outputProcessing<T extends Row[] | Row>(
   }
   return (wasArray ? enriched : enriched[0]) as T
 }
-
-/**
- * Clean up any attachments that were attached to a row.
- * @param table The table from which a row is being removed.
- * @param row optional - the row being removed.
- * @param rows optional - if multiple rows being deleted can do this in bulk.
- * @param oldRow optional - if updating a row this will determine the difference.
- * @param oldTable optional - if updating a table, can supply the old table to look for
- *   deleted attachment columns.
- * @return When all attachments have been removed this will return.
- */
-export async function cleanupAttachments(
-  table: Table,
-  {
-    row,
-    rows,
-    oldRow,
-    oldTable,
-  }: { row?: Row; rows?: Row[]; oldRow?: Row; oldTable?: Table }
-): Promise<any> {
-  const appId = context.getAppId()
-  if (!dbCore.isProdAppID(appId)) {
-    const prodAppId = dbCore.getProdAppID(appId!)
-    // if prod exists, then don't allow deleting
-    const exists = await dbCore.dbExists(prodAppId)
-    if (exists) {
-      return
-    }
-  }
-  let files: string[] = []
-  function addFiles(row: Row, key: string) {
-    if (row[key]) {
-      files = files.concat(row[key].map((attachment: any) => attachment.key))
-    }
-  }
-  const schemaToUse = oldTable ? oldTable.schema : table.schema
-  for (let [key, schema] of Object.entries(schemaToUse)) {
-    if (schema.type !== FieldTypes.ATTACHMENT) {
-      continue
-    }
-    // old table had this column, new table doesn't - delete it
-    if (rows && oldTable && !table.schema[key]) {
-      rows.forEach(row => addFiles(row, key))
-    } else if (oldRow && row) {
-      // if updating, need to manage the differences
-      files = files.concat(getRemovedAttachmentKeys(oldRow, row, key))
-    } else if (row) {
-      addFiles(row, key)
-    } else if (rows) {
-      rows.forEach(row => addFiles(row, key))
-    }
-  }
-  if (files.length > 0) {
-    await objectStore.deleteFiles(ObjectStoreBuckets.APPS, files)
-  }
-}

@@ -0,0 +1,110 @@
import { AttachmentCleanup } from "../attachments"
import { FieldType, Table, Row, TableSourceType } from "@budibase/types"
import { DEFAULT_BB_DATASOURCE_ID } from "../../../constants"
import { objectStore } from "@budibase/backend-core"

const BUCKET = "prod-budi-app-assets"
const FILE_NAME = "file/thing.jpg"

jest.mock("@budibase/backend-core", () => {
  const actual = jest.requireActual("@budibase/backend-core")
  return {
    ...actual,
    objectStore: {
      deleteFiles: jest.fn(),
      ObjectStoreBuckets: actual.objectStore.ObjectStoreBuckets,
    },
    db: {
      isProdAppID: () => jest.fn(() => false),
      dbExists: () => jest.fn(() => false),
    },
  }
})

const mockedDeleteFiles = objectStore.deleteFiles as jest.MockedFunction<
  typeof objectStore.deleteFiles
>

function table(): Table {
  return {
    name: "table",
    sourceId: DEFAULT_BB_DATASOURCE_ID,
    sourceType: TableSourceType.INTERNAL,
    type: "table",
    schema: {
      attach: {
        name: "attach",
        type: FieldType.ATTACHMENT,
        constraints: {},
      },
    },
  }
}

function row(fileKey: string = FILE_NAME): Row {
  return {
    attach: [
      {
        size: 1,
        extension: "jpg",
        key: fileKey,
      },
    ],
  }
}

describe("attachment cleanup", () => {
  beforeEach(() => {
    mockedDeleteFiles.mockClear()
  })

  it("should be able to cleanup a table update", async () => {
    const originalTable = table()
    delete originalTable.schema["attach"]
    await AttachmentCleanup.tableUpdate(originalTable, [row()], {
      oldTable: table(),
    })
    expect(mockedDeleteFiles).toBeCalledWith(BUCKET, [FILE_NAME])
  })

  it("should be able to cleanup a table deletion", async () => {
    await AttachmentCleanup.tableDelete(table(), [row()])
    expect(mockedDeleteFiles).toBeCalledWith(BUCKET, [FILE_NAME])
  })

  it("should handle table column renaming", async () => {
    const updatedTable = table()
    updatedTable.schema.attach2 = updatedTable.schema.attach
    delete updatedTable.schema.attach
    await AttachmentCleanup.tableUpdate(updatedTable, [row()], {
      oldTable: table(),
      rename: { old: "attach", updated: "attach2" },
    })
    expect(mockedDeleteFiles).not.toBeCalled()
  })

  it("shouldn't cleanup if no table changes", async () => {
    await AttachmentCleanup.tableUpdate(table(), [row()], { oldTable: table() })
    expect(mockedDeleteFiles).not.toBeCalled()
  })

  it("should handle row updates", async () => {
    const updatedRow = row()
    delete updatedRow.attach
    await AttachmentCleanup.rowUpdate(table(), {
      row: updatedRow,
      oldRow: row(),
    })
    expect(mockedDeleteFiles).toBeCalledWith(BUCKET, [FILE_NAME])
  })

  it("should handle row deletion", async () => {
    await AttachmentCleanup.rowDelete(table(), [row()])
    expect(mockedDeleteFiles).toBeCalledWith(BUCKET, [FILE_NAME])
  })

  it("shouldn't cleanup attachments if row not updated", async () => {
    await AttachmentCleanup.rowUpdate(table(), { row: row(), oldRow: row() })
    expect(mockedDeleteFiles).not.toBeCalled()
  })
})

@@ -10,7 +10,7 @@
    "prebuild": "rimraf dist/",
    "build": "node ../../scripts/build.js && tsc -p tsconfig.build.json --emitDeclarationOnly --paths null",
    "build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput",
    "dev:builder": "tsc -p tsconfig.json --watch --preserveWatchOutput",
    "dev": "tsc -p tsconfig.json --watch --preserveWatchOutput",
    "check:types": "tsc -p tsconfig.json --noEmit --paths null"
  },
  "dependencies": {

@@ -32,7 +32,7 @@
        }
      ]
    },
    "dev:builder": {
    "dev": {
      "dependsOn": [
        {
          "projects": [

@@ -0,0 +1,19 @@
export enum Header {
  API_KEY = "x-budibase-api-key",
  LICENSE_KEY = "x-budibase-license-key",
  API_VER = "x-budibase-api-version",
  APP_ID = "x-budibase-app-id",
  SESSION_ID = "x-budibase-session-id",
  TYPE = "x-budibase-type",
  PREVIEW_ROLE = "x-budibase-role",
  TENANT_ID = "x-budibase-tenant-id",
  VERIFICATION_CODE = "x-budibase-verification-code",
  RETURN_VERIFICATION_CODE = "x-budibase-return-verification-code",
  RESET_PASSWORD_CODE = "x-budibase-reset-password-code",
  RETURN_RESET_PASSWORD_CODE = "x-budibase-return-reset-password-code",
  TOKEN = "x-budibase-token",
  CSRF_TOKEN = "x-csrf-token",
  CORRELATION_ID = "x-budibase-correlation-id",
  AUTHORIZATION = "authorization",
  MIGRATING_APP = "x-budibase-migrating-app",
}
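A hedged sketch of how these header constants might be attached to an internal request (the import path, URL and header values below are assumptions for illustration, not taken from this diff):

```typescript
import fetch from "node-fetch"
import { Header } from "@budibase/shared-core" // assumed import path

async function internalRequest() {
  await fetch("http://localhost:10000/api/system/environment", {
    headers: {
      [Header.API_KEY]: "budibase", // hypothetical internal API key
      [Header.TENANT_ID]: "default", // hypothetical tenant ID
    },
  })
}
```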

@@ -1,3 +1,5 @@
export * from "./api"

export const OperatorOptions = {
  Equals: {
    value: "equal",

@@ -1,39 +1,42 @@
# String templating

This package provides a common system for string templating across the Budibase Builder, client and server.
The templating is provided through the use of [Handlebars](https://handlebarsjs.com/), an extension of Mustache
which is capable of carrying out logic. We have also extended the base Handlebars functionality through the use
of a set of helpers provided through the [handlebars-helpers](https://github.com/budibase/handlebars-helpers) package.

We have not implemented all the helpers provided by the helpers package, as some of them provide functionality
we felt would not be beneficial. The following collections of helpers have been implemented (a short combined example follows the list):

1. [Math](https://github.com/budibase/handlebars-helpers/tree/master#math) - a set of useful helpers for
   carrying out logic pertaining to numbers, e.g. `avg`, `add`, `abs` and so on.
2. [Array](https://github.com/budibase/handlebars-helpers/tree/master#array) - some very specific helpers
   for use with arrays, useful for example in automations. Helpers like `first`, `last`, `after` and `join`
   can be useful for getting particular portions of arrays or turning them into strings.
3. [Number](https://github.com/budibase/handlebars-helpers/tree/master#number) - unlike the math helpers these
   are useful for converting numbers into useful formats for display, e.g. `bytes`, `addCommas` and `toPrecision`.
4. [URL](https://github.com/budibase/handlebars-helpers/tree/master#url) - very specific helpers for dealing with URLs,
   such as `encodeURI`, `escape`, `stripQueryString` and `stripProtocol`. These are primarily useful
   for building up particular URLs to hit, say, as part of an automation.
5. [String](https://github.com/budibase/handlebars-helpers/tree/master#string) - these helpers are useful for building
   strings and preparing them for display, e.g. `append`, `camelcase`, `capitalize` and `ellipsis`.
6. [Comparison](https://github.com/budibase/handlebars-helpers/tree/master#comparison) - these helpers are mainly for
   building strings when particular conditions are met, for example `and`, `or`, `gt`, `lt`, `not` and so on. This is a very
   extensive set of helpers but is mostly as would be expected from a set of logical operators.
7. [Object](https://github.com/budibase/handlebars-helpers/tree/master#object) - useful operators for parsing objects, as well
   as converting them to JSON strings.
8. [Regex](https://github.com/budibase/handlebars-helpers/tree/master#regex) - allows performing regex tests on strings that
   can be used in conditional statements.
9. [Date](https://github.com/helpers/helper-date) - last but certainly not least is a Moment-based date helper, which can
   format ISO/timestamp based dates into something human-readable. An example of this would be `{{date dateProperty "DD-MM-YYYY"}}`.
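A short combined example of a few of these helpers, using the `processStringSync` entry point described under Usage below (a sketch; the package import path is an assumption):

```typescript
import { processStringSync } from "@budibase/string-templates"

processStringSync("{{ add 1 2 }}", {}) // "3" (Math)
processStringSync("{{ capitalize name }}", { name: "joe" }) // "Joe" (String)
processStringSync("{{ join list ', ' }}", { list: ["a", "b"] }) // "a, b" (Array)
```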

## Date formatting

This package uses the standard method for formatting date times, using the following syntax (see the example after the table):

| Input | Example | Description |
| ----- | ------- | ----------- |
| YYYY | 2014 | 4 or 2 digit year. Note: Only 4 digit can be parsed on strict mode |
| YY | 14 | 2 digit year |
| Y | -25 | Year with any number of digits and sign |
| Q | 1..4 | Quarter of year. Sets month to first month in quarter. |
| M MM | 1..12 | Month number |

@@ -42,55 +45,64 @@ This package uses the standard method for formatting date times, using the follo
| Do | 1st..31st | Day of month with ordinal |
| DDD DDDD | 1..365 | Day of year |
| X | 1410715640.579 | Unix timestamp |
| x | 1410715640579 | Unix ms timestamp |
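Combining tokens from the table above (a sketch, assuming the same `processStringSync` entry point and import path as before):

```typescript
import { processStringSync } from "@budibase/string-templates"

// Ordinal day, month number and 4 digit year from a Unix ms timestamp,
// producing e.g. "14th 9 2014" for the value below.
processStringSync("{{ date time 'Do M YYYY' }}", { time: 1410715640579 })
```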

## Template format

There are two main ways that the templating system can be used; the first is very similar to what
would be produced by Mustache - a single statement:

```
Hello I'm building a {{uppercase adjective}} string with Handlebars!
```

Given a context of `{adjective: "cool"}`, the statement above will produce `Hello I'm building a COOL string with Handlebars!`.
Here we can see an example of how string helpers can be used to make a string exactly as we need it. These statements are relatively
simple; we can also stack helpers, as such: `{{ uppercase (remove string "bad") }}`, with the use of parentheses.

The other type of statement that can be made with the templating system is a conditional one, which appears as such:

```
Hello I'm building a {{ #gte score "50" }}Great{{ else }}Bad{{ /gte }} string with Handlebars!
```

In this string we can see that the string `Great` or `Bad` will be inserted depending on the state of the
`score` context variable. The comparison, string and array helpers all provide some conditional operators which can be used
in this way. There are also some operators built with a very similar syntax which produce an
iterative operation, like a for each - an example of this would be the `forEach` array helper.

## Usage

Usage of this templating package is through one of the primary functions provided by the package - these functions are
as follows (a usage sketch follows the list):

1. `processString` - `async (string, object)` - given a template string and a context object this will build a string
   using our pre-processors, post-processors and handlebars.
2. `processObject` - `async (object, object)` - carries out the functionality provided by `processString` for any string
   inside the given object. This will recurse deeply into the provided object, so for very large objects this could be slow.
3. `processStringSync` - `(string, object)` - a reduced-functionality processing of strings which is synchronous, like
   functions provided by Node (e.g. `readdirSync`).
4. `processObjectSync` - `(object, object)` - as with the sync'd string, recurses an object to process it synchronously.
5. `makePropSafe` - `(string)` - some properties cannot be handled by Handlebars, for example `Table 1` is not valid due
   to spaces found in the property name. This will update the property name to `[Table 1]`, wrapping it in literal
   specifiers so that it is safe for use in Handlebars. Ideally this function should be called for every level of an object
   being accessed, for example `[Table 1].[property name]` is the syntax that is required for Handlebars.
6. `isValid` - `(string)` - checks the given string for any templates and provides a boolean stating whether it is a valid
   template.
7. `getManifest` - returns the manifest JSON which has been generated for the helpers, describing them and their params.
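A usage sketch of these entry points (the import path is an assumption; the return values shown in comments are illustrative):

```typescript
import {
  processString,
  processObject,
  makePropSafe,
  isValid,
} from "@budibase/string-templates"

async function demo() {
  // Build a single string from a template and a context object.
  await processString("Hello {{ name }}!", { name: "Budibase" }) // "Hello Budibase!"

  // Process every string found inside an object, recursing deeply.
  await processObject({ greeting: "Hi {{ name }}" }, { name: "Joe" })

  // Wrap property names containing spaces before using them in a template.
  const binding = `{{ ${makePropSafe("Table 1")}.${makePropSafe("property name")} }}`
  isValid(binding) // true
}
```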

## Development

This library is built with [Rollup](https://rollupjs.org/guide/en/), as many of the packages built by Budibase are. We have
built the string templating package as a UMD so that it can be used by Node and browser based applications. This package also
builds Typescript stubs which, when making use of the library, will be used by your IDE to provide code completion. The following
commands are provided for development purposes:

1. `yarn build` - will build the Typescript stubs and the bundle into the `dist` directory.
2. `yarn test` - runs the test suite which will check various helpers are still functioning as
   expected and a few expected use cases.
3. `yarn dev:builder` - an internal command which is used by lerna to watch and build any changes
   to the package as part of the main `yarn dev` of the repo.
3. `yarn dev` - an internal command which is used by lerna to watch and build any changes
   to the package as part of the main `yarn dev` of the repo.

It is also important to note this package is managed in the same manner as all others in the mono-repo,
through lerna.

@@ -20,7 +20,7 @@
  ],
  "scripts": {
    "build": "tsc && rollup -c",
    "dev:builder": "tsc && rollup -cw",
    "dev": "tsc && rollup -cw",
    "test": "jest",
    "manifest": "node ./scripts/gen-collection-info.js"
  },

@@ -35,7 +35,7 @@
    "@rollup/plugin-commonjs": "^17.1.0",
    "@rollup/plugin-json": "^4.1.0",
    "doctrine": "^3.0.0",
    "jest": "29.6.2",
    "jest": "29.7.0",
    "marked": "^4.0.10",
    "rollup": "^2.36.2",
    "rollup-plugin-inject-process-env": "^1.3.1",

@@ -10,12 +10,12 @@
    "prebuild": "rimraf dist/",
    "build": "node ../../scripts/build.js && tsc -p tsconfig.build.json --emitDeclarationOnly",
    "build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput",
    "dev:builder": "tsc -p tsconfig.json --watch --preserveWatchOutput",
    "dev": "tsc -p tsconfig.json --watch --preserveWatchOutput",
    "check:types": "tsc -p tsconfig.json --noEmit --paths null"
  },
  "jest": {},
  "devDependencies": {
    "@budibase/nano": "10.1.3",
    "@budibase/nano": "10.1.4",
    "@types/koa": "2.13.4",
    "@types/node": "18.17.0",
    "@types/pouchdb": "6.4.0",

@@ -37,6 +37,7 @@ export enum DocumentType {
  USER_FLAG = "flag",
  AUTOMATION_METADATA = "meta_au",
  AUDIT_LOG = "al",
  APP_MIGRATION_METADATA = "_design/migrations",
}

// these are the core documents that make up the data, design

@@ -1,4 +1,4 @@
import Nano from "@budibase/nano"
import type Nano from "@budibase/nano"
import { AllDocsResponse, AnyDocument, Document, ViewTemplateOpts } from "../"
import { Writable } from "stream"

@@ -20,6 +20,7 @@ export enum LockName {
  UPDATE_TENANTS_DOC = "update_tenants_doc",
  PERSIST_WRITETHROUGH = "persist_writethrough",
  QUOTA_USAGE_EVENT = "quota_usage_event",
  APP_MIGRATION = "app_migrations",
}

export type LockOptions = {

@@ -21,7 +21,7 @@
    "debug": "yarn build && node --expose-gc --inspect=9223 dist/index.js",
    "run:docker:cluster": "pm2-runtime start pm2.config.js",
    "dev:stack:init": "node ./scripts/dev/manage.js init",
    "dev:builder": "npm run dev:stack:init && nodemon",
    "dev": "npm run dev:stack:init && nodemon",
    "dev:built": "yarn run dev:stack:init && yarn run run:docker",
    "test": "bash scripts/test.sh",
    "test:watch": "jest --watch",

@@ -75,26 +75,26 @@
    "@swc/jest": "0.2.27",
    "@trendyol/jest-testcontainers": "2.1.1",
    "@types/jest": "29.5.5",
    "@types/jsonwebtoken": "8.5.1",
    "@types/jsonwebtoken": "9.0.3",
    "@types/koa": "2.13.4",
    "@types/koa__router": "8.0.8",
    "@types/lodash": "4.14.200",
    "@types/node": "18.17.0",
    "@types/node-fetch": "2.6.4",
    "@types/server-destroy": "1.0.1",
    "@types/supertest": "2.0.12",
    "@types/supertest": "2.0.14",
    "@types/uuid": "8.3.4",
    "jest": "29.6.2",
    "jest": "29.7.0",
    "nodemon": "2.0.15",
    "rimraf": "3.0.2",
    "supertest": "6.2.2",
    "supertest": "6.3.3",
    "timekeeper": "2.2.0",
    "typescript": "5.2.2",
    "update-dotenv": "1.1.1"
  },
  "nx": {
    "targets": {
      "dev:builder": {
      "dev": {
        "dependsOn": [
          {
            "comment": "Required for pro usage when submodule not loaded",

@@ -1,44 +1,40 @@
#!/usr/bin/env node
const path = require("path")
const fs = require("fs")
const { parsed: existingConfig } = require("dotenv").config()
const updateDotEnv = require("update-dotenv")

async function init() {
  const envFilePath = path.join(process.cwd(), ".env")
  if (!fs.existsSync(envFilePath)) {
    const envFileJson = {
      SELF_HOSTED: 1,
      PORT: 4002,
      CLUSTER_PORT: 10000,
      JWT_SECRET: "testsecret",
      INTERNAL_API_KEY: "budibase",
      MINIO_ACCESS_KEY: "budibase",
      MINIO_SECRET_KEY: "budibase",
      REDIS_URL: "localhost:6379",
      REDIS_PASSWORD: "budibase",
      MINIO_URL: "http://localhost:4004",
      COUCH_DB_URL: "http://budibase:budibase@localhost:4005",
      COUCH_DB_USERNAME: "budibase",
      COUCH_DB_PASSWORD: "budibase",
      // empty string is false
      MULTI_TENANCY: "",
      DISABLE_ACCOUNT_PORTAL: 1,
      ACCOUNT_PORTAL_URL: "http://localhost:10001",
      ACCOUNT_PORTAL_API_KEY: "budibase",
      PLATFORM_URL: "http://localhost:10000",
      APPS_URL: "http://localhost:4001",
      SERVICE: "worker-service",
      DEPLOYMENT_ENVIRONMENT: "development",
      TENANT_FEATURE_FLAGS: "*:LICENSING,*:USER_GROUPS,*:ONBOARDING_TOUR",
      ENABLE_EMAIL_TEST_MODE: 1,
      HTTP_LOGGING: 0,
      VERSION: "0.0.0+local",
    }
    let envFile = ""
    Object.keys(envFileJson).forEach(key => {
      envFile += `${key}=${envFileJson[key]}\n`
    })
    fs.writeFileSync(envFilePath, envFile)
  let config = {
    SELF_HOSTED: "1",
    PORT: "4002",
    CLUSTER_PORT: "10000",
    JWT_SECRET: "testsecret",
    INTERNAL_API_KEY: "budibase",
    MINIO_ACCESS_KEY: "budibase",
    MINIO_SECRET_KEY: "budibase",
    REDIS_URL: "localhost:6379",
    REDIS_PASSWORD: "budibase",
    MINIO_URL: "http://localhost:4004",
    COUCH_DB_URL: "http://budibase:budibase@localhost:4005",
    COUCH_DB_USERNAME: "budibase",
    COUCH_DB_PASSWORD: "budibase",
    // empty string is false
    MULTI_TENANCY: "",
    DISABLE_ACCOUNT_PORTAL: "1",
    ACCOUNT_PORTAL_URL: "http://localhost:10001",
    ACCOUNT_PORTAL_API_KEY: "budibase",
    PLATFORM_URL: "http://localhost:10000",
    APPS_URL: "http://localhost:4001",
    SERVICE: "worker-service",
    DEPLOYMENT_ENVIRONMENT: "development",
    TENANT_FEATURE_FLAGS: "*:LICENSING,*:USER_GROUPS,*:ONBOARDING_TOUR",
    ENABLE_EMAIL_TEST_MODE: "1",
    HTTP_LOGGING: "0",
    VERSION: "0.0.0+local",
  }

  config = { ...config, ...existingConfig }

  await updateDotEnv(config)
}

// if more than init required use this to determine the command type

@@ -26,6 +26,7 @@ import {
  migrations,
  platform,
  tenancy,
  db,
} from "@budibase/backend-core"
import { checkAnyUserExists } from "../../../utilities/users"
import { isEmailConfigured } from "../../../utilities/email"

@@ -185,9 +186,27 @@ export const getAppUsers = async (ctx: Ctx<SearchUsersRequest>) => {
export const search = async (ctx: Ctx<SearchUsersRequest>) => {
  const body = ctx.request.body

  // TODO: for now only one supported search key, string.email
  if (body?.query && !userSdk.core.isSupportedUserSearch(body.query)) {
    ctx.throw(501, "Can only search by string.email or equal._id")
  // TODO: for now only two supported search keys; string.email and equal._id
  if (body?.query) {
    // Clean numeric prefixing. This will overwrite duplicate search fields,
    // but this is fine because we only support a single custom search on
    // email and id
    for (let filters of Object.values(body.query)) {
      if (filters && typeof filters === "object") {
        for (let [field, value] of Object.entries(filters)) {
          delete filters[field]
          const cleanedField = db.removeKeyNumbering(field)
          if (filters[cleanedField] !== undefined) {
            ctx.throw(400, "Only 1 filter per field is supported")
          }
          filters[cleanedField] = value
        }
      }
    }
    // Validate we aren't trying to search on any illegal fields
    if (!userSdk.core.isSupportedUserSearch(body.query)) {
      ctx.throw(400, "Can only search by string.email or equal._id")
    }
  }

  if (body.paginate === false) {

@@ -1,6 +1,25 @@
import { Ctx } from "@budibase/types"
import env from "../../../environment"
import { env as coreEnv } from "@budibase/backend-core"
import nodeFetch from "node-fetch"

let sqsAvailable: boolean
async function isSqsAvailable() {
  if (sqsAvailable !== undefined) {
    return sqsAvailable
  }

  try {
    await nodeFetch(coreEnv.COUCH_DB_SQL_URL, {
      timeout: 1000,
    })
    sqsAvailable = true
    return true
  } catch (e) {
    sqsAvailable = false
    return false
  }
}

export const fetch = async (ctx: Ctx) => {
  ctx.body = {

@@ -12,4 +31,10 @@ export const fetch = async (ctx: Ctx) => {
    baseUrl: env.PLATFORM_URL,
    isDev: env.isDev() && !env.isTest(),
  }

  if (env.SELF_HOSTED) {
    ctx.body.infrastructure = {
      sqs: await isSqsAvailable(),
    }
  }
}

@@ -590,6 +590,15 @@ describe("/api/global/users", () => {
      expect(response.body.data[0].email).toBe(user.email)
    })

    it("should be able to search by email with numeric prefixing", async () => {
      const user = await config.createUser()
      const response = await config.api.users.searchUsers({
        query: { string: { ["999:email"]: user.email } },
      })
      expect(response.body.data.length).toBe(1)
      expect(response.body.data[0].email).toBe(user.email)
    })

    it("should be able to search by _id", async () => {
      const user = await config.createUser()
      const response = await config.api.users.searchUsers({

@@ -599,13 +608,52 @@ describe("/api/global/users", () => {
      expect(response.body.data[0]._id).toBe(user._id)
    })

    it("should be able to search by _id with numeric prefixing", async () => {
      const user = await config.createUser()
      const response = await config.api.users.searchUsers({
        query: { equal: { ["1:_id"]: user._id } },
      })
      expect(response.body.data.length).toBe(1)
      expect(response.body.data[0]._id).toBe(user._id)
    })

    it("should throw an error when using multiple filters on the same field", async () => {
      const user = await config.createUser()
      await config.api.users.searchUsers(
        {
          query: {
            string: {
              ["1:email"]: user.email,
              ["2:email"]: "something else",
            },
          },
        },
        { status: 400 }
      )
    })

    it("should throw an error when using multiple filters on the same field without prefixes", async () => {
      const user = await config.createUser()
      await config.api.users.searchUsers(
        {
          query: {
            string: {
              ["_id"]: user.email,
              ["999:_id"]: "something else",
            },
          },
        },
        { status: 400 }
      )
    })

    it("should throw an error when unimplemented options used", async () => {
      const user = await config.createUser()
      await config.api.users.searchUsers(
        {
          query: { equal: { firstName: user.firstName } },
        },
        { status: 501 }
        { status: 400 }
      )
    })

@@ -1,5 +1,7 @@
import { TestConfiguration } from "../../../../tests"

jest.unmock("node-fetch")

describe("/api/system/environment", () => {
  const config = new TestConfiguration()

@@ -27,5 +29,22 @@ describe("/api/system/environment", () => {
      offlineMode: false,
    })
  })

  it("returns the expected environment for self hosters", async () => {
    await config.withEnv({ SELF_HOSTED: true }, async () => {
      const env = await config.api.environment.getEnvironment()
      expect(env.body).toEqual({
        cloud: false,
        disableAccountPortal: 0,
        isDev: false,
        multiTenancy: true,
        baseUrl: "http://localhost:10000",
        offlineMode: false,
        infrastructure: {
          sqs: false,
        },
      })
    })
  })
})

@@ -36,6 +36,7 @@ import {
} from "@budibase/types"
import API from "./api"
import jwt, { Secret } from "jsonwebtoken"
import cloneDeep from "lodash/fp/cloneDeep"

class TestConfiguration {
  server: any

@@ -240,6 +241,34 @@ class TestConfiguration {
    return { message: "Admin user only endpoint.", status: 403 }
  }

  async withEnv(newEnvVars: Partial<typeof env>, f: () => Promise<void>) {
    let cleanup = this.setEnv(newEnvVars)
    try {
      await f()
    } finally {
      cleanup()
    }
  }

  /*
   * Sets the environment variables to the given values and returns a function
   * that can be called to reset the environment variables to their original values.
   */
  setEnv(newEnvVars: Partial<typeof env>): () => void {
    const oldEnv = cloneDeep(env)

    let key: keyof typeof newEnvVars
    for (key in newEnvVars) {
      env._set(key, newEnvVars[key])
    }

    return () => {
      for (const [key, value] of Object.entries(oldEnv)) {
        env._set(key, value)
      }
    }
  }

  // USERS

  async createDefaultUser() {

@@ -29,7 +29,7 @@
    "@types/node-fetch": "2.6.4",
    "chance": "1.1.8",
    "dotenv": "16.0.1",
    "jest": "29.6.2",
    "jest": "29.7.0",
    "prettier": "2.7.1",
    "start-server-and-test": "1.14.0",
    "@swc/core": "1.3.71",