Merge branch 'master' of github.com:Budibase/budibase into feature/sql-query-aliasing

mike12345567 2023-12-12 13:47:07 +00:00
commit 185e4d2f82
58 changed files with 2229 additions and 1707 deletions

View File

@ -1,194 +0,0 @@
{
"files": [
"README.md"
],
"imageSize": 100,
"commit": false,
"contributors": [
{
"login": "shogunpurple",
"name": "Martin McKeaveney",
"avatar_url": "https://avatars1.githubusercontent.com/u/11256663?v=4",
"profile": "http://martinmck.com",
"contributions": [
"code",
"doc",
"test",
"infra"
]
},
{
"login": "mike12345567",
"name": "Michael Drury",
"avatar_url": "https://avatars2.githubusercontent.com/u/4407001?v=4",
"profile": "http://www.michaeldrury.co.uk/",
"contributions": [
"doc",
"code",
"test",
"infra"
]
},
{
"login": "aptkingston",
"name": "Andrew Kingston",
"avatar_url": "https://avatars3.githubusercontent.com/u/9075550?v=4",
"profile": "https://github.com/aptkingston",
"contributions": [
"doc",
"code",
"test",
"design"
]
},
{
"login": "mjashanks",
"name": "Michael Shanks",
"avatar_url": "https://avatars3.githubusercontent.com/u/3524181?v=4",
"profile": "https://budibase.com/",
"contributions": [
"doc",
"code",
"test"
]
},
{
"login": "kevmodrome",
"name": "Kevin Åberg Kultalahti",
"avatar_url": "https://avatars3.githubusercontent.com/u/534488?v=4",
"profile": "https://github.com/kevmodrome",
"contributions": [
"doc",
"code",
"test"
]
},
{
"login": "joebudi",
"name": "Joe",
"avatar_url": "https://avatars2.githubusercontent.com/u/49767913?v=4",
"profile": "https://www.budibase.com/",
"contributions": [
"doc",
"code",
"content",
"design"
]
},
{
"login": "Rory-Powell",
"name": "Rory Powell",
"avatar_url": "https://avatars.githubusercontent.com/u/8755148?v=4",
"profile": "https://github.com/Rory-Powell",
"contributions": [
"code",
"doc",
"test"
]
},
{
"login": "PClmnt",
"name": "Peter Clement",
"avatar_url": "https://avatars.githubusercontent.com/u/5665926?v=4",
"profile": "https://github.com/PClmnt",
"contributions": [
"code",
"doc",
"test"
]
},
{
"login": "Conor-Mack",
"name": "Conor_Mack",
"avatar_url": "https://avatars1.githubusercontent.com/u/36074859?v=4",
"profile": "https://github.com/Conor-Mack",
"contributions": [
"code",
"test"
]
},
{
"login": "pngwn",
"name": "pngwn",
"avatar_url": "https://avatars1.githubusercontent.com/u/12937446?v=4",
"profile": "https://github.com/pngwn",
"contributions": [
"code",
"test"
]
},
{
"login": "HugoLd",
"name": "HugoLd",
"avatar_url": "https://avatars0.githubusercontent.com/u/26521848?v=4",
"profile": "https://github.com/HugoLd",
"contributions": [
"code"
]
},
{
"login": "victoriasloan",
"name": "victoriasloan",
"avatar_url": "https://avatars.githubusercontent.com/u/9913651?v=4",
"profile": "https://github.com/victoriasloan",
"contributions": [
"code"
]
},
{
"login": "yashank09",
"name": "yashank09",
"avatar_url": "https://avatars.githubusercontent.com/u/37672190?v=4",
"profile": "https://github.com/yashank09",
"contributions": [
"code"
]
},
{
"login": "SOVLOOKUP",
"name": "SOVLOOKUP",
"avatar_url": "https://avatars.githubusercontent.com/u/53158137?v=4",
"profile": "https://github.com/SOVLOOKUP",
"contributions": [
"code"
]
},
{
"login": "seoulaja",
"name": "seoulaja",
"avatar_url": "https://avatars.githubusercontent.com/u/15101654?v=4",
"profile": "https://github.com/seoulaja",
"contributions": [
"translation"
]
},
{
"login": "mslourens",
"name": "Maurits Lourens",
"avatar_url": "https://avatars.githubusercontent.com/u/1907152?v=4",
"profile": "https://github.com/mslourens",
"contributions": [
"test",
"code"
]
},
{
"login": "Rory-Powell",
"name": "Rory Powell",
"avatar_url": "https://avatars.githubusercontent.com/u/8755148?v=4",
"profile": "https://github.com/Rory-Powell",
"contributions": [
"infra",
"test",
"code"
]
}
],
"contributorsPerLine": 7,
"projectName": "budibase",
"projectOwner": "Budibase",
"repoType": "github",
"repoHost": "https://github.com",
"skipCi": true,
"commitConvention": "none"
}

View File

@ -8,3 +8,6 @@ packages/backend-core/coverage
packages/server/client
packages/builder/.routify
packages/sdk/sdk
packages/account-portal/packages/server/build
packages/account-portal/packages/ui/.routify
packages/account-portal/packages/ui/build

View File

@ -1,139 +1,45 @@
# Budibase CI Pipelines
Welcome to the Budibase CI pipelines directory. This document details what each of the CI pipelines is for, and some common combinations.
## All CI Pipelines
### Note
- When running workflow dispatch jobs, ensure you always run them off the `master` branch. It defaults to `develop`, so double check before running any jobs. The exception to this case is the `deploy-release` job which requires the develop branch.
### Standard CI Build Job (budibase_ci.yml)
Triggers:
- PR or push to develop
- PR or push to master
The standard CI Build job is what runs when you raise a PR to develop or master. The standard CI Build job is what runs when you raise a PR to master.
- Installs all dependencies,
- builds the project
- run the unit tests
- Generate test coverage metrics with codecov
- Run the integration tests
- Check that the pro and account portal submodules are pointing to the latest master head
### Release Develop Job (release-develop.yml) ### Release Job (tag-release.yml)
Triggers:
- Push to develop - Manually triggered
The job responsible for building, tagging and pushing docker images out to the test and release environments. This job is responsible for building and pushing all the production services, packages and images. This is done via [budibase-deploys](https://github.com/Budibase/budibase-deploys/actions/workflows/release.yml).
- Installs all dependencies An input is required, indicating if the new version will be a `patch`, `minor` or `major` bump.
- builds the project
- run the unit tests
- publish the budibase JS packages under a prerelease tag to NPM
- build, tag and push docker images under the `develop` tag to docker hub
These images will then be pulled by the test and release environments, updating the latest automatically. Discord notifications are sent to the #infra channel when this occurs. More documentation can be found in here: https://budibase.atlassian.net/wiki/spaces/DEVOPS/pages/347930625/Production+release
### Release Job (release.yml)
Triggers:
- Push to master
This job is responsible for building and pushing the latest code to NPM and docker hub, so that it can be deployed.
- Installs all dependencies
- builds the project
- run the unit tests
- publish the budibase JS packages under a release tag to NPM (always incremented by patch versions)
- build, tag and push docker images under the `v.x.x.x` (the tag of the NPM release) tag to docker hub
### Release Selfhost Job (release-selfhost.yml)
Triggers:
- Manual Workflow Dispatch Trigger
This job is responsible for delivering the latest version of budibase to those that are self-hosting.
This job relies on the release job to have run first, so the latest image is pushed to dockerhub. This job then will pull the latest version from `lerna.json` and try to find an image in dockerhub corresponding to that version. For example, if the version in `lerna.json` is `1.0.0`:
- Pull the images for all budibase services tagged `v1.0.0` from dockerhub
- Tag these images as `latest`
- Push them back to dockerhub. This now means anyone who pulls `latest` (self hosters using docker-compose) will get the latest version.
- Build and release the budibase helm chart for kubernetes users
- Perform a github release with the latest version. You can see previous releases here (https://github.com/Budibase/budibase/releases)
### Deploy Release (deploy-release.yml)
Triggers:
- Manual Workflow Dispatch Trigger
This job is responsible for deploying to our release, cloud kubernetes environment. You must run the release job first, to ensure that the latest images have been built and pushed to docker hub. After kicking off this job, the following will occur:
- Checks out the release branch
- Pulls the latest `values.yaml` from budibase infra, a private repo containing Budibase's infrastructure configuration
- Gets the latest budibase version from `lerna.json`, if it hasn't been specified in the workflow when you kicked it off
- Configures AWS Credentials
- Deploys the helm chart in the budibase repo to our preproduction EKS cluster, injecting the `values.yaml` we pulled from budibase-infra
- Fires off a Discord webhook in the #infra channel to show that the deployment completed successfully.
### Deploy Preprod (deploy-preprod.yml)
Triggers:
- Manual Workflow Dispatch Trigger
This job is responsible for deploying to our preprod, cloud kubernetes environment. You must run the release job first, to ensure that the latest images have been built and pushed to docker hub. After kicking off this job, the following will occur:
- Checks out the master branch
- Pulls the latest `values.yaml` from budibase infra, a private repo containing Budibase's infrastructure configuration
- Gets the latest budibase version from `lerna.json`, if it hasn't been specified in the workflow when you kicked it off
- Configures AWS Credentials
- Deploys the helm chart in the budibase repo to our preprod EKS cluster, injecting the `values.yaml` we pulled from budibase-infra
- Fires off a Discord webhook in the #infra channel to show that the deployment completed successfully.
### Deploy Production (deploy-cloud.yml)
Triggers:
- Manual Workflow Dispatch Trigger
This job is responsible for deploying to our production, cloud kubernetes environment. You must run the release job first, to ensure that the latest images have been built and pushed to docker hub. You can also manually enter a version number for this job, so you can perform rollbacks or upgrade to a specific version. After kicking off this job, the following will occur:
- Checks out the master branch
- Pulls the latest `values.yaml` from budibase infra, a private repo containing Budibase's infrastructure configuration
- Gets the latest budibase version from `lerna.json`, if it hasn't been specified in the workflow when you kicked it off
- Configures AWS Credentials
- Deploys the helm chart in the budibase repo to our production EKS cluster, injecting the `values.yaml` we pulled from budibase-infra
- Fires off a Discord webhook in the #infra channel to show that the deployment completed successfully.
## Common Workflows
### Deploy Changes to Production (Release)
- Merge `develop` into `master` - Merge your changes into `master`
- Wait for budibase CI job and release job to run - Run `tag-release.yml`
- Run cloud deploy job - Check the progress in [budibase-deploys](https://github.com/Budibase/budibase-deploys/actions/workflows/release.yml)
- Run release selfhost job
### Deploy Changes to Production (Hotfix)
- Branch off `master`
- Perform your hotfix
- Merge back into `master`
- Wait for budibase CI job and release job to run
- Run cloud deploy job
- Run release selfhost job
### Rollback A Bad Cloud Deployment
- Kick off cloud deploy job Rollback documentation can be found in here.
- Ensure you are running off master https://budibase.atlassian.net/wiki/spaces/DEVOPS/pages/347930625/Production+release#Rollback
- Enter the version number of the last known good version of budibase. For example `1.0.0`

View File

@ -246,7 +246,57 @@ jobs:
if (submoduleCommit !== baseCommit) { if (submoduleCommit !== baseCommit) {
console.error('Submodule commit does not match the latest commit on the "${{ steps.get_pro_commits.outputs.target_branch }}" branch.'); console.error('Submodule commit does not match the latest commit on the "${{ steps.get_pro_commits.outputs.target_branch }}" branch.');
console.error('Refer to the pro repo to merge your changes: https://github.com/Budibase/budibase-pro/blob/develop/docs/getting_started.md') console.error('Refer to the pro repo to merge your changes: https://github.com/Budibase/budibase-pro/blob/master/docs/getting_started.md')
process.exit(1);
} else {
console.log('All good, the submodule had been merged and setup correctly!')
}
check-accountportal-submodule:
runs-on: ubuntu-latest
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase'
steps:
- name: Checkout repo and submodules
uses: actions/checkout@v3
with:
submodules: true
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
fetch-depth: 0
- name: Check account portal commit
id: get_accountportal_commits
run: |
cd packages/account-portal
accountportal_commit=$(git rev-parse HEAD)
branch="${{ github.base_ref || github.ref_name }}"
echo "Running on branch '$branch' (base_ref=${{ github.base_ref }}, ref_name=${{ github.head_ref }})"
base_commit=$(git rev-parse origin/master)
if [[ ! -z $base_commit ]]; then
echo "target_branch=$branch"
echo "target_branch=$branch" >> "$GITHUB_OUTPUT"
echo "accountportal_commit=$accountportal_commit"
echo "accountportal_commit=$accountportal_commit" >> "$GITHUB_OUTPUT"
echo "base_commit=$base_commit"
echo "base_commit=$base_commit" >> "$GITHUB_OUTPUT"
else
echo "Nothing to do - branch to branch merge."
fi
- name: Check submodule merged to base branch
if: ${{ steps.get_accountportal_commits.outputs.base_commit != '' }}
uses: actions/github-script@v4
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
const submoduleCommit = '${{ steps.get_accountportal_commits.outputs.accountportal_commit }}';
const baseCommit = '${{ steps.get_accountportal_commits.outputs.base_commit }}';
if (submoduleCommit !== baseCommit) {
console.error('Submodule commit does not match the latest commit on the "${{ steps.get_accountportal_commits.outputs.target_branch }}" branch.');
console.error('Refer to the account portal repo to merge your changes: https://github.com/Budibase/account-portal/blob/master/docs/index.md')
process.exit(1); process.exit(1);
} else { } else {
console.log('All good, the submodule had been merged and setup correctly!') console.log('All good, the submodule had been merged and setup correctly!')

.github/workflows/force-release.yml vendored Normal file
View File

@ -0,0 +1,46 @@
name: Forced release
concurrency:
group: tag-release
cancel-in-progress: false
on:
workflow_dispatch:
jobs:
ensure-is-master-tag:
name: Ensure is a master tag
runs-on: qa-arc-runner-set
steps:
- name: Checkout monorepo
uses: actions/checkout@v4
with:
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
fetch-tags: true
fetch-depth: 0
- name: Fail if ref is not a tag
run: |
if ! git show-ref -q --verify "refs/tags/${{ github.ref_name }}" 2>/dev/null; then
echo "'${{ github.ref_name }}' is not a valid tag."
exit 1
fi
- name: Fail if tag is not in master
run: |
if ! git merge-base --is-ancestor ${{ github.ref_name }} origin/master; then
echo "Tag is not in master. Release can only execute tags that are present on the master branch"
exit 1
fi
trigger-release:
needs: [ensure-is-master-tag]
runs-on: ubuntu-latest
steps:
- uses: peter-evans/repository-dispatch@v2
with:
repository: budibase/budibase-deploys
event-type: release-prod
token: ${{ secrets.GH_ACCESS_TOKEN }}
client-payload: |-
{
"TAG": "${{ github.ref_name }}"
}

.gitignore vendored
View File

@ -1,4 +1,3 @@
builder/*
.data/
.temp/
packages/server/runtime_apps/
@ -41,8 +40,11 @@ bower_components
build/Release
# Dependency directories
/node_modules/
jspm_packages/
*.min.js
*.map
node_modules/
dist/
# TypeScript v1 declaration files
typings/

.gitmodules vendored
View File

@ -1,3 +1,6 @@
[submodule "packages/pro"]
path = packages/pro
url = git@github.com:Budibase/budibase-pro.git
[submodule "packages/account-portal"]
path = packages/account-portal
url = git@github.com:Budibase/account-portal.git

View File

@ -8,4 +8,7 @@ packages/worker/coverage
packages/backend-core/coverage
packages/builder/.routify
packages/sdk/sdk
packages/pro/coverage
packages/account-portal/packages/ui/build
packages/account-portal/packages/ui/.routify
packages/account-portal/packages/server/build

View File

@ -84,7 +84,7 @@ Component libraries are collections of components as well as the definition of t
- If the project diverges from your branch, please rebase instead of merging. This makes the commit graph easier to read.
- Once your work is completed, please raise a PR against the `develop` branch with some information about what has changed and why. - Once your work is completed, please raise a PR against the `master` branch with some information about what has changed and why.
### Getting Started For Contributors
@ -246,7 +246,7 @@ From here - to develop a change in pro, you can follow the below flow:
cd packages/pro
# get the base branch you are working from (same as monorepo)
git fetch
git checkout <develop | master> git checkout master
# create a branch, named the same as the branch in your monorepo
git checkout -b <some branch>
... make changes

View File

@ -22,6 +22,6 @@
"@types/react": "17.0.39", "@types/react": "17.0.39",
"eslint": "8.10.0", "eslint": "8.10.0",
"eslint-config-next": "12.1.0", "eslint-config-next": "12.1.0",
"typescript": "4.6.2" "typescript": "5.2.2"
} }
} }

View File

@ -1,8 +1,10 @@
{ {
"version": "2.13.35", "version": "2.13.37",
"npmClient": "yarn", "npmClient": "yarn",
"packages": [ "packages": [
"packages/*" "packages/*",
"!packages/account-portal",
"packages/account-portal/packages/*"
], ],
"useNx": true, "useNx": true,
"command": { "command": {

View File

@ -6,25 +6,25 @@
"@babel/eslint-parser": "^7.22.5", "@babel/eslint-parser": "^7.22.5",
"@babel/preset-env": "^7.22.5", "@babel/preset-env": "^7.22.5",
"@esbuild-plugins/tsconfig-paths": "^0.1.2", "@esbuild-plugins/tsconfig-paths": "^0.1.2",
"@typescript-eslint/parser": "6.7.2", "@typescript-eslint/parser": "6.9.0",
"esbuild": "^0.18.17", "esbuild": "^0.18.17",
"esbuild-node-externals": "^1.8.0", "esbuild-node-externals": "^1.8.0",
"eslint": "^8.44.0", "eslint": "^8.52.0",
"eslint-plugin-import": "^2.29.0", "eslint-plugin-import": "^2.29.0",
"eslint-plugin-local-rules": "^2.0.0", "eslint-plugin-local-rules": "^2.0.0",
"eslint-plugin-svelte": "^2.32.2", "eslint-plugin-svelte": "^2.34.0",
"husky": "^8.0.3", "husky": "^8.0.3",
"kill-port": "^1.6.1", "kill-port": "^1.6.1",
"lerna": "7.1.1", "lerna": "7.1.1",
"madge": "^6.0.0", "madge": "^6.0.0",
"minimist": "^1.2.8",
"nx": "16.4.3", "nx": "16.4.3",
"nx-cloud": "16.0.5", "nx-cloud": "16.0.5",
"prettier": "2.8.8", "prettier": "2.8.8",
"prettier-plugin-svelte": "^2.3.0", "prettier-plugin-svelte": "^2.3.0",
"svelte": "3.49.0", "svelte": "3.49.0",
"svelte-eslint-parser": "^0.32.0", "svelte-eslint-parser": "^0.33.1",
"typescript": "5.2.2" "typescript": "5.2.2",
"yargs": "^17.7.2"
}, },
"scripts": { "scripts": {
"preinstall": "node scripts/syncProPackage.js", "preinstall": "node scripts/syncProPackage.js",
@ -39,13 +39,16 @@
"nuke": "yarn run nuke:packages && yarn run nuke:docker", "nuke": "yarn run nuke:packages && yarn run nuke:docker",
"nuke:packages": "yarn run restore", "nuke:packages": "yarn run restore",
"nuke:docker": "lerna run --stream dev:stack:nuke", "nuke:docker": "lerna run --stream dev:stack:nuke",
"clean": "lerna clean -y", "clean": "lerna clean -y && echo Cleaning top level node modules 🧹 && rm -rf ./node_modules && echo Done! 🚀",
"kill-builder": "kill-port 3000", "kill-builder": "kill-port 3000",
"kill-server": "kill-port 4001 4002", "kill-server": "kill-port 4001 4002",
"kill-all": "yarn run kill-builder && yarn run kill-server", "kill-accountportal": "kill-port 3001 4003",
"dev": "yarn run kill-all && lerna run --parallel prebuild && lerna run --stream dev:builder", "kill-all": "yarn run kill-builder && yarn run kill-server && yarn kill-accountportal",
"dev:noserver": "yarn run kill-builder && lerna run --stream dev:stack:up && lerna run --stream dev:builder --ignore @budibase/backend-core --ignore @budibase/server --ignore @budibase/worker", "dev": "yarn run kill-all && lerna run --parallel prebuild && lerna run --stream dev --ignore=@budibase/account-portal-ui --ignore @budibase/account-portal-server",
"dev:server": "yarn run kill-server && lerna run --stream dev:builder --scope @budibase/worker --scope @budibase/server", "dev:noserver": "yarn run kill-builder && lerna run --stream dev:stack:up && lerna run --stream dev --ignore @budibase/backend-core --ignore @budibase/server --ignore @budibase/worker --ignore=@budibase/account-portal-ui --ignore @budibase/account-portal-server",
"dev:server": "yarn run kill-server && lerna run --stream dev --scope @budibase/worker --scope @budibase/server",
"dev:accountportal": "yarn kill-accountportal && lerna run dev --stream --scope @budibase/account-portal-ui --scope @budibase/account-portal-server",
"dev:all": "yarn run kill-all && lerna run --stream dev",
"dev:built": "yarn run kill-all && cd packages/server && yarn dev:stack:up && cd ../../ && lerna run --stream dev:built", "dev:built": "yarn run kill-all && cd packages/server && yarn dev:stack:up && cd ../../ && lerna run --stream dev:built",
"dev:docker": "yarn build --scope @budibase/server --scope @budibase/worker && docker-compose -f hosting/docker-compose.build.yaml -f hosting/docker-compose.dev.yaml --env-file hosting/.env up --build --scale proxy-service=0", "dev:docker": "yarn build --scope @budibase/server --scope @budibase/worker && docker-compose -f hosting/docker-compose.build.yaml -f hosting/docker-compose.dev.yaml --env-file hosting/.env up --build --scale proxy-service=0",
"test": "lerna run --stream test --stream", "test": "lerna run --stream test --stream",
@ -79,11 +82,14 @@
"security:audit": "node scripts/audit.js", "security:audit": "node scripts/audit.js",
"postinstall": "husky install", "postinstall": "husky install",
"submodules:load": "git submodule init && git submodule update && yarn", "submodules:load": "git submodule init && git submodule update && yarn",
"submodules:unload": "git submodule deinit --all && yarn" "submodules:unload": "git submodule deinit --all && yarn",
"add-app-migration": "node scripts/add-app-migration.js --title"
}, },
"workspaces": { "workspaces": {
"packages": [ "packages": [
"packages/*" "packages/*",
"!packages/account-portal",
"packages/account-portal/packages/*"
] ]
}, },
"resolutions": { "resolutions": {

@ -0,0 +1 @@
Subproject commit a0b13270c36dd188e2a953d026b4560a1208008e

View File

@ -21,7 +21,7 @@
"test:watch": "jest --watchAll" "test:watch": "jest --watchAll"
}, },
"dependencies": { "dependencies": {
"@budibase/nano": "10.1.3", "@budibase/nano": "10.1.4",
"@budibase/pouchdb-replication-stream": "1.2.10", "@budibase/pouchdb-replication-stream": "1.2.10",
"@budibase/shared-core": "0.0.0", "@budibase/shared-core": "0.0.0",
"@budibase/types": "0.0.0", "@budibase/types": "0.0.0",
@ -73,8 +73,8 @@
"@types/uuid": "8.3.4", "@types/uuid": "8.3.4",
"chance": "1.1.8", "chance": "1.1.8",
"ioredis-mock": "8.9.0", "ioredis-mock": "8.9.0",
"jest": "29.6.2", "jest": "29.7.0",
"jest-environment-node": "29.6.2", "jest-environment-node": "29.7.0",
"jest-serial-runner": "1.2.1", "jest-serial-runner": "1.2.1",
"pino-pretty": "10.0.0", "pino-pretty": "10.0.0",
"pouchdb-adapter-memory": "7.2.2", "pouchdb-adapter-memory": "7.2.2",

View File

@ -260,12 +260,12 @@ export async function listAllObjects(bucketName: string, path: string) {
} }
/** /**
* Generate a presigned url with a default TTL of 36 hours * Generate a presigned url with a default TTL of 1 hour
*/ */
export function getPresignedUrl( export function getPresignedUrl(
bucketName: string, bucketName: string,
key: string, key: string,
durationSeconds: number = 129600 durationSeconds: number = 3600
) { ) {
const objectStore = ObjectStore(bucketName, { presigning: true }) const objectStore = ObjectStore(bucketName, { presigning: true })
const params = { const params = {
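For illustration, a minimal usage sketch of the function changed above. Only the `getPresignedUrl` signature and the old/new default durations come from this hunk; the import path and bucket/key values are assumptions.

```
import { objectStore } from "@budibase/backend-core"

// Relies on the new default TTL of 1 hour (3600 seconds).
const shortLived = objectStore.getPresignedUrl("app-assets", "reports/latest.pdf")

// Callers that still need the old 36 hour window can pass the duration explicitly.
const longLived = objectStore.getPresignedUrl(
  "app-assets",
  "reports/latest.pdf",
  129600
)
```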

View File

@ -3,4 +3,5 @@ export enum JobQueue {
APP_BACKUP = "appBackupQueue", APP_BACKUP = "appBackupQueue",
AUDIT_LOG = "auditLogQueue", AUDIT_LOG = "auditLogQueue",
SYSTEM_EVENT_QUEUE = "systemEventQueue", SYSTEM_EVENT_QUEUE = "systemEventQueue",
APP_MIGRATION = "appMigration",
} }

View File

@ -87,6 +87,7 @@ enum QueueEventType {
APP_BACKUP_EVENT = "app-backup-event", APP_BACKUP_EVENT = "app-backup-event",
AUDIT_LOG_EVENT = "audit-log-event", AUDIT_LOG_EVENT = "audit-log-event",
SYSTEM_EVENT = "system-event", SYSTEM_EVENT = "system-event",
APP_MIGRATION = "app-migration",
} }
const EventTypeMap: { [key in JobQueue]: QueueEventType } = { const EventTypeMap: { [key in JobQueue]: QueueEventType } = {
@ -94,6 +95,7 @@ const EventTypeMap: { [key in JobQueue]: QueueEventType } = {
[JobQueue.APP_BACKUP]: QueueEventType.APP_BACKUP_EVENT, [JobQueue.APP_BACKUP]: QueueEventType.APP_BACKUP_EVENT,
[JobQueue.AUDIT_LOG]: QueueEventType.AUDIT_LOG_EVENT, [JobQueue.AUDIT_LOG]: QueueEventType.AUDIT_LOG_EVENT,
[JobQueue.SYSTEM_EVENT_QUEUE]: QueueEventType.SYSTEM_EVENT, [JobQueue.SYSTEM_EVENT_QUEUE]: QueueEventType.SYSTEM_EVENT,
[JobQueue.APP_MIGRATION]: QueueEventType.APP_MIGRATION,
} }
function logging(queue: Queue, jobQueue: JobQueue) { function logging(queue: Queue, jobQueue: JobQueue) {

View File

@ -6,7 +6,7 @@
"scripts": { "scripts": {
"build": "routify -b && vite build --emptyOutDir", "build": "routify -b && vite build --emptyOutDir",
"start": "routify -c rollup", "start": "routify -c rollup",
"dev:builder": "routify -c dev:vite", "dev": "routify -c dev:vite",
"dev:vite": "vite --host 0.0.0.0", "dev:vite": "vite --host 0.0.0.0",
"rollup": "rollup -c -w", "rollup": "rollup -c -w",
"test": "vitest run", "test": "vitest run",
@ -61,9 +61,9 @@
"@codemirror/theme-one-dark": "^6.1.2", "@codemirror/theme-one-dark": "^6.1.2",
"@codemirror/view": "^6.11.2", "@codemirror/view": "^6.11.2",
"@fontsource/source-sans-pro": "^5.0.3", "@fontsource/source-sans-pro": "^5.0.3",
"@fortawesome/fontawesome-svg-core": "^6.2.1", "@fortawesome/fontawesome-svg-core": "^6.4.2",
"@fortawesome/free-brands-svg-icons": "^6.2.1", "@fortawesome/free-brands-svg-icons": "^6.4.2",
"@fortawesome/free-solid-svg-icons": "^6.2.1", "@fortawesome/free-solid-svg-icons": "^6.4.2",
"@spectrum-css/page": "^3.0.1", "@spectrum-css/page": "^3.0.1",
"@spectrum-css/vars": "^3.0.1", "@spectrum-css/vars": "^3.0.1",
"codemirror": "^5.59.0", "codemirror": "^5.59.0",
@ -78,25 +78,24 @@
"svelte-dnd-action": "^0.9.8", "svelte-dnd-action": "^0.9.8",
"svelte-loading-spinners": "^0.1.1", "svelte-loading-spinners": "^0.1.1",
"svelte-portal": "1.0.0", "svelte-portal": "1.0.0",
"yup": "0.29.2" "yup": "^0.32.11"
}, },
"devDependencies": { "devDependencies": {
"@babel/core": "^7.12.14",
"@babel/plugin-transform-runtime": "^7.13.10", "@babel/plugin-transform-runtime": "^7.13.10",
"@babel/preset-env": "^7.13.12", "@babel/preset-env": "^7.13.12",
"@rollup/plugin-replace": "^5.0.3", "@rollup/plugin-replace": "^5.0.3",
"@roxi/routify": "2.18.12", "@roxi/routify": "2.18.12",
"@sveltejs/vite-plugin-svelte": "1.0.1", "@sveltejs/vite-plugin-svelte": "1.4.0",
"@testing-library/jest-dom": "5.17.0", "@testing-library/jest-dom": "5.17.0",
"@testing-library/svelte": "^3.2.2", "@testing-library/svelte": "^3.2.2",
"babel-jest": "29.6.2", "babel-jest": "^29.6.2",
"identity-obj-proxy": "^3.0.0", "identity-obj-proxy": "^3.0.0",
"jest": "29.6.2", "jest": "29.7.0",
"jsdom": "^21.1.1", "jsdom": "^21.1.1",
"ncp": "^2.0.0", "ncp": "^2.0.0",
"svelte": "^3.48.0", "svelte": "^3.49.0",
"svelte-jester": "^1.3.2", "svelte-jester": "^1.3.2",
"vite": "^4.4.11", "vite": "^4.5.0",
"vite-plugin-static-copy": "^0.17.0", "vite-plugin-static-copy": "^0.17.0",
"vitest": "^0.29.2" "vitest": "^0.29.2"
}, },
@ -115,7 +114,7 @@
} }
] ]
}, },
"dev:builder": { "dev": {
"dependsOn": [ "dependsOn": [
{ {
"projects": [ "projects": [

View File

@ -125,7 +125,6 @@
// Handler for DatasourceModal confirmation, move to screen access select // Handler for DatasourceModal confirmation, move to screen access select
const confirmScreenDatasources = async ({ templates }) => { const confirmScreenDatasources = async ({ templates }) => {
console.log(templates)
selectedTemplates = templates selectedTemplates = templates
screenAccessRoleModal.show() screenAccessRoleModal.show()
} }

View File

@ -1,6 +1,5 @@
<script> <script>
import { import {
banner,
Heading, Heading,
Layout, Layout,
Button, Button,
@ -11,7 +10,6 @@
Notification, Notification,
Body, Body,
Search, Search,
BANNER_TYPES,
} from "@budibase/bbui" } from "@budibase/bbui"
import Spinner from "components/common/Spinner.svelte" import Spinner from "components/common/Spinner.svelte"
import CreateAppModal from "components/start/CreateAppModal.svelte" import CreateAppModal from "components/start/CreateAppModal.svelte"
@ -200,20 +198,6 @@
if (usersLimitLockAction) { if (usersLimitLockAction) {
usersLimitLockAction() usersLimitLockAction()
} }
if (!$admin.isDev) {
await banner.show({
messages: [
{
message:
"We've updated our pricing - see our website to learn more.",
type: BANNER_TYPES.NEUTRAL,
extraButtonText: "Learn More",
extraButtonAction: () =>
window.open("https://budibase.com/pricing"),
},
],
})
}
} catch (error) { } catch (error) {
notifications.error("Error getting init info") notifications.error("Error getting init info")
} }

View File

@ -134,6 +134,10 @@ export default defineConfig(({ mode }) => {
find: "@budibase/shared-core", find: "@budibase/shared-core",
replacement: path.resolve("../shared-core/src"), replacement: path.resolve("../shared-core/src"),
}, },
{
find: "@budibase/bbui",
replacement: path.resolve("../bbui/src"),
},
], ],
}, },
} }

View File

@ -52,11 +52,11 @@
"yaml": "^2.1.1" "yaml": "^2.1.1"
}, },
"devDependencies": { "devDependencies": {
"@types/jest": "29.5.3", "@types/jest": "29.5.5",
"@types/node-fetch": "2.6.4", "@types/node-fetch": "2.6.4",
"@types/pouchdb": "^6.4.0", "@types/pouchdb": "^6.4.0",
"renamer": "^4.0.0", "renamer": "^4.0.0",
"ts-node": "^10.9.1", "ts-node": "10.8.1",
"typescript": "5.2.2" "typescript": "5.2.2"
} }
} }

View File

@ -16,7 +16,7 @@
}, },
"scripts": { "scripts": {
"build": "rollup -c", "build": "rollup -c",
"dev:builder": "rollup -cw" "dev": "rollup -cw"
}, },
"dependencies": { "dependencies": {
"@budibase/bbui": "0.0.0", "@budibase/bbui": "0.0.0",
@ -78,7 +78,7 @@
} }
] ]
}, },
"dev:builder": { "dev": {
"dependsOn": [ "dependsOn": [
{ {
"projects": [ "projects": [

View File

@ -9,8 +9,8 @@
"@budibase/bbui": "0.0.0", "@budibase/bbui": "0.0.0",
"@budibase/shared-core": "0.0.0", "@budibase/shared-core": "0.0.0",
"dayjs": "^1.10.8", "dayjs": "^1.10.8",
"lodash": "^4.17.21", "lodash": "4.17.21",
"socket.io-client": "^4.6.1", "socket.io-client": "^4.6.1",
"svelte": "^3.46.2" "svelte": "^3.49.0"
} }
} }

@ -1 +1 @@
Subproject commit 1037b032d49244678204704d1bca779a29e395eb Subproject commit 056c2093dbc93d9a10ea9f5050c84a84edd8100c

packages/server/README.md Normal file
View File

@ -0,0 +1,14 @@
# Budibase server project
This project contains all the server specific logic required to run a Budibase app
## App migrations
A migration system has been created in order to modify existing apps when breaking changes are added. These migrations run on app startup (from both the client side and the builder side), blocking access until they have been applied correctly.
### Create a new migration
In order to add a new migration:
1. Run `yarn add-app-migration [title]`
2. Write your code in the newly created file (see the sketch below)
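As a rough, hypothetical sketch (the exact file the script generates is not shown here), a migration boils down to an `AppMigration` entry from the `appMigrations` module elsewhere in this diff: a sortable `id` plus an async `func` that runs inside the target app's context. The id and document below are placeholders.

```
// Illustrative only - the real id and file name are produced by add-app-migration.
import { context } from "@budibase/backend-core"

// Assumed format: a sortable timestamp prefix followed by the migration title.
export const id = "20231212120000_backfill_table_metadata"

export const func = async () => {
  // The migration processor runs this inside the app's context,
  // so the app database is available directly.
  const db = context.getAppDB()
  // ... apply the (idempotent) changes for this app here, e.g.
  await db.put({ _id: "example_doc", note: "illustrative only" })
}

// The entry is then listed in the appMigrations migrations file as { id, func }.
```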

View File

@ -23,7 +23,7 @@
"dev:stack:up": "node scripts/dev/manage.js up", "dev:stack:up": "node scripts/dev/manage.js up",
"dev:stack:down": "node scripts/dev/manage.js down", "dev:stack:down": "node scripts/dev/manage.js down",
"dev:stack:nuke": "node scripts/dev/manage.js nuke", "dev:stack:nuke": "node scripts/dev/manage.js nuke",
"dev:builder": "yarn run dev:stack:up && nodemon", "dev": "yarn run dev:stack:up && nodemon",
"dev:built": "yarn run dev:stack:up && yarn run run:docker", "dev:built": "yarn run dev:stack:up && yarn run run:docker",
"specs": "ts-node specs/generate.ts && openapi-typescript specs/openapi.yaml --output src/definitions/openapi.ts", "specs": "ts-node specs/generate.ts && openapi-typescript specs/openapi.yaml --output src/definitions/openapi.ts",
"initialise": "node scripts/initialise.js", "initialise": "node scripts/initialise.js",
@ -111,7 +111,6 @@
"xml2js": "0.5.0" "xml2js": "0.5.0"
}, },
"devDependencies": { "devDependencies": {
"@babel/core": "7.17.4",
"@babel/preset-env": "7.16.11", "@babel/preset-env": "7.16.11",
"@swc/core": "1.3.71", "@swc/core": "1.3.71",
"@swc/jest": "0.2.27", "@swc/jest": "0.2.27",
@ -128,20 +127,20 @@
"@types/oracledb": "5.2.2", "@types/oracledb": "5.2.2",
"@types/pg": "8.6.6", "@types/pg": "8.6.6",
"@types/server-destroy": "1.0.1", "@types/server-destroy": "1.0.1",
"@types/supertest": "2.0.12", "@types/supertest": "2.0.14",
"@types/tar": "6.1.5", "@types/tar": "6.1.5",
"apidoc": "0.50.4", "apidoc": "0.50.4",
"copyfiles": "2.4.1", "copyfiles": "2.4.1",
"docker-compose": "0.23.17", "docker-compose": "0.23.17",
"jest": "29.6.2", "jest": "29.7.0",
"jest-openapi": "0.14.2", "jest-openapi": "0.14.2",
"jest-runner": "29.6.2", "jest-runner": "29.7.0",
"jest-serial-runner": "1.2.1", "jest-serial-runner": "1.2.1",
"nodemon": "2.0.15", "nodemon": "2.0.15",
"openapi-typescript": "5.2.0", "openapi-typescript": "5.2.0",
"path-to-regexp": "6.2.0", "path-to-regexp": "6.2.0",
"rimraf": "3.0.2", "rimraf": "3.0.2",
"supertest": "6.2.2", "supertest": "6.3.3",
"swagger-jsdoc": "6.1.0", "swagger-jsdoc": "6.1.0",
"timekeeper": "2.2.0", "timekeeper": "2.2.0",
"ts-node": "10.8.1", "ts-node": "10.8.1",
@ -155,7 +154,7 @@
}, },
"nx": { "nx": {
"targets": { "targets": {
"dev:builder": { "dev": {
"dependsOn": [ "dependsOn": [
{ {
"comment": "Required for pro usage when submodule not loaded", "comment": "Required for pro usage when submodule not loaded",

View File

@ -28,6 +28,7 @@ async function init() {
INTERNAL_API_KEY: "budibase", INTERNAL_API_KEY: "budibase",
ACCOUNT_PORTAL_URL: "http://localhost:10001", ACCOUNT_PORTAL_URL: "http://localhost:10001",
ACCOUNT_PORTAL_API_KEY: "budibase", ACCOUNT_PORTAL_API_KEY: "budibase",
PLATFORM_URL: "http://localhost:10000",
JWT_SECRET: "testsecret", JWT_SECRET: "testsecret",
ENCRYPTION_KEY: "testsecret", ENCRYPTION_KEY: "testsecret",
REDIS_PASSWORD: "budibase", REDIS_PASSWORD: "budibase",

View File

@ -52,6 +52,7 @@ import { BASE_LAYOUT_PROP_IDS } from "../../constants/layouts"
import sdk from "../../sdk" import sdk from "../../sdk"
import { builderSocket } from "../../websockets" import { builderSocket } from "../../websockets"
import { sdk as sharedCoreSDK } from "@budibase/shared-core" import { sdk as sharedCoreSDK } from "@budibase/shared-core"
import * as appMigrations from "../../appMigrations"
// utility function, need to do away with this // utility function, need to do away with this
async function getLayouts() { async function getLayouts() {
@ -336,6 +337,12 @@ async function performAppCreate(ctx: UserCtx) {
await createApp(appId) await createApp(appId)
} }
// Initialise the app migration version as the latest one
await appMigrations.updateAppMigrationMetadata({
appId,
version: appMigrations.latestMigration,
})
await cache.app.invalidateAppMetadata(appId, newApplication) await cache.app.invalidateAppMetadata(appId, newApplication)
return newApplication return newApplication
}) })

View File

@ -4,6 +4,7 @@ import currentApp from "../middleware/currentapp"
import zlib from "zlib" import zlib from "zlib"
import { mainRoutes, staticRoutes, publicRoutes } from "./routes" import { mainRoutes, staticRoutes, publicRoutes } from "./routes"
import { middleware as pro } from "@budibase/pro" import { middleware as pro } from "@budibase/pro"
import migrations from "../middleware/appMigrations"
export { shutdown } from "./routes/public" export { shutdown } from "./routes/public"
const compress = require("koa-compress") const compress = require("koa-compress")
@ -47,6 +48,8 @@ router
// @ts-ignore // @ts-ignore
.use(currentApp) .use(currentApp)
.use(auth.auditLog) .use(auth.auditLog)
// @ts-ignore
.use(migrations)
// authenticated routes // authenticated routes
for (let route of mainRoutes) { for (let route of mainRoutes) {

View File

@ -0,0 +1,90 @@
import { Duration, cache, context, db, env } from "@budibase/backend-core"
import { Database, DocumentType, Document } from "@budibase/types"
export interface AppMigrationDoc extends Document {
version: string
history: Record<string, { runAt: string }>
}
const EXPIRY_SECONDS = Duration.fromDays(1).toSeconds()
async function getFromDB(appId: string) {
return db.doWithDB(
appId,
(db: Database) => {
return db.get<AppMigrationDoc>(DocumentType.APP_MIGRATION_METADATA)
},
{ skip_setup: true }
)
}
const getCacheKey = (appId: string) => `appmigrations_${env.VERSION}_${appId}`
export async function getAppMigrationVersion(appId: string): Promise<string> {
const cacheKey = getCacheKey(appId)
let metadata: AppMigrationDoc | undefined = await cache.get(cacheKey)
// We don't want to cache in dev, in order to be able to tweak it
if (metadata && !env.isDev()) {
return metadata.version
}
let version
try {
metadata = await getFromDB(appId)
version = metadata.version
} catch (err: any) {
if (err.status !== 404) {
throw err
}
version = ""
}
await cache.store(cacheKey, version, EXPIRY_SECONDS)
return version
}
export async function updateAppMigrationMetadata({
appId,
version,
}: {
appId: string
version: string
}): Promise<void> {
const db = context.getAppDB()
let appMigrationDoc: AppMigrationDoc
try {
appMigrationDoc = await getFromDB(appId)
} catch (err: any) {
if (err.status !== 404) {
throw err
}
appMigrationDoc = {
_id: DocumentType.APP_MIGRATION_METADATA,
version: "",
history: {},
}
await db.put(appMigrationDoc)
appMigrationDoc = await getFromDB(appId)
}
const updatedMigrationDoc: AppMigrationDoc = {
...appMigrationDoc,
version: version || "",
history: {
...appMigrationDoc.history,
[version]: { runAt: new Date().toISOString() },
},
}
await db.put(updatedMigrationDoc)
const cacheKey = getCacheKey(appId)
await cache.destroy(cacheKey)
}
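For reference, an illustrative (made-up) example of what the metadata document could look like after a single migration has been recorded, reusing the `AppMigrationDoc` interface and `DocumentType` import defined above; the id and timestamp are placeholders.

```
const exampleDoc: AppMigrationDoc = {
  _id: DocumentType.APP_MIGRATION_METADATA,
  version: "20231212120000_backfill_table_metadata",
  history: {
    "20231212120000_backfill_table_metadata": {
      runAt: "2023-12-12T13:47:07.000Z",
    },
  },
}
```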

View File

@ -0,0 +1,33 @@
import queue from "./queue"
import { getAppMigrationVersion } from "./appMigrationMetadata"
import { MIGRATIONS } from "./migrations"
export * from "./appMigrationMetadata"
export type AppMigration = {
id: string
func: () => Promise<void>
}
export const latestMigration = MIGRATIONS.map(m => m.id)
.sort()
.reverse()[0]
const getTimestamp = (versionId: string) => versionId?.split("_")[0]
export async function checkMissingMigrations(appId: string) {
const currentVersion = await getAppMigrationVersion(appId)
if (getTimestamp(currentVersion) < getTimestamp(latestMigration)) {
await queue.add(
{
appId,
},
{
jobId: `${appId}_${latestMigration}`,
removeOnComplete: true,
removeOnFail: true,
}
)
}
}
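The queueing decision above compares plain strings, which only works if migration ids sort chronologically; the `getTimestamp` helper suggests ids are expected to start with a sortable timestamp prefix. A small illustrative sketch with made-up ids:

```
// Hypothetical ids - the timestamp prefix is what getTimestamp() extracts.
const ids = [
  "20231201090000_first_migration",
  "20231212101500_second_migration",
]

// Mirrors how latestMigration is computed above: sort and take the last id.
const latest = ids.sort().reverse()[0] // "20231212101500_second_migration"

// An app with no migration metadata doc yet resolves to version "", and ""
// compares as older than any timestamp-prefixed id, so it would be queued too.
const currentVersion = ""
const behind = currentVersion.split("_")[0] < latest.split("_")[0] // true
```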

View File

@ -0,0 +1,7 @@
// This file should never be manually modified, use `yarn add-app-migration` in order to add a new one
import { AppMigration } from "."
export const MIGRATIONS: AppMigration[] = [
// Migrations will be executed sorted by id
]

View File

@ -0,0 +1,58 @@
import { context, locks } from "@budibase/backend-core"
import { LockName, LockType } from "@budibase/types"
import {
getAppMigrationVersion,
updateAppMigrationMetadata,
} from "./appMigrationMetadata"
import { AppMigration } from "."
export async function processMigrations(
appId: string,
migrations: AppMigration[]
) {
console.log(`Processing app migration for "${appId}"`)
await locks.doWithLock(
{
name: LockName.APP_MIGRATION,
type: LockType.AUTO_EXTEND,
resource: appId,
},
async () => {
await context.doInAppMigrationContext(appId, async () => {
let currentVersion = await getAppMigrationVersion(appId)
const pendingMigrations = migrations
.filter(m => m.id > currentVersion)
.sort((a, b) => a.id.localeCompare(b.id))
const migrationIds = migrations.map(m => m.id).sort()
let index = 0
for (const { id, func } of pendingMigrations) {
const expectedMigration =
migrationIds[migrationIds.indexOf(currentVersion) + 1]
if (expectedMigration !== id) {
throw `Migration ${id} could not run, update for "${id}" is running but ${expectedMigration} is expected`
}
const counter = `(${++index}/${pendingMigrations.length})`
console.info(`Running migration ${id}... ${counter}`, {
migrationId: id,
appId,
})
await func()
await updateAppMigrationMetadata({
appId,
version: id,
})
currentVersion = id
}
})
}
)
console.log(`App migration for "${appId}" processed`)
}

View File

@ -0,0 +1,15 @@
import { queue } from "@budibase/backend-core"
import { Job } from "bull"
import { MIGRATIONS } from "./migrations"
import { processMigrations } from "./migrationsProcessor"
const appMigrationQueue = queue.createQueue(queue.JobQueue.APP_MIGRATION)
appMigrationQueue.process(processMessage)
async function processMessage(job: Job) {
const { appId } = job.data
await processMigrations(appId, MIGRATIONS)
}
export default appMigrationQueue

View File

@ -0,0 +1,25 @@
import { context } from "@budibase/backend-core"
import * as setup from "../../api/routes/tests/utilities"
import { MIGRATIONS } from "../migrations"
describe("migration", () => {
// These tests check that each migration is "idempotent".
// We should be able to rerun any migration, with any rerun not modifying anything. The code should be aware that the migration already ran
it("each migration can rerun safely", async () => {
const config = setup.getConfig()
await config.init()
await config.doInContext(config.getAppId(), async () => {
const db = context.getAppDB()
for (const migration of MIGRATIONS) {
await migration.func()
const docs = await db.allDocs({ include_docs: true })
await migration.func()
const latestDocs = await db.allDocs({ include_docs: true })
expect(docs).toEqual(latestDocs)
}
})
})
})

View File

@ -0,0 +1,50 @@
import * as setup from "../../api/routes/tests/utilities"
import { processMigrations } from "../migrationsProcessor"
import { getAppMigrationVersion } from "../appMigrationMetadata"
import { context } from "@budibase/backend-core"
import { AppMigration } from ".."
describe("migrationsProcessor", () => {
it("running migrations will update the latest applied migration", async () => {
const testMigrations: AppMigration[] = [
{ id: "123", func: async () => {} },
{ id: "124", func: async () => {} },
{ id: "125", func: async () => {} },
]
const config = setup.getConfig()
await config.init()
const appId = config.getAppId()
await config.doInContext(appId, () =>
processMigrations(appId, testMigrations)
)
expect(
await config.doInContext(appId, () => getAppMigrationVersion(appId))
).toBe("125")
})
it("no context can be initialised within a migration", async () => {
const testMigrations: AppMigration[] = [
{
id: "123",
func: async () => {
await context.doInAppMigrationContext("any", () => {})
},
},
]
const config = setup.getConfig()
await config.init()
const appId = config.getAppId()
await expect(
config.doInContext(appId, () => processMigrations(appId, testMigrations))
).rejects.toThrowError(
"The context cannot be changed, a migration is currently running"
)
})
})

View File

@ -88,6 +88,7 @@ const environment = {
}, },
TOP_LEVEL_PATH: TOP_LEVEL_PATH:
process.env.TOP_LEVEL_PATH || process.env.SERVER_TOP_LEVEL_PATH, process.env.TOP_LEVEL_PATH || process.env.SERVER_TOP_LEVEL_PATH,
APP_MIGRATION_TIMEOUT: parseIntSafe(process.env.APP_MIGRATION_TIMEOUT),
} }
// threading can cause memory issues with node-ts in development // threading can cause memory issues with node-ts in development

View File

@ -39,7 +39,7 @@ describe("Google Sheets Integration", () => {
let cleanupEnv: () => void let cleanupEnv: () => void
beforeAll(() => { beforeAll(() => {
cleanupEnv = config.setEnv({ cleanupEnv = config.setCoreEnv({
GOOGLE_CLIENT_ID: "test", GOOGLE_CLIENT_ID: "test",
GOOGLE_CLIENT_SECRET: "test", GOOGLE_CLIENT_SECRET: "test",
}) })

View File

@ -0,0 +1,14 @@
import { UserCtx } from "@budibase/types"
import { checkMissingMigrations } from "../appMigrations"
export default async (ctx: UserCtx, next: any) => {
const { appId } = ctx
if (!appId) {
return next()
}
await checkMissingMigrations(appId)
return next()
}

View File

@ -218,20 +218,45 @@ class TestConfiguration {
*/ */
setEnv(newEnvVars: Partial<typeof env>): () => void { setEnv(newEnvVars: Partial<typeof env>): () => void {
const oldEnv = cloneDeep(env) const oldEnv = cloneDeep(env)
const oldCoreEnv = cloneDeep(coreEnv)
let key: keyof typeof newEnvVars let key: keyof typeof newEnvVars
for (key in newEnvVars) { for (key in newEnvVars) {
env._set(key, newEnvVars[key]) env._set(key, newEnvVars[key])
coreEnv._set(key, newEnvVars[key])
} }
return () => { return () => {
for (const [key, value] of Object.entries(oldEnv)) { for (const [key, value] of Object.entries(oldEnv)) {
env._set(key, value) env._set(key, value)
} }
}
}
for (const [key, value] of Object.entries(oldCoreEnv)) { async withCoreEnv(
newEnvVars: Partial<typeof coreEnv>,
f: () => Promise<void>
) {
let cleanup = this.setCoreEnv(newEnvVars)
try {
await f()
} finally {
cleanup()
}
}
/*
* Sets the environment variables to the given values and returns a function
* that can be called to reset the environment variables to their original values.
*/
setCoreEnv(newEnvVars: Partial<typeof coreEnv>): () => void {
const oldEnv = cloneDeep(env)
let key: keyof typeof newEnvVars
for (key in newEnvVars) {
coreEnv._set(key, newEnvVars[key])
}
return () => {
for (const [key, value] of Object.entries(oldEnv)) {
coreEnv._set(key, value) coreEnv._set(key, value)
} }
} }
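A hedged sketch of how a test could use the new `withCoreEnv` helper, mirroring the `setCoreEnv` call in the Google Sheets test earlier in this diff; the variable names and values are just examples.

```
// Assumes `config` is an instance of TestConfiguration.
await config.withCoreEnv(
  { GOOGLE_CLIENT_ID: "test", GOOGLE_CLIENT_SECRET: "test" },
  async () => {
    // Core env vars are overridden only for the duration of this callback;
    // withCoreEnv restores the previous values in its finally block,
    // even if the test body throws.
  }
)
```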

View File

@ -10,7 +10,7 @@
"prebuild": "rimraf dist/", "prebuild": "rimraf dist/",
"build": "node ../../scripts/build.js && tsc -p tsconfig.build.json --emitDeclarationOnly --paths null", "build": "node ../../scripts/build.js && tsc -p tsconfig.build.json --emitDeclarationOnly --paths null",
"build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput", "build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput",
"dev:builder": "tsc -p tsconfig.json --watch --preserveWatchOutput", "dev": "tsc -p tsconfig.json --watch --preserveWatchOutput",
"check:types": "tsc -p tsconfig.json --noEmit --paths null" "check:types": "tsc -p tsconfig.json --noEmit --paths null"
}, },
"dependencies": { "dependencies": {
@ -32,7 +32,7 @@
} }
] ]
}, },
"dev:builder": { "dev": {
"dependsOn": [ "dependsOn": [
{ {
"projects": [ "projects": [

View File

@ -1,39 +1,42 @@
# String templating
This package provides a common system for string templating across the Budibase Builder, client and server.
The templating is provided through the use of [Handlebars](https://handlebarsjs.com/) an extension of Mustache
which is capable of carrying out logic. We have also extended the base Handlebars functionality through the use
of a set of helpers provided through the [handlebars-helpers](https://github.com/budibase/handlebars-helpers) package.
We have not implemented all the helpers provided by the helpers package as some of them provide functionality
we felt would not be beneficial. The following collections of helpers have been implemented:
1. [Math](https://github.com/budibase/handlebars-helpers/tree/master#math) - a set of useful helpers for
carrying out logic pertaining to numbers e.g. `avg`, `add`, `abs` and so on.
2. [Array](https://github.com/budibase/handlebars-helpers/tree/master#array) - some very specific helpers
for use with arrays, useful for example in automations. Helpers like `first`, `last`, `after` and `join`
can be useful for getting particular portions of arrays or turning them into strings.
3. [Number](https://github.com/budibase/handlebars-helpers/tree/master#number) - unlike the math helpers these
are useful for converting numbers into useful formats for display, e.g. `bytes`, `addCommas` and `toPrecision`.
4. [URL](https://github.com/budibase/handlebars-helpers/tree/master#url) - very specific helpers for dealing with URLs,
such as `encodeURI`, `escape`, `stripQueryString` and `stripProtocol`. These are primarily useful
for building up particular URLs to hit as say part of an automation.
5. [String](https://github.com/budibase/handlebars-helpers/tree/master#string) - these helpers are useful for building
strings and preparing them for display, e.g. `append`, `camelcase`, `capitalize` and `ellipsis`.
6. [Comparison](https://github.com/budibase/handlebars-helpers/tree/master#comparison) - these helpers are mainly for
building strings when particular conditions are met, for example `and`, `or`, `gt`, `lt`, `not` and so on. This is a very
extensive set of helpers but is mostly as would be expected from a set of logical operators.
7. [Object](https://github.com/budibase/handlebars-helpers/tree/master#object) - useful operator for parsing objects, as well
as converting them to JSON strings.
8. [Regex](https://github.com/budibase/handlebars-helpers/tree/master#regex) - allows performing regex tests on strings that
can be used in conditional statements.
9. [Date](https://github.com/helpers/helper-date) - last but certainly not least is a moment based date helper, which can
format ISO/timestamp based dates into something human-readable. An example of this would be `{{date dateProperty "DD-MM-YYYY"}}`.
## Date formatting
This package uses the standard method for formatting date times, using the following syntax:
| Input | Example | Description |
| ----- | ------- | ----------- |
| YYYY | 2014 | 4 or 2 digit year. Note: Only 4 digit can be parsed on strict mode |
| YY | 14 | 2 digit year |
| Y | -25 | Year with any number of digits and sign |
| Q | 1..4 | Quarter of year. Sets month to first month in quarter. |
| M MM | 1..12 | Month number |
@ -42,55 +45,64 @@ This package uses the standard method for formatting date times, using the follo
| Do | 1st..31st | Day of month with ordinal | | Do | 1st..31st | Day of month with ordinal |
| DDD DDDD | 1..365 | Day of year | | DDD DDDD | 1..365 | Day of year |
| X | 1410715640.579 | Unix timestamp | | X | 1410715640.579 | Unix timestamp |
| x | 1410715640579 | Unix ms timestamp | | x | 1410715640579 | Unix ms timestamp |
## Template format ## Template format
There are two main ways that the templating system can be used, the first is very similar to that which There are two main ways that the templating system can be used, the first is very similar to that which
would be produced by Mustache - a single statement: would be produced by Mustache - a single statement:
``` ```
Hello I'm building a {{uppercase adjective}} string with Handlebars! Hello I'm building a {{uppercase adjective}} string with Handlebars!
``` ```
In the statement above provided a context of `{adjective: "cool"}` will produce a string of `Hello I'm building a COOL string with Handlebars!`. In the statement above provided a context of `{adjective: "cool"}` will produce a string of `Hello I'm building a COOL string with Handlebars!`.
Here we can see an example of how string helpers can be used to make a string exactly as we need it. These statements are relatively Here we can see an example of how string helpers can be used to make a string exactly as we need it. These statements are relatively
simple; we can also stack helpers as such: `{{ uppercase (remove string "bad") }}` with the use of parenthesis. simple; we can also stack helpers as such: `{{ uppercase (remove string "bad") }}` with the use of parenthesis.
The other type of statement that can be made with the templating system is conditional ones, that appear as such: The other type of statement that can be made with the templating system is conditional ones, that appear as such:
``` ```
Hello I'm building a {{ #gte score "50" }}Great{{ else }}Bad{{ /gte }} string with Handlebars! Hello I'm building a {{ #gte score "50" }}Great{{ else }}Bad{{ /gte }} string with Handlebars!
``` ```
In this string we can see that the string `Great` or `Bad` will be inserted depending on the state of the
In this string we can see that the string `Great` or `Bad` will be inserted depending on the state of the
`score` context variable. The comparison, string and array helpers all provide some conditional operators which can be used `score` context variable. The comparison, string and array helpers all provide some conditional operators which can be used
in this way. There will also be some operators which will be built with a very similar syntax but will produce an in this way. There will also be some operators which will be built with a very similar syntax but will produce an
iterative operation, like a for each - an example of this would be the `forEach` array helper. iterative operation, like a for each - an example of this would be the `forEach` array helper.
## Usage
Usage of this templating package is through one of the primary functions provided by the package - these functions are
as follows (a short usage sketch follows the list):
1. `processString` - `async (string, object)` - given a template string and a context object this will build a string
using our pre-processors, post-processors and Handlebars.
2. `processObject` - `async (object, object)` - carries out the functionality provided by `processString` for every string
inside the given object. This recurses deeply into the provided object, so for very large objects it could be slow.
3. `processStringSync` - `(string, object)` - a reduced-functionality, synchronous version of string processing, like the
synchronous functions provided by Node (e.g. `readdirSync`).
4. `processObjectSync` - `(object, object)` - as with the synchronous string function, recurses an object to process it synchronously.
5. `makePropSafe` - `(string)` - some properties cannot be handled by Handlebars, for example `Table 1` is not valid due
to the space in the property name. This will update the property name to `[Table 1]`, wrapping it in literal
specifiers so that it is safe for use in Handlebars. Ideally this function should be called for every level of an object
being accessed, for example `[Table 1].[property name]` is the syntax that Handlebars requires.
6. `isValid` - `(string)` - checks the given string for any templates and returns a boolean stating whether it is a valid
template.
7. `getManifest` - returns the manifest JSON which has been generated for the helpers, describing them and their parameters.
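A short usage sketch combining several of the functions above; the `@budibase/string-templates` import path is assumed here for illustration.
```
const {
  processString,
  makePropSafe,
  isValid,
} = require("@budibase/string-templates")

const run = async () => {
  // Wrap each level of the property access so that names containing spaces
  // are safe for Handlebars, e.g. [Table 1].[name col]
  const template = `First row: {{ ${makePropSafe("Table 1")}.${makePropSafe("name col")} }}`

  // isValid checks that the string contains well-formed template statements
  if (isValid(template)) {
    const output = await processString(template, {
      "Table 1": { "name col": "example value" },
    })
    console.log(output) // => "First row: example value"
  }
}

run()
```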
## Development
This library is built with [Rollup](https://rollupjs.org/guide/en/), as many of the packages built by Budibase are. We have
built the string templating package as a UMD bundle so that it can be used by both Node and browser based applications. This package also
builds TypeScript stubs which, when making use of the library, will be picked up by your IDE to provide code completion. The following
commands are provided for development purposes:
1. `yarn build` - builds the TypeScript stubs and the bundle into the `dist` directory.
2. `yarn test` - runs the test suite, which checks that the various helpers are still functioning as
expected, along with a few expected use cases.
3. `yarn dev` - an internal command which is used by lerna to watch and build any changes
to the package as part of the main `yarn dev` of the repo.
It is also important to note this package is managed in the same manner as all others in the mono-repo,
through lerna.
View File
@@ -20,7 +20,7 @@
   ],
   "scripts": {
     "build": "tsc && rollup -c",
-    "dev:builder": "tsc && rollup -cw",
+    "dev": "tsc && rollup -cw",
     "test": "jest",
     "manifest": "node ./scripts/gen-collection-info.js"
   },
@@ -35,7 +35,7 @@
     "@rollup/plugin-commonjs": "^17.1.0",
     "@rollup/plugin-json": "^4.1.0",
     "doctrine": "^3.0.0",
-    "jest": "29.6.2",
+    "jest": "29.7.0",
     "marked": "^4.0.10",
     "rollup": "^2.36.2",
     "rollup-plugin-inject-process-env": "^1.3.1",
View File
@@ -10,12 +10,12 @@
     "prebuild": "rimraf dist/",
     "build": "node ../../scripts/build.js && tsc -p tsconfig.build.json --emitDeclarationOnly",
     "build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput",
-    "dev:builder": "tsc -p tsconfig.json --watch --preserveWatchOutput",
+    "dev": "tsc -p tsconfig.json --watch --preserveWatchOutput",
     "check:types": "tsc -p tsconfig.json --noEmit --paths null"
   },
   "jest": {},
   "devDependencies": {
-    "@budibase/nano": "10.1.3",
+    "@budibase/nano": "10.1.4",
     "@types/koa": "2.13.4",
     "@types/node": "18.17.0",
     "@types/pouchdb": "6.4.0",
View File
@@ -37,6 +37,7 @@ export enum DocumentType {
   USER_FLAG = "flag",
   AUTOMATION_METADATA = "meta_au",
   AUDIT_LOG = "al",
+  APP_MIGRATION_METADATA = "_design/migrations",
 }

 // these are the core documents that make up the data, design
View File
@@ -1,4 +1,4 @@
-import Nano from "@budibase/nano"
+import type Nano from "@budibase/nano"
 import { AllDocsResponse, AnyDocument, Document, ViewTemplateOpts } from "../"
 import { Writable } from "stream"
View File
@@ -20,6 +20,7 @@ export enum LockName {
   UPDATE_TENANTS_DOC = "update_tenants_doc",
   PERSIST_WRITETHROUGH = "persist_writethrough",
   QUOTA_USAGE_EVENT = "quota_usage_event",
+  APP_MIGRATION = "app_migrations",
 }

 export type LockOptions = {
View File
@@ -21,7 +21,7 @@
     "debug": "yarn build && node --expose-gc --inspect=9223 dist/index.js",
     "run:docker:cluster": "pm2-runtime start pm2.config.js",
     "dev:stack:init": "node ./scripts/dev/manage.js init",
-    "dev:builder": "npm run dev:stack:init && nodemon",
+    "dev": "npm run dev:stack:init && nodemon",
     "dev:built": "yarn run dev:stack:init && yarn run run:docker",
     "test": "bash scripts/test.sh",
     "test:watch": "jest --watch",
@@ -75,26 +75,26 @@
     "@swc/jest": "0.2.27",
     "@trendyol/jest-testcontainers": "2.1.1",
     "@types/jest": "29.5.5",
-    "@types/jsonwebtoken": "8.5.1",
+    "@types/jsonwebtoken": "9.0.3",
     "@types/koa": "2.13.4",
     "@types/koa__router": "8.0.8",
     "@types/lodash": "4.14.200",
     "@types/node": "18.17.0",
     "@types/node-fetch": "2.6.4",
     "@types/server-destroy": "1.0.1",
-    "@types/supertest": "2.0.12",
+    "@types/supertest": "2.0.14",
     "@types/uuid": "8.3.4",
-    "jest": "29.6.2",
+    "jest": "29.7.0",
     "nodemon": "2.0.15",
     "rimraf": "3.0.2",
-    "supertest": "6.2.2",
+    "supertest": "6.3.3",
     "timekeeper": "2.2.0",
     "typescript": "5.2.2",
     "update-dotenv": "1.1.1"
   },
   "nx": {
     "targets": {
-      "dev:builder": {
+      "dev": {
         "dependsOn": [
           {
             "comment": "Required for pro usage when submodule not loaded",
View File
@@ -26,6 +26,7 @@ import {
   migrations,
   platform,
   tenancy,
+  db,
 } from "@budibase/backend-core"
 import { checkAnyUserExists } from "../../../utilities/users"
 import { isEmailConfigured } from "../../../utilities/email"
@@ -185,9 +186,27 @@ export const getAppUsers = async (ctx: Ctx<SearchUsersRequest>) => {
 export const search = async (ctx: Ctx<SearchUsersRequest>) => {
   const body = ctx.request.body
-  // TODO: for now only one supported search key, string.email
-  if (body?.query && !userSdk.core.isSupportedUserSearch(body.query)) {
-    ctx.throw(501, "Can only search by string.email or equal._id")
+  // TODO: for now only two supported search keys; string.email and equal._id
+  if (body?.query) {
+    // Clean numeric prefixing. This will overwrite duplicate search fields,
+    // but this is fine because we only support a single custom search on
+    // email and id
+    for (let filters of Object.values(body.query)) {
+      if (filters && typeof filters === "object") {
+        for (let [field, value] of Object.entries(filters)) {
+          delete filters[field]
+          const cleanedField = db.removeKeyNumbering(field)
+          if (filters[cleanedField] !== undefined) {
+            ctx.throw(400, "Only 1 filter per field is supported")
+          }
+          filters[cleanedField] = value
+        }
+      }
+    }
+    // Validate we aren't trying to search on any illegal fields
+    if (!userSdk.core.isSupportedUserSearch(body.query)) {
+      ctx.throw(400, "Can only search by string.email or equal._id")
+    }
   }
   if (body.paginate === false) {
View File
@@ -590,6 +590,15 @@ describe("/api/global/users", () => {
     expect(response.body.data[0].email).toBe(user.email)
   })

+  it("should be able to search by email with numeric prefixing", async () => {
+    const user = await config.createUser()
+    const response = await config.api.users.searchUsers({
+      query: { string: { ["999:email"]: user.email } },
+    })
+    expect(response.body.data.length).toBe(1)
+    expect(response.body.data[0].email).toBe(user.email)
+  })
+
   it("should be able to search by _id", async () => {
     const user = await config.createUser()
     const response = await config.api.users.searchUsers({
@@ -599,13 +608,52 @@
     expect(response.body.data[0]._id).toBe(user._id)
   })

+  it("should be able to search by _id with numeric prefixing", async () => {
+    const user = await config.createUser()
+    const response = await config.api.users.searchUsers({
+      query: { equal: { ["1:_id"]: user._id } },
+    })
+    expect(response.body.data.length).toBe(1)
+    expect(response.body.data[0]._id).toBe(user._id)
+  })
+
+  it("should throw an error when using multiple filters on the same field", async () => {
+    const user = await config.createUser()
+    await config.api.users.searchUsers(
+      {
+        query: {
+          string: {
+            ["1:email"]: user.email,
+            ["2:email"]: "something else",
+          },
+        },
+      },
+      { status: 400 }
+    )
+  })
+
+  it("should throw an error when using multiple filters on the same field without prefixes", async () => {
+    const user = await config.createUser()
+    await config.api.users.searchUsers(
+      {
+        query: {
+          string: {
+            ["_id"]: user.email,
+            ["999:_id"]: "something else",
+          },
+        },
+      },
+      { status: 400 }
+    )
+  })
+
   it("should throw an error when unimplemented options used", async () => {
     const user = await config.createUser()
     await config.api.users.searchUsers(
       {
         query: { equal: { firstName: user.firstName } },
       },
-      { status: 501 }
+      { status: 400 }
     )
   })
View File
@@ -29,7 +29,7 @@
     "@types/node-fetch": "2.6.4",
     "chance": "1.1.8",
     "dotenv": "16.0.1",
-    "jest": "29.6.2",
+    "jest": "29.7.0",
     "prettier": "2.7.1",
     "start-server-and-test": "1.14.0",
     "@swc/core": "1.3.71",
View File
@@ -0,0 +1,80 @@
const fs = require("fs")
const path = require("path")

const argv = require("yargs").demandOption(
  ["title"],
  "Please provide the required parameter: --title=[title]"
).argv

const { title } = argv

const generateTimestamp = () => {
  const now = new Date()
  const year = now.getFullYear()
  const month = String(now.getMonth() + 1).padStart(2, "0")
  const day = String(now.getDate()).padStart(2, "0")
  const hours = String(now.getHours()).padStart(2, "0")
  const minutes = String(now.getMinutes()).padStart(2, "0")
  const seconds = String(now.getSeconds()).padStart(2, "0")

  return `${year}${month}${day}${hours}${minutes}${seconds}`
}

const createMigrationFile = () => {
  const migrationFilename = `${generateTimestamp()}_${title}`
  const migrationsDir = "../packages/server/src/appMigrations"

  const template = `const migration = async () => {
  // Add your migration logic here
}

export default migration
`

  const newMigrationPath = path.join(
    migrationsDir,
    "migrations",
    `${migrationFilename}.ts`
  )
  fs.writeFileSync(path.resolve(__dirname, newMigrationPath), template)
  console.log(`New migration created: ${newMigrationPath}`)

  // Append the new migration to the main migrations file
  const migrationsFilePath = path.join(migrationsDir, "migrations.ts")
  const migrationDir = fs.readdirSync(
    path.join(__dirname, migrationsDir, "migrations")
  )
  const migrations = migrationDir
    .filter(m => m.endsWith(".ts"))
    .map(m => m.substring(0, m.length - 3))

  let migrationFileContent =
    '// This file should never be manually modified, use `yarn add-app-migration` in order to add a new one\n\nimport { AppMigration } from "."\n\n'

  for (const migration of migrations) {
    migrationFileContent += `import m${migration} from "./migrations/${migration}"\n`
  }

  migrationFileContent += `\nexport const MIGRATIONS: AppMigration[] = [
  // Migrations will be executed sorted by id\n`

  for (const migration of migrations) {
    migrationFileContent += `  {
    id: "${migration}",
    func: m${migration}
  },\n`
  }

  migrationFileContent += `]\n`

  fs.writeFileSync(
    path.resolve(__dirname, migrationsFilePath),
    migrationFileContent
  )
  console.log(`Main migrations file updated: ${migrationsFilePath}`)
}

createMigrationFile()
View File
@@ -13,7 +13,7 @@ const {
 } = require("@esbuild-plugins/tsconfig-paths")
 const { nodeExternalsPlugin } = require("esbuild-node-externals")

-var argv = require("minimist")(process.argv.slice(2))
+var { argv } = require("yargs")

 function runBuild(entry, outfile) {
   const isDev = process.env.NODE_ENV !== "production"
@@ -23,7 +23,7 @@ function runBuild(entry, outfile) {
   )

   if (
-    !fs.existsSync("../pro/src") &&
+    !fs.existsSync(path.join(__dirname, "../packages/pro/src")) &&
     tsconfigPathPluginContent.compilerOptions?.paths
   ) {
     // If we don't have pro, we cannot bundle backend-core.
@@ -51,7 +51,15 @@ function runBuild(entry, outfile) {
       ".svelte": "copy",
     },
     metafile: true,
-    external: ["deasync", "mock-aws-s3", "nock", "bull"],
+    external: [
+      "deasync",
+      "mock-aws-s3",
+      "nock",
+      "bull",
+      "pouchdb",
+      "bcrypt",
+      "bcryptjs",
+    ],
   }

   build({
View File
@@ -39,7 +39,7 @@ getDistro
 if [[ $OS == "Darwin" ]];
 then
   echo "This script is not setup for your machine type:" $OS
-  echo "Please use the manual steps described in https://github.com/Budibase/budibase/blob/develop/docs/CONTRIBUTING.md#getting-started-for-contributors"
+  echo "Please use the manual steps described in https://github.com/Budibase/budibase/blob/master/docs/CONTRIBUTING.md#getting-started-for-contributors"
   exit 1
 fi
View File
@@ -5,12 +5,12 @@ domain=$2
 if [ "$enable" = "enable" ]; then
   lerna run env:localdomain:enable -- "$domain"
-  cd ../account-portal
+  cd packages/account-portal
   yarn env:localdomain:enable "$domain"
   cd -
 else
   lerna run env:localdomain:disable
-  cd ../account-portal
+  cd packages/account-portal
   yarn env:localdomain:disable
   cd -
 fi
View File
yarn.lock - 2728 changes (file diff suppressed because it is too large)