diff --git a/.eslintignore b/.eslintignore index 0d81de0ef9..021fe8e367 100644 --- a/.eslintignore +++ b/.eslintignore @@ -7,7 +7,4 @@ packages/worker/coverage packages/backend-core/coverage packages/server/client packages/builder/.routify -packages/builder/cypress/support/queryLevelTransformerFunction.js -packages/builder/cypress/support/queryLevelTransformerFunctionWithData.js -packages/builder/cypress/reports packages/sdk/sdk diff --git a/.github/workflows/README.md b/.github/workflows/README.md index f77323d85a..9b75a2e73a 100644 --- a/.github/workflows/README.md +++ b/.github/workflows/README.md @@ -1,4 +1,3 @@ - # Budibase CI Pipelines Welcome to the budibase CI pipelines directory. This document details what each of the CI pipelines are for, and come common combinations. @@ -6,27 +5,34 @@ Welcome to the budibase CI pipelines directory. This document details what each ## All CI Pipelines ### Note -- When running workflow dispatch jobs, ensure you always run them off the `master` branch. It defaults to `develop`, so double check before running any jobs. The exception to this case is the `deploy-release` job which requires the develop branch. + +- When running workflow dispatch jobs, ensure you always run them off the `master` branch. It defaults to `develop`, so double check before running any jobs. The exception to this case is the `deploy-release` job which requires the develop branch. ### Standard CI Build Job (budibase_ci.yml) + Triggers: + - PR or push to develop - PR or push to master -The standard CI Build job is what runs when you raise a PR to develop or master. +The standard CI Build job is what runs when you raise a PR to develop or master. + - Installs all dependencies, -- builds the project +- builds the project - run the unit tests - Generate test coverage metrics with codecov -- Run the cypress tests +- Run the integration tests ### Release Develop Job (release-develop.yml) + Triggers: + - Push to develop -The job responsible for building, tagging and pushing docker images out to the test and release environments. +The job responsible for building, tagging and pushing docker images out to the test and release environments. + - Installs all dependencies -- builds the project +- builds the project - run the unit tests - publish the budibase JS packages under a prerelease tag to NPM - build, tag and push docker images under the `develop` tag to docker hub @@ -34,23 +40,29 @@ The job responsible for building, tagging and pushing docker images out to the t These images will then be pulled by the test and release environments, updating the latest automatically. Discord notifications are sent to the #infra channel when this occurs. ### Release Job (release.yml) + Triggers: + - Push to master This job is responsible for building and pushing the latest code to NPM and docker hub, so that it can be deployed. + - Installs all dependencies -- builds the project +- builds the project - run the unit tests - publish the budibase JS packages under a release tag to NPM (always incremented by patch versions) - build, tag and push docker images under the `v.x.x.x` (the tag of the NPM release) tag to docker hub ### Release Selfhost Job (release-selfhost.yml) + Triggers: + - Manual Workflow Dispatch Trigger -This job is responsible for delivering the latest version of budibase to those that are self-hosting. +This job is responsible for delivering the latest version of budibase to those that are self-hosting. 
This job relies on the release job to have run first, so the latest image is pushed to dockerhub. This job then will pull the latest version from `lerna.json` and try to find an image in dockerhub corresponding to that version. For example, if the version in `lerna.json` is `1.0.0`: + - Pull the images for all budibase services tagged `v1.0.0` from dockerhub - Tag these images as `latest` - Push them back to dockerhub. This now means anyone who pulls `latest` (self hosters using docker-compose) will get the latest version. @@ -58,53 +70,61 @@ This job relies on the release job to have run first, so the latest image is pus - Perform a github release with the latest version. You can see previous releases here (https://github.com/Budibase/budibase/releases) ### Deploy Release (deploy-release.yml) + Triggers: + - Manual Workflow Dispatch Trigger This job is responsible for deploying to our release, cloud kubernetes environment. You must run the release job first, to ensure that the latest images have been built and pushed to docker hub. After kicking off this job, the following will occur: -- Checks out the release branch +- Checks out the release branch - Pulls the latest `values.yaml` from budibase infra, a private repo containing budibases infrastructure configuration - Gets the latest budibase version from `lerna.json`, if it hasn't been specified in the workflow when you kicked it off -- Configures AWS Credentials +- Configures AWS Credentials - Deploys the helm chart in the budibase repo to our preproduction EKS cluster, injecting the `values.yaml` we pulled from budibase-infra - Fires off a discord webhook in the #infra channel to show that the deployment completely successfully. ### Deploy Preprod (deploy-preprod.yml) + Triggers: + - Manual Workflow Dispatch Trigger This job is responsible for deploying to our preprod, cloud kubernetes environment. You must run the release job first, to ensure that the latest images have been built and pushed to docker hub. After kicking off this job, the following will occur: -- Checks out the master branch +- Checks out the master branch - Pulls the latest `values.yaml` from budibase infra, a private repo containing budibases infrastructure configuration - Gets the latest budibase version from `lerna.json`, if it hasn't been specified in the workflow when you kicked it off -- Configures AWS Credentials +- Configures AWS Credentials - Deploys the helm chart in the budibase repo to our preprod EKS cluster, injecting the `values.yaml` we pulled from budibase-infra - Fires off a discord webhook in the #infra channel to show that the deployment completely successfully. ### Deploy Production (deploy-cloud.yml) + Triggers: + - Manual Workflow Dispatch Trigger This job is responsible for deploying to our production, cloud kubernetes environment. You must run the release job first, to ensure that the latest images have been built and pushed to docker hub. You can also manually enter a version number for this job, so you can perform rollbacks or upgrade to a specific version. 
After kicking off this job, the following will occur: -- Checks out the master branch +- Checks out the master branch - Pulls the latest `values.yaml` from budibase infra, a private repo containing budibases infrastructure configuration - Gets the latest budibase version from `lerna.json`, if it hasn't been specified in the workflow when you kicked it off -- Configures AWS Credentials +- Configures AWS Credentials - Deploys the helm chart in the budibase repo to our production EKS cluster, injecting the `values.yaml` we pulled from budibase-infra - Fires off a discord webhook in the #infra channel to show that the deployment completely successfully. ## Common Workflows ### Deploy Changes to Production (Release) + - Merge `develop` into `master` - Wait for budibase CI job and release job to run - Run cloud deploy job - Run release selfhost job ### Deploy Changes to Production (Hotfix) + - Branch off `master` - Perform your hotfix - Merge back into `master` @@ -113,79 +133,7 @@ This job is responsible for deploying to our production, cloud kubernetes enviro - Run release selfhost job ### Rollback A Bad Cloud Deployment + - Kick off cloud deploy job - Ensure you are running off master - Enter the version number of the last known good version of budibase. For example `1.0.0` - -## Pro - -| **NOTE**: When developing for both pro / budibase repositories, your branch names need to match, or else the correct pro doesn't get run within your CI job. - -### Installing Pro - -The pro package is always installed from source in our CI jobs. - -This is done to prevent pro needing to be published prior to CI runs in budiabse. This is required for two reasons: -- To reduce developer need to manually bump versions, i.e: - - release pro, bump pro dep in budibase, now ci can run successfully -- The cyclic dependency on backend-core, i.e: - - pro depends on backend-core - - server depends on pro - - backend-core lives in the monorepo, so it can't be released independently to be used in pro - - therefore the only option is to pull pro from source and release it as a part of the monorepo release, as if it were a mono package - -The install is performed using the same steps as local development, via the `yarn bootstrap` command, see the [Contributing Guide#Pro](../../docs/CONTRIBUTING.md#pro) - -The branch to install pro from can vary depending on ref of the commit that triggered the budibase CI job. This is done to enable branches which have changes in both the monorepo and the pro repo to have their CI pass successfully. - -This is done using the [pro/install.sh](../../scripts/pro/install.sh) script. The script will: -- Clone pro to it's default branch (`develop`) -- Check if the clone worked, on forked versions of budibase this will fail due to no access - - This is fine as the `yarn` command will install the version from NPM - - Community PRs should never touch pro so this will always work -- Checkout the `BRANCH` argument, if this fails fallback to `BASE_BRANCH` - - This enables the more complex case of a feature branch being merged to another feature branch, e.g. - - I am working on a branch `epic/stonks` which exists on budibase and pro. 
- - I want to merge a change to this branch in budibase from `feature/stonks-ui`, which only exists in budibase - - The base branch ensures that `epic/stonks` in pro will still be checked out for the CI run, rather than falling back to `develop` -- Run `yarn setup` to build and install dependencies - - `yarn` - - `yarn bootstrap` - - `yarn build` - - The will build .ts files, and also update the `main` and `types` of `package.json` to point to `dist` rather than src - - The build command will only ever work in CI, it is prevented in local dev - -#### `BRANCH` and `BASE_BRANCH` arguments -These arguments are supplied by the various budibase build and release pipelines -- `budibase_ci` - - `BRANCH: ${{ github.event.pull_request.head.ref }}` -> The branch being merged - - `BASE_BRANCH: ${{ github.event.pull_request.base.ref}}` -> The base branch -- `release-develop` - - `BRANCH: develop` -> always use the `develop` branch in pro -- `release` - - `BRANCH: master` -> always use the `master` branch in pro - - -### Releasing Pro -After budibase dependencies have been released we will release the new version of pro to match the release version of budibase dependencies. This is to ensure that we are always keeping the version of `backend-core` in sync in the pro package and in budibase packages. Without this we could run into scenarios where different versions are being used when installed via `yarn` inside the docker images, creating very difficult to debug cases. - -Pro is released using the [pro/release.sh](../../scripts/pro/release.sh) script. The script will: -- Inspect the `VERSION` from the `lerna.json` file in budibase -- Determine whether to use the `latest` or `develop` tag based on the command argument -- Go to pro directory - - install npm creds - - update the version of `backend-core` to be `VERSION`, the version just released by lerna - - publish to npm. Uses a `lerna publish` command, pro itself is a mono repo. 
- - force the version to be the same as `VERSION` to keep pro and budibase in sync - - reverts the changes to `main` and `types` in `package.json` that were made by the build step, to point back to source - - commit & push: `Prep next development iteration` -- Go to budibase - - Update to the new version of pro in `server` and `worker` so the latest pro version is used in the docker builds - - commit & push: `Update pro version to $VERSION` - - -#### `COMMAND` argument -This argument is supplied by the existing `release` and `release:develop` budibase commands, which invoke the pro release -- `release` will supply no command and default to use `latest` -- `release:develop` will supply `develop` - diff --git a/.github/workflows/budibase_ci.yml b/.github/workflows/budibase_ci.yml index 9509a22e99..9d1131ed7f 100644 --- a/.github/workflows/budibase_ci.yml +++ b/.github/workflows/budibase_ci.yml @@ -18,27 +18,36 @@ env: BRANCH: ${{ github.event.pull_request.head.ref }} BASE_BRANCH: ${{ github.event.pull_request.base.ref}} PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }} + NX_BASE_BRANCH: origin/${{ github.base_ref }} + USE_NX_AFFECTED: ${{ github.event_name == 'pull_request' && github.base_ref != 'master'}} jobs: lint: runs-on: ubuntu-latest steps: + - name: Maximize build space + uses: easimon/maximize-build-space@master + with: + root-reserve-mb: 35000 + swap-size-mb: 1024 + remove-android: "true" + remove-dotnet: "true" - name: Checkout repo and submodules uses: actions/checkout@v3 - if: github.repository == github.event.pull_request.head.repo.full_name + if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase' with: submodules: true token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }} - name: Checkout repo only uses: actions/checkout@v3 - if: github.repository != github.event.pull_request.head.repo.full_name + if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase' - - name: Use Node.js 14.x + - name: Use Node.js 18.x uses: actions/setup-node@v3 with: - node-version: 14.x + node-version: 18.x cache: "yarn" - - run: yarn + - run: yarn --frozen-lockfile - run: yarn lint build: @@ -46,71 +55,138 @@ jobs: steps: - name: Checkout repo and submodules uses: actions/checkout@v3 - if: github.repository == github.event.pull_request.head.repo.full_name + if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase' with: submodules: true token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }} + fetch-depth: 0 - name: Checkout repo only uses: actions/checkout@v3 - if: github.repository != github.event.pull_request.head.repo.full_name + if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase' + with: + fetch-depth: 0 - - name: Use Node.js 14.x + - name: Use Node.js 18.x uses: actions/setup-node@v3 with: - node-version: 14.x + node-version: 18.x cache: "yarn" - - run: yarn + - run: yarn --frozen-lockfile + # Run build all the projects - - run: yarn build + - name: Build + run: | + yarn build # Check the types of the projects built via esbuild - - run: yarn check:types + - name: Check types + run: | + if ${{ env.USE_NX_AFFECTED }}; then + yarn check:types --since=${{ env.NX_BASE_BRANCH }} + else + yarn check:types + fi test-libraries: runs-on: ubuntu-latest steps: - name: Checkout repo and submodules uses: actions/checkout@v3 - if: github.repository == 
github.event.pull_request.head.repo.full_name + if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase' with: submodules: true token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }} + fetch-depth: 0 - name: Checkout repo only uses: actions/checkout@v3 - if: github.repository != github.event.pull_request.head.repo.full_name + if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase' + with: + fetch-depth: 0 - - name: Use Node.js 14.x + - name: Use Node.js 18.x uses: actions/setup-node@v3 with: - node-version: 14.x + node-version: 18.x cache: "yarn" - - run: yarn - - run: yarn test --ignore=@budibase/worker --ignore=@budibase/server --ignore=@budibase/pro + - run: yarn --frozen-lockfile + - name: Test + run: | + if ${{ env.USE_NX_AFFECTED }}; then + yarn test --ignore=@budibase/worker --ignore=@budibase/server --ignore=@budibase/pro --since=${{ env.NX_BASE_BRANCH }} + else + yarn test --ignore=@budibase/worker --ignore=@budibase/server --ignore=@budibase/pro + fi - uses: codecov/codecov-action@v3 with: token: ${{ secrets.CODECOV_TOKEN }} # not required for public repos name: codecov-umbrella verbose: true - test-services: + test-worker: runs-on: ubuntu-latest steps: - name: Checkout repo and submodules uses: actions/checkout@v3 - if: github.repository == github.event.pull_request.head.repo.full_name + if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase' with: submodules: true token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }} + fetch-depth: 0 - name: Checkout repo only uses: actions/checkout@v3 - if: github.repository != github.event.pull_request.head.repo.full_name + if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase' + with: + fetch-depth: 0 - - name: Use Node.js 14.x + - name: Use Node.js 18.x uses: actions/setup-node@v3 with: - node-version: 14.x + node-version: 18.x cache: "yarn" - - run: yarn - - run: yarn test --scope=@budibase/worker --scope=@budibase/server + - run: yarn --frozen-lockfile + - name: Test worker + run: | + if ${{ env.USE_NX_AFFECTED }}; then + yarn test --scope=@budibase/worker --since=${{ env.NX_BASE_BRANCH }} + else + yarn test --scope=@budibase/worker + fi + + - uses: codecov/codecov-action@v3 + with: + token: ${{ secrets.CODECOV_TOKEN || github.token }} # not required for public repos + name: codecov-umbrella + verbose: true + + test-server: + runs-on: ubuntu-latest + steps: + - name: Checkout repo and submodules + uses: actions/checkout@v3 + if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase' + with: + submodules: true + token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }} + fetch-depth: 0 + - name: Checkout repo only + uses: actions/checkout@v3 + if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase' + with: + fetch-depth: 0 + + - name: Use Node.js 18.x + uses: actions/setup-node@v3 + with: + node-version: 18.x + cache: "yarn" + - run: yarn --frozen-lockfile + - name: Test server + run: | + if ${{ env.USE_NX_AFFECTED }}; then + yarn test --scope=@budibase/server --since=${{ env.NX_BASE_BRANCH }} + else + yarn test --scope=@budibase/server + fi + - uses: codecov/codecov-action@v3 with: token: ${{ secrets.CODECOV_TOKEN || github.token }} # not required for public repos @@ -119,42 +195,50 @@ jobs: 
test-pro: runs-on: ubuntu-latest - if: github.repository == github.event.pull_request.head.repo.full_name + if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase' steps: - name: Checkout repo and submodules uses: actions/checkout@v3 with: submodules: true token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }} + fetch-depth: 0 - - name: Use Node.js 14.x + - name: Use Node.js 18.x uses: actions/setup-node@v3 with: - node-version: 14.x + node-version: 18.x cache: "yarn" - - run: yarn - - run: yarn test --scope=@budibase/pro + - run: yarn --frozen-lockfile + - name: Test + run: | + if ${{ env.USE_NX_AFFECTED }}; then + yarn test --scope=@budibase/pro --since=${{ env.NX_BASE_BRANCH }} + else + yarn test --scope=@budibase/pro + fi integration-test: runs-on: ubuntu-latest steps: - name: Checkout repo and submodules uses: actions/checkout@v3 - if: github.repository == github.event.pull_request.head.repo.full_name + if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase' with: submodules: true token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }} - name: Checkout repo only uses: actions/checkout@v3 - if: github.repository != github.event.pull_request.head.repo.full_name + if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase' - - name: Use Node.js 14.x + - name: Use Node.js 18.x uses: actions/setup-node@v3 with: - node-version: 14.x + node-version: 18.x cache: "yarn" - - run: yarn - - run: yarn build + - run: yarn --frozen-lockfile + - name: Build packages + run: yarn build --scope @budibase/server --scope @budibase/worker --scope @budibase/client --scope @budibase/backend-core - name: Run tests run: | cd qa-core @@ -166,14 +250,14 @@ jobs: check-pro-submodule: runs-on: ubuntu-latest - if: github.repository == github.event.pull_request.head.repo.full_name + if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase' steps: - name: Checkout repo and submodules uses: actions/checkout@v3 with: submodules: true - fetch-depth: 0 token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }} + fetch-depth: 0 - name: Check pro commit id: get_pro_commits @@ -190,6 +274,8 @@ jobs: base_commit=$(git rev-parse origin/develop) fi + echo "target_branch=$branch" + echo "target_branch=$branch" >> "$GITHUB_OUTPUT" echo "pro_commit=$pro_commit" echo "pro_commit=$pro_commit" >> "$GITHUB_OUTPUT" echo "base_commit=$base_commit" @@ -204,7 +290,7 @@ jobs: const baseCommit = '${{ steps.get_pro_commits.outputs.base_commit }}'; if (submoduleCommit !== baseCommit) { - console.error('Submodule commit does not match the latest commit on the develop branch.'); + console.error('Submodule commit does not match the latest commit on the "${{ steps.get_pro_commits.outputs.target_branch }}"" branch.'); console.error('Refer to the pro repo to merge your changes: https://github.com/Budibase/budibase-pro/blob/develop/docs/getting_started.md') process.exit(1); } else { diff --git a/.github/workflows/check_unreleased_changes.yml b/.github/workflows/check_unreleased_changes.yml new file mode 100644 index 0000000000..d558330545 --- /dev/null +++ b/.github/workflows/check_unreleased_changes.yml @@ -0,0 +1,29 @@ +name: check_unreleased_changes + +on: + pull_request: + branches: + - master + +jobs: + check_unreleased: + runs-on: ubuntu-latest + steps: + - name: Check for unreleased changes + env: + REPO: 
"Budibase/budibase" + TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + RELEASE_TIMESTAMP=$(curl -s -H "Authorization: token $TOKEN" \ + "https://api.github.com/repos/$REPO/releases/latest" | \ + jq -r .published_at) + COMMIT_TIMESTAMP=$(curl -s -H "Authorization: token $TOKEN" \ + "https://api.github.com/repos/$REPO/commits/master" | \ + jq -r .commit.committer.date) + RELEASE_SECONDS=$(date --date="$RELEASE_TIMESTAMP" "+%s") + COMMIT_SECONDS=$(date --date="$COMMIT_TIMESTAMP" "+%s") + if (( COMMIT_SECONDS > RELEASE_SECONDS )); then + echo "There are unreleased changes. Please release these changes before merging." + exit 1 + fi + echo "No unreleased changes detected." diff --git a/.github/workflows/close-featurebranch.yml b/.github/workflows/close-featurebranch.yml new file mode 100644 index 0000000000..0ec3b43598 --- /dev/null +++ b/.github/workflows/close-featurebranch.yml @@ -0,0 +1,21 @@ +name: close-featurebranch + +on: + pull_request: + types: [closed] + branches: + - develop + +jobs: + release: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: passeidireto/trigger-external-workflow-action@main + env: + PAYLOAD_BRANCH: ${{ github.head_ref }} + PAYLOAD_PR_NUMBER: ${{ github.ref }} + with: + repository: budibase/budibase-deploys + event: featurebranch-qa-close + github_pat: ${{ secrets.GH_ACCESS_TOKEN }} diff --git a/.github/workflows/deploy-featurebranch.yml b/.github/workflows/deploy-featurebranch.yml new file mode 100644 index 0000000000..f06707ab2b --- /dev/null +++ b/.github/workflows/deploy-featurebranch.yml @@ -0,0 +1,20 @@ +name: deploy-featurebranch + +on: + pull_request: + branches: + - develop + +jobs: + release: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: passeidireto/trigger-external-workflow-action@main + env: + PAYLOAD_BRANCH: ${{ github.head_ref }} + PAYLOAD_PR_NUMBER: ${{ github.ref }} + with: + repository: budibase/budibase-deploys + event: featurebranch-qa-deploy + github_pat: ${{ secrets.GH_ACCESS_TOKEN }} diff --git a/.github/workflows/release-develop.yml b/.github/workflows/release-develop.yml index 61cb283e28..bd727b7865 100644 --- a/.github/workflows/release-develop.yml +++ b/.github/workflows/release-develop.yml @@ -44,7 +44,7 @@ jobs: - uses: actions/setup-node@v1 with: - node-version: 14.x + node-version: 18.x - run: yarn install --frozen-lockfile - name: Update versions diff --git a/.github/workflows/release-master.yml b/.github/workflows/release-master.yml index 7f8b8f1d55..b4991cbfbe 100644 --- a/.github/workflows/release-master.yml +++ b/.github/workflows/release-master.yml @@ -36,7 +36,7 @@ jobs: - uses: actions/setup-node@v1 with: - node-version: 14.x + node-version: 18.x - run: yarn install --frozen-lockfile - name: Update versions @@ -60,9 +60,9 @@ jobs: - name: "Get Current tag" id: currenttag run: | - version=v$(./scripts/getCurrentVersion.sh) - echo 'Using tag $version' - echo "::set-output name=tag::$resversionult" + version=$(./scripts/getCurrentVersion.sh) + echo "Using tag $version" + echo "version=$version" >> "$GITHUB_OUTPUT" - name: Build/release Docker images run: | @@ -71,7 +71,7 @@ jobs: env: DOCKER_USER: ${{ secrets.DOCKER_USERNAME }} DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }} - BUDIBASE_RELEASE_VERSION: ${{ steps.currenttag.outputs.tag }} + BUDIBASE_RELEASE_VERSION: ${{ steps.currenttag.outputs.version }} release-helm-chart: needs: [release-images] diff --git a/.github/workflows/release-selfhost.yml b/.github/workflows/release-selfhost.yml index 39ee812726..d2689a0ea0 100644 --- 
a/.github/workflows/release-selfhost.yml +++ b/.github/workflows/release-selfhost.yml @@ -28,10 +28,10 @@ jobs: exit 1 fi - - name: Use Node.js 14.x + - name: Use Node.js 18.x uses: actions/setup-node@v1 with: - node-version: 14.x + node-version: 18.x - name: Get the latest budibase release version id: version @@ -67,7 +67,6 @@ jobs: - name: Bootstrap and build (CLI) run: | yarn - yarn bootstrap yarn build - name: Build OpenAPI spec diff --git a/.github/workflows/release-singleimage.yml b/.github/workflows/release-singleimage.yml index 5b75c20d29..bd01ed786a 100644 --- a/.github/workflows/release-singleimage.yml +++ b/.github/workflows/release-singleimage.yml @@ -1,4 +1,4 @@ -name: release-singleimage +name: Deploy Budibase Single Container Image to DockerHub on: workflow_dispatch: @@ -8,13 +8,20 @@ env: PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }} REGISTRY_URL: registry.hub.docker.com jobs: - build-amd64: - name: "build-amd64" + build: + name: "build" runs-on: ubuntu-latest strategy: matrix: - node-version: [14.x] + node-version: [18.x] steps: + - name: Maximize build space + uses: easimon/maximize-build-space@master + with: + root-reserve-mb: 35000 + swap-size-mb: 1024 + remove-android: 'true' + remove-dotnet: 'true' - name: Fail if not a tag run: | if [[ $GITHUB_REF != refs/tags/* ]]; then @@ -27,12 +34,14 @@ jobs: submodules: true token: ${{ secrets.PERSONAL_ACCESS_TOKEN }} fetch-depth: 0 + - name: Fail if tag is not in master run: | if ! git merge-base --is-ancestor ${{ github.sha }} origin/master; then echo "Tag is not in master. This pipeline can only execute tags that are present on the master branch" exit 1 fi + - name: Use Node.js ${{ matrix.node-version }} uses: actions/setup-node@v1 with: @@ -68,139 +77,9 @@ jobs: with: context: . push: true - platforms: linux/amd64 - tags: budibase/budibase,budibase/budibase:v${{ env.RELEASE_VERSION }} + platforms: linux/amd64,linux/arm64 + tags: budibase/budibase,budibase/budibase:${{ env.RELEASE_VERSION }} file: ./hosting/single/Dockerfile - - - name: Tag and release Budibase Azure App Service docker image - uses: docker/build-push-action@v2 - with: - context: . - push: true - platforms: linux/amd64 - build-args: TARGETBUILD=aas - tags: budibase/budibase-aas,budibase/budibase-aas:v${{ env.RELEASE_VERSION }} - file: ./hosting/single/Dockerfile - - build-arm64: - name: "build-arm64" - runs-on: ubuntu-latest - strategy: - matrix: - node-version: [14.x] - steps: - - name: Fail if not a tag - run: | - if [[ $GITHUB_REF != refs/tags/* ]]; then - echo "Workflow Dispatch can only be run on tags" - exit 1 - fi - - name: "Checkout" - uses: actions/checkout@v2 - with: - submodules: true - token: ${{ secrets.PERSONAL_ACCESS_TOKEN }} - fetch-depth: 0 - - name: Fail if tag is not in master - run: | - if ! git merge-base --is-ancestor ${{ github.sha }} origin/master; then - echo "Tag is not in master. 
This pipeline can only execute tags that are present on the master branch" - exit 1 - fi - - name: Use Node.js ${{ matrix.node-version }} - uses: actions/setup-node@v1 - with: - node-version: ${{ matrix.node-version }} - - name: Setup QEMU - uses: docker/setup-qemu-action@v1 - - name: Setup Docker Buildx - id: buildx - uses: docker/setup-buildx-action@v1 - - name: Run Yarn - run: yarn - - name: Update versions - run: ./scripts/updateVersions.sh - - name: Runt Yarn Lint - run: yarn lint - - name: Update versions - run: ./scripts/updateVersions.sh - - name: Run Yarn Build - run: yarn build:docker:pre - - name: Login to Docker Hub - uses: docker/login-action@v2 - with: - username: ${{ secrets.DOCKER_USERNAME }} - password: ${{ secrets.DOCKER_API_KEY }} - - name: Get the latest release version - id: version - run: | - release_version=$(cat lerna.json | jq -r '.version') - echo $release_version - echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV - - name: Tag and release Budibase service docker image - uses: docker/build-push-action@v2 - with: - context: . - push: true - platforms: linux/arm64 - tags: budibase/budibase,budibase/budibase:v${{ env.RELEASE_VERSION }} - file: ./hosting/single/Dockerfile - - build-aas: - name: "build-aas" - runs-on: ubuntu-latest - strategy: - matrix: - node-version: [14.x] - steps: - - name: Fail if not a tag - run: | - if [[ $GITHUB_REF != refs/tags/* ]]; then - echo "Workflow Dispatch can only be run on tags" - exit 1 - fi - - name: "Checkout" - uses: actions/checkout@v2 - with: - submodules: true - token: ${{ secrets.PERSONAL_ACCESS_TOKEN }} - fetch-depth: 0 - - name: Fail if tag is not in master - run: | - if ! git merge-base --is-ancestor ${{ github.sha }} origin/master; then - echo "Tag is not in master. This pipeline can only execute tags that are present on the master branch" - exit 1 - fi - - name: Use Node.js ${{ matrix.node-version }} - uses: actions/setup-node@v1 - with: - node-version: ${{ matrix.node-version }} - - name: Setup QEMU - uses: docker/setup-qemu-action@v1 - - name: Setup Docker Buildx - id: buildx - uses: docker/setup-buildx-action@v1 - - name: Run Yarn - run: yarn - - name: Update versions - run: ./scripts/updateVersions.sh - - name: Runt Yarn Lint - run: yarn lint - - name: Update versions - run: ./scripts/updateVersions.sh - - name: Run Yarn Build - run: yarn build:docker:pre - - name: Login to Docker Hub - uses: docker/login-action@v2 - with: - username: ${{ secrets.DOCKER_USERNAME }} - password: ${{ secrets.DOCKER_API_KEY }} - - name: Get the latest release version - id: version - run: | - release_version=$(cat lerna.json | jq -r '.version') - echo $release_version - echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV - name: Tag and release Budibase Azure App Service docker image uses: docker/build-push-action@v2 with: diff --git a/.github/workflows/stale_bot.yml b/.github/workflows/stale_bot.yml index 8cda3a9342..f87d561db9 100644 --- a/.github/workflows/stale_bot.yml +++ b/.github/workflows/stale_bot.yml @@ -2,7 +2,7 @@ name: Close stale issues and PRs # https://github.com/actions/stale on: workflow_dispatch: schedule: - - cron: '30 1 * * *' # 1:30 every morning + - cron: '*/30 * * * *' # Every 30 mins jobs: stale: diff --git a/.gitignore b/.gitignore index b3dc8af0d4..02e0ca300d 100644 --- a/.gitignore +++ b/.gitignore @@ -97,12 +97,8 @@ typings/ bin/ hosting/.generated* -packages/builder/cypress.env.json -packages/builder/cypress/reports stats.html -# TypeScript cache -*.tsbuildinfo # plugins budibase-component diff --git 
a/.nvmrc b/.nvmrc index 835d07c442..7950a44576 100644 --- a/.nvmrc +++ b/.nvmrc @@ -1 +1 @@ -v14.20.1 +v18.17.0 diff --git a/.prettierignore b/.prettierignore index 7eb567d517..64607d74ab 100644 --- a/.prettierignore +++ b/.prettierignore @@ -9,6 +9,5 @@ packages/backend-core/coverage packages/server/client packages/server/src/definitions/openapi.ts packages/builder/.routify -packages/builder/cypress/support/queryLevelTransformerFunction.js -packages/builder/cypress/support/queryLevelTransformerFunctionWithData.js -packages/sdk/sdk \ No newline at end of file +packages/sdk/sdk +packages/pro/coverage \ No newline at end of file diff --git a/.tool-versions b/.tool-versions index da92e03885..a909d60941 100644 --- a/.tool-versions +++ b/.tool-versions @@ -1,2 +1,3 @@ -nodejs 14.21.3 -python 3.10.0 \ No newline at end of file +nodejs 18.17.0 +python 3.10.0 +yarn 1.22.19 diff --git a/.vscode/launch.json b/.vscode/launch.json index 8cb49d5825..cfd8d7b155 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -1,42 +1,31 @@ + { - // Use IntelliSense to learn about possible attributes. - // Hover to view descriptions of existing attributes. - // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 - "version": "0.2.0", - "configurations": [ - { - "name": "Budibase Server", - "type": "node", - "request": "launch", - "runtimeArgs": [ - "--nolazy", - "-r", - "ts-node/register/transpile-only" - ], - "args": [ - "${workspaceFolder}/packages/server/src/index.ts" - ], - "cwd": "${workspaceFolder}/packages/server" - }, - { - "name": "Budibase Worker", - "type": "node", - "request": "launch", - "runtimeArgs": [ - "--nolazy", - "-r", - "ts-node/register/transpile-only" - ], - "args": [ - "${workspaceFolder}/packages/worker/src/index.ts" - ], - "cwd": "${workspaceFolder}/packages/worker" - }, - ], - "compounds": [ - { - "name": "Start Budibase", - "configurations": ["Budibase Server", "Budibase Worker"] - } - ] -} \ No newline at end of file + // Use IntelliSense to learn about possible attributes. + // Hover to view descriptions of existing attributes. 
+ // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "name": "Budibase Server", + "type": "node", + "request": "launch", + "runtimeArgs": ["--nolazy", "-r", "ts-node/register/transpile-only"], + "args": ["${workspaceFolder}/packages/server/src/index.ts"], + "cwd": "${workspaceFolder}/packages/server" + }, + { + "name": "Budibase Worker", + "type": "node", + "request": "launch", + "runtimeArgs": ["--nolazy", "-r", "ts-node/register/transpile-only"], + "args": ["${workspaceFolder}/packages/worker/src/index.ts"], + "cwd": "${workspaceFolder}/packages/worker" + } + ], + "compounds": [ + { + "name": "Start Budibase", + "configurations": ["Budibase Server", "Budibase Worker"] + } + ] +} diff --git a/charts/budibase/templates/app-service-deployment.yaml b/charts/budibase/templates/app-service-deployment.yaml index 2b2589406a..3243509094 100644 --- a/charts/budibase/templates/app-service-deployment.yaml +++ b/charts/budibase/templates/app-service-deployment.yaml @@ -120,6 +120,8 @@ spec: {{ end }} - name: MULTI_TENANCY value: {{ .Values.globals.multiTenancy | quote }} + - name: OFFLINE_MODE + value: {{ .Values.globals.offlineMode | quote }} - name: LOG_LEVEL value: {{ .Values.services.apps.logLevel | quote }} - name: REDIS_PASSWORD @@ -201,25 +203,24 @@ spec: image: budibase/apps:{{ .Values.globals.appVersion | default .Chart.AppVersion }} imagePullPolicy: Always + {{- if .Values.services.apps.startupProbe }} + {{- with .Values.services.apps.startupProbe }} + startupProbe: + {{- toYaml . | nindent 10 }} + {{- end }} + {{- end }} + {{- if .Values.services.apps.livenessProbe }} + {{- with .Values.services.apps.livenessProbe }} livenessProbe: - httpGet: - path: /health - port: {{ .Values.services.apps.port }} - initialDelaySeconds: 10 - periodSeconds: 5 - successThreshold: 1 - failureThreshold: 3 - timeoutSeconds: 3 + {{- toYaml . | nindent 10 }} + {{- end }} + {{- end }} + {{- if .Values.services.apps.readinessProbe }} + {{- with .Values.services.apps.readinessProbe }} readinessProbe: - httpGet: - path: /health - port: {{ .Values.services.apps.port }} - initialDelaySeconds: 5 - periodSeconds: 5 - successThreshold: 1 - failureThreshold: 3 - timeoutSeconds: 3 - + {{- toYaml . | nindent 10 }} + {{- end }} + {{- end }} name: bbapps ports: - containerPort: {{ .Values.services.apps.port }} diff --git a/charts/budibase/templates/proxy-service-deployment.yaml b/charts/budibase/templates/proxy-service-deployment.yaml index c087627100..53bba6232d 100644 --- a/charts/budibase/templates/proxy-service-deployment.yaml +++ b/charts/budibase/templates/proxy-service-deployment.yaml @@ -40,24 +40,24 @@ spec: - image: budibase/proxy:{{ .Values.globals.appVersion | default .Chart.AppVersion }} imagePullPolicy: Always name: proxy-service + {{- if .Values.services.proxy.startupProbe }} + {{- with .Values.services.proxy.startupProbe }} + startupProbe: + {{- toYaml . | nindent 10 }} + {{- end }} + {{- end }} + {{- if .Values.services.proxy.livenessProbe }} + {{- with .Values.services.proxy.livenessProbe }} livenessProbe: - httpGet: - path: /health - port: {{ .Values.services.proxy.port }} - initialDelaySeconds: 0 - periodSeconds: 5 - successThreshold: 1 - failureThreshold: 2 - timeoutSeconds: 3 + {{- toYaml . 
| nindent 10 }} + {{- end }} + {{- end }} + {{- if .Values.services.proxy.readinessProbe }} + {{- with .Values.services.proxy.readinessProbe }} readinessProbe: - httpGet: - path: /health - port: {{ .Values.services.proxy.port }} - initialDelaySeconds: 0 - periodSeconds: 5 - successThreshold: 1 - failureThreshold: 2 - timeoutSeconds: 3 + {{- toYaml . | nindent 10 }} + {{- end }} + {{- end }} ports: - containerPort: {{ .Values.services.proxy.port }} env: diff --git a/charts/budibase/templates/secrets.yaml b/charts/budibase/templates/secrets.yaml index 1c0a914ed3..263934187e 100644 --- a/charts/budibase/templates/secrets.yaml +++ b/charts/budibase/templates/secrets.yaml @@ -1,4 +1,5 @@ -{{- if .Values.globals.createSecrets -}} +{{- $existingSecret := lookup "v1" "Secret" .Release.Namespace (include "budibase.fullname" .) }} +{{- if .Values.globals.createSecrets }} apiVersion: v1 kind: Secret metadata: @@ -10,8 +11,15 @@ metadata: heritage: "{{ .Release.Service }}" type: Opaque data: + {{- if $existingSecret }} + internalApiKey: {{ index $existingSecret.data "internalApiKey" }} + jwtSecret: {{ index $existingSecret.data "jwtSecret" }} + objectStoreAccess: {{ index $existingSecret.data "objectStoreAccess" }} + objectStoreSecret: {{ index $existingSecret.data "objectStoreSecret" }} + {{- else }} internalApiKey: {{ template "budibase.defaultsecret" .Values.globals.internalApiKey }} jwtSecret: {{ template "budibase.defaultsecret" .Values.globals.jwtSecret }} objectStoreAccess: {{ template "budibase.defaultsecret" .Values.services.objectStore.accessKey }} objectStoreSecret: {{ template "budibase.defaultsecret" .Values.services.objectStore.secretKey }} -{{- end -}} + {{- end }} +{{- end }} diff --git a/charts/budibase/templates/worker-service-deployment.yaml b/charts/budibase/templates/worker-service-deployment.yaml index 5fed80b355..7621aa23ef 100644 --- a/charts/budibase/templates/worker-service-deployment.yaml +++ b/charts/budibase/templates/worker-service-deployment.yaml @@ -116,6 +116,8 @@ spec: value: {{ .Values.services.worker.port | quote }} - name: MULTI_TENANCY value: {{ .Values.globals.multiTenancy | quote }} + - name: OFFLINE_MODE + value: {{ .Values.globals.offlineMode | quote }} - name: LOG_LEVEL value: {{ .Values.services.worker.logLevel | quote }} - name: REDIS_PASSWORD @@ -190,24 +192,24 @@ spec: {{ end }} image: budibase/worker:{{ .Values.globals.appVersion | default .Chart.AppVersion }} imagePullPolicy: Always + {{- if .Values.services.worker.startupProbe }} + {{- with .Values.services.worker.startupProbe }} + startupProbe: + {{- toYaml . | nindent 10 }} + {{- end }} + {{- end }} + {{- if .Values.services.worker.livenessProbe }} + {{- with .Values.services.worker.livenessProbe }} livenessProbe: - httpGet: - path: /health - port: {{ .Values.services.worker.port }} - initialDelaySeconds: 10 - periodSeconds: 5 - successThreshold: 1 - failureThreshold: 3 - timeoutSeconds: 3 + {{- toYaml . | nindent 10 }} + {{- end }} + {{- end }} + {{- if .Values.services.worker.readinessProbe }} + {{- with .Values.services.worker.readinessProbe }} readinessProbe: - httpGet: - path: /health - port: {{ .Values.services.worker.port }} - initialDelaySeconds: 5 - periodSeconds: 5 - successThreshold: 1 - failureThreshold: 3 - timeoutSeconds: 3 + {{- toYaml . 
| nindent 10 }} + {{- end }} + {{- end }} name: bbworker ports: - containerPort: {{ .Values.services.worker.port }} diff --git a/charts/budibase/values.yaml b/charts/budibase/values.yaml index 74e4c52654..e5f1eabb53 100644 --- a/charts/budibase/values.yaml +++ b/charts/budibase/values.yaml @@ -82,6 +82,7 @@ globals: posthogToken: "phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU" selfHosted: "1" # set to 0 for budibase cloud environment, set to 1 for self-hosted setup multiTenancy: "0" # set to 0 to disable multiple orgs, set to 1 to enable multiple orgs + offlineMode: "0" # set to 1 to enable offline mode accountPortalUrl: "" accountPortalApiKey: "" cookieDomain: "" @@ -119,15 +120,36 @@ services: port: 10000 replicaCount: 1 upstreams: - apps: 'http://app-service.{{ .Release.Namespace }}.svc.{{ .Values.services.dns }}:{{ .Values.services.apps.port }}' - worker: 'http://worker-service.{{ .Release.Namespace }}.svc.{{ .Values.services.dns }}:{{ .Values.services.worker.port }}' - minio: 'http://minio-service.{{ .Release.Namespace }}.svc.{{ .Values.services.dns }}:{{ .Values.services.objectStore.port }}' - couchdb: 'http://{{ .Release.Name }}-svc-couchdb:{{ .Values.services.couchdb.port }}' + apps: "http://app-service.{{ .Release.Namespace }}.svc.{{ .Values.services.dns }}:{{ .Values.services.apps.port }}" + worker: "http://worker-service.{{ .Release.Namespace }}.svc.{{ .Values.services.dns }}:{{ .Values.services.worker.port }}" + minio: "http://minio-service.{{ .Release.Namespace }}.svc.{{ .Values.services.dns }}:{{ .Values.services.objectStore.port }}" + couchdb: "http://{{ .Release.Name }}-svc-couchdb:{{ .Values.services.couchdb.port }}" resources: {} -# annotations: -# co.elastic.logs/module: nginx -# co.elastic.logs/fileset.stdout: access -# co.elastic.logs/fileset.stderr: error + startupProbe: + httpGet: + path: /health + port: 10000 + scheme: HTTP + failureThreshold: 30 + periodSeconds: 3 + readinessProbe: + httpGet: + path: /health + port: 10000 + scheme: HTTP + periodSeconds: 3 + failureThreshold: 1 + livenessProbe: + httpGet: + path: /health + port: 10000 + scheme: HTTP + failureThreshold: 3 + periodSeconds: 5 + # annotations: + # co.elastic.logs/module: nginx + # co.elastic.logs/fileset.stdout: access + # co.elastic.logs/fileset.stderr: error apps: port: 4002 @@ -135,23 +157,65 @@ services: logLevel: info httpLogging: 1 resources: {} -# nodeDebug: "" # set the value of NODE_DEBUG -# annotations: -# co.elastic.logs/multiline.type: pattern -# co.elastic.logs/multiline.pattern: '^[[:space:]]' -# co.elastic.logs/multiline.negate: false -# co.elastic.logs/multiline.match: after + startupProbe: + httpGet: + path: /health + port: 4002 + scheme: HTTP + failureThreshold: 30 + periodSeconds: 3 + readinessProbe: + httpGet: + path: /health + port: 4002 + scheme: HTTP + periodSeconds: 3 + failureThreshold: 1 + livenessProbe: + httpGet: + path: /health + port: 4002 + scheme: HTTP + failureThreshold: 3 + periodSeconds: 5 + # nodeDebug: "" # set the value of NODE_DEBUG + # annotations: + # co.elastic.logs/multiline.type: pattern + # co.elastic.logs/multiline.pattern: '^[[:space:]]' + # co.elastic.logs/multiline.negate: false + # co.elastic.logs/multiline.match: after worker: port: 4003 replicaCount: 1 logLevel: info httpLogging: 1 resources: {} -# annotations: -# co.elastic.logs/multiline.type: pattern -# co.elastic.logs/multiline.pattern: '^[[:space:]]' -# co.elastic.logs/multiline.negate: false -# co.elastic.logs/multiline.match: after + startupProbe: + httpGet: + path: /health + port: 4003 + 
scheme: HTTP + failureThreshold: 30 + periodSeconds: 3 + readinessProbe: + httpGet: + path: /health + port: 4003 + scheme: HTTP + periodSeconds: 3 + failureThreshold: 1 + livenessProbe: + httpGet: + path: /health + port: 4003 + scheme: HTTP + failureThreshold: 3 + periodSeconds: 5 + # annotations: + # co.elastic.logs/multiline.type: pattern + # co.elastic.logs/multiline.pattern: '^[[:space:]]' + # co.elastic.logs/multiline.negate: false + # co.elastic.logs/multiline.match: after couchdb: enabled: true @@ -344,14 +408,12 @@ couchdb: ## Ref: https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-probes/#configure-probes # FOR COUCHDB livenessProbe: - enabled: true failureThreshold: 3 initialDelaySeconds: 0 periodSeconds: 10 successThreshold: 1 timeoutSeconds: 1 readinessProbe: - enabled: true failureThreshold: 3 initialDelaySeconds: 0 periodSeconds: 10 diff --git a/docs/CONTRIBUTING.md b/docs/CONTRIBUTING.md index 2fb4c36fa8..3a32075a33 100644 --- a/docs/CONTRIBUTING.md +++ b/docs/CONTRIBUTING.md @@ -90,7 +90,7 @@ Component libraries are collections of components as well as the definition of t #### 1. Prerequisites -- NodeJS version `14.x.x` +- NodeJS version `18.x.x` - Python version `3.x` ### Using asdf (recommended) @@ -264,16 +264,14 @@ Sometimes, things go wrong. This can be due to incompatible updates on the budib ### Running tests -#### End-to-end Tests +#### Unit Tests -Budibase uses Cypress to run a number of E2E tests. To run the tests execute the following command in the root folder: +Budibase uses Jest to run a number of tests. To run the tests execute the following command in the root folder: ``` -yarn test:e2e +yarn test ``` -Or if you are in the builder you can run `yarn cy:test`. - ### Other Useful Information - The contributors are listed in [AUTHORS.md](https://github.com/Budibase/budibase/blob/master/.github/AUTHORS.md) (add yourself). diff --git a/docs/DEV-SETUP-DEBIAN.md b/docs/DEV-SETUP-DEBIAN.md index a8b1e3dce4..e098862c64 100644 --- a/docs/DEV-SETUP-DEBIAN.md +++ b/docs/DEV-SETUP-DEBIAN.md @@ -55,7 +55,7 @@ yarn setup The yarn setup command runs several build steps i.e. ``` -node ./hosting/scripts/setup.js && yarn && yarn bootstrap && yarn build && yarn dev +node ./hosting/scripts/setup.js && yarn && yarn build && yarn dev ``` So this command will actually run the application in dev mode. It creates .env files under `./packages/server` and `./packages/worker` and runs docker containers for each service via docker-compose. diff --git a/docs/DEV-SETUP-MACOSX.md b/docs/DEV-SETUP-MACOSX.md index 94ed3fc1ee..0e13d540b3 100644 --- a/docs/DEV-SETUP-MACOSX.md +++ b/docs/DEV-SETUP-MACOSX.md @@ -55,7 +55,7 @@ yarn setup The yarn setup command runs several build steps i.e. ``` -node ./hosting/scripts/setup.js && yarn && yarn bootstrap && yarn build && yarn dev +node ./hosting/scripts/setup.js && yarn && yarn build && yarn dev ``` So this command will actually run the application in dev mode. It creates .env files under `./packages/server` and `./packages/worker` and runs docker containers for each service via docker-compose. diff --git a/docs/DEV-SETUP-WINDOWS.md b/docs/DEV-SETUP-WINDOWS.md index 176e0700d7..f26a5a0882 100644 --- a/docs/DEV-SETUP-WINDOWS.md +++ b/docs/DEV-SETUP-WINDOWS.md @@ -74,7 +74,7 @@ yarn setup The yarn setup command runs several build steps i.e. 
``` -node ./hosting/scripts/setup.js && yarn && yarn bootstrap && yarn build && yarn dev +node ./hosting/scripts/setup.js && yarn && yarn build && yarn dev ``` So this command will actually run the application in dev mode. It creates .env files under `./packages/server` and `./packages/worker` and runs docker containers for each service via docker-compose. diff --git a/hosting/couchdb/Dockerfile b/hosting/couchdb/Dockerfile index 632f326c9b..5a3b8c07d6 100644 --- a/hosting/couchdb/Dockerfile +++ b/hosting/couchdb/Dockerfile @@ -6,11 +6,11 @@ EXPOSE 5984 EXPOSE 4984 RUN apt-get update && apt-get install -y --no-install-recommends software-properties-common wget unzip curl && \ - wget -qO - https://adoptopenjdk.jfrog.io/adoptopenjdk/api/gpg/key/public | apt-key add - && \ + wget -O - https://packages.adoptium.net/artifactory/api/gpg/key/public | sudo apt-key add - && \ apt-add-repository 'deb http://security.debian.org/debian-security bullseye-security/updates main' && \ apt-add-repository 'deb http://archive.debian.org/debian stretch-backports main' && \ - apt-add-repository --yes https://adoptopenjdk.jfrog.io/adoptopenjdk/deb/ && \ - apt-get update && apt-get install -y --no-install-recommends adoptopenjdk-8-hotspot && \ + apt-add-repository 'deb https://packages.adoptium.net/artifactory/deb bullseye main' && \ + apt-get update && apt-get install -y --no-install-recommends temurin-8-jdk && \ rm -rf /var/lib/apt/lists/ # setup clouseau diff --git a/hosting/docker-compose.test.yaml b/hosting/docker-compose.test.yaml deleted file mode 100644 index f059173d2d..0000000000 --- a/hosting/docker-compose.test.yaml +++ /dev/null @@ -1,47 +0,0 @@ -version: "3" - -# optional ports are specified throughout for more advanced use cases. - -services: - minio-service: - restart: on-failure - # Last version that supports the "fs" backend - image: minio/minio:RELEASE.2022-10-24T18-35-07Z - ports: - - "9000" - - "9001" - environment: - MINIO_ACCESS_KEY: ${MINIO_ACCESS_KEY} - MINIO_SECRET_KEY: ${MINIO_SECRET_KEY} - command: server /data --console-address ":9001" - healthcheck: - test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"] - interval: 30s - timeout: 20s - retries: 3 - - couchdb-service: - # platform: linux/amd64 - restart: on-failure - image: budibase/couchdb - environment: - - COUCHDB_PASSWORD=${COUCH_DB_PASSWORD} - - COUCHDB_USER=${COUCH_DB_USER} - ports: - - "5984" - - "4369" - - "9100" - healthcheck: - test: ["CMD", "curl", "-f", "http://localhost:5984/_up"] - interval: 30s - timeout: 20s - retries: 3 - - redis-service: - restart: on-failure - image: redis - command: redis-server --requirepass ${REDIS_PASSWORD} - ports: - - "6379" - healthcheck: - test: ["CMD", "redis-cli", "ping"] \ No newline at end of file diff --git a/hosting/docker-compose.yaml b/hosting/docker-compose.yaml index bad34a20ea..b3887c15fa 100644 --- a/hosting/docker-compose.yaml +++ b/hosting/docker-compose.yaml @@ -27,6 +27,7 @@ services: BB_ADMIN_USER_EMAIL: ${BB_ADMIN_USER_EMAIL} BB_ADMIN_USER_PASSWORD: ${BB_ADMIN_USER_PASSWORD} PLUGINS_DIR: ${PLUGINS_DIR} + OFFLINE_MODE: ${OFFLINE_MODE} depends_on: - worker-service - redis-service @@ -54,6 +55,7 @@ services: INTERNAL_API_KEY: ${INTERNAL_API_KEY} REDIS_URL: redis-service:6379 REDIS_PASSWORD: ${REDIS_PASSWORD} + OFFLINE_MODE: ${OFFLINE_MODE} depends_on: - redis-service - minio-service diff --git a/hosting/proxy/nginx.prod.conf b/hosting/proxy/nginx.prod.conf index 9ce6b54053..365765ccbb 100644 --- a/hosting/proxy/nginx.prod.conf +++ 
b/hosting/proxy/nginx.prod.conf @@ -55,7 +55,7 @@ http { set $csp_style "style-src 'self' 'unsafe-inline' https://cdn.jsdelivr.net https://fonts.googleapis.com https://rsms.me https://maxcdn.bootstrapcdn.com"; set $csp_object "object-src 'none'"; set $csp_base_uri "base-uri 'self'"; - set $csp_connect "connect-src 'self' https://*.budibase.net https://api-iam.intercom.io https://api-iam.intercom.io https://api-ping.intercom.io https://app.posthog.com wss://nexus-websocket-a.intercom.io wss://nexus-websocket-b.intercom.io https://nexus-websocket-a.intercom.io https://nexus-websocket-b.intercom.io https://uploads.intercomcdn.com https://uploads.intercomusercontent.com https://*.amazonaws.com https://*.s3.amazonaws.com https://*.s3.us-east-2.amazonaws.com https://*.s3.us-east-1.amazonaws.com https://*.s3.us-west-1.amazonaws.com https://*.s3.us-west-2.amazonaws.com https://*.s3.af-south-1.amazonaws.com https://*.s3.ap-east-1.amazonaws.com https://*.s3.ap-southeast-3.amazonaws.com https://*.s3.ap-south-1.amazonaws.com https://*.s3.ap-northeast-3.amazonaws.com https://*.s3.ap-northeast-2.amazonaws.com https://*.s3.ap-southeast-1.amazonaws.com https://*.s3.ap-southeast-2.amazonaws.com https://*.s3.ap-northeast-1.amazonaws.com https://*.s3.ca-central-1.amazonaws.com https://*.s3.cn-north-1.amazonaws.com https://*.s3.cn-northwest-1.amazonaws.com https://*.s3.eu-central-1.amazonaws.com https://*.s3.eu-west-1.amazonaws.com https://*.s3.eu-west-2.amazonaws.com https://*.s3.eu-south-1.amazonaws.com https://*.s3.eu-west-3.amazonaws.com https://*.s3.eu-north-1.amazonaws.com https://*.s3.sa-east-1.amazonaws.com https://*.s3.me-south-1.amazonaws.com https://*.s3.us-gov-east-1.amazonaws.com https://*.s3.us-gov-west-1.amazonaws.com https://api.github.com"; + set $csp_connect "connect-src 'self' https://*.budibase.app https://*.budibaseqa.app https://*.budibase.net https://api-iam.intercom.io https://api-iam.intercom.io https://api-ping.intercom.io https://app.posthog.com wss://nexus-websocket-a.intercom.io wss://nexus-websocket-b.intercom.io https://nexus-websocket-a.intercom.io https://nexus-websocket-b.intercom.io https://uploads.intercomcdn.com https://uploads.intercomusercontent.com https://*.amazonaws.com https://*.s3.amazonaws.com https://*.s3.us-east-2.amazonaws.com https://*.s3.us-east-1.amazonaws.com https://*.s3.us-west-1.amazonaws.com https://*.s3.us-west-2.amazonaws.com https://*.s3.af-south-1.amazonaws.com https://*.s3.ap-east-1.amazonaws.com https://*.s3.ap-southeast-3.amazonaws.com https://*.s3.ap-south-1.amazonaws.com https://*.s3.ap-northeast-3.amazonaws.com https://*.s3.ap-northeast-2.amazonaws.com https://*.s3.ap-southeast-1.amazonaws.com https://*.s3.ap-southeast-2.amazonaws.com https://*.s3.ap-northeast-1.amazonaws.com https://*.s3.ca-central-1.amazonaws.com https://*.s3.cn-north-1.amazonaws.com https://*.s3.cn-northwest-1.amazonaws.com https://*.s3.eu-central-1.amazonaws.com https://*.s3.eu-west-1.amazonaws.com https://*.s3.eu-west-2.amazonaws.com https://*.s3.eu-south-1.amazonaws.com https://*.s3.eu-west-3.amazonaws.com https://*.s3.eu-north-1.amazonaws.com https://*.s3.sa-east-1.amazonaws.com https://*.s3.me-south-1.amazonaws.com https://*.s3.us-gov-east-1.amazonaws.com https://*.s3.us-gov-west-1.amazonaws.com https://api.github.com"; set $csp_font "font-src 'self' data: https://cdn.jsdelivr.net https://fonts.gstatic.com https://rsms.me https://maxcdn.bootstrapcdn.com https://js.intercomcdn.com https://fonts.intercomcdn.com"; set $csp_frame "frame-src 'self' https:"; set $csp_img 
"img-src http: https: data: blob:"; diff --git a/hosting/single/Dockerfile b/hosting/single/Dockerfile index e43e5ad10c..9fdf2449d1 100644 --- a/hosting/single/Dockerfile +++ b/hosting/single/Dockerfile @@ -1,7 +1,7 @@ -FROM node:14-slim as build +FROM node:18-slim as build # install node-gyp dependencies -RUN apt-get update && apt-get upgrade -y && apt-get install -y --no-install-recommends apt-utils cron g++ make python +RUN apt-get update && apt-get upgrade -y && apt-get install -y --no-install-recommends apt-utils cron g++ make python3 # add pin script WORKDIR / diff --git a/hosting/single/README.md b/hosting/single/README.md index 1147d55c89..09010f5075 100644 --- a/hosting/single/README.md +++ b/hosting/single/README.md @@ -58,7 +58,6 @@ Node setup: ``` node ./hosting/scripts/setup.js yarn -yarn bootstrap yarn build ``` #### Build Image diff --git a/hosting/dependencies/Dockerfile b/hosting/tests/Dockerfile similarity index 100% rename from hosting/dependencies/Dockerfile rename to hosting/tests/Dockerfile diff --git a/hosting/dependencies/README.md b/hosting/tests/README.md similarity index 98% rename from hosting/dependencies/README.md rename to hosting/tests/README.md index 8586b31948..19b9ed5037 100644 --- a/hosting/dependencies/README.md +++ b/hosting/tests/README.md @@ -47,7 +47,6 @@ Node setup: ``` node ./hosting/scripts/setup.js yarn -yarn bootstrap yarn build ``` #### Build Image diff --git a/hosting/dependencies/runner.sh b/hosting/tests/runner.sh similarity index 100% rename from hosting/dependencies/runner.sh rename to hosting/tests/runner.sh diff --git a/jestTestcontainersConfigGenerator.js b/jestTestcontainersConfigGenerator.js index 4b94cf5016..1e39ed771f 100644 --- a/jestTestcontainersConfigGenerator.js +++ b/jestTestcontainersConfigGenerator.js @@ -1,9 +1,16 @@ module.exports = () => { return { - dockerCompose: { - composeFilePath: "../../hosting", - composeFile: "docker-compose.test.yaml", - startupTimeout: 10000, - }, + couchdb: { + image: "budibase/couchdb", + ports: [5984], + env: { + COUCHDB_PASSWORD: "budibase", + COUCHDB_USER: "budibase", + }, + wait: { + type: "ports", + timeout: 20000, + } + } } } diff --git a/lerna.json b/lerna.json index c36d346133..1345c6f54c 100644 --- a/lerna.json +++ b/lerna.json @@ -1,5 +1,5 @@ { - "version": "2.8.27", + "version": "2.11.5-alpha.0", "npmClient": "yarn", "packages": [ "packages/*" @@ -19,4 +19,4 @@ "loadEnvFiles": false } } -} +} \ No newline at end of file diff --git a/nx.json b/nx.json index c2f44ef70d..8176bae82c 100644 --- a/nx.json +++ b/nx.json @@ -3,19 +3,10 @@ "default": { "runner": "nx-cloud", "options": { - "cacheableOperations": ["build", "test"], + "cacheableOperations": ["build", "test", "check:types"], "accessToken": "MmM4OGYxNzItMDBlYy00ZmE3LTk4MTYtNmJhYWMyZjBjZTUyfHJlYWQ=" } } }, - "targetDefaults": { - "dev:builder": { - "dependsOn": [ - { - "projects": ["@budibase/string-templates"], - "target": "build" - } - ] - } - } + "targetDefaults": {} } diff --git a/package.json b/package.json index 6a678f1bf3..e5b6554fca 100644 --- a/package.json +++ b/package.json @@ -5,11 +5,10 @@ "@esbuild-plugins/tsconfig-paths": "^0.1.2", "@nx/js": "16.4.3", "@rollup/plugin-json": "^4.0.2", - "@typescript-eslint/parser": "5.45.0", + "@typescript-eslint/parser": "6.7.2", "esbuild": "^0.18.17", "esbuild-node-externals": "^1.8.0", "eslint": "^8.44.0", - "eslint-plugin-cypress": "^2.11.3", "husky": "^8.0.3", "js-yaml": "^4.1.0", "kill-port": "^1.6.1", @@ -22,8 +21,8 @@ "prettier-plugin-svelte": "^2.3.0", "rimraf": "^3.0.2", 
"rollup-plugin-replace": "^2.2.0", - "svelte": "^3.38.2", - "typescript": "4.7.3", + "svelte": "3.49.0", + "typescript": "5.2.2", "@babel/core": "^7.22.5", "@babel/eslint-parser": "^7.22.5", "@babel/preset-env": "^7.22.5", @@ -33,27 +32,24 @@ "scripts": { "preinstall": "node scripts/syncProPackage.js", "setup": "git config submodule.recurse true && git submodule update && node ./hosting/scripts/setup.js && yarn && yarn build && yarn dev", - "bootstrap": "./scripts/link-dependencies.sh && echo '***BOOTSTRAP ONLY REQUIRED FOR USE WITH ACCOUNT PORTAL***'", - "build": "yarn nx run-many -t=build", + "build": "lerna run build --stream", "build:dev": "lerna run --stream prebuild && yarn nx run-many --target=build --output-style=dynamic --watch --preserveWatchOutput", - "check:types": "lerna run check:types --skip-nx-cache", - "backend:bootstrap": "./scripts/scopeBackend.sh && yarn run bootstrap", - "backend:build": "./scripts/scopeBackend.sh 'lerna run --stream build'", + "check:types": "lerna run check:types", "build:sdk": "lerna run --stream build:sdk", "deps:circular": "madge packages/server/dist/index.js packages/worker/src/index.ts packages/backend-core/dist/src/index.js packages/cli/src/index.js --circular", "release": "lerna publish from-package --yes --force-publish --no-git-tag-version --no-push --no-git-reset", "release:develop": "yarn release --dist-tag develop", - "restore": "yarn run clean && yarn run bootstrap && yarn run build", + "restore": "yarn run clean && yarn && yarn run build", "nuke": "yarn run nuke:packages && yarn run nuke:docker", "nuke:packages": "yarn run restore", "nuke:docker": "lerna run --stream dev:stack:nuke", - "clean": "lerna clean", + "clean": "lerna clean -y", "kill-builder": "kill-port 3000", "kill-server": "kill-port 4001 4002", "kill-all": "yarn run kill-builder && yarn run kill-server", - "dev": "yarn run kill-all && yarn nx run-many --target=dev:builder", - "dev:noserver": "yarn run kill-builder && lerna run --stream dev:stack:up && yarn nx run-many --target=dev:builder --exclude=@budibase/backend-core,@budibase/server,@budibase/worker", - "dev:server": "yarn run kill-server && yarn nx run-many --target=dev:builder --projects=@budibase/worker,@budibase/server", + "dev": "yarn run kill-all && lerna run --parallel prebuild && lerna run --stream dev:builder", + "dev:noserver": "yarn run kill-builder && lerna run --stream dev:stack:up && lerna run --stream dev:builder --ignore @budibase/backend-core --ignore @budibase/server --ignore @budibase/worker", + "dev:server": "yarn run kill-server && lerna run --stream dev:builder --scope @budibase/worker --scope @budibase/server", "dev:built": "yarn run kill-all && cd packages/server && yarn dev:stack:up && cd ../../ && lerna run --stream dev:built", "dev:docker": "yarn build:docker:pre && docker-compose -f hosting/docker-compose.build.yaml -f hosting/docker-compose.dev.yaml --env-file hosting/.env up --build --scale proxy-service=0", "test": "lerna run --stream test --stream", @@ -93,9 +89,8 @@ "mode:account": "yarn mode:cloud && yarn env:account:enable", "security:audit": "node scripts/audit.js", "postinstall": "husky install", - "dep:clean": "yarn clean -y && yarn bootstrap", - "submodules:load": "git submodule init && git submodule update && yarn && yarn bootstrap", - "submodules:unload": "git submodule deinit --all && yarn && yarn bootstrap" + "submodules:load": "git submodule init && git submodule update && yarn", + "submodules:unload": "git submodule deinit --all && yarn" }, "workspaces": { "packages": [ @@ 
-108,5 +103,8 @@ "@budibase/string-templates": "0.0.0", "@budibase/types": "0.0.0" }, + "engines": { + "node": ">=18.0.0 <19.0.0" + }, "dependencies": {} } diff --git a/packages/backend-core/.npmignore b/packages/backend-core/.npmignore new file mode 100644 index 0000000000..30bba85ce8 --- /dev/null +++ b/packages/backend-core/.npmignore @@ -0,0 +1,6 @@ +* +!dist/**/* +dist/tsconfig.build.tsbuildinfo +!package.json +!src/** +!tests/** \ No newline at end of file diff --git a/packages/backend-core/jest.config.ts b/packages/backend-core/jest.config.ts index 8d64b24a2f..3f1065ead2 100644 --- a/packages/backend-core/jest.config.ts +++ b/packages/backend-core/jest.config.ts @@ -1,8 +1,6 @@ import { Config } from "@jest/types" -const preset = require("ts-jest/jest-preset") const baseConfig: Config.InitialProjectOptions = { - ...preset, preset: "@trendyol/jest-testcontainers", setupFiles: ["./tests/jestEnv.ts"], setupFilesAfterEnv: ["./tests/jestSetup.ts"], @@ -11,6 +9,7 @@ const baseConfig: Config.InitialProjectOptions = { }, moduleNameMapper: { "@budibase/types": "/../types/src", + "@budibase/shared-core": ["/../shared-core/src"], }, } diff --git a/packages/backend-core/package.json b/packages/backend-core/package.json index 7f3c064c92..b1dced660c 100644 --- a/packages/backend-core/package.json +++ b/packages/backend-core/package.json @@ -2,10 +2,10 @@ "name": "@budibase/backend-core", "version": "0.0.0", "description": "Budibase backend core libraries used in server and worker", - "main": "dist/src/index.js", + "main": "dist/index.js", "types": "dist/src/index.d.ts", "exports": { - ".": "./dist/src/index.js", + ".": "./dist/index.js", "./tests": "./dist/tests/index.js", "./*": "./dist/*.js" }, @@ -14,16 +14,17 @@ "scripts": { "prebuild": "rimraf dist/", "prepack": "cp package.json dist", - "build": "tsc -p tsconfig.build.json", + "build": "tsc -p tsconfig.build.json --paths null && node ./scripts/build.js", "build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput", + "check:types": "tsc -p tsconfig.json --noEmit --paths null", "test": "bash scripts/test.sh", "test:watch": "jest --watchAll" }, "dependencies": { "@budibase/nano": "10.1.2", "@budibase/pouchdb-replication-stream": "1.2.10", + "@budibase/shared-core": "0.0.0", "@budibase/types": "0.0.0", - "@shopify/jest-koa-mocks": "5.0.1", "@techpass/passport-openidconnect": "0.3.2", "aws-cloudfront-sign": "2.2.0", "aws-sdk": "2.1030.0", @@ -32,17 +33,14 @@ "bull": "4.10.1", "correlation-id": "4.0.0", "dotenv": "16.0.1", - "emitter-listener": "1.1.2", "ioredis": "5.3.2", "joi": "17.6.0", "jsonwebtoken": "9.0.0", "koa-passport": "4.1.4", "koa-pino-logger": "4.0.0", "lodash": "4.17.21", - "lodash.isarguments": "3.1.0", "node-fetch": "2.6.7", "passport-google-oauth": "2.0.0", - "passport-jwt": "4.0.0", "passport-local": "1.0.0", "passport-oauth2-refresh": "^2.1.0", "pino": "8.11.0", @@ -58,16 +56,16 @@ "uuid": "8.3.2" }, "devDependencies": { - "@jest/test-sequencer": "29.5.0", - "@swc/core": "^1.3.25", - "@swc/jest": "^0.2.24", + "@shopify/jest-koa-mocks": "5.1.1", + "@swc/core": "1.3.71", + "@swc/jest": "0.2.27", "@trendyol/jest-testcontainers": "^2.1.1", "@types/chance": "1.1.3", - "@types/jest": "29.5.0", - "@types/koa": "2.13.4", + "@types/cookies": "0.7.8", + "@types/jest": "29.5.3", "@types/lodash": "4.14.180", - "@types/node": "14.18.20", - "@types/node-fetch": "2.6.1", + "@types/node": "18.17.0", + "@types/node-fetch": "2.6.4", "@types/pouchdb": "6.4.0", "@types/redlock": "4.0.3", "@types/semver": "7.3.7", @@ -75,18 +73,13 @@ 
"@types/uuid": "8.3.4", "chance": "1.1.8", "ioredis-mock": "8.7.0", - "jest": "29.5.0", - "jest-environment-node": "29.5.0", - "jest-serial-runner": "^1.2.1", - "koa": "2.13.4", - "nodemon": "2.0.16", + "jest": "29.6.2", + "jest-environment-node": "29.6.2", + "jest-serial-runner": "1.2.1", "pino-pretty": "10.0.0", "pouchdb-adapter-memory": "7.2.2", "timekeeper": "2.2.0", - "ts-jest": "29.0.5", - "ts-node": "10.8.1", - "tsconfig-paths": "4.0.0", - "typescript": "4.7.3" + "typescript": "5.2.2" }, "nx": { "targets": { @@ -94,6 +87,7 @@ "dependsOn": [ { "projects": [ + "@budibase/shared-core", "@budibase/types" ], "target": "build" @@ -101,6 +95,5 @@ ] } } - }, - "gitHead": "d1836a898cab3f8ab80ee6d8f42be1a9eed7dcdc" + } } diff --git a/packages/backend-core/plugins.ts b/packages/backend-core/plugins.ts deleted file mode 100644 index 33354eaf64..0000000000 --- a/packages/backend-core/plugins.ts +++ /dev/null @@ -1 +0,0 @@ -export * from "./src/plugin" diff --git a/packages/backend-core/scripts/build.js b/packages/backend-core/scripts/build.js new file mode 100644 index 0000000000..f84f22bf8d --- /dev/null +++ b/packages/backend-core/scripts/build.js @@ -0,0 +1,5 @@ +#!/usr/bin/node +const coreBuild = require("../../../scripts/build") + +coreBuild("./src/plugin/index.ts", "./dist/plugins.js") +coreBuild("./src/index.ts", "./dist/index.js") diff --git a/packages/backend-core/scripts/test.sh b/packages/backend-core/scripts/test.sh index 3d8240e65a..7d19ec96cc 100644 --- a/packages/backend-core/scripts/test.sh +++ b/packages/backend-core/scripts/test.sh @@ -8,6 +8,6 @@ then jest --coverage --runInBand --forceExit else # --maxWorkers performs better in development - echo "jest --coverage --forceExit" - jest --coverage --forceExit + echo "jest --coverage --detectOpenHandles" + jest --coverage --detectOpenHandles fi \ No newline at end of file diff --git a/packages/backend-core/src/cache/appMetadata.ts b/packages/backend-core/src/cache/appMetadata.ts index 0c320ec776..420456fd41 100644 --- a/packages/backend-core/src/cache/appMetadata.ts +++ b/packages/backend-core/src/cache/appMetadata.ts @@ -55,7 +55,7 @@ export async function getAppMetadata(appId: string): Promise { throw err } } - // needed for cypress/some scenarios where the caching happens + // needed for some scenarios where the caching happens // so quickly the requests can get slightly out of sync // might store its invalid just before it stores its valid if (isInvalid(metadata)) { diff --git a/packages/backend-core/src/cache/tests/user.spec.ts b/packages/backend-core/src/cache/tests/user.spec.ts new file mode 100644 index 0000000000..80e5bc3063 --- /dev/null +++ b/packages/backend-core/src/cache/tests/user.spec.ts @@ -0,0 +1,145 @@ +import { User } from "@budibase/types" +import { generator, structures } from "../../../tests" +import { DBTestConfiguration } from "../../../tests/extra" +import { getUsers } from "../user" +import { getGlobalDB } from "../../context" +import _ from "lodash" + +import * as redis from "../../redis/init" +import { UserDB } from "../../users" + +const config = new DBTestConfiguration() + +describe("user cache", () => { + describe("getUsers", () => { + const users: User[] = [] + beforeAll(async () => { + const userCount = 10 + const userIds = generator.arrayOf(() => generator.guid(), { + min: userCount, + max: userCount, + }) + + await config.doInTenant(async () => { + const db = getGlobalDB() + for (const userId of userIds) { + const user = structures.users.user({ _id: userId }) + await db.put(user) + 
users.push(user) + } + }) + }) + + beforeEach(async () => { + jest.clearAllMocks() + + const redisClient = await redis.getUserClient() + await redisClient.clear() + }) + + it("when no user is in cache, all of them are retrieved from db", async () => { + const usersToRequest = _.sampleSize(users, 5) + + const userIdsToRequest = usersToRequest.map(x => x._id!) + + jest.spyOn(UserDB, "bulkGet") + + const results = await config.doInTenant(() => getUsers(userIdsToRequest)) + + expect(results.users).toHaveLength(5) + expect(results).toEqual({ + users: usersToRequest.map(u => ({ + ...u, + budibaseAccess: true, + _rev: expect.any(String), + })), + }) + + expect(UserDB.bulkGet).toBeCalledTimes(1) + expect(UserDB.bulkGet).toBeCalledWith(userIdsToRequest) + }) + + it("on a second call, all of them are retrieved from cache", async () => { + const usersToRequest = _.sampleSize(users, 5) + + const userIdsToRequest = usersToRequest.map(x => x._id!) + + jest.spyOn(UserDB, "bulkGet") + + await config.doInTenant(() => getUsers(userIdsToRequest)) + const resultsFromCache = await config.doInTenant(() => + getUsers(userIdsToRequest) + ) + + expect(resultsFromCache.users).toHaveLength(5) + expect(resultsFromCache).toEqual({ + users: expect.arrayContaining( + usersToRequest.map(u => ({ + ...u, + budibaseAccess: true, + _rev: expect.any(String), + })) + ), + }) + + expect(UserDB.bulkGet).toBeCalledTimes(1) + }) + + it("when some users are cached, only the missing ones are retrieved from db", async () => { + const usersToRequest = _.sampleSize(users, 5) + + const userIdsToRequest = usersToRequest.map(x => x._id!) + + jest.spyOn(UserDB, "bulkGet") + + await config.doInTenant(() => + getUsers([userIdsToRequest[0], userIdsToRequest[3]]) + ) + ;(UserDB.bulkGet as jest.Mock).mockClear() + + const results = await config.doInTenant(() => getUsers(userIdsToRequest)) + + expect(results.users).toHaveLength(5) + expect(results).toEqual({ + users: expect.arrayContaining( + usersToRequest.map(u => ({ + ...u, + budibaseAccess: true, + _rev: expect.any(String), + })) + ), + }) + + expect(UserDB.bulkGet).toBeCalledTimes(1) + expect(UserDB.bulkGet).toBeCalledWith([ + userIdsToRequest[1], + userIdsToRequest[2], + userIdsToRequest[4], + ]) + }) + + it("requesting existing and non-existing ids will return found ones", async () => { + const usersToRequest = _.sampleSize(users, 3) + const missingIds = [generator.guid(), generator.guid()] + + const userIdsToRequest = _.shuffle([ + ...missingIds, + ...usersToRequest.map(x => x._id!), + ]) + + const results = await config.doInTenant(() => getUsers(userIdsToRequest)) + + expect(results.users).toHaveLength(3) + expect(results).toEqual({ + users: expect.arrayContaining( + usersToRequest.map(u => ({ + ...u, + budibaseAccess: true, + _rev: expect.any(String), + })) + ), + notFoundIds: expect.arrayContaining(missingIds), + }) + }) + }) +}) diff --git a/packages/backend-core/src/cache/tests/writethrough.spec.ts b/packages/backend-core/src/cache/tests/writethrough.spec.ts index 92b073ed64..97d3ece7a6 100644 --- a/packages/backend-core/src/cache/tests/writethrough.spec.ts +++ b/packages/backend-core/src/cache/tests/writethrough.spec.ts @@ -36,7 +36,7 @@ describe("writethrough", () => { _id: docId, value: 1, }) - const output = await db.get(response.id) + const output = await db.get(response.id) current = output expect(output.value).toBe(1) }) @@ -45,7 +45,7 @@ it("second put shouldn't update DB", async () => { await config.doInTenant(async () => { const response =
await writethrough.put({ ...current, value: 2 }) - const output = await db.get(response.id) + const output = await db.get(response.id) expect(current._rev).toBe(output._rev) expect(output.value).toBe(1) }) @@ -55,7 +55,7 @@ describe("writethrough", () => { await config.doInTenant(async () => { tk.freeze(Date.now() + DELAY + 1) const response = await writethrough.put({ ...current, value: 3 }) - const output = await db.get(response.id) + const output = await db.get(response.id) expect(response.rev).not.toBe(current._rev) expect(output.value).toBe(3) @@ -79,7 +79,7 @@ describe("writethrough", () => { expect.arrayContaining([current._rev, current._rev, newRev]) ) - const output = await db.get(current._id) + const output = await db.get(current._id) expect(output.value).toBe(4) expect(output._rev).toBe(newRev) @@ -107,7 +107,7 @@ describe("writethrough", () => { }) expect(res.ok).toBe(true) - const output = await db.get(id) + const output = await db.get(id) expect(output.value).toBe(3) expect(output._rev).toBe(res.rev) }) @@ -130,8 +130,8 @@ describe("writethrough", () => { const resp2 = await writethrough2.put({ _id: "db1", value: "second" }) expect(resp1.rev).toBeDefined() expect(resp2.rev).toBeDefined() - expect((await db.get("db1")).value).toBe("first") - expect((await db2.get("db1")).value).toBe("second") + expect((await db.get("db1")).value).toBe("first") + expect((await db2.get("db1")).value).toBe("second") }) }) }) diff --git a/packages/backend-core/src/cache/user.ts b/packages/backend-core/src/cache/user.ts index 8281bfca62..481d3691e4 100644 --- a/packages/backend-core/src/cache/user.ts +++ b/packages/backend-core/src/cache/user.ts @@ -4,6 +4,9 @@ import * as context from "../context" import * as platform from "../platform" import env from "../environment" import * as accounts from "../accounts" +import { UserDB } from "../users" +import { sdk } from "@budibase/shared-core" +import { User } from "@budibase/types" const EXPIRY_SECONDS = 3600 @@ -25,6 +28,35 @@ async function populateFromDB(userId: string, tenantId: string) { return user } +async function populateUsersFromDB( + userIds: string[] +): Promise<{ users: User[]; notFoundIds?: string[] }> { + const getUsersResponse = await UserDB.bulkGet(userIds) + + // Handle missed user ids + const notFoundIds = userIds.filter((uid, i) => !getUsersResponse[i]) + + const users = getUsersResponse.filter(x => x) + + await Promise.all( + users.map(async (user: any) => { + user.budibaseAccess = true + if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) { + const account = await accounts.getAccount(user.email) + if (account) { + user.account = account + user.accountPortalAccess = true + } + } + }) + ) + + if (notFoundIds.length) { + return { users, notFoundIds } + } + return { users } +} + /** * Get the requested user by id. * Use redis cache to first read the user. @@ -60,9 +92,51 @@ export async function getUser( // make sure the tenant ID is always correct/set user.tenantId = tenantId } + // if has groups, could have builder permissions granted by a group + if (user.userGroups && !sdk.users.isGlobalBuilder(user)) { + await context.doInTenant(tenantId, async () => { + const appIds = await UserDB.getGroupBuilderAppIds(user) + if (appIds.length) { + const existing = user.builder?.apps || [] + user.builder = { + apps: [...new Set(existing.concat(appIds))], + } + } + }) + } return user } +/** + * Get the requested users by id. + * Use redis cache to first read the users. + * If not present fallback to loading the users directly and re-caching. 
+ * @param {*} userIds the ids of the users to get + * @returns the users found, along with the ids of any users that could not be found + */ +export async function getUsers( + userIds: string[] +): Promise<{ users: User[]; notFoundIds?: string[] }> { + const client = await redis.getUserClient() + // try cache + let usersFromCache = await client.bulkGet(userIds) + const missingUsersFromCache = userIds.filter(uid => !usersFromCache[uid]) + const users = Object.values(usersFromCache) + let notFoundIds + + if (missingUsersFromCache.length) { + const usersFromDb = await populateUsersFromDB(missingUsersFromCache) + + notFoundIds = usersFromDb.notFoundIds + for (const userToCache of usersFromDb.users) { + await client.store(userToCache._id!, userToCache, EXPIRY_SECONDS) + } + users.push(...usersFromDb.users) + } + return { users, notFoundIds: notFoundIds } +} + export async function invalidateUser(userId: string) { const client = await redis.getUserClient() await client.delete(userId) diff --git a/packages/backend-core/src/constants/db.ts b/packages/backend-core/src/constants/db.ts index 716acc877f..36c284d91e 100644 --- a/packages/backend-core/src/constants/db.ts +++ b/packages/backend-core/src/constants/db.ts @@ -1,5 +1,5 @@ -export const SEPARATOR = "_" -export const UNICODE_MAX = "\ufff0" +import { prefixed, DocumentType } from "@budibase/types" +export { SEPARATOR, UNICODE_MAX, DocumentType } from "@budibase/types" /** * Can be used to create a few different forms of querying a view. @@ -14,13 +14,11 @@ export enum ViewName { USER_BY_APP = "by_app", USER_BY_EMAIL = "by_email2", BY_API_KEY = "by_api_key", - /** @deprecated - could be deleted */ - USER_BY_BUILDERS = "by_builders", LINK = "by_link", ROUTING = "screen_routes", AUTOMATION_LOGS = "automation_logs", ACCOUNT_BY_EMAIL = "account_by_email", - PLATFORM_USERS_LOWERCASE = "platform_users_lowercase", + PLATFORM_USERS_LOWERCASE = "platform_users_lowercase_2", USER_BY_GROUP = "user_by_group", APP_BACKUP_BY_TRIGGER = "by_trigger", } @@ -36,42 +34,6 @@ export enum InternalTable { USER_METADATA = "ta_users", } -export enum DocumentType { - USER = "us", - GROUP = "gr", - WORKSPACE = "workspace", - CONFIG = "config", - TEMPLATE = "template", - APP = "app", - DEV = "dev", - APP_DEV = "app_dev", - APP_METADATA = "app_metadata", - ROLE = "role", - MIGRATIONS = "migrations", - DEV_INFO = "devinfo", - AUTOMATION_LOG = "log_au", - ACCOUNT_METADATA = "acc_metadata", - PLUGIN = "plg", - DATASOURCE = "datasource", - DATASOURCE_PLUS = "datasource_plus", - APP_BACKUP = "backup", - TABLE = "ta", - ROW = "ro", - AUTOMATION = "au", - LINK = "li", - WEBHOOK = "wh", - INSTANCE = "inst", - LAYOUT = "layout", - SCREEN = "screen", - QUERY = "query", - DEPLOYMENTS = "deployments", - METADATA = "metadata", - MEM_VIEW = "view", - USER_FLAG = "flag", - AUTOMATION_METADATA = "meta_au", - AUDIT_LOG = "al", -} - export const StaticDatabases = { GLOBAL: { name: "global-db", @@ -95,8 +57,8 @@ export const StaticDatabases = { }, } -export const APP_PREFIX = DocumentType.APP + SEPARATOR -export const APP_DEV = DocumentType.APP_DEV + SEPARATOR +export const APP_PREFIX = prefixed(DocumentType.APP) +export const APP_DEV = prefixed(DocumentType.APP_DEV) export const APP_DEV_PREFIX = APP_DEV export const BUDIBASE_DATASOURCE_TYPE = "budibase" export const SQLITE_DESIGN_DOC_ID = "_design/sqlite" diff --git a/packages/backend-core/src/constants/misc.ts b/packages/backend-core/src/constants/misc.ts index 0c68798164..8ef34196ed 100644 --- a/packages/backend-core/src/constants/misc.ts +++
b/packages/backend-core/src/constants/misc.ts @@ -22,6 +22,8 @@ export enum Header { TENANT_ID = "x-budibase-tenant-id", VERIFICATION_CODE = "x-budibase-verification-code", RETURN_VERIFICATION_CODE = "x-budibase-return-verification-code", + RESET_PASSWORD_CODE = "x-budibase-reset-password-code", + RETURN_RESET_PASSWORD_CODE = "x-budibase-return-reset-password-code", TOKEN = "x-budibase-token", CSRF_TOKEN = "x-csrf-token", CORRELATION_ID = "x-budibase-correlation-id", diff --git a/packages/backend-core/src/db/db.ts b/packages/backend-core/src/db/db.ts index f13eb9a965..9aae64b892 100644 --- a/packages/backend-core/src/db/db.ts +++ b/packages/backend-core/src/db/db.ts @@ -11,7 +11,11 @@ export function getDB(dbName?: string, opts?: any): Database { // we have to use a callback for this so that we can close // the DB when we're done, without this manual requests would // need to close the database when done with it to avoid memory leaks -export async function doWithDB(dbName: string, cb: any, opts = {}) { +export async function doWithDB( + dbName: string, + cb: (db: Database) => Promise, + opts = {} +) { const db = getDB(dbName, opts) // need this to be async so that we can correctly close DB after all // async operations have been completed diff --git a/packages/backend-core/src/db/lucene.ts b/packages/backend-core/src/db/lucene.ts index a491451a62..7451d581b5 100644 --- a/packages/backend-core/src/db/lucene.ts +++ b/packages/backend-core/src/db/lucene.ts @@ -1,7 +1,6 @@ import fetch from "node-fetch" import { getCouchInfo } from "./couch" -import { SearchFilters, Row } from "@budibase/types" -import { createUserIndex } from "./searchIndexes/searchIndexes" +import { SearchFilters, Row, EmptyFilterOption } from "@budibase/types" const QUERY_START_REGEX = /\d[0-9]*:/g @@ -65,6 +64,7 @@ export class QueryBuilder { this.#index = index this.#query = { allOr: false, + onEmptyFilter: EmptyFilterOption.RETURN_ALL, string: {}, fuzzy: {}, range: {}, @@ -218,6 +218,10 @@ export class QueryBuilder { this.#query.allOr = true } + setOnEmptyFilter(value: EmptyFilterOption) { + this.#query.onEmptyFilter = value + } + handleSpaces(input: string) { if (this.#noEscaping) { return input @@ -289,8 +293,9 @@ export class QueryBuilder { const builder = this let allOr = this.#query && this.#query.allOr let query = allOr ? 
"" : "*:*" + let allFiltersEmpty = true const allPreProcessingOpts = { escape: true, lowercase: true, wrap: true } - let tableId + let tableId: string = "" if (this.#query.equal!.tableId) { tableId = this.#query.equal!.tableId delete this.#query.equal!.tableId @@ -305,7 +310,7 @@ export class QueryBuilder { } const contains = (key: string, value: any, mode = "AND") => { - if (Array.isArray(value) && value.length === 0) { + if (!value || (Array.isArray(value) && value.length === 0)) { return null } if (!Array.isArray(value)) { @@ -384,6 +389,12 @@ export class QueryBuilder { built += ` ${mode} ` } built += expression + if ( + (typeof value !== "string" && value != null) || + (typeof value === "string" && value !== tableId && value !== "") + ) { + allFiltersEmpty = false + } } if (opts?.returnBuilt) { return built @@ -463,6 +474,13 @@ export class QueryBuilder { allOr = false build({ tableId }, equal) } + if (allFiltersEmpty) { + if (this.#query.onEmptyFilter === EmptyFilterOption.RETURN_NONE) { + return "" + } else if (this.#query?.allOr) { + return query.replace("()", "(*:*)") + } + } return query } diff --git a/packages/backend-core/src/db/tests/lucene.spec.ts b/packages/backend-core/src/db/tests/lucene.spec.ts index a82828d8f2..7716661d88 100644 --- a/packages/backend-core/src/db/tests/lucene.spec.ts +++ b/packages/backend-core/src/db/tests/lucene.spec.ts @@ -1,6 +1,6 @@ import { newid } from "../../docIds/newid" import { getDB } from "../db" -import { Database } from "@budibase/types" +import { Database, EmptyFilterOption } from "@budibase/types" import { QueryBuilder, paginatedSearch, fullSearch } from "../lucene" const INDEX_NAME = "main" @@ -156,6 +156,76 @@ describe("lucene", () => { expect(resp.rows.length).toBe(2) }) + describe("empty filters behaviour", () => { + it("should return all rows by default", async () => { + const builder = new QueryBuilder(dbName, INDEX_NAME) + builder.addEqual("property", "") + builder.addEqual("number", null) + builder.addString("property", "") + builder.addFuzzy("property", "") + builder.addNotEqual("number", undefined) + builder.addOneOf("number", null) + builder.addContains("array", undefined) + builder.addNotContains("array", null) + builder.addContainsAny("array", null) + + const resp = await builder.run() + expect(resp.rows.length).toBe(3) + }) + + it("should return all rows when onEmptyFilter is ALL", async () => { + const builder = new QueryBuilder(dbName, INDEX_NAME) + builder.setOnEmptyFilter(EmptyFilterOption.RETURN_ALL) + builder.setAllOr() + builder.addEqual("property", "") + builder.addEqual("number", null) + builder.addString("property", "") + builder.addFuzzy("property", "") + builder.addNotEqual("number", undefined) + builder.addOneOf("number", null) + builder.addContains("array", undefined) + builder.addNotContains("array", null) + builder.addContainsAny("array", null) + + const resp = await builder.run() + expect(resp.rows.length).toBe(3) + }) + + it("should return no rows when onEmptyFilter is NONE", async () => { + const builder = new QueryBuilder(dbName, INDEX_NAME) + builder.setOnEmptyFilter(EmptyFilterOption.RETURN_NONE) + builder.addEqual("property", "") + builder.addEqual("number", null) + builder.addString("property", "") + builder.addFuzzy("property", "") + builder.addNotEqual("number", undefined) + builder.addOneOf("number", null) + builder.addContains("array", undefined) + builder.addNotContains("array", null) + builder.addContainsAny("array", null) + + const resp = await builder.run() + expect(resp.rows.length).toBe(0) 
+ }) + + it("should return all matching rows when onEmptyFilter is NONE, but a filter value is provided", async () => { + const builder = new QueryBuilder(dbName, INDEX_NAME) + builder.setOnEmptyFilter(EmptyFilterOption.RETURN_NONE) + builder.addEqual("property", "") + builder.addEqual("number", 1) + builder.addString("property", "") + builder.addFuzzy("property", "") + builder.addNotEqual("number", undefined) + builder.addOneOf("number", null) + builder.addContains("array", undefined) + builder.addNotContains("array", null) + builder.addContainsAny("array", null) + + const resp = await builder.run() + expect(resp.rows.length).toBe(1) + }) + }) + describe("skip", () => { const skipDbName = `db-${newid()}` let docs: { diff --git a/packages/backend-core/src/db/views.ts b/packages/backend-core/src/db/views.ts index fddb1ab34b..f0980ad217 100644 --- a/packages/backend-core/src/db/views.ts +++ b/packages/backend-core/src/db/views.ts @@ -105,16 +105,6 @@ export const createApiKeyView = async () => { await createView(db, viewJs, ViewName.BY_API_KEY) } -export const createUserBuildersView = async () => { - const db = getGlobalDB() - const viewJs = `function(doc) { - if (doc.builder && doc.builder.global === true) { - emit(doc._id, doc._id) - } - }` - await createView(db, viewJs, ViewName.USER_BY_BUILDERS) -} - export interface QueryViewOptions { arrayResponse?: boolean } @@ -200,6 +190,10 @@ export const createPlatformUserView = async () => { if (doc.tenantId) { emit(doc._id.toLowerCase(), doc._id) } + + if (doc.ssoId) { + emit(doc.ssoId, doc._id) + } }` await createPlatformView(viewJs, ViewName.PLATFORM_USERS_LOWERCASE) } @@ -223,7 +217,6 @@ export const queryPlatformView = async ( const CreateFuncByName: any = { [ViewName.USER_BY_EMAIL]: createNewUserEmailView, [ViewName.BY_API_KEY]: createApiKeyView, - [ViewName.USER_BY_BUILDERS]: createUserBuildersView, [ViewName.USER_BY_APP]: createUserAppView, } diff --git a/packages/backend-core/src/environment.ts b/packages/backend-core/src/environment.ts index 8694e44f16..510d580f28 100644 --- a/packages/backend-core/src/environment.ts +++ b/packages/backend-core/src/environment.ts @@ -1,15 +1,16 @@ import { existsSync, readFileSync } from "fs" +import { ServiceType } from "@budibase/types" function isTest() { - return isCypress() || isJest() + return isJest() } function isJest() { - return !!(process.env.NODE_ENV === "jest" || process.env.JEST_WORKER_ID) -} - -function isCypress() { - return process.env.NODE_ENV === "cypress" + return ( + process.env.NODE_ENV === "jest" || + (process.env.JEST_WORKER_ID != null && + process.env.JEST_WORKER_ID !== "null") + ) } function isDev() { @@ -83,10 +84,20 @@ function getPackageJsonFields(): { } } +function isWorker() { + return environment.SERVICE_TYPE === ServiceType.WORKER +} + +function isApps() { + return environment.SERVICE_TYPE === ServiceType.APPS +} + const environment = { isTest, isJest, isDev, + isWorker, + isApps, isProd: () => { return !isDev() }, @@ -154,6 +165,7 @@ const environment = { SMTP_FROM_ADDRESS: process.env.SMTP_FROM_ADDRESS, DISABLE_JWT_WARNING: process.env.DISABLE_JWT_WARNING, BLACKLIST_IPS: process.env.BLACKLIST_IPS, + SERVICE_TYPE: "unknown", /** * Enable to allow an admin user to login using a password. * This can be useful to prevent lockout when configuring SSO. 
diff --git a/packages/backend-core/src/events/identification.ts b/packages/backend-core/src/events/identification.ts index 948d3b692b..c7bc1c817b 100644 --- a/packages/backend-core/src/events/identification.ts +++ b/packages/backend-core/src/events/identification.ts @@ -21,6 +21,7 @@ import { processors } from "./processors" import { newid } from "../utils" import * as installation from "../installation" import * as configs from "../configs" +import * as users from "../users" import { withCache, TTL, CacheKey } from "../cache/generic" /** @@ -164,8 +165,8 @@ const identifyUser = async ( const id = user._id as string const tenantId = await getEventTenantId(user.tenantId) const type = IdentityType.USER - let builder = user.builder?.global || false - let admin = user.admin?.global || false + let builder = users.hasBuilderPermissions(user) + let admin = users.hasAdminPermissions(user) let providerType if (isSSOUser(user)) { providerType = user.providerType diff --git a/packages/backend-core/src/featureFlags/index.ts b/packages/backend-core/src/features/index.ts similarity index 98% rename from packages/backend-core/src/featureFlags/index.ts rename to packages/backend-core/src/features/index.ts index 877cd60e1a..8f5c903e05 100644 --- a/packages/backend-core/src/featureFlags/index.ts +++ b/packages/backend-core/src/features/index.ts @@ -1,5 +1,6 @@ import env from "../environment" import * as context from "../context" +export * from "./installation" /** * Read the TENANT_FEATURE_FLAGS env var and return an array of features flags for each tenant. diff --git a/packages/backend-core/src/features/installation.ts b/packages/backend-core/src/features/installation.ts new file mode 100644 index 0000000000..defc8bf987 --- /dev/null +++ b/packages/backend-core/src/features/installation.ts @@ -0,0 +1,17 @@ +export function processFeatureEnvVar<T>( + fullList: string[], + featureList?: string +) { + let list + if (!featureList) { + list = fullList + } else { + list = featureList.split(",") + } + for (let feature of list) { + if (!fullList.includes(feature)) { + throw new Error(`Feature: ${feature} is not an allowed option`) + } + } + return list as unknown as T[] +} diff --git a/packages/backend-core/src/featureFlags/tests/featureFlags.spec.ts b/packages/backend-core/src/features/tests/featureFlags.spec.ts similarity index 100% rename from packages/backend-core/src/featureFlags/tests/featureFlags.spec.ts rename to packages/backend-core/src/features/tests/featureFlags.spec.ts diff --git a/packages/backend-core/src/index.ts b/packages/backend-core/src/index.ts index 7b98674788..ffffd8240a 100644 --- a/packages/backend-core/src/index.ts +++ b/packages/backend-core/src/index.ts @@ -6,7 +6,8 @@ export * as roles from "./security/roles" export * as permissions from "./security/permissions" export * as accounts from "./accounts" export * as installation from "./installation" -export * as featureFlags from "./featureFlags" +export * as featureFlags from "./features" +export * as features from "./features/installation" export * as sessions from "./security/sessions" export * as platform from "./platform" export * as auth from "./auth" diff --git a/packages/backend-core/src/middleware/adminOnly.ts b/packages/backend-core/src/middleware/adminOnly.ts index dbe1e3a501..6b2ee87c01 100644 --- a/packages/backend-core/src/middleware/adminOnly.ts +++ b/packages/backend-core/src/middleware/adminOnly.ts @@ -1,10 +1,8 @@ -import { BBContext } from "@budibase/types" +import { UserCtx } from "@budibase/types" +import { isAdmin }
from "../users" -export default async (ctx: BBContext, next: any) => { - if ( - !ctx.internal && - (!ctx.user || !ctx.user.admin || !ctx.user.admin.global) - ) { +export default async (ctx: UserCtx, next: any) => { + if (!ctx.internal && !isAdmin(ctx.user)) { ctx.throw(403, "Admin user only endpoint.") } return next() diff --git a/packages/backend-core/src/middleware/builderOnly.ts b/packages/backend-core/src/middleware/builderOnly.ts index a00fd63a22..fafcc524cc 100644 --- a/packages/backend-core/src/middleware/builderOnly.ts +++ b/packages/backend-core/src/middleware/builderOnly.ts @@ -1,10 +1,20 @@ -import { BBContext } from "@budibase/types" +import { UserCtx } from "@budibase/types" +import { isBuilder, hasBuilderPermissions } from "../users" +import { getAppId } from "../context" +import env from "../environment" -export default async (ctx: BBContext, next: any) => { - if ( - !ctx.internal && - (!ctx.user || !ctx.user.builder || !ctx.user.builder.global) - ) { +export default async (ctx: UserCtx, next: any) => { + const appId = getAppId() + const builderFn = + env.isWorker() || !appId + ? hasBuilderPermissions + : env.isApps() + ? isBuilder + : undefined + if (!builderFn) { + throw new Error("Service name unknown - middleware inactive.") + } + if (!ctx.internal && !builderFn(ctx.user, appId)) { ctx.throw(403, "Builder user only endpoint.") } return next() diff --git a/packages/backend-core/src/middleware/builderOrAdmin.ts b/packages/backend-core/src/middleware/builderOrAdmin.ts index 26bb3a1bda..4b8badec15 100644 --- a/packages/backend-core/src/middleware/builderOrAdmin.ts +++ b/packages/backend-core/src/middleware/builderOrAdmin.ts @@ -1,12 +1,21 @@ -import { BBContext } from "@budibase/types" +import { UserCtx } from "@budibase/types" +import { isBuilder, isAdmin, hasBuilderPermissions } from "../users" +import { getAppId } from "../context" +import env from "../environment" -export default async (ctx: BBContext, next: any) => { - if ( - !ctx.internal && - (!ctx.user || !ctx.user.builder || !ctx.user.builder.global) && - (!ctx.user || !ctx.user.admin || !ctx.user.admin.global) - ) { - ctx.throw(403, "Builder user only endpoint.") +export default async (ctx: UserCtx, next: any) => { + const appId = getAppId() + const builderFn = + env.isWorker() || !appId + ? hasBuilderPermissions + : env.isApps() + ? 
isBuilder + : undefined + if (!builderFn) { + throw new Error("Service name unknown - middleware inactive.") + } + if (!ctx.internal && !builderFn(ctx.user, appId) && !isAdmin(ctx.user)) { + ctx.throw(403, "Admin/Builder user only endpoint.") } return next() } diff --git a/packages/backend-core/src/middleware/passport/sso/tests/sso.spec.ts b/packages/backend-core/src/middleware/passport/sso/tests/sso.spec.ts index 484a118cbd..c3ddf220e6 100644 --- a/packages/backend-core/src/middleware/passport/sso/tests/sso.spec.ts +++ b/packages/backend-core/src/middleware/passport/sso/tests/sso.spec.ts @@ -102,6 +102,7 @@ describe("sso", () => { // modified external id to match user format ssoUser._id = "us_" + details.userId + delete ssoUser.userId // new sso user won't have a password delete ssoUser.password diff --git a/packages/backend-core/src/middleware/tests/builder.spec.ts b/packages/backend-core/src/middleware/tests/builder.spec.ts new file mode 100644 index 0000000000..d350eff4f6 --- /dev/null +++ b/packages/backend-core/src/middleware/tests/builder.spec.ts @@ -0,0 +1,180 @@ +import adminOnly from "../adminOnly" +import builderOnly from "../builderOnly" +import builderOrAdmin from "../builderOrAdmin" +import { structures } from "../../../tests" +import { ContextUser, ServiceType } from "@budibase/types" +import { doInAppContext } from "../../context" +import env from "../../environment" +env._set("SERVICE_TYPE", ServiceType.APPS) + +const appId = "app_aaa" +const basicUser = structures.users.user() +const adminUser = structures.users.adminUser() +const adminOnlyUser = structures.users.adminOnlyUser() +const builderUser = structures.users.builderUser() +const appBuilderUser = structures.users.appBuilderUser(appId) + +function buildUserCtx(user: ContextUser) { + return { + internal: false, + user, + throw: jest.fn(), + } as any +} + +function passed(throwFn: jest.Func, nextFn: jest.Func) { + expect(throwFn).not.toBeCalled() + expect(nextFn).toBeCalled() +} + +function threw(throwFn: jest.Func) { + // cant check next, the throw function doesn't actually throw - so it still continues + expect(throwFn).toBeCalled() +} + +describe("adminOnly middleware", () => { + it("should allow admin user", () => { + const ctx = buildUserCtx(adminUser), + next = jest.fn() + adminOnly(ctx, next) + passed(ctx.throw, next) + }) + + it("should not allow basic user", () => { + const ctx = buildUserCtx(basicUser), + next = jest.fn() + adminOnly(ctx, next) + threw(ctx.throw) + }) + + it("should not allow builder user", () => { + const ctx = buildUserCtx(builderUser), + next = jest.fn() + adminOnly(ctx, next) + threw(ctx.throw) + }) +}) + +describe("builderOnly middleware", () => { + it("should allow builder user", () => { + const ctx = buildUserCtx(builderUser), + next = jest.fn() + builderOnly(ctx, next) + passed(ctx.throw, next) + }) + + it("should allow app builder user", () => { + const ctx = buildUserCtx(appBuilderUser), + next = jest.fn() + doInAppContext(appId, () => { + builderOnly(ctx, next) + }) + passed(ctx.throw, next) + }) + + it("should allow admin and builder user", () => { + const ctx = buildUserCtx(adminUser), + next = jest.fn() + builderOnly(ctx, next) + passed(ctx.throw, next) + }) + + it("should not allow admin user", () => { + const ctx = buildUserCtx(adminOnlyUser), + next = jest.fn() + builderOnly(ctx, next) + threw(ctx.throw) + }) + + it("should not allow app builder user to different app", () => { + const ctx = buildUserCtx(appBuilderUser), + next = jest.fn() + doInAppContext("app_bbb", () => { 
+ builderOnly(ctx, next) + }) + threw(ctx.throw) + }) + + it("should not allow basic user", () => { + const ctx = buildUserCtx(basicUser), + next = jest.fn() + builderOnly(ctx, next) + threw(ctx.throw) + }) +}) + +describe("builderOrAdmin middleware", () => { + it("should allow builder user", () => { + const ctx = buildUserCtx(builderUser), + next = jest.fn() + builderOrAdmin(ctx, next) + passed(ctx.throw, next) + }) + + it("should allow builder and admin user", () => { + const ctx = buildUserCtx(adminUser), + next = jest.fn() + builderOrAdmin(ctx, next) + passed(ctx.throw, next) + }) + + it("should allow admin user", () => { + const ctx = buildUserCtx(adminOnlyUser), + next = jest.fn() + builderOrAdmin(ctx, next) + passed(ctx.throw, next) + }) + + it("should allow app builder user", () => { + const ctx = buildUserCtx(appBuilderUser), + next = jest.fn() + doInAppContext(appId, () => { + builderOrAdmin(ctx, next) + }) + passed(ctx.throw, next) + }) + + it("should not allow basic user", () => { + const ctx = buildUserCtx(basicUser), + next = jest.fn() + builderOrAdmin(ctx, next) + threw(ctx.throw) + }) +}) + +describe("check service difference", () => { + it("should not allow without app ID in apps", () => { + env._set("SERVICE_TYPE", ServiceType.APPS) + const appId = "app_a" + const ctx = buildUserCtx({ + ...basicUser, + builder: { + apps: [appId], + }, + }) + const next = jest.fn() + doInAppContext(appId, () => { + builderOnly(ctx, next) + }) + passed(ctx.throw, next) + doInAppContext("app_b", () => { + builderOnly(ctx, next) + }) + threw(ctx.throw) + }) + + it("should allow without app ID in worker", () => { + env._set("SERVICE_TYPE", ServiceType.WORKER) + const ctx = buildUserCtx({ + ...basicUser, + builder: { + apps: ["app_a"], + }, + }) + const next = jest.fn() + doInAppContext("app_b", () => { + builderOnly(ctx, next) + }) + passed(ctx.throw, next) + }) +}) diff --git a/packages/backend-core/src/platform/users.ts b/packages/backend-core/src/platform/users.ts index c65a7e0ec4..6f030afb7c 100644 --- a/packages/backend-core/src/platform/users.ts +++ b/packages/backend-core/src/platform/users.ts @@ -5,6 +5,7 @@ import { PlatformUser, PlatformUserByEmail, PlatformUserById, + PlatformUserBySsoId, User, } from "@budibase/types" @@ -45,6 +46,20 @@ function newUserEmailDoc( } } +function newUserSsoIdDoc( + ssoId: string, + email: string, + userId: string, + tenantId: string +): PlatformUserBySsoId { + return { + _id: ssoId, + userId, + email, + tenantId, + } +} + /** * Add a new user id or email doc if it doesn't exist. 
*/ @@ -64,11 +79,24 @@ async function addUserDoc(emailOrId: string, newDocFn: () => PlatformUser) { } } -export async function addUser(tenantId: string, userId: string, email: string) { - await Promise.all([ +export async function addUser( + tenantId: string, + userId: string, + email: string, + ssoId?: string +) { + const promises = [ addUserDoc(userId, () => newUserIdDoc(userId, tenantId)), addUserDoc(email, () => newUserEmailDoc(userId, email, tenantId)), - ]) + ] + + if (ssoId) { + promises.push( + addUserDoc(ssoId, () => newUserSsoIdDoc(ssoId, email, userId, tenantId)) + ) + } + + await Promise.all(promises) } // DELETE diff --git a/packages/backend-core/src/redis/redis.ts b/packages/backend-core/src/redis/redis.ts index 5056a5d549..e7755f275d 100644 --- a/packages/backend-core/src/redis/redis.ts +++ b/packages/backend-core/src/redis/redis.ts @@ -242,7 +242,7 @@ class RedisWrapper { } } - async bulkGet(keys: string[]) { + async bulkGet(keys: string[]) { const db = this._db if (keys.length === 0) { return {} @@ -250,7 +250,7 @@ class RedisWrapper { const prefixedKeys = keys.map(key => addDbPrefix(db, key)) let response = await this.getClient().mget(prefixedKeys) if (Array.isArray(response)) { - let final: any = {} + let final: Record = {} let count = 0 for (let result of response) { if (result) { diff --git a/packages/backend-core/src/security/permissions.ts b/packages/backend-core/src/security/permissions.ts index 6cacc12dd6..539bbaef27 100644 --- a/packages/backend-core/src/security/permissions.ts +++ b/packages/backend-core/src/security/permissions.ts @@ -1,29 +1,13 @@ -const { flatten } = require("lodash") -const { cloneDeep } = require("lodash/fp") +import { PermissionLevel, PermissionType } from "@budibase/types" +import flatten from "lodash/flatten" +import cloneDeep from "lodash/fp/cloneDeep" + +export { PermissionType, PermissionLevel } from "@budibase/types" export type RoleHierarchy = { permissionId: string }[] -export enum PermissionLevel { - READ = "read", - WRITE = "write", - EXECUTE = "execute", - ADMIN = "admin", -} - -// these are the global types, that govern the underlying default behaviour -export enum PermissionType { - APP = "app", - TABLE = "table", - USER = "user", - AUTOMATION = "automation", - WEBHOOK = "webhook", - BUILDER = "builder", - VIEW = "view", - QUERY = "query", -} - export class Permission { type: PermissionType level: PermissionLevel @@ -95,7 +79,7 @@ export const BUILTIN_PERMISSIONS = { permissions: [ new Permission(PermissionType.QUERY, PermissionLevel.READ), new Permission(PermissionType.TABLE, PermissionLevel.READ), - new Permission(PermissionType.VIEW, PermissionLevel.READ), + new Permission(PermissionType.APP, PermissionLevel.READ), ], }, WRITE: { @@ -104,8 +88,9 @@ export const BUILTIN_PERMISSIONS = { permissions: [ new Permission(PermissionType.QUERY, PermissionLevel.WRITE), new Permission(PermissionType.TABLE, PermissionLevel.WRITE), - new Permission(PermissionType.VIEW, PermissionLevel.READ), new Permission(PermissionType.AUTOMATION, PermissionLevel.EXECUTE), + new Permission(PermissionType.LEGACY_VIEW, PermissionLevel.READ), + new Permission(PermissionType.APP, PermissionLevel.READ), ], }, POWER: { @@ -115,8 +100,9 @@ export const BUILTIN_PERMISSIONS = { new Permission(PermissionType.TABLE, PermissionLevel.WRITE), new Permission(PermissionType.USER, PermissionLevel.READ), new Permission(PermissionType.AUTOMATION, PermissionLevel.EXECUTE), - new Permission(PermissionType.VIEW, PermissionLevel.READ), new Permission(PermissionType.WEBHOOK, 
PermissionLevel.READ), + new Permission(PermissionType.LEGACY_VIEW, PermissionLevel.READ), + new Permission(PermissionType.APP, PermissionLevel.READ), ], }, ADMIN: { @@ -126,9 +112,10 @@ export const BUILTIN_PERMISSIONS = { new Permission(PermissionType.TABLE, PermissionLevel.ADMIN), new Permission(PermissionType.USER, PermissionLevel.ADMIN), new Permission(PermissionType.AUTOMATION, PermissionLevel.ADMIN), - new Permission(PermissionType.VIEW, PermissionLevel.ADMIN), new Permission(PermissionType.WEBHOOK, PermissionLevel.READ), new Permission(PermissionType.QUERY, PermissionLevel.ADMIN), + new Permission(PermissionType.LEGACY_VIEW, PermissionLevel.READ), + new Permission(PermissionType.APP, PermissionLevel.READ), ], }, } @@ -173,3 +160,4 @@ export function isPermissionLevelHigherThanRead(level: PermissionLevel) { // utility as a lot of things need simply the builder permission export const BUILDER = PermissionType.BUILDER +export const GLOBAL_BUILDER = PermissionType.GLOBAL_BUILDER diff --git a/packages/backend-core/src/security/roles.ts b/packages/backend-core/src/security/roles.ts index cf5c6bc406..24279e6b5c 100644 --- a/packages/backend-core/src/security/roles.ts +++ b/packages/backend-core/src/security/roles.ts @@ -3,7 +3,7 @@ import { prefixRoleID, getRoleParams, DocumentType, SEPARATOR } from "../db" import { getAppDB } from "../context" import { doWithDB } from "../db" import { Screen, Role as RoleDoc } from "@budibase/types" -const { cloneDeep } = require("lodash/fp") +import cloneDeep from "lodash/fp/cloneDeep" export const BUILTIN_ROLE_IDS = { ADMIN: "ADMIN", @@ -215,21 +215,23 @@ async function getAllUserRoles(userRoleId?: string): Promise { return roles } +export async function getUserRoleIdHierarchy( + userRoleId?: string +): Promise { + const roles = await getUserRoleHierarchy(userRoleId) + return roles.map(role => role._id!) +} + /** * Returns an ordered array of the user's inherited role IDs, this can be used * to determine if a user can access something that requires a specific role. * @param {string} userRoleId The user's role ID, this can be found in their access token. - * @param {object} opts Various options, such as whether to only retrieve the IDs (default true). - * @returns {Promise} returns an ordered array of the roles, with the first being their + * @returns {Promise} returns an ordered array of the roles, with the first being their * highest level of access and the last being the lowest level. */ -export async function getUserRoleHierarchy( - userRoleId?: string, - opts = { idOnly: true } -) { +export async function getUserRoleHierarchy(userRoleId?: string) { // special case, if they don't have a role then they are a public user - const roles = await getAllUserRoles(userRoleId) - return opts.idOnly ? roles.map(role => role._id) : roles + return getAllUserRoles(userRoleId) } // this function checks that the provided permissions are in an array format @@ -249,11 +251,16 @@ export function checkForRoleResourceArray( return rolePerms } +export async function getAllRoleIds(appId?: string) { + const roles = await getAllRoles(appId) + return roles.map(role => role._id) +} + /** * Given an app ID this will retrieve all of the roles that are currently within that app. * @return {Promise} An array of the role objects that were found. 
*/ -export async function getAllRoles(appId?: string) { +export async function getAllRoles(appId?: string): Promise { if (appId) { return doWithDB(appId, internal) } else { @@ -312,37 +319,6 @@ export async function getAllRoles(appId?: string) { } } -/** - * This retrieves the required role for a resource - * @param permLevel The level of request - * @param resourceId The resource being requested - * @param subResourceId The sub resource being requested - * @return {Promise<{permissions}|Object>} returns the permissions required to access. - */ -export async function getRequiredResourceRole( - permLevel: string, - { resourceId, subResourceId }: { resourceId?: string; subResourceId?: string } -) { - const roles = await getAllRoles() - let main = [], - sub = [] - for (let role of roles) { - // no permissions, ignore it - if (!role.permissions) { - continue - } - const mainRes = resourceId ? role.permissions[resourceId] : undefined - const subRes = subResourceId ? role.permissions[subResourceId] : undefined - if (mainRes && mainRes.indexOf(permLevel) !== -1) { - main.push(role._id) - } else if (subRes && subRes.indexOf(permLevel) !== -1) { - sub.push(role._id) - } - } - // for now just return the IDs - return main.concat(sub) -} - export class AccessController { userHierarchies: { [key: string]: string[] } constructor() { @@ -363,9 +339,7 @@ export class AccessController { } let roleIds = userRoleId ? this.userHierarchies[userRoleId] : null if (!roleIds && userRoleId) { - roleIds = (await getUserRoleHierarchy(userRoleId, { - idOnly: true, - })) as string[] + roleIds = await getUserRoleIdHierarchy(userRoleId) this.userHierarchies[userRoleId] = roleIds } @@ -411,8 +385,8 @@ export function getDBRoleID(roleName: string) { export function getExternalRoleID(roleId: string, version?: string) { // for built-in roles we want to remove the DB role ID element (role_) if ( - (roleId.startsWith(DocumentType.ROLE) && isBuiltin(roleId)) || - version === RoleIDVersion.NAME + roleId.startsWith(DocumentType.ROLE) && + (isBuiltin(roleId) || version === RoleIDVersion.NAME) ) { return roleId.split(`${DocumentType.ROLE}${SEPARATOR}`)[1] } diff --git a/packages/backend-core/src/security/tests/permissions.spec.ts b/packages/backend-core/src/security/tests/permissions.spec.ts index caf8bb29a6..39348646fb 100644 --- a/packages/backend-core/src/security/tests/permissions.spec.ts +++ b/packages/backend-core/src/security/tests/permissions.spec.ts @@ -1,4 +1,4 @@ -import { cloneDeep } from "lodash" +import cloneDeep from "lodash/cloneDeep" import * as permissions from "../permissions" import { BUILTIN_ROLE_IDS } from "../roles" diff --git a/packages/backend-core/src/users/db.ts b/packages/backend-core/src/users/db.ts new file mode 100644 index 0000000000..1d02bebc32 --- /dev/null +++ b/packages/backend-core/src/users/db.ts @@ -0,0 +1,489 @@ +import env from "../environment" +import * as eventHelpers from "./events" +import * as accounts from "../accounts" +import * as accountSdk from "../accounts" +import * as cache from "../cache" +import { getGlobalDB, getIdentity, getTenantId } from "../context" +import * as dbUtils from "../db" +import { EmailUnavailableError, HTTPError } from "../errors" +import * as platform from "../platform" +import * as sessions from "../security/sessions" +import * as usersCore from "./users" +import { + Account, + AllDocsResponse, + BulkUserCreated, + BulkUserDeleted, + isSSOAccount, + isSSOUser, + RowResponse, + SaveUserOpts, + User, + UserStatus, + UserGroup, + ContextUser, +} from 
"@budibase/types" +import { + getAccountHolderFromUserIds, + isAdmin, + validateUniqueUser, +} from "./utils" +import { searchExistingEmails } from "./lookup" +import { hash } from "../utils" + +type QuotaUpdateFn = (change: number, cb?: () => Promise) => Promise +type GroupUpdateFn = (groupId: string, userIds: string[]) => Promise +type FeatureFn = () => Promise +type GroupGetFn = (ids: string[]) => Promise +type GroupBuildersFn = (user: User) => Promise +type QuotaFns = { addUsers: QuotaUpdateFn; removeUsers: QuotaUpdateFn } +type GroupFns = { + addUsers: GroupUpdateFn + getBulk: GroupGetFn + getGroupBuilderAppIds: GroupBuildersFn +} +type FeatureFns = { isSSOEnforced: FeatureFn; isAppBuildersEnabled: FeatureFn } + +const bulkDeleteProcessing = async (dbUser: User) => { + const userId = dbUser._id as string + await platform.users.removeUser(dbUser) + await eventHelpers.handleDeleteEvents(dbUser) + await cache.user.invalidateUser(userId) + await sessions.invalidateSessions(userId, { reason: "bulk-deletion" }) +} + +export class UserDB { + static quotas: QuotaFns + static groups: GroupFns + static features: FeatureFns + + static init(quotaFns: QuotaFns, groupFns: GroupFns, featureFns: FeatureFns) { + UserDB.quotas = quotaFns + UserDB.groups = groupFns + UserDB.features = featureFns + } + + static async isPreventPasswordActions(user: User, account?: Account) { + // when in maintenance mode we allow sso users with the admin role + // to perform any password action - this prevents lockout + if (env.ENABLE_SSO_MAINTENANCE_MODE && isAdmin(user)) { + return false + } + + // SSO is enforced for all users + if (await UserDB.features.isSSOEnforced()) { + return true + } + + // Check local sso + if (isSSOUser(user)) { + return true + } + + // Check account sso + if (!account) { + account = await accountSdk.getAccountByTenantId(getTenantId()) + } + return !!(account && account.email === user.email && isSSOAccount(account)) + } + + static async buildUser( + user: User, + opts: SaveUserOpts = { + hashPassword: true, + requirePassword: true, + }, + tenantId: string, + dbUser?: any, + account?: Account + ): Promise { + let { password, _id } = user + + // don't require a password if the db user doesn't already have one + if (dbUser && !dbUser.password) { + opts.requirePassword = false + } + + let hashedPassword + if (password) { + if (await UserDB.isPreventPasswordActions(user, account)) { + throw new HTTPError("Password change is disabled for this user", 400) + } + hashedPassword = opts.hashPassword ? await hash(password) : password + } else if (dbUser) { + hashedPassword = dbUser.password + } + + // passwords are never required if sso is enforced + const requirePasswords = + opts.requirePassword && !(await UserDB.features.isSSOEnforced()) + if (!hashedPassword && requirePasswords) { + throw "Password must be specified." 
+ } + + _id = _id || dbUtils.generateGlobalUserID() + + const fullUser = { + createdAt: Date.now(), + ...dbUser, + ...user, + _id, + password: hashedPassword, + tenantId, + } + // make sure the roles object is always present + if (!fullUser.roles) { + fullUser.roles = {} + } + // add the active status to a user if its not provided + if (fullUser.status == null) { + fullUser.status = UserStatus.ACTIVE + } + + return fullUser + } + + static async allUsers() { + const db = getGlobalDB() + const response = await db.allDocs( + dbUtils.getGlobalUserParams(null, { + include_docs: true, + }) + ) + return response.rows.map((row: any) => row.doc) + } + + static async countUsersByApp(appId: string) { + let response: any = await usersCore.searchGlobalUsersByApp(appId, {}) + return { + userCount: response.length, + } + } + + static async getUsersByAppAccess(appId?: string) { + const opts: any = { + include_docs: true, + limit: 50, + } + let response: User[] = await usersCore.searchGlobalUsersByAppAccess( + appId, + opts + ) + return response + } + + static async getUserByEmail(email: string) { + return usersCore.getGlobalUserByEmail(email) + } + + /** + * Gets a user by ID from the global database, based on the current tenancy. + */ + static async getUser(userId: string) { + const user = await usersCore.getById(userId) + if (user) { + delete user.password + } + return user + } + + static async bulkGet(userIds: string[]) { + return await usersCore.bulkGetGlobalUsersById(userIds) + } + + static async bulkUpdate(users: User[]) { + return await usersCore.bulkUpdateGlobalUsers(users) + } + + static async save(user: User, opts: SaveUserOpts = {}): Promise { + // default booleans to true + if (opts.hashPassword == null) { + opts.hashPassword = true + } + if (opts.requirePassword == null) { + opts.requirePassword = true + } + const tenantId = getTenantId() + const db = getGlobalDB() + + let { email, _id, userGroups = [], roles } = user + + if (!email && !_id) { + throw new Error("_id or email is required") + } + + if ( + user.builder?.apps?.length && + !(await UserDB.features.isAppBuildersEnabled()) + ) { + throw new Error("Unable to update app builders, please check license") + } + + let dbUser: User | undefined + if (_id) { + // try to get existing user from db + try { + dbUser = (await db.get(_id)) as User + if (email && dbUser.email !== email) { + throw "Email address cannot be changed" + } + email = dbUser.email + } catch (e: any) { + if (e.status === 404) { + // do nothing, save this new user with the id specified - required for SSO auth + } else { + throw e + } + } + } + + if (!dbUser && email) { + // no id was specified - load from email instead + dbUser = await usersCore.getGlobalUserByEmail(email) + if (dbUser && dbUser._id !== _id) { + throw new EmailUnavailableError(email) + } + } + + const change = dbUser ? 0 : 1 // no change if there is existing user + return UserDB.quotas.addUsers(change, async () => { + await validateUniqueUser(email, tenantId) + + let builtUser = await UserDB.buildUser(user, opts, tenantId, dbUser) + // don't allow a user to update its own roles/perms + if (opts.currentUserId && opts.currentUserId === dbUser?._id) { + builtUser = usersCore.cleanseUserObject(builtUser, dbUser) as User + } + + if (!dbUser && roles?.length) { + builtUser.roles = { ...roles } + } + + // make sure we set the _id field for a new user + // Also if this is a new user, associate groups with them + let groupPromises = [] + if (!_id) { + _id = builtUser._id! 
+ + if (userGroups.length > 0) { + for (let groupId of userGroups) { + groupPromises.push(UserDB.groups.addUsers(groupId, [_id!])) + } + } + } + + try { + // save the user to db + let response = await db.put(builtUser) + builtUser._rev = response.rev + + await eventHelpers.handleSaveEvents(builtUser, dbUser) + await platform.users.addUser( + tenantId, + builtUser._id!, + builtUser.email, + builtUser.ssoId + ) + await cache.user.invalidateUser(response.id) + + await Promise.all(groupPromises) + + // finally returned the saved user from the db + return db.get(builtUser._id!) + } catch (err: any) { + if (err.status === 409) { + throw "User exists already" + } else { + throw err + } + } + }) + } + + static async bulkCreate( + newUsersRequested: User[], + groups: string[] + ): Promise { + const tenantId = getTenantId() + + let usersToSave: any[] = [] + let newUsers: any[] = [] + + const emails = newUsersRequested.map((user: User) => user.email) + const existingEmails = await searchExistingEmails(emails) + const unsuccessful: { email: string; reason: string }[] = [] + + for (const newUser of newUsersRequested) { + if ( + newUsers.find( + (x: User) => x.email.toLowerCase() === newUser.email.toLowerCase() + ) || + existingEmails.includes(newUser.email.toLowerCase()) + ) { + unsuccessful.push({ + email: newUser.email, + reason: `Unavailable`, + }) + continue + } + newUser.userGroups = groups + newUsers.push(newUser) + } + + const account = await accountSdk.getAccountByTenantId(tenantId) + return UserDB.quotas.addUsers(newUsers.length, async () => { + // create the promises array that will be called by bulkDocs + newUsers.forEach((user: any) => { + usersToSave.push( + UserDB.buildUser( + user, + { + hashPassword: true, + requirePassword: user.requirePassword, + }, + tenantId, + undefined, // no dbUser + account + ) + ) + }) + + const usersToBulkSave = await Promise.all(usersToSave) + await usersCore.bulkUpdateGlobalUsers(usersToBulkSave) + + // Post-processing of bulk added users, e.g. 
events and cache operations + for (const user of usersToBulkSave) { + // TODO: Refactor to bulk insert users into the info db + // instead of relying on looping tenant creation + await platform.users.addUser(tenantId, user._id, user.email) + await eventHelpers.handleSaveEvents(user, undefined) + } + + const saved = usersToBulkSave.map(user => { + return { + _id: user._id, + email: user.email, + } + }) + + // now update the groups + if (Array.isArray(saved) && groups) { + const groupPromises = [] + const createdUserIds = saved.map(user => user._id) + for (let groupId of groups) { + groupPromises.push(UserDB.groups.addUsers(groupId, createdUserIds)) + } + await Promise.all(groupPromises) + } + + return { + successful: saved, + unsuccessful, + } + }) + } + + static async bulkDelete(userIds: string[]): Promise { + const db = getGlobalDB() + + const response: BulkUserDeleted = { + successful: [], + unsuccessful: [], + } + + // remove the account holder from the delete request if present + const account = await getAccountHolderFromUserIds(userIds) + if (account) { + userIds = userIds.filter(u => u !== account.budibaseUserId) + // mark user as unsuccessful + response.unsuccessful.push({ + _id: account.budibaseUserId, + email: account.email, + reason: "Account holder cannot be deleted", + }) + } + + // Get users and delete + const allDocsResponse: AllDocsResponse = await db.allDocs({ + include_docs: true, + keys: userIds, + }) + const usersToDelete: User[] = allDocsResponse.rows.map( + (user: RowResponse) => { + return user.doc + } + ) + + // Delete from DB + const toDelete = usersToDelete.map(user => ({ + ...user, + _deleted: true, + })) + const dbResponse = await usersCore.bulkUpdateGlobalUsers(toDelete) + + await UserDB.quotas.removeUsers(toDelete.length) + for (let user of usersToDelete) { + await bulkDeleteProcessing(user) + } + + // Build Response + // index users by id + const userIndex: { [key: string]: User } = {} + usersToDelete.reduce((prev, current) => { + prev[current._id!] 
= current + return prev + }, userIndex) + + // add the successful and unsuccessful users to response + dbResponse.forEach(item => { + const email = userIndex[item.id].email + if (item.ok) { + response.successful.push({ _id: item.id, email }) + } else { + response.unsuccessful.push({ + _id: item.id, + email, + reason: "Database error", + }) + } + }) + + return response + } + + static async destroy(id: string) { + const db = getGlobalDB() + const dbUser = (await db.get(id)) as User + const userId = dbUser._id as string + + if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) { + // root account holder can't be deleted from inside budibase + const email = dbUser.email + const account = await accounts.getAccount(email) + if (account) { + if (dbUser.userId === getIdentity()!._id) { + throw new HTTPError('Please visit "Account" to delete this user', 400) + } else { + throw new HTTPError("Account holder cannot be deleted", 400) + } + } + } + + await platform.users.removeUser(dbUser) + + await db.remove(userId, dbUser._rev) + + await UserDB.quotas.removeUsers(1) + await eventHelpers.handleDeleteEvents(dbUser) + await cache.user.invalidateUser(userId) + await sessions.invalidateSessions(userId, { reason: "deletion" }) + } + + static async getGroups(groupIds: string[]) { + return await this.groups.getBulk(groupIds) + } + + static async getGroupBuilderAppIds(user: User) { + return await this.groups.getGroupBuilderAppIds(user) + } +} diff --git a/packages/worker/src/sdk/users/events.ts b/packages/backend-core/src/users/events.ts similarity index 86% rename from packages/worker/src/sdk/users/events.ts rename to packages/backend-core/src/users/events.ts index 7d86182a3c..f170c9ffe9 100644 --- a/packages/worker/src/sdk/users/events.ts +++ b/packages/backend-core/src/users/events.ts @@ -1,15 +1,18 @@ -import env from "../../environment" -import { events, accounts, tenancy } from "@budibase/backend-core" +import env from "../environment" +import * as events from "../events" +import * as accounts from "../accounts" +import { getTenantId } from "../context" import { User, UserRoles, CloudAccount } from "@budibase/types" +import { hasBuilderPermissions, hasAdminPermissions } from "./utils" export const handleDeleteEvents = async (user: any) => { await events.user.deleted(user) - if (isBuilder(user)) { + if (hasBuilderPermissions(user)) { await events.user.permissionBuilderRemoved(user) } - if (isAdmin(user)) { + if (hasAdminPermissions(user)) { await events.user.permissionAdminRemoved(user) } } @@ -55,7 +58,7 @@ export const handleSaveEvents = async ( user: User, existingUser: User | undefined ) => { - const tenantId = tenancy.getTenantId() + const tenantId = getTenantId() let tenantAccount: CloudAccount | undefined if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) { tenantAccount = await accounts.getAccountByTenantId(tenantId) @@ -103,23 +106,20 @@ export const handleSaveEvents = async ( await handleAppRoleEvents(user, existingUser) } -const isBuilder = (user: any) => user.builder && user.builder.global -const isAdmin = (user: any) => user.admin && user.admin.global - export const isAddingBuilder = (user: any, existingUser: any) => { - return isAddingPermission(user, existingUser, isBuilder) + return isAddingPermission(user, existingUser, hasBuilderPermissions) } export const isRemovingBuilder = (user: any, existingUser: any) => { - return isRemovingPermission(user, existingUser, isBuilder) + return isRemovingPermission(user, existingUser, hasBuilderPermissions) } const isAddingAdmin = (user: any, 
existingUser: any) => { - return isAddingPermission(user, existingUser, isAdmin) + return isAddingPermission(user, existingUser, hasAdminPermissions) } const isRemovingAdmin = (user: any, existingUser: any) => { - return isRemovingPermission(user, existingUser, isAdmin) + return isRemovingPermission(user, existingUser, hasAdminPermissions) } const isOnboardingComplete = (user: any, existingUser: any) => { diff --git a/packages/backend-core/src/users/index.ts b/packages/backend-core/src/users/index.ts new file mode 100644 index 0000000000..c11d2a2c62 --- /dev/null +++ b/packages/backend-core/src/users/index.ts @@ -0,0 +1,4 @@ +export * from "./users" +export * from "./utils" +export * from "./lookup" +export { UserDB } from "./db" diff --git a/packages/backend-core/src/users/lookup.ts b/packages/backend-core/src/users/lookup.ts new file mode 100644 index 0000000000..17d0e91d88 --- /dev/null +++ b/packages/backend-core/src/users/lookup.ts @@ -0,0 +1,102 @@ +import { + AccountMetadata, + PlatformUser, + PlatformUserByEmail, + User, +} from "@budibase/types" +import * as dbUtils from "../db" +import { ViewName } from "../constants" + +/** + * Apply a system-wide search on emails: + * - in tenant + * - cross tenant + * - accounts + * return an array of emails that match the supplied emails. + */ +export async function searchExistingEmails(emails: string[]) { + let matchedEmails: string[] = [] + + const existingTenantUsers = await getExistingTenantUsers(emails) + matchedEmails.push(...existingTenantUsers.map(user => user.email)) + + const existingPlatformUsers = await getExistingPlatformUsers(emails) + matchedEmails.push(...existingPlatformUsers.map(user => user._id!)) + + const existingAccounts = await getExistingAccounts(emails) + matchedEmails.push(...existingAccounts.map(account => account.email)) + + return [...new Set(matchedEmails.map(email => email.toLowerCase()))] +} + +// lookup, could be email or userId, either will return a doc +export async function getPlatformUser( + identifier: string +): Promise { + // use the view here and allow to find anyone regardless of casing + // Use lowercase to ensure email login is case insensitive + return (await dbUtils.queryPlatformView(ViewName.PLATFORM_USERS_LOWERCASE, { + keys: [identifier.toLowerCase()], + include_docs: true, + })) as PlatformUser +} + +export async function getExistingTenantUsers( + emails: string[] +): Promise { + const lcEmails = emails.map(email => email.toLowerCase()) + const params = { + keys: lcEmails, + include_docs: true, + } + + const opts = { + arrayResponse: true, + } + + return (await dbUtils.queryGlobalView( + ViewName.USER_BY_EMAIL, + params, + undefined, + opts + )) as User[] +} + +export async function getExistingPlatformUsers( + emails: string[] +): Promise { + const lcEmails = emails.map(email => email.toLowerCase()) + const params = { + keys: lcEmails, + include_docs: true, + } + + const opts = { + arrayResponse: true, + } + return (await dbUtils.queryPlatformView( + ViewName.PLATFORM_USERS_LOWERCASE, + params, + opts + )) as PlatformUserByEmail[] +} + +export async function getExistingAccounts( + emails: string[] +): Promise { + const lcEmails = emails.map(email => email.toLowerCase()) + const params = { + keys: lcEmails, + include_docs: true, + } + + const opts = { + arrayResponse: true, + } + + return (await dbUtils.queryPlatformView( + ViewName.ACCOUNT_BY_EMAIL, + params, + opts + )) as AccountMetadata[] +} diff --git a/packages/backend-core/src/users.ts b/packages/backend-core/src/users/users.ts 
similarity index 88% rename from packages/backend-core/src/users.ts rename to packages/backend-core/src/users/users.ts index b49058f546..a7e1389920 100644 --- a/packages/backend-core/src/users.ts +++ b/packages/backend-core/src/users/users.ts @@ -11,10 +11,16 @@ import { SEPARATOR, UNICODE_MAX, ViewName, -} from "./db" -import { BulkDocsResponse, SearchUsersRequest, User } from "@budibase/types" -import { getGlobalDB } from "./context" -import * as context from "./context" +} from "../db" +import { + BulkDocsResponse, + SearchUsersRequest, + User, + ContextUser, +} from "@budibase/types" +import { getGlobalDB } from "../context" +import * as context from "../context" +import { user as userCache } from "../cache" type GetOpts = { cleanup?: boolean } @@ -178,7 +184,7 @@ export const getGlobalUserByAppPage = (appId: string, user: User) => { * Performs a starts with search on the global email view. */ export const searchGlobalUsersByEmail = async ( - email: string, + email: string | unknown, opts: any, getOpts?: GetOpts ) => { @@ -248,3 +254,23 @@ export async function getUserCount() { }) return response.total_rows } + +// used to remove the builder/admin permissions, for processing the +// user as an app user (they may have some specific role/group +export function removePortalUserPermissions(user: User | ContextUser) { + delete user.admin + delete user.builder + return user +} + +export function cleanseUserObject(user: User | ContextUser, base?: User) { + delete user.admin + delete user.builder + delete user.roles + if (base) { + user.admin = base.admin + user.builder = base.builder + user.roles = base.roles + } + return user +} diff --git a/packages/backend-core/src/users/utils.ts b/packages/backend-core/src/users/utils.ts new file mode 100644 index 0000000000..af0e8e10c7 --- /dev/null +++ b/packages/backend-core/src/users/utils.ts @@ -0,0 +1,55 @@ +import { CloudAccount } from "@budibase/types" +import * as accountSdk from "../accounts" +import env from "../environment" +import { getPlatformUser } from "./lookup" +import { EmailUnavailableError } from "../errors" +import { getTenantId } from "../context" +import { sdk } from "@budibase/shared-core" +import { getAccountByTenantId } from "../accounts" + +// extract from shared-core to make easily accessible from backend-core +export const isBuilder = sdk.users.isBuilder +export const isAdmin = sdk.users.isAdmin +export const isGlobalBuilder = sdk.users.isGlobalBuilder +export const isAdminOrBuilder = sdk.users.isAdminOrBuilder +export const hasAdminPermissions = sdk.users.hasAdminPermissions +export const hasBuilderPermissions = sdk.users.hasBuilderPermissions +export const hasAppBuilderPermissions = sdk.users.hasAppBuilderPermissions + +export async function validateUniqueUser(email: string, tenantId: string) { + // check budibase users in other tenants + if (env.MULTI_TENANCY) { + const tenantUser = await getPlatformUser(email) + if (tenantUser != null && tenantUser.tenantId !== tenantId) { + throw new EmailUnavailableError(email) + } + } + + // check root account users in account portal + if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) { + const account = await accountSdk.getAccount(email) + if (account && account.verified && account.tenantId !== tenantId) { + throw new EmailUnavailableError(email) + } + } +} + +/** + * For the given user id's, return the account holder if it is in the ids. 
+ */ +export async function getAccountHolderFromUserIds( + userIds: string[] +): Promise { + if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) { + const tenantId = getTenantId() + const account = await getAccountByTenantId(tenantId) + if (!account) { + throw new Error(`Account not found for tenantId=${tenantId}`) + } + + const budibaseUserId = account.budibaseUserId + if (userIds.includes(budibaseUserId)) { + return account + } + } +} diff --git a/packages/backend-core/src/utils/utils.ts b/packages/backend-core/src/utils/utils.ts index 82da95983a..ac43fa1fdb 100644 --- a/packages/backend-core/src/utils/utils.ts +++ b/packages/backend-core/src/utils/utils.ts @@ -10,7 +10,7 @@ import { Event, TenantResolutionStrategy, } from "@budibase/types" -import { SetOption } from "cookies" +import type { SetOption } from "cookies" const jwt = require("jsonwebtoken") const APP_PREFIX = DocumentType.APP + SEPARATOR diff --git a/packages/backend-core/tests/core/utilities/mocks/events.ts b/packages/backend-core/tests/core/utilities/mocks/events.ts index 81de1f8175..fef730768a 100644 --- a/packages/backend-core/tests/core/utilities/mocks/events.ts +++ b/packages/backend-core/tests/core/utilities/mocks/events.ts @@ -1,5 +1,3 @@ -import * as events from "../../../../src/events" - beforeAll(async () => { const processors = await import("../../../../src/events/processors") const events = await import("../../../../src/events") diff --git a/packages/backend-core/tests/core/utilities/mocks/licenses.ts b/packages/backend-core/tests/core/utilities/mocks/licenses.ts index 4272e78eb8..758fd6bf9a 100644 --- a/packages/backend-core/tests/core/utilities/mocks/licenses.ts +++ b/packages/backend-core/tests/core/utilities/mocks/licenses.ts @@ -1,5 +1,5 @@ import { Feature, License, Quotas } from "@budibase/types" -import _ from "lodash" +import cloneDeep from "lodash/cloneDeep" let CLOUD_FREE_LICENSE: License let UNLIMITED_LICENSE: License @@ -58,7 +58,7 @@ export const useCloudFree = () => { // FEATURES const useFeature = (feature: Feature) => { - const license = _.cloneDeep(UNLIMITED_LICENSE) + const license = cloneDeep(UNLIMITED_LICENSE) const opts: UseLicenseOpts = { features: [feature], } @@ -86,6 +86,10 @@ export const useAuditLogs = () => { return useFeature(Feature.AUDIT_LOGS) } +export const useExpandedPublicApi = () => { + return useFeature(Feature.EXPANDED_PUBLIC_API) +} + export const useScimIntegration = () => { return useFeature(Feature.SCIM) } @@ -94,10 +98,18 @@ export const useSyncAutomations = () => { return useFeature(Feature.SYNC_AUTOMATIONS) } +export const useAppBuilders = () => { + return useFeature(Feature.APP_BUILDERS) +} + +export const useViewPermissions = () => { + return useFeature(Feature.VIEW_PERMISSIONS) +} + // QUOTAS export const setAutomationLogsQuota = (value: number) => { - const license = _.cloneDeep(UNLIMITED_LICENSE) + const license = cloneDeep(UNLIMITED_LICENSE) license.quotas.constant.automationLogRetentionDays.value = value return useLicense(license) } diff --git a/packages/backend-core/tests/core/utilities/structures/accounts.ts b/packages/backend-core/tests/core/utilities/structures/accounts.ts index 8476399aa3..515f94db1e 100644 --- a/packages/backend-core/tests/core/utilities/structures/accounts.ts +++ b/packages/backend-core/tests/core/utilities/structures/accounts.ts @@ -1,4 +1,4 @@ -import { generator, uuid, quotas } from "." +import { generator, quotas, uuid } from "." 
import { generateGlobalUserID } from "../../../../src/docIds" import { Account, @@ -6,12 +6,13 @@ import { AccountSSOProviderType, AuthType, CloudAccount, - Hosting, - SSOAccount, CreateAccount, CreatePassswordAccount, + CreateVerifiableSSOAccount, + Hosting, + SSOAccount, } from "@budibase/types" -import _ from "lodash" +import sample from "lodash/sample" export const account = (partial: Partial = {}): Account => { return { @@ -46,13 +47,11 @@ export const cloudAccount = (): CloudAccount => { } function providerType(): AccountSSOProviderType { - return _.sample( - Object.values(AccountSSOProviderType) - ) as AccountSSOProviderType + return sample(Object.values(AccountSSOProviderType)) as AccountSSOProviderType } function provider(): AccountSSOProvider { - return _.sample(Object.values(AccountSSOProvider)) as AccountSSOProvider + return sample(Object.values(AccountSSOProvider)) as AccountSSOProvider } export function ssoAccount(account: Account = cloudAccount()): SSOAccount { @@ -70,6 +69,23 @@ export function ssoAccount(account: Account = cloudAccount()): SSOAccount { } } +export function verifiableSsoAccount( + account: Account = cloudAccount() +): SSOAccount { + return { + ...account, + authType: AuthType.SSO, + oauth2: { + accessToken: generator.string(), + refreshToken: generator.string(), + }, + pictureUrl: generator.url(), + provider: AccountSSOProvider.MICROSOFT, + providerType: AccountSSOProviderType.MICROSOFT, + thirdPartyProfile: { id: "abc123" }, + } +} + export const cloudCreateAccount: CreatePassswordAccount = { email: "cloud@budibase.com", tenantId: "cloud", @@ -93,6 +109,19 @@ export const cloudSSOCreateAccount: CreateAccount = { profession: "Software Engineer", } +export const cloudVerifiableSSOCreateAccount: CreateVerifiableSSOAccount = { + email: "cloud-sso@budibase.com", + tenantId: "cloud-sso", + hosting: Hosting.CLOUD, + authType: AuthType.SSO, + tenantName: "cloudsso", + name: "Budi Armstrong", + size: "10+", + profession: "Software Engineer", + provider: AccountSSOProvider.MICROSOFT, + thirdPartyProfile: { id: "abc123" }, +} + export const selfCreateAccount: CreatePassswordAccount = { email: "self@budibase.com", tenantId: "self", diff --git a/packages/backend-core/tests/core/utilities/structures/scim.ts b/packages/backend-core/tests/core/utilities/structures/scim.ts index 741cff165e..80f41c605d 100644 --- a/packages/backend-core/tests/core/utilities/structures/scim.ts +++ b/packages/backend-core/tests/core/utilities/structures/scim.ts @@ -1,7 +1,6 @@ import { ScimCreateGroupRequest, ScimCreateUserRequest } from "@budibase/types" import { uuid } from "./common" import { generator } from "./generator" -import _ from "lodash" interface CreateUserRequestFields { externalId: string @@ -20,10 +19,10 @@ export function createUserRequest(userData?: Partial) { username: generator.name(), } - const { externalId, email, firstName, lastName, username } = _.assign( - defaultValues, - userData - ) + const { externalId, email, firstName, lastName, username } = { + ...defaultValues, + ...userData, + } let user: ScimCreateUserRequest = { schemas: [ diff --git a/packages/backend-core/tests/core/utilities/structures/shared.ts b/packages/backend-core/tests/core/utilities/structures/shared.ts deleted file mode 100644 index de0e19486c..0000000000 --- a/packages/backend-core/tests/core/utilities/structures/shared.ts +++ /dev/null @@ -1,19 +0,0 @@ -import { User } from "@budibase/types" -import { generator } from "./generator" -import { uuid } from "./common" - -export const newEmail = () 
=> { - return `${uuid()}@test.com` -} - -export const user = (userProps?: any): User => { - return { - email: newEmail(), - password: "test", - roles: { app_test: "admin" }, - firstName: generator.first(), - lastName: generator.last(), - pictureUrl: "http://test.com", - ...userProps, - } -} diff --git a/packages/backend-core/tests/core/utilities/structures/sso.ts b/packages/backend-core/tests/core/utilities/structures/sso.ts index 9da9c82223..2e3af712a9 100644 --- a/packages/backend-core/tests/core/utilities/structures/sso.ts +++ b/packages/backend-core/tests/core/utilities/structures/sso.ts @@ -13,9 +13,8 @@ import { } from "@budibase/types" import { generator } from "./generator" import { email, uuid } from "./common" -import * as shared from "./shared" -import { user } from "./shared" -import _ from "lodash" +import * as users from "./users" +import sample from "lodash/sample" export function OAuth(): OAuth2 { return { @@ -26,7 +25,7 @@ export function OAuth(): OAuth2 { export function authDetails(userDoc?: User): SSOAuthDetails { if (!userDoc) { - userDoc = user() + userDoc = users.user() } const userId = userDoc._id || uuid() @@ -47,12 +46,12 @@ export function authDetails(userDoc?: User): SSOAuthDetails { } export function providerType(): SSOProviderType { - return _.sample(Object.values(SSOProviderType)) as SSOProviderType + return sample(Object.values(SSOProviderType)) as SSOProviderType } export function ssoProfile(user?: User): SSOProfile { if (!user) { - user = shared.user() + user = users.user() } return { id: user._id!, diff --git a/packages/backend-core/tests/core/utilities/structures/users.ts b/packages/backend-core/tests/core/utilities/structures/users.ts index 7a6b4f0d80..66d23696e0 100644 --- a/packages/backend-core/tests/core/utilities/structures/users.ts +++ b/packages/backend-core/tests/core/utilities/structures/users.ts @@ -1,13 +1,35 @@ import { AdminUser, + AdminOnlyUser, BuilderUser, SSOAuthDetails, SSOUser, + User, } from "@budibase/types" -import { user } from "./shared" import { authDetails } from "./sso" +import { uuid } from "./common" +import { generator } from "./generator" +import { tenant } from "." 
-export { user, newEmail } from "./shared" +export const newEmail = () => { + return `${uuid()}@test.com` +} + +export const user = (userProps?: Partial>): User => { + const userId = userProps?._id + return { + _id: userId, + userId, + email: newEmail(), + password: "test", + roles: { app_test: "admin" }, + firstName: generator.first(), + lastName: generator.last(), + pictureUrl: "http://test.com", + tenantId: tenant.id(), + ...userProps, + } +} export const adminUser = (userProps?: any): AdminUser => { return { @@ -21,7 +43,16 @@ export const adminUser = (userProps?: any): AdminUser => { } } -export const builderUser = (userProps?: any): BuilderUser => { +export const adminOnlyUser = (userProps?: any): AdminOnlyUser => { + return { + ...user(userProps), + admin: { + global: true, + }, + } +} + +export const builderUser = (userProps?: Partial): BuilderUser => { return { ...user(userProps), builder: { @@ -30,6 +61,15 @@ export const builderUser = (userProps?: any): BuilderUser => { } } +export const appBuilderUser = (appId: string, userProps?: any): BuilderUser => { + return { + ...user(userProps), + builder: { + apps: [appId], + }, + } +} + export function ssoUser( opts: { user?: any; details?: SSOAuthDetails } = {} ): SSOUser { diff --git a/packages/backend-core/tests/core/utilities/testContainerUtils.ts b/packages/backend-core/tests/core/utilities/testContainerUtils.ts index f6c702f7ef..7da6cbc777 100644 --- a/packages/backend-core/tests/core/utilities/testContainerUtils.ts +++ b/packages/backend-core/tests/core/utilities/testContainerUtils.ts @@ -32,8 +32,8 @@ function getTestContainerSettings( ): string | null { const entry = Object.entries(global).find( ([k]) => - k.includes(`_${serverName.toUpperCase()}`) && - k.includes(`_${key.toUpperCase()}__`) + k.includes(`${serverName.toUpperCase()}`) && + k.includes(`${key.toUpperCase()}`) ) if (!entry) { return null @@ -67,27 +67,14 @@ function getContainerInfo(containerName: string, port: number) { } function getCouchConfig() { - return getContainerInfo("couchdb-service", 5984) -} - -function getMinioConfig() { - return getContainerInfo("minio-service", 9000) -} - -function getRedisConfig() { - return getContainerInfo("redis-service", 6379) + return getContainerInfo("couchdb", 5984) } export function setupEnv(...envs: any[]) { - const couch = getCouchConfig(), - minio = getCouchConfig(), - redis = getRedisConfig() + const couch = getCouchConfig() const configs = [ { key: "COUCH_DB_PORT", value: couch.port }, { key: "COUCH_DB_URL", value: couch.url }, - { key: "MINIO_PORT", value: minio.port }, - { key: "MINIO_URL", value: minio.url }, - { key: "REDIS_URL", value: redis.url }, ] for (const config of configs.filter(x => !!x.value)) { diff --git a/packages/backend-core/tests/extra/DBTestConfiguration.ts b/packages/backend-core/tests/extra/DBTestConfiguration.ts index a2550a6e24..99a5bcba46 100644 --- a/packages/backend-core/tests/extra/DBTestConfiguration.ts +++ b/packages/backend-core/tests/extra/DBTestConfiguration.ts @@ -18,7 +18,7 @@ class DBTestConfiguration { // TENANCY - doInTenant(task: any) { + doInTenant(task: () => Promise) { return context.doInTenant(this.tenantId, () => { return task() }) diff --git a/packages/backend-core/tests/index.ts b/packages/backend-core/tests/index.ts index 50fc1dc431..cdbacc12d8 100644 --- a/packages/backend-core/tests/index.ts +++ b/packages/backend-core/tests/index.ts @@ -1 +1,2 @@ export * from "./core/utilities" +export * from "./extra" diff --git a/packages/backend-core/tsconfig.build.json 
b/packages/backend-core/tsconfig.build.json index bfbed31e23..c714f4d942 100644 --- a/packages/backend-core/tsconfig.build.json +++ b/packages/backend-core/tsconfig.build.json @@ -12,7 +12,11 @@ "declaration": true, "types": ["node", "jest"], "outDir": "dist", - "skipLibCheck": true + "skipLibCheck": true, + "paths": { + "@budibase/types": ["../types/src"], + "@budibase/shared-core": ["../shared-core/src"] + } }, "include": ["**/*.js", "**/*.ts"], "exclude": [ diff --git a/packages/backend-core/tsconfig.json b/packages/backend-core/tsconfig.json index 2b1419b051..33e37179d7 100644 --- a/packages/backend-core/tsconfig.json +++ b/packages/backend-core/tsconfig.json @@ -1,12 +1,4 @@ { "extends": "./tsconfig.build.json", - "compilerOptions": { - "composite": true, - "baseUrl": ".", - "paths": { - "@budibase/types": ["../types/src"] - } - }, - "exclude": ["node_modules", "dist"] } diff --git a/packages/bbui/package.json b/packages/bbui/package.json index 4d39f6330b..4791776c57 100644 --- a/packages/bbui/package.json +++ b/packages/bbui/package.json @@ -20,14 +20,12 @@ "@rollup/plugin-commonjs": "^16.0.0", "@rollup/plugin-json": "^4.1.0", "@rollup/plugin-node-resolve": "^11.2.1", - "cross-env": "^7.0.2", - "nollup": "^0.14.1", "postcss": "^8.2.9", "rollup": "^2.45.2", "rollup-plugin-postcss": "^4.0.0", "rollup-plugin-svelte": "^7.1.0", "rollup-plugin-terser": "^7.0.2", - "svelte": "^3.38.2" + "svelte": "3.49.0" }, "keywords": [ "svelte" @@ -82,10 +80,11 @@ "@spectrum-css/typography": "3.0.1", "@spectrum-css/underlay": "2.0.9", "@spectrum-css/vars": "3.0.1", - "dayjs": "^1.10.4", + "dayjs": "^1.10.8", "easymde": "^2.16.1", "svelte-flatpickr": "3.2.3", - "svelte-portal": "^1.0.0" + "svelte-portal": "^1.0.0", + "svelte-dnd-action": "^0.9.8" }, "resolutions": { "loader-utils": "1.4.1" @@ -104,6 +103,5 @@ ] } } - }, - "gitHead": "d1836a898cab3f8ab80ee6d8f42be1a9eed7dcdc" + } } diff --git a/packages/bbui/src/Actions/position_dropdown.js b/packages/bbui/src/Actions/position_dropdown.js index 8fa02bb8f3..f2018272f6 100644 --- a/packages/bbui/src/Actions/position_dropdown.js +++ b/packages/bbui/src/Actions/position_dropdown.js @@ -17,6 +17,8 @@ export default function positionDropdown(element, opts) { maxWidth, useAnchorWidth, offset = 5, + customUpdate, + offsetBelow, } = opts if (!anchor) { return @@ -33,33 +35,41 @@ export default function positionDropdown(element, opts) { top: null, } - // Determine vertical styles - if (align === "right-outside") { - styles.top = anchorBounds.top - } else if (window.innerHeight - anchorBounds.bottom < 100) { - styles.top = anchorBounds.top - elementBounds.height - offset - styles.maxHeight = maxHeight || 240 + if (typeof customUpdate === "function") { + styles = customUpdate(anchorBounds, elementBounds, styles) } else { - styles.top = anchorBounds.bottom + offset - styles.maxHeight = - maxHeight || window.innerHeight - anchorBounds.bottom - 20 - } + // Determine vertical styles + if (align === "right-outside") { + styles.top = anchorBounds.top + } else if ( + window.innerHeight - anchorBounds.bottom < + (maxHeight || 100) + ) { + styles.top = anchorBounds.top - elementBounds.height - offset + styles.maxHeight = maxHeight || 240 + } else { + styles.top = anchorBounds.bottom + (offsetBelow || offset) + styles.maxHeight = + maxHeight || window.innerHeight - anchorBounds.bottom - 20 + } - // Determine horizontal styles - if (!maxWidth && useAnchorWidth) { - styles.maxWidth = anchorBounds.width - } - if (useAnchorWidth) { - styles.minWidth = anchorBounds.width - } - if 
(align === "right") { - styles.left = anchorBounds.left + anchorBounds.width - elementBounds.width - } else if (align === "right-outside") { - styles.left = anchorBounds.right + offset - } else if (align === "left-outside") { - styles.left = anchorBounds.left - elementBounds.width - offset - } else { - styles.left = anchorBounds.left + // Determine horizontal styles + if (!maxWidth && useAnchorWidth) { + styles.maxWidth = anchorBounds.width + } + if (useAnchorWidth) { + styles.minWidth = anchorBounds.width + } + if (align === "right") { + styles.left = + anchorBounds.left + anchorBounds.width - elementBounds.width + } else if (align === "right-outside") { + styles.left = anchorBounds.right + offset + } else if (align === "left-outside") { + styles.left = anchorBounds.left - elementBounds.width - offset + } else { + styles.left = anchorBounds.left + } } // Apply styles diff --git a/packages/bbui/src/Banner/Banner.svelte b/packages/bbui/src/Banner/Banner.svelte index 3810021a61..a04d469cc7 100644 --- a/packages/bbui/src/Banner/Banner.svelte +++ b/packages/bbui/src/Banner/Banner.svelte @@ -66,6 +66,10 @@ pointer-events: all; width: 100%; } + + .spectrum-Toast--neutral { + background-color: var(--grey-2); + } .spectrum-Button { border: 1px solid rgba(255, 255, 255, 0.2); } diff --git a/packages/bbui/src/ColorPicker/ColorPicker.svelte b/packages/bbui/src/ColorPicker/ColorPicker.svelte index 9a70134fb6..2ba5309860 100644 --- a/packages/bbui/src/ColorPicker/ColorPicker.svelte +++ b/packages/bbui/src/ColorPicker/ColorPicker.svelte @@ -1,8 +1,8 @@ -
-
(open = true)}> -
-
- {#if open} -
+
{ + dropdown.toggle() + }} +> +
+
+ + + +
{#each categories as category}
{category.label}
@@ -187,8 +184,8 @@
- {/if} -
+ + diff --git a/packages/bbui/src/Form/Core/Combobox.svelte b/packages/bbui/src/Form/Core/Combobox.svelte index b68a24d8db..b1b264a9b7 100644 --- a/packages/bbui/src/Form/Core/Combobox.svelte +++ b/packages/bbui/src/Form/Core/Combobox.svelte @@ -2,8 +2,8 @@ import "@spectrum-css/inputgroup/dist/index-vars.css" import "@spectrum-css/popover/dist/index-vars.css" import "@spectrum-css/menu/dist/index-vars.css" - import { fly } from "svelte/transition" import { createEventDispatcher } from "svelte" + import clickOutside from "../../Actions/click_outside" export let value = null export let id = null @@ -80,10 +80,11 @@ {#if open} -
(open = false)} />
{ + open = false + }} >
    {#if options && Array.isArray(options)} @@ -125,14 +126,6 @@ .spectrum-Textfield-input { width: 0; } - .overlay { - position: fixed; - top: 0; - left: 0; - width: 100vw; - height: 100vh; - z-index: 999; - } .spectrum-Popover { max-height: 240px; width: 100%; diff --git a/packages/bbui/src/Form/Core/Multiselect.svelte b/packages/bbui/src/Form/Core/Multiselect.svelte index ea9b5858f5..8d72dd0652 100644 --- a/packages/bbui/src/Form/Core/Multiselect.svelte +++ b/packages/bbui/src/Form/Core/Multiselect.svelte @@ -14,9 +14,12 @@ export let autocomplete = false export let sort = false export let autoWidth = false - export let fetchTerm = null - export let useFetch = false + export let searchTerm = null export let customPopoverHeight + export let customPopoverOffsetBelow + export let customPopoverMaxHeight + export let open = false + export let loading const dispatch = createEventDispatcher() @@ -79,6 +82,7 @@ diff --git a/packages/bbui/src/Form/Core/Picker.svelte b/packages/bbui/src/Form/Core/Picker.svelte index aada17b318..aa06d5f748 100644 --- a/packages/bbui/src/Form/Core/Picker.svelte +++ b/packages/bbui/src/Form/Core/Picker.svelte @@ -2,12 +2,15 @@ import "@spectrum-css/picker/dist/index-vars.css" import "@spectrum-css/popover/dist/index-vars.css" import "@spectrum-css/menu/dist/index-vars.css" - import { createEventDispatcher } from "svelte" + import { createEventDispatcher, onDestroy } from "svelte" import clickOutside from "../../Actions/click_outside" import Search from "./Search.svelte" import Icon from "../../Icon/Icon.svelte" import StatusLight from "../../StatusLight/StatusLight.svelte" import Popover from "../../Popover/Popover.svelte" + import Tags from "../../Tags/Tags.svelte" + import Tag from "../../Tags/Tag.svelte" + import ProgressCircle from "../../ProgressCircle/ProgressCircle.svelte" export let id = null export let disabled = false @@ -26,23 +29,27 @@ export let getOptionIcon = () => null export let useOptionIconImage = false export let getOptionColour = () => null + export let getOptionSubtitle = () => null export let open = false export let readonly = false export let quiet = false export let autoWidth = false export let autocomplete = false export let sort = false - export let fetchTerm = null - export let useFetch = false + export let searchTerm = null export let customPopoverHeight + export let customPopoverOffsetBelow + export let customPopoverMaxHeight export let align = "left" export let footer = null + export let customAnchor = null + export let loading const dispatch = createEventDispatcher() - let searchTerm = null let button let popover + let component $: sortedOptions = getSortedOptions(options, getOptionLabel, sort) $: filteredOptions = getFilteredOptions( @@ -77,7 +84,7 @@ } const getFilteredOptions = (options, term, getLabel) => { - if (autocomplete && term && !fetchTerm) { + if (autocomplete && term) { const lowerCaseTerm = term.toLowerCase() return options.filter(option => { return `${getLabel(option)}`.toLowerCase().includes(lowerCaseTerm) @@ -85,6 +92,20 @@ } return options } + + const onScroll = e => { + const scrollPxThreshold = 100 + const scrollPositionFromBottom = + e.target.scrollHeight - e.target.clientHeight - e.target.scrollTop + if (scrollPositionFromBottom < scrollPxThreshold) { + dispatch("loadMore") + } + } + + $: component?.addEventListener("scroll", onScroll) + onDestroy(() => { + component?.removeEventListener("scroll", null) + }) - (open = false)} useAnchorWidth={!autoWidth} maxWidth={autoWidth ? 
400 : null} + maxHeight={customPopoverMaxHeight} customHeight={customPopoverHeight} + offsetBelow={customPopoverOffsetBelow} >
    {#if autocomplete} - useFetch ? (fetchTerm = event.detail) : (searchTerm = event.detail)} + value={searchTerm} + on:change={event => (searchTerm = event.detail)} {disabled} placeholder="Search" /> {/if} -
      +
        {#if placeholderOption}
{/if} + {#if getOptionSubtitle(option, idx)} + {getOptionSubtitle(option, idx)} + {/if} + {getOptionLabel(option, idx)} + {#if option.tag} + + + {option.tag} + + + {/if} + {#if loading} +
        + +
        + {/if} + {#if footer} +
    {/if} - { - editableColumn = { - ...editableColumn, - formula: e.detail, - } - }} - bindings={getBindings({ table })} - allowJS - /> +
    +
    + +
    +
    + { + editableColumn = { + ...editableColumn, + formula: e.detail, + } + }} + bindings={getBindings({ table })} + allowJS + /> +
    +
    {:else if editableColumn.type === JSON_TYPE} + {:else if editableColumn.type === USER_REFRENCE_TYPE} + + (editableColumn.relationshipType = e.detail + ? RelationshipType.MANY_TO_MANY + : RelationshipType.ONE_TO_MANY)} + disabled={!isCreating} + thin + text="Allow multiple users" + /> {/if} {#if editableColumn.type === AUTO_TYPE || editableColumn.autocolumn} + + diff --git a/packages/builder/src/components/backend/DataTable/modals/CreateViewModal.svelte b/packages/builder/src/components/backend/DataTable/modals/CreateViewModal.svelte deleted file mode 100644 index 8f2679f874..0000000000 --- a/packages/builder/src/components/backend/DataTable/modals/CreateViewModal.svelte +++ /dev/null @@ -1,38 +0,0 @@ - - - - - diff --git a/packages/builder/src/components/backend/DataTable/modals/ExportModal.svelte b/packages/builder/src/components/backend/DataTable/modals/ExportModal.svelte index ac168698fc..09f76d3522 100644 --- a/packages/builder/src/components/backend/DataTable/modals/ExportModal.svelte +++ b/packages/builder/src/components/backend/DataTable/modals/ExportModal.svelte @@ -9,30 +9,43 @@ import download from "downloadjs" import { API } from "api" import { Constants, LuceneUtils } from "@budibase/frontend-core" - - const FORMATS = [ - { - name: "CSV", - key: "csv", - }, - { - name: "JSON", - key: "json", - }, - { - name: "JSON with Schema", - key: "jsonWithSchema", - }, - ] + import { ROW_EXPORT_FORMATS } from "constants/backend" export let view export let filters export let sorting export let selectedRows = [] + export let formats - let exportFormat = FORMATS[0].key + const FORMATS = [ + { + name: "CSV", + key: ROW_EXPORT_FORMATS.CSV, + }, + { + name: "JSON", + key: ROW_EXPORT_FORMATS.JSON, + }, + { + name: "JSON with Schema", + key: ROW_EXPORT_FORMATS.JSON_WITH_SCHEMA, + }, + ] + + $: options = FORMATS.filter(format => { + if (formats && !formats.includes(format.key)) { + return false + } + return true + }) + + let exportFormat let filterLookup + $: if (options && !exportFormat) { + exportFormat = Array.isArray(options) ? options[0]?.key : [] + } + $: luceneFilter = LuceneUtils.buildLuceneQuery(filters) $: exportOpDisplay = buildExportOpDisplay(sorting, filterDisplay, filters) @@ -190,7 +203,7 @@ + diff --git a/packages/builder/src/components/backend/DataTable/modals/grid/GridEditColumnModal.svelte b/packages/builder/src/components/backend/DataTable/modals/grid/GridEditColumnModal.svelte index 3297037265..020c58d19f 100644 --- a/packages/builder/src/components/backend/DataTable/modals/grid/GridEditColumnModal.svelte +++ b/packages/builder/src/components/backend/DataTable/modals/grid/GridEditColumnModal.svelte @@ -1,24 +1,19 @@ - - - + diff --git a/packages/builder/src/components/backend/DataTable/modals/grid/GridUsersTableButton.svelte b/packages/builder/src/components/backend/DataTable/modals/grid/GridUsersTableButton.svelte new file mode 100644 index 0000000000..2c353943de --- /dev/null +++ b/packages/builder/src/components/backend/DataTable/modals/grid/GridUsersTableButton.svelte @@ -0,0 +1,54 @@ + + +
    + (open = true)} icon="Help" quiet> + Why can't I edit this table? + +
    + + +
    + The app users table is read only + + You can continue to view the users that have access to your application. + + + Manage and invite more application users using the user side panel in the + top right of your screen. + +
    + +
    +
    +
    + + diff --git a/packages/builder/src/components/backend/DatasourceNavigator/DatasourceNavigator.svelte b/packages/builder/src/components/backend/DatasourceNavigator/DatasourceNavigator.svelte index f7b6f61a10..0cc61c69e6 100644 --- a/packages/builder/src/components/backend/DatasourceNavigator/DatasourceNavigator.svelte +++ b/packages/builder/src/components/backend/DatasourceNavigator/DatasourceNavigator.svelte @@ -1,7 +1,14 @@ @@ -163,7 +180,7 @@
    selectTable(TableNames.USERS)} diff --git a/packages/builder/src/components/backend/Datasources/CreateEditRelationship.svelte b/packages/builder/src/components/backend/Datasources/CreateEditRelationship.svelte index 36c6a32801..9e42dfecd9 100644 --- a/packages/builder/src/components/backend/Datasources/CreateEditRelationship.svelte +++ b/packages/builder/src/components/backend/Datasources/CreateEditRelationship.svelte @@ -57,7 +57,8 @@ label: table.name, value: table._id, })) - $: valid = getErrorCount(errors) === 0 && allRequiredAttributesSet() + $: valid = + getErrorCount(errors) === 0 && allRequiredAttributesSet(relationshipType) $: isManyToMany = relationshipType === RelationshipType.MANY_TO_MANY $: isManyToOne = relationshipType === RelationshipType.MANY_TO_ONE @@ -114,7 +115,7 @@ return Object.entries(errors).filter(entry => !!entry[1]).length } - function allRequiredAttributesSet() { + function allRequiredAttributesSet(relationshipType) { const base = getTable(fromId) && getTable(toId) && fromColumn && toColumn if (relationshipType === RelationshipType.MANY_TO_ONE) { return base && fromPrimary && fromForeign @@ -124,9 +125,10 @@ } function validate() { - if (!allRequiredAttributesSet() && !hasValidated) { + if (!allRequiredAttributesSet(relationshipType) && !hasValidated) { return } + hasValidated = true errorChecker.setType(relationshipType) const fromTable = getTable(fromId), @@ -290,11 +292,11 @@ datasource.entities[getTable(toId).name].schema[toRelationship.name] = toRelationship - await save() + await save({ action: "saved" }) } async function deleteRelationship() { removeExistingRelationship() - await save() + await save({ action: "deleted" }) await tables.fetch() close() } diff --git a/packages/builder/src/components/backend/Datasources/CreateEditRelationshipModal.svelte b/packages/builder/src/components/backend/Datasources/CreateEditRelationshipModal.svelte index 03683bcfc9..0dfb4ca796 100644 --- a/packages/builder/src/components/backend/Datasources/CreateEditRelationshipModal.svelte +++ b/packages/builder/src/components/backend/Datasources/CreateEditRelationshipModal.svelte @@ -33,7 +33,7 @@ } // action is one of 'created', 'updated' or 'deleted' - async function saveRelationship(action) { + async function saveRelationship({ action }) { try { await beforeSave({ action, datasource }) diff --git a/packages/builder/src/components/backend/TableNavigator/TableNavigator.svelte b/packages/builder/src/components/backend/TableNavigator/TableNavigator.svelte index d9def682dc..056a36c4a7 100644 --- a/packages/builder/src/components/backend/TableNavigator/TableNavigator.svelte +++ b/packages/builder/src/components/backend/TableNavigator/TableNavigator.svelte @@ -1,5 +1,5 @@ {#if $database?._id} @@ -37,18 +45,23 @@ {/if} - {#each [...Object.keys(table.views || {})].sort() as viewName, idx (idx)} + {#each [...Object.entries(table.views || {})].sort() as [name, view], idx (idx)} $goto(`./view/${encodeURIComponent(viewName)}`)} - selectedBy={$userSelectedResourceMap[viewName]} + text={name} + selected={isViewActive(view, $isActive, $views, $viewsV2)} + on:click={() => { + if (view.version === 2) { + $goto(`./view/v2/${view.id}`) + } else { + $goto(`./view/v1/${encodeURIComponent(name)}`) + } + }} + selectedBy={$userSelectedResourceMap[name] || + $userSelectedResourceMap[view.id]} > - + {/each} {/each} diff --git a/packages/builder/src/components/backend/TableNavigator/popovers/EditTablePopover.svelte 
b/packages/builder/src/components/backend/TableNavigator/popovers/EditTablePopover.svelte index 11ef60480b..1760938c53 100644 --- a/packages/builder/src/components/backend/TableNavigator/popovers/EditTablePopover.svelte +++ b/packages/builder/src/components/backend/TableNavigator/popovers/EditTablePopover.svelte @@ -35,7 +35,7 @@ screen => screen.autoTableId === table._id ) willBeDeleted = ["All table data"].concat( - templateScreens.map(screen => `Screen ${screen.props._instanceName}`) + templateScreens.map(screen => `Screen ${screen.routing?.route || ""}`) ) confirmDeleteDialog.show() } @@ -44,7 +44,10 @@ const isSelected = $params.tableId === table._id try { await tables.delete(table) - await store.actions.screens.delete(templateScreens) + // Screens need deleted one at a time because of undo/redo + for (let screen of templateScreens) { + await store.actions.screens.delete(screen) + } if (table.type === "external") { await datasources.fetch() } diff --git a/packages/builder/src/components/backend/TableNavigator/popovers/EditViewPopover.svelte b/packages/builder/src/components/backend/TableNavigator/popovers/EditViewPopover.svelte index 99f19935a1..5e2b0102f8 100644 --- a/packages/builder/src/components/backend/TableNavigator/popovers/EditViewPopover.svelte +++ b/packages/builder/src/components/backend/TableNavigator/popovers/EditViewPopover.svelte @@ -1,6 +1,5 @@ + +
    +
    + table.name} + getOptionValue={table => table._id} + bind:value={relationshipTableIdPrimary} + /> +
    +
    +
    +
    + table._id !== relationshipTableIdPrimary + )} + getOptionLabel={table => table.name} + getOptionValue={table => table._id} + /> +
    +
    + + + diff --git a/packages/builder/src/components/common/RoleSelect.svelte b/packages/builder/src/components/common/RoleSelect.svelte index 09a67cb6fe..82752554d5 100644 --- a/packages/builder/src/components/common/RoleSelect.svelte +++ b/packages/builder/src/components/common/RoleSelect.svelte @@ -1,8 +1,11 @@ - role.name} + getOptionValue={role => role._id} + getOptionColour={getColor} + getOptionIcon={getIcon} + isOptionEnabled={option => + option._id !== Constants.Roles.CREATOR || + $licensing.perAppBuildersEnabled} + {placeholder} + {error} + /> +{/if} diff --git a/packages/builder/src/components/common/UndoRedoControl.svelte b/packages/builder/src/components/common/UndoRedoControl.svelte index 84144b77ee..ebdad7d73c 100644 --- a/packages/builder/src/components/common/UndoRedoControl.svelte +++ b/packages/builder/src/components/common/UndoRedoControl.svelte @@ -38,14 +38,12 @@ hoverable on:click={store.undo} disabled={!$store.canUndo} - tooltip="Undo latest change" />
    diff --git a/packages/builder/src/components/common/VerificationPromptBanner.svelte b/packages/builder/src/components/common/VerificationPromptBanner.svelte new file mode 100644 index 0000000000..e9109ae0b1 --- /dev/null +++ b/packages/builder/src/components/common/VerificationPromptBanner.svelte @@ -0,0 +1,102 @@ + + +{#if user?.account?.verified === false} + +{/if} + + diff --git a/packages/builder/src/components/common/bindings/DrawerBindableInput.svelte b/packages/builder/src/components/common/bindings/DrawerBindableInput.svelte index dacb076bdb..5c4f90606d 100644 --- a/packages/builder/src/components/common/bindings/DrawerBindableInput.svelte +++ b/packages/builder/src/components/common/bindings/DrawerBindableInput.svelte @@ -74,6 +74,8 @@ {/if}
+ import { Icon, Input, Drawer, Button } from "@budibase/bbui" + import { + readableToRuntimeBinding, + runtimeToReadableBinding, + } from "builderStore/dataBinding" + + import ClientBindingPanel from "components/common/bindings/ClientBindingPanel.svelte" + import { createEventDispatcher, setContext } from "svelte" + import { isJSBinding } from "@budibase/string-templates" + + export let panel = ClientBindingPanel + export let value = "" + export let bindings = [] + export let title = "Bindings" + export let placeholder + export let label + export let disabled = false + export let fillWidth + export let allowJS = true + export let allowHelpers = true + export let updateOnChange = true + export let drawerLeft + export let type + export let schema + + const dispatch = createEventDispatcher() + let bindingDrawer + let valid = true + let currentVal = value + + $: readableValue = runtimeToReadableBinding(bindings, value) + $: tempValue = readableValue + $: isJS = isJSBinding(value) + + const saveBinding = () => { + onChange(tempValue) + onBlur() + bindingDrawer.hide() + } + + setContext("binding-drawer-actions", { + save: saveBinding, + }) + + const onChange = value => { + if (type === "link" && value && hasValidLinks(value)) { + currentVal = value.split(",") + } else if (type === "array" && value && hasValidOptions(value)) { + currentVal = value.split(",") + } else { + currentVal = readableToRuntimeBinding(bindings, value) + } + dispatch("change", currentVal) + } + + const onBlur = () => { + dispatch("blur", currentVal) + } + + const isValidDate = value => { + return !value || !isNaN(new Date(value).valueOf()) + } + + const hasValidLinks = value => { + let links = [] + if (Array.isArray(value)) { + links = value + } else if (value && typeof value === "string") { + links = value.split(",") + } else { + return !value + } + + return links.every(link => link.startsWith("ro_")) + } + + const hasValidOptions = value => { + let links = [] + if (Array.isArray(value)) { + links = value + } else if (value && typeof value === "string") { + links = value.split(",") + } else { + return !value + } + return links.every(link => schema?.constraints?.inclusion?.includes(link)) + } + + const isValidBoolean = value => { + return value === "false" || value === "true" || value == "" + } + + const validationMap = { + date: isValidDate, + datetime: isValidDate, + link: hasValidLinks, + array: hasValidOptions, + longform: value => !isJSBinding(value), + json: value => !isJSBinding(value), + boolean: isValidBoolean, + } + + const isValid = value => { + const validate = validationMap[type] + return validate ? validate(value) : true + } + + const getIconClass = (value, type) => { + if (type === "longform" && !isJSBinding(value)) { + return "text-area-slot-icon" + } + if (type === "json" && !isJSBinding(value)) { + return "json-slot-icon" + } + if (type !== "string" && type !== "number") { + return "slot-icon" + } + return "" + } + + +
+ {#if !isValid(value)} + onChange(event.detail)} + on:blur={onBlur} + {placeholder} + {updateOnChange} + /> +
{ + if (!isJS) { + dispatch("change", "") + } + }} + > + +
+ {:else} + + {/if} + {#if !disabled && type !== "formula"} +
{ + bindingDrawer.show() + }} + > + +
+ {/if} +
+ + + Add the objects on the left to enrich your text. + + + (tempValue = event.detail)} + {bindings} + {allowJS} + {allowHelpers} + /> + + + diff --git a/packages/builder/src/components/deploy/AppActions.svelte b/packages/builder/src/components/deploy/AppActions.svelte index 4b366f2412..7259e7e402 100644 --- a/packages/builder/src/components/deploy/AppActions.svelte +++ b/packages/builder/src/components/deploy/AppActions.svelte @@ -224,10 +224,10 @@ { + appActionPopover.hide() if (isPublished) { viewApp() } else { - appActionPopover.hide() updateAppModal.show() } }} diff --git a/packages/builder/src/components/design/Panel.svelte b/packages/builder/src/components/design/Panel.svelte index 3968292ba9..91ea3f98ad 100644 --- a/packages/builder/src/components/design/Panel.svelte +++ b/packages/builder/src/components/design/Panel.svelte @@ -1,5 +1,5 @@ -
+
{#if showBackButton} @@ -25,7 +33,7 @@ {/if}
- {title || ""} + {title}
{#if showAddButton}
@@ -33,7 +41,7 @@
{/if} {#if showCloseButton} - + {/if}
@@ -70,6 +78,10 @@ width: 310px; flex: 0 0 310px; } + .panel.extraWide { + width: 450px; + flex: 0 0 450px; + } .header { flex: 0 0 48px; display: flex; @@ -78,15 +90,14 @@ align-items: center; padding: 0 var(--spacing-l); border-bottom: var(--border-light); - gap: var(--spacing-l); + gap: var(--spacing-m); } .title { flex: 1 1 auto; width: 0; } - .title :global(h1) { + .title :global(p) { overflow: hidden; - font-weight: 600; text-overflow: ellipsis; white-space: nowrap; } diff --git a/packages/builder/src/components/design/settings/componentSettings.js b/packages/builder/src/components/design/settings/componentSettings.js index 8b151564a1..4c49587372 100644 --- a/packages/builder/src/components/design/settings/componentSettings.js +++ b/packages/builder/src/components/design/settings/componentSettings.js @@ -1,4 +1,4 @@ -import { Checkbox, Select, RadioGroup, Stepper } from "@budibase/bbui" +import { Checkbox, Select, RadioGroup, Stepper, Input } from "@budibase/bbui" import DataSourceSelect from "./controls/DataSourceSelect.svelte" import S3DataSourceSelect from "./controls/S3DataSourceSelect.svelte" import DataProviderSelect from "./controls/DataProviderSelect.svelte" @@ -60,10 +60,12 @@ const componentMap = { "field/longform": FormFieldSelect, "field/datetime": FormFieldSelect, "field/attachment": FormFieldSelect, + "field/s3": Input, "field/link": FormFieldSelect, "field/array": FormFieldSelect, "field/json": FormFieldSelect, "field/barcodeqr": FormFieldSelect, + "field/bb_reference": FormFieldSelect, // Some validation types are the same as others, so not all types are // explicitly listed here. e.g. options uses string validation "validation/string": ValidationEditor, @@ -73,6 +75,7 @@ const componentMap = { "validation/datetime": ValidationEditor, "validation/attachment": ValidationEditor, "validation/link": ValidationEditor, + "validation/bb_reference": ValidationEditor, } export const getComponentForSetting = setting => { diff --git a/packages/builder/src/components/design/settings/controls/ButtonActionEditor/ButtonActionDrawer.svelte b/packages/builder/src/components/design/settings/controls/ButtonActionEditor/ButtonActionDrawer.svelte index ef6410abca..f9541ea79f 100644 --- a/packages/builder/src/components/design/settings/controls/ButtonActionEditor/ButtonActionDrawer.svelte +++ b/packages/builder/src/components/design/settings/controls/ButtonActionEditor/ButtonActionDrawer.svelte @@ -13,9 +13,9 @@ import { generate } from "shortid" import { getEventContextBindings, + getActionBindings, makeStateBinding, } from "builderStore/dataBinding" - import { currentAsset, store } from "builderStore" import { cloneDeep } from "lodash/fp" const flipDurationMs = 150 @@ -26,6 +26,7 @@ export let actions export let bindings = [] export let nested + export let componentInstance let actionQuery let selectedAction = actions?.length ? 
actions[0] : null @@ -68,15 +69,19 @@ acc[action.type].push(action) return acc }, {}) + // These are ephemeral bindings which only exist while executing actions - $: eventContexBindings = getEventContextBindings( - $currentAsset, - $store.selectedComponentId, - key, - actions, - selectedAction?.id + $: eventContextBindings = getEventContextBindings({ + componentInstance, + settingKey: key, + }) + $: actionContextBindings = getActionBindings(actions, selectedAction?.id) + + $: allBindings = getAllBindings( + bindings, + [...eventContextBindings, ...actionContextBindings], + actions ) - $: allBindings = getAllBindings(bindings, eventContexBindings, actions) $: { // Ensure each action has a unique ID if (actions) { diff --git a/packages/builder/src/components/design/settings/controls/ButtonActionEditor/ButtonActionEditor.svelte b/packages/builder/src/components/design/settings/controls/ButtonActionEditor/ButtonActionEditor.svelte index 3fefb60387..6d9e96a564 100644 --- a/packages/builder/src/components/design/settings/controls/ButtonActionEditor/ButtonActionEditor.svelte +++ b/packages/builder/src/components/design/settings/controls/ButtonActionEditor/ButtonActionEditor.svelte @@ -3,7 +3,6 @@ import { createEventDispatcher } from "svelte" import { notifications } from "@budibase/bbui" import ButtonActionDrawer from "./ButtonActionDrawer.svelte" - import { automationStore } from "builderStore" import { cloneDeep } from "lodash/fp" const dispatch = createEventDispatcher() @@ -13,6 +12,7 @@ export let name export let bindings export let nested + export let componentInstance let drawer let tmpValue @@ -23,47 +23,11 @@ } const saveEventData = async () => { - // any automations that need created from event triggers - const automationsToCreate = tmpValue.filter( - action => action["##eventHandlerType"] === "Trigger Automation" - ) - for (let action of automationsToCreate) { - await createAutomation(action.parameters) - } - dispatch("change", tmpValue) notifications.success("Component actions saved.") drawer.hide() } - // called by the parent modal when actions are saved - const createAutomation = async parameters => { - if (parameters.automationId || !parameters.newAutomationName) { - return - } - try { - let trigger = automationStore.actions.constructBlock( - "TRIGGER", - "APP", - $automationStore.blockDefinitions.TRIGGER.APP - ) - trigger.inputs = { - fields: Object.keys(parameters.fields ?? {}).reduce((fields, key) => { - fields[key] = "string" - return fields - }, {}), - } - const automation = await automationStore.actions.create( - parameters.newAutomationName, - trigger - ) - parameters.automationId = automation._id - delete parameters.newAutomationName - } catch (error) { - notifications.error("Error creating automation") - } - } - $: actionCount = value?.length $: actionText = `${actionCount || "No"} action${ actionCount !== 1 ? "s" : "" @@ -74,7 +38,7 @@ {actionText}
-
+
   Define what actions to run.
@@ -86,6 +50,7 @@
       {bindings}
       {key}
       {nested}
+      {componentInstance}
     />
diff --git a/packages/builder/src/components/design/settings/controls/ButtonActionEditor/actions/ChangeFormStep.svelte b/packages/builder/src/components/design/settings/controls/ButtonActionEditor/actions/ChangeFormStep.svelte
index ca2df71c6d..81a2119474 100644
--- a/packages/builder/src/components/design/settings/controls/ButtonActionEditor/actions/ChangeFormStep.svelte
+++ b/packages/builder/src/components/design/settings/controls/ButtonActionEditor/actions/ChangeFormStep.svelte
@@ -1,10 +1,12 @@
@@ -15,9 +23,9 @@ table.name}
-    getOptionValue={table => table._id}
+    {options}
+    getOptionLabel={table => table.label}
+    getOptionValue={table => table.resourceId}
   />
diff --git a/packages/builder/src/components/design/settings/controls/ButtonActionEditor/actions/SaveRow.svelte b/packages/builder/src/components/design/settings/controls/ButtonActionEditor/actions/SaveRow.svelte
index d16a279c68..c1917ad90f 100644
--- a/packages/builder/src/components/design/settings/controls/ButtonActionEditor/actions/SaveRow.svelte
+++ b/packages/builder/src/components/design/settings/controls/ButtonActionEditor/actions/SaveRow.svelte
@@ -1,10 +1,10 @@
+
+
+    {#each draggableItems as draggable (draggable.id)}
+
+
+      {#if showHandle}
+
+
+
+      {/if}
+
+
+
+
+    {/each}
+
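// Illustrative sketch, not part of the diff: the keyed {#each} above iterates entries with a
// stable `id` (assumed here — the diff does not show where it is generated) plus the
// { field, active } shape produced by convertOldFieldFormat, the helper added in
// FieldConfiguration/utils.js further down in this diff. For example:
//
//   convertOldFieldFormat(["Name", { name: "Age" }, { field: "Active", active: false }])
//   // -> [
//   //      { field: "Name", active: true },    // old format: plain column name
//   //      { field: "Age", active: true },     // old format: object without an active flag
//   //      { field: "Active", active: false }, // already in the new format, passed through
//   //    ]
//
const draggableItems = [
  { id: "field-1", field: "Name", active: true },
  { id: "field-2", field: "Age", active: true },
]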
+
+
diff --git a/packages/builder/src/components/design/settings/controls/FieldConfiguration/EditFieldPopover.svelte b/packages/builder/src/components/design/settings/controls/FieldConfiguration/EditFieldPopover.svelte
new file mode 100644
index 0000000000..7d2eaae478
--- /dev/null
+++ b/packages/builder/src/components/design/settings/controls/FieldConfiguration/EditFieldPopover.svelte
@@ -0,0 +1,160 @@
+
+
+  {
+    if (!open) {
+      popover.show()
+      open = true
+    }
+  }}
+/>
+
+  {
+    drawers = []
+    $draggable.actions.select(field._id)
+  }}
+  on:close={() => {
+    open = false
+    if ($draggable.selected == field._id) {
+      $draggable.actions.select()
+    }
+  }}
+  {anchor}
+  align="left-outside"
+  showPopover={drawers.length == 0}
+  clickOutsideOverride={drawers.length > 0}
+  maxHeight={600}
+  handlePostionUpdate={(anchorBounds, eleBounds, cfg) => {
+    let { left, top } = cfg
+    let percentageOffset = 30
+    // left-outside
+    left = anchorBounds.left - eleBounds.width - 18
+
+    // shift up from the anchor, if space allows
+    let offsetPos = Math.floor(eleBounds.height / 100) * percentageOffset
+    let defaultTop = anchorBounds.top - offsetPos
+
+    if (window.innerHeight - defaultTop < eleBounds.height) {
+      top = window.innerHeight - eleBounds.height - 5
+    } else {
+      top = anchorBounds.top - offsetPos
+    }
+
+    return { ...cfg, left, top }
+  }}
+>
+
+
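// Illustrative trace of the handlePostionUpdate callback above, not part of the diff;
// every number below is made up. Assume the anchor sits at left 900 / top 400, the popover
// measures 320x500, and the viewport is 800px tall (standing in for window.innerHeight).
const anchorBounds = { left: 900, top: 400 }
const eleBounds = { width: 320, height: 500 }
const viewportHeight = 800

const left = anchorBounds.left - eleBounds.width - 18     // 562: popover sits to the left of the anchor
const offsetPos = Math.floor(eleBounds.height / 100) * 30 // 150: shift up by ~30% of the popover height
const defaultTop = anchorBounds.top - offsetPos           // 250
const top =
  viewportHeight - defaultTop < eleBounds.height
    ? viewportHeight - eleBounds.height - 5               // would clamp to the bottom edge of the viewport
    : defaultTop                                          // 550px of room is enough for 500px, so keep 250

console.log({ left, top })                                // { left: 562, top: 250 }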
+
+
+      {field.field}
+        {
+          drawers = [...drawers, e.detail]
+        }}
+        on:drawerHide={() => {
+          drawers = drawers.slice(0, -1)
+        }}
+      />
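// Illustrative sketch, not part of the diff, tying the pieces above together: each nested
// drawer pushes its event detail onto `drawers` when it opens and pops it when it closes,
// and the Popover props are derived from that stack (showPopover={drawers.length == 0},
// clickOutsideOverride={drawers.length > 0}), so the popover ignores outside clicks while
// a drawer is stacked on top of it.
let drawers = []
const onDrawerShow = e => (drawers = [...drawers, e.detail]) // reassignment keeps Svelte reactivity working
const onDrawerHide = () => (drawers = drawers.slice(0, -1))

const popoverProps = () => ({
  showPopover: drawers.length == 0,
  clickOutsideOverride: drawers.length > 0,
})

onDrawerShow({ detail: "settings-drawer" })
console.log(popoverProps()) // { showPopover: false, clickOutsideOverride: true }
onDrawerHide()
console.log(popoverProps()) // { showPopover: true, clickOutsideOverride: false }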
+
+
+
+
diff --git a/packages/builder/src/components/design/settings/controls/FieldConfiguration/FieldConfiguration.svelte b/packages/builder/src/components/design/settings/controls/FieldConfiguration/FieldConfiguration.svelte
index f9dccf586c..4c4fa0b7b7 100644
--- a/packages/builder/src/components/design/settings/controls/FieldConfiguration/FieldConfiguration.svelte
+++ b/packages/builder/src/components/design/settings/controls/FieldConfiguration/FieldConfiguration.svelte
@@ -1,45 +1,81 @@
-  {text}
+  {#if fieldList?.length}
+
+  {/if}
-
-
-    Configure the fields in your form.
-
-
-
-
-
diff --git a/packages/builder/src/components/design/settings/controls/FieldConfiguration/utils.js b/packages/builder/src/components/design/settings/controls/FieldConfiguration/utils.js
new file mode 100644
index 0000000000..c929263db1
--- /dev/null
+++ b/packages/builder/src/components/design/settings/controls/FieldConfiguration/utils.js
@@ -0,0 +1,47 @@
+export const convertOldFieldFormat = fields => {
+  if (!fields) {
+    return []
+  }
+  const converted = fields.map(field => {
+    if (typeof field === "string") {
+      // existed but was a string
+      return {
+        field,
+        active: true,
+      }
+    } else if (typeof field?.active != "boolean") {
+      // existed but had no state
+      return {
+        field: field.name,
+        active: true,
+      }
+    } else {
+      return field
+    }
+  })
+  return converted
+}
+
+export const getComponentForField = (field, schema) => {
+  if (!field || !schema?.[field]) {
+    return null
+  }
+  const type = schema[field].type
+  return FieldTypeToComponentMap[type]
+}
+
+export const FieldTypeToComponentMap = {
+  string: "stringfield",
+  number: "numberfield",
+  bigint: "bigintfield",
+  options: "optionsfield",
+  array: "multifieldselect",
+  boolean: "booleanfield",
+  longform: "longformfield",
+  datetime: "datetimefield",
+  attachment: "attachmentfield",
+  link: "relationshipfield",
+  json: "jsonfield",
+  barcodeqr: "codescanner",
+  bb_reference: "bbreferencefield",
+}
diff --git a/packages/builder/src/components/design/settings/controls/FilterEditor/FilterDrawer.svelte b/packages/builder/src/components/design/settings/controls/FilterEditor/FilterDrawer.svelte
index 4b1ab0d68a..ef8699824e 100644
--- a/packages/builder/src/components/design/settings/controls/FilterEditor/FilterDrawer.svelte
+++ b/packages/builder/src/components/design/settings/controls/FilterEditor/FilterDrawer.svelte
@@ -17,7 +17,7 @@
   import { generate } from "shortid"
   import { LuceneUtils, Constants } from "@budibase/frontend-core"
   import { getFields } from "helpers/searchFields"
-  import { createEventDispatcher } from "svelte"
+  import { createEventDispatcher, onMount } from "svelte"

   export let schemaFields
   export let filters = []
@@ -35,22 +35,28 @@
     { value: "and", label: "Match all filters" },
     { value: "or", label: "Match any filter" },
   ]
+  const onEmptyOptions = [
+    { value: "all", label: "Return all table rows" },
+    { value: "none", label: "Return no rows" },
+  ]

   let rawFilters
   let matchAny = false
+  let onEmptyFilter = "all"

   $: parseFilters(filters)
-  $: dispatch("change", enrichFilters(rawFilters, matchAny))
+  $: dispatch("change", enrichFilters(rawFilters, matchAny, onEmptyFilter))
   $: enrichedSchemaFields = getFields(schemaFields || [], { allowLinks: true })
   $: fieldOptions = enrichedSchemaFields.map(field => field.name) || []
   $: valueTypeOptions = allowBindings ? ["Value", "Binding"] : ["Value"]

-  // Remove field key prefixes and determine whether to use the "match all"
-  // or "match any" behaviour
+  // Remove field key prefixes and determine which behaviours to use
   const parseFilters = filters => {
     matchAny = filters?.find(filter => filter.operator === "allOr") != null
+    onEmptyFilter =
+      filters?.find(filter => filter.onEmptyFilter)?.onEmptyFilter ?? "all"
     rawFilters = (filters || [])
-      .filter(filter => filter.operator !== "allOr")
+      .filter(filter => filter.operator !== "allOr" && !filter.onEmptyFilter)
       .map(filter => {
         const { field } = filter
         let newFilter = { ...filter }
@@ -64,9 +70,18 @@
       })
   }

+  onMount(() => {
+    parseFilters(filters)
+    rawFilters.forEach(filter => {
+      filter.type =
+        schemaFields.find(field => field.name === filter.field)?.type ||
+        filter.type
+    })
+  })
+
   // Add field key prefixes and a special metadata filter object to indicate
-  // whether to use the "match all" or "match any" behaviour
+  // how to handle filter behaviour
-  const enrichFilters = (rawFilters, matchAny) => {
+  const enrichFilters = (rawFilters, matchAny, onEmptyFilter) => {
     let count = 1
     return rawFilters
       .filter(filter => filter.field)
@@ -75,6 +90,7 @@
         field: `${count++}:${filter.field}`,
       }))
       .concat(matchAny ? [{ operator: "allOr" }] : [])
+      .concat([{ onEmptyFilter }])
   }

   const addFilter = () => {
@@ -186,6 +202,17 @@
           on:change={e => (matchAny = e.detail === "or")}
           placeholder={null}
         />
+        {#if datasource?.type === "table"}
+
 x.tableId} value={value?.resourceId} {options} getOptionValue={x => x.resourceId} getOptionLabel={x => x.label} />
diff --git a/packages/builder/src/components/design/settings/controls/ValidationEditor/ValidationEditor.svelte b/packages/builder/src/components/design/settings/controls/ValidationEditor/ValidationEditor.svelte
index 6db24e8d69..96953b56b8 100644
--- a/packages/builder/src/components/design/settings/controls/ValidationEditor/ValidationEditor.svelte
+++ b/packages/builder/src/components/design/settings/controls/ValidationEditor/ValidationEditor.svelte
@@ -5,9 +5,8 @@
   export let value = []
   export let bindings = []
-  export let componentDefinition
+  export let componentInstance
   export let type
-
   const dispatch = createEventDispatcher()

   let drawer
@@ -31,7 +30,7 @@
   {text}
-
+
   Configure validation rules for this field.
@@ -41,7 +40,7 @@
     bind:rules={value}
     {type}
     {bindings}
-    {componentDefinition}
+    fieldName={componentInstance?.field}
   />
diff --git a/packages/builder/src/components/integration/AccessLevelSelect.svelte b/packages/builder/src/components/integration/AccessLevelSelect.svelte
index 091f33cbcd..3dc24983d3 100644
--- a/packages/builder/src/components/integration/AccessLevelSelect.svelte
+++ b/packages/builder/src/components/integration/AccessLevelSelect.svelte
@@ -40,7 +40,7 @@
       return
     }
     try {
-      roleId = (await permissions.forResource(queryToFetch._id))["read"]
+      roleId = (await permissions.forResource(queryToFetch._id))["read"].role
     } catch (err) {
       roleId = Constants.Roles.BASIC
     }
diff --git a/packages/builder/src/components/integration/ExtraQueryConfig.svelte b/packages/builder/src/components/integration/ExtraQueryConfig.svelte
index 303beb1e4b..59b08c5b7a 100644
--- a/packages/builder/src/components/integration/ExtraQueryConfig.svelte
+++ b/packages/builder/src/components/integration/ExtraQueryConfig.svelte
@@ -18,31 +18,20 @@
   {#each extraFields as { key, displayName, type }}
-
-
-      {#if type === "string"}
-          populateExtraQuery(extraQueryFields)}
-          bind:value={extraQueryFields[key]}
-        />
-      {/if}
+
+
+      {#if type === "string"}
+          populateExtraQuery(extraQueryFields)}
+          bind:value={extraQueryFields[key]}
+        />
+      {/if}
-      {#if type === "list"}
-          populateExtraQuery(extraQueryFields)}
+          bind:value={extraQueryFields[key]}
+          options={config[key].data[query.queryVerb]}
+          getOptionLabel={current => current}
+        />
+      {/if}
   {/each}
-
-
diff --git a/packages/builder/src/components/integration/KeyValueBuilder.svelte b/packages/builder/src/components/integration/KeyValueBuilder.svelte
index 5d35498cfe..096d5c0f71 100644
--- a/packages/builder/src/components/integration/KeyValueBuilder.svelte
+++ b/packages/builder/src/components/integration/KeyValueBuilder.svelte
@@ -34,6 +34,7 @@
   export let bindings = []
   export let bindingDrawerLeft
   export let allowHelpers = true
+  export let customButtonText = null

   let fields = Object.entries(object || {}).map(([name, value]) => ({
     name,
@@ -158,9 +159,13 @@
   {/if}
   {#if !readOnly && !noAddButton}
-        Add{name ? ` ${lowercase(name)}` : ""}
+
+        {#if customButtonText}
+          {customButtonText}
+        {:else}
+          {`Add${name ? ` ${lowercase(name)}` : ""}`}
+        {/if}
+
   {/if}
diff --git a/packages/builder/src/components/integration/QueryViewer.svelte b/packages/builder/src/components/integration/QueryViewer.svelte
index 4683bc6335..59a3289731 100644
--- a/packages/builder/src/components/integration/QueryViewer.svelte
+++ b/packages/builder/src/components/integration/QueryViewer.svelte
@@ -1,364 +1,444 @@
-  {
-    navigateTo = null
-  }}
->
-  {
-    await saveQuery()
-    override = true
-    resumeNavigation()
-  }}
-  onCancel={async () => {
-    override = true
-    resumeNavigation()
-  }}
->
-  Leaving this section will mean losing and changes to your query
-
-
-
-
-
-    Query {integrationInfo?.friendlyName}
-
-    Config
-
-
-    {
-      let newValue = e.target.value || ""
-      if (newValue.match(ValidQueryNameRegex)) {
-        query.name = newValue.trim()
-        nameError = null
-      } else {
-        nameError = "Invalid query name"
-      }
-    }}
-    error={nameError}
-  />
-
-  {#if queryConfig}
-
-
+      {
+        let newValue = e.target.value || ""
+        if (newValue.match(ValidQueryNameRegex)) {
+          newQuery.name = newValue.trim()
+          nameError = null
+        } else {
+          nameError = "Invalid query name"
+        }
+      }}
+      error={nameError}
+    />
+    {#if integration.query}
+