diff --git a/.eslintignore b/.eslintignore
index 8d4c64d960..94984a446f 100644
--- a/.eslintignore
+++ b/.eslintignore
@@ -6,9 +6,11 @@ packages/server/coverage
 packages/worker/coverage
 packages/backend-core/coverage
 packages/server/client
+packages/server/coverage
 packages/builder/.routify
 packages/sdk/sdk
 packages/account-portal/packages/server/build
 packages/account-portal/packages/ui/.routify
 packages/account-portal/packages/ui/build
-**/*.ivm.bundle.js
\ No newline at end of file
+**/*.ivm.bundle.js
+packages/server/build/oldClientVersions/**/**
diff --git a/.eslintrc.json b/.eslintrc.json
index 3de9d13046..525072dc6c 100644
--- a/.eslintrc.json
+++ b/.eslintrc.json
@@ -35,16 +35,40 @@
     {
       "files": ["**/*.ts"],
       "parser": "@typescript-eslint/parser",
+      "plugins": ["@typescript-eslint"],
       "extends": ["eslint:recommended"],
+      "globals": {
+        "NodeJS": true
+      },
       "rules": {
         "no-unused-vars": "off",
-        "no-inner-declarations": "off",
-        "no-case-declarations": "off",
-        "no-useless-escape": "off",
-        "no-undef": "off",
-        "no-prototype-builtins": "off",
-        "local-rules/no-budibase-imports": "error",
-        "local-rules/no-test-com": "error"
+        "@typescript-eslint/no-unused-vars": "error",
+        "local-rules/no-budibase-imports": "error"
       }
     },
+    {
+      "files": ["**/*.spec.ts"],
+      "parser": "@typescript-eslint/parser",
+      "plugins": ["jest", "@typescript-eslint"],
+      "extends": ["eslint:recommended", "plugin:jest/recommended"],
+      "env": {
+        "jest/globals": true
+      },
+      "globals": {
+        "NodeJS": true
+      },
+      "rules": {
+        "no-unused-vars": "off",
+        "@typescript-eslint/no-unused-vars": "error",
+        "local-rules/no-test-com": "error",
+        "local-rules/email-domain-example-com": "error",
+        "no-console": "warn",
+        // We have a lot of tests that don't have assertions, they use our test
+        // API client that does the assertions for them
+        "jest/expect-expect": "off",
+        // We do this in some tests where the behaviour of internal tables
+        // differs to external, but the API is broadly the same
+        "jest/no-conditional-expect": "off"
+      }
+    },
     {
diff --git a/.github/workflows/budibase_ci.yml b/.github/workflows/budibase_ci.yml
index 3060660d47..4ae0766242 100644
--- a/.github/workflows/budibase_ci.yml
+++ b/.github/workflows/budibase_ci.yml
@@ -66,7 +66,8 @@ jobs:
       # Run build all the projects
       - name: Build
         run: |
-          yarn build
+          yarn build:oss
+          yarn build:account-portal
       # Check the types of the projects built via esbuild
       - name: Check types
         run: |
@@ -107,9 +108,9 @@ jobs:
       - name: Test
         run: |
           if ${{ env.USE_NX_AFFECTED }}; then
-            yarn test --ignore=@budibase/worker --ignore=@budibase/server --ignore=@budibase/pro --since=${{ env.NX_BASE_BRANCH }}
+            yarn test --ignore=@budibase/worker --ignore=@budibase/server --since=${{ env.NX_BASE_BRANCH }}
           else
-            yarn test --ignore=@budibase/worker --ignore=@budibase/server --ignore=@budibase/pro
+            yarn test --ignore=@budibase/worker --ignore=@budibase/server
           fi

   test-worker:
@@ -138,6 +139,8 @@ jobs:
   test-server:
     runs-on: ubuntu-latest
+    env:
+      DEBUG: testcontainers,testcontainers:exec,testcontainers:build,testcontainers:pull
     steps:
       - name: Checkout repo
         uses: actions/checkout@v4
@@ -151,7 +154,19 @@ jobs:
         with:
           node-version: 20.x
           cache: yarn
+
+      - name: Pull testcontainers images
+        run: |
+          docker pull mcr.microsoft.com/mssql/server:2022-latest
+          docker pull mysql:8.3
+          docker pull postgres:16.1-bullseye
+          docker pull mongo:7.0-jammy
+          docker pull mariadb:lts
+          docker pull testcontainers/ryuk:0.5.1
+          docker pull budibase/couchdb
+
       - run: yarn --frozen-lockfile
+
       - name: Test server
         run: |
           if ${{ env.USE_NX_AFFECTED }}; then
@@ -160,60 +175,6 @@ jobs:
             yarn test --scope=@budibase/server
           fi

-  test-pro:
-    runs-on: ubuntu-latest
-    if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase'
-    steps:
-      - name: Checkout repo and submodules
-        uses: actions/checkout@v4
-        with:
-          submodules: true
-          token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
-          fetch-depth: 0
-
-      - name: Use Node.js 20.x
-        uses: actions/setup-node@v4
-        with:
-          node-version: 20.x
-          cache: yarn
-      - run: yarn --frozen-lockfile
-      - name: Test
-        run: |
-          if ${{ env.USE_NX_AFFECTED }}; then
-            yarn test --scope=@budibase/pro --since=${{ env.NX_BASE_BRANCH }}
-          else
-            yarn test --scope=@budibase/pro
-          fi
-
-  integration-test:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout repo
-        uses: actions/checkout@v4
-        with:
-          submodules: ${{ env.IS_OSS_CONTRIBUTOR == 'false' }}
-          token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
-
-      - name: Use Node.js 20.x
-        uses: actions/setup-node@v4
-        with:
-          node-version: 20.x
-          cache: yarn
-      - run: yarn --frozen-lockfile
-      - name: Build packages
-        run: yarn build --scope @budibase/server --scope @budibase/worker
-      - name: Build backend-core for OSS contributor (required for pro)
-        if: ${{ env.IS_OSS_CONTRIBUTOR == 'true' }}
-        run: yarn build --scope @budibase/backend-core
-      - name: Run tests
-        run: |
-          cd qa-core
-          yarn setup
-          yarn serve:test:self:ci
-        env:
-          BB_ADMIN_USER_EMAIL: admin
-          BB_ADMIN_USER_PASSWORD: admin
-
   check-pro-submodule:
     runs-on: ubuntu-latest
     if: inputs.run_as_oss != true && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase')
@@ -242,27 +203,34 @@ jobs:
           echo "pro_commit=$pro_commit"
           echo "pro_commit=$pro_commit" >> "$GITHUB_OUTPUT"
           echo "base_commit=$base_commit"
-          echo "base_commit=$base_commit" >> "$GITHUB_OUTPUT"
+
+          base_commit_excluding_merges=$(git log --no-merges -n 1 --format=format:%H $base_commit)
+          echo "base_commit_excluding_merges=$base_commit_excluding_merges"
+          echo "base_commit_excluding_merges=$base_commit_excluding_merges" >> "$GITHUB_OUTPUT"
         else
           echo "Nothing to do - branch to branch merge."
         fi

-      - name: Check submodule merged to base branch
-        if: ${{ steps.get_pro_commits.outputs.base_commit != '' }}
-        uses: actions/github-script@v7
-        with:
-          github-token: ${{ secrets.GITHUB_TOKEN }}
-          script: |
-            const submoduleCommit = '${{ steps.get_pro_commits.outputs.pro_commit }}';
-            const baseCommit = '${{ steps.get_pro_commits.outputs.base_commit }}';
+      - name: Check submodule merged and latest on base branch
+        if: ${{ steps.get_pro_commits.outputs.base_commit_excluding_merges != '' }}
+        run: |
+          cd packages/pro
+          base_commit_excluding_merges='${{ steps.get_pro_commits.outputs.base_commit_excluding_merges }}'
+          pro_commit='${{ steps.get_pro_commits.outputs.pro_commit }}'

-            if (submoduleCommit !== baseCommit) {
-              console.error('Submodule commit does not match the latest commit on the "${{ steps.get_pro_commits.outputs.target_branch }}" branch.');
-              console.error('Refer to the pro repo to merge your changes: https://github.com/Budibase/budibase-pro/blob/master/docs/getting_started.md')
-              process.exit(1);
-            } else {
-              console.log('All good, the submodule had been merged and setup correctly!')
-            }
+          any_commit=$(git log --no-merges $base_commit_excluding_merges...$pro_commit)
+
+          if [ -n "$any_commit" ]; then
+            echo $any_commit
+
+            echo "An error occurred: "
+            echo 'Submodule commit does not match the latest commit on the "${{ steps.get_pro_commits.outputs.target_branch }}" branch.'
+            echo 'Refer to the pro repo to merge your changes: https://github.com/Budibase/budibase-pro/blob/master/docs/getting_started.md'
+
+            exit 1
+          else
+            echo 'All good, the submodule had been merged and setup correctly!'
+          fi

   check-accountportal-submodule:
     runs-on: ubuntu-latest
@@ -275,7 +243,15 @@ jobs:
           token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
           fetch-depth: 0

-      - name: Check account portal commit
+      - uses: dorny/paths-filter@v3
+        id: changes
+        with:
+          filters: |
+            src:
+              - packages/account-portal/**
+
+      - if: steps.changes.outputs.src == 'true'
+        name: Check account portal commit
         id: get_accountportal_commits
         run: |
           cd packages/account-portal
diff --git a/.gitignore b/.gitignore
index 3eb705dbbf..b68ddd975f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -5,6 +5,9 @@ packages/server/runtime_apps/
 bb-airgapped.tar.gz
 *.iml

+packages/server/build/oldClientVersions/**/*
+packages/builder/src/components/deploy/clientVersions.json
+
 # Logs
 logs
 *.log
@@ -66,7 +69,6 @@ typings/

 # dotenv environment variables file
 .env
-!qa-core/.env
 !hosting/.env

 # parcel-bundler cache (https://parceljs.org/)
@@ -107,3 +109,4 @@ budibase-component
 budibase-datasource

 *.iml
+.nx
\ No newline at end of file
diff --git a/.vscode/launch.json b/.vscode/launch.json
index cfd8d7b155..2fda61345b 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -1,4 +1,3 @@
-
 {
   // Use IntelliSense to learn about possible attributes.
   // Hover to view descriptions of existing attributes.
@@ -20,6 +19,13 @@
       "runtimeArgs": ["--nolazy", "-r", "ts-node/register/transpile-only"],
       "args": ["${workspaceFolder}/packages/worker/src/index.ts"],
       "cwd": "${workspaceFolder}/packages/worker"
+    },
+    {
+      "type": "chrome",
+      "request": "launch",
+      "name": "Launch Chrome against localhost",
+      "url": "http://localhost:10000",
+      "webRoot": "${workspaceFolder}"
     }
   ],
   "compounds": [
diff --git a/charts/budibase/README.md b/charts/budibase/README.md
index 342011bdb1..dea7d1dbae 100644
--- a/charts/budibase/README.md
+++ b/charts/budibase/README.md
@@ -140,7 +140,7 @@ $ helm install --create-namespace --namespace budibase budibase . -f values.yaml
 | ingress.className | string | `""` | What ingress class to use. |
 | ingress.enabled | bool | `true` | Whether to create an Ingress resource pointing to the Budibase proxy. |
 | ingress.hosts | list | `[]` | Standard hosts block for the Ingress resource. Defaults to pointing to the Budibase proxy. |
-| nameOverride | string | `""` | Override the name of the deploymen. Defaults to {{ .Chart.Name }}. |
+| nameOverride | string | `""` | Override the name of the deployment. Defaults to {{ .Chart.Name }}. |
 | service.port | int | `10000` | Port to expose on the service. |
 | service.type | string | `"ClusterIP"` | Service type for the service that points to the main Budibase proxy pod. |
 | serviceAccount.annotations | object | `{}` | Annotations to add to the service account |
diff --git a/charts/budibase/values.yaml b/charts/budibase/values.yaml
index 09262df463..19b6c22d6c 100644
--- a/charts/budibase/values.yaml
+++ b/charts/budibase/values.yaml
@@ -1,6 +1,6 @@
 # -- Passed to all pods created by this chart. Should not ordinarily need to be changed.
 imagePullSecrets: []
-# -- Override the name of the deploymen. Defaults to {{ .Chart.Name }}.
+# -- Override the name of the deployment. Defaults to {{ .Chart.Name }}.
 nameOverride: ""

 serviceAccount:
diff --git a/eslint-local-rules/index.js b/eslint-local-rules/index.js
index 71bb5068da..a4866bc1f8 100644
--- a/eslint-local-rules/index.js
+++ b/eslint-local-rules/index.js
@@ -7,11 +7,12 @@ module.exports = {
         if (
           /^@budibase\/[^/]+\/.*$/.test(importPath) &&
-          importPath !== "@budibase/backend-core/tests"
+          importPath !== "@budibase/backend-core/tests" &&
+          importPath !== "@budibase/string-templates/test/utils"
         ) {
           context.report({
             node,
-            message: `Importing from @budibase is not allowed, except for @budibase/backend-core/tests.`,
+            message: `Importing from @budibase is not allowed, except for @budibase/backend-core/tests and @budibase/string-templates/test/utils.`,
           })
         }
       },
@@ -24,11 +25,9 @@ module.exports = {
       docs: {
         description:
           "disallow the use of 'test.com' in strings and replace it with 'example.com'",
-        category: "Possible Errors",
-        recommended: false,
       },
-      schema: [], // no options
-      fixable: "code", // Indicates that this rule supports automatic fixing
+      schema: [],
+      fixable: "code",
     },
     create: function (context) {
       return {
@@ -51,4 +50,39 @@ module.exports = {
       }
     },
   },
+  "email-domain-example-com": {
+    meta: {
+      type: "problem",
+      docs: {
+        description:
+          "enforce using the example.com domain for generator.email calls",
+      },
+      fixable: "code",
+      schema: [],
+    },
+    create: function (context) {
+      return {
+        CallExpression(node) {
+          if (
+            node.callee.type === "MemberExpression" &&
+            node.callee.object.name === "generator" &&
+            node.callee.property.name === "email" &&
+            node.arguments.length === 0
+          ) {
+            context.report({
+              node,
+              message:
+                "Prefer using generator.email with the domain \"{ domain: 'example.com' }\".",
+              fix: function (fixer) {
+                return fixer.replaceText(
+                  node,
+                  'generator.email({ domain: "example.com" })'
+                )
+              },
+            })
+          }
+        },
+      }
+    },
+  },
 }
diff --git a/globalSetup.ts b/globalSetup.ts
new file mode 100644
index 0000000000..4cb542a3c3
--- /dev/null
+++ b/globalSetup.ts
@@ -0,0 +1,25 @@
+import { GenericContainer, Wait } from "testcontainers"
+
+export default async function setup() {
+  await new GenericContainer("budibase/couchdb")
+    .withExposedPorts(5984)
+    .withEnvironment({
+      COUCHDB_PASSWORD: "budibase",
+      COUCHDB_USER: "budibase",
+    })
+    .withCopyContentToContainer([
+      {
+        content: `
+        [log]
+        level = warn
+        `,
+        target: "/opt/couchdb/etc/local.d/test-couchdb.ini",
+      },
+    ])
+    .withWaitStrategy(
+      Wait.forSuccessfulCommand(
+        "curl http://budibase:budibase@localhost:5984/_up"
+      ).withStartupTimeout(20000)
+    )
+    .start()
+}
diff --git a/hosting/single/Dockerfile b/hosting/single/Dockerfile
index ee98b0729d..be01056b53 100644
--- a/hosting/single/Dockerfile
+++ b/hosting/single/Dockerfile
@@ -12,8 +12,6 @@ COPY .yarnrc .
 COPY packages/server/package.json packages/server/package.json
 COPY packages/worker/package.json packages/worker/package.json
-# string-templates does not get bundled during the esbuild process, so we want to use the local version
-COPY packages/string-templates/package.json packages/string-templates/package.json

 COPY scripts/removeWorkspaceDependencies.sh scripts/removeWorkspaceDependencies.sh
@@ -26,7 +24,7 @@ RUN ./scripts/removeWorkspaceDependencies.sh packages/worker/package.json
 RUN echo '' > scripts/syncProPackage.js
 RUN jq 'del(.scripts.postinstall)' package.json > temp.json && mv temp.json package.json
 RUN ./scripts/removeWorkspaceDependencies.sh package.json
-RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn install --production
+RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn install --production --frozen-lockfile

 # copy the actual code
 COPY packages/server/dist packages/server/dist
@@ -35,7 +33,6 @@ COPY packages/server/client packages/server/client
 COPY packages/server/builder packages/server/builder
 COPY packages/worker/dist packages/worker/dist
 COPY packages/worker/pm2.config.js packages/worker/pm2.config.js
-COPY packages/string-templates packages/string-templates

 FROM budibase/couchdb:v3.3.3 as runner
@@ -52,11 +49,11 @@ RUN apt-get update && \

 # Install postgres client for pg_dump utils
 RUN apt install -y software-properties-common apt-transport-https ca-certificates gnupg \
-  && curl -fsSl https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor | tee /usr/share/keyrings/postgresql.gpg > /dev/null \
-  && echo deb [arch=amd64,arm64,ppc64el signed-by=/usr/share/keyrings/postgresql.gpg] http://apt.postgresql.org/pub/repos/apt/ $(lsb_release -cs)-pgdg main | tee /etc/apt/sources.list.d/postgresql.list \
-  && apt update -y \
-  && apt install postgresql-client-15 -y \
-  && apt remove software-properties-common apt-transport-https gpg -y
+    && curl -fsSl https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor | tee /usr/share/keyrings/postgresql.gpg > /dev/null \
+    && echo deb [arch=amd64,arm64,ppc64el signed-by=/usr/share/keyrings/postgresql.gpg] http://apt.postgresql.org/pub/repos/apt/ $(lsb_release -cs)-pgdg main | tee /etc/apt/sources.list.d/postgresql.list \
+    && apt update -y \
+    && apt install postgresql-client-15 -y \
+    && apt remove software-properties-common apt-transport-https gpg -y

 # We use pm2 in order to run multiple node processes in a single container
 RUN npm install --global pm2
@@ -100,9 +97,6 @@ COPY --from=build /app/node_modules /node_modules
 COPY --from=build /app/package.json /package.json
 COPY --from=build /app/packages/server /app
 COPY --from=build /app/packages/worker /worker
-COPY --from=build /app/packages/string-templates /string-templates
-
-RUN cd /string-templates && yarn link && cd ../app && yarn link @budibase/string-templates && cd ../worker && yarn link @budibase/string-templates

 EXPOSE 80
diff --git a/jestTestcontainersConfigGenerator.js b/jestTestcontainersConfigGenerator.js
deleted file mode 100644
index 1e39ed771f..0000000000
--- a/jestTestcontainersConfigGenerator.js
+++ /dev/null
@@ -1,16 +0,0 @@
-module.exports = () => {
-  return {
-    couchdb: {
-      image: "budibase/couchdb",
-      ports: [5984],
-      env: {
-        COUCHDB_PASSWORD: "budibase",
-        COUCHDB_USER: "budibase",
-      },
-      wait: {
-        type: "ports",
-        timeout: 20000,
-      }
-    }
-  }
-}
diff --git a/lerna.json b/lerna.json
index c06173fe04..93b103ee00 100644
--- a/lerna.json
+++ b/lerna.json
@@ -1,5 +1,5 @@
 {
-  "version": "2.21.0",
+  "version": "2.22.15",
   "npmClient": "yarn",
   "packages": [
     "packages/*",
diff --git a/package.json b/package.json
index 0a20f01d52..c927002c88 100644
--- a/package.json
+++ b/package.json
@@ -12,6 +12,7 @@
     "esbuild-node-externals": "^1.8.0",
     "eslint": "^8.52.0",
     "eslint-plugin-import": "^2.29.0",
+    "eslint-plugin-jest": "^27.9.0",
     "eslint-plugin-local-rules": "^2.0.0",
     "eslint-plugin-svelte": "^2.34.0",
     "husky": "^8.0.3",
@@ -25,12 +26,16 @@
     "svelte": "^4.2.10",
     "svelte-eslint-parser": "^0.33.1",
     "typescript": "5.2.2",
+    "typescript-eslint": "^7.3.1",
     "yargs": "^17.7.2"
   },
   "scripts": {
     "preinstall": "node scripts/syncProPackage.js",
+    "get-past-client-version": "node scripts/getPastClientVersion.js",
     "setup": "git config submodule.recurse true && git submodule update && node ./hosting/scripts/setup.js && yarn && yarn build && yarn dev",
     "build": "NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream",
+    "build:oss": "NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream --ignore @budibase/account-portal --ignore @budibase/account-portal-server --ignore @budibase/account-portal-ui",
+    "build:account-portal": "NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream --scope @budibase/account-portal --scope @budibase/account-portal-server --scope @budibase/account-portal-ui",
     "build:dev": "lerna run --stream prebuild && yarn nx run-many --target=build --output-style=dynamic --watch --preserveWatchOutput",
     "check:types": "lerna run check:types",
     "build:sdk": "lerna run --stream build:sdk",
@@ -53,11 +58,11 @@
     "dev:built": "yarn run kill-all && cd packages/server && yarn dev:stack:up && cd ../../ && lerna run --stream dev:built",
     "dev:docker": "yarn build --scope @budibase/server --scope @budibase/worker && docker-compose -f hosting/docker-compose.build.yaml -f hosting/docker-compose.dev.yaml --env-file hosting/.env up --build --scale proxy-service=0",
     "test": "lerna run --stream test --stream",
-    "lint:eslint": "eslint packages qa-core --max-warnings=0",
-    "lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\" && prettier --check \"qa-core/**/*.{js,ts,svelte}\"",
+    "lint:eslint": "eslint packages --max-warnings=0",
+    "lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\"",
     "lint": "yarn run lint:eslint && yarn run lint:prettier",
-    "lint:fix:eslint": "eslint --fix --max-warnings=0 packages qa-core",
-    "lint:fix:prettier": "prettier --write \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\" && prettier --write \"qa-core/**/*.{js,ts,svelte}\"",
+    "lint:fix:eslint": "eslint --fix --max-warnings=0 packages",
+    "lint:fix:prettier": "prettier --write \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\"",
     "lint:fix": "yarn run lint:fix:eslint && yarn run lint:fix:prettier",
     "build:specs": "lerna run --stream specs",
     "build:docker:airgap": "node hosting/scripts/airgapped/airgappedDockerBuild",
diff --git a/packages/account-portal b/packages/account-portal
index 806b6fd5c1..360ad2dc29 160000
--- a/packages/account-portal
+++ b/packages/account-portal
@@ -1 +1 @@
-Subproject commit 806b6fd5c11c284ebf4a01627d75db939f0f8152
+Subproject commit 360ad2dc29c3f1fd5a1182ae258c45666b7f5eb1
diff --git a/packages/backend-core/jest-testcontainers-config.js b/packages/backend-core/jest-testcontainers-config.js
deleted file mode 100644
index 8ac0f0cd9d..0000000000
--- a/packages/backend-core/jest-testcontainers-config.js
+++ /dev/null
@@ -1,8 +0,0 @@
-const { join } = require("path")
-require("dotenv").config({
-  path: join(__dirname, "..", "..", "hosting", ".env"),
-})
-
-const jestTestcontainersConfigGenerator = require("../../jestTestcontainersConfigGenerator")
-
-module.exports = jestTestcontainersConfigGenerator()
diff --git a/packages/backend-core/jest.config.ts b/packages/backend-core/jest.config.ts
index 3f1065ead2..c944b0d7e1 100644
--- a/packages/backend-core/jest.config.ts
+++ b/packages/backend-core/jest.config.ts
@@ -1,8 +1,8 @@
 import { Config } from "@jest/types"

 const baseConfig: Config.InitialProjectOptions = {
-  preset: "@trendyol/jest-testcontainers",
   setupFiles: ["./tests/jestEnv.ts"],
+  globalSetup: "./../../globalSetup.ts",
   setupFilesAfterEnv: ["./tests/jestSetup.ts"],
   transform: {
     "^.+\\.ts?$": "@swc/jest",
diff --git a/packages/backend-core/package.json b/packages/backend-core/package.json
index 3f8c34f823..030fec8728 100644
--- a/packages/backend-core/package.json
+++ b/packages/backend-core/package.json
@@ -60,14 +60,13 @@
     "@shopify/jest-koa-mocks": "5.1.1",
     "@swc/core": "1.3.71",
     "@swc/jest": "0.2.27",
-    "@trendyol/jest-testcontainers": "^2.1.1",
     "@types/chance": "1.1.3",
     "@types/cookies": "0.7.8",
     "@types/jest": "29.5.5",
     "@types/lodash": "4.14.200",
     "@types/node-fetch": "2.6.4",
    "@types/pouchdb": "6.4.0",
-    "@types/redlock": "4.0.3",
+    "@types/redlock": "4.0.7",
     "@types/semver": "7.3.7",
     "@types/tar-fs": "2.0.1",
     "@types/uuid": "8.3.4",
@@ -78,6 +77,7 @@
     "jest-serial-runner": "1.2.1",
     "pino-pretty": "10.0.0",
     "pouchdb-adapter-memory": "7.2.2",
+    "testcontainers": "^10.7.2",
     "timekeeper": "2.2.0",
     "typescript": "5.2.2"
   },
diff --git a/packages/backend-core/scripts/test.sh b/packages/backend-core/scripts/test.sh
index 7d19ec96cc..b9937e3a4a 100644
--- a/packages/backend-core/scripts/test.sh
+++ b/packages/backend-core/scripts/test.sh
@@ -4,10 +4,10 @@ set -e
 if [[ -n $CI ]]
 then
   # --runInBand performs better in ci where resources are limited
-  echo "jest --coverage --runInBand --forceExit"
-  jest --coverage --runInBand --forceExit
+  echo "jest --coverage --runInBand --forceExit $@"
+  jest --coverage --runInBand --forceExit $@
 else
   # --maxWorkers performs better in development
-  echo "jest --coverage --detectOpenHandles"
-  jest --coverage --detectOpenHandles
+  echo "jest --coverage --forceExit --detectOpenHandles $@"
+  jest --coverage --forceExit --detectOpenHandles $@
 fi
\ No newline at end of file
diff --git a/packages/backend-core/src/auth/auth.ts b/packages/backend-core/src/auth/auth.ts
index 1951c7986c..87ac46cf1c 100644
--- a/packages/backend-core/src/auth/auth.ts
+++ b/packages/backend-core/src/auth/auth.ts
@@ -133,7 +133,7 @@ export async function refreshOAuthToken(
   configId?: string
 ): Promise<RefreshResponse> {
   switch (providerType) {
-    case SSOProviderType.OIDC:
+    case SSOProviderType.OIDC: {
       if (!configId) {
        return { err: { data: "OIDC config id not provided" } }
       }
@@ -142,12 +142,14 @@ export async function refreshOAuthToken(
         return { err: { data: "OIDC configuration not found" } }
       }
       return refreshOIDCAccessToken(oidcConfig, refreshToken)
-    case SSOProviderType.GOOGLE:
+    }
+    case SSOProviderType.GOOGLE: {
       let googleConfig = await configs.getGoogleConfig()
       if (!googleConfig) {
         return { err: { data: "Google configuration not found" } }
       }
       return refreshGoogleAccessToken(googleConfig, refreshToken)
+    }
   }
 }
diff --git a/packages/backend-core/src/auth/tests/auth.spec.ts b/packages/backend-core/src/auth/tests/auth.spec.ts
index 3ae691be58..a80e1ea739 100644
--- a/packages/backend-core/src/auth/tests/auth.spec.ts
+++ b/packages/backend-core/src/auth/tests/auth.spec.ts
@@ -8,7 +8,7 @@ describe("platformLogout", () => {
     await testEnv.withTenant(async () => {
       const ctx = structures.koa.newContext()
       await auth.platformLogout({ ctx, userId: "test" })
-      expect(events.auth.logout).toBeCalledTimes(1)
+      expect(events.auth.logout).toHaveBeenCalledTimes(1)
     })
   })
 })
diff --git a/packages/backend-core/src/cache/base/index.ts b/packages/backend-core/src/cache/base/index.ts
index 264984c6a5..433941b5c7 100644
--- a/packages/backend-core/src/cache/base/index.ts
+++ b/packages/backend-core/src/cache/base/index.ts
@@ -23,6 +23,18 @@ export default class BaseCache {
     return client.keys(pattern)
   }

+  async exists(key: string, opts = { useTenancy: true }) {
+    key = opts.useTenancy ? generateTenantKey(key) : key
+    const client = await this.getClient()
+    return client.exists(key)
+  }
+
+  async scan(key: string, opts = { useTenancy: true }) {
+    key = opts.useTenancy ? generateTenantKey(key) : key
+    const client = await this.getClient()
+    return client.scan(key)
+  }
+
   /**
    * Read only from the cache.
    */
@@ -32,6 +44,15 @@ export default class BaseCache {
     return client.get(key)
   }

+  /**
+   * Read only from the cache.
+   */
+  async bulkGet<T>(keys: string[], opts = { useTenancy: true }) {
+    keys = opts.useTenancy ? keys.map(key => generateTenantKey(key)) : keys
+    const client = await this.getClient()
+    return client.bulkGet<T>(keys)
+  }
+
   /**
    * Write to the cache.
    */
@@ -46,6 +67,25 @@ export default class BaseCache {
     await client.store(key, value, ttl)
   }

+  /**
+   * Bulk write to the cache.
+   */
+  async bulkStore(
+    data: Record<string, any>,
+    ttl: number | null = null,
+    opts = { useTenancy: true }
+  ) {
+    if (opts.useTenancy) {
+      data = Object.entries(data).reduce((acc, [key, value]) => {
+        acc[generateTenantKey(key)] = value
+        return acc
+      }, {} as Record<string, any>)
+    }
+
+    const client = await this.getClient()
+    await client.bulkStore(data, ttl)
+  }
+
   /**
    * Remove from cache.
    */
@@ -55,15 +95,24 @@ export default class BaseCache {
     return client.delete(key)
   }

+  /**
+   * Remove from cache.
+   */
+  async bulkDelete(keys: string[], opts = { useTenancy: true }) {
+    keys = opts.useTenancy ? keys.map(key => generateTenantKey(key)) : keys
+    const client = await this.getClient()
+    return client.bulkDelete(keys)
+  }
+
   /**
    * Read from the cache. Write to the cache if not exists.
    */
-  async withCache(
+  async withCache<T>(
     key: string,
-    ttl: number,
-    fetchFn: any,
+    ttl: number | null = null,
+    fetchFn: () => Promise<T> | T,
     opts = { useTenancy: true }
-  ) {
+  ): Promise<T> {
     const cachedValue = await this.get(key, opts)
     if (cachedValue) {
       return cachedValue
@@ -80,7 +129,7 @@ export default class BaseCache {
     }
   }

-  async bustCache(key: string, opts = { client: null }) {
+  async bustCache(key: string) {
     const client = await this.getClient()
     try {
       await client.delete(generateTenantKey(key))
@@ -89,4 +138,13 @@ export default class BaseCache {
       throw err
     }
   }
+
+  /**
+   * Delete the entry if the provided value matches the stored one.
+   */
+  async deleteIfValue(key: string, value: any, opts = { useTenancy: true }) {
+    key = opts.useTenancy ? generateTenantKey(key) : key
+    const client = await this.getClient()
+    await client.deleteIfValue(key, value)
+  }
 }
diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts
new file mode 100644
index 0000000000..05f13a0d91
--- /dev/null
+++ b/packages/backend-core/src/cache/docWritethrough.ts
@@ -0,0 +1,105 @@
+import { AnyDocument, Database } from "@budibase/types"
+
+import { JobQueue, Queue, createQueue } from "../queue"
+
+import * as dbUtils from "../db"
+
+interface ProcessDocMessage {
+  dbName: string
+  docId: string
+  data: Record<string, any>
+}
+
+const PERSIST_MAX_ATTEMPTS = 100
+let processor: DocWritethroughProcessor | undefined
+
+export class DocWritethroughProcessor {
+  private static _queue: Queue
+
+  public static get queue() {
+    if (!DocWritethroughProcessor._queue) {
+      DocWritethroughProcessor._queue = createQueue<ProcessDocMessage>(
+        JobQueue.DOC_WRITETHROUGH_QUEUE,
+        {
+          jobOptions: {
+            attempts: PERSIST_MAX_ATTEMPTS,
+          },
+        }
+      )
+    }
+
+    return DocWritethroughProcessor._queue
+  }
+
+  init() {
+    DocWritethroughProcessor.queue.process(async message => {
+      try {
+        await this.persistToDb(message.data)
+      } catch (err: any) {
+        if (err.status === 409) {
+          // If we get a 409, it means that another job updated it meanwhile. We want to retry it to persist it again.
+          throw new Error(
+            `Conflict persisting message ${message.id}. Attempt ${message.attemptsMade}`
+          )
+        }
+
+        throw err
+      }
+    })
+    return this
+  }
+
+  private async persistToDb({
+    dbName,
+    docId,
+    data,
+  }: {
+    dbName: string
+    docId: string
+    data: Record<string, any>
+  }) {
+    const db = dbUtils.getDB(dbName)
+    let doc: AnyDocument | undefined
+    try {
+      doc = await db.get(docId)
+    } catch {
+      doc = { _id: docId }
+    }
+
+    doc = { ...doc, ...data }
+    await db.put(doc)
+  }
+}
+
+export class DocWritethrough {
+  private db: Database
+  private _docId: string
+
+  constructor(db: Database, docId: string) {
+    this.db = db
+    this._docId = docId
+  }
+
+  get docId() {
+    return this._docId
+  }
+
+  async patch(data: Record<string, any>) {
+    await DocWritethroughProcessor.queue.add({
+      dbName: this.db.name,
+      docId: this.docId,
+      data,
+    })
+  }
+}
+
+export function init(): DocWritethroughProcessor {
+  processor = new DocWritethroughProcessor().init()
+  return processor
+}
+
+export function getProcessor(): DocWritethroughProcessor {
+  if (!processor) {
+    return init()
+  }
+  return processor
+}
diff --git a/packages/backend-core/src/cache/generic.ts b/packages/backend-core/src/cache/generic.ts
index 3ac323a8d4..2d6d8b9472 100644
--- a/packages/backend-core/src/cache/generic.ts
+++ b/packages/backend-core/src/cache/generic.ts
@@ -26,7 +26,8 @@ export const store = (...args: Parameters<typeof GENERIC.store>) =>
   GENERIC.store(...args)
 export const destroy = (...args: Parameters<typeof GENERIC.delete>) =>
   GENERIC.delete(...args)
-export const withCache = (...args: Parameters<typeof GENERIC.withCache>) =>
-  GENERIC.withCache(...args)
+export const withCache = <T>(
+  ...args: Parameters<typeof GENERIC.withCache<T>>
+) => GENERIC.withCache(...args)
 export const bustCache = (...args: Parameters<typeof GENERIC.bustCache>) =>
   GENERIC.bustCache(...args)
diff --git a/packages/backend-core/src/cache/index.ts b/packages/backend-core/src/cache/index.ts
index 4fa986e4e2..3b25108634 100644
--- a/packages/backend-core/src/cache/index.ts
+++ b/packages/backend-core/src/cache/index.ts
@@ -5,3 +5,4 @@ export * as writethrough from "./writethrough"
 export * as invite from "./invite"
 export * as passwordReset from "./passwordReset"
 export * from "./generic"
+export * as docWritethrough from "./docWritethrough"
diff --git a/packages/backend-core/src/cache/invite.ts b/packages/backend-core/src/cache/invite.ts
index e43ebc4aa8..e3d698bcc6 100644
--- a/packages/backend-core/src/cache/invite.ts
+++ b/packages/backend-core/src/cache/invite.ts
@@ -1,5 +1,5 @@
 import * as utils from "../utils"
-import { Duration, DurationType } from "../utils"
+import { Duration } from "../utils"
 import env from "../environment"
 import { getTenantId } from "../context"
 import * as redis from "../redis/init"
diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts
new file mode 100644
index 0000000000..47b3f0672f
--- /dev/null
+++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts
@@ -0,0 +1,294 @@
+import tk from "timekeeper"
+
+import _ from "lodash"
+import { DBTestConfiguration, generator, structures } from "../../../tests"
+import { getDB } from "../../db"
+
+import {
+  DocWritethrough,
+  DocWritethroughProcessor,
+  init,
+} from "../docWritethrough"
+
+import InMemoryQueue from "../../queue/inMemoryQueue"
+
+const initialTime = Date.now()
+
+async function waitForQueueCompletion() {
+  const queue: InMemoryQueue = DocWritethroughProcessor.queue as never
+  await queue.waitForCompletion()
+}
+
+describe("docWritethrough", () => {
+  beforeAll(() => {
+    init()
+  })
+
+  const config = new DBTestConfiguration()
+
+  const db = getDB(structures.db.id())
+  let documentId: string
+  let docWritethrough: DocWritethrough
+
+  describe("patch", () => {
+    function generatePatchObject(fieldCount: number) {
+      const keys = generator.unique(() => generator.guid(), fieldCount)
+      return keys.reduce((acc, c) => {
+        acc[c] = generator.word()
+        return acc
+      }, {} as Record<string, any>)
+    }
+
+    beforeEach(async () => {
+      jest.clearAllMocks()
+      documentId = structures.uuid()
+      docWritethrough = new DocWritethrough(db, documentId)
+    })
+
+    it("patching will not persist until the messages are persisted", async () => {
+      await config.doInTenant(async () => {
+        await docWritethrough.patch(generatePatchObject(2))
+        await docWritethrough.patch(generatePatchObject(2))
+
+        expect(await db.exists(documentId)).toBe(false)
+      })
+    })
+
+    it("patching will persist when the messages are persisted", async () => {
+      await config.doInTenant(async () => {
+        const patch1 = generatePatchObject(2)
+        const patch2 = generatePatchObject(2)
+        await docWritethrough.patch(patch1)
+        await docWritethrough.patch(patch2)
+
+        await waitForQueueCompletion()
+
+        // This will not be persisted
+        const patch3 = generatePatchObject(3)
+        await docWritethrough.patch(patch3)
+
+        expect(await db.get(documentId)).toEqual({
+          _id: documentId,
+          ...patch1,
+          ...patch2,
+          _rev: expect.stringMatching(/2-.+/),
+          createdAt: new Date(initialTime).toISOString(),
+          updatedAt: new Date(initialTime).toISOString(),
+        })
+      })
+    })
+
+    it("patching will persist keeping the previous data", async () => {
+      await config.doInTenant(async () => {
+        const patch1 = generatePatchObject(2)
+        const patch2 = generatePatchObject(2)
+        await docWritethrough.patch(patch1)
+        await docWritethrough.patch(patch2)
+
+        await waitForQueueCompletion()
+
+        const patch3 = generatePatchObject(3)
+        await docWritethrough.patch(patch3)
+
+        await waitForQueueCompletion()
+
+        expect(await db.get(documentId)).toEqual(
+          expect.objectContaining({
+            _id: documentId,
+            ...patch1,
+            ...patch2,
+            ...patch3,
+          })
+        )
+      })
+    })
+
+    it("date audit fields are set correctly when persisting", async () => {
+      await config.doInTenant(async () => {
+        const patch1 = generatePatchObject(2)
+        const patch2 = generatePatchObject(2)
+        await docWritethrough.patch(patch1)
+        const date1 = new Date()
+        await waitForQueueCompletion()
+        await docWritethrough.patch(patch2)
+
+        tk.travel(Date.now() + 100)
+        const date2 = new Date()
+        await waitForQueueCompletion()
+
+        expect(date1).not.toEqual(date2)
+        expect(await db.get(documentId)).toEqual(
+          expect.objectContaining({
+            createdAt: date1.toISOString(),
+            updatedAt: date2.toISOString(),
+          })
+        )
+      })
+    })
+
+    it("concurrent patches will override keys", async () => {
+      await config.doInTenant(async () => {
+        const patch1 = generatePatchObject(2)
+        await docWritethrough.patch(patch1)
+        await waitForQueueCompletion()
+        const patch2 = generatePatchObject(1)
+        await docWritethrough.patch(patch2)
+
+        const keyToOverride = _.sample(Object.keys(patch1))!
+        expect(await db.get(documentId)).toEqual(
+          expect.objectContaining({
+            [keyToOverride]: patch1[keyToOverride],
+          })
+        )
+
+        await waitForQueueCompletion()
+
+        const patch3 = {
+          ...generatePatchObject(3),
+          [keyToOverride]: generator.word(),
+        }
+        await docWritethrough.patch(patch3)
+        await waitForQueueCompletion()
+
+        expect(await db.get(documentId)).toEqual(
+          expect.objectContaining({
+            ...patch1,
+            ...patch2,
+            ...patch3,
+          })
+        )
+      })
+    })
+
+    it("concurrent patches to different docWritethrough will not pollute each other", async () => {
+      await config.doInTenant(async () => {
+        const secondDocWritethrough = new DocWritethrough(
+          db,
+          structures.db.id()
+        )
+
+        const doc1Patch = generatePatchObject(2)
+        await docWritethrough.patch(doc1Patch)
+        const doc2Patch = generatePatchObject(1)
+        await secondDocWritethrough.patch(doc2Patch)
+
+        await waitForQueueCompletion()
+
+        const doc1Patch2 = generatePatchObject(3)
+        await docWritethrough.patch(doc1Patch2)
+        const doc2Patch2 = generatePatchObject(3)
+        await secondDocWritethrough.patch(doc2Patch2)
+        await waitForQueueCompletion()
+
+        expect(await db.get(docWritethrough.docId)).toEqual(
+          expect.objectContaining({
+            ...doc1Patch,
+            ...doc1Patch2,
+          })
+        )
+
+        expect(await db.get(secondDocWritethrough.docId)).toEqual(
+          expect.objectContaining({
+            ...doc2Patch,
+            ...doc2Patch2,
+          })
+        )
+      })
+    })
+
+    it("cached values are persisted only once", async () => {
+      await config.doInTenant(async () => {
+        const initialPatch = generatePatchObject(5)
+
+        await docWritethrough.patch(initialPatch)
+        await waitForQueueCompletion()
+
+        expect(await db.get(documentId)).toEqual(
+          expect.objectContaining(initialPatch)
+        )
+
+        await db.remove(await db.get(documentId))
+
+        await waitForQueueCompletion()
+        const extraPatch = generatePatchObject(5)
+        await docWritethrough.patch(extraPatch)
+        await waitForQueueCompletion()
+
+        expect(await db.get(documentId)).toEqual(
+          expect.objectContaining(extraPatch)
+        )
+        expect(await db.get(documentId)).not.toEqual(
+          expect.objectContaining(initialPatch)
+        )
+      })
+    })
+
+    it("concurrent calls will not cause conflicts", async () => {
+      async function parallelPatch(count: number) {
+        const patches = Array.from({ length: count }).map(() =>
+          generatePatchObject(1)
+        )
+        await Promise.all(patches.map(p => docWritethrough.patch(p)))
+
+        return patches.reduce((acc, c) => {
+          acc = { ...acc, ...c }
+          return acc
+        }, {})
+      }
+      const queueMessageSpy = jest.spyOn(DocWritethroughProcessor.queue, "add")
+
+      await config.doInTenant(async () => {
+        let patches = await parallelPatch(5)
+        expect(queueMessageSpy).toHaveBeenCalledTimes(5)
+
+        await waitForQueueCompletion()
+        expect(await db.get(documentId)).toEqual(
+          expect.objectContaining(patches)
+        )
+
+        patches = { ...patches, ...(await parallelPatch(40)) }
+        expect(queueMessageSpy).toHaveBeenCalledTimes(45)
+
+        await waitForQueueCompletion()
+        expect(await db.get(documentId)).toEqual(
+          expect.objectContaining(patches)
+        )
+
+        patches = { ...patches, ...(await parallelPatch(10)) }
+        expect(queueMessageSpy).toHaveBeenCalledTimes(55)
+
+        await waitForQueueCompletion()
+        expect(await db.get(documentId)).toEqual(
+          expect.objectContaining(patches)
+        )
+      })
+    })
+
+    // This is not yet supported
+    // eslint-disable-next-line jest/no-disabled-tests
+    it.skip("patches will execute in order", async () => {
+      let incrementalValue = 0
+      const keyToOverride = generator.word()
+      async function incrementalPatches(count: number) {
+        for (let i = 0; i < count; i++) {
+          await docWritethrough.patch({ [keyToOverride]: incrementalValue++ })
+        }
+      }
+
+      await config.doInTenant(async () => {
+        await incrementalPatches(5)
+
+        await waitForQueueCompletion()
+        expect(await db.get(documentId)).toEqual(
+          expect.objectContaining({ [keyToOverride]: 5 })
+        )
+
+        await incrementalPatches(40)
+        await waitForQueueCompletion()
+        expect(await db.get(documentId)).toEqual(
+          expect.objectContaining({ [keyToOverride]: 45 })
+        )
+      })
+    })
+  })
+})
diff --git a/packages/backend-core/src/cache/tests/user.spec.ts b/packages/backend-core/src/cache/tests/user.spec.ts
index 80e5bc3063..49a8d51c16 100644
--- a/packages/backend-core/src/cache/tests/user.spec.ts
+++ b/packages/backend-core/src/cache/tests/user.spec.ts
@@ -55,8 +55,8 @@ describe("user cache", () => {
         })),
       })

-      expect(UserDB.bulkGet).toBeCalledTimes(1)
-      expect(UserDB.bulkGet).toBeCalledWith(userIdsToRequest)
+      expect(UserDB.bulkGet).toHaveBeenCalledTimes(1)
+      expect(UserDB.bulkGet).toHaveBeenCalledWith(userIdsToRequest)
     })

     it("on a second all, all of them are retrieved from cache", async () => {
@@ -82,7 +82,7 @@ describe("user cache", () => {
         ),
       })

-      expect(UserDB.bulkGet).toBeCalledTimes(1)
+      expect(UserDB.bulkGet).toHaveBeenCalledTimes(1)
     })

     it("when some users are cached, only the missing ones are retrieved from db", async () => {
@@ -110,8 +110,8 @@ describe("user cache", () => {
         ),
       })

-      expect(UserDB.bulkGet).toBeCalledTimes(1)
-      expect(UserDB.bulkGet).toBeCalledWith([
+      expect(UserDB.bulkGet).toHaveBeenCalledTimes(1)
+      expect(UserDB.bulkGet).toHaveBeenCalledWith([
         userIdsToRequest[1],
         userIdsToRequest[2],
         userIdsToRequest[4],
diff --git a/packages/backend-core/src/cache/user.ts b/packages/backend-core/src/cache/user.ts
index 313b9a4d4a..ecfa20f99e 100644
--- a/packages/backend-core/src/cache/user.ts
+++ b/packages/backend-core/src/cache/user.ts
@@ -6,7 +6,7 @@ import env from "../environment"
 import * as accounts from "../accounts"
 import { UserDB } from "../users"
 import { sdk } from "@budibase/shared-core"
-import { User } from "@budibase/types"
+import { User, UserMetadata } from "@budibase/types"

 const EXPIRY_SECONDS = 3600

@@ -15,7 +15,7 @@ const EXPIRY_SECONDS = 3600
  */
 async function populateFromDB(userId: string, tenantId: string) {
   const db = tenancy.getTenantDB(tenantId)
-  const user = await db.get(userId)
+  const user = await db.get<User & UserMetadata>(userId)
   user.budibaseAccess = true
   if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) {
     const account = await accounts.getAccount(user.email)
diff --git a/packages/backend-core/src/cache/writethrough.ts b/packages/backend-core/src/cache/writethrough.ts
index 5cafe418d7..cd7409ca15 100644
--- a/packages/backend-core/src/cache/writethrough.ts
+++ b/packages/backend-core/src/cache/writethrough.ts
@@ -8,7 +8,7 @@ const DEFAULT_WRITE_RATE_MS = 10000
 let CACHE: BaseCache | null = null

 interface CacheItem<T extends Document = any> {
-  doc: any
+  doc: T
   lastWrite: number
 }
diff --git a/packages/backend-core/src/constants/db.ts b/packages/backend-core/src/constants/db.ts
index ac00483021..f4caac502e 100644
--- a/packages/backend-core/src/constants/db.ts
+++ b/packages/backend-core/src/constants/db.ts
@@ -57,6 +57,9 @@ export const StaticDatabases = {
   AUDIT_LOGS: {
     name: "audit-logs",
   },
+  SCIM_LOGS: {
+    name: "scim-logs",
+  },
 }

 export const APP_PREFIX = prefixed(DocumentType.APP)
diff --git a/packages/backend-core/src/context/mainContext.ts b/packages/backend-core/src/context/mainContext.ts
index 36fd5dcb48..6cea7efeba 100644
--- a/packages/backend-core/src/context/mainContext.ts
+++ b/packages/backend-core/src/context/mainContext.ts
@@ -10,7 +10,7 @@ import {
   StaticDatabases,
   DEFAULT_TENANT_ID,
 } from "../constants"
-import { Database, IdentityContext } from "@budibase/types"
+import { Database, IdentityContext, Snippet, App } from "@budibase/types"
 import { ContextMap } from "./types"

 let TEST_APP_ID: string | null = null
@@ -35,6 +35,17 @@ export function getAuditLogDBName(tenantId?: string) {
   }
 }

+export function getScimDBName(tenantId?: string) {
+  if (!tenantId) {
+    tenantId = getTenantId()
+  }
+  if (tenantId === DEFAULT_TENANT_ID) {
+    return StaticDatabases.SCIM_LOGS.name
+  } else {
+    return `${tenantId}${SEPARATOR}${StaticDatabases.SCIM_LOGS.name}`
+  }
+}
+
 export function baseGlobalDBName(tenantId: string | undefined | null) {
   if (!tenantId || tenantId === DEFAULT_TENANT_ID) {
     return StaticDatabases.GLOBAL.name
@@ -111,10 +122,10 @@ export async function doInAutomationContext<T>(params: {
   automationId: string
   task: () => T
 }): Promise<T> {
-  const tenantId = getTenantIDFromAppID(params.appId)
+  await ensureSnippetContext()
   return newContext(
     {
-      tenantId,
+      tenantId: getTenantIDFromAppID(params.appId),
       appId: params.appId,
       automationId: params.automationId,
     },
@@ -270,6 +281,27 @@ export function doInScimContext(task: any) {
   return newContext(updates, task)
 }

+export async function ensureSnippetContext() {
+  const ctx = getCurrentContext()
+
+  // If we've already added snippets to context, continue
+  if (!ctx || ctx.snippets) {
+    return
+  }
+
+  // Otherwise get snippets for this app and update context
+  let snippets: Snippet[] | undefined
+  const db = getAppDB()
+  if (db && !env.isTest()) {
+    const app = await db.get<App>(DocumentType.APP_METADATA)
+    snippets = app.snippets
+  }
+
+  // Always set snippets to a non-null value so that we can tell we've attempted
+  // to load snippets
+  ctx.snippets = snippets || []
+}
+
 export function getEnvironmentVariables() {
   const context = Context.get()
   if (!context.environmentVariables) {
diff --git a/packages/backend-core/src/context/tests/index.spec.ts b/packages/backend-core/src/context/tests/index.spec.ts
index cfc820e169..2d89131549 100644
--- a/packages/backend-core/src/context/tests/index.spec.ts
+++ b/packages/backend-core/src/context/tests/index.spec.ts
@@ -246,7 +246,7 @@ describe("context", () => {
         context.doInAppMigrationContext(db.generateAppID(), async () => {
           await otherContextCall()
         })
-      ).rejects.toThrowError(
+      ).rejects.toThrow(
         "The context cannot be changed, a migration is currently running"
       )
     }
diff --git a/packages/backend-core/src/context/types.ts b/packages/backend-core/src/context/types.ts
index 6fb9f44fad..f297d3089f 100644
--- a/packages/backend-core/src/context/types.ts
+++ b/packages/backend-core/src/context/types.ts
@@ -1,5 +1,4 @@
-import { IdentityContext, VM } from "@budibase/types"
-import { ExecutionTimeTracker } from "../timers"
+import { IdentityContext, Snippet, VM } from "@budibase/types"

 // keep this out of Budibase types, don't want to expose context info
 export type ContextMap = {
@@ -10,6 +9,7 @@ export type ContextMap = {
   isScim?: boolean
   automationId?: string
   isMigrating?: boolean
-  jsExecutionTracker?: ExecutionTimeTracker
   vm?: VM
+  cleanup?: (() => void | Promise<void>)[]
+  snippets?: Snippet[]
 }
diff --git a/packages/backend-core/src/db/Replication.ts b/packages/backend-core/src/db/Replication.ts
index f91a37ce8f..735c2fa86e 100644
--- a/packages/backend-core/src/db/Replication.ts
+++ b/packages/backend-core/src/db/Replication.ts
@@ -1,66 +1,57 @@
+import PouchDB from "pouchdb"
 import { getPouchDB, closePouchDB } from "./couch"
 import { DocumentType } from "../constants"

 class Replication {
-  source: any
-  target: any
-  replication: any
+  source: PouchDB.Database
+  target: PouchDB.Database

-  /**
-   *
-   * @param source - the DB you want to replicate or rollback to
-   * @param target - the DB you want to replicate to, or rollback from
-   */
-  constructor({ source, target }: any) {
+  constructor({ source, target }: { source: string; target: string }) {
     this.source = getPouchDB(source)
     this.target = getPouchDB(target)
   }

-  close() {
-    return Promise.all([closePouchDB(this.source), closePouchDB(this.target)])
+  async close() {
+    await Promise.all([closePouchDB(this.source), closePouchDB(this.target)])
   }

-  promisify(operation: any, opts = {}) {
-    return new Promise(resolve => {
-      operation(this.target, opts)
-        .on("denied", function (err: any) {
+  replicate(opts: PouchDB.Replication.ReplicateOptions = {}) {
+    return new Promise<PouchDB.Replication.ReplicationResult<{}>>(resolve => {
+      this.source.replicate
+        .to(this.target, opts)
+        .on("denied", function (err) {
           // a document failed to replicate (e.g. due to permissions)
           throw new Error(`Denied: Document failed to replicate ${err}`)
         })
-        .on("complete", function (info: any) {
+        .on("complete", function (info) {
           return resolve(info)
         })
-        .on("error", function (err: any) {
-          throw new Error(`Replication Error: ${err}`)
+        .on("error", function (err) {
+          throw err
         })
     })
   }

-  /**
-   * Two way replication operation, intended to be promise based.
-   * @param opts - PouchDB replication options
-   */
-  sync(opts = {}) {
-    this.replication = this.promisify(this.source.sync, opts)
-    return this.replication
-  }
+  appReplicateOpts(
+    opts: PouchDB.Replication.ReplicateOptions = {}
+  ): PouchDB.Replication.ReplicateOptions {
+    if (typeof opts.filter === "string") {
+      return opts
+    }

-  /**
-   * One way replication operation, intended to be promise based.
-   * @param opts - PouchDB replication options
-   */
-  replicate(opts = {}) {
-    this.replication = this.promisify(this.source.replicate.to, opts)
-    return this.replication
-  }
+    const filter = opts.filter
+    delete opts.filter

-  appReplicateOpts() {
     return {
-      filter: (doc: any) => {
+      ...opts,
+      filter: (doc: any, params: any) => {
         if (doc._id && doc._id.startsWith(DocumentType.AUTOMATION_LOG)) {
           return false
         }
-        return doc._id !== DocumentType.APP_METADATA
+        if (doc._id === DocumentType.APP_METADATA) {
+          return false
+        }
+        return filter ? filter(doc, params) : true
       },
     }
   }
@@ -75,10 +66,6 @@ class Replication {
     // take the opportunity to remove deleted tombstones
     await this.replicate()
   }
-
-  cancel() {
-    this.replication.cancel()
-  }
 }

 export default Replication
diff --git a/packages/backend-core/src/db/couch/DatabaseImpl.ts b/packages/backend-core/src/db/couch/DatabaseImpl.ts
index 7e7c997cbe..416313f520 100644
--- a/packages/backend-core/src/db/couch/DatabaseImpl.ts
+++ b/packages/backend-core/src/db/couch/DatabaseImpl.ts
@@ -70,7 +70,15 @@ export class DatabaseImpl implements Database {
       DatabaseImpl.nano = buildNano(couchInfo)
   }

-  async exists() {
+  exists(docId?: string) {
+    if (docId === undefined) {
+      return this.dbExists()
+    }
+
+    return this.docExists(docId)
+  }
+
+  private async dbExists() {
     const response = await directCouchUrlCall({
       url: `${this.couchInfo.url}/${this.name}`,
       method: "HEAD",
@@ -79,6 +87,15 @@ export class DatabaseImpl implements Database {
     return response.status === 200
   }

+  private async docExists(id: string): Promise<boolean> {
+    try {
+      await this.performCall(db => () => db.head(id))
+      return true
+    } catch {
+      return false
+    }
+  }
+
   private nano() {
     return this.instanceNano || DatabaseImpl.nano
   }
diff --git a/packages/backend-core/src/db/instrumentation.ts b/packages/backend-core/src/db/instrumentation.ts
index 03010d4c92..795f30d7cd 100644
--- a/packages/backend-core/src/db/instrumentation.ts
+++ b/packages/backend-core/src/db/instrumentation.ts
@@ -24,9 +24,12 @@ export class DDInstrumentedDatabase implements Database {
     return this.db.name
   }

-  exists(): Promise<boolean> {
+  exists(docId?: string): Promise<boolean> {
     return tracer.trace("db.exists", span => {
-      span?.addTags({ db_name: this.name })
+      span?.addTags({ db_name: this.name, doc_id: docId })
+      if (docId) {
+        return this.db.exists(docId)
+      }
       return this.db.exists()
     })
   }
diff --git a/packages/backend-core/src/db/lucene.ts b/packages/backend-core/src/db/lucene.ts
index f982ee67d0..987d750d45 100644
--- a/packages/backend-core/src/db/lucene.ts
+++ b/packages/backend-core/src/db/lucene.ts
@@ -10,10 +10,6 @@ interface SearchResponse<T> {
   totalRows: number
 }

-interface PaginatedSearchResponse<T> extends SearchResponse<T> {
-  hasNextPage: boolean
-}
-
 export type SearchParams<T> = {
   tableId?: string
   sort?: string
@@ -247,7 +243,7 @@ export class QueryBuilder<T> {
     }
     // Escape characters
     if (!this.#noEscaping && escape && originalType === "string") {
-      value = `${value}`.replace(/[ \/#+\-&|!(){}\]^"~*?:\\]/g, "\\$&")
+      value = `${value}`.replace(/[ /#+\-&|!(){}\]^"~*?:\\]/g, "\\$&")
     }
     // Wrap in quotes
diff --git a/packages/backend-core/src/db/searchIndexes/searchIndexes.ts b/packages/backend-core/src/db/searchIndexes/searchIndexes.ts
index b953e3516e..8742d405f2 100644
--- a/packages/backend-core/src/db/searchIndexes/searchIndexes.ts
+++ b/packages/backend-core/src/db/searchIndexes/searchIndexes.ts
@@ -34,12 +34,12 @@ export async function createUserIndex() {
       }
       let idxKey = prev != null ? `${prev}.${key}` : key
       if (typeof input[key] === "string") {
+        // @ts-expect-error index is available in a CouchDB map function
         // eslint-disable-next-line no-undef
-        // @ts-ignore
         index(idxKey, input[key].toLowerCase(), { facet: true })
       } else if (typeof input[key] !== "object") {
+        // @ts-expect-error index is available in a CouchDB map function
         // eslint-disable-next-line no-undef
-        // @ts-ignore
         index(idxKey, input[key], { facet: true })
       } else {
         idx(input[key], idxKey)
diff --git a/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts b/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts
new file mode 100644
index 0000000000..586f13f417
--- /dev/null
+++ b/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts
@@ -0,0 +1,55 @@
+import _ from "lodash"
+import { AnyDocument } from "@budibase/types"
+import { generator } from "../../../tests"
+import { DatabaseImpl } from "../couch"
+import { newid } from "../../utils"
+
+describe("DatabaseImpl", () => {
+  const database = new DatabaseImpl(generator.word())
+  const documents: AnyDocument[] = []
+
+  beforeAll(async () => {
+    const docsToCreate = Array.from({ length: 10 }).map(() => ({
+      _id: newid(),
+    }))
+    const createdDocs = await database.bulkDocs(docsToCreate)
+
+    documents.push(...createdDocs.map((x: any) => ({ _id: x.id, _rev: x.rev })))
+  })
+
+  describe("document exists", () => {
+    it("can check existing docs by id", async () => {
+      const existingDoc = _.sample(documents)
+      const result = await database.exists(existingDoc!._id!)
+
+      expect(result).toBe(true)
+    })
+
+    it("can check non existing docs by id", async () => {
+      const result = await database.exists(newid())
+
+      expect(result).toBe(false)
+    })
+
+    it("can check an existing doc by id multiple times", async () => {
+      const existingDoc = _.sample(documents)
+      const id = existingDoc!._id!
+
+      const results = []
+      results.push(await database.exists(id))
+      results.push(await database.exists(id))
+      results.push(await database.exists(id))
+
+      expect(results).toEqual([true, true, true])
+    })
+
+    it("returns false after the doc is deleted", async () => {
+      const existingDoc = _.sample(documents)
+      const id = existingDoc!._id!
+      expect(await database.exists(id)).toBe(true)
+
+      await database.remove(existingDoc!)
+      expect(await database.exists(id)).toBe(false)
+    })
+  })
+})
diff --git a/packages/backend-core/src/docUpdates/index.ts b/packages/backend-core/src/docUpdates/index.ts
index 3971f8de12..bd34f4f0cd 100644
--- a/packages/backend-core/src/docUpdates/index.ts
+++ b/packages/backend-core/src/docUpdates/index.ts
@@ -17,13 +17,8 @@ export function init(processors: ProcessorMap) {
   // if not processing in this instance, kick it off
   if (!processingPromise) {
     processingPromise = asyncEventQueue.process(async job => {
-      const { event, identity, properties, timestamp } = job.data
-      await documentProcessor.processEvent(
-        event,
-        identity,
-        properties,
-        timestamp
-      )
+      const { event, identity, properties } = job.data
+      await documentProcessor.processEvent(event, identity, properties)
     })
   }
 }
diff --git a/packages/backend-core/src/environment.ts b/packages/backend-core/src/environment.ts
index b3179cbeea..2da2a77d67 100644
--- a/packages/backend-core/src/environment.ts
+++ b/packages/backend-core/src/environment.ts
@@ -186,6 +186,7 @@ const environment = {
     environment[key] = value
   },
   ROLLING_LOG_MAX_SIZE: process.env.ROLLING_LOG_MAX_SIZE || "10M",
+  DISABLE_SCIM_CALLS: process.env.DISABLE_SCIM_CALLS,
 }

 // clean up any environment variable edge cases
diff --git a/packages/backend-core/src/events/events.ts b/packages/backend-core/src/events/events.ts
index f02b9fdf32..92b81553b0 100644
--- a/packages/backend-core/src/events/events.ts
+++ b/packages/backend-core/src/events/events.ts
@@ -1,4 +1,4 @@
-import { Event } from "@budibase/types"
+import { Event, Identity } from "@budibase/types"
 import { processors } from "./processors"
 import identification from "./identification"
 import * as backfill from "./backfill"
@@ -7,12 +7,19 @@ import { publishAsyncEvent } from "./asyncEvents"
 export const publishEvent = async (
   event: Event,
   properties: any,
-  timestamp?: string | number
+  timestamp?: string | number,
+  identityOverride?: Identity
 ) => {
   // in future this should use async events via a distributed queue.
-  const identity = await identification.getCurrentIdentity()
+  const identity =
+    identityOverride || (await identification.getCurrentIdentity())
+
+  // Backfilling is get from the user cache, but when we override the identity cache is not available. Overrides are
+  // normally performed in automatic actions or operations in async flows (BPM) where the user session is not available.
+  const backfilling = identityOverride
+    ? false
+    : await backfill.isBackfillingEvent(event)

-  const backfilling = await backfill.isBackfillingEvent(event)
   // no backfill - send the event and exit
   if (!backfilling) {
     // send off async events if required
diff --git a/packages/backend-core/src/events/processors/AuditLogsProcessor.ts b/packages/backend-core/src/events/processors/AuditLogsProcessor.ts
index 94b4e1b09f..3dd2ab9d10 100644
--- a/packages/backend-core/src/events/processors/AuditLogsProcessor.ts
+++ b/packages/backend-core/src/events/processors/AuditLogsProcessor.ts
@@ -1,7 +1,6 @@
 import {
   Event,
   Identity,
-  Group,
   IdentityType,
   AuditLogQueueEvent,
   AuditLogFn,
@@ -79,11 +78,11 @@ export default class AuditLogsProcessor implements EventProcessor {
     }
   }

-  async identify(identity: Identity, timestamp?: string | number) {
+  async identify() {
     // no-op
   }

-  async identifyGroup(group: Group, timestamp?: string | number) {
+  async identifyGroup() {
     // no-op
   }
diff --git a/packages/backend-core/src/events/processors/LoggingProcessor.ts b/packages/backend-core/src/events/processors/LoggingProcessor.ts
index 0f4d02b99c..9f2dc5a473 100644
--- a/packages/backend-core/src/events/processors/LoggingProcessor.ts
+++ b/packages/backend-core/src/events/processors/LoggingProcessor.ts
@@ -8,8 +8,7 @@ export default class LoggingProcessor implements EventProcessor {
   async processEvent(
     event: Event,
     identity: Identity,
-    properties: any,
-    timestamp?: string
+    properties: any
   ): Promise<void> {
     if (skipLogging) {
       return
@@ -17,14 +16,14 @@ export default class LoggingProcessor implements EventProcessor {
     console.log(`[audit] [identityType=${identity.type}] ${event}`, properties)
   }

-  async identify(identity: Identity, timestamp?: string | number) {
+  async identify(identity: Identity) {
     if (skipLogging) {
       return
     }
     console.log(`[audit] identified`, identity)
   }

-  async identifyGroup(group: Group, timestamp?: string | number) {
+  async identifyGroup(group: Group) {
     if (skipLogging) {
       return
     }
diff --git a/packages/backend-core/src/events/processors/async/DocumentUpdateProcessor.ts b/packages/backend-core/src/events/processors/async/DocumentUpdateProcessor.ts
index 54304ee21b..92afcdc637 100644
--- a/packages/backend-core/src/events/processors/async/DocumentUpdateProcessor.ts
+++ b/packages/backend-core/src/events/processors/async/DocumentUpdateProcessor.ts
@@ -14,12 +14,7 @@ export default class DocumentUpdateProcessor implements EventProcessor {
     this.processors = processors
   }

-  async processEvent(
-    event: Event,
-    identity: Identity,
-    properties: any,
-    timestamp?: string | number
-  ) {
+  async processEvent(event: Event, identity: Identity, properties: any) {
     const tenantId = identity.realTenantId
     const docId = getDocumentId(event, properties)
     if (!tenantId || !docId) {
diff --git a/packages/backend-core/src/events/publishers/account.ts b/packages/backend-core/src/events/publishers/account.ts
index d337e404ef..99767962dd 100644
--- a/packages/backend-core/src/events/publishers/account.ts
+++ b/packages/backend-core/src/events/publishers/account.ts
@@ -5,13 +5,19 @@ import {
   AccountCreatedEvent,
   AccountDeletedEvent,
   AccountVerifiedEvent,
+  Identity,
 } from "@budibase/types"

-async function created(account: Account) {
+async function created(account: Account, identityOverride?: Identity) {
   const properties: AccountCreatedEvent = {
     tenantId: account.tenantId,
   }
-  await publishEvent(Event.ACCOUNT_CREATED, properties)
+  await publishEvent(
+    Event.ACCOUNT_CREATED,
+    properties,
+    undefined,
+    identityOverride
+  )
 }

 async function deleted(account: Account) {
diff --git a/packages/backend-core/src/events/publishers/app.ts b/packages/backend-core/src/events/publishers/app.ts
index d08d59b5f1..af26b09e72 100644
--- a/packages/backend-core/src/events/publishers/app.ts
+++ b/packages/backend-core/src/events/publishers/app.ts
@@ -13,6 +13,7 @@ import {
   AppVersionRevertedEvent,
   AppRevertedEvent,
   AppExportedEvent,
+  AppDuplicatedEvent,
 } from "@budibase/types"

 const created = async (app: App, timestamp?: string | number) => {
@@ -77,6 +78,17 @@ async function fileImported(app: App) {
   await publishEvent(Event.APP_FILE_IMPORTED, properties)
 }

+async function duplicated(app: App, duplicateAppId: string) {
+  const properties: AppDuplicatedEvent = {
+    duplicateAppId,
+    appId: app.appId,
+    audited: {
+      name: app.name,
+    },
+  }
+  await publishEvent(Event.APP_DUPLICATED, properties)
+}
+
 async function templateImported(app: App, templateKey: string) {
   const properties: AppTemplateImportedEvent = {
     appId: app.appId,
@@ -147,6 +159,7 @@ export default {
   published,
   unpublished,
   fileImported,
+  duplicated,
   templateImported,
   versionUpdated,
   versionReverted,
diff --git a/packages/backend-core/src/logging/pino/logger.ts b/packages/backend-core/src/logging/pino/logger.ts
index 7a051e7f12..0a8470a453 100644
--- a/packages/backend-core/src/logging/pino/logger.ts
+++ b/packages/backend-core/src/logging/pino/logger.ts
@@ -10,6 +10,18 @@ import { formats } from "dd-trace/ext"

 import { localFileDestination } from "../system"

+function isPlainObject(obj: any) {
+  return typeof obj === "object" && obj !== null && !(obj instanceof Error)
+}
+
+function isError(obj: any) {
+  return obj instanceof Error
+}
+
+function isMessage(obj: any) {
+  return typeof obj === "string"
+}
+
 // LOGGER

 let pinoInstance: pino.Logger | undefined
@@ -71,23 +83,11 @@ if (!env.DISABLE_PINO_LOGGER) {
     err?: Error
   }

-  function isPlainObject(obj: any) {
-    return typeof obj === "object" && obj !== null && !(obj instanceof Error)
-  }
-
-  function isError(obj: any) {
-    return obj instanceof Error
-  }
-
-  function isMessage(obj: any) {
-    return typeof obj === "string"
-  }
-
   /**
    * Backwards compatibility between console logging statements
    * and pino logging requirements.
    */
-  function getLogParams(args: any[]): [MergingObject, string] {
+  const getLogParams = (args: any[]): [MergingObject, string] => {
     let error = undefined
     let objects: any[] = []
     let message = ""
diff --git a/packages/backend-core/src/middleware/errorHandling.ts b/packages/backend-core/src/middleware/errorHandling.ts
index ebdd4107e9..2b8f7195ed 100644
--- a/packages/backend-core/src/middleware/errorHandling.ts
+++ b/packages/backend-core/src/middleware/errorHandling.ts
@@ -1,5 +1,6 @@
 import { APIError } from "@budibase/types"
 import * as errors from "../errors"
+import environment from "../environment"

 export async function errorHandling(ctx: any, next: any) {
   try {
@@ -14,15 +15,19 @@ export async function errorHandling(ctx: any, next: any) {
       console.error(err)
     }

-    const error = errors.getPublicError(err)
-    const body: APIError = {
+    let error: APIError = {
       message: err.message,
       status: status,
       validationErrors: err.validation,
-      error,
+      error: errors.getPublicError(err),
     }

-    ctx.body = body
+    if (environment.isTest() && ctx.headers["x-budibase-include-stacktrace"]) {
+      // @ts-ignore
+      error.stack = err.stack
+    }
+
+    ctx.body = error
   }
 }
diff --git a/packages/backend-core/src/middleware/matchers.ts b/packages/backend-core/src/middleware/matchers.ts
index efbdec2dbe..757d93a60d 100644
--- a/packages/backend-core/src/middleware/matchers.ts
+++ b/packages/backend-core/src/middleware/matchers.ts
@@ -11,7 +11,6 @@ export const buildMatcherRegex = (
   return patterns.map(pattern => {
     let route = pattern.route
     const method = pattern.method
-    const strict = pattern.strict ? pattern.strict : false

     // if there is a param in the route
     // use a wildcard pattern
@@ -24,24 +23,17 @@ export const buildMatcherRegex = (
       }
     }

-    return { regex: new RegExp(route), method, strict, route }
+    return { regex: new RegExp(route), method, route }
   })
 }

 export const matches = (ctx: BBContext, options: RegexMatcher[]) => {
-  return options.find(({ regex, method, strict, route }) => {
-    let urlMatch
-    if (strict) {
-      urlMatch = ctx.request.url === route
-    } else {
-      urlMatch = regex.test(ctx.request.url)
-    }
-
+  return options.find(({ regex, method }) => {
+    const urlMatch = regex.test(ctx.request.url)
     const methodMatch = method === "ALL" ?
true : ctx.request.method.toLowerCase() === method.toLowerCase() - return urlMatch && methodMatch }) } diff --git a/packages/backend-core/src/middleware/passport/datasource/google.ts b/packages/backend-core/src/middleware/passport/datasource/google.ts index ab4ffee9d2..7f768f1623 100644 --- a/packages/backend-core/src/middleware/passport/datasource/google.ts +++ b/packages/backend-core/src/middleware/passport/datasource/google.ts @@ -3,7 +3,7 @@ import { Cookie } from "../../../constants" import * as configs from "../../../configs" import * as cache from "../../../cache" import * as utils from "../../../utils" -import { UserCtx, SSOProfile, DatasourceAuthCookie } from "@budibase/types" +import { UserCtx, SSOProfile } from "@budibase/types" import { ssoSaveUserNoOp } from "../sso/sso" const GoogleStrategy = require("passport-google-oauth").OAuth2Strategy diff --git a/packages/backend-core/src/middleware/passport/sso/sso.ts b/packages/backend-core/src/middleware/passport/sso/sso.ts index 2fc1184722..ee84f03dae 100644 --- a/packages/backend-core/src/middleware/passport/sso/sso.ts +++ b/packages/backend-core/src/middleware/passport/sso/sso.ts @@ -5,7 +5,6 @@ import * as context from "../../../context" import fetch from "node-fetch" import { SaveSSOUserFunction, - SaveUserOpts, SSOAuthDetails, SSOUser, User, @@ -14,10 +13,8 @@ import { // no-op function for user save // - this allows datasource auth and access token refresh to work correctly // - prefer no-op over an optional argument to ensure function is provided to login flows -export const ssoSaveUserNoOp: SaveSSOUserFunction = ( - user: SSOUser, - opts: SaveUserOpts -) => Promise.resolve(user) +export const ssoSaveUserNoOp: SaveSSOUserFunction = (user: SSOUser) => + Promise.resolve(user) /** * Common authentication logic for third parties. e.g. OAuth, OIDC. 
diff --git a/packages/backend-core/src/middleware/passport/sso/tests/sso.spec.ts b/packages/backend-core/src/middleware/passport/sso/tests/sso.spec.ts index d3486a5b14..ea9584c284 100644 --- a/packages/backend-core/src/middleware/passport/sso/tests/sso.spec.ts +++ b/packages/backend-core/src/middleware/passport/sso/tests/sso.spec.ts @@ -114,11 +114,11 @@ describe("sso", () => { // tenant id added ssoUser.tenantId = context.getTenantId() - expect(mockSaveUser).toBeCalledWith(ssoUser, { + expect(mockSaveUser).toHaveBeenCalledWith(ssoUser, { hashPassword: false, requirePassword: false, }) - expect(mockDone).toBeCalledWith(null, ssoUser) + expect(mockDone).toHaveBeenCalledWith(null, ssoUser) }) }) }) @@ -159,11 +159,11 @@ describe("sso", () => { // existing id preserved ssoUser._id = existingUser._id - expect(mockSaveUser).toBeCalledWith(ssoUser, { + expect(mockSaveUser).toHaveBeenCalledWith(ssoUser, { hashPassword: false, requirePassword: false, }) - expect(mockDone).toBeCalledWith(null, ssoUser) + expect(mockDone).toHaveBeenCalledWith(null, ssoUser) }) }) @@ -187,11 +187,11 @@ describe("sso", () => { // existing id preserved ssoUser._id = existingUser._id - expect(mockSaveUser).toBeCalledWith(ssoUser, { + expect(mockSaveUser).toHaveBeenCalledWith(ssoUser, { hashPassword: false, requirePassword: false, }) - expect(mockDone).toBeCalledWith(null, ssoUser) + expect(mockDone).toHaveBeenCalledWith(null, ssoUser) }) }) }) diff --git a/packages/backend-core/src/middleware/tests/builder.spec.ts b/packages/backend-core/src/middleware/tests/builder.spec.ts index 0514dc13f0..0f35b0b833 100644 --- a/packages/backend-core/src/middleware/tests/builder.spec.ts +++ b/packages/backend-core/src/middleware/tests/builder.spec.ts @@ -24,13 +24,13 @@ function buildUserCtx(user: ContextUser) { } function passed(throwFn: jest.Func, nextFn: jest.Func) { - expect(throwFn).not.toBeCalled() - expect(nextFn).toBeCalled() + expect(throwFn).not.toHaveBeenCalled() + expect(nextFn).toHaveBeenCalled() } function threw(throwFn: jest.Func) { // cant check next, the throw function doesn't actually throw - so it still continues - expect(throwFn).toBeCalled() + expect(throwFn).toHaveBeenCalled() } describe("adminOnly middleware", () => { diff --git a/packages/backend-core/src/middleware/tests/matchers.spec.ts b/packages/backend-core/src/middleware/tests/matchers.spec.ts index c39bbb6dd3..1b79db2e68 100644 --- a/packages/backend-core/src/middleware/tests/matchers.spec.ts +++ b/packages/backend-core/src/middleware/tests/matchers.spec.ts @@ -34,23 +34,6 @@ describe("matchers", () => { expect(!!matchers.matches(ctx, built)).toBe(true) }) - it("doesn't wildcard path with strict", () => { - const pattern = [ - { - route: "/api/tests", - method: "POST", - strict: true, - }, - ] - const ctx = structures.koa.newContext() - ctx.request.url = "/api/tests/id/something/else" - ctx.request.method = "POST" - - const built = matchers.buildMatcherRegex(pattern) - - expect(!!matchers.matches(ctx, built)).toBe(false) - }) - it("matches with param", () => { const pattern = [ { @@ -67,23 +50,6 @@ describe("matchers", () => { expect(!!matchers.matches(ctx, built)).toBe(true) }) - // TODO: Support the below behaviour - // Strict does not work when a param is present - // it("matches with param with strict", () => { - // const pattern = [{ - // route: "/api/tests/:testId", - // method: "GET", - // strict: true - // }] - // const ctx = structures.koa.newContext() - // ctx.request.url = "/api/tests/id" - // ctx.request.method = "GET" - // - // const built 
= matchers.buildMatcherRegex(pattern) - // - // expect(!!matchers.matches(ctx, built)).toBe(true) - // }) - it("doesn't match by path", () => { const pattern = [ { diff --git a/packages/backend-core/src/migrations/migrations.ts b/packages/backend-core/src/migrations/migrations.ts index c750bc4882..3f033b8cdb 100644 --- a/packages/backend-core/src/migrations/migrations.ts +++ b/packages/backend-core/src/migrations/migrations.ts @@ -45,10 +45,6 @@ export const runMigration = async ( options: MigrationOptions = {} ) => { const migrationType = migration.type - let tenantId: string | undefined - if (migrationType !== MigrationType.INSTALLATION) { - tenantId = context.getTenantId() - } const migrationName = migration.name const silent = migration.silent diff --git a/packages/backend-core/src/objectStore/buckets/tests/app.spec.ts b/packages/backend-core/src/objectStore/buckets/tests/app.spec.ts index cbbbee6255..4a132ce54d 100644 --- a/packages/backend-core/src/objectStore/buckets/tests/app.spec.ts +++ b/packages/backend-core/src/objectStore/buckets/tests/app.spec.ts @@ -126,7 +126,7 @@ describe("app", () => { it("gets url with embedded minio", async () => { testEnv.withMinio() - await testEnv.withTenant(tenantId => { + await testEnv.withTenant(() => { const url = getAppFileUrl() expect(url).toBe( "/files/signed/prod-budi-app-assets/app_123/attachments/image.jpeg" @@ -136,7 +136,7 @@ it("gets url with custom S3", async () => { testEnv.withS3() - await testEnv.withTenant(tenantId => { + await testEnv.withTenant(() => { const url = getAppFileUrl() expect(url).toBe( "http://s3.example.com/prod-budi-app-assets/app_123/attachments/image.jpeg" @@ -146,7 +146,7 @@ it("gets url with cloudfront + s3", async () => { testEnv.withCloudfront() - await testEnv.withTenant(tenantId => { + await testEnv.withTenant(() => { const url = getAppFileUrl() // omit rest of signed params expect( diff --git a/packages/backend-core/src/platform/tests/tenants.spec.ts b/packages/backend-core/src/platform/tests/tenants.spec.ts index b2ab75c954..e22003fd45 100644 --- a/packages/backend-core/src/platform/tests/tenants.spec.ts +++ b/packages/backend-core/src/platform/tests/tenants.spec.ts @@ -3,7 +3,7 @@ import { DBTestConfiguration } from "../../../tests/extra" import * as tenants from "../tenants" describe("tenants", () => { - const config = new DBTestConfiguration() + new DBTestConfiguration() describe("addTenant", () => { it("concurrently adds multiple tenants safely", async () => { diff --git a/packages/backend-core/src/platform/users.ts b/packages/backend-core/src/platform/users.ts index 6f030afb7c..ccaad76b19 100644 --- a/packages/backend-core/src/platform/users.ts +++ b/packages/backend-core/src/platform/users.ts @@ -20,7 +20,7 @@ export async function lookupTenantId(userId: string) { return user.tenantId } -async function getUserDoc(emailOrId: string): Promise<PlatformUser> { +export async function getUserDoc(emailOrId: string): Promise<PlatformUser> { const db = getPlatformDB() return db.get(emailOrId) } @@ -79,6 +79,17 @@ async function addUserDoc(emailOrId: string, newDocFn: () => PlatformUser) { } } +export async function addSsoUser( + ssoId: string, + email: string, + userId: string, + tenantId: string +) { + return addUserDoc(ssoId, () => + newUserSsoIdDoc(ssoId, email, userId, tenantId) + ) +} + export async function addUser( tenantId: string, userId: string, @@ -91,9 +102,7 @@ ] if (ssoId) { - promises.push( - addUserDoc(ssoId, () => newUserSsoIdDoc(ssoId, email,
userId, tenantId)) - ) + promises.push(addSsoUser(ssoId, email, userId, tenantId)) } await Promise.all(promises) diff --git a/packages/backend-core/src/queue/constants.ts b/packages/backend-core/src/queue/constants.ts index eb4f21aced..a095c6c769 100644 --- a/packages/backend-core/src/queue/constants.ts +++ b/packages/backend-core/src/queue/constants.ts @@ -4,4 +4,5 @@ export enum JobQueue { AUDIT_LOG = "auditLogQueue", SYSTEM_EVENT_QUEUE = "systemEventQueue", APP_MIGRATION = "appMigration", + DOC_WRITETHROUGH_QUEUE = "docWritethroughQueue", } diff --git a/packages/backend-core/src/queue/inMemoryQueue.ts b/packages/backend-core/src/queue/inMemoryQueue.ts index c05bbffbe9..87e43b324d 100644 --- a/packages/backend-core/src/queue/inMemoryQueue.ts +++ b/packages/backend-core/src/queue/inMemoryQueue.ts @@ -1,5 +1,14 @@ import events from "events" -import { timeout } from "../utils" +import { newid, timeout } from "../utils" +import { Queue, QueueOptions, JobOptions } from "./queue" + +interface JobMessage { + id: string + timestamp: number + queue: string + data: any + opts?: JobOptions +} /** * Bull works with a Job wrapper around all messages that contains a lot more information about * @returns A new job which can now be put onto the queue, this is mostly an * internal structure so that an in memory queue can be easily swapped for a Bull queue. */ -function newJob(queue: string, message: any) { +function newJob(queue: string, message: any, opts?: JobOptions): JobMessage { return { + id: newid(), timestamp: Date.now(), queue: queue, data: message, - opts: {}, + opts, } } @@ -24,26 +34,29 @@ /** * It is relatively simple, using an event emitter internally to register when messages are available * to the consumers - in can support many inputs and many consumers. */ -class InMemoryQueue { +class InMemoryQueue implements Partial<Queue> { _name: string - _opts?: any - _messages: any[] - _emitter: EventEmitter + _opts?: QueueOptions + _messages: JobMessage[] + _queuedJobIds: Set<string> + _emitter: NodeJS.EventEmitter _runCount: number _addCount: number + /** * The constructor the queue, exactly the same as that of Bulls. * @param name The name of the queue which is being configured. * @param opts This is not used by the in memory queue as there is no real use * case when in memory, but is the same API as Bull */ - constructor(name: string, opts?: any) { + constructor(name: string, opts?: QueueOptions) { this._name = name this._opts = opts this._messages = [] this._emitter = new events.EventEmitter() this._runCount = 0 this._addCount = 0 + this._queuedJobIds = new Set() } /** @@ -55,22 +68,42 @@ * note this is incredibly limited compared to Bull as in reality the Job would contain * a lot more information about the queue and current status of Bull cluster.
*/ - process(func: any) { + async process(func: any) { this._emitter.on("message", async () => { if (this._messages.length <= 0) { return } let msg = this._messages.shift() + let resp = func(msg) + + async function retryFunc(fnc: any) { + try { + await fnc + } catch (e: any) { + await new Promise<void>(r => setTimeout(() => r(), 50)) + + await retryFunc(func(msg)) + } + } + if (resp.then != null) { - await resp + try { + await retryFunc(resp) + } catch (e: any) { + console.error(e) + } } this._runCount++ + const jobId = msg?.opts?.jobId?.toString() + if (jobId && msg?.opts?.removeOnComplete) { + this._queuedJobIds.delete(jobId) + } }) } async isReady() { - return true + return this as any } // simply puts a message to the queue and emits to the queue for processing @@ -83,27 +116,45 @@ * @param repeat serves no purpose for the import queue. */ // eslint-disable-next-line no-unused-vars - add(msg: any, repeat: boolean) { - if (typeof msg !== "object") { + async add(data: any, opts?: JobOptions) { + const jobId = opts?.jobId?.toString() + if (jobId && this._queuedJobIds.has(jobId)) { + console.log(`Ignoring already queued job ${jobId}`) + return + } + + if (typeof data !== "object") { throw "Queue only supports carrying JSON." } - this._messages.push(newJob(this._name, msg)) - this._addCount++ - this._emitter.emit("message") + if (jobId) { + this._queuedJobIds.add(jobId) + } + + const pushMessage = () => { + this._messages.push(newJob(this._name, data, opts)) + this._addCount++ + this._emitter.emit("message") + } + + const delay = opts?.delay + if (delay) { + setTimeout(pushMessage, delay) + } else { + pushMessage() + } + return {} as any } /** * replicating the close function from bull, which waits for jobs to finish. */ - async close() { - return [] - } + async close() {} /** * This removes a cron which has been implemented, this is part of Bull API. * @param cronJobId The cron which is to be removed.
*/ - removeRepeatableByKey(cronJobId: string) { + async removeRepeatableByKey(cronJobId: string) { // TODO: implement for testing console.log(cronJobId) } @@ -111,12 +162,12 @@ class InMemoryQueue { /** * Implemented for tests */ - getRepeatableJobs() { + async getRepeatableJobs() { return [] } - // eslint-disable-next-line no-unused-vars - removeJobs(pattern: string) { + // eslint-disable-next-line @typescript-eslint/no-unused-vars + async removeJobs(pattern: string) { // no-op } @@ -128,18 +179,22 @@ class InMemoryQueue { } async getJob() { - return {} + return null } on() { // do nothing - return this + return this as any } async waitForCompletion() { do { await timeout(50) - } while (this._addCount < this._runCount) + } while (this.hasRunningJobs()) + } + + hasRunningJobs() { + return this._addCount > this._runCount } } diff --git a/packages/backend-core/src/queue/listeners.ts b/packages/backend-core/src/queue/listeners.ts index 063a01bd2f..cd25ff2254 100644 --- a/packages/backend-core/src/queue/listeners.ts +++ b/packages/backend-core/src/queue/listeners.ts @@ -88,6 +88,7 @@ enum QueueEventType { AUDIT_LOG_EVENT = "audit-log-event", SYSTEM_EVENT = "system-event", APP_MIGRATION = "app-migration", + DOC_WRITETHROUGH = "doc-writethrough", } const EventTypeMap: { [key in JobQueue]: QueueEventType } = { @@ -96,6 +97,7 @@ const EventTypeMap: { [key in JobQueue]: QueueEventType } = { [JobQueue.AUDIT_LOG]: QueueEventType.AUDIT_LOG_EVENT, [JobQueue.SYSTEM_EVENT_QUEUE]: QueueEventType.SYSTEM_EVENT, [JobQueue.APP_MIGRATION]: QueueEventType.APP_MIGRATION, + [JobQueue.DOC_WRITETHROUGH_QUEUE]: QueueEventType.DOC_WRITETHROUGH, } function logging(queue: Queue, jobQueue: JobQueue) { @@ -130,7 +132,7 @@ function logging(queue: Queue, jobQueue: JobQueue) { // A Job is waiting to be processed as soon as a worker is idling. console.info(...getLogParams(eventType, BullEvent.WAITING, { jobId })) }) - .on(BullEvent.ACTIVE, async (job: Job, jobPromise: any) => { + .on(BullEvent.ACTIVE, async (job: Job) => { // A job has started. You can use `jobPromise.cancel()`` to abort it. 
await doInJobContext(job, () => { console.info(...getLogParams(eventType, BullEvent.ACTIVE, { job })) diff --git a/packages/backend-core/src/queue/queue.ts b/packages/backend-core/src/queue/queue.ts index 0bcb25a35f..1838eed92f 100644 --- a/packages/backend-core/src/queue/queue.ts +++ b/packages/backend-core/src/queue/queue.ts @@ -7,6 +7,8 @@ import { addListeners, StalledFn } from "./listeners" import { Duration } from "../utils" import * as timers from "../timers" +export { QueueOptions, Queue, JobOptions } from "bull" + // the queue lock is held for 5 minutes const QUEUE_LOCK_MS = Duration.fromMinutes(5).toMs() // queue lock is refreshed every 30 seconds diff --git a/packages/backend-core/src/redis/init.ts b/packages/backend-core/src/redis/init.ts index f3bcee3209..44ba28a83c 100644 --- a/packages/backend-core/src/redis/init.ts +++ b/packages/backend-core/src/redis/init.ts @@ -9,7 +9,8 @@ let userClient: Client, lockClient: Client, socketClient: Client, inviteClient: Client, - passwordResetClient: Client + passwordResetClient: Client, + docWritethroughClient: Client export async function init() { userClient = await new Client(utils.Databases.USER_CACHE).init() @@ -24,6 +25,9 @@ utils.Databases.SOCKET_IO, utils.SelectableDatabase.SOCKET_IO ).init() + docWritethroughClient = await new Client( + utils.Databases.DOC_WRITE_THROUGH + ).init() } export async function shutdown() { @@ -36,6 +40,7 @@ if (inviteClient) await inviteClient.finish() if (passwordResetClient) await passwordResetClient.finish() if (socketClient) await socketClient.finish() + if (docWritethroughClient) await docWritethroughClient.finish() } process.on("exit", async () => { @@ -104,3 +109,10 @@ export async function getPasswordResetClient() { } return passwordResetClient } + +export async function getDocWritethroughClient() { + if (!docWritethroughClient) { + await init() + } + return docWritethroughClient +} diff --git a/packages/backend-core/src/redis/redis.ts b/packages/backend-core/src/redis/redis.ts index d15453ba62..79f75421d3 100644 --- a/packages/backend-core/src/redis/redis.ts +++ b/packages/backend-core/src/redis/redis.ts @@ -1,5 +1,5 @@ import env from "../environment" -import Redis from "ioredis" +import Redis, { Cluster } from "ioredis" // mock-redis doesn't have any typing let MockRedis: any | undefined if (env.MOCK_REDIS) { @@ -28,7 +28,7 @@ const DEFAULT_SELECT_DB = SelectableDatabase.DEFAULT // for testing just generate the client once let CLOSED = false -let CLIENTS: { [key: number]: any } = {} +const CLIENTS: Record<number, any> = {} let CONNECTED = false // mock redis always connected @@ -36,7 +36,7 @@ if (env.MOCK_REDIS) { CONNECTED = true } -function pickClient(selectDb: number): any { +function pickClient(selectDb: number) { return CLIENTS[selectDb] } @@ -201,12 +201,15 @@ class RedisWrapper { key = `${db}${SEPARATOR}${key}` let stream if (CLUSTERED) { - let node = this.getClient().nodes("master") + let node = (this.getClient() as never as Cluster).nodes("master") stream = node[0].scanStream({ match: key + "*", count: 100 }) } else { - stream = this.getClient().scanStream({ match: key + "*", count: 100 }) + stream = (this.getClient() as Redis).scanStream({ + match: key + "*", + count: 100, + }) } - return promisifyStream(stream, this.getClient()) + return promisifyStream(stream, this.getClient() as any) } async keys(pattern: string) { @@ -221,14 +224,16 @@ async get(key: string) { const db = this._db - let response = await
this.getClient().get(addDbPrefix(db, key)) + const response = await this.getClient().get(addDbPrefix(db, key)) // overwrite the prefixed key + // @ts-ignore if (response != null && response.key) { + // @ts-ignore response.key = key } // if its not an object just return the response try { - return JSON.parse(response) + return JSON.parse(response!) } catch (err) { return response } @@ -274,13 +279,37 @@ } } + async bulkStore( + data: Record<string, any>, + expirySeconds: number | null = null + ) { + const client = this.getClient() + + const dataToStore = Object.entries(data).reduce((acc, [key, value]) => { + acc[addDbPrefix(this._db, key)] = + typeof value === "object" ? JSON.stringify(value) : value + return acc + }, {} as Record<string, any>) + + const pipeline = client.pipeline() + pipeline.mset(dataToStore) + + if (expirySeconds !== null) { + for (const key of Object.keys(dataToStore)) { + pipeline.expire(key, expirySeconds) + } + } + + await pipeline.exec() + } + async getTTL(key: string) { const db = this._db const prefixedKey = addDbPrefix(db, key) return this.getClient().ttl(prefixedKey) } - async setExpiry(key: string, expirySeconds: number | null) { + async setExpiry(key: string, expirySeconds: number) { const db = this._db const prefixedKey = addDbPrefix(db, key) await this.getClient().expire(prefixedKey, expirySeconds) @@ -291,10 +320,35 @@ await this.getClient().del(addDbPrefix(db, key)) } + async bulkDelete(keys: string[]) { + const db = this._db + await this.getClient().del(keys.map(key => addDbPrefix(db, key))) + } + async clear() { let items = await this.scan() await Promise.all(items.map((obj: any) => this.delete(obj.key))) } + + async increment(key: string) { + const result = await this.getClient().incr(addDbPrefix(this._db, key)) + if (isNaN(result)) { + throw new Error(`Redis ${key} does not contain a number`) + } + return result + } + + async deleteIfValue(key: string, value: any) { + const client = this.getClient() + + const luaScript = ` + if redis.call('GET', KEYS[1]) == ARGV[1] then + redis.call('DEL', KEYS[1]) + end + ` + + await client.eval(luaScript, 1, addDbPrefix(this._db, key), value) + } } export default RedisWrapper diff --git a/packages/backend-core/src/redis/redlockImpl.ts b/packages/backend-core/src/redis/redlockImpl.ts index 7009dc6f55..adeb5b12ec 100644 --- a/packages/backend-core/src/redis/redlockImpl.ts +++ b/packages/backend-core/src/redis/redlockImpl.ts @@ -72,7 +72,7 @@ const OPTIONS: Record = { export async function newRedlock(opts: Redlock.Options = {}) { const options = { ...OPTIONS.DEFAULT, ...opts } const redisWrapper = await getLockClient() - const client = redisWrapper.getClient() + const client = redisWrapper.getClient() as any return new Redlock([client], options) } diff --git a/packages/backend-core/src/redis/tests/redis.spec.ts b/packages/backend-core/src/redis/tests/redis.spec.ts new file mode 100644 index 0000000000..4d11caf220 --- /dev/null +++ b/packages/backend-core/src/redis/tests/redis.spec.ts @@ -0,0 +1,203 @@ +import { GenericContainer, StartedTestContainer } from "testcontainers" +import { generator, structures } from "../../../tests" +import RedisWrapper from "../redis" +import { env } from "../.."
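Editor's aside before the new spec: two of the wrapper methods above deserve a note. bulkStore issues a single MSET plus one EXPIRE per key on the same pipeline, so the whole batch costs one network round trip; deleteIfValue runs GET/compare/DEL inside a Lua script, making it the atomic compare-and-delete idiom usually used to release a value-guarded key. A rough usage sketch, assuming only the RedisWrapper API in this diff - the database choice and key names are invented for illustration:

import RedisWrapper from "../redis"
import { Databases } from "../utils"

async function demo() {
  const redis = new RedisWrapper(Databases.DOC_WRITE_THROUGH)
  await redis.init()

  // one pipeline round trip: MSET both keys, then EXPIRE each for 60 seconds
  await redis.bulkStore({ "doc:1": { rev: 1 }, "doc:2": "plain" }, 60)

  // atomic: deletes lock:doc:1 only if it still holds "owner-token"
  await redis.deleteIfValue("lock:doc:1", "owner-token")
}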
+ +jest.setTimeout(30000) + +describe("redis", () => { + let redis: RedisWrapper + let container: StartedTestContainer + + beforeAll(async () => { + container = await new GenericContainer("redis") + .withExposedPorts(6379) + .start() + + env._set( + "REDIS_URL", + `${container.getHost()}:${container.getMappedPort(6379)}` + ) + env._set("MOCK_REDIS", 0) + env._set("REDIS_PASSWORD", 0) + }) + + afterAll(() => container?.stop()) + + beforeEach(async () => { + redis = new RedisWrapper(structures.db.id()) + await redis.init() + }) + + describe("store", () => { + it("a basic value can be persisted", async () => { + const key = structures.uuid() + const value = generator.word() + + await redis.store(key, value) + + expect(await redis.get(key)).toEqual(value) + }) + + it("objects can be persisted", async () => { + const key = structures.uuid() + const value = { [generator.word()]: generator.word() } + + await redis.store(key, value) + + expect(await redis.get(key)).toEqual(value) + }) + }) + + describe("bulkStore", () => { + function createRandomObject( + keyLength: number, + valueGenerator: () => any = () => generator.word() + ) { + return generator + .unique(() => generator.word(), keyLength) + .reduce((acc, key) => { + acc[key] = valueGenerator() + return acc + }, {} as Record<string, any>) + } + + it("a basic object can be persisted", async () => { + const data = createRandomObject(10) + + await redis.bulkStore(data) + + for (const [key, value] of Object.entries(data)) { + expect(await redis.get(key)).toEqual(value) + } + + expect(await redis.keys("*")).toHaveLength(10) + }) + + it("a complex object can be persisted", async () => { + const data = { + ...createRandomObject(10, () => createRandomObject(5)), + ...createRandomObject(5), + } + + await redis.bulkStore(data) + + for (const [key, value] of Object.entries(data)) { + expect(await redis.get(key)).toEqual(value) + } + + expect(await redis.keys("*")).toHaveLength(15) + }) + + it("no TTL is set by default", async () => { + const data = createRandomObject(10) + + await redis.bulkStore(data) + + for (const [key, value] of Object.entries(data)) { + expect(await redis.get(key)).toEqual(value) + expect(await redis.getTTL(key)).toEqual(-1) + } + }) + + it("a bulk store can be persisted with TTL", async () => { + const ttl = 500 + const data = createRandomObject(8) + + await redis.bulkStore(data, ttl) + + for (const [key, value] of Object.entries(data)) { + expect(await redis.get(key)).toEqual(value) + expect(await redis.getTTL(key)).toEqual(ttl) + } + + expect(await redis.keys("*")).toHaveLength(8) + }) + + it("setting a TTL of -1 will not persist the key", async () => { + const ttl = -1 + const data = createRandomObject(5) + + await redis.bulkStore(data, ttl) + + for (const key of Object.keys(data)) { + expect(await redis.get(key)).toBe(null) + } + + expect(await redis.keys("*")).toHaveLength(0) + }) + }) + + describe("increment", () => { + it("can increment on a new key", async () => { + const key = structures.uuid() + const result = await redis.increment(key) + expect(result).toBe(1) + }) + + it("can increment multiple times", async () => { + const key = structures.uuid() + const results = [ + await redis.increment(key), + await redis.increment(key), + await redis.increment(key), + await redis.increment(key), + await redis.increment(key), + ] + expect(results).toEqual([1, 2, 3, 4, 5]) + }) + + it("can increment multiple times in parallel", async () => { + const key = structures.uuid() + const results = await Promise.all( + Array.from({ length: 100
}).map(() => redis.increment(key)) + ) + expect(results).toHaveLength(100) + expect(results).toEqual(Array.from({ length: 100 }).map((_, i) => i + 1)) + }) + + it("can increment existing set keys", async () => { + const key = structures.uuid() + await redis.store(key, 70) + await redis.increment(key) + + const result = await redis.increment(key) + expect(result).toBe(72) + }) + + it.each([ + generator.word(), + generator.bool(), + { [generator.word()]: generator.word() }, + ])("cannot increment if the store value is not a number", async value => { + const key = structures.uuid() + await redis.store(key, value) + + await expect(redis.increment(key)).rejects.toThrow( + "ERR value is not an integer or out of range" + ) + }) + }) + + describe("deleteIfValue", () => { + it("can delete if the value matches", async () => { + const key = structures.uuid() + const value = generator.word() + await redis.store(key, value) + + await redis.deleteIfValue(key, value) + + expect(await redis.get(key)).toBeNull() + }) + + it("will not delete if the value does not match", async () => { + const key = structures.uuid() + const value = generator.word() + await redis.store(key, value) + + await redis.deleteIfValue(key, generator.word()) + + expect(await redis.get(key)).toEqual(value) + }) + }) +}) diff --git a/packages/backend-core/src/redis/tests/redlockImpl.spec.ts b/packages/backend-core/src/redis/tests/redlockImpl.spec.ts index a1e83d8e6c..e647b63bf5 100644 --- a/packages/backend-core/src/redis/tests/redlockImpl.spec.ts +++ b/packages/backend-core/src/redis/tests/redlockImpl.spec.ts @@ -96,8 +96,8 @@ describe("redlockImpl", () => { task: mockTask, executionTimeMs: lockTtl * 2, }) - ).rejects.toThrowError( - `Unable to fully release the lock on resource \"lock:${config.tenantId}_persist_writethrough\".` + ).rejects.toThrow( + `Unable to fully release the lock on resource "lock:${config.tenantId}_persist_writethrough".` ) } ) diff --git a/packages/backend-core/src/redis/utils.ts b/packages/backend-core/src/redis/utils.ts index 7b93458b52..7f84f11467 100644 --- a/packages/backend-core/src/redis/utils.ts +++ b/packages/backend-core/src/redis/utils.ts @@ -30,6 +30,7 @@ export enum Databases { LOCKS = "locks", SOCKET_IO = "socket_io", BPM_EVENTS = "bpmEvents", + DOC_WRITE_THROUGH = "docWriteThrough", } /** diff --git a/packages/backend-core/src/security/roles.ts b/packages/backend-core/src/security/roles.ts index 4f048c0a11..a64be6b319 100644 --- a/packages/backend-core/src/security/roles.ts +++ b/packages/backend-core/src/security/roles.ts @@ -84,25 +84,24 @@ export function getBuiltinRoles(): { [key: string]: RoleDoc } { return cloneDeep(BUILTIN_ROLES) } -export const BUILTIN_ROLE_ID_ARRAY = Object.values(BUILTIN_ROLES).map( - role => role._id -) +export function isBuiltin(role: string) { + return getBuiltinRole(role) !== undefined +} -export const BUILTIN_ROLE_NAME_ARRAY = Object.values(BUILTIN_ROLES).map( - role => role.name -) - -export function isBuiltin(role?: string) { - return BUILTIN_ROLE_ID_ARRAY.some(builtin => role?.includes(builtin)) +export function getBuiltinRole(roleId: string): Role | undefined { + const role = Object.values(BUILTIN_ROLES).find(role => + roleId.includes(role._id) + ) + if (!role) { + return undefined + } + return cloneDeep(role) } /** * Works through the inheritance ranks to see how far up the builtin stack this ID is.
*/ -export function builtinRoleToNumber(id?: string) { - if (!id) { - return 0 - } +export function builtinRoleToNumber(id: string) { const builtins = getBuiltinRoles() const MAX = Object.values(builtins).length + 1 if (id === BUILTIN_IDS.ADMIN || id === BUILTIN_IDS.BUILDER) { @@ -123,7 +122,7 @@ /** * Converts any role to a number, but has to be async to get the roles from db. */ -export async function roleToNumber(id?: string) { +export async function roleToNumber(id: string) { if (isBuiltin(id)) { return builtinRoleToNumber(id) } @@ -131,7 +130,7 @@ defaultPublic: true, })) as RoleDoc[] for (let role of hierarchy) { - if (isBuiltin(role?.inherits)) { + if (role?.inherits && isBuiltin(role.inherits)) { return builtinRoleToNumber(role.inherits) + 1 } } @@ -161,35 +160,28 @@ export function lowerBuiltinRoleID(roleId1?: string, roleId2?: string): string { * @returns The role object, which may contain an "inherits" property. */ export async function getRole( - roleId?: string, + roleId: string, opts?: { defaultPublic?: boolean } -): Promise<RoleDoc | undefined> { - if (!roleId) { - return undefined - } - let role: any = {} +): Promise<RoleDoc> { // built in roles mostly come from the in-code implementation, // but can be extended by a doc stored about them (e.g. permissions) - if (isBuiltin(roleId)) { - role = cloneDeep( - Object.values(BUILTIN_ROLES).find(role => role._id === roleId) - ) - } else { + let role: RoleDoc | undefined = getBuiltinRole(roleId) + if (!role) { // make sure has the prefix (if it has it then it won't be added) roleId = prefixRoleID(roleId) } try { const db = getAppDB() - const dbRole = await db.get(getDBRoleID(roleId)) - role = Object.assign(role, dbRole) + const dbRole = await db.get<RoleDoc>(getDBRoleID(roleId)) + role = Object.assign(role || {}, dbRole) // finalise the ID - role._id = getExternalRoleID(role._id, role.version) + role._id = getExternalRoleID(role._id!, role.version) } catch (err) { if (!isBuiltin(roleId) && opts?.defaultPublic) { return cloneDeep(BUILTIN_ROLES.PUBLIC) } // only throw an error if there is no role at all - if (Object.keys(role).length === 0) { + if (!role || Object.keys(role).length === 0) { throw err } } @@ -200,7 +192,7 @@ /** * Simple function to get all the roles based on the top level user role ID. */ async function getAllUserRoles( - userRoleId?: string, + userRoleId: string, opts?: { defaultPublic?: boolean } ): Promise<RoleDoc[]> { // admins have access to all roles @@ -226,7 +218,7 @@ } export async function getUserRoleIdHierarchy( - userRoleId?: string + userRoleId: string ): Promise<string[]> { const roles = await getUserRoleHierarchy(userRoleId) return roles.map(role => role._id!) @@ -241,7 +233,7 @@ * highest level of access and the last being the lowest level. */ export async function getUserRoleHierarchy( - userRoleId?: string, + userRoleId: string, opts?: { defaultPublic?: boolean } ) { // special case, if they don't have a role then they are a public user @@ -265,9 +257,9 @@ export function checkForRoleResourceArray( return rolePerms } -export async function getAllRoleIds(appId?: string) { +export async function getAllRoleIds(appId: string): Promise<string[]> { const roles = await getAllRoles(appId) - return roles.map(role => role._id) + return roles.map(role => role._id!)
} /** diff --git a/packages/backend-core/src/tenancy/tests/tenancy.spec.ts b/packages/backend-core/src/tenancy/tests/tenancy.spec.ts index 95dd76a6dd..34e9f87064 100644 --- a/packages/backend-core/src/tenancy/tests/tenancy.spec.ts +++ b/packages/backend-core/src/tenancy/tests/tenancy.spec.ts @@ -158,8 +158,8 @@ describe("getTenantIDFromCtx", () => { ], } expect(getTenantIDFromCtx(ctx, mockOpts)).toBeUndefined() - expect(ctx.throw).toBeCalledTimes(1) - expect(ctx.throw).toBeCalledWith(403, "Tenant id not set") + expect(ctx.throw).toHaveBeenCalledTimes(1) + expect(ctx.throw).toHaveBeenCalledWith(403, "Tenant id not set") }) it("returns undefined if allowNoTenant is true", () => { diff --git a/packages/backend-core/src/timers/timers.ts b/packages/backend-core/src/timers/timers.ts index 9121c576cd..000be74821 100644 --- a/packages/backend-core/src/timers/timers.ts +++ b/packages/backend-core/src/timers/timers.ts @@ -20,41 +20,3 @@ export function cleanup() { } intervals = [] } - -export class ExecutionTimeoutError extends Error { - public readonly name = "ExecutionTimeoutError" -} - -export class ExecutionTimeTracker { - static withLimit(limitMs: number) { - return new ExecutionTimeTracker(limitMs) - } - - constructor(readonly limitMs: number) {} - - private totalTimeMs = 0 - - track<T>(f: () => T): T { - this.checkLimit() - const start = process.hrtime.bigint() - try { - return f() - } finally { - const end = process.hrtime.bigint() - this.totalTimeMs += Number(end - start) / 1e6 - this.checkLimit() - } - } - - get elapsedMS() { - return this.totalTimeMs - } - - checkLimit() { - if (this.totalTimeMs > this.limitMs) { - throw new ExecutionTimeoutError( - `Execution time limit of ${this.limitMs}ms exceeded: ${this.totalTimeMs}ms` - ) - } - } -} diff --git a/packages/backend-core/src/users/db.ts b/packages/backend-core/src/users/db.ts index 136cb4b8ad..04d3264e6f 100644 --- a/packages/backend-core/src/users/db.ts +++ b/packages/backend-core/src/users/db.ts @@ -500,13 +500,13 @@ export class UserDB { static async createAdminUser( email: string, - password: string, tenantId: string, + password?: string, opts?: CreateAdminUserOpts ) { const user: User = { email: email, - password: password, + password, createdAt: Date.now(), roles: {}, builder: { diff --git a/packages/backend-core/src/users/test/utils.spec.ts b/packages/backend-core/src/users/test/utils.spec.ts index 0fe27f57a6..cb98b8972b 100644 --- a/packages/backend-core/src/users/test/utils.spec.ts +++ b/packages/backend-core/src/users/test/utils.spec.ts @@ -45,7 +45,7 @@ describe("Users", () => { ...{ _id: groupId, roles: { app1: "ADMIN" } }, } const users: User[] = [] - for (const _ of Array.from({ length: usersInGroup })) { + for (let i = 0; i < usersInGroup; i++) { const userId = `us_${generator.guid()}` const user: User = structures.users.user({ _id: userId, diff --git a/packages/backend-core/src/users/users.ts b/packages/backend-core/src/users/users.ts index 638da4a5b1..48920a3771 100644 --- a/packages/backend-core/src/users/users.ts +++ b/packages/backend-core/src/users/users.ts @@ -14,16 +14,16 @@ import { } from "../db" import { BulkDocsResponse, + ContextUser, + CouchFindOptions, + DatabaseQueryOpts, SearchQuery, SearchQueryOperators, SearchUsersRequest, User, - ContextUser, - DatabaseQueryOpts, - CouchFindOptions, } from "@budibase/types" -import { getGlobalDB } from "../context" import * as context from "../context" +import { getGlobalDB } from "../context" import { isCreator } from "./utils" import { UserDB } from "./db" @@ -48,6
+48,7 @@ export function isSupportedUserSearch(query: SearchQuery) { const allowed = [ { op: SearchQueryOperators.STRING, key: "email" }, { op: SearchQueryOperators.EQUAL, key: "_id" }, + { op: SearchQueryOperators.ONE_OF, key: "_id" }, ] for (let [key, operation] of Object.entries(query)) { if (typeof operation !== "object") { @@ -285,6 +286,10 @@ } else if (query?.string?.email) { userList = await searchGlobalUsersByEmail(query?.string?.email, opts) property = "email" + } else if (query?.oneOf?._id) { + userList = await bulkGetGlobalUsersById(query?.oneOf?._id, { + cleanup: true, + }) } else { // no search, query allDocs const response = await db.allDocs(getGlobalUserParams(null, opts)) diff --git a/packages/backend-core/tests/core/utilities/mocks/events.ts b/packages/backend-core/tests/core/utilities/mocks/events.ts index fef730768a..96f351de10 100644 --- a/packages/backend-core/tests/core/utilities/mocks/events.ts +++ b/packages/backend-core/tests/core/utilities/mocks/events.ts @@ -15,6 +15,7 @@ beforeAll(async () => { jest.spyOn(events.app, "created") jest.spyOn(events.app, "updated") + jest.spyOn(events.app, "duplicated") jest.spyOn(events.app, "deleted") jest.spyOn(events.app, "published") jest.spyOn(events.app, "unpublished") diff --git a/packages/backend-core/tests/core/utilities/structures/accounts.ts b/packages/backend-core/tests/core/utilities/structures/accounts.ts index 515f94db1e..7dcc2de116 100644 --- a/packages/backend-core/tests/core/utilities/structures/accounts.ts +++ b/packages/backend-core/tests/core/utilities/structures/accounts.ts @@ -18,7 +18,7 @@ export const account = (partial: Partial<Account> = {}): Account => { return { accountId: uuid(), tenantId: generator.word(), - email: generator.email(), + email: generator.email({ domain: "example.com" }), tenantName: generator.word(), hosting: Hosting.SELF, createdAt: Date.now(), diff --git a/packages/backend-core/tests/core/utilities/structures/scim.ts b/packages/backend-core/tests/core/utilities/structures/scim.ts index 80f41c605d..f424b2881a 100644 --- a/packages/backend-core/tests/core/utilities/structures/scim.ts +++ b/packages/backend-core/tests/core/utilities/structures/scim.ts @@ -13,7 +13,7 @@ interface CreateUserRequestFields { } export function createUserRequest(userData?: Partial<CreateUserRequestFields>) { const defaultValues = { externalId: uuid(), - email: generator.email(), + email: `${uuid()}@example.com`, firstName: generator.first(), lastName: generator.last(), username: generator.name(), diff --git a/packages/backend-core/tests/core/utilities/structures/userGroups.ts b/packages/backend-core/tests/core/utilities/structures/userGroups.ts index 4dc870a00a..4af3f72e51 100644 --- a/packages/backend-core/tests/core/utilities/structures/userGroups.ts +++ b/packages/backend-core/tests/core/utilities/structures/userGroups.ts @@ -3,7 +3,7 @@ import { generator } from "./generator" export function userGroup(): UserGroup { return { - name: generator.word(), + name: generator.guid(), icon: generator.word(), color: generator.word(), } diff --git a/packages/backend-core/tests/core/utilities/testContainerUtils.ts b/packages/backend-core/tests/core/utilities/testContainerUtils.ts index 7da6cbc777..5d4f5a3c11 100644 --- a/packages/backend-core/tests/core/utilities/testContainerUtils.ts +++ b/packages/backend-core/tests/core/utilities/testContainerUtils.ts @@ -1,80 +1,58 @@ +import { DatabaseImpl } from "../../../src/db" import { execSync } from "child_process" -let dockerPsResult: string | undefined - -function
formatDockerPsResult(serverName: string, port: number) { - const lines = dockerPsResult?.split("\n") - let first = true - if (!lines) { - return null - } - for (let line of lines) { - if (first) { - first = false - continue - } - let toLookFor = serverName.split("-service")[0] - if (!line.includes(toLookFor)) { - continue - } - const regex = new RegExp(`0.0.0.0:([0-9]*)->${port}`, "g") - const found = line.match(regex) - if (found) { - return found[0].split(":")[1].split("->")[0] - } - } - return null +interface ContainerInfo { + Command: string + CreatedAt: string + ID: string + Image: string + Labels: string + LocalVolumes: string + Mounts: string + Names: string + Networks: string + Ports: string + RunningFor: string + Size: string + State: string + Status: string } -function getTestContainerSettings( - serverName: string, - key: string -): string | null { - const entry = Object.entries(global).find( - ([k]) => - k.includes(`${serverName.toUpperCase()}`) && - k.includes(`${key.toUpperCase()}`) - ) - if (!entry) { - return null - } - return entry[1] +function getTestcontainers(): ContainerInfo[] { + return execSync("docker ps --format json") + .toString() + .split("\n") + .filter(x => x.length > 0) + .map(x => JSON.parse(x) as ContainerInfo) + .filter(x => x.Labels.includes("org.testcontainers=true")) } -function getContainerInfo(containerName: string, port: number) { - let assignedPort = getTestContainerSettings( - containerName.toUpperCase(), - `PORT_${port}` - ) - if (!dockerPsResult) { - try { - const outputBuffer = execSync("docker ps") - dockerPsResult = outputBuffer.toString("utf8") - } catch (err) { - //no-op - } - } - const possiblePort = formatDockerPsResult(containerName, port) - if (possiblePort) { - assignedPort = possiblePort - } - const host = getTestContainerSettings(containerName.toUpperCase(), "IP") - return { - port: assignedPort, - host, - url: host && assignedPort && `http://${host}:${assignedPort}`, - } +function getContainerByImage(image: string) { + return getTestcontainers().find(x => x.Image.startsWith(image)) } -function getCouchConfig() { - return getContainerInfo("couchdb", 5984) +function getExposedPort(container: ContainerInfo, port: number) { + const match = container.Ports.match(new RegExp(`0.0.0.0:(\\d+)->${port}/tcp`)) + if (!match) { + return undefined + } + return parseInt(match[1]) } export function setupEnv(...envs: any[]) { - const couch = getCouchConfig() + const couch = getContainerByImage("budibase/couchdb") + if (!couch) { + throw new Error("CouchDB container not found") + } + + const couchPort = getExposedPort(couch, 5984) + if (!couchPort) { + throw new Error("CouchDB port not found") + } + const configs = [ - { key: "COUCH_DB_PORT", value: couch.port }, - { key: "COUCH_DB_URL", value: couch.url }, + { key: "COUCH_DB_PORT", value: `${couchPort}` }, + { key: "COUCH_DB_URL", value: `http://localhost:${couchPort}` }, ] for (const config of configs.filter(x => !!x.value)) { @@ -82,4 +60,7 @@ export function setupEnv(...envs: any[]) { env._set(config.key, config.value) } } + + // @ts-expect-error + DatabaseImpl.nano = undefined } diff --git a/packages/backend-core/tests/jestEnv.ts b/packages/backend-core/tests/jestEnv.ts index c2047118ec..2c797c9fff 100644 --- a/packages/backend-core/tests/jestEnv.ts +++ b/packages/backend-core/tests/jestEnv.ts @@ -4,3 +4,7 @@ process.env.NODE_ENV = "jest" process.env.MOCK_REDIS = "1" process.env.LOG_LEVEL = process.env.LOG_LEVEL || "error" process.env.REDIS_PASSWORD = "budibase" +process.env.COUCH_DB_PASSWORD = 
"budibase" +process.env.COUCH_DB_USER = "budibase" +process.env.API_ENCRYPTION_KEY = "testsecret" +process.env.JWT_SECRET = "testsecret" diff --git a/packages/bbui/rollup.config.js b/packages/bbui/rollup.config.js index e285d548d6..da274e0ba5 100644 --- a/packages/bbui/rollup.config.js +++ b/packages/bbui/rollup.config.js @@ -12,6 +12,13 @@ export default { format: "esm", file: "dist/bbui.es.js", }, + onwarn(warning, warn) { + // suppress eval warnings + if (warning.code === "EVAL") { + return + } + warn(warning) + }, plugins: [ resolve(), commonjs(), diff --git a/packages/bbui/src/ActionMenu/ActionMenu.svelte b/packages/bbui/src/ActionMenu/ActionMenu.svelte index 642ec4932a..c55d1cb43d 100644 --- a/packages/bbui/src/ActionMenu/ActionMenu.svelte +++ b/packages/bbui/src/ActionMenu/ActionMenu.svelte @@ -38,7 +38,7 @@
- + diff --git a/packages/bbui/src/Actions/click_outside.js b/packages/bbui/src/Actions/click_outside.js index 1961dca47c..eafca657f3 100644 --- a/packages/bbui/src/Actions/click_outside.js +++ b/packages/bbui/src/Actions/click_outside.js @@ -32,19 +32,30 @@ const handleClick = event => { return } + // Ignore clicks for drawers, unless the handler is registered from a drawer + const sourceInDrawer = handler.anchor.closest(".drawer-wrapper") != null + const clickInDrawer = event.target.closest(".drawer-wrapper") != null + if (clickInDrawer && !sourceInDrawer) { + return + } + + if (handler.allowedType && event.type !== handler.allowedType) { + return + } + handler.callback?.(event) }) } document.documentElement.addEventListener("click", handleClick, true) -document.documentElement.addEventListener("contextmenu", handleClick, true) +document.documentElement.addEventListener("mousedown", handleClick, true) /** * Adds or updates a click handler */ -const updateHandler = (id, element, anchor, callback) => { +const updateHandler = (id, element, anchor, callback, allowedType) => { let existingHandler = clickHandlers.find(x => x.id === id) if (!existingHandler) { - clickHandlers.push({ id, element, anchor, callback }) + clickHandlers.push({ id, element, anchor, callback, allowedType }) } else { existingHandler.callback = callback } @@ -68,9 +79,11 @@ const removeHandler = id => { export default (element, opts) => { const id = Math.random() const update = newOpts => { - const callback = newOpts?.callback || newOpts + const callback = + newOpts?.callback || (typeof newOpts === "function" ? newOpts : null) const anchor = newOpts?.anchor || element - updateHandler(id, element, anchor, callback) + const allowedType = newOpts?.allowedType || "click" + updateHandler(id, element, anchor, callback, allowedType) } update(opts) return { diff --git a/packages/bbui/src/Actions/position_dropdown.js b/packages/bbui/src/Actions/position_dropdown.js index cc169eac09..770d1bd507 100644 --- a/packages/bbui/src/Actions/position_dropdown.js +++ b/packages/bbui/src/Actions/position_dropdown.js @@ -15,6 +15,7 @@ export default function positionDropdown(element, opts) { align, maxHeight, maxWidth, + minWidth, useAnchorWidth, offset = 5, customUpdate, @@ -28,18 +29,26 @@ export default function positionDropdown(element, opts) { const elementBounds = element.getBoundingClientRect() let styles = { maxHeight: null, - minWidth: null, + minWidth, maxWidth, left: null, top: null, } if (typeof customUpdate === "function") { - styles = customUpdate(anchorBounds, elementBounds, styles) + styles = customUpdate(anchorBounds, elementBounds, { + ...styles, + offset: opts.offset, + }) } else { // Determine vertical styles - if (align === "right-outside") { - styles.top = anchorBounds.top + if (align === "right-outside" || align === "left-outside") { + styles.top = + anchorBounds.top + anchorBounds.height / 2 - elementBounds.height / 2 + styles.maxHeight = maxHeight + if (styles.top + elementBounds.height > window.innerHeight) { + styles.top = window.innerHeight - elementBounds.height + } } else if ( window.innerHeight - anchorBounds.bottom < (maxHeight || 100) diff --git a/packages/bbui/src/Drawer/Drawer.svelte b/packages/bbui/src/Drawer/Drawer.svelte index 8976bfb81e..89ee92726d 100644 --- a/packages/bbui/src/Drawer/Drawer.svelte +++ b/packages/bbui/src/Drawer/Drawer.svelte @@ -1,28 +1,111 @@ + + {#if visible} - -
- {#if !headless} + + +
+
+
0} + class:modal={$modal} + transition:drawerSlide|local + {style} + >
-
- {title} - - - -
+ {#if $$slots.title} + + {:else} +
{title || "Bindings"}
+ {/if}
+ {#if $resizable} + modal.set(!$modal)} + > + + + {/if}
- {/if} - -
+ +
+
+
{/if} diff --git a/packages/bbui/src/Drawer/DrawerContent.svelte b/packages/bbui/src/Drawer/DrawerContent.svelte index 944a3f4313..f7345afb11 100644 --- a/packages/bbui/src/Drawer/DrawerContent.svelte +++ b/packages/bbui/src/Drawer/DrawerContent.svelte @@ -1,4 +1,8 @@ -
+ + +
{#if $$slots.sidebar}