Merge branch 'master' of github.com:Budibase/budibase into data-provider-auto-refresh
commit 7076b574fc
@@ -6,9 +6,11 @@ packages/server/coverage
packages/worker/coverage
packages/backend-core/coverage
packages/server/client
packages/server/coverage
packages/builder/.routify
packages/sdk/sdk
packages/account-portal/packages/server/build
packages/account-portal/packages/ui/.routify
packages/account-portal/packages/ui/build
**/*.ivm.bundle.js
**/*.ivm.bundle.js
packages/server/build/oldClientVersions/**/**
@@ -35,16 +35,40 @@
{
"files": ["**/*.ts"],
"parser": "@typescript-eslint/parser",
"plugins": ["@typescript-eslint"],
"extends": ["eslint:recommended"],
"globals": {
"NodeJS": true
},
"rules": {
"no-unused-vars": "off",
"no-inner-declarations": "off",
"no-case-declarations": "off",
"no-useless-escape": "off",
"no-undef": "off",
"no-prototype-builtins": "off",
"local-rules/no-budibase-imports": "error",
"local-rules/no-test-com": "error"
"@typescript-eslint/no-unused-vars": "error",
"local-rules/no-budibase-imports": "error"
}
},
{
"files": ["**/*.spec.ts"],
"parser": "@typescript-eslint/parser",
"plugins": ["jest", "@typescript-eslint"],
"extends": ["eslint:recommended", "plugin:jest/recommended"],
"env": {
"jest/globals": true
},
"globals": {
"NodeJS": true
},
"rules": {
"no-unused-vars": "off",
"@typescript-eslint/no-unused-vars": "error",
"local-rules/no-test-com": "error",
"local-rules/email-domain-example-com": "error",
"no-console": "warn",
// We have a lot of tests that don't have assertions, they use our test
// API client that does the assertions for them
"jest/expect-expect": "off",
// We do this in some tests where the behaviour of internal tables
// differs to external, but the API is broadly the same
"jest/no-conditional-expect": "off"
}
},
{
@@ -66,7 +66,8 @@ jobs:
# Run build all the projects
- name: Build
run: |
yarn build
yarn build:oss
yarn build:account-portal
# Check the types of the projects built via esbuild
- name: Check types
run: |
@@ -107,9 +108,9 @@ jobs:
- name: Test
run: |
if ${{ env.USE_NX_AFFECTED }}; then
yarn test --ignore=@budibase/worker --ignore=@budibase/server --ignore=@budibase/pro --since=${{ env.NX_BASE_BRANCH }}
yarn test --ignore=@budibase/worker --ignore=@budibase/server --since=${{ env.NX_BASE_BRANCH }}
else
yarn test --ignore=@budibase/worker --ignore=@budibase/server --ignore=@budibase/pro
yarn test --ignore=@budibase/worker --ignore=@budibase/server
fi

test-worker:
@@ -138,6 +139,8 @@ jobs:

test-server:
runs-on: ubuntu-latest
env:
DEBUG: testcontainers,testcontainers:exec,testcontainers:build,testcontainers:pull
steps:
- name: Checkout repo
uses: actions/checkout@v4
@@ -151,7 +154,19 @@ jobs:
with:
node-version: 20.x
cache: yarn

- name: Pull testcontainers images
run: |
docker pull mcr.microsoft.com/mssql/server:2022-latest
docker pull mysql:8.3
docker pull postgres:16.1-bullseye
docker pull mongo:7.0-jammy
docker pull mariadb:lts
docker pull testcontainers/ryuk:0.5.1
docker pull budibase/couchdb

- run: yarn --frozen-lockfile

- name: Test server
run: |
if ${{ env.USE_NX_AFFECTED }}; then
@@ -160,60 +175,6 @@ jobs:
yarn test --scope=@budibase/server
fi

test-pro:
runs-on: ubuntu-latest
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase'
steps:
- name: Checkout repo and submodules
uses: actions/checkout@v4
with:
submodules: true
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
fetch-depth: 0

- name: Use Node.js 20.x
uses: actions/setup-node@v4
with:
node-version: 20.x
cache: yarn
- run: yarn --frozen-lockfile
- name: Test
run: |
if ${{ env.USE_NX_AFFECTED }}; then
yarn test --scope=@budibase/pro --since=${{ env.NX_BASE_BRANCH }}
else
yarn test --scope=@budibase/pro
fi

integration-test:
runs-on: ubuntu-latest
steps:
- name: Checkout repo
uses: actions/checkout@v4
with:
submodules: ${{ env.IS_OSS_CONTRIBUTOR == 'false' }}
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}

- name: Use Node.js 20.x
uses: actions/setup-node@v4
with:
node-version: 20.x
cache: yarn
- run: yarn --frozen-lockfile
- name: Build packages
run: yarn build --scope @budibase/server --scope @budibase/worker
- name: Build backend-core for OSS contributor (required for pro)
if: ${{ env.IS_OSS_CONTRIBUTOR == 'true' }}
run: yarn build --scope @budibase/backend-core
- name: Run tests
run: |
cd qa-core
yarn setup
yarn serve:test:self:ci
env:
BB_ADMIN_USER_EMAIL: admin
BB_ADMIN_USER_PASSWORD: admin

check-pro-submodule:
runs-on: ubuntu-latest
if: inputs.run_as_oss != true && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase')
@@ -242,27 +203,34 @@ jobs:
echo "pro_commit=$pro_commit"
echo "pro_commit=$pro_commit" >> "$GITHUB_OUTPUT"
echo "base_commit=$base_commit"
echo "base_commit=$base_commit" >> "$GITHUB_OUTPUT"

base_commit_excluding_merges=$(git log --no-merges -n 1 --format=format:%H $base_commit)
echo "base_commit_excluding_merges=$base_commit_excluding_merges"
echo "base_commit_excluding_merges=$base_commit_excluding_merges" >> "$GITHUB_OUTPUT"
else
echo "Nothing to do - branch to branch merge."
fi

- name: Check submodule merged to base branch
if: ${{ steps.get_pro_commits.outputs.base_commit != '' }}
uses: actions/github-script@v7
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
const submoduleCommit = '${{ steps.get_pro_commits.outputs.pro_commit }}';
const baseCommit = '${{ steps.get_pro_commits.outputs.base_commit }}';
- name: Check submodule merged and latest on base branch
if: ${{ steps.get_pro_commits.outputs.base_commit_excluding_merges != '' }}
run: |
cd packages/pro
base_commit_excluding_merges='${{ steps.get_pro_commits.outputs.base_commit_excluding_merges }}'
pro_commit='${{ steps.get_pro_commits.outputs.pro_commit }}'

if (submoduleCommit !== baseCommit) {
console.error('Submodule commit does not match the latest commit on the "${{ steps.get_pro_commits.outputs.target_branch }}" branch.');
console.error('Refer to the pro repo to merge your changes: https://github.com/Budibase/budibase-pro/blob/master/docs/getting_started.md')
process.exit(1);
} else {
console.log('All good, the submodule had been merged and setup correctly!')
}
any_commit=$(git log --no-merges $base_commit_excluding_merges...$pro_commit)

if [ -n "$any_commit" ]; then
echo $any_commit

echo "An error occurred: <error_message>"
echo 'Submodule commit does not match the latest commit on the "${{ steps.get_pro_commits.outputs.target_branch }}" branch.'
echo 'Refer to the pro repo to merge your changes: https://github.com/Budibase/budibase-pro/blob/master/docs/getting_started.md'

exit 1
else
echo 'All good, the submodule had been merged and setup correctly!'
fi

check-accountportal-submodule:
runs-on: ubuntu-latest
@@ -275,7 +243,15 @@ jobs:
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
fetch-depth: 0

- name: Check account portal commit
- uses: dorny/paths-filter@v3
id: changes
with:
filters: |
src:
- packages/account-portal/**

- if: steps.changes.outputs.src == 'true'
name: Check account portal commit
id: get_accountportal_commits
run: |
cd packages/account-portal
@@ -5,6 +5,9 @@ packages/server/runtime_apps/
bb-airgapped.tar.gz
*.iml

packages/server/build/oldClientVersions/**/*
packages/builder/src/components/deploy/clientVersions.json

# Logs
logs
*.log
@@ -66,7 +69,6 @@ typings/

# dotenv environment variables file
.env
!qa-core/.env
!hosting/.env

# parcel-bundler cache (https://parceljs.org/)
@@ -107,3 +109,4 @@ budibase-component
budibase-datasource

*.iml
.nx
@@ -1,4 +1,3 @@

{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
@@ -20,6 +19,13 @@
"runtimeArgs": ["--nolazy", "-r", "ts-node/register/transpile-only"],
"args": ["${workspaceFolder}/packages/worker/src/index.ts"],
"cwd": "${workspaceFolder}/packages/worker"
},
{
"type": "chrome",
"request": "launch",
"name": "Launch Chrome against localhost",
"url": "http://localhost:10000",
"webRoot": "${workspaceFolder}"
}
],
"compounds": [
@@ -140,7 +140,7 @@ $ helm install --create-namespace --namespace budibase budibase . -f values.yaml
| ingress.className | string | `""` | What ingress class to use. |
| ingress.enabled | bool | `true` | Whether to create an Ingress resource pointing to the Budibase proxy. |
| ingress.hosts | list | `[]` | Standard hosts block for the Ingress resource. Defaults to pointing to the Budibase proxy. |
| nameOverride | string | `""` | Override the name of the deploymen. Defaults to {{ .Chart.Name }}. |
| nameOverride | string | `""` | Override the name of the deployment. Defaults to {{ .Chart.Name }}. |
| service.port | int | `10000` | Port to expose on the service. |
| service.type | string | `"ClusterIP"` | Service type for the service that points to the main Budibase proxy pod. |
| serviceAccount.annotations | object | `{}` | Annotations to add to the service account |
@@ -1,6 +1,6 @@
# -- Passed to all pods created by this chart. Should not ordinarily need to be changed.
imagePullSecrets: []
# -- Override the name of the deploymen. Defaults to {{ .Chart.Name }}.
# -- Override the name of the deployment. Defaults to {{ .Chart.Name }}.
nameOverride: ""

serviceAccount:
@@ -7,11 +7,12 @@ module.exports = {

if (
/^@budibase\/[^/]+\/.*$/.test(importPath) &&
importPath !== "@budibase/backend-core/tests"
importPath !== "@budibase/backend-core/tests" &&
importPath !== "@budibase/string-templates/test/utils"
) {
context.report({
node,
message: `Importing from @budibase is not allowed, except for @budibase/backend-core/tests.`,
message: `Importing from @budibase is not allowed, except for @budibase/backend-core/tests and @budibase/string-templates/test/utils.`,
})
}
},
@@ -24,11 +25,9 @@ module.exports = {
docs: {
description:
"disallow the use of 'test.com' in strings and replace it with 'example.com'",
category: "Possible Errors",
recommended: false,
},
schema: [], // no options
fixable: "code", // Indicates that this rule supports automatic fixing
schema: [],
fixable: "code",
},
create: function (context) {
return {
@@ -51,4 +50,39 @@ module.exports = {
}
},
},
"email-domain-example-com": {
meta: {
type: "problem",
docs: {
description:
"enforce using the example.com domain for generator.email calls",
},
fixable: "code",
schema: [],
},
create: function (context) {
return {
CallExpression(node) {
if (
node.callee.type === "MemberExpression" &&
node.callee.object.name === "generator" &&
node.callee.property.name === "email" &&
node.arguments.length === 0
) {
context.report({
node,
message:
"Prefer using generator.email with the domain \"{ domain: 'example.com' }\".",
fix: function (fixer) {
return fixer.replaceText(
node,
'generator.email({ domain: "example.com" })'
)
},
})
}
},
}
},
},
}
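For reference, a minimal before/after sketch of what the new email-domain-example-com rule flags and how its fixer rewrites it (the surrounding test code is hypothetical):

  // Flagged: generator.email() called with no arguments
  const email = generator.email()

  // After the rule's autofix:
  const email = generator.email({ domain: "example.com" })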
@@ -0,0 +1,25 @@
import { GenericContainer, Wait } from "testcontainers"

export default async function setup() {
await new GenericContainer("budibase/couchdb")
.withExposedPorts(5984)
.withEnvironment({
COUCHDB_PASSWORD: "budibase",
COUCHDB_USER: "budibase",
})
.withCopyContentToContainer([
{
content: `
[log]
level = warn
`,
target: "/opt/couchdb/etc/local.d/test-couchdb.ini",
},
])
.withWaitStrategy(
Wait.forSuccessfulCommand(
"curl http://budibase:budibase@localhost:5984/_up"
).withStartupTimeout(20000)
)
.start()
}
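Jest only runs this file when a config references it; the backend-core jest config change further down in this commit does exactly that, pointing at this bootstrap instead of the removed @trendyol/jest-testcontainers preset:

  // jest config (as changed later in this commit)
  const baseConfig: Config.InitialProjectOptions = {
    globalSetup: "./../../globalSetup.ts",
    // ...
  }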
@@ -12,8 +12,6 @@ COPY .yarnrc .

COPY packages/server/package.json packages/server/package.json
COPY packages/worker/package.json packages/worker/package.json
# string-templates does not get bundled during the esbuild process, so we want to use the local version
COPY packages/string-templates/package.json packages/string-templates/package.json


COPY scripts/removeWorkspaceDependencies.sh scripts/removeWorkspaceDependencies.sh
@@ -26,7 +24,7 @@ RUN ./scripts/removeWorkspaceDependencies.sh packages/worker/package.json
RUN echo '' > scripts/syncProPackage.js
RUN jq 'del(.scripts.postinstall)' package.json > temp.json && mv temp.json package.json
RUN ./scripts/removeWorkspaceDependencies.sh package.json
RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn install --production
RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn install --production --frozen-lockfile

# copy the actual code
COPY packages/server/dist packages/server/dist
@@ -35,7 +33,6 @@ COPY packages/server/client packages/server/client
COPY packages/server/builder packages/server/builder
COPY packages/worker/dist packages/worker/dist
COPY packages/worker/pm2.config.js packages/worker/pm2.config.js
COPY packages/string-templates packages/string-templates


FROM budibase/couchdb:v3.3.3 as runner
@@ -52,11 +49,11 @@ RUN apt-get update && \

# Install postgres client for pg_dump utils
RUN apt install -y software-properties-common apt-transport-https ca-certificates gnupg \
&& curl -fsSl https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor | tee /usr/share/keyrings/postgresql.gpg > /dev/null \
&& echo deb [arch=amd64,arm64,ppc64el signed-by=/usr/share/keyrings/postgresql.gpg] http://apt.postgresql.org/pub/repos/apt/ $(lsb_release -cs)-pgdg main | tee /etc/apt/sources.list.d/postgresql.list \
&& apt update -y \
&& apt install postgresql-client-15 -y \
&& apt remove software-properties-common apt-transport-https gpg -y
&& curl -fsSl https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor | tee /usr/share/keyrings/postgresql.gpg > /dev/null \
&& echo deb [arch=amd64,arm64,ppc64el signed-by=/usr/share/keyrings/postgresql.gpg] http://apt.postgresql.org/pub/repos/apt/ $(lsb_release -cs)-pgdg main | tee /etc/apt/sources.list.d/postgresql.list \
&& apt update -y \
&& apt install postgresql-client-15 -y \
&& apt remove software-properties-common apt-transport-https gpg -y

# We use pm2 in order to run multiple node processes in a single container
RUN npm install --global pm2
@@ -100,9 +97,6 @@ COPY --from=build /app/node_modules /node_modules
COPY --from=build /app/package.json /package.json
COPY --from=build /app/packages/server /app
COPY --from=build /app/packages/worker /worker
COPY --from=build /app/packages/string-templates /string-templates

RUN cd /string-templates && yarn link && cd ../app && yarn link @budibase/string-templates && cd ../worker && yarn link @budibase/string-templates


EXPOSE 80
@@ -1,16 +0,0 @@
module.exports = () => {
return {
couchdb: {
image: "budibase/couchdb",
ports: [5984],
env: {
COUCHDB_PASSWORD: "budibase",
COUCHDB_USER: "budibase",
},
wait: {
type: "ports",
timeout: 20000,
}
}
}
}
@@ -1,5 +1,5 @@
{
"version": "2.21.0",
"version": "2.22.15",
"npmClient": "yarn",
"packages": [
"packages/*",
package.json
@@ -12,6 +12,7 @@
"esbuild-node-externals": "^1.8.0",
"eslint": "^8.52.0",
"eslint-plugin-import": "^2.29.0",
"eslint-plugin-jest": "^27.9.0",
"eslint-plugin-local-rules": "^2.0.0",
"eslint-plugin-svelte": "^2.34.0",
"husky": "^8.0.3",
@@ -25,12 +26,16 @@
"svelte": "^4.2.10",
"svelte-eslint-parser": "^0.33.1",
"typescript": "5.2.2",
"typescript-eslint": "^7.3.1",
"yargs": "^17.7.2"
},
"scripts": {
"preinstall": "node scripts/syncProPackage.js",
"get-past-client-version": "node scripts/getPastClientVersion.js",
"setup": "git config submodule.recurse true && git submodule update && node ./hosting/scripts/setup.js && yarn && yarn build && yarn dev",
"build": "NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream",
"build:oss": "NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream --ignore @budibase/account-portal --ignore @budibase/account-portal-server --ignore @budibase/account-portal-ui",
"build:account-portal": "NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream --scope @budibase/account-portal --scope @budibase/account-portal-server --scope @budibase/account-portal-ui",
"build:dev": "lerna run --stream prebuild && yarn nx run-many --target=build --output-style=dynamic --watch --preserveWatchOutput",
"check:types": "lerna run check:types",
"build:sdk": "lerna run --stream build:sdk",
@@ -53,11 +58,11 @@
"dev:built": "yarn run kill-all && cd packages/server && yarn dev:stack:up && cd ../../ && lerna run --stream dev:built",
"dev:docker": "yarn build --scope @budibase/server --scope @budibase/worker && docker-compose -f hosting/docker-compose.build.yaml -f hosting/docker-compose.dev.yaml --env-file hosting/.env up --build --scale proxy-service=0",
"test": "lerna run --stream test --stream",
"lint:eslint": "eslint packages qa-core --max-warnings=0",
"lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\" && prettier --check \"qa-core/**/*.{js,ts,svelte}\"",
"lint:eslint": "eslint packages --max-warnings=0",
"lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\"",
"lint": "yarn run lint:eslint && yarn run lint:prettier",
"lint:fix:eslint": "eslint --fix --max-warnings=0 packages qa-core",
"lint:fix:prettier": "prettier --write \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\" && prettier --write \"qa-core/**/*.{js,ts,svelte}\"",
"lint:fix:eslint": "eslint --fix --max-warnings=0 packages",
"lint:fix:prettier": "prettier --write \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\"",
"lint:fix": "yarn run lint:fix:eslint && yarn run lint:fix:prettier",
"build:specs": "lerna run --stream specs",
"build:docker:airgap": "node hosting/scripts/airgapped/airgappedDockerBuild",
@@ -1 +1 @@
Subproject commit 806b6fd5c11c284ebf4a01627d75db939f0f8152
Subproject commit 360ad2dc29c3f1fd5a1182ae258c45666b7f5eb1
@@ -1,8 +0,0 @@
const { join } = require("path")
require("dotenv").config({
path: join(__dirname, "..", "..", "hosting", ".env"),
})

const jestTestcontainersConfigGenerator = require("../../jestTestcontainersConfigGenerator")

module.exports = jestTestcontainersConfigGenerator()
@@ -1,8 +1,8 @@
import { Config } from "@jest/types"

const baseConfig: Config.InitialProjectOptions = {
preset: "@trendyol/jest-testcontainers",
setupFiles: ["./tests/jestEnv.ts"],
globalSetup: "./../../globalSetup.ts",
setupFilesAfterEnv: ["./tests/jestSetup.ts"],
transform: {
"^.+\\.ts?$": "@swc/jest",
@@ -60,14 +60,13 @@
"@shopify/jest-koa-mocks": "5.1.1",
"@swc/core": "1.3.71",
"@swc/jest": "0.2.27",
"@trendyol/jest-testcontainers": "^2.1.1",
"@types/chance": "1.1.3",
"@types/cookies": "0.7.8",
"@types/jest": "29.5.5",
"@types/lodash": "4.14.200",
"@types/node-fetch": "2.6.4",
"@types/pouchdb": "6.4.0",
"@types/redlock": "4.0.3",
"@types/redlock": "4.0.7",
"@types/semver": "7.3.7",
"@types/tar-fs": "2.0.1",
"@types/uuid": "8.3.4",
@@ -78,6 +77,7 @@
"jest-serial-runner": "1.2.1",
"pino-pretty": "10.0.0",
"pouchdb-adapter-memory": "7.2.2",
"testcontainers": "^10.7.2",
"timekeeper": "2.2.0",
"typescript": "5.2.2"
},
@@ -4,10 +4,10 @@ set -e
if [[ -n $CI ]]
then
# --runInBand performs better in ci where resources are limited
echo "jest --coverage --runInBand --forceExit"
jest --coverage --runInBand --forceExit
echo "jest --coverage --runInBand --forceExit $@"
jest --coverage --runInBand --forceExit $@
else
# --maxWorkers performs better in development
echo "jest --coverage --detectOpenHandles"
jest --coverage --detectOpenHandles
echo "jest --coverage --forceExit --detectOpenHandles $@"
jest --coverage --forceExit --detectOpenHandles $@
fi
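Forwarding `$@` means extra arguments now pass straight through to Jest; assuming a package's test script invokes this file, an invocation like `yarn test --testPathPattern=docWritethrough` would expand in CI to `jest --coverage --runInBand --forceExit --testPathPattern=docWritethrough`.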
@@ -133,7 +133,7 @@ export async function refreshOAuthToken(
configId?: string
): Promise<RefreshResponse> {
switch (providerType) {
case SSOProviderType.OIDC:
case SSOProviderType.OIDC: {
if (!configId) {
return { err: { data: "OIDC config id not provided" } }
}
@@ -142,12 +142,14 @@ export async function refreshOAuthToken(
return { err: { data: "OIDC configuration not found" } }
}
return refreshOIDCAccessToken(oidcConfig, refreshToken)
case SSOProviderType.GOOGLE:
}
case SSOProviderType.GOOGLE: {
let googleConfig = await configs.getGoogleConfig()
if (!googleConfig) {
return { err: { data: "Google configuration not found" } }
}
return refreshGoogleAccessToken(googleConfig, refreshToken)
}
}
}
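The braces added around each case body give block scope to declarations such as `let googleConfig`, which would otherwise be shared across the whole switch and could collide between cases. A minimal illustration (names hypothetical, not from this commit):

  switch (provider) {
    case "google": {
      // scoped to this case only because of the braces
      let config = loadGoogleConfig()
      return refresh(config)
    }
    case "oidc": {
      let config = loadOidcConfig() // no redeclaration error, thanks to the braces
      return refresh(config)
    }
  }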
@@ -8,7 +8,7 @@ describe("platformLogout", () => {
await testEnv.withTenant(async () => {
const ctx = structures.koa.newContext()
await auth.platformLogout({ ctx, userId: "test" })
expect(events.auth.logout).toBeCalledTimes(1)
expect(events.auth.logout).toHaveBeenCalledTimes(1)
})
})
})
@@ -23,6 +23,18 @@ export default class BaseCache {
return client.keys(pattern)
}

async exists(key: string, opts = { useTenancy: true }) {
key = opts.useTenancy ? generateTenantKey(key) : key
const client = await this.getClient()
return client.exists(key)
}

async scan(key: string, opts = { useTenancy: true }) {
key = opts.useTenancy ? generateTenantKey(key) : key
const client = await this.getClient()
return client.scan(key)
}

/**
* Read only from the cache.
*/
@@ -32,6 +44,15 @@ export default class BaseCache {
return client.get(key)
}

/**
* Read only from the cache.
*/
async bulkGet<T>(keys: string[], opts = { useTenancy: true }) {
keys = opts.useTenancy ? keys.map(key => generateTenantKey(key)) : keys
const client = await this.getClient()
return client.bulkGet<T>(keys)
}

/**
* Write to the cache.
*/
@@ -46,6 +67,25 @@ export default class BaseCache {
await client.store(key, value, ttl)
}

/**
* Bulk write to the cache.
*/
async bulkStore(
data: Record<string, any>,
ttl: number | null = null,
opts = { useTenancy: true }
) {
if (opts.useTenancy) {
data = Object.entries(data).reduce((acc, [key, value]) => {
acc[generateTenantKey(key)] = value
return acc
}, {} as Record<string, any>)
}

const client = await this.getClient()
await client.bulkStore(data, ttl)
}

/**
* Remove from cache.
*/
@@ -55,15 +95,24 @@ export default class BaseCache {
return client.delete(key)
}

/**
* Remove from cache.
*/
async bulkDelete(keys: string[], opts = { useTenancy: true }) {
keys = opts.useTenancy ? keys.map(key => generateTenantKey(key)) : keys
const client = await this.getClient()
return client.bulkDelete(keys)
}

/**
* Read from the cache. Write to the cache if not exists.
*/
async withCache(
async withCache<T>(
key: string,
ttl: number,
fetchFn: any,
ttl: number | null = null,
fetchFn: () => Promise<T> | T,
opts = { useTenancy: true }
) {
): Promise<T> {
const cachedValue = await this.get(key, opts)
if (cachedValue) {
return cachedValue
@@ -80,7 +129,7 @@ export default class BaseCache {
}
}

async bustCache(key: string, opts = { client: null }) {
async bustCache(key: string) {
const client = await this.getClient()
try {
await client.delete(generateTenantKey(key))
@@ -89,4 +138,13 @@ export default class BaseCache {
throw err
}
}

/**
* Delete the entry if the provided value matches the stored one.
*/
async deleteIfValue(key: string, value: any, opts = { useTenancy: true }) {
key = opts.useTenancy ? generateTenantKey(key) : key
const client = await this.getClient()
await client.deleteIfValue(key, value)
}
}
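A short usage sketch of the new cache helpers (the construction, `User` type and fetch function are illustrative assumptions, not from this commit):

  const cache = new BaseCache()

  // Typed read-through: fetch on miss, then cache for an hour
  const user = await cache.withCache<User>("user_123", 3600, () => fetchUser("123"))

  // Bulk write, then bulk read - tenant-scoped by default
  await cache.bulkStore({ "flag:a": true, "flag:b": false }, 60)
  const flags = await cache.bulkGet<boolean>(["flag:a", "flag:b"])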
@@ -0,0 +1,105 @@
import { AnyDocument, Database } from "@budibase/types"

import { JobQueue, Queue, createQueue } from "../queue"
import * as dbUtils from "../db"

interface ProcessDocMessage {
dbName: string
docId: string
data: Record<string, any>
}

const PERSIST_MAX_ATTEMPTS = 100
let processor: DocWritethroughProcessor | undefined

export class DocWritethroughProcessor {
private static _queue: Queue

public static get queue() {
if (!DocWritethroughProcessor._queue) {
DocWritethroughProcessor._queue = createQueue<ProcessDocMessage>(
JobQueue.DOC_WRITETHROUGH_QUEUE,
{
jobOptions: {
attempts: PERSIST_MAX_ATTEMPTS,
},
}
)
}

return DocWritethroughProcessor._queue
}

init() {
DocWritethroughProcessor.queue.process(async message => {
try {
await this.persistToDb(message.data)
} catch (err: any) {
if (err.status === 409) {
// If we get a 409, it means that another job updated it meanwhile. We want to retry it to persist it again.
throw new Error(
`Conflict persisting message ${message.id}. Attempt ${message.attemptsMade}`
)
}

throw err
}
})
return this
}

private async persistToDb({
dbName,
docId,
data,
}: {
dbName: string
docId: string
data: Record<string, any>
}) {
const db = dbUtils.getDB(dbName)
let doc: AnyDocument | undefined
try {
doc = await db.get(docId)
} catch {
doc = { _id: docId }
}

doc = { ...doc, ...data }
await db.put(doc)
}
}

export class DocWritethrough {
private db: Database
private _docId: string

constructor(db: Database, docId: string) {
this.db = db
this._docId = docId
}

get docId() {
return this._docId
}

async patch(data: Record<string, any>) {
await DocWritethroughProcessor.queue.add({
dbName: this.db.name,
docId: this.docId,
data,
})
}
}

export function init(): DocWritethroughProcessor {
processor = new DocWritethroughProcessor().init()
return processor
}

export function getProcessor(): DocWritethroughProcessor {
if (!processor) {
return init()
}
return processor
}
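A brief usage sketch, assuming a tenant context and a database name as used elsewhere in backend-core (identifiers illustrative):

  const db = dbUtils.getDB("app_dev_123")
  const writethrough = new DocWritethrough(db, "doc_123")

  // Each patch enqueues a message; the processor merges it into the stored
  // document and retries on 409 conflicts, up to PERSIST_MAX_ATTEMPTS times.
  await writethrough.patch({ lastUsed: Date.now() })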
@@ -26,7 +26,8 @@ export const store = (...args: Parameters<typeof GENERIC.store>) =>
GENERIC.store(...args)
export const destroy = (...args: Parameters<typeof GENERIC.delete>) =>
GENERIC.delete(...args)
export const withCache = (...args: Parameters<typeof GENERIC.withCache>) =>
GENERIC.withCache(...args)
export const withCache = <T>(
...args: Parameters<typeof GENERIC.withCache<T>>
) => GENERIC.withCache(...args)
export const bustCache = (...args: Parameters<typeof GENERIC.bustCache>) =>
GENERIC.bustCache(...args)
@@ -5,3 +5,4 @@ export * as writethrough from "./writethrough"
export * as invite from "./invite"
export * as passwordReset from "./passwordReset"
export * from "./generic"
export * as docWritethrough from "./docWritethrough"
@@ -1,5 +1,5 @@
import * as utils from "../utils"
import { Duration, DurationType } from "../utils"
import { Duration } from "../utils"
import env from "../environment"
import { getTenantId } from "../context"
import * as redis from "../redis/init"
@@ -0,0 +1,294 @@
import tk from "timekeeper"

import _ from "lodash"
import { DBTestConfiguration, generator, structures } from "../../../tests"
import { getDB } from "../../db"

import {
DocWritethrough,
DocWritethroughProcessor,
init,
} from "../docWritethrough"

import InMemoryQueue from "../../queue/inMemoryQueue"

const initialTime = Date.now()

async function waitForQueueCompletion() {
const queue: InMemoryQueue = DocWritethroughProcessor.queue as never
await queue.waitForCompletion()
}

describe("docWritethrough", () => {
beforeAll(() => {
init()
})

const config = new DBTestConfiguration()

const db = getDB(structures.db.id())
let documentId: string
let docWritethrough: DocWritethrough

describe("patch", () => {
function generatePatchObject(fieldCount: number) {
const keys = generator.unique(() => generator.guid(), fieldCount)
return keys.reduce((acc, c) => {
acc[c] = generator.word()
return acc
}, {} as Record<string, any>)
}

beforeEach(async () => {
jest.clearAllMocks()
documentId = structures.uuid()
docWritethrough = new DocWritethrough(db, documentId)
})

it("patching will not persist until the messages are persisted", async () => {
await config.doInTenant(async () => {
await docWritethrough.patch(generatePatchObject(2))
await docWritethrough.patch(generatePatchObject(2))

expect(await db.exists(documentId)).toBe(false)
})
})

it("patching will persist when the messages are persisted", async () => {
await config.doInTenant(async () => {
const patch1 = generatePatchObject(2)
const patch2 = generatePatchObject(2)
await docWritethrough.patch(patch1)
await docWritethrough.patch(patch2)

await waitForQueueCompletion()

// This will not be persisted
const patch3 = generatePatchObject(3)
await docWritethrough.patch(patch3)

expect(await db.get(documentId)).toEqual({
_id: documentId,
...patch1,
...patch2,
_rev: expect.stringMatching(/2-.+/),
createdAt: new Date(initialTime).toISOString(),
updatedAt: new Date(initialTime).toISOString(),
})
})
})

it("patching will persist keeping the previous data", async () => {
await config.doInTenant(async () => {
const patch1 = generatePatchObject(2)
const patch2 = generatePatchObject(2)
await docWritethrough.patch(patch1)
await docWritethrough.patch(patch2)

await waitForQueueCompletion()

const patch3 = generatePatchObject(3)
await docWritethrough.patch(patch3)

await waitForQueueCompletion()

expect(await db.get(documentId)).toEqual(
expect.objectContaining({
_id: documentId,
...patch1,
...patch2,
...patch3,
})
)
})
})

it("date audit fields are set correctly when persisting", async () => {
await config.doInTenant(async () => {
const patch1 = generatePatchObject(2)
const patch2 = generatePatchObject(2)
await docWritethrough.patch(patch1)
const date1 = new Date()
await waitForQueueCompletion()
await docWritethrough.patch(patch2)

tk.travel(Date.now() + 100)
const date2 = new Date()
await waitForQueueCompletion()

expect(date1).not.toEqual(date2)
expect(await db.get(documentId)).toEqual(
expect.objectContaining({
createdAt: date1.toISOString(),
updatedAt: date2.toISOString(),
})
)
})
})

it("concurrent patches will override keys", async () => {
await config.doInTenant(async () => {
const patch1 = generatePatchObject(2)
await docWritethrough.patch(patch1)
await waitForQueueCompletion()
const patch2 = generatePatchObject(1)
await docWritethrough.patch(patch2)

const keyToOverride = _.sample(Object.keys(patch1))!
expect(await db.get(documentId)).toEqual(
expect.objectContaining({
[keyToOverride]: patch1[keyToOverride],
})
)

await waitForQueueCompletion()

const patch3 = {
...generatePatchObject(3),
[keyToOverride]: generator.word(),
}
await docWritethrough.patch(patch3)
await waitForQueueCompletion()

expect(await db.get(documentId)).toEqual(
expect.objectContaining({
...patch1,
...patch2,
...patch3,
})
)
})
})

it("concurrent patches to different docWritethrough will not pollute each other", async () => {
await config.doInTenant(async () => {
const secondDocWritethrough = new DocWritethrough(
db,
structures.db.id()
)

const doc1Patch = generatePatchObject(2)
await docWritethrough.patch(doc1Patch)
const doc2Patch = generatePatchObject(1)
await secondDocWritethrough.patch(doc2Patch)

await waitForQueueCompletion()

const doc1Patch2 = generatePatchObject(3)
await docWritethrough.patch(doc1Patch2)
const doc2Patch2 = generatePatchObject(3)
await secondDocWritethrough.patch(doc2Patch2)
await waitForQueueCompletion()

expect(await db.get(docWritethrough.docId)).toEqual(
expect.objectContaining({
...doc1Patch,
...doc1Patch2,
})
)

expect(await db.get(secondDocWritethrough.docId)).toEqual(
expect.objectContaining({
...doc2Patch,
...doc2Patch2,
})
)
})
})

it("cached values are persisted only once", async () => {
await config.doInTenant(async () => {
const initialPatch = generatePatchObject(5)

await docWritethrough.patch(initialPatch)
await waitForQueueCompletion()

expect(await db.get(documentId)).toEqual(
expect.objectContaining(initialPatch)
)

await db.remove(await db.get(documentId))

await waitForQueueCompletion()
const extraPatch = generatePatchObject(5)
await docWritethrough.patch(extraPatch)
await waitForQueueCompletion()

expect(await db.get(documentId)).toEqual(
expect.objectContaining(extraPatch)
)
expect(await db.get(documentId)).not.toEqual(
expect.objectContaining(initialPatch)
)
})
})

it("concurrent calls will not cause conflicts", async () => {
async function parallelPatch(count: number) {
const patches = Array.from({ length: count }).map(() =>
generatePatchObject(1)
)
await Promise.all(patches.map(p => docWritethrough.patch(p)))

return patches.reduce((acc, c) => {
acc = { ...acc, ...c }
return acc
}, {})
}
const queueMessageSpy = jest.spyOn(DocWritethroughProcessor.queue, "add")

await config.doInTenant(async () => {
let patches = await parallelPatch(5)
expect(queueMessageSpy).toHaveBeenCalledTimes(5)

await waitForQueueCompletion()
expect(await db.get(documentId)).toEqual(
expect.objectContaining(patches)
)

patches = { ...patches, ...(await parallelPatch(40)) }
expect(queueMessageSpy).toHaveBeenCalledTimes(45)

await waitForQueueCompletion()
expect(await db.get(documentId)).toEqual(
expect.objectContaining(patches)
)

patches = { ...patches, ...(await parallelPatch(10)) }
expect(queueMessageSpy).toHaveBeenCalledTimes(55)

await waitForQueueCompletion()
expect(await db.get(documentId)).toEqual(
expect.objectContaining(patches)
)
})
})

// This is not yet supported
// eslint-disable-next-line jest/no-disabled-tests
it.skip("patches will execute in order", async () => {
let incrementalValue = 0
const keyToOverride = generator.word()
async function incrementalPatches(count: number) {
for (let i = 0; i < count; i++) {
await docWritethrough.patch({ [keyToOverride]: incrementalValue++ })
}
}

await config.doInTenant(async () => {
await incrementalPatches(5)

await waitForQueueCompletion()
expect(await db.get(documentId)).toEqual(
expect.objectContaining({ [keyToOverride]: 5 })
)

await incrementalPatches(40)
await waitForQueueCompletion()
expect(await db.get(documentId)).toEqual(
expect.objectContaining({ [keyToOverride]: 45 })
)
})
})
})
})
@@ -55,8 +55,8 @@ describe("user cache", () => {
})),
})

expect(UserDB.bulkGet).toBeCalledTimes(1)
expect(UserDB.bulkGet).toBeCalledWith(userIdsToRequest)
expect(UserDB.bulkGet).toHaveBeenCalledTimes(1)
expect(UserDB.bulkGet).toHaveBeenCalledWith(userIdsToRequest)
})

it("on a second all, all of them are retrieved from cache", async () => {
@@ -82,7 +82,7 @@ describe("user cache", () => {
),
})

expect(UserDB.bulkGet).toBeCalledTimes(1)
expect(UserDB.bulkGet).toHaveBeenCalledTimes(1)
})

it("when some users are cached, only the missing ones are retrieved from db", async () => {
@@ -110,8 +110,8 @@ describe("user cache", () => {
),
})

expect(UserDB.bulkGet).toBeCalledTimes(1)
expect(UserDB.bulkGet).toBeCalledWith([
expect(UserDB.bulkGet).toHaveBeenCalledTimes(1)
expect(UserDB.bulkGet).toHaveBeenCalledWith([
userIdsToRequest[1],
userIdsToRequest[2],
userIdsToRequest[4],
@@ -6,7 +6,7 @@ import env from "../environment"
import * as accounts from "../accounts"
import { UserDB } from "../users"
import { sdk } from "@budibase/shared-core"
import { User } from "@budibase/types"
import { User, UserMetadata } from "@budibase/types"

const EXPIRY_SECONDS = 3600
@@ -15,7 +15,7 @@ const EXPIRY_SECONDS = 3600
*/
async function populateFromDB(userId: string, tenantId: string) {
const db = tenancy.getTenantDB(tenantId)
const user = await db.get<any>(userId)
const user = await db.get<UserMetadata>(userId)
user.budibaseAccess = true
if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) {
const account = await accounts.getAccount(user.email)
@@ -8,7 +8,7 @@ const DEFAULT_WRITE_RATE_MS = 10000
let CACHE: BaseCache | null = null

interface CacheItem<T extends Document> {
doc: any
doc: T
lastWrite: number
}
@@ -57,6 +57,9 @@ export const StaticDatabases = {
AUDIT_LOGS: {
name: "audit-logs",
},
SCIM_LOGS: {
name: "scim-logs",
},
}

export const APP_PREFIX = prefixed(DocumentType.APP)
@ -10,7 +10,7 @@ import {
|
|||
StaticDatabases,
|
||||
DEFAULT_TENANT_ID,
|
||||
} from "../constants"
|
||||
import { Database, IdentityContext } from "@budibase/types"
|
||||
import { Database, IdentityContext, Snippet, App } from "@budibase/types"
|
||||
import { ContextMap } from "./types"
|
||||
|
||||
let TEST_APP_ID: string | null = null
|
||||
|
@@ -35,6 +35,17 @@ export function getAuditLogDBName(tenantId?: string) {
}
}

export function getScimDBName(tenantId?: string) {
if (!tenantId) {
tenantId = getTenantId()
}
if (tenantId === DEFAULT_TENANT_ID) {
return StaticDatabases.SCIM_LOGS.name
} else {
return `${tenantId}${SEPARATOR}${StaticDatabases.SCIM_LOGS.name}`
}
}

export function baseGlobalDBName(tenantId: string | undefined | null) {
if (!tenantId || tenantId === DEFAULT_TENANT_ID) {
return StaticDatabases.GLOBAL.name
@@ -111,10 +122,10 @@ export async function doInAutomationContext<T>(params: {
automationId: string
task: () => T
}): Promise<T> {
const tenantId = getTenantIDFromAppID(params.appId)
await ensureSnippetContext()
return newContext(
{
tenantId,
tenantId: getTenantIDFromAppID(params.appId),
appId: params.appId,
automationId: params.automationId,
},
@@ -270,6 +281,27 @@ export function doInScimContext(task: any) {
return newContext(updates, task)
}

export async function ensureSnippetContext() {
const ctx = getCurrentContext()

// If we've already added snippets to context, continue
if (!ctx || ctx.snippets) {
return
}

// Otherwise get snippets for this app and update context
let snippets: Snippet[] | undefined
const db = getAppDB()
if (db && !env.isTest()) {
const app = await db.get<App>(DocumentType.APP_METADATA)
snippets = app.snippets
}

// Always set snippets to a non-null value so that we can tell we've attempted
// to load snippets
ctx.snippets = snippets || []
}

export function getEnvironmentVariables() {
const context = Context.get()
if (!context.environmentVariables) {
@@ -246,7 +246,7 @@ describe("context", () => {
context.doInAppMigrationContext(db.generateAppID(), async () => {
await otherContextCall()
})
).rejects.toThrowError(
).rejects.toThrow(
"The context cannot be changed, a migration is currently running"
)
}
@@ -1,5 +1,4 @@
import { IdentityContext, VM } from "@budibase/types"
import { ExecutionTimeTracker } from "../timers"
import { IdentityContext, Snippet, VM } from "@budibase/types"

// keep this out of Budibase types, don't want to expose context info
export type ContextMap = {

@@ -10,6 +9,7 @@ export type ContextMap = {
isScim?: boolean
automationId?: string
isMigrating?: boolean
jsExecutionTracker?: ExecutionTimeTracker
vm?: VM
cleanup?: (() => void | Promise<void>)[]
snippets?: Snippet[]
}
@@ -1,66 +1,57 @@
import PouchDB from "pouchdb"
import { getPouchDB, closePouchDB } from "./couch"
import { DocumentType } from "../constants"

class Replication {
source: any
target: any
replication: any
source: PouchDB.Database
target: PouchDB.Database

/**
*
* @param source - the DB you want to replicate or rollback to
* @param target - the DB you want to replicate to, or rollback from
*/
constructor({ source, target }: any) {
constructor({ source, target }: { source: string; target: string }) {
this.source = getPouchDB(source)
this.target = getPouchDB(target)
}

close() {
return Promise.all([closePouchDB(this.source), closePouchDB(this.target)])
async close() {
await Promise.all([closePouchDB(this.source), closePouchDB(this.target)])
}

promisify(operation: any, opts = {}) {
return new Promise(resolve => {
operation(this.target, opts)
.on("denied", function (err: any) {
replicate(opts: PouchDB.Replication.ReplicateOptions = {}) {
return new Promise<PouchDB.Replication.ReplicationResult<{}>>(resolve => {
this.source.replicate
.to(this.target, opts)
.on("denied", function (err) {
// a document failed to replicate (e.g. due to permissions)
throw new Error(`Denied: Document failed to replicate ${err}`)
})
.on("complete", function (info: any) {
.on("complete", function (info) {
return resolve(info)
})
.on("error", function (err: any) {
throw new Error(`Replication Error: ${err}`)
.on("error", function (err) {
throw err
})
})
}

/**
* Two way replication operation, intended to be promise based.
* @param opts - PouchDB replication options
*/
sync(opts = {}) {
this.replication = this.promisify(this.source.sync, opts)
return this.replication
}
appReplicateOpts(
opts: PouchDB.Replication.ReplicateOptions = {}
): PouchDB.Replication.ReplicateOptions {
if (typeof opts.filter === "string") {
return opts
}

/**
* One way replication operation, intended to be promise based.
* @param opts - PouchDB replication options
*/
replicate(opts = {}) {
this.replication = this.promisify(this.source.replicate.to, opts)
return this.replication
}
const filter = opts.filter
delete opts.filter

appReplicateOpts() {
return {
filter: (doc: any) => {
...opts,
filter: (doc: any, params: any) => {
if (doc._id && doc._id.startsWith(DocumentType.AUTOMATION_LOG)) {
return false
}
return doc._id !== DocumentType.APP_METADATA
if (doc._id === DocumentType.APP_METADATA) {
return false
}
return filter ? filter(doc, params) : true
},
}
}

@@ -75,10 +66,6 @@ class Replication {
// take the opportunity to remove deleted tombstones
await this.replicate()
}

cancel() {
this.replication.cancel()
}
}

export default Replication
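The reworked appReplicateOpts now composes with a caller-supplied filter instead of discarding it: automation logs and app metadata are always excluded, and any extra filter applies on top. A sketch of the intended call pattern (caller code hypothetical):

  const replication = new Replication({ source: "app_dev_123", target: "app_prod_123" })
  await replication.replicate(
    replication.appReplicateOpts({ filter: doc => !doc.internal })
  )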
@@ -70,7 +70,15 @@ export class DatabaseImpl implements Database {
DatabaseImpl.nano = buildNano(couchInfo)
}

async exists() {
exists(docId?: string) {
if (docId === undefined) {
return this.dbExists()
}

return this.docExists(docId)
}

private async dbExists() {
const response = await directCouchUrlCall({
url: `${this.couchInfo.url}/${this.name}`,
method: "HEAD",
@@ -79,6 +87,15 @@ export class DatabaseImpl implements Database {
return response.status === 200
}

private async docExists(id: string): Promise<boolean> {
try {
await this.performCall(db => () => db.head(id))
return true
} catch {
return false
}
}

private nano() {
return this.instanceNano || DatabaseImpl.nano
}
@@ -24,9 +24,12 @@ export class DDInstrumentedDatabase implements Database {
return this.db.name
}

exists(): Promise<boolean> {
exists(docId?: string): Promise<boolean> {
return tracer.trace("db.exists", span => {
span?.addTags({ db_name: this.name })
span?.addTags({ db_name: this.name, doc_id: docId })
if (docId) {
return this.db.exists(docId)
}
return this.db.exists()
})
}
@@ -10,10 +10,6 @@ interface SearchResponse<T> {
totalRows: number
}

interface PaginatedSearchResponse<T> extends SearchResponse<T> {
hasNextPage: boolean
}

export type SearchParams<T> = {
tableId?: string
sort?: string

@@ -247,7 +243,7 @@ export class QueryBuilder<T> {
}
// Escape characters
if (!this.#noEscaping && escape && originalType === "string") {
value = `${value}`.replace(/[ \/#+\-&|!(){}\]^"~*?:\\]/g, "\\$&")
value = `${value}`.replace(/[ /#+\-&|!(){}\]^"~*?:\\]/g, "\\$&")
}

// Wrap in quotes
@@ -34,12 +34,12 @@ export async function createUserIndex() {
}
let idxKey = prev != null ? `${prev}.${key}` : key
if (typeof input[key] === "string") {
// @ts-expect-error index is available in a CouchDB map function
// eslint-disable-next-line no-undef
// @ts-ignore
index(idxKey, input[key].toLowerCase(), { facet: true })
} else if (typeof input[key] !== "object") {
// @ts-expect-error index is available in a CouchDB map function
// eslint-disable-next-line no-undef
// @ts-ignore
index(idxKey, input[key], { facet: true })
} else {
idx(input[key], idxKey)
@@ -0,0 +1,55 @@
import _ from "lodash"
import { AnyDocument } from "@budibase/types"
import { generator } from "../../../tests"
import { DatabaseImpl } from "../couch"
import { newid } from "../../utils"

describe("DatabaseImpl", () => {
const database = new DatabaseImpl(generator.word())
const documents: AnyDocument[] = []

beforeAll(async () => {
const docsToCreate = Array.from({ length: 10 }).map(() => ({
_id: newid(),
}))
const createdDocs = await database.bulkDocs(docsToCreate)

documents.push(...createdDocs.map((x: any) => ({ _id: x.id, _rev: x.rev })))
})

describe("document exists", () => {
it("can check existing docs by id", async () => {
const existingDoc = _.sample(documents)
const result = await database.exists(existingDoc!._id!)

expect(result).toBe(true)
})

it("can check non existing docs by id", async () => {
const result = await database.exists(newid())

expect(result).toBe(false)
})

it("can check an existing doc by id multiple times", async () => {
const existingDoc = _.sample(documents)
const id = existingDoc!._id!

const results = []
results.push(await database.exists(id))
results.push(await database.exists(id))
results.push(await database.exists(id))

expect(results).toEqual([true, true, true])
})

it("returns false after the doc is deleted", async () => {
const existingDoc = _.sample(documents)
const id = existingDoc!._id!
expect(await database.exists(id)).toBe(true)

await database.remove(existingDoc!)
expect(await database.exists(id)).toBe(false)
})
})
})
@@ -17,13 +17,8 @@ export function init(processors: ProcessorMap) {
// if not processing in this instance, kick it off
if (!processingPromise) {
processingPromise = asyncEventQueue.process(async job => {
const { event, identity, properties, timestamp } = job.data
await documentProcessor.processEvent(
event,
identity,
properties,
timestamp
)
const { event, identity, properties } = job.data
await documentProcessor.processEvent(event, identity, properties)
})
}
}
@@ -186,6 +186,7 @@ const environment = {
environment[key] = value
},
ROLLING_LOG_MAX_SIZE: process.env.ROLLING_LOG_MAX_SIZE || "10M",
DISABLE_SCIM_CALLS: process.env.DISABLE_SCIM_CALLS,
}

// clean up any environment variable edge cases
@@ -1,4 +1,4 @@
import { Event } from "@budibase/types"
import { Event, Identity } from "@budibase/types"
import { processors } from "./processors"
import identification from "./identification"
import * as backfill from "./backfill"

@@ -7,12 +7,19 @@ import { publishAsyncEvent } from "./asyncEvents"
export const publishEvent = async (
event: Event,
properties: any,
timestamp?: string | number
timestamp?: string | number,
identityOverride?: Identity
) => {
// in future this should use async events via a distributed queue.
const identity = await identification.getCurrentIdentity()
const identity =
identityOverride || (await identification.getCurrentIdentity())

// Backfilling is get from the user cache, but when we override the identity cache is not available. Overrides are
// normally performed in automatic actions or operations in async flows (BPM) where the user session is not available.
const backfilling = identityOverride
? false
: await backfill.isBackfillingEvent(event)

const backfilling = await backfill.isBackfillingEvent(event)
// no backfill - send the event and exit
if (!backfilling) {
// send off async events if required
@@ -1,7 +1,6 @@
import {
Event,
Identity,
Group,
IdentityType,
AuditLogQueueEvent,
AuditLogFn,

@@ -79,11 +78,11 @@ export default class AuditLogsProcessor implements EventProcessor {
}
}

async identify(identity: Identity, timestamp?: string | number) {
async identify() {
// no-op
}

async identifyGroup(group: Group, timestamp?: string | number) {
async identifyGroup() {
// no-op
}
@@ -8,8 +8,7 @@ export default class LoggingProcessor implements EventProcessor {
  async processEvent(
    event: Event,
    identity: Identity,
    properties: any,
    timestamp?: string
    properties: any
  ): Promise<void> {
    if (skipLogging) {
      return

@@ -17,14 +16,14 @@ export default class LoggingProcessor implements EventProcessor {
    console.log(`[audit] [identityType=${identity.type}] ${event}`, properties)
  }

  async identify(identity: Identity, timestamp?: string | number) {
  async identify(identity: Identity) {
    if (skipLogging) {
      return
    }
    console.log(`[audit] identified`, identity)
  }

  async identifyGroup(group: Group, timestamp?: string | number) {
  async identifyGroup(group: Group) {
    if (skipLogging) {
      return
    }

@@ -14,12 +14,7 @@ export default class DocumentUpdateProcessor implements EventProcessor {
    this.processors = processors
  }

  async processEvent(
    event: Event,
    identity: Identity,
    properties: any,
    timestamp?: string | number
  ) {
  async processEvent(event: Event, identity: Identity, properties: any) {
    const tenantId = identity.realTenantId
    const docId = getDocumentId(event, properties)
    if (!tenantId || !docId) {

@@ -5,13 +5,19 @@ import {
  AccountCreatedEvent,
  AccountDeletedEvent,
  AccountVerifiedEvent,
  Identity,
} from "@budibase/types"

async function created(account: Account) {
async function created(account: Account, identityOverride?: Identity) {
  const properties: AccountCreatedEvent = {
    tenantId: account.tenantId,
  }
  await publishEvent(Event.ACCOUNT_CREATED, properties)
  await publishEvent(
    Event.ACCOUNT_CREATED,
    properties,
    undefined,
    identityOverride
  )
}

async function deleted(account: Account) {

@@ -13,6 +13,7 @@ import {
  AppVersionRevertedEvent,
  AppRevertedEvent,
  AppExportedEvent,
  AppDuplicatedEvent,
} from "@budibase/types"

const created = async (app: App, timestamp?: string | number) => {

@@ -77,6 +78,17 @@ async function fileImported(app: App) {
  await publishEvent(Event.APP_FILE_IMPORTED, properties)
}

async function duplicated(app: App, duplicateAppId: string) {
  const properties: AppDuplicatedEvent = {
    duplicateAppId,
    appId: app.appId,
    audited: {
      name: app.name,
    },
  }
  await publishEvent(Event.APP_DUPLICATED, properties)
}

async function templateImported(app: App, templateKey: string) {
  const properties: AppTemplateImportedEvent = {
    appId: app.appId,

@@ -147,6 +159,7 @@ export default {
  published,
  unpublished,
  fileImported,
  duplicated,
  templateImported,
  versionUpdated,
  versionReverted,

@@ -10,6 +10,18 @@ import { formats } from "dd-trace/ext"

import { localFileDestination } from "../system"

function isPlainObject(obj: any) {
  return typeof obj === "object" && obj !== null && !(obj instanceof Error)
}

function isError(obj: any) {
  return obj instanceof Error
}

function isMessage(obj: any) {
  return typeof obj === "string"
}

// LOGGER

let pinoInstance: pino.Logger | undefined

@@ -71,23 +83,11 @@ if (!env.DISABLE_PINO_LOGGER) {
    err?: Error
  }

  function isPlainObject(obj: any) {
    return typeof obj === "object" && obj !== null && !(obj instanceof Error)
  }

  function isError(obj: any) {
    return obj instanceof Error
  }

  function isMessage(obj: any) {
    return typeof obj === "string"
  }

  /**
   * Backwards compatibility between console logging statements
   * and pino logging requirements.
   */
  function getLogParams(args: any[]): [MergingObject, string] {
  const getLogParams = (args: any[]): [MergingObject, string] => {
    let error = undefined
    let objects: any[] = []
    let message = ""

@@ -1,5 +1,6 @@
import { APIError } from "@budibase/types"
import * as errors from "../errors"
import environment from "../environment"

export async function errorHandling(ctx: any, next: any) {
  try {

@@ -14,15 +15,19 @@ export async function errorHandling(ctx: any, next: any) {
    console.error(err)
  }

  const error = errors.getPublicError(err)
  const body: APIError = {
  let error: APIError = {
    message: err.message,
    status: status,
    validationErrors: err.validation,
    error,
    error: errors.getPublicError(err),
  }

  ctx.body = body
  if (environment.isTest() && ctx.headers["x-budibase-include-stacktrace"]) {
    // @ts-ignore
    error.stack = err.stack
  }

  ctx.body = error
}

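The stack trace is now attached to the APIError body itself, but only in test runs and only when the request opts in via a header. A rough sketch of how a test client might opt in (helper names assumed, not part of this diff):

// hypothetical supertest-style request from a test suite
const res = await request
  .get("/api/apps")
  .set("x-budibase-include-stacktrace", "1")

// res.body is the APIError built above, roughly:
// { message, status, validationErrors, error, stack }
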
@@ -11,7 +11,6 @@ export const buildMatcherRegex = (
  return patterns.map(pattern => {
    let route = pattern.route
    const method = pattern.method
    const strict = pattern.strict ? pattern.strict : false

    // if there is a param in the route
    // use a wildcard pattern

@@ -24,24 +23,17 @@ export const buildMatcherRegex = (
      }
    }

    return { regex: new RegExp(route), method, strict, route }
    return { regex: new RegExp(route), method, route }
  })
}

export const matches = (ctx: BBContext, options: RegexMatcher[]) => {
  return options.find(({ regex, method, strict, route }) => {
    let urlMatch
    if (strict) {
      urlMatch = ctx.request.url === route
    } else {
      urlMatch = regex.test(ctx.request.url)
    }

  return options.find(({ regex, method }) => {
    const urlMatch = regex.test(ctx.request.url)
    const methodMatch =
      method === "ALL"
        ? true
        : ctx.request.method.toLowerCase() === method.toLowerCase()

    return urlMatch && methodMatch
  })
}

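With strict matching removed, every pattern reduces to a regex test plus a method check. A small usage sketch (routes invented for illustration):

const built = buildMatcherRegex([
  { route: "/api/tests/:testId", method: "GET" }, // param becomes a wildcard
  { route: "/api/health", method: "ALL" },        // "ALL" matches any method
])

// inside some Koa middleware, ctx being a BBContext:
const matched = matches(ctx, built)
if (matched) {
  // e.g. skip auth or apply special handling for this route
}
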
@@ -3,7 +3,7 @@ import { Cookie } from "../../../constants"
import * as configs from "../../../configs"
import * as cache from "../../../cache"
import * as utils from "../../../utils"
import { UserCtx, SSOProfile, DatasourceAuthCookie } from "@budibase/types"
import { UserCtx, SSOProfile } from "@budibase/types"
import { ssoSaveUserNoOp } from "../sso/sso"

const GoogleStrategy = require("passport-google-oauth").OAuth2Strategy

@@ -5,7 +5,6 @@ import * as context from "../../../context"
import fetch from "node-fetch"
import {
  SaveSSOUserFunction,
  SaveUserOpts,
  SSOAuthDetails,
  SSOUser,
  User,

@@ -14,10 +13,8 @@ import {
// no-op function for user save
// - this allows datasource auth and access token refresh to work correctly
// - prefer no-op over an optional argument to ensure function is provided to login flows
export const ssoSaveUserNoOp: SaveSSOUserFunction = (
  user: SSOUser,
  opts: SaveUserOpts
) => Promise.resolve(user)
export const ssoSaveUserNoOp: SaveSSOUserFunction = (user: SSOUser) =>
  Promise.resolve(user)

/**
 * Common authentication logic for third parties. e.g. OAuth, OIDC.

@@ -114,11 +114,11 @@ describe("sso", () => {
      // tenant id added
      ssoUser.tenantId = context.getTenantId()

      expect(mockSaveUser).toBeCalledWith(ssoUser, {
      expect(mockSaveUser).toHaveBeenCalledWith(ssoUser, {
        hashPassword: false,
        requirePassword: false,
      })
      expect(mockDone).toBeCalledWith(null, ssoUser)
      expect(mockDone).toHaveBeenCalledWith(null, ssoUser)
    })
  })
})

@@ -159,11 +159,11 @@ describe("sso", () => {
        // existing id preserved
        ssoUser._id = existingUser._id

        expect(mockSaveUser).toBeCalledWith(ssoUser, {
        expect(mockSaveUser).toHaveBeenCalledWith(ssoUser, {
          hashPassword: false,
          requirePassword: false,
        })
        expect(mockDone).toBeCalledWith(null, ssoUser)
        expect(mockDone).toHaveBeenCalledWith(null, ssoUser)
      })
    })

@@ -187,11 +187,11 @@ describe("sso", () => {
        // existing id preserved
        ssoUser._id = existingUser._id

        expect(mockSaveUser).toBeCalledWith(ssoUser, {
        expect(mockSaveUser).toHaveBeenCalledWith(ssoUser, {
          hashPassword: false,
          requirePassword: false,
        })
        expect(mockDone).toBeCalledWith(null, ssoUser)
        expect(mockDone).toHaveBeenCalledWith(null, ssoUser)
      })
    })
  })

@@ -24,13 +24,13 @@ function buildUserCtx(user: ContextUser) {
}

function passed(throwFn: jest.Func, nextFn: jest.Func) {
  expect(throwFn).not.toBeCalled()
  expect(nextFn).toBeCalled()
  expect(throwFn).not.toHaveBeenCalled()
  expect(nextFn).toHaveBeenCalled()
}

function threw(throwFn: jest.Func) {
  // can't check next, the throw function doesn't actually throw - so it still continues
  expect(throwFn).toBeCalled()
  expect(throwFn).toHaveBeenCalled()
}

describe("adminOnly middleware", () => {

@@ -34,23 +34,6 @@ describe("matchers", () => {
    expect(!!matchers.matches(ctx, built)).toBe(true)
  })

  it("doesn't wildcard path with strict", () => {
    const pattern = [
      {
        route: "/api/tests",
        method: "POST",
        strict: true,
      },
    ]
    const ctx = structures.koa.newContext()
    ctx.request.url = "/api/tests/id/something/else"
    ctx.request.method = "POST"

    const built = matchers.buildMatcherRegex(pattern)

    expect(!!matchers.matches(ctx, built)).toBe(false)
  })

  it("matches with param", () => {
    const pattern = [
      {

@@ -67,23 +50,6 @@ describe("matchers", () => {
    expect(!!matchers.matches(ctx, built)).toBe(true)
  })

  // TODO: Support the below behaviour
  // Strict does not work when a param is present
  // it("matches with param with strict", () => {
  //   const pattern = [{
  //     route: "/api/tests/:testId",
  //     method: "GET",
  //     strict: true
  //   }]
  //   const ctx = structures.koa.newContext()
  //   ctx.request.url = "/api/tests/id"
  //   ctx.request.method = "GET"
  //
  //   const built = matchers.buildMatcherRegex(pattern)
  //
  //   expect(!!matchers.matches(ctx, built)).toBe(true)
  // })

  it("doesn't match by path", () => {
    const pattern = [
      {

@@ -45,10 +45,6 @@ export const runMigration = async (
  options: MigrationOptions = {}
) => {
  const migrationType = migration.type
  let tenantId: string | undefined
  if (migrationType !== MigrationType.INSTALLATION) {
    tenantId = context.getTenantId()
  }
  const migrationName = migration.name
  const silent = migration.silent

@@ -126,7 +126,7 @@ describe("app", () => {

    it("gets url with embedded minio", async () => {
      testEnv.withMinio()
      await testEnv.withTenant(tenantId => {
      await testEnv.withTenant(() => {
        const url = getAppFileUrl()
        expect(url).toBe(
          "/files/signed/prod-budi-app-assets/app_123/attachments/image.jpeg"

@@ -136,7 +136,7 @@ describe("app", () => {

    it("gets url with custom S3", async () => {
      testEnv.withS3()
      await testEnv.withTenant(tenantId => {
      await testEnv.withTenant(() => {
        const url = getAppFileUrl()
        expect(url).toBe(
          "http://s3.example.com/prod-budi-app-assets/app_123/attachments/image.jpeg"

@@ -146,7 +146,7 @@ describe("app", () => {

    it("gets url with cloudfront + s3", async () => {
      testEnv.withCloudfront()
      await testEnv.withTenant(tenantId => {
      await testEnv.withTenant(() => {
        const url = getAppFileUrl()
        // omit rest of signed params
        expect(

@@ -3,7 +3,7 @@ import { DBTestConfiguration } from "../../../tests/extra"
import * as tenants from "../tenants"

describe("tenants", () => {
  const config = new DBTestConfiguration()
  new DBTestConfiguration()

  describe("addTenant", () => {
    it("concurrently adds multiple tenants safely", async () => {

@@ -20,7 +20,7 @@ export async function lookupTenantId(userId: string) {
  return user.tenantId
}

async function getUserDoc(emailOrId: string): Promise<PlatformUser> {
export async function getUserDoc(emailOrId: string): Promise<PlatformUser> {
  const db = getPlatformDB()
  return db.get(emailOrId)
}

@@ -79,6 +79,17 @@ async function addUserDoc(emailOrId: string, newDocFn: () => PlatformUser) {
  }
}

export async function addSsoUser(
  ssoId: string,
  email: string,
  userId: string,
  tenantId: string
) {
  return addUserDoc(ssoId, () =>
    newUserSsoIdDoc(ssoId, email, userId, tenantId)
  )
}

export async function addUser(
  tenantId: string,
  userId: string,

@@ -91,9 +102,7 @@ export async function addUser(
  ]

  if (ssoId) {
    promises.push(
      addUserDoc(ssoId, () => newUserSsoIdDoc(ssoId, email, userId, tenantId))
    )
    promises.push(addSsoUser(ssoId, email, userId, tenantId))
  }

  await Promise.all(promises)

@@ -4,4 +4,5 @@ export enum JobQueue {
  AUDIT_LOG = "auditLogQueue",
  SYSTEM_EVENT_QUEUE = "systemEventQueue",
  APP_MIGRATION = "appMigration",
  DOC_WRITETHROUGH_QUEUE = "docWritethroughQueue",
}

@@ -1,5 +1,14 @@
import events from "events"
import { timeout } from "../utils"
import { newid, timeout } from "../utils"
import { Queue, QueueOptions, JobOptions } from "./queue"

interface JobMessage {
  id: string
  timestamp: number
  queue: string
  data: any
  opts?: JobOptions
}

/**
 * Bull works with a Job wrapper around all messages that contains a lot more information about

@@ -10,12 +19,13 @@ import { timeout } from "../utils"
 * @returns A new job which can now be put onto the queue, this is mostly an
 * internal structure so that an in memory queue can be easily swapped for a Bull queue.
 */
function newJob(queue: string, message: any) {
function newJob(queue: string, message: any, opts?: JobOptions): JobMessage {
  return {
    id: newid(),
    timestamp: Date.now(),
    queue: queue,
    data: message,
    opts: {},
    opts,
  }
}

@@ -24,26 +34,29 @@ function newJob(queue: string, message: any) {
 * It is relatively simple, using an event emitter internally to register when messages are available
 * to the consumers - it can support many inputs and many consumers.
 */
class InMemoryQueue {
class InMemoryQueue implements Partial<Queue> {
  _name: string
  _opts?: any
  _messages: any[]
  _emitter: EventEmitter
  _opts?: QueueOptions
  _messages: JobMessage[]
  _queuedJobIds: Set<string>
  _emitter: NodeJS.EventEmitter
  _runCount: number
  _addCount: number

  /**
   * The constructor of the queue, exactly the same as Bull's.
   * @param name The name of the queue which is being configured.
   * @param opts This is not used by the in memory queue as there is no real use
   * case when in memory, but is the same API as Bull
   */
  constructor(name: string, opts?: any) {
  constructor(name: string, opts?: QueueOptions) {
    this._name = name
    this._opts = opts
    this._messages = []
    this._emitter = new events.EventEmitter()
    this._runCount = 0
    this._addCount = 0
    this._queuedJobIds = new Set<string>()
  }

  /**

@@ -55,22 +68,42 @@ class InMemoryQueue {
   * note this is incredibly limited compared to Bull as in reality the Job would contain
   * a lot more information about the queue and current status of Bull cluster.
   */
  process(func: any) {
  async process(func: any) {
    this._emitter.on("message", async () => {
      if (this._messages.length <= 0) {
        return
      }
      let msg = this._messages.shift()

      let resp = func(msg)

      async function retryFunc(fnc: any) {
        try {
          await fnc
        } catch (e: any) {
          await new Promise<void>(r => setTimeout(() => r(), 50))

          await retryFunc(func(msg))
        }
      }

      if (resp.then != null) {
        await resp
        try {
          await retryFunc(resp)
        } catch (e: any) {
          console.error(e)
        }
      }
      this._runCount++
      const jobId = msg?.opts?.jobId?.toString()
      if (jobId && msg?.opts?.removeOnComplete) {
        this._queuedJobIds.delete(jobId)
      }
    })
  }

  async isReady() {
    return true
    return this as any
  }

  // simply puts a message to the queue and emits to the queue for processing

@@ -83,27 +116,45 @@ class InMemoryQueue {
   * @param repeat serves no purpose for the import queue.
   */
  // eslint-disable-next-line no-unused-vars
  add(msg: any, repeat: boolean) {
    if (typeof msg !== "object") {
  async add(data: any, opts?: JobOptions) {
    const jobId = opts?.jobId?.toString()
    if (jobId && this._queuedJobIds.has(jobId)) {
      console.log(`Ignoring already queued job ${jobId}`)
      return
    }

    if (typeof data !== "object") {
      throw "Queue only supports carrying JSON."
    }
    this._messages.push(newJob(this._name, msg))
    this._addCount++
    this._emitter.emit("message")
    if (jobId) {
      this._queuedJobIds.add(jobId)
    }

    const pushMessage = () => {
      this._messages.push(newJob(this._name, data, opts))
      this._addCount++
      this._emitter.emit("message")
    }

    const delay = opts?.delay
    if (delay) {
      setTimeout(pushMessage, delay)
    } else {
      pushMessage()
    }
    return {} as any
  }

  /**
   * replicating the close function from bull, which waits for jobs to finish.
   */
  async close() {
    return []
  }
  async close() {}

  /**
   * This removes a cron which has been implemented, this is part of Bull API.
   * @param cronJobId The cron which is to be removed.
   */
  removeRepeatableByKey(cronJobId: string) {
  async removeRepeatableByKey(cronJobId: string) {
    // TODO: implement for testing
    console.log(cronJobId)
  }

@@ -111,12 +162,12 @@ class InMemoryQueue {
  /**
   * Implemented for tests
   */
  getRepeatableJobs() {
  async getRepeatableJobs() {
    return []
  }

  // eslint-disable-next-line no-unused-vars
  removeJobs(pattern: string) {
  // eslint-disable-next-line @typescript-eslint/no-unused-vars
  async removeJobs(pattern: string) {
    // no-op
  }

@@ -128,18 +179,22 @@ class InMemoryQueue {
  }

  async getJob() {
    return {}
    return null
  }

  on() {
    // do nothing
    return this
    return this as any
  }

  async waitForCompletion() {
    do {
      await timeout(50)
    } while (this._addCount < this._runCount)
    } while (this.hasRunningJobs())
  }

  hasRunningJobs() {
    return this._addCount > this._runCount
  }
}

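Putting the queue changes together, jobs are now deduplicated by jobId and can be delayed. A test-style sketch, under the assumption that the class and the Bull-style JobOptions used above are exported:

const queue = new InMemoryQueue("testQueue")
await queue.process(async job => {
  console.log("processing", job.data)
})

// a second add with the same jobId is ignored while the first is still queued
await queue.add({ docId: "doc1" }, { jobId: "doc1", removeOnComplete: true })
await queue.add({ docId: "doc1" }, { jobId: "doc1", removeOnComplete: true })

// delayed jobs are pushed onto the queue via setTimeout
await queue.add({ docId: "doc2" }, { delay: 100 })

// loops until every added message has been processed
await queue.waitForCompletion()
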
@@ -88,6 +88,7 @@ enum QueueEventType {
  AUDIT_LOG_EVENT = "audit-log-event",
  SYSTEM_EVENT = "system-event",
  APP_MIGRATION = "app-migration",
  DOC_WRITETHROUGH = "doc-writethrough",
}

const EventTypeMap: { [key in JobQueue]: QueueEventType } = {

@@ -96,6 +97,7 @@ const EventTypeMap: { [key in JobQueue]: QueueEventType } = {
  [JobQueue.AUDIT_LOG]: QueueEventType.AUDIT_LOG_EVENT,
  [JobQueue.SYSTEM_EVENT_QUEUE]: QueueEventType.SYSTEM_EVENT,
  [JobQueue.APP_MIGRATION]: QueueEventType.APP_MIGRATION,
  [JobQueue.DOC_WRITETHROUGH_QUEUE]: QueueEventType.DOC_WRITETHROUGH,
}

function logging(queue: Queue, jobQueue: JobQueue) {

@@ -130,7 +132,7 @@ function logging(queue: Queue, jobQueue: JobQueue) {
      // A Job is waiting to be processed as soon as a worker is idling.
      console.info(...getLogParams(eventType, BullEvent.WAITING, { jobId }))
    })
    .on(BullEvent.ACTIVE, async (job: Job, jobPromise: any) => {
    .on(BullEvent.ACTIVE, async (job: Job) => {
      // A job has started. You can use `jobPromise.cancel()` to abort it.
      await doInJobContext(job, () => {
        console.info(...getLogParams(eventType, BullEvent.ACTIVE, { job }))

@@ -7,6 +7,8 @@ import { addListeners, StalledFn } from "./listeners"
import { Duration } from "../utils"
import * as timers from "../timers"

export { QueueOptions, Queue, JobOptions } from "bull"

// the queue lock is held for 5 minutes
const QUEUE_LOCK_MS = Duration.fromMinutes(5).toMs()
// queue lock is refreshed every 30 seconds

@@ -9,7 +9,8 @@ let userClient: Client,
  lockClient: Client,
  socketClient: Client,
  inviteClient: Client,
  passwordResetClient: Client
  passwordResetClient: Client,
  docWritethroughClient: Client

export async function init() {
  userClient = await new Client(utils.Databases.USER_CACHE).init()

@@ -24,6 +25,9 @@ export async function init() {
    utils.Databases.SOCKET_IO,
    utils.SelectableDatabase.SOCKET_IO
  ).init()
  docWritethroughClient = await new Client(
    utils.Databases.DOC_WRITE_THROUGH
  ).init()
}

export async function shutdown() {

@@ -36,6 +40,7 @@ export async function shutdown() {
  if (inviteClient) await inviteClient.finish()
  if (passwordResetClient) await passwordResetClient.finish()
  if (socketClient) await socketClient.finish()
  if (docWritethroughClient) await docWritethroughClient.finish()
}

process.on("exit", async () => {

@@ -104,3 +109,10 @@ export async function getPasswordResetClient() {
  }
  return passwordResetClient
}

export async function getDocWritethroughClient() {
  if (!docWritethroughClient) {
    await init()
  }
  return docWritethroughClient
}

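Consumers obtain the new client the same way as the existing ones; a sketch (the import path, key, and TTL below are illustrative):

import { getDocWritethroughClient } from "@budibase/backend-core/redis/init" // path assumed

const client = await getDocWritethroughClient()
// store() with an expiry, as used elsewhere in this package
await client.store("app_123:some_doc", { _id: "some_doc" }, 3600)
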
@@ -1,5 +1,5 @@
import env from "../environment"
import Redis from "ioredis"
import Redis, { Cluster } from "ioredis"
// mock-redis doesn't have any typing
let MockRedis: any | undefined
if (env.MOCK_REDIS) {

@@ -28,7 +28,7 @@ const DEFAULT_SELECT_DB = SelectableDatabase.DEFAULT

// for testing just generate the client once
let CLOSED = false
let CLIENTS: { [key: number]: any } = {}
const CLIENTS: Record<number, Redis> = {}
let CONNECTED = false

// mock redis always connected

@@ -36,7 +36,7 @@ if (env.MOCK_REDIS) {
  CONNECTED = true
}

function pickClient(selectDb: number): any {
function pickClient(selectDb: number) {
  return CLIENTS[selectDb]
}

@@ -201,12 +201,15 @@ class RedisWrapper {
    key = `${db}${SEPARATOR}${key}`
    let stream
    if (CLUSTERED) {
      let node = this.getClient().nodes("master")
      let node = (this.getClient() as never as Cluster).nodes("master")
      stream = node[0].scanStream({ match: key + "*", count: 100 })
    } else {
      stream = this.getClient().scanStream({ match: key + "*", count: 100 })
      stream = (this.getClient() as Redis).scanStream({
        match: key + "*",
        count: 100,
      })
    }
    return promisifyStream(stream, this.getClient())
    return promisifyStream(stream, this.getClient() as any)
  }

  async keys(pattern: string) {

@@ -221,14 +224,16 @@ class RedisWrapper {

  async get(key: string) {
    const db = this._db
    let response = await this.getClient().get(addDbPrefix(db, key))
    const response = await this.getClient().get(addDbPrefix(db, key))
    // overwrite the prefixed key
    // @ts-ignore
    if (response != null && response.key) {
      // @ts-ignore
      response.key = key
    }
    // if it's not an object just return the response
    try {
      return JSON.parse(response)
      return JSON.parse(response!)
    } catch (err) {
      return response
    }

@@ -274,13 +279,37 @@ class RedisWrapper {
    }
  }

  async bulkStore(
    data: Record<string, any>,
    expirySeconds: number | null = null
  ) {
    const client = this.getClient()

    const dataToStore = Object.entries(data).reduce((acc, [key, value]) => {
      acc[addDbPrefix(this._db, key)] =
        typeof value === "object" ? JSON.stringify(value) : value
      return acc
    }, {} as Record<string, any>)

    const pipeline = client.pipeline()
    pipeline.mset(dataToStore)

    if (expirySeconds !== null) {
      for (const key of Object.keys(dataToStore)) {
        pipeline.expire(key, expirySeconds)
      }
    }

    await pipeline.exec()
  }

  async getTTL(key: string) {
    const db = this._db
    const prefixedKey = addDbPrefix(db, key)
    return this.getClient().ttl(prefixedKey)
  }

  async setExpiry(key: string, expirySeconds: number | null) {
  async setExpiry(key: string, expirySeconds: number) {
    const db = this._db
    const prefixedKey = addDbPrefix(db, key)
    await this.getClient().expire(prefixedKey, expirySeconds)

@@ -291,10 +320,35 @@ class RedisWrapper {
    await this.getClient().del(addDbPrefix(db, key))
  }

  async bulkDelete(keys: string[]) {
    const db = this._db
    await this.getClient().del(keys.map(key => addDbPrefix(db, key)))
  }

  async clear() {
    let items = await this.scan()
    await Promise.all(items.map((obj: any) => this.delete(obj.key)))
  }

  async increment(key: string) {
    const result = await this.getClient().incr(addDbPrefix(this._db, key))
    if (isNaN(result)) {
      throw new Error(`Redis ${key} does not contain a number`)
    }
    return result
  }

  async deleteIfValue(key: string, value: any) {
    const client = this.getClient()

    const luaScript = `
      if redis.call('GET', KEYS[1]) == ARGV[1] then
        redis.call('DEL', KEYS[1])
      end
    `

    await client.eval(luaScript, 1, addDbPrefix(this._db, key), value)
  }
}

export default RedisWrapper

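The pipelined bulkStore and the Lua-guarded deleteIfValue can be exercised roughly like this (keys and values invented for illustration):

const redis = new RedisWrapper("test")
await redis.init()

// one MSET round trip, plus a per-key EXPIRE when a TTL is supplied
await redis.bulkStore({ a: "1", b: { nested: true } }, 60)

// GET/DEL runs as a single atomic server-side script - deletes only on a match
await redis.deleteIfValue("a", "1") // removed
await redis.deleteIfValue("b", "x") // left intact
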
@@ -72,7 +72,7 @@ const OPTIONS: Record<keyof typeof LockType, Redlock.Options> = {
export async function newRedlock(opts: Redlock.Options = {}) {
  const options = { ...OPTIONS.DEFAULT, ...opts }
  const redisWrapper = await getLockClient()
  const client = redisWrapper.getClient()
  const client = redisWrapper.getClient() as any
  return new Redlock([client], options)
}

@@ -0,0 +1,203 @@
import { GenericContainer, StartedTestContainer } from "testcontainers"
import { generator, structures } from "../../../tests"
import RedisWrapper from "../redis"
import { env } from "../.."

jest.setTimeout(30000)

describe("redis", () => {
  let redis: RedisWrapper
  let container: StartedTestContainer

  beforeAll(async () => {
    container = await new GenericContainer("redis")
      .withExposedPorts(6379)
      .start()

    env._set(
      "REDIS_URL",
      `${container.getHost()}:${container.getMappedPort(6379)}`
    )
    env._set("MOCK_REDIS", 0)
    env._set("REDIS_PASSWORD", 0)
  })

  afterAll(() => container?.stop())

  beforeEach(async () => {
    redis = new RedisWrapper(structures.db.id())
    await redis.init()
  })

  describe("store", () => {
    it("a basic value can be persisted", async () => {
      const key = structures.uuid()
      const value = generator.word()

      await redis.store(key, value)

      expect(await redis.get(key)).toEqual(value)
    })

    it("objects can be persisted", async () => {
      const key = structures.uuid()
      const value = { [generator.word()]: generator.word() }

      await redis.store(key, value)

      expect(await redis.get(key)).toEqual(value)
    })
  })

  describe("bulkStore", () => {
    function createRandomObject(
      keyLength: number,
      valueGenerator: () => any = () => generator.word()
    ) {
      return generator
        .unique(() => generator.word(), keyLength)
        .reduce((acc, key) => {
          acc[key] = valueGenerator()
          return acc
        }, {} as Record<string, string>)
    }

    it("a basic object can be persisted", async () => {
      const data = createRandomObject(10)

      await redis.bulkStore(data)

      for (const [key, value] of Object.entries(data)) {
        expect(await redis.get(key)).toEqual(value)
      }

      expect(await redis.keys("*")).toHaveLength(10)
    })

    it("a complex object can be persisted", async () => {
      const data = {
        ...createRandomObject(10, () => createRandomObject(5)),
        ...createRandomObject(5),
      }

      await redis.bulkStore(data)

      for (const [key, value] of Object.entries(data)) {
        expect(await redis.get(key)).toEqual(value)
      }

      expect(await redis.keys("*")).toHaveLength(15)
    })

    it("no TTL is set by default", async () => {
      const data = createRandomObject(10)

      await redis.bulkStore(data)

      for (const [key, value] of Object.entries(data)) {
        expect(await redis.get(key)).toEqual(value)
        expect(await redis.getTTL(key)).toEqual(-1)
      }
    })

    it("a bulk store can be persisted with TTL", async () => {
      const ttl = 500
      const data = createRandomObject(8)

      await redis.bulkStore(data, ttl)

      for (const [key, value] of Object.entries(data)) {
        expect(await redis.get(key)).toEqual(value)
        expect(await redis.getTTL(key)).toEqual(ttl)
      }

      expect(await redis.keys("*")).toHaveLength(8)
    })

    it("setting a TTL of -1 will not persist the key", async () => {
      const ttl = -1
      const data = createRandomObject(5)

      await redis.bulkStore(data, ttl)

      for (const key of Object.keys(data)) {
        expect(await redis.get(key)).toBe(null)
      }

      expect(await redis.keys("*")).toHaveLength(0)
    })
  })

  describe("increment", () => {
    it("can increment on a new key", async () => {
      const key = structures.uuid()
      const result = await redis.increment(key)
      expect(result).toBe(1)
    })

    it("can increment multiple times", async () => {
      const key = structures.uuid()
      const results = [
        await redis.increment(key),
        await redis.increment(key),
        await redis.increment(key),
        await redis.increment(key),
        await redis.increment(key),
      ]
      expect(results).toEqual([1, 2, 3, 4, 5])
    })

    it("can increment multiple times in parallel", async () => {
      const key = structures.uuid()
      const results = await Promise.all(
        Array.from({ length: 100 }).map(() => redis.increment(key))
      )
      expect(results).toHaveLength(100)
      expect(results).toEqual(Array.from({ length: 100 }).map((_, i) => i + 1))
    })

    it("can increment existing set keys", async () => {
      const key = structures.uuid()
      await redis.store(key, 70)
      await redis.increment(key)

      const result = await redis.increment(key)
      expect(result).toBe(72)
    })

    it.each([
      generator.word(),
      generator.bool(),
      { [generator.word()]: generator.word() },
    ])("cannot increment if the store value is not a number", async value => {
      const key = structures.uuid()
      await redis.store(key, value)

      await expect(redis.increment(key)).rejects.toThrow(
        "ERR value is not an integer or out of range"
      )
    })
  })

  describe("deleteIfValue", () => {
    it("can delete if the value matches", async () => {
      const key = structures.uuid()
      const value = generator.word()
      await redis.store(key, value)

      await redis.deleteIfValue(key, value)

      expect(await redis.get(key)).toBeNull()
    })

    it("will not delete if the value does not match", async () => {
      const key = structures.uuid()
      const value = generator.word()
      await redis.store(key, value)

      await redis.deleteIfValue(key, generator.word())

      expect(await redis.get(key)).toEqual(value)
    })
  })
})

@@ -96,8 +96,8 @@ describe("redlockImpl", () => {
          task: mockTask,
          executionTimeMs: lockTtl * 2,
        })
      ).rejects.toThrowError(
        `Unable to fully release the lock on resource \"lock:${config.tenantId}_persist_writethrough\".`
      ).rejects.toThrow(
        `Unable to fully release the lock on resource "lock:${config.tenantId}_persist_writethrough".`
      )
    }
  )

@@ -30,6 +30,7 @@ export enum Databases {
  LOCKS = "locks",
  SOCKET_IO = "socket_io",
  BPM_EVENTS = "bpmEvents",
  DOC_WRITE_THROUGH = "docWriteThrough",
}

/**

@@ -84,25 +84,24 @@ export function getBuiltinRoles(): { [key: string]: RoleDoc } {
  return cloneDeep(BUILTIN_ROLES)
}

export const BUILTIN_ROLE_ID_ARRAY = Object.values(BUILTIN_ROLES).map(
  role => role._id
)
export function isBuiltin(role: string) {
  return getBuiltinRole(role) !== undefined
}

export const BUILTIN_ROLE_NAME_ARRAY = Object.values(BUILTIN_ROLES).map(
  role => role.name
)

export function isBuiltin(role?: string) {
  return BUILTIN_ROLE_ID_ARRAY.some(builtin => role?.includes(builtin))
export function getBuiltinRole(roleId: string): Role | undefined {
  const role = Object.values(BUILTIN_ROLES).find(role =>
    roleId.includes(role._id)
  )
  if (!role) {
    return undefined
  }
  return cloneDeep(role)
}

/**
 * Works through the inheritance ranks to see how far up the builtin stack this ID is.
 */
export function builtinRoleToNumber(id?: string) {
  if (!id) {
    return 0
  }
export function builtinRoleToNumber(id: string) {
  const builtins = getBuiltinRoles()
  const MAX = Object.values(builtins).length + 1
  if (id === BUILTIN_IDS.ADMIN || id === BUILTIN_IDS.BUILDER) {

@@ -123,7 +122,7 @@ export function builtinRoleToNumber(id?: string) {
/**
 * Converts any role to a number, but has to be async to get the roles from db.
 */
export async function roleToNumber(id?: string) {
export async function roleToNumber(id: string) {
  if (isBuiltin(id)) {
    return builtinRoleToNumber(id)
  }

@@ -131,7 +130,7 @@ export async function roleToNumber(id?: string) {
    defaultPublic: true,
  })) as RoleDoc[]
  for (let role of hierarchy) {
    if (isBuiltin(role?.inherits)) {
    if (role?.inherits && isBuiltin(role.inherits)) {
      return builtinRoleToNumber(role.inherits) + 1
    }
  }

@@ -161,35 +160,28 @@ export function lowerBuiltinRoleID(roleId1?: string, roleId2?: string): string {
 * @returns The role object, which may contain an "inherits" property.
 */
export async function getRole(
  roleId?: string,
  roleId: string,
  opts?: { defaultPublic?: boolean }
): Promise<RoleDoc | undefined> {
  if (!roleId) {
    return undefined
  }
  let role: any = {}
): Promise<RoleDoc> {
  // built in roles mostly come from the in-code implementation,
  // but can be extended by a doc stored about them (e.g. permissions)
  if (isBuiltin(roleId)) {
    role = cloneDeep(
      Object.values(BUILTIN_ROLES).find(role => role._id === roleId)
    )
  } else {
  let role: RoleDoc | undefined = getBuiltinRole(roleId)
  if (!role) {
    // make sure it has the prefix (if it already has it then it won't be added)
    roleId = prefixRoleID(roleId)
  }
  try {
    const db = getAppDB()
    const dbRole = await db.get(getDBRoleID(roleId))
    role = Object.assign(role, dbRole)
    const dbRole = await db.get<RoleDoc>(getDBRoleID(roleId))
    role = Object.assign(role || {}, dbRole)
    // finalise the ID
    role._id = getExternalRoleID(role._id, role.version)
    role._id = getExternalRoleID(role._id!, role.version)
  } catch (err) {
    if (!isBuiltin(roleId) && opts?.defaultPublic) {
      return cloneDeep(BUILTIN_ROLES.PUBLIC)
    }
    // only throw an error if there is no role at all
    if (Object.keys(role).length === 0) {
    if (!role || Object.keys(role).length === 0) {
      throw err
    }
  }

@@ -200,7 +192,7 @@ export async function getRole(
 * Simple function to get all the roles based on the top level user role ID.
 */
async function getAllUserRoles(
  userRoleId?: string,
  userRoleId: string,
  opts?: { defaultPublic?: boolean }
): Promise<RoleDoc[]> {
  // admins have access to all roles

@@ -226,7 +218,7 @@ async function getAllUserRoles(
}

export async function getUserRoleIdHierarchy(
  userRoleId?: string
  userRoleId: string
): Promise<string[]> {
  const roles = await getUserRoleHierarchy(userRoleId)
  return roles.map(role => role._id!)

@@ -241,7 +233,7 @@ export async function getUserRoleIdHierarchy(
 * highest level of access and the last being the lowest level.
 */
export async function getUserRoleHierarchy(
  userRoleId?: string,
  userRoleId: string,
  opts?: { defaultPublic?: boolean }
) {
  // special case, if they don't have a role then they are a public user

@@ -265,9 +257,9 @@ export function checkForRoleResourceArray(
  return rolePerms
}

export async function getAllRoleIds(appId?: string) {
export async function getAllRoleIds(appId: string): Promise<string[]> {
  const roles = await getAllRoles(appId)
  return roles.map(role => role._id)
  return roles.map(role => role._id!)
}

/**

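The net effect: builtin detection now goes through getBuiltinRole, which matches on inclusion rather than strict equality, so versioned or prefixed role IDs still resolve. A sketch (the prefixed ID format is an assumption for illustration):

isBuiltin("BASIC")          // true - exact builtin ID
isBuiltin("role_BASIC")     // true - inclusion match on a prefixed variant (assumed format)
isBuiltin("my_custom_role") // false

// getRole resolves builtins from code and overlays any stored doc on top;
// a missing custom role can still fall back to PUBLIC when requested:
const role = await getRole("my_custom_role", { defaultPublic: true })
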
@@ -158,8 +158,8 @@ describe("getTenantIDFromCtx", () => {
      ],
    }
    expect(getTenantIDFromCtx(ctx, mockOpts)).toBeUndefined()
    expect(ctx.throw).toBeCalledTimes(1)
    expect(ctx.throw).toBeCalledWith(403, "Tenant id not set")
    expect(ctx.throw).toHaveBeenCalledTimes(1)
    expect(ctx.throw).toHaveBeenCalledWith(403, "Tenant id not set")
  })

  it("returns undefined if allowNoTenant is true", () => {

@@ -20,41 +20,3 @@ export function cleanup() {
  }
  intervals = []
}

export class ExecutionTimeoutError extends Error {
  public readonly name = "ExecutionTimeoutError"
}

export class ExecutionTimeTracker {
  static withLimit(limitMs: number) {
    return new ExecutionTimeTracker(limitMs)
  }

  constructor(readonly limitMs: number) {}

  private totalTimeMs = 0

  track<T>(f: () => T): T {
    this.checkLimit()
    const start = process.hrtime.bigint()
    try {
      return f()
    } finally {
      const end = process.hrtime.bigint()
      this.totalTimeMs += Number(end - start) / 1e6
      this.checkLimit()
    }
  }

  get elapsedMS() {
    return this.totalTimeMs
  }

  checkLimit() {
    if (this.totalTimeMs > this.limitMs) {
      throw new ExecutionTimeoutError(
        `Execution time limit of ${this.limitMs}ms exceeded: ${this.totalTimeMs}ms`
      )
    }
  }
}

@@ -500,13 +500,13 @@ export class UserDB {

  static async createAdminUser(
    email: string,
    password: string,
    tenantId: string,
    password?: string,
    opts?: CreateAdminUserOpts
  ) {
    const user: User = {
      email: email,
      password: password,
      password,
      createdAt: Date.now(),
      roles: {},
      builder: {

@@ -45,7 +45,7 @@ describe("Users", () => {
      ...{ _id: groupId, roles: { app1: "ADMIN" } },
    }
    const users: User[] = []
    for (const _ of Array.from({ length: usersInGroup })) {
    for (let i = 0; i < usersInGroup; i++) {
      const userId = `us_${generator.guid()}`
      const user: User = structures.users.user({
        _id: userId,

@@ -14,16 +14,16 @@ import {
} from "../db"
import {
  BulkDocsResponse,
  ContextUser,
  CouchFindOptions,
  DatabaseQueryOpts,
  SearchQuery,
  SearchQueryOperators,
  SearchUsersRequest,
  User,
  ContextUser,
  DatabaseQueryOpts,
  CouchFindOptions,
} from "@budibase/types"
import { getGlobalDB } from "../context"
import * as context from "../context"
import { getGlobalDB } from "../context"
import { isCreator } from "./utils"
import { UserDB } from "./db"

@@ -48,6 +48,7 @@ export function isSupportedUserSearch(query: SearchQuery) {
  const allowed = [
    { op: SearchQueryOperators.STRING, key: "email" },
    { op: SearchQueryOperators.EQUAL, key: "_id" },
    { op: SearchQueryOperators.ONE_OF, key: "_id" },
  ]
  for (let [key, operation] of Object.entries(query)) {
    if (typeof operation !== "object") {

@@ -285,6 +286,10 @@ export async function paginatedUsers({
  } else if (query?.string?.email) {
    userList = await searchGlobalUsersByEmail(query?.string?.email, opts)
    property = "email"
  } else if (query?.oneOf?._id) {
    userList = await bulkGetGlobalUsersById(query?.oneOf?._id, {
      cleanup: true,
    })
  } else {
    // no search, query allDocs
    const response = await db.allDocs(getGlobalUserParams(null, opts))

@@ -15,6 +15,7 @@ beforeAll(async () => {

  jest.spyOn(events.app, "created")
  jest.spyOn(events.app, "updated")
  jest.spyOn(events.app, "duplicated")
  jest.spyOn(events.app, "deleted")
  jest.spyOn(events.app, "published")
  jest.spyOn(events.app, "unpublished")

@@ -18,7 +18,7 @@ export const account = (partial: Partial<Account> = {}): Account => {
  return {
    accountId: uuid(),
    tenantId: generator.word(),
    email: generator.email(),
    email: generator.email({ domain: "example.com" }),
    tenantName: generator.word(),
    hosting: Hosting.SELF,
    createdAt: Date.now(),

@@ -13,7 +13,7 @@ interface CreateUserRequestFields {
export function createUserRequest(userData?: Partial<CreateUserRequestFields>) {
  const defaultValues = {
    externalId: uuid(),
    email: generator.email(),
    email: `${uuid()}@example.com`,
    firstName: generator.first(),
    lastName: generator.last(),
    username: generator.name(),

@@ -3,7 +3,7 @@ import { generator } from "./generator"

export function userGroup(): UserGroup {
  return {
    name: generator.word(),
    name: generator.guid(),
    icon: generator.word(),
    color: generator.word(),
  }

@@ -1,80 +1,58 @@
import { DatabaseImpl } from "../../../src/db"
import { execSync } from "child_process"

let dockerPsResult: string | undefined

function formatDockerPsResult(serverName: string, port: number) {
  const lines = dockerPsResult?.split("\n")
  let first = true
  if (!lines) {
    return null
  }
  for (let line of lines) {
    if (first) {
      first = false
      continue
    }
    let toLookFor = serverName.split("-service")[0]
    if (!line.includes(toLookFor)) {
      continue
    }
    const regex = new RegExp(`0.0.0.0:([0-9]*)->${port}`, "g")
    const found = line.match(regex)
    if (found) {
      return found[0].split(":")[1].split("->")[0]
    }
  }
  return null
interface ContainerInfo {
  Command: string
  CreatedAt: string
  ID: string
  Image: string
  Labels: string
  LocalVolumes: string
  Mounts: string
  Names: string
  Networks: string
  Ports: string
  RunningFor: string
  Size: string
  State: string
  Status: string
}

function getTestContainerSettings(
  serverName: string,
  key: string
): string | null {
  const entry = Object.entries(global).find(
    ([k]) =>
      k.includes(`${serverName.toUpperCase()}`) &&
      k.includes(`${key.toUpperCase()}`)
  )
  if (!entry) {
    return null
  }
  return entry[1]
function getTestcontainers(): ContainerInfo[] {
  return execSync("docker ps --format json")
    .toString()
    .split("\n")
    .filter(x => x.length > 0)
    .map(x => JSON.parse(x) as ContainerInfo)
    .filter(x => x.Labels.includes("org.testcontainers=true"))
}

function getContainerInfo(containerName: string, port: number) {
  let assignedPort = getTestContainerSettings(
    containerName.toUpperCase(),
    `PORT_${port}`
  )
  if (!dockerPsResult) {
    try {
      const outputBuffer = execSync("docker ps")
      dockerPsResult = outputBuffer.toString("utf8")
    } catch (err) {
      //no-op
    }
  }
  const possiblePort = formatDockerPsResult(containerName, port)
  if (possiblePort) {
    assignedPort = possiblePort
  }
  const host = getTestContainerSettings(containerName.toUpperCase(), "IP")
  return {
    port: assignedPort,
    host,
    url: host && assignedPort && `http://${host}:${assignedPort}`,
  }
function getContainerByImage(image: string) {
  return getTestcontainers().find(x => x.Image.startsWith(image))
}

function getCouchConfig() {
  return getContainerInfo("couchdb", 5984)
function getExposedPort(container: ContainerInfo, port: number) {
  const match = container.Ports.match(new RegExp(`0.0.0.0:(\\d+)->${port}/tcp`))
  if (!match) {
    return undefined
  }
  return parseInt(match[1])
}

export function setupEnv(...envs: any[]) {
  const couch = getCouchConfig()
  const couch = getContainerByImage("budibase/couchdb")
  if (!couch) {
    throw new Error("CouchDB container not found")
  }

  const couchPort = getExposedPort(couch, 5984)
  if (!couchPort) {
    throw new Error("CouchDB port not found")
  }

  const configs = [
    { key: "COUCH_DB_PORT", value: couch.port },
    { key: "COUCH_DB_URL", value: couch.url },
    { key: "COUCH_DB_PORT", value: `${couchPort}` },
    { key: "COUCH_DB_URL", value: `http://localhost:${couchPort}` },
  ]

  for (const config of configs.filter(x => !!x.value)) {

@@ -82,4 +60,7 @@ export function setupEnv(...envs: any[]) {
    env._set(config.key, config.value)
  }
}

  // @ts-expect-error
  DatabaseImpl.nano = undefined
}

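The rewritten helpers assume `docker ps --format json` emits one JSON object per line, with Ports rendered like "0.0.0.0:32768->5984/tcp". A sketch of the discovery flow under that assumption:

// discover the CouchDB container started by testcontainers
const couch = getContainerByImage("budibase/couchdb")
if (couch) {
  const port = getExposedPort(couch, 5984) // e.g. 32768 (host-assigned)
  const url = `http://localhost:${port}`
  // this url is what setupEnv() feeds into COUCH_DB_URL
}
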
@@ -4,3 +4,7 @@ process.env.NODE_ENV = "jest"
process.env.MOCK_REDIS = "1"
process.env.LOG_LEVEL = process.env.LOG_LEVEL || "error"
process.env.REDIS_PASSWORD = "budibase"
process.env.COUCH_DB_PASSWORD = "budibase"
process.env.COUCH_DB_USER = "budibase"
process.env.API_ENCRYPTION_KEY = "testsecret"
process.env.JWT_SECRET = "testsecret"

@@ -12,6 +12,13 @@ export default {
    format: "esm",
    file: "dist/bbui.es.js",
  },
  onwarn(warning, warn) {
    // suppress eval warnings
    if (warning.code === "EVAL") {
      return
    }
    warn(warning)
  },
  plugins: [
    resolve(),
    commonjs(),

@@ -38,7 +38,7 @@
<div use:getAnchor on:click={openMenu}>
  <slot name="control" />
</div>
<Popover bind:this={dropdown} {anchor} {align} {portalTarget}>
<Popover bind:this={dropdown} {anchor} {align} {portalTarget} on:open on:close>
  <Menu>
    <slot />
  </Menu>

@@ -32,19 +32,30 @@ const handleClick = event => {
      return
    }

    // Ignore clicks for drawers, unless the handler is registered from a drawer
    const sourceInDrawer = handler.anchor.closest(".drawer-wrapper") != null
    const clickInDrawer = event.target.closest(".drawer-wrapper") != null
    if (clickInDrawer && !sourceInDrawer) {
      return
    }

    if (handler.allowedType && event.type !== handler.allowedType) {
      return
    }

    handler.callback?.(event)
  })
}
document.documentElement.addEventListener("click", handleClick, true)
document.documentElement.addEventListener("contextmenu", handleClick, true)
document.documentElement.addEventListener("mousedown", handleClick, true)

/**
 * Adds or updates a click handler
 */
const updateHandler = (id, element, anchor, callback) => {
const updateHandler = (id, element, anchor, callback, allowedType) => {
  let existingHandler = clickHandlers.find(x => x.id === id)
  if (!existingHandler) {
    clickHandlers.push({ id, element, anchor, callback })
    clickHandlers.push({ id, element, anchor, callback, allowedType })
  } else {
    existingHandler.callback = callback
  }

@@ -68,9 +79,11 @@ const removeHandler = id => {
export default (element, opts) => {
  const id = Math.random()
  const update = newOpts => {
    const callback = newOpts?.callback || newOpts
    const callback =
      newOpts?.callback || (typeof newOpts === "function" ? newOpts : null)
    const anchor = newOpts?.anchor || element
    updateHandler(id, element, anchor, callback)
    const allowedType = newOpts?.allowedType || "click"
    updateHandler(id, element, anchor, callback, allowedType)
  }
  update(opts)
  return {

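From a component, the action can now restrict which event type dismisses the target; a usage sketch (option values illustrative):

// Svelte usage - only genuine clicks dismiss, mousedown/contextmenu are ignored:
//   <div use:clickOutside={{ callback: close, allowedType: "click" }}>...</div>
//
// The legacy shorthand still works, because update() type-checks the argument:
//   <div use:clickOutside={close}>...</div>
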
@@ -15,6 +15,7 @@ export default function positionDropdown(element, opts) {
    align,
    maxHeight,
    maxWidth,
    minWidth,
    useAnchorWidth,
    offset = 5,
    customUpdate,

@@ -28,18 +29,26 @@ export default function positionDropdown(element, opts) {
  const elementBounds = element.getBoundingClientRect()
  let styles = {
    maxHeight: null,
    minWidth: null,
    minWidth,
    maxWidth,
    left: null,
    top: null,
  }

  if (typeof customUpdate === "function") {
    styles = customUpdate(anchorBounds, elementBounds, styles)
    styles = customUpdate(anchorBounds, elementBounds, {
      ...styles,
      offset: opts.offset,
    })
  } else {
    // Determine vertical styles
    if (align === "right-outside") {
      styles.top = anchorBounds.top
    if (align === "right-outside" || align === "left-outside") {
      styles.top =
        anchorBounds.top + anchorBounds.height / 2 - elementBounds.height / 2
      styles.maxHeight = maxHeight
      if (styles.top + elementBounds.height > window.innerHeight) {
        styles.top = window.innerHeight - elementBounds.height
      }
    } else if (
      window.innerHeight - anchorBounds.bottom <
      (maxHeight || 100)

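customUpdate callbacks now receive the configured offset alongside the draft styles; a sketch of one such callback (the positioning logic here is invented for illustration):

positionDropdown(element, {
  offset: 8,
  customUpdate: (anchorBounds, elementBounds, styles) => {
    // pin below the anchor, respecting the configured offset
    styles.top = anchorBounds.bottom + (styles.offset ?? 5)
    styles.left = anchorBounds.left
    return styles
  },
})
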
@ -1,28 +1,111 @@
|
|||
<script context="module">
|
||||
import { writable, get } from "svelte/store"
|
||||
|
||||
// Observe this class name if possible in order to know how to size the
|
||||
// drawer. If this doesn't exist we'll use a fixed size.
|
||||
const drawerContainer = "drawer-container"
|
||||
|
||||
// Context level stores to keep drawers in sync
|
||||
const openDrawers = writable([])
|
||||
const modal = writable(false)
|
||||
const resizable = writable(true)
|
||||
const drawerLeft = writable(null)
|
||||
const drawerWidth = writable(null)
|
||||
|
||||
// Resize observer to keep track of size changes
|
||||
let observer
|
||||
|
||||
// Starts observing the target node to watching to size changes.
|
||||
// Invoked when the first drawer of a chain is rendered.
|
||||
const observe = () => {
|
||||
const target = document.getElementsByClassName(drawerContainer)[0]
|
||||
if (observer || !target) {
|
||||
return
|
||||
}
|
||||
observer = new ResizeObserver(entries => {
|
||||
if (!entries?.[0]) {
|
||||
return
|
||||
}
|
||||
const bounds = entries[0].target.getBoundingClientRect()
|
||||
drawerLeft.set(bounds.left)
|
||||
drawerWidth.set(bounds.width)
|
||||
})
|
||||
observer.observe(target)
|
||||
|
||||
// Manually measure once to ensure that we have dimensions for the initial
|
||||
// paint
|
||||
const bounds = target.getBoundingClientRect()
|
||||
drawerLeft.set(bounds.left)
|
||||
drawerWidth.set(bounds.width)
|
||||
}
|
||||
|
||||
// Stops observing the target node.
|
||||
// Invoked when the last drawer of a chain is removed.
|
||||
const unobserve = () => {
|
||||
if (get(openDrawers).length) {
|
||||
return
|
||||
}
|
||||
observer?.disconnect()
|
||||
|
||||
// Reset state
|
||||
observer = null
|
||||
modal.set(false)
|
||||
resizable.set(true)
|
||||
drawerLeft.set(null)
|
||||
drawerWidth.set(null)
|
||||
}
|
||||
</script>
|
||||
|
||||
<script>
  import Portal from "svelte-portal"
  import Button from "../Button/Button.svelte"
  import Body from "../Typography/Body.svelte"
  import Heading from "../Typography/Heading.svelte"
  import { setContext, createEventDispatcher } from "svelte"
  import Icon from "../Icon/Icon.svelte"
  import ActionButton from "../ActionButton/ActionButton.svelte"
  import Portal from "svelte-portal"
  import { setContext, createEventDispatcher, onDestroy } from "svelte"
  import { generate } from "shortid"

  export let title
  export let fillWidth
  export let left = "314px"
  export let width = "calc(100% - 626px)"
  export let headless = false
  export let forceModal = false

  const dispatch = createEventDispatcher()
  const spacing = 11

  let visible = false
  let drawerId = generate()

  $: depth = $openDrawers.length - $openDrawers.indexOf(drawerId) - 1
  $: style = getStyle(depth, $drawerLeft, $drawerWidth, $modal)

  const getStyle = (depth, left, width, modal) => {
    let style = `
      --scale-factor: ${getScaleFactor(depth)};
      --spacing: ${spacing}px;
    `
    // Most modal styles are handled by class names
    if (modal || left == null || width == null) {
      return style
    }

    // Drawers observing another dom node need custom position styles
    return `
      ${style}
      left: ${left + spacing}px;
      width: ${width - 2 * spacing}px;
    `
  }

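Editor's note: to make getStyle concrete, a worked example under assumed inputs (depth 1, container at left 260px and width 800px, non-modal):

// getScaleFactor(1) = 1 - (1 - 1 / (1 * 1 + 1)) * 0.1 = 0.95
// left:  260 + 11 = 271px
// width: 800 - 2 * 11 = 778px
// => "--scale-factor: 0.95; --spacing: 11px; left: 271px; width: 778px;"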
  export function show() {
    if (visible) {
      return
    }
    if (forceModal) {
      modal.set(true)
      resizable.set(false)
    }
    observe()
    visible = true
    dispatch("drawerShow", drawerId)
    openDrawers.update(state => [...state, drawerId])
  }

  export function hide() {
@@ -31,12 +114,15 @@
    }
    visible = false
    dispatch("drawerHide", drawerId)
    openDrawers.update(state => state.filter(id => id !== drawerId))
    unobserve()
  }

  setContext("drawer-actions", {
  setContext("drawer", {
    hide,
    show,
    headless,
    modal,
    resizable,
  })

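Editor's note: since show/hide are exported and re-published on the renamed "drawer" context, a child rendered inside a drawer slot can close its parent without prop drilling. A minimal consumer sketch (component shape illustrative):

<script>
  import { getContext } from "svelte"

  // Context shape set above: { hide, show, modal, resizable }
  const { hide } = getContext("drawer")
</script>

<button on:click={hide}>Done</button>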
  const easeInOutQuad = x => {
@@ -45,66 +131,142 @@
  // Use a custom svelte transition here because the built-in slide
  // transition has a horrible overshoot
  const slide = () => {
  const drawerSlide = () => {
    return {
      duration: 360,
      duration: 260,
      css: t => {
        const translation = 100 - Math.round(easeInOutQuad(t) * 100)
        return `transform: translateY(${translation}%);`
        const f = easeInOutQuad(t)
        const yOffset = (1 - f) * 200
        return `
          transform: translateY(calc(${yOffset}px - 800px * (1 - var(--scale-factor))));
          opacity: ${f};
        `
      },
    }
  }

  // Custom fade transition because the default svelte one doesn't work any more
  // with svelte 4
  const drawerFade = () => {
    return {
      duration: 260,
      css: t => {
        return `opacity: ${easeInOutQuad(t)};`
      },
    }
  }

  const getScaleFactor = depth => {
    // Quadratic function approaching a limit of 1 as depth tends to infinity
    const lim = 1 - 1 / (depth * depth + 1)
    // Scale drawers between 1 and 0.9 as depth approaches infinity
    return 1 - lim * 0.1
  }

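Editor's note: plugging the first few depths into getScaleFactor shows the intended convergence toward the 0.9 floor:

// depth 0: 1 - (1 - 1/1)  * 0.1 = 1.00 (front drawer, full size)
// depth 1: 1 - (1 - 1/2)  * 0.1 = 0.95
// depth 2: 1 - (1 - 1/5)  * 0.1 = 0.92
// depth 3: 1 - (1 - 1/10) * 0.1 = 0.91, approaching the limit of 0.9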
  onDestroy(() => {
    if (visible) {
      hide()
    }
  })
</script>

{#if visible}
  <Portal>
    <section
      class:fillWidth
      class="drawer"
      class:headless
      transition:slide|local
      style={`width: ${width}; left: ${left};`}
    >
      {#if !headless}
  <Portal target=".modal-container">
    <!-- This class is unstyled, but needed by click_outside -->
    <div class="drawer-wrapper">
      <div
        class="underlay"
        class:hidden={!$modal}
        transition:drawerFade|local
      />
      <div
        class="drawer"
        class:stacked={depth > 0}
        class:modal={$modal}
        transition:drawerSlide|local
        {style}
      >
        <header>
          <div class="text">
            <Heading size="XS">{title}</Heading>
            <Body size="S">
              <slot name="description" />
            </Body>
          </div>
          {#if $$slots.title}
            <slot name="title" />
          {:else}
            <div class="text">{title || "Bindings"}</div>
          {/if}
          <div class="buttons">
            <Button secondary quiet on:click={hide}>Cancel</Button>
            <slot name="buttons" />
            {#if $resizable}
              <ActionButton
                size="M"
                quiet
                selected={$modal}
                on:click={() => modal.set(!$modal)}
              >
                <Icon name={$modal ? "Minimize" : "Maximize"} size="S" />
              </ActionButton>
            {/if}
          </div>
        </header>
      {/if}
      <slot name="body" />
    </section>
        <slot name="body" />
        <div class="overlay" class:hidden={$modal || depth === 0} />
      </div>
    </div>
  </Portal>
{/if}

<style>
  .drawer.headless :global(.drawer-contents) {
    height: calc(40vh + 75px);
  }

  .buttons {
    display: flex;
    gap: var(--spacing-m);
  }

  .drawer {
    position: absolute;
    bottom: 0;
    left: 25vw;
    width: 50vw;
    bottom: var(--spacing);
    height: 420px;
    background: var(--background);
    border-top: var(--border-light);
    z-index: 3;
    border: var(--border-light);
    z-index: 100;
    border-radius: 8px;
    overflow: hidden;
    box-sizing: border-box;
    transition: transform 260ms ease-out, bottom 260ms ease-out,
      left 260ms ease-out, width 260ms ease-out, height 260ms ease-out;
    display: flex;
    flex-direction: column;
    align-items: stretch;
  }
  .drawer.modal {
    left: 15vw;
    width: 70vw;
    bottom: 15vh;
    height: 70vh;
  }
  .drawer.stacked {
    transform: translateY(calc(-1 * 1024px * (1 - var(--scale-factor))))
      scale(var(--scale-factor));
  }

  .fillWidth {
    left: 260px !important;
    width: calc(100% - 260px) !important;
  .overlay,
  .underlay {
    top: 0;
    left: 0;
    width: 100%;
    height: 100%;
    z-index: 100;
    display: block;
    transition: opacity 260ms ease-out;
  }
  .overlay {
    position: absolute;
    background: var(--background);
    opacity: 0.5;
  }
  .underlay {
    position: fixed;
    background: rgba(0, 0, 0, 0.5);
  }
  .underlay.hidden,
  .overlay.hidden {
    opacity: 0 !important;
    pointer-events: none;
  }

  header {
@@ -112,10 +274,9 @@
    justify-content: space-between;
    align-items: center;
    border-bottom: var(--border-light);
    padding: var(--spacing-l) var(--spacing-xl);
    padding: var(--spacing-m) var(--spacing-xl);
    gap: var(--spacing-xl);
  }

  .text {
    display: flex;
    flex-direction: column;
@@ -123,7 +284,6 @@
    align-items: flex-start;
    gap: var(--spacing-xs);
  }

  .buttons {
    display: flex;
    flex-direction: row;
@@ -131,4 +291,8 @@
    align-items: center;
    gap: var(--spacing-m);
  }
  .buttons :global(.icon) {
    width: 16px;
    display: flex;
  }
</style>
@@ -1,4 +1,8 @@
<div class="drawer-contents">
<script>
  export let padding = true
</script>

<div class="drawer-contents" class:padding>
  <div class:no-sidebar={!$$slots.sidebar} class="container">
    {#if $$slots.sidebar}
      <div class="sidebar">
@@ -13,8 +17,8 @@

<style>
  .drawer-contents {
    height: 40vh;
    overflow-y: auto;
    flex: 1 1 auto;
  }
  .container {
    height: 100%;
@@ -27,14 +31,21 @@
  .sidebar {
    border-right: var(--border-light);
    overflow: auto;
    padding: var(--spacing-xl);
    scrollbar-width: none;
  }
  .padding .sidebar {
    padding: var(--spacing-xl);
  }
  .sidebar::-webkit-scrollbar {
    display: none;
  }
  .main {
    height: 100%;
    overflow: auto;
  }
  .padding .main {
    padding: var(--spacing-xl);
    height: calc(100% - var(--spacing-xl) * 2);
  }
  .main :global(textarea) {
    min-height: 200px;
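Editor's note: the new padding prop defaults to true, so existing drawers keep their spacing, while content that manages its own layout can opt out (usage illustrative):

<DrawerContent padding={false}>
  <!-- full-bleed content, e.g. a code editor -->
</DrawerContent>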
@@ -197,7 +197,9 @@
      >
        <Icon name="ChevronRight" />
      </div>
      <div class="footer">File {selectedImageIdx + 1} of {fileCount}</div>
      {#if maximum !== 1}
        <div class="footer">File {selectedImageIdx + 1} of {fileCount}</div>
      {/if}
    </div>
  {:else if value?.length}
    {#each value as file}
@@ -1,58 +1,54 @@
<script context="module">
  export const directions = ["n", "ne", "e", "se", "s", "sw", "w", "nw"]
</script>

<script>
  import Tooltip from "../Tooltip/Tooltip.svelte"
  import { fade } from "svelte/transition"
  import {
    default as AbsTooltip,
    TooltipPosition,
    TooltipType,
  } from "../Tooltip/AbsTooltip.svelte"

  export let direction = "n"
  export let name = "Add"
  export let hidden = false
  export let size = "M"
  export let hoverable = false
  export let disabled = false
  export let color
  export let hoverColor
  export let tooltip

  $: rotation = getRotation(direction)

  let showTooltip = false

  const getRotation = direction => {
    return directions.indexOf(direction) * 45
  }
  export let tooltipPosition = TooltipPosition.Bottom
  export let tooltipType = TooltipType.Default
  export let tooltipColor
  export let tooltipWrap = true
  export let newStyles = false
</script>

<!-- svelte-ignore a11y-no-static-element-interactions -->
<!-- svelte-ignore a11y-click-events-have-key-events -->
<div
  class="icon"
  on:mouseover={() => (showTooltip = true)}
  on:focus={() => (showTooltip = true)}
  on:mouseleave={() => (showTooltip = false)}
  on:click={() => (showTooltip = false)}
<AbsTooltip
  text={tooltip}
  type={tooltipType}
  position={tooltipPosition}
  color={tooltipColor}
  noWrap={tooltipWrap}
>
  <svg
    on:click
    class:hoverable
    class:disabled
    class="spectrum-Icon spectrum-Icon--size{size}"
    focusable="false"
    aria-hidden={hidden}
    aria-label={name}
    style={`transform: rotate(${rotation}deg); ${
      color ? `color: ${color};` : ""
    }`}
  >
    <use style="pointer-events: none;" xlink:href="#spectrum-icon-18-{name}" />
  </svg>
  {#if tooltip && showTooltip}
    <div class="tooltip" in:fade={{ duration: 130, delay: 250 }}>
      <Tooltip textWrapping direction="top" text={tooltip} />
    </div>
  {/if}
</div>
  <div class="icon" class:newStyles>
    <svg
      on:click
      class:hoverable
      class:disabled
      class="spectrum-Icon spectrum-Icon--size{size}"
      focusable="false"
      aria-hidden={hidden}
      aria-label={name}
      style={`${color ? `color: ${color};` : ""} ${
        hoverColor
          ? `--hover-color: ${hoverColor}`
          : "--hover-color: var(--spectrum-alias-icon-color-selected-hover)"
      }`}
    >
      <use
        style="pointer-events: none;"
        xlink:href="#spectrum-icon-18-{name}"
      />
    </svg>
  </div>
</AbsTooltip>

<style>
  .icon {
@@ -60,19 +56,25 @@
    display: grid;
    place-items: center;
  }
  .newStyles {
    color: var(--spectrum-global-color-gray-700);
  }

  svg.hoverable {
    pointer-events: all;
    transition: color var(--spectrum-global-animation-duration-100, 130ms);
  }
  svg.hoverable:hover {
    color: var(--spectrum-alias-icon-color-selected-hover) !important;
    color: var(--hover-color) !important;
    cursor: pointer;
  }
  svg.hoverable:active {
    color: var(--spectrum-global-color-blue-400) !important;
  }

  .newStyles svg.hoverable:hover,
  .newStyles svg.hoverable:active {
    color: var(--spectrum-global-color-gray-900) !important;
  }
  svg.disabled {
    color: var(--spectrum-global-color-gray-500) !important;
    pointer-events: none !important;
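Editor's note: with the hand-rolled tooltip replaced by AbsTooltip, consumers configure tooltips purely through props; name, hoverable, and tooltip all exist in the component above (usage illustrative):

<Icon name="Help" hoverable tooltip="Open the documentation" />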
@@ -1,22 +1,41 @@
<script>
  import Icon from "./Icon.svelte"

  import Tooltip from "../Tooltip/Tooltip.svelte"
  import { fade } from "svelte/transition"

  export let icon
  export let background
  export let color
  export let size = "M"
  export let tooltip

  let showTooltip = false
</script>

<!-- svelte-ignore a11y-no-static-element-interactions -->
<!-- svelte-ignore a11y-click-events-have-key-events -->
<div
  class="icon size--{size}"
  style="background: {background || `transparent`};"
  class:filled={!!background}
  on:mouseover={() => (showTooltip = true)}
  on:mouseleave={() => (showTooltip = false)}
  on:focus={() => (showTooltip = true)}
  on:blur={() => (showTooltip = false)}
  on:click={() => (showTooltip = false)}
>
  <Icon name={icon} color={background ? "white" : color} />
  {#if tooltip && showTooltip}
    <div class="tooltip" in:fade={{ duration: 130, delay: 250 }}>
      <Tooltip textWrapping direction="right" text={tooltip} />
    </div>
  {/if}
</div>

<style>
  .icon {
    position: relative;
    width: 28px;
    height: 28px;
    flex: 0 0 28px;
@@ -32,6 +51,15 @@
    width: 16px;
    height: 16px;
  }
  .icon.size--XS {
    width: 18px;
    height: 18px;
    flex: 0 0 18px;
  }
  .icon.size--XS :global(.spectrum-Icon) {
    width: 10px;
    height: 10px;
  }
  .icon.size--S {
    width: 22px;
    height: 22px;
@@ -58,4 +86,14 @@
    width: 22px;
    height: 22px;
  }

  .tooltip {
    position: absolute;
    pointer-events: none;
    left: calc(50% + 8px);
    bottom: calc(-50% + 6px);
    /* transform: translateY(-50%); */
    text-align: center;
    z-index: 1;
  }
</style>
@@ -10,6 +10,7 @@
  export let inline = false
  export let disableCancel = false
  export let autoFocus = true
  export let zIndex = 999

  const dispatch = createEventDispatcher()
  let visible = fixed || inline
@@ -101,7 +102,11 @@
<Portal target=".modal-container">
  {#if visible}
    <!-- svelte-ignore a11y-no-static-element-interactions -->
    <div class="spectrum-Underlay is-open" on:mousedown|self={cancel}>
    <div
      class="spectrum-Underlay is-open"
      on:mousedown|self={cancel}
      style="z-index:{zIndex || 999}"
    >
      <div
        class="background"
        in:fade={{ duration: 200 }}
@@ -132,7 +137,6 @@
    flex-direction: row;
    justify-content: center;
    align-items: center;
    z-index: 999;
    overflow: auto;
    overflow-x: hidden;
    background: transparent;
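Editor's note: the new zIndex prop (default 999) feeds straight into the underlay's inline style, letting callers layer one modal above other chrome (usage illustrative):

<Modal zIndex={1100}>
  <!-- modal content -->
</Modal>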
@@ -12,6 +12,7 @@
  export let anchor
  export let align = "right"
  export let portalTarget
  export let minWidth
  export let maxWidth
  export let maxHeight
  export let open = false
@@ -21,7 +22,6 @@
  export let customHeight
  export let animate = true
  export let customZindex

  export let handlePostionUpdate
  export let showPopover = true
  export let clickOutsideOverride = false
@@ -87,6 +87,7 @@
    align,
    maxHeight,
    maxWidth,
    minWidth,
    useAnchorWidth,
    offset,
    customUpdate: handlePostionUpdate,
@@ -102,6 +103,8 @@
  role="presentation"
  style="height: {customHeight}; --customZindex: {customZindex};"
  transition:fly|local={{ y: -20, duration: animate ? 200 : 0 }}
  on:mouseenter
  on:mouseleave
>
  <slot />
</div>
@@ -12,6 +12,7 @@
  export let schema
  export let value
  export let customRenderers = []
  export let snippets

  let renderer
  const typeMap = {
@@ -44,7 +45,7 @@
    if (!template) {
      return value
    }
    return processStringSync(template, { value })
    return processStringSync(template, { value, snippets })
  }
</script>

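Editor's note: threading snippets into the evaluation context means templated cells can reference user-defined snippets. A rough sketch of the call, assuming processStringSync from @budibase/string-templates as used above:

// The second argument is the evaluation context for the template
processStringSync("Total: {{ value }}", { value: 42, snippets: [] })
// => "Total: 42"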
@@ -42,6 +42,7 @@
  export let customPlaceholder = false
  export let showHeaderBorder = true
  export let placeholderText = "No rows found"
  export let snippets = []

  const dispatch = createEventDispatcher()

@@ -425,6 +426,7 @@
  <CellRenderer
    {customRenderers}
    {row}
    {snippets}
    schema={schema[field]}
    value={deepGet(row, field)}
    on:clickrelationship
@@ -470,6 +472,7 @@
    --table-border: 1px solid var(--spectrum-alias-border-color-mid);
    --cell-padding: var(--spectrum-global-dimension-size-250);
    overflow: auto;
    display: contents;
  }
  .wrapper--quiet {
    --table-bg: var(--spectrum-alias-background-color-transparent);
@@ -24,6 +24,7 @@
  export let text = ""
  export let fixed = false
  export let color = null
  export let noWrap = false

  let wrapper
  let hovered = false
@@ -105,6 +106,7 @@
<Portal target=".spectrum">
  <span
    class="spectrum-Tooltip spectrum-Tooltip--{type} spectrum-Tooltip--{position} is-open"
    class:noWrap
    style={`left:${left}px;top:${top}px;${tooltipStyle}`}
    transition:fade|local={{ duration: 130 }}
  >
@@ -118,6 +120,9 @@
  .abs-tooltip {
    display: contents;
  }
  .spectrum-Tooltip.noWrap .spectrum-Tooltip-label {
    width: max-content;
  }
  .spectrum-Tooltip {
    position: absolute;
    z-index: 9999;
@@ -19,7 +19,7 @@ export { default as ActionMenu } from "./ActionMenu/ActionMenu.svelte"
export { default as Button } from "./Button/Button.svelte"
export { default as ButtonGroup } from "./ButtonGroup/ButtonGroup.svelte"
export { default as ClearButton } from "./ClearButton/ClearButton.svelte"
export { default as Icon, directions } from "./Icon/Icon.svelte"
export { default as Icon } from "./Icon/Icon.svelte"
export { default as IconAvatar } from "./Icon/IconAvatar.svelte"
export { default as Toggle } from "./Form/Toggle.svelte"
export { default as RadioGroup } from "./Form/RadioGroup.svelte"
@@ -1,15 +0,0 @@
{
  // Use IntelliSense to learn about possible attributes.
  // Hover to view descriptions of existing attributes.
  // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
  "version": "0.2.0",
  "configurations": [
    {
      "type": "chrome",
      "request": "launch",
      "name": "Launch Chrome against localhost",
      "url": "http://localhost:3000",
      "webRoot": "${workspaceFolder}"
    }
  ]
}
@@ -1,8 +0,0 @@
{
  "javascript.format.enable": false,
  "svelte.plugin.svelte.format.enable": false,
  "html.format.enable": false,
  "json.format.enable": false,
  "editor.trimAutoWhitespace": false,
  "sass.format.deleteWhitespace": false
}
@@ -66,10 +66,11 @@
    "@spectrum-css/page": "^3.0.1",
    "@spectrum-css/vars": "^3.0.1",
    "@zerodevx/svelte-json-view": "^1.0.7",
    "codemirror": "^5.59.0",
    "codemirror": "^5.65.16",
    "dayjs": "^1.10.8",
    "downloadjs": "1.4.7",
    "fast-json-patch": "^3.1.1",
    "json-format-highlight": "^1.0.4",
    "lodash": "4.17.21",
    "posthog-js": "^1.36.0",
    "remixicon": "2.5.0",
@@ -9,13 +9,17 @@ const intercom = new IntercomClient(process.env.INTERCOM_TOKEN)
class AnalyticsHub {
  constructor() {
    this.clients = [posthog, intercom]
    this.initialised = false
  }

  async activate() {
    // Check analytics are enabled
    const analyticsStatus = await API.getAnalyticsStatus()
    if (analyticsStatus.enabled) {
      this.clients.forEach(client => client.init())
    if (analyticsStatus.enabled && !this.initialised) {
      this.clients.forEach(client => {
        client.init()
      })
      this.initialised = true
    }
  }

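Editor's note: the initialised flag makes activate() idempotent, so repeated calls (for example on every route change) no longer re-initialise PostHog and Intercom (usage illustrative):

const analytics = new AnalyticsHub()
await analytics.activate() // initialises both clients once
await analytics.activate() // no-op: the initialised flag short-circuits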
Some files were not shown because too many files have changed in this diff