Merge branch 'master' of https://github.com/budibase/budibase into csv-delims
commit f41bb83cd1

@@ -1,16 +0,0 @@
-node_modules
-public
-dist
-packages/server/builder
-packages/server/coverage
-packages/worker/coverage
-packages/backend-core/coverage
-packages/server/client
-packages/server/coverage
-packages/builder/.routify
-packages/sdk/sdk
-packages/account-portal/packages/server/build
-packages/account-portal/packages/ui/.routify
-packages/account-portal/packages/ui/build
-**/*.ivm.bundle.js
-packages/server/build/oldClientVersions/**/**
.eslintrc.json
@@ -1,130 +0,0 @@
-{
-  "root": true,
-  "env": {
-    "browser": true,
-    "es6": true,
-    "jest": true,
-    "node": true
-  },
-  "parser": "@babel/eslint-parser",
-  "parserOptions": {
-    "ecmaVersion": 2019,
-    "sourceType": "module",
-    "allowImportExportEverywhere": true
-  },
-  "ignorePatterns": [
-    "node_modules",
-    "dist",
-    "public",
-    "*.spec.js",
-    "bundle.js"
-  ],
-  "extends": ["eslint:recommended"],
-  "plugins": ["import", "eslint-plugin-local-rules"],
-  "overrides": [
-    {
-      "files": ["**/*.svelte"],
-      "extends": "plugin:svelte/recommended",
-      "parser": "svelte-eslint-parser",
-      "parserOptions": {
-        "parser": "@babel/eslint-parser",
-        "ecmaVersion": 2019,
-        "sourceType": "module",
-        "allowImportExportEverywhere": true
-      }
-    },
-    {
-      "files": ["**/*.ts"],
-      "parser": "@typescript-eslint/parser",
-      "plugins": ["@typescript-eslint"],
-      "extends": ["eslint:recommended"],
-      "globals": {
-        "NodeJS": true
-      },
-      "rules": {
-        "no-unused-vars": "off",
-        "local-rules/no-barrel-imports": "error",
-        "local-rules/no-budibase-imports": "error",
-        "local-rules/no-console-error": "error",
-        "@typescript-eslint/no-unused-vars": [
-          "error",
-          {
-            "varsIgnorePattern": "^_",
-            "argsIgnorePattern": "^_",
-            "destructuredArrayIgnorePattern": "^_",
-            "ignoreRestSiblings": true
-          }
-        ],
-        "no-redeclare": "off",
-        "@typescript-eslint/no-redeclare": "error",
-        // have to turn this off to allow function overloading in typescript
-        "no-dupe-class-members": "off"
-      }
-    },
-    {
-      "files": ["**/*.spec.ts"],
-      "parser": "@typescript-eslint/parser",
-      "plugins": ["jest", "@typescript-eslint"],
-      "extends": ["eslint:recommended", "plugin:jest/recommended"],
-      "env": {
-        "jest/globals": true
-      },
-      "globals": {
-        "NodeJS": true
-      },
-      "rules": {
-        "no-unused-vars": "off",
-        "@typescript-eslint/no-unused-vars": [
-          "error",
-          {
-            "varsIgnorePattern": "^_",
-            "argsIgnorePattern": "^_",
-            "destructuredArrayIgnorePattern": "^_",
-            "ignoreRestSiblings": true
-          }
-        ],
-        "local-rules/no-test-com": "error",
-        "local-rules/email-domain-example-com": "error",
-        "no-console": "warn",
-        // We have a lot of tests that don't have assertions, they use our test
-        // API client that does the assertions for them
-        "jest/expect-expect": "off",
-        // We do this in some tests where the behaviour of internal tables
-        // differs to external, but the API is broadly the same
-        "jest/no-conditional-expect": "off",
-        // have to turn this off to allow function overloading in typescript
-        "no-dupe-class-members": "off",
-        "no-redeclare": "off"
-      }
-    },
-    {
-      "files": [
-        "packages/builder/**/*",
-        "packages/client/**/*",
-        "packages/frontend-core/**/*"
-      ],
-      "rules": {
-        "no-console": ["error", { "allow": ["warn", "error", "debug"] }]
-      }
-    }
-  ],
-  "rules": {
-    "no-self-assign": "off",
-    "no-unused-vars": [
-      "error",
-      {
-        "varsIgnorePattern": "^_",
-        "argsIgnorePattern": "^_",
-        "destructuredArrayIgnorePattern": "^_",
-        "ignoreRestSiblings": true
-      }
-    ],
-    "import/no-relative-packages": "error",
-    "import/export": "error",
-    "import/no-duplicates": "error",
-    "import/newline-after-import": "error"
-  },
-  "globals": {
-    "GeolocationPositionError": true
-  }
-}
@@ -30,7 +30,7 @@ env:

 jobs:
   lint:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     steps:
       - name: Checkout repo
         uses: actions/checkout@v4
@@ -47,7 +47,7 @@ jobs:
       - run: yarn lint

   build:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     steps:
       - name: Checkout repo
         uses: actions/checkout@v4
@@ -64,22 +64,19 @@ jobs:
      - run: yarn --frozen-lockfile

       # Run build all the projects
-      - name: Build OSS
-        run: yarn build:oss
-      - name: Build account portal
-        run: yarn build:account-portal
-        if: ${{ env.IS_OSS_CONTRIBUTOR == 'false' }}
+      - name: Build
+        run: yarn build
       # Check the types of the projects built via esbuild
       - name: Check types
         run: |
           if ${{ env.ONLY_AFFECTED_TASKS }}; then
-            yarn check:types --since=${{ env.NX_BASE_BRANCH }} --ignore @budibase/account-portal-server
+            yarn check:types --since=${{ env.NX_BASE_BRANCH }}
           else
-            yarn check:types --ignore @budibase/account-portal-server
+            yarn check:types
           fi

   helm-lint:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     steps:
       - name: Checkout repo
         uses: actions/checkout@v4
@@ -91,7 +88,7 @@ jobs:
       - run: cd charts/budibase && helm lint .

   test-libraries:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     steps:
       - name: Checkout repo
         uses: actions/checkout@v4
@@ -117,13 +114,15 @@ jobs:
       - name: Test
         run: |
           if ${{ env.ONLY_AFFECTED_TASKS }}; then
-            yarn test --ignore=@budibase/worker --ignore=@budibase/server --since=${{ env.NX_BASE_BRANCH }}
+            yarn test -- --ignore=@budibase/worker --ignore=@budibase/server --ignore=@budibase/builder --no-prefix --since=${{ env.NX_BASE_BRANCH }} -- --verbose --reporters=default --reporters=github-actions
+            yarn test -- --scope=@budibase/builder --since=${{ env.NX_BASE_BRANCH }}
           else
-            yarn test --ignore=@budibase/worker --ignore=@budibase/server
+            yarn test -- --ignore=@budibase/worker --ignore=@budibase/server --ignore=@budibase/builder --no-prefix -- --verbose --reporters=default --reporters=github-actions
+            yarn test -- --scope=@budibase/builder --no-prefix
           fi

   test-worker:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     steps:
       - name: Checkout repo
         uses: actions/checkout@v4
@@ -141,16 +140,22 @@ jobs:
       - name: Test worker
         run: |
           if ${{ env.ONLY_AFFECTED_TASKS }}; then
-            node scripts/run-affected.js --task=test --scope=@budibase/worker --since=${{ env.NX_BASE_BRANCH }}
-          else
-            yarn test --scope=@budibase/worker
+            AFFECTED=$(yarn --silent nx show projects --affected -t test --base=${{ env.NX_BASE_BRANCH }} -p @budibase/worker)
+            if [ -z "$AFFECTED" ]; then
+              echo "No affected tests to run"
+              exit 0
+            fi
           fi
+
+          cd packages/worker
+          yarn test --verbose --reporters=default --reporters=github-actions

   test-server:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     strategy:
       matrix:
-        datasource: [mssql, mysql, postgres, mongodb, mariadb, oracle, none]
+        datasource:
+          [mssql, mysql, postgres, postgres_legacy, mongodb, mariadb, oracle, sqs, none]
     steps:
       - name: Checkout repo
         uses: actions/checkout@v4
@@ -185,6 +190,8 @@ jobs:
             docker pull mariadb@${{ steps.dotenv.outputs.MARIADB_SHA }}
           elif [ "${{ matrix.datasource }}" == "oracle" ]; then
             docker pull budibase/oracle-database:23.2-slim-faststart
+          elif [ "${{ matrix.datasource }}" == "postgres_legacy" ]; then
+            docker pull postgres:9.5.25
           fi
           docker pull minio/minio &
           docker pull redis &
@@ -195,6 +202,20 @@ jobs:

       - run: yarn --frozen-lockfile

+      - name: Set up PostgreSQL 16
+        if: matrix.datasource == 'postgres'
+        run: |
+          sudo systemctl stop postgresql
+          sudo apt-get remove --purge -y postgresql* libpq-dev
+          sudo rm -rf /etc/postgresql /var/lib/postgresql
+          sudo apt-get autoremove -y
+          sudo apt-get autoclean
+
+          sudo sh -c 'echo "deb http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list'
+          wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add -
+          sudo apt-get update
+          sudo apt-get install -y postgresql-16
+
       - name: Test server
         env:
           DATASOURCE: ${{ matrix.datasource }}
@@ -213,10 +234,10 @@ jobs:
           fi

           cd packages/server
-          yarn test --filter $FILTER --passWithNoTests
+          yarn test --filter $FILTER --verbose --reporters=default --reporters=github-actions

   check-pro-submodule:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     if: inputs.run_as_oss != true && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase')
     steps:
       - name: Checkout repo and submodules
@@ -274,66 +295,9 @@ jobs:
           echo 'All good, the submodule had been merged and setup correctly!'
         fi

-  check-accountportal-submodule:
-    runs-on: ubuntu-latest
-    if: inputs.run_as_oss != true && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase')
-    steps:
-      - name: Checkout repo and submodules
-        uses: actions/checkout@v4
-        with:
-          submodules: true
-          token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
-          fetch-depth: 0
-
-      - uses: dorny/paths-filter@v3
-        id: changes
-        with:
-          filters: |
-            src:
-              - packages/account-portal/**
-
-      - if: steps.changes.outputs.src == 'true'
-        name: Check account portal commit
-        id: get_accountportal_commits
-        run: |
-          cd packages/account-portal
-          accountportal_commit=$(git rev-parse HEAD)
-
-          branch="${{ github.base_ref || github.ref_name }}"
-          echo "Running on branch '$branch' (base_ref=${{ github.base_ref }}, ref_name=${{ github.head_ref }})"
-
-          base_commit=$(git rev-parse origin/master)
-
-          if [[ ! -z $base_commit ]]; then
-            echo "target_branch=$branch"
-            echo "target_branch=$branch" >> "$GITHUB_OUTPUT"
-            echo "accountportal_commit=$accountportal_commit"
-            echo "accountportal_commit=$accountportal_commit" >> "$GITHUB_OUTPUT"
-            echo "base_commit=$base_commit"
-            echo "base_commit=$base_commit" >> "$GITHUB_OUTPUT"
-          else
-            echo "Nothing to do - branch to branch merge."
-          fi
-
-      - name: Check submodule merged to base branch
-        if: ${{ steps.get_accountportal_commits.outputs.base_commit != '' }}
-        uses: actions/github-script@v7
-        with:
-          github-token: ${{ secrets.GITHUB_TOKEN }}
-          script: |
-            const submoduleCommit = '${{ steps.get_accountportal_commits.outputs.accountportal_commit }}';
-            const baseCommit = '${{ steps.get_accountportal_commits.outputs.base_commit }}';
-
-            if (submoduleCommit !== baseCommit) {
-              console.error('Submodule commit does not match the latest commit on the "${{ steps.get_accountportal_commits.outputs.target_branch }}" branch.');
-              console.error('Refer to the account portal repo to merge your changes: https://github.com/Budibase/account-portal/blob/master/docs/index.md')
-              process.exit(1);
-            } else {
-              console.log('All good, the submodule had been merged and setup correctly!')
-            }
-
   check-lockfile:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     if: inputs.run_as_oss != true && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase')
     steps:
       - name: Checkout repo
         uses: actions/checkout@v4
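Note on the test-worker change above: rather than going through a helper script, the job now asks Nx directly whether the worker package is affected. `yarn --silent nx show projects --affected -t test --base=<ref> -p @budibase/worker` prints the project name only when it is affected, so an empty result lets the step exit early, and the actual Jest invocation (with the github-actions reporter) is shared by the affected and non-affected paths.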
@@ -0,0 +1,26 @@
+name: ReadMe GitHub Action 🦉
+
+on:
+  push:
+    branches:
+      - master
+
+jobs:
+  rdme-openapi:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Check out repo
+        uses: actions/checkout@v3
+
+      - name: Use Node.js 20.x
+        uses: actions/setup-node@v4
+        with:
+          node-version: 20.x
+          cache: yarn
+      - run: yarn --frozen-lockfile
+
+      - name: Install OpenAPI pkg
+        run: yarn global add openapi
+
+      - name: update specs
+        run: cd packages/server && yarn specs && openapi specs/openapi.yaml --key=${{ secrets.README_API_KEY }} --id=6728a74f5918b50036c61841
@@ -8,41 +8,15 @@ jobs:
   stale:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/stale@v8
-        with:
-          days-before-stale: 330
-          operations-per-run: 1
-          # stale rules for PRs
-          days-before-pr-stale: 7
-          stale-issue-label: stale
-          exempt-pr-labels: pinned,security,roadmap
-          days-before-pr-close: 7
-          days-before-issue-close: 30
-
-      - uses: actions/stale@v8
-        with:
-          operations-per-run: 3
-          # stale rules for high priority bugs
-          days-before-stale: 30
-          only-issue-labels: bug,High priority
-          stale-issue-label: warn
-          days-before-close: 30
-
-      - uses: actions/stale@v8
-        with:
-          operations-per-run: 3
-          # stale rules for medium priority bugs
-          days-before-stale: 90
-          only-issue-labels: bug,Medium priority
-          stale-issue-label: warn
-          days-before-close: 30
-
-      - uses: actions/stale@v8
-        with:
-          operations-per-run: 3
-          # stale rules for all bugs
-          days-before-stale: 180
-          stale-issue-label: stale
-          only-issue-labels: bug
-          stale-issue-message: "This issue has been automatically marked as stale because it has not had any activity for six months."
-          days-before-close: 30
+      - uses: actions/stale@v8
+        with:
+          # Issues
+          days-before-stale: 180
+          stale-issue-label: stale
+          days-before-close: 30
+          stale-issue-message: "This issue has been automatically marked as stale as there has been no activity for 6 months."
+          # Pull requests
+          days-before-pr-stale: 7
+          days-before-pr-close: 14
+          exempt-pr-labels: pinned,security,roadmap
+          operations-per-run: 100
@@ -8,6 +8,7 @@ packages/server/build/oldClientVersions/**/*
 packages/builder/src/components/deploy/clientVersions.json
 packages/server/src/integrations/tests/utils/*.lock
+packages/builder/vite.config.mjs.timestamp*
 packages/account-portal

 # Logs
 logs
@@ -110,4 +111,4 @@ budibase-component
 budibase-datasource

 *.iml
-.nx
+.nx
@@ -1,6 +1,3 @@
 [submodule "packages/pro"]
 	path = packages/pro
 	url = git@github.com:Budibase/budibase-pro.git
-[submodule "packages/account-portal"]
-	path = packages/account-portal
-	url = git@github.com:Budibase/account-portal.git
@@ -9,8 +9,4 @@ packages/backend-core/coverage
 packages/builder/.routify
 packages/sdk/sdk
 packages/pro/coverage
-packages/account-portal/packages/ui/build
-packages/account-portal/packages/ui/.routify
-packages/account-portal/packages/server/build
-packages/account-portal/packages/server/coverage
 **/*.ivm.bundle.js
@@ -20,16 +20,6 @@
       "args": ["${workspaceFolder}/packages/worker/src/index.ts"],
       "cwd": "${workspaceFolder}/packages/worker"
     },
-    {
-      "name": "Camunda Worker",
-      "type": "node",
-      "request": "launch",
-      "runtimeArgs": ["--nolazy", "-r", "ts-node/register/transpile-only"],
-      "args": [
-        "${workspaceFolder}/packages/account-portal/packages/server/src/v2/run.ts"
-      ],
-      "cwd": "${workspaceFolder}/packages/account-portal/packages/server"
-    },
     {
       "type": "chrome",
       "request": "launch",
@@ -12,12 +12,12 @@ metadata:
 type: Opaque
 data:
   {{- if $existingSecret }}
-  internalApiKey: {{ index $existingSecret.data "internalApiKey" }}
-  jwtSecret: {{ index $existingSecret.data "jwtSecret" }}
-  objectStoreAccess: {{ index $existingSecret.data "objectStoreAccess" }}
-  objectStoreSecret: {{ index $existingSecret.data "objectStoreSecret" }}
-  bbEncryptionKey: {{ index $existingSecret.data "bbEncryptionKey" }}
-  apiEncryptionKey: {{ index $existingSecret.data "apiEncryptionKey" }}
+  internalApiKey: {{ index $existingSecret.data "internalApiKey" | quote }}
+  jwtSecret: {{ index $existingSecret.data "jwtSecret" | quote }}
+  objectStoreAccess: {{ index $existingSecret.data "objectStoreAccess" | quote }}
+  objectStoreSecret: {{ index $existingSecret.data "objectStoreSecret" | quote }}
+  bbEncryptionKey: {{ index $existingSecret.data "bbEncryptionKey" | quote }}
+  apiEncryptionKey: {{ index $existingSecret.data "apiEncryptionKey" | quote }}
  {{- else }}
  internalApiKey: {{ template "budibase.defaultsecret" .Values.globals.internalApiKey }}
  jwtSecret: {{ template "budibase.defaultsecret" .Values.globals.jwtSecret }}
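The `| quote` additions follow common Helm practice when re-emitting existing Secret values: `data` entries must be YAML strings, and a base64 payload that happens to consist only of digits (or to resemble a number, e.g. `12E45`) would otherwise be rendered as a bare scalar and re-typed by YAML, which the API server then rejects.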
@@ -41,11 +41,12 @@ module.exports = {
       if (
         /^@budibase\/[^/]+\/.*$/.test(importPath) &&
         importPath !== "@budibase/backend-core/tests" &&
-        importPath !== "@budibase/string-templates/test/utils"
+        importPath !== "@budibase/string-templates/test/utils" &&
+        importPath !== "@budibase/client/manifest.json"
       ) {
         context.report({
           node,
-          message: `Importing from @budibase is not allowed, except for @budibase/backend-core/tests and @budibase/string-templates/test/utils.`,
+          message: `Importing from @budibase is not allowed, except for @budibase/backend-core/tests, @budibase/string-templates/test/utils and @budibase/client/manifest.json.`,
         })
       }
     },
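For illustration, a hypothetical sketch (not part of this commit; the imported names are invented) of what the extended no-budibase-imports rule rejects and permits:

// Rejected: a deep import into another @budibase package.
// import { DatabaseImpl } from "@budibase/backend-core/src/db"

// Permitted: package roots plus the three allow-listed deep paths.
import * as core from "@budibase/backend-core"
import manifest from "@budibase/client/manifest.json"
import "@budibase/backend-core/tests"
import "@budibase/string-templates/test/utils"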
@@ -0,0 +1,197 @@
+import globals from "globals"
+import babelParser from "@babel/eslint-parser"
+import svelteParser from "svelte-eslint-parser"
+import tsParser from "@typescript-eslint/parser"
+
+import eslintPluginJest from "eslint-plugin-jest"
+import eslintPluginSvelte from "eslint-plugin-svelte"
+import eslintPluginLocalRules from "eslint-plugin-local-rules"
+import eslintPluginVitest from "@vitest/eslint-plugin"
+
+import eslint from "@eslint/js"
+import tseslint from "typescript-eslint"
+
+export default [
+  eslint.configs.recommended,
+  {
+    ignores: [
+      "**/node_modules",
+      "**/dist",
+      "**/public",
+      "**/bundle.js",
+      "**/coverage",
+      "packages/server/builder",
+      "packages/server/client",
+      "packages/builder/.routify",
+      "packages/sdk/sdk",
+      "**/*.ivm.bundle.js",
+      "packages/server/build/oldClientVersions/**/**/*",
+    ],
+  },
+  {
+    plugins: {
+      "local-rules": eslintPluginLocalRules,
+    },
+
+    languageOptions: {
+      globals: {
+        ...globals.browser,
+        ...globals.jest,
+        ...globals.node,
+        GeolocationPositionError: true,
+      },
+
+      parser: babelParser,
+      ecmaVersion: 2019,
+      sourceType: "module",
+
+      parserOptions: {
+        allowImportExportEverywhere: true,
+      },
+    },
+
+    rules: {
+      "no-self-compare": "error",
+      "no-template-curly-in-string": "error",
+      "no-unmodified-loop-condition": "error",
+      "no-unreachable-loop": "error",
+      "no-implied-eval": "error",
+      "no-extend-native": "error",
+      "no-labels": "error",
+      "no-lone-blocks": "error",
+      "no-new-wrappers": "error",
+      "no-octal-escape": "error",
+      "no-return-assign": "error",
+      "no-useless-concat": "error",
+      "no-useless-constructor": "error",
+      "no-useless-rename": "error",
+      "no-var": "error",
+      "no-void": "error",
+
+      "no-unused-vars": [
+        "error",
+        {
+          varsIgnorePattern: "^_",
+          argsIgnorePattern: "^_",
+          destructuredArrayIgnorePattern: "^_",
+          ignoreRestSiblings: true,
+          caughtErrors: "none",
+        },
+      ],
+    },
+  },
+  ...eslintPluginSvelte.configs["flat/recommended"].map(config => ({
+    ...config,
+    files: ["**/*.svelte"],
+
+    languageOptions: {
+      parser: svelteParser,
+      ecmaVersion: 2019,
+      sourceType: "script",
+
+      parserOptions: {
+        parser: "@typescript-eslint/parser",
+        allowImportExportEverywhere: true,
+      },
+    },
+  })),
+  ...tseslint.configs.strict.map(config => ({
+    ...config,
+    files: ["**/*.ts"],
+
+    languageOptions: {
+      globals: {
+        NodeJS: true,
+      },
+
+      parser: tsParser,
+    },
+
+    rules: {
+      "local-rules/no-barrel-imports": "error",
+      "local-rules/no-budibase-imports": "error",
+      "local-rules/no-console-error": "error",
+
+      "@typescript-eslint/no-inferrable-types": "error",
+      "@typescript-eslint/adjacent-overload-signatures": "error",
+      "@typescript-eslint/class-literal-property-style": "error",
+      "@typescript-eslint/no-confusing-non-null-assertion": "error",
+      "@typescript-eslint/no-unnecessary-parameter-property-assignment":
+        "error",
+      "@typescript-eslint/no-useless-empty-export": "error",
+
+      "no-unused-vars": "off",
+      "@typescript-eslint/no-unused-vars": [
+        "error",
+        {
+          varsIgnorePattern: "^_",
+          argsIgnorePattern: "^_",
+          destructuredArrayIgnorePattern: "^_",
+          ignoreRestSiblings: true,
+          caughtErrors: "none",
+        },
+      ],
+
+      "no-redeclare": "off",
+      "@typescript-eslint/no-redeclare": "error",
+
+      // @typescript-eslint/no-dupe-class-members supersedes no-dupe-class-members
+      "no-dupe-class-members": "off",
+      "@typescript-eslint/no-dupe-class-members": "error",
+
+      "no-useless-constructor": "off",
+      "@typescript-eslint/no-useless-constructor": "error",
+    },
+  })),
+  {
+    files: ["**/*.spec.ts", "**/*.spec.js"],
+
+    plugins: {
+      jest: eslintPluginJest,
+      vitest: eslintPluginVitest,
+    },
+
+    languageOptions: {
+      globals: {
+        ...eslintPluginJest.environments.globals.globals,
+        ...eslintPluginVitest.environments.env.globals,
+        NodeJS: true,
+      },
+
+      parser: tsParser,
+    },
+
+    rules: {
+      ...eslintPluginVitest.configs.recommended.rules,
+      ...eslintPluginJest.configs.recommended.rules,
+
+      "no-console": "warn",
+
+      "vitest/expect-expect": "off",
+
+      "jest/expect-expect": "off",
+      "jest/no-conditional-expect": "off",
+      "jest/no-disabled-tests": "off",
+      "jest/no-standalone-expect": "off",
+
+      "local-rules/no-test-com": "error",
+      "local-rules/email-domain-example-com": "error",
+    },
+  },
+  {
+    files: [
+      "packages/builder/**/*",
+      "packages/client/**/*",
+      "packages/frontend-core/**/*",
+    ],
+
+    rules: {
+      "no-console": [
+        "error",
+        {
+          allow: ["warn", "error", "debug"],
+        },
+      ],
+    },
+  },
+]
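One property of the new flat config worth keeping in mind: later entries in the exported array override earlier ones for the files they match. That is what lets the final object tighten no-console for the builder, client and frontend-core packages after the general rules, and it is why the tseslint strict and Svelte presets are re-mapped onto explicit `files` globs so they only apply where intended.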
@@ -10,7 +10,7 @@
   },
   "dependencies": {
     "bulma": "^0.9.3",
-    "next": "14.2.10",
+    "next": "14.2.21",
     "node-fetch": "^3.2.10",
     "sass": "^1.52.3",
     "react": "17.0.2",
@@ -18,10 +18,10 @@
     "react-notifications-component": "^3.4.1"
   },
   "devDependencies": {
-    "@types/node": "17.0.21",
+    "@types/node": "^20.17.9",
     "@types/react": "17.0.39",
     "eslint": "8.10.0",
     "eslint-config-next": "12.1.0",
-    "typescript": "5.5.2"
+    "typescript": "5.7.2"
   }
 }
@@ -46,10 +46,10 @@
   resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz#b520529ec21d8e5945a1851dfd1c32e94e39ff45"
   integrity sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==

-"@next/env@14.2.10":
-  version "14.2.10"
-  resolved "https://registry.yarnpkg.com/@next/env/-/env-14.2.10.tgz#1d3178340028ced2d679f84140877db4f420333c"
-  integrity sha512-dZIu93Bf5LUtluBXIv4woQw2cZVZ2DJTjax5/5DOs3lzEOeKLy7GxRSr4caK9/SCPdaW6bCgpye6+n4Dh9oJPw==
+"@next/env@14.2.21":
+  version "14.2.21"
+  resolved "https://registry.yarnpkg.com/@next/env/-/env-14.2.21.tgz#09ff0813d29c596397e141205d4f5fd5c236bdd0"
+  integrity sha512-lXcwcJd5oR01tggjWJ6SrNNYFGuOOMB9c251wUNkjCpkoXOPkDeF/15c3mnVlBqrW4JJXb2kVxDFhC4GduJt2A==

 "@next/eslint-plugin-next@12.1.0":
   version "12.1.0"
@@ -58,50 +58,50 @@
   dependencies:
     glob "7.1.7"

-"@next/swc-darwin-arm64@14.2.10":
-  version "14.2.10"
-  resolved "https://registry.yarnpkg.com/@next/swc-darwin-arm64/-/swc-darwin-arm64-14.2.10.tgz#49d10ca4086fbd59ee68e204f75d7136eda2aa80"
-  integrity sha512-V3z10NV+cvMAfxQUMhKgfQnPbjw+Ew3cnr64b0lr8MDiBJs3eLnM6RpGC46nhfMZsiXgQngCJKWGTC/yDcgrDQ==
+"@next/swc-darwin-arm64@14.2.21":
+  version "14.2.21"
+  resolved "https://registry.yarnpkg.com/@next/swc-darwin-arm64/-/swc-darwin-arm64-14.2.21.tgz#32a31992aace1440981df9cf7cb3af7845d94fec"
+  integrity sha512-HwEjcKsXtvszXz5q5Z7wCtrHeTTDSTgAbocz45PHMUjU3fBYInfvhR+ZhavDRUYLonm53aHZbB09QtJVJj8T7g==

-"@next/swc-darwin-x64@14.2.10":
-  version "14.2.10"
-  resolved "https://registry.yarnpkg.com/@next/swc-darwin-x64/-/swc-darwin-x64-14.2.10.tgz#0ebeae3afb8eac433882b79543295ab83624a1a8"
-  integrity sha512-Y0TC+FXbFUQ2MQgimJ/7Ina2mXIKhE7F+GUe1SgnzRmwFY3hX2z8nyVCxE82I2RicspdkZnSWMn4oTjIKz4uzA==
+"@next/swc-darwin-x64@14.2.21":
+  version "14.2.21"
+  resolved "https://registry.yarnpkg.com/@next/swc-darwin-x64/-/swc-darwin-x64-14.2.21.tgz#5ab4b3f6685b6b52f810d0f5cf6e471480ddffdb"
+  integrity sha512-TSAA2ROgNzm4FhKbTbyJOBrsREOMVdDIltZ6aZiKvCi/v0UwFmwigBGeqXDA97TFMpR3LNNpw52CbVelkoQBxA==

-"@next/swc-linux-arm64-gnu@14.2.10":
-  version "14.2.10"
-  resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-14.2.10.tgz#7e602916d2fb55a3c532f74bed926a0137c16f20"
-  integrity sha512-ZfQ7yOy5zyskSj9rFpa0Yd7gkrBnJTkYVSya95hX3zeBG9E55Z6OTNPn1j2BTFWvOVVj65C3T+qsjOyVI9DQpA==
+"@next/swc-linux-arm64-gnu@14.2.21":
+  version "14.2.21"
+  resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-14.2.21.tgz#8a0e1fa887aef19ca218af2af515d0a5ee67ba3f"
+  integrity sha512-0Dqjn0pEUz3JG+AImpnMMW/m8hRtl1GQCNbO66V1yp6RswSTiKmnHf3pTX6xMdJYSemf3O4Q9ykiL0jymu0TuA==

-"@next/swc-linux-arm64-musl@14.2.10":
-  version "14.2.10"
-  resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-14.2.10.tgz#6b143f628ccee490b527562e934f8de578d4be47"
-  integrity sha512-n2i5o3y2jpBfXFRxDREr342BGIQCJbdAUi/K4q6Env3aSx8erM9VuKXHw5KNROK9ejFSPf0LhoSkU/ZiNdacpQ==
+"@next/swc-linux-arm64-musl@14.2.21":
+  version "14.2.21"
+  resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-14.2.21.tgz#ddad844406b42fa8965fe11250abc85c1fe0fd05"
+  integrity sha512-Ggfw5qnMXldscVntwnjfaQs5GbBbjioV4B4loP+bjqNEb42fzZlAaK+ldL0jm2CTJga9LynBMhekNfV8W4+HBw==

-"@next/swc-linux-x64-gnu@14.2.10":
-  version "14.2.10"
-  resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-14.2.10.tgz#086f2f16a0678890a1eb46518c4dda381b046082"
-  integrity sha512-GXvajAWh2woTT0GKEDlkVhFNxhJS/XdDmrVHrPOA83pLzlGPQnixqxD8u3bBB9oATBKB//5e4vpACnx5Vaxdqg==
+"@next/swc-linux-x64-gnu@14.2.21":
+  version "14.2.21"
+  resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-14.2.21.tgz#db55fd666f9ba27718f65caa54b622a912cdd16b"
+  integrity sha512-uokj0lubN1WoSa5KKdThVPRffGyiWlm/vCc/cMkWOQHw69Qt0X1o3b2PyLLx8ANqlefILZh1EdfLRz9gVpG6tg==

-"@next/swc-linux-x64-musl@14.2.10":
-  version "14.2.10"
-  resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-14.2.10.tgz#1befef10ed8dbcc5047b5d637a25ae3c30a0bfc3"
-  integrity sha512-opFFN5B0SnO+HTz4Wq4HaylXGFV+iHrVxd3YvREUX9K+xfc4ePbRrxqOuPOFjtSuiVouwe6uLeDtabjEIbkmDA==
+"@next/swc-linux-x64-musl@14.2.21":
+  version "14.2.21"
+  resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-14.2.21.tgz#dddb850353624efcd58c4c4e30ad8a1aab379642"
+  integrity sha512-iAEBPzWNbciah4+0yI4s7Pce6BIoxTQ0AGCkxn/UBuzJFkYyJt71MadYQkjPqCQCJAFQ26sYh7MOKdU+VQFgPg==

-"@next/swc-win32-arm64-msvc@14.2.10":
-  version "14.2.10"
-  resolved "https://registry.yarnpkg.com/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-14.2.10.tgz#731f52c3ae3c56a26cf21d474b11ae1529531209"
-  integrity sha512-9NUzZuR8WiXTvv+EiU/MXdcQ1XUvFixbLIMNQiVHuzs7ZIFrJDLJDaOF1KaqttoTujpcxljM/RNAOmw1GhPPQQ==
+"@next/swc-win32-arm64-msvc@14.2.21":
+  version "14.2.21"
+  resolved "https://registry.yarnpkg.com/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-14.2.21.tgz#290012ee57b196d3d2d04853e6bf0179cae9fbaf"
+  integrity sha512-plykgB3vL2hB4Z32W3ktsfqyuyGAPxqwiyrAi2Mr8LlEUhNn9VgkiAl5hODSBpzIfWweX3er1f5uNpGDygfQVQ==

-"@next/swc-win32-ia32-msvc@14.2.10":
-  version "14.2.10"
-  resolved "https://registry.yarnpkg.com/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-14.2.10.tgz#32723ef7f04e25be12af357cc72ddfdd42fd1041"
-  integrity sha512-fr3aEbSd1GeW3YUMBkWAu4hcdjZ6g4NBl1uku4gAn661tcxd1bHs1THWYzdsbTRLcCKLjrDZlNp6j2HTfrw+Bg==
+"@next/swc-win32-ia32-msvc@14.2.21":
+  version "14.2.21"
+  resolved "https://registry.yarnpkg.com/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-14.2.21.tgz#c959135a78cab18cca588d11d1e33bcf199590d4"
+  integrity sha512-w5bacz4Vxqrh06BjWgua3Yf7EMDb8iMcVhNrNx8KnJXt8t+Uu0Zg4JHLDL/T7DkTCEEfKXO/Er1fcfWxn2xfPA==

-"@next/swc-win32-x64-msvc@14.2.10":
-  version "14.2.10"
-  resolved "https://registry.yarnpkg.com/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-14.2.10.tgz#ee1d036cb5ec871816f96baee7991035bb242455"
-  integrity sha512-UjeVoRGKNL2zfbcQ6fscmgjBAS/inHBh63mjIlfPg/NG8Yn2ztqylXt5qilYb6hoHIwaU2ogHknHWWmahJjgZQ==
+"@next/swc-win32-x64-msvc@14.2.21":
+  version "14.2.21"
+  resolved "https://registry.yarnpkg.com/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-14.2.21.tgz#21ff892286555b90538a7d1b505ea21a005d6ead"
+  integrity sha512-sT6+llIkzpsexGYZq8cjjthRyRGe5cJVhqh12FmlbxHqna6zsDDK8UNaV7g41T6atFHCJUPeLb3uyAwrBwy0NA==

 "@nodelib/fs.scandir@2.1.5":
   version "2.1.5"
@@ -147,10 +147,12 @@
   resolved "https://registry.yarnpkg.com/@types/json5/-/json5-0.0.29.tgz#ee28707ae94e11d2b827bcbe5270bcea7f3e71ee"
   integrity sha1-7ihweulOEdK4J7y+UnC86n8+ce4=

-"@types/node@17.0.21":
-  version "17.0.21"
-  resolved "https://registry.yarnpkg.com/@types/node/-/node-17.0.21.tgz#864b987c0c68d07b4345845c3e63b75edd143644"
-  integrity sha512-DBZCJbhII3r90XbQxI8Y9IjjiiOGlZ0Hr32omXIZvwwZ7p4DMMXGrKXVyPfuoBOri9XNtL0UK69jYIBIsRX3QQ==
+"@types/node@^20.17.9":
+  version "20.17.9"
+  resolved "https://registry.yarnpkg.com/@types/node/-/node-20.17.9.tgz#5f141d4b7ee125cdee5faefe28de095398865bab"
+  integrity sha512-0JOXkRyLanfGPE2QRCwgxhzlBAvaRdCNMcvbd7jFfpmD4eEXll7LRwy5ymJmyeZqk7Nh7eD2LeUyQ68BbndmXw==
+  dependencies:
+    undici-types "~6.19.2"

 "@types/prop-types@*":
   version "15.7.4"
@@ -1242,21 +1244,21 @@ ms@^2.1.1:
   integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==

 nanoid@^3.3.6:
-  version "3.3.7"
-  resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.7.tgz#d0c301a691bc8d54efa0a2226ccf3fe2fd656bd8"
-  integrity sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==
+  version "3.3.8"
+  resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.8.tgz#b1be3030bee36aaff18bacb375e5cce521684baf"
+  integrity sha512-WNLf5Sd8oZxOm+TzppcYk8gVOgP+l58xNy58D0nbUnOxOWRWvlcCV4kUF7ltmI6PsrLl/BgKEyS4mqsGChFN0w==

 natural-compare@^1.4.0:
   version "1.4.0"
   resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7"
   integrity sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=

-next@14.2.10:
-  version "14.2.10"
-  resolved "https://registry.yarnpkg.com/next/-/next-14.2.10.tgz#331981a4fecb1ae8af1817d4db98fc9687ee1cb6"
-  integrity sha512-sDDExXnh33cY3RkS9JuFEKaS4HmlWmDKP1VJioucCG6z5KuA008DPsDZOzi8UfqEk3Ii+2NCQSJrfbEWtZZfww==
+next@14.2.21:
+  version "14.2.21"
+  resolved "https://registry.yarnpkg.com/next/-/next-14.2.21.tgz#f6da9e2abba1a0e4ca7a5273825daf06632554ba"
+  integrity sha512-rZmLwucLHr3/zfDMYbJXbw0ZeoBpirxkXuvsJbk7UPorvPYZhP7vq7aHbKnU7dQNCYIimRrbB2pp3xmf+wsYUg==
   dependencies:
-    "@next/env" "14.2.10"
+    "@next/env" "14.2.21"
     "@swc/helpers" "0.5.5"
     busboy "1.6.0"
     caniuse-lite "^1.0.30001579"
@@ -1264,15 +1266,15 @@ next@14.2.21:
     postcss "8.4.31"
     styled-jsx "5.1.1"
   optionalDependencies:
-    "@next/swc-darwin-arm64" "14.2.10"
-    "@next/swc-darwin-x64" "14.2.10"
-    "@next/swc-linux-arm64-gnu" "14.2.10"
-    "@next/swc-linux-arm64-musl" "14.2.10"
-    "@next/swc-linux-x64-gnu" "14.2.10"
-    "@next/swc-linux-x64-musl" "14.2.10"
-    "@next/swc-win32-arm64-msvc" "14.2.10"
-    "@next/swc-win32-ia32-msvc" "14.2.10"
-    "@next/swc-win32-x64-msvc" "14.2.10"
+    "@next/swc-darwin-arm64" "14.2.21"
+    "@next/swc-darwin-x64" "14.2.21"
+    "@next/swc-linux-arm64-gnu" "14.2.21"
+    "@next/swc-linux-arm64-musl" "14.2.21"
+    "@next/swc-linux-x64-gnu" "14.2.21"
+    "@next/swc-linux-x64-musl" "14.2.21"
+    "@next/swc-win32-arm64-msvc" "14.2.21"
+    "@next/swc-win32-ia32-msvc" "14.2.21"
+    "@next/swc-win32-x64-msvc" "14.2.21"

 node-domexception@^1.0.0:
   version "1.0.0"
@@ -1746,10 +1748,10 @@ type-fest@^0.20.2:
   resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4"
   integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==

-typescript@5.5.2:
-  version "5.5.2"
-  resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.5.2.tgz#c26f023cb0054e657ce04f72583ea2d85f8d0507"
-  integrity sha512-NcRtPEOsPFFWjobJEtfihkLCZCXZt/os3zf8nTxjVH3RvTSxjrCamJpbExGvYOF+tFHc3pA65qpdwPbzjohhew==
+typescript@5.7.2:
+  version "5.7.2"
+  resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.7.2.tgz#3169cf8c4c8a828cde53ba9ecb3d2b1d5dd67be6"
+  integrity sha512-i5t66RHxDvVN40HfDd1PsEThGNnlMCMT3jMUuoh9/0TaqWevNontacunWyN02LA9/fIbEWlcHZcgTKb9QoaLfg==

 unbox-primitive@^1.0.1:
   version "1.0.1"
@@ -1761,6 +1763,11 @@ unbox-primitive@^1.0.1:
     has-symbols "^1.0.2"
     which-boxed-primitive "^1.0.2"

+undici-types@~6.19.2:
+  version "6.19.8"
+  resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-6.19.8.tgz#35111c9d1437ab83a7cdc0abae2f26d88eda0a02"
+  integrity sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==
+
 uri-js@^4.2.2:
   version "4.4.1"
   resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e"
@@ -4,13 +4,33 @@ import {
   getContainerRuntimeClient,
 } from "testcontainers"
 import { ContainerInfo } from "dockerode"
-import path from "path"
-import lockfile from "proper-lockfile"
+import * as path from "path"
+import * as lockfile from "proper-lockfile"
+import { execSync } from "child_process"
+
+interface DockerContext {
+  Name: string
+  Description: string
+  DockerEndpoint: string
+  ContextType: string
+  Error: string
+}
+
+function getCurrentDockerContext(): DockerContext {
+  const out = execSync("docker context ls --format json")
+  for (const line of out.toString().split("\n")) {
+    const parsed = JSON.parse(line)
+    if (parsed.Current) {
+      return parsed as DockerContext
+    }
+  }
+  throw new Error("No current Docker context")
+}

 async function getBudibaseContainers() {
   const client = await getContainerRuntimeClient()
-  const conatiners = await client.container.list()
-  return conatiners.filter(
+  const containers = await client.container.list()
+  return containers.filter(
     container =>
       container.Labels["com.budibase"] === "true" &&
       container.Labels["org.testcontainers"] === "true"
@@ -27,6 +47,16 @@ async function killContainers(containers: ContainerInfo[]) {
 }

 export default async function setup() {
   process.env.TESTCONTAINERS_RYUK_DISABLED = "true"

+  // For whatever reason, testcontainers doesn't always use the correct current
+  // docker context. This bit of code forces the issue by finding the current
+  // context and setting it as the DOCKER_HOST environment
+  if (!process.env.DOCKER_HOST) {
+    const dockerContext = getCurrentDockerContext()
+    process.env.DOCKER_HOST = dockerContext.DockerEndpoint
+  }
+
   const lockPath = path.resolve(__dirname, "globalSetup.ts")
   // If you run multiple tests at the same time, it's possible for the CouchDB
   // shared container to get started multiple times despite having an
@@ -47,6 +77,7 @@ export default async function setup() {

   try {
     const couchdb = new GenericContainer("budibase/couchdb:v3.3.3-sqs-v2.1.1")
+      .withName("couchdb_testcontainer")
       .withExposedPorts(5984, 4984)
       .withEnvironment({
         COUCHDB_PASSWORD: "budibase",
@@ -71,6 +102,7 @@ export default async function setup() {
   )

   const minio = new GenericContainer("minio/minio")
+    .withName("minio_testcontainer")
     .withExposedPorts(9000)
     .withCommand(["server", "/data"])
     .withTmpFs({ "/data": "rw" })
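For context on the getCurrentDockerContext helper above: `docker context ls --format json` emits one JSON object per line rather than a single array, which is why the code splits on newlines and parses each line separately. A sketch of the shape it relies on (the values below are illustrative):

// One line of `docker context ls --format json` output (illustrative values):
const line =
  '{"Current":true,"Description":"","DockerEndpoint":"unix:///var/run/docker.sock","Error":"","Name":"default"}'
const ctx = JSON.parse(line) as { Current: boolean; DockerEndpoint: string }
if (ctx.Current) {
  process.env.DOCKER_HOST = ctx.DockerEndpoint // mirrors what setup() now does
}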
@@ -2,7 +2,6 @@

 const os = require("os")
 const exec = require("child_process").exec
-const fs = require("fs")
 const platform = os.platform()

 const windows = platform === "win32"
@@ -17,10 +16,11 @@ function execute(command) {

 async function commandExistsUnix(command) {
   const unixCmd = `command -v ${command} 2>/dev/null && { echo >&1 ${command}; exit 0; }`
-  return execute(command)
+  return execute(unixCmd)
 }

 async function commandExistsWindows(command) {
+  // eslint-disable-next-line no-control-regex
   if (/[\x00-\x1f<>:"|?*]/.test(command)) {
     return false
   }
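The one-line swap above fixes a genuine bug: the old code built the `command -v` probe in `unixCmd` and then ignored it, calling `execute(command)` and thereby running the probed command itself, whereas `execute(unixCmd)` runs the existence check that was intended.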
@@ -46,6 +46,11 @@ server {
   }

   location ~ ^/api/(system|admin|global)/ {
+    # Enable buffering for potentially large OIDC configs
+    proxy_buffering on;
+    proxy_buffer_size 16k;
+    proxy_buffers 4 32k;
+
     proxy_pass http://127.0.0.1:4002;
   }
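On the buffering block above: with `proxy_buffering on`, `proxy_buffer_size 16k` sizes the buffer used for the first part of the upstream response, and `proxy_buffers 4 32k` allows roughly 128k of response per connection to be held in memory, so large payloads on the system/admin/global API routes (the large OIDC configs the comment mentions) are absorbed by nginx rather than streamed synchronously to a slow client.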
@@ -1,12 +1,7 @@
 {
   "$schema": "node_modules/lerna/schemas/lerna-schema.json",
-  "version": "3.2.7",
+  "version": "3.3.1",
   "npmClient": "yarn",
-  "packages": [
-    "packages/*",
-    "!packages/account-portal",
-    "packages/account-portal/packages/*"
-  ],
   "concurrency": 20,
   "command": {
     "publish": {
nx.json
@@ -2,7 +2,6 @@
   "$schema": "./node_modules/nx/schemas/nx-schema.json",
   "tasksRunnerOptions": {
     "default": {
-      "runner": "nx-cloud",
       "options": {
         "cacheableOperations": ["build", "test", "check:types"]
       }
@@ -13,7 +12,8 @@
       "inputs": [
         "{workspaceRoot}/scripts/*",
         "{workspaceRoot}/lerna.json",
-        "{workspaceRoot}/.github/workflows/*"
+        "{workspaceRoot}/.github/workflows/*",
+        "{workspaceRoot}/tsconfig.build.json"
       ]
     },
     "test": {
package.json
@@ -3,31 +3,34 @@
   "private": true,
   "devDependencies": {
     "@babel/core": "^7.22.5",
-    "@babel/eslint-parser": "^7.22.5",
+    "@babel/eslint-parser": "7.25.9",
     "@babel/preset-env": "^7.22.5",
     "@esbuild-plugins/tsconfig-paths": "^0.1.2",
-    "@types/node": "20.10.0",
+    "@types/node": "^20.17.9",
     "@types/proper-lockfile": "^4.1.4",
-    "@typescript-eslint/parser": "6.9.0",
+    "@typescript-eslint/parser": "8.17.0",
+    "@vitest/eslint-plugin": "^1.1.14",
     "cross-spawn": "7.0.6",
     "depcheck": "^1.4.7",
     "esbuild": "^0.18.17",
     "esbuild-node-externals": "^1.14.0",
-    "eslint": "^8.52.0",
-    "eslint-plugin-import": "^2.29.0",
-    "eslint-plugin-jest": "^27.9.0",
-    "eslint-plugin-local-rules": "^2.0.0",
-    "eslint-plugin-svelte": "^2.34.0",
+    "eslint": "9.16.0",
+    "eslint-plugin-jest": "28.9.0",
+    "eslint-plugin-local-rules": "3.0.2",
+    "eslint-plugin-svelte": "2.46.1",
     "husky": "^8.0.3",
     "kill-port": "^1.6.1",
     "lerna": "7.4.2",
     "load-tsconfig": "^0.2.5",
     "madge": "^6.0.0",
     "nx-cloud": "16.0.5",
     "prettier": "2.8.8",
     "prettier-plugin-svelte": "^2.3.0",
     "proper-lockfile": "^4.1.2",
-    "svelte": "^4.2.10",
-    "svelte-eslint-parser": "^0.33.1",
-    "typescript": "5.5.2",
-    "typescript-eslint": "^7.3.1",
+    "svelte": "4.2.19",
+    "svelte-eslint-parser": "0.43.0",
+    "typescript": "5.7.2",
+    "typescript-eslint": "8.17.0",
     "yargs": "^17.7.2"
   },
   "scripts": {
@@ -35,11 +38,10 @@
     "setup": "git config submodule.recurse true && git submodule update && node ./hosting/scripts/setup.js && yarn && yarn build && yarn dev",
     "build": "DISABLE_V8_COMPILE_CACHE=1 NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream",
     "build:apps": "DISABLE_V8_COMPILE_CACHE=1 yarn build --scope @budibase/server --scope @budibase/worker",
-    "build:oss": "DISABLE_V8_COMPILE_CACHE=1 NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream --ignore @budibase/account-portal-server --ignore @budibase/account-portal-ui",
     "build:cli": "yarn build --scope @budibase/cli",
-    "build:account-portal": "NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream --scope @budibase/account-portal-server --scope @budibase/account-portal-ui",
     "build:dev": "lerna run --stream prebuild && yarn nx run-many --target=build --output-style=dynamic --watch --preserveWatchOutput",
-    "check:types": "lerna run --concurrency 2 check:types --ignore @budibase/account-portal-server",
+    "check:types": "yarn check:dependencies && lerna run --concurrency 2 check:types",
+    "check:dependencies": "lerna run --concurrency 2 check:dependencies",
     "build:sdk": "lerna run --stream build:sdk",
     "deps:circular": "madge packages/server/dist/index.js packages/worker/src/index.ts packages/backend-core/dist/src/index.js packages/cli/src/index.js --circular",
     "release": "lerna publish from-package --yes --force-publish --no-git-tag-version --no-push --no-git-reset",
@@ -52,15 +54,12 @@
     "kill-server": "kill-port 4001 4002",
     "kill-accountportal": "kill-port 3001 4003",
     "kill-all": "yarn run kill-builder && yarn run kill-server && yarn kill-accountportal",
-    "dev": "yarn run kill-all && lerna run --parallel prebuild && lerna run --stream dev --ignore=@budibase/account-portal-ui --ignore @budibase/account-portal-server",
-    "dev:noserver": "yarn run kill-builder && lerna run --stream dev:stack:up --ignore @budibase/account-portal-server && lerna run --stream dev --ignore @budibase/backend-core --ignore @budibase/server --ignore @budibase/worker --ignore=@budibase/account-portal-ui --ignore @budibase/account-portal-server",
+    "dev": "yarn run kill-all && lerna run --parallel prebuild && lerna run --stream dev",
+    "dev:noserver": "yarn run kill-builder && lerna run --stream dev:stack:up && lerna run --stream dev --ignore @budibase/backend-core --ignore @budibase/server --ignore @budibase/worker",
     "dev:server": "yarn run kill-server && lerna run --stream dev --scope @budibase/worker --scope @budibase/server",
-    "dev:accountportal": "yarn kill-accountportal && lerna run dev --stream --scope @budibase/account-portal-ui --scope @budibase/account-portal-server",
-    "dev:camunda": "./scripts/deploy-camunda.sh",
-    "dev:all": "yarn run kill-all && lerna run --stream dev",
     "dev:built": "yarn run kill-all && cd packages/server && yarn dev:stack:up && cd ../../ && lerna run --stream dev:built",
     "dev:docker": "./scripts/devDocker.sh",
-    "test": "lerna run --concurrency 1 --stream test --stream",
+    "test": "lerna run --concurrency 1 --stream test",
     "test:containers:kill": "./scripts/killTestcontainers.sh",
     "lint:eslint": "eslint packages --max-warnings=0",
     "lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\"",
@@ -78,7 +77,6 @@
     "build:docker:dependencies": "docker build -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest ./hosting",
     "publish:docker:couch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/couchdb/Dockerfile -t budibase/couchdb:latest -t budibase/couchdb:v3.3.3 -t budibase/couchdb:v3.3.3-sqs-v2.1.1 --push ./hosting/couchdb",
     "publish:docker:dependencies": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest -t budibase/dependencies:v3.2.1 --push ./hosting",
-    "release:helm": "node scripts/releaseHelmChart",
     "env:multi:enable": "lerna run --stream env:multi:enable",
     "env:multi:disable": "lerna run --stream env:multi:disable",
     "env:selfhost:enable": "lerna run --stream env:selfhost:enable",
@@ -98,32 +96,30 @@
   },
   "workspaces": {
     "packages": [
-      "packages/*",
-      "!packages/account-portal",
-      "packages/account-portal/packages/*"
+      "packages/*"
     ]
   },
   "resolutions": {
-    "@budibase/backend-core": "0.0.0",
-    "@budibase/shared-core": "0.0.0",
-    "@budibase/string-templates": "0.0.0",
-    "@budibase/types": "0.0.0",
+    "@budibase/backend-core": "*",
+    "@budibase/shared-core": "*",
+    "@budibase/string-templates": "*",
+    "@budibase/types": "*",
     "@budibase/pro": "npm:@budibase/pro@latest",
     "tough-cookie": "4.1.3",
     "node-fetch": "2.6.7",
     "semver": "7.5.3",
     "http-cache-semantics": "4.1.1",
     "msgpackr": "1.10.1",
-    "axios": "1.6.3",
+    "axios": "1.7.7",
     "xml2js": "0.6.2",
     "unset-value": "2.0.1",
     "passport": "0.6.0",
     "fast-xml-parser": "4.4.1",
     "@azure/identity": "4.2.1",
-    "kind-of": "6.0.3"
+    "kind-of": "6.0.3",
+    "globals": "15.13.0"
   },
   "engines": {
     "node": ">=20.0.0 <21.0.0"
   },
   "dependencies": {}
 }
@@ -1 +0,0 @@
-Subproject commit 9bef5d1656b4f3c991447ded6d65b0eba393a140
@@ -21,28 +21,34 @@
   "scripts": {
     "prebuild": "rimraf dist/",
     "prepack": "cp package.json dist",
-    "build": "tsc -p tsconfig.build.json --paths null && node ./scripts/build.js",
+    "build": "node ./scripts/build.js && tsc -p tsconfig.build.json --emitDeclarationOnly --paths null && tsc -p tsconfig.test.json --paths null",
     "build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput",
-    "check:types": "tsc -p tsconfig.json --noEmit --paths null --target es2020",
+    "build:oss": "node ./scripts/build.js",
+    "check:types": "tsc -p tsconfig.json --noEmit --paths null",
+    "check:dependencies": "node ../../scripts/depcheck.js",
     "test": "bash scripts/test.sh",
     "test:watch": "jest --watchAll"
   },
   "dependencies": {
     "@budibase/nano": "10.1.5",
     "@budibase/pouchdb-replication-stream": "1.2.11",
-    "@budibase/shared-core": "0.0.0",
-    "@budibase/types": "0.0.0",
+    "@budibase/shared-core": "*",
+    "@budibase/types": "*",
     "@techpass/passport-openidconnect": "0.3.3",
     "aws-cloudfront-sign": "3.0.2",
-    "aws-sdk": "2.1030.0",
+    "aws-sdk": "2.1692.0",
     "bcrypt": "5.1.0",
     "bcryptjs": "2.4.3",
     "bull": "4.10.1",
     "correlation-id": "4.0.0",
-    "dd-trace": "5.2.0",
+    "dd-trace": "5.26.0",
     "dotenv": "16.0.1",
     "google-auth-library": "^8.0.1",
     "google-spreadsheet": "npm:@budibase/google-spreadsheet@4.1.5",
     "ioredis": "5.3.2",
+    "joi": "17.6.0",
+    "jsonwebtoken": "9.0.2",
+    "knex": "2.4.2",
     "koa-passport": "^6.0.0",
     "koa-pino-logger": "4.0.0",
     "lodash": "4.17.21",
@@ -53,17 +59,17 @@
     "pino": "8.11.0",
     "pino-http": "8.3.3",
     "posthog-node": "4.0.1",
-    "pouchdb": "7.3.0",
-    "pouchdb-find": "7.2.2",
+    "pouchdb": "9.0.0",
+    "pouchdb-find": "9.0.0",
     "redlock": "4.2.0",
     "rotating-file-stream": "3.1.0",
     "sanitize-s3-objectkey": "0.0.1",
     "semver": "^7.5.4",
     "tar-fs": "2.1.1",
-    "uuid": "^8.3.2",
-    "knex": "2.4.2"
+    "uuid": "^8.3.2"
   },
   "devDependencies": {
     "@jest/types": "^29.6.3",
     "@shopify/jest-koa-mocks": "5.1.1",
     "@swc/core": "1.3.71",
     "@swc/jest": "0.2.27",
@@ -72,20 +78,22 @@
     "@types/jest": "29.5.5",
     "@types/lodash": "4.14.200",
     "@types/node-fetch": "2.6.4",
-    "@types/pouchdb": "6.4.0",
+    "@types/pouchdb": "6.4.2",
     "@types/redlock": "4.0.7",
     "@types/semver": "7.3.7",
     "@types/tar-fs": "2.0.1",
     "@types/uuid": "8.3.4",
     "@types/koa": "2.13.4",
     "chance": "1.1.8",
     "ioredis-mock": "8.9.0",
     "jest": "29.7.0",
     "jest-serial-runner": "1.2.1",
+    "nock": "^13.5.6",
     "pino-pretty": "10.0.0",
     "pouchdb-adapter-memory": "7.2.2",
-    "testcontainers": "^10.7.2",
+    "testcontainers": "10.16.0",
     "timekeeper": "2.2.0",
-    "typescript": "5.5.2"
+    "typescript": "5.7.2"
   },
   "nx": {
     "targets": {
@ -3,18 +3,10 @@ import { Duration } from "../utils"
|
|||
import env from "../environment"
|
||||
import { getTenantId } from "../context"
|
||||
import * as redis from "../redis/init"
|
||||
import { Invite, InviteWithCode } from "@budibase/types"
|
||||
|
||||
const TTL_SECONDS = Duration.fromDays(7).toSeconds()
|
||||
|
||||
interface Invite {
|
||||
email: string
|
||||
info: any
|
||||
}
|
||||
|
||||
interface InviteWithCode extends Invite {
|
||||
code: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Given an invite code and invite body, allow the update an existing/valid invite in redis
|
||||
* @param code The invite code for an invite in redis
|
||||
|
|
|
@@ -385,17 +385,17 @@ export function getCurrentContext(): ContextMap | undefined {
   }
 }

-export function getFeatureFlags<T extends Record<string, any>>(
+export function getFeatureFlags(
   key: string
-): T | undefined {
+): Record<string, boolean> | undefined {
   const context = getCurrentContext()
   if (!context) {
     return undefined
   }
-  return context.featureFlagCache?.[key] as T
+  return context.featureFlagCache?.[key]
 }

-export function setFeatureFlags(key: string, value: Record<string, any>) {
+export function setFeatureFlags(key: string, value: Record<string, boolean>) {
   const context = getCurrentContext()
   if (!context) {
     return
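A minimal usage sketch of the narrowed feature-flag API (the cache key, flag names and import path are illustrative):

import { getFeatureFlags, setFeatureFlags } from "../context" // illustrative path

setFeatureFlags("my-cache-key", { SQS: true, AI: false })
const flags = getFeatureFlags("my-cache-key") // Record<string, boolean> | undefined
if (flags?.SQS) {
  // values are plain booleans now, no generic parameter or `as T` cast required
}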
@@ -20,7 +20,7 @@ export type ContextMap = {
     clients: Record<string, GoogleSpreadsheet>
   }
   featureFlagCache?: {
-    [key: string]: Record<string, any>
+    [key: string]: Record<string, boolean>
   }
   viewToTableCache?: Record<string, Table>
 }
@@ -190,7 +190,7 @@ export class DatabaseImpl implements Database {
     }
   }

-  private async performCall<T>(call: DBCallback<T>): Promise<any> {
+  private async performCall<T>(call: DBCallback<T>): Promise<T> {
     const db = this.getDb()
     const fnc = await call(db)
     try {
@@ -289,7 +289,7 @@ export class DatabaseImpl implements Database {
       return
     }
     let errorFound = false
-    let errorMessage: string = "Unable to bulk remove documents: "
+    let errorMessage = "Unable to bulk remove documents: "
     for (let res of response) {
       if (res.error) {
         errorFound = true
@@ -467,7 +467,7 @@ export class DatabaseImpl implements Database {
     } catch (err: any) {
       // didn't exist, don't worry
       if (err.statusCode === 404) {
-        return
+        return { ok: true }
       } else {
         throw new CouchDBError(err.message, err)
       }
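The `Promise<any>` to `Promise<T>` change on performCall means the callback's result type flows back to callers instead of collapsing to any. A simplified sketch of the pattern (not the real class, signatures reduced for clarity):

type DBCallback<T> = (db: unknown) => Promise<() => Promise<T>>

async function performCall<T>(call: DBCallback<T>): Promise<T> {
  const fnc = await call({}) // build the bound database operation
  return fnc() // previously typed as Promise<any>; callers now see T
}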
@@ -1,6 +1,6 @@
 import env from "../../environment"

-export const getCouchInfo = (connection?: string) => {
+export const getCouchInfo = (connection?: string | null) => {
   // clean out any auth credentials
   const urlInfo = getUrlInfo(connection)
   let username
@@ -45,7 +45,7 @@ export const getCouchInfo = (connection?: string | null) => {
   }
 }

-export const getUrlInfo = (url = env.COUCH_DB_URL) => {
+export const getUrlInfo = (url: string | null = env.COUCH_DB_URL) => {
   let cleanUrl, username, password, host
   if (url) {
     // Ensure the URL starts with a protocol
@ -4,7 +4,7 @@ import { checkSlashesInUrl } from "../../helpers"
|
|||
|
||||
export async function directCouchCall(
|
||||
path: string,
|
||||
method: string = "GET",
|
||||
method = "GET",
|
||||
body?: any
|
||||
) {
|
||||
let { url, cookie } = getCouchInfo()
|
||||
|
@ -43,7 +43,7 @@ export async function directCouchUrlCall({
|
|||
|
||||
export async function directCouchQuery(
|
||||
path: string,
|
||||
method: string = "GET",
|
||||
method = "GET",
|
||||
body?: any
|
||||
) {
|
||||
const response = await directCouchCall(path, method, body)
|
||||
|
|
|
@@ -27,7 +27,7 @@ export class DDInstrumentedDatabase implements Database {

   exists(docId?: string): Promise<boolean> {
     return tracer.trace("db.exists", span => {
-      span?.addTags({ db_name: this.name, doc_id: docId })
+      span.addTags({ db_name: this.name, doc_id: docId })
       if (docId) {
         return this.db.exists(docId)
       }
@@ -37,15 +37,17 @@ export class DDInstrumentedDatabase implements Database {

   get<T extends Document>(id?: string | undefined): Promise<T> {
     return tracer.trace("db.get", span => {
-      span?.addTags({ db_name: this.name, doc_id: id })
+      span.addTags({ db_name: this.name, doc_id: id })
       return this.db.get(id)
     })
   }

   tryGet<T extends Document>(id?: string | undefined): Promise<T | undefined> {
-    return tracer.trace("db.tryGet", span => {
-      span?.addTags({ db_name: this.name, doc_id: id })
-      return this.db.tryGet(id)
+    return tracer.trace("db.tryGet", async span => {
+      span.addTags({ db_name: this.name, doc_id: id })
+      const doc = await this.db.tryGet<T>(id)
+      span.addTags({ doc_found: doc !== undefined })
+      return doc
     })
   }
@ -53,13 +55,15 @@ export class DDInstrumentedDatabase implements Database {
|
|||
ids: string[],
|
||||
opts?: { allowMissing?: boolean | undefined } | undefined
|
||||
): Promise<T[]> {
|
||||
return tracer.trace("db.getMultiple", span => {
|
||||
span?.addTags({
|
||||
return tracer.trace("db.getMultiple", async span => {
|
||||
span.addTags({
|
||||
db_name: this.name,
|
||||
num_docs: ids.length,
|
||||
allow_missing: opts?.allowMissing,
|
||||
})
|
||||
return this.db.getMultiple(ids, opts)
|
||||
const docs = await this.db.getMultiple<T>(ids, opts)
|
||||
span.addTags({ num_docs_found: docs.length })
|
||||
return docs
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -69,12 +73,14 @@ export class DDInstrumentedDatabase implements Database {
|
|||
idOrDoc: string | Document,
|
||||
rev?: string
|
||||
): Promise<DocumentDestroyResponse> {
|
||||
return tracer.trace("db.remove", span => {
|
||||
span?.addTags({ db_name: this.name, doc_id: idOrDoc })
|
||||
return tracer.trace("db.remove", async span => {
|
||||
span.addTags({ db_name: this.name, doc_id: idOrDoc, rev })
|
||||
const isDocument = typeof idOrDoc === "object"
|
||||
const id = isDocument ? idOrDoc._id! : idOrDoc
|
||||
rev = isDocument ? idOrDoc._rev : rev
|
||||
return this.db.remove(id, rev)
|
||||
const resp = await this.db.remove(id, rev)
|
||||
span.addTags({ ok: resp.ok })
|
||||
return resp
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -83,7 +89,11 @@ export class DDInstrumentedDatabase implements Database {
|
|||
opts?: { silenceErrors?: boolean }
|
||||
): Promise<void> {
|
||||
return tracer.trace("db.bulkRemove", span => {
|
||||
span?.addTags({ db_name: this.name, num_docs: documents.length })
|
||||
span.addTags({
|
||||
db_name: this.name,
|
||||
num_docs: documents.length,
|
||||
silence_errors: opts?.silenceErrors,
|
||||
})
|
||||
return this.db.bulkRemove(documents, opts)
|
||||
})
|
||||
}
|
||||
|
@ -92,15 +102,21 @@ export class DDInstrumentedDatabase implements Database {
|
|||
document: AnyDocument,
|
||||
opts?: DatabasePutOpts | undefined
|
||||
): Promise<DocumentInsertResponse> {
|
||||
return tracer.trace("db.put", span => {
|
||||
span?.addTags({ db_name: this.name, doc_id: document._id })
|
||||
return this.db.put(document, opts)
|
||||
return tracer.trace("db.put", async span => {
|
||||
span.addTags({
|
||||
db_name: this.name,
|
||||
doc_id: document._id,
|
||||
force: opts?.force,
|
||||
})
|
||||
const resp = await this.db.put(document, opts)
|
||||
span.addTags({ ok: resp.ok })
|
||||
return resp
|
||||
})
|
||||
}
|
||||
|
||||
bulkDocs(documents: AnyDocument[]): Promise<DocumentBulkResponse[]> {
|
||||
return tracer.trace("db.bulkDocs", span => {
|
||||
span?.addTags({ db_name: this.name, num_docs: documents.length })
|
||||
span.addTags({ db_name: this.name, num_docs: documents.length })
|
||||
return this.db.bulkDocs(documents)
|
||||
})
|
||||
}
|
||||
|
@ -108,9 +124,15 @@ export class DDInstrumentedDatabase implements Database {
|
|||
allDocs<T extends Document | RowValue>(
|
||||
params: DatabaseQueryOpts
|
||||
): Promise<AllDocsResponse<T>> {
|
||||
return tracer.trace("db.allDocs", span => {
|
||||
span?.addTags({ db_name: this.name })
|
||||
return this.db.allDocs(params)
|
||||
return tracer.trace("db.allDocs", async span => {
|
||||
span.addTags({ db_name: this.name, ...params })
|
||||
const resp = await this.db.allDocs<T>(params)
|
||||
span.addTags({
|
||||
total_rows: resp.total_rows,
|
||||
rows_length: resp.rows.length,
|
||||
offset: resp.offset,
|
||||
})
|
||||
return resp
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -118,57 +140,75 @@ export class DDInstrumentedDatabase implements Database {
|
|||
viewName: string,
|
||||
params: DatabaseQueryOpts
|
||||
): Promise<AllDocsResponse<T>> {
|
||||
return tracer.trace("db.query", span => {
|
||||
span?.addTags({ db_name: this.name, view_name: viewName })
|
||||
return this.db.query(viewName, params)
|
||||
return tracer.trace("db.query", async span => {
|
||||
span.addTags({ db_name: this.name, view_name: viewName, ...params })
|
||||
const resp = await this.db.query<T>(viewName, params)
|
||||
span.addTags({
|
||||
total_rows: resp.total_rows,
|
||||
rows_length: resp.rows.length,
|
||||
offset: resp.offset,
|
||||
})
|
||||
return resp
|
||||
})
|
||||
}
|
||||
|
||||
destroy(): Promise<void | OkResponse> {
|
||||
return tracer.trace("db.destroy", span => {
|
||||
span?.addTags({ db_name: this.name })
|
||||
return this.db.destroy()
|
||||
destroy(): Promise<OkResponse> {
|
||||
return tracer.trace("db.destroy", async span => {
|
||||
span.addTags({ db_name: this.name })
|
||||
const resp = await this.db.destroy()
|
||||
span.addTags({ ok: resp.ok })
|
||||
return resp
|
||||
})
|
||||
}
|
||||
|
||||
compact(): Promise<void | OkResponse> {
|
||||
return tracer.trace("db.compact", span => {
|
||||
span?.addTags({ db_name: this.name })
|
||||
return this.db.compact()
|
||||
compact(): Promise<OkResponse> {
|
||||
return tracer.trace("db.compact", async span => {
|
||||
span.addTags({ db_name: this.name })
|
||||
const resp = await this.db.compact()
|
||||
span.addTags({ ok: resp.ok })
|
||||
return resp
|
||||
})
|
||||
}
|
||||
|
||||
dump(stream: Writable, opts?: DatabaseDumpOpts | undefined): Promise<any> {
|
||||
return tracer.trace("db.dump", span => {
|
||||
span?.addTags({ db_name: this.name })
|
||||
span.addTags({
|
||||
db_name: this.name,
|
||||
batch_limit: opts?.batch_limit,
|
||||
batch_size: opts?.batch_size,
|
||||
style: opts?.style,
|
||||
timeout: opts?.timeout,
|
||||
num_doc_ids: opts?.doc_ids?.length,
|
||||
view: opts?.view,
|
||||
})
|
||||
return this.db.dump(stream, opts)
|
||||
})
|
||||
}
|
||||
|
||||
load(...args: any[]): Promise<any> {
|
||||
return tracer.trace("db.load", span => {
|
||||
span?.addTags({ db_name: this.name })
|
||||
span.addTags({ db_name: this.name, num_args: args.length })
|
||||
return this.db.load(...args)
|
||||
})
|
||||
}
|
||||
|
||||
createIndex(...args: any[]): Promise<any> {
|
||||
return tracer.trace("db.createIndex", span => {
|
||||
span?.addTags({ db_name: this.name })
|
||||
span.addTags({ db_name: this.name, num_args: args.length })
|
||||
return this.db.createIndex(...args)
|
||||
})
|
||||
}
|
||||
|
||||
deleteIndex(...args: any[]): Promise<any> {
|
||||
return tracer.trace("db.deleteIndex", span => {
|
||||
span?.addTags({ db_name: this.name })
|
||||
span.addTags({ db_name: this.name, num_args: args.length })
|
||||
return this.db.deleteIndex(...args)
|
||||
})
|
||||
}
|
||||
|
||||
getIndexes(...args: any[]): Promise<any> {
|
||||
return tracer.trace("db.getIndexes", span => {
|
||||
span?.addTags({ db_name: this.name })
|
||||
span.addTags({ db_name: this.name, num_args: args.length })
|
||||
return this.db.getIndexes(...args)
|
||||
})
|
||||
}
|
||||
|
@ -177,22 +217,27 @@ export class DDInstrumentedDatabase implements Database {
|
|||
sql: string,
|
||||
parameters?: SqlQueryBinding
|
||||
): Promise<T[]> {
|
||||
return tracer.trace("db.sql", span => {
|
||||
span?.addTags({ db_name: this.name })
|
||||
return this.db.sql(sql, parameters)
|
||||
return tracer.trace("db.sql", async span => {
|
||||
span.addTags({ db_name: this.name, num_bindings: parameters?.length })
|
||||
const resp = await this.db.sql<T>(sql, parameters)
|
||||
span.addTags({ num_rows: resp.length })
|
||||
return resp
|
||||
})
|
||||
}
|
||||
|
||||
sqlPurgeDocument(docIds: string[] | string): Promise<void> {
|
||||
return tracer.trace("db.sqlPurgeDocument", span => {
|
||||
span?.addTags({ db_name: this.name })
|
||||
span.addTags({
|
||||
db_name: this.name,
|
||||
num_docs: Array.isArray(docIds) ? docIds.length : 1,
|
||||
})
|
||||
return this.db.sqlPurgeDocument(docIds)
|
||||
})
|
||||
}
|
||||
|
||||
sqlDiskCleanup(): Promise<void> {
|
||||
return tracer.trace("db.sqlDiskCleanup", span => {
|
||||
span?.addTags({ db_name: this.name })
|
||||
span.addTags({ db_name: this.name })
|
||||
return this.db.sqlDiskCleanup()
|
||||
})
|
||||
}
|
||||
|
|
|
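The instrumentation hunks above all apply one pattern: the traced callback becomes async so tags derived from the result (row counts, ok flags, doc_found) can be attached to the span before it closes. A minimal sketch of that pattern with dd-trace — the function and tag names here are illustrative, not from this diff:

    import tracer from "dd-trace"

    // tracer.trace() awaits the promise returned by the callback, so tags
    // added after the await are still recorded on the open span.
    async function tracedFetch(dbName: string, fetch: () => Promise<unknown[]>) {
      return tracer.trace("db.example", async span => {
        span.addTags({ db_name: dbName })
        const rows = await fetch()
        span.addTags({ num_rows: rows.length })
        return rows
      })
    }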
@@ -279,7 +279,7 @@ export class QueryBuilder<T> {
     let query = allOr ? "" : "*:*"
     let allFiltersEmpty = true
     const allPreProcessingOpts = { escape: true, lowercase: true, wrap: true }
-    let tableId: string = ""
+    let tableId = ""
     if (this.#query.equal!.tableId) {
       tableId = this.#query.equal!.tableId
       delete this.#query.equal!.tableId
@@ -1,5 +1,6 @@
 require("../../../tests")
-const getUrlInfo = require("../couch").getUrlInfo
+
+import { getUrlInfo } from "../couch"
 
 describe("pouch", () => {
   describe("Couch DB URL parsing", () => {
@@ -134,10 +134,10 @@ const environment = {
   BUDIBASE_ENVIRONMENT: process.env.BUDIBASE_ENVIRONMENT,
   JS_BCRYPT: process.env.JS_BCRYPT,
   JWT_SECRET: process.env.JWT_SECRET
-    ? createSecretKey(Buffer.from(process.env.JWT_SECRET))
+    ? createSecretKey(process.env.JWT_SECRET, "utf8")
     : undefined,
   JWT_SECRET_FALLBACK: process.env.JWT_SECRET_FALLBACK
-    ? createSecretKey(Buffer.from(process.env.JWT_SECRET_FALLBACK))
+    ? createSecretKey(process.env.JWT_SECRET_FALLBACK, "utf8")
     : undefined,
   ENCRYPTION_KEY: process.env.ENCRYPTION_KEY,
   API_ENCRYPTION_KEY: getAPIEncryptionKey(),
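Both createSecretKey forms produce an equivalent KeyObject; the hunk above swaps the Buffer overload for the string-plus-encoding overload that Node's crypto module has offered since v15. A short sketch (secret value is illustrative):

    import { createSecretKey } from "crypto"

    // The two calls below yield KeyObjects over the same bytes.
    const fromBuffer = createSecretKey(Buffer.from("jwt-secret"))
    const fromString = createSecretKey("jwt-secret", "utf8")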
@@ -121,7 +121,7 @@ const identifyInstallationGroup = async (
 
 const identifyTenantGroup = async (
   tenantId: string,
-  account: Account | undefined,
+  hosting: Hosting,
   timestamp?: string | number
 ): Promise<void> => {
   const id = await getEventTenantId(tenantId)
@@ -129,26 +129,12 @@ const identifyTenantGroup = async (
   const installationId = await getInstallationId()
   const environment = getDeploymentEnvironment()
 
-  let hosting: Hosting
-  let profession: string | undefined
-  let companySize: string | undefined
-
-  if (account) {
-    profession = account.profession
-    companySize = account.size
-    hosting = account.hosting
-  } else {
-    hosting = getHostingFromEnv()
-  }
-
   const group: TenantGroup = {
     id,
     type,
     hosting,
     environment,
     installationId,
-    profession,
-    companySize,
   }
 
   await identifyGroup(group, timestamp)
@@ -2,7 +2,7 @@ import { Event, Identity, Group } from "@budibase/types"
 import { EventProcessor } from "./types"
 
 export default class Processor implements EventProcessor {
-  initialised: boolean = false
+  initialised = false
   processors: EventProcessor[] = []
 
   constructor(processors: EventProcessor[]) {
@@ -13,9 +13,7 @@ const EXCLUDED_EVENTS: Event[] = [
   Event.ROLE_UPDATED,
   Event.DATASOURCE_UPDATED,
   Event.QUERY_UPDATED,
-  Event.TABLE_UPDATED,
   Event.VIEW_UPDATED,
-  Event.VIEW_FILTER_UPDATED,
   Event.VIEW_CALCULATION_UPDATED,
   Event.AUTOMATION_TRIGGER_UPDATED,
   Event.USER_GROUP_UPDATED,
@@ -1 +1 @@
-export { EventProcessor } from "@budibase/types"
+export type { EventProcessor } from "@budibase/types"
@@ -23,3 +23,4 @@ export { default as plugin } from "./plugin"
 export { default as backup } from "./backup"
 export { default as environmentVariable } from "./environmentVariable"
 export { default as auditLog } from "./auditLog"
+export { default as rowAction } from "./rowAction"
@@ -12,8 +12,6 @@ import {
   QueriesRunEvent,
 } from "@budibase/types"
 
-/* eslint-disable */
-
 const created = async (
   datasource: Datasource,
   query: Query,
@@ -0,0 +1,13 @@
+import { publishEvent } from "../events"
+import { Event, RowActionCreatedEvent } from "@budibase/types"
+
+async function created(
+  rowAction: RowActionCreatedEvent,
+  timestamp?: string | number
+) {
+  await publishEvent(Event.ROW_ACTION_CREATED, rowAction, timestamp)
+}
+
+export default {
+  created,
+}
@@ -6,8 +6,6 @@ import {
   Table,
 } from "@budibase/types"
 
-/* eslint-disable */
-
 const created = async (count: number, timestamp?: string | number) => {
   const properties: RowsCreatedEvent = {
     count,
@@ -1,13 +1,14 @@
 import { publishEvent } from "../events"
 import {
   Event,
-  TableExportFormat,
+  FieldType,
   Table,
   TableCreatedEvent,
-  TableUpdatedEvent,
   TableDeletedEvent,
   TableExportedEvent,
+  TableExportFormat,
   TableImportedEvent,
+  TableUpdatedEvent,
 } from "@budibase/types"
 
 async function created(table: Table, timestamp?: string | number) {
@@ -20,14 +21,34 @@ async function created(table: Table, timestamp?: string | number) {
   await publishEvent(Event.TABLE_CREATED, properties, timestamp)
 }
 
-async function updated(table: Table) {
+async function updated(oldTable: Table, newTable: Table) {
+  // only publish the event if it has fields we are interested in
+  let defaultValues, aiColumn
+
+  // check that new fields have been added
+  for (const key in newTable.schema) {
+    if (!oldTable.schema[key]) {
+      const newColumn = newTable.schema[key]
+      if ("default" in newColumn && newColumn.default != null) {
+        defaultValues = true
+      }
+      if (newColumn.type === FieldType.AI) {
+        aiColumn = newColumn.operation
+      }
+    }
+  }
+
   const properties: TableUpdatedEvent = {
-    tableId: table._id as string,
+    tableId: newTable._id as string,
+    defaultValues,
+    aiColumn,
     audited: {
-      name: table.name,
+      name: newTable.name,
     },
   }
-  await publishEvent(Event.TABLE_UPDATED, properties)
+  if (defaultValues || aiColumn) {
+    await publishEvent(Event.TABLE_UPDATED, properties)
+  }
 }
 
 async function deleted(table: Table) {
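The updated() hunk above only fires TABLE_UPDATED when the schema diff finds a newly added column carrying a default value or an AI operation. A minimal sketch of the "new columns only" comparison it relies on (helper name assumed, not from this diff):

    // Keys present in the new schema but missing from the old one are the
    // columns added by this update.
    function addedColumns(
      oldSchema: Record<string, unknown>,
      newSchema: Record<string, unknown>
    ): string[] {
      return Object.keys(newSchema).filter(key => !(key in oldSchema))
    }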
@@ -1,6 +1,11 @@
 import { publishEvent } from "../events"
 import {
+  CalculationType,
   Event,
+  Table,
+  TableExportFormat,
+  View,
+  ViewCalculation,
   ViewCalculationCreatedEvent,
   ViewCalculationDeletedEvent,
   ViewCalculationUpdatedEvent,
@@ -11,22 +16,20 @@ import {
   ViewFilterDeletedEvent,
   ViewFilterUpdatedEvent,
   ViewUpdatedEvent,
-  View,
-  ViewCalculation,
-  Table,
-  TableExportFormat,
+  ViewV2,
+  ViewJoinCreatedEvent,
 } from "@budibase/types"
 
-/* eslint-disable */
 
-async function created(view: View, timestamp?: string | number) {
+async function created(view: ViewV2, timestamp?: string | number) {
   const properties: ViewCreatedEvent = {
     name: view.name,
+    type: view.type,
     tableId: view.tableId,
   }
   await publishEvent(Event.VIEW_CREATED, properties, timestamp)
 }
 
-async function updated(view: View) {
+async function updated(view: ViewV2) {
   const properties: ViewUpdatedEvent = {
     tableId: view.tableId,
   }
@@ -48,16 +51,27 @@ async function exported(table: Table, format: TableExportFormat) {
   await publishEvent(Event.VIEW_EXPORTED, properties)
 }
 
-async function filterCreated(view: View, timestamp?: string | number) {
+async function filterCreated(
+  { tableId, filterGroups }: { tableId: string; filterGroups: number },
+  timestamp?: string | number
+) {
   const properties: ViewFilterCreatedEvent = {
-    tableId: view.tableId,
+    tableId,
+    filterGroups,
   }
   await publishEvent(Event.VIEW_FILTER_CREATED, properties, timestamp)
 }
 
-async function filterUpdated(view: View) {
+async function filterUpdated({
+  tableId,
+  filterGroups,
+}: {
+  tableId: string
+  filterGroups: number
+}) {
   const properties: ViewFilterUpdatedEvent = {
-    tableId: view.tableId,
+    tableId: tableId,
+    filterGroups,
   }
   await publishEvent(Event.VIEW_FILTER_UPDATED, properties)
 }
@@ -69,10 +83,16 @@ async function filterDeleted(view: View) {
   await publishEvent(Event.VIEW_FILTER_DELETED, properties)
 }
 
-async function calculationCreated(view: View, timestamp?: string | number) {
+async function calculationCreated(
+  {
+    tableId,
+    calculationType,
+  }: { tableId: string; calculationType: CalculationType },
+  timestamp?: string | number
+) {
   const properties: ViewCalculationCreatedEvent = {
-    tableId: view.tableId,
-    calculation: view.calculation as ViewCalculation,
+    tableId,
+    calculation: calculationType,
   }
   await publishEvent(Event.VIEW_CALCULATION_CREATED, properties, timestamp)
 }
@@ -93,6 +113,13 @@ async function calculationDeleted(existingView: View) {
   await publishEvent(Event.VIEW_CALCULATION_DELETED, properties)
 }
 
+async function viewJoinCreated(tableId: any, timestamp?: string | number) {
+  const properties: ViewJoinCreatedEvent = {
+    tableId,
+  }
+  await publishEvent(Event.VIEW_JOIN_CREATED, properties, timestamp)
+}
+
 export default {
   created,
   updated,
@@ -104,4 +131,5 @@ export default {
   calculationCreated,
   calculationUpdated,
   calculationDeleted,
+  viewJoinCreated,
 }
@@ -2,9 +2,10 @@ import env from "../environment"
 import * as crypto from "crypto"
 import * as context from "../context"
 import { PostHog, PostHogOptions } from "posthog-node"
-import { FeatureFlag } from "@budibase/types"
 import tracer from "dd-trace"
 import { Duration } from "../utils"
+import { cloneDeep } from "lodash"
+import { FeatureFlagDefaults } from "@budibase/types"
 
 let posthog: PostHog | undefined
 export function init(opts?: PostHogOptions) {
@@ -30,74 +31,6 @@ export function shutdown() {
   posthog?.shutdown()
 }
 
-export abstract class Flag<T> {
-  static boolean(defaultValue: boolean): Flag<boolean> {
-    return new BooleanFlag(defaultValue)
-  }
-
-  static string(defaultValue: string): Flag<string> {
-    return new StringFlag(defaultValue)
-  }
-
-  static number(defaultValue: number): Flag<number> {
-    return new NumberFlag(defaultValue)
-  }
-
-  protected constructor(public defaultValue: T) {}
-
-  abstract parse(value: any): T
-}
-
-type UnwrapFlag<F> = F extends Flag<infer U> ? U : never
-
-export type FlagValues<T> = {
-  [K in keyof T]: UnwrapFlag<T[K]>
-}
-
-type KeysOfType<T, U> = {
-  [K in keyof T]: T[K] extends Flag<U> ? K : never
-}[keyof T]
-
-class BooleanFlag extends Flag<boolean> {
-  parse(value: any) {
-    if (typeof value === "string") {
-      return ["true", "t", "1"].includes(value.toLowerCase())
-    }
-
-    if (typeof value === "boolean") {
-      return value
-    }
-
-    throw new Error(`could not parse value "${value}" as boolean`)
-  }
-}
-
-class StringFlag extends Flag<string> {
-  parse(value: any) {
-    if (typeof value === "string") {
-      return value
-    }
-    throw new Error(`could not parse value "${value}" as string`)
-  }
-}
-
-class NumberFlag extends Flag<number> {
-  parse(value: any) {
-    if (typeof value === "number") {
-      return value
-    }
-
-    if (typeof value === "string") {
-      const parsed = parseFloat(value)
-      if (!isNaN(parsed)) {
-        return parsed
-      }
-    }
-
-    throw new Error(`could not parse value "${value}" as number`)
-  }
-}
-
 export interface EnvFlagEntry {
   tenantId: string
   key: string
@@ -120,7 +53,7 @@ export function parseEnvFlags(flags: string): EnvFlagEntry[] {
   return result
 }
 
-export class FlagSet<V extends Flag<any>, T extends { [key: string]: V }> {
+export class FlagSet<T extends { [name: string]: boolean }> {
   // This is used to safely cache flag sets in the current request context.
   // Because multiple sets could theoretically exist, we don't want the cache of
   // one to leak into another.
@@ -130,34 +63,25 @@ export class FlagSet<V extends Flag<any>, T extends { [key: string]: V }> {
     this.setId = crypto.randomUUID()
   }
 
-  defaults(): FlagValues<T> {
-    return Object.keys(this.flagSchema).reduce((acc, key) => {
-      const typedKey = key as keyof T
-      acc[typedKey] = this.flagSchema[key].defaultValue
-      return acc
-    }, {} as FlagValues<T>)
+  defaults(): T {
+    return cloneDeep(this.flagSchema)
   }
 
   isFlagName(name: string | number | symbol): name is keyof T {
     return this.flagSchema[name as keyof T] !== undefined
   }
 
-  async get<K extends keyof T>(key: K): Promise<FlagValues<T>[K]> {
+  async isEnabled<K extends keyof T>(key: K): Promise<T[K]> {
     const flags = await this.fetch()
     return flags[key]
   }
 
-  async isEnabled<K extends KeysOfType<T, boolean>>(key: K): Promise<boolean> {
-    const flags = await this.fetch()
-    return flags[key]
-  }
-
-  async fetch(): Promise<FlagValues<T>> {
+  async fetch(): Promise<T> {
     return await tracer.trace("features.fetch", async span => {
-      const cachedFlags = context.getFeatureFlags<FlagValues<T>>(this.setId)
+      const cachedFlags = context.getFeatureFlags(this.setId)
       if (cachedFlags) {
         span?.addTags({ fromCache: true })
-        return cachedFlags
+        return cachedFlags as T
       }
 
       const tags: Record<string, any> = {}
@@ -189,7 +113,7 @@ export class FlagSet<V extends Flag<any>, T extends { [key: string]: V }> {
 
       // @ts-expect-error - TS does not like you writing into a generic type,
       // but we know that it's okay in this case because it's just an object.
-      flagValues[key as keyof FlagValues] = value
+      flagValues[key as keyof T] = value
       tags[`flags.${key}.source`] = "environment"
     }
 
@@ -217,11 +141,11 @@ export class FlagSet<V extends Flag<any>, T extends { [key: string]: V }> {
       tags[`readFromPostHog`] = true
 
       const personProperties: Record<string, string> = { tenantId }
-      const posthogFlags = await posthog.getAllFlagsAndPayloads(userId, {
+      const posthogFlags = await posthog.getAllFlags(userId, {
         personProperties,
       })
 
-      for (const [name, value] of Object.entries(posthogFlags.featureFlags)) {
+      for (const [name, value] of Object.entries(posthogFlags)) {
         if (!this.isFlagName(name)) {
           // We don't want an unexpected PostHog flag to break the app, so we
           // just log it and continue.
@@ -229,19 +153,20 @@ export class FlagSet<V extends Flag<any>, T extends { [key: string]: V }> {
           continue
         }
 
+        if (typeof value !== "boolean") {
+          console.warn(`Invalid value for posthog flag "${name}": ${value}`)
+          continue
+        }
+
         if (flagValues[name] === true || specificallySetFalse.has(name)) {
           // If the flag is already set through environment variables, we
           // don't want to override it back to false here.
           continue
         }
 
-        const payload = posthogFlags.featureFlagPayloads?.[name]
-        const flag = this.flagSchema[name]
         try {
-          // @ts-expect-error - TS does not like you writing into a generic
-          // type, but we know that it's okay in this case because it's just
-          // an object.
-          flagValues[name] = flag.parse(payload || value)
+          // @ts-expect-error - TS does not like you writing into a generic type.
+          flagValues[name] = value
          tags[`flags.${name}.source`] = "posthog"
         } catch (err) {
           // We don't want an invalid PostHog flag to break the app, so we just
@@ -262,16 +187,12 @@ export class FlagSet<V extends Flag<any>, T extends { [key: string]: V }> {
     }
   }
 }
 
-// This is the primary source of truth for feature flags. If you want to add a
-// new flag, add it here and use the `fetch` and `get` functions to access it.
-// All of the machinery in this file is to make sure that flags have their
-// default values set correctly and their types flow through the system.
-export const flags = new FlagSet({
-  [FeatureFlag.DEFAULT_VALUES]: Flag.boolean(true),
-  [FeatureFlag.AUTOMATION_BRANCHING]: Flag.boolean(true),
-  [FeatureFlag.AI_CUSTOM_CONFIGS]: Flag.boolean(true),
-  [FeatureFlag.BUDIBASE_AI]: Flag.boolean(true),
-})
-
-type UnwrapPromise<T> = T extends Promise<infer U> ? U : T
-export type FeatureFlags = UnwrapPromise<ReturnType<typeof flags.fetch>>
+export const flags = new FlagSet(FeatureFlagDefaults)
+
+export async function isEnabled(flag: keyof typeof FeatureFlagDefaults) {
+  return await flags.isEnabled(flag)
+}
+
+export async function all() {
+  return await flags.fetch()
+}
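After this rewrite a flag set is just a map of flag names to boolean defaults: the Flag/BooleanFlag/StringFlag/NumberFlag hierarchy, the FlagValues mapped type and the PostHog payload parsing all disappear, and get() collapses into isEnabled(). A minimal sketch of the resulting surface, with an assumed flag name (the real defaults live in FeatureFlagDefaults in @budibase/types):

    // Defaults are plain booleans; FlagSet only layers environment/PostHog
    // overrides and per-request caching on top of them.
    const exampleDefaults = { EXAMPLE_FLAG: false }
    const exampleFlags = new FlagSet(exampleDefaults)

    async function useFlag() {
      if (await exampleFlags.isEnabled("EXAMPLE_FLAG")) {
        // feature-gated path
      }
    }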
@@ -1,5 +1,5 @@
 import { IdentityContext, IdentityType } from "@budibase/types"
-import { Flag, FlagSet, FlagValues, init, shutdown } from "../"
+import { FlagSet, init, shutdown } from "../"
 import * as context from "../../context"
 import environment, { withEnv } from "../../environment"
 import nodeFetch from "node-fetch"
@@ -7,10 +7,8 @@ import nock from "nock"
 import * as crypto from "crypto"
 
 const schema = {
-  TEST_BOOLEAN: Flag.boolean(false),
-  TEST_STRING: Flag.string("default value"),
-  TEST_NUMBER: Flag.number(0),
-  TEST_BOOLEAN_DEFAULT_TRUE: Flag.boolean(true),
+  TEST_BOOLEAN: false,
+  TEST_BOOLEAN_DEFAULT_TRUE: true,
 }
 const flags = new FlagSet(schema)
 
@@ -19,7 +17,7 @@ interface TestCase {
   identity?: Partial<IdentityContext>
   environmentFlags?: string
   posthogFlags?: PostHogFlags
-  expected?: Partial<FlagValues<typeof schema>>
+  expected?: Partial<typeof schema>
   errorMessage?: string | RegExp
 }
 
@@ -83,22 +81,6 @@ describe("feature flags", () => {
       },
       expected: { TEST_BOOLEAN: true },
     },
-    {
-      it: "should be able to read string flags from PostHog",
-      posthogFlags: {
-        featureFlags: { TEST_STRING: true },
-        featureFlagPayloads: { TEST_STRING: "test" },
-      },
-      expected: { TEST_STRING: "test" },
-    },
-    {
-      it: "should be able to read numeric flags from PostHog",
-      posthogFlags: {
-        featureFlags: { TEST_NUMBER: true },
-        featureFlagPayloads: { TEST_NUMBER: "123" },
-      },
-      expected: { TEST_NUMBER: 123 },
-    },
     {
       it: "should not be able to override a negative environment flag from PostHog",
       environmentFlags: "default:!TEST_BOOLEAN",
@@ -177,7 +159,7 @@ describe("feature flags", () => {
       expect(values).toMatchObject(expected)
 
       for (const [key, expectedValue] of Object.entries(expected)) {
-        const value = await flags.get(key as keyof typeof schema)
+        const value = await flags.isEnabled(key as keyof typeof schema)
         expect(value).toBe(expectedValue)
       }
     } else {
|
|||
import { FeatureFlags, parseEnvFlags } from ".."
|
||||
import { FeatureFlags } from "@budibase/types"
|
||||
import { setEnv } from "../../environment"
|
||||
import { parseEnvFlags } from "../features"
|
||||
|
||||
function getCurrentFlags(): Record<string, Record<string, boolean>> {
|
||||
const result: Record<string, Record<string, boolean>> = {}
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
export * as configs from "./configs"
|
||||
export * as events from "./events"
|
||||
export * as migrations from "./migrations"
|
||||
export * as users from "./users"
|
||||
export * as userUtils from "./users/utils"
|
||||
export * as roles from "./security/roles"
|
||||
|
|
|
@ -1,6 +1,10 @@
|
|||
import { BBContext } from "@budibase/types"
|
||||
import { Ctx } from "@budibase/types"
|
||||
import type { Middleware, Next } from "koa"
|
||||
|
||||
export default async (ctx: BBContext | any, next: any) => {
|
||||
// this middleware exists purely to be overridden by middlewares supplied by the @budibase/pro library
|
||||
const middleware = (async (ctx: Ctx, next: Next) => {
|
||||
// Placeholder for audit log middleware
|
||||
return next()
|
||||
}
|
||||
}) as Middleware
|
||||
|
||||
export default middleware
|
||||
|
|
|
@@ -22,6 +22,7 @@ import {
 } from "@budibase/types"
 import { ErrorCode, InvalidAPIKeyError } from "../errors"
 import tracer from "dd-trace"
+import type { Middleware, Next } from "koa"
 
 const ONE_MINUTE = env.SESSION_UPDATE_PERIOD
   ? parseInt(env.SESSION_UPDATE_PERIOD)
@@ -94,6 +95,14 @@ async function checkApiKey(
   })
 }
 
+function getHeader(ctx: Ctx, header: Header): string | undefined {
+  const contents = ctx.request.headers[header]
+  if (Array.isArray(contents)) {
+    throw new Error("Unexpected header format")
+  }
+  return contents
+}
+
 /**
  * This middleware is tenancy aware, so that it does not depend on other middlewares being used.
  * The tenancy modules should not be used here and it should be assumed that the tenancy context
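getHeader() exists because Node types incoming headers as string | string[] | undefined: a header sent more than once arrives as an array. The helper narrows that union for the single-value headers used below and treats the array case as an error. The same narrowing in isolation:

    import type { IncomingHttpHeaders } from "http"

    // Returns the single value of a header, rejecting duplicate occurrences.
    function singleHeader(
      headers: IncomingHttpHeaders,
      name: string
    ): string | undefined {
      const value = headers[name.toLowerCase()]
      if (Array.isArray(value)) {
        throw new Error("Unexpected header format")
      }
      return value
    }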
@@ -106,9 +115,9 @@ export default function (
   }
 ) {
   const noAuthOptions = noAuthPatterns ? buildMatcherRegex(noAuthPatterns) : []
-  return async (ctx: Ctx | any, next: any) => {
+  return (async (ctx: Ctx, next: Next) => {
     let publicEndpoint = false
-    const version = ctx.request.headers[Header.API_VER]
+    const version = getHeader(ctx, Header.API_VER)
     // the path is not authenticated
     const found = matches(ctx, noAuthOptions)
     if (found) {
@@ -116,21 +125,21 @@ export default function (
     }
     try {
       // check the actual user is authenticated first, try header or cookie
-      let headerToken = ctx.request.headers[Header.TOKEN]
+      let headerToken = getHeader(ctx, Header.TOKEN)
 
       const authCookie =
         getCookie<SessionCookie>(ctx, Cookie.Auth) ||
         openJwt<SessionCookie>(headerToken)
-      let apiKey = ctx.request.headers[Header.API_KEY]
+      let apiKey = getHeader(ctx, Header.API_KEY)
 
       if (!apiKey && ctx.request.headers[Header.AUTHORIZATION]) {
         apiKey = ctx.request.headers[Header.AUTHORIZATION].split(" ")[1]
       }
 
-      const tenantId = ctx.request.headers[Header.TENANT_ID]
-      let authenticated: boolean = false,
+      const tenantId = getHeader(ctx, Header.TENANT_ID)
+      let authenticated = false,
         user: User | { tenantId: string } | undefined = undefined,
-        internal: boolean = false,
+        internal = false,
         loginMethod: LoginMethod | undefined = undefined
       if (authCookie && !apiKey) {
         const sessionId = authCookie.sessionId
@@ -243,5 +252,5 @@ export default function (
       ctx.throw(err.status || 403, err)
       }
     }
-  }
+  }) as Middleware
 }
@@ -1,6 +1,7 @@
 import { Header } from "../constants"
 import { buildMatcherRegex, matches } from "./matchers"
-import { BBContext, EndpointMatcher } from "@budibase/types"
+import { Ctx, EndpointMatcher } from "@budibase/types"
+import type { Middleware, Next } from "koa"
 
 /**
  * GET, HEAD and OPTIONS methods are considered safe operations
@@ -36,7 +37,7 @@ export default function (
   opts: { noCsrfPatterns: EndpointMatcher[] } = { noCsrfPatterns: [] }
 ) {
   const noCsrfOptions = buildMatcherRegex(opts.noCsrfPatterns)
-  return async (ctx: BBContext | any, next: any) => {
+  return (async (ctx: Ctx, next: Next) => {
     // don't apply for excluded paths
     const found = matches(ctx, noCsrfOptions)
     if (found) {
@@ -77,5 +78,5 @@ export default function (
     }
 
     return next()
-  }
+  }) as Middleware
 }
@@ -32,8 +32,12 @@ export async function errorHandling(ctx: any, next: any) {
   }
 
   if (environment.isTest() && ctx.headers["x-budibase-include-stacktrace"]) {
+    let rootErr = err
+    while (rootErr.cause) {
+      rootErr = rootErr.cause
+    }
     // @ts-ignore
-    error.stack = err.stack
+    error.stack = rootErr.stack
   }
 
   ctx.body = error
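The loop added above walks the ES2022 Error "cause" chain so the stack trace surfaced in test responses belongs to the root failure rather than the outermost wrapper. The same unwrapping as a standalone sketch:

    // Follow err.cause links until the original error is reached.
    function rootCause(err: Error): Error {
      let current = err
      while (current.cause instanceof Error) {
        current = current.cause
      }
      return current
    }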
@@ -1,11 +1,11 @@
 import { Header } from "../constants"
-import { BBContext } from "@budibase/types"
+import { Ctx } from "@budibase/types"
 import { isValidInternalAPIKey } from "../utils"
 
 /**
  * API Key only endpoint.
 */
-export default async (ctx: BBContext, next: any) => {
+export default async (ctx: Ctx, next: any) => {
   const apiKey = ctx.request.headers[Header.API_KEY]
   if (!apiKey) {
     ctx.throw(403, "Unauthorized")
@@ -1,4 +1,4 @@
-import { BBContext, EndpointMatcher, RegexMatcher } from "@budibase/types"
+import { Ctx, EndpointMatcher, RegexMatcher } from "@budibase/types"
 
 const PARAM_REGEX = /\/:(.*?)(\/.*)?$/g
 
@@ -27,7 +27,7 @@ export const buildMatcherRegex = (
   })
 }
 
-export const matches = (ctx: BBContext, options: RegexMatcher[]) => {
+export const matches = (ctx: Ctx, options: RegexMatcher[]) => {
   return options.find(({ regex, method }) => {
     const urlMatch = regex.test(ctx.request.url)
     const methodMatch =
@@ -2,7 +2,7 @@ import { UserStatus } from "../../constants"
 import { compare } from "../../utils"
 import * as users from "../../users"
 import { authError } from "./utils"
-import { BBContext } from "@budibase/types"
+import { Ctx } from "@budibase/types"
 
 const INVALID_ERR = "Invalid credentials"
 const EXPIRED = "This account has expired. Please reset your password"
@@ -20,7 +20,7 @@ export const options = {
 * @returns The authenticated user, or errors if they occur
 */
 export async function authenticate(
-  ctx: BBContext,
+  ctx: Ctx,
   email: string,
   password: string,
   done: Function
@@ -20,7 +20,7 @@ export const ssoSaveUserNoOp: SaveSSOUserFunction = (user: SSOUser) =>
 */
 export async function authenticate(
   details: SSOAuthDetails,
-  requireLocalAccount: boolean = true,
+  requireLocalAccount = true,
   done: any,
   saveUserFn: SaveSSOUserFunction
 ) {
@@ -3,11 +3,12 @@ import { getTenantIDFromCtx } from "../tenancy"
 import { buildMatcherRegex, matches } from "./matchers"
 import { Header } from "../constants"
 import {
-  BBContext,
+  Ctx,
   EndpointMatcher,
   GetTenantIdOptions,
   TenantResolutionStrategy,
 } from "@budibase/types"
+import type { Next, Middleware } from "koa"
 
 export default function (
   allowQueryStringPatterns: EndpointMatcher[],
@@ -17,7 +18,7 @@ export default function (
   const allowQsOptions = buildMatcherRegex(allowQueryStringPatterns)
   const noTenancyOptions = buildMatcherRegex(noTenancyPatterns)
 
-  return async function (ctx: BBContext | any, next: any) {
+  return async function (ctx: Ctx, next: Next) {
     const allowNoTenant =
       opts.noTenancyRequired || !!matches(ctx, noTenancyOptions)
     const tenantOpts: GetTenantIdOptions = {
@@ -32,5 +33,5 @@ export default function (
     const tenantId = getTenantIDFromCtx(ctx, tenantOpts)
     ctx.set(Header.TENANT_ID, tenantId as string)
     return doInTenant(tenantId, next)
-  }
+  } as Middleware
 }
@@ -1,40 +0,0 @@
-import {
-  MigrationType,
-  MigrationName,
-  MigrationDefinition,
-} from "@budibase/types"
-
-export const DEFINITIONS: MigrationDefinition[] = [
-  {
-    type: MigrationType.GLOBAL,
-    name: MigrationName.USER_EMAIL_VIEW_CASING,
-  },
-  {
-    type: MigrationType.GLOBAL,
-    name: MigrationName.SYNC_QUOTAS,
-  },
-  {
-    type: MigrationType.APP,
-    name: MigrationName.APP_URLS,
-  },
-  {
-    type: MigrationType.APP,
-    name: MigrationName.EVENT_APP_BACKFILL,
-  },
-  {
-    type: MigrationType.APP,
-    name: MigrationName.TABLE_SETTINGS_LINKS_TO_ACTIONS,
-  },
-  {
-    type: MigrationType.GLOBAL,
-    name: MigrationName.EVENT_GLOBAL_BACKFILL,
-  },
-  {
-    type: MigrationType.INSTALLATION,
-    name: MigrationName.EVENT_INSTALLATION_BACKFILL,
-  },
-  {
-    type: MigrationType.GLOBAL,
-    name: MigrationName.GLOBAL_INFO_SYNC_USERS,
-  },
-]
@@ -1,2 +0,0 @@
-export * from "./migrations"
-export * from "./definitions"
@@ -1,186 +0,0 @@
-import { DEFAULT_TENANT_ID } from "../constants"
-import {
-  DocumentType,
-  StaticDatabases,
-  getAllApps,
-  getGlobalDBName,
-  getDB,
-} from "../db"
-import environment from "../environment"
-import * as platform from "../platform"
-import * as context from "../context"
-import { DEFINITIONS } from "."
-import {
-  Migration,
-  MigrationOptions,
-  MigrationType,
-  MigrationNoOpOptions,
-  App,
-} from "@budibase/types"
-
-export const getMigrationsDoc = async (db: any) => {
-  // get the migrations doc
-  try {
-    return await db.get(DocumentType.MIGRATIONS)
-  } catch (err: any) {
-    if (err.status && err.status === 404) {
-      return { _id: DocumentType.MIGRATIONS }
-    } else {
-      throw err
-    }
-  }
-}
-
-export const backPopulateMigrations = async (opts: MigrationNoOpOptions) => {
-  // filter migrations to the type and populate a no-op migration
-  const migrations: Migration[] = DEFINITIONS.filter(
-    def => def.type === opts.type
-  ).map(d => ({ ...d, fn: () => {} }))
-  await runMigrations(migrations, { noOp: opts })
-}
-
-export const runMigration = async (
-  migration: Migration,
-  options: MigrationOptions = {}
-) => {
-  const migrationType = migration.type
-  const migrationName = migration.name
-  const silent = migration.silent
-
-  const log = (message: string) => {
-    if (!silent) {
-      console.log(message)
-    }
-  }
-
-  // get the db to store the migration in
-  let dbNames: string[]
-  if (migrationType === MigrationType.GLOBAL) {
-    dbNames = [getGlobalDBName()]
-  } else if (migrationType === MigrationType.APP) {
-    if (options.noOp) {
-      if (!options.noOp.appId) {
-        throw new Error("appId is required for noOp app migration")
-      }
-      dbNames = [options.noOp.appId]
-    } else {
-      const apps = (await getAllApps(migration.appOpts)) as App[]
-      dbNames = apps.map(app => app.appId)
-    }
-  } else if (migrationType === MigrationType.INSTALLATION) {
-    dbNames = [StaticDatabases.PLATFORM_INFO.name]
-  } else {
-    throw new Error(`Unrecognised migration type [${migrationType}]`)
-  }
-
-  const length = dbNames.length
-  let count = 0
-
-  // run the migration against each db
-  for (const dbName of dbNames) {
-    count++
-    const lengthStatement = length > 1 ? `[${count}/${length}]` : ""
-
-    const db = getDB(dbName)
-
-    try {
-      const doc = await getMigrationsDoc(db)
-
-      // the migration has already been run
-      if (doc[migrationName]) {
-        // check for force
-        if (
-          options.force &&
-          options.force[migrationType] &&
-          options.force[migrationType].includes(migrationName)
-        ) {
-          log(`[Migration: ${migrationName}] [DB: ${dbName}] Forcing`)
-        } else {
-          // no force, exit
-          return
-        }
-      }
-
-      // check if the migration is not a no-op
-      if (!options.noOp) {
-        log(
-          `[Migration: ${migrationName}] [DB: ${dbName}] Running ${lengthStatement}`
-        )
-
-        if (migration.preventRetry) {
-          // eagerly set the completion date
-          // so that we never run this migration twice even upon failure
-          doc[migrationName] = Date.now()
-          const response = await db.put(doc)
-          doc._rev = response.rev
-        }
-
-        // run the migration
-        if (migrationType === MigrationType.APP) {
-          await context.doInAppContext(db.name, async () => {
-            await migration.fn(db)
-          })
-        } else {
-          await migration.fn(db)
-        }
-
-        log(`[Migration: ${migrationName}] [DB: ${dbName}] Complete`)
-      }
-
-      // mark as complete
-      doc[migrationName] = Date.now()
-      await db.put(doc)
-    } catch (err) {
-      console.error(
-        `[Migration: ${migrationName}] [DB: ${dbName}] Error: `,
-        err
-      )
-      throw err
-    }
-  }
-}
-
-export const runMigrations = async (
-  migrations: Migration[],
-  options: MigrationOptions = {}
-) => {
-  let tenantIds
-
-  if (environment.MULTI_TENANCY) {
-    if (options.noOp) {
-      tenantIds = [options.noOp.tenantId]
-    } else if (!options.tenantIds || !options.tenantIds.length) {
-      // run for all tenants
-      tenantIds = await platform.tenants.getTenantIds()
-    } else {
-      tenantIds = options.tenantIds
-    }
-  } else {
-    // single tenancy
-    tenantIds = [DEFAULT_TENANT_ID]
-  }
-
-  if (tenantIds.length > 1) {
-    console.log(`Checking migrations for ${tenantIds.length} tenants`)
-  } else {
-    console.log("Checking migrations")
-  }
-
-  let count = 0
-  // for all tenants
-  for (const tenantId of tenantIds) {
-    count++
-    if (tenantIds.length > 1) {
-      console.log(`Progress [${count}/${tenantIds.length}]`)
-    }
-    // for all migrations
-    for (const migration of migrations) {
-      // run the migration
-      await context.doInTenant(
-        tenantId,
-        async () => await runMigration(migration, options)
-      )
-    }
-  }
-  console.log("Migrations complete")
-}
@@ -1,11 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`migrations should match snapshot 1`] = `
-{
-  "_id": "migrations",
-  "_rev": "1-2f64479842a0513aa8b97f356b0b9127",
-  "createdAt": "2020-01-01T00:00:00.000Z",
-  "test": 1577836800000,
-  "updatedAt": "2020-01-01T00:00:00.000Z",
-}
-`;
@@ -1,64 +0,0 @@
-import { testEnv, DBTestConfiguration } from "../../../tests/extra"
-import * as migrations from "../index"
-import * as context from "../../context"
-import { MigrationType } from "@budibase/types"
-
-testEnv.multiTenant()
-
-describe("migrations", () => {
-  const config = new DBTestConfiguration()
-
-  const migrationFunction = jest.fn()
-
-  const MIGRATIONS = [
-    {
-      type: MigrationType.GLOBAL,
-      name: "test" as any,
-      fn: migrationFunction,
-    },
-  ]
-
-  beforeEach(() => {
-    config.newTenant()
-  })
-
-  afterEach(async () => {
-    jest.clearAllMocks()
-  })
-
-  const migrate = () => {
-    return migrations.runMigrations(MIGRATIONS, {
-      tenantIds: [config.tenantId],
-    })
-  }
-
-  it("should run a new migration", async () => {
-    await config.doInTenant(async () => {
-      await migrate()
-      expect(migrationFunction).toHaveBeenCalled()
-      const db = context.getGlobalDB()
-      const doc = await migrations.getMigrationsDoc(db)
-      expect(doc.test).toBeDefined()
-    })
-  })
-
-  it("should match snapshot", async () => {
-    await config.doInTenant(async () => {
-      await migrate()
-      const doc = await migrations.getMigrationsDoc(context.getGlobalDB())
-      expect(doc).toMatchSnapshot()
-    })
-  })
-
-  it("should skip a previously run migration", async () => {
-    await config.doInTenant(async () => {
-      const db = context.getGlobalDB()
-      await migrate()
-      const previousDoc = await migrations.getMigrationsDoc(db)
-      await migrate()
-      const currentDoc = await migrations.getMigrationsDoc(db)
-      expect(migrationFunction).toHaveBeenCalledTimes(1)
-      expect(currentDoc.test).toBe(previousDoc.test)
-    })
-  })
-})
@@ -334,7 +334,7 @@ export async function listAllObjects(bucketName: string, path: string) {
 export function getPresignedUrl(
   bucketName: string,
   key: string,
-  durationSeconds: number = 3600
+  durationSeconds = 3600
 ) {
   const objectStore = ObjectStore(bucketName, { presigning: true })
   const params = {
@@ -7,7 +7,7 @@ import { addListeners, StalledFn } from "./listeners"
 import { Duration } from "../utils"
 import * as timers from "../timers"
 
-export { QueueOptions, Queue, JobOptions } from "bull"
+export type { QueueOptions, Queue, JobOptions } from "bull"
 
 // the queue lock is held for 5 minutes
 const QUEUE_LOCK_MS = Duration.fromMinutes(5).toMs()
@@ -92,7 +92,7 @@ function getLockName(opts: LockOptions) {
   // determine lock name
   // by default use the tenantId for uniqueness, unless using a system lock
   const prefix = opts.systemLock ? "system" : context.getTenantId()
-  let name: string = `lock:${prefix}_${opts.name}`
+  let name = `lock:${prefix}_${opts.name}`
   // add additional unique name if required
   if (opts.resource) {
     name = name + `_${opts.resource}`
@@ -11,7 +11,7 @@ describe("redis", () => {
   let container: StartedTestContainer
 
   beforeAll(async () => {
-    const container = await new GenericContainer("redis")
+    container = await new GenericContainer("redis")
       .withExposedPorts(6379)
       .start()
 
@@ -2,6 +2,8 @@ import {
   PermissionLevel,
   PermissionType,
   BuiltinPermissionID,
+  Permission,
+  BuiltinPermissions,
 } from "@budibase/types"
 import flatten from "lodash/flatten"
 import cloneDeep from "lodash/fp/cloneDeep"
@@ -12,7 +14,7 @@ export type RoleHierarchy = {
   permissionId: string
 }[]
 
-export class Permission {
+export class PermissionImpl implements Permission {
   type: PermissionType
   level: PermissionLevel
 
@@ -61,68 +63,62 @@ export function getAllowedLevels(userPermLevel: PermissionLevel): string[] {
   }
 }
 
-export const BUILTIN_PERMISSIONS: {
-  [key in keyof typeof BuiltinPermissionID]: {
-    _id: (typeof BuiltinPermissionID)[key]
-    name: string
-    permissions: Permission[]
-  }
-} = {
+export const BUILTIN_PERMISSIONS: BuiltinPermissions = {
   PUBLIC: {
     _id: BuiltinPermissionID.PUBLIC,
     name: "Public",
     permissions: [
-      new Permission(PermissionType.WEBHOOK, PermissionLevel.EXECUTE),
+      new PermissionImpl(PermissionType.WEBHOOK, PermissionLevel.EXECUTE),
     ],
   },
   READ_ONLY: {
     _id: BuiltinPermissionID.READ_ONLY,
     name: "Read only",
     permissions: [
-      new Permission(PermissionType.QUERY, PermissionLevel.READ),
-      new Permission(PermissionType.TABLE, PermissionLevel.READ),
-      new Permission(PermissionType.APP, PermissionLevel.READ),
+      new PermissionImpl(PermissionType.QUERY, PermissionLevel.READ),
+      new PermissionImpl(PermissionType.TABLE, PermissionLevel.READ),
+      new PermissionImpl(PermissionType.APP, PermissionLevel.READ),
     ],
   },
   WRITE: {
     _id: BuiltinPermissionID.WRITE,
     name: "Read/Write",
     permissions: [
-      new Permission(PermissionType.QUERY, PermissionLevel.WRITE),
-      new Permission(PermissionType.TABLE, PermissionLevel.WRITE),
-      new Permission(PermissionType.AUTOMATION, PermissionLevel.EXECUTE),
-      new Permission(PermissionType.LEGACY_VIEW, PermissionLevel.READ),
-      new Permission(PermissionType.APP, PermissionLevel.READ),
+      new PermissionImpl(PermissionType.QUERY, PermissionLevel.WRITE),
+      new PermissionImpl(PermissionType.TABLE, PermissionLevel.WRITE),
+      new PermissionImpl(PermissionType.AUTOMATION, PermissionLevel.EXECUTE),
+      new PermissionImpl(PermissionType.LEGACY_VIEW, PermissionLevel.READ),
+      new PermissionImpl(PermissionType.APP, PermissionLevel.READ),
     ],
   },
   POWER: {
     _id: BuiltinPermissionID.POWER,
     name: "Power",
     permissions: [
-      new Permission(PermissionType.TABLE, PermissionLevel.WRITE),
-      new Permission(PermissionType.USER, PermissionLevel.READ),
-      new Permission(PermissionType.AUTOMATION, PermissionLevel.EXECUTE),
-      new Permission(PermissionType.WEBHOOK, PermissionLevel.READ),
-      new Permission(PermissionType.LEGACY_VIEW, PermissionLevel.READ),
-      new Permission(PermissionType.APP, PermissionLevel.READ),
+      new PermissionImpl(PermissionType.TABLE, PermissionLevel.WRITE),
+      new PermissionImpl(PermissionType.USER, PermissionLevel.READ),
+      new PermissionImpl(PermissionType.AUTOMATION, PermissionLevel.EXECUTE),
+      new PermissionImpl(PermissionType.WEBHOOK, PermissionLevel.READ),
+      new PermissionImpl(PermissionType.LEGACY_VIEW, PermissionLevel.READ),
+      new PermissionImpl(PermissionType.APP, PermissionLevel.READ),
     ],
   },
   ADMIN: {
     _id: BuiltinPermissionID.ADMIN,
     name: "Admin",
     permissions: [
-      new Permission(PermissionType.TABLE, PermissionLevel.ADMIN),
-      new Permission(PermissionType.USER, PermissionLevel.ADMIN),
-      new Permission(PermissionType.AUTOMATION, PermissionLevel.ADMIN),
-      new Permission(PermissionType.WEBHOOK, PermissionLevel.READ),
-      new Permission(PermissionType.QUERY, PermissionLevel.ADMIN),
-      new Permission(PermissionType.LEGACY_VIEW, PermissionLevel.READ),
-      new Permission(PermissionType.APP, PermissionLevel.READ),
+      new PermissionImpl(PermissionType.TABLE, PermissionLevel.ADMIN),
+      new PermissionImpl(PermissionType.USER, PermissionLevel.ADMIN),
+      new PermissionImpl(PermissionType.AUTOMATION, PermissionLevel.ADMIN),
+      new PermissionImpl(PermissionType.WEBHOOK, PermissionLevel.READ),
+      new PermissionImpl(PermissionType.QUERY, PermissionLevel.ADMIN),
+      new PermissionImpl(PermissionType.LEGACY_VIEW, PermissionLevel.READ),
+      new PermissionImpl(PermissionType.APP, PermissionLevel.READ),
     ],
   },
 }
 
-export function getBuiltinPermissions() {
+export function getBuiltinPermissions(): BuiltinPermissions {
   return cloneDeep(BUILTIN_PERMISSIONS)
 }
 
@@ -592,7 +592,10 @@ export class AccessController {
     )
   }
 
-  async checkScreensAccess(screens: Screen[], userRoleId: string) {
+  async checkScreensAccess(
+    screens: Screen[],
+    userRoleId: string
+  ): Promise<Screen[]> {
     let accessibleScreens = []
     // don't want to handle this with Promise.all as this would mean all custom roles would be
     // retrieved at same time, it is likely a custom role will be re-used and therefore want
@@ -133,7 +133,7 @@ describe("getBuiltinPermissionByID", () => {
     _id: BuiltinPermissionID.PUBLIC,
     name: "Public",
     permissions: [
-      new permissions.Permission(
+      new permissions.PermissionImpl(
         permissions.PermissionType.WEBHOOK,
         permissions.PermissionLevel.EXECUTE
       ),
|
|||
BasicOperator,
|
||||
BBReferenceFieldMetadata,
|
||||
CalculationType,
|
||||
EnrichedQueryJson,
|
||||
FieldSchema,
|
||||
FieldType,
|
||||
INTERNAL_TABLE_SOURCE_ID,
|
||||
|
@@ -27,7 +28,6 @@ import {
   LogicalOperator,
   Operation,
   prefixed,
-  QueryJson,
   QueryOptions,
   RangeOperator,
   RelationshipsJson,
@@ -134,18 +134,18 @@ const allowEmptyRelationships: Record<SearchFilterKey, boolean> = {
 
 class InternalBuilder {
   private readonly client: SqlClient
-  private readonly query: QueryJson
+  private readonly query: EnrichedQueryJson
   private readonly splitter: dataFilters.ColumnSplitter
   private readonly knex: Knex
 
-  constructor(client: SqlClient, knex: Knex, query: QueryJson) {
+  constructor(client: SqlClient, knex: Knex, query: EnrichedQueryJson) {
     this.client = client
     this.query = query
     this.knex = knex
 
     this.splitter = new dataFilters.ColumnSplitter([this.table], {
       aliases: this.query.tableAliases,
-      columnPrefix: this.query.meta.columnPrefix,
+      columnPrefix: this.query.meta?.columnPrefix,
     })
   }
 
@@ -167,7 +167,7 @@ class InternalBuilder {
   }
 
   get table(): Table {
-    return this.query.meta.table
+    return this.query.table
   }
 
   get knexClient(): Knex.Client {
@@ -272,32 +272,18 @@ class InternalBuilder {
     return parts.join(".")
   }
 
-  private isFullSelectStatementRequired(): boolean {
-    const { meta } = this.query
-    for (let column of Object.values(meta.table.schema)) {
-      if (this.SPECIAL_SELECT_CASES.POSTGRES_MONEY(column)) {
-        return true
-      } else if (this.SPECIAL_SELECT_CASES.MSSQL_DATES(column)) {
-        return true
-      }
-    }
-    return false
-  }
-
   private generateSelectStatement(): (string | Knex.Raw)[] | "*" {
-    const { meta, endpoint, resource } = this.query
+    const { table, resource } = this.query
 
     if (!resource || !resource.fields || resource.fields.length === 0) {
       return "*"
     }
 
-    const alias = this.getTableName(endpoint.entityId)
-    const schema = meta.table.schema
-    if (!this.isFullSelectStatementRequired()) {
-      return [this.knex.raw("??", [`${alias}.*`])]
-    }
+    const alias = this.getTableName(table)
+    const schema = this.table.schema
 
     // get just the fields for this table
-    return resource.fields
+    const tableFields = resource.fields
       .map(field => {
         const parts = field.split(/\./g)
         let table: string | undefined = undefined
@ -312,34 +298,33 @@ class InternalBuilder {
|
|||
return { table, column, field }
|
||||
})
|
||||
.filter(({ table }) => !table || table === alias)
|
||||
.map(({ table, column, field }) => {
|
||||
const columnSchema = schema[column]
|
||||
|
||||
if (this.SPECIAL_SELECT_CASES.POSTGRES_MONEY(columnSchema)) {
|
||||
return this.knex.raw(`??::money::numeric as ??`, [
|
||||
this.rawQuotedIdentifier([table, column].join(".")),
|
||||
this.knex.raw(this.quote(field)),
|
||||
])
|
||||
}
|
||||
return tableFields.map(({ table, column, field }) => {
|
||||
const columnSchema = schema[column]
|
||||
|
||||
if (this.SPECIAL_SELECT_CASES.MSSQL_DATES(columnSchema)) {
|
||||
// Time gets returned as timestamp from mssql, not matching the expected
|
||||
// HH:mm format
|
||||
if (this.SPECIAL_SELECT_CASES.POSTGRES_MONEY(columnSchema)) {
|
||||
return this.knex.raw(`??::money::numeric as ??`, [
|
||||
this.rawQuotedIdentifier([table, column].join(".")),
|
||||
this.knex.raw(this.quote(field)),
|
||||
])
|
||||
}
|
||||
|
||||
// TODO: figure out how to express this safely without string
|
||||
// interpolation.
|
||||
return this.knex.raw(`CONVERT(varchar, ??, 108) as ??`, [
|
||||
this.rawQuotedIdentifier(field),
|
||||
this.knex.raw(this.quote(field)),
|
||||
])
|
||||
}
|
||||
if (this.SPECIAL_SELECT_CASES.MSSQL_DATES(columnSchema)) {
|
||||
// Time gets returned as timestamp from mssql, not matching the expected
|
||||
// HH:mm format
|
||||
|
||||
if (table) {
|
||||
return this.rawQuotedIdentifier(`${table}.${column}`)
|
||||
} else {
|
||||
return this.rawQuotedIdentifier(field)
|
||||
}
|
||||
})
|
||||
return this.knex.raw(`CONVERT(varchar, ??, 108) as ??`, [
|
||||
this.rawQuotedIdentifier(field),
|
||||
this.knex.raw(this.quote(field)),
|
||||
])
|
||||
}
|
||||
|
||||
if (table) {
|
||||
return this.rawQuotedIdentifier(`${table}.${column}`)
|
||||
} else {
|
||||
return this.rawQuotedIdentifier(field)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// OracleDB can't use character-large-objects (CLOBs) in WHERE clauses,
|
||||
|
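For context on the select cases above: in knex raw snippets, `??` binds an identifier (safely quoted by knex) while the rest of the snippet stays literal SQL, which is how the Postgres `money` columns get normalised to `numeric`. A minimal sketch, assuming only a configured Knex instance:

import knex from "knex"

const db = knex({ client: "pg" })
// "??" binds identifiers, so the column and alias are quoted by knex while
// the ::money::numeric cast remains literal SQL.
const sql = db("products")
  .select(db.raw(`??::money::numeric as ??`, ["products.price", "price"]))
  .toString()
console.log(sql)
// select "products"."price"::money::numeric as "price" from "products"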
@@ -497,9 +482,8 @@ class InternalBuilder {
     filterKey: string,
     whereCb: (filterKey: string, query: Knex.QueryBuilder) => Knex.QueryBuilder
   ): Knex.QueryBuilder {
-    const { relationships, endpoint, tableAliases: aliases } = this.query
-    const tableName = endpoint.entityId
-    const fromAlias = aliases?.[tableName] || tableName
+    const { relationships, schema, tableAliases: aliases, table } = this.query
+    const fromAlias = aliases?.[table.name] || table.name
     const matches = (value: string) =>
       filterKey.match(new RegExp(`^${value}\\.`))
     if (!relationships) {

@@ -539,7 +523,7 @@ class InternalBuilder {
         aliases?.[manyToMany.through] || relationship.through
       let throughTable = this.tableNameWithSchema(manyToMany.through, {
         alias: throughAlias,
-        schema: endpoint.schema,
+        schema,
       })
       subQuery = subQuery
         // add a join through the junction table
@@ -818,14 +802,29 @@ class InternalBuilder {
         filters.oneOf,
         ArrayOperator.ONE_OF,
         (q, key: string, array) => {
+          const schema = this.getFieldSchema(key)
+          const values = Array.isArray(array) ? array : [array]
           if (shouldOr) {
             q = q.or
           }
           if (this.client === SqlClient.ORACLE) {
             // @ts-ignore
             key = this.convertClobs(key)
+          } else if (
+            this.client === SqlClient.SQL_LITE &&
+            schema?.type === FieldType.DATETIME &&
+            schema.dateOnly
+          ) {
+            for (const value of values) {
+              if (value != null) {
+                q = q.or.whereLike(key, `${value.toISOString().slice(0, 10)}%`)
+              } else {
+                q = q.or.whereNull(key)
+              }
+            }
+            return q
           }
-          return q.whereIn(key, Array.isArray(array) ? array : [array])
+          return q.whereIn(key, values)
         },
         (q, key: string[], array) => {
           if (shouldOr) {

@@ -884,6 +883,19 @@ class InternalBuilder {
         let high = value.high
         let low = value.low

+        if (
+          this.client === SqlClient.SQL_LITE &&
+          schema?.type === FieldType.DATETIME &&
+          schema.dateOnly
+        ) {
+          if (high != null) {
+            high = `${high.toISOString().slice(0, 10)}T23:59:59.999Z`
+          }
+          if (low != null) {
+            low = low.toISOString().slice(0, 10)
+          }
+        }
+
         if (this.client === SqlClient.ORACLE) {
           rawKey = this.convertClobs(key)
         } else if (
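The new SQLite branches all lean on the same idea: a `dateOnly` column stores bare `YYYY-MM-DD` strings, so a datetime bound is reduced to its first ten ISO characters and matched by prefix (`LIKE 'YYYY-MM-DD%'`), with range highs padded to end-of-day. A standalone sketch of just that conversion, assuming plain `Date` inputs:

// Reduce a datetime to the date-only LIKE pattern used above.
function dateOnlyPattern(value: Date): string {
  // "2024-03-01T10:20:30.000Z" -> "2024-03-01%"
  return `${value.toISOString().slice(0, 10)}%`
}

// Range bounds: pad the high end out to the last millisecond of its day.
function dateOnlyRange(low: Date, high: Date): [string, string] {
  return [
    low.toISOString().slice(0, 10),
    `${high.toISOString().slice(0, 10)}T23:59:59.999Z`,
  ]
}

console.log(dateOnlyPattern(new Date("2024-03-01T10:20:30Z"))) // 2024-03-01%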
@@ -916,6 +928,7 @@ class InternalBuilder {
     }
     if (filters.equal) {
       iterate(filters.equal, BasicOperator.EQUAL, (q, key, value) => {
+        const schema = this.getFieldSchema(key)
         if (shouldOr) {
           q = q.or
         }

@@ -930,6 +943,16 @@ class InternalBuilder {
             // @ts-expect-error knex types are wrong, raw is fine here
             subq.whereNotNull(identifier).andWhere(identifier, value)
           )
+        } else if (
+          this.client === SqlClient.SQL_LITE &&
+          schema?.type === FieldType.DATETIME &&
+          schema.dateOnly
+        ) {
+          if (value != null) {
+            return q.whereLike(key, `${value.toISOString().slice(0, 10)}%`)
+          } else {
+            return q.whereNull(key)
+          }
         } else {
           return q.whereRaw(`COALESCE(?? = ?, FALSE)`, [
             this.rawQuotedIdentifier(key),

@@ -940,6 +963,7 @@ class InternalBuilder {
     }
     if (filters.notEqual) {
       iterate(filters.notEqual, BasicOperator.NOT_EQUAL, (q, key, value) => {
+        const schema = this.getFieldSchema(key)
         if (shouldOr) {
           q = q.or
         }

@@ -961,6 +985,18 @@ class InternalBuilder {
             // @ts-expect-error knex types are wrong, raw is fine here
             .or.whereNull(identifier)
           )
+        } else if (
+          this.client === SqlClient.SQL_LITE &&
+          schema?.type === FieldType.DATETIME &&
+          schema.dateOnly
+        ) {
+          if (value != null) {
+            return q.not
+              .whereLike(key, `${value.toISOString().slice(0, 10)}%`)
+              .or.whereNull(key)
+          } else {
+            return q.not.whereNull(key)
+          }
         } else {
           return q.whereRaw(`COALESCE(?? != ?, TRUE)`, [
             this.rawQuotedIdentifier(key),
@@ -1012,28 +1048,10 @@ class InternalBuilder {
     return isSqs(this.table)
   }

-  getTableName(tableOrName?: Table | string): string {
-    let table: Table
-    if (typeof tableOrName === "string") {
-      const name = tableOrName
-      if (this.query.table?.name === name) {
-        table = this.query.table
-      } else if (this.query.meta.table?.name === name) {
-        table = this.query.meta.table
-      } else if (!this.query.meta.tables?.[name]) {
-        // This can legitimately happen in custom queries, where the user is
-        // querying against a table that may not have been imported into
-        // Budibase.
-        return name
-      } else {
-        table = this.query.meta.tables[name]
-      }
-    } else if (tableOrName) {
-      table = tableOrName
-    } else {
+  getTableName(table?: Table): string {
+    if (!table) {
       table = this.table
     }

     let name = table.name
     if (isSqs(table) && table._id) {
       // SQS uses the table ID rather than the table name
@@ -1154,20 +1172,22 @@ class InternalBuilder {
         nulls = value.direction === SortOrder.ASCENDING ? "first" : "last"
       }

+      const composite = `${aliased}.${key}`
+      let identifier
+
       if (this.isAggregateField(key)) {
-        query = query.orderBy(key, direction, nulls)
+        identifier = this.rawQuotedIdentifier(key)
+      } else if (this.client === SqlClient.ORACLE) {
+        identifier = this.convertClobs(composite)
       } else {
-        let composite = `${aliased}.${key}`
-        if (this.client === SqlClient.ORACLE) {
-          query = query.orderByRaw(`?? ?? nulls ??`, [
-            this.convertClobs(composite),
-            this.knex.raw(direction),
-            this.knex.raw(nulls as string),
-          ])
-        } else {
-          query = query.orderBy(composite, direction, nulls)
-        }
+        identifier = this.rawQuotedIdentifier(composite)
       }
+
+      query = query.orderByRaw(`?? ?? ${nulls ? "nulls ??" : ""}`, [
+        identifier,
+        this.knex.raw(direction),
+        ...(nulls ? [this.knex.raw(nulls as string)] : []),
+      ])
     }
   }

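The rewritten sort path funnels every case through one parameterised `orderByRaw`: `??` slots take either a plain identifier or a pre-built raw fragment (raw bindings are inlined verbatim by knex), and the `nulls` clause is only appended when a null ordering was resolved. A minimal sketch, assuming a configured Knex instance:

import knex from "knex"

const db = knex({ client: "pg" })
// A string bound to "??" is quoted as an identifier; a knex.raw binding is
// passed through as-is, which is how the sort direction is injected safely.
const sql = db("books")
  .select("*")
  .orderByRaw(`?? ?? nulls last`, ["title", db.raw("desc")])
  .toString()
console.log(sql) // select * from "books" order by "title" desc nulls last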
@@ -1191,8 +1211,9 @@ class InternalBuilder {
     return withSchema
   }

-  private buildJsonField(field: string): string {
+  private buildJsonField(table: Table, field: string): [string, Knex.Raw] {
     const parts = field.split(".")
     let baseName = parts[parts.length - 1]
     let unaliased: string

     let tableField: string

@@ -1205,10 +1226,19 @@ class InternalBuilder {
       tableField = unaliased
     }

-    const separator = this.client === SqlClient.ORACLE ? " VALUE " : ","
-    return this.knex
-      .raw(`?${separator}??`, [unaliased, this.rawQuotedIdentifier(tableField)])
-      .toString()
+    if (this.query.meta?.columnPrefix) {
+      baseName = baseName.replace(this.query.meta.columnPrefix, "")
+    }
+
+    let identifier = this.rawQuotedIdentifier(tableField)
+    // Internal tables have special _id, _rev, createdAt, and updatedAt fields
+    // that do not appear in the schema, meaning schema could actually be
+    // undefined.
+    const schema: FieldSchema | undefined = table.schema[baseName]
+    if (schema && schema.type === FieldType.BIGINT) {
+      identifier = this.castIntToString(identifier)
+    }
+    return [unaliased, identifier]
   }

   maxFunctionParameters() {
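The BIGINT-to-string cast in `buildJsonField` exists because the JSON aggregations downstream are eventually parsed by JavaScript, where numbers are IEEE-754 doubles and lose integer precision past 2^53 - 1. A quick illustration:

// Why BIGINTs are cast to strings before JSON aggregation: JSON numbers
// become doubles on parse, silently rounding large integers.
const big = "9007199254740993" // 2^53 + 1
console.log(JSON.parse(big)) // 9007199254740992 - off by one
console.log(JSON.parse(`"${big}"`)) // "9007199254740993" - exact as a string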
@@ -1234,7 +1264,7 @@ class InternalBuilder {
   ): Knex.QueryBuilder {
     const sqlClient = this.client
     const knex = this.knex
-    const { resource, tableAliases: aliases, endpoint, meta } = this.query
+    const { resource, tableAliases: aliases, schema, tables } = this.query
     const fields = resource?.fields || []
     for (let relationship of relationships) {
       const {

@@ -1249,13 +1279,17 @@ class InternalBuilder {
       if (!toTable || !fromTable) {
         continue
       }
-      const relatedTable = meta.tables?.[toTable]
+
+      const relatedTable = tables[toTable]
+      if (!relatedTable) {
+        throw new Error(`related table "${toTable}" not found in datasource`)
+      }
       const toAlias = aliases?.[toTable] || toTable,
         fromAlias = aliases?.[fromTable] || fromTable,
         throughAlias = (throughTable && aliases?.[throughTable]) || throughTable
       let toTableWithSchema = this.tableNameWithSchema(toTable, {
         alias: toAlias,
-        schema: endpoint.schema,
+        schema,
       })
       const requiredFields = [
         ...(relatedTable?.primary || []),

@@ -1271,8 +1305,18 @@ class InternalBuilder {
         0,
         Math.floor(this.maxFunctionParameters() / 2)
       )
-      const fieldList: string = relationshipFields
-        .map(field => this.buildJsonField(field))
+      const fieldList = relationshipFields.map(field =>
+        this.buildJsonField(relatedTable, field)
+      )
+      if (!fieldList.length) {
+        continue
+      }
+
+      const fieldListFormatted = fieldList
+        .map(f => {
+          const separator = this.client === SqlClient.ORACLE ? " VALUE " : ","
+          return this.knex.raw(`?${separator}??`, [f[0], f[1]]).toString()
+        })
         .join(",")
       // SQL Server uses TOP - which performs a little differently to the normal LIMIT syntax
       // it reduces the result set rather than limiting how much data it filters over
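The `Math.floor(this.maxFunctionParameters() / 2)` cap reflects that each relationship field costs two arguments in the JSON object constructor (one for the key, one for the value), so only half the database's function-parameter budget is usable. A sketch with an assumed limit for illustration:

// json_build_object(k1, v1, k2, v2, ...) spends two parameters per field,
// so an N-argument limit fits N / 2 fields.
const maxFunctionParameters = 100 // assumed dialect limit, for illustration
const fields = ["id", "name", "price", "stock"]
const usable = fields.slice(0, Math.floor(maxFunctionParameters / 2))
console.log(usable.length) // 4 - all fit comfortably under this limit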
@@ -1293,7 +1337,7 @@ class InternalBuilder {
       if (isManyToMany) {
         let throughTableWithSchema = this.tableNameWithSchema(throughTable, {
           alias: throughAlias,
-          schema: endpoint.schema,
+          schema,
         })
         subQuery = subQuery.join(throughTableWithSchema, function () {
           this.on(`${toAlias}.${toPrimary}`, "=", `${throughAlias}.${toKey}`)

@@ -1302,13 +1346,17 @@ class InternalBuilder {

       // add the correlation to the overall query
       subQuery = subQuery.where(
-        correlatedTo,
+        this.rawQuotedIdentifier(correlatedTo),
         "=",
         this.rawQuotedIdentifier(correlatedFrom)
       )

       const standardWrap = (select: Knex.Raw): Knex.QueryBuilder => {
-        subQuery = subQuery.select(`${toAlias}.*`).limit(getRelationshipLimit())
+        subQuery = subQuery
+          .select(
+            relationshipFields.map(field => this.rawQuotedIdentifier(field))
+          )
+          .limit(getRelationshipLimit())
         // @ts-ignore - the from alias syntax isn't in Knex typing
         return knex.select(select).from({
           [toAlias]: subQuery,

@@ -1320,35 +1368,42 @@ class InternalBuilder {
         // need to check the junction table document is to the right column, this is just for SQS
         subQuery = this.addJoinFieldCheck(subQuery, relationship)
         wrapperQuery = standardWrap(
-          this.knex.raw(`json_group_array(json_object(${fieldList}))`)
+          this.knex.raw(
+            `json_group_array(json_object(${fieldListFormatted}))`
+          )
         )
         break
       case SqlClient.POSTGRES:
         wrapperQuery = standardWrap(
-          this.knex.raw(`json_agg(json_build_object(${fieldList}))`)
+          this.knex.raw(`json_agg(json_build_object(${fieldListFormatted}))`)
         )
         break
       case SqlClient.MARIADB:
         // can't use the standard wrap due to correlated sub-query limitations in MariaDB
         wrapperQuery = subQuery.select(
           knex.raw(
-            `json_arrayagg(json_object(${fieldList}) LIMIT ${getRelationshipLimit()})`
+            `json_arrayagg(json_object(${fieldListFormatted}) LIMIT ${getRelationshipLimit()})`
           )
         )
         break
       case SqlClient.MY_SQL:
       case SqlClient.ORACLE:
         wrapperQuery = standardWrap(
-          this.knex.raw(`json_arrayagg(json_object(${fieldList}))`)
+          this.knex.raw(`json_arrayagg(json_object(${fieldListFormatted}))`)
         )
         break
       case SqlClient.MS_SQL: {
         const comparatorQuery = knex
-          .select(`${fromAlias}.*`)
+          .select(`*`)
           // @ts-ignore - from alias syntax not TS supported
           .from({
             [fromAlias]: subQuery
-              .select(`${toAlias}.*`)
+              .select(
+                fieldList.map(f => {
+                  // @ts-expect-error raw is fine here, knex types are wrong
+                  return knex.ref(f[1]).as(f[0])
+                })
+              )
               .limit(getRelationshipLimit()),
           })
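Condensed, the switch above maps each dialect to its JSON array-aggregation idiom; `fieldList` stands in for the already-formatted `key`/`identifier` pairs. A sketch of just that mapping (helper name is illustrative, not part of the diff):

// Dialect-specific JSON aggregation used for relationship sub-queries.
type Dialect = "sqlite" | "postgres" | "mariadb" | "mysql" | "oracle"

function jsonArrayAgg(dialect: Dialect, fieldList: string, limit: number): string {
  switch (dialect) {
    case "sqlite":
      return `json_group_array(json_object(${fieldList}))`
    case "postgres":
      return `json_agg(json_build_object(${fieldList}))`
    case "mariadb":
      // MariaDB allows LIMIT inside the aggregate, which sidesteps its
      // correlated sub-query limitations.
      return `json_arrayagg(json_object(${fieldList}) LIMIT ${limit})`
    case "mysql":
    case "oracle":
      return `json_arrayagg(json_object(${fieldList}))`
  }
}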
@@ -1377,8 +1432,7 @@ class InternalBuilder {
       toPrimary?: string
     }[]
   ): Knex.QueryBuilder {
-    const { tableAliases: aliases, endpoint } = this.query
-    const schema = endpoint.schema
+    const { tableAliases: aliases, schema } = this.query
     const toTable = tables.to,
       fromTable = tables.from,
       throughTable = tables.through

@@ -1429,16 +1483,16 @@ class InternalBuilder {
   }

   qualifiedKnex(opts?: { alias?: string | boolean }): Knex.QueryBuilder {
-    let alias = this.query.tableAliases?.[this.query.endpoint.entityId]
+    let alias = this.query.tableAliases?.[this.query.table.name]
     if (opts?.alias === false) {
       alias = undefined
     } else if (typeof opts?.alias === "string") {
       alias = opts.alias
     }
     return this.knex(
-      this.tableNameWithSchema(this.query.endpoint.entityId, {
+      this.tableNameWithSchema(this.query.table.name, {
         alias,
-        schema: this.query.endpoint.schema,
+        schema: this.query.schema,
       })
     )
   }
@@ -1455,9 +1509,7 @@ class InternalBuilder {
     if (this.client === SqlClient.ORACLE) {
       // Oracle doesn't seem to automatically insert nulls
       // if we don't specify them, so we need to do that here
-      for (const [column, schema] of Object.entries(
-        this.query.meta.table.schema
-      )) {
+      for (const [column, schema] of Object.entries(this.query.table.schema)) {
         if (
           schema.constraints?.presence === true ||
           schema.type === FieldType.FORMULA ||
@@ -1534,13 +1586,12 @@ class InternalBuilder {
     limits?: { base: number; query: number }
   } = {}
   ): Knex.QueryBuilder {
-    let { endpoint, filters, paginate, relationships } = this.query
+    const { operation, filters, paginate, relationships, table } = this.query
     const { limits } = opts
-    const counting = endpoint.operation === Operation.COUNT
-
-    const tableName = endpoint.entityId
+
     // start building the query
     let query = this.qualifiedKnex()

     // handle pagination
     let foundOffset: number | null = null
     let foundLimit = limits?.query || limits?.base

@@ -1557,7 +1608,7 @@ class InternalBuilder {
       foundLimit = paginate.limit
     }
     // counting should not sort, limit or offset
-    if (!counting) {
+    if (operation !== Operation.COUNT) {
       // add the found limit if supplied
       if (foundLimit != null) {
         query = query.limit(foundLimit)

@@ -1569,7 +1620,7 @@ class InternalBuilder {
     }

     const aggregations = this.query.resource?.aggregations || []
-    if (counting) {
+    if (operation === Operation.COUNT) {
       query = this.addDistinctCount(query)
     } else if (aggregations.length > 0) {
       query = this.addAggregations(query, aggregations)

@@ -1578,7 +1629,7 @@ class InternalBuilder {
     }

     // have to add after as well (this breaks MS-SQL)
-    if (!counting) {
+    if (operation !== Operation.COUNT) {
       query = this.addSorting(query)
     }

@@ -1586,19 +1637,17 @@ class InternalBuilder {

     // handle relationships with a CTE for all others
     if (relationships?.length && aggregations.length === 0) {
-      const mainTable =
-        this.query.tableAliases?.[this.query.endpoint.entityId] ||
-        this.query.endpoint.entityId
+      const mainTable = this.query.tableAliases?.[table.name] || table.name
       const cte = this.addSorting(
         this.knex
-          .with("paginated", query)
+          .with("paginated", query.clone().clearSelect().select("*"))
           .select(this.generateSelectStatement())
           .from({
             [mainTable]: "paginated",
           })
       )
       // add JSON aggregations attached to the CTE
-      return this.addJsonRelationships(cte, tableName, relationships)
+      return this.addJsonRelationships(cte, table.name, relationships)
     }

     return query
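The relationship handling pivots on a CTE: the main query is materialised as `paginated` first, and the select list plus JSON joins then run over that already-limited row set. A minimal sketch of the wrapping, assuming a configured Knex instance:

import knex from "knex"

const db = knex({ client: "pg" })
const paginated = db("books").select("*").limit(10)
// Select from the CTE under the original table's name so downstream
// relationship joins keep working unchanged.
const sql = db
  .with("paginated", paginated)
  .select("*")
  .from({ books: "paginated" })
  .toString()
console.log(sql)
// with "paginated" as (select * from "books" limit 10) select * from "paginated" as "books"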
@@ -1661,7 +1710,10 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
    * which for the sake of mySQL stops adding the returning statement to inserts, updates and deletes.
    * @return the query ready to be passed to the driver.
    */
-  _query(json: QueryJson, opts: QueryOptions = {}): SqlQuery | SqlQuery[] {
+  _query(
+    json: EnrichedQueryJson,
+    opts: QueryOptions = {}
+  ): SqlQuery | SqlQuery[] {
     const sqlClient = this.getSqlClient()
     const config: Knex.Config = {
       client: this.getBaseSqlClient(),

@@ -1711,34 +1763,30 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
     return this.convertToNative(query, opts)
   }

-  async getReturningRow(queryFn: QueryFunction, json: QueryJson) {
+  async getReturningRow(queryFn: QueryFunction, json: EnrichedQueryJson) {
     if (!json.extra || !json.extra.idFilter) {
       return {}
     }
     const input = this._query({
-      endpoint: {
-        ...json.endpoint,
-        operation: Operation.READ,
-      },
-      resource: {
-        fields: [],
-      },
+      operation: Operation.READ,
+      datasource: json.datasource,
+      schema: json.schema,
+      table: json.table,
+      tables: json.tables,
+      resource: { fields: [] },
       filters: json.extra?.idFilter,
-      paginate: {
-        limit: 1,
-      },
-      meta: json.meta,
+      paginate: { limit: 1 },
     })
     return queryFn(input, Operation.READ)
   }

   // when creating if an ID has been inserted need to make sure
   // the id filter is enriched with it before trying to retrieve the row
-  checkLookupKeys(id: any, json: QueryJson) {
-    if (!id || !json.meta.table || !json.meta.table.primary) {
+  checkLookupKeys(id: any, json: EnrichedQueryJson) {
+    if (!id || !json.table.primary) {
       return json
     }
-    const primaryKey = json.meta.table.primary?.[0]
+    const primaryKey = json.table.primary[0]
     json.extra = {
       idFilter: {
         equal: {

@@ -1751,7 +1799,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {

   // this function recreates the returning functionality of postgres
   async queryWithReturning(
-    json: QueryJson,
+    json: EnrichedQueryJson,
     queryFn: QueryFunction,
     processFn: Function = (result: any) => result
   ) {

@@ -3,13 +3,13 @@ import {
   FieldType,
   NumberFieldMetadata,
   Operation,
-  QueryJson,
   RelationshipType,
   RenameColumn,
   SqlQuery,
   Table,
   TableSourceType,
   SqlClient,
+  EnrichedQueryJson,
 } from "@budibase/types"
 import { breakExternalTableId, getNativeSql } from "./utils"
 import { helpers, utils } from "@budibase/shared-core"

@@ -25,7 +25,7 @@ function generateSchema(
   schema: CreateTableBuilder,
   table: Table,
   tables: Record<string, Table>,
-  oldTable: null | Table = null,
+  oldTable?: Table,
   renamed?: RenameColumn
 ) {
   let primaryKeys = table && table.primary ? table.primary : []

@@ -55,7 +55,7 @@ function generateSchema(
   )
   for (let [key, column] of Object.entries(table.schema)) {
     // skip things that are already correct
-    const oldColumn = oldTable ? oldTable.schema[key] : null
+    const oldColumn = oldTable?.schema[key]
     if (
       (oldColumn && oldColumn.type) ||
       columnTypeSet.includes(key) ||

@@ -199,8 +199,8 @@ function buildUpdateTable(
   knex: SchemaBuilder,
   table: Table,
   tables: Record<string, Table>,
-  oldTable: Table,
-  renamed: RenameColumn
+  oldTable?: Table,
+  renamed?: RenameColumn
 ): SchemaBuilder {
   return knex.alterTable(table.name, schema => {
     generateSchema(schema, table, tables, oldTable, renamed)

@@ -238,19 +238,18 @@ class SqlTableQueryBuilder {
    * @param json the input JSON structure from which an SQL query will be built.
    * @return the operation that was found in the JSON.
    */
-  _operation(json: QueryJson): Operation {
-    return json.endpoint.operation
+  _operation(json: EnrichedQueryJson): Operation {
+    return json.operation
   }

-  _tableQuery(json: QueryJson): SqlQuery | SqlQuery[] {
+  _tableQuery(json: EnrichedQueryJson): SqlQuery | SqlQuery[] {
     let client = knex({ client: this.sqlClient }).schema
-    let schemaName = json?.endpoint?.schema
-    if (schemaName) {
-      client = client.withSchema(schemaName)
+    if (json?.schema) {
+      client = client.withSchema(json.schema)
     }

     let query: Knex.SchemaBuilder
-    if (!json.table || !json.meta || !json.meta.tables) {
+    if (!json.table || !json.tables) {
       throw new Error("Cannot execute without table being specified")
     }
     if (json.table.sourceType === TableSourceType.INTERNAL) {

@@ -259,17 +258,17 @@ class SqlTableQueryBuilder {

     switch (this._operation(json)) {
       case Operation.CREATE_TABLE:
-        query = buildCreateTable(client, json.table, json.meta.tables)
+        query = buildCreateTable(client, json.table, json.tables)
         break
       case Operation.UPDATE_TABLE:
-        if (!json.meta || !json.meta.table) {
+        if (!json.table) {
           throw new Error("Must specify old table for update")
         }
         // renameColumn does not work for MySQL, so return a raw query
-        if (this.sqlClient === SqlClient.MY_SQL && json.meta.renamed) {
+        if (this.sqlClient === SqlClient.MY_SQL && json.meta?.renamed) {
           const updatedColumn = json.meta.renamed.updated
-          const tableName = schemaName
-            ? `\`${schemaName}\`.\`${json.table.name}\``
+          const tableName = json?.schema
+            ? `\`${json.schema}\`.\`${json.table.name}\``
             : `\`${json.table.name}\``
           return {
             sql: `alter table ${tableName} rename column \`${json.meta.renamed.old}\` to \`${updatedColumn}\`;`,

@@ -280,18 +279,18 @@ class SqlTableQueryBuilder {
         query = buildUpdateTable(
           client,
           json.table,
-          json.meta.tables,
-          json.meta.table,
-          json.meta.renamed!
+          json.tables,
+          json.meta?.oldTable,
+          json.meta?.renamed
         )

         // renameColumn for SQL Server returns a parameterised `sp_rename` query,
         // which is not supported by SQL Server and gives a syntax error.
-        if (this.sqlClient === SqlClient.MS_SQL && json.meta.renamed) {
+        if (this.sqlClient === SqlClient.MS_SQL && json.meta?.renamed) {
           const oldColumn = json.meta.renamed.old
           const updatedColumn = json.meta.renamed.updated
-          const tableName = schemaName
-            ? `${schemaName}.${json.table.name}`
+          const tableName = json?.schema
+            ? `${json.schema}.${json.table.name}`
             : `${json.table.name}`
           const sql = getNativeSql(query)
           if (Array.isArray(sql)) {

@@ -14,7 +14,7 @@ import environment from "../environment"
 const DOUBLE_SEPARATOR = `${SEPARATOR}${SEPARATOR}`
 const ROW_ID_REGEX = /^\[.*]$/g
 const ENCODED_SPACE = encodeURIComponent(" ")
-const ISO_DATE_REGEX = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.\d{3}Z$/
+const ISO_DATE_REGEX = /^\d{4}-\d{2}-\d{2}(?:T\d{2}:\d{2}:\d{2}.\d{3}Z)?$/
 const TIME_REGEX = /^(?:\d{2}:)?(?:\d{2}:)(?:\d{2})$/

 export function isExternalTableID(tableId: string) {

@@ -59,11 +59,19 @@ export function isExternalTable(table: Table) {
 }

 export function buildExternalTableId(datasourceId: string, tableName: string) {
-  // encode spaces
-  if (tableName.includes(" ")) {
-    tableName = encodeURIComponent(tableName)
-  }
-  return `${datasourceId}${DOUBLE_SEPARATOR}${tableName}`
+  return `${datasourceId}${DOUBLE_SEPARATOR}${encodeURIComponent(tableName)}`
+}
+
+export function encodeTableId(tableId: string) {
+  if (isExternalTableID(tableId)) {
+    return encodeURIComponent(tableId)
+  } else {
+    return tableId
+  }
+}
+
+export function encodeViewId(viewId: string) {
+  return encodeURIComponent(viewId)
 }

 export function breakExternalTableId(tableId: string) {

@@ -141,15 +149,7 @@ export function isInvalidISODateString(str: string) {
 }

 export function isValidISODateString(str: string) {
-  const trimmedValue = str.trim()
-  if (!ISO_DATE_REGEX.test(trimmedValue)) {
-    return false
-  }
-  let d = new Date(trimmedValue)
-  if (isNaN(d.getTime())) {
-    return false
-  }
-  return d.toISOString() === trimmedValue
+  return ISO_DATE_REGEX.test(str.trim())
 }

 export function isValidFilter(value: any) {

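The relaxed `ISO_DATE_REGEX` makes the time component optional, so date-only strings now validate, and `isValidISODateString` collapses to a pure regex check. Note the trade-off: without the old round-trip through `Date`, a syntactically valid but impossible date would now pass. A quick illustration:

const ISO_DATE_REGEX = /^\d{4}-\d{2}-\d{2}(?:T\d{2}:\d{2}:\d{2}.\d{3}Z)?$/
console.log(ISO_DATE_REGEX.test("2024-03-01")) // true - date-only now valid
console.log(ISO_DATE_REGEX.test("2024-03-01T10:20:30.123Z")) // true
console.log(ISO_DATE_REGEX.test("01/03/2024")) // false
console.log(ISO_DATE_REGEX.test("2024-02-31")) // true - impossible date, regex-only check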
@@ -6,7 +6,7 @@ import {
   getPlatformURL,
 } from "../context"
 import {
-  BBContext,
+  Ctx,
   TenantResolutionStrategy,
   GetTenantIdOptions,
 } from "@budibase/types"

@@ -37,7 +37,7 @@ export const isUserInAppTenant = (appId: string, user?: any) => {
 const ALL_STRATEGIES = Object.values(TenantResolutionStrategy)

 export const getTenantIDFromCtx = (
-  ctx: BBContext,
+  ctx: Ctx,
   opts: GetTenantIdOptions
 ): string | undefined => {
   // exit early if not multi-tenant

@@ -5,7 +5,7 @@ import * as db from "../../db"
 import { Header } from "../../constants"
 import { newid } from "../../utils"
 import env from "../../environment"
-import { BBContext } from "@budibase/types"
+import { Ctx } from "@budibase/types"

 describe("utils", () => {
   const config = new DBTestConfiguration()

@@ -109,7 +109,7 @@ describe("utils", () => {
   })

   describe("isServingBuilder", () => {
-    let ctx: BBContext
+    let ctx: Ctx

     const expectResult = (result: boolean) =>
       expect(utils.isServingBuilder(ctx)).toBe(result)

@@ -133,7 +133,7 @@ describe("utils", () => {
   })

   describe("isServingBuilderPreview", () => {
-    let ctx: BBContext
+    let ctx: Ctx

     const expectResult = (result: boolean) =>
       expect(utils.isServingBuilderPreview(ctx)).toBe(result)

@@ -157,7 +157,7 @@ describe("utils", () => {
   })

   describe("isPublicAPIRequest", () => {
-    let ctx: BBContext
+    let ctx: Ctx

     const expectResult = (result: boolean) =>
       expect(utils.isPublicApiRequest(ctx)).toBe(result)

@@ -1,32 +1,31 @@
-const _ = require("lodash/fp")
-const { structures } = require("../../../tests")
+import { range } from "lodash/fp"
+import { structures } from "../.."

 jest.mock("../../../src/context")
 jest.mock("../../../src/db")

-const context = require("../../../src/context")
-const db = require("../../../src/db")
+import * as context from "../../../src/context"
+import * as db from "../../../src/db"

-const { getCreatorCount } = require("../../../src/users/users")
+import { getCreatorCount } from "../../../src/users/users"

 describe("Users", () => {
-  let getGlobalDBMock
-  let getGlobalUserParamsMock
-  let paginationMock
+  let getGlobalDBMock: jest.SpyInstance
+  let paginationMock: jest.SpyInstance

   beforeEach(() => {
     jest.resetAllMocks()

     getGlobalDBMock = jest.spyOn(context, "getGlobalDB")
-    getGlobalUserParamsMock = jest.spyOn(db, "getGlobalUserParams")
     paginationMock = jest.spyOn(db, "pagination")
+    jest.spyOn(db, "getGlobalUserParams")
   })

-  it("Retrieves the number of creators", async () => {
-    const getUsers = (offset, limit, creators = false) => {
-      const range = _.range(offset, limit)
+  it("retrieves the number of creators", async () => {
+    const getUsers = (offset: number, limit: number, creators = false) => {
       const opts = creators ? { builder: { global: true } } : undefined
-      return range.map(() => structures.users.user(opts))
+      return range(offset, limit).map(() => structures.users.user(opts))
     }
     const page1Data = getUsers(0, 8)
     const page2Data = getUsers(8, 12, true)
@@ -117,6 +117,7 @@ beforeAll(async () => {
   jest.spyOn(events.view, "calculationCreated")
   jest.spyOn(events.view, "calculationUpdated")
   jest.spyOn(events.view, "calculationDeleted")
+  jest.spyOn(events.view, "viewJoinCreated")

   jest.spyOn(events.plugin, "init")
   jest.spyOn(events.plugin, "imported")

@@ -1,8 +1,8 @@
 import { createMockContext, createMockCookies } from "@shopify/jest-koa-mocks"
-import { BBContext } from "@budibase/types"
+import { Ctx } from "@budibase/types"

-export const newContext = (): BBContext => {
-  const ctx = createMockContext() as any
+export const newContext = (): Ctx => {
+  const ctx = createMockContext() as Ctx
   return {
     ...ctx,
     path: "/",

@@ -22,7 +22,6 @@ export function price(): PurchasedPrice {
     currency: "usd",
     duration: PriceDuration.MONTHLY,
     priceId: "price_123",
-    dayPasses: undefined,
     isPerUser: true,
   }
 }

@@ -50,11 +49,6 @@ export function quotas(): Quotas {
       value: 1,
       triggers: [],
     },
-    dayPasses: {
-      name: "Queries",
-      value: 1,
-      triggers: [],
-    },
     budibaseAICredits: {
       name: "Budibase AI Credits",
       value: 1,

@@ -1,6 +1,6 @@
 import { MonthlyQuotaName, QuotaUsage } from "@budibase/types"

-export const usage = (users: number = 0, creators: number = 0): QuotaUsage => {
+export const usage = (users = 0, creators = 0): QuotaUsage => {
   return {
     _id: "usage_quota",
     quotaReset: new Date().toISOString(),

@@ -15,7 +15,6 @@ export const usage = (users: number = 0, creators: number = 0): QuotaUsage => {
     monthly: {
       "01-2023": {
         automations: 0,
-        dayPasses: 0,
         queries: 0,
         budibaseAICredits: 0,
         triggers: {},

@@ -45,14 +44,12 @@ export const usage = (users: number = 0, creators: number = 0): QuotaUsage => {
       },
       "02-2023": {
         automations: 0,
-        dayPasses: 0,
         queries: 0,
         budibaseAICredits: 0,
         triggers: {},
       },
       current: {
         automations: 0,
-        dayPasses: 0,
         queries: 0,
         budibaseAICredits: 0,
         triggers: {},

|
|||
// We use --format json to make sure the output is nice and machine-readable,
|
||||
// and we use --no-trunc so that the command returns full container IDs so we
|
||||
// can filter on them correctly.
|
||||
return execSync("docker ps --format json --no-trunc")
|
||||
return execSync("docker ps --all --format json --no-trunc")
|
||||
.toString()
|
||||
.split("\n")
|
||||
.filter(x => x.length > 0)
|
||||
|
@ -70,7 +70,36 @@ export function getExposedV4Port(container: ContainerInfo, port: number) {
|
|||
return getExposedV4Ports(container).find(x => x.container === port)?.host
|
||||
}
|
||||
|
||||
interface DockerContext {
|
||||
Name: string
|
||||
Description: string
|
||||
DockerEndpoint: string
|
||||
ContextType: string
|
||||
Error: string
|
||||
}
|
||||
|
||||
function getCurrentDockerContext(): DockerContext {
|
||||
const out = execSync("docker context ls --format json")
|
||||
for (const line of out.toString().split("\n")) {
|
||||
const parsed = JSON.parse(line)
|
||||
if (parsed.Current) {
|
||||
return parsed as DockerContext
|
||||
}
|
||||
}
|
||||
throw new Error("No current Docker context")
|
||||
}
|
||||
|
||||
export function setupEnv(...envs: any[]) {
|
||||
process.env.TESTCONTAINERS_RYUK_DISABLED = "true"
|
||||
|
||||
// For whatever reason, testcontainers doesn't always use the correct current
|
||||
// docker context. This bit of code forces the issue by finding the current
|
||||
// context and setting it as the DOCKER_HOST environment
|
||||
if (!process.env.DOCKER_HOST) {
|
||||
const dockerContext = getCurrentDockerContext()
|
||||
process.env.DOCKER_HOST = dockerContext.DockerEndpoint
|
||||
}
|
||||
|
||||
// We start couchdb in globalSetup.ts, in the root of the monorepo, so it
|
||||
// should be relatively safe to look for it by its image name.
|
||||
const couch = getContainerByImage("budibase/couchdb")
|
||||
|
@ -116,11 +145,12 @@ export async function startContainer(container: GenericContainer) {
|
|||
key = imageName.split("@")[0]
|
||||
}
|
||||
key = key.replace(/\//g, "-").replace(/:/g, "-")
|
||||
const name = `${key}_testcontainer`
|
||||
|
||||
container = container
|
||||
.withReuse()
|
||||
.withLabels({ "com.budibase": "true" })
|
||||
.withName(`${key}_testcontainer`)
|
||||
.withName(name)
|
||||
|
||||
let startedContainer: StartedTestContainer | undefined = undefined
|
||||
let lastError = undefined
|
||||
|
|
|
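For reference, `docker context ls --format json` emits one JSON object per line, and the parser above picks the entry flagged `Current` (field names as observed in recent Docker CLIs; the exact shape may vary by version). A sketch of what one such line looks like to the code:

// One line of `docker context ls --format json` output, abridged.
const line = `{"Current":true,"Name":"default","DockerEndpoint":"unix:///var/run/docker.sock"}`
const ctx = JSON.parse(line)
if (ctx.Current && !process.env.DOCKER_HOST) {
  process.env.DOCKER_HOST = ctx.DockerEndpoint
}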
@@ -1,24 +1,8 @@
 {
   "extends": "../../tsconfig.build.json",
   "compilerOptions": {
-    "target": "es6",
-    "module": "commonjs",
-    "lib": ["es2020"],
-    "strict": true,
-    "noImplicitAny": true,
-    "esModuleInterop": true,
-    "resolveJsonModule": true,
-    "incremental": true,
-    "sourceMap": true,
-    "declaration": true,
-    "types": ["node", "jest"],
-    "outDir": "dist",
-    "skipLibCheck": true,
-    "paths": {
-      "@budibase/types": ["../types/src"],
-      "@budibase/shared-core": ["../shared-core/src"]
-    }
+    "outDir": "dist"
   },
   "include": ["**/*.js", "**/*.ts"],
   "exclude": [
     "node_modules",
     "dist",

@@ -0,0 +1,9 @@
+{
+  "extends": "../../tsconfig.build.json",
+  "compilerOptions": {
+    "outDir": "dist",
+    "sourceMap": true
+  },
+  "include": ["tests/**/*.js", "tests/**/*.ts"],
+  "exclude": ["node_modules", "dist"]
+}

@@ -3,28 +3,23 @@
   "description": "A UI solution used in the different Budibase projects.",
   "version": "0.0.0",
   "license": "MPL-2.0",
-  "svelte": "src/index.js",
-  "module": "dist/bbui.es.js",
+  "svelte": "src/index.ts",
+  "module": "dist/bbui.mjs",
   "exports": {
     ".": {
-      "import": "./dist/bbui.es.js"
+      "import": "./dist/bbui.mjs"
     },
     "./package.json": "./package.json",
     "./spectrum-icons-rollup.js": "./src/spectrum-icons-rollup.js",
     "./spectrum-icons-vite.js": "./src/spectrum-icons-vite.js"
   },
   "scripts": {
-    "build": "rollup -c"
+    "build": "vite build",
+    "dev": "vite build --watch --mode=dev"
   },
   "devDependencies": {
-    "@rollup/plugin-commonjs": "^16.0.0",
-    "@rollup/plugin-json": "^4.1.0",
-    "@rollup/plugin-node-resolve": "^11.2.1",
-    "postcss": "^8.2.9",
-    "rollup": "^2.45.2",
-    "rollup-plugin-postcss": "^4.0.0",
-    "rollup-plugin-svelte": "^7.1.0",
-    "rollup-plugin-terser": "^7.0.2"
+    "@sveltejs/vite-plugin-svelte": "1.4.0",
+    "vite-plugin-css-injected-by-js": "3.5.2"
   },
   "keywords": [
     "svelte"

@@ -35,8 +30,8 @@
   ],
   "dependencies": {
     "@adobe/spectrum-css-workflow-icons": "1.2.1",
-    "@budibase/shared-core": "0.0.0",
-    "@budibase/string-templates": "0.0.0",
+    "@budibase/shared-core": "*",
+    "@budibase/string-templates": "*",
     "@spectrum-css/accordion": "3.0.24",
     "@spectrum-css/actionbutton": "1.0.1",
     "@spectrum-css/actiongroup": "1.0.1",

@@ -96,8 +91,7 @@
       "dependsOn": [
         {
           "projects": [
-            "@budibase/string-templates",
-            "@budibase/shared-core"
+            "@budibase/string-templates"
           ],
           "target": "build"
         }

@@ -1,32 +0,0 @@
-import svelte from "rollup-plugin-svelte"
-import resolve from "@rollup/plugin-node-resolve"
-import commonjs from "@rollup/plugin-commonjs"
-import json from "@rollup/plugin-json"
-import { terser } from "rollup-plugin-terser"
-import postcss from "rollup-plugin-postcss"
-
-export default {
-  input: "src/index.js",
-  output: {
-    sourcemap: true,
-    format: "esm",
-    file: "dist/bbui.es.js",
-  },
-  onwarn(warning, warn) {
-    // suppress eval warnings
-    if (warning.code === "EVAL") {
-      return
-    }
-    warn(warning)
-  },
-  plugins: [
-    resolve(),
-    commonjs(),
-    svelte({
-      emitCss: true,
-    }),
-    postcss(),
-    terser(),
-    json(),
-  ],
-}

@@ -3,7 +3,7 @@
   import AbsTooltip from "../Tooltip/AbsTooltip.svelte"
   import { createEventDispatcher } from "svelte"

-  export let type
+  export let type = undefined
   export let disabled = false
   export let size = "M"
   export let cta = false

@@ -16,8 +16,8 @@
   export let active = false
   export let tooltip = undefined
   export let newStyles = true
-  export let id
-  export let ref
+  export let id = undefined
+  export let ref = undefined
   export let reverse = false

   const dispatch = createEventDispatcher()

@@ -13,9 +13,8 @@

   const dispatch = createEventDispatcher()

-  const onChange = e => {
-    const optionValue = e.target.value
-    if (e.target.checked && !value.includes(optionValue)) {
+  const onChange = optionValue => {
+    if (!value.includes(optionValue)) {
       dispatch("change", [...value, optionValue])
     } else {
       dispatch(

@@ -39,10 +38,9 @@
     class="spectrum-Checkbox spectrum-Checkbox--sizeM spectrum-FieldGroup-item"
   >
     <input
-      on:change={onChange}
+      on:change={() => onChange(optionValue)}
       type="checkbox"
       class="spectrum-Checkbox-input"
-      value={optionValue}
       checked={value.includes(optionValue)}
       {disabled}
     />

@@ -51,6 +51,7 @@
   }
   input.hide-arrows {
     -moz-appearance: textfield;
+    appearance: textfield;
   }
   input[type="time"]::-webkit-calendar-picker-indicator {
     display: none;

@@ -2,13 +2,6 @@
   import CoreDatePicker from "./DatePicker/DatePicker.svelte"
   import Icon from "../../Icon/Icon.svelte"

-  export let value = null
-  export let disabled = false
-  export let readonly = false
-  export let error = null
-  export let appendTo = undefined
-  export let ignoreTimezones = false
-
   let fromDate
   let toDate
 </script>

@@ -85,7 +85,7 @@
   }

   const getPos = e => {
-    var rect = canvasRef.getBoundingClientRect()
+    let rect = canvasRef.getBoundingClientRect()
     const canvasX = e.offsetX || e.targetTouches?.[0].pageX - rect.left
     const canvasY = e.offsetY || e.targetTouches?.[0].pageY - rect.top

@@ -39,6 +39,7 @@
     padding: 0;
     margin: 0;
     -webkit-appearance: none;
+    appearance: none;
     background: transparent;
   }
   input::-webkit-slider-thumb {

@@ -10,7 +10,7 @@
   export let disabled = false
   export let updateOnChange = true
   export let quiet = false
-  export let inputRef
+  export let inputRef = undefined
   export let helpText = null

   const dispatch = createEventDispatcher()

@@ -17,18 +17,18 @@
   export let getOptionIcon = option => option?.icon
   export let getOptionColour = option => option?.colour
   export let useOptionIconImage = false
-  export let isOptionEnabled
+  export let isOptionEnabled = undefined
   export let quiet = false
   export let autoWidth = false
   export let sort = false
   export let tooltip = ""
   export let autocomplete = false
-  export let customPopoverHeight
-  export let align
+  export let customPopoverHeight = undefined
+  export let align = undefined
   export let footer = null
   export let tag = null
   export let helpText = null
-  export let compare
+  export let compare = undefined
   export let onOptionMouseenter = () => {}
   export let onOptionMouseleave = () => {}

@@ -1,23 +1,23 @@
-<script>
+<script lang="ts">
   import {
     default as AbsTooltip,
     TooltipPosition,
     TooltipType,
   } from "../Tooltip/AbsTooltip.svelte"

-  export let name = "Add"
-  export let hidden = false
+  export let name: string = "Add"
+  export let hidden: boolean = false
   export let size = "M"
-  export let hoverable = false
-  export let disabled = false
-  export let color
-  export let hoverColor
-  export let tooltip
+  export let hoverable: boolean = false
+  export let disabled: boolean = false
+  export let color: string | undefined = undefined
+  export let hoverColor: string | undefined = undefined
+  export let tooltip: string | undefined = undefined
   export let tooltipPosition = TooltipPosition.Bottom
   export let tooltipType = TooltipType.Default
-  export let tooltipColor
-  export let tooltipWrap = true
-  export let newStyles = false
+  export let tooltipColor: string | undefined = undefined
+  export let tooltipWrap: boolean = true
+  export let newStyles: boolean = false
 </script>

 <AbsTooltip

@@ -4,7 +4,7 @@

   export let size = "M"
   export let tooltip = ""
-  export let muted
+  export let muted = undefined
 </script>

 <TooltipWrapper {tooltip} {size}>

@@ -43,12 +43,11 @@
     flex-direction: row;
     justify-content: flex-start;
     align-items: stretch;
-    overflow-y: scroll !important;
     flex: 1 1 auto;
     overflow-x: hidden;
   }
   .main {
-    overflow: auto;
+    overflow-y: scroll;
   }
   .content {
     display: flex;

@@ -37,7 +37,7 @@
     }
   }

-  export let overBackground
+  export let overBackground = false
 </script>

 <!-- svelte-ignore a11y-no-static-element-interactions -->

@@ -43,7 +43,7 @@
   export let showHeaderBorder = true
   export let placeholderText = "No rows found"
   export let snippets = []
-  export let defaultSortColumn
+  export let defaultSortColumn = undefined
   export let defaultSortOrder = "Ascending"

   const dispatch = createEventDispatcher()

@@ -124,8 +124,6 @@
   .spectrum-Tabs-selectionIndicator.emphasized {
     background-color: var(--spectrum-global-color-blue-400);
   }
-  .spectrum-Tabs--horizontal .spectrum-Tabs-selectionIndicator {
-  }
   .noHorizPadding {
     padding: 0;
   }

@@ -23,7 +23,7 @@
   export let type = TooltipType.Default
   export let text = ""
   export let fixed = false
-  export let color = null
+  export let color = ""
   export let noWrap = false

   let wrapper

@@ -134,6 +134,7 @@
   .spectrum-Tooltip-label {
     display: -webkit-box;
     -webkit-line-clamp: 3;
+    line-clamp: 3;
     -webkit-box-orient: vertical;
     overflow: hidden;
     font-size: 12px;

@@ -1,11 +1,11 @@
-<script>
+<script lang="ts">
   import "@spectrum-css/typography/dist/index-vars.css"

   // Sizes
-  export let size = "M"
-  export let textAlign
-  export let noPadding = false
-  export let weight = "default" // light, heavy, default
+  export let size: "XS" | "S" | "M" | "L" = "M"
+  export let textAlign: string | undefined = undefined
+  export let noPadding: boolean = false
+  export let weight: "light" | "heavy" | "default" = "default"
 </script>

 <h1

@@ -45,6 +45,11 @@
   --purple: #806fde;
   --purple-dark: #130080;

+  --error-bg: rgba(226, 109, 105, 0.3);
+  --warning-bg: rgba(255, 210, 106, 0.3);
+  --error-content: rgba(226, 109, 105, 0.6);
+  --warning-content: rgba(255, 210, 106, 0.6);
+
   --rounded-small: 4px;
   --rounded-medium: 8px;
   --rounded-large: 16px;

@@ -6,9 +6,8 @@ export const deepGet = helpers.deepGet
 /**
  * Generates a DOM safe UUID.
  * Starting with a letter is important to make it DOM safe.
- * @return {string} a random DOM safe UUID
  */
-export function uuid() {
+export function uuid(): string {
   return "cxxxxxxxxxxxx4xxxyxxxxxxxxxxxxxxx".replace(/[xy]/g, c => {
     const r = (Math.random() * 16) | 0
     const v = c === "x" ? r : (r & 0x3) | 0x8

@@ -18,22 +17,18 @@ export function uuid() {

 /**
  * Capitalises a string
- * @param string the string to capitalise
- * @return {string} the capitalised string
  */
-export const capitalise = string => {
+export const capitalise = (string?: string | null): string => {
   if (!string) {
-    return string
+    return ""
   }
   return string.substring(0, 1).toUpperCase() + string.substring(1)
 }

 /**
  * Computes a short hash of a string
- * @param string the string to compute a hash of
- * @return {string} the hash string
  */
-export const hashString = string => {
+export const hashString = (string?: string | null): string => {
   if (!string) {
     return "0"
   }

@@ -54,11 +49,12 @@ export const hashString = string => {
  * will override the value "foo" rather than "bar".
  * If a deep path is specified and the parent keys don't exist then these will
  * be created.
- * @param obj the object
- * @param key the key
- * @param value the value
  */
-export const deepSet = (obj, key, value) => {
+export const deepSet = (
+  obj: Record<string, any> | null,
+  key: string | null,
+  value: any
+): void => {
   if (!obj || !key) {
     return
   }

@@ -82,9 +78,8 @@ export const deepSet = (obj, key, value) => {

 /**
  * Deeply clones an object. Functions are not supported.
- * @param obj the object to clone
  */
-export const cloneDeep = obj => {
+export const cloneDeep = <T>(obj: T): T => {
   if (!obj) {
     return obj
   }

@@ -93,9 +88,8 @@ export const cloneDeep = obj => {

 /**
  * Copies a value to the clipboard
- * @param value the value to copy
  */
-export const copyToClipboard = value => {
+export const copyToClipboard = (value: any): Promise<void> => {
   return new Promise(res => {
     if (navigator.clipboard && window.isSecureContext) {
       // Try using the clipboard API first

@@ -117,9 +111,12 @@ export const copyToClipboard = value => {
   })
 }

-// Parsed a date value. This is usually an ISO string, but can be a
+// Parse a date value. This is usually an ISO string, but can be a
 // bunch of different formats and shapes depending on schema flags.
-export const parseDate = (value, { enableTime = true }) => {
+export const parseDate = (
+  value: string | dayjs.Dayjs | null,
+  { enableTime = true }
+): dayjs.Dayjs | null => {
   // If empty then invalid
   if (!value) {
     return null

@@ -128,7 +125,7 @@ export const parseDate = (
   // Certain string values need transformed
   if (typeof value === "string") {
     // Check for time only values
-    if (!isNaN(new Date(`0-${value}`))) {
+    if (!isNaN(new Date(`0-${value}`).valueOf())) {
       value = `0-${value}`
     }

@@ -153,9 +150,9 @@ export const parseDate = (
 // Stringifies a dayjs object to create an ISO string that respects the various
 // schema flags
 export const stringifyDate = (
-  value,
+  value: null | dayjs.Dayjs,
   { enableTime = true, timeOnly = false, ignoreTimezones = false } = {}
-) => {
+): string | null => {
   if (!value) {
     return null
   }

@@ -192,7 +189,7 @@ export const stringifyDate = (
 }

 // Determine the dayjs-compatible format of the browser's default locale
-const getPatternForPart = part => {
+const getPatternForPart = (part: Intl.DateTimeFormatPart): string => {
   switch (part.type) {
     case "day":
       return "D".repeat(part.value.length)

@@ -214,9 +211,9 @@ const localeDateFormat = new Intl.DateTimeFormat()

 // Formats a dayjs date according to schema flags
 export const getDateDisplayValue = (
-  value,
+  value: dayjs.Dayjs | null,
   { enableTime = true, timeOnly = false } = {}
-) => {
+): string => {
   if (!value?.isValid()) {
     return ""
   }

@@ -229,7 +226,7 @@ export const getDateDisplayValue = (
   }
 }

-export const hexToRGBA = (color, opacity) => {
+export const hexToRGBA = (color: string, opacity: number): string => {
   if (color.includes("#")) {
     color = color.replace("#", "")
   }
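A usage sketch for the `deepSet` typing above, based on its documented behaviour (dotted keys create intermediate objects as needed; helper assumed in scope from the file being diffed):

const obj: Record<string, any> = {}
deepSet(obj, "a.b.c", 1)
console.log(obj) // { a: { b: { c: 1 } } }
// A pre-existing literal "a.b" property would be overridden via the dotted
// path rather than being treated as a single key.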
Some files were not shown because too many files have changed in this diff.