Merge branch 'master' into table-improvements-2

commit 1a434e82db
@@ -25,7 +25,7 @@ env:
  BASE_BRANCH: ${{ github.event.pull_request.base.ref}}
  PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
  NX_BASE_BRANCH: origin/${{ github.base_ref }}
  USE_NX_AFFECTED: ${{ github.event_name == 'pull_request' }}
  ONLY_AFFECTED_TASKS: ${{ github.event_name == 'pull_request' }}
  IS_OSS_CONTRIBUTOR: ${{ inputs.run_as_oss == true || (github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase') }}

jobs:

@@ -72,7 +72,7 @@ jobs:
      # Check the types of the projects built via esbuild
      - name: Check types
        run: |
          if ${{ env.USE_NX_AFFECTED }}; then
          if ${{ env.ONLY_AFFECTED_TASKS }}; then
            yarn check:types --since=${{ env.NX_BASE_BRANCH }} --ignore @budibase/account-portal-server
          else
            yarn check:types --ignore @budibase/account-portal-server

@@ -108,7 +108,7 @@ jobs:
      - name: Pull testcontainers images
        run: |
          docker pull testcontainers/ryuk:0.5.1 &
          docker pull budibase/couchdb:v3.2.1-sql &
          docker pull budibase/couchdb:v3.2.1-sqs &
          docker pull redis &

          wait $(jobs -p)

@@ -116,7 +116,7 @@ jobs:
      - run: yarn --frozen-lockfile
      - name: Test
        run: |
          if ${{ env.USE_NX_AFFECTED }}; then
          if ${{ env.ONLY_AFFECTED_TASKS }}; then
            yarn test --ignore=@budibase/worker --ignore=@budibase/server --since=${{ env.NX_BASE_BRANCH }}
          else
            yarn test --ignore=@budibase/worker --ignore=@budibase/server

@@ -140,8 +140,8 @@ jobs:
      - run: yarn --frozen-lockfile
      - name: Test worker
        run: |
          if ${{ env.USE_NX_AFFECTED }}; then
            yarn test --scope=@budibase/worker --since=${{ env.NX_BASE_BRANCH }}
          if ${{ env.ONLY_AFFECTED_TASKS }}; then
            node scripts/run-affected.js --task=test --scope=@budibase/worker --since=${{ env.NX_BASE_BRANCH }}
          else
            yarn test --scope=@budibase/worker
          fi

@@ -180,8 +180,8 @@ jobs:

      - name: Test server
        run: |
          if ${{ env.USE_NX_AFFECTED }}; then
            yarn test --scope=@budibase/server --since=${{ env.NX_BASE_BRANCH }}
          if ${{ env.ONLY_AFFECTED_TASKS }}; then
            node scripts/run-affected.js --task=test --scope=@budibase/server --since=${{ env.NX_BASE_BRANCH }}
          else
            yarn test --scope=@budibase/server
          fi

@@ -214,6 +214,7 @@ jobs:
          echo "pro_commit=$pro_commit"
          echo "pro_commit=$pro_commit" >> "$GITHUB_OUTPUT"
          echo "base_commit=$base_commit"
          echo "base_commit=$base_commit" >> "$GITHUB_OUTPUT"

          base_commit_excluding_merges=$(git log --no-merges -n 1 --format=format:%H $base_commit)
          echo "base_commit_excluding_merges=$base_commit_excluding_merges"

@@ -230,7 +231,7 @@ jobs:
          base_commit_excluding_merges='${{ steps.get_pro_commits.outputs.base_commit_excluding_merges }}'
          pro_commit='${{ steps.get_pro_commits.outputs.pro_commit }}'

          any_commit=$(git log --no-merges $base_commit...$pro_commit)
          any_commit=$(git log --no-merges $base_commit_excluding_merges...$pro_commit)

          if [ -n "$any_commit" ]; then
            echo $any_commit
@@ -10,7 +10,7 @@
  },
  "dependencies": {
    "bulma": "^0.9.3",
    "next": "12.1.0",
    "next": "14.1.1",
    "node-fetch": "^3.2.10",
    "sass": "^1.52.3",
    "react": "17.0.2",
@@ -46,10 +46,10 @@
  resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz#b520529ec21d8e5945a1851dfd1c32e94e39ff45"
  integrity sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==

"@next/env@12.1.0":
  version "12.1.0"
  resolved "https://registry.yarnpkg.com/@next/env/-/env-12.1.0.tgz#73713399399b34aa5a01771fb73272b55b22c314"
  integrity sha512-nrIgY6t17FQ9xxwH3jj0a6EOiQ/WDHUos35Hghtr+SWN/ntHIQ7UpuvSi0vaLzZVHQWaDupKI+liO5vANcDeTQ==
"@next/env@14.1.1":
  version "14.1.1"
  resolved "https://registry.yarnpkg.com/@next/env/-/env-14.1.1.tgz#80150a8440eb0022a73ba353c6088d419b908bac"
  integrity sha512-7CnQyD5G8shHxQIIg3c7/pSeYFeMhsNbpU/bmvH7ZnDql7mNRgg8O2JZrhrc/soFnfBnKP4/xXNiiSIPn2w8gA==

"@next/eslint-plugin-next@12.1.0":
  version "12.1.0"

@@ -58,60 +58,50 @@
  dependencies:
    glob "7.1.7"

"@next/swc-android-arm64@12.1.0":
  version "12.1.0"
  resolved "https://registry.yarnpkg.com/@next/swc-android-arm64/-/swc-android-arm64-12.1.0.tgz#865ba3a9afc204ff2bdeea49dd64d58705007a39"
  integrity sha512-/280MLdZe0W03stA69iL+v6I+J1ascrQ6FrXBlXGCsGzrfMaGr7fskMa0T5AhQIVQD4nA/46QQWxG//DYuFBcA==
"@next/swc-darwin-arm64@14.1.1":
  version "14.1.1"
  resolved "https://registry.yarnpkg.com/@next/swc-darwin-arm64/-/swc-darwin-arm64-14.1.1.tgz#b74ba7c14af7d05fa2848bdeb8ee87716c939b64"
  integrity sha512-yDjSFKQKTIjyT7cFv+DqQfW5jsD+tVxXTckSe1KIouKk75t1qZmj/mV3wzdmFb0XHVGtyRjDMulfVG8uCKemOQ==

"@next/swc-darwin-arm64@12.1.0":
  version "12.1.0"
  resolved "https://registry.yarnpkg.com/@next/swc-darwin-arm64/-/swc-darwin-arm64-12.1.0.tgz#08e8b411b8accd095009ed12efbc2f1d4d547135"
  integrity sha512-R8vcXE2/iONJ1Unf5Ptqjk6LRW3bggH+8drNkkzH4FLEQkHtELhvcmJwkXcuipyQCsIakldAXhRbZmm3YN1vXg==
"@next/swc-darwin-x64@14.1.1":
  version "14.1.1"
  resolved "https://registry.yarnpkg.com/@next/swc-darwin-x64/-/swc-darwin-x64-14.1.1.tgz#82c3e67775e40094c66e76845d1a36cc29c9e78b"
  integrity sha512-KCQmBL0CmFmN8D64FHIZVD9I4ugQsDBBEJKiblXGgwn7wBCSe8N4Dx47sdzl4JAg39IkSN5NNrr8AniXLMb3aw==

"@next/swc-darwin-x64@12.1.0":
  version "12.1.0"
  resolved "https://registry.yarnpkg.com/@next/swc-darwin-x64/-/swc-darwin-x64-12.1.0.tgz#fcd684497a76e8feaca88db3c394480ff0b007cd"
  integrity sha512-ieAz0/J0PhmbZBB8+EA/JGdhRHBogF8BWaeqR7hwveb6SYEIJaDNQy0I+ZN8gF8hLj63bEDxJAs/cEhdnTq+ug==
"@next/swc-linux-arm64-gnu@14.1.1":
  version "14.1.1"
  resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-14.1.1.tgz#4f4134457b90adc5c3d167d07dfb713c632c0caa"
  integrity sha512-YDQfbWyW0JMKhJf/T4eyFr4b3tceTorQ5w2n7I0mNVTFOvu6CGEzfwT3RSAQGTi/FFMTFcuspPec/7dFHuP7Eg==

"@next/swc-linux-arm-gnueabihf@12.1.0":
  version "12.1.0"
  resolved "https://registry.yarnpkg.com/@next/swc-linux-arm-gnueabihf/-/swc-linux-arm-gnueabihf-12.1.0.tgz#9ec6380a27938a5799aaa6035c205b3c478468a7"
  integrity sha512-njUd9hpl6o6A5d08dC0cKAgXKCzm5fFtgGe6i0eko8IAdtAPbtHxtpre3VeSxdZvuGFh+hb0REySQP9T1ttkog==
"@next/swc-linux-arm64-musl@14.1.1":
  version "14.1.1"
  resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-14.1.1.tgz#594bedafaeba4a56db23a48ffed2cef7cd09c31a"
  integrity sha512-fiuN/OG6sNGRN/bRFxRvV5LyzLB8gaL8cbDH5o3mEiVwfcMzyE5T//ilMmaTrnA8HLMS6hoz4cHOu6Qcp9vxgQ==

"@next/swc-linux-arm64-gnu@12.1.0":
  version "12.1.0"
  resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-12.1.0.tgz#7f4196dff1049cea479607c75b81033ae2dbd093"
  integrity sha512-OqangJLkRxVxMhDtcb7Qn1xjzFA3s50EIxY7mljbSCLybU+sByPaWAHY4px97ieOlr2y4S0xdPKkQ3BCAwyo6Q==
"@next/swc-linux-x64-gnu@14.1.1":
  version "14.1.1"
  resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-14.1.1.tgz#cb4e75f1ff2b9bcadf2a50684605928ddfc58528"
  integrity sha512-rv6AAdEXoezjbdfp3ouMuVqeLjE1Bin0AuE6qxE6V9g3Giz5/R3xpocHoAi7CufRR+lnkuUjRBn05SYJ83oKNQ==

"@next/swc-linux-arm64-musl@12.1.0":
  version "12.1.0"
  resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-12.1.0.tgz#b445f767569cdc2dddee785ca495e1a88c025566"
  integrity sha512-hB8cLSt4GdmOpcwRe2UzI5UWn6HHO/vLkr5OTuNvCJ5xGDwpPXelVkYW/0+C3g5axbDW2Tym4S+MQCkkH9QfWA==
"@next/swc-linux-x64-musl@14.1.1":
  version "14.1.1"
  resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-14.1.1.tgz#15f26800df941b94d06327f674819ab64b272e25"
  integrity sha512-YAZLGsaNeChSrpz/G7MxO3TIBLaMN8QWMr3X8bt6rCvKovwU7GqQlDu99WdvF33kI8ZahvcdbFsy4jAFzFX7og==

"@next/swc-linux-x64-gnu@12.1.0":
  version "12.1.0"
  resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-12.1.0.tgz#67610e9be4fbc987de7535f1bcb17e45fe12f90e"
  integrity sha512-OKO4R/digvrVuweSw/uBM4nSdyzsBV5EwkUeeG4KVpkIZEe64ZwRpnFB65bC6hGwxIBnTv5NMSnJ+0K/WmG78A==
"@next/swc-win32-arm64-msvc@14.1.1":
  version "14.1.1"
  resolved "https://registry.yarnpkg.com/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-14.1.1.tgz#060c134fa7fa843666e3e8574972b2b723773dd9"
  integrity sha512-1L4mUYPBMvVDMZg1inUYyPvFSduot0g73hgfD9CODgbr4xiTYe0VOMTZzaRqYJYBA9mana0x4eaAaypmWo1r5A==

"@next/swc-linux-x64-musl@12.1.0":
  version "12.1.0"
  resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-12.1.0.tgz#ea19a23db08a9f2e34ac30401f774cf7d1669d31"
  integrity sha512-JohhgAHZvOD3rQY7tlp7NlmvtvYHBYgY0x5ZCecUT6eCCcl9lv6iV3nfu82ErkxNk1H893fqH0FUpznZ/H3pSw==
"@next/swc-win32-ia32-msvc@14.1.1":
  version "14.1.1"
  resolved "https://registry.yarnpkg.com/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-14.1.1.tgz#5c06889352b1f77e3807834a0d0afd7e2d2d1da2"
  integrity sha512-jvIE9tsuj9vpbbXlR5YxrghRfMuG0Qm/nZ/1KDHc+y6FpnZ/apsgh+G6t15vefU0zp3WSpTMIdXRUsNl/7RSuw==

"@next/swc-win32-arm64-msvc@12.1.0":
  version "12.1.0"
  resolved "https://registry.yarnpkg.com/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-12.1.0.tgz#eadf054fc412085659b98e145435bbba200b5283"
  integrity sha512-T/3gIE6QEfKIJ4dmJk75v9hhNiYZhQYAoYm4iVo1TgcsuaKLFa+zMPh4056AHiG6n9tn2UQ1CFE8EoybEsqsSw==

"@next/swc-win32-ia32-msvc@12.1.0":
  version "12.1.0"
  resolved "https://registry.yarnpkg.com/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-12.1.0.tgz#68faeae10c89f698bf9d28759172b74c9c21bda1"
  integrity sha512-iwnKgHJdqhIW19H9PRPM9j55V6RdcOo6rX+5imx832BCWzkDbyomWnlzBfr6ByUYfhohb8QuH4hSGEikpPqI0Q==

"@next/swc-win32-x64-msvc@12.1.0":
  version "12.1.0"
  resolved "https://registry.yarnpkg.com/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-12.1.0.tgz#d27e7e76c87a460a4da99c5bfdb1618dcd6cd064"
  integrity sha512-aBvcbMwuanDH4EMrL2TthNJy+4nP59Bimn8egqv6GHMVj0a44cU6Au4PjOhLNqEh9l+IpRGBqMTzec94UdC5xg==
"@next/swc-win32-x64-msvc@14.1.1":
  version "14.1.1"
  resolved "https://registry.yarnpkg.com/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-14.1.1.tgz#d38c63a8f9b7f36c1470872797d3735b4a9c5c52"
  integrity sha512-S6K6EHDU5+1KrBDLko7/c1MNy/Ya73pIAmvKeFwsF4RmBFJSO7/7YeD4FnZ4iBdzE69PpQ4sOMU9ORKeNuxe8A==

"@nodelib/fs.scandir@2.1.5":
  version "2.1.5"

@@ -139,6 +129,13 @@
  resolved "https://registry.yarnpkg.com/@rushstack/eslint-patch/-/eslint-patch-1.1.0.tgz#7f698254aadf921e48dda8c0a6b304026b8a9323"
  integrity sha512-JLo+Y592QzIE+q7Dl2pMUtt4q8SKYI5jDrZxrozEQxnGVOyYE+GWK9eLkwTaeN9DDctlaRAQ3TBmzZ1qdLE30A==

"@swc/helpers@0.5.2":
  version "0.5.2"
  resolved "https://registry.yarnpkg.com/@swc/helpers/-/helpers-0.5.2.tgz#85ea0c76450b61ad7d10a37050289eded783c27d"
  integrity sha512-E4KcWTpoLHqwPHLxidpOqQbcrZVgi0rsmmZXUle1jXmJfuIf/UWpczUJ7MZZ5tlxytgJXyp0w4PGkkeLiuIdZw==
  dependencies:
    tslib "^2.4.0"

"@types/json5@^0.0.29":
  version "0.0.29"
  resolved "https://registry.yarnpkg.com/@types/json5/-/json5-0.0.29.tgz#ee28707ae94e11d2b827bcbe5270bcea7f3e71ee"

@@ -344,6 +341,13 @@ bulma@^0.9.3:
  resolved "https://registry.yarnpkg.com/bulma/-/bulma-0.9.3.tgz#ddccb7436ebe3e21bf47afe01d3c43a296b70243"
  integrity sha512-0d7GNW1PY4ud8TWxdNcP6Cc8Bu7MxcntD/RRLGWuiw/s0a9P+XlH/6QoOIrmbj6o8WWJzJYhytiu9nFjTszk1g==

busboy@1.6.0:
  version "1.6.0"
  resolved "https://registry.yarnpkg.com/busboy/-/busboy-1.6.0.tgz#966ea36a9502e43cdb9146962523b92f531f6893"
  integrity sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA==
  dependencies:
    streamsearch "^1.1.0"

call-bind@^1.0.0, call-bind@^1.0.2:
  version "1.0.2"
  resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c"

@@ -357,10 +361,10 @@ callsites@^3.0.0:
  resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73"
  integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==

caniuse-lite@^1.0.30001283:
  version "1.0.30001314"
  resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001314.tgz#65c7f9fb7e4594fca0a333bec1d8939662377596"
  integrity sha512-0zaSO+TnCHtHJIbpLroX7nsD+vYuOVjl3uzFbJO1wMVbuveJA0RK2WcQA9ZUIOiO0/ArMiMgHJLxfEZhQiC0kw==
caniuse-lite@^1.0.30001579:
  version "1.0.30001640"
  resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001640.tgz#32c467d4bf1f1a0faa63fc793c2ba81169e7652f"
  integrity sha512-lA4VMpW0PSUrFnkmVuEKBUovSWKhj7puyCg8StBChgu298N1AtuF1sKWEvfDuimSEDbhlb/KqPKC3fs1HbuQUA==

chalk@^4.0.0:
  version "4.1.2"

@@ -385,6 +389,11 @@ chalk@^4.0.0:
  optionalDependencies:
    fsevents "~2.3.2"

client-only@0.0.1:
  version "0.0.1"
  resolved "https://registry.yarnpkg.com/client-only/-/client-only-0.0.1.tgz#38bba5d403c41ab150bff64a95c85013cf73bca1"
  integrity sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA==

color-convert@^2.0.1:
  version "2.0.1"
  resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3"

@@ -909,6 +918,11 @@ globby@^11.0.4:
    merge2 "^1.4.1"
    slash "^3.0.0"

graceful-fs@^4.2.11:
  version "4.2.11"
  resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3"
  integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==

has-bigints@^1.0.1:
  version "1.0.1"
  resolved "https://registry.yarnpkg.com/has-bigints/-/has-bigints-1.0.1.tgz#64fe6acb020673e3b78db035a5af69aa9d07b113"

@@ -1221,38 +1235,38 @@ ms@^2.1.1:
  resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2"
  integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==

nanoid@^3.1.30:
  version "3.3.1"
  resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.1.tgz#6347a18cac88af88f58af0b3594b723d5e99bb35"
  integrity sha512-n6Vs/3KGyxPQd6uO0eH4Bv0ojGSUvuLlIHtC3Y0kEO23YRge8H9x1GCzLn28YX0H66pMkxuaeESFq4tKISKwdw==
nanoid@^3.3.6:
  version "3.3.7"
  resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.7.tgz#d0c301a691bc8d54efa0a2226ccf3fe2fd656bd8"
  integrity sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==

natural-compare@^1.4.0:
  version "1.4.0"
  resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7"
  integrity sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=

next@12.1.0:
  version "12.1.0"
  resolved "https://registry.yarnpkg.com/next/-/next-12.1.0.tgz#c33d753b644be92fc58e06e5a214f143da61dd5d"
  integrity sha512-s885kWvnIlxsUFHq9UGyIyLiuD0G3BUC/xrH0CEnH5lHEWkwQcHOORgbDF0hbrW9vr/7am4ETfX4A7M6DjrE7Q==
next@14.1.1:
  version "14.1.1"
  resolved "https://registry.yarnpkg.com/next/-/next-14.1.1.tgz#92bd603996c050422a738e90362dff758459a171"
  integrity sha512-McrGJqlGSHeaz2yTRPkEucxQKe5Zq7uPwyeHNmJaZNY4wx9E9QdxmTp310agFRoMuIYgQrCrT3petg13fSVOww==
  dependencies:
    "@next/env" "12.1.0"
    caniuse-lite "^1.0.30001283"
    postcss "8.4.5"
    styled-jsx "5.0.0"
    use-subscription "1.5.1"
    "@next/env" "14.1.1"
    "@swc/helpers" "0.5.2"
    busboy "1.6.0"
    caniuse-lite "^1.0.30001579"
    graceful-fs "^4.2.11"
    postcss "8.4.31"
    styled-jsx "5.1.1"
  optionalDependencies:
    "@next/swc-android-arm64" "12.1.0"
    "@next/swc-darwin-arm64" "12.1.0"
    "@next/swc-darwin-x64" "12.1.0"
    "@next/swc-linux-arm-gnueabihf" "12.1.0"
    "@next/swc-linux-arm64-gnu" "12.1.0"
    "@next/swc-linux-arm64-musl" "12.1.0"
    "@next/swc-linux-x64-gnu" "12.1.0"
    "@next/swc-linux-x64-musl" "12.1.0"
    "@next/swc-win32-arm64-msvc" "12.1.0"
    "@next/swc-win32-ia32-msvc" "12.1.0"
    "@next/swc-win32-x64-msvc" "12.1.0"
    "@next/swc-darwin-arm64" "14.1.1"
    "@next/swc-darwin-x64" "14.1.1"
    "@next/swc-linux-arm64-gnu" "14.1.1"
    "@next/swc-linux-arm64-musl" "14.1.1"
    "@next/swc-linux-x64-gnu" "14.1.1"
    "@next/swc-linux-x64-musl" "14.1.1"
    "@next/swc-win32-arm64-msvc" "14.1.1"
    "@next/swc-win32-ia32-msvc" "14.1.1"
    "@next/swc-win32-x64-msvc" "14.1.1"

node-domexception@^1.0.0:
  version "1.0.0"

@@ -1413,14 +1427,14 @@ picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.3:
  resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42"
  integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==

postcss@8.4.5:
  version "8.4.5"
  resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.5.tgz#bae665764dfd4c6fcc24dc0fdf7e7aa00cc77f95"
  integrity sha512-jBDboWM8qpaqwkMwItqTQTiFikhs/67OYVvblFFTM7MrZjt6yMKd6r2kgXizEbTTljacm4NldIlZnhbjr84QYg==
postcss@8.4.31:
  version "8.4.31"
  resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.31.tgz#92b451050a9f914da6755af352bdc0192508656d"
  integrity sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ==
  dependencies:
    nanoid "^3.1.30"
    nanoid "^3.3.6"
    picocolors "^1.0.0"
    source-map-js "^1.0.1"
    source-map-js "^1.0.2"

prelude-ls@^1.2.1:
  version "1.2.1"

@@ -1594,11 +1608,21 @@ slash@^3.0.0:
  resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634"
  integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==

"source-map-js@>=0.6.2 <2.0.0", source-map-js@^1.0.1:
"source-map-js@>=0.6.2 <2.0.0":
  version "1.0.2"
  resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.0.2.tgz#adbc361d9c62df380125e7f161f71c826f1e490c"
  integrity sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==

source-map-js@^1.0.2:
  version "1.2.0"
  resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.2.0.tgz#16b809c162517b5b8c3e7dcd315a2a5c2612b2af"
  integrity sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==

streamsearch@^1.1.0:
  version "1.1.0"
  resolved "https://registry.yarnpkg.com/streamsearch/-/streamsearch-1.1.0.tgz#404dd1e2247ca94af554e841a8ef0eaa238da764"
  integrity sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg==

string.prototype.matchall@^4.0.6:
  version "4.0.6"
  resolved "https://registry.yarnpkg.com/string.prototype.matchall/-/string.prototype.matchall-4.0.6.tgz#5abb5dabc94c7b0ea2380f65ba610b3a544b15fa"

@@ -1646,10 +1670,12 @@ strip-json-comments@^3.1.0, strip-json-comments@^3.1.1:
  resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006"
  integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==

styled-jsx@5.0.0:
  version "5.0.0"
  resolved "https://registry.yarnpkg.com/styled-jsx/-/styled-jsx-5.0.0.tgz#816b4b92e07b1786c6b7111821750e0ba4d26e77"
  integrity sha512-qUqsWoBquEdERe10EW8vLp3jT25s/ssG1/qX5gZ4wu15OZpmSMFI2v+fWlRhLfykA5rFtlJ1ME8A8pm/peV4WA==
styled-jsx@5.1.1:
  version "5.1.1"
  resolved "https://registry.yarnpkg.com/styled-jsx/-/styled-jsx-5.1.1.tgz#839a1c3aaacc4e735fed0781b8619ea5d0009d1f"
  integrity sha512-pW7uC1l4mBZ8ugbiZrcIsiIvVx1UmTfw7UkC3Um2tmfUq9Bhk8IiyEIPl6F8agHgjzku6j0xQEZbfA5uSgSaCw==
  dependencies:
    client-only "0.0.1"

supports-color@^7.1.0:
  version "7.2.0"

@@ -1690,6 +1716,11 @@ tslib@^1.8.1:
  resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00"
  integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==

tslib@^2.4.0:
  version "2.6.3"
  resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.3.tgz#0438f810ad7a9edcde7a241c3d80db693c8cbfe0"
  integrity sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ==

tsutils@^3.21.0:
  version "3.21.0"
  resolved "https://registry.yarnpkg.com/tsutils/-/tsutils-3.21.0.tgz#b48717d394cea6c1e096983eed58e9d61715b623"

@@ -1709,10 +1740,10 @@ type-fest@^0.20.2:
  resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4"
  integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==

typescript@5.2.2:
  version "5.2.2"
  resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.2.2.tgz#5ebb5e5a5b75f085f22bc3f8460fba308310fa78"
  integrity sha512-mI4WrpHsbCIcwT9cF4FZvr80QUeKvsUsUvKDoR+X/7XHQH98xYD8YHZg7ANtz2GtZt/CBq2QJ0thkGJMHfqc1w==
typescript@5.5.2:
  version "5.5.2"
  resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.5.2.tgz#c26f023cb0054e657ce04f72583ea2d85f8d0507"
  integrity sha512-NcRtPEOsPFFWjobJEtfihkLCZCXZt/os3zf8nTxjVH3RvTSxjrCamJpbExGvYOF+tFHc3pA65qpdwPbzjohhew==

unbox-primitive@^1.0.1:
  version "1.0.1"

@@ -1731,13 +1762,6 @@ uri-js@^4.2.2:
  dependencies:
    punycode "^2.1.0"

use-subscription@1.5.1:
  version "1.5.1"
  resolved "https://registry.yarnpkg.com/use-subscription/-/use-subscription-1.5.1.tgz#73501107f02fad84c6dd57965beb0b75c68c42d1"
  integrity sha512-Xv2a1P/yReAjAbhylMfFplFKj9GssgTwN7RlcTxBujFQcloStWNDQdc4g4NRWH9xS4i/FDk04vQBptAXoF3VcA==
  dependencies:
    object-assign "^4.1.1"

v8-compile-cache@^2.0.3:
  version "2.3.0"
  resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz#2de19618c66dc247dcfb6f99338035d8245a2cee"
@@ -96,10 +96,13 @@ EXPOSE 5984 4369 9100
CMD ["/opt/couchdb/bin/couchdb"]

FROM base as runner
ARG TARGETARCH
ENV TARGETARCH $TARGETARCH

ENV COUCHDB_USER admin
ENV COUCHDB_PASSWORD admin
EXPOSE 5984
EXPOSE 4984

RUN apt-get update && apt-get install -y --no-install-recommends software-properties-common wget unzip curl && \
    wget -O - https://packages.adoptium.net/artifactory/api/gpg/key/public | apt-key add - && \

@@ -125,7 +128,12 @@ ADD clouseau/log4j.properties clouseau/clouseau.ini ./
WORKDIR /opt/couchdb
ADD couch/vm.args couch/local.ini ./etc/

# setup SQS
WORKDIR /opt/sqs
ADD sqs ./
RUN chmod +x ./install.sh && ./install.sh

WORKDIR /
ADD runner.sh ./bbcouch-runner.sh
RUN chmod +x ./bbcouch-runner.sh /opt/clouseau/bin/clouseau
CMD ["./bbcouch-runner.sh"]
RUN chmod +x ./bbcouch-runner.sh /opt/clouseau/bin/clouseau /opt/sqs/sqs
CMD ["./bbcouch-runner.sh"]
@@ -1,139 +0,0 @@
# Modified from https://github.com/apache/couchdb-docker/blob/main/3.3.3/Dockerfile
#
# Everything in this `base` image is adapted from the official `couchdb` image's
# Dockerfile. Only modifications related to upgrading from Debian bullseye to
# bookworm have been included. The `runner` image contains Budibase's
# customisations to the image, e.g. adding Clouseau.
FROM node:20-slim AS base

# Add CouchDB user account to make sure the IDs are assigned consistently
RUN groupadd -g 5984 -r couchdb && useradd -u 5984 -d /opt/couchdb -g couchdb couchdb

# be sure GPG and apt-transport-https are available and functional
RUN set -ex; \
    apt-get update; \
    apt-get install -y --no-install-recommends \
        apt-transport-https \
        ca-certificates \
        dirmngr \
        gnupg \
    ; \
    rm -rf /var/lib/apt/lists/*

# grab tini for signal handling and zombie reaping
# see https://github.com/apache/couchdb-docker/pull/28#discussion_r141112407
RUN set -eux; \
    apt-get update; \
    apt-get install -y --no-install-recommends tini; \
    rm -rf /var/lib/apt/lists/*; \
    tini --version

# http://docs.couchdb.org/en/latest/install/unix.html#installing-the-apache-couchdb-packages
ENV GPG_COUCH_KEY \
    # gpg: rsa8192 205-01-19 The Apache Software Foundation (Package repository signing key) <root@apache.org>
    390EF70BB1EA12B2773962950EE62FB37A00258D
RUN set -eux; \
    apt-get update; \
    apt-get install -y curl; \
    export GNUPGHOME="$(mktemp -d)"; \
    curl -fL -o keys.asc https://couchdb.apache.org/repo/keys.asc; \
    gpg --batch --import keys.asc; \
    gpg --batch --export "${GPG_COUCH_KEY}" > /usr/share/keyrings/couchdb-archive-keyring.gpg; \
    command -v gpgconf && gpgconf --kill all || :; \
    rm -rf "$GNUPGHOME"; \
    apt-key list; \
    apt purge -y --autoremove curl; \
    rm -rf /var/lib/apt/lists/*

ENV COUCHDB_VERSION 3.3.3

RUN . /etc/os-release; \
    echo "deb [signed-by=/usr/share/keyrings/couchdb-archive-keyring.gpg] https://apache.jfrog.io/artifactory/couchdb-deb/ ${VERSION_CODENAME} main" | \
        tee /etc/apt/sources.list.d/couchdb.list >/dev/null

# https://github.com/apache/couchdb-pkg/blob/master/debian/README.Debian
RUN set -eux; \
    apt-get update; \
    \
    echo "couchdb couchdb/mode select none" | debconf-set-selections; \
    # we DO want recommends this time
    DEBIAN_FRONTEND=noninteractive apt-get install -y --allow-downgrades --allow-remove-essential --allow-change-held-packages \
        couchdb="$COUCHDB_VERSION"~bookworm \
    ; \
    # Undo symlinks to /var/log and /var/lib
    rmdir /var/lib/couchdb /var/log/couchdb; \
    rm /opt/couchdb/data /opt/couchdb/var/log; \
    mkdir -p /opt/couchdb/data /opt/couchdb/var/log; \
    chown couchdb:couchdb /opt/couchdb/data /opt/couchdb/var/log; \
    chmod 777 /opt/couchdb/data /opt/couchdb/var/log; \
    # Remove file that sets logging to a file
    rm /opt/couchdb/etc/default.d/10-filelog.ini; \
    # Check we own everything in /opt/couchdb. Matches the command in dockerfile_entrypoint.sh
    find /opt/couchdb \! \( -user couchdb -group couchdb \) -exec chown -f couchdb:couchdb '{}' +; \
    # Setup directories and permissions for config. Technically these could be 555 and 444 respectively
    # but we keep them as 755 and 644 for consistency with CouchDB defaults and the dockerfile_entrypoint.sh.
    find /opt/couchdb/etc -type d ! -perm 0755 -exec chmod -f 0755 '{}' +; \
    find /opt/couchdb/etc -type f ! -perm 0644 -exec chmod -f 0644 '{}' +; \
    # only local.d needs to be writable for the docker_entrypoint.sh
    chmod -f 0777 /opt/couchdb/etc/local.d; \
    # apt clean-up
    rm -rf /var/lib/apt/lists/*;

# Add configuration
COPY --chown=couchdb:couchdb couch/10-docker-default.ini /opt/couchdb/etc/default.d/
# COPY --chown=couchdb:couchdb vm.args /opt/couchdb/etc/

COPY docker-entrypoint.sh /usr/local/bin
RUN ln -s usr/local/bin/docker-entrypoint.sh /docker-entrypoint.sh # backwards compat
ENTRYPOINT ["tini", "--", "/docker-entrypoint.sh"]

VOLUME /opt/couchdb/data

# 5984: Main CouchDB endpoint
# 4369: Erlang portmap daemon (epmd)
# 9100: CouchDB cluster communication port
EXPOSE 5984 4369 9100
CMD ["/opt/couchdb/bin/couchdb"]

FROM base as runner
ARG TARGETARCH
ENV TARGETARCH $TARGETARCH

ENV COUCHDB_USER admin
ENV COUCHDB_PASSWORD admin
EXPOSE 5984
EXPOSE 4984

RUN apt-get update && apt-get install -y --no-install-recommends software-properties-common wget unzip curl && \
    wget -O - https://packages.adoptium.net/artifactory/api/gpg/key/public | apt-key add - && \
    apt-add-repository 'deb http://security.debian.org/debian-security bookworm-security/updates main' && \
    apt-add-repository 'deb http://archive.debian.org/debian stretch-backports main' && \
    apt-add-repository 'deb https://packages.adoptium.net/artifactory/deb bookworm main' && \
    apt-get update && apt-get install -y --no-install-recommends temurin-8-jdk && \
    rm -rf /var/lib/apt/lists/

# setup clouseau
WORKDIR /
RUN wget https://github.com/cloudant-labs/clouseau/releases/download/2.21.0/clouseau-2.21.0-dist.zip && \
    unzip clouseau-2.21.0-dist.zip && \
    mv clouseau-2.21.0 /opt/clouseau && \
    rm clouseau-2.21.0-dist.zip

WORKDIR /opt/clouseau
RUN mkdir ./bin
ADD clouseau/clouseau ./bin/
ADD clouseau/log4j.properties clouseau/clouseau.ini ./

# setup CouchDB
WORKDIR /opt/couchdb
ADD couch/vm.args couch/local.ini ./etc/

# setup SQS
WORKDIR /opt/sqs
ADD sqs ./
RUN chmod +x ./install.sh && ./install.sh

WORKDIR /
ADD runner.v2.sh ./bbcouch-runner.sh
RUN chmod +x ./bbcouch-runner.sh /opt/clouseau/bin/clouseau /opt/sqs/sqs
CMD ["./bbcouch-runner.sh"]
@@ -70,9 +70,12 @@ sed -i "s#COUCHDB_ERLANG_COOKIE#${COUCHDB_ERLANG_COOKIE}#g" /opt/clouseau/clouseau.ini
/opt/clouseau/bin/clouseau > /dev/stdout 2>&1 &

# Start CouchDB.
/docker-entrypoint.sh /opt/couchdb/bin/couchdb &
/docker-entrypoint.sh /opt/couchdb/bin/couchdb > /dev/stdout 2>&1 &

# Wati for CouchDB to start up.
# Start SQS. Use 127.0.0.1 instead of localhost to avoid IPv6 issues.
/opt/sqs/sqs --server "http://127.0.0.1:5984" --data-dir ${DATA_DIR}/sqs --bind-address=0.0.0.0 > /dev/stdout 2>&1 &

# Wait for CouchDB to start up.
while [[ $(curl -s -w "%{http_code}\n" http://localhost:5984/_up -o /dev/null) -ne 200 ]]; do
  echo 'Waiting for CouchDB to start...';
  sleep 5;

@@ -82,4 +85,4 @@ done
# function correctly, so we create them here.
curl -X PUT -u "${COUCHDB_USER}:${COUCHDB_PASSWORD}" http://localhost:5984/_users
curl -X PUT -u "${COUCHDB_USER}:${COUCHDB_PASSWORD}" http://localhost:5984/_replicator
sleep infinity
sleep infinity
@@ -1,88 +0,0 @@
#!/bin/bash

DATA_DIR=${DATA_DIR:-/data}
COUCHDB_ERLANG_COOKIE=${COUCHDB_ERLANG_COOKIE:-B9CFC32C-3458-4A86-8448-B3C753991CA7}

mkdir -p ${DATA_DIR}
mkdir -p ${DATA_DIR}/couch/{dbs,views}
mkdir -p ${DATA_DIR}/search
chown -R couchdb:couchdb ${DATA_DIR}/couch

echo ${TARGETBUILD} > /buildtarget.txt
if [[ "${TARGETBUILD}" = "aas" ]]; then
  # Azure AppService uses /home for persistent data & SSH on port 2222
  DATA_DIR="${DATA_DIR:-/home}"
  WEBSITES_ENABLE_APP_SERVICE_STORAGE=true
  mkdir -p $DATA_DIR/{search,minio,couch}
  mkdir -p $DATA_DIR/couch/{dbs,views}
  chown -R couchdb:couchdb $DATA_DIR/couch/
  apt update
  apt-get install -y openssh-server
  echo "root:Docker!" | chpasswd
  mkdir -p /tmp
  chmod +x /tmp/ssh_setup.sh \
    && (sleep 1;/tmp/ssh_setup.sh 2>&1 > /dev/null)
  cp /etc/sshd_config /etc/ssh/sshd_config
  /etc/init.d/ssh restart
  sed -i "s#DATA_DIR#/home#g" /opt/clouseau/clouseau.ini
  sed -i "s#DATA_DIR#/home#g" /opt/couchdb/etc/local.ini
elif [[ "${TARGETBUILD}" = "single" ]]; then
  # In the single image build, the Dockerfile specifies /data as a volume
  # mount, so we use that for all persistent data.
  sed -i "s#DATA_DIR#/data#g" /opt/clouseau/clouseau.ini
  sed -i "s#DATA_DIR#/data#g" /opt/couchdb/etc/local.ini
elif [[ "${TARGETBUILD}" = "docker-compose" ]]; then
  # We remove the database_dir and view_index_dir settings from the local.ini
  # in docker-compose because it will default to /opt/couchdb/data which is what
  # our docker-compose was using prior to us switching to using our own CouchDB
  # image.
  sed -i "s#^database_dir.*\$##g" /opt/couchdb/etc/local.ini
  sed -i "s#^view_index_dir.*\$##g" /opt/couchdb/etc/local.ini
  sed -i "s#^dir=.*\$#dir=/opt/couchdb/data#g" /opt/clouseau/clouseau.ini
elif [[ -n $KUBERNETES_SERVICE_HOST ]]; then
  # In Kubernetes the directory /opt/couchdb/data has a persistent volume
  # mount for storing database data.
  sed -i "s#^dir=.*\$#dir=/opt/couchdb/data#g" /opt/clouseau/clouseau.ini

  # We remove the database_dir and view_index_dir settings from the local.ini
  # in Kubernetes because it will default to /opt/couchdb/data which is what
  # our Helm chart was using prior to us switching to using our own CouchDB
  # image.
  sed -i "s#^database_dir.*\$##g" /opt/couchdb/etc/local.ini
  sed -i "s#^view_index_dir.*\$##g" /opt/couchdb/etc/local.ini

  # We remove the -name setting from the vm.args file in Kubernetes because
  # it will default to the pod FQDN, which is what's required for clustering
  # to work.
  sed -i "s/^-name .*$//g" /opt/couchdb/etc/vm.args
else
  # For all other builds, we use /data for persistent data.
  sed -i "s#DATA_DIR#/data#g" /opt/clouseau/clouseau.ini
  sed -i "s#DATA_DIR#/data#g" /opt/couchdb/etc/local.ini
fi

sed -i "s#COUCHDB_ERLANG_COOKIE#${COUCHDB_ERLANG_COOKIE}#g" /opt/couchdb/etc/vm.args
sed -i "s#COUCHDB_ERLANG_COOKIE#${COUCHDB_ERLANG_COOKIE}#g" /opt/clouseau/clouseau.ini

# Start Clouseau. Budibase won't function correctly without Clouseau running, it
# powers the search API endpoints which are used to do all sorts, including
# populating app grids.
/opt/clouseau/bin/clouseau > /dev/stdout 2>&1 &

# Start CouchDB.
/docker-entrypoint.sh /opt/couchdb/bin/couchdb > /dev/stdout 2>&1 &

# Start SQS. Use 127.0.0.1 instead of localhost to avoid IPv6 issues.
/opt/sqs/sqs --server "http://127.0.0.1:5984" --data-dir ${DATA_DIR}/sqs --bind-address=0.0.0.0 > /dev/stdout 2>&1 &

# Wait for CouchDB to start up.
while [[ $(curl -s -w "%{http_code}\n" http://localhost:5984/_up -o /dev/null) -ne 200 ]]; do
  echo 'Waiting for CouchDB to start...';
  sleep 5;
done

# CouchDB needs the `_users` and `_replicator` databases to exist before it will
# function correctly, so we create them here.
curl -X PUT -u "${COUCHDB_USER}:${COUCHDB_PASSWORD}" http://localhost:5984/_users
curl -X PUT -u "${COUCHDB_USER}:${COUCHDB_PASSWORD}" http://localhost:5984/_replicator
sleep infinity
@@ -1,12 +1,12 @@
{
  "version": "2.29.3",
  "$schema": "node_modules/lerna/schemas/lerna-schema.json",
  "version": "2.29.20",
  "npmClient": "yarn",
  "packages": [
    "packages/*",
    "!packages/account-portal",
    "packages/account-portal/packages/*"
  ],
  "useNx": true,
  "concurrency": 20,
  "command": {
    "publish": {
nx.json
@@ -1,4 +1,5 @@
{
  "$schema": "./node_modules/nx/schemas/nx-schema.json",
  "tasksRunnerOptions": {
    "default": {
      "runner": "nx-cloud",

@@ -11,5 +12,10 @@
    "build": {
      "inputs": ["{workspaceRoot}/scripts/*", "{workspaceRoot}/lerna.json"]
    }
  },
  "namedInputs": {
    "default": ["{projectRoot}/**/*", "sharedGlobals"],
    "sharedGlobals": [],
    "production": ["default"]
  }
}
package.json
@@ -18,9 +18,8 @@
    "eslint-plugin-svelte": "^2.34.0",
    "husky": "^8.0.3",
    "kill-port": "^1.6.1",
    "lerna": "7.1.1",
    "lerna": "7.4.2",
    "madge": "^6.0.0",
    "nx": "16.4.3",
    "nx-cloud": "16.0.5",
    "prettier": "2.8.8",
    "prettier-plugin-svelte": "^2.3.0",

@@ -34,10 +33,10 @@
  "scripts": {
    "get-past-client-version": "node scripts/getPastClientVersion.js",
    "setup": "git config submodule.recurse true && git submodule update && node ./hosting/scripts/setup.js && yarn && yarn build && yarn dev",
    "build": "NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream",
    "build:apps": "yarn build --scope @budibase/server --scope @budibase/worker",
    "build": "DISABLE_V8_COMPILE_CACHE=1 NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream",
    "build:apps": "DISABLE_V8_COMPILE_CACHE=1 yarn build --scope @budibase/server --scope @budibase/worker",
    "build:oss": "DISABLE_V8_COMPILE_CACHE=1 NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream --ignore @budibase/account-portal-server --ignore @budibase/account-portal-ui",
    "build:cli": "yarn build --scope @budibase/cli",
    "build:oss": "NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream --ignore @budibase/account-portal-server --ignore @budibase/account-portal-ui",
    "build:account-portal": "NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream --scope @budibase/account-portal-server --scope @budibase/account-portal-ui",
    "build:dev": "lerna run --stream prebuild && yarn nx run-many --target=build --output-style=dynamic --watch --preserveWatchOutput",
    "check:types": "lerna run --concurrency 2 check:types --ignore @budibase/account-portal-server",

@@ -78,7 +77,6 @@
    "build:docker:single:sqs": "./scripts/build-single-image-sqs.sh",
    "build:docker:dependencies": "docker build -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest ./hosting",
    "publish:docker:couch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/couchdb/Dockerfile -t budibase/couchdb:latest -t budibase/couchdb:v3.3.3 --push ./hosting/couchdb",
    "publish:docker:couch-sqs": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/couchdb/Dockerfile.v2 -t budibase/couchdb:v3.3.3-sqs --push ./hosting/couchdb",
    "publish:docker:dependencies": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest -t budibase/dependencies:v3.2.1 --push ./hosting",
    "release:helm": "node scripts/releaseHelmChart",
    "env:multi:enable": "lerna run --stream env:multi:enable",
@@ -1 +1 @@
Subproject commit ff16525b73c5751d344f5c161a682609c0a993f2
Subproject commit b03e584e465f620b49a1b688ff4afc973e6c0758
@@ -22,10 +22,9 @@
  },
  "dependencies": {
    "@budibase/nano": "10.1.5",
    "@budibase/pouchdb-replication-stream": "1.2.10",
    "@budibase/pouchdb-replication-stream": "1.2.11",
    "@budibase/shared-core": "0.0.0",
    "@budibase/types": "0.0.0",
    "@govtechsg/passport-openidconnect": "^1.0.2",
    "aws-cloudfront-sign": "3.0.2",
    "aws-sdk": "2.1030.0",
    "bcrypt": "5.1.0",
@@ -1,5 +0,0 @@
export {
  CONSTANT_INTERNAL_ROW_COLS,
  CONSTANT_EXTERNAL_ROW_COLS,
  isInternalColumnName,
} from "@budibase/shared-core"
@@ -13,6 +13,7 @@ import {
  isDocument,
  RowResponse,
  RowValue,
  SqlClient,
  SQLiteDefinition,
  SqlQueryBinding,
} from "@budibase/types"

@@ -25,6 +26,7 @@ import { SQLITE_DESIGN_DOC_ID } from "../../constants"
import { DDInstrumentedDatabase } from "../instrumentation"
import { checkSlashesInUrl } from "../../helpers"
import env from "../../environment"
import { sqlLog } from "../../sql/utils"

const DATABASE_NOT_FOUND = "Database does not exist."

@@ -80,6 +82,11 @@ export function DatabaseWithConnection(
  connection: string,
  opts?: DatabaseOpts
) {
  if (!dbName || !connection) {
    throw new Error(
      "Unable to create database without database name or connection"
    )
  }
  const db = new DatabaseImpl(dbName, opts, connection)
  return new DDInstrumentedDatabase(db)
}

@@ -317,6 +324,7 @@ export class DatabaseImpl implements Database {
  ): Promise<T[]> {
    const dbName = this.name
    const url = `/${dbName}/${SQLITE_DESIGN_DOC_ID}`
    sqlLog(SqlClient.SQL_LITE, sql, parameters)
    return await this._sqlQuery<T[]>(url, "POST", {
      query: sql,
      args: parameters,
@@ -1,6 +1,7 @@
import env from "../../environment"

export const getCouchInfo = (connection?: string) => {
  // clean out any auth credentials
  const urlInfo = getUrlInfo(connection)
  let username
  let password

@@ -23,9 +24,19 @@ export const getCouchInfo = (connection?: string) => {
    throw new Error("CouchDB password not set")
  }
  const authCookie = Buffer.from(`${username}:${password}`).toString("base64")
  let sqlUrl = env.COUCH_DB_SQL_URL
  // default for dev
  if (env.isDev() && !sqlUrl) {
    sqlUrl = "http://localhost:4006"
  } else if (!sqlUrl && urlInfo.url) {
    const parsed = new URL(urlInfo.url)
    // attempt to connect on default port
    sqlUrl = urlInfo.url.replace(parsed.port, "4984")
  }
  return {
    url: urlInfo.url!,
    sqlUrl: env.COUCH_DB_SQL_URL,
    // clean out any auth credentials
    sqlUrl: getUrlInfo(sqlUrl).url,
    auth: {
      username: username,
      password: password,
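To make the new `sqlUrl` fallback concrete: outside of dev, when `COUCH_DB_SQL_URL` is unset, the SQL endpoint is derived from the main CouchDB URL by swapping its port for 4984. A minimal standalone sketch of that derivation (the 4984 default comes from the hunk above; note that, as in the real code, it assumes the CouchDB URL carries an explicit port):

```typescript
// Standalone sketch of the port-swap fallback shown above (illustrative only).
function deriveSqlUrl(couchUrl: string): string {
  const parsed = new URL(couchUrl)
  // Swap only the port; protocol, host and path are preserved.
  return couchUrl.replace(parsed.port, "4984")
}

console.log(deriveSqlUrl("http://localhost:5984"))       // http://localhost:4984
console.log(deriveSqlUrl("https://couch.internal:5984")) // https://couch.internal:4984
```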
@@ -2,4 +2,3 @@ export * from "./connections"
export * from "./DatabaseImpl"
export * from "./utils"
export { init, getPouch, getPouchDB, closePouchDB } from "./pouchDB"
export * from "../constants"
@@ -0,0 +1,22 @@
import env from "../../environment"
import { getCouchInfo } from "../couch"

const MAIN_COUCH_URL = "http://user:test@localhost:5984"

describe("connections", () => {
  beforeAll(() => {
    env._set("COUCH_DB_SQL_URL", "https://user:test@localhost:4984")
  })

  it("should strip URL credentials", () => {
    const response = getCouchInfo(MAIN_COUCH_URL)
    expect(response.url).toBe("http://localhost:5984")
    expect(response.sqlUrl).toBe("https://localhost:4984")
  })

  it("should return separate auth credentials", () => {
    const response = getCouchInfo(MAIN_COUCH_URL)
    expect(response.auth.username).toBe("user")
    expect(response.auth.password).toBe("test")
  })
})
@@ -1,6 +1,6 @@
import env from "../environment"
import { DEFAULT_TENANT_ID, SEPARATOR, DocumentType } from "../constants"
import { getTenantId, getGlobalDBName } from "../context"
import { getTenantId, getGlobalDBName, isMultiTenant } from "../context"
import { doWithDB, directCouchAllDbs } from "./db"
import { AppState, DeletedApp, getAppMetadata } from "../cache/appMetadata"
import { isDevApp, isDevAppID, getProdAppID } from "../docIds/conversions"

@@ -206,3 +206,34 @@ export function pagination<T>(
    nextPage,
  }
}

export function isSqsEnabledForTenant(): boolean {
  const tenantId = getTenantId()
  if (!env.SQS_SEARCH_ENABLE) {
    return false
  }

  // single tenant (self host and dev) always enabled if flag set
  if (!isMultiTenant()) {
    return true
  }

  // This is to guard against the situation in tests where tests pass because
  // we're not actually using SQS, we're using Lucene and the tests pass due to
  // parity.
  if (env.isTest() && env.SQS_SEARCH_ENABLE_TENANTS.length === 0) {
    throw new Error(
      "to enable SQS you must specify a list of tenants in the SQS_SEARCH_ENABLE_TENANTS env var"
    )
  }

  // Special case to enable all tenants, for testing in QA.
  if (
    env.SQS_SEARCH_ENABLE_TENANTS.length === 1 &&
    env.SQS_SEARCH_ENABLE_TENANTS[0] === "*"
  ) {
    return true
  }

  return env.SQS_SEARCH_ENABLE_TENANTS.includes(tenantId)
}
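For reviewers, the gating rules in `isSqsEnabledForTenant` condense to a small pure function; this sketch restates them with plain parameters so the branches can be exercised in isolation (the test-environment guard is omitted for brevity, and all names here are illustrative rather than Budibase's real API):

```typescript
// Pure restatement of the tenant gating above (illustrative, not the real API).
function sqsEnabled(
  flagSet: boolean,     // SQS_SEARCH_ENABLE
  multiTenant: boolean, // isMultiTenant()
  allowList: string[],  // SQS_SEARCH_ENABLE_TENANTS
  tenantId: string
): boolean {
  if (!flagSet) return false
  if (!multiTenant) return true // self host / dev: the flag alone is enough
  if (allowList.length === 1 && allowList[0] === "*") return true // QA wildcard
  return allowList.includes(tenantId)
}

console.log(sqsEnabled(true, false, [], "t1"))          // true
console.log(sqsEnabled(true, true, ["t1", "t2"], "t2")) // true
console.log(sqsEnabled(true, true, ["t1", "t2"], "t3")) // false
console.log(sqsEnabled(true, true, ["*"], "anything"))  // true
```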
@@ -114,8 +114,11 @@ const environment = {
  ENCRYPTION_KEY: process.env.ENCRYPTION_KEY,
  API_ENCRYPTION_KEY: getAPIEncryptionKey(),
  COUCH_DB_URL: process.env.COUCH_DB_URL || "http://localhost:4005",
  COUCH_DB_SQL_URL: process.env.COUCH_DB_SQL_URL || "http://localhost:4006",
  COUCH_DB_SQL_URL: process.env.COUCH_DB_SQL_URL,
  SQS_SEARCH_ENABLE: process.env.SQS_SEARCH_ENABLE,
  SQS_SEARCH_ENABLE_TENANTS:
    process.env.SQS_SEARCH_ENABLE_TENANTS?.split(",") || [],
  SQS_MIGRATION_ENABLE: process.env.SQS_MIGRATION_ENABLE,
  COUCH_DB_USERNAME: process.env.COUCH_DB_USER,
  COUCH_DB_PASSWORD: process.env.COUCH_DB_PASSWORD,
  GOOGLE_CLIENT_ID: process.env.GOOGLE_CLIENT_ID,

@@ -200,8 +203,28 @@ const environment = {
  },
  ROLLING_LOG_MAX_SIZE: process.env.ROLLING_LOG_MAX_SIZE || "10M",
  DISABLE_SCIM_CALLS: process.env.DISABLE_SCIM_CALLS,
  BB_ADMIN_USER_EMAIL: process.env.BB_ADMIN_USER_EMAIL,
  BB_ADMIN_USER_PASSWORD: process.env.BB_ADMIN_USER_PASSWORD,
  OPENAI_API_KEY: process.env.OPENAI_API_KEY,
}

type EnvironmentKey = keyof typeof environment
export const SECRETS: EnvironmentKey[] = [
  "API_ENCRYPTION_KEY",
  "BB_ADMIN_USER_PASSWORD",
  "COUCH_DB_PASSWORD",
  "COUCH_DB_SQL_URL",
  "COUCH_DB_URL",
  "GOOGLE_CLIENT_SECRET",
  "INTERNAL_API_KEY_FALLBACK",
  "INTERNAL_API_KEY",
  "JWT_SECRET",
  "MINIO_ACCESS_KEY",
  "MINIO_SECRET_KEY",
  "OPENAI_API_KEY",
  "REDIS_PASSWORD",
]

// clean up any environment variable edge cases
for (let [key, value] of Object.entries(environment)) {
  // handle the edge case of "0" to disable an environment variable
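One detail worth calling out in the hunk above: typing `SECRETS` as an array of `keyof typeof environment` means a misspelled key is a compile-time error rather than a silently dead entry. A standalone sketch of the pattern (all names here are invented for illustration):

```typescript
// Illustration of the typed allow-list pattern used for SECRETS above.
const config = {
  JWT_SECRET: process.env.JWT_SECRET,
  PUBLIC_URL: process.env.PUBLIC_URL,
}

type ConfigKey = keyof typeof config

// "JWT_SECRET" compiles; a typo like "JWT_SECRTE" is rejected by tsc.
const secretKeys: ConfigKey[] = ["JWT_SECRET"]

for (const key of secretKeys) {
  const value = config[key]
  if (typeof value === "string" && value !== "") {
    console.log(`${key} is set and can be scrubbed from error payloads`)
  }
}
```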
@@ -1,6 +1,7 @@
import { APIError } from "@budibase/types"
import * as errors from "../errors"
import environment from "../environment"
import { stringContainsSecret } from "../security/secrets"

export async function errorHandling(ctx: any, next: any) {
  try {

@@ -17,11 +18,19 @@ export async function errorHandling(ctx: any, next: any) {

    let error: APIError = {
      message: err.message,
      status: status,
      status,
      validationErrors: err.validation,
      error: errors.getPublicError(err),
    }

    if (stringContainsSecret(JSON.stringify(error))) {
      error = {
        message: "Unexpected error",
        status,
        error: "Unexpected error",
      }
    }

    if (environment.isTest() && ctx.headers["x-budibase-include-stacktrace"]) {
      // @ts-ignore
      error.stack = err.stack
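To illustrate the redaction step just added: if a thrown error happens to echo a configured secret (a password embedded in a connection string, say), serialising the APIError would leak it to the client, so the whole body is replaced. A simplified, self-contained model of that branch (the types and the sample secret are invented for illustration):

```typescript
// Simplified model of the masking branch in the middleware above.
interface APIError {
  message: string
  status: number
  error?: unknown
}

function maskIfLeaking(
  error: APIError,
  containsSecret: (s: string) => boolean
): APIError {
  if (containsSecret(JSON.stringify(error))) {
    return { message: "Unexpected error", status: error.status, error: "Unexpected error" }
  }
  return error
}

// Pretend "hunter2" is one of the configured secret values.
const containsSecret = (s: string) => s.includes("hunter2")
const leaky: APIError = {
  message: "failed to connect to http://admin:hunter2@couchdb:5984",
  status: 500,
}
console.log(maskIfLeaking(leaky, containsSecret))
// -> { message: "Unexpected error", status: 500, error: "Unexpected error" }
```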
@@ -2,6 +2,7 @@ import { GenericContainer, StartedTestContainer } from "testcontainers"
import { generator, structures } from "../../../tests"
import RedisWrapper from "../redis"
import { env } from "../.."
import { randomUUID } from "crypto"

jest.setTimeout(30000)

@@ -52,10 +53,10 @@ describe("redis", () => {
  describe("bulkStore", () => {
    function createRandomObject(
      keyLength: number,
      valueGenerator: () => any = () => generator.word()
      valueGenerator: () => any = () => randomUUID()
    ) {
      return generator
        .unique(() => generator.word(), keyLength)
        .unique(() => randomUUID(), keyLength)
        .reduce((acc, key) => {
          acc[key] = valueGenerator()
          return acc
@@ -0,0 +1,20 @@
import environment, { SECRETS } from "../environment"

export function stringContainsSecret(str: string) {
  if (str.includes("-----BEGIN PRIVATE KEY-----")) {
    return true
  }

  for (const key of SECRETS) {
    const value = environment[key]
    if (typeof value !== "string" || value === "") {
      continue
    }

    if (str.includes(value)) {
      return true
    }
  }

  return false
}
@@ -0,0 +1,35 @@
import { randomUUID } from "crypto"
import environment, { SECRETS } from "../../environment"
import { stringContainsSecret } from "../secrets"

describe("secrets", () => {
  describe("stringContainsSecret", () => {
    it.each(SECRETS)("detects that a string contains a secret in: %s", key => {
      const needle = randomUUID()
      const haystack = `this is a secret: ${needle}`
      const old = environment[key]
      environment._set(key, needle)

      try {
        expect(stringContainsSecret(haystack)).toBe(true)
      } finally {
        environment._set(key, old)
      }
    })

    it.each(SECRETS)(
      "detects that a string does not contain a secret in: %s",
      key => {
        const needle = randomUUID()
        const haystack = `this does not contain a secret`
        const old = environment[key]
        environment._set(key, needle)
        try {
          expect(stringContainsSecret(haystack)).toBe(false)
        } finally {
          environment._set(key, old)
        }
      }
    )
  })
})
@@ -3,16 +3,20 @@ import * as dbCore from "../db"
import {
  getNativeSql,
  isExternalTable,
  isIsoDateString,
  isValidISODateString,
  isValidFilter,
  sqlLog,
  isInvalidISODateString,
} from "./utils"
import { SqlStatements } from "./sqlStatements"
import SqlTableQueryBuilder from "./sqlTable"
import {
  AnySearchFilter,
  BBReferenceFieldMetadata,
  FieldSchema,
  FieldType,
  INTERNAL_TABLE_SOURCE_ID,
  InternalSearchFilterOperator,
  JsonFieldMetadata,
  JsonTypes,
  Operation,

@@ -38,11 +42,7 @@ const envLimit = environment.SQL_MAX_ROWS
  : null
const BASE_LIMIT = envLimit || 5000

// these are invalid dates sent by the client, need to convert them to a real max date
const MIN_ISO_DATE = "0000-00-00T00:00:00.000Z"
const MAX_ISO_DATE = "9999-00-00T00:00:00.000Z"

function likeKey(client: string, key: string): string {
function likeKey(client: string | string[], key: string): string {
  let start: string, end: string
  switch (client) {
    case SqlClient.MY_SQL:

@@ -75,10 +75,10 @@ function parse(input: any) {
  if (typeof input !== "string") {
    return input
  }
  if (input === MAX_ISO_DATE || input === MIN_ISO_DATE) {
  if (isInvalidISODateString(input)) {
    return null
  }
  if (isIsoDateString(input)) {
  if (isValidISODateString(input)) {
    return new Date(input.trim())
  }
  return input

@@ -184,7 +184,11 @@ class InternalBuilder {
    query: Knex.QueryBuilder,
    filters: SearchFilters | undefined,
    table: Table,
    opts: { aliases?: Record<string, string>; relationship?: boolean }
    opts: {
      aliases?: Record<string, string>
      relationship?: boolean
      columnPrefix?: string
    }
  ): Knex.QueryBuilder {
    if (!filters) {
      return query

@@ -192,7 +196,10 @@ class InternalBuilder {
    filters = parseFilters(filters)
    // if all or specified in filters, then everything is an or
    const allOr = filters.allOr
    const sqlStatements = new SqlStatements(this.client, table, { allOr })
    const sqlStatements = new SqlStatements(this.client, table, {
      allOr,
      columnPrefix: opts.columnPrefix,
    })
    const tableName =
      this.client === SqlClient.SQL_LITE ? table._id! : table.name

@@ -201,17 +208,32 @@ class InternalBuilder {
      return alias || name
    }
    function iterate(
      structure: { [key: string]: any },
      fn: (key: string, value: any) => void
      structure: AnySearchFilter,
      fn: (key: string, value: any) => void,
      complexKeyFn?: (key: string[], value: any) => void
    ) {
      for (let [key, value] of Object.entries(structure)) {
      for (const key in structure) {
        const value = structure[key]
        const updatedKey = dbCore.removeKeyNumbering(key)
        const isRelationshipField = updatedKey.includes(".")
        if (!opts.relationship && !isRelationshipField) {

        let castedTypeValue
        if (
          key === InternalSearchFilterOperator.COMPLEX_ID_OPERATOR &&
          (castedTypeValue = structure[key]) &&
          complexKeyFn
        ) {
          const alias = getTableAlias(tableName)
          complexKeyFn(
            castedTypeValue.id.map((x: string) =>
              alias ? `${alias}.${x}` : x
            ),
            castedTypeValue.values
          )
        } else if (!opts.relationship && !isRelationshipField) {
          const alias = getTableAlias(tableName)
          fn(alias ? `${alias}.${updatedKey}` : updatedKey, value)
        }
        if (opts.relationship && isRelationshipField) {
        } else if (opts.relationship && isRelationshipField) {
          const [filterTableName, property] = updatedKey.split(".")
          const alias = getTableAlias(filterTableName)
          fn(alias ? `${alias}.${property}` : property, value)

@@ -234,7 +256,7 @@ class InternalBuilder {
      }
    }

    const contains = (mode: object, any: boolean = false) => {
    const contains = (mode: AnySearchFilter, any: boolean = false) => {
      const rawFnc = allOr ? "orWhereRaw" : "whereRaw"
      const not = mode === filters?.notContains ? "NOT " : ""
      function stringifyArray(value: Array<any>, quoteStyle = '"'): string {

@@ -246,7 +268,7 @@ class InternalBuilder {
        return `[${value.join(",")}]`
      }
      if (this.client === SqlClient.POSTGRES) {
        iterate(mode, (key: string, value: Array<any>) => {
        iterate(mode, (key, value) => {
          const wrap = any ? "" : "'"
          const op = any ? "\\?| array" : "@>"
          const fieldNames = key.split(/\./g)

@@ -261,7 +283,7 @@ class InternalBuilder {
        })
      } else if (this.client === SqlClient.MY_SQL) {
        const jsonFnc = any ? "JSON_OVERLAPS" : "JSON_CONTAINS"
        iterate(mode, (key: string, value: Array<any>) => {
        iterate(mode, (key, value) => {
          query = query[rawFnc](
            `${not}COALESCE(${jsonFnc}(${key}, '${stringifyArray(
              value

@@ -270,7 +292,7 @@ class InternalBuilder {
        })
      } else {
        const andOr = mode === filters?.containsAny ? " OR " : " AND "
        iterate(mode, (key: string, value: Array<any>) => {
        iterate(mode, (key, value) => {
          let statement = ""
          for (let i in value) {
            if (typeof value[i] === "string") {

@@ -294,10 +316,16 @@ class InternalBuilder {
    }

    if (filters.oneOf) {
      iterate(filters.oneOf, (key, array) => {
        const fnc = allOr ? "orWhereIn" : "whereIn"
        query = query[fnc](key, Array.isArray(array) ? array : [array])
      })
      const fnc = allOr ? "orWhereIn" : "whereIn"
      iterate(
        filters.oneOf,
        (key: string, array) => {
          query = query[fnc](key, Array.isArray(array) ? array : [array])
        },
        (key: string[], array) => {
          query = query[fnc](key, Array.isArray(array) ? array : [array])
        }
      )
    }
    if (filters.string) {
      iterate(filters.string, (key, value) => {

@@ -663,6 +691,7 @@ class InternalBuilder {
    }
    // add filters to the query (where)
    query = this.addFilters(query, filters, json.meta.table, {
      columnPrefix: json.meta.columnPrefix,
      aliases: tableAliases,
    })

@@ -698,6 +727,7 @@ class InternalBuilder {
    }

    return this.addFilters(query, filters, json.meta.table, {
      columnPrefix: json.meta.columnPrefix,
      relationship: true,
      aliases: tableAliases,
    })

@@ -708,6 +738,7 @@ class InternalBuilder {
    let query = this.knexWithAlias(knex, endpoint, tableAliases)
    const parsedBody = parseBody(body)
    query = this.addFilters(query, filters, json.meta.table, {
      columnPrefix: json.meta.columnPrefix,
      aliases: tableAliases,
    })
    // mysql can't use returning

@@ -722,6 +753,7 @@ class InternalBuilder {
    const { endpoint, filters, tableAliases } = json
    let query = this.knexWithAlias(knex, endpoint, tableAliases)
    query = this.addFilters(query, filters, json.meta.table, {
      columnPrefix: json.meta.columnPrefix,
      aliases: tableAliases,
    })
    // mysql can't use returning

@@ -735,6 +767,7 @@ class InternalBuilder {

class SqlQueryBuilder extends SqlTableQueryBuilder {
  private readonly limit: number

  // pass through client to get flavour of SQL
  constructor(client: string, limit: number = BASE_LIMIT) {
    super(client)

@@ -927,15 +960,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
  }

  log(query: string, values?: SqlQueryBinding) {
    if (!environment.SQL_LOGGING_ENABLE) {
      return
    }
    const sqlClient = this.getSqlClient()
    let string = `[SQL] [${sqlClient.toUpperCase()}] query="${query}"`
    if (values) {
      string += ` values="${values.join(", ")}"`
    }
    console.log(string)
    sqlLog(this.getSqlClient(), query, values)
  }
}
@@ -5,19 +5,27 @@ export class SqlStatements {
  client: string
  table: Table
  allOr: boolean | undefined
  columnPrefix: string | undefined

  constructor(
    client: string,
    table: Table,
    { allOr }: { allOr?: boolean } = {}
    { allOr, columnPrefix }: { allOr?: boolean; columnPrefix?: string } = {}
  ) {
    this.client = client
    this.table = table
    this.allOr = allOr
    this.columnPrefix = columnPrefix
  }

  getField(key: string): FieldSchema | undefined {
    const fieldName = key.split(".")[1]
    return this.table.schema[fieldName]
    let found = this.table.schema[fieldName]
    if (!found && this.columnPrefix) {
      const prefixRemovedFieldName = fieldName.replace(this.columnPrefix, "")
      found = this.table.schema[prefixRemovedFieldName]
    }
    return found
  }

  between(

@@ -2,10 +2,12 @@ import { DocumentType, SqlQuery, Table, TableSourceType } from "@budibase/types"
import { DEFAULT_BB_DATASOURCE_ID } from "../constants"
import { Knex } from "knex"
import { SEPARATOR } from "../db"
import environment from "../environment"

const DOUBLE_SEPARATOR = `${SEPARATOR}${SEPARATOR}`
const ROW_ID_REGEX = /^\[.*]$/g
const ENCODED_SPACE = encodeURIComponent(" ")
const ISO_DATE_REGEX = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.\d{3}Z$/

export function isExternalTableID(tableId: string) {
  return tableId.startsWith(DocumentType.DATASOURCE + SEPARATOR)
@@ -120,15 +122,38 @@ export function breakRowIdField(_id: string | { _id: string }): any[] {
  }
}

export function isIsoDateString(str: string) {
export function isInvalidISODateString(str: string) {
  const trimmedValue = str.trim()
  if (!/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.\d{3}Z$/.test(trimmedValue)) {
  if (!ISO_DATE_REGEX.test(trimmedValue)) {
    return false
  }
  let d = new Date(trimmedValue)
  return isNaN(d.getTime())
}

export function isValidISODateString(str: string) {
  const trimmedValue = str.trim()
  if (!ISO_DATE_REGEX.test(trimmedValue)) {
    return false
  }
  let d = new Date(trimmedValue)
  if (isNaN(d.getTime())) {
    return false
  }
  return d.toISOString() === trimmedValue
}

export function isValidFilter(value: any) {
  return value != null && value !== ""
}

export function sqlLog(client: string, query: string, values?: any[]) {
  if (!environment.SQL_LOGGING_ENABLE) {
    return
  }
  let string = `[SQL] [${client.toUpperCase()}] query="${query}"`
  if (values) {
    string += ` values="${values.join(", ")}"`
  }
  console.log(string)
}

@@ -221,7 +221,7 @@ export class UserDB {
    const tenantId = getTenantId()
    const db = getGlobalDB()

    let { email, _id, userGroups = [], roles } = user
    const { email, _id, userGroups = [], roles } = user

    if (!email && !_id) {
      throw new Error("_id or email is required")
@@ -231,11 +231,10 @@ export class UserDB {
    if (_id) {
      // try to get existing user from db
      try {
        dbUser = (await db.get(_id)) as User
        if (email && dbUser.email !== email) {
          throw "Email address cannot be changed"
        dbUser = await usersCore.getById(_id)
        if (email && dbUser.email !== email && !opts.allowChangingEmail) {
          throw new Error("Email address cannot be changed")
        }
        email = dbUser.email
      } catch (e: any) {
        if (e.status === 404) {
          // do nothing, save this new user with the id specified - required for SSO auth
@@ -271,13 +270,13 @@ export class UserDB {

    // make sure we set the _id field for a new user
    // Also if this is a new user, associate groups with them
    let groupPromises = []
    const groupPromises = []
    if (!_id) {
      _id = builtUser._id!

      if (userGroups.length > 0) {
        for (let groupId of userGroups) {
          groupPromises.push(UserDB.groups.addUsers(groupId, [_id!]))
          groupPromises.push(
            UserDB.groups.addUsers(groupId, [builtUser._id!])
          )
        }
      }
    }
@@ -288,6 +287,11 @@ export class UserDB {
    builtUser._rev = response.rev

    await eventHelpers.handleSaveEvents(builtUser, dbUser)
    if (dbUser && builtUser.email !== dbUser.email) {
      // Remove the platform email reference if the email changed
      await platform.users.removeUser({ email: dbUser.email } as User)
    }

    await platform.users.addUser(
      tenantId,
      builtUser._id!,

@@ -0,0 +1,188 @@
import { User, UserStatus } from "@budibase/types"
import { DBTestConfiguration, generator, structures } from "../../../tests"
import { UserDB } from "../db"
import { searchExistingEmails } from "../lookup"

const db = UserDB

const config = new DBTestConfiguration()

const quotas = {
  addUsers: jest
    .fn()
    .mockImplementation(
      (_change: number, _creatorsChange: number, cb?: () => Promise<any>) =>
        cb && cb()
    ),
  removeUsers: jest
    .fn()
    .mockImplementation(
      (_change: number, _creatorsChange: number, cb?: () => Promise<any>) =>
        cb && cb()
    ),
}
const groups = {
  addUsers: jest.fn(),
  getBulk: jest.fn(),
  getGroupBuilderAppIds: jest.fn(),
}
const features = { isSSOEnforced: jest.fn(), isAppBuildersEnabled: jest.fn() }

describe("UserDB", () => {
  beforeAll(() => {
    db.init(quotas, groups, features)
  })

  describe("save", () => {
    describe("create", () => {
      it("creating a new user will persist it", async () => {
        const email = generator.email({})
        const user: User = structures.users.user({
          email,
          tenantId: config.getTenantId(),
        })

        await config.doInTenant(async () => {
          const saveUserResponse = await db.save(user)

          const persistedUser = await db.getUserByEmail(email)
          expect(persistedUser).toEqual({
            ...user,
            _id: saveUserResponse._id,
            _rev: expect.stringMatching(/^1-\w+/),
            password: expect.not.stringMatching(user.password!),
            status: UserStatus.ACTIVE,
            createdAt: Date.now(),
            updatedAt: new Date().toISOString(),
          })
        })
      })

      it("the same email cannot be used twice in the same tenant", async () => {
        const email = generator.email({})
        const user: User = structures.users.user({
          email,
          tenantId: config.getTenantId(),
        })

        await config.doInTenant(() => db.save(user))

        await config.doInTenant(() =>
          expect(db.save(user)).rejects.toThrow(
            `Email already in use: '${email}'`
          )
        )
      })

      it("the same email cannot be used twice in different tenants", async () => {
        const email = generator.email({})
        const user: User = structures.users.user({
          email,
          tenantId: config.getTenantId(),
        })

        await config.doInTenant(() => db.save(user))

        config.newTenant()
        await config.doInTenant(() =>
          expect(db.save(user)).rejects.toThrow(
            `Email already in use: '${email}'`
          )
        )
      })
    })

    describe("update", () => {
      let user: User

      beforeEach(async () => {
        user = await config.doInTenant(() =>
          db.save(
            structures.users.user({
              email: generator.email({}),
              tenantId: config.getTenantId(),
            })
          )
        )
      })

      it("can update user properties", async () => {
        await config.doInTenant(async () => {
          const updatedName = generator.first()
          user.firstName = updatedName

          await db.save(user)

          const persistedUser = await db.getUserByEmail(user.email)
          expect(persistedUser).toEqual(
            expect.objectContaining({
              _id: user._id,
              email: user.email,
              firstName: updatedName,
              lastName: user.lastName,
            })
          )
        })
      })

      it("email cannot be updated by default", async () => {
        await config.doInTenant(async () => {
          await expect(
            db.save({ ...user, email: generator.email({}) })
          ).rejects.toThrow("Email address cannot be changed")
        })
      })

      it("email can be updated if specified", async () => {
        await config.doInTenant(async () => {
          const newEmail = generator.email({})

          await db.save(
            { ...user, email: newEmail },
            { allowChangingEmail: true }
          )

          const persistedUser = await db.getUserByEmail(newEmail)
          expect(persistedUser).toEqual(
            expect.objectContaining({
              _id: user._id,
              email: newEmail,
              lastName: user.lastName,
              _rev: expect.stringMatching(/^2-\w+/),
            })
          )
        })
      })

      it("updating emails frees previous emails", async () => {
        await config.doInTenant(async () => {
          const previousEmail = user.email
          const newEmail = generator.email({})
          expect(await searchExistingEmails([previousEmail, newEmail])).toEqual(
            [previousEmail]
          )

          await db.save(
            { ...user, email: newEmail },
            { allowChangingEmail: true }
          )

          expect(await searchExistingEmails([previousEmail, newEmail])).toEqual(
            [newEmail]
          )

          await db.save(
            structures.users.user({
              email: previousEmail,
              tenantId: config.getTenantId(),
            })
          )

          expect(await searchExistingEmails([previousEmail, newEmail])).toEqual(
            [previousEmail, newEmail]
          )
        })
      })
    })
  })
})

@@ -18,9 +18,10 @@ import {
  CouchFindOptions,
  DatabaseQueryOpts,
  SearchFilters,
  SearchFilterOperator,
  SearchUsersRequest,
  User,
  BasicOperator,
  ArrayOperator,
} from "@budibase/types"
import * as context from "../context"
import { getGlobalDB } from "../context"
@@ -46,9 +47,9 @@ function removeUserPassword(users: User | User[]) {

export function isSupportedUserSearch(query: SearchFilters) {
  const allowed = [
    { op: SearchFilterOperator.STRING, key: "email" },
    { op: SearchFilterOperator.EQUAL, key: "_id" },
    { op: SearchFilterOperator.ONE_OF, key: "_id" },
    { op: BasicOperator.STRING, key: "email" },
    { op: BasicOperator.EQUAL, key: "_id" },
    { op: ArrayOperator.ONE_OF, key: "_id" },
  ]
  for (let [key, operation] of Object.entries(query)) {
    if (typeof operation !== "object") {

@@ -1,4 +1,7 @@
import { db } from "../../../src"
import {
  CONSTANT_EXTERNAL_ROW_COLS,
  CONSTANT_INTERNAL_ROW_COLS,
} from "@budibase/shared-core"

export function expectFunctionWasCalledTimesWith(
  jestFunction: any,
@@ -11,7 +14,7 @@ export function expectFunctionWasCalledTimesWith(
}

export const expectAnyInternalColsAttributes: {
  [K in (typeof db.CONSTANT_INTERNAL_ROW_COLS)[number]]: any
  [K in (typeof CONSTANT_INTERNAL_ROW_COLS)[number]]: any
} = {
  tableId: expect.anything(),
  type: expect.anything(),
@@ -22,7 +25,7 @@ export const expectAnyInternalColsAttributes: {
}

export const expectAnyExternalColsAttributes: {
  [K in (typeof db.CONSTANT_EXTERNAL_ROW_COLS)[number]]: any
  [K in (typeof CONSTANT_EXTERNAL_ROW_COLS)[number]]: any
} = {
  tableId: expect.anything(),
  _id: expect.anything(),

@@ -223,7 +223,7 @@
    height: 420px;
    background: var(--background);
    border: var(--border-light);
    z-index: 100;
    z-index: 1000;
    border-radius: 8px;
    overflow: hidden;
    box-sizing: border-box;

@@ -1,33 +1,25 @@
<script>
  import Tooltip from "./Tooltip.svelte"
  import Icon from "../Icon/Icon.svelte"
  import AbsTooltip from "./AbsTooltip.svelte"

  export let tooltip = ""
  export let size = "M"
  export let disabled = true

  let showTooltip = false
</script>

<!-- svelte-ignore a11y-no-static-element-interactions -->
<div class:container={!!tooltip}>
  <slot />
  {#if tooltip}
    <div class="icon-container">
      <div
        class="icon"
        class:icon-small={size === "M" || size === "S"}
        on:mouseover={() => (showTooltip = true)}
        on:mouseleave={() => (showTooltip = false)}
        on:focus
      >
        <Icon name="InfoOutline" size="S" {disabled} />
      </div>
      {#if showTooltip}
        <div class="tooltip">
          <Tooltip textWrapping={true} direction={"bottom"} text={tooltip} />
      <AbsTooltip text={tooltip}>
        <div
          class="icon"
          class:icon-small={size === "M" || size === "S"}
          on:focus
        >
          <Icon name="InfoOutline" size="S" {disabled} hoverable />
        </div>
      {/if}
      </AbsTooltip>
    </div>
  {/if}
</div>
@@ -44,14 +36,6 @@
    margin-left: 5px;
    margin-right: 5px;
  }
  .tooltip {
    position: absolute;
    display: flex;
    justify-content: center;
    top: 15px;
    z-index: 200;
    width: 160px;
  }
  .icon {
    transform: scale(0.75);
  }

@@ -74,7 +74,7 @@
    "lodash": "4.17.21",
    "posthog-js": "^1.118.0",
    "remixicon": "2.5.0",
    "sanitize-html": "^2.7.0",
    "sanitize-html": "^2.13.0",
    "shortid": "2.2.15",
    "svelte-dnd-action": "^0.9.8",
    "svelte-loading-spinners": "^0.1.1",

@@ -112,7 +112,7 @@
  This action cannot be undone.
</ConfirmDialog>

<Modal bind:this={testDataModal} width="30%">
<Modal bind:this={testDataModal} width="30%" zIndex={5}>
  <TestDataModal />
</Modal>
@@ -148,7 +148,6 @@
  .header.scrolling {
    background: var(--background);
    border-bottom: var(--border-light);
    border-left: var(--border-light);
    z-index: 1;
  }

@@ -8,11 +8,63 @@
  import { automationStore, selectedAutomation } from "stores/builder"
  import AutomationBlockSetup from "../../SetupPanel/AutomationBlockSetup.svelte"
  import { cloneDeep } from "lodash/fp"
  import { memo } from "@budibase/frontend-core"
  import { AutomationEventType } from "@budibase/types"

  let failedParse = null
  let trigger = {}
  let schemaProperties = {}

  const rowTriggers = [
    AutomationEventType.ROW_DELETE,
    AutomationEventType.ROW_UPDATE,
    AutomationEventType.ROW_SAVE,
  ]

  /**
   * Parses the automation test data and ensures it is valid
   * @param {object} testData contains all config for the test
   * @returns {object} valid testData
   * @todo Parse *all* data for each trigger type and relay adequate feedback
   */
  const parseTestData = testData => {
    const autoTrigger = $selectedAutomation?.definition?.trigger
    const { tableId } = autoTrigger?.inputs || {}

    // Ensure the tableId matches the trigger table for row trigger automations
    if (
      rowTriggers.includes(autoTrigger?.event) &&
      testData?.row?.tableId !== tableId
    ) {
      return {
        // Reset Core fields
        row: { tableId },
        meta: {},
        id: "",
        revision: "",
      }
    } else {
      // Leave the core data as it is
      return testData
    }
  }

  /**
   * Before executing a test run, relay if an automation is in a valid state
   * @param {object} trigger The automation trigger config
   * @returns {boolean} validation status
   * @todo Parse *all* trigger types and relay adequate feedback
   */
  const isTriggerValid = trigger => {
    if (rowTriggers.includes(trigger?.event) && !trigger?.inputs?.tableId) {
      return false
    }
    return true
  }

  const memoTestData = memo(parseTestData($selectedAutomation.testData))
  $: memoTestData.set(parseTestData($selectedAutomation.testData))

  $: {
    // clone the trigger so we're not mutating the reference
    trigger = cloneDeep($selectedAutomation.definition.trigger)
@@ -20,34 +72,45 @@
    // get the outputs so we can define the fields
    let schema = Object.entries(trigger.schema?.outputs?.properties || {})

    if (trigger?.event === "app:trigger") {
    if (trigger?.event === AutomationEventType.APP_TRIGGER) {
      schema = [["fields", { customType: "fields" }]]
    }

    schemaProperties = schema
  }

  // check to see if there is existing test data in the store
  $: testData = $selectedAutomation.testData || {}

  // Check the schema to see if required fields have been entered
  $: isError = !trigger.schema.outputs.required.every(
    required => testData[required] || required !== "row"
  )
  $: isError =
    !isTriggerValid(trigger) ||
    !trigger.schema.outputs.required.every(
      required => $memoTestData?.[required] || required !== "row"
    )

  function parseTestJSON(e) {
    let jsonUpdate

    try {
      const obj = JSON.parse(e.detail)
      jsonUpdate = JSON.parse(e.detail)
      failedParse = null
      automationStore.actions.addTestDataToAutomation(obj)
    } catch (e) {
      failedParse = "Invalid JSON"
      return false
    }

    if (rowTriggers.includes(trigger?.event)) {
      const tableId = trigger?.inputs?.tableId

      // Reset the tableId as it must match the trigger
      if (jsonUpdate?.row?.tableId !== tableId) {
        jsonUpdate.row.tableId = tableId
      }
    }

    automationStore.actions.addTestDataToAutomation(jsonUpdate)
  }

  const testAutomation = async () => {
    try {
      await automationStore.actions.test($selectedAutomation, testData)
      await automationStore.actions.test($selectedAutomation, $memoTestData)
      $automationStore.showTestPanel = true
    } catch (error) {
      notifications.error(error)
@@ -85,7 +148,7 @@
  {#if selectedValues}
    <div class="tab-content-padding">
      <AutomationBlockSetup
        {testData}
        testData={$memoTestData}
        {schemaProperties}
        isTestModal
        block={trigger}

@@ -30,6 +30,16 @@
    return lowerA > lowerB ? 1 : -1
  })

  $: groupedAutomations = filteredAutomations.reduce((acc, auto) => {
    acc[auto.definition.trigger.event] ??= {
      icon: auto.definition.trigger.icon,
      name: (auto.definition.trigger?.name || "").toUpperCase(),
      entries: [],
    }
    acc[auto.definition.trigger.event].entries.push(auto)
    return acc
  }, {})

  $: showNoResults = searchString && !filteredAutomations.length

  onMount(async () => {
@@ -55,16 +65,25 @@
    />
  </div>
  <div class="side-bar-nav">
    {#each filteredAutomations as automation}
      <NavItem
        text={automation.name}
        selected={automation._id === selectedAutomationId}
        on:click={() => selectAutomation(automation._id)}
        selectedBy={$userSelectedResourceMap[automation._id]}
        disabled={automation.disabled}
      >
        <EditAutomationPopover {automation} />
      </NavItem>
    {#each Object.values(groupedAutomations || {}) as triggerGroup}
      <div class="nav-group">
        <div class="nav-group-header" title={triggerGroup?.name}>
          {triggerGroup?.name}
        </div>
        {#each triggerGroup.entries as automation}
          <NavItem
            icon={triggerGroup.icon}
            iconColor={"var(--spectrum-global-color-gray-900)"}
            text={automation.name}
            selected={automation._id === selectedAutomationId}
            on:click={() => selectAutomation(automation._id)}
            selectedBy={$userSelectedResourceMap[automation._id]}
            disabled={automation.disabled}
          >
            <EditAutomationPopover {automation} />
          </NavItem>
        {/each}
      </div>
    {/each}

    {#if showNoResults}
@@ -82,6 +101,17 @@
</Modal>

<style>
  .nav-group {
    padding-top: var(--spacing-l);
  }
  .nav-group-header {
    color: var(--spectrum-global-color-gray-600);
    padding: 0px calc(var(--spacing-l) + 4px);
    padding-bottom: var(--spacing-l);
    white-space: nowrap;
    overflow: hidden;
    text-overflow: ellipsis;
  }
  .side-bar {
    flex: 0 0 260px;
    display: flex;
@@ -104,7 +134,7 @@
    justify-content: flex-start;
    align-items: center;
    gap: var(--spacing-l);
    padding: 0 var(--spacing-l);
    padding: 0 calc(var(--spacing-l) + 4px);
  }
  .side-bar-nav {
    flex: 1 1 auto;

File diff suppressed because it is too large

@@ -1,19 +1,28 @@
<script>
  import { createEventDispatcher } from "svelte"
  import RowSelectorTypes from "./RowSelectorTypes.svelte"
  import PropField from "./PropField.svelte"
  import DrawerBindableInput from "../../common/bindings/DrawerBindableInput.svelte"
  import ModalBindableInput from "../../common/bindings/ModalBindableInput.svelte"
  import AutomationBindingPanel from "../../common/bindings/ServerBindingPanel.svelte"
  import { DatePicker, Select } from "@budibase/bbui"
  import { FieldType } from "@budibase/types"

  const dispatch = createEventDispatcher()

  export let value
  export let value = {}
  export let bindings
  export let block
  export let isTestModal

  let schemaFields
  const { STRING, NUMBER, ARRAY } = FieldType

  let schemaFields = []
  let editableValue

  $: editableValue = { ...value }

  $: {
    let fields = {}

    for (const [key, type] of Object.entries(block?.inputs?.fields ?? {})) {
      fields = {
        ...fields,
@@ -25,8 +34,8 @@
        },
      }

      if (value[key] === type) {
        value[key] = INITIAL_VALUES[type.toUpperCase()]
      if (editableValue[key] === type) {
        editableValue[key] = INITIAL_VALUES[type.toUpperCase()]
      }
    }

@@ -38,77 +47,58 @@
    NUMBER: null,
    DATETIME: null,
    STRING: "",
    OPTIONS: [],
    ARRAY: [],
    ARRAY: "",
  }

  const coerce = (value, type) => {
    const re = new RegExp(/{{([^{].*?)}}/g)
    if (re.test(value)) {
      return value
  const onChange = (e, field) => {
    if (e.detail !== editableValue[field]) {
      editableValue[field] = e.detail
      dispatch("change", editableValue)
    }

    if (type === "boolean") {
      if (typeof value === "boolean") {
        return value
      }
      return value === "true"
    }
    if (type === "number") {
      if (typeof value === "number") {
        return value
      }
      return Number(value)
    }
    if (type === "options") {
      return [value]
    }
    if (type === "array") {
      if (Array.isArray(value)) {
        return value
      }
      return value.split(",").map(x => x.trim())
    }

    if (type === "link") {
      if (Array.isArray(value)) {
        return value
      }

      return [value]
    }

    return value
  }

  const onChange = (e, field, type) => {
    value[field] = coerce(e.detail, type)
    dispatch("change", value)
  }
</script>

{#if schemaFields.length && isTestModal}
  <div class="schema-fields">
{#if schemaFields?.length && isTestModal}
  <div class="fields">
    {#each schemaFields as [field, schema]}
      <RowSelectorTypes
        {isTestModal}
        {field}
        {schema}
        {bindings}
        {value}
        {onChange}
      />
      <PropField label={field}>
        {#if [STRING, NUMBER, ARRAY].includes(schema.type)}
          <svelte:component
            this={isTestModal ? ModalBindableInput : DrawerBindableInput}
            panel={AutomationBindingPanel}
            value={editableValue[field]}
            on:change={e => onChange(e, field)}
            type="string"
            {bindings}
            allowJS={true}
            updateOnChange={false}
            title={schema.name}
            autocomplete="off"
          />
        {:else if schema.type === "boolean"}
          <Select
            on:change={e => onChange(e, field)}
            value={editableValue[field]}
            options={[
              { label: "True", value: "true" },
              { label: "False", value: "false" },
            ]}
          />
        {:else if schema.type === "datetime"}
          <DatePicker
            value={editableValue[field]}
            on:change={e => onChange(e, field)}
          />
        {/if}
      </PropField>
    {/each}
  </div>
{/if}

<style>
  .schema-fields {
    display: grid;
    grid-gap: var(--spacing-s);
    margin-top: var(--spacing-s);
  }
  .schema-fields :global(label) {
    text-transform: capitalize;
  .fields {
    display: flex;
    flex-direction: column;
    gap: var(--spacing-m);
  }
</style>

@@ -0,0 +1,60 @@
<script>
  import { Label } from "@budibase/bbui"

  export let label
  export let labelTooltip
  export let fullWidth = false
  export let componentWidth = 320
</script>

<div
  class="prop-field"
  class:fullWidth
  style={`--comp-width: ${componentWidth}px;`}
>
  <div class="prop-label" title={label}>
    <Label tooltip={labelTooltip}>{label}</Label>
  </div>
  <div class="prop-control">
    <slot />
  </div>
</div>

<style>
  .prop-field {
    display: grid;
    grid-template-columns: 1fr var(--comp-width);
  }

  .prop-field.fullWidth {
    grid-template-columns: 1fr;
  }

  .prop-field.fullWidth .prop-label {
    margin-bottom: var(--spacing-s);
  }

  .prop-label {
    display: flex;
    align-items: center;
    overflow: hidden;
  }

  .prop-label :global(> div) {
    width: 100%;
  }

  .prop-label :global(> div > label) {
    text-overflow: ellipsis;
    white-space: nowrap;
    overflow: hidden;
  }

  .prop-control {
    margin-left: var(--spacing-s);
  }

  .prop-field.fullWidth .prop-control {
    margin-left: 0px;
  }
</style>

@@ -1,28 +1,43 @@
<script>
  import { tables } from "stores/builder"
  import { Select, Checkbox, Label } from "@budibase/bbui"
  import {
    ActionButton,
    Popover,
    Icon,
    TooltipPosition,
    TooltipType,
  } from "@budibase/bbui"
  import { createEventDispatcher } from "svelte"
  import { FieldType } from "@budibase/types"

  import RowSelectorTypes from "./RowSelectorTypes.svelte"
  import DrawerBindableSlot from "../../common/bindings/DrawerBindableSlot.svelte"
  import AutomationBindingPanel from "../../common/bindings/ServerBindingPanel.svelte"
  import { TableNames } from "constants"
  import { FIELDS } from "constants/backend"
  import { capitalise } from "helpers"
  import { memo } from "@budibase/frontend-core"
  import PropField from "./PropField.svelte"
  import { cloneDeep, isPlainObject, mergeWith } from "lodash"

  const dispatch = createEventDispatcher()
  export let value

  export let row
  export let meta
  export let bindings
  export let isTestModal
  export let isUpdateRow

  $: parsedBindings = bindings.map(binding => {
    let clone = Object.assign({}, binding)
    clone.icon = "ShareAndroid"
    return clone
  const typeToField = Object.values(FIELDS).reduce((acc, field) => {
    acc[field.type] = field
    return acc
  }, {})

  const memoStore = memo({
    row,
    meta,
  })

  let table
  // Row Schema Fields
  let schemaFields
  let attachmentTypes = [
    FieldType.ATTACHMENTS,
@@ -30,32 +45,123 @@
    FieldType.SIGNATURE_SINGLE,
  ]

  $: {
    table = $tables.list.find(table => table._id === value?.tableId)
  let customPopover
  let popoverAnchor
  let editableRow = {}
  let editableFields = {}

    // Just sorting attachment types to the bottom here for a cleaner UX
    schemaFields = Object.entries(table?.schema ?? {}).sort(
      ([, schemaA], [, schemaB]) =>
        (schemaA.type === "attachment") - (schemaB.type === "attachment")
    )
  // Avoid unnecessary updates
  $: memoStore.set({
    row,
    meta,
  })

    schemaFields.forEach(([, schema]) => {
      if (!schema.autocolumn && !value[schema.name]) {
        value[schema.name] = ""
      }
    })
  }
  const onChangeTable = e => {
    value["tableId"] = e.detail
    dispatch("change", value)
  }
  $: parsedBindings = bindings.map(binding => {
    let clone = Object.assign({}, binding)
    clone.icon = "ShareAndroid"
    return clone
  })

  const coerce = (value, type) => {
    const re = new RegExp(/{{([^{].*?)}}/g)
    if (re.test(value)) {
      return value
  $: tableId = $memoStore?.row?.tableId

  $: initData(tableId, $memoStore?.meta?.fields, $memoStore?.row)

  const initData = (tableId, metaFields, row) => {
    if (!tableId) {
      return
    }

    // Refresh the editable fields
    editableFields = cloneDeep(metaFields || {})

    // Refresh all the row data
    editableRow = cloneDeep(row || {})

    table = $tables.list.find(table => table._id === tableId)

    if (table) {
      editableRow["tableId"] = tableId

      schemaFields = Object.entries(table?.schema ?? {})
        .filter(entry => {
          const [, field] = entry
          return field.type !== "formula" && !field.autocolumn
        })
        .sort(([nameA], [nameB]) => {
          return nameA < nameB ? -1 : 1
        })

      // Parse out any data not in the schema.
      for (const column in editableFields) {
        if (!Object.hasOwn(table?.schema, column)) {
          delete editableFields[column]
        }
      }
    }

    // Go through the table schema and build out the editable content
    for (const entry of schemaFields) {
      const [key, fieldSchema] = entry

      const emptyField =
        editableRow[key] == null || editableRow[key]?.length === 0

      // Put non-empty elements into the update and add their key to the fields list.
      if (!emptyField && !Object.hasOwn(editableFields, key)) {
        editableFields = {
          ...editableFields,
          [key]: {},
        }
      }

      // Legacy - clearRelationships
      // Init the field and add it to the update.
      if (emptyField) {
        if (editableFields[key]?.clearRelationships === true) {
          const emptyField = coerce(
            !Object.hasOwn($memoStore?.row, key) ? "" : $memoStore?.row[key],
            fieldSchema.type
          )

          // remove this and place the field in the editable row.
          delete editableFields[key]?.clearRelationships

          // Default the field
          editableRow = {
            ...editableRow,
            [key]: emptyField,
          }
        } else {
          // Purge from the update as its presence is not necessary.
          delete editableRow[key]
        }
      }
    }

    // Parse all known row schema keys
    const schemaKeys = [
      "tableId",
      ...schemaFields.map(entry => {
        const [key] = entry
        return key
      }),
    ]

    // Purge any row keys that are not present in the schema.
    for (const rowKey of Object.keys(editableRow)) {
      if (!schemaKeys.includes(rowKey)) {
        delete editableRow[rowKey]
        delete editableFields[rowKey]
      }
    }
  }

  // Row coerce
  const coerce = (value, type) => {
    const re = new RegExp(/{{([^{].*?)}}/g)
    if (typeof value === "string" && re.test(value)) {
      return value
    }
    if (type === "number") {
      if (typeof value === "number") {
        return value
@@ -66,6 +172,9 @@
      return value
    }
    if (type === "array") {
      if (!value) {
        return []
      }
      if (Array.isArray(value)) {
        return value
      }
@@ -73,7 +182,9 @@
    }

    if (type === "link") {
      if (Array.isArray(value)) {
      if (!value) {
        return []
      } else if (Array.isArray(value)) {
        return value
      }
      return value.split(",").map(x => x.trim())
@@ -86,130 +197,174 @@
    return value
  }

  const onChange = (e, field, type) => {
    let newValue = {
      ...value,
      [field]: coerce(e.detail, type),
    }
    dispatch("change", newValue)
  const isFullWidth = type => {
    return (
      attachmentTypes.includes(type) ||
      type === FieldType.JSON ||
      type === FieldType.LONGFORM
    )
  }

  const onChangeSetting = (field, key, value) => {
    let newField = {}
    newField[field] = {
      [key]: value,
  const onChange = update => {
    const customizer = (objValue, srcValue) => {
      if (isPlainObject(objValue) && isPlainObject(srcValue)) {
        const result = mergeWith({}, objValue, srcValue, customizer)
        let outcome = Object.keys(result).reduce((acc, key) => {
          if (result[key] !== null) {
            acc[key] = result[key]
          }
          return acc
        }, {})
        return outcome
      }
      return srcValue
    }

    let updatedFields = {
      ...meta?.fields,
      ...newField,
    }

    dispatch("change", {
      key: "meta",
      fields: updatedFields,
    })
    const result = mergeWith(
      {},
      {
        row: editableRow,
        meta: {
          fields: editableFields,
        },
      },
      update,
      customizer
    )
    dispatch("change", result)
  }
  // Ensure any nullish tableId values get set to empty string so
  // that the select works
  $: if (value?.tableId == null) value = { tableId: "" }
</script>

<div class="schema-fields">
  <Label>Table</Label>
  <div class="field-width">
    <Select
      on:change={onChangeTable}
      value={value.tableId}
      options={$tables.list.filter(table => table._id !== TableNames.USERS)}
      getOptionLabel={table => table.name}
      getOptionValue={table => table._id}
    />
  </div>
</div>
{#if schemaFields.length}
  {#each schemaFields as [field, schema]}
    {#if !schema.autocolumn}
      <div class:schema-fields={!attachmentTypes.includes(schema.type)}>
        <Label>{field}</Label>
        <div class:field-width={!attachmentTypes.includes(schema.type)}>
          {#if isTestModal}
{#each schemaFields || [] as [field, schema]}
  {#if !schema.autocolumn && Object.hasOwn(editableFields, field)}
    <PropField label={field} fullWidth={isFullWidth(schema.type)}>
      <div class="prop-control-wrap">
        {#if isTestModal}
          <RowSelectorTypes
            {isTestModal}
            {field}
            {schema}
            bindings={parsedBindings}
            value={editableRow}
            meta={{
              fields: editableFields,
            }}
            {onChange}
          />
        {:else}
          <DrawerBindableSlot
            title={$memoStore?.row?.title || field}
            panel={AutomationBindingPanel}
            type={schema.type}
            {schema}
            value={editableRow[field]}
            on:change={e =>
              onChange({
                row: {
                  [field]: e.detail,
                },
              })}
            {bindings}
            allowJS={true}
            updateOnChange={false}
            drawerLeft="260px"
          >
            <RowSelectorTypes
              {isTestModal}
              {field}
              {schema}
              bindings={parsedBindings}
              {value}
              {onChange}
              value={editableRow}
              meta={{
                fields: editableFields,
              }}
              onChange={change => onChange(change)}
            />
          {:else}
            <DrawerBindableSlot
              title={value.title || field}
              panel={AutomationBindingPanel}
              type={schema.type}
              {schema}
              value={value[field]}
              on:change={e => onChange(e, field)}
              {bindings}
              allowJS={true}
              updateOnChange={false}
              drawerLeft="260px"
            >
              <RowSelectorTypes
                {isTestModal}
                {field}
                {schema}
                bindings={parsedBindings}
                {value}
                {onChange}
                useAttachmentBinding={meta?.fields?.[field]
                  ?.useAttachmentBinding}
                {onChangeSetting}
              />
            </DrawerBindableSlot>
          {/if}

          {#if isUpdateRow && schema.type === "link"}
            <div class="checkbox-field">
              <Checkbox
                value={meta.fields?.[field]?.clearRelationships}
                text={"Clear relationships if empty?"}
                size={"S"}
                on:change={e =>
                  onChangeSetting(field, "clearRelationships", e.detail)}
              />
            </div>
          {/if}
        </div>
          </DrawerBindableSlot>
        {/if}
      </div>
    {/if}
  {/each}
    </PropField>
  {/if}
{/each}

{#if table && schemaFields}
  <div
    class="add-fields-btn"
    class:empty={Object.is(editableFields, {})}
    bind:this={popoverAnchor}
  >
    <ActionButton
      icon="Add"
      fullWidth
      on:click={() => {
        customPopover.show()
      }}
      disabled={!schemaFields}
      >Add fields
    </ActionButton>
  </div>
{/if}

<Popover
  align="center"
  bind:this={customPopover}
  anchor={editableFields ? popoverAnchor : null}
  useAnchorWidth
  maxHeight={300}
  resizable={false}
  offset={10}
>
  <!-- svelte-ignore a11y-click-events-have-key-events -->
  <!-- svelte-ignore a11y-no-noninteractive-element-interactions -->
  <ul class="spectrum-Menu" role="listbox">
    {#each schemaFields || [] as [field, schema]}
      {#if !schema.autocolumn}
        <li
          class="table_field spectrum-Menu-item"
          class:is-selected={Object.hasOwn(editableFields, field)}
          on:click={() => {
            if (Object.hasOwn(editableFields, field)) {
              delete editableFields[field]
              onChange({
                meta: { fields: editableFields },
                row: { [field]: null },
              })
            } else {
              editableFields[field] = {}
              onChange({ meta: { fields: editableFields } })
            }
          }}
        >
          <Icon
            name={typeToField?.[schema.type]?.icon}
            color={"var(--spectrum-global-color-gray-600)"}
            tooltip={capitalise(schema.type)}
            tooltipType={TooltipType.Info}
            tooltipPosition={TooltipPosition.Left}
          />
          <div class="field_name spectrum-Menu-itemLabel">{field}</div>
          <svg
            class="spectrum-Icon spectrum-UIIcon-Checkmark100 spectrum-Menu-checkmark spectrum-Menu-itemIcon"
            focusable="false"
            aria-hidden="true"
          >
            <use xlink:href="#spectrum-css-icon-Checkmark100" />
          </svg>
        </li>
      {/if}
    {/each}
  </ul>
</Popover>

<style>
  .field-width {
    width: 320px;
  .table_field {
    display: flex;
    padding: var(--spacing-s) var(--spacing-l);
    gap: var(--spacing-s);
  }

  .schema-fields {
    display: flex;
    justify-content: space-between;
    align-items: center;
    flex-direction: row;
    align-items: center;
    gap: 10px;
    flex: 1;
    margin-bottom: 10px;
  }
  .schema-fields :global(label) {
    text-transform: capitalize;
  }
  .checkbox-field {
    padding-bottom: var(--spacing-s);
    padding-left: 1px;
    padding-top: var(--spacing-s);
  }
  .checkbox-field :global(label) {
    text-transform: none;
  /* Override for general json field override */
  .prop-control-wrap :global(.icon.json-slot-icon) {
    right: 1px !important;
  }
</style>

@@ -11,17 +11,18 @@
  import DrawerBindableInput from "../../common/bindings/DrawerBindableInput.svelte"
  import ModalBindableInput from "../../common/bindings/ModalBindableInput.svelte"
  import AutomationBindingPanel from "../../common/bindings/ServerBindingPanel.svelte"
  import Editor from "components/integration/QueryEditor.svelte"
  import CodeEditor from "components/common/CodeEditor/CodeEditor.svelte"
  import KeyValueBuilder from "components/integration/KeyValueBuilder.svelte"

  export let onChange
  export let field
  export let schema
  export let value
  export let meta
  export let bindings
  export let isTestModal
  export let useAttachmentBinding
  export let onChangeSetting

  $: fieldData = value[field]

  $: parsedBindings = bindings.map(binding => {
    let clone = Object.assign({}, binding)
@@ -35,14 +36,15 @@
    FieldType.SIGNATURE_SINGLE,
  ]

  let previousBindingState = useAttachmentBinding

  function schemaHasOptions(schema) {
    return !!schema.constraints?.inclusion?.length
  }

  function handleAttachmentParams(keyValueObj) {
    let params = {}
    if (!keyValueObj) {
      return null
    }

    if (!Array.isArray(keyValueObj) && keyValueObj) {
      keyValueObj = [keyValueObj]
@@ -50,45 +52,68 @@

    if (keyValueObj.length) {
      for (let param of keyValueObj) {
        params[param.url] = param.filename
        params[param.url || ""] = param.filename || ""
      }
    }
    return params
  }

  async function handleToggleChange(toggleField, event) {
    if (event.detail === true) {
      value[toggleField] = []
    } else {
      value[toggleField] = ""
    }
    previousBindingState = event.detail
    onChangeSetting(toggleField, "useAttachmentBinding", event.detail)
    onChange({ detail: value[toggleField] }, toggleField)
  }
  const handleMediaUpdate = e => {
    const media = e.detail || []
    const isSingle =
      schema.type === FieldType.ATTACHMENT_SINGLE ||
      schema.type === FieldType.SIGNATURE_SINGLE
    const parsedMedia = media.map(({ name, value }) => ({
      url: name,
      filename: value,
    }))

  $: if (useAttachmentBinding !== previousBindingState) {
    if (useAttachmentBinding) {
      value[field] = []
    } else {
      value[field] = ""
    if (isSingle) {
      const [singleMedia] = parsedMedia
      // Return only the first entry
      return singleMedia
        ? {
            url: singleMedia.url,
            filename: singleMedia.filename,
          }
        : null
    }
    previousBindingState = useAttachmentBinding

    // Return the entire array
    return parsedMedia
  }
</script>

{#if schemaHasOptions(schema) && schema.type !== "array"}
  <Select
    on:change={e => onChange(e, field)}
    value={value[field]}
    on:change={e =>
      onChange({
        row: {
          [field]: e.detail,
        },
      })}
    value={fieldData}
    options={schema.constraints.inclusion}
  />
{:else if schema.type === "datetime"}
  <DatePicker value={value[field]} on:change={e => onChange(e, field)} />
  <DatePicker
    value={fieldData}
    on:change={e =>
      onChange({
        row: {
          [field]: e.detail,
        },
      })}
  />
{:else if schema.type === "boolean"}
  <Select
    on:change={e => onChange(e, field)}
    value={value[field]}
    on:change={e =>
      onChange({
        row: {
          [field]: e.detail,
        },
      })}
    value={fieldData}
    options={[
      { label: "True", value: "true" },
      { label: "False", value: "false" },
@@ -96,83 +121,111 @@
  />
{:else if schemaHasOptions(schema) && schema.type === "array"}
  <Multiselect
    bind:value={value[field]}
    value={fieldData}
    options={schema.constraints.inclusion}
    on:change={e => onChange(e, field)}
    on:change={e =>
      onChange({
        row: {
          [field]: e.detail,
        },
      })}
  />
{:else if schema.type === "longform"}
  <TextArea bind:value={value[field]} on:change={e => onChange(e, field)} />
  <TextArea
    value={fieldData}
    on:change={e =>
      onChange({
        row: {
          [field]: e.detail,
        },
      })}
  />
{:else if schema.type === "json"}
  <span>
    <Editor
      editorHeight="150"
      mode="json"
      on:change={e => {
        if (e.detail?.value !== value[field]) {
          onChange(e, field, schema.type)
        }
      }}
      value={value[field]}
    />
    <div class="field-wrap json-field">
      <CodeEditor
        value={fieldData}
        on:change={e => {
          onChange({
            row: {
              [field]: e.detail,
            },
          })
        }}
      />
    </div>
  </span>
{:else if schema.type === "link"}
  <LinkedRowSelector
    linkedRows={value[field]}
    linkedRows={fieldData}
    {schema}
    on:change={e => onChange(e, field)}
    on:change={e =>
      onChange({
        row: {
          [field]: e.detail,
        },
      })}
    useLabel={false}
  />
{:else if schema.type === "bb_reference" || schema.type === "bb_reference_single"}
  <LinkedRowSelector
    linkedRows={value[field]}
    linkedRows={fieldData}
    {schema}
    linkedTableId={"ta_users"}
    on:change={e => onChange(e, field)}
    on:change={e =>
      onChange({
        row: {
          [field]: e.detail,
        },
      })}
    useLabel={false}
  />
{:else if attachmentTypes.includes(schema.type)}
  <div class="attachment-field-container">
    <div class="toggle-container">
      <Toggle
        value={useAttachmentBinding}
        value={meta?.fields?.[field]?.useAttachmentBinding}
        text={"Use bindings"}
        size={"XS"}
        on:change={e => handleToggleChange(field, e)}
        on:change={e => {
          onChange({
            row: {
              [field]: null,
            },
            meta: {
              fields: {
                [field]: {
                  useAttachmentBinding: e.detail,
                },
              },
            },
          })
        }}
      />
    </div>
    {#if !useAttachmentBinding}

    {#if !meta?.fields?.[field]?.useAttachmentBinding}
      <div class="attachment-field-spacing">
        <KeyValueBuilder
          on:change={async e => {
            onChange(
              {
                detail:
                  schema.type === FieldType.ATTACHMENT_SINGLE ||
                  schema.type === FieldType.SIGNATURE_SINGLE
                    ? e.detail.length > 0
                      ? {
                          url: e.detail[0].name,
                          filename: e.detail[0].value,
                        }
                      : {}
                    : e.detail.map(({ name, value }) => ({
                        url: name,
                        filename: value,
                      })),
          on:change={e => {
            onChange({
              row: {
                [field]: handleMediaUpdate(e),
              },
              field
            )
            })
          }}
          object={handleAttachmentParams(value[field])}
          object={handleAttachmentParams(fieldData)}
          allowJS
          {bindings}
          keyBindings
          customButtonText={"Add attachment"}
          customButtonText={schema.type === FieldType.SIGNATURE_SINGLE
            ? "Add signature"
            : "Add attachment"}
          keyPlaceholder={"URL"}
          valuePlaceholder={"Filename"}
          actionButtonDisabled={(schema.type === FieldType.ATTACHMENT_SINGLE ||
            schema.type === FieldType.SIGNATURE) &&
            Object.keys(value[field]).length >= 1}
            schema.type === FieldType.SIGNATURE_SINGLE) &&
            fieldData}
        />
      </div>
    {:else}
@@ -180,8 +233,13 @@
      <svelte:component
        this={isTestModal ? ModalBindableInput : DrawerBindableInput}
        panel={AutomationBindingPanel}
        value={value[field]}
        on:change={e => onChange(e, field)}
        value={fieldData}
        on:change={e =>
          onChange({
            row: {
              [field]: e.detail,
            },
          })}
        type="string"
        bindings={parsedBindings}
        allowJS={true}
@@ -195,20 +253,41 @@
  <svelte:component
    this={isTestModal ? ModalBindableInput : DrawerBindableInput}
    panel={AutomationBindingPanel}
    value={value[field]}
    on:change={e => onChange(e, field)}
    value={fieldData}
    on:change={e =>
      onChange({
        row: {
          [field]: e.detail,
        },
      })}
    type="string"
    bindings={parsedBindings}
    allowJS={true}
    updateOnChange={false}
    title={schema.name}
    autocomplete="off"
  />
{/if}

<style>
  .attachment-field-spacing,
  .json-input-spacing {
    margin-top: var(--spacing-s);
    margin-bottom: var(--spacing-l);
  .attachment-field-spacing {
    border: 1px solid var(--spectrum-global-color-gray-400);
    border-radius: 4px;
    padding: var(--spacing-s);
  }

  .field-wrap.json-field {
    height: 120px;
  }

  .field-wrap {
    box-sizing: border-box;
    border: 1px solid var(--spectrum-global-color-gray-400);
    border-radius: 4px;
  }

  .field-wrap :global(.cm-editor),
  .field-wrap :global(.cm-scroller) {
    border-radius: 4px;
  }
</style>

@@ -8,6 +8,7 @@

  export let value
  export let isTrigger
  export let disabled = false

  $: filteredTables = $tables.list.filter(table => {
    return !isTrigger || table._id !== TableNames.USERS
@@ -25,4 +26,5 @@
  options={filteredTables}
  getOptionLabel={table => table.name}
  getOptionValue={table => table._id}
  {disabled}
/>

@@ -5,6 +5,7 @@
  import { TableNames } from "constants"
  import { Grid } from "@budibase/frontend-core"
  import { API } from "api"
  import GridCreateAutomationButton from "./buttons/grid/GridCreateAutomationButton.svelte"
  import GridAddColumnModal from "components/backend/DataTable/modals/grid/GridCreateColumnModal.svelte"
  import GridCreateEditRowModal from "components/backend/DataTable/modals/grid/GridCreateEditRowModal.svelte"
  import GridEditUserModal from "components/backend/DataTable/modals/grid/GridEditUserModal.svelte"
@@ -81,6 +82,9 @@
  <GridCreateViewButton />
{/if}
<GridManageAccessButton />
{#if !isUsersTable}
  <GridCreateAutomationButton />
{/if}
{#if relationshipsEnabled}
  <GridRelationshipButton />
{/if}

@@ -0,0 +1,101 @@
<script>
  import {
    ActionButton,
    Popover,
    Menu,
    MenuItem,
    notifications,
  } from "@budibase/bbui"
  import { getContext } from "svelte"
  import { automationStore, tables, builderStore } from "stores/builder"
  import { TriggerStepID } from "constants/backend/automations"
  import { goto } from "@roxi/routify"

  const { datasource } = getContext("grid")

  $: triggers = $automationStore.blockDefinitions.TRIGGER

  $: table = $tables.list.find(table => table._id === $datasource.tableId)

  async function createAutomation(type) {
    const triggerType = triggers[type]
    if (!triggerType) {
      console.error("Invalid trigger type", type)
      notifications.error("Invalid automation trigger type")
      return
    }

    if (!table) {
      notifications.error("Invalid table, cannot create automation")
      return
    }

    const automationName = `${table.name} : Row ${
      type === TriggerStepID.ROW_SAVED ? "created" : "updated"
    }`
    const triggerBlock = automationStore.actions.constructBlock(
      "TRIGGER",
      triggerType.stepId,
      triggerType
    )

    triggerBlock.inputs = { tableId: $datasource.tableId }

    try {
      const response = await automationStore.actions.create(
        automationName,
        triggerBlock
      )
      builderStore.setPreviousTopNavPath(
        "/builder/app/:application/data",
        window.location.pathname
      )
      $goto(`/builder/app/${response.appId}/automation/${response.id}`)
      notifications.success(`Automation created`)
    } catch (e) {
      console.error("Error creating automation", e)
      notifications.error("Error creating automation")
    }
  }

  let anchor
  let open
</script>

<div bind:this={anchor}>
  <ActionButton
    icon="MagicWand"
    quiet
    size="M"
    on:click={() => (open = !open)}
    selected={open}
  >
    Generate
  </ActionButton>
</div>

<Popover bind:open {anchor} align="left">
  <Menu>
    <MenuItem
      icon="ShareAndroid"
      on:click={() => {
        open = false
        createAutomation(TriggerStepID.ROW_SAVED)
      }}
    >
      Automation: when row is created
    </MenuItem>
    <MenuItem
      icon="ShareAndroid"
      on:click={() => {
        open = false
        createAutomation(TriggerStepID.ROW_UPDATED)
      }}
    >
      Automation: when row is updated
    </MenuItem>
  </Menu>
</Popover>

<style>
</style>

@@ -3,12 +3,11 @@
import { Modal, ActionButton, TooltipType, TempTooltip } from "@budibase/bbui"
import GridCreateViewModal from "../../modals/grid/GridCreateViewModal.svelte"

const { rows, columns, filter } = getContext("grid")
const { filter } = getContext("grid")

let modal
let firstFilterUsage = false

$: disabled = !$columns.length || !$rows.length
$: {
if ($filter?.length && !firstFilterUsage) {
firstFilterUsage = true

@@ -21,7 +20,7 @@
type={TooltipType.Info}
condition={firstFilterUsage}
>
<ActionButton {disabled} icon="CollectionAdd" quiet on:click={modal.show}>
<ActionButton icon="CollectionAdd" quiet on:click={modal.show}>
Create view
</ActionButton>
</TempTooltip>

@@ -495,11 +495,7 @@
newError.name = `Column name cannot start with an underscore.`
} else if (fieldInfo.name && !fieldInfo.name.match(ValidColumnNameRegex)) {
newError.name = `Illegal character; must be alpha-numeric.`
} else if (
prohibited.some(
name => fieldInfo?.name?.toLowerCase() === name.toLowerCase()
)
) {
} else if (prohibited.some(name => fieldInfo?.name === name)) {
newError.name = `${prohibited.join(
", "
)} are not allowed as column names - case insensitive.`

@@ -23,6 +23,7 @@
export let disableBindings = false
export let forceModal = false
export let context = null
export let autocomplete

const dispatch = createEventDispatcher()

@@ -71,6 +72,7 @@
on:blur={onBlur}
{placeholder}
{updateOnChange}
{autocomplete}
/>
{#if !disabled && !disableBindings}
<div

@@ -0,0 +1,12 @@
export { default as BindableCombobox } from "./BindableCombobox.svelte"
export { default as BindingPanel } from "./BindingPanel.svelte"
export { default as BindingSidePanel } from "./BindingSidePanel.svelte"
export { default as DrawerBindableCombobox } from "./DrawerBindableCombobox.svelte"
export { default as ClientBindingPanel } from "./ClientBindingPanel.svelte"
export { default as DrawerBindableInput } from "./DrawerBindableInput.svelte"
export { default as DrawerBindableSlot } from "./DrawerBindableSlot.svelte"
export { default as EvaluationSidePanel } from "./EvaluationSidePanel.svelte"
export { default as ModalBindableInput } from "./ModalBindableInput.svelte"
export { default as ServerBindingPanel } from "./ServerBindingPanel.svelte"
export { default as SnippetDrawer } from "./SnippetDrawer.svelte"
export { default as SnippetSidePanel } from "./SnippetSidePanel.svelte"

@@ -18,7 +18,7 @@
<div class="params">
<Label small>Title</Label>
<DrawerBindableInput
placeholder="Title"
placeholder="Prompt User"
value={parameters.customTitleText}
on:change={e => (parameters.customTitleText = e.detail)}
{bindings}

@@ -30,6 +30,22 @@
on:change={e => (parameters.confirmText = e.detail)}
{bindings}
/>

<Label small>Confirm Text</Label>
<DrawerBindableInput
placeholder="Confirm"
value={parameters.confirmButtonText}
on:change={e => (parameters.confirmButtonText = e.detail)}
{bindings}
/>

<Label small>Cancel Text</Label>
<DrawerBindableInput
placeholder="Cancel"
value={parameters.cancelButtonText}
on:change={e => (parameters.cancelButtonText = e.detail)}
{bindings}
/>
</div>
</div>

@@ -9,7 +9,8 @@
import { createEventDispatcher } from "svelte"
import { getDatasourceForProvider, getSchemaForDatasource } from "dataBinding"
import FilterBuilder from "./FilterBuilder.svelte"
import { selectedScreen } from "stores/builder"
import { tables, selectedScreen } from "stores/builder"
import { search } from "@budibase/frontend-core"

const dispatch = createEventDispatcher()

@@ -23,7 +24,11 @@
$: tempValue = value
$: datasource = getDatasourceForProvider($selectedScreen, componentInstance)
$: dsSchema = getSchemaForDatasource($selectedScreen, datasource)?.schema
$: schemaFields = Object.values(schema || dsSchema || {})
$: schemaFields = search.getFields(
$tables.list,
Object.values(schema || dsSchema || {}),
{ allowLinks: true }
)
$: text = getText(value?.filter(filter => filter.field))

async function saveFilter() {

@@ -11,7 +11,7 @@
notifications,
} from "@budibase/bbui"
import { AUTH_TYPE_LABELS, AUTH_TYPES } from "./authTypes"
import BindableCombobox from "components/common/bindings/BindableCombobox.svelte"
import { BindableCombobox } from "components/common/bindings"
import { getAuthBindings, getEnvironmentBindings } from "dataBinding"
import { environment, licensing, auth } from "stores/portal"
import CreateEditVariableModal from "components/portal/environment/CreateEditVariableModal.svelte"

@@ -5,7 +5,17 @@
export let row
</script>

{value}
<span title={value} class="email">
{value}
</span>
{#if row.scimInfo?.isSync}
<ActiveDirectoryInfo iconSize="XS" />
{/if}

<style>
.email {
text-overflow: ellipsis;
overflow: hidden;
white-space: nowrap;
}
</style>

@@ -146,18 +146,19 @@ const automationActions = store => ({
await store.actions.save(automation)
notifications.success(
`Automation ${
automation.disabled ? "enabled" : "disabled"
automation.disabled ? "disabled" : "enabled"
} successfully`
)
} catch (error) {
notifications.error(
`Error ${
automation && automation.disabled ? "enabling" : "disabling"
automation && automation.disabled ? "disabling" : "enabling"
} automation`
)
}
},
updateBlockInputs: async (block, data) => {

processBlockInputs: async (block, data) => {
// Create new modified block
let newBlock = {
...block,

@@ -184,6 +185,14 @@ const automationActions = store => ({

// Don't save if no changes were made
if (JSON.stringify(newAutomation) === JSON.stringify(automation)) {
return false
}

return newAutomation
},
updateBlockInputs: async (block, data) => {
const newAutomation = await store.actions.processBlockInputs(block, data)
if (newAutomation === false) {
return
}
await store.actions.save(newAutomation)

@@ -82,7 +82,7 @@ export default defineConfig(({ mode }) => {
...(isProduction ? [] : devOnlyPlugins),
],
optimizeDeps: {
exclude: ["@roxi/routify"],
exclude: ["@roxi/routify", "fsevents"],
},
resolve: {
dedupe: ["@roxi/routify"],

@@ -30,7 +30,7 @@
"node-fetch": "2.6.7",
"posthog-node": "1.3.0",
"pouchdb": "7.3.0",
"pouchdb-replication-stream": "1.2.9",
"@budibase/pouchdb-replication-stream": "1.2.11",
"randomstring": "1.1.5",
"tar": "6.2.1",
"yaml": "^2.1.1"

@@ -17,7 +17,7 @@ export function getPouch(url?: string) {
prefix: url,
}
}
const replicationStream = require("pouchdb-replication-stream")
const replicationStream = require("@budibase/pouchdb-replication-stream")
PouchDB.plugin(replicationStream.plugin)
// @ts-ignore
PouchDB.adapter("writableStream", replicationStream.adapters.writableStream)

@@ -30,7 +30,7 @@
"downloadjs": "1.4.7",
"html5-qrcode": "^2.2.1",
"leaflet": "^1.7.1",
"sanitize-html": "^2.7.0",
"sanitize-html": "^2.13.0",
"screenfull": "^6.0.1",
"shortid": "^2.2.15",
"svelte-spa-router": "^4.0.1",

@@ -56,7 +56,7 @@
<Modal
on:cancel={handleModalClose}
bind:this={modal}
disableCancel={$builderStore.inBuilder}
disableCancel={$builderStore.inBuilder || ignoreClicksOutside}
zIndex={2}
>
<div use:styleable={$component.styles} class={`modal-content ${size}`}>

@@ -12,6 +12,6 @@
"dayjs": "^1.10.8",
"lodash": "4.17.21",
"shortid": "2.2.15",
"socket.io-client": "^4.6.1"
"socket.io-client": "^4.7.5"
}
}

@@ -11,12 +11,11 @@
Label,
Multiselect,
} from "@budibase/bbui"
import { FieldType, SearchFilterOperator } from "@budibase/types"
import { ArrayOperator, FieldType } from "@budibase/types"
import { generate } from "shortid"
import { QueryUtils, Constants } from "@budibase/frontend-core"
import { getContext } from "svelte"
import FilterUsers from "./FilterUsers.svelte"
import { getFields } from "../utils/searchFields"

const { OperatorOptions, DEFAULT_BB_DATASOURCE_ID } = Constants

@@ -32,7 +31,7 @@
if (
tables.find(
table =>
table._id === datasource.tableId &&
table._id === datasource?.tableId &&
table.sourceId === DEFAULT_BB_DATASOURCE_ID
) &&
!schemaFields.some(field => field.name === "_id")

@@ -62,9 +61,7 @@
]
const context = getContext("context")

$: fieldOptions = getFields(tables, schemaFields || [], {
allowLinks: true,
}).map(field => ({
$: fieldOptions = (schemaFields || []).map(field => ({
label: field.displayName || field.name,
value: field.name,
}))

@@ -271,7 +268,7 @@
<slot name="binding" {filter} />
{:else if [FieldType.STRING, FieldType.LONGFORM, FieldType.NUMBER, FieldType.BIGINT, FieldType.FORMULA].includes(filter.type)}
<Input disabled={filter.noValue} bind:value={filter.value} />
{:else if filter.type === FieldType.ARRAY || (filter.type === FieldType.OPTIONS && filter.operator === SearchFilterOperator.ONE_OF)}
{:else if filter.type === FieldType.ARRAY || (filter.type === FieldType.OPTIONS && filter.operator === ArrayOperator.ONE_OF)}
<Multiselect
disabled={filter.noValue}
options={getFieldOptions(filter.field)}

@@ -2,7 +2,6 @@ import { BannedSearchTypes } from "../constants"

export function getTableFields(tables, linkField) {
const table = tables.find(table => table._id === linkField.tableId)
// TODO: mdrury - add support for this with SQS at some point
if (!table || !table.sql) {
return []
}

@@ -11,7 +10,7 @@ export function getTableFields(tables, linkField) {
})
return linkFields.map(field => ({
...field,
name: `${table.name}.${field.name}`,
name: `${linkField.name}.${field.name}`,
}))
}

@@ -1 +1 @@
Subproject commit e8f2c5a14780e1f61ec3896821ba5f93d486eb72
Subproject commit 7dbe323aec724ae6336b13c06aaefa4a89837edf

@@ -14,7 +14,6 @@
"@rollup/plugin-commonjs": "^25.0.7",
"@rollup/plugin-node-resolve": "^15.2.3",
"rollup": "^4.9.6",
"rollup-plugin-terser": "^7.0.2",
"rollup-plugin-polyfill-node": "^0.13.0"
}
}

@@ -1,23 +0,0 @@
// eslint-disable-next-line @typescript-eslint/no-unused-vars
module SendgridMock {
class Email {
constructor() {
// @ts-ignore
this.apiKey = null
}

setApiKey(apiKey: any) {
// @ts-ignore
this.apiKey = apiKey
}

async send(msg: any) {
if (msg.to === "invalid@example.com") {
throw "Invalid"
}
return msg
}
}

module.exports = new Email()
}

@@ -58,7 +58,7 @@
"@bull-board/api": "5.10.2",
"@bull-board/koa": "5.10.2",
"@elastic/elasticsearch": "7.10.0",
"@google-cloud/firestore": "6.8.0",
"@google-cloud/firestore": "7.8.0",
"@koa/router": "8.0.8",
"@socket.io/redis-adapter": "^8.2.1",
"@types/xml2js": "^0.4.14",

@@ -81,7 +81,7 @@
"google-spreadsheet": "npm:@budibase/google-spreadsheet@4.1.2",
"ioredis": "5.3.2",
"isolated-vm": "^4.7.2",
"jimp": "0.22.10",
"jimp": "0.22.12",
"joi": "17.6.0",
"js-yaml": "4.1.0",
"jsonschema": "1.4.0",

@@ -94,7 +94,7 @@
"koa2-ratelimit": "1.1.1",
"lodash": "4.17.21",
"memorystream": "0.3.1",
"mongodb": "^6.3.0",
"mongodb": "6.7.0",
"mssql": "10.0.1",
"mysql2": "3.9.8",
"node-fetch": "2.6.7",

@@ -109,7 +109,7 @@
"serialize-error": "^7.0.1",
"server-destroy": "1.0.1",
"snowflake-promise": "^4.5.0",
"socket.io": "4.6.2",
"socket.io": "4.7.5",
"tar": "6.2.1",
"to-json-schema": "0.2.5",
"uuid": "^8.3.2",

@@ -1,5 +1,4 @@
import newid from "../../../db/newid"
import { context } from "@budibase/backend-core"
import { context, utils } from "@budibase/backend-core"

/**
* This is used to pass around information about the deployment that is occurring

@@ -12,7 +11,7 @@ export default class Deployment {
appUrl?: string

constructor(id = null) {
this._id = id || newid()
this._id = id || utils.newid()
}

setVerification(verification: any) {

@@ -7,6 +7,7 @@ import {
FieldType,
FilterType,
IncludeRelationship,
InternalSearchFilterOperator,
isManyToOne,
OneToManyRelationshipFieldMetadata,
Operation,

@@ -189,15 +190,22 @@ export class ExternalRequest<T extends Operation> {
if (filters) {
// need to map over the filters and make sure the _id field isn't present
let prefix = 1
for (let operator of Object.values(filters)) {
for (let field of Object.keys(operator || {})) {
for (const operator of Object.values(filters)) {
for (const field of Object.keys(operator || {})) {
if (dbCore.removeKeyNumbering(field) === "_id") {
if (primary) {
const parts = breakRowIdField(operator[field])
for (let field of primary) {
operator[`${prefix}:${field}`] = parts.shift()
if (primary.length > 1) {
operator[InternalSearchFilterOperator.COMPLEX_ID_OPERATOR] = {
id: primary,
values: parts[0],
}
} else {
for (let field of primary) {
operator[`${prefix}:${field}`] = parts.shift()
}
prefix++
}
prefix++
}
// make sure this field doesn't exist on any filter
delete operator[field]

@@ -72,15 +72,23 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
const row = await sdk.rows.external.getRow(tableId, updatedId, {
relationships: true,
})
const enrichedRow = await outputProcessing(table, row, {
squash: true,
preserveLinks: true,
})

const [enrichedRow, oldRow] = await Promise.all([
outputProcessing(table, row, {
squash: true,
preserveLinks: true,
}),
outputProcessing(table, beforeRow, {
squash: true,
preserveLinks: true,
}),
])

return {
...response,
row: enrichedRow,
table,
oldRow: beforeRow,
oldRow,
}
}

@@ -22,6 +22,20 @@ export function isManyToMany(
return !!(field as ManyToManyRelationshipFieldMetadata).through
}

function isCorrectRelationship(
relationship: RelationshipsJson,
table1: Table,
table2: Table,
row: Row
): boolean {
const junctionTableId = generateJunctionTableID(table1._id!, table2._id!)
const possibleColumns = [
`${junctionTableId}.doc1.fieldName`,
`${junctionTableId}.doc2.fieldName`,
]
return !!possibleColumns.find(col => row[col] === relationship.column)
}

/**
* This iterates through the returned rows and works out what elements of the rows
* actually match up to another row (based on primary keys) - this is pretty specific

@@ -64,7 +78,12 @@ export async function updateRelationshipColumns(
if (!linked._id) {
continue
}
columns[relationship.column] = linked
if (
!opts?.sqs ||
isCorrectRelationship(relationship, table, linkedTable, row)
) {
columns[relationship.column] = linked
}
}
for (let [column, related] of Object.entries(columns)) {
if (!row._id) {

@@ -25,7 +25,9 @@ export async function searchView(
ctx.throw(400, `This method only supports viewsV2`)
}

const viewFields = Object.keys(view.schema || {})
const viewFields = Object.entries(view.schema || {})
.filter(([_, value]) => value.visible)
.map(([key]) => key)
const { body } = ctx.request

// Enrich saved query with ephemeral query params.

@@ -15,7 +15,7 @@ import { getViews, saveView } from "../view/utils"
import viewTemplate from "../view/viewBuilder"
import { cloneDeep } from "lodash/fp"
import { quotas } from "@budibase/pro"
import { events, context } from "@budibase/backend-core"
import { events, context, db as dbCore } from "@budibase/backend-core"
import {
AutoFieldSubType,
ContextUser,

@@ -324,7 +324,7 @@ class TableSaveFunctions {
importRows: this.importRows,
user: this.user,
})
if (env.SQS_SEARCH_ENABLE) {
if (dbCore.isSqsEnabledForTenant()) {
await sdk.tables.sqs.addTable(table)
}
return table

@@ -518,7 +518,7 @@ export async function internalTableCleanup(table: Table, rows?: Row[]) {
if (rows) {
await AttachmentCleanup.tableDelete(table, rows)
}
if (env.SQS_SEARCH_ENABLE) {
if (dbCore.isSqsEnabledForTenant()) {
await sdk.tables.sqs.removeTable(table)
}
}

@@ -33,11 +33,6 @@ async function parseSchema(view: CreateViewRequest) {
p[fieldName] = fieldSchema
return p
}, {} as Record<string, RequiredKeys<ViewUIFieldMetadata>>)
for (let [key, column] of Object.entries(finalViewSchema)) {
if (!column.visible && !column.readonly) {
delete finalViewSchema[key]
}
}
return finalViewSchema
}

@@ -203,7 +203,7 @@ describe("/permission", () => {
// replicate changes before checking permissions
await config.publish()

await config.api.viewV2.publicSearch(view.id, undefined, { status: 403 })
await config.api.viewV2.publicSearch(view.id, undefined, { status: 401 })
})

it("should ignore the view permissions if the flag is not on", async () => {

@@ -221,7 +221,7 @@ describe("/permission", () => {
await config.publish()

await config.api.viewV2.publicSearch(view.id, undefined, {
status: 403,
status: 401,
})
})

@@ -250,8 +250,8 @@ describe("/permission", () => {
.send(basicRow(table._id))
.set(config.publicHeaders())
.expect("Content-Type", /json/)
.expect(403)
expect(res.status).toEqual(403)
.expect(401)
expect(res.status).toEqual(401)
})
})

@@ -819,7 +819,10 @@ describe.each([
const table = await config.api.table.save(tableRequest)

const stringValue = generator.word()
const naturalValue = generator.integer({ min: 0, max: 1000 })

// MySQL and MariaDB auto-increment fields have a minimum value of 1. If
// you try to save a row with a value of 0 it will use 1 instead.
const naturalValue = generator.integer({ min: 1, max: 1000 })

const existing = await config.api.row.save(table._id!, {
string: stringValue,

@@ -1428,22 +1431,6 @@
expect(row._id).toEqual(existing._id)
})

it("should return an error on composite keys", async () => {
const existing = await config.api.row.save(table._id!, {})
await config.api.row.exportRows(
table._id!,
{
rows: [`['${existing._id!}']`, "['d001', '10111']"],
},
{
status: 400,
body: {
message: "Export data does not support composite keys.",
},
}
)
})

it("should return an error if no table is found", async () => {
const existing = await config.api.row.save(table._id!, {})
await config.api.row.exportRows(

@@ -1452,6 +1439,41 @@
{ status: 404 }
)
})

// MSSQL needs a setting called IDENTITY_INSERT to be set to ON to allow writing
// to identity columns. This is not something Budibase does currently.
providerType !== DatabaseName.SQL_SERVER &&
it("should handle filtering by composite primary keys", async () => {
const tableRequest = saveTableRequest({
primary: ["number", "string"],
schema: {
string: {
type: FieldType.STRING,
name: "string",
},
number: {
type: FieldType.NUMBER,
name: "number",
},
},
})
delete tableRequest.schema.id

const table = await config.api.table.save(tableRequest)
const toCreate = generator
.unique(() => generator.integer({ min: 0, max: 10000 }), 10)
.map(number => ({ number, string: generator.word({ length: 30 }) }))

const rows = await Promise.all(
toCreate.map(d => config.api.row.save(table._id!, d))
)

const res = await config.api.row.exportRows(table._id!, {
rows: _.sampleSize(rows, 3).map(r => r._id!),
})
const results = JSON.parse(res)
expect(results.length).toEqual(3)
})
})

let o2mTable: Table

@@ -9,20 +9,20 @@ import { db as dbCore, utils } from "@budibase/backend-core"
import * as setup from "./utilities"
import {
AutoFieldSubType,
BBReferenceFieldSubType,
Datasource,
EmptyFilterOption,
BBReferenceFieldSubType,
FieldType,
RelationshipType,
Row,
RowSearchParams,
SearchFilters,
SearchResponse,
SortOrder,
SortType,
Table,
TableSchema,
User,
Row,
RelationshipType,
SearchResponse,
} from "@budibase/types"
import _ from "lodash"
import tk from "timekeeper"

@@ -30,6 +30,7 @@ import { encodeJSBinding } from "@budibase/string-templates"
import { dataFilters } from "@budibase/shared-core"
import { Knex } from "knex"
import { structures } from "@budibase/backend-core/tests"
import { DEFAULT_EMPLOYEE_TABLE_SCHEMA } from "../../../db/defaultData/datasource_bb_default"

describe.each([
["in-memory", undefined],

@@ -53,10 +54,13 @@ describe.each([
let rows: Row[]

beforeAll(async () => {
await config.withCoreEnv({ SQS_SEARCH_ENABLE: "true" }, () => config.init())
if (isSqs) {
envCleanup = config.setEnv({ SQS_SEARCH_ENABLE: "true" })
envCleanup = config.setCoreEnv({
SQS_SEARCH_ENABLE: "true",
SQS_SEARCH_ENABLE_TENANTS: [config.getTenantId()],
})
}
await config.init()

if (config.app?.appId) {
config.app = await config.api.application.update(config.app?.appId, {

@@ -779,6 +783,46 @@ describe.each([
it("fails to find nonexistent row", async () => {
await expectQuery({ oneOf: { name: ["none"] } }).toFindNothing()
})

it("can have multiple values for same column", async () => {
await expectQuery({
oneOf: {
name: ["foo", "bar"],
},
}).toContainExactly([{ name: "foo" }, { name: "bar" }])
})

it("splits comma separated strings", async () => {
await expectQuery({
oneOf: {
// @ts-ignore
name: "foo,bar",
},
}).toContainExactly([{ name: "foo" }, { name: "bar" }])
})

it("trims whitespace", async () => {
await expectQuery({
oneOf: {
// @ts-ignore
name: "foo, bar",
},
}).toContainExactly([{ name: "foo" }, { name: "bar" }])
})

it("empty arrays returns all when onEmptyFilter is set to return 'all'", async () => {
await expectQuery({
onEmptyFilter: EmptyFilterOption.RETURN_ALL,
oneOf: { name: [] },
}).toContainExactly([{ name: "foo" }, { name: "bar" }])
})

it("empty arrays returns all when onEmptyFilter is set to return 'none'", async () => {
await expectQuery({
onEmptyFilter: EmptyFilterOption.RETURN_NONE,
oneOf: { name: [] },
}).toContainExactly([])
})
})

describe("fuzzy", () => {

@@ -1001,6 +1045,32 @@ describe.each([
it("fails to find nonexistent row", async () => {
await expectQuery({ oneOf: { age: [2] } }).toFindNothing()
})

// I couldn't find a way to make this work in Lucene and given that
// we're getting rid of Lucene soon I wasn't inclined to spend time on
// it.
!isLucene &&
it("can convert from a string", async () => {
await expectQuery({
oneOf: {
// @ts-ignore
age: "1",
},
}).toContainExactly([{ age: 1 }])
})

// I couldn't find a way to make this work in Lucene and given that
// we're getting rid of Lucene soon I wasn't inclined to spend time on
// it.
!isLucene &&
it("can find multiple values for same column", async () => {
await expectQuery({
oneOf: {
// @ts-ignore
age: "1,10",
},
}).toContainExactly([{ age: 1 }, { age: 10 }])
})
})

describe("range", () => {

@@ -1938,6 +2008,17 @@ describe.each([
])
})

it("successfully finds a row searching with a string", async () => {
await expectQuery({
// @ts-expect-error this test specifically goes against the type to
// test that we coerce the string to an array.
contains: { "1:users": user1._id },
}).toContainExactly([
{ users: [{ _id: user1._id }] },
{ users: [{ _id: user1._id }, { _id: user2._id }] },
])
})

it("fails to find nonexistent row", async () => {
await expectQuery({ contains: { users: ["us_none"] } }).toFindNothing()
})

@@ -2073,6 +2154,128 @@ describe.each([
})
})

isInternal &&
describe("relations to same table", () => {
let relatedTable: Table, relatedRows: Row[]

beforeAll(async () => {
relatedTable = await createTable(
{
name: { name: "name", type: FieldType.STRING },
},
"productCategory"
)
table = await createTable({
name: { name: "name", type: FieldType.STRING },
related1: {
type: FieldType.LINK,
name: "related1",
fieldName: "main1",
tableId: relatedTable._id!,
relationshipType: RelationshipType.MANY_TO_MANY,
},
related2: {
type: FieldType.LINK,
name: "related2",
fieldName: "main2",
tableId: relatedTable._id!,
relationshipType: RelationshipType.MANY_TO_MANY,
},
})
relatedRows = await Promise.all([
config.api.row.save(relatedTable._id!, { name: "foo" }),
config.api.row.save(relatedTable._id!, { name: "bar" }),
config.api.row.save(relatedTable._id!, { name: "baz" }),
config.api.row.save(relatedTable._id!, { name: "boo" }),
])
await Promise.all([
config.api.row.save(table._id!, {
name: "test",
related1: [relatedRows[0]._id!],
related2: [relatedRows[1]._id!],
}),
config.api.row.save(table._id!, {
name: "test2",
related1: [relatedRows[2]._id!],
related2: [relatedRows[3]._id!],
}),
])
})

it("should be able to relate to same table", async () => {
await expectSearch({
query: {},
}).toContainExactly([
{
name: "test",
related1: [{ _id: relatedRows[0]._id }],
related2: [{ _id: relatedRows[1]._id }],
},
{
name: "test2",
related1: [{ _id: relatedRows[2]._id }],
related2: [{ _id: relatedRows[3]._id }],
},
])
})

isSqs &&
it("should be able to filter down to second row with equal", async () => {
await expectSearch({
query: {
equal: {
["related1.name"]: "baz",
},
},
}).toContainExactly([
{
name: "test2",
related1: [{ _id: relatedRows[2]._id }],
},
])
})

isSqs &&
it("should be able to filter down to first row with not equal", async () => {
await expectSearch({
query: {
notEqual: {
["1:related2.name"]: "bar",
["2:related2.name"]: "baz",
["3:related2.name"]: "boo",
},
},
}).toContainExactly([
{
name: "test",
related1: [{ _id: relatedRows[0]._id }],
},
])
})
})

isInternal &&
describe("no column error backwards compat", () => {
beforeAll(async () => {
table = await createTable({
name: {
name: "name",
type: FieldType.STRING,
},
})
})

it("shouldn't error when column doesn't exist", async () => {
await expectSearch({
query: {
string: {
"1:something": "a",
},
},
}).toMatch({ rows: [] })
})
})

// lucene can't count the total rows
!isLucene &&
describe("row counting", () => {

@@ -2108,4 +2311,119 @@ describe.each([
}).toNotHaveProperty(["totalRows"])
})
})

describe.each(["data_name_test", "name_data_test", "name_test_data_"])(
"special (%s) case",
column => {
beforeAll(async () => {
table = await createTable({
[column]: {
name: column,
type: FieldType.STRING,
},
})
await createRows([{ [column]: "a" }, { [column]: "b" }])
})

it("should be able to query a column with data_ in it", async () => {
await expectSearch({
query: {
equal: {
[`1:${column}`]: "a",
},
},
}).toContainExactly([{ [column]: "a" }])
})
}
)

isInternal &&
describe("sample data", () => {
beforeAll(async () => {
await config.api.application.addSampleData(config.appId!)
table = DEFAULT_EMPLOYEE_TABLE_SCHEMA
})

it("should be able to search sample data", async () => {
await expectSearch({
query: {},
}).toContain([
{
"First Name": "Mandy",
},
])
})
})

describe.each([
{ low: "2024-07-03T00:00:00.000Z", high: "9999-00-00T00:00:00.000Z" },
{ low: "2024-07-03T00:00:00.000Z", high: "9998-00-00T00:00:00.000Z" },
{ low: "0000-00-00T00:00:00.000Z", high: "2024-07-04T00:00:00.000Z" },
{ low: "0001-00-00T00:00:00.000Z", high: "2024-07-04T00:00:00.000Z" },
])("date special cases", ({ low, high }) => {
const earlyDate = "2024-07-03T10:00:00.000Z",
laterDate = "2024-07-03T11:00:00.000Z"
beforeAll(async () => {
table = await createTable({
date: {
name: "date",
type: FieldType.DATETIME,
},
})
await createRows([{ date: earlyDate }, { date: laterDate }])
})

it("should be able to handle a date search", async () => {
await expectSearch({
query: {
range: {
"1:date": { low, high },
},
},
}).toContainExactly([{ date: earlyDate }, { date: laterDate }])
})
})

describe.each([
"名前", // Japanese for "name"
"Benutzer-ID", // German for "user ID", includes a hyphen
"numéro", // French for "number", includes an accent
"år", // Swedish for "year", includes a ring above
"naïve", // English word borrowed from French, includes an umlaut
"الاسم", // Arabic for "name"
"оплата", // Russian for "payment"
"पता", // Hindi for "address"
"用戶名", // Chinese for "username"
"çalışma_zamanı", // Turkish for "runtime", includes an underscore and a cedilla
"preço", // Portuguese for "price", includes a cedilla
"사용자명", // Korean for "username"
"usuario_ñoño", // Spanish, uses an underscore and includes "ñ"
"файл", // Bulgarian for "file"
"δεδομένα", // Greek for "data"
"geändert_am", // German for "modified on", includes an umlaut
"ব্যবহারকারীর_নাম", // Bengali for "user name", includes an underscore
"São_Paulo", // Portuguese, includes an underscore and a tilde
"età", // Italian for "age", includes an accent
"ชื่อผู้ใช้", // Thai for "username"
])("non-ascii column name: %s", name => {
beforeAll(async () => {
table = await createTable({
[name]: {
name,
type: FieldType.STRING,
},
})
await createRows([{ [name]: "a" }, { [name]: "b" }])
})

it("should be able to query a column with non-ascii characters", async () => {
await expectSearch({
query: {
equal: {
[`1:${name}`]: "a",
},
},
}).toContainExactly([{ [name]: "a" }])
})
})
})

@@ -86,9 +86,10 @@ describe("/templates", () => {
async source => {
const env = {
SQS_SEARCH_ENABLE: source === "sqs" ? "true" : "false",
SQS_SEARCH_ENABLE_TENANTS: [config.getTenantId()],
}

await config.withEnv(env, async () => {
await config.withCoreEnv(env, async () => {
const name = generator.guid().replaceAll("-", "")
const url = `/${name}`

@@ -151,7 +151,7 @@ export const checkPermissionsEndpoint = async ({
await exports
.createRequest(config.request, method, url, body)
.set(failHeader)
.expect(403)
.expect(401)
}

export const getDB = () => {

@@ -9,7 +9,6 @@ import {
QuotaUsageType,
Row,
SaveTableRequest,
SearchFilterOperator,
SortOrder,
SortType,
StaticQuotaName,

@@ -19,6 +18,7 @@ import {
ViewUIFieldMetadata,
ViewV2,
SearchResponse,
BasicOperator,
} from "@budibase/types"
import { generator, mocks } from "@budibase/backend-core/tests"
import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"

@@ -88,10 +88,16 @@ describe.each([
}

beforeAll(async () => {
await config.withCoreEnv(
{ SQS_SEARCH_ENABLE: isSqs ? "true" : "false" },
() => config.init()
)
if (isSqs) {
envCleanup = config.setEnv({ SQS_SEARCH_ENABLE: "true" })
envCleanup = config.setCoreEnv({
SQS_SEARCH_ENABLE: "true",
SQS_SEARCH_ENABLE_TENANTS: [config.getTenantId()],
})
}
await config.init()

if (dsProvider) {
datasource = await config.createDatasource({

@@ -149,7 +155,7 @@ describe.each([
primaryDisplay: "id",
query: [
{
operator: SearchFilterOperator.EQUAL,
operator: BasicOperator.EQUAL,
field: "field",
value: "value",
},

@@ -218,6 +224,10 @@ describe.each([
order: 1,
width: 100,
},
Category: {
visible: false,
icon: "ic",
},
},
id: createdView.id,
version: 2,

@@ -269,9 +279,8 @@ describe.each([
...newView,
schema: {
id: { visible: true },
Price: {
visible: true,
},
Price: { visible: true },
Category: { visible: false },
},
id: expect.any(String),
version: 2,

@@ -558,7 +567,7 @@ describe.each([
...view,
query: [
{
operator: SearchFilterOperator.EQUAL,
operator: BasicOperator.EQUAL,
field: "newField",
value: "thatValue",
},

@@ -586,7 +595,7 @@ describe.each([
primaryDisplay: "Price",
query: [
{
operator: SearchFilterOperator.EQUAL,
operator: BasicOperator.EQUAL,
field: generator.word(),
value: generator.word(),
},

@@ -670,7 +679,7 @@ describe.each([
tableId: generator.guid(),
query: [
{
operator: SearchFilterOperator.EQUAL,
operator: BasicOperator.EQUAL,
field: "newField",
value: "thatValue",
},

@@ -759,6 +768,7 @@ describe.each([
order: 1,
width: 100,
},
Category: { visible: false, icon: "ic" },
},
id: view.id,
version: 2,

@@ -873,30 +883,23 @@ describe.each([
await db.getDB(config.appId!).put(tableToUpdate)

view = await config.api.viewV2.get(view.id)
await config.api.viewV2.update({
...view,
schema: {
...view.schema,
Price: {
visible: false,
await config.api.viewV2.update(
{
...view,
schema: {
...view.schema,
Price: {
visible: false,
},
},
},
})

expect(await config.api.viewV2.get(view.id)).toEqual(
expect.objectContaining({
schema: {
id: expect.objectContaining({
visible: false,
}),
Price: expect.objectContaining({
visible: false,
}),
Category: expect.objectContaining({
visible: true,
}),
{
status: 400,
body: {
message: 'You can\'t hide "id" because it is a required field.',
status: 400,
},
})
}
)
})
})

@@ -938,7 +941,6 @@ describe.each([
Category: { visible: true },
},
})
expect(res.schema?.Price).toBeUndefined()

const view = await config.api.viewV2.get(res.id)
const updatedTable = await config.api.table.get(table._id!)

@@ -1198,13 +1200,14 @@ describe.each([
name: generator.guid(),
query: [
{
operator: SearchFilterOperator.EQUAL,
operator: BasicOperator.EQUAL,
field: "two",
value: "bar2",
},
],
schema: {
id: { visible: true },
one: { visible: false },
two: { visible: true },
},
})

@@ -1487,7 +1490,7 @@ describe.each([
it("does not allow public users to fetch by default", async () => {
await config.publish()
await config.api.viewV2.publicSearch(view.id, undefined, {
status: 403,
status: 401,
})
})

@@ -1531,7 +1534,7 @@ describe.each([
await config.publish()

await config.api.viewV2.publicSearch(view.id, undefined, {
status: 403,
status: 401,
})
})
})

@@ -49,7 +49,6 @@ export async function checkMissingMigrations(
const queue = getAppMigrationQueue()

if (
queue &&
latestMigration &&
getTimestamp(currentVersion) < getTimestamp(latestMigration)
) {

@@ -1,6 +1,6 @@
// This file should never be manually modified, use `yarn add-app-migration` in order to add a new one

import env from "../environment"
import { env } from "@budibase/backend-core"
import { AppMigration } from "."

import m20240604153647_initial_sqs from "./migrations/20240604153647_initial_sqs"

@@ -10,6 +10,6 @@ export const MIGRATIONS: AppMigration[] = [
{
id: "20240604153647_initial_sqs",
func: m20240604153647_initial_sqs,
disabled: !env.SQS_SEARCH_ENABLE,
disabled: !(env.SQS_MIGRATION_ENABLE || env.SQS_SEARCH_ENABLE),
},
]

@@ -1,8 +1,7 @@
import { context } from "@budibase/backend-core"
import { context, env } from "@budibase/backend-core"
import { allLinkDocs } from "../../db/utils"
import LinkDocumentImpl from "../../db/linkedRows/LinkDocument"
import sdk from "../../sdk"
import env from "../../environment"

const migration = async () => {
const linkDocs = await allLinkDocs()

@@ -40,7 +39,7 @@ const migration = async () => {
// only do initial search if environment is using SQS already
// initial search makes sure that all the indexes have been created
// and are ready to use, avoiding any initial waits for large tables
if (env.SQS_SEARCH_ENABLE) {
if (env.SQS_MIGRATION_ENABLE || env.SQS_SEARCH_ENABLE) {
const tables = await sdk.tables.getAllInternalTables()
// do these one by one - running in parallel could cause problems
for (let table of tables) {

@@ -15,6 +15,7 @@ import {
import { processMigrations } from "../../migrationsProcessor"
import migration from "../20240604153647_initial_sqs"
import { AppMigration } from "src/appMigrations"
import sdk from "../../../sdk"

const MIGRATIONS: AppMigration[] = [
{

@@ -27,6 +28,8 @@ const MIGRATIONS: AppMigration[] = [
const config = setup.getConfig()
let tableId: string

const prefix = sdk.tables.sqs.mapToUserColumn

function oldLinkDocInfo() {
const tableId1 = `${DocumentType.TABLE}_a`,
tableId2 = `${DocumentType.TABLE}_b`

@@ -63,58 +66,72 @@ function oldLinkDocument(): Omit<LinkDocument, "tableId"> {
}
}

async function sqsDisabled(cb: () => Promise<void>) {
await config.withEnv({ SQS_SEARCH_ENABLE: "" }, cb)
type SQSEnvVar = "SQS_MIGRATION_ENABLE" | "SQS_SEARCH_ENABLE"

async function sqsDisabled(envVar: SQSEnvVar, cb: () => Promise<void>) {
await config.withCoreEnv({ [envVar]: "", SQS_SEARCH_ENABLE_TENANTS: [] }, cb)
}

async function sqsEnabled(cb: () => Promise<void>) {
await config.withEnv({ SQS_SEARCH_ENABLE: "1" }, cb)
async function sqsEnabled(envVar: SQSEnvVar, cb: () => Promise<void>) {
await config.withCoreEnv(
{ [envVar]: "1", SQS_SEARCH_ENABLE_TENANTS: [config.getTenantId()] },
cb
)
}

beforeAll(async () => {
await sqsDisabled(async () => {
await config.init()
const table = await config.api.table.save(basicTable())
tableId = table._id!
const db = dbCore.getDB(config.appId!)
// old link document
await db.put(oldLinkDocument())
})
})

describe("SQS migration", () => {
it("test migration runs as expected against an older DB", async () => {
const db = dbCore.getDB(config.appId!)
// confirm nothing exists initially
await sqsDisabled(async () => {
let error: any | undefined
try {
await db.get(SQLITE_DESIGN_DOC_ID)
} catch (err: any) {
error = err
}
expect(error).toBeDefined()
expect(error.status).toBe(404)
describe.each(["SQS_MIGRATION_ENABLE", "SQS_SEARCH_ENABLE"] as SQSEnvVar[])(
"SQS migration with (%s)",
envVar => {
beforeAll(async () => {
await sqsDisabled(envVar, async () => {
await config.init()
const table = await config.api.table.save(basicTable())
tableId = table._id!
const db = dbCore.getDB(config.appId!)
// old link document
await db.put(oldLinkDocument())
})
})
await sqsEnabled(async () => {
await processMigrations(config.appId!, MIGRATIONS)
const designDoc = await db.get<SQLiteDefinition>(SQLITE_DESIGN_DOC_ID)
expect(designDoc.sql.tables).toBeDefined()
const mainTableDef = designDoc.sql.tables[tableId]
expect(mainTableDef).toBeDefined()
expect(mainTableDef.fields.name).toEqual(SQLiteType.TEXT)
expect(mainTableDef.fields.description).toEqual(SQLiteType.TEXT)

const { tableId1, tableId2, rowId1, rowId2 } = oldLinkDocInfo()
const linkDoc = await db.get<LinkDocument>(oldLinkDocID())
expect(linkDoc.tableId).toEqual(
generateJunctionTableID(tableId1, tableId2)
)
// should have swapped the documents
expect(linkDoc.doc1.tableId).toEqual(tableId2)
expect(linkDoc.doc1.rowId).toEqual(rowId2)
expect(linkDoc.doc2.tableId).toEqual(tableId1)
expect(linkDoc.doc2.rowId).toEqual(rowId1)
it("test migration runs as expected against an older DB", async () => {
const db = dbCore.getDB(config.appId!)
// confirm nothing exists initially
await sqsDisabled(envVar, async () => {
let error: any | undefined
try {
await db.get(SQLITE_DESIGN_DOC_ID)
} catch (err: any) {
error = err
}
expect(error).toBeDefined()
expect(error.status).toBe(404)
})
await sqsEnabled(envVar, async () => {
await processMigrations(config.appId!, MIGRATIONS)
const designDoc = await db.get<SQLiteDefinition>(SQLITE_DESIGN_DOC_ID)
expect(designDoc.sql.tables).toBeDefined()
const mainTableDef = designDoc.sql.tables[tableId]
expect(mainTableDef).toBeDefined()
expect(mainTableDef.fields[prefix("name")]).toEqual({
field: "name",
type: SQLiteType.TEXT,
})
expect(mainTableDef.fields[prefix("description")]).toEqual({
field: "description",
type: SQLiteType.TEXT,
})

const { tableId1, tableId2, rowId1, rowId2 } = oldLinkDocInfo()
const linkDoc = await db.get<LinkDocument>(oldLinkDocID())
expect(linkDoc.tableId).toEqual(
generateJunctionTableID(tableId1, tableId2)
)
// should have swapped the documents
expect(linkDoc.doc1.tableId).toEqual(tableId2)
expect(linkDoc.doc1.rowId).toEqual(rowId2)
expect(linkDoc.doc2.tableId).toEqual(tableId1)
expect(linkDoc.doc2.rowId).toEqual(rowId1)
})
})
})
})
}
)

@@ -11,26 +11,26 @@ export type AppMigrationJob = {
appId: string
}

let appMigrationQueue: queue.Queue<AppMigrationJob> | undefined
// always create app migration queue - so that events can be pushed and read from it
// across the different api and automation services
const appMigrationQueue = queue.createQueue<AppMigrationJob>(
queue.JobQueue.APP_MIGRATION,
{
jobOptions: {
attempts: MAX_ATTEMPTS,
removeOnComplete: true,
removeOnFail: true,
},
maxStalledCount: MAX_ATTEMPTS,
removeStalledCb: async (job: Job) => {
logging.logAlert(
`App migration failed, queue job ID: ${job.id} - reason: ${job.failedReason}`
)
},
}
)

export function init() {
appMigrationQueue = queue.createQueue<AppMigrationJob>(
queue.JobQueue.APP_MIGRATION,
{
jobOptions: {
attempts: MAX_ATTEMPTS,
removeOnComplete: true,
removeOnFail: true,
},
maxStalledCount: MAX_ATTEMPTS,
removeStalledCb: async (job: Job) => {
logging.logAlert(
`App migration failed, queue job ID: ${job.id} - reason: ${job.failedReason}`
)
},
}
)

return appMigrationQueue.process(MIGRATION_CONCURRENCY, processMessage)
}

@@ -100,7 +100,10 @@ export function getError(err: any) {
}

export function guardAttachment(attachmentObject: any) {
if (!("url" in attachmentObject) || !("filename" in attachmentObject)) {
if (
attachmentObject &&
(!("url" in attachmentObject) || !("filename" in attachmentObject))
) {
const providedKeys = Object.keys(attachmentObject).join(", ")
throw new Error(
`Attachments must have both "url" and "filename" keys. You have provided: ${providedKeys}`

@@ -135,7 +138,9 @@ export async function sendAutomationAttachmentsToStorage(
}

for (const [prop, attachments] of Object.entries(attachmentRows)) {
if (Array.isArray(attachments)) {
if (!attachments) {
continue
} else if (Array.isArray(attachments)) {
if (attachments.length) {
row[prop] = await Promise.all(
attachments.map(attachment => generateAttachmentRow(attachment))

@@ -1,4 +1,5 @@
import * as automationUtils from "./automationUtils"
import { isPlainObject } from "lodash"

type ObjValue = {
[key: string]: string | ObjValue

@@ -18,6 +19,10 @@ function replaceBindingsRecursive(
value: string | ObjValue,
loopStepNumber: number
) {
if (value === null || value === undefined) {
return value
}

if (typeof value === "object") {
for (const [innerKey, innerValue] of Object.entries(value)) {
if (typeof innerValue === "string") {

@@ -25,7 +30,11 @@ function replaceBindingsRecursive(
innerValue,
`steps.${loopStepNumber}`
)
} else if (typeof innerValue === "object") {
} else if (
innerValue &&
isPlainObject(innerValue) &&
Object.keys(innerValue).length > 0
) {
value[innerKey] = replaceBindingsRecursive(innerValue, loopStepNumber)
}
}

@@ -7,8 +7,8 @@ import {
AutomationStepType,
AutomationIOType,
} from "@budibase/types"
import { env } from "@budibase/backend-core"
import * as automationUtils from "../automationUtils"
import environment from "../../environment"

enum Model {
GPT_35_TURBO = "gpt-3.5-turbo",

@@ -60,7 +60,7 @@ export const definition: AutomationStepSchema = {
}

export async function run({ inputs }: AutomationStepInput) {
if (!environment.OPENAI_API_KEY) {
if (!env.OPENAI_API_KEY) {
return {
success: false,
response:

@@ -77,7 +77,7 @@ export async function run({ inputs }: AutomationStepInput) {

try {
const openai = new OpenAI({
apiKey: environment.OPENAI_API_KEY,
apiKey: env.OPENAI_API_KEY,
})

const completion = await openai.chat.completions.create({

@@ -82,39 +82,73 @@ export async function run({ inputs, appId, emitter }: AutomationStepInput) {
}
const tableId = inputs.row.tableId

// clear any undefined, null or empty string properties so that they aren't updated
for (let propKey of Object.keys(inputs.row)) {
const clearRelationships =
inputs.meta?.fields?.[propKey]?.clearRelationships
if (
(inputs.row[propKey] == null || inputs.row[propKey]?.length === 0) &&
!clearRelationships
) {
delete inputs.row[propKey]
}
// Base update
let rowUpdate: Record<string, any>

// Legacy
// Find previously set values and add them to the update. Ensure empty relationships
// are added to the update if clearRelationships is true
const legacyUpdated = Object.keys(inputs.row || {}).reduce(
(acc: Record<string, any>, key: string) => {
const isEmpty = inputs.row[key] == null || inputs.row[key]?.length === 0
const fieldConfig = inputs.meta?.fields || {}

if (isEmpty) {
if (
Object.hasOwn(fieldConfig, key) &&
fieldConfig[key].clearRelationships === true
) {
// Explicitly clear the field on update
acc[key] = []
}
} else {
// Keep non-empty values
acc[key] = inputs.row[key]
}
return acc
},
{}
)

// The source of truth for inclusion in the update is: inputs.meta?.fields
const parsedUpdate = Object.keys(inputs.meta?.fields || {}).reduce(
(acc: Record<string, any>, key: string) => {
const fieldConfig = inputs.meta?.fields?.[key] || {}
// Ignore legacy config.
if (Object.hasOwn(fieldConfig, "clearRelationships")) {
return acc
}
acc[key] =
!inputs.row[key] || inputs.row[key]?.length === 0 ? "" : inputs.row[key]
return acc
},
{}
)

rowUpdate = {
tableId,
...parsedUpdate,
...legacyUpdated,
}

try {
if (tableId) {
inputs.row = await automationUtils.cleanUpRow(
inputs.row.tableId,
inputs.row
)
rowUpdate = await automationUtils.cleanUpRow(tableId, rowUpdate)

inputs.row = await automationUtils.sendAutomationAttachmentsToStorage(
inputs.row.tableId,
inputs.row
rowUpdate = await automationUtils.sendAutomationAttachmentsToStorage(
tableId,
rowUpdate
)
}
// have to clean up the row, remove the table from it
const ctx: any = buildCtx(appId, emitter, {
body: {
...inputs.row,
...rowUpdate,
_id: inputs.rowId,
},
params: {
rowId: inputs.rowId,
tableId: tableId,
tableId,
},
})
await rowController.patch(ctx)

@@ -4,6 +4,7 @@ import { loopAutomation } from "../../tests/utilities/structures"
import { context } from "@budibase/backend-core"
import * as setup from "./utilities"
import { Table } from "@budibase/types"
import * as loopUtils from "../loopUtils"
import { LoopInput, LoopStepType } from "../../definitions/automations"

describe("Attempt to run a basic loop automation", () => {

@@ -51,4 +52,98 @@ describe("Attempt to run a basic loop automation", () => {
    })
    expect(resp.steps[2].outputs.iterations).toBe(1)
  })

  describe("replaceFakeBindings", () => {
    it("should replace loop bindings in nested objects", () => {
      const originalStepInput = {
        schema: {
          name: {
            type: "string",
            constraints: {
              type: "string",
              length: { maximum: null },
              presence: false,
            },
            name: "name",
            display: { type: "Text" },
          },
        },
        row: {
          tableId: "ta_aaad4296e9f74b12b1b90ef7a84afcad",
          name: "{{ loop.currentItem.pokemon }}",
        },
      }

      const loopStepNumber = 3

      const result = loopUtils.replaceFakeBindings(
        originalStepInput,
        loopStepNumber
      )

      expect(result).toEqual({
        schema: {
          name: {
            type: "string",
            constraints: {
              type: "string",
              length: { maximum: null },
              presence: false,
            },
            name: "name",
            display: { type: "Text" },
          },
        },
        row: {
          tableId: "ta_aaad4296e9f74b12b1b90ef7a84afcad",
          name: "{{ steps.3.currentItem.pokemon }}",
        },
      })
    })

    it("should handle null values in nested objects", () => {
      const originalStepInput = {
        nullValue: null,
        nestedNull: {
          someKey: null,
        },
        validValue: "{{ loop.someValue }}",
      }

      const loopStepNumber = 2

      const result = loopUtils.replaceFakeBindings(
        originalStepInput,
        loopStepNumber
      )

      expect(result).toEqual({
        nullValue: null,
        nestedNull: {
          someKey: null,
        },
        validValue: "{{ steps.2.someValue }}",
      })
    })

    it("should handle empty objects and arrays", () => {
      const originalStepInput = {
        emptyObject: {},
        emptyArray: [],
        nestedEmpty: {
          emptyObj: {},
          emptyArr: [],
        },
      }

      const loopStepNumber = 1

      const result = loopUtils.replaceFakeBindings(
        originalStepInput,
        loopStepNumber
      )

      expect(result).toEqual(originalStepInput)
    })
  })
})
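The tests above pin down the observable contract of replaceFakeBindings: it walks the input recursively and rewrites {{ loop.* }} bindings to {{ steps.<loopStepNumber>.* }}, leaving nulls, empty containers and non-binding values untouched. A minimal sketch of a function that would satisfy these tests (an illustration, not the actual loopUtils implementation):

function replaceFakeBindings(value: any, loopStepNumber: number): any {
  if (typeof value === "string") {
    // rewrite the fake "loop." prefix to the real step-scoped binding
    return value.replace(/{{\s*loop\./g, `{{ steps.${loopStepNumber}.`)
  }
  if (Array.isArray(value)) {
    return value.map(item => replaceFakeBindings(item, loopStepNumber))
  }
  if (value !== null && typeof value === "object") {
    return Object.fromEntries(
      Object.entries(value).map(([k, v]) => [
        k,
        replaceFakeBindings(v, loopStepNumber),
      ])
    )
  }
  // null, numbers and booleans pass through unchanged
  return value
}
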
@@ -1,6 +1,4 @@
const setup = require("./utilities")

import environment from "../../environment"
import { getConfig, runStep, afterAll as _afterAll } from "./utilities"
import { OpenAI } from "openai"

jest.mock("openai", () => ({

@@ -26,42 +24,41 @@ const mockedOpenAI = OpenAI as jest.MockedClass<typeof OpenAI>
const OPENAI_PROMPT = "What is the meaning of life?"

describe("test the openai action", () => {
  let config = setup.getConfig()
  let config = getConfig()
  let resetEnv: () => void | undefined

  beforeAll(async () => {
    await config.init()
  })

  beforeEach(() => {
    environment.OPENAI_API_KEY = "abc123"
    resetEnv = config.setCoreEnv({ OPENAI_API_KEY: "abc123" })
  })

  afterAll(setup.afterAll)
  afterEach(() => {
    resetEnv()
  })

  afterAll(_afterAll)

  it("should present the correct error message when the OPENAI_API_KEY variable isn't set", async () => {
    delete environment.OPENAI_API_KEY

    let res = await setup.runStep("OPENAI", {
      prompt: OPENAI_PROMPT,
    await config.withCoreEnv({ OPENAI_API_KEY: "" }, async () => {
      let res = await runStep("OPENAI", { prompt: OPENAI_PROMPT })
      expect(res.response).toEqual(
        "OpenAI API Key not configured - please add the OPENAI_API_KEY environment variable."
      )
      expect(res.success).toBeFalsy()
    })
    expect(res.response).toEqual(
      "OpenAI API Key not configured - please add the OPENAI_API_KEY environment variable."
    )
    expect(res.success).toBeFalsy()
  })

  it("should be able to receive a response from ChatGPT given a prompt", async () => {
    const res = await setup.runStep("OPENAI", {
      prompt: OPENAI_PROMPT,
    })
    const res = await runStep("OPENAI", { prompt: OPENAI_PROMPT })
    expect(res.response).toEqual("This is a test")
    expect(res.success).toBeTruthy()
  })

  it("should present the correct error message when a prompt is not provided", async () => {
    const res = await setup.runStep("OPENAI", {
      prompt: null,
    })
    const res = await runStep("OPENAI", { prompt: null })
    expect(res.response).toEqual(
      "Budibase OpenAI Automation Failed: No prompt supplied"
    )

@@ -84,7 +81,7 @@ describe("test the openai action", () => {
    } as any)
  )

  const res = await setup.runStep("OPENAI", {
  const res = await runStep("OPENAI", {
    prompt: OPENAI_PROMPT,
  })
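The move from mutating the environment module directly to config.setCoreEnv / config.withCoreEnv keeps environment overrides scoped and reversible between tests. As a rough sketch of what such helpers typically look like (an assumption for illustration, the real TestConfiguration helpers may differ):

// Sketch only: scoped env-override helpers, assuming a mutable coreEnv object
type CoreEnv = Record<string, string | undefined>

function setCoreEnv(coreEnv: CoreEnv, overrides: CoreEnv): () => void {
  const previous: CoreEnv = {}
  for (const key of Object.keys(overrides)) {
    previous[key] = coreEnv[key]
    coreEnv[key] = overrides[key]
  }
  // the returned thunk restores the captured values, so afterEach can undo beforeEach
  return () => {
    Object.assign(coreEnv, previous)
  }
}

async function withCoreEnv(
  coreEnv: CoreEnv,
  overrides: CoreEnv,
  fn: () => Promise<void>
): Promise<void> {
  const reset = setCoreEnv(coreEnv, overrides)
  try {
    await fn() // overrides visible only inside the callback
  } finally {
    reset()
  }
}
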
@@ -4,11 +4,12 @@ import {
  AutomationStepType,
  AutomationTriggerSchema,
  AutomationTriggerStepId,
  AutomationEventType,
} from "@budibase/types"

export const definition: AutomationTriggerSchema = {
  name: "App Action",
  event: "app:trigger",
  event: AutomationEventType.APP_TRIGGER,
  icon: "Apps",
  tagline: "Automation fired from the frontend",
  description: "Trigger an automation from an action inside your app",
@@ -4,11 +4,12 @@ import {
  AutomationStepType,
  AutomationTriggerSchema,
  AutomationTriggerStepId,
  AutomationEventType,
} from "@budibase/types"

export const definition: AutomationTriggerSchema = {
  name: "Cron Trigger",
  event: "cron:trigger",
  event: AutomationEventType.CRON_TRIGGER,
  icon: "Clock",
  tagline: "Cron Trigger (<b>{{inputs.cron}}</b>)",
  description: "Triggers automation on a cron schedule.",
@@ -4,11 +4,12 @@ import {
  AutomationStepType,
  AutomationTriggerSchema,
  AutomationTriggerStepId,
  AutomationEventType,
} from "@budibase/types"

export const definition: AutomationTriggerSchema = {
  name: "Row Deleted",
  event: "row:delete",
  event: AutomationEventType.ROW_DELETE,
  icon: "TableRowRemoveCenter",
  tagline: "Row is deleted from {{inputs.enriched.table.name}}",
  description: "Fired when a row is deleted from your database",
@@ -4,11 +4,12 @@ import {
  AutomationStepType,
  AutomationTriggerSchema,
  AutomationTriggerStepId,
  AutomationEventType,
} from "@budibase/types"

export const definition: AutomationTriggerSchema = {
  name: "Row Created",
  event: "row:save",
  event: AutomationEventType.ROW_SAVE,
  icon: "TableRowAddBottom",
  tagline: "Row is added to {{inputs.enriched.table.name}}",
  description: "Fired when a row is added to your database",
@@ -4,11 +4,12 @@ import {
  AutomationStepType,
  AutomationTriggerSchema,
  AutomationTriggerStepId,
  AutomationEventType,
} from "@budibase/types"

export const definition: AutomationTriggerSchema = {
  name: "Row Updated",
  event: "row:update",
  event: AutomationEventType.ROW_UPDATE,
  icon: "Refresh",
  tagline: "Row is updated in {{inputs.enriched.table.name}}",
  description: "Fired when a row is updated in your database",
@@ -4,11 +4,12 @@ import {
  AutomationStepType,
  AutomationTriggerSchema,
  AutomationTriggerStepId,
  AutomationEventType,
} from "@budibase/types"

export const definition: AutomationTriggerSchema = {
  name: "Webhook",
  event: "web:trigger",
  event: AutomationEventType.WEBHOOK_TRIGGER,
  icon: "Send",
  tagline: "Webhook endpoint is hit",
  description: "Trigger an automation when a HTTP POST webhook is hit",
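Each trigger definition above swaps its string-literal event for the corresponding AutomationEventType member. Judging from the literals being replaced, the enum presumably maps as follows (a reconstruction from this diff, not the verbatim @budibase/types source):

// Reconstructed from the literals replaced in this diff; values inferred, not copied
export enum AutomationEventType {
  APP_TRIGGER = "app:trigger",
  CRON_TRIGGER = "cron:trigger",
  ROW_SAVE = "row:save",
  ROW_UPDATE = "row:update",
  ROW_DELETE = "row:delete",
  WEBHOOK_TRIGGER = "web:trigger",
}
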
@@ -7,12 +7,13 @@ import { automationQueue } from "./bullboard"
import { checkTestFlag } from "../utilities/redis"
import * as utils from "./utils"
import env from "../environment"
import { context, db as dbCore } from "@budibase/backend-core"
import { context, logging, db as dbCore } from "@budibase/backend-core"
import {
  Automation,
  Row,
  AutomationData,
  AutomationJob,
  AutomationEventType,
  UpdatedRowEventEmitter,
} from "@budibase/types"
import { executeInThread } from "../threads/automation"

@@ -65,34 +66,41 @@ async function queueRelevantRowAutomations(
        automationTrigger?.inputs &&
        automationTrigger.inputs.tableId === event.row.tableId
      ) {
        await automationQueue.add({ automation, event }, JOB_OPTS)
        try {
          await automationQueue.add({ automation, event }, JOB_OPTS)
        } catch (e) {
          logging.logAlert("Failed to queue automation", e)
        }
      }
    }
  })
}

emitter.on("row:save", async function (event: UpdatedRowEventEmitter) {
emitter.on(
  AutomationEventType.ROW_SAVE,
  async function (event: UpdatedRowEventEmitter) {
    /* istanbul ignore next */
    if (!event || !event.row || !event.row.tableId) {
      return
    }
    await queueRelevantRowAutomations(event, AutomationEventType.ROW_SAVE)
  }
)

emitter.on(AutomationEventType.ROW_UPDATE, async function (event) {
  /* istanbul ignore next */
  if (!event || !event.row || !event.row.tableId) {
    return
  }
  await queueRelevantRowAutomations(event, "row:save")
  await queueRelevantRowAutomations(event, AutomationEventType.ROW_UPDATE)
})

emitter.on("row:update", async function (event) {
emitter.on(AutomationEventType.ROW_DELETE, async function (event) {
  /* istanbul ignore next */
  if (!event || !event.row || !event.row.tableId) {
    return
  }
  await queueRelevantRowAutomations(event, "row:update")
})

emitter.on("row:delete", async function (event) {
  /* istanbul ignore next */
  if (!event || !event.row || !event.row.tableId) {
    return
  }
  await queueRelevantRowAutomations(event, "row:delete")
  await queueRelevantRowAutomations(event, AutomationEventType.ROW_DELETE)
})

export async function externalTrigger(

@@ -118,7 +126,6 @@ export async function externalTrigger(
    }
    params.fields = coercedFields
  }

  const data: AutomationData = { automation, event: params as any }
  if (getResponses) {
    data.event = {
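Two things change in this file: row-event subscriptions are keyed by the AutomationEventType enum instead of bare strings, and the queue add is wrapped in try/catch so a Redis/Bull failure is logged via logging.logAlert rather than left as an unhandled rejection inside an async event handler. A hedged sketch of the subscription pattern (the emitter and event shape are simplified for illustration):

import { EventEmitter } from "events"

enum AutomationEventType {
  ROW_SAVE = "row:save",
}

const emitter = new EventEmitter()

// Hypothetical stand-in for automationQueue.add(...), so the sketch is self-contained
async function queueAutomation(_event: unknown): Promise<void> {}

// Enum-keyed subscription: a typo becomes a compile error instead of a dead listener
emitter.on(
  AutomationEventType.ROW_SAVE,
  async (event: { row?: { tableId?: string } }) => {
    if (!event?.row?.tableId) {
      return // ignore malformed events, mirroring the guard in the diff
    }
    try {
      await queueAutomation(event)
    } catch (e) {
      console.error("Failed to queue automation", e) // logged, not rethrown
    }
  }
)

emitter.emit(AutomationEventType.ROW_SAVE, { row: { tableId: "ta_123" } })
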
@@ -1,10 +1,9 @@
import { Thread, ThreadType } from "../threads"
import { definitions } from "./triggerInfo"
import { automationQueue } from "./bullboard"
import newid from "../db/newid"
import { updateEntityMetadata } from "../utilities"
import { MetadataTypes } from "../constants"
import { db as dbCore, context } from "@budibase/backend-core"
import { db as dbCore, context, utils } from "@budibase/backend-core"
import { getAutomationMetadataParams } from "../db/utils"
import { cloneDeep } from "lodash/fp"
import { quotas } from "@budibase/pro"

@@ -207,7 +206,7 @@ export async function enableCronTrigger(appId: any, automation: Automation) {
    )
  }
  // make a job id rather than letting Bull decide, makes it easier to handle on way out
  const jobId = `${appId}_cron_${newid()}`
  const jobId = `${appId}_cron_${utils.newid()}`
  const job: any = await automationQueue.add(
    {
      automation,
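The local newid helper is replaced by the shared utils.newid from @budibase/backend-core (the same swap appears in the in-memory view module further down). The app-prefixed job id is what makes it "easier to handle on way out": teardown code can remove all of an app's cron jobs by pattern. Roughly, assuming Bull's glob-based removeJobs and treating the key format as given by this diff (queue wiring and cron expression here are illustrative only):

import Queue from "bull"
import { utils } from "@budibase/backend-core"

const automationQueue = new Queue("automationQueue") // simplified; real setup lives in bullboard

async function enableCron(appId: string, payload: object) {
  // predictable, app-scoped id instead of letting Bull assign one
  const jobId = `${appId}_cron_${utils.newid()}`
  await automationQueue.add(payload, { jobId, repeat: { cron: "0 * * * *" } })
}

async function disableAllCrons(appId: string) {
  // the shared prefix makes cleanup a single glob match
  await automationQueue.removeJobs(`${appId}_cron_*`)
}
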
@@ -24,16 +24,6 @@ export enum FilterTypes {
  ONE_OF = "oneOf",
}

export const NoEmptyFilterStrings = [
  FilterTypes.STRING,
  FilterTypes.FUZZY,
  FilterTypes.EQUAL,
  FilterTypes.NOT_EQUAL,
  FilterTypes.CONTAINS,
  FilterTypes.NOT_CONTAINS,
  FilterTypes.CONTAINS_ANY,
]

export const CanSwitchTypes = [
  [FieldType.JSON, FieldType.ARRAY],
  [

@@ -173,3 +163,9 @@ export const DEFAULT_INVENTORY_TABLE_ID = constants.DEFAULT_INVENTORY_TABLE_ID
export const DEFAULT_EXPENSES_TABLE_ID = constants.DEFAULT_EXPENSES_TABLE_ID
export const DEFAULT_EMPLOYEE_TABLE_ID = constants.DEFAULT_EMPLOYEE_TABLE_ID
export const DEFAULT_BB_DATASOURCE_ID = constants.DEFAULT_BB_DATASOURCE_ID
export const DEFAULT_TABLE_IDS = [
  DEFAULT_JOBS_TABLE_ID,
  DEFAULT_INVENTORY_TABLE_ID,
  DEFAULT_EXPENSES_TABLE_ID,
  DEFAULT_EMPLOYEE_TABLE_ID,
]
@@ -619,6 +619,13 @@ export const DEFAULT_EXPENSES_TABLE_SCHEMA: Table = {
  },
}

export const DEFAULT_TABLES: Table[] = [
  DEFAULT_INVENTORY_TABLE_SCHEMA,
  DEFAULT_EMPLOYEE_TABLE_SCHEMA,
  DEFAULT_JOBS_TABLE_SCHEMA,
  DEFAULT_EXPENSES_TABLE_SCHEMA,
]

export async function buildDefaultDocs() {
  const inventoryData = await tableImport(
    DEFAULT_INVENTORY_TABLE_SCHEMA,
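Together, DEFAULT_TABLE_IDS and DEFAULT_TABLES give callers a single place to ask "is this one of the built-in sample tables?" instead of comparing against four separate constants. A hedged usage sketch (the helper functions and import paths are hypothetical, not taken from this diff):

import { Table } from "@budibase/types"
// Assumed import paths: the constants come from the two files touched above
import { DEFAULT_TABLE_IDS } from "../constants"
import { DEFAULT_TABLES } from "../db/defaultData/datasource_bb_default"

// Hypothetical helpers built on the new constants
function isDefaultTable(tableId: string): boolean {
  return DEFAULT_TABLE_IDS.includes(tableId)
}

function findDefaultSchema(tableId: string): Table | undefined {
  return DEFAULT_TABLES.find(table => table._id === tableId)
}
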
@@ -1,9 +1,8 @@
import newid from "./newid"
import { Row, Document, DBView } from "@budibase/types"

// bypass the main application db config
// use in memory pouchdb directly
import { db as dbCore } from "@budibase/backend-core"
import { db as dbCore, utils } from "@budibase/backend-core"

const Pouch = dbCore.getPouch({ inMemory: true })

@@ -16,7 +15,7 @@ export async function runView(
  // use a different ID each time for the DB, make sure they
  // are always unique for each query, don't want overlap
  // which could cause 409s
  const db = new Pouch(newid())
  const db = new Pouch(utils.newid())
  try {
    // write all the docs to the in memory Pouch (remove revs)
    await db.bulkDocs(
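The per-query database name is the whole trick here: two concurrent runView calls sharing one in-memory Pouch could write docs with the same _id and collide with 409 conflicts, so each call gets its own throwaway DB. A rough sketch of the lifecycle (Pouch and utils come from the module above; the destroy-on-finish step is an assumption about the surrounding code, which this hunk truncates):

// Sketch of the throwaway in-memory DB pattern used by runView
async function withScratchDb<T>(
  docs: { _rev?: string }[],
  fn: (db: any) => Promise<T>
): Promise<T> {
  const db = new Pouch(utils.newid()) // unique name per call: no overlap, no 409s
  try {
    // strip _rev so the fresh DB accepts the docs as new writes
    await db.bulkDocs(docs.map(({ _rev, ...doc }) => doc))
    return await fn(db)
  } finally {
    await db.destroy() // assumed cleanup of the per-query DB
  }
}
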
Some files were not shown because too many files have changed in this diff