Merge branch 'develop' of github.com:Budibase/budibase into views-v2-frontend
This commit is contained in:
commit
b4b52d9266
|
@ -18,6 +18,8 @@ env:
|
||||||
BRANCH: ${{ github.event.pull_request.head.ref }}
|
BRANCH: ${{ github.event.pull_request.head.ref }}
|
||||||
BASE_BRANCH: ${{ github.event.pull_request.base.ref}}
|
BASE_BRANCH: ${{ github.event.pull_request.base.ref}}
|
||||||
PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
|
PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
|
||||||
|
NX_BASE_BRANCH: origin/${{ github.base_ref }}
|
||||||
|
USE_NX_AFFECTED: ${{ github.event_name == 'pull_request' && github.base_ref != 'master'}}
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
lint:
|
lint:
|
||||||
|
@ -25,20 +27,20 @@ jobs:
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repo and submodules
|
- name: Checkout repo and submodules
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v3
|
||||||
if: github.repository == github.event.pull_request.head.repo.full_name
|
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase'
|
||||||
with:
|
with:
|
||||||
submodules: true
|
submodules: true
|
||||||
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
|
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
|
||||||
- name: Checkout repo only
|
- name: Checkout repo only
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v3
|
||||||
if: github.repository != github.event.pull_request.head.repo.full_name
|
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase'
|
||||||
|
|
||||||
- name: Use Node.js 14.x
|
- name: Use Node.js 18.x
|
||||||
uses: actions/setup-node@v3
|
uses: actions/setup-node@v3
|
||||||
with:
|
with:
|
||||||
node-version: 14.x
|
node-version: 18.x
|
||||||
cache: "yarn"
|
cache: "yarn"
|
||||||
- run: yarn
|
- run: yarn --frozen-lockfile
|
||||||
- run: yarn lint
|
- run: yarn lint
|
||||||
|
|
||||||
build:
|
build:
|
||||||
|
@ -46,45 +48,66 @@ jobs:
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repo and submodules
|
- name: Checkout repo and submodules
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v3
|
||||||
if: github.repository == github.event.pull_request.head.repo.full_name
|
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase'
|
||||||
with:
|
with:
|
||||||
submodules: true
|
submodules: true
|
||||||
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
|
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
|
||||||
|
fetch-depth: 0
|
||||||
- name: Checkout repo only
|
- name: Checkout repo only
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v3
|
||||||
if: github.repository != github.event.pull_request.head.repo.full_name
|
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase'
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
- name: Use Node.js 14.x
|
- name: Use Node.js 18.x
|
||||||
uses: actions/setup-node@v3
|
uses: actions/setup-node@v3
|
||||||
with:
|
with:
|
||||||
node-version: 14.x
|
node-version: 18.x
|
||||||
cache: "yarn"
|
cache: "yarn"
|
||||||
- run: yarn
|
- run: yarn --frozen-lockfile
|
||||||
|
|
||||||
# Run build all the projects
|
# Run build all the projects
|
||||||
- run: yarn build
|
- name: Build
|
||||||
|
run: |
|
||||||
|
yarn build
|
||||||
# Check the types of the projects built via esbuild
|
# Check the types of the projects built via esbuild
|
||||||
- run: yarn check:types
|
- name: Check types
|
||||||
|
run: |
|
||||||
|
if ${{ env.USE_NX_AFFECTED }}; then
|
||||||
|
yarn check:types --since=${{ env.NX_BASE_BRANCH }}
|
||||||
|
else
|
||||||
|
yarn check:types
|
||||||
|
fi
|
||||||
|
|
||||||
test-libraries:
|
test-libraries:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repo and submodules
|
- name: Checkout repo and submodules
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v3
|
||||||
if: github.repository == github.event.pull_request.head.repo.full_name
|
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase'
|
||||||
with:
|
with:
|
||||||
submodules: true
|
submodules: true
|
||||||
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
|
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
|
||||||
|
fetch-depth: 0
|
||||||
- name: Checkout repo only
|
- name: Checkout repo only
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v3
|
||||||
if: github.repository != github.event.pull_request.head.repo.full_name
|
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase'
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
- name: Use Node.js 14.x
|
- name: Use Node.js 18.x
|
||||||
uses: actions/setup-node@v3
|
uses: actions/setup-node@v3
|
||||||
with:
|
with:
|
||||||
node-version: 14.x
|
node-version: 18.x
|
||||||
cache: "yarn"
|
cache: "yarn"
|
||||||
- run: yarn
|
- run: yarn --frozen-lockfile
|
||||||
- run: yarn test --ignore=@budibase/worker --ignore=@budibase/server --ignore=@budibase/pro
|
- name: Test
|
||||||
|
run: |
|
||||||
|
if ${{ env.USE_NX_AFFECTED }}; then
|
||||||
|
yarn test --ignore=@budibase/worker --ignore=@budibase/server --ignore=@budibase/pro --since=${{ env.NX_BASE_BRANCH }}
|
||||||
|
else
|
||||||
|
yarn test --ignore=@budibase/worker --ignore=@budibase/server --ignore=@budibase/pro
|
||||||
|
fi
|
||||||
- uses: codecov/codecov-action@v3
|
- uses: codecov/codecov-action@v3
|
||||||
with:
|
with:
|
||||||
token: ${{ secrets.CODECOV_TOKEN }} # not required for public repos
|
token: ${{ secrets.CODECOV_TOKEN }} # not required for public repos
|
||||||
|
@ -96,21 +119,31 @@ jobs:
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repo and submodules
|
- name: Checkout repo and submodules
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v3
|
||||||
if: github.repository == github.event.pull_request.head.repo.full_name
|
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase'
|
||||||
with:
|
with:
|
||||||
submodules: true
|
submodules: true
|
||||||
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
|
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
|
||||||
|
fetch-depth: 0
|
||||||
- name: Checkout repo only
|
- name: Checkout repo only
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v3
|
||||||
if: github.repository != github.event.pull_request.head.repo.full_name
|
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase'
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
- name: Use Node.js 14.x
|
- name: Use Node.js 18.x
|
||||||
uses: actions/setup-node@v3
|
uses: actions/setup-node@v3
|
||||||
with:
|
with:
|
||||||
node-version: 14.x
|
node-version: 18.x
|
||||||
cache: "yarn"
|
cache: "yarn"
|
||||||
- run: yarn
|
- run: yarn --frozen-lockfile
|
||||||
- run: yarn test --scope=@budibase/worker --scope=@budibase/server
|
- name: Test worker and server
|
||||||
|
run: |
|
||||||
|
if ${{ env.USE_NX_AFFECTED }}; then
|
||||||
|
yarn test --scope=@budibase/worker --scope=@budibase/server --since=${{ env.NX_BASE_BRANCH }}
|
||||||
|
else
|
||||||
|
yarn test --scope=@budibase/worker --scope=@budibase/server
|
||||||
|
fi
|
||||||
|
|
||||||
- uses: codecov/codecov-action@v3
|
- uses: codecov/codecov-action@v3
|
||||||
with:
|
with:
|
||||||
token: ${{ secrets.CODECOV_TOKEN || github.token }} # not required for public repos
|
token: ${{ secrets.CODECOV_TOKEN || github.token }} # not required for public repos
|
||||||
|
@ -119,42 +152,49 @@ jobs:
|
||||||
|
|
||||||
test-pro:
|
test-pro:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
if: github.repository == github.event.pull_request.head.repo.full_name
|
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase'
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repo and submodules
|
- name: Checkout repo and submodules
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v3
|
||||||
with:
|
with:
|
||||||
submodules: true
|
submodules: true
|
||||||
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
|
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
- name: Use Node.js 14.x
|
- name: Use Node.js 18.x
|
||||||
uses: actions/setup-node@v3
|
uses: actions/setup-node@v3
|
||||||
with:
|
with:
|
||||||
node-version: 14.x
|
node-version: 18.x
|
||||||
cache: "yarn"
|
cache: "yarn"
|
||||||
- run: yarn
|
- run: yarn --frozen-lockfile
|
||||||
- run: yarn test --scope=@budibase/pro
|
- name: Test
|
||||||
|
run: |
|
||||||
|
if ${{ env.USE_NX_AFFECTED }}; then
|
||||||
|
yarn test --scope=@budibase/pro --since=${{ env.NX_BASE_BRANCH }}
|
||||||
|
else
|
||||||
|
yarn test --scope=@budibase/pro
|
||||||
|
fi
|
||||||
|
|
||||||
integration-test:
|
integration-test:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repo and submodules
|
- name: Checkout repo and submodules
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v3
|
||||||
if: github.repository == github.event.pull_request.head.repo.full_name
|
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase'
|
||||||
with:
|
with:
|
||||||
submodules: true
|
submodules: true
|
||||||
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
|
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
|
||||||
- name: Checkout repo only
|
- name: Checkout repo only
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v3
|
||||||
if: github.repository != github.event.pull_request.head.repo.full_name
|
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase'
|
||||||
|
|
||||||
- name: Use Node.js 14.x
|
- name: Use Node.js 18.x
|
||||||
uses: actions/setup-node@v3
|
uses: actions/setup-node@v3
|
||||||
with:
|
with:
|
||||||
node-version: 14.x
|
node-version: 18.x
|
||||||
cache: "yarn"
|
cache: "yarn"
|
||||||
- run: yarn
|
- run: yarn --frozen-lockfile
|
||||||
- run: yarn build --projects=@budibase/server,@budibase/worker,@budibase/client
|
- run: yarn build --scope @budibase/server --scope @budibase/worker --scope @budibase/client
|
||||||
- name: Run tests
|
- name: Run tests
|
||||||
run: |
|
run: |
|
||||||
cd qa-core
|
cd qa-core
|
||||||
|
@ -166,13 +206,12 @@ jobs:
|
||||||
|
|
||||||
check-pro-submodule:
|
check-pro-submodule:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
if: github.repository == github.event.pull_request.head.repo.full_name
|
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase'
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repo and submodules
|
- name: Checkout repo and submodules
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v3
|
||||||
with:
|
with:
|
||||||
submodules: true
|
submodules: true
|
||||||
fetch-depth: 0
|
|
||||||
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
|
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
|
||||||
|
|
||||||
- name: Check pro commit
|
- name: Check pro commit
|
||||||
|
@ -190,6 +229,8 @@ jobs:
|
||||||
base_commit=$(git rev-parse origin/develop)
|
base_commit=$(git rev-parse origin/develop)
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
echo "target_branch=$branch"
|
||||||
|
echo "target_branch=$branch" >> "$GITHUB_OUTPUT"
|
||||||
echo "pro_commit=$pro_commit"
|
echo "pro_commit=$pro_commit"
|
||||||
echo "pro_commit=$pro_commit" >> "$GITHUB_OUTPUT"
|
echo "pro_commit=$pro_commit" >> "$GITHUB_OUTPUT"
|
||||||
echo "base_commit=$base_commit"
|
echo "base_commit=$base_commit"
|
||||||
|
@ -204,7 +245,7 @@ jobs:
|
||||||
const baseCommit = '${{ steps.get_pro_commits.outputs.base_commit }}';
|
const baseCommit = '${{ steps.get_pro_commits.outputs.base_commit }}';
|
||||||
|
|
||||||
if (submoduleCommit !== baseCommit) {
|
if (submoduleCommit !== baseCommit) {
|
||||||
console.error('Submodule commit does not match the latest commit on the develop branch.');
|
console.error('Submodule commit does not match the latest commit on the "${{ steps.get_pro_commits.outputs.target_branch }}"" branch.');
|
||||||
console.error('Refer to the pro repo to merge your changes: https://github.com/Budibase/budibase-pro/blob/develop/docs/getting_started.md')
|
console.error('Refer to the pro repo to merge your changes: https://github.com/Budibase/budibase-pro/blob/develop/docs/getting_started.md')
|
||||||
process.exit(1);
|
process.exit(1);
|
||||||
} else {
|
} else {
|
||||||
|
|
|
@ -0,0 +1,29 @@
|
||||||
|
name: check_unreleased_changes
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
branches:
|
||||||
|
- master
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
check_unreleased:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Check for unreleased changes
|
||||||
|
env:
|
||||||
|
REPO: "Budibase/budibase"
|
||||||
|
TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
run: |
|
||||||
|
RELEASE_TIMESTAMP=$(curl -s -H "Authorization: token $TOKEN" \
|
||||||
|
"https://api.github.com/repos/$REPO/releases/latest" | \
|
||||||
|
jq -r .published_at)
|
||||||
|
COMMIT_TIMESTAMP=$(curl -s -H "Authorization: token $TOKEN" \
|
||||||
|
"https://api.github.com/repos/$REPO/commits/master" | \
|
||||||
|
jq -r .commit.committer.date)
|
||||||
|
RELEASE_SECONDS=$(date --date="$RELEASE_TIMESTAMP" "+%s")
|
||||||
|
COMMIT_SECONDS=$(date --date="$COMMIT_TIMESTAMP" "+%s")
|
||||||
|
if (( COMMIT_SECONDS > RELEASE_SECONDS )); then
|
||||||
|
echo "There are unreleased changes. Please release these changes before merging."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
echo "No unreleased changes detected."
|
|
@ -44,7 +44,7 @@ jobs:
|
||||||
|
|
||||||
- uses: actions/setup-node@v1
|
- uses: actions/setup-node@v1
|
||||||
with:
|
with:
|
||||||
node-version: 14.x
|
node-version: 18.x
|
||||||
|
|
||||||
- run: yarn install --frozen-lockfile
|
- run: yarn install --frozen-lockfile
|
||||||
- name: Update versions
|
- name: Update versions
|
||||||
|
|
|
@ -60,9 +60,9 @@ jobs:
|
||||||
- name: "Get Current tag"
|
- name: "Get Current tag"
|
||||||
id: currenttag
|
id: currenttag
|
||||||
run: |
|
run: |
|
||||||
version=v$(./scripts/getCurrentVersion.sh)
|
version=$(./scripts/getCurrentVersion.sh)
|
||||||
echo 'Using tag $version'
|
echo "Using tag $version"
|
||||||
echo "::set-output name=tag::$resversionult"
|
echo "version=$version" >> "$GITHUB_OUTPUT"
|
||||||
|
|
||||||
- name: Build/release Docker images
|
- name: Build/release Docker images
|
||||||
run: |
|
run: |
|
||||||
|
@ -71,7 +71,7 @@ jobs:
|
||||||
env:
|
env:
|
||||||
DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
|
DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
|
||||||
DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
|
DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
|
||||||
BUDIBASE_RELEASE_VERSION: ${{ steps.currenttag.outputs.tag }}
|
BUDIBASE_RELEASE_VERSION: ${{ steps.currenttag.outputs.version }}
|
||||||
|
|
||||||
release-helm-chart:
|
release-helm-chart:
|
||||||
needs: [release-images]
|
needs: [release-images]
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
name: release-singleimage
|
name: Deploy Budibase Single Container Image to DockerHub
|
||||||
|
|
||||||
on:
|
on:
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
|
@ -8,13 +8,20 @@ env:
|
||||||
PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
|
PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
|
||||||
REGISTRY_URL: registry.hub.docker.com
|
REGISTRY_URL: registry.hub.docker.com
|
||||||
jobs:
|
jobs:
|
||||||
build-amd64:
|
build:
|
||||||
name: "build-amd64"
|
name: "build"
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
node-version: [14.x]
|
node-version: [14.x]
|
||||||
steps:
|
steps:
|
||||||
|
- name: Maximize build space
|
||||||
|
uses: easimon/maximize-build-space@master
|
||||||
|
with:
|
||||||
|
root-reserve-mb: 35000
|
||||||
|
swap-size-mb: 1024
|
||||||
|
remove-android: 'true'
|
||||||
|
remove-dotnet: 'true'
|
||||||
- name: Fail if not a tag
|
- name: Fail if not a tag
|
||||||
run: |
|
run: |
|
||||||
if [[ $GITHUB_REF != refs/tags/* ]]; then
|
if [[ $GITHUB_REF != refs/tags/* ]]; then
|
||||||
|
@ -27,12 +34,14 @@ jobs:
|
||||||
submodules: true
|
submodules: true
|
||||||
token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
|
token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
- name: Fail if tag is not in master
|
- name: Fail if tag is not in master
|
||||||
run: |
|
run: |
|
||||||
if ! git merge-base --is-ancestor ${{ github.sha }} origin/master; then
|
if ! git merge-base --is-ancestor ${{ github.sha }} origin/master; then
|
||||||
echo "Tag is not in master. This pipeline can only execute tags that are present on the master branch"
|
echo "Tag is not in master. This pipeline can only execute tags that are present on the master branch"
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
- name: Use Node.js ${{ matrix.node-version }}
|
- name: Use Node.js ${{ matrix.node-version }}
|
||||||
uses: actions/setup-node@v1
|
uses: actions/setup-node@v1
|
||||||
with:
|
with:
|
||||||
|
@ -68,139 +77,9 @@ jobs:
|
||||||
with:
|
with:
|
||||||
context: .
|
context: .
|
||||||
push: true
|
push: true
|
||||||
platforms: linux/amd64
|
platforms: linux/amd64,linux/arm64
|
||||||
tags: budibase/budibase,budibase/budibase:v${{ env.RELEASE_VERSION }}
|
tags: budibase/budibase,budibase/budibase:${{ env.RELEASE_VERSION }}
|
||||||
file: ./hosting/single/Dockerfile
|
file: ./hosting/single/Dockerfile
|
||||||
|
|
||||||
- name: Tag and release Budibase Azure App Service docker image
|
|
||||||
uses: docker/build-push-action@v2
|
|
||||||
with:
|
|
||||||
context: .
|
|
||||||
push: true
|
|
||||||
platforms: linux/amd64
|
|
||||||
build-args: TARGETBUILD=aas
|
|
||||||
tags: budibase/budibase-aas,budibase/budibase-aas:v${{ env.RELEASE_VERSION }}
|
|
||||||
file: ./hosting/single/Dockerfile
|
|
||||||
|
|
||||||
build-arm64:
|
|
||||||
name: "build-arm64"
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
strategy:
|
|
||||||
matrix:
|
|
||||||
node-version: [14.x]
|
|
||||||
steps:
|
|
||||||
- name: Fail if not a tag
|
|
||||||
run: |
|
|
||||||
if [[ $GITHUB_REF != refs/tags/* ]]; then
|
|
||||||
echo "Workflow Dispatch can only be run on tags"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
- name: "Checkout"
|
|
||||||
uses: actions/checkout@v2
|
|
||||||
with:
|
|
||||||
submodules: true
|
|
||||||
token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
|
|
||||||
fetch-depth: 0
|
|
||||||
- name: Fail if tag is not in master
|
|
||||||
run: |
|
|
||||||
if ! git merge-base --is-ancestor ${{ github.sha }} origin/master; then
|
|
||||||
echo "Tag is not in master. This pipeline can only execute tags that are present on the master branch"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
- name: Use Node.js ${{ matrix.node-version }}
|
|
||||||
uses: actions/setup-node@v1
|
|
||||||
with:
|
|
||||||
node-version: ${{ matrix.node-version }}
|
|
||||||
- name: Setup QEMU
|
|
||||||
uses: docker/setup-qemu-action@v1
|
|
||||||
- name: Setup Docker Buildx
|
|
||||||
id: buildx
|
|
||||||
uses: docker/setup-buildx-action@v1
|
|
||||||
- name: Run Yarn
|
|
||||||
run: yarn
|
|
||||||
- name: Update versions
|
|
||||||
run: ./scripts/updateVersions.sh
|
|
||||||
- name: Runt Yarn Lint
|
|
||||||
run: yarn lint
|
|
||||||
- name: Update versions
|
|
||||||
run: ./scripts/updateVersions.sh
|
|
||||||
- name: Run Yarn Build
|
|
||||||
run: yarn build:docker:pre
|
|
||||||
- name: Login to Docker Hub
|
|
||||||
uses: docker/login-action@v2
|
|
||||||
with:
|
|
||||||
username: ${{ secrets.DOCKER_USERNAME }}
|
|
||||||
password: ${{ secrets.DOCKER_API_KEY }}
|
|
||||||
- name: Get the latest release version
|
|
||||||
id: version
|
|
||||||
run: |
|
|
||||||
release_version=$(cat lerna.json | jq -r '.version')
|
|
||||||
echo $release_version
|
|
||||||
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
|
|
||||||
- name: Tag and release Budibase service docker image
|
|
||||||
uses: docker/build-push-action@v2
|
|
||||||
with:
|
|
||||||
context: .
|
|
||||||
push: true
|
|
||||||
platforms: linux/arm64
|
|
||||||
tags: budibase/budibase,budibase/budibase:v${{ env.RELEASE_VERSION }}
|
|
||||||
file: ./hosting/single/Dockerfile
|
|
||||||
|
|
||||||
build-aas:
|
|
||||||
name: "build-aas"
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
strategy:
|
|
||||||
matrix:
|
|
||||||
node-version: [14.x]
|
|
||||||
steps:
|
|
||||||
- name: Fail if not a tag
|
|
||||||
run: |
|
|
||||||
if [[ $GITHUB_REF != refs/tags/* ]]; then
|
|
||||||
echo "Workflow Dispatch can only be run on tags"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
- name: "Checkout"
|
|
||||||
uses: actions/checkout@v2
|
|
||||||
with:
|
|
||||||
submodules: true
|
|
||||||
token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
|
|
||||||
fetch-depth: 0
|
|
||||||
- name: Fail if tag is not in master
|
|
||||||
run: |
|
|
||||||
if ! git merge-base --is-ancestor ${{ github.sha }} origin/master; then
|
|
||||||
echo "Tag is not in master. This pipeline can only execute tags that are present on the master branch"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
- name: Use Node.js ${{ matrix.node-version }}
|
|
||||||
uses: actions/setup-node@v1
|
|
||||||
with:
|
|
||||||
node-version: ${{ matrix.node-version }}
|
|
||||||
- name: Setup QEMU
|
|
||||||
uses: docker/setup-qemu-action@v1
|
|
||||||
- name: Setup Docker Buildx
|
|
||||||
id: buildx
|
|
||||||
uses: docker/setup-buildx-action@v1
|
|
||||||
- name: Run Yarn
|
|
||||||
run: yarn
|
|
||||||
- name: Update versions
|
|
||||||
run: ./scripts/updateVersions.sh
|
|
||||||
- name: Runt Yarn Lint
|
|
||||||
run: yarn lint
|
|
||||||
- name: Update versions
|
|
||||||
run: ./scripts/updateVersions.sh
|
|
||||||
- name: Run Yarn Build
|
|
||||||
run: yarn build:docker:pre
|
|
||||||
- name: Login to Docker Hub
|
|
||||||
uses: docker/login-action@v2
|
|
||||||
with:
|
|
||||||
username: ${{ secrets.DOCKER_USERNAME }}
|
|
||||||
password: ${{ secrets.DOCKER_API_KEY }}
|
|
||||||
- name: Get the latest release version
|
|
||||||
id: version
|
|
||||||
run: |
|
|
||||||
release_version=$(cat lerna.json | jq -r '.version')
|
|
||||||
echo $release_version
|
|
||||||
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
|
|
||||||
- name: Tag and release Budibase Azure App Service docker image
|
- name: Tag and release Budibase Azure App Service docker image
|
||||||
uses: docker/build-push-action@v2
|
uses: docker/build-push-action@v2
|
||||||
with:
|
with:
|
||||||
|
|
|
@ -1,3 +1,3 @@
|
||||||
nodejs 14.21.3
|
nodejs 18.17.0
|
||||||
python 3.10.0
|
python 3.10.0
|
||||||
yarn 1.22.19
|
yarn 1.22.19
|
||||||
|
|
|
@ -1,42 +1,32 @@
|
||||||
{
|
{
|
||||||
// Use IntelliSense to learn about possible attributes.
|
// Use IntelliSense to learn about possible attributes.
|
||||||
// Hover to view descriptions of existing attributes.
|
// Hover to view descriptions of existing attributes.
|
||||||
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
|
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
|
||||||
"version": "0.2.0",
|
"version": "0.2.0",
|
||||||
"configurations": [
|
"configurations": [
|
||||||
{
|
{
|
||||||
"name": "Budibase Server",
|
"name": "Budibase Server",
|
||||||
"type": "node",
|
"type": "node",
|
||||||
"request": "launch",
|
"request": "launch",
|
||||||
"runtimeArgs": [
|
"runtimeVersion": "14.20.1",
|
||||||
"--nolazy",
|
"runtimeArgs": ["--nolazy", "-r", "ts-node/register/transpile-only"],
|
||||||
"-r",
|
"args": ["${workspaceFolder}/packages/server/src/index.ts"],
|
||||||
"ts-node/register/transpile-only"
|
"cwd": "${workspaceFolder}/packages/server"
|
||||||
],
|
},
|
||||||
"args": [
|
{
|
||||||
"${workspaceFolder}/packages/server/src/index.ts"
|
"name": "Budibase Worker",
|
||||||
],
|
"type": "node",
|
||||||
"cwd": "${workspaceFolder}/packages/server"
|
"request": "launch",
|
||||||
},
|
"runtimeVersion": "14.20.1",
|
||||||
{
|
"runtimeArgs": ["--nolazy", "-r", "ts-node/register/transpile-only"],
|
||||||
"name": "Budibase Worker",
|
"args": ["${workspaceFolder}/packages/worker/src/index.ts"],
|
||||||
"type": "node",
|
"cwd": "${workspaceFolder}/packages/worker"
|
||||||
"request": "launch",
|
}
|
||||||
"runtimeArgs": [
|
],
|
||||||
"--nolazy",
|
"compounds": [
|
||||||
"-r",
|
{
|
||||||
"ts-node/register/transpile-only"
|
"name": "Start Budibase",
|
||||||
],
|
"configurations": ["Budibase Server", "Budibase Worker"]
|
||||||
"args": [
|
}
|
||||||
"${workspaceFolder}/packages/worker/src/index.ts"
|
]
|
||||||
],
|
}
|
||||||
"cwd": "${workspaceFolder}/packages/worker"
|
|
||||||
},
|
|
||||||
],
|
|
||||||
"compounds": [
|
|
||||||
{
|
|
||||||
"name": "Start Budibase",
|
|
||||||
"configurations": ["Budibase Server", "Budibase Worker"]
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
|
|
|
@ -120,6 +120,8 @@ spec:
|
||||||
{{ end }}
|
{{ end }}
|
||||||
- name: MULTI_TENANCY
|
- name: MULTI_TENANCY
|
||||||
value: {{ .Values.globals.multiTenancy | quote }}
|
value: {{ .Values.globals.multiTenancy | quote }}
|
||||||
|
- name: OFFLINE_MODE
|
||||||
|
value: {{ .Values.globals.offlineMode | quote }}
|
||||||
- name: LOG_LEVEL
|
- name: LOG_LEVEL
|
||||||
value: {{ .Values.services.apps.logLevel | quote }}
|
value: {{ .Values.services.apps.logLevel | quote }}
|
||||||
- name: REDIS_PASSWORD
|
- name: REDIS_PASSWORD
|
||||||
|
|
|
@ -116,6 +116,8 @@ spec:
|
||||||
value: {{ .Values.services.worker.port | quote }}
|
value: {{ .Values.services.worker.port | quote }}
|
||||||
- name: MULTI_TENANCY
|
- name: MULTI_TENANCY
|
||||||
value: {{ .Values.globals.multiTenancy | quote }}
|
value: {{ .Values.globals.multiTenancy | quote }}
|
||||||
|
- name: OFFLINE_MODE
|
||||||
|
value: {{ .Values.globals.offlineMode | quote }}
|
||||||
- name: LOG_LEVEL
|
- name: LOG_LEVEL
|
||||||
value: {{ .Values.services.worker.logLevel | quote }}
|
value: {{ .Values.services.worker.logLevel | quote }}
|
||||||
- name: REDIS_PASSWORD
|
- name: REDIS_PASSWORD
|
||||||
|
|
|
@ -82,6 +82,7 @@ globals:
|
||||||
posthogToken: "phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU"
|
posthogToken: "phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU"
|
||||||
selfHosted: "1" # set to 0 for budibase cloud environment, set to 1 for self-hosted setup
|
selfHosted: "1" # set to 0 for budibase cloud environment, set to 1 for self-hosted setup
|
||||||
multiTenancy: "0" # set to 0 to disable multiple orgs, set to 1 to enable multiple orgs
|
multiTenancy: "0" # set to 0 to disable multiple orgs, set to 1 to enable multiple orgs
|
||||||
|
offlineMode: "0" # set to 1 to enable offline mode
|
||||||
accountPortalUrl: ""
|
accountPortalUrl: ""
|
||||||
accountPortalApiKey: ""
|
accountPortalApiKey: ""
|
||||||
cookieDomain: ""
|
cookieDomain: ""
|
||||||
|
|
|
@ -90,7 +90,7 @@ Component libraries are collections of components as well as the definition of t
|
||||||
|
|
||||||
#### 1. Prerequisites
|
#### 1. Prerequisites
|
||||||
|
|
||||||
- NodeJS version `14.x.x`
|
- NodeJS version `18.x.x`
|
||||||
- Python version `3.x`
|
- Python version `3.x`
|
||||||
|
|
||||||
### Using asdf (recommended)
|
### Using asdf (recommended)
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
FROM node:14-slim as build
|
FROM node:18-slim as build
|
||||||
|
|
||||||
# install node-gyp dependencies
|
# install node-gyp dependencies
|
||||||
RUN apt-get update && apt-get upgrade -y && apt-get install -y --no-install-recommends apt-utils cron g++ make python
|
RUN apt-get update && apt-get upgrade -y && apt-get install -y --no-install-recommends apt-utils cron g++ make python3
|
||||||
|
|
||||||
# add pin script
|
# add pin script
|
||||||
WORKDIR /
|
WORKDIR /
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
{
|
{
|
||||||
"version": "2.8.29-alpha.17",
|
"version": "2.9.24-alpha.3",
|
||||||
"npmClient": "yarn",
|
"npmClient": "yarn",
|
||||||
"packages": [
|
"packages": [
|
||||||
"packages/*"
|
"packages/*"
|
||||||
|
|
|
@ -34,7 +34,7 @@
|
||||||
"preinstall": "node scripts/syncProPackage.js",
|
"preinstall": "node scripts/syncProPackage.js",
|
||||||
"setup": "git config submodule.recurse true && git submodule update && node ./hosting/scripts/setup.js && yarn && yarn build && yarn dev",
|
"setup": "git config submodule.recurse true && git submodule update && node ./hosting/scripts/setup.js && yarn && yarn build && yarn dev",
|
||||||
"bootstrap": "./scripts/link-dependencies.sh && echo '***BOOTSTRAP ONLY REQUIRED FOR USE WITH ACCOUNT PORTAL***'",
|
"bootstrap": "./scripts/link-dependencies.sh && echo '***BOOTSTRAP ONLY REQUIRED FOR USE WITH ACCOUNT PORTAL***'",
|
||||||
"build": "yarn nx run-many -t=build",
|
"build": "lerna run build --stream",
|
||||||
"build:dev": "lerna run --stream prebuild && yarn nx run-many --target=build --output-style=dynamic --watch --preserveWatchOutput",
|
"build:dev": "lerna run --stream prebuild && yarn nx run-many --target=build --output-style=dynamic --watch --preserveWatchOutput",
|
||||||
"check:types": "lerna run check:types",
|
"check:types": "lerna run check:types",
|
||||||
"backend:bootstrap": "./scripts/scopeBackend.sh && yarn run bootstrap",
|
"backend:bootstrap": "./scripts/scopeBackend.sh && yarn run bootstrap",
|
||||||
|
@ -109,7 +109,7 @@
|
||||||
"@budibase/types": "0.0.0"
|
"@budibase/types": "0.0.0"
|
||||||
},
|
},
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=14.0.0 <15.0.0"
|
"node": ">=18.0.0 <19.0.0"
|
||||||
},
|
},
|
||||||
"dependencies": {}
|
"dependencies": {}
|
||||||
}
|
}
|
||||||
|
|
|
@ -0,0 +1,4 @@
|
||||||
|
*
|
||||||
|
!dist/**/*
|
||||||
|
dist/tsconfig.build.tsbuildinfo
|
||||||
|
!package.json
|
|
@ -2,11 +2,11 @@
|
||||||
"name": "@budibase/backend-core",
|
"name": "@budibase/backend-core",
|
||||||
"version": "0.0.0",
|
"version": "0.0.0",
|
||||||
"description": "Budibase backend core libraries used in server and worker",
|
"description": "Budibase backend core libraries used in server and worker",
|
||||||
"main": "dist/src/index.js",
|
"main": "dist/index.js",
|
||||||
"types": "dist/src/index.d.ts",
|
"types": "dist/src/index.d.ts",
|
||||||
"exports": {
|
"exports": {
|
||||||
".": "./dist/src/index.js",
|
".": "./dist/index.js",
|
||||||
"./tests": "./dist/tests/index.js",
|
"./tests": "./dist/tests.js",
|
||||||
"./*": "./dist/*.js"
|
"./*": "./dist/*.js"
|
||||||
},
|
},
|
||||||
"author": "Budibase",
|
"author": "Budibase",
|
||||||
|
@ -14,7 +14,7 @@
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"prebuild": "rimraf dist/",
|
"prebuild": "rimraf dist/",
|
||||||
"prepack": "cp package.json dist",
|
"prepack": "cp package.json dist",
|
||||||
"build": "tsc -p tsconfig.build.json",
|
"build": "node ./scripts/build.js && tsc -p tsconfig.build.json --emitDeclarationOnly --paths null",
|
||||||
"build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput",
|
"build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput",
|
||||||
"check:types": "tsc -p tsconfig.json --noEmit --paths null",
|
"check:types": "tsc -p tsconfig.json --noEmit --paths null",
|
||||||
"test": "bash scripts/test.sh",
|
"test": "bash scripts/test.sh",
|
||||||
|
@ -88,5 +88,20 @@
|
||||||
"ts-node": "10.8.1",
|
"ts-node": "10.8.1",
|
||||||
"tsconfig-paths": "4.0.0",
|
"tsconfig-paths": "4.0.0",
|
||||||
"typescript": "4.7.3"
|
"typescript": "4.7.3"
|
||||||
|
},
|
||||||
|
"nx": {
|
||||||
|
"targets": {
|
||||||
|
"build": {
|
||||||
|
"dependsOn": [
|
||||||
|
{
|
||||||
|
"projects": [
|
||||||
|
"@budibase/shared-core",
|
||||||
|
"@budibase/types"
|
||||||
|
],
|
||||||
|
"target": "build"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -1 +0,0 @@
|
||||||
export * from "./src/plugin"
|
|
|
@ -0,0 +1,6 @@
|
||||||
|
#!/usr/bin/node
|
||||||
|
const coreBuild = require("../../../scripts/build")
|
||||||
|
|
||||||
|
coreBuild("./src/plugin/index.ts", "./dist/plugins.js")
|
||||||
|
coreBuild("./src/index.ts", "./dist/index.js")
|
||||||
|
coreBuild("./tests/index.ts", "./dist/tests.js")
|
|
@ -8,6 +8,7 @@ import {
|
||||||
DatabasePutOpts,
|
DatabasePutOpts,
|
||||||
DatabaseCreateIndexOpts,
|
DatabaseCreateIndexOpts,
|
||||||
DatabaseDeleteIndexOpts,
|
DatabaseDeleteIndexOpts,
|
||||||
|
DocExistsResponse,
|
||||||
Document,
|
Document,
|
||||||
isDocument,
|
isDocument,
|
||||||
} from "@budibase/types"
|
} from "@budibase/types"
|
||||||
|
@ -120,6 +121,19 @@ export class DatabaseImpl implements Database {
|
||||||
return this.updateOutput(() => db.get(id))
|
return this.updateOutput(() => db.get(id))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
async docExists(docId: string): Promise<DocExistsResponse> {
|
||||||
|
const db = await this.checkSetup()
|
||||||
|
let _rev, exists
|
||||||
|
try {
|
||||||
|
const { etag } = await db.head(docId)
|
||||||
|
_rev = etag
|
||||||
|
exists = true
|
||||||
|
} catch (err) {
|
||||||
|
exists = false
|
||||||
|
}
|
||||||
|
return { _rev, exists }
|
||||||
|
}
|
||||||
|
|
||||||
async remove(idOrDoc: string | Document, rev?: string) {
|
async remove(idOrDoc: string | Document, rev?: string) {
|
||||||
const db = await this.checkSetup()
|
const db = await this.checkSetup()
|
||||||
let _id: string
|
let _id: string
|
||||||
|
|
|
@ -78,7 +78,6 @@ export const BUILTIN_PERMISSIONS = {
|
||||||
permissions: [
|
permissions: [
|
||||||
new Permission(PermissionType.QUERY, PermissionLevel.READ),
|
new Permission(PermissionType.QUERY, PermissionLevel.READ),
|
||||||
new Permission(PermissionType.TABLE, PermissionLevel.READ),
|
new Permission(PermissionType.TABLE, PermissionLevel.READ),
|
||||||
new Permission(PermissionType.VIEW, PermissionLevel.READ),
|
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
WRITE: {
|
WRITE: {
|
||||||
|
@ -87,7 +86,6 @@ export const BUILTIN_PERMISSIONS = {
|
||||||
permissions: [
|
permissions: [
|
||||||
new Permission(PermissionType.QUERY, PermissionLevel.WRITE),
|
new Permission(PermissionType.QUERY, PermissionLevel.WRITE),
|
||||||
new Permission(PermissionType.TABLE, PermissionLevel.WRITE),
|
new Permission(PermissionType.TABLE, PermissionLevel.WRITE),
|
||||||
new Permission(PermissionType.VIEW, PermissionLevel.READ),
|
|
||||||
new Permission(PermissionType.AUTOMATION, PermissionLevel.EXECUTE),
|
new Permission(PermissionType.AUTOMATION, PermissionLevel.EXECUTE),
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
|
@ -98,7 +96,6 @@ export const BUILTIN_PERMISSIONS = {
|
||||||
new Permission(PermissionType.TABLE, PermissionLevel.WRITE),
|
new Permission(PermissionType.TABLE, PermissionLevel.WRITE),
|
||||||
new Permission(PermissionType.USER, PermissionLevel.READ),
|
new Permission(PermissionType.USER, PermissionLevel.READ),
|
||||||
new Permission(PermissionType.AUTOMATION, PermissionLevel.EXECUTE),
|
new Permission(PermissionType.AUTOMATION, PermissionLevel.EXECUTE),
|
||||||
new Permission(PermissionType.VIEW, PermissionLevel.READ),
|
|
||||||
new Permission(PermissionType.WEBHOOK, PermissionLevel.READ),
|
new Permission(PermissionType.WEBHOOK, PermissionLevel.READ),
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
|
@ -109,7 +106,6 @@ export const BUILTIN_PERMISSIONS = {
|
||||||
new Permission(PermissionType.TABLE, PermissionLevel.ADMIN),
|
new Permission(PermissionType.TABLE, PermissionLevel.ADMIN),
|
||||||
new Permission(PermissionType.USER, PermissionLevel.ADMIN),
|
new Permission(PermissionType.USER, PermissionLevel.ADMIN),
|
||||||
new Permission(PermissionType.AUTOMATION, PermissionLevel.ADMIN),
|
new Permission(PermissionType.AUTOMATION, PermissionLevel.ADMIN),
|
||||||
new Permission(PermissionType.VIEW, PermissionLevel.ADMIN),
|
|
||||||
new Permission(PermissionType.WEBHOOK, PermissionLevel.READ),
|
new Permission(PermissionType.WEBHOOK, PermissionLevel.READ),
|
||||||
new Permission(PermissionType.QUERY, PermissionLevel.ADMIN),
|
new Permission(PermissionType.QUERY, PermissionLevel.ADMIN),
|
||||||
],
|
],
|
||||||
|
|
|
@ -80,7 +80,7 @@ function getRedisConfig() {
|
||||||
|
|
||||||
export function setupEnv(...envs: any[]) {
|
export function setupEnv(...envs: any[]) {
|
||||||
const couch = getCouchConfig(),
|
const couch = getCouchConfig(),
|
||||||
minio = getCouchConfig(),
|
minio = getMinioConfig(),
|
||||||
redis = getRedisConfig()
|
redis = getRedisConfig()
|
||||||
const configs = [
|
const configs = [
|
||||||
{ key: "COUCH_DB_PORT", value: couch.port },
|
{ key: "COUCH_DB_PORT", value: couch.port },
|
||||||
|
|
|
@ -12,7 +12,11 @@
|
||||||
"declaration": true,
|
"declaration": true,
|
||||||
"types": ["node", "jest"],
|
"types": ["node", "jest"],
|
||||||
"outDir": "dist",
|
"outDir": "dist",
|
||||||
"skipLibCheck": true
|
"skipLibCheck": true,
|
||||||
|
"paths": {
|
||||||
|
"@budibase/types": ["../types/src"],
|
||||||
|
"@budibase/shared-core": ["../shared-core/src"]
|
||||||
|
}
|
||||||
},
|
},
|
||||||
"include": ["**/*.js", "**/*.ts"],
|
"include": ["**/*.js", "**/*.ts"],
|
||||||
"exclude": [
|
"exclude": [
|
||||||
|
|
|
@ -1,12 +1,4 @@
|
||||||
{
|
{
|
||||||
"extends": "./tsconfig.build.json",
|
"extends": "./tsconfig.build.json",
|
||||||
"compilerOptions": {
|
|
||||||
"composite": true,
|
|
||||||
"baseUrl": ".",
|
|
||||||
"paths": {
|
|
||||||
"@budibase/types": ["../types/src"],
|
|
||||||
"@budibase/shared-core": ["../shared-core/src"]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"exclude": ["node_modules", "dist"]
|
"exclude": ["node_modules", "dist"]
|
||||||
}
|
}
|
||||||
|
|
|
@ -98,8 +98,7 @@
|
||||||
{
|
{
|
||||||
"projects": [
|
"projects": [
|
||||||
"@budibase/string-templates",
|
"@budibase/string-templates",
|
||||||
"@budibase/shared-core",
|
"@budibase/shared-core"
|
||||||
"@budibase/types"
|
|
||||||
],
|
],
|
||||||
"target": "build"
|
"target": "build"
|
||||||
}
|
}
|
||||||
|
|
|
@ -133,9 +133,7 @@
|
||||||
"dependsOn": [
|
"dependsOn": [
|
||||||
{
|
{
|
||||||
"projects": [
|
"projects": [
|
||||||
"@budibase/shared-core",
|
"@budibase/string-templates"
|
||||||
"@budibase/string-templates",
|
|
||||||
"@budibase/types"
|
|
||||||
],
|
],
|
||||||
"target": "build"
|
"target": "build"
|
||||||
}
|
}
|
||||||
|
@ -145,9 +143,7 @@
|
||||||
"dependsOn": [
|
"dependsOn": [
|
||||||
{
|
{
|
||||||
"projects": [
|
"projects": [
|
||||||
"@budibase/shared-core",
|
"@budibase/string-templates"
|
||||||
"@budibase/string-templates",
|
|
||||||
"@budibase/types"
|
|
||||||
],
|
],
|
||||||
"target": "build"
|
"target": "build"
|
||||||
}
|
}
|
||||||
|
@ -157,9 +153,7 @@
|
||||||
"dependsOn": [
|
"dependsOn": [
|
||||||
{
|
{
|
||||||
"projects": [
|
"projects": [
|
||||||
"@budibase/shared-core",
|
"@budibase/string-templates"
|
||||||
"@budibase/string-templates",
|
|
||||||
"@budibase/types"
|
|
||||||
],
|
],
|
||||||
"target": "build"
|
"target": "build"
|
||||||
}
|
}
|
||||||
|
|
|
@ -108,7 +108,13 @@
|
||||||
/****************************************************/
|
/****************************************************/
|
||||||
|
|
||||||
const getInputData = (testData, blockInputs) => {
|
const getInputData = (testData, blockInputs) => {
|
||||||
let newInputData = cloneDeep(testData || blockInputs)
|
// Test data is not cloned for reactivity
|
||||||
|
let newInputData = testData || cloneDeep(blockInputs)
|
||||||
|
|
||||||
|
// Ensures the app action fields are populated
|
||||||
|
if (block.event === "app:trigger" && !newInputData?.fields) {
|
||||||
|
newInputData = cloneDeep(blockInputs)
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* TODO - Remove after November 2023
|
* TODO - Remove after November 2023
|
||||||
|
|
|
@ -1,10 +1,12 @@
|
||||||
<script>
|
<script>
|
||||||
import { Select, Label, Stepper } from "@budibase/bbui"
|
import { Select, Label } from "@budibase/bbui"
|
||||||
import { currentAsset, store } from "builderStore"
|
import { currentAsset, store } from "builderStore"
|
||||||
import { getActionProviderComponents } from "builderStore/dataBinding"
|
import { getActionProviderComponents } from "builderStore/dataBinding"
|
||||||
import { onMount } from "svelte"
|
import { onMount } from "svelte"
|
||||||
|
import DrawerBindableInput from "components/common/bindings/DrawerBindableInput.svelte"
|
||||||
|
|
||||||
export let parameters
|
export let parameters
|
||||||
|
export let bindings = []
|
||||||
|
|
||||||
$: actionProviders = getActionProviderComponents(
|
$: actionProviders = getActionProviderComponents(
|
||||||
$currentAsset,
|
$currentAsset,
|
||||||
|
@ -51,7 +53,11 @@
|
||||||
<Select bind:value={parameters.type} options={typeOptions} />
|
<Select bind:value={parameters.type} options={typeOptions} />
|
||||||
{#if parameters.type === "specific"}
|
{#if parameters.type === "specific"}
|
||||||
<Label small>Number</Label>
|
<Label small>Number</Label>
|
||||||
<Stepper bind:value={parameters.number} />
|
<DrawerBindableInput
|
||||||
|
{bindings}
|
||||||
|
value={parameters.number}
|
||||||
|
on:change={e => (parameters.number = e.detail)}
|
||||||
|
/>
|
||||||
{/if}
|
{/if}
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
|
|
@ -17,7 +17,7 @@
|
||||||
import { generate } from "shortid"
|
import { generate } from "shortid"
|
||||||
import { LuceneUtils, Constants } from "@budibase/frontend-core"
|
import { LuceneUtils, Constants } from "@budibase/frontend-core"
|
||||||
import { getFields } from "helpers/searchFields"
|
import { getFields } from "helpers/searchFields"
|
||||||
import { createEventDispatcher } from "svelte"
|
import { createEventDispatcher, onMount } from "svelte"
|
||||||
|
|
||||||
export let schemaFields
|
export let schemaFields
|
||||||
export let filters = []
|
export let filters = []
|
||||||
|
@ -64,6 +64,15 @@
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
onMount(() => {
|
||||||
|
parseFilters(filters)
|
||||||
|
rawFilters.forEach(filter => {
|
||||||
|
filter.type =
|
||||||
|
schemaFields.find(field => field.name === filter.field)?.type ||
|
||||||
|
filter.type
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
// Add field key prefixes and a special metadata filter object to indicate
|
// Add field key prefixes and a special metadata filter object to indicate
|
||||||
// whether to use the "match all" or "match any" behaviour
|
// whether to use the "match all" or "match any" behaviour
|
||||||
const enrichFilters = (rawFilters, matchAny) => {
|
const enrichFilters = (rawFilters, matchAny) => {
|
||||||
|
|
|
@ -2,14 +2,15 @@
|
||||||
import { Button, Layout } from "@budibase/bbui"
|
import { Button, Layout } from "@budibase/bbui"
|
||||||
import DatasourceNavigator from "components/backend/DatasourceNavigator/DatasourceNavigator.svelte"
|
import DatasourceNavigator from "components/backend/DatasourceNavigator/DatasourceNavigator.svelte"
|
||||||
import Panel from "components/design/Panel.svelte"
|
import Panel from "components/design/Panel.svelte"
|
||||||
import { isActive, goto, redirect } from "@roxi/routify"
|
import { isActive, redirect, goto, params } from "@roxi/routify"
|
||||||
import BetaButton from "./_components/BetaButton.svelte"
|
import BetaButton from "./_components/BetaButton.svelte"
|
||||||
import { datasources } from "stores/backend"
|
import { datasources } from "stores/backend"
|
||||||
|
|
||||||
$: {
|
$: {
|
||||||
// If we ever don't have any data other than the users table, prompt the
|
// If we ever don't have any data other than the users table, prompt the
|
||||||
// user to add some
|
// user to add some
|
||||||
if (!$datasources.hasData) {
|
// Don't redirect if setting up google sheets, or we lose the query parameter
|
||||||
|
if (!$datasources.hasData && !$params["?continue_google_setup"]) {
|
||||||
$redirect("./new")
|
$redirect("./new")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -127,6 +127,14 @@ export default defineConfig(({ mode }) => {
|
||||||
find: "helpers",
|
find: "helpers",
|
||||||
replacement: path.resolve("./src/helpers"),
|
replacement: path.resolve("./src/helpers"),
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
find: "@budibase/types",
|
||||||
|
replacement: path.resolve("../types/src"),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
find: "@budibase/shared-core",
|
||||||
|
replacement: path.resolve("../shared-core/src"),
|
||||||
|
},
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
|
@ -2,16 +2,16 @@
|
||||||
"name": "@budibase/cli",
|
"name": "@budibase/cli",
|
||||||
"version": "0.0.0",
|
"version": "0.0.0",
|
||||||
"description": "Budibase CLI, for developers, self hosting and migrations.",
|
"description": "Budibase CLI, for developers, self hosting and migrations.",
|
||||||
"main": "dist/src/index.js",
|
"main": "dist/index.js",
|
||||||
"bin": {
|
"bin": {
|
||||||
"budi": "dist/src/index.js"
|
"budi": "dist/index.js"
|
||||||
},
|
},
|
||||||
"author": "Budibase",
|
"author": "Budibase",
|
||||||
"license": "GPL-3.0",
|
"license": "GPL-3.0",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"prebuild": "rm -rf prebuilds 2> /dev/null && cp -r ../../node_modules/leveldown/prebuilds prebuilds",
|
"prebuild": "rm -rf prebuilds 2> /dev/null && cp -r ../../node_modules/leveldown/prebuilds prebuilds",
|
||||||
"rename": "renamer --find .node --replace .fake 'prebuilds/**'",
|
"rename": "renamer --find .node --replace .fake 'prebuilds/**'",
|
||||||
"tsc": "tsc -p tsconfig.build.json",
|
"tsc": "node ../../scripts/build.js",
|
||||||
"pkg": "pkg . --out-path build --no-bytecode --public --public-packages \"*\" -C GZip",
|
"pkg": "pkg . --out-path build --no-bytecode --public --public-packages \"*\" -C GZip",
|
||||||
"build": "yarn prebuild && yarn rename && yarn tsc && yarn pkg && yarn postbuild",
|
"build": "yarn prebuild && yarn rename && yarn tsc && yarn pkg && yarn postbuild",
|
||||||
"check:types": "tsc -p tsconfig.json --noEmit --paths null",
|
"check:types": "tsc -p tsconfig.json --noEmit --paths null",
|
||||||
|
@ -19,12 +19,11 @@
|
||||||
},
|
},
|
||||||
"pkg": {
|
"pkg": {
|
||||||
"targets": [
|
"targets": [
|
||||||
"node16-linux",
|
"node18-linux",
|
||||||
"node16-win",
|
"node18-win",
|
||||||
"node16-macos"
|
"node18-macos"
|
||||||
],
|
],
|
||||||
"assets": [
|
"assets": [
|
||||||
"node_modules/@budibase/backend-core/dist/**/*",
|
|
||||||
"prebuilds/**/*"
|
"prebuilds/**/*"
|
||||||
],
|
],
|
||||||
"outputPath": "build"
|
"outputPath": "build"
|
||||||
|
@ -64,20 +63,5 @@
|
||||||
"renamer": "^4.0.0",
|
"renamer": "^4.0.0",
|
||||||
"ts-node": "^10.9.1",
|
"ts-node": "^10.9.1",
|
||||||
"typescript": "4.7.3"
|
"typescript": "4.7.3"
|
||||||
},
|
|
||||||
"nx": {
|
|
||||||
"targets": {
|
|
||||||
"build": {
|
|
||||||
"dependsOn": [
|
|
||||||
{
|
|
||||||
"projects": [
|
|
||||||
"@budibase/backend-core",
|
|
||||||
"@budibase/string-templates"
|
|
||||||
],
|
|
||||||
"target": "build"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,5 +1,6 @@
|
||||||
import util from "util"
|
import util from "util"
|
||||||
const runCommand = util.promisify(require("child_process").exec)
|
import childProcess from "child_process"
|
||||||
|
const runCommand = util.promisify(childProcess.exec)
|
||||||
|
|
||||||
export async function exec(command: string, dir = "./") {
|
export async function exec(command: string, dir = "./") {
|
||||||
const { stdout } = await runCommand(command, { cwd: dir })
|
const { stdout } = await runCommand(command, { cwd: dir })
|
||||||
|
@ -16,12 +17,12 @@ export async function utilityInstalled(utilName: string) {
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function runPkgCommand(command: string, dir = "./") {
|
export async function runPkgCommand(command: string, dir = "./") {
|
||||||
const yarn = await exports.utilityInstalled("yarn")
|
const yarn = await utilityInstalled("yarn")
|
||||||
const npm = await exports.utilityInstalled("npm")
|
const npm = await utilityInstalled("npm")
|
||||||
if (!yarn && !npm) {
|
if (!yarn && !npm) {
|
||||||
throw new Error("Must have yarn or npm installed to run build.")
|
throw new Error("Must have yarn or npm installed to run build.")
|
||||||
}
|
}
|
||||||
const npmCmd = command === "install" ? `npm ${command}` : `npm run ${command}`
|
const npmCmd = command === "install" ? `npm ${command}` : `npm run ${command}`
|
||||||
const cmd = yarn ? `yarn ${command} --ignore-engines` : npmCmd
|
const cmd = yarn ? `yarn ${command} --ignore-engines` : npmCmd
|
||||||
await exports.exec(cmd, dir)
|
await exec(cmd, dir)
|
||||||
}
|
}
|
||||||
|
|
|
@ -5,7 +5,7 @@ import { error } from "./utils"
|
||||||
|
|
||||||
const PREBUILDS = "prebuilds"
|
const PREBUILDS = "prebuilds"
|
||||||
const ARCH = `${os.platform()}-${os.arch()}`
|
const ARCH = `${os.platform()}-${os.arch()}`
|
||||||
const PREBUILD_DIR = join(process.execPath, "..", PREBUILDS, ARCH)
|
const PREBUILD_DIR = join(process.execPath, "..", "cli", PREBUILDS, ARCH)
|
||||||
|
|
||||||
// running as built CLI pkg bundle
|
// running as built CLI pkg bundle
|
||||||
if (!process.argv[0].includes("node")) {
|
if (!process.argv[0].includes("node")) {
|
||||||
|
@ -13,17 +13,19 @@ if (!process.argv[0].includes("node")) {
|
||||||
}
|
}
|
||||||
|
|
||||||
function checkForBinaries() {
|
function checkForBinaries() {
|
||||||
const readDir = join(__filename, "..", "..", "..", PREBUILDS, ARCH)
|
const readDir = join(__filename, "..", "..", "..", "cli", PREBUILDS, ARCH)
|
||||||
if (fs.existsSync(PREBUILD_DIR) || !fs.existsSync(readDir)) {
|
if (fs.existsSync(PREBUILD_DIR) || !fs.existsSync(readDir)) {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
const natives = fs.readdirSync(readDir)
|
const natives = fs.readdirSync(readDir)
|
||||||
if (fs.existsSync(readDir)) {
|
if (fs.existsSync(readDir)) {
|
||||||
fs.mkdirSync(PREBUILD_DIR, { recursive: true })
|
const writePath = join(process.execPath, PREBUILDS, ARCH)
|
||||||
|
fs.mkdirSync(writePath, { recursive: true })
|
||||||
for (let native of natives) {
|
for (let native of natives) {
|
||||||
const filename = `${native.split(".fake")[0]}.node`
|
const filename = `${native.split(".fake")[0]}.node`
|
||||||
fs.cpSync(join(readDir, native), join(PREBUILD_DIR, filename))
|
fs.cpSync(join(readDir, native), join(writePath, filename))
|
||||||
}
|
}
|
||||||
|
console.log("copied something")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -39,8 +41,9 @@ function cleanup(evt?: number) {
|
||||||
)
|
)
|
||||||
console.error(error(evt))
|
console.error(error(evt))
|
||||||
}
|
}
|
||||||
if (fs.existsSync(PREBUILD_DIR)) {
|
const path = join(process.execPath, PREBUILDS)
|
||||||
fs.rmSync(PREBUILD_DIR, { recursive: true })
|
if (fs.existsSync(path)) {
|
||||||
|
fs.rmSync(path, { recursive: true })
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -10,7 +10,12 @@
|
||||||
"incremental": true,
|
"incremental": true,
|
||||||
"types": [ "node", "jest" ],
|
"types": [ "node", "jest" ],
|
||||||
"outDir": "dist",
|
"outDir": "dist",
|
||||||
"skipLibCheck": true
|
"skipLibCheck": true,
|
||||||
|
"paths": {
|
||||||
|
"@budibase/types": ["../types/src"],
|
||||||
|
"@budibase/backend-core": ["../backend-core/src"],
|
||||||
|
"@budibase/backend-core/*": ["../backend-core/*"]
|
||||||
|
}
|
||||||
},
|
},
|
||||||
"include": [
|
"include": [
|
||||||
"src/**/*"
|
"src/**/*"
|
||||||
|
|
|
@ -5,12 +5,7 @@
|
||||||
"declaration": true,
|
"declaration": true,
|
||||||
"sourceMap": true,
|
"sourceMap": true,
|
||||||
"baseUrl": ".",
|
"baseUrl": ".",
|
||||||
"resolveJsonModule": true,
|
"resolveJsonModule": true
|
||||||
"paths": {
|
|
||||||
"@budibase/types": ["../types/src"],
|
|
||||||
"@budibase/backend-core": ["../backend-core/src"],
|
|
||||||
"@budibase/backend-core/*": ["../backend-core/*"]
|
|
||||||
}
|
|
||||||
},
|
},
|
||||||
"ts-node": {
|
"ts-node": {
|
||||||
"require": ["tsconfig-paths/register"],
|
"require": ["tsconfig-paths/register"],
|
||||||
|
|
|
@ -2445,6 +2445,7 @@
|
||||||
"name": "Form Step",
|
"name": "Form Step",
|
||||||
"icon": "AssetsAdded",
|
"icon": "AssetsAdded",
|
||||||
"hasChildren": true,
|
"hasChildren": true,
|
||||||
|
"requiredAncestors": ["form"],
|
||||||
"illegalChildren": ["section", "form", "formstep", "formblock"],
|
"illegalChildren": ["section", "form", "formstep", "formblock"],
|
||||||
"styles": ["size"],
|
"styles": ["size"],
|
||||||
"size": {
|
"size": {
|
||||||
|
@ -2464,6 +2465,7 @@
|
||||||
"fieldgroup": {
|
"fieldgroup": {
|
||||||
"name": "Field Group",
|
"name": "Field Group",
|
||||||
"icon": "Group",
|
"icon": "Group",
|
||||||
|
"requiredAncestors": ["form"],
|
||||||
"illegalChildren": ["section"],
|
"illegalChildren": ["section"],
|
||||||
"styles": ["size"],
|
"styles": ["size"],
|
||||||
"hasChildren": true,
|
"hasChildren": true,
|
||||||
|
|
|
@ -250,7 +250,7 @@
|
||||||
} else if (type === "first") {
|
} else if (type === "first") {
|
||||||
currentStep.set(1)
|
currentStep.set(1)
|
||||||
} else if (type === "specific" && number && !isNaN(number)) {
|
} else if (type === "specific" && number && !isNaN(number)) {
|
||||||
currentStep.set(number)
|
currentStep.set(parseInt(number))
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
setStep: step => {
|
setStep: step => {
|
||||||
|
|
|
@ -27,7 +27,6 @@
|
||||||
"array",
|
"array",
|
||||||
"attachment",
|
"attachment",
|
||||||
"boolean",
|
"boolean",
|
||||||
"formula",
|
|
||||||
"json",
|
"json",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
|
@ -1 +1 @@
|
||||||
Subproject commit cf3bef2aad9c739111b306fd0712397adc363f81
|
Subproject commit 9b9c8cc08f271bfc5dd401860f344f6eb336ab35
|
|
@ -1,4 +1,4 @@
|
||||||
FROM node:14-slim
|
FROM node:18-slim
|
||||||
|
|
||||||
LABEL com.centurylinklabs.watchtower.lifecycle.pre-check="scripts/watchtower-hooks/pre-check.sh"
|
LABEL com.centurylinklabs.watchtower.lifecycle.pre-check="scripts/watchtower-hooks/pre-check.sh"
|
||||||
LABEL com.centurylinklabs.watchtower.lifecycle.pre-update="scripts/watchtower-hooks/pre-update.sh"
|
LABEL com.centurylinklabs.watchtower.lifecycle.pre-update="scripts/watchtower-hooks/pre-update.sh"
|
||||||
|
@ -18,7 +18,7 @@ ENV TOP_LEVEL_PATH=/
|
||||||
|
|
||||||
# handle node-gyp
|
# handle node-gyp
|
||||||
RUN apt-get update \
|
RUN apt-get update \
|
||||||
&& apt-get install -y --no-install-recommends g++ make python
|
&& apt-get install -y --no-install-recommends g++ make python3
|
||||||
RUN yarn global add pm2
|
RUN yarn global add pm2
|
||||||
|
|
||||||
# Install client for oracle datasource
|
# Install client for oracle datasource
|
||||||
|
|
|
@ -100,7 +100,7 @@
|
||||||
"memorystream": "0.3.1",
|
"memorystream": "0.3.1",
|
||||||
"mongodb": "5.7",
|
"mongodb": "5.7",
|
||||||
"mssql": "9.1.1",
|
"mssql": "9.1.1",
|
||||||
"mysql2": "2.3.3",
|
"mysql2": "3.5.2",
|
||||||
"node-fetch": "2.6.7",
|
"node-fetch": "2.6.7",
|
||||||
"object-sizeof": "2.6.1",
|
"object-sizeof": "2.6.1",
|
||||||
"open": "8.4.0",
|
"open": "8.4.0",
|
||||||
|
|
|
@ -5,8 +5,9 @@ if [[ -n $CI ]]
|
||||||
then
|
then
|
||||||
# --runInBand performs better in ci where resources are limited
|
# --runInBand performs better in ci where resources are limited
|
||||||
export NODE_OPTIONS="--max-old-space-size=4096"
|
export NODE_OPTIONS="--max-old-space-size=4096"
|
||||||
echo "jest --coverage --runInBand --forceExit --bail"
|
node ../../node_modules/jest/bin/jest.js --version
|
||||||
jest --coverage --runInBand --forceExit --bail
|
echo "jest --coverage --maxWorkers=2 --forceExit --workerIdleMemoryLimit=2000MB --bail"
|
||||||
|
jest --coverage --maxWorkers=2 --forceExit --workerIdleMemoryLimit=2000MB --bail
|
||||||
else
|
else
|
||||||
# --maxWorkers performs better in development
|
# --maxWorkers performs better in development
|
||||||
echo "jest --coverage --maxWorkers=2 --forceExit"
|
echo "jest --coverage --maxWorkers=2 --forceExit"
|
||||||
|
|
|
@@ -39,9 +39,8 @@ import {
 } from "../../db/defaultData/datasource_bb_default"
 import { removeAppFromUserRoles } from "../../utilities/workerRequests"
 import { stringToReadStream } from "../../utilities"
-import { doesUserHaveLock, getLocksById } from "../../utilities/redis"
+import { doesUserHaveLock } from "../../utilities/redis"
 import { cleanupAutomations } from "../../automations/utils"
-import { checkAppMetadata } from "../../automations/logging"
 import { getUniqueRows } from "../../utilities/usageQuota/rows"
 import { groups, licensing, quotas } from "@budibase/pro"
 import {
@@ -51,7 +50,6 @@ import {
   PlanType,
   Screen,
   UserCtx,
-  ContextUser,
 } from "@budibase/types"
 import { BASE_LAYOUT_PROP_IDS } from "../../constants/layouts"
 import sdk from "../../sdk"
@@ -317,6 +315,11 @@ async function performAppCreate(ctx: UserCtx) {
       }
     })

+    // Keep existing validation setting
+    if (!existing.features?.componentValidation) {
+      newApplication.features!.componentValidation = false
+    }
+
     // Migrate navigation settings and screens if required
     if (existing) {
       const navigation = await migrateAppNavigation()
@@ -20,7 +20,7 @@ import {
   Automation,
   AutomationActionStepId,
   AutomationResults,
-  BBContext,
+  Ctx,
 } from "@budibase/types"
 import { getActionDefinitions as actionDefs } from "../../automations/actions"
 import sdk from "../../sdk"
@@ -73,7 +73,7 @@ function cleanAutomationInputs(automation: Automation) {
   return automation
 }

-export async function create(ctx: BBContext) {
+export async function create(ctx: Ctx) {
   const db = context.getAppDB()
   let automation = ctx.request.body
   automation.appId = ctx.appId
@@ -142,7 +142,7 @@ export async function handleStepEvents(
   }
 }

-export async function update(ctx: BBContext) {
+export async function update(ctx: Ctx) {
   const db = context.getAppDB()
   let automation = ctx.request.body
   automation.appId = ctx.appId
@@ -193,7 +193,7 @@ export async function update(ctx: BBContext) {
   builderSocket?.emitAutomationUpdate(ctx, automation)
 }

-export async function fetch(ctx: BBContext) {
+export async function fetch(ctx: Ctx) {
   const db = context.getAppDB()
   const response = await db.allDocs(
     getAutomationParams(null, {
@@ -203,12 +203,11 @@ export async function fetch(ctx: BBContext) {
   ctx.body = response.rows.map(row => row.doc)
 }

-export async function find(ctx: BBContext) {
-  const db = context.getAppDB()
-  ctx.body = await db.get(ctx.params.id)
+export async function find(ctx: Ctx) {
+  ctx.body = await sdk.automations.get(ctx.params.id)
 }

-export async function destroy(ctx: BBContext) {
+export async function destroy(ctx: Ctx) {
   const db = context.getAppDB()
   const automationId = ctx.params.id
   const oldAutomation = await db.get<Automation>(automationId)
@@ -222,11 +221,11 @@ export async function destroy(ctx: BBContext) {
   builderSocket?.emitAutomationDeletion(ctx, automationId)
 }

-export async function logSearch(ctx: BBContext) {
+export async function logSearch(ctx: Ctx) {
   ctx.body = await automations.logs.logSearch(ctx.request.body)
 }

-export async function clearLogError(ctx: BBContext) {
+export async function clearLogError(ctx: Ctx) {
   const { automationId, appId } = ctx.request.body
   await context.doInAppContext(appId, async () => {
     const db = context.getProdAppDB()
@@ -245,15 +244,15 @@ export async function clearLogError(ctx: BBContext) {
   })
 }

-export async function getActionList(ctx: BBContext) {
+export async function getActionList(ctx: Ctx) {
   ctx.body = await getActionDefinitions()
 }

-export async function getTriggerList(ctx: BBContext) {
+export async function getTriggerList(ctx: Ctx) {
   ctx.body = getTriggerDefinitions()
 }

-export async function getDefinitionList(ctx: BBContext) {
+export async function getDefinitionList(ctx: Ctx) {
   ctx.body = {
     trigger: getTriggerDefinitions(),
     action: await getActionDefinitions(),
@@ -266,7 +265,7 @@ export async function getDefinitionList(ctx: BBContext) {
  * *
  *********************/

-export async function trigger(ctx: BBContext) {
+export async function trigger(ctx: Ctx) {
   const db = context.getAppDB()
   let automation = await db.get<Automation>(ctx.params.id)

@@ -311,7 +310,7 @@ function prepareTestInput(input: any) {
   return input
 }

-export async function test(ctx: BBContext) {
+export async function test(ctx: Ctx) {
   const db = context.getAppDB()
   let automation = await db.get<Automation>(ctx.params.id)
   await setTestFlag(automation._id!)
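Reviewer note: the `BBContext` to `Ctx` swap above is a type-level rename at these call sites. A minimal sketch of what the generic form allows, assuming `Ctx` accepts request/response body type parameters the same way the row controller later in this diff uses them (`Ctx<SearchRowRequest, SearchRowResponse>`); the `FetchAutomationsResponse` interface is hypothetical and only for illustration.

    import { Ctx, Automation } from "@budibase/types"

    // Hypothetical response shape, used only to show the generic parameters.
    interface FetchAutomationsResponse {
      automations: Automation[]
    }

    // ctx.body is now checked against the declared response shape.
    export async function fetchTyped(ctx: Ctx<void, FetchAutomationsResponse>) {
      ctx.body = { automations: [] }
    }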
@@ -127,7 +127,7 @@ export async function preview(ctx: any) {
   const query = ctx.request.body
   // preview may not have a queryId as it hasn't been saved, but if it does
   // this stops dynamic variables from calling the same query
-  const { fields, parameters, queryVerb, transformer, queryId } = query
+  const { fields, parameters, queryVerb, transformer, queryId, schema } = query

   const authConfigCtx: any = getAuthConfig(ctx)

@@ -140,6 +140,7 @@ export async function preview(ctx: any) {
       parameters,
       transformer,
       queryId,
+      schema,
       // have to pass down to the thread runner - can't put into context now
       environmentVariables: envVars,
       ctx: {
@@ -235,6 +236,7 @@ async function execute(
       user: ctx.user,
       auth: { ...authConfigCtx },
     },
+    schema: query.schema,
   }
   const runFn = () => Runner.run(inputs)

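Reviewer note: a small sketch of the preview payload after this change, using the field names destructured in preview() above; the point is that `schema` now rides along into the thread runner instead of being dropped. All values are illustrative, not part of the diff.

    // Illustrative only - field names mirror the destructuring in preview().
    const previewRequestBody = {
      queryVerb: "read",
      fields: { sql: "SELECT id, name FROM users" }, // assumed datasource fields
      parameters: [],
      transformer: "return data",
      queryId: undefined, // previews may not have been saved yet
      schema: {
        id: { type: "number", name: "id" },
        name: { type: "string", name: "name" },
      },
    }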
@@ -15,6 +15,7 @@ import {
   UserCtx,
 } from "@budibase/types"
 import sdk from "../../../sdk"
+import * as utils from "./utils"

 export async function handleRequest(
   operation: Operation,
@@ -43,8 +44,8 @@ export async function handleRequest(
 }

 export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
-  const tableId = ctx.params.tableId
-  const { id, ...rowData } = ctx.request.body
+  const tableId = utils.getTableId(ctx)
+  const { _id, ...rowData } = ctx.request.body

   const validateResult = await sdk.rows.utils.validate({
     row: rowData,
@@ -54,10 +55,10 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
     throw { validation: validateResult.errors }
   }
   const response = await handleRequest(Operation.UPDATE, tableId, {
-    id: breakRowIdField(id),
+    id: breakRowIdField(_id),
     row: rowData,
   })
-  const row = await sdk.rows.external.getRow(tableId, id, {
+  const row = await sdk.rows.external.getRow(tableId, _id, {
     relationships: true,
   })
   const table = await sdk.tables.getTable(tableId)
@@ -70,7 +71,7 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {

 export async function save(ctx: UserCtx) {
   const inputs = ctx.request.body
-  const tableId = ctx.params.tableId
+  const tableId = utils.getTableId(ctx)
   const validateResult = await sdk.rows.utils.validate({
     row: inputs,
     tableId,
@@ -98,15 +99,15 @@ export async function save(ctx: UserCtx) {

 export async function find(ctx: UserCtx) {
   const id = ctx.params.rowId
-  const tableId = ctx.params.tableId
+  const tableId = utils.getTableId(ctx)
   return sdk.rows.external.getRow(tableId, id)
 }

 export async function destroy(ctx: UserCtx) {
-  const tableId = ctx.params.tableId
-  const id = ctx.request.body._id
+  const tableId = utils.getTableId(ctx)
+  const _id = ctx.request.body._id
   const { row } = (await handleRequest(Operation.DELETE, tableId, {
-    id: breakRowIdField(id),
+    id: breakRowIdField(_id),
     includeSqlRelationships: IncludeRelationship.EXCLUDE,
   })) as { row: Row }
   return { response: { ok: true }, row }
@@ -114,7 +115,7 @@ export async function destroy(ctx: UserCtx) {

 export async function bulkDestroy(ctx: UserCtx) {
   const { rows } = ctx.request.body
-  const tableId = ctx.params.tableId
+  const tableId = utils.getTableId(ctx)
   let promises: Promise<Row[] | { row: Row; table: Table }>[] = []
   for (let row of rows) {
     promises.push(
@@ -130,7 +131,7 @@ export async function bulkDestroy(ctx: UserCtx) {

 export async function fetchEnrichedRow(ctx: UserCtx) {
   const id = ctx.params.rowId
-  const tableId = ctx.params.tableId
+  const tableId = utils.getTableId(ctx)
   const { datasourceId, tableName } = breakExternalTableId(tableId)
   const datasource: Datasource = await sdk.datasources.get(datasourceId!)
   if (!tableName) {
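Reviewer note: with `utils.getTableId(ctx)` the external handlers no longer read `ctx.params.tableId` directly, and the patch body is read as `_id` rather than `id`. A hedged sketch of the resulting request shape; the ID and field values are made up for illustration.

    // Illustrative PATCH body for an external table row after this change.
    const externalPatchBody = {
      _id: "%5B1%5D",       // row identifier, now named _id (example value only)
      name: "Updated name", // any schema columns being changed
    }
    // The table itself is resolved from the URL (sourceId/tableId params) by
    // utils.getTableId, so the body does not have to repeat it.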
@@ -11,6 +11,9 @@ import {
   Row,
   PatchRowRequest,
   PatchRowResponse,
+  SearchRowResponse,
+  SearchRowRequest,
+  SearchParams,
 } from "@budibase/types"
 import * as utils from "./utils"
 import { gridSocket } from "../../../websockets"
@@ -197,10 +200,10 @@ export async function destroy(ctx: UserCtx<DeleteRowRequest>) {
   ctx.body = response
 }

-export async function search(ctx: any) {
+export async function search(ctx: Ctx<SearchRowRequest, SearchRowResponse>) {
   const tableId = utils.getTableId(ctx)

-  const searchParams = {
+  const searchParams: SearchParams = {
     ...ctx.request.body,
     tableId,
   }

@@ -13,7 +13,7 @@ import {
 import { FieldTypes } from "../../../constants"
 import * as utils from "./utils"
 import { cloneDeep } from "lodash/fp"
-import { context, db as dbCore } from "@budibase/backend-core"
+import { context } from "@budibase/backend-core"
 import { finaliseRow, updateRelatedFormula } from "./staticFormula"
 import {
   UserCtx,
@@ -26,8 +26,8 @@ import {
 import sdk from "../../../sdk"

 export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
+  const tableId = utils.getTableId(ctx)
   const inputs = ctx.request.body
-  const tableId = inputs.tableId
   const isUserTable = tableId === InternalTables.USER_METADATA
   let oldRow
   const dbTable = await sdk.tables.getTable(tableId)
@@ -94,7 +94,8 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {

 export async function save(ctx: UserCtx) {
   let inputs = ctx.request.body
-  inputs.tableId = ctx.params.tableId
+  const tableId = utils.getTableId(ctx)
+  inputs.tableId = tableId

   if (!inputs._rev && !inputs._id) {
     inputs._id = generateRowID(inputs.tableId)
@@ -132,20 +133,22 @@ export async function save(ctx: UserCtx) {
 }

 export async function find(ctx: UserCtx) {
-  const db = dbCore.getDB(ctx.appId)
-  const table = await sdk.tables.getTable(ctx.params.tableId)
-  let row = await utils.findRow(ctx, ctx.params.tableId, ctx.params.rowId)
+  const tableId = utils.getTableId(ctx),
+    rowId = ctx.params.rowId
+  const table = await sdk.tables.getTable(tableId)
+  let row = await utils.findRow(ctx, tableId, rowId)
   row = await outputProcessing(table, row)
   return row
 }

 export async function destroy(ctx: UserCtx) {
   const db = context.getAppDB()
+  const tableId = utils.getTableId(ctx)
   const { _id } = ctx.request.body
   let row = await db.get<Row>(_id)
   let _rev = ctx.request.body._rev || row._rev

-  if (row.tableId !== ctx.params.tableId) {
+  if (row.tableId !== tableId) {
     throw "Supplied tableId doesn't match the row's tableId"
   }
   const table = await sdk.tables.getTable(row.tableId)
@@ -163,7 +166,7 @@ export async function destroy(ctx: UserCtx) {
   await updateRelatedFormula(table, row)

   let response
-  if (ctx.params.tableId === InternalTables.USER_METADATA) {
+  if (tableId === InternalTables.USER_METADATA) {
     ctx.params = {
       id: _id,
     }
@@ -176,7 +179,7 @@ export async function destroy(ctx: UserCtx) {
 }

 export async function bulkDestroy(ctx: UserCtx) {
-  const tableId = ctx.params.tableId
+  const tableId = utils.getTableId(ctx)
   const table = await sdk.tables.getTable(tableId)
   let { rows } = ctx.request.body

@@ -216,7 +219,7 @@ export async function bulkDestroy(ctx: UserCtx) {

 export async function fetchEnrichedRow(ctx: UserCtx) {
   const db = context.getAppDB()
-  const tableId = ctx.params.tableId
+  const tableId = utils.getTableId(ctx)
   const rowId = ctx.params.rowId
   // need table to work out where links go in row
   let [table, row] = await Promise.all([
@@ -45,13 +45,20 @@ export async function findRow(ctx: UserCtx, tableId: string, rowId: string) {
 }

 export function getTableId(ctx: Ctx) {
-  if (ctx.request.body && ctx.request.body.tableId) {
-    return ctx.request.body.tableId
+  // top priority, use the URL first
+  if (ctx.params?.sourceId) {
+    return ctx.params.sourceId
   }
-  if (ctx.params && ctx.params.tableId) {
+  // now check for old way of specifying table ID
+  if (ctx.params?.tableId) {
     return ctx.params.tableId
   }
-  if (ctx.params && ctx.params.viewName) {
+  // check body for a table ID
+  if (ctx.request.body?.tableId) {
+    return ctx.request.body.tableId
+  }
+  // now check if a specific view name
+  if (ctx.params?.viewName) {
     return ctx.params.viewName
   }
 }
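Reviewer note: a quick sketch of the resolution order the rewritten helper now follows, using a mocked ctx; purely illustrative, the IDs are made up.

    // Illustrative only: URL params win over the body, and sourceId wins overall.
    const mockCtx: any = {
      params: { sourceId: "ta_users", tableId: "ta_legacy", viewName: undefined },
      request: { body: { tableId: "ta_from_body" } },
    }
    // sourceId first, then params.tableId, then body.tableId, then viewName.
    getTableId(mockCtx) // -> "ta_users"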
@@ -1,14 +1,18 @@
 import { quotas } from "@budibase/pro"
 import {
   UserCtx,
-  SearchResponse,
-  SortOrder,
-  SortType,
   ViewV2,
+  SearchRowResponse,
+  SearchViewRowRequest,
+  RequiredKeys,
+  SearchParams,
 } from "@budibase/types"
+import { dataFilters } from "@budibase/shared-core"
 import sdk from "../../../sdk"

-export async function searchView(ctx: UserCtx<void, SearchResponse>) {
+export async function searchView(
+  ctx: UserCtx<SearchViewRowRequest, SearchRowResponse>
+) {
   const { viewId } = ctx.params

   const view = await sdk.views.get(viewId)
@@ -29,49 +33,35 @@ export async function searchView(ctx: UserCtx<void, SearchResponse>) {
     undefined

   ctx.status = 200
-  const result = await quotas.addQuery(
-    () =>
-      sdk.rows.search({
-        tableId: view.tableId,
-        query: view.query || {},
-        fields: viewFields,
-        ...getSortOptions(ctx, view),
-      }),
-    {
-      datasourceId: view.tableId,
-    }
-  )
+
+  const { body } = ctx.request
+  const query = dataFilters.buildLuceneQuery(view.query || [])
+
+  const searchOptions: RequiredKeys<SearchViewRowRequest> &
+    RequiredKeys<Pick<SearchParams, "tableId" | "query" | "fields">> = {
+    tableId: view.tableId,
+    query,
+    fields: viewFields,
+    ...getSortOptions(body, view),
+    limit: body.limit,
+    bookmark: body.bookmark,
+    paginate: body.paginate,
+  }
+
+  const result = await quotas.addQuery(() => sdk.rows.search(searchOptions), {
+    datasourceId: view.tableId,
+  })

   result.rows.forEach(r => (r._viewId = view.id))
   ctx.body = result
 }

-function getSortOptions(
-  ctx: UserCtx,
-  view: ViewV2
-):
-  | {
-      sort: string
-      sortOrder?: SortOrder
-      sortType?: SortType
-    }
-  | undefined {
-  const { sort_column, sort_order, sort_type } = ctx.query
-  if (Array.isArray(sort_column)) {
-    ctx.throw(400, "sort_column cannot be an array")
-  }
-  if (Array.isArray(sort_order)) {
-    ctx.throw(400, "sort_order cannot be an array")
-  }
-  if (Array.isArray(sort_type)) {
-    ctx.throw(400, "sort_type cannot be an array")
-  }
-
-  if (sort_column) {
+function getSortOptions(request: SearchViewRowRequest, view: ViewV2) {
+  if (request.sort) {
     return {
-      sort: sort_column,
-      sortOrder: sort_order as SortOrder,
-      sortType: sort_type as SortType,
+      sort: request.sort,
+      sortOrder: request.sortOrder,
+      sortType: request.sortType,
     }
   }
   if (view.sort) {
@@ -82,5 +72,9 @@ function getSortOptions(
     }
   }

-  return
+  return {
+    sort: undefined,
+    sortOrder: undefined,
+    sortType: undefined,
+  }
 }
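Reviewer note: a sketch of the request body the reworked searchView now consumes; the field names come from the searchOptions object above, while the values and enum strings are illustrative assumptions. Filters come from the view definition itself via buildLuceneQuery(view.query), so the body only carries sort and pagination options.

    // Illustrative SearchViewRowRequest body for POST /api/v2/views/:viewId/search
    const viewSearchBody = {
      sort: "name",
      sortOrder: "ascending", // assumed enum string value
      sortType: "string",     // assumed enum string value
      limit: 10,
      bookmark: undefined,
      paginate: true,
    }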
@ -4,16 +4,14 @@ import authorized from "../../middleware/authorized"
|
||||||
import { paramResource, paramSubResource } from "../../middleware/resourceId"
|
import { paramResource, paramSubResource } from "../../middleware/resourceId"
|
||||||
import { permissions } from "@budibase/backend-core"
|
import { permissions } from "@budibase/backend-core"
|
||||||
import { internalSearchValidator } from "./utils/validators"
|
import { internalSearchValidator } from "./utils/validators"
|
||||||
import noViewData from "../../middleware/noViewData"
|
|
||||||
import trimViewRowInfo from "../../middleware/trimViewRowInfo"
|
import trimViewRowInfo from "../../middleware/trimViewRowInfo"
|
||||||
import * as utils from "../../db/utils"
|
|
||||||
const { PermissionType, PermissionLevel } = permissions
|
const { PermissionType, PermissionLevel } = permissions
|
||||||
|
|
||||||
const router: Router = new Router()
|
const router: Router = new Router()
|
||||||
|
|
||||||
router
|
router
|
||||||
/**
|
/**
|
||||||
* @api {get} /api/:tableId/:rowId/enrich Get an enriched row
|
* @api {get} /api/:sourceId/:rowId/enrich Get an enriched row
|
||||||
* @apiName Get an enriched row
|
* @apiName Get an enriched row
|
||||||
* @apiGroup rows
|
* @apiGroup rows
|
||||||
* @apiPermission table read access
|
* @apiPermission table read access
|
||||||
|
@ -27,13 +25,13 @@ router
|
||||||
* @apiSuccess {object} row The response body will be the enriched row.
|
* @apiSuccess {object} row The response body will be the enriched row.
|
||||||
*/
|
*/
|
||||||
.get(
|
.get(
|
||||||
"/api/:tableId/:rowId/enrich",
|
"/api/:sourceId/:rowId/enrich",
|
||||||
paramSubResource("tableId", "rowId"),
|
paramSubResource("sourceId", "rowId"),
|
||||||
authorized(PermissionType.TABLE, PermissionLevel.READ),
|
authorized(PermissionType.TABLE, PermissionLevel.READ),
|
||||||
rowController.fetchEnrichedRow
|
rowController.fetchEnrichedRow
|
||||||
)
|
)
|
||||||
/**
|
/**
|
||||||
* @api {get} /api/:tableId/rows Get all rows in a table
|
* @api {get} /api/:sourceId/rows Get all rows in a table
|
||||||
* @apiName Get all rows in a table
|
* @apiName Get all rows in a table
|
||||||
* @apiGroup rows
|
* @apiGroup rows
|
||||||
* @apiPermission table read access
|
* @apiPermission table read access
|
||||||
|
@ -42,37 +40,37 @@ router
|
||||||
* due to its lack of support for pagination. With SQL tables this will retrieve up to a limit and then
|
* due to its lack of support for pagination. With SQL tables this will retrieve up to a limit and then
|
||||||
* will simply stop.
|
* will simply stop.
|
||||||
*
|
*
|
||||||
* @apiParam {string} tableId The ID of the table to retrieve all rows within.
|
* @apiParam {string} sourceId The ID of the table to retrieve all rows within.
|
||||||
*
|
*
|
||||||
* @apiSuccess {object[]} rows The response body will be an array of all rows found.
|
* @apiSuccess {object[]} rows The response body will be an array of all rows found.
|
||||||
*/
|
*/
|
||||||
.get(
|
.get(
|
||||||
"/api/:tableId/rows",
|
"/api/:sourceId/rows",
|
||||||
paramResource("tableId"),
|
paramResource("sourceId"),
|
||||||
authorized(PermissionType.TABLE, PermissionLevel.READ),
|
authorized(PermissionType.TABLE, PermissionLevel.READ),
|
||||||
rowController.fetch
|
rowController.fetch
|
||||||
)
|
)
|
||||||
/**
|
/**
|
||||||
* @api {get} /api/:tableId/rows/:rowId Retrieve a single row
|
* @api {get} /api/:sourceId/rows/:rowId Retrieve a single row
|
||||||
* @apiName Retrieve a single row
|
* @apiName Retrieve a single row
|
||||||
* @apiGroup rows
|
* @apiGroup rows
|
||||||
* @apiPermission table read access
|
* @apiPermission table read access
|
||||||
* @apiDescription This endpoint retrieves only the specified row. If you wish to retrieve
|
* @apiDescription This endpoint retrieves only the specified row. If you wish to retrieve
|
||||||
* a row by anything other than its _id field, use the search endpoint.
|
* a row by anything other than its _id field, use the search endpoint.
|
||||||
*
|
*
|
||||||
* @apiParam {string} tableId The ID of the table to retrieve a row from.
|
* @apiParam {string} sourceId The ID of the table to retrieve a row from.
|
||||||
* @apiParam {string} rowId The ID of the row to retrieve.
|
* @apiParam {string} rowId The ID of the row to retrieve.
|
||||||
*
|
*
|
||||||
* @apiSuccess {object} body The response body will be the row that was found.
|
* @apiSuccess {object} body The response body will be the row that was found.
|
||||||
*/
|
*/
|
||||||
.get(
|
.get(
|
||||||
"/api/:tableId/rows/:rowId",
|
"/api/:sourceId/rows/:rowId",
|
||||||
paramSubResource("tableId", "rowId"),
|
paramSubResource("sourceId", "rowId"),
|
||||||
authorized(PermissionType.TABLE, PermissionLevel.READ),
|
authorized(PermissionType.TABLE, PermissionLevel.READ),
|
||||||
rowController.find
|
rowController.find
|
||||||
)
|
)
|
||||||
/**
|
/**
|
||||||
* @api {post} /api/:tableId/search Search for rows in a table
|
* @api {post} /api/:sourceId/search Search for rows in a table
|
||||||
* @apiName Search for rows in a table
|
* @apiName Search for rows in a table
|
||||||
* @apiGroup rows
|
* @apiGroup rows
|
||||||
* @apiPermission table read access
|
* @apiPermission table read access
|
||||||
|
@ -80,7 +78,7 @@ router
|
||||||
* and data UI in the builder are built atop this. All filtering, sorting and pagination is
|
* and data UI in the builder are built atop this. All filtering, sorting and pagination is
|
||||||
* handled through this, for internal and external (datasource plus, e.g. SQL) tables.
|
* handled through this, for internal and external (datasource plus, e.g. SQL) tables.
|
||||||
*
|
*
|
||||||
* @apiParam {string} tableId The ID of the table to retrieve rows from.
|
* @apiParam {string} sourceId The ID of the table to retrieve rows from.
|
||||||
*
|
*
|
||||||
* @apiParam (Body) {boolean} [paginate] If pagination is required then this should be set to true,
|
* @apiParam (Body) {boolean} [paginate] If pagination is required then this should be set to true,
|
||||||
* defaults to false.
|
* defaults to false.
|
||||||
|
@ -135,22 +133,22 @@ router
|
||||||
* page.
|
* page.
|
||||||
*/
|
*/
|
||||||
.post(
|
.post(
|
||||||
"/api/:tableId/search",
|
"/api/:sourceId/search",
|
||||||
internalSearchValidator(),
|
internalSearchValidator(),
|
||||||
paramResource("tableId"),
|
paramResource("sourceId"),
|
||||||
authorized(PermissionType.TABLE, PermissionLevel.READ),
|
authorized(PermissionType.TABLE, PermissionLevel.READ),
|
||||||
rowController.search
|
rowController.search
|
||||||
)
|
)
|
||||||
// DEPRECATED - this is an old API, but for backwards compat it needs to be
|
// DEPRECATED - this is an old API, but for backwards compat it needs to be
|
||||||
// supported still
|
// supported still
|
||||||
.post(
|
.post(
|
||||||
"/api/search/:tableId/rows",
|
"/api/search/:sourceId/rows",
|
||||||
paramResource("tableId"),
|
paramResource("sourceId"),
|
||||||
authorized(PermissionType.TABLE, PermissionLevel.READ),
|
authorized(PermissionType.TABLE, PermissionLevel.READ),
|
||||||
rowController.search
|
rowController.search
|
||||||
)
|
)
|
||||||
/**
|
/**
|
||||||
* @api {post} /api/:tableId/rows Creates a new row
|
* @api {post} /api/:sourceId/rows Creates a new row
|
||||||
* @apiName Creates a new row
|
* @apiName Creates a new row
|
||||||
* @apiGroup rows
|
* @apiGroup rows
|
||||||
* @apiPermission table write access
|
* @apiPermission table write access
|
||||||
|
@ -159,7 +157,7 @@ router
|
||||||
* links to one. Please note that "_id", "_rev" and "tableId" are fields that are
|
* links to one. Please note that "_id", "_rev" and "tableId" are fields that are
|
||||||
* already used by Budibase tables and cannot be used for columns.
|
* already used by Budibase tables and cannot be used for columns.
|
||||||
*
|
*
|
||||||
* @apiParam {string} tableId The ID of the table to save a row to.
|
* @apiParam {string} sourceId The ID of the table to save a row to.
|
||||||
*
|
*
|
||||||
* @apiParam (Body) {string} [_id] If the row exists already then an ID for the row must be provided.
|
* @apiParam (Body) {string} [_id] If the row exists already then an ID for the row must be provided.
|
||||||
* @apiParam (Body) {string} [_rev] If working with an existing row for an internal table its revision
|
* @apiParam (Body) {string} [_rev] If working with an existing row for an internal table its revision
|
||||||
|
@ -174,14 +172,14 @@ router
|
||||||
* @apiSuccess {object} body The contents of the row that was saved will be returned as well.
|
* @apiSuccess {object} body The contents of the row that was saved will be returned as well.
|
||||||
*/
|
*/
|
||||||
.post(
|
.post(
|
||||||
"/api/:tableId/rows",
|
"/api/:sourceId/rows",
|
||||||
paramResource("tableId"),
|
paramResource("sourceId"),
|
||||||
authorized(PermissionType.TABLE, PermissionLevel.WRITE),
|
authorized(PermissionType.TABLE, PermissionLevel.WRITE),
|
||||||
noViewData,
|
trimViewRowInfo,
|
||||||
rowController.save
|
rowController.save
|
||||||
)
|
)
|
||||||
/**
|
/**
|
||||||
* @api {patch} /api/:tableId/rows Updates a row
|
* @api {patch} /api/:sourceId/rows Updates a row
|
||||||
* @apiName Update a row
|
* @apiName Update a row
|
||||||
* @apiGroup rows
|
* @apiGroup rows
|
||||||
* @apiPermission table write access
|
* @apiPermission table write access
|
||||||
|
@ -189,14 +187,14 @@ router
|
||||||
* error if an _id isn't provided, it will only function for existing rows.
|
* error if an _id isn't provided, it will only function for existing rows.
|
||||||
*/
|
*/
|
||||||
.patch(
|
.patch(
|
||||||
"/api/:tableId/rows",
|
"/api/:sourceId/rows",
|
||||||
paramResource("tableId"),
|
paramResource("sourceId"),
|
||||||
authorized(PermissionType.TABLE, PermissionLevel.WRITE),
|
authorized(PermissionType.TABLE, PermissionLevel.WRITE),
|
||||||
noViewData,
|
trimViewRowInfo,
|
||||||
rowController.patch
|
rowController.patch
|
||||||
)
|
)
|
||||||
/**
|
/**
|
||||||
* @api {post} /api/:tableId/rows/validate Validate inputs for a row
|
* @api {post} /api/:sourceId/rows/validate Validate inputs for a row
|
||||||
* @apiName Validate inputs for a row
|
* @apiName Validate inputs for a row
|
||||||
* @apiGroup rows
|
* @apiGroup rows
|
||||||
* @apiPermission table write access
|
* @apiPermission table write access
|
||||||
|
@ -204,7 +202,7 @@ router
|
||||||
* given the table schema, this will iterate through all the constraints on the table and
|
* given the table schema, this will iterate through all the constraints on the table and
|
||||||
* check if the request body is valid.
|
* check if the request body is valid.
|
||||||
*
|
*
|
||||||
* @apiParam {string} tableId The ID of the table the row is to be validated for.
|
* @apiParam {string} sourceId The ID of the table the row is to be validated for.
|
||||||
*
|
*
|
||||||
* @apiParam (Body) {any} [any] Any fields provided in the request body will be tested
|
* @apiParam (Body) {any} [any] Any fields provided in the request body will be tested
|
||||||
* against the table schema and constraints.
|
* against the table schema and constraints.
|
||||||
|
@ -216,20 +214,20 @@ router
|
||||||
* the schema.
|
* the schema.
|
||||||
*/
|
*/
|
||||||
.post(
|
.post(
|
||||||
"/api/:tableId/rows/validate",
|
"/api/:sourceId/rows/validate",
|
||||||
paramResource("tableId"),
|
paramResource("sourceId"),
|
||||||
authorized(PermissionType.TABLE, PermissionLevel.WRITE),
|
authorized(PermissionType.TABLE, PermissionLevel.WRITE),
|
||||||
rowController.validate
|
rowController.validate
|
||||||
)
|
)
|
||||||
/**
|
/**
|
||||||
* @api {delete} /api/:tableId/rows Delete rows
|
* @api {delete} /api/:sourceId/rows Delete rows
|
||||||
* @apiName Delete rows
|
* @apiName Delete rows
|
||||||
* @apiGroup rows
|
* @apiGroup rows
|
||||||
* @apiPermission table write access
|
* @apiPermission table write access
|
||||||
* @apiDescription This endpoint can delete a single row, or delete them in a bulk
|
* @apiDescription This endpoint can delete a single row, or delete them in a bulk
|
||||||
* fashion.
|
* fashion.
|
||||||
*
|
*
|
||||||
* @apiParam {string} tableId The ID of the table the row is to be deleted from.
|
* @apiParam {string} sourceId The ID of the table the row is to be deleted from.
|
||||||
*
|
*
|
||||||
* @apiParam (Body) {object[]} [rows] If bulk deletion is desired then provide the rows in this
|
* @apiParam (Body) {object[]} [rows] If bulk deletion is desired then provide the rows in this
|
||||||
* key of the request body that are to be deleted.
|
* key of the request body that are to be deleted.
|
||||||
|
@ -242,117 +240,37 @@ router
|
||||||
* is the deleted row.
|
* is the deleted row.
|
||||||
*/
|
*/
|
||||||
.delete(
|
.delete(
|
||||||
"/api/:tableId/rows",
|
"/api/:sourceId/rows",
|
||||||
paramResource("tableId"),
|
paramResource("sourceId"),
|
||||||
authorized(PermissionType.TABLE, PermissionLevel.WRITE),
|
authorized(PermissionType.TABLE, PermissionLevel.WRITE),
|
||||||
|
trimViewRowInfo,
|
||||||
rowController.destroy
|
rowController.destroy
|
||||||
)
|
)
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @api {post} /api/:tableId/rows/exportRows Export Rows
|
* @api {post} /api/:sourceId/rows/exportRows Export Rows
|
||||||
* @apiName Export rows
|
* @apiName Export rows
|
||||||
* @apiGroup rows
|
* @apiGroup rows
|
||||||
* @apiPermission table write access
|
* @apiPermission table write access
|
||||||
* @apiDescription This API can export a number of provided rows
|
* @apiDescription This API can export a number of provided rows
|
||||||
*
|
*
|
||||||
* @apiParam {string} tableId The ID of the table the row is to be deleted from.
|
* @apiParam {string} sourceId The ID of the table the row is to be deleted from.
|
||||||
*
|
*
|
||||||
* @apiParam (Body) {object[]} [rows] The row IDs which are to be exported
|
* @apiParam (Body) {object[]} [rows] The row IDs which are to be exported
|
||||||
*
|
*
|
||||||
* @apiSuccess {object[]|object}
|
* @apiSuccess {object[]|object}
|
||||||
*/
|
*/
|
||||||
.post(
|
.post(
|
||||||
"/api/:tableId/rows/exportRows",
|
"/api/:sourceId/rows/exportRows",
|
||||||
paramResource("tableId"),
|
paramResource("sourceId"),
|
||||||
authorized(PermissionType.TABLE, PermissionLevel.WRITE),
|
authorized(PermissionType.TABLE, PermissionLevel.WRITE),
|
||||||
rowController.exportRows
|
rowController.exportRows
|
||||||
)
|
)
|
||||||
|
|
||||||
router
|
router.post(
|
||||||
.get(
|
"/api/v2/views/:viewId/search",
|
||||||
"/api/v2/views/:viewId/search",
|
authorized(PermissionType.TABLE, PermissionLevel.READ),
|
||||||
authorized(PermissionType.VIEW, PermissionLevel.READ),
|
rowController.views.searchView
|
||||||
rowController.views.searchView
|
)
|
||||||
)
|
|
||||||
/**
|
|
||||||
* @api {post} /api/:tableId/rows Creates a new row
|
|
||||||
* @apiName Creates a new row
|
|
||||||
* @apiGroup rows
|
|
||||||
* @apiPermission table write access
|
|
||||||
* @apiDescription This API will create a new row based on the supplied body. If the
|
|
||||||
* body includes an "_id" field then it will update an existing row if the field
|
|
||||||
* links to one. Please note that "_id", "_rev" and "tableId" are fields that are
|
|
||||||
* already used by Budibase tables and cannot be used for columns.
|
|
||||||
*
|
|
||||||
* @apiParam {string} tableId The ID of the table to save a row to.
|
|
||||||
*
|
|
||||||
* @apiParam (Body) {string} [_id] If the row exists already then an ID for the row must be provided.
|
|
||||||
* @apiParam (Body) {string} [_rev] If working with an existing row for an internal table its revision
|
|
||||||
* must also be provided.
|
|
||||||
* @apiParam (Body) {string} _viewId The ID of the view should be specified in the row body itself.
|
|
||||||
* @apiParam (Body) {string} tableId The ID of the table should also be specified in the row body itself.
|
|
||||||
* @apiParam (Body) {any} [any] Any field supplied in the body will be assessed to see if it matches
|
|
||||||
* a column in the specified table. All other fields will be dropped and not stored.
|
|
||||||
*
|
|
||||||
* @apiSuccess {string} _id The ID of the row that was just saved, if it was just created this
|
|
||||||
* is the rows new ID.
|
|
||||||
* @apiSuccess {string} [_rev] If saving to an internal table a revision will also be returned.
|
|
||||||
* @apiSuccess {object} body The contents of the row that was saved will be returned as well.
|
|
||||||
*/
|
|
||||||
.post(
|
|
||||||
"/api/v2/views/:viewId/rows",
|
|
||||||
paramResource("viewId"),
|
|
||||||
authorized(PermissionType.VIEW, PermissionLevel.WRITE),
|
|
||||||
trimViewRowInfo,
|
|
||||||
rowController.save
|
|
||||||
)
|
|
||||||
/**
|
|
||||||
* @api {patch} /api/v2/views/:viewId/rows/:rowId Updates a row
|
|
||||||
* @apiName Update a row
|
|
||||||
* @apiGroup rows
|
|
||||||
* @apiPermission table write access
|
|
||||||
* @apiDescription This endpoint is identical to the row creation endpoint but instead it will
|
|
||||||
* error if an _id isn't provided, it will only function for existing rows.
|
|
||||||
*/
|
|
||||||
.patch(
|
|
||||||
"/api/v2/views/:viewId/rows/:rowId",
|
|
||||||
paramResource("viewId"),
|
|
||||||
authorized(PermissionType.VIEW, PermissionLevel.WRITE),
|
|
||||||
trimViewRowInfo,
|
|
||||||
rowController.patch
|
|
||||||
)
|
|
||||||
/**
|
|
||||||
* @api {delete} /api/v2/views/:viewId/rows Delete rows for a view
|
|
||||||
* @apiName Delete rows for a view
|
|
||||||
* @apiGroup rows
|
|
||||||
* @apiPermission table write access
|
|
||||||
* @apiDescription This endpoint can delete a single row, or delete them in a bulk
|
|
||||||
* fashion.
|
|
||||||
*
|
|
||||||
* @apiParam {string} tableId The ID of the table the row is to be deleted from.
|
|
||||||
*
|
|
||||||
* @apiParam (Body) {object[]} [rows] If bulk deletion is desired then provide the rows in this
|
|
||||||
* key of the request body that are to be deleted.
|
|
||||||
* @apiParam (Body) {string} [_id] If deleting a single row then provide its ID in this field.
|
|
||||||
* @apiParam (Body) {string} [_rev] If deleting a single row from an internal table then provide its
|
|
||||||
* revision here.
|
|
||||||
*
|
|
||||||
* @apiSuccess {object[]|object} body If deleting bulk then the response body will be an array
|
|
||||||
* of the deleted rows, if deleting a single row then the body will contain a "row" property which
|
|
||||||
* is the deleted row.
|
|
||||||
*/
|
|
||||||
.delete(
|
|
||||||
"/api/v2/views/:viewId/rows",
|
|
||||||
paramResource("viewId"),
|
|
||||||
authorized(PermissionType.VIEW, PermissionLevel.WRITE),
|
|
||||||
// This is required as the implementation relies on the table id
|
|
||||||
(ctx, next) => {
|
|
||||||
ctx.params.tableId = utils.extractViewInfoFromID(
|
|
||||||
ctx.params.viewId
|
|
||||||
).tableId
|
|
||||||
return next()
|
|
||||||
},
|
|
||||||
rowController.destroy
|
|
||||||
)
|
|
||||||
|
|
||||||
export default router
|
export default router
|
||||||
|
|
|
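Reviewer note: illustrative client calls against the renamed routes; the paths are taken from this diff, while the fetch usage, bodies and IDs are assumptions for demonstration only.

    // Fetch all rows for a source (table or, with the new naming, any row source).
    await fetch(`/api/${sourceId}/rows`)
    // Search within a source (filtering, sorting and pagination handled server-side).
    await fetch(`/api/${sourceId}/search`, {
      method: "POST",
      body: JSON.stringify({ query: {}, paginate: true, limit: 10 }),
    })
    // The only v2 view route kept in this file: search a view.
    await fetch(`/api/v2/views/${viewId}/search`, { method: "POST", body: "{}" })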
@ -16,16 +16,12 @@ import {
|
||||||
FieldType,
|
FieldType,
|
||||||
SortType,
|
SortType,
|
||||||
SortOrder,
|
SortOrder,
|
||||||
DeleteRow,
|
|
||||||
} from "@budibase/types"
|
} from "@budibase/types"
|
||||||
import {
|
import {
|
||||||
expectAnyInternalColsAttributes,
|
expectAnyInternalColsAttributes,
|
||||||
generator,
|
generator,
|
||||||
structures,
|
structures,
|
||||||
} from "@budibase/backend-core/tests"
|
} from "@budibase/backend-core/tests"
|
||||||
import trimViewRowInfoMiddleware from "../../../middleware/trimViewRowInfo"
|
|
||||||
import noViewDataMiddleware from "../../../middleware/noViewData"
|
|
||||||
import router from "../row"
|
|
||||||
|
|
||||||
describe("/rows", () => {
|
describe("/rows", () => {
|
||||||
let request = setup.getRequest()
|
let request = setup.getRequest()
|
||||||
|
@ -394,26 +390,6 @@ describe("/rows", () => {
|
||||||
expect(saved.arrayFieldArrayStrKnown).toEqual(["One"])
|
expect(saved.arrayFieldArrayStrKnown).toEqual(["One"])
|
||||||
expect(saved.optsFieldStrKnown).toEqual("Alpha")
|
expect(saved.optsFieldStrKnown).toEqual("Alpha")
|
||||||
})
|
})
|
||||||
|
|
||||||
it("should throw an error when creating a table row with view id data", async () => {
|
|
||||||
const res = await request
|
|
||||||
.post(`/api/${row.tableId}/rows`)
|
|
||||||
.send({ ...row, _viewId: generator.guid() })
|
|
||||||
.set(config.defaultHeaders())
|
|
||||||
.expect("Content-Type", /json/)
|
|
||||||
.expect(400)
|
|
||||||
expect(res.body.message).toEqual(
|
|
||||||
"Table row endpoints cannot contain view info"
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
it("should setup the noViewData middleware", async () => {
|
|
||||||
const route = router.stack.find(
|
|
||||||
r => r.methods.includes("POST") && r.path === "/api/:tableId/rows"
|
|
||||||
)
|
|
||||||
expect(route).toBeDefined()
|
|
||||||
expect(route?.stack).toContainEqual(noViewDataMiddleware)
|
|
||||||
})
|
|
||||||
})
|
})
|
||||||
|
|
||||||
describe("patch", () => {
|
describe("patch", () => {
|
||||||
|
@ -463,33 +439,6 @@ describe("/rows", () => {
|
||||||
await assertRowUsage(rowUsage)
|
await assertRowUsage(rowUsage)
|
||||||
await assertQueryUsage(queryUsage)
|
await assertQueryUsage(queryUsage)
|
||||||
})
|
})
|
||||||
|
|
||||||
it("should throw an error when creating a table row with view id data", async () => {
|
|
||||||
const existing = await config.createRow()
|
|
||||||
|
|
||||||
const res = await config.api.row.patch(
|
|
||||||
table._id!,
|
|
||||||
{
|
|
||||||
...existing,
|
|
||||||
_id: existing._id!,
|
|
||||||
_rev: existing._rev!,
|
|
||||||
tableId: table._id!,
|
|
||||||
_viewId: generator.guid(),
|
|
||||||
},
|
|
||||||
{ expectStatus: 400 }
|
|
||||||
)
|
|
||||||
expect(res.body.message).toEqual(
|
|
||||||
"Table row endpoints cannot contain view info"
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
it("should setup the noViewData middleware", async () => {
|
|
||||||
const route = router.stack.find(
|
|
||||||
r => r.methods.includes("PATCH") && r.path === "/api/:tableId/rows"
|
|
||||||
)
|
|
||||||
expect(route).toBeDefined()
|
|
||||||
expect(route?.stack).toContainEqual(noViewDataMiddleware)
|
|
||||||
})
|
|
||||||
})
|
})
|
||||||
|
|
||||||
describe("destroy", () => {
|
describe("destroy", () => {
|
||||||
|
@ -758,7 +707,7 @@ describe("/rows", () => {
|
||||||
})
|
})
|
||||||
// the environment needs configured for this
|
// the environment needs configured for this
|
||||||
await setup.switchToSelfHosted(async () => {
|
await setup.switchToSelfHosted(async () => {
|
||||||
context.doInAppContext(config.getAppId(), async () => {
|
return context.doInAppContext(config.getAppId(), async () => {
|
||||||
const enriched = await outputProcessing(table, [row])
|
const enriched = await outputProcessing(table, [row])
|
||||||
expect((enriched as Row[])[0].attachment[0].url).toBe(
|
expect((enriched as Row[])[0].attachment[0].url).toBe(
|
||||||
`/files/signed/prod-budi-app-assets/${config.getProdAppId()}/attachments/${attachmentId}`
|
`/files/signed/prod-budi-app-assets/${config.getProdAppId()}/attachments/${attachmentId}`
|
||||||
|
@ -813,252 +762,6 @@ describe("/rows", () => {
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
describe("view search", () => {
|
|
||||||
function userTable(): Table {
|
|
||||||
return {
|
|
||||||
name: "user",
|
|
||||||
type: "user",
|
|
||||||
schema: {
|
|
||||||
name: {
|
|
||||||
type: FieldType.STRING,
|
|
||||||
name: "name",
|
|
||||||
constraints: { type: "string" },
|
|
||||||
},
|
|
||||||
age: {
|
|
||||||
type: FieldType.NUMBER,
|
|
||||||
name: "age",
|
|
||||||
constraints: {},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
it("returns table rows from view", async () => {
|
|
||||||
const table = await config.createTable(userTable())
|
|
||||||
const rows = []
|
|
||||||
for (let i = 0; i < 10; i++) {
|
|
||||||
rows.push(await config.createRow({ tableId: table._id }))
|
|
||||||
}
|
|
||||||
|
|
||||||
const createViewResponse = await config.api.viewV2.create()
|
|
||||||
const response = await config.api.viewV2.search(createViewResponse.id)
|
|
||||||
|
|
||||||
expect(response.body.rows).toHaveLength(10)
|
|
||||||
expect(response.body).toEqual({
|
|
||||||
rows: expect.arrayContaining(rows.map(expect.objectContaining)),
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
it("searching respects the view filters", async () => {
|
|
||||||
const table = await config.createTable(userTable())
|
|
||||||
const expectedRows = []
|
|
||||||
for (let i = 0; i < 10; i++)
|
|
||||||
await config.createRow({
|
|
||||||
tableId: table._id,
|
|
||||||
name: generator.name(),
|
|
||||||
age: generator.integer({ min: 10, max: 30 }),
|
|
||||||
})
|
|
||||||
|
|
||||||
for (let i = 0; i < 5; i++)
|
|
||||||
expectedRows.push(
|
|
||||||
await config.createRow({
|
|
||||||
tableId: table._id,
|
|
||||||
name: generator.name(),
|
|
||||||
age: 40,
|
|
||||||
})
|
|
||||||
)
|
|
||||||
|
|
||||||
const createViewResponse = await config.api.viewV2.create({
|
|
||||||
query: { equal: { age: 40 } },
|
|
||||||
})
|
|
||||||
|
|
||||||
const response = await config.api.viewV2.search(createViewResponse.id)
|
|
||||||
|
|
||||||
expect(response.body.rows).toHaveLength(5)
|
|
||||||
expect(response.body).toEqual({
|
|
||||||
rows: expect.arrayContaining(expectedRows.map(expect.objectContaining)),
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
const sortTestOptions: [
|
|
||||||
{
|
|
||||||
field: string
|
|
||||||
order?: SortOrder
|
|
||||||
type?: SortType
|
|
||||||
},
|
|
||||||
string[]
|
|
||||||
][] = [
|
|
||||||
[
|
|
||||||
{
|
|
||||||
field: "name",
|
|
||||||
order: SortOrder.ASCENDING,
|
|
||||||
type: SortType.STRING,
|
|
||||||
},
|
|
||||||
["Alice", "Bob", "Charly", "Danny"],
|
|
||||||
],
|
|
||||||
[
|
|
||||||
{
|
|
||||||
field: "name",
|
|
||||||
},
|
|
||||||
["Alice", "Bob", "Charly", "Danny"],
|
|
||||||
],
|
|
||||||
[
|
|
||||||
{
|
|
||||||
field: "name",
|
|
||||||
order: SortOrder.DESCENDING,
|
|
||||||
},
|
|
||||||
["Danny", "Charly", "Bob", "Alice"],
|
|
||||||
],
|
|
||||||
[
|
|
||||||
{
|
|
||||||
field: "name",
|
|
||||||
order: SortOrder.DESCENDING,
|
|
||||||
type: SortType.STRING,
|
|
||||||
},
|
|
||||||
["Danny", "Charly", "Bob", "Alice"],
|
|
||||||
],
|
|
||||||
[
|
|
||||||
{
|
|
||||||
field: "age",
|
|
||||||
order: SortOrder.ASCENDING,
|
|
||||||
type: SortType.number,
|
|
||||||
},
|
|
||||||
["Danny", "Alice", "Charly", "Bob"],
|
|
||||||
],
|
|
||||||
[
|
|
||||||
{
|
|
||||||
field: "age",
|
|
||||||
order: SortOrder.ASCENDING,
|
|
||||||
},
|
|
||||||
["Danny", "Alice", "Charly", "Bob"],
|
|
||||||
],
|
|
||||||
[
|
|
||||||
{
|
|
||||||
field: "age",
|
|
||||||
order: SortOrder.DESCENDING,
|
|
||||||
},
|
|
||||||
["Bob", "Charly", "Alice", "Danny"],
|
|
||||||
],
|
|
||||||
[
|
|
||||||
{
|
|
||||||
field: "age",
|
|
||||||
order: SortOrder.DESCENDING,
|
|
||||||
type: SortType.number,
|
|
||||||
},
|
|
||||||
["Bob", "Charly", "Alice", "Danny"],
|
|
||||||
],
|
|
||||||
]
|
|
||||||
|
|
||||||
it.each(sortTestOptions)(
|
|
||||||
"allow sorting (%s)",
|
|
||||||
async (sortParams, expected) => {
|
|
||||||
await config.createTable(userTable())
|
|
||||||
const users = [
|
|
||||||
{ name: "Alice", age: 25 },
|
|
||||||
{ name: "Bob", age: 30 },
|
|
||||||
{ name: "Charly", age: 27 },
|
|
||||||
{ name: "Danny", age: 15 },
|
|
||||||
]
|
|
||||||
for (const user of users) {
|
|
||||||
await config.createRow({
|
|
||||||
tableId: config.table!._id,
|
|
||||||
...user,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
const createViewResponse = await config.api.viewV2.create({
|
|
||||||
sort: sortParams,
|
|
||||||
})
|
|
||||||
|
|
||||||
const response = await config.api.viewV2.search(createViewResponse.id)
|
|
||||||
|
|
||||||
expect(response.body.rows).toHaveLength(4)
|
|
||||||
expect(response.body).toEqual({
|
|
||||||
rows: expected.map(name => expect.objectContaining({ name })),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
it.each(sortTestOptions)(
|
|
||||||
"allow override the default view sorting (%s)",
|
|
||||||
async (sortParams, expected) => {
|
|
||||||
await config.createTable(userTable())
|
|
||||||
const users = [
|
|
||||||
{ name: "Alice", age: 25 },
|
|
||||||
{ name: "Bob", age: 30 },
|
|
||||||
{ name: "Charly", age: 27 },
|
|
||||||
{ name: "Danny", age: 15 },
|
|
||||||
]
|
|
||||||
for (const user of users) {
|
|
||||||
await config.createRow({
|
|
||||||
tableId: config.table!._id,
|
|
||||||
...user,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
const createViewResponse = await config.api.viewV2.create({
|
|
||||||
sort: {
|
|
||||||
field: "name",
|
|
||||||
order: SortOrder.ASCENDING,
|
|
||||||
type: SortType.STRING,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
const response = await config.api.viewV2.search(createViewResponse.id, {
|
|
||||||
sort: {
|
|
||||||
column: sortParams.field,
|
|
||||||
order: sortParams.order,
|
|
||||||
type: sortParams.type,
|
|
||||||
},
|
-      })
-
-      expect(response.body.rows).toHaveLength(4)
-      expect(response.body).toEqual({
-        rows: expected.map(name => expect.objectContaining({ name })),
-      })
-    }
-  )
-
-  it("when schema is defined, defined columns and row attributes are returned", async () => {
-    const table = await config.createTable(userTable())
-    const rows = []
-    for (let i = 0; i < 10; i++) {
-      rows.push(
-        await config.createRow({
-          tableId: table._id,
-          name: generator.name(),
-          age: generator.age(),
-        })
-      )
-    }
-
-    const view = await config.api.viewV2.create({
-      schema: { name: {} },
-    })
-    const response = await config.api.viewV2.search(view.id)
-
-    expect(response.body.rows).toHaveLength(10)
-    expect(response.body.rows).toEqual(
-      expect.arrayContaining(
-        rows.map(r => ({
-          ...expectAnyInternalColsAttributes,
-          _viewId: view.id,
-          name: r.name,
-        }))
-      )
-    )
-  })
-
-  it("views without data can be returned", async () => {
-    const table = await config.createTable(userTable())
-
-    const createViewResponse = await config.api.viewV2.create()
-    const response = await config.api.viewV2.search(createViewResponse.id)
-
-    expect(response.body.rows).toHaveLength(0)
-  })
-})
 describe("view 2.0", () => {
   function userTable(): Table {
     return {
@@ -1110,7 +813,7 @@ describe("/rows", () => {
       })

       const data = randomRowData()
-      const newRow = await config.api.viewV2.row.create(view.id, {
+      const newRow = await config.api.row.save(view.id, {
         tableId: config.table!._id,
         _viewId: view.id,
         ...data,
@@ -1132,16 +835,6 @@ describe("/rows", () => {
       expect(row.body.age).toBeUndefined()
       expect(row.body.jobTitle).toBeUndefined()
     })
-
-    it("should setup the trimViewRowInfo middleware", async () => {
-      const route = router.stack.find(
-        r =>
-          r.methods.includes("POST") &&
-          r.path === "/api/v2/views/:viewId/rows"
-      )
-      expect(route).toBeDefined()
-      expect(route?.stack).toContainEqual(trimViewRowInfoMiddleware)
-    })
   })

   describe("patch", () => {
@@ -1156,13 +849,13 @@ describe("/rows", () => {
         },
       })

-      const newRow = await config.api.viewV2.row.create(view.id, {
+      const newRow = await config.api.row.save(view.id, {
         tableId,
         _viewId: view.id,
         ...randomRowData(),
       })
       const newData = randomRowData()
-      await config.api.viewV2.row.update(view.id, newRow._id!, {
+      await config.api.row.patch(view.id, {
         tableId,
         _viewId: view.id,
         _id: newRow._id!,
@@ -1185,16 +878,6 @@ describe("/rows", () => {
       expect(row.body.age).toBeUndefined()
       expect(row.body.jobTitle).toBeUndefined()
     })
-
-    it("should setup the trimViewRowInfo middleware", async () => {
-      const route = router.stack.find(
-        r =>
-          r.methods.includes("PATCH") &&
-          r.path === "/api/v2/views/:viewId/rows/:rowId"
-      )
-      expect(route).toBeDefined()
-      expect(route?.stack).toContainEqual(trimViewRowInfoMiddleware)
-    })
   })

   describe("destroy", () => {
@@ -1213,10 +896,7 @@ describe("/rows", () => {
       const rowUsage = await getRowUsage()
       const queryUsage = await getQueryUsage()

-      const body: DeleteRow = {
-        _id: createdRow._id!,
-      }
-      await config.api.viewV2.row.delete(view.id, body)
+      await config.api.row.delete(view.id, [createdRow])

       await assertRowUsage(rowUsage - 1)
       await assertQueryUsage(queryUsage + 1)
@@ -1245,9 +925,7 @@ describe("/rows", () => {
       const rowUsage = await getRowUsage()
       const queryUsage = await getQueryUsage()

-      await config.api.viewV2.row.delete(view.id, {
-        rows: [rows[0], rows[2]],
-      })
+      await config.api.row.delete(view.id, [rows[0], rows[2]])

       await assertRowUsage(rowUsage - 2)
       await assertQueryUsage(queryUsage + 1)
@@ -1261,5 +939,327 @@ describe("/rows", () => {
       await config.api.row.get(tableId, rows[1]._id!, { expectStatus: 200 })
     })
   })
+  describe("view search", () => {
+    function userTable(): Table {
+      return {
+        name: "user",
+        type: "user",
+        schema: {
+          name: {
+            type: FieldType.STRING,
+            name: "name",
+            constraints: { type: "string" },
+          },
+          age: {
+            type: FieldType.NUMBER,
+            name: "age",
+            constraints: {},
+          },
+        },
+      }
+    }
+
+    it("returns table rows from view", async () => {
+      const table = await config.createTable(userTable())
+      const rows = []
+      for (let i = 0; i < 10; i++) {
+        rows.push(await config.createRow({ tableId: table._id }))
+      }
+
+      const createViewResponse = await config.api.viewV2.create()
+      const response = await config.api.viewV2.search(createViewResponse.id)
+
+      expect(response.body.rows).toHaveLength(10)
+      expect(response.body).toEqual({
+        rows: expect.arrayContaining(rows.map(expect.objectContaining)),
+      })
+    })
+
+    it("searching respects the view filters", async () => {
+      const table = await config.createTable(userTable())
+      const expectedRows = []
+      for (let i = 0; i < 10; i++)
+        await config.createRow({
+          tableId: table._id,
+          name: generator.name(),
+          age: generator.integer({ min: 10, max: 30 }),
+        })
+
+      for (let i = 0; i < 5; i++)
+        expectedRows.push(
+          await config.createRow({
+            tableId: table._id,
+            name: generator.name(),
+            age: 40,
+          })
+        )
+
+      const createViewResponse = await config.api.viewV2.create({
+        query: [{ operator: "equal", field: "age", value: 40 }],
+      })
+
+      const response = await config.api.viewV2.search(createViewResponse.id)
+
+      expect(response.body.rows).toHaveLength(5)
+      expect(response.body).toEqual({
+        rows: expect.arrayContaining(
+          expectedRows.map(expect.objectContaining)
+        ),
+      })
+    })
+
+    const sortTestOptions: [
+      {
+        field: string
+        order?: SortOrder
+        type?: SortType
+      },
+      string[]
+    ][] = [
+      [
+        {
+          field: "name",
+          order: SortOrder.ASCENDING,
+          type: SortType.STRING,
+        },
+        ["Alice", "Bob", "Charly", "Danny"],
+      ],
+      [
+        {
+          field: "name",
+        },
+        ["Alice", "Bob", "Charly", "Danny"],
+      ],
+      [
+        {
+          field: "name",
+          order: SortOrder.DESCENDING,
+        },
+        ["Danny", "Charly", "Bob", "Alice"],
+      ],
+      [
+        {
+          field: "name",
+          order: SortOrder.DESCENDING,
+          type: SortType.STRING,
+        },
+        ["Danny", "Charly", "Bob", "Alice"],
+      ],
+      [
+        {
+          field: "age",
+          order: SortOrder.ASCENDING,
+          type: SortType.number,
+        },
+        ["Danny", "Alice", "Charly", "Bob"],
+      ],
+      [
+        {
+          field: "age",
+          order: SortOrder.ASCENDING,
+        },
+        ["Danny", "Alice", "Charly", "Bob"],
+      ],
+      [
+        {
+          field: "age",
+          order: SortOrder.DESCENDING,
+        },
+        ["Bob", "Charly", "Alice", "Danny"],
+      ],
+      [
+        {
+          field: "age",
+          order: SortOrder.DESCENDING,
+          type: SortType.number,
+        },
+        ["Bob", "Charly", "Alice", "Danny"],
+      ],
+    ]
+
+    it.each(sortTestOptions)(
+      "allow sorting (%s)",
+      async (sortParams, expected) => {
+        await config.createTable(userTable())
+        const users = [
+          { name: "Alice", age: 25 },
+          { name: "Bob", age: 30 },
+          { name: "Charly", age: 27 },
+          { name: "Danny", age: 15 },
+        ]
+        for (const user of users) {
+          await config.createRow({
+            tableId: config.table!._id,
+            ...user,
+          })
+        }
+
+        const createViewResponse = await config.api.viewV2.create({
+          sort: sortParams,
+        })
+
+        const response = await config.api.viewV2.search(createViewResponse.id)
+
+        expect(response.body.rows).toHaveLength(4)
+        expect(response.body).toEqual({
+          rows: expected.map(name => expect.objectContaining({ name })),
+        })
+      }
+    )
+
+    it.each(sortTestOptions)(
+      "allow override the default view sorting (%s)",
+      async (sortParams, expected) => {
+        await config.createTable(userTable())
+        const users = [
+          { name: "Alice", age: 25 },
+          { name: "Bob", age: 30 },
+          { name: "Charly", age: 27 },
+          { name: "Danny", age: 15 },
+        ]
+        for (const user of users) {
+          await config.createRow({
+            tableId: config.table!._id,
+            ...user,
+          })
+        }
+
+        const createViewResponse = await config.api.viewV2.create({
+          sort: {
+            field: "name",
+            order: SortOrder.ASCENDING,
+            type: SortType.STRING,
+          },
+        })
+
+        const response = await config.api.viewV2.search(
+          createViewResponse.id,
+          {
+            sort: sortParams.field,
+            sortOrder: sortParams.order,
+            sortType: sortParams.type,
+          }
+        )
+
+        expect(response.body.rows).toHaveLength(4)
+        expect(response.body).toEqual({
+          rows: expected.map(name => expect.objectContaining({ name })),
+        })
+      }
+    )
+
+    it("when schema is defined, defined columns and row attributes are returned", async () => {
+      const table = await config.createTable(userTable())
+      const rows = []
+      for (let i = 0; i < 10; i++) {
+        rows.push(
+          await config.createRow({
+            tableId: table._id,
+            name: generator.name(),
+            age: generator.age(),
+          })
+        )
+      }
+
+      const view = await config.api.viewV2.create({
+        schema: { name: {} },
+      })
+      const response = await config.api.viewV2.search(view.id)
+
+      expect(response.body.rows).toHaveLength(10)
+      expect(response.body.rows).toEqual(
+        expect.arrayContaining(
+          rows.map(r => ({
+            ...expectAnyInternalColsAttributes,
+            _viewId: view.id,
+            name: r.name,
+          }))
+        )
+      )
+    })
+
+    it("views without data can be returned", async () => {
+      const table = await config.createTable(userTable())
+
+      const createViewResponse = await config.api.viewV2.create()
+      const response = await config.api.viewV2.search(createViewResponse.id)
+
+      expect(response.body.rows).toHaveLength(0)
+    })
+
+    it("respects the limit parameter", async () => {
+      const table = await config.createTable(userTable())
+      const rows = []
+      for (let i = 0; i < 10; i++) {
+        rows.push(await config.createRow({ tableId: table._id }))
+      }
+      const limit = generator.integer({ min: 1, max: 8 })
+
+      const createViewResponse = await config.api.viewV2.create()
+      const response = await config.api.viewV2.search(createViewResponse.id, {
+        limit,
+      })
+
+      expect(response.body.rows).toHaveLength(limit)
+    })
+
+    it("can handle pagination", async () => {
+      const table = await config.createTable(userTable())
+      const rows = []
+      for (let i = 0; i < 10; i++) {
+        rows.push(await config.createRow({ tableId: table._id }))
+      }
+      // rows.sort((a, b) => (a._id! > b._id! ? 1 : -1))
+
+      const createViewResponse = await config.api.viewV2.create()
+      const allRows = (await config.api.viewV2.search(createViewResponse.id))
+        .body.rows
+
+      const firstPageResponse = await config.api.viewV2.search(
+        createViewResponse.id,
+        {
+          paginate: true,
+          limit: 4,
+        }
+      )
+      expect(firstPageResponse.body).toEqual({
+        rows: expect.arrayContaining(allRows.slice(0, 4)),
+        totalRows: 10,
+        hasNextPage: true,
+        bookmark: expect.any(String),
+      })
+
+      const secondPageResponse = await config.api.viewV2.search(
+        createViewResponse.id,
+        {
+          paginate: true,
+          limit: 4,
+          bookmark: firstPageResponse.body.bookmark,
+        }
+      )
+      expect(secondPageResponse.body).toEqual({
+        rows: expect.arrayContaining(allRows.slice(4, 8)),
+        totalRows: 10,
+        hasNextPage: true,
+        bookmark: expect.any(String),
+      })
+
+      const lastPageResponse = await config.api.viewV2.search(
+        createViewResponse.id,
+        {
+          paginate: true,
+          limit: 4,
+          bookmark: secondPageResponse.body.bookmark,
+        }
+      )
+      expect(lastPageResponse.body).toEqual({
+        rows: expect.arrayContaining(allRows.slice(8)),
+        totalRows: 10,
+        hasNextPage: false,
+        bookmark: expect.any(String),
+      })
+    })
+  })
 })
 })
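A minimal paging sketch built on the behaviour the pagination test above asserts (POST /api/v2/views/:viewId/search returning rows, hasNextPage and bookmark). The base URL, missing auth headers, the page size and the searchView wrapper itself are assumptions for illustration, not part of the diff.

interface ViewSearchPage {
  rows: Record<string, any>[]
  hasNextPage: boolean
  bookmark?: string
  totalRows?: number
}

// assumed wrapper around the view search endpoint exercised in the tests above
async function searchView(
  baseUrl: string,
  viewId: string,
  body: { paginate: boolean; limit: number; bookmark?: string }
): Promise<ViewSearchPage> {
  const res = await fetch(`${baseUrl}/api/v2/views/${viewId}/search`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(body),
  })
  return (await res.json()) as ViewSearchPage
}

// walk every page by feeding each response's bookmark into the next request
async function fetchAllViewRows(baseUrl: string, viewId: string) {
  const rows: Record<string, any>[] = []
  let bookmark: string | undefined
  let hasNextPage = true
  while (hasNextPage) {
    const page = await searchView(baseUrl, viewId, {
      paginate: true,
      limit: 4,
      bookmark,
    })
    rows.push(...page.rows)
    bookmark = page.bookmark
    hasNextPage = page.hasNextPage
  }
  return rows
}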
@@ -62,7 +62,7 @@ describe("/v2/views", () => {
       name: generator.name(),
       tableId: config.table!._id!,
       primaryDisplay: generator.word(),
-      query: { allOr: false, equal: { field: "value" } },
+      query: [{ operator: "equal", field: "field", value: "value" }],
       sort: {
         field: "fieldToSort",
         order: SortOrder.DESCENDING,
@@ -190,7 +190,7 @@ describe("/v2/views", () => {
       const tableId = config.table!._id!
       await config.api.viewV2.update({
         ...view,
-        query: { equal: { newField: "thatValue" } },
+        query: [{ operator: "equal", field: "newField", value: "thatValue" }],
       })

       expect(await config.api.table.get(tableId)).toEqual({
@@ -198,7 +198,9 @@ describe("/v2/views", () => {
         views: {
           [view.name]: {
             ...view,
-            query: { equal: { newField: "thatValue" } },
+            query: [
+              { operator: "equal", field: "newField", value: "thatValue" },
+            ],
             schema: expect.anything(),
           },
         },
@@ -216,7 +218,13 @@ describe("/v2/views", () => {
         tableId,
         name: view.name,
         primaryDisplay: generator.word(),
-        query: { equal: { [generator.word()]: generator.word() } },
+        query: [
+          {
+            operator: "equal",
+            field: generator.word(),
+            value: generator.word(),
+          },
+        ],
         sort: {
           field: generator.word(),
           order: SortOrder.DESCENDING,
@@ -285,7 +293,7 @@ describe("/v2/views", () => {
         {
           ...view,
           tableId: generator.guid(),
-          query: { equal: { newField: "thatValue" } },
+          query: [{ operator: "equal", field: "newField", value: "thatValue" }],
         },
         { expectStatus: 404 }
       )
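A minimal sketch of the filter shape the v2 view tests above now use: the query is an array of { operator, field, value } conditions rather than the older { allOr, equal: { ... } } object. The interfaces, the operator union and the example table ID are simplified stand-ins, not the library's real types.

interface ViewFilter {
  operator: "equal" | "notEqual" | "range" | string
  field: string
  value: any
}

interface ViewV2Payload {
  name: string
  tableId: string
  query?: ViewFilter[]
  sort?: { field: string; order?: "ascending" | "descending"; type?: string }
}

// e.g. a view that only shows users whose age equals 40, sorted by name
const payload: ViewV2Payload = {
  name: "over-forty",
  tableId: "ta_users", // hypothetical table ID
  query: [{ operator: "equal", field: "age", value: 40 }],
  sort: { field: "name", order: "ascending", type: "string" },
}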
@@ -34,7 +34,7 @@ router
     "/api/views/:viewName",
     paramResource("viewName"),
     authorized(
-      permissions.PermissionType.VIEW,
+      permissions.PermissionType.TABLE,
       permissions.PermissionLevel.READ
     ),
     rowController.fetchView
@@ -1,11 +1,18 @@
-const setup = require("./utilities")
-const { FilterConditions } = require("../steps/filter")
+import * as setup from "./utilities"
+import { FilterConditions } from "../steps/filter"

 describe("test the filter logic", () => {
-  async function checkFilter(field, condition, value, pass = true) {
-    let res = await setup.runStep(setup.actions.FILTER.stepId,
-      { field, condition, value }
-    )
+  async function checkFilter(
+    field: any,
+    condition: string,
+    value: any,
+    pass = true
+  ) {
+    let res = await setup.runStep(setup.actions.FILTER.stepId, {
+      field,
+      condition,
+      value,
+    })
     expect(res.result).toEqual(pass)
     expect(res.success).toEqual(true)
   }
@@ -36,9 +43,9 @@ describe("test the filter logic", () => {

   it("check date coercion", async () => {
     await checkFilter(
-      (new Date()).toISOString(),
+      new Date().toISOString(),
       FilterConditions.GREATER_THAN,
-      (new Date(-10000)).toISOString(),
+      new Date(-10000).toISOString(),
       true
     )
   })
@@ -6,11 +6,11 @@ import { isDevAppID } from "../db/utils"
 // need this to call directly, so we can get a response
 import { automationQueue } from "./bullboard"
 import { checkTestFlag } from "../utilities/redis"
-import * as utils from "./utils"
 import env from "../environment"
 import { context, db as dbCore } from "@budibase/backend-core"
 import { Automation, Row, AutomationData, AutomationJob } from "@budibase/types"
 import { executeSynchronously } from "../threads/automation"
+import sdk from "../sdk"

 export const TRIGGER_DEFINITIONS = definitions
 const JOB_OPTS = {
@@ -142,7 +142,7 @@ export async function rebootTrigger() {
   let automations = await getAllAutomations()
   let rebootEvents = []
   for (let automation of automations) {
-    if (utils.isRebootTrigger(automation)) {
+    if (sdk.automations.isReboot(automation)) {
       const job = {
         automation,
         event: {
@ -16,13 +16,14 @@ import {
|
||||||
} from "@budibase/types"
|
} from "@budibase/types"
|
||||||
import sdk from "../sdk"
|
import sdk from "../sdk"
|
||||||
|
|
||||||
const REBOOT_CRON = "@reboot"
|
|
||||||
const WH_STEP_ID = definitions.WEBHOOK.stepId
|
const WH_STEP_ID = definitions.WEBHOOK.stepId
|
||||||
const CRON_STEP_ID = definitions.CRON.stepId
|
|
||||||
const Runner = new Thread(ThreadType.AUTOMATION)
|
const Runner = new Thread(ThreadType.AUTOMATION)
|
||||||
|
|
||||||
function loggingArgs(job: AutomationJob) {
|
function loggingArgs(
|
||||||
return [
|
job: AutomationJob,
|
||||||
|
timing?: { start: number; complete?: boolean }
|
||||||
|
) {
|
||||||
|
const logs: any[] = [
|
||||||
{
|
{
|
||||||
_logKey: "automation",
|
_logKey: "automation",
|
||||||
trigger: job.data.automation.definition.trigger.event,
|
trigger: job.data.automation.definition.trigger.event,
|
||||||
|
@ -32,24 +33,53 @@ function loggingArgs(job: AutomationJob) {
|
||||||
jobId: job.id,
|
jobId: job.id,
|
||||||
},
|
},
|
||||||
]
|
]
|
||||||
|
if (timing?.start) {
|
||||||
|
logs.push({
|
||||||
|
_logKey: "startTime",
|
||||||
|
start: timing.start,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
if (timing?.start && timing?.complete) {
|
||||||
|
const end = new Date().getTime()
|
||||||
|
const duration = end - timing.start
|
||||||
|
logs.push({
|
||||||
|
_logKey: "endTime",
|
||||||
|
end,
|
||||||
|
})
|
||||||
|
logs.push({
|
||||||
|
_logKey: "duration",
|
||||||
|
duration,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
return logs
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function processEvent(job: AutomationJob) {
|
export async function processEvent(job: AutomationJob) {
|
||||||
const appId = job.data.event.appId!
|
const appId = job.data.event.appId!
|
||||||
const automationId = job.data.automation._id!
|
const automationId = job.data.automation._id!
|
||||||
|
const start = new Date().getTime()
|
||||||
const task = async () => {
|
const task = async () => {
|
||||||
try {
|
try {
|
||||||
// need to actually await these so that an error can be captured properly
|
// need to actually await these so that an error can be captured properly
|
||||||
console.log("automation running", ...loggingArgs(job))
|
console.log("automation running", ...loggingArgs(job, { start }))
|
||||||
|
|
||||||
const runFn = () => Runner.run(job)
|
const runFn = () => Runner.run(job)
|
||||||
const result = await quotas.addAutomation(runFn, {
|
const result = await quotas.addAutomation(runFn, {
|
||||||
automationId,
|
automationId,
|
||||||
})
|
})
|
||||||
console.log("automation completed", ...loggingArgs(job))
|
const end = new Date().getTime()
|
||||||
|
const duration = end - start
|
||||||
|
console.log(
|
||||||
|
"automation completed",
|
||||||
|
...loggingArgs(job, { start, complete: true })
|
||||||
|
)
|
||||||
return result
|
return result
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
console.error(`automation was unable to run`, err, ...loggingArgs(job))
|
console.error(
|
||||||
|
`automation was unable to run`,
|
||||||
|
err,
|
||||||
|
...loggingArgs(job, { start, complete: true })
|
||||||
|
)
|
||||||
return { err }
|
return { err }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -128,19 +158,6 @@ export async function clearMetadata() {
|
||||||
await db.bulkDocs(automationMetadata)
|
await db.bulkDocs(automationMetadata)
|
||||||
}
|
}
|
||||||
|
|
||||||
export function isCronTrigger(auto: Automation) {
|
|
||||||
return (
|
|
||||||
auto &&
|
|
||||||
auto.definition.trigger &&
|
|
||||||
auto.definition.trigger.stepId === CRON_STEP_ID
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
export function isRebootTrigger(auto: Automation) {
|
|
||||||
const trigger = auto ? auto.definition.trigger : null
|
|
||||||
return isCronTrigger(auto) && trigger?.inputs.cron === REBOOT_CRON
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* This function handles checking of any cron jobs that need to be enabled/updated.
|
* This function handles checking of any cron jobs that need to be enabled/updated.
|
||||||
* @param {string} appId The ID of the app in which we are checking for webhooks
|
* @param {string} appId The ID of the app in which we are checking for webhooks
|
||||||
|
@ -148,13 +165,13 @@ export function isRebootTrigger(auto: Automation) {
|
||||||
*/
|
*/
|
||||||
export async function enableCronTrigger(appId: any, automation: Automation) {
|
export async function enableCronTrigger(appId: any, automation: Automation) {
|
||||||
const trigger = automation ? automation.definition.trigger : null
|
const trigger = automation ? automation.definition.trigger : null
|
||||||
|
const validCron = sdk.automations.isCron(automation) && trigger?.inputs.cron
|
||||||
|
const needsCreated =
|
||||||
|
!sdk.automations.isReboot(automation) &&
|
||||||
|
!sdk.automations.disabled(automation)
|
||||||
|
|
||||||
// need to create cron job
|
// need to create cron job
|
||||||
if (
|
if (validCron && needsCreated) {
|
||||||
isCronTrigger(automation) &&
|
|
||||||
!isRebootTrigger(automation) &&
|
|
||||||
trigger?.inputs.cron
|
|
||||||
) {
|
|
||||||
// make a job id rather than letting Bull decide, makes it easier to handle on way out
|
// make a job id rather than letting Bull decide, makes it easier to handle on way out
|
||||||
const jobId = `${appId}_cron_${newid()}`
|
const jobId = `${appId}_cron_${newid()}`
|
||||||
const job: any = await automationQueue.add(
|
const job: any = await automationQueue.add(
|
||||||
|
|
|
@@ -1,5 +1,7 @@
 import newid from "./newid"
 import { db as dbCore } from "@budibase/backend-core"
+import { DocumentType, VirtualDocumentType } from "@budibase/types"
+export { DocumentType, VirtualDocumentType } from "@budibase/types"

 type Optional = string | null

@@ -19,7 +21,6 @@ export const BudibaseInternalDB = {

 export const SEPARATOR = dbCore.SEPARATOR
 export const StaticDatabases = dbCore.StaticDatabases
-export const DocumentType = dbCore.DocumentType
 export const APP_PREFIX = dbCore.APP_PREFIX
 export const APP_DEV_PREFIX = dbCore.APP_DEV_PREFIX
 export const isDevAppID = dbCore.isDevAppID
@@ -284,10 +285,22 @@ export function getMultiIDParams(ids: string[]) {
  * @returns {string} The new view ID which the view doc can be stored under.
  */
 export function generateViewID(tableId: string) {
-  return `${tableId}${SEPARATOR}${newid()}`
+  return `${
+    VirtualDocumentType.VIEW
+  }${SEPARATOR}${tableId}${SEPARATOR}${newid()}`
+}
+
+export function isViewID(viewId: string) {
+  return viewId?.split(SEPARATOR)[0] === VirtualDocumentType.VIEW
 }

 export function extractViewInfoFromID(viewId: string) {
+  if (!isViewID(viewId)) {
+    throw new Error("Unable to extract table ID, is not a view ID")
+  }
+  const split = viewId.split(SEPARATOR)
+  split.shift()
+  viewId = split.join(SEPARATOR)
   const regex = new RegExp(`^(?<tableId>.+)${SEPARATOR}([^${SEPARATOR}]+)$`)
   const res = regex.exec(viewId)
   return {
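A round-trip sketch for the view ID helpers added above. Treating SEPARATOR as "_", using "view" as the VirtualDocumentType.VIEW prefix, and passing the random suffix in as a parameter are all assumptions for illustration; the real generateViewID calls newid() itself.

const SEPARATOR = "_" // assumed value of dbCore.SEPARATOR
const VIEW_PREFIX = "view" // assumed value of VirtualDocumentType.VIEW

function generateViewID(tableId: string, id: string) {
  return `${VIEW_PREFIX}${SEPARATOR}${tableId}${SEPARATOR}${id}`
}

function isViewID(viewId: string) {
  return viewId.split(SEPARATOR)[0] === VIEW_PREFIX
}

function extractTableId(viewId: string) {
  const split = viewId.split(SEPARATOR)
  split.shift() // drop the "view" prefix
  const rest = split.join(SEPARATOR)
  // everything up to the last separator is the table ID, as in the regex above
  return rest.slice(0, rest.lastIndexOf(SEPARATOR))
}

const viewId = generateViewID(`ta${SEPARATOR}abc123`, "def456")
// viewId === "view_ta_abc123_def456"
isViewID(viewId) // true
extractTableId(viewId) // "ta_abc123"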
@@ -315,7 +315,7 @@ class InternalBuilder {
   addSorting(query: KnexQuery, json: QueryJson): KnexQuery {
     let { sort, paginate } = json
     const table = json.meta?.table
-    if (sort) {
+    if (sort && Object.keys(sort || {}).length > 0) {
       for (let [key, value] of Object.entries(sort)) {
         const direction =
           value.direction === SortDirection.ASCENDING ? "asc" : "desc"
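A small sketch of the sorting guard introduced above: ORDER BY clauses are only added when the sort object actually contains keys. The primary-key fallback shown here is an assumption that mirrors the MS SQL "sort by primary key" test added later in this diff, not the exact builder code.

import { Knex } from "knex"

type SortSpec = Record<string, { direction: "ASCENDING" | "DESCENDING" }>

function applySorting(
  query: Knex.QueryBuilder,
  sort: SortSpec | undefined,
  primaryKey = "id"
): Knex.QueryBuilder {
  if (sort && Object.keys(sort).length > 0) {
    for (const [key, value] of Object.entries(sort)) {
      const direction = value.direction === "ASCENDING" ? "asc" : "desc"
      query = query.orderBy(key, direction)
    }
  } else {
    // assumed fallback when no sort is supplied
    query = query.orderBy(primaryKey, "asc")
  }
  return query
}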
@ -93,6 +93,21 @@ const SCHEMA: Integration = {
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const defaultTypeCasting = function (field: any, next: any) {
|
||||||
|
if (
|
||||||
|
field.type == "DATETIME" ||
|
||||||
|
field.type === "DATE" ||
|
||||||
|
field.type === "TIMESTAMP" ||
|
||||||
|
field.type === "LONGLONG"
|
||||||
|
) {
|
||||||
|
return field.string()
|
||||||
|
}
|
||||||
|
if (field.type === "BIT" && field.length === 1) {
|
||||||
|
return field.buffer()?.[0]
|
||||||
|
}
|
||||||
|
return next()
|
||||||
|
}
|
||||||
|
|
||||||
export function bindingTypeCoerce(bindings: any[]) {
|
export function bindingTypeCoerce(bindings: any[]) {
|
||||||
for (let i = 0; i < bindings.length; i++) {
|
for (let i = 0; i < bindings.length; i++) {
|
||||||
const binding = bindings[i]
|
const binding = bindings[i]
|
||||||
|
@ -147,21 +162,8 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
|
||||||
delete config.rejectUnauthorized
|
delete config.rejectUnauthorized
|
||||||
this.config = {
|
this.config = {
|
||||||
...config,
|
...config,
|
||||||
|
typeCast: defaultTypeCasting,
|
||||||
multipleStatements: true,
|
multipleStatements: true,
|
||||||
typeCast: function (field: any, next: any) {
|
|
||||||
if (
|
|
||||||
field.type == "DATETIME" ||
|
|
||||||
field.type === "DATE" ||
|
|
||||||
field.type === "TIMESTAMP" ||
|
|
||||||
field.type === "LONGLONG"
|
|
||||||
) {
|
|
||||||
return field.string()
|
|
||||||
}
|
|
||||||
if (field.type === "BIT" && field.length === 1) {
|
|
||||||
return field.buffer()?.[0]
|
|
||||||
}
|
|
||||||
return next()
|
|
||||||
},
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -194,6 +196,37 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
|
||||||
return `concat(${parts.join(", ")})`
|
return `concat(${parts.join(", ")})`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
defineTypeCastingFromSchema(schema: {
|
||||||
|
[key: string]: { name: string; type: string }
|
||||||
|
}): void {
|
||||||
|
if (!schema) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
this.config.typeCast = function (field: any, next: any) {
|
||||||
|
if (schema[field.name]?.name === field.name) {
|
||||||
|
if (["LONGLONG", "NEWDECIMAL", "DECIMAL"].includes(field.type)) {
|
||||||
|
if (schema[field.name]?.type === "number") {
|
||||||
|
const value = field.string()
|
||||||
|
return value ? Number(value) : null
|
||||||
|
} else {
|
||||||
|
return field.string()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (
|
||||||
|
field.type == "DATETIME" ||
|
||||||
|
field.type === "DATE" ||
|
||||||
|
field.type === "TIMESTAMP"
|
||||||
|
) {
|
||||||
|
return field.string()
|
||||||
|
}
|
||||||
|
if (field.type === "BIT" && field.length === 1) {
|
||||||
|
return field.buffer()?.[0]
|
||||||
|
}
|
||||||
|
return next()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
async connect() {
|
async connect() {
|
||||||
this.client = await mysql.createConnection(this.config)
|
this.client = await mysql.createConnection(this.config)
|
||||||
}
|
}
|
||||||
|
@ -204,7 +237,10 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
|
||||||
|
|
||||||
async internalQuery(
|
async internalQuery(
|
||||||
query: SqlQuery,
|
query: SqlQuery,
|
||||||
opts: { connect?: boolean; disableCoercion?: boolean } = {
|
opts: {
|
||||||
|
connect?: boolean
|
||||||
|
disableCoercion?: boolean
|
||||||
|
} = {
|
||||||
connect: true,
|
connect: true,
|
||||||
disableCoercion: false,
|
disableCoercion: false,
|
||||||
}
|
}
|
||||||
|
|
|
@ -26,6 +26,12 @@ function generateReadJson({
|
||||||
filters: filters || {},
|
filters: filters || {},
|
||||||
sort: sort || {},
|
sort: sort || {},
|
||||||
paginate: paginate || {},
|
paginate: paginate || {},
|
||||||
|
meta: {
|
||||||
|
table: {
|
||||||
|
name: table || TABLE_NAME,
|
||||||
|
primary: ["id"],
|
||||||
|
},
|
||||||
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -636,4 +642,19 @@ describe("SQL query builder", () => {
|
||||||
sql: `select * from (select * from (select * from \"test\" where LOWER(\"test\".\"name\") LIKE :1) where rownum <= :2) \"test\"`,
|
sql: `select * from (select * from (select * from \"test\" where LOWER(\"test\".\"name\") LIKE :1) where rownum <= :2) \"test\"`,
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
|
it("should sort SQL Server tables by the primary key if no sort data is provided", () => {
|
||||||
|
let query = new Sql(SqlClient.MS_SQL, limit)._query(
|
||||||
|
generateReadJson({
|
||||||
|
sort: {},
|
||||||
|
paginate: {
|
||||||
|
limit: 10,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
)
|
||||||
|
expect(query).toEqual({
|
||||||
|
bindings: [10],
|
||||||
|
sql: `select * from (select top (@p0) * from [test] order by [test].[id] asc) as [test]`,
|
||||||
|
})
|
||||||
|
})
|
||||||
})
|
})
|
||||||
|
|
|
@ -1,9 +0,0 @@
|
||||||
import { Ctx, Row } from "@budibase/types"
|
|
||||||
|
|
||||||
export default async (ctx: Ctx<Row>, next: any) => {
|
|
||||||
if (ctx.request.body._viewId) {
|
|
||||||
return ctx.throw(400, "Table row endpoints cannot contain view info")
|
|
||||||
}
|
|
||||||
|
|
||||||
return next()
|
|
||||||
}
|
|
|
@ -1,83 +0,0 @@
|
||||||
import { generator } from "@budibase/backend-core/tests"
|
|
||||||
import { BBRequest, FieldType, Row, Table } from "@budibase/types"
|
|
||||||
import { Next } from "koa"
|
|
||||||
import * as utils from "../../db/utils"
|
|
||||||
import noViewDataMiddleware from "../noViewData"
|
|
||||||
|
|
||||||
class TestConfiguration {
|
|
||||||
next: Next
|
|
||||||
throw: jest.Mock<(status: number, message: string) => never>
|
|
||||||
middleware: typeof noViewDataMiddleware
|
|
||||||
params: Record<string, any>
|
|
||||||
request?: Pick<BBRequest<Row>, "body">
|
|
||||||
|
|
||||||
constructor() {
|
|
||||||
this.next = jest.fn()
|
|
||||||
this.throw = jest.fn()
|
|
||||||
this.params = {}
|
|
||||||
|
|
||||||
this.middleware = noViewDataMiddleware
|
|
||||||
}
|
|
||||||
|
|
||||||
executeMiddleware(ctxRequestBody: Row) {
|
|
||||||
this.request = {
|
|
||||||
body: ctxRequestBody,
|
|
||||||
}
|
|
||||||
return this.middleware(
|
|
||||||
{
|
|
||||||
request: this.request as any,
|
|
||||||
throw: this.throw as any,
|
|
||||||
params: this.params,
|
|
||||||
} as any,
|
|
||||||
this.next
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
afterEach() {
|
|
||||||
jest.clearAllMocks()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
describe("noViewData middleware", () => {
|
|
||||||
let config: TestConfiguration
|
|
||||||
|
|
||||||
beforeEach(() => {
|
|
||||||
config = new TestConfiguration()
|
|
||||||
})
|
|
||||||
|
|
||||||
afterEach(() => {
|
|
||||||
config.afterEach()
|
|
||||||
})
|
|
||||||
|
|
||||||
const getRandomData = () => ({
|
|
||||||
_id: generator.guid(),
|
|
||||||
name: generator.name(),
|
|
||||||
age: generator.age(),
|
|
||||||
address: generator.address(),
|
|
||||||
})
|
|
||||||
|
|
||||||
it("it should pass without view id data", async () => {
|
|
||||||
const data = getRandomData()
|
|
||||||
await config.executeMiddleware({
|
|
||||||
...data,
|
|
||||||
})
|
|
||||||
|
|
||||||
expect(config.next).toBeCalledTimes(1)
|
|
||||||
expect(config.throw).not.toBeCalled()
|
|
||||||
})
|
|
||||||
|
|
||||||
it("it should throw an error if _viewid is provided", async () => {
|
|
||||||
const data = getRandomData()
|
|
||||||
await config.executeMiddleware({
|
|
||||||
_viewId: generator.guid(),
|
|
||||||
...data,
|
|
||||||
})
|
|
||||||
|
|
||||||
expect(config.throw).toBeCalledTimes(1)
|
|
||||||
expect(config.throw).toBeCalledWith(
|
|
||||||
400,
|
|
||||||
"Table row endpoints cannot contain view info"
|
|
||||||
)
|
|
||||||
expect(config.next).not.toBeCalled()
|
|
||||||
})
|
|
||||||
})
|
|
|
@ -117,7 +117,7 @@ describe("trimViewRowInfo middleware", () => {
|
||||||
})
|
})
|
||||||
|
|
||||||
expect(config.request?.body).toEqual(data)
|
expect(config.request?.body).toEqual(data)
|
||||||
expect(config.params.tableId).toEqual(table._id)
|
expect(config.params.sourceId).toEqual(table._id)
|
||||||
|
|
||||||
expect(config.next).toBeCalledTimes(1)
|
expect(config.next).toBeCalledTimes(1)
|
||||||
expect(config.throw).not.toBeCalled()
|
expect(config.throw).not.toBeCalled()
|
||||||
|
@ -143,32 +143,9 @@ describe("trimViewRowInfo middleware", () => {
|
||||||
name: data.name,
|
name: data.name,
|
||||||
address: data.address,
|
address: data.address,
|
||||||
})
|
})
|
||||||
expect(config.params.tableId).toEqual(table._id)
|
expect(config.params.sourceId).toEqual(table._id)
|
||||||
|
|
||||||
expect(config.next).toBeCalledTimes(1)
|
expect(config.next).toBeCalledTimes(1)
|
||||||
expect(config.throw).not.toBeCalled()
|
expect(config.throw).not.toBeCalled()
|
||||||
})
|
})
|
||||||
|
|
||||||
it("it should throw an error if no viewid is provided on the body", async () => {
|
|
||||||
const data = getRandomData()
|
|
||||||
await config.executeMiddleware(viewId, {
|
|
||||||
...data,
|
|
||||||
})
|
|
||||||
|
|
||||||
expect(config.throw).toBeCalledTimes(1)
|
|
||||||
expect(config.throw).toBeCalledWith(400, "_viewId is required")
|
|
||||||
expect(config.next).not.toBeCalled()
|
|
||||||
})
|
|
||||||
|
|
||||||
it("it should throw an error if no viewid is provided on the parameters", async () => {
|
|
||||||
const data = getRandomData()
|
|
||||||
await config.executeMiddleware(undefined as any, {
|
|
||||||
_viewId: viewId,
|
|
||||||
...data,
|
|
||||||
})
|
|
||||||
|
|
||||||
expect(config.throw).toBeCalledTimes(1)
|
|
||||||
expect(config.throw).toBeCalledWith(400, "viewId path is required")
|
|
||||||
expect(config.next).not.toBeCalled()
|
|
||||||
})
|
|
||||||
})
|
})
|
||||||
|
|
|
@@ -3,26 +3,35 @@ import * as utils from "../db/utils"
 import sdk from "../sdk"
 import { db } from "@budibase/backend-core"
 import { Next } from "koa"
+import { getTableId } from "../api/controllers/row/utils"

 export default async (ctx: Ctx<Row>, next: Next) => {
   const { body } = ctx.request
-  const { _viewId: viewId } = body
+  let { _viewId: viewId } = body

+  const possibleViewId = getTableId(ctx)
+  if (utils.isViewID(possibleViewId)) {
+    viewId = possibleViewId
+  }
+
+  // nothing to do, it is not a view (just a table ID)
   if (!viewId) {
-    return ctx.throw(400, "_viewId is required")
+    return next()
   }

-  if (!ctx.params.viewId) {
-    return ctx.throw(400, "viewId path is required")
+  const { tableId } = utils.extractViewInfoFromID(viewId)
+
+  // don't need to trim delete requests
+  if (ctx?.method?.toLowerCase() !== "delete") {
+    const { _viewId, ...trimmedView } = await trimViewFields(
+      viewId,
+      tableId,
+      body
+    )
+    ctx.request.body = trimmedView
   }

-  const { tableId } = utils.extractViewInfoFromID(ctx.params.viewId)
-  const { _viewId, ...trimmedView } = await trimViewFields(
-    viewId,
-    tableId,
-    body
-  )
-  ctx.request.body = trimmedView
-  ctx.params.tableId = tableId
+  ctx.params.sourceId = tableId

   return next()
 }
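A sketch of the trimming the middleware above performs, pulled out of Koa: only _id, _rev and the columns the view schema exposes survive, which is what the earlier row tests assert when age and jobTitle come back undefined. The helper and the view shape here are simplified stand-ins for the real trimViewFields.

interface ViewV2Like {
  id: string
  schema?: Record<string, object>
}

function trimRowToViewFields(view: ViewV2Like, row: Record<string, any>) {
  if (!view.schema) {
    return { ...row }
  }
  const allowed = new Set(["_id", "_rev", ...Object.keys(view.schema)])
  return Object.fromEntries(
    Object.entries(row).filter(([key]) => allowed.has(key))
  )
}

// a view exposing only "name" drops "age" and "jobTitle", matching the
// expectations in the row tests earlier in this diff
const trimmed = trimRowToViewFields(
  { id: "view_ta_users_abc", schema: { name: {} } },
  { _id: "ro_1", name: "Alice", age: 25, jobTitle: "Engineer" }
)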
@@ -0,0 +1,38 @@
+import { context } from "@budibase/backend-core"
+import { Automation, AutomationState, DocumentType } from "@budibase/types"
+import { definitions } from "../../../automations/triggerInfo"
+
+const REBOOT_CRON = "@reboot"
+
+export async function exists(automationId: string) {
+  if (!automationId?.startsWith(DocumentType.AUTOMATION)) {
+    throw new Error("Invalid automation ID.")
+  }
+  const db = context.getAppDB()
+  return db.docExists(automationId)
+}
+
+export async function get(automationId: string) {
+  const db = context.getAppDB()
+  return (await db.get(automationId)) as Automation
+}
+
+export function disabled(automation: Automation) {
+  return automation.state === AutomationState.DISABLED || !hasSteps(automation)
+}
+
+export function isCron(automation: Automation) {
+  return (
+    automation?.definition.trigger &&
+    automation?.definition.trigger.stepId === definitions.CRON.stepId
+  )
+}
+
+export function isReboot(automation: Automation) {
+  const trigger = automation?.definition.trigger
+  return isCron(automation) && trigger?.inputs.cron === REBOOT_CRON
+}
+
+export function hasSteps(automation: Automation) {
+  return automation?.definition?.steps?.length > 0
+}
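A usage sketch for the helpers added above, matching how the trigger code elsewhere in this diff gates cron-job creation. The sdk.automations namespace and the Automation shape come from the surrounding changes; the wrapper function itself and its import path are illustrative.

import sdk from "../sdk" // relative path as used by the server code in this diff
import { Automation } from "@budibase/types"

// only enabled, non-reboot cron automations with a cron expression should be queued
export function shouldQueueCron(automation: Automation) {
  const trigger = automation?.definition?.trigger
  const validCron = sdk.automations.isCron(automation) && trigger?.inputs?.cron
  const needsCreated =
    !sdk.automations.isReboot(automation) &&
    !sdk.automations.disabled(automation)
  return Boolean(validCron && needsCreated)
}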
@@ -1,7 +1,9 @@
 import * as webhook from "./webhook"
 import * as utils from "./utils"
+import * as automations from "./automations"

 export default {
   webhook,
   utils,
+  ...automations,
 }
@ -1,23 +1,9 @@
|
||||||
import { SearchFilters, SortOrder, SortType } from "@budibase/types"
|
import { SearchFilters, SearchParams } from "@budibase/types"
|
||||||
import { isExternalTable } from "../../../integrations/utils"
|
import { isExternalTable } from "../../../integrations/utils"
|
||||||
import * as internal from "./search/internal"
|
import * as internal from "./search/internal"
|
||||||
import * as external from "./search/external"
|
import * as external from "./search/external"
|
||||||
import { Format } from "../../../api/controllers/view/exporters"
|
import { Format } from "../../../api/controllers/view/exporters"
|
||||||
|
|
||||||
export interface SearchParams {
|
|
||||||
tableId: string
|
|
||||||
paginate?: boolean
|
|
||||||
query: SearchFilters
|
|
||||||
bookmark?: string
|
|
||||||
limit?: number
|
|
||||||
sort?: string
|
|
||||||
sortOrder?: SortOrder
|
|
||||||
sortType?: SortType
|
|
||||||
version?: string
|
|
||||||
disableEscaping?: boolean
|
|
||||||
fields?: string[]
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface ViewParams {
|
export interface ViewParams {
|
||||||
calculation: string
|
calculation: string
|
||||||
group: string
|
group: string
|
||||||
|
|
|
@ -6,6 +6,7 @@ import {
|
||||||
IncludeRelationship,
|
IncludeRelationship,
|
||||||
Row,
|
Row,
|
||||||
SearchFilters,
|
SearchFilters,
|
||||||
|
SearchParams,
|
||||||
} from "@budibase/types"
|
} from "@budibase/types"
|
||||||
import * as exporters from "../../../../api/controllers/view/exporters"
|
import * as exporters from "../../../../api/controllers/view/exporters"
|
||||||
import sdk from "../../../../sdk"
|
import sdk from "../../../../sdk"
|
||||||
|
@ -13,7 +14,7 @@ import { handleRequest } from "../../../../api/controllers/row/external"
|
||||||
import { breakExternalTableId } from "../../../../integrations/utils"
|
import { breakExternalTableId } from "../../../../integrations/utils"
|
||||||
import { cleanExportRows } from "../utils"
|
import { cleanExportRows } from "../utils"
|
||||||
import { utils } from "@budibase/shared-core"
|
import { utils } from "@budibase/shared-core"
|
||||||
import { ExportRowsParams, ExportRowsResult, SearchParams } from "../search"
|
import { ExportRowsParams, ExportRowsResult } from "../search"
|
||||||
import { HTTPError, db } from "@budibase/backend-core"
|
import { HTTPError, db } from "@budibase/backend-core"
|
||||||
import pick from "lodash/pick"
|
import pick from "lodash/pick"
|
||||||
|
|
||||||
|
|
|
@ -12,7 +12,7 @@ import {
|
||||||
} from "../../../../db/utils"
|
} from "../../../../db/utils"
|
||||||
import { getGlobalUsersFromMetadata } from "../../../../utilities/global"
|
import { getGlobalUsersFromMetadata } from "../../../../utilities/global"
|
||||||
import { outputProcessing } from "../../../../utilities/rowProcessor"
|
import { outputProcessing } from "../../../../utilities/rowProcessor"
|
||||||
import { Database, Row, Table } from "@budibase/types"
|
import { Database, Row, Table, SearchParams } from "@budibase/types"
|
||||||
import { cleanExportRows } from "../utils"
|
import { cleanExportRows } from "../utils"
|
||||||
import {
|
import {
|
||||||
Format,
|
Format,
|
||||||
|
@ -28,7 +28,7 @@ import {
|
||||||
getFromMemoryDoc,
|
getFromMemoryDoc,
|
||||||
} from "../../../../api/controllers/view/utils"
|
} from "../../../../api/controllers/view/utils"
|
||||||
import sdk from "../../../../sdk"
|
import sdk from "../../../../sdk"
|
||||||
import { ExportRowsParams, ExportRowsResult, SearchParams } from "../search"
|
import { ExportRowsParams, ExportRowsResult } from "../search"
|
||||||
import pick from "lodash/pick"
|
import pick from "lodash/pick"
|
||||||
|
|
||||||
export async function search(options: SearchParams) {
|
export async function search(options: SearchParams) {
|
||||||
|
|
|
@ -1,8 +1,15 @@
|
||||||
import { GenericContainer } from "testcontainers"
|
import { GenericContainer } from "testcontainers"
|
||||||
|
|
||||||
import { Datasource, FieldType, Row, SourceName, Table } from "@budibase/types"
|
import {
|
||||||
|
Datasource,
|
||||||
|
FieldType,
|
||||||
|
Row,
|
||||||
|
SourceName,
|
||||||
|
Table,
|
||||||
|
SearchParams,
|
||||||
|
} from "@budibase/types"
|
||||||
|
|
||||||
import TestConfiguration from "../../../../../tests/utilities/TestConfiguration"
|
import TestConfiguration from "../../../../../tests/utilities/TestConfiguration"
|
||||||
import { SearchParams } from "../../search"
|
|
||||||
import { search } from "../external"
|
import { search } from "../external"
|
||||||
import {
|
import {
|
||||||
expectAnyExternalColsAttributes,
|
expectAnyExternalColsAttributes,
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
import { FieldType, Row, Table } from "@budibase/types"
|
import { FieldType, Row, Table, SearchParams } from "@budibase/types"
|
||||||
import TestConfiguration from "../../../../../tests/utilities/TestConfiguration"
|
import TestConfiguration from "../../../../../tests/utilities/TestConfiguration"
|
||||||
import { SearchParams } from "../../search"
|
|
||||||
import { search } from "../internal"
|
import { search } from "../internal"
|
||||||
import {
|
import {
|
||||||
expectAnyInternalColsAttributes,
|
expectAnyInternalColsAttributes,
|
||||||
|
|
|
@ -1,17 +1,14 @@
|
||||||
import { HTTPError, context } from "@budibase/backend-core"
|
import { context, HTTPError } from "@budibase/backend-core"
|
||||||
import { FieldSchema, TableSchema, View, ViewV2 } from "@budibase/types"
|
import { FieldSchema, TableSchema, View, ViewV2 } from "@budibase/types"
|
||||||
|
|
||||||
import sdk from "../../../sdk"
|
import sdk from "../../../sdk"
|
||||||
import * as utils from "../../../db/utils"
|
import * as utils from "../../../db/utils"
|
||||||
import merge from "lodash/merge"
|
|
||||||
|
|
||||||
export async function get(viewId: string): Promise<ViewV2 | undefined> {
|
export async function get(viewId: string): Promise<ViewV2 | undefined> {
|
||||||
const { tableId } = utils.extractViewInfoFromID(viewId)
|
const { tableId } = utils.extractViewInfoFromID(viewId)
|
||||||
const table = await sdk.tables.getTable(tableId)
|
const table = await sdk.tables.getTable(tableId)
|
||||||
const views = Object.values(table.views!)
|
const views = Object.values(table.views!)
|
||||||
const view = views.find(v => isV2(v) && v.id === viewId) as ViewV2 | undefined
|
return views.find(v => isV2(v) && v.id === viewId) as ViewV2 | undefined
|
||||||
|
|
||||||
return view
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function create(
|
export async function create(
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
import { PatchRowRequest } from "@budibase/types"
|
import { PatchRowRequest, SaveRowRequest, Row } from "@budibase/types"
|
||||||
import TestConfiguration from "../TestConfiguration"
|
import TestConfiguration from "../TestConfiguration"
|
||||||
import { TestAPI } from "./base"
|
import { TestAPI } from "./base"
|
||||||
|
|
||||||
|
@ -8,12 +8,12 @@ export class RowAPI extends TestAPI {
|
||||||
}
|
}
|
||||||
|
|
||||||
get = async (
|
get = async (
|
||||||
tableId: string,
|
sourceId: string,
|
||||||
rowId: string,
|
rowId: string,
|
||||||
{ expectStatus } = { expectStatus: 200 }
|
{ expectStatus } = { expectStatus: 200 }
|
||||||
) => {
|
) => {
|
||||||
const request = this.request
|
const request = this.request
|
||||||
.get(`/api/${tableId}/rows/${rowId}`)
|
.get(`/api/${sourceId}/rows/${rowId}`)
|
||||||
.set(this.config.defaultHeaders())
|
.set(this.config.defaultHeaders())
|
||||||
.expect(expectStatus)
|
.expect(expectStatus)
|
||||||
if (expectStatus !== 404) {
|
if (expectStatus !== 404) {
|
||||||
|
@ -22,16 +22,43 @@ export class RowAPI extends TestAPI {
|
||||||
return request
|
return request
|
||||||
}
|
}
|
||||||
|
|
||||||
|
save = async (
|
||||||
|
sourceId: string,
|
||||||
|
row: SaveRowRequest,
|
||||||
|
{ expectStatus } = { expectStatus: 200 }
|
||||||
|
): Promise<Row> => {
|
||||||
|
const resp = await this.request
|
||||||
|
.post(`/api/${sourceId}/rows`)
|
||||||
|
.send(row)
|
||||||
|
.set(this.config.defaultHeaders())
|
||||||
|
.expect("Content-Type", /json/)
|
||||||
|
.expect(expectStatus)
|
||||||
|
return resp.body as Row
|
||||||
|
}
|
||||||
|
|
||||||
patch = async (
|
patch = async (
|
||||||
tableId: string,
|
sourceId: string,
|
||||||
row: PatchRowRequest,
|
row: PatchRowRequest,
|
||||||
{ expectStatus } = { expectStatus: 200 }
|
{ expectStatus } = { expectStatus: 200 }
|
||||||
) => {
|
) => {
|
||||||
return this.request
|
return this.request
|
||||||
.patch(`/api/${tableId}/rows`)
|
.patch(`/api/${sourceId}/rows`)
|
||||||
.send(row)
|
.send(row)
|
||||||
.set(this.config.defaultHeaders())
|
.set(this.config.defaultHeaders())
|
||||||
.expect("Content-Type", /json/)
|
.expect("Content-Type", /json/)
|
||||||
.expect(expectStatus)
|
.expect(expectStatus)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
delete = async (
|
||||||
|
sourceId: string,
|
||||||
|
rows: Row[],
|
||||||
|
{ expectStatus } = { expectStatus: 200 }
|
||||||
|
) => {
|
||||||
|
return this.request
|
||||||
|
.delete(`/api/${sourceId}/rows`)
|
||||||
|
.send({ rows })
|
||||||
|
.set(this.config.defaultHeaders())
|
||||||
|
.expect("Content-Type", /json/)
|
||||||
|
.expect(expectStatus)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,13 +1,8 @@
|
||||||
import {
|
import {
|
||||||
CreateViewRequest,
|
CreateViewRequest,
|
||||||
SortOrder,
|
|
||||||
SortType,
|
|
||||||
UpdateViewRequest,
|
UpdateViewRequest,
|
||||||
DeleteRowRequest,
|
|
||||||
PatchRowRequest,
|
|
||||||
PatchRowResponse,
|
|
||||||
Row,
|
|
||||||
ViewV2,
|
ViewV2,
|
||||||
|
SearchViewRowRequest,
|
||||||
} from "@budibase/types"
|
} from "@budibase/types"
|
||||||
import TestConfiguration from "../TestConfiguration"
|
import TestConfiguration from "../TestConfiguration"
|
||||||
import { TestAPI } from "./base"
|
import { TestAPI } from "./base"
|
||||||
|
@ -81,75 +76,14 @@ export class ViewV2API extends TestAPI {
|
||||||
|
|
||||||
search = async (
|
search = async (
|
||||||
viewId: string,
|
viewId: string,
|
||||||
options?: {
|
params?: SearchViewRowRequest,
|
||||||
sort: {
|
|
||||||
column: string
|
|
||||||
order?: SortOrder
|
|
||||||
type?: SortType
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{ expectStatus } = { expectStatus: 200 }
|
{ expectStatus } = { expectStatus: 200 }
|
||||||
) => {
|
) => {
|
||||||
const qs: [string, any][] = []
|
|
||||||
if (options?.sort.column) {
|
|
||||||
qs.push(["sort_column", options.sort.column])
|
|
||||||
}
|
|
||||||
if (options?.sort.order) {
|
|
||||||
qs.push(["sort_order", options.sort.order])
|
|
||||||
}
|
|
||||||
if (options?.sort.type) {
|
|
||||||
qs.push(["sort_type", options.sort.type])
|
|
||||||
}
|
|
||||||
let url = `/api/v2/views/${viewId}/search`
|
|
||||||
if (qs.length) {
|
|
||||||
url += "?" + qs.map(q => q.join("=")).join("&")
|
|
||||||
}
|
|
||||||
return this.request
|
return this.request
|
||||||
.get(url)
|
.post(`/api/v2/views/${viewId}/search`)
|
||||||
|
.send(params)
|
||||||
.set(this.config.defaultHeaders())
|
.set(this.config.defaultHeaders())
|
||||||
.expect("Content-Type", /json/)
|
.expect("Content-Type", /json/)
|
||||||
.expect(expectStatus)
|
.expect(expectStatus)
|
||||||
}
|
}
|
||||||
|
|
||||||
row = {
|
|
||||||
create: async (
|
|
||||||
viewId: string,
|
|
||||||
row: Row,
|
|
||||||
{ expectStatus } = { expectStatus: 200 }
|
|
||||||
): Promise<Row> => {
|
|
||||||
const result = await this.request
|
|
||||||
.post(`/api/v2/views/${viewId}/rows`)
|
|
||||||
.send(row)
|
|
||||||
.set(this.config.defaultHeaders())
|
|
||||||
.expect("Content-Type", /json/)
|
|
||||||
.expect(expectStatus)
|
|
||||||
return result.body as Row
|
|
||||||
},
|
|
||||||
update: async (
|
|
||||||
viewId: string,
|
|
||||||
rowId: string,
|
|
||||||
row: PatchRowRequest,
|
|
||||||
{ expectStatus } = { expectStatus: 200 }
|
|
||||||
): Promise<PatchRowResponse> => {
|
|
||||||
const result = await this.request
|
|
||||||
.patch(`/api/v2/views/${viewId}/rows/${rowId}`)
|
|
||||||
.send(row)
|
|
||||||
.set(this.config.defaultHeaders())
|
|
||||||
.expect("Content-Type", /json/)
|
|
||||||
.expect(expectStatus)
|
|
||||||
return result.body as PatchRowResponse
|
|
||||||
},
|
|
||||||
delete: async (
|
|
||||||
viewId: string,
|
|
||||||
body: DeleteRowRequest,
|
|
||||||
{ expectStatus } = { expectStatus: 200 }
|
|
||||||
): Promise<any> => {
|
|
||||||
const result = await this.request
|
|
||||||
.delete(`/api/v2/views/${viewId}/rows`)
|
|
||||||
.send(body)
|
|
||||||
.set(this.config.defaultHeaders())
|
|
||||||
.expect(expectStatus)
|
|
||||||
return result.body
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -2,9 +2,9 @@ import { default as threadUtils } from "./utils"
|
||||||
 import { Job } from "bull"
 threadUtils.threadSetup()
 import {
-  isRecurring,
   disableCronById,
   isErrorInOutput,
+  isRecurring,
 } from "../automations/utils"
 import * as actions from "../automations/actions"
 import * as automationUtils from "../automations/automationUtils"
@@ -15,17 +15,17 @@ import { AutomationErrors, MAX_AUTOMATION_RECURRING_ERRORS } from "../constants"
 import { storeLog } from "../automations/logging"
 import {
   Automation,
-  AutomationStep,
-  AutomationStatus,
-  AutomationMetadata,
-  AutomationJob,
   AutomationData,
+  AutomationJob,
+  AutomationMetadata,
+  AutomationStatus,
+  AutomationStep,
 } from "@budibase/types"
 import {
-  LoopStep,
-  LoopInput,
-  TriggerOutput,
   AutomationContext,
+  LoopInput,
+  LoopStep,
+  TriggerOutput,
 } from "../definitions/automations"
 import { WorkerCallback } from "./definitions"
 import { context, logging } from "@budibase/backend-core"
@@ -34,6 +34,8 @@ import { cloneDeep } from "lodash/fp"
 import { performance } from "perf_hooks"
 import * as sdkUtils from "../sdk/utils"
 import env from "../environment"
+import sdk from "../sdk"
+
 const FILTER_STEP_ID = actions.BUILTIN_ACTION_DEFINITIONS.FILTER.stepId
 const LOOP_STEP_ID = actions.BUILTIN_ACTION_DEFINITIONS.LOOP.stepId
 const CRON_STEP_ID = triggerDefs.CRON.stepId
@@ -486,10 +488,13 @@ class Orchestrator {
     const end = performance.now()
     const executionTime = end - start

-    console.info(`Execution time: ${executionTime} milliseconds`, {
-      _logKey: "automation",
-      executionTime,
-    })
+    console.info(
+      `Automation ID: ${automation._id} Execution time: ${executionTime} milliseconds`,
+      {
+        _logKey: "automation",
+        executionTime,
+      }
+    )

     // store the logs for the automation run
     try {
@@ -511,7 +516,8 @@ class Orchestrator {

 export function execute(job: Job<AutomationData>, callback: WorkerCallback) {
   const appId = job.data.event.appId
-  const automationId = job.data.automation._id
+  const automation = job.data.automation
+  const automationId = automation._id
   if (!appId) {
     throw new Error("Unable to execute, event doesn't contain app ID.")
   }
@@ -522,10 +528,30 @@ export function execute(job: Job<AutomationData>, callback: WorkerCallback) {
     appId,
     automationId,
     task: async () => {
+      let automation = job.data.automation,
+        isCron = sdk.automations.isCron(job.data.automation),
+        notFound = false
+      try {
+        automation = await sdk.automations.get(automationId)
+      } catch (err: any) {
+        // automation no longer exists
+        notFound = err
+      }
+      const disabled = sdk.automations.disabled(automation)
+      const stopAutomation = disabled || notFound
       const envVars = await sdkUtils.getEnvironmentVariables()
       // put into automation thread for whole context
       await context.doInEnvironmentContext(envVars, async () => {
         const automationOrchestrator = new Orchestrator(job)
+        // hard stop on automations
+        if (isCron && stopAutomation) {
+          await automationOrchestrator.stopCron(
+            disabled ? "disabled" : "not_found"
+          )
+        }
+        if (stopAutomation) {
+          return
+        }
         try {
           const response = await automationOrchestrator.execute()
           callback(null, response)
@@ -554,11 +580,10 @@ export function executeSynchronously(job: Job) {
   // put into automation thread for whole context
   return context.doInEnvironmentContext(envVars, async () => {
     const automationOrchestrator = new Orchestrator(job)
-    const response = await Promise.race([
+    return await Promise.race([
       automationOrchestrator.execute(),
       timeoutPromise,
     ])
-    return response
   })
  })
 }
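For reference, a minimal sketch of the hard-stop decision the worker now makes before running a CRON automation, assuming the sdk.automations helpers (get, isCron, disabled) behave as they are used in the hunk above:

// Sketch only: condensed from the execute() changes above, not the actual
// worker code. "../sdk" is the same import the diff adds.
import sdk from "../sdk"
import { Automation } from "@budibase/types"

async function shouldHardStop(automationId: string, jobAutomation: Automation) {
  let automation = jobAutomation
  let notFound = false
  try {
    // re-fetch so a deleted automation is noticed by the worker
    automation = await sdk.automations.get(automationId)
  } catch (err: any) {
    notFound = true
  }
  const disabled = sdk.automations.disabled(automation)
  return {
    // cron triggers additionally need their repeatable job removed via stopCron()
    stop: disabled || notFound,
    reason: disabled ? "disabled" : "not_found",
  }
}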
@@ -11,6 +11,12 @@ export interface QueryEvent {
   queryId: string
   environmentVariables?: Record<string, string>
   ctx?: any
+  schema?: {
+    [key: string]: {
+      name: string
+      type: string
+    }
+  }
 }

 export interface QueryVariable {
@@ -8,6 +8,7 @@ import { context, cache, auth } from "@budibase/backend-core"
 import { getGlobalIDFromUserMetadataID } from "../db/utils"
 import sdk from "../sdk"
 import { cloneDeep } from "lodash/fp"
+import { SourceName } from "@budibase/types"

 import { isSQL } from "../integrations/utils"
 import { interpolateSQL } from "../integrations/queries/sql"
@@ -28,6 +29,7 @@ class QueryRunner {
   hasRerun: boolean
   hasRefreshedOAuth: boolean
   hasDynamicVariables: boolean
+  schema: any

   constructor(input: QueryEvent, flags = { noRecursiveQuery: false }) {
     this.datasource = input.datasource
@@ -37,6 +39,7 @@ class QueryRunner {
     this.pagination = input.pagination
     this.transformer = input.transformer
     this.queryId = input.queryId
+    this.schema = input.schema
     this.noRecursiveQuery = flags.noRecursiveQuery
     this.cachedVariables = []
     // Additional context items for enrichment
@@ -51,7 +54,7 @@ class QueryRunner {
   }

   async execute(): Promise<any> {
-    let { datasource, fields, queryVerb, transformer } = this
+    let { datasource, fields, queryVerb, transformer, schema } = this
     let datasourceClone = cloneDeep(datasource)
     let fieldsClone = cloneDeep(fields)

@@ -70,6 +73,9 @@ class QueryRunner {

     const integration = new Integration(datasourceClone.config)

+    // define the type casting from the schema
+    integration.defineTypeCastingFromSchema?.(schema)
+
     // pre-query, make sure datasource variables are added to parameters
     const parameters = await this.addDatasourceVariables()

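The new optional schema on QueryEvent is simply threaded through to the integration; a self-contained sketch of that hand-off (the types are restated locally so the snippet stands alone):

// The optional call means integrations that don't implement the hook are left
// untouched; others can use the declared field types to cast query results.
type FieldSchema = { [key: string]: { name: string; type: string } }

interface CastingAware {
  defineTypeCastingFromSchema?(schema: FieldSchema): void
}

function applySchema(integration: CastingAware, schema?: FieldSchema) {
  // mirrors integration.defineTypeCastingFromSchema?.(schema) in the hunk above
  if (schema) {
    integration.defineTypeCastingFromSchema?.(schema)
  }
}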
@@ -1,5 +1,5 @@
 import { permissions, roles } from "@budibase/backend-core"
-import { DocumentType } from "../db/utils"
+import { DocumentType, VirtualDocumentType } from "../db/utils"

 export const CURRENTLY_SUPPORTED_LEVELS: string[] = [
   permissions.PermissionLevel.WRITE,
@@ -11,9 +11,10 @@ export function getPermissionType(resourceId: string) {
   const docType = Object.values(DocumentType).filter(docType =>
     resourceId.startsWith(docType)
   )[0]
-  switch (docType) {
+  switch (docType as DocumentType | VirtualDocumentType) {
     case DocumentType.TABLE:
     case DocumentType.ROW:
+    case VirtualDocumentType.VIEW:
       return permissions.PermissionType.TABLE
     case DocumentType.AUTOMATION:
       return permissions.PermissionType.AUTOMATION
@@ -22,9 +23,6 @@ export function getPermissionType(resourceId: string) {
     case DocumentType.QUERY:
     case DocumentType.DATASOURCE:
       return permissions.PermissionType.QUERY
-    default:
-      // views don't have an ID, will end up here
-      return permissions.PermissionType.VIEW
   }
 }

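A small usage sketch of the updated switch: view resources now resolve to table-level permissions. The import path and the concrete resource IDs are illustrative only; VirtualDocumentType.VIEW is "view" per the enum added later in this diff:

// Hypothetical call sites: the module path and the ID strings are examples,
// not taken from the repository.
import { getPermissionType } from "./permissions" // wherever the function above lives

getPermissionType("ta_example")   // table      -> permissions.PermissionType.TABLE
getPermissionType("view_example") // view       -> permissions.PermissionType.TABLE (new case)
getPermissionType("au_example")   // automation -> permissions.PermissionType.AUTOMATION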
@@ -0,0 +1,4 @@
+*
+!dist/**/*
+dist/tsconfig.build.tsbuildinfo
+!package.json
@@ -2,19 +2,13 @@
   "name": "@budibase/shared-core",
   "version": "0.0.0",
   "description": "Shared data utils",
-  "main": "src/index.ts",
-  "types": "src/index.ts",
-  "exports": {
-    ".": {
-      "import": "./dist/index.js",
-      "require": "./src/index.ts"
-    }
-  },
+  "main": "dist/index.js",
+  "types": "dist/index.d.ts",
   "author": "Budibase",
   "license": "GPL-3.0",
   "scripts": {
     "prebuild": "rimraf dist/",
-    "build": "tsc -p tsconfig.build.json",
+    "build": "node ../../scripts/build.js && tsc -p tsconfig.build.json --emitDeclarationOnly --paths null",
     "build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput",
     "dev:builder": "yarn prebuild && tsc -p tsconfig.json --watch --preserveWatchOutput",
     "check:types": "tsc -p tsconfig.json --noEmit --paths null"
@@ -26,5 +20,19 @@
     "concurrently": "^7.6.0",
     "rimraf": "3.0.2",
     "typescript": "4.7.3"
+  },
+  "nx": {
+    "targets": {
+      "build": {
+        "dependsOn": [
+          {
+            "projects": [
+              "@budibase/types"
+            ],
+            "target": "build"
+          }
+        ]
+      }
+    }
   }
 }
@@ -1,4 +1,12 @@
-import { Datasource, FieldType, SortDirection, SortType } from "@budibase/types"
+import {
+  Datasource,
+  FieldType,
+  SearchFilter,
+  SearchQuery,
+  SearchQueryFields,
+  SortDirection,
+  SortType,
+} from "@budibase/types"
 import { OperatorOptions, SqlNumberTypeRangeMap } from "./constants"
 import { deepGet } from "./helpers"

@@ -73,13 +81,13 @@ export const NoEmptyFilterStrings = [
   OperatorOptions.NotEquals.value,
   OperatorOptions.Contains.value,
   OperatorOptions.NotContains.value,
-] as (keyof QueryFields)[]
+] as (keyof SearchQueryFields)[]

 /**
  * Removes any fields that contain empty strings that would cause inconsistent
  * behaviour with how backend tables are filtered (no value means no filter).
  */
-const cleanupQuery = (query: Query) => {
+const cleanupQuery = (query: SearchQuery) => {
   if (!query) {
     return query
   }
@@ -110,66 +118,12 @@ const removeKeyNumbering = (key: string) => {
   }
 }

-type Filter = {
-  operator: keyof Query
-  field: string
-  type: any
-  value: any
-  externalType: keyof typeof SqlNumberTypeRangeMap
-}
-
-type Query = QueryFields & QueryConfig
-type QueryFields = {
-  string?: {
-    [key: string]: string
-  }
-  fuzzy?: {
-    [key: string]: string
-  }
-  range?: {
-    [key: string]: {
-      high: number | string
-      low: number | string
-    }
-  }
-  equal?: {
-    [key: string]: any
-  }
-  notEqual?: {
-    [key: string]: any
-  }
-  empty?: {
-    [key: string]: any
-  }
-  notEmpty?: {
-    [key: string]: any
-  }
-  oneOf?: {
-    [key: string]: any[]
-  }
-  contains?: {
-    [key: string]: any[]
-  }
-  notContains?: {
-    [key: string]: any[]
-  }
-  containsAny?: {
-    [key: string]: any[]
-  }
-}
-
-type QueryConfig = {
-  allOr?: boolean
-}
-
-type QueryFieldsType = keyof QueryFields
-
 /**
  * Builds a lucene JSON query from the filter structure generated in the builder
  * @param filter the builder filter structure
  */
-export const buildLuceneQuery = (filter: Filter[]) => {
-  let query: Query = {
+export const buildLuceneQuery = (filter: SearchFilter[]) => {
+  let query: SearchQuery = {
     string: {},
     fuzzy: {},
     range: {},
@@ -227,9 +181,13 @@ export const buildLuceneQuery = (filter: Filter[]) => {
       }
       if (operator.startsWith("range") && query.range) {
         const minint =
-          SqlNumberTypeRangeMap[externalType]?.min || Number.MIN_SAFE_INTEGER
+          SqlNumberTypeRangeMap[
+            externalType as keyof typeof SqlNumberTypeRangeMap
+          ]?.min || Number.MIN_SAFE_INTEGER
         const maxint =
-          SqlNumberTypeRangeMap[externalType]?.max || Number.MAX_SAFE_INTEGER
+          SqlNumberTypeRangeMap[
+            externalType as keyof typeof SqlNumberTypeRangeMap
+          ]?.max || Number.MAX_SAFE_INTEGER
         if (!query.range[field]) {
           query.range[field] = {
             low: type === "number" ? minint : "0000-00-00T00:00:00.000Z",
@@ -275,7 +233,7 @@ export const buildLuceneQuery = (filter: Filter[]) => {
  * @param docs the data
  * @param query the JSON lucene query
  */
-export const runLuceneQuery = (docs: any[], query?: Query) => {
+export const runLuceneQuery = (docs: any[], query?: SearchQuery) => {
   if (!docs || !Array.isArray(docs)) {
     return []
   }
@@ -289,7 +247,7 @@ export const runLuceneQuery = (docs: any[], query?: Query) => {
   // Iterates over a set of filters and evaluates a fail function against a doc
   const match =
     (
-      type: QueryFieldsType,
+      type: keyof SearchQueryFields,
       failFn: (docValue: any, testValue: any) => boolean
     ) =>
     (doc: any) => {
@@ -456,7 +414,7 @@ export const luceneLimit = (docs: any[], limit: string) => {
   return docs.slice(0, numLimit)
 }

-export const hasFilters = (query?: Query) => {
+export const hasFilters = (query?: SearchQuery) => {
   if (!query) {
     return false
   }
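A short usage sketch of the retyped helpers, assuming buildLuceneQuery returns the SearchQuery it assembles (as its body suggests) and that both functions are re-exported from the @budibase/shared-core entry point:

import { SearchFilter, SearchQuery } from "@budibase/types"
import { buildLuceneQuery, runLuceneQuery } from "@budibase/shared-core"

// builder-style filters: operator must be a key of SearchQuery
const filters: SearchFilter[] = [
  { operator: "equal", field: "status", value: "active" },
  { operator: "fuzzy", field: "name", value: "bud" },
]

const query: SearchQuery = buildLuceneQuery(filters)

// the same query shape can be evaluated client-side against in-memory rows
const rows = [
  { status: "active", name: "budibase" },
  { status: "archived", name: "something else" },
]
const matched = runLuceneQuery(rows, query)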
@@ -12,7 +12,10 @@
     "declaration": true,
     "types": ["node"],
     "outDir": "dist",
-    "skipLibCheck": true
+    "skipLibCheck": true,
+    "paths": {
+      "@budibase/types": ["../types/src"]
+    }
   },
   "include": ["**/*.js", "**/*.ts"],
   "exclude": [
@@ -1,13 +1,4 @@
 {
   "extends": "./tsconfig.build.json",
-  "compilerOptions": {
-    "baseUrl": ".",
-    "rootDir": "./src",
-    "composite": true,
-    "tsBuildInfoFile": "dist/tsconfig.tsbuildinfo",
-    "paths": {
-      "@budibase/types": ["../../types/src"]
-    }
-  },
   "exclude": ["node_modules", "dist"]
 }
@@ -0,0 +1,4 @@
+*
+!dist/**/*
+dist/tsconfig.build.tsbuildinfo
+!package.json
@@ -2,19 +2,13 @@
   "name": "@budibase/types",
   "version": "0.0.0",
   "description": "Budibase types",
-  "main": "src/index.ts",
-  "types": "src/index.ts",
-  "exports": {
-    ".": {
-      "import": "./dist/index.js",
-      "require": "./src/index.ts"
-    }
-  },
+  "main": "dist/index.js",
+  "types": "dist/index.d.ts",
   "author": "Budibase",
   "license": "GPL-3.0",
   "scripts": {
     "prebuild": "rimraf dist/",
-    "build": "tsc -p tsconfig.build.json",
+    "build": "node ../../scripts/build.js && tsc -p tsconfig.build.json --emitDeclarationOnly",
     "build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput",
     "dev:builder": "yarn prebuild && tsc -p tsconfig.json --watch --preserveWatchOutput",
     "check:types": "tsc -p tsconfig.json --noEmit --paths null"
@@ -1,5 +1,8 @@
+import { SearchParams } from "../../../sdk"
 import { Row } from "../../../documents"

+export interface SaveRowRequest extends Row {}
+
 export interface PatchRowRequest extends Row {
   _id: string
   _rev: string
@@ -8,6 +11,14 @@ export interface PatchRowRequest extends Row {

 export interface PatchRowResponse extends Row {}

-export interface SearchResponse {
+export interface SearchRowRequest extends Omit<SearchParams, "tableId"> {}
+
+export interface SearchViewRowRequest
+  extends Pick<
+    SearchRowRequest,
+    "sort" | "sortOrder" | "sortType" | "limit" | "bookmark" | "paginate"
+  > {}
+
+export interface SearchRowResponse {
   rows: any[]
 }
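For reference, request bodies that satisfy the new row-search types might look like this (values are illustrative; SearchRowRequest inherits everything from SearchParams except tableId, which comes from the URL):

import { SearchRowRequest, SearchViewRowRequest } from "@budibase/types"

// full row search: query is required, everything else is optional
const rowSearch: SearchRowRequest = {
  query: { equal: { status: "active" } },
  limit: 50,
  paginate: true,
}

// view search: only the sorting/paging subset picked above is accepted
const viewSearch: SearchViewRowRequest = {
  sort: "name",
  limit: 50,
  paginate: true,
}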
@@ -8,3 +8,4 @@ export * from "./system"
 export * from "./app"
 export * from "./global"
 export * from "./pagination"
+export * from "./searchFilter"
@@ -0,0 +1,51 @@
+import { FieldType } from "../../documents"
+
+export type SearchFilter = {
+  operator: keyof SearchQuery
+  field: string
+  type?: FieldType
+  value: any
+  externalType?: string
+}
+
+export type SearchQuery = {
+  allOr?: boolean
+  string?: {
+    [key: string]: string
+  }
+  fuzzy?: {
+    [key: string]: string
+  }
+  range?: {
+    [key: string]: {
+      high: number | string
+      low: number | string
+    }
+  }
+  equal?: {
+    [key: string]: any
+  }
+  notEqual?: {
+    [key: string]: any
+  }
+  empty?: {
+    [key: string]: any
+  }
+  notEmpty?: {
+    [key: string]: any
+  }
+  oneOf?: {
+    [key: string]: any[]
+  }
+  contains?: {
+    [key: string]: any[]
+  }
+  notContains?: {
+    [key: string]: any[]
+  }
+  containsAny?: {
+    [key: string]: any[]
+  }
+}
+
+export type SearchQueryFields = Omit<SearchQuery, "allOr">
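A hand-written query against these types, for illustration (the allOr flag presumably switches the operator sections from AND to OR semantics):

import { SearchQuery, SearchQueryFields } from "@budibase/types"

const query: SearchQuery = {
  allOr: false,
  equal: { status: "active" },
  range: { age: { low: 18, high: 65 } },
}

// SearchQueryFields is the same shape minus allOr, which is handy when
// iterating over just the per-operator sections of a query
const sections: SearchQueryFields = { equal: query.equal, range: query.range }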
@@ -100,6 +100,10 @@ export const AutomationStepIdArray = [
   ...Object.values(AutomationTriggerStepId),
 ]

+export enum AutomationState {
+  DISABLED = "disabled",
+}
+
 export interface Automation extends Document {
   definition: {
     steps: AutomationStep[]
@@ -112,6 +116,7 @@ export interface Automation extends Document {
   name: string
   internal?: boolean
   type?: string
+  state?: AutomationState
 }

 interface BaseIOStructure {
@@ -1,6 +1,5 @@
-import { SortOrder, SortType } from "../../api"
-import { SearchFilters } from "../../sdk"
-import { TableSchema, UIFieldMetadata } from "./table"
+import { SearchFilter, SortOrder, SortType } from "../../api"
+import { UIFieldMetadata } from "./table"

 export interface View {
   name: string
@@ -20,7 +19,7 @@ export interface ViewV2 {
   name: string
   primaryDisplay?: string
   tableId: string
-  query?: SearchFilters
+  query?: SearchFilter[]
   sort?: {
     field: string
     order?: SortOrder
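With ViewV2.query now an array of SearchFilter conditions rather than a SearchFilters object, the relevant slice of a view definition looks roughly like this (field values and the table ID are illustrative):

import { SearchFilter, ViewV2 } from "@budibase/types"

const activeOnly: SearchFilter[] = [
  { operator: "equal", field: "status", value: "active" },
]

const view: Pick<ViewV2, "name" | "tableId" | "query"> = {
  name: "Active users",
  tableId: "ta_example", // illustrative ID
  query: activeOnly,
}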
@@ -39,6 +39,12 @@ export enum DocumentType {
   AUDIT_LOG = "al",
 }

+// these documents don't really exist, they are part of other
+// documents or enriched into existence as part of get requests
+export enum VirtualDocumentType {
+  VIEW = "view",
+}
+
 export interface Document {
   _id?: string
   _rev?: string
@@ -166,6 +166,12 @@ export interface IntegrationBase {
   delete?(query: any): Promise<any[] | any>
   testConnection?(): Promise<ConnectionInfo>
   getExternalSchema?(): Promise<string>
+  defineTypeCastingFromSchema?(schema: {
+    [key: string]: {
+      name: string
+      type: string
+    }
+  }): void
 }

 export interface DatasourcePlus extends IntegrationBase {
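A minimal sketch of an integration opting into the new hook; only the method signature comes from the interface above, while the class and its casting strategy are hypothetical:

// Hypothetical integration: records the declared type of each column so that
// rows returned by the driver can later be coerced (e.g. parsing date strings).
class ExampleIntegration {
  private castings: { [column: string]: string } = {}

  // same signature as the optional hook on IntegrationBase above
  defineTypeCastingFromSchema(schema: {
    [key: string]: { name: string; type: string }
  }): void {
    for (const [column, field] of Object.entries(schema)) {
      this.castings[column] = field.type
    }
  }
}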
@@ -40,6 +40,11 @@ export type DatabasePutOpts = {
   force?: boolean
 }

+export type DocExistsResponse = {
+  _rev?: string
+  exists: boolean
+}
+
 export type DatabaseCreateIndexOpts = {
   index: {
     fields: string[]
@@ -90,6 +95,7 @@ export interface Database {
   exists(): Promise<boolean>
   checkSetup(): Promise<Nano.DocumentScope<any>>
   get<T>(id?: string): Promise<T>
+  docExists(id: string): Promise<DocExistsResponse>
   remove(
     id: string | Document,
     rev?: string
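A usage sketch of the new docExists helper, which avoids fetching a document (and catching a 404) just to learn whether it exists:

import { Database } from "@budibase/types"

// returns the current revision if the document exists, undefined otherwise
async function revFor(db: Database, docId: string): Promise<string | undefined> {
  const { exists, _rev } = await db.docExists(docId)
  return exists ? _rev : undefined
}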
@@ -19,3 +19,4 @@ export * from "./user"
 export * from "./cli"
 export * from "./websocket"
 export * from "./permissions"
+export * from "./row"
@@ -14,6 +14,5 @@ export enum PermissionType {
   WEBHOOK = "webhook",
   BUILDER = "builder",
   GLOBAL_BUILDER = "globalBuilder",
-  VIEW = "view",
   QUERY = "query",
 }
@@ -0,0 +1,16 @@
+import { SortOrder, SortType } from "../api"
+import { SearchFilters } from "./search"
+
+export interface SearchParams {
+  tableId: string
+  paginate?: boolean
+  query: SearchFilters
+  bookmark?: string
+  limit?: number
+  sort?: string
+  sortOrder?: SortOrder
+  sortType?: SortType
+  version?: string
+  disableEscaping?: boolean
+  fields?: string[]
+}
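SearchParams is the internal counterpart of the SearchRowRequest body shown earlier: it carries the tableId explicitly because no route supplies it. An illustrative value (the table ID and filter are examples only):

import { SearchParams } from "@budibase/types"

const params: SearchParams = {
  tableId: "ta_example",                  // illustrative ID
  query: { equal: { status: "active" } }, // SearchFilters shape
  limit: 25,
  paginate: true,
}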
@@ -1,4 +1,4 @@
-FROM node:14-alpine
+FROM node:18-alpine

 LABEL com.centurylinklabs.watchtower.lifecycle.pre-check="scripts/watchtower-hooks/pre-check.sh"
 LABEL com.centurylinklabs.watchtower.lifecycle.pre-update="scripts/watchtower-hooks/pre-update.sh"
@@ -1,12 +1,14 @@
 import { auth } from "@budibase/backend-core"
 import Joi from "joi"

+const OPTIONAL_STRING = Joi.string().allow(null, "")
+
 let schema: any = {
-  email: Joi.string().allow(null, ""),
-  password: Joi.string().allow(null, ""),
+  email: OPTIONAL_STRING,
+  password: OPTIONAL_STRING,
   forceResetPassword: Joi.boolean().optional(),
-  firstName: Joi.string().allow(null, ""),
-  lastName: Joi.string().allow(null, ""),
+  firstName: OPTIONAL_STRING,
+  lastName: OPTIONAL_STRING,
   builder: Joi.object({
     global: Joi.boolean().optional(),
     apps: Joi.array().optional(),
@@ -21,8 +23,8 @@ export const buildSelfSaveValidation = () => {
   schema = {
     password: Joi.string().optional(),
     forceResetPassword: Joi.boolean().optional(),
-    firstName: Joi.string().allow("").optional(),
-    lastName: Joi.string().allow("").optional(),
+    firstName: OPTIONAL_STRING,
+    lastName: OPTIONAL_STRING,
     onboardedAt: Joi.string().optional(),
   }
   return auth.joiValidator.body(Joi.object(schema).required().unknown(false))