Merge branch 'develop' of github.com:Budibase/budibase into feature/BUDI-7108

This commit is contained in:
mike12345567 2023-08-09 17:19:35 +01:00
commit 893aaa2bf2
63 changed files with 732 additions and 703 deletions

View File

@@ -25,20 +25,20 @@ jobs:
 steps:
 - name: Checkout repo and submodules
 uses: actions/checkout@v3
-if: github.repository == github.event.pull_request.head.repo.full_name
+if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase'
 with:
 submodules: true
 token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
 - name: Checkout repo only
 uses: actions/checkout@v3
-if: github.repository != github.event.pull_request.head.repo.full_name
+if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase'
-- name: Use Node.js 14.x
+- name: Use Node.js 18.x
 uses: actions/setup-node@v3
 with:
-node-version: 14.x
+node-version: 18.x
 cache: "yarn"
-- run: yarn
+- run: yarn --frozen-lockfile
 - run: yarn lint
 build:
@@ -46,20 +46,20 @@ jobs:
 steps:
 - name: Checkout repo and submodules
 uses: actions/checkout@v3
-if: github.repository == github.event.pull_request.head.repo.full_name
+if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase'
 with:
 submodules: true
 token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
 - name: Checkout repo only
 uses: actions/checkout@v3
-if: github.repository != github.event.pull_request.head.repo.full_name
+if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase'
-- name: Use Node.js 14.x
+- name: Use Node.js 18.x
 uses: actions/setup-node@v3
 with:
-node-version: 14.x
+node-version: 18.x
 cache: "yarn"
-- run: yarn
+- run: yarn --frozen-lockfile
 # Run build all the projects
 - run: yarn build
 # Check the types of the projects built via esbuild
@@ -70,20 +70,20 @@ jobs:
 steps:
 - name: Checkout repo and submodules
 uses: actions/checkout@v3
-if: github.repository == github.event.pull_request.head.repo.full_name
+if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase'
 with:
 submodules: true
 token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
 - name: Checkout repo only
 uses: actions/checkout@v3
-if: github.repository != github.event.pull_request.head.repo.full_name
+if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase'
-- name: Use Node.js 14.x
+- name: Use Node.js 18.x
 uses: actions/setup-node@v3
 with:
-node-version: 14.x
+node-version: 18.x
 cache: "yarn"
-- run: yarn
+- run: yarn --frozen-lockfile
 - run: yarn test --ignore=@budibase/worker --ignore=@budibase/server --ignore=@budibase/pro
 - uses: codecov/codecov-action@v3
 with:
@@ -96,21 +96,22 @@ jobs:
 steps:
 - name: Checkout repo and submodules
 uses: actions/checkout@v3
-if: github.repository == github.event.pull_request.head.repo.full_name
+if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase'
 with:
 submodules: true
 token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
 - name: Checkout repo only
 uses: actions/checkout@v3
-if: github.repository != github.event.pull_request.head.repo.full_name
+if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase'
-- name: Use Node.js 14.x
+- name: Use Node.js 18.x
 uses: actions/setup-node@v3
 with:
-node-version: 14.x
+node-version: 18.x
 cache: "yarn"
-- run: yarn
+- run: yarn --frozen-lockfile
-- run: yarn test --scope=@budibase/worker --scope=@budibase/server
+- name: Test worker and server
+run: yarn test --scope=@budibase/worker --scope=@budibase/server
 - uses: codecov/codecov-action@v3
 with:
 token: ${{ secrets.CODECOV_TOKEN || github.token }} # not required for public repos
@@ -119,7 +120,7 @@ jobs:
 test-pro:
 runs-on: ubuntu-latest
-if: github.repository == github.event.pull_request.head.repo.full_name
+if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase'
 steps:
 - name: Checkout repo and submodules
 uses: actions/checkout@v3
@@ -127,12 +128,12 @@ jobs:
 submodules: true
 token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
-- name: Use Node.js 14.x
+- name: Use Node.js 18.x
 uses: actions/setup-node@v3
 with:
-node-version: 14.x
+node-version: 18.x
 cache: "yarn"
-- run: yarn
+- run: yarn --frozen-lockfile
 - run: yarn test --scope=@budibase/pro
 integration-test:
@@ -140,20 +141,20 @@ jobs:
 steps:
 - name: Checkout repo and submodules
 uses: actions/checkout@v3
-if: github.repository == github.event.pull_request.head.repo.full_name
+if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase'
 with:
 submodules: true
 token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
 - name: Checkout repo only
 uses: actions/checkout@v3
-if: github.repository != github.event.pull_request.head.repo.full_name
+if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase'
-- name: Use Node.js 14.x
+- name: Use Node.js 18.x
 uses: actions/setup-node@v3
 with:
-node-version: 14.x
+node-version: 18.x
 cache: "yarn"
-- run: yarn
+- run: yarn --frozen-lockfile
 - run: yarn build --projects=@budibase/server,@budibase/worker,@budibase/client
 - name: Run tests
 run: |
@@ -166,7 +167,7 @@ jobs:
 check-pro-submodule:
 runs-on: ubuntu-latest
-if: github.repository == github.event.pull_request.head.repo.full_name
+if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase'
 steps:
 - name: Checkout repo and submodules
 uses: actions/checkout@v3
@@ -190,6 +191,8 @@ jobs:
 base_commit=$(git rev-parse origin/develop)
 fi
+echo "target_branch=$branch"
+echo "target_branch=$branch" >> "$GITHUB_OUTPUT"
 echo "pro_commit=$pro_commit"
 echo "pro_commit=$pro_commit" >> "$GITHUB_OUTPUT"
 echo "base_commit=$base_commit"
@@ -204,7 +207,7 @@ jobs:
 const baseCommit = '${{ steps.get_pro_commits.outputs.base_commit }}';
 if (submoduleCommit !== baseCommit) {
-console.error('Submodule commit does not match the latest commit on the develop branch.');
+console.error('Submodule commit does not match the latest commit on the "${{ steps.get_pro_commits.outputs.target_branch }}"" branch.');
 console.error('Refer to the pro repo to merge your changes: https://github.com/Budibase/budibase-pro/blob/develop/docs/getting_started.md')
 process.exit(1);
 } else {

View File

@@ -0,0 +1,29 @@
name: check_unreleased_changes
on:
pull_request:
branches:
- master
jobs:
check_unreleased:
runs-on: ubuntu-latest
steps:
- name: Check for unreleased changes
env:
REPO: "Budibase/budibase"
TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
RELEASE_TIMESTAMP=$(curl -s -H "Authorization: token $TOKEN" \
"https://api.github.com/repos/$REPO/releases/latest" | \
jq -r .published_at)
COMMIT_TIMESTAMP=$(curl -s -H "Authorization: token $TOKEN" \
"https://api.github.com/repos/$REPO/commits/master" | \
jq -r .commit.committer.date)
RELEASE_SECONDS=$(date --date="$RELEASE_TIMESTAMP" "+%s")
COMMIT_SECONDS=$(date --date="$COMMIT_TIMESTAMP" "+%s")
if (( COMMIT_SECONDS > RELEASE_SECONDS )); then
echo "There are unreleased changes. Please release these changes before merging."
exit 1
fi
echo "No unreleased changes detected."

View File

@ -44,7 +44,7 @@ jobs:
- uses: actions/setup-node@v1 - uses: actions/setup-node@v1
with: with:
node-version: 14.x node-version: 18.x
- run: yarn install --frozen-lockfile - run: yarn install --frozen-lockfile
- name: Update versions - name: Update versions

View File

@@ -60,9 +60,9 @@ jobs:
 - name: "Get Current tag"
 id: currenttag
 run: |
-version=v$(./scripts/getCurrentVersion.sh)
-echo 'Using tag $version'
-echo "::set-output name=tag::$resversionult"
+version=$(./scripts/getCurrentVersion.sh)
+echo "Using tag $version"
+echo "version=$version" >> "$GITHUB_OUTPUT"
 - name: Build/release Docker images
 run: |
@@ -71,7 +71,7 @@ jobs:
 env:
 DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
 DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
-BUDIBASE_RELEASE_VERSION: ${{ steps.currenttag.outputs.tag }}
+BUDIBASE_RELEASE_VERSION: ${{ steps.currenttag.outputs.version }}
 release-helm-chart:
 needs: [release-images]

View File

@@ -1,4 +1,4 @@
-name: release-singleimage
+name: Deploy Budibase Single Container Image to DockerHub
 on:
 workflow_dispatch:
@@ -8,13 +8,20 @@ env:
 PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
 REGISTRY_URL: registry.hub.docker.com
 jobs:
-build-amd64:
-name: "build-amd64"
+build:
+name: "build"
 runs-on: ubuntu-latest
 strategy:
 matrix:
 node-version: [14.x]
 steps:
+- name: Maximize build space
+uses: easimon/maximize-build-space@master
+with:
+root-reserve-mb: 35000
+swap-size-mb: 1024
+remove-android: 'true'
+remove-dotnet: 'true'
 - name: Fail if not a tag
 run: |
 if [[ $GITHUB_REF != refs/tags/* ]]; then
@@ -27,12 +34,14 @@ jobs:
 submodules: true
 token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
 fetch-depth: 0
 - name: Fail if tag is not in master
 run: |
 if ! git merge-base --is-ancestor ${{ github.sha }} origin/master; then
 echo "Tag is not in master. This pipeline can only execute tags that are present on the master branch"
 exit 1
 fi
 - name: Use Node.js ${{ matrix.node-version }}
 uses: actions/setup-node@v1
 with:
@@ -68,139 +77,9 @@ jobs:
 with:
 context: .
 push: true
-platforms: linux/amd64
-tags: budibase/budibase,budibase/budibase:v${{ env.RELEASE_VERSION }}
+platforms: linux/amd64,linux/arm64
+tags: budibase/budibase,budibase/budibase:${{ env.RELEASE_VERSION }}
 file: ./hosting/single/Dockerfile
- name: Tag and release Budibase Azure App Service docker image
uses: docker/build-push-action@v2
with:
context: .
push: true
platforms: linux/amd64
build-args: TARGETBUILD=aas
tags: budibase/budibase-aas,budibase/budibase-aas:v${{ env.RELEASE_VERSION }}
file: ./hosting/single/Dockerfile
build-arm64:
name: "build-arm64"
runs-on: ubuntu-latest
strategy:
matrix:
node-version: [14.x]
steps:
- name: Fail if not a tag
run: |
if [[ $GITHUB_REF != refs/tags/* ]]; then
echo "Workflow Dispatch can only be run on tags"
exit 1
fi
- name: "Checkout"
uses: actions/checkout@v2
with:
submodules: true
token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
fetch-depth: 0
- name: Fail if tag is not in master
run: |
if ! git merge-base --is-ancestor ${{ github.sha }} origin/master; then
echo "Tag is not in master. This pipeline can only execute tags that are present on the master branch"
exit 1
fi
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v1
with:
node-version: ${{ matrix.node-version }}
- name: Setup QEMU
uses: docker/setup-qemu-action@v1
- name: Setup Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v1
- name: Run Yarn
run: yarn
- name: Update versions
run: ./scripts/updateVersions.sh
- name: Runt Yarn Lint
run: yarn lint
- name: Update versions
run: ./scripts/updateVersions.sh
- name: Run Yarn Build
run: yarn build:docker:pre
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_API_KEY }}
- name: Get the latest release version
id: version
run: |
release_version=$(cat lerna.json | jq -r '.version')
echo $release_version
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
- name: Tag and release Budibase service docker image
uses: docker/build-push-action@v2
with:
context: .
push: true
platforms: linux/arm64
tags: budibase/budibase,budibase/budibase:v${{ env.RELEASE_VERSION }}
file: ./hosting/single/Dockerfile
build-aas:
name: "build-aas"
runs-on: ubuntu-latest
strategy:
matrix:
node-version: [14.x]
steps:
- name: Fail if not a tag
run: |
if [[ $GITHUB_REF != refs/tags/* ]]; then
echo "Workflow Dispatch can only be run on tags"
exit 1
fi
- name: "Checkout"
uses: actions/checkout@v2
with:
submodules: true
token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
fetch-depth: 0
- name: Fail if tag is not in master
run: |
if ! git merge-base --is-ancestor ${{ github.sha }} origin/master; then
echo "Tag is not in master. This pipeline can only execute tags that are present on the master branch"
exit 1
fi
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v1
with:
node-version: ${{ matrix.node-version }}
- name: Setup QEMU
uses: docker/setup-qemu-action@v1
- name: Setup Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v1
- name: Run Yarn
run: yarn
- name: Update versions
run: ./scripts/updateVersions.sh
- name: Runt Yarn Lint
run: yarn lint
- name: Update versions
run: ./scripts/updateVersions.sh
- name: Run Yarn Build
run: yarn build:docker:pre
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_API_KEY }}
- name: Get the latest release version
id: version
run: |
release_version=$(cat lerna.json | jq -r '.version')
echo $release_version
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
 - name: Tag and release Budibase Azure App Service docker image
 uses: docker/build-push-action@v2
 with:

.nvmrc
View File

@@ -1 +1 @@
-v14.20.1
+v18.17.0

View File

@@ -1,3 +1,3 @@
-nodejs 14.21.3
+nodejs 18.17.0
 python 3.10.0
 yarn 1.22.19

View File

@@ -120,6 +120,8 @@ spec:
 {{ end }}
 - name: MULTI_TENANCY
 value: {{ .Values.globals.multiTenancy | quote }}
+- name: OFFLINE_MODE
+value: {{ .Values.globals.offlineMode | quote }}
 - name: LOG_LEVEL
 value: {{ .Values.services.apps.logLevel | quote }}
 - name: REDIS_PASSWORD

View File

@@ -116,6 +116,8 @@ spec:
 value: {{ .Values.services.worker.port | quote }}
 - name: MULTI_TENANCY
 value: {{ .Values.globals.multiTenancy | quote }}
+- name: OFFLINE_MODE
+value: {{ .Values.globals.offlineMode | quote }}
 - name: LOG_LEVEL
 value: {{ .Values.services.worker.logLevel | quote }}
 - name: REDIS_PASSWORD

View File

@@ -82,6 +82,7 @@ globals:
 posthogToken: "phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU"
 selfHosted: "1" # set to 0 for budibase cloud environment, set to 1 for self-hosted setup
 multiTenancy: "0" # set to 0 to disable multiple orgs, set to 1 to enable multiple orgs
+offlineMode: "0" # set to 1 to enable offline mode
 accountPortalUrl: ""
 accountPortalApiKey: ""
 cookieDomain: ""

View File

@@ -90,7 +90,7 @@ Component libraries are collections of components as well as the definition of t
 #### 1. Prerequisites
-- NodeJS version `14.x.x`
+- NodeJS version `18.x.x`
 - Python version `3.x`
 ### Using asdf (recommended)

View File

@@ -1,4 +1,4 @@
-FROM node:14-slim as build
+FROM node:18-slim as build
 # install node-gyp dependencies
 RUN apt-get update && apt-get upgrade -y && apt-get install -y --no-install-recommends apt-utils cron g++ make python

View File

@@ -1,5 +1,5 @@
 {
-"version": "2.8.29-alpha.19",
+"version": "2.9.21-alpha.0",
 "npmClient": "yarn",
 "packages": [
 "packages/*"

View File

@@ -109,7 +109,7 @@
 "@budibase/types": "0.0.0"
 },
 "engines": {
-"node": ">=14.0.0 <15.0.0"
+"node": ">=18.0.0 <19.0.0"
 },
 "dependencies": {}
 }

View File

@@ -0,0 +1,4 @@
*
!dist/**/*
dist/tsconfig.build.tsbuildinfo
!package.json

View File

@@ -14,7 +14,7 @@
 "scripts": {
 "prebuild": "rimraf dist/",
 "prepack": "cp package.json dist",
-"build": "tsc -p tsconfig.build.json",
+"build": "node ./scripts/build.js && tsc -p tsconfig.build.json --emitDeclarationOnly --paths null",
 "build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput",
 "check:types": "tsc -p tsconfig.json --noEmit --paths null",
 "test": "bash scripts/test.sh",
@@ -88,5 +88,20 @@
 "ts-node": "10.8.1",
 "tsconfig-paths": "4.0.0",
 "typescript": "4.7.3"
+},
+"nx": {
+"targets": {
+"build": {
+"dependsOn": [
+{
+"projects": [
+"@budibase/shared-core",
+"@budibase/types"
+],
+"target": "build"
+}
+]
+}
+}
 }
 }

View File

@@ -1 +0,0 @@
-export * from "./src/plugin"

View File

@@ -0,0 +1,6 @@
#!/usr/bin/node
const coreBuild = require("../../../scripts/build")
coreBuild("./src/plugin/index.ts", "./dist/plugins.js")
coreBuild("./src/index.ts", "./dist/index.js")
coreBuild("./tests/index.ts", "./dist/tests.js")

View File

@@ -80,7 +80,7 @@ function getRedisConfig() {
 export function setupEnv(...envs: any[]) {
 const couch = getCouchConfig(),
-minio = getCouchConfig(),
+minio = getMinioConfig(),
 redis = getRedisConfig()
 const configs = [
 { key: "COUCH_DB_PORT", value: couch.port },

View File

@@ -12,7 +12,11 @@
 "declaration": true,
 "types": ["node", "jest"],
 "outDir": "dist",
-"skipLibCheck": true
+"skipLibCheck": true,
+"paths": {
+"@budibase/types": ["../types/src"],
+"@budibase/shared-core": ["../shared-core/src"]
+}
 },
 "include": ["**/*.js", "**/*.ts"],
 "exclude": [

View File

@@ -1,12 +1,4 @@
 {
 "extends": "./tsconfig.build.json",
-"compilerOptions": {
-"composite": true,
-"baseUrl": ".",
-"paths": {
-"@budibase/types": ["../types/src"],
-"@budibase/shared-core": ["../shared-core/src"]
-}
-},
 "exclude": ["node_modules", "dist"]
 }

View File

@@ -98,8 +98,7 @@
 {
 "projects": [
 "@budibase/string-templates",
-"@budibase/shared-core",
-"@budibase/types"
+"@budibase/shared-core"
 ],
 "target": "build"
 }

View File

@@ -133,9 +133,7 @@
 "dependsOn": [
 {
 "projects": [
-"@budibase/shared-core",
-"@budibase/string-templates",
-"@budibase/types"
+"@budibase/string-templates"
 ],
 "target": "build"
 }
@@ -145,9 +143,7 @@
 "dependsOn": [
 {
 "projects": [
-"@budibase/shared-core",
-"@budibase/string-templates",
-"@budibase/types"
+"@budibase/string-templates"
 ],
 "target": "build"
 }
@@ -157,9 +153,7 @@
 "dependsOn": [
 {
 "projects": [
-"@budibase/shared-core",
-"@budibase/string-templates",
-"@budibase/types"
+"@budibase/string-templates"
 ],
 "target": "build"
 }

View File

@@ -2,14 +2,15 @@
 import { Button, Layout } from "@budibase/bbui"
 import DatasourceNavigator from "components/backend/DatasourceNavigator/DatasourceNavigator.svelte"
 import Panel from "components/design/Panel.svelte"
-import { isActive, goto, redirect } from "@roxi/routify"
+import { isActive, redirect, goto, params } from "@roxi/routify"
 import BetaButton from "./_components/BetaButton.svelte"
 import { datasources } from "stores/backend"
 $: {
 // If we ever don't have any data other than the users table, prompt the
 // user to add some
-if (!$datasources.hasData) {
+// Don't redirect if setting up google sheets, or we lose the query parameter
+if (!$datasources.hasData && !$params["?continue_google_setup"]) {
 $redirect("./new")
 }
 }

View File

@@ -127,6 +127,14 @@ export default defineConfig(({ mode }) => {
 find: "helpers",
 replacement: path.resolve("./src/helpers"),
 },
+{
+find: "@budibase/types",
+replacement: path.resolve("../types/src"),
+},
+{
+find: "@budibase/shared-core",
+replacement: path.resolve("../shared-core/src"),
+},
 ],
 },
 }

View File

@@ -2,16 +2,16 @@
 "name": "@budibase/cli",
 "version": "0.0.0",
 "description": "Budibase CLI, for developers, self hosting and migrations.",
-"main": "dist/src/index.js",
+"main": "dist/index.js",
 "bin": {
-"budi": "dist/src/index.js"
+"budi": "dist/index.js"
 },
 "author": "Budibase",
 "license": "GPL-3.0",
 "scripts": {
 "prebuild": "rm -rf prebuilds 2> /dev/null && cp -r ../../node_modules/leveldown/prebuilds prebuilds",
 "rename": "renamer --find .node --replace .fake 'prebuilds/**'",
-"tsc": "tsc -p tsconfig.build.json",
+"tsc": "node ../../scripts/build.js",
 "pkg": "pkg . --out-path build --no-bytecode --public --public-packages \"*\" -C GZip",
 "build": "yarn prebuild && yarn rename && yarn tsc && yarn pkg && yarn postbuild",
 "check:types": "tsc -p tsconfig.json --noEmit --paths null",
@@ -19,12 +19,11 @@
 },
 "pkg": {
 "targets": [
-"node16-linux",
-"node16-win",
-"node16-macos"
+"node18-linux",
+"node18-win",
+"node18-macos"
 ],
 "assets": [
-"node_modules/@budibase/backend-core/dist/**/*",
 "prebuilds/**/*"
 ],
 "outputPath": "build"
@@ -64,20 +63,5 @@
 "renamer": "^4.0.0",
 "ts-node": "^10.9.1",
 "typescript": "4.7.3"
-},
-"nx": {
-"targets": {
-"build": {
-"dependsOn": [
-{
-"projects": [
-"@budibase/backend-core",
-"@budibase/string-templates"
-],
-"target": "build"
-}
-]
-}
-}
 }
 }

View File

@@ -1,5 +1,6 @@
 import util from "util"
-const runCommand = util.promisify(require("child_process").exec)
+import childProcess from "child_process"
+const runCommand = util.promisify(childProcess.exec)
 export async function exec(command: string, dir = "./") {
 const { stdout } = await runCommand(command, { cwd: dir })
@@ -16,12 +17,12 @@ export async function utilityInstalled(utilName: string) {
 }
 export async function runPkgCommand(command: string, dir = "./") {
-const yarn = await exports.utilityInstalled("yarn")
-const npm = await exports.utilityInstalled("npm")
+const yarn = await utilityInstalled("yarn")
+const npm = await utilityInstalled("npm")
 if (!yarn && !npm) {
 throw new Error("Must have yarn or npm installed to run build.")
 }
 const npmCmd = command === "install" ? `npm ${command}` : `npm run ${command}`
 const cmd = yarn ? `yarn ${command} --ignore-engines` : npmCmd
-await exports.exec(cmd, dir)
+await exec(cmd, dir)
 }
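
The refactor above replaces the CommonJS `require("child_process")` and `exports.*` indirection with plain ES imports and direct calls. For reference, the promisified-exec pattern it relies on looks like this in isolation (standard Node APIs only; the command run here is just an example):

```typescript
// Standalone illustration of util.promisify(childProcess.exec), as used above.
import util from "util"
import childProcess from "child_process"

const runCommand = util.promisify(childProcess.exec)

async function gitStatus(dir = "./"): Promise<string> {
  // resolves with { stdout, stderr } once the command exits
  const { stdout } = await runCommand("git status --short", { cwd: dir })
  return stdout.trim()
}

gitStatus().then(out => console.log(out || "working tree clean"))
```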

View File

@@ -5,7 +5,7 @@ import { error } from "./utils"
 const PREBUILDS = "prebuilds"
 const ARCH = `${os.platform()}-${os.arch()}`
-const PREBUILD_DIR = join(process.execPath, "..", PREBUILDS, ARCH)
+const PREBUILD_DIR = join(process.execPath, "..", "cli", PREBUILDS, ARCH)
 // running as built CLI pkg bundle
 if (!process.argv[0].includes("node")) {
@@ -13,17 +13,19 @@ if (!process.argv[0].includes("node")) {
 }
 function checkForBinaries() {
-const readDir = join(__filename, "..", "..", "..", PREBUILDS, ARCH)
+const readDir = join(__filename, "..", "..", "..", "cli", PREBUILDS, ARCH)
 if (fs.existsSync(PREBUILD_DIR) || !fs.existsSync(readDir)) {
 return
 }
 const natives = fs.readdirSync(readDir)
 if (fs.existsSync(readDir)) {
-fs.mkdirSync(PREBUILD_DIR, { recursive: true })
+const writePath = join(process.execPath, PREBUILDS, ARCH)
+fs.mkdirSync(writePath, { recursive: true })
 for (let native of natives) {
 const filename = `${native.split(".fake")[0]}.node`
-fs.cpSync(join(readDir, native), join(PREBUILD_DIR, filename))
+fs.cpSync(join(readDir, native), join(writePath, filename))
 }
+console.log("copied something")
 }
 }
@@ -39,8 +41,9 @@ function cleanup(evt?: number) {
 )
 console.error(error(evt))
 }
-if (fs.existsSync(PREBUILD_DIR)) {
-fs.rmSync(PREBUILD_DIR, { recursive: true })
+const path = join(process.execPath, PREBUILDS)
+if (fs.existsSync(path)) {
+fs.rmSync(path, { recursive: true })
 }
 }

View File

@@ -10,7 +10,12 @@
 "incremental": true,
 "types": [ "node", "jest" ],
 "outDir": "dist",
-"skipLibCheck": true
+"skipLibCheck": true,
+"paths": {
+"@budibase/types": ["../types/src"],
+"@budibase/backend-core": ["../backend-core/src"],
+"@budibase/backend-core/*": ["../backend-core/*"]
+}
 },
 "include": [
 "src/**/*"

View File

@@ -5,12 +5,7 @@
 "declaration": true,
 "sourceMap": true,
 "baseUrl": ".",
-"resolveJsonModule": true,
-"paths": {
-"@budibase/types": ["../types/src"],
-"@budibase/backend-core": ["../backend-core/src"],
-"@budibase/backend-core/*": ["../backend-core/*"]
-}
+"resolveJsonModule": true
 },
 "ts-node": {
 "require": ["tsconfig-paths/register"],

@@ -1 +1 @@
-Subproject commit cf3bef2aad9c739111b306fd0712397adc363f81
+Subproject commit ecee8071ebe0f98a5bb19646954e373264be210d

View File

@@ -1,4 +1,4 @@
-FROM node:14-slim
+FROM node:18-slim
 LABEL com.centurylinklabs.watchtower.lifecycle.pre-check="scripts/watchtower-hooks/pre-check.sh"
 LABEL com.centurylinklabs.watchtower.lifecycle.pre-update="scripts/watchtower-hooks/pre-update.sh"

View File

@@ -5,8 +5,9 @@ if [[ -n $CI ]]
 then
 # --runInBand performs better in ci where resources are limited
 export NODE_OPTIONS="--max-old-space-size=4096"
-echo "jest --coverage --runInBand --forceExit --bail"
-jest --coverage --runInBand --forceExit --bail
+node ../../node_modules/jest/bin/jest.js --version
+echo "jest --coverage --maxWorkers=2 --forceExit --workerIdleMemoryLimit=2000MB --bail"
+jest --coverage --maxWorkers=2 --forceExit --workerIdleMemoryLimit=2000MB --bail
 else
 # --maxWorkers performs better in development
 echo "jest --coverage --maxWorkers=2 --forceExit"

View File

@@ -317,6 +317,11 @@ async function performAppCreate(ctx: UserCtx) {
 }
 })
+// Keep existing validation setting
+if (!existing.features?.componentValidation) {
+newApplication.features!.componentValidation = false
+}
 // Migrate navigation settings and screens if required
 if (existing) {
 const navigation = await migrateAppNavigation()

View File

@@ -44,7 +44,7 @@ export async function handleRequest(
 export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
 const tableId = ctx.params.tableId
-const { id, ...rowData } = ctx.request.body
+const { _id, ...rowData } = ctx.request.body
 const validateResult = await sdk.rows.utils.validate({
 row: rowData,
@@ -54,10 +54,10 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
 throw { validation: validateResult.errors }
 }
 const response = await handleRequest(Operation.UPDATE, tableId, {
-id: breakRowIdField(id),
+id: breakRowIdField(_id),
 row: rowData,
 })
-const row = await sdk.rows.external.getRow(tableId, id, {
+const row = await sdk.rows.external.getRow(tableId, _id, {
 relationships: true,
 })
 const table = await sdk.tables.getTable(tableId)
@@ -104,9 +104,9 @@ export async function find(ctx: UserCtx) {
 export async function destroy(ctx: UserCtx) {
 const tableId = ctx.params.tableId
-const id = ctx.request.body._id
+const _id = ctx.request.body._id
 const { row } = (await handleRequest(Operation.DELETE, tableId, {
-id: breakRowIdField(id),
+id: breakRowIdField(_id),
 includeSqlRelationships: IncludeRelationship.EXCLUDE,
 })) as { row: Row }
 return { response: { ok: true }, row }
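
The external row handlers now read the row identifier from `_id` in the request body, matching the field name on `PatchRowRequest`. A rough sketch of the body shape the patch handler destructures (route, auth, and any extra metadata fields are omitted here and assumed):

```typescript
// Hypothetical request body for the patch handler above; the row id field is _id,
// and every other property is treated as changed row data.
const patchBody = {
  _id: "ro_ta_example_123", // illustrative row id
  name: "Updated name",     // any edited columns ride along as plain fields
}

// mirrors the controller: const { _id, ...rowData } = ctx.request.body
const { _id, ...rowData } = patchBody
console.log(_id, rowData) // "ro_ta_example_123" { name: "Updated name" }
```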

View File

@@ -11,6 +11,9 @@ import {
 Row,
 PatchRowRequest,
 PatchRowResponse,
+SearchRowResponse,
+SearchRowRequest,
+SearchParams,
 } from "@budibase/types"
 import * as utils from "./utils"
 import { gridSocket } from "../../../websockets"
@@ -197,10 +200,10 @@ export async function destroy(ctx: UserCtx<DeleteRowRequest>) {
 ctx.body = response
 }
-export async function search(ctx: any) {
+export async function search(ctx: Ctx<SearchRowRequest, SearchRowResponse>) {
 const tableId = utils.getTableId(ctx)
-const searchParams = {
+const searchParams: SearchParams = {
 ...ctx.request.body,
 tableId,
 }

View File

@@ -1,14 +1,18 @@
 import { quotas } from "@budibase/pro"
 import {
 UserCtx,
-SearchResponse,
-SortOrder,
-SortType,
 ViewV2,
+SearchRowResponse,
+SearchViewRowRequest,
+RequiredKeys,
+SearchParams,
 } from "@budibase/types"
+import { dataFilters } from "@budibase/shared-core"
 import sdk from "../../../sdk"
-export async function searchView(ctx: UserCtx<void, SearchResponse>) {
+export async function searchView(
+ctx: UserCtx<SearchViewRowRequest, SearchRowResponse>
+) {
 const { viewId } = ctx.params
 const view = await sdk.views.get(viewId)
@@ -29,49 +33,35 @@ export async function searchView(ctx: UserCtx<void, SearchResponse>) {
 undefined
 ctx.status = 200
-const result = await quotas.addQuery(
-() =>
-sdk.rows.search({
-tableId: view.tableId,
-query: view.query || {},
-fields: viewFields,
-...getSortOptions(ctx, view),
-}),
-{
-datasourceId: view.tableId,
-}
-)
+const { body } = ctx.request
+const query = dataFilters.buildLuceneQuery(view.query || [])
+const searchOptions: RequiredKeys<SearchViewRowRequest> &
+RequiredKeys<Pick<SearchParams, "tableId" | "query" | "fields">> = {
+tableId: view.tableId,
+query,
+fields: viewFields,
+...getSortOptions(body, view),
+limit: body.limit,
+bookmark: body.bookmark,
+paginate: body.paginate,
+}
+const result = await quotas.addQuery(() => sdk.rows.search(searchOptions), {
+datasourceId: view.tableId,
+})
 result.rows.forEach(r => (r._viewId = view.id))
 ctx.body = result
 }
-function getSortOptions(
-ctx: UserCtx,
-view: ViewV2
-):
-| {
-sort: string
-sortOrder?: SortOrder
-sortType?: SortType
-}
-| undefined {
-const { sort_column, sort_order, sort_type } = ctx.query
-if (Array.isArray(sort_column)) {
-ctx.throw(400, "sort_column cannot be an array")
-}
-if (Array.isArray(sort_order)) {
-ctx.throw(400, "sort_order cannot be an array")
-}
-if (Array.isArray(sort_type)) {
-ctx.throw(400, "sort_type cannot be an array")
-}
-if (sort_column) {
+function getSortOptions(request: SearchViewRowRequest, view: ViewV2) {
+if (request.sort) {
 return {
-sort: sort_column,
-sortOrder: sort_order as SortOrder,
-sortType: sort_type as SortType,
+sort: request.sort,
+sortOrder: request.sortOrder,
+sortType: request.sortType,
 }
 }
 if (view.sort) {
@@ -82,5 +72,9 @@ function getSortOptions(
 }
 }
-return
+return {
+sort: undefined,
+sortOrder: undefined,
+sortType: undefined,
+}
 }

View File

@@ -269,7 +269,7 @@ router
 )
 router
-.get(
+.post(
 "/api/v2/views/:viewId/search",
 authorized(PermissionType.VIEW, PermissionLevel.READ),
 rowController.views.searchView
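
With this route change, view search moves from a GET with `sort_column`/`sort_order`/`sort_type` query parameters to a POST whose body carries the search options, as the controller change above shows. A hedged sketch of what a call to the new endpoint looks like; the host and authentication details are placeholders, and the enum string values are assumptions:

```typescript
// Sketch of calling POST /api/v2/views/:viewId/search; body fields mirror the
// SearchViewRowRequest usage in the controller above (sort, sortOrder, sortType,
// limit, bookmark, paginate). Auth headers are omitted here.
async function searchView(baseUrl: string, viewId: string, bookmark?: string) {
  const res = await fetch(`${baseUrl}/api/v2/views/${viewId}/search`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      sort: "name",           // optional: overrides the view's default sort
      sortOrder: "ascending", // assumed string value of SortOrder.ASCENDING
      paginate: true,
      limit: 10,
      bookmark,               // pass the previous response's bookmark to page on
    }),
  })
  return res.json() // { rows, totalRows?, hasNextPage?, bookmark? }
}
```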

View File

@@ -813,252 +813,6 @@ describe("/rows", () => {
 })
 })
describe("view search", () => {
function userTable(): Table {
return {
name: "user",
type: "user",
schema: {
name: {
type: FieldType.STRING,
name: "name",
constraints: { type: "string" },
},
age: {
type: FieldType.NUMBER,
name: "age",
constraints: {},
},
},
}
}
it("returns table rows from view", async () => {
const table = await config.createTable(userTable())
const rows = []
for (let i = 0; i < 10; i++) {
rows.push(await config.createRow({ tableId: table._id }))
}
const createViewResponse = await config.api.viewV2.create()
const response = await config.api.viewV2.search(createViewResponse.id)
expect(response.body.rows).toHaveLength(10)
expect(response.body).toEqual({
rows: expect.arrayContaining(rows.map(expect.objectContaining)),
})
})
it("searching respects the view filters", async () => {
const table = await config.createTable(userTable())
const expectedRows = []
for (let i = 0; i < 10; i++)
await config.createRow({
tableId: table._id,
name: generator.name(),
age: generator.integer({ min: 10, max: 30 }),
})
for (let i = 0; i < 5; i++)
expectedRows.push(
await config.createRow({
tableId: table._id,
name: generator.name(),
age: 40,
})
)
const createViewResponse = await config.api.viewV2.create({
query: { equal: { age: 40 } },
})
const response = await config.api.viewV2.search(createViewResponse.id)
expect(response.body.rows).toHaveLength(5)
expect(response.body).toEqual({
rows: expect.arrayContaining(expectedRows.map(expect.objectContaining)),
})
})
const sortTestOptions: [
{
field: string
order?: SortOrder
type?: SortType
},
string[]
][] = [
[
{
field: "name",
order: SortOrder.ASCENDING,
type: SortType.STRING,
},
["Alice", "Bob", "Charly", "Danny"],
],
[
{
field: "name",
},
["Alice", "Bob", "Charly", "Danny"],
],
[
{
field: "name",
order: SortOrder.DESCENDING,
},
["Danny", "Charly", "Bob", "Alice"],
],
[
{
field: "name",
order: SortOrder.DESCENDING,
type: SortType.STRING,
},
["Danny", "Charly", "Bob", "Alice"],
],
[
{
field: "age",
order: SortOrder.ASCENDING,
type: SortType.number,
},
["Danny", "Alice", "Charly", "Bob"],
],
[
{
field: "age",
order: SortOrder.ASCENDING,
},
["Danny", "Alice", "Charly", "Bob"],
],
[
{
field: "age",
order: SortOrder.DESCENDING,
},
["Bob", "Charly", "Alice", "Danny"],
],
[
{
field: "age",
order: SortOrder.DESCENDING,
type: SortType.number,
},
["Bob", "Charly", "Alice", "Danny"],
],
]
it.each(sortTestOptions)(
"allow sorting (%s)",
async (sortParams, expected) => {
await config.createTable(userTable())
const users = [
{ name: "Alice", age: 25 },
{ name: "Bob", age: 30 },
{ name: "Charly", age: 27 },
{ name: "Danny", age: 15 },
]
for (const user of users) {
await config.createRow({
tableId: config.table!._id,
...user,
})
}
const createViewResponse = await config.api.viewV2.create({
sort: sortParams,
})
const response = await config.api.viewV2.search(createViewResponse.id)
expect(response.body.rows).toHaveLength(4)
expect(response.body).toEqual({
rows: expected.map(name => expect.objectContaining({ name })),
})
}
)
it.each(sortTestOptions)(
"allow override the default view sorting (%s)",
async (sortParams, expected) => {
await config.createTable(userTable())
const users = [
{ name: "Alice", age: 25 },
{ name: "Bob", age: 30 },
{ name: "Charly", age: 27 },
{ name: "Danny", age: 15 },
]
for (const user of users) {
await config.createRow({
tableId: config.table!._id,
...user,
})
}
const createViewResponse = await config.api.viewV2.create({
sort: {
field: "name",
order: SortOrder.ASCENDING,
type: SortType.STRING,
},
})
const response = await config.api.viewV2.search(createViewResponse.id, {
sort: {
column: sortParams.field,
order: sortParams.order,
type: sortParams.type,
},
})
expect(response.body.rows).toHaveLength(4)
expect(response.body).toEqual({
rows: expected.map(name => expect.objectContaining({ name })),
})
}
)
it("when schema is defined, defined columns and row attributes are returned", async () => {
const table = await config.createTable(userTable())
const rows = []
for (let i = 0; i < 10; i++) {
rows.push(
await config.createRow({
tableId: table._id,
name: generator.name(),
age: generator.age(),
})
)
}
const view = await config.api.viewV2.create({
schema: { name: {} },
})
const response = await config.api.viewV2.search(view.id)
expect(response.body.rows).toHaveLength(10)
expect(response.body.rows).toEqual(
expect.arrayContaining(
rows.map(r => ({
...expectAnyInternalColsAttributes,
_viewId: view.id,
name: r.name,
}))
)
)
})
it("views without data can be returned", async () => {
const table = await config.createTable(userTable())
const createViewResponse = await config.api.viewV2.create()
const response = await config.api.viewV2.search(createViewResponse.id)
expect(response.body.rows).toHaveLength(0)
})
})
describe("view 2.0", () => { describe("view 2.0", () => {
function userTable(): Table { function userTable(): Table {
return { return {
@ -1261,5 +1015,327 @@ describe("/rows", () => {
await config.api.row.get(tableId, rows[1]._id!, { expectStatus: 200 }) await config.api.row.get(tableId, rows[1]._id!, { expectStatus: 200 })
}) })
}) })
describe("view search", () => {
function userTable(): Table {
return {
name: "user",
type: "user",
schema: {
name: {
type: FieldType.STRING,
name: "name",
constraints: { type: "string" },
},
age: {
type: FieldType.NUMBER,
name: "age",
constraints: {},
},
},
}
}
it("returns table rows from view", async () => {
const table = await config.createTable(userTable())
const rows = []
for (let i = 0; i < 10; i++) {
rows.push(await config.createRow({ tableId: table._id }))
}
const createViewResponse = await config.api.viewV2.create()
const response = await config.api.viewV2.search(createViewResponse.id)
expect(response.body.rows).toHaveLength(10)
expect(response.body).toEqual({
rows: expect.arrayContaining(rows.map(expect.objectContaining)),
})
})
it("searching respects the view filters", async () => {
const table = await config.createTable(userTable())
const expectedRows = []
for (let i = 0; i < 10; i++)
await config.createRow({
tableId: table._id,
name: generator.name(),
age: generator.integer({ min: 10, max: 30 }),
})
for (let i = 0; i < 5; i++)
expectedRows.push(
await config.createRow({
tableId: table._id,
name: generator.name(),
age: 40,
})
)
const createViewResponse = await config.api.viewV2.create({
query: [{ operator: "equal", field: "age", value: 40 }],
})
const response = await config.api.viewV2.search(createViewResponse.id)
expect(response.body.rows).toHaveLength(5)
expect(response.body).toEqual({
rows: expect.arrayContaining(
expectedRows.map(expect.objectContaining)
),
})
})
const sortTestOptions: [
{
field: string
order?: SortOrder
type?: SortType
},
string[]
][] = [
[
{
field: "name",
order: SortOrder.ASCENDING,
type: SortType.STRING,
},
["Alice", "Bob", "Charly", "Danny"],
],
[
{
field: "name",
},
["Alice", "Bob", "Charly", "Danny"],
],
[
{
field: "name",
order: SortOrder.DESCENDING,
},
["Danny", "Charly", "Bob", "Alice"],
],
[
{
field: "name",
order: SortOrder.DESCENDING,
type: SortType.STRING,
},
["Danny", "Charly", "Bob", "Alice"],
],
[
{
field: "age",
order: SortOrder.ASCENDING,
type: SortType.number,
},
["Danny", "Alice", "Charly", "Bob"],
],
[
{
field: "age",
order: SortOrder.ASCENDING,
},
["Danny", "Alice", "Charly", "Bob"],
],
[
{
field: "age",
order: SortOrder.DESCENDING,
},
["Bob", "Charly", "Alice", "Danny"],
],
[
{
field: "age",
order: SortOrder.DESCENDING,
type: SortType.number,
},
["Bob", "Charly", "Alice", "Danny"],
],
]
it.each(sortTestOptions)(
"allow sorting (%s)",
async (sortParams, expected) => {
await config.createTable(userTable())
const users = [
{ name: "Alice", age: 25 },
{ name: "Bob", age: 30 },
{ name: "Charly", age: 27 },
{ name: "Danny", age: 15 },
]
for (const user of users) {
await config.createRow({
tableId: config.table!._id,
...user,
})
}
const createViewResponse = await config.api.viewV2.create({
sort: sortParams,
})
const response = await config.api.viewV2.search(createViewResponse.id)
expect(response.body.rows).toHaveLength(4)
expect(response.body).toEqual({
rows: expected.map(name => expect.objectContaining({ name })),
})
}
)
it.each(sortTestOptions)(
"allow override the default view sorting (%s)",
async (sortParams, expected) => {
await config.createTable(userTable())
const users = [
{ name: "Alice", age: 25 },
{ name: "Bob", age: 30 },
{ name: "Charly", age: 27 },
{ name: "Danny", age: 15 },
]
for (const user of users) {
await config.createRow({
tableId: config.table!._id,
...user,
})
}
const createViewResponse = await config.api.viewV2.create({
sort: {
field: "name",
order: SortOrder.ASCENDING,
type: SortType.STRING,
},
})
const response = await config.api.viewV2.search(
createViewResponse.id,
{
sort: sortParams.field,
sortOrder: sortParams.order,
sortType: sortParams.type,
}
)
expect(response.body.rows).toHaveLength(4)
expect(response.body).toEqual({
rows: expected.map(name => expect.objectContaining({ name })),
})
}
)
it("when schema is defined, defined columns and row attributes are returned", async () => {
const table = await config.createTable(userTable())
const rows = []
for (let i = 0; i < 10; i++) {
rows.push(
await config.createRow({
tableId: table._id,
name: generator.name(),
age: generator.age(),
})
)
}
const view = await config.api.viewV2.create({
schema: { name: {} },
})
const response = await config.api.viewV2.search(view.id)
expect(response.body.rows).toHaveLength(10)
expect(response.body.rows).toEqual(
expect.arrayContaining(
rows.map(r => ({
...expectAnyInternalColsAttributes,
_viewId: view.id,
name: r.name,
}))
)
)
})
it("views without data can be returned", async () => {
const table = await config.createTable(userTable())
const createViewResponse = await config.api.viewV2.create()
const response = await config.api.viewV2.search(createViewResponse.id)
expect(response.body.rows).toHaveLength(0)
})
it("respects the limit parameter", async () => {
const table = await config.createTable(userTable())
const rows = []
for (let i = 0; i < 10; i++) {
rows.push(await config.createRow({ tableId: table._id }))
}
const limit = generator.integer({ min: 1, max: 8 })
const createViewResponse = await config.api.viewV2.create()
const response = await config.api.viewV2.search(createViewResponse.id, {
limit,
})
expect(response.body.rows).toHaveLength(limit)
})
it("can handle pagination", async () => {
const table = await config.createTable(userTable())
const rows = []
for (let i = 0; i < 10; i++) {
rows.push(await config.createRow({ tableId: table._id }))
}
// rows.sort((a, b) => (a._id! > b._id! ? 1 : -1))
const createViewResponse = await config.api.viewV2.create()
const allRows = (await config.api.viewV2.search(createViewResponse.id))
.body.rows
const firstPageResponse = await config.api.viewV2.search(
createViewResponse.id,
{
paginate: true,
limit: 4,
}
)
expect(firstPageResponse.body).toEqual({
rows: expect.arrayContaining(allRows.slice(0, 4)),
totalRows: 10,
hasNextPage: true,
bookmark: expect.any(String),
})
const secondPageResponse = await config.api.viewV2.search(
createViewResponse.id,
{
paginate: true,
limit: 4,
bookmark: firstPageResponse.body.bookmark,
}
)
expect(secondPageResponse.body).toEqual({
rows: expect.arrayContaining(allRows.slice(4, 8)),
totalRows: 10,
hasNextPage: true,
bookmark: expect.any(String),
})
const lastPageResponse = await config.api.viewV2.search(
createViewResponse.id,
{
paginate: true,
limit: 4,
bookmark: secondPageResponse.body.bookmark,
}
)
expect(lastPageResponse.body).toEqual({
rows: expect.arrayContaining(allRows.slice(8)),
totalRows: 10,
hasNextPage: false,
bookmark: expect.any(String),
})
})
})
 })
 })

View File

@@ -62,7 +62,7 @@ describe("/v2/views", () => {
 name: generator.name(),
 tableId: config.table!._id!,
 primaryDisplay: generator.word(),
-query: { allOr: false, equal: { field: "value" } },
+query: [{ operator: "equal", field: "field", value: "value" }],
 sort: {
 field: "fieldToSort",
 order: SortOrder.DESCENDING,
@@ -190,7 +190,7 @@ describe("/v2/views", () => {
 const tableId = config.table!._id!
 await config.api.viewV2.update({
 ...view,
-query: { equal: { newField: "thatValue" } },
+query: [{ operator: "equal", field: "newField", value: "thatValue" }],
 })
 expect(await config.api.table.get(tableId)).toEqual({
@@ -198,7 +198,9 @@ describe("/v2/views", () => {
 views: {
 [view.name]: {
 ...view,
-query: { equal: { newField: "thatValue" } },
+query: [
+{ operator: "equal", field: "newField", value: "thatValue" },
+],
 schema: expect.anything(),
 },
 },
@@ -216,7 +218,13 @@ describe("/v2/views", () => {
 tableId,
 name: view.name,
 primaryDisplay: generator.word(),
-query: { equal: { [generator.word()]: generator.word() } },
+query: [
+{
+operator: "equal",
+field: generator.word(),
+value: generator.word(),
+},
+],
 sort: {
 field: generator.word(),
 order: SortOrder.DESCENDING,
@@ -285,7 +293,7 @@ describe("/v2/views", () => {
 {
 ...view,
 tableId: generator.guid(),
-query: { equal: { newField: "thatValue" } },
+query: [{ operator: "equal", field: "newField", value: "thatValue" }],
 },
 { expectStatus: 404 }
 )
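
The change running through these tests is that a view's `query` is no longer the keyed Lucene-style object but an array of individual filter entries. The same filter in both forms, taken directly from the values above:

```typescript
// Old keyed form previously stored on the view.
const oldStyleQuery = { equal: { newField: "thatValue" } }

// New array form: one { operator, field, value } entry per filter.
const newStyleQuery = [
  { operator: "equal", field: "newField", value: "thatValue" },
]
```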

View File

@@ -1,23 +1,9 @@
-import { SearchFilters, SortOrder, SortType } from "@budibase/types"
+import { SearchFilters, SearchParams } from "@budibase/types"
 import { isExternalTable } from "../../../integrations/utils"
 import * as internal from "./search/internal"
 import * as external from "./search/external"
 import { Format } from "../../../api/controllers/view/exporters"
-export interface SearchParams {
-tableId: string
-paginate?: boolean
-query: SearchFilters
-bookmark?: string
-limit?: number
-sort?: string
-sortOrder?: SortOrder
-sortType?: SortType
-version?: string
-disableEscaping?: boolean
-fields?: string[]
-}
 export interface ViewParams {
 calculation: string
 group: string
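
The locally declared `SearchParams` interface is removed here; the rest of the diff imports it from `@budibase/types` instead. Assuming the shared type keeps the shape of the interface deleted above, callers now build their options like this:

```typescript
// Before: import { SearchParams } from "../../search"
// After: the type lives in the shared types package.
import { SearchParams } from "@budibase/types"

const options: SearchParams = {
  tableId: "ta_example", // illustrative table id
  query: {},             // SearchFilters
  sort: "name",
  paginate: true,
  limit: 25,
}
```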

View File

@@ -6,6 +6,7 @@ import {
 IncludeRelationship,
 Row,
 SearchFilters,
+SearchParams,
 } from "@budibase/types"
 import * as exporters from "../../../../api/controllers/view/exporters"
 import sdk from "../../../../sdk"
@@ -13,7 +14,7 @@ import { handleRequest } from "../../../../api/controllers/row/external"
 import { breakExternalTableId } from "../../../../integrations/utils"
 import { cleanExportRows } from "../utils"
 import { utils } from "@budibase/shared-core"
-import { ExportRowsParams, ExportRowsResult, SearchParams } from "../search"
+import { ExportRowsParams, ExportRowsResult } from "../search"
 import { HTTPError, db } from "@budibase/backend-core"
 import pick from "lodash/pick"

View File

@@ -12,7 +12,7 @@ import {
 } from "../../../../db/utils"
 import { getGlobalUsersFromMetadata } from "../../../../utilities/global"
 import { outputProcessing } from "../../../../utilities/rowProcessor"
-import { Database, Row, Table } from "@budibase/types"
+import { Database, Row, Table, SearchParams } from "@budibase/types"
 import { cleanExportRows } from "../utils"
 import {
 Format,
@@ -28,7 +28,7 @@ import {
 getFromMemoryDoc,
 } from "../../../../api/controllers/view/utils"
 import sdk from "../../../../sdk"
-import { ExportRowsParams, ExportRowsResult, SearchParams } from "../search"
+import { ExportRowsParams, ExportRowsResult } from "../search"
 import pick from "lodash/pick"
 export async function search(options: SearchParams) {

View File

@@ -1,8 +1,15 @@
 import { GenericContainer } from "testcontainers"
-import { Datasource, FieldType, Row, SourceName, Table } from "@budibase/types"
+import {
+Datasource,
+FieldType,
+Row,
+SourceName,
+Table,
+SearchParams,
+} from "@budibase/types"
 import TestConfiguration from "../../../../../tests/utilities/TestConfiguration"
-import { SearchParams } from "../../search"
 import { search } from "../external"
 import {
 expectAnyExternalColsAttributes,

View File

@@ -1,6 +1,5 @@
-import { FieldType, Row, Table } from "@budibase/types"
+import { FieldType, Row, Table, SearchParams } from "@budibase/types"
 import TestConfiguration from "../../../../../tests/utilities/TestConfiguration"
-import { SearchParams } from "../../search"
 import { search } from "../internal"
 import {
 expectAnyInternalColsAttributes,

View File

@@ -1,13 +1,12 @@
 import {
 CreateViewRequest,
-SortOrder,
-SortType,
 UpdateViewRequest,
 DeleteRowRequest,
 PatchRowRequest,
 PatchRowResponse,
 Row,
 ViewV2,
+SearchViewRowRequest,
 } from "@budibase/types"
 import TestConfiguration from "../TestConfiguration"
 import { TestAPI } from "./base"
@@ -81,31 +80,12 @@ export class ViewV2API extends TestAPI {
 search = async (
 viewId: string,
-options?: {
-sort: {
-column: string
-order?: SortOrder
-type?: SortType
-}
-},
+params?: SearchViewRowRequest,
 { expectStatus } = { expectStatus: 200 }
 ) => {
-const qs: [string, any][] = []
-if (options?.sort.column) {
-qs.push(["sort_column", options.sort.column])
-}
-if (options?.sort.order) {
-qs.push(["sort_order", options.sort.order])
-}
-if (options?.sort.type) {
-qs.push(["sort_type", options.sort.type])
-}
-let url = `/api/v2/views/${viewId}/search`
-if (qs.length) {
-url += "?" + qs.map(q => q.join("=")).join("&")
-}
 return this.request
-.get(url)
+.post(`/api/v2/views/${viewId}/search`)
+.send(params)
 .set(this.config.defaultHeaders())
 .expect("Content-Type", /json/)
 .expect(expectStatus)

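For orientation, a rough sketch of how a test might call the updated helper now that sort and pagination options travel in the POST body instead of the query string. The `config.api.viewV2` accessor, the `view.id` property and the exact enum member names are assumptions based on the surrounding changes, not shown verbatim in this hunk.

// Hypothetical Jest-style usage of the new POST-based view search helper
import { SortOrder, SortType, SearchViewRowRequest } from "@budibase/types"

const params: SearchViewRowRequest = {
  sort: "name",                    // column to sort by
  sortOrder: SortOrder.ASCENDING,  // assumed enum member name
  sortType: SortType.STRING,       // assumed enum member name
  limit: 10,
  paginate: true,
}

// Previously these options were serialised as ?sort_column=...&sort_order=...;
// now they are simply sent as the request body.
const res = await config.api.viewV2.search(view.id, params)
expect(res.body.rows).toBeDefined()
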
View File

@@ -486,10 +486,13 @@ class Orchestrator {
     const end = performance.now()
     const executionTime = end - start
-    console.info(`Execution time: ${executionTime} milliseconds`, {
-      _logKey: "automation",
-      executionTime,
-    })
+    console.info(
+      `Automation ID: ${automation._id} Execution time: ${executionTime} milliseconds`,
+      {
+        _logKey: "automation",
+        executionTime,
+      }
+    )
 
     // store the logs for the automation run
     try {

View File

@@ -0,0 +1,4 @@
+*
+!dist/**/*
+dist/tsconfig.build.tsbuildinfo
+!package.json

View File

@@ -2,19 +2,13 @@
   "name": "@budibase/shared-core",
   "version": "0.0.0",
   "description": "Shared data utils",
-  "main": "src/index.ts",
-  "types": "src/index.ts",
-  "exports": {
-    ".": {
-      "import": "./dist/index.js",
-      "require": "./src/index.ts"
-    }
-  },
+  "main": "dist/index.js",
+  "types": "dist/index.d.ts",
   "author": "Budibase",
   "license": "GPL-3.0",
   "scripts": {
     "prebuild": "rimraf dist/",
-    "build": "tsc -p tsconfig.build.json",
+    "build": "node ../../scripts/build.js && tsc -p tsconfig.build.json --emitDeclarationOnly --paths null",
     "build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput",
     "dev:builder": "yarn prebuild && tsc -p tsconfig.json --watch --preserveWatchOutput",
     "check:types": "tsc -p tsconfig.json --noEmit --paths null"
@@ -26,5 +20,19 @@
     "concurrently": "^7.6.0",
     "rimraf": "3.0.2",
     "typescript": "4.7.3"
-  }
+  },
+  "nx": {
+    "targets": {
+      "build": {
+        "dependsOn": [
+          {
+            "projects": [
+              "@budibase/types"
+            ],
+            "target": "build"
+          }
+        ]
+      }
+    }
+  }
 }

View File

@@ -1,4 +1,12 @@
-import { Datasource, FieldType, SortDirection, SortType } from "@budibase/types"
+import {
+  Datasource,
+  FieldType,
+  SearchFilter,
+  SearchQuery,
+  SearchQueryFields,
+  SortDirection,
+  SortType,
+} from "@budibase/types"
 import { OperatorOptions, SqlNumberTypeRangeMap } from "./constants"
 import { deepGet } from "./helpers"
@@ -73,13 +81,13 @@ export const NoEmptyFilterStrings = [
   OperatorOptions.NotEquals.value,
   OperatorOptions.Contains.value,
   OperatorOptions.NotContains.value,
-] as (keyof QueryFields)[]
+] as (keyof SearchQueryFields)[]
 
 /**
  * Removes any fields that contain empty strings that would cause inconsistent
  * behaviour with how backend tables are filtered (no value means no filter).
  */
-const cleanupQuery = (query: Query) => {
+const cleanupQuery = (query: SearchQuery) => {
   if (!query) {
     return query
   }
@@ -110,66 +118,12 @@ const removeKeyNumbering = (key: string) => {
   }
 }
 
-type Filter = {
-  operator: keyof Query
-  field: string
-  type: any
-  value: any
-  externalType: keyof typeof SqlNumberTypeRangeMap
-}
-
-type Query = QueryFields & QueryConfig
-
-type QueryFields = {
-  string?: {
-    [key: string]: string
-  }
-  fuzzy?: {
-    [key: string]: string
-  }
-  range?: {
-    [key: string]: {
-      high: number | string
-      low: number | string
-    }
-  }
-  equal?: {
-    [key: string]: any
-  }
-  notEqual?: {
-    [key: string]: any
-  }
-  empty?: {
-    [key: string]: any
-  }
-  notEmpty?: {
-    [key: string]: any
-  }
-  oneOf?: {
-    [key: string]: any[]
-  }
-  contains?: {
-    [key: string]: any[]
-  }
-  notContains?: {
-    [key: string]: any[]
-  }
-  containsAny?: {
-    [key: string]: any[]
-  }
-}
-
-type QueryConfig = {
-  allOr?: boolean
-}
-
-type QueryFieldsType = keyof QueryFields
-
 /**
  * Builds a lucene JSON query from the filter structure generated in the builder
  * @param filter the builder filter structure
  */
-export const buildLuceneQuery = (filter: Filter[]) => {
-  let query: Query = {
+export const buildLuceneQuery = (filter: SearchFilter[]) => {
+  let query: SearchQuery = {
     string: {},
     fuzzy: {},
     range: {},
@@ -227,9 +181,13 @@ export const buildLuceneQuery = (filter: Filter[]) => {
     }
     if (operator.startsWith("range") && query.range) {
       const minint =
-        SqlNumberTypeRangeMap[externalType]?.min || Number.MIN_SAFE_INTEGER
+        SqlNumberTypeRangeMap[
+          externalType as keyof typeof SqlNumberTypeRangeMap
+        ]?.min || Number.MIN_SAFE_INTEGER
       const maxint =
-        SqlNumberTypeRangeMap[externalType]?.max || Number.MAX_SAFE_INTEGER
+        SqlNumberTypeRangeMap[
+          externalType as keyof typeof SqlNumberTypeRangeMap
+        ]?.max || Number.MAX_SAFE_INTEGER
       if (!query.range[field]) {
         query.range[field] = {
           low: type === "number" ? minint : "0000-00-00T00:00:00.000Z",
@@ -275,7 +233,7 @@ export const buildLuceneQuery = (filter: Filter[]) => {
  * @param docs the data
  * @param query the JSON lucene query
  */
-export const runLuceneQuery = (docs: any[], query?: Query) => {
+export const runLuceneQuery = (docs: any[], query?: SearchQuery) => {
   if (!docs || !Array.isArray(docs)) {
     return []
   }
@@ -289,7 +247,7 @@ export const runLuceneQuery = (docs: any[], query?: Query) => {
   // Iterates over a set of filters and evaluates a fail function against a doc
   const match =
     (
-      type: QueryFieldsType,
+      type: keyof SearchQueryFields,
       failFn: (docValue: any, testValue: any) => boolean
     ) =>
     (doc: any) => {
@@ -456,7 +414,7 @@ export const luceneLimit = (docs: any[], limit: string) => {
   return docs.slice(0, numLimit)
 }
 
-export const hasFilters = (query?: Query) => {
+export const hasFilters = (query?: SearchQuery) => {
   if (!query) {
     return false
   }

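To make the renamed types concrete, a minimal sketch of the two helpers above driven by the shared SearchFilter type. It assumes buildLuceneQuery and runLuceneQuery are re-exported from the @budibase/shared-core index; the rows and filter values are invented for illustration.

import { FieldType, SearchFilter } from "@budibase/types"
import { buildLuceneQuery, runLuceneQuery } from "@budibase/shared-core"

// Builder-style filters, now typed with the shared SearchFilter type
const filters: SearchFilter[] = [
  { operator: "equal", field: "status", type: FieldType.STRING, value: "active" },
]

// buildLuceneQuery groups the filters into a SearchQuery keyed by operator...
const query = buildLuceneQuery(filters)

// ...which runLuceneQuery can then evaluate client-side against in-memory rows
const rows = [
  { status: "active", name: "Row 1" },
  { status: "archived", name: "Row 2" },
]
console.log(runLuceneQuery(rows, query)) // expected: just the "active" row
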
View File

@@ -12,7 +12,10 @@
     "declaration": true,
     "types": ["node"],
     "outDir": "dist",
-    "skipLibCheck": true
+    "skipLibCheck": true,
+    "paths": {
+      "@budibase/types": ["../types/src"]
+    }
   },
   "include": ["**/*.js", "**/*.ts"],
   "exclude": [

View File

@@ -1,13 +1,4 @@
 {
   "extends": "./tsconfig.build.json",
-  "compilerOptions": {
-    "baseUrl": ".",
-    "rootDir": "./src",
-    "composite": true,
-    "tsBuildInfoFile": "dist/tsconfig.tsbuildinfo",
-    "paths": {
-      "@budibase/types": ["../../types/src"]
-    }
-  },
   "exclude": ["node_modules", "dist"]
 }

View File

@@ -0,0 +1,4 @@
+*
+!dist/**/*
+dist/tsconfig.build.tsbuildinfo
+!package.json

View File

@@ -2,19 +2,13 @@
   "name": "@budibase/types",
   "version": "0.0.0",
   "description": "Budibase types",
-  "main": "src/index.ts",
-  "types": "src/index.ts",
-  "exports": {
-    ".": {
-      "import": "./dist/index.js",
-      "require": "./src/index.ts"
-    }
-  },
+  "main": "dist/index.js",
+  "types": "dist/index.d.ts",
   "author": "Budibase",
   "license": "GPL-3.0",
   "scripts": {
     "prebuild": "rimraf dist/",
-    "build": "tsc -p tsconfig.build.json",
+    "build": "node ../../scripts/build.js && tsc -p tsconfig.build.json --emitDeclarationOnly",
     "build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput",
     "dev:builder": "yarn prebuild && tsc -p tsconfig.json --watch --preserveWatchOutput",
     "check:types": "tsc -p tsconfig.json --noEmit --paths null"

View File

@@ -1,3 +1,4 @@
+import { SearchParams } from "../../../sdk"
 import { Row } from "../../../documents"
 
 export interface PatchRowRequest extends Row {
@@ -8,6 +9,14 @@ export interface PatchRowRequest extends Row {
 export interface PatchRowResponse extends Row {}
 
-export interface SearchResponse {
+export interface SearchRowRequest extends Omit<SearchParams, "tableId"> {}
+
+export interface SearchViewRowRequest
+  extends Pick<
+    SearchRowRequest,
+    "sort" | "sortOrder" | "sortType" | "limit" | "bookmark" | "paginate"
+  > {}
+
+export interface SearchRowResponse {
   rows: any[]
 }

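A quick illustration of how these two request types relate, assuming both interfaces are re-exported from the package root: SearchRowRequest is the full search payload minus the tableId carried in the URL, while SearchViewRowRequest only accepts the sort/pagination subset, so a view's own query cannot be overridden from the request body.

import { SearchRowRequest, SearchViewRowRequest } from "@budibase/types"

// Full table search body: everything from SearchParams except tableId
const body: SearchRowRequest = { query: {}, limit: 10 }

// View search body: sort/pagination options only
const viewBody: SearchViewRowRequest = { limit: 10, paginate: true }

// @ts-expect-error — "query" is deliberately not part of SearchViewRowRequest
const invalid: SearchViewRowRequest = { query: {} }
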
View File

@@ -8,3 +8,4 @@ export * from "./system"
 export * from "./app"
 export * from "./global"
 export * from "./pagination"
+export * from "./searchFilter"

View File

@@ -0,0 +1,51 @@
+import { FieldType } from "../../documents"
+
+export type SearchFilter = {
+  operator: keyof SearchQuery
+  field: string
+  type?: FieldType
+  value: any
+  externalType?: string
+}
+
+export type SearchQuery = {
+  allOr?: boolean
+  string?: {
+    [key: string]: string
+  }
+  fuzzy?: {
+    [key: string]: string
+  }
+  range?: {
+    [key: string]: {
+      high: number | string
+      low: number | string
+    }
+  }
+  equal?: {
+    [key: string]: any
+  }
+  notEqual?: {
+    [key: string]: any
+  }
+  empty?: {
+    [key: string]: any
+  }
+  notEmpty?: {
+    [key: string]: any
+  }
+  oneOf?: {
+    [key: string]: any[]
+  }
+  contains?: {
+    [key: string]: any[]
+  }
+  notContains?: {
+    [key: string]: any[]
+  }
+  containsAny?: {
+    [key: string]: any[]
+  }
+}
+
+export type SearchQueryFields = Omit<SearchQuery, "allOr">

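For reference, a hand-written SearchQuery showing the shape the new shared type describes. The field names and values are invented, and the comment on allOr reflects how the lucene helpers elsewhere in this diff appear to treat it.

import { SearchQuery, SearchQueryFields } from "@budibase/types"

const query: SearchQuery = {
  allOr: false, // combine the operator blocks with AND rather than OR
  string: { name: "jo" },
  range: { age: { low: 18, high: 65 } },
  equal: { active: true },
  oneOf: { country: ["UK", "IE"] },
}

// SearchQueryFields is the same shape minus allOr, which is why it is the type
// used for keyof lookups such as NoEmptyFilterStrings earlier in this diff.
const operatorBlocksOnly: SearchQueryFields = { fuzzy: { description: "report" } }
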
View File

@@ -1,6 +1,5 @@
-import { SortOrder, SortType } from "../../api"
-import { SearchFilters } from "../../sdk"
-import { TableSchema, UIFieldMetadata } from "./table"
+import { SearchFilter, SortOrder, SortType } from "../../api"
+import { UIFieldMetadata } from "./table"
 
 export interface View {
   name: string
@@ -20,7 +19,7 @@ export interface ViewV2 {
   name: string
   primaryDisplay?: string
   tableId: string
-  query?: SearchFilters
+  query?: SearchFilter[]
   sort?: {
     field: string
     order?: SortOrder

View File

@@ -19,3 +19,4 @@ export * from "./user"
 export * from "./cli"
 export * from "./websocket"
 export * from "./permissions"
+export * from "./row"

View File

@@ -0,0 +1,16 @@
+import { SortOrder, SortType } from "../api"
+import { SearchFilters } from "./search"
+
+export interface SearchParams {
+  tableId: string
+  paginate?: boolean
+  query: SearchFilters
+  bookmark?: string
+  limit?: number
+  sort?: string
+  sortOrder?: SortOrder
+  sortType?: SortType
+  version?: string
+  disableEscaping?: boolean
+  fields?: string[]
+}

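To tie this back to the search(options: SearchParams) signatures earlier in the diff, a sketch of how the shared interface might be populated. The table id and field values are placeholders, the query shape follows SearchFilters, and the enum member names are assumptions.

import { SearchParams, SortOrder, SortType } from "@budibase/types"

const options: SearchParams = {
  tableId: "ta_example",            // placeholder table id
  query: { equal: { status: "open" } },
  paginate: true,
  limit: 50,
  sort: "createdAt",
  sortOrder: SortOrder.DESCENDING,  // assumed enum member name
  sortType: SortType.STRING,        // assumed enum member name
}

// `options` can then be handed to the row-search SDK functions shown above,
// rather than each package declaring its own params type.
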
View File

@@ -1,4 +1,4 @@
-FROM node:14-alpine
+FROM node:18-alpine
 
 LABEL com.centurylinklabs.watchtower.lifecycle.pre-check="scripts/watchtower-hooks/pre-check.sh"
 LABEL com.centurylinklabs.watchtower.lifecycle.pre-update="scripts/watchtower-hooks/pre-update.sh"

View File

@@ -1,12 +1,14 @@
 import { auth } from "@budibase/backend-core"
 import Joi from "joi"
 
+const OPTIONAL_STRING = Joi.string().allow(null, "")
+
 let schema: any = {
-  email: Joi.string().allow(null, ""),
-  password: Joi.string().allow(null, ""),
+  email: OPTIONAL_STRING,
+  password: OPTIONAL_STRING,
   forceResetPassword: Joi.boolean().optional(),
-  firstName: Joi.string().allow(null, ""),
-  lastName: Joi.string().allow(null, ""),
+  firstName: OPTIONAL_STRING,
+  lastName: OPTIONAL_STRING,
   builder: Joi.object({
     global: Joi.boolean().optional(),
     apps: Joi.array().optional(),
@@ -21,8 +23,8 @@ export const buildSelfSaveValidation = () => {
   schema = {
     password: Joi.string().optional(),
     forceResetPassword: Joi.boolean().optional(),
-    firstName: Joi.string().allow("").optional(),
-    lastName: Joi.string().allow("").optional(),
+    firstName: OPTIONAL_STRING,
+    lastName: OPTIONAL_STRING,
     onboardedAt: Joi.string().optional(),
   }
   return auth.joiValidator.body(Joi.object(schema).required().unknown(false))

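The same pattern in isolation, in case the intent of the refactor is not obvious: one shared Joi fragment replaces the repeated Joi.string().allow(null, "") chains. The schema below is a standalone illustration, not the worker's actual validator.

import Joi from "joi"

const OPTIONAL_STRING = Joi.string().allow(null, "")

const exampleSchema = Joi.object({
  email: OPTIONAL_STRING,
  firstName: OPTIONAL_STRING,
  lastName: OPTIONAL_STRING,
}).unknown(false)

// Empty strings and nulls pass, so `error` is undefined here
console.log(exampleSchema.validate({ email: "", firstName: null }))
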
View File

@@ -43,7 +43,6 @@ function runBuild(entry, outfile) {
       TsconfigPathsPlugin({ tsconfig: tsconfigPathPluginContent }),
       nodeExternalsPlugin(),
     ],
-    target: "node14",
     preserveSymlinks: true,
     loader: {
       ".svelte": "copy",