Merge remote-tracking branch 'origin/develop' into fix/new-column-popover-fixes

Peter Clement 2023-08-18 09:34:27 +01:00
commit 11069bacfb
78 changed files with 1106 additions and 1353 deletions

@ -18,8 +18,6 @@ env:
BRANCH: ${{ github.event.pull_request.head.ref }} BRANCH: ${{ github.event.pull_request.head.ref }}
BASE_BRANCH: ${{ github.event.pull_request.base.ref}} BASE_BRANCH: ${{ github.event.pull_request.base.ref}}
PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }} PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
NX_BASE_BRANCH: origin/${{ github.base_ref }}
USE_NX_AFFECTED: ${{ github.event_name == 'pull_request' && github.base_ref != 'master'}}
jobs: jobs:
lint: lint:
@ -27,20 +25,20 @@ jobs:
steps: steps:
- name: Checkout repo and submodules - name: Checkout repo and submodules
uses: actions/checkout@v3 uses: actions/checkout@v3
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase' if: github.repository == 'Budibase/budibase'
with: with:
submodules: true submodules: true
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }} token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
- name: Checkout repo only - name: Checkout repo only
uses: actions/checkout@v3 uses: actions/checkout@v3
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase' if: github.repository != 'Budibase/budibase'
- name: Use Node.js 18.x - name: Use Node.js 14.x
uses: actions/setup-node@v3 uses: actions/setup-node@v3
with: with:
node-version: 18.x node-version: 14.x
cache: "yarn" cache: "yarn"
- run: yarn --frozen-lockfile - run: yarn
- run: yarn lint - run: yarn lint
build: build:
@ -48,66 +46,45 @@ jobs:
steps: steps:
- name: Checkout repo and submodules - name: Checkout repo and submodules
uses: actions/checkout@v3 uses: actions/checkout@v3
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase' if: github.repository == 'Budibase/budibase'
with: with:
submodules: true submodules: true
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }} token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
fetch-depth: 0
- name: Checkout repo only - name: Checkout repo only
uses: actions/checkout@v3 uses: actions/checkout@v3
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase' if: github.repository != 'Budibase/budibase'
with:
fetch-depth: 0
- name: Use Node.js 18.x - name: Use Node.js 14.x
uses: actions/setup-node@v3 uses: actions/setup-node@v3
with: with:
node-version: 18.x node-version: 14.x
cache: "yarn" cache: "yarn"
- run: yarn --frozen-lockfile - run: yarn
# Run build all the projects # Run build all the projects
- name: Build - run: yarn build
run: |
yarn build
# Check the types of the projects built via esbuild # Check the types of the projects built via esbuild
- name: Check types - run: yarn check:types
run: |
if ${{ env.USE_NX_AFFECTED }}; then
yarn check:types --since=${{ env.NX_BASE_BRANCH }}
else
yarn check:types
fi
test-libraries: test-libraries:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Checkout repo and submodules - name: Checkout repo and submodules
uses: actions/checkout@v3 uses: actions/checkout@v3
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase' if: github.repository == 'Budibase/budibase'
with: with:
submodules: true submodules: true
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }} token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
fetch-depth: 0
- name: Checkout repo only - name: Checkout repo only
uses: actions/checkout@v3 uses: actions/checkout@v3
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase' if: github.repository != 'Budibase/budibase'
with:
fetch-depth: 0
- name: Use Node.js 18.x - name: Use Node.js 14.x
uses: actions/setup-node@v3 uses: actions/setup-node@v3
with: with:
node-version: 18.x node-version: 14.x
cache: "yarn" cache: "yarn"
- run: yarn --frozen-lockfile - run: yarn
- name: Test - run: yarn test --ignore=@budibase/worker --ignore=@budibase/server --ignore=@budibase/pro
run: |
if ${{ env.USE_NX_AFFECTED }}; then
yarn test --ignore=@budibase/worker --ignore=@budibase/server --ignore=@budibase/pro --since=${{ env.NX_BASE_BRANCH }}
else
yarn test --ignore=@budibase/worker --ignore=@budibase/server --ignore=@budibase/pro
fi
- uses: codecov/codecov-action@v3 - uses: codecov/codecov-action@v3
with: with:
token: ${{ secrets.CODECOV_TOKEN }} # not required for public repos token: ${{ secrets.CODECOV_TOKEN }} # not required for public repos
@ -119,31 +96,21 @@ jobs:
steps: steps:
- name: Checkout repo and submodules - name: Checkout repo and submodules
uses: actions/checkout@v3 uses: actions/checkout@v3
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase' if: github.repository == 'Budibase/budibase'
with: with:
submodules: true submodules: true
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }} token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
fetch-depth: 0
- name: Checkout repo only - name: Checkout repo only
uses: actions/checkout@v3 uses: actions/checkout@v3
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase' if: github.repository != 'Budibase/budibase'
with:
fetch-depth: 0
- name: Use Node.js 18.x - name: Use Node.js 14.x
uses: actions/setup-node@v3 uses: actions/setup-node@v3
with: with:
node-version: 18.x node-version: 14.x
cache: "yarn" cache: "yarn"
- run: yarn --frozen-lockfile - run: yarn
- name: Test worker and server - run: yarn test --scope=@budibase/worker --scope=@budibase/server
run: |
if ${{ env.USE_NX_AFFECTED }}; then
yarn test --scope=@budibase/worker --scope=@budibase/server --since=${{ env.NX_BASE_BRANCH }}
else
yarn test --scope=@budibase/worker --scope=@budibase/server
fi
- uses: codecov/codecov-action@v3 - uses: codecov/codecov-action@v3
with: with:
token: ${{ secrets.CODECOV_TOKEN || github.token }} # not required for public repos token: ${{ secrets.CODECOV_TOKEN || github.token }} # not required for public repos
@ -152,49 +119,42 @@ jobs:
test-pro: test-pro:
runs-on: ubuntu-latest runs-on: ubuntu-latest
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase' if: github.repository == 'Budibase/budibase'
steps: steps:
- name: Checkout repo and submodules - name: Checkout repo and submodules
uses: actions/checkout@v3 uses: actions/checkout@v3
with: with:
submodules: true submodules: true
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }} token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
fetch-depth: 0
- name: Use Node.js 18.x - name: Use Node.js 14.x
uses: actions/setup-node@v3 uses: actions/setup-node@v3
with: with:
node-version: 18.x node-version: 14.x
cache: "yarn" cache: "yarn"
- run: yarn --frozen-lockfile - run: yarn
- name: Test - run: yarn test --scope=@budibase/pro
run: |
if ${{ env.USE_NX_AFFECTED }}; then
yarn test --scope=@budibase/pro --since=${{ env.NX_BASE_BRANCH }}
else
yarn test --scope=@budibase/pro
fi
integration-test: integration-test:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Checkout repo and submodules - name: Checkout repo and submodules
uses: actions/checkout@v3 uses: actions/checkout@v3
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase' if: github.repository == 'Budibase/budibase'
with: with:
submodules: true submodules: true
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }} token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
- name: Checkout repo only - name: Checkout repo only
uses: actions/checkout@v3 uses: actions/checkout@v3
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase' if: github.repository != 'Budibase/budibase'
- name: Use Node.js 18.x - name: Use Node.js 14.x
uses: actions/setup-node@v3 uses: actions/setup-node@v3
with: with:
node-version: 18.x node-version: 14.x
cache: "yarn" cache: "yarn"
- run: yarn --frozen-lockfile - run: yarn
- run: yarn build --scope @budibase/server --scope @budibase/worker --scope @budibase/client - run: yarn build --projects=@budibase/server,@budibase/worker,@budibase/client
- name: Run tests - name: Run tests
run: | run: |
cd qa-core cd qa-core
@ -206,12 +166,13 @@ jobs:
check-pro-submodule: check-pro-submodule:
runs-on: ubuntu-latest runs-on: ubuntu-latest
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase' if: github.repository == 'Budibase/budibase'
steps: steps:
- name: Checkout repo and submodules - name: Checkout repo and submodules
uses: actions/checkout@v3 uses: actions/checkout@v3
with: with:
submodules: true submodules: true
fetch-depth: 0
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }} token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
- name: Check pro commit - name: Check pro commit

@ -1,29 +0,0 @@
name: check_unreleased_changes
on:
pull_request:
branches:
- master
jobs:
check_unreleased:
runs-on: ubuntu-latest
steps:
- name: Check for unreleased changes
env:
REPO: "Budibase/budibase"
TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
RELEASE_TIMESTAMP=$(curl -s -H "Authorization: token $TOKEN" \
"https://api.github.com/repos/$REPO/releases/latest" | \
jq -r .published_at)
COMMIT_TIMESTAMP=$(curl -s -H "Authorization: token $TOKEN" \
"https://api.github.com/repos/$REPO/commits/master" | \
jq -r .commit.committer.date)
RELEASE_SECONDS=$(date --date="$RELEASE_TIMESTAMP" "+%s")
COMMIT_SECONDS=$(date --date="$COMMIT_TIMESTAMP" "+%s")
if (( COMMIT_SECONDS > RELEASE_SECONDS )); then
echo "There are unreleased changes. Please release these changes before merging."
exit 1
fi
echo "No unreleased changes detected."

@ -44,7 +44,7 @@ jobs:
- uses: actions/setup-node@v1 - uses: actions/setup-node@v1
with: with:
node-version: 18.x node-version: 14.x
- run: yarn install --frozen-lockfile - run: yarn install --frozen-lockfile
- name: Update versions - name: Update versions

.nvmrc

@ -1 +1 @@
v18.17.0 v14.20.1

@ -1,3 +1,3 @@
nodejs 18.17.0 nodejs 14.21.3
python 3.10.0 python 3.10.0
yarn 1.22.19 yarn 1.22.19

.vscode/launch.json

@ -8,7 +8,6 @@
"name": "Budibase Server", "name": "Budibase Server",
"type": "node", "type": "node",
"request": "launch", "request": "launch",
"runtimeVersion": "14.20.1",
"runtimeArgs": ["--nolazy", "-r", "ts-node/register/transpile-only"], "runtimeArgs": ["--nolazy", "-r", "ts-node/register/transpile-only"],
"args": ["${workspaceFolder}/packages/server/src/index.ts"], "args": ["${workspaceFolder}/packages/server/src/index.ts"],
"cwd": "${workspaceFolder}/packages/server" "cwd": "${workspaceFolder}/packages/server"
@ -17,7 +16,6 @@
"name": "Budibase Worker", "name": "Budibase Worker",
"type": "node", "type": "node",
"request": "launch", "request": "launch",
"runtimeVersion": "14.20.1",
"runtimeArgs": ["--nolazy", "-r", "ts-node/register/transpile-only"], "runtimeArgs": ["--nolazy", "-r", "ts-node/register/transpile-only"],
"args": ["${workspaceFolder}/packages/worker/src/index.ts"], "args": ["${workspaceFolder}/packages/worker/src/index.ts"],
"cwd": "${workspaceFolder}/packages/worker" "cwd": "${workspaceFolder}/packages/worker"
@ -29,4 +27,4 @@
"configurations": ["Budibase Server", "Budibase Worker"] "configurations": ["Budibase Server", "Budibase Worker"]
} }
] ]
} }

@ -137,7 +137,6 @@ services:
path: /health path: /health
port: 10000 port: 10000
scheme: HTTP scheme: HTTP
enabled: true
periodSeconds: 3 periodSeconds: 3
failureThreshold: 1 failureThreshold: 1
livenessProbe: livenessProbe:
@ -170,7 +169,6 @@ services:
path: /health path: /health
port: 4002 port: 4002
scheme: HTTP scheme: HTTP
enabled: true
periodSeconds: 3 periodSeconds: 3
failureThreshold: 1 failureThreshold: 1
livenessProbe: livenessProbe:
@ -204,7 +202,6 @@ services:
path: /health path: /health
port: 4003 port: 4003
scheme: HTTP scheme: HTTP
enabled: true
periodSeconds: 3 periodSeconds: 3
failureThreshold: 1 failureThreshold: 1
livenessProbe: livenessProbe:
@ -411,14 +408,12 @@ couchdb:
## Ref: https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-probes/#configure-probes ## Ref: https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-probes/#configure-probes
# FOR COUCHDB # FOR COUCHDB
livenessProbe: livenessProbe:
enabled: true
failureThreshold: 3 failureThreshold: 3
initialDelaySeconds: 0 initialDelaySeconds: 0
periodSeconds: 10 periodSeconds: 10
successThreshold: 1 successThreshold: 1
timeoutSeconds: 1 timeoutSeconds: 1
readinessProbe: readinessProbe:
enabled: true
failureThreshold: 3 failureThreshold: 3
initialDelaySeconds: 0 initialDelaySeconds: 0
periodSeconds: 10 periodSeconds: 10

@ -90,7 +90,7 @@ Component libraries are collections of components as well as the definition of t
#### 1. Prerequisites #### 1. Prerequisites
- NodeJS version `18.x.x` - NodeJS version `14.x.x`
- Python version `3.x` - Python version `3.x`
### Using asdf (recommended) ### Using asdf (recommended)

@ -27,6 +27,7 @@ services:
BB_ADMIN_USER_EMAIL: ${BB_ADMIN_USER_EMAIL} BB_ADMIN_USER_EMAIL: ${BB_ADMIN_USER_EMAIL}
BB_ADMIN_USER_PASSWORD: ${BB_ADMIN_USER_PASSWORD} BB_ADMIN_USER_PASSWORD: ${BB_ADMIN_USER_PASSWORD}
PLUGINS_DIR: ${PLUGINS_DIR} PLUGINS_DIR: ${PLUGINS_DIR}
OFFLINE_MODE: ${OFFLINE_MODE}
depends_on: depends_on:
- worker-service - worker-service
- redis-service - redis-service
@ -54,6 +55,7 @@ services:
INTERNAL_API_KEY: ${INTERNAL_API_KEY} INTERNAL_API_KEY: ${INTERNAL_API_KEY}
REDIS_URL: redis-service:6379 REDIS_URL: redis-service:6379
REDIS_PASSWORD: ${REDIS_PASSWORD} REDIS_PASSWORD: ${REDIS_PASSWORD}
OFFLINE_MODE: ${OFFLINE_MODE}
depends_on: depends_on:
- redis-service - redis-service
- minio-service - minio-service

@ -1,7 +1,7 @@
FROM node:18-slim as build FROM node:14-slim as build
# install node-gyp dependencies # install node-gyp dependencies
RUN apt-get update && apt-get upgrade -y && apt-get install -y --no-install-recommends apt-utils cron g++ make python3 RUN apt-get update && apt-get upgrade -y && apt-get install -y --no-install-recommends apt-utils cron g++ make python
# add pin script # add pin script
WORKDIR / WORKDIR /

@ -1,5 +1,5 @@
{ {
"version": "2.9.26-alpha.2", "version": "2.9.30-alpha.0",
"npmClient": "yarn", "npmClient": "yarn",
"packages": [ "packages": [
"packages/*" "packages/*"

@ -34,7 +34,7 @@
"preinstall": "node scripts/syncProPackage.js", "preinstall": "node scripts/syncProPackage.js",
"setup": "git config submodule.recurse true && git submodule update && node ./hosting/scripts/setup.js && yarn && yarn build && yarn dev", "setup": "git config submodule.recurse true && git submodule update && node ./hosting/scripts/setup.js && yarn && yarn build && yarn dev",
"bootstrap": "./scripts/link-dependencies.sh && echo '***BOOTSTRAP ONLY REQUIRED FOR USE WITH ACCOUNT PORTAL***'", "bootstrap": "./scripts/link-dependencies.sh && echo '***BOOTSTRAP ONLY REQUIRED FOR USE WITH ACCOUNT PORTAL***'",
"build": "lerna run build --stream", "build": "yarn nx run-many -t=build",
"build:dev": "lerna run --stream prebuild && yarn nx run-many --target=build --output-style=dynamic --watch --preserveWatchOutput", "build:dev": "lerna run --stream prebuild && yarn nx run-many --target=build --output-style=dynamic --watch --preserveWatchOutput",
"check:types": "lerna run check:types", "check:types": "lerna run check:types",
"backend:bootstrap": "./scripts/scopeBackend.sh && yarn run bootstrap", "backend:bootstrap": "./scripts/scopeBackend.sh && yarn run bootstrap",
@ -109,7 +109,7 @@
"@budibase/types": "0.0.0" "@budibase/types": "0.0.0"
}, },
"engines": { "engines": {
"node": ">=18.0.0 <19.0.0" "node": ">=14.0.0 <15.0.0"
}, },
"dependencies": {} "dependencies": {}
} }

@ -1,7 +1,6 @@
import fetch from "node-fetch" import fetch from "node-fetch"
import { getCouchInfo } from "./couch" import { getCouchInfo } from "./couch"
import { SearchFilters, Row } from "@budibase/types" import { SearchFilters, Row, EmptyFilterOption } from "@budibase/types"
import { createUserIndex } from "./searchIndexes/searchIndexes"
const QUERY_START_REGEX = /\d[0-9]*:/g const QUERY_START_REGEX = /\d[0-9]*:/g
@ -65,6 +64,7 @@ export class QueryBuilder<T> {
this.#index = index this.#index = index
this.#query = { this.#query = {
allOr: false, allOr: false,
onEmptyFilter: EmptyFilterOption.RETURN_ALL,
string: {}, string: {},
fuzzy: {}, fuzzy: {},
range: {}, range: {},
@ -218,6 +218,10 @@ export class QueryBuilder<T> {
this.#query.allOr = true this.#query.allOr = true
} }
setOnEmptyFilter(value: EmptyFilterOption) {
this.#query.onEmptyFilter = value
}
handleSpaces(input: string) { handleSpaces(input: string) {
if (this.#noEscaping) { if (this.#noEscaping) {
return input return input
@ -289,8 +293,9 @@ export class QueryBuilder<T> {
const builder = this const builder = this
let allOr = this.#query && this.#query.allOr let allOr = this.#query && this.#query.allOr
let query = allOr ? "" : "*:*" let query = allOr ? "" : "*:*"
let allFiltersEmpty = true
const allPreProcessingOpts = { escape: true, lowercase: true, wrap: true } const allPreProcessingOpts = { escape: true, lowercase: true, wrap: true }
let tableId let tableId: string = ""
if (this.#query.equal!.tableId) { if (this.#query.equal!.tableId) {
tableId = this.#query.equal!.tableId tableId = this.#query.equal!.tableId
delete this.#query.equal!.tableId delete this.#query.equal!.tableId
@ -305,7 +310,7 @@ export class QueryBuilder<T> {
} }
const contains = (key: string, value: any, mode = "AND") => { const contains = (key: string, value: any, mode = "AND") => {
if (Array.isArray(value) && value.length === 0) { if (!value || (Array.isArray(value) && value.length === 0)) {
return null return null
} }
if (!Array.isArray(value)) { if (!Array.isArray(value)) {
@ -384,6 +389,12 @@ export class QueryBuilder<T> {
built += ` ${mode} ` built += ` ${mode} `
} }
built += expression built += expression
if (
(typeof value !== "string" && value != null) ||
(typeof value === "string" && value !== tableId && value !== "")
) {
allFiltersEmpty = false
}
} }
if (opts?.returnBuilt) { if (opts?.returnBuilt) {
return built return built
@ -463,6 +474,13 @@ export class QueryBuilder<T> {
allOr = false allOr = false
build({ tableId }, equal) build({ tableId }, equal)
} }
if (allFiltersEmpty) {
if (this.#query.onEmptyFilter === EmptyFilterOption.RETURN_NONE) {
return ""
} else if (this.#query?.allOr) {
return query.replace("()", "(*:*)")
}
}
return query return query
} }
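
Several hunks in this changeset import EmptyFilterOption from @budibase/types, but its definition never appears in the diff; a plausible shape, inferred only from the "all"/"none" options wired up in the filter drawer further down, is sketched here as an assumption rather than the actual package source:

// assumed definition; the real enum lives in @budibase/types and is not part of this diff
export enum EmptyFilterOption {
  RETURN_ALL = "all",
  RETURN_NONE = "none",
}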

@ -1,6 +1,6 @@
import { newid } from "../../docIds/newid" import { newid } from "../../docIds/newid"
import { getDB } from "../db" import { getDB } from "../db"
import { Database } from "@budibase/types" import { Database, EmptyFilterOption } from "@budibase/types"
import { QueryBuilder, paginatedSearch, fullSearch } from "../lucene" import { QueryBuilder, paginatedSearch, fullSearch } from "../lucene"
const INDEX_NAME = "main" const INDEX_NAME = "main"
@ -156,6 +156,76 @@ describe("lucene", () => {
expect(resp.rows.length).toBe(2) expect(resp.rows.length).toBe(2)
}) })
describe("empty filters behaviour", () => {
it("should return all rows by default", async () => {
const builder = new QueryBuilder(dbName, INDEX_NAME)
builder.addEqual("property", "")
builder.addEqual("number", null)
builder.addString("property", "")
builder.addFuzzy("property", "")
builder.addNotEqual("number", undefined)
builder.addOneOf("number", null)
builder.addContains("array", undefined)
builder.addNotContains("array", null)
builder.addContainsAny("array", null)
const resp = await builder.run()
expect(resp.rows.length).toBe(3)
})
it("should return all rows when onEmptyFilter is ALL", async () => {
const builder = new QueryBuilder(dbName, INDEX_NAME)
builder.setOnEmptyFilter(EmptyFilterOption.RETURN_ALL)
builder.setAllOr()
builder.addEqual("property", "")
builder.addEqual("number", null)
builder.addString("property", "")
builder.addFuzzy("property", "")
builder.addNotEqual("number", undefined)
builder.addOneOf("number", null)
builder.addContains("array", undefined)
builder.addNotContains("array", null)
builder.addContainsAny("array", null)
const resp = await builder.run()
expect(resp.rows.length).toBe(3)
})
it("should return no rows when onEmptyFilter is NONE", async () => {
const builder = new QueryBuilder(dbName, INDEX_NAME)
builder.setOnEmptyFilter(EmptyFilterOption.RETURN_NONE)
builder.addEqual("property", "")
builder.addEqual("number", null)
builder.addString("property", "")
builder.addFuzzy("property", "")
builder.addNotEqual("number", undefined)
builder.addOneOf("number", null)
builder.addContains("array", undefined)
builder.addNotContains("array", null)
builder.addContainsAny("array", null)
const resp = await builder.run()
expect(resp.rows.length).toBe(0)
})
it("should return all matching rows when onEmptyFilter is NONE, but a filter value is provided", async () => {
const builder = new QueryBuilder(dbName, INDEX_NAME)
builder.setOnEmptyFilter(EmptyFilterOption.RETURN_NONE)
builder.addEqual("property", "")
builder.addEqual("number", 1)
builder.addString("property", "")
builder.addFuzzy("property", "")
builder.addNotEqual("number", undefined)
builder.addOneOf("number", null)
builder.addContains("array", undefined)
builder.addNotContains("array", null)
builder.addContainsAny("array", null)
const resp = await builder.run()
expect(resp.rows.length).toBe(1)
})
})
describe("skip", () => { describe("skip", () => {
const skipDbName = `db-${newid()}` const skipDbName = `db-${newid()}`
let docs: { let docs: {

@ -78,6 +78,7 @@ export const BUILTIN_PERMISSIONS = {
permissions: [ permissions: [
new Permission(PermissionType.QUERY, PermissionLevel.READ), new Permission(PermissionType.QUERY, PermissionLevel.READ),
new Permission(PermissionType.TABLE, PermissionLevel.READ), new Permission(PermissionType.TABLE, PermissionLevel.READ),
new Permission(PermissionType.VIEW, PermissionLevel.READ),
], ],
}, },
WRITE: { WRITE: {
@ -86,6 +87,7 @@ export const BUILTIN_PERMISSIONS = {
permissions: [ permissions: [
new Permission(PermissionType.QUERY, PermissionLevel.WRITE), new Permission(PermissionType.QUERY, PermissionLevel.WRITE),
new Permission(PermissionType.TABLE, PermissionLevel.WRITE), new Permission(PermissionType.TABLE, PermissionLevel.WRITE),
new Permission(PermissionType.VIEW, PermissionLevel.READ),
new Permission(PermissionType.AUTOMATION, PermissionLevel.EXECUTE), new Permission(PermissionType.AUTOMATION, PermissionLevel.EXECUTE),
], ],
}, },
@ -96,6 +98,7 @@ export const BUILTIN_PERMISSIONS = {
new Permission(PermissionType.TABLE, PermissionLevel.WRITE), new Permission(PermissionType.TABLE, PermissionLevel.WRITE),
new Permission(PermissionType.USER, PermissionLevel.READ), new Permission(PermissionType.USER, PermissionLevel.READ),
new Permission(PermissionType.AUTOMATION, PermissionLevel.EXECUTE), new Permission(PermissionType.AUTOMATION, PermissionLevel.EXECUTE),
new Permission(PermissionType.VIEW, PermissionLevel.READ),
new Permission(PermissionType.WEBHOOK, PermissionLevel.READ), new Permission(PermissionType.WEBHOOK, PermissionLevel.READ),
], ],
}, },
@ -106,6 +109,7 @@ export const BUILTIN_PERMISSIONS = {
new Permission(PermissionType.TABLE, PermissionLevel.ADMIN), new Permission(PermissionType.TABLE, PermissionLevel.ADMIN),
new Permission(PermissionType.USER, PermissionLevel.ADMIN), new Permission(PermissionType.USER, PermissionLevel.ADMIN),
new Permission(PermissionType.AUTOMATION, PermissionLevel.ADMIN), new Permission(PermissionType.AUTOMATION, PermissionLevel.ADMIN),
new Permission(PermissionType.VIEW, PermissionLevel.ADMIN),
new Permission(PermissionType.WEBHOOK, PermissionLevel.READ), new Permission(PermissionType.WEBHOOK, PermissionLevel.READ),
new Permission(PermissionType.QUERY, PermissionLevel.ADMIN), new Permission(PermissionType.QUERY, PermissionLevel.ADMIN),
], ],

@ -80,7 +80,7 @@ function getRedisConfig() {
export function setupEnv(...envs: any[]) { export function setupEnv(...envs: any[]) {
const couch = getCouchConfig(), const couch = getCouchConfig(),
minio = getMinioConfig(), minio = getCouchConfig(),
redis = getRedisConfig() redis = getRedisConfig()
const configs = [ const configs = [
{ key: "COUCH_DB_PORT", value: couch.port }, { key: "COUCH_DB_PORT", value: couch.port },

@ -1,12 +1,10 @@
<script> <script>
import { Select, Label } from "@budibase/bbui" import { Select, Label, Stepper } from "@budibase/bbui"
import { currentAsset, store } from "builderStore" import { currentAsset, store } from "builderStore"
import { getActionProviderComponents } from "builderStore/dataBinding" import { getActionProviderComponents } from "builderStore/dataBinding"
import { onMount } from "svelte" import { onMount } from "svelte"
import DrawerBindableInput from "components/common/bindings/DrawerBindableInput.svelte"
export let parameters export let parameters
export let bindings = []
$: actionProviders = getActionProviderComponents( $: actionProviders = getActionProviderComponents(
$currentAsset, $currentAsset,
@ -53,11 +51,7 @@
<Select bind:value={parameters.type} options={typeOptions} /> <Select bind:value={parameters.type} options={typeOptions} />
{#if parameters.type === "specific"} {#if parameters.type === "specific"}
<Label small>Number</Label> <Label small>Number</Label>
<DrawerBindableInput <Stepper bind:value={parameters.number} />
{bindings}
value={parameters.number}
on:change={e => (parameters.number = e.detail)}
/>
{/if} {/if}
</div> </div>

@ -17,7 +17,7 @@
import { generate } from "shortid" import { generate } from "shortid"
import { LuceneUtils, Constants } from "@budibase/frontend-core" import { LuceneUtils, Constants } from "@budibase/frontend-core"
import { getFields } from "helpers/searchFields" import { getFields } from "helpers/searchFields"
import { createEventDispatcher, onMount } from "svelte" import { createEventDispatcher } from "svelte"
export let schemaFields export let schemaFields
export let filters = [] export let filters = []
@ -35,22 +35,28 @@
{ value: "and", label: "Match all filters" }, { value: "and", label: "Match all filters" },
{ value: "or", label: "Match any filter" }, { value: "or", label: "Match any filter" },
] ]
const onEmptyOptions = [
{ value: "all", label: "Return all table rows" },
{ value: "none", label: "Return no rows" },
]
let rawFilters let rawFilters
let matchAny = false let matchAny = false
let onEmptyFilter = "all"
$: parseFilters(filters) $: parseFilters(filters)
$: dispatch("change", enrichFilters(rawFilters, matchAny)) $: dispatch("change", enrichFilters(rawFilters, matchAny, onEmptyFilter))
$: enrichedSchemaFields = getFields(schemaFields || [], { allowLinks: true }) $: enrichedSchemaFields = getFields(schemaFields || [], { allowLinks: true })
$: fieldOptions = enrichedSchemaFields.map(field => field.name) || [] $: fieldOptions = enrichedSchemaFields.map(field => field.name) || []
$: valueTypeOptions = allowBindings ? ["Value", "Binding"] : ["Value"] $: valueTypeOptions = allowBindings ? ["Value", "Binding"] : ["Value"]
// Remove field key prefixes and determine whether to use the "match all" // Remove field key prefixes and determine which behaviours to use
// or "match any" behaviour
const parseFilters = filters => { const parseFilters = filters => {
matchAny = filters?.find(filter => filter.operator === "allOr") != null matchAny = filters?.find(filter => filter.operator === "allOr") != null
onEmptyFilter =
filters?.find(filter => filter.onEmptyFilter)?.onEmptyFilter ?? "all"
rawFilters = (filters || []) rawFilters = (filters || [])
.filter(filter => filter.operator !== "allOr") .filter(filter => filter.operator !== "allOr" && !filter.onEmptyFilter)
.map(filter => { .map(filter => {
const { field } = filter const { field } = filter
let newFilter = { ...filter } let newFilter = { ...filter }
@ -64,18 +70,9 @@
}) })
} }
onMount(() => {
parseFilters(filters)
rawFilters.forEach(filter => {
filter.type =
schemaFields.find(field => field.name === filter.field)?.type ||
filter.type
})
})
// Add field key prefixes and a special metadata filter object to indicate // Add field key prefixes and a special metadata filter object to indicate
// whether to use the "match all" or "match any" behaviour // how to handle filter behaviour
const enrichFilters = (rawFilters, matchAny) => { const enrichFilters = (rawFilters, matchAny, onEmptyFilter) => {
let count = 1 let count = 1
return rawFilters return rawFilters
.filter(filter => filter.field) .filter(filter => filter.field)
@ -84,6 +81,7 @@
field: `${count++}:${filter.field}`, field: `${count++}:${filter.field}`,
})) }))
.concat(matchAny ? [{ operator: "allOr" }] : []) .concat(matchAny ? [{ operator: "allOr" }] : [])
.concat([{ onEmptyFilter }])
} }
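
For reference, the enrichFilters variant above that appends the onEmptyFilter marker produces an array shaped roughly like the sketch below; the field names, operators and values are purely illustrative:

// illustrative output only: two prefixed filters, "match any" enabled, empty filters returning no rows
const enriched = [
  { field: "1:name", operator: "string", value: "jo", type: "string" },
  { field: "2:age", operator: "equal", value: 21, type: "number" },
  { operator: "allOr" },
  { onEmptyFilter: "none" },
]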
const addFilter = () => { const addFilter = () => {
@ -195,6 +193,17 @@
on:change={e => (matchAny = e.detail === "or")} on:change={e => (matchAny = e.detail === "or")}
placeholder={null} placeholder={null}
/> />
{#if datasource?.type === "table"}
<Select
label="When filter empty"
value={onEmptyFilter}
options={onEmptyOptions}
getOptionLabel={opt => opt.label}
getOptionValue={opt => opt.value}
on:change={e => (onEmptyFilter = e.detail)}
placeholder={null}
/>
{/if}
</div> </div>
<div> <div>
<div class="filter-label"> <div class="filter-label">

@ -2408,13 +2408,6 @@
"label": "Disabled", "label": "Disabled",
"key": "disabled", "key": "disabled",
"defaultValue": false "defaultValue": false
},
{
"type": "text",
"label": "Initial form step",
"key": "initialFormStep",
"defaultValue": 1
} }
], ],
"context": [ "context": [
@ -2452,7 +2445,6 @@
"name": "Form Step", "name": "Form Step",
"icon": "AssetsAdded", "icon": "AssetsAdded",
"hasChildren": true, "hasChildren": true,
"requiredAncestors": ["form"],
"illegalChildren": ["section", "form", "formstep", "formblock"], "illegalChildren": ["section", "form", "formstep", "formblock"],
"styles": ["size"], "styles": ["size"],
"size": { "size": {
@ -2472,7 +2464,6 @@
"fieldgroup": { "fieldgroup": {
"name": "Field Group", "name": "Field Group",
"icon": "Group", "icon": "Group",
"requiredAncestors": ["form"],
"illegalChildren": ["section"], "illegalChildren": ["section"],
"styles": ["size"], "styles": ["size"],
"hasChildren": true, "hasChildren": true,

@ -9,7 +9,6 @@
export let size export let size
export let disabled = false export let disabled = false
export let actionType = "Create" export let actionType = "Create"
export let initialFormStep = 1
// Not exposed as a builder setting. Used internally to disable validation // Not exposed as a builder setting. Used internally to disable validation
// for fields rendered in things like search blocks. // for fields rendered in things like search blocks.
@ -22,18 +21,10 @@
const context = getContext("context") const context = getContext("context")
const { API, fetchDatasourceSchema } = getContext("sdk") const { API, fetchDatasourceSchema } = getContext("sdk")
const getInitialFormStep = () => {
const parsedFormStep = parseInt(initialFormStep)
if (isNaN(parsedFormStep)) {
return 1
}
return parsedFormStep
}
let loaded = false let loaded = false
let schema let schema
let table let table
let currentStep = writable(getInitialFormStep()) let currentStep = writable(1)
$: fetchSchema(dataSource) $: fetchSchema(dataSource)
$: schemaKey = generateSchemaKey(schema) $: schemaKey = generateSchemaKey(schema)

@ -250,7 +250,7 @@
} else if (type === "first") { } else if (type === "first") {
currentStep.set(1) currentStep.set(1)
} else if (type === "specific" && number && !isNaN(number)) { } else if (type === "specific" && number && !isNaN(number)) {
currentStep.set(parseInt(number)) currentStep.set(number)
} }
}, },
setStep: step => { setStep: step => {

@ -1 +1 @@
Subproject commit 9b9c8cc08f271bfc5dd401860f344f6eb336ab35 Subproject commit 02626390cde905a248cb60729968667c9e49fae9

@ -1,4 +1,4 @@
FROM node:18-slim FROM node:14-slim
LABEL com.centurylinklabs.watchtower.lifecycle.pre-check="scripts/watchtower-hooks/pre-check.sh" LABEL com.centurylinklabs.watchtower.lifecycle.pre-check="scripts/watchtower-hooks/pre-check.sh"
LABEL com.centurylinklabs.watchtower.lifecycle.pre-update="scripts/watchtower-hooks/pre-update.sh" LABEL com.centurylinklabs.watchtower.lifecycle.pre-update="scripts/watchtower-hooks/pre-update.sh"
@ -18,7 +18,7 @@ ENV TOP_LEVEL_PATH=/
# handle node-gyp # handle node-gyp
RUN apt-get update \ RUN apt-get update \
&& apt-get install -y --no-install-recommends g++ make python3 && apt-get install -y --no-install-recommends g++ make python
RUN yarn global add pm2 RUN yarn global add pm2
# Install client for oracle datasource # Install client for oracle datasource

@ -12,6 +12,7 @@ const baseConfig: Config.InitialProjectOptions = {
}, },
moduleNameMapper: { moduleNameMapper: {
"@budibase/backend-core/(.*)": "<rootDir>/../backend-core/$1", "@budibase/backend-core/(.*)": "<rootDir>/../backend-core/$1",
"@budibase/shared-core/(.*)": "<rootDir>/../shared-core/$1",
"@budibase/backend-core": "<rootDir>/../backend-core/src", "@budibase/backend-core": "<rootDir>/../backend-core/src",
"@budibase/shared-core": "<rootDir>/../shared-core/src", "@budibase/shared-core": "<rootDir>/../shared-core/src",
"@budibase/types": "<rootDir>/../types/src", "@budibase/types": "<rootDir>/../types/src",

@ -100,7 +100,7 @@
"memorystream": "0.3.1", "memorystream": "0.3.1",
"mongodb": "5.7", "mongodb": "5.7",
"mssql": "9.1.1", "mssql": "9.1.1",
"mysql2": "3.5.2", "mysql2": "2.3.3",
"node-fetch": "2.6.7", "node-fetch": "2.6.7",
"object-sizeof": "2.6.1", "object-sizeof": "2.6.1",
"open": "8.4.0", "open": "8.4.0",

@ -5,9 +5,8 @@ if [[ -n $CI ]]
then then
# --runInBand performs better in ci where resources are limited # --runInBand performs better in ci where resources are limited
export NODE_OPTIONS="--max-old-space-size=4096" export NODE_OPTIONS="--max-old-space-size=4096"
node ../../node_modules/jest/bin/jest.js --version echo "jest --coverage --runInBand --forceExit --bail"
echo "jest --coverage --maxWorkers=2 --forceExit --workerIdleMemoryLimit=2000MB --bail" jest --coverage --runInBand --forceExit --bail
jest --coverage --maxWorkers=2 --forceExit --workerIdleMemoryLimit=2000MB --bail
else else
# --maxWorkers performs better in development # --maxWorkers performs better in development
echo "jest --coverage --maxWorkers=2 --forceExit" echo "jest --coverage --maxWorkers=2 --forceExit"

@ -127,7 +127,7 @@ export async function preview(ctx: any) {
const query = ctx.request.body const query = ctx.request.body
// preview may not have a queryId as it hasn't been saved, but if it does // preview may not have a queryId as it hasn't been saved, but if it does
// this stops dynamic variables from calling the same query // this stops dynamic variables from calling the same query
const { fields, parameters, queryVerb, transformer, queryId, schema } = query const { fields, parameters, queryVerb, transformer, queryId } = query
const authConfigCtx: any = getAuthConfig(ctx) const authConfigCtx: any = getAuthConfig(ctx)
@ -140,7 +140,6 @@ export async function preview(ctx: any) {
parameters, parameters,
transformer, transformer,
queryId, queryId,
schema,
// have to pass down to the thread runner - can't put into context now // have to pass down to the thread runner - can't put into context now
environmentVariables: envVars, environmentVariables: envVars,
ctx: { ctx: {
@ -236,7 +235,6 @@ async function execute(
user: ctx.user, user: ctx.user,
auth: { ...authConfigCtx }, auth: { ...authConfigCtx },
}, },
schema: query.schema,
} }
const runFn = () => Runner.run(inputs) const runFn = () => Runner.run(inputs)

@ -13,9 +13,10 @@ import {
Row, Row,
Table, Table,
UserCtx, UserCtx,
EmptyFilterOption,
} from "@budibase/types" } from "@budibase/types"
import sdk from "../../../sdk" import sdk from "../../../sdk"
import * as utils from "./utils" import { hasFilters } from "@budibase/shared-core/src/filters"
export async function handleRequest( export async function handleRequest(
operation: Operation, operation: Operation,
@ -38,13 +39,20 @@ export async function handleRequest(
} }
} }
if (
!hasFilters(opts?.filters) &&
opts?.filters?.onEmptyFilter === EmptyFilterOption.RETURN_NONE
) {
return []
}
return new ExternalRequest(operation, tableId, opts?.datasource).run( return new ExternalRequest(operation, tableId, opts?.datasource).run(
opts || {} opts || {}
) )
} }
export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) { export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
const tableId = utils.getTableId(ctx) const tableId = ctx.params.tableId
const { _id, ...rowData } = ctx.request.body const { _id, ...rowData } = ctx.request.body
const validateResult = await sdk.rows.utils.validate({ const validateResult = await sdk.rows.utils.validate({
@ -71,7 +79,7 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
export async function save(ctx: UserCtx) { export async function save(ctx: UserCtx) {
const inputs = ctx.request.body const inputs = ctx.request.body
const tableId = utils.getTableId(ctx) const tableId = ctx.params.tableId
const validateResult = await sdk.rows.utils.validate({ const validateResult = await sdk.rows.utils.validate({
row: inputs, row: inputs,
tableId, tableId,
@ -99,12 +107,12 @@ export async function save(ctx: UserCtx) {
export async function find(ctx: UserCtx) { export async function find(ctx: UserCtx) {
const id = ctx.params.rowId const id = ctx.params.rowId
const tableId = utils.getTableId(ctx) const tableId = ctx.params.tableId
return sdk.rows.external.getRow(tableId, id) return sdk.rows.external.getRow(tableId, id)
} }
export async function destroy(ctx: UserCtx) { export async function destroy(ctx: UserCtx) {
const tableId = utils.getTableId(ctx) const tableId = ctx.params.tableId
const _id = ctx.request.body._id const _id = ctx.request.body._id
const { row } = (await handleRequest(Operation.DELETE, tableId, { const { row } = (await handleRequest(Operation.DELETE, tableId, {
id: breakRowIdField(_id), id: breakRowIdField(_id),
@ -115,7 +123,7 @@ export async function destroy(ctx: UserCtx) {
export async function bulkDestroy(ctx: UserCtx) { export async function bulkDestroy(ctx: UserCtx) {
const { rows } = ctx.request.body const { rows } = ctx.request.body
const tableId = utils.getTableId(ctx) const tableId = ctx.params.tableId
let promises: Promise<Row[] | { row: Row; table: Table }>[] = [] let promises: Promise<Row[] | { row: Row; table: Table }>[] = []
for (let row of rows) { for (let row of rows) {
promises.push( promises.push(
@ -131,7 +139,7 @@ export async function bulkDestroy(ctx: UserCtx) {
export async function fetchEnrichedRow(ctx: UserCtx) { export async function fetchEnrichedRow(ctx: UserCtx) {
const id = ctx.params.rowId const id = ctx.params.rowId
const tableId = utils.getTableId(ctx) const tableId = ctx.params.tableId
const { datasourceId, tableName } = breakExternalTableId(tableId) const { datasourceId, tableName } = breakExternalTableId(tableId)
const datasource: Datasource = await sdk.datasources.get(datasourceId!) const datasource: Datasource = await sdk.datasources.get(datasourceId!)
if (!tableName) { if (!tableName) {

@ -11,9 +11,6 @@ import {
Row, Row,
PatchRowRequest, PatchRowRequest,
PatchRowResponse, PatchRowResponse,
SearchRowResponse,
SearchRowRequest,
SearchParams,
} from "@budibase/types" } from "@budibase/types"
import * as utils from "./utils" import * as utils from "./utils"
import { gridSocket } from "../../../websockets" import { gridSocket } from "../../../websockets"
@ -200,10 +197,10 @@ export async function destroy(ctx: UserCtx<DeleteRowRequest>) {
ctx.body = response ctx.body = response
} }
export async function search(ctx: Ctx<SearchRowRequest, SearchRowResponse>) { export async function search(ctx: any) {
const tableId = utils.getTableId(ctx) const tableId = utils.getTableId(ctx)
const searchParams: SearchParams = { const searchParams = {
...ctx.request.body, ...ctx.request.body,
tableId, tableId,
} }

@ -13,7 +13,7 @@ import {
import { FieldTypes } from "../../../constants" import { FieldTypes } from "../../../constants"
import * as utils from "./utils" import * as utils from "./utils"
import { cloneDeep } from "lodash/fp" import { cloneDeep } from "lodash/fp"
import { context } from "@budibase/backend-core" import { context, db as dbCore } from "@budibase/backend-core"
import { finaliseRow, updateRelatedFormula } from "./staticFormula" import { finaliseRow, updateRelatedFormula } from "./staticFormula"
import { import {
UserCtx, UserCtx,
@ -26,8 +26,8 @@ import {
import sdk from "../../../sdk" import sdk from "../../../sdk"
export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) { export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
const tableId = utils.getTableId(ctx)
const inputs = ctx.request.body const inputs = ctx.request.body
const tableId = inputs.tableId
const isUserTable = tableId === InternalTables.USER_METADATA const isUserTable = tableId === InternalTables.USER_METADATA
let oldRow let oldRow
const dbTable = await sdk.tables.getTable(tableId) const dbTable = await sdk.tables.getTable(tableId)
@ -94,8 +94,7 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
export async function save(ctx: UserCtx) { export async function save(ctx: UserCtx) {
let inputs = ctx.request.body let inputs = ctx.request.body
const tableId = utils.getTableId(ctx) inputs.tableId = ctx.params.tableId
inputs.tableId = tableId
if (!inputs._rev && !inputs._id) { if (!inputs._rev && !inputs._id) {
inputs._id = generateRowID(inputs.tableId) inputs._id = generateRowID(inputs.tableId)
@ -133,22 +132,20 @@ export async function save(ctx: UserCtx) {
} }
export async function find(ctx: UserCtx) { export async function find(ctx: UserCtx) {
const tableId = utils.getTableId(ctx), const db = dbCore.getDB(ctx.appId)
rowId = ctx.params.rowId const table = await sdk.tables.getTable(ctx.params.tableId)
const table = await sdk.tables.getTable(tableId) let row = await utils.findRow(ctx, ctx.params.tableId, ctx.params.rowId)
let row = await utils.findRow(ctx, tableId, rowId)
row = await outputProcessing(table, row) row = await outputProcessing(table, row)
return row return row
} }
export async function destroy(ctx: UserCtx) { export async function destroy(ctx: UserCtx) {
const db = context.getAppDB() const db = context.getAppDB()
const tableId = utils.getTableId(ctx)
const { _id } = ctx.request.body const { _id } = ctx.request.body
let row = await db.get<Row>(_id) let row = await db.get<Row>(_id)
let _rev = ctx.request.body._rev || row._rev let _rev = ctx.request.body._rev || row._rev
if (row.tableId !== tableId) { if (row.tableId !== ctx.params.tableId) {
throw "Supplied tableId doesn't match the row's tableId" throw "Supplied tableId doesn't match the row's tableId"
} }
const table = await sdk.tables.getTable(row.tableId) const table = await sdk.tables.getTable(row.tableId)
@ -166,7 +163,7 @@ export async function destroy(ctx: UserCtx) {
await updateRelatedFormula(table, row) await updateRelatedFormula(table, row)
let response let response
if (tableId === InternalTables.USER_METADATA) { if (ctx.params.tableId === InternalTables.USER_METADATA) {
ctx.params = { ctx.params = {
id: _id, id: _id,
} }
@ -179,7 +176,7 @@ export async function destroy(ctx: UserCtx) {
} }
export async function bulkDestroy(ctx: UserCtx) { export async function bulkDestroy(ctx: UserCtx) {
const tableId = utils.getTableId(ctx) const tableId = ctx.params.tableId
const table = await sdk.tables.getTable(tableId) const table = await sdk.tables.getTable(tableId)
let { rows } = ctx.request.body let { rows } = ctx.request.body
@ -219,7 +216,7 @@ export async function bulkDestroy(ctx: UserCtx) {
export async function fetchEnrichedRow(ctx: UserCtx) { export async function fetchEnrichedRow(ctx: UserCtx) {
const db = context.getAppDB() const db = context.getAppDB()
const tableId = utils.getTableId(ctx) const tableId = ctx.params.tableId
const rowId = ctx.params.rowId const rowId = ctx.params.rowId
// need table to work out where links go in row // need table to work out where links go in row
let [table, row] = await Promise.all([ let [table, row] = await Promise.all([

@ -45,20 +45,13 @@ export async function findRow(ctx: UserCtx, tableId: string, rowId: string) {
} }
export function getTableId(ctx: Ctx) { export function getTableId(ctx: Ctx) {
// top priority, use the URL first if (ctx.request.body && ctx.request.body.tableId) {
if (ctx.params?.sourceId) {
return ctx.params.sourceId
}
// now check for old way of specifying table ID
if (ctx.params?.tableId) {
return ctx.params.tableId
}
// check body for a table ID
if (ctx.request.body?.tableId) {
return ctx.request.body.tableId return ctx.request.body.tableId
} }
// now check if a specific view name if (ctx.params && ctx.params.tableId) {
if (ctx.params?.viewName) { return ctx.params.tableId
}
if (ctx.params && ctx.params.viewName) {
return ctx.params.viewName return ctx.params.viewName
} }
} }

@ -1,18 +1,14 @@
import { quotas } from "@budibase/pro" import { quotas } from "@budibase/pro"
import { import {
UserCtx, UserCtx,
SearchResponse,
SortOrder,
SortType,
ViewV2, ViewV2,
SearchRowResponse,
SearchViewRowRequest,
RequiredKeys,
SearchParams,
} from "@budibase/types" } from "@budibase/types"
import { dataFilters } from "@budibase/shared-core"
import sdk from "../../../sdk" import sdk from "../../../sdk"
export async function searchView( export async function searchView(ctx: UserCtx<void, SearchResponse>) {
ctx: UserCtx<SearchViewRowRequest, SearchRowResponse>
) {
const { viewId } = ctx.params const { viewId } = ctx.params
const view = await sdk.views.get(viewId) const view = await sdk.views.get(viewId)
@ -33,35 +29,49 @@ export async function searchView(
undefined undefined
ctx.status = 200 ctx.status = 200
const result = await quotas.addQuery(
const { body } = ctx.request () =>
const query = dataFilters.buildLuceneQuery(view.query || []) sdk.rows.search({
tableId: view.tableId,
const searchOptions: RequiredKeys<SearchViewRowRequest> & query: view.query || {},
RequiredKeys<Pick<SearchParams, "tableId" | "query" | "fields">> = { fields: viewFields,
tableId: view.tableId, ...getSortOptions(ctx, view),
query, }),
fields: viewFields, {
...getSortOptions(body, view), datasourceId: view.tableId,
limit: body.limit, }
bookmark: body.bookmark, )
paginate: body.paginate,
}
const result = await quotas.addQuery(() => sdk.rows.search(searchOptions), {
datasourceId: view.tableId,
})
result.rows.forEach(r => (r._viewId = view.id)) result.rows.forEach(r => (r._viewId = view.id))
ctx.body = result ctx.body = result
} }
function getSortOptions(request: SearchViewRowRequest, view: ViewV2) { function getSortOptions(
if (request.sort) { ctx: UserCtx,
view: ViewV2
):
| {
sort: string
sortOrder?: SortOrder
sortType?: SortType
}
| undefined {
const { sort_column, sort_order, sort_type } = ctx.query
if (Array.isArray(sort_column)) {
ctx.throw(400, "sort_column cannot be an array")
}
if (Array.isArray(sort_order)) {
ctx.throw(400, "sort_order cannot be an array")
}
if (Array.isArray(sort_type)) {
ctx.throw(400, "sort_type cannot be an array")
}
if (sort_column) {
return { return {
sort: request.sort, sort: sort_column,
sortOrder: request.sortOrder, sortOrder: sort_order as SortOrder,
sortType: request.sortType, sortType: sort_type as SortType,
} }
} }
if (view.sort) { if (view.sort) {
@ -72,9 +82,5 @@ function getSortOptions(request: SearchViewRowRequest, view: ViewV2) {
} }
} }
return { return
sort: undefined,
sortOrder: undefined,
sortType: undefined,
}
} }
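
One side of this hunk reads the view-search options from the request body (SearchViewRowRequest) instead of the query string; a hypothetical body under that shape, with made-up values, would look like:

// hypothetical SearchViewRowRequest payload; field values are examples only
const body = {
  sort: "name",
  sortOrder: "descending",
  sortType: "string",
  limit: 10,
  bookmark: undefined,
  paginate: true,
}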

@ -22,12 +22,9 @@ import {
QueryJson, QueryJson,
RelationshipType, RelationshipType,
RenameColumn, RenameColumn,
SaveTableRequest,
SaveTableResponse,
Table, Table,
TableRequest, TableRequest,
UserCtx, UserCtx,
ViewV2,
} from "@budibase/types" } from "@budibase/types"
import sdk from "../../../sdk" import sdk from "../../../sdk"
import { builderSocket } from "../../../websockets" import { builderSocket } from "../../../websockets"
@ -201,8 +198,8 @@ function isRelationshipSetup(column: FieldSchema) {
return column.foreignKey || column.through return column.foreignKey || column.through
} }
export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) { export async function save(ctx: UserCtx) {
const inputs = ctx.request.body const inputs: TableRequest = ctx.request.body
const renamed = inputs?._rename const renamed = inputs?._rename
// can't do this right now // can't do this right now
delete inputs.rows delete inputs.rows
@ -218,7 +215,7 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
...inputs, ...inputs,
} }
let oldTable: Table | undefined let oldTable
if (ctx.request.body && ctx.request.body._id) { if (ctx.request.body && ctx.request.body._id) {
oldTable = await sdk.tables.getTable(ctx.request.body._id) oldTable = await sdk.tables.getTable(ctx.request.body._id)
} }
@ -227,17 +224,6 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
ctx.throw(400, "A column type has changed.") ctx.throw(400, "A column type has changed.")
} }
for (let view in tableToSave.views) {
const tableView = tableToSave.views[view]
if (!tableView || !sdk.views.isV2(tableView)) continue
tableToSave.views[view] = sdk.views.syncSchema(
oldTable!.views![view] as ViewV2,
tableToSave.schema,
renamed
)
}
const db = context.getAppDB() const db = context.getAppDB()
const datasource = await sdk.datasources.get(datasourceId) const datasource = await sdk.datasources.get(datasourceId)
if (!datasource.entities) { if (!datasource.entities) {

@ -9,8 +9,6 @@ import { isExternalTable, isSQL } from "../../../integrations/utils"
import { events } from "@budibase/backend-core" import { events } from "@budibase/backend-core"
import { import {
FetchTablesResponse, FetchTablesResponse,
SaveTableResponse,
SaveTableRequest,
Table, Table,
TableResponse, TableResponse,
UserCtx, UserCtx,
@ -62,7 +60,7 @@ export async function find(ctx: UserCtx<void, TableResponse>) {
ctx.body = sdk.tables.enrichViewSchemas(table) ctx.body = sdk.tables.enrichViewSchemas(table)
} }
export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) { export async function save(ctx: UserCtx) {
const appId = ctx.appId const appId = ctx.appId
const table = ctx.request.body const table = ctx.request.body
const isImport = table.rows const isImport = table.rows

@ -9,15 +9,7 @@ import {
fixAutoColumnSubType, fixAutoColumnSubType,
} from "../../../utilities/rowProcessor" } from "../../../utilities/rowProcessor"
import { runStaticFormulaChecks } from "./bulkFormula" import { runStaticFormulaChecks } from "./bulkFormula"
import { import { Table } from "@budibase/types"
SaveTableRequest,
SaveTableResponse,
Table,
TableRequest,
UserCtx,
ViewStatisticsSchema,
ViewV2,
} from "@budibase/types"
import { quotas } from "@budibase/pro" import { quotas } from "@budibase/pro"
import isEqual from "lodash/isEqual" import isEqual from "lodash/isEqual"
import { cloneDeep } from "lodash/fp" import { cloneDeep } from "lodash/fp"
@ -41,10 +33,10 @@ function checkAutoColumns(table: Table, oldTable?: Table) {
return table return table
} }
export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) { export async function save(ctx: any) {
const db = context.getAppDB() const db = context.getAppDB()
const { rows, ...rest } = ctx.request.body const { rows, ...rest } = ctx.request.body
let tableToSave: TableRequest = { let tableToSave = {
type: "table", type: "table",
_id: generateTableID(), _id: generateTableID(),
views: {}, views: {},
@ -52,7 +44,7 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
} }
// if the table obj had an _id then it will have been retrieved // if the table obj had an _id then it will have been retrieved
let oldTable: Table | undefined let oldTable
if (ctx.request.body && ctx.request.body._id) { if (ctx.request.body && ctx.request.body._id) {
oldTable = await sdk.tables.getTable(ctx.request.body._id) oldTable = await sdk.tables.getTable(ctx.request.body._id)
} }
@ -88,7 +80,7 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
let { _rename } = tableToSave let { _rename } = tableToSave
/* istanbul ignore next */ /* istanbul ignore next */
if (_rename && _rename.old === _rename.updated) { if (_rename && _rename.old === _rename.updated) {
_rename = undefined _rename = null
delete tableToSave._rename delete tableToSave._rename
} }
@ -105,20 +97,7 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
const tableView = tableToSave.views[view] const tableView = tableToSave.views[view]
if (!tableView) continue if (!tableView) continue
if (sdk.views.isV2(tableView)) { if (tableView.schema.group || tableView.schema.field) continue
tableToSave.views[view] = sdk.views.syncSchema(
oldTable!.views![view] as ViewV2,
tableToSave.schema,
_rename
)
continue
}
if (
(tableView.schema as ViewStatisticsSchema).group ||
tableView.schema.field
)
continue
tableView.schema = tableToSave.schema tableView.schema = tableToSave.schema
} }
@ -133,7 +112,7 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
tableToSave._rev = linkResp._rev tableToSave._rev = linkResp._rev
} }
} catch (err) { } catch (err) {
ctx.throw(400, err as string) ctx.throw(400, err)
} }
// don't perform any updates until relationships have been // don't perform any updates until relationships have been

@ -418,7 +418,7 @@ export function areSwitchableTypes(type1: any, type2: any) {
return false return false
} }
export function hasTypeChanged(table: Table, oldTable: Table | undefined) { export function hasTypeChanged(table: any, oldTable: any) {
if (!oldTable) { if (!oldTable) {
return false return false
} }

@ -4,14 +4,16 @@ import authorized from "../../middleware/authorized"
import { paramResource, paramSubResource } from "../../middleware/resourceId" import { paramResource, paramSubResource } from "../../middleware/resourceId"
import { permissions } from "@budibase/backend-core" import { permissions } from "@budibase/backend-core"
import { internalSearchValidator } from "./utils/validators" import { internalSearchValidator } from "./utils/validators"
import noViewData from "../../middleware/noViewData"
import trimViewRowInfo from "../../middleware/trimViewRowInfo" import trimViewRowInfo from "../../middleware/trimViewRowInfo"
import * as utils from "../../db/utils"
const { PermissionType, PermissionLevel } = permissions const { PermissionType, PermissionLevel } = permissions
const router: Router = new Router() const router: Router = new Router()
router router
/** /**
* @api {get} /api/:sourceId/:rowId/enrich Get an enriched row * @api {get} /api/:tableId/:rowId/enrich Get an enriched row
* @apiName Get an enriched row * @apiName Get an enriched row
* @apiGroup rows * @apiGroup rows
* @apiPermission table read access * @apiPermission table read access
@ -25,13 +27,13 @@ router
* @apiSuccess {object} row The response body will be the enriched row. * @apiSuccess {object} row The response body will be the enriched row.
*/ */
.get( .get(
"/api/:sourceId/:rowId/enrich", "/api/:tableId/:rowId/enrich",
paramSubResource("sourceId", "rowId"), paramSubResource("tableId", "rowId"),
authorized(PermissionType.TABLE, PermissionLevel.READ), authorized(PermissionType.TABLE, PermissionLevel.READ),
rowController.fetchEnrichedRow rowController.fetchEnrichedRow
) )
/** /**
* @api {get} /api/:sourceId/rows Get all rows in a table * @api {get} /api/:tableId/rows Get all rows in a table
* @apiName Get all rows in a table * @apiName Get all rows in a table
* @apiGroup rows * @apiGroup rows
* @apiPermission table read access * @apiPermission table read access
@ -40,37 +42,37 @@ router
* due to its lack of support for pagination. With SQL tables this will retrieve up to a limit and then * due to its lack of support for pagination. With SQL tables this will retrieve up to a limit and then
* will simply stop. * will simply stop.
* *
* @apiParam {string} sourceId The ID of the table to retrieve all rows within. * @apiParam {string} tableId The ID of the table to retrieve all rows within.
* *
* @apiSuccess {object[]} rows The response body will be an array of all rows found. * @apiSuccess {object[]} rows The response body will be an array of all rows found.
*/ */
.get( .get(
"/api/:sourceId/rows", "/api/:tableId/rows",
paramResource("sourceId"), paramResource("tableId"),
authorized(PermissionType.TABLE, PermissionLevel.READ), authorized(PermissionType.TABLE, PermissionLevel.READ),
rowController.fetch rowController.fetch
) )
/** /**
* @api {get} /api/:sourceId/rows/:rowId Retrieve a single row * @api {get} /api/:tableId/rows/:rowId Retrieve a single row
* @apiName Retrieve a single row * @apiName Retrieve a single row
* @apiGroup rows * @apiGroup rows
* @apiPermission table read access * @apiPermission table read access
* @apiDescription This endpoint retrieves only the specified row. If you wish to retrieve * @apiDescription This endpoint retrieves only the specified row. If you wish to retrieve
* a row by anything other than its _id field, use the search endpoint. * a row by anything other than its _id field, use the search endpoint.
* *
* @apiParam {string} sourceId The ID of the table to retrieve a row from. * @apiParam {string} tableId The ID of the table to retrieve a row from.
* @apiParam {string} rowId The ID of the row to retrieve. * @apiParam {string} rowId The ID of the row to retrieve.
* *
* @apiSuccess {object} body The response body will be the row that was found. * @apiSuccess {object} body The response body will be the row that was found.
*/ */
.get( .get(
"/api/:sourceId/rows/:rowId", "/api/:tableId/rows/:rowId",
paramSubResource("sourceId", "rowId"), paramSubResource("tableId", "rowId"),
authorized(PermissionType.TABLE, PermissionLevel.READ), authorized(PermissionType.TABLE, PermissionLevel.READ),
rowController.find rowController.find
) )
/** /**
* @api {post} /api/:sourceId/search Search for rows in a table * @api {post} /api/:tableId/search Search for rows in a table
* @apiName Search for rows in a table * @apiName Search for rows in a table
* @apiGroup rows * @apiGroup rows
* @apiPermission table read access * @apiPermission table read access
@ -78,7 +80,7 @@ router
* and data UI in the builder are built atop this. All filtering, sorting and pagination is * and data UI in the builder are built atop this. All filtering, sorting and pagination is
* handled through this, for internal and external (datasource plus, e.g. SQL) tables. * handled through this, for internal and external (datasource plus, e.g. SQL) tables.
* *
* @apiParam {string} sourceId The ID of the table to retrieve rows from. * @apiParam {string} tableId The ID of the table to retrieve rows from.
* *
* @apiParam (Body) {boolean} [paginate] If pagination is required then this should be set to true, * @apiParam (Body) {boolean} [paginate] If pagination is required then this should be set to true,
* defaults to false. * defaults to false.
@ -133,22 +135,22 @@ router
* page. * page.
*/ */
.post( .post(
"/api/:sourceId/search", "/api/:tableId/search",
internalSearchValidator(), internalSearchValidator(),
paramResource("sourceId"), paramResource("tableId"),
authorized(PermissionType.TABLE, PermissionLevel.READ), authorized(PermissionType.TABLE, PermissionLevel.READ),
rowController.search rowController.search
) )
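For reference, a minimal client-side sketch of the search call documented above. The "status" and "name" columns and the auth handling are assumptions for illustration; the body fields mirror the paginate, sort and query options described in the comment block (and the SearchParams interface later in this diff).

async function searchRows(sourceId: string): Promise<{ rows: Record<string, any>[] }> {
  // POST /api/:sourceId/search -- filtered, sorted, paginated query (auth cookies omitted)
  const res = await fetch(`/api/${sourceId}/search`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      query: { equal: { status: "active" } }, // "status" is a hypothetical column
      sort: "name",
      sortOrder: "ascending",
      paginate: true,
      limit: 10,
    }),
  })
  if (!res.ok) throw new Error(`search failed with status ${res.status}`)
  return res.json()
}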
// DEPRECATED - this is an old API, but for backwards compat it needs to be // DEPRECATED - this is an old API, but for backwards compat it needs to be
// supported still // supported still
.post( .post(
"/api/search/:sourceId/rows", "/api/search/:tableId/rows",
paramResource("sourceId"), paramResource("tableId"),
authorized(PermissionType.TABLE, PermissionLevel.READ), authorized(PermissionType.TABLE, PermissionLevel.READ),
rowController.search rowController.search
) )
/** /**
* @api {post} /api/:sourceId/rows Creates a new row * @api {post} /api/:tableId/rows Creates a new row
* @apiName Creates a new row * @apiName Creates a new row
* @apiGroup rows * @apiGroup rows
* @apiPermission table write access * @apiPermission table write access
@ -157,7 +159,7 @@ router
* links to one. Please note that "_id", "_rev" and "tableId" are fields that are * links to one. Please note that "_id", "_rev" and "tableId" are fields that are
* already used by Budibase tables and cannot be used for columns. * already used by Budibase tables and cannot be used for columns.
* *
* @apiParam {string} sourceId The ID of the table to save a row to. * @apiParam {string} tableId The ID of the table to save a row to.
* *
* @apiParam (Body) {string} [_id] If the row exists already then an ID for the row must be provided. * @apiParam (Body) {string} [_id] If the row exists already then an ID for the row must be provided.
* @apiParam (Body) {string} [_rev] If working with an existing row for an internal table its revision * @apiParam (Body) {string} [_rev] If working with an existing row for an internal table its revision
@ -172,14 +174,14 @@ router
* @apiSuccess {object} body The contents of the row that was saved will be returned as well. * @apiSuccess {object} body The contents of the row that was saved will be returned as well.
*/ */
.post( .post(
"/api/:sourceId/rows", "/api/:tableId/rows",
paramResource("sourceId"), paramResource("tableId"),
authorized(PermissionType.TABLE, PermissionLevel.WRITE), authorized(PermissionType.TABLE, PermissionLevel.WRITE),
trimViewRowInfo, noViewData,
rowController.save rowController.save
) )
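The comment block above implies two payload shapes for this endpoint; a brief sketch with hypothetical IDs and columns: omitting _id creates a row, while supplying _id (plus _rev for internal tables) updates an existing one.

// POST /api/:tableId/rows -- create vs. update payloads (all values hypothetical)
const createBody = {
  tableId: "ta_abc123",
  name: "New customer",
}
const updateBody = {
  ...createBody,
  _id: "ro_ta_abc123_0000001", // presence of _id turns the save into an update
  _rev: "1-abcdef",            // revision required when updating internal table rows
}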
/** /**
* @api {patch} /api/:sourceId/rows Updates a row * @api {patch} /api/:tableId/rows Updates a row
* @apiName Update a row * @apiName Update a row
* @apiGroup rows * @apiGroup rows
* @apiPermission table write access * @apiPermission table write access
@ -187,14 +189,14 @@ router
* error if an _id isn't provided, it will only function for existing rows. * error if an _id isn't provided, it will only function for existing rows.
*/ */
.patch( .patch(
"/api/:sourceId/rows", "/api/:tableId/rows",
paramResource("sourceId"), paramResource("tableId"),
authorized(PermissionType.TABLE, PermissionLevel.WRITE), authorized(PermissionType.TABLE, PermissionLevel.WRITE),
trimViewRowInfo, noViewData,
rowController.patch rowController.patch
) )
/** /**
* @api {post} /api/:sourceId/rows/validate Validate inputs for a row * @api {post} /api/:tableId/rows/validate Validate inputs for a row
* @apiName Validate inputs for a row * @apiName Validate inputs for a row
* @apiGroup rows * @apiGroup rows
* @apiPermission table write access * @apiPermission table write access
@ -202,7 +204,7 @@ router
* given the table schema, this will iterate through all the constraints on the table and * given the table schema, this will iterate through all the constraints on the table and
* check if the request body is valid. * check if the request body is valid.
* *
* @apiParam {string} sourceId The ID of the table the row is to be validated for. * @apiParam {string} tableId The ID of the table the row is to be validated for.
* *
* @apiParam (Body) {any} [any] Any fields provided in the request body will be tested * @apiParam (Body) {any} [any] Any fields provided in the request body will be tested
* against the table schema and constraints. * against the table schema and constraints.
@ -214,20 +216,20 @@ router
* the schema. * the schema.
*/ */
.post( .post(
"/api/:sourceId/rows/validate", "/api/:tableId/rows/validate",
paramResource("sourceId"), paramResource("tableId"),
authorized(PermissionType.TABLE, PermissionLevel.WRITE), authorized(PermissionType.TABLE, PermissionLevel.WRITE),
rowController.validate rowController.validate
) )
/** /**
* @api {delete} /api/:sourceId/rows Delete rows * @api {delete} /api/:tableId/rows Delete rows
* @apiName Delete rows * @apiName Delete rows
* @apiGroup rows * @apiGroup rows
* @apiPermission table write access * @apiPermission table write access
* @apiDescription This endpoint can delete a single row, or delete them in a bulk * @apiDescription This endpoint can delete a single row, or delete them in a bulk
* fashion. * fashion.
* *
* @apiParam {string} sourceId The ID of the table the row is to be deleted from. * @apiParam {string} tableId The ID of the table the row is to be deleted from.
* *
* @apiParam (Body) {object[]} [rows] If bulk deletion is desired then provide the rows in this * @apiParam (Body) {object[]} [rows] If bulk deletion is desired then provide the rows in this
* key of the request body that are to be deleted. * key of the request body that are to be deleted.
@ -240,37 +242,117 @@ router
* is the deleted row. * is the deleted row.
*/ */
.delete( .delete(
"/api/:sourceId/rows", "/api/:tableId/rows",
paramResource("sourceId"), paramResource("tableId"),
authorized(PermissionType.TABLE, PermissionLevel.WRITE), authorized(PermissionType.TABLE, PermissionLevel.WRITE),
trimViewRowInfo,
rowController.destroy rowController.destroy
) )
/** /**
* @api {post} /api/:sourceId/rows/exportRows Export Rows * @api {post} /api/:tableId/rows/exportRows Export Rows
* @apiName Export rows * @apiName Export rows
* @apiGroup rows * @apiGroup rows
* @apiPermission table write access * @apiPermission table write access
* @apiDescription This API can export a number of provided rows * @apiDescription This API can export a number of provided rows
* *
 * @apiParam {string} sourceId The ID of the table the rows are to be exported from. * @apiParam {string} tableId The ID of the table the rows are to be exported from.
* *
* @apiParam (Body) {object[]} [rows] The row IDs which are to be exported * @apiParam (Body) {object[]} [rows] The row IDs which are to be exported
* *
* @apiSuccess {object[]|object} * @apiSuccess {object[]|object}
*/ */
.post( .post(
"/api/:sourceId/rows/exportRows", "/api/:tableId/rows/exportRows",
paramResource("sourceId"), paramResource("tableId"),
authorized(PermissionType.TABLE, PermissionLevel.WRITE), authorized(PermissionType.TABLE, PermissionLevel.WRITE),
rowController.exportRows rowController.exportRows
) )
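Per the parameter description above, the export body simply lists the rows to export; a hedged sketch with hypothetical IDs:

// POST /api/:sourceId/rows/exportRows
const exportBody = {
  rows: ["ro_ta_abc123_0000001", "ro_ta_abc123_0000002"], // hypothetical row IDs
}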
router.post( router
"/api/v2/views/:viewId/search", .get(
authorized(PermissionType.TABLE, PermissionLevel.READ), "/api/v2/views/:viewId/search",
rowController.views.searchView authorized(PermissionType.VIEW, PermissionLevel.READ),
) rowController.views.searchView
)
/**
 * @api {post} /api/v2/views/:viewId/rows Creates a new row within a view
* @apiName Creates a new row
* @apiGroup rows
* @apiPermission table write access
* @apiDescription This API will create a new row based on the supplied body. If the
* body includes an "_id" field then it will update an existing row if the field
* links to one. Please note that "_id", "_rev" and "tableId" are fields that are
* already used by Budibase tables and cannot be used for columns.
*
 * @apiParam {string} viewId The ID of the view to save a row through.
*
* @apiParam (Body) {string} [_id] If the row exists already then an ID for the row must be provided.
* @apiParam (Body) {string} [_rev] If working with an existing row for an internal table its revision
* must also be provided.
* @apiParam (Body) {string} _viewId The ID of the view should be specified in the row body itself.
* @apiParam (Body) {string} tableId The ID of the table should also be specified in the row body itself.
* @apiParam (Body) {any} [any] Any field supplied in the body will be assessed to see if it matches
* a column in the specified table. All other fields will be dropped and not stored.
*
* @apiSuccess {string} _id The ID of the row that was just saved, if it was just created this
* is the rows new ID.
* @apiSuccess {string} [_rev] If saving to an internal table a revision will also be returned.
* @apiSuccess {object} body The contents of the row that was saved will be returned as well.
*/
.post(
"/api/v2/views/:viewId/rows",
paramResource("viewId"),
authorized(PermissionType.VIEW, PermissionLevel.WRITE),
trimViewRowInfo,
rowController.save
)
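A hedged sketch of a valid body for the view-scoped create above: per the parameters, both _viewId and tableId travel in the body, and trimViewRowInfo drops any field that is not part of the view before the row controller runs. IDs and columns are hypothetical.

// POST /api/v2/views/:viewId/rows
const viewRowBody = {
  _viewId: "view_ta_abc123_9f2c1a", // hypothetical view ID
  tableId: "ta_abc123",             // hypothetical owning table ID
  name: "Alice",
  age: 25,                          // trimmed away if "age" is not exposed by the view
}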
/**
* @api {patch} /api/v2/views/:viewId/rows/:rowId Updates a row
* @apiName Update a row
* @apiGroup rows
* @apiPermission table write access
* @apiDescription This endpoint is identical to the row creation endpoint but instead it will
* error if an _id isn't provided, it will only function for existing rows.
*/
.patch(
"/api/v2/views/:viewId/rows/:rowId",
paramResource("viewId"),
authorized(PermissionType.VIEW, PermissionLevel.WRITE),
trimViewRowInfo,
rowController.patch
)
/**
* @api {delete} /api/v2/views/:viewId/rows Delete rows for a view
* @apiName Delete rows for a view
* @apiGroup rows
* @apiPermission table write access
* @apiDescription This endpoint can delete a single row, or delete them in a bulk
* fashion.
*
 * @apiParam {string} viewId The ID of the view the row is to be deleted from.
*
* @apiParam (Body) {object[]} [rows] If bulk deletion is desired then provide the rows in this
* key of the request body that are to be deleted.
* @apiParam (Body) {string} [_id] If deleting a single row then provide its ID in this field.
* @apiParam (Body) {string} [_rev] If deleting a single row from an internal table then provide its
* revision here.
*
* @apiSuccess {object[]|object} body If deleting bulk then the response body will be an array
* of the deleted rows, if deleting a single row then the body will contain a "row" property which
* is the deleted row.
*/
.delete(
"/api/v2/views/:viewId/rows",
paramResource("viewId"),
authorized(PermissionType.VIEW, PermissionLevel.WRITE),
// This is required as the implementation relies on the table id
(ctx, next) => {
ctx.params.tableId = utils.extractViewInfoFromID(
ctx.params.viewId
).tableId
return next()
},
rowController.destroy
)
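As with the table endpoint, the view-scoped delete above accepts either a single row or a bulk payload; a brief sketch with hypothetical IDs:

// DELETE /api/v2/views/:viewId/rows -- single vs. bulk deletion bodies
const deleteSingle = { _id: "ro_ta_abc123_0000001", _rev: "1-abcdef" }
const deleteBulk = { rows: [{ _id: "ro_ta_abc123_0000001" }, { _id: "ro_ta_abc123_0000002" }] }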
export default router export default router

View File

@ -16,12 +16,16 @@ import {
FieldType, FieldType,
SortType, SortType,
SortOrder, SortOrder,
DeleteRow,
} from "@budibase/types" } from "@budibase/types"
import { import {
expectAnyInternalColsAttributes, expectAnyInternalColsAttributes,
generator, generator,
structures, structures,
} from "@budibase/backend-core/tests" } from "@budibase/backend-core/tests"
import trimViewRowInfoMiddleware from "../../../middleware/trimViewRowInfo"
import noViewDataMiddleware from "../../../middleware/noViewData"
import router from "../row"
describe("/rows", () => { describe("/rows", () => {
let request = setup.getRequest() let request = setup.getRequest()
@ -390,6 +394,26 @@ describe("/rows", () => {
expect(saved.arrayFieldArrayStrKnown).toEqual(["One"]) expect(saved.arrayFieldArrayStrKnown).toEqual(["One"])
expect(saved.optsFieldStrKnown).toEqual("Alpha") expect(saved.optsFieldStrKnown).toEqual("Alpha")
}) })
it("should throw an error when creating a table row with view id data", async () => {
const res = await request
.post(`/api/${row.tableId}/rows`)
.send({ ...row, _viewId: generator.guid() })
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(400)
expect(res.body.message).toEqual(
"Table row endpoints cannot contain view info"
)
})
it("should setup the noViewData middleware", async () => {
const route = router.stack.find(
r => r.methods.includes("POST") && r.path === "/api/:tableId/rows"
)
expect(route).toBeDefined()
expect(route?.stack).toContainEqual(noViewDataMiddleware)
})
}) })
describe("patch", () => { describe("patch", () => {
@ -439,6 +463,33 @@ describe("/rows", () => {
await assertRowUsage(rowUsage) await assertRowUsage(rowUsage)
await assertQueryUsage(queryUsage) await assertQueryUsage(queryUsage)
}) })
it("should throw an error when creating a table row with view id data", async () => {
const existing = await config.createRow()
const res = await config.api.row.patch(
table._id!,
{
...existing,
_id: existing._id!,
_rev: existing._rev!,
tableId: table._id!,
_viewId: generator.guid(),
},
{ expectStatus: 400 }
)
expect(res.body.message).toEqual(
"Table row endpoints cannot contain view info"
)
})
it("should setup the noViewData middleware", async () => {
const route = router.stack.find(
r => r.methods.includes("PATCH") && r.path === "/api/:tableId/rows"
)
expect(route).toBeDefined()
expect(route?.stack).toContainEqual(noViewDataMiddleware)
})
}) })
describe("destroy", () => { describe("destroy", () => {
@ -707,7 +758,7 @@ describe("/rows", () => {
}) })
// the environment needs configured for this // the environment needs configured for this
await setup.switchToSelfHosted(async () => { await setup.switchToSelfHosted(async () => {
return context.doInAppContext(config.getAppId(), async () => { context.doInAppContext(config.getAppId(), async () => {
const enriched = await outputProcessing(table, [row]) const enriched = await outputProcessing(table, [row])
expect((enriched as Row[])[0].attachment[0].url).toBe( expect((enriched as Row[])[0].attachment[0].url).toBe(
`/files/signed/prod-budi-app-assets/${config.getProdAppId()}/attachments/${attachmentId}` `/files/signed/prod-budi-app-assets/${config.getProdAppId()}/attachments/${attachmentId}`
@ -762,6 +813,252 @@ describe("/rows", () => {
}) })
}) })
describe("view search", () => {
function userTable(): Table {
return {
name: "user",
type: "user",
schema: {
name: {
type: FieldType.STRING,
name: "name",
constraints: { type: "string" },
},
age: {
type: FieldType.NUMBER,
name: "age",
constraints: {},
},
},
}
}
it("returns table rows from view", async () => {
const table = await config.createTable(userTable())
const rows = []
for (let i = 0; i < 10; i++) {
rows.push(await config.createRow({ tableId: table._id }))
}
const createViewResponse = await config.api.viewV2.create()
const response = await config.api.viewV2.search(createViewResponse.id)
expect(response.body.rows).toHaveLength(10)
expect(response.body).toEqual({
rows: expect.arrayContaining(rows.map(expect.objectContaining)),
})
})
it("searching respects the view filters", async () => {
const table = await config.createTable(userTable())
const expectedRows = []
for (let i = 0; i < 10; i++)
await config.createRow({
tableId: table._id,
name: generator.name(),
age: generator.integer({ min: 10, max: 30 }),
})
for (let i = 0; i < 5; i++)
expectedRows.push(
await config.createRow({
tableId: table._id,
name: generator.name(),
age: 40,
})
)
const createViewResponse = await config.api.viewV2.create({
query: { equal: { age: 40 } },
})
const response = await config.api.viewV2.search(createViewResponse.id)
expect(response.body.rows).toHaveLength(5)
expect(response.body).toEqual({
rows: expect.arrayContaining(expectedRows.map(expect.objectContaining)),
})
})
const sortTestOptions: [
{
field: string
order?: SortOrder
type?: SortType
},
string[]
][] = [
[
{
field: "name",
order: SortOrder.ASCENDING,
type: SortType.STRING,
},
["Alice", "Bob", "Charly", "Danny"],
],
[
{
field: "name",
},
["Alice", "Bob", "Charly", "Danny"],
],
[
{
field: "name",
order: SortOrder.DESCENDING,
},
["Danny", "Charly", "Bob", "Alice"],
],
[
{
field: "name",
order: SortOrder.DESCENDING,
type: SortType.STRING,
},
["Danny", "Charly", "Bob", "Alice"],
],
[
{
field: "age",
order: SortOrder.ASCENDING,
type: SortType.number,
},
["Danny", "Alice", "Charly", "Bob"],
],
[
{
field: "age",
order: SortOrder.ASCENDING,
},
["Danny", "Alice", "Charly", "Bob"],
],
[
{
field: "age",
order: SortOrder.DESCENDING,
},
["Bob", "Charly", "Alice", "Danny"],
],
[
{
field: "age",
order: SortOrder.DESCENDING,
type: SortType.number,
},
["Bob", "Charly", "Alice", "Danny"],
],
]
it.each(sortTestOptions)(
"allow sorting (%s)",
async (sortParams, expected) => {
await config.createTable(userTable())
const users = [
{ name: "Alice", age: 25 },
{ name: "Bob", age: 30 },
{ name: "Charly", age: 27 },
{ name: "Danny", age: 15 },
]
for (const user of users) {
await config.createRow({
tableId: config.table!._id,
...user,
})
}
const createViewResponse = await config.api.viewV2.create({
sort: sortParams,
})
const response = await config.api.viewV2.search(createViewResponse.id)
expect(response.body.rows).toHaveLength(4)
expect(response.body).toEqual({
rows: expected.map(name => expect.objectContaining({ name })),
})
}
)
it.each(sortTestOptions)(
"allow override the default view sorting (%s)",
async (sortParams, expected) => {
await config.createTable(userTable())
const users = [
{ name: "Alice", age: 25 },
{ name: "Bob", age: 30 },
{ name: "Charly", age: 27 },
{ name: "Danny", age: 15 },
]
for (const user of users) {
await config.createRow({
tableId: config.table!._id,
...user,
})
}
const createViewResponse = await config.api.viewV2.create({
sort: {
field: "name",
order: SortOrder.ASCENDING,
type: SortType.STRING,
},
})
const response = await config.api.viewV2.search(createViewResponse.id, {
sort: {
column: sortParams.field,
order: sortParams.order,
type: sortParams.type,
},
})
expect(response.body.rows).toHaveLength(4)
expect(response.body).toEqual({
rows: expected.map(name => expect.objectContaining({ name })),
})
}
)
it("when schema is defined, defined columns and row attributes are returned", async () => {
const table = await config.createTable(userTable())
const rows = []
for (let i = 0; i < 10; i++) {
rows.push(
await config.createRow({
tableId: table._id,
name: generator.name(),
age: generator.age(),
})
)
}
const view = await config.api.viewV2.create({
schema: { name: {} },
})
const response = await config.api.viewV2.search(view.id)
expect(response.body.rows).toHaveLength(10)
expect(response.body.rows).toEqual(
expect.arrayContaining(
rows.map(r => ({
...expectAnyInternalColsAttributes,
_viewId: view.id,
name: r.name,
}))
)
)
})
it("views without data can be returned", async () => {
const table = await config.createTable(userTable())
const createViewResponse = await config.api.viewV2.create()
const response = await config.api.viewV2.search(createViewResponse.id)
expect(response.body.rows).toHaveLength(0)
})
})
describe("view 2.0", () => { describe("view 2.0", () => {
function userTable(): Table { function userTable(): Table {
return { return {
@ -813,7 +1110,7 @@ describe("/rows", () => {
}) })
const data = randomRowData() const data = randomRowData()
const newRow = await config.api.row.save(view.id, { const newRow = await config.api.viewV2.row.create(view.id, {
tableId: config.table!._id, tableId: config.table!._id,
_viewId: view.id, _viewId: view.id,
...data, ...data,
@ -835,6 +1132,16 @@ describe("/rows", () => {
expect(row.body.age).toBeUndefined() expect(row.body.age).toBeUndefined()
expect(row.body.jobTitle).toBeUndefined() expect(row.body.jobTitle).toBeUndefined()
}) })
it("should setup the trimViewRowInfo middleware", async () => {
const route = router.stack.find(
r =>
r.methods.includes("POST") &&
r.path === "/api/v2/views/:viewId/rows"
)
expect(route).toBeDefined()
expect(route?.stack).toContainEqual(trimViewRowInfoMiddleware)
})
}) })
describe("patch", () => { describe("patch", () => {
@ -849,13 +1156,13 @@ describe("/rows", () => {
}, },
}) })
const newRow = await config.api.row.save(view.id, { const newRow = await config.api.viewV2.row.create(view.id, {
tableId, tableId,
_viewId: view.id, _viewId: view.id,
...randomRowData(), ...randomRowData(),
}) })
const newData = randomRowData() const newData = randomRowData()
await config.api.row.patch(view.id, { await config.api.viewV2.row.update(view.id, newRow._id!, {
tableId, tableId,
_viewId: view.id, _viewId: view.id,
_id: newRow._id!, _id: newRow._id!,
@ -878,6 +1185,16 @@ describe("/rows", () => {
expect(row.body.age).toBeUndefined() expect(row.body.age).toBeUndefined()
expect(row.body.jobTitle).toBeUndefined() expect(row.body.jobTitle).toBeUndefined()
}) })
it("should setup the trimViewRowInfo middleware", async () => {
const route = router.stack.find(
r =>
r.methods.includes("PATCH") &&
r.path === "/api/v2/views/:viewId/rows/:rowId"
)
expect(route).toBeDefined()
expect(route?.stack).toContainEqual(trimViewRowInfoMiddleware)
})
}) })
describe("destroy", () => { describe("destroy", () => {
@ -896,7 +1213,10 @@ describe("/rows", () => {
const rowUsage = await getRowUsage() const rowUsage = await getRowUsage()
const queryUsage = await getQueryUsage() const queryUsage = await getQueryUsage()
await config.api.row.delete(view.id, [createdRow]) const body: DeleteRow = {
_id: createdRow._id!,
}
await config.api.viewV2.row.delete(view.id, body)
await assertRowUsage(rowUsage - 1) await assertRowUsage(rowUsage - 1)
await assertQueryUsage(queryUsage + 1) await assertQueryUsage(queryUsage + 1)
@ -925,7 +1245,9 @@ describe("/rows", () => {
const rowUsage = await getRowUsage() const rowUsage = await getRowUsage()
const queryUsage = await getQueryUsage() const queryUsage = await getQueryUsage()
await config.api.row.delete(view.id, [rows[0], rows[2]]) await config.api.viewV2.row.delete(view.id, {
rows: [rows[0], rows[2]],
})
await assertRowUsage(rowUsage - 2) await assertRowUsage(rowUsage - 2)
await assertQueryUsage(queryUsage + 1) await assertQueryUsage(queryUsage + 1)
@ -939,327 +1261,5 @@ describe("/rows", () => {
await config.api.row.get(tableId, rows[1]._id!, { expectStatus: 200 }) await config.api.row.get(tableId, rows[1]._id!, { expectStatus: 200 })
}) })
}) })
describe("view search", () => {
function userTable(): Table {
return {
name: "user",
type: "user",
schema: {
name: {
type: FieldType.STRING,
name: "name",
constraints: { type: "string" },
},
age: {
type: FieldType.NUMBER,
name: "age",
constraints: {},
},
},
}
}
it("returns table rows from view", async () => {
const table = await config.createTable(userTable())
const rows = []
for (let i = 0; i < 10; i++) {
rows.push(await config.createRow({ tableId: table._id }))
}
const createViewResponse = await config.api.viewV2.create()
const response = await config.api.viewV2.search(createViewResponse.id)
expect(response.body.rows).toHaveLength(10)
expect(response.body).toEqual({
rows: expect.arrayContaining(rows.map(expect.objectContaining)),
})
})
it("searching respects the view filters", async () => {
const table = await config.createTable(userTable())
const expectedRows = []
for (let i = 0; i < 10; i++)
await config.createRow({
tableId: table._id,
name: generator.name(),
age: generator.integer({ min: 10, max: 30 }),
})
for (let i = 0; i < 5; i++)
expectedRows.push(
await config.createRow({
tableId: table._id,
name: generator.name(),
age: 40,
})
)
const createViewResponse = await config.api.viewV2.create({
query: [{ operator: "equal", field: "age", value: 40 }],
})
const response = await config.api.viewV2.search(createViewResponse.id)
expect(response.body.rows).toHaveLength(5)
expect(response.body).toEqual({
rows: expect.arrayContaining(
expectedRows.map(expect.objectContaining)
),
})
})
const sortTestOptions: [
{
field: string
order?: SortOrder
type?: SortType
},
string[]
][] = [
[
{
field: "name",
order: SortOrder.ASCENDING,
type: SortType.STRING,
},
["Alice", "Bob", "Charly", "Danny"],
],
[
{
field: "name",
},
["Alice", "Bob", "Charly", "Danny"],
],
[
{
field: "name",
order: SortOrder.DESCENDING,
},
["Danny", "Charly", "Bob", "Alice"],
],
[
{
field: "name",
order: SortOrder.DESCENDING,
type: SortType.STRING,
},
["Danny", "Charly", "Bob", "Alice"],
],
[
{
field: "age",
order: SortOrder.ASCENDING,
type: SortType.number,
},
["Danny", "Alice", "Charly", "Bob"],
],
[
{
field: "age",
order: SortOrder.ASCENDING,
},
["Danny", "Alice", "Charly", "Bob"],
],
[
{
field: "age",
order: SortOrder.DESCENDING,
},
["Bob", "Charly", "Alice", "Danny"],
],
[
{
field: "age",
order: SortOrder.DESCENDING,
type: SortType.number,
},
["Bob", "Charly", "Alice", "Danny"],
],
]
it.each(sortTestOptions)(
"allow sorting (%s)",
async (sortParams, expected) => {
await config.createTable(userTable())
const users = [
{ name: "Alice", age: 25 },
{ name: "Bob", age: 30 },
{ name: "Charly", age: 27 },
{ name: "Danny", age: 15 },
]
for (const user of users) {
await config.createRow({
tableId: config.table!._id,
...user,
})
}
const createViewResponse = await config.api.viewV2.create({
sort: sortParams,
})
const response = await config.api.viewV2.search(createViewResponse.id)
expect(response.body.rows).toHaveLength(4)
expect(response.body).toEqual({
rows: expected.map(name => expect.objectContaining({ name })),
})
}
)
it.each(sortTestOptions)(
"allow override the default view sorting (%s)",
async (sortParams, expected) => {
await config.createTable(userTable())
const users = [
{ name: "Alice", age: 25 },
{ name: "Bob", age: 30 },
{ name: "Charly", age: 27 },
{ name: "Danny", age: 15 },
]
for (const user of users) {
await config.createRow({
tableId: config.table!._id,
...user,
})
}
const createViewResponse = await config.api.viewV2.create({
sort: {
field: "name",
order: SortOrder.ASCENDING,
type: SortType.STRING,
},
})
const response = await config.api.viewV2.search(
createViewResponse.id,
{
sort: sortParams.field,
sortOrder: sortParams.order,
sortType: sortParams.type,
}
)
expect(response.body.rows).toHaveLength(4)
expect(response.body).toEqual({
rows: expected.map(name => expect.objectContaining({ name })),
})
}
)
it("when schema is defined, defined columns and row attributes are returned", async () => {
const table = await config.createTable(userTable())
const rows = []
for (let i = 0; i < 10; i++) {
rows.push(
await config.createRow({
tableId: table._id,
name: generator.name(),
age: generator.age(),
})
)
}
const view = await config.api.viewV2.create({
schema: { name: {} },
})
const response = await config.api.viewV2.search(view.id)
expect(response.body.rows).toHaveLength(10)
expect(response.body.rows).toEqual(
expect.arrayContaining(
rows.map(r => ({
...expectAnyInternalColsAttributes,
_viewId: view.id,
name: r.name,
}))
)
)
})
it("views without data can be returned", async () => {
const table = await config.createTable(userTable())
const createViewResponse = await config.api.viewV2.create()
const response = await config.api.viewV2.search(createViewResponse.id)
expect(response.body.rows).toHaveLength(0)
})
it("respects the limit parameter", async () => {
const table = await config.createTable(userTable())
const rows = []
for (let i = 0; i < 10; i++) {
rows.push(await config.createRow({ tableId: table._id }))
}
const limit = generator.integer({ min: 1, max: 8 })
const createViewResponse = await config.api.viewV2.create()
const response = await config.api.viewV2.search(createViewResponse.id, {
limit,
})
expect(response.body.rows).toHaveLength(limit)
})
it("can handle pagination", async () => {
const table = await config.createTable(userTable())
const rows = []
for (let i = 0; i < 10; i++) {
rows.push(await config.createRow({ tableId: table._id }))
}
// rows.sort((a, b) => (a._id! > b._id! ? 1 : -1))
const createViewResponse = await config.api.viewV2.create()
const allRows = (await config.api.viewV2.search(createViewResponse.id))
.body.rows
const firstPageResponse = await config.api.viewV2.search(
createViewResponse.id,
{
paginate: true,
limit: 4,
}
)
expect(firstPageResponse.body).toEqual({
rows: expect.arrayContaining(allRows.slice(0, 4)),
totalRows: 10,
hasNextPage: true,
bookmark: expect.any(String),
})
const secondPageResponse = await config.api.viewV2.search(
createViewResponse.id,
{
paginate: true,
limit: 4,
bookmark: firstPageResponse.body.bookmark,
}
)
expect(secondPageResponse.body).toEqual({
rows: expect.arrayContaining(allRows.slice(4, 8)),
totalRows: 10,
hasNextPage: true,
bookmark: expect.any(String),
})
const lastPageResponse = await config.api.viewV2.search(
createViewResponse.id,
{
paginate: true,
limit: 4,
bookmark: secondPageResponse.body.bookmark,
}
)
expect(lastPageResponse.body).toEqual({
rows: expect.arrayContaining(allRows.slice(8)),
totalRows: 10,
hasNextPage: false,
bookmark: expect.any(String),
})
})
})
}) })
}) })

View File

@ -62,7 +62,7 @@ describe("/v2/views", () => {
name: generator.name(), name: generator.name(),
tableId: config.table!._id!, tableId: config.table!._id!,
primaryDisplay: generator.word(), primaryDisplay: generator.word(),
query: [{ operator: "equal", field: "field", value: "value" }], query: { allOr: false, equal: { field: "value" } },
sort: { sort: {
field: "fieldToSort", field: "fieldToSort",
order: SortOrder.DESCENDING, order: SortOrder.DESCENDING,
@ -190,7 +190,7 @@ describe("/v2/views", () => {
const tableId = config.table!._id! const tableId = config.table!._id!
await config.api.viewV2.update({ await config.api.viewV2.update({
...view, ...view,
query: [{ operator: "equal", field: "newField", value: "thatValue" }], query: { equal: { newField: "thatValue" } },
}) })
expect(await config.api.table.get(tableId)).toEqual({ expect(await config.api.table.get(tableId)).toEqual({
@ -198,9 +198,7 @@ describe("/v2/views", () => {
views: { views: {
[view.name]: { [view.name]: {
...view, ...view,
query: [ query: { equal: { newField: "thatValue" } },
{ operator: "equal", field: "newField", value: "thatValue" },
],
schema: expect.anything(), schema: expect.anything(),
}, },
}, },
@ -218,13 +216,7 @@ describe("/v2/views", () => {
tableId, tableId,
name: view.name, name: view.name,
primaryDisplay: generator.word(), primaryDisplay: generator.word(),
query: [ query: { equal: { [generator.word()]: generator.word() } },
{
operator: "equal",
field: generator.word(),
value: generator.word(),
},
],
sort: { sort: {
field: generator.word(), field: generator.word(),
order: SortOrder.DESCENDING, order: SortOrder.DESCENDING,
@ -293,7 +285,7 @@ describe("/v2/views", () => {
{ {
...view, ...view,
tableId: generator.guid(), tableId: generator.guid(),
query: [{ operator: "equal", field: "newField", value: "thatValue" }], query: { equal: { newField: "thatValue" } },
}, },
{ expectStatus: 404 } { expectStatus: 404 }
) )

View File

@ -34,7 +34,7 @@ router
"/api/views/:viewName", "/api/views/:viewName",
paramResource("viewName"), paramResource("viewName"),
authorized( authorized(
permissions.PermissionType.TABLE, permissions.PermissionType.VIEW,
permissions.PermissionLevel.READ permissions.PermissionLevel.READ
), ),
rowController.fetchView rowController.fetchView

View File

@ -11,6 +11,7 @@ import {
AutomationStepInput, AutomationStepInput,
AutomationStepSchema, AutomationStepSchema,
AutomationStepType, AutomationStepType,
EmptyFilterOption,
SearchFilters, SearchFilters,
Table, Table,
} from "@budibase/types" } from "@budibase/types"
@ -26,16 +27,6 @@ const SortOrderPretty = {
[SortOrder.DESCENDING]: "Descending", [SortOrder.DESCENDING]: "Descending",
} }
enum EmptyFilterOption {
RETURN_ALL = "all",
RETURN_NONE = "none",
}
const EmptyFilterOptionPretty = {
[EmptyFilterOption.RETURN_ALL]: "Return all table rows",
[EmptyFilterOption.RETURN_NONE]: "Return no rows",
}
export const definition: AutomationStepSchema = { export const definition: AutomationStepSchema = {
description: "Query rows from the database", description: "Query rows from the database",
icon: "Search", icon: "Search",
@ -77,12 +68,6 @@ export const definition: AutomationStepSchema = {
title: "Limit", title: "Limit",
customType: AutomationCustomIOType.QUERY_LIMIT, customType: AutomationCustomIOType.QUERY_LIMIT,
}, },
onEmptyFilter: {
pretty: Object.values(EmptyFilterOptionPretty),
enum: Object.values(EmptyFilterOption),
type: AutomationIOType.STRING,
title: "When Filter Empty",
},
}, },
required: ["tableId"], required: ["tableId"],
}, },

View File

@ -1,18 +1,11 @@
import * as setup from "./utilities" const setup = require("./utilities")
import { FilterConditions } from "../steps/filter" const { FilterConditions } = require("../steps/filter")
describe("test the filter logic", () => { describe("test the filter logic", () => {
async function checkFilter( async function checkFilter(field, condition, value, pass = true) {
field: any, let res = await setup.runStep(setup.actions.FILTER.stepId,
condition: string, { field, condition, value }
value: any, )
pass = true
) {
let res = await setup.runStep(setup.actions.FILTER.stepId, {
field,
condition,
value,
})
expect(res.result).toEqual(pass) expect(res.result).toEqual(pass)
expect(res.success).toEqual(true) expect(res.success).toEqual(true)
} }
@ -43,9 +36,9 @@ describe("test the filter logic", () => {
it("check date coercion", async () => { it("check date coercion", async () => {
await checkFilter( await checkFilter(
new Date().toISOString(), (new Date()).toISOString(),
FilterConditions.GREATER_THAN, FilterConditions.GREATER_THAN,
new Date(-10000).toISOString(), (new Date(-10000)).toISOString(),
true true
) )
}) })

View File

@ -1,7 +1,5 @@
import newid from "./newid" import newid from "./newid"
import { db as dbCore } from "@budibase/backend-core" import { db as dbCore } from "@budibase/backend-core"
import { DocumentType, VirtualDocumentType } from "@budibase/types"
export { DocumentType, VirtualDocumentType } from "@budibase/types"
type Optional = string | null type Optional = string | null
@ -21,6 +19,7 @@ export const BudibaseInternalDB = {
export const SEPARATOR = dbCore.SEPARATOR export const SEPARATOR = dbCore.SEPARATOR
export const StaticDatabases = dbCore.StaticDatabases export const StaticDatabases = dbCore.StaticDatabases
export const DocumentType = dbCore.DocumentType
export const APP_PREFIX = dbCore.APP_PREFIX export const APP_PREFIX = dbCore.APP_PREFIX
export const APP_DEV_PREFIX = dbCore.APP_DEV_PREFIX export const APP_DEV_PREFIX = dbCore.APP_DEV_PREFIX
export const isDevAppID = dbCore.isDevAppID export const isDevAppID = dbCore.isDevAppID
@ -285,22 +284,10 @@ export function getMultiIDParams(ids: string[]) {
* @returns {string} The new view ID which the view doc can be stored under. * @returns {string} The new view ID which the view doc can be stored under.
*/ */
export function generateViewID(tableId: string) { export function generateViewID(tableId: string) {
return `${ return `${tableId}${SEPARATOR}${newid()}`
VirtualDocumentType.VIEW
}${SEPARATOR}${tableId}${SEPARATOR}${newid()}`
}
export function isViewID(viewId: string) {
return viewId?.split(SEPARATOR)[0] === VirtualDocumentType.VIEW
} }
export function extractViewInfoFromID(viewId: string) { export function extractViewInfoFromID(viewId: string) {
if (!isViewID(viewId)) {
throw new Error("Unable to extract table ID, is not a view ID")
}
const split = viewId.split(SEPARATOR)
split.shift()
viewId = split.join(SEPARATOR)
const regex = new RegExp(`^(?<tableId>.+)${SEPARATOR}([^${SEPARATOR}]+)$`) const regex = new RegExp(`^(?<tableId>.+)${SEPARATOR}([^${SEPARATOR}]+)$`)
const res = regex.exec(viewId) const res = regex.exec(viewId)
return { return {
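For reference, a sketch of the round trip these helpers imply. The literal values of VirtualDocumentType.VIEW and SEPARATOR are assumptions here ("view" and "_"); the point is that a v2 view ID embeds a prefix plus the owning table ID, so extractViewInfoFromID can reject anything that is not a view ID.

// Hedged sketch, assuming SEPARATOR === "_" and VirtualDocumentType.VIEW === "view"
const SEPARATOR = "_"
const viewId = `view${SEPARATOR}ta_abc123${SEPARATOR}9f2c1a`  // shape produced by generateViewID("ta_abc123")

const isView = viewId.split(SEPARATOR)[0] === "view"          // mirrors isViewID

// mirrors extractViewInfoFromID: drop the prefix, then split the table ID from the random suffix
const rest = viewId.split(SEPARATOR).slice(1).join(SEPARATOR) // "ta_abc123_9f2c1a"
const tableId = rest.replace(/_[^_]+$/, "")                   // "ta_abc123"
console.log(isView, tableId)                                  // true "ta_abc123"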

View File

@ -341,10 +341,10 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
} }
} }
getDefinitionSQL(tableName: string) { getDefinitionSQL(tableName: string, schemaName: string) {
return `select * return `select *
from INFORMATION_SCHEMA.COLUMNS from INFORMATION_SCHEMA.COLUMNS
where TABLE_NAME='${tableName}'` where TABLE_NAME='${tableName}' AND TABLE_SCHEMA='${schemaName}'`
} }
getConstraintsSQL(tableName: string) { getConstraintsSQL(tableName: string) {
@ -388,16 +388,18 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
throw "Unable to get list of tables in database" throw "Unable to get list of tables in database"
} }
const schema = this.config.schema || DEFAULT_SCHEMA const schemaName = this.config.schema || DEFAULT_SCHEMA
const tableNames = tableInfo const tableNames = tableInfo
.filter((record: any) => record.TABLE_SCHEMA === schema) .filter((record: any) => record.TABLE_SCHEMA === schemaName)
.map((record: any) => record.TABLE_NAME) .map((record: any) => record.TABLE_NAME)
.filter((name: string) => this.MASTER_TABLES.indexOf(name) === -1) .filter((name: string) => this.MASTER_TABLES.indexOf(name) === -1)
const tables: Record<string, ExternalTable> = {} const tables: Record<string, ExternalTable> = {}
for (let tableName of tableNames) { for (let tableName of tableNames) {
// get the column definition (type) // get the column definition (type)
const definition = await this.runSQL(this.getDefinitionSQL(tableName)) const definition = await this.runSQL(
this.getDefinitionSQL(tableName, schemaName)
)
// find primary key constraints // find primary key constraints
const constraints = await this.runSQL(this.getConstraintsSQL(tableName)) const constraints = await this.runSQL(this.getConstraintsSQL(tableName))
// find the computed and identity columns (auto columns) // find the computed and identity columns (auto columns)
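A small sketch of what the schema-aware definition query produces, with hypothetical table and schema names; the only change from before is the extra TABLE_SCHEMA predicate:

// Mirrors getDefinitionSQL("users", "dbo") from the change above (names are hypothetical)
const tableName = "users"
const schemaName = "dbo"
const definitionSQL = `select *
  from INFORMATION_SCHEMA.COLUMNS
  where TABLE_NAME='${tableName}' AND TABLE_SCHEMA='${schemaName}'`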

View File

@ -93,21 +93,6 @@ const SCHEMA: Integration = {
}, },
} }
const defaultTypeCasting = function (field: any, next: any) {
if (
field.type == "DATETIME" ||
field.type === "DATE" ||
field.type === "TIMESTAMP" ||
field.type === "LONGLONG"
) {
return field.string()
}
if (field.type === "BIT" && field.length === 1) {
return field.buffer()?.[0]
}
return next()
}
export function bindingTypeCoerce(bindings: any[]) { export function bindingTypeCoerce(bindings: any[]) {
for (let i = 0; i < bindings.length; i++) { for (let i = 0; i < bindings.length; i++) {
const binding = bindings[i] const binding = bindings[i]
@ -162,8 +147,21 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
delete config.rejectUnauthorized delete config.rejectUnauthorized
this.config = { this.config = {
...config, ...config,
typeCast: defaultTypeCasting,
multipleStatements: true, multipleStatements: true,
typeCast: function (field: any, next: any) {
if (
field.type == "DATETIME" ||
field.type === "DATE" ||
field.type === "TIMESTAMP" ||
field.type === "LONGLONG"
) {
return field.string()
}
if (field.type === "BIT" && field.length === 1) {
return field.buffer()?.[0]
}
return next()
},
} }
} }
@ -196,37 +194,6 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
return `concat(${parts.join(", ")})` return `concat(${parts.join(", ")})`
} }
defineTypeCastingFromSchema(schema: {
[key: string]: { name: string; type: string }
}): void {
if (!schema) {
return
}
this.config.typeCast = function (field: any, next: any) {
if (schema[field.name]?.name === field.name) {
if (["LONGLONG", "NEWDECIMAL", "DECIMAL"].includes(field.type)) {
if (schema[field.name]?.type === "number") {
const value = field.string()
return value ? Number(value) : null
} else {
return field.string()
}
}
}
if (
field.type == "DATETIME" ||
field.type === "DATE" ||
field.type === "TIMESTAMP"
) {
return field.string()
}
if (field.type === "BIT" && field.length === 1) {
return field.buffer()?.[0]
}
return next()
}
}
async connect() { async connect() {
this.client = await mysql.createConnection(this.config) this.client = await mysql.createConnection(this.config)
} }
@ -237,10 +204,7 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
async internalQuery( async internalQuery(
query: SqlQuery, query: SqlQuery,
opts: { opts: { connect?: boolean; disableCoercion?: boolean } = {
connect?: boolean
disableCoercion?: boolean
} = {
connect: true, connect: true,
disableCoercion: false, disableCoercion: false,
} }
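A standalone, hedged sketch of the schema-driven type casting shown in this file: numeric columns declared as numbers in the Budibase schema come back as JS numbers, date-like and BIT(1) columns keep their special handling, and everything else falls through to the driver default. FieldLike is a minimal stand-in for what mysql2 passes to typeCast.

type FieldLike = {
  name: string
  type: string
  length?: number
  string(): string | null
  buffer(): { [index: number]: number } | null
}

function castWithSchema(schema: Record<string, { name: string; type: string }>) {
  return (field: FieldLike, next: () => any) => {
    if (schema[field.name]?.name === field.name) {
      if (["LONGLONG", "NEWDECIMAL", "DECIMAL"].includes(field.type)) {
        const value = field.string()
        // columns typed as "number" in the Budibase schema become JS numbers, others stay strings
        if (schema[field.name]?.type === "number") return value ? Number(value) : null
        return value
      }
    }
    if (["DATETIME", "DATE", "TIMESTAMP"].includes(field.type)) return field.string()
    if (field.type === "BIT" && field.length === 1) return field.buffer()?.[0]
    return next()
  }
}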

View File

@ -0,0 +1,9 @@
import { Ctx, Row } from "@budibase/types"
export default async (ctx: Ctx<Row>, next: any) => {
if (ctx.request.body._viewId) {
return ctx.throw(400, "Table row endpoints cannot contain view info")
}
return next()
}

View File

@ -0,0 +1,83 @@
import { generator } from "@budibase/backend-core/tests"
import { BBRequest, FieldType, Row, Table } from "@budibase/types"
import { Next } from "koa"
import * as utils from "../../db/utils"
import noViewDataMiddleware from "../noViewData"
class TestConfiguration {
next: Next
throw: jest.Mock<(status: number, message: string) => never>
middleware: typeof noViewDataMiddleware
params: Record<string, any>
request?: Pick<BBRequest<Row>, "body">
constructor() {
this.next = jest.fn()
this.throw = jest.fn()
this.params = {}
this.middleware = noViewDataMiddleware
}
executeMiddleware(ctxRequestBody: Row) {
this.request = {
body: ctxRequestBody,
}
return this.middleware(
{
request: this.request as any,
throw: this.throw as any,
params: this.params,
} as any,
this.next
)
}
afterEach() {
jest.clearAllMocks()
}
}
describe("noViewData middleware", () => {
let config: TestConfiguration
beforeEach(() => {
config = new TestConfiguration()
})
afterEach(() => {
config.afterEach()
})
const getRandomData = () => ({
_id: generator.guid(),
name: generator.name(),
age: generator.age(),
address: generator.address(),
})
it("it should pass without view id data", async () => {
const data = getRandomData()
await config.executeMiddleware({
...data,
})
expect(config.next).toBeCalledTimes(1)
expect(config.throw).not.toBeCalled()
})
it("it should throw an error if _viewid is provided", async () => {
const data = getRandomData()
await config.executeMiddleware({
_viewId: generator.guid(),
...data,
})
expect(config.throw).toBeCalledTimes(1)
expect(config.throw).toBeCalledWith(
400,
"Table row endpoints cannot contain view info"
)
expect(config.next).not.toBeCalled()
})
})

View File

@ -117,7 +117,7 @@ describe("trimViewRowInfo middleware", () => {
}) })
expect(config.request?.body).toEqual(data) expect(config.request?.body).toEqual(data)
expect(config.params.sourceId).toEqual(table._id) expect(config.params.tableId).toEqual(table._id)
expect(config.next).toBeCalledTimes(1) expect(config.next).toBeCalledTimes(1)
expect(config.throw).not.toBeCalled() expect(config.throw).not.toBeCalled()
@ -143,9 +143,32 @@ describe("trimViewRowInfo middleware", () => {
name: data.name, name: data.name,
address: data.address, address: data.address,
}) })
expect(config.params.sourceId).toEqual(table._id) expect(config.params.tableId).toEqual(table._id)
expect(config.next).toBeCalledTimes(1) expect(config.next).toBeCalledTimes(1)
expect(config.throw).not.toBeCalled() expect(config.throw).not.toBeCalled()
}) })
it("it should throw an error if no viewid is provided on the body", async () => {
const data = getRandomData()
await config.executeMiddleware(viewId, {
...data,
})
expect(config.throw).toBeCalledTimes(1)
expect(config.throw).toBeCalledWith(400, "_viewId is required")
expect(config.next).not.toBeCalled()
})
it("it should throw an error if no viewid is provided on the parameters", async () => {
const data = getRandomData()
await config.executeMiddleware(undefined as any, {
_viewId: viewId,
...data,
})
expect(config.throw).toBeCalledTimes(1)
expect(config.throw).toBeCalledWith(400, "viewId path is required")
expect(config.next).not.toBeCalled()
})
}) })

View File

@ -3,35 +3,26 @@ import * as utils from "../db/utils"
import sdk from "../sdk" import sdk from "../sdk"
import { db } from "@budibase/backend-core" import { db } from "@budibase/backend-core"
import { Next } from "koa" import { Next } from "koa"
import { getTableId } from "../api/controllers/row/utils"
export default async (ctx: Ctx<Row>, next: Next) => { export default async (ctx: Ctx<Row>, next: Next) => {
const { body } = ctx.request const { body } = ctx.request
let { _viewId: viewId } = body const { _viewId: viewId } = body
const possibleViewId = getTableId(ctx)
if (utils.isViewID(possibleViewId)) {
viewId = possibleViewId
}
// nothing to do, it is not a view (just a table ID)
if (!viewId) { if (!viewId) {
return next() return ctx.throw(400, "_viewId is required")
} }
const { tableId } = utils.extractViewInfoFromID(viewId) if (!ctx.params.viewId) {
return ctx.throw(400, "viewId path is required")
// don't need to trim delete requests
if (ctx?.method?.toLowerCase() !== "delete") {
const { _viewId, ...trimmedView } = await trimViewFields(
viewId,
tableId,
body
)
ctx.request.body = trimmedView
} }
ctx.params.sourceId = tableId const { tableId } = utils.extractViewInfoFromID(ctx.params.viewId)
const { _viewId, ...trimmedView } = await trimViewFields(
viewId,
tableId,
body
)
ctx.request.body = trimmedView
ctx.params.tableId = tableId
return next() return next()
} }
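A hedged before/after sketch of what trimViewRowInfo does to a view-scoped save; IDs and columns are hypothetical, and the view is assumed to expose only "name" and "age".

const bodyBefore = {
  _viewId: "view_ta_abc123_9f2c1a",
  name: "Alice",
  age: 25,
  internalNotes: "not exposed by the view",
}
// After the middleware runs:
//   - _viewId is stripped and internalNotes is trimmed away (not a view field)
//   - the owning table ID ("ta_abc123") extracted from the view ID is placed on ctx.params,
//     so the row controller can treat the request like an ordinary table save
const bodyAfter = { name: "Alice", age: 25 }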

View File

@ -1,9 +1,23 @@
import { SearchFilters, SearchParams } from "@budibase/types" import { SearchFilters, SortOrder, SortType } from "@budibase/types"
import { isExternalTable } from "../../../integrations/utils" import { isExternalTable } from "../../../integrations/utils"
import * as internal from "./search/internal" import * as internal from "./search/internal"
import * as external from "./search/external" import * as external from "./search/external"
import { Format } from "../../../api/controllers/view/exporters" import { Format } from "../../../api/controllers/view/exporters"
export interface SearchParams {
tableId: string
paginate?: boolean
query: SearchFilters
bookmark?: string
limit?: number
sort?: string
sortOrder?: SortOrder
sortType?: SortType
version?: string
disableEscaping?: boolean
fields?: string[]
}
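For reference, a fully-populated options object for the sdk-level search() that consumes this interface; column names and values are hypothetical, the field names come straight from the interface above, and SortOrder/SortType are the enums imported at the top of this file.

const options: SearchParams = {
  tableId: "ta_abc123",                   // hypothetical table ID
  query: { equal: { status: "active" } },
  paginate: true,
  limit: 10,
  bookmark: undefined,                    // set from the previous response to fetch the next page
  sort: "name",
  sortOrder: SortOrder.ASCENDING,
  sortType: SortType.STRING,
  fields: ["name", "status"],
}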
export interface ViewParams { export interface ViewParams {
calculation: string calculation: string
group: string group: string

View File

@ -6,7 +6,6 @@ import {
IncludeRelationship, IncludeRelationship,
Row, Row,
SearchFilters, SearchFilters,
SearchParams,
} from "@budibase/types" } from "@budibase/types"
import * as exporters from "../../../../api/controllers/view/exporters" import * as exporters from "../../../../api/controllers/view/exporters"
import sdk from "../../../../sdk" import sdk from "../../../../sdk"
@ -14,7 +13,7 @@ import { handleRequest } from "../../../../api/controllers/row/external"
import { breakExternalTableId } from "../../../../integrations/utils" import { breakExternalTableId } from "../../../../integrations/utils"
import { cleanExportRows } from "../utils" import { cleanExportRows } from "../utils"
import { utils } from "@budibase/shared-core" import { utils } from "@budibase/shared-core"
import { ExportRowsParams, ExportRowsResult } from "../search" import { ExportRowsParams, ExportRowsResult, SearchParams } from "../search"
import { HTTPError, db } from "@budibase/backend-core" import { HTTPError, db } from "@budibase/backend-core"
import pick from "lodash/pick" import pick from "lodash/pick"

View File

@ -12,7 +12,7 @@ import {
} from "../../../../db/utils" } from "../../../../db/utils"
import { getGlobalUsersFromMetadata } from "../../../../utilities/global" import { getGlobalUsersFromMetadata } from "../../../../utilities/global"
import { outputProcessing } from "../../../../utilities/rowProcessor" import { outputProcessing } from "../../../../utilities/rowProcessor"
import { Database, Row, Table, SearchParams } from "@budibase/types" import { Database, Row, Table } from "@budibase/types"
import { cleanExportRows } from "../utils" import { cleanExportRows } from "../utils"
import { import {
Format, Format,
@ -28,7 +28,7 @@ import {
getFromMemoryDoc, getFromMemoryDoc,
} from "../../../../api/controllers/view/utils" } from "../../../../api/controllers/view/utils"
import sdk from "../../../../sdk" import sdk from "../../../../sdk"
import { ExportRowsParams, ExportRowsResult } from "../search" import { ExportRowsParams, ExportRowsResult, SearchParams } from "../search"
import pick from "lodash/pick" import pick from "lodash/pick"
export async function search(options: SearchParams) { export async function search(options: SearchParams) {

View File

@ -1,15 +1,14 @@
import { GenericContainer } from "testcontainers" import { GenericContainer } from "testcontainers"
import { import {
Datasource, Datasource,
EmptyFilterOption,
FieldType, FieldType,
Row, Row,
SourceName, SourceName,
Table, Table,
SearchParams,
} from "@budibase/types" } from "@budibase/types"
import TestConfiguration from "../../../../../tests/utilities/TestConfiguration" import TestConfiguration from "../../../../../tests/utilities/TestConfiguration"
import { SearchParams } from "../../search"
import { search } from "../external" import { search } from "../external"
import { import {
expectAnyExternalColsAttributes, expectAnyExternalColsAttributes,
@ -123,6 +122,22 @@ describe.skip("external", () => {
}) })
}) })
it("empty filters search returns no data", async () => {
await config.doInContext(config.appId, async () => {
const tableId = config.table!._id!
const searchParams: SearchParams = {
tableId,
query: {
onEmptyFilter: EmptyFilterOption.RETURN_NONE,
},
}
const result = await search(searchParams)
expect(result.rows).toHaveLength(0)
})
})
it("querying by fields will always return data attribute columns", async () => { it("querying by fields will always return data attribute columns", async () => {
await config.doInContext(config.appId, async () => { await config.doInContext(config.appId, async () => {
const tableId = config.table!._id! const tableId = config.table!._id!

View File

@ -1,5 +1,6 @@
import { FieldType, Row, Table, SearchParams } from "@budibase/types" import { FieldType, Row, Table } from "@budibase/types"
import TestConfiguration from "../../../../../tests/utilities/TestConfiguration" import TestConfiguration from "../../../../../tests/utilities/TestConfiguration"
import { SearchParams } from "../../search"
import { search } from "../internal" import { search } from "../internal"
import { import {
expectAnyInternalColsAttributes, expectAnyInternalColsAttributes,

View File

@ -1,20 +1,17 @@
import { import { HTTPError, context } from "@budibase/backend-core"
FieldSchema, import { FieldSchema, TableSchema, View, ViewV2 } from "@budibase/types"
RenameColumn,
TableSchema,
View,
ViewV2,
} from "@budibase/types"
import { context, HTTPError } from "@budibase/backend-core"
import sdk from "../../../sdk" import sdk from "../../../sdk"
import * as utils from "../../../db/utils" import * as utils from "../../../db/utils"
import merge from "lodash/merge"
export async function get(viewId: string): Promise<ViewV2 | undefined> { export async function get(viewId: string): Promise<ViewV2 | undefined> {
const { tableId } = utils.extractViewInfoFromID(viewId) const { tableId } = utils.extractViewInfoFromID(viewId)
const table = await sdk.tables.getTable(tableId) const table = await sdk.tables.getTable(tableId)
const views = Object.values(table.views!) const views = Object.values(table.views!)
return views.find(v => isV2(v) && v.id === viewId) as ViewV2 | undefined const view = views.find(v => isV2(v) && v.id === viewId) as ViewV2 | undefined
return view
} }
export async function create( export async function create(
@ -109,37 +106,3 @@ export function enrichSchema(view: View | ViewV2, tableSchema: TableSchema) {
schema: schema, schema: schema,
} }
} }
export function syncSchema(
view: ViewV2,
schema: TableSchema,
renameColumn: RenameColumn | undefined
): ViewV2 {
if (renameColumn) {
if (view.columns) {
view.columns[view.columns.indexOf(renameColumn.old)] =
renameColumn.updated
}
if (view.schemaUI) {
view.schemaUI[renameColumn.updated] = view.schemaUI[renameColumn.old]
delete view.schemaUI[renameColumn.old]
}
}
if (view.schemaUI) {
for (const fieldName of Object.keys(view.schemaUI)) {
if (!schema[fieldName]) {
delete view.schemaUI[fieldName]
}
}
for (const fieldName of Object.keys(schema)) {
if (!view.schemaUI[fieldName]) {
view.schemaUI[fieldName] = { visible: false }
}
}
}
view.columns = view.columns?.filter(x => schema[x])
return view
}

View File

@ -1,54 +1,53 @@
import _ from "lodash" import { FieldType, Table, ViewV2 } from "@budibase/types"
import { FieldType, Table, TableSchema, ViewV2 } from "@budibase/types"
import { generator } from "@budibase/backend-core/tests" import { generator } from "@budibase/backend-core/tests"
import { enrichSchema, syncSchema } from ".." import { enrichSchema } from ".."
describe("table sdk", () => { describe("table sdk", () => {
const basicTable: Table = {
_id: generator.guid(),
name: "TestTable",
type: "table",
schema: {
name: {
type: FieldType.STRING,
name: "name",
visible: true,
width: 80,
order: 2,
constraints: {
type: "string",
},
},
description: {
type: FieldType.STRING,
name: "description",
visible: true,
width: 200,
constraints: {
type: "string",
},
},
id: {
type: FieldType.NUMBER,
name: "id",
visible: true,
order: 1,
constraints: {
type: "number",
},
},
hiddenField: {
type: FieldType.STRING,
name: "hiddenField",
visible: false,
constraints: {
type: "string",
},
},
},
}
describe("enrichViewSchemas", () => { describe("enrichViewSchemas", () => {
const basicTable: Table = {
_id: generator.guid(),
name: "TestTable",
type: "table",
schema: {
name: {
type: FieldType.STRING,
name: "name",
visible: true,
width: 80,
order: 2,
constraints: {
type: "string",
},
},
description: {
type: FieldType.STRING,
name: "description",
visible: true,
width: 200,
constraints: {
type: "string",
},
},
id: {
type: FieldType.NUMBER,
name: "id",
visible: true,
order: 1,
constraints: {
type: "number",
},
},
hiddenField: {
type: FieldType.STRING,
name: "hiddenField",
visible: false,
constraints: {
type: "string",
},
},
},
}
it("should fetch the default schema if not overriden", async () => { it("should fetch the default schema if not overriden", async () => {
const tableId = basicTable._id! const tableId = basicTable._id!
const view: ViewV2 = { const view: ViewV2 = {
@ -281,294 +280,4 @@ describe("table sdk", () => {
) )
}) })
}) })
describe("syncSchema", () => {
const basicView: ViewV2 = {
version: 2,
id: generator.guid(),
name: generator.guid(),
tableId: basicTable._id!,
}
describe("view without schema", () => {
it("no table schema changes will not amend the view", () => {
const view: ViewV2 = {
...basicView,
columns: ["name", "id", "description"],
}
const result = syncSchema(
_.cloneDeep(view),
basicTable.schema,
undefined
)
expect(result).toEqual(view)
})
it("adding new columns will not change the view schema", () => {
const view: ViewV2 = {
...basicView,
columns: ["name", "id", "description"],
}
const newTableSchema = {
...basicTable.schema,
newField1: {
type: FieldType.STRING,
name: "newField1",
visible: true,
},
newField2: {
type: FieldType.NUMBER,
name: "newField2",
visible: false,
},
}
const result = syncSchema(_.cloneDeep(view), newTableSchema, undefined)
expect(result).toEqual({
...view,
schemaUI: undefined,
})
})
it("deleting columns will not change the view schema", () => {
const view: ViewV2 = {
...basicView,
columns: ["name", "id", "description"],
}
const { name, description, ...newTableSchema } = basicTable.schema
const result = syncSchema(_.cloneDeep(view), newTableSchema, undefined)
expect(result).toEqual({
...view,
columns: ["id"],
schemaUI: undefined,
})
})
it("renaming mapped columns will update the view column mapping", () => {
const view: ViewV2 = {
...basicView,
columns: ["name", "id", "description"],
}
const { description, ...newTableSchema } = {
...basicTable.schema,
updatedDescription: {
...basicTable.schema.description,
name: "updatedDescription",
},
} as TableSchema
const result = syncSchema(_.cloneDeep(view), newTableSchema, {
old: "description",
updated: "updatedDescription",
})
expect(result).toEqual({
...view,
columns: ["name", "id", "updatedDescription"],
schemaUI: undefined,
})
})
})
describe("view with schema", () => {
it("no table schema changes will not amend the view", () => {
const view: ViewV2 = {
...basicView,
columns: ["name", "id", "description"],
schemaUI: {
name: { visible: true, width: 100 },
id: { visible: true, width: 20 },
description: { visible: false },
hiddenField: { visible: false },
},
}
const result = syncSchema(
_.cloneDeep(view),
basicTable.schema,
undefined
)
expect(result).toEqual(view)
})
it("adding new columns will add them as not visible to the view", () => {
const view: ViewV2 = {
...basicView,
columns: ["name", "id", "description"],
schemaUI: {
name: { visible: true, width: 100 },
id: { visible: true, width: 20 },
description: { visible: false },
hiddenField: { visible: false },
},
}
const newTableSchema = {
...basicTable.schema,
newField1: {
type: FieldType.STRING,
name: "newField1",
visible: true,
},
newField2: {
type: FieldType.NUMBER,
name: "newField2",
visible: false,
},
}
const result = syncSchema(_.cloneDeep(view), newTableSchema, undefined)
expect(result).toEqual({
...view,
schemaUI: {
...view.schemaUI,
newField1: { visible: false },
newField2: { visible: false },
},
})
})
it("deleting columns will remove them from the UI", () => {
const view: ViewV2 = {
...basicView,
columns: ["name", "id", "description"],
schemaUI: {
name: { visible: true, width: 100 },
id: { visible: true, width: 20 },
description: { visible: false },
hiddenField: { visible: false },
},
}
const { name, description, ...newTableSchema } = basicTable.schema
const result = syncSchema(_.cloneDeep(view), newTableSchema, undefined)
expect(result).toEqual({
...view,
columns: ["id"],
schemaUI: {
...view.schemaUI,
name: undefined,
description: undefined,
},
})
})
it("can handle additions and deletions at the same them UI", () => {
const view: ViewV2 = {
...basicView,
columns: ["name", "id", "description"],
schemaUI: {
name: { visible: true, width: 100 },
id: { visible: true, width: 20 },
description: { visible: false },
hiddenField: { visible: false },
},
}
const { name, description, ...newTableSchema } = {
...basicTable.schema,
newField1: {
type: FieldType.STRING,
name: "newField1",
visible: true,
},
} as TableSchema
const result = syncSchema(_.cloneDeep(view), newTableSchema, undefined)
expect(result).toEqual({
...view,
columns: ["id"],
schemaUI: {
...view.schemaUI,
name: undefined,
description: undefined,
newField1: { visible: false },
},
})
})
it("renaming mapped columns will update the view column mapping and it's schema", () => {
const view: ViewV2 = {
...basicView,
columns: ["name", "id", "description"],
schemaUI: {
name: { visible: true },
id: { visible: true },
description: { visible: true, width: 150, icon: "ic-any" },
hiddenField: { visible: false },
},
}
const { description, ...newTableSchema } = {
...basicTable.schema,
updatedDescription: {
...basicTable.schema.description,
name: "updatedDescription",
},
} as TableSchema
const result = syncSchema(_.cloneDeep(view), newTableSchema, {
old: "description",
updated: "updatedDescription",
})
expect(result).toEqual({
...view,
columns: ["name", "id", "updatedDescription"],
schemaUI: {
...view.schemaUI,
description: undefined,
updatedDescription: { visible: true, width: 150, icon: "ic-any" },
},
})
})
it("changing no UI schema will not affect the view", () => {
const view: ViewV2 = {
...basicView,
columns: ["name", "id", "description"],
schemaUI: {
name: { visible: true, width: 100 },
id: { visible: true, width: 20 },
description: { visible: false },
hiddenField: { visible: false },
},
}
const result = syncSchema(
_.cloneDeep(view),
{
...basicTable.schema,
id: {
...basicTable.schema.id,
type: FieldType.NUMBER,
},
},
undefined
)
expect(result).toEqual(view)
})
it("changing table column UI fields will not affect the view schema", () => {
const view: ViewV2 = {
...basicView,
columns: ["name", "id", "description"],
schemaUI: {
name: { visible: true, width: 100 },
id: { visible: true, width: 20 },
description: { visible: false },
hiddenField: { visible: false },
},
}
const result = syncSchema(
_.cloneDeep(view),
{
...basicTable.schema,
id: {
...basicTable.schema.id,
visible: !basicTable.schema.id.visible,
},
},
undefined
)
expect(result).toEqual(view)
})
})
})
}) })

View File

@ -1,4 +1,4 @@
import { PatchRowRequest, SaveRowRequest, Row } from "@budibase/types" import { PatchRowRequest } from "@budibase/types"
import TestConfiguration from "../TestConfiguration" import TestConfiguration from "../TestConfiguration"
import { TestAPI } from "./base" import { TestAPI } from "./base"
@ -8,12 +8,12 @@ export class RowAPI extends TestAPI {
} }
get = async ( get = async (
sourceId: string, tableId: string,
rowId: string, rowId: string,
{ expectStatus } = { expectStatus: 200 } { expectStatus } = { expectStatus: 200 }
) => { ) => {
const request = this.request const request = this.request
.get(`/api/${sourceId}/rows/${rowId}`) .get(`/api/${tableId}/rows/${rowId}`)
.set(this.config.defaultHeaders()) .set(this.config.defaultHeaders())
.expect(expectStatus) .expect(expectStatus)
if (expectStatus !== 404) { if (expectStatus !== 404) {
@ -22,43 +22,16 @@ export class RowAPI extends TestAPI {
return request return request
} }
save = async (
sourceId: string,
row: SaveRowRequest,
{ expectStatus } = { expectStatus: 200 }
): Promise<Row> => {
const resp = await this.request
.post(`/api/${sourceId}/rows`)
.send(row)
.set(this.config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(expectStatus)
return resp.body as Row
}
patch = async ( patch = async (
sourceId: string, tableId: string,
row: PatchRowRequest, row: PatchRowRequest,
{ expectStatus } = { expectStatus: 200 } { expectStatus } = { expectStatus: 200 }
) => { ) => {
return this.request return this.request
.patch(`/api/${sourceId}/rows`) .patch(`/api/${tableId}/rows`)
.send(row) .send(row)
.set(this.config.defaultHeaders()) .set(this.config.defaultHeaders())
.expect("Content-Type", /json/) .expect("Content-Type", /json/)
.expect(expectStatus) .expect(expectStatus)
} }
delete = async (
sourceId: string,
rows: Row[],
{ expectStatus } = { expectStatus: 200 }
) => {
return this.request
.delete(`/api/${sourceId}/rows`)
.send({ rows })
.set(this.config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(expectStatus)
}
} }
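
The RowAPI helper above is normally reached through a TestConfiguration in the server test suite. A hedged sketch of the get/patch pair in use; the config.api.row accessor and the table/row ids are assumed to be set up by the surrounding test:

// inside a Jest test with `config: TestConfiguration` and a created table/row in scope
const res = await config.api.row.get(table._id!, rowId)
await config.api.row.patch(table._id!, {
  _id: rowId,
  _rev: res.body._rev,      // PatchRowRequest requires _id and _rev
  tableId: table._id!,
  name: "Updated name",
})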

View File

@ -1,8 +1,13 @@
import { import {
CreateViewRequest, CreateViewRequest,
SortOrder,
SortType,
UpdateViewRequest, UpdateViewRequest,
DeleteRowRequest,
PatchRowRequest,
PatchRowResponse,
Row,
ViewV2, ViewV2,
SearchViewRowRequest,
} from "@budibase/types" } from "@budibase/types"
import TestConfiguration from "../TestConfiguration" import TestConfiguration from "../TestConfiguration"
import { TestAPI } from "./base" import { TestAPI } from "./base"
@ -76,14 +81,75 @@ export class ViewV2API extends TestAPI {
search = async ( search = async (
viewId: string, viewId: string,
params?: SearchViewRowRequest, options?: {
sort: {
column: string
order?: SortOrder
type?: SortType
}
},
{ expectStatus } = { expectStatus: 200 } { expectStatus } = { expectStatus: 200 }
) => { ) => {
const qs: [string, any][] = []
if (options?.sort.column) {
qs.push(["sort_column", options.sort.column])
}
if (options?.sort.order) {
qs.push(["sort_order", options.sort.order])
}
if (options?.sort.type) {
qs.push(["sort_type", options.sort.type])
}
let url = `/api/v2/views/${viewId}/search`
if (qs.length) {
url += "?" + qs.map(q => q.join("=")).join("&")
}
return this.request return this.request
.post(`/api/v2/views/${viewId}/search`) .get(url)
.send(params)
.set(this.config.defaultHeaders()) .set(this.config.defaultHeaders())
.expect("Content-Type", /json/) .expect("Content-Type", /json/)
.expect(expectStatus) .expect(expectStatus)
} }
row = {
create: async (
viewId: string,
row: Row,
{ expectStatus } = { expectStatus: 200 }
): Promise<Row> => {
const result = await this.request
.post(`/api/v2/views/${viewId}/rows`)
.send(row)
.set(this.config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(expectStatus)
return result.body as Row
},
update: async (
viewId: string,
rowId: string,
row: PatchRowRequest,
{ expectStatus } = { expectStatus: 200 }
): Promise<PatchRowResponse> => {
const result = await this.request
.patch(`/api/v2/views/${viewId}/rows/${rowId}`)
.send(row)
.set(this.config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(expectStatus)
return result.body as PatchRowResponse
},
delete: async (
viewId: string,
body: DeleteRowRequest,
{ expectStatus } = { expectStatus: 200 }
): Promise<any> => {
const result = await this.request
.delete(`/api/v2/views/${viewId}/rows`)
.send(body)
.set(this.config.defaultHeaders())
.expect(expectStatus)
return result.body
},
}
} }
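
The query-string variant of the search helper above serialises its sort options onto the URL. A hedged usage sketch; the config.api.viewV2 accessor and the concrete sort values are assumptions, with SortOrder/SortType taken from @budibase/types:

// inside a test with `config: TestConfiguration` and a created view in scope
await config.api.viewV2.search(view.id, {
  sort: { column: "name", order: SortOrder.DESCENDING, type: SortType.STRING },
})
// expected request:
//   GET /api/v2/views/<viewId>/search?sort_column=name&sort_order=descending&sort_type=string
// The other side of the diff posts the same information to the same route as a
// SearchViewRowRequest body ({ sort, sortOrder, sortType, limit, bookmark, paginate }).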

View File

@ -11,12 +11,6 @@ export interface QueryEvent {
queryId: string queryId: string
environmentVariables?: Record<string, string> environmentVariables?: Record<string, string>
ctx?: any ctx?: any
schema?: {
[key: string]: {
name: string
type: string
}
}
} }
export interface QueryVariable { export interface QueryVariable {

View File

@ -8,7 +8,6 @@ import { context, cache, auth } from "@budibase/backend-core"
import { getGlobalIDFromUserMetadataID } from "../db/utils" import { getGlobalIDFromUserMetadataID } from "../db/utils"
import sdk from "../sdk" import sdk from "../sdk"
import { cloneDeep } from "lodash/fp" import { cloneDeep } from "lodash/fp"
import { SourceName } from "@budibase/types"
import { isSQL } from "../integrations/utils" import { isSQL } from "../integrations/utils"
import { interpolateSQL } from "../integrations/queries/sql" import { interpolateSQL } from "../integrations/queries/sql"
@ -29,7 +28,6 @@ class QueryRunner {
hasRerun: boolean hasRerun: boolean
hasRefreshedOAuth: boolean hasRefreshedOAuth: boolean
hasDynamicVariables: boolean hasDynamicVariables: boolean
schema: any
constructor(input: QueryEvent, flags = { noRecursiveQuery: false }) { constructor(input: QueryEvent, flags = { noRecursiveQuery: false }) {
this.datasource = input.datasource this.datasource = input.datasource
@ -39,7 +37,6 @@ class QueryRunner {
this.pagination = input.pagination this.pagination = input.pagination
this.transformer = input.transformer this.transformer = input.transformer
this.queryId = input.queryId this.queryId = input.queryId
this.schema = input.schema
this.noRecursiveQuery = flags.noRecursiveQuery this.noRecursiveQuery = flags.noRecursiveQuery
this.cachedVariables = [] this.cachedVariables = []
// Additional context items for enrichment // Additional context items for enrichment
@ -54,7 +51,7 @@ class QueryRunner {
} }
async execute(): Promise<any> { async execute(): Promise<any> {
let { datasource, fields, queryVerb, transformer, schema } = this let { datasource, fields, queryVerb, transformer } = this
let datasourceClone = cloneDeep(datasource) let datasourceClone = cloneDeep(datasource)
let fieldsClone = cloneDeep(fields) let fieldsClone = cloneDeep(fields)
@ -73,9 +70,6 @@ class QueryRunner {
const integration = new Integration(datasourceClone.config) const integration = new Integration(datasourceClone.config)
// define the type casting from the schema
integration.defineTypeCastingFromSchema?.(schema)
// pre-query, make sure datasource variables are added to parameters // pre-query, make sure datasource variables are added to parameters
const parameters = await this.addDatasourceVariables() const parameters = await this.addDatasourceVariables()
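
The schema handed to defineTypeCastingFromSchema above follows the { [column]: { name, type } } shape declared on QueryEvent and IntegrationBase further down. A hedged illustration with made-up column names:

// Illustrative payload only; real schemas come from the saved query definition.
const schema: { [key: string]: { name: string; type: string } } = {
  age: { name: "age", type: "number" },
  created_at: { name: "created_at", type: "datetime" },
}
// integration.defineTypeCastingFromSchema?.(schema)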

View File

@ -1,5 +1,5 @@
import { permissions, roles } from "@budibase/backend-core" import { permissions, roles } from "@budibase/backend-core"
import { DocumentType, VirtualDocumentType } from "../db/utils" import { DocumentType } from "../db/utils"
export const CURRENTLY_SUPPORTED_LEVELS: string[] = [ export const CURRENTLY_SUPPORTED_LEVELS: string[] = [
permissions.PermissionLevel.WRITE, permissions.PermissionLevel.WRITE,
@ -11,10 +11,9 @@ export function getPermissionType(resourceId: string) {
const docType = Object.values(DocumentType).filter(docType => const docType = Object.values(DocumentType).filter(docType =>
resourceId.startsWith(docType) resourceId.startsWith(docType)
)[0] )[0]
switch (docType as DocumentType | VirtualDocumentType) { switch (docType) {
case DocumentType.TABLE: case DocumentType.TABLE:
case DocumentType.ROW: case DocumentType.ROW:
case VirtualDocumentType.VIEW:
return permissions.PermissionType.TABLE return permissions.PermissionType.TABLE
case DocumentType.AUTOMATION: case DocumentType.AUTOMATION:
return permissions.PermissionType.AUTOMATION return permissions.PermissionType.AUTOMATION
@ -23,6 +22,9 @@ export function getPermissionType(resourceId: string) {
case DocumentType.QUERY: case DocumentType.QUERY:
case DocumentType.DATASOURCE: case DocumentType.DATASOURCE:
return permissions.PermissionType.QUERY return permissions.PermissionType.QUERY
default:
// views don't have an ID, will end up here
return permissions.PermissionType.VIEW
} }
} }
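
The two variants of getPermissionType above only disagree on views. A hedged note, assuming the snippet sits in the same module and that view resource ids carry the "view" prefix used by VirtualDocumentType:

// Left-hand variant: the id matches VirtualDocumentType.VIEW ("view"), so views
// are treated like tables and resolve to permissions.PermissionType.TABLE.
// Right-hand variant: nothing matches, so the default case resolves to
// permissions.PermissionType.VIEW (mirroring the "views don't have an ID" comment).
getPermissionType("view_123")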

View File

@ -32,18 +32,7 @@
"target": "build" "target": "build"
} }
] ]
},
"dev:builder": {
"dependsOn": [
{
"projects": [
"@budibase/types"
],
"target": "build"
}
]
} }
} }
} }
} }

View File

@ -1,11 +1,11 @@
import { import {
Datasource, Datasource,
FieldType, FieldType,
SortDirection,
SortType,
SearchFilter, SearchFilter,
SearchQuery, SearchQuery,
SearchQueryFields, SearchQueryFields,
SortDirection,
SortType,
} from "@budibase/types" } from "@budibase/types"
import { OperatorOptions, SqlNumberTypeRangeMap } from "./constants" import { OperatorOptions, SqlNumberTypeRangeMap } from "./constants"
import { deepGet } from "./helpers" import { deepGet } from "./helpers"
@ -138,7 +138,8 @@ export const buildLuceneQuery = (filter: SearchFilter[]) => {
} }
if (Array.isArray(filter)) { if (Array.isArray(filter)) {
filter.forEach(expression => { filter.forEach(expression => {
let { operator, field, type, value, externalType } = expression let { operator, field, type, value, externalType, onEmptyFilter } =
expression
const isHbs = const isHbs =
typeof value === "string" && (value.match(HBS_REGEX) || []).length > 0 typeof value === "string" && (value.match(HBS_REGEX) || []).length > 0
// Parse all values into correct types // Parse all values into correct types
@ -146,6 +147,10 @@ export const buildLuceneQuery = (filter: SearchFilter[]) => {
query.allOr = true query.allOr = true
return return
} }
if (onEmptyFilter) {
query.onEmptyFilter = onEmptyFilter
return
}
if ( if (
type === "datetime" && type === "datetime" &&
!isHbs && !isHbs &&
@ -203,7 +208,7 @@ export const buildLuceneQuery = (filter: SearchFilter[]) => {
) { ) {
query.range[field].high = value query.range[field].high = value
} }
} else if (query[operator]) { } else if (query[operator] && operator !== "onEmptyFilter") {
if (type === "boolean") { if (type === "boolean") {
// Transform boolean filters to cope with null. // Transform boolean filters to cope with null.
// "equals false" needs to be "not equals true" // "equals false" needs to be "not equals true"
@ -418,7 +423,7 @@ export const hasFilters = (query?: SearchQuery) => {
if (!query) { if (!query) {
return false return false
} }
const skipped = ["allOr"] const skipped = ["allOr", "onEmptyFilter"]
for (let [key, value] of Object.entries(query)) { for (let [key, value] of Object.entries(query)) {
if (skipped.includes(key) || typeof value !== "object") { if (skipped.includes(key) || typeof value !== "object") {
continue continue
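
The hunks above thread onEmptyFilter through buildLuceneQuery and exclude it from hasFilters. A hedged sketch of the expected behaviour; the import location is an assumption, and the empty field/value on the onEmptyFilter expression is illustrative:

import { buildLuceneQuery, hasFilters } from "./filters"   // path assumed
import { EmptyFilterOption, FieldType, SearchFilter } from "@budibase/types"

const filters: SearchFilter[] = [
  {
    operator: "onEmptyFilter",
    onEmptyFilter: EmptyFilterOption.RETURN_NONE,
    field: "",
    value: null,
  },
  { operator: "string", field: "name", type: FieldType.STRING, value: "budi" },
]
const query = buildLuceneQuery(filters)
// query.onEmptyFilter -> "none"; the name filter is still added under query.string,
// and hasFilters() skips the onEmptyFilter key when deciding whether any real
// filters are present.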

View File

@ -1,7 +1,7 @@
{ {
"compilerOptions": { "compilerOptions": {
"target": "es6", "target": "es6",
"module": "commonjs", "moduleResolution": "node",
"lib": ["es2020"], "lib": ["es2020"],
"strict": true, "strict": true,
"noImplicitAny": true, "noImplicitAny": true,

View File

@ -1,8 +1,5 @@
import { SearchParams } from "../../../sdk"
import { Row } from "../../../documents" import { Row } from "../../../documents"
export interface SaveRowRequest extends Row {}
export interface PatchRowRequest extends Row { export interface PatchRowRequest extends Row {
_id: string _id: string
_rev: string _rev: string
@ -11,14 +8,6 @@ export interface PatchRowRequest extends Row {
export interface PatchRowResponse extends Row {} export interface PatchRowResponse extends Row {}
export interface SearchRowRequest extends Omit<SearchParams, "tableId"> {} export interface SearchResponse {
export interface SearchViewRowRequest
extends Pick<
SearchRowRequest,
"sort" | "sortOrder" | "sortType" | "limit" | "bookmark" | "paginate"
> {}
export interface SearchRowResponse {
rows: any[] rows: any[]
} }

View File

@ -1,10 +1,4 @@
import { import { Table, TableSchema, View, ViewV2 } from "../../../documents"
Table,
TableRequest,
TableSchema,
View,
ViewV2,
} from "../../../documents"
interface ViewV2Response extends ViewV2 { interface ViewV2Response extends ViewV2 {
schema: TableSchema schema: TableSchema
@ -17,7 +11,3 @@ export interface TableResponse extends Table {
} }
export type FetchTablesResponse = TableResponse[] export type FetchTablesResponse = TableResponse[]
export interface SaveTableRequest extends TableRequest {}
export type SaveTableResponse = Table

View File

@ -1,7 +1,9 @@
import { FieldType } from "../../documents" import { FieldType } from "../../documents"
import { EmptyFilterOption } from "../../sdk"
export type SearchFilter = { export type SearchFilter = {
operator: keyof SearchQuery operator: keyof SearchQuery
onEmptyFilter?: EmptyFilterOption
field: string field: string
type?: FieldType type?: FieldType
value: any value: any
@ -10,6 +12,7 @@ export type SearchFilter = {
export type SearchQuery = { export type SearchQuery = {
allOr?: boolean allOr?: boolean
onEmptyFilter?: EmptyFilterOption
string?: { string?: {
[key: string]: string [key: string]: string
} }
@ -48,4 +51,4 @@ export type SearchQuery = {
} }
} }
export type SearchQueryFields = Omit<SearchQuery, "allOr"> export type SearchQueryFields = Omit<SearchQuery, "allOr" | "onEmptyFilter">

View File

@ -1,5 +1,6 @@
import { SearchFilter, SortOrder, SortType } from "../../api" import { SortOrder, SortType } from "../../api"
import { UIFieldMetadata } from "./table" import { SearchFilters } from "../../sdk"
import { TableSchema, UIFieldMetadata } from "./table"
export interface View { export interface View {
name: string name: string
@ -19,7 +20,7 @@ export interface ViewV2 {
name: string name: string
primaryDisplay?: string primaryDisplay?: string
tableId: string tableId: string
query?: SearchFilter[] query?: SearchFilters
sort?: { sort?: {
field: string field: string
order?: SortOrder order?: SortOrder

View File

@ -39,12 +39,6 @@ export enum DocumentType {
AUDIT_LOG = "al", AUDIT_LOG = "al",
} }
// these documents don't really exist, they are part of other
// documents or enriched into existence as part of get requests
export enum VirtualDocumentType {
VIEW = "view",
}
export interface Document { export interface Document {
_id?: string _id?: string
_rev?: string _rev?: string

View File

@ -166,12 +166,6 @@ export interface IntegrationBase {
delete?(query: any): Promise<any[] | any> delete?(query: any): Promise<any[] | any>
testConnection?(): Promise<ConnectionInfo> testConnection?(): Promise<ConnectionInfo>
getExternalSchema?(): Promise<string> getExternalSchema?(): Promise<string>
defineTypeCastingFromSchema?(schema: {
[key: string]: {
name: string
type: string
}
}): void
} }
export interface DatasourcePlus extends IntegrationBase { export interface DatasourcePlus extends IntegrationBase {

View File

@ -19,4 +19,3 @@ export * from "./user"
export * from "./cli" export * from "./cli"
export * from "./websocket" export * from "./websocket"
export * from "./permissions" export * from "./permissions"
export * from "./row"

View File

@ -14,5 +14,6 @@ export enum PermissionType {
WEBHOOK = "webhook", WEBHOOK = "webhook",
BUILDER = "builder", BUILDER = "builder",
GLOBAL_BUILDER = "globalBuilder", GLOBAL_BUILDER = "globalBuilder",
VIEW = "view",
QUERY = "query", QUERY = "query",
} }

View File

@ -1,16 +0,0 @@
import { SortOrder, SortType } from "../api"
import { SearchFilters } from "./search"
export interface SearchParams {
tableId: string
paginate?: boolean
query: SearchFilters
bookmark?: string
limit?: number
sort?: string
sortOrder?: SortOrder
sortType?: SortType
version?: string
disableEscaping?: boolean
fields?: string[]
}

View File

@ -4,6 +4,7 @@ import { SortType } from "../api"
export interface SearchFilters { export interface SearchFilters {
allOr?: boolean allOr?: boolean
onEmptyFilter?: EmptyFilterOption
string?: { string?: {
[key: string]: string [key: string]: string
} }
@ -99,3 +100,8 @@ export interface SqlQuery {
sql: string sql: string
bindings?: string[] bindings?: string[]
} }
export enum EmptyFilterOption {
RETURN_ALL = "all",
RETURN_NONE = "none",
}
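
Taken together with the SearchFilters change above, a hedged example of a query that asks the search to return no rows when its only filter value is effectively empty; names and values are illustrative:

import { EmptyFilterOption, SearchFilters } from "@budibase/types"

const filters: SearchFilters = {
  onEmptyFilter: EmptyFilterOption.RETURN_NONE,   // "none" short-circuits to an empty result
  string: { name: "" },
}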

View File

@ -1,4 +1,4 @@
FROM node:18-alpine FROM node:14-alpine
LABEL com.centurylinklabs.watchtower.lifecycle.pre-check="scripts/watchtower-hooks/pre-check.sh" LABEL com.centurylinklabs.watchtower.lifecycle.pre-check="scripts/watchtower-hooks/pre-check.sh"
LABEL com.centurylinklabs.watchtower.lifecycle.pre-update="scripts/watchtower-hooks/pre-update.sh" LABEL com.centurylinklabs.watchtower.lifecycle.pre-update="scripts/watchtower-hooks/pre-update.sh"

View File

@ -28,8 +28,8 @@ describe("datasource validators", () => {
8000 8000
)}`, )}`,
} }
env._set("AWS_ACCESS_KEY_ID", "mockedkey") env._set("AWS_ACCESS_KEY_ID", "mocked_key")
env._set("AWS_SECRET_ACCESS_KEY", "mockedsecret") env._set("AWS_SECRET_ACCESS_KEY", "mocked_secret")
}) })
it("test valid connection string", async () => { it("test valid connection string", async () => {

View File

@ -43,6 +43,7 @@ function runBuild(entry, outfile) {
TsconfigPathsPlugin({ tsconfig: tsconfigPathPluginContent }), TsconfigPathsPlugin({ tsconfig: tsconfigPathPluginContent }),
nodeExternalsPlugin(), nodeExternalsPlugin(),
], ],
target: "node14",
preserveSymlinks: true, preserveSymlinks: true,
loader: { loader: {
".svelte": "copy", ".svelte": "copy",

View File

@ -10283,7 +10283,7 @@ denque@^1.1.0:
resolved "https://registry.yarnpkg.com/denque/-/denque-1.5.1.tgz#07f670e29c9a78f8faecb2566a1e2c11929c5cbf" resolved "https://registry.yarnpkg.com/denque/-/denque-1.5.1.tgz#07f670e29c9a78f8faecb2566a1e2c11929c5cbf"
integrity sha512-XwE+iZ4D6ZUB7mfYRMb5wByE8L74HCn30FBN7sWnXksWc1LO1bPDl67pBR9o/kC4z/xSNAwkMYcGgqDV3BE3Hw== integrity sha512-XwE+iZ4D6ZUB7mfYRMb5wByE8L74HCn30FBN7sWnXksWc1LO1bPDl67pBR9o/kC4z/xSNAwkMYcGgqDV3BE3Hw==
denque@^2.1.0: denque@^2.0.1, denque@^2.1.0:
version "2.1.0" version "2.1.0"
resolved "https://registry.yarnpkg.com/denque/-/denque-2.1.0.tgz#e93e1a6569fb5e66f16a3c2a2964617d349d6ab1" resolved "https://registry.yarnpkg.com/denque/-/denque-2.1.0.tgz#e93e1a6569fb5e66f16a3c2a2964617d349d6ab1"
integrity sha512-HVQE3AAb/pxF8fQAoiqpvg9i3evqug3hoiwakOyZAwJm+6vZehbkYXZ0l4JxS+I3QxM97v5aaRNhj8v5oBhekw== integrity sha512-HVQE3AAb/pxF8fQAoiqpvg9i3evqug3hoiwakOyZAwJm+6vZehbkYXZ0l4JxS+I3QxM97v5aaRNhj8v5oBhekw==
@ -16986,11 +16986,6 @@ long@^5.0.0:
resolved "https://registry.yarnpkg.com/long/-/long-5.2.1.tgz#e27595d0083d103d2fa2c20c7699f8e0c92b897f" resolved "https://registry.yarnpkg.com/long/-/long-5.2.1.tgz#e27595d0083d103d2fa2c20c7699f8e0c92b897f"
integrity sha512-GKSNGeNAtw8IryjjkhZxuKB3JzlcLTwjtiQCHKvqQet81I93kXslhDQruGI/QsddO83mcDToBVy7GqGS/zYf/A== integrity sha512-GKSNGeNAtw8IryjjkhZxuKB3JzlcLTwjtiQCHKvqQet81I93kXslhDQruGI/QsddO83mcDToBVy7GqGS/zYf/A==
long@^5.2.1:
version "5.2.3"
resolved "https://registry.yarnpkg.com/long/-/long-5.2.3.tgz#a3ba97f3877cf1d778eccbcb048525ebb77499e1"
integrity sha512-lcHwpNoggQTObv5apGNCTdJrO69eHOZMi4BNC+rTLER8iHAqGrUVeLh/irVIM7zTw2bOXA8T6uNPeujwOLg/2Q==
lookpath@1.1.0: lookpath@1.1.0:
version "1.1.0" version "1.1.0"
resolved "https://registry.yarnpkg.com/lookpath/-/lookpath-1.1.0.tgz#932d68371a2f0b4a5644f03d6a2b4728edba96d2" resolved "https://registry.yarnpkg.com/lookpath/-/lookpath-1.1.0.tgz#932d68371a2f0b4a5644f03d6a2b4728edba96d2"
@ -17057,11 +17052,6 @@ lru-cache@^7.4.4, lru-cache@^7.5.1, lru-cache@^7.7.1:
resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-7.18.3.tgz#f793896e0fd0e954a59dfdd82f0773808df6aa89" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-7.18.3.tgz#f793896e0fd0e954a59dfdd82f0773808df6aa89"
integrity sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA== integrity sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==
lru-cache@^8.0.0:
version "8.0.5"
resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-8.0.5.tgz#983fe337f3e176667f8e567cfcce7cb064ea214e"
integrity sha512-MhWWlVnuab1RG5/zMRRcVGXZLCXrZTgfwMikgzCegsPnG62yDQo5JnqKkrK4jO5iKqDAZGItAqN5CtKBCBWRUA==
lru-cache@^9.0.0: lru-cache@^9.0.0:
version "9.0.1" version "9.0.1"
resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-9.0.1.tgz#ac061ed291f8b9adaca2b085534bb1d3b61bef83" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-9.0.1.tgz#ac061ed291f8b9adaca2b085534bb1d3b61bef83"
@ -17962,17 +17952,17 @@ mute-stream@~1.0.0:
resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-1.0.0.tgz#e31bd9fe62f0aed23520aa4324ea6671531e013e" resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-1.0.0.tgz#e31bd9fe62f0aed23520aa4324ea6671531e013e"
integrity sha512-avsJQhyd+680gKXyG/sQc0nXaC6rBkPOfyHYcFb9+hdkqQkR9bdnkJ0AMZhke0oesPqIO+mFFJ+IdBc7mst4IA== integrity sha512-avsJQhyd+680gKXyG/sQc0nXaC6rBkPOfyHYcFb9+hdkqQkR9bdnkJ0AMZhke0oesPqIO+mFFJ+IdBc7mst4IA==
mysql2@3.5.2: mysql2@2.3.3:
version "3.5.2" version "2.3.3"
resolved "https://registry.yarnpkg.com/mysql2/-/mysql2-3.5.2.tgz#a06050e1514e9ac15711a8b883ffd51cb44b2dc8" resolved "https://registry.yarnpkg.com/mysql2/-/mysql2-2.3.3.tgz#944f3deca4b16629052ff8614fbf89d5552545a0"
integrity sha512-cptobmhYkYeTBIFp2c0piw2+gElpioga1rUw5UidHvo8yaHijMZoo8A3zyBVoo/K71f7ZFvrShA9iMIy9dCzCA== integrity sha512-wxJUev6LgMSgACDkb/InIFxDprRa6T95+VEoR+xPvtngtccNH2dGjEB/fVZ8yg1gWv1510c9CvXuJHi5zUm0ZA==
dependencies: dependencies:
denque "^2.1.0" denque "^2.0.1"
generate-function "^2.3.1" generate-function "^2.3.1"
iconv-lite "^0.6.3" iconv-lite "^0.6.3"
long "^5.2.1" long "^4.0.0"
lru-cache "^8.0.0" lru-cache "^6.0.0"
named-placeholders "^1.1.3" named-placeholders "^1.1.2"
seq-queue "^0.0.5" seq-queue "^0.0.5"
sqlstring "^2.3.2" sqlstring "^2.3.2"
@ -17985,7 +17975,7 @@ mz@^2.4.0, mz@^2.7.0:
object-assign "^4.0.1" object-assign "^4.0.1"
thenify-all "^1.0.0" thenify-all "^1.0.0"
named-placeholders@^1.1.3: named-placeholders@^1.1.2:
version "1.1.3" version "1.1.3"
resolved "https://registry.yarnpkg.com/named-placeholders/-/named-placeholders-1.1.3.tgz#df595799a36654da55dda6152ba7a137ad1d9351" resolved "https://registry.yarnpkg.com/named-placeholders/-/named-placeholders-1.1.3.tgz#df595799a36654da55dda6152ba7a137ad1d9351"
integrity sha512-eLoBxg6wE/rZkJPhU/xRX1WTpkFEwDJEN96oxFrTsqBdbT5ec295Q+CoHrL9IT0DipqKhmGcaZmwOt8OON5x1w== integrity sha512-eLoBxg6wE/rZkJPhU/xRX1WTpkFEwDJEN96oxFrTsqBdbT5ec295Q+CoHrL9IT0DipqKhmGcaZmwOt8OON5x1w==