Merge branch 'develop' of github.com:Budibase/budibase into fix/test-flakiness

Author: mike12345567
Date: 2023-08-18 14:35:47 +01:00
Commit: f154e14414
68 changed files with 1301 additions and 941 deletions

View File

@ -18,6 +18,8 @@ env:
BRANCH: ${{ github.event.pull_request.head.ref }}
BASE_BRANCH: ${{ github.event.pull_request.base.ref}}
PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
NX_BASE_BRANCH: origin/${{ github.base_ref }}
USE_NX_AFFECTED: ${{ github.event_name == 'pull_request' && github.base_ref != 'master'}}
jobs:
lint:
@ -25,20 +27,20 @@ jobs:
steps:
- name: Checkout repo and submodules
uses: actions/checkout@v3
if: github.repository == 'Budibase/budibase'
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase'
with:
submodules: true
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
- name: Checkout repo only
uses: actions/checkout@v3
if: github.repository != 'Budibase/budibase'
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase'
- name: Use Node.js 14.x
- name: Use Node.js 18.x
uses: actions/setup-node@v3
with:
node-version: 14.x
node-version: 18.x
cache: "yarn"
- run: yarn
- run: yarn --frozen-lockfile
- run: yarn lint
build:
@ -46,45 +48,66 @@ jobs:
steps:
- name: Checkout repo and submodules
uses: actions/checkout@v3
if: github.repository == 'Budibase/budibase'
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase'
with:
submodules: true
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
fetch-depth: 0
- name: Checkout repo only
uses: actions/checkout@v3
if: github.repository != 'Budibase/budibase'
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase'
with:
fetch-depth: 0
- name: Use Node.js 14.x
- name: Use Node.js 18.x
uses: actions/setup-node@v3
with:
node-version: 14.x
node-version: 18.x
cache: "yarn"
- run: yarn
- run: yarn --frozen-lockfile
# Run build all the projects
- run: yarn build
- name: Build
run: |
yarn build
# Check the types of the projects built via esbuild
- run: yarn check:types
- name: Check types
run: |
if ${{ env.USE_NX_AFFECTED }}; then
yarn check:types --since=${{ env.NX_BASE_BRANCH }}
else
yarn check:types
fi
test-libraries:
runs-on: ubuntu-latest
steps:
- name: Checkout repo and submodules
uses: actions/checkout@v3
if: github.repository == 'Budibase/budibase'
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase'
with:
submodules: true
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
fetch-depth: 0
- name: Checkout repo only
uses: actions/checkout@v3
if: github.repository != 'Budibase/budibase'
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase'
with:
fetch-depth: 0
- name: Use Node.js 14.x
- name: Use Node.js 18.x
uses: actions/setup-node@v3
with:
node-version: 14.x
node-version: 18.x
cache: "yarn"
- run: yarn
- run: yarn test --ignore=@budibase/worker --ignore=@budibase/server --ignore=@budibase/pro
- run: yarn --frozen-lockfile
- name: Test
run: |
if ${{ env.USE_NX_AFFECTED }}; then
yarn test --ignore=@budibase/worker --ignore=@budibase/server --ignore=@budibase/pro --since=${{ env.NX_BASE_BRANCH }}
else
yarn test --ignore=@budibase/worker --ignore=@budibase/server --ignore=@budibase/pro
fi
- uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }} # not required for public repos
@ -96,21 +119,31 @@ jobs:
steps:
- name: Checkout repo and submodules
uses: actions/checkout@v3
if: github.repository == 'Budibase/budibase'
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase'
with:
submodules: true
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
fetch-depth: 0
- name: Checkout repo only
uses: actions/checkout@v3
if: github.repository != 'Budibase/budibase'
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase'
with:
fetch-depth: 0
- name: Use Node.js 14.x
- name: Use Node.js 18.x
uses: actions/setup-node@v3
with:
node-version: 14.x
node-version: 18.x
cache: "yarn"
- run: yarn
- run: yarn test --scope=@budibase/worker --scope=@budibase/server
- run: yarn --frozen-lockfile
- name: Test worker and server
run: |
if ${{ env.USE_NX_AFFECTED }}; then
yarn test --scope=@budibase/worker --scope=@budibase/server --since=${{ env.NX_BASE_BRANCH }}
else
yarn test --scope=@budibase/worker --scope=@budibase/server
fi
- uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN || github.token }} # not required for public repos
@ -119,42 +152,49 @@ jobs:
test-pro:
runs-on: ubuntu-latest
if: github.repository == 'Budibase/budibase'
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase'
steps:
- name: Checkout repo and submodules
uses: actions/checkout@v3
with:
submodules: true
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
fetch-depth: 0
- name: Use Node.js 14.x
- name: Use Node.js 18.x
uses: actions/setup-node@v3
with:
node-version: 14.x
node-version: 18.x
cache: "yarn"
- run: yarn
- run: yarn test --scope=@budibase/pro
- run: yarn --frozen-lockfile
- name: Test
run: |
if ${{ env.USE_NX_AFFECTED }}; then
yarn test --scope=@budibase/pro --since=${{ env.NX_BASE_BRANCH }}
else
yarn test --scope=@budibase/pro
fi
integration-test:
runs-on: ubuntu-latest
steps:
- name: Checkout repo and submodules
uses: actions/checkout@v3
if: github.repository == 'Budibase/budibase'
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase'
with:
submodules: true
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
- name: Checkout repo only
uses: actions/checkout@v3
if: github.repository != 'Budibase/budibase'
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase'
- name: Use Node.js 14.x
- name: Use Node.js 18.x
uses: actions/setup-node@v3
with:
node-version: 14.x
node-version: 18.x
cache: "yarn"
- run: yarn
- run: yarn build --projects=@budibase/server,@budibase/worker,@budibase/client
- run: yarn --frozen-lockfile
- run: yarn build --scope @budibase/server --scope @budibase/worker --scope @budibase/client
- name: Run tests
run: |
cd qa-core
@ -166,13 +206,12 @@ jobs:
check-pro-submodule:
runs-on: ubuntu-latest
if: github.repository == 'Budibase/budibase'
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase'
steps:
- name: Checkout repo and submodules
uses: actions/checkout@v3
with:
submodules: true
fetch-depth: 0
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
- name: Check pro commit
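The new NX_BASE_BRANCH / USE_NX_AFFECTED env vars gate whether check:types and the test jobs run only against Nx-affected packages, and the flag is only true for pull requests that do not target master. A minimal TypeScript sketch of that decision (GITHUB_EVENT_NAME and GITHUB_BASE_REF are the standard Actions-provided variables; the helper name is illustrative, not part of this change):

    // Sketch: build the test command the same way the workflow's
    // `if ${{ env.USE_NX_AFFECTED }}` branches do.
    function buildTestCommand(env: NodeJS.ProcessEnv): string {
      const useNxAffected =
        env.GITHUB_EVENT_NAME === "pull_request" && env.GITHUB_BASE_REF !== "master"
      const nxBaseBranch = `origin/${env.GITHUB_BASE_REF}`
      return useNxAffected ? `yarn test --since=${nxBaseBranch}` : "yarn test"
    }

    // e.g. on a PR into develop:
    // buildTestCommand(process.env) -> "yarn test --since=origin/develop"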

View File

@ -0,0 +1,29 @@
name: check_unreleased_changes
on:
pull_request:
branches:
- master
jobs:
check_unreleased:
runs-on: ubuntu-latest
steps:
- name: Check for unreleased changes
env:
REPO: "Budibase/budibase"
TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
RELEASE_TIMESTAMP=$(curl -s -H "Authorization: token $TOKEN" \
"https://api.github.com/repos/$REPO/releases/latest" | \
jq -r .published_at)
COMMIT_TIMESTAMP=$(curl -s -H "Authorization: token $TOKEN" \
"https://api.github.com/repos/$REPO/commits/master" | \
jq -r .commit.committer.date)
RELEASE_SECONDS=$(date --date="$RELEASE_TIMESTAMP" "+%s")
COMMIT_SECONDS=$(date --date="$COMMIT_TIMESTAMP" "+%s")
if (( COMMIT_SECONDS > RELEASE_SECONDS )); then
echo "There are unreleased changes. Please release these changes before merging."
exit 1
fi
echo "No unreleased changes detected."
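This step compares the publish time of the latest GitHub release with the timestamp of the newest commit on master, and fails the pull request if master is ahead of the release. Roughly the same check expressed in TypeScript (illustrative only; the workflow itself uses curl and jq, and GITHUB_TOKEN is the standard Actions secret):

    // Sketch: true if master has commits newer than the latest published release.
    async function hasUnreleasedChanges(repo = "Budibase/budibase"): Promise<boolean> {
      const headers = { Authorization: `token ${process.env.GITHUB_TOKEN}` }
      const release = await (
        await fetch(`https://api.github.com/repos/${repo}/releases/latest`, { headers })
      ).json()
      const commit = await (
        await fetch(`https://api.github.com/repos/${repo}/commits/master`, { headers })
      ).json()
      const releasedAt = new Date(release.published_at).getTime()
      const committedAt = new Date(commit.commit.committer.date).getTime()
      return committedAt > releasedAt
    }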

View File

@ -44,7 +44,7 @@ jobs:
- uses: actions/setup-node@v1
with:
node-version: 14.x
node-version: 18.x
- run: yarn install --frozen-lockfile
- name: Update versions

.nvmrc
View File

@ -1 +1 @@
v14.20.1
v18.17.0

View File

@ -1,3 +1,3 @@
nodejs 14.21.3
nodejs 18.17.0
python 3.10.0
yarn 1.22.19

.vscode/launch.json vendored
View File

@ -1,3 +1,4 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.

View File

@ -90,7 +90,7 @@ Component libraries are collections of components as well as the definition of t
#### 1. Prerequisites
- NodeJS version `14.x.x`
- NodeJS version `18.x.x`
- Python version `3.x`
### Using asdf (recommended)

View File

@ -1,7 +1,7 @@
FROM node:14-slim as build
FROM node:18-slim as build
# install node-gyp dependencies
RUN apt-get update && apt-get upgrade -y && apt-get install -y --no-install-recommends apt-utils cron g++ make python
RUN apt-get update && apt-get upgrade -y && apt-get install -y --no-install-recommends apt-utils cron g++ make python3
# add pin script
WORKDIR /

View File

@ -1,5 +1,5 @@
{
"version": "2.9.30-alpha.1",
"version": "2.9.30-alpha.2",
"npmClient": "yarn",
"packages": [
"packages/*"

View File

@ -34,7 +34,7 @@
"preinstall": "node scripts/syncProPackage.js",
"setup": "git config submodule.recurse true && git submodule update && node ./hosting/scripts/setup.js && yarn && yarn build && yarn dev",
"bootstrap": "./scripts/link-dependencies.sh && echo '***BOOTSTRAP ONLY REQUIRED FOR USE WITH ACCOUNT PORTAL***'",
"build": "yarn nx run-many -t=build",
"build": "lerna run build --stream",
"build:dev": "lerna run --stream prebuild && yarn nx run-many --target=build --output-style=dynamic --watch --preserveWatchOutput",
"check:types": "lerna run check:types",
"backend:bootstrap": "./scripts/scopeBackend.sh && yarn run bootstrap",
@ -109,7 +109,7 @@
"@budibase/types": "0.0.0"
},
"engines": {
"node": ">=14.0.0 <15.0.0"
"node": ">=18.0.0 <19.0.0"
},
"dependencies": {}
}

View File

@ -78,7 +78,6 @@ export const BUILTIN_PERMISSIONS = {
permissions: [
new Permission(PermissionType.QUERY, PermissionLevel.READ),
new Permission(PermissionType.TABLE, PermissionLevel.READ),
new Permission(PermissionType.VIEW, PermissionLevel.READ),
],
},
WRITE: {
@ -87,7 +86,6 @@ export const BUILTIN_PERMISSIONS = {
permissions: [
new Permission(PermissionType.QUERY, PermissionLevel.WRITE),
new Permission(PermissionType.TABLE, PermissionLevel.WRITE),
new Permission(PermissionType.VIEW, PermissionLevel.READ),
new Permission(PermissionType.AUTOMATION, PermissionLevel.EXECUTE),
],
},
@ -98,7 +96,6 @@ export const BUILTIN_PERMISSIONS = {
new Permission(PermissionType.TABLE, PermissionLevel.WRITE),
new Permission(PermissionType.USER, PermissionLevel.READ),
new Permission(PermissionType.AUTOMATION, PermissionLevel.EXECUTE),
new Permission(PermissionType.VIEW, PermissionLevel.READ),
new Permission(PermissionType.WEBHOOK, PermissionLevel.READ),
],
},
@ -109,7 +106,6 @@ export const BUILTIN_PERMISSIONS = {
new Permission(PermissionType.TABLE, PermissionLevel.ADMIN),
new Permission(PermissionType.USER, PermissionLevel.ADMIN),
new Permission(PermissionType.AUTOMATION, PermissionLevel.ADMIN),
new Permission(PermissionType.VIEW, PermissionLevel.ADMIN),
new Permission(PermissionType.WEBHOOK, PermissionLevel.READ),
new Permission(PermissionType.QUERY, PermissionLevel.ADMIN),
],

View File

@ -1,10 +1,12 @@
<script>
import { Select, Label, Stepper } from "@budibase/bbui"
import { Select, Label } from "@budibase/bbui"
import { currentAsset, store } from "builderStore"
import { getActionProviderComponents } from "builderStore/dataBinding"
import { onMount } from "svelte"
import DrawerBindableInput from "components/common/bindings/DrawerBindableInput.svelte"
export let parameters
export let bindings = []
$: actionProviders = getActionProviderComponents(
$currentAsset,
@ -51,7 +53,11 @@
<Select bind:value={parameters.type} options={typeOptions} />
{#if parameters.type === "specific"}
<Label small>Number</Label>
<Stepper bind:value={parameters.number} />
<DrawerBindableInput
{bindings}
value={parameters.number}
on:change={e => (parameters.number = e.detail)}
/>
{/if}
</div>

View File

@ -17,7 +17,7 @@
import { generate } from "shortid"
import { LuceneUtils, Constants } from "@budibase/frontend-core"
import { getFields } from "helpers/searchFields"
import { createEventDispatcher } from "svelte"
import { createEventDispatcher, onMount } from "svelte"
export let schemaFields
export let filters = []
@ -70,6 +70,15 @@
})
}
onMount(() => {
parseFilters(filters)
rawFilters.forEach(filter => {
filter.type =
schemaFields.find(field => field.name === filter.field)?.type ||
filter.type
})
})
// Add field key prefixes and a special metadata filter object to indicate
// how to handle filter behaviour
const enrichFilters = (rawFilters, matchAny, onEmptyFilter) => {

View File

@ -2408,6 +2408,13 @@
"label": "Disabled",
"key": "disabled",
"defaultValue": false
},
{
"type": "text",
"label": "Initial form step",
"key": "initialFormStep",
"defaultValue": 1
}
],
"context": [
@ -2445,6 +2452,7 @@
"name": "Form Step",
"icon": "AssetsAdded",
"hasChildren": true,
"requiredAncestors": ["form"],
"illegalChildren": ["section", "form", "formstep", "formblock"],
"styles": ["size"],
"size": {
@ -2464,6 +2472,7 @@
"fieldgroup": {
"name": "Field Group",
"icon": "Group",
"requiredAncestors": ["form"],
"illegalChildren": ["section"],
"styles": ["size"],
"hasChildren": true,

View File

@ -9,6 +9,7 @@
export let size
export let disabled = false
export let actionType = "Create"
export let initialFormStep = 1
// Not exposed as a builder setting. Used internally to disable validation
// for fields rendered in things like search blocks.
@ -21,10 +22,18 @@
const context = getContext("context")
const { API, fetchDatasourceSchema } = getContext("sdk")
const getInitialFormStep = () => {
const parsedFormStep = parseInt(initialFormStep)
if (isNaN(parsedFormStep)) {
return 1
}
return parsedFormStep
}
let loaded = false
let schema
let table
let currentStep = writable(1)
let currentStep = writable(getInitialFormStep())
$: fetchSchema(dataSource)
$: schemaKey = generateSchemaKey(schema)

View File

@ -250,7 +250,7 @@
} else if (type === "first") {
currentStep.set(1)
} else if (type === "specific" && number && !isNaN(number)) {
currentStep.set(number)
currentStep.set(parseInt(number))
}
},
setStep: step => {

View File

@ -1,4 +1,4 @@
FROM node:14-slim
FROM node:18-slim
LABEL com.centurylinklabs.watchtower.lifecycle.pre-check="scripts/watchtower-hooks/pre-check.sh"
LABEL com.centurylinklabs.watchtower.lifecycle.pre-update="scripts/watchtower-hooks/pre-update.sh"
@ -18,7 +18,7 @@ ENV TOP_LEVEL_PATH=/
# handle node-gyp
RUN apt-get update \
&& apt-get install -y --no-install-recommends g++ make python
&& apt-get install -y --no-install-recommends g++ make python3
RUN yarn global add pm2
# Install client for oracle datasource

View File

@ -100,7 +100,7 @@
"memorystream": "0.3.1",
"mongodb": "5.7",
"mssql": "9.1.1",
"mysql2": "2.3.3",
"mysql2": "3.5.2",
"node-fetch": "2.6.7",
"object-sizeof": "2.6.1",
"open": "8.4.0",

View File

@ -5,8 +5,9 @@ if [[ -n $CI ]]
then
# --runInBand performs better in ci where resources are limited
export NODE_OPTIONS="--max-old-space-size=4096"
echo "jest --coverage --runInBand --forceExit --bail"
jest --coverage --runInBand --forceExit --bail
node ../../node_modules/jest/bin/jest.js --version
echo "jest --coverage --maxWorkers=2 --forceExit --workerIdleMemoryLimit=2000MB --bail"
jest --coverage --maxWorkers=2 --forceExit --workerIdleMemoryLimit=2000MB --bail
else
# --maxWorkers performs better in development
echo "jest --coverage --maxWorkers=2 --forceExit"

View File

@ -127,7 +127,7 @@ export async function preview(ctx: any) {
const query = ctx.request.body
// preview may not have a queryId as it hasn't been saved, but if it does
// this stops dynamic variables from calling the same query
const { fields, parameters, queryVerb, transformer, queryId } = query
const { fields, parameters, queryVerb, transformer, queryId, schema } = query
const authConfigCtx: any = getAuthConfig(ctx)
@ -140,6 +140,7 @@ export async function preview(ctx: any) {
parameters,
transformer,
queryId,
schema,
// have to pass down to the thread runner - can't put into context now
environmentVariables: envVars,
ctx: {
@ -235,6 +236,7 @@ async function execute(
user: ctx.user,
auth: { ...authConfigCtx },
},
schema: query.schema,
}
const runFn = () => Runner.run(inputs)

View File

@ -16,7 +16,7 @@ import {
EmptyFilterOption,
} from "@budibase/types"
import sdk from "../../../sdk"
import { hasFilters } from "@budibase/shared-core/src/filters"
import { dataFilters } from "@budibase/shared-core"
export async function handleRequest(
operation: Operation,
@ -40,7 +40,7 @@ export async function handleRequest(
}
if (
!hasFilters(opts?.filters) &&
!dataFilters.hasFilters(opts?.filters) &&
opts?.filters?.onEmptyFilter === EmptyFilterOption.RETURN_NONE
) {
return []

View File

@ -11,6 +11,9 @@ import {
Row,
PatchRowRequest,
PatchRowResponse,
SearchRowResponse,
SearchRowRequest,
SearchParams,
} from "@budibase/types"
import * as utils from "./utils"
import { gridSocket } from "../../../websockets"
@ -197,10 +200,10 @@ export async function destroy(ctx: UserCtx<DeleteRowRequest>) {
ctx.body = response
}
export async function search(ctx: any) {
export async function search(ctx: Ctx<SearchRowRequest, SearchRowResponse>) {
const tableId = utils.getTableId(ctx)
const searchParams = {
const searchParams: SearchParams = {
...ctx.request.body,
tableId,
}

View File

@ -13,7 +13,7 @@ import {
import { FieldTypes } from "../../../constants"
import * as utils from "./utils"
import { cloneDeep } from "lodash/fp"
import { context, db as dbCore } from "@budibase/backend-core"
import { context } from "@budibase/backend-core"
import { finaliseRow, updateRelatedFormula } from "./staticFormula"
import {
UserCtx,
@ -26,8 +26,8 @@ import {
import sdk from "../../../sdk"
export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
const tableId = utils.getTableId(ctx)
const inputs = ctx.request.body
const tableId = inputs.tableId
const isUserTable = tableId === InternalTables.USER_METADATA
let oldRow
const dbTable = await sdk.tables.getTable(tableId)
@ -94,7 +94,8 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
export async function save(ctx: UserCtx) {
let inputs = ctx.request.body
inputs.tableId = ctx.params.tableId
const tableId = utils.getTableId(ctx)
inputs.tableId = tableId
if (!inputs._rev && !inputs._id) {
inputs._id = generateRowID(inputs.tableId)
@ -132,20 +133,22 @@ export async function save(ctx: UserCtx) {
}
export async function find(ctx: UserCtx) {
const db = dbCore.getDB(ctx.appId)
const table = await sdk.tables.getTable(ctx.params.tableId)
let row = await utils.findRow(ctx, ctx.params.tableId, ctx.params.rowId)
const tableId = utils.getTableId(ctx),
rowId = ctx.params.rowId
const table = await sdk.tables.getTable(tableId)
let row = await utils.findRow(ctx, tableId, rowId)
row = await outputProcessing(table, row)
return row
}
export async function destroy(ctx: UserCtx) {
const db = context.getAppDB()
const tableId = utils.getTableId(ctx)
const { _id } = ctx.request.body
let row = await db.get<Row>(_id)
let _rev = ctx.request.body._rev || row._rev
if (row.tableId !== ctx.params.tableId) {
if (row.tableId !== tableId) {
throw "Supplied tableId doesn't match the row's tableId"
}
const table = await sdk.tables.getTable(row.tableId)
@ -163,7 +166,7 @@ export async function destroy(ctx: UserCtx) {
await updateRelatedFormula(table, row)
let response
if (ctx.params.tableId === InternalTables.USER_METADATA) {
if (tableId === InternalTables.USER_METADATA) {
ctx.params = {
id: _id,
}
@ -176,7 +179,7 @@ export async function destroy(ctx: UserCtx) {
}
export async function bulkDestroy(ctx: UserCtx) {
const tableId = ctx.params.tableId
const tableId = utils.getTableId(ctx)
const table = await sdk.tables.getTable(tableId)
let { rows } = ctx.request.body
@ -216,7 +219,7 @@ export async function bulkDestroy(ctx: UserCtx) {
export async function fetchEnrichedRow(ctx: UserCtx) {
const db = context.getAppDB()
const tableId = ctx.params.tableId
const tableId = utils.getTableId(ctx)
const rowId = ctx.params.rowId
// need table to work out where links go in row
let [table, row] = await Promise.all([

View File

@ -45,13 +45,20 @@ export async function findRow(ctx: UserCtx, tableId: string, rowId: string) {
}
export function getTableId(ctx: Ctx) {
if (ctx.request.body && ctx.request.body.tableId) {
return ctx.request.body.tableId
// top priority, use the URL first
if (ctx.params?.sourceId) {
return ctx.params.sourceId
}
if (ctx.params && ctx.params.tableId) {
// now check for old way of specifying table ID
if (ctx.params?.tableId) {
return ctx.params.tableId
}
if (ctx.params && ctx.params.viewName) {
// check body for a table ID
if (ctx.request.body?.tableId) {
return ctx.request.body.tableId
}
// now check if a specific view name
if (ctx.params?.viewName) {
return ctx.params.viewName
}
}
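getTableId now resolves the table for a row request in a fixed order: the :sourceId route param first, then the legacy :tableId param, then a tableId in the request body, and finally a :viewName param. A small hedged example of what that ordering means in practice (the ctx shape is simplified and the IDs are made up):

    // Sketch: the URL parameter wins over anything supplied in the body.
    const ctx = {
      params: { sourceId: "ta_5f3c1d2e" },                   // hypothetical table ID from the URL
      request: { body: { tableId: "ta_something_else" } },   // ignored when sourceId is present
    }
    // getTableId(ctx as any) -> "ta_5f3c1d2e"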

View File

@ -1,14 +1,18 @@
import { quotas } from "@budibase/pro"
import {
UserCtx,
SearchResponse,
SortOrder,
SortType,
ViewV2,
SearchRowResponse,
SearchViewRowRequest,
RequiredKeys,
SearchParams,
} from "@budibase/types"
import { dataFilters } from "@budibase/shared-core"
import sdk from "../../../sdk"
export async function searchView(ctx: UserCtx<void, SearchResponse>) {
export async function searchView(
ctx: UserCtx<SearchViewRowRequest, SearchRowResponse>
) {
const { viewId } = ctx.params
const view = await sdk.views.get(viewId)
@ -29,49 +33,35 @@ export async function searchView(ctx: UserCtx<void, SearchResponse>) {
undefined
ctx.status = 200
const result = await quotas.addQuery(
() =>
sdk.rows.search({
const { body } = ctx.request
const query = dataFilters.buildLuceneQuery(view.query || [])
const searchOptions: RequiredKeys<SearchViewRowRequest> &
RequiredKeys<Pick<SearchParams, "tableId" | "query" | "fields">> = {
tableId: view.tableId,
query: view.query || {},
query,
fields: viewFields,
...getSortOptions(ctx, view),
}),
{
datasourceId: view.tableId,
...getSortOptions(body, view),
limit: body.limit,
bookmark: body.bookmark,
paginate: body.paginate,
}
)
const result = await quotas.addQuery(() => sdk.rows.search(searchOptions), {
datasourceId: view.tableId,
})
result.rows.forEach(r => (r._viewId = view.id))
ctx.body = result
}
function getSortOptions(
ctx: UserCtx,
view: ViewV2
):
| {
sort: string
sortOrder?: SortOrder
sortType?: SortType
}
| undefined {
const { sort_column, sort_order, sort_type } = ctx.query
if (Array.isArray(sort_column)) {
ctx.throw(400, "sort_column cannot be an array")
}
if (Array.isArray(sort_order)) {
ctx.throw(400, "sort_order cannot be an array")
}
if (Array.isArray(sort_type)) {
ctx.throw(400, "sort_type cannot be an array")
}
if (sort_column) {
function getSortOptions(request: SearchViewRowRequest, view: ViewV2) {
if (request.sort) {
return {
sort: sort_column,
sortOrder: sort_order as SortOrder,
sortType: sort_type as SortType,
sort: request.sort,
sortOrder: request.sortOrder,
sortType: request.sortType,
}
}
if (view.sort) {
@ -82,5 +72,9 @@ function getSortOptions(
}
}
return
return {
sort: undefined,
sortOrder: undefined,
sortType: undefined,
}
}
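searchView now takes its options from the POST body (SearchViewRowRequest) rather than sort_column/sort_order/sort_type query-string parameters, and it rebuilds a Lucene query from the view's stored filter array via dataFilters.buildLuceneQuery before delegating to sdk.rows.search. A hedged sketch of the request a client would send under the new contract (field names follow the view tests further down; the view ID is made up):

    // Sketch: body-based sorting for the v2 view search endpoint.
    const viewId = "view_ta_5f3c1d2e_ab12cd"  // hypothetical
    const res = await fetch(`/api/v2/views/${viewId}/search`, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({
        sort: "age",
        sortOrder: "descending",   // assumed string value of SortOrder.DESCENDING
        sortType: "number",
        limit: 10,
      }),
    })
    const { rows } = await res.json()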

View File

@ -22,9 +22,12 @@ import {
QueryJson,
RelationshipType,
RenameColumn,
SaveTableRequest,
SaveTableResponse,
Table,
TableRequest,
UserCtx,
ViewV2,
} from "@budibase/types"
import sdk from "../../../sdk"
import { builderSocket } from "../../../websockets"
@ -198,8 +201,8 @@ function isRelationshipSetup(column: FieldSchema) {
return column.foreignKey || column.through
}
export async function save(ctx: UserCtx) {
const inputs: TableRequest = ctx.request.body
export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
const inputs = ctx.request.body
const renamed = inputs?._rename
// can't do this right now
delete inputs.rows
@ -215,7 +218,7 @@ export async function save(ctx: UserCtx) {
...inputs,
}
let oldTable
let oldTable: Table | undefined
if (ctx.request.body && ctx.request.body._id) {
oldTable = await sdk.tables.getTable(ctx.request.body._id)
}
@ -224,6 +227,17 @@ export async function save(ctx: UserCtx) {
ctx.throw(400, "A column type has changed.")
}
for (let view in tableToSave.views) {
const tableView = tableToSave.views[view]
if (!tableView || !sdk.views.isV2(tableView)) continue
tableToSave.views[view] = sdk.views.syncSchema(
oldTable!.views![view] as ViewV2,
tableToSave.schema,
renamed
)
}
const db = context.getAppDB()
const datasource = await sdk.datasources.get(datasourceId)
if (!datasource.entities) {

View File

@ -9,6 +9,8 @@ import { isExternalTable, isSQL } from "../../../integrations/utils"
import { events } from "@budibase/backend-core"
import {
FetchTablesResponse,
SaveTableResponse,
SaveTableRequest,
Table,
TableResponse,
UserCtx,
@ -60,7 +62,7 @@ export async function find(ctx: UserCtx<void, TableResponse>) {
ctx.body = sdk.tables.enrichViewSchemas(table)
}
export async function save(ctx: UserCtx) {
export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
const appId = ctx.appId
const table = ctx.request.body
const isImport = table.rows

View File

@ -9,7 +9,15 @@ import {
fixAutoColumnSubType,
} from "../../../utilities/rowProcessor"
import { runStaticFormulaChecks } from "./bulkFormula"
import { Table } from "@budibase/types"
import {
SaveTableRequest,
SaveTableResponse,
Table,
TableRequest,
UserCtx,
ViewStatisticsSchema,
ViewV2,
} from "@budibase/types"
import { quotas } from "@budibase/pro"
import isEqual from "lodash/isEqual"
import { cloneDeep } from "lodash/fp"
@ -33,10 +41,10 @@ function checkAutoColumns(table: Table, oldTable?: Table) {
return table
}
export async function save(ctx: any) {
export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
const db = context.getAppDB()
const { rows, ...rest } = ctx.request.body
let tableToSave = {
let tableToSave: TableRequest = {
type: "table",
_id: generateTableID(),
views: {},
@ -44,7 +52,7 @@ export async function save(ctx: any) {
}
// if the table obj had an _id then it will have been retrieved
let oldTable
let oldTable: Table | undefined
if (ctx.request.body && ctx.request.body._id) {
oldTable = await sdk.tables.getTable(ctx.request.body._id)
}
@ -80,7 +88,7 @@ export async function save(ctx: any) {
let { _rename } = tableToSave
/* istanbul ignore next */
if (_rename && _rename.old === _rename.updated) {
_rename = null
_rename = undefined
delete tableToSave._rename
}
@ -97,7 +105,20 @@ export async function save(ctx: any) {
const tableView = tableToSave.views[view]
if (!tableView) continue
if (tableView.schema.group || tableView.schema.field) continue
if (sdk.views.isV2(tableView)) {
tableToSave.views[view] = sdk.views.syncSchema(
oldTable!.views![view] as ViewV2,
tableToSave.schema,
_rename
)
continue
}
if (
(tableView.schema as ViewStatisticsSchema).group ||
tableView.schema.field
)
continue
tableView.schema = tableToSave.schema
}
@ -112,7 +133,7 @@ export async function save(ctx: any) {
tableToSave._rev = linkResp._rev
}
} catch (err) {
ctx.throw(400, err)
ctx.throw(400, err as string)
}
// don't perform any updates until relationships have been

View File

@ -418,7 +418,7 @@ export function areSwitchableTypes(type1: any, type2: any) {
return false
}
export function hasTypeChanged(table: any, oldTable: any) {
export function hasTypeChanged(table: Table, oldTable: Table | undefined) {
if (!oldTable) {
return false
}

View File

@ -4,16 +4,14 @@ import authorized from "../../middleware/authorized"
import { paramResource, paramSubResource } from "../../middleware/resourceId"
import { permissions } from "@budibase/backend-core"
import { internalSearchValidator } from "./utils/validators"
import noViewData from "../../middleware/noViewData"
import trimViewRowInfo from "../../middleware/trimViewRowInfo"
import * as utils from "../../db/utils"
const { PermissionType, PermissionLevel } = permissions
const router: Router = new Router()
router
/**
* @api {get} /api/:tableId/:rowId/enrich Get an enriched row
* @api {get} /api/:sourceId/:rowId/enrich Get an enriched row
* @apiName Get an enriched row
* @apiGroup rows
* @apiPermission table read access
@ -27,13 +25,13 @@ router
* @apiSuccess {object} row The response body will be the enriched row.
*/
.get(
"/api/:tableId/:rowId/enrich",
paramSubResource("tableId", "rowId"),
"/api/:sourceId/:rowId/enrich",
paramSubResource("sourceId", "rowId"),
authorized(PermissionType.TABLE, PermissionLevel.READ),
rowController.fetchEnrichedRow
)
/**
* @api {get} /api/:tableId/rows Get all rows in a table
* @api {get} /api/:sourceId/rows Get all rows in a table
* @apiName Get all rows in a table
* @apiGroup rows
* @apiPermission table read access
@ -42,37 +40,37 @@ router
* due to its lack of support for pagination. With SQL tables this will retrieve up to a limit and then
* will simply stop.
*
* @apiParam {string} tableId The ID of the table to retrieve all rows within.
* @apiParam {string} sourceId The ID of the table to retrieve all rows within.
*
* @apiSuccess {object[]} rows The response body will be an array of all rows found.
*/
.get(
"/api/:tableId/rows",
paramResource("tableId"),
"/api/:sourceId/rows",
paramResource("sourceId"),
authorized(PermissionType.TABLE, PermissionLevel.READ),
rowController.fetch
)
/**
* @api {get} /api/:tableId/rows/:rowId Retrieve a single row
* @api {get} /api/:sourceId/rows/:rowId Retrieve a single row
* @apiName Retrieve a single row
* @apiGroup rows
* @apiPermission table read access
* @apiDescription This endpoint retrieves only the specified row. If you wish to retrieve
* a row by anything other than its _id field, use the search endpoint.
*
* @apiParam {string} tableId The ID of the table to retrieve a row from.
* @apiParam {string} sourceId The ID of the table to retrieve a row from.
* @apiParam {string} rowId The ID of the row to retrieve.
*
* @apiSuccess {object} body The response body will be the row that was found.
*/
.get(
"/api/:tableId/rows/:rowId",
paramSubResource("tableId", "rowId"),
"/api/:sourceId/rows/:rowId",
paramSubResource("sourceId", "rowId"),
authorized(PermissionType.TABLE, PermissionLevel.READ),
rowController.find
)
/**
* @api {post} /api/:tableId/search Search for rows in a table
* @api {post} /api/:sourceId/search Search for rows in a table
* @apiName Search for rows in a table
* @apiGroup rows
* @apiPermission table read access
@ -80,7 +78,7 @@ router
* and data UI in the builder are built atop this. All filtering, sorting and pagination is
* handled through this, for internal and external (datasource plus, e.g. SQL) tables.
*
* @apiParam {string} tableId The ID of the table to retrieve rows from.
* @apiParam {string} sourceId The ID of the table to retrieve rows from.
*
* @apiParam (Body) {boolean} [paginate] If pagination is required then this should be set to true,
* defaults to false.
@ -135,22 +133,22 @@ router
* page.
*/
.post(
"/api/:tableId/search",
"/api/:sourceId/search",
internalSearchValidator(),
paramResource("tableId"),
paramResource("sourceId"),
authorized(PermissionType.TABLE, PermissionLevel.READ),
rowController.search
)
// DEPRECATED - this is an old API, but for backwards compat it needs to be
// supported still
.post(
"/api/search/:tableId/rows",
paramResource("tableId"),
"/api/search/:sourceId/rows",
paramResource("sourceId"),
authorized(PermissionType.TABLE, PermissionLevel.READ),
rowController.search
)
/**
* @api {post} /api/:tableId/rows Creates a new row
* @api {post} /api/:sourceId/rows Creates a new row
* @apiName Creates a new row
* @apiGroup rows
* @apiPermission table write access
@ -159,7 +157,7 @@ router
* links to one. Please note that "_id", "_rev" and "tableId" are fields that are
* already used by Budibase tables and cannot be used for columns.
*
* @apiParam {string} tableId The ID of the table to save a row to.
* @apiParam {string} sourceId The ID of the table to save a row to.
*
* @apiParam (Body) {string} [_id] If the row exists already then an ID for the row must be provided.
* @apiParam (Body) {string} [_rev] If working with an existing row for an internal table its revision
@ -174,14 +172,14 @@ router
* @apiSuccess {object} body The contents of the row that was saved will be returned as well.
*/
.post(
"/api/:tableId/rows",
paramResource("tableId"),
"/api/:sourceId/rows",
paramResource("sourceId"),
authorized(PermissionType.TABLE, PermissionLevel.WRITE),
noViewData,
trimViewRowInfo,
rowController.save
)
/**
* @api {patch} /api/:tableId/rows Updates a row
* @api {patch} /api/:sourceId/rows Updates a row
* @apiName Update a row
* @apiGroup rows
* @apiPermission table write access
@ -189,14 +187,14 @@ router
* error if an _id isn't provided, it will only function for existing rows.
*/
.patch(
"/api/:tableId/rows",
paramResource("tableId"),
"/api/:sourceId/rows",
paramResource("sourceId"),
authorized(PermissionType.TABLE, PermissionLevel.WRITE),
noViewData,
trimViewRowInfo,
rowController.patch
)
/**
* @api {post} /api/:tableId/rows/validate Validate inputs for a row
* @api {post} /api/:sourceId/rows/validate Validate inputs for a row
* @apiName Validate inputs for a row
* @apiGroup rows
* @apiPermission table write access
@ -204,7 +202,7 @@ router
* given the table schema, this will iterate through all the constraints on the table and
* check if the request body is valid.
*
* @apiParam {string} tableId The ID of the table the row is to be validated for.
* @apiParam {string} sourceId The ID of the table the row is to be validated for.
*
* @apiParam (Body) {any} [any] Any fields provided in the request body will be tested
* against the table schema and constraints.
@ -216,20 +214,20 @@ router
* the schema.
*/
.post(
"/api/:tableId/rows/validate",
paramResource("tableId"),
"/api/:sourceId/rows/validate",
paramResource("sourceId"),
authorized(PermissionType.TABLE, PermissionLevel.WRITE),
rowController.validate
)
/**
* @api {delete} /api/:tableId/rows Delete rows
* @api {delete} /api/:sourceId/rows Delete rows
* @apiName Delete rows
* @apiGroup rows
* @apiPermission table write access
* @apiDescription This endpoint can delete a single row, or delete them in a bulk
* fashion.
*
* @apiParam {string} tableId The ID of the table the row is to be deleted from.
* @apiParam {string} sourceId The ID of the table the row is to be deleted from.
*
* @apiParam (Body) {object[]} [rows] If bulk deletion is desired then provide the rows in this
* key of the request body that are to be deleted.
@ -242,117 +240,37 @@ router
* is the deleted row.
*/
.delete(
"/api/:tableId/rows",
paramResource("tableId"),
"/api/:sourceId/rows",
paramResource("sourceId"),
authorized(PermissionType.TABLE, PermissionLevel.WRITE),
trimViewRowInfo,
rowController.destroy
)
/**
* @api {post} /api/:tableId/rows/exportRows Export Rows
* @api {post} /api/:sourceId/rows/exportRows Export Rows
* @apiName Export rows
* @apiGroup rows
* @apiPermission table write access
* @apiDescription This API can export a number of provided rows
*
* @apiParam {string} tableId The ID of the table the row is to be deleted from.
* @apiParam {string} sourceId The ID of the table the row is to be deleted from.
*
* @apiParam (Body) {object[]} [rows] The row IDs which are to be exported
*
* @apiSuccess {object[]|object}
*/
.post(
"/api/:tableId/rows/exportRows",
paramResource("tableId"),
"/api/:sourceId/rows/exportRows",
paramResource("sourceId"),
authorized(PermissionType.TABLE, PermissionLevel.WRITE),
rowController.exportRows
)
router
.get(
router.post(
"/api/v2/views/:viewId/search",
authorized(PermissionType.VIEW, PermissionLevel.READ),
authorized(PermissionType.TABLE, PermissionLevel.READ),
rowController.views.searchView
)
/**
* @api {post} /api/:tableId/rows Creates a new row
* @apiName Creates a new row
* @apiGroup rows
* @apiPermission table write access
* @apiDescription This API will create a new row based on the supplied body. If the
* body includes an "_id" field then it will update an existing row if the field
* links to one. Please note that "_id", "_rev" and "tableId" are fields that are
* already used by Budibase tables and cannot be used for columns.
*
* @apiParam {string} tableId The ID of the table to save a row to.
*
* @apiParam (Body) {string} [_id] If the row exists already then an ID for the row must be provided.
* @apiParam (Body) {string} [_rev] If working with an existing row for an internal table its revision
* must also be provided.
* @apiParam (Body) {string} _viewId The ID of the view should be specified in the row body itself.
* @apiParam (Body) {string} tableId The ID of the table should also be specified in the row body itself.
* @apiParam (Body) {any} [any] Any field supplied in the body will be assessed to see if it matches
* a column in the specified table. All other fields will be dropped and not stored.
*
* @apiSuccess {string} _id The ID of the row that was just saved, if it was just created this
* is the rows new ID.
* @apiSuccess {string} [_rev] If saving to an internal table a revision will also be returned.
* @apiSuccess {object} body The contents of the row that was saved will be returned as well.
*/
.post(
"/api/v2/views/:viewId/rows",
paramResource("viewId"),
authorized(PermissionType.VIEW, PermissionLevel.WRITE),
trimViewRowInfo,
rowController.save
)
/**
* @api {patch} /api/v2/views/:viewId/rows/:rowId Updates a row
* @apiName Update a row
* @apiGroup rows
* @apiPermission table write access
* @apiDescription This endpoint is identical to the row creation endpoint but instead it will
* error if an _id isn't provided, it will only function for existing rows.
*/
.patch(
"/api/v2/views/:viewId/rows/:rowId",
paramResource("viewId"),
authorized(PermissionType.VIEW, PermissionLevel.WRITE),
trimViewRowInfo,
rowController.patch
)
/**
* @api {delete} /api/v2/views/:viewId/rows Delete rows for a view
* @apiName Delete rows for a view
* @apiGroup rows
* @apiPermission table write access
* @apiDescription This endpoint can delete a single row, or delete them in a bulk
* fashion.
*
* @apiParam {string} tableId The ID of the table the row is to be deleted from.
*
* @apiParam (Body) {object[]} [rows] If bulk deletion is desired then provide the rows in this
* key of the request body that are to be deleted.
* @apiParam (Body) {string} [_id] If deleting a single row then provide its ID in this field.
* @apiParam (Body) {string} [_rev] If deleting a single row from an internal table then provide its
* revision here.
*
* @apiSuccess {object[]|object} body If deleting bulk then the response body will be an array
* of the deleted rows, if deleting a single row then the body will contain a "row" property which
* is the deleted row.
*/
.delete(
"/api/v2/views/:viewId/rows",
paramResource("viewId"),
authorized(PermissionType.VIEW, PermissionLevel.WRITE),
// This is required as the implementation relies on the table id
(ctx, next) => {
ctx.params.tableId = utils.extractViewInfoFromID(
ctx.params.viewId
).tableId
return next()
},
rowController.destroy
)
export default router
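The dedicated /api/v2/views/:viewId/rows save, patch and delete routes are gone; view rows now travel through the ordinary /api/:sourceId/rows endpoints with the view ID used as the sourceId, and the trimViewRowInfo middleware strips the view metadata before the row controller runs. A hedged example of creating a row through a view under the consolidated routing (IDs are hypothetical; the body shape follows the "view 2.0" tests below):

    // Sketch: saving a row via a v2 view using the shared row endpoint.
    const viewId = "view_ta_5f3c1d2e_ab12cd"   // hypothetical view ID
    const tableId = "ta_5f3c1d2e"              // hypothetical backing table
    await fetch(`/api/${viewId}/rows`, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({
        tableId,
        _viewId: viewId,      // stripped server-side by trimViewRowInfo
        name: "Ada",
        address: "10 Main St",
      }),
    })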

View File

@ -16,16 +16,12 @@ import {
FieldType,
SortType,
SortOrder,
DeleteRow,
} from "@budibase/types"
import {
expectAnyInternalColsAttributes,
generator,
structures,
} from "@budibase/backend-core/tests"
import trimViewRowInfoMiddleware from "../../../middleware/trimViewRowInfo"
import noViewDataMiddleware from "../../../middleware/noViewData"
import router from "../row"
describe("/rows", () => {
let request = setup.getRequest()
@ -394,26 +390,6 @@ describe("/rows", () => {
expect(saved.arrayFieldArrayStrKnown).toEqual(["One"])
expect(saved.optsFieldStrKnown).toEqual("Alpha")
})
it("should throw an error when creating a table row with view id data", async () => {
const res = await request
.post(`/api/${row.tableId}/rows`)
.send({ ...row, _viewId: generator.guid() })
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(400)
expect(res.body.message).toEqual(
"Table row endpoints cannot contain view info"
)
})
it("should setup the noViewData middleware", async () => {
const route = router.stack.find(
r => r.methods.includes("POST") && r.path === "/api/:tableId/rows"
)
expect(route).toBeDefined()
expect(route?.stack).toContainEqual(noViewDataMiddleware)
})
})
describe("patch", () => {
@ -463,33 +439,6 @@ describe("/rows", () => {
await assertRowUsage(rowUsage)
await assertQueryUsage(queryUsage)
})
it("should throw an error when creating a table row with view id data", async () => {
const existing = await config.createRow()
const res = await config.api.row.patch(
table._id!,
{
...existing,
_id: existing._id!,
_rev: existing._rev!,
tableId: table._id!,
_viewId: generator.guid(),
},
{ expectStatus: 400 }
)
expect(res.body.message).toEqual(
"Table row endpoints cannot contain view info"
)
})
it("should setup the noViewData middleware", async () => {
const route = router.stack.find(
r => r.methods.includes("PATCH") && r.path === "/api/:tableId/rows"
)
expect(route).toBeDefined()
expect(route?.stack).toContainEqual(noViewDataMiddleware)
})
})
describe("destroy", () => {
@ -758,7 +707,7 @@ describe("/rows", () => {
})
// the environment needs configured for this
await setup.switchToSelfHosted(async () => {
context.doInAppContext(config.getAppId(), async () => {
return context.doInAppContext(config.getAppId(), async () => {
const enriched = await outputProcessing(table, [row])
expect((enriched as Row[])[0].attachment[0].url).toBe(
`/files/signed/prod-budi-app-assets/${config.getProdAppId()}/attachments/${attachmentId}`
@ -813,6 +762,184 @@ describe("/rows", () => {
})
})
describe("view 2.0", () => {
function userTable(): Table {
return {
name: "user",
type: "user",
schema: {
name: {
type: FieldType.STRING,
name: "name",
},
surname: {
type: FieldType.STRING,
name: "name",
},
age: {
type: FieldType.NUMBER,
name: "age",
},
address: {
type: FieldType.STRING,
name: "address",
},
jobTitle: {
type: FieldType.STRING,
name: "jobTitle",
},
},
}
}
const randomRowData = () => ({
name: generator.first(),
surname: generator.last(),
age: generator.age(),
address: generator.address(),
jobTitle: generator.word(),
})
describe("create", () => {
it("should persist a new row with only the provided view fields", async () => {
const table = await config.createTable(userTable())
const view = await config.api.viewV2.create({
tableId: table._id!,
schema: {
name: { visible: true },
surname: { visible: true },
address: { visible: true },
},
})
const data = randomRowData()
const newRow = await config.api.row.save(view.id, {
tableId: config.table!._id,
_viewId: view.id,
...data,
})
const row = await config.api.row.get(table._id!, newRow._id!)
expect(row.body).toEqual({
name: data.name,
surname: data.surname,
address: data.address,
tableId: config.table!._id,
type: "row",
_id: expect.any(String),
_rev: expect.any(String),
createdAt: expect.any(String),
updatedAt: expect.any(String),
})
expect(row.body._viewId).toBeUndefined()
expect(row.body.age).toBeUndefined()
expect(row.body.jobTitle).toBeUndefined()
})
})
describe("patch", () => {
it("should update only the view fields for a row", async () => {
const table = await config.createTable(userTable())
const tableId = table._id!
const view = await config.api.viewV2.create({
tableId,
schema: {
name: { visible: true },
address: { visible: true },
},
})
const newRow = await config.api.row.save(view.id, {
tableId,
_viewId: view.id,
...randomRowData(),
})
const newData = randomRowData()
await config.api.row.patch(view.id, {
tableId,
_viewId: view.id,
_id: newRow._id!,
_rev: newRow._rev!,
...newData,
})
const row = await config.api.row.get(tableId, newRow._id!)
expect(row.body).toEqual({
...newRow,
type: "row",
name: newData.name,
address: newData.address,
_id: expect.any(String),
_rev: expect.any(String),
createdAt: expect.any(String),
updatedAt: expect.any(String),
})
expect(row.body._viewId).toBeUndefined()
expect(row.body.age).toBeUndefined()
expect(row.body.jobTitle).toBeUndefined()
})
})
describe("destroy", () => {
it("should be able to delete a row", async () => {
const table = await config.createTable(userTable())
const tableId = table._id!
const view = await config.api.viewV2.create({
tableId,
schema: {
name: { visible: true },
address: { visible: true },
},
})
const createdRow = await config.createRow()
const rowUsage = await getRowUsage()
const queryUsage = await getQueryUsage()
await config.api.row.delete(view.id, [createdRow])
await assertRowUsage(rowUsage - 1)
await assertQueryUsage(queryUsage + 1)
await config.api.row.get(tableId, createdRow._id!, {
expectStatus: 404,
})
})
it("should be able to delete multiple rows", async () => {
const table = await config.createTable(userTable())
const tableId = table._id!
const view = await config.api.viewV2.create({
tableId,
schema: {
name: { visible: true },
address: { visible: true },
},
})
const rows = [
await config.createRow(),
await config.createRow(),
await config.createRow(),
]
const rowUsage = await getRowUsage()
const queryUsage = await getQueryUsage()
await config.api.row.delete(view.id, [rows[0], rows[2]])
await assertRowUsage(rowUsage - 2)
await assertQueryUsage(queryUsage + 1)
await config.api.row.get(tableId, rows[0]._id!, {
expectStatus: 404,
})
await config.api.row.get(tableId, rows[2]._id!, {
expectStatus: 404,
})
await config.api.row.get(tableId, rows[1]._id!, { expectStatus: 200 })
})
})
describe("view search", () => {
function userTable(): Table {
return {
@ -869,14 +996,16 @@ describe("/rows", () => {
)
const createViewResponse = await config.api.viewV2.create({
query: { equal: { age: 40 } },
query: [{ operator: "equal", field: "age", value: 40 }],
})
const response = await config.api.viewV2.search(createViewResponse.id)
expect(response.body.rows).toHaveLength(5)
expect(response.body).toEqual({
rows: expect.arrayContaining(expectedRows.map(expect.objectContaining)),
rows: expect.arrayContaining(
expectedRows.map(expect.objectContaining)
),
})
})
@ -1004,13 +1133,14 @@ describe("/rows", () => {
},
})
const response = await config.api.viewV2.search(createViewResponse.id, {
sort: {
column: sortParams.field,
order: sortParams.order,
type: sortParams.type,
},
})
const response = await config.api.viewV2.search(
createViewResponse.id,
{
sort: sortParams.field,
sortOrder: sortParams.order,
sortType: sortParams.type,
}
)
expect(response.body.rows).toHaveLength(4)
expect(response.body).toEqual({
@ -1057,208 +1187,78 @@ describe("/rows", () => {
expect(response.body.rows).toHaveLength(0)
})
})
describe("view 2.0", () => {
function userTable(): Table {
return {
name: "user",
type: "user",
schema: {
name: {
type: FieldType.STRING,
name: "name",
},
surname: {
type: FieldType.STRING,
name: "name",
},
age: {
type: FieldType.NUMBER,
name: "age",
},
address: {
type: FieldType.STRING,
name: "address",
},
jobTitle: {
type: FieldType.STRING,
name: "jobTitle",
},
},
}
}
const randomRowData = () => ({
name: generator.first(),
surname: generator.last(),
age: generator.age(),
address: generator.address(),
jobTitle: generator.word(),
})
describe("create", () => {
it("should persist a new row with only the provided view fields", async () => {
it("respects the limit parameter", async () => {
const table = await config.createTable(userTable())
const view = await config.api.viewV2.create({
tableId: table._id!,
schema: {
name: { visible: true },
surname: { visible: true },
address: { visible: true },
},
const rows = []
for (let i = 0; i < 10; i++) {
rows.push(await config.createRow({ tableId: table._id }))
}
const limit = generator.integer({ min: 1, max: 8 })
const createViewResponse = await config.api.viewV2.create()
const response = await config.api.viewV2.search(createViewResponse.id, {
limit,
})
const data = randomRowData()
const newRow = await config.api.viewV2.row.create(view.id, {
tableId: config.table!._id,
_viewId: view.id,
...data,
expect(response.body.rows).toHaveLength(limit)
})
const row = await config.api.row.get(table._id!, newRow._id!)
expect(row.body).toEqual({
name: data.name,
surname: data.surname,
address: data.address,
tableId: config.table!._id,
type: "row",
_id: expect.any(String),
_rev: expect.any(String),
createdAt: expect.any(String),
updatedAt: expect.any(String),
})
expect(row.body._viewId).toBeUndefined()
expect(row.body.age).toBeUndefined()
expect(row.body.jobTitle).toBeUndefined()
})
it("can handle pagination", async () => {
const table = await config.createTable(userTable())
const rows = []
for (let i = 0; i < 10; i++) {
rows.push(await config.createRow({ tableId: table._id }))
}
// rows.sort((a, b) => (a._id! > b._id! ? 1 : -1))
it("should setup the trimViewRowInfo middleware", async () => {
const route = router.stack.find(
r =>
r.methods.includes("POST") &&
r.path === "/api/v2/views/:viewId/rows"
const createViewResponse = await config.api.viewV2.create()
const allRows = (await config.api.viewV2.search(createViewResponse.id))
.body.rows
const firstPageResponse = await config.api.viewV2.search(
createViewResponse.id,
{
paginate: true,
limit: 4,
}
)
expect(route).toBeDefined()
expect(route?.stack).toContainEqual(trimViewRowInfoMiddleware)
})
expect(firstPageResponse.body).toEqual({
rows: expect.arrayContaining(allRows.slice(0, 4)),
totalRows: 10,
hasNextPage: true,
bookmark: expect.any(String),
})
describe("patch", () => {
it("should update only the view fields for a row", async () => {
const table = await config.createTable(userTable())
const tableId = table._id!
const view = await config.api.viewV2.create({
tableId,
schema: {
name: { visible: true },
address: { visible: true },
},
})
const newRow = await config.api.viewV2.row.create(view.id, {
tableId,
_viewId: view.id,
...randomRowData(),
})
const newData = randomRowData()
await config.api.viewV2.row.update(view.id, newRow._id!, {
tableId,
_viewId: view.id,
_id: newRow._id!,
_rev: newRow._rev!,
...newData,
})
const row = await config.api.row.get(tableId, newRow._id!)
expect(row.body).toEqual({
...newRow,
type: "row",
name: newData.name,
address: newData.address,
_id: expect.any(String),
_rev: expect.any(String),
createdAt: expect.any(String),
updatedAt: expect.any(String),
})
expect(row.body._viewId).toBeUndefined()
expect(row.body.age).toBeUndefined()
expect(row.body.jobTitle).toBeUndefined()
})
it("should setup the trimViewRowInfo middleware", async () => {
const route = router.stack.find(
r =>
r.methods.includes("PATCH") &&
r.path === "/api/v2/views/:viewId/rows/:rowId"
)
expect(route).toBeDefined()
expect(route?.stack).toContainEqual(trimViewRowInfoMiddleware)
})
})
describe("destroy", () => {
it("should be able to delete a row", async () => {
const table = await config.createTable(userTable())
const tableId = table._id!
const view = await config.api.viewV2.create({
tableId,
schema: {
name: { visible: true },
address: { visible: true },
},
})
const createdRow = await config.createRow()
const rowUsage = await getRowUsage()
const queryUsage = await getQueryUsage()
const body: DeleteRow = {
_id: createdRow._id!,
const secondPageResponse = await config.api.viewV2.search(
createViewResponse.id,
{
paginate: true,
limit: 4,
bookmark: firstPageResponse.body.bookmark,
}
await config.api.viewV2.row.delete(view.id, body)
await assertRowUsage(rowUsage - 1)
await assertQueryUsage(queryUsage + 1)
await config.api.row.get(tableId, createdRow._id!, {
expectStatus: 404,
})
)
expect(secondPageResponse.body).toEqual({
rows: expect.arrayContaining(allRows.slice(4, 8)),
totalRows: 10,
hasNextPage: true,
bookmark: expect.any(String),
})
it("should be able to delete multiple rows", async () => {
const table = await config.createTable(userTable())
const tableId = table._id!
const view = await config.api.viewV2.create({
tableId,
schema: {
name: { visible: true },
address: { visible: true },
},
})
const rows = [
await config.createRow(),
await config.createRow(),
await config.createRow(),
]
const rowUsage = await getRowUsage()
const queryUsage = await getQueryUsage()
await config.api.viewV2.row.delete(view.id, {
rows: [rows[0], rows[2]],
})
await assertRowUsage(rowUsage - 2)
await assertQueryUsage(queryUsage + 1)
await config.api.row.get(tableId, rows[0]._id!, {
expectStatus: 404,
})
await config.api.row.get(tableId, rows[2]._id!, {
expectStatus: 404,
})
await config.api.row.get(tableId, rows[1]._id!, { expectStatus: 200 })
const lastPageResponse = await config.api.viewV2.search(
createViewResponse.id,
{
paginate: true,
limit: 4,
bookmark: secondPageResponse.body.bookmark,
}
)
expect(lastPageResponse.body).toEqual({
rows: expect.arrayContaining(allRows.slice(8)),
totalRows: 10,
hasNextPage: false,
bookmark: expect.any(String),
})
})
})
})
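The pagination test above pages through the view by feeding each response's bookmark into the next request until hasNextPage goes false. The same contract as a generic client-side loop (the search callback stands in for the test helper config.api.viewV2.search, or equivalently a POST to /api/v2/views/:viewId/search):

    // Sketch: collect every row from a paginated v2 view search.
    type ViewSearchPage = { rows: any[]; hasNextPage: boolean; bookmark?: string }
    async function fetchAllRows(
      search: (opts: {
        paginate: true
        limit: number
        bookmark?: string
      }) => Promise<ViewSearchPage>
    ): Promise<any[]> {
      const rows: any[] = []
      let bookmark: string | undefined
      while (true) {
        const page = await search({ paginate: true, limit: 4, bookmark })
        rows.push(...page.rows)
        if (!page.hasNextPage) break
        bookmark = page.bookmark
      }
      return rows
    }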

View File

@ -62,7 +62,7 @@ describe("/v2/views", () => {
name: generator.name(),
tableId: config.table!._id!,
primaryDisplay: generator.word(),
query: { allOr: false, equal: { field: "value" } },
query: [{ operator: "equal", field: "field", value: "value" }],
sort: {
field: "fieldToSort",
order: SortOrder.DESCENDING,
@ -190,7 +190,7 @@ describe("/v2/views", () => {
const tableId = config.table!._id!
await config.api.viewV2.update({
...view,
query: { equal: { newField: "thatValue" } },
query: [{ operator: "equal", field: "newField", value: "thatValue" }],
})
expect(await config.api.table.get(tableId)).toEqual({
@ -198,7 +198,9 @@ describe("/v2/views", () => {
views: {
[view.name]: {
...view,
query: { equal: { newField: "thatValue" } },
query: [
{ operator: "equal", field: "newField", value: "thatValue" },
],
schema: expect.anything(),
},
},
@ -216,7 +218,13 @@ describe("/v2/views", () => {
tableId,
name: view.name,
primaryDisplay: generator.word(),
query: { equal: { [generator.word()]: generator.word() } },
query: [
{
operator: "equal",
field: generator.word(),
value: generator.word(),
},
],
sort: {
field: generator.word(),
order: SortOrder.DESCENDING,
@ -285,7 +293,7 @@ describe("/v2/views", () => {
{
...view,
tableId: generator.guid(),
query: { equal: { newField: "thatValue" } },
query: [{ operator: "equal", field: "newField", value: "thatValue" }],
},
{ expectStatus: 404 }
)
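These tests capture the view query format change: the Lucene-style object is replaced by an array of filter descriptors, which searchView converts back into a Lucene query with dataFilters.buildLuceneQuery at search time. Side by side (values taken from the tests in this diff):

    // Old shape stored on a view:
    const oldQuery = { equal: { newField: "thatValue" } }

    // New shape stored on a view:
    const newQuery = [{ operator: "equal", field: "newField", value: "thatValue" }]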

View File

@ -34,7 +34,7 @@ router
"/api/views/:viewName",
paramResource("viewName"),
authorized(
permissions.PermissionType.VIEW,
permissions.PermissionType.TABLE,
permissions.PermissionLevel.READ
),
rowController.fetchView

View File

@ -1,11 +1,18 @@
const setup = require("./utilities")
const { FilterConditions } = require("../steps/filter")
import * as setup from "./utilities"
import { FilterConditions } from "../steps/filter"
describe("test the filter logic", () => {
async function checkFilter(field, condition, value, pass = true) {
let res = await setup.runStep(setup.actions.FILTER.stepId,
{ field, condition, value }
)
async function checkFilter(
field: any,
condition: string,
value: any,
pass = true
) {
let res = await setup.runStep(setup.actions.FILTER.stepId, {
field,
condition,
value,
})
expect(res.result).toEqual(pass)
expect(res.success).toEqual(true)
}
@ -36,9 +43,9 @@ describe("test the filter logic", () => {
it("check date coercion", async () => {
await checkFilter(
(new Date()).toISOString(),
new Date().toISOString(),
FilterConditions.GREATER_THAN,
(new Date(-10000)).toISOString(),
new Date(-10000).toISOString(),
true
)
})

View File

@ -1,5 +1,7 @@
import newid from "./newid"
import { db as dbCore } from "@budibase/backend-core"
import { DocumentType, VirtualDocumentType } from "@budibase/types"
export { DocumentType, VirtualDocumentType } from "@budibase/types"
type Optional = string | null
@ -19,7 +21,6 @@ export const BudibaseInternalDB = {
export const SEPARATOR = dbCore.SEPARATOR
export const StaticDatabases = dbCore.StaticDatabases
export const DocumentType = dbCore.DocumentType
export const APP_PREFIX = dbCore.APP_PREFIX
export const APP_DEV_PREFIX = dbCore.APP_DEV_PREFIX
export const isDevAppID = dbCore.isDevAppID
@ -284,10 +285,22 @@ export function getMultiIDParams(ids: string[]) {
* @returns {string} The new view ID which the view doc can be stored under.
*/
export function generateViewID(tableId: string) {
return `${tableId}${SEPARATOR}${newid()}`
return `${
VirtualDocumentType.VIEW
}${SEPARATOR}${tableId}${SEPARATOR}${newid()}`
}
export function isViewID(viewId: string) {
return viewId?.split(SEPARATOR)[0] === VirtualDocumentType.VIEW
}
export function extractViewInfoFromID(viewId: string) {
if (!isViewID(viewId)) {
throw new Error("Unable to extract table ID, is not a view ID")
}
const split = viewId.split(SEPARATOR)
split.shift()
viewId = split.join(SEPARATOR)
const regex = new RegExp(`^(?<tableId>.+)${SEPARATOR}([^${SEPARATOR}]+)$`)
const res = regex.exec(viewId)
return {
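The view ID scheme changes here: generateViewID now embeds the backing table ID, so extractViewInfoFromID can recover the table from the view ID alone (the old inline lookup in the row router is deleted in favour of this). A quick illustration using the functions above (the concrete ID is made up, and it assumes the VIEW document prefix serializes to "view" and SEPARATOR is "_"):

    // Sketch: the table ID round-trips through the new view ID format.
    const tableId = "ta_abc123"                 // hypothetical, itself contains the separator
    const viewId = generateViewID(tableId)      // e.g. "view_ta_abc123_4kd93la"
    isViewID(viewId)                            // -> true
    extractViewInfoFromID(viewId).tableId       // -> "ta_abc123" (greedy match keeps the inner "_")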

View File

@ -93,6 +93,21 @@ const SCHEMA: Integration = {
},
}
const defaultTypeCasting = function (field: any, next: any) {
if (
field.type == "DATETIME" ||
field.type === "DATE" ||
field.type === "TIMESTAMP" ||
field.type === "LONGLONG"
) {
return field.string()
}
if (field.type === "BIT" && field.length === 1) {
return field.buffer()?.[0]
}
return next()
}
export function bindingTypeCoerce(bindings: any[]) {
for (let i = 0; i < bindings.length; i++) {
const binding = bindings[i]
@ -147,21 +162,8 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
delete config.rejectUnauthorized
this.config = {
...config,
typeCast: defaultTypeCasting,
multipleStatements: true,
typeCast: function (field: any, next: any) {
if (
field.type == "DATETIME" ||
field.type === "DATE" ||
field.type === "TIMESTAMP" ||
field.type === "LONGLONG"
) {
return field.string()
}
if (field.type === "BIT" && field.length === 1) {
return field.buffer()?.[0]
}
return next()
},
}
}
@ -194,6 +196,37 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
return `concat(${parts.join(", ")})`
}
defineTypeCastingFromSchema(schema: {
[key: string]: { name: string; type: string }
}): void {
if (!schema) {
return
}
this.config.typeCast = function (field: any, next: any) {
if (schema[field.name]?.name === field.name) {
if (["LONGLONG", "NEWDECIMAL", "DECIMAL"].includes(field.type)) {
if (schema[field.name]?.type === "number") {
const value = field.string()
return value ? Number(value) : null
} else {
return field.string()
}
}
}
if (
field.type == "DATETIME" ||
field.type === "DATE" ||
field.type === "TIMESTAMP"
) {
return field.string()
}
if (field.type === "BIT" && field.length === 1) {
return field.buffer()?.[0]
}
return next()
}
}
async connect() {
this.client = await mysql.createConnection(this.config)
}
@ -204,7 +237,10 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
async internalQuery(
query: SqlQuery,
opts: { connect?: boolean; disableCoercion?: boolean } = {
opts: {
connect?: boolean
disableCoercion?: boolean
} = {
connect: true,
disableCoercion: false,
}

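typeCast is the mysql2 driver hook that runs for every column of every row, so the refactor above keeps the old behaviour in defaultTypeCasting and lets defineTypeCastingFromSchema install a schema-aware variant before the connection is opened. A condensed sketch of that schema-aware cast, mirroring the branches above (the FieldSchema shape is taken from the diff; the usage line is illustrative):

type FieldSchema = { [key: string]: { name: string; type: string } }

// mysql2 typeCast hook: field exposes name/type/length plus string()/buffer() readers.
function schemaAwareTypeCast(schema: FieldSchema) {
  return function (field: any, next: any) {
    if (
      schema[field.name] &&
      ["LONGLONG", "NEWDECIMAL", "DECIMAL"].includes(field.type)
    ) {
      const value = field.string()
      // only coerce to a JS number when the Budibase schema marks the column numeric
      if (schema[field.name].type === "number") {
        return value ? Number(value) : null
      }
      return value
    }
    if (["DATETIME", "DATE", "TIMESTAMP"].includes(field.type)) {
      return field.string()
    }
    if (field.type === "BIT" && field.length === 1) {
      return field.buffer()?.[0]
    }
    return next()
  }
}

// illustrative usage:
// const client = await mysql.createConnection({ ...config, typeCast: schemaAwareTypeCast(schema) })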
View File

@ -1,9 +0,0 @@
import { Ctx, Row } from "@budibase/types"
export default async (ctx: Ctx<Row>, next: any) => {
if (ctx.request.body._viewId) {
return ctx.throw(400, "Table row endpoints cannot contain view info")
}
return next()
}

View File

@ -1,83 +0,0 @@
import { generator } from "@budibase/backend-core/tests"
import { BBRequest, FieldType, Row, Table } from "@budibase/types"
import { Next } from "koa"
import * as utils from "../../db/utils"
import noViewDataMiddleware from "../noViewData"
class TestConfiguration {
next: Next
throw: jest.Mock<(status: number, message: string) => never>
middleware: typeof noViewDataMiddleware
params: Record<string, any>
request?: Pick<BBRequest<Row>, "body">
constructor() {
this.next = jest.fn()
this.throw = jest.fn()
this.params = {}
this.middleware = noViewDataMiddleware
}
executeMiddleware(ctxRequestBody: Row) {
this.request = {
body: ctxRequestBody,
}
return this.middleware(
{
request: this.request as any,
throw: this.throw as any,
params: this.params,
} as any,
this.next
)
}
afterEach() {
jest.clearAllMocks()
}
}
describe("noViewData middleware", () => {
let config: TestConfiguration
beforeEach(() => {
config = new TestConfiguration()
})
afterEach(() => {
config.afterEach()
})
const getRandomData = () => ({
_id: generator.guid(),
name: generator.name(),
age: generator.age(),
address: generator.address(),
})
it("it should pass without view id data", async () => {
const data = getRandomData()
await config.executeMiddleware({
...data,
})
expect(config.next).toBeCalledTimes(1)
expect(config.throw).not.toBeCalled()
})
it("it should throw an error if _viewid is provided", async () => {
const data = getRandomData()
await config.executeMiddleware({
_viewId: generator.guid(),
...data,
})
expect(config.throw).toBeCalledTimes(1)
expect(config.throw).toBeCalledWith(
400,
"Table row endpoints cannot contain view info"
)
expect(config.next).not.toBeCalled()
})
})

View File

@ -117,7 +117,7 @@ describe("trimViewRowInfo middleware", () => {
})
expect(config.request?.body).toEqual(data)
expect(config.params.tableId).toEqual(table._id)
expect(config.params.sourceId).toEqual(table._id)
expect(config.next).toBeCalledTimes(1)
expect(config.throw).not.toBeCalled()
@ -143,32 +143,9 @@ describe("trimViewRowInfo middleware", () => {
name: data.name,
address: data.address,
})
expect(config.params.tableId).toEqual(table._id)
expect(config.params.sourceId).toEqual(table._id)
expect(config.next).toBeCalledTimes(1)
expect(config.throw).not.toBeCalled()
})
it("it should throw an error if no viewid is provided on the body", async () => {
const data = getRandomData()
await config.executeMiddleware(viewId, {
...data,
})
expect(config.throw).toBeCalledTimes(1)
expect(config.throw).toBeCalledWith(400, "_viewId is required")
expect(config.next).not.toBeCalled()
})
it("it should throw an error if no viewid is provided on the parameters", async () => {
const data = getRandomData()
await config.executeMiddleware(undefined as any, {
_viewId: viewId,
...data,
})
expect(config.throw).toBeCalledTimes(1)
expect(config.throw).toBeCalledWith(400, "viewId path is required")
expect(config.next).not.toBeCalled()
})
})

View File

@ -3,26 +3,35 @@ import * as utils from "../db/utils"
import sdk from "../sdk"
import { db } from "@budibase/backend-core"
import { Next } from "koa"
import { getTableId } from "../api/controllers/row/utils"
export default async (ctx: Ctx<Row>, next: Next) => {
const { body } = ctx.request
const { _viewId: viewId } = body
let { _viewId: viewId } = body
const possibleViewId = getTableId(ctx)
if (utils.isViewID(possibleViewId)) {
viewId = possibleViewId
}
// nothing to do, it is not a view (just a table ID)
if (!viewId) {
return ctx.throw(400, "_viewId is required")
return next()
}
if (!ctx.params.viewId) {
return ctx.throw(400, "viewId path is required")
}
const { tableId } = utils.extractViewInfoFromID(viewId)
const { tableId } = utils.extractViewInfoFromID(ctx.params.viewId)
// don't need to trim delete requests
if (ctx?.method?.toLowerCase() !== "delete") {
const { _viewId, ...trimmedView } = await trimViewFields(
viewId,
tableId,
body
)
ctx.request.body = trimmedView
ctx.params.tableId = tableId
}
ctx.params.sourceId = tableId
return next()
}

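The middleware no longer rejects plain table requests: a view ID is looked for in the body and in the path, and only when one is found are the view-only fields trimmed and the params rewritten to point at the backing table. A minimal sketch of that branching, with getTableId, isViewID and trimViewFields reduced to simple stand-ins (the "view_" prefix and "_" separator are assumptions carried over from the ID sketch above):

// Koa-style middleware sketch; helper names are stand-ins, not the real utilities.
const looksLikeViewId = (id?: string) => !!id && id.startsWith("view_")

async function trimViewRowInfoSketch(ctx: any, next: () => Promise<any>) {
  let viewId: string | undefined = ctx.request.body._viewId
  const possibleViewId: string = ctx.params.viewId || ctx.params.tableId // getTableId() stand-in
  if (looksLikeViewId(possibleViewId)) {
    viewId = possibleViewId
  }
  if (!viewId) {
    return next() // plain table request - nothing to trim
  }
  // simplified extraction: "view_<tableId>_<unique>" -> "<tableId>"
  const tableId = ctx.params.viewId.split("_").slice(1, -1).join("_")
  if (ctx.method?.toLowerCase() !== "delete") {
    const { _viewId, ...trimmed } = ctx.request.body // trimViewFields() simplified
    ctx.request.body = trimmed
    ctx.params.tableId = tableId
  }
  ctx.params.sourceId = tableId
  return next()
}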
View File

@ -1,23 +1,9 @@
import { SearchFilters, SortOrder, SortType } from "@budibase/types"
import { SearchFilters, SearchParams } from "@budibase/types"
import { isExternalTable } from "../../../integrations/utils"
import * as internal from "./search/internal"
import * as external from "./search/external"
import { Format } from "../../../api/controllers/view/exporters"
export interface SearchParams {
tableId: string
paginate?: boolean
query: SearchFilters
bookmark?: string
limit?: number
sort?: string
sortOrder?: SortOrder
sortType?: SortType
version?: string
disableEscaping?: boolean
fields?: string[]
}
export interface ViewParams {
calculation: string
group: string

View File

@ -6,6 +6,7 @@ import {
IncludeRelationship,
Row,
SearchFilters,
SearchParams,
} from "@budibase/types"
import * as exporters from "../../../../api/controllers/view/exporters"
import sdk from "../../../../sdk"
@ -13,7 +14,7 @@ import { handleRequest } from "../../../../api/controllers/row/external"
import { breakExternalTableId } from "../../../../integrations/utils"
import { cleanExportRows } from "../utils"
import { utils } from "@budibase/shared-core"
import { ExportRowsParams, ExportRowsResult, SearchParams } from "../search"
import { ExportRowsParams, ExportRowsResult } from "../search"
import { HTTPError, db } from "@budibase/backend-core"
import pick from "lodash/pick"

View File

@ -12,7 +12,7 @@ import {
} from "../../../../db/utils"
import { getGlobalUsersFromMetadata } from "../../../../utilities/global"
import { outputProcessing } from "../../../../utilities/rowProcessor"
import { Database, Row, Table } from "@budibase/types"
import { Database, Row, Table, SearchParams } from "@budibase/types"
import { cleanExportRows } from "../utils"
import {
Format,
@ -28,7 +28,7 @@ import {
getFromMemoryDoc,
} from "../../../../api/controllers/view/utils"
import sdk from "../../../../sdk"
import { ExportRowsParams, ExportRowsResult, SearchParams } from "../search"
import { ExportRowsParams, ExportRowsResult } from "../search"
import pick from "lodash/pick"
export async function search(options: SearchParams) {

View File

@ -1,14 +1,15 @@
import { GenericContainer } from "testcontainers"
import {
Datasource,
EmptyFilterOption,
FieldType,
Row,
SourceName,
Table,
SearchParams,
} from "@budibase/types"
import TestConfiguration from "../../../../../tests/utilities/TestConfiguration"
import { SearchParams } from "../../search"
import { search } from "../external"
import {
expectAnyExternalColsAttributes,
@ -122,22 +123,6 @@ describe.skip("external", () => {
})
})
it("empty filters search returns no data", async () => {
await config.doInContext(config.appId, async () => {
const tableId = config.table!._id!
const searchParams: SearchParams = {
tableId,
query: {
onEmptyFilter: EmptyFilterOption.RETURN_NONE,
},
}
const result = await search(searchParams)
expect(result.rows).toHaveLength(0)
})
})
it("querying by fields will always return data attribute columns", async () => {
await config.doInContext(config.appId, async () => {
const tableId = config.table!._id!

View File

@ -1,6 +1,5 @@
import { FieldType, Row, Table } from "@budibase/types"
import { FieldType, Row, Table, SearchParams } from "@budibase/types"
import TestConfiguration from "../../../../../tests/utilities/TestConfiguration"
import { SearchParams } from "../../search"
import { search } from "../internal"
import {
expectAnyInternalColsAttributes,

View File

@ -1,17 +1,20 @@
import { HTTPError, context } from "@budibase/backend-core"
import { FieldSchema, TableSchema, View, ViewV2 } from "@budibase/types"
import {
FieldSchema,
RenameColumn,
TableSchema,
View,
ViewV2,
} from "@budibase/types"
import { context, HTTPError } from "@budibase/backend-core"
import sdk from "../../../sdk"
import * as utils from "../../../db/utils"
import merge from "lodash/merge"
export async function get(viewId: string): Promise<ViewV2 | undefined> {
const { tableId } = utils.extractViewInfoFromID(viewId)
const table = await sdk.tables.getTable(tableId)
const views = Object.values(table.views!)
const view = views.find(v => isV2(v) && v.id === viewId) as ViewV2 | undefined
return view
return views.find(v => isV2(v) && v.id === viewId) as ViewV2 | undefined
}
export async function create(
@ -106,3 +109,37 @@ export function enrichSchema(view: View | ViewV2, tableSchema: TableSchema) {
schema: schema,
}
}
export function syncSchema(
view: ViewV2,
schema: TableSchema,
renameColumn: RenameColumn | undefined
): ViewV2 {
if (renameColumn) {
if (view.columns) {
view.columns[view.columns.indexOf(renameColumn.old)] =
renameColumn.updated
}
if (view.schemaUI) {
view.schemaUI[renameColumn.updated] = view.schemaUI[renameColumn.old]
delete view.schemaUI[renameColumn.old]
}
}
if (view.schemaUI) {
for (const fieldName of Object.keys(view.schemaUI)) {
if (!schema[fieldName]) {
delete view.schemaUI[fieldName]
}
}
for (const fieldName of Object.keys(schema)) {
if (!view.schemaUI[fieldName]) {
view.schemaUI[fieldName] = { visible: false }
}
}
}
view.columns = view.columns?.filter(x => schema[x])
return view
}

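syncSchema reconciles a stored view with the latest table schema: a rename is remapped, columns that no longer exist are dropped, and newly added table columns land in schemaUI as hidden. A condensed re-statement of those rules with minimal local types standing in for the real @budibase/types interfaces (only the fields the function touches):

type RenameColumn = { old: string; updated: string }
type ViewSketch = {
  columns?: string[]
  schemaUI?: Record<string, { visible: boolean; width?: number; icon?: string }>
}
type SchemaSketch = Record<string, { name: string }>

// Condensed re-statement of the sync rules exercised by the tests below.
function syncSchemaSketch(
  view: ViewSketch,
  schema: SchemaSketch,
  rename?: RenameColumn
): ViewSketch {
  if (rename && view.columns) {
    const idx = view.columns.indexOf(rename.old)
    if (idx !== -1) {
      view.columns[idx] = rename.updated
    }
  }
  if (rename && view.schemaUI) {
    view.schemaUI[rename.updated] = view.schemaUI[rename.old]
    delete view.schemaUI[rename.old]
  }
  if (view.schemaUI) {
    for (const field of Object.keys(view.schemaUI)) {
      if (!schema[field]) delete view.schemaUI[field] // column removed from the table
    }
    for (const field of Object.keys(schema)) {
      if (!view.schemaUI[field]) view.schemaUI[field] = { visible: false } // new column starts hidden
    }
  }
  view.columns = view.columns?.filter(c => !!schema[c])
  return view
}

// e.g. a rename of "description" -> "updatedDescription" keeps its width/icon settings under the new key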
View File

@ -1,9 +1,9 @@
import { FieldType, Table, ViewV2 } from "@budibase/types"
import _ from "lodash"
import { FieldType, Table, TableSchema, ViewV2 } from "@budibase/types"
import { generator } from "@budibase/backend-core/tests"
import { enrichSchema } from ".."
import { enrichSchema, syncSchema } from ".."
describe("table sdk", () => {
describe("enrichViewSchemas", () => {
const basicTable: Table = {
_id: generator.guid(),
name: "TestTable",
@ -48,6 +48,7 @@ describe("table sdk", () => {
},
}
describe("enrichViewSchemas", () => {
it("should fetch the default schema if not overriden", async () => {
const tableId = basicTable._id!
const view: ViewV2 = {
@ -280,4 +281,294 @@ describe("table sdk", () => {
)
})
})
describe("syncSchema", () => {
const basicView: ViewV2 = {
version: 2,
id: generator.guid(),
name: generator.guid(),
tableId: basicTable._id!,
}
describe("view without schema", () => {
it("no table schema changes will not amend the view", () => {
const view: ViewV2 = {
...basicView,
columns: ["name", "id", "description"],
}
const result = syncSchema(
_.cloneDeep(view),
basicTable.schema,
undefined
)
expect(result).toEqual(view)
})
it("adding new columns will not change the view schema", () => {
const view: ViewV2 = {
...basicView,
columns: ["name", "id", "description"],
}
const newTableSchema = {
...basicTable.schema,
newField1: {
type: FieldType.STRING,
name: "newField1",
visible: true,
},
newField2: {
type: FieldType.NUMBER,
name: "newField2",
visible: false,
},
}
const result = syncSchema(_.cloneDeep(view), newTableSchema, undefined)
expect(result).toEqual({
...view,
schemaUI: undefined,
})
})
it("deleting columns will not change the view schema", () => {
const view: ViewV2 = {
...basicView,
columns: ["name", "id", "description"],
}
const { name, description, ...newTableSchema } = basicTable.schema
const result = syncSchema(_.cloneDeep(view), newTableSchema, undefined)
expect(result).toEqual({
...view,
columns: ["id"],
schemaUI: undefined,
})
})
it("renaming mapped columns will update the view column mapping", () => {
const view: ViewV2 = {
...basicView,
columns: ["name", "id", "description"],
}
const { description, ...newTableSchema } = {
...basicTable.schema,
updatedDescription: {
...basicTable.schema.description,
name: "updatedDescription",
},
} as TableSchema
const result = syncSchema(_.cloneDeep(view), newTableSchema, {
old: "description",
updated: "updatedDescription",
})
expect(result).toEqual({
...view,
columns: ["name", "id", "updatedDescription"],
schemaUI: undefined,
})
})
})
describe("view with schema", () => {
it("no table schema changes will not amend the view", () => {
const view: ViewV2 = {
...basicView,
columns: ["name", "id", "description"],
schemaUI: {
name: { visible: true, width: 100 },
id: { visible: true, width: 20 },
description: { visible: false },
hiddenField: { visible: false },
},
}
const result = syncSchema(
_.cloneDeep(view),
basicTable.schema,
undefined
)
expect(result).toEqual(view)
})
it("adding new columns will add them as not visible to the view", () => {
const view: ViewV2 = {
...basicView,
columns: ["name", "id", "description"],
schemaUI: {
name: { visible: true, width: 100 },
id: { visible: true, width: 20 },
description: { visible: false },
hiddenField: { visible: false },
},
}
const newTableSchema = {
...basicTable.schema,
newField1: {
type: FieldType.STRING,
name: "newField1",
visible: true,
},
newField2: {
type: FieldType.NUMBER,
name: "newField2",
visible: false,
},
}
const result = syncSchema(_.cloneDeep(view), newTableSchema, undefined)
expect(result).toEqual({
...view,
schemaUI: {
...view.schemaUI,
newField1: { visible: false },
newField2: { visible: false },
},
})
})
it("deleting columns will remove them from the UI", () => {
const view: ViewV2 = {
...basicView,
columns: ["name", "id", "description"],
schemaUI: {
name: { visible: true, width: 100 },
id: { visible: true, width: 20 },
description: { visible: false },
hiddenField: { visible: false },
},
}
const { name, description, ...newTableSchema } = basicTable.schema
const result = syncSchema(_.cloneDeep(view), newTableSchema, undefined)
expect(result).toEqual({
...view,
columns: ["id"],
schemaUI: {
...view.schemaUI,
name: undefined,
description: undefined,
},
})
})
it("can handle additions and deletions at the same them UI", () => {
const view: ViewV2 = {
...basicView,
columns: ["name", "id", "description"],
schemaUI: {
name: { visible: true, width: 100 },
id: { visible: true, width: 20 },
description: { visible: false },
hiddenField: { visible: false },
},
}
const { name, description, ...newTableSchema } = {
...basicTable.schema,
newField1: {
type: FieldType.STRING,
name: "newField1",
visible: true,
},
} as TableSchema
const result = syncSchema(_.cloneDeep(view), newTableSchema, undefined)
expect(result).toEqual({
...view,
columns: ["id"],
schemaUI: {
...view.schemaUI,
name: undefined,
description: undefined,
newField1: { visible: false },
},
})
})
it("renaming mapped columns will update the view column mapping and it's schema", () => {
const view: ViewV2 = {
...basicView,
columns: ["name", "id", "description"],
schemaUI: {
name: { visible: true },
id: { visible: true },
description: { visible: true, width: 150, icon: "ic-any" },
hiddenField: { visible: false },
},
}
const { description, ...newTableSchema } = {
...basicTable.schema,
updatedDescription: {
...basicTable.schema.description,
name: "updatedDescription",
},
} as TableSchema
const result = syncSchema(_.cloneDeep(view), newTableSchema, {
old: "description",
updated: "updatedDescription",
})
expect(result).toEqual({
...view,
columns: ["name", "id", "updatedDescription"],
schemaUI: {
...view.schemaUI,
description: undefined,
updatedDescription: { visible: true, width: 150, icon: "ic-any" },
},
})
})
it("changing no UI schema will not affect the view", () => {
const view: ViewV2 = {
...basicView,
columns: ["name", "id", "description"],
schemaUI: {
name: { visible: true, width: 100 },
id: { visible: true, width: 20 },
description: { visible: false },
hiddenField: { visible: false },
},
}
const result = syncSchema(
_.cloneDeep(view),
{
...basicTable.schema,
id: {
...basicTable.schema.id,
type: FieldType.NUMBER,
},
},
undefined
)
expect(result).toEqual(view)
})
it("changing table column UI fields will not affect the view schema", () => {
const view: ViewV2 = {
...basicView,
columns: ["name", "id", "description"],
schemaUI: {
name: { visible: true, width: 100 },
id: { visible: true, width: 20 },
description: { visible: false },
hiddenField: { visible: false },
},
}
const result = syncSchema(
_.cloneDeep(view),
{
...basicTable.schema,
id: {
...basicTable.schema.id,
visible: !basicTable.schema.id.visible,
},
},
undefined
)
expect(result).toEqual(view)
})
})
})
})

View File

@ -1,4 +1,4 @@
import { PatchRowRequest } from "@budibase/types"
import { PatchRowRequest, SaveRowRequest, Row } from "@budibase/types"
import TestConfiguration from "../TestConfiguration"
import { TestAPI } from "./base"
@ -8,12 +8,12 @@ export class RowAPI extends TestAPI {
}
get = async (
tableId: string,
sourceId: string,
rowId: string,
{ expectStatus } = { expectStatus: 200 }
) => {
const request = this.request
.get(`/api/${tableId}/rows/${rowId}`)
.get(`/api/${sourceId}/rows/${rowId}`)
.set(this.config.defaultHeaders())
.expect(expectStatus)
if (expectStatus !== 404) {
@ -22,16 +22,43 @@ export class RowAPI extends TestAPI {
return request
}
save = async (
sourceId: string,
row: SaveRowRequest,
{ expectStatus } = { expectStatus: 200 }
): Promise<Row> => {
const resp = await this.request
.post(`/api/${sourceId}/rows`)
.send(row)
.set(this.config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(expectStatus)
return resp.body as Row
}
patch = async (
tableId: string,
sourceId: string,
row: PatchRowRequest,
{ expectStatus } = { expectStatus: 200 }
) => {
return this.request
.patch(`/api/${tableId}/rows`)
.patch(`/api/${sourceId}/rows`)
.send(row)
.set(this.config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(expectStatus)
}
delete = async (
sourceId: string,
rows: Row[],
{ expectStatus } = { expectStatus: 200 }
) => {
return this.request
.delete(`/api/${sourceId}/rows`)
.send({ rows })
.set(this.config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(expectStatus)
}
}

View File

@ -1,13 +1,8 @@
import {
CreateViewRequest,
SortOrder,
SortType,
UpdateViewRequest,
DeleteRowRequest,
PatchRowRequest,
PatchRowResponse,
Row,
ViewV2,
SearchViewRowRequest,
} from "@budibase/types"
import TestConfiguration from "../TestConfiguration"
import { TestAPI } from "./base"
@ -81,75 +76,14 @@ export class ViewV2API extends TestAPI {
search = async (
viewId: string,
options?: {
sort: {
column: string
order?: SortOrder
type?: SortType
}
},
params?: SearchViewRowRequest,
{ expectStatus } = { expectStatus: 200 }
) => {
const qs: [string, any][] = []
if (options?.sort.column) {
qs.push(["sort_column", options.sort.column])
}
if (options?.sort.order) {
qs.push(["sort_order", options.sort.order])
}
if (options?.sort.type) {
qs.push(["sort_type", options.sort.type])
}
let url = `/api/v2/views/${viewId}/search`
if (qs.length) {
url += "?" + qs.map(q => q.join("=")).join("&")
}
return this.request
.get(url)
.post(`/api/v2/views/${viewId}/search`)
.send(params)
.set(this.config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(expectStatus)
}
row = {
create: async (
viewId: string,
row: Row,
{ expectStatus } = { expectStatus: 200 }
): Promise<Row> => {
const result = await this.request
.post(`/api/v2/views/${viewId}/rows`)
.send(row)
.set(this.config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(expectStatus)
return result.body as Row
},
update: async (
viewId: string,
rowId: string,
row: PatchRowRequest,
{ expectStatus } = { expectStatus: 200 }
): Promise<PatchRowResponse> => {
const result = await this.request
.patch(`/api/v2/views/${viewId}/rows/${rowId}`)
.send(row)
.set(this.config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(expectStatus)
return result.body as PatchRowResponse
},
delete: async (
viewId: string,
body: DeleteRowRequest,
{ expectStatus } = { expectStatus: 200 }
): Promise<any> => {
const result = await this.request
.delete(`/api/v2/views/${viewId}/rows`)
.send(body)
.set(this.config.defaultHeaders())
.expect(expectStatus)
return result.body
},
}
}

View File

@ -11,6 +11,12 @@ export interface QueryEvent {
queryId: string
environmentVariables?: Record<string, string>
ctx?: any
schema?: {
[key: string]: {
name: string
type: string
}
}
}
export interface QueryVariable {

View File

@ -8,6 +8,7 @@ import { context, cache, auth } from "@budibase/backend-core"
import { getGlobalIDFromUserMetadataID } from "../db/utils"
import sdk from "../sdk"
import { cloneDeep } from "lodash/fp"
import { SourceName } from "@budibase/types"
import { isSQL } from "../integrations/utils"
import { interpolateSQL } from "../integrations/queries/sql"
@ -28,6 +29,7 @@ class QueryRunner {
hasRerun: boolean
hasRefreshedOAuth: boolean
hasDynamicVariables: boolean
schema: any
constructor(input: QueryEvent, flags = { noRecursiveQuery: false }) {
this.datasource = input.datasource
@ -37,6 +39,7 @@ class QueryRunner {
this.pagination = input.pagination
this.transformer = input.transformer
this.queryId = input.queryId
this.schema = input.schema
this.noRecursiveQuery = flags.noRecursiveQuery
this.cachedVariables = []
// Additional context items for enrichment
@ -51,7 +54,7 @@ class QueryRunner {
}
async execute(): Promise<any> {
let { datasource, fields, queryVerb, transformer } = this
let { datasource, fields, queryVerb, transformer, schema } = this
let datasourceClone = cloneDeep(datasource)
let fieldsClone = cloneDeep(fields)
@ -70,6 +73,9 @@ class QueryRunner {
const integration = new Integration(datasourceClone.config)
// define the type casting from the schema
integration.defineTypeCastingFromSchema?.(schema)
// pre-query, make sure datasource variables are added to parameters
const parameters = await this.addDatasourceVariables()

View File

@ -1,5 +1,5 @@
import { permissions, roles } from "@budibase/backend-core"
import { DocumentType } from "../db/utils"
import { DocumentType, VirtualDocumentType } from "../db/utils"
export const CURRENTLY_SUPPORTED_LEVELS: string[] = [
permissions.PermissionLevel.WRITE,
@ -11,9 +11,10 @@ export function getPermissionType(resourceId: string) {
const docType = Object.values(DocumentType).filter(docType =>
resourceId.startsWith(docType)
)[0]
switch (docType) {
switch (docType as DocumentType | VirtualDocumentType) {
case DocumentType.TABLE:
case DocumentType.ROW:
case VirtualDocumentType.VIEW:
return permissions.PermissionType.TABLE
case DocumentType.AUTOMATION:
return permissions.PermissionType.AUTOMATION
@ -22,9 +23,6 @@ export function getPermissionType(resourceId: string) {
case DocumentType.QUERY:
case DocumentType.DATASOURCE:
return permissions.PermissionType.QUERY
default:
// views don't have an ID, will end up here
return permissions.PermissionType.VIEW
}
}

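With PermissionType.VIEW removed, permission resolution keys off the resource ID prefix alone, and virtual view documents fall under TABLE permissions. A small illustrative mapping of that rule; only the "view" prefix is confirmed by this diff, the other prefixes ("ta", "ro", "au") are assumptions for the example:

// Hypothetical prefix table for illustration; real values come from DocumentType/VirtualDocumentType.
const PREFIX_TO_PERMISSION: Record<string, string> = {
  ta: "table", // tables
  ro: "table", // rows
  view: "table", // views now piggy-back on table permissions
  au: "automation",
}

function permissionTypeFor(resourceId: string): string | undefined {
  const prefix = Object.keys(PREFIX_TO_PERMISSION).find(p => resourceId.startsWith(p))
  return prefix ? PREFIX_TO_PERMISSION[prefix] : undefined
}

// permissionTypeFor("view_ta_abc123_f81d4fae") -> "table"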
View File

@ -32,7 +32,18 @@
"target": "build"
}
]
}
},
"dev:builder": {
"dependsOn": [
{
"projects": [
"@budibase/types"
],
"target": "build"
}
]
}
}
}
}

View File

@ -434,3 +434,4 @@ export const hasFilters = (query?: SearchQuery) => {
}
return false
}

View File

@ -1,7 +1,7 @@
{
"compilerOptions": {
"target": "es6",
"moduleResolution": "node",
"module": "commonjs",
"lib": ["es2020"],
"strict": true,
"noImplicitAny": true,

View File

@ -1,5 +1,8 @@
import { SearchParams } from "../../../sdk"
import { Row } from "../../../documents"
export interface SaveRowRequest extends Row {}
export interface PatchRowRequest extends Row {
_id: string
_rev: string
@ -8,6 +11,14 @@ export interface PatchRowRequest extends Row {
export interface PatchRowResponse extends Row {}
export interface SearchResponse {
export interface SearchRowRequest extends Omit<SearchParams, "tableId"> {}
export interface SearchViewRowRequest
extends Pick<
SearchRowRequest,
"sort" | "sortOrder" | "sortType" | "limit" | "bookmark" | "paginate"
> {}
export interface SearchRowResponse {
rows: any[]
}

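SearchViewRowRequest narrows the general search body down to sort and pagination fields, matching the new POST /api/v2/views/:viewId/search endpoint used by the view API test helper earlier in this diff. An illustrative request body under that type; the concrete values are made up:

import { SearchViewRowRequest, SortOrder } from "@budibase/types"

// illustrative values only
const viewSearchBody: SearchViewRowRequest = {
  sort: "name",
  sortOrder: SortOrder.ASCENDING,
  limit: 10,
  paginate: true,
}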
View File

@ -1,4 +1,10 @@
import { Table, TableSchema, View, ViewV2 } from "../../../documents"
import {
Table,
TableRequest,
TableSchema,
View,
ViewV2,
} from "../../../documents"
interface ViewV2Response extends ViewV2 {
schema: TableSchema
@ -11,3 +17,7 @@ export interface TableResponse extends Table {
}
export type FetchTablesResponse = TableResponse[]
export interface SaveTableRequest extends TableRequest {}
export type SaveTableResponse = Table

View File

@ -1,6 +1,5 @@
import { SortOrder, SortType } from "../../api"
import { SearchFilters } from "../../sdk"
import { TableSchema, UIFieldMetadata } from "./table"
import { SearchFilter, SortOrder, SortType } from "../../api"
import { UIFieldMetadata } from "./table"
export interface View {
name: string
@ -20,7 +19,7 @@ export interface ViewV2 {
name: string
primaryDisplay?: string
tableId: string
query?: SearchFilters
query?: SearchFilter[]
sort?: {
field: string
order?: SortOrder

View File

@ -39,6 +39,12 @@ export enum DocumentType {
AUDIT_LOG = "al",
}
// these documents don't really exist; they are part of other
// documents or enriched into existence as part of get requests
export enum VirtualDocumentType {
VIEW = "view",
}
export interface Document {
_id?: string
_rev?: string

View File

@ -166,6 +166,12 @@ export interface IntegrationBase {
delete?(query: any): Promise<any[] | any>
testConnection?(): Promise<ConnectionInfo>
getExternalSchema?(): Promise<string>
defineTypeCastingFromSchema?(schema: {
[key: string]: {
name: string
type: string
}
}): void
}
export interface DatasourcePlus extends IntegrationBase {

View File

@ -19,3 +19,4 @@ export * from "./user"
export * from "./cli"
export * from "./websocket"
export * from "./permissions"
export * from "./row"

View File

@ -14,6 +14,5 @@ export enum PermissionType {
WEBHOOK = "webhook",
BUILDER = "builder",
GLOBAL_BUILDER = "globalBuilder",
VIEW = "view",
QUERY = "query",
}

View File

@ -0,0 +1,16 @@
import { SortOrder, SortType } from "../api"
import { SearchFilters } from "./search"
export interface SearchParams {
tableId: string
paginate?: boolean
query: SearchFilters
bookmark?: string
limit?: number
sort?: string
sortOrder?: SortOrder
sortType?: SortType
version?: string
disableEscaping?: boolean
fields?: string[]
}

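This is now the canonical home for the search options that several sdk/search modules above previously declared locally; callers import it from @budibase/types instead. A minimal construction sketch (the table ID and filter values are illustrative):

import { SearchParams, SortOrder } from "@budibase/types"

// illustrative values only
const params: SearchParams = {
  tableId: "ta_abc123",
  query: { equal: { name: "Budibase" } },
  sort: "name",
  sortOrder: SortOrder.DESCENDING,
  limit: 10,
}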
View File

@ -1,4 +1,4 @@
FROM node:14-alpine
FROM node:18-alpine
LABEL com.centurylinklabs.watchtower.lifecycle.pre-check="scripts/watchtower-hooks/pre-check.sh"
LABEL com.centurylinklabs.watchtower.lifecycle.pre-update="scripts/watchtower-hooks/pre-update.sh"

View File

@ -28,8 +28,8 @@ describe("datasource validators", () => {
8000
)}`,
}
env._set("AWS_ACCESS_KEY_ID", "mocked_key")
env._set("AWS_SECRET_ACCESS_KEY", "mocked_secret")
env._set("AWS_ACCESS_KEY_ID", "mockedkey")
env._set("AWS_SECRET_ACCESS_KEY", "mockedsecret")
})
it("test valid connection string", async () => {

View File

@ -43,7 +43,6 @@ function runBuild(entry, outfile) {
TsconfigPathsPlugin({ tsconfig: tsconfigPathPluginContent }),
nodeExternalsPlugin(),
],
target: "node14",
preserveSymlinks: true,
loader: {
".svelte": "copy",

View File

@ -10283,7 +10283,7 @@ denque@^1.1.0:
resolved "https://registry.yarnpkg.com/denque/-/denque-1.5.1.tgz#07f670e29c9a78f8faecb2566a1e2c11929c5cbf"
integrity sha512-XwE+iZ4D6ZUB7mfYRMb5wByE8L74HCn30FBN7sWnXksWc1LO1bPDl67pBR9o/kC4z/xSNAwkMYcGgqDV3BE3Hw==
denque@^2.0.1, denque@^2.1.0:
denque@^2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/denque/-/denque-2.1.0.tgz#e93e1a6569fb5e66f16a3c2a2964617d349d6ab1"
integrity sha512-HVQE3AAb/pxF8fQAoiqpvg9i3evqug3hoiwakOyZAwJm+6vZehbkYXZ0l4JxS+I3QxM97v5aaRNhj8v5oBhekw==
@ -16986,6 +16986,11 @@ long@^5.0.0:
resolved "https://registry.yarnpkg.com/long/-/long-5.2.1.tgz#e27595d0083d103d2fa2c20c7699f8e0c92b897f"
integrity sha512-GKSNGeNAtw8IryjjkhZxuKB3JzlcLTwjtiQCHKvqQet81I93kXslhDQruGI/QsddO83mcDToBVy7GqGS/zYf/A==
long@^5.2.1:
version "5.2.3"
resolved "https://registry.yarnpkg.com/long/-/long-5.2.3.tgz#a3ba97f3877cf1d778eccbcb048525ebb77499e1"
integrity sha512-lcHwpNoggQTObv5apGNCTdJrO69eHOZMi4BNC+rTLER8iHAqGrUVeLh/irVIM7zTw2bOXA8T6uNPeujwOLg/2Q==
lookpath@1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/lookpath/-/lookpath-1.1.0.tgz#932d68371a2f0b4a5644f03d6a2b4728edba96d2"
@ -17052,6 +17057,11 @@ lru-cache@^7.4.4, lru-cache@^7.5.1, lru-cache@^7.7.1:
resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-7.18.3.tgz#f793896e0fd0e954a59dfdd82f0773808df6aa89"
integrity sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==
lru-cache@^8.0.0:
version "8.0.5"
resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-8.0.5.tgz#983fe337f3e176667f8e567cfcce7cb064ea214e"
integrity sha512-MhWWlVnuab1RG5/zMRRcVGXZLCXrZTgfwMikgzCegsPnG62yDQo5JnqKkrK4jO5iKqDAZGItAqN5CtKBCBWRUA==
lru-cache@^9.0.0:
version "9.0.1"
resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-9.0.1.tgz#ac061ed291f8b9adaca2b085534bb1d3b61bef83"
@ -17952,17 +17962,17 @@ mute-stream@~1.0.0:
resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-1.0.0.tgz#e31bd9fe62f0aed23520aa4324ea6671531e013e"
integrity sha512-avsJQhyd+680gKXyG/sQc0nXaC6rBkPOfyHYcFb9+hdkqQkR9bdnkJ0AMZhke0oesPqIO+mFFJ+IdBc7mst4IA==
mysql2@2.3.3:
version "2.3.3"
resolved "https://registry.yarnpkg.com/mysql2/-/mysql2-2.3.3.tgz#944f3deca4b16629052ff8614fbf89d5552545a0"
integrity sha512-wxJUev6LgMSgACDkb/InIFxDprRa6T95+VEoR+xPvtngtccNH2dGjEB/fVZ8yg1gWv1510c9CvXuJHi5zUm0ZA==
mysql2@3.5.2:
version "3.5.2"
resolved "https://registry.yarnpkg.com/mysql2/-/mysql2-3.5.2.tgz#a06050e1514e9ac15711a8b883ffd51cb44b2dc8"
integrity sha512-cptobmhYkYeTBIFp2c0piw2+gElpioga1rUw5UidHvo8yaHijMZoo8A3zyBVoo/K71f7ZFvrShA9iMIy9dCzCA==
dependencies:
denque "^2.0.1"
denque "^2.1.0"
generate-function "^2.3.1"
iconv-lite "^0.6.3"
long "^4.0.0"
lru-cache "^6.0.0"
named-placeholders "^1.1.2"
long "^5.2.1"
lru-cache "^8.0.0"
named-placeholders "^1.1.3"
seq-queue "^0.0.5"
sqlstring "^2.3.2"
@ -17975,7 +17985,7 @@ mz@^2.4.0, mz@^2.7.0:
object-assign "^4.0.1"
thenify-all "^1.0.0"
named-placeholders@^1.1.2:
named-placeholders@^1.1.3:
version "1.1.3"
resolved "https://registry.yarnpkg.com/named-placeholders/-/named-placeholders-1.1.3.tgz#df595799a36654da55dda6152ba7a137ad1d9351"
integrity sha512-eLoBxg6wE/rZkJPhU/xRX1WTpkFEwDJEN96oxFrTsqBdbT5ec295Q+CoHrL9IT0DipqKhmGcaZmwOt8OON5x1w==