Merge branch 'develop' of github.com:Budibase/budibase into views-v2-frontend

Andrew Kingston · 2023-08-11 12:12:56 +01:00 · commit dcb238251e
65 changed files with 612 additions and 699 deletions

View File

@ -18,6 +18,8 @@ env:
BRANCH: ${{ github.event.pull_request.head.ref }}
BASE_BRANCH: ${{ github.event.pull_request.base.ref}}
PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
NX_BASE_BRANCH: origin/${{ github.base_ref }}
USE_NX_AFFECTED: ${{ github.event_name == 'pull_request' && github.base_ref != 'master'}}
jobs:
lint:
@ -50,9 +52,12 @@ jobs:
with:
submodules: true
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
fetch-depth: 0
- name: Checkout repo only
uses: actions/checkout@v3
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase'
with:
fetch-depth: 0
- name: Use Node.js 18.x
uses: actions/setup-node@v3
@ -60,10 +65,19 @@ jobs:
node-version: 18.x
cache: "yarn"
- run: yarn --frozen-lockfile
# Run build all the projects
- run: yarn build
- name: Build
run: |
yarn build
# Check the types of the projects built via esbuild
- run: yarn check:types
- name: Check types
run: |
if ${{ env.USE_NX_AFFECTED }}; then
yarn check:types --since=${{ env.NX_BASE_BRANCH }}
else
yarn check:types
fi
test-libraries:
runs-on: ubuntu-latest
@ -74,9 +88,12 @@ jobs:
with:
submodules: true
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
fetch-depth: 0
- name: Checkout repo only
uses: actions/checkout@v3
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase'
with:
fetch-depth: 0
- name: Use Node.js 18.x
uses: actions/setup-node@v3
@ -84,7 +101,13 @@ jobs:
node-version: 18.x
cache: "yarn"
- run: yarn --frozen-lockfile
- run: yarn test --ignore=@budibase/worker --ignore=@budibase/server --ignore=@budibase/pro
- name: Test
run: |
if ${{ env.USE_NX_AFFECTED }}; then
yarn test --ignore=@budibase/worker --ignore=@budibase/server --ignore=@budibase/pro --since=${{ env.NX_BASE_BRANCH }}
else
yarn test --ignore=@budibase/worker --ignore=@budibase/server --ignore=@budibase/pro
fi
- uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }} # not required for public repos
@ -100,9 +123,12 @@ jobs:
with:
submodules: true
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
fetch-depth: 0
- name: Checkout repo only
uses: actions/checkout@v3
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase'
with:
fetch-depth: 0
- name: Use Node.js 18.x
uses: actions/setup-node@v3
@ -111,7 +137,13 @@ jobs:
cache: "yarn"
- run: yarn --frozen-lockfile
- name: Test worker and server
run: yarn test --scope=@budibase/worker --scope=@budibase/server
run: |
if ${{ env.USE_NX_AFFECTED }}; then
yarn test --scope=@budibase/worker --scope=@budibase/server --since=${{ env.NX_BASE_BRANCH }}
else
yarn test --scope=@budibase/worker --scope=@budibase/server
fi
- uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN || github.token }} # not required for public repos
@ -127,6 +159,7 @@ jobs:
with:
submodules: true
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
fetch-depth: 0
- name: Use Node.js 18.x
uses: actions/setup-node@v3
@ -134,7 +167,13 @@ jobs:
node-version: 18.x
cache: "yarn"
- run: yarn --frozen-lockfile
- run: yarn test --scope=@budibase/pro
- name: Test
run: |
if ${{ env.USE_NX_AFFECTED }}; then
yarn test --scope=@budibase/pro --since=${{ env.NX_BASE_BRANCH }}
else
yarn test --scope=@budibase/pro
fi
integration-test:
runs-on: ubuntu-latest
@ -155,7 +194,7 @@ jobs:
node-version: 18.x
cache: "yarn"
- run: yarn --frozen-lockfile
- run: yarn build --projects=@budibase/server,@budibase/worker,@budibase/client
- run: yarn build --scope @budibase/server --scope @budibase/worker --scope @budibase/client
- name: Run tests
run: |
cd qa-core
@ -173,7 +212,6 @@ jobs:
uses: actions/checkout@v3
with:
submodules: true
fetch-depth: 0
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
- name: Check pro commit
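Several jobs in the workflow above share the same pattern: on pull requests that do not target master, `USE_NX_AFFECTED` is true and the lint/type-check/test commands run with `--since=${{ env.NX_BASE_BRANCH }}` so only projects affected by the change are processed. A minimal TypeScript sketch of that branching (the variable names mirror the workflow env vars, but the function itself is illustrative, not part of the workflow):

```typescript
// Sketch: pick the command the `if ${{ env.USE_NX_AFFECTED }}` step would run.
// `useNxAffected` corresponds to USE_NX_AFFECTED, `nxBaseBranch` to NX_BASE_BRANCH.
function checkTypesCommand(useNxAffected: boolean, nxBaseBranch: string): string {
  const base = "yarn check:types"
  return useNxAffected ? `${base} --since=${nxBaseBranch}` : base
}

// Example: on a PR into develop
// checkTypesCommand(true, "origin/develop")
//   -> "yarn check:types --since=origin/develop"
```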

View File

@ -1,4 +1,4 @@
name: release-singleimage
name: Deploy Budibase Single Container Image to DockerHub
on:
workflow_dispatch:
@ -8,13 +8,20 @@ env:
PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
REGISTRY_URL: registry.hub.docker.com
jobs:
build-amd64-arm64:
name: "build-amd64"
build:
name: "build"
runs-on: ubuntu-latest
strategy:
matrix:
node-version: [14.x]
steps:
- name: Maximize build space
uses: easimon/maximize-build-space@master
with:
root-reserve-mb: 35000
swap-size-mb: 1024
remove-android: 'true'
remove-dotnet: 'true'
- name: Fail if not a tag
run: |
if [[ $GITHUB_REF != refs/tags/* ]]; then
@ -27,12 +34,14 @@ jobs:
submodules: true
token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
fetch-depth: 0
- name: Fail if tag is not in master
run: |
if ! git merge-base --is-ancestor ${{ github.sha }} origin/master; then
echo "Tag is not in master. This pipeline can only execute tags that are present on the master branch"
exit 1
fi
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v1
with:
@ -69,64 +78,8 @@ jobs:
context: .
push: true
platforms: linux/amd64,linux/arm64
tags: budibase/budibase,budibase/budibase:v${{ env.RELEASE_VERSION }}
tags: budibase/budibase,budibase/budibase:${{ env.RELEASE_VERSION }}
file: ./hosting/single/Dockerfile
build-aas:
name: "build-aas"
runs-on: ubuntu-latest
strategy:
matrix:
node-version: [14.x]
steps:
- name: Fail if not a tag
run: |
if [[ $GITHUB_REF != refs/tags/* ]]; then
echo "Workflow Dispatch can only be run on tags"
exit 1
fi
- name: "Checkout"
uses: actions/checkout@v2
with:
submodules: true
token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
fetch-depth: 0
- name: Fail if tag is not in master
run: |
if ! git merge-base --is-ancestor ${{ github.sha }} origin/master; then
echo "Tag is not in master. This pipeline can only execute tags that are present on the master branch"
exit 1
fi
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v1
with:
node-version: ${{ matrix.node-version }}
- name: Setup QEMU
uses: docker/setup-qemu-action@v1
- name: Setup Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v1
- name: Run Yarn
run: yarn
- name: Update versions
run: ./scripts/updateVersions.sh
- name: Runt Yarn Lint
run: yarn lint
- name: Update versions
run: ./scripts/updateVersions.sh
- name: Run Yarn Build
run: yarn build:docker:pre
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_API_KEY }}
- name: Get the latest release version
id: version
run: |
release_version=$(cat lerna.json | jq -r '.version')
echo $release_version
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
- name: Tag and release Budibase Azure App Service docker image
uses: docker/build-push-action@v2
with:

.vscode/launch.json vendored
View File

@ -1,42 +1,32 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "Budibase Server",
"type": "node",
"request": "launch",
"runtimeArgs": [
"--nolazy",
"-r",
"ts-node/register/transpile-only"
],
"args": [
"${workspaceFolder}/packages/server/src/index.ts"
],
"cwd": "${workspaceFolder}/packages/server"
},
{
"name": "Budibase Worker",
"type": "node",
"request": "launch",
"runtimeArgs": [
"--nolazy",
"-r",
"ts-node/register/transpile-only"
],
"args": [
"${workspaceFolder}/packages/worker/src/index.ts"
],
"cwd": "${workspaceFolder}/packages/worker"
},
],
"compounds": [
{
"name": "Start Budibase",
"configurations": ["Budibase Server", "Budibase Worker"]
}
]
}
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "Budibase Server",
"type": "node",
"request": "launch",
"runtimeVersion": "14.20.1",
"runtimeArgs": ["--nolazy", "-r", "ts-node/register/transpile-only"],
"args": ["${workspaceFolder}/packages/server/src/index.ts"],
"cwd": "${workspaceFolder}/packages/server"
},
{
"name": "Budibase Worker",
"type": "node",
"request": "launch",
"runtimeVersion": "14.20.1",
"runtimeArgs": ["--nolazy", "-r", "ts-node/register/transpile-only"],
"args": ["${workspaceFolder}/packages/worker/src/index.ts"],
"cwd": "${workspaceFolder}/packages/worker"
}
],
"compounds": [
{
"name": "Start Budibase",
"configurations": ["Budibase Server", "Budibase Worker"]
}
]
}

View File

@ -1,7 +1,7 @@
FROM node:18-slim as build
# install node-gyp dependencies
RUN apt-get update && apt-get upgrade -y && apt-get install -y --no-install-recommends apt-utils cron g++ make python
RUN apt-get update && apt-get upgrade -y && apt-get install -y --no-install-recommends apt-utils cron g++ make python3
# add pin script
WORKDIR /

View File

@ -1,5 +1,5 @@
{
"version": "2.9.8-alpha.1",
"version": "2.9.24-alpha.3",
"npmClient": "yarn",
"packages": [
"packages/*"

View File

@ -34,7 +34,7 @@
"preinstall": "node scripts/syncProPackage.js",
"setup": "git config submodule.recurse true && git submodule update && node ./hosting/scripts/setup.js && yarn && yarn build && yarn dev",
"bootstrap": "./scripts/link-dependencies.sh && echo '***BOOTSTRAP ONLY REQUIRED FOR USE WITH ACCOUNT PORTAL***'",
"build": "yarn nx run-many -t=build",
"build": "lerna run build --stream",
"build:dev": "lerna run --stream prebuild && yarn nx run-many --target=build --output-style=dynamic --watch --preserveWatchOutput",
"check:types": "lerna run check:types",
"backend:bootstrap": "./scripts/scopeBackend.sh && yarn run bootstrap",

View File

@ -0,0 +1,4 @@
*
!dist/**/*
dist/tsconfig.build.tsbuildinfo
!package.json

View File

@ -2,11 +2,11 @@
"name": "@budibase/backend-core",
"version": "0.0.0",
"description": "Budibase backend core libraries used in server and worker",
"main": "dist/src/index.js",
"main": "dist/index.js",
"types": "dist/src/index.d.ts",
"exports": {
".": "./dist/src/index.js",
"./tests": "./dist/tests/index.js",
".": "./dist/index.js",
"./tests": "./dist/tests.js",
"./*": "./dist/*.js"
},
"author": "Budibase",
@ -14,7 +14,7 @@
"scripts": {
"prebuild": "rimraf dist/",
"prepack": "cp package.json dist",
"build": "tsc -p tsconfig.build.json",
"build": "node ./scripts/build.js && tsc -p tsconfig.build.json --emitDeclarationOnly --paths null",
"build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput",
"check:types": "tsc -p tsconfig.json --noEmit --paths null",
"test": "bash scripts/test.sh",
@ -88,5 +88,20 @@
"ts-node": "10.8.1",
"tsconfig-paths": "4.0.0",
"typescript": "4.7.3"
},
"nx": {
"targets": {
"build": {
"dependsOn": [
{
"projects": [
"@budibase/shared-core",
"@budibase/types"
],
"target": "build"
}
]
}
}
}
}

View File

@ -1 +0,0 @@
export * from "./src/plugin"

View File

@ -0,0 +1,6 @@
#!/usr/bin/node
const coreBuild = require("../../../scripts/build")
coreBuild("./src/plugin/index.ts", "./dist/plugins.js")
coreBuild("./src/index.ts", "./dist/index.js")
coreBuild("./tests/index.ts", "./dist/tests.js")

View File

@ -8,6 +8,7 @@ import {
DatabasePutOpts,
DatabaseCreateIndexOpts,
DatabaseDeleteIndexOpts,
DocExistsResponse,
Document,
isDocument,
} from "@budibase/types"
@ -120,6 +121,19 @@ export class DatabaseImpl implements Database {
return this.updateOutput(() => db.get(id))
}
async docExists(docId: string): Promise<DocExistsResponse> {
const db = await this.checkSetup()
let _rev, exists
try {
const { etag } = await db.head(docId)
_rev = etag
exists = true
} catch (err) {
exists = false
}
return { _rev, exists }
}
async remove(idOrDoc: string | Document, rev?: string) {
const db = await this.checkSetup()
let _id: string
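The new `docExists` checks for a document with a HEAD request, so existence (and the current revision via the ETag) can be determined without fetching the body. A hedged usage sketch, using the `Database` and `Document` types imported above and assuming the ETag maps to a usable `_rev`:

```typescript
// Sketch only: create-or-update without a full get().
async function upsert(db: Database, doc: Document) {
  const { exists, _rev } = await db.docExists(doc._id!)
  // reuse the revision from the HEAD response when the doc already exists
  // (assumption: put accepts a document with an optional _rev, as in the interface above)
  return db.put(exists ? { ...doc, _rev } : doc)
}
```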

View File

@ -78,7 +78,6 @@ export const BUILTIN_PERMISSIONS = {
permissions: [
new Permission(PermissionType.QUERY, PermissionLevel.READ),
new Permission(PermissionType.TABLE, PermissionLevel.READ),
new Permission(PermissionType.VIEW, PermissionLevel.READ),
],
},
WRITE: {
@ -87,7 +86,6 @@ export const BUILTIN_PERMISSIONS = {
permissions: [
new Permission(PermissionType.QUERY, PermissionLevel.WRITE),
new Permission(PermissionType.TABLE, PermissionLevel.WRITE),
new Permission(PermissionType.VIEW, PermissionLevel.READ),
new Permission(PermissionType.AUTOMATION, PermissionLevel.EXECUTE),
],
},
@ -98,7 +96,6 @@ export const BUILTIN_PERMISSIONS = {
new Permission(PermissionType.TABLE, PermissionLevel.WRITE),
new Permission(PermissionType.USER, PermissionLevel.READ),
new Permission(PermissionType.AUTOMATION, PermissionLevel.EXECUTE),
new Permission(PermissionType.VIEW, PermissionLevel.READ),
new Permission(PermissionType.WEBHOOK, PermissionLevel.READ),
],
},
@ -109,7 +106,6 @@ export const BUILTIN_PERMISSIONS = {
new Permission(PermissionType.TABLE, PermissionLevel.ADMIN),
new Permission(PermissionType.USER, PermissionLevel.ADMIN),
new Permission(PermissionType.AUTOMATION, PermissionLevel.ADMIN),
new Permission(PermissionType.VIEW, PermissionLevel.ADMIN),
new Permission(PermissionType.WEBHOOK, PermissionLevel.READ),
new Permission(PermissionType.QUERY, PermissionLevel.ADMIN),
],

View File

@ -12,7 +12,11 @@
"declaration": true,
"types": ["node", "jest"],
"outDir": "dist",
"skipLibCheck": true
"skipLibCheck": true,
"paths": {
"@budibase/types": ["../types/src"],
"@budibase/shared-core": ["../shared-core/src"]
}
},
"include": ["**/*.js", "**/*.ts"],
"exclude": [

View File

@ -1,12 +1,4 @@
{
"extends": "./tsconfig.build.json",
"compilerOptions": {
"composite": true,
"baseUrl": ".",
"paths": {
"@budibase/types": ["../types/src"],
"@budibase/shared-core": ["../shared-core/src"]
}
},
"exclude": ["node_modules", "dist"]
}

View File

@ -98,8 +98,7 @@
{
"projects": [
"@budibase/string-templates",
"@budibase/shared-core",
"@budibase/types"
"@budibase/shared-core"
],
"target": "build"
}

View File

@ -133,9 +133,7 @@
"dependsOn": [
{
"projects": [
"@budibase/shared-core",
"@budibase/string-templates",
"@budibase/types"
"@budibase/string-templates"
],
"target": "build"
}
@ -145,9 +143,7 @@
"dependsOn": [
{
"projects": [
"@budibase/shared-core",
"@budibase/string-templates",
"@budibase/types"
"@budibase/string-templates"
],
"target": "build"
}
@ -157,9 +153,7 @@
"dependsOn": [
{
"projects": [
"@budibase/shared-core",
"@budibase/string-templates",
"@budibase/types"
"@budibase/string-templates"
],
"target": "build"
}

View File

@ -1,10 +1,12 @@
<script>
import { Select, Label, Stepper } from "@budibase/bbui"
import { Select, Label } from "@budibase/bbui"
import { currentAsset, store } from "builderStore"
import { getActionProviderComponents } from "builderStore/dataBinding"
import { onMount } from "svelte"
import DrawerBindableInput from "components/common/bindings/DrawerBindableInput.svelte"
export let parameters
export let bindings = []
$: actionProviders = getActionProviderComponents(
$currentAsset,
@ -51,7 +53,11 @@
<Select bind:value={parameters.type} options={typeOptions} />
{#if parameters.type === "specific"}
<Label small>Number</Label>
<Stepper bind:value={parameters.number} />
<DrawerBindableInput
{bindings}
value={parameters.number}
on:change={e => (parameters.number = e.detail)}
/>
{/if}
</div>

View File

@ -17,7 +17,7 @@
import { generate } from "shortid"
import { LuceneUtils, Constants } from "@budibase/frontend-core"
import { getFields } from "helpers/searchFields"
import { createEventDispatcher } from "svelte"
import { createEventDispatcher, onMount } from "svelte"
export let schemaFields
export let filters = []
@ -64,6 +64,15 @@
})
}
onMount(() => {
parseFilters(filters)
rawFilters.forEach(filter => {
filter.type =
schemaFields.find(field => field.name === filter.field)?.type ||
filter.type
})
})
// Add field key prefixes and a special metadata filter object to indicate
// whether to use the "match all" or "match any" behaviour
const enrichFilters = (rawFilters, matchAny) => {

View File

@ -127,6 +127,14 @@ export default defineConfig(({ mode }) => {
find: "helpers",
replacement: path.resolve("./src/helpers"),
},
{
find: "@budibase/types",
replacement: path.resolve("../types/src"),
},
{
find: "@budibase/shared-core",
replacement: path.resolve("../shared-core/src"),
},
],
},
}

View File

@ -63,20 +63,5 @@
"renamer": "^4.0.0",
"ts-node": "^10.9.1",
"typescript": "4.7.3"
},
"nx": {
"targets": {
"build": {
"dependsOn": [
{
"projects": [
"@budibase/backend-core",
"@budibase/string-templates"
],
"target": "build"
}
]
}
}
}
}

View File

@ -2445,6 +2445,7 @@
"name": "Form Step",
"icon": "AssetsAdded",
"hasChildren": true,
"requiredAncestors": ["form"],
"illegalChildren": ["section", "form", "formstep", "formblock"],
"styles": ["size"],
"size": {
@ -2464,6 +2465,7 @@
"fieldgroup": {
"name": "Field Group",
"icon": "Group",
"requiredAncestors": ["form"],
"illegalChildren": ["section"],
"styles": ["size"],
"hasChildren": true,

View File

@ -250,7 +250,7 @@
} else if (type === "first") {
currentStep.set(1)
} else if (type === "specific" && number && !isNaN(number)) {
currentStep.set(number)
currentStep.set(parseInt(number))
}
},
setStep: step => {
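Because the specific step number is now supplied through a bindable input (see the builder change above) it arrives as a string, so it is parsed before being written to the step store. A tiny illustration (the binding value is made up):

```typescript
// e.g. a binding that resolves to "3"
const number = "3"
if (number && !isNaN(Number(number))) {
  currentStep.set(parseInt(number)) // sets step 3, not the string "3"
}
```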

@ -1 +1 @@
Subproject commit ecee8071ebe0f98a5bb19646954e373264be210d
Subproject commit 9b9c8cc08f271bfc5dd401860f344f6eb336ab35

View File

@ -18,7 +18,7 @@ ENV TOP_LEVEL_PATH=/
# handle node-gyp
RUN apt-get update \
&& apt-get install -y --no-install-recommends g++ make python
&& apt-get install -y --no-install-recommends g++ make python3
RUN yarn global add pm2
# Install client for oracle datasource

View File

@ -100,7 +100,7 @@
"memorystream": "0.3.1",
"mongodb": "5.7",
"mssql": "9.1.1",
"mysql2": "2.3.3",
"mysql2": "3.5.2",
"node-fetch": "2.6.7",
"object-sizeof": "2.6.1",
"open": "8.4.0",

View File

@ -39,9 +39,8 @@ import {
} from "../../db/defaultData/datasource_bb_default"
import { removeAppFromUserRoles } from "../../utilities/workerRequests"
import { stringToReadStream } from "../../utilities"
import { doesUserHaveLock, getLocksById } from "../../utilities/redis"
import { doesUserHaveLock } from "../../utilities/redis"
import { cleanupAutomations } from "../../automations/utils"
import { checkAppMetadata } from "../../automations/logging"
import { getUniqueRows } from "../../utilities/usageQuota/rows"
import { groups, licensing, quotas } from "@budibase/pro"
import {
@ -51,7 +50,6 @@ import {
PlanType,
Screen,
UserCtx,
ContextUser,
} from "@budibase/types"
import { BASE_LAYOUT_PROP_IDS } from "../../constants/layouts"
import sdk from "../../sdk"

View File

@ -20,7 +20,7 @@ import {
Automation,
AutomationActionStepId,
AutomationResults,
BBContext,
Ctx,
} from "@budibase/types"
import { getActionDefinitions as actionDefs } from "../../automations/actions"
import sdk from "../../sdk"
@ -73,7 +73,7 @@ function cleanAutomationInputs(automation: Automation) {
return automation
}
export async function create(ctx: BBContext) {
export async function create(ctx: Ctx) {
const db = context.getAppDB()
let automation = ctx.request.body
automation.appId = ctx.appId
@ -142,7 +142,7 @@ export async function handleStepEvents(
}
}
export async function update(ctx: BBContext) {
export async function update(ctx: Ctx) {
const db = context.getAppDB()
let automation = ctx.request.body
automation.appId = ctx.appId
@ -193,7 +193,7 @@ export async function update(ctx: BBContext) {
builderSocket?.emitAutomationUpdate(ctx, automation)
}
export async function fetch(ctx: BBContext) {
export async function fetch(ctx: Ctx) {
const db = context.getAppDB()
const response = await db.allDocs(
getAutomationParams(null, {
@ -203,12 +203,11 @@ export async function fetch(ctx: BBContext) {
ctx.body = response.rows.map(row => row.doc)
}
export async function find(ctx: BBContext) {
const db = context.getAppDB()
ctx.body = await db.get(ctx.params.id)
export async function find(ctx: Ctx) {
ctx.body = await sdk.automations.get(ctx.params.id)
}
export async function destroy(ctx: BBContext) {
export async function destroy(ctx: Ctx) {
const db = context.getAppDB()
const automationId = ctx.params.id
const oldAutomation = await db.get<Automation>(automationId)
@ -222,11 +221,11 @@ export async function destroy(ctx: BBContext) {
builderSocket?.emitAutomationDeletion(ctx, automationId)
}
export async function logSearch(ctx: BBContext) {
export async function logSearch(ctx: Ctx) {
ctx.body = await automations.logs.logSearch(ctx.request.body)
}
export async function clearLogError(ctx: BBContext) {
export async function clearLogError(ctx: Ctx) {
const { automationId, appId } = ctx.request.body
await context.doInAppContext(appId, async () => {
const db = context.getProdAppDB()
@ -245,15 +244,15 @@ export async function clearLogError(ctx: BBContext) {
})
}
export async function getActionList(ctx: BBContext) {
export async function getActionList(ctx: Ctx) {
ctx.body = await getActionDefinitions()
}
export async function getTriggerList(ctx: BBContext) {
export async function getTriggerList(ctx: Ctx) {
ctx.body = getTriggerDefinitions()
}
export async function getDefinitionList(ctx: BBContext) {
export async function getDefinitionList(ctx: Ctx) {
ctx.body = {
trigger: getTriggerDefinitions(),
action: await getActionDefinitions(),
@ -266,7 +265,7 @@ export async function getDefinitionList(ctx: BBContext) {
* *
*********************/
export async function trigger(ctx: BBContext) {
export async function trigger(ctx: Ctx) {
const db = context.getAppDB()
let automation = await db.get<Automation>(ctx.params.id)
@ -311,7 +310,7 @@ function prepareTestInput(input: any) {
return input
}
export async function test(ctx: BBContext) {
export async function test(ctx: Ctx) {
const db = context.getAppDB()
let automation = await db.get<Automation>(ctx.params.id)
await setTestFlag(automation._id!)

View File

@ -127,7 +127,7 @@ export async function preview(ctx: any) {
const query = ctx.request.body
// preview may not have a queryId as it hasn't been saved, but if it does
// this stops dynamic variables from calling the same query
const { fields, parameters, queryVerb, transformer, queryId } = query
const { fields, parameters, queryVerb, transformer, queryId, schema } = query
const authConfigCtx: any = getAuthConfig(ctx)
@ -140,6 +140,7 @@ export async function preview(ctx: any) {
parameters,
transformer,
queryId,
schema,
// have to pass down to the thread runner - can't put into context now
environmentVariables: envVars,
ctx: {
@ -235,6 +236,7 @@ async function execute(
user: ctx.user,
auth: { ...authConfigCtx },
},
schema: query.schema,
}
const runFn = () => Runner.run(inputs)

View File

@ -15,6 +15,7 @@ import {
UserCtx,
} from "@budibase/types"
import sdk from "../../../sdk"
import * as utils from "./utils"
export async function handleRequest(
operation: Operation,
@ -43,7 +44,7 @@ export async function handleRequest(
}
export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
const tableId = ctx.params.tableId
const tableId = utils.getTableId(ctx)
const { _id, ...rowData } = ctx.request.body
const validateResult = await sdk.rows.utils.validate({
@ -70,7 +71,7 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
export async function save(ctx: UserCtx) {
const inputs = ctx.request.body
const tableId = ctx.params.tableId
const tableId = utils.getTableId(ctx)
const validateResult = await sdk.rows.utils.validate({
row: inputs,
tableId,
@ -98,12 +99,12 @@ export async function save(ctx: UserCtx) {
export async function find(ctx: UserCtx) {
const id = ctx.params.rowId
const tableId = ctx.params.tableId
const tableId = utils.getTableId(ctx)
return sdk.rows.external.getRow(tableId, id)
}
export async function destroy(ctx: UserCtx) {
const tableId = ctx.params.tableId
const tableId = utils.getTableId(ctx)
const _id = ctx.request.body._id
const { row } = (await handleRequest(Operation.DELETE, tableId, {
id: breakRowIdField(_id),
@ -114,7 +115,7 @@ export async function destroy(ctx: UserCtx) {
export async function bulkDestroy(ctx: UserCtx) {
const { rows } = ctx.request.body
const tableId = ctx.params.tableId
const tableId = utils.getTableId(ctx)
let promises: Promise<Row[] | { row: Row; table: Table }>[] = []
for (let row of rows) {
promises.push(
@ -130,7 +131,7 @@ export async function bulkDestroy(ctx: UserCtx) {
export async function fetchEnrichedRow(ctx: UserCtx) {
const id = ctx.params.rowId
const tableId = ctx.params.tableId
const tableId = utils.getTableId(ctx)
const { datasourceId, tableName } = breakExternalTableId(tableId)
const datasource: Datasource = await sdk.datasources.get(datasourceId!)
if (!tableName) {

View File

@ -13,7 +13,7 @@ import {
import { FieldTypes } from "../../../constants"
import * as utils from "./utils"
import { cloneDeep } from "lodash/fp"
import { context, db as dbCore } from "@budibase/backend-core"
import { context } from "@budibase/backend-core"
import { finaliseRow, updateRelatedFormula } from "./staticFormula"
import {
UserCtx,
@ -26,8 +26,8 @@ import {
import sdk from "../../../sdk"
export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
const tableId = utils.getTableId(ctx)
const inputs = ctx.request.body
const tableId = inputs.tableId
const isUserTable = tableId === InternalTables.USER_METADATA
let oldRow
const dbTable = await sdk.tables.getTable(tableId)
@ -94,7 +94,8 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
export async function save(ctx: UserCtx) {
let inputs = ctx.request.body
inputs.tableId = ctx.params.tableId
const tableId = utils.getTableId(ctx)
inputs.tableId = tableId
if (!inputs._rev && !inputs._id) {
inputs._id = generateRowID(inputs.tableId)
@ -132,20 +133,22 @@ export async function save(ctx: UserCtx) {
}
export async function find(ctx: UserCtx) {
const db = dbCore.getDB(ctx.appId)
const table = await sdk.tables.getTable(ctx.params.tableId)
let row = await utils.findRow(ctx, ctx.params.tableId, ctx.params.rowId)
const tableId = utils.getTableId(ctx),
rowId = ctx.params.rowId
const table = await sdk.tables.getTable(tableId)
let row = await utils.findRow(ctx, tableId, rowId)
row = await outputProcessing(table, row)
return row
}
export async function destroy(ctx: UserCtx) {
const db = context.getAppDB()
const tableId = utils.getTableId(ctx)
const { _id } = ctx.request.body
let row = await db.get<Row>(_id)
let _rev = ctx.request.body._rev || row._rev
if (row.tableId !== ctx.params.tableId) {
if (row.tableId !== tableId) {
throw "Supplied tableId doesn't match the row's tableId"
}
const table = await sdk.tables.getTable(row.tableId)
@ -163,7 +166,7 @@ export async function destroy(ctx: UserCtx) {
await updateRelatedFormula(table, row)
let response
if (ctx.params.tableId === InternalTables.USER_METADATA) {
if (tableId === InternalTables.USER_METADATA) {
ctx.params = {
id: _id,
}
@ -176,7 +179,7 @@ export async function destroy(ctx: UserCtx) {
}
export async function bulkDestroy(ctx: UserCtx) {
const tableId = ctx.params.tableId
const tableId = utils.getTableId(ctx)
const table = await sdk.tables.getTable(tableId)
let { rows } = ctx.request.body
@ -216,7 +219,7 @@ export async function bulkDestroy(ctx: UserCtx) {
export async function fetchEnrichedRow(ctx: UserCtx) {
const db = context.getAppDB()
const tableId = ctx.params.tableId
const tableId = utils.getTableId(ctx)
const rowId = ctx.params.rowId
// need table to work out where links go in row
let [table, row] = await Promise.all([

View File

@ -45,13 +45,20 @@ export async function findRow(ctx: UserCtx, tableId: string, rowId: string) {
}
export function getTableId(ctx: Ctx) {
if (ctx.request.body && ctx.request.body.tableId) {
return ctx.request.body.tableId
// top priority, use the URL first
if (ctx.params?.sourceId) {
return ctx.params.sourceId
}
if (ctx.params && ctx.params.tableId) {
// now check for old way of specifying table ID
if (ctx.params?.tableId) {
return ctx.params.tableId
}
if (ctx.params && ctx.params.viewName) {
// check body for a table ID
if (ctx.request.body?.tableId) {
return ctx.request.body.tableId
}
// now check if a specific view name
if (ctx.params?.viewName) {
return ctx.params.viewName
}
}
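With this change the lookup order is: the `sourceId` URL parameter first, then the legacy `tableId` URL parameter, then a `tableId` in the request body, and finally a classic `viewName`. An illustrative sketch of what each shape resolves to (the IDs are made up):

```typescript
// Minimal Ctx shapes, illustrative IDs only.
const base: any = { params: {}, request: { body: {} } }

getTableId({ ...base, params: { sourceId: "ta_users" } })           // "ta_users" – new URL param wins
getTableId({ ...base, params: { tableId: "ta_users" } })            // "ta_users" – legacy URL param
getTableId({ ...base, request: { body: { tableId: "ta_users" } } }) // "ta_users" – body fallback
getTableId({ ...base, params: { viewName: "active_users" } })       // "active_users" – classic view name
```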

View File

@ -4,16 +4,14 @@ import authorized from "../../middleware/authorized"
import { paramResource, paramSubResource } from "../../middleware/resourceId"
import { permissions } from "@budibase/backend-core"
import { internalSearchValidator } from "./utils/validators"
import noViewData from "../../middleware/noViewData"
import trimViewRowInfo from "../../middleware/trimViewRowInfo"
import * as utils from "../../db/utils"
const { PermissionType, PermissionLevel } = permissions
const router: Router = new Router()
router
/**
* @api {get} /api/:tableId/:rowId/enrich Get an enriched row
* @api {get} /api/:sourceId/:rowId/enrich Get an enriched row
* @apiName Get an enriched row
* @apiGroup rows
* @apiPermission table read access
@ -27,13 +25,13 @@ router
* @apiSuccess {object} row The response body will be the enriched row.
*/
.get(
"/api/:tableId/:rowId/enrich",
paramSubResource("tableId", "rowId"),
"/api/:sourceId/:rowId/enrich",
paramSubResource("sourceId", "rowId"),
authorized(PermissionType.TABLE, PermissionLevel.READ),
rowController.fetchEnrichedRow
)
/**
* @api {get} /api/:tableId/rows Get all rows in a table
* @api {get} /api/:sourceId/rows Get all rows in a table
* @apiName Get all rows in a table
* @apiGroup rows
* @apiPermission table read access
@ -42,37 +40,37 @@ router
* due to its lack of support for pagination. With SQL tables this will retrieve up to a limit and then
* will simply stop.
*
* @apiParam {string} tableId The ID of the table to retrieve all rows within.
* @apiParam {string} sourceId The ID of the table to retrieve all rows within.
*
* @apiSuccess {object[]} rows The response body will be an array of all rows found.
*/
.get(
"/api/:tableId/rows",
paramResource("tableId"),
"/api/:sourceId/rows",
paramResource("sourceId"),
authorized(PermissionType.TABLE, PermissionLevel.READ),
rowController.fetch
)
/**
* @api {get} /api/:tableId/rows/:rowId Retrieve a single row
* @api {get} /api/:sourceId/rows/:rowId Retrieve a single row
* @apiName Retrieve a single row
* @apiGroup rows
* @apiPermission table read access
* @apiDescription This endpoint retrieves only the specified row. If you wish to retrieve
* a row by anything other than its _id field, use the search endpoint.
*
* @apiParam {string} tableId The ID of the table to retrieve a row from.
* @apiParam {string} sourceId The ID of the table to retrieve a row from.
* @apiParam {string} rowId The ID of the row to retrieve.
*
* @apiSuccess {object} body The response body will be the row that was found.
*/
.get(
"/api/:tableId/rows/:rowId",
paramSubResource("tableId", "rowId"),
"/api/:sourceId/rows/:rowId",
paramSubResource("sourceId", "rowId"),
authorized(PermissionType.TABLE, PermissionLevel.READ),
rowController.find
)
/**
* @api {post} /api/:tableId/search Search for rows in a table
* @api {post} /api/:sourceId/search Search for rows in a table
* @apiName Search for rows in a table
* @apiGroup rows
* @apiPermission table read access
@ -80,7 +78,7 @@ router
* and data UI in the builder are built atop this. All filtering, sorting and pagination is
* handled through this, for internal and external (datasource plus, e.g. SQL) tables.
*
* @apiParam {string} tableId The ID of the table to retrieve rows from.
* @apiParam {string} sourceId The ID of the table to retrieve rows from.
*
* @apiParam (Body) {boolean} [paginate] If pagination is required then this should be set to true,
* defaults to false.
@ -135,22 +133,22 @@ router
* page.
*/
.post(
"/api/:tableId/search",
"/api/:sourceId/search",
internalSearchValidator(),
paramResource("tableId"),
paramResource("sourceId"),
authorized(PermissionType.TABLE, PermissionLevel.READ),
rowController.search
)
// DEPRECATED - this is an old API, but for backwards compat it needs to be
// supported still
.post(
"/api/search/:tableId/rows",
paramResource("tableId"),
"/api/search/:sourceId/rows",
paramResource("sourceId"),
authorized(PermissionType.TABLE, PermissionLevel.READ),
rowController.search
)
/**
* @api {post} /api/:tableId/rows Creates a new row
* @api {post} /api/:sourceId/rows Creates a new row
* @apiName Creates a new row
* @apiGroup rows
* @apiPermission table write access
@ -159,7 +157,7 @@ router
* links to one. Please note that "_id", "_rev" and "tableId" are fields that are
* already used by Budibase tables and cannot be used for columns.
*
* @apiParam {string} tableId The ID of the table to save a row to.
* @apiParam {string} sourceId The ID of the table to save a row to.
*
* @apiParam (Body) {string} [_id] If the row exists already then an ID for the row must be provided.
* @apiParam (Body) {string} [_rev] If working with an existing row for an internal table its revision
@ -174,14 +172,14 @@ router
* @apiSuccess {object} body The contents of the row that was saved will be returned as well.
*/
.post(
"/api/:tableId/rows",
paramResource("tableId"),
"/api/:sourceId/rows",
paramResource("sourceId"),
authorized(PermissionType.TABLE, PermissionLevel.WRITE),
noViewData,
trimViewRowInfo,
rowController.save
)
/**
* @api {patch} /api/:tableId/rows Updates a row
* @api {patch} /api/:sourceId/rows Updates a row
* @apiName Update a row
* @apiGroup rows
* @apiPermission table write access
@ -189,14 +187,14 @@ router
* error if an _id isn't provided, it will only function for existing rows.
*/
.patch(
"/api/:tableId/rows",
paramResource("tableId"),
"/api/:sourceId/rows",
paramResource("sourceId"),
authorized(PermissionType.TABLE, PermissionLevel.WRITE),
noViewData,
trimViewRowInfo,
rowController.patch
)
/**
* @api {post} /api/:tableId/rows/validate Validate inputs for a row
* @api {post} /api/:sourceId/rows/validate Validate inputs for a row
* @apiName Validate inputs for a row
* @apiGroup rows
* @apiPermission table write access
@ -204,7 +202,7 @@ router
* given the table schema, this will iterate through all the constraints on the table and
* check if the request body is valid.
*
* @apiParam {string} tableId The ID of the table the row is to be validated for.
* @apiParam {string} sourceId The ID of the table the row is to be validated for.
*
* @apiParam (Body) {any} [any] Any fields provided in the request body will be tested
* against the table schema and constraints.
@ -216,20 +214,20 @@ router
* the schema.
*/
.post(
"/api/:tableId/rows/validate",
paramResource("tableId"),
"/api/:sourceId/rows/validate",
paramResource("sourceId"),
authorized(PermissionType.TABLE, PermissionLevel.WRITE),
rowController.validate
)
/**
* @api {delete} /api/:tableId/rows Delete rows
* @api {delete} /api/:sourceId/rows Delete rows
* @apiName Delete rows
* @apiGroup rows
* @apiPermission table write access
* @apiDescription This endpoint can delete a single row, or delete them in a bulk
* fashion.
*
* @apiParam {string} tableId The ID of the table the row is to be deleted from.
* @apiParam {string} sourceId The ID of the table the row is to be deleted from.
*
* @apiParam (Body) {object[]} [rows] If bulk deletion is desired then provide the rows in this
* key of the request body that are to be deleted.
@ -242,117 +240,37 @@ router
* is the deleted row.
*/
.delete(
"/api/:tableId/rows",
paramResource("tableId"),
"/api/:sourceId/rows",
paramResource("sourceId"),
authorized(PermissionType.TABLE, PermissionLevel.WRITE),
trimViewRowInfo,
rowController.destroy
)
/**
* @api {post} /api/:tableId/rows/exportRows Export Rows
* @api {post} /api/:sourceId/rows/exportRows Export Rows
* @apiName Export rows
* @apiGroup rows
* @apiPermission table write access
* @apiDescription This API can export a number of provided rows
*
* @apiParam {string} tableId The ID of the table the row is to be deleted from.
* @apiParam {string} sourceId The ID of the table the row is to be deleted from.
*
* @apiParam (Body) {object[]} [rows] The row IDs which are to be exported
*
* @apiSuccess {object[]|object}
*/
.post(
"/api/:tableId/rows/exportRows",
paramResource("tableId"),
"/api/:sourceId/rows/exportRows",
paramResource("sourceId"),
authorized(PermissionType.TABLE, PermissionLevel.WRITE),
rowController.exportRows
)
router
.post(
"/api/v2/views/:viewId/search",
authorized(PermissionType.VIEW, PermissionLevel.READ),
rowController.views.searchView
)
/**
* @api {post} /api/:tableId/rows Creates a new row
* @apiName Creates a new row
* @apiGroup rows
* @apiPermission table write access
* @apiDescription This API will create a new row based on the supplied body. If the
* body includes an "_id" field then it will update an existing row if the field
* links to one. Please note that "_id", "_rev" and "tableId" are fields that are
* already used by Budibase tables and cannot be used for columns.
*
* @apiParam {string} tableId The ID of the table to save a row to.
*
* @apiParam (Body) {string} [_id] If the row exists already then an ID for the row must be provided.
* @apiParam (Body) {string} [_rev] If working with an existing row for an internal table its revision
* must also be provided.
* @apiParam (Body) {string} _viewId The ID of the view should be specified in the row body itself.
* @apiParam (Body) {string} tableId The ID of the table should also be specified in the row body itself.
* @apiParam (Body) {any} [any] Any field supplied in the body will be assessed to see if it matches
* a column in the specified table. All other fields will be dropped and not stored.
*
* @apiSuccess {string} _id The ID of the row that was just saved, if it was just created this
* is the rows new ID.
* @apiSuccess {string} [_rev] If saving to an internal table a revision will also be returned.
* @apiSuccess {object} body The contents of the row that was saved will be returned as well.
*/
.post(
"/api/v2/views/:viewId/rows",
paramResource("viewId"),
authorized(PermissionType.VIEW, PermissionLevel.WRITE),
trimViewRowInfo,
rowController.save
)
/**
* @api {patch} /api/v2/views/:viewId/rows/:rowId Updates a row
* @apiName Update a row
* @apiGroup rows
* @apiPermission table write access
* @apiDescription This endpoint is identical to the row creation endpoint but instead it will
* error if an _id isn't provided, it will only function for existing rows.
*/
.patch(
"/api/v2/views/:viewId/rows/:rowId",
paramResource("viewId"),
authorized(PermissionType.VIEW, PermissionLevel.WRITE),
trimViewRowInfo,
rowController.patch
)
/**
* @api {delete} /api/v2/views/:viewId/rows Delete rows for a view
* @apiName Delete rows for a view
* @apiGroup rows
* @apiPermission table write access
* @apiDescription This endpoint can delete a single row, or delete them in a bulk
* fashion.
*
* @apiParam {string} tableId The ID of the table the row is to be deleted from.
*
* @apiParam (Body) {object[]} [rows] If bulk deletion is desired then provide the rows in this
* key of the request body that are to be deleted.
* @apiParam (Body) {string} [_id] If deleting a single row then provide its ID in this field.
* @apiParam (Body) {string} [_rev] If deleting a single row from an internal table then provide its
* revision here.
*
* @apiSuccess {object[]|object} body If deleting bulk then the response body will be an array
* of the deleted rows, if deleting a single row then the body will contain a "row" property which
* is the deleted row.
*/
.delete(
"/api/v2/views/:viewId/rows",
paramResource("viewId"),
authorized(PermissionType.VIEW, PermissionLevel.WRITE),
// This is required as the implementation relies on the table id
(ctx, next) => {
ctx.params.tableId = utils.extractViewInfoFromID(
ctx.params.viewId
).tableId
return next()
},
rowController.destroy
)
router.post(
"/api/v2/views/:viewId/search",
authorized(PermissionType.TABLE, PermissionLevel.READ),
rowController.views.searchView
)
export default router
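After this change the row routes use a single `:sourceId` path segment that can hold either a table ID or a view ID, the view-specific v2 row routes are dropped, and only the v2 search endpoint remains (now guarded by the TABLE permission). A hedged sketch of the resulting calls; the headers, ID formats and field names are assumptions:

```typescript
// Illustrative client calls against the reworked routes (IDs and headers are made up).
const headers = { "content-type": "application/json" /* plus auth/app headers */ }

// fetch all rows of a table
await fetch("/api/ta_users/rows", { headers })

// save a row through a view – the :sourceId segment carries the view ID and
// trimViewRowInfo resolves the underlying table before the controller runs
await fetch("/api/view_ta_users_abc123/rows", {
  method: "POST",
  headers,
  body: JSON.stringify({ tableId: "ta_users", _viewId: "view_ta_users_abc123", name: "Q" }),
})

// v2 view search is the one remaining view-specific row endpoint
await fetch("/api/v2/views/view_ta_users_abc123/search", { method: "POST", headers, body: "{}" })
```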

View File

@ -16,16 +16,12 @@ import {
FieldType,
SortType,
SortOrder,
DeleteRow,
} from "@budibase/types"
import {
expectAnyInternalColsAttributes,
generator,
structures,
} from "@budibase/backend-core/tests"
import trimViewRowInfoMiddleware from "../../../middleware/trimViewRowInfo"
import noViewDataMiddleware from "../../../middleware/noViewData"
import router from "../row"
describe("/rows", () => {
let request = setup.getRequest()
@ -394,26 +390,6 @@ describe("/rows", () => {
expect(saved.arrayFieldArrayStrKnown).toEqual(["One"])
expect(saved.optsFieldStrKnown).toEqual("Alpha")
})
it("should throw an error when creating a table row with view id data", async () => {
const res = await request
.post(`/api/${row.tableId}/rows`)
.send({ ...row, _viewId: generator.guid() })
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(400)
expect(res.body.message).toEqual(
"Table row endpoints cannot contain view info"
)
})
it("should setup the noViewData middleware", async () => {
const route = router.stack.find(
r => r.methods.includes("POST") && r.path === "/api/:tableId/rows"
)
expect(route).toBeDefined()
expect(route?.stack).toContainEqual(noViewDataMiddleware)
})
})
describe("patch", () => {
@ -463,33 +439,6 @@ describe("/rows", () => {
await assertRowUsage(rowUsage)
await assertQueryUsage(queryUsage)
})
it("should throw an error when creating a table row with view id data", async () => {
const existing = await config.createRow()
const res = await config.api.row.patch(
table._id!,
{
...existing,
_id: existing._id!,
_rev: existing._rev!,
tableId: table._id!,
_viewId: generator.guid(),
},
{ expectStatus: 400 }
)
expect(res.body.message).toEqual(
"Table row endpoints cannot contain view info"
)
})
it("should setup the noViewData middleware", async () => {
const route = router.stack.find(
r => r.methods.includes("PATCH") && r.path === "/api/:tableId/rows"
)
expect(route).toBeDefined()
expect(route?.stack).toContainEqual(noViewDataMiddleware)
})
})
describe("destroy", () => {
@ -758,7 +707,7 @@ describe("/rows", () => {
})
// the environment needs configured for this
await setup.switchToSelfHosted(async () => {
context.doInAppContext(config.getAppId(), async () => {
return context.doInAppContext(config.getAppId(), async () => {
const enriched = await outputProcessing(table, [row])
expect((enriched as Row[])[0].attachment[0].url).toBe(
`/files/signed/prod-budi-app-assets/${config.getProdAppId()}/attachments/${attachmentId}`
@ -864,7 +813,7 @@ describe("/rows", () => {
})
const data = randomRowData()
const newRow = await config.api.viewV2.row.create(view.id, {
const newRow = await config.api.row.save(view.id, {
tableId: config.table!._id,
_viewId: view.id,
...data,
@ -886,16 +835,6 @@ describe("/rows", () => {
expect(row.body.age).toBeUndefined()
expect(row.body.jobTitle).toBeUndefined()
})
it("should setup the trimViewRowInfo middleware", async () => {
const route = router.stack.find(
r =>
r.methods.includes("POST") &&
r.path === "/api/v2/views/:viewId/rows"
)
expect(route).toBeDefined()
expect(route?.stack).toContainEqual(trimViewRowInfoMiddleware)
})
})
describe("patch", () => {
@ -910,13 +849,13 @@ describe("/rows", () => {
},
})
const newRow = await config.api.viewV2.row.create(view.id, {
const newRow = await config.api.row.save(view.id, {
tableId,
_viewId: view.id,
...randomRowData(),
})
const newData = randomRowData()
await config.api.viewV2.row.update(view.id, newRow._id!, {
await config.api.row.patch(view.id, {
tableId,
_viewId: view.id,
_id: newRow._id!,
@ -939,16 +878,6 @@ describe("/rows", () => {
expect(row.body.age).toBeUndefined()
expect(row.body.jobTitle).toBeUndefined()
})
it("should setup the trimViewRowInfo middleware", async () => {
const route = router.stack.find(
r =>
r.methods.includes("PATCH") &&
r.path === "/api/v2/views/:viewId/rows/:rowId"
)
expect(route).toBeDefined()
expect(route?.stack).toContainEqual(trimViewRowInfoMiddleware)
})
})
describe("destroy", () => {
@ -967,10 +896,7 @@ describe("/rows", () => {
const rowUsage = await getRowUsage()
const queryUsage = await getQueryUsage()
const body: DeleteRow = {
_id: createdRow._id!,
}
await config.api.viewV2.row.delete(view.id, body)
await config.api.row.delete(view.id, [createdRow])
await assertRowUsage(rowUsage - 1)
await assertQueryUsage(queryUsage + 1)
@ -999,9 +925,7 @@ describe("/rows", () => {
const rowUsage = await getRowUsage()
const queryUsage = await getQueryUsage()
await config.api.viewV2.row.delete(view.id, {
rows: [rows[0], rows[2]],
})
await config.api.row.delete(view.id, [rows[0], rows[2]])
await assertRowUsage(rowUsage - 2)
await assertQueryUsage(queryUsage + 1)

View File

@ -34,7 +34,7 @@ router
"/api/views/:viewName",
paramResource("viewName"),
authorized(
permissions.PermissionType.VIEW,
permissions.PermissionType.TABLE,
permissions.PermissionLevel.READ
),
rowController.fetchView

View File

@ -1,11 +1,18 @@
const setup = require("./utilities")
const { FilterConditions } = require("../steps/filter")
import * as setup from "./utilities"
import { FilterConditions } from "../steps/filter"
describe("test the filter logic", () => {
async function checkFilter(field, condition, value, pass = true) {
let res = await setup.runStep(setup.actions.FILTER.stepId,
{ field, condition, value }
)
async function checkFilter(
field: any,
condition: string,
value: any,
pass = true
) {
let res = await setup.runStep(setup.actions.FILTER.stepId, {
field,
condition,
value,
})
expect(res.result).toEqual(pass)
expect(res.success).toEqual(true)
}
@ -36,9 +43,9 @@ describe("test the filter logic", () => {
it("check date coercion", async () => {
await checkFilter(
(new Date()).toISOString(),
new Date().toISOString(),
FilterConditions.GREATER_THAN,
(new Date(-10000)).toISOString(),
new Date(-10000).toISOString(),
true
)
})

View File

@ -6,11 +6,11 @@ import { isDevAppID } from "../db/utils"
// need this to call directly, so we can get a response
import { automationQueue } from "./bullboard"
import { checkTestFlag } from "../utilities/redis"
import * as utils from "./utils"
import env from "../environment"
import { context, db as dbCore } from "@budibase/backend-core"
import { Automation, Row, AutomationData, AutomationJob } from "@budibase/types"
import { executeSynchronously } from "../threads/automation"
import sdk from "../sdk"
export const TRIGGER_DEFINITIONS = definitions
const JOB_OPTS = {
@ -142,7 +142,7 @@ export async function rebootTrigger() {
let automations = await getAllAutomations()
let rebootEvents = []
for (let automation of automations) {
if (utils.isRebootTrigger(automation)) {
if (sdk.automations.isReboot(automation)) {
const job = {
automation,
event: {

View File

@ -16,13 +16,14 @@ import {
} from "@budibase/types"
import sdk from "../sdk"
const REBOOT_CRON = "@reboot"
const WH_STEP_ID = definitions.WEBHOOK.stepId
const CRON_STEP_ID = definitions.CRON.stepId
const Runner = new Thread(ThreadType.AUTOMATION)
function loggingArgs(job: AutomationJob) {
return [
function loggingArgs(
job: AutomationJob,
timing?: { start: number; complete?: boolean }
) {
const logs: any[] = [
{
_logKey: "automation",
trigger: job.data.automation.definition.trigger.event,
@ -32,24 +33,53 @@ function loggingArgs(job: AutomationJob) {
jobId: job.id,
},
]
if (timing?.start) {
logs.push({
_logKey: "startTime",
start: timing.start,
})
}
if (timing?.start && timing?.complete) {
const end = new Date().getTime()
const duration = end - timing.start
logs.push({
_logKey: "endTime",
end,
})
logs.push({
_logKey: "duration",
duration,
})
}
return logs
}
export async function processEvent(job: AutomationJob) {
const appId = job.data.event.appId!
const automationId = job.data.automation._id!
const start = new Date().getTime()
const task = async () => {
try {
// need to actually await these so that an error can be captured properly
console.log("automation running", ...loggingArgs(job))
console.log("automation running", ...loggingArgs(job, { start }))
const runFn = () => Runner.run(job)
const result = await quotas.addAutomation(runFn, {
automationId,
})
console.log("automation completed", ...loggingArgs(job))
const end = new Date().getTime()
const duration = end - start
console.log(
"automation completed",
...loggingArgs(job, { start, complete: true })
)
return result
} catch (err) {
console.error(`automation was unable to run`, err, ...loggingArgs(job))
console.error(
`automation was unable to run`,
err,
...loggingArgs(job, { start, complete: true })
)
return { err }
}
}
@ -128,19 +158,6 @@ export async function clearMetadata() {
await db.bulkDocs(automationMetadata)
}
export function isCronTrigger(auto: Automation) {
return (
auto &&
auto.definition.trigger &&
auto.definition.trigger.stepId === CRON_STEP_ID
)
}
export function isRebootTrigger(auto: Automation) {
const trigger = auto ? auto.definition.trigger : null
return isCronTrigger(auto) && trigger?.inputs.cron === REBOOT_CRON
}
/**
* This function handles checking of any cron jobs that need to be enabled/updated.
* @param {string} appId The ID of the app in which we are checking for webhooks
@ -148,13 +165,13 @@ export function isRebootTrigger(auto: Automation) {
*/
export async function enableCronTrigger(appId: any, automation: Automation) {
const trigger = automation ? automation.definition.trigger : null
const validCron = sdk.automations.isCron(automation) && trigger?.inputs.cron
const needsCreated =
!sdk.automations.isReboot(automation) &&
!sdk.automations.disabled(automation)
// need to create cron job
if (
isCronTrigger(automation) &&
!isRebootTrigger(automation) &&
trigger?.inputs.cron
) {
if (validCron && needsCreated) {
// make a job id rather than letting Bull decide, makes it easier to handle on way out
const jobId = `${appId}_cron_${newid()}`
const job: any = await automationQueue.add(

View File

@ -1,5 +1,7 @@
import newid from "./newid"
import { db as dbCore } from "@budibase/backend-core"
import { DocumentType, VirtualDocumentType } from "@budibase/types"
export { DocumentType, VirtualDocumentType } from "@budibase/types"
type Optional = string | null
@ -19,7 +21,6 @@ export const BudibaseInternalDB = {
export const SEPARATOR = dbCore.SEPARATOR
export const StaticDatabases = dbCore.StaticDatabases
export const DocumentType = dbCore.DocumentType
export const APP_PREFIX = dbCore.APP_PREFIX
export const APP_DEV_PREFIX = dbCore.APP_DEV_PREFIX
export const isDevAppID = dbCore.isDevAppID
@ -284,10 +285,22 @@ export function getMultiIDParams(ids: string[]) {
* @returns {string} The new view ID which the view doc can be stored under.
*/
export function generateViewID(tableId: string) {
return `${tableId}${SEPARATOR}${newid()}`
return `${
VirtualDocumentType.VIEW
}${SEPARATOR}${tableId}${SEPARATOR}${newid()}`
}
export function isViewID(viewId: string) {
return viewId?.split(SEPARATOR)[0] === VirtualDocumentType.VIEW
}
export function extractViewInfoFromID(viewId: string) {
if (!isViewID(viewId)) {
throw new Error("Unable to extract table ID, is not a view ID")
}
const split = viewId.split(SEPARATOR)
split.shift()
viewId = split.join(SEPARATOR)
const regex = new RegExp(`^(?<tableId>.+)${SEPARATOR}([^${SEPARATOR}]+)$`)
const res = regex.exec(viewId)
return {
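View IDs are now namespaced: `generateViewID` prefixes the table ID with the virtual view document type, so `isViewID` and `extractViewInfoFromID` can tell view IDs apart from plain table IDs and recover the table. A worked sketch, assuming `SEPARATOR` is `_` and `VirtualDocumentType.VIEW` is the string `view` (both assumptions here; IDs are made up):

```typescript
// Round trip under the assumptions above.
const viewId = generateViewID("ta_users")   // e.g. "view_ta_users_1a2b3c"
isViewID(viewId)                            // true  – first segment is "view"
isViewID("ta_users")                        // false – plain table IDs are untouched
extractViewInfoFromID(viewId)               // recovers { tableId: "ta_users", ... }
```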

View File

@ -93,6 +93,21 @@ const SCHEMA: Integration = {
},
}
const defaultTypeCasting = function (field: any, next: any) {
if (
field.type == "DATETIME" ||
field.type === "DATE" ||
field.type === "TIMESTAMP" ||
field.type === "LONGLONG"
) {
return field.string()
}
if (field.type === "BIT" && field.length === 1) {
return field.buffer()?.[0]
}
return next()
}
export function bindingTypeCoerce(bindings: any[]) {
for (let i = 0; i < bindings.length; i++) {
const binding = bindings[i]
@ -147,21 +162,8 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
delete config.rejectUnauthorized
this.config = {
...config,
typeCast: defaultTypeCasting,
multipleStatements: true,
typeCast: function (field: any, next: any) {
if (
field.type == "DATETIME" ||
field.type === "DATE" ||
field.type === "TIMESTAMP" ||
field.type === "LONGLONG"
) {
return field.string()
}
if (field.type === "BIT" && field.length === 1) {
return field.buffer()?.[0]
}
return next()
},
}
}
@ -194,6 +196,37 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
return `concat(${parts.join(", ")})`
}
defineTypeCastingFromSchema(schema: {
[key: string]: { name: string; type: string }
}): void {
if (!schema) {
return
}
this.config.typeCast = function (field: any, next: any) {
if (schema[field.name]?.name === field.name) {
if (["LONGLONG", "NEWDECIMAL", "DECIMAL"].includes(field.type)) {
if (schema[field.name]?.type === "number") {
const value = field.string()
return value ? Number(value) : null
} else {
return field.string()
}
}
}
if (
field.type == "DATETIME" ||
field.type === "DATE" ||
field.type === "TIMESTAMP"
) {
return field.string()
}
if (field.type === "BIT" && field.length === 1) {
return field.buffer()?.[0]
}
return next()
}
}
async connect() {
this.client = await mysql.createConnection(this.config)
}
@ -204,7 +237,10 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
async internalQuery(
query: SqlQuery,
opts: { connect?: boolean; disableCoercion?: boolean } = {
opts: {
connect?: boolean
disableCoercion?: boolean
} = {
connect: true,
disableCoercion: false,
}
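Together with the `schema` now passed down from query preview/execution, `defineTypeCastingFromSchema` lets the driver-level `typeCast` consult the Budibase field type: BIGINT/DECIMAL columns are only coerced to `Number` when the schema says the field is numeric, and are otherwise returned as strings to avoid precision loss. A sketch of the schema shape it expects (field names and types are assumptions):

```typescript
// Shape matches the parameter of defineTypeCastingFromSchema above; values are illustrative.
const schema: { [key: string]: { name: string; type: string } } = {
  total:  { name: "total",  type: "number" }, // DECIMAL/LONGLONG -> Number(field.string())
  big_id: { name: "big_id", type: "string" }, // DECIMAL/LONGLONG -> left as a string
}
// integration.defineTypeCastingFromSchema(schema) installs a typeCast that applies the
// rules above and falls back to the default DATE/DATETIME/TIMESTAMP/BIT handling otherwise.
```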

View File

@ -1,9 +0,0 @@
import { Ctx, Row } from "@budibase/types"
export default async (ctx: Ctx<Row>, next: any) => {
if (ctx.request.body._viewId) {
return ctx.throw(400, "Table row endpoints cannot contain view info")
}
return next()
}

View File

@ -1,83 +0,0 @@
import { generator } from "@budibase/backend-core/tests"
import { BBRequest, FieldType, Row, Table } from "@budibase/types"
import { Next } from "koa"
import * as utils from "../../db/utils"
import noViewDataMiddleware from "../noViewData"
class TestConfiguration {
next: Next
throw: jest.Mock<(status: number, message: string) => never>
middleware: typeof noViewDataMiddleware
params: Record<string, any>
request?: Pick<BBRequest<Row>, "body">
constructor() {
this.next = jest.fn()
this.throw = jest.fn()
this.params = {}
this.middleware = noViewDataMiddleware
}
executeMiddleware(ctxRequestBody: Row) {
this.request = {
body: ctxRequestBody,
}
return this.middleware(
{
request: this.request as any,
throw: this.throw as any,
params: this.params,
} as any,
this.next
)
}
afterEach() {
jest.clearAllMocks()
}
}
describe("noViewData middleware", () => {
let config: TestConfiguration
beforeEach(() => {
config = new TestConfiguration()
})
afterEach(() => {
config.afterEach()
})
const getRandomData = () => ({
_id: generator.guid(),
name: generator.name(),
age: generator.age(),
address: generator.address(),
})
it("it should pass without view id data", async () => {
const data = getRandomData()
await config.executeMiddleware({
...data,
})
expect(config.next).toBeCalledTimes(1)
expect(config.throw).not.toBeCalled()
})
it("it should throw an error if _viewid is provided", async () => {
const data = getRandomData()
await config.executeMiddleware({
_viewId: generator.guid(),
...data,
})
expect(config.throw).toBeCalledTimes(1)
expect(config.throw).toBeCalledWith(
400,
"Table row endpoints cannot contain view info"
)
expect(config.next).not.toBeCalled()
})
})

View File

@ -117,7 +117,7 @@ describe("trimViewRowInfo middleware", () => {
})
expect(config.request?.body).toEqual(data)
expect(config.params.tableId).toEqual(table._id)
expect(config.params.sourceId).toEqual(table._id)
expect(config.next).toBeCalledTimes(1)
expect(config.throw).not.toBeCalled()
@ -143,32 +143,9 @@ describe("trimViewRowInfo middleware", () => {
name: data.name,
address: data.address,
})
expect(config.params.tableId).toEqual(table._id)
expect(config.params.sourceId).toEqual(table._id)
expect(config.next).toBeCalledTimes(1)
expect(config.throw).not.toBeCalled()
})
it("it should throw an error if no viewid is provided on the body", async () => {
const data = getRandomData()
await config.executeMiddleware(viewId, {
...data,
})
expect(config.throw).toBeCalledTimes(1)
expect(config.throw).toBeCalledWith(400, "_viewId is required")
expect(config.next).not.toBeCalled()
})
it("it should throw an error if no viewid is provided on the parameters", async () => {
const data = getRandomData()
await config.executeMiddleware(undefined as any, {
_viewId: viewId,
...data,
})
expect(config.throw).toBeCalledTimes(1)
expect(config.throw).toBeCalledWith(400, "viewId path is required")
expect(config.next).not.toBeCalled()
})
})

View File

@ -3,26 +3,35 @@ import * as utils from "../db/utils"
import sdk from "../sdk"
import { db } from "@budibase/backend-core"
import { Next } from "koa"
import { getTableId } from "../api/controllers/row/utils"
export default async (ctx: Ctx<Row>, next: Next) => {
const { body } = ctx.request
const { _viewId: viewId } = body
let { _viewId: viewId } = body
const possibleViewId = getTableId(ctx)
if (utils.isViewID(possibleViewId)) {
viewId = possibleViewId
}
// nothing to do, it is not a view (just a table ID)
if (!viewId) {
return ctx.throw(400, "_viewId is required")
return next()
}
if (!ctx.params.viewId) {
return ctx.throw(400, "viewId path is required")
const { tableId } = utils.extractViewInfoFromID(viewId)
// don't need to trim delete requests
if (ctx?.method?.toLowerCase() !== "delete") {
const { _viewId, ...trimmedView } = await trimViewFields(
viewId,
tableId,
body
)
ctx.request.body = trimmedView
}
const { tableId } = utils.extractViewInfoFromID(ctx.params.viewId)
const { _viewId, ...trimmedView } = await trimViewFields(
viewId,
tableId,
body
)
ctx.request.body = trimmedView
ctx.params.tableId = tableId
ctx.params.sourceId = tableId
return next()
}
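A minimal, self-contained sketch of the reworked control flow above, assuming a "view_" ID prefix and simplified stand-ins for utils.isViewID and utils.extractViewInfoFromID; the field trimming is reduced to stripping _viewId, and only the branching mirrors the real middleware:
// sketch only, not the real trimViewRowInfo middleware
type SketchRow = Record<string, any> & { _viewId?: string }
type SketchCtx = {
  request: { body: SketchRow }
  params: Record<string, any>
  method?: string
}
// assumed stand-ins for the real db utils
const isViewID = (id?: string) => !!id?.startsWith("view_")
const extractTableId = (viewId: string) => viewId.split("_")[1]
async function trimViewRowInfoSketch(ctx: SketchCtx, next: () => Promise<void>) {
  let viewId = ctx.request.body._viewId
  // the view ID can also arrive on the path instead of the body
  const possibleViewId = ctx.params.viewId ?? ctx.params.tableId
  if (isViewID(possibleViewId)) {
    viewId = possibleViewId
  }
  if (!viewId) {
    // plain table request: nothing to trim, no error any more
    return next()
  }
  const tableId = extractTableId(viewId)
  if (ctx.method?.toLowerCase() !== "delete") {
    // non-delete requests have _viewId stripped before reaching the controller
    const { _viewId, ...trimmed } = ctx.request.body
    ctx.request.body = trimmed
  }
  ctx.params.tableId = tableId
  ctx.params.sourceId = tableId
  return next()
}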

View File

@ -0,0 +1,38 @@
import { context } from "@budibase/backend-core"
import { Automation, AutomationState, DocumentType } from "@budibase/types"
import { definitions } from "../../../automations/triggerInfo"
const REBOOT_CRON = "@reboot"
export async function exists(automationId: string) {
if (!automationId?.startsWith(DocumentType.AUTOMATION)) {
throw new Error("Invalid automation ID.")
}
const db = context.getAppDB()
return db.docExists(automationId)
}
export async function get(automationId: string) {
const db = context.getAppDB()
return (await db.get(automationId)) as Automation
}
export function disabled(automation: Automation) {
return automation.state === AutomationState.DISABLED || !hasSteps(automation)
}
export function isCron(automation: Automation) {
return (
automation?.definition.trigger &&
automation?.definition.trigger.stepId === definitions.CRON.stepId
)
}
export function isReboot(automation: Automation) {
const trigger = automation?.definition.trigger
return isCron(automation) && trigger?.inputs.cron === REBOOT_CRON
}
export function hasSteps(automation: Automation) {
return automation?.definition?.steps?.length > 0
}
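A hedged usage sketch for the helpers above: a caller re-reads the automation and decides whether it should still run. The destructuring assumes the DocExistsResponse shape added to the Database interface later in this diff, and exists/get/disabled are the functions defined in this module:
// illustrative only; error handling kept minimal on purpose
async function shouldStillRun(automationId: string): Promise<boolean> {
  const { exists: found } = await exists(automationId)
  if (!found) {
    // the automation document has been deleted since the job was queued
    return false
  }
  const automation = await get(automationId)
  // disabled() also returns true when the automation has no steps
  return !disabled(automation)
}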

View File

@ -1,7 +1,9 @@
import * as webhook from "./webhook"
import * as utils from "./utils"
import * as automations from "./automations"
export default {
webhook,
utils,
...automations,
}

View File

@ -1,17 +1,14 @@
import { HTTPError, context } from "@budibase/backend-core"
import { context, HTTPError } from "@budibase/backend-core"
import { FieldSchema, TableSchema, View, ViewV2 } from "@budibase/types"
import sdk from "../../../sdk"
import * as utils from "../../../db/utils"
import merge from "lodash/merge"
export async function get(viewId: string): Promise<ViewV2 | undefined> {
const { tableId } = utils.extractViewInfoFromID(viewId)
const table = await sdk.tables.getTable(tableId)
const views = Object.values(table.views!)
const view = views.find(v => isV2(v) && v.id === viewId) as ViewV2 | undefined
return view
return views.find(v => isV2(v) && v.id === viewId) as ViewV2 | undefined
}
export async function create(

View File

@ -1,4 +1,4 @@
import { PatchRowRequest } from "@budibase/types"
import { PatchRowRequest, SaveRowRequest, Row } from "@budibase/types"
import TestConfiguration from "../TestConfiguration"
import { TestAPI } from "./base"
@ -8,12 +8,12 @@ export class RowAPI extends TestAPI {
}
get = async (
tableId: string,
sourceId: string,
rowId: string,
{ expectStatus } = { expectStatus: 200 }
) => {
const request = this.request
.get(`/api/${tableId}/rows/${rowId}`)
.get(`/api/${sourceId}/rows/${rowId}`)
.set(this.config.defaultHeaders())
.expect(expectStatus)
if (expectStatus !== 404) {
@ -22,16 +22,43 @@ export class RowAPI extends TestAPI {
return request
}
save = async (
sourceId: string,
row: SaveRowRequest,
{ expectStatus } = { expectStatus: 200 }
): Promise<Row> => {
const resp = await this.request
.post(`/api/${sourceId}/rows`)
.send(row)
.set(this.config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(expectStatus)
return resp.body as Row
}
patch = async (
tableId: string,
sourceId: string,
row: PatchRowRequest,
{ expectStatus } = { expectStatus: 200 }
) => {
return this.request
.patch(`/api/${tableId}/rows`)
.patch(`/api/${sourceId}/rows`)
.send(row)
.set(this.config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(expectStatus)
}
delete = async (
sourceId: string,
rows: Row[],
{ expectStatus } = { expectStatus: 200 }
) => {
return this.request
.delete(`/api/${sourceId}/rows`)
.send({ rows })
.set(this.config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(expectStatus)
}
}
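Usage sketch for the updated helpers above, with a view-backed source ID passed where a table ID used to go. The rows and view values are assumed to come from the surrounding test setup; only the helper signatures come from the class itself:
// rows: RowAPI instance, view: a ViewV2 created earlier in the test (assumed)
const saved = await rows.save(view.id, { name: "Bob" })
await rows.get(view.id, saved._id!)
await rows.patch(view.id, { ...saved, name: "Bobby" } as PatchRowRequest)
await rows.delete(view.id, [saved])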

View File

@ -1,10 +1,6 @@
import {
CreateViewRequest,
UpdateViewRequest,
DeleteRowRequest,
PatchRowRequest,
PatchRowResponse,
Row,
ViewV2,
SearchViewRowRequest,
} from "@budibase/types"
@ -90,46 +86,4 @@ export class ViewV2API extends TestAPI {
.expect("Content-Type", /json/)
.expect(expectStatus)
}
row = {
create: async (
viewId: string,
row: Row,
{ expectStatus } = { expectStatus: 200 }
): Promise<Row> => {
const result = await this.request
.post(`/api/v2/views/${viewId}/rows`)
.send(row)
.set(this.config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(expectStatus)
return result.body as Row
},
update: async (
viewId: string,
rowId: string,
row: PatchRowRequest,
{ expectStatus } = { expectStatus: 200 }
): Promise<PatchRowResponse> => {
const result = await this.request
.patch(`/api/v2/views/${viewId}/rows/${rowId}`)
.send(row)
.set(this.config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(expectStatus)
return result.body as PatchRowResponse
},
delete: async (
viewId: string,
body: DeleteRowRequest,
{ expectStatus } = { expectStatus: 200 }
): Promise<any> => {
const result = await this.request
.delete(`/api/v2/views/${viewId}/rows`)
.send(body)
.set(this.config.defaultHeaders())
.expect(expectStatus)
return result.body
},
}
}

View File

@ -2,9 +2,9 @@ import { default as threadUtils } from "./utils"
import { Job } from "bull"
threadUtils.threadSetup()
import {
isRecurring,
disableCronById,
isErrorInOutput,
isRecurring,
} from "../automations/utils"
import * as actions from "../automations/actions"
import * as automationUtils from "../automations/automationUtils"
@ -15,17 +15,17 @@ import { AutomationErrors, MAX_AUTOMATION_RECURRING_ERRORS } from "../constants"
import { storeLog } from "../automations/logging"
import {
Automation,
AutomationStep,
AutomationStatus,
AutomationMetadata,
AutomationJob,
AutomationData,
AutomationJob,
AutomationMetadata,
AutomationStatus,
AutomationStep,
} from "@budibase/types"
import {
LoopStep,
LoopInput,
TriggerOutput,
AutomationContext,
LoopInput,
LoopStep,
TriggerOutput,
} from "../definitions/automations"
import { WorkerCallback } from "./definitions"
import { context, logging } from "@budibase/backend-core"
@ -34,6 +34,8 @@ import { cloneDeep } from "lodash/fp"
import { performance } from "perf_hooks"
import * as sdkUtils from "../sdk/utils"
import env from "../environment"
import sdk from "../sdk"
const FILTER_STEP_ID = actions.BUILTIN_ACTION_DEFINITIONS.FILTER.stepId
const LOOP_STEP_ID = actions.BUILTIN_ACTION_DEFINITIONS.LOOP.stepId
const CRON_STEP_ID = triggerDefs.CRON.stepId
@ -514,7 +516,8 @@ class Orchestrator {
export function execute(job: Job<AutomationData>, callback: WorkerCallback) {
const appId = job.data.event.appId
const automationId = job.data.automation._id
const automation = job.data.automation
const automationId = automation._id
if (!appId) {
throw new Error("Unable to execute, event doesn't contain app ID.")
}
@ -525,10 +528,30 @@ export function execute(job: Job<AutomationData>, callback: WorkerCallback) {
appId,
automationId,
task: async () => {
let automation = job.data.automation,
isCron = sdk.automations.isCron(job.data.automation),
notFound = false
try {
automation = await sdk.automations.get(automationId)
} catch (err: any) {
// automation no longer exists
notFound = err
}
const disabled = sdk.automations.disabled(automation)
const stopAutomation = disabled || notFound
const envVars = await sdkUtils.getEnvironmentVariables()
// put into automation thread for whole context
await context.doInEnvironmentContext(envVars, async () => {
const automationOrchestrator = new Orchestrator(job)
// hard stop on automations
if (isCron && stopAutomation) {
await automationOrchestrator.stopCron(
disabled ? "disabled" : "not_found"
)
}
if (stopAutomation) {
return
}
try {
const response = await automationOrchestrator.execute()
callback(null, response)
@ -557,11 +580,10 @@ export function executeSynchronously(job: Job) {
// put into automation thread for whole context
return context.doInEnvironmentContext(envVars, async () => {
const automationOrchestrator = new Orchestrator(job)
const response = await Promise.race([
return await Promise.race([
automationOrchestrator.execute(),
timeoutPromise,
])
return response
})
})
}

View File

@ -11,6 +11,12 @@ export interface QueryEvent {
queryId: string
environmentVariables?: Record<string, string>
ctx?: any
schema?: {
[key: string]: {
name: string
type: string
}
}
}
export interface QueryVariable {

View File

@ -8,6 +8,7 @@ import { context, cache, auth } from "@budibase/backend-core"
import { getGlobalIDFromUserMetadataID } from "../db/utils"
import sdk from "../sdk"
import { cloneDeep } from "lodash/fp"
import { SourceName } from "@budibase/types"
import { isSQL } from "../integrations/utils"
import { interpolateSQL } from "../integrations/queries/sql"
@ -28,6 +29,7 @@ class QueryRunner {
hasRerun: boolean
hasRefreshedOAuth: boolean
hasDynamicVariables: boolean
schema: any
constructor(input: QueryEvent, flags = { noRecursiveQuery: false }) {
this.datasource = input.datasource
@ -37,6 +39,7 @@ class QueryRunner {
this.pagination = input.pagination
this.transformer = input.transformer
this.queryId = input.queryId
this.schema = input.schema
this.noRecursiveQuery = flags.noRecursiveQuery
this.cachedVariables = []
// Additional context items for enrichment
@ -51,7 +54,7 @@ class QueryRunner {
}
async execute(): Promise<any> {
let { datasource, fields, queryVerb, transformer } = this
let { datasource, fields, queryVerb, transformer, schema } = this
let datasourceClone = cloneDeep(datasource)
let fieldsClone = cloneDeep(fields)
@ -70,6 +73,9 @@ class QueryRunner {
const integration = new Integration(datasourceClone.config)
// define the type casting from the schema
integration.defineTypeCastingFromSchema?.(schema)
// pre-query, make sure datasource variables are added to parameters
const parameters = await this.addDatasourceVariables()

View File

@ -1,5 +1,5 @@
import { permissions, roles } from "@budibase/backend-core"
import { DocumentType } from "../db/utils"
import { DocumentType, VirtualDocumentType } from "../db/utils"
export const CURRENTLY_SUPPORTED_LEVELS: string[] = [
permissions.PermissionLevel.WRITE,
@ -11,9 +11,10 @@ export function getPermissionType(resourceId: string) {
const docType = Object.values(DocumentType).filter(docType =>
resourceId.startsWith(docType)
)[0]
switch (docType) {
switch (docType as DocumentType | VirtualDocumentType) {
case DocumentType.TABLE:
case DocumentType.ROW:
case VirtualDocumentType.VIEW:
return permissions.PermissionType.TABLE
case DocumentType.AUTOMATION:
return permissions.PermissionType.AUTOMATION
@ -22,9 +23,6 @@ export function getPermissionType(resourceId: string) {
case DocumentType.QUERY:
case DocumentType.DATASOURCE:
return permissions.PermissionType.QUERY
default:
// views don't have an ID, will end up here
return permissions.PermissionType.VIEW
}
}
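The practical effect of the switch change above: IDs with the VirtualDocumentType.VIEW prefix now map to table permissions, and the old default VIEW branch goes away. A rough illustration, with the exact ID prefixes being assumptions:
// assumed prefixes for illustration: "ta_" table, "view_" view, "au_" automation
getPermissionType("ta_users")   // PermissionType.TABLE
getPermissionType("view_a1b2")  // PermissionType.TABLE (previously fell through to the VIEW default)
getPermissionType("au_nightly") // PermissionType.AUTOMATION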

View File

@ -0,0 +1,4 @@
*
!dist/**/*
dist/tsconfig.build.tsbuildinfo
!package.json

View File

@ -2,19 +2,13 @@
"name": "@budibase/shared-core",
"version": "0.0.0",
"description": "Shared data utils",
"main": "src/index.ts",
"types": "src/index.ts",
"exports": {
".": {
"import": "./dist/index.js",
"require": "./src/index.ts"
}
},
"main": "dist/index.js",
"types": "dist/index.d.ts",
"author": "Budibase",
"license": "GPL-3.0",
"scripts": {
"prebuild": "rimraf dist/",
"build": "tsc -p tsconfig.build.json",
"build": "node ../../scripts/build.js && tsc -p tsconfig.build.json --emitDeclarationOnly --paths null",
"build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput",
"dev:builder": "yarn prebuild && tsc -p tsconfig.json --watch --preserveWatchOutput",
"check:types": "tsc -p tsconfig.json --noEmit --paths null"
@ -26,5 +20,19 @@
"concurrently": "^7.6.0",
"rimraf": "3.0.2",
"typescript": "4.7.3"
},
"nx": {
"targets": {
"build": {
"dependsOn": [
{
"projects": [
"@budibase/types"
],
"target": "build"
}
]
}
}
}
}

View File

@ -12,7 +12,10 @@
"declaration": true,
"types": ["node"],
"outDir": "dist",
"skipLibCheck": true
"skipLibCheck": true,
"paths": {
"@budibase/types": ["../types/src"]
}
},
"include": ["**/*.js", "**/*.ts"],
"exclude": [

View File

@ -1,13 +1,4 @@
{
"extends": "./tsconfig.build.json",
"compilerOptions": {
"baseUrl": ".",
"rootDir": "./src",
"composite": true,
"tsBuildInfoFile": "dist/tsconfig.tsbuildinfo",
"paths": {
"@budibase/types": ["../../types/src"]
}
},
"exclude": ["node_modules", "dist"]
}

View File

@ -0,0 +1,4 @@
*
!dist/**/*
dist/tsconfig.build.tsbuildinfo
!package.json

View File

@ -2,19 +2,13 @@
"name": "@budibase/types",
"version": "0.0.0",
"description": "Budibase types",
"main": "src/index.ts",
"types": "src/index.ts",
"exports": {
".": {
"import": "./dist/index.js",
"require": "./src/index.ts"
}
},
"main": "dist/index.js",
"types": "dist/index.d.ts",
"author": "Budibase",
"license": "GPL-3.0",
"scripts": {
"prebuild": "rimraf dist/",
"build": "tsc -p tsconfig.build.json",
"build": "node ../../scripts/build.js && tsc -p tsconfig.build.json --emitDeclarationOnly",
"build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput",
"dev:builder": "yarn prebuild && tsc -p tsconfig.json --watch --preserveWatchOutput",
"check:types": "tsc -p tsconfig.json --noEmit --paths null"

View File

@ -1,6 +1,8 @@
import { SearchParams } from "../../../sdk"
import { Row } from "../../../documents"
export interface SaveRowRequest extends Row {}
export interface PatchRowRequest extends Row {
_id: string
_rev: string

View File

@ -100,6 +100,10 @@ export const AutomationStepIdArray = [
...Object.values(AutomationTriggerStepId),
]
export enum AutomationState {
DISABLED = "disabled",
}
export interface Automation extends Document {
definition: {
steps: AutomationStep[]
@ -112,6 +116,7 @@ export interface Automation extends Document {
name: string
internal?: boolean
type?: string
state?: AutomationState
}
interface BaseIOStructure {

View File

@ -39,6 +39,12 @@ export enum DocumentType {
AUDIT_LOG = "al",
}
// these documents don't really exist, they are part of other
// documents or enriched into existence as part of get requests
export enum VirtualDocumentType {
VIEW = "view",
}
export interface Document {
_id?: string
_rev?: string

View File

@ -166,6 +166,12 @@ export interface IntegrationBase {
delete?(query: any): Promise<any[] | any>
testConnection?(): Promise<ConnectionInfo>
getExternalSchema?(): Promise<string>
defineTypeCastingFromSchema?(schema: {
[key: string]: {
name: string
type: string
}
}): void
}
export interface DatasourcePlus extends IntegrationBase {
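A sketch of an integration opting into the new optional hook; the class and its column handling are hypothetical, only the hook signature matches IntegrationBase above:
class ExampleIntegration {
  private columnTypes: Record<string, string> = {}
  // matches the optional IntegrationBase hook added above
  defineTypeCastingFromSchema(schema: {
    [key: string]: { name: string; type: string }
  }): void {
    // remember the declared type of each column so raw driver values
    // can be coerced (e.g. strings back into dates) after a read
    for (const [field, { type }] of Object.entries(schema)) {
      this.columnTypes[field] = type
    }
  }
  async read(query: { sql: string }): Promise<any[]> {
    // run the query with the underlying driver, then apply this.columnTypes
    return []
  }
}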

View File

@ -40,6 +40,11 @@ export type DatabasePutOpts = {
force?: boolean
}
export type DocExistsResponse = {
_rev?: string
exists: boolean
}
export type DatabaseCreateIndexOpts = {
index: {
fields: string[]
@ -90,6 +95,7 @@ export interface Database {
exists(): Promise<boolean>
checkSetup(): Promise<Nano.DocumentScope<any>>
get<T>(id?: string): Promise<T>
docExists(id: string): Promise<DocExistsResponse>
remove(
id: string | Document,
rev?: string
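A short sketch of the new docExists call, assuming backend-core's context for the app database; it replaces catching a 404 from get() with an explicit existence check:
import { context } from "@budibase/backend-core"
async function wasDeleted(docId: string): Promise<boolean> {
  const db = context.getAppDB()
  // lightweight existence check, no thrown 404 to handle as with get()
  const { exists } = await db.docExists(docId)
  return !exists
}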

View File

@ -14,6 +14,5 @@ export enum PermissionType {
WEBHOOK = "webhook",
BUILDER = "builder",
GLOBAL_BUILDER = "globalBuilder",
VIEW = "view",
QUERY = "query",
}

View File

@ -1,12 +1,14 @@
import { auth } from "@budibase/backend-core"
import Joi from "joi"
const OPTIONAL_STRING = Joi.string().allow(null, "")
let schema: any = {
email: Joi.string().allow(null, ""),
password: Joi.string().allow(null, ""),
email: OPTIONAL_STRING,
password: OPTIONAL_STRING,
forceResetPassword: Joi.boolean().optional(),
firstName: Joi.string().allow(null, ""),
lastName: Joi.string().allow(null, ""),
firstName: OPTIONAL_STRING,
lastName: OPTIONAL_STRING,
builder: Joi.object({
global: Joi.boolean().optional(),
apps: Joi.array().optional(),
@ -21,8 +23,8 @@ export const buildSelfSaveValidation = () => {
schema = {
password: Joi.string().optional(),
forceResetPassword: Joi.boolean().optional(),
firstName: Joi.string().allow("").optional(),
lastName: Joi.string().allow("").optional(),
firstName: OPTIONAL_STRING,
lastName: OPTIONAL_STRING,
onboardedAt: Joi.string().optional(),
}
return auth.joiValidator.body(Joi.object(schema).required().unknown(false))
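For reference, what the shared OPTIONAL_STRING constant above accepts (standard Joi validate() results, shown as a sketch):
OPTIONAL_STRING.validate(undefined).error // undefined: the field may be omitted
OPTIONAL_STRING.validate(null).error      // undefined: null is allowed
OPTIONAL_STRING.validate("").error        // undefined: empty string is allowed
OPTIONAL_STRING.validate(123).error       // ValidationError: value must be a string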