Merge branch 'master' into automation-branching-ux-updates

This commit is contained in:
Andrew Kingston 2024-12-06 16:11:57 +00:00 committed by GitHub
commit 3efac145a4
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
178 changed files with 2743 additions and 1425 deletions

View File

@ -200,6 +200,20 @@ jobs:
- run: yarn --frozen-lockfile - run: yarn --frozen-lockfile
- name: Set up PostgreSQL 16
if: matrix.datasource == 'postgres'
run: |
sudo systemctl stop postgresql
sudo apt-get remove --purge -y postgresql* libpq-dev
sudo rm -rf /etc/postgresql /var/lib/postgresql
sudo apt-get autoremove -y
sudo apt-get autoclean
sudo sh -c 'echo "deb http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list'
wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add -
sudo apt-get update
sudo apt-get install -y postgresql-16
- name: Test server - name: Test server
env: env:
DATASOURCE: ${{ matrix.datasource }} DATASOURCE: ${{ matrix.datasource }}

View File

@ -22,6 +22,6 @@
"@types/react": "17.0.39", "@types/react": "17.0.39",
"eslint": "8.10.0", "eslint": "8.10.0",
"eslint-config-next": "12.1.0", "eslint-config-next": "12.1.0",
"typescript": "5.5.2" "typescript": "5.7.2"
} }
} }

View File

@ -47,6 +47,8 @@ async function killContainers(containers: ContainerInfo[]) {
} }
export default async function setup() { export default async function setup() {
process.env.TESTCONTAINERS_RYUK_DISABLED = "true"
// For whatever reason, testcontainers doesn't always use the correct current // For whatever reason, testcontainers doesn't always use the correct current
// docker context. This bit of code forces the issue by finding the current // docker context. This bit of code forces the issue by finding the current
// context and setting it as the DOCKER_HOST environment // context and setting it as the DOCKER_HOST environment
@ -75,6 +77,7 @@ export default async function setup() {
try { try {
const couchdb = new GenericContainer("budibase/couchdb:v3.3.3-sqs-v2.1.1") const couchdb = new GenericContainer("budibase/couchdb:v3.3.3-sqs-v2.1.1")
.withName("couchdb_testcontainer")
.withExposedPorts(5984, 4984) .withExposedPorts(5984, 4984)
.withEnvironment({ .withEnvironment({
COUCHDB_PASSWORD: "budibase", COUCHDB_PASSWORD: "budibase",
@ -99,6 +102,7 @@ export default async function setup() {
) )
const minio = new GenericContainer("minio/minio") const minio = new GenericContainer("minio/minio")
.withName("minio_testcontainer")
.withExposedPorts(9000) .withExposedPorts(9000)
.withCommand(["server", "/data"]) .withCommand(["server", "/data"])
.withTmpFs({ "/data": "rw" }) .withTmpFs({ "/data": "rw" })

View File

@ -46,6 +46,11 @@ server {
} }
location ~ ^/api/(system|admin|global)/ { location ~ ^/api/(system|admin|global)/ {
# Enable buffering for potentially large OIDC configs
proxy_buffering on;
proxy_buffer_size 16k;
proxy_buffers 4 32k;
proxy_pass http://127.0.0.1:4002; proxy_pass http://127.0.0.1:4002;
} }

View File

@ -1,6 +1,6 @@
{ {
"$schema": "node_modules/lerna/schemas/lerna-schema.json", "$schema": "node_modules/lerna/schemas/lerna-schema.json",
"version": "3.2.16", "version": "3.2.25",
"npmClient": "yarn", "npmClient": "yarn",
"concurrency": 20, "concurrency": 20,
"command": { "command": {

View File

@ -28,7 +28,7 @@
"proper-lockfile": "^4.1.2", "proper-lockfile": "^4.1.2",
"svelte": "4.2.19", "svelte": "4.2.19",
"svelte-eslint-parser": "^0.33.1", "svelte-eslint-parser": "^0.33.1",
"typescript": "5.5.2", "typescript": "5.7.2",
"typescript-eslint": "^7.3.1", "typescript-eslint": "^7.3.1",
"yargs": "^17.7.2" "yargs": "^17.7.2"
}, },

View File

@ -83,6 +83,7 @@
"@types/semver": "7.3.7", "@types/semver": "7.3.7",
"@types/tar-fs": "2.0.1", "@types/tar-fs": "2.0.1",
"@types/uuid": "8.3.4", "@types/uuid": "8.3.4",
"@types/koa": "2.13.4",
"chance": "1.1.8", "chance": "1.1.8",
"ioredis-mock": "8.9.0", "ioredis-mock": "8.9.0",
"jest": "29.7.0", "jest": "29.7.0",
@ -90,9 +91,9 @@
"nock": "^13.5.6", "nock": "^13.5.6",
"pino-pretty": "10.0.0", "pino-pretty": "10.0.0",
"pouchdb-adapter-memory": "7.2.2", "pouchdb-adapter-memory": "7.2.2",
"testcontainers": "^10.7.2", "testcontainers": "10.16.0",
"timekeeper": "2.2.0", "timekeeper": "2.2.0",
"typescript": "5.5.2" "typescript": "5.7.2"
}, },
"nx": { "nx": {
"targets": { "targets": {

View File

@ -121,7 +121,7 @@ const identifyInstallationGroup = async (
const identifyTenantGroup = async ( const identifyTenantGroup = async (
tenantId: string, tenantId: string,
account: Account | undefined, hosting: Hosting,
timestamp?: string | number timestamp?: string | number
): Promise<void> => { ): Promise<void> => {
const id = await getEventTenantId(tenantId) const id = await getEventTenantId(tenantId)
@ -129,26 +129,12 @@ const identifyTenantGroup = async (
const installationId = await getInstallationId() const installationId = await getInstallationId()
const environment = getDeploymentEnvironment() const environment = getDeploymentEnvironment()
let hosting: Hosting
let profession: string | undefined
let companySize: string | undefined
if (account) {
profession = account.profession
companySize = account.size
hosting = account.hosting
} else {
hosting = getHostingFromEnv()
}
const group: TenantGroup = { const group: TenantGroup = {
id, id,
type, type,
hosting, hosting,
environment, environment,
installationId, installationId,
profession,
companySize,
} }
await identifyGroup(group, timestamp) await identifyGroup(group, timestamp)

View File

@ -266,12 +266,14 @@ export class FlagSet<V extends Flag<any>, T extends { [key: string]: V }> {
// new flag, add it here and use the `fetch` and `get` functions to access it. // new flag, add it here and use the `fetch` and `get` functions to access it.
// All of the machinery in this file is to make sure that flags have their // All of the machinery in this file is to make sure that flags have their
// default values set correctly and their types flow through the system. // default values set correctly and their types flow through the system.
export const flags = new FlagSet({ const flagsConfig: Record<FeatureFlag, Flag<any>> = {
[FeatureFlag.DEFAULT_VALUES]: Flag.boolean(true), [FeatureFlag.DEFAULT_VALUES]: Flag.boolean(true),
[FeatureFlag.AUTOMATION_BRANCHING]: Flag.boolean(true), [FeatureFlag.AUTOMATION_BRANCHING]: Flag.boolean(true),
[FeatureFlag.AI_CUSTOM_CONFIGS]: Flag.boolean(true), [FeatureFlag.AI_CUSTOM_CONFIGS]: Flag.boolean(true),
[FeatureFlag.BUDIBASE_AI]: Flag.boolean(true), [FeatureFlag.BUDIBASE_AI]: Flag.boolean(true),
}) [FeatureFlag.USE_ZOD_VALIDATOR]: Flag.boolean(env.isDev()),
}
export const flags = new FlagSet(flagsConfig)
type UnwrapPromise<T> = T extends Promise<infer U> ? U : T type UnwrapPromise<T> = T extends Promise<infer U> ? U : T
export type FeatureFlags = UnwrapPromise<ReturnType<typeof flags.fetch>> export type FeatureFlags = UnwrapPromise<ReturnType<typeof flags.fetch>>

View File

@ -1,6 +1,10 @@
import { BBContext } from "@budibase/types" import { Ctx } from "@budibase/types"
import type { Middleware, Next } from "koa"
export default async (ctx: BBContext | any, next: any) => { // this middleware exists purely to be overridden by middlewares supplied by the @budibase/pro library
const middleware = (async (ctx: Ctx, next: Next) => {
// Placeholder for audit log middleware // Placeholder for audit log middleware
return next() return next()
} }) as Middleware
export default middleware

View File

@ -22,6 +22,7 @@ import {
} from "@budibase/types" } from "@budibase/types"
import { ErrorCode, InvalidAPIKeyError } from "../errors" import { ErrorCode, InvalidAPIKeyError } from "../errors"
import tracer from "dd-trace" import tracer from "dd-trace"
import type { Middleware, Next } from "koa"
const ONE_MINUTE = env.SESSION_UPDATE_PERIOD const ONE_MINUTE = env.SESSION_UPDATE_PERIOD
? parseInt(env.SESSION_UPDATE_PERIOD) ? parseInt(env.SESSION_UPDATE_PERIOD)
@ -94,6 +95,14 @@ async function checkApiKey(
}) })
} }
function getHeader(ctx: Ctx, header: Header): string | undefined {
const contents = ctx.request.headers[header]
if (Array.isArray(contents)) {
throw new Error("Unexpected header format")
}
return contents
}
/** /**
* This middleware is tenancy aware, so that it does not depend on other middlewares being used. * This middleware is tenancy aware, so that it does not depend on other middlewares being used.
* The tenancy modules should not be used here and it should be assumed that the tenancy context * The tenancy modules should not be used here and it should be assumed that the tenancy context
@ -106,9 +115,9 @@ export default function (
} }
) { ) {
const noAuthOptions = noAuthPatterns ? buildMatcherRegex(noAuthPatterns) : [] const noAuthOptions = noAuthPatterns ? buildMatcherRegex(noAuthPatterns) : []
return async (ctx: Ctx | any, next: any) => { return (async (ctx: Ctx, next: Next) => {
let publicEndpoint = false let publicEndpoint = false
const version = ctx.request.headers[Header.API_VER] const version = getHeader(ctx, Header.API_VER)
// the path is not authenticated // the path is not authenticated
const found = matches(ctx, noAuthOptions) const found = matches(ctx, noAuthOptions)
if (found) { if (found) {
@ -116,18 +125,18 @@ export default function (
} }
try { try {
// check the actual user is authenticated first, try header or cookie // check the actual user is authenticated first, try header or cookie
let headerToken = ctx.request.headers[Header.TOKEN] let headerToken = getHeader(ctx, Header.TOKEN)
const authCookie = const authCookie =
getCookie<SessionCookie>(ctx, Cookie.Auth) || getCookie<SessionCookie>(ctx, Cookie.Auth) ||
openJwt<SessionCookie>(headerToken) openJwt<SessionCookie>(headerToken)
let apiKey = ctx.request.headers[Header.API_KEY] let apiKey = getHeader(ctx, Header.API_KEY)
if (!apiKey && ctx.request.headers[Header.AUTHORIZATION]) { if (!apiKey && ctx.request.headers[Header.AUTHORIZATION]) {
apiKey = ctx.request.headers[Header.AUTHORIZATION].split(" ")[1] apiKey = ctx.request.headers[Header.AUTHORIZATION].split(" ")[1]
} }
const tenantId = ctx.request.headers[Header.TENANT_ID] const tenantId = getHeader(ctx, Header.TENANT_ID)
let authenticated: boolean = false, let authenticated: boolean = false,
user: User | { tenantId: string } | undefined = undefined, user: User | { tenantId: string } | undefined = undefined,
internal: boolean = false, internal: boolean = false,
@ -243,5 +252,5 @@ export default function (
ctx.throw(err.status || 403, err) ctx.throw(err.status || 403, err)
} }
} }
} }) as Middleware
} }

View File

@ -1,6 +1,7 @@
import { Header } from "../constants" import { Header } from "../constants"
import { buildMatcherRegex, matches } from "./matchers" import { buildMatcherRegex, matches } from "./matchers"
import { BBContext, EndpointMatcher } from "@budibase/types" import { Ctx, EndpointMatcher } from "@budibase/types"
import type { Middleware, Next } from "koa"
/** /**
* GET, HEAD and OPTIONS methods are considered safe operations * GET, HEAD and OPTIONS methods are considered safe operations
@ -36,7 +37,7 @@ export default function (
opts: { noCsrfPatterns: EndpointMatcher[] } = { noCsrfPatterns: [] } opts: { noCsrfPatterns: EndpointMatcher[] } = { noCsrfPatterns: [] }
) { ) {
const noCsrfOptions = buildMatcherRegex(opts.noCsrfPatterns) const noCsrfOptions = buildMatcherRegex(opts.noCsrfPatterns)
return async (ctx: BBContext | any, next: any) => { return (async (ctx: Ctx, next: Next) => {
// don't apply for excluded paths // don't apply for excluded paths
const found = matches(ctx, noCsrfOptions) const found = matches(ctx, noCsrfOptions)
if (found) { if (found) {
@ -77,5 +78,5 @@ export default function (
} }
return next() return next()
} }) as Middleware
} }

View File

@ -1,11 +1,11 @@
import { Header } from "../constants" import { Header } from "../constants"
import { BBContext } from "@budibase/types" import { Ctx } from "@budibase/types"
import { isValidInternalAPIKey } from "../utils" import { isValidInternalAPIKey } from "../utils"
/** /**
* API Key only endpoint. * API Key only endpoint.
*/ */
export default async (ctx: BBContext, next: any) => { export default async (ctx: Ctx, next: any) => {
const apiKey = ctx.request.headers[Header.API_KEY] const apiKey = ctx.request.headers[Header.API_KEY]
if (!apiKey) { if (!apiKey) {
ctx.throw(403, "Unauthorized") ctx.throw(403, "Unauthorized")

View File

@ -1,4 +1,4 @@
import { BBContext, EndpointMatcher, RegexMatcher } from "@budibase/types" import { Ctx, EndpointMatcher, RegexMatcher } from "@budibase/types"
const PARAM_REGEX = /\/:(.*?)(\/.*)?$/g const PARAM_REGEX = /\/:(.*?)(\/.*)?$/g
@ -27,7 +27,7 @@ export const buildMatcherRegex = (
}) })
} }
export const matches = (ctx: BBContext, options: RegexMatcher[]) => { export const matches = (ctx: Ctx, options: RegexMatcher[]) => {
return options.find(({ regex, method }) => { return options.find(({ regex, method }) => {
const urlMatch = regex.test(ctx.request.url) const urlMatch = regex.test(ctx.request.url)
const methodMatch = const methodMatch =

View File

@ -2,7 +2,7 @@ import { UserStatus } from "../../constants"
import { compare } from "../../utils" import { compare } from "../../utils"
import * as users from "../../users" import * as users from "../../users"
import { authError } from "./utils" import { authError } from "./utils"
import { BBContext } from "@budibase/types" import { Ctx } from "@budibase/types"
const INVALID_ERR = "Invalid credentials" const INVALID_ERR = "Invalid credentials"
const EXPIRED = "This account has expired. Please reset your password" const EXPIRED = "This account has expired. Please reset your password"
@ -20,7 +20,7 @@ export const options = {
* @returns The authenticated user, or errors if they occur * @returns The authenticated user, or errors if they occur
*/ */
export async function authenticate( export async function authenticate(
ctx: BBContext, ctx: Ctx,
email: string, email: string,
password: string, password: string,
done: Function done: Function

View File

@ -3,11 +3,12 @@ import { getTenantIDFromCtx } from "../tenancy"
import { buildMatcherRegex, matches } from "./matchers" import { buildMatcherRegex, matches } from "./matchers"
import { Header } from "../constants" import { Header } from "../constants"
import { import {
BBContext, Ctx,
EndpointMatcher, EndpointMatcher,
GetTenantIdOptions, GetTenantIdOptions,
TenantResolutionStrategy, TenantResolutionStrategy,
} from "@budibase/types" } from "@budibase/types"
import type { Next, Middleware } from "koa"
export default function ( export default function (
allowQueryStringPatterns: EndpointMatcher[], allowQueryStringPatterns: EndpointMatcher[],
@ -17,7 +18,7 @@ export default function (
const allowQsOptions = buildMatcherRegex(allowQueryStringPatterns) const allowQsOptions = buildMatcherRegex(allowQueryStringPatterns)
const noTenancyOptions = buildMatcherRegex(noTenancyPatterns) const noTenancyOptions = buildMatcherRegex(noTenancyPatterns)
return async function (ctx: BBContext | any, next: any) { return async function (ctx: Ctx, next: Next) {
const allowNoTenant = const allowNoTenant =
opts.noTenancyRequired || !!matches(ctx, noTenancyOptions) opts.noTenancyRequired || !!matches(ctx, noTenancyOptions)
const tenantOpts: GetTenantIdOptions = { const tenantOpts: GetTenantIdOptions = {
@ -32,5 +33,5 @@ export default function (
const tenantId = getTenantIDFromCtx(ctx, tenantOpts) const tenantId = getTenantIDFromCtx(ctx, tenantOpts)
ctx.set(Header.TENANT_ID, tenantId as string) ctx.set(Header.TENANT_ID, tenantId as string)
return doInTenant(tenantId, next) return doInTenant(tenantId, next)
} } as Middleware
} }

View File

@ -11,7 +11,7 @@ describe("redis", () => {
let container: StartedTestContainer let container: StartedTestContainer
beforeAll(async () => { beforeAll(async () => {
const container = await new GenericContainer("redis") container = await new GenericContainer("redis")
.withExposedPorts(6379) .withExposedPorts(6379)
.start() .start()

View File

@ -2,6 +2,8 @@ import {
PermissionLevel, PermissionLevel,
PermissionType, PermissionType,
BuiltinPermissionID, BuiltinPermissionID,
Permission,
BuiltinPermissions,
} from "@budibase/types" } from "@budibase/types"
import flatten from "lodash/flatten" import flatten from "lodash/flatten"
import cloneDeep from "lodash/fp/cloneDeep" import cloneDeep from "lodash/fp/cloneDeep"
@ -12,7 +14,7 @@ export type RoleHierarchy = {
permissionId: string permissionId: string
}[] }[]
export class Permission { export class PermissionImpl implements Permission {
type: PermissionType type: PermissionType
level: PermissionLevel level: PermissionLevel
@ -61,68 +63,62 @@ export function getAllowedLevels(userPermLevel: PermissionLevel): string[] {
} }
} }
export const BUILTIN_PERMISSIONS: { export const BUILTIN_PERMISSIONS: BuiltinPermissions = {
[key in keyof typeof BuiltinPermissionID]: {
_id: (typeof BuiltinPermissionID)[key]
name: string
permissions: Permission[]
}
} = {
PUBLIC: { PUBLIC: {
_id: BuiltinPermissionID.PUBLIC, _id: BuiltinPermissionID.PUBLIC,
name: "Public", name: "Public",
permissions: [ permissions: [
new Permission(PermissionType.WEBHOOK, PermissionLevel.EXECUTE), new PermissionImpl(PermissionType.WEBHOOK, PermissionLevel.EXECUTE),
], ],
}, },
READ_ONLY: { READ_ONLY: {
_id: BuiltinPermissionID.READ_ONLY, _id: BuiltinPermissionID.READ_ONLY,
name: "Read only", name: "Read only",
permissions: [ permissions: [
new Permission(PermissionType.QUERY, PermissionLevel.READ), new PermissionImpl(PermissionType.QUERY, PermissionLevel.READ),
new Permission(PermissionType.TABLE, PermissionLevel.READ), new PermissionImpl(PermissionType.TABLE, PermissionLevel.READ),
new Permission(PermissionType.APP, PermissionLevel.READ), new PermissionImpl(PermissionType.APP, PermissionLevel.READ),
], ],
}, },
WRITE: { WRITE: {
_id: BuiltinPermissionID.WRITE, _id: BuiltinPermissionID.WRITE,
name: "Read/Write", name: "Read/Write",
permissions: [ permissions: [
new Permission(PermissionType.QUERY, PermissionLevel.WRITE), new PermissionImpl(PermissionType.QUERY, PermissionLevel.WRITE),
new Permission(PermissionType.TABLE, PermissionLevel.WRITE), new PermissionImpl(PermissionType.TABLE, PermissionLevel.WRITE),
new Permission(PermissionType.AUTOMATION, PermissionLevel.EXECUTE), new PermissionImpl(PermissionType.AUTOMATION, PermissionLevel.EXECUTE),
new Permission(PermissionType.LEGACY_VIEW, PermissionLevel.READ), new PermissionImpl(PermissionType.LEGACY_VIEW, PermissionLevel.READ),
new Permission(PermissionType.APP, PermissionLevel.READ), new PermissionImpl(PermissionType.APP, PermissionLevel.READ),
], ],
}, },
POWER: { POWER: {
_id: BuiltinPermissionID.POWER, _id: BuiltinPermissionID.POWER,
name: "Power", name: "Power",
permissions: [ permissions: [
new Permission(PermissionType.TABLE, PermissionLevel.WRITE), new PermissionImpl(PermissionType.TABLE, PermissionLevel.WRITE),
new Permission(PermissionType.USER, PermissionLevel.READ), new PermissionImpl(PermissionType.USER, PermissionLevel.READ),
new Permission(PermissionType.AUTOMATION, PermissionLevel.EXECUTE), new PermissionImpl(PermissionType.AUTOMATION, PermissionLevel.EXECUTE),
new Permission(PermissionType.WEBHOOK, PermissionLevel.READ), new PermissionImpl(PermissionType.WEBHOOK, PermissionLevel.READ),
new Permission(PermissionType.LEGACY_VIEW, PermissionLevel.READ), new PermissionImpl(PermissionType.LEGACY_VIEW, PermissionLevel.READ),
new Permission(PermissionType.APP, PermissionLevel.READ), new PermissionImpl(PermissionType.APP, PermissionLevel.READ),
], ],
}, },
ADMIN: { ADMIN: {
_id: BuiltinPermissionID.ADMIN, _id: BuiltinPermissionID.ADMIN,
name: "Admin", name: "Admin",
permissions: [ permissions: [
new Permission(PermissionType.TABLE, PermissionLevel.ADMIN), new PermissionImpl(PermissionType.TABLE, PermissionLevel.ADMIN),
new Permission(PermissionType.USER, PermissionLevel.ADMIN), new PermissionImpl(PermissionType.USER, PermissionLevel.ADMIN),
new Permission(PermissionType.AUTOMATION, PermissionLevel.ADMIN), new PermissionImpl(PermissionType.AUTOMATION, PermissionLevel.ADMIN),
new Permission(PermissionType.WEBHOOK, PermissionLevel.READ), new PermissionImpl(PermissionType.WEBHOOK, PermissionLevel.READ),
new Permission(PermissionType.QUERY, PermissionLevel.ADMIN), new PermissionImpl(PermissionType.QUERY, PermissionLevel.ADMIN),
new Permission(PermissionType.LEGACY_VIEW, PermissionLevel.READ), new PermissionImpl(PermissionType.LEGACY_VIEW, PermissionLevel.READ),
new Permission(PermissionType.APP, PermissionLevel.READ), new PermissionImpl(PermissionType.APP, PermissionLevel.READ),
], ],
}, },
} }
export function getBuiltinPermissions() { export function getBuiltinPermissions(): BuiltinPermissions {
return cloneDeep(BUILTIN_PERMISSIONS) return cloneDeep(BUILTIN_PERMISSIONS)
} }

View File

@ -592,7 +592,10 @@ export class AccessController {
) )
} }
async checkScreensAccess(screens: Screen[], userRoleId: string) { async checkScreensAccess(
screens: Screen[],
userRoleId: string
): Promise<Screen[]> {
let accessibleScreens = [] let accessibleScreens = []
// don't want to handle this with Promise.all as this would mean all custom roles would be // don't want to handle this with Promise.all as this would mean all custom roles would be
// retrieved at same time, it is likely a custom role will be re-used and therefore want // retrieved at same time, it is likely a custom role will be re-used and therefore want

View File

@ -133,7 +133,7 @@ describe("getBuiltinPermissionByID", () => {
_id: BuiltinPermissionID.PUBLIC, _id: BuiltinPermissionID.PUBLIC,
name: "Public", name: "Public",
permissions: [ permissions: [
new permissions.Permission( new permissions.PermissionImpl(
permissions.PermissionType.WEBHOOK, permissions.PermissionType.WEBHOOK,
permissions.PermissionLevel.EXECUTE permissions.PermissionLevel.EXECUTE
), ),

View File

@ -6,7 +6,7 @@ import {
getPlatformURL, getPlatformURL,
} from "../context" } from "../context"
import { import {
BBContext, Ctx,
TenantResolutionStrategy, TenantResolutionStrategy,
GetTenantIdOptions, GetTenantIdOptions,
} from "@budibase/types" } from "@budibase/types"
@ -37,7 +37,7 @@ export const isUserInAppTenant = (appId: string, user?: any) => {
const ALL_STRATEGIES = Object.values(TenantResolutionStrategy) const ALL_STRATEGIES = Object.values(TenantResolutionStrategy)
export const getTenantIDFromCtx = ( export const getTenantIDFromCtx = (
ctx: BBContext, ctx: Ctx,
opts: GetTenantIdOptions opts: GetTenantIdOptions
): string | undefined => { ): string | undefined => {
// exit early if not multi-tenant // exit early if not multi-tenant

View File

@ -5,7 +5,7 @@ import * as db from "../../db"
import { Header } from "../../constants" import { Header } from "../../constants"
import { newid } from "../../utils" import { newid } from "../../utils"
import env from "../../environment" import env from "../../environment"
import { BBContext } from "@budibase/types" import { Ctx } from "@budibase/types"
describe("utils", () => { describe("utils", () => {
const config = new DBTestConfiguration() const config = new DBTestConfiguration()
@ -109,7 +109,7 @@ describe("utils", () => {
}) })
describe("isServingBuilder", () => { describe("isServingBuilder", () => {
let ctx: BBContext let ctx: Ctx
const expectResult = (result: boolean) => const expectResult = (result: boolean) =>
expect(utils.isServingBuilder(ctx)).toBe(result) expect(utils.isServingBuilder(ctx)).toBe(result)
@ -133,7 +133,7 @@ describe("utils", () => {
}) })
describe("isServingBuilderPreview", () => { describe("isServingBuilderPreview", () => {
let ctx: BBContext let ctx: Ctx
const expectResult = (result: boolean) => const expectResult = (result: boolean) =>
expect(utils.isServingBuilderPreview(ctx)).toBe(result) expect(utils.isServingBuilderPreview(ctx)).toBe(result)
@ -157,7 +157,7 @@ describe("utils", () => {
}) })
describe("isPublicAPIRequest", () => { describe("isPublicAPIRequest", () => {
let ctx: BBContext let ctx: Ctx
const expectResult = (result: boolean) => const expectResult = (result: boolean) =>
expect(utils.isPublicApiRequest(ctx)).toBe(result) expect(utils.isPublicApiRequest(ctx)).toBe(result)

View File

@ -1,8 +1,8 @@
import { createMockContext, createMockCookies } from "@shopify/jest-koa-mocks" import { createMockContext, createMockCookies } from "@shopify/jest-koa-mocks"
import { BBContext } from "@budibase/types" import { Ctx } from "@budibase/types"
export const newContext = (): BBContext => { export const newContext = (): Ctx => {
const ctx = createMockContext() as any const ctx = createMockContext() as Ctx
return { return {
...ctx, ...ctx,
path: "/", path: "/",

View File

@ -37,10 +37,6 @@ function getTestcontainers(): ContainerInfo[] {
) )
} }
function removeContainer(container: ContainerInfo) {
execSync(`docker rm ${container.ID}`)
}
export function getContainerByImage(image: string) { export function getContainerByImage(image: string) {
const containers = getTestcontainers().filter(x => x.Image.startsWith(image)) const containers = getTestcontainers().filter(x => x.Image.startsWith(image))
if (containers.length > 1) { if (containers.length > 1) {
@ -53,10 +49,6 @@ export function getContainerByImage(image: string) {
return containers[0] return containers[0]
} }
function getContainerByName(name: string) {
return getTestcontainers().find(x => x.Names === name)
}
export function getContainerById(id: string) { export function getContainerById(id: string) {
return getTestcontainers().find(x => x.ID === id) return getTestcontainers().find(x => x.ID === id)
} }
@ -98,6 +90,8 @@ function getCurrentDockerContext(): DockerContext {
} }
export function setupEnv(...envs: any[]) { export function setupEnv(...envs: any[]) {
process.env.TESTCONTAINERS_RYUK_DISABLED = "true"
// For whatever reason, testcontainers doesn't always use the correct current // For whatever reason, testcontainers doesn't always use the correct current
// docker context. This bit of code forces the issue by finding the current // docker context. This bit of code forces the issue by finding the current
// context and setting it as the DOCKER_HOST environment // context and setting it as the DOCKER_HOST environment
@ -153,19 +147,10 @@ export async function startContainer(container: GenericContainer) {
key = key.replace(/\//g, "-").replace(/:/g, "-") key = key.replace(/\//g, "-").replace(/:/g, "-")
const name = `${key}_testcontainer` const name = `${key}_testcontainer`
// If a container has died it hangs around and future attempts to start a
// container with the same name will fail. What we do here is if we find a
// matching container and it has exited, we remove it before carrying on. This
// removes the need to do this removal manually.
const existingContainer = getContainerByName(name)
if (existingContainer?.State === "exited") {
removeContainer(existingContainer)
}
container = container container = container
.withReuse() .withReuse()
.withLabels({ "com.budibase": "true" }) .withLabels({ "com.budibase": "true" })
.withName(`${key}_testcontainer`) .withName(name)
let startedContainer: StartedTestContainer | undefined = undefined let startedContainer: StartedTestContainer | undefined = undefined
let lastError = undefined let lastError = undefined

View File

@ -63,7 +63,7 @@
if (!name?.length) { if (!name?.length) {
return "Name is required" return "Name is required"
} }
if (snippets.some(snippet => snippet.name === name)) { if (!snippet?.name && snippets.some(snippet => snippet.name === name)) {
return "That name is already in use" return "That name is already in use"
} }
if (firstCharNumberRegex.test(name)) { if (firstCharNumberRegex.test(name)) {
@ -106,11 +106,7 @@
Delete Delete
</Button> </Button>
{/if} {/if}
<Button <Button cta on:click={saveSnippet} disabled={!code || loading || nameError}>
cta
on:click={saveSnippet}
disabled={!snippet && (loading || nameError)}
>
Save Save
</Button> </Button>
</svelte:fragment> </svelte:fragment>

View File

@ -186,7 +186,7 @@
<div class="snippet-popover"> <div class="snippet-popover">
{#key hoveredSnippet} {#key hoveredSnippet}
<CodeEditor <CodeEditor
value={hoveredSnippet.code.trim()} value={hoveredSnippet.code?.trim()}
mode={EditorModes.JS} mode={EditorModes.JS}
readonly readonly
/> />

View File

@ -52,9 +52,16 @@
let modal let modal
$: text = value?.label ?? "Choose an option" $: text = value?.label ?? "Choose an option"
$: tables = $tablesStore.list.map(table => $: tables = $tablesStore.list
format.table(table, $datasources.list) .map(table => format.table(table, $datasources.list))
) .sort((a, b) => {
// sort tables alphabetically, grouped by datasource
const dsComparison = a.datasourceName.localeCompare(b.datasourceName)
if (dsComparison !== 0) {
return dsComparison
}
return a.label.localeCompare(b.label)
})
$: viewsV1 = $viewsStore.list.map(view => ({ $: viewsV1 = $viewsStore.list.map(view => ({
...view, ...view,
label: view.name, label: view.name,

View File

@ -1,5 +1,5 @@
<script> <script>
import { Heading, Body, Layout, Button, Modal } from "@budibase/bbui" import { Heading, Body, Layout, Button, Modal, Icon } from "@budibase/bbui"
import AutomationPanel from "components/automation/AutomationPanel/AutomationPanel.svelte" import AutomationPanel from "components/automation/AutomationPanel/AutomationPanel.svelte"
import CreateAutomationModal from "components/automation/AutomationPanel/CreateAutomationModal.svelte" import CreateAutomationModal from "components/automation/AutomationPanel/CreateAutomationModal.svelte"
import CreateWebhookModal from "components/automation/Shared/CreateWebhookModal.svelte" import CreateWebhookModal from "components/automation/Shared/CreateWebhookModal.svelte"
@ -12,11 +12,13 @@
automationStore, automationStore,
selectedAutomation, selectedAutomation,
} from "stores/builder" } from "stores/builder"
import { createLocalStorageStore } from "@budibase/frontend-core"
import { fly } from "svelte/transition"
$: automationId = $selectedAutomation?.data?._id $: automationId = $selectedAutomation?.data?._id
$: builderStore.selectResource(automationId) $: builderStore.selectResource(automationId)
// Keep URL and state in sync for selected screen ID const surveyDismissed = createLocalStorageStore("automation-survey", false)
const stopSyncing = syncURLToState({ const stopSyncing = syncURLToState({
urlParam: "automationId", urlParam: "automationId",
stateKey: "selectedAutomationId", stateKey: "selectedAutomationId",
@ -29,9 +31,11 @@
let modal let modal
let webhookModal let webhookModal
let mounted = false
onMount(() => { onMount(() => {
$automationStore.showTestPanel = false $automationStore.showTestPanel = false
mounted = true
}) })
onDestroy(stopSyncing) onDestroy(stopSyncing)
@ -79,6 +83,43 @@
</Modal> </Modal>
</div> </div>
{#if !$surveyDismissed && mounted}
<div
class="survey"
in:fly={{ x: 600, duration: 260, delay: 1000 }}
out:fly={{ x: 600, duration: 260 }}
>
<div class="survey__body">
<div class="survey__title">We value your feedback!</div>
<div class="survey__text">
<a
href="https://t.maze.co/310149185"
target="_blank"
rel="noopener noreferrer"
on:click={() => surveyDismissed.set(true)}
>
Complete our survey on Automations</a
>
and receive a $20 thank-you gift.
<a
href="https://drive.google.com/file/d/12-qk_2F9g5PdbM6wuKoz2KkIyLI-feMX/view?usp=sharing"
target="_blank"
rel="noopener noreferrer"
>
Terms apply.
</a>
</div>
</div>
<Icon
name="Close"
hoverable
color="var(--spectrum-global-color-static-gray-300)"
hoverColor="var(--spectrum-global-color-static-gray-100)"
on:click={() => surveyDismissed.set(true)}
/>
</div>
{/if}
<style> <style>
.root { .root {
flex: 1 1 auto; flex: 1 1 auto;
@ -108,11 +149,9 @@
justify-content: center; justify-content: center;
align-items: center; align-items: center;
} }
.main { .main {
width: 300px; width: 300px;
} }
.setup { .setup {
padding-top: 9px; padding-top: 9px;
border-left: var(--border-light); border-left: var(--border-light);
@ -125,4 +164,39 @@
grid-column: 3; grid-column: 3;
overflow: auto; overflow: auto;
} }
/* Survey */
.survey {
position: absolute;
bottom: 32px;
right: 32px;
background: var(--spectrum-semantic-positive-color-background);
display: flex;
flex-direction: row;
padding: var(--spacing-l) var(--spacing-xl);
border-radius: 4px;
gap: var(--spacing-xl);
}
.survey * {
color: var(--spectrum-global-color-static-gray-300);
white-space: nowrap;
}
.survey a {
text-decoration: underline;
transition: color 130ms ease-out;
}
.survey a:hover {
color: var(--spectrum-global-color-static-gray-100);
cursor: pointer;
}
.survey__body {
flex: 1 1 auto;
display: flex;
flex-direction: column;
gap: 2px;
}
.survey__title {
font-weight: 600;
font-size: 15px;
}
</style> </style>

View File

@ -40,6 +40,6 @@
"@types/node-fetch": "2.6.4", "@types/node-fetch": "2.6.4",
"@types/pouchdb": "^6.4.0", "@types/pouchdb": "^6.4.0",
"ts-node": "10.8.1", "ts-node": "10.8.1",
"typescript": "5.5.2" "typescript": "5.7.2"
} }
} }

@ -1 +1 @@
Subproject commit d9245f3d6d0b41ec2e6b3406b791f9e7448882cb Subproject commit 5321c7589257711cf153600597ef4e6a5f6b7162

View File

@ -129,7 +129,8 @@
"uuid": "^8.3.2", "uuid": "^8.3.2",
"validate.js": "0.13.1", "validate.js": "0.13.1",
"worker-farm": "1.7.0", "worker-farm": "1.7.0",
"xml2js": "0.6.2" "xml2js": "0.6.2",
"zod-validation-error": "^3.4.0"
}, },
"devDependencies": { "devDependencies": {
"@babel/core": "^7.22.5", "@babel/core": "^7.22.5",
@ -169,13 +170,14 @@
"rimraf": "3.0.2", "rimraf": "3.0.2",
"supertest": "6.3.3", "supertest": "6.3.3",
"swagger-jsdoc": "6.1.0", "swagger-jsdoc": "6.1.0",
"testcontainers": "10.7.2", "testcontainers": "10.16.0",
"timekeeper": "2.2.0", "timekeeper": "2.2.0",
"ts-node": "10.8.1", "ts-node": "10.8.1",
"tsconfig-paths": "4.0.0", "tsconfig-paths": "4.0.0",
"typescript": "5.5.2", "typescript": "5.7.2",
"update-dotenv": "1.1.1", "update-dotenv": "1.1.1",
"yargs": "13.2.4" "yargs": "^13.2.4",
"zod": "^3.23.8"
}, },
"nx": { "nx": {
"targets": { "targets": {

View File

@ -1,16 +1,22 @@
import { events, context } from "@budibase/backend-core" import { events, context } from "@budibase/backend-core"
import { AnalyticsPingRequest, App, PingSource } from "@budibase/types" import {
AnalyticsPingRequest,
App,
PingSource,
Ctx,
AnalyticsEnabledResponse,
} from "@budibase/types"
import { DocumentType, isDevAppID } from "../../db/utils" import { DocumentType, isDevAppID } from "../../db/utils"
export const isEnabled = async (ctx: any) => { export const isEnabled = async (ctx: Ctx<void, AnalyticsEnabledResponse>) => {
const enabled = await events.analytics.enabled() const enabled = await events.analytics.enabled()
ctx.body = { ctx.body = {
enabled, enabled,
} }
} }
export const ping = async (ctx: any) => { export const ping = async (ctx: Ctx<AnalyticsPingRequest, void>) => {
const body = ctx.request.body as AnalyticsPingRequest const body = ctx.request.body
switch (body.source) { switch (body.source) {
case PingSource.APP: { case PingSource.APP: {

View File

@ -1,18 +1,25 @@
import { db as dbCore, tenancy } from "@budibase/backend-core" import { db as dbCore, tenancy } from "@budibase/backend-core"
import { BBContext, Document } from "@budibase/types" import {
Document,
UserCtx,
ApiKeyDoc,
ApiKeyFetchResponse,
UpdateApiKeyRequest,
UpdateApiKeyResponse,
} from "@budibase/types"
const KEYS_DOC = dbCore.StaticDatabases.GLOBAL.docs.apiKeys const KEYS_DOC = dbCore.StaticDatabases.GLOBAL.docs.apiKeys
async function getBuilderMainDoc() { async function getBuilderMainDoc() {
const db = tenancy.getGlobalDB() const db = tenancy.getGlobalDB()
try { const doc = await db.tryGet<ApiKeyDoc>(KEYS_DOC)
return await db.get<any>(KEYS_DOC) if (!doc) {
} catch (err) {
// doesn't exist yet, nothing to get
return { return {
_id: KEYS_DOC, _id: KEYS_DOC,
apiKeys: {},
} }
} }
return doc
} }
async function setBuilderMainDoc(doc: Document) { async function setBuilderMainDoc(doc: Document) {
@ -22,7 +29,7 @@ async function setBuilderMainDoc(doc: Document) {
return db.put(doc) return db.put(doc)
} }
export async function fetch(ctx: BBContext) { export async function fetch(ctx: UserCtx<void, ApiKeyFetchResponse>) {
try { try {
const mainDoc = await getBuilderMainDoc() const mainDoc = await getBuilderMainDoc()
ctx.body = mainDoc.apiKeys ? mainDoc.apiKeys : {} ctx.body = mainDoc.apiKeys ? mainDoc.apiKeys : {}
@ -32,7 +39,9 @@ export async function fetch(ctx: BBContext) {
} }
} }
export async function update(ctx: BBContext) { export async function update(
ctx: UserCtx<UpdateApiKeyRequest, UpdateApiKeyResponse>
) {
const key = ctx.params.key const key = ctx.params.key
const value = ctx.request.body.value const value = ctx.request.body.value

View File

@ -59,6 +59,15 @@ import {
BBReferenceFieldSubType, BBReferenceFieldSubType,
Row, Row,
BBRequest, BBRequest,
SyncAppResponse,
CreateAppResponse,
FetchAppsResponse,
UpdateAppClientResponse,
RevertAppClientResponse,
DeleteAppResponse,
ImportToUpdateAppRequest,
ImportToUpdateAppResponse,
SetRevertableAppVersionRequest,
} from "@budibase/types" } from "@budibase/types"
import { BASE_LAYOUT_PROP_IDS } from "../../constants/layouts" import { BASE_LAYOUT_PROP_IDS } from "../../constants/layouts"
import sdk from "../../sdk" import sdk from "../../sdk"
@ -166,7 +175,7 @@ async function createInstance(appId: string, template: AppTemplate) {
return { _id: appId } return { _id: appId }
} }
export const addSampleData = async (ctx: UserCtx) => { export const addSampleData = async (ctx: UserCtx<void, void>) => {
const db = context.getAppDB() const db = context.getAppDB()
try { try {
@ -182,7 +191,7 @@ export const addSampleData = async (ctx: UserCtx) => {
ctx.status = 200 ctx.status = 200
} }
export async function fetch(ctx: UserCtx<void, App[]>) { export async function fetch(ctx: UserCtx<void, FetchAppsResponse>) {
ctx.body = await sdk.applications.fetch( ctx.body = await sdk.applications.fetch(
ctx.query.status as AppStatus, ctx.query.status as AppStatus,
ctx.user ctx.user
@ -242,7 +251,9 @@ export async function fetchAppPackage(
} }
} }
async function performAppCreate(ctx: UserCtx<CreateAppRequest, App>) { async function performAppCreate(
ctx: UserCtx<CreateAppRequest, CreateAppResponse>
) {
const apps = (await dbCore.getAllApps({ dev: true })) as App[] const apps = (await dbCore.getAllApps({ dev: true })) as App[]
const { body } = ctx.request const { body } = ctx.request
const { name, url, encryptionPassword, templateKey } = body const { name, url, encryptionPassword, templateKey } = body
@ -510,7 +521,9 @@ async function appPostCreate(ctx: UserCtx<CreateAppRequest, App>, app: App) {
} }
} }
export async function create(ctx: UserCtx<CreateAppRequest, App>) { export async function create(
ctx: UserCtx<CreateAppRequest, CreateAppResponse>
) {
const newApplication = await quotas.addApp(() => performAppCreate(ctx)) const newApplication = await quotas.addApp(() => performAppCreate(ctx))
await appPostCreate(ctx, newApplication) await appPostCreate(ctx, newApplication)
await cache.bustCache(cache.CacheKey.CHECKLIST) await cache.bustCache(cache.CacheKey.CHECKLIST)
@ -553,7 +566,9 @@ export async function update(
}) })
} }
export async function updateClient(ctx: UserCtx) { export async function updateClient(
ctx: UserCtx<void, UpdateAppClientResponse>
) {
// Get current app version // Get current app version
const application = await sdk.applications.metadata.get() const application = await sdk.applications.metadata.get()
const currentVersion = application.version const currentVersion = application.version
@ -581,7 +596,9 @@ export async function updateClient(ctx: UserCtx) {
ctx.body = app ctx.body = app
} }
export async function revertClient(ctx: UserCtx) { export async function revertClient(
ctx: UserCtx<void, RevertAppClientResponse>
) {
// Check app can be reverted // Check app can be reverted
const application = await sdk.applications.metadata.get() const application = await sdk.applications.metadata.get()
if (!application.revertableVersion) { if (!application.revertableVersion) {
@ -668,7 +685,7 @@ async function postDestroyApp(ctx: UserCtx) {
} }
} }
export async function destroy(ctx: UserCtx) { export async function destroy(ctx: UserCtx<void, DeleteAppResponse>) {
await preDestroyApp(ctx) await preDestroyApp(ctx)
const result = await destroyApp(ctx) const result = await destroyApp(ctx)
await postDestroyApp(ctx) await postDestroyApp(ctx)
@ -676,7 +693,7 @@ export async function destroy(ctx: UserCtx) {
ctx.body = result ctx.body = result
} }
export async function unpublish(ctx: UserCtx) { export async function unpublish(ctx: UserCtx<void, void>) {
const prodAppId = dbCore.getProdAppID(ctx.params.appId) const prodAppId = dbCore.getProdAppID(ctx.params.appId)
const dbExists = await dbCore.dbExists(prodAppId) const dbExists = await dbCore.dbExists(prodAppId)
@ -692,7 +709,7 @@ export async function unpublish(ctx: UserCtx) {
builderSocket?.emitAppUnpublish(ctx) builderSocket?.emitAppUnpublish(ctx)
} }
export async function sync(ctx: UserCtx) { export async function sync(ctx: UserCtx<void, SyncAppResponse>) {
const appId = ctx.params.appId const appId = ctx.params.appId
try { try {
ctx.body = await sdk.applications.syncApp(appId) ctx.body = await sdk.applications.syncApp(appId)
@ -701,10 +718,12 @@ export async function sync(ctx: UserCtx) {
} }
} }
export async function importToApp(ctx: UserCtx) { export async function importToApp(
ctx: UserCtx<ImportToUpdateAppRequest, ImportToUpdateAppResponse>
) {
const { appId } = ctx.params const { appId } = ctx.params
const appExport = ctx.request.files?.appExport const appExport = ctx.request.files?.appExport
const password = ctx.request.body.encryptionPassword as string const password = ctx.request.body.encryptionPassword
if (!appExport) { if (!appExport) {
ctx.throw(400, "Must supply app export to import") ctx.throw(400, "Must supply app export to import")
} }
@ -811,7 +830,7 @@ export async function updateAppPackage(
} }
export async function setRevertableVersion( export async function setRevertableVersion(
ctx: UserCtx<{ revertableVersion: string }, App> ctx: UserCtx<SetRevertableAppVersionRequest, void>
) { ) {
if (!env.isDev()) { if (!env.isDev()) {
ctx.status = 403 ctx.status = 403

View File

@ -2,7 +2,7 @@ import { outputProcessing } from "../../utilities/rowProcessor"
import { InternalTables } from "../../db/utils" import { InternalTables } from "../../db/utils"
import { getFullUser } from "../../utilities/users" import { getFullUser } from "../../utilities/users"
import { roles, context, db as dbCore } from "@budibase/backend-core" import { roles, context, db as dbCore } from "@budibase/backend-core"
import { ContextUser, Row, UserCtx } from "@budibase/types" import { AppSelfResponse, ContextUser, UserCtx } from "@budibase/types"
import sdk from "../../sdk" import sdk from "../../sdk"
import { processUser } from "../../utilities/global" import { processUser } from "../../utilities/global"
@ -17,7 +17,7 @@ const addSessionAttributesToUser = (ctx: any) => {
} }
} }
export async function fetchSelf(ctx: UserCtx) { export async function fetchSelf(ctx: UserCtx<void, AppSelfResponse>) {
let userId = ctx.user.userId || ctx.user._id let userId = ctx.user.userId || ctx.user._id
/* istanbul ignore next */ /* istanbul ignore next */
if (!userId || !ctx.isAuthenticated) { if (!userId || !ctx.isAuthenticated) {
@ -45,9 +45,9 @@ export async function fetchSelf(ctx: UserCtx) {
try { try {
const userTable = await sdk.tables.getTable(InternalTables.USER_METADATA) const userTable = await sdk.tables.getTable(InternalTables.USER_METADATA)
// specifically needs to make sure is enriched // specifically needs to make sure is enriched
ctx.body = await outputProcessing(userTable, user as Row) ctx.body = await outputProcessing(userTable, user)
} catch (err: any) { } catch (err: any) {
let response let response: ContextUser | {}
// user didn't exist in app, don't pretend they do // user didn't exist in app, don't pretend they do
if (user.roleId === PUBLIC_ROLE) { if (user.roleId === PUBLIC_ROLE) {
response = {} response = {}

View File

@ -9,10 +9,25 @@ import {
App, App,
Automation, Automation,
AutomationActionStepId, AutomationActionStepId,
AutomationResults,
UserCtx, UserCtx,
DeleteAutomationResponse, DeleteAutomationResponse,
FetchAutomationResponse, FetchAutomationResponse,
GetAutomationTriggerDefinitionsResponse,
GetAutomationStepDefinitionsResponse,
GetAutomationActionDefinitionsResponse,
FindAutomationResponse,
UpdateAutomationRequest,
UpdateAutomationResponse,
CreateAutomationRequest,
CreateAutomationResponse,
SearchAutomationLogsRequest,
SearchAutomationLogsResponse,
ClearAutomationLogRequest,
ClearAutomationLogResponse,
TriggerAutomationRequest,
TriggerAutomationResponse,
TestAutomationRequest,
TestAutomationResponse,
} from "@budibase/types" } from "@budibase/types"
import { getActionDefinitions as actionDefs } from "../../automations/actions" import { getActionDefinitions as actionDefs } from "../../automations/actions"
import sdk from "../../sdk" import sdk from "../../sdk"
@ -34,7 +49,7 @@ function getTriggerDefinitions() {
*************************/ *************************/
export async function create( export async function create(
ctx: UserCtx<Automation, { message: string; automation: Automation }> ctx: UserCtx<CreateAutomationRequest, CreateAutomationResponse>
) { ) {
let automation = ctx.request.body let automation = ctx.request.body
automation.appId = ctx.appId automation.appId = ctx.appId
@ -55,7 +70,9 @@ export async function create(
builderSocket?.emitAutomationUpdate(ctx, automation) builderSocket?.emitAutomationUpdate(ctx, automation)
} }
export async function update(ctx: UserCtx) { export async function update(
ctx: UserCtx<UpdateAutomationRequest, UpdateAutomationResponse>
) {
let automation = ctx.request.body let automation = ctx.request.body
automation.appId = ctx.appId automation.appId = ctx.appId
@ -80,7 +97,7 @@ export async function fetch(ctx: UserCtx<void, FetchAutomationResponse>) {
ctx.body = { automations } ctx.body = { automations }
} }
export async function find(ctx: UserCtx) { export async function find(ctx: UserCtx<void, FindAutomationResponse>) {
ctx.body = await sdk.automations.get(ctx.params.id) ctx.body = await sdk.automations.get(ctx.params.id)
} }
@ -96,11 +113,15 @@ export async function destroy(ctx: UserCtx<void, DeleteAutomationResponse>) {
builderSocket?.emitAutomationDeletion(ctx, automationId) builderSocket?.emitAutomationDeletion(ctx, automationId)
} }
export async function logSearch(ctx: UserCtx) { export async function logSearch(
ctx: UserCtx<SearchAutomationLogsRequest, SearchAutomationLogsResponse>
) {
ctx.body = await automations.logs.logSearch(ctx.request.body) ctx.body = await automations.logs.logSearch(ctx.request.body)
} }
export async function clearLogError(ctx: UserCtx) { export async function clearLogError(
ctx: UserCtx<ClearAutomationLogRequest, ClearAutomationLogResponse>
) {
const { automationId, appId } = ctx.request.body const { automationId, appId } = ctx.request.body
await context.doInAppContext(appId, async () => { await context.doInAppContext(appId, async () => {
const db = context.getProdAppDB() const db = context.getProdAppDB()
@ -119,15 +140,21 @@ export async function clearLogError(ctx: UserCtx) {
}) })
} }
export async function getActionList(ctx: UserCtx) { export async function getActionList(
ctx: UserCtx<void, GetAutomationActionDefinitionsResponse>
) {
ctx.body = await getActionDefinitions() ctx.body = await getActionDefinitions()
} }
export async function getTriggerList(ctx: UserCtx) { export async function getTriggerList(
ctx: UserCtx<void, GetAutomationTriggerDefinitionsResponse>
) {
ctx.body = getTriggerDefinitions() ctx.body = getTriggerDefinitions()
} }
export async function getDefinitionList(ctx: UserCtx) { export async function getDefinitionList(
ctx: UserCtx<void, GetAutomationStepDefinitionsResponse>
) {
ctx.body = { ctx.body = {
trigger: getTriggerDefinitions(), trigger: getTriggerDefinitions(),
action: await getActionDefinitions(), action: await getActionDefinitions(),
@ -140,14 +167,16 @@ export async function getDefinitionList(ctx: UserCtx) {
* * * *
*********************/ *********************/
export async function trigger(ctx: UserCtx) { export async function trigger(
ctx: UserCtx<TriggerAutomationRequest, TriggerAutomationResponse>
) {
const db = context.getAppDB() const db = context.getAppDB()
let automation = await db.get<Automation>(ctx.params.id) let automation = await db.get<Automation>(ctx.params.id)
let hasCollectStep = sdk.automations.utils.checkForCollectStep(automation) let hasCollectStep = sdk.automations.utils.checkForCollectStep(automation)
if (hasCollectStep && (await features.isSyncAutomationsEnabled())) { if (hasCollectStep && (await features.isSyncAutomationsEnabled())) {
try { try {
const response: AutomationResults = await triggers.externalTrigger( const response = await triggers.externalTrigger(
automation, automation,
{ {
fields: ctx.request.body.fields, fields: ctx.request.body.fields,
@ -158,6 +187,10 @@ export async function trigger(ctx: UserCtx) {
{ getResponses: true } { getResponses: true }
) )
if (!("steps" in response)) {
ctx.throw(400, "Unable to collect response")
}
let collectedValue = response.steps.find( let collectedValue = response.steps.find(
step => step.stepId === AutomationActionStepId.COLLECT step => step.stepId === AutomationActionStepId.COLLECT
) )
@ -185,7 +218,7 @@ export async function trigger(ctx: UserCtx) {
} }
} }
function prepareTestInput(input: any) { function prepareTestInput(input: TestAutomationRequest) {
// prepare the test parameters // prepare the test parameters
if (input.id && input.row) { if (input.id && input.row) {
input.row._id = input.id input.row._id = input.id
@ -196,7 +229,9 @@ function prepareTestInput(input: any) {
return input return input
} }
export async function test(ctx: UserCtx) { export async function test(
ctx: UserCtx<TestAutomationRequest, TestAutomationResponse>
) {
const db = context.getAppDB() const db = context.getAppDB()
let automation = await db.get<Automation>(ctx.params.id) let automation = await db.get<Automation>(ctx.params.id)
await setTestFlag(automation._id!) await setTestFlag(automation._id!)

View File

@ -1,14 +1,16 @@
import sdk from "../../sdk" import sdk from "../../sdk"
import { events, context, db } from "@budibase/backend-core" import { events, context, db } from "@budibase/backend-core"
import { DocumentType } from "../../db/utils" import { DocumentType } from "../../db/utils"
import { App, Ctx } from "@budibase/types" import {
App,
Ctx,
ExportAppDumpRequest,
ExportAppDumpResponse,
} from "@budibase/types"
interface ExportAppDumpRequest { export async function exportAppDump(
excludeRows: boolean ctx: Ctx<ExportAppDumpRequest, ExportAppDumpResponse>
encryptPassword?: string ) {
}
export async function exportAppDump(ctx: Ctx<ExportAppDumpRequest>) {
const { appId } = ctx.query as any const { appId } = ctx.query as any
const { excludeRows, encryptPassword } = ctx.request.body const { excludeRows, encryptPassword } = ctx.request.body

View File

@ -1,9 +1,16 @@
import { DocumentType } from "../../db/utils" import { DocumentType } from "../../db/utils"
import { App, Plugin, UserCtx } from "@budibase/types" import {
App,
FetchComponentDefinitionResponse,
Plugin,
UserCtx,
} from "@budibase/types"
import { db as dbCore, context, tenancy } from "@budibase/backend-core" import { db as dbCore, context, tenancy } from "@budibase/backend-core"
import { getComponentLibraryManifest } from "../../utilities/fileSystem" import { getComponentLibraryManifest } from "../../utilities/fileSystem"
export async function fetchAppComponentDefinitions(ctx: UserCtx) { export async function fetchAppComponentDefinitions(
ctx: UserCtx<void, FetchComponentDefinitionResponse>
) {
try { try {
const db = context.getAppDB() const db = context.getAppDB()
const app = await db.get<App>(DocumentType.APP_METADATA) const app = await db.get<App>(DocumentType.APP_METADATA)

View File

@ -23,13 +23,17 @@ import {
Table, Table,
RowValue, RowValue,
DynamicVariable, DynamicVariable,
FetchDatasourcesResponse,
FindDatasourcesResponse,
DeleteDatasourceResponse,
FetchExternalSchemaResponse,
} from "@budibase/types" } from "@budibase/types"
import sdk from "../../sdk" import sdk from "../../sdk"
import { builderSocket } from "../../websockets" import { builderSocket } from "../../websockets"
import { isEqual } from "lodash" import { isEqual } from "lodash"
import { processTable } from "../../sdk/app/tables/getters" import { processTable } from "../../sdk/app/tables/getters"
export async function fetch(ctx: UserCtx) { export async function fetch(ctx: UserCtx<void, FetchDatasourcesResponse>) {
ctx.body = await sdk.datasources.fetch() ctx.body = await sdk.datasources.fetch()
} }
@ -260,7 +264,7 @@ async function destroyInternalTablesBySourceId(datasourceId: string) {
} }
} }
export async function destroy(ctx: UserCtx) { export async function destroy(ctx: UserCtx<void, DeleteDatasourceResponse>) {
const db = context.getAppDB() const db = context.getAppDB()
const datasourceId = ctx.params.datasourceId const datasourceId = ctx.params.datasourceId
@ -291,12 +295,14 @@ export async function destroy(ctx: UserCtx) {
builderSocket?.emitDatasourceDeletion(ctx, datasourceId) builderSocket?.emitDatasourceDeletion(ctx, datasourceId)
} }
export async function find(ctx: UserCtx) { export async function find(ctx: UserCtx<void, FindDatasourcesResponse>) {
const datasource = await sdk.datasources.get(ctx.params.datasourceId) const datasource = await sdk.datasources.get(ctx.params.datasourceId)
ctx.body = await sdk.datasources.removeSecretSingle(datasource) ctx.body = await sdk.datasources.removeSecretSingle(datasource)
} }
export async function getExternalSchema(ctx: UserCtx) { export async function getExternalSchema(
ctx: UserCtx<void, FetchExternalSchemaResponse>
) {
const datasource = await sdk.datasources.get(ctx.params.datasourceId) const datasource = await sdk.datasources.get(ctx.params.datasourceId)
const enrichedDatasource = await sdk.datasources.getAndMergeDatasource( const enrichedDatasource = await sdk.datasources.getAndMergeDatasource(
datasource datasource
@ -306,9 +312,10 @@ export async function getExternalSchema(ctx: UserCtx) {
if (!connector.getExternalSchema) { if (!connector.getExternalSchema) {
ctx.throw(400, "Datasource does not support exporting external schema") ctx.throw(400, "Datasource does not support exporting external schema")
} }
const response = await connector.getExternalSchema()
ctx.body = { try {
schema: response, ctx.body = { schema: await connector.getExternalSchema() }
} catch (e: any) {
ctx.throw(400, e.message)
} }
} }

View File

@ -1,4 +1,5 @@
import { context, utils } from "@budibase/backend-core" import { context, utils } from "@budibase/backend-core"
import { DeploymentStatus } from "@budibase/types"
/** /**
* This is used to pass around information about the deployment that is occurring * This is used to pass around information about the deployment that is occurring
@ -6,7 +7,7 @@ import { context, utils } from "@budibase/backend-core"
export default class Deployment { export default class Deployment {
_id: string _id: string
verification: any verification: any
status?: string status?: DeploymentStatus
err?: any err?: any
appUrl?: string appUrl?: string
@ -25,7 +26,7 @@ export default class Deployment {
return this.verification return this.verification
} }
setStatus(status: string, err?: any) { setStatus(status: DeploymentStatus, err?: any) {
this.status = status this.status = status
if (err) { if (err) {
this.err = err this.err = err

View File

@ -7,20 +7,26 @@ import {
enableCronTrigger, enableCronTrigger,
} from "../../../automations/utils" } from "../../../automations/utils"
import { backups } from "@budibase/pro" import { backups } from "@budibase/pro"
import { App, AppBackupTrigger } from "@budibase/types" import {
App,
AppBackupTrigger,
DeploymentDoc,
FetchDeploymentResponse,
PublishAppResponse,
UserCtx,
DeploymentStatus,
DeploymentProgressResponse,
} from "@budibase/types"
import sdk from "../../../sdk" import sdk from "../../../sdk"
import { builderSocket } from "../../../websockets" import { builderSocket } from "../../../websockets"
// the max time we can wait for an invalidation to complete before considering it failed // the max time we can wait for an invalidation to complete before considering it failed
const MAX_PENDING_TIME_MS = 30 * 60000 const MAX_PENDING_TIME_MS = 30 * 60000
const DeploymentStatus = {
SUCCESS: "SUCCESS",
PENDING: "PENDING",
FAILURE: "FAILURE",
}
// checks that deployments are in a good state, any pending will be updated // checks that deployments are in a good state, any pending will be updated
async function checkAllDeployments(deployments: any) { async function checkAllDeployments(
deployments: any
): Promise<{ updated: boolean; deployments: DeploymentDoc }> {
let updated = false let updated = false
let deployment: any let deployment: any
for (deployment of Object.values(deployments.history)) { for (deployment of Object.values(deployments.history)) {
@ -96,7 +102,9 @@ async function initDeployedApp(prodAppId: any) {
}) })
} }
export async function fetchDeployments(ctx: any) { export async function fetchDeployments(
ctx: UserCtx<void, FetchDeploymentResponse>
) {
try { try {
const db = context.getAppDB() const db = context.getAppDB()
const deploymentDoc = await db.get(DocumentType.DEPLOYMENTS) const deploymentDoc = await db.get(DocumentType.DEPLOYMENTS)
@ -104,17 +112,24 @@ export async function fetchDeployments(ctx: any) {
if (updated) { if (updated) {
await db.put(deployments) await db.put(deployments)
} }
ctx.body = Object.values(deployments.history).reverse() ctx.body = deployments.history
? Object.values(deployments.history).reverse()
: []
} catch (err) { } catch (err) {
ctx.body = [] ctx.body = []
} }
} }
export async function deploymentProgress(ctx: any) { export async function deploymentProgress(
ctx: UserCtx<void, DeploymentProgressResponse>
) {
try { try {
const db = context.getAppDB() const db = context.getAppDB()
const deploymentDoc = await db.get<any>(DocumentType.DEPLOYMENTS) const deploymentDoc = await db.get<DeploymentDoc>(DocumentType.DEPLOYMENTS)
ctx.body = deploymentDoc[ctx.params.deploymentId] if (!deploymentDoc.history?.[ctx.params.deploymentId]) {
ctx.throw(404, "No deployment found")
}
ctx.body = deploymentDoc.history?.[ctx.params.deploymentId]
} catch (err) { } catch (err) {
ctx.throw( ctx.throw(
500, 500,
@ -123,7 +138,9 @@ export async function deploymentProgress(ctx: any) {
} }
} }
export const publishApp = async function (ctx: any) { export const publishApp = async function (
ctx: UserCtx<void, PublishAppResponse>
) {
let deployment = new Deployment() let deployment = new Deployment()
console.log("Deployment object created") console.log("Deployment object created")
deployment.setStatus(DeploymentStatus.PENDING) deployment.setStatus(DeploymentStatus.PENDING)

View File

@ -11,7 +11,13 @@ import {
db as dbCore, db as dbCore,
cache, cache,
} from "@budibase/backend-core" } from "@budibase/backend-core"
import { App } from "@budibase/types" import {
App,
ClearDevLockResponse,
Ctx,
GetVersionResponse,
RevertAppResponse,
} from "@budibase/types"
async function redirect( async function redirect(
ctx: any, ctx: any,
@ -69,7 +75,7 @@ export function buildRedirectDelete(path: string) {
} }
} }
export async function clearLock(ctx: any) { export async function clearLock(ctx: Ctx<void, ClearDevLockResponse>) {
const { appId } = ctx.params const { appId } = ctx.params
try { try {
await redisClearLock(appId, ctx.user) await redisClearLock(appId, ctx.user)
@ -81,7 +87,7 @@ export async function clearLock(ctx: any) {
} }
} }
export async function revert(ctx: any) { export async function revert(ctx: Ctx<void, RevertAppResponse>) {
const { appId } = ctx.params const { appId } = ctx.params
const productionAppId = dbCore.getProdAppID(appId) const productionAppId = dbCore.getProdAppID(appId)
@ -131,7 +137,7 @@ export async function revert(ctx: any) {
} }
} }
export async function getBudibaseVersion(ctx: any) { export async function getBudibaseVersion(ctx: Ctx<void, GetVersionResponse>) {
const version = envCore.VERSION const version = envCore.VERSION
ctx.body = { ctx.body = {
version, version,

View File

@ -1,12 +1,17 @@
import { getDefinition, getDefinitions } from "../../integrations" import { getDefinition, getDefinitions } from "../../integrations"
import { SourceName, UserCtx } from "@budibase/types" import {
SourceName,
UserCtx,
FetchIntegrationsResponse,
FindIntegrationResponse,
} from "@budibase/types"
const DISABLED_EXTERNAL_INTEGRATIONS = [ const DISABLED_EXTERNAL_INTEGRATIONS = [
SourceName.AIRTABLE, SourceName.AIRTABLE,
SourceName.BUDIBASE, SourceName.BUDIBASE,
] ]
export async function fetch(ctx: UserCtx) { export async function fetch(ctx: UserCtx<void, FetchIntegrationsResponse>) {
const definitions = await getDefinitions() const definitions = await getDefinitions()
for (let disabledIntegration of DISABLED_EXTERNAL_INTEGRATIONS) { for (let disabledIntegration of DISABLED_EXTERNAL_INTEGRATIONS) {
delete definitions[disabledIntegration] delete definitions[disabledIntegration]
@ -14,10 +19,14 @@ export async function fetch(ctx: UserCtx) {
ctx.body = definitions ctx.body = definitions
} }
export async function find(ctx: UserCtx) { export async function find(ctx: UserCtx<void, FindIntegrationResponse>) {
const sourceType = ctx.params?.type const sourceType = ctx.params?.type
if (DISABLED_EXTERNAL_INTEGRATIONS.indexOf(sourceType) !== -1) { if (DISABLED_EXTERNAL_INTEGRATIONS.indexOf(sourceType) !== -1) {
ctx.throw(400, `Invalid source type - ${sourceType} is not supported.`) ctx.throw(400, `Invalid source type - ${sourceType} is not supported.`)
} }
ctx.body = await getDefinition(ctx.params.type) const integration = await getDefinition(ctx.params.type)
if (!integration) {
ctx.throw(404, "Integration not found")
}
ctx.body = integration
} }

View File

@ -2,7 +2,7 @@ import { EMPTY_LAYOUT } from "../../constants/layouts"
import { generateLayoutID, getScreenParams } from "../../db/utils" import { generateLayoutID, getScreenParams } from "../../db/utils"
import { events, context } from "@budibase/backend-core" import { events, context } from "@budibase/backend-core"
import { import {
BBContext, DeleteLayoutResponse,
Layout, Layout,
SaveLayoutRequest, SaveLayoutRequest,
SaveLayoutResponse, SaveLayoutResponse,
@ -32,7 +32,7 @@ export async function save(
ctx.status = 200 ctx.status = 200
} }
export async function destroy(ctx: BBContext) { export async function destroy(ctx: UserCtx<void, DeleteLayoutResponse>) {
const db = context.getAppDB() const db = context.getAppDB()
const layoutId = ctx.params.layoutId, const layoutId = ctx.params.layoutId,
layoutRev = ctx.params.layoutRev layoutRev = ctx.params.layoutRev

View File

@ -1,24 +1,35 @@
import { MetadataTypes } from "../../constants"
import { generateMetadataID } from "../../db/utils" import { generateMetadataID } from "../../db/utils"
import { saveEntityMetadata, deleteEntityMetadata } from "../../utilities" import { saveEntityMetadata, deleteEntityMetadata } from "../../utilities"
import { context } from "@budibase/backend-core" import { context } from "@budibase/backend-core"
import { BBContext } from "@budibase/types" import {
UserCtx,
MetadataType,
GetMetadataTypesResponse,
SaveMetadataRequest,
SaveMetadataResponse,
DeleteMetadataResponse,
FindMetadataResponse,
} from "@budibase/types"
export async function getTypes(ctx: BBContext) { export async function getTypes(ctx: UserCtx<void, GetMetadataTypesResponse>) {
ctx.body = { ctx.body = {
types: MetadataTypes, types: MetadataType,
} }
} }
export async function saveMetadata(ctx: BBContext) { export async function saveMetadata(
ctx: UserCtx<SaveMetadataRequest, SaveMetadataResponse>
) {
const { type, entityId } = ctx.params const { type, entityId } = ctx.params
if (type === MetadataTypes.AUTOMATION_TEST_HISTORY) { if (type === MetadataType.AUTOMATION_TEST_HISTORY) {
ctx.throw(400, "Cannot save automation history type") ctx.throw(400, "Cannot save automation history type")
} }
ctx.body = await saveEntityMetadata(type, entityId, ctx.request.body) ctx.body = await saveEntityMetadata(type, entityId, ctx.request.body)
} }
export async function deleteMetadata(ctx: BBContext) { export async function deleteMetadata(
ctx: UserCtx<void, DeleteMetadataResponse>
) {
const { type, entityId } = ctx.params const { type, entityId } = ctx.params
await deleteEntityMetadata(type, entityId) await deleteEntityMetadata(type, entityId)
ctx.body = { ctx.body = {
@ -26,17 +37,9 @@ export async function deleteMetadata(ctx: BBContext) {
} }
} }
export async function getMetadata(ctx: BBContext) { export async function getMetadata(ctx: UserCtx<void, FindMetadataResponse>) {
const { type, entityId } = ctx.params const { type, entityId } = ctx.params
const db = context.getAppDB() const db = context.getAppDB()
const id = generateMetadataID(type, entityId) const id = generateMetadataID(type, entityId)
try { ctx.body = (await db.tryGet(id)) || {}
ctx.body = await db.get(id)
} catch (err: any) {
if (err.status === 404) {
ctx.body = {}
} else {
ctx.throw(err.status, err)
}
}
} }

View File

@ -1,24 +1,33 @@
import { context } from "@budibase/backend-core" import { context } from "@budibase/backend-core"
import { migrate as migrationImpl, MIGRATIONS } from "../../migrations" import { migrate as migrationImpl, MIGRATIONS } from "../../migrations"
import { Ctx } from "@budibase/types" import {
Ctx,
FetchOldMigrationResponse,
GetOldMigrationStatus,
RunOldMigrationRequest,
} from "@budibase/types"
import { import {
getAppMigrationVersion, getAppMigrationVersion,
getLatestEnabledMigrationId, getLatestEnabledMigrationId,
} from "../../appMigrations" } from "../../appMigrations"
export async function migrate(ctx: Ctx) { export async function migrate(ctx: Ctx<RunOldMigrationRequest, void>) {
const options = ctx.request.body const options = ctx.request.body
// don't await as can take a while, just return // don't await as can take a while, just return
migrationImpl(options) migrationImpl(options)
ctx.status = 200 ctx.status = 200
} }
export async function fetchDefinitions(ctx: Ctx) { export async function fetchDefinitions(
ctx: Ctx<void, FetchOldMigrationResponse>
) {
ctx.body = MIGRATIONS ctx.body = MIGRATIONS
ctx.status = 200 ctx.status = 200
} }
export async function getMigrationStatus(ctx: Ctx) { export async function getMigrationStatus(
ctx: Ctx<void, GetOldMigrationStatus>
) {
const appId = context.getAppId() const appId = context.getAppId()
if (!appId) { if (!appId) {

View File

@ -1,16 +1,7 @@
import { Ctx } from "@budibase/types" import { Ctx, LogOpsRequest, ErrorOpsRequest } from "@budibase/types"
import { logging } from "@budibase/backend-core" import { logging } from "@budibase/backend-core"
interface LogRequest { export async function log(ctx: Ctx<LogOpsRequest, void>) {
message: string
data?: any
}
interface ErrorRequest {
message: string
}
export async function log(ctx: Ctx<LogRequest>) {
const body = ctx.request.body const body = ctx.request.body
console.trace(body.message, body.data) console.trace(body.message, body.data)
console.debug(body.message, body.data) console.debug(body.message, body.data)
@ -20,13 +11,13 @@ export async function log(ctx: Ctx<LogRequest>) {
ctx.status = 204 ctx.status = 204
} }
export async function alert(ctx: Ctx<ErrorRequest>) { export async function alert(ctx: Ctx<ErrorOpsRequest, void>) {
const body = ctx.request.body const body = ctx.request.body
logging.logAlert(body.message, new Error(body.message)) logging.logAlert(body.message, new Error(body.message))
ctx.status = 204 ctx.status = 204
} }
export async function error(ctx: Ctx<ErrorRequest>) { export async function error(ctx: Ctx<ErrorOpsRequest, void>) {
const body = ctx.request.body const body = ctx.request.body
throw new Error(body.message) throw new Error(body.message)
} }

View File

@ -9,6 +9,8 @@ import {
RemovePermissionRequest, RemovePermissionRequest,
RemovePermissionResponse, RemovePermissionResponse,
FetchResourcePermissionInfoResponse, FetchResourcePermissionInfoResponse,
FetchBuiltinPermissionsRequest,
FetchPermissionLevelsRequest,
} from "@budibase/types" } from "@budibase/types"
import { import {
CURRENTLY_SUPPORTED_LEVELS, CURRENTLY_SUPPORTED_LEVELS,
@ -19,11 +21,13 @@ import { PermissionUpdateType } from "../../sdk/app/permissions"
const SUPPORTED_LEVELS = CURRENTLY_SUPPORTED_LEVELS const SUPPORTED_LEVELS = CURRENTLY_SUPPORTED_LEVELS
export function fetchBuiltin(ctx: UserCtx) { export function fetchBuiltin(
ctx: UserCtx<void, FetchBuiltinPermissionsRequest>
) {
ctx.body = Object.values(permissions.getBuiltinPermissions()) ctx.body = Object.values(permissions.getBuiltinPermissions())
} }
export function fetchLevels(ctx: UserCtx) { export function fetchLevels(ctx: UserCtx<void, FetchPermissionLevelsRequest>) {
// for now only provide the read/write perms externally // for now only provide the read/write perms externally
ctx.body = SUPPORTED_LEVELS ctx.body = SUPPORTED_LEVELS
} }

View File

@ -3,8 +3,12 @@ import {
getPluginMetadata, getPluginMetadata,
extractTarball, extractTarball,
} from "../../../utilities/fileSystem" } from "../../../utilities/fileSystem"
import { KoaFile } from "@budibase/types"
export async function fileUpload(file: { name: string; path: string }) { export async function fileUpload(file: KoaFile) {
if (!file.name || !file.path) {
throw new Error("File is not valid - cannot upload.")
}
if (!file.name.endsWith(".tar.gz")) { if (!file.name.endsWith(".tar.gz")) {
throw new Error("Plugin must be compressed into a gzipped tarball.") throw new Error("Plugin must be compressed into a gzipped tarball.")
} }

View File

@ -2,26 +2,37 @@ import { npmUpload, urlUpload, githubUpload } from "./uploaders"
import { plugins as pluginCore } from "@budibase/backend-core" import { plugins as pluginCore } from "@budibase/backend-core"
import { import {
PluginType, PluginType,
FileType,
PluginSource, PluginSource,
Ctx,
CreatePluginRequest, CreatePluginRequest,
CreatePluginResponse, CreatePluginResponse,
UserCtx,
UploadPluginRequest,
Plugin,
UploadPluginResponse,
FetchPluginResponse,
DeletePluginResponse,
} from "@budibase/types" } from "@budibase/types"
import env from "../../../environment" import env from "../../../environment"
import { clientAppSocket } from "../../../websockets" import { clientAppSocket } from "../../../websockets"
import sdk from "../../../sdk" import sdk from "../../../sdk"
import { sdk as pro } from "@budibase/pro" import { sdk as pro } from "@budibase/pro"
export async function upload(ctx: any) { export async function upload(
const plugins: FileType[] = ctx: UserCtx<UploadPluginRequest, UploadPluginResponse>
ctx.request.files.file.length > 1 ) {
? Array.from(ctx.request.files.file) const files = ctx.request.files
: [ctx.request.files.file] const plugins =
files && Array.isArray(files.file) && files.file.length > 1
? Array.from(files.file)
: [files?.file]
try { try {
let docs = [] let docs: Plugin[] = []
// can do single or multiple plugins // can do single or multiple plugins
for (let plugin of plugins) { for (let plugin of plugins) {
if (!plugin || Array.isArray(plugin)) {
continue
}
const doc = await sdk.plugins.processUploaded(plugin, PluginSource.FILE) const doc = await sdk.plugins.processUploaded(plugin, PluginSource.FILE)
docs.push(doc) docs.push(doc)
} }
@ -37,7 +48,7 @@ export async function upload(ctx: any) {
} }
export async function create( export async function create(
ctx: Ctx<CreatePluginRequest, CreatePluginResponse> ctx: UserCtx<CreatePluginRequest, CreatePluginResponse>
) { ) {
const { source, url, headers, githubToken } = ctx.request.body const { source, url, headers, githubToken } = ctx.request.body
@ -91,11 +102,11 @@ export async function create(
} }
} }
export async function fetch(ctx: any) { export async function fetch(ctx: UserCtx<void, FetchPluginResponse>) {
ctx.body = await sdk.plugins.fetch() ctx.body = await sdk.plugins.fetch()
} }
export async function destroy(ctx: any) { export async function destroy(ctx: UserCtx<void, DeletePluginResponse>) {
const { pluginId } = ctx.params const { pluginId } = ctx.params
try { try {

View File

@ -4,26 +4,38 @@ import { save as saveDatasource } from "../datasource"
import { RestImporter } from "./import" import { RestImporter } from "./import"
import { invalidateCachedVariable } from "../../../threads/utils" import { invalidateCachedVariable } from "../../../threads/utils"
import env from "../../../environment" import env from "../../../environment"
import { events, context, utils, constants } from "@budibase/backend-core" import { constants, context, events, utils } from "@budibase/backend-core"
import sdk from "../../../sdk" import sdk from "../../../sdk"
import { QueryEvent, QueryEventParameters } from "../../../threads/definitions" import { QueryEvent, QueryEventParameters } from "../../../threads/definitions"
import { import {
ConfigType, ConfigType,
Query, CreateDatasourceRequest,
UserCtx, Datasource,
SessionCookie,
JsonFieldSubType,
QueryResponse,
QuerySchema,
FieldType,
ExecuteQueryRequest, ExecuteQueryRequest,
ExecuteQueryResponse, ExecuteV2QueryResponse,
ExecuteV1QueryResponse,
FetchQueriesResponse,
FieldType,
FindQueryResponse,
ImportRestQueryRequest,
ImportRestQueryResponse,
JsonFieldSubType,
PreviewQueryRequest, PreviewQueryRequest,
PreviewQueryResponse, PreviewQueryResponse,
Query,
QueryResponse,
QuerySchema,
SaveQueryRequest,
SaveQueryResponse,
SessionCookie,
SourceName,
UserCtx,
DeleteQueryResponse,
} from "@budibase/types" } from "@budibase/types"
import { ValidQueryNameRegex, utils as JsonUtils } from "@budibase/shared-core" import { utils as JsonUtils, ValidQueryNameRegex } from "@budibase/shared-core"
import { findHBSBlocks } from "@budibase/string-templates" import { findHBSBlocks } from "@budibase/string-templates"
import { ObjectId } from "mongodb" import { ObjectId } from "mongodb"
import { merge } from "lodash"
const Runner = new Thread(ThreadType.QUERY, { const Runner = new Thread(ThreadType.QUERY, {
timeoutMs: env.QUERY_THREAD_TIMEOUT, timeoutMs: env.QUERY_THREAD_TIMEOUT,
@ -43,11 +55,13 @@ function validateQueryInputs(parameters: QueryEventParameters) {
} }
} }
export async function fetch(ctx: UserCtx) { export async function fetch(ctx: UserCtx<void, FetchQueriesResponse>) {
ctx.body = await sdk.queries.fetch() ctx.body = await sdk.queries.fetch()
} }
const _import = async (ctx: UserCtx) => { const _import = async (
ctx: UserCtx<ImportRestQueryRequest, ImportRestQueryResponse>
) => {
const body = ctx.request.body const body = ctx.request.body
const data = body.data const data = body.data
@ -58,9 +72,9 @@ const _import = async (ctx: UserCtx) => {
if (!body.datasourceId) { if (!body.datasourceId) {
// construct new datasource // construct new datasource
const info: any = await importer.getInfo() const info: any = await importer.getInfo()
let datasource = { let datasource: Datasource = {
type: "datasource", type: "datasource",
source: "REST", source: SourceName.REST,
config: { config: {
url: info.url, url: info.url,
defaultHeaders: [], defaultHeaders: [],
@ -69,8 +83,14 @@ const _import = async (ctx: UserCtx) => {
name: info.name, name: info.name,
} }
// save the datasource // save the datasource
const datasourceCtx = { ...ctx } const datasourceCtx: UserCtx<CreateDatasourceRequest> = merge(ctx, {
datasourceCtx.request.body.datasource = datasource request: {
body: {
datasource,
tablesFilter: [],
},
},
})
await saveDatasource(datasourceCtx) await saveDatasource(datasourceCtx)
datasourceId = datasourceCtx.body.datasource._id datasourceId = datasourceCtx.body.datasource._id
} else { } else {
@ -88,7 +108,7 @@ const _import = async (ctx: UserCtx) => {
} }
export { _import as import } export { _import as import }
export async function save(ctx: UserCtx<Query, Query>) { export async function save(ctx: UserCtx<SaveQueryRequest, SaveQueryResponse>) {
const db = context.getAppDB() const db = context.getAppDB()
const query: Query = ctx.request.body const query: Query = ctx.request.body
@ -119,10 +139,9 @@ export async function save(ctx: UserCtx<Query, Query>) {
query._rev = response.rev query._rev = response.rev
ctx.body = query ctx.body = query
ctx.message = `Query ${query.name} saved successfully.`
} }
export async function find(ctx: UserCtx) { export async function find(ctx: UserCtx<void, FindQueryResponse>) {
const queryId = ctx.params.queryId const queryId = ctx.params.queryId
ctx.body = await sdk.queries.find(queryId) ctx.body = await sdk.queries.find(queryId)
} }
@ -335,7 +354,7 @@ export async function preview(
async function execute( async function execute(
ctx: UserCtx< ctx: UserCtx<
ExecuteQueryRequest, ExecuteQueryRequest,
ExecuteQueryResponse | Record<string, any>[] ExecuteV2QueryResponse | ExecuteV1QueryResponse
>, >,
opts: any = { rowsOnly: false, isAutomation: false } opts: any = { rowsOnly: false, isAutomation: false }
) { ) {
@ -390,19 +409,21 @@ async function execute(
} }
export async function executeV1( export async function executeV1(
ctx: UserCtx<ExecuteQueryRequest, Record<string, any>[]> ctx: UserCtx<ExecuteQueryRequest, ExecuteV1QueryResponse>
) { ) {
return execute(ctx, { rowsOnly: true, isAutomation: false }) return execute(ctx, { rowsOnly: true, isAutomation: false })
} }
export async function executeV2( export async function executeV2(
ctx: UserCtx< ctx: UserCtx<ExecuteQueryRequest, ExecuteV2QueryResponse>
ExecuteQueryRequest,
ExecuteQueryResponse | Record<string, any>[]
>,
{ isAutomation }: { isAutomation?: boolean } = {}
) { ) {
return execute(ctx, { rowsOnly: false, isAutomation }) return execute(ctx, { rowsOnly: false })
}
export async function executeV2AsAutomation(
ctx: UserCtx<ExecuteQueryRequest, ExecuteV2QueryResponse>
) {
return execute(ctx, { rowsOnly: false, isAutomation: true })
} }
const removeDynamicVariables = async (queryId: string) => { const removeDynamicVariables = async (queryId: string) => {
@ -426,14 +447,14 @@ const removeDynamicVariables = async (queryId: string) => {
} }
} }
export async function destroy(ctx: UserCtx) { export async function destroy(ctx: UserCtx<void, DeleteQueryResponse>) {
const db = context.getAppDB() const db = context.getAppDB()
const queryId = ctx.params.queryId as string const queryId = ctx.params.queryId as string
await removeDynamicVariables(queryId) await removeDynamicVariables(queryId)
const query = await db.get<Query>(queryId) const query = await db.get<Query>(queryId)
const datasource = await sdk.datasources.get(query.datasourceId) const datasource = await sdk.datasources.get(query.datasourceId)
await db.remove(ctx.params.queryId, ctx.params.revId) await db.remove(ctx.params.queryId, ctx.params.revId)
ctx.message = `Query deleted.` ctx.body = { message: `Query deleted.` }
ctx.status = 200 ctx.status = 200
await events.query.deleted(datasource, query) await events.query.deleted(datasource, query)
} }

View File

@ -9,7 +9,7 @@ import { getUserMetadataParams, InternalTables } from "../../db/utils"
import { import {
AccessibleRolesResponse, AccessibleRolesResponse,
Database, Database,
DestroyRoleResponse, DeleteRoleResponse,
FetchRolesResponse, FetchRolesResponse,
FindRoleResponse, FindRoleResponse,
Role, Role,
@ -199,7 +199,7 @@ export async function save(ctx: UserCtx<SaveRoleRequest, SaveRoleResponse>) {
builderSocket?.emitRoleUpdate(ctx, role) builderSocket?.emitRoleUpdate(ctx, role)
} }
export async function destroy(ctx: UserCtx<void, DestroyRoleResponse>) { export async function destroy(ctx: UserCtx<void, DeleteRoleResponse>) {
const db = context.getAppDB() const db = context.getAppDB()
let roleId = ctx.params.roleId as string let roleId = ctx.params.roleId as string
if (roles.isBuiltin(roleId)) { if (roles.isBuiltin(roleId)) {

View File

@ -1,11 +1,17 @@
import { getRoutingInfo } from "../../utilities/routing" import { getRoutingInfo } from "../../utilities/routing"
import { roles } from "@budibase/backend-core" import { roles } from "@budibase/backend-core"
import { UserCtx } from "@budibase/types" import {
FetchClientScreenRoutingResponse,
FetchScreenRoutingResponse,
ScreenRoutingJson,
UserCtx,
} from "@budibase/types"
const URL_SEPARATOR = "/" const URL_SEPARATOR = "/"
class Routing { class Routing {
json: any json: ScreenRoutingJson
constructor() { constructor() {
this.json = {} this.json = {}
} }
@ -43,7 +49,7 @@ class Routing {
* @returns The routing structure, this is the full structure designed for use in the builder, * @returns The routing structure, this is the full structure designed for use in the builder,
* if the client routing is required then the updateRoutingStructureForUserRole should be used. * if the client routing is required then the updateRoutingStructureForUserRole should be used.
*/ */
async function getRoutingStructure() { async function getRoutingStructure(): Promise<{ routes: ScreenRoutingJson }> {
const screenRoutes = await getRoutingInfo() const screenRoutes = await getRoutingInfo()
const routing = new Routing() const routing = new Routing()
@ -56,11 +62,13 @@ async function getRoutingStructure() {
return { routes: routing.json } return { routes: routing.json }
} }
export async function fetch(ctx: UserCtx) { export async function fetch(ctx: UserCtx<void, FetchScreenRoutingResponse>) {
ctx.body = await getRoutingStructure() ctx.body = await getRoutingStructure()
} }
export async function clientFetch(ctx: UserCtx) { export async function clientFetch(
ctx: UserCtx<void, FetchClientScreenRoutingResponse>
) {
const routing = await getRoutingStructure() const routing = await getRoutingStructure()
let roleId = ctx.user?.role?._id let roleId = ctx.user?.role?._id
const roleIds = roleId ? await roles.getUserRoleIdHierarchy(roleId) : [] const roleIds = roleId ? await roles.getUserRoleIdHierarchy(roleId) : []

View File

@ -11,23 +11,30 @@ import {
DeleteRow, DeleteRow,
DeleteRowRequest, DeleteRowRequest,
DeleteRows, DeleteRows,
DownloadAttachmentResponse,
EventType, EventType,
ExportRowsRequest, ExportRowsRequest,
ExportRowsResponse, ExportRowsResponse,
FetchEnrichedRowResponse,
FetchRowsResponse,
FieldType, FieldType,
GetRowResponse, FindRowResponse,
isRelationshipField, isRelationshipField,
PatchRowRequest, PatchRowRequest,
PatchRowResponse, PatchRowResponse,
RequiredKeys,
Row, Row,
RowAttachment, RowAttachment,
RowSearchParams, RowSearchParams,
SaveRowRequest,
SaveRowResponse,
SearchFilters, SearchFilters,
SearchRowRequest, SearchRowRequest,
SearchRowResponse, SearchRowResponse,
Table, Table,
UserCtx, UserCtx,
ValidateResponse, ValidateRowRequest,
ValidateRowResponse,
} from "@budibase/types" } from "@budibase/types"
import * as utils from "./utils" import * as utils from "./utils"
import { gridSocket } from "../../../websockets" import { gridSocket } from "../../../websockets"
@ -82,7 +89,7 @@ export async function patch(
} }
} }
export const save = async (ctx: UserCtx<Row, Row>) => { export const save = async (ctx: UserCtx<SaveRowRequest, SaveRowResponse>) => {
const { tableId, viewId } = utils.getSourceId(ctx) const { tableId, viewId } = utils.getSourceId(ctx)
const sourceId = viewId || tableId const sourceId = viewId || tableId
@ -130,12 +137,12 @@ export async function fetchLegacyView(ctx: any) {
}) })
} }
export async function fetch(ctx: any) { export async function fetch(ctx: UserCtx<void, FetchRowsResponse>) {
const { tableId } = utils.getSourceId(ctx) const { tableId } = utils.getSourceId(ctx)
ctx.body = await sdk.rows.fetch(tableId) ctx.body = await sdk.rows.fetch(tableId)
} }
export async function find(ctx: UserCtx<void, GetRowResponse>) { export async function find(ctx: UserCtx<void, FindRowResponse>) {
const { tableId, viewId } = utils.getSourceId(ctx) const { tableId, viewId } = utils.getSourceId(ctx)
const sourceId = viewId || tableId const sourceId = viewId || tableId
const rowId = ctx.params.rowId const rowId = ctx.params.rowId
@ -239,7 +246,8 @@ export async function search(ctx: Ctx<SearchRowRequest, SearchRowResponse>) {
await context.ensureSnippetContext(true) await context.ensureSnippetContext(true)
let { query } = ctx.request.body const searchRequest = ctx.request.body
let { query } = searchRequest
if (query) { if (query) {
const allTables = await sdk.tables.getAllTables() const allTables = await sdk.tables.getAllTables()
query = replaceTableNamesInFilters(tableId, query, allTables) query = replaceTableNamesInFilters(tableId, query, allTables)
@ -249,11 +257,22 @@ export async function search(ctx: Ctx<SearchRowRequest, SearchRowResponse>) {
user: sdk.users.getUserContextBindings(ctx.user), user: sdk.users.getUserContextBindings(ctx.user),
}) })
const searchParams: RowSearchParams = { const searchParams: RequiredKeys<RowSearchParams> = {
...ctx.request.body,
query: enrichedQuery, query: enrichedQuery,
tableId, tableId,
viewId, viewId,
bookmark: searchRequest.bookmark ?? undefined,
paginate: searchRequest.paginate,
limit: searchRequest.limit,
sort: searchRequest.sort ?? undefined,
sortOrder: searchRequest.sortOrder,
sortType: searchRequest.sortType ?? undefined,
countRows: searchRequest.countRows,
version: searchRequest.version,
disableEscaping: searchRequest.disableEscaping,
fields: undefined,
indexer: undefined,
rows: undefined,
} }
ctx.status = 200 ctx.status = 200
@ -301,7 +320,9 @@ function replaceTableNamesInFilters(
}) })
} }
export async function validate(ctx: Ctx<Row, ValidateResponse>) { export async function validate(
ctx: Ctx<ValidateRowRequest, ValidateRowResponse>
) {
const source = await utils.getSource(ctx) const source = await utils.getSource(ctx)
const table = await utils.getTableFromSource(source) const table = await utils.getTableFromSource(source)
// external tables are hard to validate currently // external tables are hard to validate currently
@ -315,7 +336,9 @@ export async function validate(ctx: Ctx<Row, ValidateResponse>) {
} }
} }
export async function fetchEnrichedRow(ctx: UserCtx<void, Row>) { export async function fetchEnrichedRow(
ctx: UserCtx<void, FetchEnrichedRowResponse>
) {
const { tableId } = utils.getSourceId(ctx) const { tableId } = utils.getSourceId(ctx)
ctx.body = await pickApi(tableId).fetchEnrichedRow(ctx) ctx.body = await pickApi(tableId).fetchEnrichedRow(ctx)
} }
@ -353,7 +376,9 @@ export const exportRows = async (
ctx.body = apiFileReturn(content) ctx.body = apiFileReturn(content)
} }
export async function downloadAttachment(ctx: UserCtx) { export async function downloadAttachment(
ctx: UserCtx<void, DownloadAttachmentResponse>
) {
const { columnName } = ctx.params const { columnName } = ctx.params
const { tableId } = utils.getSourceId(ctx) const { tableId } = utils.getSourceId(ctx)

View File

@ -15,10 +15,21 @@ import {
} from "@budibase/types" } from "@budibase/types"
import * as linkRows from "../../../db/linkedRows" import * as linkRows from "../../../db/linkedRows"
import isEqual from "lodash/isEqual" import isEqual from "lodash/isEqual"
import { cloneDeep } from "lodash/fp" import { cloneDeep, merge } from "lodash/fp"
import sdk from "../../../sdk" import sdk from "../../../sdk"
import * as pro from "@budibase/pro" import * as pro from "@budibase/pro"
function mergeRows(row1: Row, row2: Row) {
const merged = merge(row1, row2)
// make sure any specifically undefined fields are removed
for (const key of Object.keys(row2)) {
if (row2[key] === undefined) {
delete merged[key]
}
}
return merged
}
/** /**
* This function runs through a list of enriched rows, looks at the rows which * This function runs through a list of enriched rows, looks at the rows which
* are related and then checks if they need the state of their formulas * are related and then checks if they need the state of their formulas
@ -162,9 +173,14 @@ export async function finaliseRow(
}) })
} }
const response = await db.put(row) await db.put(row)
// for response, calculate the formulas for the enriched row const retrieved = await db.tryGet<Row>(row._id)
enrichedRow._rev = response.rev if (!retrieved) {
throw new Error(`Unable to retrieve row ${row._id} after saving.`)
}
delete enrichedRow._rev
enrichedRow = mergeRows(retrieved, enrichedRow)
enrichedRow = await processFormulas(table, enrichedRow, { enrichedRow = await processFormulas(table, enrichedRow, {
dynamic: false, dynamic: false,
}) })

View File

@ -175,7 +175,7 @@ export async function enrichArrayContext(
} }
export async function enrichSearchContext( export async function enrichSearchContext(
fields: Record<string, any>, fields: Record<string, any> | undefined,
inputs = {}, inputs = {},
helpers = true helpers = true
): Promise<Record<string, any>> { ): Promise<Record<string, any>> {

View File

@ -29,19 +29,20 @@ export async function searchView(
await context.ensureSnippetContext(true) await context.ensureSnippetContext(true)
const searchOptions: RequiredKeys<SearchViewRowRequest> & const searchOptions: RequiredKeys<RowSearchParams> = {
RequiredKeys<
Pick<RowSearchParams, "tableId" | "viewId" | "query" | "fields">
> = {
tableId: view.tableId, tableId: view.tableId,
viewId: view.id, viewId: view.id,
query: body.query, query: body.query || {},
fields: viewFields, fields: viewFields,
...getSortOptions(body, view), ...getSortOptions(body, view),
limit: body.limit, limit: body.limit,
bookmark: body.bookmark, bookmark: body.bookmark ?? undefined,
paginate: body.paginate, paginate: body.paginate,
countRows: body.countRows, countRows: body.countRows,
version: undefined,
disableEscaping: undefined,
indexer: undefined,
rows: undefined,
} }
const result = await sdk.rows.search(searchOptions, { const result = await sdk.rows.search(searchOptions, {
@ -56,7 +57,7 @@ function getSortOptions(request: SearchViewRowRequest, view: ViewV2) {
return { return {
sort: request.sort, sort: request.sort,
sortOrder: request.sortOrder, sortOrder: request.sortOrder,
sortType: request.sortType, sortType: request.sortType ?? undefined,
} }
} }
if (view.sort) { if (view.sort) {

View File

@ -10,13 +10,16 @@ import { updateAppPackage } from "./application"
import { import {
Plugin, Plugin,
ScreenProps, ScreenProps,
BBContext,
Screen, Screen,
UserCtx, UserCtx,
FetchScreenResponse,
SaveScreenRequest,
SaveScreenResponse,
DeleteScreenResponse,
} from "@budibase/types" } from "@budibase/types"
import { builderSocket } from "../../websockets" import { builderSocket } from "../../websockets"
export async function fetch(ctx: BBContext) { export async function fetch(ctx: UserCtx<void, FetchScreenResponse>) {
const db = context.getAppDB() const db = context.getAppDB()
const screens = ( const screens = (
@ -37,7 +40,9 @@ export async function fetch(ctx: BBContext) {
) )
} }
export async function save(ctx: UserCtx<Screen, Screen>) { export async function save(
ctx: UserCtx<SaveScreenRequest, SaveScreenResponse>
) {
const db = context.getAppDB() const db = context.getAppDB()
let screen = ctx.request.body let screen = ctx.request.body
@ -107,7 +112,7 @@ export async function save(ctx: UserCtx<Screen, Screen>) {
builderSocket?.emitScreenUpdate(ctx, savedScreen) builderSocket?.emitScreenUpdate(ctx, savedScreen)
} }
export async function destroy(ctx: BBContext) { export async function destroy(ctx: UserCtx<void, DeleteScreenResponse>) {
const db = context.getAppDB() const db = context.getAppDB()
const id = ctx.params.screenId const id = ctx.params.screenId
const screen = await db.get<Screen>(id) const screen = await db.get<Screen>(id)

View File

@ -14,7 +14,3 @@ export async function execute(ctx: Ctx) {
throw err throw err
} }
} }
export async function save(ctx: Ctx) {
ctx.throw(501, "Not currently implemented")
}

View File

@ -27,7 +27,13 @@ import {
Ctx, Ctx,
DocumentType, DocumentType,
Feature, Feature,
GetSignedUploadUrlRequest,
GetSignedUploadUrlResponse,
ProcessAttachmentResponse, ProcessAttachmentResponse,
ServeAppResponse,
ServeBuilderPreviewResponse,
ServeClientLibraryResponse,
ToggleBetaFeatureResponse,
UserCtx, UserCtx,
} from "@budibase/types" } from "@budibase/types"
import { import {
@ -38,7 +44,9 @@ import {
import send from "koa-send" import send from "koa-send"
import { getThemeVariables } from "../../../constants/themes" import { getThemeVariables } from "../../../constants/themes"
export const toggleBetaUiFeature = async function (ctx: Ctx) { export const toggleBetaUiFeature = async function (
ctx: Ctx<void, ToggleBetaFeatureResponse>
) {
const cookieName = `beta:${ctx.params.feature}` const cookieName = `beta:${ctx.params.feature}`
if (ctx.cookies.get(cookieName)) { if (ctx.cookies.get(cookieName)) {
@ -66,13 +74,13 @@ export const toggleBetaUiFeature = async function (ctx: Ctx) {
} }
} }
export const serveBuilder = async function (ctx: Ctx) { export const serveBuilder = async function (ctx: Ctx<void, void>) {
const builderPath = join(TOP_LEVEL_PATH, "builder") const builderPath = join(TOP_LEVEL_PATH, "builder")
await send(ctx, ctx.file, { root: builderPath }) await send(ctx, ctx.file, { root: builderPath })
} }
export const uploadFile = async function ( export const uploadFile = async function (
ctx: Ctx<{}, ProcessAttachmentResponse> ctx: Ctx<void, ProcessAttachmentResponse>
) { ) {
const file = ctx.request?.files?.file const file = ctx.request?.files?.file
if (!file) { if (!file) {
@ -144,7 +152,7 @@ const requiresMigration = async (ctx: Ctx) => {
return latestMigrationApplied !== latestMigration return latestMigrationApplied !== latestMigration
} }
export const serveApp = async function (ctx: UserCtx) { export const serveApp = async function (ctx: UserCtx<void, ServeAppResponse>) {
if (ctx.url.includes("apple-touch-icon.png")) { if (ctx.url.includes("apple-touch-icon.png")) {
ctx.redirect("/builder/bblogo.png") ctx.redirect("/builder/bblogo.png")
return return
@ -249,7 +257,9 @@ export const serveApp = async function (ctx: UserCtx) {
} }
} }
export const serveBuilderPreview = async function (ctx: Ctx) { export const serveBuilderPreview = async function (
ctx: Ctx<void, ServeBuilderPreviewResponse>
) {
const db = context.getAppDB({ skip_setup: true }) const db = context.getAppDB({ skip_setup: true })
const appInfo = await db.get<App>(DocumentType.APP_METADATA) const appInfo = await db.get<App>(DocumentType.APP_METADATA)
@ -268,7 +278,9 @@ export const serveBuilderPreview = async function (ctx: Ctx) {
} }
} }
export const serveClientLibrary = async function (ctx: Ctx) { export const serveClientLibrary = async function (
ctx: Ctx<void, ServeClientLibraryResponse>
) {
const version = ctx.request.query.version const version = ctx.request.query.version
if (Array.isArray(version)) { if (Array.isArray(version)) {
@ -297,7 +309,9 @@ export const serveClientLibrary = async function (ctx: Ctx) {
} }
} }
export const getSignedUploadURL = async function (ctx: Ctx) { export const getSignedUploadURL = async function (
ctx: Ctx<GetSignedUploadUrlRequest, GetSignedUploadUrlResponse>
) {
// Ensure datasource is valid // Ensure datasource is valid
let datasource let datasource
try { try {

View File

@ -19,17 +19,18 @@ import {
EventType, EventType,
FetchTablesResponse, FetchTablesResponse,
FieldType, FieldType,
MigrateRequest, MigrateTableRequest,
MigrateResponse, MigrateTableResponse,
SaveTableRequest, SaveTableRequest,
SaveTableResponse, SaveTableResponse,
Table, Table,
TableResponse, FindTableResponse,
TableSourceType, TableSourceType,
UserCtx, UserCtx,
ValidateNewTableImportRequest, ValidateNewTableImportRequest,
ValidateTableImportRequest, ValidateTableImportRequest,
ValidateTableImportResponse, ValidateTableImportResponse,
DeleteTableResponse,
} from "@budibase/types" } from "@budibase/types"
import sdk from "../../../sdk" import sdk from "../../../sdk"
import { jsonFromCsvString } from "../../../utilities/csv" import { jsonFromCsvString } from "../../../utilities/csv"
@ -94,7 +95,7 @@ export async function fetch(ctx: UserCtx<void, FetchTablesResponse>) {
ctx.body = result ctx.body = result
} }
export async function find(ctx: UserCtx<void, TableResponse>) { export async function find(ctx: UserCtx<void, FindTableResponse>) {
const tableId = ctx.params.tableId const tableId = ctx.params.tableId
const table = await sdk.tables.getTable(tableId) const table = await sdk.tables.getTable(tableId)
@ -137,7 +138,7 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
builderSocket?.emitTableUpdate(ctx, cloneDeep(savedTable)) builderSocket?.emitTableUpdate(ctx, cloneDeep(savedTable))
} }
export async function destroy(ctx: UserCtx) { export async function destroy(ctx: UserCtx<void, DeleteTableResponse>) {
const appId = ctx.appId const appId = ctx.appId
const tableId = ctx.params.tableId const tableId = ctx.params.tableId
await sdk.rowActions.deleteAll(tableId) await sdk.rowActions.deleteAll(tableId)
@ -223,7 +224,9 @@ export async function validateExistingTableImport(
} }
} }
export async function migrate(ctx: UserCtx<MigrateRequest, MigrateResponse>) { export async function migrate(
ctx: UserCtx<MigrateTableRequest, MigrateTableResponse>
) {
const { oldColumn, newColumn } = ctx.request.body const { oldColumn, newColumn } = ctx.request.body
let tableId = ctx.params.tableId as string let tableId = ctx.params.tableId as string
const table = await sdk.tables.getTable(tableId) const table = await sdk.tables.getTable(tableId)

View File

@ -1,13 +1,17 @@
import nodeFetch from "node-fetch" import nodeFetch from "node-fetch"
import { downloadTemplate as dlTemplate } from "../../utilities/fileSystem" import { downloadTemplate as dlTemplate } from "../../utilities/fileSystem"
import env from "../../environment" import env from "../../environment"
import { BBContext } from "@budibase/types" import {
DownloadTemplateResponse,
FetchTemplateResponse,
UserCtx,
} from "@budibase/types"
// development flag, can be used to test against templates exported locally // development flag, can be used to test against templates exported locally
const DEFAULT_TEMPLATES_BUCKET = const DEFAULT_TEMPLATES_BUCKET =
"prod-budi-templates.s3-eu-west-1.amazonaws.com" "prod-budi-templates.s3-eu-west-1.amazonaws.com"
export async function fetch(ctx: BBContext) { export async function fetch(ctx: UserCtx<void, FetchTemplateResponse>) {
let type = env.TEMPLATE_REPOSITORY let type = env.TEMPLATE_REPOSITORY
let response, let response,
error = false error = false
@ -32,7 +36,9 @@ export async function fetch(ctx: BBContext) {
// can't currently test this, have to ignore from coverage // can't currently test this, have to ignore from coverage
/* istanbul ignore next */ /* istanbul ignore next */
export async function downloadTemplate(ctx: BBContext) { export async function downloadTemplate(
ctx: UserCtx<void, DownloadTemplateResponse>
) {
const { type, name } = ctx.params const { type, name } = ctx.params
await dlTemplate(type, name) await dlTemplate(type, name)

View File

@ -7,19 +7,24 @@ import {
FetchUserMetadataResponse, FetchUserMetadataResponse,
FindUserMetadataResponse, FindUserMetadataResponse,
Flags, Flags,
SetFlagRequest, SetUserFlagRequest,
UpdateSelfMetadataRequest,
UpdateSelfMetadataResponse,
UpdateUserMetadataResponse,
UpdateUserMetadataRequest,
UserCtx, UserCtx,
UserMetadata, DeleteUserMetadataResponse,
SetUserFlagResponse,
GetUserFlagsResponse,
} from "@budibase/types" } from "@budibase/types"
import sdk from "../../sdk" import sdk from "../../sdk"
import { DocumentInsertResponse } from "@budibase/nano"
export async function fetchMetadata(ctx: Ctx<void, FetchUserMetadataResponse>) { export async function fetchMetadata(ctx: Ctx<void, FetchUserMetadataResponse>) {
ctx.body = await sdk.users.fetchMetadata() ctx.body = await sdk.users.fetchMetadata()
} }
export async function updateSelfMetadata( export async function updateSelfMetadata(
ctx: UserCtx<UserMetadata, DocumentInsertResponse> ctx: UserCtx<UpdateSelfMetadataRequest, UpdateSelfMetadataResponse>
) { ) {
// overwrite the ID with current users // overwrite the ID with current users
ctx.request.body._id = ctx.user?._id ctx.request.body._id = ctx.user?._id
@ -31,7 +36,7 @@ export async function updateSelfMetadata(
} }
export async function updateMetadata( export async function updateMetadata(
ctx: UserCtx<UserMetadata, DocumentInsertResponse> ctx: UserCtx<UpdateUserMetadataRequest, UpdateUserMetadataResponse>
) { ) {
const db = context.getAppDB() const db = context.getAppDB()
const user = ctx.request.body const user = ctx.request.body
@ -44,7 +49,9 @@ export async function updateMetadata(
ctx.body = await db.put(metadata) ctx.body = await db.put(metadata)
} }
export async function destroyMetadata(ctx: UserCtx<void, { message: string }>) { export async function destroyMetadata(
ctx: UserCtx<void, DeleteUserMetadataResponse>
) {
const db = context.getAppDB() const db = context.getAppDB()
try { try {
const dbUser = await sdk.users.get(ctx.params.id) const dbUser = await sdk.users.get(ctx.params.id)
@ -64,7 +71,7 @@ export async function findMetadata(
} }
export async function setFlag( export async function setFlag(
ctx: UserCtx<SetFlagRequest, { message: string }> ctx: UserCtx<SetUserFlagRequest, SetUserFlagResponse>
) { ) {
const userId = ctx.user?._id const userId = ctx.user?._id
const { flag, value } = ctx.request.body const { flag, value } = ctx.request.body
@ -84,7 +91,7 @@ export async function setFlag(
ctx.body = { message: "Flag set successfully" } ctx.body = { message: "Flag set successfully" }
} }
export async function getFlags(ctx: UserCtx<void, Flags>) { export async function getFlags(ctx: UserCtx<void, GetUserFlagsResponse>) {
const userId = ctx.user?._id const userId = ctx.user?._id
const docId = generateUserFlagID(userId!) const docId = generateUserFlagID(userId!)
const db = context.getAppDB() const db = context.getAppDB()

View File

@ -4,7 +4,6 @@ import {
Ctx, Ctx,
RequiredKeys, RequiredKeys,
UpdateViewRequest, UpdateViewRequest,
ViewResponse,
ViewResponseEnriched, ViewResponseEnriched,
ViewV2, ViewV2,
BasicViewFieldMetadata, BasicViewFieldMetadata,
@ -15,6 +14,8 @@ import {
ViewFetchResponseEnriched, ViewFetchResponseEnriched,
CountDistinctCalculationFieldMetadata, CountDistinctCalculationFieldMetadata,
CountCalculationFieldMetadata, CountCalculationFieldMetadata,
CreateViewResponse,
UpdateViewResponse,
} from "@budibase/types" } from "@budibase/types"
import { builderSocket, gridSocket } from "../../../websockets" import { builderSocket, gridSocket } from "../../../websockets"
import { helpers } from "@budibase/shared-core" import { helpers } from "@budibase/shared-core"
@ -132,7 +133,7 @@ export async function fetch(ctx: Ctx<void, ViewFetchResponseEnriched>) {
} }
} }
export async function create(ctx: Ctx<CreateViewRequest, ViewResponse>) { export async function create(ctx: Ctx<CreateViewRequest, CreateViewResponse>) {
const view = ctx.request.body const view = ctx.request.body
const { tableId } = view const { tableId } = view
@ -159,7 +160,7 @@ export async function create(ctx: Ctx<CreateViewRequest, ViewResponse>) {
gridSocket?.emitViewUpdate(ctx, result) gridSocket?.emitViewUpdate(ctx, result)
} }
export async function update(ctx: Ctx<UpdateViewRequest, ViewResponse>) { export async function update(ctx: Ctx<UpdateViewRequest, UpdateViewResponse>) {
const view = ctx.request.body const view = ctx.request.body
if (view.version !== 2) { if (view.version !== 2) {
@ -196,7 +197,7 @@ export async function update(ctx: Ctx<UpdateViewRequest, ViewResponse>) {
gridSocket?.emitViewUpdate(ctx, result) gridSocket?.emitViewUpdate(ctx, result)
} }
export async function remove(ctx: Ctx) { export async function remove(ctx: Ctx<void, void>) {
const { viewId } = ctx.params const { viewId } = ctx.params
const view = await sdk.views.remove(viewId) const view = await sdk.views.remove(viewId)

View File

@ -4,9 +4,17 @@ import { db as dbCore, context } from "@budibase/backend-core"
import { import {
Webhook, Webhook,
WebhookActionType, WebhookActionType,
BBContext, Ctx,
Automation, Automation,
AutomationActionStepId, AutomationActionStepId,
FetchWebhooksResponse,
SaveWebhookResponse,
SaveWebhookRequest,
DeleteWebhookResponse,
BuildWebhookSchemaRequest,
BuildWebhookSchemaResponse,
TriggerWebhookRequest,
TriggerWebhookResponse,
} from "@budibase/types" } from "@budibase/types"
import sdk from "../../sdk" import sdk from "../../sdk"
import * as pro from "@budibase/pro" import * as pro from "@budibase/pro"
@ -16,17 +24,17 @@ const validate = require("jsonschema").validate
const AUTOMATION_DESCRIPTION = "Generated from Webhook Schema" const AUTOMATION_DESCRIPTION = "Generated from Webhook Schema"
export async function fetch(ctx: BBContext) { export async function fetch(ctx: Ctx<void, FetchWebhooksResponse>) {
const db = context.getAppDB() const db = context.getAppDB()
const response = await db.allDocs( const response = await db.allDocs<Webhook>(
getWebhookParams(null, { getWebhookParams(null, {
include_docs: true, include_docs: true,
}) })
) )
ctx.body = response.rows.map((row: any) => row.doc) ctx.body = response.rows.filter(row => row.doc).map(row => row.doc!)
} }
export async function save(ctx: BBContext) { export async function save(ctx: Ctx<SaveWebhookRequest, SaveWebhookResponse>) {
const webhook = await sdk.automations.webhook.save(ctx.request.body) const webhook = await sdk.automations.webhook.save(ctx.request.body)
ctx.body = { ctx.body = {
message: "Webhook created successfully", message: "Webhook created successfully",
@ -34,21 +42,23 @@ export async function save(ctx: BBContext) {
} }
} }
export async function destroy(ctx: BBContext) { export async function destroy(ctx: Ctx<void, DeleteWebhookResponse>) {
ctx.body = await sdk.automations.webhook.destroy( ctx.body = await sdk.automations.webhook.destroy(
ctx.params.id, ctx.params.id,
ctx.params.rev ctx.params.rev
) )
} }
export async function buildSchema(ctx: BBContext) { export async function buildSchema(
ctx: Ctx<BuildWebhookSchemaRequest, BuildWebhookSchemaResponse>
) {
await context.doInAppContext(ctx.params.instance, async () => { await context.doInAppContext(ctx.params.instance, async () => {
const db = context.getAppDB() const db = context.getAppDB()
const webhook = (await db.get(ctx.params.id)) as Webhook const webhook = await db.get<Webhook>(ctx.params.id)
webhook.bodySchema = toJsonSchema(ctx.request.body) webhook.bodySchema = toJsonSchema(ctx.request.body)
// update the automation outputs // update the automation outputs
if (webhook.action.type === WebhookActionType.AUTOMATION) { if (webhook.action.type === WebhookActionType.AUTOMATION) {
let automation = (await db.get(webhook.action.target)) as Automation let automation = await db.get<Automation>(webhook.action.target)
const autoOutputs = automation.definition.trigger.schema.outputs const autoOutputs = automation.definition.trigger.schema.outputs
let properties = webhook.bodySchema.properties let properties = webhook.bodySchema.properties
// reset webhook outputs // reset webhook outputs
@ -67,56 +77,66 @@ export async function buildSchema(ctx: BBContext) {
}) })
} }
export async function trigger(ctx: BBContext) { export async function trigger(
ctx: Ctx<TriggerWebhookRequest, TriggerWebhookResponse>
) {
const prodAppId = dbCore.getProdAppID(ctx.params.instance) const prodAppId = dbCore.getProdAppID(ctx.params.instance)
const appNotDeployed = () => {
ctx.body = {
message: "Application not deployed yet.",
}
}
await context.doInAppContext(prodAppId, async () => { await context.doInAppContext(prodAppId, async () => {
try { const db = context.getAppDB()
const db = context.getAppDB() const webhook = await db.tryGet<Webhook>(ctx.params.id)
const webhook = (await db.get(ctx.params.id)) as Webhook if (!webhook) {
// validate against the schema return appNotDeployed()
if (webhook.bodySchema) { }
validate(ctx.request.body, webhook.bodySchema) // validate against the schema
} if (webhook.bodySchema) {
const target = await db.get<Automation>(webhook.action.target) validate(ctx.request.body, webhook.bodySchema)
if (webhook.action.type === WebhookActionType.AUTOMATION) { }
// trigger with both the pure request and then expand it const target = await db.tryGet<Automation>(webhook.action.target)
// incase the user has produced a schema to bind to if (!target) {
let hasCollectStep = sdk.automations.utils.checkForCollectStep(target) return appNotDeployed()
}
if (webhook.action.type === WebhookActionType.AUTOMATION) {
// trigger with both the pure request and then expand it
// incase the user has produced a schema to bind to
let hasCollectStep = sdk.automations.utils.checkForCollectStep(target)
if (hasCollectStep && (await pro.features.isSyncAutomationsEnabled())) { if (hasCollectStep && (await pro.features.isSyncAutomationsEnabled())) {
const response = await triggers.externalTrigger( const response = await triggers.externalTrigger(
target, target,
{ {
body: ctx.request.body, fields: {
...ctx.request.body, ...ctx.request.body,
appId: prodAppId, body: ctx.request.body,
}, },
{ getResponses: true } appId: prodAppId,
) },
{ getResponses: true }
)
if (triggers.isAutomationResults(response)) {
let collectedValue = response.steps.find( let collectedValue = response.steps.find(
(step: any) => step.stepId === AutomationActionStepId.COLLECT (step: any) => step.stepId === AutomationActionStepId.COLLECT
) )
ctx.status = 200 ctx.body = collectedValue?.outputs
ctx.body = collectedValue.outputs
} else { } else {
await triggers.externalTrigger(target, { ctx.throw(400, "Automation did not have a collect block.")
body: ctx.request.body,
...ctx.request.body,
appId: prodAppId,
})
ctx.status = 200
ctx.body = {
message: "Webhook trigger fired successfully",
}
} }
} } else {
} catch (err: any) { await triggers.externalTrigger(target, {
if (err.status === 404) { fields: {
ctx.status = 200 ...ctx.request.body,
body: ctx.request.body,
},
appId: prodAppId,
})
ctx.body = { ctx.body = {
message: "Application not deployed yet.", message: "Webhook trigger fired successfully",
} }
} }
} }

View File

@ -58,12 +58,9 @@ if (apiEnabled()) {
}) })
) )
.use(pro.licensing()) .use(pro.licensing())
// @ts-ignore
.use(currentApp) .use(currentApp)
.use(auth.auditLog) .use(auth.auditLog)
// @ts-ignore
.use(migrations) .use(migrations)
// @ts-ignore
.use(cleanup) .use(cleanup)
// authenticated routes // authenticated routes

View File

@ -56,7 +56,7 @@ router
"/api/v2/queries/:queryId", "/api/v2/queries/:queryId",
paramResource("queryId"), paramResource("queryId"),
authorized(PermissionType.QUERY, PermissionLevel.WRITE), authorized(PermissionType.QUERY, PermissionLevel.WRITE),
queryController.executeV2 as any queryController.executeV2
) )
export default router export default router

View File

@ -5,6 +5,8 @@ import { paramResource, paramSubResource } from "../../middleware/resourceId"
import { permissions } from "@budibase/backend-core" import { permissions } from "@budibase/backend-core"
import { internalSearchValidator } from "./utils/validators" import { internalSearchValidator } from "./utils/validators"
import trimViewRowInfo from "../../middleware/trimViewRowInfo" import trimViewRowInfo from "../../middleware/trimViewRowInfo"
import { validateBody } from "../../middleware/zod-validator"
import { searchRowRequestValidator } from "@budibase/types"
const { PermissionType, PermissionLevel } = permissions const { PermissionType, PermissionLevel } = permissions
@ -32,6 +34,7 @@ router
.post( .post(
"/api/:sourceId/search", "/api/:sourceId/search",
internalSearchValidator(), internalSearchValidator(),
validateBody(searchRowRequestValidator),
paramResource("sourceId"), paramResource("sourceId"),
authorized(PermissionType.TABLE, PermissionLevel.READ), authorized(PermissionType.TABLE, PermissionLevel.READ),
rowController.search rowController.search
@ -87,6 +90,7 @@ router
router.post( router.post(
"/api/v2/views/:viewId/search", "/api/v2/views/:viewId/search",
internalSearchValidator(), internalSearchValidator(),
validateBody(searchRowRequestValidator),
authorizedResource(PermissionType.VIEW, PermissionLevel.READ, "viewId"), authorizedResource(PermissionType.VIEW, PermissionLevel.READ, "viewId"),
rowController.views.searchView rowController.views.searchView
) )

View File

@ -1,10 +0,0 @@
import Router from "@koa/router"
import * as controller from "../controllers/script"
import authorized from "../../middleware/authorized"
import { permissions } from "@budibase/backend-core"
const router: Router = new Router()
router.post("/api/script", authorized(permissions.BUILDER), controller.save)
export default router

View File

@ -169,331 +169,521 @@ const descriptions = datasourceDescribe({
}) })
if (descriptions.length) { if (descriptions.length) {
describe.each(descriptions)("$dbName", ({ config, dsProvider }) => { describe.each(descriptions)(
let datasource: Datasource "$dbName",
let rawDatasource: Datasource ({ config, dsProvider, isOracle, isMSSQL }) => {
let client: Knex let datasource: Datasource
let rawDatasource: Datasource
let client: Knex
beforeEach(async () => { beforeEach(async () => {
const ds = await dsProvider() const ds = await dsProvider()
rawDatasource = ds.rawDatasource! rawDatasource = ds.rawDatasource!
datasource = ds.datasource! datasource = ds.datasource!
client = ds.client! client = ds.client!
jest.clearAllMocks() jest.clearAllMocks()
nock.cleanAll() nock.cleanAll()
})
describe("get", () => {
it("should be able to get a datasource", async () => {
const ds = await config.api.datasource.get(datasource._id!)
expect(ds).toEqual({
config: expect.any(Object),
plus: datasource.plus,
source: datasource.source,
isSQL: true,
type: "datasource_plus",
_id: datasource._id,
_rev: expect.any(String),
createdAt: expect.any(String),
updatedAt: expect.any(String),
})
}) })
it("should not return database password", async () => { describe("get", () => {
const ds = await config.api.datasource.get(datasource._id!) it("should be able to get a datasource", async () => {
expect(ds.config!.password).toBe("--secret-value--") const ds = await config.api.datasource.get(datasource._id!)
}) expect(ds).toEqual({
}) config: expect.any(Object),
plus: datasource.plus,
describe("list", () => { source: datasource.source,
it("returns all the datasources", async () => { isSQL: true,
const datasources = await config.api.datasource.fetch() type: "datasource_plus",
expect(datasources).toContainEqual(expect.objectContaining(datasource)) _id: datasource._id,
}) _rev: expect.any(String),
}) createdAt: expect.any(String),
updatedAt: expect.any(String),
describe("put", () => {
it("should update an existing datasource", async () => {
const newName = generator.guid()
datasource.name = newName
const updatedDs = await config.api.datasource.update(datasource)
expect(updatedDs.name).toEqual(newName)
expect(events.datasource.updated).toHaveBeenCalledTimes(1)
})
it("should not overwrite database password with --secret-value--", async () => {
const password = await context.doInAppContext(
config.getAppId(),
async () => {
const ds = await sdk.datasources.get(datasource._id!)
return ds.config!.password
}
)
expect(password).not.toBe("--secret-value--")
const ds = await config.api.datasource.get(datasource._id!)
expect(ds.config!.password).toBe("--secret-value--")
await config.api.datasource.update(
await config.api.datasource.get(datasource._id!)
)
const newPassword = await context.doInAppContext(
config.getAppId(),
async () => {
const ds = await sdk.datasources.get(datasource._id!)
return ds.config!.password
}
)
expect(newPassword).not.toBe("--secret-value--")
expect(newPassword).toBe(password)
})
})
describe("destroy", () => {
it("deletes queries for the datasource after deletion and returns a success message", async () => {
await config.api.query.save({
datasourceId: datasource._id!,
name: "Test Query",
parameters: [],
fields: {},
schema: {},
queryVerb: "read",
transformer: null,
readable: true,
})
await config.api.datasource.delete(datasource)
const datasources = await config.api.datasource.fetch()
expect(datasources).not.toContainEqual(
expect.objectContaining(datasource)
)
expect(events.datasource.deleted).toHaveBeenCalledTimes(1)
})
})
describe("schema", () => {
it("fetching schema will not drop tables or columns", async () => {
const datasourceId = datasource!._id!
const simpleTable = await config.api.table.save(
tableForDatasource(datasource, {
name: "simple",
schema: {
name: {
name: "name",
type: FieldType.STRING,
},
},
}) })
)
const stringName = "string"
const fullSchema: {
[type in SupportedSqlTypes]: FieldSchema & { type: type }
} = {
[FieldType.STRING]: {
name: stringName,
type: FieldType.STRING,
},
[FieldType.LONGFORM]: {
name: "longform",
type: FieldType.LONGFORM,
},
[FieldType.OPTIONS]: {
name: "options",
type: FieldType.OPTIONS,
constraints: {
presence: {
allowEmpty: false,
},
inclusion: [],
},
},
[FieldType.NUMBER]: {
name: "number",
type: FieldType.NUMBER,
},
[FieldType.BOOLEAN]: {
name: "boolean",
type: FieldType.BOOLEAN,
},
[FieldType.ARRAY]: {
name: "array",
type: FieldType.ARRAY,
constraints: {
type: JsonFieldSubType.ARRAY,
inclusion: [],
},
},
[FieldType.DATETIME]: {
name: "datetime",
type: FieldType.DATETIME,
dateOnly: true,
timeOnly: false,
},
[FieldType.LINK]: {
name: "link",
type: FieldType.LINK,
tableId: simpleTable._id!,
relationshipType: RelationshipType.ONE_TO_MANY,
fieldName: "link",
},
[FieldType.FORMULA]: {
name: "formula",
type: FieldType.FORMULA,
formula: "any formula",
},
[FieldType.BARCODEQR]: {
name: "barcodeqr",
type: FieldType.BARCODEQR,
},
[FieldType.BIGINT]: {
name: "bigint",
type: FieldType.BIGINT,
},
[FieldType.BB_REFERENCE]: {
name: "bb_reference",
type: FieldType.BB_REFERENCE,
subtype: BBReferenceFieldSubType.USER,
},
[FieldType.BB_REFERENCE_SINGLE]: {
name: "bb_reference_single",
type: FieldType.BB_REFERENCE_SINGLE,
subtype: BBReferenceFieldSubType.USER,
},
}
await config.api.table.save(
tableForDatasource(datasource, {
name: "full",
schema: fullSchema,
})
)
const persisted = await config.api.datasource.get(datasourceId)
await config.api.datasource.fetchSchema({ datasourceId })
const updated = await config.api.datasource.get(datasourceId)
const expected: Datasource = {
...persisted,
entities:
persisted?.entities &&
Object.entries(persisted.entities).reduce<Record<string, Table>>(
(acc, [tableName, table]) => {
acc[tableName] = expect.objectContaining({
...table,
primaryDisplay: expect.not.stringMatching(
new RegExp(`^${table.primaryDisplay || ""}$`)
),
schema: Object.entries(table.schema).reduce<TableSchema>(
(acc, [fieldName, field]) => {
acc[fieldName] = {
...field,
externalType: allowUndefined(expect.any(String)),
constraints: allowUndefined(expect.any(Object)),
autocolumn: allowUndefined(expect.any(Boolean)),
}
return acc
},
{}
),
})
return acc
},
{}
),
_rev: expect.any(String),
updatedAt: expect.any(String),
}
expect(updated).toEqual(expected)
})
})
describe("verify", () => {
it("should be able to verify the connection", async () => {
await config.api.datasource.verify(
{
datasource: rawDatasource,
},
{
body: {
connected: true,
},
}
)
})
it("should state an invalid datasource cannot connect", async () => {
await config.api.datasource.verify(
{
datasource: {
...rawDatasource,
config: {
...rawDatasource.config,
password: "wrongpassword",
},
},
},
{
body: {
connected: false,
error: /.*/, // error message differs between databases
},
}
)
})
})
describe("info", () => {
it("should fetch information about a datasource with a single table", async () => {
const existingTableNames = (
await config.api.datasource.info(datasource)
).tableNames
const tableName = generator.guid()
await client.schema.createTable(tableName, table => {
table.increments("id").primary()
table.string("name")
}) })
const info = await config.api.datasource.info(datasource) it("should not return database password", async () => {
expect(info.tableNames).toEqual( const ds = await config.api.datasource.get(datasource._id!)
expect.arrayContaining([tableName, ...existingTableNames]) expect(ds.config!.password).toBe("--secret-value--")
) })
expect(info.tableNames).toHaveLength(existingTableNames.length + 1)
}) })
it("should fetch information about a datasource with multiple tables", async () => { describe("list", () => {
const existingTableNames = ( it("returns all the datasources", async () => {
await config.api.datasource.info(datasource) const datasources = await config.api.datasource.fetch()
).tableNames expect(datasources).toContainEqual(
expect.objectContaining(datasource)
)
})
})
const tableNames = [ describe("put", () => {
generator.guid(), it("should update an existing datasource", async () => {
generator.guid(), const newName = generator.guid()
generator.guid(), datasource.name = newName
generator.guid(), const updatedDs = await config.api.datasource.update(datasource)
] expect(updatedDs.name).toEqual(newName)
for (const tableName of tableNames) { expect(events.datasource.updated).toHaveBeenCalledTimes(1)
})
it("should not overwrite database password with --secret-value--", async () => {
const password = await context.doInAppContext(
config.getAppId(),
async () => {
const ds = await sdk.datasources.get(datasource._id!)
return ds.config!.password
}
)
expect(password).not.toBe("--secret-value--")
const ds = await config.api.datasource.get(datasource._id!)
expect(ds.config!.password).toBe("--secret-value--")
await config.api.datasource.update(
await config.api.datasource.get(datasource._id!)
)
const newPassword = await context.doInAppContext(
config.getAppId(),
async () => {
const ds = await sdk.datasources.get(datasource._id!)
return ds.config!.password
}
)
expect(newPassword).not.toBe("--secret-value--")
expect(newPassword).toBe(password)
})
})
describe("destroy", () => {
it("deletes queries for the datasource after deletion and returns a success message", async () => {
await config.api.query.save({
datasourceId: datasource._id!,
name: "Test Query",
parameters: [],
fields: {},
schema: {},
queryVerb: "read",
transformer: null,
readable: true,
})
await config.api.datasource.delete(datasource)
const datasources = await config.api.datasource.fetch()
expect(datasources).not.toContainEqual(
expect.objectContaining(datasource)
)
expect(events.datasource.deleted).toHaveBeenCalledTimes(1)
})
})
describe("schema", () => {
it("fetching schema will not drop tables or columns", async () => {
const datasourceId = datasource!._id!
const simpleTable = await config.api.table.save(
tableForDatasource(datasource, {
name: "simple",
schema: {
name: {
name: "name",
type: FieldType.STRING,
},
},
})
)
const stringName = "string"
const fullSchema: {
[type in SupportedSqlTypes]: FieldSchema & { type: type }
} = {
[FieldType.STRING]: {
name: stringName,
type: FieldType.STRING,
},
[FieldType.LONGFORM]: {
name: "longform",
type: FieldType.LONGFORM,
},
[FieldType.OPTIONS]: {
name: "options",
type: FieldType.OPTIONS,
constraints: {
presence: {
allowEmpty: false,
},
inclusion: ["1", "2", "3"],
},
},
[FieldType.NUMBER]: {
name: "number",
type: FieldType.NUMBER,
},
[FieldType.BOOLEAN]: {
name: "boolean",
type: FieldType.BOOLEAN,
},
[FieldType.ARRAY]: {
name: "array",
type: FieldType.ARRAY,
constraints: {
type: JsonFieldSubType.ARRAY,
inclusion: [],
},
},
[FieldType.DATETIME]: {
name: "datetime",
type: FieldType.DATETIME,
dateOnly: true,
timeOnly: false,
},
[FieldType.LINK]: {
name: "link",
type: FieldType.LINK,
tableId: simpleTable._id!,
relationshipType: RelationshipType.ONE_TO_MANY,
fieldName: "link",
},
[FieldType.FORMULA]: {
name: "formula",
type: FieldType.FORMULA,
formula: "any formula",
},
[FieldType.BARCODEQR]: {
name: "barcodeqr",
type: FieldType.BARCODEQR,
},
[FieldType.BIGINT]: {
name: "bigint",
type: FieldType.BIGINT,
},
[FieldType.BB_REFERENCE]: {
name: "bb_reference",
type: FieldType.BB_REFERENCE,
subtype: BBReferenceFieldSubType.USER,
},
[FieldType.BB_REFERENCE_SINGLE]: {
name: "bb_reference_single",
type: FieldType.BB_REFERENCE_SINGLE,
subtype: BBReferenceFieldSubType.USER,
},
}
await config.api.table.save(
tableForDatasource(datasource, {
name: "full",
schema: fullSchema,
})
)
const persisted = await config.api.datasource.get(datasourceId)
await config.api.datasource.fetchSchema({ datasourceId })
const updated = await config.api.datasource.get(datasourceId)
const expected: Datasource = {
...persisted,
entities:
persisted?.entities &&
Object.entries(persisted.entities).reduce<Record<string, Table>>(
(acc, [tableName, table]) => {
acc[tableName] = expect.objectContaining({
...table,
primaryDisplay: expect.not.stringMatching(
new RegExp(`^${table.primaryDisplay || ""}$`)
),
schema: Object.entries(table.schema).reduce<TableSchema>(
(acc, [fieldName, field]) => {
acc[fieldName] = {
...field,
externalType: allowUndefined(expect.any(String)),
constraints: allowUndefined(expect.any(Object)),
autocolumn: allowUndefined(expect.any(Boolean)),
}
return acc
},
{}
),
})
return acc
},
{}
),
_rev: expect.any(String),
updatedAt: expect.any(String),
}
expect(updated).toEqual(expected)
})
!isOracle &&
!isMSSQL &&
it("can fetch options columns with a large number of options", async () => {
const enumOptions = new Array(1000)
.fill(0)
.map((_, i) => i.toString())
.toSorted()
await client.schema.createTable("options", table => {
table.increments("id").primary()
table.enum("enum", enumOptions, {
useNative: true,
enumName: "enum",
})
})
const resp = await config.api.datasource.fetchSchema({
datasourceId: datasource._id!,
})
expect(resp.errors).toEqual({})
const table = resp.datasource.entities!.options
expect(
table.schema.enum.constraints!.inclusion!.toSorted()
).toEqual(enumOptions)
})
!isOracle &&
!isMSSQL &&
it("can fetch options with commas in them", async () => {
const enumOptions = [
"Lincoln, Abraham",
"Washington, George",
"Fred",
"Bob",
].toSorted()
await client.schema.createTable("options", table => {
table.increments("id").primary()
table.enum("enum", enumOptions, {
useNative: true,
enumName: "enum",
})
})
const resp = await config.api.datasource.fetchSchema({
datasourceId: datasource._id!,
})
expect(resp.errors).toEqual({})
const table = resp.datasource.entities!.options
expect(
table.schema.enum.constraints!.inclusion!.toSorted()
).toEqual(enumOptions)
})
!isOracle &&
!isMSSQL &&
it("can fetch options that may include other type names", async () => {
const enumOptions = [
"int",
"bigint",
"float",
"numeric",
"json",
"map",
].toSorted()
await client.schema.createTable("options", table => {
table.increments("id").primary()
table.enum("enum", enumOptions, {
useNative: true,
enumName: "enum",
})
})
const resp = await config.api.datasource.fetchSchema({
datasourceId: datasource._id!,
})
expect(resp.errors).toEqual({})
const table = resp.datasource.entities!.options
expect(
table.schema.enum.constraints!.inclusion!.toSorted()
).toEqual(enumOptions)
})
})
describe("verify", () => {
it("should be able to verify the connection", async () => {
await config.api.datasource.verify(
{
datasource: rawDatasource,
},
{
body: {
connected: true,
},
}
)
})
it("should state an invalid datasource cannot connect", async () => {
await config.api.datasource.verify(
{
datasource: {
...rawDatasource,
config: {
...rawDatasource.config,
password: "wrongpassword",
},
},
},
{
body: {
connected: false,
error: /.*/, // error message differs between databases
},
}
)
})
})
describe("info", () => {
it("should fetch information about a datasource with a single table", async () => {
const existingTableNames = (
await config.api.datasource.info(datasource)
).tableNames
const tableName = generator.guid()
await client.schema.createTable(tableName, table => { await client.schema.createTable(tableName, table => {
table.increments("id").primary() table.increments("id").primary()
table.string("name") table.string("name")
}) })
}
const info = await config.api.datasource.info(datasource) const info = await config.api.datasource.info(datasource)
expect(info.tableNames).toEqual( expect(info.tableNames).toEqual(
expect.arrayContaining([...tableNames, ...existingTableNames]) expect.arrayContaining([tableName, ...existingTableNames])
) )
expect(info.tableNames).toHaveLength( expect(info.tableNames).toHaveLength(existingTableNames.length + 1)
existingTableNames.length + tableNames.length })
)
it("should fetch information about a datasource with multiple tables", async () => {
  // Snapshot the tables that already exist so assertions only concern the
  // delta introduced by this test.
  const existingTableNames = (
    await config.api.datasource.info(datasource)
  ).tableNames

  // Create four tables with random guid names, one at a time.
  const createdTableNames = Array.from({ length: 4 }, () => generator.guid())
  for (const name of createdTableNames) {
    await client.schema.createTable(name, table => {
      table.increments("id").primary()
      table.string("name")
    })
  }

  // info() should report exactly the pre-existing tables plus the new ones.
  const info = await config.api.datasource.info(datasource)
  expect(info.tableNames).toEqual(
    expect.arrayContaining([...createdTableNames, ...existingTableNames])
  )
  expect(info.tableNames).toHaveLength(
    existingTableNames.length + createdTableNames.length
  )
})
}) })
}) }
}) )
}
const datasources = datasourceDescribe({
exclude: [DatabaseName.MONGODB, DatabaseName.SQS, DatabaseName.ORACLE],
})
if (datasources.length) {
describe.each(datasources)(
"$dbName",
({ config, dsProvider, isPostgres, isMySQL, isMariaDB }) => {
let datasource: Datasource
let client: Knex
beforeEach(async () => {
const ds = await dsProvider()
datasource = ds.datasource!
client = ds.client!
})
describe("external export", () => {
  let table: Table

  beforeEach(async () => {
    // A minimal two-column table (auto-increment id + name) to export.
    table = await config.api.table.save(
      tableForDatasource(datasource, {
        name: "simple",
        primary: ["id"],
        primaryDisplay: "name",
        schema: {
          id: {
            name: "id",
            autocolumn: true,
            type: FieldType.NUMBER,
            constraints: { presence: false },
          },
          name: {
            name: "name",
            autocolumn: false,
            type: FieldType.STRING,
            constraints: { presence: false },
          },
        },
      })
    )
  })

  it("should be able to export and reimport a schema", async () => {
    let { schema } = await config.api.datasource.externalSchema(datasource)
    if (isPostgres) {
      // pg_dump 17 puts this config parameter into the dump but no DB < 17
      // can load it. We're using postgres 16 in tests at the time of writing.
      schema = schema.replace("SET transaction_timeout = 0;", "")
    }

    // Drop the table, then replay the exported schema to recreate it.
    await config.api.table.destroy(table._id!, table._rev!)
    if (isMySQL || isMariaDB) {
      // MySQL/MariaDB clients don't let you run multiple queries in a
      // single call. They also throw an error when given an empty query.
      // The below handles both of these things.
      const statements = schema
        .split(";\n")
        .map(statement => statement.trim())
        .filter(statement => statement.length > 0)
      for (const statement of statements) {
        await client.raw(statement)
      }
    } else {
      await client.raw(schema)
    }

    // Re-fetch so Budibase picks up the recreated table.
    await config.api.datasource.fetchSchema({
      datasourceId: datasource._id!,
    })

    const tables = await config.api.table.fetch()
    const reimported = tables.find(t => t.name === table.name)!

    // This is only set on tables created through Budibase, we don't
    // expect it to match after we import the table.
    delete table.created
    for (const field of Object.values(reimported.schema)) {
      // Will differ per-database, not useful for this test.
      delete field.externalType
    }
    expect(reimported).toEqual(table)
  })
})
}
)
} }

View File

@ -1,11 +1,11 @@
const { testAutomation } = require("./utilities/TestFunctions") import { testAutomation } from "./utilities/TestFunctions"
const setup = require("./utilities") import * as setup from "./utilities"
const { MetadataTypes } = require("../../../constants") import { MetadataType, Automation } from "@budibase/types"
describe("/metadata", () => { describe("/metadata", () => {
let request = setup.getRequest() let request = setup.getRequest()
let config = setup.getConfig() let config = setup.getConfig()
let automation let automation: Automation
afterAll(setup.afterAll) afterAll(setup.afterAll)
@ -15,8 +15,8 @@ describe("/metadata", () => {
}) })
async function createMetadata( async function createMetadata(
data, data: Record<string, string>,
type = MetadataTypes.AUTOMATION_TEST_INPUT type = MetadataType.AUTOMATION_TEST_INPUT
) { ) {
const res = await request const res = await request
.post(`/api/metadata/${type}/${automation._id}`) .post(`/api/metadata/${type}/${automation._id}`)
@ -27,7 +27,7 @@ describe("/metadata", () => {
expect(res.body._rev).toBeDefined() expect(res.body._rev).toBeDefined()
} }
async function getMetadata(type) { async function getMetadata(type: MetadataType) {
const res = await request const res = await request
.get(`/api/metadata/${type}/${automation._id}`) .get(`/api/metadata/${type}/${automation._id}`)
.set(config.defaultHeaders()) .set(config.defaultHeaders())
@ -39,14 +39,14 @@ describe("/metadata", () => {
describe("save", () => { describe("save", () => {
it("should be able to save some metadata", async () => { it("should be able to save some metadata", async () => {
await createMetadata({ test: "a" }) await createMetadata({ test: "a" })
const testInput = await getMetadata(MetadataTypes.AUTOMATION_TEST_INPUT) const testInput = await getMetadata(MetadataType.AUTOMATION_TEST_INPUT)
expect(testInput.test).toBe("a") expect(testInput.test).toBe("a")
}) })
it("should save history metadata on automation run", async () => { it("should save history metadata on automation run", async () => {
// this should have created some history // this should have created some history
await testAutomation(config, automation) await testAutomation(config, automation, {})
const metadata = await getMetadata(MetadataTypes.AUTOMATION_TEST_HISTORY) const metadata = await getMetadata(MetadataType.AUTOMATION_TEST_HISTORY)
expect(metadata).toBeDefined() expect(metadata).toBeDefined()
expect(metadata.history.length).toBe(1) expect(metadata.history.length).toBe(1)
expect(typeof metadata.history[0].occurredAt).toBe("number") expect(typeof metadata.history[0].occurredAt).toBe("number")
@ -57,13 +57,13 @@ describe("/metadata", () => {
it("should be able to delete some test inputs", async () => { it("should be able to delete some test inputs", async () => {
const res = await request const res = await request
.delete( .delete(
`/api/metadata/${MetadataTypes.AUTOMATION_TEST_INPUT}/${automation._id}` `/api/metadata/${MetadataType.AUTOMATION_TEST_INPUT}/${automation._id}`
) )
.set(config.defaultHeaders()) .set(config.defaultHeaders())
.expect("Content-Type", /json/) .expect("Content-Type", /json/)
.expect(200) .expect(200)
expect(res.body.message).toBeDefined() expect(res.body.message).toBeDefined()
const metadata = await getMetadata(MetadataTypes.AUTOMATION_TEST_INPUT) const metadata = await getMetadata(MetadataType.AUTOMATION_TEST_INPUT)
expect(metadata.test).toBeUndefined() expect(metadata.test).toBeUndefined()
}) })
}) })

View File

@ -48,7 +48,7 @@ jest.mock("@budibase/pro", () => ({
ai: { ai: {
LargeLanguageModel: { LargeLanguageModel: {
forCurrentTenant: async () => ({ forCurrentTenant: async () => ({
initialised: true, llm: {},
run: jest.fn(() => `Mock LLM Response`), run: jest.fn(() => `Mock LLM Response`),
buildPromptFromAIOperation: jest.fn(), buildPromptFromAIOperation: jest.fn(),
}), }),
@ -2607,6 +2607,8 @@ if (descriptions.length) {
name: "foo", name: "foo",
description: "bar", description: "bar",
tableId, tableId,
createdAt: isInternal ? new Date().toISOString() : undefined,
updatedAt: isInternal ? new Date().toISOString() : undefined,
}) })
}) })
@ -2628,6 +2630,8 @@ if (descriptions.length) {
id: isInternal ? undefined : expect.any(Number), id: isInternal ? undefined : expect.any(Number),
type: isInternal ? "row" : undefined, type: isInternal ? "row" : undefined,
[`fk_${o2mTable.name}_fk_o2m`]: isInternal ? undefined : user.id, [`fk_${o2mTable.name}_fk_o2m`]: isInternal ? undefined : user.id,
createdAt: isInternal ? new Date().toISOString() : undefined,
updatedAt: isInternal ? new Date().toISOString() : undefined,
}) })
}) })
@ -2650,6 +2654,8 @@ if (descriptions.length) {
_rev: expect.any(String), _rev: expect.any(String),
id: isInternal ? undefined : expect.any(Number), id: isInternal ? undefined : expect.any(Number),
type: isInternal ? "row" : undefined, type: isInternal ? "row" : undefined,
createdAt: isInternal ? new Date().toISOString() : undefined,
updatedAt: isInternal ? new Date().toISOString() : undefined,
}) })
}) })
@ -2729,6 +2735,8 @@ if (descriptions.length) {
id: isInternal ? undefined : expect.any(Number), id: isInternal ? undefined : expect.any(Number),
type: isInternal ? "row" : undefined, type: isInternal ? "row" : undefined,
[`fk_${o2mTable.name}_fk_o2m`]: isInternal ? undefined : user.id, [`fk_${o2mTable.name}_fk_o2m`]: isInternal ? undefined : user.id,
createdAt: isInternal ? new Date().toISOString() : undefined,
updatedAt: isInternal ? new Date().toISOString() : undefined,
}) })
}) })
@ -2745,15 +2753,8 @@ if (descriptions.length) {
user: null, user: null,
users: null, users: null,
}) })
expect(updatedRow).toEqual({ expect(updatedRow.user).toBeUndefined()
name: "foo", expect(updatedRow.users).toBeUndefined()
description: "bar",
tableId,
_id: row._id,
_rev: expect.any(String),
id: isInternal ? undefined : expect.any(Number),
type: isInternal ? "row" : undefined,
})
}) })
it("fetch all will populate the relationships", async () => { it("fetch all will populate the relationships", async () => {

View File

@ -24,6 +24,7 @@ import {
JsonFieldSubType, JsonFieldSubType,
LogicalOperator, LogicalOperator,
RelationshipType, RelationshipType,
RequiredKeys,
Row, Row,
RowSearchParams, RowSearchParams,
SearchFilters, SearchFilters,
@ -51,7 +52,7 @@ jest.mock("@budibase/pro", () => ({
ai: { ai: {
LargeLanguageModel: { LargeLanguageModel: {
forCurrentTenant: async () => ({ forCurrentTenant: async () => ({
initialised: true, llm: {},
run: jest.fn(() => `Mock LLM Response`), run: jest.fn(() => `Mock LLM Response`),
buildPromptFromAIOperation: jest.fn(), buildPromptFromAIOperation: jest.fn(),
}), }),
@ -208,9 +209,25 @@ if (descriptions.length) {
private async performSearch(): Promise<SearchResponse<Row>> { private async performSearch(): Promise<SearchResponse<Row>> {
if (isInMemory) { if (isInMemory) {
return dataFilters.search(_.cloneDeep(rows), { const inMemoryQuery: RequiredKeys<
...this.query, Omit<RowSearchParams, "tableId">
}) > = {
sort: this.query.sort ?? undefined,
query: { ...this.query.query },
paginate: this.query.paginate,
bookmark: this.query.bookmark ?? undefined,
limit: this.query.limit,
sortOrder: this.query.sortOrder,
sortType: this.query.sortType ?? undefined,
version: this.query.version,
disableEscaping: this.query.disableEscaping,
countRows: this.query.countRows,
viewId: undefined,
fields: undefined,
indexer: undefined,
rows: undefined,
}
return dataFilters.search(_.cloneDeep(rows), inMemoryQuery)
} else { } else {
return config.api.row.search(tableOrViewId, this.query) return config.api.row.search(tableOrViewId, this.query)
} }

View File

@ -96,9 +96,15 @@ if (env.SELF_HOSTED) {
ACTION_IMPLS["EXECUTE_BASH"] = bash.run ACTION_IMPLS["EXECUTE_BASH"] = bash.run
// @ts-ignore // @ts-ignore
BUILTIN_ACTION_DEFINITIONS["EXECUTE_BASH"] = bash.definition BUILTIN_ACTION_DEFINITIONS["EXECUTE_BASH"] = bash.definition
if (env.isTest()) {
BUILTIN_ACTION_DEFINITIONS["OPENAI"] = openai.definition
}
} }
export async function getActionDefinitions() { export async function getActionDefinitions(): Promise<
Record<keyof typeof AutomationActionStepId, AutomationStepDefinition>
> {
if (await features.flags.isEnabled(FeatureFlag.AUTOMATION_BRANCHING)) { if (await features.flags.isEnabled(FeatureFlag.AUTOMATION_BRANCHING)) {
BUILTIN_ACTION_DEFINITIONS["BRANCH"] = branch.definition BUILTIN_ACTION_DEFINITIONS["BRANCH"] = branch.definition
} }

View File

@ -94,7 +94,7 @@ export async function run({
}) })
try { try {
await queryController.executeV2(ctx, { isAutomation: true }) await queryController.executeV2AsAutomation(ctx)
const { data, ...rest } = ctx.body const { data, ...rest } = ctx.body
return { return {

View File

@ -106,13 +106,15 @@ export async function run({
(await features.flags.isEnabled(FeatureFlag.BUDIBASE_AI)) && (await features.flags.isEnabled(FeatureFlag.BUDIBASE_AI)) &&
(await pro.features.isBudibaseAIEnabled()) (await pro.features.isBudibaseAIEnabled())
let llm let llmWrapper
if (budibaseAIEnabled || customConfigsEnabled) { if (budibaseAIEnabled || customConfigsEnabled) {
llm = await pro.ai.LargeLanguageModel.forCurrentTenant(inputs.model) llmWrapper = await pro.ai.LargeLanguageModel.forCurrentTenant(
inputs.model
)
} }
response = llm?.initialised response = llmWrapper?.llm
? await llm.run(inputs.prompt) ? await llmWrapper.run(inputs.prompt)
: await legacyOpenAIPrompt(inputs) : await legacyOpenAIPrompt(inputs)
return { return {

View File

@ -3,7 +3,6 @@ import {
AutomationStepDefinition, AutomationStepDefinition,
AutomationStepType, AutomationStepType,
AutomationIOType, AutomationIOType,
AutomationResults,
Automation, Automation,
AutomationCustomIOType, AutomationCustomIOType,
TriggerAutomationStepInputs, TriggerAutomationStepInputs,
@ -78,7 +77,7 @@ export async function run({
const db = context.getAppDB() const db = context.getAppDB()
let automation = await db.get<Automation>(inputs.automation.automationId) let automation = await db.get<Automation>(inputs.automation.automationId)
const response: AutomationResults = await triggers.externalTrigger( const response = await triggers.externalTrigger(
automation, automation,
{ {
fields: { ...fieldParams }, fields: { ...fieldParams },
@ -88,9 +87,13 @@ export async function run({
{ getResponses: true } { getResponses: true }
) )
return { if (triggers.isAutomationResults(response)) {
success: true, return {
value: response.steps, success: true,
value: response.steps,
}
} else {
throw new Error("Automation did not have a collect block")
} }
} }
} else { } else {

View File

@ -1,26 +1,148 @@
import { getConfig, afterAll as _afterAll, runStep } from "./utilities" import { createAutomationBuilder } from "./utilities/AutomationTestBuilder"
import * as automation from "../index"
import * as setup from "./utilities"
import { Table } from "@budibase/types"
describe("test the bash action", () => { describe("Execute Bash Automations", () => {
let config = getConfig() let config = setup.getConfig(),
table: Table
beforeAll(async () => { beforeAll(async () => {
await automation.init()
await config.init() await config.init()
}) table = await config.createTable()
afterAll(_afterAll) await config.createRow({
name: "test row",
it("should be able to execute a script", async () => { description: "test description",
let res = await runStep(config, "EXECUTE_BASH", { tableId: table._id!,
code: "echo 'test'",
}) })
expect(res.stdout).toEqual("test\n")
expect(res.success).toEqual(true)
}) })
it("should handle a null value", async () => { afterAll(setup.afterAll)
let res = await runStep(config, "EXECUTE_BASH", {
code: null, it("should use trigger data in bash command and pass output to subsequent steps", async () => {
const result = await createAutomationBuilder({
name: "Bash with Trigger Data",
config,
}) })
expect(res.stdout).toEqual( .appAction({ fields: { command: "hello world" } })
.bash(
{ code: "echo '{{ trigger.fields.command }}'" },
{ stepName: "Echo Command" }
)
.serverLog(
{ text: "Bash output was: {{ steps.[Echo Command].stdout }}" },
{ stepName: "Log Output" }
)
.run()
expect(result.steps[0].outputs.stdout).toEqual("hello world\n")
expect(result.steps[1].outputs.message).toContain(
"Bash output was: hello world"
)
})
it("should chain multiple bash commands using previous outputs", async () => {
const result = await createAutomationBuilder({
name: "Chained Bash Commands",
config,
})
.appAction({ fields: { filename: "testfile.txt" } })
.bash(
{ code: "echo 'initial content' > {{ trigger.fields.filename }}" },
{ stepName: "Create File" }
)
.bash(
{ code: "cat {{ trigger.fields.filename }} | tr '[a-z]' '[A-Z]'" },
{ stepName: "Transform Content" }
)
.bash(
{ code: "rm {{ trigger.fields.filename }}" },
{ stepName: "Cleanup" }
)
.run()
expect(result.steps[1].outputs.stdout).toEqual("INITIAL CONTENT\n")
expect(result.steps[1].outputs.success).toEqual(true)
})
it("should integrate bash output with row operations", async () => {
const result = await createAutomationBuilder({
name: "Bash with Row Operations",
config,
})
.appAction({ fields: {} })
.queryRows(
{
tableId: table._id!,
filters: {},
},
{ stepName: "Get Row" }
)
.bash(
{
code: "echo Row data: {{ steps.[Get Row].rows.[0].name }} - {{ steps.[Get Row].rows.[0].description }}",
},
{ stepName: "Process Row Data" }
)
.serverLog(
{ text: "{{ steps.[Process Row Data].stdout }}" },
{ stepName: "Log Result" }
)
.run()
expect(result.steps[1].outputs.stdout).toContain(
"Row data: test row - test description"
)
expect(result.steps[2].outputs.message).toContain(
"Row data: test row - test description"
)
})
it("should handle bash output in conditional logic", async () => {
const result = await createAutomationBuilder({
name: "Bash with Conditional",
config,
})
.appAction({ fields: { threshold: "5" } })
.bash(
{ code: "echo $(( {{ trigger.fields.threshold }} + 5 ))" },
{ stepName: "Calculate Value" }
)
.executeScript(
{
code: `
const value = parseInt(steps["Calculate Value"].stdout);
return value > 8 ? "high" : "low";
`,
},
{ stepName: "Check Value" }
)
.serverLog(
{ text: "Value was {{ steps.[Check Value].value }}" },
{ stepName: "Log Result" }
)
.run()
expect(result.steps[0].outputs.stdout).toEqual("10\n")
expect(result.steps[1].outputs.value).toEqual("high")
expect(result.steps[2].outputs.message).toContain("Value was high")
})
it("should handle null values gracefully", async () => {
const result = await createAutomationBuilder({
name: "Null Bash Input",
config,
})
.appAction({ fields: {} })
.bash(
//@ts-ignore
{ code: null },
{ stepName: "Null Command" }
)
.run()
expect(result.steps[0].outputs.stdout).toBe(
"Budibase bash automation failed: Invalid inputs" "Budibase bash automation failed: Invalid inputs"
) )
}) })

View File

@ -3,7 +3,7 @@ import * as triggers from "../triggers"
import { loopAutomation } from "../../tests/utilities/structures" import { loopAutomation } from "../../tests/utilities/structures"
import { context } from "@budibase/backend-core" import { context } from "@budibase/backend-core"
import * as setup from "./utilities" import * as setup from "./utilities"
import { Table, LoopStepType } from "@budibase/types" import { Table, LoopStepType, AutomationResults } from "@budibase/types"
import * as loopUtils from "../loopUtils" import * as loopUtils from "../loopUtils"
import { LoopInput } from "../../definitions/automations" import { LoopInput } from "../../definitions/automations"
@ -20,15 +20,19 @@ describe("Attempt to run a basic loop automation", () => {
afterAll(setup.afterAll) afterAll(setup.afterAll)
async function runLoop(loopOpts?: LoopInput) { async function runLoop(loopOpts?: LoopInput): Promise<AutomationResults> {
const appId = config.getAppId() const appId = config.getAppId()
return await context.doInAppContext(appId, async () => { return await context.doInAppContext(appId, async () => {
const params = { fields: { appId } } const params = { fields: { appId } }
return await triggers.externalTrigger( const result = await triggers.externalTrigger(
loopAutomation(table._id!, loopOpts), loopAutomation(table._id!, loopOpts),
params, params,
{ getResponses: true } { getResponses: true }
) )
if ("outputs" in result && !result.outputs.success) {
throw new Error("Unable to proceed - failed to return anything.")
}
return result as AutomationResults
}) })
} }

View File

@ -1,7 +1,9 @@
import { getConfig, runStep, afterAll as _afterAll } from "./utilities" import { getConfig, afterAll as _afterAll } from "./utilities"
import { createAutomationBuilder } from "./utilities/AutomationTestBuilder"
import { OpenAI } from "openai" import { OpenAI } from "openai"
import { setEnv as setCoreEnv } from "@budibase/backend-core" import { setEnv as setCoreEnv } from "@budibase/backend-core"
import * as pro from "@budibase/pro" import * as pro from "@budibase/pro"
import { Model } from "@budibase/types"
jest.mock("openai", () => ({ jest.mock("openai", () => ({
OpenAI: jest.fn().mockImplementation(() => ({ OpenAI: jest.fn().mockImplementation(() => ({
@ -25,7 +27,7 @@ jest.mock("@budibase/pro", () => ({
ai: { ai: {
LargeLanguageModel: { LargeLanguageModel: {
forCurrentTenant: jest.fn().mockImplementation(() => ({ forCurrentTenant: jest.fn().mockImplementation(() => ({
initialised: true, llm: {},
init: jest.fn(), init: jest.fn(),
run: jest.fn(), run: jest.fn(),
})), })),
@ -47,6 +49,7 @@ describe("test the openai action", () => {
let resetEnv: () => void | undefined let resetEnv: () => void | undefined
beforeAll(async () => { beforeAll(async () => {
setCoreEnv({ SELF_HOSTED: true })
await config.init() await config.init()
}) })
@ -62,17 +65,39 @@ describe("test the openai action", () => {
afterAll(_afterAll) afterAll(_afterAll)
it("should be able to receive a response from ChatGPT given a prompt", async () => { it("should be able to receive a response from ChatGPT given a prompt", async () => {
const res = await runStep(config, "OPENAI", { prompt: OPENAI_PROMPT }) setCoreEnv({ SELF_HOSTED: true })
expect(res.response).toEqual("This is a test")
expect(res.success).toBeTruthy() const result = await createAutomationBuilder({
name: "Test OpenAI Response",
config,
})
.appAction({ fields: {} })
.openai(
{ prompt: OPENAI_PROMPT, model: Model.GPT_4O_MINI },
{ stepName: "Basic OpenAI Query" }
)
.run()
expect(result.steps[0].outputs.response).toEqual("This is a test")
expect(result.steps[0].outputs.success).toBeTruthy()
}) })
it("should present the correct error message when a prompt is not provided", async () => { it("should present the correct error message when a prompt is not provided", async () => {
const res = await runStep(config, "OPENAI", { prompt: null }) const result = await createAutomationBuilder({
expect(res.response).toEqual( name: "Test OpenAI No Prompt",
config,
})
.appAction({ fields: {} })
.openai(
{ prompt: "", model: Model.GPT_4O_MINI },
{ stepName: "Empty Prompt Query" }
)
.run()
expect(result.steps[0].outputs.response).toEqual(
"Budibase OpenAI Automation Failed: No prompt supplied" "Budibase OpenAI Automation Failed: No prompt supplied"
) )
expect(res.success).toBeFalsy() expect(result.steps[0].outputs.success).toBeFalsy()
}) })
it("should present the correct error message when an error is thrown from the createChatCompletion call", async () => { it("should present the correct error message when an error is thrown from the createChatCompletion call", async () => {
@ -91,14 +116,21 @@ describe("test the openai action", () => {
} as any) } as any)
) )
const res = await runStep(config, "OPENAI", { const result = await createAutomationBuilder({
prompt: OPENAI_PROMPT, name: "Test OpenAI Error",
config,
}) })
.appAction({ fields: {} })
.openai(
{ prompt: OPENAI_PROMPT, model: Model.GPT_4O_MINI },
{ stepName: "Error Producing Query" }
)
.run()
expect(res.response).toEqual( expect(result.steps[0].outputs.response).toEqual(
"Error: An error occurred while calling createChatCompletion" "Error: An error occurred while calling createChatCompletion"
) )
expect(res.success).toBeFalsy() expect(result.steps[0].outputs.success).toBeFalsy()
}) })
it("should ensure that the pro AI module is called when the budibase AI features are enabled", async () => { it("should ensure that the pro AI module is called when the budibase AI features are enabled", async () => {
@ -106,10 +138,19 @@ describe("test the openai action", () => {
jest.spyOn(pro.features, "isAICustomConfigsEnabled").mockResolvedValue(true) jest.spyOn(pro.features, "isAICustomConfigsEnabled").mockResolvedValue(true)
const prompt = "What is the meaning of life?" const prompt = "What is the meaning of life?"
await runStep(config, "OPENAI", { await createAutomationBuilder({
model: "gpt-4o-mini", name: "Test OpenAI Pro Features",
prompt, config,
}) })
.appAction({ fields: {} })
.openai(
{
model: Model.GPT_4O_MINI,
prompt,
},
{ stepName: "Pro Features Query" }
)
.run()
expect(pro.ai.LargeLanguageModel.forCurrentTenant).toHaveBeenCalledWith( expect(pro.ai.LargeLanguageModel.forCurrentTenant).toHaveBeenCalledWith(
"gpt-4o-mini" "gpt-4o-mini"

View File

@ -1,5 +1,7 @@
import { Table } from "@budibase/types" import { EmptyFilterOption, SortOrder, Table } from "@budibase/types"
import * as setup from "./utilities" import * as setup from "./utilities"
import { createAutomationBuilder } from "./utilities/AutomationTestBuilder"
import * as automation from "../index"
const NAME = "Test" const NAME = "Test"
@ -8,6 +10,7 @@ describe("Test a query step automation", () => {
let config = setup.getConfig() let config = setup.getConfig()
beforeAll(async () => { beforeAll(async () => {
await automation.init()
await config.init() await config.init()
table = await config.createTable() table = await config.createTable()
const row = { const row = {
@ -22,107 +25,132 @@ describe("Test a query step automation", () => {
afterAll(setup.afterAll) afterAll(setup.afterAll)
it("should be able to run the query step", async () => { it("should be able to run the query step", async () => {
const inputs = { const result = await createAutomationBuilder({
tableId: table._id, name: "Basic Query Test",
filters: {
equal: {
name: NAME,
},
},
sortColumn: "name",
sortOrder: "ascending",
limit: 10,
}
const res = await setup.runStep(
config, config,
setup.actions.QUERY_ROWS.stepId, })
inputs .appAction({ fields: {} })
) .queryRows(
expect(res.success).toBe(true) {
expect(res.rows).toBeDefined() tableId: table._id!,
expect(res.rows.length).toBe(2) filters: {
expect(res.rows[0].name).toBe(NAME) equal: {
name: NAME,
},
},
sortColumn: "name",
sortOrder: SortOrder.ASCENDING,
limit: 10,
},
{ stepName: "Query All Rows" }
)
.run()
expect(result.steps[0].outputs.success).toBe(true)
expect(result.steps[0].outputs.rows).toBeDefined()
expect(result.steps[0].outputs.rows.length).toBe(2)
expect(result.steps[0].outputs.rows[0].name).toBe(NAME)
}) })
it("Returns all rows when onEmptyFilter has no value and no filters are passed", async () => { it("Returns all rows when onEmptyFilter has no value and no filters are passed", async () => {
const inputs = { const result = await createAutomationBuilder({
tableId: table._id, name: "Empty Filter Test",
filters: {},
sortColumn: "name",
sortOrder: "ascending",
limit: 10,
}
const res = await setup.runStep(
config, config,
setup.actions.QUERY_ROWS.stepId, })
inputs .appAction({ fields: {} })
) .queryRows(
expect(res.success).toBe(true) {
expect(res.rows).toBeDefined() tableId: table._id!,
expect(res.rows.length).toBe(2) filters: {},
expect(res.rows[0].name).toBe(NAME) sortColumn: "name",
sortOrder: SortOrder.ASCENDING,
limit: 10,
},
{ stepName: "Query With Empty Filter" }
)
.run()
expect(result.steps[0].outputs.success).toBe(true)
expect(result.steps[0].outputs.rows).toBeDefined()
expect(result.steps[0].outputs.rows.length).toBe(2)
expect(result.steps[0].outputs.rows[0].name).toBe(NAME)
}) })
it("Returns no rows when onEmptyFilter is RETURN_NONE and theres no filters", async () => { it("Returns no rows when onEmptyFilter is RETURN_NONE and theres no filters", async () => {
const inputs = { const result = await createAutomationBuilder({
tableId: table._id, name: "Return None Test",
filters: {},
"filters-def": [],
sortColumn: "name",
sortOrder: "ascending",
limit: 10,
onEmptyFilter: "none",
}
const res = await setup.runStep(
config, config,
setup.actions.QUERY_ROWS.stepId, })
inputs .appAction({ fields: {} })
) .queryRows(
expect(res.success).toBe(false) {
expect(res.rows).toBeDefined() tableId: table._id!,
expect(res.rows.length).toBe(0) filters: {},
"filters-def": [],
sortColumn: "name",
sortOrder: SortOrder.ASCENDING,
limit: 10,
onEmptyFilter: EmptyFilterOption.RETURN_NONE,
},
{ stepName: "Query With Return None" }
)
.run()
expect(result.steps[0].outputs.success).toBe(false)
expect(result.steps[0].outputs.rows).toBeDefined()
expect(result.steps[0].outputs.rows.length).toBe(0)
}) })
it("Returns no rows when onEmptyFilters RETURN_NONE and a filter is passed with a null value", async () => { it("Returns no rows when onEmptyFilters RETURN_NONE and a filter is passed with a null value", async () => {
const inputs = { const result = await createAutomationBuilder({
tableId: table._id, name: "Null Filter Test",
onEmptyFilter: "none",
filters: {},
"filters-def": [
{
value: null,
},
],
sortColumn: "name",
sortOrder: "ascending",
limit: 10,
}
const res = await setup.runStep(
config, config,
setup.actions.QUERY_ROWS.stepId, })
inputs .appAction({ fields: {} })
) .queryRows(
expect(res.success).toBe(false) {
expect(res.rows).toBeDefined() tableId: table._id!,
expect(res.rows.length).toBe(0) onEmptyFilter: EmptyFilterOption.RETURN_NONE,
filters: {},
"filters-def": [
{
value: null,
},
],
sortColumn: "name",
sortOrder: SortOrder.ASCENDING,
limit: 10,
},
{ stepName: "Query With Null Filter" }
)
.run()
expect(result.steps[0].outputs.success).toBe(false)
expect(result.steps[0].outputs.rows).toBeDefined()
expect(result.steps[0].outputs.rows.length).toBe(0)
}) })
it("Returns rows when onEmptyFilter is RETURN_ALL and no filter is passed", async () => { it("Returns rows when onEmptyFilter is RETURN_ALL and no filter is passed", async () => {
const inputs = { const result = await createAutomationBuilder({
tableId: table._id, name: "Return All Test",
onEmptyFilter: "all",
filters: {},
sortColumn: "name",
sortOrder: "ascending",
limit: 10,
}
const res = await setup.runStep(
config, config,
setup.actions.QUERY_ROWS.stepId, })
inputs .appAction({ fields: {} })
) .queryRows(
expect(res.success).toBe(true) {
expect(res.rows).toBeDefined() tableId: table._id!,
expect(res.rows.length).toBe(2) onEmptyFilter: EmptyFilterOption.RETURN_ALL,
filters: {},
sortColumn: "name",
sortOrder: SortOrder.ASCENDING,
limit: 10,
},
{ stepName: "Query With Return All" }
)
.run()
expect(result.steps[0].outputs.success).toBe(true)
expect(result.steps[0].outputs.rows).toBeDefined()
expect(result.steps[0].outputs.rows.length).toBe(2)
}) })
}) })

View File

@ -152,6 +152,44 @@ describe("Loop automations", () => {
) )
}) })
it("ensure the loop stops if the max iterations are reached", async () => {
const builder = createAutomationBuilder({
name: "Test Loop max iterations",
})
const results = await builder
.appAction({ fields: {} })
.loop({
option: LoopStepType.ARRAY,
binding: ["test", "test2", "test3"],
iterations: 2,
})
.serverLog({ text: "{{loop.currentItem}}" })
.serverLog({ text: "{{steps.1.iterations}}" })
.run()
expect(results.steps[0].outputs.iterations).toBe(2)
})
it("should run an automation with loop and max iterations to ensure context correctness further down the tree", async () => {
const builder = createAutomationBuilder({
name: "Test context down tree with Loop and max iterations",
})
const results = await builder
.appAction({ fields: {} })
.loop({
option: LoopStepType.ARRAY,
binding: ["test", "test2", "test3"],
iterations: 2,
})
.serverLog({ text: "{{loop.currentItem}}" })
.serverLog({ text: "{{steps.1.iterations}}" })
.run()
expect(results.steps[1].outputs.message).toContain("- 2")
})
it("should run an automation where a loop is successfully run twice", async () => { it("should run an automation where a loop is successfully run twice", async () => {
const builder = createAutomationBuilder({ const builder = createAutomationBuilder({
name: "Test Trigger with Loop and Create Row", name: "Test Trigger with Loop and Create Row",

View File

@ -35,6 +35,8 @@ import {
Branch, Branch,
FilterStepInputs, FilterStepInputs,
ExecuteScriptStepInputs, ExecuteScriptStepInputs,
OpenAIStepInputs,
BashStepInputs,
} from "@budibase/types" } from "@budibase/types"
import TestConfiguration from "../../../tests/utilities/TestConfiguration" import TestConfiguration from "../../../tests/utilities/TestConfiguration"
import * as setup from "../utilities" import * as setup from "../utilities"
@ -221,6 +223,30 @@ class BaseStepBuilder {
input input
) )
} }
bash(
input: BashStepInputs,
opts?: { stepName?: string; stepId?: string }
): this {
return this.step(
AutomationActionStepId.EXECUTE_BASH,
BUILTIN_ACTION_DEFINITIONS.EXECUTE_BASH,
input,
opts
)
}
openai(
input: OpenAIStepInputs,
opts?: { stepName?: string; stepId?: string }
): this {
return this.step(
AutomationActionStepId.OPENAI,
BUILTIN_ACTION_DEFINITIONS.OPENAI,
input,
opts
)
}
} }
class StepBuilder extends BaseStepBuilder { class StepBuilder extends BaseStepBuilder {
build(): AutomationStep[] { build(): AutomationStep[] {

View File

@ -20,6 +20,7 @@ import {
AutomationStatus, AutomationStatus,
AutomationRowEvent, AutomationRowEvent,
UserBindings, UserBindings,
AutomationResults,
} from "@budibase/types" } from "@budibase/types"
import { executeInThread } from "../threads/automation" import { executeInThread } from "../threads/automation"
import { dataFilters, sdk } from "@budibase/shared-core" import { dataFilters, sdk } from "@budibase/shared-core"
@ -32,6 +33,14 @@ const JOB_OPTS = {
import * as automationUtils from "../automations/automationUtils" import * as automationUtils from "../automations/automationUtils"
import { doesTableExist } from "../sdk/app/tables/getters" import { doesTableExist } from "../sdk/app/tables/getters"
type DidNotTriggerResponse = {
outputs: {
success: false
status: AutomationStatus.STOPPED
}
message: AutomationStoppedReason.TRIGGER_FILTER_NOT_MET
}
async function getAllAutomations() { async function getAllAutomations() {
const db = context.getAppDB() const db = context.getAppDB()
let automations = await db.allDocs<Automation>( let automations = await db.allDocs<Automation>(
@ -139,6 +148,14 @@ function rowPassesFilters(row: Row, filters: SearchFilters) {
return filteredRows.length > 0 return filteredRows.length > 0
} }
export function isAutomationResults(
response: AutomationResults | DidNotTriggerResponse | AutomationJob
): response is AutomationResults {
return (
response !== null && "steps" in response && Array.isArray(response.steps)
)
}
export async function externalTrigger( export async function externalTrigger(
automation: Automation, automation: Automation,
params: { params: {
@ -148,7 +165,7 @@ export async function externalTrigger(
user?: UserBindings user?: UserBindings
}, },
{ getResponses }: { getResponses?: boolean } = {} { getResponses }: { getResponses?: boolean } = {}
): Promise<any> { ): Promise<AutomationResults | DidNotTriggerResponse | AutomationJob> {
if (automation.disabled) { if (automation.disabled) {
throw new Error("Automation is disabled") throw new Error("Automation is disabled")
} }

View File

@ -2,16 +2,18 @@ import { Thread, ThreadType } from "../threads"
import { definitions } from "./triggerInfo" import { definitions } from "./triggerInfo"
import { automationQueue } from "./bullboard" import { automationQueue } from "./bullboard"
import { updateEntityMetadata } from "../utilities" import { updateEntityMetadata } from "../utilities"
import { MetadataTypes } from "../constants"
import { context, db as dbCore, utils } from "@budibase/backend-core" import { context, db as dbCore, utils } from "@budibase/backend-core"
import { getAutomationMetadataParams } from "../db/utils" import { getAutomationMetadataParams } from "../db/utils"
import { cloneDeep } from "lodash/fp" import { cloneDeep } from "lodash/fp"
import { quotas } from "@budibase/pro" import { quotas } from "@budibase/pro"
import { import {
Automation, Automation,
AutomationActionStepId,
AutomationJob, AutomationJob,
AutomationStepDefinition, AutomationStepDefinition,
AutomationTriggerDefinition, AutomationTriggerDefinition,
AutomationTriggerStepId,
MetadataType,
} from "@budibase/types" } from "@budibase/types"
import { automationsEnabled } from "../features" import { automationsEnabled } from "../features"
import { helpers, REBOOT_CRON } from "@budibase/shared-core" import { helpers, REBOOT_CRON } from "@budibase/shared-core"
@ -105,7 +107,7 @@ export async function updateTestHistory(
history: any history: any
) { ) {
return updateEntityMetadata( return updateEntityMetadata(
MetadataTypes.AUTOMATION_TEST_HISTORY, MetadataType.AUTOMATION_TEST_HISTORY,
automation._id, automation._id,
(metadata: any) => { (metadata: any) => {
if (metadata && Array.isArray(metadata.history)) { if (metadata && Array.isArray(metadata.history)) {
@ -120,19 +122,21 @@ export async function updateTestHistory(
) )
} }
export function removeDeprecated( export function removeDeprecated<
definitions: Record< T extends
| Record<keyof typeof AutomationTriggerStepId, AutomationTriggerDefinition>
| Record<keyof typeof AutomationActionStepId, AutomationStepDefinition>
>(definitions: T): T {
const base: Record<
string, string,
AutomationStepDefinition | AutomationTriggerDefinition AutomationTriggerDefinition | AutomationStepDefinition
> > = cloneDeep(definitions)
) {
const base = cloneDeep(definitions)
for (let key of Object.keys(base)) { for (let key of Object.keys(base)) {
if (base[key].deprecated) { if (base[key].deprecated) {
delete base[key] delete base[key]
} }
} }
return base return base as T
} }
// end the repetition and the job itself // end the repetition and the job itself

View File

@ -124,11 +124,6 @@ export enum BaseQueryVerbs {
DELETE = "delete", DELETE = "delete",
} }
export enum MetadataTypes {
AUTOMATION_TEST_INPUT = "automationTestInput",
AUTOMATION_TEST_HISTORY = "automationTestHistory",
}
export enum InvalidColumns { export enum InvalidColumns {
ID = "_id", ID = "_id",
REV = "_rev", REV = "_rev",
@ -137,7 +132,6 @@ export enum InvalidColumns {
export enum AutomationErrors { export enum AutomationErrors {
INCORRECT_TYPE = "INCORRECT_TYPE", INCORRECT_TYPE = "INCORRECT_TYPE",
MAX_ITERATIONS = "MAX_ITERATIONS_REACHED",
FAILURE_CONDITION = "FAILURE_CONDITION_MET", FAILURE_CONDITION = "FAILURE_CONDITION_MET",
} }

View File

@ -26,3 +26,6 @@ export interface AutomationContext extends AutomationResults {
company?: string company?: string
} }
} }
export interface AutomationResponse
extends Omit<AutomationContext, "stepsByName" | "stepsById"> {}

View File

@ -193,6 +193,34 @@ const SCHEMA: Integration = {
}, },
} }
interface MSSQLColumnDefinition {
TableName: string
ColumnName: string
DataType: string
MaxLength: number
IsNullable: boolean
IsIdentity: boolean
Precision: number
Scale: number
}
interface ColumnDefinitionMetadata {
usesMaxLength?: boolean
usesPrecision?: boolean
}
const COLUMN_DEFINITION_METADATA: Record<string, ColumnDefinitionMetadata> = {
DATETIME2: { usesMaxLength: true },
TIME: { usesMaxLength: true },
DATETIMEOFFSET: { usesMaxLength: true },
NCHAR: { usesMaxLength: true },
NVARCHAR: { usesMaxLength: true },
BINARY: { usesMaxLength: true },
VARBINARY: { usesMaxLength: true },
DECIMAL: { usesPrecision: true },
NUMERIC: { usesPrecision: true },
}
class SqlServerIntegration extends Sql implements DatasourcePlus { class SqlServerIntegration extends Sql implements DatasourcePlus {
private readonly config: MSSQLConfig private readonly config: MSSQLConfig
private index: number = 0 private index: number = 0
@ -527,20 +555,24 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
return this.queryWithReturning(json, queryFn, processFn) return this.queryWithReturning(json, queryFn, processFn)
} }
async getExternalSchema() { private async getColumnDefinitions(): Promise<MSSQLColumnDefinition[]> {
// Query to retrieve table schema // Query to retrieve table schema
const query = ` const query = `
SELECT SELECT
t.name AS TableName, t.name AS TableName,
c.name AS ColumnName, c.name AS ColumnName,
ty.name AS DataType, ty.name AS DataType,
ty.precision AS Precision,
ty.scale AS Scale,
c.max_length AS MaxLength, c.max_length AS MaxLength,
c.is_nullable AS IsNullable, c.is_nullable AS IsNullable,
c.is_identity AS IsIdentity c.is_identity AS IsIdentity
FROM FROM
sys.tables t sys.tables t
INNER JOIN sys.columns c ON t.object_id = c.object_id INNER JOIN sys.columns c ON t.object_id = c.object_id
INNER JOIN sys.types ty ON c.system_type_id = ty.system_type_id INNER JOIN sys.types ty
ON c.system_type_id = ty.system_type_id
AND c.user_type_id = ty.user_type_id
WHERE WHERE
t.is_ms_shipped = 0 t.is_ms_shipped = 0
ORDER BY ORDER BY
@ -553,17 +585,36 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
sql: query, sql: query,
}) })
return result.recordset as MSSQLColumnDefinition[]
}
private getDataType(columnDef: MSSQLColumnDefinition): string {
const { DataType, MaxLength, Precision, Scale } = columnDef
const { usesMaxLength = false, usesPrecision = false } =
COLUMN_DEFINITION_METADATA[DataType] || {}
let dataType = DataType
if (usesMaxLength) {
if (MaxLength === -1) {
dataType += `(MAX)`
} else {
dataType += `(${MaxLength})`
}
}
if (usesPrecision) {
dataType += `(${Precision}, ${Scale})`
}
return dataType
}
async getExternalSchema() {
const scriptParts = [] const scriptParts = []
const tables: any = {} const tables: any = {}
for (const row of result.recordset) { const columns = await this.getColumnDefinitions()
const { for (const row of columns) {
TableName, const { TableName, ColumnName, IsNullable, IsIdentity } = row
ColumnName,
DataType,
MaxLength,
IsNullable,
IsIdentity,
} = row
if (!tables[TableName]) { if (!tables[TableName]) {
tables[TableName] = { tables[TableName] = {
@ -571,9 +622,11 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
} }
} }
const columnDefinition = `${ColumnName} ${DataType}${ const nullable = IsNullable ? "NULL" : "NOT NULL"
MaxLength ? `(${MaxLength})` : "" const identity = IsIdentity ? "IDENTITY" : ""
}${IsNullable ? " NULL" : " NOT NULL"}` const columnDefinition = `[${ColumnName}] ${this.getDataType(
row
)} ${nullable} ${identity}`
tables[TableName].columns.push(columnDefinition) tables[TableName].columns.push(columnDefinition)

View File

@ -322,9 +322,7 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
presence: required && !isAuto && !hasDefault, presence: required && !isAuto && !hasDefault,
externalType: column.Type, externalType: column.Type,
options: column.Type.startsWith("enum") options: column.Type.startsWith("enum")
? column.Type.substring(5, column.Type.length - 1) ? column.Type.substring(6, column.Type.length - 2).split("','")
.split(",")
.map(str => str.replace(/^'(.*)'$/, "$1"))
: undefined, : undefined,
}) })
} }
@ -414,7 +412,7 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
async getExternalSchema() { async getExternalSchema() {
try { try {
const [databaseResult] = await this.internalQuery({ const [databaseResult] = await this.internalQuery({
sql: `SHOW CREATE DATABASE ${this.config.database}`, sql: `SHOW CREATE DATABASE IF NOT EXISTS \`${this.config.database}\``,
}) })
let dumpContent = [databaseResult["Create Database"]] let dumpContent = [databaseResult["Create Database"]]
@ -434,7 +432,7 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
dumpContent.push(createTableStatement) dumpContent.push(createTableStatement)
} }
return dumpContent.join("\n") return dumpContent.join(";\n") + ";"
} finally { } finally {
this.disconnect() this.disconnect()
} }

View File

@ -476,21 +476,15 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
this.config.password this.config.password
}" pg_dump --schema-only "${dumpCommandParts.join(" ")}"` }" pg_dump --schema-only "${dumpCommandParts.join(" ")}"`
return new Promise<string>((res, rej) => { return new Promise<string>((resolve, reject) => {
exec(dumpCommand, (error, stdout, stderr) => { exec(dumpCommand, (error, stdout, stderr) => {
if (error) { if (error || stderr) {
console.error(`Error generating dump: ${error.message}`) console.error(stderr)
rej(error.message) reject(new Error(stderr))
return return
} }
if (stderr) { resolve(stdout)
console.error(`pg_dump error: ${stderr}`)
rej(stderr)
return
}
res(stdout)
console.log("SQL dump generated successfully!") console.log("SQL dump generated successfully!")
}) })
}) })

View File

@ -102,6 +102,9 @@ function createDummyTest() {
} }
export function datasourceDescribe(opts: DatasourceDescribeOpts) { export function datasourceDescribe(opts: DatasourceDescribeOpts) {
// tests that call this need a lot longer timeouts
jest.setTimeout(120000)
if (process.env.DATASOURCE === "none") { if (process.env.DATASOURCE === "none") {
createDummyTest() createDummyTest()
} }
@ -146,6 +149,7 @@ export function datasourceDescribe(opts: DatasourceDescribeOpts) {
isMongodb: dbName === DatabaseName.MONGODB, isMongodb: dbName === DatabaseName.MONGODB,
isMSSQL: dbName === DatabaseName.SQL_SERVER, isMSSQL: dbName === DatabaseName.SQL_SERVER,
isOracle: dbName === DatabaseName.ORACLE, isOracle: dbName === DatabaseName.ORACLE,
isMariaDB: dbName === DatabaseName.MARIADB,
})) }))
} }
@ -155,19 +159,19 @@ function getDatasource(
return providers[sourceName]() return providers[sourceName]()
} }
export async function knexClient(ds: Datasource) { export async function knexClient(ds: Datasource, opts?: Knex.Config) {
switch (ds.source) { switch (ds.source) {
case SourceName.POSTGRES: { case SourceName.POSTGRES: {
return postgres.knexClient(ds) return postgres.knexClient(ds, opts)
} }
case SourceName.MYSQL: { case SourceName.MYSQL: {
return mysql.knexClient(ds) return mysql.knexClient(ds, opts)
} }
case SourceName.SQL_SERVER: { case SourceName.SQL_SERVER: {
return mssql.knexClient(ds) return mssql.knexClient(ds, opts)
} }
case SourceName.ORACLE: { case SourceName.ORACLE: {
return oracle.knexClient(ds) return oracle.knexClient(ds, opts)
} }
default: { default: {
throw new Error(`Unsupported source: ${ds.source}`) throw new Error(`Unsupported source: ${ds.source}`)

View File

@ -2,7 +2,7 @@ import { Datasource, SourceName } from "@budibase/types"
import { GenericContainer, Wait } from "testcontainers" import { GenericContainer, Wait } from "testcontainers"
import { generator, testContainerUtils } from "@budibase/backend-core/tests" import { generator, testContainerUtils } from "@budibase/backend-core/tests"
import { startContainer } from "." import { startContainer } from "."
import knex from "knex" import knex, { Knex } from "knex"
import { MSSQL_IMAGE } from "./images" import { MSSQL_IMAGE } from "./images"
let ports: Promise<testContainerUtils.Port[]> let ports: Promise<testContainerUtils.Port[]>
@ -57,7 +57,7 @@ export async function getDatasource(): Promise<Datasource> {
return datasource return datasource
} }
export async function knexClient(ds: Datasource) { export async function knexClient(ds: Datasource, opts?: Knex.Config) {
if (!ds.config) { if (!ds.config) {
throw new Error("Datasource config is missing") throw new Error("Datasource config is missing")
} }
@ -68,5 +68,6 @@ export async function knexClient(ds: Datasource) {
return knex({ return knex({
client: "mssql", client: "mssql",
connection: ds.config, connection: ds.config,
...opts,
}) })
} }

View File

@ -3,7 +3,7 @@ import { GenericContainer, Wait } from "testcontainers"
import { AbstractWaitStrategy } from "testcontainers/build/wait-strategies/wait-strategy" import { AbstractWaitStrategy } from "testcontainers/build/wait-strategies/wait-strategy"
import { generator, testContainerUtils } from "@budibase/backend-core/tests" import { generator, testContainerUtils } from "@budibase/backend-core/tests"
import { startContainer } from "." import { startContainer } from "."
import knex from "knex" import knex, { Knex } from "knex"
import { MYSQL_IMAGE } from "./images" import { MYSQL_IMAGE } from "./images"
let ports: Promise<testContainerUtils.Port[]> let ports: Promise<testContainerUtils.Port[]>
@ -63,7 +63,7 @@ export async function getDatasource(): Promise<Datasource> {
return datasource return datasource
} }
export async function knexClient(ds: Datasource) { export async function knexClient(ds: Datasource, opts?: Knex.Config) {
if (!ds.config) { if (!ds.config) {
throw new Error("Datasource config is missing") throw new Error("Datasource config is missing")
} }
@ -74,5 +74,6 @@ export async function knexClient(ds: Datasource) {
return knex({ return knex({
client: "mysql2", client: "mysql2",
connection: ds.config, connection: ds.config,
...opts,
}) })
} }

View File

@ -2,7 +2,7 @@ import { Datasource, SourceName } from "@budibase/types"
import { GenericContainer, Wait } from "testcontainers" import { GenericContainer, Wait } from "testcontainers"
import { generator, testContainerUtils } from "@budibase/backend-core/tests" import { generator, testContainerUtils } from "@budibase/backend-core/tests"
import { startContainer } from "." import { startContainer } from "."
import knex from "knex" import knex, { Knex } from "knex"
let ports: Promise<testContainerUtils.Port[]> let ports: Promise<testContainerUtils.Port[]>
@ -25,7 +25,7 @@ export async function getDatasource(): Promise<Datasource> {
}) })
.withWaitStrategy( .withWaitStrategy(
Wait.forLogMessage("DATABASE IS READY TO USE!").withStartupTimeout( Wait.forLogMessage("DATABASE IS READY TO USE!").withStartupTimeout(
20000 60000
) )
) )
) )
@ -58,7 +58,7 @@ export async function getDatasource(): Promise<Datasource> {
return datasource return datasource
} }
export async function knexClient(ds: Datasource) { export async function knexClient(ds: Datasource, opts?: Knex.Config) {
if (!ds.config) { if (!ds.config) {
throw new Error("Datasource config is missing") throw new Error("Datasource config is missing")
} }
@ -76,6 +76,7 @@ export async function knexClient(ds: Datasource) {
user: ds.config.user, user: ds.config.user,
password: ds.config.password, password: ds.config.password,
}, },
...opts,
}) })
return c return c

View File

@ -2,7 +2,7 @@ import { Datasource, SourceName } from "@budibase/types"
import { GenericContainer, Wait } from "testcontainers" import { GenericContainer, Wait } from "testcontainers"
import { generator, testContainerUtils } from "@budibase/backend-core/tests" import { generator, testContainerUtils } from "@budibase/backend-core/tests"
import { startContainer } from "." import { startContainer } from "."
import knex from "knex" import knex, { Knex } from "knex"
import { POSTGRES_IMAGE } from "./images" import { POSTGRES_IMAGE } from "./images"
let ports: Promise<testContainerUtils.Port[]> let ports: Promise<testContainerUtils.Port[]>
@ -51,7 +51,10 @@ export async function getDatasource(): Promise<Datasource> {
return datasource return datasource
} }
export async function knexClient(ds: Datasource) { export async function knexClient(
ds: Datasource,
opts?: Knex.Config
): Promise<Knex> {
if (!ds.config) { if (!ds.config) {
throw new Error("Datasource config is missing") throw new Error("Datasource config is missing")
} }
@ -62,5 +65,6 @@ export async function knexClient(ds: Datasource) {
return knex({ return knex({
client: "pg", client: "pg",
connection: ds.config, connection: ds.config,
...opts,
}) })
} }

View File

@ -138,12 +138,22 @@ export function generateColumnDefinition(config: {
let { externalType, autocolumn, name, presence, options } = config let { externalType, autocolumn, name, presence, options } = config
let foundType = FieldType.STRING let foundType = FieldType.STRING
const lowerCaseType = externalType.toLowerCase() const lowerCaseType = externalType.toLowerCase()
let matchingTypes = [] let matchingTypes: { external: string; internal: PrimitiveTypes }[] = []
for (let [external, internal] of Object.entries(SQL_TYPE_MAP)) {
if (lowerCaseType.includes(external)) { // In at least MySQL, the external type of an ENUM column is "enum('option1',
matchingTypes.push({ external, internal }) // 'option2', ...)", which can potentially contain any type name as a
// substring. To get around this interfering with the loop below, we first
// check for an enum column and handle that separately.
if (lowerCaseType.startsWith("enum")) {
matchingTypes.push({ external: "enum", internal: FieldType.OPTIONS })
} else {
for (let [external, internal] of Object.entries(SQL_TYPE_MAP)) {
if (lowerCaseType.includes(external)) {
matchingTypes.push({ external, internal })
}
} }
} }
// Set the foundType based the longest match // Set the foundType based the longest match
if (matchingTypes.length > 0) { if (matchingTypes.length > 0) {
foundType = matchingTypes.reduce((acc, val) => { foundType = matchingTypes.reduce((acc, val) => {

View File

@ -1,5 +1,5 @@
import { isDevAppID, isProdAppID } from "../db/utils" import { isDevAppID, isProdAppID } from "../db/utils"
import { BBContext } from "@budibase/types" import { Ctx } from "@budibase/types"
export enum AppType { export enum AppType {
DEV = "dev", DEV = "dev",
@ -7,7 +7,7 @@ export enum AppType {
} }
export function middleware({ appType }: { appType?: AppType } = {}) { export function middleware({ appType }: { appType?: AppType } = {}) {
return (ctx: BBContext, next: any) => { return (ctx: Ctx, next: any) => {
const appId = ctx.appId const appId = ctx.appId
if (appType === AppType.DEV && appId && !isDevAppID(appId)) { if (appType === AppType.DEV && appId && !isDevAppID(appId)) {
ctx.throw(400, "Only apps in development support this endpoint") ctx.throw(400, "Only apps in development support this endpoint")

View File

@ -1,8 +1,9 @@
import { UserCtx } from "@budibase/types" import { UserCtx } from "@budibase/types"
import { checkMissingMigrations } from "../appMigrations" import { checkMissingMigrations } from "../appMigrations"
import env from "../environment" import env from "../environment"
import type { Middleware, Next } from "koa"
export default async (ctx: UserCtx, next: any) => { const middleware = (async (ctx: UserCtx, next: Next) => {
const { appId } = ctx const { appId } = ctx
// migrations can be disabled via environment variable if you // migrations can be disabled via environment variable if you
@ -16,4 +17,6 @@ export default async (ctx: UserCtx, next: any) => {
} }
return checkMissingMigrations(ctx, next, appId) return checkMissingMigrations(ctx, next, appId)
} }) as Middleware
export default middleware

View File

@ -1,8 +1,9 @@
import { Ctx } from "@budibase/types" import { Ctx } from "@budibase/types"
import { context } from "@budibase/backend-core" import { context } from "@budibase/backend-core"
import { tracer } from "dd-trace" import { tracer } from "dd-trace"
import type { Middleware, Next } from "koa"
export default async (ctx: Ctx, next: any) => { const middleware = (async (ctx: Ctx, next: Next) => {
const resp = await next() const resp = await next()
const current = context.getCurrentContext() const current = context.getCurrentContext()
@ -30,4 +31,6 @@ export default async (ctx: Ctx, next: any) => {
} }
return resp return resp
} }) as Middleware
export default middleware

View File

@ -13,8 +13,9 @@ import env from "../environment"
import { isWebhookEndpoint, isBrowser, isApiKey } from "./utils" import { isWebhookEndpoint, isBrowser, isApiKey } from "./utils"
import { UserCtx, ContextUser } from "@budibase/types" import { UserCtx, ContextUser } from "@budibase/types"
import tracer from "dd-trace" import tracer from "dd-trace"
import type { Middleware, Next } from "koa"
export default async (ctx: UserCtx, next: any) => { const middleware = (async (ctx: UserCtx, next: Next) => {
// try to get the appID from the request // try to get the appID from the request
let requestAppId = await utils.getAppIdFromCtx(ctx) let requestAppId = await utils.getAppIdFromCtx(ctx)
if (!requestAppId) { if (!requestAppId) {
@ -116,4 +117,6 @@ export default async (ctx: UserCtx, next: any) => {
return next() return next()
}) })
} }) as Middleware
export default middleware

Some files were not shown because too many files have changed in this diff Show More