Merge branch 'master' into automation-branching-ux-updates
commit 3efac145a4

@@ -200,6 +200,20 @@ jobs:
       - run: yarn --frozen-lockfile

+      - name: Set up PostgreSQL 16
+        if: matrix.datasource == 'postgres'
+        run: |
+          sudo systemctl stop postgresql
+          sudo apt-get remove --purge -y postgresql* libpq-dev
+          sudo rm -rf /etc/postgresql /var/lib/postgresql
+          sudo apt-get autoremove -y
+          sudo apt-get autoclean
+
+          sudo sh -c 'echo "deb http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list'
+          wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add -
+          sudo apt-get update
+          sudo apt-get install -y postgresql-16
+
       - name: Test server
         env:
           DATASOURCE: ${{ matrix.datasource }}

@@ -22,6 +22,6 @@
     "@types/react": "17.0.39",
     "eslint": "8.10.0",
     "eslint-config-next": "12.1.0",
-    "typescript": "5.5.2"
+    "typescript": "5.7.2"
   }
 }

@@ -47,6 +47,8 @@ async function killContainers(containers: ContainerInfo[]) {
 }

 export default async function setup() {
+  process.env.TESTCONTAINERS_RYUK_DISABLED = "true"
+
   // For whatever reason, testcontainers doesn't always use the correct current
   // docker context. This bit of code forces the issue by finding the current
   // context and setting it as the DOCKER_HOST environment
@@ -75,6 +77,7 @@ export default async function setup() {

   try {
     const couchdb = new GenericContainer("budibase/couchdb:v3.3.3-sqs-v2.1.1")
+      .withName("couchdb_testcontainer")
       .withExposedPorts(5984, 4984)
       .withEnvironment({
         COUCHDB_PASSWORD: "budibase",
@@ -99,6 +102,7 @@ export default async function setup() {
   )

   const minio = new GenericContainer("minio/minio")
+    .withName("minio_testcontainer")
     .withExposedPorts(9000)
     .withCommand(["server", "/data"])
     .withTmpFs({ "/data": "rw" })

@@ -46,6 +46,11 @@ server {
     }

     location ~ ^/api/(system|admin|global)/ {
+        # Enable buffering for potentially large OIDC configs
+        proxy_buffering on;
+        proxy_buffer_size 16k;
+        proxy_buffers 4 32k;
+
         proxy_pass http://127.0.0.1:4002;
     }

@@ -1,6 +1,6 @@
 {
   "$schema": "node_modules/lerna/schemas/lerna-schema.json",
-  "version": "3.2.16",
+  "version": "3.2.25",
   "npmClient": "yarn",
   "concurrency": 20,
   "command": {

@@ -28,7 +28,7 @@
     "proper-lockfile": "^4.1.2",
     "svelte": "4.2.19",
     "svelte-eslint-parser": "^0.33.1",
-    "typescript": "5.5.2",
+    "typescript": "5.7.2",
     "typescript-eslint": "^7.3.1",
     "yargs": "^17.7.2"
   },

@@ -83,6 +83,7 @@
     "@types/semver": "7.3.7",
     "@types/tar-fs": "2.0.1",
     "@types/uuid": "8.3.4",
+    "@types/koa": "2.13.4",
     "chance": "1.1.8",
     "ioredis-mock": "8.9.0",
     "jest": "29.7.0",
@@ -90,9 +91,9 @@
     "nock": "^13.5.6",
     "pino-pretty": "10.0.0",
     "pouchdb-adapter-memory": "7.2.2",
-    "testcontainers": "^10.7.2",
+    "testcontainers": "10.16.0",
     "timekeeper": "2.2.0",
-    "typescript": "5.5.2"
+    "typescript": "5.7.2"
   },
   "nx": {
     "targets": {

@@ -121,7 +121,7 @@ const identifyInstallationGroup = async (

 const identifyTenantGroup = async (
   tenantId: string,
-  account: Account | undefined,
+  hosting: Hosting,
   timestamp?: string | number
 ): Promise<void> => {
   const id = await getEventTenantId(tenantId)
@@ -129,26 +129,12 @@ const identifyTenantGroup = async (
   const installationId = await getInstallationId()
   const environment = getDeploymentEnvironment()

-  let hosting: Hosting
-  let profession: string | undefined
-  let companySize: string | undefined
-
-  if (account) {
-    profession = account.profession
-    companySize = account.size
-    hosting = account.hosting
-  } else {
-    hosting = getHostingFromEnv()
-  }
-
   const group: TenantGroup = {
     id,
     type,
     hosting,
     environment,
     installationId,
-    profession,
-    companySize,
   }

   await identifyGroup(group, timestamp)

@@ -266,12 +266,14 @@ export class FlagSet<V extends Flag<any>, T extends { [key: string]: V }> {
 // new flag, add it here and use the `fetch` and `get` functions to access it.
 // All of the machinery in this file is to make sure that flags have their
 // default values set correctly and their types flow through the system.
-export const flags = new FlagSet({
+const flagsConfig: Record<FeatureFlag, Flag<any>> = {
   [FeatureFlag.DEFAULT_VALUES]: Flag.boolean(true),
   [FeatureFlag.AUTOMATION_BRANCHING]: Flag.boolean(true),
   [FeatureFlag.AI_CUSTOM_CONFIGS]: Flag.boolean(true),
   [FeatureFlag.BUDIBASE_AI]: Flag.boolean(true),
-})
+  [FeatureFlag.USE_ZOD_VALIDATOR]: Flag.boolean(env.isDev()),
+}
+export const flags = new FlagSet(flagsConfig)

 type UnwrapPromise<T> = T extends Promise<infer U> ? U : T
 export type FeatureFlags = UnwrapPromise<ReturnType<typeof flags.fetch>>

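Note: the hunk above turns the flag set into a named, exhaustively typed config — `Record<FeatureFlag, Flag<any>>` forces a compile error whenever a new FeatureFlag member is added without a default. A sketch of how a consumer might read one of these flags, assuming only the `fetch` API visible in this hunk (the import path is illustrative):

    import { flags } from "./features"
    import { FeatureFlag } from "@budibase/types"

    async function shouldUseZodValidator(): Promise<boolean> {
      // fetch() resolves every flag to its concrete value for the current context.
      const current = await flags.fetch()
      return current[FeatureFlag.USE_ZOD_VALIDATOR] === true
    }
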
@@ -1,6 +1,10 @@
-import { BBContext } from "@budibase/types"
+import { Ctx } from "@budibase/types"
+import type { Middleware, Next } from "koa"

-export default async (ctx: BBContext | any, next: any) => {
 // this middleware exists purely to be overridden by middlewares supplied by the @budibase/pro library
+const middleware = (async (ctx: Ctx, next: Next) => {
+  // Placeholder for audit log middleware
   return next()
-}
+}) as Middleware
+
+export default middleware

@@ -22,6 +22,7 @@ import {
 } from "@budibase/types"
 import { ErrorCode, InvalidAPIKeyError } from "../errors"
 import tracer from "dd-trace"
+import type { Middleware, Next } from "koa"

 const ONE_MINUTE = env.SESSION_UPDATE_PERIOD
   ? parseInt(env.SESSION_UPDATE_PERIOD)
@@ -94,6 +95,14 @@ async function checkApiKey(
   })
 }

+function getHeader(ctx: Ctx, header: Header): string | undefined {
+  const contents = ctx.request.headers[header]
+  if (Array.isArray(contents)) {
+    throw new Error("Unexpected header format")
+  }
+  return contents
+}
+
 /**
  * This middleware is tenancy aware, so that it does not depend on other middlewares being used.
  * The tenancy modules should not be used here and it should be assumed that the tenancy context
@@ -106,9 +115,9 @@ export default function (
   }
 ) {
   const noAuthOptions = noAuthPatterns ? buildMatcherRegex(noAuthPatterns) : []
-  return async (ctx: Ctx | any, next: any) => {
+  return (async (ctx: Ctx, next: Next) => {
     let publicEndpoint = false
-    const version = ctx.request.headers[Header.API_VER]
+    const version = getHeader(ctx, Header.API_VER)
     // the path is not authenticated
     const found = matches(ctx, noAuthOptions)
     if (found) {
@@ -116,18 +125,18 @@ export default function (
     }
     try {
       // check the actual user is authenticated first, try header or cookie
-      let headerToken = ctx.request.headers[Header.TOKEN]
+      let headerToken = getHeader(ctx, Header.TOKEN)

       const authCookie =
         getCookie<SessionCookie>(ctx, Cookie.Auth) ||
         openJwt<SessionCookie>(headerToken)
-      let apiKey = ctx.request.headers[Header.API_KEY]
+      let apiKey = getHeader(ctx, Header.API_KEY)

       if (!apiKey && ctx.request.headers[Header.AUTHORIZATION]) {
         apiKey = ctx.request.headers[Header.AUTHORIZATION].split(" ")[1]
       }

-      const tenantId = ctx.request.headers[Header.TENANT_ID]
+      const tenantId = getHeader(ctx, Header.TENANT_ID)
       let authenticated: boolean = false,
         user: User | { tenantId: string } | undefined = undefined,
         internal: boolean = false,
@@ -243,5 +252,5 @@ export default function (
       ctx.throw(err.status || 403, err)
     }
   }
-}
+  }) as Middleware
 }

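Note on the pattern above: wrapping the handler in a function expression cast to Koa's Middleware type, and funnelling all header reads through getHeader, removes the `ctx: Ctx | any` escape hatch while coping with Node's `string | string[] | undefined` header typing. A minimal self-contained sketch of the same pattern (the header name here is illustrative):

    import type { Context, Middleware, Next } from "koa"

    // Node types incoming headers as string | string[] | undefined;
    // narrow to a single optional string.
    function getHeader(ctx: Context, name: string): string | undefined {
      const value = ctx.request.headers[name]
      if (Array.isArray(value)) {
        throw new Error("Unexpected header format")
      }
      return value
    }

    const requireTenantHeader = (async (ctx: Context, next: Next) => {
      if (!getHeader(ctx, "x-budibase-tenant-id")) {
        ctx.throw(403, "Tenant ID required")
      }
      return next()
    }) as Middleware
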
@@ -1,6 +1,7 @@
 import { Header } from "../constants"
 import { buildMatcherRegex, matches } from "./matchers"
-import { BBContext, EndpointMatcher } from "@budibase/types"
+import { Ctx, EndpointMatcher } from "@budibase/types"
+import type { Middleware, Next } from "koa"

 /**
  * GET, HEAD and OPTIONS methods are considered safe operations
@@ -36,7 +37,7 @@ export default function (
   opts: { noCsrfPatterns: EndpointMatcher[] } = { noCsrfPatterns: [] }
 ) {
   const noCsrfOptions = buildMatcherRegex(opts.noCsrfPatterns)
-  return async (ctx: BBContext | any, next: any) => {
+  return (async (ctx: Ctx, next: Next) => {
     // don't apply for excluded paths
     const found = matches(ctx, noCsrfOptions)
     if (found) {
@@ -77,5 +78,5 @@ export default function (
     }

     return next()
-  }
+  }) as Middleware
 }

@@ -1,11 +1,11 @@
 import { Header } from "../constants"
-import { BBContext } from "@budibase/types"
+import { Ctx } from "@budibase/types"
 import { isValidInternalAPIKey } from "../utils"

 /**
  * API Key only endpoint.
  */
-export default async (ctx: BBContext, next: any) => {
+export default async (ctx: Ctx, next: any) => {
   const apiKey = ctx.request.headers[Header.API_KEY]
   if (!apiKey) {
     ctx.throw(403, "Unauthorized")

@@ -1,4 +1,4 @@
-import { BBContext, EndpointMatcher, RegexMatcher } from "@budibase/types"
+import { Ctx, EndpointMatcher, RegexMatcher } from "@budibase/types"

 const PARAM_REGEX = /\/:(.*?)(\/.*)?$/g

@@ -27,7 +27,7 @@ export const buildMatcherRegex = (
   })
 }

-export const matches = (ctx: BBContext, options: RegexMatcher[]) => {
+export const matches = (ctx: Ctx, options: RegexMatcher[]) => {
   return options.find(({ regex, method }) => {
     const urlMatch = regex.test(ctx.request.url)
     const methodMatch =

@@ -2,7 +2,7 @@ import { UserStatus } from "../../constants"
 import { compare } from "../../utils"
 import * as users from "../../users"
 import { authError } from "./utils"
-import { BBContext } from "@budibase/types"
+import { Ctx } from "@budibase/types"

 const INVALID_ERR = "Invalid credentials"
 const EXPIRED = "This account has expired. Please reset your password"
@@ -20,7 +20,7 @@ export const options = {
  * @returns The authenticated user, or errors if they occur
  */
 export async function authenticate(
-  ctx: BBContext,
+  ctx: Ctx,
   email: string,
   password: string,
   done: Function

@@ -3,11 +3,12 @@ import { getTenantIDFromCtx } from "../tenancy"
 import { buildMatcherRegex, matches } from "./matchers"
 import { Header } from "../constants"
 import {
-  BBContext,
+  Ctx,
   EndpointMatcher,
   GetTenantIdOptions,
   TenantResolutionStrategy,
 } from "@budibase/types"
+import type { Next, Middleware } from "koa"

 export default function (
   allowQueryStringPatterns: EndpointMatcher[],
@@ -17,7 +18,7 @@ export default function (
   const allowQsOptions = buildMatcherRegex(allowQueryStringPatterns)
   const noTenancyOptions = buildMatcherRegex(noTenancyPatterns)

-  return async function (ctx: BBContext | any, next: any) {
+  return async function (ctx: Ctx, next: Next) {
     const allowNoTenant =
       opts.noTenancyRequired || !!matches(ctx, noTenancyOptions)
     const tenantOpts: GetTenantIdOptions = {
@@ -32,5 +33,5 @@ export default function (
     const tenantId = getTenantIDFromCtx(ctx, tenantOpts)
     ctx.set(Header.TENANT_ID, tenantId as string)
     return doInTenant(tenantId, next)
-  }
+  } as Middleware
 }

@@ -11,7 +11,7 @@ describe("redis", () => {
   let container: StartedTestContainer

   beforeAll(async () => {
-    const container = await new GenericContainer("redis")
+    container = await new GenericContainer("redis")
       .withExposedPorts(6379)
       .start()

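Note: the one-character fix above matters — the old `const container` declared a new block-scoped variable inside `beforeAll`, so the suite-level `container` stayed undefined for every test and for cleanup. A sketch of the corrected shape, assuming the standard testcontainers-node API:

    import { GenericContainer, StartedTestContainer } from "testcontainers"

    let container: StartedTestContainer

    beforeAll(async () => {
      // Assign the outer variable; `const` here would shadow it.
      container = await new GenericContainer("redis").withExposedPorts(6379).start()
    })

    afterAll(async () => {
      await container.stop()
    })

    it("exposes a mapped port", () => {
      // testcontainers maps container port 6379 to a random free host port.
      const url = `redis://${container.getHost()}:${container.getMappedPort(6379)}`
      expect(url).toMatch(/^redis:\/\//)
    })
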
@@ -2,6 +2,8 @@ import {
   PermissionLevel,
   PermissionType,
   BuiltinPermissionID,
+  Permission,
+  BuiltinPermissions,
 } from "@budibase/types"
 import flatten from "lodash/flatten"
 import cloneDeep from "lodash/fp/cloneDeep"
@@ -12,7 +14,7 @@ export type RoleHierarchy = {
   permissionId: string
 }[]

-export class Permission {
+export class PermissionImpl implements Permission {
   type: PermissionType
   level: PermissionLevel

@@ -61,68 +63,62 @@ export function getAllowedLevels(userPermLevel: PermissionLevel): string[] {
   }
 }

-export const BUILTIN_PERMISSIONS: {
-  [key in keyof typeof BuiltinPermissionID]: {
-    _id: (typeof BuiltinPermissionID)[key]
-    name: string
-    permissions: Permission[]
-  }
-} = {
+export const BUILTIN_PERMISSIONS: BuiltinPermissions = {
   PUBLIC: {
     _id: BuiltinPermissionID.PUBLIC,
     name: "Public",
     permissions: [
-      new Permission(PermissionType.WEBHOOK, PermissionLevel.EXECUTE),
+      new PermissionImpl(PermissionType.WEBHOOK, PermissionLevel.EXECUTE),
     ],
   },
   READ_ONLY: {
     _id: BuiltinPermissionID.READ_ONLY,
     name: "Read only",
     permissions: [
-      new Permission(PermissionType.QUERY, PermissionLevel.READ),
-      new Permission(PermissionType.TABLE, PermissionLevel.READ),
-      new Permission(PermissionType.APP, PermissionLevel.READ),
+      new PermissionImpl(PermissionType.QUERY, PermissionLevel.READ),
+      new PermissionImpl(PermissionType.TABLE, PermissionLevel.READ),
+      new PermissionImpl(PermissionType.APP, PermissionLevel.READ),
     ],
   },
   WRITE: {
     _id: BuiltinPermissionID.WRITE,
     name: "Read/Write",
     permissions: [
-      new Permission(PermissionType.QUERY, PermissionLevel.WRITE),
-      new Permission(PermissionType.TABLE, PermissionLevel.WRITE),
-      new Permission(PermissionType.AUTOMATION, PermissionLevel.EXECUTE),
-      new Permission(PermissionType.LEGACY_VIEW, PermissionLevel.READ),
-      new Permission(PermissionType.APP, PermissionLevel.READ),
+      new PermissionImpl(PermissionType.QUERY, PermissionLevel.WRITE),
+      new PermissionImpl(PermissionType.TABLE, PermissionLevel.WRITE),
+      new PermissionImpl(PermissionType.AUTOMATION, PermissionLevel.EXECUTE),
+      new PermissionImpl(PermissionType.LEGACY_VIEW, PermissionLevel.READ),
+      new PermissionImpl(PermissionType.APP, PermissionLevel.READ),
     ],
   },
   POWER: {
     _id: BuiltinPermissionID.POWER,
     name: "Power",
     permissions: [
-      new Permission(PermissionType.TABLE, PermissionLevel.WRITE),
-      new Permission(PermissionType.USER, PermissionLevel.READ),
-      new Permission(PermissionType.AUTOMATION, PermissionLevel.EXECUTE),
-      new Permission(PermissionType.WEBHOOK, PermissionLevel.READ),
-      new Permission(PermissionType.LEGACY_VIEW, PermissionLevel.READ),
-      new Permission(PermissionType.APP, PermissionLevel.READ),
+      new PermissionImpl(PermissionType.TABLE, PermissionLevel.WRITE),
+      new PermissionImpl(PermissionType.USER, PermissionLevel.READ),
+      new PermissionImpl(PermissionType.AUTOMATION, PermissionLevel.EXECUTE),
+      new PermissionImpl(PermissionType.WEBHOOK, PermissionLevel.READ),
+      new PermissionImpl(PermissionType.LEGACY_VIEW, PermissionLevel.READ),
+      new PermissionImpl(PermissionType.APP, PermissionLevel.READ),
     ],
   },
   ADMIN: {
     _id: BuiltinPermissionID.ADMIN,
     name: "Admin",
     permissions: [
-      new Permission(PermissionType.TABLE, PermissionLevel.ADMIN),
-      new Permission(PermissionType.USER, PermissionLevel.ADMIN),
-      new Permission(PermissionType.AUTOMATION, PermissionLevel.ADMIN),
-      new Permission(PermissionType.WEBHOOK, PermissionLevel.READ),
-      new Permission(PermissionType.QUERY, PermissionLevel.ADMIN),
-      new Permission(PermissionType.LEGACY_VIEW, PermissionLevel.READ),
-      new Permission(PermissionType.APP, PermissionLevel.READ),
+      new PermissionImpl(PermissionType.TABLE, PermissionLevel.ADMIN),
+      new PermissionImpl(PermissionType.USER, PermissionLevel.ADMIN),
+      new PermissionImpl(PermissionType.AUTOMATION, PermissionLevel.ADMIN),
+      new PermissionImpl(PermissionType.WEBHOOK, PermissionLevel.READ),
+      new PermissionImpl(PermissionType.QUERY, PermissionLevel.ADMIN),
+      new PermissionImpl(PermissionType.LEGACY_VIEW, PermissionLevel.READ),
+      new PermissionImpl(PermissionType.APP, PermissionLevel.READ),
     ],
   },
 }

-export function getBuiltinPermissions() {
+export function getBuiltinPermissions(): BuiltinPermissions {
   return cloneDeep(BUILTIN_PERMISSIONS)
 }

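Note: renaming the class to PermissionImpl frees the Permission name for the interface now exported from @budibase/types, so BUILTIN_PERMISSIONS can be typed with the shared BuiltinPermissions shape instead of the deleted inline mapped type. The underlying pattern, sketched with hypothetical stand-in types:

    // Stand-ins for the real interfaces in @budibase/types.
    interface Permission {
      type: string
      level: string
    }

    // The concrete class satisfies the interface; call sites construct the
    // class but only ever depend on the interface.
    class PermissionImpl implements Permission {
      constructor(public type: string, public level: string) {}
    }

    const readTable: Permission = new PermissionImpl("table", "read")
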
@@ -592,7 +592,10 @@ export class AccessController {
     )
   }

-  async checkScreensAccess(screens: Screen[], userRoleId: string) {
+  async checkScreensAccess(
+    screens: Screen[],
+    userRoleId: string
+  ): Promise<Screen[]> {
     let accessibleScreens = []
     // don't want to handle this with Promise.all as this would mean all custom roles would be
     // retrieved at same time, it is likely a custom role will be re-used and therefore want

@@ -133,7 +133,7 @@ describe("getBuiltinPermissionByID", () => {
       _id: BuiltinPermissionID.PUBLIC,
       name: "Public",
       permissions: [
-        new permissions.Permission(
+        new permissions.PermissionImpl(
           permissions.PermissionType.WEBHOOK,
           permissions.PermissionLevel.EXECUTE
         ),

@@ -6,7 +6,7 @@ import {
   getPlatformURL,
 } from "../context"
 import {
-  BBContext,
+  Ctx,
   TenantResolutionStrategy,
   GetTenantIdOptions,
 } from "@budibase/types"
@@ -37,7 +37,7 @@ export const isUserInAppTenant = (appId: string, user?: any) => {
 const ALL_STRATEGIES = Object.values(TenantResolutionStrategy)

 export const getTenantIDFromCtx = (
-  ctx: BBContext,
+  ctx: Ctx,
   opts: GetTenantIdOptions
 ): string | undefined => {
   // exit early if not multi-tenant

@@ -5,7 +5,7 @@ import * as db from "../../db"
 import { Header } from "../../constants"
 import { newid } from "../../utils"
 import env from "../../environment"
-import { BBContext } from "@budibase/types"
+import { Ctx } from "@budibase/types"

 describe("utils", () => {
   const config = new DBTestConfiguration()
@@ -109,7 +109,7 @@ describe("utils", () => {
   })

   describe("isServingBuilder", () => {
-    let ctx: BBContext
+    let ctx: Ctx

     const expectResult = (result: boolean) =>
       expect(utils.isServingBuilder(ctx)).toBe(result)
@@ -133,7 +133,7 @@ describe("utils", () => {
   })

   describe("isServingBuilderPreview", () => {
-    let ctx: BBContext
+    let ctx: Ctx

     const expectResult = (result: boolean) =>
       expect(utils.isServingBuilderPreview(ctx)).toBe(result)
@@ -157,7 +157,7 @@ describe("utils", () => {
   })

   describe("isPublicAPIRequest", () => {
-    let ctx: BBContext
+    let ctx: Ctx

     const expectResult = (result: boolean) =>
       expect(utils.isPublicApiRequest(ctx)).toBe(result)

@@ -1,8 +1,8 @@
 import { createMockContext, createMockCookies } from "@shopify/jest-koa-mocks"
-import { BBContext } from "@budibase/types"
+import { Ctx } from "@budibase/types"

-export const newContext = (): BBContext => {
-  const ctx = createMockContext() as any
+export const newContext = (): Ctx => {
+  const ctx = createMockContext() as Ctx
   return {
     ...ctx,
     path: "/",

@@ -37,10 +37,6 @@ function getTestcontainers(): ContainerInfo[] {
   )
 }

-function removeContainer(container: ContainerInfo) {
-  execSync(`docker rm ${container.ID}`)
-}
-
 export function getContainerByImage(image: string) {
   const containers = getTestcontainers().filter(x => x.Image.startsWith(image))
   if (containers.length > 1) {
@@ -53,10 +49,6 @@ export function getContainerByImage(image: string) {
   return containers[0]
 }

-function getContainerByName(name: string) {
-  return getTestcontainers().find(x => x.Names === name)
-}
-
 export function getContainerById(id: string) {
   return getTestcontainers().find(x => x.ID === id)
 }
@@ -98,6 +90,8 @@ function getCurrentDockerContext(): DockerContext {
 }

 export function setupEnv(...envs: any[]) {
+  process.env.TESTCONTAINERS_RYUK_DISABLED = "true"
+
   // For whatever reason, testcontainers doesn't always use the correct current
   // docker context. This bit of code forces the issue by finding the current
   // context and setting it as the DOCKER_HOST environment
@@ -153,19 +147,10 @@ export async function startContainer(container: GenericContainer) {
   key = key.replace(/\//g, "-").replace(/:/g, "-")
   const name = `${key}_testcontainer`

-  // If a container has died it hangs around and future attempts to start a
-  // container with the same name will fail. What we do here is if we find a
-  // matching container and it has exited, we remove it before carrying on. This
-  // removes the need to do this removal manually.
-  const existingContainer = getContainerByName(name)
-  if (existingContainer?.State === "exited") {
-    removeContainer(existingContainer)
-  }
-
   container = container
     .withReuse()
     .withLabels({ "com.budibase": "true" })
-    .withName(`${key}_testcontainer`)
+    .withName(name)

   let startedContainer: StartedTestContainer | undefined = undefined
   let lastError = undefined

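Note: the deleted exited-container cleanup is superseded by `.withReuse()` — with reuse enabled (and Ryuk disabled, as `setupEnv` does above), testcontainers attaches to an existing container with the same name instead of failing on the name clash. A sketch of the reuse pattern, assuming the testcontainers-node API:

    import { GenericContainer } from "testcontainers"

    async function startSharedRedis() {
      // Repeated test runs attach to the same named container rather than
      // erroring because the name is already taken.
      return await new GenericContainer("redis")
        .withReuse()
        .withLabels({ "com.budibase": "true" })
        .withName("redis_testcontainer")
        .withExposedPorts(6379)
        .start()
    }
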
@@ -63,7 +63,7 @@
     if (!name?.length) {
       return "Name is required"
     }
-    if (snippets.some(snippet => snippet.name === name)) {
+    if (!snippet?.name && snippets.some(snippet => snippet.name === name)) {
       return "That name is already in use"
     }
     if (firstCharNumberRegex.test(name)) {
@@ -106,11 +106,7 @@
       Delete
     </Button>
   {/if}
-  <Button
-    cta
-    on:click={saveSnippet}
-    disabled={!snippet && (loading || nameError)}
-  >
+  <Button cta on:click={saveSnippet} disabled={!code || loading || nameError}>
     Save
   </Button>
 </svelte:fragment>

@@ -186,7 +186,7 @@
   <div class="snippet-popover">
     {#key hoveredSnippet}
       <CodeEditor
-        value={hoveredSnippet.code.trim()}
+        value={hoveredSnippet.code?.trim()}
         mode={EditorModes.JS}
         readonly
       />

@@ -52,9 +52,16 @@
   let modal

   $: text = value?.label ?? "Choose an option"
-  $: tables = $tablesStore.list.map(table =>
-    format.table(table, $datasources.list)
-  )
+  $: tables = $tablesStore.list
+    .map(table => format.table(table, $datasources.list))
+    .sort((a, b) => {
+      // sort tables alphabetically, grouped by datasource
+      const dsComparison = a.datasourceName.localeCompare(b.datasourceName)
+      if (dsComparison !== 0) {
+        return dsComparison
+      }
+      return a.label.localeCompare(b.label)
+    })
   $: viewsV1 = $viewsStore.list.map(view => ({
     ...view,
     label: view.name,

@@ -1,5 +1,5 @@
 <script>
-  import { Heading, Body, Layout, Button, Modal } from "@budibase/bbui"
+  import { Heading, Body, Layout, Button, Modal, Icon } from "@budibase/bbui"
   import AutomationPanel from "components/automation/AutomationPanel/AutomationPanel.svelte"
   import CreateAutomationModal from "components/automation/AutomationPanel/CreateAutomationModal.svelte"
   import CreateWebhookModal from "components/automation/Shared/CreateWebhookModal.svelte"
@@ -12,11 +12,13 @@
     automationStore,
     selectedAutomation,
   } from "stores/builder"
+  import { createLocalStorageStore } from "@budibase/frontend-core"
+  import { fly } from "svelte/transition"

   $: automationId = $selectedAutomation?.data?._id
   $: builderStore.selectResource(automationId)

   // Keep URL and state in sync for selected screen ID
+  const surveyDismissed = createLocalStorageStore("automation-survey", false)
   const stopSyncing = syncURLToState({
     urlParam: "automationId",
     stateKey: "selectedAutomationId",
@@ -29,9 +31,11 @@

   let modal
   let webhookModal
+  let mounted = false

   onMount(() => {
     $automationStore.showTestPanel = false
+    mounted = true
   })

   onDestroy(stopSyncing)
@@ -79,6 +83,43 @@
   </Modal>
 </div>

+{#if !$surveyDismissed && mounted}
+  <div
+    class="survey"
+    in:fly={{ x: 600, duration: 260, delay: 1000 }}
+    out:fly={{ x: 600, duration: 260 }}
+  >
+    <div class="survey__body">
+      <div class="survey__title">We value your feedback!</div>
+      <div class="survey__text">
+        <a
+          href="https://t.maze.co/310149185"
+          target="_blank"
+          rel="noopener noreferrer"
+          on:click={() => surveyDismissed.set(true)}
+        >
+          Complete our survey on Automations</a
+        >
+        and receive a $20 thank-you gift.
+        <a
+          href="https://drive.google.com/file/d/12-qk_2F9g5PdbM6wuKoz2KkIyLI-feMX/view?usp=sharing"
+          target="_blank"
+          rel="noopener noreferrer"
+        >
+          Terms apply.
+        </a>
+      </div>
+    </div>
+    <Icon
+      name="Close"
+      hoverable
+      color="var(--spectrum-global-color-static-gray-300)"
+      hoverColor="var(--spectrum-global-color-static-gray-100)"
+      on:click={() => surveyDismissed.set(true)}
+    />
+  </div>
+{/if}
+
 <style>
   .root {
     flex: 1 1 auto;
@@ -108,11 +149,9 @@
     justify-content: center;
     align-items: center;
   }

-  .main {
-    width: 300px;
-  }
-
   .setup {
     padding-top: 9px;
     border-left: var(--border-light);
@@ -125,4 +164,39 @@
     grid-column: 3;
     overflow: auto;
   }
+
+  /* Survey */
+  .survey {
+    position: absolute;
+    bottom: 32px;
+    right: 32px;
+    background: var(--spectrum-semantic-positive-color-background);
+    display: flex;
+    flex-direction: row;
+    padding: var(--spacing-l) var(--spacing-xl);
+    border-radius: 4px;
+    gap: var(--spacing-xl);
+  }
+  .survey * {
+    color: var(--spectrum-global-color-static-gray-300);
+    white-space: nowrap;
+  }
+  .survey a {
+    text-decoration: underline;
+    transition: color 130ms ease-out;
+  }
+  .survey a:hover {
+    color: var(--spectrum-global-color-static-gray-100);
+    cursor: pointer;
+  }
+  .survey__body {
+    flex: 1 1 auto;
+    display: flex;
+    flex-direction: column;
+    gap: 2px;
+  }
+  .survey__title {
+    font-weight: 600;
+    font-size: 15px;
+  }
 </style>

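Note: the survey banner's dismissed state goes through createLocalStorageStore, so it survives reloads and stays per-browser. The real implementation lives in @budibase/frontend-core; a minimal sketch of what such a store plausibly looks like — a Svelte writable mirrored to localStorage:

    import { writable } from "svelte/store"

    // Hedged sketch, not the actual frontend-core implementation.
    export function createLocalStorageStore<T>(key: string, initialValue: T) {
      const stored = localStorage.getItem(key)
      const store = writable<T>(stored !== null ? JSON.parse(stored) : initialValue)
      // Persist every update back to localStorage.
      store.subscribe(value => localStorage.setItem(key, JSON.stringify(value)))
      return store
    }

    // Usage, as on the page above:
    // const surveyDismissed = createLocalStorageStore("automation-survey", false)
    // surveyDismissed.set(true)
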
@@ -40,6 +40,6 @@
     "@types/node-fetch": "2.6.4",
     "@types/pouchdb": "^6.4.0",
     "ts-node": "10.8.1",
-    "typescript": "5.5.2"
+    "typescript": "5.7.2"
   }
 }

@@ -1 +1 @@
-Subproject commit d9245f3d6d0b41ec2e6b3406b791f9e7448882cb
+Subproject commit 5321c7589257711cf153600597ef4e6a5f6b7162

@@ -129,7 +129,8 @@
     "uuid": "^8.3.2",
     "validate.js": "0.13.1",
     "worker-farm": "1.7.0",
-    "xml2js": "0.6.2"
+    "xml2js": "0.6.2",
+    "zod-validation-error": "^3.4.0"
   },
   "devDependencies": {
     "@babel/core": "^7.22.5",
@@ -169,13 +170,14 @@
     "rimraf": "3.0.2",
     "supertest": "6.3.3",
     "swagger-jsdoc": "6.1.0",
-    "testcontainers": "10.7.2",
+    "testcontainers": "10.16.0",
     "timekeeper": "2.2.0",
     "ts-node": "10.8.1",
     "tsconfig-paths": "4.0.0",
-    "typescript": "5.5.2",
+    "typescript": "5.7.2",
     "update-dotenv": "1.1.1",
-    "yargs": "13.2.4"
+    "yargs": "^13.2.4",
+    "zod": "^3.23.8"
   },
   "nx": {
     "targets": {

@@ -1,16 +1,22 @@
 import { events, context } from "@budibase/backend-core"
-import { AnalyticsPingRequest, App, PingSource } from "@budibase/types"
+import {
+  AnalyticsPingRequest,
+  App,
+  PingSource,
+  Ctx,
+  AnalyticsEnabledResponse,
+} from "@budibase/types"
 import { DocumentType, isDevAppID } from "../../db/utils"

-export const isEnabled = async (ctx: any) => {
+export const isEnabled = async (ctx: Ctx<void, AnalyticsEnabledResponse>) => {
   const enabled = await events.analytics.enabled()
   ctx.body = {
     enabled,
   }
 }

-export const ping = async (ctx: any) => {
-  const body = ctx.request.body as AnalyticsPingRequest
+export const ping = async (ctx: Ctx<AnalyticsPingRequest, void>) => {
+  const body = ctx.request.body

   switch (body.source) {
     case PingSource.APP: {

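Note: `Ctx<RequestBody, ResponseBody>` types both sides of the endpoint — `ctx.request.body` takes the first parameter's type and `ctx.body` must satisfy the second — which is why the `as AnalyticsPingRequest` cast disappears above. The idea in isolation, with hypothetical request/response shapes:

    // Minimal stand-in for the real Ctx from @budibase/types.
    type Ctx<Req, Res> = { request: { body: Req }; body: Res }

    interface EchoRequest {
      message: string
    }
    interface EchoResponse {
      echoed: string
    }

    async function echo(ctx: Ctx<EchoRequest, EchoResponse>) {
      const body = ctx.request.body // typed as EchoRequest, no cast needed
      ctx.body = { echoed: body.message } // must be an EchoResponse
    }
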
@@ -1,18 +1,25 @@
 import { db as dbCore, tenancy } from "@budibase/backend-core"
-import { BBContext, Document } from "@budibase/types"
+import {
+  Document,
+  UserCtx,
+  ApiKeyDoc,
+  ApiKeyFetchResponse,
+  UpdateApiKeyRequest,
+  UpdateApiKeyResponse,
+} from "@budibase/types"

 const KEYS_DOC = dbCore.StaticDatabases.GLOBAL.docs.apiKeys

 async function getBuilderMainDoc() {
   const db = tenancy.getGlobalDB()
-  try {
-    return await db.get<any>(KEYS_DOC)
-  } catch (err) {
-    // doesn't exist yet, nothing to get
+  const doc = await db.tryGet<ApiKeyDoc>(KEYS_DOC)
+  if (!doc) {
     return {
       _id: KEYS_DOC,
       apiKeys: {},
     }
   }
+  return doc
 }

 async function setBuilderMainDoc(doc: Document) {
@@ -22,7 +29,7 @@ async function setBuilderMainDoc(doc: Document) {
   return db.put(doc)
 }

-export async function fetch(ctx: BBContext) {
+export async function fetch(ctx: UserCtx<void, ApiKeyFetchResponse>) {
   try {
     const mainDoc = await getBuilderMainDoc()
     ctx.body = mainDoc.apiKeys ? mainDoc.apiKeys : {}
@@ -32,7 +39,9 @@ export async function fetch(ctx: BBContext) {
   }
 }

-export async function update(ctx: BBContext) {
+export async function update(
+  ctx: UserCtx<UpdateApiKeyRequest, UpdateApiKeyResponse>
+) {
   const key = ctx.params.key
   const value = ctx.request.body.value

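Note: `db.tryGet` collapses the try/catch-on-404 boilerplate — it resolves to undefined when the document is missing, letting the caller supply the default explicitly. The calling pattern, sketched against an assumed `tryGet<T>(id): Promise<T | undefined>` signature:

    interface ApiKeyDoc {
      _id: string
      apiKeys: Record<string, string>
    }

    interface Database {
      tryGet<T>(id: string): Promise<T | undefined>
    }

    const KEYS_DOC = "apikeys" // illustrative id; the real one comes from dbCore

    async function getApiKeysDoc(db: Database): Promise<ApiKeyDoc> {
      // tryGet returns undefined instead of throwing when the doc is absent.
      const doc = await db.tryGet<ApiKeyDoc>(KEYS_DOC)
      return doc ?? { _id: KEYS_DOC, apiKeys: {} }
    }
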
@@ -59,6 +59,15 @@ import {
   BBReferenceFieldSubType,
   Row,
   BBRequest,
+  SyncAppResponse,
+  CreateAppResponse,
+  FetchAppsResponse,
+  UpdateAppClientResponse,
+  RevertAppClientResponse,
+  DeleteAppResponse,
+  ImportToUpdateAppRequest,
+  ImportToUpdateAppResponse,
+  SetRevertableAppVersionRequest,
 } from "@budibase/types"
 import { BASE_LAYOUT_PROP_IDS } from "../../constants/layouts"
 import sdk from "../../sdk"
@@ -166,7 +175,7 @@ async function createInstance(appId: string, template: AppTemplate) {
   return { _id: appId }
 }

-export const addSampleData = async (ctx: UserCtx) => {
+export const addSampleData = async (ctx: UserCtx<void, void>) => {
   const db = context.getAppDB()

   try {
@@ -182,7 +191,7 @@ export const addSampleData = async (ctx: UserCtx) => {
   ctx.status = 200
 }

-export async function fetch(ctx: UserCtx<void, App[]>) {
+export async function fetch(ctx: UserCtx<void, FetchAppsResponse>) {
   ctx.body = await sdk.applications.fetch(
     ctx.query.status as AppStatus,
     ctx.user
@@ -242,7 +251,9 @@ export async function fetchAppPackage(
   }
 }

-async function performAppCreate(ctx: UserCtx<CreateAppRequest, App>) {
+async function performAppCreate(
+  ctx: UserCtx<CreateAppRequest, CreateAppResponse>
+) {
   const apps = (await dbCore.getAllApps({ dev: true })) as App[]
   const { body } = ctx.request
   const { name, url, encryptionPassword, templateKey } = body
@@ -510,7 +521,9 @@ async function appPostCreate(ctx: UserCtx<CreateAppRequest, App>, app: App) {
   }
 }

-export async function create(ctx: UserCtx<CreateAppRequest, App>) {
+export async function create(
+  ctx: UserCtx<CreateAppRequest, CreateAppResponse>
+) {
   const newApplication = await quotas.addApp(() => performAppCreate(ctx))
   await appPostCreate(ctx, newApplication)
   await cache.bustCache(cache.CacheKey.CHECKLIST)
@@ -553,7 +566,9 @@ export async function update(
   })
 }

-export async function updateClient(ctx: UserCtx) {
+export async function updateClient(
+  ctx: UserCtx<void, UpdateAppClientResponse>
+) {
   // Get current app version
   const application = await sdk.applications.metadata.get()
   const currentVersion = application.version
@@ -581,7 +596,9 @@ export async function updateClient(ctx: UserCtx) {
   ctx.body = app
 }

-export async function revertClient(ctx: UserCtx) {
+export async function revertClient(
+  ctx: UserCtx<void, RevertAppClientResponse>
+) {
   // Check app can be reverted
   const application = await sdk.applications.metadata.get()
   if (!application.revertableVersion) {
@@ -668,7 +685,7 @@ async function postDestroyApp(ctx: UserCtx) {
   }
 }

-export async function destroy(ctx: UserCtx) {
+export async function destroy(ctx: UserCtx<void, DeleteAppResponse>) {
   await preDestroyApp(ctx)
   const result = await destroyApp(ctx)
   await postDestroyApp(ctx)
@@ -676,7 +693,7 @@ export async function destroy(ctx: UserCtx) {
   ctx.body = result
 }

-export async function unpublish(ctx: UserCtx) {
+export async function unpublish(ctx: UserCtx<void, void>) {
   const prodAppId = dbCore.getProdAppID(ctx.params.appId)
   const dbExists = await dbCore.dbExists(prodAppId)

@@ -692,7 +709,7 @@ export async function unpublish(ctx: UserCtx) {
   builderSocket?.emitAppUnpublish(ctx)
 }

-export async function sync(ctx: UserCtx) {
+export async function sync(ctx: UserCtx<void, SyncAppResponse>) {
   const appId = ctx.params.appId
   try {
     ctx.body = await sdk.applications.syncApp(appId)
@@ -701,10 +718,12 @@ export async function sync(ctx: UserCtx) {
   }
 }

-export async function importToApp(ctx: UserCtx) {
+export async function importToApp(
+  ctx: UserCtx<ImportToUpdateAppRequest, ImportToUpdateAppResponse>
+) {
   const { appId } = ctx.params
   const appExport = ctx.request.files?.appExport
-  const password = ctx.request.body.encryptionPassword as string
+  const password = ctx.request.body.encryptionPassword
   if (!appExport) {
     ctx.throw(400, "Must supply app export to import")
   }
@@ -811,7 +830,7 @@ export async function updateAppPackage(
 }

 export async function setRevertableVersion(
-  ctx: UserCtx<{ revertableVersion: string }, App>
+  ctx: UserCtx<SetRevertableAppVersionRequest, void>
 ) {
   if (!env.isDev()) {
     ctx.status = 403

@@ -2,7 +2,7 @@ import { outputProcessing } from "../../utilities/rowProcessor"
 import { InternalTables } from "../../db/utils"
 import { getFullUser } from "../../utilities/users"
 import { roles, context, db as dbCore } from "@budibase/backend-core"
-import { ContextUser, Row, UserCtx } from "@budibase/types"
+import { AppSelfResponse, ContextUser, UserCtx } from "@budibase/types"
 import sdk from "../../sdk"
 import { processUser } from "../../utilities/global"

@@ -17,7 +17,7 @@ const addSessionAttributesToUser = (ctx: any) => {
   }
 }

-export async function fetchSelf(ctx: UserCtx) {
+export async function fetchSelf(ctx: UserCtx<void, AppSelfResponse>) {
   let userId = ctx.user.userId || ctx.user._id
   /* istanbul ignore next */
   if (!userId || !ctx.isAuthenticated) {
@@ -45,9 +45,9 @@ export async function fetchSelf(ctx: UserCtx) {
   try {
     const userTable = await sdk.tables.getTable(InternalTables.USER_METADATA)
     // specifically needs to make sure is enriched
-    ctx.body = await outputProcessing(userTable, user as Row)
+    ctx.body = await outputProcessing(userTable, user)
   } catch (err: any) {
-    let response
+    let response: ContextUser | {}
     // user didn't exist in app, don't pretend they do
     if (user.roleId === PUBLIC_ROLE) {
       response = {}

@@ -9,10 +9,25 @@ import {
   App,
   Automation,
   AutomationActionStepId,
-  AutomationResults,
   UserCtx,
   DeleteAutomationResponse,
   FetchAutomationResponse,
+  GetAutomationTriggerDefinitionsResponse,
+  GetAutomationStepDefinitionsResponse,
+  GetAutomationActionDefinitionsResponse,
+  FindAutomationResponse,
+  UpdateAutomationRequest,
+  UpdateAutomationResponse,
+  CreateAutomationRequest,
+  CreateAutomationResponse,
+  SearchAutomationLogsRequest,
+  SearchAutomationLogsResponse,
+  ClearAutomationLogRequest,
+  ClearAutomationLogResponse,
+  TriggerAutomationRequest,
+  TriggerAutomationResponse,
+  TestAutomationRequest,
+  TestAutomationResponse,
 } from "@budibase/types"
 import { getActionDefinitions as actionDefs } from "../../automations/actions"
 import sdk from "../../sdk"
@@ -34,7 +49,7 @@ function getTriggerDefinitions() {
 *************************/

 export async function create(
-  ctx: UserCtx<Automation, { message: string; automation: Automation }>
+  ctx: UserCtx<CreateAutomationRequest, CreateAutomationResponse>
 ) {
   let automation = ctx.request.body
   automation.appId = ctx.appId
@@ -55,7 +70,9 @@ export async function create(
   builderSocket?.emitAutomationUpdate(ctx, automation)
 }

-export async function update(ctx: UserCtx) {
+export async function update(
+  ctx: UserCtx<UpdateAutomationRequest, UpdateAutomationResponse>
+) {
   let automation = ctx.request.body
   automation.appId = ctx.appId

@@ -80,7 +97,7 @@ export async function fetch(ctx: UserCtx<void, FetchAutomationResponse>) {
   ctx.body = { automations }
 }

-export async function find(ctx: UserCtx) {
+export async function find(ctx: UserCtx<void, FindAutomationResponse>) {
   ctx.body = await sdk.automations.get(ctx.params.id)
 }

@@ -96,11 +113,15 @@ export async function destroy(ctx: UserCtx<void, DeleteAutomationResponse>) {
   builderSocket?.emitAutomationDeletion(ctx, automationId)
 }

-export async function logSearch(ctx: UserCtx) {
+export async function logSearch(
+  ctx: UserCtx<SearchAutomationLogsRequest, SearchAutomationLogsResponse>
+) {
   ctx.body = await automations.logs.logSearch(ctx.request.body)
 }

-export async function clearLogError(ctx: UserCtx) {
+export async function clearLogError(
+  ctx: UserCtx<ClearAutomationLogRequest, ClearAutomationLogResponse>
+) {
   const { automationId, appId } = ctx.request.body
   await context.doInAppContext(appId, async () => {
     const db = context.getProdAppDB()
@@ -119,15 +140,21 @@ export async function clearLogError(ctx: UserCtx) {
   })
 }

-export async function getActionList(ctx: UserCtx) {
+export async function getActionList(
+  ctx: UserCtx<void, GetAutomationActionDefinitionsResponse>
+) {
   ctx.body = await getActionDefinitions()
 }

-export async function getTriggerList(ctx: UserCtx) {
+export async function getTriggerList(
+  ctx: UserCtx<void, GetAutomationTriggerDefinitionsResponse>
+) {
   ctx.body = getTriggerDefinitions()
 }

-export async function getDefinitionList(ctx: UserCtx) {
+export async function getDefinitionList(
+  ctx: UserCtx<void, GetAutomationStepDefinitionsResponse>
+) {
   ctx.body = {
     trigger: getTriggerDefinitions(),
     action: await getActionDefinitions(),
@@ -140,14 +167,16 @@ export async function getDefinitionList(ctx: UserCtx) {
 *                                   *
 *********************/

-export async function trigger(ctx: UserCtx) {
+export async function trigger(
+  ctx: UserCtx<TriggerAutomationRequest, TriggerAutomationResponse>
+) {
   const db = context.getAppDB()
   let automation = await db.get<Automation>(ctx.params.id)

   let hasCollectStep = sdk.automations.utils.checkForCollectStep(automation)
   if (hasCollectStep && (await features.isSyncAutomationsEnabled())) {
     try {
-      const response: AutomationResults = await triggers.externalTrigger(
+      const response = await triggers.externalTrigger(
         automation,
         {
           fields: ctx.request.body.fields,
@@ -158,6 +187,10 @@ export async function trigger(ctx: UserCtx) {
         { getResponses: true }
       )

+      if (!("steps" in response)) {
+        ctx.throw(400, "Unable to collect response")
+      }
+
       let collectedValue = response.steps.find(
         step => step.stepId === AutomationActionStepId.COLLECT
       )
@@ -185,7 +218,7 @@ export async function trigger(ctx: UserCtx) {
   }
 }

-function prepareTestInput(input: any) {
+function prepareTestInput(input: TestAutomationRequest) {
   // prepare the test parameters
   if (input.id && input.row) {
     input.row._id = input.id
@@ -196,7 +229,9 @@ function prepareTestInput(input: any) {
   return input
 }

-export async function test(ctx: UserCtx) {
+export async function test(
+  ctx: UserCtx<TestAutomationRequest, TestAutomationResponse>
+) {
   const db = context.getAppDB()
   let automation = await db.get<Automation>(ctx.params.id)
   await setTestFlag(automation._id!)

@@ -1,14 +1,16 @@
 import sdk from "../../sdk"
 import { events, context, db } from "@budibase/backend-core"
 import { DocumentType } from "../../db/utils"
-import { App, Ctx } from "@budibase/types"
+import {
+  App,
+  Ctx,
+  ExportAppDumpRequest,
+  ExportAppDumpResponse,
+} from "@budibase/types"

-interface ExportAppDumpRequest {
-  excludeRows: boolean
-  encryptPassword?: string
-}
-
-export async function exportAppDump(ctx: Ctx<ExportAppDumpRequest>) {
+export async function exportAppDump(
+  ctx: Ctx<ExportAppDumpRequest, ExportAppDumpResponse>
+) {
   const { appId } = ctx.query as any
   const { excludeRows, encryptPassword } = ctx.request.body

@@ -1,9 +1,16 @@
 import { DocumentType } from "../../db/utils"
-import { App, Plugin, UserCtx } from "@budibase/types"
+import {
+  App,
+  FetchComponentDefinitionResponse,
+  Plugin,
+  UserCtx,
+} from "@budibase/types"
 import { db as dbCore, context, tenancy } from "@budibase/backend-core"
 import { getComponentLibraryManifest } from "../../utilities/fileSystem"

-export async function fetchAppComponentDefinitions(ctx: UserCtx) {
+export async function fetchAppComponentDefinitions(
+  ctx: UserCtx<void, FetchComponentDefinitionResponse>
+) {
   try {
     const db = context.getAppDB()
     const app = await db.get<App>(DocumentType.APP_METADATA)

@@ -23,13 +23,17 @@ import {
   Table,
   RowValue,
   DynamicVariable,
+  FetchDatasourcesResponse,
+  FindDatasourcesResponse,
+  DeleteDatasourceResponse,
+  FetchExternalSchemaResponse,
 } from "@budibase/types"
 import sdk from "../../sdk"
 import { builderSocket } from "../../websockets"
 import { isEqual } from "lodash"
 import { processTable } from "../../sdk/app/tables/getters"

-export async function fetch(ctx: UserCtx) {
+export async function fetch(ctx: UserCtx<void, FetchDatasourcesResponse>) {
   ctx.body = await sdk.datasources.fetch()
 }

@@ -260,7 +264,7 @@ async function destroyInternalTablesBySourceId(datasourceId: string) {
   }
 }

-export async function destroy(ctx: UserCtx) {
+export async function destroy(ctx: UserCtx<void, DeleteDatasourceResponse>) {
   const db = context.getAppDB()
   const datasourceId = ctx.params.datasourceId

@@ -291,12 +295,14 @@ export async function destroy(ctx: UserCtx) {
   builderSocket?.emitDatasourceDeletion(ctx, datasourceId)
 }

-export async function find(ctx: UserCtx) {
+export async function find(ctx: UserCtx<void, FindDatasourcesResponse>) {
   const datasource = await sdk.datasources.get(ctx.params.datasourceId)
   ctx.body = await sdk.datasources.removeSecretSingle(datasource)
 }

-export async function getExternalSchema(ctx: UserCtx) {
+export async function getExternalSchema(
+  ctx: UserCtx<void, FetchExternalSchemaResponse>
+) {
   const datasource = await sdk.datasources.get(ctx.params.datasourceId)
   const enrichedDatasource = await sdk.datasources.getAndMergeDatasource(
     datasource
@@ -306,9 +312,10 @@ export async function getExternalSchema(ctx: UserCtx) {
   if (!connector.getExternalSchema) {
     ctx.throw(400, "Datasource does not support exporting external schema")
   }
-  const response = await connector.getExternalSchema()
-
-  ctx.body = {
-    schema: response,
+  try {
+    ctx.body = { schema: await connector.getExternalSchema() }
+  } catch (e: any) {
+    ctx.throw(400, e.message)
   }
 }

@@ -1,4 +1,5 @@
 import { context, utils } from "@budibase/backend-core"
+import { DeploymentStatus } from "@budibase/types"

 /**
  * This is used to pass around information about the deployment that is occurring
@@ -6,7 +7,7 @@ import { context, utils } from "@budibase/backend-core"
 export default class Deployment {
   _id: string
   verification: any
-  status?: string
+  status?: DeploymentStatus
   err?: any
   appUrl?: string

@@ -25,7 +26,7 @@ export default class Deployment {
     return this.verification
   }

-  setStatus(status: string, err?: any) {
+  setStatus(status: DeploymentStatus, err?: any) {
     this.status = status
     if (err) {
       this.err = err

@@ -7,20 +7,26 @@ import {
   enableCronTrigger,
 } from "../../../automations/utils"
 import { backups } from "@budibase/pro"
-import { App, AppBackupTrigger } from "@budibase/types"
+import {
+  App,
+  AppBackupTrigger,
+  DeploymentDoc,
+  FetchDeploymentResponse,
+  PublishAppResponse,
+  UserCtx,
+  DeploymentStatus,
+  DeploymentProgressResponse,
+} from "@budibase/types"
 import sdk from "../../../sdk"
 import { builderSocket } from "../../../websockets"

 // the max time we can wait for an invalidation to complete before considering it failed
 const MAX_PENDING_TIME_MS = 30 * 60000
-const DeploymentStatus = {
-  SUCCESS: "SUCCESS",
-  PENDING: "PENDING",
-  FAILURE: "FAILURE",
-}
-
 // checks that deployments are in a good state, any pending will be updated
-async function checkAllDeployments(deployments: any) {
+async function checkAllDeployments(
+  deployments: any
+): Promise<{ updated: boolean; deployments: DeploymentDoc }> {
   let updated = false
   let deployment: any
   for (deployment of Object.values(deployments.history)) {
@@ -96,7 +102,9 @@ async function initDeployedApp(prodAppId: any) {
   })
 }

-export async function fetchDeployments(ctx: any) {
+export async function fetchDeployments(
+  ctx: UserCtx<void, FetchDeploymentResponse>
+) {
   try {
     const db = context.getAppDB()
     const deploymentDoc = await db.get(DocumentType.DEPLOYMENTS)
@@ -104,17 +112,24 @@ export async function fetchDeployments(ctx: any) {
     if (updated) {
       await db.put(deployments)
     }
-    ctx.body = Object.values(deployments.history).reverse()
+    ctx.body = deployments.history
+      ? Object.values(deployments.history).reverse()
+      : []
   } catch (err) {
     ctx.body = []
   }
 }

-export async function deploymentProgress(ctx: any) {
+export async function deploymentProgress(
+  ctx: UserCtx<void, DeploymentProgressResponse>
+) {
   try {
     const db = context.getAppDB()
-    const deploymentDoc = await db.get<any>(DocumentType.DEPLOYMENTS)
-    ctx.body = deploymentDoc[ctx.params.deploymentId]
+    const deploymentDoc = await db.get<DeploymentDoc>(DocumentType.DEPLOYMENTS)
+    if (!deploymentDoc.history?.[ctx.params.deploymentId]) {
+      ctx.throw(404, "No deployment found")
+    }
+    ctx.body = deploymentDoc.history?.[ctx.params.deploymentId]
   } catch (err) {
     ctx.throw(
       500,
@@ -123,7 +138,9 @@ export async function deploymentProgress(ctx: any) {
   }
 }

-export const publishApp = async function (ctx: any) {
+export const publishApp = async function (
+  ctx: UserCtx<void, PublishAppResponse>
+) {
   let deployment = new Deployment()
   console.log("Deployment object created")
   deployment.setStatus(DeploymentStatus.PENDING)

@@ -11,7 +11,13 @@ import {
   db as dbCore,
   cache,
 } from "@budibase/backend-core"
-import { App } from "@budibase/types"
+import {
+  App,
+  ClearDevLockResponse,
+  Ctx,
+  GetVersionResponse,
+  RevertAppResponse,
+} from "@budibase/types"

 async function redirect(
   ctx: any,
@@ -69,7 +75,7 @@ export function buildRedirectDelete(path: string) {
   }
 }

-export async function clearLock(ctx: any) {
+export async function clearLock(ctx: Ctx<void, ClearDevLockResponse>) {
   const { appId } = ctx.params
   try {
     await redisClearLock(appId, ctx.user)
@@ -81,7 +87,7 @@ export async function clearLock(ctx: any) {
   }
 }

-export async function revert(ctx: any) {
+export async function revert(ctx: Ctx<void, RevertAppResponse>) {
   const { appId } = ctx.params
   const productionAppId = dbCore.getProdAppID(appId)

@@ -131,7 +137,7 @@ export async function revert(ctx: any) {
   }
 }

-export async function getBudibaseVersion(ctx: any) {
+export async function getBudibaseVersion(ctx: Ctx<void, GetVersionResponse>) {
   const version = envCore.VERSION
   ctx.body = {
     version,

@@ -1,12 +1,17 @@
 import { getDefinition, getDefinitions } from "../../integrations"
-import { SourceName, UserCtx } from "@budibase/types"
+import {
+  SourceName,
+  UserCtx,
+  FetchIntegrationsResponse,
+  FindIntegrationResponse,
+} from "@budibase/types"

 const DISABLED_EXTERNAL_INTEGRATIONS = [
   SourceName.AIRTABLE,
   SourceName.BUDIBASE,
 ]

-export async function fetch(ctx: UserCtx) {
+export async function fetch(ctx: UserCtx<void, FetchIntegrationsResponse>) {
   const definitions = await getDefinitions()
   for (let disabledIntegration of DISABLED_EXTERNAL_INTEGRATIONS) {
     delete definitions[disabledIntegration]
@@ -14,10 +19,14 @@ export async function fetch(ctx: UserCtx) {
   ctx.body = definitions
 }

-export async function find(ctx: UserCtx) {
+export async function find(ctx: UserCtx<void, FindIntegrationResponse>) {
   const sourceType = ctx.params?.type
   if (DISABLED_EXTERNAL_INTEGRATIONS.indexOf(sourceType) !== -1) {
     ctx.throw(400, `Invalid source type - ${sourceType} is not supported.`)
   }
-  ctx.body = await getDefinition(ctx.params.type)
+  const integration = await getDefinition(ctx.params.type)
+  if (!integration) {
+    ctx.throw(404, "Integration not found")
+  }
+  ctx.body = integration
 }

@@ -2,7 +2,7 @@ import { EMPTY_LAYOUT } from "../../constants/layouts"
 import { generateLayoutID, getScreenParams } from "../../db/utils"
 import { events, context } from "@budibase/backend-core"
 import {
-  BBContext,
+  DeleteLayoutResponse,
   Layout,
   SaveLayoutRequest,
   SaveLayoutResponse,
@@ -32,7 +32,7 @@ export async function save(
   ctx.status = 200
 }

-export async function destroy(ctx: BBContext) {
+export async function destroy(ctx: UserCtx<void, DeleteLayoutResponse>) {
   const db = context.getAppDB()
   const layoutId = ctx.params.layoutId,
     layoutRev = ctx.params.layoutRev

@ -1,24 +1,35 @@
|
|||
import { MetadataTypes } from "../../constants"
|
||||
import { generateMetadataID } from "../../db/utils"
|
||||
import { saveEntityMetadata, deleteEntityMetadata } from "../../utilities"
|
||||
import { context } from "@budibase/backend-core"
|
||||
import { BBContext } from "@budibase/types"
|
||||
import {
|
||||
UserCtx,
|
||||
MetadataType,
|
||||
GetMetadataTypesResponse,
|
||||
SaveMetadataRequest,
|
||||
SaveMetadataResponse,
|
||||
DeleteMetadataResponse,
|
||||
FindMetadataResponse,
|
||||
} from "@budibase/types"
|
||||
|
||||
export async function getTypes(ctx: BBContext) {
|
||||
export async function getTypes(ctx: UserCtx<void, GetMetadataTypesResponse>) {
|
||||
ctx.body = {
|
||||
types: MetadataTypes,
|
||||
types: MetadataType,
|
||||
}
|
||||
}
|
||||
|
||||
export async function saveMetadata(ctx: BBContext) {
|
||||
export async function saveMetadata(
|
||||
ctx: UserCtx<SaveMetadataRequest, SaveMetadataResponse>
|
||||
) {
|
||||
const { type, entityId } = ctx.params
|
||||
if (type === MetadataTypes.AUTOMATION_TEST_HISTORY) {
|
||||
if (type === MetadataType.AUTOMATION_TEST_HISTORY) {
|
||||
ctx.throw(400, "Cannot save automation history type")
|
||||
}
|
||||
ctx.body = await saveEntityMetadata(type, entityId, ctx.request.body)
|
||||
}
|
||||
|
||||
export async function deleteMetadata(ctx: BBContext) {
|
||||
export async function deleteMetadata(
|
||||
ctx: UserCtx<void, DeleteMetadataResponse>
|
||||
) {
|
||||
const { type, entityId } = ctx.params
|
||||
await deleteEntityMetadata(type, entityId)
|
||||
ctx.body = {
|
||||
|
@ -26,17 +37,9 @@ export async function deleteMetadata(ctx: BBContext) {
|
|||
}
|
||||
}
|
||||
|
||||
export async function getMetadata(ctx: BBContext) {
|
||||
export async function getMetadata(ctx: UserCtx<void, FindMetadataResponse>) {
|
||||
const { type, entityId } = ctx.params
|
||||
const db = context.getAppDB()
|
||||
const id = generateMetadataID(type, entityId)
|
||||
try {
|
||||
ctx.body = await db.get(id)
|
||||
} catch (err: any) {
|
||||
if (err.status === 404) {
|
||||
ctx.body = {}
|
||||
} else {
|
||||
ctx.throw(err.status, err)
|
||||
}
|
||||
}
|
||||
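  // db.tryGet returns undefined rather than throwing when the doc is missing, so default to an empty body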
  ctx.body = (await db.tryGet(id)) || {}
}

@ -1,24 +1,33 @@
import { context } from "@budibase/backend-core"
import { migrate as migrationImpl, MIGRATIONS } from "../../migrations"
import { Ctx } from "@budibase/types"
import {
  Ctx,
  FetchOldMigrationResponse,
  GetOldMigrationStatus,
  RunOldMigrationRequest,
} from "@budibase/types"
import {
  getAppMigrationVersion,
  getLatestEnabledMigrationId,
} from "../../appMigrations"

export async function migrate(ctx: Ctx) {
export async function migrate(ctx: Ctx<RunOldMigrationRequest, void>) {
  const options = ctx.request.body
  // don't await as it can take a while, just return
  migrationImpl(options)
  ctx.status = 200
}

export async function fetchDefinitions(ctx: Ctx) {
export async function fetchDefinitions(
  ctx: Ctx<void, FetchOldMigrationResponse>
) {
  ctx.body = MIGRATIONS
  ctx.status = 200
}

export async function getMigrationStatus(ctx: Ctx) {
export async function getMigrationStatus(
  ctx: Ctx<void, GetOldMigrationStatus>
) {
  const appId = context.getAppId()

  if (!appId) {

@ -1,16 +1,7 @@
import { Ctx } from "@budibase/types"
import { Ctx, LogOpsRequest, ErrorOpsRequest } from "@budibase/types"
import { logging } from "@budibase/backend-core"

interface LogRequest {
  message: string
  data?: any
}

interface ErrorRequest {
  message: string
}

export async function log(ctx: Ctx<LogRequest>) {
export async function log(ctx: Ctx<LogOpsRequest, void>) {
  const body = ctx.request.body
  console.trace(body.message, body.data)
  console.debug(body.message, body.data)

@ -20,13 +11,13 @@ export async function log(ctx: Ctx<LogRequest>) {
  ctx.status = 204
}

export async function alert(ctx: Ctx<ErrorRequest>) {
export async function alert(ctx: Ctx<ErrorOpsRequest, void>) {
  const body = ctx.request.body
  logging.logAlert(body.message, new Error(body.message))
  ctx.status = 204
}

export async function error(ctx: Ctx<ErrorRequest>) {
export async function error(ctx: Ctx<ErrorOpsRequest, void>) {
  const body = ctx.request.body
  throw new Error(body.message)
}

@ -9,6 +9,8 @@ import {
  RemovePermissionRequest,
  RemovePermissionResponse,
  FetchResourcePermissionInfoResponse,
  FetchBuiltinPermissionsRequest,
  FetchPermissionLevelsRequest,
} from "@budibase/types"
import {
  CURRENTLY_SUPPORTED_LEVELS,

@ -19,11 +21,13 @@ import { PermissionUpdateType } from "../../sdk/app/permissions"

const SUPPORTED_LEVELS = CURRENTLY_SUPPORTED_LEVELS

export function fetchBuiltin(ctx: UserCtx) {
export function fetchBuiltin(
  ctx: UserCtx<void, FetchBuiltinPermissionsRequest>
) {
  ctx.body = Object.values(permissions.getBuiltinPermissions())
}

export function fetchLevels(ctx: UserCtx) {
export function fetchLevels(ctx: UserCtx<void, FetchPermissionLevelsRequest>) {
  // for now only provide the read/write perms externally
  ctx.body = SUPPORTED_LEVELS
}

@ -3,8 +3,12 @@ import {
  getPluginMetadata,
  extractTarball,
} from "../../../utilities/fileSystem"
import { KoaFile } from "@budibase/types"

export async function fileUpload(file: { name: string; path: string }) {
export async function fileUpload(file: KoaFile) {
  if (!file.name || !file.path) {
    throw new Error("File is not valid - cannot upload.")
  }
  if (!file.name.endsWith(".tar.gz")) {
    throw new Error("Plugin must be compressed into a gzipped tarball.")
  }

@ -2,26 +2,37 @@ import { npmUpload, urlUpload, githubUpload } from "./uploaders"
import { plugins as pluginCore } from "@budibase/backend-core"
import {
  PluginType,
  FileType,
  PluginSource,
  Ctx,
  CreatePluginRequest,
  CreatePluginResponse,
  UserCtx,
  UploadPluginRequest,
  Plugin,
  UploadPluginResponse,
  FetchPluginResponse,
  DeletePluginResponse,
} from "@budibase/types"
import env from "../../../environment"
import { clientAppSocket } from "../../../websockets"
import sdk from "../../../sdk"
import { sdk as pro } from "@budibase/pro"

export async function upload(ctx: any) {
  const plugins: FileType[] =
    ctx.request.files.file.length > 1
      ? Array.from(ctx.request.files.file)
      : [ctx.request.files.file]
export async function upload(
  ctx: UserCtx<UploadPluginRequest, UploadPluginResponse>
) {
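  // ctx.request.files.file is a single file or an array, depending on how many files were uploaded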
  const files = ctx.request.files
  const plugins =
    files && Array.isArray(files.file) && files.file.length > 1
      ? Array.from(files.file)
      : [files?.file]

  try {
    let docs = []
    let docs: Plugin[] = []
    // can do single or multiple plugins
    for (let plugin of plugins) {
      if (!plugin || Array.isArray(plugin)) {
        continue
      }
      const doc = await sdk.plugins.processUploaded(plugin, PluginSource.FILE)
      docs.push(doc)
    }

@ -37,7 +48,7 @@ export async function upload(ctx: any) {
}

export async function create(
  ctx: Ctx<CreatePluginRequest, CreatePluginResponse>
  ctx: UserCtx<CreatePluginRequest, CreatePluginResponse>
) {
  const { source, url, headers, githubToken } = ctx.request.body

@ -91,11 +102,11 @@ export async function create(
  }
}

export async function fetch(ctx: any) {
export async function fetch(ctx: UserCtx<void, FetchPluginResponse>) {
  ctx.body = await sdk.plugins.fetch()
}

export async function destroy(ctx: any) {
export async function destroy(ctx: UserCtx<void, DeletePluginResponse>) {
  const { pluginId } = ctx.params

  try {

@ -4,26 +4,38 @@ import { save as saveDatasource } from "../datasource"
import { RestImporter } from "./import"
import { invalidateCachedVariable } from "../../../threads/utils"
import env from "../../../environment"
import { events, context, utils, constants } from "@budibase/backend-core"
import { constants, context, events, utils } from "@budibase/backend-core"
import sdk from "../../../sdk"
import { QueryEvent, QueryEventParameters } from "../../../threads/definitions"
import {
  ConfigType,
  Query,
  UserCtx,
  SessionCookie,
  JsonFieldSubType,
  QueryResponse,
  QuerySchema,
  FieldType,
  CreateDatasourceRequest,
  Datasource,
  ExecuteQueryRequest,
  ExecuteQueryResponse,
  ExecuteV2QueryResponse,
  ExecuteV1QueryResponse,
  FetchQueriesResponse,
  FieldType,
  FindQueryResponse,
  ImportRestQueryRequest,
  ImportRestQueryResponse,
  JsonFieldSubType,
  PreviewQueryRequest,
  PreviewQueryResponse,
  Query,
  QueryResponse,
  QuerySchema,
  SaveQueryRequest,
  SaveQueryResponse,
  SessionCookie,
  SourceName,
  UserCtx,
  DeleteQueryResponse,
} from "@budibase/types"
import { ValidQueryNameRegex, utils as JsonUtils } from "@budibase/shared-core"
import { utils as JsonUtils, ValidQueryNameRegex } from "@budibase/shared-core"
import { findHBSBlocks } from "@budibase/string-templates"
import { ObjectId } from "mongodb"
import { merge } from "lodash"

const Runner = new Thread(ThreadType.QUERY, {
  timeoutMs: env.QUERY_THREAD_TIMEOUT,

@ -43,11 +55,13 @@ function validateQueryInputs(parameters: QueryEventParameters) {
  }
}

export async function fetch(ctx: UserCtx) {
export async function fetch(ctx: UserCtx<void, FetchQueriesResponse>) {
  ctx.body = await sdk.queries.fetch()
}

const _import = async (ctx: UserCtx) => {
const _import = async (
  ctx: UserCtx<ImportRestQueryRequest, ImportRestQueryResponse>
) => {
  const body = ctx.request.body
  const data = body.data

@ -58,9 +72,9 @@ const _import = async (ctx: UserCtx) => {
  if (!body.datasourceId) {
    // construct new datasource
    const info: any = await importer.getInfo()
    let datasource = {
    let datasource: Datasource = {
      type: "datasource",
      source: "REST",
      source: SourceName.REST,
      config: {
        url: info.url,
        defaultHeaders: [],

@ -69,8 +83,14 @@ const _import = async (ctx: UserCtx) => {
      name: info.name,
    }
    // save the datasource
    const datasourceCtx = { ...ctx }
    datasourceCtx.request.body.datasource = datasource
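    // note: lodash merge mutates its first argument, so this layers the datasource body onto the live ctx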
    const datasourceCtx: UserCtx<CreateDatasourceRequest> = merge(ctx, {
      request: {
        body: {
          datasource,
          tablesFilter: [],
        },
      },
    })
    await saveDatasource(datasourceCtx)
    datasourceId = datasourceCtx.body.datasource._id
  } else {

@ -88,7 +108,7 @@ const _import = async (ctx: UserCtx) => {
}
export { _import as import }

export async function save(ctx: UserCtx<Query, Query>) {
export async function save(ctx: UserCtx<SaveQueryRequest, SaveQueryResponse>) {
  const db = context.getAppDB()
  const query: Query = ctx.request.body

@ -119,10 +139,9 @@ export async function save(ctx: UserCtx<Query, Query>) {
  query._rev = response.rev

  ctx.body = query
  ctx.message = `Query ${query.name} saved successfully.`
}

export async function find(ctx: UserCtx) {
export async function find(ctx: UserCtx<void, FindQueryResponse>) {
  const queryId = ctx.params.queryId
  ctx.body = await sdk.queries.find(queryId)
}

@ -335,7 +354,7 @@ export async function preview(
async function execute(
  ctx: UserCtx<
    ExecuteQueryRequest,
    ExecuteQueryResponse | Record<string, any>[]
    ExecuteV2QueryResponse | ExecuteV1QueryResponse
  >,
  opts: any = { rowsOnly: false, isAutomation: false }
) {

@ -390,19 +409,21 @@ async function execute(
}

export async function executeV1(
  ctx: UserCtx<ExecuteQueryRequest, Record<string, any>[]>
  ctx: UserCtx<ExecuteQueryRequest, ExecuteV1QueryResponse>
) {
  return execute(ctx, { rowsOnly: true, isAutomation: false })
}

export async function executeV2(
  ctx: UserCtx<
    ExecuteQueryRequest,
    ExecuteQueryResponse | Record<string, any>[]
  >,
  { isAutomation }: { isAutomation?: boolean } = {}
  ctx: UserCtx<ExecuteQueryRequest, ExecuteV2QueryResponse>
) {
  return execute(ctx, { rowsOnly: false, isAutomation })
  return execute(ctx, { rowsOnly: false })
}

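// identical to executeV2, but flags the run as automation-initiated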
export async function executeV2AsAutomation(
  ctx: UserCtx<ExecuteQueryRequest, ExecuteV2QueryResponse>
) {
  return execute(ctx, { rowsOnly: false, isAutomation: true })
}

const removeDynamicVariables = async (queryId: string) => {

@ -426,14 +447,14 @@ const removeDynamicVariables = async (queryId: string) => {
  }
}

export async function destroy(ctx: UserCtx) {
export async function destroy(ctx: UserCtx<void, DeleteQueryResponse>) {
  const db = context.getAppDB()
  const queryId = ctx.params.queryId as string
  await removeDynamicVariables(queryId)
  const query = await db.get<Query>(queryId)
  const datasource = await sdk.datasources.get(query.datasourceId)
  await db.remove(ctx.params.queryId, ctx.params.revId)
  ctx.message = `Query deleted.`
  ctx.body = { message: `Query deleted.` }
  ctx.status = 200
  await events.query.deleted(datasource, query)
}

@ -9,7 +9,7 @@ import { getUserMetadataParams, InternalTables } from "../../db/utils"
import {
  AccessibleRolesResponse,
  Database,
  DestroyRoleResponse,
  DeleteRoleResponse,
  FetchRolesResponse,
  FindRoleResponse,
  Role,

@ -199,7 +199,7 @@ export async function save(ctx: UserCtx<SaveRoleRequest, SaveRoleResponse>) {
  builderSocket?.emitRoleUpdate(ctx, role)
}

export async function destroy(ctx: UserCtx<void, DestroyRoleResponse>) {
export async function destroy(ctx: UserCtx<void, DeleteRoleResponse>) {
  const db = context.getAppDB()
  let roleId = ctx.params.roleId as string
  if (roles.isBuiltin(roleId)) {

@ -1,11 +1,17 @@
import { getRoutingInfo } from "../../utilities/routing"
import { roles } from "@budibase/backend-core"
import { UserCtx } from "@budibase/types"
import {
  FetchClientScreenRoutingResponse,
  FetchScreenRoutingResponse,
  ScreenRoutingJson,
  UserCtx,
} from "@budibase/types"

const URL_SEPARATOR = "/"

class Routing {
  json: any
  json: ScreenRoutingJson

  constructor() {
    this.json = {}
  }

@ -43,7 +49,7 @@ class Routing {
 * @returns The routing structure, this is the full structure designed for use in the builder,
 * if the client routing is required then the updateRoutingStructureForUserRole should be used.
 */
async function getRoutingStructure() {
async function getRoutingStructure(): Promise<{ routes: ScreenRoutingJson }> {
  const screenRoutes = await getRoutingInfo()
  const routing = new Routing()

@ -56,11 +62,13 @@ async function getRoutingStructure() {
  return { routes: routing.json }
}

export async function fetch(ctx: UserCtx) {
export async function fetch(ctx: UserCtx<void, FetchScreenRoutingResponse>) {
  ctx.body = await getRoutingStructure()
}

export async function clientFetch(ctx: UserCtx) {
export async function clientFetch(
  ctx: UserCtx<void, FetchClientScreenRoutingResponse>
) {
  const routing = await getRoutingStructure()
  let roleId = ctx.user?.role?._id
  const roleIds = roleId ? await roles.getUserRoleIdHierarchy(roleId) : []

@ -11,23 +11,30 @@ import {
  DeleteRow,
  DeleteRowRequest,
  DeleteRows,
  DownloadAttachmentResponse,
  EventType,
  ExportRowsRequest,
  ExportRowsResponse,
  FetchEnrichedRowResponse,
  FetchRowsResponse,
  FieldType,
  GetRowResponse,
  FindRowResponse,
  isRelationshipField,
  PatchRowRequest,
  PatchRowResponse,
  RequiredKeys,
  Row,
  RowAttachment,
  RowSearchParams,
  SaveRowRequest,
  SaveRowResponse,
  SearchFilters,
  SearchRowRequest,
  SearchRowResponse,
  Table,
  UserCtx,
  ValidateResponse,
  ValidateRowRequest,
  ValidateRowResponse,
} from "@budibase/types"
import * as utils from "./utils"
import { gridSocket } from "../../../websockets"

@ -82,7 +89,7 @@ export async function patch(
  }
}

export const save = async (ctx: UserCtx<Row, Row>) => {
export const save = async (ctx: UserCtx<SaveRowRequest, SaveRowResponse>) => {
  const { tableId, viewId } = utils.getSourceId(ctx)
  const sourceId = viewId || tableId

@ -130,12 +137,12 @@ export async function fetchLegacyView(ctx: any) {
  })
}

export async function fetch(ctx: any) {
export async function fetch(ctx: UserCtx<void, FetchRowsResponse>) {
  const { tableId } = utils.getSourceId(ctx)
  ctx.body = await sdk.rows.fetch(tableId)
}

export async function find(ctx: UserCtx<void, GetRowResponse>) {
export async function find(ctx: UserCtx<void, FindRowResponse>) {
  const { tableId, viewId } = utils.getSourceId(ctx)
  const sourceId = viewId || tableId
  const rowId = ctx.params.rowId

@ -239,7 +246,8 @@ export async function search(ctx: Ctx<SearchRowRequest, SearchRowResponse>) {

  await context.ensureSnippetContext(true)

  let { query } = ctx.request.body
  const searchRequest = ctx.request.body
  let { query } = searchRequest
  if (query) {
    const allTables = await sdk.tables.getAllTables()
    query = replaceTableNamesInFilters(tableId, query, allTables)

@ -249,11 +257,22 @@ export async function search(ctx: Ctx<SearchRowRequest, SearchRowResponse>) {
    user: sdk.users.getUserContextBindings(ctx.user),
  })

  const searchParams: RowSearchParams = {
    ...ctx.request.body,
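  // RequiredKeys forces every RowSearchParams property to be listed explicitly (even as undefined), so options cannot be dropped silently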
  const searchParams: RequiredKeys<RowSearchParams> = {
    query: enrichedQuery,
    tableId,
    viewId,
    bookmark: searchRequest.bookmark ?? undefined,
    paginate: searchRequest.paginate,
    limit: searchRequest.limit,
    sort: searchRequest.sort ?? undefined,
    sortOrder: searchRequest.sortOrder,
    sortType: searchRequest.sortType ?? undefined,
    countRows: searchRequest.countRows,
    version: searchRequest.version,
    disableEscaping: searchRequest.disableEscaping,
    fields: undefined,
    indexer: undefined,
    rows: undefined,
  }

  ctx.status = 200

@ -301,7 +320,9 @@ function replaceTableNamesInFilters(
  })
}

export async function validate(ctx: Ctx<Row, ValidateResponse>) {
export async function validate(
  ctx: Ctx<ValidateRowRequest, ValidateRowResponse>
) {
  const source = await utils.getSource(ctx)
  const table = await utils.getTableFromSource(source)
  // external tables are hard to validate currently

@ -315,7 +336,9 @@ export async function validate(ctx: Ctx<Row, ValidateResponse>) {
  }
}

export async function fetchEnrichedRow(ctx: UserCtx<void, Row>) {
export async function fetchEnrichedRow(
  ctx: UserCtx<void, FetchEnrichedRowResponse>
) {
  const { tableId } = utils.getSourceId(ctx)
  ctx.body = await pickApi(tableId).fetchEnrichedRow(ctx)
}

@ -353,7 +376,9 @@ export const exportRows = async (
  ctx.body = apiFileReturn(content)
}

export async function downloadAttachment(ctx: UserCtx) {
export async function downloadAttachment(
  ctx: UserCtx<void, DownloadAttachmentResponse>
) {
  const { columnName } = ctx.params

  const { tableId } = utils.getSourceId(ctx)

@ -15,10 +15,21 @@ import {
} from "@budibase/types"
import * as linkRows from "../../../db/linkedRows"
import isEqual from "lodash/isEqual"
import { cloneDeep } from "lodash/fp"
import { cloneDeep, merge } from "lodash/fp"
import sdk from "../../../sdk"
import * as pro from "@budibase/pro"

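// the lodash/fp variant of merge is non-mutating, so the result must be captured and undefined fields stripped manually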
function mergeRows(row1: Row, row2: Row) {
  const merged = merge(row1, row2)
  // make sure any specifically undefined fields are removed
  for (const key of Object.keys(row2)) {
    if (row2[key] === undefined) {
      delete merged[key]
    }
  }
  return merged
}

/**
 * This function runs through a list of enriched rows, looks at the rows which
 * are related and then checks if they need the state of their formulas

@ -162,9 +173,14 @@ export async function finaliseRow(
    })
  }

  const response = await db.put(row)
  // for response, calculate the formulas for the enriched row
  enrichedRow._rev = response.rev
  await db.put(row)
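  // re-read the stored row so the response reflects exactly what was persisted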
  const retrieved = await db.tryGet<Row>(row._id)
  if (!retrieved) {
    throw new Error(`Unable to retrieve row ${row._id} after saving.`)
  }

  delete enrichedRow._rev
  enrichedRow = mergeRows(retrieved, enrichedRow)
  enrichedRow = await processFormulas(table, enrichedRow, {
    dynamic: false,
  })

@ -175,7 +175,7 @@ export async function enrichArrayContext(
}

export async function enrichSearchContext(
  fields: Record<string, any>,
  fields: Record<string, any> | undefined,
  inputs = {},
  helpers = true
): Promise<Record<string, any>> {

@ -29,19 +29,20 @@ export async function searchView(

  await context.ensureSnippetContext(true)

  const searchOptions: RequiredKeys<SearchViewRowRequest> &
    RequiredKeys<
      Pick<RowSearchParams, "tableId" | "viewId" | "query" | "fields">
    > = {
  const searchOptions: RequiredKeys<RowSearchParams> = {
    tableId: view.tableId,
    viewId: view.id,
    query: body.query,
    query: body.query || {},
    fields: viewFields,
    ...getSortOptions(body, view),
    limit: body.limit,
    bookmark: body.bookmark,
    bookmark: body.bookmark ?? undefined,
    paginate: body.paginate,
    countRows: body.countRows,
    version: undefined,
    disableEscaping: undefined,
    indexer: undefined,
    rows: undefined,
  }

  const result = await sdk.rows.search(searchOptions, {

@ -56,7 +57,7 @@ function getSortOptions(request: SearchViewRowRequest, view: ViewV2) {
    return {
      sort: request.sort,
      sortOrder: request.sortOrder,
      sortType: request.sortType,
      sortType: request.sortType ?? undefined,
    }
  }
  if (view.sort) {

@ -10,13 +10,16 @@ import { updateAppPackage } from "./application"
import {
  Plugin,
  ScreenProps,
  BBContext,
  Screen,
  UserCtx,
  FetchScreenResponse,
  SaveScreenRequest,
  SaveScreenResponse,
  DeleteScreenResponse,
} from "@budibase/types"
import { builderSocket } from "../../websockets"

export async function fetch(ctx: BBContext) {
export async function fetch(ctx: UserCtx<void, FetchScreenResponse>) {
  const db = context.getAppDB()

  const screens = (

@ -37,7 +40,9 @@ export async function fetch(ctx: BBContext) {
  )
}

export async function save(ctx: UserCtx<Screen, Screen>) {
export async function save(
  ctx: UserCtx<SaveScreenRequest, SaveScreenResponse>
) {
  const db = context.getAppDB()
  let screen = ctx.request.body

@ -107,7 +112,7 @@ export async function save(ctx: UserCtx<Screen, Screen>) {
  builderSocket?.emitScreenUpdate(ctx, savedScreen)
}

export async function destroy(ctx: BBContext) {
export async function destroy(ctx: UserCtx<void, DeleteScreenResponse>) {
  const db = context.getAppDB()
  const id = ctx.params.screenId
  const screen = await db.get<Screen>(id)

@ -14,7 +14,3 @@ export async function execute(ctx: Ctx) {
    throw err
  }
}

export async function save(ctx: Ctx) {
  ctx.throw(501, "Not currently implemented")
}

@ -27,7 +27,13 @@ import {
  Ctx,
  DocumentType,
  Feature,
  GetSignedUploadUrlRequest,
  GetSignedUploadUrlResponse,
  ProcessAttachmentResponse,
  ServeAppResponse,
  ServeBuilderPreviewResponse,
  ServeClientLibraryResponse,
  ToggleBetaFeatureResponse,
  UserCtx,
} from "@budibase/types"
import {

@ -38,7 +44,9 @@ import {
import send from "koa-send"
import { getThemeVariables } from "../../../constants/themes"

export const toggleBetaUiFeature = async function (ctx: Ctx) {
export const toggleBetaUiFeature = async function (
  ctx: Ctx<void, ToggleBetaFeatureResponse>
) {
  const cookieName = `beta:${ctx.params.feature}`

  if (ctx.cookies.get(cookieName)) {

@ -66,13 +74,13 @@ export const toggleBetaUiFeature = async function (ctx: Ctx) {
  }
}

export const serveBuilder = async function (ctx: Ctx) {
export const serveBuilder = async function (ctx: Ctx<void, void>) {
  const builderPath = join(TOP_LEVEL_PATH, "builder")
  await send(ctx, ctx.file, { root: builderPath })
}

export const uploadFile = async function (
  ctx: Ctx<{}, ProcessAttachmentResponse>
  ctx: Ctx<void, ProcessAttachmentResponse>
) {
  const file = ctx.request?.files?.file
  if (!file) {

@ -144,7 +152,7 @@ const requiresMigration = async (ctx: Ctx) => {
  return latestMigrationApplied !== latestMigration
}

export const serveApp = async function (ctx: UserCtx) {
export const serveApp = async function (ctx: UserCtx<void, ServeAppResponse>) {
  if (ctx.url.includes("apple-touch-icon.png")) {
    ctx.redirect("/builder/bblogo.png")
    return

@ -249,7 +257,9 @@ export const serveApp = async function (ctx: UserCtx) {
  }
}

export const serveBuilderPreview = async function (ctx: Ctx) {
export const serveBuilderPreview = async function (
  ctx: Ctx<void, ServeBuilderPreviewResponse>
) {
  const db = context.getAppDB({ skip_setup: true })
  const appInfo = await db.get<App>(DocumentType.APP_METADATA)

@ -268,7 +278,9 @@ export const serveBuilderPreview = async function (ctx: Ctx) {
  }
}

export const serveClientLibrary = async function (ctx: Ctx) {
export const serveClientLibrary = async function (
  ctx: Ctx<void, ServeClientLibraryResponse>
) {
  const version = ctx.request.query.version

  if (Array.isArray(version)) {

@ -297,7 +309,9 @@ export const serveClientLibrary = async function (ctx: Ctx) {
  }
}

export const getSignedUploadURL = async function (ctx: Ctx) {
export const getSignedUploadURL = async function (
  ctx: Ctx<GetSignedUploadUrlRequest, GetSignedUploadUrlResponse>
) {
  // Ensure datasource is valid
  let datasource
  try {

@ -19,17 +19,18 @@ import {
  EventType,
  FetchTablesResponse,
  FieldType,
  MigrateRequest,
  MigrateResponse,
  MigrateTableRequest,
  MigrateTableResponse,
  SaveTableRequest,
  SaveTableResponse,
  Table,
  TableResponse,
  FindTableResponse,
  TableSourceType,
  UserCtx,
  ValidateNewTableImportRequest,
  ValidateTableImportRequest,
  ValidateTableImportResponse,
  DeleteTableResponse,
} from "@budibase/types"
import sdk from "../../../sdk"
import { jsonFromCsvString } from "../../../utilities/csv"

@ -94,7 +95,7 @@ export async function fetch(ctx: UserCtx<void, FetchTablesResponse>) {
  ctx.body = result
}

export async function find(ctx: UserCtx<void, TableResponse>) {
export async function find(ctx: UserCtx<void, FindTableResponse>) {
  const tableId = ctx.params.tableId
  const table = await sdk.tables.getTable(tableId)

@ -137,7 +138,7 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
  builderSocket?.emitTableUpdate(ctx, cloneDeep(savedTable))
}

export async function destroy(ctx: UserCtx) {
export async function destroy(ctx: UserCtx<void, DeleteTableResponse>) {
  const appId = ctx.appId
  const tableId = ctx.params.tableId
  await sdk.rowActions.deleteAll(tableId)

@ -223,7 +224,9 @@ export async function validateExistingTableImport(
  }
}

export async function migrate(ctx: UserCtx<MigrateRequest, MigrateResponse>) {
export async function migrate(
  ctx: UserCtx<MigrateTableRequest, MigrateTableResponse>
) {
  const { oldColumn, newColumn } = ctx.request.body
  let tableId = ctx.params.tableId as string
  const table = await sdk.tables.getTable(tableId)

@ -1,13 +1,17 @@
import nodeFetch from "node-fetch"
import { downloadTemplate as dlTemplate } from "../../utilities/fileSystem"
import env from "../../environment"
import { BBContext } from "@budibase/types"
import {
  DownloadTemplateResponse,
  FetchTemplateResponse,
  UserCtx,
} from "@budibase/types"

// development flag, can be used to test against templates exported locally
const DEFAULT_TEMPLATES_BUCKET =
  "prod-budi-templates.s3-eu-west-1.amazonaws.com"

export async function fetch(ctx: BBContext) {
export async function fetch(ctx: UserCtx<void, FetchTemplateResponse>) {
  let type = env.TEMPLATE_REPOSITORY
  let response,
    error = false

@ -32,7 +36,9 @@ export async function fetch(ctx: BBContext) {

// can't currently test this, have to ignore from coverage
/* istanbul ignore next */
export async function downloadTemplate(ctx: BBContext) {
export async function downloadTemplate(
  ctx: UserCtx<void, DownloadTemplateResponse>
) {
  const { type, name } = ctx.params

  await dlTemplate(type, name)

@ -7,19 +7,24 @@ import {
  FetchUserMetadataResponse,
  FindUserMetadataResponse,
  Flags,
  SetFlagRequest,
  SetUserFlagRequest,
  UpdateSelfMetadataRequest,
  UpdateSelfMetadataResponse,
  UpdateUserMetadataResponse,
  UpdateUserMetadataRequest,
  UserCtx,
  UserMetadata,
  DeleteUserMetadataResponse,
  SetUserFlagResponse,
  GetUserFlagsResponse,
} from "@budibase/types"
import sdk from "../../sdk"
import { DocumentInsertResponse } from "@budibase/nano"

export async function fetchMetadata(ctx: Ctx<void, FetchUserMetadataResponse>) {
  ctx.body = await sdk.users.fetchMetadata()
}

export async function updateSelfMetadata(
  ctx: UserCtx<UserMetadata, DocumentInsertResponse>
  ctx: UserCtx<UpdateSelfMetadataRequest, UpdateSelfMetadataResponse>
) {
  // overwrite the ID with current users
  ctx.request.body._id = ctx.user?._id

@ -31,7 +36,7 @@ export async function updateSelfMetadata(
}

export async function updateMetadata(
  ctx: UserCtx<UserMetadata, DocumentInsertResponse>
  ctx: UserCtx<UpdateUserMetadataRequest, UpdateUserMetadataResponse>
) {
  const db = context.getAppDB()
  const user = ctx.request.body

@ -44,7 +49,9 @@ export async function updateMetadata(
  ctx.body = await db.put(metadata)
}

export async function destroyMetadata(ctx: UserCtx<void, { message: string }>) {
export async function destroyMetadata(
  ctx: UserCtx<void, DeleteUserMetadataResponse>
) {
  const db = context.getAppDB()
  try {
    const dbUser = await sdk.users.get(ctx.params.id)

@ -64,7 +71,7 @@ export async function findMetadata(
}

export async function setFlag(
  ctx: UserCtx<SetFlagRequest, { message: string }>
  ctx: UserCtx<SetUserFlagRequest, SetUserFlagResponse>
) {
  const userId = ctx.user?._id
  const { flag, value } = ctx.request.body

@ -84,7 +91,7 @@ export async function setFlag(
  ctx.body = { message: "Flag set successfully" }
}

export async function getFlags(ctx: UserCtx<void, Flags>) {
export async function getFlags(ctx: UserCtx<void, GetUserFlagsResponse>) {
  const userId = ctx.user?._id
  const docId = generateUserFlagID(userId!)
  const db = context.getAppDB()

@ -4,7 +4,6 @@ import {
  Ctx,
  RequiredKeys,
  UpdateViewRequest,
  ViewResponse,
  ViewResponseEnriched,
  ViewV2,
  BasicViewFieldMetadata,

@ -15,6 +14,8 @@ import {
  ViewFetchResponseEnriched,
  CountDistinctCalculationFieldMetadata,
  CountCalculationFieldMetadata,
  CreateViewResponse,
  UpdateViewResponse,
} from "@budibase/types"
import { builderSocket, gridSocket } from "../../../websockets"
import { helpers } from "@budibase/shared-core"

@ -132,7 +133,7 @@ export async function fetch(ctx: Ctx<void, ViewFetchResponseEnriched>) {
  }
}

export async function create(ctx: Ctx<CreateViewRequest, ViewResponse>) {
export async function create(ctx: Ctx<CreateViewRequest, CreateViewResponse>) {
  const view = ctx.request.body
  const { tableId } = view

@ -159,7 +160,7 @@ export async function create(ctx: Ctx<CreateViewRequest, ViewResponse>) {
  gridSocket?.emitViewUpdate(ctx, result)
}

export async function update(ctx: Ctx<UpdateViewRequest, ViewResponse>) {
export async function update(ctx: Ctx<UpdateViewRequest, UpdateViewResponse>) {
  const view = ctx.request.body

  if (view.version !== 2) {

@ -196,7 +197,7 @@ export async function update(ctx: Ctx<UpdateViewRequest, ViewResponse>) {
  gridSocket?.emitViewUpdate(ctx, result)
}

export async function remove(ctx: Ctx) {
export async function remove(ctx: Ctx<void, void>) {
  const { viewId } = ctx.params

  const view = await sdk.views.remove(viewId)

@ -4,9 +4,17 @@ import { db as dbCore, context } from "@budibase/backend-core"
import {
  Webhook,
  WebhookActionType,
  BBContext,
  Ctx,
  Automation,
  AutomationActionStepId,
  FetchWebhooksResponse,
  SaveWebhookResponse,
  SaveWebhookRequest,
  DeleteWebhookResponse,
  BuildWebhookSchemaRequest,
  BuildWebhookSchemaResponse,
  TriggerWebhookRequest,
  TriggerWebhookResponse,
} from "@budibase/types"
import sdk from "../../sdk"
import * as pro from "@budibase/pro"

@ -16,17 +24,17 @@ const validate = require("jsonschema").validate

const AUTOMATION_DESCRIPTION = "Generated from Webhook Schema"

export async function fetch(ctx: BBContext) {
export async function fetch(ctx: Ctx<void, FetchWebhooksResponse>) {
  const db = context.getAppDB()
  const response = await db.allDocs(
  const response = await db.allDocs<Webhook>(
    getWebhookParams(null, {
      include_docs: true,
    })
  )
  ctx.body = response.rows.map((row: any) => row.doc)
  ctx.body = response.rows.filter(row => row.doc).map(row => row.doc!)
}

export async function save(ctx: BBContext) {
export async function save(ctx: Ctx<SaveWebhookRequest, SaveWebhookResponse>) {
  const webhook = await sdk.automations.webhook.save(ctx.request.body)
  ctx.body = {
    message: "Webhook created successfully",

@ -34,21 +42,23 @@ export async function save(ctx: BBContext) {
  }
}

export async function destroy(ctx: BBContext) {
export async function destroy(ctx: Ctx<void, DeleteWebhookResponse>) {
  ctx.body = await sdk.automations.webhook.destroy(
    ctx.params.id,
    ctx.params.rev
  )
}

export async function buildSchema(ctx: BBContext) {
export async function buildSchema(
  ctx: Ctx<BuildWebhookSchemaRequest, BuildWebhookSchemaResponse>
) {
  await context.doInAppContext(ctx.params.instance, async () => {
    const db = context.getAppDB()
    const webhook = (await db.get(ctx.params.id)) as Webhook
    const webhook = await db.get<Webhook>(ctx.params.id)
    webhook.bodySchema = toJsonSchema(ctx.request.body)
    // update the automation outputs
    if (webhook.action.type === WebhookActionType.AUTOMATION) {
      let automation = (await db.get(webhook.action.target)) as Automation
      let automation = await db.get<Automation>(webhook.action.target)
      const autoOutputs = automation.definition.trigger.schema.outputs
      let properties = webhook.bodySchema.properties
      // reset webhook outputs

@ -67,56 +77,66 @@ export async function buildSchema(ctx: BBContext) {
  })
}

export async function trigger(ctx: BBContext) {
export async function trigger(
  ctx: Ctx<TriggerWebhookRequest, TriggerWebhookResponse>
) {
  const prodAppId = dbCore.getProdAppID(ctx.params.instance)
  const appNotDeployed = () => {
    ctx.body = {
      message: "Application not deployed yet.",
    }
  }
  await context.doInAppContext(prodAppId, async () => {
    try {
      const db = context.getAppDB()
      const webhook = (await db.get(ctx.params.id)) as Webhook
      // validate against the schema
      if (webhook.bodySchema) {
        validate(ctx.request.body, webhook.bodySchema)
      }
      const target = await db.get<Automation>(webhook.action.target)
      if (webhook.action.type === WebhookActionType.AUTOMATION) {
        // trigger with both the pure request and then expand it
        // in case the user has produced a schema to bind to
        let hasCollectStep = sdk.automations.utils.checkForCollectStep(target)
    const db = context.getAppDB()
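    // tryGet replaces the old try/catch on 404s: a missing webhook or automation means the app is not deployed yet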
    const webhook = await db.tryGet<Webhook>(ctx.params.id)
    if (!webhook) {
      return appNotDeployed()
    }
    // validate against the schema
    if (webhook.bodySchema) {
      validate(ctx.request.body, webhook.bodySchema)
    }
    const target = await db.tryGet<Automation>(webhook.action.target)
    if (!target) {
      return appNotDeployed()
    }
    if (webhook.action.type === WebhookActionType.AUTOMATION) {
      // trigger with both the pure request and then expand it
      // in case the user has produced a schema to bind to
      let hasCollectStep = sdk.automations.utils.checkForCollectStep(target)

        if (hasCollectStep && (await pro.features.isSyncAutomationsEnabled())) {
          const response = await triggers.externalTrigger(
            target,
            {
              body: ctx.request.body,
      if (hasCollectStep && (await pro.features.isSyncAutomationsEnabled())) {
        const response = await triggers.externalTrigger(
          target,
          {
            fields: {
              ...ctx.request.body,
              appId: prodAppId,
              body: ctx.request.body,
            },
            { getResponses: true }
          )
            appId: prodAppId,
          },
          { getResponses: true }
        )

        if (triggers.isAutomationResults(response)) {
          let collectedValue = response.steps.find(
            (step: any) => step.stepId === AutomationActionStepId.COLLECT
          )

          ctx.status = 200
          ctx.body = collectedValue.outputs
          ctx.body = collectedValue?.outputs
        } else {
          await triggers.externalTrigger(target, {
            body: ctx.request.body,
            ...ctx.request.body,
            appId: prodAppId,
          })
          ctx.status = 200
          ctx.body = {
            message: "Webhook trigger fired successfully",
          }
          ctx.throw(400, "Automation did not have a collect block.")
        }
      }
    } catch (err: any) {
      if (err.status === 404) {
        ctx.status = 200
      } else {
        await triggers.externalTrigger(target, {
          fields: {
            ...ctx.request.body,
            body: ctx.request.body,
          },
          appId: prodAppId,
        })
        ctx.body = {
          message: "Application not deployed yet.",
          message: "Webhook trigger fired successfully",
        }
      }
    }

@ -58,12 +58,9 @@ if (apiEnabled()) {
      })
    )
    .use(pro.licensing())
    // @ts-ignore
    .use(currentApp)
    .use(auth.auditLog)
    // @ts-ignore
    .use(migrations)
    // @ts-ignore
    .use(cleanup)

  // authenticated routes

@ -56,7 +56,7 @@ router
    "/api/v2/queries/:queryId",
    paramResource("queryId"),
    authorized(PermissionType.QUERY, PermissionLevel.WRITE),
    queryController.executeV2 as any
    queryController.executeV2
  )

export default router

@ -5,6 +5,8 @@ import { paramResource, paramSubResource } from "../../middleware/resourceId"
import { permissions } from "@budibase/backend-core"
import { internalSearchValidator } from "./utils/validators"
import trimViewRowInfo from "../../middleware/trimViewRowInfo"
import { validateBody } from "../../middleware/zod-validator"
import { searchRowRequestValidator } from "@budibase/types"

const { PermissionType, PermissionLevel } = permissions

@ -32,6 +34,7 @@ router
  .post(
    "/api/:sourceId/search",
    internalSearchValidator(),
    validateBody(searchRowRequestValidator),
    paramResource("sourceId"),
    authorized(PermissionType.TABLE, PermissionLevel.READ),
    rowController.search

@ -87,6 +90,7 @@ router
router.post(
  "/api/v2/views/:viewId/search",
  internalSearchValidator(),
  validateBody(searchRowRequestValidator),
  authorizedResource(PermissionType.VIEW, PermissionLevel.READ, "viewId"),
  rowController.views.searchView
)

@ -1,10 +0,0 @@
import Router from "@koa/router"
import * as controller from "../controllers/script"
import authorized from "../../middleware/authorized"
import { permissions } from "@budibase/backend-core"

const router: Router = new Router()

router.post("/api/script", authorized(permissions.BUILDER), controller.save)

export default router

@ -169,331 +169,521 @@ const descriptions = datasourceDescribe({
})

if (descriptions.length) {
  describe.each(descriptions)("$dbName", ({ config, dsProvider }) => {
    let datasource: Datasource
    let rawDatasource: Datasource
    let client: Knex
  describe.each(descriptions)(
    "$dbName",
    ({ config, dsProvider, isOracle, isMSSQL }) => {
      let datasource: Datasource
      let rawDatasource: Datasource
      let client: Knex

    beforeEach(async () => {
      const ds = await dsProvider()
      rawDatasource = ds.rawDatasource!
      datasource = ds.datasource!
      client = ds.client!
      beforeEach(async () => {
        const ds = await dsProvider()
        rawDatasource = ds.rawDatasource!
        datasource = ds.datasource!
        client = ds.client!

      jest.clearAllMocks()
      nock.cleanAll()
    })

    describe("get", () => {
      it("should be able to get a datasource", async () => {
        const ds = await config.api.datasource.get(datasource._id!)
        expect(ds).toEqual({
          config: expect.any(Object),
          plus: datasource.plus,
          source: datasource.source,
          isSQL: true,
          type: "datasource_plus",
          _id: datasource._id,
          _rev: expect.any(String),
          createdAt: expect.any(String),
          updatedAt: expect.any(String),
        })
        jest.clearAllMocks()
        nock.cleanAll()
      })

      it("should not return database password", async () => {
        const ds = await config.api.datasource.get(datasource._id!)
        expect(ds.config!.password).toBe("--secret-value--")
      })
    })

    describe("list", () => {
      it("returns all the datasources", async () => {
        const datasources = await config.api.datasource.fetch()
        expect(datasources).toContainEqual(expect.objectContaining(datasource))
      })
    })

    describe("put", () => {
      it("should update an existing datasource", async () => {
        const newName = generator.guid()
        datasource.name = newName
        const updatedDs = await config.api.datasource.update(datasource)
        expect(updatedDs.name).toEqual(newName)
        expect(events.datasource.updated).toHaveBeenCalledTimes(1)
      })

      it("should not overwrite database password with --secret-value--", async () => {
        const password = await context.doInAppContext(
          config.getAppId(),
          async () => {
            const ds = await sdk.datasources.get(datasource._id!)
            return ds.config!.password
          }
        )

        expect(password).not.toBe("--secret-value--")

        const ds = await config.api.datasource.get(datasource._id!)
        expect(ds.config!.password).toBe("--secret-value--")

        await config.api.datasource.update(
          await config.api.datasource.get(datasource._id!)
        )

        const newPassword = await context.doInAppContext(
          config.getAppId(),
          async () => {
            const ds = await sdk.datasources.get(datasource._id!)
            return ds.config!.password
          }
        )

        expect(newPassword).not.toBe("--secret-value--")
        expect(newPassword).toBe(password)
      })
    })

    describe("destroy", () => {
      it("deletes queries for the datasource after deletion and returns a success message", async () => {
        await config.api.query.save({
          datasourceId: datasource._id!,
          name: "Test Query",
          parameters: [],
          fields: {},
          schema: {},
          queryVerb: "read",
          transformer: null,
          readable: true,
        })

        await config.api.datasource.delete(datasource)
        const datasources = await config.api.datasource.fetch()
        expect(datasources).not.toContainEqual(
          expect.objectContaining(datasource)
        )
        expect(events.datasource.deleted).toHaveBeenCalledTimes(1)
      })
    })

    describe("schema", () => {
      it("fetching schema will not drop tables or columns", async () => {
        const datasourceId = datasource!._id!

        const simpleTable = await config.api.table.save(
          tableForDatasource(datasource, {
            name: "simple",
            schema: {
              name: {
                name: "name",
                type: FieldType.STRING,
              },
            },
      describe("get", () => {
        it("should be able to get a datasource", async () => {
          const ds = await config.api.datasource.get(datasource._id!)
          expect(ds).toEqual({
            config: expect.any(Object),
            plus: datasource.plus,
            source: datasource.source,
            isSQL: true,
            type: "datasource_plus",
            _id: datasource._id,
            _rev: expect.any(String),
            createdAt: expect.any(String),
            updatedAt: expect.any(String),
          })
        )

        const stringName = "string"
        const fullSchema: {
          [type in SupportedSqlTypes]: FieldSchema & { type: type }
        } = {
          [FieldType.STRING]: {
            name: stringName,
            type: FieldType.STRING,
          },
          [FieldType.LONGFORM]: {
            name: "longform",
            type: FieldType.LONGFORM,
          },
          [FieldType.OPTIONS]: {
            name: "options",
            type: FieldType.OPTIONS,
            constraints: {
              presence: {
                allowEmpty: false,
              },
              inclusion: [],
            },
          },
          [FieldType.NUMBER]: {
            name: "number",
            type: FieldType.NUMBER,
          },
          [FieldType.BOOLEAN]: {
            name: "boolean",
            type: FieldType.BOOLEAN,
          },
          [FieldType.ARRAY]: {
            name: "array",
            type: FieldType.ARRAY,
            constraints: {
              type: JsonFieldSubType.ARRAY,
              inclusion: [],
            },
          },
          [FieldType.DATETIME]: {
            name: "datetime",
            type: FieldType.DATETIME,
            dateOnly: true,
            timeOnly: false,
          },
          [FieldType.LINK]: {
            name: "link",
            type: FieldType.LINK,
            tableId: simpleTable._id!,
            relationshipType: RelationshipType.ONE_TO_MANY,
            fieldName: "link",
          },
          [FieldType.FORMULA]: {
            name: "formula",
            type: FieldType.FORMULA,
            formula: "any formula",
          },
          [FieldType.BARCODEQR]: {
            name: "barcodeqr",
            type: FieldType.BARCODEQR,
          },
          [FieldType.BIGINT]: {
            name: "bigint",
            type: FieldType.BIGINT,
          },
          [FieldType.BB_REFERENCE]: {
            name: "bb_reference",
            type: FieldType.BB_REFERENCE,
            subtype: BBReferenceFieldSubType.USER,
          },
          [FieldType.BB_REFERENCE_SINGLE]: {
            name: "bb_reference_single",
            type: FieldType.BB_REFERENCE_SINGLE,
            subtype: BBReferenceFieldSubType.USER,
          },
        }

        await config.api.table.save(
          tableForDatasource(datasource, {
            name: "full",
            schema: fullSchema,
          })
        )

        const persisted = await config.api.datasource.get(datasourceId)
        await config.api.datasource.fetchSchema({ datasourceId })

        const updated = await config.api.datasource.get(datasourceId)
        const expected: Datasource = {
          ...persisted,
          entities:
            persisted?.entities &&
            Object.entries(persisted.entities).reduce<Record<string, Table>>(
              (acc, [tableName, table]) => {
                acc[tableName] = expect.objectContaining({
                  ...table,
                  primaryDisplay: expect.not.stringMatching(
                    new RegExp(`^${table.primaryDisplay || ""}$`)
                  ),
                  schema: Object.entries(table.schema).reduce<TableSchema>(
                    (acc, [fieldName, field]) => {
                      acc[fieldName] = {
                        ...field,
                        externalType: allowUndefined(expect.any(String)),
                        constraints: allowUndefined(expect.any(Object)),
                        autocolumn: allowUndefined(expect.any(Boolean)),
                      }
                      return acc
                    },
                    {}
                  ),
                })
                return acc
              },
              {}
            ),

          _rev: expect.any(String),
          updatedAt: expect.any(String),
        }
        expect(updated).toEqual(expected)
      })
    })

    describe("verify", () => {
      it("should be able to verify the connection", async () => {
        await config.api.datasource.verify(
          {
            datasource: rawDatasource,
          },
          {
            body: {
              connected: true,
            },
          }
        )
      })

      it("should state an invalid datasource cannot connect", async () => {
        await config.api.datasource.verify(
          {
            datasource: {
              ...rawDatasource,
              config: {
                ...rawDatasource.config,
                password: "wrongpassword",
              },
            },
          },
          {
            body: {
              connected: false,
              error: /.*/, // error message differs between databases
            },
          }
        )
      })
    })

    describe("info", () => {
      it("should fetch information about a datasource with a single table", async () => {
        const existingTableNames = (
          await config.api.datasource.info(datasource)
        ).tableNames

        const tableName = generator.guid()
        await client.schema.createTable(tableName, table => {
          table.increments("id").primary()
          table.string("name")
        })

        const info = await config.api.datasource.info(datasource)
        expect(info.tableNames).toEqual(
          expect.arrayContaining([tableName, ...existingTableNames])
        )
        expect(info.tableNames).toHaveLength(existingTableNames.length + 1)
        it("should not return database password", async () => {
          const ds = await config.api.datasource.get(datasource._id!)
          expect(ds.config!.password).toBe("--secret-value--")
        })
      })

      it("should fetch information about a datasource with multiple tables", async () => {
        const existingTableNames = (
          await config.api.datasource.info(datasource)
        ).tableNames
      describe("list", () => {
        it("returns all the datasources", async () => {
          const datasources = await config.api.datasource.fetch()
          expect(datasources).toContainEqual(
            expect.objectContaining(datasource)
          )
        })
      })

        const tableNames = [
          generator.guid(),
          generator.guid(),
          generator.guid(),
          generator.guid(),
        ]
        for (const tableName of tableNames) {
      describe("put", () => {
        it("should update an existing datasource", async () => {
          const newName = generator.guid()
          datasource.name = newName
          const updatedDs = await config.api.datasource.update(datasource)
          expect(updatedDs.name).toEqual(newName)
          expect(events.datasource.updated).toHaveBeenCalledTimes(1)
        })

        it("should not overwrite database password with --secret-value--", async () => {
          const password = await context.doInAppContext(
            config.getAppId(),
            async () => {
              const ds = await sdk.datasources.get(datasource._id!)
              return ds.config!.password
            }
          )

          expect(password).not.toBe("--secret-value--")

          const ds = await config.api.datasource.get(datasource._id!)
          expect(ds.config!.password).toBe("--secret-value--")

          await config.api.datasource.update(
            await config.api.datasource.get(datasource._id!)
          )

          const newPassword = await context.doInAppContext(
            config.getAppId(),
            async () => {
              const ds = await sdk.datasources.get(datasource._id!)
              return ds.config!.password
            }
          )

          expect(newPassword).not.toBe("--secret-value--")
          expect(newPassword).toBe(password)
        })
      })

      describe("destroy", () => {
        it("deletes queries for the datasource after deletion and returns a success message", async () => {
          await config.api.query.save({
            datasourceId: datasource._id!,
            name: "Test Query",
            parameters: [],
            fields: {},
            schema: {},
            queryVerb: "read",
            transformer: null,
            readable: true,
          })

          await config.api.datasource.delete(datasource)
          const datasources = await config.api.datasource.fetch()
          expect(datasources).not.toContainEqual(
            expect.objectContaining(datasource)
          )
          expect(events.datasource.deleted).toHaveBeenCalledTimes(1)
        })
      })

      describe("schema", () => {
        it("fetching schema will not drop tables or columns", async () => {
          const datasourceId = datasource!._id!

          const simpleTable = await config.api.table.save(
            tableForDatasource(datasource, {
              name: "simple",
              schema: {
                name: {
                  name: "name",
                  type: FieldType.STRING,
                },
              },
            })
          )

          const stringName = "string"
          const fullSchema: {
            [type in SupportedSqlTypes]: FieldSchema & { type: type }
          } = {
            [FieldType.STRING]: {
              name: stringName,
              type: FieldType.STRING,
            },
            [FieldType.LONGFORM]: {
              name: "longform",
              type: FieldType.LONGFORM,
            },
            [FieldType.OPTIONS]: {
              name: "options",
              type: FieldType.OPTIONS,
              constraints: {
                presence: {
                  allowEmpty: false,
                },
                inclusion: ["1", "2", "3"],
              },
            },
            [FieldType.NUMBER]: {
              name: "number",
              type: FieldType.NUMBER,
            },
            [FieldType.BOOLEAN]: {
              name: "boolean",
              type: FieldType.BOOLEAN,
            },
            [FieldType.ARRAY]: {
              name: "array",
              type: FieldType.ARRAY,
              constraints: {
                type: JsonFieldSubType.ARRAY,
                inclusion: [],
              },
            },
            [FieldType.DATETIME]: {
              name: "datetime",
              type: FieldType.DATETIME,
              dateOnly: true,
              timeOnly: false,
            },
            [FieldType.LINK]: {
              name: "link",
              type: FieldType.LINK,
              tableId: simpleTable._id!,
              relationshipType: RelationshipType.ONE_TO_MANY,
              fieldName: "link",
            },
            [FieldType.FORMULA]: {
              name: "formula",
              type: FieldType.FORMULA,
              formula: "any formula",
            },
            [FieldType.BARCODEQR]: {
              name: "barcodeqr",
              type: FieldType.BARCODEQR,
            },
            [FieldType.BIGINT]: {
              name: "bigint",
              type: FieldType.BIGINT,
            },
            [FieldType.BB_REFERENCE]: {
              name: "bb_reference",
              type: FieldType.BB_REFERENCE,
              subtype: BBReferenceFieldSubType.USER,
            },
            [FieldType.BB_REFERENCE_SINGLE]: {
              name: "bb_reference_single",
              type: FieldType.BB_REFERENCE_SINGLE,
              subtype: BBReferenceFieldSubType.USER,
            },
          }

          await config.api.table.save(
            tableForDatasource(datasource, {
              name: "full",
              schema: fullSchema,
            })
          )

          const persisted = await config.api.datasource.get(datasourceId)
          await config.api.datasource.fetchSchema({ datasourceId })

          const updated = await config.api.datasource.get(datasourceId)
          const expected: Datasource = {
            ...persisted,
            entities:
              persisted?.entities &&
              Object.entries(persisted.entities).reduce<Record<string, Table>>(
                (acc, [tableName, table]) => {
                  acc[tableName] = expect.objectContaining({
                    ...table,
                    primaryDisplay: expect.not.stringMatching(
                      new RegExp(`^${table.primaryDisplay || ""}$`)
                    ),
                    schema: Object.entries(table.schema).reduce<TableSchema>(
                      (acc, [fieldName, field]) => {
                        acc[fieldName] = {
                          ...field,
                          externalType: allowUndefined(expect.any(String)),
                          constraints: allowUndefined(expect.any(Object)),
                          autocolumn: allowUndefined(expect.any(Boolean)),
                        }
                        return acc
                      },
                      {}
                    ),
                  })
                  return acc
                },
                {}
              ),

            _rev: expect.any(String),
            updatedAt: expect.any(String),
          }
          expect(updated).toEqual(expected)
        })

!isOracle &&
|
||||
!isMSSQL &&
|
||||
it("can fetch options columns with a large number of options", async () => {
|
||||
const enumOptions = new Array(1000)
|
||||
.fill(0)
|
||||
.map((_, i) => i.toString())
|
||||
.toSorted()
|
||||
await client.schema.createTable("options", table => {
|
||||
table.increments("id").primary()
|
||||
table.enum("enum", enumOptions, {
|
||||
useNative: true,
|
||||
enumName: "enum",
|
||||
})
|
||||
})
|
||||
|
||||
const resp = await config.api.datasource.fetchSchema({
|
||||
datasourceId: datasource._id!,
|
||||
})
|
||||
expect(resp.errors).toEqual({})
|
||||
|
||||
const table = resp.datasource.entities!.options
|
||||
expect(
|
||||
table.schema.enum.constraints!.inclusion!.toSorted()
|
||||
).toEqual(enumOptions)
|
||||
})
|
||||
|
||||
!isOracle &&
|
||||
!isMSSQL &&
|
||||
it("can fetch options with commas in them", async () => {
|
||||
const enumOptions = [
|
||||
"Lincoln, Abraham",
|
||||
"Washington, George",
|
||||
"Fred",
|
||||
"Bob",
|
||||
].toSorted()
|
||||
await client.schema.createTable("options", table => {
|
||||
table.increments("id").primary()
|
||||
table.enum("enum", enumOptions, {
|
||||
useNative: true,
|
||||
enumName: "enum",
|
||||
})
|
||||
})
|
||||
|
||||
const resp = await config.api.datasource.fetchSchema({
|
||||
datasourceId: datasource._id!,
|
||||
})
|
||||
expect(resp.errors).toEqual({})
|
||||
|
||||
const table = resp.datasource.entities!.options
|
||||
expect(
|
||||
table.schema.enum.constraints!.inclusion!.toSorted()
|
||||
).toEqual(enumOptions)
|
||||
})
|
||||
|
||||
!isOracle &&
|
||||
!isMSSQL &&
|
||||
it("can fetch options that may include other type names", async () => {
|
||||
const enumOptions = [
|
||||
"int",
|
||||
"bigint",
|
||||
"float",
|
||||
"numeric",
|
||||
"json",
|
||||
"map",
|
||||
].toSorted()
|
||||
|
||||
await client.schema.createTable("options", table => {
|
||||
table.increments("id").primary()
|
||||
table.enum("enum", enumOptions, {
|
||||
useNative: true,
|
||||
enumName: "enum",
|
||||
})
|
||||
})
|
||||
|
||||
const resp = await config.api.datasource.fetchSchema({
|
||||
datasourceId: datasource._id!,
|
||||
})
|
||||
|
||||
expect(resp.errors).toEqual({})
|
||||
|
||||
const table = resp.datasource.entities!.options
|
||||
expect(
|
||||
table.schema.enum.constraints!.inclusion!.toSorted()
|
||||
).toEqual(enumOptions)
|
||||
})
|
||||
})
|
||||
|
||||
describe("verify", () => {
|
||||
it("should be able to verify the connection", async () => {
|
||||
await config.api.datasource.verify(
|
||||
{
|
||||
datasource: rawDatasource,
|
||||
},
|
||||
{
|
||||
body: {
|
||||
connected: true,
|
||||
},
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it("should state an invalid datasource cannot connect", async () => {
|
||||
await config.api.datasource.verify(
|
||||
{
|
||||
datasource: {
|
||||
...rawDatasource,
|
||||
config: {
|
||||
...rawDatasource.config,
|
||||
password: "wrongpassword",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
body: {
|
||||
connected: false,
|
||||
error: /.*/, // error message differs between databases
|
||||
},
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
      describe("info", () => {
        it("should fetch information about a datasource with a single table", async () => {
          const existingTableNames = (
            await config.api.datasource.info(datasource)
          ).tableNames

          const tableName = generator.guid()
          await client.schema.createTable(tableName, table => {
            table.increments("id").primary()
            table.string("name")
          })

          const info = await config.api.datasource.info(datasource)
          expect(info.tableNames).toEqual(
            expect.arrayContaining([tableName, ...existingTableNames])
          )
          expect(info.tableNames).toHaveLength(existingTableNames.length + 1)
        })

        it("should fetch information about a datasource with multiple tables", async () => {
          const existingTableNames = (
            await config.api.datasource.info(datasource)
          ).tableNames

          const tableNames = [
            generator.guid(),
            generator.guid(),
            generator.guid(),
            generator.guid(),
          ]
          for (const tableName of tableNames) {
            await client.schema.createTable(tableName, table => {
              table.increments("id").primary()
              table.string("name")
            })
          }

          const info = await config.api.datasource.info(datasource)
          expect(info.tableNames).toEqual(
            expect.arrayContaining([...tableNames, ...existingTableNames])
          )
          expect(info.tableNames).toHaveLength(
            existingTableNames.length + tableNames.length
          )
        })
      })
    })
  })
    }
  )
}

const datasources = datasourceDescribe({
  exclude: [DatabaseName.MONGODB, DatabaseName.SQS, DatabaseName.ORACLE],
})

if (datasources.length) {
  describe.each(datasources)(
    "$dbName",
    ({ config, dsProvider, isPostgres, isMySQL, isMariaDB }) => {
      let datasource: Datasource
      let client: Knex

      beforeEach(async () => {
        const ds = await dsProvider()
        datasource = ds.datasource!
        client = ds.client!
      })

      describe("external export", () => {
        let table: Table

        beforeEach(async () => {
          table = await config.api.table.save(
            tableForDatasource(datasource, {
              name: "simple",
              primary: ["id"],
              primaryDisplay: "name",
              schema: {
                id: {
                  name: "id",
                  autocolumn: true,
                  type: FieldType.NUMBER,
                  constraints: {
                    presence: false,
                  },
                },
                name: {
                  name: "name",
                  autocolumn: false,
                  type: FieldType.STRING,
                  constraints: {
                    presence: false,
                  },
                },
              },
            })
          )
        })

        it("should be able to export and reimport a schema", async () => {
          let { schema } = await config.api.datasource.externalSchema(
            datasource
          )

          if (isPostgres) {
            // pg_dump 17 puts this config parameter into the dump but no DB < 17
            // can load it. We're using postgres 16 in tests at the time of writing.
            schema = schema.replace("SET transaction_timeout = 0;", "")
          }

          await config.api.table.destroy(table._id!, table._rev!)

          if (isMySQL || isMariaDB) {
            // MySQL/MariaDB clients don't let you run multiple queries in a
            // single call. They also throw an error when given an empty query.
            // The below handles both of these things.
            for (let query of schema.split(";\n")) {
              query = query.trim()
              if (!query) {
                continue
              }
              await client.raw(query)
            }
          } else {
            await client.raw(schema)
          }

          await config.api.datasource.fetchSchema({
            datasourceId: datasource._id!,
          })

          const tables = await config.api.table.fetch()
          const newTable = tables.find(t => t.name === table.name)!

          // This is only set on tables created through Budibase, we don't
          // expect it to match after we import the table.
          delete table.created

          for (const field of Object.values(newTable.schema)) {
            // Will differ per-database, not useful for this test.
            delete field.externalType
          }

          expect(newTable).toEqual(table)
        })
      })
    }
  )
}
@@ -1,11 +1,11 @@
-const { testAutomation } = require("./utilities/TestFunctions")
-const setup = require("./utilities")
-const { MetadataTypes } = require("../../../constants")
+import { testAutomation } from "./utilities/TestFunctions"
+import * as setup from "./utilities"
+import { MetadataType, Automation } from "@budibase/types"

 describe("/metadata", () => {
   let request = setup.getRequest()
   let config = setup.getConfig()
-  let automation
+  let automation: Automation

   afterAll(setup.afterAll)

@@ -15,8 +15,8 @@ describe("/metadata", () => {
   })

   async function createMetadata(
-    data,
-    type = MetadataTypes.AUTOMATION_TEST_INPUT
+    data: Record<string, string>,
+    type = MetadataType.AUTOMATION_TEST_INPUT
   ) {
     const res = await request
       .post(`/api/metadata/${type}/${automation._id}`)

@@ -27,7 +27,7 @@ describe("/metadata", () => {
     expect(res.body._rev).toBeDefined()
   }

-  async function getMetadata(type) {
+  async function getMetadata(type: MetadataType) {
     const res = await request
       .get(`/api/metadata/${type}/${automation._id}`)
       .set(config.defaultHeaders())

@@ -39,14 +39,14 @@ describe("/metadata", () => {
   describe("save", () => {
     it("should be able to save some metadata", async () => {
       await createMetadata({ test: "a" })
-      const testInput = await getMetadata(MetadataTypes.AUTOMATION_TEST_INPUT)
+      const testInput = await getMetadata(MetadataType.AUTOMATION_TEST_INPUT)
       expect(testInput.test).toBe("a")
     })

     it("should save history metadata on automation run", async () => {
       // this should have created some history
-      await testAutomation(config, automation)
-      const metadata = await getMetadata(MetadataTypes.AUTOMATION_TEST_HISTORY)
+      await testAutomation(config, automation, {})
+      const metadata = await getMetadata(MetadataType.AUTOMATION_TEST_HISTORY)
       expect(metadata).toBeDefined()
       expect(metadata.history.length).toBe(1)
       expect(typeof metadata.history[0].occurredAt).toBe("number")

@@ -57,13 +57,13 @@ describe("/metadata", () => {
     it("should be able to delete some test inputs", async () => {
       const res = await request
         .delete(
-          `/api/metadata/${MetadataTypes.AUTOMATION_TEST_INPUT}/${automation._id}`
+          `/api/metadata/${MetadataType.AUTOMATION_TEST_INPUT}/${automation._id}`
         )
         .set(config.defaultHeaders())
         .expect("Content-Type", /json/)
         .expect(200)
       expect(res.body.message).toBeDefined()
-      const metadata = await getMetadata(MetadataTypes.AUTOMATION_TEST_INPUT)
+      const metadata = await getMetadata(MetadataType.AUTOMATION_TEST_INPUT)
       expect(metadata.test).toBeUndefined()
     })
   })
@@ -48,7 +48,7 @@ jest.mock("@budibase/pro", () => ({
   ai: {
     LargeLanguageModel: {
       forCurrentTenant: async () => ({
-        initialised: true,
+        llm: {},
         run: jest.fn(() => `Mock LLM Response`),
         buildPromptFromAIOperation: jest.fn(),
       }),

@@ -2607,6 +2607,8 @@ if (descriptions.length) {
           name: "foo",
           description: "bar",
           tableId,
+          createdAt: isInternal ? new Date().toISOString() : undefined,
+          updatedAt: isInternal ? new Date().toISOString() : undefined,
         })
       })

@@ -2628,6 +2630,8 @@ if (descriptions.length) {
           id: isInternal ? undefined : expect.any(Number),
           type: isInternal ? "row" : undefined,
           [`fk_${o2mTable.name}_fk_o2m`]: isInternal ? undefined : user.id,
+          createdAt: isInternal ? new Date().toISOString() : undefined,
+          updatedAt: isInternal ? new Date().toISOString() : undefined,
         })
       })

@@ -2650,6 +2654,8 @@ if (descriptions.length) {
           _rev: expect.any(String),
           id: isInternal ? undefined : expect.any(Number),
           type: isInternal ? "row" : undefined,
+          createdAt: isInternal ? new Date().toISOString() : undefined,
+          updatedAt: isInternal ? new Date().toISOString() : undefined,
         })
       })

@@ -2729,6 +2735,8 @@ if (descriptions.length) {
           id: isInternal ? undefined : expect.any(Number),
           type: isInternal ? "row" : undefined,
           [`fk_${o2mTable.name}_fk_o2m`]: isInternal ? undefined : user.id,
+          createdAt: isInternal ? new Date().toISOString() : undefined,
+          updatedAt: isInternal ? new Date().toISOString() : undefined,
         })
       })

@@ -2745,15 +2753,8 @@ if (descriptions.length) {
           user: null,
           users: null,
         })
-        expect(updatedRow).toEqual({
-          name: "foo",
-          description: "bar",
-          tableId,
-          _id: row._id,
-          _rev: expect.any(String),
-          id: isInternal ? undefined : expect.any(Number),
-          type: isInternal ? "row" : undefined,
-        })
+        expect(updatedRow.user).toBeUndefined()
+        expect(updatedRow.users).toBeUndefined()
       })

       it("fetch all will populate the relationships", async () => {
@@ -24,6 +24,7 @@ import {
   JsonFieldSubType,
   LogicalOperator,
   RelationshipType,
+  RequiredKeys,
   Row,
   RowSearchParams,
   SearchFilters,

@@ -51,7 +52,7 @@ jest.mock("@budibase/pro", () => ({
   ai: {
     LargeLanguageModel: {
       forCurrentTenant: async () => ({
-        initialised: true,
+        llm: {},
         run: jest.fn(() => `Mock LLM Response`),
         buildPromptFromAIOperation: jest.fn(),
       }),

@@ -208,9 +209,25 @@ if (descriptions.length) {

   private async performSearch(): Promise<SearchResponse<Row>> {
     if (isInMemory) {
-      return dataFilters.search(_.cloneDeep(rows), {
-        ...this.query,
-      })
+      const inMemoryQuery: RequiredKeys<
+        Omit<RowSearchParams, "tableId">
+      > = {
+        sort: this.query.sort ?? undefined,
+        query: { ...this.query.query },
+        paginate: this.query.paginate,
+        bookmark: this.query.bookmark ?? undefined,
+        limit: this.query.limit,
+        sortOrder: this.query.sortOrder,
+        sortType: this.query.sortType ?? undefined,
+        version: this.query.version,
+        disableEscaping: this.query.disableEscaping,
+        countRows: this.query.countRows,
+        viewId: undefined,
+        fields: undefined,
+        indexer: undefined,
+        rows: undefined,
+      }
+      return dataFilters.search(_.cloneDeep(rows), inMemoryQuery)
     } else {
       return config.api.row.search(tableOrViewId, this.query)
     }
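The `RequiredKeys` mapped type used above makes the in-memory query exhaustive: every property of `RowSearchParams` (minus `tableId`) must be spelled out, even if only as `undefined`, so a newly added search parameter cannot be silently dropped by the in-memory path. A minimal sketch of the idea, with `RequiredKeys` written out as it is commonly defined and a hypothetical `Params` type for illustration:

    // RequiredKeys<T>: keep the property types (including undefined) but make
    // every key mandatory to list explicitly.
    type RequiredKeys<T> = { [K in keyof T]-?: T[K] }

    interface Params {
      limit?: number
      bookmark?: string
    }

    // Compile error if either key is omitted; `undefined` must be explicit.
    const q: RequiredKeys<Params> = { limit: 10, bookmark: undefined }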
@@ -96,9 +96,15 @@ if (env.SELF_HOSTED) {
   ACTION_IMPLS["EXECUTE_BASH"] = bash.run
   // @ts-ignore
   BUILTIN_ACTION_DEFINITIONS["EXECUTE_BASH"] = bash.definition
+
+  if (env.isTest()) {
+    BUILTIN_ACTION_DEFINITIONS["OPENAI"] = openai.definition
+  }
 }

-export async function getActionDefinitions() {
+export async function getActionDefinitions(): Promise<
+  Record<keyof typeof AutomationActionStepId, AutomationStepDefinition>
+> {
   if (await features.flags.isEnabled(FeatureFlag.AUTOMATION_BRANCHING)) {
     BUILTIN_ACTION_DEFINITIONS["BRANCH"] = branch.definition
   }
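Typing the return as `Record<keyof typeof AutomationActionStepId, AutomationStepDefinition>` keys the result by the enum's member names rather than its string values, so every declared step must have a definition. A small sketch of the distinction, using a hypothetical enum for illustration:

    enum StepId {
      EXECUTE_BASH = "EXECUTE_BASH",
      OPENAI = "OPENAI",
    }

    // keyof typeof StepId is the union "EXECUTE_BASH" | "OPENAI", so this
    // record must carry an entry for every declared step name.
    const defs: Record<keyof typeof StepId, string> = {
      EXECUTE_BASH: "bash definition",
      OPENAI: "openai definition",
    }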
@@ -94,7 +94,7 @@ export async function run({
   })

   try {
-    await queryController.executeV2(ctx, { isAutomation: true })
+    await queryController.executeV2AsAutomation(ctx)
     const { data, ...rest } = ctx.body

     return {
@@ -106,13 +106,15 @@ export async function run({
     (await features.flags.isEnabled(FeatureFlag.BUDIBASE_AI)) &&
     (await pro.features.isBudibaseAIEnabled())

-  let llm
+  let llmWrapper
   if (budibaseAIEnabled || customConfigsEnabled) {
-    llm = await pro.ai.LargeLanguageModel.forCurrentTenant(inputs.model)
+    llmWrapper = await pro.ai.LargeLanguageModel.forCurrentTenant(
+      inputs.model
+    )
   }

-  response = llm?.initialised
-    ? await llm.run(inputs.prompt)
+  response = llmWrapper?.llm
+    ? await llmWrapper.run(inputs.prompt)
     : await legacyOpenAIPrompt(inputs)

   return {
@@ -3,7 +3,6 @@ import {
   AutomationStepDefinition,
   AutomationStepType,
   AutomationIOType,
-  AutomationResults,
   Automation,
   AutomationCustomIOType,
   TriggerAutomationStepInputs,

@@ -78,7 +77,7 @@ export async function run({
   const db = context.getAppDB()
   let automation = await db.get<Automation>(inputs.automation.automationId)

-  const response: AutomationResults = await triggers.externalTrigger(
+  const response = await triggers.externalTrigger(
     automation,
     {
       fields: { ...fieldParams },

@@ -88,9 +87,13 @@ export async function run({
       { getResponses: true }
     )

-    return {
-      success: true,
-      value: response.steps,
+    if (triggers.isAutomationResults(response)) {
+      return {
+        success: true,
+        value: response.steps,
+      }
+    } else {
+      throw new Error("Automation did not have a collect block")
     }
   }
 } else {
@@ -1,26 +1,148 @@
-import { getConfig, afterAll as _afterAll, runStep } from "./utilities"
-
-describe("test the bash action", () => {
-  let config = getConfig()
-
-  beforeAll(async () => {
-    await config.init()
-  })
-  afterAll(_afterAll)
-
-  it("should be able to execute a script", async () => {
-    let res = await runStep(config, "EXECUTE_BASH", {
-      code: "echo 'test'",
-    })
-    expect(res.stdout).toEqual("test\n")
-    expect(res.success).toEqual(true)
-  })
-
-  it("should handle a null value", async () => {
-    let res = await runStep(config, "EXECUTE_BASH", {
-      code: null,
-    })
-    expect(res.stdout).toEqual(
-      "Budibase bash automation failed: Invalid inputs"
-    )
-  })
-})
+import { createAutomationBuilder } from "./utilities/AutomationTestBuilder"
+import * as automation from "../index"
+import * as setup from "./utilities"
+import { Table } from "@budibase/types"
+
+describe("Execute Bash Automations", () => {
+  let config = setup.getConfig(),
+    table: Table
+
+  beforeAll(async () => {
+    await automation.init()
+    await config.init()
+    table = await config.createTable()
+    await config.createRow({
+      name: "test row",
+      description: "test description",
+      tableId: table._id!,
+    })
+  })
+
+  afterAll(setup.afterAll)
+
+  it("should use trigger data in bash command and pass output to subsequent steps", async () => {
+    const result = await createAutomationBuilder({
+      name: "Bash with Trigger Data",
+      config,
+    })
+      .appAction({ fields: { command: "hello world" } })
+      .bash(
+        { code: "echo '{{ trigger.fields.command }}'" },
+        { stepName: "Echo Command" }
+      )
+      .serverLog(
+        { text: "Bash output was: {{ steps.[Echo Command].stdout }}" },
+        { stepName: "Log Output" }
+      )
+      .run()
+
+    expect(result.steps[0].outputs.stdout).toEqual("hello world\n")
+    expect(result.steps[1].outputs.message).toContain(
+      "Bash output was: hello world"
+    )
+  })
+
+  it("should chain multiple bash commands using previous outputs", async () => {
+    const result = await createAutomationBuilder({
+      name: "Chained Bash Commands",
+      config,
+    })
+      .appAction({ fields: { filename: "testfile.txt" } })
+      .bash(
+        { code: "echo 'initial content' > {{ trigger.fields.filename }}" },
+        { stepName: "Create File" }
+      )
+      .bash(
+        { code: "cat {{ trigger.fields.filename }} | tr '[a-z]' '[A-Z]'" },
+        { stepName: "Transform Content" }
+      )
+      .bash(
+        { code: "rm {{ trigger.fields.filename }}" },
+        { stepName: "Cleanup" }
+      )
+      .run()
+
+    expect(result.steps[1].outputs.stdout).toEqual("INITIAL CONTENT\n")
+    expect(result.steps[1].outputs.success).toEqual(true)
+  })
+
+  it("should integrate bash output with row operations", async () => {
+    const result = await createAutomationBuilder({
+      name: "Bash with Row Operations",
+      config,
+    })
+      .appAction({ fields: {} })
+      .queryRows(
+        {
+          tableId: table._id!,
+          filters: {},
+        },
+        { stepName: "Get Row" }
+      )
+      .bash(
+        {
+          code: "echo Row data: {{ steps.[Get Row].rows.[0].name }} - {{ steps.[Get Row].rows.[0].description }}",
+        },
+        { stepName: "Process Row Data" }
+      )
+      .serverLog(
+        { text: "{{ steps.[Process Row Data].stdout }}" },
+        { stepName: "Log Result" }
+      )
+      .run()
+
+    expect(result.steps[1].outputs.stdout).toContain(
+      "Row data: test row - test description"
+    )
+    expect(result.steps[2].outputs.message).toContain(
+      "Row data: test row - test description"
+    )
+  })
+
+  it("should handle bash output in conditional logic", async () => {
+    const result = await createAutomationBuilder({
+      name: "Bash with Conditional",
+      config,
+    })
+      .appAction({ fields: { threshold: "5" } })
+      .bash(
+        { code: "echo $(( {{ trigger.fields.threshold }} + 5 ))" },
+        { stepName: "Calculate Value" }
+      )
+      .executeScript(
+        {
+          code: `
+            const value = parseInt(steps["Calculate Value"].stdout);
+            return value > 8 ? "high" : "low";
+          `,
+        },
+        { stepName: "Check Value" }
+      )
+      .serverLog(
+        { text: "Value was {{ steps.[Check Value].value }}" },
+        { stepName: "Log Result" }
+      )
+      .run()
+
+    expect(result.steps[0].outputs.stdout).toEqual("10\n")
+    expect(result.steps[1].outputs.value).toEqual("high")
+    expect(result.steps[2].outputs.message).toContain("Value was high")
+  })
+
+  it("should handle null values gracefully", async () => {
+    const result = await createAutomationBuilder({
+      name: "Null Bash Input",
+      config,
+    })
+      .appAction({ fields: {} })
+      .bash(
+        //@ts-ignore
+        { code: null },
+        { stepName: "Null Command" }
+      )
+      .run()
+
+    expect(result.steps[0].outputs.stdout).toBe(
+      "Budibase bash automation failed: Invalid inputs"
+    )
+  })
+})
@@ -3,7 +3,7 @@ import * as triggers from "../triggers"
 import { loopAutomation } from "../../tests/utilities/structures"
 import { context } from "@budibase/backend-core"
 import * as setup from "./utilities"
-import { Table, LoopStepType } from "@budibase/types"
+import { Table, LoopStepType, AutomationResults } from "@budibase/types"
 import * as loopUtils from "../loopUtils"
 import { LoopInput } from "../../definitions/automations"

@@ -20,15 +20,19 @@ describe("Attempt to run a basic loop automation", () => {

   afterAll(setup.afterAll)

-  async function runLoop(loopOpts?: LoopInput) {
+  async function runLoop(loopOpts?: LoopInput): Promise<AutomationResults> {
     const appId = config.getAppId()
     return await context.doInAppContext(appId, async () => {
       const params = { fields: { appId } }
-      return await triggers.externalTrigger(
+      const result = await triggers.externalTrigger(
         loopAutomation(table._id!, loopOpts),
         params,
         { getResponses: true }
       )
+      if ("outputs" in result && !result.outputs.success) {
+        throw new Error("Unable to proceed - failed to return anything.")
+      }
+      return result as AutomationResults
     })
   }
@@ -1,7 +1,9 @@
-import { getConfig, runStep, afterAll as _afterAll } from "./utilities"
+import { getConfig, afterAll as _afterAll } from "./utilities"
+import { createAutomationBuilder } from "./utilities/AutomationTestBuilder"
 import { OpenAI } from "openai"
 import { setEnv as setCoreEnv } from "@budibase/backend-core"
 import * as pro from "@budibase/pro"
+import { Model } from "@budibase/types"

 jest.mock("openai", () => ({
   OpenAI: jest.fn().mockImplementation(() => ({

@@ -25,7 +27,7 @@ jest.mock("@budibase/pro", () => ({
   ai: {
     LargeLanguageModel: {
       forCurrentTenant: jest.fn().mockImplementation(() => ({
-        initialised: true,
+        llm: {},
         init: jest.fn(),
         run: jest.fn(),
       })),

@@ -47,6 +49,7 @@ describe("test the openai action", () => {
   let resetEnv: () => void | undefined

   beforeAll(async () => {
+    setCoreEnv({ SELF_HOSTED: true })
     await config.init()
   })

@@ -62,17 +65,39 @@ describe("test the openai action", () => {
   afterAll(_afterAll)

   it("should be able to receive a response from ChatGPT given a prompt", async () => {
-    const res = await runStep(config, "OPENAI", { prompt: OPENAI_PROMPT })
-    expect(res.response).toEqual("This is a test")
-    expect(res.success).toBeTruthy()
+    setCoreEnv({ SELF_HOSTED: true })
+
+    const result = await createAutomationBuilder({
+      name: "Test OpenAI Response",
+      config,
+    })
+      .appAction({ fields: {} })
+      .openai(
+        { prompt: OPENAI_PROMPT, model: Model.GPT_4O_MINI },
+        { stepName: "Basic OpenAI Query" }
+      )
+      .run()
+
+    expect(result.steps[0].outputs.response).toEqual("This is a test")
+    expect(result.steps[0].outputs.success).toBeTruthy()
   })

   it("should present the correct error message when a prompt is not provided", async () => {
-    const res = await runStep(config, "OPENAI", { prompt: null })
-    expect(res.response).toEqual(
+    const result = await createAutomationBuilder({
+      name: "Test OpenAI No Prompt",
+      config,
+    })
+      .appAction({ fields: {} })
+      .openai(
+        { prompt: "", model: Model.GPT_4O_MINI },
+        { stepName: "Empty Prompt Query" }
+      )
+      .run()
+
+    expect(result.steps[0].outputs.response).toEqual(
       "Budibase OpenAI Automation Failed: No prompt supplied"
     )
-    expect(res.success).toBeFalsy()
+    expect(result.steps[0].outputs.success).toBeFalsy()
   })

   it("should present the correct error message when an error is thrown from the createChatCompletion call", async () => {

@@ -91,14 +116,21 @@ describe("test the openai action", () => {
     } as any)
   )

-  const res = await runStep(config, "OPENAI", {
-    prompt: OPENAI_PROMPT,
-  })
+  const result = await createAutomationBuilder({
+    name: "Test OpenAI Error",
+    config,
+  })
+    .appAction({ fields: {} })
+    .openai(
+      { prompt: OPENAI_PROMPT, model: Model.GPT_4O_MINI },
+      { stepName: "Error Producing Query" }
+    )
+    .run()

-  expect(res.response).toEqual(
+  expect(result.steps[0].outputs.response).toEqual(
     "Error: An error occurred while calling createChatCompletion"
   )
-  expect(res.success).toBeFalsy()
+  expect(result.steps[0].outputs.success).toBeFalsy()
 })

 it("should ensure that the pro AI module is called when the budibase AI features are enabled", async () => {

@@ -106,10 +138,19 @@ describe("test the openai action", () => {
   jest.spyOn(pro.features, "isAICustomConfigsEnabled").mockResolvedValue(true)

   const prompt = "What is the meaning of life?"
-  await runStep(config, "OPENAI", {
-    model: "gpt-4o-mini",
-    prompt,
-  })
+  await createAutomationBuilder({
+    name: "Test OpenAI Pro Features",
+    config,
+  })
+    .appAction({ fields: {} })
+    .openai(
+      {
+        model: Model.GPT_4O_MINI,
+        prompt,
+      },
+      { stepName: "Pro Features Query" }
+    )
+    .run()

   expect(pro.ai.LargeLanguageModel.forCurrentTenant).toHaveBeenCalledWith(
     "gpt-4o-mini"
@@ -1,5 +1,7 @@
-import { Table } from "@budibase/types"
+import { EmptyFilterOption, SortOrder, Table } from "@budibase/types"
 import * as setup from "./utilities"
+import { createAutomationBuilder } from "./utilities/AutomationTestBuilder"
+import * as automation from "../index"

 const NAME = "Test"

@@ -8,6 +10,7 @@ describe("Test a query step automation", () => {
   let config = setup.getConfig()

   beforeAll(async () => {
+    await automation.init()
     await config.init()
     table = await config.createTable()
     const row = {

@@ -22,107 +25,132 @@ describe("Test a query step automation", () => {
   afterAll(setup.afterAll)

   it("should be able to run the query step", async () => {
-    const inputs = {
-      tableId: table._id,
-      filters: {
-        equal: {
-          name: NAME,
-        },
-      },
-      sortColumn: "name",
-      sortOrder: "ascending",
-      limit: 10,
-    }
-    const res = await setup.runStep(
-      config,
-      setup.actions.QUERY_ROWS.stepId,
-      inputs
-    )
-    expect(res.success).toBe(true)
-    expect(res.rows).toBeDefined()
-    expect(res.rows.length).toBe(2)
-    expect(res.rows[0].name).toBe(NAME)
-  })
+    const result = await createAutomationBuilder({
+      name: "Basic Query Test",
+      config,
+    })
+      .appAction({ fields: {} })
+      .queryRows(
+        {
+          tableId: table._id!,
+          filters: {
+            equal: {
+              name: NAME,
+            },
+          },
+          sortColumn: "name",
+          sortOrder: SortOrder.ASCENDING,
+          limit: 10,
+        },
+        { stepName: "Query All Rows" }
+      )
+      .run()
+
+    expect(result.steps[0].outputs.success).toBe(true)
+    expect(result.steps[0].outputs.rows).toBeDefined()
+    expect(result.steps[0].outputs.rows.length).toBe(2)
+    expect(result.steps[0].outputs.rows[0].name).toBe(NAME)
+  })

   it("Returns all rows when onEmptyFilter has no value and no filters are passed", async () => {
-    const inputs = {
-      tableId: table._id,
-      filters: {},
-      sortColumn: "name",
-      sortOrder: "ascending",
-      limit: 10,
-    }
-    const res = await setup.runStep(
-      config,
-      setup.actions.QUERY_ROWS.stepId,
-      inputs
-    )
-    expect(res.success).toBe(true)
-    expect(res.rows).toBeDefined()
-    expect(res.rows.length).toBe(2)
-    expect(res.rows[0].name).toBe(NAME)
-  })
+    const result = await createAutomationBuilder({
+      name: "Empty Filter Test",
+      config,
+    })
+      .appAction({ fields: {} })
+      .queryRows(
+        {
+          tableId: table._id!,
+          filters: {},
+          sortColumn: "name",
+          sortOrder: SortOrder.ASCENDING,
+          limit: 10,
+        },
+        { stepName: "Query With Empty Filter" }
+      )
+      .run()
+
+    expect(result.steps[0].outputs.success).toBe(true)
+    expect(result.steps[0].outputs.rows).toBeDefined()
+    expect(result.steps[0].outputs.rows.length).toBe(2)
+    expect(result.steps[0].outputs.rows[0].name).toBe(NAME)
+  })

   it("Returns no rows when onEmptyFilter is RETURN_NONE and theres no filters", async () => {
-    const inputs = {
-      tableId: table._id,
-      filters: {},
-      "filters-def": [],
-      sortColumn: "name",
-      sortOrder: "ascending",
-      limit: 10,
-      onEmptyFilter: "none",
-    }
-    const res = await setup.runStep(
-      config,
-      setup.actions.QUERY_ROWS.stepId,
-      inputs
-    )
-    expect(res.success).toBe(false)
-    expect(res.rows).toBeDefined()
-    expect(res.rows.length).toBe(0)
-  })
+    const result = await createAutomationBuilder({
+      name: "Return None Test",
+      config,
+    })
+      .appAction({ fields: {} })
+      .queryRows(
+        {
+          tableId: table._id!,
+          filters: {},
+          "filters-def": [],
+          sortColumn: "name",
+          sortOrder: SortOrder.ASCENDING,
+          limit: 10,
+          onEmptyFilter: EmptyFilterOption.RETURN_NONE,
+        },
+        { stepName: "Query With Return None" }
+      )
+      .run()
+
+    expect(result.steps[0].outputs.success).toBe(false)
+    expect(result.steps[0].outputs.rows).toBeDefined()
+    expect(result.steps[0].outputs.rows.length).toBe(0)
+  })

   it("Returns no rows when onEmptyFilters RETURN_NONE and a filter is passed with a null value", async () => {
-    const inputs = {
-      tableId: table._id,
-      onEmptyFilter: "none",
-      filters: {},
-      "filters-def": [
-        {
-          value: null,
-        },
-      ],
-      sortColumn: "name",
-      sortOrder: "ascending",
-      limit: 10,
-    }
-    const res = await setup.runStep(
-      config,
-      setup.actions.QUERY_ROWS.stepId,
-      inputs
-    )
-    expect(res.success).toBe(false)
-    expect(res.rows).toBeDefined()
-    expect(res.rows.length).toBe(0)
-  })
+    const result = await createAutomationBuilder({
+      name: "Null Filter Test",
+      config,
+    })
+      .appAction({ fields: {} })
+      .queryRows(
+        {
+          tableId: table._id!,
+          onEmptyFilter: EmptyFilterOption.RETURN_NONE,
+          filters: {},
+          "filters-def": [
+            {
+              value: null,
+            },
+          ],
+          sortColumn: "name",
+          sortOrder: SortOrder.ASCENDING,
+          limit: 10,
+        },
+        { stepName: "Query With Null Filter" }
+      )
+      .run()
+
+    expect(result.steps[0].outputs.success).toBe(false)
+    expect(result.steps[0].outputs.rows).toBeDefined()
+    expect(result.steps[0].outputs.rows.length).toBe(0)
+  })

   it("Returns rows when onEmptyFilter is RETURN_ALL and no filter is passed", async () => {
-    const inputs = {
-      tableId: table._id,
-      onEmptyFilter: "all",
-      filters: {},
-      sortColumn: "name",
-      sortOrder: "ascending",
-      limit: 10,
-    }
-    const res = await setup.runStep(
-      config,
-      setup.actions.QUERY_ROWS.stepId,
-      inputs
-    )
-    expect(res.success).toBe(true)
-    expect(res.rows).toBeDefined()
-    expect(res.rows.length).toBe(2)
-  })
+    const result = await createAutomationBuilder({
+      name: "Return All Test",
+      config,
+    })
+      .appAction({ fields: {} })
+      .queryRows(
+        {
+          tableId: table._id!,
+          onEmptyFilter: EmptyFilterOption.RETURN_ALL,
+          filters: {},
+          sortColumn: "name",
+          sortOrder: SortOrder.ASCENDING,
+          limit: 10,
+        },
+        { stepName: "Query With Return All" }
+      )
+      .run()
+
+    expect(result.steps[0].outputs.success).toBe(true)
+    expect(result.steps[0].outputs.rows).toBeDefined()
+    expect(result.steps[0].outputs.rows.length).toBe(2)
+  })
 })
@@ -152,6 +152,44 @@ describe("Loop automations", () => {
     )
   })

+  it("ensure the loop stops if the max iterations are reached", async () => {
+    const builder = createAutomationBuilder({
+      name: "Test Loop max iterations",
+    })
+
+    const results = await builder
+      .appAction({ fields: {} })
+      .loop({
+        option: LoopStepType.ARRAY,
+        binding: ["test", "test2", "test3"],
+        iterations: 2,
+      })
+      .serverLog({ text: "{{loop.currentItem}}" })
+      .serverLog({ text: "{{steps.1.iterations}}" })
+      .run()
+
+    expect(results.steps[0].outputs.iterations).toBe(2)
+  })
+
+  it("should run an automation with loop and max iterations to ensure context correctness further down the tree", async () => {
+    const builder = createAutomationBuilder({
+      name: "Test context down tree with Loop and max iterations",
+    })
+
+    const results = await builder
+      .appAction({ fields: {} })
+      .loop({
+        option: LoopStepType.ARRAY,
+        binding: ["test", "test2", "test3"],
+        iterations: 2,
+      })
+      .serverLog({ text: "{{loop.currentItem}}" })
+      .serverLog({ text: "{{steps.1.iterations}}" })
+      .run()
+
+    expect(results.steps[1].outputs.message).toContain("- 2")
+  })
+
   it("should run an automation where a loop is successfully run twice", async () => {
     const builder = createAutomationBuilder({
       name: "Test Trigger with Loop and Create Row",
@@ -35,6 +35,8 @@ import {
   Branch,
   FilterStepInputs,
   ExecuteScriptStepInputs,
+  OpenAIStepInputs,
+  BashStepInputs,
 } from "@budibase/types"
 import TestConfiguration from "../../../tests/utilities/TestConfiguration"
 import * as setup from "../utilities"

@@ -221,6 +223,30 @@ class BaseStepBuilder {
       input
     )
   }
+
+  bash(
+    input: BashStepInputs,
+    opts?: { stepName?: string; stepId?: string }
+  ): this {
+    return this.step(
+      AutomationActionStepId.EXECUTE_BASH,
+      BUILTIN_ACTION_DEFINITIONS.EXECUTE_BASH,
+      input,
+      opts
+    )
+  }
+
+  openai(
+    input: OpenAIStepInputs,
+    opts?: { stepName?: string; stepId?: string }
+  ): this {
+    return this.step(
+      AutomationActionStepId.OPENAI,
+      BUILTIN_ACTION_DEFINITIONS.OPENAI,
+      input,
+      opts
+    )
+  }
 }
 class StepBuilder extends BaseStepBuilder {
   build(): AutomationStep[] {
@@ -20,6 +20,7 @@ import {
   AutomationStatus,
   AutomationRowEvent,
   UserBindings,
+  AutomationResults,
 } from "@budibase/types"
 import { executeInThread } from "../threads/automation"
 import { dataFilters, sdk } from "@budibase/shared-core"

@@ -32,6 +33,14 @@ const JOB_OPTS = {
 import * as automationUtils from "../automations/automationUtils"
 import { doesTableExist } from "../sdk/app/tables/getters"

+type DidNotTriggerResponse = {
+  outputs: {
+    success: false
+    status: AutomationStatus.STOPPED
+  }
+  message: AutomationStoppedReason.TRIGGER_FILTER_NOT_MET
+}
+
 async function getAllAutomations() {
   const db = context.getAppDB()
   let automations = await db.allDocs<Automation>(

@@ -139,6 +148,14 @@ function rowPassesFilters(row: Row, filters: SearchFilters) {
   return filteredRows.length > 0
 }

+export function isAutomationResults(
+  response: AutomationResults | DidNotTriggerResponse | AutomationJob
+): response is AutomationResults {
+  return (
+    response !== null && "steps" in response && Array.isArray(response.steps)
+  )
+}
+
 export async function externalTrigger(
   automation: Automation,
   params: {

@@ -148,7 +165,7 @@ export async function externalTrigger(
     user?: UserBindings
   },
   { getResponses }: { getResponses?: boolean } = {}
-): Promise<any> {
+): Promise<AutomationResults | DidNotTriggerResponse | AutomationJob> {
   if (automation.disabled) {
     throw new Error("Automation is disabled")
   }
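Because `externalTrigger` can now resolve to an `AutomationJob`, a `DidNotTriggerResponse`, or full `AutomationResults`, callers narrow the union with the `isAutomationResults` guard before touching `steps`. A minimal sketch of the calling pattern, mirroring the trigger-automation step above (the `automation` and `params` values are placeholders):

    const response = await triggers.externalTrigger(automation, params, {
      getResponses: true,
    })
    if (triggers.isAutomationResults(response)) {
      // Narrowed: response.steps is safe to read here.
      console.log(response.steps.length)
    } else {
      // Either the trigger filter was not met or the run was queued as a job.
    }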
@@ -2,16 +2,18 @@ import { Thread, ThreadType } from "../threads"
 import { definitions } from "./triggerInfo"
 import { automationQueue } from "./bullboard"
 import { updateEntityMetadata } from "../utilities"
-import { MetadataTypes } from "../constants"
 import { context, db as dbCore, utils } from "@budibase/backend-core"
 import { getAutomationMetadataParams } from "../db/utils"
 import { cloneDeep } from "lodash/fp"
 import { quotas } from "@budibase/pro"
 import {
   Automation,
+  AutomationActionStepId,
   AutomationJob,
   AutomationStepDefinition,
   AutomationTriggerDefinition,
+  AutomationTriggerStepId,
+  MetadataType,
 } from "@budibase/types"
 import { automationsEnabled } from "../features"
 import { helpers, REBOOT_CRON } from "@budibase/shared-core"

@@ -105,7 +107,7 @@ export async function updateTestHistory(
   history: any
 ) {
   return updateEntityMetadata(
-    MetadataTypes.AUTOMATION_TEST_HISTORY,
+    MetadataType.AUTOMATION_TEST_HISTORY,
     automation._id,
     (metadata: any) => {
       if (metadata && Array.isArray(metadata.history)) {

@@ -120,19 +122,21 @@ export async function updateTestHistory(
   )
 }

-export function removeDeprecated(
-  definitions: Record<
-    string,
-    AutomationStepDefinition | AutomationTriggerDefinition
-  >
-) {
-  const base = cloneDeep(definitions)
+export function removeDeprecated<
+  T extends
+    | Record<keyof typeof AutomationTriggerStepId, AutomationTriggerDefinition>
+    | Record<keyof typeof AutomationActionStepId, AutomationStepDefinition>
+>(definitions: T): T {
+  const base: Record<
+    string,
+    AutomationTriggerDefinition | AutomationStepDefinition
+  > = cloneDeep(definitions)
   for (let key of Object.keys(base)) {
     if (base[key].deprecated) {
       delete base[key]
     }
   }
-  return base
+  return base as T
 }

 // end the repetition and the job itself
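The generic signature keeps the concrete record type across the call, so action and trigger definition maps no longer collapse to a loosely-keyed record. A hedged sketch of the call sites this enables (names as in the diff, exact shapes assumed):

    // Still typed as Record<keyof typeof AutomationActionStepId, AutomationStepDefinition>
    const actions = removeDeprecated(BUILTIN_ACTION_DEFINITIONS)
    // Trigger definitions keep their own key set rather than widening to string.
    const triggerDefs = removeDeprecated(definitions)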
@@ -124,11 +124,6 @@ export enum BaseQueryVerbs {
   DELETE = "delete",
 }

-export enum MetadataTypes {
-  AUTOMATION_TEST_INPUT = "automationTestInput",
-  AUTOMATION_TEST_HISTORY = "automationTestHistory",
-}
-
 export enum InvalidColumns {
   ID = "_id",
   REV = "_rev",

@@ -137,7 +132,6 @@ export enum InvalidColumns {

 export enum AutomationErrors {
-  INCORRECT_TYPE = "INCORRECT_TYPE",
   MAX_ITERATIONS = "MAX_ITERATIONS_REACHED",
   FAILURE_CONDITION = "FAILURE_CONDITION_MET",
 }
@@ -26,3 +26,6 @@ export interface AutomationContext extends AutomationResults {
     company?: string
   }
 }
+
+export interface AutomationResponse
+  extends Omit<AutomationContext, "stepsByName" | "stepsById"> {}
@@ -193,6 +193,34 @@ const SCHEMA: Integration = {
   },
 }

+interface MSSQLColumnDefinition {
+  TableName: string
+  ColumnName: string
+  DataType: string
+  MaxLength: number
+  IsNullable: boolean
+  IsIdentity: boolean
+  Precision: number
+  Scale: number
+}
+
+interface ColumnDefinitionMetadata {
+  usesMaxLength?: boolean
+  usesPrecision?: boolean
+}
+
+const COLUMN_DEFINITION_METADATA: Record<string, ColumnDefinitionMetadata> = {
+  DATETIME2: { usesMaxLength: true },
+  TIME: { usesMaxLength: true },
+  DATETIMEOFFSET: { usesMaxLength: true },
+  NCHAR: { usesMaxLength: true },
+  NVARCHAR: { usesMaxLength: true },
+  BINARY: { usesMaxLength: true },
+  VARBINARY: { usesMaxLength: true },
+  DECIMAL: { usesPrecision: true },
+  NUMERIC: { usesPrecision: true },
+}
+
 class SqlServerIntegration extends Sql implements DatasourcePlus {
   private readonly config: MSSQLConfig
   private index: number = 0

@@ -527,20 +555,24 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
     return this.queryWithReturning(json, queryFn, processFn)
   }

-  async getExternalSchema() {
+  private async getColumnDefinitions(): Promise<MSSQLColumnDefinition[]> {
     // Query to retrieve table schema
     const query = `
       SELECT
         t.name AS TableName,
         c.name AS ColumnName,
         ty.name AS DataType,
+        ty.precision AS Precision,
+        ty.scale AS Scale,
         c.max_length AS MaxLength,
         c.is_nullable AS IsNullable,
         c.is_identity AS IsIdentity
       FROM
         sys.tables t
       INNER JOIN sys.columns c ON t.object_id = c.object_id
-      INNER JOIN sys.types ty ON c.system_type_id = ty.system_type_id
+      INNER JOIN sys.types ty
+        ON c.system_type_id = ty.system_type_id
+        AND c.user_type_id = ty.user_type_id
       WHERE
         t.is_ms_shipped = 0
       ORDER BY

@@ -553,17 +585,36 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
       sql: query,
     })

+    return result.recordset as MSSQLColumnDefinition[]
+  }
+
+  private getDataType(columnDef: MSSQLColumnDefinition): string {
+    const { DataType, MaxLength, Precision, Scale } = columnDef
+    const { usesMaxLength = false, usesPrecision = false } =
+      COLUMN_DEFINITION_METADATA[DataType] || {}
+
+    let dataType = DataType
+
+    if (usesMaxLength) {
+      if (MaxLength === -1) {
+        dataType += `(MAX)`
+      } else {
+        dataType += `(${MaxLength})`
+      }
+    }
+    if (usesPrecision) {
+      dataType += `(${Precision}, ${Scale})`
+    }
+
+    return dataType
+  }
+
+  async getExternalSchema() {
     const scriptParts = []
     const tables: any = {}
-    for (const row of result.recordset) {
-      const {
-        TableName,
-        ColumnName,
-        DataType,
-        MaxLength,
-        IsNullable,
-        IsIdentity,
-      } = row
+    const columns = await this.getColumnDefinitions()
+    for (const row of columns) {
+      const { TableName, ColumnName, IsNullable, IsIdentity } = row

       if (!tables[TableName]) {
         tables[TableName] = {

@@ -571,9 +622,11 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
         }
       }

-      const columnDefinition = `${ColumnName} ${DataType}${
-        MaxLength ? `(${MaxLength})` : ""
-      }${IsNullable ? " NULL" : " NOT NULL"}`
+      const nullable = IsNullable ? "NULL" : "NOT NULL"
+      const identity = IsIdentity ? "IDENTITY" : ""
+      const columnDefinition = `[${ColumnName}] ${this.getDataType(
+        row
+      )} ${nullable} ${identity}`

       tables[TableName].columns.push(columnDefinition)
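For illustration, given the metadata table above, `getDataType` would render a hypothetical `NVARCHAR` column with `MaxLength` of -1 as `NVARCHAR(MAX)`, and a `DECIMAL` column with precision 18 and scale 2 as `DECIMAL(18, 2)`; types with no metadata entry (e.g. `INT`) pass through unchanged. A small sketch of the same lookup logic in isolation:

    const meta: Record<string, { usesMaxLength?: boolean; usesPrecision?: boolean }> = {
      NVARCHAR: { usesMaxLength: true },
      DECIMAL: { usesPrecision: true },
    }

    function renderType(name: string, maxLength: number, precision: number, scale: number) {
      const { usesMaxLength, usesPrecision } = meta[name] || {}
      // -1 is SQL Server's marker for "max length" columns.
      if (usesMaxLength) return maxLength === -1 ? `${name}(MAX)` : `${name}(${maxLength})`
      if (usesPrecision) return `${name}(${precision}, ${scale})`
      return name
    }

    renderType("NVARCHAR", -1, 0, 0) // "NVARCHAR(MAX)"
    renderType("DECIMAL", 9, 18, 2) // "DECIMAL(18, 2)"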
@@ -322,9 +322,7 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
         presence: required && !isAuto && !hasDefault,
         externalType: column.Type,
         options: column.Type.startsWith("enum")
-          ? column.Type.substring(5, column.Type.length - 1)
-              .split(",")
-              .map(str => str.replace(/^'(.*)'$/, "$1"))
+          ? column.Type.substring(6, column.Type.length - 2).split("','")
           : undefined,
       })
     }

@@ -414,7 +412,7 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
   async getExternalSchema() {
     try {
       const [databaseResult] = await this.internalQuery({
-        sql: `SHOW CREATE DATABASE ${this.config.database}`,
+        sql: `SHOW CREATE DATABASE IF NOT EXISTS \`${this.config.database}\``,
       })
       let dumpContent = [databaseResult["Create Database"]]

@@ -434,7 +432,7 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
       dumpContent.push(createTableStatement)
     }

-    return dumpContent.join("\n")
+    return dumpContent.join(";\n") + ";"
   } finally {
     this.disconnect()
   }
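The new substring bounds treat a MySQL column type such as enum('a','b','c') as one quoted blob: slicing from index 6 to length minus 2 strips the leading `enum('` and the trailing `')`, and splitting on `','` recovers the options without breaking values that themselves contain commas. A quick illustration of that assumption:

    const type = "enum('Lincoln, Abraham','Washington, George','Fred')"
    const options = type.substring(6, type.length - 2).split("','")
    // ["Lincoln, Abraham", "Washington, George", "Fred"]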
@@ -476,21 +476,15 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
       this.config.password
     }" pg_dump --schema-only "${dumpCommandParts.join(" ")}"`

-    return new Promise<string>((res, rej) => {
+    return new Promise<string>((resolve, reject) => {
       exec(dumpCommand, (error, stdout, stderr) => {
-        if (error) {
-          console.error(`Error generating dump: ${error.message}`)
-          rej(error.message)
+        if (error || stderr) {
+          console.error(stderr)
+          reject(new Error(stderr))
           return
         }

-        if (stderr) {
-          console.error(`pg_dump error: ${stderr}`)
-          rej(stderr)
-          return
-        }
-
-        res(stdout)
+        resolve(stdout)
         console.log("SQL dump generated successfully!")
       })
     })
@@ -102,6 +102,9 @@ function createDummyTest() {
 }

 export function datasourceDescribe(opts: DatasourceDescribeOpts) {
+  // tests that call this need a lot longer timeouts
+  jest.setTimeout(120000)
+
   if (process.env.DATASOURCE === "none") {
     createDummyTest()
   }

@@ -146,6 +149,7 @@ export function datasourceDescribe(opts: DatasourceDescribeOpts) {
     isMongodb: dbName === DatabaseName.MONGODB,
     isMSSQL: dbName === DatabaseName.SQL_SERVER,
     isOracle: dbName === DatabaseName.ORACLE,
+    isMariaDB: dbName === DatabaseName.MARIADB,
   }))
 }

@@ -155,19 +159,19 @@ function getDatasource(
   return providers[sourceName]()
 }

-export async function knexClient(ds: Datasource) {
+export async function knexClient(ds: Datasource, opts?: Knex.Config) {
   switch (ds.source) {
     case SourceName.POSTGRES: {
-      return postgres.knexClient(ds)
+      return postgres.knexClient(ds, opts)
     }
     case SourceName.MYSQL: {
-      return mysql.knexClient(ds)
+      return mysql.knexClient(ds, opts)
     }
     case SourceName.SQL_SERVER: {
-      return mssql.knexClient(ds)
+      return mssql.knexClient(ds, opts)
     }
     case SourceName.ORACLE: {
-      return oracle.knexClient(ds)
+      return oracle.knexClient(ds, opts)
     }
     default: {
       throw new Error(`Unsupported source: ${ds.source}`)
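Threading the optional `Knex.Config` through lets tests tweak client behaviour per call; the per-database helpers below spread it after their defaults, so caller options win. A sketch of a hypothetical call site (the pool settings are illustrative only):

    // Open a throwaway single-connection client against the test datasource.
    const client = await knexClient(datasource, {
      pool: { min: 0, max: 1 },
    })
    await client.raw("select 1")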
@@ -2,7 +2,7 @@ import { Datasource, SourceName } from "@budibase/types"
 import { GenericContainer, Wait } from "testcontainers"
 import { generator, testContainerUtils } from "@budibase/backend-core/tests"
 import { startContainer } from "."
-import knex from "knex"
+import knex, { Knex } from "knex"
 import { MSSQL_IMAGE } from "./images"

 let ports: Promise<testContainerUtils.Port[]>

@@ -57,7 +57,7 @@ export async function getDatasource(): Promise<Datasource> {
   return datasource
 }

-export async function knexClient(ds: Datasource) {
+export async function knexClient(ds: Datasource, opts?: Knex.Config) {
   if (!ds.config) {
     throw new Error("Datasource config is missing")
   }

@@ -68,5 +68,6 @@ export async function knexClient(ds: Datasource) {
   return knex({
     client: "mssql",
     connection: ds.config,
+    ...opts,
   })
 }
@@ -3,7 +3,7 @@ import { GenericContainer, Wait } from "testcontainers"
 import { AbstractWaitStrategy } from "testcontainers/build/wait-strategies/wait-strategy"
 import { generator, testContainerUtils } from "@budibase/backend-core/tests"
 import { startContainer } from "."
-import knex from "knex"
+import knex, { Knex } from "knex"
 import { MYSQL_IMAGE } from "./images"

 let ports: Promise<testContainerUtils.Port[]>

@@ -63,7 +63,7 @@ export async function getDatasource(): Promise<Datasource> {
   return datasource
 }

-export async function knexClient(ds: Datasource) {
+export async function knexClient(ds: Datasource, opts?: Knex.Config) {
   if (!ds.config) {
     throw new Error("Datasource config is missing")
   }

@@ -74,5 +74,6 @@ export async function knexClient(ds: Datasource) {
   return knex({
     client: "mysql2",
     connection: ds.config,
+    ...opts,
   })
 }
@@ -2,7 +2,7 @@ import { Datasource, SourceName } from "@budibase/types"
 import { GenericContainer, Wait } from "testcontainers"
 import { generator, testContainerUtils } from "@budibase/backend-core/tests"
 import { startContainer } from "."
-import knex from "knex"
+import knex, { Knex } from "knex"

 let ports: Promise<testContainerUtils.Port[]>

@@ -25,7 +25,7 @@ export async function getDatasource(): Promise<Datasource> {
     })
     .withWaitStrategy(
       Wait.forLogMessage("DATABASE IS READY TO USE!").withStartupTimeout(
-        20000
+        60000
       )
     )

@@ -58,7 +58,7 @@ export async function getDatasource(): Promise<Datasource> {
   return datasource
 }

-export async function knexClient(ds: Datasource) {
+export async function knexClient(ds: Datasource, opts?: Knex.Config) {
   if (!ds.config) {
     throw new Error("Datasource config is missing")
   }

@@ -76,6 +76,7 @@ export async function knexClient(ds: Datasource) {
       user: ds.config.user,
       password: ds.config.password,
     },
+    ...opts,
   })

   return c
@@ -2,7 +2,7 @@ import { Datasource, SourceName } from "@budibase/types"
 import { GenericContainer, Wait } from "testcontainers"
 import { generator, testContainerUtils } from "@budibase/backend-core/tests"
 import { startContainer } from "."
-import knex from "knex"
+import knex, { Knex } from "knex"
 import { POSTGRES_IMAGE } from "./images"

 let ports: Promise<testContainerUtils.Port[]>

@@ -51,7 +51,10 @@ export async function getDatasource(): Promise<Datasource> {
   return datasource
 }

-export async function knexClient(ds: Datasource) {
+export async function knexClient(
+  ds: Datasource,
+  opts?: Knex.Config
+): Promise<Knex> {
   if (!ds.config) {
     throw new Error("Datasource config is missing")
   }

@@ -62,5 +65,6 @@ export async function knexClient(ds: Datasource) {
   return knex({
     client: "pg",
     connection: ds.config,
+    ...opts,
   })
 }
@@ -138,12 +138,22 @@ export function generateColumnDefinition(config: {
   let { externalType, autocolumn, name, presence, options } = config
   let foundType = FieldType.STRING
   const lowerCaseType = externalType.toLowerCase()
-  let matchingTypes = []
-  for (let [external, internal] of Object.entries(SQL_TYPE_MAP)) {
-    if (lowerCaseType.includes(external)) {
-      matchingTypes.push({ external, internal })
+  let matchingTypes: { external: string; internal: PrimitiveTypes }[] = []
+
+  // In at least MySQL, the external type of an ENUM column is "enum('option1',
+  // 'option2', ...)", which can potentially contain any type name as a
+  // substring. To get around this interfering with the loop below, we first
+  // check for an enum column and handle that separately.
+  if (lowerCaseType.startsWith("enum")) {
+    matchingTypes.push({ external: "enum", internal: FieldType.OPTIONS })
+  } else {
+    for (let [external, internal] of Object.entries(SQL_TYPE_MAP)) {
+      if (lowerCaseType.includes(external)) {
+        matchingTypes.push({ external, internal })
+      }
     }
   }

   // Set the foundType based the longest match
   if (matchingTypes.length > 0) {
     foundType = matchingTypes.reduce((acc, val) => {
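Without the enum early-exit, a native enum whose options happen to spell type names would substring-match them and be mis-mapped by the longest-match reduce below. A small sketch of the failure mode being avoided (simplified type map assumed):

    const SQL_TYPE_MAP: Record<string, string> = { int: "number", text: "string" }
    const lower = "enum('int','map')"
    const matches = Object.keys(SQL_TYPE_MAP).filter(t => lower.includes(t))
    // ["int"] -- the column would be treated as a number instead of options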
@@ -1,5 +1,5 @@
 import { isDevAppID, isProdAppID } from "../db/utils"
-import { BBContext } from "@budibase/types"
+import { Ctx } from "@budibase/types"

 export enum AppType {
   DEV = "dev",

@@ -7,7 +7,7 @@ export enum AppType {
 }

 export function middleware({ appType }: { appType?: AppType } = {}) {
-  return (ctx: BBContext, next: any) => {
+  return (ctx: Ctx, next: any) => {
     const appId = ctx.appId
     if (appType === AppType.DEV && appId && !isDevAppID(appId)) {
       ctx.throw(400, "Only apps in development support this endpoint")
@@ -1,8 +1,9 @@
 import { UserCtx } from "@budibase/types"
 import { checkMissingMigrations } from "../appMigrations"
 import env from "../environment"
+import type { Middleware, Next } from "koa"

-export default async (ctx: UserCtx, next: any) => {
+const middleware = (async (ctx: UserCtx, next: Next) => {
   const { appId } = ctx

   // migrations can be disabled via environment variable if you

@@ -15,4 +17,6 @@ export default async (ctx: UserCtx, next: any) => {
   }

   return checkMissingMigrations(ctx, next, appId)
-}
+}) as Middleware
+
+export default middleware
@@ -1,8 +1,9 @@
 import { Ctx } from "@budibase/types"
 import { context } from "@budibase/backend-core"
 import { tracer } from "dd-trace"
+import type { Middleware, Next } from "koa"

-export default async (ctx: Ctx, next: any) => {
+const middleware = (async (ctx: Ctx, next: Next) => {
   const resp = await next()

   const current = context.getCurrentContext()

@@ -30,4 +31,6 @@ export default async (ctx: Ctx, next: any) => {
   }

   return resp
-}
+}) as Middleware
+
+export default middleware
@@ -13,8 +13,9 @@ import env from "../environment"
 import { isWebhookEndpoint, isBrowser, isApiKey } from "./utils"
 import { UserCtx, ContextUser } from "@budibase/types"
 import tracer from "dd-trace"
+import type { Middleware, Next } from "koa"

-export default async (ctx: UserCtx, next: any) => {
+const middleware = (async (ctx: UserCtx, next: Next) => {
   // try to get the appID from the request
   let requestAppId = await utils.getAppIdFromCtx(ctx)
   if (!requestAppId) {

@@ -116,4 +117,6 @@ export default async (ctx: UserCtx, next: any) => {

   return next()
   })
-}
+}) as Middleware
+
+export default middleware
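All three middlewares follow the same pattern: keep the handler as an async arrow, type `next` with Koa's `Next`, and assert the whole expression `as Middleware` so the Budibase-specific `UserCtx`/`Ctx` context types do not conflict with Koa's default context parameter. A minimal sketch of the pattern in isolation (the logging middleware is illustrative only):

    import type { Context, Middleware, Next } from "koa"

    const logDuration = (async (ctx: Context, next: Next) => {
      const start = Date.now()
      await next()
      // Record how long the downstream middleware chain took.
      console.log(`${ctx.method} ${ctx.path} took ${Date.now() - start}ms`)
    }) as Middleware

    export default logDuration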