Merge branch 'master' into 3.0-metrics

Martin McKeaveney 2024-12-06 18:29:45 +00:00 committed by GitHub
commit aba76b0650
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
213 changed files with 4067 additions and 3762 deletions

View File

@ -200,6 +200,20 @@ jobs:
- run: yarn --frozen-lockfile
- name: Set up PostgreSQL 16
if: matrix.datasource == 'postgres'
run: |
sudo systemctl stop postgresql
sudo apt-get remove --purge -y postgresql* libpq-dev
sudo rm -rf /etc/postgresql /var/lib/postgresql
sudo apt-get autoremove -y
sudo apt-get autoclean
sudo sh -c 'echo "deb http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list'
wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add -
sudo apt-get update
sudo apt-get install -y postgresql-16
- name: Test server
env:
DATASOURCE: ${{ matrix.datasource }}
@ -281,6 +295,7 @@ jobs:
check-lockfile:
runs-on: ubuntu-latest
if: inputs.run_as_oss != true && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase')
steps:
- name: Checkout repo
uses: actions/checkout@v4

View File

@ -22,6 +22,6 @@
"@types/react": "17.0.39",
"eslint": "8.10.0",
"eslint-config-next": "12.1.0",
"typescript": "5.5.2"
"typescript": "5.7.2"
}
}

View File

@ -6,6 +6,26 @@ import {
import { ContainerInfo } from "dockerode"
import path from "path"
import lockfile from "proper-lockfile"
import { execSync } from "child_process"
interface DockerContext {
Name: string
Description: string
DockerEndpoint: string
ContextType: string
Error: string
}
function getCurrentDockerContext(): DockerContext {
const out = execSync("docker context ls --format json")
for (const line of out.toString().split("\n")) {
const parsed = JSON.parse(line)
if (parsed.Current) {
return parsed as DockerContext
}
}
throw new Error("No current Docker context")
}
async function getBudibaseContainers() {
const client = await getContainerRuntimeClient()
@ -27,6 +47,16 @@ async function killContainers(containers: ContainerInfo[]) {
}
export default async function setup() {
process.env.TESTCONTAINERS_RYUK_DISABLED = "true"
// For whatever reason, testcontainers doesn't always use the correct current
// docker context. This bit of code forces the issue by finding the current
// context and setting it as the DOCKER_HOST environment variable.
if (!process.env.DOCKER_HOST) {
const dockerContext = getCurrentDockerContext()
process.env.DOCKER_HOST = dockerContext.DockerEndpoint
}
const lockPath = path.resolve(__dirname, "globalSetup.ts")
// If you run multiple tests at the same time, it's possible for the CouchDB
// shared container to get started multiple times despite having an
@ -47,6 +77,7 @@ export default async function setup() {
try {
const couchdb = new GenericContainer("budibase/couchdb:v3.3.3-sqs-v2.1.1")
.withName("couchdb_testcontainer")
.withExposedPorts(5984, 4984)
.withEnvironment({
COUCHDB_PASSWORD: "budibase",
@ -71,6 +102,7 @@ export default async function setup() {
)
const minio = new GenericContainer("minio/minio")
.withName("minio_testcontainer")
.withExposedPorts(9000)
.withCommand(["server", "/data"])
.withTmpFs({ "/data": "rw" })
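
As a reference for the context-detection logic added above, here is a minimal sketch of the line-delimited JSON that `docker context ls --format json` emits and how the `Current` flag is used; the endpoint values below are illustrative, not taken from the commit:

// Each line is one JSON object; the active context has Current === true.
const exampleOutput = [
  `{"Current":true,"Name":"default","DockerEndpoint":"unix:///var/run/docker.sock"}`,
  `{"Current":false,"Name":"colima","DockerEndpoint":"unix:///tmp/colima/docker.sock"}`,
].join("\n")

const current = exampleOutput
  .split("\n")
  .map(line => JSON.parse(line))
  .find(c => c.Current)

// Mirrors setup(): DOCKER_HOST would become "unix:///var/run/docker.sock" here.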

View File

@ -46,6 +46,11 @@ server {
}
location ~ ^/api/(system|admin|global)/ {
# Enable buffering for potentially large OIDC configs
proxy_buffering on;
proxy_buffer_size 16k;
proxy_buffers 4 32k;
proxy_pass http://127.0.0.1:4002;
}

View File

@ -1,6 +1,6 @@
{
"$schema": "node_modules/lerna/schemas/lerna-schema.json",
"version": "3.2.14",
"version": "3.2.25",
"npmClient": "yarn",
"concurrency": 20,
"command": {

View File

@ -9,6 +9,7 @@
"@types/node": "20.10.0",
"@types/proper-lockfile": "^4.1.4",
"@typescript-eslint/parser": "6.9.0",
"cross-spawn": "7.0.6",
"depcheck": "^1.4.7",
"esbuild": "^0.18.17",
"esbuild-node-externals": "^1.14.0",
@ -27,10 +28,9 @@
"proper-lockfile": "^4.1.2",
"svelte": "4.2.19",
"svelte-eslint-parser": "^0.33.1",
"typescript": "5.5.2",
"typescript": "5.7.2",
"typescript-eslint": "^7.3.1",
"yargs": "^17.7.2",
"cross-spawn": "7.0.6"
"yargs": "^17.7.2"
},
"scripts": {
"get-past-client-version": "node scripts/getPastClientVersion.js",
@ -76,7 +76,6 @@
"build:docker:dependencies": "docker build -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest ./hosting",
"publish:docker:couch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/couchdb/Dockerfile -t budibase/couchdb:latest -t budibase/couchdb:v3.3.3 -t budibase/couchdb:v3.3.3-sqs-v2.1.1 --push ./hosting/couchdb",
"publish:docker:dependencies": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest -t budibase/dependencies:v3.2.1 --push ./hosting",
"release:helm": "node scripts/releaseHelmChart",
"env:multi:enable": "lerna run --stream env:multi:enable",
"env:multi:disable": "lerna run --stream env:multi:disable",
"env:selfhost:enable": "lerna run --stream env:selfhost:enable",

View File

@ -83,6 +83,7 @@
"@types/semver": "7.3.7",
"@types/tar-fs": "2.0.1",
"@types/uuid": "8.3.4",
"@types/koa": "2.13.4",
"chance": "1.1.8",
"ioredis-mock": "8.9.0",
"jest": "29.7.0",
@ -90,9 +91,9 @@
"nock": "^13.5.6",
"pino-pretty": "10.0.0",
"pouchdb-adapter-memory": "7.2.2",
"testcontainers": "^10.7.2",
"testcontainers": "10.16.0",
"timekeeper": "2.2.0",
"typescript": "5.5.2"
"typescript": "5.7.2"
},
"nx": {
"targets": {

View File

@ -121,7 +121,7 @@ const identifyInstallationGroup = async (
const identifyTenantGroup = async (
tenantId: string,
account: Account | undefined,
hosting: Hosting,
timestamp?: string | number
): Promise<void> => {
const id = await getEventTenantId(tenantId)
@ -129,26 +129,12 @@ const identifyTenantGroup = async (
const installationId = await getInstallationId()
const environment = getDeploymentEnvironment()
let hosting: Hosting
let profession: string | undefined
let companySize: string | undefined
if (account) {
profession = account.profession
companySize = account.size
hosting = account.hosting
} else {
hosting = getHostingFromEnv()
}
const group: TenantGroup = {
id,
type,
hosting,
environment,
installationId,
profession,
companySize,
}
await identifyGroup(group, timestamp)

View File

@ -266,12 +266,14 @@ export class FlagSet<V extends Flag<any>, T extends { [key: string]: V }> {
// new flag, add it here and use the `fetch` and `get` functions to access it.
// All of the machinery in this file is to make sure that flags have their
// default values set correctly and their types flow through the system.
export const flags = new FlagSet({
const flagsConfig: Record<FeatureFlag, Flag<any>> = {
[FeatureFlag.DEFAULT_VALUES]: Flag.boolean(true),
[FeatureFlag.AUTOMATION_BRANCHING]: Flag.boolean(true),
[FeatureFlag.AI_CUSTOM_CONFIGS]: Flag.boolean(true),
[FeatureFlag.BUDIBASE_AI]: Flag.boolean(true),
})
[FeatureFlag.USE_ZOD_VALIDATOR]: Flag.boolean(env.isDev()),
}
export const flags = new FlagSet(flagsConfig)
type UnwrapPromise<T> = T extends Promise<infer U> ? U : T
export type FeatureFlags = UnwrapPromise<ReturnType<typeof flags.fetch>>
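
The comment above points to `fetch` and `get` as the accessors; a hedged usage sketch, assuming `fetch()` resolves every flag to its concrete value with defaults applied:

async function shouldUseZodValidator(): Promise<boolean> {
  // FeatureFlags is the awaited return type of flags.fetch(), so the
  // lookup below is typed.
  const current: FeatureFlags = await flags.fetch()
  // USE_ZOD_VALIDATOR defaults to env.isDev(), so this is true in dev
  // and false in production unless overridden at runtime.
  return current[FeatureFlag.USE_ZOD_VALIDATOR]
}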

View File

@ -1,6 +1,10 @@
import { BBContext } from "@budibase/types"
import { Ctx } from "@budibase/types"
import type { Middleware, Next } from "koa"
export default async (ctx: BBContext | any, next: any) => {
// this middleware exists purely to be overridden by middlewares supplied by the @budibase/pro library
const middleware = (async (ctx: Ctx, next: Next) => {
// Placeholder for audit log middleware
return next()
}
}) as Middleware
export default middleware
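
This file establishes the `(async (ctx: Ctx, next: Next) => { ... }) as Middleware` pattern reused in the auth, CSRF, and tenancy middlewares below. A self-contained sketch of why the cast is needed, assuming Budibase's `Ctx` narrows Koa's context with required extra fields (the `requestId` field here is hypothetical):

import type { Context, Middleware, Next } from "koa"

interface NarrowCtx extends Context {
  requestId: string // hypothetical Budibase-style addition
}

// Under strictFunctionTypes a handler taking the narrower NarrowCtx is not
// assignable to Middleware (whose handler must accept any Koa context), so
// the cast asserts that the app will always supply the enriched context.
const mw = (async (ctx: NarrowCtx, next: Next) => {
  return next()
}) as Middleware

void mw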

View File

@ -22,6 +22,7 @@ import {
} from "@budibase/types"
import { ErrorCode, InvalidAPIKeyError } from "../errors"
import tracer from "dd-trace"
import type { Middleware, Next } from "koa"
const ONE_MINUTE = env.SESSION_UPDATE_PERIOD
? parseInt(env.SESSION_UPDATE_PERIOD)
@ -94,6 +95,14 @@ async function checkApiKey(
})
}
function getHeader(ctx: Ctx, header: Header): string | undefined {
const contents = ctx.request.headers[header]
if (Array.isArray(contents)) {
throw new Error("Unexpected header format")
}
return contents
}
/**
* This middleware is tenancy aware, so that it does not depend on other middlewares being used.
* The tenancy modules should not be used here and it should be assumed that the tenancy context
@ -106,9 +115,9 @@ export default function (
}
) {
const noAuthOptions = noAuthPatterns ? buildMatcherRegex(noAuthPatterns) : []
return async (ctx: Ctx | any, next: any) => {
return (async (ctx: Ctx, next: Next) => {
let publicEndpoint = false
const version = ctx.request.headers[Header.API_VER]
const version = getHeader(ctx, Header.API_VER)
// the path is not authenticated
const found = matches(ctx, noAuthOptions)
if (found) {
@ -116,18 +125,18 @@ export default function (
}
try {
// check the actual user is authenticated first, try header or cookie
let headerToken = ctx.request.headers[Header.TOKEN]
let headerToken = getHeader(ctx, Header.TOKEN)
const authCookie =
getCookie<SessionCookie>(ctx, Cookie.Auth) ||
openJwt<SessionCookie>(headerToken)
let apiKey = ctx.request.headers[Header.API_KEY]
let apiKey = getHeader(ctx, Header.API_KEY)
if (!apiKey && ctx.request.headers[Header.AUTHORIZATION]) {
apiKey = ctx.request.headers[Header.AUTHORIZATION].split(" ")[1]
}
const tenantId = ctx.request.headers[Header.TENANT_ID]
const tenantId = getHeader(ctx, Header.TENANT_ID)
let authenticated: boolean = false,
user: User | { tenantId: string } | undefined = undefined,
internal: boolean = false,
@ -243,5 +252,5 @@ export default function (
ctx.throw(err.status || 403, err)
}
}
}
}) as Middleware
}
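
A quick illustration of what `getHeader` guards against: Node's `IncomingHttpHeaders` types each value as `string | string[] | undefined`, and the helper rejects the array form rather than passing an array where a string is expected (header names below are made up):

const incoming: Record<string, string | string[] | undefined> = {
  "x-example-api-version": "1",
  "x-duplicated": ["a", "b"], // a header that arrived as an array
}

function one(value: string | string[] | undefined): string | undefined {
  if (Array.isArray(value)) {
    throw new Error("Unexpected header format")
  }
  return value
}

// one(incoming["x-example-api-version"]) -> "1"
// one(incoming["x-duplicated"]) -> throws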

View File

@ -1,6 +1,7 @@
import { Header } from "../constants"
import { buildMatcherRegex, matches } from "./matchers"
import { BBContext, EndpointMatcher } from "@budibase/types"
import { Ctx, EndpointMatcher } from "@budibase/types"
import type { Middleware, Next } from "koa"
/**
* GET, HEAD and OPTIONS methods are considered safe operations
@ -36,7 +37,7 @@ export default function (
opts: { noCsrfPatterns: EndpointMatcher[] } = { noCsrfPatterns: [] }
) {
const noCsrfOptions = buildMatcherRegex(opts.noCsrfPatterns)
return async (ctx: BBContext | any, next: any) => {
return (async (ctx: Ctx, next: Next) => {
// don't apply for excluded paths
const found = matches(ctx, noCsrfOptions)
if (found) {
@ -77,5 +78,5 @@ export default function (
}
return next()
}
}) as Middleware
}

View File

@ -1,11 +1,11 @@
import { Header } from "../constants"
import { BBContext } from "@budibase/types"
import { Ctx } from "@budibase/types"
import { isValidInternalAPIKey } from "../utils"
/**
* API Key only endpoint.
*/
export default async (ctx: BBContext, next: any) => {
export default async (ctx: Ctx, next: any) => {
const apiKey = ctx.request.headers[Header.API_KEY]
if (!apiKey) {
ctx.throw(403, "Unauthorized")

View File

@ -1,4 +1,4 @@
import { BBContext, EndpointMatcher, RegexMatcher } from "@budibase/types"
import { Ctx, EndpointMatcher, RegexMatcher } from "@budibase/types"
const PARAM_REGEX = /\/:(.*?)(\/.*)?$/g
@ -27,7 +27,7 @@ export const buildMatcherRegex = (
})
}
export const matches = (ctx: BBContext, options: RegexMatcher[]) => {
export const matches = (ctx: Ctx, options: RegexMatcher[]) => {
return options.find(({ regex, method }) => {
const urlMatch = regex.test(ctx.request.url)
const methodMatch =

View File

@ -2,7 +2,7 @@ import { UserStatus } from "../../constants"
import { compare } from "../../utils"
import * as users from "../../users"
import { authError } from "./utils"
import { BBContext } from "@budibase/types"
import { Ctx } from "@budibase/types"
const INVALID_ERR = "Invalid credentials"
const EXPIRED = "This account has expired. Please reset your password"
@ -20,7 +20,7 @@ export const options = {
* @returns The authenticated user, or errors if they occur
*/
export async function authenticate(
ctx: BBContext,
ctx: Ctx,
email: string,
password: string,
done: Function

View File

@ -3,11 +3,12 @@ import { getTenantIDFromCtx } from "../tenancy"
import { buildMatcherRegex, matches } from "./matchers"
import { Header } from "../constants"
import {
BBContext,
Ctx,
EndpointMatcher,
GetTenantIdOptions,
TenantResolutionStrategy,
} from "@budibase/types"
import type { Next, Middleware } from "koa"
export default function (
allowQueryStringPatterns: EndpointMatcher[],
@ -17,7 +18,7 @@ export default function (
const allowQsOptions = buildMatcherRegex(allowQueryStringPatterns)
const noTenancyOptions = buildMatcherRegex(noTenancyPatterns)
return async function (ctx: BBContext | any, next: any) {
return async function (ctx: Ctx, next: Next) {
const allowNoTenant =
opts.noTenancyRequired || !!matches(ctx, noTenancyOptions)
const tenantOpts: GetTenantIdOptions = {
@ -32,5 +33,5 @@ export default function (
const tenantId = getTenantIDFromCtx(ctx, tenantOpts)
ctx.set(Header.TENANT_ID, tenantId as string)
return doInTenant(tenantId, next)
}
} as Middleware
}

View File

@ -11,7 +11,7 @@ describe("redis", () => {
let container: StartedTestContainer
beforeAll(async () => {
const container = await new GenericContainer("redis")
container = await new GenericContainer("redis")
.withExposedPorts(6379)
.start()

View File

@ -2,6 +2,8 @@ import {
PermissionLevel,
PermissionType,
BuiltinPermissionID,
Permission,
BuiltinPermissions,
} from "@budibase/types"
import flatten from "lodash/flatten"
import cloneDeep from "lodash/fp/cloneDeep"
@ -12,7 +14,7 @@ export type RoleHierarchy = {
permissionId: string
}[]
export class Permission {
export class PermissionImpl implements Permission {
type: PermissionType
level: PermissionLevel
@ -61,68 +63,62 @@ export function getAllowedLevels(userPermLevel: PermissionLevel): string[] {
}
}
export const BUILTIN_PERMISSIONS: {
[key in keyof typeof BuiltinPermissionID]: {
_id: (typeof BuiltinPermissionID)[key]
name: string
permissions: Permission[]
}
} = {
export const BUILTIN_PERMISSIONS: BuiltinPermissions = {
PUBLIC: {
_id: BuiltinPermissionID.PUBLIC,
name: "Public",
permissions: [
new Permission(PermissionType.WEBHOOK, PermissionLevel.EXECUTE),
new PermissionImpl(PermissionType.WEBHOOK, PermissionLevel.EXECUTE),
],
},
READ_ONLY: {
_id: BuiltinPermissionID.READ_ONLY,
name: "Read only",
permissions: [
new Permission(PermissionType.QUERY, PermissionLevel.READ),
new Permission(PermissionType.TABLE, PermissionLevel.READ),
new Permission(PermissionType.APP, PermissionLevel.READ),
new PermissionImpl(PermissionType.QUERY, PermissionLevel.READ),
new PermissionImpl(PermissionType.TABLE, PermissionLevel.READ),
new PermissionImpl(PermissionType.APP, PermissionLevel.READ),
],
},
WRITE: {
_id: BuiltinPermissionID.WRITE,
name: "Read/Write",
permissions: [
new Permission(PermissionType.QUERY, PermissionLevel.WRITE),
new Permission(PermissionType.TABLE, PermissionLevel.WRITE),
new Permission(PermissionType.AUTOMATION, PermissionLevel.EXECUTE),
new Permission(PermissionType.LEGACY_VIEW, PermissionLevel.READ),
new Permission(PermissionType.APP, PermissionLevel.READ),
new PermissionImpl(PermissionType.QUERY, PermissionLevel.WRITE),
new PermissionImpl(PermissionType.TABLE, PermissionLevel.WRITE),
new PermissionImpl(PermissionType.AUTOMATION, PermissionLevel.EXECUTE),
new PermissionImpl(PermissionType.LEGACY_VIEW, PermissionLevel.READ),
new PermissionImpl(PermissionType.APP, PermissionLevel.READ),
],
},
POWER: {
_id: BuiltinPermissionID.POWER,
name: "Power",
permissions: [
new Permission(PermissionType.TABLE, PermissionLevel.WRITE),
new Permission(PermissionType.USER, PermissionLevel.READ),
new Permission(PermissionType.AUTOMATION, PermissionLevel.EXECUTE),
new Permission(PermissionType.WEBHOOK, PermissionLevel.READ),
new Permission(PermissionType.LEGACY_VIEW, PermissionLevel.READ),
new Permission(PermissionType.APP, PermissionLevel.READ),
new PermissionImpl(PermissionType.TABLE, PermissionLevel.WRITE),
new PermissionImpl(PermissionType.USER, PermissionLevel.READ),
new PermissionImpl(PermissionType.AUTOMATION, PermissionLevel.EXECUTE),
new PermissionImpl(PermissionType.WEBHOOK, PermissionLevel.READ),
new PermissionImpl(PermissionType.LEGACY_VIEW, PermissionLevel.READ),
new PermissionImpl(PermissionType.APP, PermissionLevel.READ),
],
},
ADMIN: {
_id: BuiltinPermissionID.ADMIN,
name: "Admin",
permissions: [
new Permission(PermissionType.TABLE, PermissionLevel.ADMIN),
new Permission(PermissionType.USER, PermissionLevel.ADMIN),
new Permission(PermissionType.AUTOMATION, PermissionLevel.ADMIN),
new Permission(PermissionType.WEBHOOK, PermissionLevel.READ),
new Permission(PermissionType.QUERY, PermissionLevel.ADMIN),
new Permission(PermissionType.LEGACY_VIEW, PermissionLevel.READ),
new Permission(PermissionType.APP, PermissionLevel.READ),
new PermissionImpl(PermissionType.TABLE, PermissionLevel.ADMIN),
new PermissionImpl(PermissionType.USER, PermissionLevel.ADMIN),
new PermissionImpl(PermissionType.AUTOMATION, PermissionLevel.ADMIN),
new PermissionImpl(PermissionType.WEBHOOK, PermissionLevel.READ),
new PermissionImpl(PermissionType.QUERY, PermissionLevel.ADMIN),
new PermissionImpl(PermissionType.LEGACY_VIEW, PermissionLevel.READ),
new PermissionImpl(PermissionType.APP, PermissionLevel.READ),
],
},
}
export function getBuiltinPermissions() {
export function getBuiltinPermissions(): BuiltinPermissions {
return cloneDeep(BUILTIN_PERMISSIONS)
}
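
For context on the rename, a minimal sketch of the interface-versus-class split it enables: `Permission` can now name the type imported from `@budibase/types`, while `PermissionImpl` is the runtime constructor (shapes assumed from the call sites above):

interface PermissionLike {
  type: string
  level: string
}

class PermissionLikeImpl implements PermissionLike {
  constructor(
    public type: string,
    public level: string
  ) {}
}

// Type positions use the interface; construction sites use the class,
// so the `Permission` name is freed up for the type import.
const perms: PermissionLike[] = [new PermissionLikeImpl("WEBHOOK", "EXECUTE")]
void perms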

View File

@ -592,7 +592,10 @@ export class AccessController {
)
}
async checkScreensAccess(screens: Screen[], userRoleId: string) {
async checkScreensAccess(
screens: Screen[],
userRoleId: string
): Promise<Screen[]> {
let accessibleScreens = []
// don't want to handle this with Promise.all as this would mean all custom roles would be
// retrieved at same time, it is likely a custom role will be re-used and therefore want

View File

@ -133,7 +133,7 @@ describe("getBuiltinPermissionByID", () => {
_id: BuiltinPermissionID.PUBLIC,
name: "Public",
permissions: [
new permissions.Permission(
new permissions.PermissionImpl(
permissions.PermissionType.WEBHOOK,
permissions.PermissionLevel.EXECUTE
),

View File

@ -18,6 +18,7 @@ import {
BasicOperator,
BBReferenceFieldMetadata,
CalculationType,
EnrichedQueryJson,
FieldSchema,
FieldType,
INTERNAL_TABLE_SOURCE_ID,
@ -27,7 +28,6 @@ import {
LogicalOperator,
Operation,
prefixed,
QueryJson,
QueryOptions,
RangeOperator,
RelationshipsJson,
@ -134,18 +134,18 @@ const allowEmptyRelationships: Record<SearchFilterKey, boolean> = {
class InternalBuilder {
private readonly client: SqlClient
private readonly query: QueryJson
private readonly query: EnrichedQueryJson
private readonly splitter: dataFilters.ColumnSplitter
private readonly knex: Knex
constructor(client: SqlClient, knex: Knex, query: QueryJson) {
constructor(client: SqlClient, knex: Knex, query: EnrichedQueryJson) {
this.client = client
this.query = query
this.knex = knex
this.splitter = new dataFilters.ColumnSplitter([this.table], {
aliases: this.query.tableAliases,
columnPrefix: this.query.meta.columnPrefix,
columnPrefix: this.query.meta?.columnPrefix,
})
}
@ -167,7 +167,7 @@ class InternalBuilder {
}
get table(): Table {
return this.query.meta.table
return this.query.table
}
get knexClient(): Knex.Client {
@ -273,8 +273,7 @@ class InternalBuilder {
}
private isFullSelectStatementRequired(): boolean {
const { meta } = this.query
for (let column of Object.values(meta.table.schema)) {
for (let column of Object.values(this.table.schema)) {
if (this.SPECIAL_SELECT_CASES.POSTGRES_MONEY(column)) {
return true
} else if (this.SPECIAL_SELECT_CASES.MSSQL_DATES(column)) {
@ -285,14 +284,14 @@ class InternalBuilder {
}
private generateSelectStatement(): (string | Knex.Raw)[] | "*" {
const { meta, endpoint, resource } = this.query
const { table, resource } = this.query
if (!resource || !resource.fields || resource.fields.length === 0) {
return "*"
}
const alias = this.getTableName(endpoint.entityId)
const schema = meta.table.schema
const alias = this.getTableName(table)
const schema = this.table.schema
if (!this.isFullSelectStatementRequired()) {
return [this.knex.raw("??", [`${alias}.*`])]
}
@ -497,9 +496,8 @@ class InternalBuilder {
filterKey: string,
whereCb: (filterKey: string, query: Knex.QueryBuilder) => Knex.QueryBuilder
): Knex.QueryBuilder {
const { relationships, endpoint, tableAliases: aliases } = this.query
const tableName = endpoint.entityId
const fromAlias = aliases?.[tableName] || tableName
const { relationships, schema, tableAliases: aliases, table } = this.query
const fromAlias = aliases?.[table.name] || table.name
const matches = (value: string) =>
filterKey.match(new RegExp(`^${value}\\.`))
if (!relationships) {
@ -539,7 +537,7 @@ class InternalBuilder {
aliases?.[manyToMany.through] || relationship.through
let throughTable = this.tableNameWithSchema(manyToMany.through, {
alias: throughAlias,
schema: endpoint.schema,
schema,
})
subQuery = subQuery
// add a join through the junction table
@ -1012,28 +1010,10 @@ class InternalBuilder {
return isSqs(this.table)
}
getTableName(tableOrName?: Table | string): string {
let table: Table
if (typeof tableOrName === "string") {
const name = tableOrName
if (this.query.table?.name === name) {
table = this.query.table
} else if (this.query.meta.table?.name === name) {
table = this.query.meta.table
} else if (!this.query.meta.tables?.[name]) {
// This can legitimately happen in custom queries, where the user is
// querying against a table that may not have been imported into
// Budibase.
return name
} else {
table = this.query.meta.tables[name]
}
} else if (tableOrName) {
table = tableOrName
} else {
getTableName(table?: Table): string {
if (!table) {
table = this.table
}
let name = table.name
if (isSqs(table) && table._id) {
// SQS uses the table ID rather than the table name
@ -1191,8 +1171,9 @@ class InternalBuilder {
return withSchema
}
private buildJsonField(field: string): string {
private buildJsonField(table: Table, field: string): [string, Knex.Raw] {
const parts = field.split(".")
let baseName = parts[parts.length - 1]
let unaliased: string
let tableField: string
@ -1205,10 +1186,19 @@ class InternalBuilder {
tableField = unaliased
}
const separator = this.client === SqlClient.ORACLE ? " VALUE " : ","
return this.knex
.raw(`?${separator}??`, [unaliased, this.rawQuotedIdentifier(tableField)])
.toString()
if (this.query.meta?.columnPrefix) {
baseName = baseName.replace(this.query.meta.columnPrefix, "")
}
let identifier = this.rawQuotedIdentifier(tableField)
// Internal tables have special _id, _rev, createdAt, and updatedAt fields
// that do not appear in the schema, meaning schema could actually be
// undefined.
const schema: FieldSchema | undefined = table.schema[baseName]
if (schema && schema.type === FieldType.BIGINT) {
identifier = this.castIntToString(identifier)
}
return [unaliased, identifier]
}
maxFunctionParameters() {
@ -1234,7 +1224,7 @@ class InternalBuilder {
): Knex.QueryBuilder {
const sqlClient = this.client
const knex = this.knex
const { resource, tableAliases: aliases, endpoint, meta } = this.query
const { resource, tableAliases: aliases, schema, tables } = this.query
const fields = resource?.fields || []
for (let relationship of relationships) {
const {
@ -1249,13 +1239,16 @@ class InternalBuilder {
if (!toTable || !fromTable) {
continue
}
const relatedTable = meta.tables?.[toTable]
const relatedTable = tables[toTable]
if (!relatedTable) {
throw new Error(`related table "${toTable}" not found in datasource`)
}
const toAlias = aliases?.[toTable] || toTable,
fromAlias = aliases?.[fromTable] || fromTable,
throughAlias = (throughTable && aliases?.[throughTable]) || throughTable
let toTableWithSchema = this.tableNameWithSchema(toTable, {
alias: toAlias,
schema: endpoint.schema,
schema,
})
const requiredFields = [
...(relatedTable?.primary || []),
@ -1271,8 +1264,14 @@ class InternalBuilder {
0,
Math.floor(this.maxFunctionParameters() / 2)
)
const fieldList: string = relationshipFields
.map(field => this.buildJsonField(field))
const fieldList = relationshipFields.map(field =>
this.buildJsonField(relatedTable, field)
)
const fieldListFormatted = fieldList
.map(f => {
const separator = this.client === SqlClient.ORACLE ? " VALUE " : ","
return this.knex.raw(`?${separator}??`, [f[0], f[1]]).toString()
})
.join(",")
// SQL Server uses TOP - which performs a little differently to the normal LIMIT syntax
// it reduces the result set rather than limiting how much data it filters over
@ -1293,7 +1292,7 @@ class InternalBuilder {
if (isManyToMany) {
let throughTableWithSchema = this.tableNameWithSchema(throughTable, {
alias: throughAlias,
schema: endpoint.schema,
schema,
})
subQuery = subQuery.join(throughTableWithSchema, function () {
this.on(`${toAlias}.${toPrimary}`, "=", `${throughAlias}.${toKey}`)
@ -1320,35 +1319,42 @@ class InternalBuilder {
// need to check the junction table document is to the right column, this is just for SQS
subQuery = this.addJoinFieldCheck(subQuery, relationship)
wrapperQuery = standardWrap(
this.knex.raw(`json_group_array(json_object(${fieldList}))`)
this.knex.raw(
`json_group_array(json_object(${fieldListFormatted}))`
)
)
break
case SqlClient.POSTGRES:
wrapperQuery = standardWrap(
this.knex.raw(`json_agg(json_build_object(${fieldList}))`)
this.knex.raw(`json_agg(json_build_object(${fieldListFormatted}))`)
)
break
case SqlClient.MARIADB:
// can't use the standard wrap due to correlated sub-query limitations in MariaDB
wrapperQuery = subQuery.select(
knex.raw(
`json_arrayagg(json_object(${fieldList}) LIMIT ${getRelationshipLimit()})`
`json_arrayagg(json_object(${fieldListFormatted}) LIMIT ${getRelationshipLimit()})`
)
)
break
case SqlClient.MY_SQL:
case SqlClient.ORACLE:
wrapperQuery = standardWrap(
this.knex.raw(`json_arrayagg(json_object(${fieldList}))`)
this.knex.raw(`json_arrayagg(json_object(${fieldListFormatted}))`)
)
break
case SqlClient.MS_SQL: {
const comparatorQuery = knex
.select(`${fromAlias}.*`)
.select(`*`)
// @ts-ignore - from alias syntax not TS supported
.from({
[fromAlias]: subQuery
.select(`${toAlias}.*`)
.select(
fieldList.map(f => {
// @ts-expect-error raw is fine here, knex types are wrong
return knex.ref(f[1]).as(f[0])
})
)
.limit(getRelationshipLimit()),
})
@ -1377,8 +1383,7 @@ class InternalBuilder {
toPrimary?: string
}[]
): Knex.QueryBuilder {
const { tableAliases: aliases, endpoint } = this.query
const schema = endpoint.schema
const { tableAliases: aliases, schema } = this.query
const toTable = tables.to,
fromTable = tables.from,
throughTable = tables.through
@ -1429,16 +1434,16 @@ class InternalBuilder {
}
qualifiedKnex(opts?: { alias?: string | boolean }): Knex.QueryBuilder {
let alias = this.query.tableAliases?.[this.query.endpoint.entityId]
let alias = this.query.tableAliases?.[this.query.table.name]
if (opts?.alias === false) {
alias = undefined
} else if (typeof opts?.alias === "string") {
alias = opts.alias
}
return this.knex(
this.tableNameWithSchema(this.query.endpoint.entityId, {
this.tableNameWithSchema(this.query.table.name, {
alias,
schema: this.query.endpoint.schema,
schema: this.query.schema,
})
)
}
@ -1455,9 +1460,7 @@ class InternalBuilder {
if (this.client === SqlClient.ORACLE) {
// Oracle doesn't seem to automatically insert nulls
// if we don't specify them, so we need to do that here
for (const [column, schema] of Object.entries(
this.query.meta.table.schema
)) {
for (const [column, schema] of Object.entries(this.query.table.schema)) {
if (
schema.constraints?.presence === true ||
schema.type === FieldType.FORMULA ||
@ -1534,11 +1537,9 @@ class InternalBuilder {
limits?: { base: number; query: number }
} = {}
): Knex.QueryBuilder {
let { endpoint, filters, paginate, relationships } = this.query
let { operation, filters, paginate, relationships, table } = this.query
const { limits } = opts
const counting = endpoint.operation === Operation.COUNT
const tableName = endpoint.entityId
// start building the query
let query = this.qualifiedKnex()
// handle pagination
@ -1557,7 +1558,7 @@ class InternalBuilder {
foundLimit = paginate.limit
}
// counting should not sort, limit or offset
if (!counting) {
if (operation !== Operation.COUNT) {
// add the found limit if supplied
if (foundLimit != null) {
query = query.limit(foundLimit)
@ -1569,7 +1570,7 @@ class InternalBuilder {
}
const aggregations = this.query.resource?.aggregations || []
if (counting) {
if (operation === Operation.COUNT) {
query = this.addDistinctCount(query)
} else if (aggregations.length > 0) {
query = this.addAggregations(query, aggregations)
@ -1578,7 +1579,7 @@ class InternalBuilder {
}
// have to add after as well (this breaks MS-SQL)
if (!counting) {
if (operation !== Operation.COUNT) {
query = this.addSorting(query)
}
@ -1586,9 +1587,7 @@ class InternalBuilder {
// handle relationships with a CTE for all others
if (relationships?.length && aggregations.length === 0) {
const mainTable =
this.query.tableAliases?.[this.query.endpoint.entityId] ||
this.query.endpoint.entityId
const mainTable = this.query.tableAliases?.[table.name] || table.name
const cte = this.addSorting(
this.knex
.with("paginated", query)
@ -1598,7 +1597,7 @@ class InternalBuilder {
})
)
// add JSON aggregations attached to the CTE
return this.addJsonRelationships(cte, tableName, relationships)
return this.addJsonRelationships(cte, table.name, relationships)
}
return query
@ -1661,7 +1660,10 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
* which for the sake of mySQL stops adding the returning statement to inserts, updates and deletes.
* @return the query ready to be passed to the driver.
*/
_query(json: QueryJson, opts: QueryOptions = {}): SqlQuery | SqlQuery[] {
_query(
json: EnrichedQueryJson,
opts: QueryOptions = {}
): SqlQuery | SqlQuery[] {
const sqlClient = this.getSqlClient()
const config: Knex.Config = {
client: this.getBaseSqlClient(),
@ -1711,34 +1713,30 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
return this.convertToNative(query, opts)
}
async getReturningRow(queryFn: QueryFunction, json: QueryJson) {
async getReturningRow(queryFn: QueryFunction, json: EnrichedQueryJson) {
if (!json.extra || !json.extra.idFilter) {
return {}
}
const input = this._query({
endpoint: {
...json.endpoint,
operation: Operation.READ,
},
resource: {
fields: [],
},
operation: Operation.READ,
datasource: json.datasource,
schema: json.schema,
table: json.table,
tables: json.tables,
resource: { fields: [] },
filters: json.extra?.idFilter,
paginate: {
limit: 1,
},
meta: json.meta,
paginate: { limit: 1 },
})
return queryFn(input, Operation.READ)
}
// when creating if an ID has been inserted need to make sure
// the id filter is enriched with it before trying to retrieve the row
checkLookupKeys(id: any, json: QueryJson) {
if (!id || !json.meta.table || !json.meta.table.primary) {
checkLookupKeys(id: any, json: EnrichedQueryJson) {
if (!id || !json.table.primary) {
return json
}
const primaryKey = json.meta.table.primary?.[0]
const primaryKey = json.table.primary[0]
json.extra = {
idFilter: {
equal: {
@ -1751,7 +1749,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
// this function recreates the returning functionality of postgres
async queryWithReturning(
json: QueryJson,
json: EnrichedQueryJson,
queryFn: QueryFunction,
processFn: Function = (result: any) => result
) {
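
To summarise the refactor running through this file: table resolution moves from `endpoint.entityId` plus `meta.table`/`meta.tables` lookups onto the query object itself. A rough sketch of the before/after shapes, with field names inferred from the call sites rather than from the real type definitions:

interface TableLike {
  name: string
  primary?: string[]
  schema: Record<string, unknown>
}

// Before: the target table had to be resolved indirectly.
interface QueryJsonSketch {
  endpoint: { entityId: string; operation: string; schema?: string }
  meta: { table: TableLike; tables?: Record<string, TableLike> }
}

// After: the enriched shape carries everything at the top level.
interface EnrichedQueryJsonSketch {
  operation: string
  table: TableLike
  tables: Record<string, TableLike>
  schema?: string
  datasource?: unknown
  meta?: { columnPrefix?: string; renamed?: unknown; oldTable?: TableLike }
}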

View File

@ -3,13 +3,13 @@ import {
FieldType,
NumberFieldMetadata,
Operation,
QueryJson,
RelationshipType,
RenameColumn,
SqlQuery,
Table,
TableSourceType,
SqlClient,
EnrichedQueryJson,
} from "@budibase/types"
import { breakExternalTableId, getNativeSql } from "./utils"
import { helpers, utils } from "@budibase/shared-core"
@ -25,7 +25,7 @@ function generateSchema(
schema: CreateTableBuilder,
table: Table,
tables: Record<string, Table>,
oldTable: null | Table = null,
oldTable?: Table,
renamed?: RenameColumn
) {
let primaryKeys = table && table.primary ? table.primary : []
@ -55,7 +55,7 @@ function generateSchema(
)
for (let [key, column] of Object.entries(table.schema)) {
// skip things that are already correct
const oldColumn = oldTable ? oldTable.schema[key] : null
const oldColumn = oldTable?.schema[key]
if (
(oldColumn && oldColumn.type) ||
columnTypeSet.includes(key) ||
@ -199,8 +199,8 @@ function buildUpdateTable(
knex: SchemaBuilder,
table: Table,
tables: Record<string, Table>,
oldTable: Table,
renamed: RenameColumn
oldTable?: Table,
renamed?: RenameColumn
): SchemaBuilder {
return knex.alterTable(table.name, schema => {
generateSchema(schema, table, tables, oldTable, renamed)
@ -238,19 +238,18 @@ class SqlTableQueryBuilder {
* @param json the input JSON structure from which an SQL query will be built.
* @return the operation that was found in the JSON.
*/
_operation(json: QueryJson): Operation {
return json.endpoint.operation
_operation(json: EnrichedQueryJson): Operation {
return json.operation
}
_tableQuery(json: QueryJson): SqlQuery | SqlQuery[] {
_tableQuery(json: EnrichedQueryJson): SqlQuery | SqlQuery[] {
let client = knex({ client: this.sqlClient }).schema
let schemaName = json?.endpoint?.schema
if (schemaName) {
client = client.withSchema(schemaName)
if (json?.schema) {
client = client.withSchema(json.schema)
}
let query: Knex.SchemaBuilder
if (!json.table || !json.meta || !json.meta.tables) {
if (!json.table || !json.tables) {
throw new Error("Cannot execute without table being specified")
}
if (json.table.sourceType === TableSourceType.INTERNAL) {
@ -259,17 +258,17 @@ class SqlTableQueryBuilder {
switch (this._operation(json)) {
case Operation.CREATE_TABLE:
query = buildCreateTable(client, json.table, json.meta.tables)
query = buildCreateTable(client, json.table, json.tables)
break
case Operation.UPDATE_TABLE:
if (!json.meta || !json.meta.table) {
if (!json.table) {
throw new Error("Must specify old table for update")
}
// renameColumn does not work for MySQL, so return a raw query
if (this.sqlClient === SqlClient.MY_SQL && json.meta.renamed) {
if (this.sqlClient === SqlClient.MY_SQL && json.meta?.renamed) {
const updatedColumn = json.meta.renamed.updated
const tableName = schemaName
? `\`${schemaName}\`.\`${json.table.name}\``
const tableName = json?.schema
? `\`${json.schema}\`.\`${json.table.name}\``
: `\`${json.table.name}\``
return {
sql: `alter table ${tableName} rename column \`${json.meta.renamed.old}\` to \`${updatedColumn}\`;`,
@ -280,18 +279,18 @@ class SqlTableQueryBuilder {
query = buildUpdateTable(
client,
json.table,
json.meta.tables,
json.meta.table,
json.meta.renamed!
json.tables,
json.meta?.oldTable,
json.meta?.renamed
)
// renameColumn for SQL Server returns a parameterised `sp_rename` query,
// which is not supported by SQL Server and gives a syntax error.
if (this.sqlClient === SqlClient.MS_SQL && json.meta.renamed) {
if (this.sqlClient === SqlClient.MS_SQL && json.meta?.renamed) {
const oldColumn = json.meta.renamed.old
const updatedColumn = json.meta.renamed.updated
const tableName = schemaName
? `${schemaName}.${json.table.name}`
const tableName = json?.schema
? `${json.schema}.${json.table.name}`
: `${json.table.name}`
const sql = getNativeSql(query)
if (Array.isArray(sql)) {

View File

@ -6,7 +6,7 @@ import {
getPlatformURL,
} from "../context"
import {
BBContext,
Ctx,
TenantResolutionStrategy,
GetTenantIdOptions,
} from "@budibase/types"
@ -37,7 +37,7 @@ export const isUserInAppTenant = (appId: string, user?: any) => {
const ALL_STRATEGIES = Object.values(TenantResolutionStrategy)
export const getTenantIDFromCtx = (
ctx: BBContext,
ctx: Ctx,
opts: GetTenantIdOptions
): string | undefined => {
// exit early if not multi-tenant

View File

@ -5,7 +5,7 @@ import * as db from "../../db"
import { Header } from "../../constants"
import { newid } from "../../utils"
import env from "../../environment"
import { BBContext } from "@budibase/types"
import { Ctx } from "@budibase/types"
describe("utils", () => {
const config = new DBTestConfiguration()
@ -109,7 +109,7 @@ describe("utils", () => {
})
describe("isServingBuilder", () => {
let ctx: BBContext
let ctx: Ctx
const expectResult = (result: boolean) =>
expect(utils.isServingBuilder(ctx)).toBe(result)
@ -133,7 +133,7 @@ describe("utils", () => {
})
describe("isServingBuilderPreview", () => {
let ctx: BBContext
let ctx: Ctx
const expectResult = (result: boolean) =>
expect(utils.isServingBuilderPreview(ctx)).toBe(result)
@ -157,7 +157,7 @@ describe("utils", () => {
})
describe("isPublicAPIRequest", () => {
let ctx: BBContext
let ctx: Ctx
const expectResult = (result: boolean) =>
expect(utils.isPublicApiRequest(ctx)).toBe(result)

View File

@ -1,8 +1,8 @@
import { createMockContext, createMockCookies } from "@shopify/jest-koa-mocks"
import { BBContext } from "@budibase/types"
import { Ctx } from "@budibase/types"
export const newContext = (): BBContext => {
const ctx = createMockContext() as any
export const newContext = (): Ctx => {
const ctx = createMockContext() as Ctx
return {
...ctx,
path: "/",

View File

@ -25,7 +25,7 @@ function getTestcontainers(): ContainerInfo[] {
// We use --format json to make sure the output is nice and machine-readable,
// and we use --no-trunc so that the command returns full container IDs so we
// can filter on them correctly.
return execSync("docker ps --format json --no-trunc")
return execSync("docker ps --all --format json --no-trunc")
.toString()
.split("\n")
.filter(x => x.length > 0)
@ -70,7 +70,36 @@ export function getExposedV4Port(container: ContainerInfo, port: number) {
return getExposedV4Ports(container).find(x => x.container === port)?.host
}
interface DockerContext {
Name: string
Description: string
DockerEndpoint: string
ContextType: string
Error: string
}
function getCurrentDockerContext(): DockerContext {
const out = execSync("docker context ls --format json")
for (const line of out.toString().split("\n")) {
const parsed = JSON.parse(line)
if (parsed.Current) {
return parsed as DockerContext
}
}
throw new Error("No current Docker context")
}
export function setupEnv(...envs: any[]) {
process.env.TESTCONTAINERS_RYUK_DISABLED = "true"
// For whatever reason, testcontainers doesn't always use the correct current
// docker context. This bit of code forces the issue by finding the current
// context and setting it as the DOCKER_HOST environment variable.

if (!process.env.DOCKER_HOST) {
const dockerContext = getCurrentDockerContext()
process.env.DOCKER_HOST = dockerContext.DockerEndpoint
}
// We start couchdb in globalSetup.ts, in the root of the monorepo, so it
// should be relatively safe to look for it by its image name.
const couch = getContainerByImage("budibase/couchdb")
@ -116,11 +145,12 @@ export async function startContainer(container: GenericContainer) {
key = imageName.split("@")[0]
}
key = key.replace(/\//g, "-").replace(/:/g, "-")
const name = `${key}_testcontainer`
container = container
.withReuse()
.withLabels({ "com.budibase": "true" })
.withName(`${key}_testcontainer`)
.withName(name)
let startedContainer: StartedTestContainer | undefined = undefined
let lastError = undefined

View File

@ -4,27 +4,21 @@
"version": "0.0.0",
"license": "MPL-2.0",
"svelte": "src/index.js",
"module": "dist/bbui.es.js",
"module": "dist/bbui.mjs",
"exports": {
".": {
"import": "./dist/bbui.es.js"
"import": "./dist/bbui.mjs"
},
"./package.json": "./package.json",
"./spectrum-icons-rollup.js": "./src/spectrum-icons-rollup.js",
"./spectrum-icons-vite.js": "./src/spectrum-icons-vite.js"
},
"scripts": {
"build": "rollup -c"
"build": "vite build"
},
"devDependencies": {
"@rollup/plugin-commonjs": "^16.0.0",
"@rollup/plugin-json": "^4.1.0",
"@rollup/plugin-node-resolve": "^11.2.1",
"postcss": "^8.2.9",
"rollup": "^2.45.2",
"rollup-plugin-postcss": "^4.0.0",
"rollup-plugin-svelte": "^7.1.0",
"rollup-plugin-terser": "^7.0.2"
"@sveltejs/vite-plugin-svelte": "1.4.0",
"vite-plugin-css-injected-by-js": "3.5.2"
},
"keywords": [
"svelte"
@ -96,8 +90,7 @@
"dependsOn": [
{
"projects": [
"@budibase/string-templates",
"@budibase/shared-core"
"@budibase/string-templates"
],
"target": "build"
}

View File

@ -1,32 +0,0 @@
import svelte from "rollup-plugin-svelte"
import resolve from "@rollup/plugin-node-resolve"
import commonjs from "@rollup/plugin-commonjs"
import json from "@rollup/plugin-json"
import { terser } from "rollup-plugin-terser"
import postcss from "rollup-plugin-postcss"
export default {
input: "src/index.js",
output: {
sourcemap: true,
format: "esm",
file: "dist/bbui.es.js",
},
onwarn(warning, warn) {
// suppress eval warnings
if (warning.code === "EVAL") {
return
}
warn(warning)
},
plugins: [
resolve(),
commonjs(),
svelte({
emitCss: true,
}),
postcss(),
terser(),
json(),
],
}

View File

@ -0,0 +1,29 @@
import { defineConfig } from "vite"
import { svelte } from "@sveltejs/vite-plugin-svelte"
import path from "path"
import cssInjectedByJsPlugin from "vite-plugin-css-injected-by-js"
export default defineConfig(({ mode }) => {
const isProduction = mode === "production"
return {
build: {
sourcemap: !isProduction,
lib: {
entry: "src/index.js",
formats: ["es"],
},
},
plugins: [
svelte({
emitCss: true,
}),
cssInjectedByJsPlugin(),
],
resolve: {
alias: {
"@budibase/shared-core": path.resolve(__dirname, "../shared-core/src"),
"@budibase/types": path.resolve(__dirname, "../types/src"),
},
},
}
})

View File

@ -63,7 +63,7 @@
if (!name?.length) {
return "Name is required"
}
if (snippets.some(snippet => snippet.name === name)) {
if (!snippet?.name && snippets.some(snippet => snippet.name === name)) {
return "That name is already in use"
}
if (firstCharNumberRegex.test(name)) {
@ -106,11 +106,7 @@
Delete
</Button>
{/if}
<Button
cta
on:click={saveSnippet}
disabled={!snippet && (loading || nameError)}
>
<Button cta on:click={saveSnippet} disabled={!code || loading || nameError}>
Save
</Button>
</svelte:fragment>

View File

@ -186,7 +186,7 @@
<div class="snippet-popover">
{#key hoveredSnippet}
<CodeEditor
value={hoveredSnippet.code.trim()}
value={hoveredSnippet.code?.trim()}
mode={EditorModes.JS}
readonly
/>

View File

@ -52,9 +52,16 @@
let modal
$: text = value?.label ?? "Choose an option"
$: tables = $tablesStore.list.map(table =>
format.table(table, $datasources.list)
)
$: tables = $tablesStore.list
.map(table => format.table(table, $datasources.list))
.sort((a, b) => {
// sort tables alphabetically, grouped by datasource
const dsComparison = a.datasourceName.localeCompare(b.datasourceName)
if (dsComparison !== 0) {
return dsComparison
}
return a.label.localeCompare(b.label)
})
$: viewsV1 = $viewsStore.list.map(view => ({
...view,
label: view.name,
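
The new comparator sorts in two passes, grouping by datasource before ordering labels; a worked example with hypothetical rows:

const entries = [
  { datasourceName: "PostgreSQL", label: "orders" },
  { datasourceName: "Budibase DB", label: "users" },
  { datasourceName: "Budibase DB", label: "clients" },
]

entries.sort((a, b) => {
  const dsComparison = a.datasourceName.localeCompare(b.datasourceName)
  return dsComparison !== 0 ? dsComparison : a.label.localeCompare(b.label)
})

// -> Budibase DB/clients, Budibase DB/users, PostgreSQL/orders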

View File

@ -1,5 +1,5 @@
<script>
import { Heading, Body, Layout, Button, Modal } from "@budibase/bbui"
import { Heading, Body, Layout, Button, Modal, Icon } from "@budibase/bbui"
import AutomationPanel from "components/automation/AutomationPanel/AutomationPanel.svelte"
import CreateAutomationModal from "components/automation/AutomationPanel/CreateAutomationModal.svelte"
import CreateWebhookModal from "components/automation/Shared/CreateWebhookModal.svelte"
@ -12,11 +12,13 @@
automationStore,
selectedAutomation,
} from "stores/builder"
import { createLocalStorageStore } from "@budibase/frontend-core"
import { fly } from "svelte/transition"
$: automationId = $selectedAutomation?.data?._id
$: builderStore.selectResource(automationId)
// Keep URL and state in sync for selected screen ID
const surveyDismissed = createLocalStorageStore("automation-survey", false)
const stopSyncing = syncURLToState({
urlParam: "automationId",
stateKey: "selectedAutomationId",
@ -29,9 +31,11 @@
let modal
let webhookModal
let mounted = false
onMount(() => {
$automationStore.showTestPanel = false
mounted = true
})
onDestroy(stopSyncing)
@ -79,6 +83,43 @@
</Modal>
</div>
{#if !$surveyDismissed && mounted}
<div
class="survey"
in:fly={{ x: 600, duration: 260, delay: 1000 }}
out:fly={{ x: 600, duration: 260 }}
>
<div class="survey__body">
<div class="survey__title">We value your feedback!</div>
<div class="survey__text">
<a
href="https://t.maze.co/310149185"
target="_blank"
rel="noopener noreferrer"
on:click={() => surveyDismissed.set(true)}
>
Complete our survey on Automations</a
>
and receive a $20 thank-you gift.
<a
href="https://drive.google.com/file/d/12-qk_2F9g5PdbM6wuKoz2KkIyLI-feMX/view?usp=sharing"
target="_blank"
rel="noopener noreferrer"
>
Terms apply.
</a>
</div>
</div>
<Icon
name="Close"
hoverable
color="var(--spectrum-global-color-static-gray-300)"
hoverColor="var(--spectrum-global-color-static-gray-100)"
on:click={() => surveyDismissed.set(true)}
/>
</div>
{/if}
<style>
.root {
flex: 1 1 auto;
@ -108,11 +149,9 @@
justify-content: center;
align-items: center;
}
.main {
width: 300px;
}
.setup {
padding-top: 9px;
border-left: var(--border-light);
@ -125,4 +164,39 @@
grid-column: 3;
overflow: auto;
}
/* Survey */
.survey {
position: absolute;
bottom: 32px;
right: 32px;
background: var(--spectrum-semantic-positive-color-background);
display: flex;
flex-direction: row;
padding: var(--spacing-l) var(--spacing-xl);
border-radius: 4px;
gap: var(--spacing-xl);
}
.survey * {
color: var(--spectrum-global-color-static-gray-300);
white-space: nowrap;
}
.survey a {
text-decoration: underline;
transition: color 130ms ease-out;
}
.survey a:hover {
color: var(--spectrum-global-color-static-gray-100);
cursor: pointer;
}
.survey__body {
flex: 1 1 auto;
display: flex;
flex-direction: column;
gap: 2px;
}
.survey__title {
font-weight: 600;
font-size: 15px;
}
</style>

View File

@ -40,6 +40,6 @@
"@types/node-fetch": "2.6.4",
"@types/pouchdb": "^6.4.0",
"ts-node": "10.8.1",
"typescript": "5.5.2"
"typescript": "5.7.2"
}
}

View File

@ -1 +1 @@
Subproject commit e8ef2205de8bca5adcf18d07573096086aa9a606
Subproject commit 5321c7589257711cf153600597ef4e6a5f6b7162

View File

@ -129,7 +129,8 @@
"uuid": "^8.3.2",
"validate.js": "0.13.1",
"worker-farm": "1.7.0",
"xml2js": "0.6.2"
"xml2js": "0.6.2",
"zod-validation-error": "^3.4.0"
},
"devDependencies": {
"@babel/core": "^7.22.5",
@ -169,13 +170,14 @@
"rimraf": "3.0.2",
"supertest": "6.3.3",
"swagger-jsdoc": "6.1.0",
"testcontainers": "10.7.2",
"testcontainers": "10.16.0",
"timekeeper": "2.2.0",
"ts-node": "10.8.1",
"tsconfig-paths": "4.0.0",
"typescript": "5.5.2",
"typescript": "5.7.2",
"update-dotenv": "1.1.1",
"yargs": "13.2.4"
"yargs": "^13.2.4",
"zod": "^3.23.8"
},
"nx": {
"targets": {

View File

@ -1,16 +1,22 @@
import { events, context } from "@budibase/backend-core"
import { AnalyticsPingRequest, App, PingSource } from "@budibase/types"
import {
AnalyticsPingRequest,
App,
PingSource,
Ctx,
AnalyticsEnabledResponse,
} from "@budibase/types"
import { DocumentType, isDevAppID } from "../../db/utils"
export const isEnabled = async (ctx: any) => {
export const isEnabled = async (ctx: Ctx<void, AnalyticsEnabledResponse>) => {
const enabled = await events.analytics.enabled()
ctx.body = {
enabled,
}
}
export const ping = async (ctx: any) => {
const body = ctx.request.body as AnalyticsPingRequest
export const ping = async (ctx: Ctx<AnalyticsPingRequest, void>) => {
const body = ctx.request.body
switch (body.source) {
case PingSource.APP: {

View File

@ -1,18 +1,25 @@
import { db as dbCore, tenancy } from "@budibase/backend-core"
import { BBContext, Document } from "@budibase/types"
import {
Document,
UserCtx,
ApiKeyDoc,
ApiKeyFetchResponse,
UpdateApiKeyRequest,
UpdateApiKeyResponse,
} from "@budibase/types"
const KEYS_DOC = dbCore.StaticDatabases.GLOBAL.docs.apiKeys
async function getBuilderMainDoc() {
const db = tenancy.getGlobalDB()
try {
return await db.get<any>(KEYS_DOC)
} catch (err) {
// doesn't exist yet, nothing to get
const doc = await db.tryGet<ApiKeyDoc>(KEYS_DOC)
if (!doc) {
return {
_id: KEYS_DOC,
apiKeys: {},
}
}
return doc
}
async function setBuilderMainDoc(doc: Document) {
@ -22,7 +29,7 @@ async function setBuilderMainDoc(doc: Document) {
return db.put(doc)
}
export async function fetch(ctx: BBContext) {
export async function fetch(ctx: UserCtx<void, ApiKeyFetchResponse>) {
try {
const mainDoc = await getBuilderMainDoc()
ctx.body = mainDoc.apiKeys ? mainDoc.apiKeys : {}
@ -32,7 +39,9 @@ export async function fetch(ctx: BBContext) {
}
}
export async function update(ctx: BBContext) {
export async function update(
ctx: UserCtx<UpdateApiKeyRequest, UpdateApiKeyResponse>
) {
const key = ctx.params.key
const value = ctx.request.body.value
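
The rewrite above leans on `db.tryGet`, which (as used here) resolves to the document or `undefined` rather than throwing on a missing ID. The resulting pattern, sketched with an assumed signature:

interface MiniDb {
  tryGet<T>(id: string): Promise<T | undefined>
}

async function getOrDefault<T>(db: MiniDb, id: string, fallback: T): Promise<T> {
  // Replaces try/catch around get(): a missing doc yields the fallback.
  return (await db.tryGet<T>(id)) ?? fallback
}

// getBuilderMainDoc() is this pattern with fallback { _id: KEYS_DOC, apiKeys: {} }.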

View File

@ -59,6 +59,15 @@ import {
BBReferenceFieldSubType,
Row,
BBRequest,
SyncAppResponse,
CreateAppResponse,
FetchAppsResponse,
UpdateAppClientResponse,
RevertAppClientResponse,
DeleteAppResponse,
ImportToUpdateAppRequest,
ImportToUpdateAppResponse,
SetRevertableAppVersionRequest,
} from "@budibase/types"
import { BASE_LAYOUT_PROP_IDS } from "../../constants/layouts"
import sdk from "../../sdk"
@ -166,7 +175,7 @@ async function createInstance(appId: string, template: AppTemplate) {
return { _id: appId }
}
export const addSampleData = async (ctx: UserCtx) => {
export const addSampleData = async (ctx: UserCtx<void, void>) => {
const db = context.getAppDB()
try {
@ -182,7 +191,7 @@ export const addSampleData = async (ctx: UserCtx) => {
ctx.status = 200
}
export async function fetch(ctx: UserCtx<void, App[]>) {
export async function fetch(ctx: UserCtx<void, FetchAppsResponse>) {
ctx.body = await sdk.applications.fetch(
ctx.query.status as AppStatus,
ctx.user
@ -242,7 +251,9 @@ export async function fetchAppPackage(
}
}
async function performAppCreate(ctx: UserCtx<CreateAppRequest, App>) {
async function performAppCreate(
ctx: UserCtx<CreateAppRequest, CreateAppResponse>
) {
const apps = (await dbCore.getAllApps({ dev: true })) as App[]
const { body } = ctx.request
const { name, url, encryptionPassword, templateKey } = body
@ -510,7 +521,9 @@ async function appPostCreate(ctx: UserCtx<CreateAppRequest, App>, app: App) {
}
}
export async function create(ctx: UserCtx<CreateAppRequest, App>) {
export async function create(
ctx: UserCtx<CreateAppRequest, CreateAppResponse>
) {
const newApplication = await quotas.addApp(() => performAppCreate(ctx))
await appPostCreate(ctx, newApplication)
await cache.bustCache(cache.CacheKey.CHECKLIST)
@ -553,7 +566,9 @@ export async function update(
})
}
export async function updateClient(ctx: UserCtx) {
export async function updateClient(
ctx: UserCtx<void, UpdateAppClientResponse>
) {
// Get current app version
const application = await sdk.applications.metadata.get()
const currentVersion = application.version
@ -581,7 +596,9 @@ export async function updateClient(ctx: UserCtx) {
ctx.body = app
}
export async function revertClient(ctx: UserCtx) {
export async function revertClient(
ctx: UserCtx<void, RevertAppClientResponse>
) {
// Check app can be reverted
const application = await sdk.applications.metadata.get()
if (!application.revertableVersion) {
@ -668,7 +685,7 @@ async function postDestroyApp(ctx: UserCtx) {
}
}
export async function destroy(ctx: UserCtx) {
export async function destroy(ctx: UserCtx<void, DeleteAppResponse>) {
await preDestroyApp(ctx)
const result = await destroyApp(ctx)
await postDestroyApp(ctx)
@ -676,7 +693,7 @@ export async function destroy(ctx: UserCtx) {
ctx.body = result
}
export async function unpublish(ctx: UserCtx) {
export async function unpublish(ctx: UserCtx<void, void>) {
const prodAppId = dbCore.getProdAppID(ctx.params.appId)
const dbExists = await dbCore.dbExists(prodAppId)
@ -692,7 +709,7 @@ export async function unpublish(ctx: UserCtx) {
builderSocket?.emitAppUnpublish(ctx)
}
export async function sync(ctx: UserCtx) {
export async function sync(ctx: UserCtx<void, SyncAppResponse>) {
const appId = ctx.params.appId
try {
ctx.body = await sdk.applications.syncApp(appId)
@ -701,10 +718,12 @@ export async function sync(ctx: UserCtx) {
}
}
export async function importToApp(ctx: UserCtx) {
export async function importToApp(
ctx: UserCtx<ImportToUpdateAppRequest, ImportToUpdateAppResponse>
) {
const { appId } = ctx.params
const appExport = ctx.request.files?.appExport
const password = ctx.request.body.encryptionPassword as string
const password = ctx.request.body.encryptionPassword
if (!appExport) {
ctx.throw(400, "Must supply app export to import")
}
@ -811,7 +830,7 @@ export async function updateAppPackage(
}
export async function setRevertableVersion(
ctx: UserCtx<{ revertableVersion: string }, App>
ctx: UserCtx<SetRevertableAppVersionRequest, void>
) {
if (!env.isDev()) {
ctx.status = 403
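
The controller changes in this file all follow one pattern: `UserCtx<RequestBody, ResponseBody>` (and `Ctx<...>`) replace untyped contexts so that `ctx.request.body` and `ctx.body` are checked. The convention in miniature, with made-up names:

type MiniCtx<Req = void, Res = void> = {
  request: { body: Req }
  body: Res
  params: Record<string, string>
  throw(status: number, message: string): never
}

async function renameApp(
  ctx: MiniCtx<{ name: string }, { renamed: boolean }>
) {
  const { name } = ctx.request.body // typed as { name: string }
  if (!name) {
    ctx.throw(400, "Must supply a name")
  }
  ctx.body = { renamed: true } // typed as { renamed: boolean }
}
void renameApp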

View File

@ -2,7 +2,7 @@ import { outputProcessing } from "../../utilities/rowProcessor"
import { InternalTables } from "../../db/utils"
import { getFullUser } from "../../utilities/users"
import { roles, context, db as dbCore } from "@budibase/backend-core"
import { ContextUser, Row, UserCtx } from "@budibase/types"
import { AppSelfResponse, ContextUser, UserCtx } from "@budibase/types"
import sdk from "../../sdk"
import { processUser } from "../../utilities/global"
@ -17,7 +17,7 @@ const addSessionAttributesToUser = (ctx: any) => {
}
}
export async function fetchSelf(ctx: UserCtx) {
export async function fetchSelf(ctx: UserCtx<void, AppSelfResponse>) {
let userId = ctx.user.userId || ctx.user._id
/* istanbul ignore next */
if (!userId || !ctx.isAuthenticated) {
@ -45,9 +45,9 @@ export async function fetchSelf(ctx: UserCtx) {
try {
const userTable = await sdk.tables.getTable(InternalTables.USER_METADATA)
// specifically needs to make sure is enriched
ctx.body = await outputProcessing(userTable, user as Row)
ctx.body = await outputProcessing(userTable, user)
} catch (err: any) {
let response
let response: ContextUser | {}
// user didn't exist in app, don't pretend they do
if (user.roleId === PUBLIC_ROLE) {
response = {}

View File

@ -9,10 +9,25 @@ import {
App,
Automation,
AutomationActionStepId,
AutomationResults,
UserCtx,
DeleteAutomationResponse,
FetchAutomationResponse,
GetAutomationTriggerDefinitionsResponse,
GetAutomationStepDefinitionsResponse,
GetAutomationActionDefinitionsResponse,
FindAutomationResponse,
UpdateAutomationRequest,
UpdateAutomationResponse,
CreateAutomationRequest,
CreateAutomationResponse,
SearchAutomationLogsRequest,
SearchAutomationLogsResponse,
ClearAutomationLogRequest,
ClearAutomationLogResponse,
TriggerAutomationRequest,
TriggerAutomationResponse,
TestAutomationRequest,
TestAutomationResponse,
} from "@budibase/types"
import { getActionDefinitions as actionDefs } from "../../automations/actions"
import sdk from "../../sdk"
@ -34,7 +49,7 @@ function getTriggerDefinitions() {
*************************/
export async function create(
ctx: UserCtx<Automation, { message: string; automation: Automation }>
ctx: UserCtx<CreateAutomationRequest, CreateAutomationResponse>
) {
let automation = ctx.request.body
automation.appId = ctx.appId
@ -55,7 +70,9 @@ export async function create(
builderSocket?.emitAutomationUpdate(ctx, automation)
}
export async function update(ctx: UserCtx) {
export async function update(
ctx: UserCtx<UpdateAutomationRequest, UpdateAutomationResponse>
) {
let automation = ctx.request.body
automation.appId = ctx.appId
@ -80,7 +97,7 @@ export async function fetch(ctx: UserCtx<void, FetchAutomationResponse>) {
ctx.body = { automations }
}
export async function find(ctx: UserCtx) {
export async function find(ctx: UserCtx<void, FindAutomationResponse>) {
ctx.body = await sdk.automations.get(ctx.params.id)
}
@ -96,11 +113,15 @@ export async function destroy(ctx: UserCtx<void, DeleteAutomationResponse>) {
builderSocket?.emitAutomationDeletion(ctx, automationId)
}
export async function logSearch(ctx: UserCtx) {
export async function logSearch(
ctx: UserCtx<SearchAutomationLogsRequest, SearchAutomationLogsResponse>
) {
ctx.body = await automations.logs.logSearch(ctx.request.body)
}
export async function clearLogError(ctx: UserCtx) {
export async function clearLogError(
ctx: UserCtx<ClearAutomationLogRequest, ClearAutomationLogResponse>
) {
const { automationId, appId } = ctx.request.body
await context.doInAppContext(appId, async () => {
const db = context.getProdAppDB()
@ -119,15 +140,21 @@ export async function clearLogError(ctx: UserCtx) {
})
}
export async function getActionList(ctx: UserCtx) {
export async function getActionList(
ctx: UserCtx<void, GetAutomationActionDefinitionsResponse>
) {
ctx.body = await getActionDefinitions()
}
export async function getTriggerList(ctx: UserCtx) {
export async function getTriggerList(
ctx: UserCtx<void, GetAutomationTriggerDefinitionsResponse>
) {
ctx.body = getTriggerDefinitions()
}
export async function getDefinitionList(ctx: UserCtx) {
export async function getDefinitionList(
ctx: UserCtx<void, GetAutomationStepDefinitionsResponse>
) {
ctx.body = {
trigger: getTriggerDefinitions(),
action: await getActionDefinitions(),
@ -140,14 +167,16 @@ export async function getDefinitionList(ctx: UserCtx) {
* *
*********************/
export async function trigger(ctx: UserCtx) {
export async function trigger(
ctx: UserCtx<TriggerAutomationRequest, TriggerAutomationResponse>
) {
const db = context.getAppDB()
let automation = await db.get<Automation>(ctx.params.id)
let hasCollectStep = sdk.automations.utils.checkForCollectStep(automation)
if (hasCollectStep && (await features.isSyncAutomationsEnabled())) {
try {
const response: AutomationResults = await triggers.externalTrigger(
const response = await triggers.externalTrigger(
automation,
{
fields: ctx.request.body.fields,
@ -158,6 +187,10 @@ export async function trigger(ctx: UserCtx) {
{ getResponses: true }
)
if (!("steps" in response)) {
ctx.throw(400, "Unable to collect response")
}
let collectedValue = response.steps.find(
step => step.stepId === AutomationActionStepId.COLLECT
)
@ -185,7 +218,7 @@ export async function trigger(ctx: UserCtx) {
}
}
function prepareTestInput(input: any) {
function prepareTestInput(input: TestAutomationRequest) {
// prepare the test parameters
if (input.id && input.row) {
input.row._id = input.id
@ -196,7 +229,9 @@ function prepareTestInput(input: any) {
return input
}
export async function test(ctx: UserCtx) {
export async function test(
ctx: UserCtx<TestAutomationRequest, TestAutomationResponse>
) {
const db = context.getAppDB()
let automation = await db.get<Automation>(ctx.params.id)
await setTestFlag(automation._id!)
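
Note: the hunks above replace untyped UserCtx handlers with explicit request/response generics, so both ctx.request.body and ctx.body are compiler-checked. A minimal sketch of the pattern, using hypothetical EchoRequest/EchoResponse types that are not part of the codebase:

import { UserCtx } from "@budibase/types"

interface EchoRequest {
  message: string
}
interface EchoResponse {
  echoed: string
}

// ctx.request.body is typed as EchoRequest; assigning anything other than
// an EchoResponse to ctx.body is now a compile-time error.
export async function echo(ctx: UserCtx<EchoRequest, EchoResponse>) {
  ctx.body = { echoed: ctx.request.body.message }
}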

View File

@ -1,14 +1,16 @@
import sdk from "../../sdk"
import { events, context, db } from "@budibase/backend-core"
import { DocumentType } from "../../db/utils"
import { App, Ctx } from "@budibase/types"
import {
App,
Ctx,
ExportAppDumpRequest,
ExportAppDumpResponse,
} from "@budibase/types"
interface ExportAppDumpRequest {
excludeRows: boolean
encryptPassword?: string
}
export async function exportAppDump(ctx: Ctx<ExportAppDumpRequest>) {
export async function exportAppDump(
ctx: Ctx<ExportAppDumpRequest, ExportAppDumpResponse>
) {
const { appId } = ctx.query as any
const { excludeRows, encryptPassword } = ctx.request.body

View File

@ -1,9 +1,16 @@
import { DocumentType } from "../../db/utils"
import { App, Plugin, UserCtx } from "@budibase/types"
import {
App,
FetchComponentDefinitionResponse,
Plugin,
UserCtx,
} from "@budibase/types"
import { db as dbCore, context, tenancy } from "@budibase/backend-core"
import { getComponentLibraryManifest } from "../../utilities/fileSystem"
export async function fetchAppComponentDefinitions(ctx: UserCtx) {
export async function fetchAppComponentDefinitions(
ctx: UserCtx<void, FetchComponentDefinitionResponse>
) {
try {
const db = context.getAppDB()
const app = await db.get<App>(DocumentType.APP_METADATA)

View File

@ -23,13 +23,17 @@ import {
Table,
RowValue,
DynamicVariable,
FetchDatasourcesResponse,
FindDatasourcesResponse,
DeleteDatasourceResponse,
FetchExternalSchemaResponse,
} from "@budibase/types"
import sdk from "../../sdk"
import { builderSocket } from "../../websockets"
import { isEqual } from "lodash"
import { processTable } from "../../sdk/app/tables/getters"
export async function fetch(ctx: UserCtx) {
export async function fetch(ctx: UserCtx<void, FetchDatasourcesResponse>) {
ctx.body = await sdk.datasources.fetch()
}
@ -260,7 +264,7 @@ async function destroyInternalTablesBySourceId(datasourceId: string) {
}
}
export async function destroy(ctx: UserCtx) {
export async function destroy(ctx: UserCtx<void, DeleteDatasourceResponse>) {
const db = context.getAppDB()
const datasourceId = ctx.params.datasourceId
@ -291,22 +295,14 @@ export async function destroy(ctx: UserCtx) {
builderSocket?.emitDatasourceDeletion(ctx, datasourceId)
}
export async function find(ctx: UserCtx) {
export async function find(ctx: UserCtx<void, FindDatasourcesResponse>) {
const datasource = await sdk.datasources.get(ctx.params.datasourceId)
ctx.body = await sdk.datasources.removeSecretSingle(datasource)
}
// dynamic query functionality
export async function query(ctx: UserCtx) {
const queryJson = ctx.request.body
try {
ctx.body = await sdk.rows.utils.getDatasourceAndQuery(queryJson)
} catch (err: any) {
ctx.throw(400, err)
}
}
export async function getExternalSchema(ctx: UserCtx) {
export async function getExternalSchema(
ctx: UserCtx<void, FetchExternalSchemaResponse>
) {
const datasource = await sdk.datasources.get(ctx.params.datasourceId)
const enrichedDatasource = await sdk.datasources.getAndMergeDatasource(
datasource
@ -316,9 +312,10 @@ export async function getExternalSchema(ctx: UserCtx) {
if (!connector.getExternalSchema) {
ctx.throw(400, "Datasource does not support exporting external schema")
}
const response = await connector.getExternalSchema()
ctx.body = {
schema: response,
try {
ctx.body = { schema: await connector.getExternalSchema() }
} catch (e: any) {
ctx.throw(400, e.message)
}
}
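
Note: getExternalSchema now wraps the connector call so failures surface as a 400 carrying the connector's message rather than an unhandled 500. The same shape in a standalone Koa handler, with loadSchema as a hypothetical stand-in for connector.getExternalSchema():

import Koa from "koa"

const app = new Koa()

app.use(async ctx => {
  try {
    // loadSchema stands in for connector.getExternalSchema()
    ctx.body = { schema: await loadSchema() }
  } catch (e: any) {
    // ctx.throw converts the failure into a client-visible HTTP 400
    ctx.throw(400, e.message)
  }
})

async function loadSchema(): Promise<string> {
  return "CREATE TABLE example (id INT PRIMARY KEY);"
}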

View File

@ -1,4 +1,5 @@
import { context, utils } from "@budibase/backend-core"
import { DeploymentStatus } from "@budibase/types"
/**
* This is used to pass around information about the deployment that is occurring
@ -6,7 +7,7 @@ import { context, utils } from "@budibase/backend-core"
export default class Deployment {
_id: string
verification: any
status?: string
status?: DeploymentStatus
err?: any
appUrl?: string
@ -25,7 +26,7 @@ export default class Deployment {
return this.verification
}
setStatus(status: string, err?: any) {
setStatus(status: DeploymentStatus, err?: any) {
this.status = status
if (err) {
this.err = err
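
Note: status is now constrained to DeploymentStatus from @budibase/types instead of a free-form string. Judging by the inline constant it replaces in the deployments controller below, the type is presumably close to this sketch:

// Assumed definition, mirroring the removed SUCCESS/PENDING/FAILURE constant.
export enum DeploymentStatus {
  SUCCESS = "SUCCESS",
  PENDING = "PENDING",
  FAILURE = "FAILURE",
}

// deployment.setStatus("DONE") no longer compiles;
// deployment.setStatus(DeploymentStatus.SUCCESS) is the only valid form.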

View File

@ -7,20 +7,26 @@ import {
enableCronTrigger,
} from "../../../automations/utils"
import { backups } from "@budibase/pro"
import { App, AppBackupTrigger } from "@budibase/types"
import {
App,
AppBackupTrigger,
DeploymentDoc,
FetchDeploymentResponse,
PublishAppResponse,
UserCtx,
DeploymentStatus,
DeploymentProgressResponse,
} from "@budibase/types"
import sdk from "../../../sdk"
import { builderSocket } from "../../../websockets"
// the max time we can wait for an invalidation to complete before considering it failed
const MAX_PENDING_TIME_MS = 30 * 60000
const DeploymentStatus = {
SUCCESS: "SUCCESS",
PENDING: "PENDING",
FAILURE: "FAILURE",
}
// checks that deployments are in a good state, any pending will be updated
async function checkAllDeployments(deployments: any) {
async function checkAllDeployments(
deployments: any
): Promise<{ updated: boolean; deployments: DeploymentDoc }> {
let updated = false
let deployment: any
for (deployment of Object.values(deployments.history)) {
@ -96,7 +102,9 @@ async function initDeployedApp(prodAppId: any) {
})
}
export async function fetchDeployments(ctx: any) {
export async function fetchDeployments(
ctx: UserCtx<void, FetchDeploymentResponse>
) {
try {
const db = context.getAppDB()
const deploymentDoc = await db.get(DocumentType.DEPLOYMENTS)
@ -104,17 +112,24 @@ export async function fetchDeployments(ctx: any) {
if (updated) {
await db.put(deployments)
}
ctx.body = Object.values(deployments.history).reverse()
ctx.body = deployments.history
? Object.values(deployments.history).reverse()
: []
} catch (err) {
ctx.body = []
}
}
export async function deploymentProgress(ctx: any) {
export async function deploymentProgress(
ctx: UserCtx<void, DeploymentProgressResponse>
) {
try {
const db = context.getAppDB()
const deploymentDoc = await db.get<any>(DocumentType.DEPLOYMENTS)
ctx.body = deploymentDoc[ctx.params.deploymentId]
const deploymentDoc = await db.get<DeploymentDoc>(DocumentType.DEPLOYMENTS)
if (!deploymentDoc.history?.[ctx.params.deploymentId]) {
ctx.throw(404, "No deployment found")
}
ctx.body = deploymentDoc.history?.[ctx.params.deploymentId]
} catch (err) {
ctx.throw(
500,
@ -123,7 +138,9 @@ export async function deploymentProgress(ctx: any) {
}
}
export const publishApp = async function (ctx: any) {
export const publishApp = async function (
ctx: UserCtx<void, PublishAppResponse>
) {
let deployment = new Deployment()
console.log("Deployment object created")
deployment.setStatus(DeploymentStatus.PENDING)

View File

@ -11,7 +11,13 @@ import {
db as dbCore,
cache,
} from "@budibase/backend-core"
import { App } from "@budibase/types"
import {
App,
ClearDevLockResponse,
Ctx,
GetVersionResponse,
RevertAppResponse,
} from "@budibase/types"
async function redirect(
ctx: any,
@ -69,7 +75,7 @@ export function buildRedirectDelete(path: string) {
}
}
export async function clearLock(ctx: any) {
export async function clearLock(ctx: Ctx<void, ClearDevLockResponse>) {
const { appId } = ctx.params
try {
await redisClearLock(appId, ctx.user)
@ -81,7 +87,7 @@ export async function clearLock(ctx: any) {
}
}
export async function revert(ctx: any) {
export async function revert(ctx: Ctx<void, RevertAppResponse>) {
const { appId } = ctx.params
const productionAppId = dbCore.getProdAppID(appId)
@ -131,7 +137,7 @@ export async function revert(ctx: any) {
}
}
export async function getBudibaseVersion(ctx: any) {
export async function getBudibaseVersion(ctx: Ctx<void, GetVersionResponse>) {
const version = envCore.VERSION
ctx.body = {
version,

View File

@ -1,12 +1,17 @@
import { getDefinition, getDefinitions } from "../../integrations"
import { SourceName, UserCtx } from "@budibase/types"
import {
SourceName,
UserCtx,
FetchIntegrationsResponse,
FindIntegrationResponse,
} from "@budibase/types"
const DISABLED_EXTERNAL_INTEGRATIONS = [
SourceName.AIRTABLE,
SourceName.BUDIBASE,
]
export async function fetch(ctx: UserCtx) {
export async function fetch(ctx: UserCtx<void, FetchIntegrationsResponse>) {
const definitions = await getDefinitions()
for (let disabledIntegration of DISABLED_EXTERNAL_INTEGRATIONS) {
delete definitions[disabledIntegration]
@ -14,10 +19,14 @@ export async function fetch(ctx: UserCtx) {
ctx.body = definitions
}
export async function find(ctx: UserCtx) {
export async function find(ctx: UserCtx<void, FindIntegrationResponse>) {
const sourceType = ctx.params?.type
if (DISABLED_EXTERNAL_INTEGRATIONS.indexOf(sourceType) !== -1) {
ctx.throw(400, `Invalid source type - ${sourceType} is not supported.`)
}
ctx.body = await getDefinition(ctx.params.type)
const integration = await getDefinition(ctx.params.type)
if (!integration) {
ctx.throw(404, "Integration not found")
}
ctx.body = integration
}

View File

@ -2,7 +2,7 @@ import { EMPTY_LAYOUT } from "../../constants/layouts"
import { generateLayoutID, getScreenParams } from "../../db/utils"
import { events, context } from "@budibase/backend-core"
import {
BBContext,
DeleteLayoutResponse,
Layout,
SaveLayoutRequest,
SaveLayoutResponse,
@ -32,7 +32,7 @@ export async function save(
ctx.status = 200
}
export async function destroy(ctx: BBContext) {
export async function destroy(ctx: UserCtx<void, DeleteLayoutResponse>) {
const db = context.getAppDB()
const layoutId = ctx.params.layoutId,
layoutRev = ctx.params.layoutRev

View File

@ -1,24 +1,35 @@
import { MetadataTypes } from "../../constants"
import { generateMetadataID } from "../../db/utils"
import { saveEntityMetadata, deleteEntityMetadata } from "../../utilities"
import { context } from "@budibase/backend-core"
import { BBContext } from "@budibase/types"
import {
UserCtx,
MetadataType,
GetMetadataTypesResponse,
SaveMetadataRequest,
SaveMetadataResponse,
DeleteMetadataResponse,
FindMetadataResponse,
} from "@budibase/types"
export async function getTypes(ctx: BBContext) {
export async function getTypes(ctx: UserCtx<void, GetMetadataTypesResponse>) {
ctx.body = {
types: MetadataTypes,
types: MetadataType,
}
}
export async function saveMetadata(ctx: BBContext) {
export async function saveMetadata(
ctx: UserCtx<SaveMetadataRequest, SaveMetadataResponse>
) {
const { type, entityId } = ctx.params
if (type === MetadataTypes.AUTOMATION_TEST_HISTORY) {
if (type === MetadataType.AUTOMATION_TEST_HISTORY) {
ctx.throw(400, "Cannot save automation history type")
}
ctx.body = await saveEntityMetadata(type, entityId, ctx.request.body)
}
export async function deleteMetadata(ctx: BBContext) {
export async function deleteMetadata(
ctx: UserCtx<void, DeleteMetadataResponse>
) {
const { type, entityId } = ctx.params
await deleteEntityMetadata(type, entityId)
ctx.body = {
@ -26,17 +37,9 @@ export async function deleteMetadata(ctx: BBContext) {
}
}
export async function getMetadata(ctx: BBContext) {
export async function getMetadata(ctx: UserCtx<void, FindMetadataResponse>) {
const { type, entityId } = ctx.params
const db = context.getAppDB()
const id = generateMetadataID(type, entityId)
try {
ctx.body = await db.get(id)
} catch (err: any) {
if (err.status === 404) {
ctx.body = {}
} else {
ctx.throw(err.status, err)
}
}
ctx.body = (await db.tryGet(id)) || {}
}
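
Note: getMetadata drops the try/catch-on-404 dance in favour of db.tryGet, which resolves to undefined for a missing document instead of throwing. A sketch of the equivalent helper, assuming a CouchDB-style get that throws with status 404:

async function tryGet<T>(
  db: { get(id: string): Promise<T> },
  id: string
): Promise<T | undefined> {
  try {
    return await db.get(id)
  } catch (err: any) {
    // a missing document is an expected outcome here, not an error
    if (err.status === 404) {
      return undefined
    }
    throw err
  }
}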

View File

@ -1,24 +1,33 @@
import { context } from "@budibase/backend-core"
import { migrate as migrationImpl, MIGRATIONS } from "../../migrations"
import { Ctx } from "@budibase/types"
import {
Ctx,
FetchOldMigrationResponse,
GetOldMigrationStatus,
RunOldMigrationRequest,
} from "@budibase/types"
import {
getAppMigrationVersion,
getLatestEnabledMigrationId,
} from "../../appMigrations"
export async function migrate(ctx: Ctx) {
export async function migrate(ctx: Ctx<RunOldMigrationRequest, void>) {
const options = ctx.request.body
// don't await as this can take a while, just return
migrationImpl(options)
ctx.status = 200
}
export async function fetchDefinitions(ctx: Ctx) {
export async function fetchDefinitions(
ctx: Ctx<void, FetchOldMigrationResponse>
) {
ctx.body = MIGRATIONS
ctx.status = 200
}
export async function getMigrationStatus(ctx: Ctx) {
export async function getMigrationStatus(
ctx: Ctx<void, GetOldMigrationStatus>
) {
const appId = context.getAppId()
if (!appId) {

View File

@ -1,16 +1,7 @@
import { Ctx } from "@budibase/types"
import { Ctx, LogOpsRequest, ErrorOpsRequest } from "@budibase/types"
import { logging } from "@budibase/backend-core"
interface LogRequest {
message: string
data?: any
}
interface ErrorRequest {
message: string
}
export async function log(ctx: Ctx<LogRequest>) {
export async function log(ctx: Ctx<LogOpsRequest, void>) {
const body = ctx.request.body
console.trace(body.message, body.data)
console.debug(body.message, body.data)
@ -20,13 +11,13 @@ export async function log(ctx: Ctx<LogRequest>) {
ctx.status = 204
}
export async function alert(ctx: Ctx<ErrorRequest>) {
export async function alert(ctx: Ctx<ErrorOpsRequest, void>) {
const body = ctx.request.body
logging.logAlert(body.message, new Error(body.message))
ctx.status = 204
}
export async function error(ctx: Ctx<ErrorRequest>) {
export async function error(ctx: Ctx<ErrorOpsRequest, void>) {
const body = ctx.request.body
throw new Error(body.message)
}

View File

@ -9,6 +9,8 @@ import {
RemovePermissionRequest,
RemovePermissionResponse,
FetchResourcePermissionInfoResponse,
FetchBuiltinPermissionsRequest,
FetchPermissionLevelsRequest,
} from "@budibase/types"
import {
CURRENTLY_SUPPORTED_LEVELS,
@ -19,11 +21,13 @@ import { PermissionUpdateType } from "../../sdk/app/permissions"
const SUPPORTED_LEVELS = CURRENTLY_SUPPORTED_LEVELS
export function fetchBuiltin(ctx: UserCtx) {
export function fetchBuiltin(
ctx: UserCtx<void, FetchBuiltinPermissionsRequest>
) {
ctx.body = Object.values(permissions.getBuiltinPermissions())
}
export function fetchLevels(ctx: UserCtx) {
export function fetchLevels(ctx: UserCtx<void, FetchPermissionLevelsRequest>) {
// for now only provide the read/write perms externally
ctx.body = SUPPORTED_LEVELS
}

View File

@ -3,8 +3,12 @@ import {
getPluginMetadata,
extractTarball,
} from "../../../utilities/fileSystem"
import { KoaFile } from "@budibase/types"
export async function fileUpload(file: { name: string; path: string }) {
export async function fileUpload(file: KoaFile) {
if (!file.name || !file.path) {
throw new Error("File is not valid - cannot upload.")
}
if (!file.name.endsWith(".tar.gz")) {
throw new Error("Plugin must be compressed into a gzipped tarball.")
}

View File

@ -2,26 +2,37 @@ import { npmUpload, urlUpload, githubUpload } from "./uploaders"
import { plugins as pluginCore } from "@budibase/backend-core"
import {
PluginType,
FileType,
PluginSource,
Ctx,
CreatePluginRequest,
CreatePluginResponse,
UserCtx,
UploadPluginRequest,
Plugin,
UploadPluginResponse,
FetchPluginResponse,
DeletePluginResponse,
} from "@budibase/types"
import env from "../../../environment"
import { clientAppSocket } from "../../../websockets"
import sdk from "../../../sdk"
import { sdk as pro } from "@budibase/pro"
export async function upload(ctx: any) {
const plugins: FileType[] =
ctx.request.files.file.length > 1
? Array.from(ctx.request.files.file)
: [ctx.request.files.file]
export async function upload(
ctx: UserCtx<UploadPluginRequest, UploadPluginResponse>
) {
const files = ctx.request.files
const plugins =
files && Array.isArray(files.file) && files.file.length > 1
? Array.from(files.file)
: [files?.file]
try {
let docs = []
let docs: Plugin[] = []
// can do single or multiple plugins
for (let plugin of plugins) {
if (!plugin || Array.isArray(plugin)) {
continue
}
const doc = await sdk.plugins.processUploaded(plugin, PluginSource.FILE)
docs.push(doc)
}
@ -37,7 +48,7 @@ export async function upload(ctx: any) {
}
export async function create(
ctx: Ctx<CreatePluginRequest, CreatePluginResponse>
ctx: UserCtx<CreatePluginRequest, CreatePluginResponse>
) {
const { source, url, headers, githubToken } = ctx.request.body
@ -91,11 +102,11 @@ export async function create(
}
}
export async function fetch(ctx: any) {
export async function fetch(ctx: UserCtx<void, FetchPluginResponse>) {
ctx.body = await sdk.plugins.fetch()
}
export async function destroy(ctx: any) {
export async function destroy(ctx: UserCtx<void, DeletePluginResponse>) {
const { pluginId } = ctx.params
try {
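
Note: upload now tolerates every shape koa-body can hand back for files.file: missing, a single file, or an array. The normalization in isolation, with the optional KoaFile fields assumed from the plugin upload validator above:

interface KoaFile {
  name?: string
  path?: string
}

// files.file may be undefined, one file, or an array of files
function normalizeFiles(file: KoaFile | KoaFile[] | undefined): KoaFile[] {
  if (!file) {
    return []
  }
  return Array.isArray(file) ? file : [file]
}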

View File

@ -4,26 +4,38 @@ import { save as saveDatasource } from "../datasource"
import { RestImporter } from "./import"
import { invalidateCachedVariable } from "../../../threads/utils"
import env from "../../../environment"
import { events, context, utils, constants } from "@budibase/backend-core"
import { constants, context, events, utils } from "@budibase/backend-core"
import sdk from "../../../sdk"
import { QueryEvent, QueryEventParameters } from "../../../threads/definitions"
import {
ConfigType,
Query,
UserCtx,
SessionCookie,
JsonFieldSubType,
QueryResponse,
QuerySchema,
FieldType,
CreateDatasourceRequest,
Datasource,
ExecuteQueryRequest,
ExecuteQueryResponse,
ExecuteV2QueryResponse,
ExecuteV1QueryResponse,
FetchQueriesResponse,
FieldType,
FindQueryResponse,
ImportRestQueryRequest,
ImportRestQueryResponse,
JsonFieldSubType,
PreviewQueryRequest,
PreviewQueryResponse,
Query,
QueryResponse,
QuerySchema,
SaveQueryRequest,
SaveQueryResponse,
SessionCookie,
SourceName,
UserCtx,
DeleteQueryResponse,
} from "@budibase/types"
import { ValidQueryNameRegex, utils as JsonUtils } from "@budibase/shared-core"
import { utils as JsonUtils, ValidQueryNameRegex } from "@budibase/shared-core"
import { findHBSBlocks } from "@budibase/string-templates"
import { ObjectId } from "mongodb"
import { merge } from "lodash"
const Runner = new Thread(ThreadType.QUERY, {
timeoutMs: env.QUERY_THREAD_TIMEOUT,
@ -43,11 +55,13 @@ function validateQueryInputs(parameters: QueryEventParameters) {
}
}
export async function fetch(ctx: UserCtx) {
export async function fetch(ctx: UserCtx<void, FetchQueriesResponse>) {
ctx.body = await sdk.queries.fetch()
}
const _import = async (ctx: UserCtx) => {
const _import = async (
ctx: UserCtx<ImportRestQueryRequest, ImportRestQueryResponse>
) => {
const body = ctx.request.body
const data = body.data
@ -58,9 +72,9 @@ const _import = async (ctx: UserCtx) => {
if (!body.datasourceId) {
// construct new datasource
const info: any = await importer.getInfo()
let datasource = {
let datasource: Datasource = {
type: "datasource",
source: "REST",
source: SourceName.REST,
config: {
url: info.url,
defaultHeaders: [],
@ -69,8 +83,14 @@ const _import = async (ctx: UserCtx) => {
name: info.name,
}
// save the datasource
const datasourceCtx = { ...ctx }
datasourceCtx.request.body.datasource = datasource
const datasourceCtx: UserCtx<CreateDatasourceRequest> = merge(ctx, {
request: {
body: {
datasource,
tablesFilter: [],
},
},
})
await saveDatasource(datasourceCtx)
datasourceId = datasourceCtx.body.datasource._id
} else {
@ -88,7 +108,7 @@ const _import = async (ctx: UserCtx) => {
}
export { _import as import }
export async function save(ctx: UserCtx<Query, Query>) {
export async function save(ctx: UserCtx<SaveQueryRequest, SaveQueryResponse>) {
const db = context.getAppDB()
const query: Query = ctx.request.body
@ -119,10 +139,9 @@ export async function save(ctx: UserCtx<Query, Query>) {
query._rev = response.rev
ctx.body = query
ctx.message = `Query ${query.name} saved successfully.`
}
export async function find(ctx: UserCtx) {
export async function find(ctx: UserCtx<void, FindQueryResponse>) {
const queryId = ctx.params.queryId
ctx.body = await sdk.queries.find(queryId)
}
@ -335,7 +354,7 @@ export async function preview(
async function execute(
ctx: UserCtx<
ExecuteQueryRequest,
ExecuteQueryResponse | Record<string, any>[]
ExecuteV2QueryResponse | ExecuteV1QueryResponse
>,
opts: any = { rowsOnly: false, isAutomation: false }
) {
@ -390,19 +409,21 @@ async function execute(
}
export async function executeV1(
ctx: UserCtx<ExecuteQueryRequest, Record<string, any>[]>
ctx: UserCtx<ExecuteQueryRequest, ExecuteV1QueryResponse>
) {
return execute(ctx, { rowsOnly: true, isAutomation: false })
}
export async function executeV2(
ctx: UserCtx<
ExecuteQueryRequest,
ExecuteQueryResponse | Record<string, any>[]
>,
{ isAutomation }: { isAutomation?: boolean } = {}
ctx: UserCtx<ExecuteQueryRequest, ExecuteV2QueryResponse>
) {
return execute(ctx, { rowsOnly: false, isAutomation })
return execute(ctx, { rowsOnly: false })
}
export async function executeV2AsAutomation(
ctx: UserCtx<ExecuteQueryRequest, ExecuteV2QueryResponse>
) {
return execute(ctx, { rowsOnly: false, isAutomation: true })
}
const removeDynamicVariables = async (queryId: string) => {
@ -426,14 +447,14 @@ const removeDynamicVariables = async (queryId: string) => {
}
}
export async function destroy(ctx: UserCtx) {
export async function destroy(ctx: UserCtx<void, DeleteQueryResponse>) {
const db = context.getAppDB()
const queryId = ctx.params.queryId as string
await removeDynamicVariables(queryId)
const query = await db.get<Query>(queryId)
const datasource = await sdk.datasources.get(query.datasourceId)
await db.remove(ctx.params.queryId, ctx.params.revId)
ctx.message = `Query deleted.`
ctx.body = { message: `Query deleted.` }
ctx.status = 200
await events.query.deleted(datasource, query)
}
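
Note: executeV2's isAutomation option becomes a dedicated executeV2AsAutomation export, so each route binds to a fully determined handler. The split, sketched with a simplified options object in place of the real ctx plumbing:

// Shared implementation; the exported wrappers pin the options down.
async function executeQuery(opts: { rowsOnly: boolean; isAutomation?: boolean }) {
  // run the query thread and shape the response accordingly (omitted)
}

export const executeV1 = () => executeQuery({ rowsOnly: true, isAutomation: false })
export const executeV2 = () => executeQuery({ rowsOnly: false })
export const executeV2AsAutomation = () =>
  executeQuery({ rowsOnly: false, isAutomation: true })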

View File

@ -9,7 +9,7 @@ import { getUserMetadataParams, InternalTables } from "../../db/utils"
import {
AccessibleRolesResponse,
Database,
DestroyRoleResponse,
DeleteRoleResponse,
FetchRolesResponse,
FindRoleResponse,
Role,
@ -199,7 +199,7 @@ export async function save(ctx: UserCtx<SaveRoleRequest, SaveRoleResponse>) {
builderSocket?.emitRoleUpdate(ctx, role)
}
export async function destroy(ctx: UserCtx<void, DestroyRoleResponse>) {
export async function destroy(ctx: UserCtx<void, DeleteRoleResponse>) {
const db = context.getAppDB()
let roleId = ctx.params.roleId as string
if (roles.isBuiltin(roleId)) {

View File

@ -1,11 +1,17 @@
import { getRoutingInfo } from "../../utilities/routing"
import { roles } from "@budibase/backend-core"
import { UserCtx } from "@budibase/types"
import {
FetchClientScreenRoutingResponse,
FetchScreenRoutingResponse,
ScreenRoutingJson,
UserCtx,
} from "@budibase/types"
const URL_SEPARATOR = "/"
class Routing {
json: any
json: ScreenRoutingJson
constructor() {
this.json = {}
}
@ -43,7 +49,7 @@ class Routing {
* @returns The routing structure; this is the full structure designed for use in the builder.
* If the client routing is required, updateRoutingStructureForUserRole should be used instead.
*/
async function getRoutingStructure() {
async function getRoutingStructure(): Promise<{ routes: ScreenRoutingJson }> {
const screenRoutes = await getRoutingInfo()
const routing = new Routing()
@ -56,11 +62,13 @@ async function getRoutingStructure() {
return { routes: routing.json }
}
export async function fetch(ctx: UserCtx) {
export async function fetch(ctx: UserCtx<void, FetchScreenRoutingResponse>) {
ctx.body = await getRoutingStructure()
}
export async function clientFetch(ctx: UserCtx) {
export async function clientFetch(
ctx: UserCtx<void, FetchClientScreenRoutingResponse>
) {
const routing = await getRoutingStructure()
let roleId = ctx.user?.role?._id
const roleIds = roleId ? await roles.getUserRoleIdHierarchy(roleId) : []

View File

@ -42,7 +42,7 @@ import {
sqlOutputProcessing,
} from "./utils"
import {
getDatasourceAndQuery,
enrichQueryJson,
processRowCountResponse,
} from "../../../sdk/app/rows/utils"
import { processObjectSync } from "@budibase/string-templates"
@ -135,16 +135,9 @@ function cleanupConfig(config: RunConfig, table: Table): RunConfig {
return config
}
function getEndpoint(tableId: string | undefined, operation: string) {
if (!tableId) {
throw new Error("Cannot get endpoint information - no table ID specified")
}
function getEndpoint(tableId: string, operation: Operation) {
const { datasourceId, tableName } = breakExternalTableId(tableId)
return {
datasourceId: datasourceId,
entityId: tableName,
operation: operation as Operation,
}
return { datasourceId, entityId: tableName, operation }
}
function isOneSide(
@ -268,12 +261,9 @@ export class ExternalRequest<T extends Operation> {
const filters = this.prepareFilters(rowId, {}, table)
// safety check: if there are no filters on deletion, bad things happen
if (Object.keys(filters).length !== 0) {
return getDatasourceAndQuery({
return makeExternalQuery({
endpoint: getEndpoint(tableId, Operation.DELETE),
filters,
meta: {
table,
},
})
} else {
return []
@ -289,13 +279,10 @@ export class ExternalRequest<T extends Operation> {
const filters = this.prepareFilters(rowId, {}, table)
// safety check: if there are no filters on deletion, bad things happen
if (Object.keys(filters).length !== 0) {
return getDatasourceAndQuery({
return makeExternalQuery({
endpoint: getEndpoint(tableId, Operation.UPDATE),
body: { [colName]: null },
filters,
meta: {
table,
},
})
} else {
return []
@ -311,12 +298,9 @@ export class ExternalRequest<T extends Operation> {
}
async getRow(table: Table, rowId: string): Promise<Row> {
const response = await getDatasourceAndQuery({
const response = await makeExternalQuery({
endpoint: getEndpoint(table._id!, Operation.READ),
filters: this.prepareFilters(rowId, {}, table),
meta: {
table,
},
})
if (Array.isArray(response) && response.length > 0) {
return response[0]
@ -490,16 +474,13 @@ export class ExternalRequest<T extends Operation> {
if (!relatedTable) {
throw new Error("unable to find related table")
}
const response = await getDatasourceAndQuery({
endpoint: endpoint,
const response = await makeExternalQuery({
endpoint,
filters: {
equal: {
[fieldName]: row[lookupField],
},
},
meta: {
table: relatedTable,
},
})
// this is the response from knex if no rows found
const rows: Row[] =
@ -537,6 +518,11 @@ export class ExternalRequest<T extends Operation> {
for (let relationship of relationships) {
const { key, tableId, isUpdate, id, relationshipType, ...rest } =
relationship
if (!tableId) {
throw new Error("Table ID is unknown, cannot find table")
}
const body: { [key: string]: any } = processObjectSync(rest, row, {})
const linkTable = this.getTable(tableId)
const relationshipPrimary = linkTable?.primary || []
@ -583,14 +569,11 @@ export class ExternalRequest<T extends Operation> {
const operation = isUpdate ? Operation.UPDATE : Operation.CREATE
if (!existingRelationship) {
promises.push(
getDatasourceAndQuery({
makeExternalQuery({
endpoint: getEndpoint(tableId, operation),
// if we're doing many relationships then we're writing, so there is only one response
body,
filters: this.prepareFilters(id, {}, linkTable),
meta: {
table: linkTable,
},
})
)
} else {
@ -723,8 +706,8 @@ export class ExternalRequest<T extends Operation> {
let json: QueryJson = {
endpoint: {
datasourceId: this.datasource._id!,
entityId: table.name,
datasourceId: this.datasource,
entityId: table,
operation,
},
resource: {
@ -749,10 +732,6 @@ export class ExternalRequest<T extends Operation> {
table
),
},
meta: {
table,
tables: this.tables,
},
}
// remove any relationships that could block deletion
@ -773,8 +752,11 @@ export class ExternalRequest<T extends Operation> {
response = [unprocessedRow]
} else {
response = env.SQL_ALIASING_DISABLE
? await getDatasourceAndQuery(json)
: await aliasing.queryWithAliasing(json, makeExternalQuery)
? await makeExternalQuery(json)
: await aliasing.queryWithAliasing(
await enrichQueryJson(json),
makeExternalQuery
)
}
// if it's a counting operation there will be no more processing, just return the number
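
Note: the recurring change in this file is the QueryJson envelope: endpoint.datasourceId and endpoint.entityId now carry the full Datasource and Table documents rather than string IDs, which is why the meta.table/meta.tables blocks disappear. Assuming the updated typings, building a read request reduces to:

import { Datasource, Operation, QueryJson, SearchFilters, Table } from "@budibase/types"

// Sketch: the endpoint embeds the documents themselves, so downstream
// code no longer needs a separate meta block to look the table up.
function buildReadRequest(
  datasource: Datasource,
  table: Table,
  filters: SearchFilters
): QueryJson {
  return {
    endpoint: { datasourceId: datasource, entityId: table, operation: Operation.READ },
    filters,
  }
}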

View File

@ -11,23 +11,30 @@ import {
DeleteRow,
DeleteRowRequest,
DeleteRows,
DownloadAttachmentResponse,
EventType,
ExportRowsRequest,
ExportRowsResponse,
FetchEnrichedRowResponse,
FetchRowsResponse,
FieldType,
GetRowResponse,
FindRowResponse,
isRelationshipField,
PatchRowRequest,
PatchRowResponse,
RequiredKeys,
Row,
RowAttachment,
RowSearchParams,
SaveRowRequest,
SaveRowResponse,
SearchFilters,
SearchRowRequest,
SearchRowResponse,
Table,
UserCtx,
ValidateResponse,
ValidateRowRequest,
ValidateRowResponse,
} from "@budibase/types"
import * as utils from "./utils"
import { gridSocket } from "../../../websockets"
@ -82,7 +89,7 @@ export async function patch(
}
}
export const save = async (ctx: UserCtx<Row, Row>) => {
export const save = async (ctx: UserCtx<SaveRowRequest, SaveRowResponse>) => {
const { tableId, viewId } = utils.getSourceId(ctx)
const sourceId = viewId || tableId
@ -130,12 +137,12 @@ export async function fetchLegacyView(ctx: any) {
})
}
export async function fetch(ctx: any) {
export async function fetch(ctx: UserCtx<void, FetchRowsResponse>) {
const { tableId } = utils.getSourceId(ctx)
ctx.body = await sdk.rows.fetch(tableId)
}
export async function find(ctx: UserCtx<void, GetRowResponse>) {
export async function find(ctx: UserCtx<void, FindRowResponse>) {
const { tableId, viewId } = utils.getSourceId(ctx)
const sourceId = viewId || tableId
const rowId = ctx.params.rowId
@ -239,7 +246,8 @@ export async function search(ctx: Ctx<SearchRowRequest, SearchRowResponse>) {
await context.ensureSnippetContext(true)
let { query } = ctx.request.body
const searchRequest = ctx.request.body
let { query } = searchRequest
if (query) {
const allTables = await sdk.tables.getAllTables()
query = replaceTableNamesInFilters(tableId, query, allTables)
@ -249,11 +257,22 @@ export async function search(ctx: Ctx<SearchRowRequest, SearchRowResponse>) {
user: sdk.users.getUserContextBindings(ctx.user),
})
const searchParams: RowSearchParams = {
...ctx.request.body,
const searchParams: RequiredKeys<RowSearchParams> = {
query: enrichedQuery,
tableId,
viewId,
bookmark: searchRequest.bookmark ?? undefined,
paginate: searchRequest.paginate,
limit: searchRequest.limit,
sort: searchRequest.sort ?? undefined,
sortOrder: searchRequest.sortOrder,
sortType: searchRequest.sortType ?? undefined,
countRows: searchRequest.countRows,
version: searchRequest.version,
disableEscaping: searchRequest.disableEscaping,
fields: undefined,
indexer: undefined,
rows: undefined,
}
ctx.status = 200
@ -301,7 +320,9 @@ function replaceTableNamesInFilters(
})
}
export async function validate(ctx: Ctx<Row, ValidateResponse>) {
export async function validate(
ctx: Ctx<ValidateRowRequest, ValidateRowResponse>
) {
const source = await utils.getSource(ctx)
const table = await utils.getTableFromSource(source)
// external tables are hard to validate currently
@ -315,7 +336,9 @@ export async function validate(ctx: Ctx<Row, ValidateResponse>) {
}
}
export async function fetchEnrichedRow(ctx: UserCtx<void, Row>) {
export async function fetchEnrichedRow(
ctx: UserCtx<void, FetchEnrichedRowResponse>
) {
const { tableId } = utils.getSourceId(ctx)
ctx.body = await pickApi(tableId).fetchEnrichedRow(ctx)
}
@ -353,7 +376,9 @@ export const exportRows = async (
ctx.body = apiFileReturn(content)
}
export async function downloadAttachment(ctx: UserCtx) {
export async function downloadAttachment(
ctx: UserCtx<void, DownloadAttachmentResponse>
) {
const { columnName } = ctx.params
const { tableId } = utils.getSourceId(ctx)
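
Note: the search handler's RequiredKeys<RowSearchParams> literal forces every optional search parameter to be written out (if only as undefined) instead of arriving via a spread of the raw request. One plausible definition of the utility type, for illustration:

// Every key of T must be present, though its value may still be undefined.
type RequiredKeys<T> = {
  [K in keyof Required<T>]: T[K]
}

interface Example {
  a: number
  b?: string
}

const ok: RequiredKeys<Example> = { a: 1, b: undefined }
// const bad: RequiredKeys<Example> = { a: 1 } // error: property 'b' is missing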

View File

@ -15,10 +15,21 @@ import {
} from "@budibase/types"
import * as linkRows from "../../../db/linkedRows"
import isEqual from "lodash/isEqual"
import { cloneDeep } from "lodash/fp"
import { cloneDeep, merge } from "lodash/fp"
import sdk from "../../../sdk"
import * as pro from "@budibase/pro"
function mergeRows(row1: Row, row2: Row) {
const merged = merge(row1, row2)
// make sure any specifically undefined fields are removed
for (const key of Object.keys(row2)) {
if (row2[key] === undefined) {
delete merged[key]
}
}
return merged
}
/**
* This function runs through a list of enriched rows, looks at the rows which
* are related and then checks if they need the state of their formulas
@ -162,9 +173,14 @@ export async function finaliseRow(
})
}
const response = await db.put(row)
// for response, calculate the formulas for the enriched row
enrichedRow._rev = response.rev
await db.put(row)
const retrieved = await db.tryGet<Row>(row._id)
if (!retrieved) {
throw new Error(`Unable to retrieve row ${row._id} after saving.`)
}
delete enrichedRow._rev
enrichedRow = mergeRows(retrieved, enrichedRow)
enrichedRow = await processFormulas(table, enrichedRow, {
dynamic: false,
})
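
Note: a quick illustration of why mergeRows post-processes lodash's merge: merge ignores undefined on the source side, so the explicit delete loop is what lets the enriched row blank out a stored field. With hypothetical rows:

// lodash merge alone would keep count: 2; mergeRows removes the key entirely.
const stored = { _id: "ro_1", name: "old", count: 2 }
const patch = { name: "new", count: undefined }
mergeRows(stored, patch) // => { _id: "ro_1", name: "new" }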

View File

@ -175,7 +175,7 @@ export async function enrichArrayContext(
}
export async function enrichSearchContext(
fields: Record<string, any>,
fields: Record<string, any> | undefined,
inputs = {},
helpers = true
): Promise<Record<string, any>> {

View File

@ -29,19 +29,20 @@ export async function searchView(
await context.ensureSnippetContext(true)
const searchOptions: RequiredKeys<SearchViewRowRequest> &
RequiredKeys<
Pick<RowSearchParams, "tableId" | "viewId" | "query" | "fields">
> = {
const searchOptions: RequiredKeys<RowSearchParams> = {
tableId: view.tableId,
viewId: view.id,
query: body.query,
query: body.query || {},
fields: viewFields,
...getSortOptions(body, view),
limit: body.limit,
bookmark: body.bookmark,
bookmark: body.bookmark ?? undefined,
paginate: body.paginate,
countRows: body.countRows,
version: undefined,
disableEscaping: undefined,
indexer: undefined,
rows: undefined,
}
const result = await sdk.rows.search(searchOptions, {
@ -56,7 +57,7 @@ function getSortOptions(request: SearchViewRowRequest, view: ViewV2) {
return {
sort: request.sort,
sortOrder: request.sortOrder,
sortType: request.sortType,
sortType: request.sortType ?? undefined,
}
}
if (view.sort) {

View File

@ -10,13 +10,16 @@ import { updateAppPackage } from "./application"
import {
Plugin,
ScreenProps,
BBContext,
Screen,
UserCtx,
FetchScreenResponse,
SaveScreenRequest,
SaveScreenResponse,
DeleteScreenResponse,
} from "@budibase/types"
import { builderSocket } from "../../websockets"
export async function fetch(ctx: BBContext) {
export async function fetch(ctx: UserCtx<void, FetchScreenResponse>) {
const db = context.getAppDB()
const screens = (
@ -37,7 +40,9 @@ export async function fetch(ctx: BBContext) {
)
}
export async function save(ctx: UserCtx<Screen, Screen>) {
export async function save(
ctx: UserCtx<SaveScreenRequest, SaveScreenResponse>
) {
const db = context.getAppDB()
let screen = ctx.request.body
@ -107,7 +112,7 @@ export async function save(ctx: UserCtx<Screen, Screen>) {
builderSocket?.emitScreenUpdate(ctx, savedScreen)
}
export async function destroy(ctx: BBContext) {
export async function destroy(ctx: UserCtx<void, DeleteScreenResponse>) {
const db = context.getAppDB()
const id = ctx.params.screenId
const screen = await db.get<Screen>(id)

View File

@ -14,7 +14,3 @@ export async function execute(ctx: Ctx) {
throw err
}
}
export async function save(ctx: Ctx) {
ctx.throw(501, "Not currently implemented")
}

View File

@ -27,7 +27,13 @@ import {
Ctx,
DocumentType,
Feature,
GetSignedUploadUrlRequest,
GetSignedUploadUrlResponse,
ProcessAttachmentResponse,
ServeAppResponse,
ServeBuilderPreviewResponse,
ServeClientLibraryResponse,
ToggleBetaFeatureResponse,
UserCtx,
} from "@budibase/types"
import {
@ -38,7 +44,9 @@ import {
import send from "koa-send"
import { getThemeVariables } from "../../../constants/themes"
export const toggleBetaUiFeature = async function (ctx: Ctx) {
export const toggleBetaUiFeature = async function (
ctx: Ctx<void, ToggleBetaFeatureResponse>
) {
const cookieName = `beta:${ctx.params.feature}`
if (ctx.cookies.get(cookieName)) {
@ -66,13 +74,13 @@ export const toggleBetaUiFeature = async function (ctx: Ctx) {
}
}
export const serveBuilder = async function (ctx: Ctx) {
export const serveBuilder = async function (ctx: Ctx<void, void>) {
const builderPath = join(TOP_LEVEL_PATH, "builder")
await send(ctx, ctx.file, { root: builderPath })
}
export const uploadFile = async function (
ctx: Ctx<{}, ProcessAttachmentResponse>
ctx: Ctx<void, ProcessAttachmentResponse>
) {
const file = ctx.request?.files?.file
if (!file) {
@ -144,7 +152,7 @@ const requiresMigration = async (ctx: Ctx) => {
return latestMigrationApplied !== latestMigration
}
export const serveApp = async function (ctx: UserCtx) {
export const serveApp = async function (ctx: UserCtx<void, ServeAppResponse>) {
if (ctx.url.includes("apple-touch-icon.png")) {
ctx.redirect("/builder/bblogo.png")
return
@ -249,7 +257,9 @@ export const serveApp = async function (ctx: UserCtx) {
}
}
export const serveBuilderPreview = async function (ctx: Ctx) {
export const serveBuilderPreview = async function (
ctx: Ctx<void, ServeBuilderPreviewResponse>
) {
const db = context.getAppDB({ skip_setup: true })
const appInfo = await db.get<App>(DocumentType.APP_METADATA)
@ -268,7 +278,9 @@ export const serveBuilderPreview = async function (ctx: Ctx) {
}
}
export const serveClientLibrary = async function (ctx: Ctx) {
export const serveClientLibrary = async function (
ctx: Ctx<void, ServeClientLibraryResponse>
) {
const version = ctx.request.query.version
if (Array.isArray(version)) {
@ -297,7 +309,9 @@ export const serveClientLibrary = async function (ctx: Ctx) {
}
}
export const getSignedUploadURL = async function (ctx: Ctx) {
export const getSignedUploadURL = async function (
ctx: Ctx<GetSignedUploadUrlRequest, GetSignedUploadUrlResponse>
) {
// Ensure datasource is valid
let datasource
try {

View File

@ -11,27 +11,24 @@ export async function makeTableRequest(
datasource: Datasource,
operation: Operation,
table: Table,
tables: Record<string, Table>,
oldTable?: Table,
renamed?: RenameColumn
) {
const json: QueryJson = {
endpoint: {
datasourceId: datasource._id!,
entityId: table._id!,
datasourceId: datasource,
entityId: table,
operation,
},
meta: {
table,
tables,
},
table,
}
if (!json.meta) {
json.meta = {}
}
if (oldTable) {
json.meta!.table = oldTable
json.meta.oldTable = oldTable
}
if (renamed) {
json.meta!.renamed = renamed
json.meta.renamed = renamed
}
return makeExternalQuery(datasource, json)
return makeExternalQuery(json)
}

View File

@ -19,17 +19,18 @@ import {
EventType,
FetchTablesResponse,
FieldType,
MigrateRequest,
MigrateResponse,
MigrateTableRequest,
MigrateTableResponse,
SaveTableRequest,
SaveTableResponse,
Table,
TableResponse,
FindTableResponse,
TableSourceType,
UserCtx,
ValidateNewTableImportRequest,
ValidateTableImportRequest,
ValidateTableImportResponse,
DeleteTableResponse,
} from "@budibase/types"
import sdk from "../../../sdk"
import { jsonFromCsvString } from "../../../utilities/csv"
@ -94,7 +95,7 @@ export async function fetch(ctx: UserCtx<void, FetchTablesResponse>) {
ctx.body = result
}
export async function find(ctx: UserCtx<void, TableResponse>) {
export async function find(ctx: UserCtx<void, FindTableResponse>) {
const tableId = ctx.params.tableId
const table = await sdk.tables.getTable(tableId)
@ -144,7 +145,7 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
builderSocket?.emitTableUpdate(ctx, cloneDeep(savedTable))
}
export async function destroy(ctx: UserCtx) {
export async function destroy(ctx: UserCtx<void, DeleteTableResponse>) {
const appId = ctx.appId
const tableId = ctx.params.tableId
await sdk.rowActions.deleteAll(tableId)
@ -230,7 +231,9 @@ export async function validateExistingTableImport(
}
}
export async function migrate(ctx: UserCtx<MigrateRequest, MigrateResponse>) {
export async function migrate(
ctx: UserCtx<MigrateTableRequest, MigrateTableResponse>
) {
const { oldColumn, newColumn } = ctx.request.body
let tableId = ctx.params.tableId as string
const table = await sdk.tables.getTable(tableId)

View File

@ -1,13 +1,17 @@
import nodeFetch from "node-fetch"
import { downloadTemplate as dlTemplate } from "../../utilities/fileSystem"
import env from "../../environment"
import { BBContext } from "@budibase/types"
import {
DownloadTemplateResponse,
FetchTemplateResponse,
UserCtx,
} from "@budibase/types"
// development flag, can be used to test against templates exported locally
const DEFAULT_TEMPLATES_BUCKET =
"prod-budi-templates.s3-eu-west-1.amazonaws.com"
export async function fetch(ctx: BBContext) {
export async function fetch(ctx: UserCtx<void, FetchTemplateResponse>) {
let type = env.TEMPLATE_REPOSITORY
let response,
error = false
@ -32,7 +36,9 @@ export async function fetch(ctx: BBContext) {
// can't currently test this, have to ignore from coverage
/* istanbul ignore next */
export async function downloadTemplate(ctx: BBContext) {
export async function downloadTemplate(
ctx: UserCtx<void, DownloadTemplateResponse>
) {
const { type, name } = ctx.params
await dlTemplate(type, name)

View File

@ -7,19 +7,24 @@ import {
FetchUserMetadataResponse,
FindUserMetadataResponse,
Flags,
SetFlagRequest,
SetUserFlagRequest,
UpdateSelfMetadataRequest,
UpdateSelfMetadataResponse,
UpdateUserMetadataResponse,
UpdateUserMetadataRequest,
UserCtx,
UserMetadata,
DeleteUserMetadataResponse,
SetUserFlagResponse,
GetUserFlagsResponse,
} from "@budibase/types"
import sdk from "../../sdk"
import { DocumentInsertResponse } from "@budibase/nano"
export async function fetchMetadata(ctx: Ctx<void, FetchUserMetadataResponse>) {
ctx.body = await sdk.users.fetchMetadata()
}
export async function updateSelfMetadata(
ctx: UserCtx<UserMetadata, DocumentInsertResponse>
ctx: UserCtx<UpdateSelfMetadataRequest, UpdateSelfMetadataResponse>
) {
// overwrite the ID with current users
ctx.request.body._id = ctx.user?._id
@ -31,7 +36,7 @@ export async function updateSelfMetadata(
}
export async function updateMetadata(
ctx: UserCtx<UserMetadata, DocumentInsertResponse>
ctx: UserCtx<UpdateUserMetadataRequest, UpdateUserMetadataResponse>
) {
const db = context.getAppDB()
const user = ctx.request.body
@ -44,7 +49,9 @@ export async function updateMetadata(
ctx.body = await db.put(metadata)
}
export async function destroyMetadata(ctx: UserCtx<void, { message: string }>) {
export async function destroyMetadata(
ctx: UserCtx<void, DeleteUserMetadataResponse>
) {
const db = context.getAppDB()
try {
const dbUser = await sdk.users.get(ctx.params.id)
@ -64,7 +71,7 @@ export async function findMetadata(
}
export async function setFlag(
ctx: UserCtx<SetFlagRequest, { message: string }>
ctx: UserCtx<SetUserFlagRequest, SetUserFlagResponse>
) {
const userId = ctx.user?._id
const { flag, value } = ctx.request.body
@ -84,7 +91,7 @@ export async function setFlag(
ctx.body = { message: "Flag set successfully" }
}
export async function getFlags(ctx: UserCtx<void, Flags>) {
export async function getFlags(ctx: UserCtx<void, GetUserFlagsResponse>) {
const userId = ctx.user?._id
const docId = generateUserFlagID(userId!)
const db = context.getAppDB()

View File

@ -4,7 +4,6 @@ import {
Ctx,
RequiredKeys,
UpdateViewRequest,
ViewResponse,
ViewResponseEnriched,
ViewV2,
BasicViewFieldMetadata,
@ -15,6 +14,8 @@ import {
ViewFetchResponseEnriched,
CountDistinctCalculationFieldMetadata,
CountCalculationFieldMetadata,
CreateViewResponse,
UpdateViewResponse,
} from "@budibase/types"
import { events } from "@budibase/backend-core"
import { builderSocket, gridSocket } from "../../../websockets"
@ -133,7 +134,7 @@ export async function fetch(ctx: Ctx<void, ViewFetchResponseEnriched>) {
}
}
export async function create(ctx: Ctx<CreateViewRequest, ViewResponse>) {
export async function create(ctx: Ctx<CreateViewRequest, CreateViewResponse>) {
const view = ctx.request.body
const { tableId } = view
@ -163,7 +164,7 @@ export async function create(ctx: Ctx<CreateViewRequest, ViewResponse>) {
gridSocket?.emitViewUpdate(ctx, result)
}
export async function update(ctx: Ctx<UpdateViewRequest, ViewResponse>) {
export async function update(ctx: Ctx<UpdateViewRequest, UpdateViewResponse>) {
const view = ctx.request.body
if (view.version !== 2) {
@ -201,7 +202,7 @@ export async function update(ctx: Ctx<UpdateViewRequest, ViewResponse>) {
gridSocket?.emitViewUpdate(ctx, result)
}
export async function remove(ctx: Ctx) {
export async function remove(ctx: Ctx<void, void>) {
const { viewId } = ctx.params
const view = await sdk.views.remove(viewId)

View File

@ -4,9 +4,17 @@ import { db as dbCore, context } from "@budibase/backend-core"
import {
Webhook,
WebhookActionType,
BBContext,
Ctx,
Automation,
AutomationActionStepId,
FetchWebhooksResponse,
SaveWebhookResponse,
SaveWebhookRequest,
DeleteWebhookResponse,
BuildWebhookSchemaRequest,
BuildWebhookSchemaResponse,
TriggerWebhookRequest,
TriggerWebhookResponse,
} from "@budibase/types"
import sdk from "../../sdk"
import * as pro from "@budibase/pro"
@ -16,17 +24,17 @@ const validate = require("jsonschema").validate
const AUTOMATION_DESCRIPTION = "Generated from Webhook Schema"
export async function fetch(ctx: BBContext) {
export async function fetch(ctx: Ctx<void, FetchWebhooksResponse>) {
const db = context.getAppDB()
const response = await db.allDocs(
const response = await db.allDocs<Webhook>(
getWebhookParams(null, {
include_docs: true,
})
)
ctx.body = response.rows.map((row: any) => row.doc)
ctx.body = response.rows.filter(row => row.doc).map(row => row.doc!)
}
export async function save(ctx: BBContext) {
export async function save(ctx: Ctx<SaveWebhookRequest, SaveWebhookResponse>) {
const webhook = await sdk.automations.webhook.save(ctx.request.body)
ctx.body = {
message: "Webhook created successfully",
@ -34,21 +42,23 @@ export async function save(ctx: BBContext) {
}
}
export async function destroy(ctx: BBContext) {
export async function destroy(ctx: Ctx<void, DeleteWebhookResponse>) {
ctx.body = await sdk.automations.webhook.destroy(
ctx.params.id,
ctx.params.rev
)
}
export async function buildSchema(ctx: BBContext) {
export async function buildSchema(
ctx: Ctx<BuildWebhookSchemaRequest, BuildWebhookSchemaResponse>
) {
await context.doInAppContext(ctx.params.instance, async () => {
const db = context.getAppDB()
const webhook = (await db.get(ctx.params.id)) as Webhook
const webhook = await db.get<Webhook>(ctx.params.id)
webhook.bodySchema = toJsonSchema(ctx.request.body)
// update the automation outputs
if (webhook.action.type === WebhookActionType.AUTOMATION) {
let automation = (await db.get(webhook.action.target)) as Automation
let automation = await db.get<Automation>(webhook.action.target)
const autoOutputs = automation.definition.trigger.schema.outputs
let properties = webhook.bodySchema.properties
// reset webhook outputs
@ -67,56 +77,66 @@ export async function buildSchema(ctx: BBContext) {
})
}
export async function trigger(ctx: BBContext) {
export async function trigger(
ctx: Ctx<TriggerWebhookRequest, TriggerWebhookResponse>
) {
const prodAppId = dbCore.getProdAppID(ctx.params.instance)
const appNotDeployed = () => {
ctx.body = {
message: "Application not deployed yet.",
}
}
await context.doInAppContext(prodAppId, async () => {
try {
const db = context.getAppDB()
const webhook = (await db.get(ctx.params.id)) as Webhook
// validate against the schema
if (webhook.bodySchema) {
validate(ctx.request.body, webhook.bodySchema)
}
const target = await db.get<Automation>(webhook.action.target)
if (webhook.action.type === WebhookActionType.AUTOMATION) {
// trigger with both the pure request body and an expanded version of it,
// in case the user has produced a schema to bind to
let hasCollectStep = sdk.automations.utils.checkForCollectStep(target)
const db = context.getAppDB()
const webhook = await db.tryGet<Webhook>(ctx.params.id)
if (!webhook) {
return appNotDeployed()
}
// validate against the schema
if (webhook.bodySchema) {
validate(ctx.request.body, webhook.bodySchema)
}
const target = await db.tryGet<Automation>(webhook.action.target)
if (!target) {
return appNotDeployed()
}
if (webhook.action.type === WebhookActionType.AUTOMATION) {
// trigger with both the pure request body and an expanded version of it,
// in case the user has produced a schema to bind to
let hasCollectStep = sdk.automations.utils.checkForCollectStep(target)
if (hasCollectStep && (await pro.features.isSyncAutomationsEnabled())) {
const response = await triggers.externalTrigger(
target,
{
body: ctx.request.body,
if (hasCollectStep && (await pro.features.isSyncAutomationsEnabled())) {
const response = await triggers.externalTrigger(
target,
{
fields: {
...ctx.request.body,
appId: prodAppId,
body: ctx.request.body,
},
{ getResponses: true }
)
appId: prodAppId,
},
{ getResponses: true }
)
if (triggers.isAutomationResults(response)) {
let collectedValue = response.steps.find(
(step: any) => step.stepId === AutomationActionStepId.COLLECT
)
ctx.status = 200
ctx.body = collectedValue.outputs
ctx.body = collectedValue?.outputs
} else {
await triggers.externalTrigger(target, {
body: ctx.request.body,
...ctx.request.body,
appId: prodAppId,
})
ctx.status = 200
ctx.body = {
message: "Webhook trigger fired successfully",
}
ctx.throw(400, "Automation did not have a collect block.")
}
}
} catch (err: any) {
if (err.status === 404) {
ctx.status = 200
} else {
await triggers.externalTrigger(target, {
fields: {
...ctx.request.body,
body: ctx.request.body,
},
appId: prodAppId,
})
ctx.body = {
message: "Application not deployed yet.",
message: "Webhook trigger fired successfully",
}
}
}
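
Note: triggers.isAutomationResults acts as a TypeScript type guard here, narrowing the union returned by externalTrigger before the steps are inspected. The general shape of such a guard, with assumed result types (QueuedTrigger is purely illustrative):

interface AutomationResults {
  steps: { stepId: string; outputs: any }[]
}
interface QueuedTrigger {
  jobId: string
}

// The `res is AutomationResults` return type lets the compiler narrow
// the union inside the branch that calls this guard.
function isAutomationResults(
  res: AutomationResults | QueuedTrigger
): res is AutomationResults {
  return "steps" in res
}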

View File

@ -58,12 +58,9 @@ if (apiEnabled()) {
})
)
.use(pro.licensing())
// @ts-ignore
.use(currentApp)
.use(auth.auditLog)
// @ts-ignore
.use(migrations)
// @ts-ignore
.use(cleanup)
// authenticated routes

View File

@ -2,10 +2,7 @@ import Router from "@koa/router"
import * as datasourceController from "../controllers/datasource"
import authorized from "../../middleware/authorized"
import { permissions } from "@budibase/backend-core"
import {
datasourceValidator,
datasourceQueryValidator,
} from "./utils/validators"
import { datasourceValidator } from "./utils/validators"
const router: Router = new Router()
@ -41,15 +38,6 @@ router
),
datasourceController.update
)
.post(
"/api/datasources/query",
authorized(
permissions.PermissionType.TABLE,
permissions.PermissionLevel.READ
),
datasourceQueryValidator(),
datasourceController.query
)
.post(
"/api/datasources/:datasourceId/schema",
authorized(permissions.BUILDER),

View File

@ -56,7 +56,7 @@ router
"/api/v2/queries/:queryId",
paramResource("queryId"),
authorized(PermissionType.QUERY, PermissionLevel.WRITE),
queryController.executeV2 as any
queryController.executeV2
)
export default router

View File

@ -5,6 +5,8 @@ import { paramResource, paramSubResource } from "../../middleware/resourceId"
import { permissions } from "@budibase/backend-core"
import { internalSearchValidator } from "./utils/validators"
import trimViewRowInfo from "../../middleware/trimViewRowInfo"
import { validateBody } from "../../middleware/zod-validator"
import { searchRowRequestValidator } from "@budibase/types"
const { PermissionType, PermissionLevel } = permissions
@ -32,6 +34,7 @@ router
.post(
"/api/:sourceId/search",
internalSearchValidator(),
validateBody(searchRowRequestValidator),
paramResource("sourceId"),
authorized(PermissionType.TABLE, PermissionLevel.READ),
rowController.search
@ -87,6 +90,7 @@ router
router.post(
"/api/v2/views/:viewId/search",
internalSearchValidator(),
validateBody(searchRowRequestValidator),
authorizedResource(PermissionType.VIEW, PermissionLevel.READ, "viewId"),
rowController.views.searchView
)
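
Note: both search routes now layer validateBody(searchRowRequestValidator) on top of the older Joi-based internalSearchValidator, checking the body against a zod schema before the controller runs. A minimal sketch of such a middleware; the real middleware/zod-validator implementation may differ:

import { ZodSchema } from "zod"
import { Ctx } from "@budibase/types"

export function validateBody(schema: ZodSchema) {
  return async (ctx: Ctx, next: () => Promise<any>) => {
    const result = schema.safeParse(ctx.request.body)
    if (!result.success) {
      // reject invalid payloads with zod's flattened error details
      return ctx.throw(400, JSON.stringify(result.error.flatten()))
    }
    // continue with the parsed (and possibly coerced) value
    ctx.request.body = result.data
    return next()
  }
}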

View File

@ -1,10 +0,0 @@
import Router from "@koa/router"
import * as controller from "../controllers/script"
import authorized from "../../middleware/authorized"
import { permissions } from "@budibase/backend-core"
const router: Router = new Router()
router.post("/api/script", authorized(permissions.BUILDER), controller.save)
export default router

View File

@ -169,331 +169,521 @@ const descriptions = datasourceDescribe({
})
if (descriptions.length) {
describe.each(descriptions)("$dbName", ({ config, dsProvider }) => {
let datasource: Datasource
let rawDatasource: Datasource
let client: Knex
describe.each(descriptions)(
"$dbName",
({ config, dsProvider, isOracle, isMSSQL }) => {
let datasource: Datasource
let rawDatasource: Datasource
let client: Knex
beforeEach(async () => {
const ds = await dsProvider()
rawDatasource = ds.rawDatasource!
datasource = ds.datasource!
client = ds.client!
beforeEach(async () => {
const ds = await dsProvider()
rawDatasource = ds.rawDatasource!
datasource = ds.datasource!
client = ds.client!
jest.clearAllMocks()
nock.cleanAll()
})
describe("get", () => {
it("should be able to get a datasource", async () => {
const ds = await config.api.datasource.get(datasource._id!)
expect(ds).toEqual({
config: expect.any(Object),
plus: datasource.plus,
source: datasource.source,
isSQL: true,
type: "datasource_plus",
_id: datasource._id,
_rev: expect.any(String),
createdAt: expect.any(String),
updatedAt: expect.any(String),
})
jest.clearAllMocks()
nock.cleanAll()
})
it("should not return database password", async () => {
const ds = await config.api.datasource.get(datasource._id!)
expect(ds.config!.password).toBe("--secret-value--")
})
})
describe("list", () => {
it("returns all the datasources", async () => {
const datasources = await config.api.datasource.fetch()
expect(datasources).toContainEqual(expect.objectContaining(datasource))
})
})
describe("put", () => {
it("should update an existing datasource", async () => {
const newName = generator.guid()
datasource.name = newName
const updatedDs = await config.api.datasource.update(datasource)
expect(updatedDs.name).toEqual(newName)
expect(events.datasource.updated).toHaveBeenCalledTimes(1)
})
it("should not overwrite database password with --secret-value--", async () => {
const password = await context.doInAppContext(
config.getAppId(),
async () => {
const ds = await sdk.datasources.get(datasource._id!)
return ds.config!.password
}
)
expect(password).not.toBe("--secret-value--")
const ds = await config.api.datasource.get(datasource._id!)
expect(ds.config!.password).toBe("--secret-value--")
await config.api.datasource.update(
await config.api.datasource.get(datasource._id!)
)
const newPassword = await context.doInAppContext(
config.getAppId(),
async () => {
const ds = await sdk.datasources.get(datasource._id!)
return ds.config!.password
}
)
expect(newPassword).not.toBe("--secret-value--")
expect(newPassword).toBe(password)
})
})
describe("destroy", () => {
it("deletes queries for the datasource after deletion and returns a success message", async () => {
await config.api.query.save({
datasourceId: datasource._id!,
name: "Test Query",
parameters: [],
fields: {},
schema: {},
queryVerb: "read",
transformer: null,
readable: true,
})
await config.api.datasource.delete(datasource)
const datasources = await config.api.datasource.fetch()
expect(datasources).not.toContainEqual(
expect.objectContaining(datasource)
)
expect(events.datasource.deleted).toHaveBeenCalledTimes(1)
})
})
describe("schema", () => {
it("fetching schema will not drop tables or columns", async () => {
const datasourceId = datasource!._id!
const simpleTable = await config.api.table.save(
tableForDatasource(datasource, {
name: "simple",
schema: {
name: {
name: "name",
type: FieldType.STRING,
},
},
describe("get", () => {
it("should be able to get a datasource", async () => {
const ds = await config.api.datasource.get(datasource._id!)
expect(ds).toEqual({
config: expect.any(Object),
plus: datasource.plus,
source: datasource.source,
isSQL: true,
type: "datasource_plus",
_id: datasource._id,
_rev: expect.any(String),
createdAt: expect.any(String),
updatedAt: expect.any(String),
})
)
const stringName = "string"
const fullSchema: {
[type in SupportedSqlTypes]: FieldSchema & { type: type }
} = {
[FieldType.STRING]: {
name: stringName,
type: FieldType.STRING,
},
[FieldType.LONGFORM]: {
name: "longform",
type: FieldType.LONGFORM,
},
[FieldType.OPTIONS]: {
name: "options",
type: FieldType.OPTIONS,
constraints: {
presence: {
allowEmpty: false,
},
inclusion: [],
},
},
[FieldType.NUMBER]: {
name: "number",
type: FieldType.NUMBER,
},
[FieldType.BOOLEAN]: {
name: "boolean",
type: FieldType.BOOLEAN,
},
[FieldType.ARRAY]: {
name: "array",
type: FieldType.ARRAY,
constraints: {
type: JsonFieldSubType.ARRAY,
inclusion: [],
},
},
[FieldType.DATETIME]: {
name: "datetime",
type: FieldType.DATETIME,
dateOnly: true,
timeOnly: false,
},
[FieldType.LINK]: {
name: "link",
type: FieldType.LINK,
tableId: simpleTable._id!,
relationshipType: RelationshipType.ONE_TO_MANY,
fieldName: "link",
},
[FieldType.FORMULA]: {
name: "formula",
type: FieldType.FORMULA,
formula: "any formula",
},
[FieldType.BARCODEQR]: {
name: "barcodeqr",
type: FieldType.BARCODEQR,
},
[FieldType.BIGINT]: {
name: "bigint",
type: FieldType.BIGINT,
},
[FieldType.BB_REFERENCE]: {
name: "bb_reference",
type: FieldType.BB_REFERENCE,
subtype: BBReferenceFieldSubType.USER,
},
[FieldType.BB_REFERENCE_SINGLE]: {
name: "bb_reference_single",
type: FieldType.BB_REFERENCE_SINGLE,
subtype: BBReferenceFieldSubType.USER,
},
}
await config.api.table.save(
tableForDatasource(datasource, {
name: "full",
schema: fullSchema,
})
)
const persisted = await config.api.datasource.get(datasourceId)
await config.api.datasource.fetchSchema({ datasourceId })
const updated = await config.api.datasource.get(datasourceId)
const expected: Datasource = {
...persisted,
entities:
persisted?.entities &&
Object.entries(persisted.entities).reduce<Record<string, Table>>(
(acc, [tableName, table]) => {
acc[tableName] = expect.objectContaining({
...table,
primaryDisplay: expect.not.stringMatching(
new RegExp(`^${table.primaryDisplay || ""}$`)
),
schema: Object.entries(table.schema).reduce<TableSchema>(
(acc, [fieldName, field]) => {
acc[fieldName] = {
...field,
externalType: allowUndefined(expect.any(String)),
constraints: allowUndefined(expect.any(Object)),
autocolumn: allowUndefined(expect.any(Boolean)),
}
return acc
},
{}
),
})
return acc
},
{}
),
_rev: expect.any(String),
updatedAt: expect.any(String),
}
expect(updated).toEqual(expected)
})
})
describe("verify", () => {
it("should be able to verify the connection", async () => {
await config.api.datasource.verify(
{
datasource: rawDatasource,
},
{
body: {
connected: true,
},
}
)
})
it("should state an invalid datasource cannot connect", async () => {
await config.api.datasource.verify(
{
datasource: {
...rawDatasource,
config: {
...rawDatasource.config,
password: "wrongpassword",
},
},
},
{
body: {
connected: false,
error: /.*/, // error message differs between databases
},
}
)
})
})
describe("info", () => {
it("should fetch information about a datasource with a single table", async () => {
const existingTableNames = (
await config.api.datasource.info(datasource)
).tableNames
const tableName = generator.guid()
await client.schema.createTable(tableName, table => {
table.increments("id").primary()
table.string("name")
})
const info = await config.api.datasource.info(datasource)
expect(info.tableNames).toEqual(
expect.arrayContaining([tableName, ...existingTableNames])
)
expect(info.tableNames).toHaveLength(existingTableNames.length + 1)
it("should not return database password", async () => {
const ds = await config.api.datasource.get(datasource._id!)
expect(ds.config!.password).toBe("--secret-value--")
})
})
it("should fetch information about a datasource with multiple tables", async () => {
const existingTableNames = (
await config.api.datasource.info(datasource)
).tableNames
describe("list", () => {
it("returns all the datasources", async () => {
const datasources = await config.api.datasource.fetch()
expect(datasources).toContainEqual(
expect.objectContaining(datasource)
)
})
})
const tableNames = [
generator.guid(),
generator.guid(),
generator.guid(),
generator.guid(),
]
for (const tableName of tableNames) {
describe("put", () => {
it("should update an existing datasource", async () => {
const newName = generator.guid()
datasource.name = newName
const updatedDs = await config.api.datasource.update(datasource)
expect(updatedDs.name).toEqual(newName)
expect(events.datasource.updated).toHaveBeenCalledTimes(1)
})
it("should not overwrite database password with --secret-value--", async () => {
const password = await context.doInAppContext(
config.getAppId(),
async () => {
const ds = await sdk.datasources.get(datasource._id!)
return ds.config!.password
}
)
expect(password).not.toBe("--secret-value--")
const ds = await config.api.datasource.get(datasource._id!)
expect(ds.config!.password).toBe("--secret-value--")
await config.api.datasource.update(
await config.api.datasource.get(datasource._id!)
)
const newPassword = await context.doInAppContext(
config.getAppId(),
async () => {
const ds = await sdk.datasources.get(datasource._id!)
return ds.config!.password
}
)
expect(newPassword).not.toBe("--secret-value--")
expect(newPassword).toBe(password)
})
})
describe("destroy", () => {
it("deletes queries for the datasource after deletion and returns a success message", async () => {
await config.api.query.save({
datasourceId: datasource._id!,
name: "Test Query",
parameters: [],
fields: {},
schema: {},
queryVerb: "read",
transformer: null,
readable: true,
})
await config.api.datasource.delete(datasource)
const datasources = await config.api.datasource.fetch()
expect(datasources).not.toContainEqual(
expect.objectContaining(datasource)
)
expect(events.datasource.deleted).toHaveBeenCalledTimes(1)
})
})
describe("schema", () => {
it("fetching schema will not drop tables or columns", async () => {
const datasourceId = datasource!._id!
const simpleTable = await config.api.table.save(
tableForDatasource(datasource, {
name: "simple",
schema: {
name: {
name: "name",
type: FieldType.STRING,
},
},
})
)
const stringName = "string"
const fullSchema: {
[type in SupportedSqlTypes]: FieldSchema & { type: type }
} = {
[FieldType.STRING]: {
name: stringName,
type: FieldType.STRING,
},
[FieldType.LONGFORM]: {
name: "longform",
type: FieldType.LONGFORM,
},
[FieldType.OPTIONS]: {
name: "options",
type: FieldType.OPTIONS,
constraints: {
presence: {
allowEmpty: false,
},
inclusion: ["1", "2", "3"],
},
},
[FieldType.NUMBER]: {
name: "number",
type: FieldType.NUMBER,
},
[FieldType.BOOLEAN]: {
name: "boolean",
type: FieldType.BOOLEAN,
},
[FieldType.ARRAY]: {
name: "array",
type: FieldType.ARRAY,
constraints: {
type: JsonFieldSubType.ARRAY,
inclusion: [],
},
},
[FieldType.DATETIME]: {
name: "datetime",
type: FieldType.DATETIME,
dateOnly: true,
timeOnly: false,
},
[FieldType.LINK]: {
name: "link",
type: FieldType.LINK,
tableId: simpleTable._id!,
relationshipType: RelationshipType.ONE_TO_MANY,
fieldName: "link",
},
[FieldType.FORMULA]: {
name: "formula",
type: FieldType.FORMULA,
formula: "any formula",
},
[FieldType.BARCODEQR]: {
name: "barcodeqr",
type: FieldType.BARCODEQR,
},
[FieldType.BIGINT]: {
name: "bigint",
type: FieldType.BIGINT,
},
[FieldType.BB_REFERENCE]: {
name: "bb_reference",
type: FieldType.BB_REFERENCE,
subtype: BBReferenceFieldSubType.USER,
},
[FieldType.BB_REFERENCE_SINGLE]: {
name: "bb_reference_single",
type: FieldType.BB_REFERENCE_SINGLE,
subtype: BBReferenceFieldSubType.USER,
},
}
await config.api.table.save(
tableForDatasource(datasource, {
name: "full",
schema: fullSchema,
})
)
const persisted = await config.api.datasource.get(datasourceId)
await config.api.datasource.fetchSchema({ datasourceId })
const updated = await config.api.datasource.get(datasourceId)
const expected: Datasource = {
...persisted,
entities:
persisted?.entities &&
Object.entries(persisted.entities).reduce<Record<string, Table>>(
(acc, [tableName, table]) => {
acc[tableName] = expect.objectContaining({
...table,
primaryDisplay: expect.not.stringMatching(
new RegExp(`^${table.primaryDisplay || ""}$`)
),
schema: Object.entries(table.schema).reduce<TableSchema>(
(acc, [fieldName, field]) => {
acc[fieldName] = {
...field,
externalType: allowUndefined(expect.any(String)),
constraints: allowUndefined(expect.any(Object)),
autocolumn: allowUndefined(expect.any(Boolean)),
}
return acc
},
{}
),
})
return acc
},
{}
),
_rev: expect.any(String),
updatedAt: expect.any(String),
}
expect(updated).toEqual(expected)
})
!isOracle &&
!isMSSQL &&
it("can fetch options columns with a large number of options", async () => {
const enumOptions = new Array(1000)
.fill(0)
.map((_, i) => i.toString())
.toSorted()
await client.schema.createTable("options", table => {
table.increments("id").primary()
table.enum("enum", enumOptions, {
useNative: true,
enumName: "enum",
})
})
const resp = await config.api.datasource.fetchSchema({
datasourceId: datasource._id!,
})
expect(resp.errors).toEqual({})
const table = resp.datasource.entities!.options
expect(
table.schema.enum.constraints!.inclusion!.toSorted()
).toEqual(enumOptions)
})
!isOracle &&
!isMSSQL &&
it("can fetch options with commas in them", async () => {
const enumOptions = [
"Lincoln, Abraham",
"Washington, George",
"Fred",
"Bob",
].toSorted()
await client.schema.createTable("options", table => {
table.increments("id").primary()
table.enum("enum", enumOptions, {
useNative: true,
enumName: "enum",
})
})
const resp = await config.api.datasource.fetchSchema({
datasourceId: datasource._id!,
})
expect(resp.errors).toEqual({})
const table = resp.datasource.entities!.options
expect(
table.schema.enum.constraints!.inclusion!.toSorted()
).toEqual(enumOptions)
})
!isOracle &&
!isMSSQL &&
it("can fetch options that may include other type names", async () => {
const enumOptions = [
"int",
"bigint",
"float",
"numeric",
"json",
"map",
].toSorted()
await client.schema.createTable("options", table => {
table.increments("id").primary()
table.enum("enum", enumOptions, {
useNative: true,
enumName: "enum",
})
})
const resp = await config.api.datasource.fetchSchema({
datasourceId: datasource._id!,
})
expect(resp.errors).toEqual({})
const table = resp.datasource.entities!.options
expect(
table.schema.enum.constraints!.inclusion!.toSorted()
).toEqual(enumOptions)
})
})
describe("verify", () => {
it("should be able to verify the connection", async () => {
await config.api.datasource.verify(
{
datasource: rawDatasource,
},
{
body: {
connected: true,
},
}
)
})
it("should state an invalid datasource cannot connect", async () => {
await config.api.datasource.verify(
{
datasource: {
...rawDatasource,
config: {
...rawDatasource.config,
password: "wrongpassword",
},
},
},
{
body: {
connected: false,
error: /.*/, // error message differs between databases
},
}
)
})
})
describe("info", () => {
it("should fetch information about a datasource with a single table", async () => {
const existingTableNames = (
await config.api.datasource.info(datasource)
).tableNames
const tableName = generator.guid()
await client.schema.createTable(tableName, table => {
table.increments("id").primary()
table.string("name")
})
}
const info = await config.api.datasource.info(datasource)
expect(info.tableNames).toEqual(
expect.arrayContaining([...tableNames, ...existingTableNames])
)
expect(info.tableNames).toHaveLength(
existingTableNames.length + tableNames.length
)
const info = await config.api.datasource.info(datasource)
expect(info.tableNames).toEqual(
expect.arrayContaining([tableName, ...existingTableNames])
)
expect(info.tableNames).toHaveLength(existingTableNames.length + 1)
})
it("should fetch information about a datasource with multiple tables", async () => {
const existingTableNames = (
await config.api.datasource.info(datasource)
).tableNames
const tableNames = [
generator.guid(),
generator.guid(),
generator.guid(),
generator.guid(),
]
for (const tableName of tableNames) {
await client.schema.createTable(tableName, table => {
table.increments("id").primary()
table.string("name")
})
}
const info = await config.api.datasource.info(datasource)
expect(info.tableNames).toEqual(
expect.arrayContaining([...tableNames, ...existingTableNames])
)
expect(info.tableNames).toHaveLength(
existingTableNames.length + tableNames.length
)
})
})
})
})
}
)
}
const datasources = datasourceDescribe({
exclude: [DatabaseName.MONGODB, DatabaseName.SQS, DatabaseName.ORACLE],
})
if (datasources.length) {
describe.each(datasources)(
"$dbName",
({ config, dsProvider, isPostgres, isMySQL, isMariaDB }) => {
let datasource: Datasource
let client: Knex
beforeEach(async () => {
const ds = await dsProvider()
datasource = ds.datasource!
client = ds.client!
})
describe("external export", () => {
let table: Table
beforeEach(async () => {
table = await config.api.table.save(
tableForDatasource(datasource, {
name: "simple",
primary: ["id"],
primaryDisplay: "name",
schema: {
id: {
name: "id",
autocolumn: true,
type: FieldType.NUMBER,
constraints: {
presence: false,
},
},
name: {
name: "name",
autocolumn: false,
type: FieldType.STRING,
constraints: {
presence: false,
},
},
},
})
)
})
it("should be able to export and reimport a schema", async () => {
let { schema } = await config.api.datasource.externalSchema(
datasource
)
if (isPostgres) {
// pg_dump 17 puts this config parameter into the dump but no DB < 17
// can load it. We're using postgres 16 in tests at the time of writing.
schema = schema.replace("SET transaction_timeout = 0;", "")
}
await config.api.table.destroy(table._id!, table._rev!)
if (isMySQL || isMariaDB) {
// MySQL/MariaDB clients don't let you run multiple queries in a
// single call. They also throw an error when given an empty query.
// The below handles both of these things.
for (let query of schema.split(";\n")) {
query = query.trim()
if (!query) {
continue
}
await client.raw(query)
}
} else {
await client.raw(schema)
}
await config.api.datasource.fetchSchema({
datasourceId: datasource._id!,
})
const tables = await config.api.table.fetch()
const newTable = tables.find(t => t.name === table.name)!
// This is only set on tables created through Budibase; we don't
// expect it to match after we import the table.
delete table.created
for (const field of Object.values(newTable.schema)) {
// Will differ per-database, not useful for this test.
delete field.externalType
}
expect(newTable).toEqual(table)
})
})
}
)
}

View File

@ -1,11 +1,11 @@
const { testAutomation } = require("./utilities/TestFunctions")
const setup = require("./utilities")
const { MetadataTypes } = require("../../../constants")
import { testAutomation } from "./utilities/TestFunctions"
import * as setup from "./utilities"
import { MetadataType, Automation } from "@budibase/types"
describe("/metadata", () => {
let request = setup.getRequest()
let config = setup.getConfig()
let automation
let automation: Automation
afterAll(setup.afterAll)
@ -15,8 +15,8 @@ describe("/metadata", () => {
})
async function createMetadata(
data,
type = MetadataTypes.AUTOMATION_TEST_INPUT
data: Record<string, string>,
type = MetadataType.AUTOMATION_TEST_INPUT
) {
const res = await request
.post(`/api/metadata/${type}/${automation._id}`)
@ -27,7 +27,7 @@ describe("/metadata", () => {
expect(res.body._rev).toBeDefined()
}
async function getMetadata(type) {
async function getMetadata(type: MetadataType) {
const res = await request
.get(`/api/metadata/${type}/${automation._id}`)
.set(config.defaultHeaders())
@ -39,14 +39,14 @@ describe("/metadata", () => {
describe("save", () => {
it("should be able to save some metadata", async () => {
await createMetadata({ test: "a" })
const testInput = await getMetadata(MetadataTypes.AUTOMATION_TEST_INPUT)
const testInput = await getMetadata(MetadataType.AUTOMATION_TEST_INPUT)
expect(testInput.test).toBe("a")
})
it("should save history metadata on automation run", async () => {
// this should have created some history
await testAutomation(config, automation)
const metadata = await getMetadata(MetadataTypes.AUTOMATION_TEST_HISTORY)
await testAutomation(config, automation, {})
const metadata = await getMetadata(MetadataType.AUTOMATION_TEST_HISTORY)
expect(metadata).toBeDefined()
expect(metadata.history.length).toBe(1)
expect(typeof metadata.history[0].occurredAt).toBe("number")
@ -57,13 +57,13 @@ describe("/metadata", () => {
it("should be able to delete some test inputs", async () => {
const res = await request
.delete(
`/api/metadata/${MetadataTypes.AUTOMATION_TEST_INPUT}/${automation._id}`
`/api/metadata/${MetadataType.AUTOMATION_TEST_INPUT}/${automation._id}`
)
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
expect(res.body.message).toBeDefined()
const metadata = await getMetadata(MetadataTypes.AUTOMATION_TEST_INPUT)
const metadata = await getMetadata(MetadataType.AUTOMATION_TEST_INPUT)
expect(metadata.test).toBeUndefined()
})
})

View File

@ -1,10 +1,4 @@
import {
Datasource,
Operation,
Query,
QueryPreview,
TableSourceType,
} from "@budibase/types"
import { Datasource, Query, QueryPreview } from "@budibase/types"
import {
DatabaseName,
datasourceDescribe,
@ -817,49 +811,6 @@ if (descriptions.length) {
})
describe("query through datasource", () => {
it("should be able to query the datasource", async () => {
const datasource = await config.api.datasource.create(rawDatasource)
const entityId = tableName
await config.api.datasource.update({
...datasource,
entities: {
[entityId]: {
name: entityId,
schema: {},
type: "table",
primary: ["id"],
sourceId: datasource._id!,
sourceType: TableSourceType.EXTERNAL,
},
},
})
const res = await config.api.datasource.query({
endpoint: {
datasourceId: datasource._id!,
operation: Operation.READ,
entityId,
},
resource: {
fields: ["id", "name"],
},
filters: {
string: {
name: "two",
},
},
})
expect(res).toHaveLength(1)
expect(res[0]).toEqual({
id: 2,
name: "two",
// the use of table.* introduces the possibility of nulls being returned
birthday: null,
number: null,
})
})
// this parameter really only impacts SQL queries
describe("confirm nullDefaultSupport", () => {
let queryParams: Partial<Query>

View File

@ -48,7 +48,7 @@ jest.mock("@budibase/pro", () => ({
ai: {
LargeLanguageModel: {
forCurrentTenant: async () => ({
initialised: true,
llm: {},
run: jest.fn(() => `Mock LLM Response`),
buildPromptFromAIOperation: jest.fn(),
}),
@ -2607,6 +2607,8 @@ if (descriptions.length) {
name: "foo",
description: "bar",
tableId,
createdAt: isInternal ? new Date().toISOString() : undefined,
updatedAt: isInternal ? new Date().toISOString() : undefined,
})
})
@ -2628,6 +2630,8 @@ if (descriptions.length) {
id: isInternal ? undefined : expect.any(Number),
type: isInternal ? "row" : undefined,
[`fk_${o2mTable.name}_fk_o2m`]: isInternal ? undefined : user.id,
createdAt: isInternal ? new Date().toISOString() : undefined,
updatedAt: isInternal ? new Date().toISOString() : undefined,
})
})
@ -2650,6 +2654,8 @@ if (descriptions.length) {
_rev: expect.any(String),
id: isInternal ? undefined : expect.any(Number),
type: isInternal ? "row" : undefined,
createdAt: isInternal ? new Date().toISOString() : undefined,
updatedAt: isInternal ? new Date().toISOString() : undefined,
})
})
@ -2729,6 +2735,8 @@ if (descriptions.length) {
id: isInternal ? undefined : expect.any(Number),
type: isInternal ? "row" : undefined,
[`fk_${o2mTable.name}_fk_o2m`]: isInternal ? undefined : user.id,
createdAt: isInternal ? new Date().toISOString() : undefined,
updatedAt: isInternal ? new Date().toISOString() : undefined,
})
})
@ -2745,15 +2753,8 @@ if (descriptions.length) {
user: null,
users: null,
})
expect(updatedRow).toEqual({
name: "foo",
description: "bar",
tableId,
_id: row._id,
_rev: expect.any(String),
id: isInternal ? undefined : expect.any(Number),
type: isInternal ? "row" : undefined,
})
expect(updatedRow.user).toBeUndefined()
expect(updatedRow.users).toBeUndefined()
})
it("fetch all will populate the relationships", async () => {
@ -3268,7 +3269,7 @@ if (descriptions.length) {
formula: {
name: "formula",
type: FieldType.FORMULA,
formula: formula,
formula,
responseType: opts?.responseType,
formulaType: opts?.formulaType || FormulaType.DYNAMIC,
},
@ -3495,6 +3496,72 @@ if (descriptions.length) {
)
})
})
if (!isInternal && !isOracle) {
describe("bigint ids", () => {
let table1: Table, table2: Table
let table1Name: string, table2Name: string
beforeAll(async () => {
table1Name = `table1-${generator.guid().substring(0, 5)}`
await client!.schema.createTable(table1Name, table => {
table.bigInteger("table1Id").primary()
})
table2Name = `table2-${generator.guid().substring(0, 5)}`
await client!.schema.createTable(table2Name, table => {
table.bigInteger("table2Id").primary()
table
.bigInteger("table1Ref")
.references("table1Id")
.inTable(table1Name)
})
const resp = await config.api.datasource.fetchSchema({
datasourceId: datasource!._id!,
})
const tables = Object.values(resp.datasource.entities || {})
table1 = tables.find(t => t.name === table1Name)!
table2 = tables.find(t => t.name === table2Name)!
await config.api.datasource.addExistingRelationship({
one: {
tableId: table2._id!,
relationshipName: "one",
foreignKey: "table1Ref",
},
many: {
tableId: table1._id!,
relationshipName: "many",
primaryKey: "table1Id",
},
})
})
it("should be able to fetch rows with related bigint ids", async () => {
const row = await config.api.row.save(table1._id!, {
table1Id: "1",
})
await config.api.row.save(table2._id!, {
table2Id: "2",
table1Ref: row.table1Id,
})
let resp = await config.api.row.search(table1._id!)
expect(resp.rows).toHaveLength(1)
expect(resp.rows[0]._id).toBe("%5B'1'%5D")
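// "%5B'1'%5D" is encodeURIComponent("['1']"): external row _ids are URL-encoded arrays of the primary key values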
expect(resp.rows[0].many).toHaveLength(1)
expect(resp.rows[0].many[0]._id).toBe("%5B'2'%5D")
resp = await config.api.row.search(table2._id!)
expect(resp.rows).toHaveLength(1)
expect(resp.rows[0]._id).toBe("%5B'2'%5D")
expect(resp.rows[0].one).toHaveLength(1)
expect(resp.rows[0].one[0]._id).toBe("%5B'1'%5D")
})
})
}
}
)
}

View File

@ -24,6 +24,7 @@ import {
JsonFieldSubType,
LogicalOperator,
RelationshipType,
RequiredKeys,
Row,
RowSearchParams,
SearchFilters,
@ -51,7 +52,7 @@ jest.mock("@budibase/pro", () => ({
ai: {
LargeLanguageModel: {
forCurrentTenant: async () => ({
initialised: true,
llm: {},
run: jest.fn(() => `Mock LLM Response`),
buildPromptFromAIOperation: jest.fn(),
}),
@ -208,9 +209,25 @@ if (descriptions.length) {
private async performSearch(): Promise<SearchResponse<Row>> {
if (isInMemory) {
return dataFilters.search(_.cloneDeep(rows), {
...this.query,
})
const inMemoryQuery: RequiredKeys<
Omit<RowSearchParams, "tableId">
> = {
sort: this.query.sort ?? undefined,
query: { ...this.query.query },
paginate: this.query.paginate,
bookmark: this.query.bookmark ?? undefined,
limit: this.query.limit,
sortOrder: this.query.sortOrder,
sortType: this.query.sortType ?? undefined,
version: this.query.version,
disableEscaping: this.query.disableEscaping,
countRows: this.query.countRows,
viewId: undefined,
fields: undefined,
indexer: undefined,
rows: undefined,
}
return dataFilters.search(_.cloneDeep(rows), inMemoryQuery)
} else {
return config.api.row.search(tableOrViewId, this.query)
}

View File

@ -1,5 +1,4 @@
import { auth, permissions } from "@budibase/backend-core"
import { DataSourceOperation } from "../../../constants"
import {
AutomationActionStepId,
AutomationStep,
@ -231,30 +230,6 @@ export function externalSearchValidator() {
)
}
export function datasourceQueryValidator() {
return auth.joiValidator.body(
Joi.object({
endpoint: Joi.object({
datasourceId: Joi.string().required(),
operation: Joi.string()
.required()
.valid(...Object.values(DataSourceOperation)),
entityId: Joi.string().required(),
}).required(),
resource: Joi.object({
fields: Joi.array().items(Joi.string()).optional(),
}).optional(),
body: Joi.object().optional(),
sort: Joi.object().optional(),
filters: filterObject().optional(),
paginate: Joi.object({
page: Joi.string().alphanum().optional(),
limit: Joi.number().optional(),
}).optional(),
})
)
}
export function webhookValidator() {
return auth.joiValidator.body(
Joi.object({

View File

@ -96,9 +96,15 @@ if (env.SELF_HOSTED) {
ACTION_IMPLS["EXECUTE_BASH"] = bash.run
// @ts-ignore
BUILTIN_ACTION_DEFINITIONS["EXECUTE_BASH"] = bash.definition
if (env.isTest()) {
BUILTIN_ACTION_DEFINITIONS["OPENAI"] = openai.definition
}
}
export async function getActionDefinitions() {
export async function getActionDefinitions(): Promise<
Record<keyof typeof AutomationActionStepId, AutomationStepDefinition>
> {
if (await features.flags.isEnabled(FeatureFlag.AUTOMATION_BRANCHING)) {
BUILTIN_ACTION_DEFINITIONS["BRANCH"] = branch.definition
}

View File

@ -94,7 +94,7 @@ export async function run({
})
try {
await queryController.executeV2(ctx, { isAutomation: true })
await queryController.executeV2AsAutomation(ctx)
const { data, ...rest } = ctx.body
return {

View File

@ -106,13 +106,15 @@ export async function run({
(await features.flags.isEnabled(FeatureFlag.BUDIBASE_AI)) &&
(await pro.features.isBudibaseAIEnabled())
let llm
let llmWrapper
if (budibaseAIEnabled || customConfigsEnabled) {
llm = await pro.ai.LargeLanguageModel.forCurrentTenant(inputs.model)
llmWrapper = await pro.ai.LargeLanguageModel.forCurrentTenant(
inputs.model
)
}
response = llm?.initialised
? await llm.run(inputs.prompt)
response = llmWrapper?.llm
? await llmWrapper.run(inputs.prompt)
: await legacyOpenAIPrompt(inputs)
return {

View File

@ -3,7 +3,6 @@ import {
AutomationStepDefinition,
AutomationStepType,
AutomationIOType,
AutomationResults,
Automation,
AutomationCustomIOType,
TriggerAutomationStepInputs,
@ -78,7 +77,7 @@ export async function run({
const db = context.getAppDB()
let automation = await db.get<Automation>(inputs.automation.automationId)
const response: AutomationResults = await triggers.externalTrigger(
const response = await triggers.externalTrigger(
automation,
{
fields: { ...fieldParams },
@ -88,9 +87,13 @@ export async function run({
{ getResponses: true }
)
return {
success: true,
value: response.steps,
if (triggers.isAutomationResults(response)) {
return {
success: true,
value: response.steps,
}
} else {
throw new Error("Automation did not have a collect block")
}
}
} else {

View File

@ -1,26 +1,148 @@
import { getConfig, afterAll as _afterAll, runStep } from "./utilities"
import { createAutomationBuilder } from "./utilities/AutomationTestBuilder"
import * as automation from "../index"
import * as setup from "./utilities"
import { Table } from "@budibase/types"
describe("test the bash action", () => {
let config = getConfig()
describe("Execute Bash Automations", () => {
let config = setup.getConfig(),
table: Table
beforeAll(async () => {
await automation.init()
await config.init()
})
afterAll(_afterAll)
it("should be able to execute a script", async () => {
let res = await runStep(config, "EXECUTE_BASH", {
code: "echo 'test'",
table = await config.createTable()
await config.createRow({
name: "test row",
description: "test description",
tableId: table._id!,
})
expect(res.stdout).toEqual("test\n")
expect(res.success).toEqual(true)
})
it("should handle a null value", async () => {
let res = await runStep(config, "EXECUTE_BASH", {
code: null,
afterAll(setup.afterAll)
it("should use trigger data in bash command and pass output to subsequent steps", async () => {
const result = await createAutomationBuilder({
name: "Bash with Trigger Data",
config,
})
expect(res.stdout).toEqual(
.appAction({ fields: { command: "hello world" } })
.bash(
{ code: "echo '{{ trigger.fields.command }}'" },
{ stepName: "Echo Command" }
)
.serverLog(
{ text: "Bash output was: {{ steps.[Echo Command].stdout }}" },
{ stepName: "Log Output" }
)
.run()
expect(result.steps[0].outputs.stdout).toEqual("hello world\n")
expect(result.steps[1].outputs.message).toContain(
"Bash output was: hello world"
)
})
it("should chain multiple bash commands using previous outputs", async () => {
const result = await createAutomationBuilder({
name: "Chained Bash Commands",
config,
})
.appAction({ fields: { filename: "testfile.txt" } })
.bash(
{ code: "echo 'initial content' > {{ trigger.fields.filename }}" },
{ stepName: "Create File" }
)
.bash(
{ code: "cat {{ trigger.fields.filename }} | tr '[a-z]' '[A-Z]'" },
{ stepName: "Transform Content" }
)
.bash(
{ code: "rm {{ trigger.fields.filename }}" },
{ stepName: "Cleanup" }
)
.run()
expect(result.steps[1].outputs.stdout).toEqual("INITIAL CONTENT\n")
expect(result.steps[1].outputs.success).toEqual(true)
})
it("should integrate bash output with row operations", async () => {
const result = await createAutomationBuilder({
name: "Bash with Row Operations",
config,
})
.appAction({ fields: {} })
.queryRows(
{
tableId: table._id!,
filters: {},
},
{ stepName: "Get Row" }
)
.bash(
{
code: "echo Row data: {{ steps.[Get Row].rows.[0].name }} - {{ steps.[Get Row].rows.[0].description }}",
},
{ stepName: "Process Row Data" }
)
.serverLog(
{ text: "{{ steps.[Process Row Data].stdout }}" },
{ stepName: "Log Result" }
)
.run()
expect(result.steps[1].outputs.stdout).toContain(
"Row data: test row - test description"
)
expect(result.steps[2].outputs.message).toContain(
"Row data: test row - test description"
)
})
it("should handle bash output in conditional logic", async () => {
const result = await createAutomationBuilder({
name: "Bash with Conditional",
config,
})
.appAction({ fields: { threshold: "5" } })
.bash(
{ code: "echo $(( {{ trigger.fields.threshold }} + 5 ))" },
{ stepName: "Calculate Value" }
)
.executeScript(
{
code: `
const value = parseInt(steps["Calculate Value"].stdout);
return value > 8 ? "high" : "low";
`,
},
{ stepName: "Check Value" }
)
.serverLog(
{ text: "Value was {{ steps.[Check Value].value }}" },
{ stepName: "Log Result" }
)
.run()
expect(result.steps[0].outputs.stdout).toEqual("10\n")
expect(result.steps[1].outputs.value).toEqual("high")
expect(result.steps[2].outputs.message).toContain("Value was high")
})
it("should handle null values gracefully", async () => {
const result = await createAutomationBuilder({
name: "Null Bash Input",
config,
})
.appAction({ fields: {} })
.bash(
//@ts-ignore
{ code: null },
{ stepName: "Null Command" }
)
.run()
expect(result.steps[0].outputs.stdout).toBe(
"Budibase bash automation failed: Invalid inputs"
)
})

View File

@ -3,7 +3,7 @@ import * as triggers from "../triggers"
import { loopAutomation } from "../../tests/utilities/structures"
import { context } from "@budibase/backend-core"
import * as setup from "./utilities"
import { Table, LoopStepType } from "@budibase/types"
import { Table, LoopStepType, AutomationResults } from "@budibase/types"
import * as loopUtils from "../loopUtils"
import { LoopInput } from "../../definitions/automations"
@ -20,15 +20,19 @@ describe("Attempt to run a basic loop automation", () => {
afterAll(setup.afterAll)
async function runLoop(loopOpts?: LoopInput) {
async function runLoop(loopOpts?: LoopInput): Promise<AutomationResults> {
const appId = config.getAppId()
return await context.doInAppContext(appId, async () => {
const params = { fields: { appId } }
return await triggers.externalTrigger(
const result = await triggers.externalTrigger(
loopAutomation(table._id!, loopOpts),
params,
{ getResponses: true }
)
if ("outputs" in result && !result.outputs.success) {
throw new Error("Unable to proceed - failed to return anything.")
}
return result as AutomationResults
})
}

View File

@ -1,7 +1,9 @@
import { getConfig, runStep, afterAll as _afterAll } from "./utilities"
import { getConfig, afterAll as _afterAll } from "./utilities"
import { createAutomationBuilder } from "./utilities/AutomationTestBuilder"
import { OpenAI } from "openai"
import { setEnv as setCoreEnv } from "@budibase/backend-core"
import * as pro from "@budibase/pro"
import { Model } from "@budibase/types"
jest.mock("openai", () => ({
OpenAI: jest.fn().mockImplementation(() => ({
@ -25,7 +27,7 @@ jest.mock("@budibase/pro", () => ({
ai: {
LargeLanguageModel: {
forCurrentTenant: jest.fn().mockImplementation(() => ({
initialised: true,
llm: {},
init: jest.fn(),
run: jest.fn(),
})),
@ -47,6 +49,7 @@ describe("test the openai action", () => {
let resetEnv: () => void | undefined
beforeAll(async () => {
setCoreEnv({ SELF_HOSTED: true })
await config.init()
})
@ -62,17 +65,39 @@ describe("test the openai action", () => {
afterAll(_afterAll)
it("should be able to receive a response from ChatGPT given a prompt", async () => {
const res = await runStep(config, "OPENAI", { prompt: OPENAI_PROMPT })
expect(res.response).toEqual("This is a test")
expect(res.success).toBeTruthy()
setCoreEnv({ SELF_HOSTED: true })
const result = await createAutomationBuilder({
name: "Test OpenAI Response",
config,
})
.appAction({ fields: {} })
.openai(
{ prompt: OPENAI_PROMPT, model: Model.GPT_4O_MINI },
{ stepName: "Basic OpenAI Query" }
)
.run()
expect(result.steps[0].outputs.response).toEqual("This is a test")
expect(result.steps[0].outputs.success).toBeTruthy()
})
it("should present the correct error message when a prompt is not provided", async () => {
const res = await runStep(config, "OPENAI", { prompt: null })
expect(res.response).toEqual(
const result = await createAutomationBuilder({
name: "Test OpenAI No Prompt",
config,
})
.appAction({ fields: {} })
.openai(
{ prompt: "", model: Model.GPT_4O_MINI },
{ stepName: "Empty Prompt Query" }
)
.run()
expect(result.steps[0].outputs.response).toEqual(
"Budibase OpenAI Automation Failed: No prompt supplied"
)
expect(res.success).toBeFalsy()
expect(result.steps[0].outputs.success).toBeFalsy()
})
it("should present the correct error message when an error is thrown from the createChatCompletion call", async () => {
@ -91,14 +116,21 @@ describe("test the openai action", () => {
} as any)
)
const res = await runStep(config, "OPENAI", {
prompt: OPENAI_PROMPT,
const result = await createAutomationBuilder({
name: "Test OpenAI Error",
config,
})
.appAction({ fields: {} })
.openai(
{ prompt: OPENAI_PROMPT, model: Model.GPT_4O_MINI },
{ stepName: "Error Producing Query" }
)
.run()
expect(res.response).toEqual(
expect(result.steps[0].outputs.response).toEqual(
"Error: An error occurred while calling createChatCompletion"
)
expect(res.success).toBeFalsy()
expect(result.steps[0].outputs.success).toBeFalsy()
})
it("should ensure that the pro AI module is called when the budibase AI features are enabled", async () => {
@ -106,10 +138,19 @@ describe("test the openai action", () => {
jest.spyOn(pro.features, "isAICustomConfigsEnabled").mockResolvedValue(true)
const prompt = "What is the meaning of life?"
await runStep(config, "OPENAI", {
model: "gpt-4o-mini",
prompt,
await createAutomationBuilder({
name: "Test OpenAI Pro Features",
config,
})
.appAction({ fields: {} })
.openai(
{
model: Model.GPT_4O_MINI,
prompt,
},
{ stepName: "Pro Features Query" }
)
.run()
expect(pro.ai.LargeLanguageModel.forCurrentTenant).toHaveBeenCalledWith(
"gpt-4o-mini"

View File

@ -1,5 +1,7 @@
import { Table } from "@budibase/types"
import { EmptyFilterOption, SortOrder, Table } from "@budibase/types"
import * as setup from "./utilities"
import { createAutomationBuilder } from "./utilities/AutomationTestBuilder"
import * as automation from "../index"
const NAME = "Test"
@ -8,6 +10,7 @@ describe("Test a query step automation", () => {
let config = setup.getConfig()
beforeAll(async () => {
await automation.init()
await config.init()
table = await config.createTable()
const row = {
@ -22,107 +25,132 @@ describe("Test a query step automation", () => {
afterAll(setup.afterAll)
it("should be able to run the query step", async () => {
const inputs = {
tableId: table._id,
filters: {
equal: {
name: NAME,
},
},
sortColumn: "name",
sortOrder: "ascending",
limit: 10,
}
const res = await setup.runStep(
const result = await createAutomationBuilder({
name: "Basic Query Test",
config,
setup.actions.QUERY_ROWS.stepId,
inputs
)
expect(res.success).toBe(true)
expect(res.rows).toBeDefined()
expect(res.rows.length).toBe(2)
expect(res.rows[0].name).toBe(NAME)
})
.appAction({ fields: {} })
.queryRows(
{
tableId: table._id!,
filters: {
equal: {
name: NAME,
},
},
sortColumn: "name",
sortOrder: SortOrder.ASCENDING,
limit: 10,
},
{ stepName: "Query All Rows" }
)
.run()
expect(result.steps[0].outputs.success).toBe(true)
expect(result.steps[0].outputs.rows).toBeDefined()
expect(result.steps[0].outputs.rows.length).toBe(2)
expect(result.steps[0].outputs.rows[0].name).toBe(NAME)
})
it("Returns all rows when onEmptyFilter has no value and no filters are passed", async () => {
const inputs = {
tableId: table._id,
filters: {},
sortColumn: "name",
sortOrder: "ascending",
limit: 10,
}
const res = await setup.runStep(
const result = await createAutomationBuilder({
name: "Empty Filter Test",
config,
setup.actions.QUERY_ROWS.stepId,
inputs
)
expect(res.success).toBe(true)
expect(res.rows).toBeDefined()
expect(res.rows.length).toBe(2)
expect(res.rows[0].name).toBe(NAME)
})
.appAction({ fields: {} })
.queryRows(
{
tableId: table._id!,
filters: {},
sortColumn: "name",
sortOrder: SortOrder.ASCENDING,
limit: 10,
},
{ stepName: "Query With Empty Filter" }
)
.run()
expect(result.steps[0].outputs.success).toBe(true)
expect(result.steps[0].outputs.rows).toBeDefined()
expect(result.steps[0].outputs.rows.length).toBe(2)
expect(result.steps[0].outputs.rows[0].name).toBe(NAME)
})
it("Returns no rows when onEmptyFilter is RETURN_NONE and theres no filters", async () => {
const inputs = {
tableId: table._id,
filters: {},
"filters-def": [],
sortColumn: "name",
sortOrder: "ascending",
limit: 10,
onEmptyFilter: "none",
}
const res = await setup.runStep(
const result = await createAutomationBuilder({
name: "Return None Test",
config,
setup.actions.QUERY_ROWS.stepId,
inputs
)
expect(res.success).toBe(false)
expect(res.rows).toBeDefined()
expect(res.rows.length).toBe(0)
})
.appAction({ fields: {} })
.queryRows(
{
tableId: table._id!,
filters: {},
"filters-def": [],
sortColumn: "name",
sortOrder: SortOrder.ASCENDING,
limit: 10,
onEmptyFilter: EmptyFilterOption.RETURN_NONE,
},
{ stepName: "Query With Return None" }
)
.run()
expect(result.steps[0].outputs.success).toBe(false)
expect(result.steps[0].outputs.rows).toBeDefined()
expect(result.steps[0].outputs.rows.length).toBe(0)
})
it("Returns no rows when onEmptyFilters RETURN_NONE and a filter is passed with a null value", async () => {
const inputs = {
tableId: table._id,
onEmptyFilter: "none",
filters: {},
"filters-def": [
{
value: null,
},
],
sortColumn: "name",
sortOrder: "ascending",
limit: 10,
}
const res = await setup.runStep(
const result = await createAutomationBuilder({
name: "Null Filter Test",
config,
setup.actions.QUERY_ROWS.stepId,
inputs
)
expect(res.success).toBe(false)
expect(res.rows).toBeDefined()
expect(res.rows.length).toBe(0)
})
.appAction({ fields: {} })
.queryRows(
{
tableId: table._id!,
onEmptyFilter: EmptyFilterOption.RETURN_NONE,
filters: {},
"filters-def": [
{
value: null,
},
],
sortColumn: "name",
sortOrder: SortOrder.ASCENDING,
limit: 10,
},
{ stepName: "Query With Null Filter" }
)
.run()
expect(result.steps[0].outputs.success).toBe(false)
expect(result.steps[0].outputs.rows).toBeDefined()
expect(result.steps[0].outputs.rows.length).toBe(0)
})
it("Returns rows when onEmptyFilter is RETURN_ALL and no filter is passed", async () => {
const inputs = {
tableId: table._id,
onEmptyFilter: "all",
filters: {},
sortColumn: "name",
sortOrder: "ascending",
limit: 10,
}
const res = await setup.runStep(
const result = await createAutomationBuilder({
name: "Return All Test",
config,
setup.actions.QUERY_ROWS.stepId,
inputs
)
expect(res.success).toBe(true)
expect(res.rows).toBeDefined()
expect(res.rows.length).toBe(2)
})
.appAction({ fields: {} })
.queryRows(
{
tableId: table._id!,
onEmptyFilter: EmptyFilterOption.RETURN_ALL,
filters: {},
sortColumn: "name",
sortOrder: SortOrder.ASCENDING,
limit: 10,
},
{ stepName: "Query With Return All" }
)
.run()
expect(result.steps[0].outputs.success).toBe(true)
expect(result.steps[0].outputs.rows).toBeDefined()
expect(result.steps[0].outputs.rows.length).toBe(2)
})
})

View File

@ -152,6 +152,44 @@ describe("Loop automations", () => {
)
})
it("ensure the loop stops if the max iterations are reached", async () => {
const builder = createAutomationBuilder({
name: "Test Loop max iterations",
})
const results = await builder
.appAction({ fields: {} })
.loop({
option: LoopStepType.ARRAY,
binding: ["test", "test2", "test3"],
iterations: 2,
})
.serverLog({ text: "{{loop.currentItem}}" })
.serverLog({ text: "{{steps.1.iterations}}" })
.run()
expect(results.steps[0].outputs.iterations).toBe(2)
})
it("should run an automation with loop and max iterations to ensure context correctness further down the tree", async () => {
const builder = createAutomationBuilder({
name: "Test context down tree with Loop and max iterations",
})
const results = await builder
.appAction({ fields: {} })
.loop({
option: LoopStepType.ARRAY,
binding: ["test", "test2", "test3"],
iterations: 2,
})
.serverLog({ text: "{{loop.currentItem}}" })
.serverLog({ text: "{{steps.1.iterations}}" })
.run()
expect(results.steps[1].outputs.message).toContain("- 2")
})
it("should run an automation where a loop is successfully run twice", async () => {
const builder = createAutomationBuilder({
name: "Test Trigger with Loop and Create Row",

View File

@ -35,6 +35,8 @@ import {
Branch,
FilterStepInputs,
ExecuteScriptStepInputs,
OpenAIStepInputs,
BashStepInputs,
} from "@budibase/types"
import TestConfiguration from "../../../tests/utilities/TestConfiguration"
import * as setup from "../utilities"
@ -221,6 +223,30 @@ class BaseStepBuilder {
input
)
}
bash(
input: BashStepInputs,
opts?: { stepName?: string; stepId?: string }
): this {
return this.step(
AutomationActionStepId.EXECUTE_BASH,
BUILTIN_ACTION_DEFINITIONS.EXECUTE_BASH,
input,
opts
)
}
openai(
input: OpenAIStepInputs,
opts?: { stepName?: string; stepId?: string }
): this {
return this.step(
AutomationActionStepId.OPENAI,
BUILTIN_ACTION_DEFINITIONS.OPENAI,
input,
opts
)
}
}
class StepBuilder extends BaseStepBuilder {
build(): AutomationStep[] {

View File

@ -20,6 +20,7 @@ import {
AutomationStatus,
AutomationRowEvent,
UserBindings,
AutomationResults,
} from "@budibase/types"
import { executeInThread } from "../threads/automation"
import { dataFilters, sdk } from "@budibase/shared-core"
@ -32,6 +33,14 @@ const JOB_OPTS = {
import * as automationUtils from "../automations/automationUtils"
import { doesTableExist } from "../sdk/app/tables/getters"
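// shape returned by externalTrigger when a row trigger's filters reject the event, so the automation never runs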
type DidNotTriggerResponse = {
outputs: {
success: false
status: AutomationStatus.STOPPED
}
message: AutomationStoppedReason.TRIGGER_FILTER_NOT_MET
}
async function getAllAutomations() {
const db = context.getAppDB()
let automations = await db.allDocs<Automation>(
@ -139,6 +148,14 @@ function rowPassesFilters(row: Row, filters: SearchFilters) {
return filteredRows.length > 0
}
export function isAutomationResults(
response: AutomationResults | DidNotTriggerResponse | AutomationJob
): response is AutomationResults {
return (
response !== null && "steps" in response && Array.isArray(response.steps)
)
}
export async function externalTrigger(
automation: Automation,
params: {
@ -148,7 +165,7 @@ export async function externalTrigger(
user?: UserBindings
},
{ getResponses }: { getResponses?: boolean } = {}
): Promise<any> {
): Promise<AutomationResults | DidNotTriggerResponse | AutomationJob> {
if (automation.disabled) {
throw new Error("Automation is disabled")
}

View File

@ -2,16 +2,18 @@ import { Thread, ThreadType } from "../threads"
import { definitions } from "./triggerInfo"
import { automationQueue } from "./bullboard"
import { updateEntityMetadata } from "../utilities"
import { MetadataTypes } from "../constants"
import { context, db as dbCore, utils } from "@budibase/backend-core"
import { getAutomationMetadataParams } from "../db/utils"
import { cloneDeep } from "lodash/fp"
import { quotas } from "@budibase/pro"
import {
Automation,
AutomationActionStepId,
AutomationJob,
AutomationStepDefinition,
AutomationTriggerDefinition,
AutomationTriggerStepId,
MetadataType,
} from "@budibase/types"
import { automationsEnabled } from "../features"
import { helpers, REBOOT_CRON } from "@budibase/shared-core"
@ -105,7 +107,7 @@ export async function updateTestHistory(
history: any
) {
return updateEntityMetadata(
MetadataTypes.AUTOMATION_TEST_HISTORY,
MetadataType.AUTOMATION_TEST_HISTORY,
automation._id,
(metadata: any) => {
if (metadata && Array.isArray(metadata.history)) {
@ -120,19 +122,21 @@ export async function updateTestHistory(
)
}
export function removeDeprecated(
definitions: Record<
string,
AutomationStepDefinition | AutomationTriggerDefinition
>
) {
const base = cloneDeep(definitions)
export function removeDeprecated<
T extends
| Record<keyof typeof AutomationTriggerStepId, AutomationTriggerDefinition>
| Record<keyof typeof AutomationActionStepId, AutomationStepDefinition>
>(definitions: T): T {
const base: Record<
string,
AutomationTriggerDefinition | AutomationStepDefinition
> = cloneDeep(definitions)
for (let key of Object.keys(base)) {
if (base[key].deprecated) {
delete base[key]
}
}
return base
return base as T
}
// end the repetition and the job itself

View File

@ -45,17 +45,6 @@ export enum AuthTypes {
EXTERNAL = "external",
}
export enum DataSourceOperation {
CREATE = "CREATE",
READ = "READ",
UPDATE = "UPDATE",
DELETE = "DELETE",
BULK_CREATE = "BULK_CREATE",
CREATE_TABLE = "CREATE_TABLE",
UPDATE_TABLE = "UPDATE_TABLE",
DELETE_TABLE = "DELETE_TABLE",
}
export enum DatasourceAuthTypes {
GOOGLE = "google",
}
@ -135,11 +124,6 @@ export enum BaseQueryVerbs {
DELETE = "delete",
}
export enum MetadataTypes {
AUTOMATION_TEST_INPUT = "automationTestInput",
AUTOMATION_TEST_HISTORY = "automationTestHistory",
}
export enum InvalidColumns {
ID = "_id",
REV = "_rev",
@ -148,7 +132,6 @@ export enum InvalidColumns {
export enum AutomationErrors {
INCORRECT_TYPE = "INCORRECT_TYPE",
MAX_ITERATIONS = "MAX_ITERATIONS_REACHED",
FAILURE_CONDITION = "FAILURE_CONDITION_MET",
}

View File

@ -26,3 +26,6 @@ export interface AutomationContext extends AutomationResults {
company?: string
}
}
export interface AutomationResponse
extends Omit<AutomationContext, "stepsByName" | "stepsById"> {}

View File

@ -1,37 +1,39 @@
import {
QueryJson,
Datasource,
DatasourcePlusQueryResponse,
RowOperations,
EnrichedQueryJson,
QueryJson,
} from "@budibase/types"
import { getIntegration } from "../index"
import sdk from "../../sdk"
import { enrichQueryJson } from "../../sdk/app/rows/utils"
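// a QueryJson counts as enriched once its datasource and table metadata have been resolved onto it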
function isEnriched(
json: QueryJson | EnrichedQueryJson
): json is EnrichedQueryJson {
return "datasource" in json
}
export async function makeExternalQuery(
datasource: Datasource,
json: QueryJson
json: QueryJson | EnrichedQueryJson
): Promise<DatasourcePlusQueryResponse> {
const entityId = json.endpoint.entityId,
tableName = json.meta.table.name,
tableId = json.meta.table._id
// case found during testing - make sure this doesn't happen again
if (
RowOperations.includes(json.endpoint.operation) &&
entityId !== tableId &&
entityId !== tableName
) {
throw new Error("Entity ID and table metadata do not align")
if (!isEnriched(json)) {
json = await enrichQueryJson(json)
if (json.datasource) {
json.datasource = await sdk.datasources.enrich(json.datasource)
}
}
if (!datasource) {
if (!json.datasource) {
throw new Error("No datasource provided for external query")
}
datasource = await sdk.datasources.enrich(datasource)
const Integration = await getIntegration(datasource.source)
const Integration = await getIntegration(json.datasource.source)
// query is the opinionated function
if (Integration.prototype.query) {
const integration = new Integration(datasource.config)
return integration.query(json)
} else {
if (!Integration.prototype.query) {
throw "Datasource does not support query."
}
const integration = new Integration(json.datasource.config)
return integration.query(json)
}

View File

@ -7,7 +7,6 @@ import {
Integration,
Operation,
PaginationJson,
QueryJson,
QueryType,
Row,
Schema,
@ -18,6 +17,7 @@ import {
TableSourceType,
DatasourcePlusQueryResponse,
BBReferenceFieldSubType,
EnrichedQueryJson,
} from "@budibase/types"
import { OAuth2Client } from "google-auth-library"
import {
@ -381,9 +381,9 @@ export class GoogleSheetsIntegration implements DatasourcePlus {
return { tables: externalTables, errors }
}
async query(json: QueryJson): Promise<DatasourcePlusQueryResponse> {
const sheet = json.endpoint.entityId
switch (json.endpoint.operation) {
async query(json: EnrichedQueryJson): Promise<DatasourcePlusQueryResponse> {
const sheet = json.table.name
switch (json.operation) {
case Operation.CREATE:
return this.create({ sheet, row: json.body as Row })
case Operation.BULK_CREATE:
@ -400,7 +400,7 @@ export class GoogleSheetsIntegration implements DatasourcePlus {
rowIndex: json.extra?.idFilter?.equal?.rowNumber,
sheet,
row: json.body,
table: json.meta.table,
table: json.table,
})
case Operation.DELETE:
return this.delete({
@ -426,7 +426,7 @@ export class GoogleSheetsIntegration implements DatasourcePlus {
return this.deleteTable(json?.table?.name)
default:
throw new Error(
`GSheets integration does not support "${json.endpoint.operation}".`
`GSheets integration does not support "${json.operation}".`
)
}
}

View File

@ -4,9 +4,9 @@ import {
DatasourceFieldType,
DatasourcePlus,
DatasourcePlusQueryResponse,
EnrichedQueryJson,
Integration,
Operation,
QueryJson,
QueryType,
Schema,
SourceName,
@ -193,6 +193,34 @@ const SCHEMA: Integration = {
},
}
interface MSSQLColumnDefinition {
TableName: string
ColumnName: string
DataType: string
MaxLength: number
IsNullable: boolean
IsIdentity: boolean
Precision: number
Scale: number
}
interface ColumnDefinitionMetadata {
usesMaxLength?: boolean
usesPrecision?: boolean
}
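// SQL Server types whose DDL needs a length suffix (e.g. NVARCHAR(255)) or precision and scale (e.g. DECIMAL(18, 2))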
const COLUMN_DEFINITION_METADATA: Record<string, ColumnDefinitionMetadata> = {
DATETIME2: { usesMaxLength: true },
TIME: { usesMaxLength: true },
DATETIMEOFFSET: { usesMaxLength: true },
NCHAR: { usesMaxLength: true },
NVARCHAR: { usesMaxLength: true },
BINARY: { usesMaxLength: true },
VARBINARY: { usesMaxLength: true },
DECIMAL: { usesPrecision: true },
NUMERIC: { usesPrecision: true },
}
class SqlServerIntegration extends Sql implements DatasourcePlus {
private readonly config: MSSQLConfig
private index: number = 0
@ -342,7 +370,8 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
? `${query.sql}; SELECT SCOPE_IDENTITY() AS id;`
: query.sql
this.log(sql, query.bindings)
return await request.query(sql)
const resp = await request.query(sql)
return resp
} catch (err: any) {
let readableMessage = getReadableErrorMessage(
SourceName.SQL_SERVER,
@ -505,43 +534,45 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
return response.recordset || [{ deleted: true }]
}
async query(json: QueryJson): Promise<DatasourcePlusQueryResponse> {
async query(json: EnrichedQueryJson): Promise<DatasourcePlusQueryResponse> {
const schema = this.config.schema
await this.connect()
if (schema && schema !== DEFAULT_SCHEMA && json?.endpoint) {
json.endpoint.schema = schema
if (schema && schema !== DEFAULT_SCHEMA) {
json.schema = schema
}
const operation = this._operation(json)
const queryFn = (query: any, op: string) => this.internalQuery(query, op)
const processFn = (result: any) => {
if (json?.meta?.table && result.recordset) {
if (result.recordset) {
return this.convertJsonStringColumns(
json.meta.table,
json.table,
result.recordset,
json.tableAliases
)
} else if (result.recordset) {
return result.recordset
}
return [{ [operation]: true }]
}
return this.queryWithReturning(json, queryFn, processFn)
}
async getExternalSchema() {
private async getColumnDefinitions(): Promise<MSSQLColumnDefinition[]> {
// Query to retrieve table schema
const query = `
SELECT
t.name AS TableName,
c.name AS ColumnName,
ty.name AS DataType,
ty.precision AS Precision,
ty.scale AS Scale,
c.max_length AS MaxLength,
c.is_nullable AS IsNullable,
c.is_identity AS IsIdentity
FROM
sys.tables t
INNER JOIN sys.columns c ON t.object_id = c.object_id
INNER JOIN sys.types ty ON c.system_type_id = ty.system_type_id
INNER JOIN sys.types ty
ON c.system_type_id = ty.system_type_id
AND c.user_type_id = ty.user_type_id
WHERE
t.is_ms_shipped = 0
ORDER BY
@ -554,17 +585,36 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
sql: query,
})
return result.recordset as MSSQLColumnDefinition[]
}
private getDataType(columnDef: MSSQLColumnDefinition): string {
const { DataType, MaxLength, Precision, Scale } = columnDef
const { usesMaxLength = false, usesPrecision = false } =
COLUMN_DEFINITION_METADATA[DataType] || {}
let dataType = DataType
if (usesMaxLength) {
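// SQL Server reports max_length as -1 for MAX-sized columns, e.g. NVARCHAR(MAX)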
if (MaxLength === -1) {
dataType += `(MAX)`
} else {
dataType += `(${MaxLength})`
}
}
if (usesPrecision) {
dataType += `(${Precision}, ${Scale})`
}
return dataType
}
async getExternalSchema() {
const scriptParts = []
const tables: any = {}
for (const row of result.recordset) {
const {
TableName,
ColumnName,
DataType,
MaxLength,
IsNullable,
IsIdentity,
} = row
const columns = await this.getColumnDefinitions()
for (const row of columns) {
const { TableName, ColumnName, IsNullable, IsIdentity } = row
if (!tables[TableName]) {
tables[TableName] = {
@ -572,9 +622,11 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
}
}
const columnDefinition = `${ColumnName} ${DataType}${
MaxLength ? `(${MaxLength})` : ""
}${IsNullable ? " NULL" : " NOT NULL"}`
const nullable = IsNullable ? "NULL" : "NOT NULL"
const identity = IsIdentity ? "IDENTITY" : ""
const columnDefinition = `[${ColumnName}] ${this.getDataType(
row
)} ${nullable} ${identity}`
tables[TableName].columns.push(columnDefinition)

Some files were not shown because too many files have changed in this diff.