Merge branch 'master' of github.com:Budibase/budibase into chore/api-typing-2

This commit is contained in:
mike12345567 2024-12-03 12:39:37 +00:00
commit 009a2749c5
13 changed files with 766 additions and 445 deletions

View File

@@ -1,6 +1,6 @@
{
"$schema": "node_modules/lerna/schemas/lerna-schema.json",
"version": "3.2.18",
"version": "3.2.19",
"npmClient": "yarn",
"concurrency": 20,
"command": {

View File

@@ -2,7 +2,7 @@ import { outputProcessing } from "../../utilities/rowProcessor"
import { InternalTables } from "../../db/utils"
import { getFullUser } from "../../utilities/users"
import { roles, context, db as dbCore } from "@budibase/backend-core"
import { AppSelfResponse, ContextUser, Row, UserCtx } from "@budibase/types"
import { AppSelfResponse, ContextUser, UserCtx } from "@budibase/types"
import sdk from "../../sdk"
import { processUser } from "../../utilities/global"
@@ -45,7 +45,7 @@ export async function fetchSelf(ctx: UserCtx<void, AppSelfResponse>) {
try {
const userTable = await sdk.tables.getTable(InternalTables.USER_METADATA)
// specifically needs to make sure the user is enriched
ctx.body = (await outputProcessing(userTable, user as Row)) as ContextUser
ctx.body = await outputProcessing(userTable, user)
} catch (err: any) {
let response: ContextUser | {}
// user didn't exist in app, don't pretend they do

View File

@@ -126,6 +126,9 @@ export async function deploymentProgress(
try {
const db = context.getAppDB()
const deploymentDoc = await db.get<DeploymentDoc>(DocumentType.DEPLOYMENTS)
if (!deploymentDoc.history?.[ctx.params.deploymentId]) {
ctx.throw(404, "No deployment found")
}
ctx.body = deploymentDoc.history?.[ctx.params.deploymentId]
} catch (err) {
ctx.throw(

View File

@@ -169,331 +169,422 @@ const descriptions = datasourceDescribe({
})
if (descriptions.length) {
describe.each(descriptions)("$dbName", ({ config, dsProvider }) => {
let datasource: Datasource
let rawDatasource: Datasource
let client: Knex
describe.each(descriptions)(
"$dbName",
({ config, dsProvider, isOracle, isMSSQL }) => {
let datasource: Datasource
let rawDatasource: Datasource
let client: Knex
beforeEach(async () => {
const ds = await dsProvider()
rawDatasource = ds.rawDatasource!
datasource = ds.datasource!
client = ds.client!
beforeEach(async () => {
const ds = await dsProvider()
rawDatasource = ds.rawDatasource!
datasource = ds.datasource!
client = ds.client!
jest.clearAllMocks()
nock.cleanAll()
})
describe("get", () => {
it("should be able to get a datasource", async () => {
const ds = await config.api.datasource.get(datasource._id!)
expect(ds).toEqual({
config: expect.any(Object),
plus: datasource.plus,
source: datasource.source,
isSQL: true,
type: "datasource_plus",
_id: datasource._id,
_rev: expect.any(String),
createdAt: expect.any(String),
updatedAt: expect.any(String),
})
jest.clearAllMocks()
nock.cleanAll()
})
it("should not return database password", async () => {
const ds = await config.api.datasource.get(datasource._id!)
expect(ds.config!.password).toBe("--secret-value--")
})
})
describe("list", () => {
it("returns all the datasources", async () => {
const datasources = await config.api.datasource.fetch()
expect(datasources).toContainEqual(expect.objectContaining(datasource))
})
})
describe("put", () => {
it("should update an existing datasource", async () => {
const newName = generator.guid()
datasource.name = newName
const updatedDs = await config.api.datasource.update(datasource)
expect(updatedDs.name).toEqual(newName)
expect(events.datasource.updated).toHaveBeenCalledTimes(1)
})
it("should not overwrite database password with --secret-value--", async () => {
const password = await context.doInAppContext(
config.getAppId(),
async () => {
const ds = await sdk.datasources.get(datasource._id!)
return ds.config!.password
}
)
expect(password).not.toBe("--secret-value--")
const ds = await config.api.datasource.get(datasource._id!)
expect(ds.config!.password).toBe("--secret-value--")
await config.api.datasource.update(
await config.api.datasource.get(datasource._id!)
)
const newPassword = await context.doInAppContext(
config.getAppId(),
async () => {
const ds = await sdk.datasources.get(datasource._id!)
return ds.config!.password
}
)
expect(newPassword).not.toBe("--secret-value--")
expect(newPassword).toBe(password)
})
})
describe("destroy", () => {
it("deletes queries for the datasource after deletion and returns a success message", async () => {
await config.api.query.save({
datasourceId: datasource._id!,
name: "Test Query",
parameters: [],
fields: {},
schema: {},
queryVerb: "read",
transformer: null,
readable: true,
})
await config.api.datasource.delete(datasource)
const datasources = await config.api.datasource.fetch()
expect(datasources).not.toContainEqual(
expect.objectContaining(datasource)
)
expect(events.datasource.deleted).toHaveBeenCalledTimes(1)
})
})
describe("schema", () => {
it("fetching schema will not drop tables or columns", async () => {
const datasourceId = datasource!._id!
const simpleTable = await config.api.table.save(
tableForDatasource(datasource, {
name: "simple",
schema: {
name: {
name: "name",
type: FieldType.STRING,
},
},
describe("get", () => {
it("should be able to get a datasource", async () => {
const ds = await config.api.datasource.get(datasource._id!)
expect(ds).toEqual({
config: expect.any(Object),
plus: datasource.plus,
source: datasource.source,
isSQL: true,
type: "datasource_plus",
_id: datasource._id,
_rev: expect.any(String),
createdAt: expect.any(String),
updatedAt: expect.any(String),
})
)
const stringName = "string"
const fullSchema: {
[type in SupportedSqlTypes]: FieldSchema & { type: type }
} = {
[FieldType.STRING]: {
name: stringName,
type: FieldType.STRING,
},
[FieldType.LONGFORM]: {
name: "longform",
type: FieldType.LONGFORM,
},
[FieldType.OPTIONS]: {
name: "options",
type: FieldType.OPTIONS,
constraints: {
presence: {
allowEmpty: false,
},
inclusion: [],
},
},
[FieldType.NUMBER]: {
name: "number",
type: FieldType.NUMBER,
},
[FieldType.BOOLEAN]: {
name: "boolean",
type: FieldType.BOOLEAN,
},
[FieldType.ARRAY]: {
name: "array",
type: FieldType.ARRAY,
constraints: {
type: JsonFieldSubType.ARRAY,
inclusion: [],
},
},
[FieldType.DATETIME]: {
name: "datetime",
type: FieldType.DATETIME,
dateOnly: true,
timeOnly: false,
},
[FieldType.LINK]: {
name: "link",
type: FieldType.LINK,
tableId: simpleTable._id!,
relationshipType: RelationshipType.ONE_TO_MANY,
fieldName: "link",
},
[FieldType.FORMULA]: {
name: "formula",
type: FieldType.FORMULA,
formula: "any formula",
},
[FieldType.BARCODEQR]: {
name: "barcodeqr",
type: FieldType.BARCODEQR,
},
[FieldType.BIGINT]: {
name: "bigint",
type: FieldType.BIGINT,
},
[FieldType.BB_REFERENCE]: {
name: "bb_reference",
type: FieldType.BB_REFERENCE,
subtype: BBReferenceFieldSubType.USER,
},
[FieldType.BB_REFERENCE_SINGLE]: {
name: "bb_reference_single",
type: FieldType.BB_REFERENCE_SINGLE,
subtype: BBReferenceFieldSubType.USER,
},
}
await config.api.table.save(
tableForDatasource(datasource, {
name: "full",
schema: fullSchema,
})
)
const persisted = await config.api.datasource.get(datasourceId)
await config.api.datasource.fetchSchema({ datasourceId })
const updated = await config.api.datasource.get(datasourceId)
const expected: Datasource = {
...persisted,
entities:
persisted?.entities &&
Object.entries(persisted.entities).reduce<Record<string, Table>>(
(acc, [tableName, table]) => {
acc[tableName] = expect.objectContaining({
...table,
primaryDisplay: expect.not.stringMatching(
new RegExp(`^${table.primaryDisplay || ""}$`)
),
schema: Object.entries(table.schema).reduce<TableSchema>(
(acc, [fieldName, field]) => {
acc[fieldName] = {
...field,
externalType: allowUndefined(expect.any(String)),
constraints: allowUndefined(expect.any(Object)),
autocolumn: allowUndefined(expect.any(Boolean)),
}
return acc
},
{}
),
})
return acc
},
{}
),
_rev: expect.any(String),
updatedAt: expect.any(String),
}
expect(updated).toEqual(expected)
})
})
describe("verify", () => {
it("should be able to verify the connection", async () => {
await config.api.datasource.verify(
{
datasource: rawDatasource,
},
{
body: {
connected: true,
},
}
)
})
it("should state an invalid datasource cannot connect", async () => {
await config.api.datasource.verify(
{
datasource: {
...rawDatasource,
config: {
...rawDatasource.config,
password: "wrongpassword",
},
},
},
{
body: {
connected: false,
error: /.*/, // error message differs between databases
},
}
)
})
})
describe("info", () => {
it("should fetch information about a datasource with a single table", async () => {
const existingTableNames = (
await config.api.datasource.info(datasource)
).tableNames
const tableName = generator.guid()
await client.schema.createTable(tableName, table => {
table.increments("id").primary()
table.string("name")
})
const info = await config.api.datasource.info(datasource)
expect(info.tableNames).toEqual(
expect.arrayContaining([tableName, ...existingTableNames])
)
expect(info.tableNames).toHaveLength(existingTableNames.length + 1)
it("should not return database password", async () => {
const ds = await config.api.datasource.get(datasource._id!)
expect(ds.config!.password).toBe("--secret-value--")
})
})
it("should fetch information about a datasource with multiple tables", async () => {
const existingTableNames = (
await config.api.datasource.info(datasource)
).tableNames
describe("list", () => {
it("returns all the datasources", async () => {
const datasources = await config.api.datasource.fetch()
expect(datasources).toContainEqual(
expect.objectContaining(datasource)
)
})
})
const tableNames = [
generator.guid(),
generator.guid(),
generator.guid(),
generator.guid(),
]
for (const tableName of tableNames) {
describe("put", () => {
it("should update an existing datasource", async () => {
const newName = generator.guid()
datasource.name = newName
const updatedDs = await config.api.datasource.update(datasource)
expect(updatedDs.name).toEqual(newName)
expect(events.datasource.updated).toHaveBeenCalledTimes(1)
})
it("should not overwrite database password with --secret-value--", async () => {
const password = await context.doInAppContext(
config.getAppId(),
async () => {
const ds = await sdk.datasources.get(datasource._id!)
return ds.config!.password
}
)
expect(password).not.toBe("--secret-value--")
const ds = await config.api.datasource.get(datasource._id!)
expect(ds.config!.password).toBe("--secret-value--")
await config.api.datasource.update(
await config.api.datasource.get(datasource._id!)
)
const newPassword = await context.doInAppContext(
config.getAppId(),
async () => {
const ds = await sdk.datasources.get(datasource._id!)
return ds.config!.password
}
)
expect(newPassword).not.toBe("--secret-value--")
expect(newPassword).toBe(password)
})
})
describe("destroy", () => {
it("deletes queries for the datasource after deletion and returns a success message", async () => {
await config.api.query.save({
datasourceId: datasource._id!,
name: "Test Query",
parameters: [],
fields: {},
schema: {},
queryVerb: "read",
transformer: null,
readable: true,
})
await config.api.datasource.delete(datasource)
const datasources = await config.api.datasource.fetch()
expect(datasources).not.toContainEqual(
expect.objectContaining(datasource)
)
expect(events.datasource.deleted).toHaveBeenCalledTimes(1)
})
})
describe("schema", () => {
it("fetching schema will not drop tables or columns", async () => {
const datasourceId = datasource!._id!
const simpleTable = await config.api.table.save(
tableForDatasource(datasource, {
name: "simple",
schema: {
name: {
name: "name",
type: FieldType.STRING,
},
},
})
)
const stringName = "string"
const fullSchema: {
[type in SupportedSqlTypes]: FieldSchema & { type: type }
} = {
[FieldType.STRING]: {
name: stringName,
type: FieldType.STRING,
},
[FieldType.LONGFORM]: {
name: "longform",
type: FieldType.LONGFORM,
},
[FieldType.OPTIONS]: {
name: "options",
type: FieldType.OPTIONS,
constraints: {
presence: {
allowEmpty: false,
},
inclusion: ["1", "2", "3"],
},
},
[FieldType.NUMBER]: {
name: "number",
type: FieldType.NUMBER,
},
[FieldType.BOOLEAN]: {
name: "boolean",
type: FieldType.BOOLEAN,
},
[FieldType.ARRAY]: {
name: "array",
type: FieldType.ARRAY,
constraints: {
type: JsonFieldSubType.ARRAY,
inclusion: [],
},
},
[FieldType.DATETIME]: {
name: "datetime",
type: FieldType.DATETIME,
dateOnly: true,
timeOnly: false,
},
[FieldType.LINK]: {
name: "link",
type: FieldType.LINK,
tableId: simpleTable._id!,
relationshipType: RelationshipType.ONE_TO_MANY,
fieldName: "link",
},
[FieldType.FORMULA]: {
name: "formula",
type: FieldType.FORMULA,
formula: "any formula",
},
[FieldType.BARCODEQR]: {
name: "barcodeqr",
type: FieldType.BARCODEQR,
},
[FieldType.BIGINT]: {
name: "bigint",
type: FieldType.BIGINT,
},
[FieldType.BB_REFERENCE]: {
name: "bb_reference",
type: FieldType.BB_REFERENCE,
subtype: BBReferenceFieldSubType.USER,
},
[FieldType.BB_REFERENCE_SINGLE]: {
name: "bb_reference_single",
type: FieldType.BB_REFERENCE_SINGLE,
subtype: BBReferenceFieldSubType.USER,
},
}
await config.api.table.save(
tableForDatasource(datasource, {
name: "full",
schema: fullSchema,
})
)
const persisted = await config.api.datasource.get(datasourceId)
await config.api.datasource.fetchSchema({ datasourceId })
const updated = await config.api.datasource.get(datasourceId)
const expected: Datasource = {
...persisted,
entities:
persisted?.entities &&
Object.entries(persisted.entities).reduce<Record<string, Table>>(
(acc, [tableName, table]) => {
acc[tableName] = expect.objectContaining({
...table,
primaryDisplay: expect.not.stringMatching(
new RegExp(`^${table.primaryDisplay || ""}$`)
),
schema: Object.entries(table.schema).reduce<TableSchema>(
(acc, [fieldName, field]) => {
acc[fieldName] = {
...field,
externalType: allowUndefined(expect.any(String)),
constraints: allowUndefined(expect.any(Object)),
autocolumn: allowUndefined(expect.any(Boolean)),
}
return acc
},
{}
),
})
return acc
},
{}
),
_rev: expect.any(String),
updatedAt: expect.any(String),
}
expect(updated).toEqual(expected)
})
!isOracle &&
!isMSSQL &&
it("can fetch options columns with a large number of options", async () => {
const enumOptions = new Array(1000)
.fill(0)
.map((_, i) => i.toString())
.toSorted()
await client.schema.createTable("options", table => {
table.increments("id").primary()
table.enum("enum", enumOptions, {
useNative: true,
enumName: "enum",
})
})
const resp = await config.api.datasource.fetchSchema({
datasourceId: datasource._id!,
})
expect(resp.errors).toEqual({})
const table = resp.datasource.entities!.options
expect(
table.schema.enum.constraints!.inclusion!.toSorted()
).toEqual(enumOptions)
})
!isOracle &&
!isMSSQL &&
it("can fetch options with commas in them", async () => {
const enumOptions = [
"Lincoln, Abraham",
"Washington, George",
"Fred",
"Bob",
].toSorted()
await client.schema.createTable("options", table => {
table.increments("id").primary()
table.enum("enum", enumOptions, {
useNative: true,
enumName: "enum",
})
})
const resp = await config.api.datasource.fetchSchema({
datasourceId: datasource._id!,
})
expect(resp.errors).toEqual({})
const table = resp.datasource.entities!.options
expect(
table.schema.enum.constraints!.inclusion!.toSorted()
).toEqual(enumOptions)
})
!isOracle &&
!isMSSQL &&
it("can fetch options that may include other type names", async () => {
const enumOptions = [
"int",
"bigint",
"float",
"numeric",
"json",
"map",
].toSorted()
await client.schema.createTable("options", table => {
table.increments("id").primary()
table.enum("enum", enumOptions, {
useNative: true,
enumName: "enum",
})
})
const resp = await config.api.datasource.fetchSchema({
datasourceId: datasource._id!,
})
expect(resp.errors).toEqual({})
const table = resp.datasource.entities!.options
expect(
table.schema.enum.constraints!.inclusion!.toSorted()
).toEqual(enumOptions)
})
})
describe("verify", () => {
it("should be able to verify the connection", async () => {
await config.api.datasource.verify(
{
datasource: rawDatasource,
},
{
body: {
connected: true,
},
}
)
})
it("should state an invalid datasource cannot connect", async () => {
await config.api.datasource.verify(
{
datasource: {
...rawDatasource,
config: {
...rawDatasource.config,
password: "wrongpassword",
},
},
},
{
body: {
connected: false,
error: /.*/, // error message differs between databases
},
}
)
})
})
describe("info", () => {
it("should fetch information about a datasource with a single table", async () => {
const existingTableNames = (
await config.api.datasource.info(datasource)
).tableNames
const tableName = generator.guid()
await client.schema.createTable(tableName, table => {
table.increments("id").primary()
table.string("name")
})
}
const info = await config.api.datasource.info(datasource)
expect(info.tableNames).toEqual(
expect.arrayContaining([...tableNames, ...existingTableNames])
)
expect(info.tableNames).toHaveLength(
existingTableNames.length + tableNames.length
)
const info = await config.api.datasource.info(datasource)
expect(info.tableNames).toEqual(
expect.arrayContaining([tableName, ...existingTableNames])
)
expect(info.tableNames).toHaveLength(existingTableNames.length + 1)
})
it("should fetch information about a datasource with multiple tables", async () => {
const existingTableNames = (
await config.api.datasource.info(datasource)
).tableNames
const tableNames = [
generator.guid(),
generator.guid(),
generator.guid(),
generator.guid(),
]
for (const tableName of tableNames) {
await client.schema.createTable(tableName, table => {
table.increments("id").primary()
table.string("name")
})
}
const info = await config.api.datasource.info(datasource)
expect(info.tableNames).toEqual(
expect.arrayContaining([...tableNames, ...existingTableNames])
)
expect(info.tableNames).toHaveLength(
existingTableNames.length + tableNames.length
)
})
})
})
})
}
)
}

View File

@@ -96,6 +96,10 @@ if (env.SELF_HOSTED) {
ACTION_IMPLS["EXECUTE_BASH"] = bash.run
// @ts-ignore
BUILTIN_ACTION_DEFINITIONS["EXECUTE_BASH"] = bash.definition
if (env.isTest()) {
BUILTIN_ACTION_DEFINITIONS["OPENAI"] = openai.definition
}
}
export async function getActionDefinitions(): Promise<

View File

@@ -1,26 +1,148 @@
import { getConfig, afterAll as _afterAll, runStep } from "./utilities"
import { createAutomationBuilder } from "./utilities/AutomationTestBuilder"
import * as automation from "../index"
import * as setup from "./utilities"
import { Table } from "@budibase/types"
describe("test the bash action", () => {
let config = getConfig()
describe("Execute Bash Automations", () => {
let config = setup.getConfig(),
table: Table
beforeAll(async () => {
await automation.init()
await config.init()
})
afterAll(_afterAll)
it("should be able to execute a script", async () => {
let res = await runStep(config, "EXECUTE_BASH", {
code: "echo 'test'",
table = await config.createTable()
await config.createRow({
name: "test row",
description: "test description",
tableId: table._id!,
})
expect(res.stdout).toEqual("test\n")
expect(res.success).toEqual(true)
})
it("should handle a null value", async () => {
let res = await runStep(config, "EXECUTE_BASH", {
code: null,
afterAll(setup.afterAll)
it("should use trigger data in bash command and pass output to subsequent steps", async () => {
const result = await createAutomationBuilder({
name: "Bash with Trigger Data",
config,
})
expect(res.stdout).toEqual(
.appAction({ fields: { command: "hello world" } })
.bash(
{ code: "echo '{{ trigger.fields.command }}'" },
{ stepName: "Echo Command" }
)
.serverLog(
{ text: "Bash output was: {{ steps.[Echo Command].stdout }}" },
{ stepName: "Log Output" }
)
.run()
expect(result.steps[0].outputs.stdout).toEqual("hello world\n")
expect(result.steps[1].outputs.message).toContain(
"Bash output was: hello world"
)
})
it("should chain multiple bash commands using previous outputs", async () => {
const result = await createAutomationBuilder({
name: "Chained Bash Commands",
config,
})
.appAction({ fields: { filename: "testfile.txt" } })
.bash(
{ code: "echo 'initial content' > {{ trigger.fields.filename }}" },
{ stepName: "Create File" }
)
.bash(
{ code: "cat {{ trigger.fields.filename }} | tr '[a-z]' '[A-Z]'" },
{ stepName: "Transform Content" }
)
.bash(
{ code: "rm {{ trigger.fields.filename }}" },
{ stepName: "Cleanup" }
)
.run()
expect(result.steps[1].outputs.stdout).toEqual("INITIAL CONTENT\n")
expect(result.steps[1].outputs.success).toEqual(true)
})
it("should integrate bash output with row operations", async () => {
const result = await createAutomationBuilder({
name: "Bash with Row Operations",
config,
})
.appAction({ fields: {} })
.queryRows(
{
tableId: table._id!,
filters: {},
},
{ stepName: "Get Row" }
)
.bash(
{
code: "echo Row data: {{ steps.[Get Row].rows.[0].name }} - {{ steps.[Get Row].rows.[0].description }}",
},
{ stepName: "Process Row Data" }
)
.serverLog(
{ text: "{{ steps.[Process Row Data].stdout }}" },
{ stepName: "Log Result" }
)
.run()
expect(result.steps[1].outputs.stdout).toContain(
"Row data: test row - test description"
)
expect(result.steps[2].outputs.message).toContain(
"Row data: test row - test description"
)
})
it("should handle bash output in conditional logic", async () => {
const result = await createAutomationBuilder({
name: "Bash with Conditional",
config,
})
.appAction({ fields: { threshold: "5" } })
.bash(
{ code: "echo $(( {{ trigger.fields.threshold }} + 5 ))" },
{ stepName: "Calculate Value" }
)
.executeScript(
{
code: `
const value = parseInt(steps["Calculate Value"].stdout);
return value > 8 ? "high" : "low";
`,
},
{ stepName: "Check Value" }
)
.serverLog(
{ text: "Value was {{ steps.[Check Value].value }}" },
{ stepName: "Log Result" }
)
.run()
expect(result.steps[0].outputs.stdout).toEqual("10\n")
expect(result.steps[1].outputs.value).toEqual("high")
expect(result.steps[2].outputs.message).toContain("Value was high")
})
it("should handle null values gracefully", async () => {
const result = await createAutomationBuilder({
name: "Null Bash Input",
config,
})
.appAction({ fields: {} })
.bash(
//@ts-ignore
{ code: null },
{ stepName: "Null Command" }
)
.run()
expect(result.steps[0].outputs.stdout).toBe(
"Budibase bash automation failed: Invalid inputs"
)
})

View File

@@ -1,7 +1,9 @@
import { getConfig, runStep, afterAll as _afterAll } from "./utilities"
import { getConfig, afterAll as _afterAll } from "./utilities"
import { createAutomationBuilder } from "./utilities/AutomationTestBuilder"
import { OpenAI } from "openai"
import { setEnv as setCoreEnv } from "@budibase/backend-core"
import * as pro from "@budibase/pro"
import { Model } from "@budibase/types"
jest.mock("openai", () => ({
OpenAI: jest.fn().mockImplementation(() => ({
@@ -47,6 +49,7 @@ describe("test the openai action", () => {
let resetEnv: () => void | undefined
beforeAll(async () => {
setCoreEnv({ SELF_HOSTED: true })
await config.init()
})
@@ -62,17 +65,39 @@
afterAll(_afterAll)
it("should be able to receive a response from ChatGPT given a prompt", async () => {
const res = await runStep(config, "OPENAI", { prompt: OPENAI_PROMPT })
expect(res.response).toEqual("This is a test")
expect(res.success).toBeTruthy()
setCoreEnv({ SELF_HOSTED: true })
const result = await createAutomationBuilder({
name: "Test OpenAI Response",
config,
})
.appAction({ fields: {} })
.openai(
{ prompt: OPENAI_PROMPT, model: Model.GPT_4O_MINI },
{ stepName: "Basic OpenAI Query" }
)
.run()
expect(result.steps[0].outputs.response).toEqual("This is a test")
expect(result.steps[0].outputs.success).toBeTruthy()
})
it("should present the correct error message when a prompt is not provided", async () => {
const res = await runStep(config, "OPENAI", { prompt: null })
expect(res.response).toEqual(
const result = await createAutomationBuilder({
name: "Test OpenAI No Prompt",
config,
})
.appAction({ fields: {} })
.openai(
{ prompt: "", model: Model.GPT_4O_MINI },
{ stepName: "Empty Prompt Query" }
)
.run()
expect(result.steps[0].outputs.response).toEqual(
"Budibase OpenAI Automation Failed: No prompt supplied"
)
expect(res.success).toBeFalsy()
expect(result.steps[0].outputs.success).toBeFalsy()
})
it("should present the correct error message when an error is thrown from the createChatCompletion call", async () => {
@@ -91,14 +116,21 @@ describe("test the openai action", () => {
} as any)
)
const res = await runStep(config, "OPENAI", {
prompt: OPENAI_PROMPT,
const result = await createAutomationBuilder({
name: "Test OpenAI Error",
config,
})
.appAction({ fields: {} })
.openai(
{ prompt: OPENAI_PROMPT, model: Model.GPT_4O_MINI },
{ stepName: "Error Producing Query" }
)
.run()
expect(res.response).toEqual(
expect(result.steps[0].outputs.response).toEqual(
"Error: An error occurred while calling createChatCompletion"
)
expect(res.success).toBeFalsy()
expect(result.steps[0].outputs.success).toBeFalsy()
})
it("should ensure that the pro AI module is called when the budibase AI features are enabled", async () => {
@@ -106,10 +138,19 @@
jest.spyOn(pro.features, "isAICustomConfigsEnabled").mockResolvedValue(true)
const prompt = "What is the meaning of life?"
await runStep(config, "OPENAI", {
model: "gpt-4o-mini",
prompt,
await createAutomationBuilder({
name: "Test OpenAI Pro Features",
config,
})
.appAction({ fields: {} })
.openai(
{
model: Model.GPT_4O_MINI,
prompt,
},
{ stepName: "Pro Features Query" }
)
.run()
expect(pro.ai.LargeLanguageModel.forCurrentTenant).toHaveBeenCalledWith(
"gpt-4o-mini"

View File

@@ -1,5 +1,7 @@
import { Table } from "@budibase/types"
import { EmptyFilterOption, SortOrder, Table } from "@budibase/types"
import * as setup from "./utilities"
import { createAutomationBuilder } from "./utilities/AutomationTestBuilder"
import * as automation from "../index"
const NAME = "Test"
@@ -8,6 +10,7 @@ describe("Test a query step automation", () => {
let config = setup.getConfig()
beforeAll(async () => {
await automation.init()
await config.init()
table = await config.createTable()
const row = {
@@ -22,107 +25,132 @@ describe("Test a query step automation", () => {
afterAll(setup.afterAll)
it("should be able to run the query step", async () => {
const inputs = {
tableId: table._id,
filters: {
equal: {
name: NAME,
},
},
sortColumn: "name",
sortOrder: "ascending",
limit: 10,
}
const res = await setup.runStep(
const result = await createAutomationBuilder({
name: "Basic Query Test",
config,
setup.actions.QUERY_ROWS.stepId,
inputs
)
expect(res.success).toBe(true)
expect(res.rows).toBeDefined()
expect(res.rows.length).toBe(2)
expect(res.rows[0].name).toBe(NAME)
})
.appAction({ fields: {} })
.queryRows(
{
tableId: table._id!,
filters: {
equal: {
name: NAME,
},
},
sortColumn: "name",
sortOrder: SortOrder.ASCENDING,
limit: 10,
},
{ stepName: "Query All Rows" }
)
.run()
expect(result.steps[0].outputs.success).toBe(true)
expect(result.steps[0].outputs.rows).toBeDefined()
expect(result.steps[0].outputs.rows.length).toBe(2)
expect(result.steps[0].outputs.rows[0].name).toBe(NAME)
})
it("Returns all rows when onEmptyFilter has no value and no filters are passed", async () => {
const inputs = {
tableId: table._id,
filters: {},
sortColumn: "name",
sortOrder: "ascending",
limit: 10,
}
const res = await setup.runStep(
const result = await createAutomationBuilder({
name: "Empty Filter Test",
config,
setup.actions.QUERY_ROWS.stepId,
inputs
)
expect(res.success).toBe(true)
expect(res.rows).toBeDefined()
expect(res.rows.length).toBe(2)
expect(res.rows[0].name).toBe(NAME)
})
.appAction({ fields: {} })
.queryRows(
{
tableId: table._id!,
filters: {},
sortColumn: "name",
sortOrder: SortOrder.ASCENDING,
limit: 10,
},
{ stepName: "Query With Empty Filter" }
)
.run()
expect(result.steps[0].outputs.success).toBe(true)
expect(result.steps[0].outputs.rows).toBeDefined()
expect(result.steps[0].outputs.rows.length).toBe(2)
expect(result.steps[0].outputs.rows[0].name).toBe(NAME)
})
it("Returns no rows when onEmptyFilter is RETURN_NONE and theres no filters", async () => {
const inputs = {
tableId: table._id,
filters: {},
"filters-def": [],
sortColumn: "name",
sortOrder: "ascending",
limit: 10,
onEmptyFilter: "none",
}
const res = await setup.runStep(
const result = await createAutomationBuilder({
name: "Return None Test",
config,
setup.actions.QUERY_ROWS.stepId,
inputs
)
expect(res.success).toBe(false)
expect(res.rows).toBeDefined()
expect(res.rows.length).toBe(0)
})
.appAction({ fields: {} })
.queryRows(
{
tableId: table._id!,
filters: {},
"filters-def": [],
sortColumn: "name",
sortOrder: SortOrder.ASCENDING,
limit: 10,
onEmptyFilter: EmptyFilterOption.RETURN_NONE,
},
{ stepName: "Query With Return None" }
)
.run()
expect(result.steps[0].outputs.success).toBe(false)
expect(result.steps[0].outputs.rows).toBeDefined()
expect(result.steps[0].outputs.rows.length).toBe(0)
})
it("Returns no rows when onEmptyFilters RETURN_NONE and a filter is passed with a null value", async () => {
const inputs = {
tableId: table._id,
onEmptyFilter: "none",
filters: {},
"filters-def": [
{
value: null,
},
],
sortColumn: "name",
sortOrder: "ascending",
limit: 10,
}
const res = await setup.runStep(
const result = await createAutomationBuilder({
name: "Null Filter Test",
config,
setup.actions.QUERY_ROWS.stepId,
inputs
)
expect(res.success).toBe(false)
expect(res.rows).toBeDefined()
expect(res.rows.length).toBe(0)
})
.appAction({ fields: {} })
.queryRows(
{
tableId: table._id!,
onEmptyFilter: EmptyFilterOption.RETURN_NONE,
filters: {},
"filters-def": [
{
value: null,
},
],
sortColumn: "name",
sortOrder: SortOrder.ASCENDING,
limit: 10,
},
{ stepName: "Query With Null Filter" }
)
.run()
expect(result.steps[0].outputs.success).toBe(false)
expect(result.steps[0].outputs.rows).toBeDefined()
expect(result.steps[0].outputs.rows.length).toBe(0)
})
it("Returns rows when onEmptyFilter is RETURN_ALL and no filter is passed", async () => {
const inputs = {
tableId: table._id,
onEmptyFilter: "all",
filters: {},
sortColumn: "name",
sortOrder: "ascending",
limit: 10,
}
const res = await setup.runStep(
const result = await createAutomationBuilder({
name: "Return All Test",
config,
setup.actions.QUERY_ROWS.stepId,
inputs
)
expect(res.success).toBe(true)
expect(res.rows).toBeDefined()
expect(res.rows.length).toBe(2)
})
.appAction({ fields: {} })
.queryRows(
{
tableId: table._id!,
onEmptyFilter: EmptyFilterOption.RETURN_ALL,
filters: {},
sortColumn: "name",
sortOrder: SortOrder.ASCENDING,
limit: 10,
},
{ stepName: "Query With Return All" }
)
.run()
expect(result.steps[0].outputs.success).toBe(true)
expect(result.steps[0].outputs.rows).toBeDefined()
expect(result.steps[0].outputs.rows.length).toBe(2)
})
})

View File

@@ -35,6 +35,8 @@ import {
Branch,
FilterStepInputs,
ExecuteScriptStepInputs,
OpenAIStepInputs,
BashStepInputs,
} from "@budibase/types"
import TestConfiguration from "../../../tests/utilities/TestConfiguration"
import * as setup from "../utilities"
@@ -221,6 +223,30 @@ class BaseStepBuilder {
input
)
}
bash(
input: BashStepInputs,
opts?: { stepName?: string; stepId?: string }
): this {
return this.step(
AutomationActionStepId.EXECUTE_BASH,
BUILTIN_ACTION_DEFINITIONS.EXECUTE_BASH,
input,
opts
)
}
openai(
input: OpenAIStepInputs,
opts?: { stepName?: string; stepId?: string }
): this {
return this.step(
AutomationActionStepId.OPENAI,
BUILTIN_ACTION_DEFINITIONS.OPENAI,
input,
opts
)
}
}
class StepBuilder extends BaseStepBuilder {
build(): AutomationStep[] {

View File

@@ -322,9 +322,7 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
presence: required && !isAuto && !hasDefault,
externalType: column.Type,
options: column.Type.startsWith("enum")
? column.Type.substring(5, column.Type.length - 1)
.split(",")
.map(str => str.replace(/^'(.*)'$/, "$1"))
? column.Type.substring(6, column.Type.length - 2).split("','")
: undefined,
})
}
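To make the intent of the MySQL enum change above easier to follow, here is a small standalone sketch (illustrative only; the column type string is an assumed example, not Budibase code) of how the two parsing strategies behave when an option itself contains a comma:

// Assumed example of a MySQL column type string for an enum column.
const columnType = "enum('Lincoln, Abraham','Washington, George','Fred')"

// Old approach: strip "enum(" and ")" then split on "," — this splits apart
// options that contain commas before the quote-stripping regex can run.
const oldOptions = columnType
  .substring(5, columnType.length - 1)
  .split(",")
  .map(str => str.replace(/^'(.*)'$/, "$1"))
// oldOptions -> ["'Lincoln", " Abraham'", "'Washington", " George'", "Fred"]

// New approach: strip the leading "enum('" and trailing "')" then split on the
// "','" delimiter, which keeps comma-containing options intact.
const newOptions = columnType.substring(6, columnType.length - 2).split("','")
// newOptions -> ["Lincoln, Abraham", "Washington, George", "Fred"]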

View File

@@ -138,12 +138,22 @@ export function generateColumnDefinition(config: {
let { externalType, autocolumn, name, presence, options } = config
let foundType = FieldType.STRING
const lowerCaseType = externalType.toLowerCase()
let matchingTypes = []
for (let [external, internal] of Object.entries(SQL_TYPE_MAP)) {
if (lowerCaseType.includes(external)) {
matchingTypes.push({ external, internal })
let matchingTypes: { external: string; internal: PrimitiveTypes }[] = []
// In at least MySQL, the external type of an ENUM column is "enum('option1',
// 'option2', ...)", which can potentially contain any type name as a
// substring. To get around this interfering with the loop below, we first
// check for an enum column and handle that separately.
if (lowerCaseType.startsWith("enum")) {
matchingTypes.push({ external: "enum", internal: FieldType.OPTIONS })
} else {
for (let [external, internal] of Object.entries(SQL_TYPE_MAP)) {
if (lowerCaseType.includes(external)) {
matchingTypes.push({ external, internal })
}
}
}
// Set the foundType based on the longest match
if (matchingTypes.length > 0) {
foundType = matchingTypes.reduce((acc, val) => {
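
The enum short-circuit added above can also be seen in a compressed, illustrative form (the type map entries below are assumed samples, not the real SQL_TYPE_MAP):

// Simplified sketch of the matching logic: without the early enum check, a
// type string like "enum('int','bigint')" would also match "int" and "bigint"
// in the generic substring loop.
const SAMPLE_SQL_TYPE_MAP: Record<string, string> = {
  int: "number",
  bigint: "bigint",
  text: "longform",
}

function matchExternalType(externalType: string) {
  const lowerCaseType = externalType.toLowerCase()
  if (lowerCaseType.startsWith("enum")) {
    // Enum option values can contain arbitrary type names, so handle enum first.
    return [{ external: "enum", internal: "options" }]
  }
  return Object.entries(SAMPLE_SQL_TYPE_MAP)
    .filter(([external]) => lowerCaseType.includes(external))
    .map(([external, internal]) => ({ external, internal }))
}

matchExternalType("enum('int','bigint','float')") // -> [{ external: "enum", internal: "options" }]
matchExternalType("bigint") // -> matches both "int" and "bigint"; the longest match wins later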

View File

@@ -2,13 +2,11 @@ import { DeploymentDoc, DeploymentStatus } from "../../documents"
export interface PublishAppResponse extends DeploymentDoc {}
export type DeploymentProgressResponse =
| {
_id: string
appId: string
status?: DeploymentStatus
updatedAt: number
}
| undefined
export interface DeploymentProgressResponse {
_id: string
appId: string
status?: DeploymentStatus
updatedAt: number
}
export type FetchDeploymentResponse = DeploymentProgressResponse[]
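
A hypothetical consumer of this tightened type might look like the following sketch (the helper and URL are assumed for illustration, not Budibase APIs); it relies on the new 404 from deploymentProgress rather than checking for an undefined body:

import { DeploymentProgressResponse } from "@budibase/types"

// Hypothetical helper: the endpoint now throws a 404 when the deployment is
// missing, so the happy path can assume a concrete response body.
async function getDeploymentProgress(
  deploymentId: string
): Promise<DeploymentProgressResponse> {
  const res = await fetch(`/api/deploy/${deploymentId}`) // URL assumed
  if (res.status === 404) {
    throw new Error(`No deployment found: ${deploymentId}`)
  }
  return (await res.json()) as DeploymentProgressResponse
}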

View File

@@ -150,7 +150,7 @@ export type OpenAIStepInputs = {
prompt: string
model: Model
}
enum Model {
export enum Model {
GPT_35_TURBO = "gpt-3.5-turbo",
// will only work with api keys that have access to the GPT4 API
GPT_4 = "gpt-4",