Merge master.

Sam Rose 2025-04-24 17:49:56 +01:00
commit 3f3518e4c5
28 changed files with 1030 additions and 140 deletions

View File

@ -1,4 +1,5 @@
import { TableNames } from "@/constants"
import { INTERNAL_TABLE_SOURCE_ID } from "@budibase/types"
const showDatasourceOpen = ({
selected,
@ -41,7 +42,7 @@ const containsActiveEntity = (
// Check for hardcoded datasource edge cases
if (
isActive("./datasource/bb_internal") &&
datasource._id === "bb_internal"
datasource._id === INTERNAL_TABLE_SOURCE_ID
) {
return true
}

View File

@ -28,6 +28,7 @@
$: isPremiumOrAbove = [
Constants.PlanType.PREMIUM,
Constants.PlanType.PREMIUM_PLUS,
Constants.PlanType.ENTERPRISE,
Constants.PlanType.ENTERPRISE_BASIC_TRIAL,
Constants.PlanType.ENTERPRISE_BASIC,

View File

@ -31,6 +31,8 @@
$: disabled = !aiEnabled || creditsExceeded || readonly || promptLoading
$: animateBorder = !disabled && expanded
$: canSubmit = !readonly && !!value
function collapse() {
dispatch("collapse")
expanded = expandedOnly
@ -62,7 +64,7 @@
}
async function onPromptSubmit() {
if (readonly) {
if (!canSubmit) {
return
}
promptLoading = true
@ -155,6 +157,7 @@
? "#6E56FF"
: "var(--spectrum-global-color-gray-600)"}
size="S"
disabled={!canSubmit}
hoverable={!readonly}
hoverColor="#6E56FF"
name={promptLoading ? "StopCircle" : "PlayCircle"}
@ -265,6 +268,7 @@
.ai-icon {
width: 18px;
height: 18px;
margin-left: 4px;
margin-right: 8px;
flex-shrink: 0;
cursor: var(--ai-icon-cursor, pointer);

View File

@ -1,113 +1,5 @@
/**
* Duplicates a name with respect to a collection of existing names
* e.g.
* name all names result
* ------ ----------- --------
* ("foo") ["foo"] "foo 1"
* ("foo") ["foo", "foo 1"] "foo 2"
* ("foo 1") ["foo", "foo 1"] "foo 2"
* ("foo") ["foo", "foo 2"] "foo 1"
*
* Repl
*/
export const duplicateName = (name: string, allNames: string[]) => {
const duplicatePattern = new RegExp(`\\s(\\d+)$`)
const baseName = name.split(duplicatePattern)[0]
const isDuplicate = new RegExp(`${baseName}\\s(\\d+)$`)
// export * from
import { helpers } from "@budibase/shared-core"
// get the sequence from matched names
const sequence: number[] = []
allNames.filter(n => {
if (n === baseName) {
return true
}
const match = n.match(isDuplicate)
if (match) {
sequence.push(parseInt(match[1]))
return true
}
return false
})
sequence.sort((a, b) => a - b)
// get the next number in the sequence
let number
if (sequence.length === 0) {
number = 1
} else {
// get the next number in the sequence
for (let i = 0; i < sequence.length; i++) {
if (sequence[i] !== i + 1) {
number = i + 1
break
}
}
if (!number) {
number = sequence.length + 1
}
}
return `${baseName} ${number}`
}
/**
* More flexible alternative to the above function, which handles getting the
* next sequential name from an array of existing items while accounting for
* any type of prefix, and being able to deeply retrieve that name from the
* existing item array.
*
* Examples with a prefix of "foo":
* [] => "foo"
* ["foo"] => "foo2"
* ["foo", "foo6"] => "foo7"
*
* Examples with a prefix of "foo " (space at the end):
* [] => "foo"
* ["foo"] => "foo 2"
* ["foo", "foo 6"] => "foo 7"
*
* @param items the array of existing items
* @param prefix the string prefix of each name, including any spaces desired
* @param getName optional function to extract the name for an item, if not a
* flat array of strings
*/
export const getSequentialName = <T extends any>(
items: T[] | null,
prefix: string | null,
{
getName,
numberFirstItem,
separator = "",
}: {
getName?: (item: T) => string
numberFirstItem?: boolean
separator?: string
} = {}
) => {
if (!prefix?.length) {
return ""
}
const trimmedPrefix = prefix.trim()
const firstName = numberFirstItem ? `${prefix}1` : trimmedPrefix
if (!items?.length) {
return firstName
}
let max = 0
items.forEach(item => {
const name = getName?.(item) ?? item
if (typeof name !== "string" || !name.startsWith(trimmedPrefix)) {
return
}
const split = name.split(trimmedPrefix)
if (split.length !== 2) {
return
}
if (split[1].trim() === "") {
split[1] = "1"
}
const num = parseInt(split[1])
if (num > max) {
max = num
}
})
return max === 0 ? firstName : `${prefix}${separator}${max + 1}`
}
export const duplicateName = helpers.duplicateName
export const getSequentialName = helpers.getSequentialName

View File

@ -1,16 +1,32 @@
<script lang="ts">
import { API } from "@/api"
import AiInput from "@/components/common/ai/AIInput.svelte"
import { datasources, tables } from "@/stores/builder"
import { auth, licensing } from "@/stores/portal"
import { ActionButton, notifications } from "@budibase/bbui"
import { goto } from "@roxi/routify"
let promptText = ""
$: isEnabled = $auth?.user?.llm && !$licensing.aiCreditsExceeded
async function submitPrompt(message: string) {
await API.generateTables(message)
notifications.success("Tables created successfully!")
try {
const { createdTables } = await API.generateTables({
prompt: message,
})
const [tableToRedirect] = createdTables.sort((a, b) =>
a.name.localeCompare(b.name)
)
notifications.success(`Tables created successfully.`)
await datasources.fetch()
await tables.fetch()
$goto(`./table/${tableToRedirect.id}`)
} catch (e: any) {
notifications.error(e.message)
}
}
const examplePrompts = [

View File

@ -6,7 +6,7 @@
let modal
let promptUpload = false
export function show({ promptUpload: newPromptUpload = false }) {
export function show({ promptUpload: newPromptUpload = false } = {}) {
promptUpload = newPromptUpload
modal.show()
}

View File

@ -1,4 +1,4 @@
<script>
<script lang="ts">
import { API } from "@/api"
import {
tables,
@ -19,8 +19,8 @@
import { featureFlag } from "@/helpers"
import { FeatureFlag } from "@budibase/types"
let internalTableModal
let externalDatasourceModal
let internalTableModal: CreateInternalTableModal
let externalDatasourceModal: CreateExternalDatasourceModal
let sampleDataLoading = false
let externalDatasourceLoading = false
@ -72,7 +72,7 @@
</div>
{/if}
<DatasourceOption
on:click={internalTableModal.show}
on:click={() => internalTableModal.show()}
title="Create new table"
description="Non-relational"
{disabled}

View File

@ -1,11 +1,17 @@
import { GenerateJsRequest, GenerateJsResponse } from "@budibase/types"
import {
GenerateJsRequest,
GenerateJsResponse,
GenerateTablesRequest,
GenerateTablesResponse,
} from "@budibase/types"
import { BaseAPIClient } from "./types"
import { sleep } from "../utils/utils"
export interface AIEndpoints {
generateCronExpression: (prompt: string) => Promise<{ message: string }>
generateJs: (req: GenerateJsRequest) => Promise<GenerateJsResponse>
generateTables: (prompt: string) => Promise<void>
generateTables: (
req: GenerateTablesRequest
) => Promise<GenerateTablesResponse>
}
export const buildAIEndpoints = (API: BaseAPIClient): AIEndpoints => ({
@ -25,8 +31,11 @@ export const buildAIEndpoints = (API: BaseAPIClient): AIEndpoints => ({
body: req,
})
},
generateTables: async prompt => {
console.warn({ prompt })
await sleep(1000)
generateTables: async req => {
return await API.post({
url: "/api/ai/tables",
body: req,
})
},
})
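For reference, a minimal sketch of how a caller might use the updated client method, mirroring the DatasourceModal change above (the wrapper function is illustrative and not part of this commit):

import { API } from "@/api"

// Illustrative: generate tables from a prompt and return the new table IDs.
async function createTablesFromPrompt(prompt: string): Promise<string[]> {
  // Sends POST /api/ai/tables with a GenerateTablesRequest body
  const { createdTables } = await API.generateTables({ prompt })
  return createdTables.map(t => t.id)
}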

View File

@ -86,6 +86,7 @@ export const PlanType = {
PRO: "pro",
BUSINESS: "business",
PREMIUM: "premium",
PREMIUM_PLUS: "premium_plus",
ENTERPRISE: "enterprise",
ENTERPRISE_BASIC_TRIAL: "enterprise_basic_trial",
}

View File

@ -0,0 +1,25 @@
import {
GenerateTablesRequest,
GenerateTablesResponse,
UserCtx,
} from "@budibase/types"
import { ai } from "@budibase/pro"
import sdk from "../../sdk"
export async function generateTables(
ctx: UserCtx<GenerateTablesRequest, GenerateTablesResponse>
) {
const { prompt } = ctx.request.body
const tableGenerator = await ai.TableGeneration.init({
generateTablesDelegate: sdk.ai.helpers.generateTables,
getTablesDelegate: sdk.tables.getTables,
generateDataDelegate: sdk.ai.helpers.generateRows,
})
tableGenerator.withData(ctx.user._id || "")
const createdTables = await tableGenerator.generate(prompt)
ctx.body = {
createdTables,
}
}

View File

@ -0,0 +1,9 @@
import * as ai from "../controllers/ai"
import Router from "@koa/router"
import { auth } from "@budibase/backend-core"
const router: Router = new Router()
router.post("/api/ai/tables", auth.builderOrAdmin, ai.generateTables)
export default router

View File

@ -31,6 +31,7 @@ import { api as pro } from "@budibase/pro"
import rowActionRoutes from "./rowAction"
import oauth2Routes from "./oauth2"
import featuresRoutes from "./features"
import aiRoutes from "./ai"
export { default as staticRoutes } from "./static"
export { default as publicRoutes } from "./public"
@ -76,4 +77,5 @@ export const mainRoutes: Router[] = [
// these need to be handled last as they still use /api/:tableId
// this could be breaking as koa may recognise other routes as this
tableRoutes,
aiRoutes,
]

View File

@ -1,21 +1,25 @@
import { z } from "zod"
import { zodResponseFormat } from "openai/helpers/zod"
import { mockChatGPTResponse } from "../../../tests/utilities/mocks/ai/openai"
import TestConfiguration from "../../../tests/utilities/TestConfiguration"
import nock from "nock"
import { configs, env, features, setEnv } from "@budibase/backend-core"
import {
AIInnerConfig,
AIOperationEnum,
AttachmentSubType,
ConfigType,
Feature,
FieldType,
License,
PlanModel,
PlanType,
ProviderConfig,
StructuredOutput,
RelationshipType,
} from "@budibase/types"
import { context } from "@budibase/backend-core"
import { generator, mocks } from "@budibase/backend-core/tests"
import { ai, quotas } from "@budibase/pro"
import { quotas, ai } from "@budibase/pro"
import { MockLLMResponseFn } from "../../../tests/utilities/mocks/ai"
import { mockAnthropicResponse } from "../../../tests/utilities/mocks/ai/anthropic"
@ -414,11 +418,12 @@ describe("BudibaseAI", () => {
expect(usage.monthly.current.budibaseAICredits).toBe(0)
const gptResponse = generator.guid()
const structuredOutput = generator.word() as unknown as StructuredOutput
ai.structuredOutputs[structuredOutput] = {
key: generator.word(),
validator: z.object({ name: z.string() }),
}
const structuredOutput = zodResponseFormat(
z.object({
[generator.word()]: z.string(),
}),
"key"
)
mockChatGPTResponse(gptResponse, { format: structuredOutput })
const { message } = await config.api.ai.chat({
messages: [{ role: "user", content: "Hello!" }],
@ -431,4 +436,492 @@ describe("BudibaseAI", () => {
expect(usage.monthly.current.budibaseAICredits).toBeGreaterThan(0)
})
})
describe("POST /api/ai/tables", () => {
let featureCleanup: () => void
beforeAll(() => {
featureCleanup = features.testutils.setFeatureFlags("*", {
AI_TABLE_GENERATION: true,
})
})
afterAll(() => {
featureCleanup()
})
beforeEach(async () => {
await config.newTenant()
nock.cleanAll()
})
const mockAIGenerationStructure = (
generationStructure: ai.GenerationStructure
) =>
mockChatGPTResponse(JSON.stringify(generationStructure), {
format: zodResponseFormat(ai.generationStructure, "key"),
})
const mockAIColumnGeneration = (
generationStructure: ai.GenerationStructure,
aiColumnGeneration: ai.AIColumnSchemas
) =>
mockChatGPTResponse(JSON.stringify(aiColumnGeneration), {
format: zodResponseFormat(
ai.aiColumnSchemas(generationStructure),
"key"
),
})
const mockDataGeneration = (
dataGeneration: Record<string, Record<string, any>[]>
) =>
mockChatGPTResponse(JSON.stringify(dataGeneration), {
format: zodResponseFormat(ai.tableDataStructuredOutput([]), "key"),
})
const mockProcessAIColumn = (response: string) =>
mockChatGPTResponse(response)
it("handles correct chat response", async () => {
const prompt = "Create me a table for managing IT tickets"
const generationStructure: ai.GenerationStructure = {
tables: [
{
name: "Tickets",
primaryDisplay: "Title",
schema: [
{
name: "Title",
type: FieldType.STRING,
constraints: {
presence: true,
},
},
{
name: "Description",
type: FieldType.LONGFORM,
constraints: {
presence: true,
},
},
{
name: "Priority",
type: FieldType.OPTIONS,
constraints: {
inclusion: ["Low", "Medium", "High"],
presence: true,
},
},
{
name: "Status",
type: FieldType.OPTIONS,
constraints: {
inclusion: ["Open", "In Progress", "Closed"],
presence: true,
},
},
{
name: "Assignee",
type: FieldType.LINK,
tableId: "Employees",
relationshipType: RelationshipType.MANY_TO_ONE,
reverseFieldName: "AssignedTickets",
relationshipId: "TicketUser",
},
{
name: "Created Date",
type: FieldType.DATETIME,
ignoreTimezones: false,
dateOnly: true,
},
{
name: "Resolution Time (Days)",
type: FieldType.FORMULA,
formula:
'return (new Date() - new Date($("Created Date"))) / (1000 * 60 * 60 * 24);',
responseType: FieldType.NUMBER,
},
{
name: "Attachment",
type: FieldType.ATTACHMENT_SINGLE,
},
],
},
{
name: "Employees",
primaryDisplay: "First Name",
schema: [
{
name: "First Name",
type: FieldType.STRING,
constraints: {
presence: true,
},
},
{
name: "Last Name",
type: FieldType.STRING,
constraints: {
presence: true,
},
},
{
name: "Position",
type: FieldType.STRING,
constraints: {
presence: true,
},
},
{
name: "Photo",
type: FieldType.ATTACHMENT_SINGLE,
subtype: AttachmentSubType.IMAGE,
},
{
name: "Documents",
type: FieldType.ATTACHMENTS,
},
{
name: "AssignedTickets",
type: FieldType.LINK,
tableId: "Tickets",
relationshipType: RelationshipType.ONE_TO_MANY,
reverseFieldName: "Assignee",
relationshipId: "TicketUser",
},
],
},
],
}
mockAIGenerationStructure(generationStructure)
const aiColumnGeneration: ai.AIColumnSchemas = {
Tickets: [
{
name: "Ticket Summary",
type: FieldType.AI,
operation: AIOperationEnum.SUMMARISE_TEXT,
columns: ["Title", "Description"],
},
{
name: "Translated Description",
type: FieldType.AI,
operation: AIOperationEnum.TRANSLATE,
column: "Description",
language: "es",
},
],
Employees: [
{
name: "Role Category",
type: FieldType.AI,
operation: AIOperationEnum.CATEGORISE_TEXT,
columns: ["Position"],
categories: "Manager,Staff,Intern,Contractor",
},
],
}
mockAIColumnGeneration(generationStructure, aiColumnGeneration)
nock("https://photourl.com").get("/any.png").reply(200).persist()
const dataGeneration: Record<string, Record<string, any>[]> = {
Tickets: [
{
Title: "System slow performance",
Description:
"User reports significant slowdowns when using multiple applications simultaneously on their PC.",
Priority: "Medium",
Status: "Closed",
"Created Date": "2025-04-17",
Attachment: {
name: "performance_logs.txt",
extension: ".txt",
content: "performance logs",
},
},
{
Title: "Email delivery failure",
Description:
"Emails sent to external clients are bouncing back. Bounce back message: '550: Recipient address rejected'.",
Priority: "Medium",
Status: "In Progress",
"Created Date": "2025-04-19",
Attachment: {
name: "email_bounce_back.txt",
extension: ".txt",
content: "Email delivery failure",
},
},
{
Title: "Software installation request",
Description:
"Request to install Adobe Photoshop on users workstation for design work.",
Priority: "Low",
Status: "In Progress",
"Created Date": "2025-04-18",
Attachment: {
name: "software_request_form.pdf",
extension: ".pdf",
content: "Software installation request",
},
},
{
Title: "Unable to connect to VPN",
Description:
"User is experiencing issues when trying to connect to the VPN. Error message: 'VPN connection failed due to incorrect credentials'.",
Priority: "High",
Status: "Open",
"Created Date": "2025-04-20",
Attachment: {
name: "vpn_error_screenshot.pdf",
extension: ".pdf",
content: "vpn error",
},
},
],
"Employees 2": [
{
"First Name": "Joshua",
"Last Name": "Lee",
Position: "Application Developer",
Photo: "https://photourl.com/any.png",
Documents: [
{
name: "development_guidelines.pdf",
extension: ".pdf",
content: "any content",
},
{
name: "project_documents.txt",
extension: ".txt",
content: "any content",
},
],
},
{
"First Name": "Emily",
"Last Name": "Davis",
Position: "Software Deployment Technician",
Photo: "https://photourl.com/any.png",
Documents: [
{
name: "software_license_list.txt",
extension: ".txt",
content: "any content",
},
{
name: "deployment_guide.pdf",
extension: ".pdf",
content: "any content",
},
{
name: "installation_logs.txt",
extension: ".txt",
content: "any content",
},
],
},
{
"First Name": "James",
"Last Name": "Smith",
Position: "IT Support Specialist",
Photo: "https://photourl.com/any.png",
Documents: [
{
name: "certificates.pdf",
extension: ".pdf",
content: "any content",
},
{
name: "employment_contract.pdf",
extension: ".pdf",
content: "any content",
},
],
},
{
"First Name": "Jessica",
"Last Name": "Taylor",
Position: "Cybersecurity Analyst",
Photo: "https://photourl.com/any.png",
Documents: [
{
name: "security_audit_report.pdf",
extension: ".pdf",
content: "any content",
},
{
name: "incident_response_plan.pdf",
extension: ".pdf",
content: "any content",
},
],
},
{
"First Name": "Ashley",
"Last Name": "Harris",
Position: "Database Administrator",
Photo: "https://photourl.com/any.png",
Documents: [
{
name: "database_backup.txt",
extension: ".txt",
content: "any content",
},
{
name: "permission_settings.pdf",
extension: ".pdf",
content: "any content",
},
],
},
],
}
mockDataGeneration(dataGeneration)
mockProcessAIColumn("Mock LLM Response")
const { createdTables } = await config.api.ai.generateTables({ prompt })
expect(createdTables).toEqual([
{ id: expect.stringMatching(/ta_\w+/), name: "Tickets" },
{ id: expect.stringMatching(/ta_\w+/), name: "Employees" },
])
const tables = [
await config.api.table.get(createdTables[0].id),
await config.api.table.get(createdTables[1].id),
]
expect(tables).toEqual([
expect.objectContaining({
name: "Tickets",
schema: {
Title: {
name: "Title",
type: "string",
constraints: {
presence: true,
},
},
Description: {
name: "Description",
type: "longform",
constraints: {
presence: true,
},
},
Priority: {
name: "Priority",
type: "options",
constraints: {
inclusion: ["Low", "Medium", "High"],
presence: true,
},
},
Status: {
name: "Status",
type: "options",
constraints: {
inclusion: ["Open", "In Progress", "Closed"],
presence: true,
},
},
Assignee: {
name: "Assignee",
type: "link",
tableId: createdTables[1].id,
fieldName: "AssignedTickets",
relationshipType: "one-to-many",
},
"Created Date": {
name: "Created Date",
type: "datetime",
ignoreTimezones: false,
dateOnly: true,
},
"Resolution Time (Days)": {
name: "Resolution Time (Days)",
type: "formula",
formula:
'{{ js "cmV0dXJuIChuZXcgRGF0ZSgpIC0gbmV3IERhdGUoJCgiQ3JlYXRlZCBEYXRlIikpKSAvICgxMDAwICogNjAgKiA2MCAqIDI0KTs=" }}',
responseType: "number",
},
Attachment: {
name: "Attachment",
type: "attachment_single",
},
"Ticket Summary": {
name: "Ticket Summary",
type: "ai",
operation: "SUMMARISE_TEXT",
columns: ["Title", "Description"],
},
"Translated Description": {
name: "Translated Description",
type: "ai",
operation: "TRANSLATE",
column: "Description",
language: "es",
},
},
}),
expect.objectContaining({
name: "Employees 2",
schema: {
"First Name": {
constraints: {
presence: true,
},
name: "First Name",
type: "string",
},
"Last Name": {
constraints: {
presence: true,
},
name: "Last Name",
type: "string",
},
Photo: {
name: "Photo",
subtype: "image",
type: "attachment_single",
},
Position: {
constraints: {
presence: true,
},
name: "Position",
type: "string",
},
AssignedTickets: {
fieldName: "Assignee",
name: "AssignedTickets",
relationshipType: "many-to-one",
tableId: createdTables[0].id,
type: "link",
},
Documents: {
name: "Documents",
type: "attachment",
},
"Role Category": {
categories: "Manager,Staff,Intern,Contractor",
columns: ["Position"],
name: "Role Category",
operation: "CATEGORISE_TEXT",
type: "ai",
},
},
}),
])
const tickets = await config.api.row.fetch(createdTables[0].id)
expect(tickets).toHaveLength(4)
const employees = await config.api.row.fetch(createdTables[1].id)
expect(employees).toHaveLength(5)
})
})
})

View File

@ -0,0 +1,2 @@
export * from "./table"
export * from "./rows"

View File

@ -0,0 +1,117 @@
import { FieldSchema, FieldType, Table } from "@budibase/types"
import sdk from "../../.."
import { uploadFile, uploadUrl } from "../../../../utilities"
export async function generateRows(
data: Record<string, Record<string, any>[]>,
userId: string,
tables: Record<string, Table>
) {
const createdData: Record<string, Record<string, string>> = {}
const toUpdateLinks: {
tableId: string
rowId: string
data: Record<string, { rowId: string[]; tableId: string }>
}[] = []
for (const tableName of Object.keys(data)) {
const table = tables[tableName]
const linksOverride: Record<string, null> = {}
for (const field of Object.values(table.schema).filter(
f => f.type === FieldType.LINK
)) {
linksOverride[field.name] = null
}
const attachmentColumns = Object.values(table.schema).filter(f =>
[FieldType.ATTACHMENTS, FieldType.ATTACHMENT_SINGLE].includes(f.type)
)
for (const entry of data[tableName]) {
await processAttachments(entry, attachmentColumns)
const tableId = table._id!
const createdRow = await sdk.rows.save(
tableId,
{
...entry,
...linksOverride,
_id: undefined,
},
userId
)
createdData[tableId] ??= {}
createdData[tableId][entry._id] = createdRow.row._id!
const overridenLinks = Object.keys(linksOverride).reduce<
Record<string, { rowId: string[]; tableId: string }>
>((acc, l) => {
if (entry[l]) {
acc[l] = {
tableId: (table.schema[l] as any).tableId,
rowId: entry[l],
}
}
return acc
}, {})
if (Object.keys(overridenLinks).length) {
toUpdateLinks.push({
tableId: createdRow.table._id!,
rowId: createdRow.row._id!,
data: overridenLinks,
})
}
}
}
for (const data of toUpdateLinks) {
const persistedRow = await sdk.rows.find(data.tableId, data.rowId)
const updatedLinks = Object.keys(data.data).reduce<Record<string, any>>(
(acc, d) => {
acc[d] = [
...(persistedRow[d] || []),
...data.data[d].rowId.map(
rid => createdData[data.data[d].tableId][rid]
),
]
return acc
},
{}
)
await sdk.rows.save(
data.tableId,
{
...persistedRow,
...updatedLinks,
},
userId
)
}
}
async function processAttachments(
entry: Record<string, any>,
attachmentColumns: FieldSchema[]
) {
function processAttachment(value: any) {
if (typeof value === "object") {
return uploadFile(value)
}
return uploadUrl(value)
}
for (const column of attachmentColumns) {
if (!Array.isArray(entry[column.name])) {
entry[column.name] = await processAttachment(entry[column.name])
} else {
entry[column.name] = await Promise.all(
entry[column.name].map((attachment: any) =>
processAttachment(attachment)
)
)
}
}
}

View File

@ -0,0 +1,80 @@
import {
FieldType,
GenerateTablesResponse,
INTERNAL_TABLE_SOURCE_ID,
TableSchema,
TableSourceType,
} from "@budibase/types"
import sdk from "../../.."
import { helpers } from "@budibase/shared-core"
export async function generateTables(
tables: { name: string; primaryDisplay: string; schema: TableSchema }[]
) {
const createdTables: GenerateTablesResponse["createdTables"] = []
const tableIds: Record<string, string> = {}
try {
for (const table of tables) {
for (const linkField of Object.values(table.schema).filter(
f => f.type === FieldType.LINK
)) {
if (!tables.find(t => t.name === linkField.tableId)) {
throw `Table ${linkField.tableId} not found in the json response.`
}
}
}
const existingTableNames = (await sdk.tables.getAllInternalTables()).map(
t => t.name
)
for (const table of tables) {
const name = helpers.getSequentialName(existingTableNames, table.name, {
separator: " ",
})
const createdTable = await sdk.tables.create({
...table,
name,
schema: {},
primaryDisplay: undefined,
sourceType: TableSourceType.INTERNAL,
sourceId: INTERNAL_TABLE_SOURCE_ID,
type: "table",
})
createdTables.push({ id: createdTable._id!, name: table.name })
tableIds[table.name] = createdTable._id!
}
for (const table of tables) {
for (const field of Object.values(table.schema)) {
if (field.type === FieldType.LINK) {
field.tableId = tableIds[field.tableId]
} else if (field.type === FieldType.FORMULA) {
field.formula = `{{ js "${btoa(field.formula)}" }}`
}
}
}
for (const table of tables) {
const storedTable = await sdk.tables.getTable(tableIds[table.name])
await sdk.tables.update({
...storedTable,
schema: {
...storedTable.schema,
...table.schema,
},
primaryDisplay: table.primaryDisplay,
})
}
} catch (e) {
const tables = await sdk.tables.getTables(createdTables.map(t => t.id))
await Promise.all(tables.map(sdk.tables.internal.destroy))
throw e
}
return createdTables
}
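Note on the formula handling above: formulas arrive from the model as plain JavaScript and are wrapped into Budibase JS bindings by base64-encoding them. A small worked example, consistent with the expectation in the test earlier in this commit:

// Plain JS formula as produced by the model
const js =
  'return (new Date() - new Date($("Created Date"))) / (1000 * 60 * 60 * 24);'
// Wrapped exactly as generateTables does
const binding = `{{ js "${btoa(js)}" }}`
// binding === '{{ js "cmV0dXJuIChuZXcgRGF0ZSgpIC0gbmV3IERhdGUoJCgiQ3JlYXRlZCBEYXRlIikpKSAvICgxMDAwICogNjAgKiA2MCAqIDI0KTs=" }}'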

View File

@ -0,0 +1 @@
export * as helpers from "./helpers"

View File

@ -11,6 +11,7 @@ import {
Row,
RestConfig,
SourceName,
INTERNAL_TABLE_SOURCE_ID,
} from "@budibase/types"
import { cloneDeep } from "lodash/fp"
import { getEnvironmentVariables } from "../../utils"
@ -51,7 +52,7 @@ export async function fetch(opts?: {
)
const internal = internalTables.rows.reduce((acc: any, row: Row) => {
const sourceId = row.doc.sourceId || "bb_internal"
const sourceId = row.doc.sourceId || INTERNAL_TABLE_SOURCE_ID
acc[sourceId] = acc[sourceId] || []
acc[sourceId].push(row.doc)
return acc

View File

@ -14,6 +14,7 @@ import * as rowActions from "./app/rowActions"
import * as screens from "./app/screens"
import * as common from "./app/common"
import * as oauth2 from "./app/oauth2"
import * as ai from "./app/ai"
const sdk = {
backups,
@ -32,6 +33,7 @@ const sdk = {
rowActions,
common,
oauth2,
ai,
}
// default export for TS

View File

@ -5,6 +5,8 @@ import {
GenerateCronResponse,
GenerateJsRequest,
GenerateJsResponse,
GenerateTablesRequest,
GenerateTablesResponse,
} from "@budibase/types"
import { Expectations, TestAPI } from "./base"
import { constants } from "@budibase/backend-core"
@ -44,4 +46,16 @@ export class AIAPI extends TestAPI {
expectations,
})
}
generateTables = async (
req: GenerateTablesRequest,
expectations?: Expectations
): Promise<GenerateTablesResponse> => {
const headers: Record<string, string> = {}
return await this._post<GenerateTablesResponse>(`/api/ai/tables`, {
body: req,
headers,
expectations,
})
}
}

View File

@ -0,0 +1,85 @@
import fs from "fs"
import path from "path"
import { pipeline } from "stream"
import { promisify } from "util"
import * as uuid from "uuid"
import fetch from "node-fetch"
import { context, objectStore } from "@budibase/backend-core"
import { Upload } from "@budibase/types"
import { ObjectStoreBuckets } from "../constants"
function getTmpPath() {
const tmpPath = path.join(objectStore.budibaseTempDir(), "ai-downloads")
if (!fs.existsSync(tmpPath)) {
fs.mkdirSync(tmpPath)
}
return tmpPath
}
export async function uploadUrl(url: string): Promise<Upload | undefined> {
try {
const res = await fetch(url)
const extension = [...res.url.split(".")].pop()!.split("?")[0]
const destination = path.resolve(getTmpPath(), `${uuid.v4()}${extension}`)
const fileStream = fs.createWriteStream(destination, { flags: "wx" })
await promisify(pipeline)(res.body, fileStream)
const processedFileName = path.basename(destination)
const s3Key = `${context.getProdAppId()}/attachments/${processedFileName}`
const response = await objectStore.upload({
bucket: ObjectStoreBuckets.APPS,
filename: s3Key,
path: destination,
type: "image/jpeg",
})
return {
size: fileStream.bytesWritten,
name: processedFileName,
url: await objectStore.getAppFileUrl(s3Key),
extension,
key: response.Key!,
}
} catch (e) {
console.error("Error downloading file", e)
return
}
}
export async function uploadFile(file: {
fileName: string
extension: string
content: string
}): Promise<Upload> {
const destination = path.resolve(
getTmpPath(),
`${file.fileName}${file.extension}`
)
fs.writeFileSync(destination, file.content)
const processedFileName = path.basename(destination)
const s3Key = `${context.getProdAppId()}/attachments/${processedFileName}`
const response = await objectStore.upload({
bucket: ObjectStoreBuckets.APPS,
filename: s3Key,
path: destination,
type: "text/plain",
})
return {
size: fs.readFileSync(destination).byteLength,
name: processedFileName,
url: await objectStore.getAppFileUrl(s3Key),
extension: file.extension,
key: response.Key!,
}
}
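A short sketch of how these helpers are consumed by the row generator above: inline file objects go through uploadFile, plain URL strings through uploadUrl (which returns undefined if the download fails). The wrapper and its import path are illustrative only:

// Illustrative wrapper; rows.ts above imports these from the server utilities barrel.
import { uploadFile, uploadUrl } from "../../../../utilities"

async function toAttachment(value: unknown) {
  if (typeof value === "object" && value !== null) {
    // Inline file content generated by the LLM
    return uploadFile(
      value as { fileName: string; extension: string; content: string }
    )
  }
  // Otherwise treat the value as a URL to download and re-upload to the app bucket
  return uploadUrl(String(value))
}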

View File

@ -1,3 +1,4 @@
export * from "./fileUtils"
import env from "../environment"
import { context } from "@budibase/backend-core"
import { generateMetadataID } from "../db/utils"

View File

@ -0,0 +1,113 @@
/**
* Duplicates a name with respect to a collection of existing names
* e.g.
* name all names result
* ------ ----------- --------
* ("foo") ["foo"] "foo 1"
* ("foo") ["foo", "foo 1"] "foo 2"
* ("foo 1") ["foo", "foo 1"] "foo 2"
* ("foo") ["foo", "foo 2"] "foo 1"
*
* Repl
*/
export const duplicateName = (name: string, allNames: string[]) => {
const duplicatePattern = new RegExp(`\\s(\\d+)$`)
const baseName = name.split(duplicatePattern)[0]
const isDuplicate = new RegExp(`${baseName}\\s(\\d+)$`)
// get the sequence from matched names
const sequence: number[] = []
allNames.filter(n => {
if (n === baseName) {
return true
}
const match = n.match(isDuplicate)
if (match) {
sequence.push(parseInt(match[1]))
return true
}
return false
})
sequence.sort((a, b) => a - b)
// get the next number in the sequence
let number
if (sequence.length === 0) {
number = 1
} else {
// get the next number in the sequence
for (let i = 0; i < sequence.length; i++) {
if (sequence[i] !== i + 1) {
number = i + 1
break
}
}
if (!number) {
number = sequence.length + 1
}
}
return `${baseName} ${number}`
}
/**
* More flexible alternative to the above function, which handles getting the
* next sequential name from an array of existing items while accounting for
* any type of prefix, and being able to deeply retrieve that name from the
* existing item array.
*
* Examples with a prefix of "foo":
* [] => "foo"
* ["foo"] => "foo2"
* ["foo", "foo6"] => "foo7"
*
* Examples with a prefix of "foo " (space at the end):
* [] => "foo"
* ["foo"] => "foo 2"
* ["foo", "foo 6"] => "foo 7"
*
* @param items the array of existing items
* @param prefix the string prefix of each name, including any spaces desired
* @param getName optional function to extract the name for an item, if not a
* flat array of strings
*/
export const getSequentialName = <T extends any>(
items: T[] | null,
prefix: string | null,
{
getName,
numberFirstItem,
separator = "",
}: {
getName?: (item: T) => string
numberFirstItem?: boolean
separator?: string
} = {}
) => {
if (!prefix?.length) {
return ""
}
const trimmedPrefix = prefix.trim()
const firstName = numberFirstItem ? `${prefix}1` : trimmedPrefix
if (!items?.length) {
return firstName
}
let max = 0
items.forEach(item => {
const name = getName?.(item) ?? item
if (typeof name !== "string" || !name.startsWith(trimmedPrefix)) {
return
}
const split = name.split(trimmedPrefix)
if (split.length !== 2) {
return
}
if (split[1].trim() === "") {
split[1] = "1"
}
const num = parseInt(split[1])
if (num > max) {
max = num
}
})
return max === 0 ? firstName : `${prefix}${separator}${max + 1}`
}
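Worked examples matching the doc comments above (results follow from the code as written):

duplicateName("foo", ["foo", "foo 1"]) // "foo 2"
duplicateName("foo", ["foo", "foo 2"]) // "foo 1"
getSequentialName(["foo", "foo 6"], "foo ") // "foo 7"
getSequentialName(["Employees"], "Employees", { separator: " " }) // "Employees 2"

The last line is the same collision handling that makes the second generated table in the API test above come back as "Employees 2" when a table named "Employees" already exists.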

View File

@ -7,3 +7,4 @@ export * as schema from "./schema"
export * as views from "./views"
export * as roles from "./roles"
export * as lists from "./lists"
export * from "./duplicate"

View File

@ -1,4 +1,3 @@
import { expect, describe, it } from "vitest"
import { duplicateName, getSequentialName } from "../duplicate"
describe("duplicate", () => {

View File

@ -1,3 +1,4 @@
import openai from "openai"
import { EnrichedBinding } from "../../ui"
export interface Message {
@ -5,9 +6,7 @@ export interface Message {
content: string
}
export enum StructuredOutput {}
export type ResponseFormat = "text" | "json" | StructuredOutput
export type ResponseFormat = "text" | "json" | openai.ResponseFormatJSONSchema
export interface ChatCompletionRequest {
messages: Message[]
@ -35,3 +34,11 @@ export interface GenerateCronRequest {
export interface GenerateCronResponse {
message?: string
}
export interface GenerateTablesRequest {
prompt: string
}
export interface GenerateTablesResponse {
createdTables: { id: string; name: string }[]
}
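For reference, a request/response pair for the new endpoint might look like the following (values are illustrative; the id format matches the ta_ prefix asserted in the test above):

const req: GenerateTablesRequest = {
  prompt: "Create me a table for managing IT tickets",
}
const res: GenerateTablesResponse = {
  createdTables: [
    { id: "ta_2b3c4d5e6f", name: "Tickets" },
    { id: "ta_7a8b9c0d1e", name: "Employees" },
  ],
}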

View File

@ -14,6 +14,7 @@ export type FieldSubType =
| AutoFieldSubType
| JsonFieldSubType
| BBReferenceFieldSubType
| AttachmentSubType
export enum AutoFieldSubType {
CREATED_BY = "createdBy",
@ -39,6 +40,10 @@ export enum BBReferenceFieldSubType {
USERS = "users",
}
export enum AttachmentSubType {
IMAGE = "image",
}
export type SupportedSqlTypes =
| FieldType.STRING
| FieldType.BARCODEQR

View File

@ -2,6 +2,7 @@
// column size, position and whether it can be viewed
import { FieldType, FormulaResponseType } from "../row"
import {
AttachmentSubType,
AutoFieldSubType,
AutoReason,
BBReferenceFieldSubType,
@ -142,8 +143,15 @@ export interface BBReferenceSingleFieldMetadata
default?: string
}
export interface AttachmentFieldMetadata extends BaseFieldSchema {
export interface AttachmentFieldMetadata
extends Omit<BaseFieldSchema, "subtype"> {
type: FieldType.ATTACHMENTS
subtype?: AttachmentSubType
}
export interface SingleAttachmentFieldMetadata
extends Omit<BaseFieldSchema, "subtype"> {
type: FieldType.ATTACHMENT_SINGLE
subtype?: AttachmentSubType
}
export interface FieldConstraints {
@ -246,6 +254,7 @@ export type FieldSchema =
| BBReferenceFieldMetadata
| JsonFieldMetadata
| AttachmentFieldMetadata
| SingleAttachmentFieldMetadata
| BBReferenceSingleFieldMetadata
| ArrayFieldMetadata
| OptionsFieldMetadata