Merge branch 'master' into chore/api-typing-2

Michael Drury 2024-12-04 13:52:32 +00:00 committed by GitHub
commit afd779ad07
11 changed files with 31 additions and 24 deletions

View File

@@ -1,6 +1,6 @@
 {
   "$schema": "node_modules/lerna/schemas/lerna-schema.json",
-  "version": "3.2.22",
+  "version": "3.2.24",
   "npmClient": "yarn",
   "concurrency": 20,
   "command": {

View File

@@ -63,7 +63,7 @@
     if (!name?.length) {
       return "Name is required"
     }
-    if (snippets.some(snippet => snippet.name === name)) {
+    if (!snippet?.name && snippets.some(snippet => snippet.name === name)) {
      return "That name is already in use"
     }
     if (firstCharNumberRegex.test(name)) {
@@ -106,11 +106,7 @@
       Delete
     </Button>
   {/if}
-  <Button
-    cta
-    on:click={saveSnippet}
-    disabled={!snippet && (loading || nameError)}
-  >
+  <Button cta on:click={saveSnippet} disabled={!code || loading || nameError}>
     Save
   </Button>
 </svelte:fragment>
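The validation change above skips the duplicate-name check when an existing snippet is being edited, and the save button is now disabled only when there is no code, a pending load, or a name error. A minimal standalone sketch of the name rule, assuming illustrative names (the function, parameter names, and the last error message are not from the Budibase source):

interface Snippet {
  name: string
  code: string
}

// Returns an error message, or null when the proposed name is acceptable.
function validateSnippetName(
  name: string | undefined,
  snippets: Snippet[],
  existingSnippet?: Snippet
): string | null {
  if (!name?.length) {
    return "Name is required"
  }
  // When editing an existing snippet, the duplicate-name check is skipped
  // entirely (mirrors the `!snippet?.name &&` guard in the diff above).
  if (!existingSnippet?.name && snippets.some(s => s.name === name)) {
    return "That name is already in use"
  }
  if (/^\d/.test(name)) {
    return "Snippet names cannot start with a number" // assumed wording
  }
  return null
}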

View File

@@ -186,7 +186,7 @@
   <div class="snippet-popover">
     {#key hoveredSnippet}
       <CodeEditor
-        value={hoveredSnippet.code.trim()}
+        value={hoveredSnippet.code?.trim()}
         mode={EditorModes.JS}
         readonly
       />

View File

@@ -52,9 +52,16 @@
   let modal

   $: text = value?.label ?? "Choose an option"
-  $: tables = $tablesStore.list.map(table =>
-    format.table(table, $datasources.list)
-  )
+  $: tables = $tablesStore.list
+    .map(table => format.table(table, $datasources.list))
+    .sort((a, b) => {
+      // sort tables alphabetically, grouped by datasource
+      const dsComparison = a.datasourceName.localeCompare(b.datasourceName)
+      if (dsComparison !== 0) {
+        return dsComparison
+      }
+      return a.label.localeCompare(b.label)
+    })
   $: viewsV1 = $viewsStore.list.map(view => ({
     ...view,
     label: view.name,
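The new comparator keeps tables grouped by datasource and alphabetical within each group. A self-contained sketch of the same two-level comparison, with illustrative types and sample data:

interface TableOption {
  label: string
  datasourceName: string
}

// Compare by datasource first, then by table label.
const byDatasourceThenLabel = (a: TableOption, b: TableOption): number => {
  const dsComparison = a.datasourceName.localeCompare(b.datasourceName)
  return dsComparison !== 0 ? dsComparison : a.label.localeCompare(b.label)
}

const sorted = [
  { label: "Users", datasourceName: "Postgres" },
  { label: "Orders", datasourceName: "Budibase DB" },
  { label: "Accounts", datasourceName: "Postgres" },
].sort(byDatasourceThenLabel)
console.log(sorted.map(t => `${t.datasourceName}/${t.label}`))
// ["Budibase DB/Orders", "Postgres/Accounts", "Postgres/Users"]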

@@ -1 +1 @@
-Subproject commit e60f4b1b364fd49d2bb082f298757f83cb2032f0
+Subproject commit 7b8789efd940d9f8e5be9927243b19f07361c445

View File

@@ -48,7 +48,7 @@ jest.mock("@budibase/pro", () => ({
   ai: {
     LargeLanguageModel: {
       forCurrentTenant: async () => ({
-        initialised: true,
+        llm: {},
         run: jest.fn(() => `Mock LLM Response`),
         buildPromptFromAIOperation: jest.fn(),
       }),
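This mock change (repeated in the other automation and row-processor test files below) tracks the production change later in this commit: code now gates on the wrapper's `llm` property instead of an `initialised` flag, so the mock exposes an empty `llm` object to keep that branch active under test. A tiny illustrative check:

// Illustrative only: an empty object is truthy, so `llm: {}` satisfies
// guards of the form `if (llmWrapper.llm) { ... }` in the updated code.
const mockedWrapper = { llm: {} }
console.log(Boolean(mockedWrapper.llm)) // true, so the LLM branch runs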

View File

@@ -52,7 +52,7 @@ jest.mock("@budibase/pro", () => ({
   ai: {
     LargeLanguageModel: {
       forCurrentTenant: async () => ({
-        initialised: true,
+        llm: {},
         run: jest.fn(() => `Mock LLM Response`),
         buildPromptFromAIOperation: jest.fn(),
       }),

View File

@@ -106,13 +106,15 @@ export async function run({
     (await features.flags.isEnabled(FeatureFlag.BUDIBASE_AI)) &&
     (await pro.features.isBudibaseAIEnabled())

-  let llm
+  let llmWrapper
   if (budibaseAIEnabled || customConfigsEnabled) {
-    llm = await pro.ai.LargeLanguageModel.forCurrentTenant(inputs.model)
+    llmWrapper = await pro.ai.LargeLanguageModel.forCurrentTenant(
+      inputs.model
+    )
   }

-  response = llm?.initialised
-    ? await llm.run(inputs.prompt)
+  response = llmWrapper?.llm
+    ? await llmWrapper.run(inputs.prompt)
     : await legacyOpenAIPrompt(inputs)

   return {
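The step now branches on whether the tenant wrapper actually holds a configured model (`llmWrapper?.llm`) and falls back to the legacy OpenAI prompt otherwise. A minimal sketch of that dispatch, with the wrapper shape inferred from this diff rather than taken from the real @budibase/pro typings:

// Assumed minimal wrapper shape, inferred from how it is used above.
interface LLMWrapper {
  llm?: object
  run(prompt: string): Promise<string>
}

// Prefer the wrapper when a model is configured; otherwise use the legacy path.
async function runPrompt(
  prompt: string,
  llmWrapper: LLMWrapper | undefined,
  legacyOpenAIPrompt: (prompt: string) => Promise<string>
): Promise<string> {
  return llmWrapper?.llm ? llmWrapper.run(prompt) : legacyOpenAIPrompt(prompt)
}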

View File

@@ -27,7 +27,7 @@ jest.mock("@budibase/pro", () => ({
   ai: {
     LargeLanguageModel: {
       forCurrentTenant: jest.fn().mockImplementation(() => ({
-        initialised: true,
+        llm: {},
         init: jest.fn(),
         run: jest.fn(),
       })),

View File

@@ -18,7 +18,7 @@ jest.mock("@budibase/pro", () => ({
   ai: {
     LargeLanguageModel: {
       forCurrentTenant: async () => ({
-        initialised: true,
+        llm: {},
         run: jest.fn(() => "response from LLM"),
         buildPromptFromAIOperation: buildPromptMock,
       }),

View File

@@ -126,8 +126,10 @@ export async function processAIColumns<T extends Row | Row[]>(
     const numRows = Array.isArray(inputRows) ? inputRows.length : 1
     span?.addTags({ table_id: table._id, numRows })
     const rows = Array.isArray(inputRows) ? inputRows : [inputRows]
-    const llm = await pro.ai.LargeLanguageModel.forCurrentTenant("gpt-4o-mini")
-    if (rows && llm.initialised) {
+    const llmWrapper = await pro.ai.LargeLanguageModel.forCurrentTenant(
+      "gpt-4o-mini"
+    )
+    if (rows && llmWrapper.llm) {
       // Ensure we have snippet context
       await context.ensureSnippetContext()

@@ -151,14 +153,14 @@
           }
         }

-        const prompt = llm.buildPromptFromAIOperation({
+        const prompt = llmWrapper.buildPromptFromAIOperation({
          schema: aiSchema,
          row,
        })

        return tracer.trace("processAIColumn", {}, async span => {
          span?.addTags({ table_id: table._id, column })
-          const llmResponse = await llm.run(prompt!)
+          const llmResponse = await llmWrapper.run(prompt!)
           return {
             ...row,
             [column]: llmResponse,
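The rename to `llmWrapper` plus the `llmWrapper.llm` guard gives the per-row flow shown above: build a prompt from the AI operation, run it, and write the response into the column, but only when a model is configured. A sketch of that flow under the same assumed wrapper shape (not the real @budibase/pro typings):

// Assumed shape, inferred from how the wrapper is used in this hunk.
interface AIColumnLLMWrapper {
  llm?: object
  buildPromptFromAIOperation(op: { schema: unknown; row: unknown }): string
  run(prompt: string): Promise<string>
}

// Illustrative per-row helper: skip rows entirely when no model is configured.
async function fillAIColumn(
  wrapper: AIColumnLLMWrapper,
  column: string,
  row: Record<string, unknown>,
  schema: unknown
): Promise<Record<string, unknown>> {
  if (!wrapper.llm) {
    return row
  }
  const prompt = wrapper.buildPromptFromAIOperation({ schema, row })
  return { ...row, [column]: await wrapper.run(prompt) }
}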