Merge pull request #14965 from Budibase/ai-config-fixes-2

further AI config updates for the case where there's no configuration
This commit is contained in:
Martin McKeaveney 2024-11-05 13:52:20 +00:00 committed by GitHub
commit 3ed740ea85
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
9 changed files with 33 additions and 37 deletions

View File

@@ -1,8 +1,8 @@
<script> <script>
import { redirect } from "@roxi/routify" import { redirect } from "@roxi/routify"
import { licensing, featureFlags } from "stores/portal" import { featureFlags } from "stores/portal"
if ($featureFlags.AI_CUSTOM_CONFIGS && $licensing.customAIConfigsEnabled) { if ($featureFlags.AI_CUSTOM_CONFIGS) {
$redirect("./ai") $redirect("./ai")
} else { } else {
$redirect("./auth") $redirect("./auth")

@@ -1 +1 @@
Subproject commit 2ab8536b6005576684810d774f1ac22239218546 Subproject commit 04bee88597edb1edb88ed299d0597b587f0362ec

View File

@@ -56,6 +56,7 @@ jest.mock("@budibase/pro", () => ({
ai: { ai: {
LargeLanguageModel: { LargeLanguageModel: {
forCurrentTenant: async () => ({ forCurrentTenant: async () => ({
initialised: true,
run: jest.fn(() => `Mock LLM Response`), run: jest.fn(() => `Mock LLM Response`),
buildPromptFromAIOperation: jest.fn(), buildPromptFromAIOperation: jest.fn(),
}), }),

View File

@@ -54,6 +54,7 @@ jest.mock("@budibase/pro", () => ({
ai: { ai: {
LargeLanguageModel: { LargeLanguageModel: {
forCurrentTenant: async () => ({ forCurrentTenant: async () => ({
initialised: true,
run: jest.fn(() => `Mock LLM Response`), run: jest.fn(() => `Mock LLM Response`),
buildPromptFromAIOperation: jest.fn(), buildPromptFromAIOperation: jest.fn(),
}), }),

View File

@@ -106,21 +106,15 @@ export async function run({
(await features.flags.isEnabled(FeatureFlag.BUDIBASE_AI)) && (await features.flags.isEnabled(FeatureFlag.BUDIBASE_AI)) &&
(await pro.features.isBudibaseAIEnabled()) (await pro.features.isBudibaseAIEnabled())
let llm
if (budibaseAIEnabled || customConfigsEnabled) { if (budibaseAIEnabled || customConfigsEnabled) {
const llm = await pro.ai.LargeLanguageModel.forCurrentTenant(inputs.model) llm = await pro.ai.LargeLanguageModel.forCurrentTenant(inputs.model)
response = await llm.run(inputs.prompt)
} else {
// fallback to the default that uses the environment variable for backwards compat
if (!env.OPENAI_API_KEY) {
return {
success: false,
response:
"OpenAI API Key not configured - please add the OPENAI_API_KEY environment variable.",
}
}
response = await legacyOpenAIPrompt(inputs)
} }
response = llm?.initialised
? await llm.run(inputs.prompt)
: await legacyOpenAIPrompt(inputs)
return { return {
response, response,
success: true, success: true,

View File

@@ -1,9 +1,6 @@
import { getConfig, runStep, afterAll as _afterAll } from "./utilities" import { getConfig, runStep, afterAll as _afterAll } from "./utilities"
import { OpenAI } from "openai" import { OpenAI } from "openai"
import { import { setEnv as setCoreEnv } from "@budibase/backend-core"
withEnv as withCoreEnv,
setEnv as setCoreEnv,
} from "@budibase/backend-core"
import * as pro from "@budibase/pro" import * as pro from "@budibase/pro"
jest.mock("openai", () => ({ jest.mock("openai", () => ({
@@ -28,6 +25,7 @@ jest.mock("@budibase/pro", () => ({
ai: { ai: {
LargeLanguageModel: { LargeLanguageModel: {
forCurrentTenant: jest.fn().mockImplementation(() => ({ forCurrentTenant: jest.fn().mockImplementation(() => ({
initialised: true,
init: jest.fn(), init: jest.fn(),
run: jest.fn(), run: jest.fn(),
})), })),
@@ -63,16 +61,6 @@ describe("test the openai action", () => {
afterAll(_afterAll) afterAll(_afterAll)
it("should present the correct error message when the OPENAI_API_KEY variable isn't set", async () => {
await withCoreEnv({ OPENAI_API_KEY: "" }, async () => {
let res = await runStep("OPENAI", { prompt: OPENAI_PROMPT })
expect(res.response).toEqual(
"OpenAI API Key not configured - please add the OPENAI_API_KEY environment variable."
)
expect(res.success).toBeFalsy()
})
})
it("should be able to receive a response from ChatGPT given a prompt", async () => { it("should be able to receive a response from ChatGPT given a prompt", async () => {
const res = await runStep("OPENAI", { prompt: OPENAI_PROMPT }) const res = await runStep("OPENAI", { prompt: OPENAI_PROMPT })
expect(res.response).toEqual("This is a test") expect(res.response).toEqual("This is a test")

View File

@@ -18,6 +18,7 @@ jest.mock("@budibase/pro", () => ({
ai: { ai: {
LargeLanguageModel: { LargeLanguageModel: {
forCurrentTenant: async () => ({ forCurrentTenant: async () => ({
initialised: true,
run: jest.fn(() => "response from LLM"), run: jest.fn(() => "response from LLM"),
buildPromptFromAIOperation: buildPromptMock, buildPromptFromAIOperation: buildPromptMock,
}), }),

View File

@@ -108,7 +108,7 @@ export async function processAIColumns<T extends Row | Row[]>(
span?.addTags({ table_id: table._id, numRows }) span?.addTags({ table_id: table._id, numRows })
const rows = Array.isArray(inputRows) ? inputRows : [inputRows] const rows = Array.isArray(inputRows) ? inputRows : [inputRows]
const llm = await pro.ai.LargeLanguageModel.forCurrentTenant("gpt-4o-mini") const llm = await pro.ai.LargeLanguageModel.forCurrentTenant("gpt-4o-mini")
if (rows) { if (rows && llm.initialised) {
// Ensure we have snippet context // Ensure we have snippet context
await context.ensureSnippetContext() await context.ensureSnippetContext()

View File

@@ -12,27 +12,29 @@ import {
} from "@budibase/backend-core" } from "@budibase/backend-core"
import { checkAnyUserExists } from "../../../utilities/users" import { checkAnyUserExists } from "../../../utilities/users"
import { import {
AIConfig,
AIInnerConfig,
Config, Config,
ConfigType, ConfigType,
Ctx, Ctx,
GetPublicOIDCConfigResponse, GetPublicOIDCConfigResponse,
GetPublicSettingsResponse, GetPublicSettingsResponse,
GoogleInnerConfig, GoogleInnerConfig,
isAIConfig,
isGoogleConfig, isGoogleConfig,
isOIDCConfig, isOIDCConfig,
isSettingsConfig, isSettingsConfig,
isSMTPConfig, isSMTPConfig,
OIDCConfigs, OIDCConfigs,
OIDCLogosConfig,
PASSWORD_REPLACEMENT,
QuotaUsageType,
SettingsBrandingConfig, SettingsBrandingConfig,
SettingsInnerConfig, SettingsInnerConfig,
SSOConfig, SSOConfig,
SSOConfigType, SSOConfigType,
StaticQuotaName,
UserCtx, UserCtx,
OIDCLogosConfig,
AIConfig,
PASSWORD_REPLACEMENT,
isAIConfig,
AIInnerConfig,
} from "@budibase/types" } from "@budibase/types"
import * as pro from "@budibase/pro" import * as pro from "@budibase/pro"
@@ -83,6 +85,12 @@ const getEventFns = async (config: Config, existing?: Config) => {
fns.push(events.email.SMTPUpdated) fns.push(events.email.SMTPUpdated)
} else if (isAIConfig(config)) { } else if (isAIConfig(config)) {
fns.push(() => events.ai.AIConfigUpdated) fns.push(() => events.ai.AIConfigUpdated)
if (
Object.keys(existing.config).length > Object.keys(config.config).length
) {
fns.push(() => pro.quotas.removeCustomAIConfig())
}
fns.push(() => pro.quotas.addCustomAIConfig())
} else if (isGoogleConfig(config)) { } else if (isGoogleConfig(config)) {
fns.push(() => events.auth.SSOUpdated(ConfigType.GOOGLE)) fns.push(() => events.auth.SSOUpdated(ConfigType.GOOGLE))
if (!existing.config.activated && config.config.activated) { if (!existing.config.activated && config.config.activated) {
@@ -248,7 +256,6 @@ export async function save(ctx: UserCtx<Config>) {
if (existingConfig) { if (existingConfig) {
await verifyAIConfig(config, existingConfig) await verifyAIConfig(config, existingConfig)
} }
await pro.quotas.addCustomAIConfig()
break break
} }
} catch (err: any) { } catch (err: any) {
@@ -518,7 +525,11 @@ export async function destroy(ctx: UserCtx) {
await db.remove(id, rev) await db.remove(id, rev)
await cache.destroy(cache.CacheKey.CHECKLIST) await cache.destroy(cache.CacheKey.CHECKLIST)
if (id === configs.generateConfigID(ConfigType.AI)) { if (id === configs.generateConfigID(ConfigType.AI)) {
await pro.quotas.removeCustomAIConfig() await pro.quotas.set(
StaticQuotaName.AI_CUSTOM_CONFIGS,
QuotaUsageType.STATIC,
0
)
} }
ctx.body = { message: "Config deleted successfully" } ctx.body = { message: "Config deleted successfully" }
} catch (err: any) { } catch (err: any) {