move check for OpenAI config variable to legacy path

Martin McKeaveney 2024-09-17 16:29:44 +01:00
parent 224be10b6c
commit e40f397c25
3 changed files with 15 additions and 19 deletions
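In effect, the OPENAI_API_KEY guard no longer runs at the top of run() for every invocation of the OpenAI automation step; it only applies on the legacy fallback path that still builds its own OpenAI client from the environment variable, so Budibase AI and custom AI configurations work without the variable being set. A minimal sketch of the resulting branch in run(), assembled from the hunks below (identifiers come from the diff itself; surrounding validation and error handling are elided):

    // Sketch of the post-commit flow - not verbatim source, elided parts noted in comments.
    if (budibaseAIEnabled || customConfigsEnabled) {
      // Budibase AI or a custom AI config is in play: the pro LLM wrapper resolves the
      // provider and credentials, so no environment variable is required here.
      const llm = new pro.ai.LargeLanguageModel(inputs.model)
      await llm.init()
      response = await llm.run(inputs.prompt)
    } else {
      // Legacy path: the API key check now lives here, where it is actually needed.
      if (!env.OPENAI_API_KEY) {
        return {
          success: false,
          response:
            "OpenAI API Key not configured - please add the OPENAI_API_KEY environment variable.",
        }
      }
      response = await legacyOpenAIPrompt(inputs)
    }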

@@ -1 +1 @@
-Subproject commit ff141defc6eb744f7edac788eaaaa046423cfa7f
+Subproject commit 72e9846146b46efc7cbdc69daea17761cb45f1d3


@@ -63,6 +63,11 @@ export const definition: AutomationStepDefinition = {
   },
 }
+/**
+ * Maintains backward compatibility with automation steps created before the introduction
+ * of custom configurations and Budibase AI
+ * @param inputs - automation inputs from the OpenAI automation step.
+ */
 async function legacyOpenAIPrompt(inputs: OpenAIStepInputs) {
   const openai = new OpenAI({
     apiKey: env.OPENAI_API_KEY,
@@ -85,14 +90,6 @@ export async function run({
 }: {
   inputs: OpenAIStepInputs
 }): Promise<OpenAIStepOutputs> {
-  if (!env.OPENAI_API_KEY) {
-    return {
-      success: false,
-      response:
-        "OpenAI API Key not configured - please add the OPENAI_API_KEY environment variable.",
-    }
-  }
   if (inputs.prompt == null) {
     return {
       success: false,
@@ -106,20 +103,19 @@ export async function run({
   const budibaseAIEnabled = await pro.features.isBudibaseAIEnabled()
   if (budibaseAIEnabled || customConfigsEnabled) {
-    // Enterprise has custom configs
-    // if custom configs are enabled full stop
-    // Don't use their budibase AI credits, unless it uses the budibase AI configuration
-    // TODO: grab the config from the database (maybe wrap this in the pro AI module)
-    // TODO: pass it into the model to execute the prompt
-    // TODO: if in cloud and budibaseAI is enabled, use the standard budibase AI config
-    // Make sure it uses their credits
-    // Should be handled in the LLM wrapper in pro
     const llm = new pro.ai.LargeLanguageModel(inputs.model)
     await llm.init()
     response = await llm.run(inputs.prompt)
   } else {
     // fallback to the default that uses the environment variable for backwards compat
+    if (!env.OPENAI_API_KEY) {
+      return {
+        success: false,
+        response:
+          "OpenAI API Key not configured - please add the OPENAI_API_KEY environment variable.",
+      }
+    }
     response = await legacyOpenAIPrompt(inputs)
   }
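The body of legacyOpenAIPrompt is truncated in the hunk above. For orientation, here is a hedged sketch of what the legacy path presumably does with the client it constructs; the chat-completion call and return shape are assumptions based on typical usage of the openai Node SDK, not lines from this diff:

    // Sketch only: the diff confirms the client construction from env.OPENAI_API_KEY;
    // the completion call below is assumed, typical usage of the openai SDK.
    async function legacyOpenAIPrompt(inputs: OpenAIStepInputs) {
      const openai = new OpenAI({
        apiKey: env.OPENAI_API_KEY,
      })
      const completion = await openai.chat.completions.create({
        model: inputs.model,
        messages: [{ role: "user", content: inputs.prompt }],
      })
      return completion?.choices[0]?.message?.content
    }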


@@ -346,7 +346,7 @@ export async function find(ctx: UserCtx) {
   }
   if (type === ConfigType.AI) {
-    await pro.ai.getAIConfig(scopedConfig)
+    await pro.ai.enrichAIConfig(scopedConfig)
     // Strip out the API Keys from the response so they don't show in the UI
     for (const key in scopedConfig.config) {
       if (scopedConfig.config[key].apiKey) {
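The stripping loop is cut off at the end of the hunk above. A small illustrative sketch of what the comment describes; whether Budibase deletes the key or masks it with a placeholder is not visible in this diff, so the delete below is an assumption:

    // Illustrative only: drop provider API keys before the config is returned to the UI.
    for (const key in scopedConfig.config) {
      if (scopedConfig.config[key].apiKey) {
        delete scopedConfig.config[key].apiKey
      }
    }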