diff --git a/packages/pro b/packages/pro
index f2cbe5aff7..b5a9a6a6b5 160000
--- a/packages/pro
+++ b/packages/pro
@@ -1 +1 @@
-Subproject commit f2cbe5aff7645eb9b0b4e864924a1e1171ad85bf
+Subproject commit b5a9a6a6b50749e1ce8af9589bf8a08fceabf58f
diff --git a/packages/server/src/automations/steps/openai.ts b/packages/server/src/automations/steps/openai.ts
index fc61af92df..758db25b43 100644
--- a/packages/server/src/automations/steps/openai.ts
+++ b/packages/server/src/automations/steps/openai.ts
@@ -41,7 +41,7 @@ export async function run({
   try {
     let response

-    const llm = await ai.getLLM(inputs.model)
+    const llm = await ai.getLLM({ model: inputs.model })
     response = llm
       ? (await llm.prompt(inputs.prompt)).message
       : await legacyOpenAIPrompt(inputs)
diff --git a/packages/types/src/sdk/ai.ts b/packages/types/src/sdk/ai.ts
index 8035f1983f..5fc5a83ce3 100644
--- a/packages/types/src/sdk/ai.ts
+++ b/packages/types/src/sdk/ai.ts
@@ -95,6 +95,7 @@ export type AIColumnSchema =
 export interface LLMConfigOptions {
   model: string
   apiKey?: string
+  maxTokens?: number
 }

 export interface LLMProviderConfig extends LLMConfigOptions {