From 98690b11a5e98e52af0627580694bd4b7fb494ba Mon Sep 17 00:00:00 2001
From: Adria Navarro
Date: Thu, 15 May 2025 16:41:09 +0200
Subject: [PATCH 1/2] Improve setting maxtokens

---
 packages/pro                                    | 2 +-
 packages/server/src/automations/steps/openai.ts | 2 +-
 packages/types/src/sdk/ai.ts                    | 1 +
 3 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/packages/pro b/packages/pro
index f2cbe5aff7..b5a9a6a6b5 160000
--- a/packages/pro
+++ b/packages/pro
@@ -1 +1 @@
-Subproject commit f2cbe5aff7645eb9b0b4e864924a1e1171ad85bf
+Subproject commit b5a9a6a6b50749e1ce8af9589bf8a08fceabf58f
diff --git a/packages/server/src/automations/steps/openai.ts b/packages/server/src/automations/steps/openai.ts
index fc61af92df..758db25b43 100644
--- a/packages/server/src/automations/steps/openai.ts
+++ b/packages/server/src/automations/steps/openai.ts
@@ -41,7 +41,7 @@ export async function run({
   try {
     let response
 
-    const llm = await ai.getLLM(inputs.model)
+    const llm = await ai.getLLM({ model: inputs.model })
     response = llm
       ? (await llm.prompt(inputs.prompt)).message
       : await legacyOpenAIPrompt(inputs)
diff --git a/packages/types/src/sdk/ai.ts b/packages/types/src/sdk/ai.ts
index 8035f1983f..5fc5a83ce3 100644
--- a/packages/types/src/sdk/ai.ts
+++ b/packages/types/src/sdk/ai.ts
@@ -95,6 +95,7 @@ export type AIColumnSchema =
 export interface LLMConfigOptions {
   model: string
   apiKey?: string
+  maxTokens?: number
 }
 
 export interface LLMProviderConfig extends LLMConfigOptions {

From a644505345ade1bb77fb2779f3daa4c6e07c40da Mon Sep 17 00:00:00 2001
From: Adria Navarro
Date: Thu, 15 May 2025 16:45:28 +0200
Subject: [PATCH 2/2] Fix types

---
 packages/pro | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/packages/pro b/packages/pro
index b5a9a6a6b5..271b8677c3 160000
--- a/packages/pro
+++ b/packages/pro
@@ -1 +1 @@
-Subproject commit b5a9a6a6b50749e1ce8af9589bf8a08fceabf58f
+Subproject commit 271b8677c3ea814395cd67a10137900ec3a34dc7
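Reviewer note, not part of the patch series: a minimal sketch of how the new `maxTokens` field on `LLMConfigOptions` might be consumed by a caller of `ai.getLLM`. Only the `LLMConfigOptions` fields, the `getLLM({ model: ... })` call shape, and the `(await llm.prompt(...)).message` access are taken from the hunks above; the `LLMSdk` interface, the `promptWithCap` helper, and the `"gpt-4o"` / `512` values are hypothetical placeholders, and it is an assumption that `getLLM` accepts the full options object including `maxTokens`.

```ts
// Mirrors LLMConfigOptions from packages/types/src/sdk/ai.ts after this patch.
interface LLMConfigOptions {
  model: string
  apiKey?: string
  maxTokens?: number
}

// Assumed shape of the SDK surface used in openai.ts: getLLM may resolve to
// undefined when no LLM is configured, and prompt() resolves to { message }.
interface LLMSdk {
  getLLM(opts: LLMConfigOptions): Promise<
    { prompt(text: string): Promise<{ message: string }> } | undefined
  >
}

// Hypothetical caller capping response length via the new maxTokens option.
async function promptWithCap(
  ai: LLMSdk,
  prompt: string
): Promise<string | undefined> {
  const llm = await ai.getLLM({ model: "gpt-4o", maxTokens: 512 })
  return llm ? (await llm.prompt(prompt)).message : undefined
}
```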