Improve setting maxTokens

This commit is contained in:
Adria Navarro 2025-05-15 16:41:09 +02:00
parent 3e615249d7
commit 98690b11a5
3 changed files with 3 additions and 2 deletions

@ -1 +1 @@
Subproject commit f2cbe5aff7645eb9b0b4e864924a1e1171ad85bf
Subproject commit b5a9a6a6b50749e1ce8af9589bf8a08fceabf58f

View File

@ -41,7 +41,7 @@ export async function run({
try {
let response
const llm = await ai.getLLM(inputs.model)
const llm = await ai.getLLM({ model: inputs.model })
response = llm
? (await llm.prompt(inputs.prompt)).message
: await legacyOpenAIPrompt(inputs)

View File

@ -95,6 +95,7 @@ export type AIColumnSchema =
/**
 * Options passed when requesting an LLM instance (e.g. `ai.getLLM({ model })`).
 */
export interface LLMConfigOptions {
// Identifier of the model to use. Required.
model: string
// Provider API key; optional — presumably falls back to a configured default. TODO confirm.
apiKey?: string
// Optional cap on the number of tokens the model may generate. NOTE(review): exact
// provider semantics (completion-only vs. total tokens) not visible here — confirm.
maxTokens?: number
}
export interface LLMProviderConfig extends LLMConfigOptions {