Merge pull request #16158 from Budibase/BUDI-9238/fix-truncated-json
Fixing AI table generation
commit 90aa62e643
@@ -1 +1 @@
-Subproject commit f2cbe5aff7645eb9b0b4e864924a1e1171ad85bf
+Subproject commit 271b8677c3ea814395cd67a10137900ec3a34dc7
@@ -41,7 +41,7 @@ export async function run({
   try {
     let response
-    const llm = await ai.getLLM(inputs.model)
+    const llm = await ai.getLLM({ model: inputs.model })
     response = llm
       ? (await llm.prompt(inputs.prompt)).message
       : await legacyOpenAIPrompt(inputs)
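For context, the only functional change in this hunk is that `ai.getLLM` now receives an options object (`{ model: inputs.model }`) instead of a bare model string. Below is a minimal, self-contained sketch of how the call site reads after the change; the `LLM` and prompt-result shapes, the `getLLM` stub, and `legacyPrompt` are assumptions inferred from the diff, not Budibase's real modules.

```ts
// Sketch only: names and shapes are inferred from the diff, not copied from Budibase.
interface PromptResult {
  message: string
}

interface LLM {
  prompt(prompt: string): Promise<PromptResult>
}

// Assumed: getLLM takes an options object ({ model }) and resolves to
// undefined when no LLM provider is configured.
async function getLLM(opts: { model: string }): Promise<LLM | undefined> {
  if (!opts.model) return undefined
  return {
    prompt: async prompt => ({ message: `stubbed response for: ${prompt}` }),
  }
}

// Hypothetical stand-in for legacyOpenAIPrompt(inputs) from the diff.
async function legacyPrompt(inputs: { prompt: string }): Promise<string> {
  return `legacy response for: ${inputs.prompt}`
}

export async function run(inputs: { model: string; prompt: string }) {
  const llm = await getLLM({ model: inputs.model })
  // Prefer the configured LLM; fall back to the legacy path otherwise.
  const response = llm
    ? (await llm.prompt(inputs.prompt)).message
    : await legacyPrompt(inputs)
  return { success: true, response }
}
```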
@@ -95,6 +95,7 @@ export type AIColumnSchema =
 export interface LLMConfigOptions {
   model: string
   apiKey?: string
+  maxTokens?: number
 }

 export interface LLMProviderConfig extends LLMConfigOptions {
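The other change adds an optional `maxTokens` field to `LLMConfigOptions`. A minimal sketch of the updated interface and a hypothetical config object is below; how Budibase threads `maxTokens` through to the provider is not shown in this diff, and tying it to the truncated-JSON fix named in the branch is an inference, not something the diff states.

```ts
// Sketch only: the interface mirrors the diff; the usage below is hypothetical.
export interface LLMConfigOptions {
  model: string
  apiKey?: string
  maxTokens?: number
}

// Hypothetical caller: raising maxTokens is presumably what keeps large
// AI-generated table schemas from being cut off mid-response.
const config: LLMConfigOptions = {
  model: "example-model",
  maxTokens: 4096,
}

console.log(config)
```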