diff --git a/lerna.json b/lerna.json
index c20b677183..ab6cf6a850 100644
--- a/lerna.json
+++ b/lerna.json
@@ -1,6 +1,6 @@
 {
   "$schema": "node_modules/lerna/schemas/lerna-schema.json",
-  "version": "3.10.7",
+  "version": "3.11.0",
   "npmClient": "yarn",
   "concurrency": 20,
   "command": {
diff --git a/packages/pro b/packages/pro
index f2cbe5aff7..271b8677c3 160000
--- a/packages/pro
+++ b/packages/pro
@@ -1 +1 @@
-Subproject commit f2cbe5aff7645eb9b0b4e864924a1e1171ad85bf
+Subproject commit 271b8677c3ea814395cd67a10137900ec3a34dc7
diff --git a/packages/server/src/api/routes/tests/queries/generic-sql.spec.ts b/packages/server/src/api/routes/tests/queries/generic-sql.spec.ts
index 33434dad05..5594721aa8 100644
--- a/packages/server/src/api/routes/tests/queries/generic-sql.spec.ts
+++ b/packages/server/src/api/routes/tests/queries/generic-sql.spec.ts
@@ -664,7 +664,7 @@ if (descriptions.length) {
         parameters: { number: 0 },
       })
 
-      const rows = await client(tableName).select("*")
+      const rows = await client(tableName).select("*").orderBy("id")
       expect(rows).toHaveLength(6)
       expect(rows[5].number).toEqual(0)
diff --git a/packages/server/src/automations/steps/openai.ts b/packages/server/src/automations/steps/openai.ts
index fc61af92df..758db25b43 100644
--- a/packages/server/src/automations/steps/openai.ts
+++ b/packages/server/src/automations/steps/openai.ts
@@ -41,7 +41,7 @@ export async function run({
   try {
     let response
-    const llm = await ai.getLLM(inputs.model)
+    const llm = await ai.getLLM({ model: inputs.model })
     response = llm
       ? (await llm.prompt(inputs.prompt)).message
       : await legacyOpenAIPrompt(inputs)
diff --git a/packages/types/src/sdk/ai.ts b/packages/types/src/sdk/ai.ts
index 8035f1983f..5fc5a83ce3 100644
--- a/packages/types/src/sdk/ai.ts
+++ b/packages/types/src/sdk/ai.ts
@@ -95,6 +95,7 @@ export type AIColumnSchema =
 export interface LLMConfigOptions {
   model: string
   apiKey?: string
+  maxTokens?: number
 }
 
 export interface LLMProviderConfig extends LLMConfigOptions {
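
Note (illustrative, not part of the diff): the sketch below shows how a caller might pass the new maxTokens field through the options-object call shape that getLLM now takes. The local LLMConfigOptions interface mirrors the one extended in packages/types/src/sdk/ai.ts; the ai parameter's type and the 1000-token value are assumptions made for this example only.

// Assumed shape, mirroring the interface updated in packages/types/src/sdk/ai.ts.
interface LLMConfigOptions {
  model: string
  apiKey?: string
  maxTokens?: number
}

// Illustrative helper: builds an options object and prompts the model using the
// same call shape as packages/server/src/automations/steps/openai.ts. The ai
// argument's type and the 1000-token cap are assumptions, not taken from this diff.
async function promptWithCap(
  ai: {
    getLLM(
      opts: LLMConfigOptions
    ): Promise<{ prompt(p: string): Promise<{ message: string }> } | undefined>
  },
  model: string,
  prompt: string
): Promise<string | undefined> {
  const llm = await ai.getLLM({ model, maxTokens: 1000 })
  return llm ? (await llm.prompt(prompt)).message : undefined
}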