diff --git a/packages/server/src/automations/steps/openai.ts b/packages/server/src/automations/steps/openai.ts index 79586bb712..a7c22ffd0c 100644 --- a/packages/server/src/automations/steps/openai.ts +++ b/packages/server/src/automations/steps/openai.ts @@ -1,4 +1,4 @@ -import { Configuration, OpenAIApi } from "openai"; +import { Configuration, OpenAIApi } from "openai" import { AutomationActionStepId, AutomationStepSchema, @@ -7,7 +7,7 @@ import { AutomationIOType, } from "@budibase/types" import * as automationUtils from "../automationUtils" -import environment from "../../environment"; +import environment from "../../environment" enum Model { GPT_35_TURBO = "gpt-3.5-turbo", @@ -61,7 +61,8 @@ export async function run({ inputs, context }: AutomationStepInput) { if (!environment.OPENAI_API_KEY) { return { success: false, - response: "OpenAI API Key not configured - please add the OPENAI_API_KEY environment variable.", + response: + "OpenAI API Key not configured - please add the OPENAI_API_KEY environment variable.", } } @@ -75,19 +76,19 @@ export async function run({ inputs, context }: AutomationStepInput) { try { const configuration = new Configuration({ apiKey: environment.OPENAI_API_KEY, - }); + }) - const openai = new OpenAIApi(configuration); + const openai = new OpenAIApi(configuration) const completion = await openai.createChatCompletion({ model: inputs.model, messages: [ { role: "user", - content: inputs.prompt - } + content: inputs.prompt, + }, ], - }); + }) let response = completion?.data?.choices[0]?.message?.content diff --git a/packages/server/src/automations/tests/openai.spec.ts b/packages/server/src/automations/tests/openai.spec.ts index 3ba9463f21..032f670db1 100644 --- a/packages/server/src/automations/tests/openai.spec.ts +++ b/packages/server/src/automations/tests/openai.spec.ts @@ -1,23 +1,26 @@ const setup = require("./utilities") -import environment from "../../environment"; +import environment from "../../environment" import openai from "openai" -jest.mock("openai", jest.fn(() => ({ - Configuration: jest.fn(), - OpenAIApi: jest.fn(() => ({ - createChatCompletion: jest.fn(() => ({ - data: { - choices: [ - { - message: { - content: "This is a test" +jest.mock( + "openai", + jest.fn(() => ({ + Configuration: jest.fn(), + OpenAIApi: jest.fn(() => ({ + createChatCompletion: jest.fn(() => ({ + data: { + choices: [ + { + message: { + content: "This is a test", + }, }, - } - ] - } - })) + ], + }, + })), + })), })) -}))) +) const OPENAI_PROMPT = "What is the meaning of life?" @@ -34,52 +37,50 @@ describe("test the openai action", () => { afterAll(setup.afterAll) - it("should present the correct error message when the OPENAI_API_KEY variable isn't set", async () => { delete environment.OPENAI_API_KEY - let res = await setup.runStep("OPEN_AI", - { - prompt: OPENAI_PROMPT - } + let res = await setup.runStep("OPEN_AI", { + prompt: OPENAI_PROMPT, + }) + expect(res.response).toEqual( + "OpenAI API Key not configured - please add the OPENAI_API_KEY environment variable." 
) - expect(res.response).toEqual("OpenAI API Key not configured - please add the OPENAI_API_KEY environment variable.") expect(res.success).toBeFalsy() }) it("should be able to receive a response from ChatGPT given a prompt", async () => { - const res = await setup.runStep("OPEN_AI", - { - prompt: OPENAI_PROMPT - } - ) + const res = await setup.runStep("OPEN_AI", { + prompt: OPENAI_PROMPT, + }) expect(res.response).toEqual("This is a test") expect(res.success).toBeTruthy() }) - it("should present the correct error message when a prompt is not provided", async () => { - const res = await setup.runStep("OPEN_AI", - { - prompt: null - } + const res = await setup.runStep("OPEN_AI", { + prompt: null, + }) + expect(res.response).toEqual( + "Budibase OpenAI Automation Failed: No prompt supplied" ) - expect(res.response).toEqual("Budibase OpenAI Automation Failed: No prompt supplied") expect(res.success).toBeFalsy() }) it("should present the correct error message when an error is thrown from the createChatCompletion call", async () => { openai.OpenAIApi.mockImplementation(() => ({ createChatCompletion: jest.fn(() => { - throw new Error("An error occurred while calling createChatCompletion"); + throw new Error("An error occurred while calling createChatCompletion") }), - })); + })) const res = await setup.runStep("OPEN_AI", { prompt: OPENAI_PROMPT, - }); + }) - expect(res.response).toEqual("Error: An error occurred while calling createChatCompletion") + expect(res.response).toEqual( + "Error: An error occurred while calling createChatCompletion" + ) expect(res.success).toBeFalsy() - }); + }) })
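
A minimal sketch (not part of the diff above, assuming nothing beyond what the changed files show): it mirrors how the step's optional-chaining lookup `completion?.data?.choices[0]?.message?.content` resolves against the payload shape the Jest mock in openai.spec.ts returns. The `CompletionLike` type and `extractResponse` helper are hypothetical names introduced only for illustration; the real step inlines the expression directly.

// Hypothetical stand-in for the subset of the createChatCompletion result the step reads.
interface CompletionLike {
  data?: {
    choices: Array<{ message?: { content?: string } }>
  }
}

// Mirrors the step's `completion?.data?.choices[0]?.message?.content` extraction.
function extractResponse(completion?: CompletionLike): string | undefined {
  return completion?.data?.choices[0]?.message?.content
}

// Payload shape returned by the mocked createChatCompletion in openai.spec.ts.
const mocked: CompletionLike = {
  data: {
    choices: [{ message: { content: "This is a test" } }],
  },
}

console.log(extractResponse(mocked)) // "This is a test" - what the happy-path test asserts
console.log(extractResponse(undefined)) // undefined - optional chaining keeps the step from throwing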