From 9cbaa02f5f305542453493d0e9520cf0ca7681bc Mon Sep 17 00:00:00 2001 From: Martin McKeaveney Date: Wed, 18 Sep 2024 14:01:54 +0100 Subject: [PATCH] add test for automation run step when LLM feature flags are on --- packages/pro | 2 +- .../src/automations/tests/openai.spec.ts | 35 ++++++++++++++++++- 2 files changed, 35 insertions(+), 2 deletions(-) diff --git a/packages/pro b/packages/pro index 5daf17e325..320f8ecf8a 160000 --- a/packages/pro +++ b/packages/pro @@ -1 +1 @@ -Subproject commit 5daf17e32595e539f1f4a92b59a2ea2854d9dbd4 +Subproject commit 320f8ecf8ae769995590ddc4e3679ef7c110bc11 diff --git a/packages/server/src/automations/tests/openai.spec.ts b/packages/server/src/automations/tests/openai.spec.ts index 3a5a57475a..0a64411a12 100644 --- a/packages/server/src/automations/tests/openai.spec.ts +++ b/packages/server/src/automations/tests/openai.spec.ts @@ -4,6 +4,7 @@ import { withEnv as withCoreEnv, setEnv as setCoreEnv, } from "@budibase/backend-core" +import * as pro from "@budibase/pro" jest.mock("openai", () => ({ OpenAI: jest.fn().mockImplementation(() => ({ @@ -23,6 +24,20 @@ jest.mock("openai", () => ({ })), })) +jest.mock("@budibase/pro", () => ({ + ...jest.requireActual("@budibase/pro"), + ai: { + LargeLanguageModel: jest.fn().mockImplementation(() => ({ + init: jest.fn(), + run: jest.fn(), + })), + }, + features: { + isAICustomConfigsEnabled: jest.fn(), + isBudibaseAIEnabled: jest.fn(), + } +})) + const mockedOpenAI = OpenAI as jest.MockedClass const OPENAI_PROMPT = "What is the meaning of life?" 
@@ -41,6 +56,7 @@ describe("test the openai action", () => { afterEach(() => { resetEnv() + jest.clearAllMocks() }) afterAll(_afterAll) @@ -86,7 +102,7 @@ describe("test the openai action", () => { ) const res = await runStep("OPENAI", { - prompt: OPENAI_PROMPT, + prompt: OPENAI_PROMPT, }) expect(res.response).toEqual( @@ -94,4 +110,21 @@ describe("test the openai action", () => { ) expect(res.success).toBeFalsy() }) + + it("should ensure that the pro AI module is called when the budibase AI features are enabled", async () => { + jest.spyOn(pro.features, "isBudibaseAIEnabled").mockResolvedValue(true) + jest.spyOn(pro.features, "isAICustomConfigsEnabled").mockResolvedValue(true) + + const prompt = "What is the meaning of life?" + await runStep("OPENAI", { + model: "gpt-4o-mini", + prompt + }) + + expect(pro.ai.LargeLanguageModel).toHaveBeenCalledWith("gpt-4o-mini") + + const llmInstance = pro.ai.LargeLanguageModel.mock.results[0].value + expect(llmInstance.init).toHaveBeenCalled() + expect(llmInstance.run).toHaveBeenCalledWith(prompt) + })