
add test for automation run step when LLM feature flags are on

Martin McKeaveney 2024-09-18 14:01:54 +01:00
parent cb8d0984b1
commit 9cbaa02f5f
2 changed files with 35 additions and 2 deletions

@@ -1 +1 @@
-Subproject commit 5daf17e32595e539f1f4a92b59a2ea2854d9dbd4
+Subproject commit 320f8ecf8ae769995590ddc4e3679ef7c110bc11


@@ -4,6 +4,7 @@ import {
  withEnv as withCoreEnv,
  setEnv as setCoreEnv,
} from "@budibase/backend-core"
+import * as pro from "@budibase/pro"

jest.mock("openai", () => ({
  OpenAI: jest.fn().mockImplementation(() => ({
@@ -23,6 +24,20 @@ jest.mock("openai", () => ({
  })),
}))

+jest.mock("@budibase/pro", () => ({
+  ...jest.requireActual("@budibase/pro"),
+  ai: {
+    LargeLanguageModel: jest.fn().mockImplementation(() => ({
+      init: jest.fn(),
+      run: jest.fn(),
+    })),
+  },
+  features: {
+    isAICustomConfigsEnabled: jest.fn(),
+    isBudibaseAIEnabled: jest.fn(),
+  },
+}))

const mockedOpenAI = OpenAI as jest.MockedClass<typeof OpenAI>

const OPENAI_PROMPT = "What is the meaning of life?"
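For context, the mock above follows Jest's partial-module-mock pattern: jest.requireActual keeps every real export of "@budibase/pro", and only the ai and features namespaces are replaced. A minimal standalone sketch of the same pattern, assuming a stock CommonJS Jest + TypeScript setup and using Node's built-in "os" module purely for illustration (not Budibase code):

// Keep the real "os" exports, replace only hostname with a jest.fn().
jest.mock("os", () => ({
  ...jest.requireActual<typeof import("os")>("os"),
  hostname: jest.fn(() => "mocked-host"),
}))

import { hostname, platform } from "os"

it("overrides only the mocked export", () => {
  expect(hostname()).toBe("mocked-host") // replaced by the factory above
  expect(typeof platform()).toBe("string") // real implementation still available
})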
@@ -41,6 +56,7 @@ describe("test the openai action", () => {
  afterEach(() => {
    resetEnv()
+    jest.clearAllMocks()
  })

  afterAll(_afterAll)
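The jest.clearAllMocks() added to afterEach is what keeps the module-level mocks usable across tests: it clears every mock's recorded calls and results (but not their implementations), so call assertions in one test are not polluted by calls made in earlier ones. A small sketch of the failure mode it prevents, using a hypothetical constructor mock rather than the pro export:

const LargeLanguageModel = jest.fn() // hypothetical stand-in, not the pro export

afterEach(() => {
  jest.clearAllMocks() // wipes mock.calls / mock.results, keeps implementations
})

it("records one construction", () => {
  new LargeLanguageModel("gpt-4o-mini")
  expect(LargeLanguageModel).toHaveBeenCalledTimes(1)
})

it("starts from a clean slate thanks to afterEach", () => {
  expect(LargeLanguageModel).not.toHaveBeenCalled()
})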
@@ -86,7 +102,7 @@ describe("test the openai action", () => {
    )

    const res = await runStep("OPENAI", {
      prompt: OPENAI_PROMPT,
    })

    expect(res.response).toEqual(
@@ -94,4 +110,21 @@ describe("test the openai action", () => {
    )
    expect(res.success).toBeFalsy()
  })
it("should ensure that the pro AI module is called when the budibase AI features are enabled", async () => {
jest.spyOn(pro.features, "isBudibaseAIEnabled").mockResolvedValue(true)
jest.spyOn(pro.features, "isAICustomConfigsEnabled").mockResolvedValue(true)
const prompt = "What is the meaning of life?"
await runStep("OPENAI", {
model: "gpt-4o-mini",
prompt
})
expect(pro.ai.LargeLanguageModel).toHaveBeenCalledWith("gpt-4o-mini")
const llmInstance = pro.ai.LargeLanguageModel.mock.results[0].value
expect(llmInstance.init).toHaveBeenCalled()
expect(llmInstance.run).toHaveBeenCalledWith(prompt)
})
}) })
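The new test leans on a Jest detail worth spelling out: when a class is mocked with jest.fn().mockImplementation(() => ({ ... })), every construction returns that object and the return value is recorded in mock.results, so the instance's own jest.fn() methods can be asserted on afterwards. A minimal standalone sketch of that pattern, assuming Jest globals are available (not Budibase code):

// Class mock shaped like the LargeLanguageModel mock above.
const FakeModel = jest.fn().mockImplementation(() => ({
  init: jest.fn(),
  run: jest.fn(),
}))

it("exposes constructed instances through mock.results", async () => {
  const model = new FakeModel("gpt-4o-mini")
  await model.init()
  await model.run("What is the meaning of life?")

  expect(FakeModel).toHaveBeenCalledWith("gpt-4o-mini")

  // mock.results[0].value is the object returned by the first construction.
  const instance = FakeModel.mock.results[0].value
  expect(instance.init).toHaveBeenCalled()
  expect(instance.run).toHaveBeenCalledWith("What is the meaning of life?")
})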