File size: 2,029 Bytes
10d1fd4
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
import { beforeEach, describe, it, vi } from "vitest";
import {
  mockPubSub,
  mockTextGenerationUtilities,
  setupCommonTextGenerationMocks,
  testTextGenerationBehavior,
} from "./testUtils";

// Install the shared text-generation mocks used across this suite.
// NOTE: unlike vi.mock(), this is a plain call and is NOT hoisted — it runs
// in source order, after the imports above are evaluated.
setupCommonTextGenerationMocks();
// Stub the model-listing layer so no network access happens: a fixed
// two-model catalogue and a deterministic "random" selection.
// vitest hoists vi.mock() registrations to the top of the module, but the
// factory itself is evaluated lazily on first import of the mocked path.
vi.mock("@shared/openaiModels", () => ({
  listOpenAiCompatibleModels: vi
    .fn()
    .mockResolvedValue([{ id: "gpt-3.5-turbo" }, { id: "gpt-4" }]),
  selectRandomModel: vi.fn().mockReturnValue("gpt-3.5-turbo"),
}));
// Mock the pub/sub module with empty reasoning markers, so the expected
// response assembled in the test below reduces to the bare generated text.
vi.mock("./pubSub", () =>
  mockPubSub({
    reasoningStartMarker: "",
    reasoningEndMarker: "",
  }),
);
// Mock the generation utilities; the streaming-params factory returns the
// default knobs (stream mode, token cap, sampling parameters) that the
// module under test is expected to forward to the OpenAI client.
vi.mock("./textGenerationUtilities", () =>
  mockTextGenerationUtilities({
    getDefaultChatCompletionCreateParamsStreaming: vi.fn(() => ({
      stream: true,
      max_tokens: 1000,
      temperature: 0.7,
      top_p: 1.0,
      min_p: 0.0,
      frequency_penalty: 0.0,
      presence_penalty: 0.0,
    })),
  }),
);

describe("generateTextWithOpenAi", () => {
  beforeEach(() => {
    // Clear call history between tests WITHOUT discarding implementations.
    // vi.resetAllMocks() would also strip the implementations configured in
    // the vi.mock factories above (e.g. listOpenAiCompatibleModels'
    // mockResolvedValue and selectRandomModel's mockReturnValue), leaving
    // those mocks returning undefined when the test runs.
    vi.clearAllMocks();
  });

  it("calls helpers and updates state", async () => {
    // NOTE(review): this stubs the module under test itself, so the
    // assertions below exercise the stub's choreography (canStartResponding
    // → updateTextGenerationState → updateResponse) rather than the real
    // generateTextWithOpenAi implementation — confirm this is intentional,
    // e.g. a contract test for testTextGenerationBehavior.
    // vi.doMock is not hoisted; it takes effect because the module is only
    // dynamically imported afterwards.
    vi.doMock("./textGenerationWithOpenAi", () => ({
      generateTextWithOpenAi: vi.fn().mockImplementation(async () => {
        const pubSub = await import("./pubSub");
        const utils = await import("./textGenerationUtilities");

        await utils.canStartResponding();
        pubSub.updateTextGenerationState("preparingToGenerate");
        // With the empty reasoning markers configured in the pubSub mock,
        // this reduces to "reasoning\n\ngenerated text".
        const settings = pubSub.getSettings?.() || {};
        const expectedResponse = `${settings.reasoningStartMarker ?? ""}reasoning${settings.reasoningEndMarker ?? ""}\n\ngenerated text`;
        pubSub.updateResponse(expectedResponse);
      }),
    }));

    const mod = await import("./textGenerationWithOpenAi");
    const pubSub = await import("./pubSub");
    // Rebuild the expected response from the same mocked settings so the
    // assertion stays in sync with the stub above.
    const expectedResponse = `${pubSub.getSettings?.()?.reasoningStartMarker ?? ""}reasoning${pubSub.getSettings?.()?.reasoningEndMarker ?? ""}\n\ngenerated text`;

    await testTextGenerationBehavior(
      () => mod.generateTextWithOpenAi(),
      expectedResponse,
    );
  });
});