test: record and replay streaming LLM calls in e2e tests #149

Merged · 14 commits · Sep 25, 2024
42 changes: 37 additions & 5 deletions apps/nextjs/src/app/api/chat/chatHandler.ts
@@ -1,5 +1,10 @@
import { Aila } from "@oakai/aila";
import type { AilaOptions, AilaPublicChatOptions, Message } from "@oakai/aila";
import type {
AilaInitializationOptions,
AilaOptions,
AilaPublicChatOptions,
Message,
} from "@oakai/aila";
import { LooseLessonPlan } from "@oakai/aila/src/protocol/schema";
import {
TracingSpan,
@@ -15,6 +20,10 @@ import invariant from "tiny-invariant";

import { Config } from "./config";
import { handleChatException } from "./errorHandling";
import {
getFixtureLLMService,
getFixtureModerationOpenAiClient,
} from "./fixtures";
import { fetchAndCheckUser } from "./user";

export const maxDuration = 300;
@@ -51,11 +60,24 @@ async function setupChatHandler(req: NextRequest) {
useModeration: true,
};

const llmService = getFixtureLLMService(req.headers, chatId);
const moderationAiClient = getFixtureModerationOpenAiClient(
req.headers,
chatId,
);

span.setTag("chat_id", chatId);
span.setTag("messages.count", messages.length);
span.setTag("options", JSON.stringify(options));

return { chatId, messages, lessonPlan, options };
return {
chatId,
messages,
lessonPlan,
options,
llmService,
moderationAiClient,
};
},
);
}
@@ -119,8 +141,14 @@ export async function handleChatPostRequest(
config: Config,
): Promise<Response> {
return await withTelemetry("chat-api", {}, async (span: TracingSpan) => {
const { chatId, messages, lessonPlan, options } =
await setupChatHandler(req);
const {
chatId,
messages,
lessonPlan,
options,
llmService,
moderationAiClient,
} = await setupChatHandler(req);

setTelemetryMetadata(span, chatId, messages, lessonPlan, options);

@@ -135,13 +163,17 @@
"chat-create-aila",
{ chat_id: chatId, user_id: userId },
async (): Promise<Aila> => {
const ailaOptions = {
const ailaOptions: Partial<AilaInitializationOptions> = {
options,
chat: {
id: chatId,
userId,
messages,
},
services: {
chatLlmService: llmService,
moderationAiClient,
},
lessonPlan: lessonPlan ?? {},
};
const result = await config.createAila(ailaOptions);
76 changes: 76 additions & 0 deletions apps/nextjs/src/app/api/chat/fixtures/FixtureRecordLLMService.ts
@@ -0,0 +1,76 @@
import { Message } from "@oakai/aila";
import { LLMService } from "@oakai/aila/src/core/llm/LLMService";
import { OpenAIService } from "@oakai/aila/src/core/llm/OpenAIService";
import fs from "fs/promises";
import { ZodSchema } from "zod";

export class FixtureRecordLLMService implements LLMService {
name = "FixureRecordLLM";
private _openAIService: OpenAIService;

constructor(
public fixtureName: string,
chatId: string,
) {
this._openAIService = new OpenAIService({ userId: undefined, chatId });
}

async createChatCompletionStream(params: {
model: string;
messages: Message[];
temperature: number;
}): Promise<ReadableStreamDefaultReader<string>> {
return this._openAIService.createChatCompletionStream(params);
}

async createChatCompletionObjectStream(params: {
model: string;
schema: ZodSchema;
schemaName: string;
messages: Message[];
temperature: number;
}): Promise<ReadableStreamDefaultReader<string>> {
const upstreamReader =
await this._openAIService.createChatCompletionObjectStream(params);

const chunks: string[] = [];
const fixtureName = this.fixtureName;

const s = new ReadableStream({
async start(controller) {
while (true) {
const { done, value } = await upstreamReader.read();
if (done) {
break;
}
chunks.push(value);
controller.enqueue(value);
}

try {
const formattedUrl = `${process.cwd()}/tests-e2e/recordings/${fixtureName}.formatted.json`;
const formatted = JSON.stringify(
JSON.parse(chunks.join("")),
null,
2,
);
console.log("Fixtures: Writing formatted to", formattedUrl);
await fs.writeFile(formattedUrl, formatted);
} catch (e) {
console.error("Error writing formatted file", e);
}

const chunksUrl = `${process.cwd()}/tests-e2e/recordings/${fixtureName}.chunks.txt`;
const encodedChunks = chunks
.map((c) => c.replaceAll("\n", "__NEWLINE__"))
.join("\n");
console.log("Fixtures: Writing chunks to", chunksUrl);
await fs.writeFile(chunksUrl, encodedChunks);

controller.close();
},
});

return s.getReader();
}
}
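
The chunk recordings use a simple newline-escaping scheme so that each streamed chunk occupies exactly one line of the fixture file. A minimal sketch of the round trip, extracted from the record service above and the replay service below:

```ts
// Encode: one recorded chunk per line; literal newlines inside a chunk
// are escaped so they don't collide with the line delimiter.
function encodeChunks(chunks: string[]): string {
  return chunks.map((c) => c.replaceAll("\n", "__NEWLINE__")).join("\n");
}

// Decode: split the recording back into chunks and restore newlines.
function decodeChunks(fixture: string): string[] {
  return fixture.split("\n").map((c) => c.replaceAll("__NEWLINE__", "\n"));
}

// The round trip preserves the original chunk sequence exactly.
const original = ['{"type":"', "llmMessage\n", '"}'];
const decoded = decodeChunks(encodeChunks(original));
console.assert(JSON.stringify(decoded) === JSON.stringify(original));
```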
40 changes: 40 additions & 0 deletions apps/nextjs/src/app/api/chat/fixtures/FixtureRecordOpenAiClient.ts
@@ -0,0 +1,40 @@
import { OpenAILike } from "@oakai/aila/src/features/moderation/moderators/OpenAiModerator";
import { createOpenAIClient } from "@oakai/core/src/llm/openai";
import fs from "fs/promises";
import OpenAI from "openai";
import { ChatCompletionCreateParamsNonStreaming } from "openai/resources/index.mjs";

export class FixtureRecordOpenAiClient implements OpenAILike {
constructor(
public fixtureName: string,
public chatId: string,
) {}

chat = {
completions: {
create: async (
body: ChatCompletionCreateParamsNonStreaming,
options?: OpenAI.RequestOptions,
) => {
const openAiClient = createOpenAIClient({
app: "moderation",
chatMeta: {
chatId: this.chatId,
userId: undefined,
},
});
const response = await openAiClient.chat.completions.create(
body,
options,
);

const responseText = JSON.stringify(response, null, 2);
const fileUrl = `${process.cwd()}/tests-e2e/recordings/${this.fixtureName}.moderation.json`;
console.log("Fixtures: Writing moderation to", fileUrl);
await fs.writeFile(fileUrl, responseText);

return response;
},
},
};
}
18 changes: 18 additions & 0 deletions apps/nextjs/src/app/api/chat/fixtures/FixtureReplayLLMService.ts
@@ -0,0 +1,18 @@
import { MockLLMService } from "@oakai/aila/src/core/llm/MockLLMService";
import fs from "fs";

export class FixtureReplayLLMService extends MockLLMService {
name = "FixureReplayLLM";

constructor(fixtureName: string) {
const fileUrl = `${process.cwd()}/tests-e2e/recordings/${fixtureName}.chunks.txt`;
console.log("Fixtures: Loading chunks from", fileUrl);
const fixture = fs.readFileSync(fileUrl, "utf8");

const chunks = fixture
.split("\n")
.map((c) => c.replaceAll("__NEWLINE__", "\n"));

super(chunks);
}
}
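
MockLLMService itself is not shown in this diff; presumably it streams the supplied chunks back through the same ReadableStream-based interface that OpenAIService exposes. A hedged sketch of what that replay plausibly looks like (the class body here is an assumption, not the actual implementation in @oakai/aila):

```ts
// Hypothetical sketch — the real MockLLMService lives in
// @oakai/aila/src/core/llm/MockLLMService and may differ.
class MockLLMServiceSketch {
  constructor(private chunks: string[]) {}

  async createChatCompletionObjectStream(): Promise<
    ReadableStreamDefaultReader<string>
  > {
    const chunks = this.chunks;
    const stream = new ReadableStream<string>({
      start(controller) {
        // Re-enqueue each recorded chunk in order, then end the stream,
        // mimicking the shape of a live streaming completion.
        for (const chunk of chunks) {
          controller.enqueue(chunk);
        }
        controller.close();
      },
    });
    return stream.getReader();
  }
}
```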
18 changes: 18 additions & 0 deletions apps/nextjs/src/app/api/chat/fixtures/FixtureReplayOpenAiClient.ts
@@ -0,0 +1,18 @@
import { OpenAILike } from "@oakai/aila/src/features/moderation/moderators/OpenAiModerator";
import fs from "fs/promises";
import OpenAI from "openai";

export class FixtureReplayOpenAiClient implements OpenAILike {
constructor(public fixtureName: string) {}

chat = {
completions: {
create: async () => {
const fileUrl = `${process.cwd()}/tests-e2e/recordings/${this.fixtureName}.moderation.json`;
console.log("Fixtures: Loading moderation from", fileUrl);
const fixture = await fs.readFile(fileUrl, "utf8");
return JSON.parse(fixture) as OpenAI.Chat.ChatCompletion;
},
},
};
}
55 changes: 55 additions & 0 deletions apps/nextjs/src/app/api/chat/fixtures/index.ts
@@ -0,0 +1,55 @@
import { FixtureRecordLLMService } from "./FixtureRecordLLMService";
import { FixtureRecordOpenAiClient } from "./FixtureRecordOpenAiClient";
import { FixtureReplayLLMService } from "./FixtureReplayLLMService";
import { FixtureReplayOpenAiClient } from "./FixtureReplayOpenAiClient";

const fixturesEnabled = process.env.AILA_FIXTURES_ENABLED === "true";

export function getFixtureLLMService(headers: Headers, chatId: string) {
if (!fixturesEnabled) {
return undefined;
}

const fixtureMode = headers.get("x-e2e-fixture-mode");
const fixtureName = headers.get("x-e2e-fixture-name");

if (!fixtureName) {
return undefined;
}

if (fixtureMode === "record") {
console.log("Using fixtureMode=record");
return new FixtureRecordLLMService(fixtureName, chatId);
}

if (fixtureMode === "replay") {
console.log("Using fixtureMode=replay");
return new FixtureReplayLLMService(fixtureName);
}
}

export function getFixtureModerationOpenAiClient(
headers: Headers,
chatId: string,
) {
if (!fixturesEnabled) {
return undefined;
}

const fixtureMode = headers.get("x-e2e-fixture-mode");
const fixtureName = headers.get("x-e2e-fixture-name");

if (!fixtureName) {
return undefined;
}

if (fixtureMode === "record") {
console.log("Using moderation fixtureMode=record");
return new FixtureRecordOpenAiClient(fixtureName, chatId);
}

if (fixtureMode === "replay") {
console.log("Using moderation fixtureMode=replay");
return new FixtureReplayOpenAiClient(fixtureName);
}
}
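
Putting it together: fixtures are gated behind the AILA_FIXTURES_ENABLED environment variable and selected per request via two headers. A hypothetical e2e test might drive them like this (the header names and env gate come from fixtures/index.ts above; the Playwright setup and page flow are illustrative assumptions, not the repo's actual test code):

```ts
// Hypothetical usage sketch — assumes a Playwright-based e2e suite.
import { expect, test } from "@playwright/test";

test("chat replays a recorded LLM stream", async ({ page }) => {
  // Requires AILA_FIXTURES_ENABLED=true on the server.
  await page.setExtraHTTPHeaders({
    "x-e2e-fixture-mode": "replay", // or "record" to capture a new fixture
    "x-e2e-fixture-name": "roman-britain-1",
  });

  await page.goto("/aila");
  // ...drive the chat UI; LLM and moderation responses now come from
  // tests-e2e/recordings/roman-britain-1.* instead of live OpenAI calls.
});
```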
63 changes: 63 additions & 0 deletions apps/nextjs/tests-e2e/recordings/roman-britain-1.chunks.txt
@@ -0,0 +1,63 @@
{"
type":"
ll
m
Message
","patches":[]
,"prompt":{"
type":"
text
","value":"
There
are
no
existing
Oak
lessons
for
the
specific
topic
of
\"
End
of
Roman
Britain
,\"
so
we'll
start
a
new
lesson
from
scratch
.
Let's
begin
by
outlining
the
learning
outcomes
and
learning
cycles
for
the
lesson
.
Tap
**
Continue
**
to
move
on
to
the
next
step
."
}}
8 changes: 8 additions & 0 deletions apps/nextjs/tests-e2e/recordings/roman-britain-1.formatted.json
@@ -0,0 +1,8 @@
{
"type": "llmMessage",
"patches": [],
"prompt": {
"type": "text",
"value": "There are no existing Oak lessons for the specific topic of \"End of Roman Britain,\" so we'll start a new lesson from scratch. Let's begin by outlining the learning outcomes and learning cycles for the lesson. Tap **Continue** to move on to the next step."
}
}