/**
 * SimpleChatEngine.ts — forked from run-llama/LlamaIndexTS.
 */
import type { LLM } from "@llamaindex/core/llms";
import {
streamConverter,
streamReducer,
wrapEventCaller,
} from "@llamaindex/core/utils";
import type { ChatHistory } from "../../ChatHistory.js";
import { getHistory } from "../../ChatHistory.js";
import { EngineResponse } from "../../EngineResponse.js";
import { Settings } from "../../Settings.js";
import type {
ChatEngine,
ChatEngineParamsNonStreaming,
ChatEngineParamsStreaming,
} from "./types.js";
/**
 * SimpleChatEngine is the simplest possible chat engine. Useful for using your own custom prompts.
 *
 * It maintains a chat history and forwards the full history to the configured
 * LLM on every call; no retrieval or context injection is performed.
 */
export class SimpleChatEngine implements ChatEngine {
  chatHistory: ChatHistory;
  llm: LLM;

  /**
   * @param init - Optional partial configuration. `chatHistory` is normalized
   *   via `getHistory`; `llm` falls back to the globally configured
   *   `Settings.llm` when omitted.
   */
  constructor(init?: Partial<SimpleChatEngine>) {
    this.chatHistory = getHistory(init?.chatHistory);
    this.llm = init?.llm ?? Settings.llm;
  }

  chat(
    params: ChatEngineParamsStreaming,
  ): Promise<AsyncIterable<EngineResponse>>;
  chat(params: ChatEngineParamsNonStreaming): Promise<EngineResponse>;
  @wrapEventCaller
  async chat(
    params: ChatEngineParamsStreaming | ChatEngineParamsNonStreaming,
  ): Promise<EngineResponse | AsyncIterable<EngineResponse>> {
    const { message, stream } = params;

    // Prefer a per-call history when supplied; otherwise use the engine's own.
    const chatHistory = params.chatHistory
      ? getHistory(params.chatHistory)
      : this.chatHistory;
    chatHistory.addMessage({ content: message, role: "user" });

    if (stream) {
      // Named `chunkStream` (not `stream`) to avoid shadowing the boolean
      // `stream` flag destructured from params above.
      const chunkStream = await this.llm.chat({
        messages: await chatHistory.requestMessages(),
        stream: true,
      });
      return streamConverter(
        streamReducer({
          stream: chunkStream,
          initialValue: "",
          reducer: (accumulator, part) => accumulator + part.delta,
          // Once the stream completes, record the fully accumulated
          // assistant reply in the history.
          finished: (accumulator) => {
            chatHistory.addMessage({ content: accumulator, role: "assistant" });
          },
        }),
        EngineResponse.fromChatResponseChunk,
      );
    }

    // Non-streaming path: single LLM round trip, record the reply, wrap it.
    const response = await this.llm.chat({
      messages: await chatHistory.requestMessages(),
    });
    chatHistory.addMessage(response.message);
    return EngineResponse.fromChatResponse(response);
  }

  /** Clears the engine's own chat history (per-call histories are unaffected). */
  reset() {
    this.chatHistory.reset();
  }
}