ChatOpenAI.ts (forked from KelvinQiu802/llm-mcp-rag)
import OpenAI from "openai";
import { Tool } from "@modelcontextprotocol/sdk/types.js";
import "dotenv/config";
import { logTitle } from "./utils";

// A tool call accumulated from streamed response deltas
export interface ToolCall {
    id: string;
    function: {
        name: string;
        arguments: string;
    };
}
export default class ChatOpenAI {
    private llm: OpenAI;
    private model: string;
    private messages: OpenAI.Chat.ChatCompletionMessageParam[] = [];
    private tools: Tool[];

    constructor(model: string, systemPrompt: string = '', tools: Tool[] = [], context: string = '') {
        this.llm = new OpenAI({
            apiKey: process.env.OPENAI_API_KEY,
            baseURL: process.env.OPENAI_BASE_URL,
        });
        this.model = model;
        this.tools = tools;
        if (systemPrompt) this.messages.push({ role: "system", content: systemPrompt });
        if (context) this.messages.push({ role: "user", content: context });
    }
    async chat(prompt?: string): Promise<{ content: string, toolCalls: ToolCall[] }> {
        logTitle('CHAT');
        if (prompt) {
            this.messages.push({ role: "user", content: prompt });
        }
        const stream = await this.llm.chat.completions.create({
            model: this.model,
            messages: this.messages,
            stream: true,
            tools: this.getToolsDefinition(),
        });
        let content = "";
        let toolCalls: ToolCall[] = [];
        logTitle('RESPONSE');
        for await (const chunk of stream) {
            const delta = chunk.choices[0].delta;
            // Handle plain text content
            if (delta.content) {
                const contentChunk = delta.content || "";
                content += contentChunk;
                process.stdout.write(contentChunk);
            }
            // Handle streamed tool calls
            if (delta.tool_calls) {
                for (const toolCallChunk of delta.tool_calls) {
                    // Create a new ToolCall entry the first time this index appears
                    if (toolCalls.length <= toolCallChunk.index) {
                        toolCalls.push({ id: '', function: { name: '', arguments: '' } });
                    }
                    let currentCall = toolCalls[toolCallChunk.index];
                    // Accumulate id, name, and argument fragments as they arrive
                    if (toolCallChunk.id) currentCall.id += toolCallChunk.id;
                    if (toolCallChunk.function?.name) currentCall.function.name += toolCallChunk.function.name;
                    if (toolCallChunk.function?.arguments) currentCall.function.arguments += toolCallChunk.function.arguments;
                }
            }
        }
        // Record the assistant turn; attach tool_calls only when the model actually requested tools
        const assistantMessage: OpenAI.Chat.Completions.ChatCompletionAssistantMessageParam = {
            role: "assistant",
            content: content,
        };
        if (toolCalls.length > 0) {
            assistantMessage.tool_calls = toolCalls.map(call => ({
                id: call.id,
                type: "function" as const,
                function: call.function,
            }));
        }
        this.messages.push(assistantMessage);
        return {
            content: content,
            toolCalls: toolCalls,
        };
    }
    // Append a tool's output to the conversation so the next chat() call can use it
    public appendToolResult(toolCallId: string, toolOutput: string) {
        this.messages.push({
            role: "tool",
            content: toolOutput,
            tool_call_id: toolCallId,
        });
    }

    // Convert MCP tool definitions into the OpenAI function-calling tool format
    private getToolsDefinition(): OpenAI.Chat.Completions.ChatCompletionTool[] {
        return this.tools.map((tool) => ({
            type: "function",
            function: {
                name: tool.name,
                description: tool.description,
                parameters: tool.inputSchema,
            },
        }));
    }
}
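
A minimal usage sketch, not part of the original file: the tool list, the runTool helper (standing in for the project's MCP client), and the model name are all assumptions here. The caller streams a reply, runs any requested tools, feeds each result back with appendToolResult, and calls chat() again until no further tool calls are returned.

import ChatOpenAI from "./ChatOpenAI";
import { Tool } from "@modelcontextprotocol/sdk/types.js";

// Hypothetical stand-ins for the MCP client: an empty tool list and a dummy executor
const tools: Tool[] = [];
async function runTool(name: string, args: unknown): Promise<string> {
    return `no tool named "${name}" is wired up in this sketch (args: ${JSON.stringify(args)})`;
}

async function main() {
    // Model name is an arbitrary example; any chat-completions model works here
    const llm = new ChatOpenAI("gpt-4o-mini", "You are a helpful assistant.", tools);
    let { toolCalls } = await llm.chat("What is the weather in Paris?");
    // Keep going while the model keeps requesting tools
    while (toolCalls.length > 0) {
        for (const call of toolCalls) {
            const output = await runTool(call.function.name, JSON.parse(call.function.arguments || "{}"));
            llm.appendToolResult(call.id, output);
        }
        ({ toolCalls } = await llm.chat());
    }
}

main();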