feat: fix streaming response.
guangzhengli committed Jun 14, 2023
1 parent db01708 commit ad36d56
Showing 2 changed files with 26 additions and 52 deletions.
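
In short: pages/api/chat.ts no longer pipes the model output through a TransformStream and returns its readable side as a text/event-stream Response. Instead, the Node.js NextApiResponse is handed to getChatModel, whose LLM callbacks stream each token with res.write and close the connection with res.end once generation finishes.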
pages/api/chat.ts (55 changes: 20 additions & 35 deletions)
@@ -13,7 +13,7 @@ import {
   SystemMessagePromptTemplate
 } from "langchain/prompts";
 import {BufferMemory, ChatMessageHistory} from "langchain/memory";
-import {ConversationChain} from "langchain/chains";
+import {ConversationChain, LLMChain} from "langchain/chains";

 export const config = {
   // runtime: 'edge',
@@ -42,14 +42,26 @@ const handler = async (req: NextApiRequest, res: NextApiResponse) => {
   });

   try {
-    const stream = callChain(input, prompt, historyMessages, keyConfiguration);
-    return new Response(await stream, {
-      headers: {
-        "Content-Type": "text/event-stream",
-        Connection: "keep-alive",
-        "Cache-Control": "no-cache, no-transform",
-      },
-    });
+    const llm = await getChatModel(keyConfiguration, res);
+
+    const promptTemplate = ChatPromptTemplate.fromPromptMessages([
+      // SystemMessagePromptTemplate.fromTemplate(prompt ? prompt : DEFAULT_SYSTEM_PROMPT),
+      // new MessagesPlaceholder("history"),
+      HumanMessagePromptTemplate.fromTemplate("{input}"),
+    ]);
+
+    const memory = new BufferMemory({
+      returnMessages: true,
+      chatHistory: new ChatMessageHistory(historyMessages),
+    });
+
+    const chain = new LLMChain({
+      prompt: promptTemplate,
+      llm,
+      memory,
+    });
+
+    chain.call({ input }).catch(console.error);
   } catch (err) {
     console.error(err);
     let error = "Unexpected message";
@@ -65,32 +77,5 @@ const handler = async (req: NextApiRequest, res: NextApiResponse) => {
   }
 };

-const callChain = async (input: string, prompt: string, historyMessages: BaseChatMessage[], keyConfiguration: KeyConfiguration) => {
-  const encoder = new TextEncoder();
-  const stream = new TransformStream();
-  const writer = stream.writable.getWriter();
-
-  const llm = await getChatModel(keyConfiguration, encoder, writer);
-
-  const promptTemplate = ChatPromptTemplate.fromPromptMessages([
-    SystemMessagePromptTemplate.fromTemplate(prompt ? prompt : DEFAULT_SYSTEM_PROMPT),
-    new MessagesPlaceholder("history"),
-    HumanMessagePromptTemplate.fromTemplate("{input}"),
-  ]);
-
-  const memory = new BufferMemory({
-    returnMessages: true,
-    chatHistory: new ChatMessageHistory(historyMessages),
-  });
-
-  const chain = new ConversationChain({
-    prompt: promptTemplate,
-    llm,
-    memory,
-  });
-
-  chain.call({ input }).catch(console.error);
-  return stream.readable;
-}

 export default handler;
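
Read in one piece, the added lines give the handler's new try block roughly the following shape (a sketch assembled from the "+" lines above; the request parsing that produces input, historyMessages, and keyConfiguration is unchanged and elided, as is the catch block). Note that chain.call is fired without await and nothing is returned: the response is driven entirely by the callbacks that getChatModel registers on res.

// Sketch: the try block of pages/api/chat.ts after this commit, assembled
// from the added lines above. Request parsing and error handling are
// unchanged and elided here.
const llm = await getChatModel(keyConfiguration, res);

const promptTemplate = ChatPromptTemplate.fromPromptMessages([
  // The system prompt and history placeholder are commented out in this commit:
  // SystemMessagePromptTemplate.fromTemplate(prompt ? prompt : DEFAULT_SYSTEM_PROMPT),
  // new MessagesPlaceholder("history"),
  HumanMessagePromptTemplate.fromTemplate("{input}"),
]);

const memory = new BufferMemory({
  returnMessages: true,
  chatHistory: new ChatMessageHistory(historyMessages),
});

const chain = new LLMChain({ prompt: promptTemplate, llm, memory });

// Fire and forget: tokens reach the client via res.write in
// handleLLMNewToken, and res.end() in handleLLMEnd closes the response.
chain.call({ input }).catch(console.error);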
utils/openai.ts (23 changes: 6 additions & 17 deletions)
@@ -1,6 +1,7 @@
 import { KeyConfiguration, ModelType } from "@/types";
 import { OpenAIChat } from "langchain/llms/openai";
 import {CallbackManager} from "langchain/callbacks";
+import {NextApiResponse} from "next";

 export const getModel = async (keyConfiguration: KeyConfiguration) => {
   if (keyConfiguration.apiType === ModelType.AZURE_OPENAI) {
@@ -19,7 +20,7 @@ export const getModel = async (keyConfiguration: KeyConfiguration) => {
   }
 }

-export const getChatModel = async (keyConfiguration: KeyConfiguration, encoder: TextEncoder, writer: WritableStreamDefaultWriter) => {
+export const getChatModel = async (keyConfiguration: KeyConfiguration, res: NextApiResponse) => {
   if (keyConfiguration.apiType === ModelType.AZURE_OPENAI) {
     return new OpenAIChat({
       temperature: 0.9,
@@ -30,16 +31,10 @@ export const getChatModel = async (keyConfiguration: KeyConfiguration, encoder: TextEncoder, writer: WritableStreamDefaultWriter) => {
       azureOpenAIApiVersion: keyConfiguration.azureApiVersion,
       callbacks: CallbackManager.fromHandlers({
         handleLLMNewToken: async (token: string, runId: string, parentRunId?: string) =>{
-          await writer.ready;
-          await writer.write(encoder.encode(token));
+          res.write(token);
         },
         handleLLMEnd: async () => {
-          await writer.ready;
-          await writer.close();
-        },
-        handleLLMError: async (e) => {
-          await writer.ready;
-          await writer.abort(e);
+          res.end();
         },
       })
     });
@@ -50,16 +45,10 @@ export const getChatModel = async (keyConfiguration: KeyConfiguration, encoder: TextEncoder, writer: WritableStreamDefaultWriter) => {
       openAIApiKey: keyConfiguration.apiKey,
       callbacks: CallbackManager.fromHandlers({
         handleLLMNewToken: async (token: string, runId: string, parentRunId?: string) =>{
-          await writer.ready;
-          await writer.write(encoder.encode(token));
+          res.write(token);
         },
         handleLLMEnd: async () => {
-          await writer.ready;
-          await writer.close();
-        },
-        handleLLMError: async (e) => {
-          await writer.ready;
-          await writer.abort(e);
+          res.end();
         },
       })
     });
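
Both branches now stream over the plain Node.js response: each handleLLMNewToken writes a chunk with res.write(token) (Node applies Transfer-Encoding: chunked automatically when the body is written incrementally), and handleLLMEnd closes the response with res.end(). Note that the old handleLLMError handler is dropped in the process, so model errors now surface only through the .catch on chain.call. A browser can consume the chunked body with the standard fetch reader API; a minimal sketch, assuming the route is /api/chat and a JSON body with an input field (the client side is not part of this commit):

// Minimal client-side consumer (a sketch; the request body shape is an
// assumption, not taken from this commit).
async function streamChat(input: string, onToken: (token: string) => void): Promise<void> {
  const response = await fetch("/api/chat", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ input }),
  });
  if (!response.ok || !response.body) {
    throw new Error(`Request failed: ${response.status}`);
  }
  const reader = response.body.getReader();
  const decoder = new TextDecoder();
  while (true) {
    const { value, done } = await reader.read();
    if (done) break; // the server called res.end()
    onToken(decoder.decode(value, { stream: true }));
  }
}

For example, streamChat(question, (token) => appendToAnswer(token)) appends tokens to the UI as they arrive, with appendToAnswer standing in for whatever update function the UI uses.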
