Skip to content

Commit

Permalink
add on completion method
Browse files · Browse the repository at this point in the history
  • Loading branch information
thivy committed Jul 28, 2023
1 parent 6e3ab97 commit 7fd9799
Show file tree · Hide file tree
Showing 2 changed files with 16 additions and 41 deletions.
30 changes: 9 additions & 21 deletions features/chat/chat-api-data.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@ import {
HumanMessagePromptTemplate,
SystemMessagePromptTemplate,
} from "langchain/prompts";
import { ChainValues } from "langchain/schema";
import {
AzureCogDocument,
AzureCogSearch,
Expand All @@ -32,12 +31,18 @@ export const PromptDataGPT = async (props: PromptGPTProps) => {
streaming: true,
});

const relevantDocuments = findRelevantDocuments(lastHumanMessage.content);
const relevantDocuments = await findRelevantDocuments(
lastHumanMessage.content
);

const chain = loadQAMapReduceChain(chatModel, {
combinePrompt: defineSystemPrompt(),
});
const { stream, handlers } = LangChainStream();
const { stream, handlers } = LangChainStream({
onCompletion: async (completion: string) => {
await inertPromptAndResponse(id, lastHumanMessage.content, completion);
},
});

const memory = buildMemory(chats);

Expand All @@ -47,24 +52,7 @@ export const PromptDataGPT = async (props: PromptGPTProps) => {
question: lastHumanMessage.content,
memory: memory,
},
[
{
...handlers,
handleChainEnd: async (
outputs: ChainValues,
runId: string,
parentRunId?: string,
tags?: string[]
) => {
await handlers.handleChainEnd(outputs, runId);
await inertPromptAndResponse(
id,
lastHumanMessage.content,
outputs.text
);
},
},
]
[handlers]
);

return new StreamingTextResponse(stream);
Expand Down
27 changes: 7 additions & 20 deletions features/chat/chat-api.ts
Original file line number Diff line number Diff line change
@@ -1,14 +1,18 @@
import { LangChainStream, StreamingTextResponse } from "ai";
import { ChatOpenAI } from "langchain/chat_models/openai";
import { HumanMessage, LLMResult, SystemMessage } from "langchain/schema";
import { HumanMessage, SystemMessage } from "langchain/schema";
import { PromptGPTProps, initAndGuardChatSession } from "./chat-api-helpers";
import { mostRecentMemory } from "./chat-helpers";
import { inertPromptAndResponse } from "./chat-service";

export const PromptGPT = async (props: PromptGPTProps) => {
const { lastHumanMessage, id, chats } = await initAndGuardChatSession(props);

const { stream, handlers } = LangChainStream();
const { stream, handlers } = LangChainStream({
onCompletion: async (completion: string) => {
await inertPromptAndResponse(id, lastHumanMessage.content, completion);
},
});

const memory = mostRecentMemory(chats, 10);

Expand All @@ -26,24 +30,7 @@ export const PromptGPT = async (props: PromptGPTProps) => {
new HumanMessage(lastHumanMessage.content),
],
{},
[
{
...handlers,
handleLLMEnd: async (
output: LLMResult,
runId: string,
parentRunId?: string | undefined,
tags?: string[] | undefined
) => {
await handlers.handleLLMEnd(output, runId);
await inertPromptAndResponse(
id,
lastHumanMessage.content,
output.generations[0][0].text
);
},
},
]
[handlers]
);

return new StreamingTextResponse(stream);
Expand Down

0 comments on commit 7fd9799

Please sign in to comment.