import { ChatOpenAI } from "@langchain/openai";
import { ConversationSummaryMemory } from "langchain/memory";
import { LLMChain } from "langchain/chains";
import { PromptTemplate } from "@langchain/core/prompts";
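
// Demonstrates ConversationSummaryMemory: instead of replaying the full chat
// transcript, it keeps an LLM-generated running summary of the conversation
// and injects it into the prompt under the "chat_history" key.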
export const run = async () => {
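  // The memory uses its own chat model to condense prior turns into a summary.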
  const memory = new ConversationSummaryMemory({
    memoryKey: "chat_history",
    llm: new ChatOpenAI({ model: "gpt-3.5-turbo", temperature: 0 }),
  });
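
  // The model that actually answers the user, wired into a prompt that
  // receives the running summary as {chat_history}.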
  const model = new ChatOpenAI();
  const prompt =
    PromptTemplate.fromTemplate(`The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.
Current conversation:
{chat_history}
Human: {input}
AI:`);
  const chain = new LLMChain({ llm: model, prompt, memory });
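
  // First turn: the memory is empty, so {chat_history} starts out blank;
  // after the call, the memory stores a one-sentence summary of the exchange.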
  const res1 = await chain.call({ input: "Hi! I'm Jim." });
  console.log({ res1, memory: await memory.loadMemoryVariables({}) });
  /*
    {
      res1: {
        text: "Hello Jim! It's nice to meet you. My name is AI. How may I assist you today?"
      },
      memory: {
        chat_history: 'Jim introduces himself to the AI and the AI greets him and offers assistance.'
      }
    }
  */
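
  // Second turn: the model recalls the name from the summary rather than from
  // a verbatim transcript of the earlier messages.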
  const res2 = await chain.call({ input: "What's my name?" });
  console.log({ res2, memory: await memory.loadMemoryVariables({}) });
  /*
    {
      res2: {
        text: "Your name is Jim. It's nice to meet you, Jim. How can I assist you today?"
      },
      memory: {
        chat_history: 'Jim introduces himself to the AI and the AI greets him and offers assistance. The AI addresses Jim by name and asks how it can assist him.'
      }
    }
  */
};