-
Notifications
You must be signed in to change notification settings - Fork 2.5k
/
Copy pathconversation.ts
53 lines (49 loc) · 1.55 KB
/
conversation.ts
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
import { PromptTemplate } from "@langchain/core/prompts";
import { LLMChain, LLMChainInput } from "./llm_chain.js";
import { BufferMemory } from "../memory/buffer_memory.js";
import { Optional } from "../types/type-utils.js";
/**
 * Default prompt used when a `ConversationChain` is constructed without an
 * explicit `prompt`. Expects two input variables: `{history}` (the memory's
 * rendered conversation so far) and `{input}` (the latest human message).
 * NOTE: the template text is part of runtime behavior — do not edit casually.
 */
export const DEFAULT_TEMPLATE = `The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.
Current conversation:
{history}
Human: {input}
AI:`;
/**
 * An {@link LLMChain} specialized for multi-turn conversation. Supplies
 * sensible defaults for every field a plain `LLMChain` requires: a prompt
 * built from {@link DEFAULT_TEMPLATE}, an output key of `"response"`, and a
 * {@link BufferMemory} instance so prior turns are injected as `{history}`.
 * Any field passed explicitly by the caller takes precedence over a default.
 *
 * @example
 * ```typescript
 * const model = new ChatOpenAI({});
 * const chain = new ConversationChain({ llm: model });
 *
 * // Sending a greeting to the conversation chain
 * const res1 = await chain.call({ input: "Hi! I'm Jim." });
 * console.log({ res1 });
 *
 * // Following up with a question in the conversation
 * const res2 = await chain.call({ input: "What's my name?" });
 * console.log({ res2 });
 * ```
 */
export class ConversationChain extends LLMChain {
  static lc_name() {
    return "ConversationChain";
  }

  /**
   * @param fields Standard `LLMChainInput` fields; `prompt` is optional and
   *   falls back to the conversational default when omitted.
   */
  constructor(fields: Optional<LLMChainInput, "prompt">) {
    const { prompt, outputKey, memory, ...rest } = fields;

    // Build the fallback prompt only when the caller did not provide one.
    const resolvedPrompt =
      prompt ??
      new PromptTemplate({
        template: DEFAULT_TEMPLATE,
        inputVariables: ["history", "input"],
      });

    super({
      prompt: resolvedPrompt,
      outputKey: outputKey ?? "response",
      memory: memory ?? new BufferMemory(),
      ...rest,
    });
  }
}