-
Notifications
You must be signed in to change notification settings - Fork 2.5k
/
Copy pathbuffer_window_memory.ts
99 lines (91 loc) · 3.13 KB
/
buffer_window_memory.ts
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
import { InputValues, MemoryVariables } from "@langchain/core/memory";
import { getBufferString } from "@langchain/core/messages";
import { BaseChatMemory, BaseChatMemoryInput } from "./chat_memory.js";
/**
 * Interface for the input parameters of the BufferWindowMemory class.
 */
export interface BufferWindowMemoryInput extends BaseChatMemoryInput {
  /** Prefix used for human messages when serializing to a string. Defaults to "Human". */
  humanPrefix?: string;
  /** Prefix used for AI messages when serializing to a string. Defaults to "AI". */
  aiPrefix?: string;
  /** Key under which the memory is exposed to the chain's prompt variables. Defaults to "history". */
  memoryKey?: string;
  /** Number of conversation turns to keep; the window holds the last k * 2 messages. Defaults to 5. */
  k?: number;
}
/**
 * Chat memory that retains only a sliding window of the most recent
 * conversation turns. Extends BaseChatMemory and implements
 * BufferWindowMemoryInput. When loaded, it exposes the last `k` turns
 * (i.e. `k * 2` messages, human + AI) under `memoryKey`, either as raw
 * message objects (`returnMessages: true`) or as a single buffer string.
 * @example
 * ```typescript
 * const prompt =
 *   PromptTemplate.fromTemplate(`The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.
 * Current conversation:
 * {chat_history}
 * Human: {input}
 * AI:`);
 *
 * const chain = new LLMChain({
 *   llm: new ChatOpenAI({ temperature: 0.9 }),
 *   prompt,
 *   memory: new BufferWindowMemory({ memoryKey: "chat_history", k: 1 }),
 * });
 *
 * // Example of initiating a conversation with the AI
 * const res1 = await chain.call({ input: "Hi! I'm Jim." });
 * console.log({ res1 });
 *
 * // Example of following up with another question
 * const res2 = await chain.call({ input: "What's my name?" });
 * console.log({ res2 });
 * ```
 */
export class BufferWindowMemory
  extends BaseChatMemory
  implements BufferWindowMemoryInput
{
  humanPrefix = "Human";

  aiPrefix = "AI";

  memoryKey = "history";

  k = 5;

  constructor(fields?: BufferWindowMemoryInput) {
    const { returnMessages, chatHistory, inputKey, outputKey } = fields ?? {};
    super({
      returnMessages: returnMessages ?? false,
      chatHistory,
      inputKey,
      outputKey,
    });
    // Nullish-coalesce against the class defaults declared above.
    this.humanPrefix = fields?.humanPrefix ?? "Human";
    this.aiPrefix = fields?.aiPrefix ?? "AI";
    this.memoryKey = fields?.memoryKey ?? "history";
    this.k = fields?.k ?? 5;
  }

  get memoryKeys() {
    return [this.memoryKey];
  }

  /**
   * Loads the memory variables for the chain. Fetches all stored chat
   * messages, trims them to the last `k * 2` entries (k human/AI turns),
   * and returns them under `memoryKey` — as message objects when
   * `returnMessages` is true, otherwise serialized via `getBufferString`
   * with the configured human/AI prefixes.
   * @param _values InputValues object (unused).
   * @returns Promise that resolves to a MemoryVariables object.
   */
  async loadMemoryVariables(_values: InputValues): Promise<MemoryVariables> {
    const history = await this.chatHistory.getMessages();
    // Keep only the trailing window: k turns == k * 2 messages.
    const windowed = history.slice(-this.k * 2);
    if (this.returnMessages) {
      return { [this.memoryKey]: windowed };
    }
    return {
      [this.memoryKey]: getBufferString(
        windowed,
        this.humanPrefix,
        this.aiPrefix
      ),
    };
  }
}