// buffer_memory.ts
import { InputValues, MemoryVariables } from "@langchain/core/memory";
import { getBufferString } from "@langchain/core/messages";
import { BaseChatMemory, BaseChatMemoryInput } from "./chat_memory.js";

/**
* Interface for the input parameters of the `BufferMemory` class.
*/
export interface BufferMemoryInput extends BaseChatMemoryInput {
  /** Prefix prepended to human messages when the history is rendered as a string. Defaults to "Human". */
  humanPrefix?: string;
  /** Prefix prepended to AI messages when the history is rendered as a string. Defaults to "AI". */
  aiPrefix?: string;
  /** Key under which the history is exposed in the memory variables. Defaults to "history". */
  memoryKey?: string;
}
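
/*
 * A minimal configuration sketch (hypothetical values, not part of the
 * library source) showing how these fields shape the rendered history:
 *
 *   const memory = new BufferMemory({
 *     humanPrefix: "User",       // messages render as "User: ..."
 *     aiPrefix: "Assistant",     // ...and "Assistant: ..."
 *     memoryKey: "chat_history", // exposed to prompts as {chat_history}
 *   });
 */
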
/**
* The `BufferMemory` class is a type of memory component used for storing
* and managing previous chat messages. It is a wrapper around
* `ChatMessageHistory` that extracts the messages into an input variable.
* This class is particularly useful in applications like chatbots where
 * it is essential to remember previous interactions. Note: a memory
 * instance represents the history of a single conversation, so it is
 * not recommended to share the same history or memory instance between
 * two different chains. If you deploy your LangChain app in a serverless
 * environment, do not store memory instances in a global variable, as your
 * hosting provider may have reset them by the next time the function is called.
* @example
* ```typescript
* // Initialize the memory to store chat history and set up the language model with a specific temperature.
* const memory = new BufferMemory({ memoryKey: "chat_history" });
* const model = new ChatOpenAI({ temperature: 0.9 });
*
* // Create a prompt template for a friendly conversation between a human and an AI.
* const prompt =
* PromptTemplate.fromTemplate(`The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.
*
* Current conversation:
* {chat_history}
* Human: {input}
* AI:`);
*
* // Set up the chain with the language model, prompt, and memory.
* const chain = new LLMChain({ llm: model, prompt, memory });
*
* // Example usage of the chain to continue the conversation.
* // The `call` method sends the input to the model and returns the AI's response.
* const res = await chain.call({ input: "Hi! I'm Jim." });
* console.log({ res });
*
* ```
*/
export class BufferMemory extends BaseChatMemory implements BufferMemoryInput {
  humanPrefix = "Human";

  aiPrefix = "AI";

  memoryKey = "history";

  constructor(fields?: BufferMemoryInput) {
    super({
      chatHistory: fields?.chatHistory,
      returnMessages: fields?.returnMessages ?? false,
      inputKey: fields?.inputKey,
      outputKey: fields?.outputKey,
    });
    this.humanPrefix = fields?.humanPrefix ?? this.humanPrefix;
    this.aiPrefix = fields?.aiPrefix ?? this.aiPrefix;
    this.memoryKey = fields?.memoryKey ?? this.memoryKey;
  }

  get memoryKeys() {
    return [this.memoryKey];
  }

  /**
   * Returns the chat history under `memoryKey`, either as the raw message
   * array (when `returnMessages` is true) or as a single buffer string
   * built with `humanPrefix` and `aiPrefix`.
   * @param _values `InputValues` object, unused by this implementation.
   * @returns A `Promise` that resolves with a `MemoryVariables` object.
   */
  async loadMemoryVariables(_values: InputValues): Promise<MemoryVariables> {
    const messages = await this.chatHistory.getMessages();
    if (this.returnMessages) {
      // Hand the raw message objects straight to the chain.
      const result = {
        [this.memoryKey]: messages,
      };
      return result;
    }
    // Otherwise flatten the history into one prefixed string,
    // e.g. "Human: ...\nAI: ...".
    const result = {
      [this.memoryKey]: getBufferString(
        messages,
        this.humanPrefix,
        this.aiPrefix
      ),
    };
    return result;
  }
}
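
/*
 * A minimal usage sketch (not part of the library source; assumes you import
 * `BufferMemory` from your app's LangChain entrypoint, e.g. "langchain/memory"):
 *
 *   const memory = new BufferMemory();
 *
 *   // `saveContext` (inherited from `BaseChatMemory`) appends one
 *   // human/AI message pair to the underlying chat history.
 *   await memory.saveContext({ input: "Hi! I'm Jim." }, { output: "Hello Jim!" });
 *
 *   // With the default `returnMessages: false`, the history is rendered
 *   // as a single prefixed string under the default "history" key:
 *   const vars = await memory.loadMemoryVariables({});
 *   // => { history: "Human: Hi! I'm Jim.\nAI: Hello Jim!" }
 */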