Core LangChain.js abstractions and schemas for building applications with Large Language Models
Review status (template fields — pending completion):
- Best practices: Pending — Does it follow best practices?
- Impact: Pending — No eval scenarios have been run
- Risk: Pending — The risk profile of this skill
Memory management and storage abstractions for maintaining conversation state and persistence. These components enable applications to remember context across interactions.
Abstract base class for storing and retrieving chat messages.
/**
 * Abstract base class for chat message storage.
 *
 * Concrete subclasses decide where messages live (in memory, a database,
 * a file, …); callers interact only with this async interface.
 */
abstract class BaseChatMessageHistory {
  /** Get all stored messages. */
  abstract getMessages(): Promise<BaseMessage[]>;
  /** Append a single message to the history. */
  abstract addMessage(message: BaseMessage): Promise<void>;
  /** Append multiple messages to the history in one call. */
  abstract addMessages(messages: BaseMessage[]): Promise<void>;
  /** Convenience helper: append a user message given as plain text. */
  abstract addUserMessage(message: string): Promise<void>;
  /** Convenience helper: append an AI message given as plain text. */
  abstract addAIChatMessage(message: string): Promise<void>;
  /** Remove all stored messages. */
  abstract clear(): Promise<void>;
}

Base implementation using array-based storage.
/**
 * Base implementation of BaseChatMessageHistory backed by an in-process
 * array of messages. Subclasses may add persistence on top.
 */
abstract class BaseListChatMessageHistory extends BaseChatMessageHistory {
  /** Internal message storage array. */
  protected messages: BaseMessage[];
  /** @param messages - Optional initial messages to seed the history. */
  constructor(messages?: BaseMessage[]);
  /** Get all messages currently in the array. */
  async getMessages(): Promise<BaseMessage[]>;
  /** Append a single message. */
  async addMessage(message: BaseMessage): Promise<void>;
  /** Append multiple messages in one call. */
  async addMessages(messages: BaseMessage[]): Promise<void>;
  /** Append a user message given as plain text. */
  async addUserMessage(message: string): Promise<void>;
  /** Append an AI message given as plain text. */
  async addAIChatMessage(message: string): Promise<void>;
  /** Remove all messages from the array. */
  async clear(): Promise<void>;
}

Simple in-memory implementation for chat message storage.
/**
 * In-memory chat message history implementation.
 *
 * Stores messages only for the lifetime of the process; nothing is persisted.
 */
class InMemoryChatMessageHistory extends BaseListChatMessageHistory {
  /** @param messages - Optional initial messages to seed the history. */
  constructor(messages?: BaseMessage[]);
}

Usage Examples:
import { InMemoryChatMessageHistory, HumanMessage, AIMessage } from "@langchain/core/memory";

// Create an in-memory history
const history = new InMemoryChatMessageHistory();

// Add plain-text messages via the convenience helpers
await history.addUserMessage("Hello, how are you?");
await history.addAIChatMessage("I'm doing well, thank you!");

// Add message objects directly
await history.addMessage(new HumanMessage("What can you help me with?"));
await history.addMessage(new AIMessage("I can help with various tasks like answering questions, writing, and more."));

// Get all messages (insertion order)
const messages = await history.getMessages();
console.log(messages);

// Clear history
await history.clear();

Abstract base class for memory components.
/**
 * Abstract base class for memory components.
 *
 * A memory component loads variables before a chain runs and saves the
 * resulting input/output context after it completes.
 */
abstract class BaseMemory {
  /** Keys under which this component returns its memory variables. */
  abstract get memoryKeys(): string[];
  /** Load memory variables to merge into the given chain inputs. */
  abstract loadMemoryVariables(inputs: InputValues): Promise<MemoryVariables>;
  /** Persist one chain execution's inputs and outputs into memory. */
  abstract saveContext(inputValues: InputValues, outputValues: OutputValues): Promise<void>;
  /** Clear all memory contents. */
  abstract clear(): Promise<void>;
}

Memory that maintains a buffer of conversation messages.
/**
 * Memory that keeps the full conversation as a buffer of messages.
 */
class ConversationBufferMemory extends BaseMemory {
  /** Backing chat message history instance. */
  chatHistory: BaseChatMessageHistory;
  /** Prefix used for human messages when the buffer is rendered as a string (not a variable key). */
  humanPrefix: string;
  /** Prefix used for AI messages when the buffer is rendered as a string (not a variable key). */
  aiPrefix: string;
  /** Key under which the buffer appears in the returned memory variables. */
  memoryKey: string;
  /** Input key to read from chain inputs; presumably inferred when omitted — TODO confirm. */
  inputKey?: string;
  /** Output key to read from chain outputs; presumably inferred when omitted — TODO confirm. */
  outputKey?: string;
  /** If true, return the buffer as message objects; otherwise as a formatted string. */
  returnMessages: boolean;
  constructor(fields?: ConversationBufferMemoryInput);
  /** Keys contributed by this memory (i.e. [memoryKey]). */
  get memoryKeys(): string[];
  /** Load the conversation buffer as memory variables. */
  async loadMemoryVariables(inputs: InputValues): Promise<MemoryVariables>;
  /** Save one conversation turn (inputs and outputs) to memory. */
  async saveContext(inputValues: InputValues, outputValues: OutputValues): Promise<void>;
  /** Clear the conversation buffer. */
  async clear(): Promise<void>;
}

Usage Examples:
import { ConversationBufferMemory, InMemoryChatMessageHistory } from "@langchain/core/memory";

// Create memory with in-memory storage
const memory = new ConversationBufferMemory({
  chatHistory: new InMemoryChatMessageHistory(),
  returnMessages: true,
  memoryKey: "chat_history"
});

// Save conversation turns
await memory.saveContext(
  { input: "Hi there!" },
  { output: "Hello! How can I help you today?" }
);
await memory.saveContext(
  { input: "What's the weather like?" },
  { output: "I don't have access to current weather data, but I can help you find weather information." }
);

// Load memory variables
const memoryVars = await memory.loadMemoryVariables({});
console.log(memoryVars.chat_history); // Array of BaseMessage objects

// Use with a conversational chain
// (ConversationalRetrievalChain, model, and retriever come from a chains
// package — imports not shown in this example)
const chain = ConversationalRetrievalChain.fromLLM(model, retriever, {
  memory: memory,
  returnSourceDocuments: true,
});

Abstract base class for key-value storage.
/**
 * Abstract base class for key-value storage.
 * @template K - Key type (defaults to string)
 * @template V - Value type (defaults to unknown)
 */
abstract class BaseStore<K = string, V = unknown> {
  /** Get the values for the given keys; missing keys yield `undefined`. */
  abstract mget(keys: K[]): Promise<(V | undefined)[]>;
  /** Set the given key-value pairs. */
  abstract mset(keyValuePairs: [K, V][]): Promise<void>;
  /** Delete the given keys. */
  abstract mdelete(keys: K[]): Promise<void>;
  /**
   * Yield all keys, optionally filtered by prefix.
   * An abstract (bodiless) method declaration cannot carry a generator `*`
   * marker in TypeScript, so only the AsyncGenerator return type is declared;
   * concrete implementations use `async *yieldKeys(...)`.
   */
  abstract yieldKeys(prefix?: string): AsyncGenerator<K>;
}

Simple in-memory key-value store implementation.
/**
 * In-memory key-value store backed by a Map.
 * @template K - Key type (defaults to string)
 * @template V - Value type (defaults to unknown)
 */
class InMemoryStore<K = string, V = unknown> extends BaseStore<K, V> {
  /** Internal storage map. */
  protected store: Map<K, V>;
  constructor();
  /** Get multiple values; missing keys yield `undefined`. */
  async mget(keys: K[]): Promise<(V | undefined)[]>;
  /** Set multiple key-value pairs. */
  async mset(keyValuePairs: [K, V][]): Promise<void>;
  /** Delete multiple keys. */
  async mdelete(keys: K[]): Promise<void>;
  /** Yield all keys, optionally filtered by prefix. */
  async *yieldKeys(prefix?: string): AsyncGenerator<K>;
}

Usage Examples:
import { InMemoryStore } from "@langchain/core/stores";

// Create a typed store
const store = new InMemoryStore<string, { name: string; value: number }>();

// Set values
await store.mset([
  ["user:1", { name: "Alice", value: 100 }],
  ["user:2", { name: "Bob", value: 200 }],
  ["config:theme", { name: "dark", value: 1 }]
]);

// Get values
const users = await store.mget(["user:1", "user:2"]);
console.log(users); // [{ name: "Alice", value: 100 }, { name: "Bob", value: 200 }]

// Iterate keys with a prefix filter
for await (const key of store.yieldKeys("user:")) {
  console.log(key); // "user:1", "user:2"
}

// Delete keys
await store.mdelete(["user:1"]);

Memory that keeps only the last N conversation turns.
/**
 * Memory that keeps a sliding window of the most recent conversation turns.
 *
 * NOTE(review): only the members specific to this class are shown here; the
 * remaining BaseMemory contract (memoryKeys, saveContext, clear) is omitted
 * from this summary.
 */
class ConversationBufferWindowMemory extends BaseMemory {
  /** Number of most-recent conversation turns to keep in the window. */
  k: number;
  /** Backing chat message history instance. */
  chatHistory: BaseChatMessageHistory;
  constructor(fields: ConversationBufferWindowMemoryInput);
  /** Load only the last `k` conversation turns as memory variables. */
  async loadMemoryVariables(inputs: InputValues): Promise<MemoryVariables>;
}

Memory that maintains a running summary of the conversation.
/**
 * Memory that maintains a running summary of the conversation instead of
 * the full message buffer.
 *
 * NOTE(review): only the members specific to this class are shown here; the
 * remaining BaseMemory contract is omitted from this summary.
 */
class ConversationSummaryMemory extends BaseMemory {
  /** LLM used to generate and refresh the summary. */
  llm: BaseLanguageModel;
  /** Prompt template used for summarization. */
  prompt: PromptTemplate;
  /** Current conversation summary text. */
  buffer: string;
  constructor(fields: ConversationSummaryMemoryInput);
  /** Produce a new summary from the existing summary plus new messages. */
  async predictNewSummary(messages: BaseMessage[], existingSummary: string): Promise<string>;
}

type InputValues = Record<string, unknown>;
/** Values produced by a chain execution, keyed by output name. */
type OutputValues = Record<string, unknown>;
/** Variables returned by a memory component, keyed by memory key. */
type MemoryVariables = Record<string, unknown>;
/** Constructor options for ConversationBufferMemory; all fields optional. */
interface ConversationBufferMemoryInput {
chatHistory?: BaseChatMessageHistory;
humanPrefix?: string;
aiPrefix?: string;
memoryKey?: string;
inputKey?: string;
outputKey?: string;
returnMessages?: boolean;
}
/** Constructor options for ConversationBufferWindowMemory; `k` (window size) is required. */
interface ConversationBufferWindowMemoryInput extends ConversationBufferMemoryInput {
k: number;
}
/** Constructor options for ConversationSummaryMemory; `llm` is required. */
interface ConversationSummaryMemoryInput {
llm: BaseLanguageModel;
chatHistory?: BaseChatMessageHistory;
prompt?: PromptTemplate;
humanPrefix?: string;
aiPrefix?: string;
memoryKey?: string;
returnMessages?: boolean;
}