This guide covers LangChain's standardized message types, which represent the different roles in a conversation: user input, model responses, system instructions, and tool results.

Human messages represent input from the user:

import { HumanMessage } from "langchain";
// Simple text message
const msg = new HumanMessage("Hello!");
// With metadata
const msgWithMeta = new HumanMessage("Hello!", {
source: "web-app",
userId: "user-123",
});

AI messages represent responses from the model:

import { AIMessage } from "langchain";
const msg = new AIMessage("Hi there! How can I help you?");
const msgWithMeta = new AIMessage("Response", {
model: "gpt-4o",
tokens: 150,
});

System messages set the model's role and behavior:

import { SystemMessage } from "langchain";
const msg = new SystemMessage("You are a helpful assistant.");

Tool messages return a tool's output to the model, linked to the originating call by its tool_call_id:

import { ToolMessage } from "langchain";
const msg = new ToolMessage(
"Search results: ...",
"call_abc123" // tool_call_id
);
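
A ToolMessage normally answers a tool call that the model requested. As a sketch (the tool name, arguments, and output below are made up), the AI message's tool_calls entry and the ToolMessage share the same id:

import { AIMessage, ToolMessage } from "langchain";
// The model asks for a tool to be run...
const aiRequest = new AIMessage({
  content: "",
  tool_calls: [
    { name: "search", args: { query: "weather in Paris" }, id: "call_abc123" },
  ],
});
// ...and the tool's result is sent back with the matching tool_call_id
const toolResult = new ToolMessage({
  content: "Sunny, 22°C",
  tool_call_id: "call_abc123",
  name: "search",
});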

Agents accept messages in several formats:

import { createAgent } from "langchain";
const agent = createAgent({
model: "openai:gpt-4o",
tools: [],
});
// Simple strings
await agent.invoke({
messages: ["Hello!"],
});
// Role-based objects
await agent.invoke({
messages: [
{ role: "system", content: "You are helpful." },
{ role: "user", content: "Hello" },
],
});

Message class instances can be passed directly as well:

import { HumanMessage, AIMessage, SystemMessage } from "langchain";
await agent.invoke({
messages: [
new SystemMessage("You are helpful."),
new HumanMessage("Hello"),
new AIMessage("Hi!"),
new HumanMessage("How are you?"),
],
});

Human messages can carry multimodal content, such as text combined with an image:

import { HumanMessage } from "langchain";
const msg = new HumanMessage([
{ type: "text", text: "What's in this image?" },
{ type: "image_url", image_url: "https://example.com/image.jpg" },
]);

Provider-specific options, such as the image detail level, can be set on the image block:

const msg = new HumanMessage([
{ type: "text", text: "Describe this image in detail" },
{
type: "image_url",
image_url: {
url: "https://example.com/image.jpg",
detail: "high", // "low", "high", or "auto"
},
},
]);

Images can also be passed inline as base64-encoded data:

const msg = new HumanMessage([
{ type: "text", text: "Analyze this image" },
{
type: "image",
source: {
type: "base64",
media_type: "image/jpeg",
data: "base64EncodedData...",
},
},
]);
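
If the image lives on disk, you can base64-encode it yourself before building the message. A minimal Node.js sketch (the file path is a placeholder):

import * as fs from "node:fs";
import { HumanMessage } from "langchain";
// Read a local file and encode it as base64
const data = fs.readFileSync("./photo.jpg").toString("base64");
const localImageMsg = new HumanMessage([
  { type: "text", text: "Analyze this image" },
  {
    type: "image",
    source: { type: "base64", media_type: "image/jpeg", data },
  },
]);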

filterMessages selects a subset of a message list:

import { filterMessages, HumanMessage, AIMessage, SystemMessage } from "langchain";
const messages = [
new SystemMessage("You are helpful."),
new HumanMessage("Hello"),
new AIMessage("Hi!"),
new HumanMessage("How are you?"),
new AIMessage("I'm doing well!"),
];
// Keep only the last 2 messages (filterMessages selects by type, name, or id;
// use Array.prototype.slice for positional windows)
const recent = messages.slice(-2);
// Keep only human and AI messages
const conversation = filterMessages(messages, {
includeTypes: ["human", "ai"],
});
// Exclude system messages
const noSystem = filterMessages(messages, {
excludeTypes: ["system"],
});
// Keep the first 3 messages
const first = messages.slice(0, 3);
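
Messages can also carry an optional name, which filterMessages can match on in the same way as types (a brief sketch; the names here are illustrative):

import { filterMessages, HumanMessage } from "langchain";
const named = [
  new HumanMessage({ content: "Hi, I'm Alice", name: "alice" }),
  new HumanMessage({ content: "And I'm Bob", name: "bob" }),
];
// Keep only messages whose name matches
const fromAlice = filterMessages(named, {
  includeNames: ["alice"],
});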

trimMessages shortens a conversation to fit within a token budget:

import { trimMessages, HumanMessage, AIMessage } from "langchain";
const messages = [
new HumanMessage("Message 1"),
new AIMessage("Response 1"),
new HumanMessage("Message 2"),
new AIMessage("Response 2"),
new HumanMessage("Message 3"),
new AIMessage("Response 3"),
];
// Trim to fit a token budget, keeping the most recent messages.
// trimMessages returns a Promise and needs a tokenCounter:
// a function that counts tokens (a chat model instance also works).
const trimmed = await trimMessages(messages, {
maxTokens: 100,
strategy: "last",
tokenCounter: (msgs) => msgs.length * 30, // rough per-message estimate
});
// With a custom token counter based on content length
const customTrimmed = await trimMessages(messages, {
maxTokens: 500,
tokenCounter: (msgs) => {
return msgs.reduce((sum, msg) => sum + msg.content.length, 0);
},
});
// strategy: "first" keeps the earliest messages instead of the latest
const first = await trimMessages(messages, {
maxTokens: 100,
strategy: "first",
tokenCounter: (msgs) => msgs.length * 30,
});

To carry context across turns without persistence, pass the accumulated conversation history back into each call:

import { createAgent } from "langchain";
const agent = createAgent({
model: "openai:gpt-4o",
tools: [],
});
// Track history
let messages = [];
// Turn 1
let result = await agent.invoke({
messages: [...messages, { role: "user", content: "My name is Alice" }],
});
messages = result.messages;
// Turn 2
result = await agent.invoke({
messages: [...messages, { role: "user", content: "What's my name?" }],
});
messages = result.messages;
// Agent remembers: "Your name is Alice"

A checkpointer persists conversation state automatically, keyed by thread_id:

import { createAgent } from "langchain";
import { MemorySaver } from "@langchain/langgraph";
const checkpointer = new MemorySaver();
const agent = createAgent({
model: "openai:gpt-4o",
tools: [],
checkpointer: checkpointer,
});
// First conversation
await agent.invoke(
{ messages: [{ role: "user", content: "My name is Bob" }] },
{ configurable: { thread_id: "thread-1" } }
);
// Later conversation (state restored automatically)
const result = await agent.invoke(
{ messages: [{ role: "user", content: "What's my name?" }] },
{ configurable: { thread_id: "thread-1" } }
);
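
Each thread_id is an independent conversation, so a new id starts with no shared memory. Continuing with the same agent (the second thread id here is arbitrary):

// A different thread_id has its own state; the agent won't know the name "Bob"
await agent.invoke(
  { messages: [{ role: "user", content: "What's my name?" }] },
  { configurable: { thread_id: "thread-2" } }
);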

Filtering and trimming are often combined into a single history-management step before each call:

import { filterMessages, trimMessages, SystemMessage, HumanMessage } from "langchain";
async function manageConversation(messages, newMessage) {
// Add new message
messages.push(newMessage);
// Filter out old system messages
messages = filterMessages(messages, {
excludeTypes: ["system"],
});
// Trim to fit the context window (trimMessages is async and needs a token counter)
messages = await trimMessages(messages, {
maxTokens: 4000,
strategy: "last",
tokenCounter: (msgs) => msgs.reduce((sum, m) => sum + m.content.length, 0),
});
// Add current system message
messages.unshift(
new SystemMessage("You are a helpful assistant.")
);
return messages;
}
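
A usage sketch for the helper above, assuming the agent from the earlier createAgent examples:

let history = [];
const prepared = await manageConversation(history, new HumanMessage("Add milk to my shopping list"));
const result = await agent.invoke({ messages: prepared });
history = result.messages;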
const msg = new AIMessage("Hello!");
console.log(msg.content); // "Hello!"
// Array content for multimodal
const multimodal = new AIMessage([
{ type: "text", text: "Here's an image" },
{ type: "image_url", image_url: "https://..." },
]);
console.log(multimodal.content); // an array of content blocks
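
Because content can be either shape, it can help to normalize it before logging or token counting. A small hypothetical helper:

// Collect just the text portions of a message's content
function contentToText(msg) {
  if (typeof msg.content === "string") return msg.content;
  return msg.content
    .filter((block) => block.type === "text")
    .map((block) => block.text)
    .join("");
}
console.log(contentToText(multimodal)); // "Here's an image"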
model: "gpt-4o",
temperature: 0.7,
tokens: 150,
});
console.log(msg.additional_kwargs);
// { model: "gpt-4o", temperature: 0.7, tokens: 150 }
console.log(msg.response_metadata); // {}

Use instanceof to check a message's type:

import { AIMessage, HumanMessage } from "langchain";
const msg = new AIMessage("Hello");
if (msg instanceof AIMessage) {
console.log("This is an AI message");
}
if (msg instanceof HumanMessage) {
console.log("This is a human message");
}
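
instanceof works on in-memory instances; each message also reports its type as a short string, which can be handy after serialization (a sketch; the exact accessor may vary by version):

console.log(msg._getType()); // "ai"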

Each message type also has a chunk variant:

import {
AIMessageChunk,
HumanMessageChunk,
SystemMessageChunk,
ToolMessageChunk,
} from "langchain";
// Used internally during streaming
const chunk = new AIMessageChunk("Hello");
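
Chunks of the same message can be merged as they arrive; AIMessageChunk exposes a concat method for this (a minimal sketch):

const part1 = new AIMessageChunk("Hello,");
const part2 = new AIMessageChunk(" world!");
console.log(part1.concat(part2).content); // "Hello, world!"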

Agents stream intermediate state as new messages are produced:

import { createAgent } from "langchain";
const agent = createAgent({
model: "openai:gpt-4o",
tools: [],
});
const stream = await agent.stream(
{ messages: [{ role: "user", content: "Tell me a story" }] },
{ streamMode: "values" }
);
for await (const state of stream) {
const lastMessage = state.messages[state.messages.length - 1];
if (lastMessage.content) {
process.stdout.write(String(lastMessage.content));
}
}

See Message API Reference for complete API documentation.