Low-level orchestration framework for building stateful, multi-actor applications with LLMs
Get started with LangGraph in minutes. This guide covers installation, your first graph, and essential patterns.
npm install @langchain/langgraph @langchain/core
For persistence:
npm install @langchain/langgraph-checkpoint
Use Annotation.Root() to define your graph's state structure:
import { Annotation } from "@langchain/langgraph";
const State = Annotation.Root({
count: Annotation<number>({
default: () => 0
}),
messages: Annotation<string[]>({
reducer: (current, update) => [...current, ...update],
default: () => []
})
});
Nodes are functions that receive state and return state updates:
// Node: bumps the counter by one and appends a log entry describing the change.
const incrementNode = (state: typeof State.State) => {
  const next = state.count + 1;
  return {
    count: next,
    messages: [`Count is now ${next}`],
  };
};
// Node: multiplies the counter by two and appends a log entry with the result.
const doubleNode = (state: typeof State.State) => {
  const doubled = state.count * 2;
  return {
    count: doubled,
    messages: [`Doubled to ${doubled}`],
  };
};
import { StateGraph, START, END } from "@langchain/langgraph";
const graph = new StateGraph(State)
.addNode("increment", incrementNode)
.addNode("double", doubleNode)
.addEdge(START, "increment")
.addEdge("increment", "double")
.addEdge("double", END)
.compile();
const result = await graph.invoke({ count: 5 });
console.log(result);
// {
// count: 12, // (5 + 1) * 2
// messages: ["Count is now 6", "Doubled to 12"]
// }
Use addConditionalEdges to route based on state:
// Routing function: selects the "high" branch once the counter passes 10,
// otherwise routes to "low".
const routeByCount = (state: typeof State.State) => {
  if (state.count > 10) {
    return "high";
  }
  return "low";
};
const graph = new StateGraph(State)
.addNode("check", (s) => s)
.addNode("high", highNode)
.addNode("low", lowNode)
.addEdge(START, "check")
.addConditionalEdges("check", routeByCount, {
high: "high",
low: "low"
})
.addEdge("high", END)
.addEdge("low", END)
.compile();
Stream intermediate states as they're produced:
for await (const state of await graph.stream({ count: 0 }, {
streamMode: "values"
})) {
console.log(state);
}
// Prints state after each node execution
Stream just the updates:
for await (const update of await graph.stream({ count: 0 }, {
streamMode: "updates"
})) {
console.log(update);
}
// { increment: { count: 1, messages: [...] } }
// { double: { count: 2, messages: [...] } }
Use a checkpointer to save state between invocations:
import { MemorySaver } from "@langchain/langgraph-checkpoint";
const checkpointer = new MemorySaver();
const graph = new StateGraph(State)
.addNode("increment", incrementNode)
.addEdge(START, "increment")
.addEdge("increment", END)
.compile({ checkpointer });
// First execution
await graph.invoke({ count: 0 }, {
configurable: { thread_id: "thread-1" }
});
// Continue from checkpoint
await graph.invoke({ count: 10 }, {
configurable: { thread_id: "thread-1" }
});
Use MessagesAnnotation for message-based workflows:
import { MessagesAnnotation } from "@langchain/langgraph";
import { HumanMessage, AIMessage } from "@langchain/core/messages";
const chatGraph = new StateGraph(MessagesAnnotation)
.addNode("respond", async (state) => {
const userMessage = state.messages[state.messages.length - 1];
const response = await llm.invoke([...state.messages]);
return { messages: [response] };
})
.addEdge(START, "respond")
.addEdge("respond", END)
.compile();
const result = await chatGraph.invoke({
messages: [new HumanMessage("Hello!")]
});
Use prebuilt agent patterns:
import { createReactAgent } from "@langchain/langgraph/prebuilt";
import { ChatOpenAI } from "@langchain/openai";
import { tool } from "@langchain/core/tools";
import { z } from "zod";
// Define tools
// Calculator tool the agent can call with a math expression string.
// NOTE(review): eval() executes arbitrary JavaScript supplied via the model's
// tool input — unsafe if the expression can be attacker-influenced. Use a
// dedicated math-expression parser in production code.
const calculator = tool((input) => {
return eval(input.expression).toString();
}, {
name: "calculator",
description: "Evaluate math expressions",
schema: z.object({
expression: z.string()
})
});
// Create agent
// Prebuilt ReAct-style agent: alternates between LLM calls and tool
// executions until the model produces a final answer.
const agent = createReactAgent({
llm: new ChatOpenAI({ model: "gpt-4" }),
tools: [calculator]
});
// Use agent
const result = await agent.invoke({
messages: [{ role: "user", content: "What is 25 * 4?" }]
});
Simpler workflow definition for straightforward tasks:
import { task, entrypoint } from "@langchain/langgraph";
// Task: marks a single item as processed and returns the tagged string.
const processItem = task("process", async (item: string) => `Processed: ${item}`);
// Entrypoint: fans out over all items in parallel and gathers the results
// in their original order.
const workflow = entrypoint("batch", async (items: string[]) => {
  const pending = items.map((item) => processItem(item));
  return Promise.all(pending);
});
const result = await workflow.invoke(["a", "b", "c"]);
// ["Processed: a", "Processed: b", "Processed: c"]// Last value wins (default)
field: Annotation<number>
// Array concatenation
items: Annotation<string[]>({
reducer: (a, b) => a.concat(b),
default: () => []
})
// Object merging
metadata: Annotation<Record<string, any>>({
reducer: (a, b) => ({ ...a, ...b }),
default: () => ({})
})
// Custom aggregation
total: Annotation<number>({
reducer: (a, b) => a + b,
default: () => 0
})
// Direct edge
.addEdge("nodeA", "nodeB")
// Conditional edge
.addConditionalEdges("nodeA", routingFunction, {
"pathX": "nodeB",
"pathY": "nodeC"
})
// Edge from START
.addEdge(START, "firstNode")
// Edge to END
.addEdge("lastNode", END){ streamMode: "values" } // Complete state after each step
{ streamMode: "updates" } // Only state changes per node
{ streamMode: "debug" } // Detailed execution events
{ streamMode: "messages" } // Messages from writer()
{ streamMode: "custom" } // Custom data from writer()This occurs when multiple nodes write to a LastValue channel in the same step. Use a reducer or ensure only one node writes per step.
// Fix: Add a reducer
messages: Annotation<string[]>({
reducer: (a, b) => a.concat(b), // Allows multiple writes
default: () => []
})
Increase the recursion limit or check for infinite loops:
await graph.invoke(input, {
recursionLimit: 100 // Default is 25
});
Ensure node return types match state definition:
// State expects these fields
const State = Annotation.Root({
count: Annotation<number>,
name: Annotation<string>
});
// Node must return compatible types
const node = (state: typeof State.State) => {
return {
count: state.count + 1, // ✓ number
name: "updated" // ✓ string
};
};