LangGraph is a low-level orchestration framework for building stateful, multi-actor applications with LLMs.
This guide covers interrupts and human-review workflows.
The interrupt() function pauses graph execution and requests human input.
// Pauses graph execution and surfaces `value` to the caller. When the graph
// is resumed with `new Command({ resume: ... })`, that resume payload is
// returned from this call as `R`.
function interrupt<I, R>(value: I): R;
// One pending interrupt: an optional id plus the value passed to interrupt().
type Interrupt<Value = any> = {
id?: string;
value?: Value;
};
// Type guard: true when a graph result carries pending interrupts under the
// INTERRUPT sentinel key.
function isInterrupted<Value = unknown>(
values: unknown
): values is { [INTERRUPT]: Interrupt<Value>[] };
const INTERRUPT: "__interrupt__";
import { interrupt, isInterrupted, INTERRUPT, Command } from "@langchain/langgraph";
import { MemorySaver } from "@langchain/langgraph-checkpoint";
// Node that pauses the graph and asks a human to approve or reject the action.
const reviewNode = async (state: State) => {
  // Surface the question plus relevant context to the reviewer and wait.
  const review = interrupt({
    question: "Approve this action?",
    context: state.data
  });
  // The resume payload comes back as the return value of interrupt().
  if (review.approved) {
    return { status: "approved" };
  }
  return { status: "rejected" };
};
// Build a one-node graph; a checkpointer is required for interrupts to work.
const graph = new StateGraph(State)
.addNode("review", reviewNode)
.compile({ checkpointer: new MemorySaver() });
// First call - will interrupt
// thread_id identifies the checkpoint thread that will later be resumed.
const result = await graph.invoke(input, {
configurable: { thread_id: "1" }
});
// The result carries pending interrupts under the INTERRUPT sentinel key.
if (isInterrupted(result)) {
const interrupts = result[INTERRUPT];
console.log(interrupts[0].value); // { question: "Approve...", context: {...} }
// Resume with human input
// The resume payload becomes the return value of interrupt() in reviewNode.
await graph.invoke(
new Command({ resume: { approved: true } }),
{ configurable: { thread_id: "1" } }
);
}
Interrupt before or after specific nodes without modifying node code.
// Static interrupt points: pause execution around named nodes without
// calling interrupt() inside the node functions themselves.
const graph = new StateGraph(State)
.addNode("process", processNode)
.addNode("review", reviewNode)
.addNode("finalize", finalizeNode)
.compile({
checkpointer: new MemorySaver(),
interruptBefore: ["review"], // Pause before review
interruptAfter: ["process"] // Pause after process
});
const graph = new StateGraph(State)
.addNode("process", processNode)
.addNode("criticalStep", criticalNode)
.addEdge(START, "process")
.addEdge("process", "criticalStep")
.addEdge("criticalStep", END)
.compile({
checkpointer: new MemorySaver(),
// Static breakpoint: the graph checkpoints and stops before this node runs.
interruptBefore: ["criticalStep"]
});
// Execution stops before criticalStep
await graph.invoke(input, { configurable: { thread_id: "1" } });
// Check state
// getState reads the checkpoint saved when the graph paused.
const state = await graph.getState({ configurable: { thread_id: "1" } });
console.log(state.next); // ["criticalStep"]
// Review state and continue
await graph.invoke(null, { configurable: { thread_id: "1" } });
const graph = new StateGraph(State)
.addNode("generateOutput", generateNode)
.addNode("sendOutput", sendNode)
.compile({
checkpointer: new MemorySaver(),
interruptAfter: ["generateOutput"] // Review before sending
});
// Generate output and pause
await graph.invoke(input, { configurable: { thread_id: "1" } });
// Get generated output
// The paused checkpoint exposes the generated values for inspection.
const state = await graph.getState({ configurable: { thread_id: "1" } });
console.log(state.values.output); // Review the output
// Approve and continue
await graph.invoke(null, { configurable: { thread_id: "1" } });
Use "*" to interrupt before/after all nodes:
// Wildcard breakpoints: apply the static interrupt to every node.
compile({
checkpointer: new MemorySaver(),
interruptBefore: "*", // Pause before every node
// or
interruptAfter: "*" // Pause after every node
})
Multiple interrupt points in a single node:
// Node with three sequential human checkpoints. On each resume the node
// function re-runs from the top; interrupt() calls that already received a
// resume value return it immediately, so execution proceeds to the next
// pending checkpoint.
const multiReviewNode = async (state: State) => {
  // Checkpoint 1: review the raw step-1 data.
  const firstApproval = interrupt({
    step: 1,
    message: "Review initial processing",
    data: state.step1Data
  });
  const afterFirst = processData(firstApproval);

  // Checkpoint 2: review the intermediate output.
  const secondApproval = interrupt({
    step: 2,
    message: "Review intermediate results",
    data: afterFirst
  });
  const afterSecond = processData(secondApproval);

  // Checkpoint 3: final sign-off before producing the result.
  const finalApproval = interrupt({
    step: 3,
    message: "Final approval",
    data: afterSecond
  });
  return { result: finalProcess(finalApproval) };
};
const graph = new StateGraph(State)
.addNode("multiReview", multiReviewNode)
.compile({ checkpointer: new MemorySaver() });
// Execute - will interrupt at step 1
const config = { configurable: { thread_id: "1" } };
await graph.invoke(input, config);
// Resume with step 1 approval
// Each Command({ resume }) answers the oldest pending interrupt in turn.
await graph.invoke(
new Command({ resume: { approved: true, notes: "Step 1 OK" } }),
config
);
// Resume with step 2 approval
await graph.invoke(
new Command({ resume: { approved: true, notes: "Step 2 OK" } }),
config
);
// Resume with step 3 approval
const result = await graph.invoke(
new Command({ resume: { approved: true, notes: "Final OK" } }),
config
);
Interrupt only when certain conditions are met:
// Node that only requests human review for high-value transactions;
// everything else is processed without pausing.
const conditionalReviewNode = async (state: State) => {
// Only interrupt for high-value transactions
if (state.transactionAmount > 10000) {
const approval = interrupt({
message: "High-value transaction requires approval",
amount: state.transactionAmount,
details: state.transactionDetails
});
if (!approval.approved) {
return { status: "rejected", reason: approval.reason };
}
}
// Process normally
return { status: "approved", processed: true };
};
Use interrupts for error handling and recovery:
// Node that catches failures from riskyOperation and asks a human how to
// recover: retry the operation, skip it, or abort the run.
const resilientNode = async (state: State) => {
try {
const result = await riskyOperation(state.data);
return { result, error: null };
} catch (error) {
// Interrupt for human decision
const decision = interrupt({
error: error.message,
data: state.data,
question: "Retry, skip, or abort?"
});
if (decision.action === "retry") {
// NOTE(review): a second failure here propagates uncaught — confirm intended.
const retryResult = await riskyOperation(state.data);
return { result: retryResult, error: null };
} else if (decision.action === "skip") {
return { result: null, error: "skipped", skipped: true };
} else {
// Any other action aborts the run by throwing out of the node.
throw new Error("Operation aborted by user");
}
}
};
const result = await graph.invoke(input, config);
// Detect whether the invoke() result is paused on one or more interrupts.
if (isInterrupted(result)) {
console.log("Execution interrupted");
// All pending interrupts live under the INTERRUPT sentinel key.
const interrupts = result[INTERRUPT];
for (const interrupt of interrupts) {
console.log("Interrupt ID:", interrupt.id);
console.log("Interrupt value:", interrupt.value);
}
// Handle interrupt...
}
const state = await graph.getState(config);
// Check if graph is waiting at an interrupt
// state.next lists nodes that have not yet run; non-empty means paused.
if (state.next.length > 0) {
console.log("Graph is paused at:", state.next);
// Check if there are pending interrupts
// Each task carries the interrupts raised while it was executing.
if (state.tasks.some(task => task.interrupts.length > 0)) {
console.log("Has pending interrupts");
}
}
// Resume and update state
// Resume the pending interrupt and apply a state update in the same step.
await graph.invoke(
new Command({
resume: { approved: true },
update: { reviewedAt: new Date().toISOString() }
}),
config
);
// Resume and override routing
// Resume and jump directly to a named node, overriding the graph's edges.
await graph.invoke(
new Command({
resume: { action: "special_handling" },
goto: "specialHandler" // Override normal flow
}),
config
);
Simply continue execution:
// Continue from where it paused
await graph.invoke(null, config);
import {
StateGraph,
Annotation,
interrupt,
isInterrupted,
INTERRUPT,
Command,
START,
END
} from "@langchain/langgraph";
import { MemorySaver } from "@langchain/langgraph-checkpoint";
// Shared state for the multi-stage approval workflow.
const ApprovalState = Annotation.Root({
// The request under review (shape defined by the caller).
request: Annotation<any>,
// Audit trail of decisions; the reducer appends rather than replaces,
// so each review node contributes its own entry.
approvals: Annotation<Array<{
step: string;
approver: string;
decision: boolean;
timestamp: string;
}>>({
reducer: (a, b) => a.concat(b),
default: () => []
}),
// Workflow phase, e.g. "pending", "manager_approved", "director_approved",
// "rejected", "completed" (values written by the nodes below).
status: Annotation<string>({ default: () => "pending" })
});
// First review stage: pause until a manager submits a decision.
const managerReview = async (state: typeof ApprovalState.State) => {
  // Surface the request to the manager and wait for the resume payload.
  const decision = interrupt({
    type: "manager_approval",
    request: state.request,
    message: "Manager approval required"
  });
  // Record the decision in the append-only audit trail.
  const entry = {
    step: "manager",
    approver: decision.approverName,
    decision: decision.approved,
    timestamp: new Date().toISOString()
  };
  let status: string;
  if (decision.approved) {
    status = "manager_approved";
  } else {
    status = "rejected";
  }
  return { approvals: [entry], status };
};
// Second review stage (only reached after manager approval); the prior
// approvals are included so the director has full context.
const directorReview = async (state: typeof ApprovalState.State) => {
  const decision = interrupt({
    type: "director_approval",
    request: state.request,
    previousApprovals: state.approvals,
    message: "Director approval required"
  });
  const outcome = decision.approved ? "director_approved" : "rejected";
  // Append the director's decision to the audit trail.
  return {
    approvals: [{
      step: "director",
      approver: decision.approverName,
      decision: decision.approved,
      timestamp: new Date().toISOString()
    }],
    status: outcome
  };
};
// Terminal node for fully-approved requests: marks the workflow complete.
const processApproved = (state: typeof ApprovalState.State) => {
  const completedAt = new Date().toISOString();
  return {
    status: "completed",
    processedAt: completedAt
  };
};
// Terminal node for rejected requests: stamps the rejection time.
const handleRejection = (state: typeof ApprovalState.State) => {
  const rejectedAt = new Date().toISOString();
  return {
    status: "rejected",
    rejectedAt
  };
};
// Routing
// After manager review: continue to the director on approval, otherwise
// route to the rejection handler.
const routeAfterManager = (state: typeof ApprovalState.State) => {
  if (state.status === "manager_approved") {
    return "director";
  }
  return "rejected";
};
// After director review: proceed to final processing on approval, otherwise
// route to the rejection handler.
const routeAfterDirector = (state: typeof ApprovalState.State) => {
  if (state.status === "director_approved") {
    return "process";
  }
  return "rejected";
};
// Build graph
// Flow: START -> manager -> (director | rejected) -> (process | rejected) -> END
const graph = new StateGraph(ApprovalState)
.addNode("manager", managerReview)
.addNode("director", directorReview)
.addNode("process", processApproved)
.addNode("rejected", handleRejection)
.addEdge(START, "manager")
.addConditionalEdges("manager", routeAfterManager, {
director: "director",
rejected: "rejected"
})
.addConditionalEdges("director", routeAfterDirector, {
process: "process",
rejected: "rejected"
})
.addEdge("process", END)
.addEdge("rejected", END)
// The checkpointer persists state at each interrupt so the thread can resume.
.compile({ checkpointer: new MemorySaver() });
// Usage
// One thread_id tracks the whole approval conversation across resumes.
const config = { configurable: { thread_id: "approval-123" } };
// Start approval process
const result1 = await graph.invoke({
request: {
type: "budget_increase",
amount: 50000,
department: "Engineering"
}
}, config);
// First pause: managerReview's interrupt() is pending.
if (isInterrupted(result1)) {
console.log("Waiting for manager approval");
const interruptData = result1[INTERRUPT][0].value;
console.log(interruptData.message);
// Manager approves
// The resume payload is returned from interrupt() inside managerReview.
const result2 = await graph.invoke(
new Command({
resume: {
approved: true,
approverName: "John Manager"
}
}),
config
);
// Second pause: directorReview's interrupt() is pending.
if (isInterrupted(result2)) {
console.log("Waiting for director approval");
// Director approves
const finalResult = await graph.invoke(
new Command({
resume: {
approved: true,
approverName: "Jane Director"
}
}),
config
);
console.log("Final status:", finalResult.status);
console.log("Approvals:", finalResult.approvals);
}
}
This guide provides comprehensive coverage of human-in-the-loop patterns in LangGraph.