Quarkus extension that integrates LangChain4j's agentic capabilities, enabling developers to build AI agent-based applications using declarative patterns with support for multiple agent types, agent-to-agent communication, and CDI integration.
This document describes how to manage chat memory isolation and parameter passing using @MemoryId and @V annotations.
Marks a parameter as the memory identifier for chat memory isolation, enabling separate conversation histories per user or session.
/**
 * Marks a parameter as the memory identifier.
 * Enables per-user/per-session chat memory isolation: invocations made with
 * different identifier values use separate conversation histories.
 */
@Target(ElementType.PARAMETER)
@Retention(RetentionPolicy.RUNTIME)
@interface MemoryId {
}
// Usage
@Agent
String chat(@MemoryId String userId, @V("message") String message);

Usage Example:
/**
 * Chat agent with per-user memory isolation: each distinct userId passed to
 * {@code chat()} maintains its own conversation history.
 */
public interface MultiUserChatAgent {

    /**
     * Sends a message on behalf of the given user.
     *
     * @param userId  memory key; conversation history is isolated per user id
     * @param message user text, injected into the {{message}} template variable
     * @return the assistant's reply (also stored under outputKey "response")
     */
    @SystemMessage("You are a helpful assistant. Remember the conversation history with each user.")
    @UserMessage("{{message}}")
    @Agent(description = "Multi-user chat agent", outputKey = "response")
    String chat(@MemoryId String userId, @V("message") String message);

    /**
     * Supplies the chat memory for a given memory id.
     * NOTE(review): presumably the framework invokes this once per distinct
     * memoryId, yielding an independent 10-message window per user — confirm;
     * the memoryId parameter itself is not used here.
     */
    @ChatMemoryProviderSupplier
    static ChatMemory chatMemoryProvider(Object memoryId) {
        // Separate memory for each user
        return MessageWindowChatMemory.withMaxMessages(10);
    }

    /** Supplies the chat model; reads the API key from the environment. */
    @ChatModelSupplier
    static ChatModel chatModel() {
        return new OpenAiChatModel.builder()
                .apiKey(System.getenv("OPENAI_API_KEY"))
                .modelName("gpt-4")
                .build();
    }
}
// Usage - each user has isolated memory
@Inject
MultiUserChatAgent chatAgent;
String response1 = chatAgent.chat("user123", "My name is Alice");
String response2 = chatAgent.chat("user456", "My name is Bob");
String response3 = chatAgent.chat("user123", "What is my name?"); // Remembers "Alice"
String response4 = chatAgent.chat("user456", "What is my name?"); // Remembers "Bob"

/**
 * Chat agent keyed by an opaque session id rather than a user id; each
 * session id value gets its own isolated conversation history.
 */
public interface SessionChatAgent {

    /**
     * Sends a message within the given session.
     *
     * @param sessionId memory key; histories are isolated per session
     * @param message   user text, bound to the {{message}} template variable
     * @return the assistant's reply (stored under outputKey "response")
     */
    @Agent(description = "Session-based chat", outputKey = "response")
    String chat(@MemoryId String sessionId, @V("message") String message);

    /** Supplies a 20-message sliding-window memory for each session id. */
    @ChatMemoryProviderSupplier
    static ChatMemory chatMemoryProvider(Object sessionId) {
        // Create memory per session
        return MessageWindowChatMemory.withMaxMessages(20);
    }

    /** Supplies the chat model; reads the API key from the environment. */
    @ChatModelSupplier
    static ChatModel chatModel() {
        return new OpenAiChatModel.builder()
                .apiKey(System.getenv("OPENAI_API_KEY"))
                .modelName("gpt-4")
                .build();
    }
}
// Usage with session IDs
@Inject
SessionChatAgent agent;
String session1 = UUID.randomUUID().toString();
String session2 = UUID.randomUUID().toString();
agent.chat(session1, "Remember this code: ABC123");
agent.chat(session2, "Remember this code: XYZ789");
String result1 = agent.chat(session1, "What code did I give you?"); // Returns ABC123
String result2 = agent.chat(session2, "What code did I give you?"); // Returns XYZ789

/**
 * Chat agent whose memory key is a caller-composed string (e.g. tenant + user),
 * allowing multi-dimensional isolation without framework support for tuples.
 */
public interface TenantAwareChatAgent {

    /**
     * Sends a message under a composite memory key.
     *
     * @param memoryId caller-built key, e.g. "tenant:acme:user:alice"
     * @param message  user text, bound to the {{message}} template variable
     * @return the assistant's reply (stored under outputKey "response")
     */
    @Agent(description = "Tenant and user aware chat", outputKey = "response")
    String chat(@MemoryId String memoryId, @V("message") String message);

    /** Supplies a 15-message sliding-window memory per composite key. */
    @ChatMemoryProviderSupplier
    static ChatMemory chatMemoryProvider(Object memoryId) {
        return MessageWindowChatMemory.withMaxMessages(15);
    }

    /** Supplies the chat model; reads the API key from the environment. */
    @ChatModelSupplier
    static ChatModel chatModel() {
        return new OpenAiChatModel.builder()
                .apiKey(System.getenv("OPENAI_API_KEY"))
                .modelName("gpt-4")
                .build();
    }
}
// Usage with composite keys
@Inject
TenantAwareChatAgent agent;
String memoryKey = "tenant:acme:user:alice";
agent.chat(memoryKey, "Hello");

Marks method parameters as variables accessible in templates and the agentic scope.
/**
 * Marks a parameter as a named template/scope variable.
 * The value is accessible in prompt templates via {{variableName}} and is
 * stored in the agentic scope, where sub-agents can read it.
 *
 * @param value the variable name used in templates and in the agentic scope
 */
@Target(ElementType.PARAMETER)
@Retention(RetentionPolicy.RUNTIME)
@interface V {
    String value();
}
// Usage
@Agent
String process(@V("input") String input, @V("options") String options);

Usage Example:
/**
 * Stateless agent demonstrating multiple @V parameters: each value is
 * substituted into the matching {{placeholder}} of the user-message template.
 */
public interface TemplateAgent {

    /**
     * Processes data according to the templated instructions.
     *
     * @param data         raw payload, bound to {{data}}
     * @param dataType     description of the data kind, bound to {{dataType}}
     * @param algorithm    processing algorithm name, bound to {{algorithm}}
     * @param filters      filters to apply, bound to {{filters}}
     * @param outputFormat desired output format, bound to {{outputFormat}}
     * @return the model's response (stored under outputKey "result")
     */
    @UserMessage("""
        Process the following {{dataType}} data using {{algorithm}} algorithm:
        Data: {{data}}
        Apply these filters: {{filters}}
        Output format: {{outputFormat}}
        """)
    @Agent(description = "Processes data with templates", outputKey = "result")
    String process(
            @V("data") String data,
            @V("dataType") String dataType,
            @V("algorithm") String algorithm,
            @V("filters") String filters,
            @V("outputFormat") String outputFormat
    );

    /** Supplies the chat model; reads the API key from the environment. */
    @ChatModelSupplier
    static ChatModel chatModel() {
        return new OpenAiChatModel.builder()
                .apiKey(System.getenv("OPENAI_API_KEY"))
                .modelName("gpt-4")
                .build();
    }
}
// Usage
@Inject
TemplateAgent agent;
String result = agent.process(
"sensor readings", // data
"time-series", // dataType
"moving-average", // algorithm
"outlier-removal", // filters
"JSON" // outputFormat
);

/**
 * First stage of the pipeline: collects raw data and publishes it to the
 * agentic scope under outputKey "rawData" for downstream agents.
 */
public interface DataCollector {

    /**
     * @param source    data source identifier, bound to {{source}}
     * @param timeRange time window to collect, bound to {{timeRange}}
     * @return collected data (stored under outputKey "rawData")
     */
    @Agent(description = "Collects data", outputKey = "rawData")
    String collect(@V("source") String source, @V("timeRange") String timeRange);
}
/**
 * Second stage: consumes "rawData" written by the previous agent via the
 * agentic scope, plus the caller-supplied processing mode.
 */
public interface DataProcessor {

    /**
     * @param rawData output of DataCollector, resolved from the agentic scope
     * @param mode    processing mode from the original pipeline input
     * @return processed data (stored under outputKey "processedData")
     */
    @Agent(description = "Processes data", outputKey = "processedData")
    String process(
            @V("rawData") String rawData,       // From previous agent
            @V("processingMode") String mode    // From original input
    );
}
/**
 * Final stage: analyzes the processed data. Demonstrates that both upstream
 * agent outputs and the original pipeline inputs are visible in the scope.
 */
public interface DataAnalyzer {

    /**
     * @param processedData output of DataProcessor, from the agentic scope
     * @param source        original pipeline input, still available in scope
     * @param depth         analysis depth from the original pipeline input
     * @return analysis result (stored under outputKey "analysis")
     */
    @Agent(description = "Analyzes data", outputKey = "analysis")
    String analyze(
            @V("processedData") String processedData,  // From previous agent
            @V("source") String source,                // From original input
            @V("analysisDepth") String depth           // From original input
    );
}
/**
 * Orchestrates the three sub-agents in sequence. All @V parameters passed to
 * {@code execute()} are written into the shared agentic scope, so every
 * sub-agent can resolve them by name; each sub-agent's outputKey becomes
 * available to later stages.
 */
public interface DataPipeline {

    /**
     * Runs collect → process → analyze.
     *
     * @param source    data source identifier, shared with sub-agents
     * @param timeRange collection window, shared with sub-agents
     * @param mode      processing mode, shared with sub-agents
     * @param depth     analysis depth, shared with sub-agents
     * @return the final analysis plus the full agentic scope
     */
    @SequenceAgent(
            outputKey = "analysis",
            subAgents = { DataCollector.class, DataProcessor.class, DataAnalyzer.class }
    )
    ResultWithAgenticScope<String> execute(
            @V("source") String source,
            @V("timeRange") String timeRange,
            @V("processingMode") String mode,
            @V("analysisDepth") String depth
    );

    /** Supplies the chat model; reads the API key from the environment. */
    @ChatModelSupplier
    static ChatModel chatModel() {
        return new OpenAiChatModel.builder()
                .apiKey(System.getenv("OPENAI_API_KEY"))
                .modelName("gpt-4")
                .build();
    }
}
// All @V parameters are available throughout the workflow

/**
 * Combines @MemoryId and @V: per-user conversation history plus template
 * variables that parameterize both the system and user messages.
 */
public interface ContextualChatAgent {

    /**
     * Sends a message with contextual template variables.
     *
     * @param userId   memory key; history is isolated per user
     * @param message  user text, bound to {{message}}
     * @param role     assistant persona, bound to {{role}} in the system message
     * @param userTier caller's tier, bound to {{userTier}}
     * @param tone     desired tone, bound to {{tone}}
     * @return the assistant's reply (stored under outputKey "response")
     */
    @SystemMessage("""
        You are a {{role}} assistant.
        User tier: {{userTier}}
        Always maintain {{tone}} tone.
        """)
    @UserMessage("{{message}}")
    @Agent(description = "Contextual chat with memory", outputKey = "response")
    String chat(
            @MemoryId String userId,
            @V("message") String message,
            @V("role") String role,
            @V("userTier") String userTier,
            @V("tone") String tone
    );

    /** Supplies a 10-message sliding-window memory per memory id. */
    @ChatMemoryProviderSupplier
    static ChatMemory chatMemoryProvider(Object memoryId) {
        return MessageWindowChatMemory.withMaxMessages(10);
    }

    /** Supplies the chat model; reads the API key from the environment. */
    @ChatModelSupplier
    static ChatModel chatModel() {
        return new OpenAiChatModel.builder()
                .apiKey(System.getenv("OPENAI_API_KEY"))
                .modelName("gpt-4")
                .build();
    }
}
// Usage
@Inject
ContextualChatAgent agent;
String response = agent.chat(
"user789", // memoryId
"How can I help?", // message
"technical expert", // role
"premium", // userTier
"professional" // tone
);

/**
 * Agent whose per-user memory is bounded by token count rather than message
 * count — useful for keeping prompts within the model's context window.
 */
public interface TokenLimitedAgent {

    /**
     * @param userId  memory key; history is isolated per user
     * @param message user text, bound to {{message}}
     * @return the assistant's reply
     */
    @Agent
    String chat(@MemoryId String userId, @V("message") String message);

    /** Supplies a token-bounded memory (1000 tokens, OpenAI tokenizer). */
    @ChatMemoryProviderSupplier
    static ChatMemory chatMemoryProvider(Object memoryId) {
        // Limit by tokens instead of message count
        return TokenWindowChatMemory.withMaxTokens(1000, new OpenAiTokenizer());
    }

    /** Supplies the chat model; reads the API key from the environment. */
    @ChatModelSupplier
    static ChatModel chatModel() {
        return new OpenAiChatModel.builder()
                .apiKey(System.getenv("OPENAI_API_KEY"))
                .modelName("gpt-4")
                .build();
}

/**
 * Agent with a single shared memory: no @MemoryId parameter and a
 * {@code @ChatMemorySupplier} (not a provider supplier), so every caller
 * contributes to the same 10-message conversation history.
 */
public interface SharedMemoryAgent {

    /**
     * @param message user text, bound to {{message}}
     * @return the assistant's reply
     */
    @Agent(description = "Shared memory across all users")
    String chat(@V("message") String message);

    /** Supplies the one shared memory instance. */
    @ChatMemorySupplier // Note: ChatMemorySupplier, not Provider
    static ChatMemory chatMemory() {
        // Single shared memory (no isolation)
        return MessageWindowChatMemory.withMaxMessages(10);
    }

    /** Supplies the chat model; reads the API key from the environment. */
    @ChatModelSupplier
    static ChatModel chatModel() {
        return new OpenAiChatModel.builder()
                .apiKey(System.getenv("OPENAI_API_KEY"))
                .modelName("gpt-4")
                .build();
}

/**
 * Stateless agent: with neither @ChatMemorySupplier nor
 * @ChatMemoryProviderSupplier declared, no conversation history is kept and
 * each call is independent.
 */
public interface StatelessAgent {

    /**
     * @param input payload, bound to {{input}}
     * @return the model's response
     */
    @Agent(description = "Stateless agent without memory")
    String process(@V("input") String input);

    // No @ChatMemorySupplier or @ChatMemoryProviderSupplier
    // Agent is stateless

    /** Supplies the chat model; reads the API key from the environment. */
    @ChatModelSupplier
    static ChatModel chatModel() {
        return new OpenAiChatModel.builder()
                .apiKey(System.getenv("OPENAI_API_KEY"))
                .modelName("gpt-4")
                .build();
}

/**
 * Demonstrates reading and rewriting @V variables from the agentic scope
 * inside an @ErrorHandler: scope state written here is visible on retry.
 */
public interface DynamicVariableAgent {

    /**
     * @param data     payload, bound to {{data}} and stored in scope as "data"
     * @param metadata auxiliary values, stored in scope as "metadata"
     * @return the model's response (stored under outputKey "result")
     */
    @Agent(description = "Accesses variables dynamically", outputKey = "result")
    String process(@V("data") String data, @V("metadata") Map<String, Object> metadata);

    /**
     * Recovers from a failed invocation by sanitizing the "data" variable in
     * the scope and retrying.
     * NOTE(review): readState appears to return the stored value directly
     * typed to the target — confirm whether a cast is required for the map.
     */
    @ErrorHandler
    static ErrorRecoveryResult handleError(ErrorContext context) {
        AgenticScope scope = context.agenticScope();
        // Access variables from scope
        String data = scope.readState("data");
        Map<String, Object> metadata = scope.readState("metadata");
        // Modify and retry
        scope.writeState("data", sanitize(data));
        return ErrorRecoveryResult.retry();
    }

    /** Trims the payload; maps null to the empty string. */
    static String sanitize(String data) {
        // Sanitization logic
        return data != null ? data.trim() : "";
    }

    /** Supplies the chat model; reads the API key from the environment. */
    @ChatModelSupplier
    static ChatModel chatModel() {
        return new OpenAiChatModel.builder()
                .apiKey(System.getenv("OPENAI_API_KEY"))
                .modelName("gpt-4")
                .build();
}

/**
 * Uses an @ErrorHandler as a validation/repair hook: repairable parameters
 * ("age", "category") are defaulted in the scope and the call is retried;
 * unrepairable input ("email") rethrows the original error.
 */
public interface ValidatedAgent {

    /**
     * @param email    user email, stored in scope as "email"
     * @param age      user age, stored in scope as "age"
     * @param category request category, stored in scope as "category"
     * @return the model's response (stored under outputKey "result")
     */
    @Agent(description = "Validates parameters", outputKey = "result")
    String process(
            @V("email") String email,
            @V("age") Integer age,
            @V("category") String category
    );

    /**
     * Validates scope variables after a failure, applying defaults where a
     * retry could succeed.
     */
    @ErrorHandler
    static ErrorRecoveryResult handleError(ErrorContext context) {
        AgenticScope scope = context.agenticScope();
        // Validate and provide defaults
        String email = scope.readState("email");
        if (email == null || !email.contains("@")) {
            // Email cannot be repaired here — propagate the failure
            System.err.println("Invalid email provided");
            return ErrorRecoveryResult.throwException();
        }
        Integer age = scope.readState("age");
        if (age == null || age < 0) {
            scope.writeState("age", 0);
            return ErrorRecoveryResult.retry();
        }
        String category = scope.readState("category");
        if (category == null) {
            scope.writeState("category", "general");
            return ErrorRecoveryResult.retry();
        }
        // All inputs look valid, so the error is not parameter-related; rethrow
        return ErrorRecoveryResult.throwException();
    }

    /** Supplies the chat model; reads the API key from the environment. */
    @ChatModelSupplier
    static ChatModel chatModel() {
        return new OpenAiChatModel.builder()
                .apiKey(System.getenv("OPENAI_API_KEY"))
                .modelName("gpt-4")
                .build();
    }
}

Install with Tessl CLI
npx tessl i tessl/maven-io-quarkiverse-langchain4j--quarkus-langchain4j-agentic@1.7.0