tessl install tessl/maven-io-quarkiverse-langchain4j--quarkus-langchain4j-core@1.5.0

Quarkus LangChain4j Core provides runtime integration for LangChain4j with the Quarkus framework, enabling declarative AI service creation through CDI annotations.
The AI Service Creation capability enables declarative creation of LangChain4j AI services as CDI beans through the @RegisterAiService annotation. This eliminates boilerplate code and automatically wires together chat models, tools, memory, and retrieval augmentation.
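For orientation, here is a minimal declarative service that relies only on defaults (the interface name and prompt are illustrative); the full annotation reference follows.

import io.quarkiverse.langchain4j.RegisterAiService;
import dev.langchain4j.service.UserMessage;

@RegisterAiService
public interface GreetingService {

    // The {name} template variable is bound to the parameter with the same name
    @UserMessage("Write a one-line greeting for {name}.")
    String greet(String name);
}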
Declares an interface as an AI service, creating a CDI bean implementation with automatic model integration, tool support, and memory management.
// Package: io.quarkiverse.langchain4j
/**
* Annotation to declaratively create LangChain4j AiServices as CDI beans.
* Place on an interface that defines AI service methods using LangChain4j annotations.
*
* Generated implementations are ApplicationScoped CDI beans that can be injected.
* Processing occurs at build time using Quarkus bytecode generation.
*/
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
public @interface RegisterAiService {
/**
* Supplier for the chat language model.
* Default uses CDI bean resolution with @ModelName qualifier.
*
* Marker classes:
* - BeanChatLanguageModelSupplier (default): Resolve from CDI, fail if not found
*
* @return Supplier class for ChatModel
*/
Class<? extends Supplier<ChatModel>> chatLanguageModelSupplier()
default BeanChatLanguageModelSupplier.class;
/**
* Supplier for the streaming chat language model.
* Default uses CDI bean resolution with @ModelName qualifier.
*
* Marker classes:
* - BeanStreamingChatLanguageModelSupplier (default): Resolve from CDI, fail if not found
*
* @return Supplier class for StreamingChatModel
*/
Class<? extends Supplier<StreamingChatModel>> streamingChatLanguageModelSupplier()
default BeanStreamingChatLanguageModelSupplier.class;
/**
* Named model selection via @ModelName qualifier.
* Allows selecting specific model instances from CDI container.
* Default value "<default>" uses the default model configuration.
*
* The model name must match a configuration key in application.properties:
* quarkus.langchain4j.<provider>.<modelName>.api-key=...
*
* @return Model name for CDI qualifier
*/
String modelName() default "<default>";
/**
* Tool classes available to this AI service.
* CDI beans will be used as tool implementations.
*
* Requirements:
* - Classes must be CDI beans (have scope annotation)
* - Methods annotated with @Tool will be discovered
* - Methods must be public and non-static
*
* Tool discovery occurs at build time.
*
* @return Array of tool classes
*/
Class<?>[] tools() default {};
/**
* Maximum number of sequential tool invocations allowed.
* If set to 0 (default), uses quarkus.langchain4j.ai-service.max-tool-executions property.
* If that is also unset, defaults to 10.
* Prevents infinite loops in tool calling.
*
* This limit applies per AI service method invocation, not globally.
* Reset after each method call completes.
*
* @return Maximum tool invocations (0 = use config default)
*/
int maxSequentialToolInvocations() default 0;
/**
* Supplier for chat memory provider.
* Default uses CDI bean resolution.
*
* Marker classes:
* - BeanChatMemoryProviderSupplier (default): Resolve from CDI, fail if not found
* - NoChatMemoryProviderSupplier: Disable chat memory for this service
*
* Memory provider is responsible for creating and managing ChatMemory instances
* keyed by memory ID (extracted from @MemoryId parameters).
*
* @return Supplier class for ChatMemoryProvider
*/
Class<? extends Supplier<ChatMemoryProvider>> chatMemoryProviderSupplier()
default BeanChatMemoryProviderSupplier.class;
/**
* Supplier for retrieval augmentor (RAG).
* Default uses CDI bean if it exists, otherwise none.
*
* Marker classes:
* - BeanIfExistsRetrievalAugmentorSupplier (default): Use CDI bean if present, else null
* - NoRetrievalAugmentorSupplier: Explicitly disable RAG for this service
*
* RetrievalAugmentor is invoked before sending user message to model,
* allowing enrichment with relevant documents.
*
* @return Supplier class for RetrievalAugmentor
*/
Class<? extends Supplier<RetrievalAugmentor>> retrievalAugmentor()
default BeanIfExistsRetrievalAugmentorSupplier.class;
/**
* Supplier for moderation model.
* Default uses CDI bean if it exists, otherwise none.
*
* Marker classes:
* - BeanIfExistsModerationModelSupplier (default): Use CDI bean if present, else null
*
* ModerationModel is used to check for inappropriate content
* before sending to the main model.
*
* @return Supplier class for ModerationModel
*/
Class<? extends Supplier<ModerationModel>> moderationModelSupplier()
default BeanIfExistsModerationModelSupplier.class;
/**
* Strategy for handling tool hallucinations.
* Default uses CDI bean if it exists, otherwise uses LangChain4j default.
*
* Marker classes:
* - BeanIfExistsToolHallucinationStrategy (default): Use CDI bean if present, else default
*
* Tool hallucination occurs when the LLM invents tool names or parameters
* that don't exist. The strategy determines how to handle these cases.
*
* @return Strategy function class
*/
Class<? extends Function<ToolExecutionRequest, ToolExecutionResultMessage>> toolHallucinationStrategy()
default BeanIfExistsToolHallucinationStrategy.class;
/**
* Supplier for dynamic tool provider.
* Default uses CDI bean if it exists, otherwise none.
*
* Marker classes:
* - BeanIfExistsToolProviderSupplier (default): Use CDI bean if present, else null
* - NoToolProviderSupplier: Explicitly disable dynamic tool provider
*
* ToolProvider allows tools to be provided dynamically at runtime
* instead of being specified statically in the tools() array.
*
* @return Supplier class for ToolProvider
*/
Class<? extends Supplier<ToolProvider>> toolProviderSupplier()
default BeanIfExistsToolProviderSupplier.class;
/**
* Allow continuous forced tool calling without user message interruption.
*
* When true, allows the model to call tools repeatedly without requiring
* a user message between invocations. Useful for agent-style interactions
* where the model should continue working until completion.
*
* When false (default), requires user message between tool call chains.
*
* @return Whether continuous forced tool calling is allowed
*/
boolean allowContinuousForcedToolCalling() default false;
}

Usage Example:
import io.quarkiverse.langchain4j.RegisterAiService;
import dev.langchain4j.service.UserMessage;
import dev.langchain4j.service.SystemMessage;
import jakarta.inject.Inject;
@RegisterAiService(
modelName = "gpt-4",
tools = {WeatherTool.class, CalculatorTool.class},
maxSequentialToolInvocations = 5
)
public interface MyAssistant {
@SystemMessage("You are a helpful assistant.")
@UserMessage("Answer: {question}")
String chat(String question);
}
// CDI injection in application code
@Inject
MyAssistant assistant;
public void example() {
String answer = assistant.chat("What is 5 + 3?");
}

Build-Time Processing: the implementation class and its method metadata are generated at build time using Quarkus bytecode generation; tool discovery also happens at build time.
Runtime Behavior: the generated implementation is an ApplicationScoped CDI bean that can be injected and used like any other bean.
Constraints and Requirements: the annotated type must be an interface, each service method needs LangChain4j annotations such as @UserMessage, and tool classes must be CDI beans.
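The tool classes referenced in the example above (WeatherTool, CalculatorTool) are not part of this extension. A minimal sketch of such a tool, following the CDI-bean and @Tool requirements listed for tools(), might look like this (the implementation is hypothetical):

import dev.langchain4j.agent.tool.Tool;
import jakarta.enterprise.context.ApplicationScoped;

@ApplicationScoped // tool classes must be CDI beans
public class WeatherTool {

    // Public, non-static methods annotated with @Tool are discovered at build time
    @Tool("Returns the current temperature in Celsius for the given city")
    public double currentTemperature(String city) {
        // Hypothetical implementation; call a real weather API here
        return 21.5;
    }
}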
Overrides the default set of tools for a specific AI service method, allowing method-level tool selection.
// Package: io.quarkiverse.langchain4j
/**
* Override tools for a specific AiService method.
* Takes precedence over tools defined in @RegisterAiService.
*
* When present, only tools listed in this annotation are available
* to the method, completely replacing the service-level tools.
*
* Use cases:
* - Restrict tool access for specific operations
* - Provide specialized tools for certain methods
* - Disable all tools by specifying empty array: @ToolBox({})
*/
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
public @interface ToolBox {
/**
* Tool classes available to this specific method.
* Empty array means no tools available for this method.
*
* Requirements same as @RegisterAiService.tools()
*
* @return Array of tool classes
*/
Class<?>[] value();
}

Usage Example:
import io.quarkiverse.langchain4j.RegisterAiService;
import io.quarkiverse.langchain4j.ToolBox;
@RegisterAiService(tools = {WeatherTool.class, CalculatorTool.class, DatabaseTool.class})
public interface MyAssistant {
// Uses all three tools: WeatherTool, CalculatorTool, DatabaseTool
String generalQuery(String question);
// Only uses WeatherTool
@ToolBox(WeatherTool.class)
String weatherQuery(String question);
// No tools available
@ToolBox({})
String noToolsQuery(String question);
// Multiple specific tools
@ToolBox({CalculatorTool.class, DatabaseTool.class})
String dataQuery(String question);
}

Behavior:
- @ToolBox completely overrides service-level tools

Seeds chat memory with example messages for few-shot learning, placed on static methods that return example conversations.
// Package: io.quarkiverse.langchain4j
/**
* Seed chat memory with example messages for few-shot learning.
* Place on a static method that returns List<ChatMessage>.
*
* The seed method is invoked once per memory ID when memory is first created.
* Seed messages are inserted before any user messages.
*
* Method requirements:
* - Must be static
* - Must be public
* - Must return List<ChatMessage> or List<dev.langchain4j.data.message.ChatMessage>
* - Can have zero parameters or one ChatMemorySeeder.Context parameter
*
* Use cases:
* - Provide few-shot learning examples
* - Set consistent behavior patterns
* - Initialize conversation context
* - Define response style guidelines
*/
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
public @interface SeedMemory {
}

Usage Example:
import io.quarkiverse.langchain4j.RegisterAiService;
import io.quarkiverse.langchain4j.SeedMemory;
import dev.langchain4j.service.SystemMessage;
import dev.langchain4j.service.UserMessage;
import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.data.message.AiMessage;
import java.util.List;
@RegisterAiService
public interface CodeReviewer {
@SystemMessage("You are a code reviewer.")
String reviewCode(@UserMessage String code);
@SeedMemory
static List<ChatMessage> seedExamples() {
return List.of(
UserMessage.from("def foo(): pass"),
AiMessage.from("Missing docstring and return type annotation."),
UserMessage.from("def bar():\n \"\"\"Does something.\"\"\"\n return 42"),
AiMessage.from("Add return type annotation: -> int")
);
}
}

Advanced Example with Context:
import io.quarkiverse.langchain4j.RegisterAiService;
import io.quarkiverse.langchain4j.SeedMemory;
import io.quarkiverse.langchain4j.runtime.aiservice.ChatMemorySeeder;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.data.message.UserMessage;
import java.util.List;
@RegisterAiService
public interface SmartAssistant {
String chat(String message);
String translate(String text);
@SeedMemory
static List<ChatMessage> seedWithContext(ChatMemorySeeder.Context context) {
// Different examples based on which method is called
if ("translate".equals(context.methodName())) {
return List.of(
UserMessage.from("Translate to French: Hello"),
AiMessage.from("Bonjour"),
UserMessage.from("Translate to French: Goodbye"),
AiMessage.from("Au revoir")
);
} else {
return List.of(
UserMessage.from("What is 2+2?"),
AiMessage.from("2+2 equals 4.")
);
}
}
}

Behavior:
- Multiple @SeedMemory methods are not supported (if more than one is present, the last one wins)

Memory Lifecycle with Seeding: the seed method is invoked once per memory ID when the memory is first created, and the seed messages are inserted before any user messages.
Marks classes created via AiServices.create() (programmatic creation) to make them Quarkus-aware with proper CDI integration.
// Package: io.quarkiverse.langchain4j
/**
* Mark classes created via AiServices.create() for Quarkus awareness.
* Enables CDI features for programmatically created AI services.
*
* When this annotation is present on an interface that is created
* using AiServices.create(), the resulting instance will have:
* - Access to CDI beans for tools
* - Access to CDI beans for guardrails
* - Access to configured models
* - Proper lifecycle management
*
* Without this annotation, programmatically created services
* operate independently of CDI and Quarkus features.
*
* Use case: When you need to create AI services dynamically at runtime
* but still want integration with Quarkus/CDI ecosystem.
*/
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
public @interface CreatedAware {
}

Usage Example:
import io.quarkiverse.langchain4j.CreatedAware;
import dev.langchain4j.model.chat.ChatModel;
import dev.langchain4j.service.AiServices;
import jakarta.inject.Inject;
@CreatedAware
public interface ProgrammaticService {
String chat(String message);
}
// Factory for programmatic creation
public class ServiceFactory {
@Inject
ChatModel model;
public ProgrammaticService create() {
// Creates service with Quarkus awareness
return AiServices.create(ProgrammaticService.class, model);
}
}

Behavior: when the annotation is present on an interface passed to AiServices.create(), the resulting instance gains access to CDI-managed tools and guardrails and to the configured models; without it, programmatically created services operate independently of CDI and Quarkus features.
Comparison: Declarative vs Programmatic:
| Feature | @RegisterAiService | @CreatedAware + AiServices.create() |
|---|---|---|
| Creation Time | Build time | Runtime |
| CDI Bean | Yes | No |
| Injectable | Yes | No |
| Tool Support | Yes | Yes (if CDI beans) |
| Memory Support | Yes | Yes (if configured) |
| Configuration | application.properties | Code |
| Flexibility | Low | High |
Marker classes used in @RegisterAiService to control model and component resolution strategies.
// Package: io.quarkiverse.langchain4j.runtime.aiservice
// Chat Language Model Suppliers
/**
* Resolves ChatModel from CDI using @ModelName qualifier.
* Throws exception if not found.
*/
public static class BeanChatLanguageModelSupplier
implements Supplier<ChatModel> {
@Override
public ChatModel get() {
// CDI resolution with @ModelName qualifier
}
}
/**
* Resolves StreamingChatModel from CDI using @ModelName qualifier.
* Throws exception if not found.
*/
public static class BeanStreamingChatLanguageModelSupplier
implements Supplier<StreamingChatModel> {
@Override
public StreamingChatModel get() {
// CDI resolution with @ModelName qualifier
}
}
// Chat Memory Suppliers
/**
* Resolves ChatMemoryProvider from CDI.
* Throws exception if not found.
*/
public static class BeanChatMemoryProviderSupplier
implements Supplier<ChatMemoryProvider> {
@Override
public ChatMemoryProvider get() {
// CDI resolution
}
}
/**
* Disables chat memory for the AI service.
* Returns null, causing service to operate without memory.
*/
public static class NoChatMemoryProviderSupplier
implements Supplier<ChatMemoryProvider> {
@Override
public ChatMemoryProvider get() {
return null;
}
}
// Retrieval Augmentor Suppliers
/**
* Resolves RetrievalAugmentor from CDI if present.
* Returns null if not found (no RAG).
*/
public static class BeanIfExistsRetrievalAugmentorSupplier
implements Supplier<RetrievalAugmentor> {
@Override
public RetrievalAugmentor get() {
// CDI resolution, null if not found
}
}
/**
* Disables retrieval augmentation.
* Returns null.
*/
public static class NoRetrievalAugmentorSupplier
implements Supplier<RetrievalAugmentor> {
@Override
public RetrievalAugmentor get() {
return null;
}
}
// Moderation Model Supplier
/**
* Resolves ModerationModel from CDI if present.
* Returns null if not found (no moderation).
*/
public static class BeanIfExistsModerationModelSupplier
implements Supplier<ModerationModel> {
@Override
public ModerationModel get() {
// CDI resolution, null if not found
}
}
// Image Model Supplier
/**
* Resolves ImageModel from CDI if present.
* Returns null if not found.
*/
public static class BeanIfExistsImageModelSupplier
implements Supplier<ImageModel> {
@Override
public ImageModel get() {
// CDI resolution, null if not found
}
}
// Tool Provider Suppliers
/**
* Resolves ToolProvider from CDI if present.
* Returns null if not found (only static tools used).
*/
public static class BeanIfExistsToolProviderSupplier
implements Supplier<ToolProvider> {
@Override
public ToolProvider get() {
// CDI resolution, null if not found
}
}
/**
* Disables dynamic tool provider.
* Returns null.
*/
public static class NoToolProviderSupplier
implements Supplier<ToolProvider> {
@Override
public ToolProvider get() {
return null;
}
}
// Tool Hallucination Strategy
/**
* Resolves tool hallucination strategy from CDI if present.
* Uses LangChain4j default if not found.
*/
public static class BeanIfExistsToolHallucinationStrategy
implements Function<ToolExecutionRequest, ToolExecutionResultMessage> {
@Override
public ToolExecutionResultMessage apply(ToolExecutionRequest request) {
// CDI resolution, default if not found
}
}
// Content Retriever
/**
* Placeholder content retriever that returns empty list.
* Used when no retriever is configured.
*/
public static class NoRetriever implements ContentRetriever {
@Override
public List<Content> retrieve(Query query) {
return List.of();
}
}

These marker classes follow a naming convention:
- Bean* - uses CDI bean resolution, throws an exception if not found
- BeanIfExists* - uses the CDI bean if available, otherwise falls back to the default/none
- No* - explicitly disables the feature

Usage Pattern:
// Disable memory for a stateless assistant
@RegisterAiService(
chatMemoryProviderSupplier = NoChatMemoryProviderSupplier.class
)
public interface StatelessAssistant {
String ask(String question);
}
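// Provide a ChatMemoryProvider bean for the default BeanChatMemoryProviderSupplier to resolve.
// Sketch only: MessageWindowChatMemory comes from core LangChain4j and the bean class name is
// illustrative (imports: jakarta.enterprise.context.ApplicationScoped, jakarta.enterprise.inject.Produces,
// dev.langchain4j.memory.chat.ChatMemoryProvider, dev.langchain4j.memory.chat.MessageWindowChatMemory)
@ApplicationScoped
public class ChatMemoryConfig {
@Produces
ChatMemoryProvider chatMemoryProvider() {
// One sliding-window memory per memory ID (the value of the @MemoryId parameter)
return memoryId -> MessageWindowChatMemory.withMaxMessages(10);
}
}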
// Disable RAG even if retriever bean exists
@RegisterAiService(
retrievalAugmentor = NoRetrievalAugmentorSupplier.class
)
public interface NoRagAssistant {
String chat(String message);
}

The runtime context for AI services, providing access to effective models, chat memory, and lifecycle management.
// Package: io.quarkiverse.langchain4j.runtime.aiservice
/**
* Quarkus-specific AI service context extending LangChain4j's AiServiceContext.
* Provides additional features for chat memory seeding, image models, and tool execution limits.
*
* One context instance exists per AI service interface.
* Context is created at startup and reused for all method invocations.
*/
public class QuarkusAiServiceContext extends AiServiceContext {
/**
* Chat memory seeder for few-shot learning.
* Invoked when creating new ChatMemory instances.
* Can be null if no seeder configured.
*/
public ChatMemorySeeder chatMemorySeeder;
/**
* Image model for image generation capabilities.
* Can be null if no image model configured.
*/
public ImageModel imageModel;
/**
* Maximum sequential tool invocations allowed.
* Null means use system default (10).
*/
public Integer maxSequentialToolExecutions;
/**
* Whether continuous forced tool calling is allowed.
* When true, model can invoke tools repeatedly without user interruption.
*/
public boolean allowContinuousForcedToolCalling;
/**
* Get the effective chat model for a method call.
* Resolves model based on @ModelName and method parameters.
*
* Resolution order:
* 1. Model from @ModelName on method (if supported in future)
* 2. Model from @RegisterAiService.modelName
* 3. Default model
*
* @param createInfo Method metadata
* @param args Method arguments
* @return ChatModel instance
* @throws IllegalStateException if no model found
*/
public ChatModel effectiveChatModel(
AiServiceMethodCreateInfo createInfo,
Object[] args
);
/**
* Get the effective streaming chat model for a method call.
*
* @param createInfo Method metadata
* @param args Method arguments
* @return StreamingChatModel instance
* @throws IllegalStateException if no model found
*/
public StreamingChatModel effectiveStreamingChatModel(
AiServiceMethodCreateInfo createInfo,
Object[] args
);
/**
* Get chat memory by ID.
* Creates new memory if doesn't exist, using chatMemoryProvider.
* Seeds memory with chatMemorySeeder if configured.
*
* @param id Memory ID (from @MemoryId parameter)
* @return ChatMemory instance
*/
public ChatMemory getChatMemory(Object id);
/**
* Evict chat memory from cache.
* Memory will be recreated on next access.
* Does not call remove() on the memory.
*
* Use case: Force memory reload after external changes.
*
* @param id Memory ID to evict
*/
public void evictChatMemory(Object id);
/**
* Remove multiple chat memory IDs.
* Calls remove() on ChatMemoryProvider if it implements ChatMemoryRemovable.
* Also evicts from cache.
*
* Use case: User logout, session cleanup, privacy compliance.
*
* @param ids Memory IDs to remove
*/
public void removeChatMemoryIds(Object... ids);
/**
* Clean up resources when context is closed.
* Called during application shutdown.
* Closes all open models and clears caches.
*/
@Override
public void close();
}

Lifecycle: one context instance is created per AI service interface at startup, reused for all method invocations, and closed during application shutdown.
Thread Safety: the single context instance is shared across all invocations of the service, including concurrent ones.
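As a sketch of the cleanup API above, the snippet below removes a user's chat memory on logout; how the QuarkusAiServiceContext reference is obtained (here it is simply passed in) is application-specific and not prescribed here.

import io.quarkiverse.langchain4j.runtime.aiservice.QuarkusAiServiceContext;

public class SessionCleaner {

    // Removes the memory for the given user ID: calls remove() on the provider
    // (if it implements ChatMemoryRemovable) and evicts the cached instance.
    public void onLogout(QuarkusAiServiceContext context, String userId) {
        context.removeChatMemoryIds(userId);
    }
}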
Factory that creates Quarkus-aware AI service instances. This class implements the LangChain4j AiServicesFactory Service Provider Interface (SPI), allowing Quarkus to provide custom AI service creation logic that includes CDI integration, tool metadata population, and Quarkus-specific features.
SPI Information:
- Implements: dev.langchain4j.spi.services.AiServicesFactory

// Package: io.quarkiverse.langchain4j
/**
* Factory for creating Quarkus AI service instances.
* Implements LangChain4j SPI for custom AI service creation.
*
* This factory is automatically discovered and used when AiServices.create()
* is called for interfaces annotated with @CreatedAware.
*
* Provides:
* - CDI integration for tools and guardrails
* - Access to configured models
* - Tool metadata from build-time processing
* - Quarkus-specific context and builders
*/
public class QuarkusAiServicesFactory implements AiServicesFactory {
/**
* Create a QuarkusAiServices builder for the given context.
* Returns a Quarkus-enhanced builder with additional capabilities.
*
* This method is called by LangChain4j's AiServices.builder() when
* the factory is discovered.
*
* @param context The AI service context
* @return QuarkusAiServices builder
* @param <T> AI service interface type
*/
@Override
public <T> QuarkusAiServices<T> create(AiServiceContext context);
}

How It Works:
- AiServices.create() is called and the ServiceLoader discovers this factory
- The factory returns a QuarkusAiServices builder instead of the default one

Interaction with @CreatedAware:
- @CreatedAware registers interface metadata at build time

Factory for creating Quarkus-specific AI service contexts. This class implements the LangChain4j AiServiceContextFactory Service Provider Interface (SPI), allowing Quarkus to create custom context instances with CDI support.
SPI Information:
- Implements: dev.langchain4j.spi.services.AiServiceContextFactory

// Package: io.quarkiverse.langchain4j
/**
* Factory for creating Quarkus AI service contexts.
* Implements LangChain4j SPI for custom context creation.
*
* This factory creates QuarkusAiServiceContext instead of the default
* AiServiceContext, providing additional Quarkus-specific features:
* - Chat memory seeding
* - Image model support
* - Tool execution limits
* - CDI integration
*/
public class QuarkusAiServiceContextFactory implements AiServiceContextFactory {
/**
* Create a QuarkusAiServiceContext with Quarkus-specific features.
* Attempts to retrieve a CDI-managed context first, falls back to creating a default instance.
*
* Context resolution:
* 1. Check if CDI context exists for this service class
* 2. If found, return CDI-managed context
* 3. Otherwise, create new QuarkusAiServiceContext
*
* CDI-managed contexts are used for @RegisterAiService interfaces.
* New contexts are used for programmatically created services.
*
* @param aiServiceClass the AI service interface class
* @return AiServiceContext instance (QuarkusAiServiceContext)
*/
@Override
public AiServiceContext create(Class<?> aiServiceClass);
}

Context Creation Flow:
- AiServices.builder() needs a context
- The ServiceLoader discovers QuarkusAiServiceContextFactory
- The factory returns the existing CDI-managed context (for interfaces annotated with @RegisterAiService) or creates a new QuarkusAiServiceContext

Builder extension that adds Quarkus-specific features to LangChain4j's AiServices builder.
// Package: io.quarkiverse.langchain4j.runtime.aiservice
/**
* Quarkus extension of LangChain4j AiServices builder.
* Adds support for tool hallucination strategies, chat memory seeding,
* image models, and tool execution limits.
*
* Returned by QuarkusAiServicesFactory when creating services.
* Inherits all methods from AiServices<T> and adds Quarkus-specific ones.
*/
public static class QuarkusAiServices<T> extends AiServices<T> {
/**
* Set tool hallucination strategy.
* Strategy determines how to handle when LLM invents non-existent tools.
*
* @param strategy Function that processes ToolExecutionRequests
* @return This builder
*/
public QuarkusAiServices<T> toolHallucinationStrategy(Object strategy);
/**
* Set chat memory seeder for few-shot learning.
* Seeder is invoked when creating new ChatMemory instances.
*
* @param seeder ChatMemorySeeder implementation
* @return This builder
*/
public QuarkusAiServices<T> chatMemorySeeder(ChatMemorySeeder seeder);
/**
* Set image model for multimodal capabilities.
* Enables image generation in addition to text generation.
*
* @param model ImageModel instance
* @return This builder
*/
public QuarkusAiServices<T> imageModel(ImageModel model);
/**
* Set maximum sequential tool invocations.
* Prevents infinite loops in tool calling.
*
* @param max Maximum invocations (null = system default of 10)
* @return This builder
*/
public QuarkusAiServices<T> maxSequentialToolInvocations(Integer max);
/**
* Allow continuous forced tool calling.
* When true, model can invoke tools repeatedly without user messages.
*
* @param allow Whether to allow continuous tool calling
* @return This builder
*/
public QuarkusAiServices<T> allowContinuousForcedToolCalling(boolean allow);
/**
* Build and validate the AI service instance.
*
* Validation includes:
* - At least one model (chat or streaming) is configured
* - All tools are CDI beans
* - All guardrails are CDI beans
* - Return types match method signatures
*
* @return AI service implementation instance
* @throws IllegalStateException if validation fails
*/
@Override
public T build();
}

Usage with AiServices.create():
import dev.langchain4j.service.AiServices;
import io.quarkiverse.langchain4j.runtime.aiservice.QuarkusAiServices;
QuarkusAiServices<MyService> builder = (QuarkusAiServices<MyService>)
AiServices.builder(MyService.class)
.chatLanguageModel(model)
.tools(tools);
// Add Quarkus-specific features
builder.maxSequentialToolInvocations(5)
.chatMemorySeeder(seeder)
.allowContinuousForcedToolCalling(true);
MyService service = builder.build();

Build-time metadata about a declaratively created AI service class. This record contains all the information needed to instantiate and configure an AI service at runtime.
// Package: io.quarkiverse.langchain4j.runtime.aiservice
/**
* Metadata about an AI service class created at build time.
* The methodMap key is a methodId generated at build time.
*
* Created during Quarkus build phase and stored for runtime use.
* Immutable record.
*
* @param methodMap Map of method ID to method metadata
* @param implClassName Fully qualified name of generated implementation class
* @param inputGuardrails Class-level input guardrails (null if none)
* @param outputGuardrails Class-level output guardrails (null if none)
*/
public record AiServiceClassCreateInfo(
Map<String, AiServiceMethodCreateInfo> methodMap, // methodId -> method metadata
String implClassName, // Generated implementation class name
InputGuardrailsLiteral inputGuardrails, // Class-level input guardrails
OutputGuardrailsLiteral outputGuardrails // Class-level output guardrails
) {}

Method ID Format: <methodName>_<parameterCount>_<hashedSignature>
Storage: Stored in Quarkus build-time metadata and loaded at runtime
Build-time metadata about a specific AI service method. This class contains all information needed to implement the method at runtime, including message templates, tool configuration, guardrails, and observability settings.
// Package: io.quarkiverse.langchain4j.runtime.aiservice
/**
* Metadata about an AI service method created at build time.
* Contains all information needed to implement the method at runtime.
*
* Mutable class to allow runtime modifications (e.g., setting augmenters).
* One instance per AI service method.
*/
public final class AiServiceMethodCreateInfo {
// Method identification
/**
* Get fully qualified interface name.
* @return Interface name (e.g., "com.example.MyAssistant")
*/
public String getInterfaceName();
/**
* Get method name.
* @return Method name (e.g., "chat")
*/
public String getMethodName();
/**
* Get parameter information.
* @return List of parameter metadata
*/
public List<ParameterInfo> getParameterInfo();
// Message templates
/**
* Get system message template information.
* @return System message info, empty if no @SystemMessage
*/
public Optional<TemplateInfo> getSystemMessageInfo();
/**
* Get user message information.
* @return User message info (required)
*/
public UserMessageInfo getUserMessageInfo();
/**
* Get user message template string.
* @return Template with {variableName} placeholders
*/
public String getUserMessageTemplate();
// Memory and model selection
/**
* Get parameter position of @MemoryId if present.
* @return Parameter index, empty if no @MemoryId
*/
public Optional<Integer> getMemoryIdParamPosition();
/**
* Get parameter position for runtime model override.
* @return Parameter index, empty if no override parameter
*/
public Optional<Integer> getOverrideChatModelParamPosition();
// Return type information
/**
* Get return type signature.
* @return Type signature string
*/
public String getReturnTypeSignature();
/**
* Get return type.
* @return Java Type object
*/
public Type getReturnType();
// Tool configuration
/**
* Get tool class information with CDI qualifiers.
* @return Map of tool class name to annotation literal
*/
public Map<String, AnnotationLiteral<?>> getToolClassInfo();
/**
* Get tool specifications for this method.
* @return List of ToolSpecification
*/
public List<ToolSpecification> getToolSpecifications();
/**
* Get tool executors.
* @return Map of tool name to executor
*/
public Map<String, ToolExecutor> getToolExecutors();
/**
* Get MCP client names for dynamic tools.
* @return List of MCP client names
*/
public List<String> getMcpClientNames();
/**
* Check if tool execution should switch to worker thread.
* @return true if tools execute on worker thread
*/
public boolean isSwitchToWorkerThreadForToolExecution();
// Guardrails
/**
* Get input guardrails configuration.
* @return Input guardrails literal
*/
public InputGuardrailsLiteral getInputGuardrails();
/**
* Get output guardrails configuration.
* @return Output guardrails literal
*/
public OutputGuardrailsLiteral getOutputGuardrails();
/**
* Get maximum retry attempts for guardrail failures.
* @return Max retry count
*/
public int getQuarkusGuardrailsMaxRetry();
/**
* Get output token accumulator class name for streaming.
* @return Class name, null if none
*/
public String getOutputTokenAccumulatorClassName();
/**
* Get output token accumulator instance.
* @return Accumulator instance, null if none
*/
public OutputTokenAccumulator getOutputTokenAccumulator();
/**
* Set output token accumulator (runtime initialization).
* @param accumulator Accumulator instance
*/
public void setOutputTokenAccumulator(OutputTokenAccumulator accumulator);
// Response augmentation
/**
* Get response augmenter class name.
* @return Class name, null if none
*/
public String getResponseAugmenterClassName();
/**
* Get response augmenter class.
* @return Augmenter class, null if none
*/
public Class<? extends AiResponseAugmenter<?>> getResponseAugmenter();
/**
* Set response augmenter (runtime initialization).
* @param augmenter Augmenter class
*/
public void setResponseAugmenter(Class<? extends AiResponseAugmenter<?>> augmenter);
// Response schema
/**
* Get response schema information for structured outputs.
* @return Schema info
*/
public ResponseSchemaInfo getResponseSchemaInfo();
// Moderation
/**
* Check if this method requires moderation.
* @return true if moderation is required
*/
public boolean isRequiresModeration();
// Observability
/**
* Get metrics timed information.
* @return Metrics info, empty if no @Timed
*/
public Optional<MetricsTimedInfo> getMetricsTimedInfo();
/**
* Get metrics counted information.
* @return Metrics info, empty if no @Counted
*/
public Optional<MetricsCountedInfo> getMetricsCountedInfo();
/**
* Get OpenTelemetry span information.
* @return Span info, empty if no @WithSpan
*/
public Optional<SpanInfo> getSpanInfo();
// Nested types
/**
* User message information.
* Either template or paramPosition is present (not both).
*/
public record UserMessageInfo(
Optional<TemplateInfo> template, // Template with variables
Optional<Integer> paramPosition, // Or direct parameter
Optional<Integer> userNameParamPosition // Optional @UserName parameter
) {}
/**
* Template information for @SystemMessage or @UserMessage.
*/
public record TemplateInfo(
Optional<String> text, // Template text with {variables}
Map<String, Integer> nameToParamPosition, // Variable name -> parameter index
Optional<Integer> methodParamPosition // Or entire message from parameter
) {}
/**
* Micrometer @Timed metric information.
*/
public record MetricsTimedInfo(
String name, // Metric name
boolean longTask, // Long task timer
String[] extraTags, // Additional tags
double[] percentiles, // Percentiles to track
boolean histogram, // Enable histogram
String description // Metric description
) {}
/**
* Micrometer @Counted metric information.
*/
public record MetricsCountedInfo(
String name, // Metric name
String[] extraTags, // Additional tags
boolean recordFailuresOnly, // Only count failures
String description // Metric description
) {}
/**
* OpenTelemetry span information.
*/
public record SpanInfo(
String name // Span name
) {}
/**
* Response schema information for structured outputs.
*/
public record ResponseSchemaInfo(
boolean enabled, // Schema validation enabled
boolean isInSystemMessage, // Include schema in system message
Optional<Boolean> isInUserMessage, // Include schema in user message
String outputFormatInstructions, // Format instructions text
Optional<JsonSchema> structuredOutputSchema // JSON schema definition
) {}
/**
* Parameter information.
*/
public record ParameterInfo(
String name, // Parameter name
String typeDescriptor, // Type descriptor
Set<String> annotationTypes // Annotation class names
) {}
}

Usage at Runtime: the QuarkusAiServiceContext looks up each method's metadata from the methodMap (keyed by methodId) when the corresponding service method is invoked.

Record holding metadata about declaratively created AI services.
// Package: io.quarkiverse.langchain4j.runtime.aiservice
/**
* Metadata about declaratively created AI services.
* Created at build time for each @RegisterAiService interface.
*
* Used to initialize AI service at runtime.
*
* @param serviceClass The AI service interface class
* @param modelName The model name from @RegisterAiService
* @param tools List of tool classes
*/
public record DeclarativeAiServiceCreateInfo(
Class<?> serviceClass,
String modelName,
List<Class<?>> tools
// ... additional metadata fields
) {}

CDI qualifier for AI service context instances.
// Package: io.quarkiverse.langchain4j.runtime.aiservice
/**
* CDI Qualifier for AiService context instances.
* Used internally for dependency injection.
*
* Each @RegisterAiService interface has a CDI bean of type
* QuarkusAiServiceContext qualified with this annotation.
*
* Not typically used directly by application code.
*/
@Target({ElementType.FIELD, ElementType.PARAMETER, ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
@Qualifier
public @interface QuarkusAiServiceContextQualifier {
}

Method-level tool restriction with @ToolBox:
@RegisterAiService(tools = {ReadTool.class, WriteTool.class, AdminTool.class})
public interface SmartAssistant {
// Only read access
@ToolBox({ReadTool.class})
String readOnlyQuery(String query);
// Read and write
@ToolBox({ReadTool.class, WriteTool.class})
String normalQuery(String query);
// Full access
String adminQuery(String query);
}

Per-tenant model selection:
@RegisterAiService(modelName = "tenant-{tenantId}")
public interface TenantAssistant {
String chat(@MemoryId String userId, String tenantId, String message);
}
// Configure per-tenant models
// quarkus.langchain4j.openai.tenant-acme.api-key=...
// quarkus.langchain4j.openai.tenant-globex.api-key=...

Fallback between a primary and a secondary model:
@ApplicationScoped
public class ResilientAssistant {
private static final org.jboss.logging.Logger logger = org.jboss.logging.Logger.getLogger(ResilientAssistant.class);
@Inject
@ModelName("gpt-4")
Assistant primary;
@Inject
@ModelName("gpt-3.5-turbo")
Assistant fallback;
public String chat(String message) {
try {
return primary.chat(message);
} catch (Exception e) {
logger.warn("Primary model failed, using fallback", e);
return fallback.chat(message);
}
}
}

Dynamic, context-aware memory seeding:
@RegisterAiService
public interface AdaptiveAssistant {
String chat(String message, String context);
@SeedMemory
static List<ChatMessage> seedByContext(ChatMemorySeeder.Context ctx) {
// Load examples from database based on context
return ExampleRepository.findByContext(ctx.methodName());
}
}

Common errors and solutions:

Error: "Could not find CDI bean for tool class"
Solution: Ensure the tool class has a CDI scope annotation
Error: "Method has no LangChain4j annotations"
Solution: Add at least @UserMessage to the method
Error: "Template variable not found in parameters"
Solution: Ensure template {variable} names match parameter names exactly
Error: "No model found with name X" Solution: Check application.properties for model configuration
Error: "ChatMemory provider not configured"
Solution: Either configure a memory provider or use NoChatMemoryProviderSupplier
Error: "Tool execution failed"
Solution: Check tool implementation for exceptions, add @HandleToolExecutionError
Testing an AI service with @QuarkusTest:
@QuarkusTest
class AssistantTest {
@Inject
Assistant assistant;
@Test
void testChat() {
String result = assistant.chat("test");
assertNotNull(result);
}
}

Testing with a mocked model via a test resource:
@QuarkusTestResource(MockModelResource.class)
@QuarkusTest
class MockedAssistantTest {
@Inject
Assistant assistant;
@Test
void testWithMock() {
// Mock model returns predictable responses
}
}
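MockModelResource is not provided by the extension. A test resource along these lines could back the example above; the configuration key and stub URL are assumptions for illustration only.

import io.quarkus.test.common.QuarkusTestResourceLifecycleManager;
import java.util.Map;

public class MockModelResource implements QuarkusTestResourceLifecycleManager {

    @Override
    public Map<String, String> start() {
        // Start an HTTP stub for the model provider here (e.g. WireMock) and point the
        // provider at it; the property key below is illustrative and provider-specific.
        return Map.of("quarkus.langchain4j.openai.base-url", "http://localhost:8089/v1");
    }

    @Override
    public void stop() {
        // Shut down the stub server started in start()
    }
}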