Quarkus extension for integrating local Ollama language models with LangChain4j
Function calling support allowing models to invoke external tools and business logic, enabling agentic workflows and dynamic capability extension.
Define tools that models can invoke using function specifications.
record Tool(Type type, Function function) {
static Tool from(Function function);
}
enum Tool.Type {
FUNCTION
}
record Tool.Function(
String name,
String description,
Parameters parameters
) { }
record Tool.Function.Parameters(
String type,
Map<String, Map<String, Object>> properties,
List<String> required
) {
static Parameters objectType(Map<String, Map<String, Object>> properties, List<String> required);
static Parameters empty();
}

Components:
- Tool - Tool specification with type and function
- Tool.Type - Tool type (currently only FUNCTION supported)
- Tool.Function - Function specification with name, description, and parameters
- Tool.Function.Parameters - JSON Schema-style parameter definition

Usage:
// Define function parameters
Map<String, Map<String, Object>> properties = Map.of(
"location", Map.of(
"type", "string",
"description", "City name, e.g., 'Paris' or 'New York'"
),
"unit", Map.of(
"type", "string",
"enum", List.of("celsius", "fahrenheit"),
"description", "Temperature unit"
)
);
List<String> required = List.of("location");
Tool.Function.Parameters params = Tool.Function.Parameters.objectType(properties, required);
// Define function
Tool.Function getWeatherFunc = new Tool.Function(
"getWeather",
"Get current weather for a location",
params
);
// Create tool
Tool weatherTool = Tool.from(getWeatherFunc);
// Use in chat request
ChatRequest request = ChatRequest.builder()
.model("llama3.2")
.messages(messages)
.tools(List.of(weatherTool))
.build();

Empty parameters for functions with no arguments:
Tool.Function.Parameters emptyParams = Tool.Function.Parameters.empty();
Tool.Function noArgFunc = new Tool.Function(
"getCurrentTime",
"Get the current time",
emptyParams
);
Tool timeTool = Tool.from(noArgFunc);

Represents a tool call requested by the AI model.
record ToolCall(FunctionCall function) {
static ToolCall fromFunctionCall(String name, Map<String, Object> arguments);
ToolExecutionRequest toToolExecutionRequest();
}
record ToolCall.FunctionCall(
String name,
Map<String, Object> arguments
) { }

Components:
- ToolCall - Tool invocation from model
- ToolCall.FunctionCall - Function name and arguments
- toToolExecutionRequest() - Convert to LangChain4j ToolExecutionRequest

Usage:
// Extract tool calls from assistant message
ChatResponse response = ollamaClient.chat(request);
Message assistantMsg = response.message();
List<ToolCall> toolCalls = assistantMsg.toolCalls();
if (toolCalls != null && !toolCalls.isEmpty()) {
for (ToolCall toolCall : toolCalls) {
String functionName = toolCall.function().name();
Map<String, Object> arguments = toolCall.function().arguments();
System.out.println("Function: " + functionName);
System.out.println("Arguments: " + arguments);
// Execute function
String result = executeTool(functionName, arguments);
// Add result to conversation
Message toolResultMsg = Message.builder()
.role(Role.TOOL)
.content(result)
.build();
}
}

Create tool call programmatically:
ToolCall toolCall = ToolCall.fromFunctionCall(
"getWeather",
Map.of("location", "Paris", "unit", "celsius")
);

Convert to LangChain4j format:
ToolExecutionRequest request = toolCall.toToolExecutionRequest();
// Use with LangChain4j tool execution infrastructure

Full example showing tool definition, execution, and result handling.
import io.quarkiverse.langchain4j.ollama.*;
import java.util.*;
// 1. Define tools
Map<String, Map<String, Object>> weatherProps = Map.of(
"location", Map.of(
"type", "string",
"description", "City name"
)
);
Tool.Function.Parameters weatherParams = Tool.Function.Parameters.objectType(
weatherProps,
List.of("location")
);
Tool.Function weatherFunc = new Tool.Function(
"getWeather",
"Get current weather for a location",
weatherParams
);
Tool weatherTool = Tool.from(weatherFunc);
// 2. Build chat request with tools
List<Message> messages = new ArrayList<>();
messages.add(Message.builder()
.role(Role.SYSTEM)
.content("You are a helpful assistant with access to weather information.")
.build());
messages.add(Message.builder()
.role(Role.USER)
.content("What's the weather in Paris?")
.build());
ChatRequest request = ChatRequest.builder()
.model("llama3.2")
.messages(messages)
.tools(List.of(weatherTool))
.build();
// 3. Get model response
OllamaClient client = new OllamaClient(
"http://localhost:11434",
Duration.ofSeconds(30),
false, false, false,
null, null
);
ChatResponse response = client.chat(request);
Message assistantMsg = response.message();
// 4. Check for tool calls
if (assistantMsg.toolCalls() != null && !assistantMsg.toolCalls().isEmpty()) {
// Add assistant message with tool call to history
messages.add(assistantMsg);
// Execute each tool call
for (ToolCall toolCall : assistantMsg.toolCalls()) {
String functionName = toolCall.function().name();
Map<String, Object> arguments = toolCall.function().arguments();
// Execute tool (implement your logic)
String result;
if ("getWeather".equals(functionName)) {
String location = (String) arguments.get("location");
result = getWeatherData(location); // Your implementation
} else {
result = "{\"error\": \"Unknown function\"}";
}
// Add tool result to messages
messages.add(Message.builder()
.role(Role.TOOL)
.content(result)
.build());
}
// 5. Send tool results back to model for final response
ChatRequest followUp = ChatRequest.builder()
.model("llama3.2")
.messages(messages)
.tools(List.of(weatherTool))
.build();
ChatResponse finalResponse = client.chat(followUp);
System.out.println("Final answer: " + finalResponse.message().content());
}
// Example weather function implementation
private String getWeatherData(String location) {
// In real implementation, call weather API
return String.format("""
{
"location": "%s",
"temperature": 22,
"condition": "sunny",
"humidity": 45
}
""", location);
}Use declarative AI services with automatic tool execution.
import dev.langchain4j.agent.tool.Tool;
import io.quarkiverse.langchain4j.RegisterAiService;
// Define tool class
public class WeatherTools {
@Tool("Get current weather for a location")
public String getWeather(String location) {
// Implementation
return "Sunny, 22°C in " + location;
}
@Tool("Get weather forecast for next N days")
public String getForecast(String location, int days) {
// Implementation
return days + "-day forecast for " + location + ": mostly sunny";
}
}
// Register AI service with tools
@RegisterAiService(tools = WeatherTools.class)
public interface WeatherAssistant {
@SystemMessage("You are a weather assistant. Use available tools to answer questions.")
String chat(String userMessage);
}
// Usage - tool execution happens automatically
@Inject
WeatherAssistant assistant;
String response = assistant.chat("What's the weather in Paris?");
// Model automatically calls getWeather("Paris") and responds with result

Define and use multiple tools for complex agent workflows.
// Weather tool
Tool.Function weatherFunc = new Tool.Function(
"getWeather",
"Get current weather",
Tool.Function.Parameters.objectType(
Map.of("location", Map.of("type", "string")),
List.of("location")
)
);
// Calculator tool
Tool.Function calcFunc = new Tool.Function(
"calculate",
"Perform mathematical calculation",
Tool.Function.Parameters.objectType(
Map.of(
"expression", Map.of(
"type", "string",
"description", "Math expression, e.g., '2 + 2' or '10 * 5'"
)
),
List.of("expression")
)
);
// Search tool
Tool.Function searchFunc = new Tool.Function(
"search",
"Search the web for information",
Tool.Function.Parameters.objectType(
Map.of("query", Map.of("type", "string")),
List.of("query")
)
);
List<Tool> tools = List.of(
Tool.from(weatherFunc),
Tool.from(calcFunc),
Tool.from(searchFunc)
);
ChatRequest request = ChatRequest.builder()
.model("llama3.2")
.messages(messages)
.tools(tools)
.build();
// Model can now choose appropriate tool based on user query

Define tools with complex, nested parameter structures.
Map<String, Map<String, Object>> complexProps = Map.of(
"query", Map.of(
"type", "string",
"description", "Search query"
),
"filters", Map.of(
"type", "object",
"properties", Map.of(
"dateRange", Map.of(
"type", "object",
"properties", Map.of(
"start", Map.of("type", "string", "format", "date"),
"end", Map.of("type", "string", "format", "date")
)
),
"categories", Map.of(
"type", "array",
"items", Map.of("type", "string")
)
)
),
"maxResults", Map.of(
"type", "integer",
"minimum", 1,
"maximum", 100,
"default", 10
)
);
Tool.Function.Parameters complexParams = Tool.Function.Parameters.objectType(
complexProps,
List.of("query") // Only query is required
);
Tool.Function complexSearch = new Tool.Function(
"advancedSearch",
"Perform advanced search with filters",
complexParams
);

Handle errors in tool execution gracefully.
try {
ChatResponse response = client.chat(request);
Message assistantMsg = response.message();
if (assistantMsg.toolCalls() != null) {
for (ToolCall toolCall : assistantMsg.toolCalls()) {
try {
// Execute tool
String result = executeTool(toolCall);
messages.add(Message.builder()
.role(Role.TOOL)
.content(result)
.build());
} catch (Exception e) {
// Return error to model
messages.add(Message.builder()
.role(Role.TOOL)
.content(String.format(
"{\"error\": \"%s\"}",
e.getMessage()
))
.build());
}
}
}
} catch (Exception e) {
logger.error("Tool calling failed", e);
}

Handle tool calls in streaming mode.
List<Message> messages = new ArrayList<>();
StringBuilder currentContent = new StringBuilder();
List<ToolCall> collectedToolCalls = new ArrayList<>();
Multi<ChatResponse> stream = client.streamingChat(request);
stream.subscribe().with(
chunk -> {
if (!chunk.done()) {
Message msg = chunk.message();
if (msg.content() != null) {
currentContent.append(msg.content());
}
if (msg.toolCalls() != null) {
collectedToolCalls.addAll(msg.toolCalls());
}
} else {
// Stream complete, process tool calls
if (!collectedToolCalls.isEmpty()) {
Message assistantMsg = Message.builder()
.role(Role.ASSISTANT)
.content(currentContent.toString())
.toolCalls(collectedToolCalls)
.build();
messages.add(assistantMsg);
// Execute tool calls
for (ToolCall toolCall : collectedToolCalls) {
String result = executeTool(toolCall);
messages.add(Message.builder()
.role(Role.TOOL)
.content(result)
.build());
}
// Continue conversation with tool results
// ... make another request
}
}
},
error -> logger.error("Streaming error", error)
);

Tool.Function dataRetrieval = new Tool.Function(
"queryDatabase",
"Query database for information",
parameters
);

Tool.Function apiCall = new Tool.Function(
"callExternalAPI",
"Call external REST API",
parameters
);

Tool.Function computation = new Tool.Function(
"calculate",
"Perform mathematical calculations",
parameters
);

Tool.Function fileOps = new Tool.Function(
"readFile",
"Read contents of a file",
parameters
);

Tool.Function stateUpdate = new Tool.Function(
"updateState",
"Update application state",
parameters
);

// Example: Secure tool execution
private String executeTool(ToolCall toolCall, User user) {
String functionName = toolCall.function().name();
// 1. Check authorization
if (!hasPermission(user, functionName)) {
return "{\"error\": \"Unauthorized\"}";
}
// 2. Validate arguments
Map<String, Object> args = toolCall.function().arguments();
if (!validateArguments(functionName, args)) {
return "{\"error\": \"Invalid arguments\"}";
}
// 3. Rate limit check
if (isRateLimited(user, functionName)) {
return "{\"error\": \"Rate limit exceeded\"}";
}
// 4. Execute with timeout
try {
return executeWithTimeout(functionName, args, Duration.ofSeconds(10));
} catch (TimeoutException e) {
return "{\"error\": \"Execution timeout\"}";
} catch (Exception e) {
logger.error("Tool execution failed", e);
return "{\"error\": \"Execution failed\"}";
}
}Install with Tessl CLI
npx tessl i tessl/maven-io-quarkiverse-langchain4j--quarkus-langchain4j-ollama@1.7.0