LangChain4j integration for Mistral AI providing chat completion, streaming, embedding, moderation, and code completion capabilities
The langchain4j-mistral-ai library provides Service Provider Interfaces (SPIs) for dependency injection frameworks and custom instantiation patterns. These interfaces allow frameworks like Spring, Quarkus, or custom factory patterns to provide customized builder instances for all Mistral AI models.
All SPI interfaces are located in the dev.langchain4j.model.mistralai.spi package.
SPI for providing custom MistralAiChatModel.MistralAiChatModelBuilder instances.
package dev.langchain4j.model.mistralai.spi;

import dev.langchain4j.model.mistralai.MistralAiChatModel;

import java.util.function.Supplier;

/**
 * Service Provider Interface for custom MistralAiChatModel builder instantiation.
 * Extends Supplier to provide MistralAiChatModelBuilder instances, allowing
 * DI frameworks or ServiceLoader-based discovery to supply pre-configured builders.
 */
public interface MistralAiChatModelBuilderFactory
        extends Supplier<MistralAiChatModel.MistralAiChatModelBuilder> {

    /**
     * Provides a MistralAiChatModelBuilder instance.
     *
     * @return a MistralAiChatModelBuilder instance (never null)
     */
    MistralAiChatModel.MistralAiChatModelBuilder get();
}
SPI for providing custom MistralAiStreamingChatModel.MistralAiStreamingChatModelBuilder instances.
package dev.langchain4j.model.mistralai.spi;

import dev.langchain4j.model.mistralai.MistralAiStreamingChatModel;

import java.util.function.Supplier;

/**
 * Service Provider Interface for custom MistralAiStreamingChatModel builder instantiation.
 * Extends Supplier to provide MistralAiStreamingChatModelBuilder instances.
 */
public interface MistralAiStreamingChatModelBuilderFactory
        extends Supplier<MistralAiStreamingChatModel.MistralAiStreamingChatModelBuilder> {

    /**
     * Provides a MistralAiStreamingChatModelBuilder instance.
     *
     * @return a MistralAiStreamingChatModelBuilder instance (never null)
     */
    MistralAiStreamingChatModel.MistralAiStreamingChatModelBuilder get();
}
SPI for providing custom MistralAiEmbeddingModel.MistralAiEmbeddingModelBuilder instances.
package dev.langchain4j.model.mistralai.spi;

import dev.langchain4j.model.mistralai.MistralAiEmbeddingModel;

import java.util.function.Supplier;

/**
 * Service Provider Interface for custom MistralAiEmbeddingModel builder instantiation.
 * Extends Supplier to provide MistralAiEmbeddingModelBuilder instances.
 */
public interface MistralAiEmbeddingModelBuilderFactory
        extends Supplier<MistralAiEmbeddingModel.MistralAiEmbeddingModelBuilder> {

    /**
     * Provides a MistralAiEmbeddingModelBuilder instance.
     *
     * @return a MistralAiEmbeddingModelBuilder instance (never null)
     */
    MistralAiEmbeddingModel.MistralAiEmbeddingModelBuilder get();
}
SPI for providing custom MistralAiFimModel.Builder instances.
package dev.langchain4j.model.mistralai.spi;

import dev.langchain4j.model.mistralai.MistralAiFimModel;

import java.util.function.Supplier;

/**
 * Service Provider Interface for custom MistralAiFimModel builder instantiation.
 * Extends Supplier to provide MistralAiFimModel.Builder instances.
 */
public interface MistralAiFimModelBuilderFactory
        extends Supplier<MistralAiFimModel.Builder> {

    /**
     * Provides a MistralAiFimModel.Builder instance.
     *
     * @return a MistralAiFimModel.Builder instance (never null)
     */
    MistralAiFimModel.Builder get();
}
SPI for providing custom MistralAiStreamingFimModel.Builder instances.
package dev.langchain4j.model.mistralai.spi;

import dev.langchain4j.model.mistralai.MistralAiStreamingFimModel;

import java.util.function.Supplier;

/**
 * Service Provider Interface for custom MistralAiStreamingFimModel builder instantiation.
 * Extends Supplier to provide MistralAiStreamingFimModel.Builder instances.
 */
public interface MistralAiStreamingFimModelBuilderFactory
        extends Supplier<MistralAiStreamingFimModel.Builder> {

    /**
     * Provides a MistralAiStreamingFimModel.Builder instance.
     *
     * @return a MistralAiStreamingFimModel.Builder instance (never null)
     */
    MistralAiStreamingFimModel.Builder get();
}
SPI for providing custom MistralAiModels.MistralAiModelsBuilder instances.
package dev.langchain4j.model.mistralai.spi;

import dev.langchain4j.model.mistralai.MistralAiModels;

import java.util.function.Supplier;

/**
 * Service Provider Interface for custom MistralAiModels builder instantiation.
 * Extends Supplier to provide MistralAiModelsBuilder instances.
 */
public interface MistralAiModelsBuilderFactory
        extends Supplier<MistralAiModels.MistralAiModelsBuilder> {

    /**
     * Provides a MistralAiModelsBuilder instance.
     *
     * @return a MistralAiModelsBuilder instance (never null)
     */
    MistralAiModels.MistralAiModelsBuilder get();
}
Use SPI interfaces with Spring's dependency injection:
import dev.langchain4j.model.mistralai.MistralAiChatModel;
import dev.langchain4j.model.mistralai.spi.MistralAiChatModelBuilderFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.[Configuration Options](./configuration.md);
@Configuration
public class MistralAiSpringConfig {
@Bean
public MistralAiChatModelBuilderFactory chatModelBuilderFactory() {
return () -> MistralAiChatModel.builder()
.apiKey(System.getenv("MISTRAL_API_KEY"))
.temperature(0.7)
.maxTokens(1000);
}
@Bean
public MistralAiChatModel chatModel(MistralAiChatModelBuilderFactory factory) {
return factory.get()
.modelName("mistral-large-latest")
.build();
}
}Implement custom factory with pre-configured defaults:
import dev.langchain4j.model.mistralai.MistralAiChatModel;
import dev.langchain4j.model.mistralai.spi.MistralAiChatModelBuilderFactory;

import java.time.Duration;

/**
 * Production-oriented factory supplying builders pre-configured with
 * credentials, endpoint, timeout, retry, and logging defaults.
 */
public class ProductionMistralAiFactory implements MistralAiChatModelBuilderFactory {

    private final String apiKey;
    private final String baseUrl;

    public ProductionMistralAiFactory(String apiKey, String baseUrl) {
        this.apiKey = apiKey;
        this.baseUrl = baseUrl;
    }

    /**
     * @return a builder with production defaults applied (never null)
     */
    @Override
    public MistralAiChatModel.MistralAiChatModelBuilder get() {
        return MistralAiChatModel.builder()
                .apiKey(apiKey)
                .baseUrl(baseUrl)
                .timeout(Duration.ofMinutes(2))
                .maxRetries(3)
                .logRequests(false)
                .logResponses(false);
    }

    /** Builds a ready-to-use production chat model on top of the shared defaults. */
    public MistralAiChatModel createProductionModel() {
        return get()
                .modelName("mistral-large-latest")
                .temperature(0.5)
                .maxTokens(2000)
                .build();
    }
}

// Usage
ProductionMistralAiFactory factory = new ProductionMistralAiFactory(
        System.getenv("MISTRAL_API_KEY"),
        "https://api.mistral.ai/v1"
);
MistralAiChatModel model = factory.createProductionModel();
Create environment-aware factories:
import dev.langchain4j.model.mistralai.MistralAiChatModel;
import dev.langchain4j.model.mistralai.spi.MistralAiChatModelBuilderFactory;

/**
 * Factory that selects builder defaults based on the deployment environment
 * ("production", "staging", or anything else for development).
 */
public class EnvironmentAwareFactory implements MistralAiChatModelBuilderFactory {

    private final String environment;

    public EnvironmentAwareFactory(String environment) {
        this.environment = environment;
    }

    /**
     * @return a builder configured for the current environment (never null)
     */
    @Override
    public MistralAiChatModel.MistralAiChatModelBuilder get() {
        return switch (environment) {
            case "production" -> productionBuilder();
            case "staging" -> stagingBuilder();
            default -> developmentBuilder();
        };
    }

    // Conservative settings: larger model, more retries, logging disabled.
    private MistralAiChatModel.MistralAiChatModelBuilder productionBuilder() {
        return MistralAiChatModel.builder()
                .apiKey(System.getenv("MISTRAL_API_KEY_PROD"))
                .modelName("mistral-large-latest")
                .temperature(0.5)
                .maxRetries(5)
                .logRequests(false);
    }

    private MistralAiChatModel.MistralAiChatModelBuilder stagingBuilder() {
        return MistralAiChatModel.builder()
                .apiKey(System.getenv("MISTRAL_API_KEY_STAGING"))
                .modelName("mistral-medium-latest")
                .temperature(0.7)
                .maxRetries(3)
                .logRequests(true);
    }

    // Verbose logging and a smaller model for local development.
    private MistralAiChatModel.MistralAiChatModelBuilder developmentBuilder() {
        return MistralAiChatModel.builder()
                .apiKey(System.getenv("MISTRAL_API_KEY_DEV"))
                .modelName("mistral-small-latest")
                .temperature(0.7)
                .maxRetries(1)
                .logRequests(true)
                .logResponses(true);
    }
}

// Usage
String environment = System.getProperty("app.environment", "development");
EnvironmentAwareFactory factory = new EnvironmentAwareFactory(environment);
MistralAiChatModel model = factory.get().build();
Use Java's ServiceLoader mechanism:
// META-INF/services/dev.langchain4j.model.mistralai.spi.MistralAiChatModelBuilderFactory
com.example.MyCustomMistralAiFactory

// Implementation
package com.example;

import dev.langchain4j.model.mistralai.MistralAiChatModel;
import dev.langchain4j.model.mistralai.spi.MistralAiChatModelBuilderFactory;

/**
 * Factory discovered via ServiceLoader; reads settings from a custom
 * configuration source instead of hard-coding them.
 */
public class MyCustomMistralAiFactory implements MistralAiChatModelBuilderFactory {

    @Override
    public MistralAiChatModel.MistralAiChatModelBuilder get() {
        // Load configuration from your custom source
        Configuration config = ConfigurationLoader.load();
        return MistralAiChatModel.builder()
                .apiKey(config.getMistralApiKey())
                .baseUrl(config.getMistralBaseUrl())
                .temperature(config.getTemperature())
                .maxTokens(config.getMaxTokens());
    }
}

// Loading via ServiceLoader (requires import java.util.ServiceLoader)
ServiceLoader<MistralAiChatModelBuilderFactory> loader =
        ServiceLoader.load(MistralAiChatModelBuilderFactory.class);
MistralAiChatModel model = loader.findFirst()
        .orElseThrow()
        .get()
        .modelName("mistral-large-latest")
        .build();
Use with Quarkus CDI framework:
import dev.langchain4j.model.mistralai.MistralAiChatModel;
import dev.langchain4j.model.mistralai.spi.MistralAiChatModelBuilderFactory;
import jakarta.enterprise.context.ApplicationScoped;
import jakarta.enterprise.inject.Produces;
import org.eclipse.microprofile.config.inject.ConfigProperty;

/**
 * Quarkus CDI configuration producing a builder factory and a chat model
 * from MicroProfile Config properties.
 */
@ApplicationScoped
public class MistralAiQuarkusConfig {

    @ConfigProperty(name = "mistral.api.key")
    String apiKey;

    @ConfigProperty(name = "mistral.model.name", defaultValue = "mistral-large-latest")
    String modelName;

    @ConfigProperty(name = "mistral.temperature", defaultValue = "0.7")
    Double temperature;

    /** Produces builders pre-configured with credentials and temperature. */
    @Produces
    @ApplicationScoped
    public MistralAiChatModelBuilderFactory chatModelBuilderFactory() {
        return () -> MistralAiChatModel.builder()
                .apiKey(apiKey)
                .temperature(temperature);
    }

    /** Produces the chat model, applying the configured model name. */
    @Produces
    @ApplicationScoped
    public MistralAiChatModel chatModel(MistralAiChatModelBuilderFactory factory) {
        return factory.get()
                .modelName(modelName)
                .build();
    }
}
Create mock factories for unit testing:
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.model.chat.response.ChatResponse;
import dev.langchain4j.model.mistralai.MistralAiChatModel;
import dev.langchain4j.model.mistralai.spi.MistralAiChatModelBuilderFactory;
import org.junit.jupiter.api.Test;

import java.time.Duration;
import java.util.List;

import static org.junit.jupiter.api.Assertions.assertNotNull;

/**
 * Test factory pointing at a local mock endpoint with a short timeout and
 * retries disabled so failing tests fail fast.
 */
public class MockMistralAiFactory implements MistralAiChatModelBuilderFactory {

    @Override
    public MistralAiChatModel.MistralAiChatModelBuilder get() {
        return MistralAiChatModel.builder()
                .apiKey("test-api-key")
                .baseUrl("http://localhost:8080/mock")
                .timeout(Duration.ofSeconds(5))
                .maxRetries(0);
    }
}

// Usage in tests
@Test
public void testChatModel() {
    MockMistralAiFactory factory = new MockMistralAiFactory();
    MistralAiChatModel model = factory.get()
            .modelName("mistral-small-latest")
            .build();
    // Test with mock model
    ChatResponse response = model.chat(
            List.of(UserMessage.from("test"))
    );
    assertNotNull(response);
}
Enables seamless integration with DI frameworks like Spring, Quarkus, Micronaut, etc.
Centralizes configuration and applies defaults across all model instances.
Makes it easy to switch between development, staging, and production configurations.
Allows mock or test-specific factories to be created for unit and integration testing.
Lets custom logic be injected without modifying library code.
Ensures consistent configuration across all model instances in an application.
Use SPI factories to centralize all Mistral AI configuration:
/**
 * Factory reading all Mistral AI settings from a single Properties source,
 * keeping configuration centralized across the application.
 */
public class CentralizedMistralAiFactory implements MistralAiChatModelBuilderFactory {

    private final Properties config;

    public CentralizedMistralAiFactory(Properties config) {
        this.config = config;
    }

    @Override
    public MistralAiChatModel.MistralAiChatModelBuilder get() {
        return MistralAiChatModel.builder()
                .apiKey(config.getProperty("mistral.api.key"))
                .baseUrl(config.getProperty("mistral.base.url"))
                // Duration.parse expects ISO-8601 format, e.g. "PT2M"
                .timeout(Duration.parse(config.getProperty("mistral.timeout")))
                .maxRetries(Integer.parseInt(config.getProperty("mistral.max.retries")))
                .logRequests(Boolean.parseBoolean(config.getProperty("mistral.log.requests")));
    }
}
Add validation in factory methods:
@Override
public MistralAiChatModel.MistralAiChatModelBuilder get() {
    String apiKey = config.getApiKey();
    // Fail fast with a clear message instead of a late authentication error
    if (apiKey == null || apiKey.isEmpty()) {
        throw new IllegalStateException("Mistral AI API key not configured");
    }
    return MistralAiChatModel.builder()
            .apiKey(apiKey)
            // ... other configuration
            ;
}
Clearly document what each factory provides:
/**
 * Production-grade Mistral AI chat model factory.
 *
 * Provides builders pre-configured with:
 * - 2-minute timeout for resilience
 * - 5 retry attempts with exponential backoff
 * - Disabled logging for performance
 * - Large model for best quality
 */
public class ProductionChatModelFactory implements MistralAiChatModelBuilderFactory {
    // Implementation
}
Compose multiple factories for different purposes:
/**
 * Aggregates the chat, embedding, and FIM factories behind one registry so
 * application code obtains fully configured models from a single place.
 */
public class ModelFactoryRegistry {

    private final MistralAiChatModelBuilderFactory chatFactory;
    private final MistralAiEmbeddingModelBuilderFactory embeddingFactory;
    private final MistralAiFimModelBuilderFactory fimFactory;

    public ModelFactoryRegistry(
            MistralAiChatModelBuilderFactory chatFactory,
            MistralAiEmbeddingModelBuilderFactory embeddingFactory,
            MistralAiFimModelBuilderFactory fimFactory) {
        this.chatFactory = chatFactory;
        this.embeddingFactory = embeddingFactory;
        this.fimFactory = fimFactory;
    }

    /** Chat model using the large model for best quality. */
    public MistralAiChatModel createChatModel() {
        return chatFactory.get()
                .modelName("mistral-large-latest")
                .build();
    }

    /** Embedding model using the dedicated embedding model. */
    public MistralAiEmbeddingModel createEmbeddingModel() {
        return embeddingFactory.get()
                .modelName("mistral-embed")
                .build();
    }

    /** Fill-in-the-middle (code completion) model using Codestral. */
    public MistralAiFimModel createFimModel() {
        return fimFactory.get()
                .modelName("codestral-latest")
                .build();
    }
}
The SPI pattern integrates seamlessly with LangChain4j's model abstraction layer, allowing Mistral AI models to be used interchangeably with other LLM providers while maintaining custom configuration and instantiation logic specific to Mistral AI.
Install with Tessl CLI
npx tessl i tessl/maven-dev-langchain4j--langchain4j-mistral-ai@1.11.0