CtrlK
BlogDocsLog inGet started
Tessl Logo

tessl/maven-dev-langchain4j--langchain4j-mistral-ai

LangChain4j integration for Mistral AI providing chat completion, streaming, embedding, moderation, and code completion capabilities

Overview
Eval results
Files

spi.mddocs/

Service Provider Interface (SPI)

The langchain4j-mistral-ai library provides Service Provider Interfaces (SPIs) for dependency injection frameworks and custom instantiation patterns. These interfaces allow frameworks like Spring, Quarkus, or custom factory patterns to provide customized builder instances for all Mistral AI models.

Package

All SPI interfaces are located in the dev.langchain4j.model.mistralai.spi package.

Capabilities

Chat Models Builder Factory

SPI for providing custom MistralAiChatModel.MistralAiChatModelBuilder instances.

package dev.langchain4j.model.mistralai.spi;

import dev.langchain4j.model.mistralai.MistralAiChatModel;
import java.util.function.Supplier;

/**
 * Service Provider Interface for custom MistralAiChatModel builder instantiation.
 * Extends Supplier to provide MistralAiChatModelBuilder instances.
 */
public interface MistralAiChatModelBuilderFactory
        extends Supplier<MistralAiChatModel.MistralAiChatModelBuilder> {

    /**
     * Provides a MistralAiChatModelBuilder instance.
     *
     * @return MistralAiChatModelBuilder instance (never null)
     */
    MistralAiChatModel.MistralAiChatModelBuilder get();
}

Streaming Chat Model Builder Factory

SPI for providing custom MistralAiStreamingChatModel.MistralAiStreamingChatModelBuilder instances.

package dev.langchain4j.model.mistralai.spi;

import dev.langchain4j.model.mistralai.MistralAiStreamingChatModel;
import java.util.function.Supplier;

/**
 * Service Provider Interface for custom MistralAiStreamingChatModel builder instantiation.
 * Extends Supplier to provide MistralAiStreamingChatModelBuilder instances.
 */
public interface MistralAiStreamingChatModelBuilderFactory
        extends Supplier<MistralAiStreamingChatModel.MistralAiStreamingChatModelBuilder> {

    /**
     * Provides a MistralAiStreamingChatModelBuilder instance.
     *
     * @return MistralAiStreamingChatModelBuilder instance (never null)
     */
    MistralAiStreamingChatModel.MistralAiStreamingChatModelBuilder get();
}

Embedding Model Builder Factory

SPI for providing custom MistralAiEmbeddingModel.MistralAiEmbeddingModelBuilder instances.

package dev.langchain4j.model.mistralai.spi;

import dev.langchain4j.model.mistralai.MistralAiEmbeddingModel;
import java.util.function.Supplier;

/**
 * Service Provider Interface for custom MistralAiEmbeddingModel builder instantiation.
 * Extends Supplier to provide MistralAiEmbeddingModelBuilder instances.
 */
public interface MistralAiEmbeddingModelBuilderFactory
        extends Supplier<MistralAiEmbeddingModel.MistralAiEmbeddingModelBuilder> {

    /**
     * Provides a MistralAiEmbeddingModelBuilder instance.
     *
     * @return MistralAiEmbeddingModelBuilder instance (never null)
     */
    MistralAiEmbeddingModel.MistralAiEmbeddingModelBuilder get();
}

FIM Model Builder Factory

SPI for providing custom MistralAiFimModel.Builder instances.

package dev.langchain4j.model.mistralai.spi;

import dev.langchain4j.model.mistralai.MistralAiFimModel;
import java.util.function.Supplier;

/**
 * Service Provider Interface for custom MistralAiFimModel builder instantiation.
 * Extends Supplier to provide MistralAiFimModel.Builder instances.
 */
public interface MistralAiFimModelBuilderFactory
        extends Supplier<MistralAiFimModel.Builder> {

    /**
     * Provides a MistralAiFimModel.Builder instance.
     *
     * @return MistralAiFimModel.Builder instance (never null)
     */
    MistralAiFimModel.Builder get();
}

Streaming FIM Model Builder Factory

SPI for providing custom MistralAiStreamingFimModel.Builder instances.

package dev.langchain4j.model.mistralai.spi;

import dev.langchain4j.model.mistralai.MistralAiStreamingFimModel;
import java.util.function.Supplier;

/**
 * Service Provider Interface for custom MistralAiStreamingFimModel builder instantiation.
 * Extends Supplier to provide MistralAiStreamingFimModel.Builder instances.
 */
public interface MistralAiStreamingFimModelBuilderFactory
        extends Supplier<MistralAiStreamingFimModel.Builder> {

    /**
     * Provides a MistralAiStreamingFimModel.Builder instance.
     *
     * @return MistralAiStreamingFimModel.Builder instance (never null)
     */
    MistralAiStreamingFimModel.Builder get();
}

Models Builder Factory

SPI for providing custom MistralAiModels.MistralAiModelsBuilder instances.

package dev.langchain4j.model.mistralai.spi;

import dev.langchain4j.model.mistralai.MistralAiModels;
import java.util.function.Supplier;

/**
 * Service Provider Interface for custom MistralAiModels builder instantiation.
 * Extends Supplier to provide MistralAiModelsBuilder instances.
 */
public interface MistralAiModelsBuilderFactory
        extends Supplier<MistralAiModels.MistralAiModelsBuilder> {

    /**
     * Provides a MistralAiModelsBuilder instance.
     *
     * @return MistralAiModelsBuilder instance (never null)
     */
    MistralAiModels.MistralAiModelsBuilder get();
}

Usage Examples

Spring Framework Integration

Use SPI interfaces with Spring's dependency injection:

import dev.langchain4j.model.mistralai.MistralAiChatModel;
import dev.langchain4j.model.mistralai.spi.MistralAiChatModelBuilderFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class MistralAiSpringConfig {

    @Bean
    public MistralAiChatModelBuilderFactory chatModelBuilderFactory() {
        return () -> MistralAiChatModel.builder()
            .apiKey(System.getenv("MISTRAL_API_KEY"))
            .temperature(0.7)
            .maxTokens(1000);
    }

    @Bean
    public MistralAiChatModel chatModel(MistralAiChatModelBuilderFactory factory) {
        return factory.get()
            .modelName("mistral-large-latest")
            .build();
    }
}

Custom Factory Pattern

Implement custom factory with pre-configured defaults:

import dev.langchain4j.model.mistralai.MistralAiChatModel;
import dev.langchain4j.model.mistralai.spi.MistralAiChatModelBuilderFactory;
import java.time.Duration;

public class ProductionMistralAiFactory implements MistralAiChatModelBuilderFactory {

    private final String apiKey;
    private final String baseUrl;

    public ProductionMistralAiFactory(String apiKey, String baseUrl) {
        this.apiKey = apiKey;
        this.baseUrl = baseUrl;
    }

    @Override
    public MistralAiChatModel.MistralAiChatModelBuilder get() {
        return MistralAiChatModel.builder()
            .apiKey(apiKey)
            .baseUrl(baseUrl)
            .timeout(Duration.ofMinutes(2))
            .maxRetries(3)
            .logRequests(false)
            .logResponses(false);
    }

    public MistralAiChatModel createProductionModel() {
        return get()
            .modelName("mistral-large-latest")
            .temperature(0.5)
            .maxTokens(2000)
            .build();
    }
}

// Usage
ProductionMistralAiFactory factory = new ProductionMistralAiFactory(
    System.getenv("MISTRAL_API_KEY"),
    "https://api.mistral.ai/v1"
);

MistralAiChatModel model = factory.createProductionModel();

Multi-Environment Factory

Create environment-aware factories:

import dev.langchain4j.model.mistralai.MistralAiChatModel;
import dev.langchain4j.model.mistralai.spi.MistralAiChatModelBuilderFactory;

public class EnvironmentAwareFactory implements MistralAiChatModelBuilderFactory {

    private final String environment;

    public EnvironmentAwareFactory(String environment) {
        this.environment = environment;
    }

    @Override
    public MistralAiChatModel.MistralAiChatModelBuilder get() {
        return switch (environment) {
            case "production" -> productionBuilder();
            case "staging" -> stagingBuilder();
            default -> developmentBuilder();
        };
    }

    private MistralAiChatModel.MistralAiChatModelBuilder productionBuilder() {
        return MistralAiChatModel.builder()
            .apiKey(System.getenv("MISTRAL_API_KEY_PROD"))
            .modelName("mistral-large-latest")
            .temperature(0.5)
            .maxRetries(5)
            .logRequests(false);
    }

    private MistralAiChatModel.MistralAiChatModelBuilder stagingBuilder() {
        return MistralAiChatModel.builder()
            .apiKey(System.getenv("MISTRAL_API_KEY_STAGING"))
            .modelName("mistral-medium-latest")
            .temperature(0.7)
            .maxRetries(3)
            .logRequests(true);
    }

    private MistralAiChatModel.MistralAiChatModelBuilder developmentBuilder() {
        return MistralAiChatModel.builder()
            .apiKey(System.getenv("MISTRAL_API_KEY_DEV"))
            .modelName("mistral-small-latest")
            .temperature(0.7)
            .maxRetries(1)
            .logRequests(true)
            .logResponses(true);
    }
}

// Usage
String environment = System.getProperty("app.environment", "development");
EnvironmentAwareFactory factory = new EnvironmentAwareFactory(environment);
MistralAiChatModel model = factory.get().build();

Service Loader Integration

Use Java's ServiceLoader mechanism:

// META-INF/services/dev.langchain4j.model.mistralai.spi.MistralAiChatModelBuilderFactory
com.example.MyCustomMistralAiFactory

// Implementation
package com.example;

import dev.langchain4j.model.mistralai.MistralAiChatModel;
import dev.langchain4j.model.mistralai.spi.MistralAiChatModelBuilderFactory;

public class MyCustomMistralAiFactory implements MistralAiChatModelBuilderFactory {

    @Override
    public MistralAiChatModel.MistralAiChatModelBuilder get() {
        // Load configuration from your custom source
        Configuration config = ConfigurationLoader.load();

        return MistralAiChatModel.builder()
            .apiKey(config.getMistralApiKey())
            .baseUrl(config.getMistralBaseUrl())
            .temperature(config.getTemperature())
            .maxTokens(config.getMaxTokens());
    }
}

// Loading via ServiceLoader
ServiceLoader<MistralAiChatModelBuilderFactory> loader =
    ServiceLoader.load(MistralAiChatModelBuilderFactory.class);

MistralAiChatModel model = loader.findFirst()
    .orElseThrow()
    .get()
    .modelName("mistral-large-latest")
    .build();

Quarkus CDI Integration

Use with Quarkus CDI framework:

import dev.langchain4j.model.mistralai.MistralAiChatModel;
import dev.langchain4j.model.mistralai.spi.MistralAiChatModelBuilderFactory;
import jakarta.enterprise.context.ApplicationScoped;
import jakarta.enterprise.inject.Produces;
import org.eclipse.microprofile.config.inject.ConfigProperty;

@ApplicationScoped
public class MistralAiQuarkusConfig {

    @ConfigProperty(name = "mistral.api.key")
    String apiKey;

    @ConfigProperty(name = "mistral.model.name", defaultValue = "mistral-large-latest")
    String modelName;

    @ConfigProperty(name = "mistral.temperature", defaultValue = "0.7")
    Double temperature;

    @Produces
    @ApplicationScoped
    public MistralAiChatModelBuilderFactory chatModelBuilderFactory() {
        return () -> MistralAiChatModel.builder()
            .apiKey(apiKey)
            .temperature(temperature);
    }

    @Produces
    @ApplicationScoped
    public MistralAiChatModel chatModel(MistralAiChatModelBuilderFactory factory) {
        return factory.get()
            .modelName(modelName)
            .build();
    }
}

Testing with Mock Factories

Create mock factories for unit testing:

import dev.langchain4j.model.mistralai.MistralAiChatModel;
import dev.langchain4j.model.mistralai.spi.MistralAiChatModelBuilderFactory;
import java.time.Duration;
import org.junit.jupiter.api.Test;

public class MockMistralAiFactory implements MistralAiChatModelBuilderFactory {

    @Override
    public MistralAiChatModel.MistralAiChatModelBuilder get() {
        return MistralAiChatModel.builder()
            .apiKey("test-api-key")
            .baseUrl("http://localhost:8080/mock")
            .timeout(Duration.ofSeconds(5))
            .maxRetries(0);
    }
}

// Usage in tests
@Test
public void testChatModel() {
    MockMistralAiFactory factory = new MockMistralAiFactory();
    MistralAiChatModel model = factory.get()
        .modelName("mistral-small-latest")
        .build();

    // Test with mock model
    ChatResponse response = model.chat(
        List.of(UserMessage.from("test"))
    );

    assertNotNull(response);
}

Benefits of SPI Pattern

1. Dependency Injection

Enables seamless integration with DI frameworks like Spring, Quarkus, Micronaut, etc.

2. Configuration Management

Centralize configuration and apply defaults across all model instances.

3. Environment Separation

Easily switch between development, staging, and production configurations.

4. Testability

Create mock or test-specific factories for unit and integration testing.

5. Extensibility

Custom logic can be injected without modifying library code.

6. Consistency

Ensure consistent configuration across all model instances in an application.

Best Practices

Centralize Configuration

Use SPI factories to centralize all Mistral AI configuration:

public class CentralizedMistralAiFactory implements MistralAiChatModelBuilderFactory {

    private final Properties config;

    public CentralizedMistralAiFactory(Properties config) {
        this.config = config;
    }

    @Override
    public MistralAiChatModel.MistralAiChatModelBuilder get() {
        return MistralAiChatModel.builder()
            .apiKey(config.getProperty("mistral.api.key"))
            .baseUrl(config.getProperty("mistral.base.url"))
            .timeout(Duration.parse(config.getProperty("mistral.timeout")))
            .maxRetries(Integer.parseInt(config.getProperty("mistral.max.retries")))
            .logRequests(Boolean.parseBoolean(config.getProperty("mistral.log.requests")));
    }
}

Validate Configuration

Add validation in factory methods:

@Override
public MistralAiChatModel.MistralAiChatModelBuilder get() {
    String apiKey = config.getApiKey();
    if (apiKey == null || apiKey.isEmpty()) {
        throw new IllegalStateException("Mistral AI API key not configured");
    }

    return MistralAiChatModel.builder()
        .apiKey(apiKey)
        // ... other configuration
        ;
}

Document Factory Behavior

Clearly document what each factory provides:

/**
 * Production-grade Mistral AI chat model factory.
 *
 * Provides builders pre-configured with:
 * - 2-minute timeout for resilience
 * - 5 retry attempts with exponential backoff
 * - Disabled logging for performance
 * - Large model for best quality
 */
public class ProductionChatModelFactory implements MistralAiChatModelBuilderFactory {
    // Implementation
}

Use Composition

Compose multiple factories for different purposes:

public class ModelFactoryRegistry {

    private final MistralAiChatModelBuilderFactory chatFactory;
    private final MistralAiEmbeddingModelBuilderFactory embeddingFactory;
    private final MistralAiFimModelBuilderFactory fimFactory;

    public ModelFactoryRegistry(
            MistralAiChatModelBuilderFactory chatFactory,
            MistralAiEmbeddingModelBuilderFactory embeddingFactory,
            MistralAiFimModelBuilderFactory fimFactory) {
        this.chatFactory = chatFactory;
        this.embeddingFactory = embeddingFactory;
        this.fimFactory = fimFactory;
    }

    public MistralAiChatModel createChatModel() {
        return chatFactory.get()
            .modelName("mistral-large-latest")
            .build();
    }

    public MistralAiEmbeddingModel createEmbeddingModel() {
        return embeddingFactory.get()
            .modelName("mistral-embed")
            .build();
    }

    public MistralAiFimModel createFimModel() {
        return fimFactory.get()
            .modelName("codestral-latest")
            .build();
    }
}

Integration with LangChain4j

The SPI pattern integrates seamlessly with LangChain4j's model abstraction layer, allowing Mistral AI models to be used interchangeably with other LLM providers while maintaining custom configuration and instantiation logic specific to Mistral AI.

Install with Tessl CLI

npx tessl i tessl/maven-dev-langchain4j--langchain4j-mistral-ai

docs

chat-models.md

code-completion.md

configuration.md

embedding-model.md

index.md

model-discovery.md

moderation-model.md

spi.md

types-and-enums.md

tile.json