Core model interfaces and abstractions for the Spring AI framework, providing a portable API for chat, embeddings, images, audio, and tool calling across multiple AI providers.
Common integration patterns for Spring AI models in production applications.
@Service
public class AiIntegrationService {
    private final ChatModel chatModel;
    private final EmbeddingModel embeddingModel;
    private final ModerationModel moderationModel;

    /**
     * Handles a user request end to end: moderation first, then the chat
     * call, then usage tracking. Unsafe content is rejected before any
     * chat-model invocation happens.
     */
    public ServiceResponse processUserRequest(UserRequest request) {
        String content = request.content();
        // Reject early if the moderation model flags the input.
        if (!isContentSafe(content)) {
            return ServiceResponse.rejected("Content policy violation");
        }
        String answer = chatModel.call(content);
        logUsage(request.userId(), answer);
        return ServiceResponse.success(answer);
    }

    /** Runs the input through the moderation model; true when nothing was flagged. */
    private boolean isContentSafe(String content) {
        ModerationPrompt prompt = new ModerationPrompt(content);
        ModerationResponse modResponse = moderationModel.call(prompt);
        return !modResponse.getResult().getOutput().isFlagged();
    }
}
@Repository
/** Spring Data JPA repository for persisted AI conversations. */
public interface AiConversationRepository extends JpaRepository<AiConversation, Long> {
/** Returns every conversation for the given user, newest first (derived query). */
List<AiConversation> findByUserIdOrderByTimestampDesc(String userId);
}
/**
 * JPA entity recording one chat exchange (user message, model response, and
 * serialized response metadata).
 *
 * Fix: the original declared only private fields, but PersistentChatService
 * populates this entity via setters (setUserId, setMessage, ...) — the
 * accessors below are required for that code (and typical JPA usage) to compile.
 */
@Entity
public class AiConversation {
    /** Surrogate primary key, generated by the persistence provider. */
    @Id @GeneratedValue
    private Long id;
    private String userId;
    private String message;
    private String response;
    /** Response metadata serialized as a JSON document (usage, provider info, ...). */
    @Column(columnDefinition = "json")
    private String metadata;
    private LocalDateTime timestamp;

    public Long getId() { return id; }
    public void setId(Long id) { this.id = id; }

    public String getUserId() { return userId; }
    public void setUserId(String userId) { this.userId = userId; }

    public String getMessage() { return message; }
    public void setMessage(String message) { this.message = message; }

    public String getResponse() { return response; }
    public void setResponse(String response) { this.response = response; }

    public String getMetadata() { return metadata; }
    public void setMetadata(String metadata) { this.metadata = metadata; }

    public LocalDateTime getTimestamp() { return timestamp; }
    public void setTimestamp(LocalDateTime timestamp) { this.timestamp = timestamp; }
}
@Service
public class PersistentChatService {
    private final ChatModel chatModel;
    private final AiConversationRepository repository;

    /**
     * Sends the message to the chat model, persists the full exchange
     * (including serialized response metadata and a timestamp), and returns
     * the model's answer.
     */
    public String chatAndPersist(String userId, String message) {
        ChatResponse chatResponse = chatModel.call(new Prompt(message));
        String answer = chatResponse.getResult().getOutput().getText();

        AiConversation entity = new AiConversation();
        entity.setUserId(userId);
        entity.setMessage(message);
        entity.setResponse(answer);
        entity.setMetadata(toJson(chatResponse.getMetadata()));
        entity.setTimestamp(LocalDateTime.now());
        repository.save(entity);

        return answer;
    }
}
@Service
public class AsyncAiService {
    private final ChatModel chatModel;
    // Dedicated pool so blocking AI calls never occupy the common ForkJoinPool.
    private final ExecutorService executor = Executors.newFixedThreadPool(10);

    /** Runs a single chat call asynchronously on the dedicated executor. */
    public CompletableFuture<String> asyncChat(String message) {
        return CompletableFuture.supplyAsync(
                () -> chatModel.call(message),
                executor);
    }

    /**
     * Fans out one async chat call per message and joins them, preserving
     * input order. Note: join() propagates any per-call failure as a
     * CompletionException once allOf completes.
     */
    public CompletableFuture<List<String>> parallelBatch(List<String> messages) {
        List<CompletableFuture<String>> futures = messages.stream()
                .map(this::asyncChat)
                .toList();
        return CompletableFuture.allOf(futures.toArray(new CompletableFuture[0]))
                .thenApply(v -> futures.stream()
                        .map(CompletableFuture::join)
                        .toList());
    }

    /**
     * Fix: the fixed pool was never shut down, leaking its 10 threads on
     * application shutdown. Invoke this from the bean's destroy callback
     * (e.g. annotate with @PreDestroy or register as destroy-method).
     */
    public void shutdown() {
        executor.shutdown();
    }
}
@Service
public class CachedAiService {
    private final ChatModel chatModel;
    // Fix: cachedEmbed() referenced embeddingModel, but the field was never
    // declared — the class did not compile. Declared here for injection.
    private final EmbeddingModel embeddingModel;
    private final CacheManager cacheManager;

    /**
     * Returns the cached answer for an identical message; calls the model on
     * a miss. NOTE: @Cacheable only applies through the Spring proxy — self
     * invocation bypasses the cache.
     */
    @Cacheable(value = "ai-responses", key = "#message")
    public String cachedChat(String message) {
        return chatModel.call(message);
    }

    /** Caches embeddings keyed by the exact input text. */
    @Cacheable(value = "embeddings", key = "#text")
    public float[] cachedEmbed(String text) {
        return embeddingModel.embed(text);
    }

    /** Drops every cached chat response; the embeddings cache is left intact. */
    @CacheEvict(value = "ai-responses", allEntries = true)
    public void clearCache() {
        // Eviction is performed by the annotation; no body needed.
    }
}
@Service
public class EventDrivenAiService {
    private final ChatModel chatModel;
    private final ApplicationEventPublisher eventPublisher;

    /**
     * Chat call instrumented with application events: a request event before
     * the call, then either a response event (with token usage) on success or
     * an error event on failure. The original exception is always rethrown.
     */
    public String chatWithEvents(String message) {
        eventPublisher.publishEvent(new AiRequestEvent(message));
        try {
            ChatResponse chatResponse = chatModel.call(new Prompt(message));
            String answer = chatResponse.getResult().getOutput().getText();
            AiResponseEvent successEvent = new AiResponseEvent(
                    message,
                    answer,
                    chatResponse.getMetadata().getUsage());
            eventPublisher.publishEvent(successEvent);
            return answer;
        } catch (Exception e) {
            // Surface the failure to listeners, then propagate unchanged.
            eventPublisher.publishEvent(new AiErrorEvent(message, e));
            throw e;
        }
    }
}
/** Observability listeners for the AI events published by EventDrivenAiService. */
@Component
public class AiEventListener {
/** Logs token usage for each successful AI response. */
@EventListener
public void handleAiResponse(AiResponseEvent event) {
log.info("AI response generated - Tokens: {}",
event.usage().getTotalTokens());
}
/** Logs the failure message for each failed AI request. */
@EventListener
public void handleAiError(AiErrorEvent event) {
log.error("AI request failed: {}", event.error().getMessage());
}
}@Service
public class ResilientAiService {
    private final ChatModel chatModel;
    // Fix: removed the unused `CircuitBreaker circuitBreaker` field. Its simple
    // name clashed with the @CircuitBreaker annotation (the Resilience4j type
    // and annotation cannot both be imported under the same simple name), and
    // the annotation-driven breaker manages its own state — the field served
    // no purpose.

    /**
     * Chat call guarded by a circuit breaker, retry, and rate limiter (all
     * using the "aiService" Resilience4j instance). Falls back to a static
     * message when the breaker is open or the call ultimately fails.
     */
    @CircuitBreaker(name = "aiService", fallbackMethod = "fallbackResponse")
    @Retry(name = "aiService")
    @RateLimiter(name = "aiService")
    public String protectedChat(String message) {
        return chatModel.call(message);
    }

    /**
     * Fallback invoked by Resilience4j; its signature must mirror
     * protectedChat plus a trailing exception parameter.
     */
    public String fallbackResponse(String message, Exception e) {
        log.error("AI service unavailable, using fallback", e);
        return "Service temporarily unavailable. Please try again later.";
    }
}
// In application.yml:
// resilience4j.circuitbreaker.instances.aiService:
// slidingWindowSize: 10
// failureRateThreshold: 50
// waitDurationInOpenState: 10000
@Component
public class ChatModelFactory {
    // Provider id -> ChatModel instance for that provider.
    private final Map<String, ChatModel> models;

    /** Registers the three supported providers under their lookup keys. */
    public ChatModelFactory(
            @Qualifier("openai") ChatModel openAi,
            @Qualifier("anthropic") ChatModel anthropic,
            @Qualifier("azure") ChatModel azure) {
        this.models = Map.of(
                "openai", openAi,
                "anthropic", anthropic,
                "azure", azure);
    }

    /**
     * Looks up the model registered for the provider id.
     *
     * @throws IllegalArgumentException if the provider is not registered
     */
    public ChatModel getModel(String provider) {
        ChatModel selected = models.get(provider);
        if (selected == null) {
            throw new IllegalArgumentException("Unknown provider: " + provider);
        }
        return selected;
    }
}
@Service
public class FlexibleChatService {
    private final ChatModelFactory modelFactory;

    /** Routes the message to the caller's preferred provider via the factory. */
    public String chat(String message, String preferredProvider) {
        return modelFactory.getModel(preferredProvider).call(message);
    }
}

/** Minimal chat abstraction shared by service implementations and decorators. */
public interface AiService {
    String chat(String message);
}
/**
 * Decorator that adds request/response logging and latency measurement around
 * any AiService, rethrowing failures unchanged after logging them.
 *
 * NOTE(review): marking the decorator @Primary while it injects an AiService
 * delegate risks Spring resolving the delegate to this same bean — the
 * delegate injection point likely needs a @Qualifier for the undecorated
 * implementation; confirm the wiring. Also, no constructor is shown for the
 * final `delegate` field — presumably constructor generation (e.g. Lombok)
 * is assumed file-wide; verify. `truncate` is defined elsewhere.
 */
@Primary
@Service
public class LoggingAiServiceDecorator implements AiService {
private final AiService delegate;
@Override
public String chat(String message) {
// Truncate to keep log lines bounded for long prompts.
log.info("AI request: {}", truncate(message, 100));
long start = System.currentTimeMillis();
try {
String response = delegate.chat(message);
long duration = System.currentTimeMillis() - start;
log.info("AI response completed in {}ms", duration);
return response;
} catch (Exception e) {
// Log with the stack trace, then propagate to the caller untouched.
log.error("AI request failed", e);
throw e;
}
}
}public interface AiStrategy {
/** Processes the input and returns the strategy's textual result. */
String process(String input);
}
@Component("simpleChat")
public class SimpleChatStrategy implements AiStrategy {
    private final ChatModel chatModel;

    /** Passes the input straight to the chat model with no augmentation. */
    @Override
    public String process(String input) {
        String answer = chatModel.call(input);
        return answer;
    }
}
@Component("ragStrategy")
public class RAGStrategy implements AiStrategy {
    private final ChatModel chatModel;
    private final DocumentRetriever retriever;

    /**
     * Retrieval-augmented generation: retrieves documents relevant to the
     * input, builds a context block from them, and asks the model with the
     * context prepended to the question.
     */
    @Override
    public String process(String input) {
        String context = buildContext(retriever.retrieve(input));
        String augmentedPrompt = context + "\n\nQuestion: " + input;
        return chatModel.call(augmentedPrompt);
    }
}
@Service
public class StrategyOrchestrator {
    // Strategy name -> strategy bean; presumably populated by Spring's
    // Map<String, T> injection (bean name as key) — confirm wiring.
    private final Map<String, AiStrategy> strategies;

    /**
     * Runs the input through the named strategy.
     *
     * Fix: an unknown strategy name previously surfaced as an opaque
     * NullPointerException from strategies.get(...); it now fails fast with
     * a descriptive IllegalArgumentException.
     *
     * @throws IllegalArgumentException if no strategy is registered under the name
     */
    public String processWithStrategy(String input, String strategyName) {
        AiStrategy strategy = strategies.get(strategyName);
        if (strategy == null) {
            throw new IllegalArgumentException("Unknown strategy: " + strategyName);
        }
        return strategy.process(input);
    }
}
@Service
public class AiWorkflowOrchestrator {
    private final ChatModel chatModel;
    private final ImageModel imageModel;
    private final EmbeddingModel embeddingModel;

    /**
     * Runs the four-step workflow (analysis -> image -> embedding -> summary),
     * recording each completed step. If any step fails, the completed steps
     * are compensated and a WorkflowException is thrown carrying the failing
     * position and the original cause.
     */
    public WorkflowResult executeWorkflow(WorkflowRequest request) {
        List<String> completedSteps = new ArrayList<>();
        try {
            // Step 1: analyze the raw input.
            String analysis = chatModel.call("Analyze: " + request.input());
            completedSteps.add("analysis");

            // Step 2: generate an image from the analysis.
            String imageUrl = imageModel.call(new ImagePrompt(analysis))
                    .getResult().getOutput().getUrl();
            completedSteps.add("image");

            // Step 3: embed the analysis for later storage/search.
            float[] embedding = embeddingModel.embed(analysis);
            completedSteps.add("embedding");

            // Step 4: produce the final summary.
            String summary = chatModel.call("Summarize: " + analysis);
            completedSteps.add("summary");

            return WorkflowResult.success(analysis, imageUrl, embedding, summary);
        } catch (Exception e) {
            compensate(completedSteps);
            throw new WorkflowException("Workflow failed at step: " + completedSteps.size(), e);
        }
    }

    /** Best-effort rollback/cleanup of the steps that did complete. */
    private void compensate(List<String> completedSteps) {
        log.info("Compensating steps: {}", completedSteps);
    }
}
public abstract class AiPipelineTemplate {
protected final ChatModel chatModel;

/**
 * Template method: validate -> preprocess -> process -> postprocess.
 * Declared final so subclasses customize only the individual steps.
 */
public final PipelineResult execute(String input) {
    String checked = validate(input);
    String prepared = preprocess(checked);
    String produced = process(prepared);
    return new PipelineResult(postprocess(produced));
}

/**
 * Default validation hook: rejects null or blank input. Subclasses may
 * override to tighten the rules.
 *
 * @throws IllegalArgumentException if the input is null or blank
 */
protected String validate(String input) {
    if (input == null || input.isBlank()) {
        throw new IllegalArgumentException("Input cannot be empty");
    }
    return input;
}

/** Transforms validated input into the prompt handed to process(). */
protected abstract String preprocess(String input);

/** Performs the core work (typically the model call) on the prepared input. */
protected abstract String process(String input);

/** Cleans up the raw output before it is wrapped in the pipeline result. */
protected abstract String postprocess(String output);
}
@Service
public class SummarizationPipeline extends AiPipelineTemplate {
    /** Prefixes the summarization instruction onto the validated input. */
    @Override
    protected String preprocess(String input) {
        return "Summarize: " + input;
    }

    /** Delegates the prepared prompt to the inherited chat model. */
    @Override
    protected String process(String input) {
        return chatModel.call(input);
    }

    /** Strips surrounding whitespace from the model output. */
    @Override
    protected String postprocess(String output) {
        return output.trim();
    }
}
@RestController
@RequestMapping("/api/reactive")
public class ReactiveAiController {
private final ChatModel chatModel;
@PostMapping("/chat")
public Mono<String> reactiveChat(@RequestBody String message) {
return Mono.fromCallable(() -> chatModel.call(message))
.subscribeOn(Schedulers.boundedElastic())
.timeout(Duration.ofSeconds(30))
.onErrorResume(e -> Mono.just("Error: " + e.getMessage()));
}
@GetMapping(value = "/stream", produces = MediaType.TEXT_EVENT_STREAM_VALUE)
public Flux<ServerSentEvent<String>> streamChat(@RequestParam String message) {
return chatModel.stream(new Prompt(message))
.map(response -> response.getResult().getOutput().getText())
.map(text -> ServerSentEvent.builder(text).build());
}
}