Multi-module test support framework for Embabel Agent applications, providing integration testing, mock AI services, and test configuration utilities.
Complete API reference for FakeAiConfiguration and test utilities for options converters.
Spring test configuration providing pre-configured fake AI service beans and utilities for testing options converters.
Module: embabel-agent-test-internal
Packages: com.embabel.common.test.ai.config (configuration), com.embabel.agent.test.models (utilities)
Language: Kotlin (Java-compatible)
Kotlin:
import com.embabel.common.test.ai.config.FakeAiConfiguration
import com.embabel.agent.test.models.OptionsConverterTestSupport
import com.embabel.agent.test.models.checkOptionsConverterPreservesCoreValues
Java:
import com.embabel.common.test.ai.config.FakeAiConfiguration;
import com.embabel.agent.test.models.OptionsConverterTestSupport;
import static com.embabel.agent.test.models.OptionsConverterTestUtilsKt.checkOptionsConverterPreservesCoreValues;
/**
* Spring test configuration that provides fake AI service beans for testing
* without requiring actual API keys or making real API calls.
*
* Provides mocked LLM services for both cheap and best models, plus a fake embedding service.
*/
@TestConfiguration
class FakeAiConfiguration
How to use:
@SpringBootTest
@Import(FakeAiConfiguration::class)
class MyTest {
// Beans are automatically available
}
Note: Must be explicitly imported with @Import because it's a @TestConfiguration.
Creates a mocked LLM service bean for the "cheapest" model tier.
/**
* Create a mocked LLM service bean for the "cheapest" model tier.
* Configured with OpenAI gpt-4o-mini settings but uses a mocked ChatModel.
*
* @return LlmService configured for gpt-4o-mini with mocked backend
*/
@Bean
fun cheapest(): LlmService<*>
Configuration: OpenAI gpt-4o-mini settings with a mocked ChatModel backend.
Usage (Kotlin):
@SpringBootTest
@Import(FakeAiConfiguration::class)
class ServiceTest {
@Autowired
@Qualifier("cheapest")
private lateinit var cheapModel: LlmService<*>
@Test
fun `test with cheap model`() {
val response = cheapModel.generate("test prompt")
// No real API calls made
}
}
Usage (Java):
@SpringBootTest
@Import(FakeAiConfiguration.class)
public class ServiceTest {
@Autowired
@Qualifier("cheapest")
private LlmService<?> cheapModel;
@Test
void testWithCheapModel() {
String response = cheapModel.generate("test prompt");
// No real API calls made
}
}
Creates a mocked LLM service bean for the "best" model tier.
/**
* Create a mocked LLM service bean for the "best" model tier.
* Configured with OpenAI gpt-4o settings but uses a mocked ChatModel.
*
* @return LlmService configured for gpt-4o with mocked backend
*/
@Bean
fun best(): LlmService<*>
Configuration: OpenAI gpt-4o settings with a mocked ChatModel backend.
Usage (Kotlin):
@SpringBootTest
@Import(FakeAiConfiguration::class)
class AdvancedServiceTest {
@Autowired
@Qualifier("best")
private lateinit var bestModel: LlmService<*>
@Test
fun `test with best model`() {
val response = bestModel.generate("complex prompt")
// No real API calls made
}
}
Usage (Java):
@SpringBootTest
@Import(FakeAiConfiguration.class)
public class AdvancedServiceTest {
@Autowired
@Qualifier("best")
private LlmService<?> bestModel;
@Test
void testWithBestModel() {
String response = bestModel.generate("complex prompt");
// No real API calls made
}
}
Creates an embedding service bean using FakeEmbeddingModel.
/**
* Create an embedding service bean using FakeEmbeddingModel.
* Configured with text-embedding-ada-002 settings but generates random embeddings.
*
* @return EmbeddingService with fake embedding model
*/
@Bean
fun embedding(): EmbeddingService
Configuration: text-embedding-ada-002 settings with a FakeEmbeddingModel that returns random embeddings.
Usage (Kotlin):
@SpringBootTest
@Import(FakeAiConfiguration::class)
class EmbeddingServiceTest {
@Autowired
private lateinit var embeddingService: EmbeddingService
@Test
fun `test embedding operations`() {
val embedding = embeddingService.embed("test text")
assertNotNull(embedding)
assertEquals(1536, embedding.size)
}
}
Usage (Java):
@SpringBootTest
@Import(FakeAiConfiguration.class)
public class EmbeddingServiceTest {
@Autowired
private EmbeddingService embeddingService;
@Test
void testEmbeddingOperations() {
float[] embedding = embeddingService.embed("test text");
assertNotNull(embedding);
assertEquals(1536, embedding.length);
}
}
Abstract base class for testing OptionsConverter implementations.
/**
* Abstract base class for testing OptionsConverter implementations.
* Provides a standard test that verifies core LLM option values are preserved
* during conversion (temperature, topP, frequencyPenalty).
*
* @param O The specific ChatOptions type produced by the converter
* @property optionsConverter The OptionsConverter instance to test
*/
abstract class OptionsConverterTestSupport<O : ChatOptions>(
protected val optionsConverter: OptionsConverter<O>
)
Type Parameters:
O - The ChatOptions subtype produced by your converter
Parameters:
optionsConverter - The OptionsConverter instance to test
Inherited Test:
/**
* Test that verifies the converter preserves core LLM option values.
* Checks that temperature, topP, and frequencyPenalty are correctly transferred.
*/
@Test
fun `should preserve core values`()
Usage:
import com.embabel.agent.test.models.OptionsConverterTestSupport
import org.junit.jupiter.api.Test
class OpenAiOptionsConverterTest : OptionsConverterTestSupport<OpenAiChatOptions>(
OpenAiOptionsConverter()
) {
// Inherits the 'should preserve core values' test
@Test
fun `should handle custom OpenAI parameters`() {
// Add additional converter-specific tests
val llmOptions = LlmOptions(customParam = "value")
val options = optionsConverter.convert(llmOptions)
assertEquals("value", options.customParameter)
}
}
Standalone utility function for testing that an options converter preserves core values.
/**
* Utility function to test that an OptionsConverter preserves core LLM option values.
* Verifies that temperature, topP, and frequencyPenalty are correctly converted.
*
* @param optionsConverter The OptionsConverter to test
* @throws AssertionError if any core values are not preserved correctly
*/
fun checkOptionsConverterPreservesCoreValues(optionsConverter: OptionsConverter<*>)
Parameters:
optionsConverter - The OptionsConverter to test
Throws: AssertionError if preservation fails
Usage (Kotlin):
import com.embabel.agent.test.models.checkOptionsConverterPreservesCoreValues
@Test
fun `test custom converter preserves values`() {
val converter = MyCustomOptionsConverter()
// This will throw AssertionError if preservation fails
checkOptionsConverterPreservesCoreValues(converter)
}
Usage (Java):
import static com.embabel.agent.test.models.OptionsConverterTestUtilsKt.checkOptionsConverterPreservesCoreValues;
@Test
public void testConverterPreservesValues() {
OptionsConverter<?> converter = new MyCustomOptionsConverter();
checkOptionsConverterPreservesCoreValues(converter);
}
Complete example using all fake AI services together:
@SpringBootTest
@Import(FakeAiConfiguration::class)
class CompleteTest {
@Autowired
@Qualifier("cheapest")
private lateinit var cheapModel: LlmService<*>
@Autowired
@Qualifier("best")
private lateinit var bestModel: LlmService<*>
@Autowired
private lateinit var embeddingService: EmbeddingService
@Test
fun `test with all services`() {
val cheapResult = cheapModel.generate("test")
val bestResult = bestModel.generate("test")
val embedding = embeddingService.embed("test")
assertNotNull(cheapResult)
assertNotNull(bestResult)
assertEquals(1536, embedding.size)
}
}
Verifying a feature against both model tiers:
@SpringBootTest
@Import(FakeAiConfiguration::class)
class ModelTierTest {
@Autowired
@Qualifier("cheapest")
private lateinit var cheapModel: LlmService<*>
@Autowired
@Qualifier("best")
private lateinit var bestModel: LlmService<*>
@Test
fun `feature works with all model tiers`() {
// myFeature and input stand in for the feature under test and its input
val cheapResult = myFeature.execute(input, cheapModel)
val bestResult = myFeature.execute(input, bestModel)
// Both should succeed
assertNotNull(cheapResult)
assertNotNull(bestResult)
}
}
Overriding the fake embedding service with custom dimensions:
@SpringBootTest
class CustomEmbeddingTest {
@TestConfiguration
class TestConfig {
@Bean
@Primary
fun customEmbedding(): EmbeddingService {
// Override with custom dimensions
val fakeModel = FakeEmbeddingModel(dimensions = 768)
return SpringAiEmbeddingService(
fakeModel,
"custom-embedding-model",
"CustomProvider"
)
}
}
@Autowired
private lateinit var embeddingService: EmbeddingService
@Test
fun `test with custom dimensions`() {
val embedding = embeddingService.embed("test")
assertEquals(768, embedding.size)
}
}
Testing a custom options converter by extending the base class:
class MyOptionsConverterTest : OptionsConverterTestSupport<MyCustomOptions>(
MyOptionsConverter()
) {
// Inherits core preservation test
@Test
fun `should handle custom parameters`() {
val llmOptions = LlmOptions(
temperature = 0.7,
customParam = "value"
)
val result = optionsConverter.convert(llmOptions)
assertEquals(0.7, result.temperature)
assertEquals("value", result.customParam)
}
}
Testing a converter with the standalone utility function:
@Test
fun `test converter without base class`() {
val converter = MyOptionsConverter()
// Use utility function directly
checkOptionsConverterPreservesCoreValues(converter)
// Additional custom assertions
val result = converter.convert(LlmOptions(temperature = 0.5))
assertEquals(0.5, result.temperature)
}
Related interfaces referenced in the examples above:
/**
* LLM service interface
*/
interface LlmService<T : ChatOptions> {
fun generate(prompt: String): String
fun generate(messages: List<Message>): String
}
/**
* Embedding service interface
*/
interface EmbeddingService {
fun embed(text: String): FloatArray
fun embed(texts: List<String>): List<FloatArray>
}
/**
* Options converter interface
*/
interface OptionsConverter<T : ChatOptions> {
fun convert(options: LlmOptions): T
}
/**
* LLM options configuration
*/
class LlmOptions(
val temperature: Double? = null,
val topP: Double? = null,
val frequencyPenalty: Double? = null,
val maxTokens: Int? = null,
)
/**
* Spring AI chat options interface
*/
interface ChatOptions {
fun getTemperature(): Double?
fun getTopP(): Double?
fun getFrequencyPenalty(): Double?
}
FakeAiConfiguration is annotated with @TestConfiguration, not @Configuration. This means:
✓ Must be explicitly imported with @Import(FakeAiConfiguration::class)
✓ Will not be automatically picked up by component scanning
✓ Only active when explicitly imported in tests
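Because the configuration has to be imported explicitly in every test class, a shared base class can remove the repetition. This is a minimal sketch rather than part of the library: FakeAiTestBase is a hypothetical name, and it assumes Spring resolves @SpringBootTest and @Import declared on a test superclass (the usual behavior):
import com.embabel.common.test.ai.config.FakeAiConfiguration
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.context.annotation.Import

// Hypothetical base class that bundles the test annotations once
@SpringBootTest
@Import(FakeAiConfiguration::class)
abstract class FakeAiTestBase

// Concrete tests inherit the Spring Boot context and the fake AI beans
class MyServiceTest : FakeAiTestBase() {
    // @Autowired fields and @Test methods as in the examples above
}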
The cheapest and best LLM service beans can be injected by:
By qualifier name:
@Autowired
@Qualifier("cheapest")
private lateinit var cheapModel: LlmService<*>
By parameter name matching:
@Autowired
private lateinit var cheapest: LlmService<*>
The LLM services use the MockK library for Kotlin mocking. Ensure MockK is available in your test dependencies if you need to customize mock behavior.
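When you need fully deterministic responses, one option is to build your own MockK mock of LlmService with a canned reply and skip the Spring context entirely. A minimal sketch, assuming the LlmService interface shown above; the class name and stubbed response are illustrative only:
import io.mockk.every
import io.mockk.mockk
import org.junit.jupiter.api.Assertions.assertEquals
import org.junit.jupiter.api.Test

class CannedResponseTest {
    // Stub generate(String) so every prompt yields a fixed, predictable response
    private val cheapModel = mockk<LlmService<*>> {
        every { generate(any<String>()) } returns "canned response"
    }

    @Test
    fun `feature sees the canned response`() {
        assertEquals("canned response", cheapModel.generate("any prompt"))
    }
}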
The options converter testing verifies these core values are preserved:
temperature - Controls randomness
topP - Nucleus sampling parameter
frequencyPenalty - Penalty for token repetition
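Conceptually, the check builds LlmOptions with known core values, runs the converter, and asserts that each value survives the conversion. A rough sketch of that idea, written against the simplified interfaces above (assertCoreValuesPreserved is an illustrative name, not the library's implementation):
import org.junit.jupiter.api.Assertions.assertEquals

fun <O : ChatOptions> assertCoreValuesPreserved(converter: OptionsConverter<O>) {
    // Known inputs for the three core values
    val llmOptions = LlmOptions(temperature = 0.3, topP = 0.9, frequencyPenalty = 0.5)
    val converted = converter.convert(llmOptions)
    // Each core value should come through the conversion unchanged
    assertEquals(llmOptions.temperature, converted.getTemperature())
    assertEquals(llmOptions.topP, converted.getTopP())
    assertEquals(llmOptions.frequencyPenalty, converted.getFrequencyPenalty())
}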