Common AI framework utilities for the Embabel Agent system including LLM configuration, output converters, prompt contributors, and embedding service abstractions.
YAML-based model configuration loading system with validation framework.
Base class for loading AI model configurations from YAML files with built-in validation and error handling.
abstract class AbstractYamlModelLoader<T : LlmAutoConfigProvider<*>>(
private val resourceLoader: ResourceLoader = DefaultResourceLoader(),
private val configPath: String
) : LlmAutoConfigMetadataLoader<T> {
protected val logger: Logger
protected val yamlMapper: ObjectMapper
override fun loadAutoConfigMetadata(): T
protected abstract fun getProviderClass(): kotlin.reflect.KClass<T>
protected abstract fun createEmptyProvider(): T
protected abstract fun getProviderName(): String
protected abstract fun validateModels(provider: T)
protected fun validateCommonFields(model: LlmAutoConfigMetadata)
}

Implementation Example:
import java.time.LocalDate
import kotlin.reflect.KClass
data class OpenAiProvider(
override val models: List<OpenAiModel>
) : LlmAutoConfigProvider<OpenAiModel>
data class OpenAiModel(
override val name: String,
override val modelId: String,
override val displayName: String?,
override val knowledgeCutoffDate: LocalDate?,
override val pricingModel: PerTokenPricingModel?
) : LlmAutoConfigMetadata
class OpenAiModelLoader : AbstractYamlModelLoader<OpenAiProvider>(
configPath = "META-INF/ai-models/openai.yaml"
) {
override fun getProviderClass(): KClass<OpenAiProvider> = OpenAiProvider::class
override fun createEmptyProvider(): OpenAiProvider =
OpenAiProvider(emptyList())
override fun getProviderName(): String = "openai"
override fun validateModels(provider: OpenAiProvider) {
provider.models.forEach { model ->
validateCommonFields(model)
require(model.modelId.startsWith("gpt-") || model.modelId.startsWith("o1-")) {
"OpenAI model ID must start with 'gpt-' or 'o1-': ${model.modelId}"
}
}
}
}
// Usage
val loader = OpenAiModelLoader()
val provider = loader.loadAutoConfigMetadata()
println("Loaded ${provider.models.size} OpenAI models")Interface for model metadata in auto-configuration system.
interface LlmAutoConfigMetadata {
val name: String
val modelId: String
val displayName: String?
val knowledgeCutoffDate: LocalDate?
val pricingModel: PerTokenPricingModel?
}

Usage:
data class CustomModel(
override val name: String,
override val modelId: String,
override val displayName: String?,
override val knowledgeCutoffDate: LocalDate?,
override val pricingModel: PerTokenPricingModel?,
val customField: String // Provider-specific fields
) : LlmAutoConfigMetadata
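Because every provider's model type implements this interface, small helpers can be written once against it. The describe function below is purely illustrative and not part of the library:

// Illustrative helper working only against the LlmAutoConfigMetadata interface
fun describe(model: LlmAutoConfigMetadata): String {
    val label = model.displayName ?: model.name
    val cutoff = model.knowledgeCutoffDate?.toString() ?: "unknown"
    return "$label (${model.modelId}), knowledge cutoff: $cutoff"
}

// Works for any provider's model type, e.g. the CustomModel defined above
println(describe(CustomModel("gpt-4", "gpt-4", "GPT-4", LocalDate.of(2023, 4, 1), null, "extra")))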
Provider of auto-configuration metadata, containing a list of models.

interface LlmAutoConfigProvider<T : LlmAutoConfigMetadata> {
val models: List<T>
}

Interface for loading auto-configuration metadata.
interface LlmAutoConfigMetadataLoader<T> {
fun loadAutoConfigMetadata(): T
}

# File: META-INF/ai-models/openai.yaml
models:
  - name: gpt-4
    model_id: gpt-4
    display_name: GPT-4
    knowledge_cutoff_date: 2023-04-01
    pricing_model:
      usd_per_1m_input_tokens: 30.0
      usd_per_1m_output_tokens: 60.0
  - name: gpt-4-turbo
    model_id: gpt-4-turbo
    display_name: GPT-4 Turbo
    knowledge_cutoff_date: 2023-12-01
    pricing_model:
      usd_per_1m_input_tokens: 10.0
      usd_per_1m_output_tokens: 30.0

YAML properties use snake_case and map automatically to camelCase Kotlin properties.
# YAML uses snake_case
model_id: gpt-4
display_name: GPT-4
knowledge_cutoff_date: 2023-04-01
pricing_model:
  usd_per_1m_input_tokens: 30.0
  usd_per_1m_output_tokens: 60.0

// Kotlin uses camelCase
data class Model(
val modelId: String, // Maps to model_id
val displayName: String, // Maps to display_name
val knowledgeCutoffDate: LocalDate?, // Maps to knowledge_cutoff_date
val pricingModel: PerTokenPricingModel? // Maps to pricing_model
)
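A self-contained sketch of this mapping using Jackson's YAML support. The exact mapper configuration inside AbstractYamlModelLoader is an assumption here, and MappedModel is a throwaway illustration type:

import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.databind.PropertyNamingStrategies
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule
import com.fasterxml.jackson.module.kotlin.readValue
import com.fasterxml.jackson.module.kotlin.registerKotlinModule
import java.time.LocalDate

data class MappedModel(
    val modelId: String,
    val displayName: String?,
    val knowledgeCutoffDate: LocalDate?
)

val mapper = ObjectMapper(YAMLFactory())
    .registerKotlinModule()                                          // Kotlin data class support
    .registerModule(JavaTimeModule())                                // LocalDate handling
    .setPropertyNamingStrategy(PropertyNamingStrategies.SNAKE_CASE)  // snake_case -> camelCase

val model: MappedModel = mapper.readValue(
    """
    model_id: gpt-4
    display_name: GPT-4
    knowledge_cutoff_date: "2023-04-01"
    """.trimIndent()
)
check(model.modelId == "gpt-4" && model.knowledgeCutoffDate == LocalDate.of(2023, 4, 1))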
YAML files are loaded from the classpath at META-INF/ai-models/{providerName}.yaml.

src/main/resources/
└── META-INF/
    └── ai-models/
        ├── openai.yaml
        ├── anthropic.yaml
        ├── ollama.yaml
        └── custom-provider.yaml
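How a configPath resolves against the classpath can be illustrated with Spring's ResourceLoader. This sketches the general mechanism, not necessarily the exact calls the base class makes:

import org.springframework.core.io.DefaultResourceLoader

val resourceLoader = DefaultResourceLoader()
val resource = resourceLoader.getResource("classpath:META-INF/ai-models/openai.yaml")

if (resource.exists()) {
    // Read the raw YAML; the loader would hand this to its YAML mapper
    val yaml = resource.inputStream.bufferedReader().use { it.readText() }
    println("Found openai.yaml (${yaml.length} characters)")
} else {
    // A missing file is not fatal: the loader falls back to an empty provider
    println("No configuration found for this provider")
}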
The validateCommonFields method validates the required common fields.

override fun validateModels(provider: MyProvider) {
provider.models.forEach { model ->
// Validates: name, modelId, displayName are non-blank
validateCommonFields(model)
// Add provider-specific validation
require(model.customField.isNotEmpty()) {
"Custom field must not be empty"
}
}
}

Implement custom validation in validateModels.
override fun validateModels(provider: AnthropicProvider) {
provider.models.forEach { model ->
validateCommonFields(model)
// Validate model ID format
require(model.modelId.startsWith("claude-")) {
"Anthropic models must start with 'claude-': ${model.modelId}"
}
// Validate pricing is present
requireNotNull(model.pricingModel) {
"Pricing model required for ${model.modelId}"
}
// Validate knowledge cutoff
requireNotNull(model.knowledgeCutoffDate) {
"Knowledge cutoff required for ${model.modelId}"
}
}
}

Data class capturing results of provider initialization with statistics.
data class ProviderInitialization(
val provider: String,
val registeredLlms: List<RegisteredModel>,
val registeredEmbeddings: List<RegisteredModel>,
val initializedAt: Instant
) {
val totalLlms: Int
val totalEmbeddings: Int
fun summary(): String
}

Usage:
val initialization = ProviderInitialization(
provider = "openai",
registeredLlms = listOf(
RegisteredModel("openai-gpt4", "gpt-4"),
RegisteredModel("openai-gpt35", "gpt-3.5-turbo")
),
registeredEmbeddings = listOf(
RegisteredModel("openai-ada", "text-embedding-ada-002")
),
initializedAt = Instant.now()
)
println(initialization.summary())
println("Total models: ${initialization.totalLlms + initialization.totalEmbeddings}")Information about a registered model.
data class RegisteredModel(
val beanName: String,
val modelId: String
)

On error, the loader returns an empty provider and logs the issue.
val loader = OpenAiModelLoader()
val provider = loader.loadAutoConfigMetadata()
// If loading fails, provider.models will be empty
if (provider.models.isEmpty()) {
logger.warn("No models loaded, using defaults")
// Fallback logic
}

Validation errors are logged with details, including the provider name, the model that failed, and the validation error message. The loader returns an empty provider so that the rest of the system can continue.
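The overall fallback shape can be sketched as follows. This is a simplified illustration of the behaviour described above, not the actual AbstractYamlModelLoader code:

import org.slf4j.Logger

// Illustrative only: parse, validate, and fall back to an empty provider on any failure
fun <T : LlmAutoConfigProvider<*>> loadOrEmpty(
    providerName: String,
    logger: Logger,
    parse: () -> T,          // e.g. read and deserialize the YAML resource
    validate: (T) -> Unit,   // e.g. validateModels(provider); throws IllegalArgumentException
    empty: () -> T           // e.g. createEmptyProvider()
): T = try {
    parse().also(validate)
} catch (e: Exception) {
    // Logged details include the provider name and the parse/validation message
    logger.warn("Could not load model configuration for provider '{}': {}", providerName, e.message)
    empty()
}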
To add support for a new provider:

// 1. Define the model metadata class
data class MyProviderModel(
override val name: String,
override val modelId: String,
override val displayName: String?,
override val knowledgeCutoffDate: LocalDate?,
override val pricingModel: PerTokenPricingModel?,
val customSetting: String?
) : LlmAutoConfigMetadata
// 2. Define provider
data class MyProvider(
override val models: List<MyProviderModel>
) : LlmAutoConfigProvider<MyProviderModel>
// 3. Implement loader
class MyProviderLoader : AbstractYamlModelLoader<MyProvider>(
configPath = "META-INF/ai-models/my-provider.yaml"
) {
override fun getProviderClass() = MyProvider::class
override fun createEmptyProvider() = MyProvider(emptyList())
override fun getProviderName() = "my-provider"
override fun validateModels(provider: MyProvider) {
provider.models.forEach { model ->
validateCommonFields(model)
// Custom validation
}
}
}
// 4. Create YAML config at META-INF/ai-models/my-provider.yaml
// 5. Load and use
val loader = MyProviderLoader()
val provider = loader.loadAutoConfigMetadata()
provider.models.forEach { model ->
println("Loaded model: ${model.displayName} (${model.modelId})")
}
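A quick smoke test can confirm that the new loader finds and validates its YAML file. JUnit 5 and kotlin-test are assumptions here; adapt to the project's test stack:

import org.junit.jupiter.api.Test
import kotlin.test.assertTrue

class MyProviderLoaderTest {

    @Test
    fun `loads and validates models from my-provider yaml`() {
        val provider = MyProviderLoader().loadAutoConfigMetadata()

        // An empty list means the YAML was missing or failed validation
        assertTrue(provider.models.isNotEmpty(), "expected at least one model")
        provider.models.forEach { model ->
            assertTrue(model.name.isNotBlank())
            assertTrue(model.modelId.isNotBlank())
        }
    }
}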