OpenAI-compatible model factory for the Embabel Agent Framework.
Essential patterns to get started with Embabel Agent OpenAI.
import com.embabel.agent.openai.OpenAiCompatibleModelFactory
import com.embabel.agent.openai.OpenAiChatOptionsConverter
import com.embabel.agent.openai.Gpt5ChatOptionsConverter
import com.embabel.agent.openai.StandardOpenAiOptionsConverter
import com.embabel.common.ai.model.PricingModel
import io.micrometer.observation.ObservationRegistry
import java.time.LocalDate

The factory is your entry point for creating LLM and embedding services.
Default OpenAI configuration:
val factory = OpenAiCompatibleModelFactory(
baseUrl = null, // Uses https://api.openai.com
apiKey = "sk-...",
completionsPath = null,
embeddingsPath = null,
observationRegistry = ObservationRegistry.create()
)

Custom provider (local LLM, Azure, etc.):
val customFactory = OpenAiCompatibleModelFactory(
baseUrl = "http://localhost:8000",
apiKey = null, // Null if no auth required
completionsPath = "/v1/chat/completions",
embeddingsPath = "/v1/embeddings",
observationRegistry = ObservationRegistry.create()
)

// Standard OpenAI model
val gpt4 = factory.openAiCompatibleLlm(
model = "gpt-4",
pricingModel = PricingModel.usdPer1MTokens(30.0, 60.0),
provider = "OpenAI",
knowledgeCutoffDate = LocalDate.of(2023, 4, 1)
)
// GPT-3.5 Turbo (cheaper, faster)
val gpt35 = factory.openAiCompatibleLlm(
model = "gpt-3.5-turbo",
pricingModel = PricingModel.usdPer1MTokens(0.5, 1.5),
provider = "OpenAI",
knowledgeCutoffDate = LocalDate.of(2021, 9, 1)
)
// GPT-5 model (uses special converter)
val gpt5 = factory.openAiCompatibleLlm(
model = "gpt-5-turbo",
pricingModel = PricingModel.usdPer1MTokens(10.0, 30.0),
provider = "OpenAI",
knowledgeCutoffDate = LocalDate.of(2024, 10, 1),
optionsConverter = Gpt5ChatOptionsConverter // Required for GPT-5
)
// Local model (free)
val localLlm = customFactory.openAiCompatibleLlm(
model = "llama-3-70b",
pricingModel = PricingModel.ALL_YOU_CAN_EAT,
provider = "LocalLLM",
knowledgeCutoffDate = null
)

// Small embedding model (cheaper)
val smallEmbeddings = factory.openAiCompatibleEmbeddingService(
model = "text-embedding-3-small",
provider = "OpenAI"
)
// Large embedding model (more accurate)
val largeEmbeddings = factory.openAiCompatibleEmbeddingService(
model = "text-embedding-3-large",
provider = "OpenAI"
)

The converter transforms portable LlmOptions to OpenAI-specific OpenAiChatOptions.
// Default: OpenAiChatOptionsConverter (auto-selected if not specified)
// Works with most OpenAI models, some options may not be supported
// StandardOpenAiOptionsConverter: For models supporting all parameters
// Explicitly supports: temperature, topP, maxTokens, presencePenalty, frequencyPenalty
val service1 = factory.openAiCompatibleLlm(
model = "gpt-4",
pricingModel = PricingModel.usdPer1MTokens(30.0, 60.0),
provider = "OpenAI",
knowledgeCutoffDate = LocalDate.of(2023, 4, 1),
optionsConverter = StandardOpenAiOptionsConverter
)
// Gpt5ChatOptionsConverter: For GPT-5 models (no temperature support)
// Logs warning if temperature != 1.0
val service2 = factory.openAiCompatibleLlm(
model = "gpt-5-turbo",
pricingModel = PricingModel.usdPer1MTokens(10.0, 30.0),
provider = "OpenAI",
knowledgeCutoffDate = LocalDate.of(2024, 10, 1),
optionsConverter = Gpt5ChatOptionsConverter
)

Reading an API key from the environment:
val factory = OpenAiCompatibleModelFactory(
baseUrl = null,
apiKey = System.getenv("OPENAI_API_KEY"),
completionsPath = null,
embeddingsPath = null,
observationRegistry = ObservationRegistry.create()
)

Multiple models from one factory:
val factory = OpenAiCompatibleModelFactory(...)
val cheapModel = factory.openAiCompatibleLlm(
model = "gpt-3.5-turbo",
pricingModel = PricingModel.usdPer1MTokens(0.5, 1.5),
provider = "OpenAI",
knowledgeCutoffDate = LocalDate.of(2021, 9, 1)
)
val powerfulModel = factory.openAiCompatibleLlm(
model = "gpt-4",
pricingModel = PricingModel.usdPer1MTokens(30.0, 60.0),
provider = "OpenAI",
knowledgeCutoffDate = LocalDate.of(2023, 4, 1)
)
val embeddings = factory.openAiCompatibleEmbeddingService(
model = "text-embedding-3-small",
provider = "OpenAI"
)

No authentication (local servers):
val factory = OpenAiCompatibleModelFactory(
baseUrl = "http://localhost:11434",
apiKey = null, // No auth
completionsPath = null,
embeddingsPath = null,
observationRegistry = ObservationRegistry.create()
)

Install with the Tessl CLI:
npx tessl i tessl/maven-com-embabel-agent--embabel-agent-openai@0.3.0