This package provides a deprecated integration module that enables Java applications to interact with GitHub Models through the LangChain4j framework. It offers chat models (both synchronous and streaming), embedding models, and support for AI services with tool integration, JSON schema responses, and responsible AI features. The module wraps Azure AI Inference SDK to provide a unified API for accessing various language models hosted on GitHub Models, including chat completion capabilities, embeddings generation, and content filtering management. As of version 1.10.0, this module has been marked for deprecation and future removal, with users recommended to migrate to the langchain4j-openai-official module for enhanced functionality and better integration. The library is designed for reusability as a foundational component in LLM-powered Java applications that need to leverage GitHub-hosted AI models, offering builder patterns for configuration, support for proxy options, custom timeouts, and comprehensive model service versioning capabilities.
—
Quality
Pending
Does it follow best practices?
Impact
Pending
No eval scenarios have been run
API reference for GitHubModelsChatModel - synchronous chat completion.
package dev.langchain4j.model.github;
public class GitHubModelsChatModel implements ChatModel {
public ChatResponse chat(ChatRequest chatRequest);
public Set<Capability> supportedCapabilities();
public List<ChatModelListener> listeners();
public ModelProvider provider();
public static Builder builder();
}

public ChatResponse chat(ChatRequest chatRequest);

Execute synchronous chat completion.

Parameters:
chatRequest: ChatRequest - Messages, tools, and parameters

Returns:
Throws:

public Set<Capability> supportedCapabilities();

Get model's supported capabilities.

Returns:

public List<ChatModelListener> listeners();

Get registered listeners.

Returns:

public ModelProvider provider();

Get model provider identifier.

Returns:
ModelProvider.GITHUB_MODELS

public static Builder builder();

Create builder instance. Uses SPI factory if registered.

Returns:
public static class Builder {
// Required
public Builder gitHubToken(String gitHubToken);
public Builder modelName(String modelName);
public Builder modelName(GitHubModelsChatModelName modelName);
// Endpoint
public Builder endpoint(String endpoint);
public Builder serviceVersion(ModelServiceVersion serviceVersion);
// Sampling
public Builder temperature(Double temperature);
public Builder topP(Double topP);
public Builder maxTokens(Integer maxTokens);
public Builder presencePenalty(Double presencePenalty);
public Builder frequencyPenalty(Double frequencyPenalty);
public Builder seed(Long seed);
public Builder stop(List<String> stop);
// Response Format
public Builder responseFormat(ChatCompletionsResponseFormat responseFormat);
public Builder strictJsonSchema(boolean strictJsonSchema);
// Network
public Builder timeout(Duration timeout);
public Builder maxRetries(Integer maxRetries);
public Builder proxyOptions(ProxyOptions proxyOptions);
public Builder customHeaders(Map<String, String> customHeaders);
// Advanced
public Builder supportedCapabilities(Set<Capability> supportedCapabilities);
public Builder chatCompletionsClient(ChatCompletionsClient chatCompletionsClient);
public Builder listeners(List<ChatModelListener> listeners);
public Builder logRequestsAndResponses(Boolean logRequestsAndResponses);
public Builder userAgentSuffix(String userAgentSuffix);
public GitHubModelsChatModel build();
}

public Builder gitHubToken(String gitHubToken);

Set GitHub personal access token (required).

Parameters:
gitHubToken: String - GitHub token

Returns:

public Builder modelName(String modelName);
public Builder modelName(GitHubModelsChatModelName modelName);

Set model name (required).

Parameters:
modelName: String or GitHubModelsChatModelName - Model identifier

Returns:
public Builder endpoint(String endpoint);

Set API endpoint (default: https://models.inference.ai.azure.com).

Parameters:
endpoint: String - Endpoint URL

Returns:

public Builder serviceVersion(ModelServiceVersion serviceVersion);

Set Azure API service version.

Parameters:
serviceVersion: ModelServiceVersion - API version

Returns:
public Builder temperature(Double temperature);

Set sampling temperature (0.0-2.0).

Parameters:
temperature: Double - Temperature value

Returns:

public Builder topP(Double topP);

Set nucleus sampling parameter (0.0-1.0).

Parameters:
topP: Double - Top-p value

Returns:

public Builder maxTokens(Integer maxTokens);

Set maximum tokens to generate.

Parameters:
maxTokens: Integer - Max token count

Returns:
public Builder presencePenalty(Double presencePenalty);

Set presence penalty (-2.0 to 2.0).

Parameters:
presencePenalty: Double - Penalty value

Returns:

public Builder frequencyPenalty(Double frequencyPenalty);

Set frequency penalty (-2.0 to 2.0).

Parameters:
frequencyPenalty: Double - Penalty value

Returns:

public Builder seed(Long seed);

Set random seed for deterministic generation.

Parameters:
seed: Long - Seed value

Returns:

public Builder stop(List<String> stop);

Set stop sequences.

Parameters:
stop: List<String> - Stop sequences

Returns:
public Builder responseFormat(ChatCompletionsResponseFormat responseFormat);

Set response format (e.g., JSON).

Parameters:
responseFormat: ChatCompletionsResponseFormat - Format specification

Returns:

public Builder strictJsonSchema(boolean strictJsonSchema);

Enable strict JSON schema validation.

Parameters:
strictJsonSchema: boolean - Enable strict validation

Returns:
public Builder timeout(Duration timeout);

Set request timeout.

Parameters:
timeout: Duration - Timeout duration

Returns:

public Builder maxRetries(Integer maxRetries);

Set maximum retry attempts.

Parameters:
maxRetries: Integer - Max retries

Returns:

public Builder proxyOptions(ProxyOptions proxyOptions);

Set HTTP proxy configuration.

Parameters:
proxyOptions: ProxyOptions - Proxy configuration

Returns:
public Builder customHeaders(Map<String, String> customHeaders);

Set custom HTTP headers.

Parameters:
customHeaders: Map<String, String> - Custom headers

Returns:

public Builder supportedCapabilities(Set<Capability> supportedCapabilities);

Set supported capabilities.

Parameters:
supportedCapabilities: Set<Capability> - Capabilities

Returns:

public Builder chatCompletionsClient(ChatCompletionsClient chatCompletionsClient);

Set custom Azure AI client (overrides other config).

Parameters:
chatCompletionsClient: ChatCompletionsClient - Custom client

Returns:
public Builder listeners(List<ChatModelListener> listeners);

Register model listeners.

Parameters:
listeners: List<ChatModelListener> - Listeners

Returns:

public Builder logRequestsAndResponses(Boolean logRequestsAndResponses);

Enable request/response logging.

Parameters:
logRequestsAndResponses: Boolean - Enable logging

Returns:

public Builder userAgentSuffix(String userAgentSuffix);

Set User-Agent suffix.

Parameters:
userAgentSuffix: String - Suffix

Returns:

public GitHubModelsChatModel build();

Build model instance.

Returns:
Throws:
import dev.langchain4j.model.chat.ChatModel;
import dev.langchain4j.model.chat.request.ChatRequest;
import dev.langchain4j.model.chat.response.ChatResponse;
import dev.langchain4j.model.chat.Capability;
import dev.langchain4j.model.chat.listener.ChatModelListener;
import dev.langchain4j.model.ModelProvider;
import com.azure.ai.inference.ChatCompletionsClient;
import com.azure.ai.inference.ModelServiceVersion;
import com.azure.ai.inference.models.ChatCompletionsResponseFormat;
import com.azure.core.http.ProxyOptions;
import java.time.Duration;
import java.util.List;
import java.util.Set;
import java.util.Map;

Install with Tessl CLI
npx tessl i tessl/maven-dev-langchain4j--langchain4j-github-models@1.11.0

docs