"""Support for 40+ LLM providers with a unified interface."""
from typing import Optional

from agno.models.openai import OpenAIChat, OpenAILike
class OpenAIChat:
    """Interface stub for the OpenAI chat model (defaults to ``gpt-4``).

    Parameters mirror the OpenAI client configuration; all credentials
    default to ``None`` — presumably resolved from the environment when
    omitted (TODO confirm against the agno openai module).
    """

    def __init__(
        self,
        id: str = "gpt-4",
        api_key: Optional[str] = None,
        organization: Optional[str] = None,
        base_url: Optional[str] = None,
        **kwargs,
    ): ...
class OpenAILike:
    """For OpenAI-compatible APIs.

    ``base_url`` is required (there is no sensible default endpoint for a
    third-party compatible server). It is keyword-only so the required
    parameter can legally follow the defaulted ``api_key`` — the original
    positional form was a SyntaxError (non-default after default).
    """

    def __init__(
        self,
        id: str,
        api_key: Optional[str] = None,
        *,
        base_url: str,
        **kwargs,
    ): ...


from agno.models.anthropic import Claude
class Claude:
    """Interface stub for the Anthropic Claude model.

    Defaults to ``claude-3-5-sonnet-20241022``; ``api_key`` defaults to
    ``None`` — presumably read from the environment (TODO confirm).
    """

    def __init__(
        self,
        id: str = "claude-3-5-sonnet-20241022",
        api_key: Optional[str] = None,
        **kwargs,
    ): ...


from agno.models.google import Gemini
class Gemini:
    """Interface stub for the Google Gemini model.

    Defaults to ``gemini-2.0-flash-exp``; ``api_key`` defaults to
    ``None`` — presumably read from the environment (TODO confirm).
    """

    def __init__(
        self,
        id: str = "gemini-2.0-flash-exp",
        api_key: Optional[str] = None,
        **kwargs,
    ): ...


from agno.models.aws import AwsBedrock, Claude as BedrockClaude
class AwsBedrock:
    """Interface stub for models served through AWS Bedrock.

    ``id`` is required (Bedrock model identifier). Region and AWS
    credentials default to ``None`` — presumably resolved via the
    standard AWS credential chain when omitted (TODO confirm).
    """

    def __init__(
        self,
        id: str,
        region_name: Optional[str] = None,
        aws_access_key_id: Optional[str] = None,
        aws_secret_access_key: Optional[str] = None,
        **kwargs,
    ): ...


from agno.models.azure import AzureOpenAI, AzureAIFoundry
class AzureOpenAI:
    """Interface stub for Azure-hosted OpenAI models.

    ``id`` is required. Endpoint, deployment, and key default to
    ``None`` — presumably resolved from the environment when omitted
    (TODO confirm against the agno azure module). ``api_version``
    defaults to the 2024-08-01-preview REST API version.
    """

    def __init__(
        self,
        id: str,
        api_key: Optional[str] = None,
        azure_endpoint: Optional[str] = None,
        azure_deployment: Optional[str] = None,
        api_version: str = "2024-08-01-preview",
        **kwargs,
    ): ...


# Groq
from agno.models.groq import Groq

# Ollama (local models)
from agno.models.ollama import Ollama

# Cohere
from agno.models.cohere import Cohere

# Mistral
from agno.models.mistral import MistralChat

# Meta Llama
from agno.models.meta import Llama, LlamaOpenAI

# DeepSeek
from agno.models.deepseek import DeepSeek

# Perplexity
from agno.models.perplexity import Perplexity

# VLLM
from agno.models.vllm import VLLM

# Llama.cpp
from agno.models.llama_cpp import LlamaCpp

# LM Studio
from agno.models.lmstudio import LMStudio

from agno.agent import Agent
from agno.models.openai import OpenAIChat

# Minimal agent backed by an OpenAI chat model.
agent = Agent(
    model=OpenAIChat(
        id="gpt-4",
        api_key="your-api-key",
    )
)

from agno.agent import Agent
from agno.models.anthropic import Claude

# Minimal agent backed by an Anthropic Claude model.
agent = Agent(
    model=Claude(
        id="claude-3-5-sonnet-20241022",
        api_key="your-api-key",
    )
)

from agno.agent import Agent
from agno.models.ollama import Ollama

# Minimal agent backed by a local Ollama server.
agent = Agent(
    model=Ollama(
        id="llama2",
        host="http://localhost:11434",
    )
)

from agno.models.openai import OpenAIChat
# Fully-parameterized model configuration example: the generation
# settings are gathered in one mapping and unpacked into the call.
_generation_settings = {
    "temperature": 0.7,
    "max_tokens": 1000,
    "top_p": 0.9,
    "frequency_penalty": 0.0,
    "presence_penalty": 0.0,
    "response_format": {"type": "json_object"},
}

model = OpenAIChat(id="gpt-4", **_generation_settings)