The official Python library for the Anthropic API
```bash
npx @tessl/cli install tessl/pypi-anthropic@0.74.0
```

The official Python library for the Anthropic API, providing access to Claude AI models for conversational AI, content generation, and text analysis.

```bash
pip install anthropic
```

```python
from anthropic import Anthropic, AsyncAnthropic
```

For types:

```python
from anthropic.types import Message, ContentBlock, Usage
```

```python
from anthropic import Anthropic

# Initialize the client with your API key
client = Anthropic(api_key="your-api-key")

# Create a message
message = client.messages.create(
    model="claude-3-5-sonnet-20241022",
    max_tokens=1024,
    messages=[
        {"role": "user", "content": "Hello, Claude!"}
    ]
)

print(message.content[0].text)
```

Async usage:
```python
from anthropic import AsyncAnthropic
import asyncio


async def main():
    client = AsyncAnthropic(api_key="your-api-key")
    message = await client.messages.create(
        model="claude-3-5-sonnet-20241022",
        max_tokens=1024,
        messages=[
            {"role": "user", "content": "Hello, Claude!"}
        ]
    )
    print(message.content[0].text)


asyncio.run(main())
```

The SDK is organized around these core components:
- Clients (`Anthropic`, `AsyncAnthropic`) with sync and async variants
- Messages: create, stream, and batch requests to Claude models
- Tool helpers, beta features (files, skills), and model metadata
- Platform clients for AWS Bedrock, Google Vertex AI, and Azure AI Foundry
- Typed errors and Pydantic response models

Create and configure the main client for interacting with the Anthropic API.
```python
class Anthropic:
    def __init__(
        self,
        api_key: str | None = None,
        auth_token: str | None = None,
        base_url: str | httpx.URL | None = None,
        timeout: float | Timeout | None | NotGiven = NOT_GIVEN,
        max_retries: int = 2,
        default_headers: Mapping[str, str] | None = None,
        default_query: Mapping[str, object] | None = None,
        http_client: httpx.Client | None = None,
    ): ...

class AsyncAnthropic:
    def __init__(
        self,
        api_key: str | None = None,
        auth_token: str | None = None,
        base_url: str | httpx.URL | None = None,
        timeout: float | Timeout | None | NotGiven = NOT_GIVEN,
        max_retries: int = 2,
        default_headers: Mapping[str, str] | None = None,
        default_query: Mapping[str, object] | None = None,
        http_client: httpx.AsyncClient | None = None,
    ): ...
```

The `api_key` defaults to the `ANTHROPIC_API_KEY` environment variable if not provided. The `base_url` defaults to `https://api.anthropic.com`.
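For example, a client with a larger retry budget and an explicit timeout might be configured as follows (values are illustrative; `api_key` may be omitted when `ANTHROPIC_API_KEY` is set):

```python
from anthropic import Anthropic

# Illustrative configuration; every keyword argument comes from the
# constructor signature above.
client = Anthropic(
    api_key="your-api-key",  # omit to fall back to ANTHROPIC_API_KEY
    max_retries=3,           # default is 2
    timeout=30.0,            # seconds
)
```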
Create conversational messages with Claude models, including multi-turn conversations and system prompts.
```python
def create(
    self,
    *,
    max_tokens: int,
    messages: List[MessageParam],
    model: str,
    metadata: MetadataParam | None = None,
    service_tier: str | None = None,
    stop_sequences: List[str] | None = None,
    stream: bool = False,
    system: str | List[TextBlockParam] | None = None,
    temperature: float | None = None,
    thinking: ThinkingConfigParam | None = None,
    tool_choice: ToolChoiceParam | None = None,
    tools: List[ToolUnionParam] | None = None,
    top_k: int | None = None,
    top_p: float | None = None,
    betas: List[str] | None = None,
) -> Message: ...
```
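A sketch of a multi-turn exchange with a system prompt, using the `client` from the quick start (the model name and messages are illustrative):

```python
# Prior assistant turns are passed back in `messages` to continue a conversation.
message = client.messages.create(
    model="claude-3-5-sonnet-20241022",
    max_tokens=1024,
    system="You are a concise assistant.",
    messages=[
        {"role": "user", "content": "What is the capital of France?"},
        {"role": "assistant", "content": "Paris."},
        {"role": "user", "content": "How large is its population?"},
    ],
)
print(message.content[0].text)
```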
Stream message generation in real-time with automatic text accumulation and event handling.

```python
def stream(
    self,
    *,
    max_tokens: int,
    messages: List[MessageParam],
    model: str,
    # ... same parameters as create
) -> MessageStreamManager: ...
```
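A minimal streaming sketch, assuming the `client` from the quick start; the stream manager is used as a context manager and `text_stream` yields text deltas as they arrive:

```python
with client.messages.stream(
    model="claude-3-5-sonnet-20241022",
    max_tokens=1024,
    messages=[{"role": "user", "content": "Write a haiku about the sea."}],
) as stream:
    # Print text as it is generated.
    for text in stream.text_stream:
        print(text, end="", flush=True)

    # The accumulated Message is available once streaming completes.
    final_message = stream.get_final_message()
```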
Convert Python functions to tools that Claude can call, with automatic execution loops.

```python
def beta_tool(func: Callable) -> BetaFunctionTool: ...

def beta_async_tool(func: Callable) -> BetaAsyncFunctionTool: ...
```
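A sketch of the decorator, assuming `beta_tool` is importable from the top-level `anthropic` package and that it derives the tool schema from the function signature and docstring; consult the SDK documentation for the exact runner API that drives the execution loop:

```python
from anthropic import beta_tool  # import path is an assumption


@beta_tool
def get_weather(city: str) -> str:
    """Return a short weather report for a city."""
    # In a real tool this would call a weather service.
    return f"It is sunny in {city}."

# The resulting BetaFunctionTool can then be passed to a beta messages call
# that performs the automatic execution loop described above.
```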
Process multiple message requests efficiently in batches with automatic result aggregation.

```python
def create(
    self,
    *,
    requests: List[MessageBatchRequestParam],
) -> MessageBatch: ...

def retrieve(
    self,
    message_batch_id: str,
) -> MessageBatch: ...

def results(
    self,
    message_batch_id: str,
) -> Iterator[MessageBatchIndividualResponse]: ...
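```

A sketch of submitting and reading a batch (the `custom_id` value and model name are illustrative; results become available only after the batch has finished processing):

```python
batch = client.messages.batches.create(
    requests=[
        {
            "custom_id": "greeting-1",
            "params": {
                "model": "claude-3-5-sonnet-20241022",
                "max_tokens": 1024,
                "messages": [{"role": "user", "content": "Hello, Claude!"}],
            },
        },
    ],
)

# Poll until processing has ended, then stream the per-request results.
batch = client.messages.batches.retrieve(batch.id)
if batch.processing_status == "ended":
    for entry in client.messages.batches.results(batch.id):
        print(entry.custom_id, entry.result.type)
```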
Use Claude models through AWS Bedrock, Google Vertex AI, or Azure AI Foundry with platform-native authentication.

```python
class AnthropicBedrock:
    def __init__(
        self,
        *,
        aws_region: str | None = None,
        aws_access_key: str | None = None,
        aws_secret_key: str | None = None,
        aws_session_token: str | None = None,
        # ... standard client parameters
    ): ...

class AnthropicVertex:
    def __init__(
        self,
        *,
        region: str | NotGiven = NOT_GIVEN,
        project_id: str | NotGiven = NOT_GIVEN,
        access_token: str | None = None,
        credentials: GoogleCredentials | None = None,
        # ... standard client parameters
    ): ...

class AnthropicFoundry:
    def __init__(
        self,
        *,
        resource: str | None = None,
        api_key: str | None = None,
        azure_ad_token_provider: Callable | None = None,
        base_url: str | None = None,
        # ... standard client parameters
    ): ...
```
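For example, the Bedrock client exposes the same messages interface; the region and Bedrock model ID below are illustrative, and AWS credentials are resolved from the standard environment/config chain when not passed explicitly:

```python
from anthropic import AnthropicBedrock

bedrock = AnthropicBedrock(aws_region="us-east-1")

message = bedrock.messages.create(
    model="anthropic.claude-3-5-sonnet-20241022-v2:0",  # Bedrock model ID (illustrative)
    max_tokens=1024,
    messages=[{"role": "user", "content": "Hello!"}],
)
print(message.content[0].text)
```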
Access experimental features including file uploads, skills management, and extended thinking capabilities.

```python
# File uploads
def upload(
    self,
    *,
    file: FileContent,
    purpose: str,
) -> FileMetadata: ...

# Skills management
def create(
    self,
    *,
    container: SkillContainerParam,
    description: str,
    name: str,
) -> SkillCreateResponse: ...
```
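A sketch of a file upload, assuming the `upload` method shown above lives on `client.beta.files` and accepts an open binary file; the resource path and `purpose` value are assumptions to verify against the beta documentation:

```python
# Resource path and purpose value are assumptions; see the beta docs.
with open("report.pdf", "rb") as f:
    file_metadata = client.beta.files.upload(file=f, purpose="user_upload")

print(file_metadata.id)
```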
Retrieve information about available models and their capabilities.

```python
def retrieve(
    self,
    model_id: str,
    *,
    betas: List[str] | None = None,
) -> ModelInfo: ...

def list(
    self,
    *,
    after_id: str | None = None,
    before_id: str | None = None,
    limit: int | None = None,
    betas: List[str] | None = None,
) -> SyncPage[ModelInfo]: ...
```
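For example, the paginated listing can be iterated directly, and a single model can be looked up by ID (the ID shown is illustrative):

```python
# Iterate the paginated listing; SyncPage yields ModelInfo objects.
for model in client.models.list(limit=20):
    print(model.id, model.display_name)

info = client.models.retrieve("claude-3-5-sonnet-20241022")
print(info.created_at)
```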
Handle API errors with specific exception types for different error conditions.

```python
class AnthropicError(Exception): ...
class APIError(AnthropicError): ...
class APIStatusError(APIError): ...
class BadRequestError(APIStatusError): ...
class AuthenticationError(APIStatusError): ...
class PermissionDeniedError(APIStatusError): ...
class NotFoundError(APIStatusError): ...
class RateLimitError(APIStatusError): ...
class InternalServerError(APIStatusError): ...
class APIConnectionError(APIError): ...
class APITimeoutError(APIConnectionError): ...
```
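A typical handling pattern catches the most specific classes first and falls back to connection-level errors (the request itself is illustrative):

```python
import anthropic

try:
    client.messages.create(
        model="claude-3-5-sonnet-20241022",
        max_tokens=1024,
        messages=[{"role": "user", "content": "Hello, Claude!"}],
    )
except anthropic.RateLimitError:
    print("Rate limited; back off and retry later.")
except anthropic.APIStatusError as exc:
    # Any other non-2xx response; the status code and message are available.
    print(exc.status_code, exc.message)
except anthropic.APIConnectionError:
    print("Could not reach the API.")
```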
The SDK uses Pydantic models for type safety. Key types include:

- `Message`: Complete message response with content blocks
- `ContentBlock`: Text, tool use, or thinking blocks
- `MessageParam`: Input message format
- `ToolParam`: Tool definition
- `ModelInfo`: Model metadata
- `Usage`: Token usage statistics

Legacy prompt constants for the older completion API:

```python
HUMAN_PROMPT: str = "\n\nHuman:"
AI_PROMPT: str = "\n\nAssistant:"
```