The official Python library for the Anthropic API
Best practices: Pending (does it follow best practices?)
Impact: Pending (no eval scenarios have been run)
Risk: Pending (the risk profile of this skill)
Beta features provide access to experimental and preview functionality, including advanced capabilities and new model features that are still in testing or early-release phases.
Access to beta versions of core APIs with experimental features and improvements.
class Beta:
messages: BetaMessages
models: BetaModels
files: BetaFiles
class AsyncBeta:
messages: AsyncBetaMessages
models: AsyncBetaModels
files: AsyncBetaFiles
class BetaMessages:
def create(
self,
max_tokens: int,
messages: List[BetaMessageParam],
model: str,
*,
betas: Optional[List[AnthropicBetaParam]] = None,
**kwargs
) -> BetaMessage: ...
batches: BetaBatches
class AsyncBetaMessages:
async def create(
self,
max_tokens: int,
messages: List[BetaMessageParam],
model: str,
*,
betas: Optional[List[AnthropicBetaParam]] = None,
**kwargs
) -> BetaMessage: ...
batches: AsyncBetaBatchesFile management capabilities for uploading, managing, and using files with beta features.
class BetaFiles:
def upload(
self,
*,
file: FileTypes,
betas: Optional[List[AnthropicBetaParam]] = None,
**kwargs
) -> FileMetadata: ...
def retrieve_metadata(self, file_id: str, **kwargs) -> FileMetadata: ...
def list(**kwargs) -> List[FileMetadata]: ...
def delete(self, file_id: str, **kwargs) -> DeletedFile: ...
class AsyncBetaFiles:
async def upload(
self,
*,
file: FileTypes,
betas: Optional[List[AnthropicBetaParam]] = None,
**kwargs
) -> FileMetadata: ...
async def retrieve_metadata(self, file_id: str, **kwargs) -> FileMetadata: ...
async def list(**kwargs) -> List[FileMetadata]: ...
async def delete(self, file_id: str, **kwargs) -> DeletedFile: ...Access to beta model features and experimental model capabilities.
class BetaModels:
def list(**kwargs) -> List[BetaModelInfo]: ...
class AsyncBetaModels:
async def list(**kwargs) -> List[BetaModelInfo]: ...class BetaMessage(TypedDict):
id: str
type: Literal["message"]
role: Literal["assistant"]
content: List[BetaContentBlock]
model: str
stop_reason: Optional[StopReason]
stop_sequence: Optional[str]
usage: BetaUsage
class BetaMessageParam(TypedDict):
role: Literal["user", "assistant"]
content: Union[str, List[BetaContentBlockParam]]
class BetaContentBlock(TypedDict):
type: str
class BetaTextBlock(BetaContentBlock):
type: Literal["text"]
text: str
class BetaContentBlockParam(TypedDict):
type: str
class BetaUsage(TypedDict):
input_tokens: int
output_tokens: int
cache_creation_input_tokens: Optional[int]
cache_read_input_tokens: Optional[int]class BetaToolParam(TypedDict):
name: str
description: str
input_schema: Dict[str, Any]
class AnthropicBetaParam(TypedDict):
type: strclass FileMetadata(TypedDict):
id: str
type: Literal["file"]
filename: str
purpose: str
size_bytes: int
created_at: str
class DeletedFile(TypedDict):
id: str
type: Literal["file"]
deleted: boolclass BetaModelInfo(TypedDict):
id: str
type: Literal["model"]
display_name: str
created_at: strclass BetaError(TypedDict):
type: str
message: str
class BetaAPIError(TypedDict):
type: Literal["api_error"]
message: str
class BetaErrorResponse(TypedDict):
type: Literal["error"]
error: BetaError
class BetaBillingError(TypedDict):
type: Literal["billing_error"]
message: str
class BetaAuthenticationError(TypedDict):
type: Literal["authentication_error"]
message: str
class BetaPermissionError(TypedDict):
type: Literal["permission_error"]
message: str
class BetaNotFoundError(TypedDict):
type: Literal["not_found_error"]
message: str
class BetaRateLimitError(TypedDict):
type: Literal["rate_limit_error"]
message: str
class BetaOverloadedError(TypedDict):
type: Literal["overloaded_error"]
message: str
class BetaGatewayTimeoutError(TypedDict):
type: Literal["gateway_timeout_error"]
message: str
class BetaInvalidRequestError(TypedDict):
type: Literal["invalid_request_error"]
message: strfrom anthropic import Anthropic
client = Anthropic()
# Access beta APIs
beta_message = client.beta.messages.create(
model="claude-sonnet-4-20250514",
max_tokens=1024,
messages=[
{"role": "user", "content": "Hello from beta!"}
],
betas=["beta-feature-name"] # Enable specific beta features
)
print(beta_message.content[0].text)# Upload a file for beta features
with open("document.pdf", "rb") as file:
uploaded_file = client.beta.files.create(
file=file,
purpose="analysis"
)
print(f"File uploaded: {uploaded_file.id}")
print(f"Filename: {uploaded_file.filename}")
print(f"Size: {uploaded_file.size_bytes} bytes")
# List uploaded files
files = client.beta.files.list()
for file in files:
print(f"File: {file.filename} ({file.id})")
# Retrieve specific file
file_info = client.beta.files.retrieve(uploaded_file.id)
print(f"File info: {file_info}")
# Use file in beta message
beta_message_with_file = client.beta.messages.create(
model="claude-sonnet-4-20250514",
max_tokens=1024,
messages=[
{
"role": "user",
"content": [
{"type": "text", "text": f"Analyze the uploaded file {uploaded_file.id}"}
]
}
]
)
# Delete file when done
deleted = client.beta.files.delete(uploaded_file.id)
print(f"File deleted: {deleted.deleted}")# List beta models
beta_models = client.beta.models.list()
print("Available beta models:")
for model in beta_models:
print(f"- {model.display_name} ({model.id})")
# Use beta model
if beta_models:
beta_model_id = beta_models[0].id
message = client.beta.messages.create(
model=beta_model_id,
max_tokens=1024,
messages=[
{"role": "user", "content": "Hello from beta model!"}
]
)# Create beta batch requests
beta_batch_requests = [
{
"custom_id": "beta-request-1",
"method": "POST",
"url": "/v1/messages",
"body": {
"model": "claude-sonnet-4-20250514",
"max_tokens": 1024,
"messages": [
{"role": "user", "content": "Beta batch request 1"}
],
"betas": ["beta-feature-name"]
}
},
{
"custom_id": "beta-request-2",
"method": "POST",
"url": "/v1/messages",
"body": {
"model": "claude-sonnet-4-20250514",
"max_tokens": 1024,
"messages": [
{"role": "user", "content": "Beta batch request 2"}
],
"betas": ["beta-feature-name"]
}
}
]
# Create beta batch
beta_batch = client.beta.messages.batches.create(requests=beta_batch_requests)
print(f"Beta batch created: {beta_batch.id}")# Stream with beta features
with client.beta.messages.stream(
model="claude-sonnet-4-20250514",
max_tokens=1024,
messages=[
{"role": "user", "content": "Stream with beta features"}
],
betas=["beta-streaming-feature"]
) as stream:
for text in stream.text_stream:
print(text, end="", flush=True)from anthropic import BetaAPIError, BetaRateLimitError
try:
beta_message = client.beta.messages.create(
model="claude-sonnet-4-20250514",
max_tokens=1024,
messages=[
{"role": "user", "content": "Beta request"}
],
betas=["experimental-feature"]
)
except BetaRateLimitError as e:
print(f"Beta rate limit: {e}")
except BetaAPIError as e:
print(f"Beta API error: {e}")
except Exception as e:
print(f"Unexpected error: {e}")import asyncio
from anthropic import AsyncAnthropic
async def beta_async_example():
client = AsyncAnthropic()
# Async beta message
beta_message = await client.beta.messages.create(
model="claude-sonnet-4-20250514",
max_tokens=1024,
messages=[
{"role": "user", "content": "Async beta request"}
]
)
# Async beta file operations
with open("test.txt", "rb") as file:
uploaded_file = await client.beta.files.create(
file=file,
purpose="testing"
)
files = await client.beta.files.list()
print(f"Beta files: {len(files)}")
# Cleanup
await client.beta.files.delete(uploaded_file.id)
return beta_message.content[0].text
result = asyncio.run(beta_async_example())
print(f"Beta result: {result}")def check_beta_features(client: Anthropic) -> dict:
"""Check which beta features are available"""
available_features = {
"beta_messages": False,
"beta_files": False,
"beta_models": False,
"beta_batching": False
}
try:
# Test beta messages
client.beta.messages.create(
model="claude-sonnet-4-20250514",
max_tokens=1,
messages=[{"role": "user", "content": "test"}]
)
available_features["beta_messages"] = True
except:
pass
try:
# Test beta files
client.beta.files.list()
available_features["beta_files"] = True
except:
pass
try:
# Test beta models
client.beta.models.list()
available_features["beta_models"] = True
except:
pass
try:
# Test beta batching
client.beta.messages.batches.list()
available_features["beta_batching"] = True
except:
pass
return available_features
# Check available beta features
features = check_beta_features(client)
print("Available beta features:")
for feature, available in features.items():
status = "✅" if available else "❌"
print(f"{status} {feature}")class BetaConfig:
"""Configuration for beta features"""
def __init__(self):
self.enabled_features = [
"beta-messages-2024",
"beta-files-upload",
"beta-streaming-enhanced"
]
def create_beta_client(self) -> Anthropic:
"""Create client optimized for beta features"""
return Anthropic(
api_key=os.environ.get("ANTHROPIC_API_KEY"),
default_headers={
"Anthropic-Beta": ",".join(self.enabled_features),
"X-Beta-User": "true"
}
)
def create_beta_message(self, client: Anthropic, **kwargs) -> Any:
"""Create message with beta features enabled"""
return client.beta.messages.create(
betas=self.enabled_features,
**kwargs
)
# Usage
beta_config = BetaConfig()
beta_client = beta_config.create_beta_client()
message = beta_config.create_beta_message(
beta_client,
model="claude-sonnet-4-20250514",
max_tokens=1024,
messages=[{"role": "user", "content": "Beta configured message"}]
)