- Spec files
pypi-anthropic
Describes: pkg:pypi/anthropic@0.66.x
- Description
- The official Python library for the Anthropic API
- Author
- tessl
- Last updated
docs/batching.md
# Message Batching

Message batching allows efficient processing of multiple message requests in batches, providing cost optimization and throughput improvements for high-volume applications. This is ideal for bulk processing, data analysis, and scenarios where real-time responses are not required.

## Capabilities

### Batch Operations

Create, retrieve, list, and cancel message batches for asynchronous processing of multiple requests.

```python { .api }
def create(
    requests: List[Dict[str, Any]],
    **kwargs
) -> Any

async def create(
    requests: List[Dict[str, Any]],
    **kwargs
) -> Any

def retrieve(batch_id: str, **kwargs) -> Any
async def retrieve(batch_id: str, **kwargs) -> Any

def list(**kwargs) -> Any
async def list(**kwargs) -> Any

def cancel(batch_id: str, **kwargs) -> Any
async def cancel(batch_id: str, **kwargs) -> Any
```

## Core Types

### Batch Types

The exact types for batching are part of the beta API and may vary. The following represents the general structure:

```python { .api }
class BatchRequest(TypedDict):
    custom_id: str
    method: Literal["POST"]
    url: str
    body: Dict[str, Any]

class Batch(TypedDict):
    id: str
    type: Literal["message_batch"]
    processing_status: str
    request_counts: Dict[str, int]
    ended_at: Optional[str]
    created_at: str
    expires_at: str
    archived_at: Optional[str]
    cancel_initiated_at: Optional[str]
    results_url: Optional[str]

class BatchResponse(TypedDict):
    custom_id: str
    result: Optional[Dict[str, Any]]
    error: Optional[Dict[str, Any]]
```

## Usage Examples

### Basic Batch Creation

```python
from anthropic import Anthropic

client = Anthropic()

# Prepare batch requests
batch_requests = [
    {
        "custom_id": "request-1",
        "method": "POST",
        "url": "/v1/messages",
        "body": {
            "model": "claude-sonnet-4-20250514",
            "max_tokens": 1024,
            "messages": [
                {"role": "user", "content": "What is the capital of France?"}
            ]
        }
    },
    {
        "custom_id": "request-2",
        "method": "POST",
        "url": "/v1/messages",
        "body": {
            "model": "claude-sonnet-4-20250514",
            "max_tokens": 1024,
            "messages": [
                {"role": "user", "content": "What is the capital of Germany?"}
            ]
        }
    },
    {
        "custom_id": "request-3",
        "method": "POST",
        "url": "/v1/messages",
        "body": {
            "model": "claude-sonnet-4-20250514",
            "max_tokens": 1024,
            "messages": [
                {"role": "user", "content": "What is the capital of Italy?"}
            ]
        }
    }
]

# Create the batch
batch = client.messages.batches.create(requests=batch_requests)

print(f"Batch created with ID: {batch.id}")
print(f"Status: {batch.processing_status}")
```

### Monitoring Batch Progress

```python
import time

def wait_for_batch_completion(client: Anthropic, batch_id: str, max_wait_time: int = 300) -> Any:
    """Wait for a batch to complete processing"""

    start_time = time.time()

    while time.time() - start_time < max_wait_time:
        batch = client.messages.batches.retrieve(batch_id)

        print(f"Batch {batch_id} status: {batch.processing_status}")

        if batch.processing_status in ["completed", "failed", "cancelled"]:
            return batch
        elif batch.processing_status == "in_progress":
            print(f"Progress: {batch.request_counts}")

        time.sleep(10)  # Check every 10 seconds

    raise TimeoutError(f"Batch {batch_id} did not complete within {max_wait_time} seconds")

# Usage
try:
    completed_batch = wait_for_batch_completion(client, batch.id)
    print(f"Batch completed: {completed_batch.processing_status}")

    if completed_batch.results_url:
        print(f"Results available at: {completed_batch.results_url}")

except TimeoutError as e:
    print(f"Timeout error: {e}")
```

### Bulk Text Processing

```python
def process_documents_in_batch(documents: List[str], task: str) -> str:
    """Process multiple documents in a single batch"""

    batch_requests = []

    for i, document in enumerate(documents):
        request = {
            "custom_id": f"doc-{i}",
            "method": "POST",
            "url": "/v1/messages",
            "body": {
                "model": "claude-sonnet-4-20250514",
                "max_tokens": 2048,
                "messages": [
                    {
                        "role": "user",
                        "content": f"{task}\n\nDocument:\n{document}"
                    }
                ]
            }
        }
        batch_requests.append(request)

    # Create batch
    batch = client.messages.batches.create(requests=batch_requests)

    # Wait for completion
    completed_batch = wait_for_batch_completion(client, batch.id)

    return completed_batch.id

# Example usage
documents = [
    "Annual report showing 15% growth in Q4...",
    "Marketing campaign results indicate 23% increase...",
    "Customer feedback survey reveals high satisfaction..."
]

batch_id = process_documents_in_batch(
    documents,
    "Please summarize the key points from this document in 3 bullet points."
)

print(f"Document summarization batch completed: {batch_id}")
```

### Data Analysis Batch

```python
def analyze_customer_feedback_batch(feedback_list: List[str]) -> str:
    """Analyze customer feedback in batch for sentiment and themes"""

    batch_requests = []

    for i, feedback in enumerate(feedback_list):
        request = {
            "custom_id": f"feedback-{i}",
            "method": "POST",
            "url": "/v1/messages",
            "body": {
                "model": "claude-sonnet-4-20250514",
                "max_tokens": 512,
                "system": "You are an expert at analyzing customer feedback. Provide sentiment (positive/negative/neutral) and key themes.",
                "messages": [
                    {
                        "role": "user",
                        "content": f"Analyze this customer feedback:\n\n{feedback}"
                    }
                ]
            }
        }
        batch_requests.append(request)

    batch = client.messages.batches.create(requests=batch_requests)
    return batch.id

# Usage
customer_feedback = [
    "The product is amazing, but delivery was slow.",
    "Great customer service, very helpful staff.",
    "Product quality is poor, disappointed with purchase.",
    "Fast shipping, product exactly as described."
]

analysis_batch_id = analyze_customer_feedback_batch(customer_feedback)
print(f"Feedback analysis batch started: {analysis_batch_id}")
```

### Batch with Different Models

```python
def multi_model_comparison_batch(prompt: str) -> str:
    """Compare responses from different models in a single batch"""

    models = ["claude-haiku-3-20241022", "claude-sonnet-4-20250514"]

    batch_requests = []

    for model in models:
        request = {
            "custom_id": f"model-{model}",
            "method": "POST",
            "url": "/v1/messages",
            "body": {
                "model": model,
                "max_tokens": 1024,
                "messages": [
                    {"role": "user", "content": prompt}
                ]
            }
        }
        batch_requests.append(request)

    batch = client.messages.batches.create(requests=batch_requests)
    return batch.id

# Usage
comparison_batch_id = multi_model_comparison_batch(
    "Explain quantum computing in simple terms."
)
print(f"Model comparison batch started: {comparison_batch_id}")
```

### Batch Management

```python
class BatchManager:
    def __init__(self, client: Anthropic):
        self.client = client

    def list_active_batches(self) -> List[Any]:
        """List all active batches"""
        batches = self.client.messages.batches.list()
        active_batches = [
            batch for batch in batches
            if batch.processing_status in ["validating", "in_progress"]
        ]
        return active_batches

    def cancel_batch(self, batch_id: str) -> bool:
        """Cancel a batch if it's still processing"""
        try:
            batch = self.client.messages.batches.retrieve(batch_id)

            if batch.processing_status in ["validating", "in_progress"]:
                self.client.messages.batches.cancel(batch_id)
                print(f"Batch {batch_id} cancellation initiated")
                return True
            else:
                print(f"Cannot cancel batch {batch_id} - status: {batch.processing_status}")
                return False

        except Exception as e:
            print(f"Error cancelling batch {batch_id}: {e}")
            return False

    def get_batch_stats(self, batch_id: str) -> Dict[str, Any]:
        """Get detailed statistics for a batch"""
        batch = self.client.messages.batches.retrieve(batch_id)

        stats = {
            "id": batch.id,
            "status": batch.processing_status,
            "created_at": batch.created_at,
            "request_counts": batch.request_counts,
            "total_requests": sum(batch.request_counts.values()) if batch.request_counts else 0
        }

        if batch.ended_at:
            stats["ended_at"] = batch.ended_at

        if batch.results_url:
            stats["results_url"] = batch.results_url

        return stats

# Usage
manager = BatchManager(client)

# List active batches
active_batches = manager.list_active_batches()
print(f"Active batches: {len(active_batches)}")

# Get stats for a specific batch
if active_batches:
    batch_stats = manager.get_batch_stats(active_batches[0].id)
    print(f"Batch stats: {batch_stats}")
```

### Async Batch Processing

```python
import asyncio
from anthropic import AsyncAnthropic

async def async_batch_processing():
    client = AsyncAnthropic()

    # Create batch requests
    batch_requests = [
        {
            "custom_id": f"async-request-{i}",
            "method": "POST",
            "url": "/v1/messages",
            "body": {
                "model": "claude-sonnet-4-20250514",
                "max_tokens": 512,
                "messages": [
                    {"role": "user", "content": f"Generate a creative title for article #{i}"}
                ]
            }
        }
        for i in range(5)
    ]

    # Create batch
    batch = await client.messages.batches.create(requests=batch_requests)
    print(f"Async batch created: {batch.id}")

    # Monitor progress
    while True:
        batch_status = await client.messages.batches.retrieve(batch.id)
        print(f"Status: {batch_status.processing_status}")

        if batch_status.processing_status in ["completed", "failed", "cancelled"]:
            break

        await asyncio.sleep(5)

    return batch_status

# Run async batch
batch_result = asyncio.run(async_batch_processing())
print(f"Final batch status: {batch_result.processing_status}")
```

### Error Handling in Batches

```python
def robust_batch_creation(requests: List[Dict[str, Any]], max_retries: int = 3) -> Optional[Any]:
    """Create a batch with error handling and retries"""

    for attempt in range(max_retries):
        try:
            batch = client.messages.batches.create(requests=requests)
            print(f"Batch created successfully on attempt {attempt + 1}")
            return batch

        except Exception as e:
            print(f"Attempt {attempt + 1} failed: {e}")

            if attempt < max_retries - 1:
                # Wait before retrying
                time.sleep(2 ** attempt)  # Exponential backoff
            else:
                print("All attempts failed")
                return None

# Usage with error handling
batch_requests = [
    {
        "custom_id": "safe-request-1",
        "method": "POST",
        "url": "/v1/messages",
        "body": {
            "model": "claude-sonnet-4-20250514",
            "max_tokens": 1024,
            "messages": [
                {"role": "user", "content": "Hello world"}
            ]
        }
    }
]

batch = robust_batch_creation(batch_requests)
if batch:
    print(f"Batch created: {batch.id}")
else:
    print("Failed to create batch after all retries")
```