pypi-anthropic

Description
The official Python library for the Anthropic API
Author
tessl
Last updated

How to use

npx @tessl/cli registry install tessl/pypi-anthropic@0.66.0

models.md docs/

# Models API

The Models API provides access to information about available Claude models, including their capabilities, context limits, and metadata. This helps developers select appropriate models for different use cases and understand model-specific features.

## Capabilities

### Model Listing

Retrieve a list of all available Claude models with their specifications and capabilities.
```python { .api }
12
def list(**kwargs) -> List[Model]
13
async def list(**kwargs) -> List[Model]
14
```
## Core Types

### Model Types
```python { .api }
class Model(TypedDict):
    """A single Claude model entry as returned by the Models API."""
    id: str
    type: Literal["model"]
    display_name: str
    created_at: str

class ModelInfo(TypedDict):
    """Detailed model information; mirrors the Model shape."""
    id: str
    type: Literal["model"]
    display_name: str
    created_at: str

class ModelParam(TypedDict):
    """Parameter wrapper for passing a model identifier."""
    model: str

class ModelListParams(TypedDict, total=False):
    """Optional query parameters for model listing (none currently)."""
    pass
```
## Usage Examples

### List Available Models
```python
from anthropic import Anthropic

client = Anthropic()

# Get all available models
models = client.models.list()

for model in models:
    print(f"Model ID: {model.id}")
    print(f"Display Name: {model.display_name}")
    print(f"Created: {model.created_at}")
    print("---")
```
### Model Selection Helper
```python
def select_model_by_capability(models: List[Model], capability: str) -> Optional[str]:
    """Select a model based on capability requirements.

    Returns the ID of the first model in *models* that appears in the
    (example) capability mapping for *capability*, or None if no model
    qualifies or the capability is unknown.
    """

    # Model capability mapping (example)
    capabilities = {
        "vision": ["claude-sonnet-4-20250514", "claude-haiku-3-20241022"],
        "function_calling": ["claude-sonnet-4-20250514", "claude-haiku-3-20241022"],
        "long_context": ["claude-sonnet-4-20250514"],
        "speed": ["claude-haiku-3-20241022"],
    }

    # Unknown capabilities yield an empty list, so the loop falls through to None
    suitable_models = capabilities.get(capability, [])

    for model in models:
        if model.id in suitable_models:
            return model.id

    return None
# Usage
models = client.models.list()
vision_model = select_model_by_capability(models, "vision")

if vision_model:
    print(f"Selected model for vision: {vision_model}")

    # Use the selected model
    message = client.messages.create(
        model=vision_model,
        max_tokens=1024,
        messages=[
            {
                "role": "user",
                "content": [
                    {"type": "text", "text": "What's in this image?"},
                    {"type": "image", "source": {"type": "url", "url": "https://example.com/image.jpg"}}
                ]
            }
        ]
    )
```
### Model Comparison
```python
def compare_models(models: List[Model]) -> None:
    """Compare available models and their characteristics.

    Prints a human-readable summary of each model, newest first, with
    capability hints inferred from the model ID.
    """

    print("Available Claude Models:")
    print("=" * 50)

    # Sort models by creation date (newest first)
    sorted_models = sorted(models, key=lambda m: m.created_at, reverse=True)

    for model in sorted_models:
        print(f"ID: {model.id}")
        print(f"Name: {model.display_name}")
        print(f"Created: {model.created_at}")

        # Add capability hints based on model ID
        if "haiku" in model.id.lower():
            print("• Optimized for: Speed and efficiency")
            print("• Best for: Quick responses, simple tasks")
        elif "sonnet" in model.id.lower():
            print("• Optimized for: Balance of capability and speed")
            print("• Best for: Most general use cases")
        elif "opus" in model.id.lower():
            print("• Optimized for: Maximum capability")
            print("• Best for: Complex reasoning, creative tasks")

        print("-" * 30)
# Usage
models = client.models.list()
compare_models(models)
```
### Async Model Listing
```python
import asyncio
from anthropic import AsyncAnthropic

async def list_models_async():
    """List available models using the asynchronous client."""
    client = AsyncAnthropic()

    models = await client.models.list()

    print(f"Found {len(models)} available models:")
    for model in models:
        print(f"- {model.display_name} ({model.id})")

    return models

# Run async
models = asyncio.run(list_models_async())
```
### Model Validation
```python
def validate_model_for_task(model_id: str, task_type: str) -> tuple[bool, str]:
    """Validate if a model is suitable for a specific task type.

    Returns (is_valid, message). Relies on the module-level `client`
    to fetch the currently available model IDs.
    """

    # Get available models
    models = client.models.list()
    model_ids = [m.id for m in models]

    if model_id not in model_ids:
        return False, f"Model {model_id} is not available"

    # Task-specific validation
    if task_type == "vision" and "haiku" not in model_id:
        return True, "Model supports vision tasks"
    elif task_type == "vision" and "haiku" in model_id:
        return False, "This model version may have limited vision capabilities"
    elif task_type == "long_document" and "opus" not in model_id and "sonnet" not in model_id:
        return False, "Consider using a higher-capability model for long documents"

    return True, "Model is suitable for this task"
# Usage
is_valid, message = validate_model_for_task("claude-sonnet-4-20250514", "vision")
print(f"Valid: {is_valid}, Message: {message}")

if is_valid:
    # Proceed with using the model
    response = client.messages.create(
        model="claude-sonnet-4-20250514",
        max_tokens=1024,
        messages=[{"role": "user", "content": "Hello!"}]
    )
```
### Model Metadata Tracking
```python
from datetime import datetime
# NOTE: List added — the original example used List[str] without importing it
from typing import Any, Dict, List

class ModelTracker:
    """Track per-model usage counts and enrich model listings with metadata."""

    def __init__(self, client: Anthropic):
        self.client = client
        # Maps model_id -> {"count": int, "last_used": ISO timestamp or None}
        self.usage_stats: Dict[str, Dict[str, Any]] = {}

    def get_models_with_metadata(self) -> Dict[str, Dict[str, Any]]:
        """Get models with additional metadata and usage tracking"""
        models = self.client.models.list()

        model_metadata = {}
        for model in models:
            model_metadata[model.id] = {
                "display_name": model.display_name,
                "created_at": model.created_at,
                "usage_count": self.usage_stats.get(model.id, {}).get("count", 0),
                "last_used": self.usage_stats.get(model.id, {}).get("last_used"),
                "estimated_cost_tier": self._get_cost_tier(model.id),
                "capabilities": self._get_capabilities(model.id)
            }

        return model_metadata

    def track_usage(self, model_id: str):
        """Track model usage"""
        if model_id not in self.usage_stats:
            self.usage_stats[model_id] = {"count": 0, "last_used": None}

        self.usage_stats[model_id]["count"] += 1
        self.usage_stats[model_id]["last_used"] = datetime.now().isoformat()

    def _get_cost_tier(self, model_id: str) -> str:
        """Estimate cost tier based on model name"""
        if "haiku" in model_id.lower():
            return "low"
        elif "sonnet" in model_id.lower():
            return "medium"
        elif "opus" in model_id.lower():
            return "high"
        return "unknown"

    def _get_capabilities(self, model_id: str) -> List[str]:
        """Get model capabilities"""
        capabilities = ["text"]

        # Add capabilities based on model ID patterns
        if any(x in model_id for x in ["sonnet", "opus"]):
            capabilities.extend(["vision", "function_calling", "long_context"])
        elif "haiku" in model_id:
            capabilities.extend(["vision", "function_calling"])

        return capabilities
# Usage
tracker = ModelTracker(client)
models_metadata = tracker.get_models_with_metadata()

for model_id, metadata in models_metadata.items():
    print(f"Model: {metadata['display_name']}")
    print(f"Capabilities: {', '.join(metadata['capabilities'])}")
    # The metadata key produced by get_models_with_metadata is
    # "estimated_cost_tier" — using "cost_tier" here raised KeyError.
    print(f"Cost Tier: {metadata['estimated_cost_tier']}")
    print(f"Usage Count: {metadata['usage_count']}")
    print("---")

# Track usage when making requests
model_to_use = "claude-sonnet-4-20250514"
tracker.track_usage(model_to_use)

response = client.messages.create(
    model=model_to_use,
    max_tokens=1024,
    messages=[{"role": "user", "content": "Hello!"}]
)
```
### Model Selection Strategy
```python
def select_optimal_model(
    task_description: str,
    priority: str = "balanced",  # "speed", "quality", "cost", "balanced"
    require_vision: bool = False,
    require_function_calling: bool = False
) -> str:
    """Select the optimal model based on requirements.

    Filters the available models (fetched via the module-level `client`)
    by the requested capabilities, then picks by *priority*.

    Raises:
        ValueError: if no model satisfies the capability requirements.
    """

    models = client.models.list()

    # Filter models based on capabilities
    suitable_models = []

    for model in models:
        model_id = model.id.lower()

        # Check vision requirement
        if require_vision and "claude-instant" in model_id:
            continue  # Skip models without vision

        # Check function calling requirement
        if require_function_calling and "claude-instant" in model_id:
            continue  # Skip models without function calling

        suitable_models.append(model)

    if not suitable_models:
        raise ValueError("No models meet the specified requirements")

    # Select based on priority
    if priority == "speed":
        # Prefer Haiku models for speed
        for model in suitable_models:
            if "haiku" in model.id.lower():
                return model.id
    elif priority == "quality":
        # Prefer Opus models for quality
        for model in suitable_models:
            if "opus" in model.id.lower():
                return model.id
    elif priority == "cost":
        # Prefer Haiku models for cost efficiency
        for model in suitable_models:
            if "haiku" in model.id.lower():
                return model.id
    elif priority == "balanced":
        # Prefer Sonnet models for balance
        for model in suitable_models:
            if "sonnet" in model.id.lower():
                return model.id

    # Fallback to first suitable model
    return suitable_models[0].id
# Usage examples
speed_model = select_optimal_model(
    "Quick text generation",
    priority="speed"
)

vision_model = select_optimal_model(
    "Analyze an image",
    priority="quality",
    require_vision=True
)

function_model = select_optimal_model(
    "Call external APIs",
    priority="balanced",
    require_function_calling=True
)

print(f"Speed model: {speed_model}")
print(f"Vision model: {vision_model}")
print(f"Function calling model: {function_model}")
```