pypi-anthropic

Description
The official Python library for the Anthropic API
Author
tessl
Last updated

How to use

npx @tessl/cli registry install tessl/pypi-anthropic@0.66.0

messages.md docs/

1
# Messages API
2
3
The Messages API is the primary interface for conversational interactions with Claude models. It supports multi-turn conversations, system prompts, tool use, streaming responses, and message batching with comprehensive type safety.
4
5
## Capabilities
6
7
### Message Creation
8
9
Create conversational messages with Claude models, supporting various content types, system prompts, and tool integration.
10
11
```python { .api }
12
def create(
13
max_tokens: int,
14
messages: List[MessageParam],
15
model: str,
16
*,
17
metadata: Optional[MetadataParam] = None,
18
service_tier: Optional[Literal["auto", "standard_only"]] = None,
19
stop_sequences: Optional[List[str]] = None,
20
stream: Optional[bool] = None,
21
system: Optional[str] = None,
22
temperature: Optional[float] = None,
23
thinking: Optional[ThinkingConfigParam] = None,
24
tool_choice: Optional[ToolChoiceParam] = None,
25
tools: Optional[List[ToolParam]] = None,
26
top_k: Optional[int] = None,
27
top_p: Optional[float] = None,
28
**kwargs
29
) -> Message
30
31
async def create(
32
max_tokens: int,
33
messages: List[MessageParam],
34
model: str,
35
*,
36
metadata: Optional[MetadataParam] = None,
37
service_tier: Optional[Literal["auto", "standard_only"]] = None,
38
stop_sequences: Optional[List[str]] = None,
39
stream: Optional[bool] = None,
40
system: Optional[str] = None,
41
temperature: Optional[float] = None,
42
thinking: Optional[ThinkingConfigParam] = None,
43
tool_choice: Optional[ToolChoiceParam] = None,
44
tools: Optional[List[ToolParam]] = None,
45
top_k: Optional[int] = None,
46
top_p: Optional[float] = None,
47
**kwargs
48
) -> Message
49
```
50
51
### Token Counting
52
53
Count tokens in messages before sending to estimate costs and ensure messages fit within model limits.
54
55
```python { .api }
56
def count_tokens(
57
messages: List[MessageParam],
58
model: str,
59
*,
60
system: Optional[str] = None,
61
tool_choice: Optional[ToolChoiceParam] = None,
62
tools: Optional[List[ToolParam]] = None,
63
**kwargs
64
) -> MessageTokensCount
65
66
async def count_tokens(
67
messages: List[MessageParam],
68
model: str,
69
*,
70
system: Optional[str] = None,
71
tool_choice: Optional[ToolChoiceParam] = None,
72
tools: Optional[List[ToolParam]] = None,
73
**kwargs
74
) -> MessageTokensCount
75
```
76
77
## Core Types
78
79
### Message Types
80
81
```python { .api }
82
class Message(TypedDict):
83
id: str
84
type: Literal["message"]
85
role: Literal["assistant"]
86
content: List[ContentBlock]
87
model: str
88
stop_reason: Optional[StopReason]
89
stop_sequence: Optional[str]
90
usage: Usage
91
92
class MessageParam(TypedDict):
93
role: Literal["user", "assistant"]
94
content: Union[str, List[ContentBlockParam]]
95
96
class MessageTokensCount(TypedDict):
97
input_tokens: int
98
cache_creation_input_tokens: Optional[int]
99
cache_read_input_tokens: Optional[int]
100
```
101
102
### Content Block Types
103
104
```python { .api }
105
class ContentBlock(TypedDict):
106
type: str
107
108
class TextBlock(ContentBlock):
109
type: Literal["text"]
110
text: str
111
112
class ToolUseBlock(ContentBlock):
113
type: Literal["tool_use"]
114
id: str
115
name: str
116
input: Dict[str, Any]
117
118
class ContentBlockParam(TypedDict):
119
type: str
120
121
class TextBlockParam(ContentBlockParam):
122
type: Literal["text"]
123
text: str
124
cache_control: Optional[CacheControlEphemeralParam]
125
126
class ImageBlockParam(ContentBlockParam):
127
type: Literal["image"]
128
source: Union[Base64ImageSourceParam, URLImageSourceParam]
129
cache_control: Optional[CacheControlEphemeralParam]
130
131
class DocumentBlockParam(ContentBlockParam):
132
type: Literal["document"]
133
source: Union[Base64PDFSourceParam, URLPDFSourceParam]
134
cache_control: Optional[CacheControlEphemeralParam]
135
136
class ToolUseBlockParam(ContentBlockParam):
137
type: Literal["tool_use"]
138
id: str
139
name: str
140
input: Dict[str, Any]
141
cache_control: Optional[CacheControlEphemeralParam]
142
143
class ToolResultBlockParam(ContentBlockParam):
144
type: Literal["tool_result"]
145
tool_use_id: str
146
content: Union[str, List[ContentBlockParam]]
147
is_error: Optional[bool]
148
cache_control: Optional[CacheControlEphemeralParam]
149
```
150
151
### Image and Document Sources
152
153
```python { .api }
154
class Base64ImageSourceParam(TypedDict):
155
type: Literal["base64"]
156
media_type: Literal["image/jpeg", "image/png", "image/gif", "image/webp"]
157
data: str
158
159
class URLImageSourceParam(TypedDict):
160
type: Literal["url"]
161
url: str
162
163
class Base64PDFSourceParam(TypedDict):
164
type: Literal["base64"]
165
media_type: Literal["application/pdf"]
166
data: str
167
168
class URLPDFSourceParam(TypedDict):
169
type: Literal["url"]
170
url: str
171
172
class PlainTextSourceParam(TypedDict):
173
type: Literal["text"]
174
text: str
175
```
176
177
### Usage and Metadata
178
179
```python { .api }
180
class Usage(TypedDict):
181
input_tokens: int
182
output_tokens: int
183
cache_creation_input_tokens: Optional[int]
184
cache_read_input_tokens: Optional[int]
185
186
class MetadataParam(TypedDict, total=False):
187
user_id: Optional[str]
188
189
class CacheControlEphemeralParam(TypedDict):
190
type: Literal["ephemeral"]
191
192
# StopReason is a string literal alias, not a structured object
StopReason = Literal["end_turn", "max_tokens", "stop_sequence", "tool_use"]
194
```
195
196
### Extended Thinking Configuration
197
198
```python { .api }
199
class ThinkingConfigParam(TypedDict, total=False):
200
type: Literal["enabled", "disabled"]
201
budget_tokens: Optional[int] # Required when type="enabled", must be ≥1024 and < max_tokens
202
203
class ThinkingConfigEnabledParam(TypedDict):
204
type: Literal["enabled"]
205
budget_tokens: int # Determines how many tokens Claude can use for internal reasoning
206
207
class ThinkingConfigDisabledParam(TypedDict):
208
type: Literal["disabled"]
209
```
210
211
## Usage Examples
212
213
### Basic Text Message
214
215
```python
216
from anthropic import Anthropic
217
218
client = Anthropic()
219
220
message = client.messages.create(
221
model="claude-sonnet-4-20250514",
222
max_tokens=1024,
223
messages=[
224
{"role": "user", "content": "What is the capital of France?"}
225
]
226
)
227
228
print(message.content[0].text)
229
```
230
231
### Multi-Turn Conversation
232
233
```python
234
messages = [
235
{"role": "user", "content": "Hello, can you help me with Python?"},
236
{"role": "assistant", "content": "Of course! I'd be happy to help you with Python. What specific topic or problem would you like assistance with?"},
237
{"role": "user", "content": "How do I read a CSV file?"}
238
]
239
240
message = client.messages.create(
241
model="claude-sonnet-4-20250514",
242
max_tokens=1024,
243
messages=messages
244
)
245
```
246
247
### System Prompt
248
249
```python
250
message = client.messages.create(
251
model="claude-sonnet-4-20250514",
252
max_tokens=1024,
253
system="You are a helpful coding assistant. Always provide code examples when relevant.",
254
messages=[
255
{"role": "user", "content": "How do I sort a list in Python?"}
256
]
257
)
258
```
259
260
### Image Input
261
262
```python
263
import base64
264
265
# Read and encode image
266
with open("image.jpg", "rb") as img_file:
267
img_data = base64.b64encode(img_file.read()).decode()
268
269
message = client.messages.create(
270
model="claude-sonnet-4-20250514",
271
max_tokens=1024,
272
messages=[
273
{
274
"role": "user",
275
"content": [
276
{
277
"type": "image",
278
"source": {
279
"type": "base64",
280
"media_type": "image/jpeg",
281
"data": img_data
282
}
283
},
284
{
285
"type": "text",
286
"text": "What do you see in this image?"
287
}
288
]
289
}
290
]
291
)
292
```
293
294
### PDF Document Input
295
296
```python
297
import base64
298
299
# Read and encode PDF
300
with open("document.pdf", "rb") as pdf_file:
301
pdf_data = base64.b64encode(pdf_file.read()).decode()
302
303
message = client.messages.create(
304
model="claude-sonnet-4-20250514",
305
max_tokens=1024,
306
messages=[
307
{
308
"role": "user",
309
"content": [
310
{
311
"type": "document",
312
"source": {
313
"type": "base64",
314
"media_type": "application/pdf",
315
"data": pdf_data
316
}
317
},
318
{
319
"type": "text",
320
"text": "Summarize this document"
321
}
322
]
323
}
324
]
325
)
326
```
327
328
### Token Counting Example
329
330
```python
331
# Count tokens before sending
332
token_count = client.messages.count_tokens(
333
model="claude-sonnet-4-20250514",
334
messages=[
335
{"role": "user", "content": "What is the capital of France?"}
336
]
337
)
338
339
print(f"Input tokens: {token_count.input_tokens}")
340
341
if token_count.input_tokens < 4000:  # Example budget threshold for this request
342
message = client.messages.create(
343
model="claude-sonnet-4-20250514",
344
max_tokens=1024,
345
messages=[
346
{"role": "user", "content": "What is the capital of France?"}
347
]
348
)
349
```
350
351
### Streaming Messages
352
353
```python
354
with client.messages.stream(
355
model="claude-sonnet-4-20250514",
356
max_tokens=1024,
357
messages=[
358
{"role": "user", "content": "Write a short story"}
359
]
360
) as stream:
361
for text in stream.text_stream:
362
print(text, end="", flush=True)
363
```
364
365
### Async Usage
366
367
```python
368
import asyncio
369
from anthropic import AsyncAnthropic
370
371
async def chat():
372
client = AsyncAnthropic()
373
374
message = await client.messages.create(
375
model="claude-sonnet-4-20250514",
376
max_tokens=1024,
377
messages=[
378
{"role": "user", "content": "Hello!"}
379
]
380
)
381
382
return message.content[0].text
383
384
result = asyncio.run(chat())
385
```
386
387
### Extended Thinking Example
388
389
```python
390
# Enable extended thinking for complex analysis
391
message = client.messages.create(
392
model="claude-sonnet-4-20250514",
393
max_tokens=4000,
394
thinking={
395
"type": "enabled",
396
"budget_tokens": 2000 # Allow Claude 2000 tokens for internal reasoning
397
},
398
messages=[
399
{"role": "user", "content": "Analyze this complex business problem and provide a detailed solution..."}
400
]
401
)
402
```
403
404
### Service Tier Example
405
406
```python
407
# Use priority capacity when available
408
message = client.messages.create(
409
model="claude-sonnet-4-20250514",
410
max_tokens=1024,
411
service_tier="auto", # Use priority capacity if available, falling back to standard
412
messages=[
413
{"role": "user", "content": "Urgent request requiring priority processing"}
414
]
415
)
416
```