0
# Messaging
1
2
Core messaging functionality including Message classes for user communication, Step class for execution tracking and observability, and error handling mechanisms. These components form the foundation of Chainlit's conversational AI capabilities.
3
4
## Capabilities
5
6
### Message Communication
7
8
Send rich messages with content, media attachments, and interactive elements to users in the chat interface.
9
10
```python { .api }
11
import chainlit as cl
12
13
class Message:
14
"""
15
Main class for sending messages to the UI with support for rich content,
16
actions, and elements.
17
18
Args:
19
content: Union[str, Dict] - Message content (text, dict, or JSON)
20
author: Optional[str] - Message author (defaults to config.ui.name)
21
language: Optional[str] - Code language for syntax highlighting
22
actions: Optional[List[Action]] - Interactive buttons
23
elements: Optional[List[ElementBased]] - Attached media/files
24
type: MessageStepType - Message type ("assistant_message", "user_message")
25
metadata: Optional[Dict] - Custom metadata
26
tags: Optional[List[str]] - Message tags
27
id: Optional[str] - Message ID (auto-generated if not provided)
28
parent_id: Optional[str] - Parent message/step ID for nesting
29
command: Optional[str] - Optional command identifier
30
created_at: Optional[str] - Creation timestamp (auto-generated if not provided)
31
32
Returns:
33
Message instance
34
"""
35
def __init__(
36
self,
37
content: Union[str, Dict] = "",
38
author: Optional[str] = None,
39
language: Optional[str] = None,
40
actions: Optional[List[Action]] = None,
41
elements: Optional[List[ElementBased]] = None,
42
type: MessageStepType = "assistant_message",
43
metadata: Optional[Dict] = None,
44
tags: Optional[List[str]] = None,
45
id: Optional[str] = None,
46
parent_id: Optional[str] = None,
47
command: Optional[str] = None,
48
created_at: Optional[str] = None
49
): ...
50
51
async def send(self) -> "Message":
52
"""Send the message to the UI."""
53
54
async def update(self) -> "Message":
55
"""Update an existing message in the UI."""
56
57
async def remove(self) -> None:
58
"""Remove the message from the UI."""
59
60
async def stream_token(self, token: str) -> None:
61
"""Stream a single token to the message for real-time display."""
62
```
63
64
Usage example:
65
66
```python
67
import chainlit as cl
68
69
@cl.on_message
70
async def main(message: cl.Message):
71
# Send a simple text message
72
await cl.Message("Hello! How can I help you?").send()
73
74
# Send a message with code highlighting
75
code_content = "def hello(): return 'world'"
76
await cl.Message(
77
content=code_content,
78
language="python",
79
author="Code Assistant"
80
).send()
81
82
# Send with metadata and tags
83
await cl.Message(
84
content="Processing your request...",
85
metadata={"request_id": "123"},
86
tags=["processing", "status"]
87
).send()
88
```
89
90
### Error Message Display
91
92
Display error messages with specialized formatting and error handling options.
93
94
```python { .api }
95
class ErrorMessage:
96
"""
97
Specialized message class for displaying errors in the UI.
98
99
Args:
100
content: str - Error message text
101
author: str - Author (defaults to config.ui.name)
102
fail_on_persist_error: bool - Whether to raise on persistence errors (default: False)
103
104
Returns:
105
ErrorMessage instance
106
"""
107
def __init__(
108
self,
109
content: str,
110
author: str = config.ui.name,
111
fail_on_persist_error: bool = False
112
): ...
113
114
async def send(self) -> "ErrorMessage":
115
"""Send the error message to the UI."""
116
```
117
118
### Interactive User Input
119
120
Prompt users for text input, file uploads, action selection, and custom element submission with timeout handling.
121
122
```python { .api }
123
class AskUserMessage:
124
"""
125
Interactive message that prompts user for text input.
126
127
Args:
128
content: str - Prompt text displayed to user
129
author: str - Message author (defaults to config.ui.name)
130
type: MessageStepType - Message type (default: "assistant_message")
131
timeout: int - Timeout in seconds (default: 60)
132
raise_on_timeout: bool - Whether to raise TimeoutError on timeout (default: False)
133
134
Returns:
135
Union[StepDict, None] - User's response or None if timeout
136
"""
137
def __init__(
138
self,
139
content: str,
140
author: str = config.ui.name,
141
type: MessageStepType = "assistant_message",
142
timeout: int = 60,
143
raise_on_timeout: bool = False
144
): ...
145
146
async def send(self) -> Optional[Dict]:
147
"""Send the prompt and wait for user response."""
148
149
class AskFileMessage:
150
"""
151
Interactive message for file uploads from users.
152
153
Args:
154
content: str - Upload prompt text
155
accept: Union[List[str], Dict[str, List[str]]] - Accepted MIME types
156
max_size_mb: int - Maximum file size (default: 2MB, max: 100MB)
157
max_files: int - Maximum number of files (default: 1, max: 10)
158
timeout: int - Timeout in seconds (default: 90)
159
160
Returns:
161
Union[List[AskFileResponse], None] - List of uploaded files or None
162
"""
163
def __init__(
164
self,
165
content: str,
166
accept: Union[List[str], Dict[str, List[str]]],
167
max_size_mb: int = 2,
168
max_files: int = 1,
169
timeout: int = 90
170
): ...
171
172
async def send(self) -> Optional[List]:
173
"""Send file upload prompt and wait for files."""
174
175
class AskActionMessage:
176
"""
177
Interactive message for action selection from predefined options.
178
179
Args:
180
content: str - Selection prompt text
181
actions: List[Action] - Available actions for user to choose
182
timeout: int - Timeout in seconds (default: 90)
183
184
Returns:
185
Union[AskActionResponse, None] - Selected action details or None
186
"""
187
def __init__(
188
self,
189
content: str,
190
actions: List[Action],
191
timeout: int = 90
192
): ...
193
194
async def send(self) -> Optional[Dict]:
195
"""Send action selection prompt and wait for choice."""
196
197
class AskElementMessage:
198
"""
199
Interactive message for custom element submission and interaction.
200
201
Args:
202
content: str - Prompt text displayed with element
203
element: CustomElement - Custom element to display
204
timeout: int - Timeout in seconds (default: 90)
205
206
Returns:
207
Union[AskElementResponse, None] - Element response data or None
208
"""
209
def __init__(
210
self,
211
content: str,
212
element: CustomElement,
213
timeout: int = 90
214
): ...
215
216
async def send(self) -> Optional[Dict]:
217
"""Send element prompt and wait for interaction."""
218
```
219
220
Usage examples for interactive messages:
221
222
```python
223
import chainlit as cl
224
225
@cl.on_message
226
async def handle_message(message: cl.Message):
227
# Ask for text input
228
user_input = await cl.AskUserMessage(
229
content="What's your name?",
230
timeout=30
231
).send()
232
233
if user_input:
234
name = user_input["content"]
235
await cl.Message(f"Hello, {name}!").send()
236
237
# Ask for file upload
238
files = await cl.AskFileMessage(
239
content="Please upload your document:",
240
accept=["application/pdf", "text/plain"],
241
max_size_mb=10
242
).send()
243
244
if files:
245
file = files[0]
246
await cl.Message(f"Received file: {file.name}").send()
247
248
# Ask for action selection
249
actions = [
250
cl.Action(name="option1", label="Option 1"),
251
cl.Action(name="option2", label="Option 2")
252
]
253
254
action_response = await cl.AskActionMessage(
255
content="Choose an option:",
256
actions=actions
257
).send()
258
259
if action_response:
260
await cl.Message(f"You chose: {action_response['name']}").send()
261
```
262
263
### Step Execution Tracking
264
265
Track execution steps and provide observability into AI processing workflows with automatic timing, input/output capture, and hierarchical organization.
266
267
```python { .api }
268
class Step:
269
"""
270
Core class for tracking execution steps and providing observability
271
into AI processing workflows.
272
273
Args:
274
name: Optional[str] - Step name for display (defaults to config.ui.name)
275
type: TrueStepType - Step type ("run", "llm", "tool", "embedding", etc.) (default: "undefined")
276
id: Optional[str] - Step ID (auto-generated if not provided)
277
parent_id: Optional[str] - Parent step ID for nesting
278
elements: Optional[List[Element]] - Attached elements
279
metadata: Optional[Dict] - Custom metadata
280
tags: Optional[List[str]] - Step tags for categorization
281
language: Optional[str] - Content language for syntax highlighting
282
default_open: Optional[bool] - Whether step is expanded by default
283
show_input: Union[bool, str] - Whether to show input ("json", True, False)
284
thread_id: Optional[str] - Thread ID (auto-set from context if not provided)
285
286
Returns:
287
Step instance
288
"""
289
def __init__(
290
self,
291
name: Optional[str] = None,
292
type: TrueStepType = "undefined",
293
id: Optional[str] = None,
294
parent_id: Optional[str] = None,
295
elements: Optional[List[Element]] = None,
296
metadata: Optional[Dict] = None,
297
tags: Optional[List[str]] = None,
298
language: Optional[str] = None,
299
default_open: Optional[bool] = False,
300
show_input: Union[bool, str] = "json",
301
thread_id: Optional[str] = None
302
): ...
303
304
# Properties automatically formatted for display
305
input: Any
306
output: Any
307
308
async def send(self) -> "Step":
309
"""Send the step to the UI for display."""
310
311
async def update(self) -> "Step":
312
"""Update the step in the UI."""
313
314
async def remove(self) -> None:
315
"""Remove the step from the UI."""
316
317
async def stream_token(self, token: str) -> None:
318
"""Stream output tokens to the step."""
319
320
# Context manager support for automatic step lifecycle
321
async def __aenter__(self) -> "Step":
322
"""Enter async context manager."""
323
324
async def __aexit__(self, exc_type, exc_val, exc_tb) -> None:
325
"""Exit async context manager."""
326
327
def __enter__(self) -> "Step":
328
"""Enter sync context manager."""
329
330
def __exit__(self, exc_type, exc_val, exc_tb) -> None:
331
"""Exit sync context manager."""
332
333
def step(
334
original_function: Optional[Callable] = None,
335
*,
336
name: Optional[str] = "",
337
type: TrueStepType = "undefined",
338
id: Optional[str] = None,
339
parent_id: Optional[str] = None,
340
tags: Optional[List[str]] = None,
341
metadata: Optional[Dict] = None,
342
language: Optional[str] = None,
343
show_input: Union[bool, str] = "json",
344
default_open: bool = False
345
):
346
"""
347
Decorator for automatic step creation and tracking of function execution.
348
349
Args:
350
original_function: Optional[Callable] - Function being decorated (internal)
351
name: Optional[str] - Step name (defaults to function name if empty)
352
type: TrueStepType - Step type (default: "undefined")
353
id: Optional[str] - Step ID (auto-generated if not provided)
354
parent_id: Optional[str] - Parent step ID for nesting
355
tags: Optional[List[str]] - Step tags for categorization
356
metadata: Optional[Dict] - Custom metadata
357
language: Optional[str] - Content language for syntax highlighting
358
show_input: Union[bool, str] - Input display mode (default: "json")
359
default_open: bool - Whether expanded by default
360
361
Usage:
362
@step
363
def my_function():
364
return "result"
365
366
@step(name="Custom Step", type="llm")
367
async def llm_call():
368
return "response"
369
"""
370
```
371
372
Usage examples for steps:
373
374
```python
375
import chainlit as cl
376
377
@cl.on_message
378
async def main(message: cl.Message):
379
# Using step as context manager
380
async with cl.Step(name="Processing", type="run") as step:
381
step.input = {"user_message": message.content}
382
383
# Simulate processing
384
result = await process_message(message.content)
385
step.output = {"result": result}
386
387
await cl.Message(result).send()
388
389
# Using step decorator
390
@cl.step(name="LLM Call", type="llm")
391
async def call_llm(prompt: str) -> str:
392
# Function automatically wrapped in step tracking
393
response = await openai_client.chat.completions.create(
394
model="gpt-3.5-turbo",
395
messages=[{"role": "user", "content": prompt}]
396
)
397
return response.choices[0].message.content
398
399
@cl.on_message
400
async def handle_message(message: cl.Message):
401
# Step automatically created and tracked
402
response = await call_llm(message.content)
403
await cl.Message(response).send()
404
405
# Nested steps example
406
@cl.on_message
407
async def complex_workflow(message: cl.Message):
408
async with cl.Step(name="Main Workflow", type="run") as main_step:
409
main_step.input = message.content
410
411
# Nested step for data processing
412
async with cl.Step(name="Data Processing", type="tool") as data_step:
413
processed_data = await process_data(message.content)
414
data_step.output = processed_data
415
416
# Nested step for LLM call
417
async with cl.Step(name="Generate Response", type="llm") as llm_step:
418
response = await generate_response(processed_data)
419
llm_step.output = response
420
421
main_step.output = response
422
await cl.Message(response).send()
423
```
424
425
## Core Types
426
427
```python { .api }
428
from typing import Union, Optional, List, Dict, Any, Literal
429
from dataclasses import dataclass
430
431
# Message and step types
432
MessageStepType = Literal["assistant_message", "user_message"]
433
434
TrueStepType = Literal[
435
"run", "llm", "tool", "embedding", "retrieval",
436
"rerank", "undefined", "assistant_message", "user_message"
437
]
438
439
# Response types for interactive messages
440
@dataclass
441
class AskFileResponse:
442
name: str
443
content: bytes
444
size: int
445
type: str
446
447
@dataclass
448
class AskActionResponse:
449
name: str
450
label: str
451
payload: Dict[str, Any]
452
453
@dataclass
454
class AskElementResponse:
455
data: Dict[str, Any]
456
```