
# Callbacks

Event-driven hooks for handling application lifecycle, user interactions, and system events. Callbacks enable responsive conversational applications that react to user actions, authentication events, and system state changes.
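
At its simplest, a Chainlit app is just a couple of these hooks wired to decorators. A minimal sketch using the lifecycle and message hooks documented below:

```python
import chainlit as cl

@cl.on_chat_start
async def greet():
    # Runs once per new chat session
    await cl.Message("Hi! Ask me anything.").send()

@cl.on_message
async def echo(message: cl.Message):
    # Runs for every user message; echo it back
    await cl.Message(f"You said: {message.content}").send()
```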

## Capabilities

### Application Lifecycle

Manage application startup, shutdown, and global resource initialization.

```python { .api }
import chainlit as cl

@cl.on_app_startup
async def startup():
    """
    Hook executed when the Chainlit application starts.
    Use for initializing resources, loading models, setting up connections.

    Signature: Callable[[], Union[None, Awaitable[None]]]

    Returns:
        None - No return value expected
    """

@cl.on_app_shutdown
async def shutdown():
    """
    Hook executed when the Chainlit application shuts down.
    Use for cleanup, closing connections, saving final state.

    Signature: Callable[[], Union[None, Awaitable[None]]]

    Returns:
        None - No return value expected
    """
```

Usage examples for application lifecycle:

```python
import chainlit as cl
from some_ai_library import AIModel

# Global resources
ai_model = None
database_connection = None

@cl.on_app_startup
async def initialize_app():
    """Initialize global resources when app starts"""
    global ai_model, database_connection

    # Load AI model
    print("Loading AI model...")
    ai_model = AIModel.load("gpt-3.5-turbo")

    # Setup database connection
    database_connection = await connect_to_database()

    # Initialize caches
    await setup_redis_cache()

    print("Application startup complete!")

@cl.on_app_shutdown
async def cleanup_app():
    """Clean up resources when app shuts down"""
    global ai_model, database_connection

    # Save any pending data
    if database_connection:
        await database_connection.save_pending_data()
        await database_connection.close()

    # Clean up model resources
    if ai_model:
        ai_model.cleanup()

    print("Application shutdown complete!")
```

### Chat Session Lifecycle

Handle chat session events including start, resume, and end for managing conversation state.

```python { .api }
@cl.on_chat_start
async def start():
    """
    Hook executed when user connects or starts a new chat session.
    Use for initializing chat state, sending welcome messages.

    Signature: Callable[[], Any]

    Returns:
        Any - Return value ignored
    """

@cl.on_chat_resume
async def resume(thread_dict: ThreadDict):
    """
    Hook executed when user resumes an existing chat session.
    Use for restoring chat context, loading previous state.

    Args:
        thread_dict: ThreadDict - Previous thread metadata and history

    Signature: Callable[[ThreadDict], Any]

    Returns:
        Any - Return value ignored
    """

@cl.on_chat_end
async def end():
    """
    Hook executed when user disconnects from chat session.
    Use for cleanup, saving final state, logging.

    Signature: Callable[[], Any]

    Returns:
        Any - Return value ignored
    """

@cl.on_stop
async def stop():
    """
    Hook executed when user stops thread execution.
    Use for canceling ongoing operations, cleanup.

    Signature: Callable[[], Any]

    Returns:
        Any - Return value ignored
    """
```

Usage examples for chat lifecycle:

```python
import chainlit as cl
from datetime import datetime

@cl.on_chat_start
async def start_chat():
    """Initialize new chat session"""
    # Set up user session data
    cl.user_session.set("start_time", datetime.now().isoformat())
    cl.user_session.set("message_count", 0)
    cl.user_session.set("conversation_context", [])

    # Get user information
    user = cl.user_session.get("user")

    if user:
        welcome_msg = f"Welcome back, {user.display_name}! How can I help you today?"
    else:
        welcome_msg = "Hello! I'm your AI assistant. How can I help you?"

    await cl.Message(welcome_msg).send()

    # Send initial context or instructions
    await cl.Message(
        "πŸ’‘ Tip: You can ask me questions, upload files, or request help with various tasks."
    ).send()

@cl.on_chat_resume
async def resume_chat(thread_dict: dict):
    """Resume existing chat session"""
    # Extract thread information
    thread_id = thread_dict.get("id")
    message_history = thread_dict.get("messages", [])

    # Restore session context
    cl.user_session.set("thread_id", thread_id)
    cl.user_session.set("resumed", True)

    # Load conversation context from history
    context = []
    for msg in message_history[-5:]:  # Last 5 messages
        context.append({
            "content": msg.get("content", ""),
            "author": msg.get("author", ""),
            "timestamp": msg.get("createdAt", "")
        })

    cl.user_session.set("conversation_context", context)

    await cl.Message("Welcome back! I've restored our conversation context.").send()

@cl.on_chat_end
async def end_chat():
    """Clean up when chat session ends"""
    # Get session statistics
    start_time = cl.user_session.get("start_time")
    message_count = cl.user_session.get("message_count", 0)
    user = cl.user_session.get("user")

    # Log session data
    session_data = {
        "start_time": start_time,
        "end_time": datetime.now().isoformat(),
        "message_count": message_count,
        "user_id": user.identifier if user else "anonymous"
    }

    print(f"Chat session ended: {session_data}")

    # Clean up any resources specific to this session
    # (Global resources are cleaned up in on_app_shutdown)

@cl.on_stop
async def stop_execution():
    """Handle user stopping execution"""
    await cl.Message("⏹️ Execution stopped by user.").send()

    # Cancel any ongoing operations
    # Set flags to stop processing loops
    cl.user_session.set("stop_requested", True)
```
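
The `stop_requested` flag set in `stop_execution` only takes effect if long-running handlers check it. A minimal sketch of a streaming handler that honors the flag; the `generate_chunks` generator is hypothetical and stands in for whatever produces your output:

```python
import chainlit as cl

@cl.on_message
async def long_running_task(message: cl.Message):
    """Stream a long response, bailing out if the user presses stop."""
    cl.user_session.set("stop_requested", False)
    msg = cl.Message(content="")

    for chunk in generate_chunks(message.content):  # hypothetical generator
        # @cl.on_stop sets this flag; check it between units of work
        if cl.user_session.get("stop_requested"):
            break
        await msg.stream_token(chunk)
        await cl.sleep(0)  # yield control so the stop event can be handled

    await msg.send()
```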

### Message Handling

Process incoming user messages and provide responses.

```python { .api }
@cl.on_message
async def handle_message(message: cl.Message):
    """
    Hook executed for each incoming user message.
    Main handler for processing user input and generating responses.

    Args:
        message: cl.Message - The user's message object containing content and metadata

    Signature: Callable[[Message], Any] or Callable[[], Any]

    Returns:
        Any - Return value ignored
    """
```

Usage example for message handling:

```python
import chainlit as cl

@cl.on_message
async def main(message: cl.Message):
    """Process incoming user messages"""
    # Update message count
    count = cl.user_session.get("message_count", 0) + 1
    cl.user_session.set("message_count", count)

    # Get message content
    user_input = message.content.strip()

    # Handle different types of messages
    if user_input.lower().startswith("/help"):
        await show_help()
    elif user_input.lower().startswith("/stats"):
        await show_stats()
    elif len(user_input) == 0:
        await cl.Message("Please send a message with some content.").send()
    else:
        # Process regular message with AI
        async with cl.Step(name="Processing", type="llm") as step:
            step.input = user_input
            response = await process_with_ai(user_input)
            step.output = response

        await cl.Message(response).send()

async def show_help():
    """Show help information"""
    help_text = """
**Available Commands:**
- `/help` - Show this help message
- `/stats` - Show session statistics
- Upload files for analysis
- Ask any question for AI assistance
"""
    await cl.Message(help_text).send()

async def show_stats():
    """Show session statistics"""
    count = cl.user_session.get("message_count", 0)
    start_time = cl.user_session.get("start_time", "Unknown")

    stats = f"""
**Session Statistics:**
- Messages sent: {count}
- Session started: {start_time}
- Current profile: {cl.user_session.get('chat_profile', 'default')}
"""
    await cl.Message(stats).send()
```
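
The example above delegates to a `process_with_ai` helper that is not defined here. One possible shape for it, shown purely as an illustrative sketch that reuses the hypothetical `ai_model` from the startup example; a real implementation would call your actual LLM client:

```python
import chainlit as cl

async def process_with_ai(user_input: str) -> str:
    """Illustrative only: generate a reply using the globally loaded model."""
    # Build a prompt from the recent conversation context stored in the session
    context = cl.user_session.get("conversation_context", [])
    history = "\n".join(m["content"] for m in context[-5:])

    # `ai_model` and its `generate` method are assumptions carried over from
    # the startup example; substitute your own client call here.
    reply = await ai_model.generate(prompt=f"{history}\n{user_input}")
    return reply
```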

### Audio Processing

Handle real-time audio input with chunk-based processing.

```python { .api }
@cl.on_audio_start
async def start_audio():
    """
    Hook executed when user starts audio input.
    Use for initializing audio processing resources.

    Signature: Callable[[], Any]

    Returns:
        Any - Return value ignored
    """

@cl.on_audio_chunk
async def handle_audio_chunk(chunk: InputAudioChunk):
    """
    Hook executed for each audio data chunk during recording.
    Use for real-time audio processing, speech recognition.

    Args:
        chunk: InputAudioChunk - Audio data chunk with metadata

    Signature: Callable[[InputAudioChunk], Any]

    Returns:
        Any - Return value ignored
    """

@cl.on_audio_end
async def end_audio():
    """
    Hook executed when audio input session ends.
    Use for final audio processing, cleanup.

    Signature: Callable[[], Any]

    Returns:
        Any - Return value ignored
    """
```

Usage examples for audio processing:

```python
import chainlit as cl

# Global audio processing state
audio_buffer = []
speech_recognizer = None

@cl.on_audio_start
async def start_audio_session():
    """Initialize audio processing"""
    global audio_buffer, speech_recognizer

    audio_buffer = []
    # Initialize speech recognition service
    speech_recognizer = initialize_speech_service()

    await cl.Message("🎀 Audio recording started. Speak now...").send()

@cl.on_audio_chunk
async def process_audio_chunk(chunk: cl.InputAudioChunk):
    """Process incoming audio chunks"""
    global audio_buffer, speech_recognizer

    # Add chunk to buffer
    audio_buffer.append(chunk.data)

    # Real-time processing for certain chunk properties
    if chunk.isStart:
        print(f"Audio started - MIME type: {chunk.mimeType}")

    # Process chunk with speech recognizer if available
    if speech_recognizer and len(chunk.data) > 1024:  # Minimum chunk size
        try:
            # Attempt partial transcription
            partial_text = await speech_recognizer.process_chunk(chunk.data)
            if partial_text:
                # Update live transcription (optional)
                cl.user_session.set("partial_transcription", partial_text)
        except Exception as e:
            print(f"Audio processing error: {e}")

@cl.on_audio_end
async def finalize_audio():
    """Process complete audio recording"""
    global audio_buffer, speech_recognizer

    if not audio_buffer:
        await cl.Message("No audio data received.").send()
        return

    # Combine all chunks
    complete_audio = b"".join(audio_buffer)

    # Process complete audio
    try:
        async with cl.Step(name="Speech Recognition", type="tool") as step:
            step.input = f"Processing {len(complete_audio)} bytes of audio"

            # Transcribe complete audio
            transcription = await speech_recognizer.transcribe(complete_audio)
            step.output = transcription

        if transcription:
            await cl.Message(f"🎀 You said: {transcription}").send()

            # Process transcription as a regular message
            await cl.Message(f"Processing your spoken request: {transcription}").send()
        else:
            await cl.Message("Could not transcribe audio. Please try again.").send()

    except Exception as e:
        await cl.Message(f"Audio processing failed: {str(e)}").send()

    finally:
        # Cleanup
        audio_buffer = []
        speech_recognizer = None
```

### User Interactions and Feedback

Handle user actions, feedback, and custom interactions.

```python { .api }
@cl.action_callback("action_name")
async def handle_action(action: cl.Action):
    """
    Register callback for specific action button clicks.
    Decorator factory that creates action-specific handlers.

    Args:
        action: cl.Action - Action object containing name, payload, and metadata

    Signature: action_callback(name: str) -> Callable[[Action], Any]

    Usage:
        @cl.action_callback("button_name")
        async def my_handler(action): ...

    Returns:
        Any - Return value ignored
    """

@cl.on_feedback
async def handle_feedback(feedback: Feedback):
    """
    Hook executed when user provides feedback on messages.
    Use for collecting user satisfaction data, improving responses.

    Args:
        feedback: Feedback - Feedback object with rating and optional comment

    Signature: Callable[[Feedback], Any]

    Returns:
        Any - Return value ignored
    """

@cl.author_rename
async def rename_author(author: str) -> str:
    """
    Transform author names for display in the UI.
    Use for customizing how author names appear to users.

    Args:
        author: str - Original author name

    Signature: Callable[[str], Awaitable[str]]

    Returns:
        str - Transformed display name
    """
```

Usage examples for user interactions:

```python
import chainlit as cl
from datetime import datetime

# Action callback examples
@cl.action_callback("approve")
async def handle_approval(action: cl.Action):
    """Handle approval action"""
    request_id = action.payload.get("request_id")
    await cl.Message(f"βœ… Request {request_id} approved!").send()

    # Process approval logic
    await process_approval(request_id)

@cl.action_callback("reject")
async def handle_rejection(action: cl.Action):
    """Handle rejection action"""
    request_id = action.payload.get("request_id")
    reason = action.payload.get("reason", "No reason provided")

    await cl.Message(f"❌ Request {request_id} rejected: {reason}").send()

@cl.action_callback("more_info")
async def show_more_info(action: cl.Action):
    """Show additional information"""
    item_id = action.payload.get("item_id")

    # Fetch detailed information
    details = await get_item_details(item_id)

    await cl.Message(f"**Details for {item_id}:**\n{details}").send()

# Send message with actions
@cl.on_message
async def send_with_actions(message: cl.Message):
    """Example of sending messages with action buttons"""
    actions = [
        cl.Action(
            name="approve",
            label="Approve",
            payload={"request_id": "REQ-123"},
            icon="check"
        ),
        cl.Action(
            name="reject",
            label="Reject",
            payload={"request_id": "REQ-123", "reason": "Incomplete"},
            icon="x"
        ),
        cl.Action(
            name="more_info",
            label="More Info",
            payload={"item_id": "ITEM-456"},
            icon="info"
        )
    ]

    await cl.Message(
        "Please review this request:",
        actions=actions
    ).send()

# Feedback handling
@cl.on_feedback
async def collect_feedback(feedback):
    """Process user feedback on messages"""
    rating = feedback.rating  # 1-5 stars or thumbs up/down
    comment = feedback.comment  # Optional text comment
    message_id = feedback.messageId  # ID of message being rated

    # Store feedback in database
    user = cl.user_session.get("user")
    feedback_data = {
        "message_id": message_id,
        "rating": rating,
        "comment": comment,
        "user_id": user.identifier if user else "anonymous",
        "timestamp": datetime.now().isoformat()
    }

    await store_feedback(feedback_data)

    # Thank the user
    if rating >= 4:
        await cl.Message("Thanks for the positive feedback! 😊").send()
    else:
        await cl.Message("Thanks for the feedback. We'll work to improve!").send()

# Author name transformation
@cl.author_rename
async def customize_author_names(author: str) -> str:
    """Customize how author names appear in the UI"""
    # Add emoji prefixes based on author type
    if author.startswith("AI"):
        return f"πŸ€– {author}"
    elif author.startswith("System"):
        return f"βš™οΈ {author}"
    elif author.startswith("Admin"):
        return f"πŸ‘‘ {author}"
    else:
        return f"πŸ‘€ {author}"
```

### Window Communication

Handle browser window communication for advanced integrations.

```python { .api }
@cl.on_window_message
async def handle_window_message(message: str):
    """
    Hook for JavaScript postMessage events from the browser window.
    Use for custom frontend-backend communication.

    Args:
        message: str - Message content from browser via postMessage

    Signature: Callable[[str], Any]

    Returns:
        Any - Return value ignored
    """

async def send_window_message(data: Any) -> None:
    """
    Send data to browser window via postMessage API.
    Use for pushing data to custom frontend components.

    Args:
        data: Any - Data to send to browser window

    Returns:
        None
    """
```

Usage examples for window communication:

```python
import chainlit as cl
import json

@cl.on_window_message
async def handle_browser_message(message: str):
    """Handle messages from browser JavaScript"""
    try:
        # Parse JSON message from browser
        data = json.loads(message)

        message_type = data.get("type")

        if message_type == "user_action":
            # Handle custom user action from frontend
            action_data = data.get("payload", {})
            await handle_custom_action(action_data)

        elif message_type == "page_visibility":
            # Handle page visibility changes
            is_visible = data.get("visible", True)
            if not is_visible:
                cl.user_session.set("page_hidden", True)
            else:
                cl.user_session.set("page_hidden", False)
                await cl.Message("Welcome back!").send()

        elif message_type == "custom_widget_data":
            # Handle data from custom widgets
            widget_data = data.get("data", {})
            await process_widget_data(widget_data)

    except json.JSONDecodeError:
        print(f"Invalid JSON message from browser: {message}")

async def send_data_to_browser():
    """Send data to browser for custom components"""
    # Send configuration to frontend
    config_data = {
        "type": "config_update",
        "theme": "dark",
        "language": "en",
        "features": ["voice_input", "file_upload"]
    }

    await cl.send_window_message(config_data)

    # Send real-time updates
    status_data = {
        "type": "status_update",
        "processing": True,
        "progress": 75,
        "message": "Processing your request..."
    }

    await cl.send_window_message(status_data)

@cl.on_message
async def demo_window_communication(message: cl.Message):
    """Demo of bidirectional window communication"""
    # Send processing status to browser
    await cl.send_window_message({
        "type": "processing_start",
        "message_id": "msg_123"
    })

    # Simulate processing
    await cl.sleep(2)

    # Send completion status
    await cl.send_window_message({
        "type": "processing_complete",
        "message_id": "msg_123",
        "result": "Processing completed successfully"
    })

    await cl.Message("Task completed!").send()
```

## Core Types

```python { .api }
from typing import Dict, Any, Optional
from dataclasses import dataclass

# Thread and session types
ThreadDict = Dict[str, Any]  # Contains thread metadata and message history

# Audio chunk data structure
@dataclass
class InputAudioChunk:
    isStart: bool        # Whether this is the first chunk
    mimeType: str        # Audio MIME type (e.g., "audio/wav")
    elapsedTime: float   # Elapsed time in seconds
    data: bytes          # Audio data bytes

# Feedback data structure
@dataclass
class Feedback:
    messageId: str          # ID of the message being rated
    rating: int             # Rating value (1-5 or thumbs up/down)
    comment: Optional[str]  # Optional text comment

# Action response structure
@dataclass
class ActionResponse:
    name: str                # Action identifier
    payload: Dict[str, Any]  # Action payload data
```
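
For stronger type checking in your own handlers, `ThreadDict` can be imported from `chainlit.types` and used to annotate the resume hook. A brief sketch; the exact keys present on the dict depend on your data layer:

```python
import chainlit as cl
from chainlit.types import ThreadDict

@cl.on_chat_resume
async def on_resume(thread: ThreadDict):
    """Annotating the hook documents the expected payload shape."""
    # Typical keys include "id" plus the stored message/step history,
    # depending on the configured persistence layer
    print(f"Resuming thread {thread.get('id')}")
```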