CtrlK
BlogDocsLog inGet started
Tessl Logo

tessl/pypi-chainlit

Build production-ready conversational AI applications in minutes with rich UI components and LLM integrations

Pending

Quality

Pending

Does it follow best practices?

Impact

Pending

No eval scenarios have been run

Overview
Eval results
Files

docs/callbacks.md

Callbacks

Event-driven hooks for handling application lifecycle, user interactions, and system events. Callbacks enable responsive conversational applications that react to user actions, authentication events, and system state changes.

Capabilities

Application Lifecycle

Manage application startup, shutdown, and global resource initialization.

import chainlit as cl

@cl.on_app_startup
async def startup():
    """
    Hook executed once when the Chainlit application starts.
    Use for initializing resources, loading models, setting up connections.

    Signature: Callable[[], Union[None, Awaitable[None]]]
    (both sync and async callables are accepted, per the signature above)

    Returns:
        None - No return value expected
    """

@cl.on_app_shutdown
async def shutdown():
    """
    Hook executed once when the Chainlit application shuts down.
    Use for cleanup, closing connections, saving final state.

    Signature: Callable[[], Union[None, Awaitable[None]]]
    (both sync and async callables are accepted, per the signature above)

    Returns:
        None - No return value expected
    """

Usage examples for application lifecycle:

import chainlit as cl
import asyncio
from some_ai_library import AIModel

# Global resources shared by the startup/shutdown hooks below.
ai_model = None
database_connection = None

@cl.on_app_startup
async def initialize_app():
    """Initialize global resources when app starts"""
    global ai_model, database_connection
    
    # Load AI model
    print("Loading AI model...")
    ai_model = AIModel.load("gpt-3.5-turbo")
    
    # Setup database connection
    # NOTE(review): connect_to_database and setup_redis_cache are assumed
    # to be defined elsewhere in the application — not shown here.
    database_connection = await connect_to_database()
    
    # Initialize caches
    await setup_redis_cache()
    
    print("Application startup complete!")

@cl.on_app_shutdown
async def cleanup_app():
    """Clean up resources when app shuts down"""
    global ai_model, database_connection
    
    # Save any pending data before closing the connection.
    if database_connection:
        await database_connection.save_pending_data()
        await database_connection.close()
    
    # Clean up model resources  
    if ai_model:
        ai_model.cleanup()
    
    print("Application shutdown complete!")

Chat Session Lifecycle

Handle chat session events including start, resume, and end for managing conversation state.

@cl.on_chat_start
async def start():
    """
    Hook executed when a user connects or starts a new chat session.
    Use for initializing chat state, sending welcome messages.
    
    Signature: Callable[[], Any]
    
    Returns:
        Any - Return value ignored
    """

@cl.on_chat_resume
async def resume(thread_dict: ThreadDict):
    """
    Hook executed when a user resumes an existing chat session.
    Use for restoring chat context, loading previous state.
    
    Args:
        thread_dict: ThreadDict - Previous thread metadata and history
        
    Signature: Callable[[ThreadDict], Any] 
    
    Returns:
        Any - Return value ignored
    """

@cl.on_chat_end
async def end():
    """
    Hook executed when a user disconnects from the chat session.
    Use for cleanup, saving final state, logging.
    
    Signature: Callable[[], Any]
    
    Returns:
        Any - Return value ignored
    """

@cl.on_stop
async def stop():
    """
    Hook executed when the user stops thread execution (stop button).
    Use for canceling ongoing operations, cleanup.
    
    Signature: Callable[[], Any]
    
    Returns:
        Any - Return value ignored
    """

Usage examples for chat lifecycle:

import chainlit as cl
from datetime import datetime

@cl.on_chat_start
async def start_chat():
    """Initialize new chat session"""
    # Seed per-session state read later by on_message / on_chat_end.
    cl.user_session.set("start_time", datetime.now().isoformat())
    cl.user_session.set("message_count", 0)
    cl.user_session.set("conversation_context", [])
    
    # Get user information
    # NOTE(review): assumes an authenticated user object exposing
    # `display_name` — confirm against the configured auth callback.
    user = cl.user_session.get("user")
    
    if user:
        welcome_msg = f"Welcome back, {user.display_name}! How can I help you today?"
    else:
        welcome_msg = "Hello! I'm your AI assistant. How can I help you?"
    
    await cl.Message(welcome_msg).send()
    
    # Send initial context or instructions
    await cl.Message(
        "💡 Tip: You can ask me questions, upload files, or request help with various tasks."
    ).send()

@cl.on_chat_resume
async def resume_chat(thread_dict: dict):
    """Restore session state for a previously persisted chat thread."""
    # Identify which thread is being resumed and mark the session.
    thread_id = thread_dict.get("id")
    message_history = thread_dict.get("messages", [])

    cl.user_session.set("thread_id", thread_id)
    cl.user_session.set("resumed", True)

    # Rebuild a lightweight conversation context from the most recent
    # entries (last 5 messages) of the stored history.
    context = [
        {
            "content": msg.get("content", ""),
            "author": msg.get("author", ""),
            "timestamp": msg.get("createdAt", ""),
        }
        for msg in message_history[-5:]
    ]
    cl.user_session.set("conversation_context", context)

    await cl.Message("Welcome back! I've restored our conversation context.").send()

@cl.on_chat_end
async def end_chat():
    """Clean up when chat session ends"""
    # Get session statistics recorded by on_chat_start / on_message.
    start_time = cl.user_session.get("start_time")
    message_count = cl.user_session.get("message_count", 0)
    
    # Log session data
    # NOTE(review): the `.get("identifier", ...)` chain assumes the stored
    # user is a dict; if it is a User object this raises AttributeError —
    # confirm what the auth layer stores under "user".
    session_data = {
        "start_time": start_time,
        "end_time": datetime.now().isoformat(),
        "message_count": message_count,
        "user_id": cl.user_session.get("user", {}).get("identifier", "anonymous")
    }
    
    print(f"Chat session ended: {session_data}")
    
    # Clean up any resources specific to this session
    # (Global resources are cleaned up in on_app_shutdown)

@cl.on_stop
async def stop_execution():
    """React to the user pressing the stop button."""
    # Acknowledge the interruption in the chat first...
    notice = "⏹️ Execution stopped by user."
    await cl.Message(notice).send()

    # ...then flag the session so long-running loops can bail out.
    cl.user_session.set("stop_requested", True)

Message Handling

Process incoming user messages and provide responses.

@cl.on_message
async def handle_message(message: cl.Message):
    """
    Hook executed for each incoming user message.
    Main handler for processing user input and generating responses.
    
    Args:
        message: cl.Message - The user's message object containing content and metadata
        
    Signature: Callable[[Message], Any] or Callable[[], Any]
    (the message parameter may be omitted, per the signature above)
    
    Returns:
        Any - Return value ignored
    """

Usage example for message handling:

import chainlit as cl

@cl.on_message
async def main(message: cl.Message):
    """Route each incoming user message to the appropriate handler."""
    # Track how many messages this session has seen.
    cl.user_session.set("message_count", cl.user_session.get("message_count", 0) + 1)

    user_input = message.content.strip()
    lowered = user_input.lower()

    # Slash commands and empty input short-circuit before the AI call.
    if lowered.startswith("/help"):
        await show_help()
        return
    if lowered.startswith("/stats"):
        await show_stats()
        return
    if not user_input:
        await cl.Message("Please send a message with some content.").send()
        return

    # Everything else goes through the model inside a traced step.
    async with cl.Step(name="Processing", type="llm") as step:
        step.input = user_input
        response = await process_with_ai(user_input)
        step.output = response

    await cl.Message(response).send()

async def show_help():
    """Show help information"""
    # NOTE(review): the leading indentation inside the literal is sent
    # verbatim — confirm the UI renders it as intended.
    help_text = """
    **Available Commands:**
    - `/help` - Show this help message
    - `/stats` - Show session statistics
    - Upload files for analysis
    - Ask any question for AI assistance
    """
    await cl.Message(help_text).send()

async def show_stats():
    """Report per-session usage statistics back to the user."""
    # Values were seeded in on_chat_start and updated in on_message.
    sent = cl.user_session.get("message_count", 0)
    started = cl.user_session.get("start_time", "Unknown")

    stats = f"""
    **Session Statistics:**
    - Messages sent: {sent}
    - Session started: {started}
    - Current profile: {cl.user_session.get('chat_profile', 'default')}
    """
    await cl.Message(stats).send()

Audio Processing

Handle real-time audio input with chunk-based processing.

@cl.on_audio_start
async def start_audio():
    """
    Hook executed when the user starts audio input.
    Use for initializing audio processing resources.
    
    Signature: Callable[[], Any]
    
    Returns:
        Any - Return value ignored
    """

@cl.on_audio_chunk  
async def handle_audio_chunk(chunk: InputAudioChunk):
    """
    Hook executed for each audio data chunk during recording.
    Use for real-time audio processing, speech recognition.
    
    Args:
        chunk: InputAudioChunk - Audio data chunk with metadata
        
    Signature: Callable[[InputAudioChunk], Any]
    
    Returns:
        Any - Return value ignored
    """

@cl.on_audio_end
async def end_audio():
    """
    Hook executed when the audio input session ends.
    Use for final audio processing, cleanup.
    
    Signature: Callable[[], Any]
    
    Returns:
        Any - Return value ignored
    """

Usage examples for audio processing:

import chainlit as cl

# Global audio processing state
# NOTE(review): module-level state is shared across all concurrent
# sessions — fine for a single-user demo, racy with multiple users.
audio_buffer = []
speech_recognizer = None

@cl.on_audio_start
async def start_audio_session():
    """Initialize audio processing"""
    global audio_buffer, speech_recognizer
    
    # Reset the buffer and (re)create the recognizer for this recording.
    audio_buffer = []
    # Initialize speech recognition service
    # (initialize_speech_service is assumed to be defined elsewhere)
    speech_recognizer = initialize_speech_service()
    
    await cl.Message("🎤 Audio recording started. Speak now...").send()

@cl.on_audio_chunk
async def process_audio_chunk(chunk: cl.InputAudioChunk):
    """Process incoming audio chunks"""
    global audio_buffer, speech_recognizer
    
    # Buffer every chunk; the full recording is assembled in on_audio_end.
    audio_buffer.append(chunk.data)
    
    # Real-time processing for certain chunk properties
    if chunk.isStart:
        print(f"Audio started - MIME type: {chunk.mimeType}")
    
    # Process chunk with speech recognizer if available
    if speech_recognizer and len(chunk.data) > 1024:  # Minimum chunk size
        try:
            # Attempt partial transcription
            partial_text = await speech_recognizer.process_chunk(chunk.data)
            if partial_text:
                # Update live transcription (optional)
                cl.user_session.set("partial_transcription", partial_text)
        except Exception as e:
            # Best-effort: a failed partial transcription should not abort
            # the recording, so log and keep buffering.
            print(f"Audio processing error: {e}")

@cl.on_audio_end
async def finalize_audio():
    """Process complete audio recording"""
    global audio_buffer, speech_recognizer
    
    # NOTE(review): this early return skips the finally-block cleanup, so
    # the recognizer from start_audio_session stays alive — confirm intended.
    if not audio_buffer:
        await cl.Message("No audio data received.").send()
        return
    
    # Combine all chunks into one contiguous byte string.
    complete_audio = b"".join(audio_buffer)
    
    # Process complete audio
    try:
        async with cl.Step(name="Speech Recognition", type="tool") as step:
            step.input = f"Processing {len(complete_audio)} bytes of audio"
            
            # Transcribe complete audio
            transcription = await speech_recognizer.transcribe(complete_audio)
            step.output = transcription
            
        if transcription:
            await cl.Message(f"🎤 You said: {transcription}").send()
            
            # Process transcription as a regular message
            await cl.Message(f"Processing your spoken request: {transcription}").send()
            
        else:
            await cl.Message("Could not transcribe audio. Please try again.").send()
            
    except Exception as e:
        await cl.Message(f"Audio processing failed: {str(e)}").send()
    
    finally:
        # Always drop the buffered audio and release the recognizer,
        # even when transcription failed above.
        audio_buffer = []
        speech_recognizer = None

User Interactions and Feedback

Handle user actions, feedback, and custom interactions.

@cl.action_callback("action_name")
async def handle_action(action: cl.Action):
    """
    Register a callback for clicks on the action button with this name.
    Decorator factory that creates action-specific handlers.
    
    Args:
        action: cl.Action - Action object containing name, payload, and metadata
        
    Signature: action_callback(name: str) -> Callable[[Action], Any]
    
    Usage:
        @cl.action_callback("button_name")
        async def my_handler(action): ...
    
    Returns:
        Any - Return value ignored
    """

@cl.on_feedback
async def handle_feedback(feedback: Feedback):
    """
    Hook executed when the user provides feedback on a message.
    Use for collecting user satisfaction data, improving responses.
    
    Args:
        feedback: Feedback - Feedback object with rating and optional comment
        
    Signature: Callable[[Feedback], Any]
    
    Returns:
        Any - Return value ignored
    """

@cl.author_rename
async def rename_author(author: str) -> str:
    """
    Transform author names for display in the UI.
    Use for customizing how author names appear to users.
    
    Args:
        author: str - Original author name
        
    Signature: Callable[[str], Awaitable[str]]
    
    Returns:
        str - Transformed display name
    """

Usage examples for user interactions:

import chainlit as cl

# Action callback examples — each handler receives the payload that was
# attached when the Action button was created (see send_with_actions).
@cl.action_callback("approve")
async def handle_approval(action: cl.Action):
    """Handle approval action"""
    request_id = action.payload.get("request_id")
    await cl.Message(f"✅ Request {request_id} approved!").send()
    
    # Process approval logic (process_approval is defined elsewhere)
    await process_approval(request_id)

@cl.action_callback("reject")  
async def handle_rejection(action: cl.Action):
    """Handle rejection action"""
    request_id = action.payload.get("request_id")
    reason = action.payload.get("reason", "No reason provided")
    
    await cl.Message(f"❌ Request {request_id} rejected: {reason}").send()

@cl.action_callback("more_info")
async def show_more_info(action: cl.Action):
    """Show additional information"""
    item_id = action.payload.get("item_id")
    
    # Fetch detailed information (get_item_details is defined elsewhere)
    details = await get_item_details(item_id)
    
    await cl.Message(f"**Details for {item_id}:**\n{details}").send()

# Send message with actions
@cl.on_message
async def send_with_actions(message: cl.Message):
    """Example of attaching clickable action buttons to a message."""
    # (name, label, payload, icon) tuples keep the button specs compact;
    # each name must match a registered @cl.action_callback handler.
    button_specs = [
        ("approve", "Approve", {"request_id": "REQ-123"}, "check"),
        ("reject", "Reject", {"request_id": "REQ-123", "reason": "Incomplete"}, "x"),
        ("more_info", "More Info", {"item_id": "ITEM-456"}, "info"),
    ]
    actions = [
        cl.Action(name=name, label=label, payload=payload, icon=icon)
        for name, label, payload, icon in button_specs
    ]

    await cl.Message(
        "Please review this request:",
        actions=actions
    ).send()

# Feedback handling
@cl.on_feedback
async def collect_feedback(feedback):
    """Persist user feedback on a message and acknowledge it.

    Args:
        feedback: Feedback object carrying the rating, an optional
            comment, and the ID of the message being rated.
    """
    # BUG FIX: `datetime` was used below without being imported anywhere
    # in this snippet; import it locally so the example is self-contained.
    from datetime import datetime

    rating = feedback.rating  # 1-5 stars or thumbs up/down
    comment = feedback.comment  # Optional text comment
    message_id = feedback.messageId  # ID of message being rated
    
    # Store feedback in database (store_feedback is defined elsewhere).
    # NOTE(review): the `.get` chain assumes the stored user is a dict —
    # confirm against the auth layer.
    feedback_data = {
        "message_id": message_id,
        "rating": rating,
        "comment": comment,
        "user_id": cl.user_session.get("user", {}).get("identifier", "anonymous"),
        "timestamp": datetime.now().isoformat()
    }
    
    await store_feedback(feedback_data)
    
    # Thank the user; tone depends on how positive the rating was.
    if rating >= 4:
        await cl.Message("Thanks for the positive feedback! 😊").send()
    else:
        await cl.Message("Thanks for the feedback. We'll work to improve!").send()

# Author name transformation
@cl.author_rename
async def customize_author_names(author: str) -> str:
    """Prefix author names with an emoji that reflects their role."""
    # Checked in order; the first matching prefix wins, anyone else
    # falls through to the generic person emoji.
    emoji_by_prefix = (
        ("AI", "🤖"),
        ("System", "⚙️"),
        ("Admin", "👑"),
    )
    for prefix, emoji in emoji_by_prefix:
        if author.startswith(prefix):
            return f"{emoji} {author}"
    return f"👤 {author}"

Window Communication

Handle browser window communication for advanced integrations.

@cl.on_window_message
async def handle_window_message(message: str):
    """
    Hook for JavaScript postMessage events from the browser window.
    Use for custom frontend-backend communication.
    
    Args:
        message: str - Message content from browser via postMessage
        
    Signature: Callable[[str], Any]
    
    Returns:
        Any - Return value ignored
    """

# API description of the chainlit-provided cl.send_window_message function
# (not a user-defined hook).
async def send_window_message(data: Any) -> None:
    """
    Send data to the browser window via the postMessage API.
    Use for pushing data to custom frontend components.
    
    Args:
        data: Any - Data to send to browser window
        
    Returns:
        None
    """

Usage examples for window communication:

import chainlit as cl
import json

@cl.on_window_message
async def handle_browser_message(message: str):
    """Dispatch JSON-encoded postMessage payloads from the browser."""
    try:
        data = json.loads(message)
        kind = data.get("type")

        if kind == "user_action":
            # Custom action triggered from the frontend.
            await handle_custom_action(data.get("payload", {}))

        elif kind == "page_visibility":
            # Mirror the page's visibility into the session state and
            # greet the user when the page becomes visible again.
            if data.get("visible", True):
                cl.user_session.set("page_hidden", False)
                await cl.Message("Welcome back!").send()
            else:
                cl.user_session.set("page_hidden", True)

        elif kind == "custom_widget_data":
            # Forward widget payloads to their processor
            # (process_widget_data is defined elsewhere).
            await process_widget_data(data.get("data", {}))

    except json.JSONDecodeError:
        print(f"Invalid JSON message from browser: {message}")

async def send_data_to_browser():
    """Push configuration and live status updates to the frontend."""
    # Static configuration for custom components.
    await cl.send_window_message({
        "type": "config_update",
        "theme": "dark",
        "language": "en",
        "features": ["voice_input", "file_upload"]
    })

    # Real-time progress update.
    await cl.send_window_message({
        "type": "status_update",
        "processing": True,
        "progress": 75,
        "message": "Processing your request..."
    })

@cl.on_message
async def demo_window_communication(message: cl.Message):
    """Demonstrate bidirectional backend-to-browser messaging."""
    msg_id = "msg_123"

    # Tell the browser we started working on this message.
    start_payload = {"type": "processing_start", "message_id": msg_id}
    await cl.send_window_message(start_payload)

    # Simulate processing
    await cl.sleep(2)

    # Tell the browser we are done.
    done_payload = {
        "type": "processing_complete",
        "message_id": msg_id,
        "result": "Processing completed successfully",
    }
    await cl.send_window_message(done_payload)

    await cl.Message("Task completed!").send()

Core Types

from typing import Dict, Any, Optional
from dataclasses import dataclass

# Thread and session types
ThreadDict = Dict[str, Any]  # Contains thread metadata and message history

# Audio chunk data structure
@dataclass
class InputAudioChunk:
    """One chunk of streamed audio captured from the user's microphone."""
    isStart: bool           # Whether this is the first chunk
    mimeType: str           # Audio MIME type (e.g., "audio/wav")
    elapsedTime: float      # Elapsed time in seconds
    data: bytes             # Audio data bytes

# Feedback data structure
@dataclass
class Feedback:
    """User feedback attached to a message."""
    messageId: str                 # ID of the message being rated
    rating: int                    # Rating value (1-5 or thumbs up/down)
    # FIX: the comment is documented as optional, so give it a default
    # (backward compatible — it is the last field).
    comment: Optional[str] = None  # Optional text comment

# Action response structure
@dataclass
class ActionResponse:
    """Result of an action button click."""
    name: str                # Action identifier
    payload: Dict[str, Any]  # Action payload data

Install with Tessl CLI

npx tessl i tessl/pypi-chainlit

docs

advanced.md

authentication.md

callbacks.md

index.md

input-widgets.md

integrations.md

messaging.md

ui-elements.md

user-management.md

tile.json