Ctrl+K
Blog · Docs · Log in · Get started
Tessl Logo

tessl/pypi-chainlit

Build production-ready conversational AI applications in minutes with rich UI components and LLM integrations

Pending

Quality

Pending

Does it follow best practices?

Impact

Pending

No eval scenarios have been run

Overview
Eval results
Files

docs/messaging.md

Messaging

Core messaging functionality including Message classes for user communication, Step class for execution tracking and observability, and error handling mechanisms. These components form the foundation of Chainlit's conversational AI capabilities.

Capabilities

Message Communication

Send rich messages with content, media attachments, and interactive elements to users in the chat interface.

import chainlit as cl

class Message:
    """
    Main class for sending messages to the UI with support for rich content,
    actions, and elements.
    
    Args:
        content: Union[str, Dict] - Message content (text, dict, or JSON)
        author: Optional[str] - Message author (defaults to config.ui.name)
        language: Optional[str] - Code language for syntax highlighting
        actions: Optional[List[Action]] - Interactive buttons
        elements: Optional[List[ElementBased]] - Attached media/files
        type: MessageStepType - Message type ("assistant_message", "user_message")
        metadata: Optional[Dict] - Custom metadata
        tags: Optional[List[str]] - Message tags
        id: Optional[str] - Message ID (auto-generated if not provided)
        parent_id: Optional[str] - Parent message/step ID for nesting
        command: Optional[str] - Optional command identifier
        created_at: Optional[str] - Creation timestamp (auto-generated if not provided)
        
    Returns:
        Message instance
    """
    # NOTE(review): parameters `type` and `id` shadow Python builtins; they are
    # kept as-is because they mirror chainlit's public keyword API — do not rename.
    def __init__(
        self,
        content: Union[str, Dict] = "",
        author: Optional[str] = None,
        language: Optional[str] = None,
        actions: Optional[List[Action]] = None,
        elements: Optional[List[ElementBased]] = None,
        type: MessageStepType = "assistant_message",
        metadata: Optional[Dict] = None,
        tags: Optional[List[str]] = None,
        id: Optional[str] = None,
        parent_id: Optional[str] = None,
        command: Optional[str] = None,
        created_at: Optional[str] = None
    ): ...
    
    async def send(self) -> "Message":
        """Send the message to the UI."""
        
    async def update(self) -> "Message":
        """Update an existing message in the UI (the message must have been sent)."""
        
    async def remove(self) -> None:
        """Remove the message from the UI."""
        
    async def stream_token(self, token: str) -> None:
        """Stream a single token to the message for real-time display."""

Usage example:

import chainlit as cl

@cl.on_message
async def main(message: cl.Message):
    # Plain text reply
    greeting = cl.Message("Hello! How can I help you?")
    await greeting.send()

    # Reply rendered with syntax highlighting
    snippet = "def hello(): return 'world'"
    highlighted = cl.Message(
        content=snippet,
        language="python",
        author="Code Assistant",
    )
    await highlighted.send()

    # Reply carrying custom metadata and tags
    status = cl.Message(
        content="Processing your request...",
        metadata={"request_id": "123"},
        tags=["processing", "status"],
    )
    await status.send()

Error Message Display

Display error messages with specialized formatting and error handling options.

class ErrorMessage:
    """
    Specialized message class for displaying errors in the UI.
    
    Args:
        content: str - Error message text
        author: str - Author (defaults to config.ui.name)
        fail_on_persist_error: bool - Whether to raise on persistence errors (default: False)
        
    Returns:
        ErrorMessage instance
    """
    def __init__(
        self,
        content: str,
        # NOTE(review): this default is evaluated once, at class-definition time;
        # later changes to config.ui.name will not be reflected — confirm intended.
        author: str = config.ui.name,
        fail_on_persist_error: bool = False
    ): ...
    
    async def send(self) -> "ErrorMessage":
        """Send the error message to the UI."""

Interactive User Input

Prompt users for text input, file uploads, action selection, and custom element submission with timeout handling.

class AskUserMessage:
    """
    Interactive message that prompts user for text input.
    
    Args:
        content: str - Prompt text displayed to user
        author: str - Message author (defaults to config.ui.name)
        type: MessageStepType - Message type (default: "assistant_message")
        timeout: int - Timeout in seconds (default: 60)
        raise_on_timeout: bool - Whether to raise TimeoutError on timeout (default: False)
        
    Returns:
        Union[StepDict, None] - User's response or None if timeout
    """
    def __init__(
        self,
        content: str,
        # NOTE(review): default evaluated once at class-definition time (see ErrorMessage).
        author: str = config.ui.name,
        type: MessageStepType = "assistant_message",
        timeout: int = 60,
        raise_on_timeout: bool = False
    ): ...
    
    async def send(self) -> Optional[Dict]:
        """Send the prompt and wait for the user's response.

        Returns None on timeout unless raise_on_timeout is True (per Args above).
        """

class AskFileMessage:
    """
    Interactive message for file uploads from users.
    
    Args:
        content: str - Upload prompt text
        accept: Union[List[str], Dict[str, List[str]]] - Accepted MIME types
        max_size_mb: int - Maximum file size (default: 2MB, max: 100MB) 
        max_files: int - Maximum number of files (default: 1, max: 10)
        timeout: int - Timeout in seconds (default: 90)
        
    Returns:
        Union[List[AskFileResponse], None] - List of uploaded files or None
    """
    def __init__(
        self,
        content: str,
        # accept is either a flat list of MIME types or a mapping — presumably
        # MIME type -> allowed extensions; confirm against chainlit docs.
        accept: Union[List[str], Dict[str, List[str]]], 
        max_size_mb: int = 2,
        max_files: int = 1,
        timeout: int = 90
    ): ...
    
    async def send(self) -> Optional[List]:
        """Send the file upload prompt and wait for files; None on timeout."""

class AskActionMessage:
    """
    Interactive message for action selection from predefined options.
    
    Args:
        content: str - Selection prompt text
        actions: List[Action] - Available actions for user to choose
        timeout: int - Timeout in seconds (default: 90)
        
    Returns:
        Union[AskActionResponse, None] - Selected action details or None
    """
    def __init__(
        self,
        content: str,
        actions: List[Action],
        timeout: int = 90
    ): ...
    
    async def send(self) -> Optional[Dict]:
        """Send the action selection prompt and wait for a choice; None on timeout."""

class AskElementMessage:
    """
    Interactive message for custom element submission and interaction.
    
    Args:
        content: str - Prompt text displayed with element
        element: CustomElement - Custom element to display
        timeout: int - Timeout in seconds (default: 90)
        
    Returns:
        Union[AskElementResponse, None] - Element response data or None
    """
    def __init__(
        self,
        content: str,
        element: CustomElement,
        timeout: int = 90
    ): ...
    
    async def send(self) -> Optional[Dict]:
        """Send the element prompt and wait for interaction; None on timeout."""

Usage examples for interactive messages:

import chainlit as cl

@cl.on_message
async def handle_message(message: cl.Message):
    # Prompt the user for free-form text
    reply = await cl.AskUserMessage(
        content="What's your name?",
        timeout=30,
    ).send()

    if reply:
        await cl.Message(f"Hello, {reply['content']}!").send()

    # Prompt the user to upload a document
    uploads = await cl.AskFileMessage(
        content="Please upload your document:",
        accept=["application/pdf", "text/plain"],
        max_size_mb=10,
    ).send()

    if uploads:
        first = uploads[0]
        await cl.Message(f"Received file: {first.name}").send()

    # Prompt the user to pick one of two predefined actions
    choices = [
        cl.Action(name="option1", label="Option 1"),
        cl.Action(name="option2", label="Option 2"),
    ]
    picked = await cl.AskActionMessage(
        content="Choose an option:",
        actions=choices,
    ).send()

    if picked:
        await cl.Message(f"You chose: {picked['name']}").send()

Step Execution Tracking

Track execution steps and provide observability into AI processing workflows with automatic timing, input/output capture, and hierarchical organization.

class Step:
    """
    Core class for tracking execution steps and providing observability
    into AI processing workflows.
    
    Args:
        name: Optional[str] - Step name for display (defaults to config.ui.name)
        type: TrueStepType - Step type ("run", "llm", "tool", "embedding", etc.) (default: "undefined")
        id: Optional[str] - Step ID (auto-generated if not provided)
        parent_id: Optional[str] - Parent step ID for nesting
        elements: Optional[List[Element]] - Attached elements
        metadata: Optional[Dict] - Custom metadata
        tags: Optional[List[str]] - Step tags for categorization
        language: Optional[str] - Content language for syntax highlighting
        default_open: Optional[bool] - Whether step is expanded by default
        show_input: Union[bool, str] - Whether to show input ("json", True, False)
        thread_id: Optional[str] - Thread ID (auto-set from context if not provided)
        
    Returns:
        Step instance
    """
    # NOTE(review): `type` and `id` shadow builtins; kept to mirror chainlit's
    # public keyword API — do not rename.
    def __init__(
        self,
        name: Optional[str] = None,
        type: TrueStepType = "undefined",
        id: Optional[str] = None,
        parent_id: Optional[str] = None,
        elements: Optional[List[Element]] = None,
        metadata: Optional[Dict] = None,
        tags: Optional[List[str]] = None,
        language: Optional[str] = None,
        default_open: Optional[bool] = False,
        show_input: Union[bool, str] = "json",
        thread_id: Optional[str] = None
    ): ...
    
    # Properties automatically formatted for display
    input: Any   # step input; assigned by user code (see usage examples)
    output: Any  # step output; assigned by user code or built up via stream_token
    
    async def send(self) -> "Step":
        """Send the step to the UI for display."""
        
    async def update(self) -> "Step":
        """Update the step in the UI."""
        
    async def remove(self) -> None:
        """Remove the step from the UI."""
        
    async def stream_token(self, token: str) -> None:
        """Stream output tokens to the step."""
        
    # Context manager support for automatic step lifecycle
    # NOTE(review): presumably entering sends the step and exiting finalizes it
    # (timing is captured automatically per the module description) — confirm.
    async def __aenter__(self) -> "Step":
        """Enter async context manager."""
        
    async def __aexit__(self, exc_type, exc_val, exc_tb) -> None:
        """Exit async context manager."""
        
    def __enter__(self) -> "Step":
        """Enter sync context manager."""
        
    def __exit__(self, exc_type, exc_val, exc_tb) -> None:
        """Exit sync context manager."""

def step(
    original_function: Optional[Callable] = None,
    *,
    name: Optional[str] = "",
    type: TrueStepType = "undefined",
    id: Optional[str] = None,
    parent_id: Optional[str] = None,
    tags: Optional[List[str]] = None,
    metadata: Optional[Dict] = None,
    language: Optional[str] = None,
    show_input: Union[bool, str] = "json",
    default_open: bool = False
):
    """
    Decorator for automatic step creation and tracking of function execution.
    
    Args:
        original_function: Optional[Callable] - Function being decorated (internal)
        name: Optional[str] - Step name (defaults to function name if empty)
        type: TrueStepType - Step type (default: "undefined")
        id: Optional[str] - Step ID (auto-generated if not provided)
        parent_id: Optional[str] - Parent step ID for nesting
        tags: Optional[List[str]] - Step tags for categorization
        metadata: Optional[Dict] - Custom metadata
        language: Optional[str] - Content language for syntax highlighting
        show_input: Union[bool, str] - Input display mode (default: "json") 
        default_open: bool - Whether expanded by default
        
    Usage:
        @step
        def my_function():
            return "result"
            
        @step(name="Custom Step", type="llm")
        async def llm_call():
            return "response"
    """
    # NOTE(review): Optional `original_function` suggests the usual dual-form
    # decorator pattern (bare `@step` vs parameterized `@step(...)`) — the
    # implementation is not shown here; confirm against the chainlit source.

Usage examples for steps:

import chainlit as cl

@cl.on_message
async def main(message: cl.Message):
    # Track the work with a step used as an async context manager
    async with cl.Step(name="Processing", type="run") as tracker:
        tracker.input = {"user_message": message.content}

        # Simulate processing
        outcome = await process_message(message.content)
        tracker.output = {"result": outcome}

        await cl.Message(outcome).send()

# Using step decorator
@cl.step(name="LLM Call", type="llm")
async def call_llm(prompt: str) -> str:
    # The decorator transparently wraps this call in step tracking
    completion = await openai_client.chat.completions.create(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": prompt}],
    )
    return completion.choices[0].message.content

@cl.on_message
async def handle_message(message: cl.Message):
    # The decorated call_llm creates and tracks its own step
    answer = await call_llm(message.content)
    await cl.Message(answer).send()

# Nested steps example
@cl.on_message
async def complex_workflow(message: cl.Message):
    # Outer step groups the whole workflow; inner steps nest under it
    async with cl.Step(name="Main Workflow", type="run") as workflow:
        workflow.input = message.content

        # First stage: data processing
        async with cl.Step(name="Data Processing", type="tool") as stage_one:
            intermediate = await process_data(message.content)
            stage_one.output = intermediate

        # Second stage: LLM generation
        async with cl.Step(name="Generate Response", type="llm") as stage_two:
            answer = await generate_response(intermediate)
            stage_two.output = answer

        workflow.output = answer
        await cl.Message(answer).send()

Core Types

from typing import Union, Optional, List, Dict, Any, Literal
from dataclasses import dataclass

# Message and step types.
# FIX: these must be Literal, not Union — Union["assistant_message", ...] is
# invalid typing syntax (it would be parsed as forward references to undefined
# names). Literal (PEP 586) is the correct construct for a closed set of
# string values.
MessageStepType = Literal["assistant_message", "user_message"]

TrueStepType = Literal[
    "run", "llm", "tool", "embedding", "retrieval",
    "rerank", "undefined", "assistant_message", "user_message"
]

# Response types for interactive messages
@dataclass
class AskFileResponse:
    name: str       # original filename as uploaded
    content: bytes  # raw file bytes
    size: int       # size in bytes
    type: str       # MIME type

@dataclass
class AskActionResponse:
    name: str                # action identifier
    label: str               # user-visible label
    payload: Dict[str, Any]  # arbitrary data attached to the action

@dataclass
class AskElementResponse:
    data: Dict[str, Any]  # values submitted from the custom element

Install with Tessl CLI

npx tessl i tessl/pypi-chainlit

docs

advanced.md

authentication.md

callbacks.md

index.md

input-widgets.md

integrations.md

messaging.md

ui-elements.md

user-management.md

tile.json