Build production-ready conversational AI applications in minutes with rich UI components and LLM integrations
npx @tessl/cli install tessl/pypi-chainlit@2.7.0

A comprehensive Python framework for building production-ready conversational AI applications in minutes. Chainlit provides rich UI components, seamless LLM integrations, and powerful observability features that enable developers to create interactive chat applications, AI agents, and conversational workflows with minimal boilerplate code.
pip install chainlit

import chainlit as cl

For specific components:
from chainlit import Message, Step, User, Action
from chainlit.input_widget import Slider, Switch, Select

import chainlit as cl
@cl.on_chat_start
async def start():
"""Initialize the chat session"""
await cl.Message("Hello! I'm your AI assistant. How can I help you today?").send()
@cl.on_message
async def main(message: cl.Message):
"""Handle incoming user messages"""
# Process the user's message
user_input = message.content
# Send a response
response = f"You said: {user_input}"
await cl.Message(response).send()
# Add an image element
await cl.Image(
name="example",
path="./image.png",
display="inline"
).send()
# Run the app with: chainlit run app.py

Chainlit's architecture centers around four key concepts that work together to create rich conversational experiences: messages, steps, elements, and user sessions.
This design enables building complex conversational AI applications with full observability, rich multimedia support, and seamless integration with popular ML/AI frameworks like OpenAI, LangChain, and LlamaIndex.
Send messages with rich content, attachments, and interactive elements. Handle user input through text, file uploads, and custom interactions.
class Message:
    def __init__(
        self,
        content: Union[str, Dict] = "",
        author: Optional[str] = None,
        elements: Optional[List[Element]] = None,
        actions: Optional[List[Action]] = None
    ): ...
    async def send(self) -> "Message": ...
    async def update(self) -> "Message": ...
    async def stream_token(self, token: str) -> None: ...

class AskUserMessage:
    def __init__(self, content: str, timeout: int = 60): ...
    async def send(self) -> Optional[Dict]: ...

class AskFileMessage:
    def __init__(
        self,
        content: str,
        accept: Union[List[str], Dict[str, List[str]]],
        max_size_mb: int = 2,
        max_files: int = 1
    ): ...
    async def send(self) -> Optional[List]: ...
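For example, a minimal sketch of the message APIs above. The "output" key on the AskUserMessage response is an assumption and may differ between Chainlit versions:

import chainlit as cl

@cl.on_chat_start
async def greet():
    # Block until the user answers, or give up after 30 seconds.
    reply = await cl.AskUserMessage(content="What should I call you?", timeout=30).send()
    if reply:
        await cl.Message(content=f"Nice to meet you, {reply['output']}!").send()

@cl.on_message
async def respond(message: cl.Message):
    # Stream the reply token by token, then finalize it.
    msg = cl.Message(content="")
    for token in ("You", " said: ", message.content):
        await msg.stream_token(token)
    await msg.send()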
Track execution steps and provide transparency into AI processing workflows with automatic timing, input/output capture, and nested step hierarchies.

class Step:
    def __init__(
        self,
        name: str,
        type: str = "run",
        show_input: Union[bool, str] = "json"
    ): ...
    async def send(self) -> "Step": ...
    async def update(self) -> "Step": ...
    async def __aenter__(self) -> "Step": ...
    async def __aexit__(self, *args) -> None: ...

def step(
    name: Optional[str] = None,
    type: str = "run",
    show_input: Union[bool, str] = "json"
): ...
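A minimal sketch of both step forms shown above, the decorator and the async context manager; the step names and types here are illustrative:

import chainlit as cl

@cl.step(name="Retrieve documents", type="retrieval")
async def retrieve(query: str):
    # Inputs, outputs, and timing are captured automatically by the decorator.
    return [f"doc matching {query!r}"]

@cl.on_message
async def main(message: cl.Message):
    docs = await retrieve(message.content)
    # Nested step created explicitly; input/output appear in the step details.
    async with cl.Step(name="Summarize", type="llm") as step:
        step.input = message.content
        step.output = f"Found {len(docs)} document(s)."
    await cl.Message(content=step.output).send()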
Display images, videos, audio, charts, DataFrames, PDFs, and custom components within the chat interface.

class Image:
    def __init__(
        self,
        name: str,
        path: Optional[str] = None,
        content: Optional[bytes] = None,
        display: str = "inline",
        size: str = "medium"
    ): ...

class Plotly:
    def __init__(self, name: str, figure: Any, size: str = "medium"): ...

class Dataframe:
    def __init__(self, name: str, data: Any, size: str = "large"): ...

class File:
    def __init__(
        self,
        name: str,
        path: Optional[str] = None,
        content: Optional[bytes] = None
    ): ...

class ElementSidebar:
    def __init__(self): ...
    # Methods for managing sidebar element display
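A sketch of attaching elements to a message using the classes above; it assumes plotly and pandas are installed, and the figure and DataFrame are placeholders:

import chainlit as cl
import pandas as pd
import plotly.graph_objects as go

@cl.on_chat_start
async def report():
    fig = go.Figure(data=go.Bar(x=["a", "b"], y=[1, 3]))
    df = pd.DataFrame({"metric": ["accuracy", "latency_ms"], "value": [0.92, 120]})
    await cl.Message(
        content="Here is the report:",
        elements=[
            cl.Plotly(name="chart", figure=fig, size="medium"),
            cl.Dataframe(name="metrics", data=df, size="large"),
            cl.File(name="metrics.csv", content=df.to_csv(index=False).encode()),
        ],
    ).send()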
Manage user authentication, sessions, and persistent data across conversations.

class User:
    def __init__(
        self,
        identifier: str,
        display_name: Optional[str] = None,
        metadata: Dict = {}
    ): ...
user_session: UserSession
# Methods: get(), set(), create_accessor()
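A sketch of per-session state using the user_session get/set API above; the "user" session key holds a cl.User only when authentication is enabled:

import chainlit as cl

@cl.on_chat_start
async def start():
    cl.user_session.set("history", [])   # per-session state, not shared between users
    user = cl.user_session.get("user")   # cl.User when authentication is enabled
    name = user.identifier if user else "guest"
    await cl.Message(content=f"Welcome, {name}!").send()

@cl.on_message
async def track(message: cl.Message):
    history = cl.user_session.get("history")
    history.append(message.content)
    await cl.Message(content=f"Messages this session: {len(history)}").send()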
Handle application lifecycle, user interactions, and system events through decorators.

@cl.on_chat_start
async def start(): ...
@cl.on_message
async def handle_message(message: cl.Message): ...
@cl.on_audio_chunk
async def handle_audio(chunk: InputAudioChunk): ...
@cl.password_auth_callback
async def auth(username: str, password: str) -> Optional[cl.User]: ...
@cl.action_callback("button_name")
async def handle_action(action: cl.Action): ...
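A sketch wiring an action button to its callback; the payload field follows recent Chainlit releases (older ones used value), and the names are illustrative:

import chainlit as cl

@cl.on_chat_start
async def start():
    actions = [cl.Action(name="confirm", payload={"choice": "yes"}, label="Confirm")]
    await cl.Message(content="Proceed with the plan?", actions=actions).send()

@cl.action_callback("confirm")
async def on_confirm(action: cl.Action):
    # Called whenever a button named "confirm" is clicked.
    await cl.Message(content=f"Got it: {action.payload}").send()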
Create interactive settings panels with sliders, switches, text inputs, dropdowns, and custom widgets.

from chainlit.input_widget import Slider, Switch, Select, TextInput
class Slider:
    def __init__(
        self,
        id: str,
        label: str,
        initial: float = 0,
        min: float = 0,
        max: float = 10
    ): ...

class ChatSettings:
    def __init__(self, widgets: List[InputWidget]): ...
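A sketch of a settings panel; it assumes an @cl.on_settings_update hook (present in current Chainlit releases) to receive changes:

import chainlit as cl
from chainlit.input_widget import Select, Slider, Switch

@cl.on_chat_start
async def start():
    settings = await cl.ChatSettings(
        [
            Select(id="model", label="Model", values=["gpt-4o", "gpt-4o-mini"], initial_index=0),
            Slider(id="temperature", label="Temperature", initial=0.7, min=0, max=2, step=0.1),
            Switch(id="stream", label="Stream tokens", initial=True),
        ]
    ).send()
    cl.user_session.set("settings", settings)

@cl.on_settings_update
async def on_update(settings: dict):
    # Persist the latest values for use in later message handlers.
    cl.user_session.set("settings", settings)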
Integrate seamlessly with popular AI frameworks, including automatic step tracking and observability.

# OpenAI Integration
def instrument_openai() -> None: ...
# LangChain Integration
class LangchainCallbackHandler: ...
class AsyncLangchainCallbackHandler: ...
# LlamaIndex Integration
class LlamaIndexCallbackHandler: ...
# Mistral AI Integration
def instrument_mistralai() -> None: ...
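A sketch of the OpenAI instrumentation; it assumes the openai package and an OPENAI_API_KEY environment variable, and the model name is illustrative:

import chainlit as cl
from openai import AsyncOpenAI

cl.instrument_openai()  # every OpenAI call is then recorded as an LLM step
client = AsyncOpenAI()

@cl.on_message
async def main(message: cl.Message):
    response = await client.chat.completions.create(
        model="gpt-4o-mini",
        messages=[{"role": "user", "content": message.content}],
    )
    await cl.Message(content=response.choices[0].message.content).send()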
Implement secure authentication with password, header, and OAuth providers.

@cl.password_auth_callback
async def password_auth(username: str, password: str) -> Optional[cl.User]: ...
@cl.header_auth_callback
async def header_auth(headers: Headers) -> Optional[cl.User]: ...
@cl.oauth_callback
async def oauth_callback(
    provider_id: str,
    token: str,
    raw_user_data: Dict,
    default_user: cl.User,
    id_token: Optional[str] = None
) -> Optional[cl.User]: ...
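A sketch of password authentication; the hard-coded credentials are placeholders for a real credential store:

import chainlit as cl
from typing import Optional

@cl.password_auth_callback
async def auth(username: str, password: str) -> Optional[cl.User]:
    # Return a cl.User on success, None to reject the login.
    if username == "admin" and password == "change-me":  # demo only
        return cl.User(identifier="admin", metadata={"role": "admin"})
    return None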
Caching, async utilities, MCP (Model Context Protocol) support, and server functionality for production deployments.

@cl.cache
def expensive_function(arg: str) -> str: ...
async def make_async(func: Callable) -> Callable: ...
def run_sync(coro: Awaitable) -> Any: ...
async def sleep(duration: int) -> None: ...
@cl.on_mcp_connect
async def mcp_connect(connection: McpConnection, session: ClientSession): ...
class CopilotFunction:
    name: str
    args: Dict[str, Any]
    async def acall(self) -> Any: ...
# Context and session management
context: ChainlitContext
chat_context: ChatContextManager
# Version information
__version__: str
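A sketch of the caching and async helpers above; the loader body is a placeholder for genuinely expensive setup work:

import chainlit as cl

@cl.cache
def load_index(path: str) -> dict:
    # Runs once per process; later calls return the cached result.
    return {"path": path, "vectors": []}

@cl.on_message
async def main(message: cl.Message):
    index = load_index("./data")
    await cl.sleep(1)  # non-blocking pause inside the event loop
    await cl.Message(content=f"Searched {index['path']} for: {message.content}").send()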
from typing import Literal, Optional
from dataclasses import dataclass

# Element display modes
ElementDisplay = Literal["inline", "side", "page"]
ElementSize = Literal["small", "medium", "large"]

# Step types
TrueStepType = Literal[
    "run", "llm", "tool", "embedding", "retrieval",
    "rerank", "undefined", "assistant_message", "user_message"
]

# Message types
MessageStepType = Literal["assistant_message", "user_message"]

# Task status
TaskStatus = Literal["ready", "running", "failed", "done"]
# Audio chunk data
@dataclass
class InputAudioChunk:
    isStart: bool
    mimeType: str
    elapsedTime: float
    data: bytes

# Chat profile configuration
@dataclass
class ChatProfile:
    name: str
    markdown_description: str
    icon: Optional[str] = None
    default: bool = False

# Conversation starter
@dataclass
class Starter:
    label: str
    message: str
    command: Optional[str] = None
    icon: Optional[str] = None
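A sketch of the ChatProfile and Starter dataclasses in use, assuming the cl.set_chat_profiles and cl.set_starters decorators available in recent Chainlit releases; the profile names and starter text are illustrative:

import chainlit as cl

@cl.set_chat_profiles
async def chat_profiles():
    return [
        cl.ChatProfile(name="GPT-4o", markdown_description="Best quality.", default=True),
        cl.ChatProfile(name="GPT-4o-mini", markdown_description="Faster and cheaper."),
    ]

@cl.set_starters
async def starters():
    return [
        cl.Starter(label="Summarize", message="Summarize the attached document."),
        cl.Starter(label="Brainstorm", message="Give me five ideas for a blog post."),
    ]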