Comprehensive Python SDK for AI application observability and experimentation with OpenTelemetry-based tracing, automatic instrumentation, and dataset management.
Fundamental tracing functionality for instrumenting AI applications with automatic span creation, context propagation, and detailed performance monitoring. Built on OpenTelemetry standards for industry compatibility.
The primary interface for all Langfuse tracing operations, providing methods for creating spans, managing traces, and configuring observability.
class Langfuse:
def __init__(self, *, public_key: str = None, secret_key: str = None,
host: str = "https://cloud.langfuse.com", tracing_enabled: bool = True,
environment: str = None, timeout: int = 60, **kwargs): ...

Parameters:
- public_key (str, optional): Project public key, can also be set via the LANGFUSE_PUBLIC_KEY env var
- secret_key (str, optional): Project secret key, can also be set via the LANGFUSE_SECRET_KEY env var
- host (str): Langfuse server URL, defaults to the cloud instance
- tracing_enabled (bool): Enable/disable tracing globally
- environment (str, optional): Environment tag for traces
- timeout (int): Request timeout in seconds

Create spans manually for precise control over tracing boundaries and span metadata.
def start_span(self, name: str, *, input: Any = None, output: Any = None,
metadata: Any = None, **kwargs) -> LangfuseSpan:
"""Create a new span without setting it as current.
Args:
name: Span name
input: Input data for the operation
output: Output data from the operation
metadata: Additional metadata
Returns:
LangfuseSpan that must be ended with .end()
"""

Create spans with automatic lifecycle management using Python context managers.
def start_as_current_span(self, *, name: str, input: Any = None, output: Any = None,
metadata: Any = None, **kwargs) -> ContextManager[LangfuseSpan]:
"""Create a span as a context manager.
Args:
name: Span name
input: Input data for the operation
output: Output data from the operation
metadata: Additional metadata
Returns:
Context manager yielding LangfuseSpan
"""

Usage Example:
with langfuse.start_as_current_span(name="process-data") as span:
result = process_data()
span.update(output=result)

Create observations of any type with a unified interface supporting all observation types.
def start_observation(self, *, name: str, as_type: ObservationTypeLiteral,
input: Any = None, output: Any = None, metadata: Any = None,
**kwargs) -> Union[LangfuseSpan, LangfuseGeneration, LangfuseAgent, LangfuseTool, LangfuseChain, LangfuseRetriever, LangfuseEvaluator, LangfuseEmbedding, LangfuseGuardrail]:
"""Create observation of specified type.
Args:
name: Observation name
as_type: Type of observation (span, generation, tool, etc.)
input: Input data
output: Output data
metadata: Additional metadata
**kwargs: Type-specific parameters (model, usage_details, etc.)
Returns:
Observation of the specified type
"""

def start_as_current_observation(self, *, name: str, as_type: ObservationTypeLiteralNoEvent,
input: Any = None, output: Any = None, metadata: Any = None,
**kwargs) -> ContextManager[Union[LangfuseSpan, LangfuseGeneration, ...]]:
"""Create observation as context manager.
Args:
name: Observation name
as_type: Type of observation (all types except event)
input: Input data
output: Output data
metadata: Additional metadata
**kwargs: Type-specific parameters
Returns:
Context manager yielding observation
"""

Create point-in-time events for logging discrete occurrences.
def create_event(self, *, name: str, input: Any = None, output: Any = None,
metadata: Any = None, level: SpanLevel = None,
status_message: str = None) -> LangfuseEvent:
"""Create a point-in-time event.
Args:
name: Event name
input: Event input data
output: Event output data
metadata: Additional metadata
level: Importance level (DEBUG, DEFAULT, WARNING, ERROR)
status_message: Status message
Returns:
LangfuseEvent (already ended)
"""

Automatic function instrumentation with minimal code changes using the observe decorator.
def observe(func=None, *, name: str = None, as_type: ObservationTypeLiteralNoEvent = None,
capture_input: bool = None, capture_output: bool = None,
transform_to_string: Callable[[Iterable], str] = None) -> Union[Callable, Callable[[Callable], Callable]]:
"""Decorator for automatic function tracing.
Args:
func: Function to decorate (when used without parentheses)
name: Custom observation name (defaults to function name)
as_type: Observation type (span, generation, tool, etc.)
capture_input: Whether to capture function inputs
capture_output: Whether to capture function outputs
transform_to_string: Function to transform generator outputs to string
Returns:
Decorated function with automatic tracing
"""

Usage Examples:
# Simple decoration
@observe
def process_data(data):
return processed_data
# With configuration
@observe(name="llm-generation", as_type="generation")
def generate_text(prompt):
return llm_response
# Special function arguments for tracing control
def my_function(data, langfuse_trace_id=None, langfuse_parent_observation_id=None):
# Function will be traced with specified trace/parent IDs
pass

Utilities for managing client instances in multi-project setups.
def get_client(*, public_key: str = None) -> Langfuse:
"""Get or create Langfuse client instance.
Args:
public_key: Project identifier for multi-project setups
Returns:
Langfuse client instance
"""

Methods for managing client resources and ensuring proper cleanup.
def flush(self) -> None:
"""Force flush of pending events to Langfuse."""
def shutdown(self) -> None:
"""Shutdown client and release resources."""
def auth_check(self) -> bool:
"""Check API authentication credentials."""

Utilities for working with trace context and IDs.
def create_trace_id(self) -> str:
"""Generate a unique trace ID."""
def get_current_trace_id(self) -> str:
"""Get current trace ID from context."""
def get_current_observation_id(self) -> str:
"""Get current observation ID from context."""
def get_trace_url(self, trace_id: str) -> str:
"""Get URL to view trace in Langfuse UI."""

Usage Example:

from langfuse import Langfuse
langfuse = Langfuse()
# Manual span management
span = langfuse.start_span(name="data-processing")
try:
result = process_data()
span.update(output=result)
finally:
span.end()
# Context manager (recommended)
with langfuse.start_as_current_span(name="data-processing") as span:
result = process_data()
span.update(output=result)

Usage Example:

from langfuse import observe
@observe(as_type="generation")
def generate_response(prompt):
# This function is automatically traced
response = call_llm(prompt)
return response
@observe(name="custom-processor")
async def process_async(data):
# Works with async functions too
result = await async_process(data)
    return result

Nested tracing example:

@observe(name="main-process")
def main_process():
# Parent span created automatically
@observe(name="sub-process")
def sub_process():
# Child span created automatically
return "result"
return sub_process()
# Or with manual management
with langfuse.start_as_current_span(name="parent") as parent:
with parent.start_as_current_observation(name="child", as_type="tool") as child:
result = call_external_api()
child.update(output=result)

Error handling example:

@observe(as_type="generation")
def risky_operation():
try:
result = might_fail()
return result
except Exception as e:
# Error automatically captured in trace
        raise

Generator example:

@observe(capture_output=True)
def stream_data():
for item in data_stream():
yield process_item(item)
# Full output automatically captured when generator exhausted

Install with Tessl CLI
npx tessl i tessl/pypi-langfuse