OpenInference instrumentation utilities for tracking application metadata such as sessions, users, and custom metadata using Python context managers
—
OpenInference-aware tracer provider and tracer implementations with decorator support for creating spans with automatic attribute handling and span kind detection.
OpenInference-aware TracerProvider that extends OpenTelemetry's TracerProvider with additional functionality.
class TracerProvider(OTelTracerProvider):
"""
OpenInference TracerProvider with enhanced span limits and configuration support.
Args:
config (Optional[TraceConfig]): Configuration for data privacy and tracing behavior
*args: Arguments passed to OpenTelemetry TracerProvider
**kwargs: Keyword arguments passed to OpenTelemetry TracerProvider
"""
def __init__(
self,
*args: Any,
config: Optional[TraceConfig] = None,
**kwargs: Any,
) -> None: ...
def get_tracer(self, *args: Any, **kwargs: Any) -> OITracer:
"""
Get an OpenInference tracer instance.
Returns:
OITracer: OpenInference-aware tracer with decorator support
"""Usage Example:
from openinference.instrumentation import TracerProvider, TraceConfig
from opentelemetry import trace
# Create provider with configuration
config = TraceConfig(hide_inputs=True)
provider = TracerProvider(config=config)
# Set as global tracer provider
trace.set_tracer_provider(provider)
# Get tracer
tracer = provider.get_tracer(__name__)

OpenInference tracer with decorator support and custom span creation methods.
class OITracer:
"""
OpenInference tracer wrapper with decorator support and enhanced span creation.
"""
def start_span(
self,
name: str,
context: Optional[Context] = None,
kind: SpanKind = SpanKind.INTERNAL,
attributes: Attributes = None,
links: Optional[Sequence[Link]] = (),
start_time: Optional[int] = None,
record_exception: bool = True,
set_status_on_exception: bool = True,
*,
openinference_span_kind: Optional[OpenInferenceSpanKind] = None,
) -> OpenInferenceSpan:
"""
Start a new OpenInference span.
Args:
name (str): Span name
openinference_span_kind (Optional[OpenInferenceSpanKind]): OpenInference span kind
**kwargs: Additional OpenTelemetry span arguments
Returns:
OpenInferenceSpan: OpenInference-aware span wrapper
"""
@contextmanager
def start_as_current_span(
self,
name: str,
context: Optional[Context] = None,
kind: SpanKind = SpanKind.INTERNAL,
attributes: Attributes = None,
links: Optional[Sequence[Link]] = (),
start_time: Optional[int] = None,
record_exception: bool = True,
set_status_on_exception: bool = True,
end_on_exit: bool = True,
*,
openinference_span_kind: Optional[OpenInferenceSpanKind] = None,
) -> Iterator[OpenInferenceSpan]:
"""
Context manager for creating and managing a span as current.
Args:
name (str): Span name
openinference_span_kind (Optional[OpenInferenceSpanKind]): OpenInference span kind
**kwargs: Additional OpenTelemetry span arguments
Yields:
OpenInferenceSpan: The current span
"""Usage Example:
from openinference.instrumentation import TracerProvider
from openinference.semconv.trace import OpenInferenceSpanKindValues
tracer = TracerProvider().get_tracer(__name__)
# Create span manually
span = tracer.start_span("my-operation", openinference_span_kind="llm")
span.set_attribute("custom.attribute", "value")
span.end()
# Use as context manager
with tracer.start_as_current_span("my-context", openinference_span_kind=OpenInferenceSpanKindValues.CHAIN) as span:
# Span is automatically current and will be ended
result = perform_operation()
span.set_output(result)

Decorator for creating agent spans with automatic input/output handling.
def agent(
self,
wrapped_function: Optional[Callable] = None,
/,
*,
name: Optional[str] = None,
) -> Union[Callable, Callable[[Callable], Callable]]:
"""
Decorator for creating agent spans.
Args:
wrapped_function: Function to wrap (when used without parentheses)
name (Optional[str]): Custom span name (defaults to function name)
Returns:
Decorated function that creates agent spans
"""Usage Example:
from openinference.instrumentation import TracerProvider
tracer = TracerProvider().get_tracer(__name__)
# Simple usage
@tracer.agent
def my_agent(query: str) -> str:
return f"Agent response to: {query}"
# With custom name
@tracer.agent(name="customer-support-agent")
def support_agent(question: str, context: dict) -> str:
return generate_support_response(question, context)
# Usage
response = my_agent("Hello, how are you?")

Decorator for creating chain spans with automatic input/output handling.
def chain(
self,
wrapped_function: Optional[Callable] = None,
/,
*,
name: Optional[str] = None,
) -> Union[Callable, Callable[[Callable], Callable]]:
"""
Decorator for creating chain spans.
Args:
wrapped_function: Function to wrap (when used without parentheses)
name (Optional[str]): Custom span name (defaults to function name)
Returns:
Decorated function that creates chain spans
"""Usage Example:
@tracer.chain
def process_pipeline(data: dict) -> dict:
# Multi-step processing pipeline
step1_result = preprocess(data)
step2_result = analyze(step1_result)
return finalize(step2_result)
@tracer.chain(name="rag-chain")
async def rag_pipeline(query: str) -> str:
docs = await retrieve_documents(query)
context = format_context(docs)
return await generate_response(query, context)

Decorator for creating tool spans with automatic parameter inference.
def tool(
self,
wrapped_function: Optional[Callable] = None,
/,
*,
name: Optional[str] = None,
description: Optional[str] = None,
parameters: Optional[Union[str, Dict[str, Any]]] = None,
) -> Union[Callable, Callable[[Callable], Callable]]:
"""
Decorator for creating tool spans.
Args:
wrapped_function: Function to wrap (when used without parentheses)
name (Optional[str]): Custom tool name (defaults to function name)
description (Optional[str]): Tool description (defaults to docstring)
parameters (Optional[Union[str, Dict[str, Any]]]): JSON schema or dict (auto-inferred from signature)
Returns:
Decorated function that creates tool spans
"""Usage Example:
@tracer.tool
def calculate_area(length: float, width: float) -> float:
"""Calculate the area of a rectangle."""
return length * width
@tracer.tool(
name="web-search",
description="Search the web for information",
parameters={"query": {"type": "string", "description": "Search query"}}
)
def web_search(query: str) -> list:
return perform_web_search(query)
# Parameters are automatically inferred from type hints
@tracer.tool
def get_weather(city: str, units: str = "celsius") -> dict:
"""Get weather information for a city."""
return fetch_weather_data(city, units)

Decorator for creating LLM spans with support for generators and custom processing.
def llm(
self,
wrapped_function: Optional[Callable] = None,
/,
*,
name: Optional[str] = None,
process_input: Optional[Callable] = None,
process_output: Optional[Callable] = None,
) -> Union[Callable, Callable[[Callable], Callable]]:
"""
Decorator for creating LLM spans.
Args:
wrapped_function: Function to wrap (when used without parentheses)
name (Optional[str]): Custom span name (defaults to function name)
process_input (Optional[Callable]): Custom input processing function
process_output (Optional[Callable]): Custom output processing function
Returns:
Decorated function that creates LLM spans
"""Usage Example:
@tracer.llm
def simple_llm_call(prompt: str) -> str:
return llm_client.generate(prompt)
# With custom processing
def process_llm_input(*args, **kwargs):
return {"llm.model_name": "gpt-4", "custom.metric": len(args)}
def process_llm_output(output):
return {"llm.token_count.total": count_tokens(output)}
@tracer.llm(
name="advanced-llm",
process_input=process_llm_input,
process_output=process_llm_output
)
def advanced_llm_call(messages: list) -> str:
return llm_client.chat(messages)
# Supports generators for streaming
@tracer.llm
def streaming_llm(prompt: str):
for chunk in llm_client.stream(prompt):
yield chunk
# Supports async generators
@tracer.llm
async def async_streaming_llm(prompt: str):
async for chunk in llm_client.async_stream(prompt):
yield chunk

Enhanced span wrapper with OpenInference-specific methods.
class OpenInferenceSpan:
"""
OpenInference span wrapper with enhanced attribute handling.
"""
def set_input(
self,
value: Any,
*,
mime_type: Optional[OpenInferenceMimeType] = None,
) -> None:
"""
Set input attributes on the span.
Args:
value: Input value
mime_type (Optional[OpenInferenceMimeType]): MIME type for the input
"""
def set_output(
self,
value: Any,
*,
mime_type: Optional[OpenInferenceMimeType] = None,
) -> None:
"""
Set output attributes on the span.
Args:
value: Output value
mime_type (Optional[OpenInferenceMimeType]): MIME type for the output
"""
def set_tool(
self,
*,
name: str,
description: Optional[str] = None,
parameters: Union[str, Dict[str, Any]],
) -> None:
"""
Set tool attributes on the span.
Args:
name (str): Tool name
description (Optional[str]): Tool description
parameters (Union[str, Dict[str, Any]]): Tool parameters schema
"""Usage Example:
with tracer.start_as_current_span("manual-span", openinference_span_kind="tool") as span:
# Set input
span.set_input({"query": "search term"}, mime_type="application/json")
# Perform operation
result = search_operation()
# Set output
span.set_output(result)
# Set tool information
span.set_tool(
name="search-tool",
description="Searches the knowledge base",
parameters={"query": {"type": "string"}}
)

All decorators support:
- async def function decoration
- yield values (generator functions)
- async def functions that yield (async generators)

Install with Tessl CLI
npx tessl i tessl/pypi-openinference-instrumentation