OpenInference instrumentation utilities for tracking application context such as sessions, users, and custom metadata using Python context managers
npx @tessl/cli install tessl/pypi-openinference-instrumentation@0.1.0

OpenInference Instrumentation is a Python library that provides utility functions for OpenInference instrumentation, enabling developers to track application context such as sessions, users, and custom metadata using Python context managers. The library integrates with the OpenTelemetry ecosystem and supports the auto-instrumentors for various AI/ML frameworks and services.
pip install openinference-instrumentation

from openinference.instrumentation import (
    using_session, using_user, using_metadata, using_attributes, using_tags, using_prompt_template,
    TraceConfig, suppress_tracing, TracerProvider, OITracer, REDACTED_VALUE,
    capture_span_context, dangerously_using_project, safe_json_dumps,
    get_attributes_from_context,
)

from openinference.instrumentation import using_attributes, TraceConfig, suppress_tracing
# Track a conversation session with metadata
metadata = {"user_type": "premium", "region": "us-west"}
tags = ["chatbot", "customer_support"]

with using_attributes(
    session_id="session-123",
    user_id="user-456",
    metadata=metadata,
    tags=tags,
):
    # All spans created within this context will include:
    # - session.id = "session-123"
    # - user.id = "user-456"
    # - metadata = JSON-serialized metadata
    # - tag.tags = ["chatbot", "customer_support"]
    your_llm_call()

# Configure data privacy settings
config = TraceConfig(
    hide_inputs=True,
    hide_llm_invocation_parameters=True,
)

# Suppress tracing for sensitive operations
with suppress_tracing():
    # No tracing will occur within this block
    sensitive_operation()

OpenInference Instrumentation is built around several key concepts:
Python context managers for tracking sessions, users, metadata, tags, and prompt templates. These utilities enable comprehensive span customization in OpenTelemetry-based tracing systems.
class using_session:
    def __init__(self, session_id: str) -> None: ...

class using_user:
    def __init__(self, user_id: str) -> None: ...

class using_metadata:
    def __init__(self, metadata: Dict[str, Any]) -> None: ...

class using_attributes:
    def __init__(
        self,
        *,
        session_id: str = "",
        user_id: str = "",
        metadata: Optional[Dict[str, Any]] = None,
        tags: Optional[List[str]] = None,
        prompt_template: str = "",
        prompt_template_version: str = "",
        prompt_template_variables: Optional[Dict[str, Any]] = None,
    ) -> None: ...
TraceConfig class for controlling tracing configuration, including data privacy settings and payload size limits, plus utilities for suppressing tracing when needed.

@dataclass(frozen=True)
class TraceConfig:
    hide_llm_invocation_parameters: Optional[bool] = None
    hide_inputs: Optional[bool] = None
    hide_outputs: Optional[bool] = None
    hide_input_messages: Optional[bool] = None
    hide_output_messages: Optional[bool] = None
    hide_embedding_vectors: Optional[bool] = None
    base64_image_max_length: Optional[int] = None

    def mask(self, key: str, value: Union[AttributeValue, Callable[[], AttributeValue]]) -> Optional[AttributeValue]: ...

class suppress_tracing:
    def __enter__(self) -> "suppress_tracing": ...
    def __exit__(self, exc_type, exc_value, traceback) -> None: ...
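The mask method is what instrumentors call for each attribute: depending on the configuration it passes the value through, replaces it with REDACTED_VALUE, or drops it, and callable values let expensive attributes be computed only when they will actually be recorded. Fields left as None are typically resolved from environment variables (for example OPENINFERENCE_HIDE_INPUTS) or library defaults. A small sketch of the intended behaviour; exactly which keys each hide_* flag affects is an assumption here:

from openinference.instrumentation import REDACTED_VALUE, TraceConfig, suppress_tracing

config = TraceConfig(hide_inputs=True)

# Assumption: with hide_inputs=True, input payload keys are redacted while
# unrelated keys pass through unchanged.
print(config.mask("input.value", "user question with PII"))  # expected: REDACTED_VALUE
print(config.mask("llm.model_name", "gpt-4o"))               # expected: "gpt-4o"

# suppress_tracing guards code paths that must never emit spans.
with suppress_tracing():
    sensitive_operation()  # defined elsewhere, as in the quickstart above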
Custom TracerProvider and OITracer implementations with decorator support for creating OpenInference-compliant spans with automatic attribute handling.

class TracerProvider(OTelTracerProvider):
    def __init__(self, *args: Any, config: Optional[TraceConfig] = None, **kwargs: Any) -> None: ...
    def get_tracer(self, *args: Any, **kwargs: Any) -> OITracer: ...

class OITracer:
    def start_span(self, name: str, *, openinference_span_kind: Optional[OpenInferenceSpanKind] = None, **kwargs) -> OpenInferenceSpan: ...
    def start_as_current_span(self, name: str, *, openinference_span_kind: Optional[OpenInferenceSpanKind] = None, **kwargs) -> Iterator[OpenInferenceSpan]: ...
    def agent(self, wrapped_function=None, /, *, name: Optional[str] = None): ...
    def chain(self, wrapped_function=None, /, *, name: Optional[str] = None): ...
    def tool(self, wrapped_function=None, /, *, name: Optional[str] = None, description: Optional[str] = None, parameters: Optional[Union[str, Dict[str, Any]]] = None): ...
    def llm(self, wrapped_function=None, /, *, name: Optional[str] = None, process_input=None, process_output=None): ...
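The decorators are the usual entry point: obtain an OITracer from the custom TracerProvider and wrap functions so each call becomes a span of the corresponding OpenInference kind. A minimal sketch (exporter and span-processor wiring is standard OpenTelemetry and omitted); passing the span kind as a plain string and the set_output call on the returned span are assumptions:

from openinference.instrumentation import TraceConfig, TracerProvider

tracer_provider = TracerProvider(config=TraceConfig(hide_inputs=False))
tracer = tracer_provider.get_tracer(__name__)

@tracer.chain
def summarize(text: str) -> str:
    # Recorded as a CHAIN span; inputs and outputs are captured automatically,
    # subject to the TraceConfig passed to the provider above.
    return text[:100]

@tracer.tool(description="Look up the current weather for a city.")
def get_weather(city: str) -> str:
    return f"Sunny in {city}"

# Spans can also be opened manually when there is no convenient function boundary.
with tracer.start_as_current_span(
    "retrieve-docs",
    openinference_span_kind="retriever",  # assumption: kinds can be given as strings
) as span:
    span.set_output("3 documents")  # assumption: OpenInferenceSpan exposes set_output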
Comprehensive set of functions for generating OpenInference-compliant span attributes for different types of operations (LLM, embedding, retrieval, etc.).

def get_llm_attributes(
    *,
    provider: Optional[OpenInferenceLLMProvider] = None,
    model_name: Optional[str] = None,
    invocation_parameters: Optional[Union[str, Dict[str, Any]]] = None,
    input_messages: Optional[Sequence[Message]] = None,
    output_messages: Optional[Sequence[Message]] = None,
    token_count: Optional[TokenCount] = None,
) -> Dict[str, AttributeValue]: ...

def get_input_attributes(value: Any, *, mime_type: Optional[OpenInferenceMimeType] = None) -> Dict[str, AttributeValue]: ...

def get_output_attributes(value: Any, *, mime_type: Optional[OpenInferenceMimeType] = None) -> Dict[str, AttributeValue]: ...
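These helpers return plain dictionaries keyed by the OpenInference semantic conventions, so they can be merged and applied to any OpenTelemetry span via set_attributes. A sketch built from the signatures above; passing the provider as a plain string is an assumption, and the message and token-count shapes follow the TypedDicts defined below:

from openinference.instrumentation import (
    get_input_attributes,
    get_llm_attributes,
    get_output_attributes,
)

llm_attributes = get_llm_attributes(
    provider="openai",  # assumption: the provider can be given as a plain string
    model_name="gpt-4o-mini",
    invocation_parameters={"temperature": 0.1},
    input_messages=[{"role": "user", "content": "Hello!"}],
    output_messages=[{"role": "assistant", "content": "Hi there."}],
    token_count={"prompt": 9, "completion": 4, "total": 13},
)

# Merge with generic input/output attributes and attach to any span, e.g.
# span.set_attributes(attributes) inside a start_as_current_span block.
attributes = {
    **get_input_attributes("Hello!"),
    **get_output_attributes("Hi there."),
    **llm_attributes,
}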
Complete TypedDict definitions for all data structures used in OpenInference tracing, ensuring type safety and proper structure validation.

class Message(TypedDict, total=False):
    role: str
    content: str
    contents: Sequence[MessageContent]
    tool_call_id: str
    tool_calls: Sequence[ToolCall]

class TokenCount(TypedDict, total=False):
    prompt: int
    completion: int
    total: int
    prompt_details: PromptDetails

class Document(TypedDict, total=False):
    content: str
    id: Union[str, int]
    metadata: Union[str, Dict[str, Any]]
    score: float
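Because these TypedDicts are declared with total=False, every field is optional and instances are ordinary dictionaries at runtime; the definitions mainly give static type checkers something to validate. A short sketch, assuming the types are importable from the top-level package as the helper signatures above suggest:

from openinference.instrumentation import Document, Message, TokenCount

# Plain dict literals type-check against the TypedDict definitions above.
message: Message = {"role": "user", "content": "Which docs mention tracing?"}
usage: TokenCount = {"prompt": 12, "completion": 30, "total": 42}
doc: Document = {
    "id": "doc-7",
    "content": "OpenInference semantic conventions for traces...",
    "score": 0.83,
    "metadata": {"source": "docs/"},
}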
Helper functions and utilities for JSON serialization, span context capture, and project management.

def safe_json_dumps(obj: Any, **kwargs: Any) -> str: ...

class capture_span_context:
    def __init__(self) -> None: ...
    def get_first_span_id(self) -> Optional[str]: ...
    def get_last_span_id(self) -> Optional[str]: ...
    def get_span_contexts(self) -> Sequence[SpanContext]: ...

class dangerously_using_project:
    def __init__(self, project_name: str) -> None: ...
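A short sketch of these helpers; using capture_span_context as a context manager that records spans started inside it is an assumption suggested by its methods, and your_llm_call stands in for any instrumented call as in the quickstart above:

import datetime

from openinference.instrumentation import (
    capture_span_context,
    dangerously_using_project,
    safe_json_dumps,
)

# safe_json_dumps serializes values that plain json.dumps would reject (here a
# datetime) by falling back to a string representation instead of raising.
print(safe_json_dumps({"started_at": datetime.datetime.now()}))

# Assumption: capture_span_context records the span contexts of spans started
# inside the block, e.g. so feedback or evaluations can be linked back to a span.
with capture_span_context() as capture:
    your_llm_call()
    first_span_id = capture.get_first_span_id()

# Route spans to a different project; the "dangerously_" prefix warns that
# switching projects at runtime can split related traces across projects.
with dangerously_using_project("experiments"):
    your_llm_call()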