OpenInference instrumentation utilities for tracking application metadata such as sessions, users, and custom metadata using Python context managers
—
Comprehensive set of functions for generating OpenInference-compliant span attributes for different types of operations including LLM interactions, embeddings, retrievals, and general input/output handling.
Generate comprehensive attributes for Large Language Model interactions.
def get_llm_attributes(
*,
provider: Optional[OpenInferenceLLMProvider] = None,
system: Optional[OpenInferenceLLMSystem] = None,
model_name: Optional[str] = None,
invocation_parameters: Optional[Union[str, Dict[str, Any]]] = None,
input_messages: Optional[Sequence[Message]] = None,
output_messages: Optional[Sequence[Message]] = None,
token_count: Optional[TokenCount] = None,
tools: Optional[Sequence[Tool]] = None,
) -> Dict[str, AttributeValue]:
"""
Generate LLM-related span attributes.
Args:
provider: LLM provider (e.g., "openai", "anthropic")
system: LLM system (e.g., "openai", "claude")
model_name: Model identifier (e.g., "gpt-4", "claude-3")
invocation_parameters: Model parameters (temperature, max_tokens, etc.)
input_messages: Input messages to the LLM
output_messages: Output messages from the LLM
token_count: Token usage information
tools: Available tools for the LLM
Returns:
Dict[str, AttributeValue]: OpenInference-compliant attributes
"""Usage Example:
from openinference.instrumentation import get_llm_attributes
# Complete LLM interaction attributes
attributes = get_llm_attributes(
provider="openai",
model_name="gpt-4",
invocation_parameters={"temperature": 0.7, "max_tokens": 150},
input_messages=[{"role": "user", "content": "Hello!"}],
output_messages=[{"role": "assistant", "content": "Hi there!"}],
token_count={"prompt": 10, "completion": 15, "total": 25}
)
span.set_attributes(attributes)

Individual functions for specific LLM attribute components.
def get_llm_provider_attributes(
provider: Optional[OpenInferenceLLMProvider],
) -> Mapping[str, AttributeValue]:
"""Generate LLM provider attributes."""
def get_llm_system_attributes(
system: Optional[OpenInferenceLLMSystem],
) -> Mapping[str, AttributeValue]:
"""Generate LLM system attributes."""
def get_llm_model_name_attributes(
model_name: Optional[str],
) -> Mapping[str, AttributeValue]:
"""Generate LLM model name attributes."""
def get_llm_invocation_parameter_attributes(
invocation_parameters: Optional[Union[str, Dict[str, Any]]],
) -> Mapping[str, AttributeValue]:
"""Generate LLM invocation parameter attributes."""
def get_llm_input_message_attributes(
messages: Optional[Sequence[Message]],
) -> Mapping[str, AttributeValue]:
"""Generate LLM input message attributes."""
def get_llm_output_message_attributes(
messages: Optional[Sequence[Message]],
) -> Mapping[str, AttributeValue]:
"""Generate LLM output message attributes."""
def get_llm_token_count_attributes(
token_count: Optional[TokenCount],
) -> Mapping[str, AttributeValue]:
"""Generate LLM token count attributes."""
def get_llm_tool_attributes(
tools: Optional[Sequence[Tool]],
) -> Mapping[str, AttributeValue]:
"""Generate LLM tool attributes."""Usage Example:
# Generate specific attribute groups
provider_attrs = get_llm_provider_attributes("openai")
model_attrs = get_llm_model_name_attributes("gpt-4")
token_attrs = get_llm_token_count_attributes({"total": 100})
# Combine as needed
combined = {**provider_attrs, **model_attrs, **token_attrs}

Generate attributes for general input and output values.
def get_input_attributes(
value: Any,
*,
mime_type: Optional[OpenInferenceMimeType] = None,
) -> Dict[str, AttributeValue]:
"""
Generate input-related span attributes.
Args:
value: Input value (automatically serialized based on type)
mime_type: MIME type override ("text/plain" or "application/json")
Returns:
Dict[str, AttributeValue]: Input attributes including value and MIME type
"""
def get_output_attributes(
value: Any,
*,
mime_type: Optional[OpenInferenceMimeType] = None,
) -> Dict[str, AttributeValue]:
"""
Generate output-related span attributes.
Args:
value: Output value (automatically serialized based on type)
mime_type: MIME type override ("text/plain" or "application/json")
Returns:
Dict[str, AttributeValue]: Output attributes including value and MIME type
"""Usage Example:
# Simple values
input_attrs = get_input_attributes("Hello, world!")
# Results in: {"input.value": "Hello, world!", "input.mime_type": "text/plain"}
# Complex data
output_data = {"result": [1, 2, 3], "status": "success"}
output_attrs = get_output_attributes(output_data)
# Results in: {"output.value": '{"result": [1, 2, 3], "status": "success"}', "output.mime_type": "application/json"}
# Custom MIME type
json_attrs = get_input_attributes('{"key": "value"}', mime_type="application/json")

Generate attributes for embedding operations.
def get_embedding_attributes(
*,
model_name: Optional[str] = None,
embeddings: Optional[List[Embedding]] = None,
) -> Dict[str, AttributeValue]:
"""
Generate embedding-related span attributes.
Args:
model_name: Embedding model name
embeddings: List of embeddings with text and vector data
Returns:
Dict[str, AttributeValue]: Embedding attributes
"""Usage Example:
embeddings_data = [
{"text": "Hello world", "vector": [0.1, 0.2, 0.3]},
{"text": "Goodbye", "vector": [0.4, 0.5, 0.6]}
]
embedding_attrs = get_embedding_attributes(
model_name="text-embedding-ada-002",
embeddings=embeddings_data
)

Generate attributes for document retrieval operations.
def get_retriever_attributes(
*,
documents: List[Document]
) -> Dict[str, AttributeValue]:
"""
Generate retriever-related span attributes.
Args:
documents: Retrieved documents with content, ID, metadata, and scores
Returns:
Dict[str, AttributeValue]: Retrieval attributes
"""Usage Example:
retrieved_docs = [
{
"content": "Document content here",
"id": "doc-123",
"metadata": {"source": "database", "timestamp": "2024-01-01"},
"score": 0.95
},
{
"content": "Another document",
"id": "doc-456",
"score": 0.87
}
]
retrieval_attrs = get_retriever_attributes(documents=retrieved_docs)

Generate attributes for document reranking operations.
def get_reranker_attributes(
*,
query: Optional[str] = None,
model_name: Optional[str] = None,
input_documents: Optional[List[Document]] = None,
output_documents: Optional[List[Document]] = None,
top_k: Optional[int] = None,
) -> Dict[str, AttributeValue]:
"""
Generate reranker-related span attributes.
Args:
query: Search query used for reranking
model_name: Reranker model name
input_documents: Documents before reranking
output_documents: Documents after reranking
top_k: Number of top documents to return
Returns:
Dict[str, AttributeValue]: Reranker attributes
"""Usage Example:
reranker_attrs = get_reranker_attributes(
query="machine learning algorithms",
model_name="cross-encoder/ms-marco-MiniLM-L-6-v2",
input_documents=candidate_docs,
output_documents=reranked_docs,
top_k=5
)

Generate attributes from context managers.
def get_context_attributes(
*,
session_id: Optional[str] = None,
user_id: Optional[str] = None,
metadata: Optional[Union[str, Dict[str, Any]]] = None,
tags: Optional[List[str]] = None,
) -> Dict[str, AttributeValue]:
"""
Generate context-related span attributes.
Args:
session_id: Session identifier
user_id: User identifier
metadata: Custom metadata (dict or JSON string)
tags: List of tags
Returns:
Dict[str, AttributeValue]: Context attributes
"""
def get_session_attributes(*, session_id: str) -> Dict[str, AttributeValue]:
"""Generate session attributes."""
def get_user_id_attributes(*, user_id: str) -> Dict[str, AttributeValue]:
"""Generate user ID attributes."""
def get_metadata_attributes(*, metadata: Union[str, Dict[str, Any]]) -> Dict[str, AttributeValue]:
"""Generate metadata attributes."""
def get_tag_attributes(*, tags: List[str]) -> Dict[str, AttributeValue]:
"""Generate tag attributes."""Usage Example:
# Combined context attributes
context_attrs = get_context_attributes(
session_id="session-123",
user_id="user-456",
metadata={"tier": "premium"},
tags=["important", "production"]
)
# Individual attribute types
session_attrs = get_session_attributes(session_id="session-abc")
user_attrs = get_user_id_attributes(user_id="user-xyz")
meta_attrs = get_metadata_attributes(metadata={"key": "value"})
tag_attrs = get_tag_attributes(tags=["tag1", "tag2"])

Generate attributes for tool operations.
def get_tool_attributes(
*,
name: str,
description: Optional[str] = None,
parameters: Union[str, Dict[str, Any]],
) -> Dict[str, AttributeValue]:
"""
Generate tool attributes.
Args:
name: Tool name
description: Tool description
parameters: Tool parameters (JSON schema dict or JSON string)
Returns:
Dict[str, AttributeValue]: Tool attributes
"""Usage Example:
tool_attrs = get_tool_attributes(
name="calculator",
description="Performs basic arithmetic operations",
parameters={
"type": "object",
"properties": {
"operation": {"type": "string", "enum": ["+", "-", "*", "/"]},
"a": {"type": "number"},
"b": {"type": "number"}
},
"required": ["operation", "a", "b"]
}
)

Generate span kind attributes for OpenInference span classification.
def get_span_kind_attributes(kind: OpenInferenceSpanKind) -> Dict[str, AttributeValue]:
"""
Generate span kind attributes.
Args:
kind: OpenInference span kind ("llm", "chain", "agent", "tool", etc.)
Returns:
Dict[str, AttributeValue]: Span kind attributes
"""Usage Example:
from openinference.semconv.trace import OpenInferenceSpanKindValues
# Using enum value
llm_kind_attrs = get_span_kind_attributes(OpenInferenceSpanKindValues.LLM)
# Using string (must be lowercase)
chain_kind_attrs = get_span_kind_attributes("chain")

The attribute generation functions automatically handle serialization:
- Strings are stored as-is with the text/plain MIME type
- Dictionaries and lists are JSON-serialized with the application/json MIME type
- Pydantic models are converted via model_dump() then JSON-serialized
- Other values are converted to strings with the text/plain MIME type

All attribute generation functions integrate with TraceConfig for privacy:
Install with Tessl CLI
npx tessl i tessl/pypi-openinference-instrumentation