OpenTelemetry instrumentation for AWS Bedrock runtime services providing automatic tracing, metrics, and event emission for AI model invocations
—
Comprehensive event models and emission functions for capturing AI model interactions as structured OpenTelemetry events. The event system provides detailed visibility into input messages and completion responses when semantic conventions are enabled.
Structured data models representing AI model interactions, following OpenTelemetry semantic conventions for AI observability.
@dataclass
class MessageEvent:
    """
    Represents input messages sent to AI models.

    Used for capturing user prompts, system messages, and tool interactions
    in a structured format compatible with OpenTelemetry event logging.
    """

    content: Any
    """Message content (text, structured data, or tool inputs)"""
    role: str = "user"
    """Message role: 'user', 'assistant', 'system', or 'tool'"""
    # Quoted forward reference: ToolCall is declared later in this module,
    # so an unquoted annotation would raise NameError when the dataclass
    # is created at import time.
    tool_calls: "List[ToolCall] | None" = None
    """Optional list of tool/function calls associated with this message"""
@dataclass
class ChoiceEvent:
    """
    Represents AI model completion responses.

    Captures model-generated content, completion metadata, and any
    tool calls made by the model during response generation.
    """

    index: int
    """Choice index in multi-choice responses (typically 0 for single responses)"""
    # Quoted forward references: CompletionMessage and ToolCall are declared
    # later in this module; unquoted annotations would raise NameError when
    # the dataclass is created at import time.
    message: "CompletionMessage"
    """The completion message content and metadata"""
    finish_reason: str = "unknown"
    """Reason completion finished: 'stop', 'length', 'tool_calls', etc."""
    tool_calls: "List[ToolCall] | None" = None
    """Optional list of tool/function calls made by the model"""


# Supporting type definitions for event data structures.
class _FunctionToolCall(TypedDict):
    """
    Represents function call details in tool invocations.

    Contains the function name and arguments for AI model
    tool calling capabilities.

    NOTE(review): the usage example later in this document builds the
    function payload as ``{"name": ..., "arguments": "<json string>"}``,
    while this definition declares ``function_name`` and a dict for
    ``arguments`` — confirm which shape the emitters actually consume.
    """

    function_name: str
    """Name of the function to call"""
    arguments: Optional[dict[str, Any]]
    """Function arguments as key-value pairs"""
class ToolCall(TypedDict):
    """
    Represents a tool or function call in AI model interactions.

    Used to capture when models invoke external tools or functions
    as part of their response generation process.
    """

    id: str
    """Unique identifier for this tool call"""
    function: _FunctionToolCall
    """Function call details including name and arguments"""
    type: Literal["function"]
    """Type of tool call (currently only 'function' is supported)"""
class CompletionMessage(TypedDict):
    """
    Represents the structure of completion messages from AI models.

    Contains the actual response content and metadata about the model's
    role in the conversation. Note: TypedDict cannot declare default
    values, so 'role' must always be provided explicitly by callers.
    """

    content: Any
    """Message content (text or structured response)"""
    role: str
    """Message role (typically 'assistant' for model responses)"""
class Roles(Enum):
    """
    Valid message roles for AI model interactions.

    Defines the standard roles used in conversational AI systems,
    following common industry conventions.
    """

    USER = "user"
    """Human user input messages"""
    ASSISTANT = "assistant"
    """AI model response messages"""
    SYSTEM = "system"
    """System-level instructions and context"""
    TOOL = "tool"
    """Tool or function execution results"""

Functions for emitting structured events to OpenTelemetry event loggers, providing comprehensive visibility into AI model interactions.
def emit_message_events(event_logger, kwargs) -> None:
    """
    Emit input message events to an OpenTelemetry event logger.

    Extracts and emits structured events for all input messages
    sent to AI models, including user prompts and system messages.

    Args:
        event_logger: OpenTelemetry EventLogger instance.
        kwargs: Request parameters containing the input messages.
    """
def emit_choice_events(event_logger, response) -> None:
    """
    Emit choice/completion events to an OpenTelemetry event logger.

    Extracts and emits structured events for AI model responses,
    including generated content and completion metadata.

    Args:
        event_logger: OpenTelemetry EventLogger instance.
        response: Model response containing completion choices.
    """
def emit_input_events_converse(kwargs, event_logger) -> None:
    """
    Emit input events for Bedrock converse API calls.

    Specialized event emission for the modern Bedrock converse API,
    handling the conversation format and message structure.

    Args:
        kwargs: Converse API request parameters.
        event_logger: OpenTelemetry EventLogger instance.
    """
def emit_response_event_converse(response, event_logger) -> None:
    """
    Emit response events for Bedrock converse API responses.

    Handles response event emission for the converse API format,
    including message content and conversation metadata.

    Args:
        response: Converse API response object.
        event_logger: OpenTelemetry EventLogger instance.
    """
def emit_streaming_response_event(response_body, event_logger) -> None:
    """
    Emit events for streaming model responses.

    Processes and emits events for streaming responses from
    invoke_model_with_response_stream calls.

    Args:
        response_body: Accumulated streaming response content.
        event_logger: OpenTelemetry EventLogger instance.
    """
def emit_streaming_converse_response_event(
    event_logger,
    response_msg,
    role,
    finish_reason
) -> None:
    """
    Emit events for streaming converse API responses.

    Handles event emission for streaming responses from the
    converse_stream API, including role and completion metadata.

    Args:
        event_logger: OpenTelemetry EventLogger instance.
        response_msg: Accumulated response message content.
        role: Message role (typically 'assistant').
        finish_reason: Reason the stream completed.
    """
def emit_event(event: Union[MessageEvent, ChoiceEvent], event_logger) -> None:
    """
    Generic event emission function.

    Low-level function for emitting structured events to OpenTelemetry.
    Used internally by the other emission functions.

    Args:
        event: MessageEvent or ChoiceEvent to emit.
        event_logger: OpenTelemetry EventLogger instance.
    """

Functions for determining when events should be emitted based on configuration.
def should_emit_events() -> bool:
    """
    Check if event emission is enabled.

    Returns:
        bool: Whether structured events should be emitted, based on the
        use_legacy_attributes configuration setting.
    """

from opentelemetry._events import get_event_logger
from opentelemetry.instrumentation.bedrock.event_models import MessageEvent, ChoiceEvent
from opentelemetry.instrumentation.bedrock.event_emitter import emit_event

# Get an event logger scoped to this module
event_logger = get_event_logger(__name__)

# Create and emit an input-message event
message_event = MessageEvent(
    content="What is the capital of France?",
    role="user"
)
emit_event(message_event, event_logger)

# Create and emit a completion (choice) event
choice_event = ChoiceEvent(
    index=0,
    message={"content": "The capital of France is Paris.", "role": "assistant"},
    finish_reason="stop"
)
emit_event(choice_event, event_logger)

Events are automatically emitted when semantic conventions are enabled:
from opentelemetry.instrumentation.bedrock import BedrockInstrumentor
# Enable semantic conventions (disables legacy attributes)
BedrockInstrumentor(use_legacy_attributes=False).instrument()
# Events will be automatically emitted for all Bedrock API callsfrom opentelemetry.instrumentation.bedrock.event_models import MessageEvent, ToolCall
# Message with tool calls
# NOTE(review): _FunctionToolCall declares 'function_name' with a dict of
# arguments, but this payload uses 'name' and a JSON string — confirm
# which shape the emitters expect before copying this example.
tool_call = {
    "id": "call_123",
    "function": {"name": "get_weather", "arguments": '{"location": "Paris"}'},
    "type": "function"
}
message_event = MessageEvent(
    content="I need to check the weather in Paris",
    role="user",
    tool_calls=[tool_call]
)

For streaming responses, events are emitted when the stream completes:
# Streaming events are handled automatically by the instrumentation
# and emitted when the stream finishes processing
import boto3
from opentelemetry.instrumentation.bedrock import BedrockInstrumentor

BedrockInstrumentor(use_legacy_attributes=False).instrument()
client = boto3.client('bedrock-runtime', region_name='us-east-1')

# Events will be automatically emitted as the stream completes
response = client.invoke_model_with_response_stream(
    modelId='anthropic.claude-3-sonnet-20240229-v1:0',
    body='{"messages": [{"role": "user", "content": "Hello"}]}'
)

# Process the stream — events are emitted automatically
for event in response['body']:
    # Process streaming data
    pass

Events follow OpenTelemetry semantic conventions for AI observability:
{
"name": "gen_ai.content.prompt",
"body": {
"content": "What is the capital of France?",
"role": "user",
"tool_calls": []
},
"attributes": {
"gen_ai.system": "bedrock",
"gen_ai.request.model": "anthropic.claude-3-sonnet-20240229-v1:0"
}
}

{
"name": "gen_ai.content.completion",
"body": {
"index": 0,
"message": {
"content": "The capital of France is Paris.",
"role": "assistant"
},
"finish_reason": "stop"
},
"attributes": {
"gen_ai.system": "bedrock",
"gen_ai.response.model": "anthropic.claude-3-sonnet-20240229-v1:0"
}
}

Event emission is controlled by the `use_legacy_attributes` setting:
This allows gradual migration from legacy attribute-based observability to modern event-based observability patterns.
Install with Tessl CLI
npx tessl i tessl/pypi-opentelemetry-instrumentation-bedrock