Open Neural Network Exchange (ONNX) utilities for AI model interoperability across machine learning frameworks.

Convert between ONNX protocol buffer representations and human-readable text formats for debugging, serialization, and model inspection. This module enables working with ONNX models in textual form for better readability and easier debugging.
Parse human-readable text representations into ONNX protocol buffer objects.
def parse_model(model_text):
    """
    Parse a text representation into a ModelProto.

    Parameters:
    - model_text: String containing the text representation of an ONNX model

    Returns:
    ModelProto: Parsed model object

    Raises:
    ParseError: If the text cannot be parsed or contains syntax errors
    """
def parse_graph(graph_text):
    """
    Parse a text representation into a GraphProto.

    Parameters:
    - graph_text: String containing the text representation of an ONNX graph

    Returns:
    GraphProto: Parsed graph object

    Raises:
    ParseError: If the text cannot be parsed or contains syntax errors
    """
def parse_function(function_text):
    """
    Parse a text representation into a FunctionProto.

    Parameters:
    - function_text: String containing the text representation of an ONNX function

    Returns:
    FunctionProto: Parsed function object

    Raises:
    ParseError: If the text cannot be parsed or contains syntax errors
    """
def parse_node(node_text):
    """
    Parse a text representation into a NodeProto.

    Parameters:
    - node_text: String containing the text representation of an ONNX node

    Returns:
    NodeProto: Parsed node object

    Raises:
    ParseError: If the text cannot be parsed or contains syntax errors
    """

Convert ONNX protocol buffer objects to human-readable text representations.
def to_text(proto):
    """
    Convert an ONNX proto to its text representation.

    Parameters:
    - proto: ONNX protocol buffer object (ModelProto, GraphProto, FunctionProto, or NodeProto)

    Returns:
    str: Human-readable text representation

    Raises:
    ValueError: If the proto type is not supported for text conversion
    """

Exception types for text processing errors.
class ParseError(Exception):
    """
    Exception raised when text parsing fails.

    Carries detailed information about parsing errors, including
    line numbers and the specific syntax issue encountered.
    """

import onnx
from onnx import printer, parser

# Load a binary model from disk
model = onnx.load_model("example_model.onnx")

# Convert to a human-readable text representation
model_text = printer.to_text(model)
print("Model in text format:")
# Conditional expression covers the whole argument: long texts are
# truncated to 500 chars with "..."; short texts print unchanged.
print(model_text[:500] + "..." if len(model_text) > 500 else model_text)

# Save the text representation to a file
with open("model.txt", "w") as f:
    f.write(model_text)

# Parse the text back into a ModelProto
try:
    parsed_model = parser.parse_model(model_text)
    # Verify the round-tripped model is still valid
    onnx.checker.check_model(parsed_model)
    print("Text parsing and conversion successful!")
except parser.ParseError as e:
    print(f"Parse error: {e}")

import onnx
from onnx import printer, helper, TensorProto
import numpy as np
# Create a simple graph for demonstration
def create_debug_graph():
    """Build a small Conv -> Relu -> GlobalAveragePool -> Flatten -> MatMul
    model used to demonstrate text-format inspection."""
    input_vi = helper.make_tensor_value_info('input', TensorProto.FLOAT, [1, 3, 224, 224])
    output_vi = helper.make_tensor_value_info('output', TensorProto.FLOAT, [1, 1000])

    # Random weight initializers for the conv and fully-connected layers
    w_conv = np.random.randn(64, 3, 7, 7).astype(np.float32)
    w_fc = np.random.randn(64, 1000).astype(np.float32)
    initializers = [
        helper.make_tensor('conv_weight', TensorProto.FLOAT, w_conv.shape, w_conv),
        helper.make_tensor('fc_weight', TensorProto.FLOAT, w_fc.shape, w_fc),
    ]

    # Node chain, wired by matching input/output names
    nodes = [
        helper.make_node(
            'Conv', ['input', 'conv_weight'], ['conv_out'],
            kernel_shape=[7, 7], strides=[2, 2], pads=[3, 3, 3, 3],
        ),
        helper.make_node('Relu', ['conv_out'], ['relu_out']),
        helper.make_node('GlobalAveragePool', ['relu_out'], ['pool_out']),
        helper.make_node('Flatten', ['pool_out'], ['flat_out']),
        helper.make_node('MatMul', ['flat_out', 'fc_weight'], ['output']),
    ]

    graph = helper.make_graph(nodes, 'debug_model', [input_vi], [output_vi], initializers)
    return helper.make_model(graph)
# Create and inspect model
debug_model = create_debug_graph()

# Print different components of the same model via printer.to_text
print("=== FULL MODEL ===")
model_text = printer.to_text(debug_model)
print(model_text)

print("\n=== GRAPH ONLY ===")
graph_text = printer.to_text(debug_model.graph)
print(graph_text)

# Each node can also be rendered individually
print("\n=== INDIVIDUAL NODES ===")
for i, node in enumerate(debug_model.graph.node):
    node_text = printer.to_text(node)
    print(f"Node {i} ({node.op_type}):")
    print(node_text)
    print()

import onnx
from onnx import printer, parser
def interactive_node_editor(model_path):
    """Interactive tool for editing model nodes via their text representation.

    Parameters:
    - model_path: Path to the ONNX model file to edit

    Repeatedly prompts for a node index, shows that node in text form,
    accepts a replacement node definition, splices it into the graph, and
    validates the modified model. Optionally saves the result at the end.
    """
    model = onnx.load_model(model_path)
    print(f"Model has {len(model.graph.node)} nodes:")
    for i, node in enumerate(model.graph.node):
        print(f"  {i}: {node.op_type} ({node.name or 'unnamed'})")

    while True:
        try:
            node_idx = input("\nEnter node index to edit (or 'q' to quit): ")
            if node_idx.lower() == 'q':
                break
            node_idx = int(node_idx)
            if node_idx < 0 or node_idx >= len(model.graph.node):
                print("Invalid node index")
                continue

            # Show the current node in text form
            current_node = model.graph.node[node_idx]
            node_text = printer.to_text(current_node)
            print(f"\nCurrent node {node_idx}:")
            print(node_text)

            # Get the replacement definition
            print("\nEnter new node definition (or press Enter to skip):")
            new_text = input()
            if not new_text.strip():
                continue

            # Parse the new node and splice it into the graph in place
            try:
                new_node = parser.parse_node(new_text)
                model.graph.node[node_idx].CopyFrom(new_node)
                print("Node updated successfully!")
                # Re-validate the whole model after the edit
                onnx.checker.check_model(model)
                print("Model validation passed!")
            except parser.ParseError as e:
                print(f"Parse error: {e}")
            except onnx.checker.ValidationError as e:
                print(f"Validation error: {e}")
        except ValueError:
            # Non-integer index entered; prompt again.
            print("Invalid input or interrupted")
            continue
        except KeyboardInterrupt:
            # BUG FIX: the original caught KeyboardInterrupt together with
            # ValueError and continued, so Ctrl-C could never leave the loop.
            # Exit the edit loop instead (the save prompt below still runs).
            print("Invalid input or interrupted")
            break

    # Save the modified model if a path is given
    save_path = input("\nEnter path to save modified model (or press Enter to skip): ")
    if save_path.strip():
        onnx.save_model(model, save_path)
        print(f"Model saved to {save_path}")

# Example usage (commented out)
# interactive_node_editor("model.onnx")

import onnx
from onnx import printer
import difflib
def compare_models_text(model1_path, model2_path):
    """Compare two ONNX models by diffing their text representations."""
    first = onnx.load_model(model1_path)
    second = onnx.load_model(model2_path)

    # Render both models as text, split into lines for difflib
    lines_a = printer.to_text(first).splitlines()
    lines_b = printer.to_text(second).splitlines()

    # Unified diff with 3 lines of context
    delta = list(difflib.unified_diff(
        lines_a,
        lines_b,
        fromfile=model1_path,
        tofile=model2_path,
        n=3,
    ))

    if not delta:
        print("Models are identical in structure")
    else:
        print(f"Differences between {model1_path} and {model2_path}:")
        for entry in delta:
            print(entry)

# Example usage (commented out)
# compare_models_text("model_v1.onnx", "model_v2.onnx")

import onnx
from onnx import parser
def create_model_from_template():
    """Create models using protobuf-style text templates.

    Prints a template illustrating the textual structure of an ONNX
    ModelProto. This is a demonstration only — the template is not
    actually parsed, and raw_data holds placeholder bytes.
    """
    # Define a model template (protobuf text-format style).
    # NOTE(review): the template content is kept verbatim; raw_data is a
    # placeholder — a real model needs genuine tensor bytes.
    model_template = '''
ir_version: 7
producer_name: "text-template"
graph {
name: "linear_model"
input {
name: "input"
type {
tensor_type {
elem_type: 1
shape {
dim { dim_value: 1 }
dim { dim_value: 784 }
}
}
}
}
output {
name: "output"
type {
tensor_type {
elem_type: 1
shape {
dim { dim_value: 1 }
dim { dim_value: 10 }
}
}
}
}
initializer {
name: "weight"
data_type: 1
dims: 784
dims: 10
raw_data: "\\000\\000\\000..."
}
node {
input: "input"
input: "weight"
output: "output"
op_type: "MatMul"
}
}
opset_import {
version: 14
}
'''
    try:
        # Parse the template (this is a simplified example)
        # In practice, you'd need proper tensor data
        print("Model template:")
        print(model_template)
        # For a real implementation, you'd parse actual protobuf text format
        print("Note: This is a demonstration of the text format structure")
        print("Real implementation would require proper protobuf text parsing")
    except Exception as e:
        print(f"Template parsing error: {e}")

# Demonstrate text template structure
create_model_from_template()

import onnx
from onnx import parser, printer
def validate_text_roundtrip(model_path):
    """Validate that text conversion preserves model integrity."""
    # Load the original binary model
    source_model = onnx.load_model(model_path)
    print("Original model loaded successfully")

    # Render it as text
    text_form = printer.to_text(source_model)
    print(f"Model converted to text ({len(text_form)} characters)")

    try:
        # Reconstruct a model from the text form
        rebuilt_model = parser.parse_model(text_form)
        print("Text parsed back to model successfully")

        # Both the source and the rebuilt model must pass the checker
        onnx.checker.check_model(source_model)
        onnx.checker.check_model(rebuilt_model)
        print("Both models pass validation")

        src_graph, new_graph = source_model.graph, rebuilt_model.graph
        print("\nComparison:")
        print(f"  Nodes: {len(src_graph.node)} vs {len(new_graph.node)}")
        print(f"  Inputs: {len(src_graph.input)} vs {len(new_graph.input)}")
        print(f"  Outputs: {len(src_graph.output)} vs {len(new_graph.output)}")
        print(f"  Initializers: {len(src_graph.initializer)} vs {len(new_graph.initializer)}")

        # Structure is preserved when node/input/output counts all match
        same_structure = all(
            len(getattr(src_graph, field)) == len(getattr(new_graph, field))
            for field in ("node", "input", "output")
        )
        if same_structure:
            print("✓ Text roundtrip preserves model structure")
        else:
            print("✗ Text roundtrip changed model structure")
    except parser.ParseError as e:
        print(f"✗ Parse error during roundtrip: {e}")
    except onnx.checker.ValidationError as e:
        print(f"✗ Validation error during roundtrip: {e}")
# Example usage (commented out)
# validate_text_roundtrip("model.onnx")

Install with the Tessl CLI:

npx tessl i tessl/pypi-onnx