Open Neural Network Exchange (ONNX): an open format for AI model interoperability across machine learning frameworks.

Core functions for loading and saving ONNX models from various sources, including files, streams, and binary data. These functions support multiple serialization formats and external data storage for large models. Models can be loaded from files, file-like objects, or string data, with support for external data loading and format detection.
def load_model(
    f: IO[bytes] | str | os.PathLike,
    format: _SupportedFormat | None = None,
    load_external_data: bool = True,
) -> ModelProto:
    """Load a serialized ModelProto into memory.

    Args:
        f: A file-like object (must expose a ``read`` method) or a
            string/PathLike naming the file to read.
        format: The serialization format. When not specified, it is inferred
            from the file extension when ``f`` is a path; if not specified
            *and* ``f`` is not a path, 'protobuf' is used. The encoding is
            assumed to be "utf-8" when the format is a text format.
        load_external_data: Whether to load the external tensor data.
            Set to True if the data files are under the same directory as
            the model. If not, call :func:`load_external_data_for_model`
            afterwards with the directory holding the external data.

    Returns:
        ModelProto: The loaded in-memory model.
    """
def load_model_from_string(
    s: bytes | str,
    format: _SupportedFormat = _DEFAULT_FORMAT,
) -> ModelProto:
    """Load a serialized ModelProto from an in-memory string/bytes object.

    Args:
        s: Bytes (or str, for text formats) containing a serialized
            ModelProto.
        format: The serialization format. Defaults to ``_DEFAULT_FORMAT``
            (presumably 'protobuf' — consistent with the other loaders).
            There is no file name to infer the format from, so pass it
            explicitly for text formats. The encoding is assumed to be
            "utf-8" when the format is a text format.

    Returns:
        ModelProto: The loaded in-memory model.
    """


# Load individual tensors from files or string data, useful for loading
# model weights or intermediate results.
def load_tensor(
    f: IO[bytes] | str | os.PathLike,
    format: _SupportedFormat | None = None,
) -> TensorProto:
    """Load a serialized TensorProto into memory.

    Args:
        f: A file-like object (must expose a ``read`` method) or a
            string/PathLike naming the file to read.
        format: The serialization format. When not specified, it is inferred
            from the file extension when ``f`` is a path; if not specified
            *and* ``f`` is not a path, 'protobuf' is used. The encoding is
            assumed to be "utf-8" when the format is a text format.

    Returns:
        TensorProto: The loaded in-memory tensor.
    """
def load_tensor_from_string(
    s: bytes,
    format: _SupportedFormat = _DEFAULT_FORMAT,
) -> TensorProto:
    """Load a serialized TensorProto from an in-memory bytes object.

    Args:
        s: Bytes containing a serialized TensorProto.
        format: The serialization format. Defaults to ``_DEFAULT_FORMAT``
            (presumably 'protobuf' — consistent with the other loaders).
            There is no file name to infer the format from, so pass it
            explicitly for text formats. The encoding is assumed to be
            "utf-8" when the format is a text format.

    Returns:
        TensorProto: The loaded in-memory tensor.
    """


# Save ONNX models to files with support for external data storage,
# compression, and format selection.
def save_model(
    proto: ModelProto | bytes,
    f: IO[bytes] | str | os.PathLike,
    format: _SupportedFormat | None = None,
    *,
    save_as_external_data: bool = False,
    all_tensors_to_one_file: bool = True,
    location: str | None = None,
    size_threshold: int = 1024,
    convert_attribute: bool = False,
) -> None:
    """Save the ModelProto to the specified path, optionally serializing
    tensors with raw data as external data before saving.

    Args:
        proto: An in-memory ModelProto (or its already-serialized bytes).
        f: A file-like object (must expose a ``write`` method), a string
            containing a file name, or a PathLike object.
        format: The serialization format. When not specified, it is inferred
            from the file extension when ``f`` is a path; if not specified
            *and* ``f`` is not a path, 'protobuf' is used. The encoding is
            assumed to be "utf-8" when the format is a text format.
        save_as_external_data: If True, save tensor data to external file(s).
        all_tensors_to_one_file: Effective only if save_as_external_data is
            True. If True, save all tensors to one external file specified
            by ``location``; if False, save each tensor to a file named
            after the tensor.
        location: Effective only if save_as_external_data is True. The
            external file that all tensors are saved to, relative to the
            model path. If not specified, the model name is used.
        size_threshold: Effective only if save_as_external_data is True.
            Only tensors whose data is >= ``size_threshold`` bytes are
            converted to external data; set to 0 to convert every tensor
            with raw data.
        convert_attribute: Effective only if save_as_external_data is True.
            If True, convert all tensors to external data; if False,
            convert only non-attribute tensors.
    """


# Save individual tensors to files with format selection and serialization
# options.
def save_tensor(
    proto: TensorProto,
    f: IO[bytes] | str | os.PathLike,
    format: _SupportedFormat | None = None,
) -> None:
    """Save the TensorProto to the specified path.

    Args:
        proto: An in-memory TensorProto.
        f: A file-like object (must expose a ``write`` method), a string
            containing a file name, or a PathLike object.
        format: The serialization format. When not specified, it is inferred
            from the file extension when ``f`` is a path; if not specified
            *and* ``f`` is not a path, 'protobuf' is used. The encoding is
            assumed to be "utf-8" when the format is a text format.
    """


# Manage external data files for large models, enabling efficient storage
# and loading of large tensors.
def load_external_data_for_model(model, base_dir):
    """Load external data for all tensors in ``model``, in place.

    Args:
        model: ModelProto whose tensors carry external-data references.
        base_dir: Directory path containing the external data files.

    Returns:
        None. The model is modified in place.
    """
def convert_model_to_external_data(model, all_tensors_to_one_file=True,
                                   location=None, size_threshold=1024,
                                   convert_attribute=False):
    """Convert the model's tensors to the external-data format, in place.

    Args:
        model: ModelProto to convert.
        all_tensors_to_one_file: If True, save all tensors to a single
            external file; otherwise one file per tensor.
        location: Location of the external data file (effective when
            all_tensors_to_one_file is True).
        size_threshold: Minimum data size for external storage; smaller
            tensors are kept inline.
        convert_attribute: If True, also convert attribute tensors to
            external data.

    Returns:
        None. The model is modified in place.
    """
def write_external_data_tensors(model, filepath):
    """Write the model's external data tensors out to files.

    Args:
        model: ModelProto whose tensors carry external-data references.
        filepath: Base path under which the external data files are written.

    Returns:
        ModelProto: The updated model with external data written out.
    """


# Backward compatibility aliases for common loading and saving operations.
load = load_model  # Alias for load_model
load_from_string = load_model_from_string  # Alias for load_model_from_string
save = save_model  # Alias for save_model

# Example: basic load/save round-trip.
import onnx
# Load a model from file.
model = onnx.load_model("path/to/model.onnx")

# Save the model to a new location.
onnx.save_model(model, "path/to/new_model.onnx")

# Load model from binary data.
with open("model.onnx", "rb") as f:
    binary_data = f.read()
model = onnx.load_model_from_string(binary_data)

# Example: working with external data.
import onnx
# Load model with external data.
model = onnx.load_model("large_model.onnx", load_external_data=True)

# Save model with external data for tensors > 1KB.
onnx.save_model(
    model,
    "output_model.onnx",
    save_as_external_data=True,
    size_threshold=1024,
)

# Convert existing model to use external data.
onnx.convert_model_to_external_data(
    model,
    all_tensors_to_one_file=True,
    location="weights.bin",
)

# Example: explicit serialization formats.
import onnx
# Load with specific format.
model = onnx.load_model("model.txt", format="textproto")

# Save in text format for debugging.
onnx.save_model(model, "debug_model.txt", format="textproto")

# Auto-detect format from file extension.
model = onnx.load_model("model.onnx")  # Detects protobuf format
onnx.save_model(model, "output.json", format="json")  # JSON format

# Install with Tessl CLI
# Install with the Tessl CLI: npx tessl i tessl/pypi-onnx