CtrlK
Blog · Docs · Log in · Get started
Tessl Logo

tessl/pypi-onnx

Open Neural Network Exchange for AI model interoperability and machine learning frameworks

Pending
Overview
Eval results
Files

docs/model-hub.md

Model Hub

Access to the ONNX Model Zoo for downloading pre-trained models, including model metadata and test data for validation. This module provides programmatic access to community-contributed ONNX models.

Capabilities

Model Discovery

Browse and discover available models in the ONNX Model Zoo.

def list_models(
    repo: str = "onnx/models:main",
    model: Optional[str] = None,
    tags: Optional[List[str]] = None,
) -> List[ModelInfo]:
    """
    List available models in the model repository.

    Parameters:
    - repo: Repository location in format "user/repo[:branch]" (default: "onnx/models:main")
    - model: Model name to search for (if None, returns all models with matching tags)
    - tags: Filter by tags (list of strings)

    Returns:
    List[ModelInfo]: List of available model information objects

    NOTE(review): it is not stated whether a model must match ALL of the
    given tags or ANY of them — confirm the matching semantics before
    relying on multi-tag filters.
    """

def get_model_info(
    model: str,
    repo: str = "onnx/models:main",
    opset: Optional[int] = None
) -> ModelInfo:
    """
    Get detailed information about a specific model.

    Parameters:
    - model: Model name (case-sensitive)
    - repo: Repository location in format "user/repo[:branch]"
    - opset: Opset version (None for largest available)

    Returns:
    ModelInfo: Detailed model information including metadata
    """

Model Loading

Download and load models directly from the model hub.

def load(
    model: str,
    repo: str = "onnx/models:main",
    opset: Optional[int] = None,
    force_reload: bool = False,
    silent: bool = False,
) -> Optional[onnx.ModelProto]:
    """
    Load model from the model hub.

    Parameters:
    - model: Model name (case-sensitive)
    - repo: Repository location in format "user/repo[:branch]"
    - opset: Opset version (None for largest available)
    - force_reload: Force re-download even if cached
    - silent: Suppress download progress output

    Returns:
    Optional[ModelProto]: Loaded ONNX model or None if failed

    Raises:
    AssertionError: If model cannot be found or downloaded

    NOTE(review): the contract lists both a None return AND an
    AssertionError on failure — confirm which failure mode applies
    in which circumstance before depending on either.
    """

def download_model_with_test_data(
    model: str,
    repo: str = "onnx/models:main",
    opset: Optional[int] = None,
    force_reload: bool = False,
    silent: bool = False,
) -> Optional[str]:
    """
    Download model along with test data and return extraction directory.

    Parameters:
    - model: Model name (case-sensitive)
    - repo: Repository location in format "user/repo[:branch]"
    - opset: Opset version (None for largest available)
    - force_reload: Force re-download even if cached
    - silent: Suppress download progress output

    Returns:
    Optional[str]: Directory path where files were extracted, or None if failed.
    Note the return is a single directory path string — callers locate the
    model file and test data inside that directory themselves.
    """

def load_composite_model(
    network_model: str,
    preprocessing_model: str,
    network_repo: str = "onnx/models:main",
    preprocessing_repo: str = "onnx/models:main",
    opset: Optional[int] = None,
    force_reload: bool = False,
    silent: bool = False,
) -> Optional[onnx.ModelProto]:
    """
    Build composite model by combining network and preprocessing models.

    Parameters:
    - network_model: Main network model name
    - preprocessing_model: Preprocessing model name
    - network_repo: Repository for network model
    - preprocessing_repo: Repository for preprocessing model
    - opset: Opset version (None for largest available)
    - force_reload: Force re-download even if cached
    - silent: Suppress download progress output

    Returns:
    Optional[ModelProto]: Combined composite model or None if failed.
    The result is ONE merged ModelProto, not a mapping of component models.
    """

Configuration

Configure hub settings and caching behavior.

def set_dir(new_dir: str) -> None:
    """
    Set directory for model downloads and caching.

    Parameters:
    - new_dir: Path to directory for storing downloaded models
    """

def get_dir() -> str:
    """
    Get current download directory.

    Returns:
    str: Path to current download directory
    """

Model Information

Container class for model metadata and information.

class ModelInfo:
    """
    Container for model metadata and information.

    Attributes:
    - name: Model name
    - description: Model description
    - tags: Associated tags
    - framework: Source framework
    - domain: Application domain
    - size: Model size information
    - accuracy: Performance metrics

    NOTE(review): the usage examples below probe `size` and `accuracy`
    via hasattr(), which suggests these attributes may be absent for
    some models — confirm before accessing them unconditionally.
    """

Usage Examples

Browse Available Models

import onnx
from onnx import hub

# Enumerate every model the hub knows about.
all_models = hub.list_models()
print(f"Found {len(all_models)} models in the hub")

# Narrow the listing down with tag filters.
vision_models = hub.list_models(tags=["vision", "classification"])
print(f"Found {len(vision_models)} vision classification models")

# Peek at the metadata of the first three matches.
for info in vision_models[:3]:
    print(f"Model: {info.name}")
    print(f"Description: {info.description}")
    print(f"Tags: {info.tags}")
    print()

Download and Use Models

import onnx
from onnx import hub

# Cache downloads under ./models instead of the default hub directory.
hub.set_dir("./models")

try:
    # load() takes the MODEL NAME first; the repository is the keyword
    # argument `repo` in "user/repo[:branch]" form (see signature above).
    # The original example passed them reversed.
    model = hub.load("resnet50", repo="onnx/models:main")
    print(f"Loaded model: {model.graph.name}")

    # Inspect model inputs/outputs
    print("Model inputs:")
    for input_info in model.graph.input:
        print(f"  {input_info.name}: {input_info.type}")

    print("Model outputs:")
    for output_info in model.graph.output:
        print(f"  {output_info.name}: {output_info.type}")

    # Validate the downloaded model against the ONNX spec.
    onnx.checker.check_model(model)
    print("Model validation passed!")

except Exception as e:
    print(f"Error loading model: {e}")

Download Models with Test Data

import onnx
from onnx import hub
import os

# download_model_with_test_data() takes the model name first and returns
# the EXTRACTION DIRECTORY as a plain string (or None on failure) — it
# does not return a dict, as the original example assumed.
try:
    extract_dir = hub.download_model_with_test_data(
        "mobilenetv2", repo="onnx/models:main"
    )

    if extract_dir is None:
        print("Download failed")
    else:
        print(f"Model and test data extracted to: {extract_dir}")

        print("Extracted contents:")
        for item in os.listdir(extract_dir):
            print(f"  {item}")

        # Locate and load the model file inside the extraction directory.
        onnx_files = [f for f in os.listdir(extract_dir) if f.endswith(".onnx")]
        if onnx_files:
            model = onnx.load_model(os.path.join(extract_dir, onnx_files[0]))
            print(f"Loaded model: {model.graph.name}")

except Exception as e:
    print(f"Error downloading model with test data: {e}")

Model Information and Metadata

import onnx
from onnx import hub

# get_model_info() takes the MODEL NAME first; the repository is the
# keyword argument `repo` (the original example passed them reversed).
try:
    model_info = hub.get_model_info("resnet50", repo="onnx/models:main")

    print(f"Model Name: {model_info.name}")
    print(f"Description: {model_info.description}")
    print(f"Tags: {', '.join(model_info.tags)}")
    print(f"Framework: {model_info.framework}")
    print(f"Domain: {model_info.domain}")

    # size/accuracy may be absent for some models, so probe before use.
    if hasattr(model_info, 'size'):
        print(f"Model Size: {model_info.size}")
    if hasattr(model_info, 'accuracy'):
        print(f"Accuracy Metrics: {model_info.accuracy}")

except Exception as e:
    print(f"Error getting model info: {e}")

Hub Configuration and Caching

import onnx
from onnx import hub
import os

# Configure a custom download/cache directory.
custom_dir = "./my_models"
os.makedirs(custom_dir, exist_ok=True)

hub.set_dir(custom_dir)
print(f"Download directory set to: {hub.get_dir()}")

# Downloads now land in the custom directory.
try:
    # load() takes the model name first; the repository is the `repo`
    # keyword argument (the original example passed them reversed).
    model = hub.load("squeezenet", repo="onnx/models:main")
    print(f"Model downloaded and cached in: {hub.get_dir()}")

    # List what the cache now contains.
    if os.path.exists(hub.get_dir()):
        cached_files = os.listdir(hub.get_dir())
        print(f"Cached files: {cached_files}")

except Exception as e:
    print(f"Error with custom directory: {e}")

Working with Composite Models

import onnx
from onnx import hub

# load_composite_model() takes a NETWORK model name and a PREPROCESSING
# model name and returns a single merged ModelProto — not a dict of
# components, as the original example assumed. Model names below are
# illustrative; check hub.list_models() for the exact names available.
try:
    composite_model = hub.load_composite_model(
        "resnet50",           # network_model
        "resnet50-preproc",   # preprocessing_model
    )

    graph = composite_model.graph
    print(f"Composite model graph: {graph.name}")
    print(f"  Inputs: {len(graph.input)}")
    print(f"  Outputs: {len(graph.output)}")
    print(f"  Nodes: {len(graph.node)}")

except Exception as e:
    print(f"Error loading composite model: {e}")

Install with Tessl CLI

npx tessl i tessl/pypi-onnx

docs

backend-integration.md

index.md

model-composition.md

model-construction.md

model-hub.md

model-io.md

model-validation.md

numpy-integration.md

operator-definitions.md

reference-implementation.md

shape-inference.md

text-processing.md

version-conversion.md

tile.json