or run

npx @tessl/cli init
Log in

Version

Tile

Overview

Evals

Files

Files

docs

backend-integration.mdindex.mdmodel-composition.mdmodel-construction.mdmodel-hub.mdmodel-io.mdmodel-validation.mdnumpy-integration.mdoperator-definitions.mdreference-implementation.mdshape-inference.mdtext-processing.mdversion-conversion.md

model-hub.mddocs/

0

# Model Hub

1

2

Access to the ONNX Model Zoo for downloading pre-trained models, including model metadata and test data for validation. This module provides programmatic access to community-contributed ONNX models.

3

4

## Capabilities

5

6

### Model Discovery

7

8

Browse and discover available models in the ONNX Model Zoo.

9

10

```python { .api }

11

def list_models(

12

repo: str = "onnx/models:main",

13

model: Optional[str] = None,

14

tags: Optional[List[str]] = None,

15

) -> List[ModelInfo]:

16

"""

17

List available models in the model repository.

18

19

Parameters:

20

- repo: Repository location in format "user/repo[:branch]" (default: "onnx/models:main")

21

- model: Model name to search for (if None, returns all models with matching tags)

22

- tags: Filter by tags (list of strings)

23

24

Returns:

25

List[ModelInfo]: List of available model information objects

26

"""

27

28

def get_model_info(

29

model: str,

30

repo: str = "onnx/models:main",

31

opset: Optional[int] = None

32

) -> ModelInfo:

33

"""

34

Get detailed information about a specific model.

35

36

Parameters:

37

- model: Model name (case-sensitive)

38

- repo: Repository location in format "user/repo[:branch]"

39

- opset: Opset version (None for largest available)

40

41

Returns:

42

ModelInfo: Detailed model information including metadata

43

"""

44

```

45

46

### Model Loading

47

48

Download and load models directly from the model hub.

49

50

```python { .api }

51

def load(

52

model: str,

53

repo: str = "onnx/models:main",

54

opset: Optional[int] = None,

55

force_reload: bool = False,

56

silent: bool = False,

57

) -> Optional[onnx.ModelProto]:

58

"""

59

Load model from the model hub.

60

61

Parameters:

62

- model: Model name (case-sensitive)

63

- repo: Repository location in format "user/repo[:branch]"

64

- opset: Opset version (None for largest available)

65

- force_reload: Force re-download even if cached

66

- silent: Suppress download progress output

67

68

Returns:

69

Optional[ModelProto]: Loaded ONNX model or None if failed

70

71

Raises:

72

AssertionError: If model cannot be found or downloaded

73

"""

74

75

def download_model_with_test_data(

76

model: str,

77

repo: str = "onnx/models:main",

78

opset: Optional[int] = None,

79

force_reload: bool = False,

80

silent: bool = False,

81

) -> Optional[str]:

82

"""

83

Download model along with test data and return extraction directory.

84

85

Parameters:

86

- model: Model name (case-sensitive)

87

- repo: Repository location in format "user/repo[:branch]"

88

- opset: Opset version (None for largest available)

89

- force_reload: Force re-download even if cached

90

- silent: Suppress download progress output

91

92

Returns:

93

Optional[str]: Directory path where files were extracted, or None if failed

94

"""

95

96

def load_composite_model(

97

network_model: str,

98

preprocessing_model: str,

99

network_repo: str = "onnx/models:main",

100

preprocessing_repo: str = "onnx/models:main",

101

opset: Optional[int] = None,

102

force_reload: bool = False,

103

silent: bool = False,

104

) -> Optional[onnx.ModelProto]:

105

"""

106

Build composite model by combining network and preprocessing models.

107

108

Parameters:

109

- network_model: Main network model name

110

- preprocessing_model: Preprocessing model name

111

- network_repo: Repository for network model

112

- preprocessing_repo: Repository for preprocessing model

113

- opset: Opset version (None for largest available)

114

- force_reload: Force re-download even if cached

115

- silent: Suppress download progress output

116

117

Returns:

118

Optional[ModelProto]: Combined composite model or None if failed

119

"""

120

```

121

122

### Configuration

123

124

Configure hub settings and caching behavior.

125

126

```python { .api }

127

def set_dir(new_dir):

128

"""

129

Set directory for model downloads and caching.

130

131

Parameters:

132

- new_dir: Path to directory for storing downloaded models

133

"""

134

135

def get_dir():

136

"""

137

Get current download directory.

138

139

Returns:

140

str: Path to current download directory

141

"""

142

```

143

144

### Model Information

145

146

Container class for model metadata and information.

147

148

```python { .api }

149

class ModelInfo:

150

"""

151

Container for model metadata and information.

152

153

Attributes:

154

- name: Model name

155

- description: Model description

156

- tags: Associated tags

157

- framework: Source framework

158

- domain: Application domain

159

- size: Model size information

160

- accuracy: Performance metrics

161

"""

162

```

163

164

## Usage Examples

165

166

### Browse Available Models

167

168

```python

169

import onnx

170

from onnx import hub

171

172

# List all available models

173

models = hub.list_models()

174

print(f"Found {len(models)} models in the hub")

175

176

# Filter by tags

177

vision_models = hub.list_models(tags=["vision", "classification"])

178

print(f"Found {len(vision_models)} vision classification models")

179

180

# Browse model information

181

for model_info in vision_models[:3]: # Show first 3

182

print(f"Model: {model_info.name}")

183

print(f"Description: {model_info.description}")

184

print(f"Tags: {model_info.tags}")

185

print()

186

```

187

188

### Download and Use Models

189

190

```python

191

import onnx

192

from onnx import hub

193

import numpy as np

194

195

# Set download directory

196

hub.set_dir("./models")

197

198

try:

199

# Load a popular model (example)

200

model = hub.load("resnet50")

201

print(f"Loaded model: {model.graph.name}")

202

203

# Inspect model inputs/outputs

204

print("Model inputs:")

205

for input_info in model.graph.input:

206

print(f" {input_info.name}: {input_info.type}")

207

208

print("Model outputs:")

209

for output_info in model.graph.output:

210

print(f" {output_info.name}: {output_info.type}")

211

212

# Validate the model

213

onnx.checker.check_model(model)

214

print("Model validation passed!")

215

216

except Exception as e:

217

print(f"Error loading model: {e}")

218

```

219

220

### Download Models with Test Data

221

222

```python

223

import onnx

224

from onnx import hub

225

import os

226

227

# Download model with test data for validation

228

try:

229

# Download model and test data

230

extract_dir = hub.download_model_with_test_data("mobilenetv2")

231

232

print(f"Model and test data extracted to: {extract_dir}")

233

# The return value is the extraction directory path, or None on failure

234

235

# Load the downloaded model file from the extraction directory

236

onnx_files = [f for f in os.listdir(extract_dir) if f.endswith(".onnx")]

237

model = onnx.load_model(os.path.join(extract_dir, onnx_files[0]))

238

# Inspect the extracted test data

239

240

if os.path.exists(extract_dir):

241

print("Extraction directory contains:")

242

for item in os.listdir(extract_dir):

243

print(f" {item}")

244

245

except Exception as e:

246

print(f"Error downloading model with test data: {e}")

247

```

248

249

### Model Information and Metadata

250

251

```python

252

import onnx

253

from onnx import hub

254

255

# Get detailed model information

256

try:

257

model_info = hub.get_model_info("resnet50")

258

259

print(f"Model Name: {model_info.name}")

260

print(f"Description: {model_info.description}")

261

print(f"Tags: {', '.join(model_info.tags)}")

262

print(f"Framework: {model_info.framework}")

263

print(f"Domain: {model_info.domain}")

264

265

if hasattr(model_info, 'size'):

266

print(f"Model Size: {model_info.size}")

267

if hasattr(model_info, 'accuracy'):

268

print(f"Accuracy Metrics: {model_info.accuracy}")

269

270

except Exception as e:

271

print(f"Error getting model info: {e}")

272

```

273

274

### Hub Configuration and Caching

275

276

```python

277

import onnx

278

from onnx import hub

279

import os

280

281

# Configure download directory

282

custom_dir = "./my_models"

283

os.makedirs(custom_dir, exist_ok=True)

284

285

# Set custom download directory

286

hub.set_dir(custom_dir)

287

print(f"Download directory set to: {hub.get_dir()}")

288

289

# Download models to custom directory

290

try:

291

model = hub.load("squeezenet")

292

print(f"Model downloaded and cached in: {hub.get_dir()}")

293

294

# List cached models

295

if os.path.exists(hub.get_dir()):

296

cached_files = os.listdir(hub.get_dir())

297

print(f"Cached files: {cached_files}")

298

299

except Exception as e:

300

print(f"Error with custom directory: {e}")

301

```

302

303

### Working with Composite Models

304

305

```python

306

import onnx

307

from onnx import hub

308

309

# Load composite models with multiple components

310

try:

311

composite_model = hub.load_composite_model("resnet50", "resnet50-preprocessing")

312

313

# load_composite_model returns a single merged ModelProto, not a mapping

314

print("Composite model (preprocessing + network):")

315

print(f" Graph: {composite_model.graph.name}")

316

print(f" Inputs: {len(composite_model.graph.input)}")

317

print(f" Outputs: {len(composite_model.graph.output)}")

318

print(f" Nodes: {len(composite_model.graph.node)}")

319

320

except Exception as e:

321

print(f"Error loading composite model: {e}")

322

```