A comprehensive 2D and 3D face analysis toolkit with state-of-the-art algorithms for face recognition, detection, and alignment.
—
Comprehensive utilities for downloading, storing, and managing pre-trained models and model packs. Handles automatic downloads, local caching, and model availability checks for seamless model deployment.
Core functions for model download and local storage management.
def download(sub_dir, name, force=False, root='~/.insightface') -> str:
"""
Download model or model pack to local storage.
Parameters:
- sub_dir: str, subdirectory within root ('models', 'objects', etc.)
- name: str, model pack or file name to download
- force: bool, force re-download even if already exists
- root: str, root directory for insightface storage
Returns:
str: path to downloaded content
"""
def ensure_available(sub_dir, name, root='~/.insightface') -> str:
"""
Ensure model is available locally, download if necessary.
Parameters:
- sub_dir: str, subdirectory for model storage
- name: str, model name to ensure availability
- root: str, root storage directory
Returns:
str: path to model directory or file
"""
def download_onnx(sub_dir, model_file, force=False, root='~/.insightface', download_zip=False) -> str:
"""
Download specific ONNX model file.
Parameters:
- sub_dir: str, storage subdirectory
- model_file: str, ONNX model filename
- force: bool, force re-download
- root: str, root storage directory
- download_zip: bool, download as zip archive
Returns:
str: path to downloaded ONNX file
"""
def get_model_file(name, root='~/.insightface/models') -> str:
"""
Get location for pretrained model on local file system.
Downloads from online model zoo if model cannot be found or has hash mismatch.
The root directory will be created if it doesn't exist.
Parameters:
- name: str, name of the model
- root: str, location for keeping model parameters
Returns:
str: path to the requested pretrained model file
"""Functions for managing local model storage and directory operations.
def get_model_dir(name, root='~/.insightface') -> str:
"""
Get model directory path for given model pack.
Parameters:
- name: str, model pack name
- root: str, root storage directory
Returns:
str: full path to model directory
"""
def makedirs(path) -> None:
"""
Create directory structure recursively.
Parameters:
- path: str, directory path to create
"""Helper functions for managing optional dependencies and package imports.
def try_import(package, message=None) -> module:
"""
Attempt to import package with graceful failure.
Parameters:
- package: str, package name to import
- message: str, custom error message if import fails
Returns:
module: imported module or None if failed
"""
def try_import_cv2() -> module:
"""Try to import OpenCV (cv2) with helpful error message."""
def try_import_mmcv() -> module:
"""Try to import MMCV library."""
def try_import_rarfile() -> module:
"""Try to import rarfile for RAR archive support."""
def import_try_install(package, extern_url=None) -> module:
"""
Import package and attempt installation if not found.
Parameters:
- package: str, package name
- extern_url: str, external installation URL if pip fails
Returns:
module: imported module
"""
def try_import_dali() -> module:
"""Try to import NVIDIA DALI for accelerated data loading."""Low-level utilities for downloading files from URLs with integrity checking.
def check_sha1(filename, sha1_hash) -> bool:
"""
Check SHA1 hash of downloaded file.
Parameters:
- filename: str, path to file to check
- sha1_hash: str, expected SHA1 hash
Returns:
bool: True if hash matches, False otherwise
"""
def download_file(url, path=None, overwrite=False, sha1_hash=None) -> str:
"""
Download file from URL with optional integrity checking.
Parameters:
- url: str, URL to download from
- path: str, local path to save file (optional)
- overwrite: bool, overwrite existing file
- sha1_hash: str, expected SHA1 hash for verification
Returns:
str: path to downloaded file
"""Default configuration values and model pack names.
DEFAULT_MP_NAME = 'buffalo_l'  # Default model pack name

CLI commands for model management operations.
class ModelDownloadCommand:
def __init__(self, model: str, root: str, force: bool):
"""
Initialize model download command.
Parameters:
- model: str, model pack name to download
- root: str, storage root directory
- force: bool, force re-download
"""
def run(self) -> None:
"""Execute model download."""
def main() -> None:
"""Main CLI entry point for insightface-cli command."""from insightface.utils import download, ensure_available
# Download default model pack
model_path = download('models', 'buffalo_l')
print(f"Model downloaded to: {model_path}")
# Ensure model is available (download if needed)
model_dir = ensure_available('models', 'buffalo_s')
print(f"Model available at: {model_dir}")
# List files in model directory
import os
model_files = os.listdir(model_dir)
print(f"Model files: {model_files}")# Use custom storage directory
custom_root = './my_models'
model_path = download('models', 'buffalo_m', root=custom_root)
# Check model directory structure
from insightface.utils import get_model_dir
model_dir = get_model_dir('buffalo_m', root=custom_root)
print(f"Custom model directory: {model_dir}")from insightface.utils import download_onnx
# Download specific ONNX model
onnx_path = download_onnx('models/buffalo_l', 'det_10g.onnx')
print(f"Downloaded ONNX model: {onnx_path}")
# Download as zip archive
zip_path = download_onnx('models', 'arcface_r100_v1.onnx', download_zip=True)
print(f"Downloaded zip archive: {zip_path}")import os
from insightface.utils import get_model_dir
def check_model_availability(model_name, root='~/.insightface'):
    """Check if model pack is available locally."""
    pack_dir = get_model_dir(model_name, root)
    # Guard clause: nothing on disk means the pack was never downloaded.
    if not os.path.exists(pack_dir):
        print(f"Model '{model_name}' not found locally")
        return False
    # Report every ONNX model shipped inside the pack directory.
    onnx_models = [entry for entry in os.listdir(pack_dir) if entry.endswith('.onnx')]
    print(f"Model '{model_name}' available with {len(onnx_models)} ONNX files:")
    for onnx_file in onnx_models:
        print(f" - {onnx_file}")
    return True
# Check multiple models
models_to_check = ['buffalo_l', 'buffalo_m', 'buffalo_s', 'antelopev2']
for model in models_to_check:
check_model_availability(model)
print()

# Force re-download of model pack (useful for updates)
fresh_model_path = download('models', 'buffalo_l', force=True)
print(f"Fresh model downloaded to: {fresh_model_path}")
# Force re-download specific ONNX file
fresh_onnx = download_onnx('models/buffalo_l', 'w600k_r50.onnx', force=True)
print(f"Fresh ONNX model: {fresh_onnx}")from insightface.utils import try_import, import_try_install
# Try importing optional dependencies
cv2 = try_import('cv2', 'OpenCV is required for image processing')
if cv2:
print("OpenCV available")
# Import with auto-install attempt
try:
mmcv = import_try_install('mmcv')
print("MMCV imported successfully")
except ImportError as e:
print(f"Could not import MMCV: {e}")
# Check for GPU acceleration libraries
dali = try_import('nvidia.dali', 'DALI not available - using standard data loading')
if dali:
print("NVIDIA DALI available for accelerated data loading")from insightface.utils import check_sha1, download_file
# Download file with integrity check
url = 'https://github.com/deepinsight/insightface/releases/download/v0.7/buffalo_l.zip'
expected_hash = 'a1b2c3d4e5f6...' # Expected SHA1 hash
try:
file_path = download_file(url, path='./buffalo_l.zip', sha1_hash=expected_hash)
print(f"File downloaded and verified: {file_path}")
except Exception as e:
print(f"Download or verification failed: {e}")
# Verify existing file
if check_sha1('./buffalo_l.zip', expected_hash):
print("File integrity verified")
else:
print("File integrity check failed")# Download model pack via CLI
insightface-cli model.download buffalo_l
# Download to custom directory
insightface-cli model.download buffalo_s --root ./my_models
# Force re-download
insightface-cli model.download buffalo_m --force
# Add mask parameters to recognition dataset
# insightface-cli rec.add_mask_param --input dataset.rec --output masked_dataset.rec

def list_available_models(root='~/.insightface'):
    """List all locally available model packs.

    Scans ``<root>/models`` for subdirectories and counts the ONNX files
    contained in each one.

    Parameters:
    - root: str, root insightface storage directory (``~`` is expanded)

    Returns:
    list of (str, int) tuples: (model pack name, number of ONNX files);
    empty list when the models directory does not exist.
    """
    import os
    # NOTE(review): removed the unused `get_model_dir` import — this
    # function only walks the filesystem and never called it.
    models_dir = os.path.expanduser(os.path.join(root, 'models'))
    if not os.path.exists(models_dir):
        print("No models directory found")
        return []
    available_models = []
    for item in os.listdir(models_dir):
        model_path = os.path.join(models_dir, item)
        if os.path.isdir(model_path):
            # Count ONNX files shipped with this pack
            onnx_count = len([f for f in os.listdir(model_path) if f.endswith('.onnx')])
            available_models.append((item, onnx_count))
    return available_models
def cleanup_old_models(root='~/.insightface', keep_latest=2):
    """Clean up old model versions, keeping only the latest.

    NOTE: currently this only reports what is installed; no files are
    removed yet.
    """
    packs = list_available_models(root)
    # Sort by modification time and keep latest
    # Implementation would depend on specific versioning scheme
    print(f"Found {len(packs)} model packs")
    for pack_name, model_count in packs:
        print(f" {pack_name}: {model_count} ONNX files")
# List available models
models = list_available_models()
print("Available model packs:")
for name, count in models:
    print(f" {name}: {count} models")

def setup_models_for_production(models_needed=None, root='./production_models'):
    """Set up models for production deployment.

    Parameters:
    - models_needed: list of model pack names to download; defaults to
      ['buffalo_l']. Uses a None sentinel instead of a mutable list
      default so the default is never shared/mutated across calls.
    - root: str, directory to download the packs into

    Returns:
    dict: model pack name -> local path for every pack that downloaded
    successfully; failed packs are reported and skipped.
    """
    from insightface.utils import download, makedirs
    if models_needed is None:
        models_needed = ['buffalo_l']
    # Create production directory
    makedirs(root)
    downloaded_models = {}
    for model_name in models_needed:
        try:
            print(f"Setting up {model_name}...")
            model_path = download('models', model_name, root=root)
            downloaded_models[model_name] = model_path
            print(f"✓ {model_name} ready")
        except Exception as e:
            # Best-effort: one failed pack must not abort the others.
            print(f"✗ Failed to setup {model_name}: {e}")
    return downloaded_models

# Setup for production
production_models = setup_models_for_production(['buffalo_l', 'buffalo_s'])
print(f"Production setup complete. Models: {list(production_models.keys())}")Install with Tessl CLI
npx tessl i tessl/pypi-insightface