Microsoft Azure Machine Learning Client Library for Python providing comprehensive SDK for ML workflows including job execution, pipeline components, model deployment, and AutoML capabilities
Quality: Pending — Does it follow best practices?
Impact: Pending — No eval scenarios have been run.
Comprehensive asset management capabilities for models, data, environments, and components with versioning, lineage tracking, and metadata management in Azure Machine Learning.
Model asset management with versioning, metadata, and deployment support.
class Model:
    def __init__(
        self,
        *,
        name: str,
        path: str = None,
        version: str = None,
        type: str = "custom_model",
        description: str = None,
        tags: dict = None,
        properties: dict = None,
        flavors: dict = None,
        **kwargs
    ):
        """
        Model asset for storing and versioning ML models.

        Parameters:
        - name: Model name (required, keyword-only)
        - path: Path to model files (local path or cloud URI)
        - version: Model version (auto-generated if not specified)
        - type: Model type ("custom_model", "mlflow_model", "triton_model")
        - description: Model description
        - tags: Dictionary of tags for organization
        - properties: Custom properties
        - flavors: Model flavors (MLflow models)
        - kwargs: Additional keyword options accepted for forward compatibility
        """

from azure.ai.ml.entities import Model
# Register a model from local files
model = Model(
    name="my-model",
    path="./model",
    type="custom_model",
    description="My trained model",
    tags={"framework": "scikit-learn", "algorithm": "random_forest"}
)
# create_or_update registers the asset; the returned object carries the
# server-assigned version (see the printed `name:version` below).
# NOTE: `ml_client` is an MLClient instance created elsewhere.
registered_model = ml_client.models.create_or_update(model)
print(f"Model registered: {registered_model.name}:{registered_model.version}")
# Reference an existing model
existing_model = Model(name="my-model", version="1")

Data asset management for datasets with versioning and lineage tracking.
class Data:
    def __init__(
        self,
        *,
        name: str,
        path: str = None,
        version: str = None,
        type: str = "uri_folder",
        description: str = None,
        tags: dict = None,
        properties: dict = None,
        **kwargs
    ):
        """
        Data asset for storing and versioning datasets.

        Parameters:
        - name: Data asset name (required, keyword-only)
        - path: Path to data (local path, cloud URI, or datastore path)
        - version: Data version (auto-generated if not specified)
        - type: Data type ("uri_file", "uri_folder", "mltable")
        - description: Data description
        - tags: Dictionary of tags
        - properties: Custom properties
        - kwargs: Additional keyword options accepted for forward compatibility
        """

from azure.ai.ml.entities import Data
# Register data from local folder
data = Data(
    name="training-data",
    path="./data/train",
    type="uri_folder",
    description="Training dataset for my model",
    tags={"source": "synthetic", "size": "1GB"}
)
# NOTE: `ml_client` is an MLClient instance created elsewhere.
registered_data = ml_client.data.create_or_update(data)
# Register data from cloud storage (path may be a full blob URI)
cloud_data = Data(
    name="validation-data",
    path="https://mystorageaccount.blob.core.windows.net/data/validation/",
    type="uri_folder",
    description="Validation dataset"
)
# Create MLTable data asset (closing parenthesis follows on the next line)
mltable_data = Data(
    name="processed-data",
    path="./data/processed",
    type="mltable",
    description="Processed data with MLTable specification"
)

Environment management for consistent runtime configurations.
class Environment:
    def __init__(
        self,
        *,
        name: str = None,
        version: str = None,
        image: str = None,
        conda_file: str = None,
        docker_file: str = None,
        build: "BuildContext" = None,  # string forward ref: BuildContext is declared later in this file
        inference_config: dict = None,
        description: str = None,
        tags: dict = None,
        **kwargs
    ):
        """
        Environment for consistent runtime configurations.

        NOTE(review): the examples below use either a prebuilt `image`
        (optionally plus `conda_file`) or a custom `build` context —
        presumably these are alternative image sources; confirm against
        the SDK reference.

        Parameters:
        - name: Environment name
        - version: Environment version
        - image: Base Docker image
        - conda_file: Path to conda environment file
        - docker_file: Path to Dockerfile
        - build: Build context for custom images
        - inference_config: Inference-specific configuration
        - description: Environment description
        - tags: Dictionary of tags
        """
class BuildContext:
    def __init__(
        self,
        *,
        path: str,
        dockerfile_path: str = "Dockerfile"
    ):
        """
        Build context for creating custom environment images.

        Parameters:
        - path: Path to build context directory (required, keyword-only)
        - dockerfile_path: Path to Dockerfile within the context
          (defaults to "Dockerfile")
        """

from azure.ai.ml.entities import Environment, BuildContext
# Environment with pre-built image
env = Environment(
    name="sklearn-env",
    image="mcr.microsoft.com/azureml/sklearn-1.0-ubuntu20.04-py38-cpu-inference:latest",
    description="Scikit-learn environment"
)
# Environment with conda file layered on a base image
conda_env = Environment(
    name="custom-env",
    image="mcr.microsoft.com/azureml/openmpi4.1.0-ubuntu20.04:latest",
    conda_file="./environment.yml",
    description="Custom conda environment"
)
# Environment with custom Docker build (no base image; built from context)
custom_env = Environment(
    name="custom-docker-env",
    build=BuildContext(path="./docker", dockerfile_path="Dockerfile"),
    description="Custom Docker environment"
)
registered_env = ml_client.environments.create_or_update(env)

Reusable components for building ML pipelines.
class Component:
    # Base class for the concrete component types below
    # (CommandComponent, ParallelComponent, SparkComponent).
    def __init__(
        self,
        *,
        name: str,
        version: str = None,
        display_name: str = None,
        description: str = None,
        tags: dict = None,
        **kwargs
    ):
        """
        Base component class for pipeline building blocks.

        Parameters:
        - name: Component name (required, keyword-only)
        - version: Component version
        - display_name: Display name for UI
        - description: Component description
        - tags: Dictionary of tags
        """
class CommandComponent(Component):
    def __init__(
        self,
        *,
        name: str,
        command: str,
        code: str = None,
        environment: Environment,  # required keyword-only; legal after defaults in a *-only signature
        inputs: dict = None,
        outputs: dict = None,
        **kwargs
    ):
        """
        Command component for executing arbitrary commands.

        Parameters:
        - name: Component name (required, keyword-only)
        - command: Command to execute; inputs/outputs are referenced with
          the ${{inputs.<name>}} / ${{outputs.<name>}} syntax (see example below)
        - code: Source code path
        - environment: Runtime environment (required)
        - inputs: Component input schema
        - outputs: Component output schema
        """
class ParallelComponent(Component):
    def __init__(
        self,
        *,
        name: str,
        task: "ParallelTask",  # string forward ref: ParallelTask is not defined in this file view
        environment: Environment,
        inputs: dict = None,
        outputs: dict = None,
        **kwargs
    ):
        """
        Parallel component for batch processing tasks.

        Parameters:
        - name: Component name (required, keyword-only)
        - task: Parallel task configuration
        - environment: Runtime environment (required)
        - inputs: Component input schema
        - outputs: Component output schema
        """
class SparkComponent(Component):
    def __init__(
        self,
        *,
        name: str,
        code: str,
        # String forward ref: SparkJobEntry is not defined in this file view;
        # a bare annotation would raise NameError when the class is created.
        entry: "SparkJobEntry",
        **kwargs
    ):
        """
        Spark component for big data processing.

        Parameters:
        - name: Component name (required, keyword-only)
        - code: Spark application code
        - entry: Spark entry point configuration
        - kwargs: Additional keyword options accepted for forward compatibility
        """

from azure.ai.ml.entities import CommandComponent, Environment
# Create a training component.
# The ${{inputs.data}} / ${{outputs.model}} placeholders in `command` are
# resolved from the `inputs` / `outputs` schemas declared below.
train_component = CommandComponent(
    name="train-model",
    display_name="Train Model",
    description="Training component for ML model",
    command="python train.py --input_data ${{inputs.data}} --model_output ${{outputs.model}}",
    code="./src",
    environment=Environment(
        image="mcr.microsoft.com/azureml/sklearn-1.0-ubuntu20.04-py38-cpu-inference:latest"
    ),
    inputs={
        "data": {"type": "uri_folder", "description": "Training data"}
    },
    outputs={
        "model": {"type": "uri_folder", "description": "Trained model"}
    }
)
registered_component = ml_client.components.create_or_update(train_component)

Common operations available for all asset types through the MLClient.
class ModelOperations:
    """CRUD and download operations for Model assets (used as ml_client.models in the examples)."""

    def create_or_update(self, model: Model) -> Model: ...
    # version=None: presumably returns the latest version — confirm against SDK docs
    def get(self, name: str, version: str = None) -> Model: ...
    def list(self, name: str = None) -> list: ...
    def delete(self, name: str, version: str) -> None: ...
    def download(self, name: str, version: str, download_path: str) -> str: ...
class DataOperations:
    """CRUD operations for Data assets (used as ml_client.data in the examples)."""

    def create_or_update(self, data: Data) -> Data: ...
    # version=None: gets the latest version (see example below)
    def get(self, name: str, version: str = None) -> Data: ...
    def list(self, name: str = None) -> list: ...
    def delete(self, name: str, version: str) -> None: ...
class EnvironmentOperations:
    """CRUD operations for Environment assets (used as ml_client.environments in the examples)."""

    def create_or_update(self, environment: Environment) -> Environment: ...
    def get(self, name: str, version: str = None) -> Environment: ...
    def list(self, name: str = None) -> list: ...
    def delete(self, name: str, version: str) -> None: ...
class ComponentOperations:
    """CRUD operations for Component assets (used as ml_client.components in the examples)."""

    def create_or_update(self, component: Component) -> Component: ...
    def get(self, name: str, version: str = None) -> Component: ...
    def list(self, name: str = None) -> list: ...
    def delete(self, name: str, version: str) -> None: ...

# List all models
# NOTE: `ml_client` is an MLClient instance created elsewhere.
models = ml_client.models.list()
for model in models:
    print(f"{model.name}:{model.version}")
# Get specific model version
model = ml_client.models.get("my-model", version="1")
# Download model files; returns the path the files were written to
download_path = ml_client.models.download("my-model", version="1", download_path="./downloads")
# List data assets filtered by name
data_assets = ml_client.data.list(name="training-data")
# Get latest version of data
latest_data = ml_client.data.get("training-data")  # Gets latest version

Reference assets across workspaces and registries.
class WorkspaceAssetReference:
def __init__(
self,
*,
name: str,
version: str = None,
**kwargs
):
"""
Reference to an asset in the current workspace.
Parameters:
- name: Asset name
- version: Asset version (latest if not specified)
"""Intellectual property settings for protecting sensitive assets.
class IntellectualProperty:
    def __init__(
        self,
        *,
        publisher: str,
        protection_level: str = "all"
    ):
        """
        Intellectual property protection configuration.

        Parameters:
        - publisher: Publisher name (required, keyword-only)
        - protection_level: Protection level ("all", "none"); defaults to "all"
        """

from azure.ai.ml.entities import Model, IntellectualProperty
# Model with IP protection.
# `intellectual_property` is not a named parameter of Model.__init__ above;
# presumably it is absorbed by **kwargs — confirm against the SDK reference.
protected_model = Model(
    name="proprietary-model",
    path="./model",
    description="Proprietary model with IP protection",
    intellectual_property=IntellectualProperty(
        publisher="My Company",
        protection_level="all"
    )
)
ml_client.models.create_or_update(protected_model)

Install with Tessl CLI
npx tessl i tessl/pypi-azure-ai-ml