CtrlK
Blog · Docs · Log in · Get started
Tessl Logo

tessl/pypi-azure-ai-ml

Microsoft Azure Machine Learning Client Library for Python providing comprehensive SDK for ML workflows including job execution, pipeline components, model deployment, and AutoML capabilities

Pending

Quality

Pending

Does it follow best practices?

Impact

Pending

No eval scenarios have been run

Overview
Eval results
Files

docs/compute-management.md

Compute Management

Comprehensive compute resource management including Azure ML compute clusters, compute instances, Kubernetes compute, and various compute configurations for running ML workloads.

Capabilities

Azure ML Compute

Managed compute clusters for scalable ML workloads with automatic scaling and node management.

class AmlCompute:
    """Managed Azure ML compute cluster that auto-scales between node limits.

    Types such as IdentityConfiguration are declared elsewhere in the package;
    they are referenced as string annotations so this stub evaluates cleanly
    on its own (the eager annotations previously raised NameError at class
    definition time).
    """

    def __init__(
        self,
        *,
        name: str,
        type: str = "amlcompute",
        size: str,
        location: "str | None" = None,
        min_instances: int = 0,
        max_instances: int = 1,
        idle_time_before_scale_down: int = 1800,
        tier: str = "dedicated",
        identity: "IdentityConfiguration | None" = None,
        ssh_public_access_enabled: bool = False,
        ssh_settings: "AmlComputeSshSettings | None" = None,
        network_settings: "NetworkSettings | None" = None,
        **kwargs,
    ):
        """
        Azure ML compute cluster for scalable workloads.

        Parameters:
        - name: Compute cluster name
        - type: Compute type ("amlcompute")
        - size: VM size (e.g., "Standard_DS3_v2", "Standard_NC6")
        - location: Azure region for compute
        - min_instances: Minimum number of nodes
        - max_instances: Maximum number of nodes
        - idle_time_before_scale_down: Scale down time in seconds
        - tier: Compute tier ("dedicated", "low_priority")
        - identity: Managed identity configuration
        - ssh_public_access_enabled: Enable SSH access
        - ssh_settings: SSH configuration
        - network_settings: Virtual network settings
        """
        # Fix: the original stub discarded every argument; this file's own
        # usage example reads compute.size and compute.max_instances, so the
        # parameters must be retained as attributes.
        self.name = name
        self.type = type
        self.size = size
        self.location = location
        self.min_instances = min_instances
        self.max_instances = max_instances
        self.idle_time_before_scale_down = idle_time_before_scale_down
        self.tier = tier
        self.identity = identity
        self.ssh_public_access_enabled = ssh_public_access_enabled
        self.ssh_settings = ssh_settings
        self.network_settings = network_settings
        # Extra keyword arguments are accepted for forward compatibility.
        self._extra = dict(kwargs)

class AmlComputeSshSettings:
    """SSH credentials for nodes of an AML compute cluster."""

    def __init__(
        self,
        *,
        admin_username: str,
        admin_password: "str | None" = None,
        ssh_public_keys: "str | None" = None,
    ):
        """
        SSH settings for AML compute.

        Parameters:
        - admin_username: Administrator username
        - admin_password: Administrator password (password-based auth)
        - ssh_public_keys: SSH public keys (key-based auth)
        """
        # Fix: implicit-Optional annotations made explicit; arguments are now
        # stored instead of discarded so callers can read them back.
        self.admin_username = admin_username
        self.admin_password = admin_password
        self.ssh_public_keys = ssh_public_keys

Usage Example

from azure.ai.ml.entities import AmlCompute, AmlComputeSshSettings

# NOTE(review): `ml_client` used at the bottom is assumed to be an
# authenticated azure.ai.ml.MLClient constructed elsewhere — confirm against
# the client-auth documentation.

# Create a CPU compute cluster
# (idle_time_before_scale_down is in seconds: 1800 s = 30 min)
cpu_cluster = AmlCompute(
    name="cpu-cluster",
    size="Standard_DS3_v2",
    min_instances=0,
    max_instances=10,
    idle_time_before_scale_down=1800,
    tier="dedicated"
)

# Create a GPU compute cluster
gpu_cluster = AmlCompute(
    name="gpu-cluster",
    size="Standard_NC6",
    min_instances=0,
    max_instances=4,
    idle_time_before_scale_down=1200,
    tier="dedicated"
)

# Create cluster with SSH access
ssh_settings = AmlComputeSshSettings(
    admin_username="azureuser",
    ssh_public_keys="ssh-rsa AAAAB3NzaC1yc2EAAAA..."
)

ssh_cluster = AmlCompute(
    name="ssh-cluster",
    size="Standard_DS3_v2",
    max_instances=5,
    ssh_public_access_enabled=True,
    ssh_settings=ssh_settings
)

# Create the compute cluster
# begin_create_or_update returns an LROPoller; .result() blocks until done.
ml_client.compute.begin_create_or_update(cpu_cluster).result()

Compute Instances

Managed compute instances for development and experimentation with pre-configured environments.

class ComputeInstance:
    """Single-user managed VM for ML development and experimentation.

    Types such as ComputeInstanceSshSettings are declared elsewhere; they are
    referenced as string annotations so this stub evaluates cleanly on its own.
    """

    def __init__(
        self,
        *,
        name: str,
        size: str,
        location: "str | None" = None,
        ssh_public_access_enabled: bool = False,
        ssh_settings: "ComputeInstanceSshSettings | None" = None,
        assigned_user: "AssignedUserConfiguration | None" = None,
        idle_time_before_shutdown_minutes: "int | None" = None,
        custom_applications: "list | None" = None,
        setup_scripts: "SetupScripts | None" = None,
        network_settings: "NetworkSettings | None" = None,
        **kwargs,
    ):
        """
        Azure ML compute instance for development.

        Parameters:
        - name: Compute instance name
        - size: VM size (e.g., "Standard_DS3_v2")
        - location: Azure region
        - ssh_public_access_enabled: Enable SSH access
        - ssh_settings: SSH configuration
        - assigned_user: User assignment configuration
        - idle_time_before_shutdown_minutes: Auto-shutdown time (minutes)
        - custom_applications: Custom applications to install
        - setup_scripts: Startup scripts
        - network_settings: Virtual network settings
        """
        # Fix: the original stub discarded its arguments; keep them readable.
        self.name = name
        self.size = size
        self.location = location
        self.ssh_public_access_enabled = ssh_public_access_enabled
        self.ssh_settings = ssh_settings
        self.assigned_user = assigned_user
        self.idle_time_before_shutdown_minutes = idle_time_before_shutdown_minutes
        self.custom_applications = custom_applications
        self.setup_scripts = setup_scripts
        self.network_settings = network_settings
        # Extra keyword arguments are accepted for forward compatibility.
        self._extra = dict(kwargs)

class ComputeInstanceSshSettings:
    """SSH key configuration for a compute instance."""

    def __init__(
        self,
        *,
        ssh_public_keys: "str | None" = None,
    ):
        """
        SSH settings for compute instances.

        Parameters:
        - ssh_public_keys: SSH public keys for authentication
        """
        # Fix: store the argument instead of discarding it.
        self.ssh_public_keys = ssh_public_keys

class AssignedUserConfiguration:
    """Azure AD identity of the single user assigned to a compute instance."""

    def __init__(
        self,
        *,
        user_tenant_id: str,
        user_object_id: str,
    ):
        """
        User assignment for compute instance.

        Parameters:
        - user_tenant_id: Azure AD tenant ID
        - user_object_id: Azure AD user object ID
        """
        # Fix: store the arguments instead of discarding them; the usage
        # example constructs this object and passes it to ComputeInstance.
        self.user_tenant_id = user_tenant_id
        self.user_object_id = user_object_id

Usage Example

from azure.ai.ml.entities import ComputeInstance, AssignedUserConfiguration

# NOTE(review): `ml_client` is assumed to be an authenticated
# azure.ai.ml.MLClient constructed elsewhere — confirm setup separately.

# Create a compute instance
# (idle_time_before_shutdown_minutes is in minutes, unlike the cluster's
# scale-down setting which is in seconds)
compute_instance = ComputeInstance(
    name="my-compute-instance",
    size="Standard_DS3_v2",
    idle_time_before_shutdown_minutes=30,
    assigned_user=AssignedUserConfiguration(
        user_tenant_id="your-tenant-id",
        user_object_id="your-user-object-id"
    )
)

# Create the compute instance
ml_client.compute.begin_create_or_update(compute_instance).result()

Kubernetes Compute

Attach existing Kubernetes clusters for running ML workloads.

class KubernetesCompute:
    """Attachment of an existing Kubernetes cluster as an ML compute target.

    IdentityConfiguration is declared elsewhere; it is referenced as a string
    annotation so this stub evaluates cleanly on its own.
    """

    def __init__(
        self,
        *,
        name: str,
        resource_id: str,
        namespace: str = "default",
        identity: "IdentityConfiguration | None" = None,
        **kwargs,
    ):
        """
        Kubernetes compute for custom container orchestration.

        Parameters:
        - name: Compute target name
        - resource_id: Azure resource ID of the Kubernetes cluster
        - namespace: Kubernetes namespace to use
        - identity: Managed identity configuration
        """
        # Fix: store the arguments instead of discarding them.
        self.name = name
        self.resource_id = resource_id
        self.namespace = namespace
        self.identity = identity
        # Extra keyword arguments are accepted for forward compatibility.
        self._extra = dict(kwargs)

Virtual Machine Compute

Attach existing virtual machines as compute targets.

class VirtualMachineCompute:
    """Attachment of an existing virtual machine as an ML compute target.

    VirtualMachineSshSettings is defined later in this document; it is
    referenced as a string annotation so the forward reference is not
    evaluated eagerly.
    """

    def __init__(
        self,
        *,
        name: str,
        resource_id: str,
        ssh_settings: "VirtualMachineSshSettings",
        **kwargs,
    ):
        """
        Virtual machine compute for custom VM environments.

        Parameters:
        - name: Compute target name
        - resource_id: Azure resource ID of the VM
        - ssh_settings: SSH connection settings
        """
        # Fix: store the arguments instead of discarding them.
        self.name = name
        self.resource_id = resource_id
        self.ssh_settings = ssh_settings
        # Extra keyword arguments are accepted for forward compatibility.
        self._extra = dict(kwargs)

class VirtualMachineSshSettings:
    """SSH connection parameters for an attached virtual machine."""

    def __init__(
        self,
        *,
        username: str,
        password: "str | None" = None,
        private_key_file: "str | None" = None,
        public_key_file: "str | None" = None,
        port: int = 22,
    ):
        """
        SSH settings for virtual machine compute.

        Parameters:
        - username: SSH username
        - password: SSH password (if using password auth)
        - private_key_file: Path to private key file
        - public_key_file: Path to public key file
        - port: SSH port number
        """
        # Fix: implicit-Optional annotations made explicit; arguments stored.
        self.username = username
        self.password = password
        self.private_key_file = private_key_file
        self.public_key_file = public_key_file
        self.port = port

Synapse Spark Compute

Integration with Azure Synapse Analytics for big data processing.

class SynapseSparkCompute:
    """Attachment of an Azure Synapse Spark pool for big data processing.

    IdentityConfiguration is declared elsewhere, and the auto-scale/pause
    settings classes appear later in this document; all are referenced as
    string annotations so no forward reference is evaluated eagerly.
    """

    def __init__(
        self,
        *,
        name: str,
        resource_id: str,
        identity: "IdentityConfiguration | None" = None,
        auto_scale_settings: "AutoScaleSettings | None" = None,
        auto_pause_settings: "AutoPauseSettings | None" = None,
        **kwargs,
    ):
        """
        Synapse Spark compute for big data processing.

        Parameters:
        - name: Compute target name
        - resource_id: Synapse workspace resource ID
        - identity: Managed identity configuration
        - auto_scale_settings: Auto-scaling configuration
        - auto_pause_settings: Auto-pause configuration
        """
        # Fix: store the arguments instead of discarding them.
        self.name = name
        self.resource_id = resource_id
        self.identity = identity
        self.auto_scale_settings = auto_scale_settings
        self.auto_pause_settings = auto_pause_settings
        # Extra keyword arguments are accepted for forward compatibility.
        self._extra = dict(kwargs)

class AutoScaleSettings:
    """Node-count auto-scaling bounds for a Synapse Spark pool."""

    def __init__(
        self,
        *,
        min_node_count: int,
        max_node_count: int,
        enabled: bool = True,
    ):
        """
        Auto-scaling settings for Synapse Spark.

        Parameters:
        - min_node_count: Minimum number of nodes
        - max_node_count: Maximum number of nodes
        - enabled: Enable auto-scaling
        """
        # Fix: store the arguments instead of discarding them.
        self.min_node_count = min_node_count
        self.max_node_count = max_node_count
        self.enabled = enabled

class AutoPauseSettings:
    """Idle auto-pause configuration for a Synapse Spark pool."""

    def __init__(
        self,
        *,
        delay_in_minutes: int,
        enabled: bool = True,
    ):
        """
        Auto-pause settings for Synapse Spark.

        Parameters:
        - delay_in_minutes: Delay before pausing in minutes
        - enabled: Enable auto-pause
        """
        # Fix: store the arguments instead of discarding them.
        self.delay_in_minutes = delay_in_minutes
        self.enabled = enabled

Network Settings

Virtual network configuration for compute resources.

class NetworkSettings:
    """Virtual-network placement for a compute resource."""

    def __init__(
        self,
        *,
        vnet_name: "str | None" = None,
        subnet: "str | None" = None,
    ):
        """
        Virtual network settings for compute.

        Parameters:
        - vnet_name: Virtual network name
        - subnet: Subnet name or resource ID
        """
        # Fix: implicit-Optional annotations made explicit; arguments stored.
        self.vnet_name = vnet_name
        self.subnet = subnet

Compute Operations

Operations for managing compute resources through the MLClient.

class ComputeOperations:
    """Operations exposed via MLClient.compute for managing compute targets.

    Compute and LROPoller are declared elsewhere in the SDK; they are
    referenced as string annotations so this stub evaluates cleanly (the
    eager annotations previously raised NameError at class definition time).
    """

    def begin_create_or_update(self, compute: "Compute") -> "LROPoller":
        """Create or update a compute target; returns a long-running poller."""
        ...

    def get(self, name: str) -> "Compute":
        """Fetch a single compute target by name."""
        ...

    def list(self) -> list:
        """List all compute targets in the workspace."""
        ...

    def begin_delete(self, name: str) -> "LROPoller":
        """Delete a compute target (long-running operation)."""
        ...

    def begin_start(self, name: str) -> "LROPoller":
        """Start a stopped compute instance (long-running operation)."""
        ...

    def begin_stop(self, name: str) -> "LROPoller":
        """Stop a running compute instance (long-running operation)."""
        ...

    def begin_restart(self, name: str) -> "LROPoller":
        """Restart a compute instance (long-running operation)."""
        ...

    def list_sizes(self, location: "str | None" = None) -> list:
        """List VM sizes available, optionally filtered by region."""
        ...

    def list_usage(self, location: str) -> list:
        """List compute usage/quota information for a region."""
        ...

Usage Example

# NOTE(review): `ml_client` is assumed to be an authenticated
# azure.ai.ml.MLClient constructed elsewhere — confirm setup separately.

# List all compute targets
compute_targets = ml_client.compute.list()
for compute in compute_targets:
    print(f"Compute: {compute.name}, Type: {compute.type}, State: {compute.provisioning_state}")

# Get specific compute target
compute = ml_client.compute.get("cpu-cluster")
print(f"Compute size: {compute.size}")
print(f"Max instances: {compute.max_instances}")

# Start a compute instance
# begin_* calls return an LROPoller; .result() blocks until completion.
ml_client.compute.begin_start("my-compute-instance").result()

# Stop a compute instance
ml_client.compute.begin_stop("my-compute-instance").result()

# List available VM sizes
# NOTE(review): attribute names `v_cpus` / `memory_gb` are presumed from the
# VirtualMachineSize entity — verify against the SDK reference.
sizes = ml_client.compute.list_sizes(location="eastus")
for size in sizes:
    print(f"Size: {size.name}, vCPUs: {size.v_cpus}, Memory: {size.memory_gb}GB")

Custom Applications

Custom applications for compute instances.

class CustomApplications:
    """Custom containerized application to run on a compute instance.

    ImageSettings and EndpointsSettings appear later in this document; they
    are referenced as string annotations so the forward references are not
    evaluated eagerly.
    """

    def __init__(
        self,
        *,
        image: "ImageSettings",
        endpoint: "EndpointsSettings",
        volumes: "list | None" = None,
    ):
        """
        Custom application configuration for compute instances.

        Parameters:
        - image: Docker image settings
        - endpoint: Endpoint configuration
        - volumes: Volume mount settings
        """
        # Fix: store the arguments instead of discarding them.
        self.image = image
        self.endpoint = endpoint
        self.volumes = volumes

class ImageSettings:
    """Container image reference for a custom application."""

    def __init__(
        self,
        *,
        reference: str,
        type: str = "docker",
    ):
        """
        Docker image settings.

        Parameters:
        - reference: Docker image reference
        - type: Image type ("docker")
        """
        # Fix: store the arguments instead of discarding them.
        self.reference = reference
        self.type = type

class EndpointsSettings:
    """Port mapping for a custom application endpoint."""

    def __init__(
        self,
        *,
        target: int,
        published: int,
        protocol: str = "tcp",
    ):
        """
        Endpoint configuration for custom applications.

        Parameters:
        - target: Target port in container
        - published: Published port on host
        - protocol: Network protocol ("tcp", "udp")
        """
        # Fix: store the arguments instead of discarding them.
        self.target = target
        self.published = published
        self.protocol = protocol

Setup Scripts

Startup scripts for compute instances and clusters.

class SetupScripts:
    """Creation/startup scripts to run on a compute resource.

    ScriptReference appears later in this document; it is referenced as a
    string annotation so the forward reference is not evaluated eagerly.
    """

    def __init__(
        self,
        *,
        creation_script: "ScriptReference | None" = None,
        startup_script: "ScriptReference | None" = None,
    ):
        """
        Setup scripts for compute resources.

        Parameters:
        - creation_script: Script to run on creation
        - startup_script: Script to run on startup
        """
        # Fix: store the arguments instead of discarding them.
        self.creation_script = creation_script
        self.startup_script = startup_script

class ScriptReference:
    """Reference to a setup script, given either by path or inline content."""

    def __init__(
        self,
        *,
        script_source: str,
        script_data: "str | None" = None,
        script_arguments: "str | None" = None,
        timeout: str = "30m",
    ):
        """
        Reference to a setup script.

        Parameters:
        - script_source: Path to script file
        - script_data: Inline script content
        - script_arguments: Script arguments
        - timeout: Script execution timeout (duration string, e.g. "30m")
        """
        # Fix: implicit-Optional annotations made explicit; arguments stored.
        self.script_source = script_source
        self.script_data = script_data
        self.script_arguments = script_arguments
        self.timeout = timeout

Usage Example

from azure.ai.ml.entities import ComputeInstance, SetupScripts, ScriptReference

# NOTE(review): `ml_client` is assumed to be an authenticated
# azure.ai.ml.MLClient constructed elsewhere — confirm setup separately.

# Setup script to install additional packages
# (timeout is a duration string, e.g. "10m" = ten minutes)
setup_script = SetupScripts(
    startup_script=ScriptReference(
        script_source="./scripts/setup.sh",
        script_arguments="--install-packages",
        timeout="10m"
    )
)

# Compute instance with setup script
compute_instance = ComputeInstance(
    name="instance-with-setup",
    size="Standard_DS3_v2",
    setup_scripts=setup_script
)

ml_client.compute.begin_create_or_update(compute_instance).result()

Install with Tessl CLI

npx tessl i tessl/pypi-azure-ai-ml

docs

asset-management.md

automl.md

client-auth.md

compute-management.md

hyperparameter-tuning.md

index.md

job-management.md

model-deployment.md

tile.json