Azure Data Migration Client Library for programmatically managing database migration services and operations.
—
Quality: Pending — best-practices review has not yet been performed.
Impact: Pending — no eval scenarios have been run.
Operations for managing project files including migration scripts, configuration files, and other migration assets. Files provide a way to store and retrieve migration-related documents, scripts, and configuration data within projects.
Creates a new project file or updates an existing one.
def create_or_update(
group_name: str,
service_name: str,
project_name: str,
file_name: str,
parameters: ProjectFile,
**kwargs
) -> ProjectFile:
"""
Creates or updates a project file.
Parameters:
- group_name: Name of the resource group
- service_name: Name of the Data Migration Service
- project_name: Name of the project
- file_name: Name of the file
- parameters: File properties and metadata
Returns:
ProjectFile object with file metadata
"""Retrieves metadata for a project file.
def get(
group_name: str,
service_name: str,
project_name: str,
file_name: str,
**kwargs
) -> ProjectFile:
"""
Gets project file metadata.
Parameters:
- group_name: Name of the resource group
- service_name: Name of the Data Migration Service
- project_name: Name of the project
- file_name: Name of the file
Returns:
ProjectFile with metadata and properties
"""Deletes a project file.
def delete(
group_name: str,
service_name: str,
project_name: str,
file_name: str,
**kwargs
) -> None:
"""
Deletes a project file.
Parameters:
- group_name: Name of the resource group
- service_name: Name of the Data Migration Service
- project_name: Name of the project
- file_name: Name of the file
Returns:
None
"""Updates properties of an existing project file.
def update(
group_name: str,
service_name: str,
project_name: str,
file_name: str,
parameters: ProjectFile,
**kwargs
) -> ProjectFile:
"""
Updates a project file.
Parameters:
- group_name: Name of the resource group
- service_name: Name of the Data Migration Service
- project_name: Name of the project
- file_name: Name of the file
- parameters: Updated file properties
Returns:
Updated ProjectFile object
"""Lists all files within a project.
def list(
group_name: str,
service_name: str,
project_name: str,
**kwargs
) -> ItemPaged[ProjectFile]:
"""
Lists project files.
Parameters:
- group_name: Name of the resource group
- service_name: Name of the Data Migration Service
- project_name: Name of the project
Returns:
Paged collection of ProjectFile objects
"""Gets download URL for reading file content.
def read(
group_name: str,
service_name: str,
project_name: str,
file_name: str,
**kwargs
) -> FileStorageInfo:
"""
Gets file download information.
Parameters:
- group_name: Name of the resource group
- service_name: Name of the Data Migration Service
- project_name: Name of the project
- file_name: Name of the file
Returns:
FileStorageInfo with download URL and headers
"""Usage Example:
import requests
# Get download access for a migration script
file_info = client.files.read(
group_name="myResourceGroup",
service_name="myMigrationService",
project_name="myProject",
file_name="migration-script.sql"
)
# Download the file content using the provided URI and headers
response = requests.get(file_info.uri, headers=file_info.headers)
if response.status_code == 200:
file_content = response.text
print(f"Downloaded file content: {len(file_content)} characters")
else:
print(f"Download failed with status: {response.status_code}")

Gets both download and upload URLs for file access.
def read_write(
group_name: str,
service_name: str,
project_name: str,
file_name: str,
**kwargs
) -> FileStorageInfo:
"""
Gets file read/write access information.
Parameters:
- group_name: Name of the resource group
- service_name: Name of the Data Migration Service
- project_name: Name of the project
- file_name: Name of the file
Returns:
FileStorageInfo with both upload and download URLs
"""Usage Example:
import requests
# Get read/write access for a configuration file
file_info = client.files.read_write(
group_name="myResourceGroup",
service_name="myMigrationService",
project_name="myProject",
file_name="config.json"
)
# Download existing content (if any)
response = requests.get(file_info.uri, headers=file_info.headers)
if response.status_code == 200:
existing_content = response.json()
print("Downloaded existing configuration")
else:
existing_content = {}
# Modify content
existing_content['updated'] = True
existing_content['timestamp'] = '2023-01-01T00:00:00Z'
# Upload updated content using the same URI and headers
upload_response = requests.put(
file_info.uri,
headers=file_info.headers,
json=existing_content
)
if upload_response.status_code in [200, 201]:
print("Successfully updated file content")
else:
print(f"Upload failed with status: {upload_response.status_code}")

class ProjectFile:
"""Project file resource definition."""
def __init__(self, properties: ProjectFileProperties = None, **kwargs):
"""
Initialize project file.
Parameters:
- properties: File properties and metadata
"""
# Properties
etag: str # Entity tag for concurrency control
properties: ProjectFileProperties # File properties
id: str # Resource ID
name: str # Resource name
type: str # Resource type

class ProjectFileProperties:
"""Project file properties and metadata."""
def __init__(self, **kwargs):
"""Initialize file properties."""
# Properties
extension: str # File extension
file_path: str # File path within project
last_modified: datetime # Last modification timestamp
media_type: str # MIME type
size: int # File size in bytes

class FileStorageInfo:
"""File storage access information."""
# Properties
uri: str # File access URI
headers: Dict[str, str] # Required headers for access

from azure.mgmt.datamigration.models import ProjectFile, ProjectFileProperties
# Create file metadata
file_properties = ProjectFileProperties(
extension="json",
file_path="/config/migration-config.json",
media_type="application/json"
)
project_file = ProjectFile(
properties=file_properties
)
# Create the file entry
file_entry = client.files.create_or_update(
group_name="myResourceGroup",
service_name="myMigrationService",
project_name="myProject",
file_name="migration-config.json",
parameters=project_file
)
# Get upload URL
storage_info = client.files.read_write(
group_name="myResourceGroup",
service_name="myMigrationService",
project_name="myProject",
file_name="migration-config.json"
)
print(f"Upload URL: {storage_info.uri}")
print(f"Required headers: {storage_info.headers}")

# Get download URL for existing file
storage_info = client.files.read(
group_name="myResourceGroup",
service_name="myMigrationService",
project_name="myProject",
file_name="migration-script.sql"
)
# Use the URL to download the file
import requests
response = requests.get(
storage_info.uri,
headers=storage_info.headers
)
if response.status_code == 200:
script_content = response.text
print("Downloaded migration script successfully")
else:
print(f"Download failed: {response.status_code}")

# List all files in project
files = client.files.list(
group_name="myResourceGroup",
service_name="myMigrationService",
project_name="myProject"
)
for file in files:
print(f"File: {file.name}")
print(f" Size: {file.properties.size} bytes")
print(f" Type: {file.properties.media_type}")
print(f" Modified: {file.properties.last_modified}")
# Delete old files
old_files = [f for f in files if f.name.startswith("old_")]
for old_file in old_files:
client.files.delete(
group_name="myResourceGroup",
service_name="myMigrationService",
project_name="myProject",
file_name=old_file.name
)
print(f"Deleted: {old_file.name}")

class FileShare:
"""File share configuration."""
def __init__(self, path: str, **kwargs):
"""
Parameters:
- path: File share path
- user_name: Username for file share access
- password: Password for file share access
"""
# Properties
path: str # Required: File share path
user_name: str # Username for access
password: str # Password for access

class BlobShare:
"""Azure Blob storage share configuration."""
def __init__(self, sas_uri: str, **kwargs):
"""
Parameters:
- sas_uri: Blob container SAS URI
"""
# Properties
sas_uri: str # Required: SAS URI for blob container

Project files typically include:
- .sql, .js, .py files containing migration logic
- .json, .xml, .yaml files with migration settings
- .sql, .json files defining database schemas
- .json, .csv files defining data transformations
- .sql, .py files for data validation
- .md, .txt files with migration documentation

Common file operation errors:
File operations use Azure Storage behind the scenes, so network connectivity and proper authentication are essential for successful file management operations.
Install with Tessl CLI
npx tessl i tessl/pypi-azure-mgmt-datamigration