# Azure File Share storage client library for Python

API reference: data models, enums, and configuration options exposed by the `azure-storage-file-share` package.
The Azure Storage File Share SDK provides comprehensive data models and enums that represent file share properties, metadata, permissions, and various configuration options.
from datetime import datetime
from typing import Any, Dict, List, Literal, Optional, Union

from azure.storage.fileshare import ShareProperties
class ShareProperties:
    """Properties of a file share.

    Instances are populated by the service client; callers are not expected
    to construct this class directly.
    """

    name: str
    """The name of the share."""
    last_modified: datetime
    """A datetime object representing the last time the share was modified."""
    etag: str
    """The ETag contains a value for performing operations conditionally."""
    quota: int
    """The allocated quota in GB."""
    access_tier: Optional[str]
    """The share's access tier (Hot, Cool, TransactionOptimized, Premium)."""
    next_allowed_quota_downgrade_time: Optional[str]
    """The share's next allowed quota downgrade time."""
    metadata: Dict[str, str]
    """Name-value pairs associated with the share as metadata."""
    snapshot: Optional[str]
    """Snapshot identifier if this represents a share snapshot."""
    deleted: Optional[bool]
    """Whether this share was deleted (for soft-deleted shares)."""
    deleted_time: Optional[datetime]
    """Time at which the share was deleted (for soft-deleted shares)."""
    version: Optional[str]
    """Version identifier for deleted share."""
    remaining_retention_days: Optional[int]
    """Number of days until permanent deletion (for soft-deleted shares)."""
    provisioned_egress_mbps: Optional[int]
    """Provisioned egress in Mbps (premium file accounts only)."""
    provisioned_ingress_mbps: Optional[int]
    """Provisioned ingress in Mbps (premium file accounts only)."""
    provisioned_iops: Optional[int]
    """Provisioned IOPS (premium file accounts only)."""
    provisioned_bandwidth: Optional[int]
    """Provisioned bandwidth in Mbps (premium file accounts only)."""
    # Forward references are quoted: these classes are defined later in this document.
    lease: "LeaseProperties"
    """Share lease properties."""
    protocols: Optional[List[str]]
    """Enabled protocols (SMB, NFS)."""
    root_squash: Optional[Union["ShareRootSquash", str]]
    """Root squash setting (NoRootSquash, RootSquash, AllSquash)."""
    enable_snapshot_virtual_directory_access: Optional[bool]
    """Whether snapshot virtual directory is accessible at share mount point."""
    paid_bursting_enabled: Optional[int]
    """Whether paid bursting is enabled."""
    paid_bursting_bandwidth_mibps: Optional[int]
    """Maximum throughput with paid bursting in MiB/s."""
    paid_bursting_iops: Optional[int]
    """Maximum IOPS with paid bursting."""
    next_provisioned_iops_downgrade: Optional[datetime]
    """Next allowed provisioned IOPS downgrade time."""
    next_provisioned_bandwidth_downgrade: Optional[datetime]
    """Next allowed provisioned bandwidth downgrade time."""

# Original snippet-boundary import (commented out: the stubs below shadow these names):
# from azure.storage.fileshare import FileProperties, ContentSettings, CopyProperties, LeaseProperties
from azure.storage.fileshare import NTFSAttributes
class FileProperties:
    """Properties of a file.

    Instances are populated by the service client; callers are not expected
    to construct this class directly.
    """

    name: str
    """The name of the file."""
    path: Optional[str]
    """The path of the file."""
    share: Optional[str]
    """The name of the share containing the file."""
    snapshot: Optional[str]
    """File snapshot identifier."""
    content_length: int
    """Size of file in bytes."""
    metadata: Dict[str, str]
    """Name-value pairs associated with the file as metadata."""
    file_type: str
    """String indicating the type of file."""
    last_modified: datetime
    """Last modification time of the file."""
    etag: str
    """ETag for conditional operations."""
    size: int
    """Size of the file in bytes."""
    content_range: Optional[str]
    """Range of bytes returned for partial content requests."""
    server_encrypted: bool
    """Whether encryption is enabled."""
    # Forward references are quoted: these classes are defined later in this document.
    copy: "CopyProperties"
    """Copy operation properties."""
    content_settings: "ContentSettings"
    """HTTP content settings for the file."""
    lease: "LeaseProperties"
    """File lease properties."""
    change_time: Optional[Union[str, datetime]]
    """Change time for the file (Windows)."""
    creation_time: Optional[Union[str, datetime]]
    """Creation time for the file (Windows)."""
    last_write_time: Optional[Union[str, datetime]]
    """Last write time for the file (Windows)."""
    last_access_time: Optional[datetime]
    """Last access time for the file."""
    file_attributes: Union[str, "NTFSAttributes"]
    """File system attributes."""
    permission_key: str
    """Key of the permission set for the file."""
    file_id: str
    """Unique identifier for the file."""
    parent_id: Optional[str]
    """Unique identifier of the parent directory."""
    is_directory: bool
    """Whether this represents a directory (False for files)."""
    owner: Optional[str]
    """NFS only - The owner of the file."""
    group: Optional[str]
    """NFS only - The owning group of the file."""
    file_mode: Optional[str]
    """NFS only - The file mode."""
    link_count: Optional[int]
    """NFS only - Number of hard links."""
    nfs_file_type: Optional[Literal['Regular']]
    """NFS only - The type of the file."""

# Original snippet-boundary import (commented out: the stub below shadows this name):
# from azure.storage.fileshare import DirectoryProperties
class DirectoryProperties:
    """Properties of a directory.

    Instances are populated by the service client; callers are not expected
    to construct this class directly.
    """

    name: str
    """The name of the directory."""
    last_modified: datetime
    """Last modification time of the directory."""
    etag: str
    """ETag for conditional operations."""
    server_encrypted: bool
    """Whether encryption is enabled."""
    metadata: Dict[str, str]
    """Name-value pairs associated with the directory as metadata."""
    change_time: Optional[Union[str, datetime]]
    """Change time for the directory (Windows)."""
    creation_time: Optional[Union[str, datetime]]
    """Creation time for the directory (Windows)."""
    last_write_time: Optional[Union[str, datetime]]
    """Last write time for the directory (Windows)."""
    last_access_time: Optional[datetime]
    """Last access time for the directory."""
    # Forward reference quoted: NTFSAttributes is defined later in this document.
    file_attributes: Union[str, "NTFSAttributes"]
    """File system attributes."""
    permission_key: str
    """Key of the permission set for the directory."""
    file_id: str
    """Unique identifier for the directory."""
    parent_id: str
    """Unique identifier of the parent directory."""
    is_directory: bool
    """Whether this represents a directory (True for directories)."""
    owner: Optional[str]
    """NFS only - The owner of the directory."""
    group: Optional[str]
    """NFS only - The owning group of the directory."""
    file_mode: Optional[str]
    """NFS only - The directory mode."""
    nfs_file_type: Optional[Literal['Directory']]
    """NFS only - The type of the directory."""

# Original snippet-boundary import (commented out: the stub below shadows this name):
# from azure.storage.fileshare import Handle
from typing import List, Literal
class Handle:
    """A listed Azure Storage handle item.

    Instances are returned by ``list_handles``; callers are not expected
    to construct this class directly.
    """

    client_name: str
    """Name of the client machine where the share is being mounted."""
    id: str
    """XSMB service handle ID."""
    path: str
    """File or directory name including full path starting from share root."""
    file_id: str
    """Unique identifier for the file or directory."""
    parent_id: str
    """Unique identifier of the parent directory."""
    session_id: str
    """SMB session ID in context of which the file handle was opened."""
    client_ip: str
    """Client IP that opened the handle."""
    open_time: datetime
    """Time when the handle was opened (UTC)."""
    last_reconnect_time: Optional[datetime]
    """Time handle was last connected to (UTC)."""
    access_rights: List[Literal['Read', 'Write', 'Delete']]
    """Access rights of the handle."""

# Original snippet-boundary import (commented out: the stub below shadows this name):
# from azure.storage.fileshare import LeaseProperties
class LeaseProperties:
    """Properties of a lease on a share or file."""

    status: str
    """The lease status (locked, unlocked)."""
    state: str
    """Lease state (available, leased, expired, breaking, broken)."""
    duration: str
    """Lease duration (infinite, fixed)."""

# Original snippet-boundary import (commented out: the stub below shadows this name):
# from azure.storage.fileshare import ContentSettings
class ContentSettings:
    """HTTP content settings for a file."""

    def __init__(
        self,
        content_type: Optional[str] = None,
        content_encoding: Optional[str] = None,
        content_language: Optional[str] = None,
        content_disposition: Optional[str] = None,
        cache_control: Optional[str] = None,
        content_md5: Optional[bytearray] = None
    ) -> None:
        """
        Initialize ContentSettings.

        Parameters:
            content_type: The content type (MIME type) of the file
            content_encoding: Content encoding (e.g., 'gzip')
            content_language: Content language (e.g., 'en-US')
            content_disposition: Content disposition (e.g., 'attachment; filename=file.txt')
            cache_control: Cache control directive (e.g., 'no-cache', 'max-age=3600')
            content_md5: MD5 hash of the content for integrity verification
        """

    content_type: str
    """The content type specified for the file. Default: 'application/octet-stream'."""
    content_encoding: str
    """Content encoding value stored for the file."""
    content_language: str
    """Content language value stored for the file."""
    content_disposition: str
    """Content disposition value stored for the file."""
    cache_control: str
    """Cache control value stored for the file."""
    content_md5: bytearray
    """MD5 hash for message content integrity verification."""

# Original snippet-boundary import (commented out: the stub below shadows this name):
# from azure.storage.fileshare import CopyProperties
class CopyProperties:
    """Properties of a copy operation.

    Populated on a file's properties when the file was the destination of a
    copy operation.
    """

    id: str
    """String identifier for the copy operation."""
    source: str
    """URL of the source file (up to 2 KB)."""
    status: str
    """State of the copy operation (pending, success, aborted, failed)."""
    progress: str
    """Number of bytes copied and total bytes in source."""
    completion_time: datetime
    """Completion time of the copy operation."""
    status_description: str
    """Description of failed or pending copy operations."""
    incremental_copy: bool
    """Whether this is an incremental copy operation."""
    destination_snapshot: str
    """Destination snapshot for incremental copy operations."""

# Original snippet-boundary import (commented out: the stubs below shadow these names):
# from azure.storage.fileshare import Metrics, RetentionPolicy
class Metrics:
    """Request statistics grouped by API in hour or minute aggregates."""

    def __init__(
        self,
        enabled: bool = False,
        include_apis: bool = False,
        # Forward reference quoted: RetentionPolicy is defined later in this document.
        retention_policy: Optional["RetentionPolicy"] = None,
        version: str = "1.0"
    ) -> None:
        """
        Initialize Metrics.

        Parameters:
            enabled: Whether metrics are enabled for the File service
            include_apis: Whether to generate summary statistics for API operations
            retention_policy: How long the associated data should persist
            version: Storage Analytics version (default: 1.0)
        """

    version: str
    """The version of Storage Analytics to configure."""
    enabled: bool
    """Whether metrics are enabled for the File service."""
    include_apis: bool
    """Whether metrics should generate summary statistics for API operations."""
    retention_policy: "RetentionPolicy"
    """Determines how long the associated data should persist."""

# Original snippet-boundary import (commented out: the stub below shadows this name):
# from azure.storage.fileshare import RetentionPolicy
class RetentionPolicy:
    """Retention policy for metrics or logging data."""

    def __init__(
        self,
        enabled: bool = False,
        days: Optional[int] = None
    ) -> None:
        """
        Initialize RetentionPolicy.

        Parameters:
            enabled: Whether a retention policy is enabled
            days: Number of days data should be retained (1-365)
        """

    enabled: bool
    """Whether a retention policy is enabled for the storage service."""
    days: Optional[int]
    """Number of days that data should be retained (1-365 when enabled)."""

# Original snippet-boundary import (commented out: the stub below shadows this name):
# from azure.storage.fileshare import CorsRule
class CorsRule:
    """CORS rule for cross-origin resource sharing."""

    def __init__(
        self,
        allowed_origins: Union[str, List[str]],
        allowed_methods: Union[str, List[str]],
        allowed_headers: Union[str, List[str]] = "",
        exposed_headers: Union[str, List[str]] = "",
        max_age_in_seconds: int = 0
    ) -> None:
        """
        Initialize CorsRule.

        Parameters:
            allowed_origins: Origin domains allowed via CORS, or "*" for all
            allowed_methods: HTTP methods allowed to be executed by the origin
            allowed_headers: Headers allowed to be part of the cross-origin request
            exposed_headers: Response headers to expose to CORS clients
            max_age_in_seconds: Seconds client/browser should cache preflight response
        """

    allowed_origins: str
    """Comma-delimited list of allowed origin domains or "*" for all."""
    allowed_methods: str
    """Comma-delimited list of allowed HTTP methods."""
    allowed_headers: str
    """Comma-delimited list of allowed request headers."""
    exposed_headers: str
    """Comma-delimited list of response headers to expose."""
    max_age_in_seconds: int
    """Seconds to cache preflight response."""

# Original snippet-boundary import (commented out: the stubs below shadow these names):
# from azure.storage.fileshare import AccessPolicy, ShareSasPermissions
from datetime import datetime
class AccessPolicy:
    """Access policy for a share (stored access policy for SAS tokens)."""

    def __init__(
        self,
        # Forward reference quoted: ShareSasPermissions is defined later in this document.
        permission: Optional[Union["ShareSasPermissions", str]] = None,
        expiry: Optional[datetime] = None,
        start: Optional[datetime] = None
    ) -> None:
        """
        Initialize AccessPolicy.

        Parameters:
            permission: Permissions associated with the shared access signature
            expiry: Time at which the shared access signature becomes invalid
            start: Time at which the shared access signature becomes valid
        """

    permission: Union["ShareSasPermissions", str]
    """The permissions associated with the shared access signature."""
    expiry: datetime
    """The time at which the shared access signature becomes invalid."""
    start: datetime
    """The time at which the shared access signature becomes valid."""

# Original snippet-boundary import (commented out: the stub below shadows this name):
# from azure.storage.fileshare import ShareSasPermissions
class ShareSasPermissions:
    """ShareSasPermissions class for generating shared access signatures."""

    def __init__(
        self,
        read: bool = False,
        create: bool = False,
        write: bool = False,
        delete: bool = False,
        list: bool = False  # NOTE: shadows the builtin, kept for API compatibility
    ) -> None:
        """
        Initialize ShareSasPermissions.

        Parameters:
            read: Read content, properties, metadata of any file in the share
            create: Create a new file or copy a file to a new file in the share
            write: Create or write content, properties, metadata. Resize files
            delete: Delete any file in the share
            list: List files and directories in the share
        """

    read: bool
    """Read the content, properties, metadata of any file in the share."""
    create: bool
    """Create a new file in the share, or copy a file to a new file."""
    write: bool
    """Create or write content, properties, metadata. Resize the file."""
    delete: bool
    """Delete any file in the share."""
    list: bool
    """List files and directories in the share."""

    @classmethod
    def from_string(cls, permission: str) -> "ShareSasPermissions":
        """
        Create ShareSasPermissions from a string.

        Parameters:
            permission: Permission string (r=read, c=create, w=write, d=delete, l=list)
        Returns:
            ShareSasPermissions: Configured permissions object
        """

# Original snippet-boundary import (commented out: the stub below shadows this name):
# from azure.storage.fileshare import FileSasPermissions
class FileSasPermissions:
    """FileSasPermissions class for generating shared access signatures."""

    def __init__(
        self,
        read: bool = False,
        create: bool = False,
        write: bool = False,
        delete: bool = False
    ) -> None:
        """
        Initialize FileSasPermissions.

        Parameters:
            read: Read content, properties, metadata. Use as source of copy operation
            create: Create a new file or copy a file to a new file
            write: Create or write content, properties, metadata. Resize file
            delete: Delete the file
        """

    read: bool
    """Read the content, properties, metadata."""
    create: bool
    """Create a new file or copy a file to a new file."""
    write: bool
    """Create or write content, properties, metadata. Resize the file."""
    delete: bool
    """Delete the file."""

    @classmethod
    def from_string(cls, permission: str) -> "FileSasPermissions":
        """
        Create FileSasPermissions from a string.

        Parameters:
            permission: Permission string (r=read, c=create, w=write, d=delete)
        Returns:
            FileSasPermissions: Configured permissions object
        """

# Original snippet-boundary import (commented out: the stub below shadows this name):
# from azure.storage.fileshare import NTFSAttributes
class NTFSAttributes:
    """Valid set of attributes to set for file or directory.

    To set an attribute for a directory, 'directory' should always be enabled
    except when setting 'none' for a directory.
    """

    read_only: bool = False
    """Enable/disable 'ReadOnly' attribute for DIRECTORY or FILE."""
    hidden: bool = False
    """Enable/disable 'Hidden' attribute for DIRECTORY or FILE."""
    system: bool = False
    """Enable/disable 'System' attribute for DIRECTORY or FILE."""
    none: bool = False
    """Enable/disable 'None' attribute for DIRECTORY or FILE to clear all attributes of FILE/DIRECTORY."""
    directory: bool = False
    """Enable/disable 'Directory' attribute for DIRECTORY."""
    # NOTE(review): the remaining attributes presumably apply to both files and
    # directories (the original text said "for DIRECTORY", which looks like a
    # copy-paste artifact) — confirm against the official SDK reference.
    archive: bool = False
    """Enable/disable 'Archive' attribute for DIRECTORY or FILE."""
    temporary: bool = False
    """Enable/disable 'Temporary' attribute for DIRECTORY or FILE."""
    offline: bool = False
    """Enable/disable 'Offline' attribute for DIRECTORY or FILE."""
    not_content_indexed: bool = False
    """Enable/disable 'NotContentIndexed' attribute for DIRECTORY or FILE."""
    no_scrub_data: bool = False
    """Enable/disable 'NoScrubData' attribute for DIRECTORY or FILE."""

# Original snippet-boundary import (commented out: the stub below shadows this name):
# from azure.storage.fileshare import ShareProtocols
from enum import Enum
class ShareProtocols(str, Enum):
    """Enabled protocols on the share."""

    SMB = "SMB"
    """Server Message Block protocol."""
    NFS = "NFS"
    """Network File System protocol."""

# Original snippet-boundary import (commented out: the stub below shadows this name):
# from azure.storage.fileshare import ShareRootSquash
from enum import Enum
class ShareRootSquash(str, Enum):
    """Root squash setting for NFS shares."""

    NO_ROOT_SQUASH = "NoRootSquash"
    """No root squashing (default)."""
    ROOT_SQUASH = "RootSquash"
    """Root user mapped to anonymous user."""
    ALL_SQUASH = "AllSquash"
    """All users mapped to anonymous user."""

# Original snippet-boundary import (commented out: the stub below shadows this name):
# from azure.storage.fileshare import ShareAccessTier
from enum import Enum
class ShareAccessTier(str, Enum):
    """Access tier for the share."""

    COOL = "Cool"
    """Cool access tier for infrequently accessed data."""
    HOT = "Hot"
    """Hot access tier for frequently accessed data."""
    PREMIUM = "Premium"
    """Premium access tier for high-performance requirements."""
    TRANSACTION_OPTIMIZED = "TransactionOptimized"
    """Transaction optimized tier for high transaction volumes."""

# Original snippet-boundary import (commented out: the stubs below shadow these names):
# from azure.storage.fileshare import ShareProtocolSettings, ShareSmbSettings
class ShareProtocolSettings:
    """Protocol settings for a share."""

    # Forward reference quoted: ShareSmbSettings is defined later in this document.
    def __init__(self, smb: Optional["ShareSmbSettings"] = None) -> None:
        """
        Initialize ShareProtocolSettings.

        Parameters:
            smb: SMB protocol settings
        """

    smb: "ShareSmbSettings"
    """Settings for SMB protocol."""

# Original snippet-boundary import (commented out: the stubs below shadow these names):
# from azure.storage.fileshare import ShareSmbSettings, SmbMultichannel
class ShareSmbSettings:
    """Settings for SMB protocol."""

    # Forward reference quoted: SmbMultichannel is defined later in this document.
    def __init__(self, multichannel: Optional["SmbMultichannel"] = None) -> None:
        """
        Initialize ShareSmbSettings.

        Parameters:
            multichannel: SMB multichannel settings
        """

    multichannel: "SmbMultichannel"
    """Settings for SMB multichannel."""

# Original snippet-boundary import (commented out: the stub below shadows this name):
# from azure.storage.fileshare import SmbMultichannel
class SmbMultichannel:
    """Settings for SMB multichannel."""

    def __init__(self, enabled: bool = False) -> None:
        """
        Initialize SmbMultichannel.

        Parameters:
            enabled: Whether SMB multichannel is enabled
        """

    enabled: bool
    """If SMB multichannel is enabled."""

# Import for the following example (split off the line it was fused onto):
from azure.storage.fileshare import ShareFileClient, ContentSettings, NTFSAttributes
from datetime import datetime, timezone
# Example: read comprehensive file properties.
# NOTE(review): `conn_str` is a placeholder for a storage connection string.
file_client = ShareFileClient.from_connection_string(
    conn_str, "share", "myfile.txt"
)
properties = file_client.get_file_properties()

# Access all property types
print(f"File name: {properties.name}")
print(f"File size: {properties.size} bytes")
print(f"Content type: {properties.content_settings.content_type}")
print(f"Last modified: {properties.last_modified}")
print(f"ETag: {properties.etag}")
print(f"Server encrypted: {properties.server_encrypted}")

# File system attributes
print(f"File attributes: {properties.file_attributes}")
print(f"Creation time: {properties.creation_time}")
print(f"Last write time: {properties.last_write_time}")
print(f"File ID: {properties.file_id}")

# Metadata
for key, value in properties.metadata.items():
    print(f"Metadata {key}: {value}")

# Copy properties (populated only if the file was the target of a copy)
if properties.copy.id:
    print(f"Copy ID: {properties.copy.id}")
    print(f"Copy status: {properties.copy.status}")
    print(f"Copy progress: {properties.copy.progress}")

# Lease properties
print(f"Lease status: {properties.lease.status}")
print(f"Lease state: {properties.lease.state}")

# Import for the following example (split off the line it was fused onto):
from azure.storage.fileshare import ContentSettings
# Example: create comprehensive content settings and apply them to a file.
content_settings = ContentSettings(
    content_type="application/json",
    content_encoding="gzip",
    content_language="en-US",
    content_disposition="attachment; filename=data.json",
    cache_control="max-age=3600, must-revalidate"
)

# Apply to file
file_client.set_http_headers(content_settings=content_settings)

# Create settings for different file types
pdf_settings = ContentSettings(
    content_type="application/pdf",
    content_disposition="inline; filename=document.pdf",
    cache_control="public, max-age=86400"
)
image_settings = ContentSettings(
    content_type="image/jpeg",
    cache_control="public, max-age=31536000"  # 1 year
)
text_settings = ContentSettings(
    # The character set belongs in content_type; "utf-8" is not a valid
    # Content-Encoding value (that header is for compression like 'gzip').
    content_type="text/plain; charset=utf-8"
)

# Import for the following example (split off the line it was fused onto):
from azure.storage.fileshare import NTFSAttributes
# Example: set file attributes using bitwise operations.
# NOTE(review): this snippet treats NTFSAttributes as a flag enum
# (NTFSAttributes.ReadOnly | NTFSAttributes.Archive), which does not match the
# boolean-field stub documented above — verify against the installed SDK version.
readonly_archived = NTFSAttributes.ReadOnly | NTFSAttributes.Archive
hidden_system = NTFSAttributes.Hidden | NTFSAttributes.System
normal_file = NTFSAttributes.Normal

# Set attributes on file
file_client.set_http_headers(file_attributes=readonly_archived)

# Set directory attributes (Directory flag is required)
directory_attrs = NTFSAttributes.Directory | NTFSAttributes.Archive
directory_client.set_http_headers(file_attributes=directory_attrs)

# Check specific attributes
props = file_client.get_file_properties()
current_attrs = props.file_attributes
if isinstance(current_attrs, int):
    is_readonly = bool(current_attrs & NTFSAttributes.ReadOnly)
    is_hidden = bool(current_attrs & NTFSAttributes.Hidden)
    is_archive = bool(current_attrs & NTFSAttributes.Archive)
    print(f"Read-only: {is_readonly}")
    print(f"Hidden: {is_hidden}")
    print(f"Archive: {is_archive}")

# Import for the following example (split off the line it was fused onto):
from azure.storage.fileshare import (
    ShareServiceClient, Metrics, RetentionPolicy, CorsRule,
    ShareProtocolSettings, ShareSmbSettings, SmbMultichannel
)
# Example: configure service-level properties (metrics, CORS, SMB protocol).
service_client = ShareServiceClient.from_connection_string(conn_str)

# Configure metrics
retention = RetentionPolicy(enabled=True, days=7)
hour_metrics = Metrics(
    enabled=True,
    include_apis=True,
    retention_policy=retention,
    version="1.0"
)
minute_metrics = Metrics(
    enabled=True,
    include_apis=False,
    retention_policy=retention
)

# Configure CORS
cors_rules = [
    CorsRule(
        allowed_origins=["https://mywebsite.com", "https://www.mywebsite.com"],
        allowed_methods=["GET", "PUT", "POST", "DELETE", "HEAD"],
        allowed_headers=["x-ms-*", "content-type", "accept"],
        exposed_headers=["x-ms-*"],
        max_age_in_seconds=3600
    ),
    CorsRule(
        allowed_origins=["https://admin.mywebsite.com"],
        allowed_methods=["GET", "PUT", "POST"],
        allowed_headers=["*"],
        exposed_headers=["*"],
        max_age_in_seconds=7200
    )
]

# Configure SMB protocol settings
smb_multichannel = SmbMultichannel(enabled=True)
smb_settings = ShareSmbSettings(multichannel=smb_multichannel)
protocol_settings = ShareProtocolSettings(smb=smb_settings)

# Apply all settings in a single call
service_client.set_service_properties(
    hour_metrics=hour_metrics,
    minute_metrics=minute_metrics,
    cors=cors_rules,
    protocol=protocol_settings
)

# Import for the following example (split off the line it was fused onto):
from azure.storage.fileshare import ShareSasPermissions, FileSasPermissions, AccessPolicy
from datetime import datetime, timedelta, timezone

# Example: create SAS permission objects and stored access policies.
share_permissions = ShareSasPermissions(
    read=True,
    write=True,
    create=True,
    list=True
)
file_permissions = FileSasPermissions(
    read=True,
    write=True
)

# Use with access policies. Timezone-aware datetimes are used instead of the
# deprecated naive datetime.utcnow().
read_only_policy = AccessPolicy(
    permission=ShareSasPermissions(read=True, list=True),
    expiry=datetime.now(timezone.utc) + timedelta(days=30),
    start=datetime.now(timezone.utc)
)
write_policy = AccessPolicy(
    permission=ShareSasPermissions(read=True, write=True, create=True, delete=True),
    expiry=datetime.now(timezone.utc) + timedelta(hours=2),
    start=datetime.now(timezone.utc)
)

# Set policies on share
share_client.set_share_access_policy({
    "readonly": read_only_policy,
    "write": write_policy
})

# Create permissions from strings
share_perms_from_string = ShareSasPermissions.from_string("rwcl")  # read, write, create, list
file_perms_from_string = FileSasPermissions.from_string("rw")  # read, write

# Import for the following example (split off the line it was fused onto):
from azure.storage.fileshare import ShareClient, ShareProtocols, ShareRootSquash
# Example: read comprehensive share properties.
share_client = ShareClient.from_connection_string(conn_str, "myshare")
properties = share_client.get_share_properties()

print(f"Share name: {properties.name}")
print(f"Quota: {properties.quota} GB")
print(f"Last modified: {properties.last_modified}")
print(f"Access tier: {properties.access_tier}")
print(f"Protocols: {properties.protocols}")
print(f"Root squash: {properties.root_squash}")

# Premium share properties
if properties.provisioned_iops:
    print(f"Provisioned IOPS: {properties.provisioned_iops}")
    print(f"Provisioned bandwidth: {properties.provisioned_bandwidth} Mbps")
    print(f"Provisioned egress: {properties.provisioned_egress_mbps} Mbps")

# Soft-deleted share properties
if properties.deleted:
    print(f"Deleted time: {properties.deleted_time}")
    print(f"Remaining retention days: {properties.remaining_retention_days}")

# Snapshot properties
if properties.snapshot:
    print(f"Snapshot ID: {properties.snapshot}")

# Metadata
for key, value in properties.metadata.items():
    print(f"Metadata {key}: {value}")

# Lease information
lease = properties.lease
print(f"Lease status: {lease.status}")
print(f"Lease state: {lease.state}")
print(f"Lease duration: {lease.duration}")

# Import for the following example (split off the line it was fused onto):
from azure.storage.fileshare import ShareDirectoryClient
# Example: list and close open SMB handles on a directory tree.
directory_client = ShareDirectoryClient.from_connection_string(
    conn_str, "share", "directory"
)

# List and examine handles
handles = list(directory_client.list_handles(recursive=True))
for handle in handles:
    print(f"Handle ID: {handle.id}")
    print(f"Client: {handle.client_name} ({handle.client_ip})")
    print(f"Path: {handle.path}")
    print(f"File ID: {handle.file_id}")
    print(f"Session ID: {handle.session_id}")
    print(f"Opened: {handle.open_time}")
    if handle.last_reconnect_time:
        print(f"Last reconnect: {handle.last_reconnect_time}")
    print(f"Access rights: {', '.join(handle.access_rights)}")
    print("---")

# Close handles based on criteria
for handle in handles:
    if 'Write' in handle.access_rights:
        result = directory_client.close_handle(handle)
        print(f"Closed write handle: {result['closed_handles_count']}")

# Original snippet-boundary import (commented out: the stub below shadows this name):
# from azure.storage.fileshare import ExponentialRetry
class ExponentialRetry:
    """Retry policy with exponential backoff."""

    def __init__(
        self,
        initial_backoff: int = 15,
        max_backoff: int = 1000,
        retry_total: int = 3,
        random_jitter_range: int = 3,
        **kwargs: Any
    ) -> None:
        """
        Exponential retry policy implementation.

        Parameters:
            initial_backoff: Initial backoff interval in seconds
            max_backoff: Maximum backoff interval in seconds
            retry_total: Total number of retry attempts
            random_jitter_range: Range for random jitter in seconds
        """

# Original snippet-boundary import (commented out: the stub below shadows this name):
# from azure.storage.fileshare import LinearRetry
class LinearRetry:
    """Retry policy with linear (fixed-interval) backoff."""

    def __init__(
        self,
        backoff: int = 15,
        retry_total: int = 3,
        random_jitter_range: int = 3,
        **kwargs: Any
    ) -> None:
        """
        Linear retry policy implementation.

        Parameters:
            backoff: Fixed backoff interval in seconds
            retry_total: Total number of retry attempts
            random_jitter_range: Range for random jitter in seconds
        """

# Original snippet-boundary import (commented out: the stub below shadows this name):
# from azure.storage.fileshare import LocationMode
from enum import Enum
class LocationMode(str, Enum):
    """Specifies the location mode for requests."""

    PRIMARY = "primary"
    """Requests are sent to the primary location."""
    SECONDARY = "secondary"
    """Requests are sent to the secondary location."""

# Original snippet-boundary import (commented out: the stub below shadows this name):
# from azure.storage.fileshare import ResourceTypes
class ResourceTypes:
    """Valid resource types for an account SAS."""

    service: bool = False
    """Permission to access service-level APIs."""
    container: bool = False
    """Permission to access container-level APIs."""
    object: bool = False
    """Permission to access object-level APIs."""

    def __init__(
        self,
        service: bool = False,
        container: bool = False,
        object: bool = False  # NOTE: shadows the builtin, kept for API compatibility
    ) -> None:
        """Initialize ResourceTypes with specified permissions."""

# Original snippet-boundary import (commented out: the stub below shadows this name):
# from azure.storage.fileshare import AccountSasPermissions
class AccountSasPermissions:
    """Account SAS permissions."""

    read: bool = False
    """Permission to read resources and metadata."""
    write: bool = False
    """Permission to write resources."""
    delete: bool = False
    """Permission to delete resources."""
    list: bool = False
    """Permission to list resources."""
    add: bool = False
    """Permission to add resources."""
    create: bool = False
    """Permission to create resources."""
    update: bool = False
    """Permission to update resources."""
    process: bool = False
    """Permission to process queue messages."""

    def __init__(
        self,
        read: bool = False,
        write: bool = False,
        delete: bool = False,
        list: bool = False,  # NOTE: shadows the builtin, kept for API compatibility
        add: bool = False,
        create: bool = False,
        update: bool = False,
        process: bool = False
    ) -> None:
        """Initialize AccountSasPermissions with specified permissions."""

# Original snippet-boundary import (commented out: the stub below shadows this name):
# from azure.storage.fileshare import Services
class Services:
    """Valid services for an account SAS."""

    blob: bool = False
    """Permission for Blob service."""
    queue: bool = False
    """Permission for Queue service."""
    fileshare: bool = False
    """Permission for File Share service."""

    def __init__(
        self,
        blob: bool = False,
        queue: bool = False,
        fileshare: bool = False
    ) -> None:
        """Initialize Services with specified service permissions."""

# Original snippet-boundary import (commented out: the stub below shadows this name):
# from azure.storage.fileshare import StorageErrorCode
from enum import Enum
class StorageErrorCode(str, Enum):
"""Common storage error codes."""
ACCOUNT_ALREADY_EXISTS = "AccountAlreadyExists"
ACCOUNT_IS_DISABLED = "AccountIsDisabled"
AUTHORIZATION_FAILURE = "AuthorizationFailure"
CONDITION_NOT_MET = "ConditionNotMet"
CONTAINER_NOT_FOUND = "ContainerNotFound"
FEATURE_VERSION_MISMATCH = "FeatureVersionMismatch"
INSUFFICIENT_ACCOUNT_PERMISSIONS = "InsufficientAccountPermissions"
INTERNAL_ERROR = "InternalError"
INVALID_AUTHENTICATION_INFO = "InvalidAuthenticationInfo"
INVALID_HEADER_VALUE = "InvalidHeaderValue"
INVALID_HTTP_VERB = "InvalidHttpVerb"
INVALID_INPUT = "InvalidInput"
INVALID_MD5 = "InvalidMd5"
INVALID_METADATA = "InvalidMetadata"
INVALID_QUERY_PARAMETER_VALUE = "InvalidQueryParameterValue"
INVALID_RANGE = "InvalidRange"
INVALID_RESOURCE_NAME = "InvalidResourceName"
INVALID_URI = "InvalidUri"
INVALID_XML_DOCUMENT = "InvalidXmlDocument"
INVALID_XML_NODE_VALUE = "InvalidXmlNodeValue"
MD5_MISMATCH = "Md5Mismatch"
METADATA_TOO_LARGE = "MetadataTooLarge"
MISSING_CONTENT_LENGTH_HEADER = "MissingContentLengthHeader"
MISSING_REQUIRED_QUERY_PARAMETER = "MissingRequiredQueryParameter"
MISSING_REQUIRED_HEADER = "MissingRequiredHeader"
MISSING_REQUIRED_XML_NODE = "MissingRequiredXmlNode"
MULTIPLE_CONDITION_HEADERS_NOT_SUPPORTED = "MultipleConditionHeadersNotSupported"
OPERATION_TIMED_OUT = "OperationTimedOut"
OUT_OF_RANGE_INPUT = "OutOfRangeInput"
OUT_OF_RANGE_QUERY_PARAMETER_VALUE = "OutOfRangeQueryParameterValue"
REQUEST_BODY_TOO_LARGE = "RequestBodyTooLarge"
RESOURCE_ALREADY_EXISTS = "ResourceAlreadyExists"
RESOURCE_NOT_FOUND = "ResourceNotFound"
SERVER_BUSY = "ServerBusy"
UNSUPPORTED_HEADER = "UnsupportedHeader"
UNSUPPORTED_HTTP_VERB = "UnsupportedHttpVerb"
UNSUPPORTED_QUERY_PARAMETER = "UnsupportedQueryParameter"
UNSUPPORTED_XML_NODE = "UnsupportedXmlNode"The data models provide comprehensive access to all Azure File Share properties, enabling detailed inspection, configuration, and management of shares, files, directories, and related metadata through strongly-typed Python objects.
Install with the Tessl CLI:

    npx tessl i tessl/pypi-azure-storage-file-share