"""MinIO Python SDK for Amazon S3 Compatible Cloud Storage.

Comprehensive exception hierarchy for robust error handling across all MinIO
operations. The SDK provides specific exception types for different error
conditions, enabling precise error handling and debugging.

Foundation exception classes providing common functionality for all
MinIO-related errors.
"""
class MinioException(Exception):
    """Base exception class for all MinIO operations."""

    def __init__(self, message: str) -> None:
        """
        Initialize MinIO exception.

        Args:
            message: Error message describing the exception
        """
        # Store on both the instance and Exception.args so str()/repr() work.
        super().__init__(message)
        self.message = message

    def __str__(self) -> str:
        """Return string representation of the exception."""
        return self.message


# Exceptions for S3 API operation failures with detailed error information
# from the server.
class S3Error(MinioException):
    """Exception raised when S3 operation fails with error response.

    Attributes:
        code: S3 error code (e.g., "NoSuchBucket", "AccessDenied")
        message: Human-readable error message
        resource: S3 resource that caused the error
        request_id: AWS request ID for debugging
        host_id: AWS host ID for debugging
        response: Original HTTP response object
        bucket_name: Bucket involved in the operation
        object_name: Object involved in the operation
    """

    def __init__(
        self,
        code: str,
        message: str,
        resource: str | None = None,
        request_id: str | None = None,
        host_id: str | None = None,
        response: "urllib3.HTTPResponse | None" = None,
        bucket_name: str | None = None,
        object_name: str | None = None,
    ) -> None:
        """
        Initialize S3 error.

        Args:
            code: S3 error code (e.g., "NoSuchBucket", "AccessDenied")
            message: Human-readable error message
            resource: S3 resource that caused the error
            request_id: AWS request ID for debugging
            host_id: AWS host ID for debugging
            response: Original HTTP response object
            bucket_name: Bucket involved in the operation
            object_name: Object involved in the operation
        """
        super().__init__(message)
        self.code = code
        self.message = message
        self.resource = resource
        self.request_id = request_id
        self.host_id = host_id
        self.response = response
        self.bucket_name = bucket_name
        self.object_name = object_name

    @classmethod
    def fromxml(cls, response: "urllib3.HTTPResponse") -> "S3Error":
        """
        Create S3Error from XML error response.

        Parses the standard S3 <Error> document (Code, Message, Resource,
        RequestId, HostId, BucketName, Key elements).

        Args:
            response: HTTP response containing XML error

        Returns:
            S3Error instance parsed from XML
        """
        import xml.etree.ElementTree as ET

        data = response.data
        if isinstance(data, bytes):
            data = data.decode("utf-8", errors="replace")
        root = ET.fromstring(data)

        def _text(tag: str) -> "str | None":
            # S3 error documents are flat; missing elements map to None.
            element = root.find(tag)
            return element.text if element is not None else None

        return cls(
            code=_text("Code") or "",
            message=_text("Message") or "",
            resource=_text("Resource"),
            request_id=_text("RequestId"),
            host_id=_text("HostId"),
            response=response,
            bucket_name=_text("BucketName"),
            object_name=_text("Key"),
        )

    def copy(self, code: str | None = None, message: str | None = None) -> "S3Error":
        """
        Create copy of S3Error with modified code/message.

        Args:
            code: New error code (optional; keeps current code when None)
            message: New error message (optional; keeps current message when None)

        Returns:
            New S3Error instance with updated values
        """
        return S3Error(
            code if code is not None else self.code,
            message if message is not None else self.message,
            resource=self.resource,
            request_id=self.request_id,
            host_id=self.host_id,
            response=self.response,
            bucket_name=self.bucket_name,
            object_name=self.object_name,
        )


# Exceptions for HTTP-level errors including network issues and server failures.
class ServerError(MinioException):
    """Exception raised when S3 service returns HTTP server error."""

    def __init__(self, message: str, status_code: int) -> None:
        """
        Initialize server error.

        Args:
            message: Error message
            status_code: HTTP status code (5xx series)
        """
        super().__init__(message)
        # Stored locally so __str__ does not depend on the base class.
        self.message = message
        self.status_code = status_code

    def __str__(self) -> str:
        """Return formatted error message with status code."""
        return f"{self.message} (HTTP status code: {self.status_code})"


# Exceptions for malformed or unexpected server responses.
class InvalidResponseError(MinioException):
    """Exception raised when server returns non-XML response.

    Attributes:
        code: HTTP status code
        content_type: Response content type
        body: Response body content
    """

    def __init__(
        self,
        code: int | None = None,
        content_type: str | None = None,
        body: str | None = None,
    ) -> None:
        """
        Initialize invalid response error.

        Args:
            code: HTTP status code
            content_type: Response content type
            body: Response body content
        """
        super().__init__(
            f"non-XML response from server; "
            f"status: {code}; content-type: {content_type}"
        )
        self.code = code
        self.content_type = content_type
        self.body = body


# Exceptions specific to MinIO administrative operations.
class MinioAdminException(MinioException):
    """Exception raised for MinIO admin API execution errors.

    Attributes:
        code: HTTP status code
        body: Response body containing error details
    """

    def __init__(self, code: int, body: str) -> None:
        """
        Initialize admin exception.

        Args:
            code: HTTP status code
            body: Response body containing error details
        """
        super().__init__(f"admin request failed; status: {code}; body: {body}")
        self.code = code
        self.body = body


# The SDK handles numerous standard S3 error codes. Understanding these helps
# in implementing appropriate error handling strategies.
# Common error codes for access issues:
# - "AccessDenied": Insufficient permissions
# - "InvalidAccessKeyId": Invalid access key
# - "SignatureDoesNotMatch": Invalid secret key or signature
# - "TokenRefreshRequired": STS token needs refresh
# - "ExpiredToken": STS token has expired

# Common bucket operation error codes:
# - "NoSuchBucket": Bucket doesn't exist
# - "BucketAlreadyExists": Bucket name already taken
# - "BucketNotEmpty": Cannot delete non-empty bucket
# - "InvalidBucketName": Bucket name violates naming rules

# Common object operation error codes:
# - "NoSuchKey": Object doesn't exist
# - "InvalidObjectName": Object name violates naming rules
# - "EntityTooLarge": Object exceeds size limits
# - "InvalidPart": Multipart upload part is invalid
# - "InvalidPartOrder": Multipart parts not in order

# Common constraint error codes:
# - "PreconditionFailed": If-Match/If-None-Match condition failed
# - "NotModified": Object not modified since specified date
# - "InvalidRange": Byte range is invalid
# - "RequestTimeout": Request took too long

from minio import Minio, S3Error, ServerError, InvalidResponseError

client = Minio("localhost:9000", "minio", "minio123")

try:
    # Attempt bucket operation
    client.make_bucket("my-bucket")
    print("Bucket created successfully")
except S3Error as e:
    if e.code == "BucketAlreadyExists":
        print("Bucket already exists, continuing...")
    elif e.code == "AccessDenied":
        print("Access denied. Check credentials and permissions.")
    else:
        print(f"S3 error: {e.code} - {e.message}")
except ServerError as e:
    print(f"Server error {e.status_code}: {e}")
except InvalidResponseError as e:
    print(f"Invalid response: {e.code} - {e.content_type}")
except Exception as e:
    print(f"Unexpected error: {e}")


def safe_bucket_operations(client: Minio, bucket_name: str):
    """Demonstrate handling specific S3 error scenarios.

    Args:
        client: Connected Minio client.
        bucket_name: Bucket to create/inspect.

    Raises:
        S3Error: re-raised after logging so callers can still react.
    """
    try:
        # Check if bucket exists
        if not client.bucket_exists(bucket_name):
            client.make_bucket(bucket_name)
            print(f"Created bucket: {bucket_name}")
        else:
            print(f"Bucket {bucket_name} already exists")
    except S3Error as e:
        if e.code == "NoSuchBucket":
            print("Bucket doesn't exist and cannot be created")
        elif e.code == "BucketAlreadyOwnedByYou":
            print("Bucket already owned by you")
        elif e.code == "InvalidBucketName":
            print(f"Invalid bucket name: {bucket_name}")
        else:
            print(f"Bucket operation failed: {e.code} - {e.message}")
        # Log first, then let the caller decide how to recover.
        raise

    try:
        # List objects in bucket
        objects = client.list_objects(bucket_name)
        for obj in objects:
            print(f"Object: {obj.object_name}")
    except S3Error as e:
        if e.code == "AccessDenied":
            print("Cannot list objects: access denied")
        else:
            print(f"List operation failed: {e.code}")
        raise


import io
from minio.error import S3Error


def robust_multipart_upload(client: Minio, bucket_name: str, object_name: str, file_path: str):
    """Handle errors in multipart upload with proper cleanup.

    Streams file_path in 5 MB chunks, retrying parts that fail with
    "InvalidPart", and aborts the multipart upload on any other error.

    Args:
        client: Connected Minio client.
        bucket_name: Destination bucket.
        object_name: Destination object key.
        file_path: Local file to upload.

    Returns:
        The complete_multipart_upload result on success.

    Raises:
        S3Error: on unrecoverable S3 failures (after aborting the upload).
    """
    upload_id = None
    try:
        # Initiate multipart upload
        upload_id = client.initiate_multipart_upload(bucket_name, object_name)
        parts = []
        part_number = 1
        with open(file_path, 'rb') as f:
            while True:
                chunk = f.read(5 * 1024 * 1024)  # 5MB chunks
                if not chunk:
                    break
                # BUGFIX: retry the SAME chunk on "InvalidPart". A bare
                # `continue` in the outer while-loop would read the next
                # chunk and silently drop the failed part.
                for retry in range(3):
                    try:
                        part = client.upload_part(
                            bucket_name, object_name, upload_id,
                            part_number, io.BytesIO(chunk), len(chunk)
                        )
                        parts.append(part)
                        part_number += 1
                        break
                    except S3Error as e:
                        if e.code == "InvalidPart" and retry < 2:
                            print(f"Invalid part {part_number}, retrying...")
                            continue  # Retry this part
                        raise  # Re-raise for cleanup
        # Complete upload
        result = client.complete_multipart_upload(
            bucket_name, object_name, upload_id, parts
        )
        print(f"Upload completed: {result.etag}")
        return result
    except S3Error as e:
        print(f"Multipart upload failed: {e.code} - {e.message}")
        # Cleanup incomplete upload
        if upload_id:
            try:
                client.abort_multipart_upload(bucket_name, object_name, upload_id)
                print("Aborted incomplete multipart upload")
            except S3Error as abort_error:
                print(f"Failed to abort upload: {abort_error.code}")
        raise
    except Exception as e:
        print(f"Unexpected error during upload: {e}")
        # Cleanup on any error
        if upload_id:
            try:
                client.abort_multipart_upload(bucket_name, object_name, upload_id)
            except Exception:
                pass  # Best-effort cleanup, deliberately swallowed
        raise


from minio.credentials import ChainedProvider, EnvAWSProvider, StaticProvider
def create_resilient_client(endpoint: str) -> Minio:
    """Create client with robust credential handling.

    Args:
        endpoint: MinIO/S3 endpoint host:port.

    Returns:
        A Minio client whose credentials were verified by listing buckets.

    Raises:
        S3Error: when authentication fails (after logging the cause).
    """
    # Try multiple credential sources
    providers = [
        EnvAWSProvider(),
        StaticProvider("fallback-key", "fallback-secret")
    ]
    credential_chain = ChainedProvider(providers)
    try:
        client = Minio(endpoint, credentials=credential_chain)
        # Test credentials by listing buckets
        buckets = client.list_buckets()
        print(f"Successfully authenticated, found {len(buckets)} buckets")
        return client
    except S3Error as e:
        if e.code in ["AccessDenied", "InvalidAccessKeyId", "SignatureDoesNotMatch"]:
            print(f"Authentication failed: {e.code}")
            print("Please check your credentials")
        elif e.code == "ExpiredToken":
            print("STS token has expired, refresh required")
        else:
            print(f"Authentication error: {e.code} - {e.message}")
        raise
    except Exception as e:
        print(f"Client creation failed: {e}")
        raise


from minio import MinioAdmin
from minio.error import MinioAdminException
from minio.credentials import StaticProvider


def safe_admin_operations():
    """Handle MinIO admin operation errors.

    Demonstrates discriminating on MinioAdminException.code (an HTTP status)
    for user and policy creation; all other failures are logged.
    """
    try:
        admin = MinioAdmin(
            "localhost:9000",
            credentials=StaticProvider("admin", "password")
        )
        # Add user with error handling
        try:
            result = admin.user_add("newuser", "password123")
            print(f"User created: {result}")
        except MinioAdminException as e:
            if e.code == 409:  # Conflict - user already exists
                print("User already exists")
            elif e.code == 403:  # Forbidden - insufficient permissions
                print("Insufficient permissions to create user")
            else:
                print(f"User creation failed: {e.code} - {e.body}")
        # Set policy with error handling
        try:
            policy_json = '{"Version": "2012-10-17", "Statement": []}'
            admin.policy_add("test-policy", policy_json)
        except MinioAdminException as e:
            if e.code == 409:
                print("Policy already exists")
            else:
                print(f"Policy creation failed: {e.code}")
    except Exception as e:
        print(f"Admin client error: {e}")


import time
import random
from typing import Callable, TypeVar

T = TypeVar('T')


def retry_with_backoff(
    func: Callable[[], T],
    max_retries: int = 3,
    base_delay: float = 1.0,
    max_delay: float = 60.0
) -> T:
    """Retry function with exponential backoff for transient errors.

    Args:
        func: Zero-argument callable to invoke.
        max_retries: Maximum number of retries after the first attempt.
        base_delay: Initial backoff delay in seconds.
        max_delay: Upper bound for any single delay.

    Returns:
        Whatever func() returns on the first successful attempt.

    Raises:
        S3Error, ServerError: re-raised when not retryable or when
        retries are exhausted; any other exception propagates unchanged.
    """
    for attempt in range(max_retries + 1):
        try:
            return func()
        except S3Error as e:
            # Don't retry on client errors (4xx)
            if e.code in ["AccessDenied", "NoSuchBucket", "InvalidBucketName"]:
                raise
            # Retry on server errors and throttling; jitter avoids
            # synchronized retry storms.
            if attempt < max_retries and e.code in ["InternalError", "ServiceUnavailable", "SlowDown"]:
                delay = min(base_delay * (2 ** attempt) + random.uniform(0, 1), max_delay)
                print(f"Retrying after {delay:.2f}s (attempt {attempt + 1}/{max_retries + 1})")
                time.sleep(delay)
                continue
            raise
        except ServerError as e:
            # Retry on 5xx server errors
            if attempt < max_retries and e.status_code >= 500:
                delay = min(base_delay * (2 ** attempt) + random.uniform(0, 1), max_delay)
                print(f"Server error, retrying after {delay:.2f}s")
                time.sleep(delay)
                continue
            raise
        # NOTE: unexpected exception types propagate unchanged — the former
        # `except Exception: raise` handler was a no-op and has been removed.


# Usage example
def upload_with_retry(client: Minio, bucket: str, object_name: str, data: bytes):
    """Upload with automatic retry on transient errors."""
    def upload_operation():
        return client.put_object(
            bucket,
            object_name,
            io.BytesIO(data),
            len(data)
        )

    try:
        result = retry_with_backoff(upload_operation, max_retries=3)
        print(f"Upload successful: {result.etag}")
        return result
    except Exception as e:
        print(f"Upload failed after retries: {e}")
        raise


import logging
from datetime import datetime

# Configure logging
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)


def logged_operation(operation_name: str, func: Callable, *args, **kwargs):
    """Execute operation with comprehensive error logging.

    Args:
        operation_name: Human-readable label used in log lines.
        func: Callable to execute.
        *args, **kwargs: Forwarded to func.

    Returns:
        The value returned by func.

    Raises:
        Whatever func raises; every failure is logged before re-raising.
    """
    start_time = datetime.now()
    # Lazy %-style arguments defer string formatting until the record is
    # actually emitted (logging best practice).
    logger.info("Starting %s", operation_name)
    try:
        result = func(*args, **kwargs)
        duration = (datetime.now() - start_time).total_seconds()
        logger.info("Completed %s in %.2fs", operation_name, duration)
        return result
    except S3Error as e:
        duration = (datetime.now() - start_time).total_seconds()
        logger.error(
            "S3Error in %s after %.2fs: Code=%s, Message=%s, "
            "Bucket=%s, Object=%s, RequestId=%s",
            operation_name, duration, e.code, e.message,
            e.bucket_name, e.object_name, e.request_id,
        )
        raise
    except ServerError as e:
        duration = (datetime.now() - start_time).total_seconds()
        logger.error(
            "ServerError in %s after %.2fs: Status=%s, Message=%s",
            operation_name, duration, e.status_code, e,
        )
        raise
    except MinioAdminException as e:
        duration = (datetime.now() - start_time).total_seconds()
        logger.error(
            "AdminError in %s after %.2fs: Code=%s, Body=%s",
            operation_name, duration, e.code, e.body,
        )
        raise
    except Exception as e:
        duration = (datetime.now() - start_time).total_seconds()
        logger.error(
            "UnexpectedError in %s after %.2fs: Type=%s, Message=%s",
            operation_name, duration, type(e).__name__, e,
        )
        raise


# Usage
client = Minio("localhost:9000", "minio", "minio123")
logged_operation(
    "bucket_creation",
    client.make_bucket,
    "test-bucket"
)
logged_operation(
    "object_upload",
    client.put_object,
    "test-bucket",
    "test-object.txt",
    io.BytesIO(b"test data"),
    9
)

# Install with Tessl CLI
# npx tessl i tessl/pypi-minio