CtrlK
Blog · Docs · Log in · Get started
Tessl Logo

tessl/pypi-minio

MinIO Python SDK for Amazon S3 Compatible Cloud Storage

Pending
Overview
Eval results
Files

docs/client-operations.md

Client Operations

Core bucket and object operations using the Minio class. These operations form the foundation of object storage management, providing CRUD functionality for buckets and objects along with metadata management and basic configuration.

Capabilities

Bucket Operations

Create, list, check existence, and remove buckets. Bucket operations also include policy management and metadata configuration.

def make_bucket(
    self, 
    bucket_name: str, 
    location: str | None = None, 
    object_lock: bool = False
) -> None:
    """
    Create a new bucket.
    
    Args:
        bucket_name: Name of the bucket to create
        location: AWS region for the bucket (optional)
        object_lock: Enable object lock for compliance (default: False)
    
    Raises:
        S3Error: If bucket creation fails
    """

def list_buckets(self) -> list[Bucket]:
    """
    Enumerate every bucket visible to the current credentials.

    Returns:
        List of Bucket instances, each carrying a name and creation date

    Raises:
        S3Error: If the listing request fails
    """

def bucket_exists(self, bucket_name: str) -> bool:
    """
    Probe whether a bucket exists and can be reached.

    Args:
        bucket_name: Name of the bucket to probe

    Returns:
        True when the bucket exists and is accessible; False otherwise

    Raises:
        S3Error: For failures other than a NotFound response
    """

def remove_bucket(self, bucket_name: str) -> None:
    """
    Delete a bucket; the bucket must already be empty.

    Args:
        bucket_name: Name of the bucket to delete

    Raises:
        S3Error: If the bucket still contains objects or deletion fails
    """

def get_bucket_policy(self, bucket_name: str) -> str:
    """
    Fetch the bucket's access policy.

    Args:
        bucket_name: Name of the bucket whose policy is read

    Returns:
        The bucket policy serialized as a JSON string

    Raises:
        S3Error: If the policy cannot be retrieved
    """

def set_bucket_policy(self, bucket_name: str, policy: str) -> None:
    """
    Install an access policy on a bucket.

    Args:
        bucket_name: Name of the bucket to configure
        policy: Policy document as a JSON string

    Raises:
        S3Error: If the policy cannot be applied
    """

def delete_bucket_policy(self, bucket_name: str) -> None:
    """
    Delete the bucket's access policy, if any.

    Args:
        bucket_name: Name of the bucket whose policy is removed

    Raises:
        S3Error: If the policy cannot be deleted
    """

Object Upload Operations

Upload objects to buckets with various options for metadata, encryption, progress tracking, and multipart uploads.

def put_object(
    self,
    bucket_name: str,
    object_name: str,
    data: io.IOBase,
    length: int = -1,
    content_type: str = "application/octet-stream",
    metadata: dict[str, str] | None = None,
    sse: Sse | None = None,
    progress: ProgressType | None = None,
    part_size: int = 0,
    num_parallel_uploads: int = 3,
    tags: Tags | None = None,
    retention: Retention | None = None,
    legal_hold: bool = False,
) -> ObjectWriteResult:
    """
    Stream data from a file-like object into a bucket as a new object.

    Args:
        bucket_name: Bucket that receives the object
        object_name: Key under which the data is stored
        data: Readable file-like object supplying the payload
        length: Payload size in bytes; -1 when unknown (data must support seek)
        content_type: MIME type recorded for the object
        metadata: User-defined key-value metadata
        sse: Server-side encryption settings
        progress: Callback invoked with upload progress
        part_size: Part size for multipart uploads; 0 selects it automatically
        num_parallel_uploads: Concurrent part uploads during multipart transfer
        tags: Tags attached to the object
        retention: Retention settings applied to the object
        legal_hold: Whether to place a legal hold on the object

    Returns:
        ObjectWriteResult describing the stored object

    Raises:
        S3Error: If the upload fails
    """

def fput_object(
    self,
    bucket_name: str,
    object_name: str,
    file_path: str,
    content_type: str | None = None,
    metadata: dict[str, str] | None = None,
    sse: Sse | None = None,
    progress: ProgressType | None = None,
    part_size: int = 0,
    num_parallel_uploads: int = 3,
    tags: Tags | None = None,
    retention: Retention | None = None,
    legal_hold: bool = False,
) -> ObjectWriteResult:
    """
    Upload the contents of a local file into a bucket as a new object.

    Args:
        bucket_name: Bucket that receives the object
        object_name: Key under which the file's contents are stored
        file_path: Path of the local file to upload
        content_type: MIME type; auto-detected when None
        metadata: User-defined key-value metadata
        sse: Server-side encryption settings
        progress: Callback invoked with upload progress
        part_size: Part size for multipart uploads; 0 selects it automatically
        num_parallel_uploads: Concurrent part uploads during multipart transfer
        tags: Tags attached to the object
        retention: Retention settings applied to the object
        legal_hold: Whether to place a legal hold on the object

    Returns:
        ObjectWriteResult describing the stored object

    Raises:
        S3Error: If the upload fails
    """

Object Download Operations

Download objects from buckets with support for range requests, encryption, and versioning.

def get_object(
    self,
    bucket_name: str,
    object_name: str,
    offset: int = 0,
    length: int = 0,
    request_headers: dict[str, str] | None = None,
    ssec: SseCustomerKey | None = None,
    version_id: str | None = None,
    extra_query_params: dict[str, str] | None = None,
) -> urllib3.HTTPResponse:
    """
    Read an object's data as a streaming HTTP response.

    Args:
        bucket_name: Bucket holding the object
        object_name: Key of the object to read
        offset: Byte position where the range read starts (0 = beginning)
        length: Number of bytes to read; 0 reads the whole object
        request_headers: Extra HTTP headers sent with the request
        ssec: Customer-provided encryption key for SSE-C objects
        version_id: Specific object version to read
        extra_query_params: Extra query parameters appended to the request

    Returns:
        urllib3.HTTPResponse streaming the object's bytes

    Raises:
        S3Error: If the read fails
    """

def fget_object(
    self,
    bucket_name: str,
    object_name: str,
    file_path: str,
    request_headers: dict[str, str] | None = None,
    ssec: SseCustomerKey | None = None,
    version_id: str | None = None,
    extra_query_params: dict[str, str] | None = None,
    tmp_file_path: str | None = None,
) -> None:
    """
    Download an object's data into a local file.

    Args:
        bucket_name: Bucket holding the object
        object_name: Key of the object to download
        file_path: Local path where the object is written
        request_headers: Extra HTTP headers sent with the request
        ssec: Customer-provided encryption key for SSE-C objects
        version_id: Specific object version to download
        extra_query_params: Extra query parameters appended to the request
        tmp_file_path: Scratch file used so the final write is atomic

    Raises:
        S3Error: If the download fails
    """

Object Management Operations

Manage existing objects including copying, metadata retrieval, and deletion.

def stat_object(
    self,
    bucket_name: str,
    object_name: str,
    ssec: SseCustomerKey | None = None,
    version_id: str | None = None,
    extra_query_params: dict[str, str] | None = None,
) -> Object:
    """
    Fetch an object's metadata without transferring its data.

    Args:
        bucket_name: Bucket holding the object
        object_name: Key of the object to inspect
        ssec: Customer-provided encryption key for SSE-C objects
        version_id: Specific object version whose metadata is read
        extra_query_params: Extra query parameters appended to the request

    Returns:
        Object populated with the metadata

    Raises:
        S3Error: If the metadata cannot be retrieved
    """

def copy_object(
    self,
    bucket_name: str,
    object_name: str,
    source: CopySource,
    sse: Sse | None = None,
    metadata: dict[str, str] | None = None,
    tags: Tags | None = None,
    retention: Retention | None = None,
    legal_hold: bool = False,
) -> ObjectWriteResult:
    """
    Create a new object by copying from an existing source object.

    Args:
        bucket_name: Destination bucket
        object_name: Destination object key
        source: CopySource naming the source bucket and object
        sse: Server-side encryption settings for the destination
        metadata: Metadata for the destination; replaces the source's metadata
        tags: Tags placed on the destination object
        retention: Retention settings for the destination object
        legal_hold: Whether to place a legal hold on the destination object

    Returns:
        ObjectWriteResult describing the copy

    Raises:
        S3Error: If the copy fails
    """

def compose_object(
    self,
    bucket_name: str,
    object_name: str,
    sources: list[ComposeSource],
    sse: Sse | None = None,
    metadata: dict[str, str] | None = None,
    tags: Tags | None = None,
    retention: Retention | None = None,
    legal_hold: bool = False,
) -> ObjectWriteResult:
    """
    Build a destination object by concatenating several source objects.

    Args:
        bucket_name: Destination bucket
        object_name: Destination object key
        sources: Ordered ComposeSource entries to stitch together
        sse: Server-side encryption settings for the destination
        metadata: Metadata placed on the destination object
        tags: Tags placed on the destination object
        retention: Retention settings for the destination object
        legal_hold: Whether to place a legal hold on the destination object

    Returns:
        ObjectWriteResult describing the composed object

    Raises:
        S3Error: If the compose fails
    """

def remove_object(
    self, 
    bucket_name: str, 
    object_name: str, 
    version_id: str | None = None
) -> None:
    """
    Remove an object from bucket.
    
    Args:
        bucket_name: Name of the bucket
        object_name: Name of the object to remove
        version_id: Specific version to remove (for versioned buckets)
        
    Raises:
        S3Error: If removal fails
    """

def remove_objects(
    self,
    bucket_name: str,
    delete_object_list: Iterable[DeleteObject],
    bypass_governance_mode: bool = False,
) -> Iterable[DeleteResult]:
    """
    Delete many objects from a bucket in a single batch call.

    Args:
        bucket_name: Bucket holding the objects
        delete_object_list: DeleteObject entries naming what to delete
        bypass_governance_mode: Whether to bypass governance-mode retention

    Returns:
        Iterable of DeleteResult entries describing the outcome

    Raises:
        S3Error: If the batch deletion fails
    """

def prompt_object(
    self,
    bucket_name: str,
    object_name: str,
    prompt: str,
    lambda_arn: str | None = None,
    request_headers: dict[str, str] | None = None,
    ssec: SseCustomerKey | None = None,
    version_id: str | None = None,
    **kwargs: Any,
) -> urllib3.HTTPResponse:
    """
    Query an object with a natural-language prompt via an AI model.

    Args:
        bucket_name: Bucket holding the object
        object_name: Key of the object to query
        prompt: Natural-language prompt passed to the AI model
        lambda_arn: Lambda ARN used to process the prompt (optional)
        request_headers: Extra HTTP headers sent with the request
        ssec: Customer-provided encryption key for SSE-C objects
        version_id: Specific object version to query
        **kwargs: Additional advanced-usage parameters

    Returns:
        urllib3.HTTPResponse carrying the AI model's response

    Raises:
        S3Error: If the prompt request fails
    """

Object Listing Operations

List and iterate through objects in buckets with filtering and pagination support.

def list_objects(
    self,
    bucket_name: str,
    prefix: str | None = None,
    recursive: bool = True,
    start_after: str | None = None,
    include_user_metadata: bool = False,
    include_version: bool = False,
    use_api_v1: bool = False,
    max_keys: int = 1000,
) -> Iterable[Object]:
    """
    Iterate over the objects stored in a bucket.

    Args:
        bucket_name: Bucket to list
        prefix: Only yield keys beginning with this prefix
        recursive: Walk the full key space (True) or only the top level (False)
        start_after: Begin listing after this key
        include_user_metadata: Also return user-defined metadata
        include_version: Also return version details for versioned buckets
        use_api_v1: Fall back to the legacy S3 list API v1
        max_keys: Upper bound on objects returned per request

    Returns:
        Iterable of Object instances

    Raises:
        S3Error: If the listing fails
    """

def append_object(
    self,
    bucket_name: str,
    object_name: str,
    data: BinaryIO,
    length: int,
    chunk_size: int | None = None,
    progress: ProgressType | None = None,
    extra_headers: dict[str, str] | None = None,
) -> ObjectWriteResult:
    """
    Append a stream of bytes to an object that already exists.

    Args:
        bucket_name: Bucket holding the object
        object_name: Key of the existing object to extend
        data: Binary stream supplying the bytes to append
        length: Number of bytes to append
        chunk_size: Chunk size used to optimize the transfer
        progress: Callback invoked with upload progress
        extra_headers: Extra HTTP headers sent with the request

    Returns:
        ObjectWriteResult describing the append

    Raises:
        S3Error: If the append fails
    """

Tags and Metadata Operations

Manage object and bucket tags for organization and billing.

def set_bucket_tags(self, bucket_name: str, tags: Tags) -> None:
    """
    Attach a set of tags to a bucket.

    Args:
        bucket_name: Name of the bucket to tag
        tags: Tags object holding the key-value pairs to apply

    Raises:
        S3Error: If the tags cannot be applied
    """

def get_bucket_tags(self, bucket_name: str) -> Tags | None:
    """
    Read the tags attached to a bucket.

    Args:
        bucket_name: Name of the bucket to inspect

    Returns:
        Tags object, or None when the bucket has no tags

    Raises:
        S3Error: If the tags cannot be read
    """

def delete_bucket_tags(self, bucket_name: str) -> None:
    """
    Strip every tag from a bucket.

    Args:
        bucket_name: Name of the bucket to clear

    Raises:
        S3Error: If the tags cannot be removed
    """

def set_object_tags(
    self,
    bucket_name: str,
    object_name: str,
    tags: Tags,
    version_id: str | None = None,
) -> None:
    """
    Attach a set of tags to an object.

    Args:
        bucket_name: Bucket holding the object
        object_name: Key of the object to tag
        tags: Tags object holding the key-value pairs to apply
        version_id: Version to tag when the object is versioned

    Raises:
        S3Error: If the tags cannot be applied
    """

def get_object_tags(
    self,
    bucket_name: str,
    object_name: str,
    version_id: str | None = None,
) -> Tags | None:
    """
    Read the tags attached to an object.

    Args:
        bucket_name: Bucket holding the object
        object_name: Key of the object to inspect
        version_id: Version whose tags are read

    Returns:
        Tags object, or None when the object has no tags

    Raises:
        S3Error: If the tags cannot be read
    """

def delete_object_tags(
    self,
    bucket_name: str,
    object_name: str,
    version_id: str | None = None
) -> None:
    """
    Remove all tags from an object.
    
    Args:
        bucket_name: Name of the bucket
        object_name: Name of the object
        version_id: Specific version to remove tags from
        
    Raises:
        S3Error: If tag deletion fails
    """

Legal Hold Operations

Manage legal hold status on objects for compliance and regulatory requirements.

def enable_object_legal_hold(
    self,
    bucket_name: str,
    object_name: str,
    version_id: str | None = None
) -> None:
    """
    Enable legal hold on an object.
    
    Args:
        bucket_name: Name of the bucket
        object_name: Name of the object
        version_id: Specific version to enable legal hold on
        
    Raises:
        S3Error: If legal hold enable fails
    """

def disable_object_legal_hold(
    self,
    bucket_name: str,
    object_name: str,
    version_id: str | None = None
) -> None:
    """
    Disable legal hold on an object.
    
    Args:
        bucket_name: Name of the bucket
        object_name: Name of the object
        version_id: Specific version to disable legal hold on
        
    Raises:
        S3Error: If legal hold disable fails
    """

def is_object_legal_hold_enabled(
    self,
    bucket_name: str,
    object_name: str,
    version_id: str | None = None
) -> bool:
    """
    Check if legal hold is enabled on an object.
    
    Args:
        bucket_name: Name of the bucket
        object_name: Name of the object
        version_id: Specific version to check
        
    Returns:
        True if legal hold is enabled, False otherwise
        
    Raises:
        S3Error: If legal hold status check fails
    """

Types

Bucket and Object Types

class Bucket:
    """Container for bucket information."""
    def __init__(self, name: str, creation_date: datetime.datetime | None = None) -> None: ...
    name: str
    creation_date: datetime.datetime | None

class Object:
    """Record bundling an object's identity, metadata, and version details."""
    # Owning bucket and key; None when not applicable to the response.
    bucket_name: str | None
    object_name: str | None
    # Last-modified timestamp and entity tag reported by the server.
    last_modified: datetime.datetime | None
    etag: str | None
    # Size in bytes and MIME type, when known.
    size: int | None
    content_type: str | None
    # True for directory (prefix) entries in non-recursive listings.
    is_dir: bool
    # Versioning details for versioned buckets.
    version_id: str | None
    is_latest: bool
    is_delete_marker: bool
    # Storage class and ownership information.
    storage_class: str | None
    owner_id: str | None
    owner_name: str | None
    # Tags attached to the object, when requested.
    tags: Tags | None

class ObjectWriteResult:
    """Result container for object write operations."""
    def __init__(
        self,
        bucket_name: str,
        object_name: str,
        etag: str,
        version_id: str | None = None,
        location: str | None = None
    ) -> None: ...
    bucket_name: str
    object_name: str
    etag: str
    version_id: str | None
    location: str | None

Delete Operation Types

class DeleteObject:
    """Specification for object deletion."""
    def __init__(self, name: str, version_id: str | None = None) -> None: ...
    name: str
    version_id: str | None

class DeleteResult:
    """Outcome of one batch delete call: what succeeded and what failed."""
    # Objects that were deleted successfully.
    deleted_objects: list[DeleteObject]
    # Per-object failures, each described by a DeleteError.
    error_objects: list[DeleteError]

class DeleteError:
    """Error information for failed deletions."""
    code: str
    message: str
    object_name: str
    version_id: str | None

Usage Examples

Basic Bucket and Object Operations

from minio import Minio
from minio.error import S3Error

# Client for a MinIO deployment: endpoint, access key, secret key.
# NOTE(review): presumably a non-TLS local dev server — confirm TLS/`secure`
# settings before reusing these values in production.
client = Minio("localhost:9000", "minio", "minio123")

# Create bucket; S3Error is raised on failure (e.g. name already taken)
try:
    client.make_bucket("my-photos")
    print("Bucket created successfully")
except S3Error as e:
    print(f"Error: {e}")

# Upload file from a local path; the returned result carries the etag
try:
    result = client.fput_object(
        "my-photos", 
        "vacation/beach.jpg", 
        "/home/user/photos/beach.jpg",
        content_type="image/jpeg"
    )
    print(f"Upload successful: {result.etag}")
except S3Error as e:
    print(f"Upload failed: {e}")

# Download file: writes the object's contents to the given local path
try:
    client.fget_object(
        "my-photos", 
        "vacation/beach.jpg", 
        "/tmp/downloaded-beach.jpg"
    )
    print("Download successful")
except S3Error as e:
    print(f"Download failed: {e}")

Batch Operations

from minio.commonconfig import Tags
from minio.deleteobjects import DeleteObject

# Set tags on a bucket: build a Tags mapping, then apply it in one call
bucket_tags = Tags.new_bucket_tags()
bucket_tags["Environment"] = "Production"
bucket_tags["Team"] = "DevOps"

client.set_bucket_tags("my-bucket", bucket_tags)

# Batch delete objects: each DeleteObject names one key
# (a specific version can be passed as the second argument)
delete_objects = [
    DeleteObject("old-file1.txt"),
    DeleteObject("old-file2.txt"),
    DeleteObject("archived/old-file3.txt")
]

# NOTE(review): the returned iterable appears to be consumed to drive the
# deletions — confirm laziness against the SDK docs before relying on it.
for result in client.remove_objects("my-bucket", delete_objects):
    if result.error_objects:
        for error in result.error_objects:
            print(f"Failed to delete {error.object_name}: {error.message}")
    for obj in result.deleted_objects:
        print(f"Successfully deleted: {obj.name}")

Install with Tessl CLI

npx tessl i tessl/pypi-minio

docs

admin-operations.md

advanced-operations.md

client-operations.md

configuration.md

credentials-auth.md

error-handling.md

index.md

tile.json