Python client library for Google Cloud Platform services including Datastore, Storage, and Pub/Sub
Overall score: 93%
Google Cloud Storage is an object storage service for storing and retrieving files of any size. It provides durable, secure, and scalable storage with global accessibility and rich metadata support.
High-level client for managing Google Cloud Storage operations including bucket management and configuration.
class Client:
    """High-level client for Google Cloud Storage.

    Wraps project-level operations: looking up, creating, and listing
    buckets.
    """

    def __init__(self, project=None, credentials=None, http=None):
        """Set up the Storage client.

        Args:
            project (str): Google Cloud project ID.
            credentials: OAuth2 credentials object.
            http: Optional HTTP client used for requests.
        """

    def get_bucket(self, bucket_name):
        """Fetch a bucket, failing loudly when it is missing.

        Args:
            bucket_name (str): Name of the bucket.

        Returns:
            Bucket: The bucket instance.

        Raises:
            NotFound: If no bucket with that name exists.
        """

    def lookup_bucket(self, bucket_name):
        """Fetch a bucket, returning ``None`` when it is missing.

        Args:
            bucket_name (str): Name of the bucket.

        Returns:
            Bucket or None: The bucket instance, or ``None`` if absent.
        """

    def create_bucket(self, bucket_name):
        """Create a new bucket in this project.

        Args:
            bucket_name (str): Name for the new bucket.

        Returns:
            Bucket: The newly created bucket instance.
        """

    def list_buckets(self, max_results=None, page_token=None, prefix=None,
                     projection='noAcl', fields=None):
        """Iterate over the buckets belonging to this project.

        Args:
            max_results (int): Maximum results per page.
            page_token (str): Token resuming a previous listing.
            prefix (str): Only include buckets whose names start with this.
            projection (str): Projection type, ``'full'`` or ``'noAcl'``.
            fields (str): Restrict which fields are returned.

        Returns:
            Iterator: Iterator over Bucket objects.
        """


# Bucket management including blob operations, access control, and configuration settings.
class Bucket:
    """A Cloud Storage bucket.

    Provides blob operations, access control, and bucket-level
    configuration (website hosting, logging, versioning).
    """

    def __init__(self, client, name=None):
        """Set up the bucket.

        Args:
            client (Client): Storage client used for API calls.
            name (str): Bucket name.
        """

    def exists(self, client=None):
        """Report whether the bucket exists.

        Args:
            client (Client): Optional client override.

        Returns:
            bool: ``True`` when the bucket exists.
        """

    def create(self, client=None):
        """Create this bucket.

        Args:
            client (Client): Optional client override.
        """

    def delete(self, force=False, client=None):
        """Delete this bucket.

        Args:
            force (bool): Delete even when the bucket still holds objects.
            client (Client): Optional client override.
        """

    def get_blob(self, blob_name, client=None):
        """Fetch a blob from this bucket.

        Args:
            blob_name (str): Name of the blob.
            client (Client): Optional client override.

        Returns:
            Blob: The blob instance.
        """

    def list_blobs(self, max_results=None, page_token=None, prefix=None,
                   delimiter=None, versions=None, projection='noAcl', fields=None, client=None):
        """Iterate over the blobs in this bucket.

        Args:
            max_results (int): Maximum results per page.
            page_token (str): Token resuming a previous listing.
            prefix (str): Only include blobs whose names start with this.
            delimiter (str): Delimiter for hierarchical listing.
            versions (bool): Include every object version.
            projection (str): Projection type, ``'full'`` or ``'noAcl'``.
            fields (str): Restrict which fields are returned.
            client (Client): Optional client override.

        Returns:
            Iterator: Iterator over Blob objects.
        """

    def delete_blob(self, blob_name, client=None):
        """Delete one blob.

        Args:
            blob_name (str): Name of the blob to delete.
            client (Client): Optional client override.
        """

    def delete_blobs(self, blobs, on_error=None, client=None):
        """Delete several blobs.

        Args:
            blobs (list): Blobs or blob names to delete.
            on_error (callable): Optional handler invoked per failure.
            client (Client): Optional client override.
        """

    def copy_blob(self, blob, destination_bucket, new_name=None, client=None):
        """Copy a blob into another bucket.

        Args:
            blob (Blob): Source blob.
            destination_bucket (Bucket): Bucket receiving the copy.
            new_name (str): Optional name for the copy.
            client (Client): Optional client override.

        Returns:
            Blob: The copied blob instance.
        """

    def upload_file(self, filename, blob_name=None, client=None):
        """Upload a local file into this bucket.

        Args:
            filename (str): Local file path.
            blob_name (str): Optional blob name (defaults to the filename).
            client (Client): Optional client override.

        Returns:
            Blob: The uploaded blob instance.
        """

    def upload_file_object(self, file_obj, blob_name=None, client=None):
        """Upload a file-like object into this bucket.

        Args:
            file_obj: File-like object to read from.
            blob_name (str): Optional blob name.
            client (Client): Optional client override.

        Returns:
            Blob: The uploaded blob instance.
        """

    def make_public(self, recursive=False, future=False, client=None):
        """Grant public read access to the bucket.

        Args:
            recursive (bool): Also apply to every existing object.
            future (bool): Also apply to objects added later.
            client (Client): Optional client override.
        """

    def configure_website(self, main_page_suffix=None, not_found_page=None):
        """Turn on static-website serving for the bucket.

        Args:
            main_page_suffix (str): Filename served as the main page.
            not_found_page (str): Filename served for 404 errors.
        """

    def disable_website(self):
        """Turn off the website configuration."""

    def enable_logging(self, bucket_name, object_prefix=''):
        """Turn on access logging.

        Args:
            bucket_name (str): Bucket receiving the log objects.
            object_prefix (str): Prefix applied to log object names.
        """

    def disable_logging(self):
        """Turn off access logging."""

    @property
    def name(self):
        """str: The bucket's name."""

    @property
    def versioning_enabled(self):
        """bool: Whether object versioning is enabled."""

    @versioning_enabled.setter
    def versioning_enabled(self, value):
        """Enable or disable object versioning.

        Args:
            value (bool): Desired versioning state.
        """


# Object-level operations for uploading, downloading, and managing individual files in Cloud Storage.
class Blob:
    """A single object stored in a Cloud Storage bucket.

    Supports upload, download, deletion, public access, custom metadata,
    and signed-URL generation.
    """

    def __init__(self, name, bucket, chunk_size=None):
        """Set up the blob.

        Args:
            name (str): Blob name.
            bucket (Bucket): Bucket that owns the blob.
            chunk_size (int): Optional chunk size for transfers.
        """

    def exists(self, client=None):
        """Report whether the blob exists.

        Args:
            client (Client): Optional client override.

        Returns:
            bool: ``True`` when the blob exists.
        """

    def rename(self, new_name, client=None):
        """Give the blob a new name.

        Args:
            new_name (str): Replacement blob name.
            client (Client): Optional client override.
        """

    def delete(self, client=None):
        """Delete the blob.

        Args:
            client (Client): Optional client override.
        """

    def download_to_file(self, file_obj, client=None):
        """Write the blob's content into a file-like object.

        Args:
            file_obj: Writable file-like object.
            client (Client): Optional client override.
        """

    def download_to_filename(self, filename, client=None):
        """Write the blob's content into a local file.

        Args:
            filename (str): Local file path.
            client (Client): Optional client override.
        """

    def download_as_string(self, client=None):
        """Return the blob's content.

        Args:
            client (Client): Optional client override.

        Returns:
            str: The blob's content.
        """

    def upload_from_file(self, file_obj, rewind=False, size=None, content_type=None,
                         num_retries=6, client=None):
        """Upload content read from a file-like object.

        Args:
            file_obj: Readable file-like object.
            rewind (bool): Seek the file to its start before reading.
            size (int): Hint for the object's size.
            content_type (str): MIME content type.
            num_retries (int): Number of retry attempts.
            client (Client): Optional client override.
        """

    def upload_from_filename(self, filename, content_type=None, client=None):
        """Upload content read from a local file.

        Args:
            filename (str): Local file path.
            content_type (str): MIME content type.
            client (Client): Optional client override.
        """

    def upload_from_string(self, data, content_type='text/plain', client=None):
        """Upload content from an in-memory string.

        Args:
            data (str): Data to upload.
            content_type (str): MIME content type.
            client (Client): Optional client override.
        """

    def make_public(self, client=None):
        """Grant public read access to the blob.

        Args:
            client (Client): Optional client override.
        """

    def generate_signed_url(self, expiration, method='GET', content_type=None,
                            content_md5=None, response_disposition=None,
                            response_type=None, generation=None, headers=None):
        """Build a signed URL granting temporary access to the blob.

        Args:
            expiration (datetime): When the URL stops working.
            method (str): HTTP method — 'GET', 'POST', 'PUT', or 'DELETE'.
            content_type (str): Expected content type.
            content_md5 (str): Expected MD5 hash.
            response_disposition (str): Response Content-Disposition header.
            response_type (str): Response Content-Type header.
            generation (int): Object generation number.
            headers (dict): Extra headers folded into the signature.

        Returns:
            str: The signed URL.
        """

    @property
    def name(self):
        """str: The blob's name."""

    @property
    def bucket(self):
        """Bucket: The bucket that owns this blob."""

    @property
    def size(self):
        """int: The blob's size in bytes."""

    @property
    def public_url(self):
        """str: Public URL for the blob."""

    @property
    def metadata(self):
        """dict: Custom metadata key/value pairs."""

    @metadata.setter
    def metadata(self, value):
        """Replace the custom metadata.

        Args:
            value (dict): Metadata key/value pairs.
        """

    @property
    def chunk_size(self):
        """int: Chunk size in bytes used for transfers."""

    @chunk_size.setter
    def chunk_size(self, value):
        """Set the transfer chunk size.

        Args:
            value (int): Chunk size in bytes.
        """


# Batch operations for efficient grouping of multiple storage operations.
class Batch:
    """Context manager that groups multiple storage requests into one batch."""

    def __init__(self, client):
        """Set up the batch.

        Args:
            client (Client): Storage client whose requests are batched.
        """

    def __enter__(self):
        """Enter the runtime context.

        Returns:
            Batch: This batch instance.
        """

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Leave the runtime context and execute the accumulated batch."""


from gcloud import storage
from gcloud import storage

# --- Basic workflow -------------------------------------------------------
# Initialize the client for a given project.
client = storage.Client(project='my-project')

# Create a bucket.
bucket = client.create_bucket('my-bucket')

# Upload a string as a new object.  NOTE(review): the documented Bucket API
# has no ``bucket.blob`` factory; blobs are constructed via the Blob class.
blob = storage.Blob('path/to/file.txt', bucket)
blob.upload_from_string('Hello, Cloud Storage!')

# Download the object's content.
content = blob.download_as_string()
print(content)  # Hello, Cloud Storage!

# List objects in the bucket.
for blob in bucket.list_blobs():
    print(f"{blob.name}: {blob.size} bytes")

# --- File transfer --------------------------------------------------------
# Upload a local file.
blob = storage.Blob('documents/report.pdf', bucket)
blob.upload_from_filename('/local/path/report.pdf')

# Download to a local file.
blob.download_to_filename('/local/path/downloaded_report.pdf')

# Upload from an open file object.
with open('/local/path/data.csv', 'rb') as file_obj:
    blob = storage.Blob('data/data.csv', bucket)
    blob.upload_from_file(file_obj)

# --- Access control and metadata ------------------------------------------
# Make the object publicly readable.
blob.make_public()
print(blob.public_url)

# Attach custom metadata.
blob.metadata = {
    'author': 'Alice Smith',
    'department': 'Engineering'
}

# Generate a signed URL allowing temporary read access (one hour).
from datetime import datetime, timedelta
expiration = datetime.utcnow() + timedelta(hours=1)
signed_url = blob.generate_signed_url(expiration, method='GET')

# --- Bucket configuration --------------------------------------------------
# Enable object versioning.
bucket.versioning_enabled = True

# Serve the bucket as a static website.
bucket.configure_website(
    main_page_suffix='index.html',
    not_found_page='404.html'
)

# Enable access logging to a dedicated logs bucket.
bucket.enable_logging('my-logs-bucket', 'access-logs/')

# Install with Tessl CLI
npx tessl i tessl/pypi-gcloudevals
scenario-1
scenario-2
scenario-3
scenario-4
scenario-5
scenario-6
scenario-7
scenario-8
scenario-9
scenario-10