Firebase Admin Python SDK enables server-side Python developers to integrate Firebase services into their applications from privileged environments.
—
Firebase Cloud Storage integration providing access to Google Cloud Storage buckets for file upload, download, and management operations. Full integration with Google Cloud Storage client capabilities.
Get Cloud Storage bucket instances for file operations with automatic Firebase configuration integration.
def bucket(name=None, app=None):
    """Return a handle to a Cloud Storage bucket.

    Args:
        name: Optional bucket name string. When omitted, the default
            bucket configured on the Firebase app (the ``storageBucket``
            option) is used.
        app: Optional Firebase app instance. The default app is used
            when omitted.

    Returns:
        google.cloud.storage.Bucket: Storage bucket instance exposing
        the full Google Cloud Storage API.
    """

# The Firebase Admin SDK provides direct access to Google Cloud Storage
# buckets through the google.cloud.storage.Bucket class. This provides
# complete file storage capabilities including upload, download, metadata
# management, and access control.
import firebase_admin
from firebase_admin import storage
import os

# Obtain the default bucket configured on the Firebase app.
bucket = storage.bucket()

# Upload a file straight from a local path.
blob = bucket.blob('uploads/document.pdf')
blob.upload_from_filename('/local/path/document.pdf')

# Upload in-memory bytes with an explicit content type.
data = b'Hello, Cloud Storage!'
blob = bucket.blob('text/hello.txt')
blob.upload_from_string(data, content_type='text/plain')

# Attach custom metadata before uploading.
blob = bucket.blob('images/photo.jpg')
blob.metadata = {
    'uploaded_by': 'user123',
    'category': 'profile_photo',
}
blob.upload_from_filename('/local/path/photo.jpg')
# Download to local file
blob = bucket.blob('uploads/document.pdf')
blob.download_to_filename('/local/path/downloaded.pdf')

# Download into memory, either as raw bytes or decoded text.
blob = bucket.blob('text/hello.txt')
content = blob.download_as_bytes()
text_content = blob.download_as_text()

# Probe for existence before relying on the object.
if blob.exists():
    print('File exists in storage')
# Get file information
blob = bucket.blob('uploads/document.pdf')
blob.reload()  # pull the current metadata from the server
print(f'Size: {blob.size} bytes')
print(f'Content type: {blob.content_type}')
print(f'Created: {blob.time_created}')
print(f'Updated: {blob.updated}')
print(f'MD5 hash: {blob.md5_hash}')
print(f'Custom metadata: {blob.metadata}')

# Persist a custom-metadata change.
blob.metadata = {'description': 'Updated document'}
blob.patch()

# Persist a content-type change.
blob.content_type = 'application/pdf'
blob.patch()
# List all files in bucket
blobs = bucket.list_blobs()
for blob in blobs:
    print(f'File: {blob.name}')

# Restrict the listing to objects under a key prefix.
blobs = bucket.list_blobs(prefix='uploads/')
for blob in blobs:
    print(f'Upload: {blob.name}')

# A delimiter gives a folder-like view of the hierarchy.
blobs = bucket.list_blobs(prefix='images/', delimiter='/')
for blob in blobs:
    print(f'Image: {blob.name}')

# Cap the number of results returned.
blobs = bucket.list_blobs(max_results=10)
# Copy file
# Copy an object. copy_blob() returns the newly created destination Blob,
# so bind it instead of creating an unused blob reference beforehand.
source_blob = bucket.blob('original/file.txt')
destination_blob = bucket.copy_blob(source_blob, bucket, 'backup/file.txt')

# Move/rename is modelled as copy followed by delete of the source.
bucket.copy_blob(source_blob, bucket, 'new/location/file.txt')
source_blob.delete()

# Delete a single object.
blob = bucket.blob('uploads/temp.txt')
blob.delete()

# Delete every object under a prefix in one call.
blobs_to_delete = bucket.list_blobs(prefix='temp/')
bucket.delete_blobs(blobs_to_delete)
# Make file publicly readable
# Make an object publicly readable and print its stable public URL.
blob = bucket.blob('public/image.jpg')
blob.make_public()
print(f'Public URL: {blob.public_url}')

# Generate a signed URL for temporary read access.
# NOTE: datetime.utcnow() is deprecated and returns a naive datetime;
# use an aware UTC timestamp instead.
from datetime import datetime, timedelta, timezone
blob = bucket.blob('private/document.pdf')
url = blob.generate_signed_url(
    expiration=datetime.now(timezone.utc) + timedelta(hours=1),
    method='GET'
)
print(f'Signed URL: {url}')

# Signed URL that permits a one-hour upload via HTTP PUT.
upload_url = blob.generate_signed_url(
    expiration=datetime.now(timezone.utc) + timedelta(hours=1),
    method='PUT',
    content_type='application/pdf'
)
# Get bucket information
bucket.reload()  # refresh bucket metadata from the server
print(f'Bucket name: {bucket.name}')
print(f'Location: {bucket.location}')
print(f'Storage class: {bucket.storage_class}')

# Walk the listing page by page instead of as one flat iterator.
iterator = bucket.list_blobs()
page_iterator = iterator.pages
for page in page_iterator:
    for blob in page:
        print(f'File: {blob.name}')
# Batch upload multiple files
import concurrent.futures
import os

def upload_file(file_path):
    """Upload one local file into the batch_upload/ folder; return its base name."""
    blob_name = os.path.basename(file_path)
    blob = bucket.blob(f'batch_upload/{blob_name}')
    blob.upload_from_filename(file_path)
    return blob_name

file_paths = ['/path/to/file1.txt', '/path/to/file2.txt']
with concurrent.futures.ThreadPoolExecutor() as executor:
    futures = [executor.submit(upload_file, path) for path in file_paths]
    for future in concurrent.futures.as_completed(futures):
        print(f'Uploaded: {future.result()}')
# Set storage class for cost optimization
# Choose a colder storage class before uploading archival data.
blob = bucket.blob('archive/old_data.zip')
blob.storage_class = 'COLDLINE'  # alternatives: 'NEARLINE', 'ARCHIVE'
blob.upload_from_filename('/local/path/old_data.zip')

# Upload gzip-compressed content and record the encoding so clients
# can transparently decompress it.
import gzip
data = b'Large amount of text data...'
compressed_data = gzip.compress(data)
blob = bucket.blob('compressed/data.txt.gz')
blob.content_encoding = 'gzip'
blob.content_type = 'text/plain'
blob.upload_from_string(compressed_data)

from google.cloud.exceptions import NotFound, Forbidden, GoogleCloudError
from google.api_core.exceptions import ServiceUnavailable

# Catch the narrowest Google Cloud exceptions first, falling back to
# the general GoogleCloudError base class.
try:
    blob = bucket.blob('nonexistent/file.txt')
    content = blob.download_as_bytes()
except NotFound:
    print('File not found')
except Forbidden:
    print('Access denied')
except ServiceUnavailable:
    print('Storage service temporarily unavailable')
except GoogleCloudError as e:
    print(f'Google Cloud error: {e}')
# Use Firebase Auth tokens for client-side access
from firebase_admin import auth

# Mint a custom token for a client identity. The client can exchange it
# to authenticate with Firebase Storage and access files according to
# the Firebase Storage security rules.
custom_token = auth.create_custom_token('user123')
# For projects with multiple storage buckets:
# Default bucket, taken from the Firebase app configuration.
default_bucket = storage.bucket()

# A bucket addressed explicitly by name.
named_bucket = storage.bucket('my-other-bucket-name')

# Operate on the named bucket exactly like the default one.
blob = named_bucket.blob('data/file.txt')
blob.upload_from_string('data for named bucket')
# Firebase Storage uses security rules for access control:
// Example storage rules (defined in Firebase Console)
rules_version = '2';
service firebase.storage {
match /b/{bucket}/o {
// Public read access
match /public/{allPaths=**} {
allow read;
}
// User-specific files
match /users/{userId}/{allPaths=**} {
allow read, write: if request.auth != null && request.auth.uid == userId;
}
// Admin only
match /admin/{allPaths=**} {
allow read, write: if request.auth != null &&
request.auth.token.admin == true;
}
}
}

# The bucket() function returns google.cloud.storage.Bucket
# which provides access to all Google Cloud Storage types:
# Core types from google.cloud.storage:
# - Bucket: Storage bucket container
# - Blob: Individual file/object in storage
# - BlobReader: For streaming downloads
# - BlobWriter: For streaming uploads
# Common properties and methods available on Bucket:
# - name: Bucket name
# - location: Geographic location
# - storage_class: Storage class (STANDARD, NEARLINE, COLDLINE, ARCHIVE)
# - list_blobs(): List objects in bucket
# - blob(name): Get blob reference
# - copy_blob(): Copy objects
# - delete_blobs(): Delete multiple objects
# Common properties and methods available on Blob:
# - name: Object name/path
# - size: Object size in bytes
# - content_type: MIME type
# - metadata: Custom metadata dict
# - time_created: Creation timestamp
# - updated: Last modified timestamp
# - upload_from_filename(): Upload from file
# - upload_from_string(): Upload from memory
# - download_to_filename(): Download to file
# - download_as_bytes(): Download to memory
# - generate_signed_url(): Create temporary access URL
# - make_public(): Make publicly accessible

Install with Tessl CLI
npx tessl i tessl/pypi-firebase-admin