The Google API Client Library for Python provides discovery-based access to hundreds of Google services, with support for authentication, caching, and media upload/download.
—
The media handling functionality provides robust support for uploading and downloading files to Google APIs with features like resumable transfers, progress tracking, and chunked processing for large files.
Upload files and data to Google APIs with various source types and transfer options.
class MediaUpload:
    """Abstract base class for media uploads.

    Concrete subclasses (e.g. MediaFileUpload, MediaIoBaseUpload) supply
    these accessors; callers use them to stream the payload in chunks.
    """

    def chunksize(self):
        """
        Get the chunk size for resumable uploads.

        Returns:
            int: Chunk size in bytes
        """

    def mimetype(self):
        """
        Get the MIME type of the media.

        Returns:
            str: MIME type string
        """

    def size(self):
        """
        Get the total size of the media.

        Returns:
            int: Size in bytes, or None if unknown
        """

    def resumable(self):
        """
        Check if the upload supports resumable transfers.

        Returns:
            bool: True if resumable, False otherwise
        """

    def getbytes(self, begin, end):
        """
        Get a range of bytes from the media.

        Args:
            begin (int): Starting byte position
            end (int): Ending byte position

        Returns:
            bytes: The requested byte range
        """
class MediaFileUpload(MediaUpload):
    """Upload a file from the local filesystem."""

    def __init__(self, filename, mimetype=None, chunksize=DEFAULT_CHUNK_SIZE,
                 resumable=False):
        """
        Initialize a file upload.

        Args:
            filename (str): Path to the file to upload
            mimetype (str, optional): MIME type of the file (auto-detected
                from the filename if None)
            chunksize (int): Size of upload chunks in bytes (default: 1MB)
            resumable (bool): Whether the upload should be resumable
                (default: False)

        Raises:
            FileNotFoundError: When the specified file does not exist
            ValueError: When chunksize is invalid for resumable uploads
        """
class MediaInMemoryUpload(MediaUpload):
    """Upload data from memory (bytes or string).

    NOTE(review): upstream google-api-python-client marks this class as
    deprecated in favor of MediaIoBaseUpload — confirm against the
    installed library version.
    """

    def __init__(self, body, mimetype='application/octet-stream',
                 chunksize=DEFAULT_CHUNK_SIZE, resumable=False):
        """
        Initialize an in-memory upload.

        Args:
            body (bytes or str): Data to upload
            mimetype (str): MIME type of the data
            chunksize (int): Size of upload chunks in bytes (default: 1MB)
            resumable (bool): Whether the upload should be resumable
                (default: False)

        Raises:
            ValueError: When chunksize is invalid for resumable uploads
        """
class MediaIoBaseUpload(MediaUpload):
    """Upload data from a file-like object (IOBase)."""

    def __init__(self, fd, mimetype='application/octet-stream',
                 chunksize=DEFAULT_CHUNK_SIZE, resumable=False):
        """
        Initialize an IOBase upload.

        Args:
            fd (IOBase): File-like object to read data from
            mimetype (str): MIME type of the data
            chunksize (int): Size of upload chunks in bytes (default: 1MB)
            resumable (bool): Whether the upload should be resumable
                (default: False)

        Raises:
            ValueError: When chunksize is invalid for resumable uploads
        """


# Download files and data from Google APIs with progress tracking and
# chunked processing.
class MediaDownloadProgress:
    """Tracks the progress of a media download."""

    def __init__(self, resumable_progress, total_size):
        """
        Initialize download progress tracking.

        Args:
            resumable_progress (int): Bytes downloaded so far
            total_size (int): Total size in bytes, or None if unknown
        """

    @property
    def resumable_progress(self):
        """
        Get the number of bytes downloaded.

        Returns:
            int: Bytes downloaded so far
        """

    @property
    def total_size(self):
        """
        Get the total download size.

        Returns:
            int: Total size in bytes, or None if unknown
        """

    def progress(self):
        """
        Get the download progress as a fraction.

        Returns:
            float: Progress fraction (0.0 to 1.0), or None if the total
                size is unknown. Multiply by 100 for a percentage.
        """
class MediaIoBaseDownload:
    """Download media to a file-like object."""

    def __init__(self, fd, request, chunksize=DEFAULT_CHUNK_SIZE):
        """
        Initialize a media download.

        Args:
            fd (IOBase): File-like object to write downloaded data
            request (HttpRequest): HTTP request for the media download
            chunksize (int): Size of download chunks in bytes (default: 1MB)
        """

    def next_chunk(self, num_retries=0):
        """
        Download the next chunk of media.

        Args:
            num_retries (int): Number of retry attempts on failure

        Returns:
            tuple: (MediaDownloadProgress, bool) - progress object and a
                flag that is True once the download is complete

        Raises:
            HttpError: When the download request fails
        """


class MediaUploadProgress:
"""Tracks the progress of a resumable media upload."""
def __init__(self, resumable_progress, total_size):
"""
Initialize upload progress tracking.
Args:
resumable_progress (int): Bytes uploaded so far
total_size (int): Total size in bytes
"""
@property
def resumable_progress(self):
"""
Get the number of bytes uploaded.
Returns:
int: Bytes uploaded so far
"""
@property
def total_size(self):
"""
Get the total upload size.
Returns:
int: Total size in bytes
"""
def progress(self):
"""
Get the upload progress as a percentage.
Returns:
float: Progress percentage (0.0 to 1.0)
"""from googleapiclient import discovery
from googleapiclient.http import MediaFileUpload

# Build Drive service
# NOTE(review): `credentials` must be obtained beforehand (e.g. via
# google-auth); it is not defined in this snippet.
service = discovery.build('drive', 'v3', credentials=credentials)

# Upload a file
file_metadata = {
    'name': 'my-document.pdf',
    'parents': ['folder_id']  # Optional: specify parent folder
}
media = MediaFileUpload(
    'local-document.pdf',
    mimetype='application/pdf',
    resumable=True  # allows the transfer to be resumed after interruption
)
# fields='id' limits the response to just the new file's ID
file = service.files().create(
    body=file_metadata,
    media_body=media,
    fields='id'
).execute()
print(f'File ID: {file.get("id")}')

from googleapiclient.http import MediaFileUpload
from googleapiclient.errors import HttpError
import time


def upload_with_progress(service, filename, file_metadata, max_retries=5):
    """Upload a file with progress tracking and bounded retry on 5xx errors.

    Args:
        service: Authorized Drive API service object.
        filename (str): Path of the local file to upload.
        file_metadata (dict): Metadata for the created file (e.g. {'name': ...}).
        max_retries (int): Maximum consecutive recoverable failures tolerated
            before giving up (default: 5).

    Returns:
        dict: The API response for the created file.

    Raises:
        HttpError: On a non-recoverable HTTP error, or when a recoverable
            (500/502/503/504) error persists past max_retries attempts.
    """
    media = MediaFileUpload(
        filename,
        resumable=True,
        chunksize=1024*1024  # 1MB chunks
    )
    request = service.files().create(
        body=file_metadata,
        media_body=media
    )
    response = None
    failures = 0  # consecutive recoverable failures
    while response is None:
        try:
            status, response = request.next_chunk()
            failures = 0  # a chunk succeeded; reset the failure counter
            if status:
                progress = int(status.progress() * 100)
                print(f"Upload progress: {progress}%")
        except HttpError as error:
            if error.resp.status in [500, 502, 503, 504]:
                # Recoverable error, wait and retry — but only a bounded
                # number of times (the original looped forever).
                failures += 1
                if failures > max_retries:
                    raise
                time.sleep(5)
                continue
            else:
                raise
    return response


# Use the function
file_metadata = {'name': 'large-file.zip'}
result = upload_with_progress(service, 'large-file.zip', file_metadata)
print(f'Upload completed. File ID: {result.get("id")}')

from googleapiclient.http import MediaInMemoryUpload
import json

# Upload JSON data held entirely in memory.
data = {'key': 'value', 'numbers': [1, 2, 3]}
json_string = json.dumps(data)
media = MediaInMemoryUpload(
    json_string.encode('utf-8'),  # body is passed as bytes
    mimetype='application/json'
)
file_metadata = {'name': 'data.json'}
file = service.files().create(
    body=file_metadata,
    media_body=media
).execute()

from googleapiclient.http import MediaIoBaseUpload
import io

# Upload from a file-like object (anything derived from io.IOBase).
data_stream = io.BytesIO(b'Hello, world! This is streaming data.')
media = MediaIoBaseUpload(
    data_stream,
    mimetype='text/plain',
    resumable=True
)
file_metadata = {'name': 'stream-data.txt'}
file = service.files().create(
    body=file_metadata,
    media_body=media
).execute()

from googleapiclient.http import MediaIoBaseDownload
import io

# Download a file into an in-memory buffer.
request = service.files().get_media(fileId='file_id')
file_io = io.BytesIO()
downloader = MediaIoBaseDownload(file_io, request)
done = False
while done is False:
    # next_chunk() returns (MediaDownloadProgress, bool); the bool flips to
    # True once the final chunk has been written to file_io.
    status, done = downloader.next_chunk()
    if status:
        progress = int(status.progress() * 100)
        print(f"Download progress: {progress}%")

# File is now in file_io
file_content = file_io.getvalue()
print(f"Downloaded {len(file_content)} bytes")

from googleapiclient.http import MediaIoBaseDownload
def download_file(service, file_id, local_filename):
    """Download a file to local filesystem.

    Args:
        service: Authorized Drive API service object.
        file_id (str): ID of the file to download.
        local_filename (str): Destination path on the local filesystem.
    """
    request = service.files().get_media(fileId=file_id)
    # NOTE(review): chunks are streamed straight into the destination file,
    # so an interrupted download leaves a partial file behind.
    with open(local_filename, 'wb') as f:
        downloader = MediaIoBaseDownload(f, request)
        done = False
        while done is False:
            status, done = downloader.next_chunk()
            if status:
                progress = int(status.progress() * 100)
                print(f"Download {progress}% complete")
    print(f"Download completed: {local_filename}")

# Use the function
download_file(service, 'file_id_here', 'downloaded-file.pdf')

from googleapiclient import http
from googleapiclient.http import MediaFileUpload


def batch_upload_callback(request_id, response, exception):
    """Report the outcome of one upload (same shape as a batch callback).

    Args:
        request_id (str): Identifier of the upload request.
        response (dict or None): API response on success, None on failure.
        exception (Exception or None): Error on failure, None on success.
    """
    if exception is not None:
        print(f'Upload {request_id} failed: {exception}')
    else:
        print(f'Upload {request_id} completed: {response.get("id")}')


# NOTE: googleapiclient's BatchHttpRequest does NOT support requests that
# carry a media body, so the uploads are issued sequentially here; the
# callback keeps the batch-style reporting interface.
files_to_upload = [
    ('file1.txt', 'First file'),
    ('file2.txt', 'Second file'),
    ('file3.txt', 'Third file')
]
# Execute all uploads one by one, reporting each result via the callback.
for i, (filename, name) in enumerate(files_to_upload):
    media = MediaFileUpload(filename, mimetype='text/plain')
    file_metadata = {'name': name}
    request = service.files().create(
        body=file_metadata,
        media_body=media
    )
    try:
        result = request.execute()
        batch_upload_callback(f'upload_{i}', result, None)
    except Exception as error:  # mirror the batch callback contract
        batch_upload_callback(f'upload_{i}', None, error)

from googleapiclient.errors import HttpError, ResumableUploadError
import time
import random


def upload_with_retry(service, filename, file_metadata, max_retries=3):
    """Upload a file with exponential-backoff retries on transient errors.

    Args:
        service: Authorized Drive API service object.
        filename (str): Path of the local file to upload.
        file_metadata (dict): Metadata for the created file.
        max_retries (int): Maximum number of attempts (default: 3).

    Returns:
        dict: The API response for the created file.

    Raises:
        HttpError: Immediately on a non-transient (non-5xx) error, or when
            a transient error persists through all attempts.
        ResumableUploadError: When the resumable session keeps failing
            through the final attempt.
    """
    media = MediaFileUpload(filename, resumable=True)
    request = service.files().create(body=file_metadata, media_body=media)
    for attempt in range(max_retries):
        try:
            response = None
            while response is None:
                status, response = request.next_chunk()
                if status:
                    print(f"Upload progress: {int(status.progress() * 100)}%")
            return response
        except ResumableUploadError:
            if attempt >= max_retries - 1:
                raise
        except HttpError as error:
            # Only server-side 5xx errors are worth retrying; client errors
            # (4xx) would fail identically on every attempt, so re-raise them
            # at once. (The original retried every HttpError.)
            if (error.resp.status not in (500, 502, 503, 504)
                    or attempt >= max_retries - 1):
                raise
        # Exponential backoff with sub-second jitter before the next attempt.
        wait_time = (2 ** attempt) + (random.randint(0, 1000) / 1000)
        print(f"Upload failed, retrying in {wait_time:.1f} seconds...")
        time.sleep(wait_time)


# Use retry upload
file_metadata = {'name': 'important-file.pdf'}
result = upload_with_retry(service, 'important-file.pdf', file_metadata)
print(f'Upload successful: {result.get("id")}')

# Install with Tessl CLI
npx tessl i tessl/pypi-google-api-python-client