CtrlK
Blog · Docs · Log in · Get started
Tessl Logo

tessl/pypi-google-cloud-redis

Google Cloud Redis API client library for managing fully managed Redis instances on Google Cloud

Pending
Overview
Eval results
Files

docs/data-management.md

Data Management

Import and export functionality for moving data in and out of Redis instances using Google Cloud Storage.

Capabilities

Data Import

Import Redis data from Google Cloud Storage into a Redis instance.

def import_instance(
    self,
    *,
    name: str,
    input_config: cloud_redis.InputConfig,
    **kwargs
) -> operation.Operation:
    """
    Import a Redis RDB snapshot file from Cloud Storage into a Redis instance.

    Args:
        name: Required. Redis instance resource name using the form:
            "projects/{project_id}/locations/{location_id}/instances/{instance_id}"
        input_config: Required. Specify data to be imported.
        **kwargs: Additional keyword arguments forwarded to the underlying
            API call (presumably retry/timeout/metadata — confirm against
            the client library reference).

    Returns:
        google.api_core.operation.Operation: A long-running operation object.
        The result will be an Instance object.

    Raises:
        google.api_core.exceptions.GoogleAPICallError: If the request failed.
        google.api_core.exceptions.NotFound: If the instance doesn't exist.
        google.api_core.exceptions.PermissionDenied: If lacking storage permissions.
    """

Data Export

Export Redis data from an instance to Google Cloud Storage.

def export_instance(
    self,
    *,
    name: str,
    output_config: cloud_redis.OutputConfig,
    **kwargs
) -> operation.Operation:
    """
    Export Redis instance data into a Redis RDB format file in Cloud Storage.

    Args:
        name: Required. Redis instance resource name using the form:
            "projects/{project_id}/locations/{location_id}/instances/{instance_id}"
        output_config: Required. Specify data to be exported.
        **kwargs: Additional keyword arguments forwarded to the underlying
            API call (presumably retry/timeout/metadata — confirm against
            the client library reference).

    Returns:
        google.api_core.operation.Operation: A long-running operation object.
        The result will be an Instance object.

    Raises:
        google.api_core.exceptions.GoogleAPICallError: If the request failed.
        google.api_core.exceptions.NotFound: If the instance doesn't exist.
        google.api_core.exceptions.PermissionDenied: If lacking storage permissions.
    """

Request Types

Import Request

class ImportInstanceRequest:
    """Request message for importing RDB data into a Redis instance."""
    # Full resource name of the target instance:
    # "projects/{project_id}/locations/{location_id}/instances/{instance_id}"
    name: str
    # Where to read the data from (wraps a Cloud Storage source URI).
    input_config: InputConfig

Export Request

class ExportInstanceRequest:
    """Request message for exporting a Redis instance's data to RDB format."""
    # Full resource name of the source instance:
    # "projects/{project_id}/locations/{location_id}/instances/{instance_id}"
    name: str
    # Where to write the data (wraps a Cloud Storage destination URI).
    output_config: OutputConfig

Configuration Types

Input Configuration

class InputConfig:
    """Source selector for an import; currently only Cloud Storage."""
    # The Cloud Storage location to read the RDB snapshot from.
    gcs_source: GcsSource

class GcsSource:
    """A Cloud Storage location holding an importable RDB snapshot."""
    uri: str
    """
    Required. Source data URI in the format 'gs://bucket_name/path_to_file.rdb'.
    The file must be a Redis RDB snapshot file.
    """

Output Configuration

class OutputConfig:
    """Destination selector for an export; currently only Cloud Storage."""
    # The Cloud Storage location to write the RDB snapshot to.
    gcs_destination: GcsDestination

class GcsDestination:
    """A Cloud Storage location to receive an exported RDB file."""
    uri: str
    """
    Required. Data destination URI in the format 'gs://bucket_name/path_to_file.rdb'.
    The provided URI must be writeable by the service account used by the instance.
    """

Usage Examples

Import Data from Cloud Storage

from google.cloud.redis import CloudRedisClient, InputConfig, GcsSource

client = CloudRedisClient()

# Point the import at an RDB snapshot stored in Cloud Storage.
input_config = InputConfig(
    gcs_source=GcsSource(
        uri="gs://my-backup-bucket/redis-snapshots/backup-20240101.rdb"
    )
)

# Kick off the long-running import against the target instance.
operation = client.import_instance(
    name="projects/my-project/locations/us-central1/instances/my-redis",
    input_config=input_config
)

print(f"Import operation started: {operation.name}")

# Block until the operation finishes (imports can take several minutes).
try:
    result = operation.result(timeout=3600)  # 1 hour timeout
    print(f"Import completed successfully")
    print(f"Instance state: {result.state}")
except Exception as e:
    print(f"Import failed: {e}")

Export Data to Cloud Storage

from google.cloud.redis import CloudRedisClient, OutputConfig, GcsDestination
from datetime import datetime

client = CloudRedisClient()

# Timestamp the destination object so repeated exports never collide.
timestamp = datetime.now().strftime("%Y%m%d-%H%M%S")
gcs_destination = GcsDestination(
    uri=f"gs://my-backup-bucket/redis-exports/export-{timestamp}.rdb"
)

# Kick off the long-running export against the source instance.
operation = client.export_instance(
    name="projects/my-project/locations/us-central1/instances/my-redis",
    output_config=OutputConfig(gcs_destination=gcs_destination)
)

print(f"Export operation started: {operation.name}")
print(f"Exporting to: {gcs_destination.uri}")

# Block until the export finishes.
try:
    result = operation.result(timeout=3600)  # 1 hour timeout
    print(f"Export completed successfully")
    print(f"Data exported to: {gcs_destination.uri}")
except Exception as e:
    print(f"Export failed: {e}")

Scheduled Backup Workflow

from google.cloud.redis import CloudRedisClient, OutputConfig, GcsDestination
from datetime import datetime  # timedelta was imported but never used; removed
import schedule
import time

def create_backup(instance_name: str, bucket_name: str):
    """Create a backup of a Redis instance.

    Starts (but deliberately does not wait for) an export of the instance
    to a timestamped path under gs://{bucket_name}/redis-backups/.

    Args:
        instance_name: Full Redis instance resource name.
        bucket_name: Cloud Storage bucket that receives the backup.

    Returns:
        The long-running export operation; callers may poll it.
    """
    client = CloudRedisClient()

    # Generate timestamped filename so successive backups never overwrite.
    timestamp = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
    backup_uri = f"gs://{bucket_name}/redis-backups/{timestamp}/backup.rdb"

    # Configure export
    gcs_destination = GcsDestination(uri=backup_uri)
    output_config = OutputConfig(gcs_destination=gcs_destination)

    # Start export
    operation = client.export_instance(
        name=instance_name,
        output_config=output_config
    )

    print(f"Backup started: {backup_uri}")

    # Don't wait for completion in scheduled context
    return operation

# Schedule daily backups
instance_name = "projects/my-project/locations/us-central1/instances/prod-redis"
bucket_name = "my-redis-backups"

schedule.every().day.at("02:00").do(
    create_backup, 
    instance_name=instance_name, 
    bucket_name=bucket_name
)

# Run scheduler (in practice, this would be in a separate service)
while True:
    schedule.run_pending()
    time.sleep(60)

Migration Between Instances

from google.cloud.redis import CloudRedisClient, InputConfig, OutputConfig, GcsSource, GcsDestination
from datetime import datetime  # BUG FIX: datetime was used below but never imported

def migrate_redis_data(source_instance: str, target_instance: str, temp_bucket: str):
    """Migrate data from one Redis instance to another via Cloud Storage.

    Exports the source instance to a timestamped staging file in
    ``temp_bucket``, then imports that file into the target instance.
    The staged RDB file is left in the bucket; clean it up separately
    if desired.

    Args:
        source_instance: Resource name of the instance to export from.
        target_instance: Resource name of the instance to import into.
        temp_bucket: Cloud Storage bucket used to stage the RDB file.

    Returns:
        The result of the import operation.
    """
    client = CloudRedisClient()
    
    # Step 1: Export from source instance to a timestamped staging URI.
    timestamp = datetime.now().strftime("%Y%m%d-%H%M%S")
    temp_uri = f"gs://{temp_bucket}/migration-{timestamp}.rdb"
    
    export_config = OutputConfig(
        gcs_destination=GcsDestination(uri=temp_uri)
    )
    
    print(f"Exporting from {source_instance}...")
    export_op = client.export_instance(
        name=source_instance,
        output_config=export_config
    )
    
    # Block until the export completes; its result value is not needed.
    export_op.result(timeout=3600)
    print(f"Export completed")
    
    # Step 2: Import the staged file into the target instance.
    import_config = InputConfig(
        gcs_source=GcsSource(uri=temp_uri)
    )
    
    print(f"Importing to {target_instance}...")
    import_op = client.import_instance(
        name=target_instance,
        input_config=import_config
    )
    
    # Wait for import to complete
    import_result = import_op.result(timeout=3600)
    print(f"Migration completed successfully")
    
    return import_result

# Example migration
source = "projects/my-project/locations/us-central1/instances/old-redis"
target = "projects/my-project/locations/us-west1/instances/new-redis"
temp_bucket = "migration-temp-bucket"

migrate_redis_data(source, target, temp_bucket)

Import with Error Handling

from google.cloud.redis import CloudRedisClient, InputConfig, GcsSource
from google.api_core import exceptions
import logging
import time  # BUG FIX: time.sleep is used below but time was never imported

def safe_import_redis_data(instance_name: str, backup_uri: str):
    """Import Redis data with comprehensive error handling.

    Args:
        instance_name: Full Redis instance resource name.
        backup_uri: gs:// URI of the RDB snapshot to import.

    Returns:
        True if the import completed successfully, False otherwise.
        Failures are logged rather than raised.
    """
    client = CloudRedisClient()
    
    # Configure import
    input_config = InputConfig(
        gcs_source=GcsSource(uri=backup_uri)
    )
    
    try:
        # Start import operation
        operation = client.import_instance(
            name=instance_name,
            input_config=input_config
        )
        
        logging.info(f"Import started: {operation.name}")
        
        # Wait for completion with progress monitoring
        while not operation.done():
            logging.info("Import in progress...")
            time.sleep(30)  # Check every 30 seconds
            operation.reload()
        
        # Check final result
        if operation.error:
            logging.error(f"Import failed: {operation.error}")
            return False
        else:
            result = operation.result()
            logging.info(f"Import completed successfully")
            logging.info(f"Instance state: {result.state}")
            return True
            
    except exceptions.NotFound:
        logging.error(f"Instance not found: {instance_name}")
        return False
    except exceptions.PermissionDenied:
        logging.error(f"Permission denied accessing: {backup_uri}")
        return False
    except exceptions.InvalidArgument as e:
        logging.error(f"Invalid backup file or configuration: {e}")
        return False
    except Exception as e:
        # Last-resort guard so callers always get a boolean, never a crash.
        logging.error(f"Unexpected error: {e}")
        return False

# Usage
instance_name = "projects/my-project/locations/us-central1/instances/my-redis"
backup_uri = "gs://my-backups/redis-backup.rdb"

success = safe_import_redis_data(instance_name, backup_uri)
if success:
    print("Data imported successfully")
else:
    print("Import failed - check logs for details")

Best Practices

Service Account Permissions

Ensure the Redis instance's service account has appropriate Cloud Storage permissions:

# The service account needs these IAM roles:
# - Storage Object Viewer (for imports)
# - Storage Object Creator (for exports)
# - Storage Legacy Bucket Reader (for both)

# Check instance service account
from google.cloud.redis import CloudRedisClient

client = CloudRedisClient()
instance = client.get_instance(name="projects/my-project/locations/us-central1/instances/my-redis")
# NOTE(review): persistence_iam_identity is presumed to hold the instance's
# service-account identity used for import/export — confirm against API docs.
print(f"Service account: {instance.persistence_iam_identity}")

Backup File Validation

def validate_backup_file(bucket_name: str, file_path: str) -> bool:
    """Validate that a backup file exists and is readable.

    Returns True when gs://{bucket_name}/{file_path} exists and is
    non-empty; prints a diagnostic and returns False otherwise.
    """
    from google.cloud import storage
    
    try:
        blob = storage.Client().bucket(bucket_name).blob(file_path)
        
        # Check if file exists
        if not blob.exists():
            print(f"Backup file does not exist: gs://{bucket_name}/{file_path}")
            return False
            
        # Check file size (RDB files should not be empty)
        blob.reload()
        if blob.size == 0:
            print(f"Backup file is empty: gs://{bucket_name}/{file_path}")
            return False
            
        print(f"Backup file validated: {blob.size} bytes")
        return True
        
    except Exception as e:
        print(f"Error validating backup file: {e}")
        return False

# Usage before import
if validate_backup_file("my-backups", "redis-backup.rdb"):
    # Proceed with import
    pass
else:
    # Handle validation failure
    pass

Install with Tessl CLI

npx tessl i tessl/pypi-google-cloud-redis

docs

client-management.md

data-management.md

index.md

instance-operations.md

maintenance-operations.md

tile.json