# Data Management

Import and export functionality for moving data in and out of Redis instances using Google Cloud Storage.

## Capabilities

### Data Import

Import Redis data from Google Cloud Storage into a Redis instance.

```python { .api }
def import_instance(
    self,
    *,
    name: str,
    input_config: cloud_redis.InputConfig,
    **kwargs
) -> operation.Operation:
    """
    Import a Redis RDB snapshot file from Cloud Storage into a Redis instance.

    Args:
        name: Required. Redis instance resource name using the form:
            "projects/{project_id}/locations/{location_id}/instances/{instance_id}"
        input_config: Required. Specify data to be imported.

    Returns:
        google.api_core.operation.Operation: A long-running operation object.
            The result will be an Instance object.

    Raises:
        google.api_core.exceptions.GoogleAPICallError: If the request failed.
        google.api_core.exceptions.NotFound: If the instance doesn't exist.
        google.api_core.exceptions.PermissionDenied: If lacking storage permissions.
    """
```

### Data Export

Export Redis data from an instance to Google Cloud Storage.

```python { .api }
def export_instance(
    self,
    *,
    name: str,
    output_config: cloud_redis.OutputConfig,
    **kwargs
) -> operation.Operation:
    """
    Export Redis instance data into a Redis RDB format file in Cloud Storage.

    Args:
        name: Required. Redis instance resource name using the form:
            "projects/{project_id}/locations/{location_id}/instances/{instance_id}"
        output_config: Required. Specify data to be exported.

    Returns:
        google.api_core.operation.Operation: A long-running operation object.
            The result will be an Instance object.

    Raises:
        google.api_core.exceptions.GoogleAPICallError: If the request failed.
        google.api_core.exceptions.NotFound: If the instance doesn't exist.
        google.api_core.exceptions.PermissionDenied: If lacking storage permissions.
    """
```

## Request Types

### Import Request

```python { .api }
class ImportInstanceRequest:
    name: str
    input_config: InputConfig
```

### Export Request

```python { .api }
class ExportInstanceRequest:
    name: str
    output_config: OutputConfig
```

## Configuration Types

### Input Configuration

```python { .api }
class InputConfig:
    gcs_source: GcsSource

class GcsSource:
    uri: str
    """
    Required. Source data URI in the format 'gs://bucket_name/path_to_file.rdb'.
    The file must be a Redis RDB snapshot file.
    """
```

### Output Configuration

```python { .api }
class OutputConfig:
    gcs_destination: GcsDestination

class GcsDestination:
    uri: str
    """
    Required. Data destination URI in the format 'gs://bucket_name/path_to_file.rdb'.
    The provided URI must be writable by the service account used by the instance.
    """
```

## Usage Examples

### Import Data from Cloud Storage

```python
from google.cloud.redis import CloudRedisClient, InputConfig, GcsSource

client = CloudRedisClient()

# Configure the import source
gcs_source = GcsSource(
    uri="gs://my-backup-bucket/redis-snapshots/backup-20240101.rdb"
)

input_config = InputConfig(gcs_source=gcs_source)

# Start the import operation
instance_name = "projects/my-project/locations/us-central1/instances/my-redis"
operation = client.import_instance(
    name=instance_name,
    input_config=input_config
)

print(f"Import operation started: {operation.name}")

# Wait for import to complete (can take several minutes)
try:
    result = operation.result(timeout=3600)  # 1 hour timeout
    print("Import completed successfully")
    print(f"Instance state: {result.state}")
except Exception as e:
    print(f"Import failed: {e}")
```

### Export Data to Cloud Storage

```python
from google.cloud.redis import CloudRedisClient, OutputConfig, GcsDestination
from datetime import datetime

client = CloudRedisClient()

# Configure the export destination with timestamp
timestamp = datetime.now().strftime("%Y%m%d-%H%M%S")
gcs_destination = GcsDestination(
    uri=f"gs://my-backup-bucket/redis-exports/export-{timestamp}.rdb"
)

output_config = OutputConfig(gcs_destination=gcs_destination)

# Start the export operation
instance_name = "projects/my-project/locations/us-central1/instances/my-redis"
operation = client.export_instance(
    name=instance_name,
    output_config=output_config
)

print(f"Export operation started: {operation.name}")
print(f"Exporting to: {gcs_destination.uri}")

# Wait for export to complete
try:
    result = operation.result(timeout=3600)  # 1 hour timeout
    print("Export completed successfully")
    print(f"Data exported to: {gcs_destination.uri}")
except Exception as e:
    print(f"Export failed: {e}")
```

### Scheduled Backup Workflow

```python
from google.cloud.redis import CloudRedisClient, OutputConfig, GcsDestination
from datetime import datetime
import schedule
import time

def create_backup(instance_name: str, bucket_name: str):
    """Create a backup of a Redis instance."""
    client = CloudRedisClient()

    # Generate timestamped filename
    timestamp = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
    backup_uri = f"gs://{bucket_name}/redis-backups/{timestamp}/backup.rdb"

    # Configure export
    gcs_destination = GcsDestination(uri=backup_uri)
    output_config = OutputConfig(gcs_destination=gcs_destination)

    # Start export
    operation = client.export_instance(
        name=instance_name,
        output_config=output_config
    )

    print(f"Backup started: {backup_uri}")

    # Don't wait for completion in scheduled context
    return operation

# Schedule daily backups
instance_name = "projects/my-project/locations/us-central1/instances/prod-redis"
bucket_name = "my-redis-backups"

schedule.every().day.at("02:00").do(
    create_backup,
    instance_name=instance_name,
    bucket_name=bucket_name
)

# Run scheduler (in practice, this would be in a separate service)
while True:
    schedule.run_pending()
    time.sleep(60)
```

### Migration Between Instances

```python
from google.cloud.redis import CloudRedisClient, InputConfig, OutputConfig, GcsSource, GcsDestination
from datetime import datetime

def migrate_redis_data(source_instance: str, target_instance: str, temp_bucket: str):
    """Migrate data from one Redis instance to another via Cloud Storage."""
    client = CloudRedisClient()

    # Step 1: Export from source instance
    timestamp = datetime.now().strftime("%Y%m%d-%H%M%S")
    temp_uri = f"gs://{temp_bucket}/migration-{timestamp}.rdb"

    export_config = OutputConfig(
        gcs_destination=GcsDestination(uri=temp_uri)
    )

    print(f"Exporting from {source_instance}...")
    export_op = client.export_instance(
        name=source_instance,
        output_config=export_config
    )

    # Wait for export to complete
    export_result = export_op.result(timeout=3600)
    print("Export completed")

    # Step 2: Import to target instance
    import_config = InputConfig(
        gcs_source=GcsSource(uri=temp_uri)
    )

    print(f"Importing to {target_instance}...")
    import_op = client.import_instance(
        name=target_instance,
        input_config=import_config
    )

    # Wait for import to complete
    import_result = import_op.result(timeout=3600)
    print("Migration completed successfully")

    return import_result

# Example migration
source = "projects/my-project/locations/us-central1/instances/old-redis"
target = "projects/my-project/locations/us-west1/instances/new-redis"
temp_bucket = "migration-temp-bucket"

migrate_redis_data(source, target, temp_bucket)
```

### Import with Error Handling

```python
from google.cloud.redis import CloudRedisClient, InputConfig, GcsSource
from google.api_core import exceptions
import logging
import time

def safe_import_redis_data(instance_name: str, backup_uri: str):
    """Import Redis data with comprehensive error handling."""
    client = CloudRedisClient()

    # Configure import
    input_config = InputConfig(
        gcs_source=GcsSource(uri=backup_uri)
    )

    try:
        # Start import operation
        operation = client.import_instance(
            name=instance_name,
            input_config=input_config
        )

        logging.info(f"Import started: {operation.name}")

        # Wait for completion with progress monitoring
        while not operation.done():
            logging.info("Import in progress...")
            time.sleep(30)  # Check every 30 seconds
            operation.reload()

        # Check final result
        if operation.error:
            logging.error(f"Import failed: {operation.error}")
            return False
        else:
            result = operation.result()
            logging.info("Import completed successfully")
            logging.info(f"Instance state: {result.state}")
            return True

    except exceptions.NotFound:
        logging.error(f"Instance not found: {instance_name}")
        return False
    except exceptions.PermissionDenied:
        logging.error(f"Permission denied accessing: {backup_uri}")
        return False
    except exceptions.InvalidArgument as e:
        logging.error(f"Invalid backup file or configuration: {e}")
        return False
    except Exception as e:
        logging.error(f"Unexpected error: {e}")
        return False

# Usage
instance_name = "projects/my-project/locations/us-central1/instances/my-redis"
backup_uri = "gs://my-backups/redis-backup.rdb"

success = safe_import_redis_data(instance_name, backup_uri)
if success:
    print("Data imported successfully")
else:
    print("Import failed - check logs for details")
```

## Best Practices

### Service Account Permissions

Ensure the Redis instance's service account has appropriate Cloud Storage permissions:

```python
# The service account needs these IAM roles:
# - Storage Object Viewer (for imports)
# - Storage Object Creator (for exports)
# - Storage Legacy Bucket Reader (for both)

# Check instance service account
from google.cloud.redis import CloudRedisClient

client = CloudRedisClient()
instance = client.get_instance(name="projects/my-project/locations/us-central1/instances/my-redis")
print(f"Service account: {instance.persistence_iam_identity}")
```

### Backup File Validation

```python
def validate_backup_file(bucket_name: str, file_path: str) -> bool:
    """Validate that a backup file exists and is readable."""
    from google.cloud import storage

    try:
        storage_client = storage.Client()
        bucket = storage_client.bucket(bucket_name)
        blob = bucket.blob(file_path)

        # Check if file exists
        if not blob.exists():
            print(f"Backup file does not exist: gs://{bucket_name}/{file_path}")
            return False

        # Check file size (RDB files should not be empty)
        blob.reload()
        if blob.size == 0:
            print(f"Backup file is empty: gs://{bucket_name}/{file_path}")
            return False

        print(f"Backup file validated: {blob.size} bytes")
        return True

    except Exception as e:
        print(f"Error validating backup file: {e}")
        return False

# Usage before import
if validate_backup_file("my-backups", "redis-backup.rdb"):
    # Proceed with import
    pass
else:
    # Handle validation failure
    pass
```