# Cloud Services Integration

Containers for cloud service emulation and integration including LocalStack for AWS services, Azure emulators, and Google Cloud Platform services for local development and testing without cloud dependencies.

## Capabilities

### LocalStack Container

LocalStack provides a local AWS cloud service emulator supporting S3, DynamoDB, Lambda, SQS, SNS, and many other AWS services for local development and testing.

```python { .api }
class LocalStackContainer:
    def __init__(
        self,
        image: str = "localstack/localstack:2.0.1",
        edge_port: int = 4566,
        region_name: Optional[str] = None,
        **kwargs: Any
    ):
        """
        Initialize LocalStack container.

        Args:
            image: LocalStack Docker image
            edge_port: LocalStack edge port (default 4566)
            region_name: AWS region name (default us-east-1)
            **kwargs: Additional container options
        """

    def with_services(self, *services: str) -> "LocalStackContainer":
        """
        Restrict LocalStack to specific AWS services.

        Args:
            *services: AWS service names (s3, dynamodb, lambda, sqs, etc.)

        Returns:
            Self for method chaining
        """

    def get_url(self) -> str:
        """
        Get LocalStack endpoint URL.

        Returns:
            LocalStack endpoint URL string
        """

    def get_client(self, name: str, **kwargs: Any):
        """
        Get boto3 client configured for LocalStack.

        Args:
            name: AWS service name (s3, dynamodb, etc.)
            **kwargs: Additional boto3 client arguments

        Returns:
            Configured boto3 client instance
        """
```

### Azure Services Container

Azure service emulators for local development including Azurite for Azure Storage and the Cosmos DB emulator.

```python { .api }
class AzuriteContainer:
    def __init__(
        self,
        image: str = "mcr.microsoft.com/azure-storage/azurite:latest",
        **kwargs: Any
    ):
        """
        Initialize Azurite Azure Storage emulator container.

        Args:
            image: Azurite Docker image
            **kwargs: Additional container options
        """

    def get_connection_string(self) -> str:
        """
        Get Azure Storage connection string.

        Returns:
            Azure Storage connection string
        """


class CosmosDbContainer:
    def __init__(
        self,
        image: str = "mcr.microsoft.com/cosmosdb/linux/azure-cosmos-emulator:latest",
        **kwargs: Any
    ):
        """
        Initialize Cosmos DB emulator container.

        Args:
            image: Cosmos DB emulator Docker image
            **kwargs: Additional container options
        """

    def get_connection_string(self) -> str:
        """
        Get Cosmos DB connection string.

        Returns:
            Cosmos DB connection string
        """
```

### Google Cloud Services Container

Google Cloud Platform service emulators for local development and testing.

```python { .api }
class GoogleCloudContainer:
    def __init__(
        self,
        image: str = "gcr.io/google.com/cloudsdktool/cloud-sdk:latest",
        **kwargs: Any
    ):
        """
        Initialize Google Cloud services container.

        Args:
            image: Google Cloud SDK Docker image
            **kwargs: Additional container options
        """

    def get_pubsub_emulator_host(self) -> str:
        """
        Get Pub/Sub emulator host.

        Returns:
            Pub/Sub emulator host string
        """

    def get_datastore_emulator_host(self) -> str:
        """
        Get Datastore emulator host.

        Returns:
            Datastore emulator host string
        """
```

## Usage Examples

### LocalStack AWS Services

```python
from testcontainers.localstack import LocalStackContainer
import boto3

# Restrict LocalStack to the services under test. Configuration via
# with_services() must be chained before the container starts.
with LocalStackContainer().with_services("s3", "dynamodb", "sqs", "sns") as localstack:
    # Get AWS clients configured for LocalStack
    s3_client = localstack.get_client("s3")
    dynamodb_client = localstack.get_client("dynamodb")
    sqs_client = localstack.get_client("sqs")

    # S3 operations
    bucket_name = "test-bucket"
    s3_client.create_bucket(Bucket=bucket_name)

    # Upload file to S3
    s3_client.put_object(
        Bucket=bucket_name,
        Key="test-file.txt",
        Body=b"Hello, LocalStack S3!"
    )

    # List S3 objects
    response = s3_client.list_objects_v2(Bucket=bucket_name)
    print(f"S3 objects: {[obj['Key'] for obj in response.get('Contents', [])]}")

    # DynamoDB operations
    table_name = "test-table"
    dynamodb_client.create_table(
        TableName=table_name,
        KeySchema=[
            {"AttributeName": "id", "KeyType": "HASH"}
        ],
        AttributeDefinitions=[
            {"AttributeName": "id", "AttributeType": "S"}
        ],
        BillingMode="PAY_PER_REQUEST"
    )

    # Put item in DynamoDB
    dynamodb_client.put_item(
        TableName=table_name,
        Item={
            "id": {"S": "test-id"},
            "name": {"S": "Test Item"},
            "value": {"N": "42"}
        }
    )

    # Get item from DynamoDB
    response = dynamodb_client.get_item(
        TableName=table_name,
        Key={"id": {"S": "test-id"}}
    )
    print(f"DynamoDB item: {response['Item']}")

    # SQS operations
    queue_name = "test-queue"
    queue_url = sqs_client.create_queue(QueueName=queue_name)["QueueUrl"]

    # Send message to SQS
    sqs_client.send_message(
        QueueUrl=queue_url,
        MessageBody="Hello, LocalStack SQS!"
    )

    # Receive message from SQS
    messages = sqs_client.receive_message(QueueUrl=queue_url)
    if "Messages" in messages:
        for message in messages["Messages"]:
            print(f"SQS message: {message['Body']}")
```

### Azure Storage with Azurite

```python
from testcontainers.azurite import AzuriteContainer
from azure.storage.blob import BlobServiceClient

with AzuriteContainer() as azurite:
    # Get connection string
    connection_string = azurite.get_connection_string()

    # Create blob service client
    blob_service = BlobServiceClient.from_connection_string(connection_string)

    # Create container
    container_name = "test-container"
    blob_service.create_container(container_name)

    # Upload blob
    blob_name = "test-blob.txt"
    blob_data = b"Hello, Azurite!"
    blob_client = blob_service.get_blob_client(
        container=container_name,
        blob=blob_name
    )
    blob_client.upload_blob(blob_data, overwrite=True)

    # Download blob
    download_stream = blob_client.download_blob()
    downloaded_data = download_stream.readall()
    print(f"Downloaded: {downloaded_data.decode()}")

    # List blobs in container
    container_client = blob_service.get_container_client(container_name)
    blobs = list(container_client.list_blobs())
    print(f"Blobs: {[blob.name for blob in blobs]}")
```

### Google Cloud Pub/Sub Emulator

```python
from testcontainers.google import GoogleCloudContainer
from google.cloud import pubsub_v1
import os

with GoogleCloudContainer() as gcp:
    # Point the Pub/Sub client library at the emulator
    pubsub_host = gcp.get_pubsub_emulator_host()
    os.environ["PUBSUB_EMULATOR_HOST"] = pubsub_host

    # Create publisher and subscriber clients
    publisher = pubsub_v1.PublisherClient()
    subscriber = pubsub_v1.SubscriberClient()

    # Create topic
    project_id = "test-project"
    topic_name = "test-topic"
    topic_path = publisher.topic_path(project_id, topic_name)
    publisher.create_topic(request={"name": topic_path})

    # Create subscription
    subscription_name = "test-subscription"
    subscription_path = subscriber.subscription_path(project_id, subscription_name)
    subscriber.create_subscription(
        request={"name": subscription_path, "topic": topic_path}
    )

    # Publish messages
    for i in range(5):
        message = f"Message {i}"
        future = publisher.publish(topic_path, message.encode())
        print(f"Published message ID: {future.result()}")

    # Pull messages
    def callback(message):
        print(f"Received: {message.data.decode()}")
        message.ack()

    streaming_pull_future = subscriber.subscribe(subscription_path, callback=callback)

    # Wait for messages (in a real scenario, you'd have a proper event loop)
    import time
    time.sleep(2)
    streaming_pull_future.cancel()
```

### Multi-Cloud Development Environment

```python
from testcontainers.localstack import LocalStackContainer
from testcontainers.azurite import AzuriteContainer
from testcontainers.google import GoogleCloudContainer
from testcontainers.core.network import Network

# Create shared network for cloud services
with Network() as network:
    # Configure each emulator before starting it — network aliases and
    # service selection must be applied prior to container startup.
    aws = LocalStackContainer().with_services("s3", "dynamodb", "lambda")
    aws.with_network(network).with_network_aliases("aws")

    azure = AzuriteContainer().with_network(network).with_network_aliases("azure")
    gcp = GoogleCloudContainer().with_network(network).with_network_aliases("gcp")

    # Start all three cloud service emulators
    with aws, azure, gcp:
        # Get service endpoints
        aws_url = aws.get_url()
        azure_conn = azure.get_connection_string()
        pubsub_host = gcp.get_pubsub_emulator_host()

        print(f"AWS LocalStack: {aws_url}")
        print(f"Azure Storage: Available")
        print(f"GCP Pub/Sub: {pubsub_host}")

        # Use services together in a multi-cloud application:
        # AWS for compute and storage, Azure for blob storage, GCP for messaging
```

### Lambda Function Testing with LocalStack

```python
from testcontainers.localstack import LocalStackContainer
import boto3
import json
import zipfile
import io

def create_lambda_zip():
    """Create a simple Lambda function ZIP."""
    # The handler source must import json itself — it runs in its own
    # Lambda runtime, not in this script's namespace.
    lambda_code = '''
import json

def lambda_handler(event, context):
    return {
        'statusCode': 200,
        'body': json.dumps(f'Hello from Lambda! Event: {event}')
    }
'''

    zip_buffer = io.BytesIO()
    with zipfile.ZipFile(zip_buffer, 'w', zipfile.ZIP_DEFLATED) as zip_file:
        zip_file.writestr('lambda_function.py', lambda_code)

    return zip_buffer.getvalue()

# Enable only the needed services, configured before the container starts
with LocalStackContainer().with_services("lambda", "iam") as localstack:
    # Get Lambda and IAM clients
    lambda_client = localstack.get_client("lambda")
    iam_client = localstack.get_client("iam")

    # Create IAM role for Lambda
    role_name = "lambda-role"
    trust_policy = {
        "Version": "2012-10-17",
        "Statement": [
            {
                "Effect": "Allow",
                "Principal": {"Service": "lambda.amazonaws.com"},
                "Action": "sts:AssumeRole"
            }
        ]
    }

    role_response = iam_client.create_role(
        RoleName=role_name,
        AssumeRolePolicyDocument=json.dumps(trust_policy)
    )
    role_arn = role_response["Role"]["Arn"]

    # Create Lambda function
    function_name = "test-function"
    lambda_client.create_function(
        FunctionName=function_name,
        Runtime="python3.9",
        Role=role_arn,
        Handler="lambda_function.lambda_handler",
        Code={"ZipFile": create_lambda_zip()},
        Description="Test Lambda function"
    )

    # Invoke Lambda function
    response = lambda_client.invoke(
        FunctionName=function_name,
        InvocationType="RequestResponse",
        Payload=json.dumps({"test": "data"})
    )

    result = json.loads(response["Payload"].read())
    print(f"Lambda response: {result}")
```

### Cloud Storage Integration Testing

```python
from testcontainers.localstack import LocalStackContainer
from testcontainers.azurite import AzuriteContainer
import boto3
from azure.storage.blob import BlobServiceClient
import json

class CloudStorageTest:
    def __init__(self):
        self.test_data = {"message": "Hello, Cloud Storage!", "timestamp": "2023-01-01T12:00:00Z"}

    def test_aws_s3(self, localstack):
        """Test AWS S3 storage."""
        s3_client = localstack.get_client("s3")

        # Create bucket and upload data
        bucket = "test-bucket"
        s3_client.create_bucket(Bucket=bucket)
        s3_client.put_object(
            Bucket=bucket,
            Key="test-data.json",
            Body=json.dumps(self.test_data),
            ContentType="application/json"
        )

        # Download and verify
        response = s3_client.get_object(Bucket=bucket, Key="test-data.json")
        downloaded_data = json.loads(response["Body"].read())
        assert downloaded_data == self.test_data
        print("AWS S3 test passed")

    def test_azure_blob(self, azurite):
        """Test Azure Blob storage."""
        blob_service = BlobServiceClient.from_connection_string(
            azurite.get_connection_string()
        )

        # Create container and upload data
        container = "test-container"
        blob_service.create_container(container)
        blob_client = blob_service.get_blob_client(container, "test-data.json")
        blob_client.upload_blob(
            json.dumps(self.test_data),
            blob_type="BlockBlob",
            overwrite=True
        )

        # Download and verify
        downloaded_data = json.loads(blob_client.download_blob().readall())
        assert downloaded_data == self.test_data
        print("Azure Blob test passed")

# Run tests with both cloud providers; restrict LocalStack to S3 before start
with LocalStackContainer().with_services("s3") as aws, AzuriteContainer() as azure:
    test_suite = CloudStorageTest()
    test_suite.test_aws_s3(aws)
    test_suite.test_azure_blob(azure)

    print("All cloud storage tests passed!")
```