A Python library that provides throwaway instances of anything that can run in a Docker container.
Containers for cloud service emulation and integration — including LocalStack for AWS services, Azure emulators, and Google Cloud Platform services — enabling local development and testing without any cloud dependencies.
LocalStack provides a local AWS cloud service emulator supporting S3, DynamoDB, Lambda, SQS, SNS, and many other AWS services for local development and testing.
class LocalStackContainer:
    """Throwaway container running LocalStack, a local AWS service emulator."""

    def __init__(
        self,
        image: str = "localstack/localstack:2.0.1",
        edge_port: int = 4566,
        region_name: Optional[str] = None,
        **kwargs: Any,
    ):
        """Create a LocalStack container definition.

        Args:
            image: LocalStack Docker image to run.
            edge_port: Port of the LocalStack edge endpoint (default 4566).
            region_name: AWS region name (default us-east-1).
            **kwargs: Additional options for the underlying container.
        """

    def with_services(self, *services: str) -> "LocalStackContainer":
        """Restrict the emulator to the given AWS services.

        Args:
            *services: AWS service identifiers (s3, dynamodb, lambda, sqs, etc.).

        Returns:
            Self for method chaining.
        """

    def get_url(self) -> str:
        """Return the LocalStack edge endpoint URL as a string."""

    def get_client(self, name: str, **kwargs: Any):
        """Build a boto3 client pointed at this LocalStack instance.

        Args:
            name: AWS service name (s3, dynamodb, etc.).
            **kwargs: Additional boto3 client arguments.

        Returns:
            A configured boto3 client instance.
        """

# Azure service emulators for local development, including Azurite for Azure
# Storage and the Cosmos DB emulator.
class AzuriteContainer:
    """Throwaway container running Azurite, the Azure Storage emulator."""

    def __init__(
        self,
        image: str = "mcr.microsoft.com/azure-storage/azurite:latest",
        **kwargs: Any,
    ):
        """Create an Azurite container definition.

        Args:
            image: Azurite Docker image to run.
            **kwargs: Additional options for the underlying container.
        """

    def get_connection_string(self) -> str:
        """Return an Azure Storage connection string for the emulator."""
class CosmosDbContainer:
    """Throwaway container running the Azure Cosmos DB emulator."""

    def __init__(
        self,
        image: str = "mcr.microsoft.com/cosmosdb/linux/azure-cosmos-emulator:latest",
        **kwargs: Any,
    ):
        """Create a Cosmos DB emulator container definition.

        Args:
            image: Cosmos DB emulator Docker image to run.
            **kwargs: Additional options for the underlying container.
        """

    def get_connection_string(self) -> str:
        """Return a Cosmos DB connection string for the emulator."""

# Google Cloud Platform service emulators for local development and testing.
class GoogleCloudContainer:
    """Throwaway container exposing Google Cloud service emulators."""

    def __init__(
        self,
        image: str = "gcr.io/google.com/cloudsdktool/cloud-sdk:latest",
        **kwargs: Any,
    ):
        """Create a Google Cloud services container definition.

        Args:
            image: Google Cloud SDK Docker image to run.
            **kwargs: Additional options for the underlying container.
        """

    def get_pubsub_emulator_host(self) -> str:
        """Return the Pub/Sub emulator host as a string."""

    def get_datastore_emulator_host(self) -> str:
        """Return the Datastore emulator host as a string."""

from testcontainers.localstack import LocalStackContainer
import boto3

# Spin up LocalStack restricted to the services this example exercises.
with LocalStackContainer() as localstack:
    localstack.with_services("s3", "dynamodb", "sqs", "sns")

    # Clients pre-configured to talk to the emulator endpoint.
    s3_client = localstack.get_client("s3")
    dynamodb_client = localstack.get_client("dynamodb")
    sqs_client = localstack.get_client("sqs")

    # --- S3: create a bucket, upload an object, list its contents ---
    bucket_name = "test-bucket"
    s3_client.create_bucket(Bucket=bucket_name)
    s3_client.put_object(
        Bucket=bucket_name,
        Key="test-file.txt",
        Body=b"Hello, LocalStack S3!",
    )
    response = s3_client.list_objects_v2(Bucket=bucket_name)
    print(f"S3 objects: {[obj['Key'] for obj in response.get('Contents', [])]}")

    # --- DynamoDB: create a table, write an item, read it back ---
    table_name = "test-table"
    dynamodb_client.create_table(
        TableName=table_name,
        KeySchema=[{"AttributeName": "id", "KeyType": "HASH"}],
        AttributeDefinitions=[{"AttributeName": "id", "AttributeType": "S"}],
        BillingMode="PAY_PER_REQUEST",
    )
    dynamodb_client.put_item(
        TableName=table_name,
        Item={
            "id": {"S": "test-id"},
            "name": {"S": "Test Item"},
            "value": {"N": "42"},
        },
    )
    response = dynamodb_client.get_item(
        TableName=table_name,
        Key={"id": {"S": "test-id"}},
    )
    print(f"DynamoDB item: {response['Item']}")

    # --- SQS: create a queue, send a message, receive it ---
    queue_name = "test-queue"
    queue_url = sqs_client.create_queue(QueueName=queue_name)["QueueUrl"]
    sqs_client.send_message(
        QueueUrl=queue_url,
        MessageBody="Hello, LocalStack SQS!",
    )
    messages = sqs_client.receive_message(QueueUrl=queue_url)
    for message in messages.get("Messages", []):
        print(f"SQS message: {message['Body']}")

from testcontainers.azurite import AzuriteContainer
from azure.storage.blob import BlobServiceClient

with AzuriteContainer() as azurite:
    # Connect a blob service client to the emulator.
    connection_string = azurite.get_connection_string()
    blob_service = BlobServiceClient.from_connection_string(connection_string)

    container_name = "test-container"
    blob_service.create_container(container_name)

    # Round-trip a small blob through the emulator.
    blob_name = "test-blob.txt"
    blob_data = b"Hello, Azurite!"
    blob_client = blob_service.get_blob_client(
        container=container_name,
        blob=blob_name,
    )
    blob_client.upload_blob(blob_data, overwrite=True)

    downloaded_data = blob_client.download_blob().readall()
    print(f"Downloaded: {downloaded_data.decode()}")

    # Enumerate the container's blobs.
    container_client = blob_service.get_container_client(container_name)
    blobs = list(container_client.list_blobs())
    print(f"Blobs: {[blob.name for blob in blobs]}")

from testcontainers.google import GoogleCloudContainer
from google.cloud import pubsub_v1
import os

with GoogleCloudContainer() as gcp:
    # Point the client libraries at the emulator via the standard env var.
    pubsub_host = gcp.get_pubsub_emulator_host()
    os.environ["PUBSUB_EMULATOR_HOST"] = pubsub_host

    publisher = pubsub_v1.PublisherClient()
    subscriber = pubsub_v1.SubscriberClient()

    # Topic and subscription setup.
    project_id = "test-project"
    topic_name = "test-topic"
    topic_path = publisher.topic_path(project_id, topic_name)
    publisher.create_topic(request={"name": topic_path})

    subscription_name = "test-subscription"
    subscription_path = subscriber.subscription_path(project_id, subscription_name)
    subscriber.create_subscription(
        request={"name": subscription_path, "topic": topic_path}
    )

    # Publish a handful of messages.
    for i in range(5):
        message = f"Message {i}"
        future = publisher.publish(topic_path, message.encode())
        print(f"Published message ID: {future.result()}")

    # Pull them back asynchronously.
    def callback(message):
        print(f"Received: {message.data.decode()}")
        message.ack()

    streaming_pull_future = subscriber.subscribe(subscription_path, callback=callback)

    # Wait briefly for delivery (a real application would run a proper event loop).
    import time
    time.sleep(2)
    streaming_pull_future.cancel()

from testcontainers.localstack import LocalStackContainer
from testcontainers.azurite import AzuriteContainer
from testcontainers.google import GoogleCloudContainer
from testcontainers.core.network import Network

# Shared Docker network so the emulators can reach each other by alias.
with Network() as network:
    # Start one emulator per cloud provider.
    with LocalStackContainer() as aws, \
            AzuriteContainer() as azure, \
            GoogleCloudContainer() as gcp:
        # AWS emulator configuration.
        aws.with_services("s3", "dynamodb", "lambda")
        aws.with_network(network).with_network_aliases("aws")
        # Azure emulator configuration.
        azure.with_network(network).with_network_aliases("azure")
        # GCP emulator configuration.
        gcp.with_network(network).with_network_aliases("gcp")

        # Collect the endpoints for each provider.
        aws_url = aws.get_url()
        azure_conn = azure.get_connection_string()
        pubsub_host = gcp.get_pubsub_emulator_host()

        print(f"AWS LocalStack: {aws_url}")
        print(f"Azure Storage: Available")
        print(f"GCP Pub/Sub: {pubsub_host}")

        # Use the services together in a multi-cloud application:
        # AWS for compute and storage, Azure for blob storage, GCP for messaging.

from testcontainers.localstack import LocalStackContainer
import boto3
import json
import zipfile
import io
def create_lambda_zip():
    """Build an in-memory ZIP archive containing a minimal Lambda function.

    The archive holds a single `lambda_function.py` with a `lambda_handler`
    entry point, suitable for the `Code={"ZipFile": ...}` argument of
    `create_function`.

    Returns:
        The ZIP archive contents as bytes.
    """
    # The handler source must be valid stand-alone Python: it imports json
    # itself (the deployed module has no other imports in scope).
    lambda_code = '''
import json

def lambda_handler(event, context):
    return {
        'statusCode': 200,
        'body': json.dumps(f'Hello from Lambda! Event: {event}')
    }
'''
    zip_buffer = io.BytesIO()
    with zipfile.ZipFile(zip_buffer, 'w', zipfile.ZIP_DEFLATED) as zip_file:
        zip_file.writestr('lambda_function.py', lambda_code)
    return zip_buffer.getvalue()
with LocalStackContainer() as localstack:
    localstack.with_services("lambda", "iam")

    lambda_client = localstack.get_client("lambda")
    iam_client = localstack.get_client("iam")

    # IAM role the Lambda service assumes when running the function.
    role_name = "lambda-role"
    trust_policy = {
        "Version": "2012-10-17",
        "Statement": [
            {
                "Effect": "Allow",
                "Principal": {"Service": "lambda.amazonaws.com"},
                "Action": "sts:AssumeRole",
            }
        ],
    }
    role_response = iam_client.create_role(
        RoleName=role_name,
        AssumeRolePolicyDocument=json.dumps(trust_policy),
    )
    role_arn = role_response["Role"]["Arn"]

    # Register the function from the in-memory ZIP archive.
    function_name = "test-function"
    lambda_client.create_function(
        FunctionName=function_name,
        Runtime="python3.9",
        Role=role_arn,
        Handler="lambda_function.lambda_handler",
        Code={"ZipFile": create_lambda_zip()},
        Description="Test Lambda function",
    )

    # Invoke synchronously and decode the response payload.
    response = lambda_client.invoke(
        FunctionName=function_name,
        InvocationType="RequestResponse",
        Payload=json.dumps({"test": "data"}),
    )
    result = json.loads(response["Payload"].read())
    print(f"Lambda response: {result}")

from testcontainers.localstack import LocalStackContainer
from testcontainers.azurite import AzuriteContainer
import boto3
from azure.storage.blob import BlobServiceClient
import json
class CloudStorageTest:
    """Round-trip the same JSON payload through AWS S3 and Azure Blob storage."""

    def __init__(self):
        # Payload written to and read back from each provider.
        self.test_data = {"message": "Hello, Cloud Storage!", "timestamp": "2023-01-01T12:00:00Z"}

    def test_aws_s3(self, localstack):
        """Upload the payload to LocalStack S3 and verify it reads back intact."""
        s3_client = localstack.get_client("s3")

        # Create a bucket and store the payload as JSON.
        bucket = "test-bucket"
        s3_client.create_bucket(Bucket=bucket)
        s3_client.put_object(
            Bucket=bucket,
            Key="test-data.json",
            Body=json.dumps(self.test_data),
            ContentType="application/json",
        )

        # Read it back and compare to the original.
        response = s3_client.get_object(Bucket=bucket, Key="test-data.json")
        downloaded_data = json.loads(response["Body"].read())
        assert downloaded_data == self.test_data
        print("AWS S3 test passed")

    def test_azure_blob(self, azurite):
        """Upload the payload to Azurite Blob storage and verify it reads back intact."""
        blob_service = BlobServiceClient.from_connection_string(
            azurite.get_connection_string()
        )

        # Create a container and store the payload as a block blob.
        container = "test-container"
        blob_service.create_container(container)
        blob_client = blob_service.get_blob_client(container, "test-data.json")
        blob_client.upload_blob(
            json.dumps(self.test_data),
            blob_type="BlockBlob",
            overwrite=True,
        )

        # Read it back and compare to the original.
        downloaded_data = json.loads(blob_client.download_blob().readall())
        assert downloaded_data == self.test_data
        print("Azure Blob test passed")
# Exercise the storage tests against both cloud emulators.
with LocalStackContainer() as aws, AzuriteContainer() as azure:
    aws.with_services("s3")
    test_suite = CloudStorageTest()
    test_suite.test_aws_s3(aws)
    test_suite.test_azure_blob(azure)
    print("All cloud storage tests passed!")

# Install with Tessl CLI
npx tessl i tessl/pypi-testcontainers