Snowflake SQLAlchemy Dialect providing comprehensive database connectivity and ORM support for Snowflake cloud data warehouse
---
Native integration with AWS S3 and Azure Blob Storage including credential management, encryption support, and stage management for external data access.
Comprehensive AWS S3 bucket support with credential management and encryption options.
from snowflake.sqlalchemy import AWSBucket
class AWSBucket:
    """AWS S3 bucket descriptor with credentials and encryption.

    Minimal reference implementation of the documented fluent API: the
    ``credentials`` and ``encryption_*`` setters record their options and
    return ``self`` so calls can be chained, as the docstrings promise.
    NOTE(review): internal attribute names (``credentials_used``,
    ``encryption_used``) follow snowflake-sqlalchemy conventions — confirm
    against the installed package before relying on them.
    """

    def __init__(self, bucket_name: str, path: str = ""):
        """
        Create AWS bucket descriptor.
        Args:
        bucket_name: S3 bucket name
        path: Optional path within bucket
        """
        self.bucket_name = bucket_name
        self.path = path
        # Populated by the fluent setters below; None until configured.
        self.credentials_used = None
        self.encryption_used = None

    @classmethod
    def from_uri(cls, uri: str):
        """
        Create bucket from S3 URI.
        Args:
        uri: S3 URI (s3://bucket/path)
        Returns:
        AWSBucket: Bucket instance
        Raises:
        ValueError: If the URI does not use the s3:// scheme.
        """
        prefix = "s3://"
        if not uri.startswith(prefix):
            raise ValueError(f"Invalid S3 URI: {uri}")
        # Everything up to the first '/' is the bucket; the rest is the path.
        bucket, _, path = uri[len(prefix):].partition("/")
        return cls(bucket, path)

    def credentials(self, aws_role=None, aws_key_id=None,
                    aws_secret_key=None, aws_token=None):
        """
        Set AWS credentials.
        Args:
        aws_role: IAM role ARN
        aws_key_id: AWS access key ID
        aws_secret_key: AWS secret access key
        aws_token: AWS session token
        Returns:
        AWSBucket: Self for method chaining
        """
        self.credentials_used = {
            "aws_role": aws_role,
            "aws_key_id": aws_key_id,
            "aws_secret_key": aws_secret_key,
            "aws_token": aws_token,
        }
        return self

    def encryption_aws_cse(self, master_key: str):
        """
        Set AWS client-side encryption.
        Args:
        master_key: Encryption master key
        Returns:
        AWSBucket: Self for method chaining
        """
        self.encryption_used = {"type": "AWS_CSE", "master_key": master_key}
        return self

    def encryption_aws_sse_s3(self):
        """
        Set AWS server-side encryption with S3 keys.
        Returns:
        AWSBucket: Self for method chaining
        """
        self.encryption_used = {"type": "AWS_SSE_S3"}
        return self

    def encryption_aws_sse_kms(self, kms_key_id: str = None):
        """
        Set AWS server-side encryption with KMS.
        Args:
        kms_key_id: Optional KMS key ID
        Returns:
        AWSBucket: Self for method chaining
        """
        self.encryption_used = {"type": "AWS_SSE_KMS"}
        if kms_key_id is not None:
            self.encryption_used["kms_key_id"] = kms_key_id
        return self


# Azure container support with SAS token authentication and encryption.
from snowflake.sqlalchemy import AzureContainer
class AzureContainer:
    """Azure container descriptor with SAS token and encryption.

    Minimal reference implementation of the documented fluent API:
    ``credentials`` and ``encryption_azure_cse`` record their options and
    return ``self`` for chaining. NOTE(review): internal attribute names
    mirror the AWSBucket convention — confirm against the installed package.
    """

    def __init__(self, account_name: str, container_name: str, path: str = ""):
        """
        Create Azure container descriptor.
        Args:
        account_name: Azure storage account name
        container_name: Container name
        path: Optional path within container
        """
        self.account_name = account_name
        self.container_name = container_name
        self.path = path
        # Populated by the fluent setters below; None until configured.
        self.credentials_used = None
        self.encryption_used = None

    @classmethod
    def from_uri(cls, uri: str):
        """
        Create container from Azure URI.
        Args:
        uri: Azure URI (azure://account.blob.core.windows.net/container/path)
        Returns:
        AzureContainer: Container instance
        Raises:
        ValueError: If the URI does not use the azure:// scheme.
        """
        prefix = "azure://"
        if not uri.startswith(prefix):
            raise ValueError(f"Invalid Azure URI: {uri}")
        host, _, remainder = uri[len(prefix):].partition("/")
        # The storage account is the first label of the blob endpoint host.
        account = host.partition(".")[0]
        container, _, path = remainder.partition("/")
        return cls(account, container, path)

    def credentials(self, azure_sas_token: str):
        """
        Set Azure SAS token.
        Args:
        azure_sas_token: Shared Access Signature token
        Returns:
        AzureContainer: Self for method chaining
        """
        self.credentials_used = {"azure_sas_token": azure_sas_token}
        return self

    def encryption_azure_cse(self, master_key: str):
        """
        Set Azure client-side encryption.
        Args:
        master_key: Encryption master key
        Returns:
        AzureContainer: Self for method chaining
        """
        self.encryption_used = {"type": "AZURE_CSE", "master_key": master_key}
        return self


# External stage descriptors for accessing cloud storage locations.
from snowflake.sqlalchemy import ExternalStage
class ExternalStage:
    """External stage descriptor for cloud storage access.

    Minimal reference implementation of the documented API. NOTE(review):
    attribute layout is inferred from the docstrings — confirm against the
    installed snowflake-sqlalchemy package.
    """

    def __init__(self, stage_name: str, path: str = ""):
        """
        Create external stage descriptor.
        Args:
        stage_name: Stage name
        path: Optional path within stage
        """
        self.stage_name = stage_name
        self.path = path
        # Optional file format; set by from_parent_stage when provided.
        self.file_format = None

    @classmethod
    def from_parent_stage(cls, parent_stage, path: str, file_format=None):
        """
        Create stage from parent stage.
        Args:
        parent_stage: Parent stage (ExternalStage instance or stage name string)
        path: Path within parent stage
        file_format: Optional file format
        Returns:
        ExternalStage: Stage instance
        """
        # Accept either another ExternalStage or a bare stage-name string.
        name = getattr(parent_stage, "stage_name", parent_stage)
        base = getattr(parent_stage, "path", "")
        if base and not base.endswith("/"):
            base += "/"
        stage = cls(name, base + path)
        stage.file_format = file_format
        return stage


# Example usage below requires:
# from snowflake.sqlalchemy import AWSBucket, CopyIntoStorage
from snowflake.sqlalchemy import AWSBucket, CopyIntoStorage

# Create S3 bucket with IAM role
s3_bucket = (
    AWSBucket
    .from_uri('s3://my-data-bucket/sales/')
    .credentials(aws_role='arn:aws:iam::123456789012:role/SnowflakeRole')
)

# Copy from S3 with server-side encryption (KMS-managed key)
s3_encrypted = (
    AWSBucket('secure-bucket', 'sensitive-data/')
    .credentials(aws_key_id='AKIAIOSFODNN7EXAMPLE',
                 aws_secret_key='wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY')
    .encryption_aws_sse_kms('arn:aws:kms:us-east-1:123456789012:key/12345678-1234-1234-1234-123456789012')
)

# Use in COPY statement (sales_table and csv_format are defined elsewhere)
copy_from_s3 = CopyIntoStorage(
    table=sales_table,
    stage_location=s3_bucket,
    file_format=csv_format
)

# S3 with client-side encryption
s3_cse = (
    AWSBucket('encrypted-bucket')
    .credentials(aws_key_id='AKIAIOSFODNN7EXAMPLE',
                 aws_secret_key='wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY')
    .encryption_aws_cse('my-master-encryption-key')
)

# force(True) reloads files even if Snowflake recorded them as already loaded.
copy_encrypted = CopyIntoStorage(
    table=sensitive_table,
    stage_location=s3_cse
).force(True)

from snowflake.sqlalchemy import AzureContainer
# Create Azure container with SAS token
azure_container = (
    AzureContainer
    .from_uri('azure://myaccount.blob.core.windows.net/data/exports/')
    .credentials(azure_sas_token='sp=r&st=2023-01-01T00:00:00Z&se=2024-01-01T00:00:00Z&spr=https&sv=2022-11-02&sr=c&sig=signature')
)

# Copy from Azure with client-side encryption
azure_encrypted = (
    AzureContainer('myaccount', 'secure-data')
    .credentials(azure_sas_token='sas-token-here')
    .encryption_azure_cse('azure-master-key')
)

# customer_table and json_format are defined elsewhere.
copy_from_azure = CopyIntoStorage(
    table=customer_table,
    stage_location=azure_container,
    file_format=json_format
)

from snowflake.sqlalchemy import ExternalStage
# Use existing external stage
external_stage = ExternalStage('MY_EXTERNAL_STAGE', 'monthly_data/')

# Create from parent stage
child_stage = ExternalStage.from_parent_stage(
    parent_stage='PARENT_STAGE',
    path='2024/01/',
    file_format='CSV_FORMAT'
)

# Copy from external stage. Raw string avoids the invalid '\.' escape
# (a SyntaxWarning on modern Python); the pattern itself is unchanged.
copy_from_stage = CopyIntoStorage(
    table=monthly_reports,
    stage_location=external_stage
).pattern(r'.*report.*\.csv')

from snowflake.sqlalchemy import CreateStage
# Create AWS stage
create_aws_stage = CreateStage(
    name='AWS_DATA_STAGE',
    url='s3://my-bucket/data/',
    credentials={
        'aws_key_id': 'AKIAIOSFODNN7EXAMPLE',
        'aws_secret_key': 'wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY'
    },
    encryption={
        'type': 'AWS_SSE_KMS',
        'kms_key_id': 'arn:aws:kms:us-east-1:123456789012:key/12345678-1234-1234-1234-123456789012'
    }
)

# Create Azure stage
create_azure_stage = CreateStage(
    name='AZURE_DATA_STAGE',
    url='azure://myaccount.blob.core.windows.net/data/',
    credentials={
        'azure_sas_token': 'sp=r&st=2023-01-01T00:00:00Z&se=2024-01-01T00:00:00Z&spr=https&sv=2022-11-02&sr=c&sig=signature'
    }
)

# NOTE(review): engine.execute() is SQLAlchemy 1.x API (removed in 2.0);
# under 2.0 use `with engine.connect() as conn: conn.execute(...)` — confirm
# the targeted SQLAlchemy version.
engine.execute(create_aws_stage)
engine.execute(create_azure_stage)

from snowflake.sqlalchemy import (
    AWSBucket, CSVFormatter, CopyIntoStorage, CreateFileFormat
)
# 1. Create file format
csv_format = CreateFileFormat(
    name='CLOUD_CSV_FORMAT',
    format_type='CSV',
    field_delimiter=',',
    skip_header=1,
    compression='GZIP'
)

# 2. Set up S3 bucket with server-side (S3-managed key) encryption
s3_location = (
    AWSBucket('data-lake-bucket', 'processed-data/')
    .credentials(aws_role='arn:aws:iam::123456789012:role/SnowflakeDataRole')
    .encryption_aws_sse_s3()
)

# 3. Copy data from S3; force(False) skips files already loaded.
copy_operation = (
    CopyIntoStorage(
        table=target_table,
        stage_location=s3_location,
        file_format='CLOUD_CSV_FORMAT'
    )
    .files(['data_2024_01.csv.gz', 'data_2024_02.csv.gz'])
    .force(False)
)

# Execute operations (engine and target_table are defined elsewhere)
engine.execute(csv_format)
engine.execute(copy_operation)

# Install with Tessl CLI
npx tessl i tessl/pypi-snowflake-sqlalchemy