# Infrastructure Services

Modal provides essential infrastructure services for networking, security, and resource management. These services enable secure communication, secret management, network connectivity, and fine-grained control over where functions execute.

## Capabilities

### Secret - Secure Environment Variable Management

Secure storage and injection of environment variables, API keys, database credentials, and other sensitive data into Modal functions.

```python { .api }
class Secret:
    @classmethod
    def from_name(cls, label: str, *, environment_name: Optional[str] = None) -> "Secret":
        """Load a Secret by its unique name"""

    @classmethod
    def from_dict(
        cls,
        env_dict: dict[str, Optional[str]],
        *,
        name: Optional[str] = None
    ) -> "Secret":
        """Create a Secret from a dictionary of environment variables"""

    @classmethod
    def from_dotenv(cls, path: Optional[str] = None) -> "Secret":
        """Create a Secret from a .env file"""

class SecretInfo:
    """Information about a Secret object"""
    name: Optional[str]
    created_at: datetime
    created_by: Optional[str]
```

#### Usage Examples

```python
import modal

app = modal.App()

# Create secret from dictionary
db_secret = modal.Secret.from_dict({
    "DATABASE_URL": "postgresql://user:pass@host:5432/db",
    "DATABASE_PASSWORD": "secure_password",
    "API_KEY": "your_api_key_here"
})

# Load existing secret by name
api_secret = modal.Secret.from_name("openai-api-key")

@app.function(secrets=[db_secret, api_secret])
def database_operation():
    import os

    # Access secret values as environment variables
    db_url = os.environ["DATABASE_URL"]
    api_key = os.environ["API_KEY"]

    # Use secrets in your code
    connection = create_database_connection(db_url)
    client = APIClient(api_key)

    return process_data(connection, client)

# Create secret from .env file
@app.local_entrypoint()
def main():
    # Load secrets from local .env file
    local_secrets = modal.Secret.from_dotenv(".env")

    # Deploy function with secrets
    result = database_operation.remote()
    print(result)
```

### Proxy - Static Outbound IP Address

Proxy objects provide Modal containers with static outbound IP addresses for connecting to services that require IP whitelisting.

```python { .api }
class Proxy:
    @classmethod
    def from_name(cls, name: str, *, environment_name: Optional[str] = None) -> "Proxy":
        """Reference a Proxy by its name (must be provisioned via Dashboard)"""
```

#### Usage Examples

```python
import modal

app = modal.App()

# Reference proxy created in Modal Dashboard
proxy = modal.Proxy.from_name("production-proxy")

@app.function(proxy=proxy)
def connect_to_whitelisted_service():
    import requests

    # This request will come from the proxy's static IP
    response = requests.get("https://api.partner.com/data")

    # The partner service sees requests from the proxy's static IP
    # which can be added to their IP whitelist
    return response.json()

# Multiple functions can share the same proxy
@app.function(proxy=proxy)
def another_whitelisted_operation():
    # Also uses the same static IP
    return make_api_call()
```

### Tunnel - Network Tunnel for Remote Services

Network tunnels enable secure connections between Modal functions and remote services, useful for development and accessing services behind firewalls.

```python { .api }
class Tunnel:
    @classmethod
    def create(
        cls,
        *,
        host: str = "0.0.0.0",
        port: int = 8000,
        timeout: Optional[int] = None
    ) -> "Tunnel":
        """Create a network tunnel"""

def forward(port: int, *, host: str = "localhost") -> str:
    """Forward network traffic through tunnel (utility function)"""
```

#### Usage Examples

```python
import modal

app = modal.App()

@app.function()
def create_development_tunnel():
    # Create tunnel for development access
    tunnel = modal.Tunnel.create(port=8000)

    # Start a simple web server
    from http.server import HTTPServer, SimpleHTTPRequestHandler

    server = HTTPServer(("0.0.0.0", 8000), SimpleHTTPRequestHandler)
    print(f"Server accessible via tunnel: {tunnel.url}")

    # Server runs and is accessible through tunnel
    server.serve_forever()

@app.function()
def connect_through_tunnel():
    # Forward local service through tunnel
    tunnel_url = modal.forward(5432, host="database.internal")

    # Connect to database through tunnel
    connection = create_connection(tunnel_url)
    return query_database(connection)
```

### SchedulerPlacement - Control Function Placement

Control where functions are scheduled to run, enabling optimization for latency, compliance, or resource availability.

```python { .api }
class SchedulerPlacement:
    @classmethod
    def zone(cls, zone: str) -> "SchedulerPlacement":
        """Set preferred zone/region for function execution"""
```

#### Usage Examples

```python
import modal

app = modal.App()

# Place functions in specific zones for latency optimization
us_east_placement = modal.SchedulerPlacement.zone("us-east-1")
eu_placement = modal.SchedulerPlacement.zone("eu-west-1")

@app.function(scheduler_placement=us_east_placement)
def process_us_data():
    # This function will preferentially run in us-east-1
    # for low latency to US-based services
    return fetch_and_process_us_data()

@app.function(scheduler_placement=eu_placement)
def process_eu_data():
    # This function will preferentially run in eu-west-1
    # for GDPR compliance and low latency to EU services
    return fetch_and_process_eu_data()

@app.function()
def coordinator():
    # Start processing in appropriate regions
    us_result = process_us_data.spawn()
    eu_result = process_eu_data.spawn()

    # Collect results
    return {
        "us": us_result.get(),
        "eu": eu_result.get()
    }
```

## Infrastructure Patterns

### Multi-Environment Secret Management

```python
import modal

app = modal.App()

# Different secrets for different environments
dev_secrets = modal.Secret.from_dict({
    "DATABASE_URL": "postgresql://localhost:5432/dev_db",
    "API_ENDPOINT": "https://api-dev.example.com",
    "DEBUG": "true"
})

prod_secrets = modal.Secret.from_name("production-secrets")

# Environment-specific function deployment
@app.function(secrets=[dev_secrets])
def dev_function():
    return process_with_dev_config()

@app.function(secrets=[prod_secrets])
def prod_function():
    return process_with_prod_config()
```

### Secure Multi-Service Integration

```python
import modal

app = modal.App()

# Multiple secrets for different services
database_secret = modal.Secret.from_name("postgres-credentials")
api_secret = modal.Secret.from_name("external-api-keys")
cloud_secret = modal.Secret.from_name("aws-credentials")

# Static IP for whitelisted services
proxy = modal.Proxy.from_name("main-proxy")

@app.function(
    secrets=[database_secret, api_secret, cloud_secret],
    proxy=proxy
)
def integrated_data_pipeline():
    import os
    import boto3
    import psycopg2
    import requests

    # Database connection using secret
    db_conn = psycopg2.connect(os.environ["DATABASE_URL"])

    # AWS client using secret
    s3_client = boto3.client(
        's3',
        aws_access_key_id=os.environ["AWS_ACCESS_KEY_ID"],
        aws_secret_access_key=os.environ["AWS_SECRET_ACCESS_KEY"]
    )

    # API call using secret and proxy (for static IP)
    headers = {"Authorization": f"Bearer {os.environ['API_TOKEN']}"}
    response = requests.get("https://api.partner.com/data", headers=headers)

    # Process data across all services
    data = response.json()
    processed = transform_data(data)

    # Store in database
    store_in_database(db_conn, processed)

    # Backup to S3
    backup_to_s3(s3_client, processed)

    return {"status": "success", "records": len(processed)}
```

### Geographically Distributed Processing

```python
import modal

app = modal.App()

# Regional placement for compliance and latency
us_placement = modal.SchedulerPlacement.zone("us-east-1")
eu_placement = modal.SchedulerPlacement.zone("eu-west-1")
asia_placement = modal.SchedulerPlacement.zone("ap-southeast-1")

# Regional secrets for compliance
us_secrets = modal.Secret.from_name("us-compliance-keys")
eu_secrets = modal.Secret.from_name("eu-gdpr-keys")
asia_secrets = modal.Secret.from_name("asia-keys")

@app.function(
    scheduler_placement=us_placement,
    secrets=[us_secrets]
)
def process_us_customer_data(customer_ids: list[str]):
    # Process US customer data in US region for compliance
    return [process_customer(id) for id in customer_ids]

@app.function(
    scheduler_placement=eu_placement,
    secrets=[eu_secrets]
)
def process_eu_customer_data(customer_ids: list[str]):
    # Process EU customer data in EU region for GDPR compliance
    return [process_customer_gdpr(id) for id in customer_ids]

@app.function()
def global_customer_processing():
    # Route customers to appropriate regional processors
    us_customers = get_us_customers()
    eu_customers = get_eu_customers()

    # Process in appropriate regions simultaneously
    us_task = process_us_customer_data.spawn(us_customers)
    eu_task = process_eu_customer_data.spawn(eu_customers)

    # Collect results
    us_results = us_task.get()
    eu_results = eu_task.get()

    return combine_regional_results(us_results, eu_results)
```

### Development to Production Pipeline

```python
import os

import modal

# Development setup
dev_app = modal.App("data-pipeline-dev")
dev_secrets = modal.Secret.from_dict({
    "DATABASE_URL": "postgresql://localhost:5432/dev",
    "API_KEY": "dev_key_123",
    "S3_BUCKET": "dev-data-bucket"
})

# Production setup
prod_app = modal.App("data-pipeline-prod")
prod_secrets = modal.Secret.from_name("production-secrets")
prod_proxy = modal.Proxy.from_name("production-proxy")

# Shared function logic with environment-specific configuration
def create_pipeline_function(app, secrets, proxy=None):
    @app.function(secrets=secrets, proxy=proxy)
    def data_pipeline():
        import os

        # Environment variables automatically injected based on secrets
        db_url = os.environ["DATABASE_URL"]
        api_key = os.environ["API_KEY"]
        bucket = os.environ["S3_BUCKET"]

        # Same processing logic, different configurations
        return process_data(db_url, api_key, bucket)

    return data_pipeline

# Create environment-specific functions
dev_pipeline = create_pipeline_function(dev_app, [dev_secrets])
prod_pipeline = create_pipeline_function(prod_app, [prod_secrets], prod_proxy)

# Deploy based on environment
if os.getenv("ENVIRONMENT") == "production":
    prod_pipeline.remote()
else:
    dev_pipeline.remote()
```