# Application Framework (eapp)

Absl flags and application utilities for building command-line applications with dataclass-based flag parsing, enhanced logging, and streamlined application development workflows.

## Capabilities

### Dataclass-Based Flags

Create command-line flag parsers from dataclasses for type-safe argument handling.
```python { .api }
def make_flags_parser(
    dataclass_cls: type,
    prefix: str = "",
    exclude_fields: set[str] | None = None,
) -> Callable[[list[str]], Any]:
    """
    Create a flags parser from a dataclass definition.

    Args:
        dataclass_cls: Dataclass to create flags from
        prefix: Prefix for flag names (e.g., "model_" produces flags such as --model_learning_rate)
        exclude_fields: Set of field names to exclude from flags

    Returns:
        Parser function that takes command-line arguments and returns a
        populated dataclass instance.

    Example:
        @dataclass
        class Config:
            learning_rate: float = 0.01
            batch_size: int = 32

        parser = make_flags_parser(Config)
        config = parser(sys.argv[1:])  # Parse from command line
    """
```
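In practice, the returned parser is usually wired into `absl.app.run` through its `flags_parser` argument, so `main` receives the populated dataclass directly. A minimal sketch of that wiring (the field names here are illustrative):

```python
from dataclasses import dataclass

from absl import app
from etils import eapp

@dataclass
class Args:
    learning_rate: float = 0.01
    batch_size: int = 32

def main(args: Args) -> None:
    # `args` arrives as an already-populated dataclass instance.
    print(args.learning_rate, args.batch_size)

if __name__ == "__main__":
    # absl parses the flags and forwards the resulting dataclass to main.
    app.run(main, flags_parser=eapp.make_flags_parser(Args))
```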
### Enhanced Logging

Improved logging configuration with better defaults for both development and production.

```python { .api }
def better_logging(
    level: str = 'INFO',
    format_string: str | None = None,
    include_timestamp: bool = True,
    include_level: bool = True,
    include_module: bool = False,
    colored_output: bool = True,
) -> None:
    """
    Configure enhanced logging with better defaults.

    Args:
        level: Logging level ('DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL')
        format_string: Custom format string for log messages
        include_timestamp: Include a timestamp in log messages
        include_level: Include the log level in messages
        include_module: Include the module name in messages
        colored_output: Use colored output for different log levels

    Example:
        better_logging('DEBUG', colored_output=True)
        logging.info("This will be nicely formatted")
    """
```
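Assuming `better_logging` configures the root logger (as typical logging setups do), per-module loggers created with `logging.getLogger` pick up the same formatting. A small sketch:

```python
import logging

from etils import eapp

# Configure once, early in the program.
eapp.better_logging('INFO')

# Module-level loggers inherit the configuration from the root logger.
logger = logging.getLogger(__name__)
logger.info("Message emitted with the enhanced formatting")
```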
## Usage Examples

### Basic Dataclass Flags

```python
from etils import eapp
from dataclasses import dataclass
from typing import Optional
import sys

@dataclass
class TrainingConfig:
    """Configuration for model training."""
    learning_rate: float = 0.001
    batch_size: int = 32
    num_epochs: int = 100
    model_name: str = "resnet50"
    data_path: str = "/data/train"
    output_dir: str = "./outputs"
    use_gpu: bool = True
    seed: Optional[int] = None

# Create parser from dataclass
parser = eapp.make_flags_parser(TrainingConfig)

# Parse command line arguments, e.g.:
# python script.py --learning_rate=0.01 --batch_size=64 --num_epochs=50
config = parser(sys.argv[1:])

print(f"Training with learning rate: {config.learning_rate}")
print(f"Batch size: {config.batch_size}")
print(f"Number of epochs: {config.num_epochs}")
```
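Continuing the `TrainingConfig` example above, the same dataclass can also be exposed under a flag prefix (via the `prefix` argument described earlier), which is handy when several components share one binary; the flag names shown are illustrative:

```python
# Flags become --train_learning_rate, --train_batch_size, and so on.
train_parser = eapp.make_flags_parser(TrainingConfig, prefix="train_")

# python script.py --train_learning_rate=0.01 --train_batch_size=64
train_config = train_parser(sys.argv[1:])
print(f"Learning rate: {train_config.learning_rate}")
```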
### Advanced Flag Configuration

```python
from etils import eapp
from dataclasses import dataclass, field
from typing import Dict, List, Optional
import sys

@dataclass
class ModelConfig:
    """Model architecture configuration."""
    layers: List[int] = field(default_factory=lambda: [64, 128, 256])
    dropout_rate: float = 0.1
    activation: str = "relu"
    use_batch_norm: bool = True

@dataclass
class DataConfig:
    """Data processing configuration."""
    input_size: tuple[int, int] = (224, 224)
    normalize: bool = True
    augmentation: bool = False
    num_workers: int = 4

@dataclass
class ExperimentConfig:
    """Complete experiment configuration."""
    experiment_name: str = "default_experiment"
    model: ModelConfig = field(default_factory=ModelConfig)
    data: DataConfig = field(default_factory=DataConfig)

    # Training parameters
    learning_rate: float = 0.001
    weight_decay: float = 1e-4
    max_epochs: int = 100

    # System configuration
    device: str = "cuda"
    precision: str = "16-mixed"

    # Optional parameters
    checkpoint_path: Optional[str] = None
    resume_from: Optional[str] = None

    # Advanced options
    hyperparams: Dict[str, float] = field(default_factory=dict)

# Create parsers with prefixes for the nested configs
model_parser = eapp.make_flags_parser(ModelConfig, prefix="model_")
data_parser = eapp.make_flags_parser(DataConfig, prefix="data_")
main_parser = eapp.make_flags_parser(
    ExperimentConfig,
    exclude_fields={"model", "data"},  # Handled separately below
)

def parse_experiment_config(args: List[str]) -> ExperimentConfig:
    """Parse the complete experiment configuration."""
    # Parse the main config
    config = main_parser(args)

    # Parse the nested configs
    config.model = model_parser(args)
    config.data = data_parser(args)

    return config

# Command line usage:
# python train.py --experiment_name=vision_v1 --learning_rate=0.01 \
#     --model_layers="[128,256,512]" --data_input_size="(256,256)"
config = parse_experiment_config(sys.argv[1:])
```
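For reproducibility it is often convenient to snapshot the fully-resolved configuration alongside the experiment outputs. A small sketch continuing from the parsed `config` above; the file name is illustrative:

```python
import dataclasses
import json

# Dump the resolved configuration so the run can be reproduced later.
with open("experiment_config.json", "w") as f:
    json.dump(dataclasses.asdict(config), f, indent=2, default=str)
```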
### Enhanced Logging Setup

```python
from etils import eapp
import logging

# In a real application you would typically call better_logging() once;
# the calls below just illustrate different option combinations.

# Basic enhanced logging
eapp.better_logging('INFO')

# Development logging with debug information
eapp.better_logging(
    level='DEBUG',
    include_module=True,
    colored_output=True,
)

# Production logging with a structured format
eapp.better_logging(
    level='WARNING',
    format_string='%(asctime)s [%(levelname)s] %(name)s: %(message)s',
    colored_output=False,
)

# Custom format for a specific use case
eapp.better_logging(
    level='INFO',
    format_string='[%(levelname)s] %(funcName)s:%(lineno)d - %(message)s',
    include_timestamp=True,
    colored_output=True,
)

# Use the configured logging
logging.info("Application started")
logging.debug("Debug information")
logging.warning("Warning message")
logging.error("Error occurred")
```
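A common companion step is quieting chatty third-party libraries after configuring your own output; this uses only the standard `logging` API and is independent of eapp (the library names are illustrative):

```python
import logging

# Keep application logs at INFO while muting noisy dependencies.
for noisy in ("urllib3", "matplotlib", "PIL"):
    logging.getLogger(noisy).setLevel(logging.WARNING)
```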
### Complete CLI Application

```python
from etils import eapp
from dataclasses import dataclass, field
from typing import Callable, List, Optional
import logging
import sys

@dataclass
class DatabaseConfig:
    """Database connection configuration."""
    host: str = "localhost"
    port: int = 5432
    database: str = "myapp"
    username: str = "user"
    password: str = ""
    pool_size: int = 10

@dataclass
class ServerConfig:
    """Server configuration."""
    host: str = "0.0.0.0"
    port: int = 8000
    workers: int = 4
    reload: bool = False
    debug: bool = False

@dataclass
class AppConfig:
    """Main application configuration."""
    # Application settings
    app_name: str = "MyApp"
    version: str = "1.0.0"
    log_level: str = "INFO"

    # Component configurations
    database: DatabaseConfig = field(default_factory=DatabaseConfig)
    server: ServerConfig = field(default_factory=ServerConfig)

    # Feature flags
    enable_metrics: bool = True
    enable_tracing: bool = False

    # Optional settings
    config_file: Optional[str] = None
    secrets_file: Optional[str] = None

def create_app_parser() -> Callable[[List[str]], AppConfig]:
    """Create the application argument parser."""
    # Create parsers for each component
    db_parser = eapp.make_flags_parser(DatabaseConfig, prefix="db_")
    server_parser = eapp.make_flags_parser(ServerConfig, prefix="server_")
    app_parser = eapp.make_flags_parser(
        AppConfig,
        exclude_fields={"database", "server"},
    )

    def parse_args(args: List[str]) -> AppConfig:
        """Parse all application arguments."""
        config = app_parser(args)
        config.database = db_parser(args)
        config.server = server_parser(args)
        return config

    return parse_args

def main():
    """Main application entry point."""
    # Parse configuration
    parser = create_app_parser()
    config = parser(sys.argv[1:])

    # Set up logging based on the configuration
    eapp.better_logging(
        level=config.log_level,
        include_module=config.server.debug,
        colored_output=config.server.debug,
    )

    logging.info(f"Starting {config.app_name} v{config.version}")
    logging.info(f"Database: {config.database.host}:{config.database.port}")
    logging.info(f"Server: {config.server.host}:{config.server.port}")

    if config.enable_metrics:
        logging.info("Metrics collection enabled")

    if config.enable_tracing:
        logging.info("Distributed tracing enabled")

    # Start application components
    start_database(config.database)
    start_server(config.server)

def start_database(db_config: DatabaseConfig):
    """Initialize the database connection."""
    logging.info(f"Connecting to database: {db_config.database}")
    # Database initialization logic here

def start_server(server_config: ServerConfig):
    """Start the web server."""
    logging.info(f"Starting server on {server_config.host}:{server_config.port}")
    # Server startup logic here

if __name__ == "__main__":
    # Command line usage:
    # python app.py --log_level=DEBUG --db_host=prod-db --server_port=9000 \
    #     --server_workers=8 --enable_metrics=true
    main()
```
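Because `AppConfig` carries an optional `config_file` field, one possible extension is to overlay values from a JSON file onto the parsed configuration. The sketch below is illustrative and not part of eapp; in this simplified version file values win over flags, whereas a real implementation would track which flags were explicitly set:

```python
import dataclasses
import json
from typing import List

def load_config(args: List[str]) -> AppConfig:
    """Parse flags, then overlay values from an optional JSON file."""
    config = create_app_parser()(args)
    if config.config_file:
        with open(config.config_file) as f:
            file_values = json.load(f)
        # Only simple top-level fields are merged in this sketch; the nested
        # database/server configs would need their own handling.
        simple_fields = {"app_name", "version", "log_level",
                         "enable_metrics", "enable_tracing"}
        overrides = {k: v for k, v in file_values.items() if k in simple_fields}
        config = dataclasses.replace(config, **overrides)
    return config
```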
### Integration with Existing Applications

```python
from etils import eapp
from dataclasses import dataclass
import sys

import absl.app
import absl.flags
import absl.logging

# Traditional Absl flags approach
FLAGS = absl.flags.FLAGS
absl.flags.DEFINE_string('model_path', '/models/default', 'Path to model')
absl.flags.DEFINE_float('threshold', 0.5, 'Classification threshold')

@dataclass
class ProcessingConfig:
    """Processing pipeline configuration."""
    batch_size: int = 100
    num_threads: int = 4
    output_format: str = "json"
    verbose: bool = False

# Combine traditional flags with a dataclass parser
processing_parser = eapp.make_flags_parser(ProcessingConfig, prefix="proc_")

def main(argv):
    """Main function compatible with absl.app."""
    del argv  # Unused

    # Parse the dataclass config from the command line
    processing_config = processing_parser(sys.argv[1:])

    # Set up enhanced logging
    eapp.better_logging(
        level='DEBUG' if processing_config.verbose else 'INFO',
        colored_output=True,
    )

    # Use both traditional flags and the dataclass config
    absl.logging.info(f"Model path: {FLAGS.model_path}")
    absl.logging.info(f"Threshold: {FLAGS.threshold}")
    absl.logging.info(f"Batch size: {processing_config.batch_size}")
    absl.logging.info(f"Output format: {processing_config.output_format}")

    # Run the processing pipeline
    run_pipeline(FLAGS.model_path, FLAGS.threshold, processing_config)

def run_pipeline(model_path: str, threshold: float, config: ProcessingConfig):
    """Run the processing pipeline."""
    absl.logging.info("Starting pipeline...")
    # Pipeline logic here

if __name__ == '__main__':
    absl.app.run(main)
```
### Configuration Validation

```python
from etils import eapp
from dataclasses import dataclass, field
from typing import List
import logging
import sys

@dataclass
class ValidationConfig:
    """Configuration with validation rules."""
    learning_rate: float = 0.001
    batch_size: int = 32
    model_layers: List[int] = field(default_factory=lambda: [128, 64])

    def __post_init__(self):
        """Validate configuration after parsing."""
        if self.learning_rate <= 0:
            raise ValueError("learning_rate must be positive")

        if self.batch_size <= 0:
            raise ValueError("batch_size must be positive")

        if not self.model_layers:
            raise ValueError("model_layers cannot be empty")

        if any(layer <= 0 for layer in self.model_layers):
            raise ValueError("All layer sizes must be positive")

def main():
    """Main entry point with configuration validation."""
    eapp.better_logging('INFO')

    try:
        parser = eapp.make_flags_parser(ValidationConfig)
        config = parser(sys.argv[1:])

        logging.info("Configuration validated successfully")
        logging.info(f"Learning rate: {config.learning_rate}")
        logging.info(f"Batch size: {config.batch_size}")
        logging.info(f"Model layers: {config.model_layers}")

    except ValueError as e:
        logging.error(f"Configuration validation failed: {e}")
        sys.exit(1)
    except Exception as e:
        logging.error(f"Failed to parse configuration: {e}")
        sys.exit(1)

if __name__ == "__main__":
    main()
```