# Utilities

Configuration inspection, debugging, and extension capabilities, including custom type converters, settings history tracking, and a hook system. These utilities provide tools for debugging configuration issues and extending dynaconf's functionality.

## Capabilities

### Type Converters

Add custom type conversion functions for parsing configuration values.

```python { .api }
def add_converter(converter_key: str, func: callable):
    """
    Add custom type converter for parsing configuration values.

    Args:
        converter_key (str): Key for the converter (auto-prefixed with '@')
        func (callable): Converter function that takes a value and returns the converted value
    """
    ...

def parse_conf_data(data, tomlfy=False, box_settings=None):
    """Parse configuration data with dynaconf processing."""
    ...

def unparse_conf_data(value):
    """Reverse parse configuration data to its string representation."""
    ...
```

Usage examples:

```python
from dynaconf import Dynaconf, add_converter
from datetime import datetime
import json

# Add datetime converter
def to_datetime(value):
    """Convert string to datetime object."""
    return datetime.fromisoformat(value)

add_converter("datetime", to_datetime)

# Add JSON converter for complex objects
def to_json_object(value):
    """Parse JSON string to Python object."""
    return json.loads(value)

add_converter("json_obj", to_json_object)

# Use converters in configuration files
# settings.toml:
# created_at = "@datetime 2023-01-15T10:30:00"
# config_data = '@json_obj {"key": "value", "nested": {"data": 123}}'

# Access converted values
settings = Dynaconf(settings_files=["settings.toml"])
created_date = settings.created_at  # datetime object
config_obj = settings.config_data   # Python dict
```
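
The lower-level helpers `parse_conf_data` and `unparse_conf_data` documented above can also be called directly, for example when building custom loaders. A minimal sketch, assuming they behave as described above; the import path is an assumption and may differ between dynaconf versions:

```python
# Sketch only: import location assumed to be dynaconf.utils.parse_conf.
from dynaconf.utils.parse_conf import parse_conf_data, unparse_conf_data

# Parse a raw string the same way dynaconf parses values from files/envvars
port = parse_conf_data("@int 8080", tomlfy=True)
print(port, type(port))         # expected: 8080 <class 'int'>

# Turn a Python value back into its "@<type> value" string form
print(unparse_conf_data(8080))  # expected: "@int 8080"
```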

### Settings Inspection

Analyze and debug settings loading with comprehensive inspection tools.

```python { .api }
def inspect_settings(
    settings,                 # Dynaconf settings instance
    key=None,                 # Specific key to inspect
    env=None,                 # Environment filter
    new_first=True,           # Order results (newest first)
    history_limit=None,       # Limit number of entries
    include_internal=False,   # Include internal loaders
    to_file=None,             # Write output to file
    print_report=True,        # Print to stdout
    dumper="yaml",            # Output format ("yaml", "json", "json-compact")
    report_builder=None,      # Custom report builder function
) -> dict:
    """
    Inspect and analyze settings loading history.

    Returns:
        dict: Inspection data with loading history and metadata
    """
    ...

def get_debug_info(settings, verbosity=0, key=None) -> dict:
    """
    Get debug information about settings configuration.

    Args:
        settings: Settings instance
        verbosity (int): Level of detail (0-2)
        key (str): Optional key filter

    Returns:
        dict: Debug information
    """
    ...

def print_debug_info(settings, dumper=None, verbosity=0, key=None):
    """
    Print debug information to stdout.

    Args:
        settings: Settings instance
        dumper (str): Output format ("yaml", "json")
        verbosity (int): Level of detail (0-2)
        key (str): Optional key filter
    """
    ...
```

Usage examples:

```python
from dynaconf import Dynaconf, inspect_settings

settings = Dynaconf(
    settings_files=["config.toml", "local.yaml"],
    environments=True,
    load_dotenv=True,
)

# Inspect all settings
inspection_data = inspect_settings(settings)

# Inspect specific key
db_inspection = inspect_settings(settings, key="DATABASE_URL")

# Inspect with custom output
inspect_settings(
    settings,
    env="production",
    dumper="json",
    to_file="inspection_report.json",
    print_report=False,
)

# Custom inspection report
def custom_report_builder(data):
    """Custom report format."""
    return f"Settings loaded from {len(data['sources'])} sources"

inspect_settings(
    settings,
    report_builder=custom_report_builder,
)
```
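
The debug helpers documented above can be used similarly. A brief sketch, assuming `get_debug_info` and `print_debug_info` are importable alongside `inspect_settings`; this import path is an assumption, so check your dynaconf version:

```python
# Sketch only: the import path for these helpers is an assumption.
from dynaconf import Dynaconf, get_debug_info, print_debug_info

settings = Dynaconf(settings_files=["config.toml"])

# Collect debug information as a dict for programmatic use
debug_data = get_debug_info(settings, verbosity=1, key="DATABASE_URL")

# Or print a report directly to stdout
print_debug_info(settings, dumper="yaml", verbosity=2)
```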

### Loading History

Get detailed history of configuration loading and value sources.

```python { .api }
def get_history(
    obj,                      # Settings object
    key=None,                 # Optional key filter
    filter_callable=None,     # Optional filter function
    include_internal=False,   # Include internal loaders
    history_limit=None,       # Limit entries
) -> list:
    """
    Get loading history for settings.

    Returns:
        list: History entries with source metadata
    """
    ...
```

Usage examples:

```python
from dynaconf import get_history

# Get full loading history
history = get_history(settings)
for entry in history:
    print(f"Key: {entry['key']}, Source: {entry['loader']}, Value: {entry['value']}")

# Get history for specific key
db_history = get_history(settings, key="DATABASE_URL")
print(f"DATABASE_URL loaded from: {db_history[0]['loader']}")

# Custom filtering
def production_only(entry):
    """Filter for production environment entries."""
    return entry.get('env') == 'production'

prod_history = get_history(settings, filter_callable=production_only)

# Limited history
recent_history = get_history(settings, history_limit=10)
```

### Hook System

Extend dynaconf functionality with post-loading hooks.

```python { .api }
def post_hook(func):
    """
    Decorator to mark functions as post hooks for settings loading.

    The decorated function will be called after settings are loaded,
    with the settings instance as an argument.
    """
    ...
```

Usage examples:

```python
from dynaconf import post_hook, Dynaconf

# Basic post hook
@post_hook
def validate_database_connection(settings):
    """Validate database connection after loading."""
    if hasattr(settings, 'DATABASE_URL'):
        # Test database connection
        print(f"Testing connection to {settings.DATABASE_URL}")
        # Connection validation logic here

@post_hook
def setup_logging(settings):
    """Configure logging based on loaded settings."""
    import logging
    level = getattr(settings, 'LOG_LEVEL', 'INFO')
    logging.basicConfig(level=getattr(logging, level))

# Hook for environment-specific setup
@post_hook
def production_checks(settings):
    """Run production-specific validation."""
    if settings.current_env == 'production':
        required_keys = ['SECRET_KEY', 'DATABASE_URL', 'ALLOWED_HOSTS']
        for key in required_keys:
            if not hasattr(settings, key):
                raise ValueError(f"Production requires {key} to be set")

# Hooks are automatically called when settings are loaded
settings = Dynaconf(
    settings_files=["config.toml"],
    environments=True,
)
# All registered hooks are executed after loading
```

### Built-in Converters

Dynaconf provides several built-in converters for common data transformations.

```python { .api }
# Type Converters
# @str   - Convert to string
# @int   - Convert to integer
# @float - Convert to float
# @bool  - Convert to boolean
# @json  - Parse as JSON

# Template Formatters
# @format - String formatting with settings values
# @jinja  - Jinja2 template rendering
# @get    - Get value from settings

# Data Manipulation
# @reset        - Reset/clear existing value
# @del          - Delete key
# @merge        - Merge with existing dict/list
# @merge_unique - Merge unique values only
# @insert       - Insert into existing list

# Special Markers
# @note    - Add documentation note (ignored)
# @comment - Add comment (ignored)
# @null    - Set to None
# @none    - Set to None
# @empty   - Set to empty string/list/dict

class Lazy:
    """Wrapper for lazy evaluation of values."""
    def __init__(self, func, *args, **kwargs): ...

class MetaValue:
    """Base class for special value types."""
    ...

class Reset(MetaValue):
    """Reset existing value marker."""
    ...

class Del(MetaValue):
    """Delete key marker."""
    ...

class Merge(MetaValue):
    """Merge with existing value marker."""
    ...

class Insert(MetaValue):
    """Insert into existing list marker."""
    ...
```
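
For illustration, a short sketch of how a few of these markers might be used; the settings file content is shown in comments, and the expected values are assumptions based on the casts described above:

```python
from dynaconf import Dynaconf

# settings.toml (illustrative):
# host = "127.0.0.1"
# port = "@int 8080"
# debug = "@bool false"
# base_url = "@format http://{this.HOST}:{this.PORT}"

settings = Dynaconf(settings_files=["settings.toml"])

print(type(settings.PORT))   # expected: <class 'int'>
print(type(settings.DEBUG))  # expected: <class 'bool'>
print(settings.BASE_URL)     # expected: "http://127.0.0.1:8080"
```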

## Advanced Utility Patterns

### Custom Inspection Reports

Create specialized inspection reports for different use cases.

```python
def security_audit_report(data):
    """Generate security-focused inspection report."""
    sensitive_keys = ['SECRET_KEY', 'PASSWORD', 'TOKEN', 'API_KEY']
    report = ["=== SECURITY AUDIT REPORT ===\n"]

    for entry in data.get('history', []):
        key = entry.get('key', '').upper()
        if any(sensitive in key for sensitive in sensitive_keys):
            source = entry.get('loader', 'unknown')
            report.append(f"SENSITIVE: {key} loaded from {source}")

    return "\n".join(report)

def performance_report(data):
    """Generate performance-focused inspection report."""
    loader_counts = {}
    for entry in data.get('history', []):
        loader = entry.get('loader', 'unknown')
        loader_counts[loader] = loader_counts.get(loader, 0) + 1

    report = ["=== PERFORMANCE REPORT ===\n"]
    report.append("Loader usage:")
    for loader, count in sorted(loader_counts.items()):
        report.append(f"  {loader}: {count} keys loaded")

    return "\n".join(report)

# Use custom reports
inspect_settings(settings, report_builder=security_audit_report)
inspect_settings(settings, report_builder=performance_report)
```

### Complex Type Converters

Create sophisticated type converters for complex data structures.

```python
import re
from urllib.parse import urlparse

from dynaconf import add_converter

def to_database_config(connection_string):
    """Convert database URL to configuration dict."""
    parsed = urlparse(connection_string)
    return {
        'engine': f"django.db.backends.{parsed.scheme}",
        'name': parsed.path[1:],  # Remove leading slash
        'user': parsed.username,
        'password': parsed.password,
        'host': parsed.hostname,
        'port': parsed.port or 5432,
    }

def to_email_list(value):
    """Convert comma-separated emails to validated list."""
    if isinstance(value, list):
        return value

    emails = [email.strip() for email in value.split(',')]
    email_pattern = r'^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$'

    valid_emails = []
    for email in emails:
        if re.match(email_pattern, email):
            valid_emails.append(email)
        else:
            print(f"Warning: Invalid email '{email}' ignored")

    return valid_emails

def to_size_bytes(value):
    """Convert human-readable size to bytes."""
    if isinstance(value, int):
        return value

    units = {'B': 1, 'KB': 1024, 'MB': 1024**2, 'GB': 1024**3}
    size_pattern = r'^(\d+(?:\.\d+)?)\s*([A-Z]*B?)$'

    match = re.match(size_pattern, value.upper())
    if not match:
        raise ValueError(f"Invalid size format: {value}")

    number, unit = match.groups()
    return int(float(number) * units.get(unit, 1))

# Register complex converters
add_converter("db_config", to_database_config)
add_converter("email_list", to_email_list)
add_converter("size_bytes", to_size_bytes)

# Use in configuration
# settings.toml:
# database = "@db_config postgresql://user:pass@localhost:5432/mydb"
# admin_emails = "@email_list admin@example.com, support@example.com"
# max_upload_size = "@size_bytes 10MB"
```
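
Assuming the settings file shown above, the converted values are then available as plain Python objects; a brief illustrative sketch:

```python
from dynaconf import Dynaconf

settings = Dynaconf(settings_files=["settings.toml"])

db = settings.DATABASE            # dict built by to_database_config
admins = settings.ADMIN_EMAILS    # list of validated e-mail strings
limit = settings.MAX_UPLOAD_SIZE  # int: 10 * 1024**2 for "10MB"

print(db["host"], db["port"])
print(admins)
print(limit)
```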

### Debugging Hooks

Create hooks for debugging and monitoring configuration loading.

```python
from dynaconf import post_hook

@post_hook
def debug_configuration_loading(settings):
    """Debug hook to log configuration loading details."""
    import logging

    logger = logging.getLogger("dynaconf.debug")
    logger.info(f"Configuration loaded for environment: {settings.current_env}")

    # Log all loaded keys
    for key in dir(settings):
        if not key.startswith('_') and not callable(getattr(settings, key)):
            value = getattr(settings, key)
            logger.debug(f"  {key} = {type(value).__name__}")

@post_hook
def monitor_sensitive_settings(settings):
    """Monitor access to sensitive configuration."""
    sensitive_patterns = ['SECRET', 'PASSWORD', 'TOKEN', 'KEY']

    original_get = settings.get

    def monitored_get(key, *args, **kwargs):
        if any(pattern in key.upper() for pattern in sensitive_patterns):
            print(f"SECURITY: Accessing sensitive setting '{key}'")
        return original_get(key, *args, **kwargs)

    settings.get = monitored_get

@post_hook
def cache_frequent_settings(settings):
    """Cache frequently accessed settings for performance."""
    cache = {}
    access_count = {}

    original_getattr = settings.__getattr__

    def cached_getattr(key):
        # Count access
        access_count[key] = access_count.get(key, 0) + 1

        # Cache after 3 accesses
        if access_count[key] > 3 and key not in cache:
            cache[key] = original_getattr(key)
            print(f"CACHE: Cached frequently accessed setting '{key}'")
            return cache[key]

        return cache.get(key, original_getattr(key))

    settings.__getattr__ = cached_getattr
```

### Configuration Validation Utilities

Combine utilities with validation for comprehensive configuration management.

```python
from dynaconf import Dynaconf, Validator, inspect_settings, post_hook

@post_hook
def validate_configuration_completeness(settings):
    """Ensure all required configuration is present."""
    required_by_env = {
        'development': ['DATABASE_URL', 'DEBUG'],
        'production': ['DATABASE_URL', 'SECRET_KEY', 'ALLOWED_HOSTS'],
        'testing': ['DATABASE_URL'],
    }

    current_env = settings.current_env
    required_keys = required_by_env.get(current_env, [])

    missing_keys = []
    for key in required_keys:
        if not hasattr(settings, key):
            missing_keys.append(key)

    if missing_keys:
        raise ValueError(
            f"Missing required settings for {current_env}: {missing_keys}"
        )

def create_validated_settings():
    """Create settings with comprehensive validation and inspection."""
    settings = Dynaconf(
        envvar_prefix="MYAPP",
        settings_files=["config.toml", "local.yaml"],
        environments=True,
        validators=[
            Validator("DEBUG", cast=bool, default=False),
            Validator("PORT", cast=int, gte=1000, lte=65535, default=8000),
        ],
    )

    # Generate inspection report
    report = inspect_settings(
        settings,
        print_report=False,
        dumper="json-compact",
    )

    print("Configuration loaded successfully:")
    print(f"  Environment: {settings.current_env}")
    print(f"  Sources: {len(report.get('sources', []))}")
    print(f"  Keys loaded: {len(report.get('history', []))}")

    return settings
```