The dynamic configurator for your Python Project
—
This module provides configuration inspection, debugging, and extension capabilities, including custom type converters, settings-history tracking, and a hook system for extending functionality. These utilities offer powerful tools for debugging configuration issues and extending dynaconf's capabilities.
Add custom type conversion functions for parsing configuration values.
# API stub — implementation body is elided ("...") in this reference listing.
def add_converter(converter_key: str, func: callable):
"""
Add custom type converter for parsing configuration values.
Args:
converter_key (str): Key for the converter (auto-prefixed with '@')
func (callable): Converter function that takes a value and returns converted value
"""
...
# API stub — parses raw values, applying dynaconf "@" converter processing.
def parse_conf_data(data, tomlfy=False, box_settings=None):
"""Parse configuration data with dynaconf processing."""
...
# API stub — the inverse of parse_conf_data (value back to its string form).
def unparse_conf_data(value):
"""Reverse parse configuration data to string representation."""
...
Usage examples:
from dynaconf import add_converter
from datetime import datetime
import json
# Add datetime converter
def to_datetime(value):
    """Parse an ISO-8601 formatted string into a ``datetime`` instance."""
    parsed = datetime.fromisoformat(value)
    return parsed
add_converter("datetime", to_datetime)
# Add JSON converter for complex objects
def to_json_object(value):
    """Deserialize a JSON document into the corresponding Python object."""
    decoded = json.loads(value)
    return decoded
add_converter("json_obj", to_json_object)
# Use converters in configuration files
# settings.toml:
# created_at = "@datetime 2023-01-15T10:30:00"
# config_data = "@json_obj {'key': 'value', 'nested': {'data': 123}}"
# Access converted values
settings = Dynaconf(settings_files=["settings.toml"])
created_date = settings.created_at # datetime object
config_obj = settings.config_data # Python dict
Analyze and debug settings loading with comprehensive inspection tools.
# API stub — implementation body is elided ("...") in this reference listing.
def inspect_settings(
settings, # Dynaconf settings instance
key=None, # Specific key to inspect
env=None, # Environment filter
new_first=True, # Order results (newest first)
history_limit=None, # Limit number of entries
include_internal=False, # Include internal loaders
to_file=None, # Write output to file
print_report=True, # Print to stdout
dumper="yaml", # Output format ("yaml", "json", "json-compact")
report_builder=None # Custom report builder function
) -> dict:
"""
Inspect and analyze settings loading history.
Returns:
dict: Inspection data with loading history and metadata
"""
...
# API stub — implementation body is elided ("...") in this reference listing.
def get_debug_info(settings, verbosity=0, key=None) -> dict:
"""
Get debug information about settings configuration.
Args:
settings: Settings instance
verbosity (int): Level of detail (0-2)
key (str): Optional key filter
Returns:
dict: Debug information
"""
...
# API stub — prints the get_debug_info report; body elided in this listing.
def print_debug_info(settings, dumper=None, verbosity=0, key=None):
"""
Print debug information to stdout.
Args:
settings: Settings instance
dumper (str): Output format ("yaml", "json")
verbosity (int): Level of detail (0-2)
key (str): Optional key filter
"""
...
Usage examples:
from dynaconf import Dynaconf, inspect_settings
settings = Dynaconf(
settings_files=["config.toml", "local.yaml"],
environments=True,
load_dotenv=True
)
# Inspect all settings
inspection_data = inspect_settings(settings)
# Inspect specific key
db_inspection = inspect_settings(settings, key="DATABASE_URL")
# Inspect with custom output
inspect_settings(
settings,
env="production",
dumper="json",
to_file="inspection_report.json",
print_report=False
)
# Custom inspection report
def custom_report_builder(data):
    """Render a one-line summary naming how many sources were loaded."""
    source_total = len(data['sources'])
    return f"Settings loaded from {source_total} sources"
inspect_settings(
settings,
report_builder=custom_report_builder
)
Get detailed history of configuration loading and value sources.
# API stub — implementation body is elided ("...") in this reference listing.
def get_history(
obj, # Settings object
key=None, # Optional key filter
filter_callable=None, # Optional filter function
include_internal=False, # Include internal loaders
history_limit=None # Limit entries
) -> list:
"""
Get loading history for settings.
Returns:
list: History entries with source metadata
"""
...
Usage examples:
from dynaconf import get_history
# Get full loading history
history = get_history(settings)
for entry in history:
print(f"Key: {entry['key']}, Source: {entry['loader']}, Value: {entry['value']}")
# Get history for specific key
db_history = get_history(settings, key="DATABASE_URL")
print(f"DATABASE_URL loaded from: {db_history[0]['loader']}")
# Custom filtering
def production_only(entry):
    """Keep only history entries recorded for the production environment."""
    env_name = entry.get('env')
    if env_name == 'production':
        return True
    return False
prod_history = get_history(settings, filter_callable=production_only)
# Limited history
recent_history = get_history(settings, history_limit=10)
Extend dynaconf functionality with post-loading hooks.
# API stub — registers func to run after loading; body elided in this listing.
def post_hook(func):
"""
Decorator to mark functions as post hooks for settings loading.
The decorated function will be called after settings are loaded
with the settings instance as an argument.
"""
...
Usage examples:
from dynaconf import post_hook, Dynaconf
# Basic post hook
@post_hook
def validate_database_connection(settings):
    """Validate database connection after loading."""
    if not hasattr(settings, 'DATABASE_URL'):
        return
    # Announce which database is being checked before running validation.
    print(f"Testing connection to {settings.DATABASE_URL}")
    # Connection validation logic here
@post_hook
def setup_logging(settings):
    """Configure the root logger from the loaded LOG_LEVEL setting."""
    import logging

    level_name = getattr(settings, 'LOG_LEVEL', 'INFO')
    numeric_level = getattr(logging, level_name)
    logging.basicConfig(level=numeric_level)
# Hook for environment-specific setup
@post_hook
def production_checks(settings):
    """Run production-specific validation."""
    if settings.current_env != 'production':
        return
    # Fail fast on the first production-critical key that is absent.
    for key in ('SECRET_KEY', 'DATABASE_URL', 'ALLOWED_HOSTS'):
        if not hasattr(settings, key):
            raise ValueError(f"Production requires {key} to be set")
# Hooks are automatically called when settings are loaded
settings = Dynaconf(
settings_files=["config.toml"],
environments=True
)
# All registered hooks are executed after loading
Dynaconf provides several built-in converters for common data transformations.
# Type Converters
# @str - Convert to string
# @int - Convert to integer
# @float - Convert to float
# @bool - Convert to boolean
# @json - Parse as JSON
# Template Formatters
# @format - String formatting with settings values
# @jinja - Jinja2 template rendering
# @get - Get value from settings
# Data Manipulation
# @reset - Reset/clear existing value
# @del - Delete key
# @merge - Merge with existing dict/list
# @merge_unique - Merge unique values only
# @insert - Insert into existing list
# Special Markers
# @note - Add documentation note (ignored)
# @comment - Add comment (ignored)
# @null - Set to None
# @none - Set to None
# @empty - Set to empty string/list/dict
# API stubs — class bodies are elided ("...") in this reference listing.
class Lazy:
"""Wrapper for lazy evaluation of values."""
def __init__(self, func, *args, **kwargs): ...
class MetaValue:
"""Base class for special value types."""
...
class Reset(MetaValue):
"""Reset existing value marker."""
...
class Del(MetaValue):
"""Delete key marker."""
...
class Merge(MetaValue):
"""Merge with existing value marker."""
...
class Insert(MetaValue):
"""Insert into existing list marker."""
...
## Advanced Utility Patterns
### Custom Inspection Reports
Create specialized inspection reports for different use cases.
```python
def security_audit_report(data):
    """Generate security-focused inspection report."""
    watched = ('SECRET_KEY', 'PASSWORD', 'TOKEN', 'API_KEY')
    lines = ["=== SECURITY AUDIT REPORT ===\n"]
    for entry in data.get('history', []):
        upper_key = entry.get('key', '').upper()
        # Flag any key whose name contains a security-relevant marker.
        if any(marker in upper_key for marker in watched):
            source = entry.get('loader', 'unknown')
            lines.append(f"SENSITIVE: {upper_key} loaded from {source}")
    return "\n".join(lines)
def performance_report(data):
    """Generate performance-focused inspection report."""
    tally = {}
    for entry in data.get('history', []):
        source = entry.get('loader', 'unknown')
        tally[source] = tally.get(source, 0) + 1
    lines = ["=== PERFORMANCE REPORT ===\n", "Loader usage:"]
    lines.extend(
        f" {loader}: {count} keys loaded"
        for loader, count in sorted(tally.items())
    )
    return "\n".join(lines)
# Use custom reports
inspect_settings(settings, report_builder=security_audit_report)
inspect_settings(settings, report_builder=performance_report)
Create sophisticated type converters for complex data structures.
import re
from urllib.parse import urlparse
def to_database_config(connection_string):
    """Convert a database URL into a Django-style configuration dict."""
    url = urlparse(connection_string)
    # Fall back to PostgreSQL's default port when the URL omits one.
    port = url.port or 5432
    return {
        'engine': f"django.db.backends.{url.scheme}",
        'name': url.path[1:],  # drop the leading "/"
        'user': url.username,
        'password': url.password,
        'host': url.hostname,
        'port': port,
    }
def to_email_list(value):
    """Convert comma-separated emails to validated list."""
    # Already-parsed lists pass through untouched.
    if isinstance(value, list):
        return value
    email_pattern = r'^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$'
    accepted = []
    for candidate in value.split(','):
        candidate = candidate.strip()
        if re.match(email_pattern, candidate):
            accepted.append(candidate)
        else:
            print(f"Warning: Invalid email '{candidate}' ignored")
    return accepted
def to_size_bytes(value):
    """Convert a human-readable size (e.g. "10MB") to an integer byte count.

    Args:
        value: An int (returned unchanged) or a string like "1.5 GB".
            Matching is case-insensitive; a bare number means bytes.

    Returns:
        int: The size in bytes.

    Raises:
        ValueError: If the string format or the unit suffix is invalid.
    """
    if isinstance(value, int):
        return value
    units = {'B': 1, 'KB': 1024, 'MB': 1024**2, 'GB': 1024**3, 'TB': 1024**4}
    size_pattern = r'^(\d+(?:\.\d+)?)\s*([A-Z]*B?)$'
    match = re.match(size_pattern, value.upper())
    if not match:
        raise ValueError(f"Invalid size format: {value}")
    number, unit = match.groups()
    # BUG FIX: unknown suffixes (e.g. "XB") previously fell back to a factor
    # of 1 via units.get(unit, 1), silently returning a wrong byte count.
    # An empty unit still means plain bytes.
    if unit and unit not in units:
        raise ValueError(f"Invalid size format: {value}")
    return int(float(number) * units.get(unit, 1))
# Register complex converters
add_converter("db_config", to_database_config)
add_converter("email_list", to_email_list)
add_converter("size_bytes", to_size_bytes)
# Use in configuration
# settings.toml:
# database = "@db_config postgresql://user:pass@localhost:5432/mydb"
# admin_emails = "@email_list admin@example.com, support@example.com"
# max_upload_size = "@size_bytes 10MB"
Create hooks for debugging and monitoring configuration loading.
@post_hook
def debug_configuration_loading(settings):
    """Debug hook to log configuration loading details."""
    import logging

    logger = logging.getLogger("dynaconf.debug")
    logger.info(f"Configuration loaded for environment: {settings.current_env}")
    # Log the type of every public, non-callable attribute.
    for key in dir(settings):
        if key.startswith('_'):
            continue
        value = getattr(settings, key)
        if callable(value):
            continue
        logger.debug(f" {key} = {type(value).__name__}")
@post_hook
def monitor_sensitive_settings(settings):
    """Monitor access to sensitive configuration."""
    sensitive_patterns = ('SECRET', 'PASSWORD', 'TOKEN', 'KEY')
    original_get = settings.get

    def monitored_get(key, *args, **kwargs):
        # Announce reads of any key whose name looks security-relevant,
        # then delegate to the original accessor unchanged.
        upper = key.upper()
        if any(pattern in upper for pattern in sensitive_patterns):
            print(f"SECURITY: Accessing sensitive setting '{key}'")
        return original_get(key, *args, **kwargs)

    settings.get = monitored_get
@post_hook
def cache_frequent_settings(settings):
    """Cache frequently accessed settings for performance.

    Wraps ``settings.__getattr__`` so that any key read more than three
    times is served from a local cache instead of the original lookup.
    """
    cache = {}
    access_count = {}
    original_getattr = settings.__getattr__

    def cached_getattr(key):
        # Count access
        access_count[key] = access_count.get(key, 0) + 1
        # BUG FIX: the original `cache.get(key, original_getattr(key))`
        # evaluated the fallback eagerly, so the underlying lookup ran on
        # every call and the cache never avoided any work.
        if key in cache:
            return cache[key]
        value = original_getattr(key)
        # Cache after 3 accesses
        if access_count[key] > 3:
            cache[key] = value
            print(f"CACHE: Cached frequently accessed setting '{key}'")
        return value
settings.__getattr__ = cached_getattr
Combine utilities with validation for comprehensive configuration management.
from dynaconf import Dynaconf, Validator, inspect_settings, post_hook
@post_hook
def validate_configuration_completeness(settings):
    """Ensure all required configuration is present."""
    required_by_env = {
        'development': ['DATABASE_URL', 'DEBUG'],
        'production': ['DATABASE_URL', 'SECRET_KEY', 'ALLOWED_HOSTS'],
        'testing': ['DATABASE_URL'],
    }
    current_env = settings.current_env
    # Collect every required key this environment is missing.
    missing_keys = [
        key
        for key in required_by_env.get(current_env, [])
        if not hasattr(settings, key)
    ]
    if missing_keys:
        raise ValueError(
            f"Missing required settings for {current_env}: {missing_keys}"
        )
# Builds a Dynaconf instance with DEBUG/PORT validators, then prints a short
# summary built from an inspect_settings report.
# NOTE(review): the trailing `return settings` of this function appears on the
# next line of the file, fused with unrelated docs text — extraction artifact.
def create_validated_settings():
"""Create settings with comprehensive validation and inspection."""
settings = Dynaconf(
envvar_prefix="MYAPP",
settings_files=["config.toml", "local.yaml"],
environments=True,
validators=[
Validator("DEBUG", cast=bool, default=False),
Validator("PORT", cast=int, gte=1000, lte=65535, default=8000),
]
)
# Generate inspection report
report = inspect_settings(
settings,
print_report=False,
dumper="json-compact"
)
print(f"Configuration loaded successfully:")
print(f" Environment: {settings.current_env}")
print(f" Sources: {len(report.get('sources', []))}")
print(f" Keys loaded: {len(report.get('history', []))}")
return settings
Install with Tessl CLI
npx tessl i tessl/pypi-dynaconf