Uniform interface for multiple dust reddening maps
System for managing dust map data storage, downloading map files, and configuring package behavior. dustmaps provides centralized configuration management and standardized data fetching across all dust map implementations.
Global configuration system for managing data directories, API endpoints, and package behavior.
class Configuration:
def __init__(self, fname):
"""
Initialize configuration from file.
Parameters:
- fname (str): Path to configuration file
"""
def __getitem__(self, key):
"""
Get configuration value using dictionary syntax.
Parameters:
- key (str): Configuration key
Returns:
- str | float | bool: Configuration value
"""
def __setitem__(self, key, value):
"""
Set configuration value using dictionary syntax.
Parameters:
- key (str): Configuration key
- value (str | float | bool): Configuration value
"""
def get(self, key, default=None):
"""
Get configuration value with default fallback.
Parameters:
- key (str): Configuration key
- default: Default value if key not found
Returns:
- Configuration value or default
"""
def load(self):
"""Load configuration from file."""
def save(self, force=False):
"""
Save configuration to file.
Parameters:
- force (bool): Overwrite existing file without confirmation
"""
def remove(self, key):
"""
Remove configuration key.
Parameters:
- key (str): Configuration key to remove
"""
def reset(self):
"""Reset configuration to default values."""
# Global configuration instance
config: Configuration

Usage Example:
from dustmaps.config import config
# Set data directory for large map files
config['data_dir'] = '/path/to/large/data/directory'
# Get current data directory
data_dir = config['data_dir']
print(f"Data directory: {data_dir}")
# Get with default value
api_timeout = config.get('api_timeout', 30)
# Save configuration to file
config.save()

# Standard configuration keys
ConfigKeys = {
'data_dir': str, # Directory for storing large map data files
'api_timeout': float, # Web API request timeout (seconds)
'max_memory': float, # Maximum memory usage for map loading (GB)
'cache_enabled': bool # Enable/disable local caching
}

Configuration-related exceptions for error handling.
class ConfigError(Exception):
"""Exception raised for configuration errors."""
class ConfigWarning(UserWarning):
"""Warning issued for configuration issues."""Utilities for managing file paths and directories used by dustmaps.
def fix_path(path):
"""
Expand user home and environment variables in file paths.
Parameters:
- path (str): File path potentially containing ~ or environment variables
Returns:
- str: Expanded absolute path
"""
def data_dir():
"""
Get the configured data directory for storing large map files.
Returns:
- str: Absolute path to data directory
"""
def output_dir():
"""
Get directory for temporary output files.
Returns:
- str: Absolute path to output directory
"""
# Module-level path constants
script_dir: str # dustmaps package installation directory
data_dir_default: str # Default data directory
test_dir: str # Test files directory
output_dir_default: str # Default output directory

Common utilities for downloading and validating dust map data files.
def get_md5sum(fname, chunk_size=1024):
"""
Calculate MD5 checksum of a file.
Parameters:
- fname (str): Path to file
- chunk_size (int): Size of chunks to read (bytes)
Returns:
- str: MD5 checksum as hexadecimal string
"""
def h5_file_exists(fname, size_guess=None, rtol=0.1, atol=1.0, dsets={}):
"""
Check if HDF5 file exists and has expected properties.
Parameters:
- fname (str): Path to HDF5 file
- size_guess (int, optional): Expected file size in bytes
- rtol (float): Relative tolerance for size check
- atol (float): Absolute tolerance for size check (MB)
- dsets (dict): Expected datasets and their properties
Returns:
- bool: True if file exists and meets requirements
"""
def dataverse_download_doi(doi, local_fname, file_requirements={}):
"""
Download file from Harvard Dataverse using DOI.
Parameters:
- doi (str): Digital Object Identifier for the dataset
- local_fname (str): Local filename to save downloaded file
- file_requirements (dict): Expected file properties for validation
Raises:
- DownloadError: If download fails or file validation fails
"""
class DownloadError(Exception):
"""Exception raised during data download operations."""
# Dataverse base URL
dataverse: str = 'https://dataverse.harvard.edu'

Each dust map module provides a standardized fetch() function for downloading map data.
# Available in dustmaps.sfd
def fetch():
"""Download SFD'98 dust map data (north and south galactic pole files)."""
# Available in dustmaps.bayestar
def fetch(version='bayestar2019'):
"""
Download Bayestar 3D dust map data.
Parameters:
- version (str): Map version ('bayestar2015', 'bayestar2017', 'bayestar2019')
"""
# Available in dustmaps.planck
def fetch(which='2013'):
"""
Download Planck dust map data.
Parameters:
- which (str): Map version ('2013' or 'GNILC')
"""
# Available in dustmaps.marshall
def fetch(clobber=False):
"""
Download Marshall et al. 2006 dust map data.
Parameters:
- clobber (bool): Overwrite existing files
"""
# Available in dustmaps.iphas
def fetch(clobber=False):
"""
Download IPHAS dust map data.
Parameters:
- clobber (bool): Overwrite existing files
"""
# Similar fetch() functions available in:
# dustmaps.csfd, dustmaps.chen2014, dustmaps.chen2018,
# dustmaps.lenz2017, dustmaps.pg2010, dustmaps.leike_ensslin_2019,
# dustmaps.leike2020, dustmaps.edenhofer2023, dustmaps.gaia_tge,
# dustmaps.decaps

Usage Example:
from dustmaps.config import config
# Configure data directory (first-time setup)
config['data_dir'] = '/home/user/dustmaps_data'
# Optional: Configure other settings
config['api_timeout'] = 60.0 # seconds
config['max_memory'] = 8.0 # GB
# Save configuration
config.save()

# Override config file location
export DUSTMAPS_CONFIG_FNAME="/custom/path/dustmapsrc"
# dustmaps will use this file instead of the default ~/.dustmapsrc

# Step 1: Configure data directory
from dustmaps.config import config
config['data_dir'] = '/path/to/data'
# Step 2: Download specific maps
import dustmaps.sfd
import dustmaps.bayestar
import dustmaps.planck
dustmaps.sfd.fetch() # Download SFD map
dustmaps.bayestar.fetch() # Download Bayestar 2019
dustmaps.planck.fetch(which='GNILC') # Download Planck GNILC
# Step 3: Use maps
from dustmaps.sfd import SFDQuery
from dustmaps.bayestar import BayestarQuery
from dustmaps.planck import PlanckGNILCQuery
sfd = SFDQuery()
bayestar = BayestarQuery()
planck = PlanckGNILCQuery()

# Download multiple maps programmatically
maps_to_fetch = [
('sfd', lambda: __import__('dustmaps.sfd', fromlist=['fetch']).fetch()),
('planck', lambda: __import__('dustmaps.planck', fromlist=['fetch']).fetch()),
('bayestar', lambda: __import__('dustmaps.bayestar', fromlist=['fetch']).fetch()),
('marshall', lambda: __import__('dustmaps.marshall', fromlist=['fetch']).fetch()),
]
for map_name, fetch_func in maps_to_fetch:
try:
print(f"Downloading {map_name}...")
fetch_func()
print(f"✓ {map_name} download complete")
except Exception as e:
print(f"✗ {map_name} download failed: {e}")from dustmaps.config import ConfigError, ConfigWarning
from dustmaps.fetch_utils import DownloadError
from dustmaps.dustexceptions import Error
try:
# Configuration operations
config['data_dir'] = '/invalid/path'
config.save()
# Data download
import dustmaps.sfd
dustmaps.sfd.fetch()
# Map usage
from dustmaps.sfd import SFDQuery
sfd = SFDQuery()
except ConfigError as e:
print(f"Configuration error: {e}")
except DownloadError as e:
print(f"Download failed: {e}")
except Error as e:
print(f"dustmaps error: {e}")The configuration file (~/.dustmapsrc by default) is stored in JSON format:
{
"data_dir": "/path/to/dustmaps/data",
"api_timeout": 30.0,
"max_memory": 4.0,
"cache_enabled": true
}

Install with Tessl CLI:
npx tessl i tessl/pypi-dustmaps