Python parser for the Shuttle Radar Topography Mission elevation data
—
Quality
Pending
Does it follow best practices?
Impact
Pending
No eval scenarios have been run
File handling, caching, and configuration options for managing SRTM data files including custom cache directories, batch processing modes, and network settings.
Custom file handling system for managing SRTM data file storage, caching, and retrieval with configurable cache directories.
class FileHandler:
def __init__(self, local_cache_dir: Optional[str] = None): ...
def exists(self, file_name: str) -> bool: ...
def write(self, file_name: str, contents: bytes) -> None: ...
def read(self, file_name: str) -> bytes: ...

Constructor Parameters:
local_cache_dir (Optional[str]): Custom directory for caching SRTM files. Default: None (uses ~/.cache/srtm/)

Methods:
exists(): Check if a file exists in the cache
file_name (str) - Name of the SRTM file to check

write(): Write file contents to the cache
file_name (str), contents (bytes)

read(): Read file contents from the cache
file_name (str)

Usage Example:
import srtm
from srtm.utils import FileHandler
# Create custom file handler with specific cache directory
custom_handler = FileHandler(local_cache_dir="/data/srtm_cache")
# Use with elevation data
elevation_data = srtm.get_data(file_handler=custom_handler)
# Check if specific file is cached
if custom_handler.exists("N45E007.hgt"):
print("File is already cached locally")
else:
print("File will be downloaded on first use")
# Manual file operations (advanced usage)
if custom_handler.exists("N45E007.hgt"):
file_data = custom_handler.read("N45E007.hgt")
print(f"File size: {len(file_data)} bytes")

Configure custom cache directories for different use cases and deployment scenarios.
Usage Example:
import srtm
import os
# Project-specific cache directory
project_cache = os.path.join(os.getcwd(), "elevation_cache")
elevation_data = srtm.get_data(local_cache_dir=project_cache)
# Shared cache directory for multiple applications
shared_cache = "/opt/shared/srtm_data"
os.makedirs(shared_cache, exist_ok=True)
elevation_data = srtm.get_data(local_cache_dir=shared_cache)
# Temporary cache directory
import tempfile
temp_cache = tempfile.mkdtemp(prefix="srtm_")
elevation_data = srtm.get_data(local_cache_dir=temp_cache)
print(f"Using temporary cache: {temp_cache}")

Memory-efficient processing mode that keeps only the most recently accessed SRTM file in memory, ideal for processing large datasets or many geographic locations.
Usage Example:
import srtm
# Enable batch mode for memory efficiency
elevation_data = srtm.get_data(batch_mode=True)
# Process many coordinates efficiently
coordinates = [
(45.0, 7.0), (45.1, 7.1), (45.2, 7.2),
(46.0, 8.0), (46.1, 8.1), (46.2, 8.2),
(47.0, 9.0), (47.1, 9.1), (47.2, 9.2)
]
elevations = []
for lat, lon in coordinates:
elevation = elevation_data.get_elevation(lat, lon)
elevations.append(elevation)
print(f"({lat}, {lon}): {elevation}m")
print(f"Processed {len(coordinates)} coordinates in batch mode")
print(f"Memory usage optimized by keeping only current file loaded")

Configure whether downloaded SRTM files should be stored in compressed or uncompressed format.
Usage Example:
import srtm
# Store files as compressed ZIP files (saves disk space)
elevation_data = srtm.get_data(leave_zipped=True)
# Store files uncompressed (faster access, more disk space)
elevation_data = srtm.get_data(leave_zipped=False)
# Compare storage requirements
import os
from srtm.utils import FileHandler
handler = FileHandler()
if handler.exists("N45E007.hgt.zip"):
zip_size = len(handler.read("N45E007.hgt.zip"))
print(f"Compressed file size: {zip_size / 1024 / 1024:.1f} MB")
if handler.exists("N45E007.hgt"):
hgt_size = len(handler.read("N45E007.hgt"))
print(f"Uncompressed file size: {hgt_size / 1024 / 1024:.1f} MB")
print(f"Compression ratio: {zip_size / hgt_size:.2f}")

Configure network timeouts and URL sources for downloading SRTM data files.
Usage Example:
import srtm
# Configure custom timeout for slow networks
elevation_data = srtm.get_data(timeout=60) # 60 second timeout
# Disable pre-cached URL list (force fresh URL discovery)
elevation_data = srtm.get_data(use_included_urls=False)
# Combine custom network settings
elevation_data = srtm.get_data(
timeout=45, # 45 second timeout
use_included_urls=True, # Use cached URLs for speed
local_cache_dir="./srtm", # Custom cache location
batch_mode=True # Memory efficient mode
)

Utilities for managing and monitoring the SRTM file cache.
Usage Example:
import srtm
import os
from srtm.utils import FileHandler
def analyze_cache(cache_dir=None):
    """Analyze SRTM cache directory contents.

    Args:
        cache_dir (Optional[str]): Cache directory to inspect. Defaults to
            the library's standard location, ~/.cache/srtm/.

    Returns:
        dict: Summary with keys 'cache_path', 'hgt_count', 'zip_count',
            and 'total_size_mb', or None when the directory does not exist.
    """
    # Resolve the cache directory, mirroring FileHandler's default location.
    cache_path = cache_dir if cache_dir else os.path.expanduser("~/.cache/srtm/")
    if not os.path.exists(cache_path):
        print(f"Cache directory does not exist: {cache_path}")
        return None
    # Scan the directory once, then classify entries by extension.
    # (.hgt.zip does not end with '.hgt', so the two lists are disjoint.)
    entries = os.listdir(cache_path)
    hgt_files = [f for f in entries if f.endswith('.hgt')]
    zip_files = [f for f in entries if f.endswith('.hgt.zip')]
    total_size = sum(
        os.path.getsize(os.path.join(cache_path, filename))
        for filename in hgt_files + zip_files
    )
    print(f"Cache directory: {cache_path}")
    print(f"HGT files: {len(hgt_files)}")
    print(f"ZIP files: {len(zip_files)}")
    print(f"Total size: {total_size / 1024 / 1024:.1f} MB")
    return {
        'cache_path': cache_path,
        'hgt_count': len(hgt_files),
        'zip_count': len(zip_files),
        'total_size_mb': total_size / 1024 / 1024,
    }
# Analyze default cache
cache_info = analyze_cache()
# Analyze custom cache
custom_info = analyze_cache("/data/custom_srtm")

Advanced file handling operations for specialized use cases.
Usage Example:
import srtm
from srtm.utils import FileHandler
import shutil
import os
def backup_cache(source_cache=None, backup_dir="srtm_backup"):
    """Backup SRTM cache directory.

    Args:
        source_cache (Optional[str]): Cache directory to back up. Defaults
            to the library's standard location, ~/.cache/srtm/.
        backup_dir (str): Destination directory. Any existing backup at
            this path is deleted and replaced.

    Returns:
        bool: True on success, False when the source cache is missing.
    """
    # Resolve the source cache directory, mirroring FileHandler's default.
    cache_path = source_cache if source_cache else os.path.expanduser("~/.cache/srtm/")
    if not os.path.exists(cache_path):
        print(f"Source cache directory does not exist: {cache_path}")
        return False
    # Remove any previous backup wholesale so the copy is an exact mirror.
    if os.path.exists(backup_dir):
        shutil.rmtree(backup_dir)
    shutil.copytree(cache_path, backup_dir)
    print(f"Cache backed up to: {backup_dir}")
    return True
def restore_cache(backup_dir="srtm_backup", target_cache=None):
    """Restore SRTM cache from backup.

    Args:
        backup_dir (str): Directory containing a previously created backup.
        target_cache (Optional[str]): Cache directory to restore into.
            Defaults to the library's standard location, ~/.cache/srtm/.

    Returns:
        bool: True on success, False when the backup directory is missing.
    """
    # Nothing to restore without a backup; bail out early.
    if not os.path.exists(backup_dir):
        print(f"Backup directory does not exist: {backup_dir}")
        return False
    # Fall back to the default cache location when no target is given.
    cache_path = target_cache or os.path.expanduser("~/.cache/srtm/")
    # Drop the current cache so the restore is a clean copy of the backup.
    if os.path.exists(cache_path):
        shutil.rmtree(cache_path)
    shutil.copytree(backup_dir, cache_path)
    print(f"Cache restored from: {backup_dir}")
    return True
# Backup current cache
backup_cache()
# Restore cache (example)
# restore_cache("srtm_backup")

Key configuration constants for data management:
# Default cache location
DEFAULT_CACHE_DIR = "~/.cache/srtm/"
# Network settings
DEFAULT_TIMEOUT = 15 # seconds
SRTM1_URL = "https://srtm.kurviger.de/SRTM1/"
SRTM3_URL = "https://srtm.kurviger.de/SRTM3/"
# File settings
DEFAULT_LIST_JSON = "list.json" # Pre-cached file URLs

Common error conditions in data management operations:
Best Practices:
Install with Tessl CLI
npx tessl i tessl/pypi-srtm-py