ObsPy is a Python toolbox for seismology providing parsers for seismological data formats, clients for data centers, and signal processing routines for seismological time series analysis.
—
Standardized clients for accessing 67+ global seismological data centers including IRIS, GEOFON, NCEDC, SCEDC, and others through FDSN web services with unified interfaces for waveforms, events, and station metadata. These clients provide seamless access to the global seismological data ecosystem with automatic format handling and error recovery.
Primary client for accessing seismological data centers using the International Federation of Digital Seismograph Networks (FDSN) web service standards.
# Import from obspy.clients.fdsn
class Client:
    """Primary client for FDSN web services at seismological data centers."""

    def __init__(self, base_url: str = "IRIS", user: str = None, password: str = None,
                 user_agent: str = None, debug: bool = False, timeout: int = 120,
                 **kwargs):
        """
        FDSN web service client.

        Args:
            base_url: Data center identifier or full URL
            user: Username for restricted data access
            password: Password for restricted data access
            user_agent: Custom user agent string
            debug: Enable debug output
            timeout: Request timeout in seconds
            **kwargs: Additional client options

        Available data centers:
            IRIS, GEOFON, NCEDC, SCEDC, USGS, EMSC, ISC, ORFEUS, RESIF, INGV,
            BGR, KOERI, NOA, LMU, ETHZ, NIEP, GEONET, GA, IPGP, and 50+ others
        """

    def get_waveforms(self, network: str, station: str, location: str, channel: str,
                      starttime, endtime, quality: str = "B", minimumlength: float = None,
                      longestonly: bool = False, **kwargs) -> Stream:
        """
        Request waveform data.

        Args:
            network: Network code (e.g., "IU", "II", "G")
            station: Station code (e.g., "ANMO", "BFO")
            location: Location code (e.g., "00", "10", "--")
            channel: Channel code (e.g., "BHZ", "HHE", "LHN")
            starttime: Start time (UTCDateTime)
            endtime: End time (UTCDateTime)
            quality: Data quality ("D", "R", "Q", "M", "B")
            minimumlength: Minimum trace length in seconds
            longestonly: Return only longest continuous segment
            **kwargs: Additional service parameters

        Returns:
            Stream object with requested waveforms

        Raises:
            FDSNException: Service error or no data found
        """

    def get_waveforms_bulk(self, bulk, quality: str = "B", minimumlength: float = None,
                           longestonly: bool = False, **kwargs) -> Stream:
        """
        Request multiple waveform segments efficiently.

        Args:
            bulk: List of (network, station, location, channel, starttime, endtime) tuples
            quality: Data quality preference
            minimumlength: Minimum trace length in seconds
            longestonly: Return only longest segments
            **kwargs: Additional parameters

        Returns:
            Stream object with all requested waveforms
        """

    def get_events(self, starttime=None, endtime=None, minlatitude: float = None,
                   maxlatitude: float = None, minlongitude: float = None,
                   maxlongitude: float = None, latitude: float = None,
                   longitude: float = None, minradius: float = None,
                   maxradius: float = None, mindepth: float = None,
                   maxdepth: float = None, minmagnitude: float = None,
                   maxmagnitude: float = None, magnitudetype: str = None,
                   includeallorigins: bool = None, includeallmagnitudes: bool = None,
                   includearrivals: bool = None, eventid: str = None,
                   limit: int = None, offset: int = None, orderby: str = "time",
                   catalog: str = None, contributor: str = None, **kwargs):
        """
        Request earthquake event data.

        Args:
            starttime: Earliest event origin time
            endtime: Latest event origin time
            minlatitude: Southern boundary in degrees
            maxlatitude: Northern boundary in degrees
            minlongitude: Western boundary in degrees
            maxlongitude: Eastern boundary in degrees
            latitude: Center latitude for radial search
            longitude: Center longitude for radial search
            minradius: Minimum radius from center in degrees
            maxradius: Maximum radius from center in degrees
            mindepth: Minimum depth in km
            maxdepth: Maximum depth in km
            minmagnitude: Minimum magnitude
            maxmagnitude: Maximum magnitude
            magnitudetype: Magnitude type preference
            includeallorigins: Include all origins per event
            includeallmagnitudes: Include all magnitudes per event
            includearrivals: Include phase arrival data
            eventid: Specific event identifier
            limit: Maximum number of events
            offset: Number of events to skip
            orderby: Sort order ("time", "magnitude", "time-asc")
            catalog: Preferred event catalog
            contributor: Preferred data contributor
            **kwargs: Additional query parameters

        Returns:
            Catalog object containing matching events
        """

    def get_events_bulk(self, bulk, **kwargs):
        """
        Request multiple event queries efficiently.

        Args:
            bulk: List of event query parameter dictionaries
            **kwargs: Common parameters for all queries

        Returns:
            Catalog object with all matching events
        """

    def get_stations(self, network=None, station=None, location=None, channel=None,
                     starttime=None, endtime=None, startbefore=None, startafter=None,
                     endbefore=None, endafter=None, minlatitude: float = None,
                     maxlatitude: float = None, minlongitude: float = None,
                     maxlongitude: float = None, latitude: float = None,
                     longitude: float = None, minradius: float = None,
                     maxradius: float = None, level: str = "station",
                     includerestricted: bool = None, includeavailability: bool = None,
                     updatedafter=None, matchtimeseries: bool = None, **kwargs):
        """
        Request station metadata.

        Args:
            network: Network code(s) or pattern
            station: Station code(s) or pattern
            location: Location code(s) or pattern
            channel: Channel code(s) or pattern
            starttime: Earliest station start time
            endtime: Latest station end time
            startbefore: Stations starting before this time
            startafter: Stations starting after this time
            endbefore: Stations ending before this time
            endafter: Stations ending after this time
            minlatitude: Southern boundary in degrees
            maxlatitude: Northern boundary in degrees
            minlongitude: Western boundary in degrees
            maxlongitude: Eastern boundary in degrees
            latitude: Center latitude for radial search
            longitude: Center longitude for radial search
            minradius: Minimum radius from center in degrees
            maxradius: Maximum radius from center in degrees
            level: Detail level ("network", "station", "channel", "response")
            includerestricted: Include restricted stations
            includeavailability: Include data availability
            updatedafter: Stations updated after this time
            matchtimeseries: Match to available time series
            **kwargs: Additional query parameters

        Returns:
            Inventory object containing station metadata
        """

    def get_stations_bulk(self, bulk, **kwargs):
        """
        Request multiple station queries efficiently.

        Args:
            bulk: List of station query parameter dictionaries
            **kwargs: Common parameters for all queries

        Returns:
            Inventory object with all matching stations
        """

    def get_waveform_availability(self, network=None, station=None, location=None,
                                  channel=None, starttime=None, endtime=None, **kwargs):
        """
        Get waveform data availability information.

        Args:
            network: Network code(s) or pattern
            station: Station code(s) or pattern
            location: Location code(s) or pattern
            channel: Channel code(s) or pattern
            starttime: Earliest time window
            endtime: Latest time window
            **kwargs: Additional parameters

        Returns:
            List of availability information dictionaries
        """


# Advanced client that automatically routes requests to appropriate data
# centers based on network-station combinations and data availability.
class RoutingClient:
    """Client that routes FDSN requests across multiple data centers."""

    def __init__(self, routing_type: str = "eida-routing",
                 eida_token: str = None, include_providers: list = None,
                 exclude_providers: list = None, debug: bool = False,
                 timeout: int = 120, **kwargs):
        """
        Multi-data center routing client.

        Args:
            routing_type: Routing service type ("eida-routing", "iris-federator")
            eida_token: EIDA authentication token for restricted data
            include_providers: List of data centers to include
            exclude_providers: List of data centers to exclude
            debug: Enable debug output
            timeout: Request timeout in seconds
            **kwargs: Additional client options
        """

    def get_waveforms(self, network: str, station: str, location: str, channel: str,
                      starttime, endtime, **kwargs) -> Stream:
        """
        Request waveforms with automatic routing.

        Automatically determines the appropriate data center and handles
        cross-center data assembly for complex requests.

        Args:
            network: Network code
            station: Station code
            location: Location code
            channel: Channel code
            starttime: Start time
            endtime: End time
            **kwargs: Additional parameters

        Returns:
            Stream object with waveforms from appropriate data centers
        """

    def get_waveforms_bulk(self, bulk, **kwargs) -> Stream:
        """
        Bulk waveform request with intelligent routing.

        Args:
            bulk: List of waveform request tuples
            **kwargs: Additional parameters

        Returns:
            Stream object with data from multiple centers
        """

    def get_stations(self, **kwargs):
        """
        Request station metadata with routing.

        Args:
            **kwargs: Station query parameters

        Returns:
            Inventory object from appropriate data centers
        """

    def get_availability(self, **kwargs):
        """
        Get data availability across multiple centers.

        Args:
            **kwargs: Availability query parameters

        Returns:
            Comprehensive availability information
        """


# Automated bulk data download system for large-scale seismological
# studies with intelligent request management and error handling.
# Import from obspy.clients.fdsn.mass_downloader
class MassDownloader:
    """Automated mass data download manager for FDSN data centers."""

    def __init__(self, providers: list = None, debug: bool = False,
                 configure_logging: bool = True, **kwargs):
        """
        Automated mass data download system.

        Args:
            providers: List of FDSN data center identifiers
            debug: Enable debug logging
            configure_logging: Set up logging automatically
            **kwargs: Additional downloader options
        """

    def download(self, domain, restrictions, mseed_storage: str,
                 stationxml_storage: str = None, threads_per_client: int = 3,
                 download_chunk_size_in_mb: int = 20, **kwargs):
        """
        Download seismological data for the given domain and restrictions.

        Args:
            domain: Spatial domain for download (Domain object)
            restrictions: Download restrictions (Restrictions object)
            mseed_storage: Directory for MiniSEED files
            stationxml_storage: Directory for StationXML files
            threads_per_client: Concurrent downloads per data center
            download_chunk_size_in_mb: Maximum chunk size for downloads
            **kwargs: Additional download options
        """

    def get_availability(self, domain, restrictions, **kwargs):
        """
        Get data availability for a domain without downloading.

        Args:
            domain: Spatial domain
            restrictions: Time and channel restrictions
            **kwargs: Additional options

        Returns:
            Availability summary information
        """
class Restrictions:
    """Download restrictions and data selection criteria for MassDownloader."""

    def __init__(self, starttime, endtime, chunklength_in_sec: int = 86400,
                 network: str = None, station: str = None, location: str = None,
                 channel: str = None, exclude_networks: list = None,
                 exclude_stations: list = None, minimum_length: float = 0.0,
                 minimum_interstation_distance_in_m: float = 0.0,
                 channel_priorities: list = None, location_priorities: list = None,
                 reject_channels_with_gaps: bool = False,
                 sanitize: bool = True, **kwargs):
        """
        Download restrictions and data selection criteria.

        Args:
            starttime: Download start time (UTCDateTime)
            endtime: Download end time (UTCDateTime)
            chunklength_in_sec: File chunk length in seconds
            network: Network code pattern
            station: Station code pattern
            location: Location code pattern
            channel: Channel code pattern
            exclude_networks: Networks to exclude
            exclude_stations: Stations to exclude
            minimum_length: Minimum trace length fraction
            minimum_interstation_distance_in_m: Station spacing filter
            channel_priorities: Preferred channel order
            location_priorities: Preferred location order
            reject_channels_with_gaps: Reject gapped channels
            sanitize: Clean up file naming
            **kwargs: Additional restrictions
        """
class Domain:
    """Base class for spatial download domains."""
class RectangularDomain(Domain):
    """Rectangular (latitude/longitude box) geographic domain."""

    def __init__(self, minlatitude: float, maxlatitude: float,
                 minlongitude: float, maxlongitude: float, **kwargs):
        """
        Rectangular geographic domain.

        Args:
            minlatitude: Southern boundary in degrees
            maxlatitude: Northern boundary in degrees
            minlongitude: Western boundary in degrees
            maxlongitude: Eastern boundary in degrees
            **kwargs: Additional domain options
        """
class CircularDomain(Domain):
    """Circular (center point plus radius range) geographic domain."""

    def __init__(self, latitude: float, longitude: float, minradius: float,
                 maxradius: float, **kwargs):
        """
        Circular geographic domain.

        Args:
            latitude: Center latitude in degrees
            longitude: Center longitude in degrees
            minradius: Inner radius in degrees
            maxradius: Outer radius in degrees
            **kwargs: Additional domain options
        """
class GlobalDomain(Domain):
    """Global domain covering all available stations."""

    def __init__(self, **kwargs):
        """
        Global domain (all available stations).

        Args:
            **kwargs: Additional domain options
        """


# Additional clients for accessing specific seismological services
# and data types.
# IRIS-specific services
# Import from obspy.clients.iris
class Client:
    """IRIS Data Management Center client for IRIS-specific web services."""

    def __init__(self, user: str = None, password: str = None, timeout: int = 120,
                 debug: bool = False, **kwargs):
        """
        IRIS Data Management Center client.

        Args:
            user: Username for restricted access
            password: Password for restricted access
            timeout: Request timeout in seconds
            debug: Enable debug output
            **kwargs: Additional options
        """

    def distaz(self, stalat: float, stalon: float, evtlat: float, evtlon: float):
        """
        Calculate distance and azimuth using the IRIS web service.

        Args:
            stalat: Station latitude in degrees
            stalon: Station longitude in degrees
            evtlat: Event latitude in degrees
            evtlon: Event longitude in degrees

        Returns:
            Dictionary with distance, azimuth, and back-azimuth
        """

    def flinnengdahl(self, lat: float, lon: float):
        """
        Get the Flinn-Engdahl region number and name.

        Args:
            lat: Latitude in degrees
            lon: Longitude in degrees

        Returns:
            Dictionary with region information
        """

    def traveltime(self, model: str, phases: list, evdepth: float,
                   distdeg: float = None, distkm: float = None):
        """
        Calculate travel times using the IRIS TauP service.

        Args:
            model: Earth model name
            phases: List of seismic phases
            evdepth: Event depth in km
            distdeg: Distance in degrees (or use distkm)
            distkm: Distance in km (or use distdeg)

        Returns:
            List of travel time dictionaries
        """
# Syngine synthetic seismogram service
# Import from obspy.clients.syngine
class Client:
    """Syngine synthetic seismogram service client."""

    def __init__(self, base_url: str = "http://service.iris.edu",
                 user_agent: str = None, debug: bool = False, **kwargs):
        """
        Syngine synthetic seismogram client.

        Args:
            base_url: Syngine service URL
            user_agent: Custom user agent
            debug: Enable debug output
            **kwargs: Additional options
        """

    def get_waveforms(self, model: str, network: str, station: str,
                      starttime, endtime, eventid: str = None, **kwargs) -> Stream:
        """
        Generate synthetic seismograms.

        Args:
            model: Earth model for synthetics
            network: Network code
            station: Station code
            starttime: Start time
            endtime: End time
            eventid: Event identifier
            **kwargs: Source parameters (lat, lon, depth, magnitude, etc.)

        Returns:
            Stream with synthetic seismograms
        """

    def get_available_models(self):
        """
        Get the list of available Earth models.

        Returns:
            List of model names and descriptions
        """
# Real-time SeedLink client
# Import from obspy.clients.seedlink
class EasySeedLinkClient:
    """SeedLink real-time waveform streaming client."""

    def __init__(self, server_url: str, autoconnect: bool = True, **kwargs):
        """
        SeedLink real-time streaming client.

        Args:
            server_url: SeedLink server URL
            autoconnect: Connect automatically
            **kwargs: Connection options
        """

    def select_stream(self, net: str, station: str, selector: str = "BHZ"):
        """
        Select a data stream to receive.

        Args:
            net: Network code
            station: Station code
            selector: Channel selector
        """

    def run(self):
        """Start real-time data streaming."""

    def on_data(self, trace: Trace):
        """
        Callback for incoming data (override in a subclass).

        Args:
            trace: Real-time trace data
        """
# Nominal Response Library client
# Import from obspy.clients.nrl
class NRL:
    """Nominal Response Library (NRL) client."""

    def __init__(self, root: str = None, **kwargs):
        """
        Nominal Response Library client.

        Args:
            root: Local NRL path, or None for online access
            **kwargs: Additional options
        """

    def get_response(self, sensor_keys: list, datalogger_keys: list, **kwargs):
        """
        Get an instrument response from the NRL.

        Args:
            sensor_keys: Sensor identification path
            datalogger_keys: Datalogger identification path
            **kwargs: Additional parameters

        Returns:
            Response object
        """

    def get_sensors(self):
        """Get the list of available sensors."""

    def get_dataloggers(self):
        """Get the list of available dataloggers."""


# --- Example: basic FDSN data access ------------------------------------
from obspy.clients.fdsn import Client
from obspy import UTCDateTime

# Create a client for the IRIS data center.
# Alternative: use a specific data center URL, e.g.
#   client = Client("http://service.iris.edu")
client = Client("IRIS")

# Download waveform data for a two-hour window.
starttime = UTCDateTime("2023-01-15T10:00:00")
endtime = UTCDateTime("2023-01-15T12:00:00")
st = client.get_waveforms("IU", "ANMO", "00", "BHZ", starttime, endtime)
print(f"Downloaded {len(st)} traces")

# Download an earthquake catalog for the same window.
events = client.get_events(starttime=starttime, endtime=endtime,
                           minmagnitude=5.0, maxmagnitude=8.0)
print(f"Found {len(events)} events")

# Download station metadata including full instrument responses.
inventory = client.get_stations(network="IU", station="ANMO",
                                level="response")
print(f"Got metadata for {len(inventory.networks)} networks")

# --- Example: bulk waveform requests ------------------------------------
from obspy.clients.fdsn import Client
from obspy import UTCDateTime

client = Client("IRIS")

# Bulk request: all three components at two stations for one day.
starttime = UTCDateTime("2023-01-01")
endtime = UTCDateTime("2023-01-02")
bulk_list = [
    ("IU", "ANMO", "00", "BHZ", starttime, endtime),
    ("IU", "ANMO", "00", "BHN", starttime, endtime),
    ("IU", "ANMO", "00", "BHE", starttime, endtime),
    ("IU", "COLA", "00", "BHZ", starttime, endtime),
    ("IU", "COLA", "00", "BHN", starttime, endtime),
    ("IU", "COLA", "00", "BHE", starttime, endtime),
]

# Download all traces in a single request.
st = client.get_waveforms_bulk(bulk_list)
print(f"Downloaded {len(st)} traces from bulk request")

# Report the trace count per station.
for net_sta in {f"{tr.stats.network}.{tr.stats.station}" for tr in st}:
    net, sta = net_sta.split('.')
    st_station = st.select(network=net, station=sta)
    print(f"Station {net_sta}: {len(st_station)} traces")

# --- Example: mass download ---------------------------------------------
from obspy.clients.fdsn.mass_downloader import (
    RectangularDomain, Restrictions, MassDownloader
)
from obspy import UTCDateTime

# Spatial domain: Southern California.
domain = RectangularDomain(minlatitude=32.0, maxlatitude=37.0,
                           minlongitude=-122.0, maxlongitude=-115.0)

# Temporal and channel restrictions.
restrictions = Restrictions(
    starttime=UTCDateTime("2023-01-01"),
    endtime=UTCDateTime("2023-01-07"),
    chunklength_in_sec=86400,                    # 1-day files
    channel_priorities=["HH[ZNE]", "BH[ZNE]", "EH[ZNE]"],
    location_priorities=["", "00", "10"],
    minimum_length=0.8,                          # require 80% data coverage
    reject_channels_with_gaps=False,
    minimum_interstation_distance_in_m=1000.0,   # 1 km minimum spacing
)

# Create the downloader and fetch data from multiple providers.
mdl = MassDownloader(providers=["IRIS", "NCEDC", "SCEDC"])
mdl.download(domain, restrictions,
             mseed_storage="waveforms",
             stationxml_storage="stations")

# --- Example: federated routing -----------------------------------------
from obspy.clients.fdsn import RoutingClient
from obspy import UTCDateTime

# Routing client automatically finds the appropriate data center.
client = RoutingClient("eida-routing")

starttime = UTCDateTime("2023-01-01")
endtime = UTCDateTime("2023-01-02")

# EIDA network data routes to the appropriate European center.
st = client.get_waveforms("FR", "SJAF", "00", "HHZ", starttime, endtime)

# Station information may be assembled from multiple centers.
inventory = client.get_stations(network="FR,G,IU", station="*",
                                level="station")
print(f"Got data from {len({tr.stats.network for tr in st})} networks")
print(f"Inventory has {sum(len(net.stations) for net in inventory.networks)} stations")

# --- Example: real-time SeedLink streaming ------------------------------
from obspy.clients.seedlink.easyseedlink import create_client
from obspy import UTCDateTime


def handle_data(trace):
    """Process incoming real-time data."""
    print(f"Received: {trace.id} - {trace.stats.starttime}")
    # Apply real-time processing.
    trace.detrend('linear')
    trace.filter('bandpass', freqmin=1.0, freqmax=10.0)
    # Simple amplitude-threshold event check (example only).
    if trace.max() > 1000:
        print(f"Possible event detected on {trace.id}")


# Connect to a SeedLink server and select streams.
client = create_client("rtserve.iris.washington.edu", on_data=handle_data)
client.select_stream("IU", "ANMO", "BHZ.D")
client.select_stream("IU", "COLA", "BHZ.D")

# Start streaming (runs indefinitely).
client.run()

# --- Example: synthetic seismograms via Syngine -------------------------
from obspy.clients.syngine import Client
from obspy import UTCDateTime

client = Client()

# List the Earth models available on the Syngine service.
models = client.get_available_models()
print("Available models:", [m['name'] for m in models])

# Generate synthetic seismograms for a point source.
starttime = UTCDateTime("2023-01-01T00:00:00")
endtime = UTCDateTime("2023-01-01T01:00:00")
st = client.get_waveforms(
    model="ak135f_5s",
    network="XX",
    station="SYN",
    starttime=starttime,
    endtime=endtime,
    # Source parameters: time, lat, lon, depth in meters.
    sourcelocation="2023-01-01T00:10:00,-10.0,30.0,10000",
    sourcemagnitude=6.5,
    sourcedepthinmeters=10000,
)
print(f"Generated {len(st)} synthetic traces")
st.plot()

# Exception classes
class FDSNException(Exception):
    """Base exception for FDSN client errors."""


class FDSNNoDataException(FDSNException):
    """No data available for the request."""


class FDSNTimeoutException(FDSNException):
    """Request timeout exceeded."""


class FDSNBadRequestException(FDSNException):
    """Invalid request parameters."""
# Data center information structure (documentation only: maps field
# names to their expected types).
DataCenter = {
    'name': str,            # Data center name
    'website': str,         # Website URL
    'services': list[str],  # Available services
    'waveform_url': str,    # Waveform service URL
    'event_url': str,       # Event service URL
    'station_url': str,     # Station service URL
}

# Availability information structure (documentation only).
Availability = {
    'network': str,              # Network code
    'station': str,              # Station code
    'location': str,             # Location code
    'channel': str,              # Channel code
    'starttime': UTCDateTime,    # Earliest available data
    'endtime': UTCDateTime,      # Latest available data
    'samplerate': float,         # Sampling rate
    'quality': str,              # Data quality indicator
}

# Install with Tessl CLI:
#   npx tessl i tessl/pypi-obspy