ObsPy is a Python toolbox for seismology providing parsers for seismological data formats, clients for data centers, and signal processing routines for seismological time series analysis.
—
Comprehensive signal processing toolkit including digital filters, triggering algorithms, spectral analysis, array processing, and coordinate transformations specifically designed for seismological data analysis. These functions integrate seamlessly with ObsPy's core data structures and provide the foundation for advanced seismological analysis workflows.
High-performance digital filters optimized for seismological data including Butterworth, Chebyshev, and FIR implementations with zero-phase options.
# Import from obspy.signal.filter
def bandpass(data, freqmin: float, freqmax: float, df: float, corners: int = 4,
             zerophase: bool = False, **kwargs):
    """
    Butterworth bandpass filter.

    Filters ``data`` between ``freqmin`` and ``freqmax`` with a digital
    Butterworth filter of order ``corners``, applied as second-order
    sections (SOS) for numerical stability.

    Args:
        data: Input data array.
        freqmin: Low frequency corner in Hz.
        freqmax: High frequency corner in Hz.
        df: Sampling frequency in Hz.
        corners: Filter order (number of poles).
        zerophase: If True the filter is applied forwards and backwards,
            which doubles the effective order but cancels phase distortion.
        **kwargs: Additional filter options (accepted for interface
            compatibility; ignored by this implementation).

    Returns:
        Filtered data array (numpy.ndarray of float64).

    Raises:
        ValueError: If the corners are not strictly inside (0, df / 2).
    """
    import numpy as np
    import scipy.signal

    data = np.asarray(data, dtype=np.float64)
    nyquist = 0.5 * df
    low = freqmin / nyquist
    high = freqmax / nyquist
    if not 0 < low < high < 1:
        raise ValueError(
            "Corner frequencies must satisfy 0 < freqmin < freqmax < df/2.")
    z, p, k = scipy.signal.iirfilter(corners, [low, high], btype="band",
                                     ftype="butter", output="zpk")
    sos = scipy.signal.zpk2sos(z, p, k)
    filtered = scipy.signal.sosfilt(sos, data)
    if zerophase:
        # A second pass over the reversed signal cancels the phase shift.
        filtered = scipy.signal.sosfilt(sos, filtered[::-1])[::-1]
    return filtered
def lowpass(data, freq: float, df: float, corners: int = 4,
            zerophase: bool = False, **kwargs):
    """
    Butterworth lowpass filter.

    Args:
        data: Input data array.
        freq: Corner frequency in Hz.
        df: Sampling frequency in Hz.
        corners: Filter order (number of poles).
        zerophase: If True, apply the filter forwards and backwards to
            cancel phase distortion (doubles the effective order).
        **kwargs: Additional options.

    Returns:
        Filtered data array.
    """
def highpass(data, freq: float, df: float, corners: int = 4,
             zerophase: bool = False, **kwargs):
    """
    Butterworth highpass filter.

    Args:
        data: Input data array.
        freq: Corner frequency in Hz.
        df: Sampling frequency in Hz.
        corners: Filter order (number of poles).
        zerophase: If True, apply the filter forwards and backwards to
            cancel phase distortion (doubles the effective order).
        **kwargs: Additional options.

    Returns:
        Filtered data array.
    """
def bandstop(data, freqmin: float, freqmax: float, df: float, corners: int = 4,
             zerophase: bool = False, **kwargs):
    """
    Butterworth bandstop (notch) filter.

    Suppresses energy between ``freqmin`` and ``freqmax``.

    Args:
        data: Input data array.
        freqmin: Low frequency corner in Hz.
        freqmax: High frequency corner in Hz.
        df: Sampling frequency in Hz.
        corners: Filter order (number of poles).
        zerophase: If True, apply the filter forwards and backwards to
            cancel phase distortion (doubles the effective order).
        **kwargs: Additional options.

    Returns:
        Filtered data array.
    """
def lowpass_cheby_2(data, freq: float, df: float, maxorder: int = 12,
                    ba: bool = False, freq_passband: bool = False):
    """
    Chebyshev type II lowpass filter.

    Often used as an anti-alias filter before decimation because of its
    steep roll-off.

    Args:
        data: Input data array.
        freq: Corner frequency in Hz.
        df: Sampling frequency in Hz.
        maxorder: Maximum filter order.
        ba: Return the filter coefficients instead of filtering if True.
        freq_passband: Interpret ``freq`` as the passband edge if True.

    Returns:
        Filtered data array, or filter coefficients if ``ba`` is True.
    """
def lowpass_fir(data, freq: float, df: float, ntaps: int = None, **kwargs):
    """
    FIR lowpass filter using Parks-McClellan optimal design.

    Args:
        data: Input data array.
        freq: Corner frequency in Hz.
        df: Sampling frequency in Hz.
        ntaps: Number of filter taps (chosen automatically if None).
        **kwargs: FIR design options.

    Returns:
        Filtered data array.
    """
def remez_fir(data, freqs: list, desired: list, df: float, **kwargs):
    """
    FIR filter designed with the Remez exchange algorithm.

    Args:
        data: Input data array.
        freqs: List of frequency band edges in Hz.
        desired: List of desired gains, one per band.
        df: Sampling frequency in Hz.
        **kwargs: Remez algorithm options.

    Returns:
        Filtered data array.
    """
Automated event detection algorithms for identifying seismic arrivals and determining onset times with sub-sample precision.
# Import from obspy.signal.trigger
def classic_sta_lta(a, nsta: int, nlta: int):
    """
    Classic STA/LTA trigger algorithm.

    Computes the ratio of a short-term average (STA) to a long-term
    average (LTA) of the squared amplitudes.  Both averages are trailing
    windows ending at the current sample; the first ``nlta - 1`` samples,
    where the LTA window is not yet fully populated, are set to zero.

    Args:
        a: Input data array.
        nsta: Short time average window length in samples.
        nlta: Long time average window length in samples (>= nsta).

    Returns:
        STA/LTA characteristic function array of the same length as ``a``.

    Raises:
        ValueError: If ``nsta < 1`` or ``nlta < nsta``.
    """
    import numpy as np

    if nsta < 1 or nlta < nsta:
        raise ValueError("Requires 1 <= nsta <= nlta.")
    a = np.asarray(a, dtype=np.float64)
    n = a.size
    cft = np.zeros(n, dtype=np.float64)
    if nlta > n:
        # Not enough data for a single complete LTA window.
        return cft
    power = a * a
    # A cumulative sum with a leading zero turns every trailing-window
    # mean into a difference of two entries (O(n) overall).
    csum = np.concatenate(([0.0], np.cumsum(power)))
    sta = (csum[nsta:] - csum[:-nsta]) / nsta   # ends at samples nsta-1..n-1
    lta = (csum[nlta:] - csum[:-nlta]) / nlta   # ends at samples nlta-1..n-1
    # Keep only samples where the LTA window is complete.
    sta = sta[nlta - nsta:]
    with np.errstate(divide="ignore", invalid="ignore"):
        cft[nlta - 1:] = np.where(lta > 0.0, sta / lta, 0.0)
    return cft
def recursive_sta_lta(a, nsta: int, nlta: int):
    """
    Recursive STA/LTA trigger algorithm (faster implementation).

    Args:
        a: Input data array.
        nsta: Short time average window length in samples.
        nlta: Long time average window length in samples.

    Returns:
        STA/LTA characteristic function array.
    """
def delayed_sta_lta(a, nsta: int, nlta: int):
    """
    Delayed STA/LTA trigger algorithm.

    Args:
        a: Input data array.
        nsta: Short time average window length in samples.
        nlta: Long time average window length in samples.

    Returns:
        STA/LTA characteristic function array.
    """
def carl_sta_trig(a, nsta: int, nlta: int, ratio: float, quiet: float):
    """
    Carl-STA-Trig algorithm for coincidence triggering.

    Args:
        a: Input data array.
        nsta: Short time average window length in samples.
        nlta: Long time average window length in samples.
        ratio: Trigger ratio threshold.
        quiet: Quiet ratio threshold.

    Returns:
        Trigger times array.
        NOTE(review): ObsPy's carl_sta_trig returns the characteristic
        function rather than trigger times — confirm the intended contract.
    """
def z_detect(a, nsta: int):
    """
    Z-detector algorithm for event detection.

    Args:
        a: Input data array.
        nsta: Analysis window length in samples.

    Returns:
        Z-detector characteristic function array.
    """
def trigger_onset(charfct, thres1: float, thres2: float, max_len: int = None,
                  max_len_delete: bool = False):
    """
    Determine trigger on and off times from a characteristic function.

    A trigger switches on when ``charfct`` exceeds ``thres1`` and off
    when it falls below ``thres2``.

    Args:
        charfct: Characteristic function array.
        thres1: Trigger-on threshold.
        thres2: Trigger-off threshold.
        max_len: Maximum trigger length in samples (None = unlimited).
        max_len_delete: Delete triggers exceeding ``max_len`` instead of
            truncating them.

    Returns:
        Array of (trigger_on, trigger_off) sample indices.
    """
def coincidence_trigger(trigger_type: str, thr_on: float, thr_off: float,
                        stream, thr_coincidence_sum: int, **kwargs):
    """
    Multi-station coincidence triggering.

    Args:
        trigger_type: Trigger algorithm ('recstalta', 'classicstalta', etc.).
        thr_on: Trigger-on threshold.
        thr_off: Trigger-off threshold.
        stream: Stream with multiple traces.
        thr_coincidence_sum: Minimum number of stations required for a
            coincidence.
        **kwargs: Algorithm-specific parameters.

    Returns:
        List of coincident trigger dictionaries.
    """
Advanced spectral analysis class for long-term noise analysis and instrument performance monitoring following McNamara and Buland (2004) methodology.
class PPSD:
    """
    Probabilistic power spectral density (PPSD) analysis following
    McNamara and Buland (2004).

    NOTE(review): the usage example later in this document constructs the
    class as ``PPSD(st[0].stats, metadata=inventory)`` while ``__init__``
    below names the second parameter ``paz_or_resp`` — confirm which
    keyword the spec intends.
    """

    def __init__(self, stats, paz_or_resp, dtiny: float = 1e-6,
                 dmedian: float = None,
                 period_smoothing_width_octaves: float = 1.0,
                 period_step_octaves: float = 0.125, **kwargs):
        """
        Set up a probabilistic power spectral density analysis.

        Args:
            stats: Trace.stats object with metadata.
            paz_or_resp: Poles-and-zeros dict or response object.
            dtiny: Tiny float for numerical stability.
            dmedian: Median smoothing parameter.
            period_smoothing_width_octaves: Smoothing width in octaves.
            period_step_octaves: Period step size in octaves.
            **kwargs: Additional PPSD parameters.
        """

    def add(self, stream, verbose: bool = False):
        """
        Add stream data to the PPSD analysis.

        May be called repeatedly to accumulate multiple days or files.

        Args:
            stream: Stream object with seismic data.
            verbose: Print processing information.
        """

    def plot(self, filename: str = None, show_coverage: bool = True,
             show_histogram: bool = True, show_percentiles: bool = False,
             percentiles: list = [0, 25, 50, 75, 100], **kwargs):
        """
        Plot PPSD results.

        NOTE(review): the mutable default list for ``percentiles`` is
        shared across calls; harmless only as long as it is never mutated.

        Args:
            filename: Save the plot to this file if specified.
            show_coverage: Show the data coverage timeline.
            show_histogram: Show the probability density histogram.
            show_percentiles: Show percentile lines.
            percentiles: List of percentiles to show.
            **kwargs: Additional plotting options.
        """

    def plot_temporal(self, starttime=None, endtime=None, **kwargs):
        """
        Plot the temporal evolution of the PSDs.

        Args:
            starttime: Start time for the plot window.
            endtime: End time for the plot window.
            **kwargs: Plotting options.
        """

    def plot_spectrogram(self, filename: str = None, **kwargs):
        """
        Plot a spectrogram representation of the PSDs.

        Args:
            filename: Save the plot to this file if specified.
            **kwargs: Plotting options.
        """

    def save_npz(self, filename: str):
        """
        Save PPSD data to a NumPy compressed file.

        Args:
            filename: Output filename (.npz).
        """

    def load_npz(self, filename: str):
        """
        Load PPSD data from a NumPy compressed file.

        NOTE(review): in ObsPy this is exposed as a static constructor
        that returns a new PPSD — confirm the instance-method semantics
        intended here.

        Args:
            filename: Input filename (.npz).
        """

    def get_percentile(self, percentile: float = 50, hist_stack=None):
        """
        Get the specified percentile curve from the PPSD.

        Args:
            percentile: Percentile value (0-100).
            hist_stack: Histogram stack to use.

        Returns:
            Tuple of (periods, power_values).
        """

    def calculate_histogram(self, starttime=None, endtime=None):
        """
        Calculate the probability density histogram for a time window.

        Args:
            starttime: Window start time.
            endtime: Window end time.

        Returns:
            2D histogram array.
        """
Instrument response correction and simulation functions for removing instrument effects and simulating different sensor responses.
# Import from obspy.signal.invsim
def simulate_seismometer(data, samp_rate: float, paz_remove=None, paz_simulate=None,
                         taper: bool = True, simulate_sensitivity: bool = True,
                         taper_fraction: float = 0.05, pre_filt=None, zero_mean: bool = True,
                         nfft_pow2: bool = False, **kwargs):
    """
    Simulate or correct a seismometer response.

    Args:
        data: Input data array.
        samp_rate: Sampling rate in Hz.
        paz_remove: Poles and zeros to remove (dict with 'poles',
            'zeros', 'gain').
        paz_simulate: Poles and zeros to simulate.
        taper: Apply a taper to the data.
        simulate_sensitivity: Include sensitivity in the simulation.
        taper_fraction: Taper length as a fraction of the trace.
        pre_filt: Pre-filter frequencies (list of 4 corners).
        zero_mean: Remove the mean before processing.
        nfft_pow2: Use a power-of-2 FFT length.
        **kwargs: Additional options.

    Returns:
        Simulated data array.
    """
def corn_freq_2_paz(fc: float, damp: float = 0.707):
    """
    Convert a corner frequency and damping ratio to poles and zeros.

    Builds the poles-and-zeros representation of a second-order
    seismometer with eigenfrequency ``fc`` and damping ``damp``: the two
    conjugate poles are ``-2*pi*fc * (damp -/+ j*sqrt(1 - damp**2))`` and
    two zeros sit at the origin.

    Args:
        fc: Corner (eigen-)frequency in Hz.
        damp: Damping ratio.  The default 0.707 (~1/sqrt(2)) yields the
            conventional flat, Butterworth-like response; critical
            damping would be 1.0.

    Returns:
        Dictionary with keys 'poles', 'zeros', 'gain' and 'sensitivity'.
    """
    import math

    omega = 2.0 * math.pi * fc
    # abs() guards against tiny negative round-off for damp ~ 1.
    imag = math.sqrt(abs(1.0 - damp ** 2))
    poles = [complex(-damp, -imag) * omega, complex(-damp, imag) * omega]
    return {'poles': poles, 'zeros': [0j, 0j], 'gain': 1.0,
            'sensitivity': 1.0}
def paz_2_amplitude_value_of_freq_resp(paz, freq: float):
    """
    Calculate the amplitude response at a single frequency.

    Evaluates ``|H(j * 2*pi*freq)|`` for the rational transfer function
    defined by the poles, zeros and overall gain in ``paz``.

    Args:
        paz: Poles-and-zeros dictionary with keys 'poles', 'zeros' and
            'gain'.
        freq: Frequency in Hz at which to evaluate the response.

    Returns:
        Amplitude (magnitude) of the frequency response as a float.
    """
    import math

    jw = 2.0 * math.pi * freq * 1j
    numerator = 1.0 + 0j
    for zero in paz['zeros']:
        numerator *= jw - zero
    denominator = 1.0 + 0j
    for pole in paz['poles']:
        denominator *= jw - pole
    return abs(numerator / denominator) * paz['gain']
def evalresp(t_samp: float, nfft: int, filename: str, date, units: str = "VEL",
             freq: bool = False, **kwargs):
    """
    Use evalresp to calculate an instrument response.

    Args:
        t_samp: Sampling period in seconds.
        nfft: Number of FFT points.
        filename: RESP filename or SEED response.
        date: Response date (UTCDateTime).
        units: Output units ('DIS', 'VEL', 'ACC').
        freq: Also return the frequency array if True.
        **kwargs: Additional evalresp options.

    Returns:
        Complex response array (and frequencies if ``freq`` is True).
    """
def cosine_taper(npts: int, p: float = 0.1):
    """
    Generate a cosine (Tukey) taper window.

    The window is flat in the middle and tapers to zero with a cosine
    shape over a total fraction ``p`` of its length, i.e. ``p / 2`` on
    each end — the default ``p=0.1`` tapers 5% of the samples at the
    beginning and 5% at the end.

    Args:
        npts: Number of points in the window.
        p: Total taper fraction, between 0 (rectangular window) and
            1 (full cosine/Hann-like window).

    Returns:
        Taper window array of length ``npts``.
    """
    import scipy.signal

    # scipy's Tukey window implements exactly this shape; alpha is the
    # total fraction of the window inside the tapered regions.
    return scipy.signal.windows.tukey(npts, alpha=p)
# Multi-station array processing techniques for detecting and locating seismic sources using coherent signal processing across seismic arrays.
# Import from obspy.signal.array_analysis
def array_processing(stream, win_len: float, win_frac: float, sll_x: float,
                     slm_x: float, sll_y: float, slm_y: float, sl_s: float,
                     semb_thres: float = -1e9, vel_thres: float = -1e9,
                     frqlow: float = 1.0, frqhigh: float = 8.0, samp_rate: float = 40.0,
                     prewhiten: int = 0, **kwargs):
    """
    Array processing for slowness and back-azimuth estimation.

    Args:
        stream: Stream with array data (requires coordinate metadata).
        win_len: Sliding window length in seconds.
        win_frac: Window overlap fraction (0-1).
        sll_x: Slowness grid minimum X in s/km.
        slm_x: Slowness grid maximum X in s/km.
        sll_y: Slowness grid minimum Y in s/km.
        slm_y: Slowness grid maximum Y in s/km.
        sl_s: Slowness grid spacing in s/km.
        semb_thres: Semblance threshold.
        vel_thres: Velocity threshold in km/s.
        frqlow: Low frequency in Hz.
        frqhigh: High frequency in Hz.
        samp_rate: Sampling rate in Hz.
        prewhiten: Pre-whitening (0=off, 1=on).
        **kwargs: Additional options.

    Returns:
        Structured array with processing results.
    """
def get_geometry(stream, coordsys: str = 'lonlat', return_center: bool = False):
    """
    Get the array geometry from stream coordinates.

    Args:
        stream: Stream with coordinate metadata.
        coordsys: Coordinate system ('lonlat', 'xy').
        return_center: Also return the array center coordinates.

    Returns:
        Array geometry matrix (and the center if requested).
    """
def get_timeshift(geometry, sll_x: float, slm_x: float, sll_y: float, slm_y: float,
                  sl_s: float, grdpts_x: int, grdpts_y: int):
    """
    Calculate time shifts for each point of a slowness grid.

    Args:
        geometry: Array geometry matrix.
        sll_x: Slowness grid minimum X.
        slm_x: Slowness grid maximum X.
        sll_y: Slowness grid minimum Y.
        slm_y: Slowness grid maximum Y.
        sl_s: Slowness grid spacing.
        grdpts_x: Number of grid points in the X direction.
        grdpts_y: Number of grid points in the Y direction.

    Returns:
        Time shift array for the grid points.
    """
Coordinate system transformations for seismological data including horizontal component rotation and three-component rotations to ray-coordinate systems.
# Import from obspy.signal.rotate
def rotate_ne_rt(n, e, ba: float):
    """
    Rotate North-East components to Radial-Transverse.

    The rotation angle is the back-azimuth ``ba``: the angle at the
    station, measured clockwise from north, towards the source.

    Args:
        n: North component data array.
        e: East component data array.
        ba: Back-azimuth in degrees (0-360).

    Returns:
        Tuple of (radial, transverse) component arrays.

    Raises:
        ValueError: If ``n`` and ``e`` differ in shape.
    """
    import math
    import numpy as np

    n = np.asarray(n, dtype=np.float64)
    e = np.asarray(e, dtype=np.float64)
    if n.shape != e.shape:
        raise ValueError("North and East components must have the same shape.")
    angle = math.radians(ba)
    radial = -e * math.sin(angle) - n * math.cos(angle)
    transverse = -e * math.cos(angle) + n * math.sin(angle)
    return radial, transverse
def rotate_rt_ne(r, t, ba: float):
    """
    Rotate Radial-Transverse components back to North-East.

    Exact inverse of the NE -> RT rotation for the same back-azimuth.

    Args:
        r: Radial component data array.
        t: Transverse component data array.
        ba: Back-azimuth in degrees (0-360), as used in the forward
            rotation.

    Returns:
        Tuple of (north, east) component arrays.

    Raises:
        ValueError: If ``r`` and ``t`` differ in shape.
    """
    import math
    import numpy as np

    r = np.asarray(r, dtype=np.float64)
    t = np.asarray(t, dtype=np.float64)
    if r.shape != t.shape:
        raise ValueError(
            "Radial and Transverse components must have the same shape.")
    angle = math.radians(ba)
    # Inverse (transpose) of the orthogonal NE->RT rotation matrix.
    north = -r * math.cos(angle) + t * math.sin(angle)
    east = -r * math.sin(angle) - t * math.cos(angle)
    return north, east
def rotate_zne_lqt(z, n, e, ba: float, inc: float):
    """
    Rotate ZNE components into the LQT ray coordinate system.

    Args:
        z: Vertical component data array.
        n: North component data array.
        e: East component data array.
        ba: Back-azimuth in degrees.
        inc: Inclination angle in degrees.

    Returns:
        Tuple of (L, Q, T) component arrays, where L points along the
        P-wave direction, Q along the SV-wave direction and T along the
        SH-wave direction.
    """
def rotate_lqt_zne(l, q, t, ba: float, inc: float):
    """
    Rotate LQT ray coordinates back to ZNE components.

    Inverse of the ZNE -> LQT rotation for the same back-azimuth and
    inclination.

    Args:
        l: L component data array (P-wave direction).  NOTE(review): the
            name ``l`` is easily confused with ``1``; kept unchanged for
            interface compatibility.
        q: Q component data array (SV-wave direction).
        t: T component data array (SH-wave direction).
        ba: Back-azimuth in degrees.
        inc: Inclination angle in degrees.

    Returns:
        Tuple of (Z, N, E) component arrays.
    """
Cross-correlation analysis for measuring time delays between signals and template matching applications.
# Import from obspy.signal.cross_correlation
def correlate(a, b, shift_len: int, demean: bool = True, normalize: str = 'naive',
              method: str = 'auto'):
    """
    Cross-correlate two signals.

    Args:
        a: First signal array.
        b: Second signal array.
        shift_len: Maximum shift length in samples.
        demean: Remove the mean of each signal before correlating.
        normalize: Normalization method ('naive', 'full').
        method: Correlation method ('auto', 'fft', 'direct').

    Returns:
        Cross-correlation array.
    """
def xcorr_max(a, b, abs_max: bool = True):
    """
    Find the maximum cross-correlation value and its lag.

    Args:
        a: First signal array.
        b: Second signal array.
        abs_max: Use the absolute maximum if True, so a strong negative
            peak can win over a weaker positive one.

    Returns:
        Tuple of (correlation_coefficient, lag_in_samples).
        NOTE(review): ObsPy's xcorr_max returns (shift, value), i.e. the
        opposite order — confirm which order this spec intends.
    """
def correlate_template(data, template, mode: str = 'valid', normalize: str = 'full'):
    """
    Template matching using cross-correlation.

    Args:
        data: Data array to search.
        template: Template array to match.
        mode: Correlation mode ('valid', 'full', 'same').
        normalize: Normalization method.

    Returns:
        Cross-correlation array.
    """
Frequency domain analysis tools including power spectral density estimation, spectrograms, and multitaper methods.
# Import from obspy.signal.spectral_estimation
def PPSD(stats, paz_or_resp, **kwargs):
    """
    Create a PPSD instance (alias for the main PPSD class).

    NOTE(review): this function shadows the PPSD class name if both are
    imported into the same namespace — confirm this is intended.
    """
def psd(data, NFFT: int = 256, Fs: float = 2, detrend: str = 'linear',
        window: str = 'hann', noverlap: int = 0):
    """
    Power spectral density estimate using Welch's method.

    Args:
        data: Input data array.
        NFFT: FFT segment length.
        Fs: Sampling frequency.
        detrend: Detrend method ('linear', 'constant', 'none').
        window: Window function.
        noverlap: Overlap between segments.

    Returns:
        Tuple of (frequencies, power_spectral_density).
    """
def spectrogram(data, samp_rate: float, per_lap: float = 0.9, wlen: float = None,
                log: bool = False, outfile: str = None, fmt: str = None,
                axes=None, dbscale: bool = False, **kwargs):
    """
    Compute and plot a spectrogram of time-series data.

    Args:
        data: Input data array.
        samp_rate: Sampling rate in Hz.
        per_lap: Fraction of window overlap (0-1).
        wlen: Window length in seconds (auto-selected if None).
        log: Use a logarithmic frequency scale.
        outfile: Save the figure to this file if specified.
        fmt: File format for saving.
        axes: Matplotlib axes object to plot into.
        dbscale: Use a dB scale for power.
        **kwargs: Additional plotting options.

    Returns:
        Tuple of (time, frequency, spectrogram_array).
    """
from obspy import read
from obspy.signal.filter import bandpass, lowpass
import numpy as np
# Read seismic data
st = read('seismic_data.mseed')
trace = st[0]
# Apply bandpass filter (1-10 Hz)
filtered_data = bandpass(trace.data, 1.0, 10.0, trace.stats.sampling_rate,
corners=4, zerophase=True)
# Apply to trace directly using trace method
trace.filter('bandpass', freqmin=1.0, freqmax=10.0, corners=4, zerophase=True)
# Multiple filter stages
trace.filter('highpass', freq=0.5) # Remove long periods
trace.filter('lowpass', freq=25.0) # Anti-alias before decimation
trace.decimate(2) # Reduce sampling rate by factor of 2from obspy import read
from obspy.signal.trigger import classic_sta_lta, trigger_onset, plot_trigger
import matplotlib.pyplot as plt

# Example: STA/LTA event detection with plotting.
# NOTE(review): plot_trigger is imported but unused below — confirm.
# Read data and select trace
st = read('continuous_data.mseed')
trace = st[0]
# Calculate STA/LTA characteristic function
df = trace.stats.sampling_rate
cft = classic_sta_lta(trace.data, int(5 * df), int(10 * df))
# Determine trigger times
thr_on = 1.5
thr_off = 0.5
on_off = trigger_onset(cft, thr_on, thr_off)
# Plot results
fig, (ax1, ax2) = plt.subplots(2, 1, figsize=(12, 8))
# Plot original trace
times = trace.times()
ax1.plot(times, trace.data, 'black')
ax1.set_ylabel('Amplitude')
ax1.set_title(f'{trace.id} - Raw Data')
# Plot characteristic function with triggers
ax2.plot(times, cft, 'blue')
ax2.axhline(thr_on, color='red', linestyle='--', label='Thr On')
ax2.axhline(thr_off, color='orange', linestyle='--', label='Thr Off')
# Mark trigger times
for triggers in on_off:
    on_time = times[triggers[0]]
    off_time = times[triggers[1]]
    ax2.axvspan(on_time, off_time, alpha=0.3, color='red')
ax2.set_ylabel('STA/LTA')
ax2.set_xlabel('Time (s)')
ax2.legend()
plt.tight_layout()
plt.show()
from obspy import read
from obspy.signal import PPSD
from obspy.clients.fdsn import Client

# Example: probabilistic PSD noise analysis.
# Get station response
client = Client("IRIS")
inventory = client.get_stations(network="IU", station="ANMO", location="00",
                                channel="BHZ", level="response")
# Read continuous data
st = read('continuous_noise.mseed')
# Create PPSD instance
# NOTE(review): this passes keyword 'metadata' while the PPSD signature
# earlier in this document names the parameter 'paz_or_resp' — confirm.
ppsd = PPSD(st[0].stats, metadata=inventory)
# Add data to analysis (can add multiple days/files)
ppsd.add(st)
# Plot results
ppsd.plot(show_coverage=True, show_histogram=True)
ppsd.plot_temporal()  # Show temporal evolution
ppsd.plot_spectrogram()  # Show spectrogram view
# Get percentile curves
periods, median_psd = ppsd.get_percentile(percentile=50)
periods, low_noise = ppsd.get_percentile(percentile=10)
periods, high_noise = ppsd.get_percentile(percentile=90)
from obspy import read
from obspy.signal.array_analysis import array_processing, get_geometry
import numpy as np

# Example: array processing for slowness / back-azimuth estimation.
# Read array data (requires coordinate metadata)
st = read('array_data.mseed')
# Get array geometry
geometry = get_geometry(st, coordsys='lonlat')
# Array processing parameters
kwargs = dict(
    # sliding window properties
    win_len=1.0,  # window length in seconds
    win_frac=0.05,  # window overlap fraction
    # slowness grid
    sll_x=-3.0, slm_x=3.0,  # X slowness limits (s/km)
    sll_y=-3.0, slm_y=3.0,  # Y slowness limits (s/km)
    sl_s=0.03,  # slowness step (s/km)
    # frequency band
    frqlow=1.0, frqhigh=8.0,  # frequency band (Hz)
    # processing
    samp_rate=40.0,  # sampling rate
    prewhiten=0,  # pre-whitening off
    semb_thres=-1e9,  # semblance threshold
    vel_thres=-1e9,  # velocity threshold
)
# Perform array processing
out = array_processing(st, **kwargs)
# Extract results
times = out[:, 0]  # time stamps
rel_power = out[:, 1]  # relative power
abs_power = out[:, 2]  # absolute power
baz = out[:, 3]  # back-azimuth
slow = out[:, 4]  # slowness magnitude
# Convert slowness to apparent velocity
# NOTE(review): assumes slowness is nonzero everywhere — a zero entry
# would divide by zero here.
app_vel = 1.0 / slow  # km/s
print(f"Mean back-azimuth: {np.mean(baz):.1f} degrees")
print(f"Mean apparent velocity: {np.mean(app_vel):.2f} km/s")
# Poles and zeros dictionary structure
# Illustrative type layouts: the dictionary values below are type
# expressions documenting the expected structure, not runtime data.
PoleZeroDict = {
    'poles': list[complex],  # List of complex poles
    'zeros': list[complex],  # List of complex zeros
    'gain': float,  # System gain
    'sensitivity': float  # Instrument sensitivity (optional)
}
# Array processing result structure (structured NumPy array)
ArrayProcessingResult = np.dtype([
    ('time', 'f8'),  # Time stamp
    ('rel_power', 'f8'),  # Relative power
    ('abs_power', 'f8'),  # Absolute power
    ('baz', 'f8'),  # Back-azimuth in degrees
    ('slow', 'f8')  # Slowness magnitude in s/km
])
# Trigger onset result structure
TriggerOnset = np.dtype([
    ('time_on', 'i4'),  # Trigger on time (sample index)
    ('time_off', 'i4')  # Trigger off time (sample index)
])
Install with Tessl CLI
npx tessl i tessl/pypi-obspy