CtrlK
BlogDocsLog inGet started
Tessl Logo

tessl/pypi-segyio

Simple & fast IO for SEG-Y files

Pending
Overview
Eval results
Files

seismic-unix.mddocs/

Seismic Unix Compatibility

Seismic Unix field name aliases and specialized file handling for SU format compatibility. This subpackage provides familiar field names for users transitioning from Seismic Unix workflows.

Capabilities

SU Field Aliases

The segyio.su module provides Seismic Unix field name aliases that map to the corresponding TraceField and BinField constants, making code more readable for users familiar with SU conventions.

import segyio.su

# Trace header field aliases (key fields)
segyio.su.tracl     # TraceField.TRACE_SEQUENCE_LINE - Trace sequence number within line
segyio.su.tracr     # TraceField.TRACE_SEQUENCE_FILE - Trace sequence number within file  
segyio.su.fldr      # TraceField.FieldRecord - Original field record number
segyio.su.tracf     # TraceField.TraceNumber - Trace number within field record
segyio.su.ep        # TraceField.EnergySourcePoint - Energy source point number
segyio.su.cdp       # TraceField.CDP - CDP ensemble number
segyio.su.cdpt      # TraceField.CDP_TRACE - Trace number within CDP ensemble
segyio.su.trid      # TraceField.TraceIdentificationCode - Trace identification code
segyio.su.offset    # TraceField.offset - Distance from source to receiver group
segyio.su.sx        # TraceField.SourceX - Source coordinate X
segyio.su.sy        # TraceField.SourceY - Source coordinate Y
segyio.su.gx        # TraceField.GroupX - Group coordinate X
segyio.su.gy        # TraceField.GroupY - Group coordinate Y
segyio.su.ns        # TraceField.TRACE_SAMPLE_COUNT - Number of samples in trace
segyio.su.dt        # TraceField.TRACE_SAMPLE_INTERVAL - Sample interval (microseconds)
segyio.su.delrt     # TraceField.DelayRecordingTime - Delay recording time (ms)
segyio.su.cdpx      # TraceField.CDP_X - CDP X coordinate
segyio.su.cdpy      # TraceField.CDP_Y - CDP Y coordinate
segyio.su.iline     # TraceField.INLINE_3D - Inline number (3D surveys)
segyio.su.xline     # TraceField.CROSSLINE_3D - Crossline number (3D surveys)

Usage Example:

import segyio
import segyio.su

# Using SU field names instead of numeric constants
with segyio.open('data.sgy') as f:
    # Read trace header using SU field names
    header = f.header[0]
    
    # Instead of: header[189] and header[193]
    inline = header[segyio.su.iline]
    crossline = header[segyio.su.xline]
    cdp_number = header[segyio.su.cdp]
    source_x = header[segyio.su.sx]
    group_x = header[segyio.su.gx]
    offset_dist = header[segyio.su.offset]
    
    print(f"Trace: IL={inline}, XL={crossline}, CDP={cdp_number}")
    print(f"Geometry: SX={source_x}, GX={group_x}, Offset={offset_dist}")
    
    # Sample count and interval
    samples = header[segyio.su.ns]
    sample_rate = header[segyio.su.dt]  # microseconds
    delay_time = header[segyio.su.delrt]  # milliseconds
    
    print(f"Timing: {samples} samples, {sample_rate/1000}ms rate, {delay_time}ms delay")

Extended SU Field Aliases

Complete set of Seismic Unix trace header field aliases for comprehensive compatibility.

# Additional trace header aliases
segyio.su.nvs      # Number of vertically summed traces
segyio.su.nhs      # Number of horizontally stacked traces  
segyio.su.duse     # Data use (production/test)
segyio.su.gelev    # Receiver group elevation
segyio.su.selev    # Source elevation
segyio.su.sdepth   # Source depth below surface
segyio.su.gdel     # Datum elevation at receiver group
segyio.su.sdel     # Datum elevation at source
segyio.su.swdep    # Water depth at source
segyio.su.gwdep    # Water depth at receiver group
segyio.su.scalel   # Elevation scalar
segyio.su.scalco   # Coordinate scalar
segyio.su.counit   # Coordinate units
segyio.su.wevel    # Weathering velocity
segyio.su.swevel   # Subweathering velocity
segyio.su.sut      # Uphole time at source (ms)
segyio.su.gut      # Uphole time at receiver group (ms)
segyio.su.sstat    # Source static correction (ms)
segyio.su.gstat    # Group static correction (ms)
segyio.su.tstat    # Total static correction (ms)
segyio.su.laga     # Lag time A (ms)
segyio.su.lagb     # Lag time B (ms)
segyio.su.muts     # Mute time start (ms)
segyio.su.mute     # Mute time end (ms)
segyio.su.gain     # Gain type of field instruments
segyio.su.igc      # Instrument gain constant
segyio.su.igi      # Instrument early or initial gain
segyio.su.corr     # Correlated data traces
segyio.su.year     # Year data recorded
segyio.su.day      # Day of year
segyio.su.hour     # Hour of day (24 hour clock)
segyio.su.minute   # Minute of hour  
segyio.su.sec      # Second of minute
segyio.su.timbas   # Time basis code

Binary Header SU Aliases

Seismic Unix aliases for binary header fields.

# Binary header field aliases
segyio.su.jobid    # BinField.JobID - Job identification number
segyio.su.lino     # BinField.LineNumber - Line number  
segyio.su.reno     # BinField.ReelNumber - Reel number
segyio.su.ntrpr    # BinField.Traces - Number of data traces per ensemble
segyio.su.nart     # BinField.AuxTraces - Number of auxiliary traces per ensemble
segyio.su.hdt      # BinField.Interval - Sample interval in microseconds
segyio.su.dto      # BinField.IntervalOriginal - Original sample interval  
segyio.su.hns      # BinField.Samples - Number of samples per data trace
segyio.su.nso      # BinField.SamplesOriginal - Original number of samples
segyio.su.format   # BinField.Format - Data sample format code
segyio.su.fold     # BinField.EnsembleFold - Ensemble fold
segyio.su.tsort    # BinField.SortingCode - Trace sorting code
segyio.su.rev      # BinField.SEGYRevision - SEG Y format revision number
segyio.su.trflag   # BinField.TraceFlag - Fixed length trace flag
segyio.su.exth     # BinField.ExtendedHeaders - Number of extended headers

Usage Example:

import segyio
import segyio.su

with segyio.open('data.sgy') as f:
    # Read binary header using SU field names
    job_id = f.bin[segyio.su.jobid]
    line_num = f.bin[segyio.su.lino]
    trace_count = f.bin[segyio.su.ntrpr]
    sample_count = f.bin[segyio.su.hns]
    sample_rate = f.bin[segyio.su.hdt]
    data_format = f.bin[segyio.su.format]
    
    print(f"Job {job_id}, Line {line_num}")
    print(f"{trace_count} traces, {sample_count} samples each")
    print(f"Sample rate: {sample_rate/1000} ms")
    print(f"Format code: {data_format}")

SU File Opening

Specialized file opening function for Seismic Unix format files with optimized defaults.

segyio.su.open(filename, mode='r', iline=189, xline=193, strict=True, ignore_geometry=False, endian='big')
    """
    Open seismic unix file with SU-specific defaults.
    
    Parameters:
    - filename (str): Path to SU file
    - mode (str): File access mode ('r' or 'r+')
    - iline (int): Inline header field, default 189
    - xline (int): Crossline header field, default 193
    - strict (bool): Abort if geometry cannot be inferred, default True
    - ignore_geometry (bool): Skip geometry building, default False
    - endian (str): File endianness, default 'big'
    
    Returns:
    segyio.su.sufile: Specialized SegyFile subclass with SU enhancements
    """

Usage Example:

import segyio.su

# Open SU file with SU-specific handling
with segyio.su.open('data.su') as f:
    # Access using SU field names
    for i in range(min(10, f.tracecount)):
        header = f.header[i]
        
        # Common SU workflow
        cdp = header[segyio.su.cdp]
        offset = header[segyio.su.offset]
        sx = header[segyio.su.sx]
        gx = header[segyio.su.gx]
        
        # Calculate midpoint
        mx = (sx + gx) / 2
        
        print(f"Trace {i}: CDP={cdp}, Offset={offset}, MX={mx}")

SU Workflow Examples

CDP Sorting and Analysis

import segyio.su
import numpy as np

def analyze_cdp_gather(filename, target_cdp):
    """Analyze a specific CDP gather using SU field names."""
    
    with segyio.su.open(filename) as f:
        # Collect the traces (and their offsets) belonging to the target CDP
        gather_traces = []
        gather_offsets = []
        
        for idx in range(f.tracecount):
            hdr = f.header[idx]
            if hdr[segyio.su.cdp] != target_cdp:
                continue
            gather_traces.append(f.trace[idx])
            gather_offsets.append(hdr[segyio.su.offset])
        
        if not gather_traces:
            print(f"No traces found for CDP {target_cdp}")
            return None
        
        # Reorder the gather by increasing offset
        order = np.argsort(gather_offsets)
        traces_by_offset = [gather_traces[j] for j in order]
        offsets_by_offset = [gather_offsets[j] for j in order]
        
        # Stack the sorted traces into a 2D gather array
        return {
            'cdp': target_cdp,
            'traces': np.array(traces_by_offset),
            'offsets': offsets_by_offset,
            'fold': len(traces_by_offset)
        }

# Usage
gather_info = analyze_cdp_gather('prestack.su', 1000)
if gather_info:
    print(f"CDP {gather_info['cdp']}: {gather_info['fold']} traces")
    print(f"Offset range: {min(gather_info['offsets'])} to {max(gather_info['offsets'])}")

Source-Receiver Geometry Analysis

import segyio.su
import numpy as np
import matplotlib.pyplot as plt

def plot_acquisition_geometry(filename, max_traces=1000):
    """Plot source and receiver positions using SU field names.
    
    Parameters:
    - filename (str): Path to the SU file to read
    - max_traces (int): Upper bound on the number of traces scanned
    """
    
    sources_x, sources_y = [], []
    receivers_x, receivers_y = [], []
    
    with segyio.su.open(filename) as f:
        coord_scalar = None
        
        for i in range(min(max_traces, f.tracecount)):
            header = f.header[i]
            
            # Raw (unscaled) source and receiver group coordinates
            sx = header[segyio.su.sx]
            sy = header[segyio.su.sy]
            gx = header[segyio.su.gx]
            gy = header[segyio.su.gy]
            
            # Coordinate scalar is assumed constant across traces; read once
            # TODO confirm this holds for the input files in use
            if coord_scalar is None:
                coord_scalar = header[segyio.su.scalco]
            
            sources_x.append(sx)
            sources_y.append(sy)
            receivers_x.append(gx)
            receivers_y.append(gy)
    
    # SEG-Y scalar convention: negative divides, positive multiplies.
    # A scalar of 0 (or no traces read, leaving coord_scalar as None) means
    # "no scaling" — the original code crashed on None (`None < 0`) and left
    # plain lists unscaled; use scale=1.0 so the output is always an ndarray.
    if coord_scalar is None or coord_scalar == 0:
        scale = 1.0
    elif coord_scalar < 0:
        scale = -1.0 / coord_scalar
    else:
        scale = float(coord_scalar)
    
    sources_x = np.asarray(sources_x, dtype=float) * scale
    sources_y = np.asarray(sources_y, dtype=float) * scale
    receivers_x = np.asarray(receivers_x, dtype=float) * scale
    receivers_y = np.asarray(receivers_y, dtype=float) * scale
    
    # Plot geometry
    plt.figure(figsize=(12, 8))
    plt.scatter(sources_x, sources_y, c='red', marker='*', s=20, alpha=0.6, label='Sources')
    plt.scatter(receivers_x, receivers_y, c='blue', marker='v', s=10, alpha=0.6, label='Receivers')
    plt.xlabel('X Coordinate')
    plt.ylabel('Y Coordinate')
    plt.title('Acquisition Geometry')
    plt.legend()
    plt.grid(True, alpha=0.3)
    plt.axis('equal')
    plt.show()

# Usage
plot_acquisition_geometry('marine_survey.su')

Trace Attribute Extraction

import segyio.su
import numpy as np

def extract_trace_attributes(filename, output_file):
    """Extract trace attributes using SU field names."""
    
    records = []
    
    with segyio.su.open(filename) as f:
        for idx in range(f.tracecount):
            hdr = f.header[idx]
            data = f.trace[idx]
            
            # Raw SU header fields of interest, looked up by alias name
            rec = {'trace_num': idx}
            for name in ('tracl', 'fldr', 'cdp', 'offset',
                         'sx', 'sy', 'gx', 'gy', 'ns', 'dt', 'delrt'):
                rec[name] = hdr[getattr(segyio.su, name)]
            
            # Derived geometry attributes (midpoint and source->receiver azimuth)
            rec['midpoint_x'] = (rec['sx'] + rec['gx']) / 2
            rec['midpoint_y'] = (rec['sy'] + rec['gy']) / 2
            rec['azimuth'] = np.arctan2(rec['gy'] - rec['sy'],
                                      rec['gx'] - rec['sx']) * 180 / np.pi
            
            # Amplitude statistics of the trace samples
            rec['max_amp'] = float(data.max())
            rec['min_amp'] = float(data.min())
            rec['rms_amp'] = float(np.sqrt(np.mean(data**2)))
            rec['zero_crossings'] = int(np.sum(np.diff(np.sign(data)) != 0))
            
            records.append(rec)
    
    # Persist as CSV via pandas
    import pandas as pd
    df = pd.DataFrame(records)
    df.to_csv(output_file, index=False)
    
    return df

# Usage
attrs = extract_trace_attributes('survey.su', 'trace_attributes.csv')
print(f"Extracted attributes for {len(attrs)} traces")
print(attrs.describe())

Migration from Pure SU Workflows

Field Name Mapping

# Traditional SU approach (numeric byte positions)
old_style = {
    'inline': 189,
    'crossline': 193,
    'cdp': 21,
    'offset': 37
}

# New SU-compatible approach: named aliases resolve to the same byte offsets
import segyio.su
new_style = {
    'inline': segyio.su.iline,
    'crossline': segyio.su.xline, 
    'cdp': segyio.su.cdp,
    'offset': segyio.su.offset
}

# Both approaches work identically
with segyio.open('data.sgy') as f:
    header = f.header[0]
    
    # These are equivalent: the alias segyio.su.cdp is the byte position 21
    old_cdp = header[21]
    new_cdp = header[segyio.su.cdp]
    
    assert old_cdp == new_cdp

Common SU Processing Patterns

import segyio.su
import numpy as np

def su_style_processing(filename):
    """Common SU-style processing workflow.
    
    Opens *filename*, prints segyread-style file statistics, then loops
    over every trace applying a simple far-offset gain. Note: the file is
    opened in the default read-only mode, so the write-back branch only
    runs if the open mode is changed to 'r+'.
    """
    
    with segyio.su.open(filename) as f:
        # Print file statistics (SU segyread style)
        # Fixed: was print(f"File: (unknown)"), an f-string with no placeholder
        print(f"File: {filename}")
        print(f"Traces: {f.tracecount}")
        print(f"Samples: {f.bin[segyio.su.hns]}")
        print(f"Sample rate: {f.bin[segyio.su.hdt]/1000} ms")
        
        # Trace loop (SU style)
        for i in range(f.tracecount):
            header = f.header[i]
            trace = f.trace[i]
            
            # Only the offset field drives the processing below; other SU
            # fields (tracl, cdp, ns, dt) were extracted but never used
            offset = header[segyio.su.offset]
            
            # Apply typical SU processing
            if offset > 5000:  # Far offset gain correction
                trace *= 2.0
            
            # Update trace (only possible when file opened writable)
            if not f.readonly:
                f.trace[i] = trace

Install with Tessl CLI

npx tessl i tessl/pypi-segyio

docs

data-access.md

file-operations.md

header-access.md

index.md

seismic-unix.md

utilities.md

tile.json