CtrlK
Blog · Docs · Log in · Get started
Tessl Logo

tessl/pypi-mne

MNE-Python provides comprehensive tools for analyzing MEG, EEG, and other neuroimaging data with advanced source estimation and connectivity analysis.

Pending
Overview
Eval results
Files

docs/visualization.md

Visualization

Comprehensive visualization tools including interactive data browsers, topographic maps, 3D brain visualization, and publication-quality plotting functions for all MNE data types.

Capabilities

Raw Data Visualization

Interactive browsers and plotting functions for continuous neuroimaging data.

def plot_raw(raw: Raw, events: Optional[ArrayLike] = None, duration: float = 10.0,
            start: float = 0.0, n_channels: int = 20, bgcolor: str = 'w',
            color: Optional[Dict] = None, bad_color: Union[str, Tuple[float, float, float]] = (0.8, 0.8, 0.8),
            event_color: str = 'cyan', scalings: Optional[Union[Dict, str]] = None,
            title: Optional[str] = None, xlabel: str = 'Time (s)', ylabel: str = 'Channel',
            order: Optional[List] = None, show_options: bool = False,
            show_first_samp: bool = False, show_scrollbars: bool = True,
            time_format: str = 'float', precompute: Optional[Union[bool, str]] = None,
            use_opengl: Optional[bool] = None, theme: Optional[str] = None,
            overview_mode: Optional[str] = None, splash: bool = True,
            verbose: Optional[Union[bool, str, int]] = None) -> Figure:
    """
    Plot raw data in an interactive browser.
    
    Parameters:
    - raw: Raw data instance
    - events: Events to overlay
    - duration: Time window duration (seconds)
    - start: Initial start time (seconds)
    - n_channels: Number of channels to display
    - bgcolor: Background color
    - color: Color mapping for channel types
    - bad_color: Color for bad channels (a color name or an RGB tuple; the
      default is an RGB tuple, hence the widened annotation)
    - event_color: Color for events
    - scalings: Scaling factors for channel types ('auto' or a dict keyed by
      channel type)
    - title: Window title
    - xlabel: X-axis label
    - ylabel: Y-axis label
    - order: Channel display order
    - show_options: Show options dialog
    - show_first_samp: Show first sample index
    - show_scrollbars: Show scrollbars
    - time_format: Time display format
    - precompute: Precompute PSD
    - use_opengl: Use OpenGL rendering
    - theme: UI theme
    - overview_mode: Overview display mode
    - splash: Show splash screen
    - verbose: Verbosity level
    
    Returns:
    Figure object with interactive browser
    """

Epoched Data Visualization

Plotting functions for event-related data analysis and comparison.

def plot_epochs(epochs: Epochs, epoch_idx: Optional[Union[int, List[int]]] = None,
               picks: Optional[Union[str, List]] = None, scalings: Optional[Dict] = None,
               n_epochs: int = 20, n_channels: int = 20, title: Optional[str] = None,
               events: Optional[ArrayLike] = None, event_colors: Optional[Dict] = None,
               order: Optional[List] = None, show: bool = True, block: bool = False,
               decim: Union[int, str] = 'auto', noise_cov: Optional[Covariance] = None,
               butterfly: bool = False, show_scrollbars: bool = True,
               overview_mode: Optional[str] = None, splash: bool = True,
               verbose: Optional[Union[bool, str, int]] = None) -> Figure:
    """
    Plot epochs in an interactive browser.
    
    Parameters:
    - epochs: Epochs data
    - epoch_idx: Epoch indices to display
    - picks: Channel selection
    - scalings: Scaling factors
    - n_epochs: Number of epochs to display
    - n_channels: Number of channels to display
    - title: Window title
    - events: Events to display
    - event_colors: Event color mapping
    - order: Channel display order
    - show: Show plot immediately
    - block: Block execution
    - decim: Decimation factor ('auto' or an integer factor; annotation widened
      to match)
    - noise_cov: Noise covariance for whitening
    - butterfly: Butterfly plot mode
    - show_scrollbars: Show scrollbars
    - overview_mode: Overview display mode
    - splash: Show splash screen
    - verbose: Verbosity level
    
    Returns:
    Figure object
    """

def plot_epochs_image(epochs: Epochs, picks: Optional[Union[str, List]] = None,
                     sigma: float = 0.0, vmin: Optional[float] = None, vmax: Optional[float] = None,
                     colorbar: bool = True, order: Optional[ArrayLike] = None,
                     show: bool = True, units: Optional[Dict] = None, scalings: Optional[Dict] = None,
                     cmap: Optional[str] = None, fig: Optional[Figure] = None,
                     axes: Optional[List] = None, overlay_times: Optional[ArrayLike] = None,
                     combine: Optional[str] = None, group_by: Optional[Dict] = None,
                     evoked: bool = True, ts_args: Optional[Dict] = None,
                     title: Optional[str] = None, clear: bool = False) -> Tuple[Figure, List]:
    """
    Plot epochs as image with time on x-axis and epochs on y-axis.
    
    Parameters:
    - epochs: Epochs data to display
    - picks: Channel selection
    - sigma: Smoothing strength across epochs; 0.0 disables smoothing
      (presumably the std. dev. of a Gaussian kernel — confirm against MNE docs)
    - vmin/vmax: Color scale limits
    - colorbar: Show colorbar
    - order: Order in which epochs are displayed
    - show: Show plot immediately
    - units: Unit labels per channel type
    - scalings: Scaling factors per channel type
    - cmap: Colormap
    - fig: Existing figure to draw into
    - axes: Existing axes to draw into
    - overlay_times: Times to overlay on the image (presumably one per epoch —
      confirm against MNE docs)
    - combine: How to combine data across picked channels
    - group_by: Grouping of channels into separate images
    - evoked: Also plot the evoked (average) trace
    - ts_args: Arguments forwarded to the evoked time-series subplot
    - title: Figure title
    - clear: Clear the axes before plotting
    
    Returns:
    Tuple of (figure, axes)
    """

Evoked Response Visualization

Specialized plotting for averaged event-related responses.

def plot_evoked(evoked: Union[Evoked, List[Evoked]], picks: Optional[Union[str, List]] = None,
               exclude: str = 'bads', unit: bool = True, show: bool = True,
               ylim: Optional[Dict] = None, xlim: str = 'tight', proj: bool = False,
               hline: Optional[ArrayLike] = None, units: Optional[Dict] = None,
               scalings: Optional[Dict] = None, titles: Optional[Union[str, List[str]]] = None,
               axes: Optional[Union[matplotlib.axes.Axes, List]] = None, gfp: Union[bool, str] = False,
               window_title: Optional[str] = None, spatial_colors: Union[bool, str] = False,
               zorder: str = 'unsorted', selectable: bool = True, noise_cov: Optional[Covariance] = None,
               time_unit: str = 's', verbose: Optional[Union[bool, str, int]] = None) -> Figure:
    """
    Plot evoked response time series.
    
    Parameters:
    - evoked: Evoked instance or list of instances to plot
    - picks: Channel selection
    - exclude: Channels to exclude ('bads' excludes channels marked bad)
    - unit: Presumably scale data into conventional units — confirm against MNE docs
    - show: Show plot immediately
    - ylim: Y-axis limits per channel type
    - xlim: X-axis limits ('tight' fits the data range)
    - proj: Apply SSP projections before plotting
    - hline: Y-values at which horizontal lines are drawn
    - units: Unit labels per channel type
    - scalings: Scaling factors per channel type
    - titles: Titles per subplot
    - axes: Matplotlib axes to plot on
    - gfp: Plot global field power (bool, or a string mode)
    - window_title: Window title
    - spatial_colors: Color traces by sensor location (bool, or a string mode)
    - zorder: Drawing order of the channel traces
    - selectable: Allow interactive selection in the figure
    - noise_cov: Noise covariance for whitening
    - time_unit: Time unit for the x-axis
    - verbose: Verbosity level
    
    Returns:
    Figure object
    """

def plot_evoked_joint(evoked: Evoked, times: Union[str, ArrayLike] = 'peaks',
                     title: str = '', picks: Optional[Union[str, List]] = None,
                     exclude: str = 'bads', show: bool = True, ts_args: Optional[Dict] = None,
                     topomap_args: Optional[Dict] = None, verbose: Optional[Union[bool, str, int]] = None) -> Figure:
    """
    Plot evoked response with topographic maps at specific time points.
    
    Parameters:
    - evoked: Evoked instance to plot
    - times: Time points for the topomaps ('peaks' selects peak latencies)
    - title: Figure title
    - picks: Channel selection
    - exclude: Channels to exclude ('bads' excludes channels marked bad)
    - show: Show plot immediately
    - ts_args: Arguments forwarded to the time-series plot
    - topomap_args: Arguments forwarded to the topomap plot
    - verbose: Verbosity level
    
    Returns:
    Figure object
    """

Topographic Mapping

Create topographic maps showing spatial distribution of neural activity.

def plot_topomap(data: ArrayLike, info: Info, picks: Optional[Union[str, List]] = None,
                axes: Optional[matplotlib.axes.Axes] = None, ch_type: str = 'eeg',
                sensors: Union[bool, str] = True, show_names: Union[bool, callable] = False,
                mask: Optional[ArrayLike] = None, mask_params: Optional[Dict] = None,
                contours: int = 6, outlines: str = 'head', sphere: Optional[float] = None,
                image_interp: str = 'bilinear', extrapolate: str = 'auto',
                border: str = 'mean', res: int = 64, size: int = 1,
                cmap: Optional[str] = None, vlim: Tuple[Optional[float], Optional[float]] = (None, None),
                cnorm: Optional[str] = None, colorbar: bool = True, cbar_fmt: str = '%3.1f',
                units: Optional[str] = None, show: bool = True,
                verbose: Optional[Union[bool, str, int]] = None) -> Tuple[Figure, matplotlib.axes.Axes]:
    """
    Plot topographic map of data values.
    
    Parameters:
    - data: Data values for each channel
    - info: Measurement info with channel locations
    - picks: Channel selection
    - axes: Matplotlib axes to plot on
    - ch_type: Channel type for layout
    - sensors: Show sensor locations
    - show_names: Show channel names (bool, or a callable that formats names)
    - mask: Mask for statistical significance
    - mask_params: Mask visualization parameters
    - contours: Number of contour lines
    - outlines: Head outline style
    - sphere: Sphere radius for projection
    - image_interp: Image interpolation method
    - extrapolate: Extrapolation method
    - border: Border handling
    - res: Image resolution (presumably the interpolated image is res x res
      pixels — confirm against MNE docs)
    - size: Figure size multiplier
    - cmap: Colormap
    - vlim: Value limits for colormap ((None, None) derives limits from the data)
    - cnorm: Color normalization
    - colorbar: Show colorbar
    - cbar_fmt: Colorbar format string
    - units: Units for colorbar
    - show: Show plot immediately
    - verbose: Verbosity level
    
    Returns:
    Tuple of (figure, axes)
    """

def plot_evoked_topomap(evoked: Evoked, times: Union[str, float, ArrayLike] = 'auto',
                       ch_type: Optional[str] = None, vmin: Optional[float] = None,
                       vmax: Optional[float] = None, cmap: Optional[str] = None,
                       sensors: Union[bool, str] = True, colorbar: bool = True,
                       scalings: Optional[Dict] = None, units: Optional[str] = None,
                       res: int = 64, size: int = 1, cbar_fmt: str = '%3.1f',
                       time_unit: str = 's', time_format: Optional[str] = None,
                       proj: bool = False, show: bool = True, show_names: Union[bool, callable] = False,
                       title: Optional[str] = None, mask: Optional[ArrayLike] = None,
                       mask_params: Optional[Dict] = None, axes: Optional[List] = None,
                       extrapolate: str = 'auto', sphere: Optional[float] = None,
                       border: str = 'mean', nrows: str = 'auto', ncols: str = 'auto',
                       verbose: Optional[Union[bool, str, int]] = None) -> Figure:
    """
    Plot topographic maps of evoked data at multiple time points.
    
    Parameters:
    - evoked: Evoked instance to plot
    - times: Time points to plot ('auto', a single time, or an array of times;
      annotation widened to cover the 'auto' default)
    - ch_type: Channel type for the layout
    - vmin/vmax: Color scale limits
    - cmap: Colormap
    - sensors: Show sensor locations
    - colorbar: Show colorbar
    - scalings: Scaling factors per channel type
    - units: Units label for the colorbar
    - res: Image resolution
    - size: Figure size multiplier
    - cbar_fmt: Colorbar format string
    - time_unit: Time unit used for labels
    - time_format: Format string for time labels
    - proj: Apply SSP projections before plotting
    - show: Show plot immediately
    - show_names: Show channel names (bool, or a callable that formats names)
    - title: Figure title
    - mask: Mask for statistical significance
    - mask_params: Mask visualization parameters
    - axes: Matplotlib axes to plot on
    - extrapolate: Extrapolation method
    - sphere: Sphere specification for projection
    - border: Border handling
    - nrows/ncols: Subplot grid layout ('auto' chooses automatically)
    - verbose: Verbosity level
    
    Returns:
    Figure object
    """

3D Brain Visualization

Interactive 3D visualization of source estimates on brain surfaces.

class Brain:
    """Interactive 3D brain visualization."""
    
    def __init__(self, subject: str, hemi: str, surf: str, title: Optional[str] = None,
                cortex: Union[str, Tuple] = 'classic', alpha: float = 1.0,
                size: Union[int, Tuple[int, int]] = 800, background: Union[str, Tuple] = 'black',
                foreground: Optional[Union[str, Tuple]] = None, figure: Optional[Figure] = None,
                subjects_dir: Optional[str] = None, views: Union[str, List[str]] = 'lat',
                offset: bool = True, show_toolbar: bool = False, offscreen: bool = False,
                interaction: str = 'trackball', units: str = 'm', view_layout: str = 'vertical',
                silhouette: Union[bool, Dict] = False, verbose: Optional[Union[bool, str, int]] = None):
        """
        Initialize Brain visualization.
        
        Parameters:
        - subject: Subject name
        - hemi: Hemisphere ('lh', 'rh', 'both', 'split')
        - surf: Surface type ('pial', 'white', 'inflated')
        - title: Window title
        - cortex: Cortex color scheme
        - alpha: Surface transparency
        - size: Figure size
        - background: Background color
        - foreground: Foreground color
        - figure: Existing figure to use
        - subjects_dir: FreeSurfer subjects directory
        - views: View angles
        - offset: Apply view offset
        - show_toolbar: Show toolbar
        - offscreen: Render offscreen
        - interaction: Interaction mode
        - units: Spatial units
        - view_layout: Layout of multiple views
        - silhouette: Show silhouette
        - verbose: Verbosity level
        """
    
    # NOTE: the return annotation is quoted because `Brain` is not yet bound
    # while the class body executes; an unquoted `-> Brain` raises NameError
    # at class-creation time.
    def add_data(self, array: ArrayLike, fmin: Optional[float] = None, fmid: Optional[float] = None,
                fmax: Optional[float] = None, thresh: Optional[float] = None,
                center: Optional[float] = None, transparent: bool = False, colormap: str = 'auto',
                alpha: float = 1.0, vertices: Optional[ArrayLike] = None, smoothing_steps: int = 10,
                time: Optional[ArrayLike] = None, time_label: str = 'auto', colorbar: bool = True,
                hemi: Optional[str] = None, remove_existing: bool = False, time_label_size: Optional[int] = None,
                initial_time: Optional[float] = None, scale_factor: Optional[float] = None,
                vector_alpha: Optional[float] = None, clim: Optional[Dict] = None,
                src: Optional[SourceSpaces] = None, volume_options: float = 0.4,
                colorbar_kwargs: Optional[Dict] = None, verbose: Optional[Union[bool, str, int]] = None) -> "Brain":
        """
        Add data to brain visualization.
        
        Parameters:
        - array: Data array to visualize
        - fmin: Minimum value for colormap
        - fmid: Middle value for colormap
        - fmax: Maximum value for colormap
        - thresh: Threshold below which data is transparent
        - center: Center value for colormap
        - transparent: Use transparency
        - colormap: Colormap name
        - alpha: Data transparency
        - vertices: Vertex indices for data
        - smoothing_steps: Number of smoothing steps
        - time: Time array for dynamic data
        - time_label: Time label format
        - colorbar: Show colorbar
        - hemi: Hemisphere for data
        - remove_existing: Remove existing data
        - time_label_size: Time label size
        - initial_time: Initial time point
        - scale_factor: Scaling factor
        - vector_alpha: Vector transparency
        - clim: Color limits dictionary
        - src: Source space for volume data
        - volume_options: Volume rendering options
        - colorbar_kwargs: Colorbar customization
        - verbose: Verbosity level
        
        Returns:
        Self for method chaining
        """
    
    # NOTE: forward reference quoted for the same reason as in add_data.
    def show_view(self, view: Union[str, Dict], roll: Optional[float] = None,
                 distance: Optional[float] = None, focalpoint: Optional[Tuple] = None,
                 hemi: Optional[str] = None) -> "Brain":
        """
        Show specific view of brain.
        
        Parameters:
        - view: View specification
        - roll: Roll angle
        - distance: Camera distance
        - focalpoint: Camera focal point
        - hemi: Hemisphere to show
        
        Returns:
        Self for method chaining
        """
    
    def screenshot(self, filename: Optional[str] = None, mode: str = 'rgb',
                  time_viewer: bool = False) -> ArrayLike:
        """
        Take screenshot of brain visualization.
        
        Parameters:
        - filename: Output filename
        - mode: Color mode
        - time_viewer: Include time viewer
        
        Returns:
        Screenshot array
        """

def plot_source_estimates(stc: Union[SourceEstimate, List[SourceEstimate]], subject: Optional[str] = None,
                         surface: str = 'inflated', hemi: str = 'lh', colormap: str = 'auto',
                         time_label: str = 'auto', smoothing_steps: int = 10, transparent: Optional[bool] = None,
                         alpha: float = 1.0, time_viewer: Union[bool, str] = 'auto',
                         subjects_dir: Optional[str] = None, figure: Optional[Figure] = None,
                         views: Union[str, List] = 'lat', colorbar: bool = True, clim: str = 'auto',
                         cortex: Union[str, Tuple] = 'classic', size: Union[int, Tuple] = 800,
                         background: Union[str, Tuple] = 'black', foreground: Optional[Union[str, Tuple]] = None,
                         initial_time: Optional[float] = None, time_unit: str = 's', backend: str = 'auto',
                         spacing: str = 'oct6', title: Optional[str] = None, show_traces: Union[bool, str] = 'auto',
                         src: Optional[SourceSpaces] = None, volume_options: float = 0.4,
                         view_layout: str = 'vertical', add_data_kwargs: Optional[Dict] = None,
                         brain_kwargs: Optional[Dict] = None, verbose: Optional[Union[bool, str, int]] = None) -> Brain:
    """
    Plot source estimates on brain surface.
    
    Parameters:
    - stc: Source estimate (or list of estimates) to visualize
    - subject: Subject name (presumably inferred from stc when None — confirm)
    - surface: Cortical surface to display (e.g. 'inflated', 'pial', 'white')
    - hemi: Hemisphere to display
    - colormap: Colormap ('auto' selects based on the data)
    - time_label: Time label format
    - smoothing_steps: Number of smoothing steps for the data
    - transparent: Use transparency below threshold
    - alpha: Surface transparency
    - time_viewer: Show interactive time viewer ('auto' decides)
    - subjects_dir: FreeSurfer subjects directory
    - figure: Existing figure to use
    - views: View angle(s)
    - colorbar: Show colorbar
    - clim: Color limits ('auto'; MNE also accepts a dict — confirm, the
      annotation here only admits str)
    - cortex / size / background / foreground: Appearance options forwarded to Brain
    - initial_time: Initial time point
    - time_unit: Time unit
    - backend: Rendering backend ('auto' selects an available one)
    - spacing: Source space spacing
    - title: Window title
    - show_traces: Show time-course traces
    - src: Source space for volume data
    - volume_options: Volume rendering options
    - view_layout: Layout of multiple views
    - add_data_kwargs: Extra arguments forwarded to Brain.add_data
    - brain_kwargs: Extra arguments forwarded to the Brain constructor
    - verbose: Verbosity level
    
    Returns:
    Brain visualization object
    """

Sensor and Layout Visualization

Display sensor locations and channel layouts.

def plot_sensors(info: Info, kind: str = '3d', ch_type: Optional[str] = None,
                title: Optional[str] = None, show_names: Union[bool, List] = False,
                ch_groups: Optional[Union[str, Dict]] = None, to_sphere: bool = True,
                axes: Optional[matplotlib.axes.Axes] = None, block: bool = False,
                show: bool = True, sphere: Optional[Union[float, str, ArrayLike]] = None,
                verbose: Optional[Union[bool, str, int]] = None) -> Figure:
    """
    Plot sensor locations.
    
    Parameters:
    - info: Measurement info
    - kind: Plot type ('3d', 'topomap', 'select')
    - ch_type: Channel type to plot
    - title: Plot title
    - show_names: Show channel names (bool, or a list of names to show)
    - ch_groups: Channel groupings
    - to_sphere: Project to sphere
    - axes: Matplotlib axes
    - block: Block execution
    - show: Show plot
    - sphere: Sphere specification
    - verbose: Verbosity level
    
    Note: with kind='select', the figure presumably supports interactive
    channel selection — confirm against MNE docs.
    
    Returns:
    Figure object
    """

def plot_layout(layout: Layout, picks: Optional[Union[str, List]] = None,
               axes: Optional[matplotlib.axes.Axes] = None, show: bool = True) -> Figure:
    """
    Plot channel layout.
    
    Draws the 2D positions stored in the Layout object.
    
    Parameters:
    - layout: Layout object
    - picks: Channel selection
    - axes: Matplotlib axes
    - show: Show plot
    
    Returns:
    Figure object
    """

Usage Examples

Interactive Raw Data Browser

import mne

# Load raw data (preload=True reads the full data into memory up front)
raw = mne.io.read_raw_fif('sample_audvis_raw.fif', preload=True)

# Plot interactive browser
raw.plot(duration=30, n_channels=20, scalings='auto', 
         title='Raw MEG/EEG Data', show_scrollbars=True)

# Plot power spectral density
# NOTE(review): raw.plot_psd() is deprecated in recent MNE releases in
# favor of raw.compute_psd().plot() — confirm the MNE version this doc targets.
raw.plot_psd(fmax=50, picks='eeg', average=True)

Evoked Response Visualization

import mne
import numpy as np  # fixed: np was used below without being imported

# Load evoked data (first condition stored in the file).
# Fixed: filename extension corrected from '.fiv' to '.fif' (FIF format).
evoked = mne.read_evokeds('sample_audvis-ave.fif')[0]

# Plot time series for EEG channels, restricted to 0-0.5 s
evoked.plot(picks='eeg', xlim=[0, 0.5])

# Plot joint plot with topomaps at selected latencies
evoked.plot_joint(times=[0.1, 0.15, 0.2], title='Auditory Response')

# Plot topographic maps at multiple times
evoked.plot_topomap(times=np.linspace(0.05, 0.3, 8), ch_type='eeg')

3D Source Visualization

import mne

# FreeSurfer subjects directory.
# Fixed: `subjects_dir` was used below but never defined; derive it from the
# MNE sample dataset (replace with your own path as needed).
subjects_dir = mne.datasets.sample.data_path() / 'subjects'

# Load source estimate
stc = mne.read_source_estimate('sample-stc')

# Plot on inflated brain surface
brain = stc.plot(subject='sample', subjects_dir=subjects_dir,
                 hemi='both', views=['lat', 'med'],
                 initial_time=0.1, time_viewer=True)

# Show specific views
brain.show_view('lateral')
brain.show_view('medial')

# Take screenshot
brain.screenshot('source_plot.png')

Custom Topographic Plot

import mne
import numpy as np
import matplotlib.pyplot as plt  # fixed: plt was used below without being imported

# Load evoked data.
# Fixed: filename extension corrected from '.fiv' to '.fif' (FIF format).
evoked = mne.read_evokeds('sample_audvis-ave.fif')[0]

# Extract data at the sample closest to 100 ms
time_idx = np.argmin(np.abs(evoked.times - 0.1))
data = evoked.data[:, time_idx]

# Create custom topomap on our own axes
fig, ax = plt.subplots(figsize=(6, 6))
im, _ = mne.viz.plot_topomap(data, evoked.info, axes=ax,
                             show=False, contours=6, cmap='RdBu_r')
ax.set_title('Activity at 100ms', fontsize=14)
plt.colorbar(im, ax=ax)
plt.show()

Types

import numpy as np  # fixed: np was referenced in ArrayLike without being imported
import matplotlib.pyplot as plt
import matplotlib.axes
from typing import Union, Optional, List, Dict, Tuple, Any

# Type aliases used throughout the signatures above.
Figure = plt.Figure
ArrayLike = Union[np.ndarray, List, Tuple]

Install with Tessl CLI

npx tessl i tessl/pypi-mne

docs

data-io.md

datasets.md

index.md

machine-learning.md

preprocessing.md

source-analysis.md

statistics.md

time-frequency.md

visualization.md

tile.json