0
# Visualization
1
2
Comprehensive visualization tools including interactive data browsers, topographic maps, 3D brain visualization, and publication-quality plotting functions for all MNE data types.
3
4
## Capabilities
5
6
### Raw Data Visualization
7
8
Interactive browsers and plotting functions for continuous neuroimaging data.
9
10
```python { .api }
11
def plot_raw(raw: Raw, events: Optional[ArrayLike] = None, duration: float = 10.0,
             start: float = 0.0, n_channels: int = 20, bgcolor: str = 'w',
             color: Optional[Dict] = None,
             bad_color: Union[str, Tuple[float, float, float]] = (0.8, 0.8, 0.8),
             event_color: str = 'cyan', scalings: Optional[Union[Dict, str]] = None,
             title: Optional[str] = None, xlabel: str = 'Time (s)', ylabel: str = 'Channel',
             order: Optional[List] = None, show_options: bool = False,
             show_first_samp: bool = False, show_scrollbars: bool = True,
             time_format: str = 'float', precompute: Optional[Union[bool, str]] = None,
             use_opengl: Optional[bool] = None, theme: Optional[str] = None,
             overview_mode: Optional[str] = None, splash: bool = True,
             verbose: Optional[Union[bool, str, int]] = None) -> Figure:
    """
    Plot raw data in an interactive browser.

    Parameters:
    - raw: Raw data instance
    - events: Events to overlay
    - duration: Time window duration
    - start: Initial start time
    - n_channels: Number of channels to display
    - bgcolor: Background color
    - color: Color mapping for channel types
    - bad_color: Color for bad channels (color name or RGB tuple)
    - event_color: Color for events
    - scalings: Scaling factors for channel types
    - title: Window title
    - xlabel: X-axis label
    - ylabel: Y-axis label
    - order: Channel display order
    - show_options: Show options dialog
    - show_first_samp: Show first sample index
    - show_scrollbars: Show scrollbars
    - time_format: Time display format
    - precompute: Precompute PSD
    - use_opengl: Use OpenGL rendering
    - theme: UI theme
    - overview_mode: Overview display mode
    - splash: Show splash screen
    - verbose: Verbosity level

    Returns:
    Figure object with interactive browser
    """
54
```
55
56
### Epoched Data Visualization
57
58
Plotting functions for event-related data analysis and comparison.
59
60
```python { .api }
61
def plot_epochs(epochs: Epochs, epoch_idx: Optional[Union[int, List[int]]] = None,
                picks: Optional[Union[str, List]] = None, scalings: Optional[Dict] = None,
                n_epochs: int = 20, n_channels: int = 20, title: Optional[str] = None,
                events: Optional[ArrayLike] = None, event_colors: Optional[Dict] = None,
                order: Optional[List] = None, show: bool = True, block: bool = False,
                decim: Union[int, str] = 'auto', noise_cov: Optional[Covariance] = None,
                butterfly: bool = False, show_scrollbars: bool = True,
                overview_mode: Optional[str] = None, splash: bool = True,
                verbose: Optional[Union[bool, str, int]] = None) -> Figure:
    """
    Plot epochs in an interactive browser.

    Parameters:
    - epochs: Epochs data
    - epoch_idx: Epoch indices to display
    - picks: Channel selection
    - scalings: Scaling factors
    - n_epochs: Number of epochs to display
    - n_channels: Number of channels to display
    - title: Window title
    - events: Events to display
    - event_colors: Event color mapping
    - order: Channel display order
    - show: Show plot immediately
    - block: Block execution
    - decim: Decimation factor ('auto' or an integer)
    - noise_cov: Noise covariance for whitening
    - butterfly: Butterfly plot mode
    - show_scrollbars: Show scrollbars
    - overview_mode: Overview display mode
    - splash: Show splash screen
    - verbose: Verbosity level

    Returns:
    Figure object
    """
97
98
def plot_epochs_image(epochs: Epochs, picks: Optional[Union[str, List]] = None,
                      sigma: float = 0.0, vmin: Optional[float] = None, vmax: Optional[float] = None,
                      colorbar: bool = True, order: Optional[ArrayLike] = None,
                      show: bool = True, units: Optional[Dict] = None, scalings: Optional[Dict] = None,
                      cmap: Optional[str] = None, fig: Optional[Figure] = None,
                      axes: Optional[List] = None, overlay_times: Optional[ArrayLike] = None,
                      combine: Optional[str] = None, group_by: Optional[Dict] = None,
                      evoked: bool = True, ts_args: Optional[Dict] = None,
                      title: Optional[str] = None, clear: bool = False) -> Tuple[Figure, List]:
    """
    Plot epochs as image with time on x-axis and epochs on y-axis.

    Parameters:
    - epochs: Epochs data
    - picks: Channel selection
    - sigma: Smoothing width (0 disables smoothing)
    - vmin, vmax: Color scale limits
    - colorbar: Show colorbar
    - order: Order in which epochs are plotted
    - show: Show plot immediately
    - units: Units for labeling
    - scalings: Scaling factors for channel types
    - cmap: Colormap
    - fig: Existing figure to plot into
    - axes: Axes to plot into
    - overlay_times: Per-epoch times to overlay as markers
    - combine: How to combine channels into one image
    - group_by: Channel grouping for separate images
    - evoked: Also plot the evoked average time series
    - ts_args: Arguments forwarded to the time-series subplot
    - title: Figure title
    - clear: Clear axes before plotting

    Returns:
    Tuple of (figure, axes)
    """
113
```
114
115
### Evoked Response Visualization
116
117
Specialized plotting for averaged event-related responses.
118
119
```python { .api }
120
def plot_evoked(evoked: Union[Evoked, List[Evoked]], picks: Optional[Union[str, List]] = None,
                exclude: str = 'bads', unit: bool = True, show: bool = True,
                ylim: Optional[Dict] = None, xlim: str = 'tight', proj: bool = False,
                hline: Optional[ArrayLike] = None, units: Optional[Dict] = None,
                scalings: Optional[Dict] = None, titles: Optional[Union[str, List[str]]] = None,
                axes: Optional[Union[matplotlib.axes.Axes, List]] = None, gfp: Union[bool, str] = False,
                window_title: Optional[str] = None, spatial_colors: Union[bool, str] = False,
                zorder: str = 'unsorted', selectable: bool = True, noise_cov: Optional[Covariance] = None,
                time_unit: str = 's', verbose: Optional[Union[bool, str, int]] = None) -> Figure:
    """
    Plot evoked response time series.

    Parameters:
    - evoked: Evoked instance or list of Evoked instances
    - picks: Channel selection
    - exclude: Channels to exclude ('bads' or explicit list)
    - unit: Scale data to conventional units
    - show: Show plot immediately
    - ylim: Y-axis limits per channel type
    - xlim: X-axis limits ('tight' or explicit limits)
    - proj: Apply projections before plotting
    - hline: Y-values at which to draw horizontal lines
    - units: Units for axis labels
    - scalings: Scaling factors for channel types
    - titles: Subplot titles
    - axes: Matplotlib axes to plot on
    - gfp: Overlay global field power
    - window_title: Window title
    - spatial_colors: Color traces by sensor location
    - zorder: Trace stacking order
    - selectable: Allow interactive channel selection
    - noise_cov: Noise covariance for whitening
    - time_unit: Unit for the time axis
    - verbose: Verbosity level

    Returns:
    Figure object
    """
135
136
def plot_evoked_joint(evoked: Evoked, times: Union[str, ArrayLike] = 'peaks',
                      title: str = '', picks: Optional[Union[str, List]] = None,
                      exclude: str = 'bads', show: bool = True, ts_args: Optional[Dict] = None,
                      topomap_args: Optional[Dict] = None, verbose: Optional[Union[bool, str, int]] = None) -> Figure:
    """
    Plot evoked response with topographic maps at specific time points.

    Parameters:
    - evoked: Evoked data
    - times: Time points for topomaps ('peaks' or explicit times)
    - title: Figure title
    - picks: Channel selection
    - exclude: Channels to exclude
    - show: Show plot immediately
    - ts_args: Arguments forwarded to the time-series plot
    - topomap_args: Arguments forwarded to the topomap plots
    - verbose: Verbosity level

    Returns:
    Figure object
    """
146
```
147
148
### Topographic Mapping
149
150
Create topographic maps showing spatial distribution of neural activity.
151
152
```python { .api }
153
def plot_topomap(data: ArrayLike, info: Info, picks: Optional[Union[str, List]] = None,
                 axes: Optional[matplotlib.axes.Axes] = None, ch_type: str = 'eeg',
                 sensors: Union[bool, str] = True, show_names: Union[bool, callable] = False,
                 mask: Optional[ArrayLike] = None, mask_params: Optional[Dict] = None,
                 contours: int = 6, outlines: str = 'head', sphere: Optional[float] = None,
                 image_interp: str = 'bilinear', extrapolate: str = 'auto',
                 border: str = 'mean', res: int = 64, size: int = 1,
                 cmap: Optional[str] = None, vlim: Tuple[Optional[float], Optional[float]] = (None, None),
                 cnorm: Optional[str] = None, colorbar: bool = True, cbar_fmt: str = '%3.1f',
                 units: Optional[str] = None, show: bool = True,
                 verbose: Optional[Union[bool, str, int]] = None) -> Tuple[Figure, matplotlib.axes.Axes]:
    """
    Plot topographic map of data values.

    Parameters:
    - data: Data values for each channel
    - info: Measurement info with channel locations
    - picks: Channel selection
    - axes: Matplotlib axes to plot on
    - ch_type: Channel type for layout
    - sensors: Show sensor locations
    - show_names: Show channel names (bool, or a callable that formats names)
    - mask: Mask for statistical significance
    - mask_params: Mask visualization parameters
    - contours: Number of contour lines
    - outlines: Head outline style
    - sphere: Sphere radius for projection
    - image_interp: Image interpolation method
    - extrapolate: Extrapolation method
    - border: Border handling
    - res: Image resolution
    - size: Figure size multiplier
    - cmap: Colormap
    - vlim: Value limits for colormap
    - cnorm: Color normalization
    - colorbar: Show colorbar
    - cbar_fmt: Colorbar format string
    - units: Units for colorbar
    - show: Show plot immediately
    - verbose: Verbosity level

    Returns:
    Tuple of (figure, axes)
    """
197
198
def plot_evoked_topomap(evoked: Evoked, times: Union[str, float, ArrayLike] = 'auto',
                        ch_type: Optional[str] = None, vmin: Optional[float] = None,
                        vmax: Optional[float] = None, cmap: Optional[str] = None,
                        sensors: Union[bool, str] = True, colorbar: bool = True,
                        scalings: Optional[Dict] = None, units: Optional[str] = None,
                        res: int = 64, size: int = 1, cbar_fmt: str = '%3.1f',
                        time_unit: str = 's', time_format: Optional[str] = None,
                        proj: bool = False, show: bool = True, show_names: Union[bool, callable] = False,
                        title: Optional[str] = None, mask: Optional[ArrayLike] = None,
                        mask_params: Optional[Dict] = None, axes: Optional[List] = None,
                        extrapolate: str = 'auto', sphere: Optional[float] = None,
                        border: str = 'mean', nrows: str = 'auto', ncols: str = 'auto',
                        verbose: Optional[Union[bool, str, int]] = None) -> Figure:
    """
    Plot topographic maps of evoked data at multiple time points.

    Parameters:
    - evoked: Evoked data
    - times: Time points to plot ('auto' or explicit times)
    - ch_type: Channel type for layout
    - vmin, vmax: Color scale limits
    - cmap: Colormap
    - sensors: Show sensor locations
    - colorbar: Show colorbar
    - scalings: Scaling factors for channel types
    - units: Units for colorbar
    - res: Image resolution
    - size: Figure size multiplier
    - cbar_fmt: Colorbar format string
    - time_unit, time_format: Time axis unit and label format
    - proj: Apply projections before plotting
    - show: Show plot immediately
    - show_names: Show channel names
    - title: Figure title
    - mask, mask_params: Significance mask and its styling
    - axes: Axes to plot on
    - extrapolate, sphere, border: Interpolation/outline options
    - nrows, ncols: Subplot grid layout ('auto' or counts)
    - verbose: Verbosity level

    Returns:
    Figure object
    """
217
```
218
219
### 3D Brain Visualization
220
221
Interactive 3D visualization of source estimates on brain surfaces.
222
223
```python { .api }
224
class Brain:
    """Interactive 3D brain visualization."""

    def __init__(self, subject: str, hemi: str, surf: str, title: Optional[str] = None,
                 cortex: Union[str, Tuple] = 'classic', alpha: float = 1.0,
                 size: Union[int, Tuple[int, int]] = 800, background: Union[str, Tuple] = 'black',
                 foreground: Optional[Union[str, Tuple]] = None, figure: Optional[Figure] = None,
                 subjects_dir: Optional[str] = None, views: Union[str, List[str]] = 'lat',
                 offset: bool = True, show_toolbar: bool = False, offscreen: bool = False,
                 interaction: str = 'trackball', units: str = 'm', view_layout: str = 'vertical',
                 silhouette: Union[bool, Dict] = False, verbose: Optional[Union[bool, str, int]] = None):
        """
        Initialize Brain visualization.

        Parameters:
        - subject: Subject name
        - hemi: Hemisphere ('lh', 'rh', 'both', 'split')
        - surf: Surface type ('pial', 'white', 'inflated')
        - title: Window title
        - cortex: Cortex color scheme
        - alpha: Surface transparency
        - size: Figure size
        - background: Background color
        - foreground: Foreground color
        - figure: Existing figure to use
        - subjects_dir: FreeSurfer subjects directory
        - views: View angles
        - offset: Apply view offset
        - show_toolbar: Show toolbar
        - offscreen: Render offscreen
        - interaction: Interaction mode
        - units: Spatial units
        - view_layout: Layout of multiple views
        - silhouette: Show silhouette
        - verbose: Verbosity level
        """

    # NOTE: methods return 'Brain' (quoted forward reference) because the
    # class name is not yet bound while the class body is being evaluated.
    def add_data(self, array: ArrayLike, fmin: Optional[float] = None, fmid: Optional[float] = None,
                 fmax: Optional[float] = None, thresh: Optional[float] = None,
                 center: Optional[float] = None, transparent: bool = False, colormap: str = 'auto',
                 alpha: float = 1.0, vertices: Optional[ArrayLike] = None, smoothing_steps: int = 10,
                 time: Optional[ArrayLike] = None, time_label: str = 'auto', colorbar: bool = True,
                 hemi: Optional[str] = None, remove_existing: bool = False, time_label_size: Optional[int] = None,
                 initial_time: Optional[float] = None, scale_factor: Optional[float] = None,
                 vector_alpha: Optional[float] = None, clim: Optional[Dict] = None,
                 src: Optional[SourceSpaces] = None, volume_options: float = 0.4,
                 colorbar_kwargs: Optional[Dict] = None, verbose: Optional[Union[bool, str, int]] = None) -> 'Brain':
        """
        Add data to brain visualization.

        Parameters:
        - array: Data array to visualize
        - fmin: Minimum value for colormap
        - fmid: Middle value for colormap
        - fmax: Maximum value for colormap
        - thresh: Threshold below which data is transparent
        - center: Center value for colormap
        - transparent: Use transparency
        - colormap: Colormap name
        - alpha: Data transparency
        - vertices: Vertex indices for data
        - smoothing_steps: Number of smoothing steps
        - time: Time array for dynamic data
        - time_label: Time label format
        - colorbar: Show colorbar
        - hemi: Hemisphere for data
        - remove_existing: Remove existing data
        - time_label_size: Time label size
        - initial_time: Initial time point
        - scale_factor: Scaling factor
        - vector_alpha: Vector transparency
        - clim: Color limits dictionary
        - src: Source space for volume data
        - volume_options: Volume rendering options
        - colorbar_kwargs: Colorbar customization
        - verbose: Verbosity level

        Returns:
        Self for method chaining
        """

    def show_view(self, view: Union[str, Dict], roll: Optional[float] = None,
                  distance: Optional[float] = None, focalpoint: Optional[Tuple] = None,
                  hemi: Optional[str] = None) -> 'Brain':
        """
        Show specific view of brain.

        Parameters:
        - view: View specification
        - roll: Roll angle
        - distance: Camera distance
        - focalpoint: Camera focal point
        - hemi: Hemisphere to show

        Returns:
        Self for method chaining
        """

    def screenshot(self, filename: Optional[str] = None, mode: str = 'rgb',
                   time_viewer: bool = False) -> ArrayLike:
        """
        Take screenshot of brain visualization.

        Parameters:
        - filename: Output filename
        - mode: Color mode
        - time_viewer: Include time viewer

        Returns:
        Screenshot array
        """
335
336
def plot_source_estimates(stc: Union[SourceEstimate, List[SourceEstimate]], subject: Optional[str] = None,
                          surface: str = 'inflated', hemi: str = 'lh', colormap: str = 'auto',
                          time_label: str = 'auto', smoothing_steps: int = 10, transparent: Optional[bool] = None,
                          alpha: float = 1.0, time_viewer: Union[bool, str] = 'auto',
                          subjects_dir: Optional[str] = None, figure: Optional[Figure] = None,
                          views: Union[str, List] = 'lat', colorbar: bool = True, clim: str = 'auto',
                          cortex: Union[str, Tuple] = 'classic', size: Union[int, Tuple] = 800,
                          background: Union[str, Tuple] = 'black', foreground: Optional[Union[str, Tuple]] = None,
                          initial_time: Optional[float] = None, time_unit: str = 's', backend: str = 'auto',
                          spacing: str = 'oct6', title: Optional[str] = None, show_traces: Union[bool, str] = 'auto',
                          src: Optional[SourceSpaces] = None, volume_options: float = 0.4,
                          view_layout: str = 'vertical', add_data_kwargs: Optional[Dict] = None,
                          brain_kwargs: Optional[Dict] = None, verbose: Optional[Union[bool, str, int]] = None) -> Brain:
    """
    Plot source estimates on brain surface.

    Parameters:
    - stc: Source estimate(s) to visualize
    - subject: Subject name
    - surface, hemi: Surface type and hemisphere to render
    - colormap, clim, colorbar: Color mapping options
    - time_label, time_viewer, initial_time, time_unit: Time display options
    - smoothing_steps: Number of smoothing steps
    - transparent, alpha: Transparency options
    - subjects_dir: FreeSurfer subjects directory
    - figure, size, background, foreground, cortex: Figure appearance
    - views, view_layout: View angles and layout
    - backend: Rendering backend ('auto' or explicit)
    - spacing: Source space spacing
    - title: Figure title
    - show_traces: Show time-course traces
    - src, volume_options: Source space and volume rendering options
    - add_data_kwargs: Extra arguments forwarded to Brain.add_data
    - brain_kwargs: Extra arguments forwarded to the Brain constructor
    - verbose: Verbosity level

    Returns:
    Brain visualization object
    """
355
```
356
357
### Sensor and Layout Visualization
358
359
Display sensor locations and channel layouts.
360
361
```python { .api }
362
def plot_sensors(info: Info, kind: str = '3d', ch_type: Optional[str] = None,
                 title: Optional[str] = None, show_names: Union[bool, List] = False,
                 ch_groups: Optional[Union[str, Dict]] = None, to_sphere: bool = True,
                 axes: Optional[matplotlib.axes.Axes] = None, block: bool = False,
                 show: bool = True, sphere: Optional[Union[float, str, ArrayLike]] = None,
                 verbose: Optional[Union[bool, str, int]] = None) -> Figure:
    """
    Plot sensor locations.

    Parameters:
    - info: Measurement info
    - kind: Plot type ('3d', 'topomap', 'select')
    - ch_type: Channel type to plot
    - title: Plot title
    - show_names: Show channel names
    - ch_groups: Channel groupings
    - to_sphere: Project to sphere
    - axes: Matplotlib axes
    - block: Block execution
    - show: Show plot
    - sphere: Sphere specification
    - verbose: Verbosity level

    Returns:
    Figure object
    """
388
389
def plot_layout(layout: Layout, picks: Optional[Union[str, List]] = None,
                axes: Optional[matplotlib.axes.Axes] = None, show: bool = True) -> Figure:
    """
    Plot channel layout.

    Parameters:
    - layout: Layout object
    - picks: Channel selection
    - axes: Matplotlib axes
    - show: Show plot

    Returns:
    Figure object
    """
403
```
404
405
## Usage Examples
406
407
### Interactive Raw Data Browser
408
409
```python
410
import mne
411
412
# Load raw data
413
raw = mne.io.read_raw_fif('sample_audvis_raw.fif', preload=True)
414
415
# Plot interactive browser
416
raw.plot(duration=30, n_channels=20, scalings='auto',
417
title='Raw MEG/EEG Data', show_scrollbars=True)
418
419
# Plot power spectral density
420
raw.plot_psd(fmax=50, picks='eeg', average=True)
421
```
422
423
### Evoked Response Visualization
424
425
```python
426
import mne
import numpy as np
427
428
# Load evoked data
429
evoked = mne.read_evokeds('sample_audvis-ave.fif')[0]
430
431
# Plot time series
432
evoked.plot(picks='eeg', xlim=[0, 0.5])
433
434
# Plot joint plot with topomaps
435
evoked.plot_joint(times=[0.1, 0.15, 0.2], title='Auditory Response')
436
437
# Plot topographic maps at multiple times
438
evoked.plot_topomap(times=np.linspace(0.05, 0.3, 8), ch_type='eeg')
439
```
440
441
### 3D Source Visualization
442
443
```python
444
import mne
445
446
# Load source estimate
447
stc = mne.read_source_estimate('sample-stc')
448
449
# Plot on inflated brain surface
450
subjects_dir = '/path/to/freesurfer/subjects'
brain = stc.plot(subject='sample', subjects_dir=subjects_dir,
451
hemi='both', views=['lat', 'med'],
452
initial_time=0.1, time_viewer=True)
453
454
# Add specific views
455
brain.show_view('lateral')
456
brain.show_view('medial')
457
458
# Take screenshot
459
brain.screenshot('source_plot.png')
460
```
461
462
### Custom Topographic Plot
463
464
```python
465
import mne
466
import numpy as np
import matplotlib.pyplot as plt
467
468
# Load evoked data
469
evoked = mne.read_evokeds('sample_audvis-ave.fif')[0]
470
471
# Extract data at specific time point
472
time_idx = np.argmin(np.abs(evoked.times - 0.1))
473
data = evoked.data[:, time_idx]
474
475
# Create custom topomap
476
fig, ax = plt.subplots(figsize=(6, 6))
477
im, _ = mne.viz.plot_topomap(data, evoked.info, axes=ax,
478
show=False, contours=6, cmap='RdBu_r')
479
ax.set_title('Activity at 100ms', fontsize=14)
480
plt.colorbar(im, ax=ax)
481
plt.show()
482
```
483
484
## Types
485
486
```python { .api }
487
import matplotlib.pyplot as plt
import matplotlib.axes
import numpy as np  # required: ArrayLike below references np.ndarray
from typing import Union, Optional, List, Dict, Tuple, Any

# Matplotlib figure type returned by the plotting functions in this module.
Figure = plt.Figure
# Accepted array-like inputs: NumPy arrays, lists, or tuples.
ArrayLike = Union[np.ndarray, List, Tuple]
493
```