# Storage

Storage classes manage simulation data with support for various backends including memory, files, and movie generation with efficient data compression and retrieval.

## Capabilities

### Memory Storage

Store simulation data in memory for fast access and analysis.

```python { .api }
class MemoryStorage:
    """In-memory storage backend for simulation field data."""

    def __init__(self, *, write_mode="truncate_once"):
        """
        Initialize in-memory storage.

        Parameters:
        - write_mode: str, how to handle multiple writes
          ("append", "truncate_once", "truncate_always")
        """

    @classmethod
    def from_fields(cls, fields, times=None, *, label=None, dtype=None):
        """
        Create storage from field data.

        Parameters:
        - fields: list of FieldBase, field data
        - times: array-like, time points for each field
        - label: str, optional storage label
        - dtype: data type for storage

        Returns:
        MemoryStorage containing the field data
        """

    @classmethod
    def from_collection(cls, field_collection, times=None):
        """
        Create storage from field collection.

        Parameters:
        - field_collection: FieldCollection, collection of fields
        - times: array-like, time points for each field

        Returns:
        MemoryStorage containing the collection data
        """

    def append(self, data, info=None):
        """
        Append data to storage.

        Parameters:
        - data: FieldBase, field data to store
        - info: dict, optional metadata
        """

    def __len__(self):
        """int: Number of stored data items"""

    def __getitem__(self, index):
        """Get stored data by index or slice"""

    @property
    def times(self):
        """np.ndarray: Array of time points"""

    @property
    def data(self):
        """list: Stored field data"""

    def clear(self, clear_data_shape=False):
        """
        Clear all stored data.

        Parameters:
        - clear_data_shape: bool, whether to clear data shape info
        """

    def start_writing(self, field, info=None):
        """
        Prepare storage for writing field data.

        Parameters:
        - field: FieldBase, example field for metadata
        - info: dict, optional additional information
        """


def get_memory_storage():
    """
    Get shared memory storage instance.

    Returns:
        MemoryStorage: Shared storage instance
    """
```

### File Storage

Store simulation data to files with support for various formats and compression.

```python { .api }
class FileStorage:
    """File-based storage backend; usable as a context manager."""

    def __init__(self, filename, *, info=None, write_mode="truncate_once"):
        """
        Initialize file-based storage.

        Parameters:
        - filename: str, path to storage file
        - info: dict, optional metadata to store
        - write_mode: str, how to handle file writing
        """

    @property
    def filename(self):
        """str: Path to storage file"""

    def write(self, data, *, info=None):
        """
        Write data to file.

        Parameters:
        - data: FieldBase, field data to write
        - info: dict, optional metadata
        """

    def read(self, index=None):
        """
        Read data from file.

        Parameters:
        - index: int or slice, data index to read (None for all)

        Returns:
        FieldBase or list: Field data
        """

    @property
    def items(self):
        """Generator: Iterate over all stored items"""

    def close(self):
        """Close storage file"""

    def __enter__(self):
        """Context manager entry"""
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Context manager exit; ensures the file handle is released"""
        self.close()
```

### Movie Storage

Create movies from simulation data with automatic frame generation.

```python { .api }
class MovieStorage:
    """Storage backend that renders appended fields into a movie file."""

    def __init__(self, filename, *, movie_writer="auto", **kwargs):
        """
        Initialize movie storage.

        Parameters:
        - filename: str, output movie filename
        - movie_writer: str, movie writing backend ("ffmpeg", "pillow")
        - kwargs: additional movie parameters (fps, dpi, etc.)
        """

    @property
    def filename(self):
        """str: Output movie filename"""

    def append(self, data, info=None):
        """
        Add frame to movie.

        Parameters:
        - data: FieldBase, field data for frame
        - info: dict, optional frame metadata
        """

    def close(self):
        """Finalize and close movie file"""

    @property
    def is_writing(self):
        """bool: Whether movie is currently being written"""
```

### Modelrunner Storage

Integration with modelrunner platform for cloud-based data management.

```python { .api }
class ModelrunnerStorage:
    """Storage backend integrating with the modelrunner platform."""

    def __init__(self, *, info=None):
        """
        Initialize modelrunner storage integration.

        Parameters:
        - info: dict, optional metadata

        Note:
        Only available if modelrunner is installed and configured
        """

    def upload(self, data, *, tags=None):
        """
        Upload data to modelrunner platform.

        Parameters:
        - data: FieldBase, field data to upload
        - tags: list, optional tags for data organization
        """

    def download(self, run_id, index=None):
        """
        Download data from modelrunner platform.

        Parameters:
        - run_id: str, modelrunner run identifier
        - index: int, specific data index to download

        Returns:
        FieldBase: Downloaded field data
        """
```

### Storage Base Classes

Common functionality for all storage backends.

```python { .api }
class StorageBase:
    """Common interface shared by all storage backends.

    Note: the original documentation listed ``start_writing``, ``end_writing``,
    ``has_collection`` and ``extract_field`` twice (with conflicting signatures
    for ``extract_field``); each member is documented exactly once here, keeping
    the variant described in the surrounding prose.
    """

    @property
    def data_shape(self):
        """tuple: Shape of stored data"""

    @property
    def dtype(self):
        """np.dtype: Data type of stored fields"""

    @property
    def shape(self):
        """tuple: Shape including time dimension"""

    @property
    def has_collection(self):
        """bool: Whether storage contains field collections"""

    @property
    def grid(self):
        """GridBase: Grid associated with stored fields"""

    @property
    def write_mode(self):
        """str: Current write mode"""

    def append(self, field, time=None):
        """
        Append field data to storage.

        Parameters:
        - field: FieldBase, field to store
        - time: float, time point for the field
        """

    def clear(self, clear_data_shape=False):
        """
        Clear stored data.

        Parameters:
        - clear_data_shape: bool, whether to clear data shape info
        """

    def items(self):
        """
        Iterate over stored time-field pairs.

        Yields:
        tuple: (time, field) pairs
        """

    # NOTE(review): the string default '0.1' looks odd for an interrupt
    # specification — a numeric value is presumably expected; confirm.
    def tracker(self, interrupts='0.1', transformation=None, filename=None):
        """
        Create tracker for this storage.

        Parameters:
        - interrupts: interrupt specification
        - transformation: optional field transformation
        - filename: optional filename for output

        Returns:
        StorageTracker: Tracker instance
        """

    def start_writing(self, field, info=None):
        """
        Prepare storage for writing.

        Parameters:
        - field: FieldBase, example field
        - info: dict, optional metadata
        """

    def end_writing(self):
        """Finalize writing process."""

    def view_field(self, field_id):
        """
        Create view of specific field.

        Parameters:
        - field_id: int or str, field identifier

        Returns:
        StorageView: View of the field data
        """

    def extract_field(self, field_id, *, copy=True):
        """
        Extract specific field data.

        Parameters:
        - field_id: int or str, field identifier
        - copy: bool, whether to copy data

        Returns:
        DataFieldBase: Extracted field
        """

    def extract_time_range(self, t_start=None, t_end=None, *, copy=True):
        """
        Extract data within time range.

        Parameters:
        - t_start: float, start time (None for beginning)
        - t_end: float, end time (None for end)
        - copy: bool, whether to copy data

        Returns:
        StorageBase: Storage with extracted data
        """

    def apply(self, func, progress=None, parallel='auto', **kwargs):
        """
        Apply function to all stored fields.

        Parameters:
        - func: callable, function to apply
        - progress: bool, whether to show progress
        - parallel: str or bool, parallelization mode
        - kwargs: additional arguments for func

        Returns:
        Any: Result of function application
        """

    def copy(self, filename=None):
        """
        Create copy of storage.

        Parameters:
        - filename: str, optional filename for file-based storage

        Returns:
        StorageBase: Copy of storage
        """
```

## Usage Examples

### Basic Memory Storage

```python
import pde

# Create storage
storage = pde.MemoryStorage()

# Run simulation with storage
grid = pde.UnitGrid([32], periodic=True)
eq = pde.DiffusionPDE(diffusivity=0.1)
state = pde.ScalarField.random_uniform(grid)

# Use DataTracker to fill storage
tracker = pde.DataTracker(storage=storage, interrupts=0.5)
result = eq.solve(state, t_range=5.0, tracker=tracker)

# Access stored data
print(f"Stored {len(storage)} time points")
print(f"Times: {storage.times}")

# Plot evolution
import matplotlib.pyplot as plt

for i, field in enumerate(storage):
    if i % 2 == 0:  # Plot every other frame
        plt.plot(field.data, alpha=0.7, label=f't={storage.times[i]:.1f}')
plt.legend()
plt.show()
```

### File Storage for Large Simulations

```python
import pde

# Set up large simulation
grid = pde.CartesianGrid([[0, 10], [0, 10]], [128, 128])
eq = pde.AllenCahnPDE()
state = eq.get_initial_condition(grid)

# Use file storage to handle large datasets
with pde.FileStorage("simulation_data.h5") as storage:
    tracker = pde.DataTracker(storage=storage, interrupts=0.1)
    result = eq.solve(state, t_range=10.0, tracker=tracker)

# Read back data for analysis
with pde.FileStorage("simulation_data.h5") as storage:
    # Read specific time points
    initial = storage.read(0)
    final = storage.read(-1)

    print(f"Initial average: {initial.average:.3f}")
    print(f"Final average: {final.average:.3f}")

    # Read time series data
    all_data = storage.read()
    print(f"Loaded {len(all_data)} time points")
```

### Movie Generation

```python
import pde

# 2D simulation for movie
grid = pde.CartesianGrid([[0, 10], [0, 10]], [64, 64])
eq = pde.SwiftHohenbergPDE()
state = eq.get_initial_condition(grid)

# Create movie during simulation
movie_storage = pde.MovieStorage(
    "pattern_formation.mp4",
    fps=10,
    dpi=100
)

tracker = pde.DataTracker(storage=movie_storage, interrupts=0.1)
result = eq.solve(state, t_range=20.0, tracker=tracker)

print("Movie saved as pattern_formation.mp4")
```

### Custom Storage Backend

```python
import pde
import pickle


class PickleStorage(pde.StorageBase):
    """Custom storage using pickle format"""

    def __init__(self, filename):
        super().__init__(write_mode="truncate_once")
        self.filename = filename
        self.data_list = []
        self.time_list = []

    def append(self, data, info=None):
        # Copy the field so later in-place updates don't mutate stored frames
        self.data_list.append(data.copy())
        self.time_list.append(info.get('t', 0) if info else 0)

    def __len__(self):
        return len(self.data_list)

    def __getitem__(self, index):
        return self.data_list[index]

    def save(self):
        """Serialize all stored fields and times to the pickle file."""
        with open(self.filename, 'wb') as f:
            pickle.dump({
                'data': self.data_list,
                'times': self.time_list
            }, f)

    @classmethod
    def load(cls, filename):
        """Alternate constructor restoring a storage from a pickle file."""
        storage = cls(filename)
        with open(filename, 'rb') as f:
            saved = pickle.load(f)
        storage.data_list = saved['data']
        storage.time_list = saved['times']
        return storage


# Use custom storage
grid = pde.UnitGrid([32], periodic=True)
eq = pde.DiffusionPDE()
state = pde.ScalarField.random_uniform(grid)

custom_storage = PickleStorage("custom_data.pkl")
tracker = pde.DataTracker(storage=custom_storage, interrupts=1.0)
result = eq.solve(state, t_range=5.0, tracker=tracker)

# Save and reload
custom_storage.save()
loaded_storage = PickleStorage.load("custom_data.pkl")
print(f"Loaded {len(loaded_storage)} time points")
```

### Compressed File Storage

```python
import pde

# Large dataset with compression
grid = pde.CartesianGrid([[0, 20], [0, 20]], [200, 200])
eq = pde.CahnHilliardPDE()
state = eq.get_initial_condition(grid)

# Configure file storage with compression
storage_config = {
    "compression": "gzip",
    "compression_opts": 9,  # Maximum compression
    "shuffle": True  # Better compression for numerical data
}

with pde.FileStorage("large_simulation.h5", **storage_config) as storage:
    tracker = pde.DataTracker(storage=storage, interrupts=0.2)
    result = eq.solve(state, t_range=10.0, tracker=tracker)

print("Large simulation saved with compression")
```

### Multi-format Output

```python
import pde

# Set up simulation
grid = pde.UnitGrid([64, 64], periodic=True)
eq = pde.AllenCahnPDE()
state = eq.get_initial_condition(grid)

# Multiple storage backends
storages = [
    pde.MemoryStorage(),  # For immediate analysis
    pde.FileStorage("backup.h5"),  # For archival
    pde.MovieStorage("visualization.mp4")  # For presentation
]

# Use multiple trackers with different storages
trackers = [
    pde.DataTracker(storage=storages[0], interrupts=1.0),  # Memory
    pde.DataTracker(storage=storages[1], interrupts=0.5),  # File
    pde.DataTracker(storage=storages[2], interrupts=0.1)  # Movie
]

result = eq.solve(state, t_range=10.0, tracker=trackers)

print(f"Memory storage: {len(storages[0])} points")
print("File and movie outputs saved")
```