# Frame Handling

Frame data access and manipulation with support for depth, color, infrared, and motion data. Provides seamless integration with NumPy through Python's buffer protocol and comprehensive metadata access.

## Capabilities

### Base Frame

Core frame interface providing common functionality for all frame types.
```python { .api }
class frame:
    def get_timestamp() -> float:
        """
        Get frame timestamp.

        Returns:
            float: Timestamp in milliseconds
        """

    def get_frame_timestamp_domain() -> timestamp_domain:
        """
        Get timestamp domain (hardware vs system clock).

        Returns:
            timestamp_domain: Timestamp source
        """

    def get_frame_number() -> int:
        """
        Get frame sequence number.

        Returns:
            int: Frame number from start of streaming
        """

    def get_data_size() -> int:
        """
        Get frame data size in bytes.

        Returns:
            int: Data size in bytes
        """

    def get_profile() -> stream_profile:
        """
        Get stream profile that produced this frame.

        Returns:
            stream_profile: Associated stream configuration
        """

    # Properties (read-only)
    @property
    def timestamp(self) -> float:
        """Frame timestamp in milliseconds."""

    @property
    def frame_timestamp_domain(self) -> timestamp_domain:
        """Timestamp domain."""

    @property
    def frame_number(self) -> int:
        """Frame sequence number."""

    @property
    def profile(self) -> stream_profile:
        """Associated stream profile."""

    @property
    def data(self) -> BufData:
        """Frame data with buffer protocol support."""

    def get_data() -> BufData:
        """
        Get frame data with Python buffer protocol support.

        Returns:
            BufData: Frame data accessible as NumPy array
        """

    def get_frame_metadata(metadata_type) -> int:
        """
        Get frame metadata value.

        Args:
            metadata_type (frame_metadata_value): Metadata field to retrieve

        Returns:
            int: Metadata value

        Raises:
            rs.error: If metadata not supported
        """

    def supports_frame_metadata(metadata_type) -> bool:
        """
        Check if frame supports specific metadata.

        Args:
            metadata_type (frame_metadata_value): Metadata field to check

        Returns:
            bool: True if metadata is available
        """

    def keep():
        """
        Extend frame lifetime beyond current scope.
        Used when frame needs to persist after callback returns.
        """

    def swap(other_frame):
        """
        Swap frame contents with another frame.

        Args:
            other_frame (frame): Frame to swap with
        """

    # Type checking methods
    def is_video_frame() -> bool:
        """Check if frame is a video frame."""

    def is_depth_frame() -> bool:
        """Check if frame is a depth frame."""

    def is_disparity_frame() -> bool:
        """Check if frame is a disparity frame."""

    def is_motion_frame() -> bool:
        """Check if frame is a motion frame."""

    def is_pose_frame() -> bool:
        """Check if frame is a pose frame."""

    def is_frameset() -> bool:
        """Check if frame is a frameset (composite frame)."""

    def is_points() -> bool:
        """Check if frame is a point cloud."""

    # Type casting methods
    def as_video_frame() -> video_frame:
        """Cast to video frame."""

    def as_depth_frame() -> depth_frame:
        """Cast to depth frame."""

    def as_disparity_frame() -> disparity_frame:
        """Cast to disparity frame."""

    def as_motion_frame() -> motion_frame:
        """Cast to motion frame."""

    def as_pose_frame() -> pose_frame:
        """Cast to pose frame."""

    def as_frameset() -> frameset:
        """Cast to frameset."""

    def as_points() -> points:
        """Cast to point cloud."""
```

### Video Frame

Frame containing 2D image data (color, infrared, etc.).

```python { .api }
class video_frame(frame):
    def get_width() -> int:
        """
        Get frame width.

        Returns:
            int: Width in pixels
        """

    def get_height() -> int:
        """
        Get frame height.

        Returns:
            int: Height in pixels
        """

    def get_stride_in_bytes() -> int:
        """
        Get row stride in bytes.

        Returns:
            int: Bytes per row including padding
        """

    def get_bits_per_pixel() -> int:
        """
        Get bits per pixel.

        Returns:
            int: Bits per pixel
        """

    def get_bytes_per_pixel() -> int:
        """
        Get bytes per pixel.

        Returns:
            int: Bytes per pixel
        """

    def extract_target_dimensions(target_type) -> list[float]:
        """
        Extract target dimensions for specific use case.

        Args:
            target_type: Target type for dimension extraction

        Returns:
            list[float]: Extracted dimensions
        """

    # Properties
    @property
    def width(self) -> int:
        """Frame width in pixels."""

    @property
    def height(self) -> int:
        """Frame height in pixels."""

    @property
    def stride_in_bytes(self) -> int:
        """Row stride in bytes."""

    @property
    def bits_per_pixel(self) -> int:
        """Bits per pixel."""

    @property
    def bytes_per_pixel(self) -> int:
        """Bytes per pixel."""
```

### Depth Frame

Video frame containing depth data with distance query capabilities.

```python { .api }
class depth_frame(video_frame):
    def get_distance(x, y) -> float:
        """
        Get distance at specific pixel coordinate.

        Args:
            x (int): Pixel x-coordinate
            y (int): Pixel y-coordinate

        Returns:
            float: Distance in meters (0 if invalid)
        """

    def get_units() -> float:
        """
        Get depth units scaling factor.

        Returns:
            float: Meters per depth unit
        """
```

### Disparity Frame

Depth frame containing disparity data (inverse depth).

```python { .api }
class disparity_frame(depth_frame):
    def get_baseline() -> float:
        """
        Get stereo baseline distance.

        Returns:
            float: Baseline in millimeters
        """
```

### Motion Frame

Frame containing IMU sensor data (accelerometer, gyroscope).

```python { .api }
class motion_frame(frame):
    def get_motion_data() -> vector:
        """
        Get motion sensor data.

        Returns:
            vector: 3D motion data (acceleration or angular velocity)
        """

    def get_combined_motion_data() -> vector:
        """
        Get combined motion data from multiple sensors.

        Returns:
            vector: Combined motion data
        """

    @property
    def motion_data(self) -> vector:
        """Motion sensor data."""
```

### Pose Frame

Frame containing 6DOF pose data (position and orientation).

```python { .api }
class pose_frame(frame):
    def get_pose_data() -> pose_data:
        """
        Get pose data.

        Returns:
            pose_data: 6DOF pose information
        """

    @property
    def pose_data(self) -> pose_data:
        """6DOF pose data."""
```

### Frameset

Collection of synchronized frames from multiple streams.

```python { .api }
class frameset(frame):
    def size() -> int:
        """
        Get number of frames in set.

        Returns:
            int: Number of frames
        """

    def __len__() -> int:
        """Number of frames in set."""

    def __getitem__(index) -> frame:
        """Get frame by index."""

    def __iter__():
        """Iterate over frames."""

    def first(stream_type, format=rs.format.any) -> frame:
        """
        Get first frame of specified type.

        Args:
            stream_type (stream): Stream type to find
            format (format): Optional format filter

        Returns:
            frame: First matching frame

        Raises:
            rs.error: If no matching frame found
        """

    def first_or_default(stream_type, format=rs.format.any) -> frame:
        """
        Get first frame of specified type or None.

        Args:
            stream_type (stream): Stream type to find
            format (format): Optional format filter

        Returns:
            frame: First matching frame or None
        """

    def get_depth_frame() -> depth_frame:
        """
        Get depth frame from set.

        Returns:
            depth_frame: Depth frame or None if not present
        """

    def get_color_frame() -> video_frame:
        """
        Get color frame from set.

        Returns:
            video_frame: Color frame or None if not present
        """

    def get_infrared_frame(index=0) -> video_frame:
        """
        Get infrared frame from set.

        Args:
            index (int): Infrared stream index

        Returns:
            video_frame: Infrared frame or None if not present
        """

    def get_fisheye_frame(index=0) -> video_frame:
        """
        Get fisheye frame from set.

        Args:
            index (int): Fisheye stream index

        Returns:
            video_frame: Fisheye frame or None if not present
        """

    def get_pose_frame(index=0) -> pose_frame:
        """
        Get pose frame from set.

        Args:
            index (int): Pose stream index

        Returns:
            pose_frame: Pose frame or None if not present
        """

    def foreach(callback_function):
        """
        Apply callback to each frame in set.

        Args:
            callback_function: Function called for each frame
        """
```

### Point Cloud Frame

Frame containing 3D point cloud data.

```python { .api }
class points(frame):
    def get_vertices(dims=1) -> BufData:
        """
        Get vertex data as buffer.

        Args:
            dims (int): Dimensionality (1 for flat array, 2 for structured)

        Returns:
            BufData: Vertex coordinates buffer
        """

    def get_texture_coordinates(dims=1) -> BufData:
        """
        Get texture coordinate data as buffer.

        Args:
            dims (int): Dimensionality

        Returns:
            BufData: Texture coordinates buffer
        """

    def export_to_ply(filename, texture_frame):
        """
        Export point cloud to PLY file.

        Args:
            filename (str): Output file path
            texture_frame (video_frame): Frame to use for texture
        """

    def size() -> int:
        """
        Get number of points.

        Returns:
            int: Point count
        """
```

### Buffer Data

Python buffer protocol support for zero-copy NumPy integration.

```python { .api }
class BufData:
    """
    Buffer protocol implementation for frame data.
    Enables zero-copy access to frame data as NumPy arrays.
    """
```

## Data Types

### 3D Vector

```python { .api }
class vector:
    x: float  # X component
    y: float  # Y component
    z: float  # Z component
```

### 3D Vertex

```python { .api }
class vertex:
    x: float  # X coordinate
    y: float  # Y coordinate
    z: float  # Z coordinate
```

### Texture Coordinate

```python { .api }
class texture_coordinate:
    u: float  # U texture coordinate
    v: float  # V texture coordinate
```

### Quaternion

```python { .api }
class quaternion:
    x: float  # X component
    y: float  # Y component
    z: float  # Z component
    w: float  # W component
```

### Pose Data

```python { .api }
class pose_data:
    translation: vector           # Position (x, y, z)
    velocity: vector              # Velocity (x, y, z)
    acceleration: vector          # Acceleration (x, y, z)
    rotation: quaternion          # Orientation quaternion
    angular_velocity: vector      # Angular velocity (x, y, z)
    angular_acceleration: vector  # Angular acceleration (x, y, z)
    tracker_confidence: int       # Tracking confidence level
    mapper_confidence: int        # Mapping confidence level
```

## Usage Examples

### Basic Frame Processing

```python
import pyrealsense2 as rs
import numpy as np

# Start pipeline
pipeline = rs.pipeline()
config = rs.config()
config.enable_stream(rs.stream.depth, 640, 480, rs.format.z16, 30)
config.enable_stream(rs.stream.color, 640, 480, rs.format.bgr8, 30)

pipeline.start(config)

try:
    for i in range(100):
        frames = pipeline.wait_for_frames()

        # Get individual frames
        depth_frame = frames.get_depth_frame()
        color_frame = frames.get_color_frame()

        if not depth_frame or not color_frame:
            continue

        # Convert to NumPy arrays (zero-copy)
        depth_image = np.asanyarray(depth_frame.get_data())
        color_image = np.asanyarray(color_frame.get_data())

        # Frame information
        print(f"Frame {depth_frame.get_frame_number()}:")
        print(f"  Depth: {depth_image.shape} {depth_image.dtype}")
        print(f"  Color: {color_image.shape} {color_image.dtype}")
        print(f"  Timestamp: {depth_frame.get_timestamp():.1f}ms")

        # Distance at center pixel
        width = depth_frame.get_width()
        height = depth_frame.get_height()
        center_distance = depth_frame.get_distance(width // 2, height // 2)
        print(f"  Center distance: {center_distance:.3f}m")

finally:
    pipeline.stop()
```

### Frame Metadata Access

```python
import pyrealsense2 as rs

pipeline = rs.pipeline()
config = rs.config()
config.enable_stream(rs.stream.depth, 640, 480, rs.format.z16, 30)

pipeline.start(config)

# Available metadata types
metadata_types = [
    rs.frame_metadata_value.frame_counter,
    rs.frame_metadata_value.frame_timestamp,
    rs.frame_metadata_value.sensor_timestamp,
    rs.frame_metadata_value.actual_exposure,
    rs.frame_metadata_value.gain_level,
    rs.frame_metadata_value.auto_exposure,
    rs.frame_metadata_value.white_balance,
    rs.frame_metadata_value.time_of_arrival
]

try:
    frames = pipeline.wait_for_frames()
    depth_frame = frames.get_depth_frame()

    if depth_frame:
        print("Available metadata:")
        for metadata_type in metadata_types:
            if depth_frame.supports_frame_metadata(metadata_type):
                value = depth_frame.get_frame_metadata(metadata_type)
                print(f"  {metadata_type.name}: {value}")
            else:
                print(f"  {metadata_type.name}: Not supported")

finally:
    pipeline.stop()
```

### Frame Lifetime Management

```python
import pyrealsense2 as rs
import queue
import threading

# Frame storage for processing in another thread
frame_queue = queue.Queue()

def frame_callback(frameset):
    depth_frame = frameset.get_depth_frame()
    if depth_frame:
        # Extend frame lifetime beyond callback
        depth_frame.keep()
        frame_queue.put(depth_frame)

def processing_thread():
    while True:
        try:
            frame = frame_queue.get(timeout=1.0)

            # Process frame (frame will be automatically released
            # when it goes out of scope)
            width = frame.get_width()
            height = frame.get_height()
            print(f"Processing frame {frame.get_frame_number()} "
                  f"({width}x{height})")

            frame_queue.task_done()

        except queue.Empty:
            break

# Start processing thread
processor = threading.Thread(target=processing_thread)
processor.start()

# Start streaming with callback
pipeline = rs.pipeline()
config = rs.config()
config.enable_stream(rs.stream.depth, 640, 480, rs.format.z16, 30)

pipeline.start(config, frame_callback)

try:
    import time
    time.sleep(5)  # Stream for 5 seconds
finally:
    pipeline.stop()
    frame_queue.join()  # Wait for processing to complete
    processor.join()
```

### Depth Analysis

```python
import pyrealsense2 as rs
import numpy as np

pipeline = rs.pipeline()
config = rs.config()
config.enable_stream(rs.stream.depth, 640, 480, rs.format.z16, 30)

profile = pipeline.start(config)

# Get depth sensor and scale
device = profile.get_device()
depth_sensor = device.first_depth_sensor()
depth_scale = depth_sensor.get_depth_scale()

print(f"Depth scale: {depth_scale} meters per unit")

try:
    for i in range(10):
        frames = pipeline.wait_for_frames()
        depth_frame = frames.get_depth_frame()

        if not depth_frame:
            continue

        # Convert to numpy and meters
        depth_image = np.asanyarray(depth_frame.get_data())
        depth_meters = depth_image * depth_scale

        # Filter valid depths (exclude zeros)
        valid_depths = depth_meters[depth_meters > 0]

        if len(valid_depths) > 0:
            print(f"Frame {depth_frame.get_frame_number()}:")
            print(f"  Valid pixels: {len(valid_depths)}/{depth_image.size}")
            print(f"  Depth range: {valid_depths.min():.3f} - {valid_depths.max():.3f}m")
            print(f"  Mean depth: {valid_depths.mean():.3f}m")

            # Distance histogram
            hist, bins = np.histogram(valid_depths, bins=10, range=(0, 5))
            print(f"  Distance distribution (0-5m):")
            for j in range(len(hist)):
                if hist[j] > 0:
                    print(f"    {bins[j]:.1f}-{bins[j+1]:.1f}m: {hist[j]} pixels")

finally:
    pipeline.stop()
```

### Motion Data Processing

```python
import pyrealsense2 as rs
import numpy as np

# Configure motion streams
pipeline = rs.pipeline()
config = rs.config()
config.enable_stream(rs.stream.accel, rs.format.motion_xyz32f, 250)
config.enable_stream(rs.stream.gyro, rs.format.motion_xyz32f, 400)

try:
    profile = pipeline.start(config)

    accel_data = []
    gyro_data = []

    for i in range(1000):  # Collect 1000 samples
        frames = pipeline.wait_for_frames()

        # Check for motion frames
        for j in range(frames.size()):
            frame = frames[j]

            if frame.is_motion_frame():
                motion_frame = frame.as_motion_frame()
                motion_data = motion_frame.get_motion_data()

                # Get stream type
                profile = motion_frame.get_profile()
                stream_type = profile.stream_type()

                if stream_type == rs.stream.accel:
                    accel_data.append([motion_data.x, motion_data.y, motion_data.z])
                elif stream_type == rs.stream.gyro:
                    gyro_data.append([motion_data.x, motion_data.y, motion_data.z])

    # Analyze motion data
    if accel_data:
        accel_array = np.array(accel_data)
        print(f"Accelerometer data: {len(accel_data)} samples")
        print(f"  Mean: {accel_array.mean(axis=0)}")
        print(f"  Std: {accel_array.std(axis=0)}")

    if gyro_data:
        gyro_array = np.array(gyro_data)
        print(f"Gyroscope data: {len(gyro_data)} samples")
        print(f"  Mean: {gyro_array.mean(axis=0)}")
        print(f"  Std: {gyro_array.std(axis=0)}")

except Exception as e:
    print(f"Motion streaming not supported: {e}")

finally:
    pipeline.stop()
```

### Frameset Processing

```python
import pyrealsense2 as rs
import numpy as np

pipeline = rs.pipeline()
config = rs.config()
config.enable_stream(rs.stream.depth, 640, 480, rs.format.z16, 30)
config.enable_stream(rs.stream.color, 640, 480, rs.format.bgr8, 30)
config.enable_stream(rs.stream.infrared, 1, 640, 480, rs.format.y8, 30)
config.enable_stream(rs.stream.infrared, 2, 640, 480, rs.format.y8, 30)

pipeline.start(config)

try:
    for i in range(10):
        frames = pipeline.wait_for_frames()

        print(f"Frameset {i}: {frames.size()} frames")
        print(f"  Timestamp: {frames.get_timestamp():.1f}ms")

        # Process each frame type
        depth = frames.get_depth_frame()
        color = frames.get_color_frame()
        ir1 = frames.get_infrared_frame(1)
        ir2 = frames.get_infrared_frame(2)

        if depth:
            depth_data = np.asanyarray(depth.get_data())
            print(f"  Depth: {depth_data.shape} (frame #{depth.get_frame_number()})")

        if color:
            color_data = np.asanyarray(color.get_data())
            print(f"  Color: {color_data.shape} (frame #{color.get_frame_number()})")

        if ir1:
            ir1_data = np.asanyarray(ir1.get_data())
            print(f"  IR1: {ir1_data.shape} (frame #{ir1.get_frame_number()})")

        if ir2:
            ir2_data = np.asanyarray(ir2.get_data())
            print(f"  IR2: {ir2_data.shape} (frame #{ir2.get_frame_number()})")

        # Alternative: iterate through all frames
        print("  All frames:")
        for j, frame in enumerate(frames):
            profile = frame.get_profile()
            stream_type = profile.stream_type()
            print(f"    {j}: {stream_type.name} frame #{frame.get_frame_number()}")

finally:
    pipeline.stop()
```

## Integration Notes

- **NumPy Integration**: All frames support Python's buffer protocol for zero-copy numpy array creation
- **Memory Management**: Frames are reference-counted; use `frame.keep()` to extend lifetime in callbacks
- **Thread Safety**: Frame callbacks execute on separate threads; synchronization may be required
- **Data Types**: Frame data types depend on stream format (uint8, uint16, float32, etc.)
- **Performance**: Buffer protocol access is zero-copy and very efficient for image processing