Skip to content

neon_recording

Modules:

  • calib

    Camera calibration utils

  • neon_recording

    Neon Recording

  • stream

    Streams module

Classes:

Functions:

  • open

    Load a NeonRecording from a path

AudioStream

AudioStream(name: str, base_name: str, recording: NeonRecording)

Bases: BaseAVStream

Audio frames stream

Methods:

  • interpolate

    Interpolated stream data for sorted_ts

Attributes:

  • data (Array) –

    Stream data as structured numpy array

  • pd

    Stream data as a pandas DataFrame

  • ts (NDArray[int64]) –

    The moment these data were recorded

Source code in src/pupil_labs/neon_recording/stream/av_stream/base_av_stream.py
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
def __init__(
    self,
    name: str,
    base_name: str,
    recording: "NeonRecording",
):
    """Load an AV (audio or video) stream for `recording`.

    Builds a per-frame table of timestamps and global frame indices, plus a
    multi-part media reader over all ``<base_name>*.mp4`` files found in the
    recording directory.

    Args:
        name: Stream name; also used to name the generated frame classes.
        base_name: Filename stem of the media parts to load.
        recording: The recording this stream belongs to.

    Raises:
        RuntimeError: If ``self.kind`` is neither "video" nor "audio".

    """
    self.name = name
    self._base_name = base_name
    self.recording = recording

    log.debug(f"NeonRecording: Loading video: {self._base_name}.")

    self.video_parts: list[plv.Reader[plv.VideoFrame]] = []
    av_files = find_sorted_multipart_files(
        self.recording._rec_dir, self._base_name, ".mp4"
    )
    parts_ts = []
    video_readers = []
    for av_file, time_file in av_files:
        if self.kind == "video":
            # Video: frame timestamps come from the sidecar .time file and
            # are converted to seconds relative to recording start for the
            # reader. Keep only as many timestamps as the reader has frames.
            part_ts = Array(time_file, dtype=TIMESTAMP_DTYPE)  # type: ignore
            container_timestamps = (part_ts["ts"] - recording.start_ts) / 1e9
            reader = plv.Reader(str(av_file), self.kind, container_timestamps)
            part_ts = part_ts[: len(reader)]
        elif self.kind == "audio":
            # Audio: timestamps are taken from the container's own clock and
            # converted to absolute nanoseconds since recording start.
            reader = plv.Reader(str(av_file), self.kind)  # type: ignore
            part_ts = (
                recording.start_ts + (reader.container_timestamps * 1e9)  # type: ignore
            ).astype(TIMESTAMP_DTYPE)
        else:
            raise RuntimeError(f"unknown av stream kind: {self.kind}")

        parts_ts.append(part_ts)
        video_readers.append(reader)

    # One row per frame across all parts: timestamp + global frame index.
    parts_ts = np.concatenate(parts_ts)
    idxs = np.empty(len(parts_ts), dtype=AV_INDEX_DTYPE)
    idxs[AV_INDEX_FIELD_NAME] = np.arange(len(parts_ts))

    data = join_struct_arrays(
        [
            parts_ts,  # type: ignore
            idxs,
        ],
    )
    self.av_reader = plv.MultiReader(video_readers)

    # Generate stream-specific Frame/Frames classes bound to this reader so
    # that indexing the data array can decode the matching media frames.
    BoundAVFrameClass = type(
        f"{self.name.capitalize()}Frame",
        (BaseAVStreamFrame, AVStreamProps),
        {"dtype": data.dtype, "multi_video_reader": self.av_reader},
    )
    BoundAVFramesClass = type(
        f"{self.name.capitalize()}Frames",
        (Array, AVStreamProps),
        {
            "record_class": BoundAVFrameClass,
            "dtype": data.dtype,
            "multi_video_reader": self.av_reader,
        },
    )

    super().__init__(name, recording, data.view(BoundAVFramesClass))

data property

data: Array

Stream data as structured numpy array

pd property

pd

Stream data as a pandas DataFrame

ts class-attribute instance-attribute

ts: NDArray[int64] = fields[int64](TIMESTAMP_FIELD_NAME)

The moment these data were recorded

interpolate

interpolate(sorted_ts: NDArray[int64]) -> ArrayType

Interpolated stream data for sorted_ts

Source code in src/pupil_labs/neon_recording/stream/stream.py
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
def interpolate(self, sorted_ts: npt.NDArray[np.int64]) -> ArrayType:
    """Interpolated stream data for `sorted_ts`"""
    assert self.data.dtype is not None

    sorted_ts = np.array(sorted_ts)

    # Keep only numeric columns; the timestamp column stays int64 while all
    # other columns are promoted to float64 so NaN can mark out-of-range
    # samples.
    numeric_fields = [
        name
        for name in self.data.dtype.names or []
        if issubclass(self.data.dtype[name].type, (np.floating, np.integer))
    ]
    out_dtype = np.dtype([
        (name, np.int64 if name == TIMESTAMP_FIELD_NAME else np.float64)
        for name in numeric_fields
    ])

    out = np.zeros(len(sorted_ts), out_dtype)
    out[TIMESTAMP_FIELD_NAME] = sorted_ts
    for name in out_dtype.names or []:
        if name != TIMESTAMP_FIELD_NAME:
            out[name] = np.interp(
                sorted_ts,
                self.ts,
                self.data[name].astype(np.float64),
                left=np.nan,
                right=np.nan,
            )
    return cast(ArrayType, out.view(self.data.__class__))

BlinkStream

BlinkStream(recording: NeonRecording)

Bases: Stream[BlinkArray, BlinkRecord], BlinkProps

Blinks data

Methods:

  • interpolate

    Interpolated stream data for sorted_ts

Attributes:

  • end_ts

    End timestamp of blink

  • pd

    Stream data as a pandas DataFrame

  • start_ts

    Start timestamp of blink

  • ts (NDArray[int64]) –

    The moment these data were recorded

Source code in src/pupil_labs/neon_recording/stream/blink_stream.py
47
48
49
50
51
52
53
54
55
56
57
def __init__(self, recording: "NeonRecording"):
    """Load blink events from the recording's multipart "blinks" files."""
    log.debug("NeonRecording: Loading blink data")

    # Each blink record is a (start, end) pair of nanosecond timestamps.
    blink_dtype = np.dtype([
        ("start_timestamp_ns", "int64"),
        ("end_timestamp_ns", "int64"),
    ])
    pairs = find_sorted_multipart_files(recording._rec_dir, "blinks")
    raw = load_multipart_data_time_pairs(pairs, blink_dtype)
    super().__init__("blink", recording, raw.view(BlinkArray))

end_ts class-attribute instance-attribute

end_ts = fields[int64]('end_timestamp_ns')

End timestamp of blink

pd property

pd

Stream data as a pandas DataFrame

start_ts class-attribute instance-attribute

start_ts = fields[int64]('start_timestamp_ns')

Start timestamp of blink

ts class-attribute instance-attribute

ts: NDArray[int64] = fields[int64](TIMESTAMP_FIELD_NAME)

The moment these data were recorded

interpolate

interpolate(sorted_ts: NDArray[int64]) -> ArrayType

Interpolated stream data for sorted_ts

Source code in src/pupil_labs/neon_recording/stream/stream.py
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
def interpolate(self, sorted_ts: npt.NDArray[np.int64]) -> ArrayType:
    """Interpolated stream data for `sorted_ts`"""
    assert self.data.dtype is not None

    sorted_ts = np.array(sorted_ts)

    # Keep only numeric columns; the timestamp column stays int64 while all
    # other columns are promoted to float64 so NaN can mark out-of-range
    # samples.
    numeric_fields = [
        name
        for name in self.data.dtype.names or []
        if issubclass(self.data.dtype[name].type, (np.floating, np.integer))
    ]
    out_dtype = np.dtype([
        (name, np.int64 if name == TIMESTAMP_FIELD_NAME else np.float64)
        for name in numeric_fields
    ])

    out = np.zeros(len(sorted_ts), out_dtype)
    out[TIMESTAMP_FIELD_NAME] = sorted_ts
    for name in out_dtype.names or []:
        if name != TIMESTAMP_FIELD_NAME:
            out[name] = np.interp(
                sorted_ts,
                self.ts,
                self.data[name].astype(np.float64),
                left=np.nan,
                right=np.nan,
            )
    return cast(ArrayType, out.view(self.data.__class__))

EventStream

EventStream(recording: NeonRecording)

Bases: Stream[EventArray, EventRecord], EventProps

Event annotations

Methods:

  • interpolate

    Interpolated stream data for sorted_ts

Attributes:

  • by_name

    Return a dict of event_name => all ts

  • event

    Event name

  • pd

    Stream data as a pandas DataFrame

  • ts (NDArray[int64]) –

    The moment these data were recorded

Source code in src/pupil_labs/neon_recording/stream/event_stream.py
39
40
41
42
43
44
45
46
47
48
49
50
51
def __init__(self, recording: "NeonRecording"):
    """Load event annotations from ``event.txt`` and ``event.time``."""
    log.debug("NeonRecording: Loading event data")

    events_file = recording._rec_dir / "event.txt"
    time_file = events_file.with_suffix(".time")

    # Only read events when both the text and timestamp files exist.
    if events_file.exists() and time_file.exists():
        file_pairs = [(events_file, time_file)]
    else:
        file_pairs = []

    data = load_multipart_data_time_pairs(file_pairs, "str")
    # Expose the raw "text" column under the name "event".
    renamed = ["event" if name == "text" else name for name in data.dtype.names]
    data.dtype.names = renamed
    super().__init__("event", recording, data.view(EventArray))

by_name cached property

by_name

Return a dict of event_name => all ts

event class-attribute instance-attribute

event = fields[float64]('event')

Event name

pd property

pd

Stream data as a pandas DataFrame

ts class-attribute instance-attribute

ts: NDArray[int64] = fields[int64](TIMESTAMP_FIELD_NAME)

The moment these data were recorded

interpolate

interpolate(sorted_ts: NDArray[int64]) -> ArrayType

Interpolated stream data for sorted_ts

Source code in src/pupil_labs/neon_recording/stream/stream.py
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
def interpolate(self, sorted_ts: npt.NDArray[np.int64]) -> ArrayType:
    """Interpolated stream data for `sorted_ts`"""
    assert self.data.dtype is not None

    sorted_ts = np.array(sorted_ts)

    # Keep only numeric columns; the timestamp column stays int64 while all
    # other columns are promoted to float64 so NaN can mark out-of-range
    # samples.
    numeric_fields = [
        name
        for name in self.data.dtype.names or []
        if issubclass(self.data.dtype[name].type, (np.floating, np.integer))
    ]
    out_dtype = np.dtype([
        (name, np.int64 if name == TIMESTAMP_FIELD_NAME else np.float64)
        for name in numeric_fields
    ])

    out = np.zeros(len(sorted_ts), out_dtype)
    out[TIMESTAMP_FIELD_NAME] = sorted_ts
    for name in out_dtype.names or []:
        if name != TIMESTAMP_FIELD_NAME:
            out[name] = np.interp(
                sorted_ts,
                self.ts,
                self.data[name].astype(np.float64),
                left=np.nan,
                right=np.nan,
            )
    return cast(ArrayType, out.view(self.data.__class__))

EyeStateStream

EyeStateStream(recording: NeonRecording)

Bases: Stream[EyeStateArray, EyeStateRecord], EyeStateProps

Eye state data

Methods:

  • interpolate

    Interpolated stream data for sorted_ts

Attributes:

Source code in src/pupil_labs/neon_recording/stream/eye_state_stream.py
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
def __init__(self, recording: "NeonRecording"):
    """Load per-sample eye state estimates from multipart "eye_state" files."""
    log.debug("NeonRecording: Loading eye state data")

    # Per eye: pupil diameter, eyeball center xyz, optical axis xyz — all
    # float32, left-eye fields first, then right-eye fields.
    per_side_fields = [
        "pupil_diameter_{side}_mm",
        "eyeball_center_{side}_x",
        "eyeball_center_{side}_y",
        "eyeball_center_{side}_z",
        "optical_axis_{side}_x",
        "optical_axis_{side}_y",
        "optical_axis_{side}_z",
    ]
    eye_state_dtype = np.dtype([
        (template.format(side=side), "float32")
        for side in ("left", "right")
        for template in per_side_fields
    ])

    file_pairs = find_sorted_multipart_files(recording._rec_dir, "eye_state")
    data = load_multipart_data_time_pairs(file_pairs, dtype=eye_state_dtype)
    super().__init__("eye_state", recording, data.view(EyeStateArray))

eyeball_center_left_xyz class-attribute instance-attribute

eyeball_center_left_xyz = fields[float64](['eyeball_center_left_x', 'eyeball_center_left_y', 'eyeball_center_left_z'])

The xyz position in mm of the left eyeball relative to the scene camera

eyeball_center_right_xyz class-attribute instance-attribute

eyeball_center_right_xyz = fields[float64](['eyeball_center_right_x', 'eyeball_center_right_y', 'eyeball_center_right_z'])

The xyz position in mm of the right eyeball relative to the scene camera

eyelid_angle class-attribute instance-attribute

eyelid_angle = fields[float64](['eyelid_angle_top_left', 'eyelid_angle_bottom_left', 'eyelid_angle_top_right', 'eyelid_angle_bottom_right'])

Eyelid angle: (top_left, bottom_left, top_right, bottom_right)

eyelid_aperture_left_right_mm class-attribute instance-attribute

eyelid_aperture_left_right_mm = fields[float64](['eyelid_aperture_left_mm', 'eyelid_aperture_right_mm'])

Eyelid aperture in mm: (left, right)

optical_axis_left_xyz class-attribute instance-attribute

optical_axis_left_xyz = fields[float64](['optical_axis_left_x', 'optical_axis_left_y', 'optical_axis_left_z'])

A xyz vector in the forward direction of the left eye's optical axis

optical_axis_right_xyz class-attribute instance-attribute

optical_axis_right_xyz = fields[float64](['optical_axis_right_x', 'optical_axis_right_y', 'optical_axis_right_z'])

A xyz vector in the forward direction of the right eye's optical axis

pd property

pd

Stream data as a pandas DataFrame

pupil_diameter_left_mm class-attribute instance-attribute

pupil_diameter_left_mm = fields[float64](['pupil_diameter_left_mm'])

Pupil diameter (in mm) for left eye

pupil_diameter_left_right_mm class-attribute instance-attribute

pupil_diameter_left_right_mm = fields[float64](['pupil_diameter_left_mm', 'pupil_diameter_right_mm'])

Pupil diameter (in mm) for both eyes: (left, right)

pupil_diameter_right_mm class-attribute instance-attribute

pupil_diameter_right_mm = fields[float64](['pupil_diameter_right_mm'])

Pupil diameter (in mm) for right eye

ts class-attribute instance-attribute

ts: NDArray[int64] = fields[int64](TIMESTAMP_FIELD_NAME)

The moment these data were recorded

interpolate

interpolate(sorted_ts: NDArray[int64]) -> ArrayType

Interpolated stream data for sorted_ts

Source code in src/pupil_labs/neon_recording/stream/stream.py
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
def interpolate(self, sorted_ts: npt.NDArray[np.int64]) -> ArrayType:
    """Interpolated stream data for `sorted_ts`"""
    assert self.data.dtype is not None

    sorted_ts = np.array(sorted_ts)

    # Keep only numeric columns; the timestamp column stays int64 while all
    # other columns are promoted to float64 so NaN can mark out-of-range
    # samples.
    numeric_fields = [
        name
        for name in self.data.dtype.names or []
        if issubclass(self.data.dtype[name].type, (np.floating, np.integer))
    ]
    out_dtype = np.dtype([
        (name, np.int64 if name == TIMESTAMP_FIELD_NAME else np.float64)
        for name in numeric_fields
    ])

    out = np.zeros(len(sorted_ts), out_dtype)
    out[TIMESTAMP_FIELD_NAME] = sorted_ts
    for name in out_dtype.names or []:
        if name != TIMESTAMP_FIELD_NAME:
            out[name] = np.interp(
                sorted_ts,
                self.ts,
                self.data[name].astype(np.float64),
                left=np.nan,
                right=np.nan,
            )
    return cast(ArrayType, out.view(self.data.__class__))

FixationStream

FixationStream(recording: NeonRecording)

Bases: Stream[FixationArray, FixationRecord], FixationProps

Fixation data

Methods:

  • interpolate

    Interpolated stream data for sorted_ts

Attributes:

Source code in src/pupil_labs/neon_recording/stream/fixation_stream.py
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
def __init__(self, recording: "NeonRecording"):
    """Load fixation/saccade events from multipart "fixations" files."""
    log.debug("NeonRecording: Loading fixation data")

    # Record layout written by the companion pipeline.
    fixation_dtype = np.dtype([
        ("event_type", "int32"),
        ("start_timestamp_ns", "int64"),
        ("end_timestamp_ns", "int64"),
        ("start_gaze_x", "float32"),
        ("start_gaze_y", "float32"),
        ("end_gaze_x", "float32"),
        ("end_gaze_y", "float32"),
        ("mean_gaze_x", "float32"),
        ("mean_gaze_y", "float32"),
        ("amplitude_pixels", "float32"),
        ("amplitude_angle_deg", "float32"),
        ("mean_velocity", "float32"),
        ("max_velocity", "float32"),
    ])
    pairs = find_sorted_multipart_files(recording._rec_dir, "fixations")
    raw = load_multipart_data_time_pairs(pairs, fixation_dtype)
    super().__init__("fixation", recording, raw.view(FixationArray))

amplitude_angle_deg class-attribute instance-attribute

amplitude_angle_deg = fields[float32]('amplitude_angle_deg')

Amplitude angle (degrees)

amplitude_pixels class-attribute instance-attribute

amplitude_pixels = fields[float32]('amplitude_pixels')

Amplitude (pixels)

end_gaze_xy class-attribute instance-attribute

end_gaze_xy = fields[float32](['end_gaze_x', 'end_gaze_y'])

End gaze position in pixels

end_ts class-attribute instance-attribute

end_ts = fields[int64]('end_timestamp_ns')

End timestamp of fixation

event_type class-attribute instance-attribute

event_type = fields[int32]('event_type')

Fixation event kind (0 = saccade / 1 = fixation)

max_velocity class-attribute instance-attribute

max_velocity = fields[float32]('max_velocity')

Max velocity of fixation (pixels/sec)

mean_gaze_xy class-attribute instance-attribute

mean_gaze_xy = fields[float32](['mean_gaze_x', 'mean_gaze_y'])

Mean gaze position in pixels

mean_velocity class-attribute instance-attribute

mean_velocity = fields[float32]('mean_velocity')

Mean velocity of fixation (pixels/sec)

pd property

pd

Stream data as a pandas DataFrame

start_gaze_xy class-attribute instance-attribute

start_gaze_xy = fields[float32](['start_gaze_x', 'start_gaze_y'])

Start gaze position in pixels

start_ts class-attribute instance-attribute

start_ts = fields[int64]('start_timestamp_ns')

Start timestamp of fixation

ts class-attribute instance-attribute

ts: NDArray[int64] = fields[int64](TIMESTAMP_FIELD_NAME)

The moment these data were recorded

interpolate

interpolate(sorted_ts: NDArray[int64]) -> ArrayType

Interpolated stream data for sorted_ts

Source code in src/pupil_labs/neon_recording/stream/stream.py
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
def interpolate(self, sorted_ts: npt.NDArray[np.int64]) -> ArrayType:
    """Interpolated stream data for `sorted_ts`"""
    assert self.data.dtype is not None

    sorted_ts = np.array(sorted_ts)

    # Keep only numeric columns; the timestamp column stays int64 while all
    # other columns are promoted to float64 so NaN can mark out-of-range
    # samples.
    numeric_fields = [
        name
        for name in self.data.dtype.names or []
        if issubclass(self.data.dtype[name].type, (np.floating, np.integer))
    ]
    out_dtype = np.dtype([
        (name, np.int64 if name == TIMESTAMP_FIELD_NAME else np.float64)
        for name in numeric_fields
    ])

    out = np.zeros(len(sorted_ts), out_dtype)
    out[TIMESTAMP_FIELD_NAME] = sorted_ts
    for name in out_dtype.names or []:
        if name != TIMESTAMP_FIELD_NAME:
            out[name] = np.interp(
                sorted_ts,
                self.ts,
                self.data[name].astype(np.float64),
                left=np.nan,
                right=np.nan,
            )
    return cast(ArrayType, out.view(self.data.__class__))

GazeStream

GazeStream(recording: NeonRecording)

Bases: Stream[GazeArray, GazeRecord], GazeProps

Gaze data

Methods:

  • interpolate

    Interpolated stream data for sorted_ts

Attributes:

  • pd

    Stream data as a pandas DataFrame

  • ts (NDArray[int64]) –

    The moment these data were recorded

  • x

    Gaze x coordinate in pixels

  • xy

    Gaze xy coordinates in pixels

  • y

    Gaze y coordinate in pixels

Source code in src/pupil_labs/neon_recording/stream/gaze_stream.py
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
def __init__(self, recording: "NeonRecording"):
    """Load gaze samples, preferring the post-processed 200 Hz files."""
    log.debug("NeonRecording: Loading gaze data")

    rec_dir = recording._rec_dir
    gaze_200hz_file = rec_dir / "gaze_200hz.raw"
    time_200hz_file = rec_dir / "gaze_200hz.time"

    # Prefer the densified 200 Hz export; fall back to the realtime
    # multipart gaze files when it is absent.
    if gaze_200hz_file.exists() and time_200hz_file.exists():
        log.debug("NeonRecording: Using 200Hz gaze data")
        file_pairs = [(gaze_200hz_file, time_200hz_file)]
    else:
        log.debug("NeonRecording: Using realtime gaze data")
        file_pairs = find_sorted_multipart_files(rec_dir, "gaze")

    gaze_dtype = np.dtype([("x", "float32"), ("y", "float32")])
    raw = load_multipart_data_time_pairs(file_pairs, gaze_dtype)
    super().__init__("gaze", recording, raw.view(GazeArray))

pd property

pd

Stream data as a pandas DataFrame

ts class-attribute instance-attribute

ts: NDArray[int64] = fields[int64](TIMESTAMP_FIELD_NAME)

The moment these data were recorded

x class-attribute instance-attribute

x = fields[float64]('x')

Gaze x coordinate in pixels

xy class-attribute instance-attribute

xy = fields[float64](['x', 'y'])

Gaze xy coordinates in pixels

y class-attribute instance-attribute

y = fields[float64]('y')

Gaze y coordinate in pixels

interpolate

interpolate(sorted_ts: NDArray[int64]) -> ArrayType

Interpolated stream data for sorted_ts

Source code in src/pupil_labs/neon_recording/stream/stream.py
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
def interpolate(self, sorted_ts: npt.NDArray[np.int64]) -> ArrayType:
    """Interpolated stream data for `sorted_ts`"""
    assert self.data.dtype is not None

    sorted_ts = np.array(sorted_ts)

    # Keep only numeric columns; the timestamp column stays int64 while all
    # other columns are promoted to float64 so NaN can mark out-of-range
    # samples.
    numeric_fields = [
        name
        for name in self.data.dtype.names or []
        if issubclass(self.data.dtype[name].type, (np.floating, np.integer))
    ]
    out_dtype = np.dtype([
        (name, np.int64 if name == TIMESTAMP_FIELD_NAME else np.float64)
        for name in numeric_fields
    ])

    out = np.zeros(len(sorted_ts), out_dtype)
    out[TIMESTAMP_FIELD_NAME] = sorted_ts
    for name in out_dtype.names or []:
        if name != TIMESTAMP_FIELD_NAME:
            out[name] = np.interp(
                sorted_ts,
                self.ts,
                self.data[name].astype(np.float64),
                left=np.nan,
                right=np.nan,
            )
    return cast(ArrayType, out.view(self.data.__class__))

IMUStream

IMUStream(recording)

Bases: Stream[ImuArray, ImuRecord], ImuProps

Motion and orientation data

Methods:

  • interpolate

    Interpolated stream data for sorted_ts

Attributes:

  • accel_xyz

    Acceleration data

  • data (Array) –

    Stream data as structured numpy array

  • gyro_xyz

    Gyroscope data

  • pd

    Stream data as a pandas DataFrame

  • quaternion_wxyz

    Orientation as a quaternion

  • ts (NDArray[int64]) –

    The moment these data were recorded

Source code in src/pupil_labs/neon_recording/stream/imu/imu_stream.py
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
def __init__(self, recording):
    """Load IMU samples for `recording`.

    Prefers the pre-parsed multipart "imu" files; falls back to decoding
    raw "extimu" packet files when none are present.
    """
    log.debug("NeonRecording: Loading IMU data")

    imu_file_pairs = find_sorted_multipart_files(recording._rec_dir, "imu")

    if len(imu_file_pairs) > 0:
        imu_data = Array(  # type: ignore
            [file for file, _ in imu_file_pairs],
            fallback_dtype=np.dtype(IMUStream.FALLBACK_DTYPE),
        )
        # Rename the on-disk "timestamp_ns" column to the canonical
        # timestamp field name shared by all streams.
        imu_data.dtype.names = [  # type: ignore
            TIMESTAMP_FIELD_NAME if name == "timestamp_ns" else name
            for name in imu_data.dtype.names  # type: ignore
        ]

    else:
        # Fallback: timestamps come from the .time sidecar files while the
        # sensor values are decoded from the raw "extimu" packet stream.
        imu_file_pairs = find_sorted_multipart_files(recording._rec_dir, "extimu")
        time_data = Array([file for _, file in imu_file_pairs], TIMESTAMP_DTYPE)  # type: ignore

        records = []
        for imu_file, _ in imu_file_pairs:
            with imu_file.open("rb") as raw_file:
                raw_data = raw_file.read()
                imu_packets = parse_neon_imu_raw_packets(raw_data)

                # Flatten each packet to (gyro xyz, accel xyz, quat wxyz),
                # matching the order of FALLBACK_DTYPE's fields.
                records.extend([
                    (
                        packet.gyroData.x,
                        packet.gyroData.y,
                        packet.gyroData.z,
                        packet.accelData.x,
                        packet.accelData.y,
                        packet.accelData.z,
                        packet.rotVecData.w,
                        packet.rotVecData.x,
                        packet.rotVecData.y,
                        packet.rotVecData.z,
                    )
                    for packet in imu_packets
                ])

        imu_data = np.array(records, dtype=IMUStream.FALLBACK_DTYPE)  # type: ignore
        imu_data = join_struct_arrays([time_data, imu_data])

    super().__init__("imu", recording, imu_data.view(ImuArray))

accel_xyz class-attribute instance-attribute

accel_xyz = fields[float64](['accel_x', 'accel_y', 'accel_z'])

Acceleration data

data property

data: Array

Stream data as structured numpy array

gyro_xyz class-attribute instance-attribute

gyro_xyz = fields[float64](['gyro_x', 'gyro_y', 'gyro_z'])

Gyroscope data

pd property

pd

Stream data as a pandas DataFrame

quaternion_wxyz class-attribute instance-attribute

quaternion_wxyz = fields[float64](['quaternion_w', 'quaternion_x', 'quaternion_y', 'quaternion_z'])

Orientation as a quaternion

ts class-attribute instance-attribute

ts: NDArray[int64] = fields[int64](TIMESTAMP_FIELD_NAME)

The moment these data were recorded

interpolate

interpolate(sorted_ts: NDArray[int64]) -> ArrayType

Interpolated stream data for sorted_ts

Source code in src/pupil_labs/neon_recording/stream/stream.py
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
def interpolate(self, sorted_ts: npt.NDArray[np.int64]) -> ArrayType:
    """Interpolated stream data for `sorted_ts`"""
    assert self.data.dtype is not None

    sorted_ts = np.array(sorted_ts)

    # Keep only numeric columns; the timestamp column stays int64 while all
    # other columns are promoted to float64 so NaN can mark out-of-range
    # samples.
    numeric_fields = [
        name
        for name in self.data.dtype.names or []
        if issubclass(self.data.dtype[name].type, (np.floating, np.integer))
    ]
    out_dtype = np.dtype([
        (name, np.int64 if name == TIMESTAMP_FIELD_NAME else np.float64)
        for name in numeric_fields
    ])

    out = np.zeros(len(sorted_ts), out_dtype)
    out[TIMESTAMP_FIELD_NAME] = sorted_ts
    for name in out_dtype.names or []:
        if name != TIMESTAMP_FIELD_NAME:
            out[name] = np.interp(
                sorted_ts,
                self.ts,
                self.data[name].astype(np.float64),
                left=np.nan,
                right=np.nan,
            )
    return cast(ArrayType, out.view(self.data.__class__))

NeonRecording

NeonRecording(rec_dir_in: Union[Path, str])

Class to handle the Neon Recording data

Parameters:

  • rec_dir_in (Union[Path, str]) –

    Path to the recording directory.

Raises:

Attributes:

Source code in src/pupil_labs/neon_recording/neon_recording.py
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
def __init__(self, rec_dir_in: Union[pathlib.Path, str]):
    """Initialize the NeonRecording object

    Args:
        rec_dir_in: Path to the recording directory.

    Raises:
        FileNotFoundError: If the directory does not exist or is not valid.

    """
    self._rec_dir = pathlib.Path(rec_dir_in).resolve()
    rec_dir = self._rec_dir
    # Fail fast on a missing or non-directory path.
    if not (rec_dir.exists() and rec_dir.is_dir()):
        raise FileNotFoundError(f"Directory not found or not valid: {rec_dir}")

audio cached property

audio: AudioStream

Audio from the scene video

blinks: BlinkStream

Blink data

calibration cached property

calibration: Calibration | None

Device camera calibration data

device_serial property

device_serial: str | None

Device serial number

duration property

duration: int

Recording Duration (nanoseconds)

events cached property

events: EventStream

Event annotations

eye cached property

Frames of video from the eye cameras

eye_state cached property

eye_state: EyeStateStream

Eye state data

fixations cached property

fixations: FixationStream

Fixation data

gaze cached property

gaze: GazeStream

2D gaze data in scene-camera space

id property

id: str | None

UUID of the recording

imu cached property

imu: IMUStream

Motion and orientation data

info cached property

info: dict

Information loaded from info.json

scene cached property

scene: VideoStream

Frames of video from the scene camera

start_ts property

start_ts: int

Start timestamp (nanoseconds since 1970-01-01)

stop_ts property

stop_ts: int

Stop timestamp (nanoseconds since 1970-01-01)

wearer cached property

wearer: dict

Wearer information containing uuid and name

worn cached property

worn: WornStream

Worn (headset on/off) data

VideoStream

VideoStream(name: str, base_name: str, recording: NeonRecording)

Bases: BaseAVStream

Video frames from a camera

Methods:

  • interpolate

    Interpolated stream data for sorted_ts

Attributes:

  • data (Array) –

    Stream data as structured numpy array

  • height (int | None) –

    Height of image in stream

  • pd

    Stream data as a pandas DataFrame

  • ts (NDArray[int64]) –

    The moment these data were recorded

  • width (int | None) –

    Width of image in stream

Source code in src/pupil_labs/neon_recording/stream/av_stream/base_av_stream.py
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
def __init__(
    self,
    name: str,
    base_name: str,
    recording: "NeonRecording",
):
    """Load an AV (audio or video) stream for `recording`.

    Builds a per-frame table of timestamps and global frame indices, plus a
    multi-part media reader over all ``<base_name>*.mp4`` files found in the
    recording directory.

    Args:
        name: Stream name; also used to name the generated frame classes.
        base_name: Filename stem of the media parts to load.
        recording: The recording this stream belongs to.

    Raises:
        RuntimeError: If ``self.kind`` is neither "video" nor "audio".

    """
    self.name = name
    self._base_name = base_name
    self.recording = recording

    log.debug(f"NeonRecording: Loading video: {self._base_name}.")

    self.video_parts: list[plv.Reader[plv.VideoFrame]] = []
    av_files = find_sorted_multipart_files(
        self.recording._rec_dir, self._base_name, ".mp4"
    )
    parts_ts = []
    video_readers = []
    for av_file, time_file in av_files:
        if self.kind == "video":
            # Video: frame timestamps come from the sidecar .time file and
            # are converted to seconds relative to recording start for the
            # reader. Keep only as many timestamps as the reader has frames.
            part_ts = Array(time_file, dtype=TIMESTAMP_DTYPE)  # type: ignore
            container_timestamps = (part_ts["ts"] - recording.start_ts) / 1e9
            reader = plv.Reader(str(av_file), self.kind, container_timestamps)
            part_ts = part_ts[: len(reader)]
        elif self.kind == "audio":
            # Audio: timestamps are taken from the container's own clock and
            # converted to absolute nanoseconds since recording start.
            reader = plv.Reader(str(av_file), self.kind)  # type: ignore
            part_ts = (
                recording.start_ts + (reader.container_timestamps * 1e9)  # type: ignore
            ).astype(TIMESTAMP_DTYPE)
        else:
            raise RuntimeError(f"unknown av stream kind: {self.kind}")

        parts_ts.append(part_ts)
        video_readers.append(reader)

    # One row per frame across all parts: timestamp + global frame index.
    parts_ts = np.concatenate(parts_ts)
    idxs = np.empty(len(parts_ts), dtype=AV_INDEX_DTYPE)
    idxs[AV_INDEX_FIELD_NAME] = np.arange(len(parts_ts))

    data = join_struct_arrays(
        [
            parts_ts,  # type: ignore
            idxs,
        ],
    )
    self.av_reader = plv.MultiReader(video_readers)

    # Generate stream-specific Frame/Frames classes bound to this reader so
    # that indexing the data array can decode the matching media frames.
    BoundAVFrameClass = type(
        f"{self.name.capitalize()}Frame",
        (BaseAVStreamFrame, AVStreamProps),
        {"dtype": data.dtype, "multi_video_reader": self.av_reader},
    )
    BoundAVFramesClass = type(
        f"{self.name.capitalize()}Frames",
        (Array, AVStreamProps),
        {
            "record_class": BoundAVFrameClass,
            "dtype": data.dtype,
            "multi_video_reader": self.av_reader,
        },
    )

    super().__init__(name, recording, data.view(BoundAVFramesClass))

data property

data: Array

Stream data as structured numpy array

height property

height: int | None

Height of image in stream

pd property

pd

Stream data as a pandas DataFrame

ts class-attribute instance-attribute

ts: NDArray[int64] = fields[int64](TIMESTAMP_FIELD_NAME)

The moment these data were recorded

width property

width: int | None

Width of image in stream

interpolate

interpolate(sorted_ts: NDArray[int64]) -> ArrayType

Interpolated stream data for sorted_ts

Source code in src/pupil_labs/neon_recording/stream/stream.py
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
def interpolate(self, sorted_ts: npt.NDArray[np.int64]) -> ArrayType:
    """Interpolated stream data for `sorted_ts`"""
    assert self.data.dtype is not None

    sorted_ts = np.array(sorted_ts)

    # Keep only numeric columns; the timestamp column stays int64 while all
    # other columns are promoted to float64 so NaN can mark out-of-range
    # samples.
    numeric_fields = [
        name
        for name in self.data.dtype.names or []
        if issubclass(self.data.dtype[name].type, (np.floating, np.integer))
    ]
    out_dtype = np.dtype([
        (name, np.int64 if name == TIMESTAMP_FIELD_NAME else np.float64)
        for name in numeric_fields
    ])

    out = np.zeros(len(sorted_ts), out_dtype)
    out[TIMESTAMP_FIELD_NAME] = sorted_ts
    for name in out_dtype.names or []:
        if name != TIMESTAMP_FIELD_NAME:
            out[name] = np.interp(
                sorted_ts,
                self.ts,
                self.data[name].astype(np.float64),
                left=np.nan,
                right=np.nan,
            )
    return cast(ArrayType, out.view(self.data.__class__))

WornStream

WornStream(recording: NeonRecording)

Bases: Stream[WornArray, WornRecord], WornProps

Worn (headset on/off) data

Methods:

  • interpolate

    Interpolated stream data for sorted_ts

Attributes:

  • pd

    Stream data as a pandas DataFrame

  • ts (NDArray[int64]) –

    The moment these data were recorded

  • worn

    Worn

Source code in src/pupil_labs/neon_recording/stream/worn_stream.py
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
def __init__(self, recording: "NeonRecording"):
    """Load worn (headset on/off) samples, preferring 200 Hz data."""
    log.debug("NeonRecording: Loading worn data")

    rec_dir = recording._rec_dir
    worn_200hz_file = rec_dir / "worn_200hz.raw"
    # NOTE(review): 200 Hz worn samples are timestamped with the *gaze*
    # 200 Hz time file — presumably they are written in lockstep with
    # gaze; confirm against the recording format docs.
    time_200hz_file = rec_dir / "gaze_200hz.time"

    if worn_200hz_file.exists() and time_200hz_file.exists():
        log.debug("NeonRecording: Using 200Hz worn data")
        file_pairs = [(worn_200hz_file, time_200hz_file)]
    else:
        log.debug("NeonRecording: Using realtime worn data")
        file_pairs = find_sorted_multipart_files(rec_dir, "worn")

    raw = load_multipart_data_time_pairs(file_pairs, np.dtype([("worn", "u1")]))
    super().__init__("worn", recording, raw.view(WornArray))

pd property

pd

Stream data as a pandas DataFrame

ts class-attribute instance-attribute

ts: NDArray[int64] = fields[int64](TIMESTAMP_FIELD_NAME)

The moment these data were recorded

worn class-attribute instance-attribute

worn = fields[float64]('worn')

Worn

interpolate

interpolate(sorted_ts: NDArray[int64]) -> ArrayType

Interpolated stream data for sorted_ts

Source code in src/pupil_labs/neon_recording/stream/stream.py
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
def interpolate(self, sorted_ts: npt.NDArray[np.int64]) -> ArrayType:
    """Interpolated stream data for `sorted_ts`"""
    assert self.data.dtype is not None

    sorted_ts = np.array(sorted_ts)

    # Keep only numeric columns; the timestamp column stays int64 while all
    # other columns are promoted to float64 so NaN can mark out-of-range
    # samples.
    numeric_fields = [
        name
        for name in self.data.dtype.names or []
        if issubclass(self.data.dtype[name].type, (np.floating, np.integer))
    ]
    out_dtype = np.dtype([
        (name, np.int64 if name == TIMESTAMP_FIELD_NAME else np.float64)
        for name in numeric_fields
    ])

    out = np.zeros(len(sorted_ts), out_dtype)
    out[TIMESTAMP_FIELD_NAME] = sorted_ts
    for name in out_dtype.names or []:
        if name != TIMESTAMP_FIELD_NAME:
            out[name] = np.interp(
                sorted_ts,
                self.ts,
                self.data[name].astype(np.float64),
                left=np.nan,
                right=np.nan,
            )
    return cast(ArrayType, out.view(self.data.__class__))

open

open(rec_dir_in: Union[Path, str]) -> NeonRecording

Load a NeonRecording from a path

Source code in src/pupil_labs/neon_recording/neon_recording.py
151
152
153
def open(rec_dir_in: Union[pathlib.Path, str]) -> NeonRecording:  # noqa: A001
    """Load a NeonRecording from a path

    Thin convenience wrapper around the ``NeonRecording`` constructor.
    """
    recording = NeonRecording(rec_dir_in)
    return recording