diff --git a/examples/async/stream_eye_events.py b/examples/async/stream_eye_events.py
index 6576493..7b839fc 100644
--- a/examples/async/stream_eye_events.py
+++ b/examples/async/stream_eye_events.py
@@ -1,7 +1,12 @@
 import asyncio
 import contextlib
+from datetime import datetime, timezone
 
 from pupil_labs.realtime_api import Device, Network, receive_eye_events_data
+from pupil_labs.realtime_api.streaming.eye_events import (
+    BlinkEventData,
+    FixationEventData,
+)
 
 
 async def main():
@@ -22,7 +27,20 @@ async def main():
         async for eye_event in receive_eye_events_data(
             sensor_eye_events.url, run_loop=restart_on_disconnect
         ):
-            print(eye_event)
+            if isinstance(eye_event, BlinkEventData):
+                time_sec = eye_event.start_time_ns // 1e9
+                blink_time = datetime.fromtimestamp(time_sec, timezone.utc)
+                print(f"[BLINK] blinked at {blink_time.strftime('%H:%M:%S')} UTC")
+
+            elif isinstance(eye_event, FixationEventData) and eye_event.event_type == 0:
+                angle = eye_event.amplitude_angle_deg
+                print(f"[SACCADE] event with {angle:.0f}° amplitude.")
+
+            elif isinstance(eye_event, FixationEventData) and eye_event.event_type == 1:
+                duration = (eye_event.end_time_ns - eye_event.start_time_ns) / 1e9
+                print(f"[FIXATION] event with duration of {duration:.2f} seconds.")
+
+            # print(eye_event) # This will print all the fields of the eye event
 
 
 if __name__ == "__main__":
diff --git a/examples/simple/stream_eye_events.py b/examples/simple/stream_eye_events.py
new file mode 100644
index 0000000..86a35fc
--- /dev/null
+++ b/examples/simple/stream_eye_events.py
@@ -0,0 +1,42 @@
+from datetime import datetime, timezone
+
+from pupil_labs.realtime_api.simple import discover_one_device
+from pupil_labs.realtime_api.streaming.eye_events import (
+    BlinkEventData,
+    FixationEventData,
+)
+
+# Look for devices. Returns as soon as it has found the first device.
+print("Looking for the next best device...") +device = discover_one_device(max_search_duration_seconds=10) +if device is None: + print("No device found.") + raise SystemExit(-1) + +# device.streaming_start() # optional, if not called, stream is started on-demand + +try: + while True: + eye_event = device.receive_eye_events() + if isinstance(eye_event, BlinkEventData): + time_sec = eye_event.start_time_ns // 1e9 + blink_time = datetime.fromtimestamp(time_sec, timezone.utc) + print(f"[BLINK] blinked at {blink_time.strftime('%H:%M:%S')} UTC") + + elif isinstance(eye_event, FixationEventData) and eye_event.event_type == 0: + angle = eye_event.amplitude_angle_deg + print(f"[SACCADE] event with {angle:.0f}° amplitude.") + + elif isinstance(eye_event, FixationEventData) and eye_event.event_type == 1: + duration = (eye_event.end_time_ns - eye_event.start_time_ns) / 1e9 + print(f"[FIXATION] event with duration of {duration:.2f} seconds.") + + # print(eye_event) # This will print all the fields of the eye event + +except KeyboardInterrupt: + pass + +finally: + print("Stopping...") + # device.streaming_stop() # optional, if not called, stream is stopped on close + device.close() # explicitly stop auto-update diff --git a/setup.cfg b/setup.cfg index 0c76b28..a2156b7 100644 --- a/setup.cfg +++ b/setup.cfg @@ -28,11 +28,12 @@ install_requires = av beaupy numpy>=1.20 - pl-neon-recording>=0.1.4 pydantic>=2 websockets zeroconf importlib-metadata;python_version<"3.8" + pl-neon-recording==0.1.12;python_version=="3.9" + pupil-labs-neon-recording>=1.0.0;python_version>="3.10" typing-extensions;python_version<"3.8" python_requires = >=3.9 include_package_data = true diff --git a/src/pupil_labs/realtime_api/models.py b/src/pupil_labs/realtime_api/models.py index fe297ca..4e317f4 100644 --- a/src/pupil_labs/realtime_api/models.py +++ b/src/pupil_labs/realtime_api/models.py @@ -8,6 +8,7 @@ from datetime import datetime from functools import partial from textwrap import indent +from typing import Annotated from uuid import UUID from pydantic import ( @@ -22,7 +23,7 @@ create_model, ) from pydantic.dataclasses import dataclass as dataclass_pydantic -from typing_extensions import Annotated, Literal +from typing_extensions import Literal logger = logging.getLogger(__name__) @@ -179,7 +180,7 @@ def rec_duration_seconds(self) -> float: def _init_cls_with_annotated_fields_only(cls, d: T.Dict[str, T.Any]): - return cls(**{attr: d.get(attr, None) for attr in cls.__annotations__}) + return cls(**{attr: d.get(attr) for attr in cls.__annotations__}) class UnknownComponentError(ValueError): @@ -305,7 +306,8 @@ def direct_eye_events_sensor(self) -> T.Optional[Sensor]: return next( self.matching_sensors(Sensor.Name.EYE_EVENTS, Sensor.Connection.DIRECT), Sensor( - sensor=Sensor.Name.EYES.value, conn_type=Sensor.Connection.DIRECT.value + sensor=Sensor.Name.EYE_EVENTS.value, + conn_type=Sensor.Connection.DIRECT.value, ), ) @@ -390,7 +392,7 @@ def _simple_model_validator(self): answer_input_type = conlist( answer_input_type, min_length=1 if self.required else 0, - max_length=None if self.widget_type in {"CHECKBOX_LIST"} else 1, + max_length=None if self.widget_type == "CHECKBOX_LIST" else 1, ) else: if self.required: @@ -431,7 +433,7 @@ def _api_model_validator(self): answer_input_type = conlist( answer_input_entry_type, min_length=1 if self.required else 0, - max_length=None if self.widget_type in {"CHECKBOX_LIST"} else 1, + max_length=None if self.widget_type == "CHECKBOX_LIST" else 1, ) return (answer_input_type, field) @@ 
@@ -462,7 +464,7 @@ def convert_from_simple_to_api_format(self, data: T.Dict[str, T.Any]):
             api_format[question_id] = value
         return api_format
 
-    def convert_from_api_to_simple_format(self, data: T.Dict[str, T.List[str]]):
+    def convert_from_api_to_simple_format(self, data: T.Dict[str, list[str]]):
         simple_format = {}
         for question_id, value in data.items():
             question = self.get_question_by_id(question_id)
@@ -598,9 +600,9 @@ def __str__(self):
             error_lines = []
             for error in self.errors:
                 error_msg = ""
-                error_msg += f'location: {error["loc"]}\n'
-                error_msg += f'   input: {error["input"]}\n'
-                error_msg += f' message: {error["msg"]}\n'
+                error_msg += f"location: {error['loc']}\n"
+                error_msg += f"   input: {error['input']}\n"
+                error_msg += f" message: {error['msg']}\n"
                 question = error.get("question")
                 if question:
                     error_msg += (
@@ -618,6 +620,6 @@ def __str__(self):
                 name = self.template.name
             elif isinstance(self.template, TemplateItem):
                 name = self.template.title
-            return f"{name} ({self.template.id}) validation errors:\n" f"{error_lines}"
+            return f"{name} ({self.template.id}) validation errors:\n{error_lines}"
         except Exception as e:
             return f"InvalidTemplateAnswersError.__str__ error: {e}"
diff --git a/src/pupil_labs/realtime_api/simple/_utils.py b/src/pupil_labs/realtime_api/simple/_utils.py
index b39bb99..8a5efb5 100644
--- a/src/pupil_labs/realtime_api/simple/_utils.py
+++ b/src/pupil_labs/realtime_api/simple/_utils.py
@@ -111,8 +111,7 @@ def _start_streaming_task_if_intended(self, sensor):
     def _stop_streaming_task_if_running(self):
         if self._streaming_task is not None:
             logger_receive_data.info(
-                f"Cancelling prior streaming connection to "
-                f"{self._recent_sensor.sensor}"
+                f"Cancelling prior streaming connection to {self._recent_sensor.sensor}"
             )
             self._streaming_task.cancel()
             self._streaming_task = None
@@ -138,7 +137,10 @@ async def append_data_from_sensor_to_queue(self, sensor: Sensor):
                 device._most_recent_item[name].append(item)
                 if name == Sensor.Name.GAZE.value:
                     device._cached_gaze_for_matching.append(
-                        (item.timestamp_unix_seconds, item)
+                        (
+                            item.timestamp_unix_seconds,
+                            item,
+                        )
                     )
                 elif name == Sensor.Name.WORLD.value:
                     # Matching priority
@@ -211,9 +213,15 @@ async def append_data_from_sensor_to_queue(self, sensor: Sensor):
                     )
                 elif name == Sensor.Name.EYES.value:
                     device._cached_eyes_for_matching.append(
-                        (item.timestamp_unix_seconds, item)
+                        (
+                            item.timestamp_unix_seconds,
+                            item,
+                        )
                     )
-                elif name == Sensor.Name.IMU.value:
+                elif (
+                    name == Sensor.Name.IMU.value
+                    or name == Sensor.Name.EYE_EVENTS.value
+                ):
                     pass
                 else:
                     logger.error(f"Unhandled {item} for sensor {name}")
diff --git a/src/pupil_labs/realtime_api/simple/device.py b/src/pupil_labs/realtime_api/simple/device.py
index 6f1e3e5..500f0f5 100644
--- a/src/pupil_labs/realtime_api/simple/device.py
+++ b/src/pupil_labs/realtime_api/simple/device.py
@@ -20,7 +20,11 @@
     TemplateDataFormat,
 )
 from ..streaming import (
+    BlinkEventData,
+    FixationEventData,
+    FixationOnsetEventData,
     ImuPacket,
+    RTSPEyeEventStreamer,
     RTSPGazeStreamer,
     RTSPImuStreamer,
     RTSPVideoFrameStreamer,
@@ -39,11 +43,10 @@
 
 
 class Device(DeviceBase):
-    """
-    .. hint::
-        Use :py:func:`pupil_labs.realtime_api.simple.discover_devices` instead of
-        initializing the class manually. See the :ref:`simple_discovery_example`
-        example.
+    """.. hint::
+    Use :py:func:`pupil_labs.realtime_api.simple.discover_devices` instead of
+    initializing the class manually. See the :ref:`simple_discovery_example`
+    example.
""" def __init__( @@ -67,6 +70,14 @@ def __init__( self._errors: T.List[str] = [] + self.stream_name_event_map = { + Sensor.Name.GAZE.value: self._EVENT.SHOULD_START_GAZE, + Sensor.Name.WORLD.value: self._EVENT.SHOULD_START_WORLD, + Sensor.Name.EYES.value: self._EVENT.SHOULD_START_EYES, + Sensor.Name.IMU.value: self._EVENT.SHOULD_START_IMU, + Sensor.Name.EYE_EVENTS.value: self._EVENT.SHOULD_START_EYE_EVENTS, + } + @property def phone_name(self) -> str: return self._status.phone.device_name @@ -100,12 +111,12 @@ def version_glasses(self) -> str: return self._status.hardware.version @property - def module_serial(self) -> T.Union[str, None, Literal["default"]]: + def module_serial(self) -> T.Union[str, Literal["default"], None]: """Returns ``None`` or ``"default"`` if no glasses are connected""" return self._status.hardware.module_serial @property - def serial_number_glasses(self) -> T.Union[str, None, Literal["default"]]: + def serial_number_glasses(self) -> T.Union[str, Literal["default"], None]: """Returns ``None`` or ``"default"`` if no glasses are connected""" return self._status.hardware.glasses_serial @@ -126,6 +137,9 @@ def world_sensor(self) -> T.Optional[Sensor]: def gaze_sensor(self) -> T.Optional[Sensor]: return self._status.direct_gaze_sensor() + def eye_events_sensor(self) -> T.Optional[Sensor]: + return self._status.direct_eye_events_sensor() + def get_calibration(self): async def _get_calibration(): async with _DeviceAsync.convert_from(self) as control: @@ -188,8 +202,10 @@ async def _cancel_recording(): def send_event( self, event_name: str, event_timestamp_unix_ns: T.Optional[int] = None ) -> Event: - """ - :raises pupil_labs.realtime_api.device.DeviceError: if sending the event fails + """Wraps :py:meth:`pupil_labs.realtime_api.device.Device.send_event` + + :raises pupil_labs.realtime_api.device.DeviceError: + if sending the event fails """ async def _send_event(): @@ -199,8 +215,7 @@ async def _send_event(): return asyncio.run(_send_event()) def get_template(self) -> Template: - """ - Wraps :py:meth:`pupil_labs.realtime_api.device.Device.get_template` + """Wraps :py:meth:`pupil_labs.realtime_api.device.Device.get_template` Gets the template currently selected on device @@ -215,8 +230,7 @@ async def _get_template(): return asyncio.run(_get_template()) def get_template_data(self, format: TemplateDataFormat = "simple"): - """ - Wraps :py:meth:`pupil_labs.realtime_api.device.Device.get_template_data` + """Wraps :py:meth:`pupil_labs.realtime_api.device.Device.get_template_data` Gets the template data entered on device @@ -235,8 +249,7 @@ async def _get_template_data(): return asyncio.run(_get_template_data()) def post_template_data(self, template_data, format: TemplateDataFormat = "simple"): - """ - Wraps :py:meth:`pupil_labs.realtime_api.device.Device.post_template_data` + """Wraps :py:meth:`pupil_labs.realtime_api.device.Device.post_template_data` Sets the data for the currently selected template @@ -275,6 +288,11 @@ def receive_imu_datum( ) -> T.Optional[ImuPacket]: return self._receive_item(Sensor.Name.IMU.value, timeout_seconds) + def receive_eye_events( + self, timeout_seconds: T.Optional[float] = None + ) -> T.Optional[T.Union[FixationEventData, BlinkEventData, FixationOnsetEventData]]: + return self._receive_item(Sensor.Name.EYE_EVENTS.value, timeout_seconds) + def receive_matched_scene_video_frame_and_gaze( self, timeout_seconds: T.Optional[float] = None ) -> T.Optional[MatchedItem]: @@ -291,10 +309,12 @@ def _receive_item( if sensor == MATCHED_ITEM_LABEL: 
             self.start_stream_if_needed(Sensor.Name.GAZE.value)
             self.start_stream_if_needed(Sensor.Name.WORLD.value)
+
         elif sensor == MATCHED_GAZE_EYES_LABEL:
             self.start_stream_if_needed(Sensor.Name.GAZE.value)
             self.start_stream_if_needed(Sensor.Name.EYES.value)
             self.start_stream_if_needed(Sensor.Name.WORLD.value)
+
         else:
             self.start_stream_if_needed(sensor)
 
@@ -324,16 +344,7 @@ def streaming_start(self, stream_name: str):
                 self._streaming_trigger_action(event)
             return
 
-        event = None
-        if stream_name == Sensor.Name.GAZE.value:
-            event = self._EVENT.SHOULD_START_GAZE
-        elif stream_name == Sensor.Name.WORLD.value:
-            event = self._EVENT.SHOULD_START_WORLD
-        elif stream_name == Sensor.Name.EYES.value:
-            event = self._EVENT.SHOULD_START_EYES
-        elif stream_name == Sensor.Name.IMU.value:
-            event = self._EVENT.SHOULD_START_IMU
-
+        event = self.stream_name_event_map[stream_name]
         self._streaming_trigger_action(event)
 
     def streaming_stop(self, stream_name: str = None):
@@ -410,10 +421,12 @@ class _EVENT(enum.Enum):
         SHOULD_START_WORLD = "should start world"
         SHOULD_START_EYES = "should start eyes"
         SHOULD_START_IMU = "should start imu"
+        SHOULD_START_EYE_EVENTS = "should start eye events"
         SHOULD_STOP_GAZE = "should stop gaze"
         SHOULD_STOP_WORLD = "should stop world"
        SHOULD_STOP_EYES = "should stop eyes"
         SHOULD_STOP_IMU = "should stop imu"
+        SHOULD_STOP_EYE_EVENTS = "should stop eye events"
 
     def _start_background_worker(self, start_streaming_by_default):
         self._event_manager = None
@@ -425,6 +438,7 @@ def _start_background_worker(self, start_streaming_by_default):
             Sensor.Name.WORLD.value,
             Sensor.Name.EYES.value,
             Sensor.Name.IMU.value,
+            Sensor.Name.EYE_EVENTS.value,
             MATCHED_ITEM_LABEL,
             MATCHED_GAZE_EYES_LABEL,
         ]
@@ -444,6 +458,7 @@ def _start_background_worker(self, start_streaming_by_default):
             Sensor.Name.WORLD.value: threading.Event(),
             Sensor.Name.EYES.value: threading.Event(),
             Sensor.Name.IMU.value: threading.Event(),
+            Sensor.Name.EYE_EVENTS.value: threading.Event(),
         }
         self._auto_update_thread = threading.Thread(
             target=self._auto_update,
@@ -500,6 +515,11 @@ def _auto_update(
                 RTSPImuStreamer,
                 should_be_streaming_by_default=start_streaming_by_default,
             ),
+            Sensor.Name.EYE_EVENTS.value: _StreamManager(
+                device_weakref,
+                RTSPEyeEventStreamer,
+                should_be_streaming_by_default=start_streaming_by_default,
+            ),
         }
 
         async def _process_status_changes(changed: Component):
@@ -541,6 +561,7 @@ async def _auto_update_until_closed():
                 start_stream(Sensor.Name.WORLD.value)
                 start_stream(Sensor.Name.EYES.value)
                 start_stream(Sensor.Name.IMU.value)
+                start_stream(Sensor.Name.EYE_EVENTS.value)
 
             while True:
                 logger.debug("Background worker waiting for event...")
@@ -575,10 +596,12 @@ def stop_stream(stream_name):
             Device._EVENT.SHOULD_START_WORLD: start_stream,
             Device._EVENT.SHOULD_START_EYES: start_stream,
             Device._EVENT.SHOULD_START_IMU: start_stream,
+            Device._EVENT.SHOULD_START_EYE_EVENTS: start_stream,
             Device._EVENT.SHOULD_STOP_GAZE: stop_stream,
             Device._EVENT.SHOULD_STOP_WORLD: stop_stream,
             Device._EVENT.SHOULD_STOP_EYES: stop_stream,
             Device._EVENT.SHOULD_STOP_IMU: stop_stream,
+            Device._EVENT.SHOULD_STOP_EYE_EVENTS: stop_stream,
         }
 
         event_stream_map = {
@@ -586,10 +609,12 @@ def stop_stream(stream_name):
             Device._EVENT.SHOULD_START_WORLD: Sensor.Name.WORLD,
             Device._EVENT.SHOULD_START_EYES: Sensor.Name.EYES,
             Device._EVENT.SHOULD_START_IMU: Sensor.Name.IMU,
+            Device._EVENT.SHOULD_START_EYE_EVENTS: Sensor.Name.EYE_EVENTS,
             Device._EVENT.SHOULD_STOP_GAZE: Sensor.Name.GAZE,
             Device._EVENT.SHOULD_STOP_WORLD: Sensor.Name.WORLD,
             Device._EVENT.SHOULD_STOP_EYES: Sensor.Name.EYES,
             Device._EVENT.SHOULD_STOP_IMU: Sensor.Name.IMU,
+            Device._EVENT.SHOULD_STOP_EYE_EVENTS: Sensor.Name.EYE_EVENTS,
         }
 
         return asyncio.run(_auto_update_until_closed())
diff --git a/src/pupil_labs/realtime_api/simple/models.py b/src/pupil_labs/realtime_api/simple/models.py
index 0217392..fe241ba 100644
--- a/src/pupil_labs/realtime_api/simple/models.py
+++ b/src/pupil_labs/realtime_api/simple/models.py
@@ -1,10 +1,17 @@
 import datetime
 import typing as T
 
-from ..streaming.gaze import DualMonocularGazeData, EyestateGazeData, GazeData
+from ..streaming.gaze import (
+    DualMonocularGazeData,
+    EyestateEyelidGazeData,
+    EyestateGazeData,
+    GazeData,
+)
 from ..streaming.video import BGRBuffer, VideoFrame
 
-GazeDataType = T.Union[GazeData, DualMonocularGazeData, EyestateGazeData]
+GazeDataType = T.Union[
+    GazeData, DualMonocularGazeData, EyestateGazeData, EyestateEyelidGazeData
+]
 
 
 class SimpleVideoFrame(T.NamedTuple):
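Note: the async hunks above only show the changed lines; the discovery and connection code around them is elided by the diff. The sketch below is a rough, hypothetical reconstruction of how the new pieces fit together end to end (device discovery, looking up the eye-events sensor via `direct_eye_events_sensor()`, then consuming `receive_eye_events_data()`). It follows the pattern of the library's other async examples; names such as `wait_for_new_device` and `from_discovered_device` come from that existing API, not from this patch.

```python
# Hypothetical end-to-end sketch (not part of the patch): consuming the new
# eye-events stream with the async API, assuming the discovery flow used by
# the repo's other async examples.
import asyncio
import contextlib

from pupil_labs.realtime_api import Device, Network, receive_eye_events_data


async def main():
    async with Network() as network:
        dev_info = await network.wait_for_new_device(timeout_seconds=5)
    if dev_info is None:
        print("No device could be found! Abort")
        return

    async with Device.from_discovered_device(dev_info) as device:
        status = await device.get_status()
        # Added in this patch: falls back to a placeholder EYE_EVENTS sensor
        # (connected == False) if the stream is not being advertised.
        sensor_eye_events = status.direct_eye_events_sensor()
        if not sensor_eye_events.connected:
            print("Eye events sensor is not connected")
            return

        # run_loop=True keeps listening and reconnects if the stream drops.
        async for eye_event in receive_eye_events_data(
            sensor_eye_events.url, run_loop=True
        ):
            print(eye_event)


if __name__ == "__main__":
    with contextlib.suppress(KeyboardInterrupt):
        asyncio.run(main())
```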