Define a context class

The ArContext class defines a generic base class interface to handle incoming eye tracker data before passing it to a processing pipeline, following the Python context manager protocol.

The ArContext class interface provides control features to stop or pause working threads, and performance assessment features to measure how many times processing steps are called and how much time each of them takes.

Besides, there is also a DataCaptureContext class that inherits from ArContext and that defines an abstract calibrate method to write specific device calibration process.

In the same way, there is a DataPlaybackContext class that inherits from ArContext and that defines duration and progression properties to get information about a record length and playback advancement.

Finally, a specific eye tracking context can be defined into a Python file by writing a class that inherits either from ArContext, DataCaptureContext or DataPlaybackContext class.

Write data capture context

Here is a data capture context example that processes gaze positions and camera images in two separate threads:

import threading

from argaze import ArFeatures, DataFeatures

class DataCaptureExample(ArFeatures.DataCaptureContext):
    """Example eye tracking data capture context processing gaze positions and camera images in two separate threads."""

    @DataFeatures.PipelineStepInit
    def __init__(self, **kwargs):

        # Init DataCaptureContext class
        super().__init__()

        # Init private attribute
        self.__parameter = ...

    @property
    def parameter(self):
        """Any context specific parameter."""
        return self.__parameter

    @parameter.setter
    def parameter(self, parameter):
        self.__parameter = parameter

    @DataFeatures.PipelineStepEnter
    def __enter__(self):
        """Start context."""

        # Start context according to any specific parameter
        # (e.g. read self.parameter here)
        ...

        # Start a gaze position capture thread
        self.__gaze_thread = threading.Thread(target = self.__gaze_position_capture)
        self.__gaze_thread.start()

        # Start a camera image capture thread if applicable
        self.__camera_thread = threading.Thread(target = self.__camera_image_capture)
        self.__camera_thread.start()

        return self

    def __gaze_position_capture(self):
        """Capture gaze position."""

        # Capture loop: run until the context is stopped
        while self.is_running():

            # Skip capture while the context is paused
            if not self.is_paused():

                # Assuming that timestamp, x and y values are available
                ...

                # Process timestamped gaze position
                self._process_gaze_position(timestamp = timestamp, x = x, y = y)

            # Wait some time eventually
            ...

    def __camera_image_capture(self):
        """Capture camera image if applicable."""

        # Capture loop: run until the context is stopped
        while self.is_running():

            # Skip capture while the context is paused
            if not self.is_paused():

                # Assuming that timestamp and camera_image are available
                ...

                # Process timestamped camera image
                self._process_camera_image(timestamp = timestamp, image = camera_image)

            # Wait some time eventually
            ...

    @DataFeatures.PipelineStepExit
    def __exit__(self, exception_type, exception_value, exception_traceback):
        """End context."""

        # Stop capture loops
        self.stop()

        # Wait for capture threads to terminate
        self.__gaze_thread.join()
        self.__camera_thread.join()

    def calibrate(self):
        """Handle device calibration process."""

        ...

Write data playback context

Here is a data playback context example that reads gaze positions and camera images in the same thread:

import threading

from argaze import ArFeatures, DataFeatures

class DataPlaybackExample(ArFeatures.DataPlaybackContext):
    """Example eye tracking data playback context reading gaze positions and camera images in the same thread."""

    @DataFeatures.PipelineStepInit
    def __init__(self, **kwargs):

        # Init DataPlaybackContext class
        super().__init__()

        # Init private attribute
        self.__parameter = ...

    @property
    def parameter(self):
        """Any context specific parameter."""
        return self.__parameter

    @parameter.setter
    def parameter(self, parameter):
        self.__parameter = parameter

    @DataFeatures.PipelineStepEnter
    def __enter__(self):
        """Start context."""

        # Start context according to any specific parameter
        # (e.g. read self.parameter here)
        ...

        # Start a data playback thread
        self.__data_thread = threading.Thread(target = self.__data_playback)
        self.__data_thread.start()

        return self

    def __data_playback(self):
        """Playback gaze position and camera image if applicable."""

        # Playback loop: run until the context is stopped
        while self.is_running():

            # Skip playback while the context is paused
            if not self.is_paused():

                # Assuming that timestamp and camera_image are available
                ...

                # Process timestamped camera image
                self._process_camera_image(timestamp = timestamp, image = camera_image)

                # Assuming that timestamp, x and y values are available
                ...

                # Process timestamped gaze position
                self._process_gaze_position(timestamp = timestamp, x = x, y = y)

            # Wait some time eventually
            ...

    @DataFeatures.PipelineStepExit
    def __exit__(self, exception_type, exception_value, exception_traceback):
        """End context."""

        # Stop playback loop
        self.stop()

        # Wait for the playback thread to terminate
        self.__data_thread.join()

    @property
    def duration(self) -> int|float:
        """Get data duration."""
        ...

    @property
    def progression(self) -> float:
        """Get data playback progression between 0 and 1."""
        ...