From fad804bc7ef0cd5d0e66325c932e5c298619e8f4 Mon Sep 17 00:00:00 2001
From: Ben Pettit
Date: Sun, 16 Jun 2024 13:18:36 +1000
Subject: [PATCH] Initial support for VR headsets (#241)

* fixed requirements.txt so 'pip install -e .' would work on windows.
* added support for n170 to run on rift
* altered logic to add back in jitter
* simplified logic
* made fullscreen again
* further simplified logic
* decreased font size of instructions for vr
* fixed instructions display, further simplified logic, removed logs
* made code easier to read
* reverted board back to muse
* Enable headlocking to fix jittering - cancel out any rotation and translation coming from the headset.
* fixed use_vr parameter to be set to False by default for existing experiments.
* fix macos build
* reverted unnecessary changes and made p300 experiment display correctly
* added vr support for p300 and ssvep
* fix psychxr version
* adding vr doc
* updated vr doco

---------

Co-authored-by: John Griffiths
---
 doc/getting_started/running_experiments.md  |  49 ++++++++
 eegnb/experiments/Experiment.py             | 116 +++++++++++-------
 eegnb/experiments/visual_n170/n170.py       |   9 +-
 eegnb/experiments/visual_p300/p300.py       |   4 +-
 eegnb/experiments/visual_ssvep/ssvep.py     |  41 ++++---
 .../visual_n170/00x__n170_run_experiment.py |   2 +-
 requirements.txt                            |   9 +-
 7 files changed, 164 insertions(+), 66 deletions(-)

diff --git a/doc/getting_started/running_experiments.md b/doc/getting_started/running_experiments.md
index 810f4127..951e6506 100644
--- a/doc/getting_started/running_experiments.md
+++ b/doc/getting_started/running_experiments.md
@@ -139,3 +139,52 @@ eeg_device = EEG(device=board_name)
 # Run stimulus presentation
 n170.present(duration=record_duration, eeg=eeg_device, save_fn=save_fn)
 ```
+
+
+## Using virtual reality
+
+### Heads-up display
+
+A heads-up display can be used for presenting experiments in much the same way as a monitor, without much modification.
+
+#### Features to be added in future releases:
+
+* Controller input
+* Controller haptic feedback
+
+### Prerequisites:
+* Oculus Rift compatible VR headset, e.g. Oculus Rift or Meta Quest series.
+* Native Windows installation with a Meta Link compatible video card.
+* EEG device, e.g. OpenBCI Cyton or Muse.
+
+If an experiment's present method accepts the use_vr argument, its stimulus can be presented on a subject's VR headset.
+The N170 experiment, for example, can display its stimulus on the VR headset simply by setting the 'use_vr' argument when presenting the experiment:
+
+```python
+# Run stimulus presentation with VR enabled.
+n170.present(duration=record_duration, eeg=eeg_device, save_fn=save_fn, use_vr=True)
+```
+
+### Steps for running an experiment in VR
+
+1. Launch the Oculus app on the Windows computer, along with the IDE or CLI to be used for running the experiment.
+2. Turn on the VR headset and put it on briefly to confirm that it is on and active, then take it off.
+3. Go to the 'Devices' view in the Oculus app; it should show the headset as connected and active, along with any connected or inactive controllers.
+4. Go to the 'Settings' view and, under the 'Beta' heading, enable 'Pass through over Oculus Link'. Double-tapping the headset with a fingertip will later toggle passthrough.
+5. Put the VR headset on and activate passthrough to make fitting the EEG device easier.
+6. Place the EEG device on top of the head.
+7. Ensure the electrodes are making good contact with the scalp and are not blocked by the headset strap.
+8. From inside the VR headset's 'quick settings' dashboard, select 'Quest Link' and connect to the Oculus server running on Windows, via Air Link or a Link cable.
+9. Once the Oculus menu has finished loading on the VR headset, open the built-in Oculus desktop app using the touch controllers or a gamepad.
+10. Open an EEG raw data viewer, e.g. the 'OpenBCI GUI', and verify that the electrodes are receiving a good signal without too much noise.
+11. Run the EEG-ExPy experiment from the command line or IDE; it should load and take control from the Oculus desktop app.
+12. Follow the experiment instructions, and press a key if necessary to begin the experiment and collect valid data.
+
+### Other experiments can have VR added too
+
+1. Load/prepare the stimulus in the same function as before (def load_stimulus(self)).
+2. Present the stimulus in the same function as before (def present_stimulus(self, current_trial: int)).
+3. VR can be enabled as part of the initializer of the base Experiment class; by default it is not enabled (use_vr=False), and the experiment behaves exactly as it did before VR support was added. A minimal sketch is shown below.
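+
+As a rough sketch (the class name, stimulus, and marker value are illustrative only, and the imports follow the existing experiment modules, so they may need adjusting), a new experiment can simply pass use_vr through to the base class:
+
+```python
+# Illustrative sketch only - the class and stimulus are hypothetical; the
+# structure mirrors the existing N170 experiment.
+from time import time
+from typing import Optional
+
+from psychopy import visual
+
+from eegnb.devices.eeg import EEG
+from eegnb.experiments import Experiment
+
+
+class VisualExample(Experiment.BaseExperiment):
+
+    def __init__(self, duration=120, eeg: Optional[EEG]=None, save_fn=None,
+                 n_trials=2010, iti=0.4, soa=0.3, jitter=0.2, use_vr=False):
+        exp_name = "Visual Example"
+        # Forward use_vr to the base class; when it is True, the base class
+        # creates a visual.Rift window instead of a regular visual.Window.
+        super().__init__(exp_name, duration, eeg, save_fn, n_trials, iti, soa, jitter, use_vr)
+
+    def load_stimulus(self):
+        # Prepare stimuli as usual; self.window works for both monitor and VR.
+        self.circle = visual.Circle(win=self.window, radius=2, fillColor="red")
+        return [self.circle]
+
+    def present_stimulus(self, idx, trial):
+        # Draw and flip as usual; head tracking is handled by the base class.
+        self.circle.draw()
+        if self.eeg:
+            self.eeg.push_sample(marker=[1], timestamp=time())
+        self.window.flip()
+```
+
+The N170, P300 and SSVEP experiments in this repository follow the same pattern.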
+
diff --git a/eegnb/experiments/Experiment.py b/eegnb/experiments/Experiment.py
index 525236da..963a9aa3 100644
--- a/eegnb/experiments/Experiment.py
+++ b/eegnb/experiments/Experiment.py
@@ -8,30 +8,36 @@ obj.run()
 """
 
-from abc import ABC, abstractmethod
+from abc import abstractmethod
+from typing import Callable
 
 from psychopy import prefs
 #change the pref libraty to PTB and set the latency mode to high precision
 prefs.hardware['audioLib'] = 'PTB'
 prefs.hardware['audioLatencyMode'] = 3
-import os
 from time import time
-from glob import glob
-from random import choice
-from optparse import OptionParser
 import random
 
 import numpy as np
 from pandas import DataFrame
-from psychopy import visual, core, event
+from psychopy import visual, event
 
 from eegnb import generate_save_fn
-from eegnb.devices.eeg import EEG
+
 
 class BaseExperiment:
 
-    def __init__(self, exp_name, duration, eeg, save_fn, n_trials, iti, soa, jitter):
-        """ Initializer for the Base Experiment Class """
+    def __init__(self, exp_name, duration, eeg, save_fn, n_trials: int, iti: float, soa: float, jitter: float,
+                 use_vr=False):
+        """ Initializer for the Base Experiment Class
+
+        Args:
+            n_trials (int): Number of trials/stimuli
+            iti (float): Inter-trial interval
+            soa (float): Stimulus onset asynchrony, i.e. how long the stimulus stays on screen
+            jitter (float): Maximum random jitter added to the inter-trial interval
+            use_vr (bool): Use a VR headset to display the stimulus
+        """
 
         self.exp_name = exp_name
         self.instruction_text = """\nWelcome to the {} experiment!\nStay still, focus on the centre of the screen, and try not to blink. \nThis block will run for %s seconds.\n
@@ -43,7 +49,8 @@ def __init__(self, exp_name, duration, eeg, save_fn, n_trials, iti, soa, jitter)
         self.iti = iti
         self.soa = soa
         self.jitter = jitter
-
+        self.use_vr = use_vr
+
     @abstractmethod
     def load_stimulus(self):
         """
@@ -76,7 +83,9 @@ def setup(self, instructions=True):
         self.trials = DataFrame(dict(parameter=self.parameter, timestamp=np.zeros(self.n_trials)))
 
         # Setting up Graphics
-        self.window = visual.Window([1600, 900], monitor="testMonitor", units="deg", fullscr=True)
+        self.window = (
+            visual.Rift(monoscopic=True, headLocked=True) if self.use_vr
+            else visual.Window([1600, 900], monitor="testMonitor", units="deg", fullscr=True))
 
         # Loading the stimulus from the specific experiment, throws an error if not overwritten in the specific experiment
         self.stim = self.load_stimulus()
@@ -112,20 +121,36 @@ def show_instructions(self):
         # Disabling the cursor during display of instructions
         self.window.mouseVisible = False
 
-        # Displaying the instructions on the screen
-        text = visual.TextStim(win=self.window, text=self.instruction_text, color=[-1, -1, -1])
+        # Waiting for the user to press the spacebar to start the experiment
+        while len(event.getKeys(keyList="space")) == 0:
+            # Displaying the instructions on the screen
+            text = visual.TextStim(win=self.window, text=self.instruction_text, color=[-1, -1, -1])
+            self.__draw(lambda: self.__draw_instructions(text))
+
+        # Enabling the cursor again
+        self.window.mouseVisible = True
+
+    def __draw_instructions(self, text):
         text.draw()
         self.window.flip()
-
-        # Waiting for the user to press the spacebar to start the experiment
-        event.waitKeys(keyList="space")
-        # Enabling the cursor again
-        self.window.mouseVisible = True
-
+    def __draw(self, present_stimulus: Callable):
+        """
+        Set the current eye position and projection for the given stimuli,
+        then draw the stimuli and flip the window/buffer
+        """
+        if self.use_vr:
+            tracking_state = self.window.getTrackingState()
+            self.window.calcEyePoses(tracking_state.headPose.thePose)
+            self.window.setDefaultView()
+        present_stimulus()
+
     def run(self, instructions=True):
         """ Do the present operation for a bunch of experiments """
 
+        def iti_with_jitter():
+            return self.iti + np.random.rand() * self.jitter
+
         # Setup the experiment, alternatively could get rid of this line, something to think about
         self.setup(instructions)
 
@@ -137,29 +162,38 @@ def run(self, instructions=True):
 
             print("EEG Stream started")
 
+        # Run trials until a key is pressed or the experiment duration has expired.
         start = time()
-
-        # Iterate through the events
-        for ii, trial in self.trials.iterrows():
-
-            # Intertrial interval
-            core.wait(self.iti + np.random.rand() * self.jitter)
-
-            # Stimulus presentation overwritten by specific experiment
-            self.present_stimulus(ii, trial)
-
-            # Offset
-            core.wait(self.soa)
-            self.window.flip()
-
-            # Exiting the loop condition, looks ugly and needs to be fixed
-            if len(event.getKeys()) > 0 or (time() - start) > self.record_duration:
-                break
-
-            # Clearing the screen for the next trial
-            event.clearEvents()
-
-        # Closing the EEG stream
+        current_trial = current_trial_end = -1
+        current_trial_begin = None
+
+        # Current trial being rendered
+        rendering_trial = -1
+        while len(event.getKeys()) == 0 and (time() - start) < self.record_duration:
+
+            current_experiment_seconds = time() - start
+            # Do not present stimulus until the current trial begins (adhere to the inter-trial interval).
+            if current_trial_end < current_experiment_seconds:
+                current_trial += 1
+                current_trial_begin = current_experiment_seconds + iti_with_jitter()
+                current_trial_end = current_trial_begin + self.soa
+
+            # Do not present stimulus after the trial has ended (stimulus onset asynchrony interval).
+            elif current_trial_begin < current_experiment_seconds:
+
+                # If the current trial number has changed, get a new choice of image.
+                if rendering_trial < current_trial:
+                    # Some form of presenting the stimulus - sometimes the order is changed in lower files like ssvep
+                    # Stimulus presentation overwritten by specific experiment
+                    self.__draw(lambda: self.present_stimulus(current_trial, current_trial))
+                    rendering_trial = current_trial
+                else:
+                    self.__draw(lambda: self.window.flip())
+
+            # Clearing the screen for the next trial
+            event.clearEvents()
+
+        # Closing the EEG stream
         if self.eeg:
             self.eeg.stop()
 
diff --git a/eegnb/experiments/visual_n170/n170.py b/eegnb/experiments/visual_n170/n170.py
index 7fcd05a9..4cf42aad 100644
--- a/eegnb/experiments/visual_n170/n170.py
+++ b/eegnb/experiments/visual_n170/n170.py
@@ -9,11 +9,6 @@
 from time import time
 from glob import glob
 from random import choice
-from optparse import OptionParser
-import random
-
-import numpy as np
-from pandas import DataFrame
 
 from psychopy import visual, core, event
 from eegnb.devices.eeg import EEG
@@ -27,12 +22,12 @@ class VisualN170(Experiment.BaseExperiment):
 
     def __init__(self, duration=120, eeg: Optional[EEG]=None, save_fn=None,
-            n_trials = 2010, iti = 0.4, soa = 0.3, jitter = 0.2):
+            n_trials = 2010, iti = 0.4, soa = 0.3, jitter = 0.2, use_vr = False):
 
         # Set experiment name
         exp_name = "Visual N170"
 
         # Calling the super class constructor to initialize the experiment variables
-        super(VisualN170, self).__init__(exp_name, duration, eeg, save_fn, n_trials, iti, soa, jitter)
+        super(VisualN170, self).__init__(exp_name, duration, eeg, save_fn, n_trials, iti, soa, jitter, use_vr)
 
     def load_stimulus(self):
 
diff --git a/eegnb/experiments/visual_p300/p300.py b/eegnb/experiments/visual_p300/p300.py
index 6125519c..08560d1a 100644
--- a/eegnb/experiments/visual_p300/p300.py
+++ b/eegnb/experiments/visual_p300/p300.py
@@ -21,10 +21,10 @@ class VisualP300(Experiment.BaseExperiment):
 
     def __init__(self, duration=120, eeg: Optional[EEG]=None, save_fn=None,
-            n_trials = 2010, iti = 0.4, soa = 0.3, jitter = 0.2):
+            n_trials = 2010, iti = 0.4, soa = 0.3, jitter = 0.2, use_vr = False):
 
         exp_name = "Visual P300"
-        super().__init__(exp_name, duration, eeg, save_fn, n_trials, iti, soa, jitter)
+        super().__init__(exp_name, duration, eeg, save_fn, n_trials, iti, soa, jitter, use_vr)
 
     def load_stimulus(self):
 
diff --git a/eegnb/experiments/visual_ssvep/ssvep.py b/eegnb/experiments/visual_ssvep/ssvep.py
index 5c77d2e4..46119f55 100644
--- a/eegnb/experiments/visual_ssvep/ssvep.py
+++ b/eegnb/experiments/visual_ssvep/ssvep.py
@@ -17,18 +17,20 @@ class VisualSSVEP(Experiment.BaseExperiment):
 
-    def __init__(self, duration=120, eeg: Optional[EEG]=None, save_fn=None, n_trials = 2010, iti = 0.5, soa = 3.0, jitter = 0.2):
+    def __init__(self, duration=120, eeg: Optional[EEG]=None, save_fn=None, n_trials = 2010, iti = 0.5, soa = 3.0, jitter = 0.2, use_vr=False):
+        self.use_vr = use_vr
 
         exp_name = "Visual SSVEP"
-        super().__init__(exp_name, duration, eeg, save_fn, n_trials, iti, soa, jitter)
+        super().__init__(exp_name, duration, eeg, save_fn, n_trials, iti, soa, jitter, use_vr)
 
     def load_stimulus(self):
 
-        self.grating = visual.GratingStim(win=self.window, mask="circle", size=80, sf=0.2)
+        grating_sf = 400 if self.use_vr else 0.2
+        self.grating = visual.GratingStim(win=self.window, mask="circle", size=80, sf=grating_sf)
+        self.grating_neg = visual.GratingStim(win=self.window, mask="circle", size=80, sf=grating_sf, phase=0.5)
 
-        self.grating_neg = visual.GratingStim(win=self.window, mask="circle", size=80, sf=0.2, phase=0.5)
-
-        fixation = visual.GratingStim(win=self.window, size=0.2, pos=[0, 0], sf=0.2, color=[1, 0, 0], autoDraw=True)
+        self.fixation = visual.GratingStim(win=self.window, pos=[0, 0], sf=grating_sf, color=[1, 0, 0])
+        self.fixation.size = 0.02 if self.use_vr else 0.2
 
         # Generate the possible ssvep frequencies based on monitor refresh rate
         def get_possible_ssvep_freqs(frame_rate, stim_type="single"):
@@ -65,7 +67,10 @@ def init_flicker_stim(frame_rate, cycle, soa):
             return {"cycle": cycle, "freq": stim_freq, "n_cycles": n_cycles}
 
         # Set up stimuli
-        frame_rate = np.round(self.window.getActualFrameRate())  # Frame rate, in Hz
+
+        # Frame rate, in Hz
+        # GetActualFrameRate() crashes in psychxr due to 'EndFrame called before BeginFrame'
+        frame_rate = np.round(self.window.displayRefreshRate if self.use_vr else self.window.getActualFrameRate())
         freqs = get_possible_ssvep_freqs(frame_rate, stim_type="reversal")
         self.stim_patterns = [
             init_flicker_stim(frame_rate, 2, self.soa),
@@ -102,14 +107,22 @@ def present_stimulus(self, idx, trial):
 
         # Present flickering stim
         for _ in range(int(self.stim_patterns[ind]["n_cycles"])):
-            self.grating.setAutoDraw(True)
+            for _ in range(int(self.stim_patterns[ind]["cycle"][0])):
+                if self.use_vr:
+                    tracking_state = self.window.getTrackingState()
+                    self.window.calcEyePoses(tracking_state.headPose.thePose)
+                    self.window.setDefaultView()
+                self.grating.draw()
+                self.fixation.draw()
                 self.window.flip()
-            self.grating.setAutoDraw(False)
-            self.grating_neg.setAutoDraw(True)
+            for _ in range(self.stim_patterns[ind]["cycle"][1]):
+                if self.use_vr:
+                    tracking_state = self.window.getTrackingState()
+                    self.window.calcEyePoses(tracking_state.headPose.thePose)
+                    self.window.setDefaultView()
+                self.grating_neg.draw()
+                self.fixation.draw()
                 self.window.flip()
-            self.grating_neg.setAutoDraw(False)
-        pass
-
-        self.window.flip()
\ No newline at end of file
+        pass
\ No newline at end of file
diff --git a/examples/visual_n170/00x__n170_run_experiment.py b/examples/visual_n170/00x__n170_run_experiment.py
index 03249e5d..b7188696 100644
--- a/examples/visual_n170/00x__n170_run_experiment.py
+++ b/examples/visual_n170/00x__n170_run_experiment.py
@@ -30,7 +30,7 @@ eeg_device = EEG(device=board_name)
 
 # Experiment type
-experiment = VisualN170(duration=record_duration, eeg=eeg_device, save_fn=save_fn)
+experiment = VisualN170(duration=record_duration, eeg=eeg_device, save_fn=save_fn, use_vr=False)
 
 ###################################################################################################
 # Run experiment
 
diff --git a/requirements.txt b/requirements.txt
index 11c6aea4..71d155dd 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -70,6 +70,9 @@ pywinhook @ https://github.com/ActivityWatch/wheels/raw/master/pywinhook/pyWinho
 # See issue: https://github.com/psychopy/psychopy/issues/2876
 pyglet==1.4.11 ; platform_system == "Windows"
 
+# Oculus/Quest VR support - currently only supported on Windows.
+psychxr>=0.2.4rc2; platform_system == "Windows"
+
 
 ## ~~ Docsbuild Requirements ~~
 
@@ -84,6 +87,11 @@ sphinx_rtd_theme
 sphinx-tabs
 sphinx-copybutton
 sphinxcontrib-httpdomain
+numpydoc
+recommonmark
+versioneer
+rst2pdf
+docutils
 
 # Tests
 mypy
@@ -94,4 +102,3 @@ nbval
 
 # Types
 types-requests
-