From d1fb1c5c2ee1114572038b7d6eaa6ee727c0f1e0 Mon Sep 17 00:00:00 2001 From: Ben Pettit Date: Wed, 16 Jul 2025 20:29:55 +1000 Subject: [PATCH 01/39] Added new Visual Pattern Reversal VEP experiment implementation, improved rendering code, added new present_iti delegate and update dependencies to include stimupy. --- eegnb/experiments/Experiment.py | 41 ++++++--- .../visual_vep/pattern_reversal_vep.py | 91 +++++++++++++++++++ requirements.txt | 3 +- 3 files changed, 121 insertions(+), 14 deletions(-) create mode 100644 eegnb/experiments/visual_vep/pattern_reversal_vep.py diff --git a/eegnb/experiments/Experiment.py b/eegnb/experiments/Experiment.py index 95efa4bd..b7170323 100644 --- a/eegnb/experiments/Experiment.py +++ b/eegnb/experiments/Experiment.py @@ -78,6 +78,18 @@ def present_stimulus(self, idx : int): """ raise NotImplementedError + def present_iti(self): + """ + Method that presents the inter-trial interval display for the specific experiment. + + This method defines what is shown on the screen during the period between stimuli. + It could be a blank screen, a fixation cross, or any other appropriate display. + + This is an optional method - the default implementation simply flips the window with no additional content. + Subclasses can override this method to provide custom ITI displays. + """ + self.window.flip() + def setup(self, instructions=True): # Initializing the record duration and the marker names @@ -156,6 +168,9 @@ def __user_input(self, input_type): ('Xbox', 'B', None) ] + else: + raise Exception(f'Invalid input_type: {input_type}') + if len(event.getKeys(keyList=key_input)) > 0: return True @@ -236,9 +251,9 @@ def iti_with_jitter(): print("EEG Stream started") # Run trial until a key is pressed or experiment duration has expired. - start = time() - current_trial = current_trial_end = -1 - current_trial_begin = None + start_time = time() + current_trial = trial_end_time = -1 + trial_start_time = None # Current trial being rendered rendering_trial = -1 @@ -246,26 +261,26 @@ def iti_with_jitter(): # Clear/reset user input buffer self.__clear_user_input() - while not self.__user_input('cancel') and (time() - start) < self.record_duration: + while not self.__user_input('cancel') and (time() - start_time) < self.record_duration: - current_experiment_seconds = time() - start + elapsed_time = time() - start_time # Do not present stimulus until current trial begins(Adhere to inter-trial interval). - if current_trial_end < current_experiment_seconds: + if elapsed_time > trial_end_time: current_trial += 1 - current_trial_begin = current_experiment_seconds + iti_with_jitter() - current_trial_end = current_trial_begin + self.soa + trial_start_time = elapsed_time + iti_with_jitter() + trial_end_time = trial_start_time + self.soa + self.__draw(lambda: self.present_iti()) # Do not present stimulus after trial has ended(stimulus on arrival interval). - elif current_trial_begin < current_experiment_seconds: + elif elapsed_time > trial_start_time: - # if current trial number changed get new choice of image. - if rendering_trial < current_trial: - # Some form of presenting the stimulus - sometimes order changed in lower files like ssvep + # if current trial number changed present new stimulus. 
+ if current_trial > rendering_trial: # Stimulus presentation overwritten by specific experiment self.__draw(lambda: self.present_stimulus(current_trial)) rendering_trial = current_trial else: - self.__draw(lambda: self.window.flip()) + self.__draw(lambda: self.present_iti()) # Clearing the screen for the next trial event.clearEvents() diff --git a/eegnb/experiments/visual_vep/pattern_reversal_vep.py b/eegnb/experiments/visual_vep/pattern_reversal_vep.py new file mode 100644 index 00000000..406cd1b5 --- /dev/null +++ b/eegnb/experiments/visual_vep/pattern_reversal_vep.py @@ -0,0 +1,91 @@ +from time import time + +from psychopy import visual +from typing import Optional, Any, List +from eegnb.devices.eeg import EEG +from eegnb.experiments import Experiment +from stimupy.stimuli.checkerboards import contrast_contrast + + +class VisualPatternReversalVEP(Experiment.BaseExperiment): + + def __init__(self, duration=120, eeg: Optional[EEG] = None, save_fn=None, + n_trials=2000, iti=0, soa=0.5, jitter=0, use_vr=False, use_fullscr=True): + + self.black_background = None + self.stim = None + exp_name = "Visual Pattern Reversal VEP" + super().__init__(exp_name, duration, eeg, save_fn, n_trials, iti, soa, jitter, use_vr, use_fullscr) + + @staticmethod + def create_monitor_checkerboard(intensity_checks): + # Standard parameters for monitor-based pattern reversal VEP + # Using standard 1 degree check size at 30 pixels per degree + return contrast_contrast( + visual_size=(16, 16), # aspect ratio in degrees + ppd=72, # pixels per degree + frequency=(0.5, 0.5), # spatial frequency of the checkerboard (0.5 cpd = 1 degree check size) + intensity_checks=intensity_checks, + target_shape=(1, 1), + alpha=0, + tau=0 + ) + + @staticmethod + def create_vr_checkerboard(intensity_checks): + # Optimized parameters for Oculus/Meta Quest 2 with PC link + # Quest 2 has approximately 20 pixels per degree and a ~90° FOV + # Using standard 1 degree check size (0.5 cpd) + return contrast_contrast( + visual_size=(20, 20), # size in degrees - covers a good portion of the FOV + ppd=20, # pixels per degree for Quest 2 + frequency=(0.5, 0.5), # spatial frequency (0.5 cpd = 1 degree check size) + intensity_checks=intensity_checks, + target_shape=(1, 1), + alpha=0, + tau=0 + ) + + def load_stimulus(self): + if self.use_vr: + # Create VR checkerboard + create_checkerboard = self.create_vr_checkerboard + + else: + # Create Monitor checkerboard + create_checkerboard = self.create_monitor_checkerboard + + if self.use_vr: + # the window is large over the eye, checkerboard should only cover the central vision + size = self.window.size / 1.5 + else: + size = (self.window_size[1], self.window_size[1]) + + # the surrounding / periphery needs to be dark + self.black_background = visual.Rect(self.window, + width=self.window.size[0], + height=self.window.size[1], + fillColor='black') + + def create_checkerboard_stim(intensity_checks): + return visual.ImageStim(self.window, + image=create_checkerboard(intensity_checks)['img'], + units='pix', size=size, color='white') + + self.stim = [create_checkerboard_stim((1, -1)), create_checkerboard_stim((-1, 1))] + + def present_stimulus(self, idx: int): + self.black_background.draw() + + # draw checkerboard + checkerboard_frame = idx % 2 + image = self.stim[checkerboard_frame] + image.draw() + self.window.flip() + + # Pushing the sample to the EEG + self.eeg.push_sample(marker=checkerboard_frame + 1, timestamp=time()) + + def present_iti(self): + self.black_background.draw() + self.window.flip() diff 
--git a/requirements.txt b/requirements.txt index f474e126..dd394139 100644 --- a/requirements.txt +++ b/requirements.txt @@ -80,7 +80,8 @@ pyglet==1.4.11 ; platform_system == "Windows" # Oculus/Quest VR support - currently only supported on Windows. psychxr>=0.2.4rc2; platform_system == "Windows" - +# Used for generating checkerboard in pattern reversal experiment +stimupy ## ~~ Docsbuild Requirements ~~ recommonmark From 5f7cbbd56172de5ae658cff2cced6a83bec63738 Mon Sep 17 00:00:00 2001 From: Ben Pettit Date: Sat, 2 Aug 2025 09:37:45 +1000 Subject: [PATCH 02/39] cleanup --- eegnb/experiments/Experiment.py | 31 ++++++++++--------- .../visual_vep/pattern_reversal_vep.py | 21 ++++++++++--- 2 files changed, 33 insertions(+), 19 deletions(-) diff --git a/eegnb/experiments/Experiment.py b/eegnb/experiments/Experiment.py index df036e79..f5d15a85 100644 --- a/eegnb/experiments/Experiment.py +++ b/eegnb/experiments/Experiment.py @@ -8,8 +8,8 @@ obj.run() """ -from abc import abstractmethod -from typing import Callable +from abc import abstractmethod, ABC +from typing import Callable, Optional from psychopy import prefs from psychopy.visual.rift import Rift #change the pref libraty to PTB and set the latency mode to high precision @@ -26,7 +26,7 @@ from eegnb import generate_save_fn -class BaseExperiment: +class BaseExperiment(ABC): def __init__(self, exp_name, duration, eeg, save_fn, n_trials: int, iti: float, soa: float, jitter: float, use_vr=False, use_fullscr = True): @@ -51,11 +51,18 @@ def __init__(self, exp_name, duration, eeg, save_fn, n_trials: int, iti: float, self.soa = soa self.jitter = jitter self.use_vr = use_vr - if use_vr: - # VR interface accessible by specific experiment classes for customizing and using controllers. - self.rift: Rift = visual.Rift(monoscopic=True, headLocked=True) self.use_fullscr = use_fullscr + self.window_size = [1600,800] + self.rift: Optional[Rift] = None + + # Initializing the record duration and the marker names + self.record_duration = np.float32(self.duration) + self.markernames = [1, 2] + + # Setting up the trial and parameter list + self.parameter = np.random.binomial(1, 0.5, self.n_trials) + self.trials = DataFrame(dict(parameter=self.parameter, timestamp=np.zeros(self.n_trials))) @abstractmethod def load_stimulus(self): @@ -92,15 +99,11 @@ def present_iti(self): def setup(self, instructions=True): - # Initializing the record duration and the marker names - self.record_duration = np.float32(self.duration) - self.markernames = [1, 2] - - # Setting up the trial and parameter list - self.parameter = np.random.binomial(1, 0.5, self.n_trials) - self.trials = DataFrame(dict(parameter=self.parameter, timestamp=np.zeros(self.n_trials))) + if self.use_vr is True and self.rift is None: + # VR interface accessible by specific experiment classes for customizing and using controllers. 
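+            # (Assumed PsychoPy semantics, noted here for readers unfamiliar with the Rift API:
+            #  monoscopic=True renders the same image to both eyes, and headLocked=True keeps the
+            #  stimulus fixed in the user's view rather than anchored in world space.)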
+ self.rift: Rift = visual.Rift(monoscopic=True, headLocked=True) - # Setting up Graphics + # Setting up Graphics self.window = ( self.rift if self.use_vr else visual.Window(self.window_size, monitor="testMonitor", units="deg", fullscr=self.use_fullscr)) diff --git a/eegnb/experiments/visual_vep/pattern_reversal_vep.py b/eegnb/experiments/visual_vep/pattern_reversal_vep.py index 406cd1b5..44ed8fc4 100644 --- a/eegnb/experiments/visual_vep/pattern_reversal_vep.py +++ b/eegnb/experiments/visual_vep/pattern_reversal_vep.py @@ -1,7 +1,8 @@ from time import time +from pandas import DataFrame from psychopy import visual -from typing import Optional, Any, List +from typing import Optional from eegnb.devices.eeg import EEG from eegnb.experiments import Experiment from stimupy.stimuli.checkerboards import contrast_contrast @@ -12,10 +13,14 @@ class VisualPatternReversalVEP(Experiment.BaseExperiment): def __init__(self, duration=120, eeg: Optional[EEG] = None, save_fn=None, n_trials=2000, iti=0, soa=0.5, jitter=0, use_vr=False, use_fullscr=True): + super().__init__("Visual Pattern Reversal VEP", duration, eeg, save_fn, n_trials, iti, soa, jitter, use_vr, use_fullscr) + self.black_background = None self.stim = None - exp_name = "Visual Pattern Reversal VEP" - super().__init__(exp_name, duration, eeg, save_fn, n_trials, iti, soa, jitter, use_vr, use_fullscr) + + # Setting up the trial and parameter list + self.parameter = self.n_trials + self.trials = DataFrame(dict(parameter=self.parameter)) @staticmethod def create_monitor_checkerboard(intensity_checks): @@ -50,7 +55,6 @@ def load_stimulus(self): if self.use_vr: # Create VR checkerboard create_checkerboard = self.create_vr_checkerboard - else: # Create Monitor checkerboard create_checkerboard = self.create_monitor_checkerboard @@ -75,6 +79,12 @@ def create_checkerboard_stim(intensity_checks): self.stim = [create_checkerboard_stim((1, -1)), create_checkerboard_stim((-1, 1))] def present_stimulus(self, idx: int): + # Get the label of the trial + label = self.trials["parameter"].iloc[idx] + + # eye for presentation + eye = 'left' if label is 0 else 'right' + self.black_background.draw() # draw checkerboard @@ -84,7 +94,8 @@ def present_stimulus(self, idx: int): self.window.flip() # Pushing the sample to the EEG - self.eeg.push_sample(marker=checkerboard_frame + 1, timestamp=time()) + marker = self.markernames[label] + self.eeg.push_sample(marker=marker, timestamp=time()) def present_iti(self): self.black_background.draw() From a4f2e0b24af33ea780fe9286bd92725d2a5c49aa Mon Sep 17 00:00:00 2001 From: Ben Pettit Date: Sat, 2 Aug 2025 09:45:10 +1000 Subject: [PATCH 03/39] set up params and trial dataframe --- .../experiments/visual_vep/pattern_reversal_vep.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/eegnb/experiments/visual_vep/pattern_reversal_vep.py b/eegnb/experiments/visual_vep/pattern_reversal_vep.py index 44ed8fc4..86ad4b55 100644 --- a/eegnb/experiments/visual_vep/pattern_reversal_vep.py +++ b/eegnb/experiments/visual_vep/pattern_reversal_vep.py @@ -1,4 +1,5 @@ from time import time +import numpy as np from pandas import DataFrame from psychopy import visual @@ -10,8 +11,8 @@ class VisualPatternReversalVEP(Experiment.BaseExperiment): - def __init__(self, duration=120, eeg: Optional[EEG] = None, save_fn=None, - n_trials=2000, iti=0, soa=0.5, jitter=0, use_vr=False, use_fullscr=True): + def __init__(self, duration=200, eeg: Optional[EEG] = None, save_fn=None, + n_trials=400, iti=0, soa=0.5, jitter=0, 
use_vr=False, use_fullscr=True): super().__init__("Visual Pattern Reversal VEP", duration, eeg, save_fn, n_trials, iti, soa, jitter, use_vr, use_fullscr) @@ -19,7 +20,14 @@ def __init__(self, duration=120, eeg: Optional[EEG] = None, save_fn=None, self.stim = None # Setting up the trial and parameter list - self.parameter = self.n_trials + # Show stimulus in left eye for first half of block, right eye for second half + block_size = 50 + n_repeats = self.n_trials // block_size + left_eye = 0 + right_eye = 1 + # First half of block (25 trials) = left eye, second half (25 trials) = right eye + block = [left_eye] * 25 + [right_eye] * 25 + self.parameter = np.array(block * n_repeats) self.trials = DataFrame(dict(parameter=self.parameter)) @staticmethod From cbb389960eb914029eae546080b2c52585461dc1 Mon Sep 17 00:00:00 2001 From: Ben Pettit Date: Sun, 3 Aug 2025 16:34:26 +1000 Subject: [PATCH 04/39] created BlockExperiment.py --- eegnb/experiments/BlockExperiment.py | 126 ++++++++++++++++++ eegnb/experiments/Experiment.py | 86 ++++++++---- .../visual_vep/pattern_reversal_vep.py | 53 ++++++-- 3 files changed, 227 insertions(+), 38 deletions(-) create mode 100644 eegnb/experiments/BlockExperiment.py diff --git a/eegnb/experiments/BlockExperiment.py b/eegnb/experiments/BlockExperiment.py new file mode 100644 index 00000000..4432077c --- /dev/null +++ b/eegnb/experiments/BlockExperiment.py @@ -0,0 +1,126 @@ +""" +BlockExperiment Class - Extends BaseExperiment with block-based functionality + +This class provides block-based experiment capabilities by inheriting from BaseExperiment +and overriding the run method to handle multiple blocks. It loads stimulus only once +and reuses it across blocks, while allowing block-specific instructions. + +Experiments that need block-based execution should inherit from this class instead of BaseExperiment. +""" +from abc import ABC +from time import time + +from .Experiment import BaseExperiment + + +class BlockExperiment(BaseExperiment, ABC): + """ + Extended experiment class that inherits from BaseExperiment to provide block-based functionality. + + This class is designed for experiments that need to run multiple blocks, with each block + having its own instructions and duration. It loads stimulus only once and reuses it across blocks. 
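+
+    A minimal sketch of a hypothetical subclass (names below are illustrative, not part of this codebase):
+
+        class MyBlockedVEP(BlockExperiment):
+            def load_stimulus(self):
+                ...  # build the PsychoPy stimuli once; they are reused across every block
+            def present_stimulus(self, idx):
+                ...  # draw the stimulus for trial idx and push an EEG marker
+
+    Subclasses may also override present_block_instructions() to show block-specific text.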
+ """ + + def __init__(self, exp_name, block_duration, eeg, save_fn, block_trial_size, n_blocks, iti: float, soa: float, jitter: float, + use_vr=False, use_fullscr=True): + """ Initializer for the Block Experiment Class + + Args: + exp_name (str): Name of the experiment + eeg: EEG device object for recording + save_fn (str): Save filename for data + block_duration (float): Duration of each block in seconds + block_trial_size (int): Number of trials per block + n_blocks (int): Number of blocks to run + iti (float): Inter-trial interval + soa (float): Stimulus on arrival + jitter (float): Random delay between stimulus + use_vr (bool): Use VR for displaying stimulus + use_fullscr (bool): Use fullscreen mode + """ + # Calculate total trials for the base class + total_trials = block_trial_size * n_blocks + + # Initialize the base experiment with total trials + # Pass None for duration if block_duration is None to ignore time spent in instructions + super().__init__(exp_name, block_duration, eeg, save_fn, total_trials, iti, soa, jitter, use_vr, use_fullscr) + + # Store block-specific parameters + self.block_duration = block_duration + self.block_trial_size = block_trial_size + self.n_blocks = n_blocks + + # Current block index + self.current_block_index = 0 + + # Original save filename + self.original_save_fn = save_fn + + # Flag to track if stimulus has been loaded + self.stimulus_loaded = False + + def present_block_instructions(self, trial_number): + self.window.flip() + + def __show_block_instructions(self, block_number): + """ + Show instructions for a specific block + + Args: + block_number (int): Current block number (0-indexed) + + Returns: + tuple: (continue_experiment, instruction_end_time) + - continue_experiment (bool): Whether to continue the experiment + """ + + # Clear any previous input + self._clear_user_input() + + # Wait for user input to continue + while True: + # Display the instruction text + self.present_block_instructions(block_number) + + if self._user_input('start'): + return True + elif self._user_input('cancel'): + return False + + def run(self, instructions=True): + """ + Run the experiment as a series of blocks + + This method overrides BaseExperiment.run() to handle multiple blocks. 
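+
+        Per the implementation below: setup() (including stimulus loading) runs once, the EEG
+        stream is started once, then each block shows its own instructions and runs the shared
+        trial loop; the stream is stopped and the window closed after the last block.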
+ + Args: + instructions (bool): Whether to show the initial experiment instructions + """ + # Setup the experiment (creates window, loads stimulus once) + self.setup(instructions) + + # Start EEG Stream once for all blocks + if self.eeg: + print("Wait for the EEG-stream to start...") + self.eeg.start(self.save_fn) + print("EEG Stream started") + + # Run each block + for block_index in range(self.n_blocks): + self.current_block_index = block_index + print(f"Starting block {block_index + 1} of {self.n_blocks}") + + # Show block-specific instructions + if not self.__show_block_instructions(block_index): + break + + # Run this block + if not self._run_trial_loop(start_time=time(), duration=self.block_duration): + break + + # Stop EEG Stream after all blocks + if self.eeg: + self.eeg.stop() + + # Close window at the end of all blocks + self.window.close() diff --git a/eegnb/experiments/Experiment.py b/eegnb/experiments/Experiment.py index f5d15a85..dd2fa773 100644 --- a/eegnb/experiments/Experiment.py +++ b/eegnb/experiments/Experiment.py @@ -10,6 +10,7 @@ from abc import abstractmethod, ABC from typing import Callable, Optional +from eegnb.devices.eeg import EEG from psychopy import prefs from psychopy.visual.rift import Rift #change the pref libraty to PTB and set the latency mode to high precision @@ -33,18 +34,23 @@ def __init__(self, exp_name, duration, eeg, save_fn, n_trials: int, iti: float, """ Initializer for the Base Experiment Class Args: + exp_name (str): Name of the experiment + duration (float): Duration of the experiment in seconds + eeg: EEG device object for recording + save_fn (str): Save filename function for data n_trials (int): Number of trials/stimulus iti (float): Inter-trial interval soa (float): Stimulus on arrival jitter (float): Random delay between stimulus use_vr (bool): Use VR for displaying stimulus + use_fullscr (bool): Use fullscreen mode """ self.exp_name = exp_name self.instruction_text = """\nWelcome to the {} experiment!\nStay still, focus on the centre of the screen, and try not to blink. \nThis block will run for %s seconds.\n Press spacebar to continue. 
\n""".format(self.exp_name) self.duration = duration - self.eeg = eeg + self.eeg: EEG = eeg self.save_fn = save_fn self.n_trials = n_trials self.iti = iti @@ -143,10 +149,10 @@ def show_instructions(self): self.window.mouseVisible = False # clear/reset any old key/controller events - self.__clear_user_input() + self._clear_user_input() # Waiting for the user to press the spacebar or controller button or trigger to start the experiment - while not self.__user_input('start'): + while not self._user_input('start'): # Displaying the instructions on the screen text = visual.TextStim(win=self.window, text=self.instruction_text, color=[-1, -1, -1]) self.__draw(lambda: self.__draw_instructions(text)) @@ -154,7 +160,7 @@ def show_instructions(self): # Enabling the cursor again self.window.mouseVisible = True - def __user_input(self, input_type): + def _user_input(self, input_type): if input_type == 'start': key_input = 'spacebar' vr_inputs = [ @@ -225,7 +231,7 @@ def __draw(self, present_stimulus: Callable): self.window.setDefaultView() present_stimulus() - def __clear_user_input(self): + def _clear_user_input(self): event.getKeys() self.clear_vr_input() @@ -235,48 +241,46 @@ def clear_vr_input(self): """ if self.use_vr: self.rift.updateInputState() + + def _run_trial_loop(self, start_time, duration): + """ + Run the trial presentation loop + + This method handles the common trial presentation logic used by both + BaseExperiment.run() and BlockExperiment._run_block(). + + Args: + start_time (float): Time when the trial loop started + duration (float): Maximum duration of the trial loop in seconds - def run(self, instructions=True): - """ Do the present operation for a bunch of experiments """ + """ def iti_with_jitter(): return self.iti + np.random.rand() * self.jitter - # Setup the experiment, alternatively could get rid of this line, something to think about - self.setup(instructions) - - print("Wait for the EEG-stream to start...") - - # Start EEG Stream, wait for signal to settle, and then pull timestamp for start point - if self.eeg: - self.eeg.start(self.save_fn, duration=self.record_duration + 5) - - print("EEG Stream started") - - # Run trial until a key is pressed or experiment duration has expired. - start_time = time() + # Initialize trial variables current_trial = trial_end_time = -1 trial_start_time = None - - # Current trial being rendered rendering_trial = -1 - + # Clear/reset user input buffer - self.__clear_user_input() - - while not self.__user_input('cancel') and (time() - start_time) < self.record_duration: - + self._clear_user_input() + + # Run the trial loop + while (time() - start_time) < duration: elapsed_time = time() - start_time + # Do not present stimulus until current trial begins(Adhere to inter-trial interval). if elapsed_time > trial_end_time: current_trial += 1 + + # Calculate timing for this trial trial_start_time = elapsed_time + iti_with_jitter() trial_end_time = trial_start_time + self.soa self.__draw(lambda: self.present_iti()) # Do not present stimulus after trial has ended(stimulus on arrival interval). elif elapsed_time > trial_start_time: - # if current trial number changed present new stimulus. 
if current_trial > rendering_trial: # Stimulus presentation overwritten by specific experiment @@ -285,6 +289,32 @@ def iti_with_jitter(): else: self.__draw(lambda: self.present_iti()) + if self._user_input('cancel'): + return False + + # Return the number of trials that were run + return True + + def run(self, instructions=True): + """ Do the present operation for a bunch of experiments """ + + # Setup the experiment + self.setup(instructions) + + print("Wait for the EEG-stream to start...") + + # Start EEG Stream, wait for signal to settle, and then pull timestamp for start point + if self.eeg: + self.eeg.start(self.save_fn, duration=self.record_duration + 5) + + print("EEG Stream started") + + # Record experiment until a key is pressed or duration has expired. + record_start_time = time() + + # Run the trial loop + self._run_trial_loop(record_start_time, self.record_duration) + # Clearing the screen for the next trial event.clearEvents() diff --git a/eegnb/experiments/visual_vep/pattern_reversal_vep.py b/eegnb/experiments/visual_vep/pattern_reversal_vep.py index 86ad4b55..ee84df03 100644 --- a/eegnb/experiments/visual_vep/pattern_reversal_vep.py +++ b/eegnb/experiments/visual_vep/pattern_reversal_vep.py @@ -5,31 +5,64 @@ from psychopy import visual from typing import Optional from eegnb.devices.eeg import EEG -from eegnb.experiments import Experiment +from eegnb.experiments.BlockExperiment import BlockExperiment from stimupy.stimuli.checkerboards import contrast_contrast -class VisualPatternReversalVEP(Experiment.BaseExperiment): +class VisualPatternReversalVEP(BlockExperiment): - def __init__(self, duration=200, eeg: Optional[EEG] = None, save_fn=None, - n_trials=400, iti=0, soa=0.5, jitter=0, use_vr=False, use_fullscr=True): + def __init__(self, eeg: Optional[EEG] = None, save_fn=None, + block_duration_seconds=25, block_trial_size: int=50, n_blocks: int=8, iti=0, soa=0.5, jitter=0, + use_vr=False, use_fullscr=True): - super().__init__("Visual Pattern Reversal VEP", duration, eeg, save_fn, n_trials, iti, soa, jitter, use_vr, use_fullscr) + super().__init__("Visual Pattern Reversal VEP", block_duration_seconds, eeg, save_fn, block_trial_size, n_blocks, iti, soa, jitter, + use_vr, use_fullscr) self.black_background = None self.stim = None # Setting up the trial and parameter list # Show stimulus in left eye for first half of block, right eye for second half - block_size = 50 - n_repeats = self.n_trials // block_size + self.block_trial_size = 50 + n_repeats = self.n_trials // self.block_trial_size left_eye = 0 right_eye = 1 # First half of block (25 trials) = left eye, second half (25 trials) = right eye - block = [left_eye] * 25 + [right_eye] * 25 + block = [left_eye] * (self.block_trial_size//2) + [right_eye] * (self.block_trial_size//2) self.parameter = np.array(block * n_repeats) self.trials = DataFrame(dict(parameter=self.parameter)) + def present_block_instructions(self, trial_number): + if trial_number % 2 == 0: + instruction_text = """ + CLOSE YOUR RIGHT EYE + KEEP YOUR LEFT EYE OPEN + + For the next 25 trials, please: + - Close your right eye completely + - Keep your left eye open and focused on the center of the screen + - Stay as still as possible + + Press SPACEBAR when ready to continue + """ + else: + instruction_text = """ + CLOSE YOUR LEFT EYE + KEEP YOUR RIGHT EYE OPEN + + For the next 25 trials, please: + - Close your left eye completely + - Keep your right eye open and focused on the center of the screen + - Stay as still as possible + + Press SPACEBAR when ready to 
continue + """ + + text = visual.TextStim(win=self.window, text=instruction_text, color=[-1, -1, -1]) + text.draw() + self.window.flip() + + @staticmethod def create_monitor_checkerboard(intensity_checks): # Standard parameters for monitor-based pattern reversal VEP @@ -84,14 +117,14 @@ def create_checkerboard_stim(intensity_checks): image=create_checkerboard(intensity_checks)['img'], units='pix', size=size, color='white') - self.stim = [create_checkerboard_stim((1, -1)), create_checkerboard_stim((-1, 1))] + return [create_checkerboard_stim((1, -1)), create_checkerboard_stim((-1, 1))] def present_stimulus(self, idx: int): # Get the label of the trial label = self.trials["parameter"].iloc[idx] # eye for presentation - eye = 'left' if label is 0 else 'right' + eye = 'left' if label == 0 else 'right' self.black_background.draw() From 39147a521aec955ded66d09a60bda71348eee03f Mon Sep 17 00:00:00 2001 From: Benjamin Pettit Date: Sun, 3 Aug 2025 21:40:14 +1000 Subject: [PATCH 05/39] fixed psychxr on 3.9 --- requirements.txt | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index 8edb7cc6..259611f1 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,7 +3,8 @@ scikit-learn>=0.23.2 pandas>=1.1.4 -numpy>=1.26.0; python_version >= "3.9" +# psychxr build pinned to this version of numpy. +numpy<1.26; python_version >= "3.9" numpy<=1.24.4; python_version == "3.8" mne>=0.20.8 seaborn>=0.11.0 @@ -58,7 +59,8 @@ ffpyplayer==4.5.2 # 4.5.3 fails to build as wheel. psychtoolbox scikit-learn>=0.23.2 pandas>=1.1.4 -numpy>=1.26.0; python_version >= "3.9" +# psychxr build pinned to this version of numpy. +numpy>=1.26,<1.27; python_version >= "3.9" numpy==1.24.4; python_version == "3.8" mne>=0.20.8 seaborn>=0.11.0 From f758b2691be44c12b08e5d1ca70a92030a4be528 Mon Sep 17 00:00:00 2001 From: Benjamin Pettit Date: Sun, 3 Aug 2025 22:09:16 +1000 Subject: [PATCH 06/39] refactored experiment instructions --- eegnb/experiments/BlockExperiment.py | 22 +++++++-- eegnb/experiments/Experiment.py | 7 +-- .../visual_vep/pattern_reversal_vep.py | 46 +++++++------------ 3 files changed, 38 insertions(+), 37 deletions(-) diff --git a/eegnb/experiments/BlockExperiment.py b/eegnb/experiments/BlockExperiment.py index 4432077c..93910d91 100644 --- a/eegnb/experiments/BlockExperiment.py +++ b/eegnb/experiments/BlockExperiment.py @@ -27,9 +27,9 @@ def __init__(self, exp_name, block_duration, eeg, save_fn, block_trial_size, n_b Args: exp_name (str): Name of the experiment + block_duration (float): Duration of each block in seconds eeg: EEG device object for recording save_fn (str): Save filename for data - block_duration (float): Duration of each block in seconds block_trial_size (int): Number of trials per block n_blocks (int): Number of blocks to run iti (float): Inter-trial interval @@ -59,10 +59,24 @@ def __init__(self, exp_name, block_duration, eeg, save_fn, block_trial_size, n_b # Flag to track if stimulus has been loaded self.stimulus_loaded = False - def present_block_instructions(self, trial_number): + def present_block_instructions(self, current_block): + """ + Display instructions for the current block to the user. + + This method is meant to be overridden by child classes to provide + experiment-specific instructions before each block. The base implementation + simply flips the window without adding any text. + + This method is called by __show_block_instructions in a loop until the user + provides input to continue or cancel the experiment. 
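+
+        A minimal override sketch (illustrative only, mirroring what the pattern-reversal VEP
+        subclass does with PsychoPy's TextStim):
+
+            def present_block_instructions(self, current_block):
+                text = visual.TextStim(win=self.window, text=f"Block {current_block + 1}",
+                                       color=[-1, -1, -1])
+                text.draw()
+                self.window.flip()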
+ + Args: + current_block (int): The current block number (0-indexed), used to customize + instructions for specific blocks if needed. + """ self.window.flip() - def __show_block_instructions(self, block_number): + def _show_block_instructions(self, block_number): """ Show instructions for a specific block @@ -111,7 +125,7 @@ def run(self, instructions=True): print(f"Starting block {block_index + 1} of {self.n_blocks}") # Show block-specific instructions - if not self.__show_block_instructions(block_index): + if not self._show_block_instructions(block_index): break # Run this block diff --git a/eegnb/experiments/Experiment.py b/eegnb/experiments/Experiment.py index dd2fa773..331539be 100644 --- a/eegnb/experiments/Experiment.py +++ b/eegnb/experiments/Experiment.py @@ -47,8 +47,7 @@ def __init__(self, exp_name, duration, eeg, save_fn, n_trials: int, iti: float, """ self.exp_name = exp_name - self.instruction_text = """\nWelcome to the {} experiment!\nStay still, focus on the centre of the screen, and try not to blink. \nThis block will run for %s seconds.\n - Press spacebar to continue. \n""".format(self.exp_name) + self.instruction_text = None self.duration = duration self.eeg: EEG = eeg self.save_fn = save_fn @@ -143,7 +142,9 @@ def show_instructions(self): """ # Splitting instruction text into lines - self.instruction_text = self.instruction_text % self.duration + if self.instruction_text is None: + self.instruction_text = """\nWelcome to the {} experiment!\nStay still, focus on the centre of the screen, and try not to blink. \nThis block will run for %s seconds.\n + Press spacebar to continue. \n""".format(self.exp_name) % self.duration # Disabling the cursor during display of instructions self.window.mouseVisible = False diff --git a/eegnb/experiments/visual_vep/pattern_reversal_vep.py b/eegnb/experiments/visual_vep/pattern_reversal_vep.py index ee84df03..1fd50b9d 100644 --- a/eegnb/experiments/visual_vep/pattern_reversal_vep.py +++ b/eegnb/experiments/visual_vep/pattern_reversal_vep.py @@ -1,4 +1,5 @@ -from time import time +from abc import ABC +from time import time import numpy as np from pandas import DataFrame @@ -9,17 +10,22 @@ from stimupy.stimuli.checkerboards import contrast_contrast -class VisualPatternReversalVEP(BlockExperiment): +class VisualPatternReversalVEP(BlockExperiment,ABC): def __init__(self, eeg: Optional[EEG] = None, save_fn=None, block_duration_seconds=25, block_trial_size: int=50, n_blocks: int=8, iti=0, soa=0.5, jitter=0, use_vr=False, use_fullscr=True): - super().__init__("Visual Pattern Reversal VEP", block_duration_seconds, eeg, save_fn, block_trial_size, n_blocks, iti, soa, jitter, - use_vr, use_fullscr) + super().__init__("Visual Pattern Reversal VEP", block_duration_seconds, eeg, save_fn, block_trial_size, n_blocks, iti, soa, jitter, use_vr, use_fullscr) - self.black_background = None - self.stim = None + self.instruction_text = f"""Welcome to the Visual Pattern Reversal VEP experiment! + + Stay still and focus on the red dot in the centre of the screen. + + This experiment will run for {n_blocks} blocks of {block_duration_seconds} seconds each. + + Press spacebar or controller to continue. 
+ """ # Setting up the trial and parameter list # Show stimulus in left eye for first half of block, right eye for second half @@ -32,31 +38,11 @@ def __init__(self, eeg: Optional[EEG] = None, save_fn=None, self.parameter = np.array(block * n_repeats) self.trials = DataFrame(dict(parameter=self.parameter)) - def present_block_instructions(self, trial_number): - if trial_number % 2 == 0: - instruction_text = """ - CLOSE YOUR RIGHT EYE - KEEP YOUR LEFT EYE OPEN - - For the next 25 trials, please: - - Close your right eye completely - - Keep your left eye open and focused on the center of the screen - - Stay as still as possible - - Press SPACEBAR when ready to continue - """ + def present_block_instructions(self, current_block): + if current_block % 2 == 0: + instruction_text = "Close your right eye, then focus on the red dot with your left eye. Press spacebar or controller when ready." else: - instruction_text = """ - CLOSE YOUR LEFT EYE - KEEP YOUR RIGHT EYE OPEN - - For the next 25 trials, please: - - Close your left eye completely - - Keep your right eye open and focused on the center of the screen - - Stay as still as possible - - Press SPACEBAR when ready to continue - """ + instruction_text = "Close your left eye, then focus on the red dot with your right eye. Press spacebar or controller when ready." text = visual.TextStim(win=self.window, text=instruction_text, color=[-1, -1, -1]) text.draw() From 43f6e58043800d778bb9daea3b5995d7e1545049 Mon Sep 17 00:00:00 2001 From: Benjamin Pettit Date: Sun, 3 Aug 2025 22:17:09 +1000 Subject: [PATCH 07/39] fix --- eegnb/experiments/visual_vep/pattern_reversal_vep.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/eegnb/experiments/visual_vep/pattern_reversal_vep.py b/eegnb/experiments/visual_vep/pattern_reversal_vep.py index 1fd50b9d..d9a6aa46 100644 --- a/eegnb/experiments/visual_vep/pattern_reversal_vep.py +++ b/eegnb/experiments/visual_vep/pattern_reversal_vep.py @@ -1,5 +1,4 @@ -from abc import ABC -from time import time +from time import time import numpy as np from pandas import DataFrame @@ -10,7 +9,7 @@ from stimupy.stimuli.checkerboards import contrast_contrast -class VisualPatternReversalVEP(BlockExperiment,ABC): +class VisualPatternReversalVEP(BlockExperiment): def __init__(self, eeg: Optional[EEG] = None, save_fn=None, block_duration_seconds=25, block_trial_size: int=50, n_blocks: int=8, iti=0, soa=0.5, jitter=0, From 7654574dc3333675702f19c6422ff15cc8130986 Mon Sep 17 00:00:00 2001 From: Benjamin Pettit Date: Mon, 4 Aug 2025 22:36:57 +1000 Subject: [PATCH 08/39] fixed blocks and event markers --- .../visual_vep/pattern_reversal_vep.py | 46 +++++++++++-------- 1 file changed, 27 insertions(+), 19 deletions(-) diff --git a/eegnb/experiments/visual_vep/pattern_reversal_vep.py b/eegnb/experiments/visual_vep/pattern_reversal_vep.py index d9a6aa46..1225cb39 100644 --- a/eegnb/experiments/visual_vep/pattern_reversal_vep.py +++ b/eegnb/experiments/visual_vep/pattern_reversal_vep.py @@ -27,26 +27,16 @@ def __init__(self, eeg: Optional[EEG] = None, save_fn=None, """ # Setting up the trial and parameter list - # Show stimulus in left eye for first half of block, right eye for second half - self.block_trial_size = 50 - n_repeats = self.n_trials // self.block_trial_size left_eye = 0 right_eye = 1 - # First half of block (25 trials) = left eye, second half (25 trials) = right eye - block = [left_eye] * (self.block_trial_size//2) + [right_eye] * (self.block_trial_size//2) - self.parameter = np.array(block * n_repeats) + # 
Alternate between left and right eye blocks + block_eyes = [] + for block_num in range(n_blocks): + eye = left_eye if block_num % 2 == 0 else right_eye + block_eyes.extend([eye] * block_trial_size) + self.parameter = np.array(block_eyes) self.trials = DataFrame(dict(parameter=self.parameter)) - def present_block_instructions(self, current_block): - if current_block % 2 == 0: - instruction_text = "Close your right eye, then focus on the red dot with your left eye. Press spacebar or controller when ready." - else: - instruction_text = "Close your left eye, then focus on the red dot with your right eye. Press spacebar or controller when ready." - - text = visual.TextStim(win=self.window, text=instruction_text, color=[-1, -1, -1]) - text.draw() - self.window.flip() - @staticmethod def create_monitor_checkerboard(intensity_checks): @@ -57,7 +47,7 @@ def create_monitor_checkerboard(intensity_checks): ppd=72, # pixels per degree frequency=(0.5, 0.5), # spatial frequency of the checkerboard (0.5 cpd = 1 degree check size) intensity_checks=intensity_checks, - target_shape=(1, 1), + target_shape=(0, 0), alpha=0, tau=0 ) @@ -72,7 +62,7 @@ def create_vr_checkerboard(intensity_checks): ppd=20, # pixels per degree for Quest 2 frequency=(0.5, 0.5), # spatial frequency (0.5 cpd = 1 degree check size) intensity_checks=intensity_checks, - target_shape=(1, 1), + target_shape=(0, 0), alpha=0, tau=0 ) @@ -97,6 +87,11 @@ def load_stimulus(self): height=self.window.size[1], fillColor='black') + # fixation + grating_sf = 400 if self.use_vr else 0.2 + self.fixation = visual.GratingStim(win=self.window, pos=[0, 0], sf=grating_sf, color=[1, 0, 0]) + self.fixation.size = 0.02 if self.use_vr else 0.4 + def create_checkerboard_stim(intensity_checks): return visual.ImageStim(self.window, image=create_checkerboard(intensity_checks)['img'], @@ -104,9 +99,21 @@ def create_checkerboard_stim(intensity_checks): return [create_checkerboard_stim((1, -1)), create_checkerboard_stim((-1, 1))] + def present_block_instructions(self, current_block): + if current_block % 2 == 0: + instruction_text = "Close your right eye, then focus on the red dot with your left eye. Press spacebar or controller when ready." + else: + instruction_text = "Close your left eye, then focus on the red dot with your right eye. Press spacebar or controller when ready." 
+ + text = visual.TextStim(win=self.window, text=instruction_text, color=[-1, -1, -1]) + text.draw() + self.fixation.draw() + self.window.flip() + def present_stimulus(self, idx: int): # Get the label of the trial - label = self.trials["parameter"].iloc[idx] + block_trial_offset = self.current_block_index*self.block_trial_size + label = self.trials["parameter"].iloc[idx+block_trial_offset] # eye for presentation eye = 'left' if label == 0 else 'right' @@ -117,6 +124,7 @@ def present_stimulus(self, idx: int): checkerboard_frame = idx % 2 image = self.stim[checkerboard_frame] image.draw() + self.fixation.draw() self.window.flip() # Pushing the sample to the EEG From d786e474b0440541fbd62a4dfa764842a9de326c Mon Sep 17 00:00:00 2001 From: Benjamin Pettit Date: Wed, 6 Aug 2025 21:09:11 +1000 Subject: [PATCH 09/39] only 4 blocks --- eegnb/experiments/visual_vep/pattern_reversal_vep.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/eegnb/experiments/visual_vep/pattern_reversal_vep.py b/eegnb/experiments/visual_vep/pattern_reversal_vep.py index 1225cb39..72a1e1d9 100644 --- a/eegnb/experiments/visual_vep/pattern_reversal_vep.py +++ b/eegnb/experiments/visual_vep/pattern_reversal_vep.py @@ -12,7 +12,7 @@ class VisualPatternReversalVEP(BlockExperiment): def __init__(self, eeg: Optional[EEG] = None, save_fn=None, - block_duration_seconds=25, block_trial_size: int=50, n_blocks: int=8, iti=0, soa=0.5, jitter=0, + block_duration_seconds=50, block_trial_size: int=100, n_blocks: int=4, iti=0, soa=0.5, jitter=0, use_vr=False, use_fullscr=True): super().__init__("Visual Pattern Reversal VEP", block_duration_seconds, eeg, save_fn, block_trial_size, n_blocks, iti, soa, jitter, use_vr, use_fullscr) From 4202631059b71d4e3d4977f8ce765552580f9ae9 Mon Sep 17 00:00:00 2001 From: Ben Pettit Date: Wed, 13 Aug 2025 16:50:25 -0400 Subject: [PATCH 10/39] block example --- ...__block_pattern_reversal_run_experiment.py | 58 ++++ .../01r__block_pattern_reversal_viz.ipynb | 277 ++++++++++++++++++ 2 files changed, 335 insertions(+) create mode 100644 examples/visual_block_pattern_reversal/00x__block_pattern_reversal_run_experiment.py create mode 100644 examples/visual_block_pattern_reversal/01r__block_pattern_reversal_viz.ipynb diff --git a/examples/visual_block_pattern_reversal/00x__block_pattern_reversal_run_experiment.py b/examples/visual_block_pattern_reversal/00x__block_pattern_reversal_run_experiment.py new file mode 100644 index 00000000..0cf6fe89 --- /dev/null +++ b/examples/visual_block_pattern_reversal/00x__block_pattern_reversal_run_experiment.py @@ -0,0 +1,58 @@ +""" +P100 run experiment +=============================== + +This example demonstrates the initiation of an EEG stream with eeg-notebooks, and how to run +an experiment. 
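+
+As written, this script assumes an OpenBCI Cyton board on a serial port and a DATA_DIR environment
+variable pointing at the output directory; the commented-out EEG(device="synthetic") line can be
+swapped in to dry-run the experiment without hardware.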
+ +""" +from os import path, getenv + +################################################################################################### +# Setup +# --------------------- +# +# Imports +from eegnb import generate_save_fn +from eegnb.devices.eeg import EEG +from eegnb.experiments.visual_vep.pattern_reversal_vep import VisualPatternReversalVEP +import platform + +################################################################################################### +# Initiate EEG device +# --------------------- +# +# Start EEG device + +if platform.system() == "Windows": + serial_port = "COM3" +else: + serial_port = "/dev/cu.usbserial-DM03H289" +eeg_device = EEG(device="cyton", + ch_names=['CFz', 'CPz', 'C3', 'C4', 'PO3', 'PO4', 'POz', 'Oz'], + serial_port=serial_port) +# eeg_device = EEG(device="synthetic") + +# Create save file name +data_dir = getenv('DATA_DIR') +data_dir = path.join(path.expanduser("~/"), data_dir, "data") +save_fn = generate_save_fn(eeg_device.device_name, + experiment="block_both_eyes_pattern_reversal-mark_iv_headset", + site=platform.system() + "_acer_34_100hz", + subject_id=0, + session_nb=1, + data_dir=data_dir) +print(save_fn) + +# replace filename with new filename + +################################################################################################### +# Run experiment +# --------------------- +# + +pattern_reversal_vep = VisualPatternReversalVEP(eeg=eeg_device, save_fn=save_fn, use_fullscr=True) +pattern_reversal_vep.run() + +# TODO: save latency info and other metadata +# pattern_reversal_vep.save_metadata() diff --git a/examples/visual_block_pattern_reversal/01r__block_pattern_reversal_viz.ipynb b/examples/visual_block_pattern_reversal/01r__block_pattern_reversal_viz.ipynb new file mode 100644 index 00000000..ed5c0263 --- /dev/null +++ b/examples/visual_block_pattern_reversal/01r__block_pattern_reversal_viz.ipynb @@ -0,0 +1,277 @@ +{ + "cells": [ + { + "cell_type": "code", + "metadata": {}, + "source": [ + "import np\n", + "import vep_utils\n", + "\n", + "\"\"\"\n", + "Pattern reversal Load and Visualize Data\n", + "===============================\n", + "\n", + "This example demonstrates loading, organizing, and visualizing EP response data from the visual P100 experiment. \n", + "\n", + "An animation of a checkerboard reversal is shown(the checkerboard squares' colours are toggled once each half a second).\n", + "\n", + "The data used is the first subject and first session of the one of the eeg-notebooks P100 example datasets.\n", + "It was recorded using an OpenBCI Ultracortex EEG headset(Mark IV) with it's last five electrodes placed in the headset's\n", + "node locations of (PO1, Oz, PO2, P3 and P4).\n", + "These headset node locations were used to fit around a Meta Quest 2 headset, which tilted/angled the headset backwards\n", + "so that the real locations of the electrodes are closer to the occipital lobe - O1, Iz, O2, PO1 and PO2.\n", + "The session consisted of using the Meta Quest 2 linked with a PC to display the checkerboard reversal animation\n", + "for thirty seconds of continuous recording. \n", + "\n", + "We first use the `fetch_datasets` to obtain a list of filenames. If these files are not already present \n", + "in the specified data directory, they will be quickly downloaded from the cloud. \n", + "\n", + "After loading the data from the occiptal channels, we place it in an MNE `Epochs` object, and then an `Evoked` object to obtain\n", + "the trial-averaged delay of the response. 
\n", + "\n", + "The final figure plotted at the end shows the P100 response EP waveform.\n", + "\"\"\"\n", + "\n", + "###################################################################################################\n", + "# Setup\n", + "# ---------------------\n", + "\n", + "# Some standard pythonic imports\n", + "import os\n", + "from collections import OrderedDict\n", + "import warnings\n", + "warnings.filterwarnings('ignore')\n", + "\n", + "# MNE functions\n", + "from mne import Epochs,find_events\n", + "\n", + "# EEG-Notebooks functions\n", + "from eegnb.analysis.utils import load_data\n", + "from vep_utils import plot_vep\n", + "from os import path, getenv\n", + "\n", + "###################################################################################################\n", + "# Load Data\n", + "# ---------------------\n", + "#\n", + "# We will use the eeg-notebooks P100 example dataset\n", + "#\n", + "# Note that if you are running this locally, the following cell will download\n", + "# the example dataset, if you do not already have it.\n", + "#\n", + "\n", + "###################################################################################################\n", + "\n", + "data_dir = path.join(path.expanduser(\"~/\"), getenv('DATA_DIR'), \"data\")\n", + "raw = load_data(subject=0,session=1,\n", + " experiment='block_both_eyes_pattern_reversal-mark_iv_headset', site='windows_acer_34_100hz', device_name='cyton',\n", + " data_dir=data_dir)" + ], + "outputs": [], + "execution_count": null + }, + { + "cell_type": "code", + "metadata": {}, + "source": [ + "###################################################################################################\n", + "# Visualize the power spectrum\n", + "# ----------------------------\n", + "\n", + "raw.plot_psd()\n", + "\n", + "###################################################################################################\n", + "# Filtering\n", + "# ----------------------------\n", + "\n", + "raw.filter(1,30, method='fir')\n", + "raw.plot_psd(fmin=1, fmax=30)" + ], + "outputs": [], + "execution_count": null + }, + { + "cell_type": "code", + "metadata": {}, + "source": [ + "\n", + "###################################################################################################\n", + "# Epoching\n", + "# ----------------------------\n", + "\n", + "# Create an array containing the timestamps and which eye was presented the stimulus\n", + "events = find_events(raw)\n", + "event_id = {'left_eye': 1, 'right_eye': 2}\n", + "\n", + "# Create an MNE Epochs object representing all the epochs around stimulus presentation\n", + "epochs = Epochs(raw, events=events, event_id=event_id,\n", + " tmin=-0.1, tmax=0.4, baseline=None,\n", + " reject={'eeg': 65e-6}, preload=True,\n", + " verbose=False, picks=[7])\n", + "epochs.shift_time(-vep_utils.windows_lag())\n", + "print('sample drop %: ', (1 - len(epochs.events)/len(events)) * 100)\n", + "epochs" + ], + "outputs": [], + "execution_count": null + }, + { + "metadata": {}, + "cell_type": "code", + "source": [ + "###################################################################################################\n", + "# Epoch average\n", + "# ----------------------------\n", + "evoked = epochs.average()\n", + "evoked.plot(spatial_colors=True, show=False)" + ], + "outputs": [], + "execution_count": null + }, + { + "cell_type": "code", + "source": [ + "evoked_potentials = epochs['left_eye'].average(picks=['Oz'])\n", + "plot_vep(evoked_potentials)" + ], + "metadata": { + "collapsed": false + }, + "outputs": 
[], + "execution_count": null + }, + { + "metadata": {}, + "cell_type": "code", + "source": [ + "evoked_potentials = epochs['right_eye'].average(picks=['Oz'])\n", + "plot_vep(evoked_potentials)" + ], + "outputs": [], + "execution_count": null + }, + { + "metadata": {}, + "cell_type": "code", + "source": [ + "###################################################################################################\n", + "# Compare evoked potentials by event type\n", + "# ----------------------------\n", + "\n", + "# Create separate evoked responses for each event type\n", + "evoked_left = epochs['left_eye'].average(picks=['Oz'])\n", + "evoked_right = epochs['right_eye'].average(picks=['Oz'])\n", + "\n", + "# Plot both conditions on the same figure for comparison\n", + "import matplotlib.pyplot as plt\n", + "\n", + "fig, ax = plt.subplots(figsize=(10, 6))\n", + "\n", + "# Extract time points and data\n", + "times = evoked_left.times * 1000 # Convert to milliseconds\n", + "left_data = evoked_left.data[0] * 1e6 # Convert to microvolts\n", + "right_data = evoked_right.data[0] * 1e6 # Convert to microvolts\n", + "\n", + "# Plot both conditions\n", + "ax.plot(times, left_data, label='Left Eye', color='blue', linewidth=2)\n", + "ax.plot(times, right_data, label='Right Eye', color='red', linewidth=2)\n", + "\n", + "# Add formatting\n", + "ax.set_xlabel('Time (ms)')\n", + "ax.set_ylabel('Amplitude (μV)')\n", + "ax.set_title('Comparison of Evoked Potentials: Left Eye vs Right Eye')\n", + "ax.legend()\n", + "ax.grid(True, alpha=0.3)\n", + "ax.axhline(y=0, color='black', linestyle='-', alpha=0.3)\n", + "ax.axvline(x=0, color='black', linestyle='--', alpha=0.5, label='Stimulus Onset')\n", + "\n", + "plt.tight_layout()\n", + "plt.show()\n", + "\n", + "# Print summary statistics\n", + "print(f\"Left eye - Number of epochs: {len(epochs['left_eye'])}\")\n", + "print(f\"Right eye - Number of epochs: {len(epochs['right_eye'])}\")\n", + "\n", + "# Find P100 peak for each condition (typically around 100ms)\n", + "p100_window = (80, 120) # milliseconds\n", + "time_mask = (times >= p100_window[0]) & (times <= p100_window[1])\n", + "\n", + "left_p100_idx = np.argmax(left_data[time_mask])\n", + "right_p100_idx = np.argmax(right_data[time_mask])\n", + "\n", + "left_p100_time = times[time_mask][left_p100_idx]\n", + "left_p100_amp = left_data[time_mask][left_p100_idx]\n", + "\n", + "right_p100_time = times[time_mask][right_p100_idx]\n", + "right_p100_amp = right_data[time_mask][right_p100_idx]\n", + "\n", + "print(f\"\\nP100 Peak Analysis:\")\n", + "print(f\"Left eye - Peak at {left_p100_time:.1f}ms, amplitude: {left_p100_amp:.2f}μV\")\n", + "print(f\"Right eye - Peak at {right_p100_time:.1f}ms, amplitude: {right_p100_amp:.2f}μV\")" + ], + "outputs": [], + "execution_count": null + }, + { + "metadata": {}, + "cell_type": "code", + "source": [ + "###################################################################################################\n", + "# Create difference wave\n", + "# ----------------------------\n", + "\n", + "# Calculate the difference between conditions\n", + "difference_data = left_data - right_data\n", + "\n", + "fig, ax = plt.subplots(figsize=(10, 6))\n", + "ax.plot(times, difference_data, label='Left - Right', color='green', linewidth=2)\n", + "ax.set_xlabel('Time (ms)')\n", + "ax.set_ylabel('Amplitude Difference (μV)')\n", + "ax.set_title('Difference Wave: Left Eye - Right Eye')\n", + "ax.grid(True, alpha=0.3)\n", + "ax.axhline(y=0, color='black', linestyle='-', alpha=0.3)\n", + "ax.axvline(x=0, 
color='black', linestyle='--', alpha=0.5, label='Stimulus Onset')\n", + "ax.legend()\n", + "\n", + "plt.tight_layout()\n", + "plt.show()" + ], + "outputs": [], + "execution_count": null + }, + { + "metadata": {}, + "cell_type": "code", + "source": "", + "outputs": [], + "execution_count": null + } + ], + "metadata": { + "kernelspec": { + "name": "python3", + "language": "python", + "display_name": "Python 3 (ipykernel)" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.13" + }, + "orig_nbformat": 4, + "vscode": { + "interpreter": { + "hash": "6c096d3d5a52aa51b1da1c53f69d12a5c697c7b765ecfb9c622a0b909667c12d" + } + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} From 456555f362773f4496ba1b38bc2dee378707946f Mon Sep 17 00:00:00 2001 From: Benjamin Pettit Date: Fri, 15 Aug 2025 16:36:40 -0400 Subject: [PATCH 11/39] fixed vr display --- eegnb/experiments/BlockExperiment.py | 2 +- eegnb/experiments/Experiment.py | 10 +++++----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/eegnb/experiments/BlockExperiment.py b/eegnb/experiments/BlockExperiment.py index 93910d91..cb739524 100644 --- a/eegnb/experiments/BlockExperiment.py +++ b/eegnb/experiments/BlockExperiment.py @@ -94,7 +94,7 @@ def _show_block_instructions(self, block_number): # Wait for user input to continue while True: # Display the instruction text - self.present_block_instructions(block_number) + super()._draw(lambda: self.present_block_instructions(block_number)) if self._user_input('start'): return True diff --git a/eegnb/experiments/Experiment.py b/eegnb/experiments/Experiment.py index 331539be..ab602158 100644 --- a/eegnb/experiments/Experiment.py +++ b/eegnb/experiments/Experiment.py @@ -156,7 +156,7 @@ def show_instructions(self): while not self._user_input('start'): # Displaying the instructions on the screen text = visual.TextStim(win=self.window, text=self.instruction_text, color=[-1, -1, -1]) - self.__draw(lambda: self.__draw_instructions(text)) + self._draw(lambda: self.__draw_instructions(text)) # Enabling the cursor again self.window.mouseVisible = True @@ -221,7 +221,7 @@ def __draw_instructions(self, text): text.draw() self.window.flip() - def __draw(self, present_stimulus: Callable): + def _draw(self, present_stimulus: Callable): """ Set the current eye position and projection for all given stimulus, then draw all stimulus and flip the window/buffer @@ -278,17 +278,17 @@ def iti_with_jitter(): # Calculate timing for this trial trial_start_time = elapsed_time + iti_with_jitter() trial_end_time = trial_start_time + self.soa - self.__draw(lambda: self.present_iti()) + self._draw(lambda: self.present_iti()) # Do not present stimulus after trial has ended(stimulus on arrival interval). elif elapsed_time > trial_start_time: # if current trial number changed present new stimulus. 
if current_trial > rendering_trial: # Stimulus presentation overwritten by specific experiment - self.__draw(lambda: self.present_stimulus(current_trial)) + self._draw(lambda: self.present_stimulus(current_trial)) rendering_trial = current_trial else: - self.__draw(lambda: self.present_iti()) + self._draw(lambda: self.present_iti()) if self._user_input('cancel'): return False From 4bcf3cf84d4ebd6623ea255728a99b28936cdf10 Mon Sep 17 00:00:00 2001 From: Ben Pettit Date: Sun, 17 Aug 2025 10:38:47 -0400 Subject: [PATCH 12/39] dont bother with specifying python version in yml --- environments/eeg-expy-docsbuild.yml | 5 ++++- environments/eeg-expy-full.yml | 5 ++++- environments/eeg-expy-stimpres.yml | 5 ++++- environments/eeg-expy-streaming.yml | 5 ++++- environments/eeg-expy-streamstim.yml | 5 ++++- 5 files changed, 20 insertions(+), 5 deletions(-) diff --git a/environments/eeg-expy-docsbuild.yml b/environments/eeg-expy-docsbuild.yml index 06dbaa5d..6a5dada1 100644 --- a/environments/eeg-expy-docsbuild.yml +++ b/environments/eeg-expy-docsbuild.yml @@ -3,7 +3,10 @@ channels: - conda-forge dependencies: # System-level dependencies - - python>=3.8,<=3.13 + + # conda overrides current environment python version when not using --freeze-installed, and installs a random version of python... + # - python>=3.8,<=3.13 + - pytables # install pytables for macOS arm64, so do not need to build from source. - rust # used by docsbuild - pip diff --git a/environments/eeg-expy-full.yml b/environments/eeg-expy-full.yml index 05cbd476..f3c440ad 100644 --- a/environments/eeg-expy-full.yml +++ b/environments/eeg-expy-full.yml @@ -3,7 +3,10 @@ channels: - conda-forge dependencies: # System-level dependencies - - python>=3.8,<=3.10 # psychopy <= 3.10 + + # conda overrides current environment python version when not using --freeze-installed, and installs a random version of python... + # - python>=3.8,<=3.10 # psychopy <= 3.10 + - dukpy==0.2.3 # psychopy dependency, avoid failing due to building wheel on win 3.9. - pytables # install pytables for macOS arm64, so do not need to build from source. - rust # used by docsbuild diff --git a/environments/eeg-expy-stimpres.yml b/environments/eeg-expy-stimpres.yml index c704b04d..5cea820c 100644 --- a/environments/eeg-expy-stimpres.yml +++ b/environments/eeg-expy-stimpres.yml @@ -3,7 +3,10 @@ channels: - conda-forge dependencies: # System-level dependencies - - python>=3.8,<=3.10 # psychopy <= 3.10 + + # conda overrides current environment python version when not using --freeze-installed, and installs a random version of python... + #- python>=3.8,<=3.10 # psychopy <= 3.10 + - dukpy==0.2.3 # psychopy dependency, avoid failing due to building wheel on win 3.9. - wxpython>=4.0 # install wxpython to prevent error on macOS arm64: "site-packages/wx/_core.cpython-38-darwin.so, 0x0002): symbol not found in flat namespace '__ZN10wxBoxSizer20InformFirstDirectionEiii'" - pip diff --git a/environments/eeg-expy-streaming.yml b/environments/eeg-expy-streaming.yml index 8a8a751a..91513696 100644 --- a/environments/eeg-expy-streaming.yml +++ b/environments/eeg-expy-streaming.yml @@ -3,7 +3,10 @@ channels: - conda-forge dependencies: # System-level dependencies - - python>=3.8,<=3.13 + + # conda overrides current environment python version when not using --freeze-installed, and installs a random version of python... + #- python>=3.8,<=3.13 + - liblsl # install liblsl to prevent error on macOS and Ubuntu: "RuntimeError: LSL binary library file was not found." 
- pip - pip: diff --git a/environments/eeg-expy-streamstim.yml b/environments/eeg-expy-streamstim.yml index ec355171..8dd175da 100644 --- a/environments/eeg-expy-streamstim.yml +++ b/environments/eeg-expy-streamstim.yml @@ -4,7 +4,10 @@ channels: - defaults dependencies: # System-level dependencies - - python>=3.8,<=3.10 # psychopy <= 3.10 + + # conda overrides current environment python version when not using --freeze-installed, and installs a random version of python... + #- python>=3.8,<=3.10 # psychopy <= 3.10 + - dukpy==0.2.3 # psychopy dependency, avoid failing due to building wheel on win 3.9. - liblsl # install liblsl to prevent error on macOS and Ubuntu: "RuntimeError: LSL binary library file was not found." - wxpython>=4.0 # install wxpython to prevent error on macOS arm64: "site-packages/wx/_core.cpython-38-darwin.so, 0x0002): symbol not found in flat namespace '__ZN10wxBoxSizer20InformFirstDirectionEiii'" From c1ac2fdebca692d08deb6b6238e06a2f11c0ee27 Mon Sep 17 00:00:00 2001 From: Ben Pettit Date: Sun, 17 Aug 2025 11:01:16 -0400 Subject: [PATCH 13/39] fixed numpy pin --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 259611f1..e7f200b5 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,7 +4,7 @@ scikit-learn>=0.23.2 pandas>=1.1.4 # psychxr build pinned to this version of numpy. -numpy<1.26; python_version >= "3.9" +numpy>=1.26,<1.27; python_version >= "3.9" numpy<=1.24.4; python_version == "3.8" mne>=0.20.8 seaborn>=0.11.0 From 669a24e2b11fac83d0b79b7effb9a147b8d0aa63 Mon Sep 17 00:00:00 2001 From: Ben Pettit Date: Sun, 17 Aug 2025 11:10:25 -0400 Subject: [PATCH 14/39] remove the name field so it doesnt hardcode the environment name --- environments/eeg-expy-docsbuild.yml | 1 - environments/eeg-expy-full.yml | 1 - environments/eeg-expy-stimpres.yml | 1 - environments/eeg-expy-streaming.yml | 1 - environments/eeg-expy-streamstim.yml | 1 - 5 files changed, 5 deletions(-) diff --git a/environments/eeg-expy-docsbuild.yml b/environments/eeg-expy-docsbuild.yml index 6a5dada1..e001934e 100644 --- a/environments/eeg-expy-docsbuild.yml +++ b/environments/eeg-expy-docsbuild.yml @@ -1,4 +1,3 @@ -name: eeg-expy-docsbuild channels: - conda-forge dependencies: diff --git a/environments/eeg-expy-full.yml b/environments/eeg-expy-full.yml index f3c440ad..90040b2c 100644 --- a/environments/eeg-expy-full.yml +++ b/environments/eeg-expy-full.yml @@ -1,4 +1,3 @@ -name: eeg-expy-full channels: - conda-forge dependencies: diff --git a/environments/eeg-expy-stimpres.yml b/environments/eeg-expy-stimpres.yml index 5cea820c..d89da65b 100644 --- a/environments/eeg-expy-stimpres.yml +++ b/environments/eeg-expy-stimpres.yml @@ -1,4 +1,3 @@ -name: eeg-expy-stimpres channels: - conda-forge dependencies: diff --git a/environments/eeg-expy-streaming.yml b/environments/eeg-expy-streaming.yml index 91513696..129370bb 100644 --- a/environments/eeg-expy-streaming.yml +++ b/environments/eeg-expy-streaming.yml @@ -1,4 +1,3 @@ -name: eeg-expy-streaming channels: - conda-forge dependencies: diff --git a/environments/eeg-expy-streamstim.yml b/environments/eeg-expy-streamstim.yml index 8dd175da..d65eb0c9 100644 --- a/environments/eeg-expy-streamstim.yml +++ b/environments/eeg-expy-streamstim.yml @@ -1,4 +1,3 @@ -name: eeg-expy-streamstim channels: - conda-forge - defaults From 5dea45d86639d61de797bf06c7411ee9d91f5d76 Mon Sep 17 00:00:00 2001 From: Ben Pettit Date: Sun, 17 Aug 2025 12:29:02 -0400 Subject: [PATCH 15/39] try individual 
eyes --- eegnb/experiments/Experiment.py | 4 ++-- eegnb/experiments/visual_vep/pattern_reversal_vep.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/eegnb/experiments/Experiment.py b/eegnb/experiments/Experiment.py index ab602158..f9d7c578 100644 --- a/eegnb/experiments/Experiment.py +++ b/eegnb/experiments/Experiment.py @@ -30,7 +30,7 @@ class BaseExperiment(ABC): def __init__(self, exp_name, duration, eeg, save_fn, n_trials: int, iti: float, soa: float, jitter: float, - use_vr=False, use_fullscr = True): + use_vr=False, use_fullscr = True, rift: Optional[Rift] = None): """ Initializer for the Base Experiment Class Args: @@ -59,7 +59,7 @@ def __init__(self, exp_name, duration, eeg, save_fn, n_trials: int, iti: float, self.use_fullscr = use_fullscr self.window_size = [1600,800] - self.rift: Optional[Rift] = None + self.rift = rift # Initializing the record duration and the marker names self.record_duration = np.float32(self.duration) diff --git a/eegnb/experiments/visual_vep/pattern_reversal_vep.py b/eegnb/experiments/visual_vep/pattern_reversal_vep.py index 72a1e1d9..05aa36dd 100644 --- a/eegnb/experiments/visual_vep/pattern_reversal_vep.py +++ b/eegnb/experiments/visual_vep/pattern_reversal_vep.py @@ -15,7 +15,7 @@ def __init__(self, eeg: Optional[EEG] = None, save_fn=None, block_duration_seconds=50, block_trial_size: int=100, n_blocks: int=4, iti=0, soa=0.5, jitter=0, use_vr=False, use_fullscr=True): - super().__init__("Visual Pattern Reversal VEP", block_duration_seconds, eeg, save_fn, block_trial_size, n_blocks, iti, soa, jitter, use_vr, use_fullscr) + super().__init__("Visual Pattern Reversal VEP", block_duration_seconds, eeg, save_fn, block_trial_size, n_blocks, iti, soa, jitter, use_vr, use_fullscr, rift = visual.Rift(monoscopic=False, headLocked=True)) self.instruction_text = f"""Welcome to the Visual Pattern Reversal VEP experiment! @@ -37,7 +37,6 @@ def __init__(self, eeg: Optional[EEG] = None, save_fn=None, self.parameter = np.array(block_eyes) self.trials = DataFrame(dict(parameter=self.parameter)) - @staticmethod def create_monitor_checkerboard(intensity_checks): # Standard parameters for monitor-based pattern reversal VEP @@ -117,6 +116,7 @@ def present_stimulus(self, idx: int): # eye for presentation eye = 'left' if label == 0 else 'right' + self.window.setBuffer(eye) self.black_background.draw() From 1619e3d498f472c5b57c81c705525531815c122a Mon Sep 17 00:00:00 2001 From: Ben Pettit Date: Sun, 17 Aug 2025 17:06:43 -0400 Subject: [PATCH 16/39] try drawing instructions for both eyes --- .../visual_vep/pattern_reversal_vep.py | 26 +++++++++++-------- 1 file changed, 15 insertions(+), 11 deletions(-) diff --git a/eegnb/experiments/visual_vep/pattern_reversal_vep.py b/eegnb/experiments/visual_vep/pattern_reversal_vep.py index 05aa36dd..982276e8 100644 --- a/eegnb/experiments/visual_vep/pattern_reversal_vep.py +++ b/eegnb/experiments/visual_vep/pattern_reversal_vep.py @@ -99,15 +99,17 @@ def create_checkerboard_stim(intensity_checks): return [create_checkerboard_stim((1, -1)), create_checkerboard_stim((-1, 1))] def present_block_instructions(self, current_block): - if current_block % 2 == 0: - instruction_text = "Close your right eye, then focus on the red dot with your left eye. Press spacebar or controller when ready." - else: - instruction_text = "Close your left eye, then focus on the red dot with your right eye. Press spacebar or controller when ready." 
- - text = visual.TextStim(win=self.window, text=instruction_text, color=[-1, -1, -1]) - text.draw() - self.fixation.draw() - self.window.flip() + for eye in ['left', 'right']: + self.window.setBuffer(eye) + if current_block % 2 == 0: + instruction_text = "Close your right eye, then focus on the red dot with your left eye. Press spacebar or controller when ready." + else: + instruction_text = "Close your left eye, then focus on the red dot with your right eye. Press spacebar or controller when ready." + + text = visual.TextStim(win=self.window, text=instruction_text, color=[-1, -1, -1]) + text.draw() + self.fixation.draw() + self.window.flip() def present_stimulus(self, idx: int): # Get the label of the trial @@ -132,5 +134,7 @@ def present_stimulus(self, idx: int): self.eeg.push_sample(marker=marker, timestamp=time()) def present_iti(self): - self.black_background.draw() - self.window.flip() + for eye in ['left', 'right']: + self.window.setBuffer(eye) + self.black_background.draw() + self.window.flip() From f698d67c56e52f006ccf4e03d21a5395d7eb2ad4 Mon Sep 17 00:00:00 2001 From: Ben Pettit Date: Sun, 17 Aug 2025 17:48:10 -0400 Subject: [PATCH 17/39] draw block instructions correctly on monitor --- .../visual_vep/pattern_reversal_vep.py | 37 +++++++++++++------ 1 file changed, 25 insertions(+), 12 deletions(-) diff --git a/eegnb/experiments/visual_vep/pattern_reversal_vep.py b/eegnb/experiments/visual_vep/pattern_reversal_vep.py index 982276e8..a63836a1 100644 --- a/eegnb/experiments/visual_vep/pattern_reversal_vep.py +++ b/eegnb/experiments/visual_vep/pattern_reversal_vep.py @@ -98,18 +98,31 @@ def create_checkerboard_stim(intensity_checks): return [create_checkerboard_stim((1, -1)), create_checkerboard_stim((-1, 1))] - def present_block_instructions(self, current_block): - for eye in ['left', 'right']: - self.window.setBuffer(eye) - if current_block % 2 == 0: - instruction_text = "Close your right eye, then focus on the red dot with your left eye. Press spacebar or controller when ready." - else: - instruction_text = "Close your left eye, then focus on the red dot with your right eye. Press spacebar or controller when ready." - - text = visual.TextStim(win=self.window, text=instruction_text, color=[-1, -1, -1]) - text.draw() - self.fixation.draw() - self.window.flip() + def _draw_block_instruction(self, current_block: int) -> None: + if current_block % 2 == 0: + instruction_text = ( + "Close your right eye, then focus on the red dot with your left eye. " + "Press spacebar or controller when ready." + ) + else: + instruction_text = ( + "Close your left eye, then focus on the red dot with your right eye. " + "Press spacebar or controller when ready." 
+ ) + + text = visual.TextStim(win=self.window, text=instruction_text, color=[-1, -1, -1]) + text.draw() + self.fixation.draw() + self.window.flip() + + def present_block_instructions(self, current_block: int) -> None: + if self.use_vr: + for eye in ["left", "right"]: + self.window.setBuffer(eye) + self._draw_block_instruction(current_block) + else: + self._draw_block_instruction(current_block) + def present_stimulus(self, idx: int): # Get the label of the trial From 63cdfe2aa6720d39890c3c405f9620560fd91a79 Mon Sep 17 00:00:00 2001 From: Ben Pettit Date: Sun, 17 Aug 2025 18:07:59 -0400 Subject: [PATCH 18/39] draw iti for monitor and vr eyes --- .../visual_vep/pattern_reversal_vep.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/eegnb/experiments/visual_vep/pattern_reversal_vep.py b/eegnb/experiments/visual_vep/pattern_reversal_vep.py index a63836a1..68c6d373 100644 --- a/eegnb/experiments/visual_vep/pattern_reversal_vep.py +++ b/eegnb/experiments/visual_vep/pattern_reversal_vep.py @@ -123,7 +123,6 @@ def present_block_instructions(self, current_block: int) -> None: else: self._draw_block_instruction(current_block) - def present_stimulus(self, idx: int): # Get the label of the trial block_trial_offset = self.current_block_index*self.block_trial_size @@ -146,8 +145,14 @@ def present_stimulus(self, idx: int): marker = self.markernames[label] self.eeg.push_sample(marker=marker, timestamp=time()) + def _draw_iti(self) -> None: + self.black_background.draw() + self.window.flip() + def present_iti(self): - for eye in ['left', 'right']: - self.window.setBuffer(eye) - self.black_background.draw() - self.window.flip() + if self.use_vr: + for eye in ['left', 'right']: + self.window.setBuffer(eye) + self._draw_iti() + else: + self._draw_iti() From ba485ef911bbed16905966d8e02e2e203b27403e Mon Sep 17 00:00:00 2001 From: Ben Pettit Date: Sun, 17 Aug 2025 23:23:16 -0400 Subject: [PATCH 19/39] try simplifying instructions per block for vr --- .../experiments/visual_vep/pattern_reversal_vep.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/eegnb/experiments/visual_vep/pattern_reversal_vep.py b/eegnb/experiments/visual_vep/pattern_reversal_vep.py index 68c6d373..239f0d28 100644 --- a/eegnb/experiments/visual_vep/pattern_reversal_vep.py +++ b/eegnb/experiments/visual_vep/pattern_reversal_vep.py @@ -99,14 +99,16 @@ def create_checkerboard_stim(intensity_checks): return [create_checkerboard_stim((1, -1)), create_checkerboard_stim((-1, 1))] def _draw_block_instruction(self, current_block: int) -> None: - if current_block % 2 == 0: - instruction_text = ( - "Close your right eye, then focus on the red dot with your left eye. " - "Press spacebar or controller when ready." - ) + if self.use_vr: + instruction_text = "Press spacebar or controller when ready." + elif current_block % 2 == 0: + instruction_text = ( + "Close your right eye, then focus on the red dot with your left eye. " + "Press spacebar or controller when ready." + ) else: instruction_text = ( - "Close your left eye, then focus on the red dot with your right eye. " + "Close your right eye, then focus on the red dot with your left eye. " "Press spacebar or controller when ready." 
) From 70febad378a609f5e7d50db912b9dbf9fd71c396 Mon Sep 17 00:00:00 2001 From: Benjamin Pettit Date: Mon, 18 Aug 2025 12:08:13 -0400 Subject: [PATCH 20/39] added refresh frame rate check fixed class params added display and refresh rate to experiment name --- eegnb/experiments/BlockExperiment.py | 4 ++-- .../visual_vep/pattern_reversal_vep.py | 22 +++++++++++++++---- 2 files changed, 20 insertions(+), 6 deletions(-) diff --git a/eegnb/experiments/BlockExperiment.py b/eegnb/experiments/BlockExperiment.py index cb739524..355027a6 100644 --- a/eegnb/experiments/BlockExperiment.py +++ b/eegnb/experiments/BlockExperiment.py @@ -22,7 +22,7 @@ class BlockExperiment(BaseExperiment, ABC): """ def __init__(self, exp_name, block_duration, eeg, save_fn, block_trial_size, n_blocks, iti: float, soa: float, jitter: float, - use_vr=False, use_fullscr=True): + use_vr=False, use_fullscr=True, rift=None): """ Initializer for the Block Experiment Class Args: @@ -43,7 +43,7 @@ def __init__(self, exp_name, block_duration, eeg, save_fn, block_trial_size, n_b # Initialize the base experiment with total trials # Pass None for duration if block_duration is None to ignore time spent in instructions - super().__init__(exp_name, block_duration, eeg, save_fn, total_trials, iti, soa, jitter, use_vr, use_fullscr) + super().__init__(exp_name, block_duration, eeg, save_fn, total_trials, iti, soa, jitter, use_vr, use_fullscr, rift=rift) # Store block-specific parameters self.block_duration = block_duration diff --git a/eegnb/experiments/visual_vep/pattern_reversal_vep.py b/eegnb/experiments/visual_vep/pattern_reversal_vep.py index 239f0d28..9f28404a 100644 --- a/eegnb/experiments/visual_vep/pattern_reversal_vep.py +++ b/eegnb/experiments/visual_vep/pattern_reversal_vep.py @@ -11,11 +11,17 @@ class VisualPatternReversalVEP(BlockExperiment): - def __init__(self, eeg: Optional[EEG] = None, save_fn=None, - block_duration_seconds=50, block_trial_size: int=100, n_blocks: int=4, iti=0, soa=0.5, jitter=0, - use_vr=False, use_fullscr=True): + def __init__(self, display_refresh_rate: int, eeg: Optional[EEG] = None, save_fn=None, + block_duration_seconds=50, block_trial_size: int=100, n_blocks: int=4, use_vr=False, use_fullscr=True): - super().__init__("Visual Pattern Reversal VEP", block_duration_seconds, eeg, save_fn, block_trial_size, n_blocks, iti, soa, jitter, use_vr, use_fullscr, rift = visual.Rift(monoscopic=False, headLocked=True)) + self.display_refresh_rate = display_refresh_rate + soa=0.5 + iti=0 + jitter=0 + + rift = visual.Rift(monoscopic=False, headLocked=True) if use_vr else None + + super().__init__("Visual Pattern Reversal VEP", block_duration_seconds, eeg, save_fn, block_trial_size, n_blocks, iti, soa, jitter, use_vr, use_fullscr, rift=rift) self.instruction_text = f"""Welcome to the Visual Pattern Reversal VEP experiment! 
@@ -67,6 +73,14 @@ def create_vr_checkerboard(intensity_checks): ) def load_stimulus(self): + + # Frame rate, in Hz + # GetActualFrameRate() crashes in psychxr due to 'EndFrame called before BeginFrame' + actual_frame_rate = np.round(self.window.displayRefreshRate if self.use_vr else self.window.getActualFrameRate()) + # Ensure the expected frame rate matches and is divisable by the stimulus rate(soa) + assert actual_frame_rate % self.soa == 0 + assert self.display_refresh_rate == actual_frame_rate + if self.use_vr: # Create VR checkerboard create_checkerboard = self.create_vr_checkerboard From 342c68358d67200888a41c5bc17a1c8fac202377 Mon Sep 17 00:00:00 2001 From: Benjamin Pettit Date: Mon, 18 Aug 2025 13:28:01 -0400 Subject: [PATCH 21/39] fixed display on monitor which was crashing draw black colour for closed eye on vr. --- eegnb/experiments/visual_vep/pattern_reversal_vep.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/eegnb/experiments/visual_vep/pattern_reversal_vep.py b/eegnb/experiments/visual_vep/pattern_reversal_vep.py index 9f28404a..7132798e 100644 --- a/eegnb/experiments/visual_vep/pattern_reversal_vep.py +++ b/eegnb/experiments/visual_vep/pattern_reversal_vep.py @@ -145,8 +145,11 @@ def present_stimulus(self, idx: int): label = self.trials["parameter"].iloc[idx+block_trial_offset] # eye for presentation - eye = 'left' if label == 0 else 'right' - self.window.setBuffer(eye) + open_eye = 'left' if label == 0 else 'right' + closed_eye = 'left' if label == 1 else 'right' + + if self.use_vr: + self.window.setBuffer(open_eye) self.black_background.draw() @@ -157,6 +160,11 @@ def present_stimulus(self, idx: int): self.fixation.draw() self.window.flip() + if self.use_vr: + self.window.setBuffer(closed_eye) + self.black_background.draw() + self.window.flip() + # Pushing the sample to the EEG marker = self.markernames[label] self.eeg.push_sample(marker=marker, timestamp=time()) From 37d3252177d9448095b4a3c5664bf0e16b23ccc0 Mon Sep 17 00:00:00 2001 From: Benjamin Pettit Date: Mon, 18 Aug 2025 16:12:48 -0400 Subject: [PATCH 22/39] fixed iti presentation to not occur mid-experiment --- eegnb/experiments/Experiment.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/eegnb/experiments/Experiment.py b/eegnb/experiments/Experiment.py index f9d7c578..d54c5f2e 100644 --- a/eegnb/experiments/Experiment.py +++ b/eegnb/experiments/Experiment.py @@ -7,6 +7,11 @@ obj = VisualP300({parameters}) obj.run() """ +import logging + +# Add this near the top of your file with other imports +logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s') +logger = logging.getLogger(__name__) from abc import abstractmethod, ABC from typing import Callable, Optional @@ -278,10 +283,9 @@ def iti_with_jitter(): # Calculate timing for this trial trial_start_time = elapsed_time + iti_with_jitter() trial_end_time = trial_start_time + self.soa - self._draw(lambda: self.present_iti()) # Do not present stimulus after trial has ended(stimulus on arrival interval). - elif elapsed_time > trial_start_time: + if elapsed_time >= trial_start_time: # if current trial number changed present new stimulus. 
if current_trial > rendering_trial: # Stimulus presentation overwritten by specific experiment @@ -289,6 +293,9 @@ def iti_with_jitter(): rendering_trial = current_trial else: self._draw(lambda: self.present_iti()) + # log 'present iti' with the elapsed time and trial end time + # Log the ITI presentation + # logger.info(f"Present ITI - Trial: {current_trial}, Elapsed Time: {elapsed_time:.3f}s, Trial End Time: {trial_end_time:.3f}s, Trial Start Time: {trial_start_time:.3f}s") if self._user_input('cancel'): return False From f63569b92eb88adf1e696edf97c01af2c4ee2922 Mon Sep 17 00:00:00 2001 From: Benjamin Pettit Date: Mon, 18 Aug 2025 19:27:14 -0400 Subject: [PATCH 23/39] showing double vision --- eegnb/experiments/BlockExperiment.py | 4 +- eegnb/experiments/Experiment.py | 42 ++++++++++++------- .../visual_vep/pattern_reversal_vep.py | 19 +++------ 3 files changed, 36 insertions(+), 29 deletions(-) diff --git a/eegnb/experiments/BlockExperiment.py b/eegnb/experiments/BlockExperiment.py index 355027a6..6ff4d2a8 100644 --- a/eegnb/experiments/BlockExperiment.py +++ b/eegnb/experiments/BlockExperiment.py @@ -22,7 +22,7 @@ class BlockExperiment(BaseExperiment, ABC): """ def __init__(self, exp_name, block_duration, eeg, save_fn, block_trial_size, n_blocks, iti: float, soa: float, jitter: float, - use_vr=False, use_fullscr=True, rift=None): + use_vr=False, use_fullscr=True, stereoscopic=False): """ Initializer for the Block Experiment Class Args: @@ -43,7 +43,7 @@ def __init__(self, exp_name, block_duration, eeg, save_fn, block_trial_size, n_b # Initialize the base experiment with total trials # Pass None for duration if block_duration is None to ignore time spent in instructions - super().__init__(exp_name, block_duration, eeg, save_fn, total_trials, iti, soa, jitter, use_vr, use_fullscr, rift=rift) + super().__init__(exp_name, block_duration, eeg, save_fn, total_trials, iti, soa, jitter, use_vr, use_fullscr, stereoscopic) # Store block-specific parameters self.block_duration = block_duration diff --git a/eegnb/experiments/Experiment.py b/eegnb/experiments/Experiment.py index d54c5f2e..5ebdaefa 100644 --- a/eegnb/experiments/Experiment.py +++ b/eegnb/experiments/Experiment.py @@ -7,11 +7,6 @@ obj = VisualP300({parameters}) obj.run() """ -import logging - -# Add this near the top of your file with other imports -logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s') -logger = logging.getLogger(__name__) from abc import abstractmethod, ABC from typing import Callable, Optional @@ -35,7 +30,7 @@ class BaseExperiment(ABC): def __init__(self, exp_name, duration, eeg, save_fn, n_trials: int, iti: float, soa: float, jitter: float, - use_vr=False, use_fullscr = True, rift: Optional[Rift] = None): + use_vr=False, use_fullscr = True, stereoscopic = False): """ Initializer for the Base Experiment Class Args: @@ -61,10 +56,12 @@ def __init__(self, exp_name, duration, eeg, save_fn, n_trials: int, iti: float, self.soa = soa self.jitter = jitter self.use_vr = use_vr + self.stereoscopic = stereoscopic + if use_vr: + # VR interface accessible by specific experiment classes for customizing and using controllers. 
+ self.rift: Rift = visual.Rift(monoscopic=not stereoscopic, headLocked=True) self.use_fullscr = use_fullscr - self.window_size = [1600,800] - self.rift = rift # Initializing the record duration and the marker names self.record_duration = np.float32(self.duration) @@ -108,11 +105,6 @@ def present_iti(self): self.window.flip() def setup(self, instructions=True): - - if self.use_vr is True and self.rift is None: - # VR interface accessible by specific experiment classes for customizing and using controllers. - self.rift: Rift = visual.Rift(monoscopic=True, headLocked=True) - # Setting up Graphics self.window = ( self.rift if self.use_vr @@ -223,7 +215,29 @@ def get_vr_input(self, vr_controller, button=None, trigger=False): return False def __draw_instructions(self, text): - text.draw() + print(f"Window type: {type(self.window)}") + print(f"Window size: {self.window.size}") + print(f"use_vr: {self.use_vr}") + print(f"rift object: {self.rift}") + + if self.use_vr and self.stereoscopic: + for eye in ["left", "right"]: + self.window.setBuffer(eye) + text.draw() + + # Draw different shapes to each eye + # rift = self.rift + # left_rect = visual.Rect(rift, pos=(-2, 0), fillColor='red') + # right_rect = visual.Rect(rift, pos=(2, 0), fillColor='blue') + # + # rift.setBuffer("left") + # left_rect.draw() + # + # rift.setBuffer("right") + # right_rect.draw() + + else: + text.draw() self.window.flip() def _draw(self, present_stimulus: Callable): diff --git a/eegnb/experiments/visual_vep/pattern_reversal_vep.py b/eegnb/experiments/visual_vep/pattern_reversal_vep.py index 7132798e..c0f744fd 100644 --- a/eegnb/experiments/visual_vep/pattern_reversal_vep.py +++ b/eegnb/experiments/visual_vep/pattern_reversal_vep.py @@ -19,9 +19,7 @@ def __init__(self, display_refresh_rate: int, eeg: Optional[EEG] = None, save_fn iti=0 jitter=0 - rift = visual.Rift(monoscopic=False, headLocked=True) if use_vr else None - - super().__init__("Visual Pattern Reversal VEP", block_duration_seconds, eeg, save_fn, block_trial_size, n_blocks, iti, soa, jitter, use_vr, use_fullscr, rift=rift) + super().__init__("Visual Pattern Reversal VEP", block_duration_seconds, eeg, save_fn, block_trial_size, n_blocks, iti, soa, jitter, use_vr, use_fullscr, stereoscopic=True) self.instruction_text = f"""Welcome to the Visual Pattern Reversal VEP experiment! 
@@ -129,7 +127,6 @@ def _draw_block_instruction(self, current_block: int) -> None: text = visual.TextStim(win=self.window, text=instruction_text, color=[-1, -1, -1]) text.draw() self.fixation.draw() - self.window.flip() def present_block_instructions(self, current_block: int) -> None: if self.use_vr: @@ -138,6 +135,7 @@ def present_block_instructions(self, current_block: int) -> None: self._draw_block_instruction(current_block) else: self._draw_block_instruction(current_block) + self.window.flip() def present_stimulus(self, idx: int): # Get the label of the trial @@ -158,25 +156,20 @@ def present_stimulus(self, idx: int): image = self.stim[checkerboard_frame] image.draw() self.fixation.draw() - self.window.flip() if self.use_vr: self.window.setBuffer(closed_eye) self.black_background.draw() - self.window.flip() + + self.window.flip() # Pushing the sample to the EEG marker = self.markernames[label] self.eeg.push_sample(marker=marker, timestamp=time()) - def _draw_iti(self) -> None: - self.black_background.draw() - self.window.flip() - def present_iti(self): if self.use_vr: for eye in ['left', 'right']: self.window.setBuffer(eye) - self._draw_iti() - else: - self._draw_iti() + self.black_background.draw() + self.window.flip() From fe79d1103d471a7f34005a15833a623eb947c412 Mon Sep 17 00:00:00 2001 From: Benjamin Pettit Date: Mon, 18 Aug 2025 21:45:14 -0400 Subject: [PATCH 24/39] fixed stereoscopic positioning --- eegnb/experiments/Experiment.py | 20 ++--------------- .../visual_vep/pattern_reversal_vep.py | 22 ++++++++++++------- 2 files changed, 16 insertions(+), 26 deletions(-) diff --git a/eegnb/experiments/Experiment.py b/eegnb/experiments/Experiment.py index 5ebdaefa..77a9790b 100644 --- a/eegnb/experiments/Experiment.py +++ b/eegnb/experiments/Experiment.py @@ -215,27 +215,11 @@ def get_vr_input(self, vr_controller, button=None, trigger=False): return False def __draw_instructions(self, text): - print(f"Window type: {type(self.window)}") - print(f"Window size: {self.window.size}") - print(f"use_vr: {self.use_vr}") - print(f"rift object: {self.rift}") - if self.use_vr and self.stereoscopic: - for eye in ["left", "right"]: + for eye, x_pos in [("left", 0.1), ("right", -0.1)]: self.window.setBuffer(eye) + text.pos = (x_pos, 0) text.draw() - - # Draw different shapes to each eye - # rift = self.rift - # left_rect = visual.Rect(rift, pos=(-2, 0), fillColor='red') - # right_rect = visual.Rect(rift, pos=(2, 0), fillColor='blue') - # - # rift.setBuffer("left") - # left_rect.draw() - # - # rift.setBuffer("right") - # right_rect.draw() - else: text.draw() self.window.flip() diff --git a/eegnb/experiments/visual_vep/pattern_reversal_vep.py b/eegnb/experiments/visual_vep/pattern_reversal_vep.py index c0f744fd..e2335a11 100644 --- a/eegnb/experiments/visual_vep/pattern_reversal_vep.py +++ b/eegnb/experiments/visual_vep/pattern_reversal_vep.py @@ -8,6 +8,7 @@ from eegnb.experiments.BlockExperiment import BlockExperiment from stimupy.stimuli.checkerboards import contrast_contrast +QUEST_PPD = 20 class VisualPatternReversalVEP(BlockExperiment): @@ -62,7 +63,7 @@ def create_vr_checkerboard(intensity_checks): # Using standard 1 degree check size (0.5 cpd) return contrast_contrast( visual_size=(20, 20), # size in degrees - covers a good portion of the FOV - ppd=20, # pixels per degree for Quest 2 + ppd=QUEST_PPD, # pixels per degree for Quest 2 frequency=(0.5, 0.5), # spatial frequency (0.5 cpd = 1 degree check size) intensity_checks=intensity_checks, target_shape=(0, 0), @@ -110,7 +111,7 @@ def 
create_checkerboard_stim(intensity_checks): return [create_checkerboard_stim((1, -1)), create_checkerboard_stim((-1, 1))] - def _draw_block_instruction(self, current_block: int) -> None: + def _draw_block_instruction(self, current_block: int, x_pos: float) -> None: if self.use_vr: instruction_text = "Press spacebar or controller when ready." elif current_block % 2 == 0: @@ -124,15 +125,16 @@ def _draw_block_instruction(self, current_block: int) -> None: "Press spacebar or controller when ready." ) - text = visual.TextStim(win=self.window, text=instruction_text, color=[-1, -1, -1]) + text = visual.TextStim(win=self.window, text=instruction_text, color=[-1, -1, -1], pos=(x_pos, 0)) text.draw() + self.fixation.pos = (x_pos, 0) self.fixation.draw() def present_block_instructions(self, current_block: int) -> None: if self.use_vr: - for eye in ["left", "right"]: + for eye, x_pos in [("left", 0.1), ("right", -0.1)]: self.window.setBuffer(eye) - self._draw_block_instruction(current_block) + self._draw_block_instruction(current_block, x_pos) else: self._draw_block_instruction(current_block) self.window.flip() @@ -143,8 +145,8 @@ def present_stimulus(self, idx: int): label = self.trials["parameter"].iloc[idx+block_trial_offset] # eye for presentation - open_eye = 'left' if label == 0 else 'right' - closed_eye = 'left' if label == 1 else 'right' + open_eye, open_x = ('left', 0.1) if label == 0 else ('right', -0.1) + closed_eye, closed_x = ('left', 0.1) if label == 1 else ('right', -0.1) if self.use_vr: self.window.setBuffer(open_eye) @@ -154,13 +156,17 @@ def present_stimulus(self, idx: int): # draw checkerboard checkerboard_frame = idx % 2 image = self.stim[checkerboard_frame] + + window_width = self.window.size[0] + open_x_pix = open_x * (window_width / 2) # Convert norm to pixels + image.pos = (open_x_pix, 0) image.draw() + self.fixation.pos = (open_x, 0) self.fixation.draw() if self.use_vr: self.window.setBuffer(closed_eye) self.black_background.draw() - self.window.flip() # Pushing the sample to the EEG From f8cc28d41da3a2120fbf875af15ef5ab3be5807f Mon Sep 17 00:00:00 2001 From: Benjamin Pettit Date: Tue, 19 Aug 2025 10:35:25 -0400 Subject: [PATCH 25/39] fixed monitor positioning --- eegnb/experiments/Experiment.py | 10 +++++++- .../visual_vep/pattern_reversal_vep.py | 23 +++++++++---------- 2 files changed, 20 insertions(+), 13 deletions(-) diff --git a/eegnb/experiments/Experiment.py b/eegnb/experiments/Experiment.py index 77a9790b..0ed69987 100644 --- a/eegnb/experiments/Experiment.py +++ b/eegnb/experiments/Experiment.py @@ -60,6 +60,14 @@ def __init__(self, exp_name, duration, eeg, save_fn, n_trials: int, iti: float, if use_vr: # VR interface accessible by specific experiment classes for customizing and using controllers. 
self.rift: Rift = visual.Rift(monoscopic=not stereoscopic, headLocked=True) + # eye for presentation + if use_vr and stereoscopic: + self.left_eye_x_pos = 0.1 + self.right_eye_x_pos = -0.1 + else: + self.left_eye_x_pos = 0 + self.right_eye_x_pos = 0 + self.use_fullscr = use_fullscr self.window_size = [1600,800] @@ -216,7 +224,7 @@ def get_vr_input(self, vr_controller, button=None, trigger=False): def __draw_instructions(self, text): if self.use_vr and self.stereoscopic: - for eye, x_pos in [("left", 0.1), ("right", -0.1)]: + for eye, x_pos in [("left", self.left_eye_x_pos), ("right", self.right_eye_x_pos)]: self.window.setBuffer(eye) text.pos = (x_pos, 0) text.draw() diff --git a/eegnb/experiments/visual_vep/pattern_reversal_vep.py b/eegnb/experiments/visual_vep/pattern_reversal_vep.py index e2335a11..1ecd4e1c 100644 --- a/eegnb/experiments/visual_vep/pattern_reversal_vep.py +++ b/eegnb/experiments/visual_vep/pattern_reversal_vep.py @@ -72,13 +72,13 @@ def create_vr_checkerboard(intensity_checks): ) def load_stimulus(self): - # Frame rate, in Hz # GetActualFrameRate() crashes in psychxr due to 'EndFrame called before BeginFrame' actual_frame_rate = np.round(self.window.displayRefreshRate if self.use_vr else self.window.getActualFrameRate()) # Ensure the expected frame rate matches and is divisable by the stimulus rate(soa) - assert actual_frame_rate % self.soa == 0 - assert self.display_refresh_rate == actual_frame_rate + assert actual_frame_rate % self.soa == 0, f"Expected frame rate divisable by stimulus rate: {self.soa}, but got {actual_frame_rate} Hz" + assert self.display_refresh_rate == actual_frame_rate, f"Expected frame rate {self.display_refresh_rate} Hz, but got {actual_frame_rate} Hz" + if self.use_vr: # Create VR checkerboard @@ -132,11 +132,11 @@ def _draw_block_instruction(self, current_block: int, x_pos: float) -> None: def present_block_instructions(self, current_block: int) -> None: if self.use_vr: - for eye, x_pos in [("left", 0.1), ("right", -0.1)]: + for eye, x_pos in [("left", self.left_eye_x_pos), ("right", self.right_eye_x_pos)]: self.window.setBuffer(eye) self._draw_block_instruction(current_block, x_pos) else: - self._draw_block_instruction(current_block) + self._draw_block_instruction(current_block, x_pos=0) self.window.flip() def present_stimulus(self, idx: int): @@ -144,9 +144,8 @@ def present_stimulus(self, idx: int): block_trial_offset = self.current_block_index*self.block_trial_size label = self.trials["parameter"].iloc[idx+block_trial_offset] - # eye for presentation - open_eye, open_x = ('left', 0.1) if label == 0 else ('right', -0.1) - closed_eye, closed_x = ('left', 0.1) if label == 1 else ('right', -0.1) + open_eye, open_x = ('left', self.left_eye_x_pos) if label == 0 else ('right', self.right_eye_x_pos) + closed_eye, closed_x = ('left', self.left_eye_x_pos) if label == 1 else ('right', self.right_eye_x_pos) if self.use_vr: self.window.setBuffer(open_eye) @@ -156,10 +155,10 @@ def present_stimulus(self, idx: int): # draw checkerboard checkerboard_frame = idx % 2 image = self.stim[checkerboard_frame] - - window_width = self.window.size[0] - open_x_pix = open_x * (window_width / 2) # Convert norm to pixels - image.pos = (open_x_pix, 0) + if self.stereoscopic: + window_width = self.window.size[0] + open_pix_x_pos = open_x * (window_width / 2) # Convert norm to pixels + image.pos = (open_pix_x_pos, 0) image.draw() self.fixation.pos = (open_x, 0) self.fixation.draw() From 647be894a36b2129b8dd2fc97e4777189c38bbbe Mon Sep 17 00:00:00 2001 From: Benjamin Pettit 
Date: Tue, 19 Aug 2025 11:59:47 -0400 Subject: [PATCH 26/39] fixed instructions --- eegnb/experiments/visual_vep/pattern_reversal_vep.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/eegnb/experiments/visual_vep/pattern_reversal_vep.py b/eegnb/experiments/visual_vep/pattern_reversal_vep.py index 1ecd4e1c..9963282b 100644 --- a/eegnb/experiments/visual_vep/pattern_reversal_vep.py +++ b/eegnb/experiments/visual_vep/pattern_reversal_vep.py @@ -121,7 +121,7 @@ def _draw_block_instruction(self, current_block: int, x_pos: float) -> None: ) else: instruction_text = ( - "Close your right eye, then focus on the red dot with your left eye. " + "Close your left eye, then focus on the red dot with your right eye. " "Press spacebar or controller when ready." ) From 2915061414a2e9ddf85e38a37f322e52c235a5b2 Mon Sep 17 00:00:00 2001 From: Benjamin Pettit Date: Tue, 19 Aug 2025 20:55:02 -0400 Subject: [PATCH 27/39] try drawing block instructions to single eye --- .../visual_vep/pattern_reversal_vep.py | 43 ++++++++++--------- 1 file changed, 23 insertions(+), 20 deletions(-) diff --git a/eegnb/experiments/visual_vep/pattern_reversal_vep.py b/eegnb/experiments/visual_vep/pattern_reversal_vep.py index 9963282b..de43b42a 100644 --- a/eegnb/experiments/visual_vep/pattern_reversal_vep.py +++ b/eegnb/experiments/visual_vep/pattern_reversal_vep.py @@ -111,32 +111,35 @@ def create_checkerboard_stim(intensity_checks): return [create_checkerboard_stim((1, -1)), create_checkerboard_stim((-1, 1))] - def _draw_block_instruction(self, current_block: int, x_pos: float) -> None: - if self.use_vr: - instruction_text = "Press spacebar or controller when ready." - elif current_block % 2 == 0: - instruction_text = ( - "Close your right eye, then focus on the red dot with your left eye. " - "Press spacebar or controller when ready." - ) - else: - instruction_text = ( - "Close your left eye, then focus on the red dot with your right eye. " - "Press spacebar or controller when ready." - ) - - text = visual.TextStim(win=self.window, text=instruction_text, color=[-1, -1, -1], pos=(x_pos, 0)) + def _present_block_instructions(self, open_eye, closed_eye, open_x): + self.window.setBuffer(open_eye) + text = visual.TextStim(win=self.window, text="Press spacebar or controller when ready.", color=[-1, -1, -1], pos=(open_x, 0)) text.draw() - self.fixation.pos = (x_pos, 0) + self.fixation.pos = (open_x, 0) self.fixation.draw() + self.window.setBuffer(closed_eye) + self.black_background.draw() def present_block_instructions(self, current_block: int) -> None: if self.use_vr: - for eye, x_pos in [("left", self.left_eye_x_pos), ("right", self.right_eye_x_pos)]: - self.window.setBuffer(eye) - self._draw_block_instruction(current_block, x_pos) + if current_block % 2 == 0: + self._present_block_instructions(open_eye="left", closed_eye="right", open_x=self.left_eye_x_pos) + else: + self._present_block_instructions(open_eye="right", closed_eye="left", open_x=self.right_eye_x_pos) else: - self._draw_block_instruction(current_block, x_pos=0) + if current_block % 2 == 0: + instruction_text = ( + "Close your right eye, then focus on the red dot with your left eye. " + "Press spacebar or controller when ready." + ) + else: + instruction_text = ( + "Close your left eye, then focus on the red dot with your right eye. " + "Press spacebar or controller when ready." 
+ ) + text = visual.TextStim(win=self.window, text=instruction_text, color=[-1, -1, -1]) + text.draw() + self.fixation.draw() self.window.flip() def present_stimulus(self, idx: int): From 1fede30353e530850ae14d89a493f695018530dd Mon Sep 17 00:00:00 2001 From: Benjamin Pettit Date: Tue, 19 Aug 2025 21:54:44 -0400 Subject: [PATCH 28/39] fix for globbing multiple sessions/subjects --- eegnb/__init__.py | 3 ++- eegnb/analysis/utils.py | 3 +-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/eegnb/__init__.py b/eegnb/__init__.py index 02ebd828..01bab5ee 100644 --- a/eegnb/__init__.py +++ b/eegnb/__init__.py @@ -36,7 +36,8 @@ def _get_recording_dir( ) # check if directory exists, if not, make the directory - if not path.exists(recording_dir): + # Skip directory creation if wildcards are present (for pattern matching) + if not any('*' in str(part) for part in [subject_str, session_str]) and not path.exists(recording_dir): makedirs(recording_dir) return recording_dir diff --git a/eegnb/analysis/utils.py b/eegnb/analysis/utils.py index d9450981..ef0cebf3 100644 --- a/eegnb/analysis/utils.py +++ b/eegnb/analysis/utils.py @@ -175,10 +175,9 @@ def load_data( """ subject_int = int(subject) - session_int = int(session) subject_str = "*" if subject == "all" else f"subject{subject_int:04}" - session_str = "*" if session == "all" else f"session{session_int:03}" + session_str = "*" if session == "all" else f"session{int(session):03}" recdir = _get_recording_dir(device_name, experiment, subject_str, session_str, site, data_dir) data_path = os.path.join(data_dir, recdir, "*.csv") From 755be8c2a66538bdc4ee561dd263d4e89597e630 Mon Sep 17 00:00:00 2001 From: Benjamin Pettit Date: Tue, 19 Aug 2025 22:54:19 -0400 Subject: [PATCH 29/39] try using consistent luminance in headset --- .../visual_vep/pattern_reversal_vep.py | 22 +++++++++++++------ 1 file changed, 15 insertions(+), 7 deletions(-) diff --git a/eegnb/experiments/visual_vep/pattern_reversal_vep.py b/eegnb/experiments/visual_vep/pattern_reversal_vep.py index de43b42a..d0430482 100644 --- a/eegnb/experiments/visual_vep/pattern_reversal_vep.py +++ b/eegnb/experiments/visual_vep/pattern_reversal_vep.py @@ -93,12 +93,19 @@ def load_stimulus(self): else: size = (self.window_size[1], self.window_size[1]) - # the surrounding / periphery needs to be dark + # the surrounding / periphery needs to be dark when not vr. self.black_background = visual.Rect(self.window, width=self.window.size[0], height=self.window.size[1], fillColor='black') + # a grey background must be used in vr to maintain luminence. 
+ self.grey_background = visual.Rect(self.window, + width=self.window.size[0], + height=self.window.size[1], + fillColor=[-0.22, -0.22, -0.22]) + + # fixation grating_sf = 400 if self.use_vr else 0.2 self.fixation = visual.GratingStim(win=self.window, pos=[0, 0], sf=grating_sf, color=[1, 0, 0]) @@ -111,21 +118,21 @@ def create_checkerboard_stim(intensity_checks): return [create_checkerboard_stim((1, -1)), create_checkerboard_stim((-1, 1))] - def _present_block_instructions(self, open_eye, closed_eye, open_x): + def _present_vr_block_instructions(self, open_eye, closed_eye, open_x): self.window.setBuffer(open_eye) text = visual.TextStim(win=self.window, text="Press spacebar or controller when ready.", color=[-1, -1, -1], pos=(open_x, 0)) text.draw() self.fixation.pos = (open_x, 0) self.fixation.draw() self.window.setBuffer(closed_eye) - self.black_background.draw() + self.grey_background.draw() def present_block_instructions(self, current_block: int) -> None: if self.use_vr: if current_block % 2 == 0: - self._present_block_instructions(open_eye="left", closed_eye="right", open_x=self.left_eye_x_pos) + self._present_vr_block_instructions(open_eye="left", closed_eye="right", open_x=self.left_eye_x_pos) else: - self._present_block_instructions(open_eye="right", closed_eye="left", open_x=self.right_eye_x_pos) + self._present_vr_block_instructions(open_eye="right", closed_eye="left", open_x=self.right_eye_x_pos) else: if current_block % 2 == 0: instruction_text = ( @@ -152,8 +159,9 @@ def present_stimulus(self, idx: int): if self.use_vr: self.window.setBuffer(open_eye) - - self.black_background.draw() + self.grey_background.draw() + else: + self.black_background.draw() # draw checkerboard checkerboard_frame = idx % 2 From 78f965dc739781ad7a0dcf1087c2123d75e78c5f Mon Sep 17 00:00:00 2001 From: Benjamin Pettit Date: Wed, 20 Aug 2025 08:58:23 -0400 Subject: [PATCH 30/39] use black background for other eye during instructions --- eegnb/experiments/visual_vep/pattern_reversal_vep.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/eegnb/experiments/visual_vep/pattern_reversal_vep.py b/eegnb/experiments/visual_vep/pattern_reversal_vep.py index d0430482..b4abdc7c 100644 --- a/eegnb/experiments/visual_vep/pattern_reversal_vep.py +++ b/eegnb/experiments/visual_vep/pattern_reversal_vep.py @@ -125,7 +125,7 @@ def _present_vr_block_instructions(self, open_eye, closed_eye, open_x): self.fixation.pos = (open_x, 0) self.fixation.draw() self.window.setBuffer(closed_eye) - self.grey_background.draw() + self.black_background.draw() def present_block_instructions(self, current_block: int) -> None: if self.use_vr: From 2e8522c0a250c0f71bbe7072a6b4aa51274bca9c Mon Sep 17 00:00:00 2001 From: Benjamin Pettit Date: Thu, 28 Aug 2025 20:09:01 +1000 Subject: [PATCH 31/39] refactored to improve performance --- .../visual_vep/pattern_reversal_vep.py | 106 +++++++++++------- 1 file changed, 66 insertions(+), 40 deletions(-) diff --git a/eegnb/experiments/visual_vep/pattern_reversal_vep.py b/eegnb/experiments/visual_vep/pattern_reversal_vep.py index b4abdc7c..b308ca7a 100644 --- a/eegnb/experiments/visual_vep/pattern_reversal_vep.py +++ b/eegnb/experiments/visual_vep/pattern_reversal_vep.py @@ -3,7 +3,7 @@ from pandas import DataFrame from psychopy import visual -from typing import Optional +from typing import Optional, Dict, Any from eegnb.devices.eeg import EEG from eegnb.experiments.BlockExperiment import BlockExperiment from stimupy.stimuli.checkerboards import contrast_contrast @@ -71,7 +71,7 @@ def 
create_vr_checkerboard(intensity_checks): tau=0 ) - def load_stimulus(self): + def load_stimulus(self) -> Dict[str, Any]: # Frame rate, in Hz # GetActualFrameRate() crashes in psychxr due to 'EndFrame called before BeginFrame' actual_frame_rate = np.round(self.window.displayRefreshRate if self.use_vr else self.window.getActualFrameRate()) @@ -79,60 +79,90 @@ def load_stimulus(self): assert actual_frame_rate % self.soa == 0, f"Expected frame rate divisable by stimulus rate: {self.soa}, but got {actual_frame_rate} Hz" assert self.display_refresh_rate == actual_frame_rate, f"Expected frame rate {self.display_refresh_rate} Hz, but got {actual_frame_rate} Hz" - if self.use_vr: - # Create VR checkerboard + # Create the VR checkerboard create_checkerboard = self.create_vr_checkerboard - else: - # Create Monitor checkerboard - create_checkerboard = self.create_monitor_checkerboard - - if self.use_vr: # the window is large over the eye, checkerboard should only cover the central vision size = self.window.size / 1.5 else: + # Create the Monitor checkerboard + create_checkerboard = self.create_monitor_checkerboard size = (self.window_size[1], self.window_size[1]) - # the surrounding / periphery needs to be dark when not vr. + # The surrounding / periphery needs to be dark when not using vr. + # Also used for covering eye which is not being stimulated. self.black_background = visual.Rect(self.window, width=self.window.size[0], height=self.window.size[1], fillColor='black') - # a grey background must be used in vr to maintain luminence. + # A grey background behind the checkerboard must be used in vr to maintain luminence. self.grey_background = visual.Rect(self.window, width=self.window.size[0], height=self.window.size[1], fillColor=[-0.22, -0.22, -0.22]) - - # fixation - grating_sf = 400 if self.use_vr else 0.2 - self.fixation = visual.GratingStim(win=self.window, pos=[0, 0], sf=grating_sf, color=[1, 0, 0]) - self.fixation.size = 0.02 if self.use_vr else 0.4 - - def create_checkerboard_stim(intensity_checks): + # Create checkerboard stimuli + def create_checkerboard_stim(intensity_checks, pos): return visual.ImageStim(self.window, image=create_checkerboard(intensity_checks)['img'], - units='pix', size=size, color='white') + units='pix', size=size, color='white', pos=pos) + + # Create fixation stimuli + def create_fixation_stim(pos): + fixation = visual.GratingStim( + win=self.window, + pos=pos, + sf=400 if self.use_vr else 0.2, + color=[1, 0, 0] + ) + fixation.size = 0.02 if self.use_vr else 0.4 + return fixation + + # Create VR block instruction stimuli + def create_vr_block_instruction(pos): + return visual.TextStim(win=self.window, text="Press spacebar or controller when ready.", color=[-1, -1, -1], pos=pos) + + # Create and position stimulus + def create_eye_stimuli(eye_x_pos, pix_x_pos): + return { + 'checkerboards': [ + create_checkerboard_stim((1, -1), pos=(pix_x_pos, 0)), + create_checkerboard_stim((-1, 1), pos=(pix_x_pos, 0)) + ], + 'fixation': create_fixation_stim([eye_x_pos, 0]), + 'vr_block_instructions': create_vr_block_instruction((eye_x_pos, 0)) + } + + # Structure all stimuli in organized dictionary + if self.use_vr: + # Calculate pixel positions for stereoscopic presentation + window_width = self.window.size[0] + left_pix_x_pos = self.left_eye_x_pos * (window_width / 2) + right_pix_x_pos = self.right_eye_x_pos * (window_width / 2) - return [create_checkerboard_stim((1, -1)), create_checkerboard_stim((-1, 1))] + return { + 'left': create_eye_stimuli(self.left_eye_x_pos, 
left_pix_x_pos), + 'right': create_eye_stimuli(self.right_eye_x_pos, right_pix_x_pos) + } + else: + return { + 'monoscopic': create_eye_stimuli(0, 0) + } - def _present_vr_block_instructions(self, open_eye, closed_eye, open_x): + def _present_vr_block_instructions(self, open_eye, closed_eye): self.window.setBuffer(open_eye) - text = visual.TextStim(win=self.window, text="Press spacebar or controller when ready.", color=[-1, -1, -1], pos=(open_x, 0)) - text.draw() - self.fixation.pos = (open_x, 0) - self.fixation.draw() + self.stim[open_eye]['vr_block_instructions'].draw() + self.stim[open_eye]['fixation'].draw() self.window.setBuffer(closed_eye) self.black_background.draw() def present_block_instructions(self, current_block: int) -> None: if self.use_vr: if current_block % 2 == 0: - self._present_vr_block_instructions(open_eye="left", closed_eye="right", open_x=self.left_eye_x_pos) + self._present_vr_block_instructions(open_eye="left", closed_eye="right") else: - self._present_vr_block_instructions(open_eye="right", closed_eye="left", open_x=self.right_eye_x_pos) + self._present_vr_block_instructions(open_eye="right", closed_eye="left") else: if current_block % 2 == 0: instruction_text = ( @@ -146,7 +176,7 @@ def present_block_instructions(self, current_block: int) -> None: ) text = visual.TextStim(win=self.window, text=instruction_text, color=[-1, -1, -1]) text.draw() - self.fixation.draw() + self.stim['monoscopic']['fixation'].draw() self.window.flip() def present_stimulus(self, idx: int): @@ -154,25 +184,21 @@ def present_stimulus(self, idx: int): block_trial_offset = self.current_block_index*self.block_trial_size label = self.trials["parameter"].iloc[idx+block_trial_offset] - open_eye, open_x = ('left', self.left_eye_x_pos) if label == 0 else ('right', self.right_eye_x_pos) - closed_eye, closed_x = ('left', self.left_eye_x_pos) if label == 1 else ('right', self.right_eye_x_pos) + open_eye = 'left' if label == 0 else 'right' + closed_eye = 'left' if label == 1 else 'right' + # draw checkerboard and fixation if self.use_vr: self.window.setBuffer(open_eye) self.grey_background.draw() + display_key = 'left' if label == 0 else 'right' else: self.black_background.draw() - - # draw checkerboard + display_key = 'monoscopic' + checkerboard_frame = idx % 2 - image = self.stim[checkerboard_frame] - if self.stereoscopic: - window_width = self.window.size[0] - open_pix_x_pos = open_x * (window_width / 2) # Convert norm to pixels - image.pos = (open_pix_x_pos, 0) - image.draw() - self.fixation.pos = (open_x, 0) - self.fixation.draw() + self.stim[display_key]['checkerboards'][checkerboard_frame].draw() + self.stim[display_key]['fixation'].draw() if self.use_vr: self.window.setBuffer(closed_eye) From 19349a96e860988b16054e64cb315238439c571b Mon Sep 17 00:00:00 2001 From: Benjamin Pettit Date: Thu, 28 Aug 2025 20:11:22 +1000 Subject: [PATCH 32/39] allow early exit from instructions --- eegnb/experiments/BlockExperiment.py | 3 ++- eegnb/experiments/Experiment.py | 7 ++++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/eegnb/experiments/BlockExperiment.py b/eegnb/experiments/BlockExperiment.py index 6ff4d2a8..4eeb1d69 100644 --- a/eegnb/experiments/BlockExperiment.py +++ b/eegnb/experiments/BlockExperiment.py @@ -111,7 +111,8 @@ def run(self, instructions=True): instructions (bool): Whether to show the initial experiment instructions """ # Setup the experiment (creates window, loads stimulus once) - self.setup(instructions) + if not self.setup(instructions): + return False # Start EEG 
Stream once for all blocks if self.eeg: diff --git a/eegnb/experiments/Experiment.py b/eegnb/experiments/Experiment.py index 0ed69987..d702ec88 100644 --- a/eegnb/experiments/Experiment.py +++ b/eegnb/experiments/Experiment.py @@ -123,7 +123,7 @@ def setup(self, instructions=True): # Show Instruction Screen if not skipped by the user if instructions: - self.show_instructions() + return self.show_instructions() # Checking for EEG to setup the EEG stream if self.eeg: @@ -138,6 +138,7 @@ def setup(self, instructions=True): print( f"No path for a save file was passed to the experiment. Saving data to {self.save_fn}" ) + return True def show_instructions(self): """ @@ -166,6 +167,10 @@ def show_instructions(self): # Enabling the cursor again self.window.mouseVisible = True + if self._user_input('cancel'): + return False + return True + def _user_input(self, input_type): if input_type == 'start': key_input = 'spacebar' From 74a587cbf541cd093aeaa60e6077d7f1ba12d76a Mon Sep 17 00:00:00 2001 From: Benjamin Pettit Date: Thu, 28 Aug 2025 20:14:44 +1000 Subject: [PATCH 33/39] clean up --- eegnb/experiments/Experiment.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/eegnb/experiments/Experiment.py b/eegnb/experiments/Experiment.py index d702ec88..75513c94 100644 --- a/eegnb/experiments/Experiment.py +++ b/eegnb/experiments/Experiment.py @@ -9,7 +9,7 @@ """ from abc import abstractmethod, ABC -from typing import Callable, Optional +from typing import Callable from eegnb.devices.eeg import EEG from psychopy import prefs from psychopy.visual.rift import Rift @@ -304,14 +304,10 @@ def iti_with_jitter(): rendering_trial = current_trial else: self._draw(lambda: self.present_iti()) - # log 'present iti' with the elapsed time and trial end time - # Log the ITI presentation - # logger.info(f"Present ITI - Trial: {current_trial}, Elapsed Time: {elapsed_time:.3f}s, Trial End Time: {trial_end_time:.3f}s, Trial Start Time: {trial_start_time:.3f}s") if self._user_input('cancel'): return False - - # Return the number of trials that were run + return True def run(self, instructions=True): From 7e085c7b5907c1807cad31bd88835bc2dd6a4445 Mon Sep 17 00:00:00 2001 From: Benjamin Pettit Date: Thu, 28 Aug 2025 20:25:45 +1000 Subject: [PATCH 34/39] made more performant, no loading animation now --- eegnb/experiments/visual_vep/pattern_reversal_vep.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/eegnb/experiments/visual_vep/pattern_reversal_vep.py b/eegnb/experiments/visual_vep/pattern_reversal_vep.py index b308ca7a..5ba1e5cb 100644 --- a/eegnb/experiments/visual_vep/pattern_reversal_vep.py +++ b/eegnb/experiments/visual_vep/pattern_reversal_vep.py @@ -40,7 +40,6 @@ def __init__(self, display_refresh_rate: int, eeg: Optional[EEG] = None, save_fn eye = left_eye if block_num % 2 == 0 else right_eye block_eyes.extend([eye] * block_trial_size) self.parameter = np.array(block_eyes) - self.trials = DataFrame(dict(parameter=self.parameter)) @staticmethod def create_monitor_checkerboard(intensity_checks): @@ -181,8 +180,8 @@ def present_block_instructions(self, current_block: int) -> None: def present_stimulus(self, idx: int): # Get the label of the trial - block_trial_offset = self.current_block_index*self.block_trial_size - label = self.trials["parameter"].iloc[idx+block_trial_offset] + trial_idx = self.current_block_index * self.block_trial_size + idx + label = self.parameter[trial_idx] open_eye = 'left' if label == 0 else 'right' closed_eye = 'left' if label == 1 else 'right' 
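The refactor above drops the pandas DataFrame and indexes the precomputed NumPy label array directly, recovering the per-block trial label as current_block_index * block_trial_size + idx. A minimal sketch of that bookkeeping, assuming the 0/1 eye coding implied by the label checks in present_stimulus (illustrative values only, not part of the patch):

import numpy as np

block_trial_size = 100            # trials per block (constructor default in this series)
n_blocks = 4                      # blocks alternate the stimulated eye
LEFT, RIGHT = 0, 1                # assumed coding: present_stimulus maps 0 -> 'left', 1 -> 'right'

block_eyes = []
for block_num in range(n_blocks):
    eye = LEFT if block_num % 2 == 0 else RIGHT
    block_eyes.extend([eye] * block_trial_size)
parameter = np.array(block_eyes)

# Per-trial lookup as done in present_stimulus after this refactor
current_block_index, idx = 2, 17                        # e.g. the 18th trial of the third block
trial_idx = current_block_index * block_trial_size + idx
assert parameter[trial_idx] == LEFT                     # even-numbered blocks stimulate the left eye

The array lookup replaces the per-trial .iloc call on a DataFrame that was only ever read positionally.
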
From 3b2fedc66dacad34f90c372580c726358755440c Mon Sep 17 00:00:00 2001 From: Benjamin Pettit Date: Thu, 28 Aug 2025 20:29:56 +1000 Subject: [PATCH 35/39] optimize again --- eegnb/experiments/visual_vep/pattern_reversal_vep.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/eegnb/experiments/visual_vep/pattern_reversal_vep.py b/eegnb/experiments/visual_vep/pattern_reversal_vep.py index 5ba1e5cb..7605f05f 100644 --- a/eegnb/experiments/visual_vep/pattern_reversal_vep.py +++ b/eegnb/experiments/visual_vep/pattern_reversal_vep.py @@ -1,6 +1,5 @@ from time import time import numpy as np -from pandas import DataFrame from psychopy import visual from typing import Optional, Dict, Any @@ -190,14 +189,14 @@ def present_stimulus(self, idx: int): if self.use_vr: self.window.setBuffer(open_eye) self.grey_background.draw() - display_key = 'left' if label == 0 else 'right' + display = self.stim['left' if label == 0 else 'right'] else: self.black_background.draw() - display_key = 'monoscopic' + display = self.stim['monoscopic'] checkerboard_frame = idx % 2 - self.stim[display_key]['checkerboards'][checkerboard_frame].draw() - self.stim[display_key]['fixation'].draw() + display['checkerboards'][checkerboard_frame].draw() + display['fixation'].draw() if self.use_vr: self.window.setBuffer(closed_eye) From 464f3a5dc4e9db2e9f09c205f47050ecc5135c12 Mon Sep 17 00:00:00 2001 From: Benjamin Pettit Date: Sun, 31 Aug 2025 18:35:02 +1000 Subject: [PATCH 36/39] improved focus --- eegnb/experiments/Experiment.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/eegnb/experiments/Experiment.py b/eegnb/experiments/Experiment.py index 75513c94..726ea604 100644 --- a/eegnb/experiments/Experiment.py +++ b/eegnb/experiments/Experiment.py @@ -61,9 +61,9 @@ def __init__(self, exp_name, duration, eeg, save_fn, n_trials: int, iti: float, # VR interface accessible by specific experiment classes for customizing and using controllers. self.rift: Rift = visual.Rift(monoscopic=not stereoscopic, headLocked=True) # eye for presentation - if use_vr and stereoscopic: - self.left_eye_x_pos = 0.1 - self.right_eye_x_pos = -0.1 + if stereoscopic: + self.left_eye_x_pos = 0.2 + self.right_eye_x_pos = -0.2 else: self.left_eye_x_pos = 0 self.right_eye_x_pos = 0 From 5227ced2b7b8e7e74ac8f6b3596195aa72a1ce26 Mon Sep 17 00:00:00 2001 From: Benjamin Pettit Date: Sun, 31 Aug 2025 18:35:22 +1000 Subject: [PATCH 37/39] fixed instructions --- eegnb/experiments/visual_vep/pattern_reversal_vep.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/eegnb/experiments/visual_vep/pattern_reversal_vep.py b/eegnb/experiments/visual_vep/pattern_reversal_vep.py index 7605f05f..8b450512 100644 --- a/eegnb/experiments/visual_vep/pattern_reversal_vep.py +++ b/eegnb/experiments/visual_vep/pattern_reversal_vep.py @@ -23,8 +23,6 @@ def __init__(self, display_refresh_rate: int, eeg: Optional[EEG] = None, save_fn self.instruction_text = f"""Welcome to the Visual Pattern Reversal VEP experiment! - Stay still and focus on the red dot in the centre of the screen. - This experiment will run for {n_blocks} blocks of {block_duration_seconds} seconds each. Press spacebar or controller to continue. 
@@ -119,7 +117,8 @@ def create_fixation_stim(pos): # Create VR block instruction stimuli def create_vr_block_instruction(pos): - return visual.TextStim(win=self.window, text="Press spacebar or controller when ready.", color=[-1, -1, -1], pos=pos) + return visual.TextStim(win=self.window, text="Focus on the red dot, and try not to blink whilst the squares are flashing, press the spacebar or pull the controller trigger when ready to commence.", color=[-1, -1, -1], + pos=pos, height=0.1) # Create and position stimulus def create_eye_stimuli(eye_x_pos, pix_x_pos): From d2e47c979086d8c5309b39d672b89978eeb177bf Mon Sep 17 00:00:00 2001 From: Ben Pettit Date: Thu, 18 Sep 2025 20:45:12 +1000 Subject: [PATCH 38/39] use conda --- .github/workflows/docs.yml | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 5f895369..9f627f11 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -20,13 +20,15 @@ jobs: with: python-version: 3.8 - - name: Install dependencies - run: | - make install-deps-apt - python -m pip install --upgrade pip wheel - python -m pip install attrdict - - make install-deps-wxpython + - name: Install conda + uses: conda-incubator/setup-miniconda@v3 + with: + environment-file: environments/eeg-expy-docsbuild.yml + auto-activate-base: false + python-version: ${{ matrix.python_version }} + activate-environment: eeg-expy-full + channels: conda-forge + miniconda-version: "latest" - name: Build project run: | From fc9d1e3ffcf352572d558f4eedfbe5a6757d88bf Mon Sep 17 00:00:00 2001 From: Ben Pettit Date: Fri, 3 Oct 2025 06:54:49 +1000 Subject: [PATCH 39/39] revert change --- eegnb/experiments/Experiment.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/eegnb/experiments/Experiment.py b/eegnb/experiments/Experiment.py index 726ea604..ef16864d 100644 --- a/eegnb/experiments/Experiment.py +++ b/eegnb/experiments/Experiment.py @@ -47,7 +47,8 @@ def __init__(self, exp_name, duration, eeg, save_fn, n_trials: int, iti: float, """ self.exp_name = exp_name - self.instruction_text = None + self.instruction_text = """\nWelcome to the {} experiment!\nStay still, focus on the centre of the screen, and try not to blink. \nThis block will run for %s seconds.\n + Press spacebar to continue. \n""".format(self.exp_name) self.duration = duration self.eeg: EEG = eeg self.save_fn = save_fn @@ -148,9 +149,7 @@ def show_instructions(self): """ # Splitting instruction text into lines - if self.instruction_text is None: - self.instruction_text = """\nWelcome to the {} experiment!\nStay still, focus on the centre of the screen, and try not to blink. \nThis block will run for %s seconds.\n - Press spacebar to continue. \n""".format(self.exp_name) % self.duration + self.instruction_text = self.instruction_text % self.duration # Disabling the cursor during display of instructions self.window.mouseVisible = False
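
Taken together, the series changes the public surface of the VEP experiment: a required display_refresh_rate argument, stereoscopic VR rendering, and a run() that can be exited from the instruction screens. A hedged usage sketch under those assumptions — the 90 Hz refresh value and the EEG device name are illustrative, not taken from the patches:

from eegnb.devices.eeg import EEG
from eegnb.experiments.visual_vep.pattern_reversal_vep import VisualPatternReversalVEP

# The refresh rate must match what load_stimulus measures at runtime, or its assert fails.
eeg = EEG(device="muse2")              # assumed device name; any board supported by eegnb.devices.eeg
experiment = VisualPatternReversalVEP(
    display_refresh_rate=90,           # e.g. Quest 2 via Link; use the monitor's rate for non-VR runs
    eeg=eeg,
    block_duration_seconds=50,
    block_trial_size=100,
    n_blocks=4,
    use_vr=True,
)
experiment.run()                       # returns False if the participant cancels during the instructions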