91 changes: 65 additions & 26 deletions analysis/glm.py
@@ -1,21 +1,25 @@
from os.path import join, expanduser
import nibabel, nilearn.image, pandas, numpy
from templateflow import api as tflow_api
from analysis.glm_wrapper import make_design, whiten_data, fit_glm
from rsatoolbox.data.dataset import Dataset
"""

# The atlas names follow FSL's original acronyms:
# HOCPA  = Harvard-Oxford Cortical Probabilistic Atlas
# HOCPAL = Harvard-Oxford Cortical Probabilistic Atlas Lateralized
# HOSPA  = Harvard-Oxford Subcortical Probabilistic Atlas
"""

from os.path import join, expanduser
import nibabel, nilearn.image, pandas, numpy
from scipy.interpolate import pchip
from templateflow import api as tflow_api
from rsatoolbox.data.dataset import Dataset


sub = 2
run = 1
tr = 1.2
space = 'MNI152NLin2009cAsym'
atlas_name = 'HOCPA'
bids_dir = expanduser('~/Data/caos/BIDS')
bids_dir = expanduser('/Volumes/T7/caos/BIDS')
prep_dir = join(bids_dir, 'derivatives', 'fmriprep')
fname_evts = f'sub-{sub}_task-exp_run-0{run}_events.tsv'
fname_bold = f'sub-{sub}_task-exp_run-0{run}_bold_space-{space}_preproc.nii.gz'
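# (note: these filenames follow the older fMRIPrep derivative naming scheme,
# '*_bold_space-<space>_preproc.nii.gz'; fMRIPrep >= 1.2 writes
# '*_space-<space>_desc-preproc_bold.nii.gz' instead.)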
@@ -36,13 +40,15 @@
bold_resampled_img = nilearn.image.resample_img(
bold_img,
target_affine=atlas_img.affine,
target_shape=atlas_img.shape
target_shape=atlas_img.shape,
interpolation='continuous'
)
print('resampling mask..')
mask_resampled_img = nilearn.image.resample_img(
mask_img,
target_affine=atlas_img.affine,
target_shape=atlas_img.shape
target_shape=atlas_img.shape,
interpolation='nearest'
)
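# (note on the interpolation choices: 'continuous' spline interpolation suits
# the continuous-valued BOLD data, while 'nearest' keeps the brain mask
# strictly binary after resampling.)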

## get data out
@@ -53,31 +59,64 @@
## reshape data to (volumes x voxels)
n_vols = bold3d.shape[-1]
xyz = bold3d.shape[:3]
data = bold3d[mask3d].T
data = bold3d[mask3d, :].T
atlas = atlas3d[mask3d]

## Load events from file and convolve with HRF
events = pandas.read_csv(fpath_evts, sep='\t')
design = make_design(events, n_vols, tr)
## load events and define conditions
events_df = pandas.read_csv(fpath_evts, sep='\t')
events_df['trial_type'] = events_df.entity
block_dur = numpy.median(events_df.duration)

## convolve a standard HRF to the block shape in the design
STANDARD_HRF = numpy.load('hrf.npy')
STANDARD_TR = 0.1
hrf = numpy.convolve(STANDARD_HRF, numpy.ones(int(block_dur/STANDARD_TR)))
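# (illustration: assuming hrf.npy holds a canonical HRF impulse response
# sampled every 0.1 s, convolving it with a boxcar of int(block_dur/0.1)
# ones turns it into the response to a sustained block, still on the 0.1 s
# grid and with STANDARD_HRF.size + int(block_dur/STANDARD_TR) - 1 samples;
# e.g. a 2.0 s block gives a boxcar of 20 ones.)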

## timepoints at which to sample the block response, on the data's TR grid
timepts_block = numpy.arange(0, int((hrf.size-1)*STANDARD_TR), tr)

# resample to desired TR
hrf = pchip(numpy.arange(hrf.size)*STANDARD_TR, hrf)(timepts_block)
hrf = hrf / hrf.max()
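# (pchip builds a shape-preserving piecewise-cubic interpolator over the fine
# 0.1 s grid and evaluates it on the coarser data grid, 1.2 s here; dividing
# by the peak normalises the regressor to a maximum of 1, so each beta is
# scaled as the amplitude of the block response in the data's units.)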

## make design matrix
conditions = events_df.trial_type.unique()
dm = numpy.zeros((n_vols, conditions.size))
all_times = numpy.linspace(0, tr*(n_vols-1), n_vols)
hrf_times = numpy.linspace(0, tr*(len(hrf)-1), len(hrf))
for c, condition in enumerate(conditions):
onsets = events_df[events_df.trial_type == condition].onset.values
yvals = numpy.zeros((n_vols))
# loop over blocks
for o in onsets:
# interpolate to find values at the data sampling time points
f = pchip(o + hrf_times, hrf, extrapolate=False)(all_times)
yvals = yvals + numpy.nan_to_num(f)
dm[:, c] = yvals
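# Optional sanity check (illustrative, not part of this PR):
#   assert dm.shape == (n_vols, conditions.size)
# each column should peak near 1 around its condition's blocks; responses to
# overlapping blocks of the same condition sum, so values above 1 can occur.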

## TODO: add polynomial drift regressors
#pdata = wdata / wdata.mean(axis=0)

## least square fitting
# Accumulating the per-run products by matrix addition below is equivalent to
# concatenating the list of data and the list of designs and fitting them all
# at once, but is more memory efficient.
design = [dm]
data = [data]  # already (volumes x voxels), matching the design rows
X = numpy.vstack(design)
X = numpy.linalg.inv(X.T @ X) @ X.T
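# X now holds the least-squares pseudoinverse inv(X.T @ X) @ X.T of the
# stacked design, so betas = X @ Y solves min ||Y - design @ betas||^2.
# For a full-column-rank design this is equivalent to the one-shot fit
# (sketch for the single-run case used here):
#   betas_direct = numpy.linalg.pinv(numpy.vstack(design)) @ numpy.vstack(data)
# which the run-wise loop below reproduces without stacking all the data.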

## Regress out polynomials
wdata, wdesign = whiten_data(data, design)
## apply the pseudoinverse run-by-run, accumulating the betas
betas = 0
start_col = 0
for run in range(len(data)):
n_vols = data[run].shape[0]
these_cols = numpy.arange(n_vols) + start_col
betas += X[:, these_cols] @ data[run]
start_col += data[run].shape[0]
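# betas now has shape (conditions x voxels): one spatial response pattern per
# condition, which is the (observations x channels) layout that the
# rsatoolbox Dataset created below expects.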

## Transform to Percent Signal Change
## https://www.brainvoyager.com/bv/doc/UsersGuide/StatisticalAnalysis/TimeCourseNormalization.html
pdata = wdata / wdata.mean(axis=0)
## note: the mean is not exactly 1 here, typically 0.995-1.005. Is that a problem?
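# (for reference, the conventional PSC transform is 100 * (y - mean) / mean;
# dividing by the mean alone puts each voxel's baseline at ~1, leaving the
# betas in fractional rather than percent units.)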

## Fit the GLM
betas = fit_glm(wdata, wdesign)
# In [3]: betas.std(axis=0).mean() ## over timepoints
# Out[3]: 12.219756516863749
# In [4]: betas.std(axis=1).mean() ## over voxels
# Out[4]: 9.616672491255056

## export RSAtoolbox dataset
conds = events.entity.to_list()
conds = events_df.entity.to_list()
dataset = Dataset(
measurements=betas,
obs_descriptors=dict(conds=conds),
11 changes: 6 additions & 5 deletions requirements.txt
@@ -1,7 +1,8 @@
Dcm2Bids
#Dcm2Bids
nibabel
pandas
glmdenoise==0.0.5
pybids==0.12.4
templateflow==0.6.3
rsatoolbox==0.0.2
#pybids==0.12.4
templateflow
rsatoolbox
ipython
nilearn