Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Small changes to dual regression script #3

Open
wants to merge 19 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
19 commits
Select commit Hold shift + click to select a range
028fd93
Revert to one regressor per bad volume, edited doc string
May 1, 2013
6fb63e9
allows for saving residuals image from dr stage 2
May 2, 2013
d577107
add rapid_art.py and change imports in run_image_qa.py to take care o…
Sep 16, 2013
0e39449
create gica_dir variable and added some comments
Sep 16, 2013
06678e1
begin script to create difference maps, determines whether each voxel…
Nov 6, 2013
0ff1063
gets index and value of max stat for each voxel
Nov 6, 2013
ad22ceb
broadcasts data array to 2D and takes mean difference between primary…
Nov 6, 2013
d27f5d9
moved functionality to functions. corrected the way max values are ma…
Nov 6, 2013
2543877
function to calculate difference map for subject is working
Nov 6, 2013
33b82e7
add lines to save to nifti image, added if main statement
Nov 6, 2013
41d0eb6
fixed nibabel import and print statement
Nov 6, 2013
4120e7c
fixed subid pattern
Nov 6, 2013
2a3a4b6
begin adding functionality to create summed difference maps and scaling
Nov 7, 2013
d0ce186
added in functionality to create image of difference between primary …
Nov 7, 2013
a6542ff
renamed sum calculatio to split and added new sum calculation which t…
Nov 7, 2013
7da6cc2
added lines to save out split and sum maps
Nov 7, 2013
b0f25fb
Replaced difference measures with calculation to take mean of other n…
Nov 25, 2013
32ec26c
fixed averaging of other nets and saving to nii
Nov 25, 2013
42f4a9e
Merge branch 'master' of https://github.com/cindeem/connectivity
Jan 27, 2016
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 8 additions & 2 deletions ica/python_dual_regress.py
Original file line number Diff line number Diff line change
Expand Up @@ -135,7 +135,7 @@ def concat_regressors(a,b, outdir = None):
return outf


def sub_spatial_map(infile, design, mask, outdir, desnorm=True, mvt=None):
def sub_spatial_map(infile, design, mask, outdir, desnorm=True, out_res=False, mvt=None):
""" glm on ts data using stage1 txt file as model
Parameters
----------
Expand All @@ -150,6 +150,8 @@ def sub_spatial_map(infile, design, mask, outdir, desnorm=True, mvt=None):
desnorm : bool (True, False, default = True)
switch on normalisation of the design matrix
columns to unit std. deviation
out_res : bool (True, False, default = False)
        option to output residuals image from glm
mvt : file
file containing movement regressors which will be concatenated
to design (output from stage 1 glm)
Expand All @@ -159,18 +161,20 @@ def sub_spatial_map(infile, design, mask, outdir, desnorm=True, mvt=None):
file of 4D timeseries components
stage2_tsz : str
        file of z-transformed 4D timeseries components


Notes
-----
fsl_glm -i <file> -d <outdir>/dr_stage1_${subid}.txt
-o <outdir>/dr_stage2_$s --out_z=<outdir>/dr_stage2_${subid}_Z
--demean -m $OUTPUT/mask <desnorm>;
--demean -m $OUTPUT/mask <desnorm> --out_res=<outdir>/dr_stage2_${subid}_res;
"""

subid = get_subid(infile)
# define outfiles
stage2_ts = os.path.join(outdir, 'dr_stage2_%s'%(subid))
stage2_tsz = os.path.join(outdir,'dr_stage2_%s_Z'%(subid))
stage2_res = os.path.join(outdir,'dr_stage2_%s_res'%(subid))
ext = get_fsl_outputtype()
# add movment regressor to design if necessary
if not mvt is None:
Expand All @@ -185,6 +189,8 @@ def sub_spatial_map(infile, design, mask, outdir, desnorm=True, mvt=None):
# Append des_norm flag to command if desnorm=True (defaults is True).
if desnorm:
cmd = ' '.join([cmd, '--des_norm'])
if out_res:
cmd = ' '.join([cmd, '--out_res=%s'%(stage2_res)])
cout = CommandLine(cmd).run()
if not cout.runtime.returncode == 0:
print cmd
Expand Down
38 changes: 22 additions & 16 deletions scripts/dualregress_userscript_example.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,26 +52,32 @@
if __name__ == '__main__':

###Set paths and specify ICA/template data
#########################################################
#######################################################################
basedir = '/home/jagust/jelman/rsfmri_ica/data'

gica_dir = os.path.join(basedir, 'OldICA_IC0_ecat_2mm_6fwhm_125.gica')
    # suffix of subject-specific ica directories created by melodic
# These will be appended to subj code later
sub_icadirname = '_4d_OldICA_IC0_ecat_2mm_6fwhm_125.ica'
### get mask from groupica
mask = os.path.join(basedir, 'OldICA_IC40_ecat_7mm_125.gica',
'groupmelodic.ica',
'mask.nii.gz')
mask = os.path.join(gica_dir,
'groupmelodic.ica',
'mask.nii.gz')

### location for 4D template components (eg melodic_ICA, laird_4D)
template = os.path.join(basedir, 'OldICA_IC40_ecat_7mm_125.gica',
'groupmelodic.ica',
'melodic_IC.nii.gz')

template = os.path.join(gica_dir,
'groupmelodic.ica',
'melodic_IC.nii.gz')
## Note: Set name of confound file name in the variable mvtfile located
## in dr stage 2 below.
#######################################################################

#Get number of vols in template file so that only ICs of interest
#are merged, not those corresponding to nuisance regressors.
cmd = ' '.join(['fslnvols', template])
num_ics = int(commands.getoutput(cmd))

### output directory
outdir = os.path.join(basedir, 'OldICA_IC40_ecat_7mm_125.gica', 'dual_regress')
outdir = os.path.join(gica_dir, 'dual_regress')
if os.path.isdir(outdir)==False:
os.mkdir(outdir)
else:
Expand All @@ -83,7 +89,7 @@
os.mkdir(outdir)
print outdir, 'exists, moving to ', newdir

### Specify input data filelist, otherwise searches basedir for data
### Specify file listing input data files, otherwise searches basedir for data
if len(sys.argv) ==2: #If specified, load file as list of infiles
args = sys.argv[1]
print 'Using data specified in ', args
Expand Down Expand Up @@ -118,15 +124,15 @@
## If you want to add movement params & spike regressors to stage2 of model
## mvtfile = <confound file>
## Must change input of mvt parameter from None in dr_stage2 below
sub_icadir = ''.join([subid, '_4d_OldICA_IC0_ecat_8mm_125.ica'])
sub_icadir = ''.join([subid, sub_icadirname])
mvtfile = os.path.join(basedir,
subid,
'func',
sub_icadir, 'mc',
'prefiltered_func_data_mcf.par') #Name of confound file

'confound_regressors_6mm.txt') #Name of confound file

## If you want residuals to be output, change out_res to True below
stage2_ts, stage2_tsz = pydr.sub_spatial_map(tmpf, txtf, mask, outdir,
desnorm=True, mvt=mvtfile)
desnorm=True, out_res=True,mvt=mvtfile)

subd.update({subid:stage2_ts})

Expand Down
100 changes: 100 additions & 0 deletions tools/net_affiliation.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,100 @@
import os, sys
import numpy as np
import nibabel as nib
from glob import glob
import general_utilities as gu

def get_dims(data):
    """
    Return the dimensions needed to flatten an array to 2D.

    Input:
        data : numpy array

    Returns:
        n_voxels : int
            product of all axis shapes except the last
        n_scans : int
            equal to the shape of the last axis
    """
    shape = data.shape
    # everything but the last axis collapses into the "voxel" dimension
    return np.prod(shape[:-1]), shape[-1]

def get_primary_net(dat_array2d):
    """
    Given a 2D numpy array, returns an array with the max value across
    the last axis and a masked array with all values except the max

    Input:
        dat_array2d : numpy array (2D, voxels x networks)

    Returns:
        prim_net : numpy array, shape (n_voxels, 1)
            max value across the last dimension for each row
        other_nets : numpy masked array
            masked array of all values from dat_array2d
            except those contained in prim_net
    """
    assert len(dat_array2d.shape) == 2
    # column index of the per-row maximum (the "primary" network)
    prim_net_idx = dat_array2d.argmax(axis=1)
    # keep the max as a column vector so it broadcasts against row-wise stats
    prim_net = dat_array2d.max(axis=1).reshape(dat_array2d.shape[0], 1)
    # mask out each row's primary-network entry, leaving "all the others"
    other_nets = np.ma.array(dat_array2d, mask=False)
    other_nets.mask[np.arange(len(dat_array2d)), prim_net_idx] = True
    return prim_net, other_nets

def calculate_diff_map(dat_array):
    """
    Takes a numpy array and finds the mean difference between the max
    value and all others across the last axis.

    Input:
        dat_array : numpy array (at least 2d); last axis indexes
            the networks

    Returns:
        nets_diff_array : numpy masked array
            The difference between the primary (max) network and the
            mean of all other networks, with shape equal to
            dat_array.shape minus its last axis
    """
    dat_array = np.atleast_2d(dat_array)
    n_voxels, n_nets = get_dims(dat_array)
    # collapse spatial axes so each row is one voxel's network values
    dat_array2d = np.reshape(dat_array, (n_voxels, n_nets))
    prim_net, other_nets = get_primary_net(dat_array2d)
    # column vector of each voxel's mean over the non-primary networks
    other_nets_mean = other_nets.mean(axis=1).reshape(n_voxels, 1)
    nets_diff = prim_net - other_nets_mean
    # restore the original spatial shape (last axis dropped)
    nets_diff_array = np.reshape(nets_diff, dat_array.shape[:-1])
    return nets_diff_array

def main(datafiles):
    """
    Compute and save a network-difference map for each subject file.

    For each 4D nifti file in datafiles: extracts the subject id,
    selects the networks listed in the module-level ``net_idx``,
    computes the primary-vs-other-networks difference map, and saves
    it to ``outdir`` as ``nets_diff_<subid>.nii.gz``.

    Parameters
    ----------
    datafiles : list of str
        paths to subjects' 4D nifti files; assumes networks are
        indexed along the 4th axis — TODO confirm against the
        dual-regression outputs

    Notes
    -----
    Relies on module-level globals ``subjstr``, ``net_idx`` and
    ``outdir`` defined in the ``__main__`` block of this script.
    """
    for subj_file in datafiles:
        # pull the subject id out of the filename using the subjstr pattern
        subid = gu.get_subid(subj_file, subjstr)
        print 'Starting on subject %s'%(subid)
        dat, aff = gu.load_nii(subj_file)
        # keep only the networks of interest (4th axis)
        nets_dat = dat[:,:,:,net_idx]
        nets_diff_array = calculate_diff_map(nets_dat)
        nets_diff_outfile = os.path.join(outdir, ''.join(['nets_diff_',subid,'.nii.gz']))
        gu.save_nii(nets_diff_outfile, nets_diff_array, aff)


if __name__ == '__main__':

    # Path to 4D files in which networks are concatenated over time
    datadir = '/home/jagust/rsfmri_ica/data/Allsubs_YoungICA_2mm_IC30.gica/dual_regress'
    # Directory to output difference maps
    outdir = '/home/jagust/rsfmri_ica/data/Allsubs_YoungICA_2mm_IC30.gica/difference_maps'
    # Regex used by gu.get_subid to extract the subject id from each filename
    subjstr = 'subject[0-9]{5}'
    # Glob pattern matching the dual-regression stage-2 Z images
    dataglobstr = 'dr_stage2_subject*_Z.nii.gz'
    datafiles = glob(os.path.join(datadir, dataglobstr))
    # Indices of networks to include. Start count at 0.
    net_idx = [0,1,2,3,4,6,7,8,9,12,14,15,24,29]

    main(datafiles)

Loading