
Commit 4177482

Merge pull request nipy#264 from matthew-brett/open-fmri-spm-example

MRG: make SPM processing example for Open FMRI ds105

Example of how to batch script SPM8 preprocessing for the Open FMRI ds105 dataset.

2 parents: a492f81 + da010df

2 files changed: +362 -14 lines


examples/interfaces/process_ds105.py

Lines changed: 325 additions & 0 deletions
@@ -0,0 +1,325 @@
#!/usr/bin/env python
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
''' Single subject analysis script for SPM / Open FMRI ds105

https://openfmri.org/dataset/ds000105

Download and extract the ds105 archive to some directory.

Run this script with::

    process_ds105.py ~/data/ds105

where ``~/data/ds105`` is the directory containing the ds105 data.

The example uses the very basic MATLAB / SPM interface routines in NIPY.

If you need more than very basic use, please consider using nipype. nipype has
extended capabilities to interface with external tools and for dataflow
management. nipype can handle vanilla SPM in MATLAB or SPM run through the
MATLAB common runtime (free from MATLAB licensing).
'''
import sys
from copy import deepcopy
from os.path import join as pjoin, abspath, splitext, isfile
from glob import glob
from warnings import warn
import gzip

import numpy as np

import nipy.interfaces.matlab as nimat
from nipy.interfaces.spm import (spm_info, make_job, scans_for_fnames,
                                 run_jobdef, fnames_presuffix, fname_presuffix,
                                 fltcols)


# The batch scripts currently need SPM8
nimat.matlab_cmd = 'matlab-spm8 -nodesktop -nosplash'

# This definition is partly for slice timing. We can't do slice timing for this
# dataset because the slice dimension is the first, and SPM assumes it is the
# last.
N_SLICES = 40  # X slices
STUDY_DEF = dict(
    TR = 2.5,
    n_slices = N_SLICES,
    time_to_space = range(1, N_SLICES, 2) + range(2, N_SLICES, 2)
)


def _sorted_prefer_nii(file_list):
    """ Strip any filenames ending nii.gz if matching .nii filename in list
    """
    preferred = []
    for fname in file_list:
        if not fname.endswith('.gz'):
            preferred.append(fname)
        else:
            nogz, ext = splitext(fname)
            if nogz not in file_list:
                preferred.append(fname)
    return sorted(preferred)
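
# A quick illustration of the helper above, using hypothetical file names:
#
#     _sorted_prefer_nii(['bold.nii', 'bold.nii.gz', 'extra.nii.gz'])
#
# returns ['bold.nii', 'extra.nii.gz'] -- a .nii.gz entry is dropped only when
# the matching uncompressed .nii name is already present in the list.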


def get_data(data_path, subj_id):
    data_path = abspath(data_path)
    data_def = {}
    subject_path = pjoin(data_path, 'sub%03d' % subj_id)
    functionals = _sorted_prefer_nii(
        glob(pjoin(subject_path, 'BOLD', 'task*', 'bold*.nii*')))
    anatomicals = _sorted_prefer_nii(
        glob(pjoin(subject_path, 'anatomy', 'highres001.nii*')))
    for flist in (anatomicals, functionals):
        for i, fname in enumerate(flist):
            nogz, gz_ext = splitext(fname)
            if gz_ext == '.gz':
                if not isfile(nogz):
                    contents = gzip.open(fname, 'rb').read()
                    with open(nogz, 'wb') as fobj:
                        fobj.write(contents)
                flist[i] = nogz
    if len(anatomicals) == 0:
        data_def['anatomical'] = None
    else:
        data_def['anatomical'] = anatomicals[0]
    data_def['functionals'] = functionals
    return data_def
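
# For orientation: with the ds105 layout, get_data(data_path, 1) returns a
# dict along the lines of (paths illustrative, run directory names may differ):
#
#     {'anatomical': '<data_path>/sub001/anatomy/highres001.nii',
#      'functionals': ['<data_path>/sub001/BOLD/task001_run001/bold.nii',
#                      '<data_path>/sub001/BOLD/task001_run002/bold.nii',
#                      ...]}
#
# Any .nii.gz images found are decompressed to .nii first, because SPM cannot
# read gzipped NIfTI images.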


def default_ta(tr, nslices):
    slice_time = tr / float(nslices)
    return slice_time * (nslices - 1)
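
# Worked example with the values in STUDY_DEF: TR = 2.5 s and 40 slices give a
# per-slice time of 2.5 / 40 = 0.0625 s, so the default acquisition time TA is
# 0.0625 * 39 = 2.4375 s (the time from the start of the first slice to the
# start of the last slice in a volume).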


class SPMSubjectAnalysis(object):
    """ Class to preprocess single subject in SPM
    """
    def __init__(self, data_def, study_def, ana_def):
        self.data_def = deepcopy(data_def)
        self.study_def = self.add_study_defaults(study_def)
        self.ana_def = self.add_ana_defaults(deepcopy(ana_def))

    def add_study_defaults(self, study_def):
        full_study_def = deepcopy(study_def)
        if 'TA' not in full_study_def:
            full_study_def['TA'] = default_ta(
                full_study_def['TR'], full_study_def['n_slices'])
        return full_study_def

    def add_ana_defaults(self, ana_def):
        full_ana_def = deepcopy(ana_def)
        if 'fwhm' not in full_ana_def:
            full_ana_def['fwhm'] = 8.0
        return full_ana_def

    def slicetime(self, in_prefix='', out_prefix='a'):
        sess_scans = scans_for_fnames(
            fnames_presuffix(self.data_def['functionals'], in_prefix))
        sdef = self.study_def
        stinfo = make_job('temporal', 'st', {
            'scans': sess_scans,
            'so': sdef['time_to_space'],
            'tr': sdef['TR'],
            'ta': sdef['TA'],
            'nslices': float(sdef['n_slices']),
            'refslice': 1,
            'prefix': out_prefix,
            })
        run_jobdef(stinfo)
        return out_prefix + in_prefix


    def realign(self, in_prefix=''):
        sess_scans = scans_for_fnames(
            fnames_presuffix(self.data_def['functionals'], in_prefix))
        rinfo = make_job('spatial', 'realign', [{
            'estimate': {
                'data': sess_scans,
                'eoptions': {
                    'quality': 0.9,
                    'sep': 4.0,
                    'fwhm': 5.0,
                    'rtm': True,
                    'interp': 2.0,
                    'wrap': [0.0, 0.0, 0.0],
                    'weight': []
                    }
                }
            }])
        run_jobdef(rinfo)
        return in_prefix

    def reslice(self, in_prefix='', out_prefix='r', out=('1..n', 'mean')):
        which = [0, 0]
        if 'mean' in out:
            which[1] = 1
        if '1..n' in out or 'all' in out:
            which[0] = 2
        elif '2..n' in out:
            which[0] = 1
        sess_scans = scans_for_fnames(
            fnames_presuffix(self.data_def['functionals'], in_prefix))
        rsinfo = make_job('spatial', 'realign', [{
            'write': {
                'data': np.vstack(sess_scans.flat),
                'roptions': {
                    'which': which,
                    'interp': 4.0,
                    'wrap': [0.0, 0.0, 0.0],
                    'mask': True,
                    'prefix': out_prefix
                    }
                }
            }])
        run_jobdef(rsinfo)
        return out_prefix + in_prefix
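
    # Note on the `out` argument above: `which` follows the SPM realign /
    # reslice convention of [images, mean].  The default out=('1..n', 'mean')
    # asks for resliced copies of every image plus the mean image
    # (which == [2, 1]); process_subject below calls reslice with
    # out=('mean',), i.e. which == [0, 1], writing only the mean image that
    # coregister then uses as its reference.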

    def coregister(self, in_prefix=''):
        func1 = self.data_def['functionals'][0]
        mean_fname = fname_presuffix(func1, 'mean' + in_prefix)
        crinfo = make_job('spatial', 'coreg', [{
            'estimate': {
                'ref': np.asarray(mean_fname, dtype=object),
                'source': np.asarray(self.data_def['anatomical'],
                                     dtype=object),
                'other': [''],
                'eoptions': {
                    'cost_fun': 'nmi',
                    'sep': [4.0, 2.0],
                    'tol': np.array(
                        [0.02, 0.02, 0.02,
                         0.001, 0.001, 0.001,
                         0.01, 0.01, 0.01,
                         0.001, 0.001, 0.001]).reshape(1, 12),
                    'fwhm': [7.0, 7.0]
                    }
                }
            }])
        run_jobdef(crinfo)
        return in_prefix

    def seg_norm(self, in_prefix=''):
        def_tpms = np.zeros((3, 1), dtype=np.object)
        spm_path = spm_info.spm_path
        def_tpms[0] = pjoin(spm_path, 'tpm', 'grey.nii'),
        def_tpms[1] = pjoin(spm_path, 'tpm', 'white.nii'),
        def_tpms[2] = pjoin(spm_path, 'tpm', 'csf.nii')
        data = np.zeros((1,), dtype=object)
        data[0] = self.data_def['anatomical']
        sninfo = make_job('spatial', 'preproc', {
            'data': data,
            'output': {
                'GM': fltcols([0, 0, 1]),
                'WM': fltcols([0, 0, 1]),
                'CSF': fltcols([0, 0, 0]),
                'biascor': 1.0,
                'cleanup': False,
                },
            'opts': {
                'tpm': def_tpms,
                'ngaus': fltcols([2, 2, 2, 4]),
                'regtype': 'mni',
                'warpreg': 1.0,
                'warpco': 25.0,
                'biasreg': 0.0001,
                'biasfwhm': 60.0,
                'samp': 3.0,
                'msk': np.array([], dtype=object),
                }
            })
        run_jobdef(sninfo)
        return in_prefix

    def norm_write(self, in_prefix='', out_prefix='w'):
        sess_scans = scans_for_fnames(
            fnames_presuffix(self.data_def['functionals'], in_prefix))
        matname = fname_presuffix(self.data_def['anatomical'],
                                  suffix='_seg_sn.mat',
                                  use_ext=False)
        subj = {
            'matname': np.zeros((1,), dtype=object),
            'resample': np.vstack(sess_scans.flat),
            }
        subj['matname'][0] = matname
        roptions = {
            'preserve': False,
            'bb': np.array([[-78, -112, -50], [78, 76, 85.0]]),
            'vox': fltcols([2.0, 2.0, 2.0]),
            'interp': 1.0,
            'wrap': [0.0, 0.0, 0.0],
            'prefix': out_prefix,
            }
        nwinfo = make_job('spatial', 'normalise', [{
            'write': {
                'subj': subj,
                'roptions': roptions,
                }
            }])
        run_jobdef(nwinfo)
        # knock out the list of images, replacing with only one
        subj['resample'] = np.zeros((1,), dtype=object)
        subj['resample'][0] = self.data_def['anatomical']
        roptions['interp'] = 4.0
        run_jobdef(nwinfo)
        return out_prefix + in_prefix

    def smooth(self, in_prefix='', out_prefix='s'):
        fwhm = self.ana_def['fwhm']
        try:
            len(fwhm)
        except TypeError:
            fwhm = [fwhm] * 3
        fwhm = np.asarray(fwhm, dtype=np.float).reshape(1, 3)
        sess_scans = scans_for_fnames(
            fnames_presuffix(self.data_def['functionals'], in_prefix))
        sinfo = make_job('spatial', 'smooth',
                         {'data': np.vstack(sess_scans.flat),
                          'fwhm': fwhm,
                          'dtype': 0})
        run_jobdef(sinfo)
        return out_prefix + in_prefix


def process_subject(ddef, study_def, ana_def):
    """ Process subject from subject data dict `ddef`
    """
    if not ddef['anatomical']:
        warn("No anatomical, aborting processing")
        return
    ana = SPMSubjectAnalysis(ddef, study_def, ana_def)
    # st_prefix = ana.slicetime('')  # We can't run slice timing
    st_prefix = ''
    ana.realign(in_prefix=st_prefix)
    ana.reslice(in_prefix=st_prefix, out=('mean',))
    ana.coregister(in_prefix=st_prefix)
    ana.seg_norm()
    n_st_prefix = ana.norm_write(st_prefix)
    ana.smooth(n_st_prefix)
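
# The pipeline above: realign the functionals (estimation only), write a mean
# functional, coregister the anatomical to that mean, estimate segmentation /
# normalization parameters from the anatomical, write normalized functionals
# (prefix 'w') and the normalized anatomical, then smooth (prefix 's').  The
# final smoothed, normalized functional images therefore carry the prefix
# 'sw'.  Slice timing is skipped for the reason given at the top of the
# script.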


def get_subjects(data_path, subj_ids, study_def, ana_def):
    ddefs = []
    for subj_id in subj_ids:
        ddefs.append(get_data(data_path, subj_id))
    return ddefs


def main():
    try:
        data_path = sys.argv[1]
    except IndexError:
        raise OSError('Need ds105 data path as input')
    if len(sys.argv) > 2:
        subj_ids = [int(id) for id in sys.argv[2:]]
    else:
        subj_ids = range(1, 7)
    for subj_id in subj_ids:
        ddef = get_data(data_path, subj_id)
        assert len(ddef['functionals']) in (11, 12)
        process_subject(ddef, STUDY_DEF, {})


if __name__ == '__main__':
    main()
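
# Example invocations, assuming the extracted ds105 data lives in ~/data/ds105
# and this script is executable and on the PATH:
#
#     process_ds105.py ~/data/ds105        # subjects 1 through 6
#     process_ds105.py ~/data/ds105 2 5    # just subjects 2 and 5
#
# Any extra command-line arguments are read as integer subject IDs, as in
# main() above.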
