
Commit 566eaa3

Merge pull request #2 from nipy/master

Merge to update with master from nipy

2 parents c5f27a7 + 1bef289


59 files changed: 5,032 additions and 166 deletions

.gitignore

Lines changed: 1 addition & 0 deletions
@@ -14,3 +14,4 @@
 .pydevproject
 .idea/
 /documentation.zip
+.DS_Store

.travis.yml

Lines changed: 3 additions & 3 deletions
@@ -26,9 +26,9 @@ install:
   - source activate testenv
   - if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then pip install ordereddict; fi
   - conda install --yes numpy scipy nose traits networkx dateutil
-  - pip install nibabel --use-mirrors
-  - pip install python-coveralls --use-mirrors
-  - pip install nose-cov --use-mirrors
+  - pip install nibabel
+  - pip install python-coveralls
+  - pip install nose-cov
   - pip install https://github.com/RDFLib/rdflib/archive/master.zip
   - pip install https://github.com/trungdong/prov/archive/rdf.zip
   - python setup.py install

CHANGES

Lines changed: 1 addition & 0 deletions
@@ -53,6 +53,7 @@ Next release
 * ENH: Added csvReader() utility (https://github.com/nipy/nipype/pull/1044)
 * FIX: typo in nipype.interfaces.freesurfer.utils.py Tkregister2 (https://github.com/nipy/nipype/pull/1083)
 * FIX: SSHDataGrabber outputs now return full path to the grabbed/downloaded files. (https://github.com/nipy/nipype/pull/1086)
+* FIX: Add QA output for TSNR to resting workflow (https://github.com/nipy/nipype/pull/1088)

 Release 0.10.0 (October 10, 2014)
 ============

bin/nipype2boutiques

Lines changed: 8 additions & 0 deletions
@@ -0,0 +1,8 @@
+#!/usr/bin/env python
+# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
+# vi: set ft=python sts=4 ts=4 sw=4 et:
+import sys
+from nipype.utils.nipype2boutiques import main
+
+if __name__ == '__main__':
+    main(sys.argv)

doc/quickstart.rst

Lines changed: 10 additions & 0 deletions
@@ -50,3 +50,13 @@ Developer guides
    devel/gitwash/index

 .. include:: links_names.txt
+
+Useful links for beginners
+===========================
+
+Getting started with Python - Tutorials. `Available here`__
+
+Python for Beginners `Available here`__
+
+__ http://www.codecademy.com/en/tracks/python
+__ https://www.python.org/about/gettingstarted/

doc/users/tutorial_101.rst

Lines changed: 23 additions & 4 deletions
@@ -99,15 +99,15 @@ This results in a workflow containing two isolated nodes:

 **5. Connecting nodes to each other**

-We want to connect the output produced by realignment to the input of
-smoothing. This is done as follows.
+We want to connect the output produced by the node realignment to the input of
+the node smoothing. This is done as follows.

 .. testcode::

    workflow.connect(realigner, 'realigned_files', smoother, 'in_files')

-or alternatively, a more flexible notation can be used. Although not shown here,
-the following notation can be used to connect multiple outputs from one node to
+
+Although not shown here, the following notation can be used to connect multiple outputs from one node to
 multiple inputs (see step 7 below).

 .. testcode::
@@ -189,3 +189,22 @@ inside which are three folders: realign, smooth and artdetect (the names
 of the nodes). The outputs of these routines are in these folders.

 .. include:: ../links_names.txt
+
+.. glossary::
+
+   pipeline
+      Connected series of processes (processes can be run parallel and or sequential)
+
+   workflow
+      (kind of synonymous to pipeline) = hosting the nodes
+
+   node
+      = switching-point within a pipeline, you can give it a name (in the above example e.g. realigner),
+      a node usually requires an or several inputs and will produce an or several outputs
+
+   interface
+      = specific software (e.g. FSL, SPM ...) are wrapped in interfaces, within a node instances of an
+      interface can be run
+
+   modules
+      for each interface the according modules have to be imported in the usual pythonic manner
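The first hunk above points at a more flexible connect notation without showing it. As a rough sketch (not part of this commit), the list form of workflow.connect takes (source, destination, field-pairs) tuples, so one call can wire several outputs of one node to several inputs of another; node and field names below follow the tutorial's realign/smooth example:

    import nipype.interfaces.spm as spm
    import nipype.pipeline.engine as pe

    # Two nodes and a workflow, as in the tutorial text.
    realigner = pe.Node(interface=spm.Realign(), name='realign')
    smoother = pe.Node(interface=spm.Smooth(fwhm=6), name='smooth')
    workflow = pe.Workflow(name='preproc')

    # List form: each tuple is (source_node, dest_node, [(output_field, input_field), ...]).
    workflow.connect([(realigner, smoother, [('realigned_files', 'in_files')])])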

examples/rsfmri_vol_surface_preprocessing_nipy.py

Lines changed: 27 additions & 8 deletions
@@ -387,7 +387,8 @@ def create_reg_workflow(name='registration'):
                                                  'transformed_mean',
                                                  'segmentation_files',
                                                  'anat2target',
-                                                 'aparc'
+                                                 'aparc',
+                                                 'min_cost_file'
                                                  ]),
                       name='outputspec')

@@ -505,7 +506,7 @@ def create_reg_workflow(name='registration'):
     reg.inputs.args = '--float'
     reg.inputs.output_warped_image = 'output_warped_image.nii.gz'
     reg.inputs.num_threads = 4
-    reg.plugin_args = {'qsub_args': '-l nodes=1:ppn=4'}
+    reg.plugin_args = {'sbatch_args': '-c%d' % 4}
     register.connect(stripper, 'out_file', reg, 'moving_image')
     register.connect(inputnode,'target_image', reg,'fixed_image')

@@ -531,6 +532,7 @@ def create_reg_workflow(name='registration'):
     warpmean.inputs.terminal_output = 'file'
     warpmean.inputs.args = '--float'
     warpmean.inputs.num_threads = 4
+    warpmean.plugin_args = {'sbatch_args': '-c%d' % 4}

     register.connect(inputnode,'target_image', warpmean,'reference_image')
     register.connect(inputnode, 'mean_image', warpmean, 'input_image')
@@ -554,6 +556,8 @@ def create_reg_workflow(name='registration'):
     register.connect(reg, 'composite_transform',
                      outputnode, 'anat2target_transform')
     register.connect(merge, 'out', outputnode, 'transforms')
+    register.connect(bbregister, 'min_cost_file',
+                     outputnode, 'min_cost_file')

     return register

@@ -593,6 +597,7 @@ def create_workflow(files,
     realign.inputs.slice_times = slice_times
     realign.inputs.tr = TR
     realign.inputs.slice_info = 2
+    realign.plugin_args = {'sbatch_args': '-c%d' % 4}


     # Comute TSNR on realigned data regressing polynomials upto order 2
@@ -615,6 +620,13 @@
     registration.inputs.inputspec.subjects_dir = subjects_dir
     registration.inputs.inputspec.target_image = target_file

+    """Quantify TSNR in each freesurfer ROI
+    """
+    get_roi_tsnr = MapNode(fs.SegStats(default_color_table=True),
+                           iterfield=['in_file'], name='get_aparc_tsnr')
+    get_roi_tsnr.inputs.avgwf_txt_file = True
+    wf.connect(tsnr, 'tsnr_file', get_roi_tsnr, 'in_file')
+    wf.connect(registration, 'outputspec.aparc', get_roi_tsnr, 'segmentation_file')

     """Use :class:`nipype.algorithms.rapidart` to determine which of the
     images in the functional series are outliers based on deviations in
@@ -754,7 +766,8 @@ def merge_files(in1, in2):
     warpall.inputs.terminal_output = 'file'
     warpall.inputs.reference_image = target_file
     warpall.inputs.args = '--float'
-    warpall.inputs.num_threads = 1
+    warpall.inputs.num_threads = 2
+    warpall.plugin_args = {'sbatch_args': '-c%d' % 2}

     # transform to target
     wf.connect(collector, 'out', warpall, 'input_image')
@@ -874,13 +887,14 @@ def get_names(files, suffix):
     substitutions += [("_filtermotion%d" % i,"") for i in range(11)[::-1]]
     substitutions += [("_filter_noise_nosmooth%d" % i,"") for i in range(11)[::-1]]
     substitutions += [("_makecompcorfilter%d" % i,"") for i in range(11)[::-1]]
+    substitutions += [("_get_aparc_tsnr%d/" % i, "run%d_" % (i + 1)) for i in range(11)[::-1]]

-    substitutions += [("T1_out_brain_pve_0_maths_warped","compcor_csf"),
-                      ("T1_out_brain_pve_1_maths_warped","compcor_gm"),
+    substitutions += [("T1_out_brain_pve_0_maths_warped", "compcor_csf"),
+                      ("T1_out_brain_pve_1_maths_warped", "compcor_gm"),
                       ("T1_out_brain_pve_2_maths_warped", "compcor_wm"),
-                      ("output_warped_image_maths","target_brain_mask"),
-                      ("median_brain_mask","native_brain_mask"),
-                      ("corr_","")]
+                      ("output_warped_image_maths", "target_brain_mask"),
+                      ("median_brain_mask", "native_brain_mask"),
+                      ("corr_", "")]

     regex_subs = [('_combiner.*/sar', '/smooth/'),
                   ('_combiner.*/ar', '/unsmooth/'),
@@ -910,6 +924,11 @@ def get_names(files, suffix):
     wf.connect(filter1, 'out_pf', datasink, 'resting.qa.compmaps.@mc_pF')
     wf.connect(filter2, 'out_f', datasink, 'resting.qa.compmaps')
     wf.connect(filter2, 'out_pf', datasink, 'resting.qa.compmaps.@p')
+    wf.connect(registration, 'outputspec.min_cost_file', datasink, 'resting.qa.mincost')
+    wf.connect(tsnr, 'tsnr_file', datasink, 'resting.qa.tsnr.@map')
+    wf.connect([(get_roi_tsnr, datasink, [('avgwf_txt_file', 'resting.qa.tsnr'),
+                                          ('summary_file', 'resting.qa.tsnr.@summary')])])
+
     wf.connect(bandpass, 'out_files', datasink, 'resting.timeseries.@bandpassed')
     wf.connect(smooth, 'out_file', datasink, 'resting.timeseries.@smoothed')
     wf.connect(createfilter1, 'out_files',
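The hunks above swap SGE-style qsub_args for SLURM-style sbatch_args in each node's plugin_args. A minimal sketch, assuming a SLURM execution plugin and a hypothetical FSL smoothing node (none of this is part of the commit), of how those per-node arguments reach the scheduler:

    import nipype.pipeline.engine as pe
    from nipype.interfaces import fsl

    # Hypothetical node; the point is plugin_args, which a cluster plugin reads
    # when it submits this node as its own job.
    smooth = pe.Node(fsl.Smooth(fwhm=6.0), name='smooth')
    smooth.inputs.in_file = 'functional.nii.gz'       # placeholder input
    smooth.plugin_args = {'sbatch_args': '-c%d' % 4}  # request 4 CPUs for this node

    wf = pe.Workflow(name='qa_example')
    wf.add_nodes([smooth])

    # With a SLURM-based plugin each node becomes an sbatch job and the node's
    # plugin_args are appended to that submission:
    # wf.run(plugin='SLURM', plugin_args={'sbatch_args': '--time=1:00:00'})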

nipype/interfaces/dcmstack.py

Lines changed: 3 additions & 3 deletions
@@ -1,4 +1,4 @@
-"""Provides interfaces to various commands provided by FreeSurfer
+"""Provides interfaces to various commands provided by dcmstack

 Change directory to provide relative paths for doctests
     >>> import os
@@ -56,7 +56,7 @@ class NiftiGeneratorBaseInputSpec(TraitedSpec):

 class NiftiGeneratorBase(BaseInterface):
     '''Base class for interfaces that produce Nifti files, potentially with
-    embeded meta data.'''
+    embedded meta data.'''
     def _get_out_path(self, meta, idx=None):
         '''Return the output path for the gernerated Nifti.'''
         if self.inputs.out_format:
@@ -215,7 +215,7 @@ class LookupMetaInputSpec(TraitedSpec):


 class LookupMeta(BaseInterface):
-    '''Lookup meta data values from a Nifti with embeded meta data.
+    '''Lookup meta data values from a Nifti with embedded meta data.

     Example
     -------

nipype/interfaces/dipy/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -1,3 +1,3 @@
 from .tracks import TrackDensityMap
-from .tensors import TensorMode
+from .tensors import TensorMode, DTI
 from .preprocess import Resample, Denoise

nipype/interfaces/dipy/tensors.py

Lines changed: 104 additions & 25 deletions
@@ -25,16 +25,116 @@
     have_dipy = False
 else:
     import dipy.reconst.dti as dti
-    from dipy.core.gradients import GradientTable
+    from dipy.core.gradients import gradient_table
+    from dipy.io.utils import nifti1_symmat


+def tensor_fitting(data, bvals, bvecs, mask_file=None):
+    """
+    Use dipy to fit DTI
+
+    Parameters
+    ----------
+    in_file : str
+        Full path to a DWI data file.
+    bvals : str
+        Full path to a file containing gradient magnitude information (b-values).
+    bvecs : str
+        Full path to a file containing gradient direction information (b-vectors).
+    mask_file : str, optional
+        Full path to a file containing a binary mask. Defaults to use the entire volume.
+
+    Returns
+    -------
+    TensorFit object, affine
+    """
+    img = nb.load(in_file).get_data()
+    data = img.get_data()
+    affine = img.get_affine()
+    if mask_file is not None:
+        mask = nb.load(self.inputs.mask_file).get_data()
+    else:
+        mask=None
+
+    # Load information about the gradients:
+    gtab = grad.gradient_table(self.inputs.bvals, self.inputs.bvecs)
+
+    # Fit it
+    tenmodel = dti.TensorModel(gtab)
+    return tenmodel.fit(data, mask), affine
+
+
+class DTIInputSpec(TraitedSpec):
+    in_file = File(exists=True, mandatory=True,
+                   desc='The input 4D diffusion-weighted image file')
+    bvecs = File(exists=True, mandatory=True,
+                 desc='The input b-vector text file')
+    bvals = File(exists=True, mandatory=True,
+                 desc='The input b-value text file')
+    mask_file = File(exists=True, mandatory=False,
+                     desc='An optional white matter mask')
+    out_filename = File(
+        genfile=True, desc='The output filename for the DTI parameters image')
+
+
+class DTIOutputSpec(TraitedSpec):
+    out_file = File(exists=True)
+
+
+class DTI(BaseInterface):
+    """
+    Calculates the diffusion tensor model parameters
+
+    Example
+    -------
+
+    >>> import nipype.interfaces.dipy as dipy
+    >>> dti = dipy.DTI()
+    >>> dti.inputs.in_file = 'diffusion.nii'
+    >>> dti.inputs.bvecs = 'bvecs'
+    >>> dti.inputs.bvals = 'bvals'
+    >>> dti.run() # doctest: +SKIP
+    """
+    input_spec = DTIInputSpec
+    output_spec = DTIOutputSpec
+
+    def _run_interface(self, runtime):
+        ten_fit, affine = tensor_fitting(self.inputs.in_file,
+                                         self.inputs.bvals,
+                                         self.inputs.bvecs,
+                                         self.inputs.mask_file)
+        lower_triangular = tenfit.lower_triangular()
+        img = nifti1_symmat(lower_triangular, affine)
+        out_file = op.abspath(self._gen_outfilename())
+        nb.save(img, out_file)
+        iflogger.info('DTI parameters image saved as {i}'.format(i=out_file))
+        return runtime
+
+    def _list_outputs(self):
+        outputs = self._outputs().get()
+        outputs['out_file'] = op.abspath(self._gen_outfilename())
+        return outputs
+
+    def _gen_filename(self, name):
+        if name is 'out_filename':
+            return self._gen_outfilename()
+        else:
+            return None
+
+    def _gen_outfilename(self):
+        _, name, _ = split_filename(self.inputs.in_file)
+        return name + '_dti.nii'
+
+
 class TensorModeInputSpec(TraitedSpec):
     in_file = File(exists=True, mandatory=True,
                    desc='The input 4D diffusion-weighted image file')
     bvecs = File(exists=True, mandatory=True,
                  desc='The input b-vector text file')
     bvals = File(exists=True, mandatory=True,
                  desc='The input b-value text file')
+    mask_file = File(exists=True, mandatory=False,
+                     desc='An optional white matter mask')
     out_filename = File(
         genfile=True, desc='The output filename for the Tensor mode image')

@@ -69,32 +169,11 @@ class TensorMode(BaseInterface):
     output_spec = TensorModeOutputSpec

     def _run_interface(self, runtime):
-        ## Load the 4D image files
-        img = nb.load(self.inputs.in_file)
-        data = img.get_data()
-        affine = img.get_affine()
-
-        ## Load the gradient strengths and directions
-        bvals = np.loadtxt(self.inputs.bvals)
-        gradients = np.loadtxt(self.inputs.bvecs).T
-
-        ## Place in Dipy's preferred format
-        gtab = GradientTable(gradients)
-        gtab.bvals = bvals
-
-        ## Mask the data so that tensors are not fit for
-        ## unnecessary voxels
-        mask = data[..., 0] > 50
-
-        ## Fit the tensors to the data
-        tenmodel = dti.TensorModel(gtab)
-        tenfit = tenmodel.fit(data, mask)
-
-        ## Calculate the mode of each voxel's tensor
-        mode_data = tenfit.mode
+        ten_fit = tensor_fitting(self.inputs.in_file, self.inputs.bvals, self.inputs.bvecs,
+                                 self.inputs.mask_file)

         ## Write as a 3D Nifti image with the original affine
-        img = nb.Nifti1Image(mode_data, affine)
+        img = nb.Nifti1Image(tenfit.mode, affine)
         out_file = op.abspath(self._gen_outfilename())
         nb.save(img, out_file)
         iflogger.info('Tensor mode image saved as {i}'.format(i=out_file))
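For orientation, here is a standalone sketch of the dipy calls the new tensor_fitting() helper and DTI interface build on; file names are placeholders and this is not the committed nipype code:

    import nibabel as nb
    import dipy.reconst.dti as dti
    from dipy.core.gradients import gradient_table
    from dipy.io import read_bvals_bvecs
    from dipy.io.utils import nifti1_symmat

    # Load the 4D DWI volume (placeholder path) and its gradient scheme.
    img = nb.load('diffusion.nii')
    data = img.get_data()
    affine = img.get_affine()
    bvals, bvecs = read_bvals_bvecs('bvals', 'bvecs')
    gtab = gradient_table(bvals, bvecs)

    # Fit the diffusion tensor model, optionally restricted by a boolean mask array.
    tenfit = dti.TensorModel(gtab).fit(data)

    # Save the six unique tensor components as a symmetric-matrix NIfTI,
    # which is what DTI._run_interface writes out as <name>_dti.nii.
    nb.save(nifti1_symmat(tenfit.lower_triangular(), affine), 'diffusion_dti.nii')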
