# coding: utf-8
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:

"""
===================
dMRI: Preprocessing
===================

Introduction
============

This script, dmri_preprocessing.py, demonstrates how to prepare dMRI data
for tractography and connectivity analysis with nipype.

We perform this analysis using the FSL course data, which can be downloaded
from: http://www.fmrib.ox.ac.uk/fslcourse/fsl_course_data2.tar.gz

The script can be executed from the command line with
``python dmri_preprocessing.py``.

Import the necessary modules from nipype.
"""

import os  # system functions
import nipype.interfaces.io as nio  # Data i/o
import nipype.interfaces.utility as niu  # utility
import nipype.algorithms.misc as misc

import nipype.pipeline.engine as pe  # pypeline engine

from nipype.interfaces import fsl
from nipype.interfaces import ants


"""
Load nipype's dedicated workflows for the preprocessing of dMRI data:
:func:`nipype.workflows.dmri.fsl.artifacts.all_fsl_pipeline`,
as the data include a *b0* volume with reversed encoding direction
(*P>>>A*, or *y*), in contrast with the general acquisition encoding,
which is *A>>>P* (or *-y* in RAS systems).
"""

from nipype.workflows.dmri.fsl.artifacts import all_fsl_pipeline, remove_bias

"""
Map field names to individual subject runs.
"""

info = dict(dwi=[['subject_id', 'dwidata']],
            bvecs=[['subject_id', 'bvecs']],
            bvals=[['subject_id', 'bvals']],
            dwi_rev=[['subject_id', 'nodif_PA']])

infosource = pe.Node(interface=niu.IdentityInterface(fields=['subject_id']),
                     name="infosource")

# Set the identifier of subject 1 in subject_list; we choose the 'preproc'
# dataset as it contains the uncorrected files.
subject_list = ['subj1_preproc']


"""
Here we set up iteration over all the subjects. The following line is a
particular example of the flexibility of the system. The ``iterables``
attribute of ``infosource`` tells the pipeline engine that it should repeat
the analysis on each of the items in ``subject_list``. In the current
example, the entire preprocessing pipeline will be repeated for each subject
contained in ``subject_list``.
"""

infosource.iterables = ('subject_id', subject_list)
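# With more than one entry in ``subject_list`` (for instance, a hypothetical
# ['subj1_preproc', 'subj2_preproc']), the engine would expand the graph and
# repeat the full preprocessing once per subject.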


"""
Now we create a :class:`nipype.interfaces.io.DataGrabber` object and fill in
the information from above about the layout of our data.
:class:`~nipype.pipeline.engine.Node` wraps the interface object and provides
additional housekeeping and pipeline-specific functionality.
"""

datasource = pe.Node(nio.DataGrabber(infields=['subject_id'],
                                     outfields=list(info.keys())),
                     name='datasource')

datasource.inputs.template = "%s/%s"

# This needs to point to the fdt folder obtained after extracting
# http://www.fmrib.ox.ac.uk/fslcourse/fsl_course_data2.tar.gz
datasource.inputs.base_directory = os.path.abspath('fdt1')
datasource.inputs.field_template = dict(dwi='%s/%s.nii.gz',
                                        dwi_rev='%s/%s.nii.gz')
datasource.inputs.template_args = info
datasource.inputs.sort_filelist = True
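
"""
For reference, with the settings above the grabber is expected to resolve
paths of the following form for subject ``subj1_preproc`` (a sketch, assuming
the course data were unpacked into an ``fdt1`` folder next to this script)::

    fdt1/subj1_preproc/dwidata.nii.gz    ->  dwi
    fdt1/subj1_preproc/bvecs             ->  bvecs
    fdt1/subj1_preproc/bvals             ->  bvals
    fdt1/subj1_preproc/nodif_PA.nii.gz   ->  dwi_rev
"""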


"""
An inputnode is used to pass the data obtained by the data grabber to the
actual processing functions.
"""

inputnode = pe.Node(niu.IdentityInterface(fields=["dwi", "bvecs", "bvals",
                                                  "dwi_rev"]),
                    name="inputnode")


"""
Setup for dMRI preprocessing
============================

In this section we initialize the appropriate workflow for the preprocessing
of diffusion images.

Artifacts correction
--------------------

We will use the combination of ``topup`` and ``eddy`` as suggested by FSL.

In order to configure the susceptibility distortion correction (SDC), we
first need to provide the acquisition parameters of our echo-planar imaging
(EPI) sequence.

In particular, we look into the ``acqparams.txt`` file of the selected
subject to gather the encoding direction, the acceleration factor (in
parallel sequences it is > 1), and the readout time or echo spacing.
"""

epi_AP = {'echospacing': 66.5e-3, 'enc_dir': 'y-'}
epi_PA = {'echospacing': 66.5e-3, 'enc_dir': 'y'}
prep = all_fsl_pipeline(epi_params=epi_AP, altepi_params=epi_PA)
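
"""
For reference, ``topup``/``eddy`` work with the *total* readout time of the
EPI train. Under one common convention it can be derived from the echo
spacing, the number of phase-encoding lines and the in-plane acceleration
factor. The helper below is only a sketch with hypothetical numbers, not the
actual sequence parameters of the course data.
"""


def total_readout_time(echo_spacing, pe_lines, accel_factor=1):
    """Approximate total readout time (in seconds) of an EPI acquisition."""
    # Number of actually acquired echoes minus one, times the echo spacing.
    return echo_spacing * (pe_lines / accel_factor - 1)


# Hypothetical example: 0.77 ms echo spacing, 96 phase-encoding lines, GRAPPA 2
# total_readout_time(0.77e-3, 96, accel_factor=2)  # ~= 0.036 s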


"""
Bias field correction
---------------------

Finally, we set up a node to correct for a single multiplicative bias field,
computed on the *b0* image, as suggested in [Jeurissen2014]_.
"""

bias = remove_bias()
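
"""
For reference, a comparable correction could also be configured manually with
the ANTs N4 interface available through nipype. The snippet below is only a
sketch (``b0.nii.gz`` is a hypothetical input file) and is not connected to
the workflow built below::

    n4 = pe.Node(ants.N4BiasFieldCorrection(dimension=3, save_bias=True),
                 name='n4_example')
    n4.inputs.input_image = 'b0.nii.gz'
"""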


"""
Connect nodes in workflow
=========================

We create a higher-level workflow to connect the nodes defined above.
"""

wf = pe.Workflow(name="dMRI_Preprocessing")
wf.base_dir = os.path.abspath('preprocessing_dmri_tutorial')
wf.connect([
    (infosource, datasource, [('subject_id', 'subject_id')]),
    (datasource, prep, [('dwi', 'inputnode.in_file'),
                        ('dwi_rev', 'inputnode.alt_file'),
                        ('bvals', 'inputnode.in_bval'),
                        ('bvecs', 'inputnode.in_bvec')]),
    (prep, bias, [('outputnode.out_file', 'inputnode.in_file'),
                  ('outputnode.out_mask', 'inputnode.in_mask')]),
    (datasource, bias, [('bvals', 'inputnode.in_bval')])
])
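
"""
Optionally, a :class:`~nipype.interfaces.io.DataSink` node can be attached so
that the final, bias-corrected series ends up in a dedicated results folder.
This is a sketch rather than part of the original pipeline: the output folder
name is arbitrary, and we assume the bias-correction workflow exposes its
result as ``outputnode.out_file``, mirroring the convention used by ``prep``
above.
"""

datasink = pe.Node(nio.DataSink(), name='datasink')
datasink.inputs.base_directory = os.path.abspath('preprocessing_dmri_results')

wf.connect([
    # Store each subject's results in a folder named after its identifier.
    (infosource, datasink, [('subject_id', 'container')]),
    # Collect the bias-corrected diffusion series.
    (bias, datasink, [('outputnode.out_file', '@corrected_dwi')])
])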


"""
Run the workflow as a command-line executable
"""

if __name__ == '__main__':
    wf.run()
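    # Optionally, distribute the workflow across cores with one of nipype's
    # execution plugins, e.g. (the number of processes is an arbitrary choice):
    # wf.run(plugin='MultiProc', plugin_args={'n_procs': 4})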
    wf.write_graph()