From 6c63ad9c3768bcc19d59d3ef9e95acff1774d655 Mon Sep 17 00:00:00 2001 From: oesteban Date: Sat, 29 Apr 2017 01:05:45 -0700 Subject: [PATCH 1/6] [FIX] Ensure build fails in Circle when tests fail - Added a dummy test that always fails --- docker/files/run_pytests.sh | 2 ++ nipype/tests/test_nipype.py | 6 +++++- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/docker/files/run_pytests.sh b/docker/files/run_pytests.sh index 7bd1cfb010..f76734ad45 100644 --- a/docker/files/run_pytests.sh +++ b/docker/files/run_pytests.sh @@ -40,4 +40,6 @@ fi # Collect crashfiles find ${WORKDIR} -name "crash-*" -maxdepth 1 -exec mv {} ${WORKDIR}/crashfiles/ \; +echo "Unit tests finished with exit code ${exit_code}" exit ${exit_code} + diff --git a/nipype/tests/test_nipype.py b/nipype/tests/test_nipype.py index 5c1b714617..1b24e3f6b7 100644 --- a/nipype/tests/test_nipype.py +++ b/nipype/tests/test_nipype.py @@ -6,4 +6,8 @@ def test_nipype_info(): get_info() except Exception as e: exception_not_raised = False - assert exception_not_raised \ No newline at end of file + assert exception_not_raised + +def test_fail_always(): + assert False + From b45d764e0e8d77b30a55b1c96bb8008a9c075c87 Mon Sep 17 00:00:00 2001 From: oesteban Date: Sat, 29 Apr 2017 01:10:04 -0700 Subject: [PATCH 2/6] [skip ci] Update CHANGES --- CHANGES | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGES b/CHANGES index cfc2a54135..6bf101b819 100644 --- a/CHANGES +++ b/CHANGES @@ -1,6 +1,7 @@ Upcoming Release ===================== +* FIX: Ensure build fails in Circle when tests fail (https://github.com/nipy/nipype/pull/1981) * ENH: AFNI motion parameter support for FrameWiseDisplacement (https://github.com/nipy/nipype/pull/1840) * ENH: Add ANTs KellyKapowski interface (https://github.com/nipy/nipype/pull/1845) * FIX: AFNI interface bug setting OMP_NUM_THREADS to 1 (https://github.com/nipy/nipype/pull/1728) From 22eeb1f2cf2069674752dbbd313f3ea93c8af596 Mon Sep 17 00:00:00 2001 From: oesteban Date: Sat, 29 Apr 2017 02:33:18 -0700 Subject: [PATCH 3/6] force exit if any test failed --- .circle/tests.sh | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/.circle/tests.sh b/.circle/tests.sh index 6f1fd5459c..856c1a9678 100644 --- a/.circle/tests.sh +++ b/.circle/tests.sh @@ -22,22 +22,30 @@ case ${CIRCLE_NODE_INDEX} in docker run --rm=false -it -v $WORKDIR:/work -w /src/nipype/doc nipype/nipype:py35 /usr/bin/run_builddocs.sh && \ docker run --rm=false -it -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py35 /usr/bin/run_examples.sh test_spm Linear /data/examples/ workflow3d && \ docker run --rm=false -it -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py35 /usr/bin/run_examples.sh test_spm Linear /data/examples/ workflow4d + exitcode=$? ;; 1) docker run --rm=false -it -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py35 /usr/bin/run_examples.sh fmri_spm_dartel Linear /data/examples/ level1 && \ docker run --rm=false -it -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py35 /usr/bin/run_examples.sh fmri_spm_dartel Linear /data/examples/ l2pipeline + exitcode=$? 
;; 2) docker run --rm=false -it -e NIPYPE_NUMBER_OF_CPUS=4 -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py27 /usr/bin/run_examples.sh fmri_spm_nested MultiProc /data/examples/ level1 && \ docker run --rm=false -it -e NIPYPE_NUMBER_OF_CPUS=4 -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py35 /usr/bin/run_examples.sh fmri_spm_nested MultiProc /data/examples/ l2pipeline + exitcode=$? ;; 3) docker run --rm=false -it -e NIPYPE_NUMBER_OF_CPUS=4 -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py35 /usr/bin/run_examples.sh fmri_spm_nested MultiProc /data/examples/ level1 && \ docker run --rm=false -it -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py35 /usr/bin/run_examples.sh fmri_fsl_feeds Linear /data/examples/ l1pipeline && \ docker run --rm=false -it -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py35 /usr/bin/run_examples.sh fmri_fsl_reuse Linear /data/examples/ level1_workflow + exitcode=$? ;; esac cp ${WORKDIR}/tests/*.xml ${CIRCLE_TEST_REPORTS}/tests/ + +# Exit with error if any of the tests failed +if [ "$exitcode" != "0" ]; then exit 1; fi codecov -f "coverage*.xml" -s "${WORKDIR}/tests/" -R "${HOME}/nipype/" -F unittests -e CIRCLE_NODE_INDEX codecov -f "smoketest*.xml" -s "${WORKDIR}/tests/" -R "${HOME}/nipype/" -F smoketests -e CIRCLE_NODE_INDEX + From 56cfc2520b600bce97b85d7cd5b8969979e89838 Mon Sep 17 00:00:00 2001 From: oesteban Date: Sat, 29 Apr 2017 03:16:16 -0700 Subject: [PATCH 4/6] disable dummy test, close #1933 --- nipype/tests/test_nipype.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nipype/tests/test_nipype.py b/nipype/tests/test_nipype.py index 1b24e3f6b7..05aa1abf86 100644 --- a/nipype/tests/test_nipype.py +++ b/nipype/tests/test_nipype.py @@ -8,6 +8,6 @@ def test_nipype_info(): exception_not_raised = False assert exception_not_raised -def test_fail_always(): - assert False +# def test_fail_always(): +# assert False From 6c1d962ef1c521812a2c60a11f42aacbbe5182a9 Mon Sep 17 00:00:00 2001 From: oesteban Date: Sat, 29 Apr 2017 12:07:50 -0700 Subject: [PATCH 5/6] fixing the confounds doc build --- nipype/algorithms/confounds.py | 169 +++++++++++++++++---------------- 1 file changed, 88 insertions(+), 81 deletions(-) diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py index 3b5cdf1e16..0a74fcd57f 100644 --- a/nipype/algorithms/confounds.py +++ b/nipype/algorithms/confounds.py @@ -330,7 +330,7 @@ class CompCorOutputSpec(TraitedSpec): desc='text file containing the noise components') class CompCor(BaseInterface): - ''' + """ Interface with core CompCor computation, used in aCompCor and tCompCor Example @@ -342,7 +342,8 @@ class CompCor(BaseInterface): >>> ccinterface.inputs.num_components = 1 >>> ccinterface.inputs.use_regress_poly = True >>> ccinterface.inputs.regress_poly_degree = 2 - ''' + + """ input_spec = CompCorInputSpec output_spec = CompCorOutputSpec references_ = [{'entry': BibTeX("@article{compcor_2007," @@ -465,8 +466,11 @@ def _make_headers(self, num_col): class ACompCor(CompCor): - ''' Anatomical compcor; for input/output, see CompCor. - If the mask provided is an anatomical mask, CompCor == ACompCor ''' + """ + Anatomical compcor: for inputs and outputs, see CompCor. + When the mask provided is an anatomical mask, then CompCor + is equivalent to ACompCor. 
+    """

     def __init__(self, *args, **kwargs):
         ''' exactly the same as compcor except the header '''
@@ -492,7 +496,7 @@ class TCompCorOutputSpec(CompCorInputSpec):
         desc="voxels exceeding the variance threshold"))

 class TCompCor(CompCor):
-    '''
+    """
     Interface for tCompCor. Computes a ROI mask based on variance of voxels.

     Example
@@ -505,7 +509,8 @@
     >>> ccinterface.inputs.use_regress_poly = True
     >>> ccinterface.inputs.regress_poly_degree = 2
     >>> ccinterface.inputs.percentile_threshold = .03
-    '''
+
+    """
     input_spec = TCompCorInputSpec
     output_spec = TCompCorOutputSpec

@@ -634,7 +639,8 @@ class TSNROutputSpec(TraitedSpec):

 class TSNR(BaseInterface):
-    """Computes the time-course SNR for a time series
+    """
+    Computes the time-course SNR for a time series

     Typically you want to run this on a realigned time-series.

@@ -719,80 +725,6 @@ def _run_interface(self, runtime):
     def _list_outputs(self):
         return self._results

-def is_outlier(points, thresh=3.5):
-    """
-    Returns a boolean array with True if points are outliers and False
-    otherwise.
-
-    Parameters:
-    -----------
-    points : An numobservations by numdimensions array of observations
-    thresh : The modified z-score to use as a threshold. Observations with
-        a modified z-score (based on the median absolute deviation) greater
-        than this value will be classified as outliers.
-
-    Returns:
-    --------
-    mask : A numobservations-length boolean array.
-
-    References:
-    ----------
-    Boris Iglewicz and David Hoaglin (1993), "Volume 16: How to Detect and
-    Handle Outliers", The ASQC Basic References in Quality Control:
-    Statistical Techniques, Edward F. Mykytka, Ph.D., Editor.
-    """
-    if len(points.shape) == 1:
-        points = points[:, None]
-    median = np.median(points, axis=0)
-    diff = np.sum((points - median) ** 2, axis=-1)
-    diff = np.sqrt(diff)
-    med_abs_deviation = np.median(diff)
-
-    modified_z_score = 0.6745 * diff / med_abs_deviation
-
-    timepoints_to_discard = 0
-    for i in range(len(modified_z_score)):
-        if modified_z_score[i] <= thresh:
-            break
-        else:
-            timepoints_to_discard += 1
-
-    return timepoints_to_discard
-
-
-def regress_poly(degree, data, remove_mean=True, axis=-1):
-    ''' returns data with degree polynomial regressed out.
-    Be default it is calculated along the last axis (usu. time).
-    If remove_mean is True (default), the data is demeaned (i.e. degree 0).
-    If remove_mean is false, the data is not.
-    '''
-    IFLOG.debug('Performing polynomial regression on data of shape ' + str(data.shape))
-
-    datashape = data.shape
-    timepoints = datashape[axis]
-
-    # Rearrange all voxel-wise time-series in rows
-    data = data.reshape((-1, timepoints))
-
-    # Generate design matrix
-    X = np.ones((timepoints, 1)) # quick way to calc degree 0
-    for i in range(degree):
-        polynomial_func = Legendre.basis(i + 1)
-        value_array = np.linspace(-1, 1, timepoints)
-        X = np.hstack((X, polynomial_func(value_array)[:, np.newaxis]))
-
-    # Calculate coefficients
-    betas = np.linalg.pinv(X).dot(data.T)
-
-    # Estimation
-    if remove_mean:
-        datahat = X.dot(betas).T
-    else: # disregard the first layer of X, which is degree 0
-        datahat = X[:, 1:].dot(betas[1:, ...]).T
-    regressed_data = data - datahat
-
-    # Back to original shape
-    return regressed_data.reshape(datashape)

 def compute_dvars(in_file, in_mask, remove_zerovariance=False,
                   intensity_normalization=1000):
@@ -921,3 +853,78 @@ def plot_confound(tseries, figsize, name, units=None,
         ax.set_ylim(ylim)
         ax.set_yticklabels([])
     return fig
+
+def is_outlier(points, thresh=3.5):
+    """
+    Returns the number of leading timepoints to discard, counting
+    observations flagged as outliers until the first one within the threshold.
+
+    :param nparray points: a numobservations by numdimensions numpy array of observations
+    :param float thresh: the modified z-score to use as a threshold. Observations with
+        a modified z-score (based on the median absolute deviation) greater
+        than this value will be classified as outliers.
+
+    :return: the number of leading timepoints to discard (int).
+
+    .. note:: References
+
+        Boris Iglewicz and David Hoaglin (1993), "Volume 16: How to Detect and
+        Handle Outliers", The ASQC Basic References in Quality Control:
+        Statistical Techniques, Edward F. Mykytka, Ph.D., Editor.
+
+    """
+    if len(points.shape) == 1:
+        points = points[:, None]
+    median = np.median(points, axis=0)
+    diff = np.sum((points - median) ** 2, axis=-1)
+    diff = np.sqrt(diff)
+    med_abs_deviation = np.median(diff)
+
+    modified_z_score = 0.6745 * diff / med_abs_deviation
+
+    timepoints_to_discard = 0
+    for i in range(len(modified_z_score)):
+        if modified_z_score[i] <= thresh:
+            break
+        else:
+            timepoints_to_discard += 1
+
+    return timepoints_to_discard
+
+
+def regress_poly(degree, data, remove_mean=True, axis=-1):
+    """
+    Returns data with a polynomial of the given degree regressed out.
+
+    :param bool remove_mean: whether or not to demean data (i.e. remove the degree 0 component)
+    :param int axis: the numpy array axis along which regression is performed
+
+    """
+    IFLOG.debug('Performing polynomial regression on data of shape ' + str(data.shape))

+    datashape = data.shape
+    timepoints = datashape[axis]
+
+    # Rearrange all voxel-wise time-series in rows
+    data = data.reshape((-1, timepoints))
+
+    # Generate design matrix
+    X = np.ones((timepoints, 1)) # quick way to calc degree 0
+    for i in range(degree):
+        polynomial_func = Legendre.basis(i + 1)
+        value_array = np.linspace(-1, 1, timepoints)
+        X = np.hstack((X, polynomial_func(value_array)[:, np.newaxis]))
+
+    # Calculate coefficients
+    betas = np.linalg.pinv(X).dot(data.T)
+
+    # Estimation
+    if remove_mean:
+        datahat = X.dot(betas).T
+    else: # disregard the first layer of X, which is degree 0
+        datahat = X[:, 1:].dot(betas[1:, ...]).T
+    regressed_data = data - datahat
+
+    # Back to original shape
+    return regressed_data.reshape(datashape)
+
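For reference, the Legendre-basis detrending that regress_poly implements in the
patch above can be exercised in isolation with plain numpy. Below is a minimal
sketch, assuming only numpy; the detrend_legendre helper and the toy quadratic
signal are illustrative and not part of the patch::

    import numpy as np
    from numpy.polynomial import Legendre

    def detrend_legendre(data, degree=2, remove_mean=True):
        # Same design matrix as regress_poly: a degree-0 column of ones
        # plus one Legendre basis function per polynomial degree.
        timepoints = data.shape[-1]
        flat = data.reshape((-1, timepoints))
        X = np.ones((timepoints, 1))
        grid = np.linspace(-1, 1, timepoints)
        for i in range(degree):
            X = np.hstack((X, Legendre.basis(i + 1)(grid)[:, np.newaxis]))
        betas = np.linalg.pinv(X).dot(flat.T)        # least-squares fit
        if remove_mean:
            fitted = X.dot(betas).T                  # also removes the mean
        else:
            fitted = X[:, 1:].dot(betas[1:, ...]).T  # keeps the degree-0 term
        return (flat - fitted).reshape(data.shape)

    # A pure quadratic trend lies in the span of the degree-2 basis,
    # so the residual should be numerically zero.
    t = np.linspace(-1, 1, 50)
    trend = 3.0 + 2.0 * t + 1.5 * t ** 2
    residual = detrend_legendre(trend[np.newaxis, :], degree=2)
    assert np.allclose(residual, 0.0, atol=1e-8)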
From 2c684f1a18447f6fb68dd6dd35888b0d5b1e3f98 Mon Sep 17 00:00:00 2001
From: oesteban
Date: Sat, 29 Apr 2017 12:44:14 -0700
Subject: [PATCH 6/6] fix errors in niftyreg workflows docstrings

---
 nipype/workflows/smri/niftyreg/groupwise.py | 31 ++++++++++++---------
 1 file changed, 18 insertions(+), 13 deletions(-)

diff --git a/nipype/workflows/smri/niftyreg/groupwise.py b/nipype/workflows/smri/niftyreg/groupwise.py
index 125a017866..e6f7edd9ad 100644
--- a/nipype/workflows/smri/niftyreg/groupwise.py
+++ b/nipype/workflows/smri/niftyreg/groupwise.py
@@ -1,12 +1,10 @@
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
 # vi: set ft=python sts=4 ts=4 sw=4 et:
-'''
-This file provides some common registration routines useful for a variety of
-pipelines.
-
-Including linear and non-linear image co-registration
-'''
+"""
+Example of registration workflows using niftyreg, useful for a variety of
+pipelines, including linear and non-linear image co-registration.
+"""

 from builtins import str, range
 import nipype.interfaces.utility as niu
@@ -20,7 +18,7 @@ def create_linear_gw_step(name="linear_gw_niftyreg",
                           use_mask=False,
                           verbose=False):
     """
-    Creates a workflow that perform linear co-registration of a set of images
+    Creates a workflow that performs linear co-registration of a set of images
     using RegAladin, producing an average image and a set of affine
     transformation matrices linking each of the floating images to the
     average.
@@ -38,6 +36,7 @@
         outputspec.aff_files - The affine transformation files

     Optional arguments::
+
         linear_options_hash - An options dictionary containing a list of
             parameters for RegAladin that take the same form as given in the
             interface (default None)
@@ -51,8 +50,8 @@

     >>> from nipype.workflows.smri.niftyreg import create_linear_gw_step
     >>> lgw = create_linear_gw_step('my_linear_coreg') # doctest: +SKIP
-    >>> lgw.inputs.inputspec.in_files = ['file1.nii.gz', 'file2.nii.gz'] \
-# doctest: +SKIP
+    >>> lgw.inputs.inputspec.in_files = [
+    ...     'file1.nii.gz', 'file2.nii.gz'] # doctest: +SKIP
     >>> lgw.inputs.inputspec.ref_file = ['ref.nii.gz'] # doctest: +SKIP
     >>> lgw.run() # doctest: +SKIP

@@ -121,6 +120,7 @@ def create_nonlinear_gw_step(name="nonlinear_gw_niftyreg",
     cpp transformation linking each of the floating images to the average.
Inputs:: + inputspec.in_files - The input files to be registered inputspec.ref_file - The initial reference image that the input files are registered to @@ -134,6 +134,7 @@ def create_nonlinear_gw_step(name="nonlinear_gw_niftyreg", outputspec.cpp_files - The bspline transformation files Optional arguments:: + nonlinear_options_hash - An options dictionary containing a list of parameters for RegAladin that take the same form as given in the interface (default None) @@ -144,8 +145,8 @@ def create_nonlinear_gw_step(name="nonlinear_gw_niftyreg", ------- >>> from nipype.workflows.smri.niftyreg import create_nonlinear_gw_step >>> nlc = create_nonlinear_gw_step('nonlinear_coreg') # doctest: +SKIP - >>> nlc.inputs.inputspec.in_files = ['file1.nii.gz', 'file2.nii.gz'] \ -# doctest: +SKIP + >>> nlc.inputs.inputspec.in_files = [ + ... 'file1.nii.gz', 'file2.nii.gz'] # doctest: +SKIP >>> nlc.inputs.inputspec.ref_file = ['ref.nii.gz'] # doctest: +SKIP >>> nlc.run() # doctest: +SKIP @@ -246,6 +247,7 @@ def create_groupwise_average(name="atlas_creation", non-linear components. Inputs:: + inputspec.in_files - The input files to be registered inputspec.ref_file - The initial reference image that the input files are registered to @@ -258,12 +260,14 @@ def create_groupwise_average(name="atlas_creation", outputspec.average_image - The average image outputspec.cpp_files - The bspline transformation files + Example ------- + >>> from nipype.workflows.smri.niftyreg import create_groupwise_average >>> node = create_groupwise_average('groupwise_av') # doctest: +SKIP - >>> node.inputs.inputspec.in_files = ['file1.nii.gz', 'file2.nii.gz'] \ -# doctest: +SKIP + >>> node.inputs.inputspec.in_files = [ + ... 'file1.nii.gz', 'file2.nii.gz'] # doctest: +SKIP >>> node.inputs.inputspec.ref_file = ['ref.nii.gz'] # doctest: +SKIP >>> node.inputs.inputspec.rmask_file = ['mask.nii.gz'] # doctest: +SKIP >>> node.run() # doctest: +SKIP @@ -384,3 +388,4 @@ def create_groupwise_average(name="atlas_creation", ]) return workflow +
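For reference, the doctests above compose into the following usage sketch for
the full two-stage (linear then non-linear) groupwise average. The file names
are the placeholder names from the doctests, NiftyReg must be installed for the
workflow to actually run (hence the +SKIP flags), and the MultiProc plugin
argument is an assumption drawn from general nipype usage rather than from this
patch::

    from nipype.workflows.smri.niftyreg import create_groupwise_average

    # Builds the combined linear + non-linear workflow documented above
    wf = create_groupwise_average(name='atlas_creation')
    wf.inputs.inputspec.in_files = ['file1.nii.gz', 'file2.nii.gz']  # placeholders
    wf.inputs.inputspec.ref_file = ['ref.nii.gz']
    wf.inputs.inputspec.rmask_file = ['mask.nii.gz']
    wf.run()  # or wf.run(plugin='MultiProc') to parallelise independent nodes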