Skip to content

Commit 01a78ec

Browse files
authored
Merge pull request #1 from oesteban/rciric-patch-1
FIX: some traits-5.0.0 don't work with Python 2.7
2 parents 689d064 + fd41b74 commit 01a78ec

File tree

4 files changed

+79
-72
lines changed

4 files changed

+79
-72
lines changed

nipype/algorithms/confounds.py

Lines changed: 66 additions & 60 deletions
Original file line number | Diff line number | Diff line change
@@ -388,24 +388,26 @@ class CompCorInputSpec(BaseInterfaceInputSpec):
388388
requires=['mask_files'],
389389
desc=('Position of mask in `mask_files` to use - '
390390
'first is the default.'))
391-
mask_names = traits.List(traits.Str,
391+
mask_names = traits.List(
392+
traits.Str,
392393
desc='Names for provided masks (for printing into metadata). '
393394
'If provided, it must be as long as the final mask list '
394395
'(after any merge and indexing operations).')
395396
components_file = traits.Str(
396397
'components_file.txt',
397398
usedefault=True,
398399
desc='Filename to store physiological components')
399-
num_components = traits.Either('all', traits.Range(low=1),
400-
xor=['variance_threshold'],
400+
num_components = traits.Either(
401+
'all', traits.Range(low=1), xor=['variance_threshold'],
401402
desc='Number of components to return from the decomposition. If '
402403
'`num_components` is `all`, then all components will be '
403404
'retained.')
404-
# 6 for BOLD, 4 for ASL
405-
# automatically instantiated to 6 in CompCor below if neither
406-
# `num_components` nor `variance_threshold` is defined (for
407-
# backward compatibility)
408-
variance_threshold = traits.Float(xor=['num_components'],
405+
# 6 for BOLD, 4 for ASL
406+
# automatically instantiated to 6 in CompCor below if neither
407+
# `num_components` nor `variance_threshold` is defined (for
408+
# backward compatibility)
409+
variance_threshold = traits.Range(
410+
low=0.0, high=1.0, exclude_low=True, exclude_high=True, xor=['num_components'],
409411
desc='Select the number of components to be returned automatically '
410412
'based on their ability to explain variance in the dataset. '
411413
'`variance_threshold` is a fractional value between 0 and 1; '
@@ -438,9 +440,11 @@ class CompCorInputSpec(BaseInterfaceInputSpec):
438440
desc='Repetition time (TR) of series - derived from image header if '
439441
'unspecified')
440442
save_pre_filter = traits.Either(
441-
traits.Bool, File, desc='Save pre-filter basis as text file')
443+
traits.Bool, File, default=False, usedefault=True,
444+
desc='Save pre-filter basis as text file')
442445
save_metadata = traits.Either(
443-
traits.Bool, File, desc='Save component metadata as text file')
446+
traits.Bool, File, default=False, usedefault=True,
447+
desc='Save component metadata as text file')
444448
ignore_initial_volumes = traits.Range(
445449
low=0,
446450
usedefault=True,
@@ -497,20 +501,20 @@ class CompCor(SimpleInterface):
497501
input_spec = CompCorInputSpec
498502
output_spec = CompCorOutputSpec
499503
references_ = [{
504+
'tags': ['method', 'implementation'],
500505
'entry':
501-
BibTeX(
502-
"@article{compcor_2007,"
503-
"title = {A component based noise correction method (CompCor) for BOLD and perfusion based},"
504-
"volume = {37},"
505-
"number = {1},"
506-
"doi = {10.1016/j.neuroimage.2007.04.042},"
507-
"urldate = {2016-08-13},"
508-
"journal = {NeuroImage},"
509-
"author = {Behzadi, Yashar and Restom, Khaled and Liau, Joy and Liu, Thomas T.},"
510-
"year = {2007},"
511-
"pages = {90-101},}"),
512-
'tags': ['method', 'implementation']
513-
}]
506+
BibTeX("""\
507+
@article{compcor_2007,
508+
title = {A component based noise correction method (CompCor) for BOLD and perfusion based},
509+
volume = {37},
510+
number = {1},
511+
doi = {10.1016/j.neuroimage.2007.04.042},
512+
urldate = {2016-08-13},
513+
journal = {NeuroImage},
514+
author = {Behzadi, Yashar and Restom, Khaled and Liau, Joy and Liu, Thomas T.},
515+
year = {2007},
516+
pages = {90-101}
517+
}""")}]
514518

515519
def __init__(self, *args, **kwargs):
516520
''' exactly the same as compcor except the header '''
@@ -606,57 +610,60 @@ def _run_interface(self, runtime):
606610
delimiter='\t',
607611
header='\t'.join(components_header),
608612
comments='')
609-
self._results['components_file'] = os.path.abspath(
610-
self.inputs.components_file)
613+
self._results['components_file'] = os.path.join(
614+
runtime.cwd, self.inputs.components_file)
615+
616+
save_pre_filter = False
617+
if self.inputs.pre_filter in ['polynomial', 'cosine']:
618+
save_pre_filter = self.inputs.save_pre_filter
611619

612-
save_pre_filter = self.inputs.save_pre_filter
613620
if save_pre_filter:
614621
self._results['pre_filter_file'] = save_pre_filter
615622
if save_pre_filter is True:
616-
self._results['pre_filter_file'] = os.path.abspath('pre_filter.tsv')
617-
if self.inputs.pre_filter:
618-
ftype = {
619-
'polynomial': 'Legendre',
620-
'cosine': 'Cosine'
621-
}[self.inputs.pre_filter]
622-
ncols = filter_basis.shape[1] if filter_basis.size > 0 else 0
623-
header = ['{}{:02d}'.format(ftype, i) for i in range(ncols)]
624-
if skip_vols:
625-
old_basis = filter_basis
626-
# nrows defined above
627-
filter_basis = np.zeros(
628-
(nrows, ncols + skip_vols), dtype=filter_basis.dtype)
629-
if old_basis.size > 0:
630-
filter_basis[skip_vols:, :ncols] = old_basis
631-
filter_basis[:skip_vols, -skip_vols:] = np.eye(skip_vols)
632-
header.extend([
633-
'NonSteadyStateOutlier{:02d}'.format(i)
634-
for i in range(skip_vols)
635-
])
636-
np.savetxt(
637-
self._results['pre_filter_file'],
638-
filter_basis,
639-
fmt=b'%.10f',
640-
delimiter='\t',
641-
header='\t'.join(header),
642-
comments='')
623+
self._results['pre_filter_file'] = os.path.join(
624+
runtime.cwd, 'pre_filter.tsv')
625+
626+
ftype = {
627+
'polynomial': 'Legendre',
628+
'cosine': 'Cosine'
629+
}[self.inputs.pre_filter]
630+
ncols = filter_basis.shape[1] if filter_basis.size > 0 else 0
631+
header = ['{}{:02d}'.format(ftype, i) for i in range(ncols)]
632+
if skip_vols:
633+
old_basis = filter_basis
634+
# nrows defined above
635+
filter_basis = np.zeros(
636+
(nrows, ncols + skip_vols), dtype=filter_basis.dtype)
637+
if old_basis.size > 0:
638+
filter_basis[skip_vols:, :ncols] = old_basis
639+
filter_basis[:skip_vols, -skip_vols:] = np.eye(skip_vols)
640+
header.extend([
641+
'NonSteadyStateOutlier{:02d}'.format(i)
642+
for i in range(skip_vols)
643+
])
644+
np.savetxt(
645+
self._results['pre_filter_file'],
646+
filter_basis,
647+
fmt=b'%.10f',
648+
delimiter='\t',
649+
header='\t'.join(header),
650+
comments='')
643651

644652
metadata_file = self.inputs.save_metadata
645653
if metadata_file:
646654
self._results['metadata_file'] = metadata_file
647655
if metadata_file is True:
648656
self._results['metadata_file'] = (
649-
os.path.abspath('component_metadata.tsv'))
657+
os.path.join(runtime.cwd, 'component_metadata.tsv'))
650658
components_names = np.empty(len(metadata['mask']),
651-
dtype='object_')
659+
dtype='object_')
652660
retained = np.where(metadata['retained'])
653661
not_retained = np.where(np.logical_not(metadata['retained']))
654662
components_names[retained] = components_header
655663
components_names[not_retained] = ([
656664
'dropped{}'.format(i) for i in range(len(not_retained[0]))])
657665
with open(self._results['metadata_file'], 'w') as f:
658-
f.write('{}\t{}\t{}\t{}\t{}\n'.format('component',
659-
*list(metadata.keys())))
666+
f.write('\t'.join(['component'] + list(metadata.keys())) + '\n')
660667
for i in zip(components_names, *metadata.values()):
661668
f.write('{0[0]}\t{0[1]}\t{0[2]:.10f}\t'
662669
'{0[3]:.10f}\t{0[4]:.10f}\n'.format(i))
@@ -1317,9 +1324,8 @@ def compute_noise_components(imgseries, mask_images, components_criterion=0.5,
13171324
metadata['cumulative_variance_explained'] = (
13181325
np.hstack((metadata['cumulative_variance_explained'],
13191326
cumulative_variance_explained)))
1320-
metadata['retained'] = (metadata['retained']
1321-
+ [i < num_components
1322-
for i in range(len(s))])
1327+
metadata['retained'] = (
1328+
metadata['retained'] + [i < num_components for i in range(len(s))])
13231329
if components is None:
13241330
if failure_mode == 'error':
13251331
raise ValueError('No components found')

nipype/info.py

Lines changed: 2 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -142,7 +142,8 @@ def get_nipype_gitversion():
142142
'numpy>=%s ; python_version >= "3.7"' % NUMPY_MIN_VERSION_37,
143143
'python-dateutil>=%s' % DATEUTIL_MIN_VERSION,
144144
'scipy>=%s' % SCIPY_MIN_VERSION,
145-
'traits>=%s' % TRAITS_MIN_VERSION,
145+
'traits>=%s,<%s ; python_version == "2.7"' % (TRAITS_MIN_VERSION, '5.0.0'),
146+
'traits>=%s ; python_version >= "3.0"' % TRAITS_MIN_VERSION,
146147
'future>=%s' % FUTURE_MIN_VERSION,
147148
'simplejson>=%s' % SIMPLEJSON_MIN_VERSION,
148149
'prov>=%s' % PROV_VERSION,

nipype/workflows/rsfmri/fsl/resting.py

Lines changed: 0 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -3,7 +3,6 @@
33
# vi: set ft=python sts=4 ts=4 sw=4 et:
44
from __future__ import (print_function, division, unicode_literals,
55
absolute_import)
6-
from builtins import str
76

87
from ....interfaces import fsl as fsl # fsl
98
from ....interfaces import utility as util # utility

nipype/workflows/rsfmri/fsl/tests/test_resting.py

Lines changed: 11 additions & 10 deletions
Original file line number | Diff line number | Diff line change
@@ -89,16 +89,17 @@ def test_create_resting_preproc(self, mock_node, mock_realign_wf):
8989
# assert
9090
expected_file = os.path.abspath(self.out_filenames['components_file'])
9191
with open(expected_file, 'r') as components_file:
92-
components_data = [line.split() for line in components_file]
93-
num_got_components = len(components_data)
94-
assert (num_got_components == self.num_noise_components
95-
or num_got_components == self.fake_data.shape[3])
96-
first_two = [row[:2] for row in components_data[1:]]
97-
assert first_two == [['-0.5172356654', '-0.6973053243'], [
98-
'0.2574722644', '0.1645270737'
99-
], ['-0.0806469590',
100-
'0.5156853779'], ['0.7187176051', '-0.3235820287'],
101-
['-0.3783072450', '0.3406749013']]
92+
components_data = [line.split()
93+
for line in components_file.read().splitlines()]
94+
num_got_components = len(components_data)
95+
assert (num_got_components == self.num_noise_components or
96+
num_got_components == self.fake_data.shape[3])
97+
first_two = [row[:2] for row in components_data[1:]]
98+
assert first_two == [['-0.5172356654', '-0.6973053243'],
99+
['0.2574722644', '0.1645270737'],
100+
['-0.0806469590', '0.5156853779'],
101+
['0.7187176051', '-0.3235820287'],
102+
['-0.3783072450', '0.3406749013']]
102103

103104
fake_data = np.array([[[[2, 4, 3, 9, 1], [3, 6, 4, 7, 4]],
104105
[[8, 3, 4, 6, 2], [4, 0, 4, 4, 2]]],

0 commit comments

Comments (0)