From 0720aa1ac9c672e8b7a6ede4828e603e62778b86 Mon Sep 17 00:00:00 2001 From: mauriliogenovese Date: Fri, 22 Mar 2024 13:56:24 +0100 Subject: [PATCH 01/90] support for gpu queue --- nipype/pipeline/engine/nodes.py | 4 ++ nipype/pipeline/plugins/multiproc.py | 62 ++++++++++++++++--- .../pipeline/plugins/tests/test_multiproc.py | 15 +++++ 3 files changed, 74 insertions(+), 7 deletions(-) diff --git a/nipype/pipeline/engine/nodes.py b/nipype/pipeline/engine/nodes.py index 3756d00ce8..5afea316c2 100644 --- a/nipype/pipeline/engine/nodes.py +++ b/nipype/pipeline/engine/nodes.py @@ -821,6 +821,10 @@ def update(self, **opts): """Update inputs""" self.inputs.update(**opts) + def is_gpu_node(self): + return ((hasattr(self.inputs, 'use_cuda') and self.inputs.use_cuda) + or (hasattr(self.inputs, 'use_gpu') and self.inputs.use_gpu)) + class JoinNode(Node): """Wraps interface objects that join inputs into a list. diff --git a/nipype/pipeline/plugins/multiproc.py b/nipype/pipeline/plugins/multiproc.py index 401b01b388..8213c6c821 100644 --- a/nipype/pipeline/plugins/multiproc.py +++ b/nipype/pipeline/plugins/multiproc.py @@ -100,6 +100,7 @@ class MultiProcPlugin(DistributedPluginBase): - non_daemon: boolean flag to execute as non-daemon processes - n_procs: maximum number of threads to be executed in parallel + - n_gpu_procs: maximum number of GPU threads to be executed in parallel - memory_gb: maximum memory (in GB) that can be used at once. - raise_insufficient: raise error if the requested resources for a node over the maximum `n_procs` and/or `memory_gb` @@ -130,10 +131,22 @@ def __init__(self, plugin_args=None): ) self.raise_insufficient = self.plugin_args.get("raise_insufficient", True) + # GPU found on system + self.n_gpus_visible = MultiProcPlugin.gpu_count() + # proc per GPU set by user + self.n_gpu_procs = plugin_args.get('n_gpu_procs', self.n_gpus_visible) + + # total no. of processes allowed on all gpus + if self.n_gpu_procs > self.n_gpus_visible: + logger.info( + 'Total number of GPUs proc requested (%d) exceeds the available number of GPUs (%d) on the system. Using requested GPU slots at your own risk!' 
% ( + self.n_gpu_procs, self.n_gpus_visible)) + # Instantiate different thread pools for non-daemon processes logger.debug( - "[MultiProc] Starting (n_procs=%d, mem_gb=%0.2f, cwd=%s)", + "[MultiProc] Starting (n_procs=%d, n_gpu_procs=%d, mem_gb=%0.2f, cwd=%s)", self.processors, + self.n_gpu_procs, self.memory_gb, self._cwd, ) @@ -184,9 +197,12 @@ def _prerun_check(self, graph): """Check if any node exceeds the available resources""" tasks_mem_gb = [] tasks_num_th = [] + tasks_gpu_th = [] for node in graph.nodes(): tasks_mem_gb.append(node.mem_gb) tasks_num_th.append(node.n_procs) + if node.is_gpu_node(): + tasks_gpu_th.append(node.n_procs) if np.any(np.array(tasks_mem_gb) > self.memory_gb): logger.warning( @@ -203,6 +219,12 @@ def _prerun_check(self, graph): ) if self.raise_insufficient: raise RuntimeError("Insufficient resources available for job") + if np.any(np.array(tasks_gpu_th) > self.n_gpu_procs): + logger.warning( + 'Nodes demand more GPU than allowed (%d).', + self.n_gpu_procs) + if self.raise_insufficient: + raise RuntimeError('Insufficient GPU resources available for job') def _postrun_check(self): self.pool.shutdown() @@ -213,11 +235,14 @@ def _check_resources(self, running_tasks): """ free_memory_gb = self.memory_gb free_processors = self.processors + free_gpu_slots = self.n_gpu_procs for _, jobid in running_tasks: free_memory_gb -= min(self.procs[jobid].mem_gb, free_memory_gb) free_processors -= min(self.procs[jobid].n_procs, free_processors) + if self.procs[jobid].is_gpu_node(): + free_gpu_slots -= min(self.procs[jobid].n_procs, free_gpu_slots) - return free_memory_gb, free_processors + return free_memory_gb, free_processors, free_gpu_slots def _send_procs_to_workers(self, updatehash=False, graph=None): """ @@ -232,7 +257,7 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): ) # Check available resources by summing all threads and memory used - free_memory_gb, free_processors = self._check_resources(self.pending_tasks) + free_memory_gb, free_processors, free_gpu_slots = self._check_resources(self.pending_tasks) stats = ( len(self.pending_tasks), @@ -241,6 +266,8 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): self.memory_gb, free_processors, self.processors, + free_gpu_slots, + self.n_gpu_procs ) if self._stats != stats: tasks_list_msg = "" @@ -256,13 +283,15 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): tasks_list_msg = indent(tasks_list_msg, " " * 21) logger.info( "[MultiProc] Running %d tasks, and %d jobs ready. 
Free " - "memory (GB): %0.2f/%0.2f, Free processors: %d/%d.%s", + "memory (GB): %0.2f/%0.2f, Free processors: %d/%d, Free GPU slot:%d/%d.%s", len(self.pending_tasks), len(jobids), free_memory_gb, self.memory_gb, free_processors, self.processors, + free_gpu_slots, + self.n_gpu_procs, tasks_list_msg, ) self._stats = stats @@ -304,28 +333,36 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): # Check requirements of this job next_job_gb = min(self.procs[jobid].mem_gb, self.memory_gb) next_job_th = min(self.procs[jobid].n_procs, self.processors) + next_job_gpu_th = min(self.procs[jobid].n_procs, self.n_gpu_procs) + + is_gpu_node = self.procs[jobid].is_gpu_node() # If node does not fit, skip at this moment - if next_job_th > free_processors or next_job_gb > free_memory_gb: + if (next_job_th > free_processors or next_job_gb > free_memory_gb + or (is_gpu_node and next_job_gpu_th > free_gpu_slots)): logger.debug( - "Cannot allocate job %d (%0.2fGB, %d threads).", + "Cannot allocate job %d (%0.2fGB, %d threads, %d GPU slots).", jobid, next_job_gb, next_job_th, + next_job_gpu_th, ) continue free_memory_gb -= next_job_gb free_processors -= next_job_th + if is_gpu_node: + free_gpu_slots -= next_job_gpu_th logger.debug( "Allocating %s ID=%d (%0.2fGB, %d threads). Free: " - "%0.2fGB, %d threads.", + "%0.2fGB, %d threads, %d GPU slots.", self.procs[jobid].fullname, jobid, next_job_gb, next_job_th, free_memory_gb, free_processors, + free_gpu_slots, ) # change job status in appropriate queues @@ -352,6 +389,8 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): self._remove_node_dirs() free_memory_gb += next_job_gb free_processors += next_job_th + if is_gpu_node: + free_gpu_slots -= next_job_gpu_th # Display stats next loop self._stats = None @@ -379,3 +418,12 @@ def _sort_jobs(self, jobids, scheduler="tsort"): key=lambda item: (self.procs[item].mem_gb, self.procs[item].n_procs), ) return jobids + + @staticmethod + def gpu_count(): + n_gpus = 1 + try: + import GPUtil + return len(GPUtil.getGPUs()) + except ImportError: + return n_gpus diff --git a/nipype/pipeline/plugins/tests/test_multiproc.py b/nipype/pipeline/plugins/tests/test_multiproc.py index 938e1aab9e..b954cb9517 100644 --- a/nipype/pipeline/plugins/tests/test_multiproc.py +++ b/nipype/pipeline/plugins/tests/test_multiproc.py @@ -56,6 +56,7 @@ def test_run_multiproc(tmpdir): class InputSpecSingleNode(nib.TraitedSpec): input1 = nib.traits.Int(desc="a random int") input2 = nib.traits.Int(desc="a random int") + use_gpu = nib.traits.Bool(False, mandatory = False, desc="boolean for GPU nodes") class OutputSpecSingleNode(nib.TraitedSpec): @@ -116,6 +117,20 @@ def test_no_more_threads_than_specified(tmpdir): with pytest.raises(RuntimeError): pipe.run(plugin="MultiProc", plugin_args={"n_procs": max_threads}) +def test_no_more_gpu_threads_than_specified(tmpdir): + tmpdir.chdir() + + pipe = pe.Workflow(name="pipe") + n1 = pe.Node(SingleNodeTestInterface(), name="n1", n_procs=2) + n1.inputs.use_gpu = True + n1.inputs.input1 = 4 + pipe.add_nodes([n1]) + + max_threads = 2 + max_gpu = 1 + with pytest.raises(RuntimeError): + pipe.run(plugin="MultiProc", plugin_args={"n_procs": max_threads, 'n_gpu_procs': max_gpu}) + @pytest.mark.skipif( sys.version_info >= (3, 8), reason="multiprocessing issues in Python 3.8" From 6c47dc009aeac77be993d17d5b1fb6ab1fa5d9d2 Mon Sep 17 00:00:00 2001 From: mauriliogenovese Date: Sun, 24 Mar 2024 10:52:44 +0100 Subject: [PATCH 02/90] gputil requirement gputils is required for gpu queue management --- 
nipype/info.py | 1 + 1 file changed, 1 insertion(+) diff --git a/nipype/info.py b/nipype/info.py index a550e4b389..f4fc365e7e 100644 --- a/nipype/info.py +++ b/nipype/info.py @@ -149,6 +149,7 @@ def get_nipype_gitversion(): "filelock>=3.0.0", "etelemetry>=0.2.0", "looseversion!=1.2", + "gputil=1.4.0", ] TESTS_REQUIRES = [ From f1f5d764a3d7452a04c807388b454cf216744e8b Mon Sep 17 00:00:00 2001 From: mauriliogenovese Date: Mon, 25 Mar 2024 07:38:29 +0100 Subject: [PATCH 03/90] Update info.py --- nipype/info.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nipype/info.py b/nipype/info.py index f4fc365e7e..280c641ed6 100644 --- a/nipype/info.py +++ b/nipype/info.py @@ -149,7 +149,7 @@ def get_nipype_gitversion(): "filelock>=3.0.0", "etelemetry>=0.2.0", "looseversion!=1.2", - "gputil=1.4.0", + "gputil==1.4.0", ] TESTS_REQUIRES = [ From a6424301d7bd3440fe5f465ba05fde0b38e37aed Mon Sep 17 00:00:00 2001 From: mauriliogenovese Date: Mon, 25 Mar 2024 09:59:41 +0100 Subject: [PATCH 04/90] refactor and fix --- nipype/pipeline/engine/nodes.py | 5 ++-- nipype/pipeline/plugins/multiproc.py | 25 +++++++++++-------- .../pipeline/plugins/tests/test_multiproc.py | 8 ++++-- 3 files changed, 24 insertions(+), 14 deletions(-) diff --git a/nipype/pipeline/engine/nodes.py b/nipype/pipeline/engine/nodes.py index 5afea316c2..d9c066a795 100644 --- a/nipype/pipeline/engine/nodes.py +++ b/nipype/pipeline/engine/nodes.py @@ -822,8 +822,9 @@ def update(self, **opts): self.inputs.update(**opts) def is_gpu_node(self): - return ((hasattr(self.inputs, 'use_cuda') and self.inputs.use_cuda) - or (hasattr(self.inputs, 'use_gpu') and self.inputs.use_gpu)) + return (hasattr(self.inputs, 'use_cuda') and self.inputs.use_cuda) or ( + hasattr(self.inputs, 'use_gpu') and self.inputs.use_gpu + ) class JoinNode(Node): diff --git a/nipype/pipeline/plugins/multiproc.py b/nipype/pipeline/plugins/multiproc.py index 8213c6c821..9aec6ae072 100644 --- a/nipype/pipeline/plugins/multiproc.py +++ b/nipype/pipeline/plugins/multiproc.py @@ -134,13 +134,14 @@ def __init__(self, plugin_args=None): # GPU found on system self.n_gpus_visible = MultiProcPlugin.gpu_count() # proc per GPU set by user - self.n_gpu_procs = plugin_args.get('n_gpu_procs', self.n_gpus_visible) + self.n_gpu_procs = self.plugin_args.get('n_gpu_procs', self.n_gpus_visible) # total no. of processes allowed on all gpus if self.n_gpu_procs > self.n_gpus_visible: logger.info( - 'Total number of GPUs proc requested (%d) exceeds the available number of GPUs (%d) on the system. Using requested GPU slots at your own risk!' % ( - self.n_gpu_procs, self.n_gpus_visible)) + 'Total number of GPUs proc requested (%d) exceeds the available number of GPUs (%d) on the system. Using requested GPU slots at your own risk!' 
+ % (self.n_gpu_procs, self.n_gpus_visible) + ) # Instantiate different thread pools for non-daemon processes logger.debug( @@ -220,9 +221,7 @@ def _prerun_check(self, graph): if self.raise_insufficient: raise RuntimeError("Insufficient resources available for job") if np.any(np.array(tasks_gpu_th) > self.n_gpu_procs): - logger.warning( - 'Nodes demand more GPU than allowed (%d).', - self.n_gpu_procs) + logger.warning('Nodes demand more GPU than allowed (%d).', self.n_gpu_procs) if self.raise_insufficient: raise RuntimeError('Insufficient GPU resources available for job') @@ -257,7 +256,9 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): ) # Check available resources by summing all threads and memory used - free_memory_gb, free_processors, free_gpu_slots = self._check_resources(self.pending_tasks) + free_memory_gb, free_processors, free_gpu_slots = self._check_resources( + self.pending_tasks + ) stats = ( len(self.pending_tasks), @@ -267,7 +268,7 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): free_processors, self.processors, free_gpu_slots, - self.n_gpu_procs + self.n_gpu_procs, ) if self._stats != stats: tasks_list_msg = "" @@ -338,8 +339,11 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): is_gpu_node = self.procs[jobid].is_gpu_node() # If node does not fit, skip at this moment - if (next_job_th > free_processors or next_job_gb > free_memory_gb - or (is_gpu_node and next_job_gpu_th > free_gpu_slots)): + if ( + next_job_th > free_processors + or next_job_gb > free_memory_gb + or (is_gpu_node and next_job_gpu_th > free_gpu_slots) + ): logger.debug( "Cannot allocate job %d (%0.2fGB, %d threads, %d GPU slots).", jobid, @@ -424,6 +428,7 @@ def gpu_count(): n_gpus = 1 try: import GPUtil + return len(GPUtil.getGPUs()) except ImportError: return n_gpus diff --git a/nipype/pipeline/plugins/tests/test_multiproc.py b/nipype/pipeline/plugins/tests/test_multiproc.py index b954cb9517..484c0d07bc 100644 --- a/nipype/pipeline/plugins/tests/test_multiproc.py +++ b/nipype/pipeline/plugins/tests/test_multiproc.py @@ -56,7 +56,7 @@ def test_run_multiproc(tmpdir): class InputSpecSingleNode(nib.TraitedSpec): input1 = nib.traits.Int(desc="a random int") input2 = nib.traits.Int(desc="a random int") - use_gpu = nib.traits.Bool(False, mandatory = False, desc="boolean for GPU nodes") + use_gpu = nib.traits.Bool(False, mandatory=False, desc="boolean for GPU nodes") class OutputSpecSingleNode(nib.TraitedSpec): @@ -117,6 +117,7 @@ def test_no_more_threads_than_specified(tmpdir): with pytest.raises(RuntimeError): pipe.run(plugin="MultiProc", plugin_args={"n_procs": max_threads}) + def test_no_more_gpu_threads_than_specified(tmpdir): tmpdir.chdir() @@ -129,7 +130,10 @@ def test_no_more_gpu_threads_than_specified(tmpdir): max_threads = 2 max_gpu = 1 with pytest.raises(RuntimeError): - pipe.run(plugin="MultiProc", plugin_args={"n_procs": max_threads, 'n_gpu_procs': max_gpu}) + pipe.run( + plugin="MultiProc", + plugin_args={"n_procs": max_threads, 'n_gpu_procs': max_gpu}, + ) @pytest.mark.skipif( From 69b62a408c8e909e5a9fd2412abd84b6824d127a Mon Sep 17 00:00:00 2001 From: Umesh Kumar Date: Thu, 5 Oct 2023 17:26:07 +0530 Subject: [PATCH 05/90] Adding .wci.yml file to the project --- .wci.yml | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) create mode 100644 .wci.yml diff --git a/.wci.yml b/.wci.yml new file mode 100644 index 0000000000..c3909ea8e5 --- /dev/null +++ b/.wci.yml @@ -0,0 +1,32 @@ +# Project available at https://github.com/nipy/nipype 
+ +name: nipype + +headline: Neuroimaging in Python pipelines and interfaces package. + +description: Algorithms,Image manipulation,I/O Operations,Self-reporting interfaces,Utilities + +language: Python3 + +documentation: + general: https://nipype.readthedocs.io/en/latest/ + installation: https://nipype.readthedocs.io/en/latest/users/install.html + tutorial: https://miykael.github.io/nipype_tutorial/ + +# Set the OS, Python version and other tools you might need +build: + os: ubuntu-22.04 + tools: + python: "3.10" + +# Build documentation in the docs/ directory with Sphinx +sphinx: + configuration: doc/conf.py + +execution_environment: + interfaces: + - docker + - conda + - pypi + + From f54475cfb84a07aa12eea3794365bd2f5c1bf2f1 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Sun, 17 Mar 2024 11:54:35 -0400 Subject: [PATCH 06/90] Apply suggestions from code review --- .wci.yml | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/.wci.yml b/.wci.yml index c3909ea8e5..6439e9fe08 100644 --- a/.wci.yml +++ b/.wci.yml @@ -2,9 +2,14 @@ name: nipype -headline: Neuroimaging in Python pipelines and interfaces package. - -description: Algorithms,Image manipulation,I/O Operations,Self-reporting interfaces,Utilities +headline: Neuroimaging in Python: Pipelines and Interfaces + +description: | + Nipype, an open-source, community-developed initiative under the umbrella of NiPy, is a Python project that + provides a uniform interface to existing neuroimaging software and facilitates interaction between these + packages within a single workflow. Nipype provides an environment that encourages interactive exploration of + algorithms from different packages (e.g., SPM, FSL, FreeSurfer, AFNI, Slicer, ANTS), eases the design of + workflows within and between packages, and reduces the learning curve necessary to use different packages. 
language: Python3 From 6fb6a4e9447d4679c8278a598eaea77543cdb7aa Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Sun, 5 May 2024 21:22:05 -0400 Subject: [PATCH 07/90] Update .wci.yml --- .wci.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.wci.yml b/.wci.yml index 6439e9fe08..937cfc6dae 100644 --- a/.wci.yml +++ b/.wci.yml @@ -33,5 +33,3 @@ execution_environment: - docker - conda - pypi - - From 06209a440bc72c63394ac24678aa5cc3ab3bdfd0 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Thu, 31 Oct 2024 10:43:14 -0400 Subject: [PATCH 08/90] Bump versions --- doc/interfaces.rst | 2 +- nipype/info.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/interfaces.rst b/doc/interfaces.rst index e7b9dca2f4..4a8714e630 100644 --- a/doc/interfaces.rst +++ b/doc/interfaces.rst @@ -8,7 +8,7 @@ Interfaces and Workflows :Release: |version| :Date: |today| -Previous versions: `1.8.6 `_ `1.8.5 `_ +Previous versions: `1.9.0 `_ `1.8.6 `_ Workflows --------- diff --git a/nipype/info.py b/nipype/info.py index 3b006ae161..8ab5caba56 100644 --- a/nipype/info.py +++ b/nipype/info.py @@ -5,7 +5,7 @@ # nipype version information # Remove .dev0 for release -__version__ = "1.9.0" +__version__ = "1.9.1.dev0" def get_nipype_gitversion(): From 35403c13f35b3287f936b53b873f0458fd939899 Mon Sep 17 00:00:00 2001 From: Hans Johnson Date: Fri, 8 Nov 2024 16:55:27 -0600 Subject: [PATCH 09/90] ENH: Remove unused and recently unsupported antsRegistration flag https://github.com/ANTsX/ANTs/commit/e1e47994b233441726c1440cc2fb077a24287d6bo The flag --use-estimate-learning-rate-once was not used inside of antsRegistration, and was removed on 2022-08-09 in ants commit e1e47994b e1e47994b Examples/antsRegistration.cxx (Nick Tustison 2022-08-09 16:45:01 -0700 453) // option->SetLongName("use-estimate-learning-rate-once"); --- nipype/interfaces/ants/registration.py | 67 +++++++++++++------------- 1 file changed, 33 insertions(+), 34 deletions(-) diff --git a/nipype/interfaces/ants/registration.py b/nipype/interfaces/ants/registration.py index 41037ffc5f..91b131bbf3 100644 --- a/nipype/interfaces/ants/registration.py +++ b/nipype/interfaces/ants/registration.py @@ -710,9 +710,9 @@ class Registration(ANTSCommand): --initialize-transforms-per-stage 0 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] \ --convergence [ 1500x200, 1e-08, 20 ] --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ +--use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] --convergence [ 100x50x30, 1e-09, 20 ] \ ---smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 --use-estimate-learning-rate-once 1 \ +--smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] --write-composite-transform 1' >>> reg.run() # doctest: +SKIP @@ -726,9 +726,9 @@ class Registration(ANTSCommand): --initialize-transforms-per-stage 0 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] \ --convergence [ 1500x200, 1e-08, 20 ] --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ +--use-histogram-matching 1 
--transform SyN[ 0.25, 3.0, 0.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] --convergence [ 100x50x30, 1e-09, 20 ] \ ---smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 --use-estimate-learning-rate-once 1 \ +--smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ --use-histogram-matching 1 --winsorize-image-intensities [ 0.025, 1.0 ] --write-composite-transform 1' >>> reg1.run() # doctest: +SKIP @@ -742,9 +742,9 @@ class Registration(ANTSCommand): --initialize-transforms-per-stage 0 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] \ --convergence [ 1500x200, 1e-08, 20 ] --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ +--use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] --convergence [ 100x50x30, 1e-09, 20 ] \ ---smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 --use-estimate-learning-rate-once 1 \ +--smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 0.975 ] --write-composite-transform 1' Clip extremely low intensity data points using winsorize_lower_quantile. All data points @@ -759,9 +759,9 @@ class Registration(ANTSCommand): --initialize-transforms-per-stage 0 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] \ --convergence [ 1500x200, 1e-08, 20 ] --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ +--use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] --convergence [ 100x50x30, 1e-09, 20 ] \ ---smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 --use-estimate-learning-rate-once 1 \ +--smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ --use-histogram-matching 1 --winsorize-image-intensities [ 0.025, 0.975 ] --write-composite-transform 1' Use float instead of double for computations (saves memory usage) @@ -773,10 +773,10 @@ class Registration(ANTSCommand): --initial-moving-transform [ trans.mat, 1 ] --initialize-transforms-per-stage 0 --interpolation Linear \ --output [ output_, output_warped_image.nii.gz ] --transform Affine[ 2.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] --convergence [ 1500x200, 1e-08, 20 ] \ ---smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-estimate-learning-rate-once 1 --use-histogram-matching 1 \ +--smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-histogram-matching 1 \ --transform SyN[ 0.25, 3.0, 0.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] \ --convergence [ 100x50x30, 1e-09, 20 ] --smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] \ +--use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] \ --write-composite-transform 1' Force to use double instead of float for computations (more precision and memory usage). 
@@ -788,10 +788,10 @@ class Registration(ANTSCommand): --initial-moving-transform [ trans.mat, 1 ] --initialize-transforms-per-stage 0 --interpolation Linear \ --output [ output_, output_warped_image.nii.gz ] --transform Affine[ 2.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] --convergence [ 1500x200, 1e-08, 20 ] \ ---smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-estimate-learning-rate-once 1 --use-histogram-matching 1 \ +--smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-histogram-matching 1 \ --transform SyN[ 0.25, 3.0, 0.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] \ --convergence [ 100x50x30, 1e-09, 20 ] --smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] \ +--use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] \ --write-composite-transform 1' 'collapse_output_transforms' can be used to put all transformation in a single 'composite_transform'- @@ -823,10 +823,10 @@ class Registration(ANTSCommand): --initialize-transforms-per-stage 1 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --restore-state trans.mat --save-state trans.mat --transform Affine[ 2.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] --convergence [ 1500x200, 1e-08, 20 ] \ ---smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-estimate-learning-rate-once 1 --use-histogram-matching 1 \ +--smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-histogram-matching 1 \ --transform SyN[ 0.25, 3.0, 0.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] \ --convergence [ 100x50x30, 1e-09, 20 ] --smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] \ +--use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] \ --write-composite-transform 1' @@ -857,10 +857,10 @@ class Registration(ANTSCommand): --initialize-transforms-per-stage 1 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --restore-state trans.mat --save-state trans.mat --transform Affine[ 2.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] --convergence [ 1500x200, 1e-08, 20 ] \ ---smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-estimate-learning-rate-once 1 --use-histogram-matching 1 \ +--smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-histogram-matching 1 \ --transform SyN[ 0.25, 3.0, 0.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] \ --convergence [ 100x50x30, 1e-09, 20 ] --smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] \ +--use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] \ --write-composite-transform 0' One can use multiple similarity metrics in a single registration stage.The Node below first @@ -885,10 +885,10 @@ class Registration(ANTSCommand): --initialize-transforms-per-stage 0 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] \ --convergence [ 1500x200, 1e-08, 20 ] --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ +--use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ --metric Mattes[ fixed1.nii, 
moving1.nii, 0.5, 32, None, 0.05 ] \ --metric CC[ fixed1.nii, moving1.nii, 0.5, 4, None, 0.1 ] --convergence [ 100x50x30, 1e-09, 20 ] \ ---smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 --use-estimate-learning-rate-once 1 \ +--smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] --write-composite-transform 1' ANTS Registration can also use multiple modalities to perform the registration. Here it is assumed @@ -906,10 +906,10 @@ class Registration(ANTSCommand): --initialize-transforms-per-stage 0 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] \ --convergence [ 1500x200, 1e-08, 20 ] --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ +--use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 0.5, 32, None, 0.05 ] \ --metric CC[ fixed2.nii, moving2.nii, 0.5, 4, None, 0.1 ] --convergence [ 100x50x30, 1e-09, 20 ] \ ---smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 --use-estimate-learning-rate-once 1 \ +--smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] --write-composite-transform 1' Different methods can be used for the interpolation when applying transformations. @@ -923,9 +923,9 @@ class Registration(ANTSCommand): --initialize-transforms-per-stage 0 --interpolation BSpline[ 3 ] --output [ output_, output_warped_image.nii.gz ] \ --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] \ --convergence [ 1500x200, 1e-08, 20 ] --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ +--use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] --convergence [ 100x50x30, 1e-09, 20 ] \ ---smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 --use-estimate-learning-rate-once 1 \ +--smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] --write-composite-transform 1' >>> # Test Interpolation Parameters (MultiLabel/Gaussian) @@ -937,10 +937,10 @@ class Registration(ANTSCommand): --initialize-transforms-per-stage 0 --interpolation Gaussian[ 1.0, 1.0 ] \ --output [ output_, output_warped_image.nii.gz ] --transform Affine[ 2.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] --convergence [ 1500x200, 1e-08, 20 ] \ ---smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-estimate-learning-rate-once 1 --use-histogram-matching 1 \ +--smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-histogram-matching 1 \ --transform SyN[ 0.25, 3.0, 0.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] \ --convergence [ 100x50x30, 1e-09, 20 ] --smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] \ +--use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] \ --write-composite-transform 1' BSplineSyN non-linear registration with custom parameters. 
@@ -954,9 +954,9 @@ class Registration(ANTSCommand): --initialize-transforms-per-stage 0 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] \ --convergence [ 1500x200, 1e-08, 20 ] --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --transform BSplineSyN[ 0.25, 26, 0, 3 ] \ +--use-histogram-matching 1 --transform BSplineSyN[ 0.25, 26, 0, 3 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] --convergence [ 100x50x30, 1e-09, 20 ] \ ---smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 --use-estimate-learning-rate-once 1 \ +--smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] --write-composite-transform 1' Mask the fixed image in the second stage of the registration (but not the first). @@ -969,10 +969,10 @@ class Registration(ANTSCommand): --initialize-transforms-per-stage 0 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] \ --convergence [ 1500x200, 1e-08, 20 ] --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --masks [ NULL, NULL ] \ +--use-histogram-matching 1 --masks [ NULL, NULL ] \ --transform SyN[ 0.25, 3.0, 0.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] \ --convergence [ 100x50x30, 1e-09, 20 ] --smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --masks [ fixed1.nii, NULL ] \ +--use-histogram-matching 1 --masks [ fixed1.nii, NULL ] \ --winsorize-image-intensities [ 0.0, 1.0 ] --write-composite-transform 1' Here we use both a warpfield and a linear transformation, before registration commences. 
Note that @@ -988,10 +988,10 @@ class Registration(ANTSCommand): [ func_to_struct.mat, 0 ] [ ants_Warp.nii.gz, 0 ] --initialize-transforms-per-stage 0 --interpolation Linear \ --output [ output_, output_warped_image.nii.gz ] --transform Affine[ 2.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] --convergence [ 1500x200, 1e-08, 20 ] \ ---smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-estimate-learning-rate-once 1 --use-histogram-matching 1 \ +--smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-histogram-matching 1 \ --transform SyN[ 0.25, 3.0, 0.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] \ --convergence [ 100x50x30, 1e-09, 20 ] --smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ ---use-estimate-learning-rate-once 1 --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] \ +--use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] \ --write-composite-transform 1' """ @@ -1155,10 +1155,9 @@ def _format_registration(self): % self._format_xarray(self.inputs.shrink_factors[ii]) ) if isdefined(self.inputs.use_estimate_learning_rate_once): - retval.append( - "--use-estimate-learning-rate-once %d" - % self.inputs.use_estimate_learning_rate_once[ii] - ) + # this flag was removed because it was never used in the ants codebase + # removed from Ants in commit e1e47994b on 2022-08-09 + pass if isdefined(self.inputs.use_histogram_matching): # use_histogram_matching is either a common flag for all transforms # or a list of transform-specific flags From 44e9d675d9773baaaa9aeddf38ce6a8d04a208b0 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Mon, 11 Nov 2024 14:06:46 -0500 Subject: [PATCH 10/90] =?UTF-8?q?[FIX]=20`ts=5FZ=5Fcorr`=20=E2=86=92=20`ts?= =?UTF-8?q?=5Fwb=5FZ`?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- nipype/interfaces/afni/preprocess.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nipype/interfaces/afni/preprocess.py b/nipype/interfaces/afni/preprocess.py index d3daebcf4c..b5e27ea53a 100644 --- a/nipype/interfaces/afni/preprocess.py +++ b/nipype/interfaces/afni/preprocess.py @@ -2741,7 +2741,7 @@ def _list_outputs(self): odir = os.path.dirname(os.path.abspath(prefix)) outputs["out_corr_matrix"] = glob.glob(os.path.join(odir, "*.netcc"))[0] - if isdefined(self.inputs.ts_wb_corr) or isdefined(self.inputs.ts_Z_corr): + if self.inputs.ts_wb_corr or self.inputs.ts_wb_Z: corrdir = os.path.join(odir, prefix + "_000_INDIV") outputs["out_corr_maps"] = glob.glob(os.path.join(corrdir, "*.nii.gz")) From cbcb487a90eb293acfabf1a444383c4a4dfa64c8 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Mon, 11 Nov 2024 14:18:06 -0500 Subject: [PATCH 11/90] [DOC] Add Jon Cluce to `.zenodo.json` --- .zenodo.json | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.zenodo.json b/.zenodo.json index 5ddb9a71dd..e380a177dc 100644 --- a/.zenodo.json +++ b/.zenodo.json @@ -920,6 +920,11 @@ "name": "Mihai, Paul Glad", "orcid": "0000-0001-5715-6442" }, + { + "affiliation": "Child Mind Institute", + "name": "Cluce, Jon", + "orcid": "0000-0001-7590-5806" + }, { "affiliation": "Department of Psychology, Stanford University", "name": "Gorgolewski, Krzysztof J.", From 195af8ad456215b506fbcab3d20b8950b5ca4aa2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Nov 2024 04:34:53 +0000 Subject: [PATCH 12/90] Bump codecov/codecov-action from 4 to 5 Bumps [codecov/codecov-action](https://github.com/codecov/codecov-action) from 
4 to 5. - [Release notes](https://github.com/codecov/codecov-action/releases) - [Changelog](https://github.com/codecov/codecov-action/blob/main/CHANGELOG.md) - [Commits](https://github.com/codecov/codecov-action/compare/v4...v5) --- updated-dependencies: - dependency-name: codecov/codecov-action dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- .github/workflows/contrib.yml | 2 +- .github/workflows/tests.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/contrib.yml b/.github/workflows/contrib.yml index 6b3ef96f0c..640d04bf23 100644 --- a/.github/workflows/contrib.yml +++ b/.github/workflows/contrib.yml @@ -71,7 +71,7 @@ jobs: - name: Run tests run: tools/ci/check.sh if: ${{ matrix.check != 'skiptests' }} - - uses: codecov/codecov-action@v4 + - uses: codecov/codecov-action@v5 with: file: coverage.xml token: ${{ secrets.CODECOV_TOKEN }} diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 41776bc188..b6c435059e 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -145,7 +145,7 @@ jobs: - name: Run tests run: tools/ci/check.sh if: ${{ matrix.check != 'skiptests' }} - - uses: codecov/codecov-action@v4 + - uses: codecov/codecov-action@v5 with: file: coverage.xml token: ${{ secrets.CODECOV_TOKEN }} From 7b564aee79de03e2412f809f2576db66ece979bf Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Mon, 18 Nov 2024 06:47:49 -0500 Subject: [PATCH 13/90] Update .github/workflows/tests.yml --- .github/workflows/tests.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index b6c435059e..5456709412 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -147,7 +147,6 @@ jobs: if: ${{ matrix.check != 'skiptests' }} - uses: codecov/codecov-action@v5 with: - file: coverage.xml token: ${{ secrets.CODECOV_TOKEN }} if: ${{ always() }} - name: Upload pytest test results From ccdf4d220fcdc40d31afe636addce69adaad8c67 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Mon, 18 Nov 2024 06:47:55 -0500 Subject: [PATCH 14/90] Update .github/workflows/contrib.yml --- .github/workflows/contrib.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/contrib.yml b/.github/workflows/contrib.yml index 640d04bf23..dca5bbdecb 100644 --- a/.github/workflows/contrib.yml +++ b/.github/workflows/contrib.yml @@ -73,7 +73,6 @@ jobs: if: ${{ matrix.check != 'skiptests' }} - uses: codecov/codecov-action@v5 with: - file: coverage.xml token: ${{ secrets.CODECOV_TOKEN }} if: ${{ always() }} - name: Upload pytest test results From 73e47b50a9f2a7e56e71e6b1fb0bdbeec7424a6e Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Sun, 17 Nov 2024 19:57:28 -0500 Subject: [PATCH 15/90] MNT: Drop numpy 2.0 limit --- nipype/info.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/nipype/info.py b/nipype/info.py index 8ab5caba56..d57edcb437 100644 --- a/nipype/info.py +++ b/nipype/info.py @@ -102,7 +102,6 @@ def get_nipype_gitversion(): NIBABEL_MIN_VERSION = "2.1.0" NETWORKX_MIN_VERSION = "2.0" NUMPY_MIN_VERSION = "1.17" -NUMPY_MAX_VERSION = "2.0" SCIPY_MIN_VERSION = "0.14" TRAITS_MIN_VERSION = "4.6" DATEUTIL_MIN_VERSION = "2.2" @@ -136,7 +135,7 @@ def get_nipype_gitversion(): "click>=%s" % CLICK_MIN_VERSION, "networkx>=%s" % NETWORKX_MIN_VERSION, "nibabel>=%s" % NIBABEL_MIN_VERSION, - "numpy>=%s,<%s" % (NUMPY_MIN_VERSION, NUMPY_MAX_VERSION), + "numpy>=%s" % NUMPY_MIN_VERSION, 
"packaging", "prov>=%s" % PROV_MIN_VERSION, "pydot>=%s" % PYDOT_MIN_VERSION, From b41fb1cbe582fe659fd0914f0b3abe44788d15db Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Mon, 18 Nov 2024 07:53:47 -0500 Subject: [PATCH 16/90] fix: Numpy stopped accepting b-strings as savetxt args --- nipype/algorithms/confounds.py | 12 ++++++------ nipype/algorithms/rapidart.py | 6 +++--- nipype/interfaces/fsl/epi.py | 2 +- nipype/interfaces/nilearn.py | 2 +- 4 files changed, 11 insertions(+), 11 deletions(-) diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py index 157d1e48d7..5e3588f4fc 100644 --- a/nipype/algorithms/confounds.py +++ b/nipype/algorithms/confounds.py @@ -188,7 +188,7 @@ def _run_interface(self, runtime): if self.inputs.save_std: out_file = self._gen_fname("dvars_std", ext="tsv") - np.savetxt(out_file, dvars[0], fmt=b"%0.6f") + np.savetxt(out_file, dvars[0], fmt="%0.6f") self._results["out_std"] = out_file if self.inputs.save_plot: @@ -228,7 +228,7 @@ def _run_interface(self, runtime): if self.inputs.save_vxstd: out_file = self._gen_fname("dvars_vxstd", ext="tsv") - np.savetxt(out_file, dvars[2], fmt=b"%0.6f") + np.savetxt(out_file, dvars[2], fmt="%0.6f") self._results["out_vxstd"] = out_file if self.inputs.save_plot: @@ -251,8 +251,8 @@ def _run_interface(self, runtime): np.savetxt( out_file, np.vstack(dvars).T, - fmt=b"%0.8f", - delimiter=b"\t", + fmt="%0.8f", + delimiter="\t", header="std DVARS\tnon-std DVARS\tvx-wise std DVARS", comments="", ) @@ -689,7 +689,7 @@ def _run_interface(self, runtime): np.savetxt( components_file, components, - fmt=b"%.10f", + fmt="%.10f", delimiter="\t", header="\t".join(components_header), comments="", @@ -729,7 +729,7 @@ def _run_interface(self, runtime): np.savetxt( self._results["pre_filter_file"], filter_basis, - fmt=b"%.10f", + fmt="%.10f", delimiter="\t", header="\t".join(header), comments="", diff --git a/nipype/algorithms/rapidart.py b/nipype/algorithms/rapidart.py index ff867ae26c..78fff0a18e 100644 --- a/nipype/algorithms/rapidart.py +++ b/nipype/algorithms/rapidart.py @@ -600,10 +600,10 @@ def _detect_outliers_core(self, imgfile, motionfile, runidx, cwd=None): outliers = np.unique(np.union1d(iidx, np.union1d(tidx, ridx))) # write output to outputfile - np.savetxt(artifactfile, outliers, fmt=b"%d", delimiter=" ") - np.savetxt(intensityfile, g, fmt=b"%.2f", delimiter=" ") + np.savetxt(artifactfile, outliers, fmt="%d", delimiter=" ") + np.savetxt(intensityfile, g, fmt="%.2f", delimiter=" ") if self.inputs.use_norm: - np.savetxt(normfile, normval, fmt=b"%.4f", delimiter=" ") + np.savetxt(normfile, normval, fmt="%.4f", delimiter=" ") if isdefined(self.inputs.save_plot) and self.inputs.save_plot: import matplotlib diff --git a/nipype/interfaces/fsl/epi.py b/nipype/interfaces/fsl/epi.py index 09daacb17f..7dda9a49d7 100644 --- a/nipype/interfaces/fsl/epi.py +++ b/nipype/interfaces/fsl/epi.py @@ -417,7 +417,7 @@ def _generate_encfile(self): float(val[0] == encdir[0]) * direction for val in ["x", "y", "z"] ] + [durations[idx]] lines.append(line) - np.savetxt(out_file, np.array(lines), fmt=b"%d %d %d %.8f") + np.savetxt(out_file, np.array(lines), fmt="%d %d %d %.8f") return out_file def _overload_extension(self, value, name=None): diff --git a/nipype/interfaces/nilearn.py b/nipype/interfaces/nilearn.py index 9d78517f79..df6413320e 100644 --- a/nipype/interfaces/nilearn.py +++ b/nipype/interfaces/nilearn.py @@ -105,7 +105,7 @@ def _run_interface(self, runtime): # save output self._results["out_file"] = 
os.path.join(runtime.cwd, self.inputs.out_file) - np.savetxt(self._results["out_file"], output, fmt=b"%s", delimiter="\t") + np.savetxt(self._results["out_file"], output, fmt="%s", delimiter="\t") return runtime def _process_inputs(self): From ea4164c2c904b3b3c6f940e4e27589cdfc33de19 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Mon, 18 Nov 2024 09:33:04 -0500 Subject: [PATCH 17/90] fix: Replace recfromcsv to genfromtxt --- nipype/interfaces/nitime/tests/test_nitime.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/nipype/interfaces/nitime/tests/test_nitime.py b/nipype/interfaces/nitime/tests/test_nitime.py index 64bb8366a0..8351a3c38a 100644 --- a/nipype/interfaces/nitime/tests/test_nitime.py +++ b/nipype/interfaces/nitime/tests/test_nitime.py @@ -51,7 +51,9 @@ def test_coherence_analysis(tmpdir): # This is the nitime analysis: TR = 1.89 - data_rec = np.recfromcsv(example_data("fmri_timeseries.csv")) + data_rec = np.genfromtxt( + example_data("fmri_timeseries.csv"), delimiter=',', names=True + ) roi_names = np.array(data_rec.dtype.names) n_samples = data_rec.shape[0] data = np.zeros((len(roi_names), n_samples)) From b713b4dd4a708d2927e6c9f78e7fdf65df6042ea Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Mon, 18 Nov 2024 08:37:39 -0500 Subject: [PATCH 18/90] fix(traits): Replace deprecated traits.List$Sub with traits.List($Sub) --- nipype/algorithms/rapidart.py | 3 +- .../tests/test_auto_ArtifactDetect.py | 2 - nipype/interfaces/ants/segmentation.py | 3 +- .../ants/tests/test_auto_JointFusion.py | 2 - nipype/interfaces/base/__init__.py | 3 +- nipype/interfaces/base/specs.py | 3 +- nipype/interfaces/dipy/base.py | 11 ++-- nipype/interfaces/dipy/tests/test_base.py | 51 ++++++++++--------- nipype/interfaces/mrtrix3/preprocess.py | 5 +- .../mrtrix3/tests/test_auto_MRDeGibbs.py | 2 - nipype/interfaces/spm/preprocess.py | 15 ++++-- .../spm/tests/test_auto_ApplyVDM.py | 2 - .../interfaces/spm/tests/test_auto_Realign.py | 2 - .../spm/tests/test_auto_RealignUnwarp.py | 6 --- 14 files changed, 54 insertions(+), 56 deletions(-) diff --git a/nipype/algorithms/rapidart.py b/nipype/algorithms/rapidart.py index 78fff0a18e..65aae2ef1c 100644 --- a/nipype/algorithms/rapidart.py +++ b/nipype/algorithms/rapidart.py @@ -189,7 +189,8 @@ class ArtifactDetectInputSpec(BaseInterfaceInputSpec): desc="Source of movement parameters", mandatory=True, ) - use_differences = traits.ListBool( + use_differences = traits.List( + traits.Bool, [True, False], minlen=2, maxlen=2, diff --git a/nipype/algorithms/tests/test_auto_ArtifactDetect.py b/nipype/algorithms/tests/test_auto_ArtifactDetect.py index 51010aea3a..4d5a7ca53b 100644 --- a/nipype/algorithms/tests/test_auto_ArtifactDetect.py +++ b/nipype/algorithms/tests/test_auto_ArtifactDetect.py @@ -48,8 +48,6 @@ def test_ArtifactDetect_inputs(): xor=["norm_threshold"], ), use_differences=dict( - maxlen=2, - minlen=2, usedefault=True, ), use_norm=dict( diff --git a/nipype/interfaces/ants/segmentation.py b/nipype/interfaces/ants/segmentation.py index 3c87b71975..47592d70b5 100644 --- a/nipype/interfaces/ants/segmentation.py +++ b/nipype/interfaces/ants/segmentation.py @@ -1328,7 +1328,8 @@ class JointFusionInputSpec(ANTSCommandInputSpec): usedefault=True, desc=("Constrain solution to non-negative weights."), ) - patch_radius = traits.ListInt( + patch_radius = traits.List( + traits.Int, minlen=3, maxlen=3, argstr="-p %s", diff --git a/nipype/interfaces/ants/tests/test_auto_JointFusion.py b/nipype/interfaces/ants/tests/test_auto_JointFusion.py 
index f234ceea7c..98d8d696a1 100644 --- a/nipype/interfaces/ants/tests/test_auto_JointFusion.py +++ b/nipype/interfaces/ants/tests/test_auto_JointFusion.py @@ -70,8 +70,6 @@ def test_JointFusion_inputs(): ), patch_radius=dict( argstr="-p %s", - maxlen=3, - minlen=3, ), retain_atlas_voting_images=dict( argstr="-f", diff --git a/nipype/interfaces/base/__init__.py b/nipype/interfaces/base/__init__.py index 2e54847958..2af425d284 100644 --- a/nipype/interfaces/base/__init__.py +++ b/nipype/interfaces/base/__init__.py @@ -7,7 +7,8 @@ This module defines the API of all nipype interfaces. """ -from traits.trait_handlers import TraitDictObject, TraitListObject +from traits.trait_dict_object import TraitDictObject +from traits.trait_list_object import TraitListObject from traits.trait_errors import TraitError from .core import ( diff --git a/nipype/interfaces/base/specs.py b/nipype/interfaces/base/specs.py index a7f61e6889..defbca7f43 100644 --- a/nipype/interfaces/base/specs.py +++ b/nipype/interfaces/base/specs.py @@ -15,7 +15,8 @@ from packaging.version import Version from traits.trait_errors import TraitError -from traits.trait_handlers import TraitDictObject, TraitListObject +from traits.trait_dict_object import TraitDictObject +from traits.trait_list_object import TraitListObject from ...utils.filemanip import md5, hash_infile, hash_timestamp from .traits_extension import ( traits, diff --git a/nipype/interfaces/dipy/base.py b/nipype/interfaces/dipy/base.py index ec19d1fe7b..1b9bdea6d5 100644 --- a/nipype/interfaces/dipy/base.py +++ b/nipype/interfaces/dipy/base.py @@ -2,6 +2,7 @@ import os.path as op import inspect +from functools import partial import numpy as np from ..base import ( traits, @@ -109,15 +110,15 @@ def convert_to_traits_type(dipy_type, is_file=False): dipy_type = dipy_type.lower() is_mandatory = bool("optional" not in dipy_type) if "variable" in dipy_type and "str" in dipy_type: - return traits.ListStr, is_mandatory + return partial(traits.List, traits.Str), is_mandatory elif "variable" in dipy_type and "int" in dipy_type: - return traits.ListInt, is_mandatory + return partial(traits.List, traits.Int), is_mandatory elif "variable" in dipy_type and "float" in dipy_type: - return traits.ListFloat, is_mandatory + return partial(traits.List, traits.Float), is_mandatory elif "variable" in dipy_type and "bool" in dipy_type: - return traits.ListBool, is_mandatory + return partial(traits.List, traits.Bool), is_mandatory elif "variable" in dipy_type and "complex" in dipy_type: - return traits.ListComplex, is_mandatory + return partial(traits.List, traits.Complex), is_mandatory elif "str" in dipy_type and not is_file: return traits.Str, is_mandatory elif "str" in dipy_type and is_file: diff --git a/nipype/interfaces/dipy/tests/test_base.py b/nipype/interfaces/dipy/tests/test_base.py index d2d81ec005..015215054d 100644 --- a/nipype/interfaces/dipy/tests/test_base.py +++ b/nipype/interfaces/dipy/tests/test_base.py @@ -16,7 +16,7 @@ def test_convert_to_traits_type(): Params = namedtuple("Params", "traits_type is_file") - Res = namedtuple("Res", "traits_type is_mandatory") + Res = namedtuple("Res", "traits_type subtype is_mandatory") l_entries = [ Params("variable string", False), Params("variable int", False), @@ -42,35 +42,38 @@ def test_convert_to_traits_type(): Params("complex, optional", False), ] l_expected = [ - Res(traits.ListStr, True), - Res(traits.ListInt, True), - Res(traits.ListFloat, True), - Res(traits.ListBool, True), - Res(traits.ListComplex, True), - Res(traits.ListInt, 
False), - Res(traits.ListStr, False), - Res(traits.ListFloat, False), - Res(traits.ListBool, False), - Res(traits.ListComplex, False), - Res(traits.Str, True), - Res(traits.Int, True), - Res(File, True), - Res(traits.Float, True), - Res(traits.Bool, True), - Res(traits.Complex, True), - Res(traits.Str, False), - Res(traits.Int, False), - Res(File, False), - Res(traits.Float, False), - Res(traits.Bool, False), - Res(traits.Complex, False), + Res(traits.List, traits.Str, True), + Res(traits.List, traits.Int, True), + Res(traits.List, traits.Float, True), + Res(traits.List, traits.Bool, True), + Res(traits.List, traits.Complex, True), + Res(traits.List, traits.Int, False), + Res(traits.List, traits.Str, False), + Res(traits.List, traits.Float, False), + Res(traits.List, traits.Bool, False), + Res(traits.List, traits.Complex, False), + Res(traits.Str, None, True), + Res(traits.Int, None, True), + Res(File, None, True), + Res(traits.Float, None, True), + Res(traits.Bool, None, True), + Res(traits.Complex, None, True), + Res(traits.Str, None, False), + Res(traits.Int, None, False), + Res(File, None, False), + Res(traits.Float, None, False), + Res(traits.Bool, None, False), + Res(traits.Complex, None, False), ] for entry, res in zip(l_entries, l_expected): traits_type, is_mandatory = convert_to_traits_type( entry.traits_type, entry.is_file ) - assert traits_type == res.traits_type + trait_instance = traits_type() + assert isinstance(trait_instance, res.traits_type) + if res.subtype: + assert isinstance(trait_instance.inner_traits()[0].trait_type, res.subtype) assert is_mandatory == res.is_mandatory with pytest.raises(IOError): diff --git a/nipype/interfaces/mrtrix3/preprocess.py b/nipype/interfaces/mrtrix3/preprocess.py index 57cdad0168..0165087376 100644 --- a/nipype/interfaces/mrtrix3/preprocess.py +++ b/nipype/interfaces/mrtrix3/preprocess.py @@ -99,8 +99,9 @@ class MRDeGibbsInputSpec(MRTrix3BaseInputSpec): mandatory=True, desc="input DWI image", ) - axes = traits.ListInt( - default_value=[0, 1], + axes = traits.List( + traits.Int, + [0, 1], usedefault=True, sep=",", minlen=2, diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_MRDeGibbs.py b/nipype/interfaces/mrtrix3/tests/test_auto_MRDeGibbs.py index cd15f36ac6..83f5bfef4b 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_MRDeGibbs.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_MRDeGibbs.py @@ -9,8 +9,6 @@ def test_MRDeGibbs_inputs(): ), axes=dict( argstr="-axes %s", - maxlen=2, - minlen=2, sep=",", usedefault=True, ), diff --git a/nipype/interfaces/spm/preprocess.py b/nipype/interfaces/spm/preprocess.py index 8a3a479705..c7f69785ff 100644 --- a/nipype/interfaces/spm/preprocess.py +++ b/nipype/interfaces/spm/preprocess.py @@ -273,7 +273,8 @@ class ApplyVDMInputSpec(SPMCommandInputSpec): desc="phase encode direction input data have been acquired with", usedefault=True, ) - write_which = traits.ListInt( + write_which = traits.List( + traits.Int, [2, 1], field="roptions.which", minlen=2, @@ -524,7 +525,8 @@ class RealignInputSpec(SPMCommandInputSpec): field="eoptions.wrap", desc="Check if interpolation should wrap in [x,y,z]", ) - write_which = traits.ListInt( + write_which = traits.List( + traits.Int, [2, 1], field="roptions.which", minlen=2, @@ -731,7 +733,8 @@ class RealignUnwarpInputSpec(SPMCommandInputSpec): "maximization and smoothness maximization of the estimated field." 
), ) - est_reg_factor = traits.ListInt( + est_reg_factor = traits.List( + traits.Int, [100000], field="uweoptions.lambda", minlen=1, @@ -769,7 +772,8 @@ class RealignUnwarpInputSpec(SPMCommandInputSpec): field="uweoptions.rem", desc="Re-estimate movement parameters at each unwarping iteration.", ) - est_num_of_iterations = traits.ListInt( + est_num_of_iterations = traits.List( + traits.Int, [5], field="uweoptions.noi", minlen=1, @@ -783,7 +787,8 @@ class RealignUnwarpInputSpec(SPMCommandInputSpec): usedefault=True, desc="Point in position space to perform Taylor-expansion around.", ) - reslice_which = traits.ListInt( + reslice_which = traits.List( + traits.Int, [2, 1], field="uwroptions.uwwhich", minlen=2, diff --git a/nipype/interfaces/spm/tests/test_auto_ApplyVDM.py b/nipype/interfaces/spm/tests/test_auto_ApplyVDM.py index 2f56b49ef2..6d3b3c360d 100644 --- a/nipype/interfaces/spm/tests/test_auto_ApplyVDM.py +++ b/nipype/interfaces/spm/tests/test_auto_ApplyVDM.py @@ -41,8 +41,6 @@ def test_ApplyVDM_inputs(): ), write_which=dict( field="roptions.which", - maxlen=2, - minlen=2, usedefault=True, ), write_wrap=dict( diff --git a/nipype/interfaces/spm/tests/test_auto_Realign.py b/nipype/interfaces/spm/tests/test_auto_Realign.py index 5165d6f33e..8262243a61 100644 --- a/nipype/interfaces/spm/tests/test_auto_Realign.py +++ b/nipype/interfaces/spm/tests/test_auto_Realign.py @@ -56,8 +56,6 @@ def test_Realign_inputs(): ), write_which=dict( field="roptions.which", - maxlen=2, - minlen=2, usedefault=True, ), write_wrap=dict( diff --git a/nipype/interfaces/spm/tests/test_auto_RealignUnwarp.py b/nipype/interfaces/spm/tests/test_auto_RealignUnwarp.py index bb27419547..dc996c130e 100644 --- a/nipype/interfaces/spm/tests/test_auto_RealignUnwarp.py +++ b/nipype/interfaces/spm/tests/test_auto_RealignUnwarp.py @@ -15,8 +15,6 @@ def test_RealignUnwarp_inputs(): ), est_num_of_iterations=dict( field="uweoptions.noi", - maxlen=1, - minlen=1, usedefault=True, ), est_re_est_mov_par=dict( @@ -24,8 +22,6 @@ def test_RealignUnwarp_inputs(): ), est_reg_factor=dict( field="uweoptions.lambda", - maxlen=1, - minlen=1, usedefault=True, ), est_reg_order=dict( @@ -80,8 +76,6 @@ def test_RealignUnwarp_inputs(): ), reslice_which=dict( field="uwroptions.uwwhich", - maxlen=2, - minlen=2, usedefault=True, ), reslice_wrap=dict( From eb6ad74eb5d34b418226299cddaff201ba016762 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Mon, 18 Nov 2024 09:19:48 -0500 Subject: [PATCH 19/90] MNT: Add tox config --- tox.ini | 90 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 90 insertions(+) create mode 100644 tox.ini diff --git a/tox.ini b/tox.ini new file mode 100644 index 0000000000..89f5fe1118 --- /dev/null +++ b/tox.ini @@ -0,0 +1,90 @@ +[tox] +requires = + tox>=4 +envlist = + py3{9,10,11,12,13}-none # Test nipype functionality on all versions + py3{9,12,13}-full # Test with extra dependencies on oldest and two newest + py39-min # Test with minimal dependencies + py3{11,12,13}-pre # Test with pre-release on SPEC0-supported Python +skip_missing_interpreters = true + +# Configuration that allows us to split tests across GitHub runners effectively +[gh-actions] +python = + 3.9: py39 + 3.10: py310 + 3.11: py311 + 3.12: py312 + 3.13: py313 + +[gh-actions:env] +DEPENDS = + min: min + none: none + full: full + pre: pre + +[testenv] +description = Pytest with coverage +labels = test +pip_pre = + pre: true +pass_env = + # Parsed from `git grep getenv` and `git grep os.environ` + # May not all be needed + NIPYPE_NO_ET + 
NO_ET + ANTSPATH + CI_SKIP_TEST + FREESURFER_HOME + USER + FSLDIR + FSLOUTPUTTYPE + FSL_COURSE_DATA + NIPYPE_NO_MATLAB + OMP_NUM_THREADS + NIPYPE_NO_R + SPMMCRCMD + FORCE_SPMMCR + LOGNAME + AWS_ACCESS_KEY_ID + AWS_SECRET_ACCESS_KEY + MATLABCMD + MRTRIX3_HOME + RCMD + ETS_TOOLKIT + NIPYPE_CONFIG_DIR + DISPLAY + PATHEXT + # getpass.getuser() sources for Windows: + LOGNAME + USER + LNAME + USERNAME + # Pass user color preferences through + PY_COLORS + FORCE_COLOR + NO_COLOR + CLICOLOR + CLICOLOR_FORCE + PYTHON_GIL +deps = + py313: traits @ git+https://github.com/enthought/traits.git@10954eb + full: dipy @ git+https://github.com/dipy/dipy@master +extras = + tests + full: doc + full: profiler + full: duecredit + full: ssh + full: nipy +setenv = + FSLOUTPUTTYPE=NIFTI_GZ + pre: PIP_EXTRA_INDEX_URL=https://pypi.anaconda.org/scientific-python-nightly-wheels/simple + pre: UV_EXTRA_INDEX_URL=https://pypi.anaconda.org/scientific-python-nightly-wheels/simple +uv_resolution = + min: lowest-direct + +commands = + python -c "import nipype; print(nipype.__version__)" + pytest --durations=20 --durations-min=1.0 --cov-report term-missing {posargs:-n auto} From cb32982ea4f0725105a61b3e58c37a7b2aed3097 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Mon, 18 Nov 2024 09:36:02 -0500 Subject: [PATCH 20/90] test: Depend on pytest-xdist --- nipype/info.py | 1 + 1 file changed, 1 insertion(+) diff --git a/nipype/info.py b/nipype/info.py index d57edcb437..edee6b3283 100644 --- a/nipype/info.py +++ b/nipype/info.py @@ -158,6 +158,7 @@ def get_nipype_gitversion(): "pytest-env", "pytest-timeout", "pytest-doctestplus", + "pytest-xdist", "sphinx", ] From a150eeb67f2fa3e5b98c1fcee9dd20223ff8af10 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Mon, 18 Nov 2024 09:43:49 -0500 Subject: [PATCH 21/90] chore(ci): Run GHA with tox --- .github/workflows/tests.yml | 125 ++++++++++++++---------------------- 1 file changed, 48 insertions(+), 77 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 5456709412..669cbf9285 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -1,22 +1,11 @@ -name: Stable tests - -# This file tests the claimed support range of nipype including -# -# * Operating systems: Linux, OSX -# * Dependencies: minimum requirements, optional requirements -# * Installation methods: setup.py, sdist, wheel, archive +name: Tox on: push: - branches: - - master - - maint/* - tags: - - "*" + branches: [ master, main, 'maint/*' ] + tags: [ '*' ] pull_request: - branches: - - master - - maint/* + branches: [ master, main, 'maint/*' ] schedule: # 8am EST / 9am EDT Mondays - cron: "0 13 * * 1" @@ -26,27 +15,28 @@ defaults: shell: bash concurrency: - group: tests-${{ github.ref }} + group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: true -permissions: {} +permissions: + contents: read + +env: + # Force tox and pytest to use color + FORCE_COLOR: true + + jobs: build: - permissions: - contents: read # to fetch code (actions/checkout) - runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 with: fetch-depth: 0 - - uses: actions/setup-python@v5 - with: - python-version: 3 - - run: pip install --upgrade build twine - - name: Build sdist and wheel - run: python -m build - - run: twine check dist/* + - name: Install the latest version of uv + uses: astral-sh/setup-uv@v3 + - run: uv build + - run: uvx twine check dist/* - uses: actions/upload-artifact@v4 with: name: dist @@ -82,70 +72,51 @@ jobs: - name: Run tests run: pytest --doctest-modules 
-v --pyargs nipype - stable: + test: # Check each OS, all supported Python, minimum versions and latest releases - permissions: - contents: read # to fetch code (actions/checkout) - runs-on: ${{ matrix.os }} strategy: matrix: - os: ["ubuntu-22.04"] - python-version: ["3.9", "3.10", "3.11", "3.12"] - check: ["test"] - pip-flags: [""] - depends: ["REQUIREMENTS"] - deb-depends: [false] - nipype-extras: ["doc,tests,profiler"] - include: - - os: ubuntu-22.04 - python-version: "3.9" - check: test - pip-flags: "" - depends: REQUIREMENTS - deb-depends: true - nipype-extras: doc,tests,profiler,duecredit,ssh - - os: ubuntu-20.04 - python-version: "3.9" - check: test - pip-flags: "" - depends: REQUIREMENTS - deb-depends: true - nipype-extras: doc,tests,nipy,profiler,duecredit,ssh + os: ["ubuntu-latest"] + python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] + dependencies: [none, full, pre] + # include: + # - os: ubuntu-latest + # python-version: "3.9" + # dependencies: min + exclude: + # Skip some intermediate versions for full tests + - python-version: "3.10" + dependencies: full + - python-version: "3.11" + dependencies: full + # Do not test pre-releases for versions out of SPEC0 + - python-version: "3.9" + dependencies: pre + - python-version: "3.10" + dependencies: pre + env: - DEPENDS: ${{ matrix.depends }} - CHECK_TYPE: ${{ matrix.check }} - EXTRA_PIP_FLAGS: ${{ matrix.pip-flags }} - INSTALL_DEB_DEPENDENCIES: ${{ matrix.deb-depends }} - NIPYPE_EXTRAS: ${{ matrix.nipype-extras }} - INSTALL_TYPE: pip - CI_SKIP_TEST: 1 + DEPENDS: ${{ matrix.dependencies }} steps: - uses: actions/checkout@v4 + - name: Install the latest version of uv + uses: astral-sh/setup-uv@v3 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Display Python version run: python -c "import sys; print(sys.version)" - - name: Create virtual environment - run: tools/ci/create_venv.sh - - name: Build archive + - name: Install tox run: | - source tools/ci/build_archive.sh - echo "ARCHIVE=$ARCHIVE" >> $GITHUB_ENV - - name: Install Debian dependencies - run: tools/ci/install_deb_dependencies.sh - if: ${{ matrix.os == 'ubuntu-latest' }} - - name: Install dependencies - run: tools/ci/install_dependencies.sh - - name: Install Nipype - run: tools/ci/install.sh - - name: Run tests - run: tools/ci/check.sh - if: ${{ matrix.check != 'skiptests' }} - - uses: codecov/codecov-action@v5 + uv tool install tox --with=tox-uv --with=tox-gh-actions + - name: Show tox config + run: tox c + - name: Run tox + run: tox -v --exit-and-dump-after 1200 + - uses: codecov/codecov-action@v4 with: token: ${{ secrets.CODECOV_TOKEN }} if: ${{ always() }} @@ -159,7 +130,7 @@ jobs: publish: runs-on: ubuntu-latest environment: "Package deployment" - needs: [stable, test-package] + needs: [test, test-package] if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') steps: - uses: actions/download-artifact@v4 From c51abf83af17595a448f3c09705db40779e78de4 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Mon, 18 Nov 2024 09:54:48 -0500 Subject: [PATCH 22/90] FIX: Set legacy printoptions in doctests --- nipype/conftest.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/nipype/conftest.py b/nipype/conftest.py index 18b8a1ca6d..151906678f 100644 --- a/nipype/conftest.py +++ b/nipype/conftest.py @@ -2,7 +2,7 @@ import shutil from tempfile import mkdtemp import pytest -import numpy +import numpy as np import py.path as pp NIPYPE_DATADIR = 
os.path.realpath( @@ -15,12 +15,17 @@ @pytest.fixture(autouse=True) def add_np(doctest_namespace): - doctest_namespace["np"] = numpy + doctest_namespace["np"] = np doctest_namespace["os"] = os doctest_namespace["pytest"] = pytest doctest_namespace["datadir"] = data_dir +@pytest.fixture(scope='session', autouse=True) +def legacy_printoptions(): + np.set_printoptions(legacy='1.21') + + @pytest.fixture(autouse=True) def _docdir(request): """Grabbed from https://stackoverflow.com/a/46991331""" From 1d324ad8c2ac94c546d2ac5f7d651a1aa76adf7e Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Mon, 18 Nov 2024 09:56:57 -0500 Subject: [PATCH 23/90] chore: Configure pytest globally --- nipype/info.py | 2 +- pyproject.toml | 21 +++++++++++++++++++++ 2 files changed, 22 insertions(+), 1 deletion(-) diff --git a/nipype/info.py b/nipype/info.py index edee6b3283..38a84e5a6b 100644 --- a/nipype/info.py +++ b/nipype/info.py @@ -153,7 +153,7 @@ def get_nipype_gitversion(): TESTS_REQUIRES = [ "codecov", "coverage", - "pytest", + "pytest >= 6", "pytest-cov", "pytest-env", "pytest-timeout", diff --git a/pyproject.toml b/pyproject.toml index 06f4d798c7..2b1282eb74 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,3 +4,24 @@ build-backend = "setuptools.build_meta" [tool.black] skip-string-normalization = true + +[tool.pytest.ini_options] +minversion = "6" +testpaths = ["nipype"] +log_cli_level = "INFO" +xfail_strict = true +norecursedirs = [".git"] +addopts = [ + "-svx", + "-ra", + "--strict-config", + "--strict-markers", + "--doctest-modules", + "--cov=nipype", + "--cov-report=xml", + "--cov-config=pyproject.toml", +] +doctest_optionflags = "ALLOW_UNICODE NORMALIZE_WHITESPACE ELLIPSIS" +env = "PYTHONHASHSEED=0" +filterwarnings = ["ignore::DeprecationWarning"] +junit_family = "xunit2" From 621c89411cd4f71101ad8247952d9c3f47f2a5b8 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Wed, 6 Jan 2021 09:53:11 -0500 Subject: [PATCH 24/90] FIX: Convert timing values to datetimes from strings * exclude nodes without timing information from Gantt chart * fall back on "id" or empty string if no "name" in node --- nipype/utils/draw_gantt_chart.py | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/nipype/utils/draw_gantt_chart.py b/nipype/utils/draw_gantt_chart.py index 3ae4b77246..fbfe502afe 100644 --- a/nipype/utils/draw_gantt_chart.py +++ b/nipype/utils/draw_gantt_chart.py @@ -307,7 +307,7 @@ def draw_nodes(start, nodes_list, cores, minute_scale, space_between_minutes, co "offset": offset, "scale_duration": scale_duration, "color": color, - "node_name": node["name"], + "node_name": node.get("name", node.get("id", "")), "node_dur": node["duration"] / 60.0, "node_start": node_start.strftime("%Y-%m-%d %H:%M:%S"), "node_finish": node_finish.strftime("%Y-%m-%d %H:%M:%S"), @@ -527,6 +527,20 @@ def generate_gantt_chart( # Read in json-log to get list of node dicts nodes_list = log_to_dict(logfile) + # Only include nodes with timing information, and covert timestamps + # from strings to datetimes + nodes_list = [{ + k: datetime.datetime.strptime( + i[k], "%Y-%m-%dT%H:%M:%S.%f" + ) if k in {"start", "finish"} else i[k] for k in i + } for i in nodes_list if "start" in i and "finish" in i] + + for node in nodes_list: + if "duration" not in node: + node["duration"] = ( + node["finish"] - node["start"] + ).total_seconds() + # Create the header of the report with useful information start_node = nodes_list[0] last_node = nodes_list[-1] From 2cf2d37b83496a87f0d2c37a8af940e3023d60ee Mon Sep 17 00:00:00 
2001 From: Jon Clucas Date: Wed, 6 Jan 2021 10:16:59 -0500 Subject: [PATCH 25/90] REF: Reduce double logging from exception to warning --- nipype/utils/draw_gantt_chart.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/nipype/utils/draw_gantt_chart.py b/nipype/utils/draw_gantt_chart.py index fbfe502afe..8c003b98b6 100644 --- a/nipype/utils/draw_gantt_chart.py +++ b/nipype/utils/draw_gantt_chart.py @@ -10,6 +10,7 @@ import simplejson as json from collections import OrderedDict +from warnings import warn # Pandas try: @@ -66,9 +67,9 @@ def create_event_dict(start_time, nodes_list): finish_delta = (node["finish"] - start_time).total_seconds() # Populate dictionary - if events.get(start_delta) or events.get(finish_delta): + if events.get(start_delta): err_msg = "Event logged twice or events started at exact same time!" - raise KeyError(err_msg) + warn(str(KeyError(err_msg)), category=Warning) events[start_delta] = start_node events[finish_delta] = finish_node From 2e50f46be0adb71e77267025ca2a5076e1d49db5 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Wed, 6 Jan 2021 12:35:54 -0500 Subject: [PATCH 26/90] TST: Add test for `draw_gantt_chart` --- .../pipeline/plugins/tests/test_callback.py | 33 +++++++++++++++++++ nipype/utils/draw_gantt_chart.py | 5 +-- 2 files changed, 36 insertions(+), 2 deletions(-) diff --git a/nipype/pipeline/plugins/tests/test_callback.py b/nipype/pipeline/plugins/tests/test_callback.py index f7606708c7..34e7cff2ee 100644 --- a/nipype/pipeline/plugins/tests/test_callback.py +++ b/nipype/pipeline/plugins/tests/test_callback.py @@ -60,3 +60,36 @@ def test_callback_exception(tmpdir, plugin, stop_on_first_crash): sleep(0.5) # Wait for callback to be called (python 2.7) assert so.statuses == [("f_node", "start"), ("f_node", "exception")] + + +@pytest.mark.parametrize("plugin", ["Linear", "MultiProc", "LegacyMultiProc"]) +def test_callback_gantt(tmpdir, plugin): + import logging + import logging.handlers + + from os import path + + from nipype.utils.profiler import log_nodes_cb + from nipype.utils.draw_gantt_chart import generate_gantt_chart + + log_filename = 'callback.log' + logger = logging.getLogger('callback') + logger.setLevel(logging.DEBUG) + handler = logging.FileHandler(log_filename) + logger.addHandler(handler) + + #create workflow + wf = pe.Workflow(name="test", base_dir=tmpdir.strpath) + f_node = pe.Node( + niu.Function(function=func, input_names=[], output_names=[]), name="f_node" + ) + wf.add_nodes([f_node]) + wf.config["execution"] = {"crashdump_dir": wf.base_dir, "poll_sleep_duration": 2} + + plugin_args = {"status_callback": log_nodes_cb} + if plugin != "Linear": + plugin_args['n_procs'] = 8 + wf.run(plugin=plugin, plugin_args=plugin_args) + + generate_gantt_chart('callback.log', 1 if plugin == "Linear" else 8) + assert path.exists('callback.log.html') \ No newline at end of file diff --git a/nipype/utils/draw_gantt_chart.py b/nipype/utils/draw_gantt_chart.py index 8c003b98b6..c373ba24fe 100644 --- a/nipype/utils/draw_gantt_chart.py +++ b/nipype/utils/draw_gantt_chart.py @@ -153,13 +153,14 @@ def calculate_resource_timeseries(events, resource): all_res = 0.0 # Iterate through the events + nan = {"Unknown", "N/A"} for _, event in sorted(events.items()): if event["event"] == "start": - if resource in event and event[resource] != "Unknown": + if resource in event and event[resource] not in nan: all_res += float(event[resource]) current_time = event["start"] elif event["event"] == "finish": - if resource in event and event[resource] != 
"Unknown": + if resource in event and event[resource] not in nan: all_res -= float(event[resource]) current_time = event["finish"] res[current_time] = all_res From 7272623cf9d0c371f0738a24054216b4b0fd69ff Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Fri, 8 Jan 2021 09:35:29 -0500 Subject: [PATCH 27/90] STY: Automatic linting by pre-commit --- .../pipeline/plugins/tests/test_callback.py | 12 ++++++------ nipype/utils/draw_gantt_chart.py | 19 +++++++++++-------- 2 files changed, 17 insertions(+), 14 deletions(-) diff --git a/nipype/pipeline/plugins/tests/test_callback.py b/nipype/pipeline/plugins/tests/test_callback.py index 34e7cff2ee..e568a2bd72 100644 --- a/nipype/pipeline/plugins/tests/test_callback.py +++ b/nipype/pipeline/plugins/tests/test_callback.py @@ -72,13 +72,13 @@ def test_callback_gantt(tmpdir, plugin): from nipype.utils.profiler import log_nodes_cb from nipype.utils.draw_gantt_chart import generate_gantt_chart - log_filename = 'callback.log' - logger = logging.getLogger('callback') + log_filename = "callback.log" + logger = logging.getLogger("callback") logger.setLevel(logging.DEBUG) handler = logging.FileHandler(log_filename) logger.addHandler(handler) - #create workflow + # create workflow wf = pe.Workflow(name="test", base_dir=tmpdir.strpath) f_node = pe.Node( niu.Function(function=func, input_names=[], output_names=[]), name="f_node" @@ -88,8 +88,8 @@ def test_callback_gantt(tmpdir, plugin): plugin_args = {"status_callback": log_nodes_cb} if plugin != "Linear": - plugin_args['n_procs'] = 8 + plugin_args["n_procs"] = 8 wf.run(plugin=plugin, plugin_args=plugin_args) - generate_gantt_chart('callback.log', 1 if plugin == "Linear" else 8) - assert path.exists('callback.log.html') \ No newline at end of file + generate_gantt_chart("callback.log", 1 if plugin == "Linear" else 8) + assert path.exists("callback.log.html") diff --git a/nipype/utils/draw_gantt_chart.py b/nipype/utils/draw_gantt_chart.py index c373ba24fe..aed861f7ad 100644 --- a/nipype/utils/draw_gantt_chart.py +++ b/nipype/utils/draw_gantt_chart.py @@ -531,17 +531,20 @@ def generate_gantt_chart( # Only include nodes with timing information, and covert timestamps # from strings to datetimes - nodes_list = [{ - k: datetime.datetime.strptime( - i[k], "%Y-%m-%dT%H:%M:%S.%f" - ) if k in {"start", "finish"} else i[k] for k in i - } for i in nodes_list if "start" in i and "finish" in i] + nodes_list = [ + { + k: datetime.datetime.strptime(i[k], "%Y-%m-%dT%H:%M:%S.%f") + if k in {"start", "finish"} + else i[k] + for k in i + } + for i in nodes_list + if "start" in i and "finish" in i + ] for node in nodes_list: if "duration" not in node: - node["duration"] = ( - node["finish"] - node["start"] - ).total_seconds() + node["duration"] = (node["finish"] - node["start"]).total_seconds() # Create the header of the report with useful information start_node = nodes_list[0] From ea4def1837005a2a5546a4769d74105164bcbf47 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Fri, 8 Jan 2021 15:11:11 -0500 Subject: [PATCH 28/90] TST: Use tmpdir for Gantt test --- nipype/pipeline/plugins/tests/test_callback.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/nipype/pipeline/plugins/tests/test_callback.py b/nipype/pipeline/plugins/tests/test_callback.py index e568a2bd72..c19687958a 100644 --- a/nipype/pipeline/plugins/tests/test_callback.py +++ b/nipype/pipeline/plugins/tests/test_callback.py @@ -72,7 +72,7 @@ def test_callback_gantt(tmpdir, plugin): from nipype.utils.profiler import log_nodes_cb from 
nipype.utils.draw_gantt_chart import generate_gantt_chart - log_filename = "callback.log" + log_filename = path.join(tmpdir, "callback.log") logger = logging.getLogger("callback") logger.setLevel(logging.DEBUG) handler = logging.FileHandler(log_filename) @@ -91,5 +91,7 @@ def test_callback_gantt(tmpdir, plugin): plugin_args["n_procs"] = 8 wf.run(plugin=plugin, plugin_args=plugin_args) - generate_gantt_chart("callback.log", 1 if plugin == "Linear" else 8) - assert path.exists("callback.log.html") + generate_gantt_chart( + path.join(tmpdir, "callback.log"), 1 if plugin == "Linear" else 8 + ) + assert path.exists(path.join(tmpdir, "callback.log.html")) From 169c09e6fe89e430d919756d755c10ead10c410d Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Fri, 8 Jan 2021 15:24:12 -0500 Subject: [PATCH 29/90] REF: Don't restrict nan timestamps to predetermined options --- nipype/utils/draw_gantt_chart.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/nipype/utils/draw_gantt_chart.py b/nipype/utils/draw_gantt_chart.py index aed861f7ad..d94d339509 100644 --- a/nipype/utils/draw_gantt_chart.py +++ b/nipype/utils/draw_gantt_chart.py @@ -153,15 +153,20 @@ def calculate_resource_timeseries(events, resource): all_res = 0.0 # Iterate through the events - nan = {"Unknown", "N/A"} for _, event in sorted(events.items()): if event["event"] == "start": - if resource in event and event[resource] not in nan: - all_res += float(event[resource]) + if resource in event: + try: + all_res += float(event[resource]) + except ValueError: + next current_time = event["start"] elif event["event"] == "finish": - if resource in event and event[resource] not in nan: - all_res -= float(event[resource]) + if resource in event: + try: + all_res -= float(event[resource]) + except ValueError: + next current_time = event["finish"] res[current_time] = all_res From 9637b0f8139a9732ad38c75c554d46a678c3a823 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Thu, 1 Apr 2021 16:03:50 +0000 Subject: [PATCH 30/90] STY: Simplify warning Co-authored-by: Mathias Goncalves --- nipype/utils/draw_gantt_chart.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nipype/utils/draw_gantt_chart.py b/nipype/utils/draw_gantt_chart.py index d94d339509..c7a1a5153f 100644 --- a/nipype/utils/draw_gantt_chart.py +++ b/nipype/utils/draw_gantt_chart.py @@ -69,7 +69,7 @@ def create_event_dict(start_time, nodes_list): # Populate dictionary if events.get(start_delta): err_msg = "Event logged twice or events started at exact same time!" 
- warn(str(KeyError(err_msg)), category=Warning) + warn(err_msg, category=Warning) events[start_delta] = start_node events[finish_delta] = finish_node From f336c22b7d21e9347e9941417d1e29aec682ef95 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Thu, 1 Apr 2021 16:04:25 +0000 Subject: [PATCH 31/90] REF: Remove unnecessary import Co-authored-by: Mathias Goncalves --- nipype/pipeline/plugins/tests/test_callback.py | 1 - 1 file changed, 1 deletion(-) diff --git a/nipype/pipeline/plugins/tests/test_callback.py b/nipype/pipeline/plugins/tests/test_callback.py index c19687958a..02234522fa 100644 --- a/nipype/pipeline/plugins/tests/test_callback.py +++ b/nipype/pipeline/plugins/tests/test_callback.py @@ -65,7 +65,6 @@ def test_callback_exception(tmpdir, plugin, stop_on_first_crash): @pytest.mark.parametrize("plugin", ["Linear", "MultiProc", "LegacyMultiProc"]) def test_callback_gantt(tmpdir, plugin): import logging - import logging.handlers from os import path From d76af5773e1074cd32d4d7fa6d46e91523fbd81a Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Thu, 1 Apr 2021 12:17:20 -0400 Subject: [PATCH 32/90] =?UTF-8?q?FIX:=20next=20=E2=89=A0=20continue?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Ref https://github.com/nipy/nipype/pull/3290#discussion_r605706537, https://github.com/nipy/nipype/pull/3290#discussion_r605711954 Co-authored-by: Mathias Goncalves --- nipype/utils/draw_gantt_chart.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nipype/utils/draw_gantt_chart.py b/nipype/utils/draw_gantt_chart.py index c7a1a5153f..fe6cc7626d 100644 --- a/nipype/utils/draw_gantt_chart.py +++ b/nipype/utils/draw_gantt_chart.py @@ -159,14 +159,14 @@ def calculate_resource_timeseries(events, resource): try: all_res += float(event[resource]) except ValueError: - next + continue current_time = event["start"] elif event["event"] == "finish": if resource in event: try: all_res -= float(event[resource]) except ValueError: - next + continue current_time = event["finish"] res[current_time] = all_res From a80923f6e1a6e7a0fd09ae6c410b583900d2093f Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Mon, 5 Apr 2021 09:37:05 -0400 Subject: [PATCH 33/90] TST: Skip test that requires pandas if pandas not installed Co-authored-by: Chris Markiewicz --- nipype/pipeline/plugins/tests/test_callback.py | 1 + 1 file changed, 1 insertion(+) diff --git a/nipype/pipeline/plugins/tests/test_callback.py b/nipype/pipeline/plugins/tests/test_callback.py index 02234522fa..5c82f11343 100644 --- a/nipype/pipeline/plugins/tests/test_callback.py +++ b/nipype/pipeline/plugins/tests/test_callback.py @@ -63,6 +63,7 @@ def test_callback_exception(tmpdir, plugin, stop_on_first_crash): @pytest.mark.parametrize("plugin", ["Linear", "MultiProc", "LegacyMultiProc"]) +@pytest.mark.skipif(not has_pandas, "Test requires pandas") def test_callback_gantt(tmpdir, plugin): import logging From 9096a5be85909d4570491c18c42629e17645efd3 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 13 Apr 2021 10:02:19 -0400 Subject: [PATCH 34/90] TEST: Add pandas import check --- nipype/pipeline/plugins/tests/test_callback.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/nipype/pipeline/plugins/tests/test_callback.py b/nipype/pipeline/plugins/tests/test_callback.py index 5c82f11343..d2e0d26be8 100644 --- a/nipype/pipeline/plugins/tests/test_callback.py +++ b/nipype/pipeline/plugins/tests/test_callback.py @@ -7,6 +7,11 @@ import nipype.interfaces.utility as niu import nipype.pipeline.engine 
as pe +try: + import pandas + has_pandas = True +except ImportError: + has_pandas = False def func(): return From b1690d5beb391e08c1e5463f1e3c641cf1e9f58e Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 13 Apr 2021 10:16:17 -0400 Subject: [PATCH 35/90] STY: black --- nipype/pipeline/plugins/tests/test_callback.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nipype/pipeline/plugins/tests/test_callback.py b/nipype/pipeline/plugins/tests/test_callback.py index d2e0d26be8..66526c76c4 100644 --- a/nipype/pipeline/plugins/tests/test_callback.py +++ b/nipype/pipeline/plugins/tests/test_callback.py @@ -11,7 +11,7 @@ import pandas has_pandas = True except ImportError: - has_pandas = False + has_pandas = False def func(): return From de6657e1ebdde1c6ed8c2c2914dfca70f5de7358 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 13 Apr 2021 10:34:55 -0400 Subject: [PATCH 36/90] STY/TEST: black and skipif syntax --- nipype/pipeline/plugins/tests/test_callback.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/nipype/pipeline/plugins/tests/test_callback.py b/nipype/pipeline/plugins/tests/test_callback.py index 66526c76c4..af6cbc76a1 100644 --- a/nipype/pipeline/plugins/tests/test_callback.py +++ b/nipype/pipeline/plugins/tests/test_callback.py @@ -9,10 +9,12 @@ try: import pandas + has_pandas = True except ImportError: has_pandas = False + def func(): return @@ -68,7 +70,7 @@ def test_callback_exception(tmpdir, plugin, stop_on_first_crash): @pytest.mark.parametrize("plugin", ["Linear", "MultiProc", "LegacyMultiProc"]) -@pytest.mark.skipif(not has_pandas, "Test requires pandas") +@pytest.mark.skipif(not has_pandas, reason="Test requires pandas") def test_callback_gantt(tmpdir, plugin): import logging From 6830e3ac4c5062b700a895e267c15f4aaf2e9ae1 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Mon, 18 Nov 2024 09:55:18 -0500 Subject: [PATCH 37/90] STY: Fix typo (co{^n}vert) --- nipype/utils/draw_gantt_chart.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/nipype/utils/draw_gantt_chart.py b/nipype/utils/draw_gantt_chart.py index fe6cc7626d..78dc589859 100644 --- a/nipype/utils/draw_gantt_chart.py +++ b/nipype/utils/draw_gantt_chart.py @@ -534,13 +534,15 @@ def generate_gantt_chart( # Read in json-log to get list of node dicts nodes_list = log_to_dict(logfile) - # Only include nodes with timing information, and covert timestamps + # Only include nodes with timing information, and convert timestamps # from strings to datetimes nodes_list = [ { - k: datetime.datetime.strptime(i[k], "%Y-%m-%dT%H:%M:%S.%f") - if k in {"start", "finish"} - else i[k] + k: ( + datetime.datetime.strptime(i[k], "%Y-%m-%dT%H:%M:%S.%f") + if k in {"start", "finish"} + else i[k] + ) for k in i } for i in nodes_list From 8abddf988000dd90f4e57196e71368fe34936eca Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Mon, 18 Nov 2024 10:10:38 -0500 Subject: [PATCH 38/90] chore: Update minimum dependencies, test them --- .github/workflows/tests.yml | 8 ++++---- nipype/info.py | 21 ++++++++++----------- 2 files changed, 14 insertions(+), 15 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 669cbf9285..83652f4e4f 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -80,10 +80,10 @@ jobs: os: ["ubuntu-latest"] python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] dependencies: [none, full, pre] - # include: - # - os: ubuntu-latest - # python-version: "3.9" - # dependencies: min + include: + 
- os: ubuntu-latest + python-version: "3.9" + dependencies: min exclude: # Skip some intermediate versions for full tests - python-version: "3.10" diff --git a/nipype/info.py b/nipype/info.py index 38a84e5a6b..de202e017e 100644 --- a/nipype/info.py +++ b/nipype/info.py @@ -101,9 +101,9 @@ def get_nipype_gitversion(): # versions NIBABEL_MIN_VERSION = "2.1.0" NETWORKX_MIN_VERSION = "2.0" -NUMPY_MIN_VERSION = "1.17" -SCIPY_MIN_VERSION = "0.14" -TRAITS_MIN_VERSION = "4.6" +NUMPY_MIN_VERSION = "1.21" +SCIPY_MIN_VERSION = "1.8" +TRAITS_MIN_VERSION = "6.2" DATEUTIL_MIN_VERSION = "2.2" SIMPLEJSON_MIN_VERSION = "3.8.0" PROV_MIN_VERSION = "1.5.2" @@ -143,23 +143,22 @@ def get_nipype_gitversion(): "rdflib>=%s" % RDFLIB_MIN_VERSION, "scipy>=%s" % SCIPY_MIN_VERSION, "simplejson>=%s" % SIMPLEJSON_MIN_VERSION, - "traits>=%s,!=5.0" % TRAITS_MIN_VERSION, + "traits>=%s" % TRAITS_MIN_VERSION, "filelock>=3.0.0", - "etelemetry>=0.2.0", + "etelemetry>=0.3.1", "looseversion!=1.2", "puremagic", ] TESTS_REQUIRES = [ - "codecov", - "coverage", + "coverage >= 5.2.1", "pytest >= 6", - "pytest-cov", + "pytest-cov >=2.11", "pytest-env", - "pytest-timeout", + "pytest-timeout >=1.4", "pytest-doctestplus", - "pytest-xdist", - "sphinx", + "pytest-xdist >= 2.5", + "sphinx >=7", ] EXTRA_REQUIRES = { From ddfb69c0fb35b79d7a05a8734f2c8e8da0ee2b0b Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Mon, 18 Nov 2024 10:10:54 -0500 Subject: [PATCH 39/90] fix: Replace pkg_resources with acres --- nipype/info.py | 1 + nipype/interfaces/base/tests/test_support.py | 9 ++++----- nipype/interfaces/fsl/model.py | 12 +++--------- 3 files changed, 8 insertions(+), 14 deletions(-) diff --git a/nipype/info.py b/nipype/info.py index de202e017e..57dc37fc26 100644 --- a/nipype/info.py +++ b/nipype/info.py @@ -145,6 +145,7 @@ def get_nipype_gitversion(): "simplejson>=%s" % SIMPLEJSON_MIN_VERSION, "traits>=%s" % TRAITS_MIN_VERSION, "filelock>=3.0.0", + "acres", "etelemetry>=0.3.1", "looseversion!=1.2", "puremagic", diff --git a/nipype/interfaces/base/tests/test_support.py b/nipype/interfaces/base/tests/test_support.py index 52770e476c..406e6e9358 100644 --- a/nipype/interfaces/base/tests/test_support.py +++ b/nipype/interfaces/base/tests/test_support.py @@ -3,7 +3,7 @@ import os import pytest -from pkg_resources import resource_filename as pkgrf +import acres from ....utils.filemanip import md5 from ... import base as nib @@ -42,14 +42,13 @@ def test_bunch_methods(): def test_bunch_hash(): # NOTE: Since the path to the json file is included in the Bunch, # the hash will be unique to each machine. - json_pth = pkgrf("nipype", os.path.join("testing", "data", "realign_json.json")) + json_pth = acres.Loader('nipype.testing').cached('data', 'realign_json.json') - b = nib.Bunch(infile=json_pth, otherthing="blue", yat=True) + b = nib.Bunch(infile=str(json_pth), otherthing="blue", yat=True) newbdict, bhash = b._get_bunch_hash() assert bhash == "d1f46750044c3de102efc847720fc35f" # Make sure the hash stored in the json file for `infile` is correct. 
jshash = md5() - with open(json_pth) as fp: - jshash.update(fp.read().encode("utf-8")) + jshash.update(json_pth.read_bytes()) assert newbdict["infile"][0][1] == jshash.hexdigest() assert newbdict["yat"] is True diff --git a/nipype/interfaces/fsl/model.py b/nipype/interfaces/fsl/model.py index 2a148025f5..2ada4ab969 100644 --- a/nipype/interfaces/fsl/model.py +++ b/nipype/interfaces/fsl/model.py @@ -9,6 +9,7 @@ from shutil import rmtree from string import Template +import acres import numpy as np from looseversion import LooseVersion from nibabel import load @@ -2547,12 +2548,5 @@ def load_template(name): template : string.Template """ - from pkg_resources import resource_filename as pkgrf - - full_fname = pkgrf( - "nipype", os.path.join("interfaces", "fsl", "model_templates", name) - ) - with open(full_fname) as template_file: - template = Template(template_file.read()) - - return template + loader = acres.Loader('nipype.interfaces.fsl') + return Template(loader.readable('model_templates', name).read_text()) From d986f535ede6bcb0e637ade3d23c44917e82b1da Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Mon, 18 Nov 2024 10:29:38 -0500 Subject: [PATCH 40/90] Update minimum networkx, nibabel --- nipype/info.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nipype/info.py b/nipype/info.py index 57dc37fc26..bce47c3e3a 100644 --- a/nipype/info.py +++ b/nipype/info.py @@ -99,8 +99,8 @@ def get_nipype_gitversion(): """ # versions -NIBABEL_MIN_VERSION = "2.1.0" -NETWORKX_MIN_VERSION = "2.0" +NIBABEL_MIN_VERSION = "3.0" +NETWORKX_MIN_VERSION = "2.5" NUMPY_MIN_VERSION = "1.21" SCIPY_MIN_VERSION = "1.8" TRAITS_MIN_VERSION = "6.2" From 8e271230ef6b1b6b3053de930ec31ccd80b71ff2 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Mon, 18 Nov 2024 10:43:00 -0500 Subject: [PATCH 41/90] fix: Handle new/old networkx graph emissions --- nipype/pipeline/engine/tests/test_engine.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/nipype/pipeline/engine/tests/test_engine.py b/nipype/pipeline/engine/tests/test_engine.py index abf9426d43..f1b6817e74 100644 --- a/nipype/pipeline/engine/tests/test_engine.py +++ b/nipype/pipeline/engine/tests/test_engine.py @@ -541,7 +541,9 @@ def test_write_graph_dotfile(tmpdir, graph_type, simple): pipe.write_graph(graph2use=graph_type, simple_form=simple, format="dot") with open("graph.dot") as f: - graph_str = f.read() + # Replace handles change in networkx behavior when graph is missing a name + # Probably around 3, but I haven't tracked it down. + graph_str = f.read().replace(' {', ' {') if simple: for line in dotfiles[graph_type]: @@ -635,7 +637,9 @@ def test_write_graph_dotfile_iterables(tmpdir, graph_type, simple): pipe.write_graph(graph2use=graph_type, simple_form=simple, format="dot") with open("graph.dot") as f: - graph_str = f.read() + # Replace handles change in networkx behavior when graph is missing a name + # Probably around 3, but I haven't tracked it down. 
+ graph_str = f.read().replace(' {', ' {') if simple: for line in dotfiles_iter[graph_type]: From 376d6e22fcc776a1c3bba297fbdf1e0dff4d7a57 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Mon, 18 Nov 2024 11:02:12 -0500 Subject: [PATCH 42/90] FIX: Don't try to `strptime` something that's already a `datetime` --- nipype/utils/draw_gantt_chart.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nipype/utils/draw_gantt_chart.py b/nipype/utils/draw_gantt_chart.py index 78dc589859..21e449d333 100644 --- a/nipype/utils/draw_gantt_chart.py +++ b/nipype/utils/draw_gantt_chart.py @@ -540,7 +540,7 @@ def generate_gantt_chart( { k: ( datetime.datetime.strptime(i[k], "%Y-%m-%dT%H:%M:%S.%f") - if k in {"start", "finish"} + if k in {"start", "finish"} and isinstance(i[k], str) else i[k] ) for k in i From ddb73acf1e7ae5762b75944bade9393ef4856eb2 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Mon, 18 Nov 2024 11:15:32 -0500 Subject: [PATCH 43/90] chore(ci): Replace contrib.yml with tox --- .github/workflows/contrib.yml | 83 ----------------------------------- .github/workflows/tests.yml | 18 ++++++++ tox.ini | 21 +++++++++ 3 files changed, 39 insertions(+), 83 deletions(-) delete mode 100644 .github/workflows/contrib.yml diff --git a/.github/workflows/contrib.yml b/.github/workflows/contrib.yml deleted file mode 100644 index dca5bbdecb..0000000000 --- a/.github/workflows/contrib.yml +++ /dev/null @@ -1,83 +0,0 @@ -name: Contribution checks - -# This checks validate contributions meet baseline checks -# -# * specs - Ensure make - -on: - push: - branches: - - master - - maint/* - pull_request: - branches: - - master - - maint/* - -defaults: - run: - shell: bash - -concurrency: - group: contrib-${{ github.ref }} - cancel-in-progress: true - -permissions: - contents: read # to fetch code (actions/checkout) - -jobs: - stable: - # Check each OS, all supported Python, minimum versions and latest releases - runs-on: ${{ matrix.os }} - strategy: - matrix: - os: ["ubuntu-latest"] - python-version: ["3.12"] - nipype-extras: ["dev"] - check: ["specs", "style"] - env: - DEPENDS: "" - CHECK_TYPE: ${{ matrix.check }} - NIPYPE_EXTRAS: ${{ matrix.nipype-extras }} - EXTRA_PIP_FLAGS: "" - INSTALL_DEB_DEPENDENCIES: false - INSTALL_TYPE: pip - CI_SKIP_TEST: 1 - - steps: - - uses: actions/checkout@v4 - with: - submodules: recursive - fetch-depth: 0 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - - name: Display Python version - run: python -c "import sys; print(sys.version)" - - name: Create virtual environment - run: tools/ci/create_venv.sh - - name: Build archive - run: | - source tools/ci/build_archive.sh - echo "ARCHIVE=$ARCHIVE" >> $GITHUB_ENV - - name: Install Debian dependencies - run: tools/ci/install_deb_dependencies.sh - if: ${{ matrix.os == 'ubuntu-18.04' }} - - name: Install dependencies - run: tools/ci/install_dependencies.sh - - name: Install Nipype - run: tools/ci/install.sh - - name: Run tests - run: tools/ci/check.sh - if: ${{ matrix.check != 'skiptests' }} - - uses: codecov/codecov-action@v5 - with: - token: ${{ secrets.CODECOV_TOKEN }} - if: ${{ always() }} - - name: Upload pytest test results - uses: actions/upload-artifact@v4 - with: - name: pytest-results-${{ matrix.os }}-${{ matrix.python-version }} - path: test-results.xml - if: ${{ always() && matrix.check == 'test' }} diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 83652f4e4f..ee5786af8f 100644 --- 
a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -141,3 +141,21 @@ jobs: with: user: __token__ password: ${{ secrets.PYPI_API_TOKEN }} + + checks: + runs-on: 'ubuntu-latest' + continue-on-error: true + strategy: + matrix: + check: ['specs', 'style'] + + steps: + - uses: actions/checkout@v4 + - name: Install the latest version of uv + uses: astral-sh/setup-uv@v3 + - name: Show tox config + run: uvx tox c + - name: Show tox config (this call) + run: uvx tox c -e ${{ matrix.check }} + - name: Run check + run: uvx tox -e ${{ matrix.check }} diff --git a/tox.ini b/tox.ini index 89f5fe1118..e8bc7e8f04 100644 --- a/tox.ini +++ b/tox.ini @@ -88,3 +88,24 @@ uv_resolution = commands = python -c "import nipype; print(nipype.__version__)" pytest --durations=20 --durations-min=1.0 --cov-report term-missing {posargs:-n auto} + +[testenv:specs] +description = Rebuild spec tests +deps = + black + # Rebuild dipy specs + dipy + # Faster to install old numpy than unreleased Dipy + # This can be dropped once a Dipy release supports numpy 2 + numpy<2 +commands = + python tools/checkspecs.py + +[testenv:style] +description = Check our style guide +labels = check +deps = + black +skip_install = true +commands = + black --check --diff nipype setup.py From 19a03554486385334babf85511deccb04665c289 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Mon, 18 Nov 2024 11:27:58 -0500 Subject: [PATCH 44/90] TEST: Update Gantt chart tests for coverage --- .../pipeline/plugins/tests/test_callback.py | 33 ++++++++++++++----- nipype/utils/draw_gantt_chart.py | 24 ++++++++++---- 2 files changed, 41 insertions(+), 16 deletions(-) diff --git a/nipype/pipeline/plugins/tests/test_callback.py b/nipype/pipeline/plugins/tests/test_callback.py index af6cbc76a1..246f2b8ecf 100644 --- a/nipype/pipeline/plugins/tests/test_callback.py +++ b/nipype/pipeline/plugins/tests/test_callback.py @@ -1,8 +1,9 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Tests for workflow callbacks -""" +"""Tests for workflow callbacks.""" +from pathlib import Path from time import sleep +import json import pytest import nipype.interfaces.utility as niu import nipype.pipeline.engine as pe @@ -71,7 +72,7 @@ def test_callback_exception(tmpdir, plugin, stop_on_first_crash): @pytest.mark.parametrize("plugin", ["Linear", "MultiProc", "LegacyMultiProc"]) @pytest.mark.skipif(not has_pandas, reason="Test requires pandas") -def test_callback_gantt(tmpdir, plugin): +def test_callback_gantt(tmp_path: Path, plugin: str) -> None: import logging from os import path @@ -79,14 +80,14 @@ def test_callback_gantt(tmpdir, plugin): from nipype.utils.profiler import log_nodes_cb from nipype.utils.draw_gantt_chart import generate_gantt_chart - log_filename = path.join(tmpdir, "callback.log") + log_filename = tmp_path / "callback.log" logger = logging.getLogger("callback") logger.setLevel(logging.DEBUG) handler = logging.FileHandler(log_filename) logger.addHandler(handler) # create workflow - wf = pe.Workflow(name="test", base_dir=tmpdir.strpath) + wf = pe.Workflow(name="test", base_dir=str(tmp_path)) f_node = pe.Node( niu.Function(function=func, input_names=[], output_names=[]), name="f_node" ) @@ -98,7 +99,21 @@ def test_callback_gantt(tmpdir, plugin): plugin_args["n_procs"] = 8 wf.run(plugin=plugin, plugin_args=plugin_args) - generate_gantt_chart( - path.join(tmpdir, "callback.log"), 1 if plugin == "Linear" else 8 - ) - assert path.exists(path.join(tmpdir, "callback.log.html")) + with 
open(log_filename, "r") as _f: + loglines = _f.readlines() + + # test missing duration + first_line = json.loads(loglines[0]) + if "duration" in first_line: + del first_line["duration"] + loglines[0] = f"{json.dumps(first_line)}\n" + + # test duplicate timestamp warning + loglines.append(loglines[-1]) + + with open(log_filename, "w") as _f: + _f.write("".join(loglines)) + + with pytest.warns(Warning): + generate_gantt_chart(str(log_filename), 1 if plugin == "Linear" else 8) + assert (tmp_path / "callback.log.html").exists() diff --git a/nipype/utils/draw_gantt_chart.py b/nipype/utils/draw_gantt_chart.py index 21e449d333..92d9bc363c 100644 --- a/nipype/utils/draw_gantt_chart.py +++ b/nipype/utils/draw_gantt_chart.py @@ -102,15 +102,25 @@ def log_to_dict(logfile): nodes_list = [json.loads(l) for l in lines] - def _convert_string_to_datetime(datestring): - try: + def _convert_string_to_datetime( + datestring: str | datetime.datetime, + ) -> datetime.datetime: + """Convert a date string to a datetime object.""" + if isinstance(datestring, datetime.datetime): + datetime_object = datestring + elif isinstance(datestring, str): + date_format = ( + "%Y-%m-%dT%H:%M:%S.%f%z" + if "+" in datestring + else "%Y-%m-%dT%H:%M:%S.%f" + ) datetime_object: datetime.datetime = datetime.datetime.strptime( - datestring, "%Y-%m-%dT%H:%M:%S.%f" + datestring, date_format ) - return datetime_object - except Exception as _: - pass - return datestring + else: + msg = f"{datestring} is not a string or datetime object." + raise TypeError(msg) + return datetime_object date_object_node_list: list = list() for n in nodes_list: From 73f657f09fa7bda5cb4030f2bbe07eed38cfba2e Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Mon, 18 Nov 2024 13:17:00 -0500 Subject: [PATCH 45/90] REF: Require Pandas for tests Ref https://github.com/nipy/nipype/pull/3290#discussion_r1846980527 --- nipype/info.py | 12 +++++++----- nipype/pipeline/plugins/tests/test_callback.py | 8 -------- 2 files changed, 7 insertions(+), 13 deletions(-) diff --git a/nipype/info.py b/nipype/info.py index bce47c3e3a..84b84d34ad 100644 --- a/nipype/info.py +++ b/nipype/info.py @@ -99,11 +99,12 @@ def get_nipype_gitversion(): """ # versions -NIBABEL_MIN_VERSION = "3.0" -NETWORKX_MIN_VERSION = "2.5" -NUMPY_MIN_VERSION = "1.21" -SCIPY_MIN_VERSION = "1.8" -TRAITS_MIN_VERSION = "6.2" +NIBABEL_MIN_VERSION = "2.1.0" +NETWORKX_MIN_VERSION = "2.0" +NUMPY_MIN_VERSION = "1.17" +NUMPY_MAX_VERSION = "2.0" +SCIPY_MIN_VERSION = "0.14" +TRAITS_MIN_VERSION = "4.6" DATEUTIL_MIN_VERSION = "2.2" SIMPLEJSON_MIN_VERSION = "3.8.0" PROV_MIN_VERSION = "1.5.2" @@ -153,6 +154,7 @@ def get_nipype_gitversion(): TESTS_REQUIRES = [ "coverage >= 5.2.1", + "pandas > 1.5.0, <= 2.0", "pytest >= 6", "pytest-cov >=2.11", "pytest-env", diff --git a/nipype/pipeline/plugins/tests/test_callback.py b/nipype/pipeline/plugins/tests/test_callback.py index 246f2b8ecf..f5240043a2 100644 --- a/nipype/pipeline/plugins/tests/test_callback.py +++ b/nipype/pipeline/plugins/tests/test_callback.py @@ -8,13 +8,6 @@ import nipype.interfaces.utility as niu import nipype.pipeline.engine as pe -try: - import pandas - - has_pandas = True -except ImportError: - has_pandas = False - def func(): return @@ -71,7 +64,6 @@ def test_callback_exception(tmpdir, plugin, stop_on_first_crash): @pytest.mark.parametrize("plugin", ["Linear", "MultiProc", "LegacyMultiProc"]) -@pytest.mark.skipif(not has_pandas, reason="Test requires pandas") def test_callback_gantt(tmp_path: Path, plugin: str) -> None: import logging From 
8329d088c97c24bf904eb662736ec2f4f3d7bd8b Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Mon, 18 Nov 2024 13:18:15 -0500 Subject: [PATCH 46/90] REF: 3.9-friendly typing.Union --- nipype/utils/draw_gantt_chart.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nipype/utils/draw_gantt_chart.py b/nipype/utils/draw_gantt_chart.py index 92d9bc363c..393d7f7308 100644 --- a/nipype/utils/draw_gantt_chart.py +++ b/nipype/utils/draw_gantt_chart.py @@ -8,6 +8,7 @@ import random import datetime import simplejson as json +from typing import Union from collections import OrderedDict from warnings import warn @@ -103,7 +104,7 @@ def log_to_dict(logfile): nodes_list = [json.loads(l) for l in lines] def _convert_string_to_datetime( - datestring: str | datetime.datetime, + datestring: Union[str, datetime.datetime], ) -> datetime.datetime: """Convert a date string to a datetime object.""" if isinstance(datestring, datetime.datetime): From 4c0835f608e9e2fd5861aab947fde6577a58d3e8 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Mon, 18 Nov 2024 13:51:04 -0500 Subject: [PATCH 47/90] REF: Handle absence/presence of tzinfo --- nipype/info.py | 11 +++++------ nipype/utils/draw_gantt_chart.py | 9 ++++++++- 2 files changed, 13 insertions(+), 7 deletions(-) diff --git a/nipype/info.py b/nipype/info.py index 84b84d34ad..f7ca6e66a9 100644 --- a/nipype/info.py +++ b/nipype/info.py @@ -99,12 +99,11 @@ def get_nipype_gitversion(): """ # versions -NIBABEL_MIN_VERSION = "2.1.0" -NETWORKX_MIN_VERSION = "2.0" -NUMPY_MIN_VERSION = "1.17" -NUMPY_MAX_VERSION = "2.0" -SCIPY_MIN_VERSION = "0.14" -TRAITS_MIN_VERSION = "4.6" +NIBABEL_MIN_VERSION = "3.0" +NETWORKX_MIN_VERSION = "2.5" +NUMPY_MIN_VERSION = "1.21" +SCIPY_MIN_VERSION = "1.8" +TRAITS_MIN_VERSION = "6.2" DATEUTIL_MIN_VERSION = "2.2" SIMPLEJSON_MIN_VERSION = "3.8.0" PROV_MIN_VERSION = "1.5.2" diff --git a/nipype/utils/draw_gantt_chart.py b/nipype/utils/draw_gantt_chart.py index 393d7f7308..64a0d793db 100644 --- a/nipype/utils/draw_gantt_chart.py +++ b/nipype/utils/draw_gantt_chart.py @@ -302,7 +302,14 @@ def draw_nodes(start, nodes_list, cores, minute_scale, space_between_minutes, co # Left left = 60 for core in range(len(end_times)): - if end_times[core] < node_start: + try: + end_time_condition = end_times[core] < node_start + except TypeError: + # if one has a timezone and one does not + end_time_condition = end_times[core].replace( + tzinfo=None + ) < node_start.replace(tzinfo=None) + if end_time_condition: left += core * 30 end_times[core] = datetime.datetime( node_finish.year, From 12b6e3732ae4a9a97cb65db90a64ed6c6c31bbfd Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Mon, 18 Nov 2024 14:13:38 -0500 Subject: [PATCH 48/90] FIX: Drop pandas ceiling --- nipype/info.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nipype/info.py b/nipype/info.py index f7ca6e66a9..c8a8b9686d 100644 --- a/nipype/info.py +++ b/nipype/info.py @@ -153,7 +153,7 @@ def get_nipype_gitversion(): TESTS_REQUIRES = [ "coverage >= 5.2.1", - "pandas > 1.5.0, <= 2.0", + "pandas > 1.5.0", "pytest >= 6", "pytest-cov >=2.11", "pytest-env", From a693e129dc1fe40297d3fc4e0c228dcb41e184d4 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Mon, 18 Nov 2024 14:27:34 -0500 Subject: [PATCH 49/90] =?UTF-8?q?REF:=20=E2=89=A5=201.5.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Chris Markiewicz --- nipype/info.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nipype/info.py b/nipype/info.py index 
c8a8b9686d..47d765b34e 100644 --- a/nipype/info.py +++ b/nipype/info.py @@ -153,7 +153,7 @@ def get_nipype_gitversion(): TESTS_REQUIRES = [ "coverage >= 5.2.1", - "pandas > 1.5.0", + "pandas >= 1.5.0", "pytest >= 6", "pytest-cov >=2.11", "pytest-env", From 72239141a9d6b2d684d44447aec188372091ccc6 Mon Sep 17 00:00:00 2001 From: Jon Cluce Date: Mon, 18 Nov 2024 14:27:56 -0500 Subject: [PATCH 50/90] FIX: Too much indentation Co-authored-by: Chris Markiewicz --- nipype/pipeline/plugins/tests/test_callback.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/nipype/pipeline/plugins/tests/test_callback.py b/nipype/pipeline/plugins/tests/test_callback.py index f5240043a2..b10238ec4a 100644 --- a/nipype/pipeline/plugins/tests/test_callback.py +++ b/nipype/pipeline/plugins/tests/test_callback.py @@ -94,14 +94,14 @@ def test_callback_gantt(tmp_path: Path, plugin: str) -> None: with open(log_filename, "r") as _f: loglines = _f.readlines() - # test missing duration - first_line = json.loads(loglines[0]) - if "duration" in first_line: - del first_line["duration"] - loglines[0] = f"{json.dumps(first_line)}\n" - - # test duplicate timestamp warning - loglines.append(loglines[-1]) + # test missing duration + first_line = json.loads(loglines[0]) + if "duration" in first_line: + del first_line["duration"] + loglines[0] = f"{json.dumps(first_line)}\n" + + # test duplicate timestamp warning + loglines.append(loglines[-1]) with open(log_filename, "w") as _f: _f.write("".join(loglines)) From 3c31346b5de7ae5133cfceb23bad26b316606d0c Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Mon, 18 Nov 2024 17:00:28 -0500 Subject: [PATCH 51/90] doc: 1.9.1 changelog --- doc/changelog/1.X.X-changelog.rst | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/doc/changelog/1.X.X-changelog.rst b/doc/changelog/1.X.X-changelog.rst index b150eece35..79b583c871 100644 --- a/doc/changelog/1.X.X-changelog.rst +++ b/doc/changelog/1.X.X-changelog.rst @@ -1,8 +1,21 @@ +1.9.1 (November 19, 2024) +========================= + +Bug fix release in the 1.9.x series. + +This release adds support for Numpy 2 and Python 3.13. + + * FIX: Restore generate_gantt_chart functionality (https://github.com/nipy/nipype/pull/3290) + * FIX: Address numpy and traits deprecations (https://github.com/nipy/nipype/pull/3699) + * FIX: `ts_Z_corr` → `ts_wb_Z` (https://github.com/nipy/nipype/pull/3697) + * ENH: Remove unused and recently unsupported antsRegistration flag (https://github.com/nipy/nipype/pull/3695) + * MAINT: Bump codecov/codecov-action from 4 to 5 (https://github.com/nipy/nipype/pull/3698) + 1.9.0 (October 31, 2024) ======================== -New feature release in the 1.9.0 series. +New feature release in the 1.9.x series. * FIX: Remove exists=True from fsl.MotionOutliers output that might not exist (https://github.com/nipy/nipype/pull/1428) * FIX: Improve evaluate_connect_function errors across Python versions (https://github.com/nipy/nipype/pull/3655) From bd38c27f803f43db2e861cfde9ee96f8c33d09d1 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Mon, 18 Nov 2024 17:03:29 -0500 Subject: [PATCH 52/90] Update mailmap --- .mailmap | 1 + 1 file changed, 1 insertion(+) diff --git a/.mailmap b/.mailmap index 4c5bd5de4d..35ccbf89d2 100644 --- a/.mailmap +++ b/.mailmap @@ -107,6 +107,7 @@ Joerg Stadler Joerg Stadler John A. Lee John A. 
Lee +Jon Cluce Joke Durnez Jordi Huguet Josh Warner From 02325e90b7cc4796415797c4770d637c13004db3 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Mon, 18 Nov 2024 17:03:46 -0500 Subject: [PATCH 53/90] doc: Update Zenodo ordering --- .zenodo.json | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.zenodo.json b/.zenodo.json index e380a177dc..3e2c2be6f6 100644 --- a/.zenodo.json +++ b/.zenodo.json @@ -285,6 +285,11 @@ { "name": "Mordom, David" }, + { + "affiliation": "Child Mind Institute", + "name": "Cluce, Jon", + "orcid": "0000-0001-7590-5806" + }, { "affiliation": "ARAMIS LAB, Brain and Spine Institute (ICM), Paris, France.", "name": "Guillon, Jérémy", @@ -920,11 +925,6 @@ "name": "Mihai, Paul Glad", "orcid": "0000-0001-5715-6442" }, - { - "affiliation": "Child Mind Institute", - "name": "Cluce, Jon", - "orcid": "0000-0001-7590-5806" - }, { "affiliation": "Department of Psychology, Stanford University", "name": "Gorgolewski, Krzysztof J.", From 32b53071080fb52d793a9700cb48533aec2b98b8 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Mon, 18 Nov 2024 17:10:50 -0500 Subject: [PATCH 54/90] rel: 1.9.1 --- nipype/info.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nipype/info.py b/nipype/info.py index 47d765b34e..d5ddd6bfb8 100644 --- a/nipype/info.py +++ b/nipype/info.py @@ -5,7 +5,7 @@ # nipype version information # Remove .dev0 for release -__version__ = "1.9.1.dev0" +__version__ = "1.9.1" def get_nipype_gitversion(): From 06ef92421be546fd7fcb37fa8c7d83e33ca0f87f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 25 Nov 2024 04:22:51 +0000 Subject: [PATCH 55/90] Bump astral-sh/setup-uv from 3 to 4 Bumps [astral-sh/setup-uv](https://github.com/astral-sh/setup-uv) from 3 to 4. - [Release notes](https://github.com/astral-sh/setup-uv/releases) - [Commits](https://github.com/astral-sh/setup-uv/compare/v3...v4) --- updated-dependencies: - dependency-name: astral-sh/setup-uv dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- .github/workflows/tests.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index ee5786af8f..9f1a97e0a9 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -34,7 +34,7 @@ jobs: with: fetch-depth: 0 - name: Install the latest version of uv - uses: astral-sh/setup-uv@v3 + uses: astral-sh/setup-uv@v4 - run: uv build - run: uvx twine check dist/* - uses: actions/upload-artifact@v4 @@ -102,7 +102,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Install the latest version of uv - uses: astral-sh/setup-uv@v3 + uses: astral-sh/setup-uv@v4 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v5 with: @@ -152,7 +152,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Install the latest version of uv - uses: astral-sh/setup-uv@v3 + uses: astral-sh/setup-uv@v4 - name: Show tox config run: uvx tox c - name: Show tox config (this call) From 8ef6791d93b376958ec6a08e9b63fb3842361245 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 25 Nov 2024 04:22:56 +0000 Subject: [PATCH 56/90] Bump codecov/codecov-action from 4 to 5 Bumps [codecov/codecov-action](https://github.com/codecov/codecov-action) from 4 to 5. 
- [Release notes](https://github.com/codecov/codecov-action/releases) - [Changelog](https://github.com/codecov/codecov-action/blob/main/CHANGELOG.md) - [Commits](https://github.com/codecov/codecov-action/compare/v4...v5) --- updated-dependencies: - dependency-name: codecov/codecov-action dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- .github/workflows/tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index ee5786af8f..ea3224d7ca 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -116,7 +116,7 @@ jobs: run: tox c - name: Run tox run: tox -v --exit-and-dump-after 1200 - - uses: codecov/codecov-action@v4 + - uses: codecov/codecov-action@v5 with: token: ${{ secrets.CODECOV_TOKEN }} if: ${{ always() }} From 014e1367f1cd4c7fa60f18c7587e50085daa5b79 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 19 Nov 2024 09:11:29 -0500 Subject: [PATCH 57/90] mnt: Bump version to 1.9.2.dev0 --- doc/interfaces.rst | 2 +- nipype/info.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/interfaces.rst b/doc/interfaces.rst index 4a8714e630..da817fa163 100644 --- a/doc/interfaces.rst +++ b/doc/interfaces.rst @@ -8,7 +8,7 @@ Interfaces and Workflows :Release: |version| :Date: |today| -Previous versions: `1.9.0 `_ `1.8.6 `_ +Previous versions: `1.9.1 `_ `1.9.0 `_ Workflows --------- diff --git a/nipype/info.py b/nipype/info.py index d5ddd6bfb8..99b8c74919 100644 --- a/nipype/info.py +++ b/nipype/info.py @@ -5,7 +5,7 @@ # nipype version information # Remove .dev0 for release -__version__ = "1.9.1" +__version__ = "1.9.2.dev0" def get_nipype_gitversion(): From 37546c7b6264a18c2e83d1040771e665fb3bb9d6 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Mon, 16 Dec 2024 09:15:21 -0500 Subject: [PATCH 58/90] chore(tox): Remove dipy@master dependency --- tox.ini | 1 - 1 file changed, 1 deletion(-) diff --git a/tox.ini b/tox.ini index e8bc7e8f04..9704158bec 100644 --- a/tox.ini +++ b/tox.ini @@ -70,7 +70,6 @@ pass_env = PYTHON_GIL deps = py313: traits @ git+https://github.com/enthought/traits.git@10954eb - full: dipy @ git+https://github.com/dipy/dipy@master extras = tests full: doc From 292e35cacbf590e10732f3460a6f97ae0d66f32c Mon Sep 17 00:00:00 2001 From: mathiasg Date: Tue, 17 Dec 2024 10:50:13 -0500 Subject: [PATCH 59/90] FIX: Missed savetxt bstring --- nipype/algorithms/confounds.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py index 5e3588f4fc..d2e6168ea7 100644 --- a/nipype/algorithms/confounds.py +++ b/nipype/algorithms/confounds.py @@ -208,7 +208,7 @@ def _run_interface(self, runtime): if self.inputs.save_nstd: out_file = self._gen_fname("dvars_nstd", ext="tsv") - np.savetxt(out_file, dvars[1], fmt=b"%0.6f") + np.savetxt(out_file, dvars[1], fmt="%0.6f") self._results["out_nstd"] = out_file if self.inputs.save_plot: From c01223ae6ee04d0bb34fc396fb71b485ac8365f7 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Tue, 17 Dec 2024 12:45:34 -0500 Subject: [PATCH 60/90] REL: 1.9.2 --- doc/changelog/1.X.X-changelog.rst | 10 ++++++++++ nipype/info.py | 3 ++- 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/doc/changelog/1.X.X-changelog.rst b/doc/changelog/1.X.X-changelog.rst index 79b583c871..a51ef7f13e 100644 --- a/doc/changelog/1.X.X-changelog.rst +++ b/doc/changelog/1.X.X-changelog.rst @@ -1,3 +1,13 @@ +1.9.2 (December 17, 2024) +========================= + +Bug fix release in the 1.9.x series. + + * FIX: Missed np.savetxt bstring (https://github.com/nipy/nipype/pull/3704) + * MAINT: Bump astral-sh/setup-uv from 3 to 4 (https://github.com/nipy/nipype/pull/3702) + * MAINT: Bump codecov/codecov-action from 4 to 5 (https://github.com/nipy/nipype/pull/3703) + + 1.9.1 (November 19, 2024) ========================= diff --git a/nipype/info.py b/nipype/info.py index 99b8c74919..3fd328e995 100644 --- a/nipype/info.py +++ b/nipype/info.py @@ -5,7 +5,7 @@ # nipype version information # Remove .dev0 for release -__version__ = "1.9.2.dev0" +__version__ = "1.9.2" def get_nipype_gitversion(): @@ -58,6 +58,7 @@ def get_nipype_gitversion(): "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", "Topic :: Scientific/Engineering", ] PYTHON_REQUIRES = ">= 3.9" From ceb1cc851443dac074ecc0c4ed8b891ef02af1d5 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Tue, 17 Dec 2024 12:48:18 -0500 Subject: [PATCH 61/90] chore: Bump dev version --- doc/interfaces.rst | 2 +- nipype/info.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/interfaces.rst b/doc/interfaces.rst index da817fa163..795574a5e6 100644 --- a/doc/interfaces.rst +++ b/doc/interfaces.rst @@ -8,7 +8,7 @@ Interfaces and Workflows :Release: |version| :Date: |today| -Previous versions: `1.9.1 `_ `1.9.0 `_ +Previous versions: `1.9.2 `_ `1.9.1 `_ Workflows --------- diff --git a/nipype/info.py b/nipype/info.py index 3fd328e995..1341fc5ba8 100644 --- a/nipype/info.py +++ b/nipype/info.py @@ -5,7 +5,7 @@ # nipype version information # Remove .dev0 for release -__version__ = "1.9.2" +__version__ = "1.9.3.dev0" def get_nipype_gitversion(): From 478b663859dccb0c8dd5b4f916b02080a7f99610 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 23 Dec 2024 04:20:57 +0000 Subject: [PATCH 62/90] Chore(deps): Bump astral-sh/setup-uv from 4 to 5 Bumps [astral-sh/setup-uv](https://github.com/astral-sh/setup-uv) from 4 to 5. - [Release notes](https://github.com/astral-sh/setup-uv/releases) - [Commits](https://github.com/astral-sh/setup-uv/compare/v4...v5) --- updated-dependencies: - dependency-name: astral-sh/setup-uv dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] --- .github/workflows/tests.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 5b0943c4ca..d789ec9061 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -34,7 +34,7 @@ jobs: with: fetch-depth: 0 - name: Install the latest version of uv - uses: astral-sh/setup-uv@v4 + uses: astral-sh/setup-uv@v5 - run: uv build - run: uvx twine check dist/* - uses: actions/upload-artifact@v4 @@ -102,7 +102,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Install the latest version of uv - uses: astral-sh/setup-uv@v4 + uses: astral-sh/setup-uv@v5 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v5 with: @@ -152,7 +152,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Install the latest version of uv - uses: astral-sh/setup-uv@v4 + uses: astral-sh/setup-uv@v5 - name: Show tox config run: uvx tox c - name: Show tox config (this call) From 2717267423c26597ffde6f598ebd58eb19c8af81 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Fri, 17 Jan 2025 08:35:12 -0500 Subject: [PATCH 63/90] Update .wci.yml --- .wci.yml | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/.wci.yml b/.wci.yml index 937cfc6dae..ff7937c045 100644 --- a/.wci.yml +++ b/.wci.yml @@ -18,16 +18,6 @@ documentation: installation: https://nipype.readthedocs.io/en/latest/users/install.html tutorial: https://miykael.github.io/nipype_tutorial/ -# Set the OS, Python version and other tools you might need -build: - os: ubuntu-22.04 - tools: - python: "3.10" - -# Build documentation in the docs/ directory with Sphinx -sphinx: - configuration: doc/conf.py - execution_environment: interfaces: - docker From ce9511ffa8ac93f3fe311356253d19c747f4068e Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Fri, 17 Jan 2025 08:42:29 -0500 Subject: [PATCH 64/90] Update .wci.yml --- .wci.yml | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/.wci.yml b/.wci.yml index ff7937c045..1e6fbc8ddf 100644 --- a/.wci.yml +++ b/.wci.yml @@ -19,7 +19,12 @@ documentation: tutorial: https://miykael.github.io/nipype_tutorial/ execution_environment: - interfaces: - - docker - - conda - - pypi + resource_managers: + - SLURM + - Condor + - DAGMan + - LSF + - OAR + - PBS + - SGE + - Soma-workflow From 9e61c82848e3c00fe6e3b73e85ff6823b20908af Mon Sep 17 00:00:00 2001 From: Rafael Ferreira da Silva Date: Sat, 18 Jan 2025 08:28:05 -0300 Subject: [PATCH 65/90] Update to .wci.yml The headline information contains a colon, thus it is necessary to put the text within quotes. 
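For context on the one-line fix that follows: YAML reads an unquoted scalar that itself contains ``: `` as a second mapping key, so the original headline did not parse. A minimal reproduction (PyYAML is assumed to be available purely for this illustration; the two strings are the before/after values from the diff below):

    import yaml  # PyYAML, used here only to demonstrate the parse failure

    bad = 'headline: Neuroimaging in Python: Pipelines and Interfaces'
    good = 'headline: "Neuroimaging in Python: Pipelines and Interfaces"'

    try:
        yaml.safe_load(bad)
    except yaml.YAMLError as err:  # ScannerError: mapping values are not allowed here
        print(type(err).__name__)

    print(yaml.safe_load(good))    # {'headline': 'Neuroimaging in Python: Pipelines and Interfaces'}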
--- .wci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.wci.yml b/.wci.yml index 1e6fbc8ddf..2adbae9fcc 100644 --- a/.wci.yml +++ b/.wci.yml @@ -2,7 +2,7 @@ name: nipype -headline: Neuroimaging in Python: Pipelines and Interfaces +headline: "Neuroimaging in Python: Pipelines and Interfaces" description: | Nipype, an open-source, community-developed initiative under the umbrella of NiPy, is a Python project that From 684b9b0e15618537f78248bf3c8953ad3f4f6eeb Mon Sep 17 00:00:00 2001 From: mauriliogenovese <125388969+mauriliogenovese@users.noreply.github.com> Date: Sat, 18 Jan 2025 16:37:31 +0100 Subject: [PATCH 66/90] removed hard pin --- nipype/info.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nipype/info.py b/nipype/info.py index 280c641ed6..ad45791a50 100644 --- a/nipype/info.py +++ b/nipype/info.py @@ -149,7 +149,7 @@ def get_nipype_gitversion(): "filelock>=3.0.0", "etelemetry>=0.2.0", "looseversion!=1.2", - "gputil==1.4.0", + "gputil>=1.4.0", ] TESTS_REQUIRES = [ From 8f74c5dd362e73e28282900297492c9dd7da7ed8 Mon Sep 17 00:00:00 2001 From: mauriliogenovese <125388969+mauriliogenovese@users.noreply.github.com> Date: Sat, 18 Jan 2025 16:42:00 +0100 Subject: [PATCH 67/90] gpu_count refactor --- nipype/pipeline/plugins/multiproc.py | 12 ++---------- nipype/pipeline/plugins/tools.py | 9 +++++++++ 2 files changed, 11 insertions(+), 10 deletions(-) diff --git a/nipype/pipeline/plugins/multiproc.py b/nipype/pipeline/plugins/multiproc.py index 9aec6ae072..4d7eaa6c6b 100644 --- a/nipype/pipeline/plugins/multiproc.py +++ b/nipype/pipeline/plugins/multiproc.py @@ -21,6 +21,7 @@ from ...utils.profiler import get_system_total_memory_gb from ..engine import MapNode from .base import DistributedPluginBase +from .tools import gpu_count try: from textwrap import indent @@ -132,7 +133,7 @@ def __init__(self, plugin_args=None): self.raise_insufficient = self.plugin_args.get("raise_insufficient", True) # GPU found on system - self.n_gpus_visible = MultiProcPlugin.gpu_count() + self.n_gpus_visible = gpu_count() # proc per GPU set by user self.n_gpu_procs = self.plugin_args.get('n_gpu_procs', self.n_gpus_visible) @@ -423,12 +424,3 @@ def _sort_jobs(self, jobids, scheduler="tsort"): ) return jobids - @staticmethod - def gpu_count(): - n_gpus = 1 - try: - import GPUtil - - return len(GPUtil.getGPUs()) - except ImportError: - return n_gpus diff --git a/nipype/pipeline/plugins/tools.py b/nipype/pipeline/plugins/tools.py index 8c28f36246..c767be398e 100644 --- a/nipype/pipeline/plugins/tools.py +++ b/nipype/pipeline/plugins/tools.py @@ -178,3 +178,12 @@ def create_pyscript(node, updatehash=False, store_exception=True): with open(pyscript, "w") as fp: fp.writelines(cmdstr) return pyscript + +def gpu_count(): + n_gpus = 1 + try: + import GPUtil + except ImportError: + return 1 + else: + return len(GPUtil.getGPUs()) From a307845390115b1b65659159c662334b39575ff5 Mon Sep 17 00:00:00 2001 From: mauriliogenovese <125388969+mauriliogenovese@users.noreply.github.com> Date: Sat, 18 Jan 2025 16:45:24 +0100 Subject: [PATCH 68/90] more readable --- nipype/pipeline/engine/nodes.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/nipype/pipeline/engine/nodes.py b/nipype/pipeline/engine/nodes.py index d9c066a795..f9036529ac 100644 --- a/nipype/pipeline/engine/nodes.py +++ b/nipype/pipeline/engine/nodes.py @@ -822,9 +822,8 @@ def update(self, **opts): self.inputs.update(**opts) def is_gpu_node(self): - return (hasattr(self.inputs, 'use_cuda') and 
self.inputs.use_cuda) or ( - hasattr(self.inputs, 'use_gpu') and self.inputs.use_gpu - ) + return bool(getattr(self.inputs, 'use_cuda', False)) or bool( + getattr(self.inputs, 'use_gpu', False)) class JoinNode(Node): From 27448bcfb2ea34029f561dff5efd275f31609096 Mon Sep 17 00:00:00 2001 From: mauriliogenovese <125388969+mauriliogenovese@users.noreply.github.com> Date: Sat, 18 Jan 2025 16:46:21 +0100 Subject: [PATCH 69/90] logger argument --- nipype/pipeline/plugins/multiproc.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/nipype/pipeline/plugins/multiproc.py b/nipype/pipeline/plugins/multiproc.py index 4d7eaa6c6b..e1aa07d13b 100644 --- a/nipype/pipeline/plugins/multiproc.py +++ b/nipype/pipeline/plugins/multiproc.py @@ -140,9 +140,8 @@ def __init__(self, plugin_args=None): # total no. of processes allowed on all gpus if self.n_gpu_procs > self.n_gpus_visible: logger.info( - 'Total number of GPUs proc requested (%d) exceeds the available number of GPUs (%d) on the system. Using requested GPU slots at your own risk!' - % (self.n_gpu_procs, self.n_gpus_visible) - ) + 'Total number of GPUs proc requested (%d) exceeds the available number of GPUs (%d) on the system. Using requested GPU slots at your own risk!', + self.n_gpu_procs, self.n_gpus_visible) # Instantiate different thread pools for non-daemon processes logger.debug( From 2c2c066d846f12738c3ddd57af0e5ca3dc97df31 Mon Sep 17 00:00:00 2001 From: mauriliogenovese <125388969+mauriliogenovese@users.noreply.github.com> Date: Sat, 18 Jan 2025 18:52:56 +0100 Subject: [PATCH 70/90] code refactory --- nipype/pipeline/engine/nodes.py | 3 ++- nipype/pipeline/plugins/multiproc.py | 9 +++++---- nipype/pipeline/plugins/tools.py | 3 ++- 3 files changed, 9 insertions(+), 6 deletions(-) diff --git a/nipype/pipeline/engine/nodes.py b/nipype/pipeline/engine/nodes.py index f9036529ac..2d524c3efe 100644 --- a/nipype/pipeline/engine/nodes.py +++ b/nipype/pipeline/engine/nodes.py @@ -823,7 +823,8 @@ def update(self, **opts): def is_gpu_node(self): return bool(getattr(self.inputs, 'use_cuda', False)) or bool( - getattr(self.inputs, 'use_gpu', False)) + getattr(self.inputs, 'use_gpu', False) + ) class JoinNode(Node): diff --git a/nipype/pipeline/plugins/multiproc.py b/nipype/pipeline/plugins/multiproc.py index e1aa07d13b..054d0150e6 100644 --- a/nipype/pipeline/plugins/multiproc.py +++ b/nipype/pipeline/plugins/multiproc.py @@ -141,7 +141,9 @@ def __init__(self, plugin_args=None): if self.n_gpu_procs > self.n_gpus_visible: logger.info( 'Total number of GPUs proc requested (%d) exceeds the available number of GPUs (%d) on the system. 
Using requested GPU slots at your own risk!', - self.n_gpu_procs, self.n_gpus_visible) + self.n_gpu_procs, + self.n_gpus_visible, + ) # Instantiate different thread pools for non-daemon processes logger.debug( @@ -394,7 +396,7 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): free_memory_gb += next_job_gb free_processors += next_job_th if is_gpu_node: - free_gpu_slots -= next_job_gpu_th + free_gpu_slots += next_job_gpu_th # Display stats next loop self._stats = None @@ -421,5 +423,4 @@ def _sort_jobs(self, jobids, scheduler="tsort"): jobids, key=lambda item: (self.procs[item].mem_gb, self.procs[item].n_procs), ) - return jobids - + return jobids \ No newline at end of file diff --git a/nipype/pipeline/plugins/tools.py b/nipype/pipeline/plugins/tools.py index c767be398e..eb50d4b40c 100644 --- a/nipype/pipeline/plugins/tools.py +++ b/nipype/pipeline/plugins/tools.py @@ -179,6 +179,7 @@ def create_pyscript(node, updatehash=False, store_exception=True): fp.writelines(cmdstr) return pyscript + def gpu_count(): n_gpus = 1 try: @@ -186,4 +187,4 @@ def gpu_count(): except ImportError: return 1 else: - return len(GPUtil.getGPUs()) + return len(GPUtil.getGPUs()) \ No newline at end of file From 66d628022155fbb2dcbacc798abd5e0242727804 Mon Sep 17 00:00:00 2001 From: mauriliogenovese <125388969+mauriliogenovese@users.noreply.github.com> Date: Sat, 18 Jan 2025 18:56:10 +0100 Subject: [PATCH 71/90] newlines for style check --- nipype/pipeline/plugins/multiproc.py | 2 +- nipype/pipeline/plugins/tools.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/nipype/pipeline/plugins/multiproc.py b/nipype/pipeline/plugins/multiproc.py index 054d0150e6..ce191b0f7c 100644 --- a/nipype/pipeline/plugins/multiproc.py +++ b/nipype/pipeline/plugins/multiproc.py @@ -423,4 +423,4 @@ def _sort_jobs(self, jobids, scheduler="tsort"): jobids, key=lambda item: (self.procs[item].mem_gb, self.procs[item].n_procs), ) - return jobids \ No newline at end of file + return jobids diff --git a/nipype/pipeline/plugins/tools.py b/nipype/pipeline/plugins/tools.py index 43e50c276b..dabd75116c 100644 --- a/nipype/pipeline/plugins/tools.py +++ b/nipype/pipeline/plugins/tools.py @@ -185,4 +185,5 @@ def gpu_count(): except ImportError: return 1 else: - return len(GPUtil.getGPUs()) \ No newline at end of file + return len(GPUtil.getGPUs()) + \ No newline at end of file From 610f1cbe33490ef5e6f4d2e43037655594a64f18 Mon Sep 17 00:00:00 2001 From: mauriliogenovese <125388969+mauriliogenovese@users.noreply.github.com> Date: Sun, 19 Jan 2025 10:25:09 +0100 Subject: [PATCH 72/90] newline for code check --- nipype/pipeline/plugins/tools.py | 1 - 1 file changed, 1 deletion(-) diff --git a/nipype/pipeline/plugins/tools.py b/nipype/pipeline/plugins/tools.py index dabd75116c..3d879a1971 100644 --- a/nipype/pipeline/plugins/tools.py +++ b/nipype/pipeline/plugins/tools.py @@ -186,4 +186,3 @@ def gpu_count(): return 1 else: return len(GPUtil.getGPUs()) - \ No newline at end of file From 4298707009e58fdab8eb5db4113059cda975b4a1 Mon Sep 17 00:00:00 2001 From: mauriliogenovese <125388969+mauriliogenovese@users.noreply.github.com> Date: Sun, 19 Jan 2025 10:27:59 +0100 Subject: [PATCH 73/90] fix for updatehash crash --- nipype/pipeline/engine/nodes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nipype/pipeline/engine/nodes.py b/nipype/pipeline/engine/nodes.py index 31ee29e04d..ace18e8fec 100644 --- a/nipype/pipeline/engine/nodes.py +++ b/nipype/pipeline/engine/nodes.py @@ -452,7 +452,7 @@ def run(self, 
updatehash=False): cached, updated = self.is_cached() # If the node is cached, check on pklz files and finish - if not force_run and (updated or (not updated and updatehash)): + if cached and not force_run and (updated or (not updated and updatehash)): logger.debug("Only updating node hashes or skipping execution") inputs_file = op.join(outdir, "_inputs.pklz") if not op.exists(inputs_file): From 3e555989af20b2d2d2acf7d66192307475de788b Mon Sep 17 00:00:00 2001 From: mauriliogenovese <125388969+mauriliogenovese@users.noreply.github.com> Date: Sun, 19 Jan 2025 16:56:59 +0100 Subject: [PATCH 74/90] Update nipype/pipeline/engine/nodes.py Co-authored-by: Chris Markiewicz --- nipype/pipeline/engine/nodes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nipype/pipeline/engine/nodes.py b/nipype/pipeline/engine/nodes.py index ace18e8fec..eb524a6a6f 100644 --- a/nipype/pipeline/engine/nodes.py +++ b/nipype/pipeline/engine/nodes.py @@ -452,7 +452,7 @@ def run(self, updatehash=False): cached, updated = self.is_cached() # If the node is cached, check on pklz files and finish - if cached and not force_run and (updated or (not updated and updatehash)): + if cached and not force_run and (updated or updatehash): logger.debug("Only updating node hashes or skipping execution") inputs_file = op.join(outdir, "_inputs.pklz") if not op.exists(inputs_file): From 88e30f8e0442d12ba3cfb194e857b118bc727aef Mon Sep 17 00:00:00 2001 From: mauriliogenovese <125388969+mauriliogenovese@users.noreply.github.com> Date: Sun, 19 Jan 2025 17:43:26 +0100 Subject: [PATCH 75/90] keep multiprocess with updatehash=True --- nipype/pipeline/plugins/multiproc.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nipype/pipeline/plugins/multiproc.py b/nipype/pipeline/plugins/multiproc.py index 401b01b388..9b27b47383 100644 --- a/nipype/pipeline/plugins/multiproc.py +++ b/nipype/pipeline/plugins/multiproc.py @@ -336,8 +336,9 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): if self._local_hash_check(jobid, graph): continue + cached, updated = self.procs[jobid].is_cached() # updatehash and run_without_submitting are also run locally - if updatehash or self.procs[jobid].run_without_submitting: + if (cached and updatehash and not updated) or self.procs[jobid].run_without_submitting: logger.debug("Running node %s on master thread", self.procs[jobid]) try: self.procs[jobid].run(updatehash=updatehash) From 0afbb81b98b38dba4d0a54501ba05acf730b54f8 Mon Sep 17 00:00:00 2001 From: Himanshu Aggarwal Date: Tue, 11 Feb 2025 16:32:03 +0100 Subject: [PATCH 76/90] [FIX] Set length to 1 when np.squeeze returns a 0D array --- nipype/algorithms/misc.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/nipype/algorithms/misc.py b/nipype/algorithms/misc.py index e1a67f0b08..a29b629703 100644 --- a/nipype/algorithms/misc.py +++ b/nipype/algorithms/misc.py @@ -1495,7 +1495,11 @@ def merge_rois(in_files, in_idxs, in_ref, dtype=None, out_file=None): for d, fname in enumerate(nii): data = np.asanyarray(nb.load(fname).dataobj).reshape(-1) cdata = nb.load(cname).dataobj[..., d].reshape(-1) - nels = len(idxs) + try: + nels = len(idxs) + except TypeError: + nels = 1 + idata = (idxs,) data[idata] = cdata[0:nels] nb.Nifti1Image(data.reshape(rsh[:3]), aff, hdr).to_filename(fname) From 2c8017df7e2c16ae76f55ce5939d73f4784f6d17 Mon Sep 17 00:00:00 2001 From: Himanshu Aggarwal Date: Tue, 11 Feb 2025 17:38:39 +0100 Subject: [PATCH 77/90] apply code review suggestion --- nipype/algorithms/misc.py | 11 
+++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/nipype/algorithms/misc.py b/nipype/algorithms/misc.py index a29b629703..fe27b877a2 100644 --- a/nipype/algorithms/misc.py +++ b/nipype/algorithms/misc.py @@ -1490,18 +1490,13 @@ def merge_rois(in_files, in_idxs, in_ref, dtype=None, out_file=None): for cname, iname in zip(in_files, in_idxs): f = np.load(iname) - idxs = np.squeeze(f["arr_0"]) + idxs = np.atleast_1d(np.squeeze(f["arr_0"])) + nels = len(idxs) for d, fname in enumerate(nii): data = np.asanyarray(nb.load(fname).dataobj).reshape(-1) cdata = nb.load(cname).dataobj[..., d].reshape(-1) - try: - nels = len(idxs) - except TypeError: - nels = 1 - - idata = (idxs,) - data[idata] = cdata[0:nels] + data[idxs] = cdata[:nels] nb.Nifti1Image(data.reshape(rsh[:3]), aff, hdr).to_filename(fname) imgs = [nb.load(im) for im in nii] From 8ed2b2306aeb7d89de4958b5293223ffe27a4f34 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Fri, 21 Feb 2025 14:06:53 -0500 Subject: [PATCH 78/90] sty: black [ignore-rev] --- nipype/caching/tests/test_memory.py | 3 +-- nipype/external/cloghandler.py | 2 +- nipype/info.py | 2 +- nipype/interfaces/ants/registration.py | 2 +- nipype/interfaces/ants/resampling.py | 3 +-- nipype/interfaces/ants/visualization.py | 3 +-- nipype/interfaces/bru2nii.py | 3 +-- nipype/interfaces/camino/__init__.py | 3 +-- nipype/interfaces/cmtk/base.py | 2 +- nipype/interfaces/diffusion_toolkit/dti.py | 3 +-- nipype/interfaces/diffusion_toolkit/odf.py | 3 +-- nipype/interfaces/diffusion_toolkit/postproc.py | 3 +-- nipype/interfaces/dipy/base.py | 2 +- nipype/interfaces/freesurfer/longitudinal.py | 3 +-- nipype/interfaces/freesurfer/model.py | 2 +- nipype/interfaces/freesurfer/petsurfer.py | 3 +-- nipype/interfaces/freesurfer/preprocess.py | 3 +-- nipype/interfaces/freesurfer/registration.py | 3 +-- nipype/interfaces/freesurfer/utils.py | 3 +-- nipype/interfaces/io.py | 14 +++++++------- nipype/interfaces/mixins/reporting.py | 2 +- nipype/interfaces/nipy/base.py | 2 +- nipype/interfaces/nitime/base.py | 2 +- nipype/interfaces/spm/preprocess.py | 3 +-- nipype/interfaces/utility/base.py | 6 +++--- nipype/interfaces/utility/csv.py | 3 +-- nipype/pipeline/engine/tests/test_engine.py | 3 +-- nipype/pipeline/engine/tests/test_join.py | 3 +-- nipype/pipeline/engine/tests/test_utils.py | 3 +-- nipype/pipeline/engine/tests/test_workflows.py | 3 +-- nipype/pipeline/plugins/condor.py | 3 +-- nipype/pipeline/plugins/dagman.py | 3 +-- nipype/pipeline/plugins/debug.py | 3 +-- nipype/pipeline/plugins/ipython.py | 3 +-- nipype/pipeline/plugins/linear.py | 3 +-- nipype/pipeline/plugins/lsf.py | 3 +-- nipype/pipeline/plugins/multiproc.py | 4 +++- nipype/pipeline/plugins/oar.py | 3 +-- nipype/pipeline/plugins/pbs.py | 3 +-- nipype/pipeline/plugins/pbsgraph.py | 3 +-- nipype/pipeline/plugins/sge.py | 3 +-- nipype/pipeline/plugins/sgegraph.py | 3 +-- nipype/pipeline/plugins/slurmgraph.py | 3 +-- nipype/pipeline/plugins/somaflow.py | 3 +-- nipype/pipeline/plugins/tests/test_base.py | 3 +-- .../tests/test_legacymultiproc_nondaemon.py | 3 +-- nipype/pipeline/plugins/tests/test_tools.py | 3 +-- nipype/pipeline/plugins/tools.py | 3 +-- nipype/testing/tests/test_utils.py | 3 +-- nipype/testing/utils.py | 3 +-- nipype/utils/filemanip.py | 3 +-- nipype/utils/matlabtools.py | 2 +- nipype/utils/misc.py | 3 +-- nipype/utils/subprocess.py | 3 +-- 54 files changed, 64 insertions(+), 103 deletions(-) diff --git a/nipype/caching/tests/test_memory.py b/nipype/caching/tests/test_memory.py index 
5bd9fad528..cd5b8f8075 100644 --- a/nipype/caching/tests/test_memory.py +++ b/nipype/caching/tests/test_memory.py @@ -1,5 +1,4 @@ -""" Test the nipype interface caching mechanism -""" +"""Test the nipype interface caching mechanism""" from .. import Memory from ...pipeline.engine.tests.test_engine import EngineTestInterface diff --git a/nipype/external/cloghandler.py b/nipype/external/cloghandler.py index 289c8dfa2f..680ba30e2e 100644 --- a/nipype/external/cloghandler.py +++ b/nipype/external/cloghandler.py @@ -9,7 +9,7 @@ # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. -""" cloghandler.py: A smart replacement for the standard RotatingFileHandler +"""cloghandler.py: A smart replacement for the standard RotatingFileHandler ConcurrentRotatingFileHandler: This class is a log handler which is a drop-in replacement for the python standard log handler 'RotateFileHandler', the primary diff --git a/nipype/info.py b/nipype/info.py index 1341fc5ba8..729689ae5d 100644 --- a/nipype/info.py +++ b/nipype/info.py @@ -1,4 +1,4 @@ -""" This file contains defines parameters for nipy that we use to fill +"""This file contains defines parameters for nipy that we use to fill settings in setup.py, the nipy top-level docstring, and for building the docs. In setup.py in particular, we exec this file, so it cannot import nipy """ diff --git a/nipype/interfaces/ants/registration.py b/nipype/interfaces/ants/registration.py index 91b131bbf3..55e9738170 100644 --- a/nipype/interfaces/ants/registration.py +++ b/nipype/interfaces/ants/registration.py @@ -1,5 +1,5 @@ """The ants module provides basic functions for interfacing with ants - functions. +functions. """ import os diff --git a/nipype/interfaces/ants/resampling.py b/nipype/interfaces/ants/resampling.py index 95f29d5982..883eff1de3 100644 --- a/nipype/interfaces/ants/resampling.py +++ b/nipype/interfaces/ants/resampling.py @@ -1,5 +1,4 @@ -"""ANTS Apply Transforms interface -""" +"""ANTS Apply Transforms interface""" import os diff --git a/nipype/interfaces/ants/visualization.py b/nipype/interfaces/ants/visualization.py index c73b64c632..cdfa3529a7 100644 --- a/nipype/interfaces/ants/visualization.py +++ b/nipype/interfaces/ants/visualization.py @@ -1,5 +1,4 @@ -"""The ants visualisation module provides basic functions based on ITK. 
-""" +"""The ants visualisation module provides basic functions based on ITK.""" import os diff --git a/nipype/interfaces/bru2nii.py b/nipype/interfaces/bru2nii.py index 746af18f1a..b07f6a58d3 100644 --- a/nipype/interfaces/bru2nii.py +++ b/nipype/interfaces/bru2nii.py @@ -1,5 +1,4 @@ -"""The bru2nii module provides basic functions for dicom conversion -""" +"""The bru2nii module provides basic functions for dicom conversion""" import os from .base import ( diff --git a/nipype/interfaces/camino/__init__.py b/nipype/interfaces/camino/__init__.py index 766fa9c906..67e973df66 100644 --- a/nipype/interfaces/camino/__init__.py +++ b/nipype/interfaces/camino/__init__.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Camino top level namespace -""" +"""Camino top level namespace""" from .connectivity import Conmat from .convert import ( diff --git a/nipype/interfaces/cmtk/base.py b/nipype/interfaces/cmtk/base.py index d0c226dc49..c4c997288b 100644 --- a/nipype/interfaces/cmtk/base.py +++ b/nipype/interfaces/cmtk/base.py @@ -1,6 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -""" Base interface for cmtk """ +"""Base interface for cmtk""" from ..base import LibraryBaseInterface from ...utils.misc import package_check diff --git a/nipype/interfaces/diffusion_toolkit/dti.py b/nipype/interfaces/diffusion_toolkit/dti.py index fa031799e3..bf6336c96d 100644 --- a/nipype/interfaces/diffusion_toolkit/dti.py +++ b/nipype/interfaces/diffusion_toolkit/dti.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Provides interfaces to various commands provided by diffusion toolkit -""" +"""Provides interfaces to various commands provided by diffusion toolkit""" import os import re diff --git a/nipype/interfaces/diffusion_toolkit/odf.py b/nipype/interfaces/diffusion_toolkit/odf.py index 00f86a322c..daadffc200 100644 --- a/nipype/interfaces/diffusion_toolkit/odf.py +++ b/nipype/interfaces/diffusion_toolkit/odf.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Provides interfaces to various commands provided by diffusion toolkit -""" +"""Provides interfaces to various commands provided by diffusion toolkit""" import os import re diff --git a/nipype/interfaces/diffusion_toolkit/postproc.py b/nipype/interfaces/diffusion_toolkit/postproc.py index 5190843875..d05cfadff6 100644 --- a/nipype/interfaces/diffusion_toolkit/postproc.py +++ b/nipype/interfaces/diffusion_toolkit/postproc.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Provides interfaces to various commands provided by diffusion toolkit -""" +"""Provides interfaces to various commands provided by diffusion toolkit""" import os from ..base import ( diff --git a/nipype/interfaces/dipy/base.py b/nipype/interfaces/dipy/base.py index 1b9bdea6d5..44290cd1d7 100644 --- a/nipype/interfaces/dipy/base.py +++ b/nipype/interfaces/dipy/base.py @@ -1,4 +1,4 @@ -""" Base interfaces for dipy """ +"""Base interfaces for dipy""" import os.path as op import inspect diff --git a/nipype/interfaces/freesurfer/longitudinal.py b/nipype/interfaces/freesurfer/longitudinal.py index 227ea76775..41e95c091b 100644 --- a/nipype/interfaces/freesurfer/longitudinal.py +++ 
b/nipype/interfaces/freesurfer/longitudinal.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Provides interfaces to various longitudinal commands provided by freesurfer -""" +"""Provides interfaces to various longitudinal commands provided by freesurfer""" import os diff --git a/nipype/interfaces/freesurfer/model.py b/nipype/interfaces/freesurfer/model.py index 6376c1b971..5e245a9a85 100644 --- a/nipype/interfaces/freesurfer/model.py +++ b/nipype/interfaces/freesurfer/model.py @@ -1,7 +1,7 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The freesurfer module provides basic functions for interfacing with - freesurfer tools. +freesurfer tools. """ import os diff --git a/nipype/interfaces/freesurfer/petsurfer.py b/nipype/interfaces/freesurfer/petsurfer.py index 4505985127..28aa763b06 100644 --- a/nipype/interfaces/freesurfer/petsurfer.py +++ b/nipype/interfaces/freesurfer/petsurfer.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Provides interfaces to various commands for running PET analyses provided by FreeSurfer -""" +"""Provides interfaces to various commands for running PET analyses provided by FreeSurfer""" import os diff --git a/nipype/interfaces/freesurfer/preprocess.py b/nipype/interfaces/freesurfer/preprocess.py index 5b2fd19a0b..89c218f969 100644 --- a/nipype/interfaces/freesurfer/preprocess.py +++ b/nipype/interfaces/freesurfer/preprocess.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Provides interfaces to various commands provided by FreeSurfer -""" +"""Provides interfaces to various commands provided by FreeSurfer""" import os import os.path as op from glob import glob diff --git a/nipype/interfaces/freesurfer/registration.py b/nipype/interfaces/freesurfer/registration.py index bc70fc44a6..790066d0ec 100644 --- a/nipype/interfaces/freesurfer/registration.py +++ b/nipype/interfaces/freesurfer/registration.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Provides interfaces to various longitudinal commands provided by freesurfer -""" +"""Provides interfaces to various longitudinal commands provided by freesurfer""" import os import os.path diff --git a/nipype/interfaces/freesurfer/utils.py b/nipype/interfaces/freesurfer/utils.py index 777f42f019..2c1cdbcc94 100644 --- a/nipype/interfaces/freesurfer/utils.py +++ b/nipype/interfaces/freesurfer/utils.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Interfaces to assorted Freesurfer utility programs. -""" +"""Interfaces to assorted Freesurfer utility programs.""" import os import re import shutil diff --git a/nipype/interfaces/io.py b/nipype/interfaces/io.py index 46cdfb44f2..d6af1ba073 100644 --- a/nipype/interfaces/io.py +++ b/nipype/interfaces/io.py @@ -1,14 +1,14 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -""" Set of interfaces that allow interaction with data. Currently - available interfaces are: +"""Set of interfaces that allow interaction with data. 
Currently +available interfaces are: - DataSource: Generic nifti to named Nifti interface - DataSink: Generic named output from interfaces to data store - XNATSource: preliminary interface to XNAT +DataSource: Generic nifti to named Nifti interface +DataSink: Generic named output from interfaces to data store +XNATSource: preliminary interface to XNAT - To come : - XNATSink +To come : +XNATSink """ import glob import fnmatch diff --git a/nipype/interfaces/mixins/reporting.py b/nipype/interfaces/mixins/reporting.py index 90ca804618..a836cfa3fa 100644 --- a/nipype/interfaces/mixins/reporting.py +++ b/nipype/interfaces/mixins/reporting.py @@ -1,6 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -""" class mixin and utilities for enabling reports for nipype interfaces """ +"""class mixin and utilities for enabling reports for nipype interfaces""" import os from abc import abstractmethod diff --git a/nipype/interfaces/nipy/base.py b/nipype/interfaces/nipy/base.py index 25aef8b873..1f8f1e4657 100644 --- a/nipype/interfaces/nipy/base.py +++ b/nipype/interfaces/nipy/base.py @@ -1,6 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -""" Base interface for nipy """ +"""Base interface for nipy""" from ..base import LibraryBaseInterface from ...utils.misc import package_check diff --git a/nipype/interfaces/nitime/base.py b/nipype/interfaces/nitime/base.py index 7e434f1d3e..4109bc3a74 100644 --- a/nipype/interfaces/nitime/base.py +++ b/nipype/interfaces/nitime/base.py @@ -1,6 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -""" Base interface for nitime """ +"""Base interface for nitime""" from ..base import LibraryBaseInterface diff --git a/nipype/interfaces/spm/preprocess.py b/nipype/interfaces/spm/preprocess.py index c7f69785ff..8d931a72ba 100644 --- a/nipype/interfaces/spm/preprocess.py +++ b/nipype/interfaces/spm/preprocess.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""SPM wrappers for preprocessing data -""" +"""SPM wrappers for preprocessing data""" import os from copy import deepcopy diff --git a/nipype/interfaces/utility/base.py b/nipype/interfaces/utility/base.py index 564966cb5b..ecc1bf7935 100644 --- a/nipype/interfaces/utility/base.py +++ b/nipype/interfaces/utility/base.py @@ -1,9 +1,9 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ - # changing to temporary directories - >>> tmp = getfixture('tmpdir') - >>> old = tmp.chdir() +# changing to temporary directories +>>> tmp = getfixture('tmpdir') +>>> old = tmp.chdir() """ import os import re diff --git a/nipype/interfaces/utility/csv.py b/nipype/interfaces/utility/csv.py index 979e328bb6..7470eecbfe 100644 --- a/nipype/interfaces/utility/csv.py +++ b/nipype/interfaces/utility/csv.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""CSV Handling utilities -""" +"""CSV Handling utilities""" import csv from ..base import traits, TraitedSpec, DynamicTraitedSpec, File, BaseInterface from ..io import add_traits diff --git a/nipype/pipeline/engine/tests/test_engine.py b/nipype/pipeline/engine/tests/test_engine.py index f1b6817e74..7650be1cd3 100644 --- 
a/nipype/pipeline/engine/tests/test_engine.py +++ b/nipype/pipeline/engine/tests/test_engine.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Tests for the engine module -""" +"""Tests for the engine module""" from copy import deepcopy from glob import glob import os diff --git a/nipype/pipeline/engine/tests/test_join.py b/nipype/pipeline/engine/tests/test_join.py index 2fe5f70564..c177ad24d3 100644 --- a/nipype/pipeline/engine/tests/test_join.py +++ b/nipype/pipeline/engine/tests/test_join.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Tests for join expansion -""" +"""Tests for join expansion""" import pytest from .... import config diff --git a/nipype/pipeline/engine/tests/test_utils.py b/nipype/pipeline/engine/tests/test_utils.py index 78483b6923..7ae8ce5b33 100644 --- a/nipype/pipeline/engine/tests/test_utils.py +++ b/nipype/pipeline/engine/tests/test_utils.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Tests for the engine utils module -""" +"""Tests for the engine utils module""" import os from copy import deepcopy import pytest diff --git a/nipype/pipeline/engine/tests/test_workflows.py b/nipype/pipeline/engine/tests/test_workflows.py index 12d56de285..980b54fa28 100644 --- a/nipype/pipeline/engine/tests/test_workflows.py +++ b/nipype/pipeline/engine/tests/test_workflows.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Tests for the engine workflows module -""" +"""Tests for the engine workflows module""" from glob import glob import os from shutil import rmtree diff --git a/nipype/pipeline/plugins/condor.py b/nipype/pipeline/plugins/condor.py index 0fff477377..789eaecfab 100644 --- a/nipype/pipeline/plugins/condor.py +++ b/nipype/pipeline/plugins/condor.py @@ -1,5 +1,4 @@ -"""Parallel workflow execution via Condor -""" +"""Parallel workflow execution via Condor""" import os from time import sleep diff --git a/nipype/pipeline/plugins/dagman.py b/nipype/pipeline/plugins/dagman.py index 55f3f03bee..1c424c24ef 100644 --- a/nipype/pipeline/plugins/dagman.py +++ b/nipype/pipeline/plugins/dagman.py @@ -1,5 +1,4 @@ -"""Parallel workflow execution via Condor DAGMan -""" +"""Parallel workflow execution via Condor DAGMan""" import os import sys diff --git a/nipype/pipeline/plugins/debug.py b/nipype/pipeline/plugins/debug.py index 1dac35cf8f..4798e083bd 100644 --- a/nipype/pipeline/plugins/debug.py +++ b/nipype/pipeline/plugins/debug.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Debug plugin -""" +"""Debug plugin""" import networkx as nx from .base import PluginBase, logger diff --git a/nipype/pipeline/plugins/ipython.py b/nipype/pipeline/plugins/ipython.py index f52b3e6282..2c80eb4655 100644 --- a/nipype/pipeline/plugins/ipython.py +++ b/nipype/pipeline/plugins/ipython.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Parallel workflow execution via IPython controller -""" +"""Parallel workflow execution via IPython controller""" from pickle import dumps import sys diff --git a/nipype/pipeline/plugins/linear.py b/nipype/pipeline/plugins/linear.py index 
93029ee1b9..aa29a5951b 100644 --- a/nipype/pipeline/plugins/linear.py +++ b/nipype/pipeline/plugins/linear.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Local serial workflow execution -""" +"""Local serial workflow execution""" import os from .base import PluginBase, logger, report_crash, report_nodes_not_run, str2bool diff --git a/nipype/pipeline/plugins/lsf.py b/nipype/pipeline/plugins/lsf.py index cf334be051..4ca380dfaa 100644 --- a/nipype/pipeline/plugins/lsf.py +++ b/nipype/pipeline/plugins/lsf.py @@ -1,5 +1,4 @@ -"""Parallel workflow execution via LSF -""" +"""Parallel workflow execution via LSF""" import os import re diff --git a/nipype/pipeline/plugins/multiproc.py b/nipype/pipeline/plugins/multiproc.py index 9b27b47383..086ee4430c 100644 --- a/nipype/pipeline/plugins/multiproc.py +++ b/nipype/pipeline/plugins/multiproc.py @@ -338,7 +338,9 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): cached, updated = self.procs[jobid].is_cached() # updatehash and run_without_submitting are also run locally - if (cached and updatehash and not updated) or self.procs[jobid].run_without_submitting: + if (cached and updatehash and not updated) or self.procs[ + jobid + ].run_without_submitting: logger.debug("Running node %s on master thread", self.procs[jobid]) try: self.procs[jobid].run(updatehash=updatehash) diff --git a/nipype/pipeline/plugins/oar.py b/nipype/pipeline/plugins/oar.py index df56391bae..b9c4a050ab 100644 --- a/nipype/pipeline/plugins/oar.py +++ b/nipype/pipeline/plugins/oar.py @@ -1,5 +1,4 @@ -"""Parallel workflow execution via OAR http://oar.imag.fr -""" +"""Parallel workflow execution via OAR http://oar.imag.fr""" import os import stat diff --git a/nipype/pipeline/plugins/pbs.py b/nipype/pipeline/plugins/pbs.py index d967af0bed..01c80efc5a 100644 --- a/nipype/pipeline/plugins/pbs.py +++ b/nipype/pipeline/plugins/pbs.py @@ -1,5 +1,4 @@ -"""Parallel workflow execution via PBS/Torque -""" +"""Parallel workflow execution via PBS/Torque""" import os from time import sleep diff --git a/nipype/pipeline/plugins/pbsgraph.py b/nipype/pipeline/plugins/pbsgraph.py index 4b245dedb7..0cb925af38 100644 --- a/nipype/pipeline/plugins/pbsgraph.py +++ b/nipype/pipeline/plugins/pbsgraph.py @@ -1,5 +1,4 @@ -"""Parallel workflow execution via PBS/Torque -""" +"""Parallel workflow execution via PBS/Torque""" import os import sys diff --git a/nipype/pipeline/plugins/sge.py b/nipype/pipeline/plugins/sge.py index 38079e947d..ce8e046f01 100644 --- a/nipype/pipeline/plugins/sge.py +++ b/nipype/pipeline/plugins/sge.py @@ -1,5 +1,4 @@ -"""Parallel workflow execution via SGE -""" +"""Parallel workflow execution via SGE""" import os import pwd diff --git a/nipype/pipeline/plugins/sgegraph.py b/nipype/pipeline/plugins/sgegraph.py index 5cd1c7bfb7..3b33b73dee 100644 --- a/nipype/pipeline/plugins/sgegraph.py +++ b/nipype/pipeline/plugins/sgegraph.py @@ -1,5 +1,4 @@ -"""Parallel workflow execution via SGE -""" +"""Parallel workflow execution via SGE""" import os import sys diff --git a/nipype/pipeline/plugins/slurmgraph.py b/nipype/pipeline/plugins/slurmgraph.py index c74ab05a87..05824b016b 100644 --- a/nipype/pipeline/plugins/slurmgraph.py +++ b/nipype/pipeline/plugins/slurmgraph.py @@ -1,5 +1,4 @@ -"""Parallel workflow execution via SLURM -""" +"""Parallel workflow execution via SLURM""" import os import sys diff --git a/nipype/pipeline/plugins/somaflow.py b/nipype/pipeline/plugins/somaflow.py index 
2105204979..16bedaab23 100644 --- a/nipype/pipeline/plugins/somaflow.py +++ b/nipype/pipeline/plugins/somaflow.py @@ -1,5 +1,4 @@ -"""Parallel workflow execution via PBS/Torque -""" +"""Parallel workflow execution via PBS/Torque""" import os import sys diff --git a/nipype/pipeline/plugins/tests/test_base.py b/nipype/pipeline/plugins/tests/test_base.py index 43471a7d64..11acb369e9 100644 --- a/nipype/pipeline/plugins/tests/test_base.py +++ b/nipype/pipeline/plugins/tests/test_base.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Tests for the engine module -""" +"""Tests for the engine module""" import numpy as np import scipy.sparse as ssp diff --git a/nipype/pipeline/plugins/tests/test_legacymultiproc_nondaemon.py b/nipype/pipeline/plugins/tests/test_legacymultiproc_nondaemon.py index 2f35579a40..cd79fbe31c 100644 --- a/nipype/pipeline/plugins/tests/test_legacymultiproc_nondaemon.py +++ b/nipype/pipeline/plugins/tests/test_legacymultiproc_nondaemon.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Testing module for functions and classes from multiproc.py -""" +"""Testing module for functions and classes from multiproc.py""" # Import packages import os import sys diff --git a/nipype/pipeline/plugins/tests/test_tools.py b/nipype/pipeline/plugins/tests/test_tools.py index e21ef42072..e352253dbe 100644 --- a/nipype/pipeline/plugins/tests/test_tools.py +++ b/nipype/pipeline/plugins/tests/test_tools.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Tests for the engine module -""" +"""Tests for the engine module""" import re from unittest import mock diff --git a/nipype/pipeline/plugins/tools.py b/nipype/pipeline/plugins/tools.py index bce3eb82da..7e066b0ea3 100644 --- a/nipype/pipeline/plugins/tools.py +++ b/nipype/pipeline/plugins/tools.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Common graph operations for execution -""" +"""Common graph operations for execution""" import os import getpass from socket import gethostname diff --git a/nipype/testing/tests/test_utils.py b/nipype/testing/tests/test_utils.py index 9217d54694..c3b1cae638 100644 --- a/nipype/testing/tests/test_utils.py +++ b/nipype/testing/tests/test_utils.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Test testing utilities -""" +"""Test testing utilities""" import os import subprocess diff --git a/nipype/testing/utils.py b/nipype/testing/utils.py index 71a75a41c7..96a94d6564 100644 --- a/nipype/testing/utils.py +++ b/nipype/testing/utils.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Additional handy utilities for testing -""" +"""Additional handy utilities for testing""" import os import time import shutil diff --git a/nipype/utils/filemanip.py b/nipype/utils/filemanip.py index 52558f59f0..4916cbacef 100644 --- a/nipype/utils/filemanip.py +++ b/nipype/utils/filemanip.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Miscellaneous file manipulation functions -""" +"""Miscellaneous file manipulation functions""" import 
sys import pickle import errno diff --git a/nipype/utils/matlabtools.py b/nipype/utils/matlabtools.py index ea06cd4126..d871885c06 100644 --- a/nipype/utils/matlabtools.py +++ b/nipype/utils/matlabtools.py @@ -1,6 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -""" Useful Functions for working with matlab""" +"""Useful Functions for working with matlab""" # Stdlib imports import os diff --git a/nipype/utils/misc.py b/nipype/utils/misc.py index ed8a539e66..3f76fbab3c 100644 --- a/nipype/utils/misc.py +++ b/nipype/utils/misc.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Miscellaneous utility functions -""" +"""Miscellaneous utility functions""" import os import sys import re diff --git a/nipype/utils/subprocess.py b/nipype/utils/subprocess.py index acd6b63256..2fa9e52c3b 100644 --- a/nipype/utils/subprocess.py +++ b/nipype/utils/subprocess.py @@ -1,7 +1,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Miscellaneous utility functions -""" +"""Miscellaneous utility functions""" import os import sys import gc From 0c67bb385bec4b0cb1d64010cb8b3c5b6a6e09c4 Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Fri, 21 Feb 2025 14:07:40 -0500 Subject: [PATCH 79/90] chore: Update .git-blame-ignore-revs --- .git-blame-ignore-revs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs index 826a62fe9f..6f762e919b 100644 --- a/.git-blame-ignore-revs +++ b/.git-blame-ignore-revs @@ -1,3 +1,7 @@ +# Fri Feb 21 14:06:53 2025 -0500 - markiewicz@stanford.edu - sty: black [ignore-rev] +8ed2b2306aeb7d89de4958b5293223ffe27a4f34 +# Tue Apr 13 10:16:17 2021 -0400 - markiewicz@stanford.edu - STY: black +b1690d5beb391e08c1e5463f1e3c641cf1e9f58e # Thu Oct 31 10:01:38 2024 -0400 - effigies@gmail.com - STY: black [ignore-rev] bd0d5856d183ba3918eda31f80db3b1d4387c55c # Thu Mar 21 13:34:09 2024 -0400 - effigies@gmail.com - STY: black [ignore-rev] From 74149e03713def8a1552f12a72a73902e4736327 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Wed, 26 Feb 2025 07:31:26 -0500 Subject: [PATCH 80/90] fix: Allow nipype.sphinx.ext.apidoc Config to work with Sphinx 8.2.1+ --- nipype/sphinxext/apidoc/__init__.py | 36 +++++++++++++++++++++-------- 1 file changed, 27 insertions(+), 9 deletions(-) diff --git a/nipype/sphinxext/apidoc/__init__.py b/nipype/sphinxext/apidoc/__init__.py index 151011bdfc..429848d2f5 100644 --- a/nipype/sphinxext/apidoc/__init__.py +++ b/nipype/sphinxext/apidoc/__init__.py @@ -2,6 +2,9 @@ # vi: set ft=python sts=4 ts=4 sw=4 et: """Settings for sphinxext.interfaces and connection to sphinx-apidoc.""" import re +from packaging.version import Version + +import sphinx from sphinx.ext.napoleon import ( Config as NapoleonConfig, _patch_python_domain, @@ -39,13 +42,24 @@ class Config(NapoleonConfig): """ - _config_values = { - "nipype_skip_classes": ( - ["Tester", "InputSpec", "OutputSpec", "Numpy", "NipypeTester"], - "env", - ), - **NapoleonConfig._config_values, - } + if Version(sphinx.__version__) >= Version("8.2.1"): + _config_values = ( + ( + "nipype_skip_classes", + ["Tester", "InputSpec", "OutputSpec", "Numpy", "NipypeTester"], + "env", + frozenset({list[str]}), + ), + *NapoleonConfig._config_values, + ) + else: + _config_values = { + "nipype_skip_classes": ( + ["Tester", "InputSpec", "OutputSpec", "Numpy", "NipypeTester"], + "env", + ), + **NapoleonConfig._config_values, + } def setup(app): @@ -82,8 +96,12 @@ def setup(app): app.connect("autodoc-process-docstring", _process_docstring) app.connect("autodoc-skip-member", _skip_member) - for name, (default, rebuild) in Config._config_values.items(): - app.add_config_value(name, default, rebuild) + if Version(sphinx.__version__) >= Version("8.2.1"): + for name, default, rebuild, types in Config._config_values: + app.add_config_value(name, default, rebuild, types=types) + else: + for name, (default, rebuild) in Config._config_values.items(): + app.add_config_value(name, default, rebuild) return {"version": __version__, "parallel_read_safe": True} From 4dfbbbee48fb92927dacb1905dc77bb5ed7382e4 Mon Sep 17 00:00:00 2001 From: mauriliogenovese <125388969+mauriliogenovese@users.noreply.github.com> Date: Tue, 4 Mar 2025 18:43:14 +0100 Subject: [PATCH 81/90] reimplementation of gpu_count --- nipype/pipeline/plugins/multiproc.py | 2 +- nipype/pipeline/plugins/tools.py | 10 ----- nipype/utils/gpu_count.py | 55 ++++++++++++++++++++++++++++ 3 files changed, 56 insertions(+), 11 deletions(-) create mode 100644 nipype/utils/gpu_count.py diff --git a/nipype/pipeline/plugins/multiproc.py b/nipype/pipeline/plugins/multiproc.py index b403749ff9..be0e006229 100644 --- a/nipype/pipeline/plugins/multiproc.py +++ b/nipype/pipeline/plugins/multiproc.py @@ -21,7 +21,7 @@ from ...utils.profiler import get_system_total_memory_gb from ..engine import MapNode from .base import DistributedPluginBase -from .tools import gpu_count +from ...utils.gpu_count import gpu_count try: from textwrap import indent diff --git a/nipype/pipeline/plugins/tools.py b/nipype/pipeline/plugins/tools.py index 37c841a208..7e066b0ea3 100644 --- a/nipype/pipeline/plugins/tools.py +++ b/nipype/pipeline/plugins/tools.py @@ -175,13 +175,3 @@ def create_pyscript(node, updatehash=False, store_exception=True): with open(pyscript, "w") as fp: fp.writelines(cmdstr) return pyscript - - -def gpu_count(): - n_gpus = 1 - try: - import GPUtil - except ImportError: - return 1 - else: - return len(GPUtil.getGPUs()) diff --git a/nipype/utils/gpu_count.py b/nipype/utils/gpu_count.py new file mode 100644 index 
0000000000..57f749baa2 --- /dev/null +++ b/nipype/utils/gpu_count.py @@ -0,0 +1,55 @@ +# -*- DISCLAIMER: this file contains code derived from gputil (https://github.com/anderskm/gputil) +# and therefore is distributed under to the following license: +# +# MIT License +# +# Copyright (c) 2017 anderskm +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +import platform +import shutil +from subprocess import Popen, PIPE +import os + + +def gpu_count(): + try: + if platform.system() == "Windows": + nvidia_smi = shutil.which('nvidia-smi') + if nvidia_smi is None: + nvidia_smi = ( + "%s\\Program Files\\NVIDIA Corporation\\NVSMI\\nvidia-smi.exe" + % os.environ['systemdrive'] + ) + else: + nvidia_smi = "nvidia-smi" + + p = Popen( + [nvidia_smi, "--query-gpu=name", "--format=csv,noheader,nounits"], + stdout=PIPE, + ) + stdout, stderror = p.communicate() + + output = stdout.decode('UTF-8') + lines = output.split(os.linesep) + num_devices = len(lines) - 1 + return num_devices + except: + return 0 From 151facd7ad97615a6ebe1c5907cec87dada7dffa Mon Sep 17 00:00:00 2001 From: mauriliogenovese <125388969+mauriliogenovese@users.noreply.github.com> Date: Wed, 5 Mar 2025 19:41:20 +0100 Subject: [PATCH 82/90] remove gputil requirement --- nipype/info.py | 1 - 1 file changed, 1 deletion(-) diff --git a/nipype/info.py b/nipype/info.py index 072a0330ef..729689ae5d 100644 --- a/nipype/info.py +++ b/nipype/info.py @@ -149,7 +149,6 @@ def get_nipype_gitversion(): "acres", "etelemetry>=0.3.1", "looseversion!=1.2", - "gputil>=1.4.0", "puremagic", ] From e23315c4e787daa932472d2edfdb65b25b461527 Mon Sep 17 00:00:00 2001 From: "Christopher J. 
Markiewicz" Date: Thu, 6 Mar 2025 11:06:47 -0500 Subject: [PATCH 83/90] chore: Remove direct URL for traits on py313 --- tox.ini | 2 -- 1 file changed, 2 deletions(-) diff --git a/tox.ini b/tox.ini index 9704158bec..571b93628b 100644 --- a/tox.ini +++ b/tox.ini @@ -68,8 +68,6 @@ pass_env = CLICOLOR CLICOLOR_FORCE PYTHON_GIL -deps = - py313: traits @ git+https://github.com/enthought/traits.git@10954eb extras = tests full: doc From c18991264169a3787df7d16a072d5fbb23b79acf Mon Sep 17 00:00:00 2001 From: mauriliogenovese <125388969+mauriliogenovese@users.noreply.github.com> Date: Fri, 7 Mar 2025 08:15:38 +0100 Subject: [PATCH 84/90] code cleanup --- nipype/utils/gpu_count.py | 31 +++++++++++-------------------- 1 file changed, 11 insertions(+), 20 deletions(-) diff --git a/nipype/utils/gpu_count.py b/nipype/utils/gpu_count.py index 57f749baa2..70eb6d724e 100644 --- a/nipype/utils/gpu_count.py +++ b/nipype/utils/gpu_count.py @@ -25,31 +25,22 @@ import platform import shutil -from subprocess import Popen, PIPE +import subprocess import os def gpu_count(): + nvidia_smi = shutil.which('nvidia-smi') + if nvidia_smi is None and platform.system() == "Windows": + nvidia_smi = f'{os.environ["systemdrive"]}\\Program Files\\NVIDIA Corporation\\NVSMI\\nvidia-smi.exe' + if nvidia_smi is None: + return 0 try: - if platform.system() == "Windows": - nvidia_smi = shutil.which('nvidia-smi') - if nvidia_smi is None: - nvidia_smi = ( - "%s\\Program Files\\NVIDIA Corporation\\NVSMI\\nvidia-smi.exe" - % os.environ['systemdrive'] - ) - else: - nvidia_smi = "nvidia-smi" - - p = Popen( + p = subprocess.run( [nvidia_smi, "--query-gpu=name", "--format=csv,noheader,nounits"], - stdout=PIPE, + stdout=subprocess.PIPE, + text=True, ) - stdout, stderror = p.communicate() - - output = stdout.decode('UTF-8') - lines = output.split(os.linesep) - num_devices = len(lines) - 1 - return num_devices - except: + except (OSError, UnicodeDecodeError): return 0 + return len(p.stdout.splitlines()) From f2119d663257a5de8aac5255b88b4304198061cc Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Wed, 19 Mar 2025 19:10:02 -0400 Subject: [PATCH 85/90] doc: 1.10.0 changelog --- doc/changelog/1.X.X-changelog.rst | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/doc/changelog/1.X.X-changelog.rst b/doc/changelog/1.X.X-changelog.rst index a51ef7f13e..6373d501f7 100644 --- a/doc/changelog/1.X.X-changelog.rst +++ b/doc/changelog/1.X.X-changelog.rst @@ -1,3 +1,24 @@ +1.10.0 (March 20, 2025) +======================= + +New feature release in the 1.10.x series. + +This release adds GPUs to multiprocess resource management. +In general, no changes to existing code should be required if the GPU-enabled +interface has a ``use_gpu`` input. +The ``n_gpu_procs`` can be used to set the number of GPU processes that may +be run in parallel, which will override the default of GPUs identified by +``nvidia-smi``, or 1 if no GPUs are detected. 
+
+  * FIX: Reimplement ``gpu_count()`` (https://github.com/nipy/nipype/pull/3718)
+  * FIX: Avoid 0D array in ``algorithms.misc.merge_rois`` (https://github.com/nipy/nipype/pull/3713)
+  * FIX: Allow nipype.sphinx.ext.apidoc Config to work with Sphinx 8.2.1+ (https://github.com/nipy/nipype/pull/3716)
+  * FIX: Resolve crashes when running workflows with updatehash=True (https://github.com/nipy/nipype/pull/3709)
+  * ENH: Support for gpu queue (https://github.com/nipy/nipype/pull/3642)
+  * ENH: Update to .wci.yml (https://github.com/nipy/nipype/pull/3708)
+  * ENH: Add Workflow Community Initiative (WCI) descriptor (https://github.com/nipy/nipype/pull/3608)
+
+
 1.9.2 (December 17, 2024)
 =========================

From 1c9953f3397d434fb7296c189b2c828490612d66 Mon Sep 17 00:00:00 2001
From: Chris Markiewicz
Date: Wed, 19 Mar 2025 19:17:56 -0400
Subject: [PATCH 86/90] Update doc/changelog/1.X.X-changelog.rst

---
 doc/changelog/1.X.X-changelog.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/doc/changelog/1.X.X-changelog.rst b/doc/changelog/1.X.X-changelog.rst
index 6373d501f7..e31e508edf 100644
--- a/doc/changelog/1.X.X-changelog.rst
+++ b/doc/changelog/1.X.X-changelog.rst
@@ -1,4 +1,4 @@
-1.10.0 (March 20, 2025)
+1.10.0 (March 19, 2025)
 =======================

 New feature release in the 1.10.x series.

From 15801539f05a204c0ef1ee662ecbe5324f262408 Mon Sep 17 00:00:00 2001
From: "Christopher J. Markiewicz"
Date: Wed, 19 Mar 2025 19:20:28 -0400
Subject: [PATCH 87/90] rel: 1.10.0

---
 nipype/info.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nipype/info.py b/nipype/info.py
index 729689ae5d..c546e4c2fc 100644
--- a/nipype/info.py
+++ b/nipype/info.py
@@ -5,7 +5,7 @@

 # nipype version information
 # Remove .dev0 for release
-__version__ = "1.9.3.dev0"
+__version__ = "1.10.0"


 def get_nipype_gitversion():

From 16a5bdd65e6bfcb01e29bf2c4e8aecfaee2583c3 Mon Sep 17 00:00:00 2001
From: "Christopher J. Markiewicz"
Date: Wed, 19 Mar 2025 19:22:18 -0400
Subject: [PATCH 88/90] chore: Bump dev version

---
 doc/interfaces.rst | 2 +-
 nipype/info.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/doc/interfaces.rst b/doc/interfaces.rst
index 795574a5e6..1079d15607 100644
--- a/doc/interfaces.rst
+++ b/doc/interfaces.rst
@@ -8,7 +8,7 @@ Interfaces and Workflows
 :Release: |version|
 :Date: |today|

-Previous versions: `1.9.2 `_ `1.9.1 `_
+Previous versions: `1.10.0 `_ `1.9.2 `_

 Workflows
 ---------
diff --git a/nipype/info.py b/nipype/info.py
index c546e4c2fc..f1d6e4cf28 100644
--- a/nipype/info.py
+++ b/nipype/info.py
@@ -5,7 +5,7 @@

 # nipype version information
 # Remove .dev0 for release
-__version__ = "1.10.0"
+__version__ = "1.10.1.dev0"


 def get_nipype_gitversion():

From 6933639f0ebf81e1dc553bc1c0fff0c3d2ad9f0b Mon Sep 17 00:00:00 2001
From: "Christopher J. Markiewicz"
Markiewicz" Date: Wed, 19 Mar 2025 19:22:44 -0400 Subject: [PATCH 89/90] chore: Bump dev version --- nipype/info.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nipype/info.py b/nipype/info.py index 729689ae5d..7ad5aba5bb 100644 --- a/nipype/info.py +++ b/nipype/info.py @@ -5,7 +5,7 @@ # nipype version information # Remove .dev0 for release -__version__ = "1.9.3.dev0" +__version__ = "1.11.0.dev0" def get_nipype_gitversion(): From 18c6466f221b572f08cdb70404b8e0461c0e7bf2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 28 Apr 2025 05:02:26 +0000 Subject: [PATCH 90/90] Chore(deps): Bump astral-sh/setup-uv from 5 to 6 Bumps [astral-sh/setup-uv](https://github.com/astral-sh/setup-uv) from 5 to 6. - [Release notes](https://github.com/astral-sh/setup-uv/releases) - [Commits](https://github.com/astral-sh/setup-uv/compare/v5...v6) --- updated-dependencies: - dependency-name: astral-sh/setup-uv dependency-version: '6' dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- .github/workflows/tests.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index d789ec9061..7934de87a7 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -34,7 +34,7 @@ jobs: with: fetch-depth: 0 - name: Install the latest version of uv - uses: astral-sh/setup-uv@v5 + uses: astral-sh/setup-uv@v6 - run: uv build - run: uvx twine check dist/* - uses: actions/upload-artifact@v4 @@ -102,7 +102,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Install the latest version of uv - uses: astral-sh/setup-uv@v5 + uses: astral-sh/setup-uv@v6 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v5 with: @@ -152,7 +152,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Install the latest version of uv - uses: astral-sh/setup-uv@v5 + uses: astral-sh/setup-uv@v6 - name: Show tox config run: uvx tox c - name: Show tox config (this call)