diff --git a/.circleci/config.yml b/.circleci/config.yml
deleted file mode 100644
index 9a19242..0000000
--- a/.circleci/config.yml
+++ /dev/null
@@ -1,73 +0,0 @@
-version: 2
-jobs:
- build:
- docker:
- - image: docker:18.05.0-ce-git
- steps:
- - checkout
- test_1:
- docker:
- - image: docker:18.05.0-ce-git
- steps:
- - checkout
- - setup_remote_docker:
- docker_layer_caching: true
- - run:
- name: docker build 1
- no_output_timeout: 60m
- command: |
- docker build -t miykael/nipype_tutorial:$CIRCLE_BRANCH .
- - run:
- name: run tests 1
- no_output_timeout: 120m
- command: |
- docker run -it --rm miykael/nipype_tutorial:$CIRCLE_BRANCH python /home/neuro/nipype_tutorial/test_notebooks.py 1
- test_2:
- docker:
- - image: docker:18.05.0-ce-git
- steps:
- - checkout
- - setup_remote_docker:
- docker_layer_caching: true
- - run:
- name: docker build 2
- no_output_timeout: 60m
- command: |
- docker build -t miykael/nipype_tutorial:$CIRCLE_BRANCH .
- - run:
- name: run tests 2
- no_output_timeout: 120m
- command: |
- docker run -it --rm miykael/nipype_tutorial:$CIRCLE_BRANCH python /home/neuro/nipype_tutorial/test_notebooks.py 2
- test_3:
- docker:
- - image: docker:18.05.0-ce-git
- steps:
- - checkout
- - setup_remote_docker:
- docker_layer_caching: true
- - run:
- name: docker build 3
- no_output_timeout: 60m
- command: |
- docker build -t miykael/nipype_tutorial:$CIRCLE_BRANCH .
- - run:
- name: run tests 3
- no_output_timeout: 120m
- command: |
- docker run -it --rm miykael/nipype_tutorial:$CIRCLE_BRANCH python /home/neuro/nipype_tutorial/test_notebooks.py 3
-
-workflows:
- version: 2
- build_and_test:
- jobs:
- - build
- - test_1:
- requires:
- - build
- - test_2:
- requires:
- - build
- - test_3:
- requires:
- - build
diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml
new file mode 100644
index 0000000..5ac3bc4
--- /dev/null
+++ b/.github/workflows/testing.yml
@@ -0,0 +1,76 @@
+name: Build & run notebooks
+
+on:
+ push:
+ branches: [ master ]
+ pull_request:
+ branches: [ master ]
+ workflow_dispatch:
+ inputs:
+ nipype_branch:
+ description: 'Build specific Nipype branch'
+ required: true
+ default: 'master'
+
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v2
+ - name: generate the Dockerfile from generate.sh
+ run: |
+ BRANCH=${{ github.event.inputs.nipype_branch }}
+ BRANCH=${BRANCH:-"master"}
+ bash generate.sh $BRANCH
+ # In this step, this action saves a list of existing images,
+ # the cache is created without them in the post run.
+ # It also restores the cache if it exists.
+ - uses: satackey/action-docker-layer-caching@v0.0.11
+ with:
+ key: tutorial-docker-cache-{hash}
+ restore-keys: |
+ tutorial-docker-cache-
+ layer-tutorial-docker-cache-
+ - name: build the image
+ run: docker build . --file Dockerfile -t nipype_tutorial:latest
+
+ test_1:
+ needs: build
+ runs-on: ubuntu-latest
+ steps:
+ - uses: satackey/action-docker-layer-caching@v0.0.11
+ with:
+ key: tutorial-docker-cache-{hash}
+ restore-keys: |
+ tutorial-docker-cache-
+ layer-tutorial-docker-cache-
+ - name: run test 1
+ run: docker run --rm nipype_tutorial:latest python /home/neuro/nipype_tutorial/test_notebooks.py 1
+
+ test_2:
+ needs: build
+ runs-on: ubuntu-latest
+ steps:
+ - uses: satackey/action-docker-layer-caching@v0.0.11
+ with:
+ key: tutorial-docker-cache-{hash}
+ restore-keys: |
+ tutorial-docker-cache-
+ layer-tutorial-docker-cache-
+ - name: run test 2
+ run: docker run --rm nipype_tutorial:latest python /home/neuro/nipype_tutorial/test_notebooks.py 2
+
+ test_3:
+ needs: build
+ runs-on: ubuntu-latest
+ steps:
+ - uses: satackey/action-docker-layer-caching@v0.0.11
+ with:
+ key: tutorial-docker-cache-{hash}
+ restore-keys: |
+ tutorial-docker-cache-
+ layer-tutorial-docker-cache-
+ - name: run test 3
+ run: docker run --rm nipype_tutorial:latest python /home/neuro/nipype_tutorial/test_notebooks.py 3
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
new file mode 100644
index 0000000..91493dc
--- /dev/null
+++ b/CODE_OF_CONDUCT.md
@@ -0,0 +1,46 @@
+# Contributor Covenant Code of Conduct
+
+## Our Pledge
+
+In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment include:
+
+* Using welcoming and inclusive language
+* Being respectful of differing viewpoints and experiences
+* Gracefully accepting constructive criticism
+* Focusing on what is best for the community
+* Showing empathy towards other community members
+
+Examples of unacceptable behavior by participants include:
+
+* The use of sexualized language or imagery and unwelcome sexual attention or advances
+* Trolling, insulting/derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or electronic address, without explicit permission
+* Other conduct which could reasonably be considered inappropriate in a professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at michaelnotter@hotmail.com. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.
+
+Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version]
+
+[homepage]: http://contributor-covenant.org
+[version]: http://contributor-covenant.org/version/1/4/
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 0000000..9b2e17b
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,89 @@
+# Contributing to `nipype_tutorial`
+
+Welcome to the `nipype_tutorial` repository! We're excited you're here and want to contribute.
+
+These guidelines are designed to make it as easy as possible to get involved.
+If you have any questions that aren't discussed below, please let us know by opening an [issue][link_issues]!
+
+Before you start you'll need to set up a free [GitHub][link_github] account and sign in.
+Here are some [instructions][link_signupinstructions] on how to do just that!
+
+### Labels
+
+The current list of labels are [here][link_labels] and include:
+
+* [Help Wanted][link_helpwanted]
+*These issues contain a task that a member of the team has determined we need additional help with.*
+
+ If you feel that you can contribute to one of these issues, we especially encourage you to do so!
+
+* [Bug][link_bugs]
+*These issues point to problems in the project.*
+
+ If you find a new bug, please give as much detail as possible in your issue, including steps to recreate the error.
+ If you experience the same bug as one already listed, please add any additional information that you have as a comment.
+
+* [Enhancement][link_feature]
+*These issues are asking for enhancements to be added to the project.*
+
+ Please try to make sure that your requested feature is distinct from any others that have already been requested or implemented.
+ If you find one that's similar but there are subtle differences please reference the other request in your issue.
+
+## Making a change
+
+We appreciate all contributions to `nipype_tutorial`, but those accepted fastest will follow a workflow similar to the following:
+
+**1. Comment on an existing issue or open a new issue referencing your addition.**
+
+This allows other members of the `nipype_tutorial` development team to confirm that you aren't overlapping with work that's currently underway and that everyone is on the same page with the goal of the work you're going to carry out.
+
+[This blog][link_pushpullblog] is a nice explanation of why putting this work in up front is so useful to everyone involved.
+
+**2. [Fork][link_fork] the [`nipype_tutorial` repository][link_nipype_tutorial] to your profile.**
+
+This is now your own unique copy of `nipype_tutorial`.
+Changes here won't affect anyone else's work, so it's a safe space to explore edits to the code!
+
+Make sure to [keep your fork up to date][link_updateupstreamwiki] with the original repository.
+
+**3. Make the changes you've discussed.**
+
+Try to keep the changes focused.
+If you feel tempted to "branch out" then please make a [new branch][link_branches].
+
+**4. Submit a [pull request][link_pullrequest].**
+
+A member of the development team will review your changes to confirm that they can be merged into the main codebase.
+
+## Recognizing contributions
+
+We welcome and recognize all contributions from documentation to testing to code development.
+You can see a list of our current contributors in the [contributors tab][link_contributors].
+
+## Thank you!
+
+You're awesome. :wave::smiley:
+
+
+
+*— Based on contributing guidelines from the [STEMMRoleModels][link_stemmrolemodels] project.*
+
+[link_github]: https://github.com/
+[link_nipype_tutorial]: https://github.com/rmarkello/nipype_tutorial
+[link_signupinstructions]: https://help.github.com/articles/signing-up-for-a-new-github-account
+[link_react]: https://github.com/blog/2119-add-reactions-to-pull-requests-issues-and-comments
+[link_issues]: https://github.com/rmarkello/nipype_tutorial/issues
+[link_labels]: https://github.com/rmarkello/nipype_tutorial/labels
+[link_discussingissues]: https://help.github.com/articles/discussing-projects-in-issues-and-pull-requests
+
+[link_bugs]: https://github.com/rmarkello/nipype_tutorial/labels/bug
+[link_helpwanted]: https://github.com/rmarkello/nipype_tutorial/labels/help%20wanted
+[link_feature]: https://github.com/rmarkello/nipype_tutorial/labels/enhancement
+
+[link_pullrequest]: https://help.github.com/articles/creating-a-pull-request/
+[link_fork]: https://help.github.com/articles/fork-a-repo/
+[link_pushpullblog]: https://www.igvita.com/2011/12/19/dont-push-your-pull-requests/
+[link_branches]: https://help.github.com/articles/creating-and-deleting-branches-within-your-repository/
+[link_updateupstreamwiki]: https://help.github.com/articles/syncing-a-fork/
+[link_contributors]: https://github.com/rmarkello/nipype_tutorial/graphs/contributors
+[link_stemmrolemodels]: https://github.com/KirstieJane/STEMMRoleModels
diff --git a/Dockerfile b/Dockerfile
deleted file mode 100644
index 33e12cc..0000000
--- a/Dockerfile
+++ /dev/null
@@ -1,312 +0,0 @@
-# Generated by Neurodocker version 0.4.0
-# Timestamp: 2018-06-26 13:39:06 UTC
-#
-# Thank you for using Neurodocker. If you discover any issues
-# or ways to improve this software, please submit an issue or
-# pull request on our GitHub repository:
-#
-# https://github.com/kaczmarj/neurodocker
-
-FROM neurodebian:stretch-non-free
-
-ARG DEBIAN_FRONTEND="noninteractive"
-
-ENV LANG="en_US.UTF-8" \
- LC_ALL="en_US.UTF-8" \
- ND_ENTRYPOINT="/neurodocker/startup.sh"
-RUN export ND_ENTRYPOINT="/neurodocker/startup.sh" \
- && apt-get update -qq \
- && apt-get install -y -q --no-install-recommends \
- apt-utils \
- bzip2 \
- ca-certificates \
- curl \
- locales \
- unzip \
- && apt-get clean \
- && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* \
- && sed -i -e 's/# en_US.UTF-8 UTF-8/en_US.UTF-8 UTF-8/' /etc/locale.gen \
- && dpkg-reconfigure --frontend=noninteractive locales \
- && update-locale LANG="en_US.UTF-8" \
- && chmod 777 /opt && chmod a+s /opt \
- && mkdir -p /neurodocker \
- && if [ ! -f "$ND_ENTRYPOINT" ]; then \
- echo '#!/usr/bin/env bash' >> "$ND_ENTRYPOINT" \
- && echo 'set -e' >> "$ND_ENTRYPOINT" \
- && echo 'if [ -n "$1" ]; then "$@"; else /usr/bin/env bash; fi' >> "$ND_ENTRYPOINT"; \
- fi \
- && chmod -R 777 /neurodocker && chmod a+s /neurodocker
-
-ENTRYPOINT ["/neurodocker/startup.sh"]
-
-RUN apt-get update -qq \
- && apt-get install -y -q --no-install-recommends \
- convert3d \
- ants \
- fsl \
- gcc \
- g++ \
- graphviz \
- tree \
- git-annex-standalone \
- vim \
- emacs-nox \
- nano \
- less \
- ncdu \
- tig \
- git-annex-remote-rclone \
- octave \
- && apt-get clean \
- && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
-
-RUN sed -i '$isource /etc/fsl/fsl.sh' $ND_ENTRYPOINT
-
-ENV FORCE_SPMMCR="1" \
- LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/usr/lib/x86_64-linux-gnu:/opt/matlabmcr-2018a/v94/runtime/glnxa64:/opt/matlabmcr-2018a/v94/bin/glnxa64:/opt/matlabmcr-2018a/v94/sys/os/glnxa64:/opt/matlabmcr-2018a/v94/extern/bin/glnxa64" \
- MATLABCMD="/opt/matlabmcr-2018a/v94/toolbox/matlab"
-RUN apt-get update -qq \
- && apt-get install -y -q --no-install-recommends \
- bc \
- libxext6 \
- libxpm-dev \
- libxt6 \
- && apt-get clean \
- && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* \
- && echo "Downloading MATLAB Compiler Runtime ..." \
- && curl -fsSL --retry 5 -o /tmp/mcr.zip https://ssd.mathworks.com/supportfiles/downloads/R2018a/deployment_files/R2018a/installers/glnxa64/MCR_R2018a_glnxa64_installer.zip \
- && unzip -q /tmp/mcr.zip -d /tmp/mcrtmp \
- && /tmp/mcrtmp/install -destinationFolder /opt/matlabmcr-2018a -mode silent -agreeToLicense yes \
- && rm -rf /tmp/* \
- && echo "Downloading standalone SPM ..." \
- && curl -fsSL --retry 5 -o /tmp/spm12.zip http://www.fil.ion.ucl.ac.uk/spm/download/restricted/utopia/dev/spm12_latest_Linux_R2018a.zip \
- && unzip -q /tmp/spm12.zip -d /tmp \
- && mkdir -p /opt/spm12-dev \
- && mv /tmp/spm12/* /opt/spm12-dev/ \
- && chmod -R 777 /opt/spm12-dev \
- && rm -rf /tmp/* \
- && /opt/spm12-dev/run_spm12.sh /opt/matlabmcr-2018a/v94 quit \
- && sed -i '$iexport SPMMCRCMD=\"/opt/spm12-dev/run_spm12.sh /opt/matlabmcr-2018a/v94 script\"' $ND_ENTRYPOINT
-
-RUN useradd --no-user-group --create-home --shell /bin/bash neuro
-USER neuro
-
-ENV CONDA_DIR="/opt/miniconda-latest" \
- PATH="/opt/miniconda-latest/bin:$PATH"
-RUN export PATH="/opt/miniconda-latest/bin:$PATH" \
- && echo "Downloading Miniconda installer ..." \
- && conda_installer="/tmp/miniconda.sh" \
- && curl -fsSL --retry 5 -o "$conda_installer" https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh \
- && bash "$conda_installer" -b -p /opt/miniconda-latest \
- && rm -f "$conda_installer" \
- && conda update -yq -nbase conda \
- && conda config --system --prepend channels conda-forge \
- && conda config --system --set auto_update_conda false \
- && conda config --system --set show_channel_urls true \
- && sync && conda clean -tipsy && sync \
- && conda create -y -q --name neuro \
- && conda install -y -q --name neuro \
- python=3.6 \
- pytest \
- jupyter \
- jupyterlab \
- jupyter_contrib_nbextensions \
- traits \
- pandas \
- matplotlib \
- scikit-learn \
- scikit-image \
- seaborn \
- nbformat \
- nb_conda \
- && sync && conda clean -tipsy && sync \
- && bash -c "source activate neuro \
- && pip install --no-cache-dir \
- https://github.com/nipy/nipype/tarball/master \
- https://github.com/INCF/pybids/tarball/master \
- nilearn \
- datalad[full] \
- nipy \
- duecredit \
- nbval" \
- && rm -rf ~/.cache/pip/* \
- && sync \
- && sed -i '$isource activate neuro' $ND_ENTRYPOINT
-
-RUN bash -c 'source activate neuro && jupyter nbextension enable exercise2/main && jupyter nbextension enable spellchecker/main'
-
-USER root
-
-RUN mkdir /data && chmod 777 /data && chmod a+s /data
-
-RUN mkdir /output && chmod 777 /output && chmod a+s /output
-
-USER neuro
-
-RUN printf "[user]\n\tname = miykael\n\temail = michaelnotter@hotmail.com\n" > ~/.gitconfig
-
-RUN bash -c 'source activate neuro && cd /data && datalad install -r ///workshops/nih-2017/ds000114 && cd ds000114 && datalad update -r && datalad get -r sub-01/ses-test/anat sub-01/ses-test/func/*fingerfootlips*'
-
-RUN curl -L https://files.osf.io/v1/resources/fvuh8/providers/osfstorage/580705089ad5a101f17944a9 -o /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz && tar xf /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz -C /data/ds000114/derivatives/fmriprep/. && rm /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz && find /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c -type f -not -name ?mm_T1.nii.gz -not -name ?mm_brainmask.nii.gz -not -name ?mm_tpm*.nii.gz -delete
-
-COPY [".", "/home/neuro/nipype_tutorial"]
-
-USER root
-
-RUN chown -R neuro /home/neuro/nipype_tutorial
-
-RUN rm -rf /opt/conda/pkgs/*
-
-USER neuro
-
-RUN mkdir -p ~/.jupyter && echo c.NotebookApp.ip = \"0.0.0.0\" > ~/.jupyter/jupyter_notebook_config.py
-
-WORKDIR /home/neuro/nipype_tutorial
-
-CMD ["jupyter-notebook"]
-
-RUN echo '{ \
- \n "pkg_manager": "apt", \
- \n "instructions": [ \
- \n [ \
- \n "base", \
- \n "neurodebian:stretch-non-free" \
- \n ], \
- \n [ \
- \n "install", \
- \n [ \
- \n "convert3d", \
- \n "ants", \
- \n "fsl", \
- \n "gcc", \
- \n "g++", \
- \n "graphviz", \
- \n "tree", \
- \n "git-annex-standalone", \
- \n "vim", \
- \n "emacs-nox", \
- \n "nano", \
- \n "less", \
- \n "ncdu", \
- \n "tig", \
- \n "git-annex-remote-rclone", \
- \n "octave" \
- \n ] \
- \n ], \
- \n [ \
- \n "add_to_entrypoint", \
- \n "source /etc/fsl/fsl.sh" \
- \n ], \
- \n [ \
- \n "spm12", \
- \n { \
- \n "version": "dev" \
- \n } \
- \n ], \
- \n [ \
- \n "user", \
- \n "neuro" \
- \n ], \
- \n [ \
- \n "miniconda", \
- \n { \
- \n "miniconda_version": "4.3.31", \
- \n "conda_install": [ \
- \n "python=3.6", \
- \n "pytest", \
- \n "jupyter", \
- \n "jupyterlab", \
- \n "jupyter_contrib_nbextensions", \
- \n "traits", \
- \n "pandas", \
- \n "matplotlib", \
- \n "scikit-learn", \
- \n "scikit-image", \
- \n "seaborn", \
- \n "nbformat", \
- \n "nb_conda" \
- \n ], \
- \n "pip_install": [ \
- \n "https://github.com/nipy/nipype/tarball/master", \
- \n "https://github.com/INCF/pybids/tarball/master", \
- \n "nilearn", \
- \n "datalad[full]", \
- \n "nipy", \
- \n "duecredit", \
- \n "nbval" \
- \n ], \
- \n "create_env": "neuro", \
- \n "activate": true \
- \n } \
- \n ], \
- \n [ \
- \n "run_bash", \
- \n "source activate neuro && jupyter nbextension enable exercise2/main && jupyter nbextension enable spellchecker/main" \
- \n ], \
- \n [ \
- \n "user", \
- \n "root" \
- \n ], \
- \n [ \
- \n "run", \
- \n "mkdir /data && chmod 777 /data && chmod a+s /data" \
- \n ], \
- \n [ \
- \n "run", \
- \n "mkdir /output && chmod 777 /output && chmod a+s /output" \
- \n ], \
- \n [ \
- \n "user", \
- \n "neuro" \
- \n ], \
- \n [ \
- \n "run", \
- \n "printf \"[user]\\\n\\tname = miykael\\\n\\temail = michaelnotter@hotmail.com\\\n\" > ~/.gitconfig" \
- \n ], \
- \n [ \
- \n "run_bash", \
- \n "source activate neuro && cd /data && datalad install -r ///workshops/nih-2017/ds000114 && cd ds000114 && datalad update -r && datalad get -r sub-01/ses-test/anat sub-01/ses-test/func/*fingerfootlips*" \
- \n ], \
- \n [ \
- \n "run", \
- \n "curl -L https://files.osf.io/v1/resources/fvuh8/providers/osfstorage/580705089ad5a101f17944a9 -o /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz && tar xf /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz -C /data/ds000114/derivatives/fmriprep/. && rm /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz && find /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c -type f -not -name ?mm_T1.nii.gz -not -name ?mm_brainmask.nii.gz -not -name ?mm_tpm*.nii.gz -delete" \
- \n ], \
- \n [ \
- \n "copy", \
- \n [ \
- \n ".", \
- \n "/home/neuro/nipype_tutorial" \
- \n ] \
- \n ], \
- \n [ \
- \n "user", \
- \n "root" \
- \n ], \
- \n [ \
- \n "run", \
- \n "chown -R neuro /home/neuro/nipype_tutorial" \
- \n ], \
- \n [ \
- \n "run", \
- \n "rm -rf /opt/conda/pkgs/*" \
- \n ], \
- \n [ \
- \n "user", \
- \n "neuro" \
- \n ], \
- \n [ \
- \n "run", \
- \n "mkdir -p ~/.jupyter && echo c.NotebookApp.ip = \\\"0.0.0.0\\\" > ~/.jupyter/jupyter_notebook_config.py" \
- \n ], \
- \n [ \
- \n "workdir", \
- \n "/home/neuro/nipype_tutorial" \
- \n ], \
- \n [ \
- \n "cmd", \
- \n [ \
- \n "jupyter-notebook" \
- \n ] \
- \n ] \
- \n ] \
- \n}' > /neurodocker/neurodocker_specs.json
diff --git a/LICENSE b/LICENSE
index 1a1c296..13bd7ed 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,4 +1,6 @@
-Copyright (c) 2017,
+BSD 3-Clause License
+
+Copyright (c) 2017, Michael Notter and the nipype_tutorial developers
All rights reserved.
Redistribution and use in source and binary forms, with or without
@@ -11,7 +13,7 @@ modification, are permitted provided that the following conditions are met:
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
-* Neither the name of nipype_tutorial nor the names of its
+* Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
diff --git a/README.md b/README.md
index ecbd933..4a35427 100644
--- a/README.md
+++ b/README.md
@@ -1,14 +1,22 @@
# Nipype Tutorial Notebooks
+[](https://github.com/miykael/nipype_tutorial/actions?query=workflow%3ACI)
+[](https://github.com/miykael/nipype_tutorial/issues/)
+[](https://github.com/miykael/nipype_tutorial/pulls/)
+[](https://GitHub.com/miykael/nipype_tutorial/graphs/contributors/)
+[](https://github.com/miykael/nipype_tutorial/commits/master)
+[](https://github.com/miykael/nipype_tutorial/archive/master.zip)
+[](https://hub.docker.com/r/miykael/nipype_tutorial/)
+[](http://hits.dwyl.io/miykael/nipype_tutorial)
This is the Nipype Tutorial in Jupyter Notebook format. You can access the tutorial in two ways:
1. [Nipype Tutorial Homepage](https://miykael.github.io/nipype_tutorial/): This website contains a static, read-only version of all the notebooks.
-2. [Nipype Tutorial Docker Image](https://miykael.github.io/nipype_tutorial/notebooks/introduction_docker.html): This guide explains how to use docker to run the notebooks interactively on your own computer. The nipype tutorial docker image is the best interactive way to learn Nipype.
+2. [Nipype Tutorial Docker Image](https://miykael.github.io/nipype_tutorial/notebooks/introduction_docker.html): This guide explains how to use Docker to run the notebooks interactively on your own computer. The nipype tutorial docker image is the best interactive way to learn Nipype.
# Feedback, Help & Support
-If you want to help with this tutorial or have any questions, fell free to fork the repo of the [Notebooks](https://github.com/miykael/nipype_tutorial) or interact with other contributors on the slack channel [brainhack.slack.com/messages/nipype/](https://brainhack.slack.com/messages/nipype/). If you have any questions or found a problem, open a new [issue on github](https://github.com/miykael/nipype_tutorial/issues).
+If you want to help with this tutorial or have any questions, feel free to fork the repo of the [Notebooks](https://github.com/miykael/nipype_tutorial) or interact with other contributors on the slack channel [brainhack.slack.com/messages/nipype/](https://brainhack.slack.com/messages/nipype/). If you have any questions or found a problem, open a new [issue on github](https://github.com/miykael/nipype_tutorial/issues).
# Thanks and Acknowledgment
diff --git a/casts/cast_live_python b/casts/cast_live_python
old mode 100755
new mode 100644
index b7afb8f..6637128
--- a/casts/cast_live_python
+++ b/casts/cast_live_python
@@ -26,7 +26,7 @@ geometry=${width}x${height}
this_window=$(xdotool getwindowfocus)
# For consistent appearance
-xterm +sb -fa Hermit -fs $fs -bg black -fg white -geometry $geometry -title Screencast-xterm -e "bash --rcfile cast_ipython.rc" &
+xterm +sb -fa Hermit -fs $fs -bg white -fg black -geometry $geometry -title Screencast-xterm -e "bash --rcfile cast_ipython.rc" &
xterm_pid=$!
sleep 2
diff --git a/create_dockerfile.sh b/create_dockerfile.sh
deleted file mode 100755
index 1ff159d..0000000
--- a/create_dockerfile.sh
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/bin/bash
-
-docker run --rm kaczmarj/neurodocker:master generate docker \
- --base neurodebian:stretch-non-free \
- --pkg-manager apt \
- --install convert3d ants fsl gcc g++ graphviz tree \
- git-annex-standalone vim emacs-nox nano less ncdu \
- tig git-annex-remote-rclone octave \
- --add-to-entrypoint "source /etc/fsl/fsl.sh" \
- --spm12 version=dev \
- --user=neuro \
- --miniconda miniconda_version="4.3.31" \
- conda_install="python=3.6 pytest jupyter jupyterlab jupyter_contrib_nbextensions
- traits pandas matplotlib scikit-learn scikit-image seaborn nbformat nb_conda" \
- pip_install="https://github.com/nipy/nipype/tarball/master
- https://github.com/INCF/pybids/tarball/master
- nilearn datalad[full] nipy duecredit nbval" \
- create_env="neuro" \
- activate=True \
- --run-bash "source activate neuro && jupyter nbextension enable exercise2/main && jupyter nbextension enable spellchecker/main" \
- --user=root \
- --run 'mkdir /data && chmod 777 /data && chmod a+s /data' \
- --run 'mkdir /output && chmod 777 /output && chmod a+s /output' \
- --user=neuro \
- --run 'printf "[user]\n\tname = miykael\n\temail = michaelnotter@hotmail.com\n" > ~/.gitconfig' \
- --run-bash 'source activate neuro && cd /data && datalad install -r ///workshops/nih-2017/ds000114 && cd ds000114 && datalad update -r && datalad get -r sub-01/ses-test/anat sub-01/ses-test/func/*fingerfootlips*' \
- --run 'curl -L https://files.osf.io/v1/resources/fvuh8/providers/osfstorage/580705089ad5a101f17944a9 -o /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz && tar xf /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz -C /data/ds000114/derivatives/fmriprep/. && rm /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz && find /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c -type f -not -name ?mm_T1.nii.gz -not -name ?mm_brainmask.nii.gz -not -name ?mm_tpm*.nii.gz -delete' \
- --copy . "/home/neuro/nipype_tutorial" \
- --user=root \
- --run 'chown -R neuro /home/neuro/nipype_tutorial' \
- --run 'rm -rf /opt/conda/pkgs/*' \
- --user=neuro \
- --run 'mkdir -p ~/.jupyter && echo c.NotebookApp.ip = \"0.0.0.0\" > ~/.jupyter/jupyter_notebook_config.py' \
- --workdir /home/neuro/nipype_tutorial \
- --cmd "jupyter-notebook" > Dockerfile
diff --git a/docs/index.html b/docs/index.html
index e9a115e..0ac42d3 100644
--- a/docs/index.html
+++ b/docs/index.html
@@ -11787,6 +11787,9 @@
%%html
+
+ <!-- TUTORIAL USERS: PLEASE EXECUTE THIS CELL -->
+
<style>.container { width:75% !important; }</style>
<link rel='stylesheet' type='text/css' href='static/css/mobile.css'>
<link rel='stylesheet' type='text/css' href='static/css/homepage.css'>
@@ -11801,8 +11804,8 @@
you everything so that you can start creating your own workflows in no time. We recommend that you start with
the introduction section to familiarize yourself with the tools used in this tutorial and then move on to the
basic concepts section to learn everything you need to know for your everyday life with Nipype. The workflow
- examples section shows you a real example how you can use Nipype to analyze an actual dataset. For a very
- quick non-imaging introduction, you can check the Nipype Quickstart notebook in the introduciton section.
+ examples section shows you a real example of how you can use Nipype to analyze an actual dataset. For a very
+ quick non-imaging introduction, you can check the Nipype Quickstart notebooks in the introduction section.
</p><p>
All of the notebooks used in this tutorial can be found on <a href="https://github.com/miykael/nipype_tutorial">github.com/miykael/nipype_tutorial</a>.
But if you want to have the real experience and want to go through the computations by yourself, we highly
@@ -11816,9 +11819,9 @@
with working Python 3 software (including Nipype, dipy, matplotlib, nibabel, nipy, numpy, pandas, scipy, seaborn and more),
FSL, ANTs and SPM12 (no license needed). We used <a href="https://github.com/kaczmarj/neurodocker">Neurodocker</a> to create this docker image.
</p><p>
- If you do not want to run tutorial locally, you can also use
+ If you do not want to run the tutorial locally, you can also use
<a href="https://mybinder.org/v2/gh/miykael/nipype_tutorial/master">Binder service</a>.
- Binder automatically launch the Docker container for you and you have access to all of the notebooks.
+ Binder automatically launches the Docker container for you and you have access to all of the notebooks.
Note, that Binder provides between 1G and 4G RAM memory, some notebooks from Workflow Examples might not work.
All notebooks from Introduction and Basic Concepts parts should work.
</p><p>
@@ -11844,9 +11847,10 @@
<a class="subject-link pure-u-1-4" target="_blank" href="notebooks/introduction_python.html">Python</a>
<a class="subject-link pure-u-1-4" target="_blank" href="notebooks/introduction_showcase.html">Nipype Showcase</a>
<a class="subject-link pure-u-1-4" target="_blank" href="notebooks/introduction_quickstart.html">Nipype Quickstart</a>
+ <a class="subject-link pure-u-1-4" target="_blank" href="notebooks/introduction_quickstart_non-neuroimaging.html">Nipype Quickstart (non-neuroimaging examples)</a>
</div>
<p>This section is meant as a general overview. It should give you a short introduction to the main topics that
- you need to understand to use Nipype and this tutorial. The section also contains a very short neuroimaging showcase, as well as as quick non-imaging introduction to Nipype workflows.</p>
+ you need to understand to use Nipype and this tutorial. The section also contains a very short neuroimaging showcase, as well as a quick non-imaging introduction to Nipype workflows.</p>
<h2 class="domain-header color02"><a class="domain-title">Basic Concepts</a></h2>
<div class="pure-g domain-table-container color02">
@@ -11881,7 +11885,7 @@
<a class="subject-link pure-u-1-4" target="_blank" href="notebooks/handson_preprocessing.html">Hands-on 1: Preprocessing</a>
<a class="subject-link pure-u-1-4" target="_blank" href="notebooks/handson_analysis.html">Hands-on 1: Analysis</a>
</div>
- <p>In this section you will find some practical examples and hands-on that show you how to use Nipype in a "real world" scenario.</p>
+ <p>In this section, you will find some practical examples and hands-on that show you how to use Nipype in a "real world" scenario.</p>
<h2 class="domain-header color04"><a class="domain-title">Advanced Concepts</a></h2>
<div class="pure-g domain-table-container color04">
@@ -11910,10 +11914,10 @@
<a class="subject-link pure-u-1-4" target="_blank" href="http://bids-apps.neuroimaging.io">BIDS Apps</a>
<a class="subject-link pure-u-1-4" target="_blank" href="http://fmriprep.readthedocs.io/en/latest/index.html">fmriprep</a>
<a class="subject-link pure-u-1-4" target="_blank" href="https://mriqc.readthedocs.io/en/latest/#">MRIQC</a>
- <a class="subject-link pure-u-1-4" target="_blank" href="https://www.mindboggle.info/">Mindboggle</a>
+ <a class="subject-link pure-u-1-4" target="_blank" href="https://mindboggle.info/">Mindboggle</a>
<a class="subject-link pure-u-1-4" target="_blank" href="https://timvanmourik.github.io/Porcupine/">PORcupine</a>
</div>
- <p>This section will give you helpful links and resources, so that you always know where to go to learn more.</p>
+ <p>This section will give you helpful links and resources so that you always know where to go to learn more.</p>
</div>
</article>
@@ -11971,8 +11975,8 @@
you everything so that you can start creating your own workflows in no time. We recommend that you start with
the introduction section to familiarize yourself with the tools used in this tutorial and then move on to the
basic concepts section to learn everything you need to know for your everyday life with Nipype. The workflow
- examples section shows you a real example how you can use Nipype to analyze an actual dataset. For a very
- quick non-imaging introduction, you can check the Nipype Quickstart notebook in the introduciton section.
+ examples section shows you a real example of how you can use Nipype to analyze an actual dataset. For a very
+ quick non-imaging introduction, you can check the Nipype Quickstart notebooks in the introduction section.
All of the notebooks used in this tutorial can be found on github.com/miykael/nipype_tutorial.
But if you want to have the real experience and want to go through the computations by yourself, we highly
@@ -11986,9 +11990,9 @@
with working Python 3 software (including Nipype, dipy, matplotlib, nibabel, nipy, numpy, pandas, scipy, seaborn and more),
FSL, ANTs and SPM12 (no license needed). We used Neurodocker to create this docker image.
- If you do not want to run tutorial locally, you can also use
+ If you do not want to run the tutorial locally, you can also use
Binder service.
- Binder automatically launch the Docker container for you and you have access to all of the notebooks.
+ Binder automatically launches the Docker container for you and you have access to all of the notebooks.
Note, that Binder provides between 1G and 4G RAM memory, some notebooks from Workflow Examples might not work.
All notebooks from Introduction and Basic Concepts parts should work.
@@ -12014,9 +12018,10 @@
Introduction
Python
Nipype Showcase
Nipype Quickstart
+ Nipype Quickstart (non-neuroimaging examples)
This section is meant as a general overview. It should give you a short introduction to the main topics that - you need to understand to use Nipype and this tutorial. The section also contains a very short neuroimaging showcase, as well as as quick non-imaging introduction to Nipype workflows.
+ you need to understand to use Nipype and this tutorial. The section also contains a very short neuroimaging showcase, as well as a quick non-imaging introduction to Nipype workflows.In this section you will find some practical examples and hands-on that show you how to use Nipype in a "real world" scenario.
+In this section, you will find some practical examples and hands-on that show you how to use Nipype in a "real world" scenario.
This section will give you helpful links and resources, so that you always know where to go to learn more.
+This section will give you helpful links and resources so that you always know where to go to learn more.
%matplotlib inline
-import nibabel as nb
-nb.load('/data/ds000114/sub-01/ses-test/anat/sub-01_ses-test_T1w.nii.gz').orthoview();
+from nilearn.plotting import plot_anat
+%matplotlib inline
+
+
+
plot_anat('/data/ds000114/sub-01/ses-test/anat/sub-01_ses-test_T1w.nii.gz', dim=-1);
BET
In [ ]:
-nb.load('/data/ds000114/sub-01/ses-test/anat/sub-01_ses-test_T1w_bet.nii.gz').orthoview();
+plot_anat('/data/ds000114/sub-01/ses-test/anat/sub-01_ses-test_T1w_bet.nii.gz', dim=-1);
@@ -12029,7 +12041,7 @@ Example of interface: FSL's BET
In [ ]:
-nb.load(res.outputs.out_file).orthoview();
+plot_anat(res.outputs.out_file, dim=-1);
@@ -12187,7 +12199,7 @@ The CommandLine
interfaceIn [ ]:
-nipype_ls = CommandLine('ls', args='-lh /data/ds000114', terminal_output='allatonce')
+nipype_ls = CommandLine('ls', args='-lh', terminal_output='allatonce')
@@ -12199,7 +12211,7 @@ The CommandLine
interface
-Now, we have a Python object nipype_ls
that is a runnable nipype interface. After execution, Nipype interface return a result object. We can retrieve the output of our ls
invocation from the result.runtime
property:
+Now, we have a Python object nipype_ls
that is a runnable nipype interface. After execution, Nipype interface returns a result object. We can retrieve the output of our ls
invocation from the result.runtime
property:
@@ -12264,7 +12276,7 @@ The CommandLine
interface
-Create your own CommandLine
interface¶
Let's create a Nipype Interface for a very simple tool called antsTransformInfo
from the ANTs package. This tool is so simple it does not even have an usage description for bash. Using it with a file, gives us the following result:
+Create your own CommandLine
interface¶
Let's create a Nipype Interface for a very simple tool called antsTransformInfo
from the ANTs package. This tool is so simple it does not even have a usage description for bash. Using it with a file, gives us the following result:
@@ -12298,20 +12310,20 @@ Create your own CommandLine
Reference Count: 3
Modified Time: 660
Debug: Off
- Object Name:
- Observers:
+ Object Name:
+ Observers:
none
- Matrix:
- 1.0201 -0.00984231 0.00283729
- -0.245557 0.916396 0.324585
- -0.0198016 -0.00296066 0.988634
+ Matrix:
+ 1.0201 -0.00984231 0.00283729
+ -0.245557 0.916396 0.324585
+ -0.0198016 -0.00296066 0.988634
Offset: [2.00569, -15.15, -1.26341]
Center: [-3.37801, 17.4338, 8.46811]
Translation: [1.79024, -13.0295, -1.34439]
- Inverse:
- 0.982713 0.0105343 -0.00627888
- 0.256084 1.09282 -0.359526
- 0.0204499 0.00348366 1.01029
+ Inverse:
+ 0.982713 0.0105343 -0.00627888
+ 0.256084 1.09282 -0.359526
+ 0.0204499 0.00348366 1.01029
Singular: 0
@@ -12421,7 +12433,7 @@ So let's plan our implementation:
-Specifying the inputs¶
However, the args
argument is too generic and does not deviate much from just running it in bash, or directly using subprocess.Popen
. Let's define the intputs specification for the interface, extending the nipype.interfaces.base.CommandLineInputSpec
class.
+Specifying the inputs¶
However, the args
argument is too generic and does not deviate much from just running it in bash, or directly using subprocess.Popen
. Let's define the inputs specification for the interface, extending the nipype.interfaces.base.CommandLineInputSpec
class.
The inputs are implemented using the Enthought traits package. For now, we'll use the File
trait extension of nipype:
@@ -12590,7 +12602,7 @@ Specifying the inputs
-Nipype will make sure that the parameters fullfil their prescribed attributes. For instance in_file
is mandatory. An error is issued if we build the command line or try to run this interface without it:
+Nipype will make sure that the parameters fulfill their prescribed attributes. For instance, in_file
is mandatory. An error is issued if we build the command line or try to run this interface without it:
@@ -12700,7 +12712,7 @@ Specifying the outputsclass TransformInfoOutputSpec(TraitedSpec):
translation = traits.List(traits.Float, desc='the translation component of the input transform')
-
+
class TransformInfo(CommandLine):
_cmd = 'antsTransformInfo'
input_spec = TransformInfoInputSpec
@@ -12835,20 +12847,20 @@ We are almost there - final needs
@@ -12863,7 +12875,7 @@ We are almost there - final needs
-We need to complete the functionallity of the run()
member of our interface to parse the standard output. This is done extending its _run_interface()
member.
+We need to complete the functionality of the run()
member of our interface to parse the standard output. This is done extending its _run_interface()
member.
When we define outputs, generally they need to be explicitly wired in the _list_outputs()
member of the core class. Let's see how we can complete those:
@@ -12878,25 +12890,25 @@ We are almost there - final needs_cmd = 'antsTransformInfo'
input_spec = TransformInfoInputSpec
output_spec = TransformInfoOutputSpec
-
+
def _run_interface(self, runtime):
import re
-
+
# Run the command line as a natural CommandLine interface
runtime = super(TransformInfo, self)._run_interface(runtime)
# Search transform in the standard output
expr_tra = re.compile('Translation:\s+\[(?P<translation>[0-9\.-]+,\s[0-9\.-]+,\s[0-9\.-]+)\]')
trans = [float(v) for v in expr_tra.search(runtime.stdout).group('translation').split(', ')]
-
+
# Save it for later use in _list_outputs
setattr(self, '_result', trans)
-
+
# Good to go
return runtime
-
+
def _list_outputs(self):
-
+
# Get the attribute saved during _run_interface
return {'translation': getattr(self, '_result')}
@@ -13000,25 +13012,25 @@ Summary of a CommandLine
_cmd = 'antsTransformInfo'
input_spec = TransformInfoInputSpec
output_spec = TransformInfoOutputSpec
-
+
def _run_interface(self, runtime):
import re
-
+
# Run the command line as a natural CommandLine interface
runtime = super(TransformInfo, self)._run_interface(runtime)
# Search transform in the standard output
expr_tra = re.compile('Translation:\s+\[(?P<translation>[0-9\.-]+,\s[0-9\.-]+,\s[0-9\.-]+)\]')
trans = [float(v) for v in expr_tra.search(runtime.stdout).group('translation').split(', ')]
-
+
# Save it for later use in _list_outputs
setattr(self, '_result', trans)
-
+
# Good to go
return runtime
-
+
def _list_outputs(self):
-
+
# Get the attribute saved during _run_interface
return {'translation': getattr(self, '_result')}
@@ -13087,16 +13099,16 @@ Wrapping up
# Do not set exists=True for output files!
out_file = File(mandatory=True, argstr='%s', position=1, desc='the output image')
-
+
class CustomBETOutputSpec(TraitedSpec):
out_file = File(desc='the output image')
mask_file = File(desc="path/name of binary brain mask (if generated)")
-
+
class CustomBET(CommandLine):
_cmd = 'bet'
input_spec = CustomBETInputSpec
output_spec = CustomBETOutputSpec
-
+
def _list_outputs(self):
# Get the attribute saved during _run_interface
@@ -13131,7 +13143,7 @@ Wrapping up
In [ ]:
-nb.load(result.outputs.out_file).orthoview()
+plot_anat(result.outputs.out_file, dim=-1);
@@ -13179,7 +13191,7 @@ Wrapping up
Create your own Python
interface¶
CommandLine
interface is great, but my tool is already in Python - can I wrap it natively?
-Sure. Let's solve the following problem: Let's say we have a Python function that takes an input image and a list of three translations (x, y, z) in mm, and than writes a resampled image after the translation has been applied:
+Sure. Let's solve the following problem: Let's say we have a Python function that takes an input image and a list of three translations (x, y, z) in mm, and then writes a resampled image after the translation has been applied:
@@ -13194,21 +13206,21 @@ Create your own Python
in
import nibabel as nb
import numpy as np
from scipy.ndimage.interpolation import affine_transform
-
+
# Load the data
nii = nb.load(img)
data = nii.get_data()
-
+
# Create the transformation matrix
matrix = np.eye(3)
trans = (np.array(translation) / nii.header.get_zooms()[:3]) * np.array([1.0, -1.0, -1.0])
-
+
# Apply the transformation matrix
newdata = affine_transform(data, matrix=matrix, offset=trans)
-
+
# Save the new data in a new NIfTI image
nb.Nifti1Image(newdata, nii.affine, nii.header).to_filename(out_file)
-
+
print('Translated file now is here: %s' % out_file)
@@ -13278,7 +13290,7 @@ Create your own Python
in
In [ ]:
-nb.load(orig_image).orthoview()
+plot_anat(orig_image, dim=-1);
@@ -13326,7 +13338,7 @@ Create your own Python
in
In [ ]:
-nb.load('translated.nii.gz').orthoview()
+plot_anat('translated.nii.gz', dim=-1);
@@ -13382,7 +13394,7 @@ Create your own Python
in
-Quick approach - Function
interface¶
Don't reinvent the wheel if it's not necessary. If, like in this case, we have a well-defined function we want to run with Nipype, it is fairly easy to solve it with the Function
interface:
+Quick approach - Function
interface¶
Don't reinvent the wheel if it's not necessary. If, like in this case, we have a well-defined function we want to run with Nipype, it is fairly easy to solve it with the Function
interface:
@@ -13412,8 +13424,8 @@ Quick approach - Function
The arguments of translate_image
should ideally be listed in the same order and with the same names as in the signature of the function. The same should be the case for the outputs. Finally, the Function
interface takes a function
input that is pointed to your python code.
Note: The inputs and outputs do not pass any kind of conformity checking: the function node will take any kind of data type for their inputs and outputs.
-There are some other limitations for the Function
interface when used inside workflows. Additionally, the function must be totally self-contained, since it will run with no global context. In practice, it means that all the imported modules and variables must be defined within the context of the function.
-For more, checkout the Function Node notebook.
+There are some other limitations to the Function
interface when used inside workflows. Additionally, the function must be totally self-contained, since it will run with no global context. In practice, it means that all the imported modules and variables must be defined within the context of the function.
+For more, check out the Function Node notebook.
@@ -13481,7 +13493,7 @@ Quick approach - Function
# Plot the result
-nb.load('translated_functioninterface.nii.gz').orthoview()
+plot_anat('translated_functioninterface.nii.gz', dim=-1);
@@ -13529,7 +13541,7 @@ Quick approach - Function
Complete approach - pure Python
interface¶
Now, we face the problem of interfacing something different from a command line. Therefore, the CommandLine
base class will not help us here. The specification of the inputs and outputs, though, will work the same way.
-Let's start from that point. Our Python function takes in three inputs: (1) the input image, (2) the translation and (3) an output image.
+Let's start from that point on. Our Python function takes in three inputs: (1) the input image, (2) the translation and (3) an output image.
The specification of inputs and outputs must be familiar to you at this point. Please note that now, input specification is derived from BaseInterfaceInputSpec
, which is a bit thinner than CommandLineInputSpec
. The output specification can be derived from TraitedSpec
as before:
@@ -13547,7 +13559,7 @@ Complete approach - pure out_file = File(mandatory=True, desc='the output image') # Do not set exists=True !!
translation = traits.List([50.0, 0.0, 0.0], traits.Float, usedefault=True,
desc='the translation component of the input transform')
-
+
class TranslateImageOutputSpec(TraitedSpec):
out_file = File(desc='the output image')
@@ -13699,7 +13711,7 @@ Complete approach - pure
It crashed with...
-NotImplementedError:
+NotImplementedError:
@@ -13725,16 +13737,16 @@ Complete approach - pure class TranslateImage(BaseInterface):
input_spec = TranslateImageInputSpec
output_spec = TranslateImageOutputSpec
-
+
def _run_interface(self, runtime):
-
+
# Call our python code here:
translate_image(
self.inputs.in_file,
self.inputs.translation,
self.inputs.out_file
)
-
+
# And we are done
return runtime
@@ -13799,7 +13811,7 @@ Complete approach - pure
Translated file now is here: translated_nipype.nii.gz
It crashed with...
-NotImplementedError:
+NotImplementedError:
@@ -13830,14 +13842,14 @@ Complete approach - pure out_file = File(mandatory=True, desc='the output image') # Do not set exists=True !!
translation = traits.List([50.0, 0.0, 0.0], traits.Float, usedefault=True,
desc='the translation component of the input transform')
-
+
class TranslateImageOutputSpec(TraitedSpec):
out_file = File(desc='the output image')
class TranslateImage(BaseInterface):
input_spec = TranslateImageInputSpec
output_spec = TranslateImageOutputSpec
-
+
def _run_interface(self, runtime):
# Call our python code here:
@@ -13907,7 +13919,7 @@ Complete approach - pure In [ ]:
-nb.load(result.outputs.out_file).orthoview()
+plot_anat(result.outputs.out_file, dim=-1);
@@ -13959,7 +13971,7 @@ Create your own MATLAB
in
load input_image.mat;
total = sum(data(:) > 0)
-
The following example, uses scipy.io.savemat
to convert the input image to MATLAB
format. Once the file is loaded we can quickly extract the estimated total volume.
+The following example uses scipy.io.savemat
to convert the input image to MATLAB
format. Once the file is loaded we can quickly extract the estimated total volume.
Note: For the purpose of this example, we will be using the freely available MATLAB
alternative Octave
. But the implementation of a MATLAB
interface will be identical.
@@ -13986,7 +13998,7 @@ Preparation¶
class BrainVolumeMATLABInputSpec(BaseInterfaceInputSpec):
in_file = File(exists=True, mandatory=True)
script_file = File(exists=True, mandatory=True)
-
+
class BrainVolumeMATLABOutputSpec(TraitedSpec):
volume = traits.Int(desc='brain volume')
@@ -14062,7 +14074,7 @@ Step by step implementation
-load input_image.mat;
+load input_image.mat;
total = sum(data(:) > 0)