diff --git a/.circleci/config.yml b/.circleci/config.yml deleted file mode 100644 index e80375b..0000000 --- a/.circleci/config.yml +++ /dev/null @@ -1,73 +0,0 @@ -version: 2 -jobs: - build: - docker: - - image: docker:18.05.0-ce-git - steps: - - checkout - test_1: - docker: - - image: docker:18.05.0-ce-git - steps: - - checkout - - setup_remote_docker: - docker_layer_caching: true - - run: - name: docker build 1 - no_output_timeout: 60m - command: | - docker build -t miykael/nipype_tutorial:$(echo $CIRCLE_BRANCH | sed 's|/|_|g') . - - run: - name: run tests 1 - no_output_timeout: 120m - command: | - docker run -it --rm miykael/nipype_tutorial:$(echo $CIRCLE_BRANCH | sed 's|/|_|g') python /home/neuro/nipype_tutorial/test_notebooks.py 1 - test_2: - docker: - - image: docker:18.05.0-ce-git - steps: - - checkout - - setup_remote_docker: - docker_layer_caching: true - - run: - name: docker build 2 - no_output_timeout: 60m - command: | - docker build -t miykael/nipype_tutorial:$(echo $CIRCLE_BRANCH | sed 's|/|_|g') . - - run: - name: run tests 2 - no_output_timeout: 120m - command: | - docker run -it --rm miykael/nipype_tutorial:$(echo $CIRCLE_BRANCH | sed 's|/|_|g') python /home/neuro/nipype_tutorial/test_notebooks.py 2 - test_3: - docker: - - image: docker:18.05.0-ce-git - steps: - - checkout - - setup_remote_docker: - docker_layer_caching: true - - run: - name: docker build 3 - no_output_timeout: 60m - command: | - docker build -t miykael/nipype_tutorial:$(echo $CIRCLE_BRANCH | sed 's|/|_|g') . - - run: - name: run tests 3 - no_output_timeout: 120m - command: | - docker run -it --rm miykael/nipype_tutorial:$(echo $CIRCLE_BRANCH | sed 's|/|_|g') python /home/neuro/nipype_tutorial/test_notebooks.py 3 - -workflows: - version: 2 - build_and_test: - jobs: - - build - - test_1: - requires: - - build - - test_2: - requires: - - build - - test_3: - requires: - - build diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml new file mode 100644 index 0000000..5ac3bc4 --- /dev/null +++ b/.github/workflows/testing.yml @@ -0,0 +1,76 @@ +name: Build & run notebooks + +on: + push: + branches: [ master ] + pull_request: + branches: [ master ] + workflow_dispatch: + inputs: + nipype_branch: + description: 'Build specific Nipype branch' + required: true + default: 'master' + + +jobs: + build: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - name: generate the Dockerfile from generate.sh + run: | + BRANCH=${{ github.event.inputs.nipype_branch }} + BRANCH=${BRANCH:-"master"} + bash generate.sh $BRANCH + # In this step, this action saves a list of existing images, + # the cache is created without them in the post run. + # It also restores the cache if it exists. + - uses: satackey/action-docker-layer-caching@v0.0.11 + with: + key: tutorial-docker-cache-{hash} + restore-keys: | + tutorial-docker-cache- + layer-tutorial-docker-cache- + - name: build the image + run: docker build . 
--file Dockerfile -t nipype_tutorial:latest + + test_1: + needs: build + runs-on: ubuntu-latest + steps: + - uses: satackey/action-docker-layer-caching@v0.0.11 + with: + key: tutorial-docker-cache-{hash} + restore-keys: | + tutorial-docker-cache- + layer-tutorial-docker-cache- + - name: run test 1 + run: docker run --rm nipype_tutorial:latest python /home/neuro/nipype_tutorial/test_notebooks.py 1 + + test_2: + needs: build + runs-on: ubuntu-latest + steps: + - uses: satackey/action-docker-layer-caching@v0.0.11 + with: + key: tutorial-docker-cache-{hash} + restore-keys: | + tutorial-docker-cache- + layer-tutorial-docker-cache- + - name: run test 2 + run: docker run --rm nipype_tutorial:latest python /home/neuro/nipype_tutorial/test_notebooks.py 2 + + test_3: + needs: build + runs-on: ubuntu-latest + steps: + - uses: satackey/action-docker-layer-caching@v0.0.11 + with: + key: tutorial-docker-cache-{hash} + restore-keys: | + tutorial-docker-cache- + layer-tutorial-docker-cache- + - name: run test 3 + run: docker run --rm nipype_tutorial:latest python /home/neuro/nipype_tutorial/test_notebooks.py 3 diff --git a/Dockerfile b/Dockerfile deleted file mode 100644 index a07d650..0000000 --- a/Dockerfile +++ /dev/null @@ -1,330 +0,0 @@ -# Generated by Neurodocker version 0.4.2-dev -# Timestamp: 2018-10-15 12:01:29 UTC -# -# Thank you for using Neurodocker. If you discover any issues -# or ways to improve this software, please submit an issue or -# pull request on our GitHub repository: -# -# https://github.com/kaczmarj/neurodocker - -FROM neurodebian:stretch-non-free - -ARG DEBIAN_FRONTEND="noninteractive" - -ENV LANG="en_US.UTF-8" \ - LC_ALL="en_US.UTF-8" \ - ND_ENTRYPOINT="/neurodocker/startup.sh" -RUN export ND_ENTRYPOINT="/neurodocker/startup.sh" \ - && apt-get update -qq \ - && apt-get install -y -q --no-install-recommends \ - apt-utils \ - bzip2 \ - ca-certificates \ - curl \ - locales \ - unzip \ - && apt-get clean \ - && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* \ - && sed -i -e 's/# en_US.UTF-8 UTF-8/en_US.UTF-8 UTF-8/' /etc/locale.gen \ - && dpkg-reconfigure --frontend=noninteractive locales \ - && update-locale LANG="en_US.UTF-8" \ - && chmod 777 /opt && chmod a+s /opt \ - && mkdir -p /neurodocker \ - && if [ ! 
-f "$ND_ENTRYPOINT" ]; then \ - echo '#!/usr/bin/env bash' >> "$ND_ENTRYPOINT" \ - && echo 'set -e' >> "$ND_ENTRYPOINT" \ - && echo 'if [ -n "$1" ]; then "$@"; else /usr/bin/env bash; fi' >> "$ND_ENTRYPOINT"; \ - fi \ - && chmod -R 777 /neurodocker && chmod a+s /neurodocker - -ENTRYPOINT ["/neurodocker/startup.sh"] - -RUN apt-get update -qq \ - && apt-get install -y -q --no-install-recommends \ - convert3d \ - ants \ - fsl \ - gcc \ - g++ \ - graphviz \ - tree \ - git-annex-standalone \ - vim \ - emacs-nox \ - nano \ - less \ - ncdu \ - tig \ - git-annex-remote-rclone \ - octave \ - netbase \ - && apt-get clean \ - && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* - -RUN sed -i '$isource /etc/fsl/fsl.sh' $ND_ENTRYPOINT - -ENV FORCE_SPMMCR="1" \ - LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/usr/lib/x86_64-linux-gnu:/opt/matlabmcr-2010a/v713/runtime/glnxa64:/opt/matlabmcr-2010a/v713/bin/glnxa64:/opt/matlabmcr-2010a/v713/sys/os/glnxa64:/opt/matlabmcr-2010a/v713/extern/bin/glnxa64" \ - MATLABCMD="/opt/matlabmcr-2010a/v713/toolbox/matlab" -RUN apt-get update -qq \ - && apt-get install -y -q --no-install-recommends \ - bc \ - libncurses5 \ - libxext6 \ - libxmu6 \ - libxpm-dev \ - libxt6 \ - && apt-get clean \ - && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* \ - && echo "Downloading MATLAB Compiler Runtime ..." \ - && curl -sSL --retry 5 -o /tmp/toinstall.deb http://mirrors.kernel.org/debian/pool/main/libx/libxp/libxp6_1.0.2-2_amd64.deb \ - && dpkg -i /tmp/toinstall.deb \ - && rm /tmp/toinstall.deb \ - && apt-get install -f \ - && apt-get clean \ - && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* \ - && curl -fsSL --retry 5 -o /tmp/MCRInstaller.bin https://dl.dropbox.com/s/zz6me0c3v4yq5fd/MCR_R2010a_glnxa64_installer.bin \ - && chmod +x /tmp/MCRInstaller.bin \ - && /tmp/MCRInstaller.bin -silent -P installLocation="/opt/matlabmcr-2010a" \ - && rm -rf /tmp/* \ - && echo "Downloading standalone SPM ..." \ - && curl -fsSL --retry 5 -o /tmp/spm12.zip http://www.fil.ion.ucl.ac.uk/spm/download/restricted/utopia/previous/spm12_r7219_R2010a.zip \ - && unzip -q /tmp/spm12.zip -d /tmp \ - && mkdir -p /opt/spm12-r7219 \ - && mv /tmp/spm12/* /opt/spm12-r7219/ \ - && chmod -R 777 /opt/spm12-r7219 \ - && rm -rf /tmp/* \ - && /opt/spm12-r7219/run_spm12.sh /opt/matlabmcr-2010a/v713 quit \ - && sed -i '$iexport SPMMCRCMD=\"/opt/spm12-r7219/run_spm12.sh /opt/matlabmcr-2010a/v713 script\"' $ND_ENTRYPOINT - -RUN useradd --no-user-group --create-home --shell /bin/bash neuro -USER neuro - -ENV CONDA_DIR="/opt/miniconda-latest" \ - PATH="/opt/miniconda-latest/bin:$PATH" -RUN export PATH="/opt/miniconda-latest/bin:$PATH" \ - && echo "Downloading Miniconda installer ..." 
\ - && conda_installer="/tmp/miniconda.sh" \ - && curl -fsSL --retry 5 -o "$conda_installer" https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh \ - && bash "$conda_installer" -b -p /opt/miniconda-latest \ - && rm -f "$conda_installer" \ - && conda update -yq -nbase conda \ - && conda config --system --prepend channels conda-forge \ - && conda config --system --set auto_update_conda false \ - && conda config --system --set show_channel_urls true \ - && sync && conda clean -tipsy && sync \ - && conda create -y -q --name neuro \ - && conda install -y -q --name neuro \ - python=3.6 \ - pytest \ - jupyter \ - jupyterlab \ - jupyter_contrib_nbextensions \ - traits \ - pandas \ - matplotlib \ - scikit-learn \ - scikit-image \ - seaborn \ - nbformat \ - nb_conda \ - && sync && conda clean -tipsy && sync \ - && bash -c "source activate neuro \ - && pip install --no-cache-dir \ - https://github.com/nipy/nipype/tarball/master \ - https://github.com/INCF/pybids/tarball/0.6.5 \ - nilearn \ - datalad[full] \ - nipy \ - duecredit \ - nbval" \ - && rm -rf ~/.cache/pip/* \ - && sync \ - && sed -i '$isource activate neuro' $ND_ENTRYPOINT - -ENV LD_LIBRARY_PATH="/opt/miniconda-latest/envs/neuro:/usr/lib/fsl/5.0" - -RUN bash -c 'source activate neuro && jupyter nbextension enable exercise2/main && jupyter nbextension enable spellchecker/main' - -USER root - -RUN mkdir /data && chmod 777 /data && chmod a+s /data - -RUN mkdir /output && chmod 777 /output && chmod a+s /output - -USER neuro - -RUN printf "[user]\n\tname = miykael\n\temail = michaelnotter@hotmail.com\n" > ~/.gitconfig - -RUN bash -c 'source activate neuro && cd /data && datalad install -r ///workshops/nih-2017/ds000114 && cd ds000114 && datalad update -r && datalad get -r sub-01/ses-test/anat sub-01/ses-test/func/*fingerfootlips*' - -RUN curl -L https://files.osf.io/v1/resources/fvuh8/providers/osfstorage/580705089ad5a101f17944a9 -o /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz && tar xf /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz -C /data/ds000114/derivatives/fmriprep/. 
&& rm /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz && find /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c -type f -not -name ?mm_T1.nii.gz -not -name ?mm_brainmask.nii.gz -not -name ?mm_tpm*.nii.gz -delete - -COPY [".", "/home/neuro/nipype_tutorial"] - -USER root - -RUN chown -R neuro /home/neuro/nipype_tutorial - -RUN rm -rf /opt/conda/pkgs/* - -USER neuro - -RUN mkdir -p ~/.jupyter && echo c.NotebookApp.ip = \"0.0.0.0\" > ~/.jupyter/jupyter_notebook_config.py - -WORKDIR /home/neuro/nipype_tutorial - -CMD ["jupyter-notebook"] - -RUN echo '{ \ - \n "pkg_manager": "apt", \ - \n "instructions": [ \ - \n [ \ - \n "base", \ - \n "neurodebian:stretch-non-free" \ - \n ], \ - \n [ \ - \n "install", \ - \n [ \ - \n "convert3d", \ - \n "ants", \ - \n "fsl", \ - \n "gcc", \ - \n "g++", \ - \n "graphviz", \ - \n "tree", \ - \n "git-annex-standalone", \ - \n "vim", \ - \n "emacs-nox", \ - \n "nano", \ - \n "less", \ - \n "ncdu", \ - \n "tig", \ - \n "git-annex-remote-rclone", \ - \n "octave", \ - \n "netbase" \ - \n ] \ - \n ], \ - \n [ \ - \n "add_to_entrypoint", \ - \n "source /etc/fsl/fsl.sh" \ - \n ], \ - \n [ \ - \n "spm12", \ - \n { \ - \n "version": "r7219" \ - \n } \ - \n ], \ - \n [ \ - \n "user", \ - \n "neuro" \ - \n ], \ - \n [ \ - \n "miniconda", \ - \n { \ - \n "miniconda_version": "4.3.31", \ - \n "conda_install": [ \ - \n "python=3.6", \ - \n "pytest", \ - \n "jupyter", \ - \n "jupyterlab", \ - \n "jupyter_contrib_nbextensions", \ - \n "traits", \ - \n "pandas", \ - \n "matplotlib", \ - \n "scikit-learn", \ - \n "scikit-image", \ - \n "seaborn", \ - \n "nbformat", \ - \n "nb_conda" \ - \n ], \ - \n "pip_install": [ \ - \n "https://github.com/nipy/nipype/tarball/master", \ - \n "https://github.com/INCF/pybids/tarball/0.6.5", \ - \n "nilearn", \ - \n "datalad[full]", \ - \n "nipy", \ - \n "duecredit", \ - \n "nbval" \ - \n ], \ - \n "create_env": "neuro", \ - \n "activate": true \ - \n } \ - \n ], \ - \n [ \ - \n "env", \ - \n { \ - \n "LD_LIBRARY_PATH": "/opt/miniconda-latest/envs/neuro:/usr/lib/fsl/5.0" \ - \n } \ - \n ], \ - \n [ \ - \n "run_bash", \ - \n "source activate neuro && jupyter nbextension enable exercise2/main && jupyter nbextension enable spellchecker/main" \ - \n ], \ - \n [ \ - \n "user", \ - \n "root" \ - \n ], \ - \n [ \ - \n "run", \ - \n "mkdir /data && chmod 777 /data && chmod a+s /data" \ - \n ], \ - \n [ \ - \n "run", \ - \n "mkdir /output && chmod 777 /output && chmod a+s /output" \ - \n ], \ - \n [ \ - \n "user", \ - \n "neuro" \ - \n ], \ - \n [ \ - \n "run", \ - \n "printf \"[user]\\\n\\tname = miykael\\\n\\temail = michaelnotter@hotmail.com\\\n\" > ~/.gitconfig" \ - \n ], \ - \n [ \ - \n "run_bash", \ - \n "source activate neuro && cd /data && datalad install -r ///workshops/nih-2017/ds000114 && cd ds000114 && datalad update -r && datalad get -r sub-01/ses-test/anat sub-01/ses-test/func/*fingerfootlips*" \ - \n ], \ - \n [ \ - \n "run", \ - \n "curl -L https://files.osf.io/v1/resources/fvuh8/providers/osfstorage/580705089ad5a101f17944a9 -o /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz && tar xf /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz -C /data/ds000114/derivatives/fmriprep/. 
&& rm /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz && find /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c -type f -not -name ?mm_T1.nii.gz -not -name ?mm_brainmask.nii.gz -not -name ?mm_tpm*.nii.gz -delete" \ - \n ], \ - \n [ \ - \n "copy", \ - \n [ \ - \n ".", \ - \n "/home/neuro/nipype_tutorial" \ - \n ] \ - \n ], \ - \n [ \ - \n "user", \ - \n "root" \ - \n ], \ - \n [ \ - \n "run", \ - \n "chown -R neuro /home/neuro/nipype_tutorial" \ - \n ], \ - \n [ \ - \n "run", \ - \n "rm -rf /opt/conda/pkgs/*" \ - \n ], \ - \n [ \ - \n "user", \ - \n "neuro" \ - \n ], \ - \n [ \ - \n "run", \ - \n "mkdir -p ~/.jupyter && echo c.NotebookApp.ip = \\\"0.0.0.0\\\" > ~/.jupyter/jupyter_notebook_config.py" \ - \n ], \ - \n [ \ - \n "workdir", \ - \n "/home/neuro/nipype_tutorial" \ - \n ], \ - \n [ \ - \n "cmd", \ - \n [ \ - \n "jupyter-notebook" \ - \n ] \ - \n ] \ - \n ] \ - \n}' > /neurodocker/neurodocker_specs.json diff --git a/README.md b/README.md index 9a93c5e..4a35427 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,5 @@ # Nipype Tutorial Notebooks - -[![CircleCi](https://circleci.com/gh/miykael/nipype_tutorial.svg?style=shield)](https://circleci.com/gh/miykael/nipype_tutorial/tree/master) +[![Github Action CI](https://github.com/miykael/nipype_tutorial/workflows/CI/badge.svg?branch=master)](https://github.com/miykael/nipype_tutorial/actions?query=workflow%3ACI) [![GitHub issues](https://img.shields.io/github/issues/miykael/nipype_tutorial.svg)](https://github.com/miykael/nipype_tutorial/issues/) [![GitHub pull-requests](https://img.shields.io/github/issues-pr/miykael/nipype_tutorial.svg)](https://github.com/miykael/nipype_tutorial/pulls/) [![GitHub contributors](https://img.shields.io/github/contributors/miykael/nipype_tutorial.svg)](https://GitHub.com/miykael/nipype_tutorial/graphs/contributors/) diff --git a/Singularity b/Singularity deleted file mode 100644 index d43a3b5..0000000 --- a/Singularity +++ /dev/null @@ -1,333 +0,0 @@ -# Generated by Neurodocker version 0.4.2-dev -# Timestamp: 2018-10-15 12:01:30 UTC -# -# Thank you for using Neurodocker. If you discover any issues -# or ways to improve this software, please submit an issue or -# pull request on our GitHub repository: -# -# https://github.com/kaczmarj/neurodocker - -Bootstrap: docker -From: neurodebian:stretch-non-free - -%post -export ND_ENTRYPOINT="/neurodocker/startup.sh" -apt-get update -qq -apt-get install -y -q --no-install-recommends \ - apt-utils \ - bzip2 \ - ca-certificates \ - curl \ - locales \ - unzip -apt-get clean -rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* -sed -i -e 's/# en_US.UTF-8 UTF-8/en_US.UTF-8 UTF-8/' /etc/locale.gen -dpkg-reconfigure --frontend=noninteractive locales -update-locale LANG="en_US.UTF-8" -chmod 777 /opt && chmod a+s /opt -mkdir -p /neurodocker -if [ ! 
-f "$ND_ENTRYPOINT" ]; then - echo '#!/usr/bin/env bash' >> "$ND_ENTRYPOINT" - echo 'set -e' >> "$ND_ENTRYPOINT" - echo 'if [ -n "$1" ]; then "$@"; else /usr/bin/env bash; fi' >> "$ND_ENTRYPOINT"; -fi -chmod -R 777 /neurodocker && chmod a+s /neurodocker - -apt-get update -qq -apt-get install -y -q --no-install-recommends \ - convert3d \ - ants \ - fsl \ - gcc \ - g++ \ - graphviz \ - tree \ - git-annex-standalone \ - vim \ - emacs-nox \ - nano \ - less \ - ncdu \ - tig \ - git-annex-remote-rclone \ - octave \ - netbase -apt-get clean -rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* - -sed -i '$isource /etc/fsl/fsl.sh' $ND_ENTRYPOINT - -apt-get update -qq -apt-get install -y -q --no-install-recommends \ - bc \ - libncurses5 \ - libxext6 \ - libxmu6 \ - libxpm-dev \ - libxt6 -apt-get clean -rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* -echo "Downloading MATLAB Compiler Runtime ..." -curl -sSL --retry 5 -o /tmp/toinstall.deb http://mirrors.kernel.org/debian/pool/main/libx/libxp/libxp6_1.0.2-2_amd64.deb -dpkg -i /tmp/toinstall.deb -rm /tmp/toinstall.deb -apt-get install -f -apt-get clean -rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* -curl -fsSL --retry 5 -o /tmp/MCRInstaller.bin https://dl.dropbox.com/s/zz6me0c3v4yq5fd/MCR_R2010a_glnxa64_installer.bin -chmod +x /tmp/MCRInstaller.bin -/tmp/MCRInstaller.bin -silent -P installLocation="/opt/matlabmcr-2010a" -rm -rf /tmp/* -echo "Downloading standalone SPM ..." -curl -fsSL --retry 5 -o /tmp/spm12.zip http://www.fil.ion.ucl.ac.uk/spm/download/restricted/utopia/previous/spm12_r7219_R2010a.zip -unzip -q /tmp/spm12.zip -d /tmp -mkdir -p /opt/spm12-r7219 -mv /tmp/spm12/* /opt/spm12-r7219/ -chmod -R 777 /opt/spm12-r7219 -rm -rf /tmp/* -/opt/spm12-r7219/run_spm12.sh /opt/matlabmcr-2010a/v713 quit -sed -i '$iexport SPMMCRCMD=\"/opt/spm12-r7219/run_spm12.sh /opt/matlabmcr-2010a/v713 script\"' $ND_ENTRYPOINT - -useradd --no-user-group --create-home --shell /bin/bash neuro -su - neuro - -export PATH="/opt/miniconda-latest/bin:$PATH" -echo "Downloading Miniconda installer ..." 
-conda_installer="/tmp/miniconda.sh" -curl -fsSL --retry 5 -o "$conda_installer" https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -bash "$conda_installer" -b -p /opt/miniconda-latest -rm -f "$conda_installer" -conda update -yq -nbase conda -conda config --system --prepend channels conda-forge -conda config --system --set auto_update_conda false -conda config --system --set show_channel_urls true -sync && conda clean -tipsy && sync -conda create -y -q --name neuro -conda install -y -q --name neuro \ - python=3.6 \ - pytest \ - jupyter \ - jupyterlab \ - jupyter_contrib_nbextensions \ - traits \ - pandas \ - matplotlib \ - scikit-learn \ - scikit-image \ - seaborn \ - nbformat \ - nb_conda -sync && conda clean -tipsy && sync -bash -c "source activate neuro - pip install --no-cache-dir \ - https://github.com/nipy/nipype/tarball/master \ - https://github.com/INCF/pybids/tarball/0.6.5 \ - nilearn \ - datalad[full] \ - nipy \ - duecredit \ - nbval" -rm -rf ~/.cache/pip/* -sync -sed -i '$isource activate neuro' $ND_ENTRYPOINT - - -bash -c 'source activate neuro && jupyter nbextension enable exercise2/main && jupyter nbextension enable spellchecker/main' - -su - root - -mkdir /data && chmod 777 /data && chmod a+s /data - -mkdir /output && chmod 777 /output && chmod a+s /output - -su - neuro - -printf "[user]\n\tname = miykael\n\temail = michaelnotter@hotmail.com\n" > ~/.gitconfig - -bash -c 'source activate neuro && cd /data && datalad install -r ///workshops/nih-2017/ds000114 && cd ds000114 && datalad update -r && datalad get -r sub-01/ses-test/anat sub-01/ses-test/func/*fingerfootlips*' - -curl -L https://files.osf.io/v1/resources/fvuh8/providers/osfstorage/580705089ad5a101f17944a9 -o /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz && tar xf /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz -C /data/ds000114/derivatives/fmriprep/. 
&& rm /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz && find /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c -type f -not -name ?mm_T1.nii.gz -not -name ?mm_brainmask.nii.gz -not -name ?mm_tpm*.nii.gz -delete - -su - root - -chown -R neuro /home/neuro/nipype_tutorial - -rm -rf /opt/conda/pkgs/* - -su - neuro - -mkdir -p ~/.jupyter && echo c.NotebookApp.ip = \"0.0.0.0\" > ~/.jupyter/jupyter_notebook_config.py - -cd /home/neuro/nipype_tutorial - -echo '{ -\n "pkg_manager": "apt", -\n "instructions": [ -\n [ -\n "base", -\n "neurodebian:stretch-non-free" -\n ], -\n [ -\n "_header", -\n { -\n "version": "generic", -\n "method": "custom" -\n } -\n ], -\n [ -\n "install", -\n [ -\n "convert3d", -\n "ants", -\n "fsl", -\n "gcc", -\n "g++", -\n "graphviz", -\n "tree", -\n "git-annex-standalone", -\n "vim", -\n "emacs-nox", -\n "nano", -\n "less", -\n "ncdu", -\n "tig", -\n "git-annex-remote-rclone", -\n "octave", -\n "netbase" -\n ] -\n ], -\n [ -\n "add_to_entrypoint", -\n "source /etc/fsl/fsl.sh" -\n ], -\n [ -\n "spm12", -\n { -\n "version": "r7219" -\n } -\n ], -\n [ -\n "user", -\n "neuro" -\n ], -\n [ -\n "miniconda", -\n { -\n "miniconda_version": "4.3.31", -\n "conda_install": [ -\n "python=3.6", -\n "pytest", -\n "jupyter", -\n "jupyterlab", -\n "jupyter_contrib_nbextensions", -\n "traits", -\n "pandas", -\n "matplotlib", -\n "scikit-learn", -\n "scikit-image", -\n "seaborn", -\n "nbformat", -\n "nb_conda" -\n ], -\n "pip_install": [ -\n "https://github.com/nipy/nipype/tarball/master", -\n "https://github.com/INCF/pybids/tarball/0.6.5", -\n "nilearn", -\n "datalad[full]", -\n "nipy", -\n "duecredit", -\n "nbval" -\n ], -\n "create_env": "neuro", -\n "activate": true -\n } -\n ], -\n [ -\n "env", -\n { -\n "LD_LIBRARY_PATH": "/opt/miniconda-latest/envs/neuro:/usr/lib/fsl/5.0" -\n } -\n ], -\n [ -\n "run_bash", -\n "source activate neuro && jupyter nbextension enable exercise2/main && jupyter nbextension enable spellchecker/main" -\n ], -\n [ -\n "user", -\n "root" -\n ], -\n [ -\n "run", -\n "mkdir /data && chmod 777 /data && chmod a+s /data" -\n ], -\n [ -\n "run", -\n "mkdir /output && chmod 777 /output && chmod a+s /output" -\n ], -\n [ -\n "user", -\n "neuro" -\n ], -\n [ -\n "run", -\n "printf \"[user]\\\n\\tname = miykael\\\n\\temail = michaelnotter@hotmail.com\\\n\" > ~/.gitconfig" -\n ], -\n [ -\n "run_bash", -\n "source activate neuro && cd /data && datalad install -r ///workshops/nih-2017/ds000114 && cd ds000114 && datalad update -r && datalad get -r sub-01/ses-test/anat sub-01/ses-test/func/*fingerfootlips*" -\n ], -\n [ -\n "run", -\n "curl -L https://files.osf.io/v1/resources/fvuh8/providers/osfstorage/580705089ad5a101f17944a9 -o /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz && tar xf /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz -C /data/ds000114/derivatives/fmriprep/. 
&& rm /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz && find /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c -type f -not -name ?mm_T1.nii.gz -not -name ?mm_brainmask.nii.gz -not -name ?mm_tpm*.nii.gz -delete" -\n ], -\n [ -\n "copy", -\n [ -\n ".", -\n "/home/neuro/nipype_tutorial" -\n ] -\n ], -\n [ -\n "user", -\n "root" -\n ], -\n [ -\n "run", -\n "chown -R neuro /home/neuro/nipype_tutorial" -\n ], -\n [ -\n "run", -\n "rm -rf /opt/conda/pkgs/*" -\n ], -\n [ -\n "user", -\n "neuro" -\n ], -\n [ -\n "run", -\n "mkdir -p ~/.jupyter && echo c.NotebookApp.ip = \\\"0.0.0.0\\\" > ~/.jupyter/jupyter_notebook_config.py" -\n ], -\n [ -\n "workdir", -\n "/home/neuro/nipype_tutorial" -\n ] -\n ] -\n}' > /neurodocker/neurodocker_specs.json - -%environment -export LANG="en_US.UTF-8" -export LC_ALL="en_US.UTF-8" -export ND_ENTRYPOINT="/neurodocker/startup.sh" -export FORCE_SPMMCR="1" -export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/usr/lib/x86_64-linux-gnu:/opt/matlabmcr-2010a/v713/runtime/glnxa64:/opt/matlabmcr-2010a/v713/bin/glnxa64:/opt/matlabmcr-2010a/v713/sys/os/glnxa64:/opt/matlabmcr-2010a/v713/extern/bin/glnxa64" -export MATLABCMD="/opt/matlabmcr-2010a/v713/toolbox/matlab" -export CONDA_DIR="/opt/miniconda-latest" -export PATH="/opt/miniconda-latest/bin:$PATH" -export LD_LIBRARY_PATH="/opt/miniconda-latest/envs/neuro:/usr/lib/fsl/5.0" - -%files -. /home/neuro/nipype_tutorial - -%runscript -/neurodocker/startup.sh "$@" diff --git a/docs/index.html b/docs/index.html index 40d8f6b..0ac42d3 100644 --- a/docs/index.html +++ b/docs/index.html @@ -11891,7 +11891,7 @@ <div class="pure-g domain-table-container color04"> <a class="subject-link pure-u-1-4" target="_blank" href="notebooks/advanced_create_interfaces.html">Create Interfaces</a> <a class="subject-link pure-u-1-4" target="_blank" href="notebooks/advanced_interfaces_caching.html">Interfaces Caching</a> - <a class="subject-link pure-u-1-4" target="_blank" href="notebooks/advanced_nipypecli.html">Nipype Command Line Interface</a> + <a class="subject-link pure-u-1-4" target="_blank" href="notebooks/advanced_command_line_interface.html">Nipype Command Line Interface</a> <a class="subject-link pure-u-1-4" target="_blank" href="notebooks/advanced_aws.html">Amazon Web Services (AWS)</a> <a class="subject-link pure-u-1-4" target="_blank" href="notebooks/advanced_sphinx_ext.html">Sphinx extensions</a> <a class="subject-link pure-u-1-4" target="_blank" href="notebooks/advanced_spmmcr.html">SPM with MATLAB Common Runtime (MCR)</a> @@ -11914,7 +11914,7 @@ <a class="subject-link pure-u-1-4" target="_blank" href="http://bids-apps.neuroimaging.io">BIDS Apps</a> <a class="subject-link pure-u-1-4" target="_blank" href="http://fmriprep.readthedocs.io/en/latest/index.html">fmriprep</a> <a class="subject-link pure-u-1-4" target="_blank" href="https://mriqc.readthedocs.io/en/latest/#">MRIQC</a> - <a class="subject-link pure-u-1-4" target="_blank" href="https://www.mindboggle.info/">Mindboggle</a> + <a class="subject-link pure-u-1-4" target="_blank" href="https://mindboggle.info/">Mindboggle</a> <a class="subject-link pure-u-1-4" target="_blank" href="https://timvanmourik.github.io/Porcupine/">PORcupine</a> </div> <p>This section will give you helpful links and resources so that you always know where to go to learn more.</p> @@ -12062,7 +12062,7 @@
 Advanced Concepts
     <a class="subject-link pure-u-1-4" target="_blank" href="notebooks/advanced_create_interfaces.html">Create Interfaces</a>
     <a class="subject-link pure-u-1-4" target="_blank" href="notebooks/advanced_interfaces_caching.html">Interfaces Caching</a>
-    <a class="subject-link pure-u-1-4" target="_blank" href="notebooks/advanced_nipypecli.html">Nipype Command Line Interface</a>
+    <a class="subject-link pure-u-1-4" target="_blank" href="notebooks/advanced_command_line_interface.html">Nipype Command Line Interface</a>
     <a class="subject-link pure-u-1-4" target="_blank" href="notebooks/advanced_aws.html">Amazon Web Services (AWS)</a>
     <a class="subject-link pure-u-1-4" target="_blank" href="notebooks/advanced_sphinx_ext.html">Sphinx extensions</a>
     <a class="subject-link pure-u-1-4" target="_blank" href="notebooks/advanced_spmmcr.html">SPM with MATLAB Common Runtime (MCR)</a>
@@ -12085,7 +12085,7 @@
 Useful Resources & Links
     <a class="subject-link pure-u-1-4" target="_blank" href="http://bids-apps.neuroimaging.io">BIDS Apps</a>
     <a class="subject-link pure-u-1-4" target="_blank" href="http://fmriprep.readthedocs.io/en/latest/index.html">fmriprep</a>
     <a class="subject-link pure-u-1-4" target="_blank" href="https://mriqc.readthedocs.io/en/latest/#">MRIQC</a>
-    <a class="subject-link pure-u-1-4" target="_blank" href="https://www.mindboggle.info/">Mindboggle</a>
+    <a class="subject-link pure-u-1-4" target="_blank" href="https://mindboggle.info/">Mindboggle</a>
     <a class="subject-link pure-u-1-4" target="_blank" href="https://timvanmourik.github.io/Porcupine/">PORcupine</a>
 <p>This section will give you helpful links and resources so that you always know where to go to learn more.</p>
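Note on the new `.github/workflows/testing.yml` above: the `build` job generates the Dockerfile and builds the image once, and `test_1` through `test_3` reuse the cached layers to run three notebook shards. A minimal sketch of the same steps run locally, assuming Docker is installed and `generate.sh` has already written the Dockerfile (the tag name is arbitrary):

```python
import subprocess

# Build once, mirroring the `build` job (assumes a generated Dockerfile in CWD).
subprocess.run(["docker", "build", "-t", "nipype_tutorial:latest", "."], check=True)

# Run the three shards, mirroring test_1..test_3; the trailing argument selects
# which subset of notebooks test_notebooks.py executes.
for group in ("1", "2", "3"):
    subprocess.run(
        ["docker", "run", "--rm", "nipype_tutorial:latest",
         "python", "/home/neuro/nipype_tutorial/test_notebooks.py", group],
        check=True,
    )
```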
diff --git a/docs/notebooks/basic_data_input.html b/docs/notebooks/basic_data_input.html index 4bc10f8..4d1d3d9 100644 --- a/docs/notebooks/basic_data_input.html +++ b/docs/notebooks/basic_data_input.html @@ -12816,7 +12816,7 @@
 Exercise 2
-FreeSurferSource
-FreeSurferSource is a specific case of a file grabber that felicitates the data import of outputs from the FreeSurfer recon-all algorithm. This, of course, requires that you've already run recon-all on your subject.
+FreeSurferSource
+FreeSurferSource is a specific case of a file grabber that facilitates the data import of outputs from the FreeSurfer recon-all algorithm. This, of course, requires that you've already run recon-all on your subject.
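For reference, the `FreeSurferSource` paragraph corrected above describes a grabber interface for recon-all output; a minimal usage sketch, with both paths assumed from the tutorial's `/data` layout (not part of this diff):

```python
from nipype.interfaces.io import FreeSurferSource

# Grab recon-all outputs for one subject; requires the FreeSurfer
# derivatives to be present on disk (e.g. fetched via datalad).
fs_source = FreeSurferSource()
fs_source.inputs.subjects_dir = '/data/ds000114/derivatives/freesurfer'
fs_source.inputs.subject_id = 'sub-01'

result = fs_source.run()
print(result.outputs.T1)  # path to the subject's mri/T1.mgz
```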
diff --git a/generate.sh b/generate.sh index 7e5cca6..81ec0f1 100644 --- a/generate.sh +++ b/generate.sh @@ -2,6 +2,16 @@ set -e +NIPYPE_BRANCH=${1:-"master"} +case $NIPYPE_BRANCH in + master) + NIPYPE_URL="https://github.com/nipy/nipype/tarball/master" + ;; + *) + NIPYPE_URL="git+https://github.com/nipy/nipype.git@${NIPYPE_BRANCH}" + ;; +esac + # Generate Dockerfile generate_docker() { docker run --rm kaczmarj/neurodocker:master generate docker \ @@ -13,12 +23,13 @@ generate_docker() { --add-to-entrypoint "source /etc/fsl/fsl.sh" \ --spm12 version=r7219 \ --user=neuro \ - --miniconda miniconda_version="4.3.31" \ - conda_install="python=3.6 pytest jupyter jupyterlab jupyter_contrib_nbextensions + --workdir /home/neuro \ + --miniconda \ + conda_install="python=3.8 pytest jupyter jupyterlab jupyter_contrib_nbextensions traits pandas matplotlib scikit-learn scikit-image seaborn nbformat nb_conda" \ - pip_install="https://github.com/nipy/nipype/tarball/master - https://github.com/INCF/pybids/tarball/0.6.5 - nilearn datalad[full] nipy duecredit nbval" \ + pip_install="$NIPYPE_URL + pybids==0.13.1 + nilearn datalad[full] nipy duecredit nbval niflow-nipype1-workflows" \ create_env="neuro" \ activate=True \ --env LD_LIBRARY_PATH="/opt/miniconda-latest/envs/neuro:$LD_LIBRARY_PATH" \ @@ -51,12 +62,13 @@ generate_singularity() { --add-to-entrypoint "source /etc/fsl/fsl.sh" \ --spm12 version=r7219 \ --user=neuro \ - --miniconda miniconda_version="4.3.31" \ - conda_install="python=3.6 pytest jupyter jupyterlab jupyter_contrib_nbextensions + --workdir /home/neuro \ + --miniconda \ + conda_install="python=3.8 pytest jupyter jupyterlab jupyter_contrib_nbextensions traits pandas matplotlib scikit-learn scikit-image seaborn nbformat nb_conda" \ - pip_install="https://github.com/nipy/nipype/tarball/master - https://github.com/INCF/pybids/tarball/0.6.5 - nilearn datalad[full] nipy duecredit nbval" \ + pip_install="$NIPYPE_URL + pybids==0.13.1 + nilearn datalad[full] nipy duecredit nbval niflow-nipype1-workflows" \ create_env="neuro" \ activate=True \ --env LD_LIBRARY_PATH="/opt/miniconda-latest/envs/neuro:$LD_LIBRARY_PATH" \ diff --git a/index.ipynb b/index.ipynb index 44a9920..99e977c 100644 --- a/index.ipynb +++ b/index.ipynb @@ -111,7 +111,7 @@ "
    <div class=\"pure-g domain-table-container color04\">\n",
     "    <a class=\"subject-link pure-u-1-4\" target=\"_blank\" href=\"notebooks/advanced_create_interfaces.html\">Create Interfaces</a>\n",
     "    <a class=\"subject-link pure-u-1-4\" target=\"_blank\" href=\"notebooks/advanced_interfaces_caching.html\">Interfaces Caching</a>\n",
-    "    <a class=\"subject-link pure-u-1-4\" target=\"_blank\" href=\"notebooks/advanced_nipypecli.html\">Nipype Command Line Interface</a>\n",
+    "    <a class=\"subject-link pure-u-1-4\" target=\"_blank\" href=\"notebooks/advanced_command_line_interface.html\">Nipype Command Line Interface</a>\n",
     "    <a class=\"subject-link pure-u-1-4\" target=\"_blank\" href=\"notebooks/advanced_aws.html\">Amazon Web Services (AWS)</a>\n",
     "    <a class=\"subject-link pure-u-1-4\" target=\"_blank\" href=\"notebooks/advanced_sphinx_ext.html\">Sphinx extensions</a>\n",
     "    <a class=\"subject-link pure-u-1-4\" target=\"_blank\" href=\"notebooks/advanced_spmmcr.html\">SPM with MATLAB Common Runtime (MCR)</a>\n",
@@ -134,7 +134,7 @@
     "    <a class=\"subject-link pure-u-1-4\" target=\"_blank\" href=\"http://bids-apps.neuroimaging.io\">BIDS Apps</a>\n",
     "    <a class=\"subject-link pure-u-1-4\" target=\"_blank\" href=\"http://fmriprep.readthedocs.io/en/latest/index.html\">fmriprep</a>\n",
     "    <a class=\"subject-link pure-u-1-4\" target=\"_blank\" href=\"https://mriqc.readthedocs.io/en/latest/#\">MRIQC</a>\n",
-    "    <a class=\"subject-link pure-u-1-4\" target=\"_blank\" href=\"https://www.mindboggle.info/\">Mindboggle</a>\n",
+    "    <a class=\"subject-link pure-u-1-4\" target=\"_blank\" href=\"https://mindboggle.info/\">Mindboggle</a>\n",
     "    <a class=\"subject-link pure-u-1-4\" target=\"_blank\" href=\"https://timvanmourik.github.io/Porcupine/\">PORcupine</a>\n",
     "</div>\n",
     "\n",
     "This section will give you helpful links and resources so that you always know where to go to learn more.\n",
@@ -280,7 +280,7 @@
     "<div class=\"pure-g domain-table-container color04\">\n",
     "    <a class=\"subject-link pure-u-1-4\" target=\"_blank\" href=\"notebooks/advanced_create_interfaces.html\">Create Interfaces</a>\n",
     "    <a class=\"subject-link pure-u-1-4\" target=\"_blank\" href=\"notebooks/advanced_interfaces_caching.html\">Interfaces Caching</a>\n",
-    "    <a class=\"subject-link pure-u-1-4\" target=\"_blank\" href=\"notebooks/advanced_nipypecli.html\">Nipype Command Line Interface</a>\n",
+    "    <a class=\"subject-link pure-u-1-4\" target=\"_blank\" href=\"notebooks/advanced_command_line_interface.html\">Nipype Command Line Interface</a>\n",
     "    <a class=\"subject-link pure-u-1-4\" target=\"_blank\" href=\"notebooks/advanced_aws.html\">Amazon Web Services (AWS)</a>\n",
     "    <a class=\"subject-link pure-u-1-4\" target=\"_blank\" href=\"notebooks/advanced_sphinx_ext.html\">Sphinx extensions</a>\n",
     "    <a class=\"subject-link pure-u-1-4\" target=\"_blank\" href=\"notebooks/advanced_spmmcr.html\">SPM with MATLAB Common Runtime (MCR)</a>\n",
@@ -303,7 +303,7 @@
     "    <a class=\"subject-link pure-u-1-4\" target=\"_blank\" href=\"http://bids-apps.neuroimaging.io\">BIDS Apps</a>\n",
     "    <a class=\"subject-link pure-u-1-4\" target=\"_blank\" href=\"http://fmriprep.readthedocs.io/en/latest/index.html\">fmriprep</a>\n",
     "    <a class=\"subject-link pure-u-1-4\" target=\"_blank\" href=\"https://mriqc.readthedocs.io/en/latest/#\">MRIQC</a>\n",
-    "    <a class=\"subject-link pure-u-1-4\" target=\"_blank\" href=\"https://www.mindboggle.info/\">Mindboggle</a>\n",
+    "    <a class=\"subject-link pure-u-1-4\" target=\"_blank\" href=\"https://mindboggle.info/\">Mindboggle</a>\n",
     "    <a class=\"subject-link pure-u-1-4\" target=\"_blank\" href=\"https://timvanmourik.github.io/Porcupine/\">PORcupine</a>\n",
     "</div>\n",
     "\n",
     "This section will give you helpful links and resources so that you always know where to go to learn more.\n",
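Side note on the `generate.sh` hunk above: the new `case` statement maps the requested Nipype branch to a pip-installable URL, which is what lets `workflow_dispatch` build against any branch. A hypothetical Python rendering of that mapping, to make the behavior explicit (the helper name is invented for illustration and is not in the repo):

```python
def nipype_pip_url(branch: str = "master") -> str:
    """Mirror the generate.sh case statement: master installs the GitHub
    tarball, any other branch installs via git (illustrative helper only)."""
    if branch == "master":
        return "https://github.com/nipy/nipype/tarball/master"
    return f"git+https://github.com/nipy/nipype.git@{branch}"

print(nipype_pip_url())           # https://github.com/nipy/nipype/tarball/master
print(nipype_pip_url("rel/1.6"))  # git+https://github.com/nipy/nipype.git@rel/1.6
```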
\n", diff --git a/notebooks/advanced_create_interfaces.ipynb b/notebooks/advanced_create_interfaces.ipynb index ce1052e..33c47db 100644 --- a/notebooks/advanced_create_interfaces.ipynb +++ b/notebooks/advanced_create_interfaces.ipynb @@ -1214,9 +1214,9 @@ "from scipy.io import savemat\n", "\n", "# 1. save the image in matlab format as tmp_image.mat\n", - "tmp_image = 'tmp_image'\n", + "tmp_image = 'tmp_image.mat'\n", "data = nb.load(in_file).get_data()\n", - "savemat(tmp_image, {b'data': data}, do_compression=False)" + "savemat(tmp_image, {'data': data}, do_compression=False)" ] }, { @@ -1298,9 +1298,9 @@ "\n", " def _run_interface(self, runtime): \n", " # Save the image in matlab format as tmp_image.mat\n", - " tmp_image = 'tmp_image'\n", + " tmp_image = 'tmp_image.mat'\n", " data = nb.load(self.inputs.in_file).get_data()\n", - " savemat(tmp_image, {b'data': data}, do_compression=False)\n", + " savemat(tmp_image, {'data': data}, do_compression=False)\n", " \n", " # Load script\n", " with open(self.inputs.script_file) as script_file:\n", @@ -1374,7 +1374,7 @@ "cell_type": "code", "execution_count": null, "metadata": { - "solution2": "hidden", + "solution2": "shown", "solution2_first": true }, "outputs": [], @@ -1386,7 +1386,7 @@ "cell_type": "code", "execution_count": null, "metadata": { - "solution2": "hidden" + "solution2": "shown" }, "outputs": [], "source": [ @@ -1411,9 +1411,9 @@ "\n", " def _run_interface(self, runtime): \n", " # Save the image in matlab format as tmp_image.mat\n", - " tmp_image = 'tmp_image'\n", + " tmp_image = 'tmp_image.mat'\n", " data = nb.load(self.inputs.in_file).get_data()\n", - " savemat(tmp_image, {b'data': data}, do_compression=False)\n", + " savemat(tmp_image, {'data': data}, do_compression=False)\n", " \n", " # Load script\n", " with open(self.inputs.script_file) as script_file:\n", @@ -1495,11 +1495,18 @@ "source": [ "!cat volume.txt" ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": { "kernelspec": { - "display_name": "Python [default]", + "display_name": "Python 3", "language": "python", "name": "python3" }, @@ -1513,7 +1520,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.6.5" + "version": "3.7.8" } }, "nbformat": 4, diff --git a/notebooks/basic_data_input.ipynb b/notebooks/basic_data_input.ipynb index e8636cf..2857f8c 100644 --- a/notebooks/basic_data_input.ipynb +++ b/notebooks/basic_data_input.ipynb @@ -635,7 +635,7 @@ "source": [ "## FreeSurferSource\n", "\n", - "`FreeSurferSource` is a specific case of a file grabber that felicitates the data import of outputs from the FreeSurfer recon-all algorithm. This, of course, requires that you've already run `recon-all` on your subject." + "`FreeSurferSource` is a specific case of a file grabber that facilitates the data import of outputs from the FreeSurfer recon-all algorithm. This, of course, requires that you've already run `recon-all` on your subject." 
] }, { @@ -651,7 +651,7 @@ "metadata": {}, "outputs": [], "source": [ - "!datalad get -r -J 4 /data/ds000114/derivatives/freesurfer/sub-01" + "!datalad get -r -J 4 -d /data/ds000114 /data/ds000114/derivatives/freesurfer/sub-01" ] }, { diff --git a/notebooks/basic_data_input_bids.ipynb b/notebooks/basic_data_input_bids.ipynb index b6e4d67..e87d70a 100644 --- a/notebooks/basic_data_input_bids.ipynb +++ b/notebooks/basic_data_input_bids.ipynb @@ -2,10 +2,7 @@ "cells": [ { "cell_type": "markdown", - "metadata": { - "deletable": true, - "editable": true - }, + "metadata": {}, "source": [ "## Data input for BIDS datasets\n", "`DataGrabber` and `SelectFiles` are great if you are dealing with generic datasets with arbitrary organization. However, if you have decided to use Brain Imaging Data Structure (BIDS) to organize your data (or got your hands on a BIDS dataset) you can take advantage of a formal structure BIDS imposes. In this short tutorial, you will learn how to do this." @@ -13,10 +10,7 @@ }, { "cell_type": "markdown", - "metadata": { - "deletable": true, - "editable": true - }, + "metadata": {}, "source": [ "## `pybids` - a Python API for working with BIDS datasets\n", "`pybids` is a lightweight python API for querying BIDS folder structure for specific files and metadata. You can install it from PyPi:\n", @@ -28,10 +22,7 @@ }, { "cell_type": "markdown", - "metadata": { - "deletable": true, - "editable": true - }, + "metadata": {}, "source": [ "## The `layout` object and simple queries\n", "To begin working with pybids we need to initialize a layout object. We will need it to do all of our queries" @@ -40,11 +31,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": true, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [ "from bids.layout import BIDSLayout\n", @@ -54,11 +41,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": true, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [ "!tree -L 4 /data/ds000114/" @@ -66,10 +49,7 @@ }, { "cell_type": "markdown", - "metadata": { - "deletable": true, - "editable": true - }, + "metadata": {}, "source": [ "Let's figure out what are the subject labels in this dataset" ] @@ -77,11 +57,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": true, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [ "layout.get_subjects()" @@ -89,56 +65,39 @@ }, { "cell_type": "markdown", - "metadata": { - "deletable": true, - "editable": true - }, + "metadata": {}, "source": [ - "What modalities are included in this dataset?" + "What datatypes are included in this dataset?" ] }, { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": true, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [ - "layout.get_modalities()" + "layout.get_datatypes()" ] }, { "cell_type": "markdown", - "metadata": { - "deletable": true, - "editable": true - }, + "metadata": {}, "source": [ - "Which different data types are included in this dataset?" + "Which different data suffixes are included in this dataset?" 
] }, { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": true, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [ - "layout.get_types(modality='func')" + "layout.get_suffixes(datatype='func')" ] }, { "cell_type": "markdown", - "metadata": { - "deletable": true, - "editable": true - }, + "metadata": {}, "source": [ "What are the different tasks included in this dataset?" ] @@ -146,11 +105,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": true, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [ "layout.get_tasks()" @@ -158,33 +113,23 @@ }, { "cell_type": "markdown", - "metadata": { - "deletable": true, - "editable": true - }, + "metadata": {}, "source": [ - "We can also ask for all of the data for a particular subject and one modality." + "We can also ask for all of the data for a particular subject and one datatype." ] }, { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": true, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [ - "layout.get(subject='01', modality=\"anat\", session=\"test\")" + "layout.get(subject='01', datatype=\"anat\", session=\"test\")" ] }, { "cell_type": "markdown", - "metadata": { - "deletable": true, - "editable": true - }, + "metadata": {}, "source": [ "We can also ask for a specific subset of data. Note that we are using extension filter to get just the imaging data (BIDS allows both .nii and .nii.gz so we need to include both)." ] @@ -192,22 +137,15 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": true, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [ - "layout.get(subject='01', type='bold', extensions=['nii', 'nii.gz'])" + "layout.get(subject='01', suffix='bold', extension=['.nii', '.nii.gz'])" ] }, { "cell_type": "markdown", - "metadata": { - "deletable": true, - "editable": true - }, + "metadata": {}, "source": [ "You probably noticed that this method does not only return the file paths, but objects with relevant query fields. We can easily extract just the file paths." ] @@ -215,22 +153,15 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": true, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [ - "layout.get(subject='01', type='bold', extensions=['nii', 'nii.gz'], return_type='file')" + "layout.get(subject='01', suffix='bold', extension=['.nii', '.nii.gz'], return_type='file')" ] }, { "cell_type": "markdown", - "metadata": { - "deletable": true, - "editable": true - }, + "metadata": {}, "source": [ "### Exercise 1:\n", "List all files for the \"linebisection\" task for subject 02." @@ -240,9 +171,6 @@ "cell_type": "code", "execution_count": null, "metadata": { - "collapsed": true, - "deletable": true, - "editable": true, "solution2": "shown", "solution2_first": true }, @@ -255,9 +183,6 @@ "cell_type": "code", "execution_count": null, "metadata": { - "collapsed": true, - "deletable": true, - "editable": true, "solution2": "shown" }, "outputs": [], @@ -270,10 +195,7 @@ }, { "cell_type": "markdown", - "metadata": { - "deletable": true, - "editable": true - }, + "metadata": {}, "source": [ "## `BIDSDataGrabber`: Including `pybids` in your `nipype` workflow\n", "This is great, but what we really want is to include this into our nipype workflows. 
To do this, we can import `BIDSDataGrabber`, which provides an `Interface` for `BIDSLayout.get`" @@ -282,11 +204,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": true, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [ "from nipype.interfaces.io import BIDSDataGrabber\n", @@ -299,10 +217,7 @@ }, { "cell_type": "markdown", - "metadata": { - "deletable": true, - "editable": true - }, + "metadata": {}, "source": [ "You can define static filters, that will apply to all queries, by modifying the appropriate input" ] @@ -310,11 +225,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": true, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [ "bg.inputs.subject = '01'\n", @@ -324,12 +235,9 @@ }, { "cell_type": "markdown", - "metadata": { - "deletable": true, - "editable": true - }, + "metadata": {}, "source": [ - "Note that by default `BIDSDataGrabber` will fetch `nifti` files matching modality `func` and `anat`, and output them as two output fields. \n", + "Note that by default `BIDSDataGrabber` will fetch `nifti` files matching datatype `func` and `anat`, and output them as two output fields. \n", "\n", "To define custom fields, simply define the arguments to pass to `BIDSLayout.get` as dictionary, like so:" ] @@ -337,26 +245,19 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": true, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [ - "bg.inputs.output_query = {'bolds': dict(type='bold')}\n", + "bg.inputs.output_query = {'bolds': dict(suffix='bold')}\n", "res = bg.run()\n", "res.outputs" ] }, { "cell_type": "markdown", - "metadata": { - "deletable": true, - "editable": true - }, + "metadata": {}, "source": [ - "This results in a single output field `bold`, which returns all files with `type:bold` for `subject:\"01\"` \n", + "This results in a single output field `bold`, which returns all files with `suffix:bold` for `subject:\"01\"` \n", "\n", "Now, lets put it in a workflow. We are not going to analyze any data, but for demonstration purposes, we will add a couple of nodes that pretend to analyze their inputs" ] @@ -364,11 +265,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": true, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [ "def printMe(paths):\n", @@ -381,11 +278,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": true, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [ "wf = Workflow(name=\"bids_demo\")\n", @@ -395,10 +288,7 @@ }, { "cell_type": "markdown", - "metadata": { - "deletable": true, - "editable": true - }, + "metadata": {}, "source": [ "### Exercise 2:\n", "Modify the `BIDSDataGrabber` and the workflow to collect T1ws images for subject `10`." 
@@ -408,9 +298,6 @@ "cell_type": "code", "execution_count": null, "metadata": { - "collapsed": true, - "deletable": true, - "editable": true, "solution2": "shown", "solution2_first": true }, @@ -423,9 +310,6 @@ "cell_type": "code", "execution_count": null, "metadata": { - "collapsed": true, - "deletable": true, - "editable": true, "solution2": "shown" }, "outputs": [], @@ -436,7 +320,7 @@ "ex2_BIDSDataGrabber = BIDSDataGrabber()\n", "ex2_BIDSDataGrabber.inputs.base_dir = '/data/ds000114'\n", "ex2_BIDSDataGrabber.inputs.subject = '10'\n", - "ex2_BIDSDataGrabber.inputs.output_query = {'T1w': dict(modality='anat')}\n", + "ex2_BIDSDataGrabber.inputs.output_query = {'T1w': dict(datatype='anat')}\n", "\n", "ex2_res = ex2_BIDSDataGrabber.run()\n", "ex2_res.outputs" @@ -444,10 +328,7 @@ }, { "cell_type": "markdown", - "metadata": { - "deletable": true, - "editable": true - }, + "metadata": {}, "source": [ "## Iterating over subject labels\n", "In the previous example, we demonstrated how to use `pybids` to \"analyze\" one subject. How can we scale it for all subjects? Easy - using `iterables` (more in [Iteration/Iterables](basic_iteration.ipynb))." @@ -456,16 +337,12 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": true, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [ "bg_all = Node(BIDSDataGrabber(), name='bids-grabber')\n", "bg_all.inputs.base_dir = '/data/ds000114'\n", - "bg_all.inputs.output_query = {'bolds': dict(type='bold')}\n", + "bg_all.inputs.output_query = {'bolds': dict(suffix='bold')}\n", "bg_all.iterables = ('subject', layout.get_subjects()[:2])\n", "wf = Workflow(name=\"bids_demo\")\n", "wf.connect(bg_all, \"bolds\", analyzeBOLD, \"paths\")\n", @@ -474,10 +351,7 @@ }, { "cell_type": "markdown", - "metadata": { - "deletable": true, - "editable": true - }, + "metadata": {}, "source": [ "## Accessing additional metadata\n", "Querying different files is nice, but sometimes you want to access more metadata. For example `RepetitionTime`. `pybids` can help with that as well" @@ -486,11 +360,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": true, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [ "layout.get_metadata('/data/ds000114/sub-01/ses-test/func/sub-01_ses-test_task-fingerfootlips_bold.nii.gz')" @@ -498,10 +368,7 @@ }, { "cell_type": "markdown", - "metadata": { - "deletable": true, - "editable": true - }, + "metadata": {}, "source": [ "Can we incorporate this into our pipeline? Yes, we can! 
To do so, let's use a `Function` node to use `BIDSLayout` in a custom way.\n", "(More about MapNode in [MapNode](basic_mapnodes.ipynb))" @@ -510,11 +377,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "collapsed": true, - "deletable": true, - "editable": true - }, + "metadata": {}, "outputs": [], "source": [ "def printMetadata(path, data_dir):\n", @@ -531,9 +394,6 @@ "cell_type": "code", "execution_count": null, "metadata": { - "collapsed": true, - "deletable": true, - "editable": true, "scrolled": false }, "outputs": [], @@ -545,10 +405,7 @@ }, { "cell_type": "markdown", - "metadata": { - "deletable": true, - "editable": true - }, + "metadata": {}, "source": [ "### Exercise 3:\n", "Modify the `printMetadata` function to also print `EchoTime` " @@ -558,9 +415,6 @@ "cell_type": "code", "execution_count": null, "metadata": { - "collapsed": true, - "deletable": true, - "editable": true, "solution2": "shown", "solution2_first": true }, @@ -573,9 +427,6 @@ "cell_type": "code", "execution_count": null, "metadata": { - "collapsed": true, - "deletable": true, - "editable": true, "solution2": "shown" }, "outputs": [], @@ -586,16 +437,13 @@ "ex3_BIDSDataGrabber = Node(BIDSDataGrabber(), name='bids-grabber')\n", "ex3_BIDSDataGrabber.inputs.base_dir = '/data/ds000114'\n", "ex3_BIDSDataGrabber.inputs.subject = '01'\n", - "ex3_BIDSDataGrabber.inputs.output_query = {'bolds': dict(type='bold')}" + "ex3_BIDSDataGrabber.inputs.output_query = {'bolds': dict(suffix='bold')}" ] }, { "cell_type": "code", "execution_count": null, "metadata": { - "collapsed": true, - "deletable": true, - "editable": true, "solution2": "shown" }, "outputs": [], @@ -619,11 +467,18 @@ "ex3_wf.connect(ex3_BIDSDataGrabber, \"bolds\", ex3_analyzeBOLD2, \"path\")\n", "ex3_wf.run()" ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": { "kernelspec": { - "display_name": "Python [default]", + "display_name": "Python 3", "language": "python", "name": "python3" }, @@ -637,7 +492,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.6.6" + "version": "3.6.7" } }, "nbformat": 4, diff --git a/notebooks/basic_function_interface.ipynb b/notebooks/basic_function_interface.ipynb index f4b4076..5dc2b89 100644 --- a/notebooks/basic_function_interface.ipynb +++ b/notebooks/basic_function_interface.ipynb @@ -247,8 +247,8 @@ "# Run node\n", "try:\n", " rndArray.run()\n", - "except(NameError) as err:\n", - " print(\"NameError:\", err)\n", + "except Exception as err:\n", + " print(err)\n", "else:\n", " raise" ] @@ -259,8 +259,12 @@ "source": [ "As you can see, if we don't import `random` inside the scope of the function, we receive the following error:\n", "\n", - " NameError: global name 'random' is not defined\n", - " Interface Function failed to run. 
" + " Exception raised while executing Node rndArray_node.\n", + "\n", + " Traceback (most recent call last):\n", + " [...]\n", + " File \"\", line 3, in get_random_array\n", + " NameError: name 'random' is not defined" ] } ], diff --git a/notebooks/basic_graph_visualization.ipynb b/notebooks/basic_graph_visualization.ipynb index 7b0c89a..6522844 100644 --- a/notebooks/basic_graph_visualization.ipynb +++ b/notebooks/basic_graph_visualization.ipynb @@ -41,7 +41,7 @@ "outputs": [], "source": [ "# Import the function to create an spm fmri preprocessing workflow\n", - "from nipype.workflows.fmri.spm import create_spm_preproc\n", + "from niflow.nipype1.workflows.fmri.spm import create_spm_preproc\n", "\n", "# Create the workflow object\n", "spmflow = create_spm_preproc()" @@ -274,7 +274,7 @@ "metadata": { "anaconda-cloud": {}, "kernelspec": { - "display_name": "Python [default]", + "display_name": "Python 3", "language": "python", "name": "python3" }, @@ -288,7 +288,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.6.5" + "version": "3.6.11" } }, "nbformat": 4, diff --git a/notebooks/basic_import_workflows.ipynb b/notebooks/basic_import_workflows.ipynb index 6b163a5..4151ffc 100644 --- a/notebooks/basic_import_workflows.ipynb +++ b/notebooks/basic_import_workflows.ipynb @@ -40,7 +40,7 @@ "metadata": {}, "outputs": [], "source": [ - "from nipype.workflows.fmri.fsl.preprocess import create_susan_smooth\n", + "from niflow.nipype1.workflows.fmri.fsl.preprocess import create_susan_smooth\n", "smoothwf = create_susan_smooth()" ] }, @@ -323,7 +323,7 @@ "metadata": { "anaconda-cloud": {}, "kernelspec": { - "display_name": "Python [default]", + "display_name": "Python 3", "language": "python", "name": "python3" }, @@ -337,7 +337,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.6.5" + "version": "3.6.11" } }, "nbformat": 4, diff --git a/notebooks/basic_nodes.ipynb b/notebooks/basic_nodes.ipynb index b8e2627..20cdbfe 100644 --- a/notebooks/basic_nodes.ipynb +++ b/notebooks/basic_nodes.ipynb @@ -170,8 +170,10 @@ "metadata": {}, "outputs": [], "source": [ + "in_file = '/data/ds000114/sub-01/ses-test/anat/sub-01_ses-test_T1w.nii.gz'\n", + "\n", "# Specify node inputs\n", - "bet.inputs.in_file = '/data/ds000114/sub-01/ses-test/anat/sub-01_ses-test_T1w.nii.gz'\n", + "bet.inputs.in_file = in_file\n", "bet.inputs.out_file = '/output/node_T1w_bet.nii.gz'" ] }, @@ -200,7 +202,7 @@ "from nilearn.plotting import plot_anat\n", "%matplotlib inline\n", "import matplotlib.pyplot as plt\n", - "plot_anat(bet.inputs.in_file, title='BET input', cut_coords=(10,10,10),\n", + "plot_anat(in_file, title='BET input', cut_coords=(10,10,10),\n", " display_mode='ortho', dim=-1, draw_cross=False, annotate=False);\n", "plot_anat(res.outputs.out_file, title='BET output', cut_coords=(10,10,10),\n", " display_mode='ortho', dim=-1, draw_cross=False, annotate=False);" diff --git a/notebooks/basic_workflow.ipynb b/notebooks/basic_workflow.ipynb index 60f3b93..09bde6f 100644 --- a/notebooks/basic_workflow.ipynb +++ b/notebooks/basic_workflow.ipynb @@ -484,7 +484,7 @@ "metadata": {}, "outputs": [], "source": [ - "from nipype.workflows.fmri.fsl import create_susan_smooth" + "from niflow.nipype1.workflows.fmri.fsl import create_susan_smooth" ] }, { @@ -946,7 +946,7 @@ "metadata": { "anaconda-cloud": {}, "kernelspec": { - "display_name": "Python [default]", + "display_name": "Python 3", "language": "python", "name": "python3" }, @@ -960,7 
+960,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.6.5" + "version": "3.6.11" } }, "nbformat": 4, diff --git a/notebooks/example_normalize.ipynb b/notebooks/example_normalize.ipynb index 7b5a492..dcf85ca 100644 --- a/notebooks/example_normalize.ipynb +++ b/notebooks/example_normalize.ipynb @@ -27,7 +27,7 @@ "outputs": [], "source": [ "%%bash\n", - "datalad get -J 4 /data/ds000114/derivatives/fmriprep/sub-0[2345789]/anat/*h5" + "datalad get -J 4 -d /data/ds000114 /data/ds000114/derivatives/fmriprep/sub-0[2345789]/anat/*h5" ] }, { diff --git a/notebooks/example_preprocessing.ipynb b/notebooks/example_preprocessing.ipynb index 072aeb7..9be7a98 100644 --- a/notebooks/example_preprocessing.ipynb +++ b/notebooks/example_preprocessing.ipynb @@ -32,8 +32,9 @@ "outputs": [], "source": [ "%%bash\n", - "datalad get -J 4 /data/ds000114/derivatives/fmriprep/sub-*/anat/*preproc.nii.gz \\\n", - " /data/ds000114/sub-*/ses-test/func/*fingerfootlips*" + "datalad get -J 4 -d /data/ds000114 \\\n", + " /data/ds000114/derivatives/fmriprep/sub-*/anat/*preproc.nii.gz \\\n", + " /data/ds000114/sub-*/ses-test/func/*fingerfootlips*" ] }, { diff --git a/notebooks/handson_analysis.ipynb b/notebooks/handson_analysis.ipynb index 988fe88..dd97805 100644 --- a/notebooks/handson_analysis.ipynb +++ b/notebooks/handson_analysis.ipynb @@ -998,8 +998,11 @@ "outputs": [], "source": [ "plot_glass_brain('/output/datasink_handson/normalized/sub-07/wess_0008.nii',\n", + " output_file=\"/output/datasink_handson/normalized/sub-07/f-contr_activation.png\",\n", " colorbar=True, display_mode='lyrz', black_bg=True, threshold=25,\n", - " title='subject 7 - F-contrast: Activation');" + " title='subject 7 - F-contrast: Activation');\n", + "\n", + "Image(filename='/output/datasink_handson/normalized/sub-07/f-contr_activation.png')" ] }, { @@ -1009,8 +1012,11 @@ "outputs": [], "source": [ "plot_glass_brain('/output/datasink_handson/normalized/sub-07/wess_0009.nii',\n", + " output_file=\"/output/datasink_handson/normalized/sub-07/f-contr_differences.png\",\n", " colorbar=True, display_mode='lyrz', black_bg=True, threshold=25,\n", - " title='subject 7 - F-contrast: Differences');" + " title='subject 7 - F-contrast: Differences');\n", + "\n", + "Image(filename='/output/datasink_handson/normalized/sub-07/f-contr_differences.png')" ] }, { @@ -1648,7 +1654,7 @@ "metadata": { "anaconda-cloud": {}, "kernelspec": { - "display_name": "Python [default]", + "display_name": "Python 3", "language": "python", "name": "python3" }, @@ -1662,7 +1668,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.6.5" + "version": "3.7.8" } }, "nbformat": 4, diff --git a/notebooks/handson_preprocessing.ipynb b/notebooks/handson_preprocessing.ipynb index c1a5915..444c171 100644 --- a/notebooks/handson_preprocessing.ipynb +++ b/notebooks/handson_preprocessing.ipynb @@ -29,8 +29,9 @@ "outputs": [], "source": [ "%%bash\n", - "datalad get -J 4 /data/ds000114/sub-0[234789]/ses-test/anat/sub-0[234789]_ses-test_T1w.nii.gz \\\n", - " /data/ds000114/sub-0[234789]/ses-test/func/*fingerfootlips*" + "datalad get -J 4 -d /data/ds000114 \\\n", + " /data/ds000114/sub-0[234789]/ses-test/anat/sub-0[234789]_ses-test_T1w.nii.gz \\\n", + " /data/ds000114/sub-0[234789]/ses-test/func/*fingerfootlips*" ] }, { @@ -764,7 +765,7 @@ "metadata": {}, "outputs": [], "source": [ - "from nipype.workflows.fmri.fsl.preprocess import create_susan_smooth" + "from niflow.nipype1.workflows.fmri.fsl.preprocess 
import create_susan_smooth" ] }, { @@ -1752,7 +1753,7 @@ "metadata": { "anaconda-cloud": {}, "kernelspec": { - "display_name": "Python [default]", + "display_name": "Python 3", "language": "python", "name": "python3" }, @@ -1766,7 +1767,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.6.5" + "version": "3.6.11" } }, "nbformat": 4, diff --git a/notebooks/introduction_dataset.ipynb b/notebooks/introduction_dataset.ipynb index c951b97..d075c06 100644 --- a/notebooks/introduction_dataset.ipynb +++ b/notebooks/introduction_dataset.ipynb @@ -68,9 +68,9 @@ "source": [ "%%bash\n", "cd /data/ds000114\n", - "datalad get -J 4 /data/ds000114/derivatives/fmriprep/sub-*/anat/*preproc.nii.gz \\\n", - " /data/ds000114/sub-01/ses-test/anat \\\n", - " /data/ds000114/sub-*/ses-test/func/*fingerfootlips*" + "datalad get -J 4 derivatives/fmriprep/sub-*/anat/*preproc.nii.gz \\\n", + " sub-01/ses-test/anat \\\n", + " sub-*/ses-test/func/*fingerfootlips*" ] }, { @@ -121,7 +121,7 @@ "source": [ "%%bash\n", "cd /data/ds000114\n", - "datalad get /data/ds000114/sub-01/ses-test/func/sub-01_ses-test_task-linebisection_events.tsv" + "datalad get sub-01/ses-test/func/sub-01_ses-test_task-linebisection_events.tsv" ] }, { diff --git a/notebooks/introduction_docker.ipynb b/notebooks/introduction_docker.ipynb index b4c1343..8dac7af 100644 --- a/notebooks/introduction_docker.ipynb +++ b/notebooks/introduction_docker.ipynb @@ -68,7 +68,7 @@ "\n", " docker run -it --rm -p 8888:8888 miykael/nipype_tutorial jupyter notebook\n", " \n", - "However, if you want to use your version of notebooks, safe notebook outputs locally or use you local data, you can also mount your local directories, e.g.: \n", + "However, if you want to use your version of notebooks, save notebook outputs locally or use you local data, you can also mount your local directories, e.g.: \n", "\n", " docker run -it --rm -v /path/to/nipype_tutorial/:/home/neuro/nipype_tutorial -v /path/to/data/:/data -v /path/to/output/:/output -p 8888:8888 miykael/nipype_tutorial jupyter notebook\n", "\n", diff --git a/test_notebooks.py b/test_notebooks.py index 8a11c4f..2d2db6d 100644 --- a/test_notebooks.py +++ b/test_notebooks.py @@ -1,7 +1,6 @@ -import os import sys -import time from glob import glob +import pytest def test_version(): import nipype @@ -90,7 +89,6 @@ def reduce_notebook_load(path): print('Reducing: %s' % n) notebooks.append(reduce_notebook_load(n)) - for test in notebooks: - pytest_cmd = 'pytest --nbval-lax --nbval-cell-timeout 7200 -v -s %s' % test - print(pytest_cmd) - os.system(pytest_cmd) + # testing all tests from the notebooks list + pytest_exit_code = pytest.main(["--nbval-lax", "--nbval-cell-timeout", "7200", "-vs"] + notebooks) + sys.exit(pytest_exit_code)
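Closing note: the `basic_data_input_bids.ipynb` hunks above track the pybids 0.13.1 API pinned in `generate.sh`, which renames the query vocabulary (`modality` becomes `datatype`, `type` becomes `suffix`, and `extensions` becomes `extension`, now with a leading dot). A minimal sketch of the updated queries, using the tutorial's dataset path:

```python
from bids.layout import BIDSLayout

# The dataset path matches the tutorial's /data layout.
layout = BIDSLayout("/data/ds000114/")

# New vocabulary: datatype (anat/func), suffix (T1w/bold), extension (with dot).
print(layout.get_datatypes())
bolds = layout.get(subject="01", suffix="bold",
                   extension=[".nii", ".nii.gz"], return_type="file")
print(bolds)
```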