diff --git a/.circleci/config.yml b/.circleci/config.yml deleted file mode 100644 index b08ad56..0000000 --- a/.circleci/config.yml +++ /dev/null @@ -1,164 +0,0 @@ -version: 2 -jobs: - build: - machine: - image: circleci/classic:201808-01 - steps: - - restore_cache: - keys: - - build-v1-{{ .Branch }}-{{ epoch }} - - build-v1-{{ .Branch }}- - - build-v1-master- - - build-v1- - paths: - - /tmp/docker - - run: - name: Set-up a Docker registry - command: | - docker run -d -p 5000:5000 --restart=always --name=registry \ - -v /tmp/docker:/var/lib/registry registry:2 - - run: - name: Pull existing images - command: | - set +e - docker pull localhost:5000/neurodebian - success=$? - set -e - if [[ "$success" = "0" ]]; then - echo "Pulling from local registry" - docker tag localhost:5000/neurodebian neurodebian:stretch-non-free - docker pull localhost:5000/nipype_tutorial - docker tag localhost:5000/nipype_tutorial miykael/nipype_tutorial:latest - else - echo "Pulling from Docker Hub" - docker pull neurodebian:stretch-non-free - docker tag neurodebian:stretch-non-free localhost:5000/neurodebian - docker push localhost:5000/neurodebian - fi - - - checkout - - run: - name: Build Docker image & push to registry - no_output_timeout: 60m - command: | - docker build --rm --cache-from=miykael/nipype_tutorial:latest \ - -t miykael/nipype_tutorial:latest . 
- docker tag miykael/nipype_tutorial:latest localhost:5000/nipype_tutorial - docker push localhost:5000/nipype_tutorial - - run: - name: Docker registry garbage collection - command: | - docker exec -it registry /bin/registry garbage-collect --delete-untagged \ - /etc/docker/registry/config.yml - - save_cache: - key: build-v1-{{ .Branch }}-{{ epoch }} - paths: - - /tmp/docker - - test_1: - machine: - image: circleci/classic:201808-01 - steps: - - restore_cache: - keys: - - build-v1-{{ .Branch }}-{{ epoch }} - - build-v1-{{ .Branch }}- - - build-v1-master- - - build-v1- - paths: - - /tmp/docker - - run: - name: Set-up a Docker registry & pull - command: | - docker run -d -p 5000:5000 --restart=always --name=registry \ - -v /tmp/docker:/var/lib/registry registry:2 - docker pull localhost:5000/nipype_tutorial - docker tag localhost:5000/nipype_tutorial miykael/nipype_tutorial:latest - - run: - name: run tests 1 - no_output_timeout: 120m - command: | - docker run -it --rm miykael/nipype_tutorial:latest python /home/neuro/nipype_tutorial/test_notebooks.py 1 - test_2: - machine: - image: circleci/classic:201808-01 - steps: - - restore_cache: - keys: - - build-v1-{{ .Branch }}-{{ epoch }} - - build-v1-{{ .Branch }}- - - build-v1-master- - - build-v1- - paths: - - /tmp/docker - - run: - name: Set-up a Docker registry & pull - command: | - docker run -d -p 5000:5000 --restart=always --name=registry \ - -v /tmp/docker:/var/lib/registry registry:2 - docker pull localhost:5000/nipype_tutorial - docker tag localhost:5000/nipype_tutorial miykael/nipype_tutorial:latest - - run: - name: run tests 2 - no_output_timeout: 120m - command: | - docker run -it --rm miykael/nipype_tutorial:latest python /home/neuro/nipype_tutorial/test_notebooks.py 2 - test_3: - machine: - image: circleci/classic:201808-01 - steps: - - restore_cache: - keys: - - build-v1-{{ .Branch }}-{{ epoch }} - - build-v1-{{ .Branch }}- - - build-v1-master- - - build-v1- - paths: - - /tmp/docker - - run: - name: 
Set-up a Docker registry & pull - command: | - docker run -d -p 5000:5000 --restart=always --name=registry \ - -v /tmp/docker:/var/lib/registry registry:2 - docker pull localhost:5000/nipype_tutorial - docker tag localhost:5000/nipype_tutorial miykael/nipype_tutorial:latest - - run: - name: run tests 3 - no_output_timeout: 120m - command: | - docker run -it --rm miykael/nipype_tutorial:latest python /home/neuro/nipype_tutorial/test_notebooks.py 3 - -workflows: - version: 2 - build_and_test: - jobs: - - build - - test_1: - requires: - - build - - test_2: - requires: - - build - - test_3: - requires: - - build - - nightly: - triggers: - - schedule: - cron: "0 10 * * *" - filters: - branches: - only: - - master - jobs: - - build - - test_1: - requires: - - build - - test_2: - requires: - - build - - test_3: - requires: - - build diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml new file mode 100644 index 0000000..5ac3bc4 --- /dev/null +++ b/.github/workflows/testing.yml @@ -0,0 +1,76 @@ +name: CI + +on: + push: + branches: [ master ] + pull_request: + branches: [ master ] + workflow_dispatch: + inputs: + nipype_branch: + description: 'Build specific Nipype branch' + required: true + default: 'master' + + +jobs: + build: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - name: generate the Dockerfile from generate.sh + run: | + BRANCH=${{ github.event.inputs.nipype_branch }} + BRANCH=${BRANCH:-"master"} + bash generate.sh $BRANCH + # In this step, this action saves a list of existing images, + # the cache is created without them in the post run. + # It also restores the cache if it exists. + - uses: satackey/action-docker-layer-caching@v0.0.11 + with: + key: tutorial-docker-cache-{hash} + restore-keys: | + tutorial-docker-cache- + layer-tutorial-docker-cache- + - name: build the image + run: docker build . 
--file Dockerfile -t nipype_tutorial:latest + + test_1: + needs: build + runs-on: ubuntu-latest + steps: + - uses: satackey/action-docker-layer-caching@v0.0.11 + with: + key: tutorial-docker-cache-{hash} + restore-keys: | + tutorial-docker-cache- + layer-tutorial-docker-cache- + - name: run test 1 + run: docker run --rm nipype_tutorial:latest python /home/neuro/nipype_tutorial/test_notebooks.py 1 + + test_2: + needs: build + runs-on: ubuntu-latest + steps: + - uses: satackey/action-docker-layer-caching@v0.0.11 + with: + key: tutorial-docker-cache-{hash} + restore-keys: | + tutorial-docker-cache- + layer-tutorial-docker-cache- + - name: run test 2 + run: docker run --rm nipype_tutorial:latest python /home/neuro/nipype_tutorial/test_notebooks.py 2 + + test_3: + needs: build + runs-on: ubuntu-latest + steps: + - uses: satackey/action-docker-layer-caching@v0.0.11 + with: + key: tutorial-docker-cache-{hash} + restore-keys: | + tutorial-docker-cache- + layer-tutorial-docker-cache- + - name: run test 3 + run: docker run --rm nipype_tutorial:latest python /home/neuro/nipype_tutorial/test_notebooks.py 3 diff --git a/Dockerfile b/Dockerfile deleted file mode 100644 index bd39263..0000000 --- a/Dockerfile +++ /dev/null @@ -1,340 +0,0 @@ -# Generated by Neurodocker version 0.5.0 -# Timestamp: 2019-07-14 08:54:07 UTC -# -# Thank you for using Neurodocker. 
If you discover any issues -# or ways to improve this software, please submit an issue or -# pull request on our GitHub repository: -# -# https://github.com/kaczmarj/neurodocker - -FROM neurodebian:stretch-non-free - -ARG DEBIAN_FRONTEND="noninteractive" - -ENV LANG="en_US.UTF-8" \ - LC_ALL="en_US.UTF-8" \ - ND_ENTRYPOINT="/neurodocker/startup.sh" -RUN export ND_ENTRYPOINT="/neurodocker/startup.sh" \ - && apt-get update -qq \ - && apt-get install -y -q --no-install-recommends \ - apt-utils \ - bzip2 \ - ca-certificates \ - curl \ - locales \ - unzip \ - && apt-get clean \ - && rm -rf /var/lib/apt/lists/* \ - && sed -i -e 's/# en_US.UTF-8 UTF-8/en_US.UTF-8 UTF-8/' /etc/locale.gen \ - && dpkg-reconfigure --frontend=noninteractive locales \ - && update-locale LANG="en_US.UTF-8" \ - && chmod 777 /opt && chmod a+s /opt \ - && mkdir -p /neurodocker \ - && if [ ! -f "$ND_ENTRYPOINT" ]; then \ - echo '#!/usr/bin/env bash' >> "$ND_ENTRYPOINT" \ - && echo 'set -e' >> "$ND_ENTRYPOINT" \ - && echo 'export USER="${USER:=`whoami`}"' >> "$ND_ENTRYPOINT" \ - && echo 'if [ -n "$1" ]; then "$@"; else /usr/bin/env bash; fi' >> "$ND_ENTRYPOINT"; \ - fi \ - && chmod -R 777 /neurodocker && chmod a+s /neurodocker - -ENTRYPOINT ["/neurodocker/startup.sh"] - -RUN apt-get update -qq \ - && apt-get install -y -q --no-install-recommends \ - convert3d \ - ants \ - fsl \ - gcc \ - g++ \ - graphviz \ - tree \ - git-annex-standalone \ - vim \ - emacs-nox \ - nano \ - less \ - ncdu \ - tig \ - git-annex-remote-rclone \ - octave \ - netbase \ - && apt-get clean \ - && rm -rf /var/lib/apt/lists/* - -RUN sed -i '$isource /etc/fsl/fsl.sh' $ND_ENTRYPOINT - -ENV FORCE_SPMMCR="1" \ - LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/usr/lib/x86_64-linux-gnu:/opt/matlabmcr-2010a/v713/runtime/glnxa64:/opt/matlabmcr-2010a/v713/bin/glnxa64:/opt/matlabmcr-2010a/v713/sys/os/glnxa64:/opt/matlabmcr-2010a/v713/extern/bin/glnxa64" \ - MATLABCMD="/opt/matlabmcr-2010a/v713/toolbox/matlab" -RUN export TMPDIR="$(mktemp -d)" \ - && 
apt-get update -qq \ - && apt-get install -y -q --no-install-recommends \ - bc \ - libncurses5 \ - libxext6 \ - libxmu6 \ - libxpm-dev \ - libxt6 \ - && apt-get clean \ - && rm -rf /var/lib/apt/lists/* \ - && echo "Downloading MATLAB Compiler Runtime ..." \ - && curl -sSL --retry 5 -o /tmp/toinstall.deb http://mirrors.kernel.org/debian/pool/main/libx/libxp/libxp6_1.0.2-2_amd64.deb \ - && dpkg -i /tmp/toinstall.deb \ - && rm /tmp/toinstall.deb \ - && apt-get install -f \ - && apt-get clean \ - && rm -rf /var/lib/apt/lists/* \ - && curl -fsSL --retry 5 -o "$TMPDIR/MCRInstaller.bin" https://dl.dropbox.com/s/zz6me0c3v4yq5fd/MCR_R2010a_glnxa64_installer.bin \ - && chmod +x "$TMPDIR/MCRInstaller.bin" \ - && "$TMPDIR/MCRInstaller.bin" -silent -P installLocation="/opt/matlabmcr-2010a" \ - && rm -rf "$TMPDIR" \ - && unset TMPDIR \ - && echo "Downloading standalone SPM ..." \ - && curl -fsSL --retry 5 -o /tmp/spm12.zip http://www.fil.ion.ucl.ac.uk/spm/download/restricted/utopia/previous/spm12_r7219_R2010a.zip \ - && unzip -q /tmp/spm12.zip -d /tmp \ - && mkdir -p /opt/spm12-r7219 \ - && mv /tmp/spm12/* /opt/spm12-r7219/ \ - && chmod -R 777 /opt/spm12-r7219 \ - && rm -rf /tmp/* \ - && /opt/spm12-r7219/run_spm12.sh /opt/matlabmcr-2010a/v713 quit \ - && sed -i '$iexport SPMMCRCMD=\"/opt/spm12-r7219/run_spm12.sh /opt/matlabmcr-2010a/v713 script\"' $ND_ENTRYPOINT - -RUN test "$(getent passwd neuro)" || useradd --no-user-group --create-home --shell /bin/bash neuro -USER neuro - -WORKDIR /home/neuro - -ENV CONDA_DIR="/opt/miniconda-latest" \ - PATH="/opt/miniconda-latest/bin:$PATH" -RUN export PATH="/opt/miniconda-latest/bin:$PATH" \ - && echo "Downloading Miniconda installer ..." 
\ - && conda_installer="/tmp/miniconda.sh" \ - && curl -fsSL --retry 5 -o "$conda_installer" https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh \ - && bash "$conda_installer" -b -p /opt/miniconda-latest \ - && rm -f "$conda_installer" \ - && conda update -yq -nbase conda \ - && conda config --system --prepend channels conda-forge \ - && conda config --system --set auto_update_conda false \ - && conda config --system --set show_channel_urls true \ - && sync && conda clean --all && sync \ - && conda create -y -q --name neuro \ - && conda install -y -q --name neuro \ - 'python=3.6' \ - 'pytest' \ - 'jupyter' \ - 'jupyterlab' \ - 'jupyter_contrib_nbextensions' \ - 'traits' \ - 'pandas' \ - 'matplotlib' \ - 'scikit-learn' \ - 'scikit-image' \ - 'seaborn' \ - 'nbformat' \ - 'nb_conda' \ - && sync && conda clean --all && sync \ - && bash -c "source activate neuro \ - && pip install --no-cache-dir \ - https://github.com/nipy/nipype/tarball/master \ - https://github.com/INCF/pybids/tarball/0.7.1 \ - niflow-nipype1-workflows \ - nilearn \ - datalad[full] \ - nipy \ - duecredit \ - nbval" \ - && rm -rf ~/.cache/pip/* \ - && sync \ - && sed -i '$isource activate neuro' $ND_ENTRYPOINT - -ENV LD_LIBRARY_PATH="/opt/miniconda-latest/envs/neuro:/usr/lib/fsl/5.0" - -RUN bash -c 'source activate neuro && jupyter nbextension enable exercise2/main && jupyter nbextension enable spellchecker/main' - -USER root - -RUN mkdir /data && chmod 777 /data && chmod a+s /data - -RUN mkdir /output && chmod 777 /output && chmod a+s /output - -USER neuro - -RUN printf "[user]\n\tname = miykael\n\temail = michaelnotter@hotmail.com\n" > ~/.gitconfig - -RUN bash -c 'source activate neuro && cd /data && datalad install -r ///workshops/nih-2017/ds000114 && cd ds000114 && datalad update -r && datalad get -r sub-01/ses-test/anat sub-01/ses-test/func/*fingerfootlips*' - -RUN curl -L https://files.osf.io/v1/resources/fvuh8/providers/osfstorage/580705089ad5a101f17944a9 -o 
/data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz && tar xf /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz -C /data/ds000114/derivatives/fmriprep/. && rm /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz && find /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c -type f -not -name ?mm_T1.nii.gz -not -name ?mm_brainmask.nii.gz -not -name ?mm_tpm*.nii.gz -delete - -COPY [".", "/home/neuro/nipype_tutorial"] - -USER root - -RUN chown -R neuro /home/neuro/nipype_tutorial - -RUN rm -rf /opt/conda/pkgs/* - -USER neuro - -RUN mkdir -p ~/.jupyter && echo c.NotebookApp.ip = \"0.0.0.0\" > ~/.jupyter/jupyter_notebook_config.py - -WORKDIR /home/neuro/nipype_tutorial - -CMD ["jupyter-notebook"] - -RUN echo '{ \ - \n "pkg_manager": "apt", \ - \n "instructions": [ \ - \n [ \ - \n "base", \ - \n "neurodebian:stretch-non-free" \ - \n ], \ - \n [ \ - \n "install", \ - \n [ \ - \n "convert3d", \ - \n "ants", \ - \n "fsl", \ - \n "gcc", \ - \n "g++", \ - \n "graphviz", \ - \n "tree", \ - \n "git-annex-standalone", \ - \n "vim", \ - \n "emacs-nox", \ - \n "nano", \ - \n "less", \ - \n "ncdu", \ - \n "tig", \ - \n "git-annex-remote-rclone", \ - \n "octave", \ - \n "netbase" \ - \n ] \ - \n ], \ - \n [ \ - \n "add_to_entrypoint", \ - \n "source /etc/fsl/fsl.sh" \ - \n ], \ - \n [ \ - \n "spm12", \ - \n { \ - \n "version": "r7219" \ - \n } \ - \n ], \ - \n [ \ - \n "user", \ - \n "neuro" \ - \n ], \ - \n [ \ - \n "workdir", \ - \n "/home/neuro" \ - \n ], \ - \n [ \ - \n "miniconda", \ - \n { \ - \n "miniconda_version": "4.3.31", \ - \n "conda_install": [ \ - \n "python=3.6", \ - \n "pytest", \ - \n "jupyter", \ - \n "jupyterlab", \ - \n "jupyter_contrib_nbextensions", \ - \n "traits", \ - \n "pandas", \ - \n "matplotlib", \ - \n "scikit-learn", \ - \n "scikit-image", \ - \n "seaborn", \ - \n "nbformat", \ - \n "nb_conda" \ - \n ], \ - \n "pip_install": [ \ - \n "https://github.com/nipy/nipype/tarball/master", \ - \n 
"https://github.com/INCF/pybids/tarball/0.7.1", \ - \n "nilearn", \ - \n "datalad[full]", \ - \n "nipy", \ - \n "duecredit", \ - \n "nbval" \ - \n ], \ - \n "create_env": "neuro", \ - \n "activate": true \ - \n } \ - \n ], \ - \n [ \ - \n "env", \ - \n { \ - \n "LD_LIBRARY_PATH": "/opt/miniconda-latest/envs/neuro:/usr/lib/fsl/5.0" \ - \n } \ - \n ], \ - \n [ \ - \n "run_bash", \ - \n "source activate neuro && jupyter nbextension enable exercise2/main && jupyter nbextension enable spellchecker/main" \ - \n ], \ - \n [ \ - \n "user", \ - \n "root" \ - \n ], \ - \n [ \ - \n "run", \ - \n "mkdir /data && chmod 777 /data && chmod a+s /data" \ - \n ], \ - \n [ \ - \n "run", \ - \n "mkdir /output && chmod 777 /output && chmod a+s /output" \ - \n ], \ - \n [ \ - \n "user", \ - \n "neuro" \ - \n ], \ - \n [ \ - \n "run", \ - \n "printf \"[user]\\\n\\tname = miykael\\\n\\temail = michaelnotter@hotmail.com\\\n\" > ~/.gitconfig" \ - \n ], \ - \n [ \ - \n "run_bash", \ - \n "source activate neuro && cd /data && datalad install -r ///workshops/nih-2017/ds000114 && cd ds000114 && datalad update -r && datalad get -r sub-01/ses-test/anat sub-01/ses-test/func/*fingerfootlips*" \ - \n ], \ - \n [ \ - \n "run", \ - \n "curl -L https://files.osf.io/v1/resources/fvuh8/providers/osfstorage/580705089ad5a101f17944a9 -o /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz && tar xf /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz -C /data/ds000114/derivatives/fmriprep/. 
&& rm /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz && find /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c -type f -not -name ?mm_T1.nii.gz -not -name ?mm_brainmask.nii.gz -not -name ?mm_tpm*.nii.gz -delete" \ - \n ], \ - \n [ \ - \n "copy", \ - \n [ \ - \n ".", \ - \n "/home/neuro/nipype_tutorial" \ - \n ] \ - \n ], \ - \n [ \ - \n "user", \ - \n "root" \ - \n ], \ - \n [ \ - \n "run", \ - \n "chown -R neuro /home/neuro/nipype_tutorial" \ - \n ], \ - \n [ \ - \n "run", \ - \n "rm -rf /opt/conda/pkgs/*" \ - \n ], \ - \n [ \ - \n "user", \ - \n "neuro" \ - \n ], \ - \n [ \ - \n "run", \ - \n "mkdir -p ~/.jupyter && echo c.NotebookApp.ip = \\\"0.0.0.0\\\" > ~/.jupyter/jupyter_notebook_config.py" \ - \n ], \ - \n [ \ - \n "workdir", \ - \n "/home/neuro/nipype_tutorial" \ - \n ], \ - \n [ \ - \n "cmd", \ - \n [ \ - \n "jupyter-notebook" \ - \n ] \ - \n ] \ - \n ] \ - \n}' > /neurodocker/neurodocker_specs.json diff --git a/README.md b/README.md index 9a93c5e..4a35427 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,5 @@ # Nipype Tutorial Notebooks - -[![CircleCi](https://circleci.com/gh/miykael/nipype_tutorial.svg?style=shield)](https://circleci.com/gh/miykael/nipype_tutorial/tree/master) +[![Github Action CI](https://github.com/miykael/nipype_tutorial/workflows/CI/badge.svg?branch=master)](https://github.com/miykael/nipype_tutorial/actions?query=workflow%3ACI) [![GitHub issues](https://img.shields.io/github/issues/miykael/nipype_tutorial.svg)](https://github.com/miykael/nipype_tutorial/issues/) [![GitHub pull-requests](https://img.shields.io/github/issues-pr/miykael/nipype_tutorial.svg)](https://github.com/miykael/nipype_tutorial/pulls/) [![GitHub contributors](https://img.shields.io/github/contributors/miykael/nipype_tutorial.svg)](https://GitHub.com/miykael/nipype_tutorial/graphs/contributors/) diff --git a/Singularity b/Singularity deleted file mode 100644 index bce0d3c..0000000 --- a/Singularity +++ /dev/null @@ -1,342 +0,0 @@ 
-# Generated by Neurodocker version 0.5.0 -# Timestamp: 2019-07-14 08:54:09 UTC -# -# Thank you for using Neurodocker. If you discover any issues -# or ways to improve this software, please submit an issue or -# pull request on our GitHub repository: -# -# https://github.com/kaczmarj/neurodocker - -Bootstrap: docker -From: neurodebian:stretch-non-free - -%post -export ND_ENTRYPOINT="/neurodocker/startup.sh" -apt-get update -qq -apt-get install -y -q --no-install-recommends \ - apt-utils \ - bzip2 \ - ca-certificates \ - curl \ - locales \ - unzip -apt-get clean -rm -rf /var/lib/apt/lists/* -sed -i -e 's/# en_US.UTF-8 UTF-8/en_US.UTF-8 UTF-8/' /etc/locale.gen -dpkg-reconfigure --frontend=noninteractive locales -update-locale LANG="en_US.UTF-8" -chmod 777 /opt && chmod a+s /opt -mkdir -p /neurodocker -if [ ! -f "$ND_ENTRYPOINT" ]; then - echo '#!/usr/bin/env bash' >> "$ND_ENTRYPOINT" - echo 'set -e' >> "$ND_ENTRYPOINT" - echo 'export USER="${USER:=`whoami`}"' >> "$ND_ENTRYPOINT" - echo 'if [ -n "$1" ]; then "$@"; else /usr/bin/env bash; fi' >> "$ND_ENTRYPOINT"; -fi -chmod -R 777 /neurodocker && chmod a+s /neurodocker - -apt-get update -qq -apt-get install -y -q --no-install-recommends \ - convert3d \ - ants \ - fsl \ - gcc \ - g++ \ - graphviz \ - tree \ - git-annex-standalone \ - vim \ - emacs-nox \ - nano \ - less \ - ncdu \ - tig \ - git-annex-remote-rclone \ - octave \ - netbase -apt-get clean -rm -rf /var/lib/apt/lists/* - -sed -i '$isource /etc/fsl/fsl.sh' $ND_ENTRYPOINT - -export TMPDIR="$(mktemp -d)" -apt-get update -qq -apt-get install -y -q --no-install-recommends \ - bc \ - libncurses5 \ - libxext6 \ - libxmu6 \ - libxpm-dev \ - libxt6 -apt-get clean -rm -rf /var/lib/apt/lists/* -echo "Downloading MATLAB Compiler Runtime ..." 
-curl -sSL --retry 5 -o /tmp/toinstall.deb http://mirrors.kernel.org/debian/pool/main/libx/libxp/libxp6_1.0.2-2_amd64.deb -dpkg -i /tmp/toinstall.deb -rm /tmp/toinstall.deb -apt-get install -f -apt-get clean -rm -rf /var/lib/apt/lists/* -curl -fsSL --retry 5 -o "$TMPDIR/MCRInstaller.bin" https://dl.dropbox.com/s/zz6me0c3v4yq5fd/MCR_R2010a_glnxa64_installer.bin -chmod +x "$TMPDIR/MCRInstaller.bin" -"$TMPDIR/MCRInstaller.bin" -silent -P installLocation="/opt/matlabmcr-2010a" -rm -rf "$TMPDIR" -unset TMPDIR -echo "Downloading standalone SPM ..." -curl -fsSL --retry 5 -o /tmp/spm12.zip http://www.fil.ion.ucl.ac.uk/spm/download/restricted/utopia/previous/spm12_r7219_R2010a.zip -unzip -q /tmp/spm12.zip -d /tmp -mkdir -p /opt/spm12-r7219 -mv /tmp/spm12/* /opt/spm12-r7219/ -chmod -R 777 /opt/spm12-r7219 -rm -rf /tmp/* -/opt/spm12-r7219/run_spm12.sh /opt/matlabmcr-2010a/v713 quit -sed -i '$iexport SPMMCRCMD=\"/opt/spm12-r7219/run_spm12.sh /opt/matlabmcr-2010a/v713 script\"' $ND_ENTRYPOINT - -test "$(getent passwd neuro)" || useradd --no-user-group --create-home --shell /bin/bash neuro -su - neuro - -cd /home/neuro - -export PATH="/opt/miniconda-latest/bin:$PATH" -echo "Downloading Miniconda installer ..." 
-conda_installer="/tmp/miniconda.sh" -curl -fsSL --retry 5 -o "$conda_installer" https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -bash "$conda_installer" -b -p /opt/miniconda-latest -rm -f "$conda_installer" -conda update -yq -nbase conda -conda config --system --prepend channels conda-forge -conda config --system --set auto_update_conda false -conda config --system --set show_channel_urls true -sync && conda clean --all && sync -conda create -y -q --name neuro -conda install -y -q --name neuro \ - 'python=3.6' \ - 'pytest' \ - 'jupyter' \ - 'jupyterlab' \ - 'jupyter_contrib_nbextensions' \ - 'traits' \ - 'pandas' \ - 'matplotlib' \ - 'scikit-learn' \ - 'scikit-image' \ - 'seaborn' \ - 'nbformat' \ - 'nb_conda' -sync && conda clean --all && sync -bash -c "source activate neuro - pip install --no-cache-dir \ - https://github.com/nipy/nipype/tarball/master \ - https://github.com/INCF/pybids/tarball/0.7.1 \ - nilearn \ - datalad[full] \ - nipy \ - duecredit \ - nbval" -rm -rf ~/.cache/pip/* -sync -sed -i '$isource activate neuro' $ND_ENTRYPOINT - - -bash -c 'source activate neuro && jupyter nbextension enable exercise2/main && jupyter nbextension enable spellchecker/main' - -su - root - -mkdir /data && chmod 777 /data && chmod a+s /data - -mkdir /output && chmod 777 /output && chmod a+s /output - -su - neuro - -printf "[user]\n\tname = miykael\n\temail = michaelnotter@hotmail.com\n" > ~/.gitconfig - -bash -c 'source activate neuro && cd /data && datalad install -r ///workshops/nih-2017/ds000114 && cd ds000114 && datalad update -r && datalad get -r sub-01/ses-test/anat sub-01/ses-test/func/*fingerfootlips*' - -curl -L https://files.osf.io/v1/resources/fvuh8/providers/osfstorage/580705089ad5a101f17944a9 -o /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz && tar xf /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz -C /data/ds000114/derivatives/fmriprep/. 
&& rm /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz && find /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c -type f -not -name ?mm_T1.nii.gz -not -name ?mm_brainmask.nii.gz -not -name ?mm_tpm*.nii.gz -delete - -su - root - -chown -R neuro /home/neuro/nipype_tutorial - -rm -rf /opt/conda/pkgs/* - -su - neuro - -mkdir -p ~/.jupyter && echo c.NotebookApp.ip = \"0.0.0.0\" > ~/.jupyter/jupyter_notebook_config.py - -cd /home/neuro/nipype_tutorial - -echo '{ -\n "pkg_manager": "apt", -\n "instructions": [ -\n [ -\n "base", -\n "neurodebian:stretch-non-free" -\n ], -\n [ -\n "_header", -\n { -\n "version": "generic", -\n "method": "custom" -\n } -\n ], -\n [ -\n "install", -\n [ -\n "convert3d", -\n "ants", -\n "fsl", -\n "gcc", -\n "g++", -\n "graphviz", -\n "tree", -\n "git-annex-standalone", -\n "vim", -\n "emacs-nox", -\n "nano", -\n "less", -\n "ncdu", -\n "tig", -\n "git-annex-remote-rclone", -\n "octave", -\n "netbase" -\n ] -\n ], -\n [ -\n "add_to_entrypoint", -\n "source /etc/fsl/fsl.sh" -\n ], -\n [ -\n "spm12", -\n { -\n "version": "r7219" -\n } -\n ], -\n [ -\n "user", -\n "neuro" -\n ], -\n [ -\n "workdir", -\n "/home/neuro" -\n ], -\n [ -\n "miniconda", -\n { -\n "miniconda_version": "4.3.31", -\n "conda_install": [ -\n "python=3.6", -\n "pytest", -\n "jupyter", -\n "jupyterlab", -\n "jupyter_contrib_nbextensions", -\n "traits", -\n "pandas", -\n "matplotlib", -\n "scikit-learn", -\n "scikit-image", -\n "seaborn", -\n "nbformat", -\n "nb_conda" -\n ], -\n "pip_install": [ -\n "https://github.com/nipy/nipype/tarball/master", -\n "https://github.com/INCF/pybids/tarball/0.7.1", -\n "nilearn", -\n "datalad[full]", -\n "nipy", -\n "duecredit", -\n "nbval" -\n ], -\n "create_env": "neuro", -\n "activate": true -\n } -\n ], -\n [ -\n "env", -\n { -\n "LD_LIBRARY_PATH": "/opt/miniconda-latest/envs/neuro:/usr/lib/fsl/5.0" -\n } -\n ], -\n [ -\n "run_bash", -\n "source activate neuro && jupyter nbextension enable exercise2/main && 
jupyter nbextension enable spellchecker/main" -\n ], -\n [ -\n "user", -\n "root" -\n ], -\n [ -\n "run", -\n "mkdir /data && chmod 777 /data && chmod a+s /data" -\n ], -\n [ -\n "run", -\n "mkdir /output && chmod 777 /output && chmod a+s /output" -\n ], -\n [ -\n "user", -\n "neuro" -\n ], -\n [ -\n "run", -\n "printf \"[user]\\\n\\tname = miykael\\\n\\temail = michaelnotter@hotmail.com\\\n\" > ~/.gitconfig" -\n ], -\n [ -\n "run_bash", -\n "source activate neuro && cd /data && datalad install -r ///workshops/nih-2017/ds000114 && cd ds000114 && datalad update -r && datalad get -r sub-01/ses-test/anat sub-01/ses-test/func/*fingerfootlips*" -\n ], -\n [ -\n "run", -\n "curl -L https://files.osf.io/v1/resources/fvuh8/providers/osfstorage/580705089ad5a101f17944a9 -o /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz && tar xf /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz -C /data/ds000114/derivatives/fmriprep/. && rm /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz && find /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c -type f -not -name ?mm_T1.nii.gz -not -name ?mm_brainmask.nii.gz -not -name ?mm_tpm*.nii.gz -delete" -\n ], -\n [ -\n "copy", -\n [ -\n ".", -\n "/home/neuro/nipype_tutorial" -\n ] -\n ], -\n [ -\n "user", -\n "root" -\n ], -\n [ -\n "run", -\n "chown -R neuro /home/neuro/nipype_tutorial" -\n ], -\n [ -\n "run", -\n "rm -rf /opt/conda/pkgs/*" -\n ], -\n [ -\n "user", -\n "neuro" -\n ], -\n [ -\n "run", -\n "mkdir -p ~/.jupyter && echo c.NotebookApp.ip = \\\"0.0.0.0\\\" > ~/.jupyter/jupyter_notebook_config.py" -\n ], -\n [ -\n "workdir", -\n "/home/neuro/nipype_tutorial" -\n ] -\n ] -\n}' > /neurodocker/neurodocker_specs.json - -%environment -export LANG="en_US.UTF-8" -export LC_ALL="en_US.UTF-8" -export ND_ENTRYPOINT="/neurodocker/startup.sh" -export FORCE_SPMMCR="1" -export 
LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/usr/lib/x86_64-linux-gnu:/opt/matlabmcr-2010a/v713/runtime/glnxa64:/opt/matlabmcr-2010a/v713/bin/glnxa64:/opt/matlabmcr-2010a/v713/sys/os/glnxa64:/opt/matlabmcr-2010a/v713/extern/bin/glnxa64" -export MATLABCMD="/opt/matlabmcr-2010a/v713/toolbox/matlab" -export CONDA_DIR="/opt/miniconda-latest" -export PATH="/opt/miniconda-latest/bin:$PATH" -export LD_LIBRARY_PATH="/opt/miniconda-latest/envs/neuro:/usr/lib/fsl/5.0" - -%files -. /home/neuro/nipype_tutorial - -%runscript -/neurodocker/startup.sh "$@" diff --git a/docs/notebooks/basic_data_input.html b/docs/notebooks/basic_data_input.html index 4bc10f8..4d1d3d9 100644 --- a/docs/notebooks/basic_data_input.html +++ b/docs/notebooks/basic_data_input.html @@ -12816,7 +12816,7 @@

Exercise 2
-

FreeSurferSource

FreeSurferSource is a specific case of a file grabber that felicitates the data import of outputs from the FreeSurfer recon-all algorithm. This, of course, requires that you've already run recon-all on your subject.

+

FreeSurferSource

FreeSurferSource is a specific case of a file grabber that facilitates the data import of outputs from the FreeSurfer recon-all algorithm. This, of course, requires that you've already run recon-all on your subject.

diff --git a/generate.sh b/generate.sh index a8a3e94..81ec0f1 100644 --- a/generate.sh +++ b/generate.sh @@ -2,6 +2,16 @@ set -e +NIPYPE_BRANCH=${1:-"master"} +case $NIPYPE_BRANCH in + master) + NIPYPE_URL="https://github.com/nipy/nipype/tarball/master" + ;; + *) + NIPYPE_URL="git+https://github.com/nipy/nipype.git@${NIPYPE_BRANCH}" + ;; +esac + # Generate Dockerfile generate_docker() { docker run --rm kaczmarj/neurodocker:master generate docker \ @@ -14,12 +24,12 @@ generate_docker() { --spm12 version=r7219 \ --user=neuro \ --workdir /home/neuro \ - --miniconda miniconda_version="4.3.31" \ - conda_install="python=3.6 pytest jupyter jupyterlab jupyter_contrib_nbextensions + --miniconda \ + conda_install="python=3.8 pytest jupyter jupyterlab jupyter_contrib_nbextensions traits pandas matplotlib scikit-learn scikit-image seaborn nbformat nb_conda" \ - pip_install="https://github.com/nipy/nipype/tarball/master - https://github.com/INCF/pybids/tarball/0.7.1 - nilearn datalad[full] nipy duecredit nbval" \ + pip_install="$NIPYPE_URL + pybids==0.13.1 + nilearn datalad[full] nipy duecredit nbval niflow-nipype1-workflows" \ create_env="neuro" \ activate=True \ --env LD_LIBRARY_PATH="/opt/miniconda-latest/envs/neuro:$LD_LIBRARY_PATH" \ @@ -53,12 +63,12 @@ generate_singularity() { --spm12 version=r7219 \ --user=neuro \ --workdir /home/neuro \ - --miniconda miniconda_version="4.3.31" \ - conda_install="python=3.6 pytest jupyter jupyterlab jupyter_contrib_nbextensions + --miniconda \ + conda_install="python=3.8 pytest jupyter jupyterlab jupyter_contrib_nbextensions traits pandas matplotlib scikit-learn scikit-image seaborn nbformat nb_conda" \ - pip_install="https://github.com/nipy/nipype/tarball/master - https://github.com/INCF/pybids/tarball/0.7.1 - nilearn datalad[full] nipy duecredit nbval" \ + pip_install="$NIPYPE_URL + pybids==0.13.1 + nilearn datalad[full] nipy duecredit nbval niflow-nipype1-workflows" \ create_env="neuro" \ activate=True \ --env 
LD_LIBRARY_PATH="/opt/miniconda-latest/envs/neuro:$LD_LIBRARY_PATH" \ diff --git a/notebooks/advanced_create_interfaces.ipynb b/notebooks/advanced_create_interfaces.ipynb index ce1052e..33c47db 100644 --- a/notebooks/advanced_create_interfaces.ipynb +++ b/notebooks/advanced_create_interfaces.ipynb @@ -1214,9 +1214,9 @@ "from scipy.io import savemat\n", "\n", "# 1. save the image in matlab format as tmp_image.mat\n", - "tmp_image = 'tmp_image'\n", + "tmp_image = 'tmp_image.mat'\n", "data = nb.load(in_file).get_data()\n", - "savemat(tmp_image, {b'data': data}, do_compression=False)" + "savemat(tmp_image, {'data': data}, do_compression=False)" ] }, { @@ -1298,9 +1298,9 @@ "\n", " def _run_interface(self, runtime): \n", " # Save the image in matlab format as tmp_image.mat\n", - " tmp_image = 'tmp_image'\n", + " tmp_image = 'tmp_image.mat'\n", " data = nb.load(self.inputs.in_file).get_data()\n", - " savemat(tmp_image, {b'data': data}, do_compression=False)\n", + " savemat(tmp_image, {'data': data}, do_compression=False)\n", " \n", " # Load script\n", " with open(self.inputs.script_file) as script_file:\n", @@ -1374,7 +1374,7 @@ "cell_type": "code", "execution_count": null, "metadata": { - "solution2": "hidden", + "solution2": "shown", "solution2_first": true }, "outputs": [], @@ -1386,7 +1386,7 @@ "cell_type": "code", "execution_count": null, "metadata": { - "solution2": "hidden" + "solution2": "shown" }, "outputs": [], "source": [ @@ -1411,9 +1411,9 @@ "\n", " def _run_interface(self, runtime): \n", " # Save the image in matlab format as tmp_image.mat\n", - " tmp_image = 'tmp_image'\n", + " tmp_image = 'tmp_image.mat'\n", " data = nb.load(self.inputs.in_file).get_data()\n", - " savemat(tmp_image, {b'data': data}, do_compression=False)\n", + " savemat(tmp_image, {'data': data}, do_compression=False)\n", " \n", " # Load script\n", " with open(self.inputs.script_file) as script_file:\n", @@ -1495,11 +1495,18 @@ "source": [ "!cat volume.txt" ] + }, + { + "cell_type": 
"code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": { "kernelspec": { - "display_name": "Python [default]", + "display_name": "Python 3", "language": "python", "name": "python3" }, @@ -1513,7 +1520,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.6.5" + "version": "3.7.8" } }, "nbformat": 4, diff --git a/notebooks/basic_data_input.ipynb b/notebooks/basic_data_input.ipynb index a9bd700..2857f8c 100644 --- a/notebooks/basic_data_input.ipynb +++ b/notebooks/basic_data_input.ipynb @@ -635,7 +635,7 @@ "source": [ "## FreeSurferSource\n", "\n", - "`FreeSurferSource` is a specific case of a file grabber that felicitates the data import of outputs from the FreeSurfer recon-all algorithm. This, of course, requires that you've already run `recon-all` on your subject." + "`FreeSurferSource` is a specific case of a file grabber that facilitates the data import of outputs from the FreeSurfer recon-all algorithm. This, of course, requires that you've already run `recon-all` on your subject." 
] }, { diff --git a/notebooks/basic_data_input_bids.ipynb b/notebooks/basic_data_input_bids.ipynb index 3275f9d..e87d70a 100644 --- a/notebooks/basic_data_input_bids.ipynb +++ b/notebooks/basic_data_input_bids.ipynb @@ -140,7 +140,7 @@ "metadata": {}, "outputs": [], "source": [ - "layout.get(subject='01', suffix='bold', extensions=['nii', 'nii.gz'])" + "layout.get(subject='01', suffix='bold', extension=['.nii', '.nii.gz'])" ] }, { @@ -156,7 +156,7 @@ "metadata": {}, "outputs": [], "source": [ - "layout.get(subject='01', suffix='bold', extensions=['nii', 'nii.gz'], return_type='file')" + "layout.get(subject='01', suffix='bold', extension=['.nii', '.nii.gz'], return_type='file')" ] }, { diff --git a/notebooks/basic_function_interface.ipynb b/notebooks/basic_function_interface.ipynb index f4b4076..5dc2b89 100644 --- a/notebooks/basic_function_interface.ipynb +++ b/notebooks/basic_function_interface.ipynb @@ -247,8 +247,8 @@ "# Run node\n", "try:\n", " rndArray.run()\n", - "except(NameError) as err:\n", - " print(\"NameError:\", err)\n", + "except Exception as err:\n", + " print(err)\n", "else:\n", " raise" ] @@ -259,8 +259,12 @@ "source": [ "As you can see, if we don't import `random` inside the scope of the function, we receive the following error:\n", "\n", - " NameError: global name 'random' is not defined\n", - " Interface Function failed to run. 
" + " Exception raised while executing Node rndArray_node.\n", + "\n", + " Traceback (most recent call last):\n", + " [...]\n", + " File \"\", line 3, in get_random_array\n", + " NameError: name 'random' is not defined" ] } ], diff --git a/notebooks/basic_graph_visualization.ipynb b/notebooks/basic_graph_visualization.ipynb index 7b0c89a..6522844 100644 --- a/notebooks/basic_graph_visualization.ipynb +++ b/notebooks/basic_graph_visualization.ipynb @@ -41,7 +41,7 @@ "outputs": [], "source": [ "# Import the function to create an spm fmri preprocessing workflow\n", - "from nipype.workflows.fmri.spm import create_spm_preproc\n", + "from niflow.nipype1.workflows.fmri.spm import create_spm_preproc\n", "\n", "# Create the workflow object\n", "spmflow = create_spm_preproc()" @@ -274,7 +274,7 @@ "metadata": { "anaconda-cloud": {}, "kernelspec": { - "display_name": "Python [default]", + "display_name": "Python 3", "language": "python", "name": "python3" }, @@ -288,7 +288,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.6.5" + "version": "3.6.11" } }, "nbformat": 4, diff --git a/notebooks/basic_import_workflows.ipynb b/notebooks/basic_import_workflows.ipynb index 6b163a5..4151ffc 100644 --- a/notebooks/basic_import_workflows.ipynb +++ b/notebooks/basic_import_workflows.ipynb @@ -40,7 +40,7 @@ "metadata": {}, "outputs": [], "source": [ - "from nipype.workflows.fmri.fsl.preprocess import create_susan_smooth\n", + "from niflow.nipype1.workflows.fmri.fsl.preprocess import create_susan_smooth\n", "smoothwf = create_susan_smooth()" ] }, @@ -323,7 +323,7 @@ "metadata": { "anaconda-cloud": {}, "kernelspec": { - "display_name": "Python [default]", + "display_name": "Python 3", "language": "python", "name": "python3" }, @@ -337,7 +337,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.6.5" + "version": "3.6.11" } }, "nbformat": 4, diff --git a/notebooks/basic_nodes.ipynb 
b/notebooks/basic_nodes.ipynb index b8e2627..20cdbfe 100644 --- a/notebooks/basic_nodes.ipynb +++ b/notebooks/basic_nodes.ipynb @@ -170,8 +170,10 @@ "metadata": {}, "outputs": [], "source": [ + "in_file = '/data/ds000114/sub-01/ses-test/anat/sub-01_ses-test_T1w.nii.gz'\n", + "\n", "# Specify node inputs\n", - "bet.inputs.in_file = '/data/ds000114/sub-01/ses-test/anat/sub-01_ses-test_T1w.nii.gz'\n", + "bet.inputs.in_file = in_file\n", "bet.inputs.out_file = '/output/node_T1w_bet.nii.gz'" ] }, @@ -200,7 +202,7 @@ "from nilearn.plotting import plot_anat\n", "%matplotlib inline\n", "import matplotlib.pyplot as plt\n", - "plot_anat(bet.inputs.in_file, title='BET input', cut_coords=(10,10,10),\n", + "plot_anat(in_file, title='BET input', cut_coords=(10,10,10),\n", " display_mode='ortho', dim=-1, draw_cross=False, annotate=False);\n", "plot_anat(res.outputs.out_file, title='BET output', cut_coords=(10,10,10),\n", " display_mode='ortho', dim=-1, draw_cross=False, annotate=False);" diff --git a/notebooks/basic_workflow.ipynb b/notebooks/basic_workflow.ipynb index 60f3b93..09bde6f 100644 --- a/notebooks/basic_workflow.ipynb +++ b/notebooks/basic_workflow.ipynb @@ -484,7 +484,7 @@ "metadata": {}, "outputs": [], "source": [ - "from nipype.workflows.fmri.fsl import create_susan_smooth" + "from niflow.nipype1.workflows.fmri.fsl import create_susan_smooth" ] }, { @@ -946,7 +946,7 @@ "metadata": { "anaconda-cloud": {}, "kernelspec": { - "display_name": "Python [default]", + "display_name": "Python 3", "language": "python", "name": "python3" }, @@ -960,7 +960,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.6.5" + "version": "3.6.11" } }, "nbformat": 4, diff --git a/notebooks/handson_analysis.ipynb b/notebooks/handson_analysis.ipynb index 988fe88..dd97805 100644 --- a/notebooks/handson_analysis.ipynb +++ b/notebooks/handson_analysis.ipynb @@ -998,8 +998,11 @@ "outputs": [], "source": [ 
"plot_glass_brain('/output/datasink_handson/normalized/sub-07/wess_0008.nii',\n", + " output_file=\"/output/datasink_handson/normalized/sub-07/f-contr_activation.png\",\n", " colorbar=True, display_mode='lyrz', black_bg=True, threshold=25,\n", - " title='subject 7 - F-contrast: Activation');" + " title='subject 7 - F-contrast: Activation');\n", + "\n", + "Image(filename='/output/datasink_handson/normalized/sub-07/f-contr_activation.png')" ] }, { @@ -1009,8 +1012,11 @@ "outputs": [], "source": [ "plot_glass_brain('/output/datasink_handson/normalized/sub-07/wess_0009.nii',\n", + " output_file=\"/output/datasink_handson/normalized/sub-07/f-contr_differences.png\",\n", " colorbar=True, display_mode='lyrz', black_bg=True, threshold=25,\n", - " title='subject 7 - F-contrast: Differences');" + " title='subject 7 - F-contrast: Differences');\n", + "\n", + "Image(filename='/output/datasink_handson/normalized/sub-07/f-contr_differences.png')" ] }, { @@ -1648,7 +1654,7 @@ "metadata": { "anaconda-cloud": {}, "kernelspec": { - "display_name": "Python [default]", + "display_name": "Python 3", "language": "python", "name": "python3" }, @@ -1662,7 +1668,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.6.5" + "version": "3.7.8" } }, "nbformat": 4, diff --git a/notebooks/handson_preprocessing.ipynb b/notebooks/handson_preprocessing.ipynb index 5f37b8e..444c171 100644 --- a/notebooks/handson_preprocessing.ipynb +++ b/notebooks/handson_preprocessing.ipynb @@ -765,7 +765,7 @@ "metadata": {}, "outputs": [], "source": [ - "from nipype.workflows.fmri.fsl.preprocess import create_susan_smooth" + "from niflow.nipype1.workflows.fmri.fsl.preprocess import create_susan_smooth" ] }, { @@ -1753,7 +1753,7 @@ "metadata": { "anaconda-cloud": {}, "kernelspec": { - "display_name": "Python [default]", + "display_name": "Python 3", "language": "python", "name": "python3" }, @@ -1767,7 +1767,7 @@ "name": "python", "nbconvert_exporter": "python", 
"pygments_lexer": "ipython3", - "version": "3.6.5" + "version": "3.6.11" } }, "nbformat": 4, diff --git a/notebooks/introduction_docker.ipynb b/notebooks/introduction_docker.ipynb index b4c1343..8dac7af 100644 --- a/notebooks/introduction_docker.ipynb +++ b/notebooks/introduction_docker.ipynb @@ -68,7 +68,7 @@ "\n", " docker run -it --rm -p 8888:8888 miykael/nipype_tutorial jupyter notebook\n", " \n", - "However, if you want to use your version of notebooks, safe notebook outputs locally or use you local data, you can also mount your local directories, e.g.: \n", + "However, if you want to use your version of notebooks, save notebook outputs locally or use you local data, you can also mount your local directories, e.g.: \n", "\n", " docker run -it --rm -v /path/to/nipype_tutorial/:/home/neuro/nipype_tutorial -v /path/to/data/:/data -v /path/to/output/:/output -p 8888:8888 miykael/nipype_tutorial jupyter notebook\n", "\n", diff --git a/test_notebooks.py b/test_notebooks.py index 8a11c4f..2d2db6d 100644 --- a/test_notebooks.py +++ b/test_notebooks.py @@ -1,7 +1,6 @@ -import os import sys -import time from glob import glob +import pytest def test_version(): import nipype @@ -90,7 +89,6 @@ def reduce_notebook_load(path): print('Reducing: %s' % n) notebooks.append(reduce_notebook_load(n)) - for test in notebooks: - pytest_cmd = 'pytest --nbval-lax --nbval-cell-timeout 7200 -v -s %s' % test - print(pytest_cmd) - os.system(pytest_cmd) + # testing all tests from the notebooks list + pytest_exit_code = pytest.main(["--nbval-lax", "--nbval-cell-timeout", "7200", "-vs"] + notebooks) + sys.exit(pytest_exit_code)