diff --git a/.circleci/config.yml b/.circleci/config.yml new file mode 100644 index 0000000..1492f60 --- /dev/null +++ b/.circleci/config.yml @@ -0,0 +1,827 @@ +version: 2.1 +orbs: + codecov: codecov/codecov@3.2.4 + +.dockersetup: + &dockersetup + docker: + - image: pennlinc/xcp_d_build:0.0.19 + working_directory: /src/smripost_linc + +runinstall: + &runinstall + name: Install smripost_linc + command: | + VERSION=0+build + if [[ -n "$CIRCLE_TAG" ]]; then + VERSION="$CIRCLE_TAG" + fi + git checkout $CIRCLE_BRANCH + echo "${VERSION}" > /src/smripost_linc/smripost_linc/VERSION + echo "include smripost_linc/VERSION" >> /src/smripost_linc/MANIFEST.in + pip install .[tests] --progress-bar off + + # Write the config file + mkdir ~/.nipype + CFG=~/.nipype/nipype.cfg + printf "[execution]\nstop_on_first_crash = true\n" > ${CFG} + echo "poll_sleep_duration = 0.01" >> ${CFG} + echo "hash_method = content" >> ${CFG} + +jobs: + build: + <<: *dockersetup + steps: + - checkout + - run: *runinstall + + download_data_ds001419: + <<: *dockersetup + steps: + - checkout + - restore_cache: + key: ds001419-10 + - run: *runinstall + - run: + name: Download ds001419 fMRIPrep test data + command: | + cd /src/smripost_linc/.circleci + python get_data.py $PWD/data ds001419 + - save_cache: + key: ds001419-10 + paths: + - /src/smripost_linc/.circleci/data/ds001419 + # The resource_class feature allows configuring CPU and RAM resources for each job. Different resource classes are available for different executors. https://circleci.com/docs/2.0/configuration-reference/#resourceclass + # Why do we need a big executor for this job? 
+ resource_class: large + + download_data_ds001419_aroma: + <<: *dockersetup + steps: + - checkout + - restore_cache: + key: ds001419-aroma-02 + - run: *runinstall + - run: + name: Download ds001419 fMRIPost-AROMA test data + command: | + cd /src/smripost_linc/.circleci + python get_data.py $PWD/data ds001419-aroma + - save_cache: + key: ds001419-aroma-02 + paths: + - /src/smripost_linc/.circleci/data/ds001419-aroma + + download_data_schaefer100: + <<: *dockersetup + steps: + - checkout + - restore_cache: + key: schaefer100-02 + - run: *runinstall + - run: + name: Download BIDS-Atlas dataset + command: | + cd /src/smripost_linc/.circleci + python get_data.py $PWD/data schaefer100 + - save_cache: + key: schaefer100-02 + paths: + - /src/smripost_linc/.circleci/data/schaefer100 + + download_data_pnc: + <<: *dockersetup + steps: + - checkout + - restore_cache: + key: pnc-02 + - run: *runinstall + - run: + name: Download pnc test data + command: | + cd /src/smripost_linc/.circleci + python get_data.py $PWD/data pnc + - save_cache: + key: pnc-02 + paths: + - /src/smripost_linc/.circleci/data/pnc + # The resource_class feature allows configuring CPU and RAM resources for each job. Different resource classes are available for different executors. https://circleci.com/docs/2.0/configuration-reference/#resourceclass + resource_class: large + + download_data_ukbiobank: + <<: *dockersetup + steps: + - checkout + - restore_cache: + key: ukbiobank-08 + - run: *runinstall + - run: + name: Download ukbiobank test data + command: | + cd /src/smripost_linc/.circleci + python get_data.py $PWD/data ukbiobank + - save_cache: + key: ukbiobank-08 + paths: + - /src/smripost_linc/.circleci/data/ukbiobank + # The resource_class feature allows configuring CPU and RAM resources for each job. Different resource classes are available for different executors. 
https://circleci.com/docs/2.0/configuration-reference/#resourceclass + resource_class: large + + download_data_fmriprepwithoutfreesurfer: + <<: *dockersetup + steps: + - checkout + - restore_cache: + key: fmriprepwithoutfreesurfer-03 + - run: *runinstall + - run: + name: Download fmriprepwithoutfreesurfer test data + command: | + cd /src/smripost_linc/.circleci + python get_data.py $PWD/data fmriprepwithoutfreesurfer + - save_cache: + key: fmriprepwithoutfreesurfer-03 + paths: + - /src/smripost_linc/.circleci/data/fmriprepwithoutfreesurfer + + download_data_nibabies: + <<: *dockersetup + steps: + - checkout + - restore_cache: + key: nibabies-04 + - run: *runinstall + - run: + name: Download nibabies test data + command: | + cd /src/smripost_linc/.circleci + python get_data.py $PWD/data nibabies + - save_cache: + key: nibabies-04 + paths: + - /src/smripost_linc/.circleci/data/nibabies + + nifti_without_freesurfer: + <<: *dockersetup + resource_class: large + environment: + CIRCLE_CPUS: 4 + steps: + - checkout + - run: + name: Check whether build should be skipped + command: | + cd /src/smripost_linc + if [[ "$( git log --format=oneline -n 1 $CIRCLE_SHA1 | grep -i -E '\[skip[ _]?integration\]' )" != "" ]]; then + echo "Skipping nifti_without_freesurfer build" + circleci step halt + fi + - restore_cache: + key: fmriprepwithoutfreesurfer-03 + - restore_cache: + key: schaefer100-02 + - run: *runinstall + - run: + name: Run full smripost_linc on nifti without freesurfer + no_output_timeout: 1h + command: | + pytest -rP -o log_cli=true -m "fmriprep_without_freesurfer" \ + --cov-append \ + --cov-branch \ + --cov-report term-missing \ + --cov=smripost_linc \ + --data_dir=/src/smripost_linc/.circleci/data \ + --output_dir=/src/smripost_linc/.circleci/out \ + --working_dir=/src/smripost_linc/.circleci/work \ + smripost_linc + mkdir /src/coverage + mv /src/smripost_linc/.coverage /src/coverage/.coverage.fmriprep_without_freesurfer + # remove nifti files before uploading 
artifacts + find /src/smripost_linc/.circleci/out/ -name "*.nii.gz" -type f -delete + - persist_to_workspace: + root: /src/coverage/ + paths: + - .coverage.fmriprep_without_freesurfer + - store_artifacts: + path: /src/smripost_linc/.circleci/out/test_fmriprep_without_freesurfer/ + + nifti_without_freesurfer_with_main: + <<: *dockersetup + resource_class: medium + environment: + CIRCLE_CPUS: 2 + steps: + - checkout + - run: + name: Check whether build should be skipped + command: | + cd /src/smripost_linc + if [[ "$( git log --format=oneline -n 1 $CIRCLE_SHA1 | grep -i -E '\[skip[ _]?integration\]' )" != "" ]]; then + echo "Skipping nifti_without_freesurfer build" + circleci step halt + fi + - restore_cache: + key: fmriprepwithoutfreesurfer-03 + - run: *runinstall + - run: + name: Run full smripost_linc on nifti without freesurfer + no_output_timeout: 1h + command: | + pytest -rP -o log_cli=true -m "fmriprep_without_freesurfer_with_main" \ + --cov-append \ + --cov-branch \ + --cov-report term-missing \ + --cov=smripost_linc \ + --data_dir=/src/smripost_linc/.circleci/data \ + --output_dir=/src/smripost_linc/.circleci/out \ + --working_dir=/src/smripost_linc/.circleci/work \ + smripost_linc + mkdir /src/coverage + mv /src/smripost_linc/.coverage /src/coverage/.coverage.fmriprep_without_freesurfer_with_main + # remove nifti files before uploading artifacts + find /src/smripost_linc/.circleci/out/ -name "*.nii.gz" -type f -delete + - persist_to_workspace: + root: /src/coverage/ + paths: + - .coverage.fmriprep_without_freesurfer_with_main + - store_artifacts: + path: /src/smripost_linc/.circleci/out/test_fmriprep_without_freesurfer/ + + ds001419_nifti: + <<: *dockersetup + resource_class: large + environment: + CIRCLE_CPUS: 4 + steps: + - checkout + - run: + name: Check whether build should be skipped + command: | + cd /src/smripost_linc + if [[ "$( git log --format=oneline -n 1 $CIRCLE_SHA1 | grep -i -E '\[skip[ _]?integration\]' )" != "" ]]; then + echo "Skipping 
ds001419_nifti build" + circleci step halt + fi + - restore_cache: + key: ds001419-10 + - restore_cache: + key: ds001419-aroma-02 + - run: *runinstall + - run: + name: Run full smripost_linc on nifti with freesurfer + no_output_timeout: 1h + command: | + pytest -rP -o log_cli=true -m "ds001419_nifti" \ + --cov-append \ + --cov-branch \ + --cov-report term-missing \ + --cov=smripost_linc \ + --data_dir=/src/smripost_linc/.circleci/data \ + --output_dir=/src/smripost_linc/.circleci/out \ + --working_dir=/src/smripost_linc/.circleci/work \ + smripost_linc + mkdir /src/coverage + mv /src/smripost_linc/.coverage /src/coverage/.coverage.ds001419_nifti + # remove nifti files before uploading artifacts + find /src/smripost_linc/.circleci/out/ -name "*.nii.gz" -type f -delete + - persist_to_workspace: + root: /src/coverage/ + paths: + - .coverage.ds001419_nifti + - store_artifacts: + path: /src/smripost_linc/.circleci/out/test_ds001419_nifti/ + + ds001419_cifti: + <<: *dockersetup + resource_class: large + environment: + CIRCLE_CPUS: 4 + steps: + - checkout + - run: + name: Check whether build should be skipped + command: | + cd /src/smripost_linc + if [[ "$( git log --format=oneline -n 1 $CIRCLE_SHA1 | grep -i -E '\[skip[ _]?integration\]' )" != "" ]]; then + echo "Skipping ds001419_cifti build" + circleci step halt + fi + - restore_cache: + key: ds001419-10 + - run: *runinstall + - run: + name: Run full smripost_linc on cifti with freesurfer + no_output_timeout: 1h + command: | + pytest -rP -o log_cli=true -m "ds001419_cifti" \ + --cov-append \ + --cov-branch \ + --cov-report term-missing \ + --cov=smripost_linc \ + --data_dir=/src/smripost_linc/.circleci/data \ + --output_dir=/src/smripost_linc/.circleci/out \ + --working_dir=/src/smripost_linc/.circleci/work \ + smripost_linc + mkdir /src/coverage + mv /src/smripost_linc/.coverage /src/coverage/.coverage.ds001419_cifti + # remove nifti files before uploading artifacts + find /src/smripost_linc/.circleci/out/ -name 
"*.nii.gz" -type f -delete + - persist_to_workspace: + root: /src/coverage/ + paths: + - .coverage.ds001419_cifti + - store_artifacts: + path: /src/smripost_linc/.circleci/out/test_ds001419_cifti/ + + ukbiobank: + <<: *dockersetup + resource_class: medium + environment: + CIRCLE_CPUS: 2 + steps: + - checkout + - run: + name: Check whether build should be skipped + command: | + cd /src/smripost_linc + if [[ "$( git log --format=oneline -n 1 $CIRCLE_SHA1 | grep -i -E '\[skip[ _]?integration\]' )" != "" ]]; then + echo "Skipping ukbiobank build" + circleci step halt + fi + - restore_cache: + key: ukbiobank-08 + - run: *runinstall + - run: + name: Run full smripost_linc on UK Biobank data + no_output_timeout: 1h + command: | + pytest -rP -o log_cli=true -m "ukbiobank" \ + --cov-append \ + --cov-branch \ + --cov-report term-missing \ + --cov=smripost_linc \ + --data_dir=/src/smripost_linc/.circleci/data \ + --output_dir=/src/smripost_linc/.circleci/out \ + --working_dir=/src/smripost_linc/.circleci/work \ + smripost_linc + mkdir /src/coverage + mv /src/smripost_linc/.coverage /src/coverage/.coverage.ukbiobank + # remove nifti files before uploading artifacts + find /src/smripost_linc/.circleci/out/ -name "*.nii.gz" -type f -delete + - persist_to_workspace: + root: /src/coverage/ + paths: + - .coverage.ukbiobank + - store_artifacts: + path: /src/smripost_linc/.circleci/out/test_ukbiobank/ + + nibabies: + <<: *dockersetup + resource_class: medium + environment: + CIRCLE_CPUS: 2 + steps: + - checkout + - run: + name: Check whether build should be skipped + command: | + cd /src/smripost_linc + if [[ "$( git log --format=oneline -n 1 $CIRCLE_SHA1 | grep -i -E '\[skip[ _]?integration\]' )" != "" ]]; then + echo "Skipping nibabies build" + circleci step halt + fi + - restore_cache: + key: nibabies-04 + - run: *runinstall + - run: + name: Run full smripost_linc on nibabies + no_output_timeout: 1h + command: | + pytest -rP -o log_cli=true -m "nibabies" \ + --cov-append \ + 
--cov-branch \ + --cov-report term-missing \ + --cov=smripost_linc \ + --data_dir=/src/smripost_linc/.circleci/data \ + --output_dir=/src/smripost_linc/.circleci/out \ + --working_dir=/src/smripost_linc/.circleci/work \ + smripost_linc + mkdir /src/coverage + mv /src/smripost_linc/.coverage /src/coverage/.coverage.nibabies + # remove nifti files before uploading artifacts + find /src/smripost_linc/.circleci/out/ -name "*.nii.gz" -type f -delete + - persist_to_workspace: + root: /src/coverage/ + paths: + - .coverage.nibabies + - store_artifacts: + path: /src/smripost_linc/.circleci/out/test_nibabies/ + + pnc_cifti: + <<: *dockersetup + resource_class: medium + environment: + CIRCLE_CPUS: 2 + steps: + - checkout + - run: + name: Check whether build should be skipped + command: | + cd /src/smripost_linc + if [[ "$( git log --format=oneline -n 1 $CIRCLE_SHA1 | grep -i -E '\[skip[ _]?integration\]' )" != "" ]]; then + echo "Skipping pnc_cifti build" + circleci step halt + fi + - restore_cache: + key: pnc-02 + - run: *runinstall + - run: + name: Run full smripost_linc on cifti with freesurfer + no_output_timeout: 5h + command: | + pytest -rP -o log_cli=true -m "pnc_cifti" \ + --cov-append \ + --cov-branch \ + --cov-report term-missing \ + --cov=smripost_linc \ + --data_dir=/src/smripost_linc/.circleci/data \ + --output_dir=/src/smripost_linc/.circleci/out \ + --working_dir=/src/smripost_linc/.circleci/work \ + smripost_linc + mkdir /src/coverage + mv /src/smripost_linc/.coverage /src/coverage/.coverage.pnc_cifti + # remove nifti files before uploading artifacts + find /src/smripost_linc/.circleci/out/ -name "*.nii.gz" -type f -delete + - persist_to_workspace: + root: /src/coverage + paths: + - .coverage.pnc_cifti + - store_artifacts: + path: /src/smripost_linc/.circleci/out/test_pnc_cifti/ + + pnc_cifti_t2wonly: + <<: *dockersetup + resource_class: medium + environment: + CIRCLE_CPUS: 2 + steps: + - checkout + - run: + name: Check whether build should be skipped + 
command: | + cd /src/smripost_linc + if [[ "$( git log --format=oneline -n 1 $CIRCLE_SHA1 | grep -i -E '\[skip[ _]?integration\]' )" != "" ]]; then + echo "Skipping pnc_cifti_t2wonly build" + circleci step halt + fi + - restore_cache: + key: pnc-02 + - run: *runinstall + - run: + name: Run full smripost_linc on cifti with freesurfer + no_output_timeout: 5h + command: | + pytest -rP -o log_cli=true -m "pnc_cifti_t2wonly" \ + --cov-append \ + --cov-branch \ + --cov-report term-missing \ + --cov=smripost_linc \ + --data_dir=/src/smripost_linc/.circleci/data \ + --output_dir=/src/smripost_linc/.circleci/out \ + --working_dir=/src/smripost_linc/.circleci/work \ + smripost_linc + mkdir /src/coverage + mv /src/smripost_linc/.coverage /src/coverage/.coverage.pnc_cifti_t2wonly + # remove nifti files before uploading artifacts + find /src/smripost_linc/.circleci/out/ -name "*.nii.gz" -type f -delete + - persist_to_workspace: + root: /src/coverage + paths: + - .coverage.pnc_cifti_t2wonly + - store_artifacts: + path: /src/smripost_linc/.circleci/out/test_pnc_cifti_t2wonly/ + + pytests: + <<: *dockersetup + resource_class: large + environment: + CIRCLE_CPUS: 4 + steps: + - checkout + - run: + name: Check whether build should be skipped + command: | + cd /src/smripost_linc + if [[ "$( git log --format=oneline -n 1 $CIRCLE_SHA1 | grep -i -E '\[skip[ _]?pytests\]' )" != "" ]]; then + echo "Skipping pytests build" + circleci step halt + fi + - restore_cache: + key: pnc-02 + - restore_cache: + key: ds001419-10 + - restore_cache: + key: fmriprepwithoutfreesurfer-03 + - restore_cache: + key: nibabies-04 + - restore_cache: + key: schaefer100-02 + - run: *runinstall + - run: + name: Run pytest on the tests directory + no_output_timeout: 1h + command: | + pytest \ + -n ${CIRCLE_CPUS} \ + --cov-append \ + --cov-branch \ + --cov-report term-missing \ + --cov=smripost_linc \ + --data_dir=/src/smripost_linc/.circleci/data \ + --output_dir=/src/smripost_linc/.circleci/out \ + 
--working_dir=/src/smripost_linc/.circleci/work \ + smripost_linc + mkdir /src/coverage + mv /src/smripost_linc/.coverage /src/coverage/.coverage.pytests + - persist_to_workspace: + root: /src/coverage + paths: + - .coverage.pytests + - store_artifacts: + path: /src/smripost_linc/.circleci/out + + merge_coverage: + <<: *dockersetup + steps: + - checkout + - attach_workspace: + at: /src/coverage + - run: *runinstall + - run: + name: Merge coverage files + command: | + cd /src/coverage/ + coverage combine + coverage xml + - store_artifacts: + path: /src/coverage + - codecov/upload: + file: /src/coverage/coverage.xml + + deployable: + docker: + - image: busybox:latest + steps: + - run: echo Deploying! + + build_and_deploy: + environment: + TZ: "/usr/share/zoneinfo/America/New_York" + docker: + - image: cimg/base:2020.09 + working_directory: /tmp/src/xcpd_build + steps: + - checkout + - setup_remote_docker: + version: docker24 + docker_layer_caching: true + - run: + name: Build Docker image + no_output_timeout: 3h + command: | + # Get version, update files. + THISVERSION=$(python3 -c "from smripost_linc import __version__; print(__version__)") + sed -i "s/title = {smripost_linc}/title = {smripost_linc ${CIRCLE_TAG:-$THISVERSION}}/" smripost_linc/data/boilerplate.bib + # Build docker image + e=1 && for i in {1..5}; do + docker build \ + --cache-from=pennlinc/smripost_linc \ + --rm=false \ + -t pennlinc/smripost_linc:latest \ + --build-arg BUILD_DATE=`date -u +"%Y-%m-%dT%H:%M:%SZ"` \ + --build-arg VCS_REF=`git rev-parse --short HEAD` \ + --build-arg VERSION="${CIRCLE_TAG:-$THISVERSION}" . 
\ + && e=0 && break || sleep 15 + done && [ "$e" -eq "0" ] + - run: + name: Deploy to Docker Hub + no_output_timeout: 40m + command: | + if [[ -n "$DOCKERHUB_TOKEN" ]]; then + docker login -u $DOCKERHUB_USERNAME -p $DOCKERHUB_TOKEN + docker tag pennlinc/smripost_linc pennlinc/smripost_linc:unstable + docker push pennlinc/smripost_linc:unstable + if [[ -n "$CIRCLE_TAG" ]]; then + docker push pennlinc/smripost_linc:latest + docker tag pennlinc/smripost_linc pennlinc/smripost_linc:$CIRCLE_TAG + docker push pennlinc/smripost_linc:$CIRCLE_TAG + fi + fi + +workflows: + version: 2 + build_test_deploy: + jobs: + - build: + filters: + tags: + only: /.*/ + + - download_data_ds001419: + requires: + - build + filters: + tags: + only: /.*/ + + - download_data_ds001419_aroma: + requires: + - build + filters: + tags: + only: /.*/ + + - download_data_pnc: + requires: + - build + filters: + tags: + only: /.*/ + + - download_data_ukbiobank: + requires: + - build + filters: + tags: + only: /.*/ + + - download_data_fmriprepwithoutfreesurfer: + requires: + - build + filters: + tags: + only: /.*/ + + - download_data_nibabies: + requires: + - build + filters: + tags: + only: /.*/ + + - download_data_schaefer100: + requires: + - build + filters: + tags: + only: /.*/ + + - pnc_cifti: + requires: + - download_data_pnc + filters: + branches: + ignore: + - /docs?\/.*/ + - /tests?\/.*/ + tags: + only: /.*/ + + - pnc_cifti_t2wonly: + requires: + - download_data_pnc + filters: + branches: + ignore: + - /docs?\/.*/ + - /tests?\/.*/ + tags: + only: /.*/ + + - ukbiobank: + requires: + - download_data_ukbiobank + filters: + branches: + ignore: + - /docs?\/.*/ + - /tests?\/.*/ + tags: + only: /.*/ + + - ds001419_nifti: + requires: + - download_data_ds001419 + - download_data_ds001419_aroma + filters: + branches: + ignore: + - /docs?\/.*/ + - /tests?\/.*/ + tags: + only: /.*/ + + - ds001419_cifti: + requires: + - download_data_ds001419 + filters: + branches: + ignore: + - /docs?\/.*/ + - /tests?\/.*/ 
+ tags: + only: /.*/ + + - nibabies: + requires: + - download_data_nibabies + filters: + branches: + ignore: + - /docs?\/.*/ + - /tests?\/.*/ + tags: + only: /.*/ + + - nifti_without_freesurfer: + requires: + - download_data_fmriprepwithoutfreesurfer + - download_data_schaefer100 + filters: + branches: + ignore: + - /docs?\/.*/ + - /tests?\/.*/ + tags: + only: /.*/ + + - nifti_without_freesurfer_with_main: + requires: + - download_data_fmriprepwithoutfreesurfer + filters: + branches: + ignore: + - /docs?\/.*/ + - /tests?\/.*/ + tags: + only: /.*/ + + - pytests: + requires: + - download_data_pnc + - download_data_fmriprepwithoutfreesurfer + - download_data_ds001419 + - download_data_nibabies + - download_data_schaefer100 + filters: + branches: + ignore: + - /docs?\/.*/ + - /tests?\/.*/ + tags: + only: /.*/ + + - merge_coverage: + requires: + - pnc_cifti + - pnc_cifti_t2wonly + - ds001419_nifti + - ds001419_cifti + - ukbiobank + - nibabies + - nifti_without_freesurfer + - nifti_without_freesurfer_with_main + - pytests + filters: + branches: + ignore: + - /docs?\/.*/ + - /tests?\/.*/ + tags: + only: /.*/ + + - deployable: + requires: + - ds001419_nifti + - ds001419_cifti + - ukbiobank + - nifti_without_freesurfer + - nifti_without_freesurfer_with_main + - nibabies + - pnc_cifti + - pnc_cifti_t2wonly + - pytests + filters: + branches: + only: main + tags: + only: /.*/ + + - build_and_deploy: + requires: + - deployable + filters: + branches: + only: main + tags: + only: /.*/ diff --git a/.circleci/get_data.py b/.circleci/get_data.py new file mode 100755 index 0000000..6d5afc9 --- /dev/null +++ b/.circleci/get_data.py @@ -0,0 +1,11 @@ +#!/usr/bin/env python3 +"""Download test data.""" + +import sys + +from smripost_linc.tests.utils import download_test_data + +if __name__ == '__main__': + data_dir = sys.argv[1] + dset = sys.argv[2] + download_test_data(dset, data_dir) diff --git a/.codespellrc b/.codespellrc new file mode 100644 index 0000000..15340ee --- /dev/null +++ 
b/.codespellrc @@ -0,0 +1,6 @@ +[codespell] +skip = .git,*.pdf,*.svg,*.html,dataset_description.json,*.bib +# te - TE +# Weill - name +# reson - Reson. abbreviation in citation +ignore-words-list = te,weill,reson diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..61dfeb6 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,14 @@ +# python cache +__pycache__/**/* +__pycache__ +*.pyc + +# python distribution +build/**/* +build +dist/**/* +dist +smripost_linc.egg-info/**/* +smripost_linc.egg-info +.eggs/**/* +.eggs diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 0000000..e69de29 diff --git a/.git_archival.txt b/.git_archival.txt new file mode 100644 index 0000000..8fb235d --- /dev/null +++ b/.git_archival.txt @@ -0,0 +1,4 @@ +node: $Format:%H$ +node-date: $Format:%cI$ +describe-name: $Format:%(describe:tags=true,match=*[0-9]*)$ +ref-names: $Format:%D$ diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..00a7b00 --- /dev/null +++ b/.gitattributes @@ -0,0 +1 @@ +.git_archival.txt export-subst diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 0000000..30b91a3 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,33 @@ +--- +name: Bug report +about: Something not working as described? Missing/incorrect documentation? This is the place. +title: '' +labels: 'bug' +assignees: '' + +--- +## Summary + + +## Additional details + +- sMRIPost-LINC version: +- Docker version: +- Apptainer version: + +### What were you trying to do? + +### What did you expect to happen? + +### What actually happened? 
+ +## Reproducing the bug + + diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 0000000..232b322 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,4 @@ +contact_links: + - name: Usage question + url: https://neurostars.org/tags/c/software-support/234/smripost-linc + about: Please ask questions about using sMRIPost-LINC on NeuroStars. diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 0000000..f3d8ef6 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,16 @@ +--- +name: Feature request +about: Got an idea for a new feature, or changing an existing one? This is the place. +title: '' +labels: 'enhancement' +assignees: '' + +--- +## Summary + + +## Additional details + + +## Next steps + diff --git a/.github/config.yml b/.github/config.yml new file mode 100644 index 0000000..7cd8e4a --- /dev/null +++ b/.github/config.yml @@ -0,0 +1,21 @@ +# Configuration for new-pr-welcome - https://github.com/behaviorbot/new-pr-welcome + +# Comment to be posted to on PRs from first time contributors in your repository +newPRWelcomeComment: > + Thanks for opening this pull request! + We have detected this is the first time you have contributed + to *sMRIPost-LINC*. + We ask you to read through the Contributing Guide: + https://github.com/pennlinc/smripost_linc/blob/main/CONTRIBUTING.md + + These are guidelines intended to make communication easier by describing a consistent process, but + don't worry if you don't get it everything exactly "right" on the first try. + + To boil it down, here are some highlights: + + 1. Consider starting a conversation in the issues list before submitting a pull request. + The discussion might save you a lot of time coding. + 2. Any code you submit will be licensed under the same terms (BSD 3-Clause) as the rest of smripost_linc. + + A pull request is a conversation. 
We may ask you to make some changes before accepting your PR, + and likewise, you should feel free to ask us any questions you have. diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..2b5fd11 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,18 @@ +# To get started with Dependabot version updates, you'll need to specify which +# package ecosystems to update and where the package manifests are located. +# Please see the documentation for all configuration options: +# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates + +version: 2 +updates: + - package-ecosystem: "github-actions" # See documentation for possible values + directory: "/" # Location of package manifests + labels: ["maintenance", "ignore-for-release"] + assignees: ["tsalo"] + schedule: + interval: "weekly" + - package-ecosystem: pip + directory: "/" + labels: ["maintenance", "ignore-for-release"] + schedule: + interval: weekly diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 0000000..d39bbe7 --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,16 @@ +Closes + +## Changes proposed in this pull request + + +## Documentation that should be reviewed + diff --git a/.github/release.yml b/.github/release.yml new file mode 100644 index 0000000..0c8fee8 --- /dev/null +++ b/.github/release.yml @@ -0,0 +1,20 @@ +changelog: + exclude: + labels: + - ignore-for-release + categories: + - title: 🛠 Breaking Changes + labels: + - breaking-change + - title: 🎉 Exciting New Features + labels: + - enhancement + - title: 👎 Deprecations + labels: + - deprecation + - title: 🐛 Bug Fixes + labels: + - bug + - title: Other Changes + labels: + - "*" diff --git a/.github/workflows/contrib.yml b/.github/workflows/contrib.yml new file mode 100644 index 0000000..ae7833e --- /dev/null +++ b/.github/workflows/contrib.yml @@ -0,0 +1,40 @@ +name: Contribution checks + 
+on: + push: + branches: + - main + - maint/* + pull_request: + branches: + - main + - maint/* + +defaults: + run: + shell: bash + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +permissions: + contents: read + +jobs: + style: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - run: pipx run ruff check . + - run: pipx run ruff format --diff . + + codespell: + name: Check for spelling errors + runs-on: ubuntu-latest + + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Codespell + uses: codespell-project/actions-codespell@v2 diff --git a/.gitignore b/.gitignore index 82f9275..af1752f 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,11 @@ +.vscode +.DS_Store +.*.swp +.mypy_cache +.pytest_cache +.ruff_cache +.coverage + # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 0000000..e69de29 diff --git a/.mailmap b/.mailmap new file mode 100644 index 0000000..92b96c3 --- /dev/null +++ b/.mailmap @@ -0,0 +1,2 @@ +Taylor Salo +Taylor Salo diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..177fd90 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,21 @@ +exclude: ".*/data/.*" +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.4.0 + hooks: + - id: trailing-whitespace + exclude: '.*\.svg' + - id: end-of-file-fixer + exclude: '.*\.svg' + - id: check-yaml + - id: check-json + - id: check-toml + - id: check-added-large-files + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.4.3 + hooks: + - id: ruff + args: [ --fix ] + - id: ruff-format + - id: ruff + args: [ --select, ISC001, --fix ] diff --git a/.readthedocs.yml b/.readthedocs.yml new file mode 100644 index 0000000..edad265 --- /dev/null +++ b/.readthedocs.yml @@ -0,0 +1,22 @@ +version: 2 + +build: + os: ubuntu-22.04 + apt_packages: + - graphviz + tools: + python: "3.11" + jobs: + 
post_checkout: + # Fetch full history, but don't fail if we already have it + - git fetch --unshallow || true + +sphinx: + configuration: docs/conf.py + +python: + install: + - method: pip + path: . + extra_requirements: + - doc diff --git a/.versions.json b/.versions.json new file mode 100644 index 0000000..81cd2f4 --- /dev/null +++ b/.versions.json @@ -0,0 +1,4 @@ +{ + "flagged": { + } +} diff --git a/.zenodo.json b/.zenodo.json new file mode 100644 index 0000000..c403eea --- /dev/null +++ b/.zenodo.json @@ -0,0 +1,44 @@ +{ + "title": "sMRIPost-LINC: a robust preprocessing pipeline for functional MRI", + "description": "

sMRIPost-LINC is a robust and easy-to-use pipeline for postprocessing of preprocessed structural MRI data organized in BIDS format.

", + "contributors": [], + "creators": [ + { + "affiliation": "University of Pennsylvania", + "name": "Salo, Taylor", + "orcid": "0000-0001-9813-3167" + }, + { + "affiliation": "University of Pennsylvania", + "name": "Cieslak, Matthew", + "orcid": "0000-0002-1931-4734" + }, + { + "affiliation": "University of Pennsylvania", + "name": "Satterthwaite, Theodore", + "orcid": "0000-0001-7072-9399" + } + ], + "keywords": [ + "neuroimaging", + "workflow", + "pipeline", + "postprocessing", + "sMRI", + "BIDS" + ], + "license": "Apache-2.0", + "related_identifiers": [ + { + "identifier": "https://smripost_linc.org", + "relation": "documents", + "scheme": "url" + }, + { + "identifier": "10.1038/s41592-018-0235-4", + "relation": "isPartOf", + "scheme": "doi" + } + ], + "upload_type": "software" +} diff --git a/CHANGES.rst b/CHANGES.rst new file mode 100644 index 0000000..ed39ce1 --- /dev/null +++ b/CHANGES.rst @@ -0,0 +1,4 @@ +23.0.0 () +========= + +sMRIPost-LINC's initial release. diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 0000000..1b5ee97 --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,4 @@ +# sMRIPost-LINC Code of Conduct + +*sMRIPost-LINC* is a project of the +[*NiPreps* Community, and is under its code of conduct](https://www.pennlinc.org/community/CODE_OF_CONDUCT/). diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..9ee6d11 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,4 @@ +# Contributing to *sMRIPost-LINC* + +*sMRIPost-LINC* is a project of the +[*NiPreps* Community, which specifies the contributing guidelines](https://www.pennlinc.org/community/). 
diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..dca1a4a --- /dev/null +++ b/Dockerfile @@ -0,0 +1,245 @@ +# sMRIPost-LINC Docker Container Image distribution +# +# MIT License +# +# Copyright (c) The NiPreps Developers +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +# Ubuntu 22.04 LTS - Jammy +ARG BASE_IMAGE=ubuntu:jammy-20240125 + +# +# Build wheel +# +FROM python:slim AS src +RUN pip install build +RUN apt-get update && \ + apt-get install -y --no-install-recommends git +COPY . 
/src +RUN python -m build /src + +# +# Download stages +# + +# Utilities for downloading packages +FROM ${BASE_IMAGE} AS downloader +# Bump the date to current to refresh curl/certificates/etc +RUN echo "2023.07.20" +RUN apt-get update && \ + apt-get install -y --no-install-recommends \ + binutils \ + bzip2 \ + ca-certificates \ + curl \ + unzip && \ + apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* + +# AFNI +FROM downloader AS afni +# Bump the date to current to update AFNI +RUN echo "2023.07.20" +RUN mkdir -p /opt/afni-latest \ + && curl -fsSL --retry 5 https://afni.nimh.nih.gov/pub/dist/tgz/linux_openmp_64.tgz \ + | tar -xz -C /opt/afni-latest --strip-components 1 \ + --exclude "linux_openmp_64/*.gz" \ + --exclude "linux_openmp_64/funstuff" \ + --exclude "linux_openmp_64/shiny" \ + --exclude "linux_openmp_64/afnipy" \ + --exclude "linux_openmp_64/lib/RetroTS" \ + --exclude "linux_openmp_64/lib_RetroTS" \ + --exclude "linux_openmp_64/meica.libs" \ + # Keep only what we use + && find /opt/afni-latest -type f -not -name "3dTshift" -delete + +# Connectome Workbench 1.5.0 +FROM downloader AS workbench +RUN mkdir /opt/workbench && \ + curl -sSLO https://www.humanconnectome.org/storage/app/media/workbench/workbench-linux64-v1.5.0.zip && \ + unzip workbench-linux64-v1.5.0.zip -d /opt && \ + rm workbench-linux64-v1.5.0.zip && \ + rm -rf /opt/workbench/libs_linux64_software_opengl /opt/workbench/plugins_linux64 && \ + strip --remove-section=.note.ABI-tag /opt/workbench/libs_linux64/libQt5Core.so.5 + +# Micromamba +FROM downloader AS micromamba + +# Install a C compiler to build extensions when needed. +# traits<6.4 wheels are not available for Python 3.11+, but build easily. 
+RUN apt-get update && \ + apt-get install -y --no-install-recommends build-essential && \ + apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* + +WORKDIR / +# Bump the date to current to force update micromamba +RUN echo "2024.02.06" +RUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest | tar -xvj bin/micromamba + +ENV MAMBA_ROOT_PREFIX="/opt/conda" +COPY env.yml /tmp/env.yml +WORKDIR /tmp +RUN micromamba create -y -f /tmp/env.yml && \ + micromamba clean -y -a + +# UV_USE_IO_URING for apparent race-condition (https://github.com/nodejs/node/issues/48444) +# Check if this is still necessary when updating the base image. +ENV PATH="/opt/conda/envs/smripost_linc/bin:$PATH" \ + UV_USE_IO_URING=0 +RUN npm install -g svgo@^3.2.0 bids-validator@1.14.10 && \ + rm -r ~/.npm + +# +# Main stage +# +FROM ${BASE_IMAGE} AS smripost_linc + +# Configure apt +ENV DEBIAN_FRONTEND="noninteractive" \ + LANG="en_US.UTF-8" \ + LC_ALL="en_US.UTF-8" + +# Some baseline tools; bc is needed for FreeSurfer, so don't drop it +RUN apt-get update && \ + apt-get install -y --no-install-recommends \ + bc \ + ca-certificates \ + curl \ + gcc \ + git \ + gnupg \ + libc-dev \ + lsb-release \ + netbase \ + xvfb && \ + apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* + +# Configure PPAs for libpng12 and libxp6 +RUN GNUPGHOME=/tmp gpg --keyserver hkps://keyserver.ubuntu.com --no-default-keyring --keyring /usr/share/keyrings/linuxuprising.gpg --recv 0xEA8CACC073C3DB2A \ + && GNUPGHOME=/tmp gpg --keyserver hkps://keyserver.ubuntu.com --no-default-keyring --keyring /usr/share/keyrings/zeehio.gpg --recv 0xA1301338A3A48C4A \ + && echo "deb [signed-by=/usr/share/keyrings/linuxuprising.gpg] https://ppa.launchpadcontent.net/linuxuprising/libpng12/ubuntu jammy main" > /etc/apt/sources.list.d/linuxuprising.list \ + && echo "deb [signed-by=/usr/share/keyrings/zeehio.gpg] https://ppa.launchpadcontent.net/zeehio/libxp/ubuntu jammy main" > /etc/apt/sources.list.d/zeehio.list + 
+# Dependencies for AFNI; requires a discontinued multiarch-support package from bionic (18.04) +RUN apt-get update -qq \ + && apt-get install -y -q --no-install-recommends \ + ed \ + gsl-bin \ + libglib2.0-0 \ + libglu1-mesa-dev \ + libglw1-mesa \ + libgomp1 \ + libjpeg62 \ + libpng12-0 \ + libxm4 \ + libxp6 \ + netpbm \ + tcsh \ + xfonts-base \ + xvfb \ + && curl -sSL --retry 5 -o /tmp/multiarch.deb http://archive.ubuntu.com/ubuntu/pool/main/g/glibc/multiarch-support_2.27-3ubuntu1.5_amd64.deb \ + && dpkg -i /tmp/multiarch.deb \ + && rm /tmp/multiarch.deb \ + && apt-get install -f \ + && apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* \ + && gsl2_path="$(find / -name 'libgsl.so.19' || printf '')" \ + && if [ -n "$gsl2_path" ]; then \ + ln -sfv "$gsl2_path" "$(dirname $gsl2_path)/libgsl.so.0"; \ + fi \ + && ldconfig + +# Install files from stages +COPY --from=afni /opt/afni-latest /opt/afni-latest +COPY --from=workbench /opt/workbench /opt/workbench + +# AFNI config +ENV PATH="/opt/afni-latest:$PATH" \ + AFNI_IMSAVE_WARNINGS="NO" \ + AFNI_PLUGINPATH="/opt/afni-latest" + +# Workbench config +ENV PATH="/opt/workbench/bin_linux64:$PATH" \ + LD_LIBRARY_PATH="/opt/workbench/lib_linux64:$LD_LIBRARY_PATH" + +# Create a shared $HOME directory +RUN useradd -m -s /bin/bash -G users smripost_linc +WORKDIR /home/smripost_linc +ENV HOME="/home/smripost_linc" \ + LD_LIBRARY_PATH="/usr/lib/x86_64-linux-gnu:$LD_LIBRARY_PATH" + +COPY --from=micromamba /bin/micromamba /bin/micromamba +COPY --from=micromamba /opt/conda/envs/smripost_linc /opt/conda/envs/smripost_linc + +ENV MAMBA_ROOT_PREFIX="/opt/conda" +RUN micromamba shell init -s bash && \ + echo "micromamba activate smripost_linc" >> $HOME/.bashrc +ENV PATH="/opt/conda/envs/smripost_linc/bin:$PATH" \ + CPATH="/opt/conda/envs/smripost_linc/include:$CPATH" \ + LD_LIBRARY_PATH="/opt/conda/envs/smripost_linc/lib:$LD_LIBRARY_PATH" + +# Precaching atlases +COPY scripts/fetch_templates.py fetch_templates.py +RUN python 
fetch_templates.py && \ + rm fetch_templates.py && \ + find $HOME/.cache/templateflow -type d -exec chmod go=u {} + && \ + find $HOME/.cache/templateflow -type f -exec chmod go=u {} + + +# FSL environment +ENV LANG="C.UTF-8" \ + LC_ALL="C.UTF-8" \ + PYTHONNOUSERSITE=1 \ + FSLDIR="/opt/conda/envs/smripost_linc" \ + FSLOUTPUTTYPE="NIFTI_GZ" \ + FSLMULTIFILEQUIT="TRUE" \ + FSLLOCKDIR="" \ + FSLMACHINELIST="" \ + FSLREMOTECALL="" \ + FSLGECUDAQ="cuda.q" + +# Unless otherwise specified each process should only use one thread - nipype +# will handle parallelization +ENV MKL_NUM_THREADS=1 \ + OMP_NUM_THREADS=1 + +# Installing sMRIPost-LINC +COPY --from=src /src/dist/*.whl . +RUN pip install --no-cache-dir $( ls *.whl )[test] + +RUN find $HOME -type d -exec chmod go=u {} + && \ + find $HOME -type f -exec chmod go=u {} + && \ + rm -rf $HOME/.npm $HOME/.conda $HOME/.empty + +# For detecting the container +ENV IS_DOCKER_8395080871=1 + +RUN ldconfig +WORKDIR /tmp +ENTRYPOINT ["/opt/conda/envs/smripost_linc/bin/smripost-linc"] + +ARG BUILD_DATE +ARG VCS_REF +ARG VERSION +LABEL org.label-schema.build-date=$BUILD_DATE \ + org.label-schema.name="sMRIPost-LINC" \ + org.label-schema.description="sMRIPost-LINC - fMRI post-processing with AROMA" \ + org.label-schema.url="https://smripost_linc.org" \ + org.label-schema.vcs-ref=$VCS_REF \ + org.label-schema.vcs-url="https://github.com/pennlinc/smripost_linc" \ + org.label-schema.version=$VERSION \ + org.label-schema.schema-version="1.0" diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..145eaa0 --- /dev/null +++ b/Makefile @@ -0,0 +1,17 @@ +.PHONY: help docker-build +.DEFAULT: help + +tag="smripost_linc" + +help: + @echo "Premade recipes" + @echo + @echo "make docker-build [tag=TAG]" + @echo "\tBuilds a docker image from source. Defaults to 'smripost_linc' tag." 
+ + +docker-build: + docker build --rm -t $(tag) \ + --build-arg BUILD_DATE=`date -u +"%Y-%m-%dT%H:%M:%SZ"` \ + --build-arg VCS_REF=`git rev-parse --short HEAD` \ + --build-arg VERSION=`hatch version` . diff --git a/README.md b/README.md index 245e895..67dffe3 100644 --- a/README.md +++ b/README.md @@ -1,2 +1,22 @@ # smripost-linc -Aggregate interesting derivatives from sMRIPrep/Freesurfer derivatives + +[![Docker Image](https://img.shields.io/badge/docker-pennlinc/smripost--linc-brightgreen.svg?logo=docker&style=flat)](https://hub.docker.com/r/pennlinc/smripost-linc/tags/) + +sMRIPost-LINC is a BIDS App for aggregating interesting outputs from from sMRIPrep/Freesurfer derivatives. + +----- + +**Table of Contents** + +- [Installation](#installation) +- [License](#license) + +## Installation + +```console +docker pull pennlinc/smripost-linc:main +``` + +## License + +`sMRIPost-LINC` is distributed under the terms of the [BSD-3](https://spdx.org/licenses/BSD-3-Clause.html) license. diff --git a/REFERENCES.md b/REFERENCES.md new file mode 100644 index 0000000..0cceb35 --- /dev/null +++ b/REFERENCES.md @@ -0,0 +1,15 @@ +| Tool (**Package**) | Citation(s) | Link to code or documentation | +|-----|-----|-----| +| **FSL** | | https://doi.org/10.1016/j.neuroimage.2004.07.051 https://doi.org/10.1016/j.neuroimage.2008.10.055 https://doi.org/10.1016/j.neuroimage.2011.09.015 +| SUSAN | https://doi.org/10.1023/A:1007963824710 | https://fsl.fmrib.ox.ac.uk/fsl/fslwiki/SUSAN | +| MELODIC | | https://fsl.fmrib.ox.ac.uk/fsl/fslwiki/MELODIC | +| ICA-AROMA | http://www.sciencedirect.com/science/article/pii/S1053811915001822 | https://github.com/rhr-pruim/ICA-AROMA/ | +| **Other** | | | +| nibabel | https://doi.org/10.5281/zenodo.60808 | https://github.com/nipy/nibabel/ | +| nilearn | https://doi.org/10.3389/fninf.2014.00014 | https://github.com/nilearn/nilearn/ | +| nipype | https://doi.org/10.3389/fninf.2011.00013 https://doi.org/10.5281/zenodo.581704 | 
https://github.com/nipy/nipype/ | +| **Graphics** | | | +| seaborn | https://doi.org/10.5281/zenodo.883859 | https://github.com/mwaskom/seaborn | +| matplotlib 2.0.0 | https://doi.org/10.5281/zenodo.248351 | https://github.com/matplotlib/matplotlib | +| cwebp | https://developers.google.com/speed/webp/docs/webp_study https://developers.google.com/speed/webp/docs/webp_lossless_alpha_study | https://developers.google.com/speed/webp/ | +| SVGO | | https://github.com/svg/svgo | diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 0000000..d4bb2cb --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = . +BUILDDIR = _build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
+%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/_static/theme_overrides.css b/docs/_static/theme_overrides.css new file mode 100644 index 0000000..fad2f17 --- /dev/null +++ b/docs/_static/theme_overrides.css @@ -0,0 +1,21 @@ +/* override table width restrictions */ +@media screen and (min-width: 767px) { + + .wy-table-responsive table td { + /* !important prevents the common CSS stylesheets from overriding + this as on RTD they are loaded after this stylesheet */ + white-space: normal !important; + } + + .wy-table-responsive { + overflow: visible !important; + } +} + + +/* Fix parameter type style */ +.function .classifier { + margin: 0 0 0 5px; + padding: 3px; + background: rgba(0, 0, 0, .1); +} diff --git a/docs/api.rst b/docs/api.rst new file mode 100644 index 0000000..0d90a37 --- /dev/null +++ b/docs/api.rst @@ -0,0 +1,38 @@ +.. include:: links.rst + +================ +Developers - API +================ + +The *NiPreps* community and contributing guidelines +--------------------------------------------------- +*sMRIPost-LINC* is a *NiPreps* application, and abides by the +`NiPreps Community guidelines `__. +Please, make sure you have read and understood all the documentation +provided in the `NiPreps portal `__ before +you get started. + +Setting up your development environment +--------------------------------------- +We believe that *sMRIPost-LINC* must be free to use, inspect, and critique. +Correspondingly, you should be free to modify our software to improve it +or adapt it to new use cases and we especially welcome contributions to +improve it or its documentation. + +We actively direct efforts into making the scrutiny and improvement processes +as easy as possible. +As part of such efforts, we maintain some +`tips and guidelines for developers `__ +to help minimize your burden if you want to modify the software. + +Internal configuration system +----------------------------- + +.. 
automodule:: smripost_linc.config + :members: from_dict, load, get, dumps, to_filename, init_spaces + +Workflows +--------- + +.. automodule:: smripost_linc.workflows.base +.. automodule:: smripost_linc.workflows.aroma diff --git a/docs/changes.rst b/docs/changes.rst new file mode 100644 index 0000000..ac10c4f --- /dev/null +++ b/docs/changes.rst @@ -0,0 +1,7 @@ +.. include:: links.rst + +---------- +What's new +---------- + +.. include:: ../CHANGES.rst diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 0000000..1bb2e0c --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,356 @@ +# smripost_linc documentation build configuration file, created by +# sphinx-quickstart on Mon May 9 09:04:25 2016. +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import sys + +from packaging import ( + version as pver, # Avoid distutils.LooseVersion which is deprecated +) +from sphinx import __version__ as sphinxversion + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.append(os.path.abspath('sphinxext')) +sys.path.insert(0, os.path.abspath('../wrapper')) + +from github_link import make_linkcode_resolve # noqa: E402 + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = '1.5.3' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named "sphinx.ext.*") or your custom +# ones. 
+extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.doctest', + 'sphinx.ext.intersphinx', + 'sphinx.ext.coverage', + 'sphinx.ext.mathjax', + 'sphinx.ext.linkcode', + 'sphinx.ext.napoleon', + 'sphinxarg.ext', # argparse extension + 'nipype.sphinxext.plot_workflow', +] + +# Mock modules in autodoc: +autodoc_mock_imports = [ + 'numpy', + 'nitime', + 'matplotlib', +] + +if pver.parse(sphinxversion) >= pver.parse('1.7.0'): + autodoc_mock_imports += [ + 'pandas', + 'nilearn', + 'seaborn', + ] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# Accept custom section names to be parsed for numpy-style docstrings +# of parameters. +# Requires pinning sphinxcontrib-napoleon to a specific commit while +# https://github.com/sphinx-contrib/napoleon/pull/10 is merged. +napoleon_use_param = False +napoleon_custom_sections = [ + ('Inputs', 'Parameters'), + ('Outputs', 'Parameters'), +] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = [".rst", ".md"] +source_suffix = '.rst' + +# The encoding of source files. +# source_encoding = "utf-8-sig" + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = 'sMRIPost-LINC' +author = 'The sMRIPost-LINC developers' +copyright = f'2016-, {author}' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The short X.Y version. +version = 'version' +# The full version, including alpha/beta/rc tags. +release = 'version' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. 
+language = 'en' + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This patterns also effect to html_static_path and html_extra_path +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = False + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = 'sphinx_rtd_theme' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +# html_theme_options = {} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. +# " v documentation" by default. 
+# html_title = u'smripost_linc vversion' + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (relative to this directory) to use as a favicon of +# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not None, a 'Last updated on:' timestamp is inserted at every page +# bottom, using the given strftime format. +# The empty string is equivalent to '%b %d, %Y'. +# html_last_updated_fmt = None + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." 
is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g., ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# 'ja' uses this config value. +# 'zh' user can custom change `jieba` dictionary path. +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = 'smripost_linc_doc' + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + (master_doc, 'smripost_linc.tex', 'sMRIPost-LINC Documentation', author, 'manual'), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. 
+# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [(master_doc, 'smripost-linc', 'sMRIPost-LINC Documentation', [author], 1)] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + master_doc, + 'sMRIPost-LINC', + 'sMRIPost-LINC Documentation', + author, + 'sMRIPost-LINC', + 'One line description of project.', + 'Miscellaneous', + ), +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + +# The following is used by sphinx.ext.linkcode to provide links to github +linkcode_resolve = make_linkcode_resolve( + 'smripost_linc', + 'https://github.com/pennlinc/sMRIPost-LINC/blob/{revision}/{package}/{path}#L{lineno}', +) + +# Example configuration for intersphinx: refer to the Python standard library. 
+intersphinx_mapping = { + 'python': ('https://docs.python.org/3/', None), + 'numpy': ('https://numpy.org/doc/stable/', None), + 'scipy': ('https://docs.scipy.org/doc/scipy/', None), + 'matplotlib': ('https://matplotlib.org/stable/', None), + 'bids': ('https://bids-standard.github.io/pybids/', None), + 'nibabel': ('https://nipy.org/nibabel/', None), + 'nipype': ('https://nipype.readthedocs.io/en/latest/', None), + 'niworkflows': ('https://www.pennlinc.org/niworkflows/', None), + 'fmriprep': ('https://fmriprep.org/en/stable/', None), + 'sdcflows': ('https://www.pennlinc.org/sdcflows/', None), + 'smriprep': ('https://www.pennlinc.org/smriprep/', None), + 'templateflow': ('https://www.templateflow.org/python-client', None), + 'tedana': ('https://tedana.readthedocs.io/en/latest/', None), +} + +suppress_warnings = ['image.nonlocal_uri'] + + +def setup(app): + app.add_css_file('theme_overrides.css') + # We need this for the boilerplate script + app.add_js_file('https://cdn.rawgit.com/chrisfilo/zenodo.js/v0.1/zenodo.js') diff --git a/docs/faq.rst b/docs/faq.rst new file mode 100644 index 0000000..49b67b0 --- /dev/null +++ b/docs/faq.rst @@ -0,0 +1,21 @@ +.. include:: links.rst + +================================ +FAQ - Frequently Asked Questions +================================ + +.. contents:: + :local: + :depth: 1 + +What is fMRIPost? +----------------- + +fMRIPost workflows are BIDS Apps that ingest fMRI preprocessing derivative datasets. +They fall under the broader umbrella of NiPost workflows, +which are pipelines that perform post-processing on BIDS-Derivative datasets. + +These workflows are primarily tested against the outputs of the `fMRIPrep`_ pipeline, +but we plan to ensure that they can work with derivatives from any pipeline that produces BIDS-Derivative compliant datasets. + +.. _fMRIPrep: https://fmriprep.org diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 0000000..f622292 --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,22 @@ +.. 
smripost_linc documentation master file, created by + sphinx-quickstart on Mon May 9 09:04:25 2016. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +.. include:: links.rst +.. include:: ../README.rst + +Contents +-------- + +.. toctree:: + :maxdepth: 3 + + installation + usage + workflows + outputs + spaces + faq + api + changes diff --git a/docs/installation.rst b/docs/installation.rst new file mode 100644 index 0000000..7d6ce50 --- /dev/null +++ b/docs/installation.rst @@ -0,0 +1,49 @@ +.. include:: links.rst + +------------ +Installation +------------ +*sMRIPost-LINC* should be installed using container technologies. + +.. code-block:: bash + docker pull pennlinc/smripost-linc:main + + +Containerized execution (Docker and Singularity) +================================================ +*sMRIPost-LINC* is a *NiPreps* application, and therefore follows some overarching principles +of containerized execution drawn from the BIDS-Apps protocols. +For detailed information of containerized execution of *NiPreps*, please visit the corresponding +`Docker `__ +or `Singularity `__ subsections. + +External Dependencies +--------------------- +*sMRIPost-LINC* is written using Python 3.8 (or above), and is based on +nipype_. + +*sMRIPost-LINC* requires some other neuroimaging software tools that are +not handled by the Python's packaging system (Pypi): + +- FSL_ (version 6.0.7.7) +- ANTs_ (version 2.5.1) +- AFNI_ (version 24.0.05) +- `C3D `_ (version 1.4.0) +- FreeSurfer_ (version 7.3.2) +- `bids-validator `_ (version 1.14.0) +- `connectome-workbench `_ (version 1.5.0) + +Not running on a local machine? - Data transfer +=============================================== +If you intend to run *sMRIPost-LINC* on a remote system, you will need to +make your data available within that system first. 
+ +For instance, here at the Poldrack Lab we use Stanford's +:abbr:`HPC (high-performance computing)` system, called Sherlock. +Sherlock enables `the following data transfer options +`_. + +Alternatively, more comprehensive solutions such as `Datalad +`_ will handle data transfers with the appropriate +settings and commands. +Datalad also performs version control over your data. diff --git a/docs/license.rst b/docs/license.rst new file mode 100644 index 0000000..e9006f3 --- /dev/null +++ b/docs/license.rst @@ -0,0 +1,25 @@ +About the *NiPreps* framework licensing +--------------------------------------- +Please check https://www.pennlinc.org/community/licensing/ for detailed +information on the criteria we use to license *sMRIPost-LINC* and other +projects of the framework. + +License information +------------------- +Copyright (c) the *NiPreps* Developers. + +*sMRIPost-LINC* is licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +Copyright (c), the *sMRIPost-LINC* developers and the CRN. +All rights reserved. + +All trademarks referenced herein are property of their respective holders. diff --git a/docs/links.rst b/docs/links.rst new file mode 100644 index 0000000..7e2c2c9 --- /dev/null +++ b/docs/links.rst @@ -0,0 +1,25 @@ +.. _Nipype: https://nipype.readthedocs.io/en/latest/ +.. _BIDS: https://bids.neuroimaging.io/ +.. _`BIDS Derivatives`: https://bids-specification.readthedocs.io/en/stable/05-derivatives/01-introduction.html +.. 
_`BEP 011`: https://bids-specification.readthedocs.io/en/bep011/05-derivatives/04-structural-derivatives.html +.. _`BEP 012`: https://bids-specification.readthedocs.io/en/bep012/05-derivatives/05-functional-derivatives.html +.. _Installation: installation.html +.. _workflows: workflows.html +.. _FSL: https://fsl.fmrib.ox.ac.uk/fsl/fslwiki/ +.. _ANTs: https://stnava.github.io/ANTs/ +.. _FreeSurfer: https://surfer.nmr.mgh.harvard.edu/ +.. _`submillimeter reconstruction`: https://surfer.nmr.mgh.harvard.edu/fswiki/SubmillimeterRecon +.. _`mri_robust_template`: https://surfer.nmr.mgh.harvard.edu/fswiki/mri_robust_template +.. _AFNI: https://afni.nimh.nih.gov/ +.. _GIFTI: https://www.nitrc.org/projects/gifti/ +.. _`Connectome Workbench`: https://www.humanconnectome.org/software/connectome-workbench.html +.. _`HCP Pipelines`: https://humanconnectome.org/software/hcp-mr-pipelines/ +.. _`Docker Engine`: https://www.docker.com/products/container-runtime +.. _`Docker installation`: https://docs.docker.com/install/ +.. _`Docker Hub`: https://hub.docker.com/r/pennlinc/smripost_linc/tags +.. _Singularity: https://github.com/singularityware/singularity +.. _SPM: https://www.fil.ion.ucl.ac.uk/spm/software/spm12/ +.. _TACC: https://www.tacc.utexas.edu/ +.. _tedana: https://github.com/me-ica/tedana +.. _`T2* workflow`: https://tedana.readthedocs.io/en/latest/generated/tedana.workflows.t2smap_workflow.html#tedana.workflows.t2smap_workflow # noqa +.. _`citation boilerplate`: https://www.pennlinc.org/intro/transparency/#citation-boilerplates diff --git a/docs/make.bat b/docs/make.bat new file mode 100644 index 0000000..32bb245 --- /dev/null +++ b/docs/make.bat @@ -0,0 +1,35 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=. +set BUILDDIR=_build + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. 
Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.https://www.sphinx-doc.org/ + exit /b 1 +) + +if "%1" == "" goto help + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/docs/outputs.rst b/docs/outputs.rst new file mode 100644 index 0000000..c43b906 --- /dev/null +++ b/docs/outputs.rst @@ -0,0 +1,135 @@ +.. include:: links.rst + +.. _outputs: + +--------------------------- +Outputs of *sMRIPost-LINC* +--------------------------- + +*sMRIPost-LINC* outputs conform to the :abbr:`BIDS (brain imaging data structure)` +Derivatives specification (see `BIDS Derivatives`_, along with the +upcoming `BEP 011`_ and `BEP 012`_). +*sMRIPost-LINC* generates three broad classes of outcomes: + +1. **Visual QA (quality assessment) reports**: + One :abbr:`HTML (hypertext markup language)` per subject, + that allows the user a thorough visual assessment of the quality + of processing and ensures the transparency of *sMRIPost-LINC* operation. + +2. **ICA outputs**: + Outputs from the independent component analysis (ICA). + For example, the mixing matrix and component weight maps. + +3. **Derivatives (denoised data)**: + Denoised fMRI data in the requested output spaces and resolutions. + +4. **Confounds**: + Time series of ICA components classified as noise. + + +Layout +------ + +Assuming sMRIPost-LINC is invoked with:: + + smripost_linc / / participant [OPTIONS] + +The outputs will be a `BIDS Derivatives`_ dataset of the form:: + + / + logs/ + sub-