diff --git a/.dockerignore b/.dockerignore index db74497f3f..f41447e32c 100644 --- a/.dockerignore +++ b/.dockerignore @@ -2,4 +2,6 @@ *.pyc *.egg-info __pycache__ - +docker/nipype_* +docker/test-* +.coverage \ No newline at end of file diff --git a/.noserc b/.noserc index 9507b1179b..d8c30da437 100644 --- a/.noserc +++ b/.noserc @@ -1,8 +1,9 @@ [nosetests] verbosity=3 - logging-level=DEBUG with-doctest=1 +with-doctest-ignore-unicode=1 + with-xunit=1 with-coverage=1 cover-branches=1 diff --git a/.travis.yml b/.travis.yml index dff3b123de..2c64797512 100644 --- a/.travis.yml +++ b/.travis.yml @@ -10,58 +10,40 @@ env: - INSTALL_DEB_DEPENDECIES=false - INSTALL_DEB_DEPENDECIES=true DUECREDIT_ENABLE=yes before_install: -- if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then wget http://repo.continuum.io/miniconda/Miniconda2-latest-Linux-x86_64.sh - -O miniconda.sh; else wget http://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh - -O miniconda.sh; fi -- chmod +x miniconda.sh -- "./miniconda.sh -b" -- if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then export PATH=/home/travis/miniconda2/bin:$PATH; else export PATH=/home/travis/miniconda3/bin:$PATH; fi +- wget http://repo.continuum.io/miniconda/Miniconda${TRAVIS_PYTHON_VERSION:0:1}-latest-Linux-x86_64.sh + -O /home/travis/.cache/miniconda.sh +- bash /home/travis/.cache/miniconda.sh -b -p /home/travis/miniconda +- export PATH=/home/travis/miniconda/bin:$PATH - if $INSTALL_DEB_DEPENDECIES; then sudo rm -rf /dev/shm; fi - if $INSTALL_DEB_DEPENDECIES; then sudo ln -s /run/shm /dev/shm; fi - bash <(wget -q -O- http://neuro.debian.net/_files/neurodebian-travis.sh) -- sudo apt-get update -- sudo apt-get install xvfb -- if $INSTALL_DEB_DEPENDECIES; then travis_retry sudo apt-get install -qq --no-install-recommends - fsl afni elastix; fi -- if $INSTALL_DEB_DEPENDECIES; then travis_retry sudo apt-get install -qq fsl-atlases; - fi -- if $INSTALL_DEB_DEPENDECIES; then source /etc/fsl/fsl.sh; fi -- if $INSTALL_DEB_DEPENDECIES; 
then source /etc/afni/afni.sh; fi +- sudo apt-get -y update +- sudo apt-get -y install xvfb fusefat +- if $INSTALL_DEB_DEPENDECIES; then travis_retry sudo apt-get install -y -qq + fsl afni elastix fsl-atlases; fi +- if $INSTALL_DEB_DEPENDECIES; then + source /etc/fsl/fsl.sh; + source /etc/afni/afni.sh; fi - export FSLOUTPUTTYPE=NIFTI_GZ -# Install vtk and fix numpy installation problem -# Fix numpy problem: https://github.com/enthought/enable/issues/34#issuecomment-2029381 -- if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then travis_retry sudo apt-get install -qq libx11-dev swig; - echo '[x11]' >> $HOME/.numpy-site.cfg; - echo 'library_dirs = /usr/lib64:/usr/lib:/usr/lib/x86_64-linux-gnu' >> $HOME/.numpy-site.cfg; - echo 'include_dirs = /usr/include:/usr/include/X11' >> $HOME/.numpy-site.cfg; - fi install: -- sudo apt-get install fusefat +- conda config --add channels conda-forge - conda update --yes conda -- conda create -n testenv --yes pip python=$TRAVIS_PYTHON_VERSION -- source activate testenv -- if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then pip install ordereddict; fi -- conda install --yes numpy scipy nose networkx python-dateutil -- if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then conda install --yes traits; else pip install traits; fi -- if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then conda install --yes vtk; fi -- pip install python-coveralls -- pip install nose-cov -- if [ ! 
-z "$DUECREDIT_ENABLE"]; then pip install --user -v duecredit; fi -# Add tvtk (PIL is required by blockcanvas) -# Install mayavi (see https://github.com/enthought/mayavi/issues/271) -- if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then - pip install http://effbot.org/downloads/Imaging-1.1.7.tar.gz; - pip install -e git+https://github.com/enthought/etsdevtools.git#egg=etsdevtools; - pip install -e git+https://github.com/enthought/blockcanvas.git#egg=blockcanvas; - pip install -e git+https://github.com/enthought/etsproxy.git#egg=etsproxy; - pip install https://github.com/dmsurti/mayavi/archive/4d4aaf315a29d6a86707dd95149e27d9ed2225bf.zip; - pip install -e git+https://github.com/enthought/ets.git#egg=ets; - fi +- conda update --all -y python=$TRAVIS_PYTHON_VERSION +# - if [[ "${INSTALL_DEB_DEPENDECIES}" == "true" && ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]]; then +# conda install -y vtk mayavi; fi +- conda install -y nipype +- pip install python-coveralls coverage doctest-ignore-unicode +- if [ ! -z "$DUECREDIT_ENABLE" ]; then pip install duecredit; fi +- rm -r /home/travis/miniconda/lib/python${TRAVIS_PYTHON_VERSION}/site-packages/nipype* +- pip install -r requirements.txt - pip install -e . 
+- export COVERAGE_PROCESS_START=$(pwd)/.coveragerc +- export COVERAGE_DATA_FILE=$(pwd)/.coverage +- echo "data_file = ${COVERAGE_DATA_FILE}" >> ${COVERAGE_PROCESS_START} script: -- python -W once:FSL:UserWarning:nipype `which nosetests` --with-doctest --with-cov --cover-package nipype --cov-config .coveragerc --logging-level=DEBUG --verbosity=3 +- python -W once:FSL:UserWarning:nipype `which nosetests` --with-doctest --with-doctest-ignore-unicode --with-cov --cover-package nipype --logging-level=DEBUG --verbosity=3 after_success: -- coveralls --config_file .coveragerc +- coveralls --config_file ${COVERAGE_PROCESS_START} deploy: provider: pypi user: satra diff --git a/CHANGES b/CHANGES index 1eb5dd81ab..d581872b38 100644 --- a/CHANGES +++ b/CHANGES @@ -1,6 +1,7 @@ Upcoming release 0.13 ===================== +* FIX: Use builtins open and unicode literals for py3 compatibility (https://github.com/nipy/nipype/pull/1572) * TST: reduce the size of docker images & use tags for images (https://github.com/nipy/nipype/pull/1564) * ENH: Implement missing inputs/outputs in FSL AvScale (https://github.com/nipy/nipype/pull/1563) * FIX: Fix symlink test in copyfile (https://github.com/nipy/nipype/pull/1570, https://github.com/nipy/nipype/pull/1586) diff --git a/Makefile b/Makefile index 8c7856e7ca..5ae05c5c98 100644 --- a/Makefile +++ b/Makefile @@ -46,21 +46,24 @@ clean-ctags: clean-doc: rm -rf doc/_build -clean: clean-build clean-pyc clean-so clean-ctags clean-doc +clean-tests: + rm -f .coverage + +clean: clean-build clean-pyc clean-so clean-ctags clean-doc clean-tests in: inplace # just a shortcut inplace: $(PYTHON) setup.py build_ext -i test-code: in - $(NOSETESTS) -s nipype --with-doctest + $(NOSETESTS) -s nipype --with-doctest --with-doctest-ignore-unicode test-doc: - $(NOSETESTS) -s --with-doctest --doctest-tests --doctest-extension=rst \ + $(NOSETESTS) -s --with-doctest 
--with-doctest-ignore-unicode --doctest-tests --doctest-extension=rst \ --doctest-fixtures=_fixture doc/ -test-coverage: - $(NOSETESTS) -s --with-doctest --with-coverage --cover-package=nipype \ +test-coverage: clean-tests in + $(NOSETESTS) -s --with-doctest --with-doctest-ignore-unicode --with-coverage --cover-package=nipype \ --config=.coveragerc test: clean test-code diff --git a/build_docs.py b/build_docs.py index 77b55eba53..e2ae75b7b9 100644 --- a/build_docs.py +++ b/build_docs.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -7,8 +8,8 @@ python setup.py build_sphinx """ - -from __future__ import print_function +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import open, str # Standard library imports import sys @@ -22,7 +23,7 @@ _info_fname = pjoin(os.path.dirname(__file__), 'nipype', 'info.py') INFO_VARS = {} -exec(open(_info_fname, 'rt').read(), {}, INFO_VARS) +exec(str(open(_info_fname, 'rt').read()), {}, INFO_VARS) DOC_BUILD_DIR = os.path.join('doc', '_build', 'html') DOC_DOCTREES_DIR = os.path.join('doc', '_build', 'doctrees') diff --git a/circle.yml b/circle.yml index e6d18aaf53..9674e095b5 100644 --- a/circle.yml +++ b/circle.yml @@ -14,33 +14,40 @@ dependencies: - sudo apt-get -y update && sudo apt-get install -y wget bzip2 override: - - mkdir -p ~/scratch/nose ~/examples - - if [[ ! -d ~/examples/nipype-tutorial ]]; then wget -q -O nipype-tutorial.tar.bz2 https://dl.dropbox.com/s/jzgq2nupxyz36bp/nipype-tutorial.tar.bz2 && tar xjf nipype-tutorial.tar.bz2 -C ~/examples/; fi + - mkdir -p ~/examples ~/scratch/nose ~/scratch/logs + - if [[ ! 
-d ~/examples/nipype-tutorial ]]; then wget --retry-connrefused --waitretry=5 --read-timeout=20 --timeout=15 -t 0 -q -O nipype-tutorial.tar.bz2 https://dl.dropbox.com/s/jzgq2nupxyz36bp/nipype-tutorial.tar.bz2 && tar xjf nipype-tutorial.tar.bz2 -C ~/examples/; fi - if [[ ! -d ~/examples/nipype-fsl_course_data ]]; then wget --retry-connrefused --waitretry=5 --read-timeout=20 --timeout=15 -t 0 -q https://3552243d5be815c1b09152da6525cb8fe7b900a6.googledrive.com/host/0BxI12kyv2olZVUswazA3NkFvOXM/nipype-fsl_course_data.tar.gz && tar xzf nipype-fsl_course_data.tar.gz -C ~/examples/; fi - - if [[ ! -d ~/examples/feeds ]]; then wget -q -O fsl-feeds.tar.gz https://googledrive.com/host/0BxI12kyv2olZNXBONlJKV0Y1Tm8 && tar xzf fsl-feeds.tar.gz -C ~/examples/; fi + - if [[ ! -d ~/examples/feeds ]]; then wget --retry-connrefused --waitretry=5 --read-timeout=20 --timeout=15 -t 0 -q https://3552243d5be815c1b09152da6525cb8fe7b900a6.googledrive.com/host/0BxI12kyv2olZVUswazA3NkFvOXM/fsl-5.0.9-feeds.tar.gz && tar xzf fsl-5.0.9-feeds.tar.gz -C ~/examples/; fi - if [[ -e ~/docker/image.tar ]]; then docker load -i ~/docker/image.tar; fi - - docker build -f docker/nipype_test_py27/Dockerfile -t nipype/nipype_test:py27 . : + - docker build -f docker/nipype_test/Dockerfile_py35 -t nipype/nipype_test:py35 . : + timeout: 1600 + - docker build -f docker/nipype_test/Dockerfile_py27 -t nipype/nipype_test:py27 . 
: timeout: 1600 - mkdir -p ~/docker; docker save nipype/nipype_test:py27 > ~/docker/image.tar : timeout: 1600 + - pip install xunitmerge test: override: - - docker run -v /etc/localtime:/etc/localtime:ro -v ~/scratch:/scratch -w /root/src/nipype/doc nipype/nipype_test:py27 /usr/bin/run_builddocs.sh - - docker run -v /etc/localtime:/etc/localtime:ro -v ~/examples:/root/examples:ro -v $(pwd)/scratch:/scratch -w /scratch nipype/nipype_test:py27 /usr/bin/run_examples.sh test_spm Linear /root/examples/ workflow3d : + - docker run -v /etc/localtime:/etc/localtime:ro -v ~/scratch:/scratch -w /root/src/nipype/doc nipype/nipype_test:py35 /usr/bin/run_builddocs.sh + - docker run -v /etc/localtime:/etc/localtime:ro -e FSL_COURSE_DATA="/root/examples/nipype-fsl_course_data" -v ~/examples:/root/examples:ro -v ~/scratch:/scratch -w /root/src/nipype nipype/nipype_test:py35 /usr/bin/run_nosetests.sh py35 : + timeout: 2600 + - docker run -v /etc/localtime:/etc/localtime:ro -e FSL_COURSE_DATA="/root/examples/nipype-fsl_course_data" -v ~/examples:/root/examples:ro -v ~/scratch:/scratch -w /root/src/nipype nipype/nipype_test:py27 /usr/bin/run_nosetests.sh py27 : + timeout: 2600 + - docker run -v /etc/localtime:/etc/localtime:ro -v ~/examples:/root/examples:ro -v ~/scratch:/scratch -w /scratch nipype/nipype_test:py35 /usr/bin/run_examples.sh test_spm Linear /root/examples/ workflow3d : timeout: 1600 - - docker run -v /etc/localtime:/etc/localtime:ro -v ~/examples:/root/examples:ro -v $(pwd)/scratch:/scratch -w /scratch nipype/nipype_test:py27 /usr/bin/run_examples.sh test_spm Linear /root/examples/ workflow4d : + - docker run -v /etc/localtime:/etc/localtime:ro -v ~/examples:/root/examples:ro -v ~/scratch:/scratch -w /scratch nipype/nipype_test:py35 /usr/bin/run_examples.sh test_spm Linear /root/examples/ workflow4d : timeout: 1600 - - docker run -v /etc/localtime:/etc/localtime:ro -v ~/examples:/root/examples:ro -v $(pwd)/scratch:/scratch -w /scratch nipype/nipype_test:py27 
/usr/bin/run_examples.sh fmri_fsl_feeds Linear /root/examples/ l1pipeline - - docker run -v /etc/localtime:/etc/localtime:ro -v ~/examples:/root/examples:ro -v $(pwd)/scratch:/scratch -w /scratch nipype/nipype_test:py27 /usr/bin/run_examples.sh fmri_spm_dartel Linear /root/examples/ level1 : + - docker run -v /etc/localtime:/etc/localtime:ro -v ~/examples:/root/examples:ro -v ~/scratch:/scratch -w /scratch nipype/nipype_test:py35 /usr/bin/run_examples.sh fmri_fsl_feeds Linear /root/examples/ l1pipeline + - docker run -v /etc/localtime:/etc/localtime:ro -v ~/examples:/root/examples:ro -v ~/scratch:/scratch -w /scratch nipype/nipype_test:py35 /usr/bin/run_examples.sh fmri_spm_dartel Linear /root/examples/ level1 : timeout: 1600 - - docker run -v /etc/localtime:/etc/localtime:ro -v ~/examples:/root/examples:ro -v $(pwd)/scratch:/scratch -w /scratch nipype/nipype_test:py27 /usr/bin/run_examples.sh fmri_spm_dartel Linear /root/examples/ l2pipeline : + - docker run -v /etc/localtime:/etc/localtime:ro -v ~/examples:/root/examples:ro -v ~/scratch:/scratch -w /scratch nipype/nipype_test:py35 /usr/bin/run_examples.sh fmri_spm_dartel Linear /root/examples/ l2pipeline : timeout: 1600 - - docker run -v /etc/localtime:/etc/localtime:ro -v ~/examples:/root/examples:ro -v $(pwd)/scratch:/scratch -w /scratch nipype/nipype_test:py27 /usr/bin/run_examples.sh fmri_fsl_reuse Linear /root/examples/ level1_workflow - - docker run -v /etc/localtime:/etc/localtime:ro -v ~/examples:/root/examples:ro -v $(pwd)/scratch:/scratch -w /scratch nipype/nipype_test:py27 /usr/bin/run_examples.sh fmri_spm_nested Linear /root/examples/ level1 - - docker run -v /etc/localtime:/etc/localtime:ro -v ~/examples:/root/examples:ro -v $(pwd)/scratch:/scratch -w /scratch nipype/nipype_test:py27 /usr/bin/run_examples.sh fmri_spm_nested Linear /root/examples/ l2pipeline - - docker run -e FSL_COURSE_DATA="/root/examples/nipype-fsl_course_data" -v /etc/localtime:/etc/localtime:ro -v ~/scratch:/scratch -w /scratch 
nipype/nipype_test:py27 /usr/bin/run_nosetests.sh : - timeout: 2600 + - docker run -v /etc/localtime:/etc/localtime:ro -v ~/examples:/root/examples:ro -v ~/scratch:/scratch -w /scratch nipype/nipype_test:py35 /usr/bin/run_examples.sh fmri_fsl_reuse Linear /root/examples/ level1_workflow + - docker run -v /etc/localtime:/etc/localtime:ro -e NIPYPE_NUMBER_OF_CPUS=4 -v ~/examples:/root/examples:ro -v ~/scratch:/scratch -w /scratch nipype/nipype_test:py27 /usr/bin/run_examples.sh fmri_spm_nested MultiProc /root/examples/ level1 + - docker run -v /etc/localtime:/etc/localtime:ro -e NIPYPE_NUMBER_OF_CPUS=4 -v ~/examples:/root/examples:ro -v ~/scratch:/scratch -w /scratch nipype/nipype_test:py35 /usr/bin/run_examples.sh fmri_spm_nested MultiProc /root/examples/ level1 + - docker run -v /etc/localtime:/etc/localtime:ro -e NIPYPE_NUMBER_OF_CPUS=4 -v ~/examples:/root/examples:ro -v ~/scratch:/scratch -w /scratch nipype/nipype_test:py35 /usr/bin/run_examples.sh fmri_spm_nested MultiProc /root/examples/ l2pipeline + post: - bash docker/circleci/teardown.sh @@ -48,7 +55,8 @@ general: artifacts: - "~/docs" - "~/logs" - - "~/coverage.xml" - - "~/nosetests.xml" - - "~/builddocs.log" + - "~/coverage_py27.xml" + - "~/coverage_py35.xml" + - "~/nosetests_py27.xml" + - "~/nosetests_py35.xml" - "~/scratch" diff --git a/doc/devel/index.rst b/doc/devel/index.rst index 4c7178b83a..44fbf40406 100644 --- a/doc/devel/index.rst +++ b/doc/devel/index.rst @@ -19,6 +19,7 @@ conventions documented in the `NIPY Developers Guide architecture provenance software_using_nipype + testing_nipype .. 
include:: ../links_names.txt diff --git a/doc/devel/testing_nipype.rst b/doc/devel/testing_nipype.rst new file mode 100644 index 0000000000..491925999d --- /dev/null +++ b/doc/devel/testing_nipype.rst @@ -0,0 +1,73 @@ +============== +Testing nipype +============== + +In order to ensure the stability of each release of Nipype, the project uses two +continuous integration services: `CircleCI `_ +and `Travis CI `_. +If both batteries of tests are passing, the following badges should be shown in green color: + +.. image:: https://travis-ci.org/nipy/nipype.png?branch=master + :target: https://travis-ci.org/nipy/nipype + +.. image:: https://circleci.com/gh/nipy/nipype/tree/master.svg?style=svg + :target: https://circleci.com/gh/nipy/nipype/tree/master + + +Tests implementation +-------------------- + +Nipype testing framework is built upon `nose `_. +At the time these guidelines were written, Nipype implements 17638 tests. + +To run the tests locally, first get nose installed:: + + pip install nose + + +Then, after nipype is `installed in developer mode <../users/install.html#nipype-for-developers>`_, +the tests can be run with the following simple command:: + + make test + + +Skip tests +---------- + +Nipype will skip some tests depending on the currently available software and data +dependencies. Installing software dependencies and downloading the necessary data +will reduce the number of skipped tests. + +Some tests in Nipype make use of some images distributed within the `FSL course data +`_. This reduced version of the package can be downloaded `here +`_. +To enable the tests depending on these data, just unpack the targz file and set the :code:`FSL_COURSE_DATA` environment +variable to point to that folder. + + +Testing Nipype using Docker +--------------------------- + +As of :code:`nipype-0.13`, Nipype is tested inside Docker containers. 
Once the developer +`has installed the Docker Engine `_, testing +Nipype is as easy as follows:: + + cd path/to/nipype/ + docker build -f docker/nipype_test/Dockerfile_py27 -t nipype/nipype_test:py27 . + docker run -it --rm -v /etc/localtime:/etc/localtime:ro \ + -e FSL_COURSE_DATA="/root/examples/nipype-fsl_course_data" \ + -v ~/examples:/root/examples:ro \ + -v ~/scratch:/scratch \ + -w /root/src/nipype \ + nipype/nipype_test:py27 /usr/bin/run_nosetests.sh + +For running nipype in Python 3.5:: + + cd path/to/nipype/ + docker build -f docker/nipype_test/Dockerfile_py35 -t nipype/nipype_test:py35 . + docker run -it --rm -v /etc/localtime:/etc/localtime:ro \ + -e FSL_COURSE_DATA="/root/examples/nipype-fsl_course_data" \ + -v ~/examples:/root/examples:ro \ + -v ~/scratch:/scratch \ + -w /root/src/nipype \ + nipype/nipype_test:py35 /usr/bin/run_nosetests.sh diff --git a/doc/sphinxext/autosummary_generate.py b/doc/sphinxext/autosummary_generate.py index aefd8552d8..d30edf6aec 100755 --- a/doc/sphinxext/autosummary_generate.py +++ b/doc/sphinxext/autosummary_generate.py @@ -16,9 +16,9 @@ ./ext/autosummary_generate.py -o source/generated source/*.rst """ +from __future__ import print_function, unicode_literals +from builtins import open -from __future__ import print_function -import glob import re import inspect import os diff --git a/doc/sphinxext/numpy_ext/__init__.py b/doc/sphinxext/numpy_ext/__init__.py index e69de29bb2..40a96afc6f 100644 --- a/doc/sphinxext/numpy_ext/__init__.py +++ b/doc/sphinxext/numpy_ext/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/doc/sphinxext/numpy_ext/docscrape.py b/doc/sphinxext/numpy_ext/docscrape.py index affb1ba6b5..a4ff432715 100644 --- a/doc/sphinxext/numpy_ext/docscrape.py +++ b/doc/sphinxext/numpy_ext/docscrape.py @@ -1,7 +1,7 @@ +# -*- coding: utf-8 -*- """Extract reference documentation from the NumPy source tree. 
""" - from __future__ import print_function from future import standard_library standard_library.install_aliases() @@ -13,7 +13,7 @@ import pydoc from warnings import warn -from nipype.external.six import StringIO +from io import StringIO class Reader(object): diff --git a/doc/sphinxext/numpy_ext/docscrape_sphinx.py b/doc/sphinxext/numpy_ext/docscrape_sphinx.py index 783f3be85a..99ac2f4d00 100644 --- a/doc/sphinxext/numpy_ext/docscrape_sphinx.py +++ b/doc/sphinxext/numpy_ext/docscrape_sphinx.py @@ -1,11 +1,12 @@ -from __future__ import absolute_import +# -*- coding: utf-8 -*- +from __future__ import absolute_import, unicode_literals +from builtins import str, bytes import re import inspect import textwrap import pydoc import sphinx from .docscrape import NumpyDocString, FunctionDoc, ClassDoc -from nipype.external.six import string_types class SphinxDocString(NumpyDocString): @@ -146,7 +147,7 @@ def _str_references(self): out = [] if self['References']: out += self._str_header('References') - if isinstance(self['References'], string_types): + if isinstance(self['References'], (str, bytes)): self['References'] = [self['References']] out.extend(self['References']) out += [''] diff --git a/doc/sphinxext/numpy_ext/numpydoc.py b/doc/sphinxext/numpy_ext/numpydoc.py index e27a4a6184..ccce7aad03 100644 --- a/doc/sphinxext/numpy_ext/numpydoc.py +++ b/doc/sphinxext/numpy_ext/numpydoc.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- """ ======== numpydoc diff --git a/docker/circleci/run_examples.sh b/docker/circleci/run_examples.sh index 6c9d3357b5..99d57add58 100644 --- a/docker/circleci/run_examples.sh +++ b/docker/circleci/run_examples.sh @@ -4,7 +4,6 @@ set -x set -u mkdir -p /root/.nipype -mkdir -p /scratch/logs echo '[logging]' > /root/.nipype/nipype.cfg echo 'workflow_level = DEBUG' >> /root/.nipype/nipype.cfg echo 'interface_level = DEBUG' >> /root/.nipype/nipype.cfg @@ -13,3 +12,5 @@ echo 'log_to_file = true' >> /root/.nipype/nipype.cfg echo 'log_directory = 
/scratch/logs/' >> /root/.nipype/nipype.cfg python /root/src/nipype/tools/run_examples.py $@ + +chmod 777 -R /scratch/logs \ No newline at end of file diff --git a/docker/circleci/run_nosetests.sh b/docker/circleci/run_nosetests.sh index 6898bdc0d1..9e912e20de 100644 --- a/docker/circleci/run_nosetests.sh +++ b/docker/circleci/run_nosetests.sh @@ -3,9 +3,36 @@ set -e set -x set -u -cd /root/src/nipype -mkdir -p /scratch/nose -nosetests -c /root/src/nipype/.noserc --xunit-file="/scratch/nosetests.xml" --cover-xml-file="/scratch/coverage.xml" -chmod 777 /scratch/nosetests.xml -chmod 777 /scratch/coverage.xml -chmod 777 -R /scratch/nose \ No newline at end of file +PYTHON_VERSION=$( python -c "import sys; print('{}{}'.format(sys.version_info[0], sys.version_info[1]))" ) + +# Create necessary directories +mkdir -p /scratch/nose /scratch/crashfiles /scratch/logs/py${PYTHON_VERSION} + +# Create a nipype config file +mkdir -p /root/.nipype +echo '[logging]' > /root/.nipype/nipype.cfg +echo 'log_to_file = true' >> /root/.nipype/nipype.cfg +echo "log_directory = /scratch/logs/py${PYTHON_VERSION}" >> /root/.nipype/nipype.cfg + +# Enable profile_runtime tests only for python 2.7 +if [[ "${PYTHON_VERSION}" -lt "30" ]]; then + echo '[execution]' >> /root/.nipype/nipype.cfg + echo 'profile_runtime = true' >> /root/.nipype/nipype.cfg +fi + +# Run tests +cd /root/src/nipype/ +make clean +nosetests -s nipype -c /root/src/nipype/.noserc --xunit-file="/scratch/nosetests_py${PYTHON_VERSION}.xml" --cover-xml-file="/scratch/coverage_py${PYTHON_VERSION}.xml" + +# Workaround: run here the profiler tests in python 3 +if [[ "${PYTHON_VERSION}" -ge "30" ]]; then + echo '[execution]' >> /root/.nipype/nipype.cfg + echo 'profile_runtime = true' >> /root/.nipype/nipype.cfg + nosetests nipype/interfaces/tests/test_runtime_profiler.py --xunit-file="/scratch/nosetests_py${PYTHON_VERSION}_profiler.xml" --cover-xml-file="/scratch/coverage_py${PYTHON_VERSION}_profiler.xml" + nosetests 
nipype/pipeline/plugins/tests/test_multiproc*.py --xunit-file="/scratch/nosetests_py${PYTHON_VERSION}_multiproc.xml" --cover-xml-file="/scratch/coverage_py${PYTHON_VERSION}_multiproc.xml" +fi + +# Copy crashfiles to scratch +for i in $(find /root/src/nipype/ -name "crash-*" ); do cp $i /scratch/crashfiles/; done +chmod 777 -R /scratch/* diff --git a/docker/circleci/teardown.sh b/docker/circleci/teardown.sh index 6f64658b0d..ac14460a3c 100644 --- a/docker/circleci/teardown.sh +++ b/docker/circleci/teardown.sh @@ -7,10 +7,10 @@ set -u set -e mkdir -p ${CIRCLE_TEST_REPORTS}/nose -sudo mv ~/scratch/builddocs.log ~/builddocs.log -sudo cp ~/scratch/nosetests.xml ${CIRCLE_TEST_REPORTS}/nose/${CIRCLE_PROJECT_REPONAME}.xml -sudo mv ~/scratch/coverage.xml ~/coverage.xml +xunitmerge ~/scratch/nosetests*.xml ${CIRCLE_TEST_REPORTS}/nose/${CIRCLE_PROJECT_REPONAME}.xml +sudo mv ~/scratch/coverage*.xml ~/ mkdir -p ~/docs sudo mv ~/scratch/docs/* ~/docs/ mkdir -p ~/logs -sudo mv $(pwd)/scratch/logs/* ~/logs/ \ No newline at end of file +sudo mv ~/scratch/builddocs.log ~/logs/builddocs.log +sudo mv ~/scratch/logs/* ~/logs/ \ No newline at end of file diff --git a/docker/nipype_test_base/Dockerfile b/docker/nipype_test/Dockerfile_base similarity index 72% rename from docker/nipype_test_base/Dockerfile rename to docker/nipype_test/Dockerfile_base index a0c8760698..4e242c5f6b 100644 --- a/docker/nipype_test_base/Dockerfile +++ b/docker/nipype_test/Dockerfile_base @@ -30,9 +30,6 @@ FROM neurodebian:latest MAINTAINER Nipype developers # Preparations -RUN ln -snf /bin/bash /bin/sh -ENV SHELL /bin/bash - ARG DEBIAN_FRONTEND=noninteractive RUN sed -i -e 's,main$,main contrib non-free,g' /etc/apt/sources.list.d/neurodebian.sources.list && \ apt-get -y update && \ @@ -45,22 +42,39 @@ RUN sed -i -e 's,main$,main contrib non-free,g' /etc/apt/sources.list.d/neurodeb fusefat \ graphviz \ make \ + ruby \ fsl-core && \ rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* && \ echo ". 
/etc/fsl/fsl.sh" >> /etc/bash.bashrc +# Use bash +RUN ln -snf /bin/bash /bin/sh +ENV SHELL /bin/bash + +# Set FSL environment variables ENV FSLDIR=/usr/share/fsl/5.0 \ FSLOUTPUTTYPE=NIFTI_GZ \ - PATH=/usr/lib/fsl/5.0:$PATH \ FSLMULTIFILEQUIT=TRUE \ POSSUMDIR=/usr/share/fsl/5.0 \ - LD_LIBRARY_PATH=/usr/lib/fsl/5.0:$LD_LIBRARY_PATH \ FSLTCLSH=/usr/bin/tclsh \ - FSLWISH=/usr/bin/wish + FSLWISH=/usr/bin/wish \ + PATH=/usr/lib/fsl/5.0:$PATH \ + LD_LIBRARY_PATH=/usr/lib/fsl/5.0:$LD_LIBRARY_PATH -# Install Matlab: from the good old install_spm_mcr.sh of @chrisfilo -WORKDIR /opt +# Install fake-S3 +ENV GEM_HOME /usr/local/bundle +ENV BUNDLE_PATH="$GEM_HOME" \ + BUNDLE_BIN="$GEM_HOME/bin" \ + BUNDLE_SILENCE_ROOT_WARNING=1 \ + BUNDLE_APP_CONFIG="$GEM_HOME" +ENV PATH $BUNDLE_BIN:$PATH +RUN mkdir -p "$GEM_HOME" "$BUNDLE_BIN" && \ + chmod 777 "$GEM_HOME" "$BUNDLE_BIN" + +RUN gem install fakes3 +# Install Matlab MCR: from the good old install_spm_mcr.sh of @chrisfilo +WORKDIR /opt RUN echo "destinationFolder=/opt/mcr" > mcr_options.txt && \ echo "agreeToLicense=yes" >> mcr_options.txt && \ echo "outputFile=/tmp/matlabinstall_log" >> mcr_options.txt && \ @@ -72,33 +86,42 @@ RUN echo "destinationFolder=/opt/mcr" > mcr_options.txt && \ matlab_installer/install -inputFile mcr_options.txt && \ rm -rf matlab_installer mcr_options.txt -ENV SPMMCRCMD "/opt/spm12/run_spm12.sh /opt/mcr/v85/ script" -ENV FORCE_SPMMCR 1 - # Install SPM RUN curl -sSL http://www.fil.ion.ucl.ac.uk/spm/download/restricted/utopia/dev/spm12_r6472_Linux_R2015a.zip -o spm12.zip && \ unzip spm12.zip && \ rm -rf spm12.zip +ENV MATLABCMD="/opt/mcr/v85/toolbox/matlab" \ + SPMMCRCMD="/opt/spm12/run_spm12.sh /opt/mcr/v85/ script" \ + FORCE_SPMMCR=1 + WORKDIR /root # Install miniconda -RUN curl -sSLO https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh && \ - /bin/bash Miniconda-latest-Linux-x86_64.sh -b -p /usr/local/miniconda && \ - rm Miniconda-latest-Linux-x86_64.sh +RUN curl -sSLO 
https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh && \ + /bin/bash Miniconda3-latest-Linux-x86_64.sh -b -p /usr/local/miniconda && \ + rm Miniconda3-latest-Linux-x86_64.sh ENV PATH /usr/local/miniconda/bin:$PATH # http://bugs.python.org/issue19846 # > At the moment, setting "LANG=C" on a Linux system *fundamentally breaks Python 3*, and that's not OK. -ENV LANG C.UTF-8 +ENV LANG=C.UTF-8 \ + LC_ALL=C.UTF-8 # Add conda-forge channel in conda RUN conda config --add channels conda-forge && \ - conda install -y lockfile nipype matplotlib sphinx boto coverage dipy - -# Install run scripts -COPY docker/circleci/run_* /usr/bin/ -RUN chmod +x /usr/bin/run_* + conda install -y lockfile \ + nipype \ + matplotlib \ + sphinx \ + boto \ + boto3 \ + coverage \ + mock \ + pbr \ + nitime \ + dipy \ + pandas && \ + pip install nose-cov doctest-ignore-unicode configparser CMD ["/bin/bash"] - diff --git a/docker/nipype_test_py27/Dockerfile b/docker/nipype_test/Dockerfile_py27 similarity index 81% rename from docker/nipype_test_py27/Dockerfile rename to docker/nipype_test/Dockerfile_py27 index 5665437b46..434f785f22 100644 --- a/docker/nipype_test_py27/Dockerfile +++ b/docker/nipype_test/Dockerfile_py27 @@ -26,16 +26,26 @@ # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
-FROM nipype/nipype_test:base-0.0.1 +FROM nipype/nipype_test:base-0.0.2 MAINTAINER The nipype developers https://github.com/nipy/nipype -# Pre-check that requirements are installed -WORKDIR /root/src/nipype +# Downgrade python to 2.7 +RUN conda update -y conda && \ + conda update --all -y python=2.7 && \ + pip install nose-cov doctest-ignore-unicode configparser + +COPY docker/circleci/run_* /usr/bin/ +RUN chmod +x /usr/bin/run_* + +# Speed up building +RUN mkdir -p /root/src/nipype COPY requirements.txt /root/src/nipype/requirements.txt -RUN pip install -r requirements.txt +RUN pip install -r /root/src/nipype/requirements.txt +# Re-install nipype COPY . /root/src/nipype RUN rm -r /usr/local/miniconda/lib/python2.7/site-packages/nipype* && \ + cd /root/src/nipype && \ pip install -e . CMD ["/bin/bash"] diff --git a/docker/nipype_test_py34/Dockerfile b/docker/nipype_test/Dockerfile_py34 similarity index 67% rename from docker/nipype_test_py34/Dockerfile rename to docker/nipype_test/Dockerfile_py34 index 47d922a658..cbb1b36098 100644 --- a/docker/nipype_test_py34/Dockerfile +++ b/docker/nipype_test/Dockerfile_py34 @@ -26,39 +26,31 @@ # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
-FROM nipype/nipype_test:base +FROM nipype/nipype_test:base-0.0.2 MAINTAINER The nipype developers https://github.com/nipy/nipype -# Preparations -RUN ln -snf /bin/bash /bin/sh -WORKDIR /root -# Install miniconda -RUN curl -sSLO https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh && \ - /bin/bash Miniconda3-latest-Linux-x86_64.sh -b -p /usr/local/miniconda && \ - rm Miniconda3-latest-Linux-x86_64.sh +# Downgrade python to 3.4 +RUN conda update -y conda && \ + conda update --all -y python=3.4 -ENV PATH /usr/local/miniconda/bin:$PATH - -# http://bugs.python.org/issue19846 -# > At the moment, setting "LANG=C" on a Linux system *fundamentally breaks Python 3*, and that's not OK. -ENV LANG C.UTF-8 +COPY docker/circleci/run_* /usr/bin/ +RUN chmod +x /usr/bin/run_* -# Add conda-forge channel in conda -RUN conda config --add channels conda-forge && \ - conda update -y conda && \ - conda update --all -y python=3.4 && \ - conda install -y lockfile nipype +# Replace imglob with a Python3 compatible version +COPY nipype/external/fsl_imglob.py /usr/bin/fsl_imglob.py +RUN rm -r ${FSLDIR}/bin/imglob && \ + chmod +x /usr/bin/fsl_imglob.py && \ + ln -s /usr/bin/fsl_imglob.py ${FSLDIR}/bin/imglob +# Speed up building RUN mkdir -p /root/src/nipype -COPY . /root/src/nipype - -RUN cd /root/src/nipype && \ - pip install -r requirements.txt && \ - pip install -e . +COPY requirements.txt /root/src/nipype/requirements.txt +RUN pip install -r /root/src/nipype/requirements.txt - -COPY docker/circleci/run_* /usr/bin/ -RUN chmod +x /usr/bin/run_* -ENV SHELL /bin/bash +# Re-install nipype +COPY . /root/src/nipype +RUN rm -r /usr/local/miniconda/lib/python3.4/site-packages/nipype* && \ + cd /root/src/nipype && \ + pip install -e . 
CMD ["/bin/bash"] diff --git a/docker/nipype_test_py35/Dockerfile b/docker/nipype_test/Dockerfile_py35 similarity index 70% rename from docker/nipype_test_py35/Dockerfile rename to docker/nipype_test/Dockerfile_py35 index ac81e0d99b..14f3e2d3a1 100644 --- a/docker/nipype_test_py35/Dockerfile +++ b/docker/nipype_test/Dockerfile_py35 @@ -26,37 +26,29 @@ # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -FROM nipype/nipype_test:base +FROM nipype/nipype_test:base-0.0.2 MAINTAINER The nipype developers https://github.com/nipy/nipype -# Preparations -RUN ln -snf /bin/bash /bin/sh -WORKDIR /root - -# Install miniconda -RUN curl -sSLO https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh && \ - /bin/bash Miniconda3-latest-Linux-x86_64.sh -b -p /usr/local/miniconda && \ - rm Miniconda3-latest-Linux-x86_64.sh - -ENV PATH /usr/local/miniconda/bin:$PATH -# http://bugs.python.org/issue19846 -# > At the moment, setting "LANG=C" on a Linux system *fundamentally breaks Python 3*, and that's not OK. -ENV LANG C.UTF-8 +WORKDIR /root + +COPY docker/circleci/run_* /usr/bin/ +RUN chmod +x /usr/bin/run_* -# Add conda-forge channel in conda -RUN conda config --add channels conda-forge && \ - conda install -y lockfile nipype +# Replace imglob with a Python3 compatible version +COPY nipype/external/fsl_imglob.py /usr/bin/fsl_imglob.py +RUN rm -r ${FSLDIR}/bin/imglob && \ + chmod +x /usr/bin/fsl_imglob.py && \ + ln -s /usr/bin/fsl_imglob.py ${FSLDIR}/bin/imglob +# Speed up building RUN mkdir -p /root/src/nipype -COPY . /root/src/nipype - -RUN cd /root/src/nipype && \ - pip install -r requirements.txt && \ - pip install -e . - +COPY requirements.txt /root/src/nipype/requirements.txt +RUN pip install -r /root/src/nipype/requirements.txt -COPY docker/circleci/run_* /usr/bin/ -RUN chmod +x /usr/bin/run_* -ENV SHELL /bin/bash +# Re-install nipype +COPY . 
/root/src/nipype +RUN rm -r /usr/local/miniconda/lib/python3.5/site-packages/nipype* && \ + cd /root/src/nipype && \ + pip install -e . CMD ["/bin/bash"] diff --git a/examples/fmri_ants_openfmri.py b/examples/fmri_ants_openfmri.py index 8c8e050097..05cbb4efa4 100755 --- a/examples/fmri_ants_openfmri.py +++ b/examples/fmri_ants_openfmri.py @@ -10,54 +10,48 @@ This script demonstrates how to use nipype to analyze a data set:: python fmri_ants_openfmri.py --datasetdir ds107 -""" - -from __future__ import division -from builtins import range -from nipype import config -config.enable_provenance() +""" +from __future__ import division, unicode_literals +from builtins import open, range, str, bytes from glob import glob import os +from nipype import config +from nipype import LooseVersion +from nipype import Workflow, Node, MapNode +from nipype.utils.filemanip import filename_to_list import nipype.pipeline.engine as pe import nipype.algorithms.modelgen as model import nipype.algorithms.rapidart as ra from nipype.algorithms.misc import TSNR -from nipype.external.six import string_types from nipype.interfaces.c3 import C3dAffineTool +from nipype.interfaces import fsl, Function, ants, freesurfer as fs import nipype.interfaces.io as nio +from nipype.interfaces.io import FreeSurferSource import nipype.interfaces.utility as niu +from nipype.interfaces.utility import Merge, IdentityInterface from nipype.workflows.fmri.fsl import (create_featreg_preproc, create_modelfit_workflow, create_fixed_effects_flow) -from nipype import LooseVersion -from nipype import Workflow, Node, MapNode -from nipype.interfaces import (fsl, Function, ants, freesurfer) - -from nipype.interfaces.utility import Merge, IdentityInterface -from nipype.utils.filemanip import filename_to_list -from nipype.interfaces.io import FreeSurferSource -import nipype.interfaces.freesurfer as fs - +config.enable_provenance() version = 0 -if fsl.Info.version() and \ - LooseVersion(fsl.Info.version()) > 
LooseVersion('5.0.6'): +if (fsl.Info.version() and LooseVersion(fsl.Info.version()) > LooseVersion('5.0.6')): version = 507 fsl.FSLCommand.set_default_output_type('NIFTI_GZ') -imports = ['import os', - 'import nibabel as nb', - 'import numpy as np', - 'import scipy as sp', - 'from nipype.utils.filemanip import filename_to_list, list_to_filename, split_filename', - 'from scipy.special import legendre' - ] - +imports = [ + 'import os', + 'import nibabel as nb', + 'import numpy as np', + 'import scipy as sp', + 'from nipype.utils.filemanip import filename_to_list, list_to_filename, split_filename', + 'from scipy.special import legendre' +] def median(in_files): """Computes an average of the median of each realigned timeseries @@ -774,7 +768,7 @@ def get_contrasts(contrast_file, task_id, conds): def check_behav_list(behav, run_id, conds): import numpy as np num_conds = len(conds) - if isinstance(behav, string_types): + if isinstance(behav, (str, bytes)): behav = [behav] behav_array = np.array(behav).flatten() num_elements = behav_array.shape[0] diff --git a/examples/fmri_freesurfer_smooth.py b/examples/fmri_freesurfer_smooth.py index 06c9eef847..b33235efd3 100755 --- a/examples/fmri_freesurfer_smooth.py +++ b/examples/fmri_freesurfer_smooth.py @@ -40,6 +40,7 @@ """ from __future__ import print_function +from builtins import str from builtins import range import os # system functions diff --git a/examples/fmri_fsl.py b/examples/fmri_fsl.py index 89dd96c627..7fddfc76c4 100755 --- a/examples/fmri_fsl.py +++ b/examples/fmri_fsl.py @@ -17,6 +17,7 @@ from __future__ import print_function from __future__ import division +from builtins import str from builtins import range import os # system functions diff --git a/examples/fmri_fsl_reuse.py b/examples/fmri_fsl_reuse.py index e1ba827574..beb09f0345 100755 --- a/examples/fmri_fsl_reuse.py +++ b/examples/fmri_fsl_reuse.py @@ -17,6 +17,7 @@ from __future__ import print_function from __future__ import division +from builtins import 
str from builtins import range import os # system functions diff --git a/examples/fmri_nipy_glm.py b/examples/fmri_nipy_glm.py index ca149a248e..a7abd9e8b7 100755 --- a/examples/fmri_nipy_glm.py +++ b/examples/fmri_nipy_glm.py @@ -17,6 +17,7 @@ """ from __future__ import print_function +from builtins import str from builtins import range from nipype.interfaces.nipy.model import FitGLM, EstimateContrast diff --git a/examples/fmri_openfmri.py b/examples/fmri_openfmri.py index af41f11833..e69de29bb2 100755 --- a/examples/fmri_openfmri.py +++ b/examples/fmri_openfmri.py @@ -1,474 +0,0 @@ -#!/usr/bin/env python -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -""" -============================ -fMRI: OpenfMRI.org data, FSL -============================ - -A growing number of datasets are available on `OpenfMRI `_. -This script demonstrates how to use nipype to analyze a data set:: - - python fmri_openfmri.py --datasetdir ds107 -""" - -from __future__ import division -from builtins import range - -from glob import glob -import os - -import nipype.pipeline.engine as pe -import nipype.algorithms.modelgen as model -import nipype.algorithms.rapidart as ra -import nipype.interfaces.fsl as fsl -import nipype.interfaces.io as nio -import nipype.interfaces.utility as niu -from nipype.external.six import string_types -from nipype.workflows.fmri.fsl import (create_featreg_preproc, - create_modelfit_workflow, - create_fixed_effects_flow, - create_reg_workflow) - -fsl.FSLCommand.set_default_output_type('NIFTI_GZ') - - -def get_subjectinfo(subject_id, base_dir, task_id, model_id): - """Get info for a given subject - - Parameters - ---------- - - subject_id : string - Subject identifier (e.g., sub001) - base_dir : string - Path to base directory of the dataset - task_id : int - Which task to process - model_id : int - Which model to process - - Returns - ------- - - run_ids : list of ints - Run numbers - conds : list 
of str - Condition names - TR : float - Repetition time - """ - from glob import glob - import os - import numpy as np - condition_info = [] - cond_file = os.path.join(base_dir, 'models', 'model%03d' % model_id, - 'condition_key.txt') - - with open(cond_file, 'rt') as fp: - for line in fp: - info = line.strip().split() - condition_info.append([info[0], info[1], ' '.join(info[2:])]) - if len(condition_info) == 0: - raise ValueError('No condition info found in %s' % cond_file) - taskinfo = np.array(condition_info) - n_tasks = len(np.unique(taskinfo[:, 0])) - conds = [] - run_ids = [] - if task_id > n_tasks: - raise ValueError('Task id %d does not exist' % task_id) - for idx in range(n_tasks): - taskidx = np.where(taskinfo[:, 0] == 'task%03d' % (idx + 1)) - conds.append([condition.replace(' ', '_') for condition - in taskinfo[taskidx[0], 2]]) - files = glob(os.path.join(base_dir, - subject_id, - 'BOLD', - 'task%03d_run*' % (idx + 1))) - run_ids.insert(idx, list(range(1, len(files) + 1))) - TR = np.genfromtxt(os.path.join(base_dir, 'scan_key.txt'))[1] - return run_ids[task_id - 1], conds[task_id - 1], TR - - -def analyze_openfmri_dataset(data_dir, subject=None, model_id=None, - task_id=None, output_dir=None, subj_prefix='*'): - """Analyzes an open fmri dataset - - Parameters - ---------- - - data_dir : str - Path to the base data directory - - work_dir : str - Nipype working directory (defaults to cwd) - """ - - """ - Load nipype workflows - """ - - preproc = create_featreg_preproc(whichvol='first') - modelfit = create_modelfit_workflow() - fixed_fx = create_fixed_effects_flow() - registration = create_reg_workflow() - - """ - Remove the plotting connection so that plot iterables don't propagate - to the model stage - """ - - preproc.disconnect(preproc.get_node('plot_motion'), 'out_file', - preproc.get_node('outputspec'), 'motion_plots') - - """ - Set up openfmri data specific components - """ - - subjects = sorted([path.split(os.path.sep)[-1] for path in - 
glob(os.path.join(data_dir, subj_prefix))]) - - infosource = pe.Node(niu.IdentityInterface(fields=['subject_id', - 'model_id', - 'task_id']), - name='infosource') - if len(subject) == 0: - infosource.iterables = [('subject_id', subjects), - ('model_id', [model_id]), - ('task_id', task_id)] - else: - infosource.iterables = [('subject_id', - [subjects[subjects.index(subj)] for subj in subject]), - ('model_id', [model_id]), - ('task_id', task_id)] - - subjinfo = pe.Node(niu.Function(input_names=['subject_id', 'base_dir', - 'task_id', 'model_id'], - output_names=['run_id', 'conds', 'TR'], - function=get_subjectinfo), - name='subjectinfo') - subjinfo.inputs.base_dir = data_dir - - """ - Return data components as anat, bold and behav - """ - - datasource = pe.Node(nio.DataGrabber(infields=['subject_id', 'run_id', - 'task_id', 'model_id'], - outfields=['anat', 'bold', 'behav', - 'contrasts']), - name='datasource') - datasource.inputs.base_directory = data_dir - datasource.inputs.template = '*' - datasource.inputs.field_template = {'anat': '%s/anatomy/highres001.nii.gz', - 'bold': '%s/BOLD/task%03d_r*/bold.nii.gz', - 'behav': ('%s/model/model%03d/onsets/task%03d_' - 'run%03d/cond*.txt'), - 'contrasts': ('models/model%03d/' - 'task_contrasts.txt')} - datasource.inputs.template_args = {'anat': [['subject_id']], - 'bold': [['subject_id', 'task_id']], - 'behav': [['subject_id', 'model_id', - 'task_id', 'run_id']], - 'contrasts': [['model_id']]} - datasource.inputs.sort_filelist = True - - """ - Create meta workflow - """ - - wf = pe.Workflow(name='openfmri') - wf.connect(infosource, 'subject_id', subjinfo, 'subject_id') - wf.connect(infosource, 'model_id', subjinfo, 'model_id') - wf.connect(infosource, 'task_id', subjinfo, 'task_id') - wf.connect(infosource, 'subject_id', datasource, 'subject_id') - wf.connect(infosource, 'model_id', datasource, 'model_id') - wf.connect(infosource, 'task_id', datasource, 'task_id') - wf.connect(subjinfo, 'run_id', datasource, 'run_id') - 
wf.connect([(datasource, preproc, [('bold', 'inputspec.func')]), - ]) - - def get_highpass(TR, hpcutoff): - return hpcutoff / (2. * TR) - gethighpass = pe.Node(niu.Function(input_names=['TR', 'hpcutoff'], - output_names=['highpass'], - function=get_highpass), - name='gethighpass') - wf.connect(subjinfo, 'TR', gethighpass, 'TR') - wf.connect(gethighpass, 'highpass', preproc, 'inputspec.highpass') - - """ - Setup a basic set of contrasts, a t-test per condition - """ - - def get_contrasts(contrast_file, task_id, conds): - import numpy as np - contrast_def = np.genfromtxt(contrast_file, dtype=object) - if len(contrast_def.shape) == 1: - contrast_def = contrast_def[None, :] - contrasts = [] - for row in contrast_def: - if row[0] != 'task%03d' % task_id: - continue - con = [row[1], 'T', ['cond%03d' % (i + 1) for i in range(len(conds))], - row[2:].astype(float).tolist()] - contrasts.append(con) - # add auto contrasts for each column - for i, cond in enumerate(conds): - con = [cond, 'T', ['cond%03d' % (i + 1)], [1]] - contrasts.append(con) - return contrasts - - contrastgen = pe.Node(niu.Function(input_names=['contrast_file', - 'task_id', 'conds'], - output_names=['contrasts'], - function=get_contrasts), - name='contrastgen') - - art = pe.MapNode(interface=ra.ArtifactDetect(use_differences=[True, False], - use_norm=True, - norm_threshold=1, - zintensity_threshold=3, - parameter_source='FSL', - mask_type='file'), - iterfield=['realigned_files', 'realignment_parameters', - 'mask_file'], - name="art") - - modelspec = pe.Node(interface=model.SpecifyModel(), - name="modelspec") - modelspec.inputs.input_units = 'secs' - - def check_behav_list(behav): - out_behav = [] - if isinstance(behav, string_types): - behav = [behav] - for val in behav: - if not isinstance(val, list): - out_behav.append([val]) - else: - out_behav.append(val) - return out_behav - - wf.connect(subjinfo, 'TR', modelspec, 'time_repetition') - wf.connect(datasource, ('behav', check_behav_list), modelspec, 
'event_files') - wf.connect(subjinfo, 'TR', modelfit, 'inputspec.interscan_interval') - wf.connect(subjinfo, 'conds', contrastgen, 'conds') - wf.connect(datasource, 'contrasts', contrastgen, 'contrast_file') - wf.connect(infosource, 'task_id', contrastgen, 'task_id') - wf.connect(contrastgen, 'contrasts', modelfit, 'inputspec.contrasts') - - wf.connect([(preproc, art, [('outputspec.motion_parameters', - 'realignment_parameters'), - ('outputspec.realigned_files', - 'realigned_files'), - ('outputspec.mask', 'mask_file')]), - (preproc, modelspec, [('outputspec.highpassed_files', - 'functional_runs'), - ('outputspec.motion_parameters', - 'realignment_parameters')]), - (art, modelspec, [('outlier_files', 'outlier_files')]), - (modelspec, modelfit, [('session_info', - 'inputspec.session_info')]), - (preproc, modelfit, [('outputspec.highpassed_files', - 'inputspec.functional_data')]) - ]) - - """ - Reorder the copes so that now it combines across runs - """ - - def sort_copes(files): - numelements = len(files[0]) - outfiles = [] - for i in range(numelements): - outfiles.insert(i, []) - for j, elements in enumerate(files): - outfiles[i].append(elements[i]) - return outfiles - - def num_copes(files): - return len(files) - - pickfirst = lambda x: x[0] - - wf.connect([(preproc, fixed_fx, [(('outputspec.mask', pickfirst), - 'flameo.mask_file')]), - (modelfit, fixed_fx, [(('outputspec.copes', sort_copes), - 'inputspec.copes'), - ('outputspec.dof_file', - 'inputspec.dof_files'), - (('outputspec.varcopes', - sort_copes), - 'inputspec.varcopes'), - (('outputspec.copes', num_copes), - 'l2model.num_copes'), - ]) - ]) - - wf.connect(preproc, 'outputspec.mean', registration, 'inputspec.mean_image') - wf.connect(datasource, 'anat', registration, 'inputspec.anatomical_image') - registration.inputs.inputspec.target_image = fsl.Info.standard_image('MNI152_T1_2mm.nii.gz') - registration.inputs.inputspec.target_image_brain = fsl.Info.standard_image('MNI152_T1_2mm_brain.nii.gz') - 
registration.inputs.inputspec.config_file = 'T1_2_MNI152_2mm' - - def merge_files(copes, varcopes, zstats): - out_files = [] - splits = [] - out_files.extend(copes) - splits.append(len(copes)) - out_files.extend(varcopes) - splits.append(len(varcopes)) - out_files.extend(zstats) - splits.append(len(zstats)) - return out_files, splits - - mergefunc = pe.Node(niu.Function(input_names=['copes', 'varcopes', - 'zstats'], - output_names=['out_files', 'splits'], - function=merge_files), - name='merge_files') - wf.connect([(fixed_fx.get_node('outputspec'), mergefunc, - [('copes', 'copes'), - ('varcopes', 'varcopes'), - ('zstats', 'zstats'), - ])]) - wf.connect(mergefunc, 'out_files', registration, 'inputspec.source_files') - - def split_files(in_files, splits): - copes = in_files[:splits[0]] - varcopes = in_files[splits[0]:(splits[0] + splits[1])] - zstats = in_files[(splits[0] + splits[1]):] - return copes, varcopes, zstats - - splitfunc = pe.Node(niu.Function(input_names=['in_files', 'splits'], - output_names=['copes', 'varcopes', - 'zstats'], - function=split_files), - name='split_files') - wf.connect(mergefunc, 'splits', splitfunc, 'splits') - wf.connect(registration, 'outputspec.transformed_files', - splitfunc, 'in_files') - - """ - Connect to a datasink - """ - - def get_subs(subject_id, conds, model_id, task_id): - subs = [('_subject_id_%s_' % subject_id, '')] - subs.append(('_model_id_%d' % model_id, 'model%03d' % model_id)) - subs.append(('task_id_%d/' % task_id, '/task%03d_' % task_id)) - subs.append(('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_warp', - 'mean')) - subs.append(('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_flirt', - 'affine')) - - for i in range(len(conds)): - subs.append(('_flameo%d/cope1.' % i, 'cope%02d.' % (i + 1))) - subs.append(('_flameo%d/varcope1.' % i, 'varcope%02d.' % (i + 1))) - subs.append(('_flameo%d/zstat1.' % i, 'zstat%02d.' % (i + 1))) - subs.append(('_flameo%d/tstat1.' % i, 'tstat%02d.' 
% (i + 1))) - subs.append(('_flameo%d/res4d.' % i, 'res4d%02d.' % (i + 1))) - subs.append(('_warpall%d/cope1_warp.' % i, - 'cope%02d.' % (i + 1))) - subs.append(('_warpall%d/varcope1_warp.' % (len(conds) + i), - 'varcope%02d.' % (i + 1))) - subs.append(('_warpall%d/zstat1_warp.' % (2 * len(conds) + i), - 'zstat%02d.' % (i + 1))) - return subs - - subsgen = pe.Node(niu.Function(input_names=['subject_id', 'conds', - 'model_id', 'task_id'], - output_names=['substitutions'], - function=get_subs), - name='subsgen') - - datasink = pe.Node(interface=nio.DataSink(), - name="datasink") - wf.connect(infosource, 'subject_id', datasink, 'container') - wf.connect(infosource, 'subject_id', subsgen, 'subject_id') - wf.connect(infosource, 'model_id', subsgen, 'model_id') - wf.connect(infosource, 'task_id', subsgen, 'task_id') - wf.connect(contrastgen, 'contrasts', subsgen, 'conds') - wf.connect(subsgen, 'substitutions', datasink, 'substitutions') - wf.connect([(fixed_fx.get_node('outputspec'), datasink, - [('res4d', 'res4d'), - ('copes', 'copes'), - ('varcopes', 'varcopes'), - ('zstats', 'zstats'), - ('tstats', 'tstats')]) - ]) - wf.connect([(splitfunc, datasink, - [('copes', 'copes.mni'), - ('varcopes', 'varcopes.mni'), - ('zstats', 'zstats.mni'), - ])]) - wf.connect(registration, 'outputspec.transformed_mean', datasink, 'mean.mni') - wf.connect(registration, 'outputspec.func2anat_transform', datasink, 'xfm.mean2anat') - wf.connect(registration, 'outputspec.anat2target_transform', datasink, 'xfm.anat2target') - - """ - Set processing parameters - """ - - hpcutoff = 120. 
- preproc.inputs.inputspec.fwhm = 6.0 - gethighpass.inputs.hpcutoff = hpcutoff - modelspec.inputs.high_pass_filter_cutoff = hpcutoff - modelfit.inputs.inputspec.bases = {'dgamma': {'derivs': True}} - modelfit.inputs.inputspec.model_serial_correlations = True - modelfit.inputs.inputspec.film_threshold = 1000 - - datasink.inputs.base_directory = output_dir - return wf - -""" -The following functions run the whole workflow. -""" - -if __name__ == '__main__': - import argparse - defstr = ' (default %(default)s)' - parser = argparse.ArgumentParser(prog='fmri_openfmri.py', - description=__doc__) - parser.add_argument('-d', '--datasetdir', required=True) - parser.add_argument('-s', '--subject', default=[], - nargs='+', type=str, - help="Subject name (e.g. 'sub001')") - parser.add_argument('-m', '--model', default=1, - help="Model index" + defstr) - parser.add_argument('-x', '--subjectprefix', default='sub*', - help="Subject prefix" + defstr) - parser.add_argument('-t', '--task', default=1, # nargs='+', - type=int, help="Task index" + defstr) - parser.add_argument("-o", "--output_dir", dest="outdir", - help="Output directory base") - parser.add_argument("-w", "--work_dir", dest="work_dir", - help="Working directory base") - parser.add_argument("-p", "--plugin", dest="plugin", - default='Linear', - help="Plugin to use") - parser.add_argument("--plugin_args", dest="plugin_args", - help="Plugin arguments") - args = parser.parse_args() - outdir = args.outdir - work_dir = os.getcwd() - if args.work_dir: - work_dir = os.path.abspath(args.work_dir) - if outdir: - outdir = os.path.abspath(outdir) - else: - outdir = os.path.join(work_dir, 'output') - outdir = os.path.join(outdir, 'model%02d' % int(args.model), - 'task%03d' % int(args.task)) - wf = analyze_openfmri_dataset(data_dir=os.path.abspath(args.datasetdir), - subject=args.subject, - model_id=int(args.model), - task_id=[int(args.task)], - subj_prefix=args.subjectprefix, - output_dir=outdir) - wf.base_dir = work_dir - if 
args.plugin_args: - wf.run(args.plugin, plugin_args=eval(args.plugin_args)) - else: - wf.run(args.plugin) diff --git a/examples/fmri_spm.py b/examples/fmri_spm.py index 7d3f735a0c..4fb2ec5be0 100755 --- a/examples/fmri_spm.py +++ b/examples/fmri_spm.py @@ -16,6 +16,7 @@ Import necessary modules from nipype.""" from __future__ import print_function +from builtins import str from builtins import range import os # system functions diff --git a/examples/fmri_spm_dartel.py b/examples/fmri_spm_dartel.py index 34a26f9ee3..746da9f810 100755 --- a/examples/fmri_spm_dartel.py +++ b/examples/fmri_spm_dartel.py @@ -16,6 +16,7 @@ Import necessary modules from nipype.""" from __future__ import print_function +from builtins import str from builtins import range import nipype.interfaces.io as nio # Data i/o diff --git a/examples/fmri_spm_nested.py b/examples/fmri_spm_nested.py index 0e01e4b4b6..9f1b51469d 100755 --- a/examples/fmri_spm_nested.py +++ b/examples/fmri_spm_nested.py @@ -16,6 +16,7 @@ Import necessary modules from nipype.""" from __future__ import print_function +from builtins import str from builtins import range import os.path as op # system functions diff --git a/examples/frontiers_paper/smoothing_comparison.py b/examples/frontiers_paper/smoothing_comparison.py index b16b229f6d..11698c0379 100644 --- a/examples/frontiers_paper/smoothing_comparison.py +++ b/examples/frontiers_paper/smoothing_comparison.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ diff --git a/examples/frontiers_paper/workflow_from_scratch.py b/examples/frontiers_paper/workflow_from_scratch.py index fa9bbd4cca..fd8ba8ffd3 100644 --- a/examples/frontiers_paper/workflow_from_scratch.py +++ b/examples/frontiers_paper/workflow_from_scratch.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 
et: """ diff --git a/examples/rsfmri_vol_surface_preprocessing.py b/examples/rsfmri_vol_surface_preprocessing.py index c375278629..4b35ba37a4 100644 --- a/examples/rsfmri_vol_surface_preprocessing.py +++ b/examples/rsfmri_vol_surface_preprocessing.py @@ -43,8 +43,8 @@ - `MNI template `_ """ -from __future__ import division -from builtins import range +from __future__ import division, unicode_literals +from builtins import open, range, str import os @@ -189,7 +189,7 @@ def motion_regressors(motion_params, order=0, derivatives=1): for i in range(2, order + 1): out_params2 = np.hstack((out_params2, np.power(out_params, i))) filename = os.path.join(os.getcwd(), "motion_regressor%02d.txt" % idx) - np.savetxt(filename, out_params2, fmt="%.10f") + np.savetxt(filename, out_params2, fmt=b"%.10f") out_files.append(filename) return out_files @@ -237,7 +237,7 @@ def build_filter1(motion_params, comp_norm, outliers, detrend_poly=None): i + 1)(np.linspace(-1, 1, timepoints))[:, None])) out_params = np.hstack((out_params, X)) filename = os.path.join(os.getcwd(), "filter_regressor%02d.txt" % idx) - np.savetxt(filename, out_params, fmt="%.10f") + np.savetxt(filename, out_params, fmt=b"%.10f") out_files.append(filename) return out_files @@ -286,7 +286,7 @@ def extract_noise_components(realigned_file, mask_file, num_components=5, regressors = np.genfromtxt(extra_regressors) components = np.hstack((components, regressors)) components_file = os.path.join(os.getcwd(), 'noise_components.txt') - np.savetxt(components_file, components, fmt="%.10f") + np.savetxt(components_file, components, fmt=b"%.10f") return components_file diff --git a/examples/rsfmri_vol_surface_preprocessing_nipy.py b/examples/rsfmri_vol_surface_preprocessing_nipy.py index a59337b497..b46ae28ab6 100644 --- a/examples/rsfmri_vol_surface_preprocessing_nipy.py +++ b/examples/rsfmri_vol_surface_preprocessing_nipy.py @@ -44,8 +44,8 @@ """ -from __future__ import division -from builtins import range +from __future__ import 
division, unicode_literals +from builtins import open, range, str import os @@ -179,7 +179,7 @@ def motion_regressors(motion_params, order=0, derivatives=1): for i in range(2, order + 1): out_params2 = np.hstack((out_params2, np.power(out_params, i))) filename = os.path.join(os.getcwd(), "motion_regressor%02d.txt" % idx) - np.savetxt(filename, out_params2, fmt="%.10f") + np.savetxt(filename, out_params2, fmt=b"%.10f") out_files.append(filename) return out_files @@ -224,7 +224,7 @@ def build_filter1(motion_params, comp_norm, outliers, detrend_poly=None): i + 1)(np.linspace(-1, 1, timepoints))[:, None])) out_params = np.hstack((out_params, X)) filename = os.path.join(os.getcwd(), "filter_regressor%02d.txt" % idx) - np.savetxt(filename, out_params, fmt="%.10f") + np.savetxt(filename, out_params, fmt=b"%.10f") out_files.append(filename) return out_files @@ -269,7 +269,7 @@ def extract_noise_components(realigned_file, mask_file, num_components=5, regressors = np.genfromtxt(extra_regressors) components = np.hstack((components, regressors)) components_file = os.path.join(os.getcwd(), 'noise_components.txt') - np.savetxt(components_file, components, fmt="%.10f") + np.savetxt(components_file, components, fmt=b"%.10f") return components_file diff --git a/examples/smri_ants_build_template.py b/examples/smri_ants_build_template.py index 21603c956c..07b133eac2 100644 --- a/examples/smri_ants_build_template.py +++ b/examples/smri_ants_build_template.py @@ -12,7 +12,8 @@ 1. Tell python where to find the appropriate functions. """ -from __future__ import print_function +from __future__ import print_function, unicode_literals +from builtins import open from future import standard_library standard_library.install_aliases() diff --git a/examples/smri_ants_registration.py b/examples/smri_ants_registration.py index f2558d560d..91dca81436 100644 --- a/examples/smri_ants_registration.py +++ b/examples/smri_ants_registration.py @@ -11,8 +11,9 @@ 1. 
Tell python where to find the appropriate functions. """ +from __future__ import print_function, unicode_literals +from builtins import open -from __future__ import print_function from future import standard_library standard_library.install_aliases() diff --git a/examples/workshop_dartmouth_2010.py b/examples/workshop_dartmouth_2010.py index 1c9c587ae9..055ae8d57d 100644 --- a/examples/workshop_dartmouth_2010.py +++ b/examples/workshop_dartmouth_2010.py @@ -97,6 +97,7 @@ """ from __future__ import print_function +from builtins import str import nipype.interfaces.fsl as fsl result = fsl.BET(in_file='data/s1/struct.nii').run() diff --git a/nipype/__init__.py b/nipype/__init__.py index 66c28b97a6..b633736023 100644 --- a/nipype/__init__.py +++ b/nipype/__init__.py @@ -1,29 +1,30 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from __future__ import absolute_import +from __future__ import print_function, division, unicode_literals, absolute_import import os +from distutils.version import LooseVersion from .info import (LONG_DESCRIPTION as __doc__, URL as __url__, STATUS as __status__, __version__) from .utils.config import NipypeConfig -config = NipypeConfig() -from .utils.logger import Logging -logging = Logging(config) - -from distutils.version import LooseVersion - from .fixes.numpy.testing import nosetester +from .utils.logger import Logging from .refs import due try: import faulthandler faulthandler.enable() -except (ImportError,IOError) as e: +except (ImportError, IOError) as e: pass +config = NipypeConfig() +logging = Logging(config) + + class _NoseTester(nosetester.NoseTester): """ Subclass numpy's NoseTester to add doctests by default """ diff --git a/nipype/algorithms/__init__.py b/nipype/algorithms/__init__.py index 0aa096f1f9..a2909a3501 100644 --- a/nipype/algorithms/__init__.py +++ b/nipype/algorithms/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: 
-*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ diff --git a/nipype/algorithms/icc.py b/nipype/algorithms/icc.py index af61c260b9..3a9e8237e7 100644 --- a/nipype/algorithms/icc.py +++ b/nipype/algorithms/icc.py @@ -1,12 +1,13 @@ -from __future__ import division +# -*- coding: utf-8 -*- +from __future__ import print_function, division, unicode_literals, absolute_import from builtins import range +import os +import numpy as np from numpy import ones, kron, mean, eye, hstack, dot, tile +import nibabel as nb from scipy.linalg import pinv from ..interfaces.base import BaseInterfaceInputSpec, TraitedSpec, \ BaseInterface, traits, File -import nibabel as nb -import numpy as np -import os class ICCInputSpec(BaseInterfaceInputSpec): diff --git a/nipype/algorithms/mesh.py b/nipype/algorithms/mesh.py index 9e18a96b90..18aa7bc864 100644 --- a/nipype/algorithms/mesh.py +++ b/nipype/algorithms/mesh.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -11,16 +12,14 @@ >>> os.chdir(datadir) """ -from __future__ import division +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import zip, str, bytes import os.path as op import numpy as np from numpy import linalg as nla -from builtins import zip - from .. 
import logging -from ..external.six import string_types from ..interfaces.base import (BaseInterface, traits, TraitedSpec, File, BaseInterfaceInputSpec) from ..interfaces.vtkbase import tvtk @@ -324,7 +323,7 @@ def _run_interface(self, runtime): operator = self.inputs.operator opfield = np.ones_like(points1) - if isinstance(operator, string_types): + if isinstance(operator, (str, bytes)): r2 = tvtk.PolyDataReader(file_name=self.inputs.surface2) vtk2 = VTKInfo.vtk_output(r2) r2.update() diff --git a/nipype/algorithms/metrics.py b/nipype/algorithms/metrics.py index 25ace5f012..f2ea7594c6 100644 --- a/nipype/algorithms/metrics.py +++ b/nipype/algorithms/metrics.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: ''' @@ -11,9 +12,8 @@ >>> os.chdir(datadir) ''' -from __future__ import division -from builtins import zip -from builtins import range +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import zip, range import os import os.path as op diff --git a/nipype/algorithms/misc.py b/nipype/algorithms/misc.py index c476ed003a..edd4ef83f1 100644 --- a/nipype/algorithms/misc.py +++ b/nipype/algorithms/misc.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: ''' @@ -10,11 +11,8 @@ >>> os.chdir(datadir) ''' -from __future__ import print_function -from __future__ import absolute_import -from __future__ import division -from builtins import zip -from builtins import range +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import str, zip, range, open from future.utils import raise_from import os @@ -28,17 +26,15 @@ import scipy.io as sio import itertools import scipy.stats as stats - -from nipype import logging - import warnings +from .. import logging from . 
import metrics as nam from ..interfaces.base import (BaseInterface, traits, TraitedSpec, File, InputMultiPath, OutputMultiPath, BaseInterfaceInputSpec, isdefined, DynamicTraitedSpec, Undefined) -from nipype.utils.filemanip import fname_presuffix, split_filename +from ..utils.filemanip import fname_presuffix, split_filename iflogger = logging.getLogger('interface') @@ -240,15 +236,17 @@ def _gen_output_file_name(self): return os.path.abspath(base + ".nii") def _run_interface(self, runtime): - hdr = nb.AnalyzeHeader.from_fileobj( - open(self.inputs.header_file, 'rb')) + with open(self.inputs.header_file, 'rb') as hdr_file: + hdr = nb.AnalyzeHeader.from_fileobj(hdr_file) if isdefined(self.inputs.affine): affine = self.inputs.affine else: affine = None - data = hdr.data_from_fileobj(open(self.inputs.data_file, 'rb')) + with open(self.inputs.data_file, 'rb') as data_file: + data = hdr.data_from_fileobj(data_file) + img = nb.Nifti1Image(data, affine, hdr) nb.save(img, self._gen_output_file_name()) @@ -368,11 +366,9 @@ def _gen_output_file_name(self): def _run_interface(self, runtime): import gzip - in_file = gzip.open(self.inputs.in_file, 'rb') - out_file = open(self._gen_output_file_name(), 'wb') - out_file.write(in_file.read()) - out_file.close() - in_file.close() + with gzip.open(self.inputs.in_file, 'rb') as in_file: + with open(self._gen_output_file_name(), 'wb') as out_file: + out_file.write(in_file.read()) return runtime def _list_outputs(self): @@ -505,8 +501,9 @@ def merge_csvs(in_list): try: in_array = np.loadtxt(in_file, delimiter=',', skiprows=1) except ValueError as ex: - first = open(in_file, 'r') - header_line = first.readline() + with open(in_file, 'r') as first: + header_line = first.readline() + header_list = header_line.split(',') n_cols = len(header_list) try: @@ -685,8 +682,8 @@ def _run_interface(self, runtime): ext = '.csv' out_file = op.abspath(name + ext) - file_handle = open(out_file, 'w') - file_handle.write(csv_headings) + with 
open(out_file, 'w') as file_handle: + file_handle.write(csv_headings) shape = np.shape(output_array) typelist = maketypelist( @@ -715,8 +712,9 @@ def _run_interface(self, runtime): output[extraheading] = extrafieldlist iflogger.info(output) iflogger.info(fmt) - np.savetxt(file_handle, output, fmt, delimiter=',') - file_handle.close() + with open(out_file, 'a') as file_handle: + np.savetxt(file_handle, output, fmt, delimiter=',') + return runtime def _list_outputs(self): @@ -778,6 +776,8 @@ def _run_interface(self, runtime): new_line = line.replace('\n', '') new_line = new_line + ',' + self.inputs.extra_field + '\n' out_file.write(new_line) + in_file.close() + out_file.close() return runtime def _list_outputs(self): @@ -1186,7 +1186,7 @@ def _run_interface(self, runtime): def _list_outputs(self): outputs = self.output_spec().get() - for k, v in self._outnames.items(): + for k, v in list(self._outnames.items()): outputs[k] = v return outputs diff --git a/nipype/algorithms/modelgen.py b/nipype/algorithms/modelgen.py index 93aaeb042c..df3d929b51 100644 --- a/nipype/algorithms/modelgen.py +++ b/nipype/algorithms/modelgen.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -17,9 +18,8 @@ >>> os.chdir(datadir) """ - -from __future__ import division -from builtins import range +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import range, str, bytes, int from copy import deepcopy import os @@ -28,7 +28,6 @@ import numpy as np from scipy.special import gammaln -from ..external.six import string_types from ..interfaces.base import (BaseInterface, TraitedSpec, InputMultiPath, traits, File, Bunch, BaseInterfaceInputSpec, isdefined) @@ -457,7 +456,7 @@ def _concatenate_info(self, infolist): for i, f in enumerate(self.inputs.functional_runs): if isinstance(f, list): numscans = len(f) - elif isinstance(f, string_types): 
+ elif isinstance(f, (str, bytes)): img = load(f) numscans = img.shape[3] else: @@ -542,16 +541,20 @@ def _generate_design(self, infolist=None): outliers = [[]] for i, filename in enumerate(self.inputs.outlier_files): try: - out = np.loadtxt(filename, dtype=int) + out = np.loadtxt(filename) except IOError: + iflogger.warn('Error reading outliers file %s', filename) out = np.array([]) + if out.size > 0: + iflogger.debug('fname=%s, out=%s, nscans=%s', filename, out, repr(sum(nscans[0:i]))) + sumscans = out.astype(int) + sum(nscans[0:i]) + if out.size == 1: - outliers[0].extend([(np.array(out) + - sum(nscans[0:i])).tolist()]) + outliers[0]+= [np.array(sumscans, dtype=int).tolist()] else: - outliers[0].extend((np.array(out) + - sum(nscans[0:i])).tolist()) + outliers[0]+= np.array(sumscans, dtype=int).tolist() + self._sessinfo = self._generate_standard_design(concatlist, functional_runs=functional_runs, realignment_parameters=realignment_parameters, diff --git a/nipype/algorithms/rapidart.py b/nipype/algorithms/rapidart.py index 06ad009d50..084005b25b 100644 --- a/nipype/algorithms/rapidart.py +++ b/nipype/algorithms/rapidart.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -17,9 +18,8 @@ >>> datadir = os.path.realpath(os.path.join(filepath, '../testing/data')) >>> os.chdir(datadir) """ - -from __future__ import division -from builtins import range +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import open, range, str, bytes import os from copy import deepcopy @@ -29,7 +29,6 @@ from scipy import signal import scipy.io as sio -from ..external.six import string_types from ..interfaces.base import (BaseInterface, traits, InputMultiPath, OutputMultiPath, TraitedSpec, File, BaseInterfaceInputSpec, isdefined) @@ -281,7 +280,7 @@ def _get_output_filenames(self, motionfile, output_dir): output_dir: string output 
directory in which the files will be generated """ - if isinstance(motionfile, string_types): + if isinstance(motionfile, (str, bytes)): infile = motionfile elif isinstance(motionfile, list): infile = motionfile[0] @@ -352,7 +351,7 @@ def _detect_outliers_core(self, imgfile, motionfile, runidx, cwd=None): cwd = os.getcwd() # read in functional image - if isinstance(imgfile, string_types): + if isinstance(imgfile, (str, bytes)): nim = load(imgfile) elif isinstance(imgfile, list): if len(imgfile) == 1: @@ -469,10 +468,10 @@ def _detect_outliers_core(self, imgfile, motionfile, runidx, cwd=None): outliers = np.unique(np.union1d(iidx, np.union1d(tidx, ridx))) # write output to outputfile - np.savetxt(artifactfile, outliers, fmt='%d', delimiter=' ') - np.savetxt(intensityfile, g, fmt='%.2f', delimiter=' ') + np.savetxt(artifactfile, outliers, fmt=b'%d', delimiter=' ') + np.savetxt(intensityfile, g, fmt=b'%.2f', delimiter=' ') if self.inputs.use_norm: - np.savetxt(normfile, normval, fmt='%.4f', delimiter=' ') + np.savetxt(normfile, normval, fmt=b'%.4f', delimiter=' ') if isdefined(self.inputs.save_plot) and self.inputs.save_plot: import matplotlib diff --git a/nipype/algorithms/tests/__init__.py b/nipype/algorithms/tests/__init__.py index 349937997e..99fb243f19 100644 --- a/nipype/algorithms/tests/__init__.py +++ b/nipype/algorithms/tests/__init__.py @@ -1,2 +1,3 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/algorithms/tests/test_auto_ArtifactDetect.py b/nipype/algorithms/tests/test_auto_ArtifactDetect.py index 961b7dd2d0..03bb917e8b 100644 --- a/nipype/algorithms/tests/test_auto_ArtifactDetect.py +++ b/nipype/algorithms/tests/test_auto_ArtifactDetect.py @@ -17,7 +17,7 @@ def test_ArtifactDetect_inputs(): mask_type=dict(mandatory=True, ), norm_threshold=dict(mandatory=True, - xor=['rotation_threshold', 'translation_threshold'], + xor=[u'rotation_threshold', 
u'translation_threshold'], ), parameter_source=dict(mandatory=True, ), @@ -28,18 +28,18 @@ def test_ArtifactDetect_inputs(): realignment_parameters=dict(mandatory=True, ), rotation_threshold=dict(mandatory=True, - xor=['norm_threshold'], + xor=[u'norm_threshold'], ), save_plot=dict(usedefault=True, ), translation_threshold=dict(mandatory=True, - xor=['norm_threshold'], + xor=[u'norm_threshold'], ), use_differences=dict(maxlen=2, minlen=2, usedefault=True, ), - use_norm=dict(requires=['norm_threshold'], + use_norm=dict(requires=[u'norm_threshold'], usedefault=True, ), zintensity_threshold=dict(mandatory=True, diff --git a/nipype/algorithms/tests/test_auto_ErrorMap.py b/nipype/algorithms/tests/test_auto_ErrorMap.py new file mode 100644 index 0000000000..69484529dd --- /dev/null +++ b/nipype/algorithms/tests/test_auto_ErrorMap.py @@ -0,0 +1,35 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ...testing import assert_equal +from ..metrics import ErrorMap + + +def test_ErrorMap_inputs(): + input_map = dict(ignore_exception=dict(nohash=True, + usedefault=True, + ), + in_ref=dict(mandatory=True, + ), + in_tst=dict(mandatory=True, + ), + mask=dict(), + metric=dict(mandatory=True, + usedefault=True, + ), + out_map=dict(), + ) + inputs = ErrorMap.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): + yield assert_equal, getattr(inputs.traits()[key], metakey), value + + +def test_ErrorMap_outputs(): + output_map = dict(distance=dict(), + out_map=dict(), + ) + outputs = ErrorMap.output_spec() + + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): + yield assert_equal, getattr(outputs.traits()[key], metakey), value diff --git a/nipype/algorithms/tests/test_auto_Overlap.py b/nipype/algorithms/tests/test_auto_Overlap.py new file mode 100644 index 0000000000..a5a3874bd1 --- /dev/null +++ b/nipype/algorithms/tests/test_auto_Overlap.py @@ -0,0 +1,47 @@ +# 
AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ...testing import assert_equal +from ..misc import Overlap + + +def test_Overlap_inputs(): + input_map = dict(bg_overlap=dict(mandatory=True, + usedefault=True, + ), + ignore_exception=dict(nohash=True, + usedefault=True, + ), + mask_volume=dict(), + out_file=dict(usedefault=True, + ), + vol_units=dict(mandatory=True, + usedefault=True, + ), + volume1=dict(mandatory=True, + ), + volume2=dict(mandatory=True, + ), + weighting=dict(usedefault=True, + ), + ) + inputs = Overlap.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): + yield assert_equal, getattr(inputs.traits()[key], metakey), value + + +def test_Overlap_outputs(): + output_map = dict(dice=dict(), + diff_file=dict(), + jaccard=dict(), + labels=dict(), + roi_di=dict(), + roi_ji=dict(), + roi_voldiff=dict(), + volume_difference=dict(), + ) + outputs = Overlap.output_spec() + + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): + yield assert_equal, getattr(outputs.traits()[key], metakey), value diff --git a/nipype/algorithms/tests/test_auto_SpecifyModel.py b/nipype/algorithms/tests/test_auto_SpecifyModel.py index 69a528c3c2..aac457a283 100644 --- a/nipype/algorithms/tests/test_auto_SpecifyModel.py +++ b/nipype/algorithms/tests/test_auto_SpecifyModel.py @@ -5,7 +5,7 @@ def test_SpecifyModel_inputs(): input_map = dict(event_files=dict(mandatory=True, - xor=['subject_info', 'event_files'], + xor=[u'subject_info', u'event_files'], ), functional_runs=dict(copyfile=False, mandatory=True, @@ -22,7 +22,7 @@ def test_SpecifyModel_inputs(): realignment_parameters=dict(copyfile=False, ), subject_info=dict(mandatory=True, - xor=['subject_info', 'event_files'], + xor=[u'subject_info', u'event_files'], ), time_repetition=dict(mandatory=True, ), diff --git a/nipype/algorithms/tests/test_auto_SpecifySPMModel.py b/nipype/algorithms/tests/test_auto_SpecifySPMModel.py 
index 19ccaa9ba5..6232ea0f11 100644 --- a/nipype/algorithms/tests/test_auto_SpecifySPMModel.py +++ b/nipype/algorithms/tests/test_auto_SpecifySPMModel.py @@ -7,7 +7,7 @@ def test_SpecifySPMModel_inputs(): input_map = dict(concatenate_runs=dict(usedefault=True, ), event_files=dict(mandatory=True, - xor=['subject_info', 'event_files'], + xor=[u'subject_info', u'event_files'], ), functional_runs=dict(copyfile=False, mandatory=True, @@ -26,7 +26,7 @@ def test_SpecifySPMModel_inputs(): realignment_parameters=dict(copyfile=False, ), subject_info=dict(mandatory=True, - xor=['subject_info', 'event_files'], + xor=[u'subject_info', u'event_files'], ), time_repetition=dict(mandatory=True, ), diff --git a/nipype/algorithms/tests/test_auto_SpecifySparseModel.py b/nipype/algorithms/tests/test_auto_SpecifySparseModel.py index aa641facf7..06fa7dad34 100644 --- a/nipype/algorithms/tests/test_auto_SpecifySparseModel.py +++ b/nipype/algorithms/tests/test_auto_SpecifySparseModel.py @@ -5,7 +5,7 @@ def test_SpecifySparseModel_inputs(): input_map = dict(event_files=dict(mandatory=True, - xor=['subject_info', 'event_files'], + xor=[u'subject_info', u'event_files'], ), functional_runs=dict(copyfile=False, mandatory=True, @@ -30,13 +30,13 @@ def test_SpecifySparseModel_inputs(): stimuli_as_impulses=dict(usedefault=True, ), subject_info=dict(mandatory=True, - xor=['subject_info', 'event_files'], + xor=[u'subject_info', u'event_files'], ), time_acquisition=dict(mandatory=True, ), time_repetition=dict(mandatory=True, ), - use_temporal_deriv=dict(requires=['model_hrf'], + use_temporal_deriv=dict(requires=[u'model_hrf'], ), volumes_in_cluster=dict(usedefault=True, ), diff --git a/nipype/algorithms/tests/test_icc_anova.py b/nipype/algorithms/tests/test_icc_anova.py index 78f5f515e6..8b0262848b 100644 --- a/nipype/algorithms/tests/test_icc_anova.py +++ b/nipype/algorithms/tests/test_icc_anova.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from __future__ import division import numpy as np from 
nipype.testing import assert_equal diff --git a/nipype/algorithms/tests/test_mesh_ops.py b/nipype/algorithms/tests/test_mesh_ops.py index 38edb8ecef..03091ed264 100644 --- a/nipype/algorithms/tests/test_mesh_ops.py +++ b/nipype/algorithms/tests/test_mesh_ops.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # coding: utf-8 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/algorithms/tests/test_modelgen.py b/nipype/algorithms/tests/test_modelgen.py index 05d42db4a3..72aa0eb845 100644 --- a/nipype/algorithms/tests/test_modelgen.py +++ b/nipype/algorithms/tests/test_modelgen.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: from __future__ import division diff --git a/nipype/algorithms/tests/test_moments.py b/nipype/algorithms/tests/test_moments.py index b4d2c88993..3c7b0b46ce 100644 --- a/nipype/algorithms/tests/test_moments.py +++ b/nipype/algorithms/tests/test_moments.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- import numpy as np from nipype.testing import assert_true import tempfile diff --git a/nipype/algorithms/tests/test_normalize_tpms.py b/nipype/algorithms/tests/test_normalize_tpms.py index d044eff77c..d612ce2708 100644 --- a/nipype/algorithms/tests/test_normalize_tpms.py +++ b/nipype/algorithms/tests/test_normalize_tpms.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # coding: utf-8 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/algorithms/tests/test_rapidart.py b/nipype/algorithms/tests/test_rapidart.py index 1ba6414e29..a75745cda5 100644 --- a/nipype/algorithms/tests/test_rapidart.py +++ b/nipype/algorithms/tests/test_rapidart.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: from 
__future__ import division diff --git a/nipype/caching/__init__.py b/nipype/caching/__init__.py index db0261ebea..1e99ed4428 100644 --- a/nipype/caching/__init__.py +++ b/nipype/caching/__init__.py @@ -1 +1,2 @@ +# -*- coding: utf-8 -*- from .memory import Memory diff --git a/nipype/caching/memory.py b/nipype/caching/memory.py index d8b14fb396..2e672478dc 100644 --- a/nipype/caching/memory.py +++ b/nipype/caching/memory.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- """ Using nipype with persistence and lazy recomputation but without explicit name-steps pipeline: getting back scope in command-line based programming. @@ -8,9 +9,8 @@ >>> datadir = os.path.realpath(os.path.join(filepath, '../testing/data')) >>> os.chdir(datadir) """ - -from __future__ import print_function -from builtins import object +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import object, open import os import hashlib @@ -106,11 +106,13 @@ def __repr__(self): def read_log(filename, run_dict=None): if run_dict is None: run_dict = dict() - for line in open(filename, 'r'): - dir_name, job_name = line[:-1].split('/') - jobs = run_dict.get(dir_name, set()) - jobs.add(job_name) - run_dict[dir_name] = jobs + + with open(filename, 'r') as logfile: + for line in logfile: + dir_name, job_name = line[:-1].split('/') + jobs = run_dict.get(dir_name, set()) + jobs.add(job_name) + run_dict[dir_name] = jobs return run_dict @@ -176,7 +178,7 @@ def __init__(self, base_dir): elif not os.path.isdir(base_dir): raise ValueError('base_dir should be a directory') self.base_dir = base_dir - open(os.path.join(base_dir, 'log.current'), 'w') + open(os.path.join(base_dir, 'log.current'), 'a').close() def cache(self, interface): """ Returns a callable that caches the output of an interface @@ -226,8 +228,9 @@ def _log_name(self, dir_name, job_name): # Every counter is a file opened in append mode and closed # immediately to avoid race conditions in parallel computing: # file 
appends are atomic - open(os.path.join(base_dir, 'log.current'), - 'a').write('%s/%s\n' % (dir_name, job_name)) + with open(os.path.join(base_dir, 'log.current'), 'a') as currentlog: + currentlog.write('%s/%s\n' % (dir_name, job_name)) + t = time.localtime() year_dir = os.path.join(base_dir, 'log.%i' % t.tm_year) try: @@ -239,8 +242,9 @@ def _log_name(self, dir_name, job_name): os.mkdir(month_dir) except OSError: "Dir exists" - open(os.path.join(month_dir, '%02i.log' % t.tm_mday), - 'a').write('%s/%s\n' % (dir_name, job_name)) + + with open(os.path.join(month_dir, '%02i.log' % t.tm_mday), 'a') as rotatefile: + rotatefile.write('%s/%s\n' % (dir_name, job_name)) def clear_previous_runs(self, warn=True): """ Remove all the cache that where not used in the latest run of @@ -293,7 +297,7 @@ def _clear_all_but(self, runs, warn=True): input. """ rm_all_but(self.base_dir, set(runs.keys()), warn=warn) - for dir_name, job_names in runs.items(): + for dir_name, job_names in list(runs.items()): rm_all_but(os.path.join(self.base_dir, dir_name), job_names, warn=warn) diff --git a/nipype/caching/tests/__init__.py b/nipype/caching/tests/__init__.py index e69de29bb2..40a96afc6f 100644 --- a/nipype/caching/tests/__init__.py +++ b/nipype/caching/tests/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/nipype/caching/tests/test_memory.py b/nipype/caching/tests/test_memory.py index 89afcfa7a7..d32b3cd8aa 100644 --- a/nipype/caching/tests/test_memory.py +++ b/nipype/caching/tests/test_memory.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- """ Test the nipype interface caching mechanism """ diff --git a/nipype/external/__init__.py b/nipype/external/__init__.py index e69de29bb2..40a96afc6f 100644 --- a/nipype/external/__init__.py +++ b/nipype/external/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/nipype/external/cloghandler.py b/nipype/external/cloghandler.py index 4a02532273..3782e19374 100644 --- a/nipype/external/cloghandler.py +++ 
b/nipype/external/cloghandler.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # Copyright 2008 Lowell Alleman # # Licensed under the Apache License, Version 2.0 (the "License"); you may not diff --git a/nipype/external/fsl_imglob.py b/nipype/external/fsl_imglob.py new file mode 100755 index 0000000000..ca15f70108 --- /dev/null +++ b/nipype/external/fsl_imglob.py @@ -0,0 +1,138 @@ +#!/usr/bin/env python +# imglob - expand list of image filenames +# Stephen Smith, Mark Jenkinson and Matthew Webster FMRIB Image Analysis Group +# Copyright (C) 2009 University of Oxford +# Part of FSL - FMRIB's Software Library +# http://www.fmrib.ox.ac.uk/fsl +# fsl@fmrib.ox.ac.uk +# +# Developed at FMRIB (Oxford Centre for Functional Magnetic Resonance +# Imaging of the Brain), Department of Clinical Neurology, Oxford +# University, Oxford, UK +# +# +# LICENCE +# +# FMRIB Software Library, Release 5.0 (c) 2012, The University of +# Oxford (the "Software") +# +# The Software remains the property of the University of Oxford ("the +# University"). +# +# The Software is distributed "AS IS" under this Licence solely for +# non-commercial use in the hope that it will be useful, but in order +# that the University as a charitable foundation protects its assets for +# the benefit of its educational and research purposes, the University +# makes clear that no condition is made or to be implied, nor is any +# warranty given or to be implied, as to the accuracy of the Software, +# or that it will be suitable for any particular purpose or for use +# under any specific conditions. Furthermore, the University disclaims +# all responsibility for the use which is made of the Software. It +# further disclaims any liability for the outcomes arising from using +# the Software. 
+# +# The Licensee agrees to indemnify the University and hold the +# University harmless from and against any and all claims, damages and +# liabilities asserted by third parties (including claims for +# negligence) which arise directly or indirectly from the use of the +# Software or the sale of any products based on the Software. +# +# No part of the Software may be reproduced, modified, transmitted or +# transferred in any form or by any means, electronic or mechanical, +# without the express permission of the University. The permission of +# the University is not required if the said reproduction, modification, +# transmission or transference is done without financial return, the +# conditions of this Licence are imposed upon the receiver of the +# product, and all original and amended source code is included in any +# transmitted product. You may be held legally responsible for any +# copyright infringement that is caused or encouraged by your failure to +# abide by these terms and conditions. +# +# You are not permitted under this Licence to use this Software +# commercially. Use for which any financial return is received shall be +# defined as commercial use, and includes (1) integration of all or part +# of the source code or the Software into a product for sale or license +# by or on behalf of Licensee to third parties or (2) use of the +# Software or any derivative of it for research with the final aim of +# developing software products for sale or license to a third party or +# (3) use of the Software or any derivative of it for research with the +# final aim of developing non-software products for sale or license to a +# third party, or (4) use of the Software to provide any service to an +# external organisation for which payment is received. If you are +# interested in using the Software commercially, please contact Isis +# Innovation Limited ("Isis"), the technology transfer company of the +# University, to negotiate a licence. 
Contact details are: +# innovation@isis.ox.ac.uk quoting reference DE/9564. +from __future__ import print_function +import sys +import os +import glob + +setAvailable = True +if sys.version_info < (2, 4): + import sets + from sets import Set + setAvailable = False + + +def usage(): + print("Usage: $0 [-extension/extensions] ") + print(" -extension for one image with full extension") + print(" -extensions for image list with full extensions") + sys.exit(1) + + +# Returns whether an input filename has an image extension ( and the +# basename and extension pair ) +def isImage(input, allExtensions): + for extension in allExtensions: + if input[-len(extension):] == extension: + return True, input[:-len(extension)], extension + return False, input, '' + + +def removeImageExtension(input, allExtensions): + return isImage(input, allExtensions)[1] + +if len(sys.argv) <= 1: + usage() + +deleteExtensions = True +primaryExtensions = ['.nii.gz', '.nii', '.hdr.gz', '.hdr'] +secondaryExtensions = ['.img.gz', '.img'] +allExtensions = primaryExtensions+secondaryExtensions +validExtensions = primaryExtensions +startingArg = 1 + +if sys.argv[1] == "-extensions": + validExtensions = allExtensions + deleteExtensions = False + startingArg = 2 +if sys.argv[1] == "-extension": + deleteExtensions = False + startingArg = 2 + +filelist = [] +for arg in range(startingArg, len(sys.argv)): + # if isImage(sys.argv[arg],allExtensions)[0]: #These enable a "pedantic" style mode currently not used + # filelist.extend(glob.glob(sys.argv[arg])) + # else: + # for currentExtension in validExtensions: + # filelist.extend(glob.glob(sys.argv[arg]+currentExtension)) + for currentExtension in validExtensions: + filelist.extend( + glob.glob(removeImageExtension(sys.argv[arg], allExtensions)+currentExtension)) + +if deleteExtensions: + for file in range(0, len(filelist)): + filelist[file] = removeImageExtension(filelist[file], allExtensions) +if setAvailable: + filelist = list(set(filelist)) +else: + filelist 
= list(Set(filelist)) +filelist.sort() + +for file in range(0, len(filelist)): + print(filelist[file], end=' ') + if file < len(filelist)-1: + print(" ", end=' ') diff --git a/nipype/external/portalocker.py b/nipype/external/portalocker.py index 40b12b3cf3..9cb186743c 100644 --- a/nipype/external/portalocker.py +++ b/nipype/external/portalocker.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # portalocker.py - Cross-platform (posix/nt) API for flock-style file locking. # Requires python 1.5.2 or better. '''Cross-platform (posix/nt) API for flock-style file locking. @@ -48,9 +49,8 @@ Version: $Id: portalocker.py 5474 2008-05-16 20:53:50Z lowell $ ''' -from __future__ import print_function -from __future__ import absolute_import - +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import open __all__ = [ 'lock', @@ -131,11 +131,9 @@ def unlock(file): log = open('log.txt', 'a+') portalocker.lock(log, portalocker.LOCK_EX) - timestamp = strftime('%m/%d/%Y %H:%M:%S\n', localtime(time())) log.write(timestamp) print('Wrote lines. 
Hit enter to release lock.') dummy = sys.stdin.readline() - log.close() diff --git a/nipype/external/six.py b/nipype/external/six.py deleted file mode 100644 index 190c0239cd..0000000000 --- a/nipype/external/six.py +++ /dev/null @@ -1,868 +0,0 @@ -"""Utilities for writing code that runs on Python 2 and 3""" - -# Copyright (c) 2010-2015 Benjamin Peterson -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. - -from __future__ import absolute_import - -import functools -import itertools -import operator -import sys -import types - -__author__ = "Benjamin Peterson " -__version__ = "1.10.0" - - -# Useful for very coarse version differentiation. 
-PY2 = sys.version_info[0] == 2 -PY3 = sys.version_info[0] == 3 -PY34 = sys.version_info[0:2] >= (3, 4) - -if PY3: - string_types = str, - integer_types = int, - class_types = type, - text_type = str - binary_type = bytes - - MAXSIZE = sys.maxsize -else: - string_types = basestring, - integer_types = (int, long) - class_types = (type, types.ClassType) - text_type = unicode - binary_type = str - - if sys.platform.startswith("java"): - # Jython always uses 32 bits. - MAXSIZE = int((1 << 31) - 1) - else: - # It's possible to have sizeof(long) != sizeof(Py_ssize_t). - class X(object): - - def __len__(self): - return 1 << 31 - try: - len(X()) - except OverflowError: - # 32-bit - MAXSIZE = int((1 << 31) - 1) - else: - # 64-bit - MAXSIZE = int((1 << 63) - 1) - del X - - -def _add_doc(func, doc): - """Add documentation to a function.""" - func.__doc__ = doc - - -def _import_module(name): - """Import module, returning the module after the last dot.""" - __import__(name) - return sys.modules[name] - - -class _LazyDescr(object): - - def __init__(self, name): - self.name = name - - def __get__(self, obj, tp): - result = self._resolve() - setattr(obj, self.name, result) # Invokes __set__. - try: - # This is a bit ugly, but it avoids running this again by - # removing this descriptor. 
- delattr(obj.__class__, self.name) - except AttributeError: - pass - return result - - -class MovedModule(_LazyDescr): - - def __init__(self, name, old, new=None): - super(MovedModule, self).__init__(name) - if PY3: - if new is None: - new = name - self.mod = new - else: - self.mod = old - - def _resolve(self): - return _import_module(self.mod) - - def __getattr__(self, attr): - _module = self._resolve() - value = getattr(_module, attr) - setattr(self, attr, value) - return value - - -class _LazyModule(types.ModuleType): - - def __init__(self, name): - super(_LazyModule, self).__init__(name) - self.__doc__ = self.__class__.__doc__ - - def __dir__(self): - attrs = ["__doc__", "__name__"] - attrs += [attr.name for attr in self._moved_attributes] - return attrs - - # Subclasses should override this - _moved_attributes = [] - - -class MovedAttribute(_LazyDescr): - - def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): - super(MovedAttribute, self).__init__(name) - if PY3: - if new_mod is None: - new_mod = name - self.mod = new_mod - if new_attr is None: - if old_attr is None: - new_attr = name - else: - new_attr = old_attr - self.attr = new_attr - else: - self.mod = old_mod - if old_attr is None: - old_attr = name - self.attr = old_attr - - def _resolve(self): - module = _import_module(self.mod) - return getattr(module, self.attr) - - -class _SixMetaPathImporter(object): - - """ - A meta path importer to import six.moves and its submodules. - - This class implements a PEP302 finder and loader. It should be compatible - with Python 2.5 and all existing versions of Python3 - """ - - def __init__(self, six_module_name): - self.name = six_module_name - self.known_modules = {} - - def _add_module(self, mod, *fullnames): - for fullname in fullnames: - self.known_modules[self.name + "." + fullname] = mod - - def _get_module(self, fullname): - return self.known_modules[self.name + "." 
+ fullname] - - def find_module(self, fullname, path=None): - if fullname in self.known_modules: - return self - return None - - def __get_module(self, fullname): - try: - return self.known_modules[fullname] - except KeyError: - raise ImportError("This loader does not know module " + fullname) - - def load_module(self, fullname): - try: - # in case of a reload - return sys.modules[fullname] - except KeyError: - pass - mod = self.__get_module(fullname) - if isinstance(mod, MovedModule): - mod = mod._resolve() - else: - mod.__loader__ = self - sys.modules[fullname] = mod - return mod - - def is_package(self, fullname): - """ - Return true, if the named module is a package. - - We need this method to get correct spec objects with - Python 3.4 (see PEP451) - """ - return hasattr(self.__get_module(fullname), "__path__") - - def get_code(self, fullname): - """Return None - - Required, if is_package is implemented""" - self.__get_module(fullname) # eventually raises ImportError - return None - get_source = get_code # same as get_code - -_importer = _SixMetaPathImporter(__name__) - - -class _MovedItems(_LazyModule): - - """Lazy loading of moved objects""" - __path__ = [] # mark as package - - -_moved_attributes = [ - MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), - MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), - MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), - MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), - MovedAttribute("intern", "__builtin__", "sys"), - MovedAttribute("map", "itertools", "builtins", "imap", "map"), - MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), - MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), - MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), - MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), - MovedAttribute("reduce", "__builtin__", "functools"), - 
MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), - MovedAttribute("StringIO", "StringIO", "io"), - MovedAttribute("UserDict", "UserDict", "collections"), - MovedAttribute("UserList", "UserList", "collections"), - MovedAttribute("UserString", "UserString", "collections"), - MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), - MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), - MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), - MovedModule("builtins", "__builtin__"), - MovedModule("configparser", "ConfigParser"), - MovedModule("copyreg", "copy_reg"), - MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), - MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), - MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), - MovedModule("http_cookies", "Cookie", "http.cookies"), - MovedModule("html_entities", "htmlentitydefs", "html.entities"), - MovedModule("html_parser", "HTMLParser", "html.parser"), - MovedModule("http_client", "httplib", "http.client"), - MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), - MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), - MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), - MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), - MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), - MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), - MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), - MovedModule("cPickle", "cPickle", "pickle"), - MovedModule("queue", "Queue"), - MovedModule("reprlib", "repr"), - MovedModule("socketserver", "SocketServer"), - MovedModule("_thread", "thread", "_thread"), - MovedModule("tkinter", "Tkinter"), - MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), - MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), - MovedModule("tkinter_scrolledtext", 
"ScrolledText", "tkinter.scrolledtext"), - MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), - MovedModule("tkinter_tix", "Tix", "tkinter.tix"), - MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), - MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), - MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), - MovedModule("tkinter_colorchooser", "tkColorChooser", - "tkinter.colorchooser"), - MovedModule("tkinter_commondialog", "tkCommonDialog", - "tkinter.commondialog"), - MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), - MovedModule("tkinter_font", "tkFont", "tkinter.font"), - MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), - MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", - "tkinter.simpledialog"), - MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), - MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), - MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), - MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), - MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), - MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), -] -# Add windows specific modules. -if sys.platform == "win32": - _moved_attributes += [ - MovedModule("winreg", "_winreg"), - ] - -for attr in _moved_attributes: - setattr(_MovedItems, attr.name, attr) - if isinstance(attr, MovedModule): - _importer._add_module(attr, "moves." 
+ attr.name) -del attr - -_MovedItems._moved_attributes = _moved_attributes - -moves = _MovedItems(__name__ + ".moves") -_importer._add_module(moves, "moves") - - -class Module_six_moves_urllib_parse(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_parse""" - - -_urllib_parse_moved_attributes = [ - MovedAttribute("ParseResult", "urlparse", "urllib.parse"), - MovedAttribute("SplitResult", "urlparse", "urllib.parse"), - MovedAttribute("parse_qs", "urlparse", "urllib.parse"), - MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), - MovedAttribute("urldefrag", "urlparse", "urllib.parse"), - MovedAttribute("urljoin", "urlparse", "urllib.parse"), - MovedAttribute("urlparse", "urlparse", "urllib.parse"), - MovedAttribute("urlsplit", "urlparse", "urllib.parse"), - MovedAttribute("urlunparse", "urlparse", "urllib.parse"), - MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), - MovedAttribute("quote", "urllib", "urllib.parse"), - MovedAttribute("quote_plus", "urllib", "urllib.parse"), - MovedAttribute("unquote", "urllib", "urllib.parse"), - MovedAttribute("unquote_plus", "urllib", "urllib.parse"), - MovedAttribute("urlencode", "urllib", "urllib.parse"), - MovedAttribute("splitquery", "urllib", "urllib.parse"), - MovedAttribute("splittag", "urllib", "urllib.parse"), - MovedAttribute("splituser", "urllib", "urllib.parse"), - MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), - MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), - MovedAttribute("uses_params", "urlparse", "urllib.parse"), - MovedAttribute("uses_query", "urlparse", "urllib.parse"), - MovedAttribute("uses_relative", "urlparse", "urllib.parse"), -] -for attr in _urllib_parse_moved_attributes: - setattr(Module_six_moves_urllib_parse, attr.name, attr) -del attr - -Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes - -_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), - "moves.urllib_parse", 
"moves.urllib.parse") - - -class Module_six_moves_urllib_error(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_error""" - - -_urllib_error_moved_attributes = [ - MovedAttribute("URLError", "urllib2", "urllib.error"), - MovedAttribute("HTTPError", "urllib2", "urllib.error"), - MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), -] -for attr in _urllib_error_moved_attributes: - setattr(Module_six_moves_urllib_error, attr.name, attr) -del attr - -Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes - -_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), - "moves.urllib_error", "moves.urllib.error") - - -class Module_six_moves_urllib_request(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_request""" - - -_urllib_request_moved_attributes = [ - MovedAttribute("urlopen", "urllib2", "urllib.request"), - MovedAttribute("install_opener", "urllib2", "urllib.request"), - MovedAttribute("build_opener", "urllib2", "urllib.request"), - MovedAttribute("pathname2url", "urllib", "urllib.request"), - MovedAttribute("url2pathname", "urllib", "urllib.request"), - MovedAttribute("getproxies", "urllib", "urllib.request"), - MovedAttribute("Request", "urllib2", "urllib.request"), - MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), - MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), - MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), - MovedAttribute("BaseHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), - MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), - MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), - 
MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), - MovedAttribute("FileHandler", "urllib2", "urllib.request"), - MovedAttribute("FTPHandler", "urllib2", "urllib.request"), - MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), - MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), - MovedAttribute("urlretrieve", "urllib", "urllib.request"), - MovedAttribute("urlcleanup", "urllib", "urllib.request"), - MovedAttribute("URLopener", "urllib", "urllib.request"), - MovedAttribute("FancyURLopener", "urllib", "urllib.request"), - MovedAttribute("proxy_bypass", "urllib", "urllib.request"), -] -for attr in _urllib_request_moved_attributes: - setattr(Module_six_moves_urllib_request, attr.name, attr) -del attr - -Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes - -_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), - "moves.urllib_request", "moves.urllib.request") - - -class Module_six_moves_urllib_response(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_response""" - - -_urllib_response_moved_attributes = [ - MovedAttribute("addbase", "urllib", "urllib.response"), - MovedAttribute("addclosehook", "urllib", "urllib.response"), - MovedAttribute("addinfo", "urllib", "urllib.response"), - MovedAttribute("addinfourl", "urllib", "urllib.response"), -] -for attr in _urllib_response_moved_attributes: - setattr(Module_six_moves_urllib_response, attr.name, attr) -del attr - -Module_six_moves_urllib_response._moved_attributes = 
_urllib_response_moved_attributes - -_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), - "moves.urllib_response", "moves.urllib.response") - - -class Module_six_moves_urllib_robotparser(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_robotparser""" - - -_urllib_robotparser_moved_attributes = [ - MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), -] -for attr in _urllib_robotparser_moved_attributes: - setattr(Module_six_moves_urllib_robotparser, attr.name, attr) -del attr - -Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes - -_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), - "moves.urllib_robotparser", "moves.urllib.robotparser") - - -class Module_six_moves_urllib(types.ModuleType): - - """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" - __path__ = [] # mark as package - parse = _importer._get_module("moves.urllib_parse") - error = _importer._get_module("moves.urllib_error") - request = _importer._get_module("moves.urllib_request") - response = _importer._get_module("moves.urllib_response") - robotparser = _importer._get_module("moves.urllib_robotparser") - - def __dir__(self): - return ['parse', 'error', 'request', 'response', 'robotparser'] - -_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), - "moves.urllib") - - -def add_move(move): - """Add an item to six.moves.""" - setattr(_MovedItems, move.name, move) - - -def remove_move(name): - """Remove item from six.moves.""" - try: - delattr(_MovedItems, name) - except AttributeError: - try: - del moves.__dict__[name] - except KeyError: - raise AttributeError("no such move, %r" % (name,)) - - -if PY3: - _meth_func = "__func__" - _meth_self = "__self__" - - _func_closure = "__closure__" - _func_code = "__code__" - _func_defaults = "__defaults__" - _func_globals = "__globals__" -else: - 
_meth_func = "im_func" - _meth_self = "im_self" - - _func_closure = "func_closure" - _func_code = "func_code" - _func_defaults = "func_defaults" - _func_globals = "func_globals" - - -try: - advance_iterator = next -except NameError: - def advance_iterator(it): - return it.next() -next = advance_iterator - - -try: - callable = callable -except NameError: - def callable(obj): - return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) - - -if PY3: - def get_unbound_function(unbound): - return unbound - - create_bound_method = types.MethodType - - def create_unbound_method(func, cls): - return func - - Iterator = object -else: - def get_unbound_function(unbound): - return unbound.im_func - - def create_bound_method(func, obj): - return types.MethodType(func, obj, obj.__class__) - - def create_unbound_method(func, cls): - return types.MethodType(func, None, cls) - - class Iterator(object): - - def next(self): - return type(self).__next__(self) - - callable = callable -_add_doc(get_unbound_function, - """Get the function out of a possibly unbound function""") - - -get_method_function = operator.attrgetter(_meth_func) -get_method_self = operator.attrgetter(_meth_self) -get_function_closure = operator.attrgetter(_func_closure) -get_function_code = operator.attrgetter(_func_code) -get_function_defaults = operator.attrgetter(_func_defaults) -get_function_globals = operator.attrgetter(_func_globals) - - -if PY3: - def iterkeys(d, **kw): - return iter(d.keys(**kw)) - - def itervalues(d, **kw): - return iter(d.values(**kw)) - - def iteritems(d, **kw): - return iter(d.items(**kw)) - - def iterlists(d, **kw): - return iter(d.lists(**kw)) - - viewkeys = operator.methodcaller("keys") - - viewvalues = operator.methodcaller("values") - - viewitems = operator.methodcaller("items") -else: - def iterkeys(d, **kw): - return d.iterkeys(**kw) - - def itervalues(d, **kw): - return d.itervalues(**kw) - - def iteritems(d, **kw): - return d.iteritems(**kw) - - def iterlists(d, 
**kw): - return d.iterlists(**kw) - - viewkeys = operator.methodcaller("viewkeys") - - viewvalues = operator.methodcaller("viewvalues") - - viewitems = operator.methodcaller("viewitems") - -_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") -_add_doc(itervalues, "Return an iterator over the values of a dictionary.") -_add_doc(iteritems, - "Return an iterator over the (key, value) pairs of a dictionary.") -_add_doc(iterlists, - "Return an iterator over the (key, [values]) pairs of a dictionary.") - - -if PY3: - def b(s): - return s.encode("latin-1") - - def u(s): - return s - unichr = chr - import struct - int2byte = struct.Struct(">B").pack - del struct - byte2int = operator.itemgetter(0) - indexbytes = operator.getitem - iterbytes = iter - import io - StringIO = io.StringIO - BytesIO = io.BytesIO - _assertCountEqual = "assertCountEqual" - if sys.version_info[1] <= 1: - _assertRaisesRegex = "assertRaisesRegexp" - _assertRegex = "assertRegexpMatches" - else: - _assertRaisesRegex = "assertRaisesRegex" - _assertRegex = "assertRegex" -else: - def b(s): - return s - # Workaround for standalone backslash - - def u(s): - return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") - unichr = unichr - int2byte = chr - - def byte2int(bs): - return ord(bs[0]) - - def indexbytes(buf, i): - return ord(buf[i]) - iterbytes = functools.partial(itertools.imap, ord) - import StringIO - StringIO = BytesIO = StringIO.StringIO - _assertCountEqual = "assertItemsEqual" - _assertRaisesRegex = "assertRaisesRegexp" - _assertRegex = "assertRegexpMatches" -_add_doc(b, """Byte literal""") -_add_doc(u, """Text literal""") - - -def assertCountEqual(self, *args, **kwargs): - return getattr(self, _assertCountEqual)(*args, **kwargs) - - -def assertRaisesRegex(self, *args, **kwargs): - return getattr(self, _assertRaisesRegex)(*args, **kwargs) - - -def assertRegex(self, *args, **kwargs): - return getattr(self, _assertRegex)(*args, **kwargs) - - -if PY3: - exec_ = 
getattr(moves.builtins, "exec") - - def reraise(tp, value, tb=None): - if value is None: - value = tp() - if value.__traceback__ is not tb: - raise value.with_traceback(tb) - raise value - -else: - def exec_(_code_, _globs_=None, _locs_=None): - """Execute code in a namespace.""" - if _globs_ is None: - frame = sys._getframe(1) - _globs_ = frame.f_globals - if _locs_ is None: - _locs_ = frame.f_locals - del frame - elif _locs_ is None: - _locs_ = _globs_ - exec("""exec _code_ in _globs_, _locs_""") - - exec_("""def reraise(tp, value, tb=None): - raise tp, value, tb -""") - - -if sys.version_info[:2] == (3, 2): - exec_("""def raise_from(value, from_value): - if from_value is None: - raise value - raise value from from_value -""") -elif sys.version_info[:2] > (3, 2): - exec_("""def raise_from(value, from_value): - raise value from from_value -""") -else: - def raise_from(value, from_value): - raise value - - -print_ = getattr(moves.builtins, "print", None) -if print_ is None: - def print_(*args, **kwargs): - """The new-style print function for Python 2.4 and 2.5.""" - fp = kwargs.pop("file", sys.stdout) - if fp is None: - return - - def write(data): - if not isinstance(data, basestring): - data = str(data) - # If the file has an encoding, encode unicode with it. 
- if (isinstance(fp, file) and - isinstance(data, unicode) and - fp.encoding is not None): - errors = getattr(fp, "errors", None) - if errors is None: - errors = "strict" - data = data.encode(fp.encoding, errors) - fp.write(data) - want_unicode = False - sep = kwargs.pop("sep", None) - if sep is not None: - if isinstance(sep, unicode): - want_unicode = True - elif not isinstance(sep, str): - raise TypeError("sep must be None or a string") - end = kwargs.pop("end", None) - if end is not None: - if isinstance(end, unicode): - want_unicode = True - elif not isinstance(end, str): - raise TypeError("end must be None or a string") - if kwargs: - raise TypeError("invalid keyword arguments to print()") - if not want_unicode: - for arg in args: - if isinstance(arg, unicode): - want_unicode = True - break - if want_unicode: - newline = unicode("\n") - space = unicode(" ") - else: - newline = "\n" - space = " " - if sep is None: - sep = space - if end is None: - end = newline - for i, arg in enumerate(args): - if i: - write(sep) - write(arg) - write(end) -if sys.version_info[:2] < (3, 3): - _print = print_ - - def print_(*args, **kwargs): - fp = kwargs.get("file", sys.stdout) - flush = kwargs.pop("flush", False) - _print(*args, **kwargs) - if flush and fp is not None: - fp.flush() - -_add_doc(reraise, """Reraise an exception.""") - -if sys.version_info[0:2] < (3, 4): - def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, - updated=functools.WRAPPER_UPDATES): - def wrapper(f): - f = functools.wraps(wrapped, assigned, updated)(f) - f.__wrapped__ = wrapped - return f - return wrapper -else: - wraps = functools.wraps - - -def with_metaclass(meta, *bases): - """Create a base class with a metaclass.""" - # This requires a bit of explanation: the basic idea is to make a dummy - # metaclass for one level of class instantiation that replaces itself with - # the actual metaclass. 
- class metaclass(meta): - - def __new__(cls, name, this_bases, d): - return meta(name, bases, d) - return type.__new__(metaclass, 'temporary_class', (), {}) - - -def add_metaclass(metaclass): - """Class decorator for creating a class with a metaclass.""" - def wrapper(cls): - orig_vars = cls.__dict__.copy() - slots = orig_vars.get('__slots__') - if slots is not None: - if isinstance(slots, str): - slots = [slots] - for slots_var in slots: - orig_vars.pop(slots_var) - orig_vars.pop('__dict__', None) - orig_vars.pop('__weakref__', None) - return metaclass(cls.__name__, cls.__bases__, orig_vars) - return wrapper - - -def python_2_unicode_compatible(klass): - """ - A decorator that defines __unicode__ and __str__ methods under Python 2. - Under Python 3 it does nothing. - - To support Python 2 and 3 with a single code base, define a __str__ method - returning text and apply this decorator to the class. - """ - if PY2: - if '__str__' not in klass.__dict__: - raise ValueError("@python_2_unicode_compatible cannot be applied " - "to %s because it doesn't define __str__()." % - klass.__name__) - klass.__unicode__ = klass.__str__ - klass.__str__ = lambda self: self.__unicode__().encode('utf-8') - return klass - - -# Complete the moves implementation. -# This code is at the end of this module to speed up module loading. -# Turn this module into a package. -__path__ = [] # required for PEP 302 and PEP 451 -__package__ = __name__ # see PEP 366 @ReservedAssignment -if globals().get("__spec__") is not None: - __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable -# Remove other six meta path importers, since they cause problems. This can -# happen if six is removed from sys.modules and then reloaded. (Setuptools does -# this for some reason.) -if sys.meta_path: - for i, importer in enumerate(sys.meta_path): - # Here's some real nastiness: Another "instance" of the six module might - # be floating around. 
Therefore, we can't use isinstance() to check for - # the six meta path importer, since the other six instance will have - # inserted an importer with different class. - if (type(importer).__name__ == "_SixMetaPathImporter" and - importer.name == __name__): - del sys.meta_path[i] - break - del i, importer -# Finally, add the importer to the meta path import hook. -sys.meta_path.append(_importer) diff --git a/nipype/fixes/__init__.py b/nipype/fixes/__init__.py index 5038b83ab7..a04158a3ae 100644 --- a/nipype/fixes/__init__.py +++ b/nipype/fixes/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # We import numpy fixes during init of the testing package. We need to delay # import of the testing package until after it has initialized diff --git a/nipype/fixes/numpy/__init__.py b/nipype/fixes/numpy/__init__.py index 6fddfba5f3..7850043b8f 100644 --- a/nipype/fixes/numpy/__init__.py +++ b/nipype/fixes/numpy/__init__.py @@ -1 +1,2 @@ +# -*- coding: utf-8 -*- # numpy fixes package diff --git a/nipype/fixes/numpy/testing/__init__.py b/nipype/fixes/numpy/testing/__init__.py index 4ff76c49d7..87ed9ba529 100644 --- a/nipype/fixes/numpy/testing/__init__.py +++ b/nipype/fixes/numpy/testing/__init__.py @@ -1 +1,2 @@ +# -*- coding: utf-8 -*- # Package init for fixes.numpy.testing diff --git a/nipype/fixes/numpy/testing/noseclasses.py b/nipype/fixes/numpy/testing/noseclasses.py index 9f69dc33db..db7ae585e1 100644 --- a/nipype/fixes/numpy/testing/noseclasses.py +++ b/nipype/fixes/numpy/testing/noseclasses.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from __future__ import absolute_import from builtins import object # These classes implement a doctest runner plugin for nose, a "known failure" diff --git a/nipype/fixes/numpy/testing/nosetester.py b/nipype/fixes/numpy/testing/nosetester.py index 22c8d1a5ef..e6b7e10a2e 100644 --- a/nipype/fixes/numpy/testing/nosetester.py +++ b/nipype/fixes/numpy/testing/nosetester.py @@ -1,19 +1,16 @@ +# -*- coding: utf-8 -*- """ Nose test running. 
This module implements ``test()`` and ``bench()`` functions for NumPy modules. """ - -from __future__ import print_function -from __future__ import absolute_import -from builtins import object +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import object, str, bytes import os import sys -from ....external.six import string_types - def get_package_name(filepath): """ @@ -26,7 +23,7 @@ def get_package_name(filepath): Examples -------- - >>> np.testing.nosetester.get_package_name('nonsense') + >>> np.testing.nosetester.get_package_name('nonsense') # doctest: +IGNORE_UNICODE 'numpy' """ @@ -176,7 +173,7 @@ def _test_argv(self, label, verbose, extra_argv): ''' argv = [__file__, self.package_path, '-s'] if label and label != 'full': - if not isinstance(label, string_types): + if not isinstance(label, (str, bytes)): raise TypeError('Selection label should be a string') if label == 'fast': label = 'not slow' diff --git a/nipype/fixes/numpy/testing/utils.py b/nipype/fixes/numpy/testing/utils.py index 1f7acccec4..f50cb0bd2a 100644 --- a/nipype/fixes/numpy/testing/utils.py +++ b/nipype/fixes/numpy/testing/utils.py @@ -1,2 +1,3 @@ +# -*- coding: utf-8 -*- # Allow numpy fixes noseclasses to do local import of utils from numpy.testing.utils import * diff --git a/nipype/info.py b/nipype/info.py index f72e5d21c4..ffab23276d 100644 --- a/nipype/info.py +++ b/nipype/info.py @@ -2,6 +2,7 @@ settings in setup.py, the nipy top-level docstring, and for building the docs. In setup.py in particular, we exec this file, so it cannot import nipy """ +from __future__ import print_function, division, unicode_literals, absolute_import # nipype version information. 
An empty _version_extra corresponds to a diff --git a/nipype/interfaces/__init__.py b/nipype/interfaces/__init__.py index c9ff7936ba..8f8b5d25c2 100644 --- a/nipype/interfaces/__init__.py +++ b/nipype/interfaces/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -5,8 +6,7 @@ Requires Packages to be installed """ - -from __future__ import absolute_import +from __future__ import print_function, division, unicode_literals, absolute_import __docformat__ = 'restructuredtext' from .io import DataGrabber, DataSink, SelectFiles diff --git a/nipype/interfaces/afni/__init__.py b/nipype/interfaces/afni/__init__.py index c7f123c4a1..4374b15343 100644 --- a/nipype/interfaces/afni/__init__.py +++ b/nipype/interfaces/afni/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The afni module provides classes for interfacing with the `AFNI diff --git a/nipype/interfaces/afni/base.py b/nipype/interfaces/afni/base.py index 3438240741..757191e486 100644 --- a/nipype/interfaces/afni/base.py +++ b/nipype/interfaces/afni/base.py @@ -1,11 +1,13 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Provide interface to AFNI commands.""" +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import object, str, bytes +from future.utils import raise_from import os from sys import platform -from builtins import object -from future.utils import raise_from from ... 
import logging from ...utils.filemanip import split_filename diff --git a/nipype/interfaces/afni/preprocess.py b/nipype/interfaces/afni/preprocess.py index 4b54c371d5..c65a76d50d 100644 --- a/nipype/interfaces/afni/preprocess.py +++ b/nipype/interfaces/afni/preprocess.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft = python sts = 4 ts = 4 sw = 4 et: """Afni preprocessing interfaces @@ -8,18 +9,24 @@ >>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data')) >>> os.chdir(datadir) """ +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import open, str, bytes import os import os.path as op import re import numpy as np -from .base import (AFNICommandBase, AFNICommand, AFNICommandInputSpec, AFNICommandOutputSpec, - Info, no_afni) -from ..base import (CommandLineInputSpec, CommandLine, Directory, TraitedSpec, - traits, isdefined, File, InputMultiPath, Undefined) -from ...external.six import string_types from ...utils.filemanip import (load_json, save_json, split_filename) +from ..base import ( + CommandLineInputSpec, CommandLine, Directory, TraitedSpec, + traits, isdefined, File, InputMultiPath, Undefined) + +from .base import ( + AFNICommandBase, AFNICommand, AFNICommandInputSpec, AFNICommandOutputSpec, + Info, no_afni) + + class BlurToFWHMInputSpec(AFNICommandInputSpec): in_file = File(desc='The dataset that will be smoothed', argstr='-input %s', mandatory=True, exists=True) @@ -31,6 +38,7 @@ class BlurToFWHMInputSpec(AFNICommandInputSpec): mask = File(desc='Mask dataset, if desired. Voxels NOT in mask will be set to zero in output.', argstr='-blurmaster %s', exists=True) + class BlurToFWHM(AFNICommand): """Blurs a 'master' dataset until it reaches a specified FWHM smoothness (approximately). 
@@ -44,11 +52,10 @@ class BlurToFWHM(AFNICommand): >>> blur = afni.preprocess.BlurToFWHM() >>> blur.inputs.in_file = 'epi.nii' >>> blur.inputs.fwhm = 2.5 - >>> blur.cmdline #doctest: +ELLIPSIS + >>> blur.cmdline #doctest: +ELLIPSIS +IGNORE_UNICODE '3dBlurToFWHM -FWHM 2.500000 -input epi.nii -prefix epi_afni' """ - _cmd = '3dBlurToFWHM' input_spec = BlurToFWHMInputSpec output_spec = AFNICommandOutputSpec @@ -82,6 +89,7 @@ class To3DInputSpec(AFNICommandInputSpec): argstr='-time:zt %s alt+z2') + class To3D(AFNICommand): """Create a 3D dataset from 2D image files using AFNI to3d command @@ -97,12 +105,11 @@ class To3D(AFNICommand): >>> To3D.inputs.in_folder = '.' >>> To3D.inputs.out_file = 'dicomdir.nii' >>> To3D.inputs.filetype = "anat" - >>> To3D.cmdline #doctest: +ELLIPSIS + >>> To3D.cmdline #doctest: +ELLIPSIS +IGNORE_UNICODE 'to3d -datum float -anat -prefix dicomdir.nii ./*.dcm' >>> res = To3D.run() #doctest: +SKIP """ - _cmd = 'to3d' input_spec = To3DInputSpec output_spec = AFNICommandOutputSpec @@ -150,6 +157,7 @@ class TShiftInputSpec(AFNICommandInputSpec): argstr="-rlt+") + class TShift(AFNICommand): """Shifts voxel time series from input so that seperate slices are aligned to the same @@ -166,12 +174,11 @@ class TShift(AFNICommand): >>> tshift.inputs.in_file = 'functional.nii' >>> tshift.inputs.tpattern = 'alt+z' >>> tshift.inputs.tzero = 0.0 - >>> tshift.cmdline #doctest: + >>> tshift.cmdline #doctest: +IGNORE_UNICODE '3dTshift -prefix functional_tshift -tpattern alt+z -tzero 0.0 functional.nii' >>> res = tshift.run() # doctest: +SKIP """ - _cmd = '3dTshift' input_spec = TShiftInputSpec output_spec = AFNICommandOutputSpec @@ -212,6 +219,7 @@ class RefitInputSpec(CommandLineInputSpec): ' template type, e.g. 
TLRC, MNI, ORIG') + class Refit(AFNICommandBase): """Changes some of the information inside a 3D dataset's header @@ -225,12 +233,11 @@ class Refit(AFNICommandBase): >>> refit = afni.Refit() >>> refit.inputs.in_file = 'structural.nii' >>> refit.inputs.deoblique = True - >>> refit.cmdline + >>> refit.cmdline # doctest: +IGNORE_UNICODE '3drefit -deoblique structural.nii' >>> res = refit.run() # doctest: +SKIP """ - _cmd = '3drefit' input_spec = RefitInputSpec output_spec = AFNICommandOutputSpec @@ -282,6 +289,7 @@ class WarpInputSpec(AFNICommandInputSpec): argstr="-zpad %d") + class Warp(AFNICommand): """Use 3dWarp for spatially transforming a dataset @@ -296,18 +304,17 @@ class Warp(AFNICommand): >>> warp.inputs.in_file = 'structural.nii' >>> warp.inputs.deoblique = True >>> warp.inputs.out_file = "trans.nii.gz" - >>> warp.cmdline + >>> warp.cmdline # doctest: +IGNORE_UNICODE '3dWarp -deoblique -prefix trans.nii.gz structural.nii' >>> warp_2 = afni.Warp() >>> warp_2.inputs.in_file = 'structural.nii' >>> warp_2.inputs.newgrid = 1.0 >>> warp_2.inputs.out_file = "trans.nii.gz" - >>> warp_2.cmdline + >>> warp_2.cmdline # doctest: +IGNORE_UNICODE '3dWarp -newgrid 1.000000 -prefix trans.nii.gz structural.nii' """ - _cmd = '3dWarp' input_spec = WarpInputSpec output_spec = AFNICommandOutputSpec @@ -340,6 +347,7 @@ class ResampleInputSpec(AFNICommandInputSpec): desc='align dataset grid to a reference file') + class Resample(AFNICommand): """Resample or reorient an image using AFNI 3dresample command @@ -354,7 +362,7 @@ class Resample(AFNICommand): >>> resample.inputs.in_file = 'functional.nii' >>> resample.inputs.orientation= 'RPI' >>> resample.inputs.outputtype = "NIFTI" - >>> resample.cmdline + >>> resample.cmdline # doctest: +IGNORE_UNICODE '3dresample -orient RPI -prefix functional_resample.nii -inset functional.nii' >>> res = resample.run() # doctest: +SKIP @@ -392,6 +400,7 @@ class AutoTcorrelateInputSpec(AFNICommandInputSpec): argstr='-prefix %s', 
name_source="in_file") + class AutoTcorrelate(AFNICommand): """Computes the correlation coefficient between the time series of each pair of voxels in the input dataset, and stores the output into a @@ -407,7 +416,7 @@ class AutoTcorrelate(AFNICommand): >>> corr.inputs.eta2 = True >>> corr.inputs.mask = 'mask.nii' >>> corr.inputs.mask_only_targets = True - >>> corr.cmdline # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE + >>> corr.cmdline # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE +IGNORE_UNICODE '3dAutoTcorrelate -eta2 -mask mask.nii -mask_only_targets -prefix functional_similarity_matrix.1D -polort -1 functional.nii' >>> res = corr.run() # doctest: +SKIP """ @@ -440,6 +449,7 @@ class TStatInputSpec(AFNICommandInputSpec): argstr='%s') + class TStat(AFNICommand): """Compute voxel-wise statistics using AFNI 3dTstat command @@ -454,7 +464,7 @@ class TStat(AFNICommand): >>> tstat.inputs.in_file = 'functional.nii' >>> tstat.inputs.args= '-mean' >>> tstat.inputs.out_file = "stats" - >>> tstat.cmdline + >>> tstat.cmdline # doctest: +IGNORE_UNICODE '3dTstat -mean -prefix stats functional.nii' >>> res = tstat.run() # doctest: +SKIP @@ -477,6 +487,7 @@ class DetrendInputSpec(AFNICommandInputSpec): argstr='-prefix %s', name_source="in_file") + class Detrend(AFNICommand): """This program removes components from voxel time series using linear least squares @@ -492,7 +503,7 @@ class Detrend(AFNICommand): >>> detrend.inputs.in_file = 'functional.nii' >>> detrend.inputs.args = '-polort 2' >>> detrend.inputs.outputtype = "AFNI" - >>> detrend.cmdline + >>> detrend.cmdline # doctest: +IGNORE_UNICODE '3dDetrend -polort 2 -prefix functional_detrend functional.nii' >>> res = detrend.run() # doctest: +SKIP @@ -515,6 +526,7 @@ class DespikeInputSpec(AFNICommandInputSpec): argstr='-prefix %s', name_source="in_file") + class Despike(AFNICommand): """Removes 'spikes' from the 3D+time input dataset @@ -527,7 +539,7 @@ class Despike(AFNICommand): >>> from nipype.interfaces import afni as afni >>> 
despike = afni.Despike() >>> despike.inputs.in_file = 'functional.nii' - >>> despike.cmdline + >>> despike.cmdline # doctest: +IGNORE_UNICODE '3dDespike -prefix functional_despike functional.nii' >>> res = despike.run() # doctest: +SKIP @@ -587,6 +599,7 @@ class DegreeCentralityOutputSpec(AFNICommandOutputSpec): 'and affine matrix') + class DegreeCentrality(AFNICommand): """Performs degree centrality on a dataset using a given maskfile via 3dDegreeCentrality @@ -603,7 +616,7 @@ class DegreeCentrality(AFNICommand): >>> degree.inputs.mask = 'mask.nii' >>> degree.inputs.sparsity = 1 # keep the top one percent of connections >>> degree.inputs.out_file = 'out.nii' - >>> degree.cmdline + >>> degree.cmdline # doctest: +IGNORE_UNICODE '3dDegreeCentrality -mask mask.nii -prefix out.nii -sparsity 1.000000 functional.nii' >>> res = degree.run() # doctest: +SKIP """ @@ -675,6 +688,7 @@ class ECMInputSpec(CentralityInputSpec): argstr='-memory %f') + class ECM(AFNICommand): """Performs degree centrality on a dataset using a given maskfile via the 3dLFCD command @@ -691,7 +705,7 @@ class ECM(AFNICommand): >>> ecm.inputs.mask = 'mask.nii' >>> ecm.inputs.sparsity = 0.1 # keep top 0.1% of connections >>> ecm.inputs.out_file = 'out.nii' - >>> ecm.cmdline + >>> ecm.cmdline # doctest: +IGNORE_UNICODE '3dECM -mask mask.nii -prefix out.nii -sparsity 0.100000 functional.nii' >>> res = ecm.run() # doctest: +SKIP """ @@ -713,6 +727,7 @@ class LFCDInputSpec(CentralityInputSpec): copyfile=False) + class LFCD(AFNICommand): """Performs degree centrality on a dataset using a given maskfile via the 3dLFCD command @@ -729,7 +744,7 @@ class LFCD(AFNICommand): >>> lfcd.inputs.mask = 'mask.nii' >>> lfcd.inputs.thresh = 0.8 # keep all connections with corr >= 0.8 >>> lfcd.inputs.out_file = 'out.nii' - >>> lfcd.cmdline + >>> lfcd.cmdline # doctest: +IGNORE_UNICODE '3dLFCD -mask mask.nii -prefix out.nii -thresh 0.800000 functional.nii' >>> res = lfcd.run() # doctest: +SKIP """ @@ -774,6 +789,7 @@ class 
AutomaskOutputSpec(TraitedSpec): brain_file = File(desc='brain file (skull stripped)', exists=True) + class Automask(AFNICommand): """Create a brain-only mask of the image using AFNI 3dAutomask command @@ -788,7 +804,7 @@ class Automask(AFNICommand): >>> automask.inputs.in_file = 'functional.nii' >>> automask.inputs.dilate = 1 >>> automask.inputs.outputtype = "NIFTI" - >>> automask.cmdline #doctest: +ELLIPSIS + >>> automask.cmdline #doctest: +ELLIPSIS +IGNORE_UNICODE '3dAutomask -apply_prefix functional_masked.nii -dilate 1 -prefix functional_mask.nii functional.nii' >>> res = automask.run() # doctest: +SKIP @@ -844,6 +860,7 @@ class VolregOutputSpec(TraitedSpec): oned_matrix_save = File(desc='matrix transformation from base to input', exists=True) + class Volreg(AFNICommand): """Register input volumes to a base volume using AFNI 3dvolreg command @@ -859,7 +876,7 @@ class Volreg(AFNICommand): >>> volreg.inputs.args = '-Fourier -twopass' >>> volreg.inputs.zpad = 4 >>> volreg.inputs.outputtype = "NIFTI" - >>> volreg.cmdline #doctest: +ELLIPSIS + >>> volreg.cmdline #doctest: +ELLIPSIS +IGNORE_UNICODE '3dvolreg -Fourier -twopass -1Dfile functional.1D -1Dmatrix_save functional.aff12.1D -prefix functional_volreg.nii -zpad 4 -maxdisp1D functional_md.1D functional.nii' >>> res = volreg.run() # doctest: +SKIP @@ -921,6 +938,7 @@ class CopyInputSpec(AFNICommandInputSpec): argstr='%s', position=-1, name_source="in_file") + class Copy(AFNICommand): """Copies an image of one type to an image of the same or different type using 3dcopy command @@ -934,23 +952,23 @@ class Copy(AFNICommand): >>> from nipype.interfaces import afni as afni >>> copy3d = afni.Copy() >>> copy3d.inputs.in_file = 'functional.nii' - >>> copy3d.cmdline + >>> copy3d.cmdline # doctest: +IGNORE_UNICODE '3dcopy functional.nii functional_copy' >>> from copy import deepcopy >>> copy3d_2 = deepcopy(copy3d) >>> copy3d_2.inputs.outputtype = 'NIFTI' - >>> copy3d_2.cmdline + >>> copy3d_2.cmdline # doctest: 
+IGNORE_UNICODE '3dcopy functional.nii functional_copy.nii' >>> copy3d_3 = deepcopy(copy3d) >>> copy3d_3.inputs.outputtype = 'NIFTI_GZ' - >>> copy3d_3.cmdline + >>> copy3d_3.cmdline # doctest: +IGNORE_UNICODE '3dcopy functional.nii functional_copy.nii.gz' >>> copy3d_4 = deepcopy(copy3d) >>> copy3d_4.inputs.out_file = 'new_func.nii' - >>> copy3d_4.cmdline + >>> copy3d_4.cmdline # doctest: +IGNORE_UNICODE '3dcopy functional.nii new_func.nii' """ @@ -1411,6 +1429,7 @@ class MaskaveInputSpec(AFNICommandInputSpec): position=2) + class Maskave(AFNICommand): """Computes average of all voxels in the input dataset which satisfy the criterion in the options list @@ -1426,7 +1445,7 @@ class Maskave(AFNICommand): >>> maskave.inputs.in_file = 'functional.nii' >>> maskave.inputs.mask= 'seed_mask.nii' >>> maskave.inputs.quiet= True - >>> maskave.cmdline #doctest: +ELLIPSIS + >>> maskave.cmdline #doctest: +ELLIPSIS +IGNORE_UNICODE '3dmaskave -mask seed_mask.nii -quiet functional.nii > functional_maskave.1D' >>> res = maskave.run() # doctest: +SKIP @@ -1657,6 +1676,7 @@ class TCorr1DOutputSpec(TraitedSpec): exists=True) + class TCorr1D(AFNICommand): """Computes the correlation coefficient between each voxel time series in the input 3D+time dataset. 
@@ -1667,7 +1687,7 @@ class TCorr1D(AFNICommand): >>> tcorr1D = afni.TCorr1D() >>> tcorr1D.inputs.xset= 'u_rc1s1_Template.nii' >>> tcorr1D.inputs.y_1d = 'seed.1D' - >>> tcorr1D.cmdline + >>> tcorr1D.cmdline # doctest: +IGNORE_UNICODE '3dTcorr1D -prefix u_rc1s1_Template_correlation.nii.gz u_rc1s1_Template.nii seed.1D' >>> res = tcorr1D.run() # doctest: +SKIP """ @@ -1887,6 +1907,7 @@ class MaskToolOutputSpec(TraitedSpec): exists=True) + class MaskTool(AFNICommand): """3dmask_tool - for combining/dilating/eroding/filling masks @@ -1901,7 +1922,7 @@ class MaskTool(AFNICommand): >>> automask.inputs.in_file = 'functional.nii' >>> automask.inputs.dilate = 1 >>> automask.inputs.outputtype = "NIFTI" - >>> automask.cmdline #doctest: +ELLIPSIS + >>> automask.cmdline #doctest: +ELLIPSIS +IGNORE_UNICODE '3dAutomask -apply_prefix functional_masked.nii -dilate 1 -prefix functional_mask.nii functional.nii' >>> res = automask.run() # doctest: +SKIP @@ -2055,9 +2076,8 @@ class ROIStats(AFNICommandBase): def aggregate_outputs(self, runtime=None, needed_outputs=None): outputs = self._outputs() output_filename = "roi_stats.csv" - f = open(output_filename, "w") - f.write(runtime.stdout) - f.close() + with open(output_filename, "w") as f: + f.write(runtime.stdout) outputs.stats = os.path.abspath(output_filename) return outputs @@ -2082,6 +2102,7 @@ class CalcInputSpec(AFNICommandInputSpec): other = File(desc='other options', argstr='') + class Calc(AFNICommand): """This program does voxel-by-voxel arithmetic on 3D datasets @@ -2098,7 +2119,7 @@ class Calc(AFNICommand): >>> calc.inputs.expr='a*b' >>> calc.inputs.out_file = 'functional_calc.nii.gz' >>> calc.inputs.outputtype = "NIFTI" - >>> calc.cmdline #doctest: +ELLIPSIS + >>> calc.cmdline #doctest: +ELLIPSIS +IGNORE_UNICODE '3dcalc -a functional.nii -b functional2.nii -expr "a*b" -prefix functional_calc.nii.gz' """ @@ -2157,6 +2178,7 @@ class BlurInMaskInputSpec(AFNICommandInputSpec): options = traits.Str(desc='options', argstr='%s', 
position=2) + class BlurInMask(AFNICommand): """ Blurs a dataset spatially inside a mask. That's all. Experimental. @@ -2171,7 +2193,7 @@ class BlurInMask(AFNICommand): >>> bim.inputs.in_file = 'functional.nii' >>> bim.inputs.mask = 'mask.nii' >>> bim.inputs.fwhm = 5.0 - >>> bim.cmdline #doctest: +ELLIPSIS + >>> bim.cmdline #doctest: +ELLIPSIS +IGNORE_UNICODE '3dBlurInMask -input functional.nii -FWHM 5.000000 -mask mask.nii -prefix functional_blur' >>> res = bim.run() # doctest: +SKIP @@ -2450,7 +2472,7 @@ class AFNItoNIFTI(AFNICommand): >>> a2n = afni.AFNItoNIFTI() >>> a2n.inputs.in_file = 'afni_output.3D' >>> a2n.inputs.out_file = 'afni_output.nii' - >>> a2n.cmdline + >>> a2n.cmdline # doctest: +IGNORE_UNICODE '3dAFNItoNIFTI -prefix afni_output.nii afni_output.3D' """ @@ -2490,6 +2512,7 @@ class EvalInputSpec(AFNICommandInputSpec): other = File(desc='other options', argstr='') + class Eval(AFNICommand): """Evaluates an expression that may include columns of data from one or more text files @@ -2505,7 +2528,7 @@ class Eval(AFNICommand): >>> eval.inputs.expr='a*b' >>> eval.inputs.out1D = True >>> eval.inputs.out_file = 'data_calc.1D' - >>> calc.cmdline #doctest: +SKIP + >>> calc.cmdline #doctest: +SKIP +IGNORE_UNICODE '3deval -a timeseries1.1D -b timeseries2.1D -expr "a*b" -1D -prefix data_calc.1D' """ @@ -2554,6 +2577,7 @@ class MeansInputSpec(AFNICommandInputSpec): mask_union = traits.Bool(desc='create union mask', argstr='-mask_union') + class Means(AFNICommand): """Takes the voxel-by-voxel mean of all input datasets using 3dMean @@ -2567,7 +2591,7 @@ class Means(AFNICommand): >>> means.inputs.in_file_a = 'im1.nii' >>> means.inputs.in_file_b = 'im2.nii' >>> means.inputs.out_file = 'output.nii' - >>> means.cmdline + >>> means.cmdline # doctest: +IGNORE_UNICODE '3dMean im1.nii im2.nii -prefix output.nii' """ @@ -2613,7 +2637,7 @@ class Hist(AFNICommandBase): >>> from nipype.interfaces import afni as afni >>> hist = afni.Hist() >>> hist.inputs.in_file = 
'functional.nii' - >>> hist.cmdline + >>> hist.cmdline # doctest: +IGNORE_UNICODE '3dHist -input functional.nii -prefix functional_hist' >>> res = hist.run() # doctest: +SKIP @@ -2701,6 +2725,7 @@ class FWHMxOutputSpec(TraitedSpec): out_acf = File(exists=True, desc='output acf file') + class FWHMx(AFNICommandBase): """ Unlike the older 3dFWHM, this program computes FWHMs for all sub-bricks @@ -2716,7 +2741,7 @@ class FWHMx(AFNICommandBase): >>> from nipype.interfaces import afni as afp >>> fwhm = afp.FWHMx() >>> fwhm.inputs.in_file = 'functional.nii' - >>> fwhm.cmdline + >>> fwhm.cmdline # doctest: +IGNORE_UNICODE '3dFWHMx -input functional.nii -out functional_subbricks.out > functional_fwhmx.out' @@ -2830,7 +2855,7 @@ def _format_arg(self, name, trait_spec, value): return None elif isinstance(value, tuple): return trait_spec.argstr + ' %s %f' % value - elif isinstance(value, string_types): + elif isinstance(value, (str, bytes)): return trait_spec.argstr + ' ' + value return super(FWHMx, self)._format_arg(name, trait_spec, value) @@ -2852,7 +2877,7 @@ def _list_outputs(self): sout = tuple(sout[0]) outputs['out_acf'] = op.abspath('3dFWHMx.1D') - if isinstance(self.inputs.acf, string_types): + if isinstance(self.inputs.acf, (str, bytes)): outputs['out_acf'] = op.abspath(self.inputs.acf) outputs['fwhm'] = tuple(sout) @@ -2898,6 +2923,7 @@ class OutlierCountOutputSpec(TraitedSpec): keep_extension=False, position=-1, desc='capture standard output') + class OutlierCount(CommandLine): """Create a 3D dataset from 2D image files using AFNI to3d command @@ -2910,7 +2936,7 @@ class OutlierCount(CommandLine): >>> from nipype.interfaces import afni >>> toutcount = afni.OutlierCount() >>> toutcount.inputs.in_file = 'functional.nii' - >>> toutcount.cmdline #doctest: +ELLIPSIS + >>> toutcount.cmdline #doctest: +ELLIPSIS +IGNORE_UNICODE '3dToutcount functional.nii > functional_outliers' >>> res = toutcount.run() #doctest: +SKIP @@ -2966,6 +2992,7 @@ class 
QualityIndexOutputSpec(TraitedSpec): out_file = File(desc='file containing the caputured standard output') + class QualityIndex(CommandLine): """Create a 3D dataset from 2D image files using AFNI to3d command @@ -2978,12 +3005,11 @@ class QualityIndex(CommandLine): >>> from nipype.interfaces import afni >>> tqual = afni.QualityIndex() >>> tqual.inputs.in_file = 'functional.nii' - >>> tqual.cmdline #doctest: +ELLIPSIS + >>> tqual.cmdline #doctest: +ELLIPSIS +IGNORE_UNICODE '3dTqual functional.nii > functional_tqual' >>> res = tqual.run() #doctest: +SKIP - """ - + """ _cmd = '3dTqual' input_spec = QualityIndexInputSpec output_spec = QualityIndexOutputSpec diff --git a/nipype/interfaces/afni/svm.py b/nipype/interfaces/afni/svm.py index c2bb335d32..e3a361850e 100644 --- a/nipype/interfaces/afni/svm.py +++ b/nipype/interfaces/afni/svm.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft = python sts = 4 ts = 4 sw = 4 et: """Afni svm interfaces @@ -8,21 +9,10 @@ >>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data')) >>> os.chdir(datadir) """ -import warnings +from __future__ import print_function, division, unicode_literals, absolute_import -import os -import re - -from ..base import (Directory, TraitedSpec, - traits, isdefined, File, InputMultiPath, Undefined) -from ...utils.filemanip import (load_json, save_json, split_filename) -from nipype.utils.filemanip import fname_presuffix -from .base import AFNICommand, AFNICommandInputSpec,\ - AFNICommandOutputSpec -from nipype.interfaces.base import CommandLineInputSpec, CommandLine,\ - OutputMultiPath - -warn = warnings.warn +from ..base import TraitedSpec, traits, File +from .base import AFNICommand, AFNICommandInputSpec, AFNICommandOutputSpec class SVMTrainInputSpec(AFNICommandInputSpec): diff --git a/nipype/interfaces/afni/tests/__init__.py b/nipype/interfaces/afni/tests/__init__.py index 349937997e..99fb243f19 100644 --- 
a/nipype/interfaces/afni/tests/__init__.py +++ b/nipype/interfaces/afni/tests/__init__.py @@ -1,2 +1,3 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/interfaces/afni/tests/test_auto_AFNICommand.py b/nipype/interfaces/afni/tests/test_auto_AFNICommand.py index f822168eb8..82774d69f4 100644 --- a/nipype/interfaces/afni/tests/test_auto_AFNICommand.py +++ b/nipype/interfaces/afni/tests/test_auto_AFNICommand.py @@ -13,7 +13,7 @@ def test_AFNICommand_inputs(): usedefault=True, ), out_file=dict(argstr='-prefix %s', - name_source=['in_file'], + name_source=[u'in_file'], name_template='%s_afni', ), outputtype=dict(), diff --git a/nipype/interfaces/afni/tests/test_auto_AutoTcorrelate.py b/nipype/interfaces/afni/tests/test_auto_AutoTcorrelate.py index 0591a7eb7f..31216252a4 100644 --- a/nipype/interfaces/afni/tests/test_auto_AutoTcorrelate.py +++ b/nipype/interfaces/afni/tests/test_auto_AutoTcorrelate.py @@ -22,10 +22,10 @@ def test_AutoTcorrelate_inputs(): mask=dict(argstr='-mask %s', ), mask_only_targets=dict(argstr='-mask_only_targets', - xor=['mask_source'], + xor=[u'mask_source'], ), mask_source=dict(argstr='-mask_source %s', - xor=['mask_only_targets'], + xor=[u'mask_only_targets'], ), out_file=dict(argstr='-prefix %s', name_source='in_file', diff --git a/nipype/interfaces/afni/tests/test_auto_BlurToFWHM.py b/nipype/interfaces/afni/tests/test_auto_BlurToFWHM.py index 8b5c02daed..b0c965dc07 100644 --- a/nipype/interfaces/afni/tests/test_auto_BlurToFWHM.py +++ b/nipype/interfaces/afni/tests/test_auto_BlurToFWHM.py @@ -26,7 +26,7 @@ def test_BlurToFWHM_inputs(): mask=dict(argstr='-blurmaster %s', ), out_file=dict(argstr='-prefix %s', - name_source=['in_file'], + name_source=[u'in_file'], name_template='%s_afni', ), outputtype=dict(), diff --git a/nipype/interfaces/afni/tests/test_auto_BrickStat.py b/nipype/interfaces/afni/tests/test_auto_BrickStat.py index 
0c47101656..af318cb3ad 100644 --- a/nipype/interfaces/afni/tests/test_auto_BrickStat.py +++ b/nipype/interfaces/afni/tests/test_auto_BrickStat.py @@ -23,7 +23,7 @@ def test_BrickStat_inputs(): position=1, ), out_file=dict(argstr='-prefix %s', - name_source=['in_file'], + name_source=[u'in_file'], name_template='%s_afni', ), outputtype=dict(), diff --git a/nipype/interfaces/afni/tests/test_auto_Calc.py b/nipype/interfaces/afni/tests/test_auto_Calc.py index c15431a5a8..98d99d3a73 100644 --- a/nipype/interfaces/afni/tests/test_auto_Calc.py +++ b/nipype/interfaces/afni/tests/test_auto_Calc.py @@ -34,9 +34,9 @@ def test_Calc_inputs(): ), outputtype=dict(), single_idx=dict(), - start_idx=dict(requires=['stop_idx'], + start_idx=dict(requires=[u'stop_idx'], ), - stop_idx=dict(requires=['start_idx'], + stop_idx=dict(requires=[u'start_idx'], ), terminal_output=dict(nohash=True, ), diff --git a/nipype/interfaces/afni/tests/test_auto_DegreeCentrality.py b/nipype/interfaces/afni/tests/test_auto_DegreeCentrality.py index bdcf111934..9b5d16b094 100644 --- a/nipype/interfaces/afni/tests/test_auto_DegreeCentrality.py +++ b/nipype/interfaces/afni/tests/test_auto_DegreeCentrality.py @@ -26,7 +26,7 @@ def test_DegreeCentrality_inputs(): oned_file=dict(argstr='-out1D %s', ), out_file=dict(argstr='-prefix %s', - name_source=['in_file'], + name_source=[u'in_file'], name_template='%s_afni', ), outputtype=dict(), diff --git a/nipype/interfaces/afni/tests/test_auto_ECM.py b/nipype/interfaces/afni/tests/test_auto_ECM.py index 0af69ab986..1171db8d4a 100644 --- a/nipype/interfaces/afni/tests/test_auto_ECM.py +++ b/nipype/interfaces/afni/tests/test_auto_ECM.py @@ -34,7 +34,7 @@ def test_ECM_inputs(): memory=dict(argstr='-memory %f', ), out_file=dict(argstr='-prefix %s', - name_source=['in_file'], + name_source=[u'in_file'], name_template='%s_afni', ), outputtype=dict(), diff --git a/nipype/interfaces/afni/tests/test_auto_Eval.py b/nipype/interfaces/afni/tests/test_auto_Eval.py index 
0ca8e85bc0..5f872e795a 100644 --- a/nipype/interfaces/afni/tests/test_auto_Eval.py +++ b/nipype/interfaces/afni/tests/test_auto_Eval.py @@ -36,9 +36,9 @@ def test_Eval_inputs(): ), outputtype=dict(), single_idx=dict(), - start_idx=dict(requires=['stop_idx'], + start_idx=dict(requires=[u'stop_idx'], ), - stop_idx=dict(requires=['start_idx'], + stop_idx=dict(requires=[u'start_idx'], ), terminal_output=dict(nohash=True, ), diff --git a/nipype/interfaces/afni/tests/test_auto_FWHMx.py b/nipype/interfaces/afni/tests/test_auto_FWHMx.py index f77c859c76..145476b22c 100644 --- a/nipype/interfaces/afni/tests/test_auto_FWHMx.py +++ b/nipype/interfaces/afni/tests/test_auto_FWHMx.py @@ -10,7 +10,7 @@ def test_FWHMx_inputs(): args=dict(argstr='%s', ), arith=dict(argstr='-arith', - xor=['geom'], + xor=[u'geom'], ), automask=dict(argstr='-automask', usedefault=True, @@ -20,17 +20,17 @@ def test_FWHMx_inputs(): compat=dict(argstr='-compat', ), demed=dict(argstr='-demed', - xor=['detrend'], + xor=[u'detrend'], ), detrend=dict(argstr='-detrend', usedefault=True, - xor=['demed'], + xor=[u'demed'], ), environ=dict(nohash=True, usedefault=True, ), geom=dict(argstr='-geom', - xor=['arith'], + xor=[u'arith'], ), ignore_exception=dict(nohash=True, usedefault=True, diff --git a/nipype/interfaces/afni/tests/test_auto_Hist.py b/nipype/interfaces/afni/tests/test_auto_Hist.py index 0024e5f186..d5c69116b0 100644 --- a/nipype/interfaces/afni/tests/test_auto_Hist.py +++ b/nipype/interfaces/afni/tests/test_auto_Hist.py @@ -29,7 +29,7 @@ def test_Hist_inputs(): ), out_file=dict(argstr='-prefix %s', keep_extension=False, - name_source=['in_file'], + name_source=[u'in_file'], name_template='%s_hist', ), out_show=dict(argstr='> %s', diff --git a/nipype/interfaces/afni/tests/test_auto_LFCD.py b/nipype/interfaces/afni/tests/test_auto_LFCD.py index 371bce8b8d..ff53651d79 100644 --- a/nipype/interfaces/afni/tests/test_auto_LFCD.py +++ b/nipype/interfaces/afni/tests/test_auto_LFCD.py @@ -24,7 +24,7 @@ def 
test_LFCD_inputs(): mask=dict(argstr='-mask %s', ), out_file=dict(argstr='-prefix %s', - name_source=['in_file'], + name_source=[u'in_file'], name_template='%s_afni', ), outputtype=dict(), diff --git a/nipype/interfaces/afni/tests/test_auto_MaskTool.py b/nipype/interfaces/afni/tests/test_auto_MaskTool.py index 005a915ead..5e6e809767 100644 --- a/nipype/interfaces/afni/tests/test_auto_MaskTool.py +++ b/nipype/interfaces/afni/tests/test_auto_MaskTool.py @@ -19,7 +19,7 @@ def test_MaskTool_inputs(): usedefault=True, ), fill_dirs=dict(argstr='-fill_dirs %s', - requires=['fill_holes'], + requires=[u'fill_holes'], ), fill_holes=dict(argstr='-fill_holes', ), diff --git a/nipype/interfaces/afni/tests/test_auto_OutlierCount.py b/nipype/interfaces/afni/tests/test_auto_OutlierCount.py index 82e084a495..f2d7c63846 100644 --- a/nipype/interfaces/afni/tests/test_auto_OutlierCount.py +++ b/nipype/interfaces/afni/tests/test_auto_OutlierCount.py @@ -8,11 +8,11 @@ def test_OutlierCount_inputs(): ), autoclip=dict(argstr='-autoclip', usedefault=True, - xor=['in_file'], + xor=[u'in_file'], ), automask=dict(argstr='-automask', usedefault=True, - xor=['in_file'], + xor=[u'in_file'], ), environ=dict(nohash=True, usedefault=True, @@ -34,17 +34,17 @@ def test_OutlierCount_inputs(): usedefault=True, ), mask=dict(argstr='-mask %s', - xor=['autoclip', 'automask'], + xor=[u'autoclip', u'automask'], ), out_file=dict(argstr='> %s', keep_extension=False, - name_source=['in_file'], + name_source=[u'in_file'], name_template='%s_outliers', position=-1, ), outliers_file=dict(argstr='-save %s', keep_extension=True, - name_source=['in_file'], + name_source=[u'in_file'], name_template='%s_outliers', output_name='out_outliers', ), @@ -67,7 +67,7 @@ def test_OutlierCount_inputs(): def test_OutlierCount_outputs(): output_map = dict(out_file=dict(argstr='> %s', keep_extension=False, - name_source=['in_file'], + name_source=[u'in_file'], name_template='%s_tqual', position=-1, ), diff --git 
a/nipype/interfaces/afni/tests/test_auto_QualityIndex.py b/nipype/interfaces/afni/tests/test_auto_QualityIndex.py index 3394e7028a..cb41475a18 100644 --- a/nipype/interfaces/afni/tests/test_auto_QualityIndex.py +++ b/nipype/interfaces/afni/tests/test_auto_QualityIndex.py @@ -8,11 +8,11 @@ def test_QualityIndex_inputs(): ), autoclip=dict(argstr='-autoclip', usedefault=True, - xor=['mask'], + xor=[u'mask'], ), automask=dict(argstr='-automask', usedefault=True, - xor=['mask'], + xor=[u'mask'], ), clip=dict(argstr='-clip %f', ), @@ -30,11 +30,11 @@ def test_QualityIndex_inputs(): usedefault=True, ), mask=dict(argstr='-mask %s', - xor=['autoclip', 'automask'], + xor=[u'autoclip', u'automask'], ), out_file=dict(argstr='> %s', keep_extension=False, - name_source=['in_file'], + name_source=[u'in_file'], name_template='%s_tqual', position=-1, ), diff --git a/nipype/interfaces/afni/tests/test_auto_TCorr1D.py b/nipype/interfaces/afni/tests/test_auto_TCorr1D.py index 96ebdbe3a6..f374ce8a19 100644 --- a/nipype/interfaces/afni/tests/test_auto_TCorr1D.py +++ b/nipype/interfaces/afni/tests/test_auto_TCorr1D.py @@ -14,7 +14,7 @@ def test_TCorr1D_inputs(): ), ktaub=dict(argstr=' -ktaub', position=1, - xor=['pearson', 'spearman', 'quadrant'], + xor=[u'pearson', u'spearman', u'quadrant'], ), out_file=dict(argstr='-prefix %s', keep_extension=True, @@ -24,15 +24,15 @@ def test_TCorr1D_inputs(): outputtype=dict(), pearson=dict(argstr=' -pearson', position=1, - xor=['spearman', 'quadrant', 'ktaub'], + xor=[u'spearman', u'quadrant', u'ktaub'], ), quadrant=dict(argstr=' -quadrant', position=1, - xor=['pearson', 'spearman', 'ktaub'], + xor=[u'pearson', u'spearman', u'ktaub'], ), spearman=dict(argstr=' -spearman', position=1, - xor=['pearson', 'quadrant', 'ktaub'], + xor=[u'pearson', u'quadrant', u'ktaub'], ), terminal_output=dict(nohash=True, ), diff --git a/nipype/interfaces/afni/tests/test_auto_TCorrMap.py b/nipype/interfaces/afni/tests/test_auto_TCorrMap.py index 15c98d2aac..45edca85c5 
100644 --- a/nipype/interfaces/afni/tests/test_auto_TCorrMap.py +++ b/nipype/interfaces/afni/tests/test_auto_TCorrMap.py @@ -7,7 +7,7 @@ def test_TCorrMap_inputs(): input_map = dict(absolute_threshold=dict(argstr='-Thresh %f %s', name_source='in_file', suffix='_thresh', - xor=('absolute_threshold', 'var_absolute_threshold', 'var_absolute_threshold_normalize'), + xor=(u'absolute_threshold', u'var_absolute_threshold', u'var_absolute_threshold_normalize'), ), args=dict(argstr='%s', ), @@ -16,12 +16,12 @@ def test_TCorrMap_inputs(): average_expr=dict(argstr='-Aexpr %s %s', name_source='in_file', suffix='_aexpr', - xor=('average_expr', 'average_expr_nonzero', 'sum_expr'), + xor=(u'average_expr', u'average_expr_nonzero', u'sum_expr'), ), average_expr_nonzero=dict(argstr='-Cexpr %s %s', name_source='in_file', suffix='_cexpr', - xor=('average_expr', 'average_expr_nonzero', 'sum_expr'), + xor=(u'average_expr', u'average_expr_nonzero', u'sum_expr'), ), bandpass=dict(argstr='-bpass %f %f', ), @@ -56,7 +56,7 @@ def test_TCorrMap_inputs(): suffix='_mean', ), out_file=dict(argstr='-prefix %s', - name_source=['in_file'], + name_source=[u'in_file'], name_template='%s_afni', ), outputtype=dict(), @@ -81,7 +81,7 @@ def test_TCorrMap_inputs(): sum_expr=dict(argstr='-Sexpr %s %s', name_source='in_file', suffix='_sexpr', - xor=('average_expr', 'average_expr_nonzero', 'sum_expr'), + xor=(u'average_expr', u'average_expr_nonzero', u'sum_expr'), ), terminal_output=dict(nohash=True, ), @@ -89,12 +89,12 @@ def test_TCorrMap_inputs(): var_absolute_threshold=dict(argstr='-VarThresh %f %f %f %s', name_source='in_file', suffix='_varthresh', - xor=('absolute_threshold', 'var_absolute_threshold', 'var_absolute_threshold_normalize'), + xor=(u'absolute_threshold', u'var_absolute_threshold', u'var_absolute_threshold_normalize'), ), var_absolute_threshold_normalize=dict(argstr='-VarThreshN %f %f %f %s', name_source='in_file', suffix='_varthreshn', - xor=('absolute_threshold', 'var_absolute_threshold', 
'var_absolute_threshold_normalize'), + xor=(u'absolute_threshold', u'var_absolute_threshold', u'var_absolute_threshold_normalize'), ), zmean=dict(argstr='-Zmean %s', name_source='in_file', diff --git a/nipype/interfaces/afni/tests/test_auto_TShift.py b/nipype/interfaces/afni/tests/test_auto_TShift.py index a67893c811..fca649bca3 100644 --- a/nipype/interfaces/afni/tests/test_auto_TShift.py +++ b/nipype/interfaces/afni/tests/test_auto_TShift.py @@ -37,10 +37,10 @@ def test_TShift_inputs(): tr=dict(argstr='-TR %s', ), tslice=dict(argstr='-slice %s', - xor=['tzero'], + xor=[u'tzero'], ), tzero=dict(argstr='-tzero %s', - xor=['tslice'], + xor=[u'tslice'], ), ) inputs = TShift.input_spec() diff --git a/nipype/interfaces/afni/tests/test_auto_To3D.py b/nipype/interfaces/afni/tests/test_auto_To3D.py index 4357ee96da..a18cf7bf68 100644 --- a/nipype/interfaces/afni/tests/test_auto_To3D.py +++ b/nipype/interfaces/afni/tests/test_auto_To3D.py @@ -25,7 +25,7 @@ def test_To3D_inputs(): position=-1, ), out_file=dict(argstr='-prefix %s', - name_source=['in_folder'], + name_source=[u'in_folder'], name_template='%s', ), outputtype=dict(), diff --git a/nipype/interfaces/ants/__init__.py b/nipype/interfaces/ants/__init__.py index 2f31587445..039fb2d706 100644 --- a/nipype/interfaces/ants/__init__.py +++ b/nipype/interfaces/ants/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/interfaces/ants/base.py b/nipype/interfaces/ants/base.py index 20fab05881..208cae8c25 100644 --- a/nipype/interfaces/ants/base.py +++ b/nipype/interfaces/ants/base.py @@ -1,12 +1,12 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The ants module provides basic functions for interfacing with ANTS tools.""" - +from __future__ import print_function, division, unicode_literals, 
absolute_import +from builtins import str # Local imports -from ..base import (CommandLine, CommandLineInputSpec, traits, - isdefined) - from ... import logging +from ..base import CommandLine, CommandLineInputSpec, traits, isdefined logger = logging.getLogger('interface') # -Using -1 gives primary responsibilty to ITKv4 to do the correct diff --git a/nipype/interfaces/ants/legacy.py b/nipype/interfaces/ants/legacy.py index b68e0f7ed8..677abedaab 100644 --- a/nipype/interfaces/ants/legacy.py +++ b/nipype/interfaces/ants/legacy.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # NOTE: This implementation has been superceeded buy the antsApplyTransform # implmeentation that more closely follows the strucutre and capabilities # of the antsApplyTransform program. This implementation is here @@ -16,7 +17,6 @@ import os from glob import glob - from .base import ANTSCommand, ANTSCommandInputSpec from ..base import TraitedSpec, File, traits, isdefined, OutputMultiPath from ...utils.filemanip import split_filename @@ -86,7 +86,7 @@ class antsIntroduction(ANTSCommand): >>> warp.inputs.reference_image = 'Template_6.nii' >>> warp.inputs.input_image = 'structural.nii' >>> warp.inputs.max_iterations = [30,90,20] - >>> warp.cmdline + >>> warp.cmdline # doctest: +IGNORE_UNICODE 'antsIntroduction.sh -d 3 -i structural.nii -m 30x90x20 -o ants_ -r Template_6.nii -t GR' """ @@ -204,7 +204,7 @@ class buildtemplateparallel(ANTSCommand): >>> tmpl = buildtemplateparallel() >>> tmpl.inputs.in_files = ['T1.nii', 'structural.nii'] >>> tmpl.inputs.max_iterations = [30, 90, 20] - >>> tmpl.cmdline + >>> tmpl.cmdline # doctest: +IGNORE_UNICODE 'buildtemplateparallel.sh -d 3 -i 4 -m 30x90x20 -o antsTMPL_ -c 0 -t GR T1.nii structural.nii' """ diff --git a/nipype/interfaces/ants/registration.py b/nipype/interfaces/ants/registration.py index f5d6b749b2..fcc60b2f4f 100644 --- a/nipype/interfaces/ants/registration.py +++ b/nipype/interfaces/ants/registration.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- 
"""The ants module provides basic functions for interfacing with ants functions. Change directory to provide relative paths for doctests @@ -6,13 +7,12 @@ >>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data')) >>> os.chdir(datadir) """ +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import range, str +import os -from builtins import range - -from ..base import TraitedSpec, File, traits, InputMultiPath +from ..base import TraitedSpec, File, Str, traits, InputMultiPath, isdefined from .base import ANTSCommand, ANTSCommandInputSpec -import os -from ..traits_extension import isdefined class ANTSInputSpec(ANTSCommandInputSpec): @@ -52,7 +52,7 @@ class ANTSInputSpec(ANTSCommandInputSpec): metric_weight = traits.List(traits.Float(), requires=['metric'], desc='') radius = traits.List(traits.Int(), requires=['metric'], desc='') - output_transform_prefix = traits.Str('out', usedefault=True, + output_transform_prefix = Str('out', usedefault=True, argstr='--output-naming %s', mandatory=True, desc='') transformation_model = traits.Enum('Diff', 'Elast', 'Exp', 'Greedy Exp', @@ -120,7 +120,7 @@ class ANTS(ANTSCommand): >>> ants.inputs.regularization_gradient_field_sigma = 3 >>> ants.inputs.regularization_deformation_field_sigma = 0 >>> ants.inputs.number_of_affine_iterations = [10000,10000,10000,10000,10000] - >>> ants.cmdline + >>> ants.cmdline # doctest: +IGNORE_UNICODE 'ANTS 3 --MI-option 32x16000 --image-metric CC[ T1.nii, resting.nii, 1, 5 ] --number-of-affine-iterations \ 10000x10000x10000x10000x10000 --number-of-iterations 50x35x15 --output-naming MY --regularization Gauss[3.0,0.0] \ --transformation-model SyN[0.25] --use-Histogram-Matching 1' @@ -367,7 +367,7 @@ class RegistrationInputSpec(ANTSCommandInputSpec): convergence_window_size = traits.List(trait=traits.Int(), value=[ 10], minlen=1, requires=['convergence_threshold'], usedefault=True) # Output flags - output_transform_prefix = traits.Str( + 
output_transform_prefix = Str( "transform", usedefault=True, argstr="%s", desc="") output_warped_image = traits.Either( traits.Bool, File(), hash_files=False, desc="") @@ -428,7 +428,7 @@ class Registration(ANTSCommand): >>> reg.inputs.use_estimate_learning_rate_once = [True, True] >>> reg.inputs.use_histogram_matching = [True, True] # This is the default >>> reg.inputs.output_warped_image = 'output_warped_image.nii.gz' - >>> reg.cmdline + >>> reg.cmdline # doctest: +IGNORE_UNICODE 'antsRegistration --collapse-output-transforms 0 --dimensionality 3 --initial-moving-transform [ trans.mat, 0 ] \ --initialize-transforms-per-stage 0 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] \ @@ -442,7 +442,7 @@ class Registration(ANTSCommand): >>> reg.inputs.invert_initial_moving_transform = True >>> reg1 = copy.deepcopy(reg) >>> reg1.inputs.winsorize_lower_quantile = 0.025 - >>> reg1.cmdline + >>> reg1.cmdline # doctest: +IGNORE_UNICODE 'antsRegistration --collapse-output-transforms 0 --dimensionality 3 --initial-moving-transform [ trans.mat, 1 ] \ --initialize-transforms-per-stage 0 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] \ @@ -455,7 +455,7 @@ class Registration(ANTSCommand): >>> reg2 = copy.deepcopy(reg) >>> reg2.inputs.winsorize_upper_quantile = 0.975 - >>> reg2.cmdline + >>> reg2.cmdline # doctest: +IGNORE_UNICODE 'antsRegistration --collapse-output-transforms 0 --dimensionality 3 --initial-moving-transform [ trans.mat, 1 ] \ --initialize-transforms-per-stage 0 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] \ @@ -468,7 +468,7 @@ class Registration(ANTSCommand): >>> reg3 = copy.deepcopy(reg) >>> 
reg3.inputs.winsorize_lower_quantile = 0.025 >>> reg3.inputs.winsorize_upper_quantile = 0.975 - >>> reg3.cmdline + >>> reg3.cmdline # doctest: +IGNORE_UNICODE 'antsRegistration --collapse-output-transforms 0 --dimensionality 3 --initial-moving-transform [ trans.mat, 1 ] \ --initialize-transforms-per-stage 0 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] \ @@ -480,7 +480,7 @@ class Registration(ANTSCommand): >>> reg3a = copy.deepcopy(reg) >>> reg3a.inputs.float = True - >>> reg3a.cmdline + >>> reg3a.cmdline # doctest: +IGNORE_UNICODE 'antsRegistration --collapse-output-transforms 0 --dimensionality 3 --float 1 \ --initial-moving-transform [ trans.mat, 1 ] --initialize-transforms-per-stage 0 --interpolation Linear \ --output [ output_, output_warped_image.nii.gz ] --transform Affine[ 2.0 ] \ @@ -493,7 +493,7 @@ class Registration(ANTSCommand): >>> reg3b = copy.deepcopy(reg) >>> reg3b.inputs.float = False - >>> reg3b.cmdline + >>> reg3b.cmdline # doctest: +IGNORE_UNICODE 'antsRegistration --collapse-output-transforms 0 --dimensionality 3 --float 0 \ --initial-moving-transform [ trans.mat, 1 ] --initialize-transforms-per-stage 0 --interpolation Linear \ --output [ output_, output_warped_image.nii.gz ] --transform Affine[ 2.0 ] \ @@ -511,7 +511,7 @@ class Registration(ANTSCommand): >>> reg4.inputs.initialize_transforms_per_stage = True >>> reg4.inputs.collapse_output_transforms = True >>> outputs = reg4._list_outputs() - >>> pprint.pprint(outputs) # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE + >>> pprint.pprint(outputs) # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE +IGNORE_UNICODE {'composite_transform': '.../nipype/testing/data/output_Composite.h5', 'forward_invert_flags': [], 'forward_transforms': [], @@ -521,7 +521,7 @@ class Registration(ANTSCommand): 'reverse_transforms': [], 'save_state': '.../nipype/testing/data/trans.mat', 'warped_image': 
'.../nipype/testing/data/output_warped_image.nii.gz'} - >>> reg4.cmdline + >>> reg4.cmdline # doctest: +IGNORE_UNICODE 'antsRegistration --collapse-output-transforms 1 --dimensionality 3 --initial-moving-transform [ trans.mat, 1 ] \ --initialize-transforms-per-stage 1 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --restore-state trans.mat --save-state trans.mat --transform Affine[ 2.0 ] \ @@ -536,7 +536,7 @@ class Registration(ANTSCommand): >>> reg4b = copy.deepcopy(reg4) >>> reg4b.inputs.write_composite_transform = False >>> outputs = reg4b._list_outputs() - >>> pprint.pprint(outputs) # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE + >>> pprint.pprint(outputs) # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE +IGNORE_UNICODE {'composite_transform': , 'forward_invert_flags': [False, False], 'forward_transforms': ['.../nipype/testing/data/output_0GenericAffine.mat', @@ -549,7 +549,7 @@ class Registration(ANTSCommand): 'save_state': '.../nipype/testing/data/trans.mat', 'warped_image': '.../nipype/testing/data/output_warped_image.nii.gz'} >>> reg4b.aggregate_outputs() # doctest: +SKIP - >>> reg4b.cmdline + >>> reg4b.cmdline # doctest: +IGNORE_UNICODE 'antsRegistration --collapse-output-transforms 1 --dimensionality 3 --initial-moving-transform [ trans.mat, 1 ] \ --initialize-transforms-per-stage 1 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --restore-state trans.mat --save-state trans.mat --transform Affine[ 2.0 ] \ @@ -569,7 +569,7 @@ class Registration(ANTSCommand): >>> reg5.inputs.radius_or_number_of_bins = [32, [32, 4] ] >>> reg5.inputs.sampling_strategy = ['Random', None] # use default strategy in second stage >>> reg5.inputs.sampling_percentage = [0.05, [0.05, 0.10]] - >>> reg5.cmdline + >>> reg5.cmdline # doctest: +IGNORE_UNICODE 'antsRegistration --collapse-output-transforms 0 --dimensionality 3 --initial-moving-transform [ trans.mat, 1 ] \ --initialize-transforms-per-stage 0 --interpolation Linear --output [ 
output_, output_warped_image.nii.gz ] \ --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] \ @@ -584,7 +584,7 @@ class Registration(ANTSCommand): >>> reg6 = copy.deepcopy(reg5) >>> reg6.inputs.fixed_image = ['fixed1.nii', 'fixed2.nii'] >>> reg6.inputs.moving_image = ['moving1.nii', 'moving2.nii'] - >>> reg6.cmdline + >>> reg6.cmdline # doctest: +IGNORE_UNICODE 'antsRegistration --collapse-output-transforms 0 --dimensionality 3 --initial-moving-transform [ trans.mat, 1 ] \ --initialize-transforms-per-stage 0 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] \ @@ -599,7 +599,7 @@ class Registration(ANTSCommand): >>> reg7a = copy.deepcopy(reg) >>> reg7a.inputs.interpolation = 'BSpline' >>> reg7a.inputs.interpolation_parameters = (3,) - >>> reg7a.cmdline + >>> reg7a.cmdline # doctest: +IGNORE_UNICODE 'antsRegistration --collapse-output-transforms 0 --dimensionality 3 --initial-moving-transform [ trans.mat, 1 ] \ --initialize-transforms-per-stage 0 --interpolation BSpline[ 3 ] --output [ output_, output_warped_image.nii.gz ] \ --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] \ @@ -613,7 +613,7 @@ class Registration(ANTSCommand): >>> reg7b = copy.deepcopy(reg) >>> reg7b.inputs.interpolation = 'Gaussian' >>> reg7b.inputs.interpolation_parameters = (1.0, 1.0) - >>> reg7b.cmdline + >>> reg7b.cmdline # doctest: +IGNORE_UNICODE 'antsRegistration --collapse-output-transforms 0 --dimensionality 3 --initial-moving-transform [ trans.mat, 1 ] \ --initialize-transforms-per-stage 0 --interpolation Gaussian[ 1.0, 1.0 ] \ --output [ output_, output_warped_image.nii.gz ] --transform Affine[ 2.0 ] \ @@ -628,7 +628,7 @@ class Registration(ANTSCommand): >>> reg8 = copy.deepcopy(reg) >>> reg8.inputs.transforms = ['Affine', 'BSplineSyN'] >>> reg8.inputs.transform_parameters = [(2.0,), (0.25, 26, 0, 
3)] - >>> reg8.cmdline + >>> reg8.cmdline # doctest: +IGNORE_UNICODE 'antsRegistration --collapse-output-transforms 0 --dimensionality 3 --initial-moving-transform [ trans.mat, 1 ] \ --initialize-transforms-per-stage 0 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] \ diff --git a/nipype/interfaces/ants/resampling.py b/nipype/interfaces/ants/resampling.py index 6191324771..7fc9984676 100644 --- a/nipype/interfaces/ants/resampling.py +++ b/nipype/interfaces/ants/resampling.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- """ANTS Apply Transforms interface Change directory to provide relative paths for doctests @@ -6,8 +7,8 @@ >>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data')) >>> os.chdir(datadir) """ - -from builtins import range +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import range, str import os from .base import ANTSCommand, ANTSCommandInputSpec @@ -62,7 +63,7 @@ class WarpTimeSeriesImageMultiTransform(ANTSCommand): >>> wtsimt.inputs.input_image = 'resting.nii' >>> wtsimt.inputs.reference_image = 'ants_deformed.nii.gz' >>> wtsimt.inputs.transformation_series = ['ants_Warp.nii.gz','ants_Affine.txt'] - >>> wtsimt.cmdline + >>> wtsimt.cmdline # doctest: +IGNORE_UNICODE 'WarpTimeSeriesImageMultiTransform 4 resting.nii resting_wtsimt.nii -R ants_deformed.nii.gz ants_Warp.nii.gz \ ants_Affine.txt' @@ -158,7 +159,7 @@ class WarpImageMultiTransform(ANTSCommand): >>> wimt.inputs.input_image = 'structural.nii' >>> wimt.inputs.reference_image = 'ants_deformed.nii.gz' >>> wimt.inputs.transformation_series = ['ants_Warp.nii.gz','ants_Affine.txt'] - >>> wimt.cmdline + >>> wimt.cmdline # doctest: +IGNORE_UNICODE 'WarpImageMultiTransform 3 structural.nii structural_wimt.nii -R ants_deformed.nii.gz ants_Warp.nii.gz \ ants_Affine.txt' @@ -168,7 +169,7 @@ class 
WarpImageMultiTransform(ANTSCommand): >>> wimt.inputs.transformation_series = ['func2anat_coreg_Affine.txt','func2anat_InverseWarp.nii.gz', \ 'dwi2anat_Warp.nii.gz','dwi2anat_coreg_Affine.txt'] >>> wimt.inputs.invert_affine = [1] - >>> wimt.cmdline + >>> wimt.cmdline # doctest: +IGNORE_UNICODE 'WarpImageMultiTransform 3 diffusion_weighted.nii diffusion_weighted_wimt.nii -R functional.nii \ -i func2anat_coreg_Affine.txt func2anat_InverseWarp.nii.gz dwi2anat_Warp.nii.gz dwi2anat_coreg_Affine.txt' @@ -276,7 +277,7 @@ class ApplyTransforms(ANTSCommand): >>> at.inputs.default_value = 0 >>> at.inputs.transforms = ['ants_Warp.nii.gz', 'trans.mat'] >>> at.inputs.invert_transform_flags = [False, False] - >>> at.cmdline + >>> at.cmdline # doctest: +IGNORE_UNICODE 'antsApplyTransforms --default-value 0 --dimensionality 3 --input moving1.nii --interpolation Linear \ --output deformed_moving1.nii --reference-image fixed1.nii --transform [ ants_Warp.nii.gz, 0 ] \ --transform [ trans.mat, 0 ]' @@ -291,7 +292,7 @@ class ApplyTransforms(ANTSCommand): >>> at1.inputs.default_value = 0 >>> at1.inputs.transforms = ['ants_Warp.nii.gz', 'trans.mat'] >>> at1.inputs.invert_transform_flags = [False, False] - >>> at1.cmdline + >>> at1.cmdline # doctest: +IGNORE_UNICODE 'antsApplyTransforms --default-value 0 --dimensionality 3 --input moving1.nii --interpolation BSpline[ 5 ] \ --output deformed_moving1.nii --reference-image fixed1.nii --transform [ ants_Warp.nii.gz, 0 ] \ --transform [ trans.mat, 0 ]' @@ -399,7 +400,7 @@ class ApplyTransformsToPoints(ANTSCommand): >>> at.inputs.input_file = 'moving.csv' >>> at.inputs.transforms = ['trans.mat', 'ants_Warp.nii.gz'] >>> at.inputs.invert_transform_flags = [False, False] - >>> at.cmdline + >>> at.cmdline # doctest: +IGNORE_UNICODE 'antsApplyTransformsToPoints --dimensionality 3 --input moving.csv --output moving_transformed.csv \ --transform [ trans.mat, 0 ] --transform [ ants_Warp.nii.gz, 0 ]' diff --git a/nipype/interfaces/ants/segmentation.py 
b/nipype/interfaces/ants/segmentation.py index 16a4713933..70231360d3 100644 --- a/nipype/interfaces/ants/segmentation.py +++ b/nipype/interfaces/ants/segmentation.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- """The ants module provides basic functions for interfacing with ants functions. Change directory to provide relative paths for doctests @@ -7,14 +8,13 @@ >>> os.chdir(datadir) """ +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import range, str -from builtins import range - +import os +from ...utils.filemanip import split_filename, copyfile from ..base import TraitedSpec, File, traits, InputMultiPath, OutputMultiPath, isdefined -from ...utils.filemanip import split_filename from .base import ANTSCommand, ANTSCommandInputSpec -import os -from ...utils.filemanip import copyfile class AtroposInputSpec(ANTSCommandInputSpec): @@ -89,11 +89,12 @@ class Atropos(ANTSCommand): >>> at.inputs.posterior_formulation = 'Socrates' >>> at.inputs.use_mixture_model_proportions = True >>> at.inputs.save_posteriors = True - >>> at.cmdline + >>> at.cmdline # doctest: +IGNORE_UNICODE 'Atropos --image-dimensionality 3 --icm [1,1] \ --initialization PriorProbabilityImages[2,priors/priorProbImages%02d.nii,0.8,1e-07] --intensity-image structural.nii \ --likelihood-model Gaussian --mask-image mask.nii --mrf [0.2,1x1x1] --convergence [5,1e-06] \ --output [structural_labeled.nii,POSTERIOR_%02d.nii.gz] --posterior-formulation Socrates[1] --use-random-seed 1' + """ input_spec = AtroposInputSpec output_spec = AtroposOutputSpec @@ -206,7 +207,7 @@ class LaplacianThickness(ANTSCommand): >>> cort_thick.inputs.input_wm = 'white_matter.nii.gz' >>> cort_thick.inputs.input_gm = 'gray_matter.nii.gz' >>> cort_thick.inputs.output_image = 'output_thickness.nii.gz' - >>> cort_thick.cmdline + >>> cort_thick.cmdline # doctest: +IGNORE_UNICODE 'LaplacianThickness white_matter.nii.gz gray_matter.nii.gz output_thickness.nii.gz' """ @@ -288,7 +289,7 @@ class 
N4BiasFieldCorrection(ANTSCommand): >>> n4.inputs.bspline_fitting_distance = 300 >>> n4.inputs.shrink_factor = 3 >>> n4.inputs.n_iterations = [50,50,30,20] - >>> n4.cmdline + >>> n4.cmdline # doctest: +IGNORE_UNICODE 'N4BiasFieldCorrection --bspline-fitting [ 300 ] \ -d 3 --input-image structural.nii \ --convergence [ 50x50x30x20 ] --output structural_corrected.nii \ @@ -296,7 +297,7 @@ class N4BiasFieldCorrection(ANTSCommand): >>> n4_2 = copy.deepcopy(n4) >>> n4_2.inputs.convergence_threshold = 1e-6 - >>> n4_2.cmdline + >>> n4_2.cmdline # doctest: +IGNORE_UNICODE 'N4BiasFieldCorrection --bspline-fitting [ 300 ] \ -d 3 --input-image structural.nii \ --convergence [ 50x50x30x20, 1e-06 ] --output structural_corrected.nii \ @@ -304,7 +305,7 @@ class N4BiasFieldCorrection(ANTSCommand): >>> n4_3 = copy.deepcopy(n4_2) >>> n4_3.inputs.bspline_order = 5 - >>> n4_3.cmdline + >>> n4_3.cmdline # doctest: +IGNORE_UNICODE 'N4BiasFieldCorrection --bspline-fitting [ 300, 5 ] \ -d 3 --input-image structural.nii \ --convergence [ 50x50x30x20, 1e-06 ] --output structural_corrected.nii \ @@ -314,7 +315,7 @@ class N4BiasFieldCorrection(ANTSCommand): >>> n4_4.inputs.input_image = 'structural.nii' >>> n4_4.inputs.save_bias = True >>> n4_4.inputs.dimension = 3 - >>> n4_4.cmdline + >>> n4_4.cmdline # doctest: +IGNORE_UNICODE 'N4BiasFieldCorrection -d 3 --input-image structural.nii \ --output [ structural_corrected.nii, structural_bias.nii ]' """ @@ -498,12 +499,15 @@ class CorticalThickness(ANTSCommand): >>> corticalthickness.inputs.anatomical_image ='T1.nii.gz' >>> corticalthickness.inputs.brain_template = 'study_template.nii.gz' >>> corticalthickness.inputs.brain_probability_mask ='ProbabilityMaskOfStudyTemplate.nii.gz' - >>> corticalthickness.inputs.segmentation_priors = ['BrainSegmentationPrior01.nii.gz', \ - 'BrainSegmentationPrior02.nii.gz', 'BrainSegmentationPrior03.nii.gz', 'BrainSegmentationPrior04.nii.gz'] + >>> corticalthickness.inputs.segmentation_priors = 
['BrainSegmentationPrior01.nii.gz', + ... 'BrainSegmentationPrior02.nii.gz', + ... 'BrainSegmentationPrior03.nii.gz', + ... 'BrainSegmentationPrior04.nii.gz'] >>> corticalthickness.inputs.t1_registration_template = 'brain_study_template.nii.gz' - >>> corticalthickness.cmdline + >>> corticalthickness.cmdline # doctest: +IGNORE_UNICODE 'antsCorticalThickness.sh -a T1.nii.gz -m ProbabilityMaskOfStudyTemplate.nii.gz -e study_template.nii.gz -d 3 \ -s nii.gz -o antsCT_ -p nipype_priors/BrainSegmentationPrior%02d.nii.gz -t brain_study_template.nii.gz' + """ input_spec = CorticalThicknessInputSpec @@ -663,7 +667,7 @@ class BrainExtraction(ANTSCommand): >>> brainextraction.inputs.anatomical_image ='T1.nii.gz' >>> brainextraction.inputs.brain_template = 'study_template.nii.gz' >>> brainextraction.inputs.brain_probability_mask ='ProbabilityMaskOfStudyTemplate.nii.gz' - >>> brainextraction.cmdline + >>> brainextraction.cmdline # doctest: +IGNORE_UNICODE 'antsBrainExtraction.sh -a T1.nii.gz -m ProbabilityMaskOfStudyTemplate.nii.gz -e study_template.nii.gz -d 3 \ -s nii.gz -o highres001_' """ @@ -754,7 +758,7 @@ class JointFusion(ANTSCommand): ... 'segmentation1.nii.gz', ... 
'segmentation1.nii.gz'] >>> at.inputs.target_image = 'T1.nii' - >>> at.cmdline + >>> at.cmdline # doctest: +IGNORE_UNICODE 'jointfusion 3 1 -m Joint[0.1,2] -tg T1.nii -g im1.nii -g im2.nii -g im3.nii -l segmentation0.nii.gz \ -l segmentation1.nii.gz -l segmentation1.nii.gz fusion_labelimage_output.nii' @@ -763,7 +767,7 @@ class JointFusion(ANTSCommand): >>> at.inputs.beta = 1 >>> at.inputs.patch_radius = [3,2,1] >>> at.inputs.search_radius = [1,2,3] - >>> at.cmdline + >>> at.cmdline # doctest: +IGNORE_UNICODE 'jointfusion 3 1 -m Joint[0.5,1] -rp 3x2x1 -rs 1x2x3 -tg T1.nii -g im1.nii -g im2.nii -g im3.nii \ -l segmentation0.nii.gz -l segmentation1.nii.gz -l segmentation1.nii.gz fusion_labelimage_output.nii' """ @@ -840,20 +844,20 @@ class DenoiseImage(ANTSCommand): >>> denoise = DenoiseImage() >>> denoise.inputs.dimension = 3 >>> denoise.inputs.input_image = 'im1.nii' - >>> denoise.cmdline + >>> denoise.cmdline # doctest: +IGNORE_UNICODE 'DenoiseImage -d 3 -i im1.nii -n Gaussian -o im1_noise_corrected.nii -s 1' >>> denoise_2 = copy.deepcopy(denoise) >>> denoise_2.inputs.output_image = 'output_corrected_image.nii.gz' >>> denoise_2.inputs.noise_model = 'Rician' >>> denoise_2.inputs.shrink_factor = 2 - >>> denoise_2.cmdline + >>> denoise_2.cmdline # doctest: +IGNORE_UNICODE 'DenoiseImage -d 3 -i im1.nii -n Rician -o output_corrected_image.nii.gz -s 2' >>> denoise_3 = DenoiseImage() >>> denoise_3.inputs.input_image = 'im1.nii' >>> denoise_3.inputs.save_noise = True - >>> denoise_3.cmdline + >>> denoise_3.cmdline # doctest: +IGNORE_UNICODE 'DenoiseImage -i im1.nii -n Gaussian -o [ im1_noise_corrected.nii, im1_noise.nii ] -s 1' """ input_spec = DenoiseImageInputSpec @@ -957,12 +961,12 @@ class AntsJointFusion(ANTSCommand): >>> antsjointfusion.inputs.atlas_image = [ ['rc1s1.nii','rc1s2.nii'] ] >>> antsjointfusion.inputs.atlas_segmentation_image = ['segmentation0.nii.gz'] >>> antsjointfusion.inputs.target_image = ['im1.nii'] - >>> antsjointfusion.cmdline + >>> 
antsjointfusion.cmdline # doctest: +IGNORE_UNICODE "antsJointFusion -a 0.1 -g ['rc1s1.nii', 'rc1s2.nii'] -l segmentation0.nii.gz \ -b 2.0 -o ants_fusion_label_output.nii -s 3x3x3 -t ['im1.nii']" >>> antsjointfusion.inputs.target_image = [ ['im1.nii', 'im2.nii'] ] - >>> antsjointfusion.cmdline + >>> antsjointfusion.cmdline # doctest: +IGNORE_UNICODE "antsJointFusion -a 0.1 -g ['rc1s1.nii', 'rc1s2.nii'] -l segmentation0.nii.gz \ -b 2.0 -o ants_fusion_label_output.nii -s 3x3x3 -t ['im1.nii', 'im2.nii']" @@ -970,7 +974,7 @@ class AntsJointFusion(ANTSCommand): ... ['rc2s1.nii','rc2s2.nii'] ] >>> antsjointfusion.inputs.atlas_segmentation_image = ['segmentation0.nii.gz', ... 'segmentation1.nii.gz'] - >>> antsjointfusion.cmdline + >>> antsjointfusion.cmdline # doctest: +IGNORE_UNICODE "antsJointFusion -a 0.1 -g ['rc1s1.nii', 'rc1s2.nii'] -g ['rc2s1.nii', 'rc2s2.nii'] \ -l segmentation0.nii.gz -l segmentation1.nii.gz -b 2.0 -o ants_fusion_label_output.nii \ -s 3x3x3 -t ['im1.nii', 'im2.nii']" @@ -980,7 +984,7 @@ class AntsJointFusion(ANTSCommand): >>> antsjointfusion.inputs.beta = 1.0 >>> antsjointfusion.inputs.patch_radius = [3,2,1] >>> antsjointfusion.inputs.search_radius = [3] - >>> antsjointfusion.cmdline + >>> antsjointfusion.cmdline # doctest: +IGNORE_UNICODE "antsJointFusion -a 0.5 -g ['rc1s1.nii', 'rc1s2.nii'] -g ['rc2s1.nii', 'rc2s2.nii'] \ -l segmentation0.nii.gz -l segmentation1.nii.gz -b 1.0 -d 3 -o ants_fusion_label_output.nii \ -p 3x2x1 -s 3 -t ['im1.nii', 'im2.nii']" @@ -989,7 +993,7 @@ class AntsJointFusion(ANTSCommand): >>> antsjointfusion.inputs.verbose = True >>> antsjointfusion.inputs.exclusion_image = ['roi01.nii', 'roi02.nii'] >>> antsjointfusion.inputs.exclusion_image_label = ['1','2'] - >>> antsjointfusion.cmdline + >>> antsjointfusion.cmdline # doctest: +IGNORE_UNICODE "antsJointFusion -a 0.5 -g ['rc1s1.nii', 'rc1s2.nii'] -g ['rc2s1.nii', 'rc2s2.nii'] \ -l segmentation0.nii.gz -l segmentation1.nii.gz -b 1.0 -d 3 -e 1[roi01.nii] -e 2[roi02.nii] \ -o 
ants_fusion_label_output.nii -p 3x2x1 -s mask.nii -t ['im1.nii', 'im2.nii'] -v" @@ -998,7 +1002,7 @@ class AntsJointFusion(ANTSCommand): >>> antsjointfusion.inputs.out_intensity_fusion_name_format = 'ants_joint_fusion_intensity_%d.nii.gz' >>> antsjointfusion.inputs.out_label_post_prob_name_format = 'ants_joint_fusion_posterior_%d.nii.gz' >>> antsjointfusion.inputs.out_atlas_voting_weight_name_format = 'ants_joint_fusion_voting_weight_%d.nii.gz' - >>> antsjointfusion.cmdline + >>> antsjointfusion.cmdline # doctest: +IGNORE_UNICODE "antsJointFusion -a 0.5 -g ['rc1s1.nii', 'rc1s2.nii'] -g ['rc2s1.nii', 'rc2s2.nii'] \ -l segmentation0.nii.gz -l segmentation1.nii.gz -b 1.0 -d 3 -e 1[roi01.nii] -e 2[roi02.nii] \ -o [ants_fusion_label_output.nii, ants_joint_fusion_intensity_%d.nii.gz, \ diff --git a/nipype/interfaces/ants/tests/__init__.py b/nipype/interfaces/ants/tests/__init__.py index 349937997e..99fb243f19 100644 --- a/nipype/interfaces/ants/tests/__init__.py +++ b/nipype/interfaces/ants/tests/__init__.py @@ -1,2 +1,3 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/interfaces/ants/tests/test_auto_ANTS.py b/nipype/interfaces/ants/tests/test_auto_ANTS.py index 36f153c532..32c438d2ea 100644 --- a/nipype/interfaces/ants/tests/test_auto_ANTS.py +++ b/nipype/interfaces/ants/tests/test_auto_ANTS.py @@ -8,7 +8,7 @@ def test_ANTS_inputs(): ), args=dict(argstr='%s', ), - delta_time=dict(requires=['number_of_time_steps'], + delta_time=dict(requires=[u'number_of_time_steps'], ), dimension=dict(argstr='%d', position=1, @@ -19,14 +19,14 @@ def test_ANTS_inputs(): ), fixed_image=dict(mandatory=True, ), - gradient_step_length=dict(requires=['transformation_model'], + gradient_step_length=dict(requires=[u'transformation_model'], ), ignore_exception=dict(nohash=True, usedefault=True, ), metric=dict(mandatory=True, ), - metric_weight=dict(requires=['metric'], + 
metric_weight=dict(requires=[u'metric'], ), mi_option=dict(argstr='--MI-option %s', sep='x', @@ -43,19 +43,19 @@ def test_ANTS_inputs(): number_of_iterations=dict(argstr='--number-of-iterations %s', sep='x', ), - number_of_time_steps=dict(requires=['gradient_step_length'], + number_of_time_steps=dict(requires=[u'gradient_step_length'], ), output_transform_prefix=dict(argstr='--output-naming %s', mandatory=True, usedefault=True, ), - radius=dict(requires=['metric'], + radius=dict(requires=[u'metric'], ), regularization=dict(argstr='%s', ), - regularization_deformation_field_sigma=dict(requires=['regularization'], + regularization_deformation_field_sigma=dict(requires=[u'regularization'], ), - regularization_gradient_field_sigma=dict(requires=['regularization'], + regularization_gradient_field_sigma=dict(requires=[u'regularization'], ), smoothing_sigmas=dict(argstr='--gaussian-smoothing-sigmas %s', sep='x', @@ -63,7 +63,7 @@ def test_ANTS_inputs(): subsampling_factors=dict(argstr='--subsampling-factors %s', sep='x', ), - symmetry_type=dict(requires=['delta_time'], + symmetry_type=dict(requires=[u'delta_time'], ), terminal_output=dict(nohash=True, ), diff --git a/nipype/interfaces/ants/tests/test_auto_AntsJointFusion.py b/nipype/interfaces/ants/tests/test_auto_AntsJointFusion.py index 478c0c1400..0aed2d56ec 100644 --- a/nipype/interfaces/ants/tests/test_auto_AntsJointFusion.py +++ b/nipype/interfaces/ants/tests/test_auto_AntsJointFusion.py @@ -29,7 +29,7 @@ def test_AntsJointFusion_inputs(): ), exclusion_image=dict(), exclusion_image_label=dict(argstr='-e %s', - requires=['exclusion_image'], + requires=[u'exclusion_image'], ), ignore_exception=dict(nohash=True, usedefault=True, @@ -39,14 +39,14 @@ def test_AntsJointFusion_inputs(): num_threads=dict(nohash=True, usedefault=True, ), - out_atlas_voting_weight_name_format=dict(requires=['out_label_fusion', 'out_intensity_fusion_name_format', 'out_label_post_prob_name_format'], + 
out_atlas_voting_weight_name_format=dict(requires=[u'out_label_fusion', u'out_intensity_fusion_name_format', u'out_label_post_prob_name_format'], ), out_intensity_fusion_name_format=dict(argstr='', ), out_label_fusion=dict(argstr='%s', hash_files=False, ), - out_label_post_prob_name_format=dict(requires=['out_label_fusion', 'out_intensity_fusion_name_format'], + out_label_post_prob_name_format=dict(requires=[u'out_label_fusion', u'out_intensity_fusion_name_format'], ), patch_metric=dict(argstr='-m %s', usedefault=False, @@ -59,7 +59,7 @@ def test_AntsJointFusion_inputs(): usedefault=True, ), retain_label_posterior_images=dict(argstr='-r', - requires=['atlas_segmentation_image'], + requires=[u'atlas_segmentation_image'], usedefault=True, ), search_radius=dict(argstr='-s %s', diff --git a/nipype/interfaces/ants/tests/test_auto_ApplyTransforms.py b/nipype/interfaces/ants/tests/test_auto_ApplyTransforms.py index 63d4f78e08..ba1c9e7edf 100644 --- a/nipype/interfaces/ants/tests/test_auto_ApplyTransforms.py +++ b/nipype/interfaces/ants/tests/test_auto_ApplyTransforms.py @@ -38,7 +38,7 @@ def test_ApplyTransforms_inputs(): genfile=True, hash_files=False, ), - print_out_composite_warp_file=dict(requires=['output_image'], + print_out_composite_warp_file=dict(requires=[u'output_image'], ), reference_image=dict(argstr='--reference-image %s', mandatory=True, diff --git a/nipype/interfaces/ants/tests/test_auto_ApplyTransformsToPoints.py b/nipype/interfaces/ants/tests/test_auto_ApplyTransformsToPoints.py index 3c6be3669a..6280a7c074 100644 --- a/nipype/interfaces/ants/tests/test_auto_ApplyTransformsToPoints.py +++ b/nipype/interfaces/ants/tests/test_auto_ApplyTransformsToPoints.py @@ -23,7 +23,7 @@ def test_ApplyTransformsToPoints_inputs(): ), output_file=dict(argstr='--output %s', hash_files=False, - name_source=['input_file'], + name_source=[u'input_file'], name_template='%s_transformed.csv', ), terminal_output=dict(nohash=True, diff --git 
a/nipype/interfaces/ants/tests/test_auto_Atropos.py b/nipype/interfaces/ants/tests/test_auto_Atropos.py index e19aa5591c..a6fb42b0ea 100644 --- a/nipype/interfaces/ants/tests/test_auto_Atropos.py +++ b/nipype/interfaces/ants/tests/test_auto_Atropos.py @@ -6,7 +6,7 @@ def test_Atropos_inputs(): input_map = dict(args=dict(argstr='%s', ), - convergence_threshold=dict(requires=['n_iterations'], + convergence_threshold=dict(requires=[u'n_iterations'], ), dimension=dict(argstr='--image-dimensionality %d', usedefault=True, @@ -21,7 +21,7 @@ def test_Atropos_inputs(): ), initialization=dict(argstr='%s', mandatory=True, - requires=['number_of_tissue_classes'], + requires=[u'number_of_tissue_classes'], ), intensity_images=dict(argstr='--intensity-image %s...', mandatory=True, @@ -31,9 +31,9 @@ def test_Atropos_inputs(): mask_image=dict(argstr='--mask-image %s', mandatory=True, ), - maximum_number_of_icm_terations=dict(requires=['icm_use_synchronous_update'], + maximum_number_of_icm_terations=dict(requires=[u'icm_use_synchronous_update'], ), - mrf_radius=dict(requires=['mrf_smoothing_factor'], + mrf_radius=dict(requires=[u'mrf_smoothing_factor'], ), mrf_smoothing_factor=dict(argstr='%s', ), @@ -53,13 +53,13 @@ def test_Atropos_inputs(): posterior_formulation=dict(argstr='%s', ), prior_probability_images=dict(), - prior_probability_threshold=dict(requires=['prior_weighting'], + prior_probability_threshold=dict(requires=[u'prior_weighting'], ), prior_weighting=dict(), save_posteriors=dict(), terminal_output=dict(nohash=True, ), - use_mixture_model_proportions=dict(requires=['posterior_formulation'], + use_mixture_model_proportions=dict(requires=[u'posterior_formulation'], ), use_random_seed=dict(argstr='--use-random-seed %d', usedefault=True, diff --git a/nipype/interfaces/ants/tests/test_auto_DenoiseImage.py b/nipype/interfaces/ants/tests/test_auto_DenoiseImage.py index 01b610ea30..7d7f7e897b 100644 --- a/nipype/interfaces/ants/tests/test_auto_DenoiseImage.py +++ 
b/nipype/interfaces/ants/tests/test_auto_DenoiseImage.py @@ -20,7 +20,7 @@ def test_DenoiseImage_inputs(): ), noise_image=dict(hash_files=False, keep_extension=True, - name_source=['input_image'], + name_source=[u'input_image'], name_template='%s_noise', ), noise_model=dict(argstr='-n %s', @@ -32,12 +32,12 @@ def test_DenoiseImage_inputs(): output_image=dict(argstr='-o %s', hash_files=False, keep_extension=True, - name_source=['input_image'], + name_source=[u'input_image'], name_template='%s_noise_corrected', ), save_noise=dict(mandatory=True, usedefault=True, - xor=['noise_image'], + xor=[u'noise_image'], ), shrink_factor=dict(argstr='-s %s', usedefault=True, diff --git a/nipype/interfaces/ants/tests/test_auto_JointFusion.py b/nipype/interfaces/ants/tests/test_auto_JointFusion.py index 76d8d46969..5b4703cf99 100644 --- a/nipype/interfaces/ants/tests/test_auto_JointFusion.py +++ b/nipype/interfaces/ants/tests/test_auto_JointFusion.py @@ -4,7 +4,7 @@ def test_JointFusion_inputs(): - input_map = dict(alpha=dict(requires=['method'], + input_map = dict(alpha=dict(requires=[u'method'], usedefault=True, ), args=dict(argstr='%s', @@ -13,7 +13,7 @@ def test_JointFusion_inputs(): ), atlas_group_weights=dict(argstr='-gpw %d...', ), - beta=dict(requires=['method'], + beta=dict(requires=[u'method'], usedefault=True, ), dimension=dict(argstr='%d', diff --git a/nipype/interfaces/ants/tests/test_auto_N4BiasFieldCorrection.py b/nipype/interfaces/ants/tests/test_auto_N4BiasFieldCorrection.py index 18921d811c..170b80a224 100644 --- a/nipype/interfaces/ants/tests/test_auto_N4BiasFieldCorrection.py +++ b/nipype/interfaces/ants/tests/test_auto_N4BiasFieldCorrection.py @@ -10,9 +10,9 @@ def test_N4BiasFieldCorrection_inputs(): ), bspline_fitting_distance=dict(argstr='--bspline-fitting %s', ), - bspline_order=dict(requires=['bspline_fitting_distance'], + bspline_order=dict(requires=[u'bspline_fitting_distance'], ), - convergence_threshold=dict(requires=['n_iterations'], + 
convergence_threshold=dict(requires=[u'n_iterations'], ), dimension=dict(argstr='-d %d', usedefault=True, @@ -39,7 +39,7 @@ def test_N4BiasFieldCorrection_inputs(): ), save_bias=dict(mandatory=True, usedefault=True, - xor=['bias_image'], + xor=[u'bias_image'], ), shrink_factor=dict(argstr='--shrink-factor %d', ), diff --git a/nipype/interfaces/ants/tests/test_auto_Registration.py b/nipype/interfaces/ants/tests/test_auto_Registration.py index 990f97eea5..20eb90cabf 100644 --- a/nipype/interfaces/ants/tests/test_auto_Registration.py +++ b/nipype/interfaces/ants/tests/test_auto_Registration.py @@ -9,10 +9,10 @@ def test_Registration_inputs(): collapse_output_transforms=dict(argstr='--collapse-output-transforms %d', usedefault=True, ), - convergence_threshold=dict(requires=['number_of_iterations'], + convergence_threshold=dict(requires=[u'number_of_iterations'], usedefault=True, ), - convergence_window_size=dict(requires=['convergence_threshold'], + convergence_window_size=dict(requires=[u'convergence_threshold'], usedefault=True, ), dimension=dict(argstr='--dimensionality %d', @@ -31,10 +31,10 @@ def test_Registration_inputs(): usedefault=True, ), initial_moving_transform=dict(argstr='%s', - xor=['initial_moving_transform_com'], + xor=[u'initial_moving_transform_com'], ), initial_moving_transform_com=dict(argstr='%s', - xor=['initial_moving_transform'], + xor=[u'initial_moving_transform'], ), initialize_transforms_per_stage=dict(argstr='--initialize-transforms-per-stage %d', usedefault=True, @@ -43,29 +43,29 @@ def test_Registration_inputs(): usedefault=True, ), interpolation_parameters=dict(), - invert_initial_moving_transform=dict(requires=['initial_moving_transform'], - xor=['initial_moving_transform_com'], + invert_initial_moving_transform=dict(requires=[u'initial_moving_transform'], + xor=[u'initial_moving_transform_com'], ), metric=dict(mandatory=True, ), metric_item_trait=dict(), metric_stage_trait=dict(), metric_weight=dict(mandatory=True, - 
requires=['metric'], + requires=[u'metric'], usedefault=True, ), metric_weight_item_trait=dict(), metric_weight_stage_trait=dict(), moving_image=dict(mandatory=True, ), - moving_image_mask=dict(requires=['fixed_image_mask'], + moving_image_mask=dict(requires=[u'fixed_image_mask'], ), num_threads=dict(nohash=True, usedefault=True, ), number_of_iterations=dict(), output_inverse_warped_image=dict(hash_files=False, - requires=['output_warped_image'], + requires=[u'output_warped_image'], ), output_transform_prefix=dict(argstr='%s', usedefault=True, @@ -74,16 +74,16 @@ def test_Registration_inputs(): ), radius_bins_item_trait=dict(), radius_bins_stage_trait=dict(), - radius_or_number_of_bins=dict(requires=['metric_weight'], + radius_or_number_of_bins=dict(requires=[u'metric_weight'], usedefault=True, ), restore_state=dict(argstr='--restore-state %s', ), - sampling_percentage=dict(requires=['sampling_strategy'], + sampling_percentage=dict(requires=[u'sampling_strategy'], ), sampling_percentage_item_trait=dict(), sampling_percentage_stage_trait=dict(), - sampling_strategy=dict(requires=['metric_weight'], + sampling_strategy=dict(requires=[u'metric_weight'], ), sampling_strategy_item_trait=dict(), sampling_strategy_stage_trait=dict(), @@ -91,7 +91,7 @@ def test_Registration_inputs(): ), shrink_factors=dict(mandatory=True, ), - sigma_units=dict(requires=['smoothing_sigmas'], + sigma_units=dict(requires=[u'smoothing_sigmas'], ), smoothing_sigmas=dict(mandatory=True, ), diff --git a/nipype/interfaces/ants/tests/test_auto_WarpImageMultiTransform.py b/nipype/interfaces/ants/tests/test_auto_WarpImageMultiTransform.py index 09770d9d0f..69a573aa28 100644 --- a/nipype/interfaces/ants/tests/test_auto_WarpImageMultiTransform.py +++ b/nipype/interfaces/ants/tests/test_auto_WarpImageMultiTransform.py @@ -26,23 +26,23 @@ def test_WarpImageMultiTransform_inputs(): ), out_postfix=dict(hash_files=False, usedefault=True, - xor=['output_image'], + xor=[u'output_image'], ), 
output_image=dict(argstr='%s', genfile=True, hash_files=False, position=3, - xor=['out_postfix'], + xor=[u'out_postfix'], ), reference_image=dict(argstr='-R %s', - xor=['tightest_box'], + xor=[u'tightest_box'], ), reslice_by_header=dict(argstr='--reslice-by-header', ), terminal_output=dict(nohash=True, ), tightest_box=dict(argstr='--tightest-bounding-box', - xor=['reference_image'], + xor=[u'reference_image'], ), transformation_series=dict(argstr='%s', mandatory=True, diff --git a/nipype/interfaces/ants/tests/test_auto_WarpTimeSeriesImageMultiTransform.py b/nipype/interfaces/ants/tests/test_auto_WarpTimeSeriesImageMultiTransform.py index 0e46ce34a5..ee18b7abba 100644 --- a/nipype/interfaces/ants/tests/test_auto_WarpTimeSeriesImageMultiTransform.py +++ b/nipype/interfaces/ants/tests/test_auto_WarpTimeSeriesImageMultiTransform.py @@ -28,14 +28,14 @@ def test_WarpTimeSeriesImageMultiTransform_inputs(): usedefault=True, ), reference_image=dict(argstr='-R %s', - xor=['tightest_box'], + xor=[u'tightest_box'], ), reslice_by_header=dict(argstr='--reslice-by-header', ), terminal_output=dict(nohash=True, ), tightest_box=dict(argstr='--tightest-bounding-box', - xor=['reference_image'], + xor=[u'reference_image'], ), transformation_series=dict(argstr='%s', copyfile=False, diff --git a/nipype/interfaces/ants/tests/test_spec_JointFusion.py b/nipype/interfaces/ants/tests/test_spec_JointFusion.py index ed6d283032..1b031f9f89 100644 --- a/nipype/interfaces/ants/tests/test_spec_JointFusion.py +++ b/nipype/interfaces/ants/tests/test_spec_JointFusion.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from __future__ import division from builtins import range from nipype.testing import assert_equal, assert_raises, example_data diff --git a/nipype/interfaces/ants/utils.py b/nipype/interfaces/ants/utils.py index c3253b7256..499dc7b17f 100644 --- a/nipype/interfaces/ants/utils.py +++ b/nipype/interfaces/ants/utils.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- """ANTS Apply Transforms interface 
Change directory to provide relative paths for doctests @@ -6,11 +7,13 @@ >>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data')) >>> os.chdir(datadir) """ +from __future__ import print_function, division, unicode_literals, absolute_import + import os -from .base import ANTSCommand, ANTSCommandInputSpec -from ..base import TraitedSpec, File, traits, isdefined, InputMultiPath from ...utils.filemanip import split_filename +from ..base import TraitedSpec, File, traits, isdefined, InputMultiPath +from .base import ANTSCommand, ANTSCommandInputSpec class AverageAffineTransformInputSpec(ANTSCommandInputSpec): @@ -35,7 +38,7 @@ class AverageAffineTransform(ANTSCommand): >>> avg.inputs.dimension = 3 >>> avg.inputs.transforms = ['trans.mat', 'func_to_struct.mat'] >>> avg.inputs.output_affine_transform = 'MYtemplatewarp.mat' - >>> avg.cmdline + >>> avg.cmdline # doctest: +IGNORE_UNICODE 'AverageAffineTransform 3 MYtemplatewarp.mat trans.mat func_to_struct.mat' """ _cmd = 'AverageAffineTransform' @@ -77,7 +80,7 @@ class AverageImages(ANTSCommand): >>> avg.inputs.output_average_image = "average.nii.gz" >>> avg.inputs.normalize = True >>> avg.inputs.images = ['rc1s1.nii', 'rc1s1.nii'] - >>> avg.cmdline + >>> avg.cmdline # doctest: +IGNORE_UNICODE 'AverageImages 3 average.nii.gz 1 rc1s1.nii rc1s1.nii' """ _cmd = 'AverageImages' @@ -118,7 +121,7 @@ class MultiplyImages(ANTSCommand): >>> test.inputs.first_input = 'moving2.nii' >>> test.inputs.second_input = 0.25 >>> test.inputs.output_product_image = "out.nii" - >>> test.cmdline + >>> test.cmdline # doctest: +IGNORE_UNICODE 'MultiplyImages 3 moving2.nii 0.25 out.nii' """ _cmd = 'MultiplyImages' @@ -170,7 +173,7 @@ class JacobianDeterminant(ANTSCommand): >>> jacobian.inputs.warp_file = 'ants_Warp.nii.gz' >>> jacobian.inputs.output_prefix = 'Sub001_' >>> jacobian.inputs.use_log = 1 - >>> jacobian.cmdline + >>> jacobian.cmdline # doctest: +IGNORE_UNICODE 'ANTSJacobian 3 ants_Warp.nii.gz Sub001_ 1' """ diff --git 
a/nipype/interfaces/ants/visualization.py b/nipype/interfaces/ants/visualization.py index 624f8e10b1..71d8e82f8d 100644 --- a/nipype/interfaces/ants/visualization.py +++ b/nipype/interfaces/ants/visualization.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- """The ants visualisation module provides basic functions based on ITK. Change directory to provide relative paths for doctests >>> import os @@ -5,10 +6,12 @@ >>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data')) >>> os.chdir(datadir) """ +from __future__ import print_function, division, unicode_literals, absolute_import + import os -from .base import ANTSCommand, ANTSCommandInputSpec from ..base import TraitedSpec, File, traits +from .base import ANTSCommand, ANTSCommandInputSpec class ConvertScalarImageToRGBInputSpec(ANTSCommandInputSpec): @@ -54,7 +57,7 @@ class ConvertScalarImageToRGB(ANTSCommand): >>> converter.inputs.colormap = 'jet' >>> converter.inputs.minimum_input = 0 >>> converter.inputs.maximum_input = 6 - >>> converter.cmdline + >>> converter.cmdline # doctest: +IGNORE_UNICODE 'ConvertScalarImageToRGB 3 T1.nii.gz rgb.nii.gz none jet none 0 6 0 255' """ _cmd = 'ConvertScalarImageToRGB' @@ -140,7 +143,7 @@ class CreateTiledMosaic(ANTSCommand): >>> mosaic_slicer.inputs.direction = 2 >>> mosaic_slicer.inputs.pad_or_crop = '[ -15x -50 , -15x -30 ,0]' >>> mosaic_slicer.inputs.slices = '[2 ,100 ,160]' - >>> mosaic_slicer.cmdline + >>> mosaic_slicer.cmdline # doctest: +IGNORE_UNICODE 'CreateTiledMosaic -a 0.50 -d 2 -i T1.nii.gz -x mask.nii.gz -o output.png -p [ -15x -50 , -15x -30 ,0] \ -r rgb.nii.gz -s [2 ,100 ,160]' """ diff --git a/nipype/interfaces/base.py b/nipype/interfaces/base.py index 91df89b834..bdad7501b2 100644 --- a/nipype/interfaces/base.py +++ b/nipype/interfaces/base.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -7,17 +8,15 @@ Requires Packages to be 
installed """ - -from __future__ import print_function -from __future__ import division +from __future__ import print_function, division, unicode_literals, absolute_import from future import standard_library standard_library.install_aliases() -from builtins import range -from builtins import object +from builtins import range, object, open, str, bytes from configparser import NoOptionError from copy import deepcopy import datetime +from datetime import datetime as dt import errno import locale import os @@ -28,33 +27,24 @@ import select import subprocess import sys -import random import time -import fnmatch from textwrap import wrap -from datetime import datetime as dt -from dateutil.parser import parse as parseutc from warnings import warn +from dateutil.parser import parse as parseutc -from .traits_extension import (traits, Undefined, TraitDictObject, - TraitListObject, TraitError, - isdefined, File, Directory, - has_metadata) -from ..utils.filemanip import (md5, hash_infile, FileNotFoundError, - hash_timestamp, save_json, - split_filename) -from ..utils.misc import is_container, trim, str2bool +from .. import config, logging, LooseVersion, __version__ from ..utils.provenance import write_provenance -from .. import config, logging, LooseVersion -from .. 
import __version__ -from ..external.six import string_types, text_type +from ..utils.misc import is_container, trim, str2bool +from ..utils.filemanip import (md5, hash_infile, FileNotFoundError, hash_timestamp, + split_filename, encode_dict) +from .traits_extension import ( + traits, Undefined, TraitDictObject, TraitListObject, TraitError, isdefined, File, + Directory, DictStrStr, has_metadata) from ..external.due import due runtime_profile = str2bool(config.get('execution', 'profile_runtime')) - nipype_version = LooseVersion(__version__) - iflogger = logging.getLogger('interface') if runtime_profile: @@ -68,6 +58,11 @@ __docformat__ = 'restructuredtext' +class Str(traits.Unicode): + pass + +traits.Str = Str + class NipypeInterfaceError(Exception): def __init__(self, value): self.value = value @@ -76,10 +71,10 @@ def __str__(self): return repr(self.value) def _exists_in_path(cmd, environ): - ''' + """ Based on a code snippet from http://orip.org/2009/08/python-checking-if-executable-exists-in.html - ''' + """ if 'PATH' in environ: input_environ = environ.get("PATH") @@ -128,10 +123,10 @@ class Bunch(object): -------- >>> from nipype.interfaces.base import Bunch >>> inputs = Bunch(infile='subj.nii', fwhm=6.0, register_to_mean=True) - >>> inputs + >>> inputs # doctest: +IGNORE_UNICODE Bunch(fwhm=6.0, infile='subj.nii', register_to_mean=True) >>> inputs.register_to_mean = False - >>> inputs + >>> inputs # doctest: +IGNORE_UNICODE Bunch(fwhm=6.0, infile='subj.nii', register_to_mean=False) @@ -144,6 +139,7 @@ class Bunch(object): """ + def __init__(self, *args, **kwargs): self.__dict__.update(*args, **kwargs) @@ -163,13 +159,13 @@ def iteritems(self): return list(self.items()) def get(self, *args): - '''Support dictionary get() functionality - ''' + """Support dictionary get() functionality + """ return self.__dict__.get(*args) def set(self, **kwargs): - '''Support dictionary get() functionality - ''' + """Support dictionary get() functionality + """ return 
self.__dict__.update(**kwargs) def dictcopy(self): @@ -271,13 +267,13 @@ def _get_bunch_hash(self): # Sort the items of the dictionary, before hashing the string # representation so we get a predictable order of the # dictionary. - sorted_dict = str(sorted(dict_nofilename.items())) + sorted_dict = encode_dict(sorted(dict_nofilename.items())) return dict_withhash, md5(sorted_dict.encode()).hexdigest() def __pretty__(self, p, cycle): - '''Support for the pretty module + """Support for the pretty module - pretty is included in ipython.externals for ipython > 0.10''' + pretty is included in ipython.externals for ipython > 0.10""" if cycle: p.text('Bunch(...)') else: @@ -574,7 +570,7 @@ def get_hashval(self, hash_method=None): dict_withhash.append((name, self._get_sorteddict(val, True, hash_method=hash_method, hash_files=hash_files))) - return dict_withhash, md5(str(dict_nofilename).encode()).hexdigest() + return dict_withhash, md5(encode_dict(dict_nofilename).encode()).hexdigest() def _get_sorteddict(self, object, dictwithhash=False, hash_method=None, hash_files=True): @@ -597,7 +593,7 @@ def _get_sorteddict(self, object, dictwithhash=False, hash_method=None, out = tuple(out) else: if isdefined(object): - if (hash_files and isinstance(object, string_types) and + if (hash_files and isinstance(object, (str, bytes)) and os.path.isfile(object)): if hash_method is None: hash_method = config.get('execution', 'hash_method') @@ -1092,7 +1088,7 @@ def run(self, **inputs): else: inputs_str = '' - if len(e.args) == 1 and isinstance(e.args[0], string_types): + if len(e.args) == 1 and isinstance(e.args[0], (str, bytes)): e.args = (e.args[0] + " ".join([message, inputs_str]),) else: e.args += (message, ) @@ -1530,8 +1526,8 @@ def get_dependencies(name, environ): class CommandLineInputSpec(BaseInterfaceInputSpec): - args = traits.Str(argstr='%s', desc='Additional parameters to the command') - environ = traits.DictStrStr(desc='Environment variables', usedefault=True, + args = 
Str(argstr='%s', desc='Additional parameters to the command') + environ = DictStrStr(desc='Environment variables', usedefault=True, nohash=True) # This input does not have a "usedefault=True" so the set_default_terminal_output() # method would work @@ -1565,20 +1561,21 @@ class must be instantiated with a command argument >>> from nipype.interfaces.base import CommandLine >>> cli = CommandLine(command='ls', environ={'DISPLAY': ':1'}) >>> cli.inputs.args = '-al' - >>> cli.cmdline + >>> cli.cmdline # doctest: +IGNORE_UNICODE 'ls -al' - >>> pprint.pprint(cli.inputs.trait_get()) # doctest: +NORMALIZE_WHITESPACE + >>> pprint.pprint(cli.inputs.trait_get()) # doctest: +NORMALIZE_WHITESPACE +IGNORE_UNICODE {'args': '-al', 'environ': {'DISPLAY': ':1'}, 'ignore_exception': False, 'terminal_output': 'stream'} - >>> cli.inputs.get_hashval() - ([('args', '-al')], '11c37f97649cd61627f4afe5136af8c0') + >>> cli.inputs.get_hashval()[0][0] # doctest: +IGNORE_UNICODE + ('args', '-al') + >>> cli.inputs.get_hashval()[1] # doctest: +IGNORE_UNICODE + '11c37f97649cd61627f4afe5136af8c0' """ - input_spec = CommandLineInputSpec _cmd = None _version = None @@ -1634,11 +1631,10 @@ def cmdline(self): return ' '.join(allargs) def raise_exception(self, runtime): - message = "Command:\n" + runtime.cmdline + "\n" - message += "Standard output:\n" + runtime.stdout + "\n" - message += "Standard error:\n" + runtime.stderr + "\n" - message += "Return code: " + str(runtime.returncode) - raise RuntimeError(message) + raise RuntimeError( + ('Command:\n{cmdline}\nStandard output:\n{stdout}\n' + 'Standard error:\n{stderr}\nReturn code: {returncode}').format( + **runtime.dictcopy())) @classmethod def help(cls, returnhelp=False): @@ -1780,14 +1776,15 @@ def _filename_from_source(self, name, chain=None): name_template = "%s_generated" ns = trait_spec.name_source - while isinstance(ns, list): + while isinstance(ns, (list, tuple)): if len(ns) > 1: iflogger.warn('Only one name_source per trait is allowed') ns = 
ns[0] - if not isinstance(ns, string_types): - raise ValueError(('name_source of \'%s\' trait sould be an ' - 'input trait name') % name) + if not isinstance(ns, (str, bytes)): + raise ValueError( + 'name_source of \'{}\' trait should be an input trait ' + 'name, but a type {} object was found'.format(name, type(ns))) if isdefined(getattr(self.inputs, ns)): name_source = ns @@ -1830,7 +1827,7 @@ def _list_outputs(self): traits = self.inputs.traits(**metadata) if traits: outputs = self.output_spec().get() #pylint: disable=E1102 - for name, trait_spec in traits.items(): + for name, trait_spec in list(traits.items()): out_name = name if trait_spec.output_name is not None: out_name = trait_spec.output_name @@ -1858,10 +1855,12 @@ def _parse_inputs(self, skip=None): if skip and name in skip: continue value = getattr(self.inputs, name) - if spec.genfile or spec.name_source: + if spec.name_source: value = self._filename_from_source(name) - if not isdefined(value): + elif spec.genfile: + if not isdefined(value) or value is None: value = self._gen_filename(name) + if not isdefined(value): continue arg = self._format_arg(name, spec, value) @@ -1888,7 +1887,7 @@ class StdOutCommandLine(CommandLine): input_spec = StdOutCommandLineInputSpec def _gen_filename(self, name): - if name is 'out_file': + if name == 'out_file': return self._gen_outfilename() else: return None @@ -1907,7 +1906,7 @@ class MpiCommandLineInputSpec(CommandLineInputSpec): class MpiCommandLine(CommandLine): - '''Implements functionality to interact with command line programs + """Implements functionality to interact with command line programs that can be run with MPI (i.e. using 'mpiexec'). 
Examples @@ -1915,14 +1914,14 @@ class MpiCommandLine(CommandLine): >>> from nipype.interfaces.base import MpiCommandLine >>> mpi_cli = MpiCommandLine(command='my_mpi_prog') >>> mpi_cli.inputs.args = '-v' - >>> mpi_cli.cmdline + >>> mpi_cli.cmdline # doctest: +IGNORE_UNICODE 'my_mpi_prog -v' >>> mpi_cli.inputs.use_mpi = True >>> mpi_cli.inputs.n_procs = 8 - >>> mpi_cli.cmdline + >>> mpi_cli.cmdline # doctest: +IGNORE_UNICODE 'mpiexec -n 8 my_mpi_prog -v' - ''' + """ input_spec = MpiCommandLineInputSpec @property @@ -2024,15 +2023,15 @@ class OutputMultiPath(MultiPath): >>> a.foo = '/software/temp/foo.txt' - >>> a.foo + >>> a.foo # doctest: +IGNORE_UNICODE '/software/temp/foo.txt' >>> a.foo = ['/software/temp/foo.txt'] - >>> a.foo + >>> a.foo # doctest: +IGNORE_UNICODE '/software/temp/foo.txt' >>> a.foo = ['/software/temp/foo.txt', '/software/temp/goo.txt'] - >>> a.foo + >>> a.foo # doctest: +IGNORE_UNICODE ['/software/temp/foo.txt', '/software/temp/goo.txt'] """ @@ -2069,15 +2068,15 @@ class InputMultiPath(MultiPath): >>> a.foo = '/software/temp/foo.txt' - >>> a.foo + >>> a.foo # doctest: +IGNORE_UNICODE ['/software/temp/foo.txt'] >>> a.foo = ['/software/temp/foo.txt'] - >>> a.foo + >>> a.foo # doctest: +IGNORE_UNICODE ['/software/temp/foo.txt'] >>> a.foo = ['/software/temp/foo.txt', '/software/temp/goo.txt'] - >>> a.foo + >>> a.foo # doctest: +IGNORE_UNICODE ['/software/temp/foo.txt', '/software/temp/goo.txt'] """ diff --git a/nipype/interfaces/brainsuite/__init__.py b/nipype/interfaces/brainsuite/__init__.py index bf6a283f63..ba8154b631 100644 --- a/nipype/interfaces/brainsuite/__init__.py +++ b/nipype/interfaces/brainsuite/__init__.py @@ -1,2 +1,3 @@ +# -*- coding: utf-8 -*- from .brainsuite import (Bse, Bfc, Pvc, Cerebro, Cortex, Scrubmask, Tca, Dewisp, Dfs, Pialmesh, Skullfinder, Hemisplit) diff --git a/nipype/interfaces/brainsuite/brainsuite.py b/nipype/interfaces/brainsuite/brainsuite.py index 3d8234b380..d870af30cc 100644 --- 
a/nipype/interfaces/brainsuite/brainsuite.py +++ b/nipype/interfaces/brainsuite/brainsuite.py @@ -1,14 +1,10 @@ +# -*- coding: utf-8 -*- +from __future__ import print_function, division, unicode_literals, absolute_import + import os import re as regex -from nipype.interfaces.base import( - TraitedSpec, - CommandLineInputSpec, - CommandLine, - File, - traits, - isdefined, -) +from ..base import TraitedSpec, CommandLineInputSpec, CommandLine, File, traits, isdefined class BseInputSpec(CommandLineInputSpec): @@ -885,4 +881,4 @@ def l_outputs(self): if not name is None: outputs[key] = name - return outputs \ No newline at end of file + return outputs diff --git a/nipype/interfaces/brainsuite/tests/__init__.py b/nipype/interfaces/brainsuite/tests/__init__.py index e69de29bb2..40a96afc6f 100644 --- a/nipype/interfaces/brainsuite/tests/__init__.py +++ b/nipype/interfaces/brainsuite/tests/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/brainsuite/tests/test_auto_Dfs.py b/nipype/interfaces/brainsuite/tests/test_auto_Dfs.py index a49244d115..f2c43b209c 100644 --- a/nipype/interfaces/brainsuite/tests/test_auto_Dfs.py +++ b/nipype/interfaces/brainsuite/tests/test_auto_Dfs.py @@ -23,7 +23,7 @@ def test_Dfs_inputs(): noNormalsFlag=dict(argstr='--nonormals', ), nonZeroTessellation=dict(argstr='-nz', - xor=('nonZeroTessellation', 'specialTessellation'), + xor=(u'nonZeroTessellation', u'specialTessellation'), ), outputSurfaceFile=dict(argstr='-o %s', genfile=True, @@ -40,8 +40,8 @@ def test_Dfs_inputs(): ), specialTessellation=dict(argstr='%s', position=-1, - requires=['tessellationThreshold'], - xor=('nonZeroTessellation', 'specialTessellation'), + requires=[u'tessellationThreshold'], + xor=(u'nonZeroTessellation', u'specialTessellation'), ), terminal_output=dict(nohash=True, ), diff --git a/nipype/interfaces/bru2nii.py b/nipype/interfaces/bru2nii.py index 05a3d206b6..481aefb9ec 100644 --- a/nipype/interfaces/bru2nii.py +++ 
b/nipype/interfaces/bru2nii.py @@ -1,54 +1,65 @@ +# -*- coding: utf-8 -*- """The bru2nii module provides basic functions for dicom conversion - Change directory to provide relative paths for doctests - >>> import os - >>> filepath = os.path.dirname( os.path.realpath( __file__ ) ) - >>> datadir = os.path.realpath(os.path.join(filepath, '../testing/data')) - >>> os.chdir(datadir) + Change directory to provide relative paths for doctests + >>> import os + >>> filepath = os.path.dirname( os.path.realpath( __file__ ) ) + >>> datadir = os.path.realpath(os.path.join(filepath, '../testing/data')) + >>> os.chdir(datadir) """ +from __future__ import print_function, division, unicode_literals, absolute_import -from nipype.interfaces.base import (CommandLine, CommandLineInputSpec, traits, TraitedSpec, isdefined, File, Directory) import os - +from .base import (CommandLine, CommandLineInputSpec, + traits, TraitedSpec, isdefined, File, Directory) class Bru2InputSpec(CommandLineInputSpec): - input_dir = Directory(desc = "Input Directory", exists=True, mandatory=True, position=-1, argstr="%s") - actual_size = traits.Bool(argstr='-a', desc="Keep actual size - otherwise x10 scale so animals match human.") - force_conversion = traits.Bool(argstr='-f', desc="Force conversion of localizers images (multiple slice orientations).") - append_protocol_name = traits.Bool(argstr='-p', desc="Append protocol name to output filename.") - output_filename = traits.Str(argstr="-o %s", desc="Output filename ('.nii' will be appended)", genfile=True) + input_dir = Directory( + desc="Input Directory", exists=True, mandatory=True, position=-1, argstr="%s") + actual_size = traits.Bool( + argstr='-a', desc="Keep actual size - otherwise x10 scale so animals match human.") + force_conversion = traits.Bool( + argstr='-f', desc="Force conversion of localizers images (multiple slice orientations).") + append_protocol_name = traits.Bool( + argstr='-p', desc="Append protocol name to output filename.") + 
output_filename = traits.Str( + argstr="-o %s", desc="Output filename ('.nii' will be appended)", genfile=True) + class Bru2OutputSpec(TraitedSpec): - nii_file = File(exists=True) + nii_file = File(exists=True) + class Bru2(CommandLine): - """Uses bru2nii's Bru2 to convert Bruker files - - Examples - ======== - - >>> from nipype.interfaces.bru2nii import Bru2 - >>> converter = Bru2() - >>> converter.inputs.input_dir = "brukerdir" - >>> converter.cmdline # doctest: +ELLIPSIS - 'Bru2 -o .../nipype/nipype/testing/data/brukerdir brukerdir' - """ - input_spec = Bru2InputSpec - output_spec = Bru2OutputSpec - _cmd = "Bru2" - - def _list_outputs(self): - outputs = self._outputs().get() - if isdefined(self.inputs.output_filename): - output_filename1 = self.inputs.output_filename - else: - output_filename1 = self._gen_filename('output_filename') - outputs["nii_file"] = output_filename1+".nii" - return outputs - - def _gen_filename(self, name): - if name == 'output_filename': - outfile = os.path.join(os.getcwd(),os.path.basename(os.path.normpath(self.inputs.input_dir))) - return outfile + + """Uses bru2nii's Bru2 to convert Bruker files + + Examples + ======== + + >>> from nipype.interfaces.bru2nii import Bru2 + >>> converter = Bru2() + >>> converter.inputs.input_dir = "brukerdir" + >>> converter.cmdline # doctest: +ELLIPSIS +IGNORE_UNICODE + 'Bru2 -o .../nipype/nipype/testing/data/brukerdir brukerdir' + """ + input_spec = Bru2InputSpec + output_spec = Bru2OutputSpec + _cmd = "Bru2" + + def _list_outputs(self): + outputs = self._outputs().get() + if isdefined(self.inputs.output_filename): + output_filename1 = self.inputs.output_filename + else: + output_filename1 = self._gen_filename('output_filename') + outputs["nii_file"] = output_filename1+".nii" + return outputs + + def _gen_filename(self, name): + if name == 'output_filename': + outfile = os.path.join( + os.getcwd(), os.path.basename(os.path.normpath(self.inputs.input_dir))) + return outfile diff --git 
a/nipype/interfaces/c3.py b/nipype/interfaces/c3.py index 8246a68786..3dd47dab49 100644 --- a/nipype/interfaces/c3.py +++ b/nipype/interfaces/c3.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- """The ants module provides basic functions for interfacing with ants functions. Change directory to provide relative paths for doctests @@ -6,6 +7,7 @@ >>> datadir = os.path.realpath(os.path.join(filepath, '../testing/data')) >>> os.chdir(datadir) """ +from __future__ import print_function, division, unicode_literals, absolute_import from .base import (CommandLineInputSpec, traits, TraitedSpec, File, SEMLikeCommandLine) @@ -36,7 +38,7 @@ class C3dAffineTool(SEMLikeCommandLine): >>> c3.inputs.source_file = 'cmatrix.mat' >>> c3.inputs.itk_transform = 'affine.txt' >>> c3.inputs.fsl2ras = True - >>> c3.cmdline + >>> c3.cmdline # doctest: +IGNORE_UNICODE 'c3d_affine_tool -src cmatrix.mat -fsl2ras -oitk affine.txt' """ input_spec = C3dAffineToolInputSpec diff --git a/nipype/interfaces/camino/__init__.py b/nipype/interfaces/camino/__init__.py index e3f7271626..0120732ef6 100644 --- a/nipype/interfaces/camino/__init__.py +++ b/nipype/interfaces/camino/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Camino top level namespace diff --git a/nipype/interfaces/camino/calib.py b/nipype/interfaces/camino/calib.py index a56e501e7c..c4c07fa71d 100644 --- a/nipype/interfaces/camino/calib.py +++ b/nipype/interfaces/camino/calib.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- """ Change directory to provide relative paths for doctests >>> import os @@ -6,12 +7,13 @@ >>> os.chdir(datadir) """ +from __future__ import print_function, division, unicode_literals, absolute_import + import os -from ..base import (CommandLineInputSpec, CommandLine, traits, - TraitedSpec, File, StdOutCommandLine, - StdOutCommandLineInputSpec, isdefined) from ...utils.filemanip import split_filename +from 
..base import (traits, TraitedSpec, File, StdOutCommandLine, + StdOutCommandLineInputSpec) class SFPICOCalibDataInputSpec(StdOutCommandLineInputSpec): diff --git a/nipype/interfaces/camino/connectivity.py b/nipype/interfaces/camino/connectivity.py index 7f8dc7f0bd..654d71b1f1 100644 --- a/nipype/interfaces/camino/connectivity.py +++ b/nipype/interfaces/camino/connectivity.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- """ Change directory to provide relative paths for doctests >>> import os @@ -6,11 +7,12 @@ >>> os.chdir(datadir) """ +from __future__ import print_function, division, unicode_literals, absolute_import import os +from ...utils.filemanip import split_filename from ..base import (traits, TraitedSpec, File, CommandLine, CommandLineInputSpec, isdefined) -from ...utils.filemanip import split_filename class ConmatInputSpec(CommandLineInputSpec): diff --git a/nipype/interfaces/camino/convert.py b/nipype/interfaces/camino/convert.py index cdde8a2b88..6cf8f4f253 100644 --- a/nipype/interfaces/camino/convert.py +++ b/nipype/interfaces/camino/convert.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- """ Change directory to provide relative paths for doctests >>> import os @@ -6,14 +7,16 @@ >>> os.chdir(datadir) """ +from __future__ import print_function, division, unicode_literals, absolute_import + import os import glob +from ...utils.filemanip import split_filename from ..base import (CommandLineInputSpec, CommandLine, traits, TraitedSpec, File, StdOutCommandLine, OutputMultiPath, StdOutCommandLineInputSpec, isdefined) -from ...utils.filemanip import split_filename class Image2VoxelInputSpec(StdOutCommandLineInputSpec): diff --git a/nipype/interfaces/camino/dti.py b/nipype/interfaces/camino/dti.py index 8402fcf45f..5aba0560c1 100644 --- a/nipype/interfaces/camino/dti.py +++ b/nipype/interfaces/camino/dti.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- """ Change directory to provide relative paths for doctests >>> import os @@ -6,13 +7,15 @@ >>> os.chdir(datadir) """ +from 
__future__ import print_function, division, unicode_literals, absolute_import + import os +from ...utils.filemanip import split_filename from ..base import (CommandLineInputSpec, CommandLine, traits, TraitedSpec, File, Directory, StdOutCommandLine, StdOutCommandLineInputSpec, isdefined, InputMultiPath) -from ...utils.filemanip import split_filename class DTIFitInputSpec(StdOutCommandLineInputSpec): @@ -574,7 +577,7 @@ def _list_outputs(self): return outputs def _gen_filename(self, name): - if name is 'out_file': + if name == 'out_file': return self._gen_outfilename() else: return None diff --git a/nipype/interfaces/camino/odf.py b/nipype/interfaces/camino/odf.py index e39bc81117..5f16726dfc 100644 --- a/nipype/interfaces/camino/odf.py +++ b/nipype/interfaces/camino/odf.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- """ Change directory to provide relative paths for doctests >>> import os @@ -6,12 +7,13 @@ >>> os.chdir(datadir) """ +from __future__ import print_function, division, unicode_literals, absolute_import + import os -from ..base import (CommandLineInputSpec, CommandLine, traits, - TraitedSpec, File, StdOutCommandLine, - StdOutCommandLineInputSpec, isdefined) from ...utils.filemanip import split_filename +from ..base import (traits, TraitedSpec, File, StdOutCommandLine, + StdOutCommandLineInputSpec) class QBallMXInputSpec(StdOutCommandLineInputSpec): diff --git a/nipype/interfaces/camino/tests/__init__.py b/nipype/interfaces/camino/tests/__init__.py index 349937997e..99fb243f19 100644 --- a/nipype/interfaces/camino/tests/__init__.py +++ b/nipype/interfaces/camino/tests/__init__.py @@ -1,2 +1,3 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/interfaces/camino/tests/test_auto_Conmat.py b/nipype/interfaces/camino/tests/test_auto_Conmat.py index c5aa705c6c..d02db207e9 100644 --- a/nipype/interfaces/camino/tests/test_auto_Conmat.py +++ 
b/nipype/interfaces/camino/tests/test_auto_Conmat.py @@ -19,7 +19,7 @@ def test_Conmat_inputs(): genfile=True, ), scalar_file=dict(argstr='-scalarfile %s', - requires=['tract_stat'], + requires=[u'tract_stat'], ), target_file=dict(argstr='-targetfile %s', mandatory=True, @@ -30,12 +30,12 @@ def test_Conmat_inputs(): ), tract_prop=dict(argstr='-tractstat %s', units='NA', - xor=['tract_stat'], + xor=[u'tract_stat'], ), tract_stat=dict(argstr='-tractstat %s', - requires=['scalar_file'], + requires=[u'scalar_file'], units='NA', - xor=['tract_prop'], + xor=[u'tract_prop'], ), ) inputs = Conmat.input_spec() diff --git a/nipype/interfaces/camino/tests/test_auto_MESD.py b/nipype/interfaces/camino/tests/test_auto_MESD.py index 018d820a96..0424c50086 100644 --- a/nipype/interfaces/camino/tests/test_auto_MESD.py +++ b/nipype/interfaces/camino/tests/test_auto_MESD.py @@ -12,7 +12,7 @@ def test_MESD_inputs(): usedefault=True, ), fastmesd=dict(argstr='-fastmesd', - requires=['mepointset'], + requires=[u'mepointset'], ), ignore_exception=dict(nohash=True, usedefault=True, diff --git a/nipype/interfaces/camino/tests/test_auto_ProcStreamlines.py b/nipype/interfaces/camino/tests/test_auto_ProcStreamlines.py index 99ecff3624..96001c0d84 100644 --- a/nipype/interfaces/camino/tests/test_auto_ProcStreamlines.py +++ b/nipype/interfaces/camino/tests/test_auto_ProcStreamlines.py @@ -57,18 +57,18 @@ def test_ProcStreamlines_inputs(): position=-1, ), outputacm=dict(argstr='-outputacm', - requires=['outputroot', 'seedfile'], + requires=[u'outputroot', u'seedfile'], ), outputcbs=dict(argstr='-outputcbs', - requires=['outputroot', 'targetfile', 'seedfile'], + requires=[u'outputroot', u'targetfile', u'seedfile'], ), outputcp=dict(argstr='-outputcp', - requires=['outputroot', 'seedfile'], + requires=[u'outputroot', u'seedfile'], ), outputroot=dict(argstr='-outputroot %s', ), outputsc=dict(argstr='-outputsc', - requires=['outputroot', 'seedfile'], + requires=[u'outputroot', u'seedfile'], ), 
outputtracts=dict(argstr='-outputtracts', ), diff --git a/nipype/interfaces/camino/tests/test_auto_Track.py b/nipype/interfaces/camino/tests/test_auto_Track.py index a8fd980b79..b1ab2c0f56 100644 --- a/nipype/interfaces/camino/tests/test_auto_Track.py +++ b/nipype/interfaces/camino/tests/test_auto_Track.py @@ -11,7 +11,7 @@ def test_Track_inputs(): args=dict(argstr='%s', ), curveinterval=dict(argstr='-curveinterval %f', - requires=['curvethresh'], + requires=[u'curvethresh'], ), curvethresh=dict(argstr='-curvethresh %f', ), @@ -57,7 +57,7 @@ def test_Track_inputs(): position=2, ), stepsize=dict(argstr='-stepsize %f', - requires=['tracker'], + requires=[u'tracker'], ), terminal_output=dict(nohash=True, ), diff --git a/nipype/interfaces/camino/tests/test_auto_TrackBallStick.py b/nipype/interfaces/camino/tests/test_auto_TrackBallStick.py index 1d563f873c..7b8294db23 100644 --- a/nipype/interfaces/camino/tests/test_auto_TrackBallStick.py +++ b/nipype/interfaces/camino/tests/test_auto_TrackBallStick.py @@ -11,7 +11,7 @@ def test_TrackBallStick_inputs(): args=dict(argstr='%s', ), curveinterval=dict(argstr='-curveinterval %f', - requires=['curvethresh'], + requires=[u'curvethresh'], ), curvethresh=dict(argstr='-curvethresh %f', ), @@ -57,7 +57,7 @@ def test_TrackBallStick_inputs(): position=2, ), stepsize=dict(argstr='-stepsize %f', - requires=['tracker'], + requires=[u'tracker'], ), terminal_output=dict(nohash=True, ), diff --git a/nipype/interfaces/camino/tests/test_auto_TrackBayesDirac.py b/nipype/interfaces/camino/tests/test_auto_TrackBayesDirac.py index f2c2aecede..697a8157ca 100644 --- a/nipype/interfaces/camino/tests/test_auto_TrackBayesDirac.py +++ b/nipype/interfaces/camino/tests/test_auto_TrackBayesDirac.py @@ -11,7 +11,7 @@ def test_TrackBayesDirac_inputs(): args=dict(argstr='%s', ), curveinterval=dict(argstr='-curveinterval %f', - requires=['curvethresh'], + requires=[u'curvethresh'], ), curvepriorg=dict(argstr='-curvepriorg %G', ), @@ -77,7 +77,7 @@ def 
test_TrackBayesDirac_inputs(): position=2, ), stepsize=dict(argstr='-stepsize %f', - requires=['tracker'], + requires=[u'tracker'], ), terminal_output=dict(nohash=True, ), diff --git a/nipype/interfaces/camino/tests/test_auto_TrackBedpostxDeter.py b/nipype/interfaces/camino/tests/test_auto_TrackBedpostxDeter.py index 9f9e40d284..6b6ee32c0d 100644 --- a/nipype/interfaces/camino/tests/test_auto_TrackBedpostxDeter.py +++ b/nipype/interfaces/camino/tests/test_auto_TrackBedpostxDeter.py @@ -14,7 +14,7 @@ def test_TrackBedpostxDeter_inputs(): mandatory=True, ), curveinterval=dict(argstr='-curveinterval %f', - requires=['curvethresh'], + requires=[u'curvethresh'], ), curvethresh=dict(argstr='-curvethresh %f', ), @@ -63,7 +63,7 @@ def test_TrackBedpostxDeter_inputs(): position=2, ), stepsize=dict(argstr='-stepsize %f', - requires=['tracker'], + requires=[u'tracker'], ), terminal_output=dict(nohash=True, ), diff --git a/nipype/interfaces/camino/tests/test_auto_TrackBedpostxProba.py b/nipype/interfaces/camino/tests/test_auto_TrackBedpostxProba.py index 40f01d1b37..0e7d88071e 100644 --- a/nipype/interfaces/camino/tests/test_auto_TrackBedpostxProba.py +++ b/nipype/interfaces/camino/tests/test_auto_TrackBedpostxProba.py @@ -14,7 +14,7 @@ def test_TrackBedpostxProba_inputs(): mandatory=True, ), curveinterval=dict(argstr='-curveinterval %f', - requires=['curvethresh'], + requires=[u'curvethresh'], ), curvethresh=dict(argstr='-curvethresh %f', ), @@ -66,7 +66,7 @@ def test_TrackBedpostxProba_inputs(): position=2, ), stepsize=dict(argstr='-stepsize %f', - requires=['tracker'], + requires=[u'tracker'], ), terminal_output=dict(nohash=True, ), diff --git a/nipype/interfaces/camino/tests/test_auto_TrackBootstrap.py b/nipype/interfaces/camino/tests/test_auto_TrackBootstrap.py index 091fa6bd90..40b1a21e80 100644 --- a/nipype/interfaces/camino/tests/test_auto_TrackBootstrap.py +++ b/nipype/interfaces/camino/tests/test_auto_TrackBootstrap.py @@ -16,7 +16,7 @@ def 
test_TrackBootstrap_inputs(): mandatory=True, ), curveinterval=dict(argstr='-curveinterval %f', - requires=['curvethresh'], + requires=[u'curvethresh'], ), curvethresh=dict(argstr='-curvethresh %f', ), @@ -70,7 +70,7 @@ def test_TrackBootstrap_inputs(): position=2, ), stepsize=dict(argstr='-stepsize %f', - requires=['tracker'], + requires=[u'tracker'], ), terminal_output=dict(nohash=True, ), diff --git a/nipype/interfaces/camino/tests/test_auto_TrackDT.py b/nipype/interfaces/camino/tests/test_auto_TrackDT.py index 0376ff7d55..a7f4ec098f 100644 --- a/nipype/interfaces/camino/tests/test_auto_TrackDT.py +++ b/nipype/interfaces/camino/tests/test_auto_TrackDT.py @@ -11,7 +11,7 @@ def test_TrackDT_inputs(): args=dict(argstr='%s', ), curveinterval=dict(argstr='-curveinterval %f', - requires=['curvethresh'], + requires=[u'curvethresh'], ), curvethresh=dict(argstr='-curvethresh %f', ), @@ -57,7 +57,7 @@ def test_TrackDT_inputs(): position=2, ), stepsize=dict(argstr='-stepsize %f', - requires=['tracker'], + requires=[u'tracker'], ), terminal_output=dict(nohash=True, ), diff --git a/nipype/interfaces/camino/tests/test_auto_TrackPICo.py b/nipype/interfaces/camino/tests/test_auto_TrackPICo.py index c95a7e8812..805e1871f6 100644 --- a/nipype/interfaces/camino/tests/test_auto_TrackPICo.py +++ b/nipype/interfaces/camino/tests/test_auto_TrackPICo.py @@ -11,7 +11,7 @@ def test_TrackPICo_inputs(): args=dict(argstr='%s', ), curveinterval=dict(argstr='-curveinterval %f', - requires=['curvethresh'], + requires=[u'curvethresh'], ), curvethresh=dict(argstr='-curvethresh %f', ), @@ -62,7 +62,7 @@ def test_TrackPICo_inputs(): position=2, ), stepsize=dict(argstr='-stepsize %f', - requires=['tracker'], + requires=[u'tracker'], ), terminal_output=dict(nohash=True, ), diff --git a/nipype/interfaces/camino/utils.py b/nipype/interfaces/camino/utils.py index 19fe6ac768..0cb07d89f5 100644 --- a/nipype/interfaces/camino/utils.py +++ b/nipype/interfaces/camino/utils.py @@ -1,3 +1,4 @@ +# -*- coding: 
utf-8 -*- """ Change directory to provide relative paths for doctests >>> import os @@ -6,11 +7,11 @@ >>> os.chdir(datadir) """ +from __future__ import print_function, division, unicode_literals, absolute_import import os -from ..base import (traits, TraitedSpec, File, - CommandLine, CommandLineInputSpec, isdefined, - InputMultiPath) +from ..base import (traits, TraitedSpec, File, CommandLine, + CommandLineInputSpec, InputMultiPath) from ...utils.filemanip import split_filename diff --git a/nipype/interfaces/camino2trackvis/__init__.py b/nipype/interfaces/camino2trackvis/__init__.py index d0c704a808..94d3e458a7 100644 --- a/nipype/interfaces/camino2trackvis/__init__.py +++ b/nipype/interfaces/camino2trackvis/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Camino2Trackvis top level namespace diff --git a/nipype/interfaces/camino2trackvis/convert.py b/nipype/interfaces/camino2trackvis/convert.py index 9075a06ee2..b9032ba1cd 100644 --- a/nipype/interfaces/camino2trackvis/convert.py +++ b/nipype/interfaces/camino2trackvis/convert.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- """ Provides interfaces to various commands provided by Camino-Trackvis @@ -8,10 +9,12 @@ >>> os.chdir(datadir) """ +from __future__ import print_function, division, unicode_literals, absolute_import + import os -from ..base import CommandLineInputSpec, CommandLine, traits, TraitedSpec, File from ...utils.filemanip import split_filename +from ..base import CommandLineInputSpec, CommandLine, traits, TraitedSpec, File class Camino2TrackvisInputSpec(CommandLineInputSpec): @@ -79,7 +82,7 @@ def _list_outputs(self): return outputs def _gen_filename(self, name): - if name is 'out_file': + if name == 'out_file': return self._gen_outfilename() else: return None @@ -130,7 +133,7 @@ def _list_outputs(self): return outputs def _gen_filename(self, name): - if name is 'out_file': + if name == 
'out_file': return self._gen_outfilename() else: return None diff --git a/nipype/interfaces/camino2trackvis/tests/__init__.py b/nipype/interfaces/camino2trackvis/tests/__init__.py index e69de29bb2..40a96afc6f 100644 --- a/nipype/interfaces/camino2trackvis/tests/__init__.py +++ b/nipype/interfaces/camino2trackvis/tests/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/cmtk/__init__.py b/nipype/interfaces/cmtk/__init__.py index 7e1236c825..60c7d636d5 100644 --- a/nipype/interfaces/cmtk/__init__.py +++ b/nipype/interfaces/cmtk/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from .cmtk import ROIGen, CreateMatrix, CreateNodes from .nx import NetworkXMetrics, AverageNetworks from .parcellation import Parcellate diff --git a/nipype/interfaces/cmtk/cmtk.py b/nipype/interfaces/cmtk/cmtk.py index f527cccc0a..8903321ba5 100644 --- a/nipype/interfaces/cmtk/cmtk.py +++ b/nipype/interfaces/cmtk/cmtk.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -8,10 +9,8 @@ >>> os.chdir(datadir) """ - -from __future__ import print_function -from __future__ import division -from builtins import range +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import range, open import pickle import os.path as op @@ -21,11 +20,10 @@ import networkx as nx import scipy.io as sio -from ..base import (BaseInterface, BaseInterfaceInputSpec, traits, - File, TraitedSpec, InputMultiPath, Directory, - OutputMultiPath, isdefined) -from ...utils.filemanip import split_filename from ... 
import logging +from ...utils.filemanip import split_filename +from ..base import (BaseInterface, BaseInterfaceInputSpec, traits, File, + TraitedSpec, Directory, OutputMultiPath, isdefined) iflogger = logging.getLogger('interface') @@ -713,9 +711,8 @@ def _run_interface(self, runtime): if write_dict: iflogger.info('Saving Dictionary File to {path} in Pickle format'.format(path=dict_file)) - file = open(dict_file, 'w') - pickle.dump(labelDict, file) - file.close() + with open(dict_file, 'w') as f: + pickle.dump(labelDict, f) return runtime def _list_outputs(self): diff --git a/nipype/interfaces/cmtk/convert.py b/nipype/interfaces/cmtk/convert.py index 33ee7616b9..7cc9af1000 100644 --- a/nipype/interfaces/cmtk/convert.py +++ b/nipype/interfaces/cmtk/convert.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- """ Change directory to provide relative paths for doctests >>> import os @@ -6,18 +7,18 @@ >>> os.chdir(datadir) """ +from __future__ import print_function, division, unicode_literals, absolute_import import os import os.path as op import datetime import string -import warnings import networkx as nx -from nipype.interfaces.base import (BaseInterface, BaseInterfaceInputSpec, traits, - File, TraitedSpec, InputMultiPath, isdefined) -from nipype.utils.filemanip import split_filename -from nipype.utils.misc import package_check +from ...utils.misc import package_check +from ...utils.filemanip import split_filename +from ..base import (BaseInterface, BaseInterfaceInputSpec, traits, + File, TraitedSpec, InputMultiPath, isdefined) have_cfflib = True try: diff --git a/nipype/interfaces/cmtk/nbs.py b/nipype/interfaces/cmtk/nbs.py index 8fd539691f..fd4ff0e050 100644 --- a/nipype/interfaces/cmtk/nbs.py +++ b/nipype/interfaces/cmtk/nbs.py @@ -1,17 +1,18 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: +from __future__ import print_function, division, unicode_literals, absolute_import import 
os.path as op -import warnings import numpy as np import networkx as nx +from ... import logging +from ...utils.misc import package_check from ..base import (BaseInterface, BaseInterfaceInputSpec, traits, File, TraitedSpec, InputMultiPath, OutputMultiPath, isdefined) -from ...utils.misc import package_check -from ... import logging iflogger = logging.getLogger('interface') have_cv = True diff --git a/nipype/interfaces/cmtk/nx.py b/nipype/interfaces/cmtk/nx.py index 64b817a746..48763256f7 100644 --- a/nipype/interfaces/cmtk/nx.py +++ b/nipype/interfaces/cmtk/nx.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -8,9 +9,8 @@ >>> os.chdir(datadir) """ - -from __future__ import division -from builtins import range +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import str, open, range import os.path as op import pickle @@ -19,11 +19,11 @@ import networkx as nx import scipy.io as sio -from ..base import (BaseInterface, BaseInterfaceInputSpec, traits, File, - TraitedSpec, InputMultiPath, OutputMultiPath, isdefined) +from ... import logging from ...utils.filemanip import split_filename from ...utils.misc import package_check -from ... 
import logging +from ..base import (BaseInterface, BaseInterfaceInputSpec, traits, File, + TraitedSpec, InputMultiPath, OutputMultiPath, isdefined) iflogger = logging.getLogger('interface') diff --git a/nipype/interfaces/cmtk/parcellation.py b/nipype/interfaces/cmtk/parcellation.py index 80b0e72ab0..5a510bcdf7 100644 --- a/nipype/interfaces/cmtk/parcellation.py +++ b/nipype/interfaces/cmtk/parcellation.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -8,7 +9,7 @@ >>> os.chdir(datadir) """ - +from __future__ import print_function, division, unicode_literals, absolute_import from builtins import range import os @@ -20,10 +21,10 @@ import nibabel as nb import networkx as nx +from ... import logging +from ...utils.misc import package_check from ..base import (BaseInterface, BaseInterfaceInputSpec, traits, File, TraitedSpec, Directory, isdefined) -from ...utils.misc import package_check -from ... 
import logging iflogger = logging.getLogger('interface') have_cmp = True diff --git a/nipype/interfaces/cmtk/tests/__init__.py b/nipype/interfaces/cmtk/tests/__init__.py index e69de29bb2..40a96afc6f 100644 --- a/nipype/interfaces/cmtk/tests/__init__.py +++ b/nipype/interfaces/cmtk/tests/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/cmtk/tests/test_auto_ROIGen.py b/nipype/interfaces/cmtk/tests/test_auto_ROIGen.py index 2869299324..2e0c9c1ba6 100644 --- a/nipype/interfaces/cmtk/tests/test_auto_ROIGen.py +++ b/nipype/interfaces/cmtk/tests/test_auto_ROIGen.py @@ -4,11 +4,11 @@ def test_ROIGen_inputs(): - input_map = dict(LUT_file=dict(xor=['use_freesurfer_LUT'], + input_map = dict(LUT_file=dict(xor=[u'use_freesurfer_LUT'], ), aparc_aseg_file=dict(mandatory=True, ), - freesurfer_dir=dict(requires=['use_freesurfer_LUT'], + freesurfer_dir=dict(requires=[u'use_freesurfer_LUT'], ), ignore_exception=dict(nohash=True, usedefault=True, @@ -17,7 +17,7 @@ def test_ROIGen_inputs(): ), out_roi_file=dict(genfile=True, ), - use_freesurfer_LUT=dict(xor=['LUT_file'], + use_freesurfer_LUT=dict(xor=[u'LUT_file'], ), ) inputs = ROIGen.input_spec() diff --git a/nipype/interfaces/dcm2nii.py b/nipype/interfaces/dcm2nii.py index 3e28d26a5c..b3771e1903 100644 --- a/nipype/interfaces/dcm2nii.py +++ b/nipype/interfaces/dcm2nii.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- """The dcm2nii module provides basic functions for dicom conversion Change directory to provide relative paths for doctests @@ -6,16 +7,17 @@ >>> datadir = os.path.realpath(os.path.join(filepath, '../testing/data')) >>> os.chdir(datadir) """ - +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import str, open import os import re from copy import deepcopy +from ..utils.filemanip import split_filename from .base import (CommandLine, CommandLineInputSpec, InputMultiPath, traits, TraitedSpec, OutputMultiPath, isdefined, File, Directory) -from 
..utils.filemanip import split_filename class Dcm2niiInputSpec(CommandLineInputSpec): @@ -73,7 +75,7 @@ class Dcm2nii(CommandLine): >>> converter.inputs.source_names = ['functional_1.dcm', 'functional_2.dcm'] >>> converter.inputs.gzip_output = True >>> converter.inputs.output_dir = '.' - >>> converter.cmdline + >>> converter.cmdline # doctest: +IGNORE_UNICODE 'dcm2nii -a y -c y -b config.ini -v y -d y -e y -g y -i n -n y -o . -p y -x n -f n functional_1.dcm' """ @@ -193,10 +195,9 @@ def _gen_filename(self, name): elif name == 'config_file': self._config_created = True config_file = "config.ini" - f = open(config_file, "w") - # disable interactive mode - f.write("[BOOL]\nManualNIfTIConv=0\n") - f.close() + with open(config_file, "w") as f: + # disable interactive mode + f.write("[BOOL]\nManualNIfTIConv=0\n") return config_file return None @@ -249,7 +250,7 @@ class Dcm2niix(CommandLine): 'dcm2niix -b y -z i -x n -t n -m n -f %t%p -o . -s y -v n functional_1.dcm' >>> flags = '-'.join([val.strip() + ' ' for val in sorted(' '.join(converter.cmdline.split()[1:-1]).split('-'))]) - >>> flags + >>> flags # doctest: +IGNORE_UNICODE ' -b y -f %t%p -m n -o . 
-s y -t n -v n -x n -z i ' """ diff --git a/nipype/interfaces/dcmstack.py b/nipype/interfaces/dcmstack.py index a06065fa41..ca1e8d7d1b 100644 --- a/nipype/interfaces/dcmstack.py +++ b/nipype/interfaces/dcmstack.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- """Provides interfaces to various commands provided by dcmstack Change directory to provide relative paths for doctests @@ -7,13 +8,12 @@ >>> os.chdir(datadir) """ - -from __future__ import absolute_import +from __future__ import print_function, division, unicode_literals, absolute_import import os +from os import path as op import string import errno -from os import path as op from glob import glob import nibabel as nb @@ -23,7 +23,7 @@ InputMultiPath, File, Directory, traits, BaseInterface) from .traits_extension import isdefined, Undefined -from ..external.six import string_types + have_dcmstack = True try: @@ -129,7 +129,7 @@ class DcmStack(NiftiGeneratorBase): output_spec = DcmStackOutputSpec def _get_filelist(self, trait_input): - if isinstance(trait_input, string_types): + if isinstance(trait_input, (str, bytes)): if op.isdir(trait_input): return glob(op.join(trait_input, '*.dcm')) else: @@ -182,7 +182,7 @@ def _run_interface(self, runtime): stacks = dcmstack.parse_and_stack(src_paths) self.out_list = [] - for key, stack in stacks.items(): + for key, stack in list(stacks.items()): nw = NiftiWrapper(stack.to_nifti(embed_meta=True)) const_meta = nw.meta_ext.get_class_dict(('global', 'const')) out_path = self._get_out_path(const_meta) @@ -258,7 +258,7 @@ def _run_interface(self, runtime): self._make_name_map() nw = NiftiWrapper.from_filename(self.inputs.in_file) self.result = {} - for meta_key, out_name in self._meta_keys.items(): + for meta_key, out_name in list(self._meta_keys.items()): self.result[out_name] = nw.meta_ext.get_values(meta_key) return runtime @@ -365,7 +365,7 @@ def _run_interface(self, runtime): ] if self.inputs.sort_order: sort_order = self.inputs.sort_order - if isinstance(sort_order, 
string_types): + if isinstance(sort_order, (str, bytes)): sort_order = [sort_order] nws.sort(key=make_key_func(sort_order)) if self.inputs.merge_dim == traits.Undefined: diff --git a/nipype/interfaces/diffusion_toolkit/__init__.py b/nipype/interfaces/diffusion_toolkit/__init__.py index 7b8f1c57b1..cef13227c4 100644 --- a/nipype/interfaces/diffusion_toolkit/__init__.py +++ b/nipype/interfaces/diffusion_toolkit/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from .base import Info from .postproc import SplineFilter, TrackMerge from .dti import DTIRecon, DTITracker diff --git a/nipype/interfaces/diffusion_toolkit/base.py b/nipype/interfaces/diffusion_toolkit/base.py index 4191be77d8..0e3c5a400a 100644 --- a/nipype/interfaces/diffusion_toolkit/base.py +++ b/nipype/interfaces/diffusion_toolkit/base.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The dtk module provides basic functions for interfacing with @@ -12,11 +13,12 @@ See the docstrings for the individual classes for 'working' examples. 
""" - +from __future__ import print_function, division, unicode_literals, absolute_import from builtins import object -__docformat__ = 'restructuredtext' import re -from nipype.interfaces.base import CommandLine +from ..base import CommandLine + +__docformat__ = 'restructuredtext' class Info(object): diff --git a/nipype/interfaces/diffusion_toolkit/dti.py b/nipype/interfaces/diffusion_toolkit/dti.py index 554f2bf38a..3ba0beeafc 100644 --- a/nipype/interfaces/diffusion_toolkit/dti.py +++ b/nipype/interfaces/diffusion_toolkit/dti.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Provides interfaces to various commands provided by diffusion toolkit @@ -9,16 +10,17 @@ >>> os.chdir(datadir) """ -__docformat__ = 'restructuredtext' -from builtins import range +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import range, open import os import re +from ...utils.filemanip import fname_presuffix, split_filename, copyfile from ..base import (TraitedSpec, File, traits, CommandLine, CommandLineInputSpec, isdefined) -from ...utils.filemanip import fname_presuffix, split_filename, copyfile +__docformat__ = 'restructuredtext' class DTIReconInputSpec(CommandLineInputSpec): DWI = File(desc='Input diffusion volume', argstr='%s', exists=True, mandatory=True, position=1) @@ -69,16 +71,16 @@ class DTIRecon(CommandLine): def _create_gradient_matrix(self, bvecs_file, bvals_file): _gradient_matrix_file = 'gradient_matrix.txt' - bvals = [val for val in re.split('\s+', open(bvals_file).readline().strip())] - bvecs_f = open(bvecs_file) - bvecs_x = [val for val in re.split('\s+', bvecs_f.readline().strip())] - bvecs_y = [val for val in re.split('\s+', bvecs_f.readline().strip())] - bvecs_z = [val for val in re.split('\s+', bvecs_f.readline().strip())] - bvecs_f.close() - gradient_matrix_f = open(_gradient_matrix_file, 'w') - for i in 
range(len(bvals)): - gradient_matrix_f.write("%s, %s, %s, %s\n" % (bvecs_x[i], bvecs_y[i], bvecs_z[i], bvals[i])) - gradient_matrix_f.close() + with open(bvals_file) as fbvals: + bvals = [val for val in re.split('\s+', fbvals.readline().strip())] + with open(bvecs_file) as fbvecs: + bvecs_y = [val for val in re.split('\s+', fbvecs.readline().strip())] + bvecs_z = [val for val in re.split('\s+', fbvecs.readline().strip())] + bvecs_x = [val for val in re.split('\s+', fbvecs.readline().strip())] + + with open(_gradient_matrix_file, 'w') as gradient_matrix_f: + for i in range(len(bvals)): + gradient_matrix_f.write("%s, %s, %s, %s\n" % (bvecs_x[i], bvecs_y[i], bvecs_z[i], bvals[i])) return _gradient_matrix_file def _format_arg(self, name, spec, value): diff --git a/nipype/interfaces/diffusion_toolkit/odf.py b/nipype/interfaces/diffusion_toolkit/odf.py index b2f0b2c6a7..ee7933f64c 100644 --- a/nipype/interfaces/diffusion_toolkit/odf.py +++ b/nipype/interfaces/diffusion_toolkit/odf.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Provides interfaces to various commands provided by diffusion toolkit @@ -9,16 +10,17 @@ >>> os.chdir(datadir) """ -__docformat__ = 'restructuredtext' +from __future__ import print_function, division, unicode_literals, absolute_import from builtins import range import os import re +from ...utils.filemanip import fname_presuffix, split_filename, copyfile from ..base import (TraitedSpec, File, traits, CommandLine, CommandLineInputSpec, isdefined) -from ...utils.filemanip import fname_presuffix, split_filename, copyfile +__docformat__ = 'restructuredtext' class HARDIMatInputSpec(CommandLineInputSpec): bvecs = File(exists=True, desc='b vectors file', diff --git a/nipype/interfaces/diffusion_toolkit/postproc.py b/nipype/interfaces/diffusion_toolkit/postproc.py index 60d5b11115..4a14d96b90 100644 --- 
a/nipype/interfaces/diffusion_toolkit/postproc.py +++ b/nipype/interfaces/diffusion_toolkit/postproc.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Provides interfaces to various commands provided by diffusion toolkit @@ -9,13 +10,13 @@ >>> os.chdir(datadir) """ -__docformat__ = 'restructuredtext' +from __future__ import print_function, division, unicode_literals, absolute_import import os - from ..base import (TraitedSpec, File, traits, CommandLine, InputMultiPath, CommandLineInputSpec) +__docformat__ = 'restructuredtext' class SplineFilterInputSpec(CommandLineInputSpec): track_file = File(exists=True, desc="file containing tracks to be filtered", position=0, argstr="%s", mandatory=True) diff --git a/nipype/interfaces/diffusion_toolkit/tests/__init__.py b/nipype/interfaces/diffusion_toolkit/tests/__init__.py index e69de29bb2..40a96afc6f 100644 --- a/nipype/interfaces/diffusion_toolkit/tests/__init__.py +++ b/nipype/interfaces/diffusion_toolkit/tests/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/dipy/__init__.py b/nipype/interfaces/dipy/__init__.py index 4047e5a408..aebde630ab 100644 --- a/nipype/interfaces/dipy/__init__.py +++ b/nipype/interfaces/dipy/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from .tracks import StreamlineTractography, TrackDensityMap from .tensors import TensorMode, DTI from .preprocess import Resample, Denoise diff --git a/nipype/interfaces/dipy/base.py b/nipype/interfaces/dipy/base.py index b17547f60d..d5b2e9957f 100644 --- a/nipype/interfaces/dipy/base.py +++ b/nipype/interfaces/dipy/base.py @@ -1,10 +1,12 @@ # -*- coding: utf-8 -*- """ Base interfaces for dipy """ +from __future__ import print_function, division, unicode_literals, absolute_import + import os.path as op import numpy as np -from nipype.interfaces.base import (traits, File, isdefined, - BaseInterface, 
BaseInterfaceInputSpec) from ... import logging +from ..base import (traits, File, isdefined, + BaseInterface, BaseInterfaceInputSpec) IFLOGGER = logging.getLogger('interface') diff --git a/nipype/interfaces/dipy/preprocess.py b/nipype/interfaces/dipy/preprocess.py index 143f239e6c..163118e0cb 100644 --- a/nipype/interfaces/dipy/preprocess.py +++ b/nipype/interfaces/dipy/preprocess.py @@ -7,13 +7,15 @@ >>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data')) >>> os.chdir(datadir) """ +from __future__ import print_function, division, unicode_literals, absolute_import + import os.path as op import nibabel as nb import numpy as np +from ... import logging from ..base import (traits, TraitedSpec, File, isdefined) from .base import DipyBaseInterface -from ... import logging IFLOGGER = logging.getLogger('interface') diff --git a/nipype/interfaces/dipy/reconstruction.py b/nipype/interfaces/dipy/reconstruction.py index a0030674fa..a7d30be473 100644 --- a/nipype/interfaces/dipy/reconstruction.py +++ b/nipype/interfaces/dipy/reconstruction.py @@ -3,15 +3,20 @@ Interfaces to the reconstruction algorithms in dipy """ +from __future__ import print_function, division, unicode_literals, absolute_import +from future import standard_library +standard_library.install_aliases() +from builtins import str, open + import os.path as op import numpy as np import nibabel as nb -from nipype.interfaces.base import TraitedSpec, File, traits, isdefined +from ... 
import logging +from ..base import TraitedSpec, File, traits, isdefined from .base import DipyDiffusionInterface, DipyBaseInterfaceInputSpec -from nipype import logging IFLOGGER = logging.getLogger('interface') @@ -146,7 +151,7 @@ def _run_interface(self, runtime): def _list_outputs(self): outputs = self._outputs().get() - for k in outputs.keys(): + for k in list(outputs.keys()): outputs[k] = self._gen_filename(k) return outputs @@ -317,7 +322,7 @@ def _run_interface(self, runtime): from dipy.reconst.csdeconv import ConstrainedSphericalDeconvModel from dipy.data import get_sphere # import marshal as pickle - import cPickle as pickle + import pickle as pickle import gzip img = nb.load(self.inputs.in_file) diff --git a/nipype/interfaces/dipy/setup.py b/nipype/interfaces/dipy/setup.py index 6a7b5a30a4..43dfe1156c 100644 --- a/nipype/interfaces/dipy/setup.py +++ b/nipype/interfaces/dipy/setup.py @@ -1,5 +1,7 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: +from __future__ import print_function, division, unicode_literals, absolute_import def configuration(parent_package='', top_path=None): diff --git a/nipype/interfaces/dipy/simulate.py b/nipype/interfaces/dipy/simulate.py index f4eb174a22..0baa50244b 100644 --- a/nipype/interfaces/dipy/simulate.py +++ b/nipype/interfaces/dipy/simulate.py @@ -5,19 +5,17 @@ >>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data')) >>> os.chdir(datadir) """ - -from __future__ import division +from __future__ import print_function, division, unicode_literals, absolute_import from multiprocessing import (Pool, cpu_count) import os.path as op from builtins import range - import nibabel as nb +from ... import logging from ..base import (traits, TraitedSpec, BaseInterfaceInputSpec, File, InputMultiPath, isdefined) from .base import DipyBaseInterface -from ... 
import logging IFLOGGER = logging.getLogger('interface') diff --git a/nipype/interfaces/dipy/tensors.py b/nipype/interfaces/dipy/tensors.py index 368b66cac2..a5fc05cf3a 100644 --- a/nipype/interfaces/dipy/tensors.py +++ b/nipype/interfaces/dipy/tensors.py @@ -5,12 +5,14 @@ >>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data')) >>> os.chdir(datadir) """ +from __future__ import print_function, division, unicode_literals, absolute_import + import nibabel as nb +from ... import logging from ..base import TraitedSpec, File, isdefined from .base import DipyDiffusionInterface, DipyBaseInterfaceInputSpec -from ... import logging IFLOGGER = logging.getLogger('interface') diff --git a/nipype/interfaces/dipy/tests/__init__.py b/nipype/interfaces/dipy/tests/__init__.py index e69de29bb2..40a96afc6f 100644 --- a/nipype/interfaces/dipy/tests/__init__.py +++ b/nipype/interfaces/dipy/tests/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/dipy/tests/test_auto_EstimateResponseSH.py b/nipype/interfaces/dipy/tests/test_auto_EstimateResponseSH.py index 95e702bd50..80149df801 100644 --- a/nipype/interfaces/dipy/tests/test_auto_EstimateResponseSH.py +++ b/nipype/interfaces/dipy/tests/test_auto_EstimateResponseSH.py @@ -5,7 +5,7 @@ def test_EstimateResponseSH_inputs(): input_map = dict(auto=dict(usedefault=True, - xor=['recursive'], + xor=[u'recursive'], ), b0_thres=dict(usedefault=True, ), @@ -27,7 +27,7 @@ def test_EstimateResponseSH_inputs(): ), out_prefix=dict(), recursive=dict(usedefault=True, - xor=['auto'], + xor=[u'auto'], ), response=dict(usedefault=True, ), diff --git a/nipype/interfaces/dipy/tracks.py b/nipype/interfaces/dipy/tracks.py index 1b0ad6f381..cad9379a54 100644 --- a/nipype/interfaces/dipy/tracks.py +++ b/nipype/interfaces/dipy/tracks.py @@ -6,15 +6,17 @@ >>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data')) >>> os.chdir(datadir) """ +from __future__ import print_function, division, 
unicode_literals, absolute_import + import os.path as op import numpy as np import nibabel as nb import nibabel.trackvis as nbt +from ... import logging from ..base import (TraitedSpec, BaseInterfaceInputSpec, File, isdefined, traits) from .base import DipyBaseInterface -from ... import logging IFLOGGER = logging.getLogger('interface') @@ -165,7 +167,7 @@ def _run_interface(self, runtime): from dipy.tracking.eudx import EuDX from dipy.data import get_sphere # import marshal as pickle - import cPickle as pickle + import pickle as pickle import gzip if (not (isdefined(self.inputs.in_model) or diff --git a/nipype/interfaces/dynamic_slicer.py b/nipype/interfaces/dynamic_slicer.py index 1c26ef4acf..d38f4171f3 100644 --- a/nipype/interfaces/dynamic_slicer.py +++ b/nipype/interfaces/dynamic_slicer.py @@ -1,12 +1,15 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: +from __future__ import print_function, division, unicode_literals, absolute_import + import os import warnings import xml.dom.minidom -from nipype.interfaces.base import (CommandLine, CommandLineInputSpec, - DynamicTraitedSpec, traits, Undefined, - File, isdefined) +from .base import (CommandLine, CommandLineInputSpec, + DynamicTraitedSpec, traits, Undefined, + File, isdefined) class SlicerCommandLineInputSpec(DynamicTraitedSpec, CommandLineInputSpec): diff --git a/nipype/interfaces/elastix/__init__.py b/nipype/interfaces/elastix/__init__.py index ab8b968228..2a1287ed63 100644 --- a/nipype/interfaces/elastix/__init__.py +++ b/nipype/interfaces/elastix/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # coding: utf-8 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/interfaces/elastix/base.py b/nipype/interfaces/elastix/base.py index 55cf79138e..afdb0a1ff4 100644 --- a/nipype/interfaces/elastix/base.py +++ 
b/nipype/interfaces/elastix/base.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # coding: utf-8 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: @@ -9,10 +10,10 @@ """ +from __future__ import print_function, division, unicode_literals, absolute_import -from ..base import (CommandLine, CommandLineInputSpec, isdefined, - TraitedSpec, File, Directory, traits, InputMultiPath) from ... import logging +from ..base import CommandLineInputSpec, Directory, traits logger = logging.getLogger('interface') diff --git a/nipype/interfaces/elastix/registration.py b/nipype/interfaces/elastix/registration.py index b72123c321..858e12b492 100644 --- a/nipype/interfaces/elastix/registration.py +++ b/nipype/interfaces/elastix/registration.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # coding: utf-8 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: @@ -7,17 +8,16 @@ displacement maps to images and points. """ - -from __future__ import absolute_import +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import open import os.path as op import re -from ..base import (CommandLine, CommandLineInputSpec, isdefined, - TraitedSpec, File, traits, InputMultiPath) +from ... import logging from .base import ElastixBaseInputSpec +from ..base import CommandLine, TraitedSpec, File, traits, InputMultiPath -from ... 
import logging logger = logging.getLogger('interface') @@ -52,10 +52,10 @@ class Registration(CommandLine): >>> from nipype.interfaces.elastix import Registration >>> reg = Registration() - >>> reg.inputs.fixed_image = 'fixed1.nii' # doctest: +SKIP - >>> reg.inputs.moving_image = 'moving1.nii' # doctest: +SKIP - >>> reg.inputs.parameters = ['elastix.txt'] # doctest: +SKIP - >>> reg.cmdline # doctest: +SKIP + >>> reg.inputs.fixed_image = 'fixed1.nii' + >>> reg.inputs.moving_image = 'moving1.nii' + >>> reg.inputs.parameters = ['elastix.txt'] + >>> reg.cmdline # doctest: +IGNORE_UNICODE 'elastix -f fixed1.nii -m moving1.nii -out ./ -p elastix.txt' @@ -145,9 +145,9 @@ class ApplyWarp(CommandLine): >>> from nipype.interfaces.elastix import ApplyWarp >>> reg = ApplyWarp() - >>> reg.inputs.moving_image = 'moving1.nii' # doctest: +SKIP - >>> reg.inputs.transform_file = 'TransformParameters.0.txt' # doctest: +SKIP - >>> reg.cmdline # doctest: +SKIP + >>> reg.inputs.moving_image = 'moving1.nii' + >>> reg.inputs.transform_file = 'TransformParameters.0.txt' + >>> reg.cmdline # doctest: +IGNORE_UNICODE 'transformix -in moving1.nii -out ./ -tp TransformParameters.0.txt' @@ -186,8 +186,8 @@ class AnalyzeWarp(CommandLine): >>> from nipype.interfaces.elastix import AnalyzeWarp >>> reg = AnalyzeWarp() - >>> reg.inputs.transform_file = 'TransformParameters.0.txt' # doctest: +SKIP - >>> reg.cmdline # doctest: +SKIP + >>> reg.inputs.transform_file = 'TransformParameters.0.txt' + >>> reg.cmdline # doctest: +IGNORE_UNICODE 'transformix -def all -jac all -jacmat all -out ./ -tp TransformParameters.0.txt' @@ -226,9 +226,9 @@ class PointsWarp(CommandLine): >>> from nipype.interfaces.elastix import PointsWarp >>> reg = PointsWarp() - >>> reg.inputs.points_file = 'surf1.vtk' # doctest: +SKIP - >>> reg.inputs.transform_file = 'TransformParameters.0.txt' # doctest: +SKIP - >>> reg.cmdline # doctest: +SKIP + >>> reg.inputs.points_file = 'surf1.vtk' + >>> reg.inputs.transform_file = 
'TransformParameters.0.txt' + >>> reg.cmdline # doctest: +IGNORE_UNICODE 'transformix -out ./ -def surf1.vtk -tp TransformParameters.0.txt' diff --git a/nipype/interfaces/elastix/tests/__init__.py b/nipype/interfaces/elastix/tests/__init__.py index e69de29bb2..40a96afc6f 100644 --- a/nipype/interfaces/elastix/tests/__init__.py +++ b/nipype/interfaces/elastix/tests/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/elastix/utils.py b/nipype/interfaces/elastix/utils.py index ab034dac07..42fab68377 100644 --- a/nipype/interfaces/elastix/utils.py +++ b/nipype/interfaces/elastix/utils.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # coding: utf-8 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: @@ -7,12 +8,14 @@ transform files (to configure warpings) """ +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import open + import os.path as op -import re -from ..base import (BaseInterface, BaseInterfaceInputSpec, isdefined, - TraitedSpec, File, traits, InputMultiPath) from ... 
import logging +from ..base import (BaseInterface, BaseInterfaceInputSpec, isdefined, + TraitedSpec, File, traits) logger = logging.getLogger('interface') diff --git a/nipype/interfaces/freesurfer/__init__.py b/nipype/interfaces/freesurfer/__init__.py index c48e6a44c4..c8ed4cabf5 100644 --- a/nipype/interfaces/freesurfer/__init__.py +++ b/nipype/interfaces/freesurfer/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Top-level namespace for freesurfer.""" diff --git a/nipype/interfaces/freesurfer/base.py b/nipype/interfaces/freesurfer/base.py index 5e88638983..4a51f92383 100644 --- a/nipype/interfaces/freesurfer/base.py +++ b/nipype/interfaces/freesurfer/base.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The freesurfer module provides basic functions for interfacing with @@ -13,17 +14,18 @@ See the docstrings for the individual classes for 'working' examples. """ -__docformat__ = 'restructuredtext' +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import open, object, str -from builtins import object import os +from ...utils.filemanip import fname_presuffix from ..base import (CommandLine, Directory, CommandLineInputSpec, isdefined, traits, TraitedSpec, File) -from ...utils.filemanip import fname_presuffix +__docformat__ = 'restructuredtext' class Info(object): """ Freesurfer subject directory and version information. 
diff --git a/nipype/interfaces/freesurfer/longitudinal.py b/nipype/interfaces/freesurfer/longitudinal.py index e5575fb0c5..1b82fecfaa 100644 --- a/nipype/interfaces/freesurfer/longitudinal.py +++ b/nipype/interfaces/freesurfer/longitudinal.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Provides interfaces to various longitudinal commands provided by freesurfer @@ -9,17 +10,17 @@ >>> os.chdir(datadir) """ -__docformat__ = 'restructuredtext' +from __future__ import print_function, division, unicode_literals, absolute_import import os #import itertools -from nipype.utils.filemanip import fname_presuffix -from nipype.interfaces.freesurfer.base import FSCommand, FSTraitedSpec -from nipype.interfaces.base import (TraitedSpec, File, traits, - InputMultiPath, OutputMultiPath, isdefined) - from ... import logging +from ..base import (TraitedSpec, File, traits, + InputMultiPath, OutputMultiPath, isdefined) +from .base import FSCommand, FSTraitedSpec + +__docformat__ = 'restructuredtext' iflogger = logging.getLogger('interface') @@ -85,15 +86,15 @@ class RobustTemplate(FSCommand): >>> template.inputs.fixed_timepoint = True >>> template.inputs.no_iteration = True >>> template.inputs.subsample_threshold = 200 - >>> template.cmdline #doctest: +NORMALIZE_WHITESPACE + >>> template.cmdline #doctest: +NORMALIZE_WHITESPACE +IGNORE_UNICODE 'mri_robust_template --satit --average 0 --fixtp --mov structural.nii functional.nii --inittp 1 --noit --template mri_robust_template_out.mgz --subsample 200' >>> template.inputs.out_file = 'T1.nii' - >>> template.cmdline #doctest: +NORMALIZE_WHITESPACE + >>> template.cmdline #doctest: +NORMALIZE_WHITESPACE +IGNORE_UNICODE 'mri_robust_template --satit --average 0 --fixtp --mov structural.nii functional.nii --inittp 1 --noit --template T1.nii --subsample 200' >>> template.inputs.transform_outputs = ['structural.lta', 'functional.lta'] >>> 
template.inputs.scaled_intensity_outputs = ['structural-iscale.txt', 'functional-iscale.txt'] - >>> template.cmdline #doctest: +NORMALIZE_WHITESPACE + >>> template.cmdline #doctest: +NORMALIZE_WHITESPACE +IGNORE_UNICODE 'mri_robust_template --satit --average 0 --fixtp --mov structural.nii functional.nii --inittp 1 --noit --template T1.nii --iscaleout structural-iscale.txt functional-iscale.txt --subsample 200 --lta structural.lta functional.lta' >>> template.run() #doctest: +SKIP @@ -167,7 +168,7 @@ class FuseSegmentations(FSCommand): >>> fuse.inputs.in_segmentations = ['aseg.mgz', 'aseg.mgz'] >>> fuse.inputs.in_segmentations_noCC = ['aseg.mgz', 'aseg.mgz'] >>> fuse.inputs.in_norms = ['norm.mgz', 'norm.mgz', 'norm.mgz'] - >>> fuse.cmdline + >>> fuse.cmdline # doctest: +IGNORE_UNICODE 'mri_fuse_segmentations -n norm.mgz -a aseg.mgz -c aseg.mgz tp.long.A.template tp1 tp2' """ diff --git a/nipype/interfaces/freesurfer/model.py b/nipype/interfaces/freesurfer/model.py index 9b825b9c55..ff04a50a9d 100644 --- a/nipype/interfaces/freesurfer/model.py +++ b/nipype/interfaces/freesurfer/model.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The freesurfer module provides basic functions for interfacing with @@ -10,15 +11,17 @@ >>> os.chdir(datadir) """ -__docformat__ = 'restructuredtext' +from __future__ import print_function, division, unicode_literals, absolute_import import os -from ..freesurfer.base import FSCommand, FSTraitedSpec +from ...utils.filemanip import fname_presuffix, split_filename from ..base import (TraitedSpec, File, traits, InputMultiPath, OutputMultiPath, Directory, isdefined) -from ...utils.filemanip import fname_presuffix, split_filename -from ..freesurfer.utils import copy2subjdir +from .base import FSCommand, FSTraitedSpec +from .utils import copy2subjdir + +__docformat__ = 'restructuredtext' class MRISPreprocInputSpec(FSTraitedSpec): 
out_file = File(argstr='--out %s', genfile=True, @@ -88,7 +91,7 @@ class MRISPreproc(FSCommand): >>> preproc.inputs.vol_measure_file = [('cont1.nii', 'register.dat'), \ ('cont1a.nii', 'register.dat')] >>> preproc.inputs.out_file = 'concatenated_file.mgz' - >>> preproc.cmdline + >>> preproc.cmdline # doctest: +IGNORE_UNICODE 'mris_preproc --hemi lh --out concatenated_file.mgz --target fsaverage --iv cont1.nii register.dat --iv cont1a.nii register.dat' """ @@ -145,7 +148,7 @@ class MRISPreprocReconAll(MRISPreproc): >>> preproc.inputs.vol_measure_file = [('cont1.nii', 'register.dat'), \ ('cont1a.nii', 'register.dat')] >>> preproc.inputs.out_file = 'concatenated_file.mgz' - >>> preproc.cmdline + >>> preproc.cmdline # doctest: +IGNORE_UNICODE 'mris_preproc --hemi lh --out concatenated_file.mgz --s subject_id --target fsaverage --iv cont1.nii register.dat --iv cont1a.nii register.dat' """ @@ -483,7 +486,7 @@ class Binarize(FSCommand): -------- >>> binvol = Binarize(in_file='structural.nii', min=10, binary_file='foo_out.nii') - >>> binvol.cmdline + >>> binvol.cmdline # doctest: +IGNORE_UNICODE 'mri_binarize --o foo_out.nii --i structural.nii --min 10.000000' """ @@ -592,7 +595,7 @@ class Concatenate(FSCommand): >>> concat = Concatenate() >>> concat.inputs.in_files = ['cont1.nii', 'cont2.nii'] >>> concat.inputs.concatenated_file = 'bar.nii' - >>> concat.cmdline + >>> concat.cmdline # doctest: +IGNORE_UNICODE 'mri_concat --o bar.nii --i cont1.nii --i cont2.nii' """ @@ -716,7 +719,7 @@ class SegStats(FSCommand): >>> ss.inputs.subjects_dir = '.' 
>>> ss.inputs.avgwf_txt_file = 'avgwf.txt' >>> ss.inputs.summary_file = 'summary.stats' - >>> ss.cmdline + >>> ss.cmdline # doctest: +IGNORE_UNICODE 'mri_segstats --annot PWS04 lh aparc --avgwf ./avgwf.txt --i functional.nii --sum ./summary.stats' """ @@ -838,7 +841,7 @@ class SegStatsReconAll(SegStats): >>> segstatsreconall.inputs.total_gray = True >>> segstatsreconall.inputs.euler = True >>> segstatsreconall.inputs.exclude_id = 0 - >>> segstatsreconall.cmdline + >>> segstatsreconall.cmdline # doctest: +IGNORE_UNICODE 'mri_segstats --annot PWS04 lh aparc --avgwf ./avgwf.txt --brain-vol-from-seg --surf-ctx-vol --empty --etiv --euler --excl-ctxgmwm --excludeid 0 --subcortgray --subject 10335 --supratent --totalgray --surf-wm-vol --sum ./summary.stats' """ input_spec = SegStatsReconAllInputSpec @@ -950,7 +953,7 @@ class Label2Vol(FSCommand): -------- >>> binvol = Label2Vol(label_file='cortex.label', template_file='structural.nii', reg_file='register.dat', fill_thresh=0.5, vol_label_file='foo_out.nii') - >>> binvol.cmdline + >>> binvol.cmdline # doctest: +IGNORE_UNICODE 'mri_label2vol --fillthresh 0 --label cortex.label --reg register.dat --temp structural.nii --o foo_out.nii' """ @@ -1029,7 +1032,7 @@ class MS_LDA(FSCommand): shift=zero_value, vol_synth_file='synth_out.mgz', \ conform=True, use_weights=True, \ images=['FLASH1.mgz', 'FLASH2.mgz', 'FLASH3.mgz']) - >>> optimalWeights.cmdline + >>> optimalWeights.cmdline # doctest: +IGNORE_UNICODE 'mri_ms_LDA -conform -label label.mgz -lda 2 3 -shift 1 -W -synth synth_out.mgz -weight weights.txt FLASH1.mgz FLASH2.mgz FLASH3.mgz' """ @@ -1052,7 +1055,7 @@ def _verify_weights_file_exists(self): raise traits.TraitError("MS_LDA: use_weights must accompany an existing weights file") def _format_arg(self, name, spec, value): - if name is 'use_weights': + if name == 'use_weights': if self.inputs.use_weights is True: self._verify_weights_file_exists() else: @@ -1121,7 +1124,7 @@ class Label2Label(FSCommand): >>> 
l2l.inputs.source_label = 'lh-pial.stl' >>> l2l.inputs.source_white = 'lh.pial' >>> l2l.inputs.source_sphere_reg = 'lh.pial' - >>> l2l.cmdline + >>> l2l.cmdline # doctest: +IGNORE_UNICODE 'mri_label2label --hemi lh --trglabel lh-pial_converted.stl --regmethod surface --srclabel lh-pial.stl --srcsubject fsaverage --trgsubject 10335' """ @@ -1205,7 +1208,7 @@ class Label2Annot(FSCommand): >>> l2a.inputs.in_labels = ['lh.aparc.label'] >>> l2a.inputs.orig = 'lh.pial' >>> l2a.inputs.out_annot = 'test' - >>> l2a.cmdline + >>> l2a.cmdline # doctest: +IGNORE_UNICODE 'mris_label2annot --hemi lh --l lh.aparc.label --a test --s 10335' """ @@ -1286,7 +1289,7 @@ class SphericalAverage(FSCommand): >>> sphericalavg.inputs.subject_id = '10335' >>> sphericalavg.inputs.erode = 2 >>> sphericalavg.inputs.threshold = 5 - >>> sphericalavg.cmdline + >>> sphericalavg.cmdline # doctest: +IGNORE_UNICODE 'mris_spherical_average -erode 2 -o 10335 -t 5.0 label lh.entorhinal lh pial . test.out' """ diff --git a/nipype/interfaces/freesurfer/preprocess.py b/nipype/interfaces/freesurfer/preprocess.py index 332f74cab8..a7c13d8ee9 100644 --- a/nipype/interfaces/freesurfer/preprocess.py +++ b/nipype/interfaces/freesurfer/preprocess.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Provides interfaces to various commands provided by FreeSurfer @@ -9,9 +10,8 @@ >>> os.chdir(datadir) """ - +from __future__ import print_function, division, unicode_literals, absolute_import from builtins import range -__docformat__ = 'restructuredtext' import os import os.path as op @@ -21,17 +21,19 @@ import numpy as np from nibabel import load +from ... 
import logging +from ...utils.filemanip import fname_presuffix from ..io import FreeSurferSource -from ..freesurfer.base import (FSCommand, FSTraitedSpec, - FSTraitedSpecOpenMP, - FSCommandOpenMP) from ..base import (TraitedSpec, File, traits, Directory, InputMultiPath, OutputMultiPath, CommandLine, CommandLineInputSpec, isdefined) -from ...utils.filemanip import fname_presuffix -from ... import logging -from ..freesurfer.utils import copy2subjdir +from .base import (FSCommand, FSTraitedSpec, + FSTraitedSpecOpenMP, + FSCommandOpenMP) +from .utils import copy2subjdir + +__docformat__ = 'restructuredtext' iflogger = logging.getLogger('interface') @@ -61,7 +63,7 @@ class ParseDICOMDir(FSCommand): >>> dcminfo.inputs.dicom_dir = '.' >>> dcminfo.inputs.sortbyrun = True >>> dcminfo.inputs.summarize = True - >>> dcminfo.cmdline + >>> dcminfo.cmdline # doctest: +IGNORE_UNICODE 'mri_parse_sdcmdir --d . --o dicominfo.txt --sortbyrun --summarize' """ @@ -125,7 +127,7 @@ class UnpackSDICOMDir(FSCommand): >>> unpack.inputs.output_dir = '.' >>> unpack.inputs.run_info = (5, 'mprage', 'nii', 'struct') >>> unpack.inputs.dir_structure = 'generic' - >>> unpack.cmdline + >>> unpack.cmdline # doctest: +IGNORE_UNICODE 'unpacksdcmdir -generic -targ . -run 5 mprage nii struct -src .' 
""" _cmd = 'unpacksdcmdir' @@ -347,7 +349,7 @@ class MRIConvert(FSCommand): >>> mc.inputs.in_file = 'structural.nii' >>> mc.inputs.out_file = 'outfile.mgz' >>> mc.inputs.out_type = 'mgz' - >>> mc.cmdline + >>> mc.cmdline # doctest: +IGNORE_UNICODE 'mri_convert --out_type mgz --input_volume structural.nii --output_volume outfile.mgz' """ @@ -573,7 +575,7 @@ class Resample(FSCommand): >>> resampler.inputs.in_file = 'structural.nii' >>> resampler.inputs.resampled_file = 'resampled.nii' >>> resampler.inputs.voxel_size = (2.1, 2.1, 2.1) - >>> resampler.cmdline + >>> resampler.cmdline # doctest: +IGNORE_UNICODE 'mri_convert -vs 2.10 2.10 2.10 -i structural.nii -o resampled.nii' """ @@ -643,7 +645,7 @@ class ReconAll(CommandLine): >>> reconall.inputs.directive = 'all' >>> reconall.inputs.subjects_dir = '.' >>> reconall.inputs.T1_files = 'structural.nii' - >>> reconall.cmdline + >>> reconall.cmdline # doctest: +IGNORE_UNICODE 'recon-all -all -i structural.nii -subjid foo -sd .' """ @@ -865,7 +867,7 @@ class BBRegister(FSCommand): >>> from nipype.interfaces.freesurfer import BBRegister >>> bbreg = BBRegister(subject_id='me', source_file='structural.nii', init='header', contrast_type='t2') - >>> bbreg.cmdline + >>> bbreg.cmdline # doctest: +IGNORE_UNICODE 'bbregister --t2 --init-header --reg structural_bbreg_me.dat --mov structural.nii --s me' """ @@ -999,7 +1001,7 @@ class ApplyVolTransform(FSCommand): >>> applyreg.inputs.reg_file = 'register.dat' >>> applyreg.inputs.transformed_file = 'struct_warped.nii' >>> applyreg.inputs.fs_target = True - >>> applyreg.cmdline + >>> applyreg.cmdline # doctest: +IGNORE_UNICODE 'mri_vol2vol --fstarg --reg register.dat --mov structural.nii --o struct_warped.nii' """ @@ -1079,7 +1081,7 @@ class Smooth(FSCommand): >>> from nipype.interfaces.freesurfer import Smooth >>> smoothvol = Smooth(in_file='functional.nii', smoothed_file = 'foo_out.nii', reg_file='register.dat', surface_fwhm=10, vol_fwhm=6) - >>> smoothvol.cmdline + >>> 
smoothvol.cmdline # doctest: +IGNORE_UNICODE 'mris_volsmooth --i functional.nii --reg register.dat --o foo_out.nii --fwhm 10.000000 --vol-fwhm 6.000000' """ @@ -1184,7 +1186,7 @@ class RobustRegister(FSCommand): >>> reg.inputs.target_file = 'T1.nii' >>> reg.inputs.auto_sens = True >>> reg.inputs.init_orient = True - >>> reg.cmdline + >>> reg.cmdline # doctest: +IGNORE_UNICODE 'mri_robust_register --satit --initorient --lta structural_robustreg.lta --mov structural.nii --dst T1.nii' References @@ -1270,7 +1272,7 @@ class FitMSParams(FSCommand): >>> msfit = FitMSParams() >>> msfit.inputs.in_files = ['flash_05.mgz', 'flash_30.mgz'] >>> msfit.inputs.out_dir = 'flash_parameters' - >>> msfit.cmdline + >>> msfit.cmdline # doctest: +IGNORE_UNICODE 'mri_ms_fitparms flash_05.mgz flash_30.mgz flash_parameters' """ @@ -1343,7 +1345,7 @@ class SynthesizeFLASH(FSCommand): >>> syn.inputs.t1_image = 'T1.mgz' >>> syn.inputs.pd_image = 'PD.mgz' >>> syn.inputs.out_file = 'flash_30syn.mgz' - >>> syn.cmdline + >>> syn.cmdline # doctest: +IGNORE_UNICODE 'mri_synthesize 20.00 30.00 3.000 T1.mgz PD.mgz flash_30syn.mgz' """ @@ -1416,7 +1418,7 @@ class MNIBiasCorrection(FSCommand): >>> correct.inputs.iterations = 6 >>> correct.inputs.protocol_iterations = 1000 >>> correct.inputs.distance = 50 - >>> correct.cmdline + >>> correct.cmdline # doctest: +IGNORE_UNICODE 'mri_nu_correct.mni --distance 50 --i norm.mgz --n 6 --o norm_output.mgz --proto-iters 1000' References: @@ -1478,7 +1480,7 @@ class WatershedSkullStrip(FSCommand): >>> skullstrip.inputs.t1 = True >>> skullstrip.inputs.transform = "transforms/talairach_with_skull.lta" >>> skullstrip.inputs.out_file = "brainmask.auto.mgz" - >>> skullstrip.cmdline + >>> skullstrip.cmdline # doctest: +IGNORE_UNICODE 'mri_watershed -T1 transforms/talairach_with_skull.lta T1.mgz brainmask.auto.mgz' """ _cmd = 'mri_watershed' @@ -1526,7 +1528,7 @@ class Normalize(FSCommand): >>> normalize = freesurfer.Normalize() >>> normalize.inputs.in_file = "T1.mgz" 
>>> normalize.inputs.gradient = 1 - >>> normalize.cmdline + >>> normalize.cmdline # doctest: +IGNORE_UNICODE 'mri_normalize -g 1 T1.mgz T1_norm.mgz' """ _cmd = "mri_normalize" @@ -1578,7 +1580,7 @@ class CANormalize(FSCommand): >>> ca_normalize.inputs.in_file = "T1.mgz" >>> ca_normalize.inputs.atlas = "atlas.nii.gz" # in practice use .gca atlases >>> ca_normalize.inputs.transform = "trans.mat" # in practice use .lta transforms - >>> ca_normalize.cmdline + >>> ca_normalize.cmdline # doctest: +IGNORE_UNICODE 'mri_ca_normalize T1.mgz atlas.nii.gz trans.mat T1_norm.mgz' """ _cmd = "mri_ca_normalize" @@ -1636,7 +1638,7 @@ class CARegister(FSCommandOpenMP): >>> ca_register = freesurfer.CARegister() >>> ca_register.inputs.in_file = "norm.mgz" >>> ca_register.inputs.out_file = "talairach.m3z" - >>> ca_register.cmdline + >>> ca_register.cmdline # doctest: +IGNORE_UNICODE 'mri_ca_register norm.mgz talairach.m3z' """ _cmd = "mri_ca_register" @@ -1707,7 +1709,7 @@ class CALabel(FSCommandOpenMP): >>> ca_label.inputs.out_file = "out.mgz" >>> ca_label.inputs.transform = "trans.mat" >>> ca_label.inputs.template = "Template_6.nii" # in practice use .gcs extension - >>> ca_label.cmdline + >>> ca_label.cmdline # doctest: +IGNORE_UNICODE 'mri_ca_label norm.mgz trans.mat Template_6.nii out.mgz' """ _cmd = "mri_ca_label" @@ -1781,7 +1783,7 @@ class MRIsCALabel(FSCommandOpenMP): >>> ca_label.inputs.sulc = "lh.pial" >>> ca_label.inputs.classifier = "im1.nii" # in pracice, use .gcs extension >>> ca_label.inputs.smoothwm = "lh.pial" - >>> ca_label.cmdline + >>> ca_label.cmdline # doctest: +IGNORE_UNICODE 'mris_ca_label test lh lh.pial im1.nii lh.aparc.annot' """ _cmd = "mris_ca_label" @@ -1867,7 +1869,7 @@ class SegmentCC(FSCommand): >>> SegmentCC_node.inputs.in_norm = "norm.mgz" >>> SegmentCC_node.inputs.out_rotation = "cc.lta" >>> SegmentCC_node.inputs.subject_id = "test" - >>> SegmentCC_node.cmdline + >>> SegmentCC_node.cmdline # doctest: +IGNORE_UNICODE 'mri_cc -aseg aseg.mgz -o 
aseg.auto.mgz -lta cc.lta test' """ @@ -1958,7 +1960,7 @@ class SegmentWM(FSCommand): >>> SegmentWM_node = freesurfer.SegmentWM() >>> SegmentWM_node.inputs.in_file = "norm.mgz" >>> SegmentWM_node.inputs.out_file = "wm.seg.mgz" - >>> SegmentWM_node.cmdline + >>> SegmentWM_node.cmdline # doctest: +IGNORE_UNICODE 'mri_segment norm.mgz wm.seg.mgz' """ @@ -2002,7 +2004,7 @@ class EditWMwithAseg(FSCommand): >>> editwm.inputs.seg_file = "aseg.mgz" >>> editwm.inputs.out_file = "wm.asegedit.mgz" >>> editwm.inputs.keep_in = True - >>> editwm.cmdline + >>> editwm.cmdline # doctest: +IGNORE_UNICODE 'mri_edit_wm_with_aseg -keep-in T1.mgz norm.mgz aseg.mgz wm.asegedit.mgz' """ _cmd = 'mri_edit_wm_with_aseg' @@ -2042,7 +2044,7 @@ class ConcatenateLTA(FSCommand): >>> conc_lta = ConcatenateLTA() >>> conc_lta.inputs.in_lta1 = 'trans.mat' >>> conc_lta.inputs.in_lta2 = 'trans.mat' - >>> conc_lta.cmdline + >>> conc_lta.cmdline # doctest: +IGNORE_UNICODE 'mri_concatenate_lta trans.mat trans.mat trans-long.mat' """ diff --git a/nipype/interfaces/freesurfer/registration.py b/nipype/interfaces/freesurfer/registration.py index 2df1f77111..57b1293621 100644 --- a/nipype/interfaces/freesurfer/registration.py +++ b/nipype/interfaces/freesurfer/registration.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: @@ -10,21 +11,22 @@ >>> os.chdir(datadir) """ -__docformat__ = 'restructuredtext' +from __future__ import print_function, division, unicode_literals, absolute_import import os import os.path -from ... utils.filemanip import split_filename, copyfile +from ... 
import logging +from ...utils.filemanip import split_filename, copyfile -from ..freesurfer.base import (Info, FSCommand, FSTraitedSpec, - FSScriptCommand, - FSScriptOutputSpec, - FSCommandOpenMP, - FSTraitedSpecOpenMP) +from .base import (FSCommand, FSTraitedSpec, + FSScriptCommand, + FSScriptOutputSpec, + FSCommandOpenMP, + FSTraitedSpecOpenMP) from ..base import (isdefined, TraitedSpec, File, traits, Directory) -from ... import logging +__docformat__ = 'restructuredtext' iflogger = logging.getLogger('interface') @@ -202,7 +204,7 @@ class EMRegister(FSCommandOpenMP): >>> register.inputs.out_file = 'norm_transform.lta' >>> register.inputs.skull = True >>> register.inputs.nbrspacing = 9 - >>> register.cmdline + >>> register.cmdline # doctest: +IGNORE_UNICODE 'mri_em_register -uns 9 -skull norm.mgz aseg.mgz norm_transform.lta' """ _cmd = 'mri_em_register' @@ -252,7 +254,7 @@ class Register(FSCommand): >>> register.inputs.target = 'aseg.mgz' >>> register.inputs.out_file = 'lh.pial.reg' >>> register.inputs.curv = True - >>> register.cmdline + >>> register.cmdline # doctest: +IGNORE_UNICODE 'mris_register -curv lh.pial aseg.mgz lh.pial.reg' """ @@ -318,7 +320,7 @@ class Paint(FSCommand): >>> paint.inputs.template = 'aseg.mgz' >>> paint.inputs.averages = 5 >>> paint.inputs.out_file = 'lh.avg_curv' - >>> paint.cmdline + >>> paint.cmdline # doctest: +IGNORE_UNICODE 'mrisp_paint -a 5 aseg.mgz lh.pial lh.avg_curv' """ diff --git a/nipype/interfaces/freesurfer/tests/__init__.py b/nipype/interfaces/freesurfer/tests/__init__.py index e69de29bb2..40a96afc6f 100644 --- a/nipype/interfaces/freesurfer/tests/__init__.py +++ b/nipype/interfaces/freesurfer/tests/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/freesurfer/tests/test_auto_ApplyMask.py b/nipype/interfaces/freesurfer/tests/test_auto_ApplyMask.py index 4004f91314..a4f3de7e31 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_ApplyMask.py +++ 
b/nipype/interfaces/freesurfer/tests/test_auto_ApplyMask.py @@ -29,7 +29,7 @@ def test_ApplyMask_inputs(): out_file=dict(argstr='%s', hash_files=True, keep_extension=True, - name_source=['in_file'], + name_source=[u'in_file'], name_template='%s_masked', position=-1, ), diff --git a/nipype/interfaces/freesurfer/tests/test_auto_ApplyVolTransform.py b/nipype/interfaces/freesurfer/tests/test_auto_ApplyVolTransform.py index 0912bc80dd..78446391fd 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_ApplyVolTransform.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_ApplyVolTransform.py @@ -11,12 +11,12 @@ def test_ApplyVolTransform_inputs(): ), fs_target=dict(argstr='--fstarg', mandatory=True, - requires=['reg_file'], - xor=('target_file', 'tal', 'fs_target'), + requires=[u'reg_file'], + xor=(u'target_file', u'tal', u'fs_target'), ), fsl_reg_file=dict(argstr='--fsl %s', mandatory=True, - xor=('reg_file', 'fsl_reg_file', 'xfm_reg_file', 'reg_header', 'subject'), + xor=(u'reg_file', u'fsl_reg_file', u'xfm_reg_file', u'reg_header', u'subject'), ), ignore_exception=dict(nohash=True, usedefault=True, @@ -26,22 +26,22 @@ def test_ApplyVolTransform_inputs(): inverse=dict(argstr='--inv', ), invert_morph=dict(argstr='--inv-morph', - requires=['m3z_file'], + requires=[u'm3z_file'], ), m3z_file=dict(argstr='--m3z %s', ), no_ded_m3z_path=dict(argstr='--noDefM3zPath', - requires=['m3z_file'], + requires=[u'm3z_file'], ), no_resample=dict(argstr='--no-resample', ), reg_file=dict(argstr='--reg %s', mandatory=True, - xor=('reg_file', 'fsl_reg_file', 'xfm_reg_file', 'reg_header', 'subject'), + xor=(u'reg_file', u'fsl_reg_file', u'xfm_reg_file', u'reg_header', u'subject'), ), reg_header=dict(argstr='--regheader', mandatory=True, - xor=('reg_file', 'fsl_reg_file', 'xfm_reg_file', 'reg_header', 'subject'), + xor=(u'reg_file', u'fsl_reg_file', u'xfm_reg_file', u'reg_header', u'subject'), ), source_file=dict(argstr='--mov %s', copyfile=False, @@ -49,18 +49,18 @@ def 
test_ApplyVolTransform_inputs(): ), subject=dict(argstr='--s %s', mandatory=True, - xor=('reg_file', 'fsl_reg_file', 'xfm_reg_file', 'reg_header', 'subject'), + xor=(u'reg_file', u'fsl_reg_file', u'xfm_reg_file', u'reg_header', u'subject'), ), subjects_dir=dict(), tal=dict(argstr='--tal', mandatory=True, - xor=('target_file', 'tal', 'fs_target'), + xor=(u'target_file', u'tal', u'fs_target'), ), tal_resolution=dict(argstr='--talres %.10f', ), target_file=dict(argstr='--targ %s', mandatory=True, - xor=('target_file', 'tal', 'fs_target'), + xor=(u'target_file', u'tal', u'fs_target'), ), terminal_output=dict(nohash=True, ), @@ -69,7 +69,7 @@ def test_ApplyVolTransform_inputs(): ), xfm_reg_file=dict(argstr='--xfm %s', mandatory=True, - xor=('reg_file', 'fsl_reg_file', 'xfm_reg_file', 'reg_header', 'subject'), + xor=(u'reg_file', u'fsl_reg_file', u'xfm_reg_file', u'reg_header', u'subject'), ), ) inputs = ApplyVolTransform.input_spec() diff --git a/nipype/interfaces/freesurfer/tests/test_auto_BBRegister.py b/nipype/interfaces/freesurfer/tests/test_auto_BBRegister.py index cb3444c2f0..195a304cc9 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_BBRegister.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_BBRegister.py @@ -19,11 +19,11 @@ def test_BBRegister_inputs(): ), init=dict(argstr='--init-%s', mandatory=True, - xor=['init_reg_file'], + xor=[u'init_reg_file'], ), init_reg_file=dict(argstr='--init-reg %s', mandatory=True, - xor=['init'], + xor=[u'init'], ), intermediate_file=dict(argstr='--int %s', ), @@ -33,10 +33,10 @@ def test_BBRegister_inputs(): genfile=True, ), reg_frame=dict(argstr='--frame %d', - xor=['reg_middle_frame'], + xor=[u'reg_middle_frame'], ), reg_middle_frame=dict(argstr='--mid-frame', - xor=['reg_frame'], + xor=[u'reg_frame'], ), registered_file=dict(argstr='--o %s', ), diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Binarize.py b/nipype/interfaces/freesurfer/tests/test_auto_Binarize.py index b22d6a98e6..01d37a484e 100644 --- 
a/nipype/interfaces/freesurfer/tests/test_auto_Binarize.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_Binarize.py @@ -46,12 +46,12 @@ def test_Binarize_inputs(): match=dict(argstr='--match %d...', ), max=dict(argstr='--max %f', - xor=['wm_ven_csf'], + xor=[u'wm_ven_csf'], ), merge_file=dict(argstr='--merge %s', ), min=dict(argstr='--min %f', - xor=['wm_ven_csf'], + xor=[u'wm_ven_csf'], ), out_type=dict(argstr='', ), @@ -67,7 +67,7 @@ def test_Binarize_inputs(): wm=dict(argstr='--wm', ), wm_ven_csf=dict(argstr='--wm+vcsf', - xor=['min', 'max'], + xor=[u'min', u'max'], ), zero_edges=dict(argstr='--zero-edges', ), diff --git a/nipype/interfaces/freesurfer/tests/test_auto_CANormalize.py b/nipype/interfaces/freesurfer/tests/test_auto_CANormalize.py index 3b46642cf8..c5d32d0665 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_CANormalize.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_CANormalize.py @@ -29,7 +29,7 @@ def test_CANormalize_inputs(): out_file=dict(argstr='%s', hash_files=False, keep_extension=True, - name_source=['in_file'], + name_source=[u'in_file'], name_template='%s_norm', position=-1, ), diff --git a/nipype/interfaces/freesurfer/tests/test_auto_CheckTalairachAlignment.py b/nipype/interfaces/freesurfer/tests/test_auto_CheckTalairachAlignment.py index b56d090435..6296509937 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_CheckTalairachAlignment.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_CheckTalairachAlignment.py @@ -15,12 +15,12 @@ def test_CheckTalairachAlignment_inputs(): in_file=dict(argstr='-xfm %s', mandatory=True, position=-1, - xor=['subject'], + xor=[u'subject'], ), subject=dict(argstr='-subj %s', mandatory=True, position=-1, - xor=['in_file'], + xor=[u'in_file'], ), subjects_dir=dict(), terminal_output=dict(nohash=True, diff --git a/nipype/interfaces/freesurfer/tests/test_auto_ConcatenateLTA.py b/nipype/interfaces/freesurfer/tests/test_auto_ConcatenateLTA.py index a2a31c5897..12420d7aad 100644 --- 
a/nipype/interfaces/freesurfer/tests/test_auto_ConcatenateLTA.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_ConcatenateLTA.py @@ -23,7 +23,7 @@ def test_ConcatenateLTA_inputs(): out_file=dict(argstr='%s', hash_files=False, keep_extension=True, - name_source=['in_lta1'], + name_source=[u'in_lta1'], name_template='%s-long', position=-1, ), diff --git a/nipype/interfaces/freesurfer/tests/test_auto_CurvatureStats.py b/nipype/interfaces/freesurfer/tests/test_auto_CurvatureStats.py index 1400e6f626..c03dcbd4c1 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_CurvatureStats.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_CurvatureStats.py @@ -29,7 +29,7 @@ def test_CurvatureStats_inputs(): ), out_file=dict(argstr='-o %s', hash_files=False, - name_source=['hemisphere'], + name_source=[u'hemisphere'], name_template='%s.curv.stats', ), subject_id=dict(argstr='%s', diff --git a/nipype/interfaces/freesurfer/tests/test_auto_DICOMConvert.py b/nipype/interfaces/freesurfer/tests/test_auto_DICOMConvert.py index 1551f3e44c..a24665f935 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_DICOMConvert.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_DICOMConvert.py @@ -18,11 +18,11 @@ def test_DICOMConvert_inputs(): ignore_exception=dict(nohash=True, usedefault=True, ), - ignore_single_slice=dict(requires=['dicom_info'], + ignore_single_slice=dict(requires=[u'dicom_info'], ), out_type=dict(usedefault=True, ), - seq_list=dict(requires=['dicom_info'], + seq_list=dict(requires=[u'dicom_info'], ), subject_dir_template=dict(usedefault=True, ), diff --git a/nipype/interfaces/freesurfer/tests/test_auto_EMRegister.py b/nipype/interfaces/freesurfer/tests/test_auto_EMRegister.py index e1933778b3..ac8a79ed3a 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_EMRegister.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_EMRegister.py @@ -24,7 +24,7 @@ def test_EMRegister_inputs(): out_file=dict(argstr='%s', hash_files=False, keep_extension=False, - 
name_source=['in_file'], + name_source=[u'in_file'], name_template='%s_transform.lta', position=-1, ), diff --git a/nipype/interfaces/freesurfer/tests/test_auto_GLMFit.py b/nipype/interfaces/freesurfer/tests/test_auto_GLMFit.py index 4f3edc8164..753ab44569 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_GLMFit.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_GLMFit.py @@ -19,12 +19,12 @@ def test_GLMFit_inputs(): contrast=dict(argstr='--C %s...', ), cortex=dict(argstr='--cortex', - xor=['label_file'], + xor=[u'label_file'], ), debug=dict(argstr='--debug', ), design=dict(argstr='--X %s', - xor=('fsgd', 'design', 'one_sample'), + xor=(u'fsgd', u'design', u'one_sample'), ), diag=dict(), diag_cluster=dict(argstr='--diag-cluster', @@ -33,17 +33,17 @@ def test_GLMFit_inputs(): usedefault=True, ), fixed_fx_dof=dict(argstr='--ffxdof %d', - xor=['fixed_fx_dof_file'], + xor=[u'fixed_fx_dof_file'], ), fixed_fx_dof_file=dict(argstr='--ffxdofdat %d', - xor=['fixed_fx_dof'], + xor=[u'fixed_fx_dof'], ), fixed_fx_var=dict(argstr='--yffxvar %s', ), force_perm=dict(argstr='--perm-force', ), fsgd=dict(argstr='--fsgd %s %s', - xor=('fsgd', 'design', 'one_sample'), + xor=(u'fsgd', u'design', u'one_sample'), ), fwhm=dict(argstr='--fwhm %f', ), @@ -61,7 +61,7 @@ def test_GLMFit_inputs(): invert_mask=dict(argstr='--mask-inv', ), label_file=dict(argstr='--label %s', - xor=['cortex'], + xor=[u'cortex'], ), mask_file=dict(argstr='--mask %s', ), @@ -72,10 +72,10 @@ def test_GLMFit_inputs(): no_mask_smooth=dict(argstr='--no-mask-smooth', ), no_prune=dict(argstr='--no-prune', - xor=['prunethresh'], + xor=[u'prunethresh'], ), one_sample=dict(argstr='--osgm', - xor=('one_sample', 'fsgd', 'design', 'contrast'), + xor=(u'one_sample', u'fsgd', u'design', u'contrast'), ), pca=dict(argstr='--pca', ), @@ -86,7 +86,7 @@ def test_GLMFit_inputs(): prune=dict(argstr='--prune', ), prune_thresh=dict(argstr='--prune_thr %f', - xor=['noprune'], + xor=[u'noprune'], ), 
resynth_test=dict(argstr='--resynthtest %d', ), @@ -111,7 +111,7 @@ def test_GLMFit_inputs(): subject_id=dict(), subjects_dir=dict(), surf=dict(argstr='--surf %s %s %s', - requires=['subject_id', 'hemi'], + requires=[u'subject_id', u'hemi'], ), surf_geo=dict(usedefault=True, ), @@ -125,16 +125,16 @@ def test_GLMFit_inputs(): ), vox_dump=dict(argstr='--voxdump %d %d %d', ), - weight_file=dict(xor=['weighted_ls'], + weight_file=dict(xor=[u'weighted_ls'], ), weight_inv=dict(argstr='--w-inv', - xor=['weighted_ls'], + xor=[u'weighted_ls'], ), weight_sqrt=dict(argstr='--w-sqrt', - xor=['weighted_ls'], + xor=[u'weighted_ls'], ), weighted_ls=dict(argstr='--wls %s', - xor=('weight_file', 'weight_inv', 'weight_sqrt'), + xor=(u'weight_file', u'weight_inv', u'weight_sqrt'), ), ) inputs = GLMFit.input_spec() diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Jacobian.py b/nipype/interfaces/freesurfer/tests/test_auto_Jacobian.py index 2933f5d8c7..2b3fd09857 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_Jacobian.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_Jacobian.py @@ -23,7 +23,7 @@ def test_Jacobian_inputs(): out_file=dict(argstr='%s', hash_files=False, keep_extension=False, - name_source=['in_origsurf'], + name_source=[u'in_origsurf'], name_template='%s.jacobian', position=-1, ), diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Label2Label.py b/nipype/interfaces/freesurfer/tests/test_auto_Label2Label.py index 8ca41467ec..abf2985c46 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_Label2Label.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_Label2Label.py @@ -19,7 +19,7 @@ def test_Label2Label_inputs(): out_file=dict(argstr='--trglabel %s', hash_files=False, keep_extension=True, - name_source=['source_label'], + name_source=[u'source_label'], name_template='%s_converted', ), registration_method=dict(argstr='--regmethod %s', diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Label2Vol.py 
b/nipype/interfaces/freesurfer/tests/test_auto_Label2Vol.py index 4ed1bb441c..5cc4fe6f4c 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_Label2Vol.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_Label2Vol.py @@ -7,12 +7,12 @@ def test_Label2Vol_inputs(): input_map = dict(annot_file=dict(argstr='--annot %s', copyfile=False, mandatory=True, - requires=('subject_id', 'hemi'), - xor=('label_file', 'annot_file', 'seg_file', 'aparc_aseg'), + requires=(u'subject_id', u'hemi'), + xor=(u'label_file', u'annot_file', u'seg_file', u'aparc_aseg'), ), aparc_aseg=dict(argstr='--aparc+aseg', mandatory=True, - xor=('label_file', 'annot_file', 'seg_file', 'aparc_aseg'), + xor=(u'label_file', u'annot_file', u'seg_file', u'aparc_aseg'), ), args=dict(argstr='%s', ), @@ -24,7 +24,7 @@ def test_Label2Vol_inputs(): hemi=dict(argstr='--hemi %s', ), identity=dict(argstr='--identity', - xor=('reg_file', 'reg_header', 'identity'), + xor=(u'reg_file', u'reg_header', u'identity'), ), ignore_exception=dict(nohash=True, usedefault=True, @@ -34,7 +34,7 @@ def test_Label2Vol_inputs(): label_file=dict(argstr='--label %s...', copyfile=False, mandatory=True, - xor=('label_file', 'annot_file', 'seg_file', 'aparc_aseg'), + xor=(u'label_file', u'annot_file', u'seg_file', u'aparc_aseg'), ), label_hit_file=dict(argstr='--hits %s', ), @@ -45,18 +45,18 @@ def test_Label2Vol_inputs(): native_vox2ras=dict(argstr='--native-vox2ras', ), proj=dict(argstr='--proj %s %f %f %f', - requires=('subject_id', 'hemi'), + requires=(u'subject_id', u'hemi'), ), reg_file=dict(argstr='--reg %s', - xor=('reg_file', 'reg_header', 'identity'), + xor=(u'reg_file', u'reg_header', u'identity'), ), reg_header=dict(argstr='--regheader %s', - xor=('reg_file', 'reg_header', 'identity'), + xor=(u'reg_file', u'reg_header', u'identity'), ), seg_file=dict(argstr='--seg %s', copyfile=False, mandatory=True, - xor=('label_file', 'annot_file', 'seg_file', 'aparc_aseg'), + xor=(u'label_file', u'annot_file', u'seg_file', 
u'aparc_aseg'), ), subject_id=dict(argstr='--subject %s', ), diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MNIBiasCorrection.py b/nipype/interfaces/freesurfer/tests/test_auto_MNIBiasCorrection.py index d523cba9dc..3a139865a4 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_MNIBiasCorrection.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_MNIBiasCorrection.py @@ -26,7 +26,7 @@ def test_MNIBiasCorrection_inputs(): out_file=dict(argstr='--o %s', hash_files=False, keep_extension=True, - name_source=['in_file'], + name_source=[u'in_file'], name_template='%s_output', ), protocol_iterations=dict(argstr='--proto-iters %d', diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MRIPretess.py b/nipype/interfaces/freesurfer/tests/test_auto_MRIPretess.py index ce3e7a244b..9cfa579485 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_MRIPretess.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_MRIPretess.py @@ -30,7 +30,9 @@ def test_MRIPretess_inputs(): nocorners=dict(argstr='-nocorners', ), out_file=dict(argstr='%s', - genfile=True, + keep_extension=True, + name_source=[u'in_filled'], + name_template='%s_pretesswm', position=-1, ), subjects_dir=dict(), diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MRISPreproc.py b/nipype/interfaces/freesurfer/tests/test_auto_MRISPreproc.py index 306ca4cd8e..ca56e521e2 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_MRISPreproc.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_MRISPreproc.py @@ -10,13 +10,13 @@ def test_MRISPreproc_inputs(): usedefault=True, ), fsgd_file=dict(argstr='--fsgd %s', - xor=('subjects', 'fsgd_file', 'subject_file'), + xor=(u'subjects', u'fsgd_file', u'subject_file'), ), fwhm=dict(argstr='--fwhm %f', - xor=['num_iters'], + xor=[u'num_iters'], ), fwhm_source=dict(argstr='--fwhm-src %f', - xor=['num_iters_source'], + xor=[u'num_iters_source'], ), hemi=dict(argstr='--hemi %s', mandatory=True, @@ -25,10 +25,10 @@ def test_MRISPreproc_inputs(): 
usedefault=True, ), num_iters=dict(argstr='--niters %d', - xor=['fwhm'], + xor=[u'fwhm'], ), num_iters_source=dict(argstr='--niterssrc %d', - xor=['fwhm_source'], + xor=[u'fwhm_source'], ), out_file=dict(argstr='--out %s', genfile=True, @@ -40,22 +40,22 @@ def test_MRISPreproc_inputs(): source_format=dict(argstr='--srcfmt %s', ), subject_file=dict(argstr='--f %s', - xor=('subjects', 'fsgd_file', 'subject_file'), + xor=(u'subjects', u'fsgd_file', u'subject_file'), ), subjects=dict(argstr='--s %s...', - xor=('subjects', 'fsgd_file', 'subject_file'), + xor=(u'subjects', u'fsgd_file', u'subject_file'), ), subjects_dir=dict(), surf_area=dict(argstr='--area %s', - xor=('surf_measure', 'surf_measure_file', 'surf_area'), + xor=(u'surf_measure', u'surf_measure_file', u'surf_area'), ), surf_dir=dict(argstr='--surfdir %s', ), surf_measure=dict(argstr='--meas %s', - xor=('surf_measure', 'surf_measure_file', 'surf_area'), + xor=(u'surf_measure', u'surf_measure_file', u'surf_area'), ), surf_measure_file=dict(argstr='--is %s...', - xor=('surf_measure', 'surf_measure_file', 'surf_area'), + xor=(u'surf_measure', u'surf_measure_file', u'surf_area'), ), target=dict(argstr='--target %s', mandatory=True, diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MRISPreprocReconAll.py b/nipype/interfaces/freesurfer/tests/test_auto_MRISPreprocReconAll.py index 1425960054..7e775b0854 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_MRISPreprocReconAll.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_MRISPreprocReconAll.py @@ -11,13 +11,13 @@ def test_MRISPreprocReconAll_inputs(): usedefault=True, ), fsgd_file=dict(argstr='--fsgd %s', - xor=('subjects', 'fsgd_file', 'subject_file'), + xor=(u'subjects', u'fsgd_file', u'subject_file'), ), fwhm=dict(argstr='--fwhm %f', - xor=['num_iters'], + xor=[u'num_iters'], ), fwhm_source=dict(argstr='--fwhm-src %f', - xor=['num_iters_source'], + xor=[u'num_iters_source'], ), hemi=dict(argstr='--hemi %s', mandatory=True, @@ -25,49 +25,49 @@ 
def test_MRISPreprocReconAll_inputs(): ignore_exception=dict(nohash=True, usedefault=True, ), - lh_surfreg_target=dict(requires=['surfreg_files'], + lh_surfreg_target=dict(requires=[u'surfreg_files'], ), num_iters=dict(argstr='--niters %d', - xor=['fwhm'], + xor=[u'fwhm'], ), num_iters_source=dict(argstr='--niterssrc %d', - xor=['fwhm_source'], + xor=[u'fwhm_source'], ), out_file=dict(argstr='--out %s', genfile=True, ), proj_frac=dict(argstr='--projfrac %s', ), - rh_surfreg_target=dict(requires=['surfreg_files'], + rh_surfreg_target=dict(requires=[u'surfreg_files'], ), smooth_cortex_only=dict(argstr='--smooth-cortex-only', ), source_format=dict(argstr='--srcfmt %s', ), subject_file=dict(argstr='--f %s', - xor=('subjects', 'fsgd_file', 'subject_file'), + xor=(u'subjects', u'fsgd_file', u'subject_file'), ), subject_id=dict(argstr='--s %s', usedefault=True, - xor=('subjects', 'fsgd_file', 'subject_file', 'subject_id'), + xor=(u'subjects', u'fsgd_file', u'subject_file', u'subject_id'), ), subjects=dict(argstr='--s %s...', - xor=('subjects', 'fsgd_file', 'subject_file'), + xor=(u'subjects', u'fsgd_file', u'subject_file'), ), subjects_dir=dict(), surf_area=dict(argstr='--area %s', - xor=('surf_measure', 'surf_measure_file', 'surf_area'), + xor=(u'surf_measure', u'surf_measure_file', u'surf_area'), ), surf_dir=dict(argstr='--surfdir %s', ), surf_measure=dict(argstr='--meas %s', - xor=('surf_measure', 'surf_measure_file', 'surf_area'), + xor=(u'surf_measure', u'surf_measure_file', u'surf_area'), ), surf_measure_file=dict(argstr='--meas %s', - xor=('surf_measure', 'surf_measure_file', 'surf_area'), + xor=(u'surf_measure', u'surf_measure_file', u'surf_area'), ), surfreg_files=dict(argstr='--surfreg %s', - requires=['lh_surfreg_target', 'rh_surfreg_target'], + requires=[u'lh_surfreg_target', u'rh_surfreg_target'], ), target=dict(argstr='--target %s', mandatory=True, diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MRIsCALabel.py 
b/nipype/interfaces/freesurfer/tests/test_auto_MRIsCALabel.py index 2ff1ccd31d..ce445321c6 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_MRIsCALabel.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_MRIsCALabel.py @@ -35,7 +35,7 @@ def test_MRIsCALabel_inputs(): out_file=dict(argstr='%s', hash_files=False, keep_extension=True, - name_source=['hemisphere'], + name_source=[u'hemisphere'], name_template='%s.aparc.annot', position=-1, ), diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MRIsCalc.py b/nipype/interfaces/freesurfer/tests/test_auto_MRIsCalc.py index b8263b5ebf..d7160510a7 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_MRIsCalc.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_MRIsCalc.py @@ -22,15 +22,15 @@ def test_MRIsCalc_inputs(): ), in_file2=dict(argstr='%s', position=-1, - xor=['in_float', 'in_int'], + xor=[u'in_float', u'in_int'], ), in_float=dict(argstr='%f', position=-1, - xor=['in_file2', 'in_int'], + xor=[u'in_file2', u'in_int'], ), in_int=dict(argstr='%d', position=-1, - xor=['in_file2', 'in_float'], + xor=[u'in_file2', u'in_float'], ), out_file=dict(argstr='-o %s', mandatory=True, diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MRIsConvert.py b/nipype/interfaces/freesurfer/tests/test_auto_MRIsConvert.py index d200a1e5df..b2b79a326e 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_MRIsConvert.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_MRIsConvert.py @@ -31,13 +31,13 @@ def test_MRIsConvert_inputs(): origname=dict(argstr='-o %s', ), out_datatype=dict(mandatory=True, - xor=['out_file'], + xor=[u'out_file'], ), out_file=dict(argstr='%s', genfile=True, mandatory=True, position=-1, - xor=['out_datatype'], + xor=[u'out_datatype'], ), parcstats_file=dict(argstr='--parcstats %s', ), diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MRIsInflate.py b/nipype/interfaces/freesurfer/tests/test_auto_MRIsInflate.py index 99ce0e1b8e..f94f3fa4a5 100644 --- 
a/nipype/interfaces/freesurfer/tests/test_auto_MRIsInflate.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_MRIsInflate.py @@ -18,16 +18,16 @@ def test_MRIsInflate_inputs(): position=-2, ), no_save_sulc=dict(argstr='-no-save-sulc', - xor=['out_sulc'], + xor=[u'out_sulc'], ), out_file=dict(argstr='%s', hash_files=False, keep_extension=True, - name_source=['in_file'], + name_source=[u'in_file'], name_template='%s.inflated', position=-1, ), - out_sulc=dict(xor=['no_save_sulc'], + out_sulc=dict(xor=[u'no_save_sulc'], ), subjects_dir=dict(), terminal_output=dict(nohash=True, diff --git a/nipype/interfaces/freesurfer/tests/test_auto_MakeSurfaces.py b/nipype/interfaces/freesurfer/tests/test_auto_MakeSurfaces.py index 5dd36f9628..65aff0de5d 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_MakeSurfaces.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_MakeSurfaces.py @@ -25,7 +25,7 @@ def test_MakeSurfaces_inputs(): ), in_filled=dict(mandatory=True, ), - in_label=dict(xor=['noaparc'], + in_label=dict(xor=[u'noaparc'], ), in_orig=dict(argstr='-orig %s', mandatory=True, @@ -42,10 +42,10 @@ def test_MakeSurfaces_inputs(): no_white=dict(argstr='-nowhite', ), noaparc=dict(argstr='-noaparc', - xor=['in_label'], + xor=[u'in_label'], ), orig_pial=dict(argstr='-orig_pial %s', - requires=['in_label'], + requires=[u'in_label'], ), orig_white=dict(argstr='-orig_white %s', ), diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Normalize.py b/nipype/interfaces/freesurfer/tests/test_auto_Normalize.py index 304983e629..773e66997e 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_Normalize.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_Normalize.py @@ -24,7 +24,7 @@ def test_Normalize_inputs(): out_file=dict(argstr='%s', hash_files=False, keep_extension=True, - name_source=['in_file'], + name_source=[u'in_file'], name_template='%s_norm', position=-1, ), diff --git a/nipype/interfaces/freesurfer/tests/test_auto_OneSampleTTest.py 
b/nipype/interfaces/freesurfer/tests/test_auto_OneSampleTTest.py index 244000ce5b..37fec80ad3 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_OneSampleTTest.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_OneSampleTTest.py @@ -19,12 +19,12 @@ def test_OneSampleTTest_inputs(): contrast=dict(argstr='--C %s...', ), cortex=dict(argstr='--cortex', - xor=['label_file'], + xor=[u'label_file'], ), debug=dict(argstr='--debug', ), design=dict(argstr='--X %s', - xor=('fsgd', 'design', 'one_sample'), + xor=(u'fsgd', u'design', u'one_sample'), ), diag=dict(), diag_cluster=dict(argstr='--diag-cluster', @@ -33,17 +33,17 @@ def test_OneSampleTTest_inputs(): usedefault=True, ), fixed_fx_dof=dict(argstr='--ffxdof %d', - xor=['fixed_fx_dof_file'], + xor=[u'fixed_fx_dof_file'], ), fixed_fx_dof_file=dict(argstr='--ffxdofdat %d', - xor=['fixed_fx_dof'], + xor=[u'fixed_fx_dof'], ), fixed_fx_var=dict(argstr='--yffxvar %s', ), force_perm=dict(argstr='--perm-force', ), fsgd=dict(argstr='--fsgd %s %s', - xor=('fsgd', 'design', 'one_sample'), + xor=(u'fsgd', u'design', u'one_sample'), ), fwhm=dict(argstr='--fwhm %f', ), @@ -61,7 +61,7 @@ def test_OneSampleTTest_inputs(): invert_mask=dict(argstr='--mask-inv', ), label_file=dict(argstr='--label %s', - xor=['cortex'], + xor=[u'cortex'], ), mask_file=dict(argstr='--mask %s', ), @@ -72,10 +72,10 @@ def test_OneSampleTTest_inputs(): no_mask_smooth=dict(argstr='--no-mask-smooth', ), no_prune=dict(argstr='--no-prune', - xor=['prunethresh'], + xor=[u'prunethresh'], ), one_sample=dict(argstr='--osgm', - xor=('one_sample', 'fsgd', 'design', 'contrast'), + xor=(u'one_sample', u'fsgd', u'design', u'contrast'), ), pca=dict(argstr='--pca', ), @@ -86,7 +86,7 @@ def test_OneSampleTTest_inputs(): prune=dict(argstr='--prune', ), prune_thresh=dict(argstr='--prune_thr %f', - xor=['noprune'], + xor=[u'noprune'], ), resynth_test=dict(argstr='--resynthtest %d', ), @@ -111,7 +111,7 @@ def test_OneSampleTTest_inputs(): subject_id=dict(), 
subjects_dir=dict(), surf=dict(argstr='--surf %s %s %s', - requires=['subject_id', 'hemi'], + requires=[u'subject_id', u'hemi'], ), surf_geo=dict(usedefault=True, ), @@ -125,16 +125,16 @@ def test_OneSampleTTest_inputs(): ), vox_dump=dict(argstr='--voxdump %d %d %d', ), - weight_file=dict(xor=['weighted_ls'], + weight_file=dict(xor=[u'weighted_ls'], ), weight_inv=dict(argstr='--w-inv', - xor=['weighted_ls'], + xor=[u'weighted_ls'], ), weight_sqrt=dict(argstr='--w-sqrt', - xor=['weighted_ls'], + xor=[u'weighted_ls'], ), weighted_ls=dict(argstr='--wls %s', - xor=('weight_file', 'weight_inv', 'weight_sqrt'), + xor=(u'weight_file', u'weight_inv', u'weight_sqrt'), ), ) inputs = OneSampleTTest.input_spec() diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Paint.py b/nipype/interfaces/freesurfer/tests/test_auto_Paint.py index e34c646324..567cae10b1 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_Paint.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_Paint.py @@ -21,7 +21,7 @@ def test_Paint_inputs(): out_file=dict(argstr='%s', hash_files=False, keep_extension=False, - name_source=['in_surf'], + name_source=[u'in_surf'], name_template='%s.avg_curv', position=-1, ), diff --git a/nipype/interfaces/freesurfer/tests/test_auto_ParcellationStats.py b/nipype/interfaces/freesurfer/tests/test_auto_ParcellationStats.py index 07fc98b147..cfdd45d9dd 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_ParcellationStats.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_ParcellationStats.py @@ -23,12 +23,12 @@ def test_ParcellationStats_inputs(): usedefault=True, ), in_annotation=dict(argstr='-a %s', - xor=['in_label'], + xor=[u'in_label'], ), in_cortex=dict(argstr='-cortex %s', ), in_label=dict(argstr='-l %s', - xor=['in_annotatoin', 'out_color'], + xor=[u'in_annotatoin', u'out_color'], ), lh_pial=dict(mandatory=True, ), @@ -38,11 +38,11 @@ def test_ParcellationStats_inputs(): ), out_color=dict(argstr='-c %s', genfile=True, - xor=['in_label'], + 
xor=[u'in_label'], ), out_table=dict(argstr='-f %s', genfile=True, - requires=['tabular_output'], + requires=[u'tabular_output'], ), rh_pial=dict(mandatory=True, ), @@ -64,7 +64,7 @@ def test_ParcellationStats_inputs(): terminal_output=dict(nohash=True, ), th3=dict(argstr='-th3', - requires=['cortex_label'], + requires=[u'cortex_label'], ), thickness=dict(mandatory=True, ), diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Register.py b/nipype/interfaces/freesurfer/tests/test_auto_Register.py index 82c15c2b32..b8e533b413 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_Register.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_Register.py @@ -7,7 +7,7 @@ def test_Register_inputs(): input_map = dict(args=dict(argstr='%s', ), curv=dict(argstr='-curv', - requires=['in_smoothwm'], + requires=[u'in_smoothwm'], ), environ=dict(nohash=True, usedefault=True, diff --git a/nipype/interfaces/freesurfer/tests/test_auto_RelabelHypointensities.py b/nipype/interfaces/freesurfer/tests/test_auto_RelabelHypointensities.py index 63d1bee291..860166868a 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_RelabelHypointensities.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_RelabelHypointensities.py @@ -22,7 +22,7 @@ def test_RelabelHypointensities_inputs(): out_file=dict(argstr='%s', hash_files=False, keep_extension=False, - name_source=['aseg'], + name_source=[u'aseg'], name_template='%s.hypos.mgz', position=-1, ), diff --git a/nipype/interfaces/freesurfer/tests/test_auto_RemoveIntersection.py b/nipype/interfaces/freesurfer/tests/test_auto_RemoveIntersection.py index 215476e477..f951e097fd 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_RemoveIntersection.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_RemoveIntersection.py @@ -20,7 +20,7 @@ def test_RemoveIntersection_inputs(): out_file=dict(argstr='%s', hash_files=False, keep_extension=True, - name_source=['in_file'], + name_source=[u'in_file'], name_template='%s', position=-1, ), 
diff --git a/nipype/interfaces/freesurfer/tests/test_auto_RemoveNeck.py b/nipype/interfaces/freesurfer/tests/test_auto_RemoveNeck.py index 278d6848d4..271b6947e3 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_RemoveNeck.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_RemoveNeck.py @@ -19,7 +19,7 @@ def test_RemoveNeck_inputs(): out_file=dict(argstr='%s', hash_files=False, keep_extension=True, - name_source=['in_file'], + name_source=[u'in_file'], name_template='%s_noneck', position=-1, ), diff --git a/nipype/interfaces/freesurfer/tests/test_auto_RobustRegister.py b/nipype/interfaces/freesurfer/tests/test_auto_RobustRegister.py index 9c52782acc..20af60b42f 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_RobustRegister.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_RobustRegister.py @@ -8,7 +8,7 @@ def test_RobustRegister_inputs(): ), auto_sens=dict(argstr='--satit', mandatory=True, - xor=['outlier_sens'], + xor=[u'outlier_sens'], ), environ=dict(nohash=True, usedefault=True, @@ -59,7 +59,7 @@ def test_RobustRegister_inputs(): ), outlier_sens=dict(argstr='--sat %.4f', mandatory=True, - xor=['auto_sens'], + xor=[u'auto_sens'], ), registered_file=dict(argstr='--warp %s', ), diff --git a/nipype/interfaces/freesurfer/tests/test_auto_RobustTemplate.py b/nipype/interfaces/freesurfer/tests/test_auto_RobustTemplate.py index 84c3daebb2..b531e3fd94 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_RobustTemplate.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_RobustTemplate.py @@ -8,7 +8,7 @@ def test_RobustTemplate_inputs(): ), auto_detect_sensitivity=dict(argstr='--satit', mandatory=True, - xor=['outlier_sensitivity'], + xor=[u'outlier_sensitivity'], ), average_metric=dict(argstr='--average %d', ), @@ -39,7 +39,7 @@ def test_RobustTemplate_inputs(): ), outlier_sensitivity=dict(argstr='--sat %.4f', mandatory=True, - xor=['auto_detect_sensitivity'], + xor=[u'auto_detect_sensitivity'], ), 
scaled_intensity_outputs=dict(argstr='--iscaleout %s', ), diff --git a/nipype/interfaces/freesurfer/tests/test_auto_SampleToSurface.py b/nipype/interfaces/freesurfer/tests/test_auto_SampleToSurface.py index 8a08a621f8..7f91440c2d 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_SampleToSurface.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_SampleToSurface.py @@ -11,7 +11,7 @@ def test_SampleToSurface_inputs(): args=dict(argstr='%s', ), cortex_mask=dict(argstr='--cortex', - xor=['mask_label'], + xor=[u'mask_label'], ), environ=dict(nohash=True, usedefault=True, @@ -30,7 +30,7 @@ def test_SampleToSurface_inputs(): hits_type=dict(argstr='--srchit_type', ), ico_order=dict(argstr='--icoorder %d', - requires=['target_subject'], + requires=[u'target_subject'], ), ignore_exception=dict(nohash=True, usedefault=True, @@ -38,14 +38,14 @@ def test_SampleToSurface_inputs(): interp_method=dict(argstr='--interp %s', ), mask_label=dict(argstr='--mask %s', - xor=['cortex_mask'], + xor=[u'cortex_mask'], ), mni152reg=dict(argstr='--mni152reg', mandatory=True, - xor=['reg_file', 'reg_header', 'mni152reg'], + xor=[u'reg_file', u'reg_header', u'mni152reg'], ), no_reshape=dict(argstr='--noreshape', - xor=['reshape'], + xor=[u'reshape'], ), out_file=dict(argstr='--o %s', genfile=True, @@ -53,31 +53,31 @@ def test_SampleToSurface_inputs(): out_type=dict(argstr='--out_type %s', ), override_reg_subj=dict(argstr='--srcsubject %s', - requires=['subject_id'], + requires=[u'subject_id'], ), projection_stem=dict(mandatory=True, - xor=['sampling_method'], + xor=[u'sampling_method'], ), reference_file=dict(argstr='--ref %s', ), reg_file=dict(argstr='--reg %s', mandatory=True, - xor=['reg_file', 'reg_header', 'mni152reg'], + xor=[u'reg_file', u'reg_header', u'mni152reg'], ), reg_header=dict(argstr='--regheader %s', mandatory=True, - requires=['subject_id'], - xor=['reg_file', 'reg_header', 'mni152reg'], + requires=[u'subject_id'], + xor=[u'reg_file', u'reg_header', u'mni152reg'], ), 
reshape=dict(argstr='--reshape', - xor=['no_reshape'], + xor=[u'no_reshape'], ), reshape_slices=dict(argstr='--rf %d', ), sampling_method=dict(argstr='%s', mandatory=True, - requires=['sampling_range', 'sampling_units'], - xor=['projection_stem'], + requires=[u'sampling_range', u'sampling_units'], + xor=[u'projection_stem'], ), sampling_range=dict(), sampling_units=dict(), @@ -93,7 +93,7 @@ def test_SampleToSurface_inputs(): subject_id=dict(), subjects_dir=dict(), surf_reg=dict(argstr='--surfreg', - requires=['target_subject'], + requires=[u'target_subject'], ), surface=dict(argstr='--surf %s', ), diff --git a/nipype/interfaces/freesurfer/tests/test_auto_SegStats.py b/nipype/interfaces/freesurfer/tests/test_auto_SegStats.py index 5fa5871743..0318b9c3e1 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_SegStats.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_SegStats.py @@ -6,7 +6,7 @@ def test_SegStats_inputs(): input_map = dict(annot=dict(argstr='--annot %s %s %s', mandatory=True, - xor=('segmentation_file', 'annot', 'surf_label'), + xor=(u'segmentation_file', u'annot', u'surf_label'), ), args=dict(argstr='%s', ), @@ -23,12 +23,12 @@ def test_SegStats_inputs(): calc_snr=dict(argstr='--snr', ), color_table_file=dict(argstr='--ctab %s', - xor=('color_table_file', 'default_color_table', 'gca_color_table'), + xor=(u'color_table_file', u'default_color_table', u'gca_color_table'), ), cortex_vol_from_surf=dict(argstr='--surf-ctx-vol', ), default_color_table=dict(argstr='--ctab-default', - xor=('color_table_file', 'default_color_table', 'gca_color_table'), + xor=(u'color_table_file', u'default_color_table', u'gca_color_table'), ), empty=dict(argstr='--empty', ), @@ -47,7 +47,7 @@ def test_SegStats_inputs(): frame=dict(argstr='--frame %d', ), gca_color_table=dict(argstr='--ctab-gca %s', - xor=('color_table_file', 'default_color_table', 'gca_color_table'), + xor=(u'color_table_file', u'default_color_table', u'gca_color_table'), ), 
ignore_exception=dict(nohash=True, usedefault=True, @@ -57,13 +57,13 @@ def test_SegStats_inputs(): in_intensity=dict(argstr='--in %s --in-intensity-name %s', ), intensity_units=dict(argstr='--in-intensity-units %s', - requires=['in_intensity'], + requires=[u'in_intensity'], ), mask_erode=dict(argstr='--maskerode %d', ), mask_file=dict(argstr='--mask %s', ), - mask_frame=dict(requires=['mask_file'], + mask_frame=dict(requires=[u'mask_file'], ), mask_invert=dict(argstr='--maskinvert', ), @@ -80,7 +80,7 @@ def test_SegStats_inputs(): ), segmentation_file=dict(argstr='--seg %s', mandatory=True, - xor=('segmentation_file', 'annot', 'surf_label'), + xor=(u'segmentation_file', u'annot', u'surf_label'), ), sf_avg_file=dict(argstr='--sfavg %s', ), @@ -95,7 +95,7 @@ def test_SegStats_inputs(): ), surf_label=dict(argstr='--slabel %s %s %s', mandatory=True, - xor=('segmentation_file', 'annot', 'surf_label'), + xor=(u'segmentation_file', u'annot', u'surf_label'), ), terminal_output=dict(nohash=True, ), diff --git a/nipype/interfaces/freesurfer/tests/test_auto_SegStatsReconAll.py b/nipype/interfaces/freesurfer/tests/test_auto_SegStatsReconAll.py index 518d119a97..8e3d3188c6 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_SegStatsReconAll.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_SegStatsReconAll.py @@ -6,7 +6,7 @@ def test_SegStatsReconAll_inputs(): input_map = dict(annot=dict(argstr='--annot %s %s %s', mandatory=True, - xor=('segmentation_file', 'annot', 'surf_label'), + xor=(u'segmentation_file', u'annot', u'surf_label'), ), args=dict(argstr='%s', ), @@ -24,13 +24,13 @@ def test_SegStatsReconAll_inputs(): calc_snr=dict(argstr='--snr', ), color_table_file=dict(argstr='--ctab %s', - xor=('color_table_file', 'default_color_table', 'gca_color_table'), + xor=(u'color_table_file', u'default_color_table', u'gca_color_table'), ), copy_inputs=dict(), cortex_vol_from_surf=dict(argstr='--surf-ctx-vol', ), default_color_table=dict(argstr='--ctab-default', - 
xor=('color_table_file', 'default_color_table', 'gca_color_table'), + xor=(u'color_table_file', u'default_color_table', u'gca_color_table'), ), empty=dict(argstr='--empty', ), @@ -49,7 +49,7 @@ def test_SegStatsReconAll_inputs(): frame=dict(argstr='--frame %d', ), gca_color_table=dict(argstr='--ctab-gca %s', - xor=('color_table_file', 'default_color_table', 'gca_color_table'), + xor=(u'color_table_file', u'default_color_table', u'gca_color_table'), ), ignore_exception=dict(nohash=True, usedefault=True, @@ -59,7 +59,7 @@ def test_SegStatsReconAll_inputs(): in_intensity=dict(argstr='--in %s --in-intensity-name %s', ), intensity_units=dict(argstr='--in-intensity-units %s', - requires=['in_intensity'], + requires=[u'in_intensity'], ), lh_orig_nofix=dict(mandatory=True, ), @@ -71,7 +71,7 @@ def test_SegStatsReconAll_inputs(): ), mask_file=dict(argstr='--mask %s', ), - mask_frame=dict(requires=['mask_file'], + mask_frame=dict(requires=[u'mask_file'], ), mask_invert=dict(argstr='--maskinvert', ), @@ -97,7 +97,7 @@ def test_SegStatsReconAll_inputs(): ), segmentation_file=dict(argstr='--seg %s', mandatory=True, - xor=('segmentation_file', 'annot', 'surf_label'), + xor=(u'segmentation_file', u'annot', u'surf_label'), ), sf_avg_file=dict(argstr='--sfavg %s', ), @@ -116,7 +116,7 @@ def test_SegStatsReconAll_inputs(): ), surf_label=dict(argstr='--slabel %s %s %s', mandatory=True, - xor=('segmentation_file', 'annot', 'surf_label'), + xor=(u'segmentation_file', u'annot', u'surf_label'), ), terminal_output=dict(nohash=True, ), diff --git a/nipype/interfaces/freesurfer/tests/test_auto_SegmentCC.py b/nipype/interfaces/freesurfer/tests/test_auto_SegmentCC.py index c1b6c6585f..a80169e881 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_SegmentCC.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_SegmentCC.py @@ -21,7 +21,7 @@ def test_SegmentCC_inputs(): out_file=dict(argstr='-o %s', hash_files=False, keep_extension=False, - name_source=['in_file'], + 
name_source=[u'in_file'], name_template='%s.auto.mgz', ), out_rotation=dict(argstr='-lta %s', diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Smooth.py b/nipype/interfaces/freesurfer/tests/test_auto_Smooth.py index 16fefa2873..e561128b75 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_Smooth.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_Smooth.py @@ -17,13 +17,13 @@ def test_Smooth_inputs(): ), num_iters=dict(argstr='--niters %d', mandatory=True, - xor=['surface_fwhm'], + xor=[u'surface_fwhm'], ), proj_frac=dict(argstr='--projfrac %s', - xor=['proj_frac_avg'], + xor=[u'proj_frac_avg'], ), proj_frac_avg=dict(argstr='--projfrac-avg %.2f %.2f %.2f', - xor=['proj_frac'], + xor=[u'proj_frac'], ), reg_file=dict(argstr='--reg %s', mandatory=True, @@ -34,8 +34,8 @@ def test_Smooth_inputs(): subjects_dir=dict(), surface_fwhm=dict(argstr='--fwhm %f', mandatory=True, - requires=['reg_file'], - xor=['num_iters'], + requires=[u'reg_file'], + xor=[u'num_iters'], ), terminal_output=dict(nohash=True, ), diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Sphere.py b/nipype/interfaces/freesurfer/tests/test_auto_Sphere.py index f66f910ea7..aaf4cc6ae5 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_Sphere.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_Sphere.py @@ -24,7 +24,7 @@ def test_Sphere_inputs(): num_threads=dict(), out_file=dict(argstr='%s', hash_files=False, - name_source=['in_file'], + name_source=[u'in_file'], name_template='%s.sphere', position=-1, ), diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Surface2VolTransform.py b/nipype/interfaces/freesurfer/tests/test_auto_Surface2VolTransform.py index 7da293e7bd..66cec288eb 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_Surface2VolTransform.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_Surface2VolTransform.py @@ -16,21 +16,21 @@ def test_Surface2VolTransform_inputs(): usedefault=True, ), mkmask=dict(argstr='--mkmask', - xor=['source_file'], + 
xor=[u'source_file'], ), projfrac=dict(argstr='--projfrac %s', ), reg_file=dict(argstr='--volreg %s', mandatory=True, - xor=['subject_id'], + xor=[u'subject_id'], ), source_file=dict(argstr='--surfval %s', copyfile=False, mandatory=True, - xor=['mkmask'], + xor=[u'mkmask'], ), subject_id=dict(argstr='--identity %s', - xor=['reg_file'], + xor=[u'reg_file'], ), subjects_dir=dict(argstr='--sd %s', ), @@ -42,12 +42,12 @@ def test_Surface2VolTransform_inputs(): ), transformed_file=dict(argstr='--outvol %s', hash_files=False, - name_source=['source_file'], + name_source=[u'source_file'], name_template='%s_asVol.nii', ), vertexvol_file=dict(argstr='--vtxvol %s', hash_files=False, - name_source=['source_file'], + name_source=[u'source_file'], name_template='%s_asVol_vertex.nii', ), ) diff --git a/nipype/interfaces/freesurfer/tests/test_auto_SurfaceSmooth.py b/nipype/interfaces/freesurfer/tests/test_auto_SurfaceSmooth.py index 92d145cfc9..c0430d2676 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_SurfaceSmooth.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_SurfaceSmooth.py @@ -13,7 +13,7 @@ def test_SurfaceSmooth_inputs(): usedefault=True, ), fwhm=dict(argstr='--fwhm %.4f', - xor=['smooth_iters'], + xor=[u'smooth_iters'], ), hemi=dict(argstr='--hemi %s', mandatory=True, @@ -30,7 +30,7 @@ def test_SurfaceSmooth_inputs(): reshape=dict(argstr='--reshape', ), smooth_iters=dict(argstr='--smooth %d', - xor=['fwhm'], + xor=[u'fwhm'], ), subject_id=dict(argstr='--s %s', mandatory=True, diff --git a/nipype/interfaces/freesurfer/tests/test_auto_SurfaceSnapshots.py b/nipype/interfaces/freesurfer/tests/test_auto_SurfaceSnapshots.py index 46411c9fca..f0a76a5d43 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_SurfaceSnapshots.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_SurfaceSnapshots.py @@ -5,10 +5,10 @@ def test_SurfaceSnapshots_inputs(): input_map = dict(annot_file=dict(argstr='-annotation %s', - xor=['annot_name'], + xor=[u'annot_name'], ), 
annot_name=dict(argstr='-annotation %s', - xor=['annot_file'], + xor=[u'annot_file'], ), args=dict(argstr='%s', ), @@ -24,7 +24,7 @@ def test_SurfaceSnapshots_inputs(): position=2, ), identity_reg=dict(argstr='-overlay-reg-identity', - xor=['overlay_reg', 'identity_reg', 'mni152_reg'], + xor=[u'overlay_reg', u'identity_reg', u'mni152_reg'], ), ignore_exception=dict(nohash=True, usedefault=True, @@ -32,29 +32,29 @@ def test_SurfaceSnapshots_inputs(): invert_overlay=dict(argstr='-invphaseflag 1', ), label_file=dict(argstr='-label %s', - xor=['label_name'], + xor=[u'label_name'], ), label_name=dict(argstr='-label %s', - xor=['label_file'], + xor=[u'label_file'], ), label_outline=dict(argstr='-label-outline', ), label_under=dict(argstr='-labels-under', ), mni152_reg=dict(argstr='-mni152reg', - xor=['overlay_reg', 'identity_reg', 'mni152_reg'], + xor=[u'overlay_reg', u'identity_reg', u'mni152_reg'], ), orig_suffix=dict(argstr='-orig %s', ), overlay=dict(argstr='-overlay %s', - requires=['overlay_range'], + requires=[u'overlay_range'], ), overlay_range=dict(argstr='%s', ), overlay_range_offset=dict(argstr='-foffset %.3f', ), overlay_reg=dict(argstr='-overlay-reg %s', - xor=['overlay_reg', 'identity_reg', 'mni152_reg'], + xor=[u'overlay_reg', u'identity_reg', u'mni152_reg'], ), patch_file=dict(argstr='-patch %s', ), @@ -66,15 +66,15 @@ def test_SurfaceSnapshots_inputs(): show_color_text=dict(argstr='-colscaletext 1', ), show_curv=dict(argstr='-curv', - xor=['show_gray_curv'], + xor=[u'show_gray_curv'], ), show_gray_curv=dict(argstr='-gray', - xor=['show_curv'], + xor=[u'show_curv'], ), six_images=dict(), sphere_suffix=dict(argstr='-sphere %s', ), - stem_template_args=dict(requires=['screenshot_stem'], + stem_template_args=dict(requires=[u'screenshot_stem'], ), subject_id=dict(argstr='%s', mandatory=True, diff --git a/nipype/interfaces/freesurfer/tests/test_auto_SurfaceTransform.py b/nipype/interfaces/freesurfer/tests/test_auto_SurfaceTransform.py index 
250f697402..c3a450476c 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_SurfaceTransform.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_SurfaceTransform.py @@ -24,17 +24,17 @@ def test_SurfaceTransform_inputs(): ), source_annot_file=dict(argstr='--sval-annot %s', mandatory=True, - xor=['source_file'], + xor=[u'source_file'], ), source_file=dict(argstr='--sval %s', mandatory=True, - xor=['source_annot_file'], + xor=[u'source_annot_file'], ), source_subject=dict(argstr='--srcsubject %s', mandatory=True, ), source_type=dict(argstr='--sfmt %s', - requires=['source_file'], + requires=[u'source_file'], ), subjects_dir=dict(), target_ico_order=dict(argstr='--trgicoorder %d', diff --git a/nipype/interfaces/freesurfer/tests/test_auto_Tkregister2.py b/nipype/interfaces/freesurfer/tests/test_auto_Tkregister2.py index 150bfac675..68b66e2e41 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_Tkregister2.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_Tkregister2.py @@ -14,10 +14,10 @@ def test_Tkregister2_inputs(): fsl_out=dict(argstr='--fslregout %s', ), fstal=dict(argstr='--fstal', - xor=['target_image', 'moving_image'], + xor=[u'target_image', u'moving_image'], ), fstarg=dict(argstr='--fstarg', - xor=['target_image'], + xor=[u'target_image'], ), ignore_exception=dict(nohash=True, usedefault=True, @@ -40,7 +40,7 @@ def test_Tkregister2_inputs(): ), subjects_dir=dict(), target_image=dict(argstr='--targ %s', - xor=['fstarg'], + xor=[u'fstarg'], ), terminal_output=dict(nohash=True, ), diff --git a/nipype/interfaces/freesurfer/tests/test_auto_UnpackSDICOMDir.py b/nipype/interfaces/freesurfer/tests/test_auto_UnpackSDICOMDir.py index 40e1c65378..ec4f0a79fa 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_UnpackSDICOMDir.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_UnpackSDICOMDir.py @@ -8,7 +8,7 @@ def test_UnpackSDICOMDir_inputs(): ), config=dict(argstr='-cfg %s', mandatory=True, - xor=('run_info', 'config', 'seq_config'), + 
xor=(u'run_info', u'config', u'seq_config'), ), dir_structure=dict(argstr='-%s', ), @@ -28,13 +28,13 @@ def test_UnpackSDICOMDir_inputs(): ), run_info=dict(argstr='-run %d %s %s %s', mandatory=True, - xor=('run_info', 'config', 'seq_config'), + xor=(u'run_info', u'config', u'seq_config'), ), scan_only=dict(argstr='-scanonly %s', ), seq_config=dict(argstr='-seqcfg %s', mandatory=True, - xor=('run_info', 'config', 'seq_config'), + xor=(u'run_info', u'config', u'seq_config'), ), source_dir=dict(argstr='-src %s', mandatory=True, diff --git a/nipype/interfaces/freesurfer/tests/test_auto_VolumeMask.py b/nipype/interfaces/freesurfer/tests/test_auto_VolumeMask.py index 945f639739..a893fc5acf 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_VolumeMask.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_VolumeMask.py @@ -6,7 +6,7 @@ def test_VolumeMask_inputs(): input_map = dict(args=dict(argstr='%s', ), - aseg=dict(xor=['in_aseg'], + aseg=dict(xor=[u'in_aseg'], ), copy_inputs=dict(), environ=dict(nohash=True, @@ -16,7 +16,7 @@ def test_VolumeMask_inputs(): usedefault=True, ), in_aseg=dict(argstr='--aseg_name %s', - xor=['aseg'], + xor=[u'aseg'], ), left_ribbonlabel=dict(argstr='--label_left_ribbon %d', mandatory=True, diff --git a/nipype/interfaces/freesurfer/tests/test_model.py b/nipype/interfaces/freesurfer/tests/test_model.py index 4a2fafa050..b1510e5335 100644 --- a/nipype/interfaces/freesurfer/tests/test_model.py +++ b/nipype/interfaces/freesurfer/tests/test_model.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/interfaces/freesurfer/tests/test_preprocess.py b/nipype/interfaces/freesurfer/tests/test_preprocess.py index d4e1021204..121c2e7f1b 100644 --- a/nipype/interfaces/freesurfer/tests/test_preprocess.py +++ b/nipype/interfaces/freesurfer/tests/test_preprocess.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: 
python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os diff --git a/nipype/interfaces/freesurfer/tests/test_utils.py b/nipype/interfaces/freesurfer/tests/test_utils.py index fca748e081..3457531a45 100644 --- a/nipype/interfaces/freesurfer/tests/test_utils.py +++ b/nipype/interfaces/freesurfer/tests/test_utils.py @@ -1,5 +1,8 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import open import os from tempfile import mkdtemp diff --git a/nipype/interfaces/freesurfer/utils.py b/nipype/interfaces/freesurfer/utils.py index efc48f7a6e..1f4b1e8a4c 100644 --- a/nipype/interfaces/freesurfer/utils.py +++ b/nipype/interfaces/freesurfer/utils.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Interfaces to assorted Freesurfer utility programs. 
@@ -9,18 +10,20 @@ >>> os.chdir(datadir) """ -__docformat__ = 'restructuredtext' - +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import str, open import os import re import shutil -from ..freesurfer.base import (FSCommand, FSTraitedSpec, - FSScriptCommand, FSScriptOutputSpec, - FSTraitedSpecOpenMP, FSCommandOpenMP) -from ..base import TraitedSpec, File, traits, OutputMultiPath, isdefined, CommandLine, CommandLineInputSpec from ...utils.filemanip import fname_presuffix, split_filename +from ..base import (TraitedSpec, File, traits, OutputMultiPath, isdefined, + CommandLine, CommandLineInputSpec) +from .base import (FSCommand, FSTraitedSpec, + FSScriptCommand, FSScriptOutputSpec, + FSTraitedSpecOpenMP, FSCommandOpenMP) +__docformat__ = 'restructuredtext' filemap = dict(cor='cor', mgh='mgh', mgz='mgz', minc='mnc', afni='brik', brik='brik', bshort='bshort', @@ -68,7 +71,7 @@ def copy2subjdir(cls, in_file, folder=None, basename=None, subject_id=None): def createoutputdirs(outputs): """create all output directories. 
If not created, some freesurfer interfaces fail""" - for output in outputs.itervalues(): + for output in list(outputs.values()): dirname = os.path.dirname(output) if not os.path.isdir(dirname): os.makedirs(dirname) @@ -189,6 +192,8 @@ class SampleToSurface(FSCommand): >>> sampler.inputs.sampling_method = "average" >>> sampler.inputs.sampling_range = 1 >>> sampler.inputs.sampling_units = "frac" + >>> sampler.cmdline # doctest: +ELLIPSIS +IGNORE_UNICODE + 'mri_vol2surf --hemi lh --o ...lh.cope1.mgz --reg register.dat --projfrac-avg 1.000 --mov cope1.nii.gz' >>> res = sampler.run() # doctest: +SKIP """ @@ -310,6 +315,8 @@ class SurfaceSmooth(FSCommand): >>> smoother.inputs.subject_id = "subj_1" >>> smoother.inputs.hemi = "lh" >>> smoother.inputs.fwhm = 5 + >>> smoother.cmdline # doctest: +ELLIPSIS +IGNORE_UNICODE + 'mri_surf2surf --cortex --fwhm 5.0000 --hemi lh --sval lh.cope1.mgz --tval ...lh.cope1_smooth5.mgz --s subj_1' >>> smoother.run() # doctest: +SKIP """ @@ -484,7 +491,7 @@ class Surface2VolTransform(FSCommand): >>> xfm2vol.inputs.hemi = 'lh' >>> xfm2vol.inputs.template_file = 'cope1.nii.gz' >>> xfm2vol.inputs.subjects_dir = '.' - >>> xfm2vol.cmdline + >>> xfm2vol.cmdline # doctest: +IGNORE_UNICODE 'mri_surf2vol --hemi lh --volreg register.mat --surfval lh.cope1.mgz --sd . --template cope1.nii.gz --outvol lh.cope1_asVol.nii --vtxvol lh.cope1_asVol_vertex.nii' >>> res = xfm2vol.run()# doctest: +SKIP @@ -896,7 +903,7 @@ def _list_outputs(self): return outputs def _gen_filename(self, name): - if name is 'out_file': + if name == 'out_file': return os.path.abspath(self._gen_outfilename()) else: return None @@ -964,7 +971,7 @@ def _list_outputs(self): return outputs def _gen_filename(self, name): - if name is 'out_file': + if name == 'out_file': return self._gen_outfilename() else: return None @@ -986,8 +993,8 @@ class MRIPretessInputSpec(FSTraitedSpec): '\'wm\' or a label value (e.g. 
127 for rh or 255 for lh)')) in_norm = File(exists=True, mandatory=True, position=-2, argstr='%s', desc=('the normalized, brain-extracted T1w image. Usually norm.mgz')) - out_file = File(position=-1, argstr='%s', genfile=True, - desc=('the output file after mri_pretess.')) + out_file = File(position=-1, argstr='%s', name_source=['in_filled'], name_template='%s_pretesswm', + keep_extension=True, desc='the output file after mri_pretess.') nocorners = traits.Bool(False, argstr='-nocorners', desc=('do not remove corner configurations' ' in addition to edge ones.')) @@ -1020,32 +1027,15 @@ class MRIPretess(FSCommand): >>> pretess.inputs.in_filled = 'wm.mgz' >>> pretess.inputs.in_norm = 'norm.mgz' >>> pretess.inputs.nocorners = True - >>> pretess.cmdline + >>> pretess.cmdline # doctest: +IGNORE_UNICODE 'mri_pretess -nocorners wm.mgz wm norm.mgz wm_pretesswm.mgz' >>> pretess.run() # doctest: +SKIP + """ _cmd = 'mri_pretess' input_spec = MRIPretessInputSpec output_spec = MRIPretessOutputSpec - def _list_outputs(self): - outputs = self.output_spec().get() - outputs['out_file'] = os.path.abspath(self._gen_outfilename()) - return outputs - - def _gen_filename(self, name): - if name is 'out_file': - return self._gen_outfilename() - else: - return None - - def _gen_outfilename(self): - if isdefined(self.inputs.out_file): - return self.inputs.out_file - else: - _, name, ext = split_filename(self.inputs.in_filled) - return name + '_pretess' + str(self.inputs.label) + ext - class MRIMarchingCubesInputSpec(FSTraitedSpec): """ @@ -1091,7 +1081,7 @@ def _list_outputs(self): return outputs def _gen_filename(self, name): - if name is 'out_file': + if name == 'out_file': return self._gen_outfilename() else: return None @@ -1165,7 +1155,7 @@ def _list_outputs(self): return outputs def _gen_filename(self, name): - if name is 'out_file': + if name == 'out_file': return self._gen_outfilename() else: return None @@ -1207,7 +1197,7 @@ class MakeAverageSubject(FSCommand): >>> from 
nipype.interfaces.freesurfer import MakeAverageSubject >>> avg = MakeAverageSubject(subjects_ids=['s1', 's2']) - >>> avg.cmdline + >>> avg.cmdline # doctest: +IGNORE_UNICODE 'make_average_subject --out average --subjects s1 s2' """ @@ -1242,7 +1232,7 @@ class ExtractMainComponent(CommandLine): >>> from nipype.interfaces.freesurfer import ExtractMainComponent >>> mcmp = ExtractMainComponent(in_file='lh.pial') - >>> mcmp.cmdline + >>> mcmp.cmdline # doctest: +IGNORE_UNICODE 'mris_extract_main_component lh.pial lh.maincmp' """ @@ -1305,7 +1295,7 @@ class Tkregister2(FSCommand): >>> tk2.inputs.moving_image = 'T1.mgz' >>> tk2.inputs.target_image = 'structural.nii' >>> tk2.inputs.reg_header = True - >>> tk2.cmdline + >>> tk2.cmdline # doctest: +IGNORE_UNICODE 'tkregister2 --mov T1.mgz --noedit --reg T1_to_native.dat --regheader \ --targ structural.nii' >>> tk2.run() # doctest: +SKIP @@ -1318,7 +1308,7 @@ class Tkregister2(FSCommand): >>> tk2 = Tkregister2() >>> tk2.inputs.moving_image = 'epi.nii' >>> tk2.inputs.fsl_in_matrix = 'flirt.mat' - >>> tk2.cmdline + >>> tk2.cmdline # doctest: +IGNORE_UNICODE 'tkregister2 --fsl flirt.mat --mov epi.nii --noedit --reg register.dat' >>> tk2.run() # doctest: +SKIP """ @@ -1372,11 +1362,11 @@ class AddXFormToHeader(FSCommand): >>> adder = AddXFormToHeader() >>> adder.inputs.in_file = 'norm.mgz' >>> adder.inputs.transform = 'trans.mat' - >>> adder.cmdline + >>> adder.cmdline # doctest: +IGNORE_UNICODE 'mri_add_xform_to_header trans.mat norm.mgz output.mgz' >>> adder.inputs.copy_name = True - >>> adder.cmdline + >>> adder.cmdline # doctest: +IGNORE_UNICODE 'mri_add_xform_to_header -c trans.mat norm.mgz output.mgz' >>> adder.run() # doctest: +SKIP @@ -1430,7 +1420,7 @@ class CheckTalairachAlignment(FSCommand): >>> checker.inputs.in_file = 'trans.mat' >>> checker.inputs.threshold = 0.005 - >>> checker.cmdline + >>> checker.cmdline # doctest: +IGNORE_UNICODE 'talairach_afd -T 0.005 -xfm trans.mat' >>> checker.run() # doctest: +SKIP @@ 
-1479,7 +1469,7 @@ class TalairachAVI(FSCommand): >>> example = TalairachAVI() >>> example.inputs.in_file = 'norm.mgz' >>> example.inputs.out_file = 'trans.mat' - >>> example.cmdline + >>> example.cmdline # doctest: +IGNORE_UNICODE 'talairach_avi --i norm.mgz --xfm trans.mat' >>> example.run() # doctest: +SKIP @@ -1510,7 +1500,7 @@ class TalairachQC(FSScriptCommand): >>> from nipype.interfaces.freesurfer import TalairachQC >>> qc = TalairachQC() >>> qc.inputs.log_file = 'dirs.txt' - >>> qc.cmdline + >>> qc.cmdline # doctest: +IGNORE_UNICODE 'tal_QC_AZS dirs.txt' """ _cmd = "tal_QC_AZS" @@ -1549,7 +1539,7 @@ class RemoveNeck(FSCommand): >>> remove_neck.inputs.in_file = 'norm.mgz' >>> remove_neck.inputs.transform = 'trans.mat' >>> remove_neck.inputs.template = 'trans.mat' - >>> remove_neck.cmdline + >>> remove_neck.cmdline # doctest: +IGNORE_UNICODE 'mri_remove_neck norm.mgz trans.mat trans.mat norm_noneck.mgz' """ _cmd = "mri_remove_neck" @@ -1689,7 +1679,7 @@ class Sphere(FSCommandOpenMP): >>> from nipype.interfaces.freesurfer import Sphere >>> sphere = Sphere() >>> sphere.inputs.in_file = 'lh.pial' - >>> sphere.cmdline + >>> sphere.cmdline # doctest: +IGNORE_UNICODE 'mris_sphere lh.pial lh.sphere' """ _cmd = 'mris_sphere' @@ -1813,7 +1803,7 @@ class EulerNumber(FSCommand): >>> from nipype.interfaces.freesurfer import EulerNumber >>> ft = EulerNumber() >>> ft.inputs.in_file = 'lh.pial' - >>> ft.cmdline + >>> ft.cmdline # doctest: +IGNORE_UNICODE 'mris_euler_number lh.pial' """ _cmd = 'mris_euler_number' @@ -1849,7 +1839,7 @@ class RemoveIntersection(FSCommand): >>> from nipype.interfaces.freesurfer import RemoveIntersection >>> ri = RemoveIntersection() >>> ri.inputs.in_file = 'lh.pial' - >>> ri.cmdline + >>> ri.cmdline # doctest: +IGNORE_UNICODE 'mris_remove_intersection lh.pial lh.pial' """ @@ -1945,7 +1935,7 @@ class MakeSurfaces(FSCommand): >>> makesurfaces.inputs.in_label = 'aparc+aseg.nii' >>> makesurfaces.inputs.in_T1 = 'T1.mgz' >>> 
makesurfaces.inputs.orig_pial = 'lh.pial' - >>> makesurfaces.cmdline + >>> makesurfaces.cmdline # doctest: +IGNORE_UNICODE 'mris_make_surfaces -T1 T1.mgz -orig pial -orig_pial pial 10335 lh' """ @@ -2078,7 +2068,7 @@ class Curvature(FSCommand): >>> curv = Curvature() >>> curv.inputs.in_file = 'lh.pial' >>> curv.inputs.save = True - >>> curv.cmdline + >>> curv.cmdline # doctest: +IGNORE_UNICODE 'mris_curvature -w lh.pial' """ @@ -2172,7 +2162,7 @@ class CurvatureStats(FSCommand): >>> curvstats.inputs.values = True >>> curvstats.inputs.min_max = True >>> curvstats.inputs.write = True - >>> curvstats.cmdline + >>> curvstats.cmdline # doctest: +IGNORE_UNICODE 'mris_curvature_stats -m -o lh.curv.stats -F pial -G --writeCurvatureFiles subject_id lh pial pial' """ @@ -2229,7 +2219,7 @@ class Jacobian(FSCommand): >>> jacobian = Jacobian() >>> jacobian.inputs.in_origsurf = 'lh.pial' >>> jacobian.inputs.in_mappedsurf = 'lh.pial' - >>> jacobian.cmdline + >>> jacobian.cmdline # doctest: +IGNORE_UNICODE 'mris_jacobian lh.pial lh.pial lh.jacobian' """ @@ -2366,7 +2356,7 @@ class VolumeMask(FSCommand): >>> volmask.inputs.rh_white = 'lh.pial' >>> volmask.inputs.subject_id = '10335' >>> volmask.inputs.save_ribbon = True - >>> volmask.cmdline + >>> volmask.cmdline # doctest: +IGNORE_UNICODE 'mris_volmask --label_left_ribbon 3 --label_left_white 2 --label_right_ribbon 42 --label_right_white 41 --save_ribbon 10335' """ @@ -2706,7 +2696,7 @@ class RelabelHypointensities(FSCommand): >>> relabelhypos.inputs.rh_white = 'lh.pial' >>> relabelhypos.inputs.surf_directory = '.' >>> relabelhypos.inputs.aseg = 'aseg.mgz' - >>> relabelhypos.cmdline + >>> relabelhypos.cmdline # doctest: +IGNORE_UNICODE 'mri_relabel_hypointensities aseg.mgz . 
aseg.hypos.mgz' """ @@ -2877,7 +2867,7 @@ class Apas2Aseg(FSCommand): >>> apas2aseg = Apas2Aseg() >>> apas2aseg.inputs.in_file = 'aseg.mgz' >>> apas2aseg.inputs.out_file = 'output.mgz' - >>> apas2aseg.cmdline + >>> apas2aseg.cmdline # doctest: +IGNORE_UNICODE 'apas2aseg --i aseg.mgz --o output.mgz' """ diff --git a/nipype/interfaces/fsl/__init__.py b/nipype/interfaces/fsl/__init__.py index 4ef2a04e85..072c8a8f43 100644 --- a/nipype/interfaces/fsl/__init__.py +++ b/nipype/interfaces/fsl/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The fsl module provides classes for interfacing with the `FSL diff --git a/nipype/interfaces/fsl/base.py b/nipype/interfaces/fsl/base.py index b4fedddc7d..548ef53e92 100644 --- a/nipype/interfaces/fsl/base.py +++ b/nipype/interfaces/fsl/base.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The fsl module provides classes for interfacing with the `FSL @@ -24,19 +25,17 @@ See the docstrings of the individual classes for examples. """ - -from builtins import object +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import open, object from glob import glob import os -import warnings -from ...utils.filemanip import fname_presuffix, split_filename, copyfile -from ..base import (traits, isdefined, - CommandLine, CommandLineInputSpec, TraitedSpec, - File, Directory, InputMultiPath, OutputMultiPath) +from ... 
import logging +from ...utils.filemanip import fname_presuffix +from ..base import traits, isdefined, CommandLine, CommandLineInputSpec -warn = warnings.warn +LOGGER = logging.getLogger('interface') class Info(object): @@ -113,8 +112,8 @@ def output_type(cls): try: return os.environ['FSLOUTPUTTYPE'] except KeyError: - warnings.warn(('FSL environment variables not set. setting output ' - 'type to NIFTI')) + LOGGER.warn('FSLOUTPUTTYPE environment variable is not set. ' + 'Setting FSLOUTPUTTYPE=NIFTI') return 'NIFTI' @staticmethod diff --git a/nipype/interfaces/fsl/dti.py b/nipype/interfaces/fsl/dti.py index eda1e667f3..ade78ac576 100644 --- a/nipype/interfaces/fsl/dti.py +++ b/nipype/interfaces/fsl/dti.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The fsl module provides classes for interfacing with the `FSL @@ -11,21 +12,16 @@ >>> os.chdir(datadir) """ - -from builtins import range +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import range, open import os -import shutil import warnings -from ... 
import LooseVersion +from ...utils.filemanip import fname_presuffix, split_filename, copyfile from ..base import (TraitedSpec, isdefined, File, Directory, InputMultiPath, OutputMultiPath, traits) -from ..fsl.base import (FSLCommand, FSLCommandInputSpec, Info) -from ...utils.filemanip import fname_presuffix, split_filename, copyfile - -warn = warnings.warn - +from .base import (FSLCommand, FSLCommandInputSpec, Info) class DTIFitInputSpec(FSLCommandInputSpec): dwi = File(exists=True, desc='diffusion weighted image data file', @@ -83,7 +79,7 @@ class DTIFit(FSLCommand): >>> dti.inputs.bvals = 'bvals' >>> dti.inputs.base_name = 'TP' >>> dti.inputs.mask = 'mask.nii' - >>> dti.cmdline + >>> dti.cmdline # doctest: +IGNORE_UNICODE 'dtifit -k diffusion.nii -o TP -m mask.nii -r bvecs -b bvals' """ @@ -312,7 +308,7 @@ class BEDPOSTX5(FSLXCommand): >>> from nipype.interfaces import fsl >>> bedp = fsl.BEDPOSTX5(bvecs='bvecs', bvals='bvals', dwi='diffusion.nii', ... mask='mask.nii', n_fibres=1) - >>> bedp.cmdline + >>> bedp.cmdline # doctest: +IGNORE_UNICODE 'bedpostx bedpostx --forcedir -n 1' """ @@ -522,7 +518,7 @@ class ProbTrackX(FSLCommand): target_masks = ['targets_MASK1.nii', 'targets_MASK2.nii'], \ thsamples='merged_thsamples.nii', fsamples='merged_fsamples.nii', phsamples='merged_phsamples.nii', \ out_dir='.') - >>> pbx.cmdline + >>> pbx.cmdline # doctest: +IGNORE_UNICODE 'probtrackx --forcedir -m mask.nii --mode=seedmask --nsamples=3 --nsteps=10 --opd --os2t --dir=. --samples=merged --seed=MASK_average_thal_right.nii --targetmasks=targets.txt --xfm=trans.mat' """ @@ -686,7 +682,7 @@ class ProbTrackX2(ProbTrackX): >>> pbx2.inputs.out_dir = '.' >>> pbx2.inputs.n_samples = 3 >>> pbx2.inputs.n_steps = 10 - >>> pbx2.cmdline + >>> pbx2.cmdline # doctest: +IGNORE_UNICODE 'probtrackx2 --forcedir -m nodif_brain_mask.nii.gz --nsamples=3 --nsteps=10 --opd --dir=. 
--samples=merged --seed=seed_source.nii.gz' """ _cmd = 'probtrackx2' @@ -758,7 +754,7 @@ class VecReg(FSLCommand): affine_mat='trans.mat', \ ref_vol='mni.nii', \ out_file='diffusion_vreg.nii') - >>> vreg.cmdline + >>> vreg.cmdline # doctest: +IGNORE_UNICODE 'vecreg -t trans.mat -i diffusion.nii -o diffusion_vreg.nii -r mni.nii' """ @@ -785,7 +781,7 @@ def _list_outputs(self): return outputs def _gen_filename(self, name): - if name is 'out_file': + if name == 'out_file': return self._list_outputs()[name] else: return None @@ -815,7 +811,7 @@ class ProjThresh(FSLCommand): >>> from nipype.interfaces import fsl >>> ldir = ['seeds_to_M1.nii', 'seeds_to_M2.nii'] >>> pThresh = fsl.ProjThresh(in_files=ldir, threshold=3) - >>> pThresh.cmdline + >>> pThresh.cmdline # doctest: +IGNORE_UNICODE 'proj_thresh seeds_to_M1.nii seeds_to_M2.nii 3' """ @@ -860,7 +856,7 @@ class FindTheBiggest(FSLCommand): >>> from nipype.interfaces import fsl >>> ldir = ['seeds_to_M1.nii', 'seeds_to_M2.nii'] >>> fBig = fsl.FindTheBiggest(in_files=ldir, out_file='biggestSegmentation') - >>> fBig.cmdline + >>> fBig.cmdline # doctest: +IGNORE_UNICODE 'find_the_biggest seeds_to_M1.nii seeds_to_M2.nii biggestSegmentation' """ @@ -882,7 +878,7 @@ def _list_outputs(self): return outputs def _gen_filename(self, name): - if name is 'out_file': + if name == 'out_file': return self._list_outputs()[name] else: return None diff --git a/nipype/interfaces/fsl/epi.py b/nipype/interfaces/fsl/epi.py index e528b9bbab..a798beecf7 100644 --- a/nipype/interfaces/fsl/epi.py +++ b/nipype/interfaces/fsl/epi.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The fsl module provides classes for interfacing with the `FSL @@ -11,21 +12,18 @@ ... 
'../../testing/data')) >>> os.chdir(datadir) """ +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import str import os -import warnings -from glob import glob - import numpy as np import nibabel as nib +import warnings -from ..fsl.base import FSLCommand, FSLCommandInputSpec, Info +from ...utils.filemanip import split_filename from ..base import (traits, TraitedSpec, InputMultiPath, File, - isdefined, Undefined) -from ...utils.filemanip import (load_json, save_json, split_filename, - fname_presuffix) - -warn = warnings.warn + isdefined) +from .base import FSLCommand, FSLCommandInputSpec class PrepareFieldmapInputSpec(FSLCommandInputSpec): @@ -70,7 +68,7 @@ class PrepareFieldmap(FSLCommand): >>> prepare.inputs.in_phase = "phase.nii" >>> prepare.inputs.in_magnitude = "magnitude.nii" >>> prepare.inputs.output_type = "NIFTI_GZ" - >>> prepare.cmdline #doctest: +ELLIPSIS + >>> prepare.cmdline # doctest: +ELLIPSIS +IGNORE_UNICODE 'fsl_prepare_fieldmap SIEMENS phase.nii magnitude.nii \ .../phase_fslprepared.nii.gz 2.460000' >>> res = prepare.run() # doctest: +SKIP @@ -233,7 +231,7 @@ class TOPUP(FSLCommand): >>> topup.inputs.in_file = "b0_b0rev.nii" >>> topup.inputs.encoding_file = "topup_encoding.txt" >>> topup.inputs.output_type = "NIFTI_GZ" - >>> topup.cmdline #doctest: +ELLIPSIS + >>> topup.cmdline # doctest: +ELLIPSIS +IGNORE_UNICODE 'topup --config=b02b0.cnf --datain=topup_encoding.txt \ --imain=b0_b0rev.nii --out=b0_b0rev_base --iout=b0_b0rev_corrected.nii.gz \ --fout=b0_b0rev_field.nii.gz --logout=b0_b0rev_topup.log' @@ -298,7 +296,7 @@ def _generate_encfile(self): line = [float(val[0] == encdir[0]) * direction for val in ['x', 'y', 'z']] + [durations[idx]] lines.append(line) - np.savetxt(out_file, np.array(lines), fmt='%d %d %d %.8f') + np.savetxt(out_file, np.array(lines), fmt=b'%d %d %d %.8f') return out_file def _overload_extension(self, value, name=None): @@ -361,7 +359,7 @@ class ApplyTOPUP(FSLCommand): >>> 
applytopup.inputs.in_topup_fieldcoef = "topup_fieldcoef.nii.gz" >>> applytopup.inputs.in_topup_movpar = "topup_movpar.txt" >>> applytopup.inputs.output_type = "NIFTI_GZ" - >>> applytopup.cmdline #doctest: +ELLIPSIS + >>> applytopup.cmdline # doctest: +ELLIPSIS +IGNORE_UNICODE 'applytopup --datain=topup_encoding.txt --imain=epi.nii,epi_rev.nii \ --inindex=1,2 --topup=topup --out=epi_corrected.nii.gz' >>> res = applytopup.run() # doctest: +SKIP @@ -463,7 +461,7 @@ class Eddy(FSLCommand): >>> eddy.inputs.in_acqp = 'epi_acqp.txt' >>> eddy.inputs.in_bvec = 'bvecs.scheme' >>> eddy.inputs.in_bval = 'bvals.scheme' - >>> eddy.cmdline #doctest: +ELLIPSIS + >>> eddy.cmdline # doctest: +ELLIPSIS +IGNORE_UNICODE 'eddy --acqp=epi_acqp.txt --bvals=bvals.scheme --bvecs=bvecs.scheme \ --imain=epi.nii --index=epi_index.txt --mask=epi_mask.nii \ --out=.../eddy_corrected' @@ -543,7 +541,7 @@ class SigLoss(FSLCommand): >>> sigloss.inputs.in_file = "phase.nii" >>> sigloss.inputs.echo_time = 0.03 >>> sigloss.inputs.output_type = "NIFTI_GZ" - >>> sigloss.cmdline #doctest: +ELLIPSIS + >>> sigloss.cmdline # doctest: +ELLIPSIS +IGNORE_UNICODE 'sigloss --te=0.030000 -i phase.nii -s .../phase_sigloss.nii.gz' >>> res = sigloss.run() # doctest: +SKIP @@ -646,7 +644,7 @@ class EpiReg(FSLCommand): >>> epireg.inputs.fmapmagbrain='fieldmap_mag_brain.nii' >>> epireg.inputs.echospacing=0.00067 >>> epireg.inputs.pedir='y' - >>> epireg.cmdline #doctest: +ELLIPSIS + >>> epireg.cmdline # doctest: +ELLIPSIS +IGNORE_UNICODE 'epi_reg --echospacing=0.000670 --fmap=fieldmap_phase_fslprepared.nii \ --fmapmag=fieldmap_mag.nii --fmapmagbrain=fieldmap_mag_brain.nii --noclean \ --pedir=y --epi=epi.nii --t1=T1.nii --t1brain=T1_brain.nii --out=epi2struct' @@ -755,7 +753,7 @@ class EPIDeWarp(FSLCommand): >>> dewarp.inputs.mag_file = "magnitude.nii" >>> dewarp.inputs.dph_file = "phase.nii" >>> dewarp.inputs.output_type = "NIFTI_GZ" - >>> dewarp.cmdline #doctest: +ELLIPSIS + >>> dewarp.cmdline # doctest: +ELLIPSIS 
+IGNORE_UNICODE 'epidewarp.fsl --mag magnitude.nii --dph phase.nii --epi functional.nii \ --esp 0.58 --exfdw .../exfdw.nii.gz --nocleanup --sigma 2 --tediff 2.46 \ --tmpdir .../temp --vsm .../vsm.nii.gz' @@ -848,7 +846,7 @@ class EddyCorrect(FSLCommand): >>> from nipype.interfaces.fsl import EddyCorrect >>> eddyc = EddyCorrect(in_file='diffusion.nii', ... out_file="diffusion_edc.nii", ref_num=0) - >>> eddyc.cmdline + >>> eddyc.cmdline # doctest: +IGNORE_UNICODE 'eddy_correct diffusion.nii diffusion_edc.nii 0' """ diff --git a/nipype/interfaces/fsl/maths.py b/nipype/interfaces/fsl/maths.py index 3d239f433f..6c01d6852d 100644 --- a/nipype/interfaces/fsl/maths.py +++ b/nipype/interfaces/fsl/maths.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -10,13 +11,12 @@ >>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data')) >>> os.chdir(datadir) """ - -from __future__ import division +from __future__ import print_function, division, unicode_literals, absolute_import import os import numpy as np from ..base import (TraitedSpec, File, traits, InputMultiPath, isdefined) -from ..fsl.base import FSLCommand, FSLCommandInputSpec +from .base import FSLCommand, FSLCommandInputSpec class MathsInput(FSLCommandInputSpec): @@ -317,7 +317,7 @@ class MultiImageMaths(MathsCommand): >>> maths.inputs.op_string = "-add %s -mul -1 -div %s" >>> maths.inputs.operand_files = ["functional2.nii", "functional3.nii"] >>> maths.inputs.out_file = "functional4.nii" - >>> maths.cmdline + >>> maths.cmdline # doctest: +IGNORE_UNICODE 'fslmaths functional.nii -add functional2.nii -mul -1 -div functional3.nii functional4.nii' """ diff --git a/nipype/interfaces/fsl/model.py b/nipype/interfaces/fsl/model.py index a075cacda9..13d7764ebb 100644 --- a/nipype/interfaces/fsl/model.py +++ b/nipype/interfaces/fsl/model.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: 
python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The fsl module provides classes for interfacing with the `FSL @@ -10,29 +11,24 @@ >>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data')) >>> os.chdir(datadir) """ - -from __future__ import print_function -from builtins import range +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import range, open import os from glob import glob -import warnings from shutil import rmtree import numpy as np - from nibabel import load from ... import LooseVersion -from .base import (FSLCommand, FSLCommandInputSpec, Info) +from ...utils.filemanip import list_to_filename, filename_to_list +from ...utils.misc import human_order_sorted from ..base import (load_template, File, traits, isdefined, TraitedSpec, BaseInterface, Directory, InputMultiPath, OutputMultiPath, BaseInterfaceInputSpec) -from ...utils.filemanip import (list_to_filename, filename_to_list) -from ...utils.misc import human_order_sorted - -warn = warnings.warn +from .base import FSLCommand, FSLCommandInputSpec, Info class Level1DesignInputSpec(BaseInterfaceInputSpec): @@ -887,7 +883,7 @@ class FLAMEO(FSLCommand): t_con_file='design.con', \ mask_file='mask.nii', \ run_mode='fe') - >>> flameo.cmdline + >>> flameo.cmdline # doctest: +IGNORE_UNICODE 'flameo --copefile=cope.nii.gz --covsplitfile=cov_split.mat --designfile=design.mat --ld=stats --maskfile=mask.nii --runmode=fe --tcontrastsfile=design.con --varcopefile=varcope.nii.gz' """ @@ -1489,7 +1485,7 @@ class MELODIC(FSLCommand): >>> melodic_setup.inputs.s_des = 'subjectDesign.mat' >>> melodic_setup.inputs.s_con = 'subjectDesign.con' >>> melodic_setup.inputs.out_dir = 'groupICA.out' - >>> melodic_setup.cmdline + >>> melodic_setup.cmdline # doctest: +IGNORE_UNICODE 'melodic -i functional.nii,functional2.nii,functional3.nii -a tica --bgthreshold=10.000000 --mmthresh=0.500000 --nobet -o groupICA.out --Ostats 
--Scon=subjectDesign.con --Sdes=subjectDesign.mat --Tcon=timeDesign.con --Tdes=timeDesign.mat --tr=1.500000' >>> melodic_setup.run() # doctest: +SKIP @@ -1544,7 +1540,7 @@ class SmoothEstimate(FSLCommand): >>> est = SmoothEstimate() >>> est.inputs.zstat_file = 'zstat1.nii.gz' >>> est.inputs.mask_file = 'mask.nii' - >>> est.cmdline + >>> est.cmdline # doctest: +IGNORE_UNICODE 'smoothest --mask=mask.nii --zstat=zstat1.nii.gz' """ @@ -1647,7 +1643,7 @@ class Cluster(FSLCommand): >>> cl.inputs.in_file = 'zstat1.nii.gz' >>> cl.inputs.out_localmax_txt_file = 'stats.txt' >>> cl.inputs.use_mm = True - >>> cl.cmdline + >>> cl.cmdline # doctest: +IGNORE_UNICODE 'cluster --in=zstat1.nii.gz --olmax=stats.txt --thresh=2.3000000000 --mm' """ @@ -1780,7 +1776,7 @@ class Randomise(FSLCommand): ------- >>> import nipype.interfaces.fsl as fsl >>> rand = fsl.Randomise(in_file='allFA.nii', mask = 'mask.nii', tcon='design.con', design_mat='design.mat') - >>> rand.cmdline + >>> rand.cmdline # doctest: +IGNORE_UNICODE 'randomise -i allFA.nii -o "tbss_" -d design.mat -t design.con -m mask.nii' """ @@ -1917,7 +1913,7 @@ class GLM(FSLCommand): ------- >>> import nipype.interfaces.fsl as fsl >>> glm = fsl.GLM(in_file='functional.nii', design='maps.nii', output_type='NIFTI') - >>> glm.cmdline + >>> glm.cmdline # doctest: +IGNORE_UNICODE 'fsl_glm -i functional.nii -d maps.nii -o functional_glm.nii' """ diff --git a/nipype/interfaces/fsl/preprocess.py b/nipype/interfaces/fsl/preprocess.py index 64d796539b..d5fad15905 100644 --- a/nipype/interfaces/fsl/preprocess.py +++ b/nipype/interfaces/fsl/preprocess.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The fsl module provides classes for interfacing with the `FSL @@ -10,25 +11,21 @@ >>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data')) >>> os.chdir(datadir) """ - -from __future__ import print_function -from 
__future__ import division -from builtins import range +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import range, open import os import os.path as op -import warnings +from warnings import warn import numpy as np from nibabel import load -from ..fsl.base import FSLCommand, FSLCommandInputSpec +from ...utils.filemanip import split_filename from ..base import (TraitedSpec, File, InputMultiPath, OutputMultiPath, Undefined, traits, - isdefined, OutputMultiPath) -from ...utils.filemanip import split_filename - -warn = warnings.warn + isdefined) +from .base import FSLCommand, FSLCommandInputSpec class BETInputSpec(FSLCommandInputSpec): @@ -536,7 +533,7 @@ class FLIRT(FSLCommand): >>> flt.inputs.in_file = 'structural.nii' >>> flt.inputs.reference = 'mni.nii' >>> flt.inputs.output_type = "NIFTI_GZ" - >>> flt.cmdline #doctest: +ELLIPSIS + >>> flt.cmdline # doctest: +ELLIPSIS +IGNORE_UNICODE 'flirt -in structural.nii -ref mni.nii -out structural_flirt.nii.gz -omat structural_flirt.mat -bins 640 -searchcost mutualinfo' >>> res = flt.run() #doctest: +SKIP @@ -1263,7 +1260,7 @@ class FUGUE(FSLCommand): >>> fugue.inputs.shift_in_file = 'vsm.nii' # Previously computed with fugue as well >>> fugue.inputs.unwarp_direction = 'y' >>> fugue.inputs.output_type = "NIFTI_GZ" - >>> fugue.cmdline #doctest: +ELLIPSIS + >>> fugue.cmdline # doctest: +ELLIPSIS +IGNORE_UNICODE 'fugue --in=epi.nii --mask=epi_mask.nii --loadshift=vsm.nii --unwarpdir=y --unwarp=epi_unwarped.nii.gz' >>> fugue.run() #doctest: +SKIP @@ -1278,7 +1275,7 @@ class FUGUE(FSLCommand): >>> fugue.inputs.shift_in_file = 'vsm.nii' # Previously computed with fugue as well >>> fugue.inputs.unwarp_direction = 'y' >>> fugue.inputs.output_type = "NIFTI_GZ" - >>> fugue.cmdline #doctest: +ELLIPSIS + >>> fugue.cmdline # doctest: +ELLIPSIS +IGNORE_UNICODE 'fugue --in=epi.nii --mask=epi_mask.nii --loadshift=vsm.nii --unwarpdir=y --warp=epi_warped.nii.gz' >>> fugue.run() #doctest: 
+SKIP @@ -1293,7 +1290,7 @@ class FUGUE(FSLCommand): >>> fugue.inputs.unwarp_direction = 'y' >>> fugue.inputs.save_shift = True >>> fugue.inputs.output_type = "NIFTI_GZ" - >>> fugue.cmdline #doctest: +ELLIPSIS + >>> fugue.cmdline # doctest: +ELLIPSIS +IGNORE_UNICODE 'fugue --dwelltoasym=0.9390243902 --mask=epi_mask.nii --phasemap=epi_phasediff.nii --saveshift=epi_phasediff_vsm.nii.gz --unwarpdir=y' >>> fugue.run() #doctest: +SKIP diff --git a/nipype/interfaces/fsl/tests/__init__.py b/nipype/interfaces/fsl/tests/__init__.py index e69de29bb2..40a96afc6f 100644 --- a/nipype/interfaces/fsl/tests/__init__.py +++ b/nipype/interfaces/fsl/tests/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/fsl/tests/test_FILMGLS.py b/nipype/interfaces/fsl/tests/test_FILMGLS.py index 8fe231cf79..96c5dab3c9 100644 --- a/nipype/interfaces/fsl/tests/test_FILMGLS.py +++ b/nipype/interfaces/fsl/tests/test_FILMGLS.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from nipype.testing import assert_equal from nipype.interfaces.fsl.model import FILMGLS, FILMGLSInputSpec diff --git a/nipype/interfaces/fsl/tests/test_Level1Design_functions.py b/nipype/interfaces/fsl/tests/test_Level1Design_functions.py index d9558e4684..886a4eaeea 100644 --- a/nipype/interfaces/fsl/tests/test_Level1Design_functions.py +++ b/nipype/interfaces/fsl/tests/test_Level1Design_functions.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- import os from nose.tools import assert_true from ...base import Undefined diff --git a/nipype/interfaces/fsl/tests/test_XFibres.py b/nipype/interfaces/fsl/tests/test_XFibres.py index c38f642f86..7192d31092 100644 --- a/nipype/interfaces/fsl/tests/test_XFibres.py +++ b/nipype/interfaces/fsl/tests/test_XFibres.py @@ -1,2 +1,3 @@ +# -*- coding: utf-8 -*- from nipype.testing import assert_equal from nipype.interfaces.fsl.dti import XFibres diff --git a/nipype/interfaces/fsl/tests/test_auto_ApplyTOPUP.py b/nipype/interfaces/fsl/tests/test_auto_ApplyTOPUP.py index 
f7a8f4983d..1f275d653d 100644 --- a/nipype/interfaces/fsl/tests/test_auto_ApplyTOPUP.py +++ b/nipype/interfaces/fsl/tests/test_auto_ApplyTOPUP.py @@ -26,17 +26,17 @@ def test_ApplyTOPUP_inputs(): ), in_topup_fieldcoef=dict(argstr='--topup=%s', copyfile=False, - requires=['in_topup_movpar'], + requires=[u'in_topup_movpar'], ), in_topup_movpar=dict(copyfile=False, - requires=['in_topup_fieldcoef'], + requires=[u'in_topup_fieldcoef'], ), interp=dict(argstr='--interp=%s', ), method=dict(argstr='--method=%s', ), out_corrected=dict(argstr='--out=%s', - name_source=['in_files'], + name_source=[u'in_files'], name_template='%s_corrected', ), output_type=dict(), diff --git a/nipype/interfaces/fsl/tests/test_auto_ApplyWarp.py b/nipype/interfaces/fsl/tests/test_auto_ApplyWarp.py index 47e2703cb6..6e4d9b7460 100644 --- a/nipype/interfaces/fsl/tests/test_auto_ApplyWarp.py +++ b/nipype/interfaces/fsl/tests/test_auto_ApplyWarp.py @@ -5,7 +5,7 @@ def test_ApplyWarp_inputs(): input_map = dict(abswarp=dict(argstr='--abs', - xor=['relwarp'], + xor=[u'relwarp'], ), args=dict(argstr='%s', ), @@ -44,7 +44,7 @@ def test_ApplyWarp_inputs(): ), relwarp=dict(argstr='--rel', position=-1, - xor=['abswarp'], + xor=[u'abswarp'], ), superlevel=dict(argstr='--superlevel=%s', ), diff --git a/nipype/interfaces/fsl/tests/test_auto_ApplyXfm.py b/nipype/interfaces/fsl/tests/test_auto_ApplyXfm.py index 897d6478ed..818f77004a 100644 --- a/nipype/interfaces/fsl/tests/test_auto_ApplyXfm.py +++ b/nipype/interfaces/fsl/tests/test_auto_ApplyXfm.py @@ -7,10 +7,10 @@ def test_ApplyXfm_inputs(): input_map = dict(angle_rep=dict(argstr='-anglerep %s', ), apply_isoxfm=dict(argstr='-applyisoxfm %f', - xor=['apply_xfm'], + xor=[u'apply_xfm'], ), apply_xfm=dict(argstr='-applyxfm', - requires=['in_matrix_file'], + requires=[u'in_matrix_file'], usedefault=True, ), args=dict(argstr='%s', @@ -81,19 +81,19 @@ def test_ApplyXfm_inputs(): ), out_file=dict(argstr='-out %s', hash_files=False, - name_source=['in_file'], + 
name_source=[u'in_file'], name_template='%s_flirt', position=2, ), out_log=dict(keep_extension=True, - name_source=['in_file'], + name_source=[u'in_file'], name_template='%s_flirt.log', - requires=['save_log'], + requires=[u'save_log'], ), out_matrix_file=dict(argstr='-omat %s', hash_files=False, keep_extension=True, - name_source=['in_file'], + name_source=[u'in_file'], name_template='%s_flirt.mat', position=3, ), diff --git a/nipype/interfaces/fsl/tests/test_auto_AvScale.py b/nipype/interfaces/fsl/tests/test_auto_AvScale.py index 8d728e63b9..c766d07ee0 100644 --- a/nipype/interfaces/fsl/tests/test_auto_AvScale.py +++ b/nipype/interfaces/fsl/tests/test_auto_AvScale.py @@ -17,7 +17,6 @@ def test_AvScale_inputs(): mat_file=dict(argstr='%s', position=-2, ), - output_type=dict(), ref_file=dict(argstr='%s', position=-1, ), diff --git a/nipype/interfaces/fsl/tests/test_auto_BEDPOSTX5.py b/nipype/interfaces/fsl/tests/test_auto_BEDPOSTX5.py index 9b02366022..ebad20e193 100644 --- a/nipype/interfaces/fsl/tests/test_auto_BEDPOSTX5.py +++ b/nipype/interfaces/fsl/tests/test_auto_BEDPOSTX5.py @@ -5,7 +5,7 @@ def test_BEDPOSTX5_inputs(): input_map = dict(all_ard=dict(argstr='--allard', - xor=('no_ard', 'all_ard'), + xor=(u'no_ard', u'all_ard'), ), args=dict(argstr='%s', ), @@ -18,7 +18,7 @@ def test_BEDPOSTX5_inputs(): bvecs=dict(mandatory=True, ), cnlinear=dict(argstr='--cnonlinear', - xor=('no_spat', 'non_linear', 'cnlinear'), + xor=(u'no_spat', u'non_linear', u'cnlinear'), ), dwi=dict(mandatory=True, ), @@ -26,10 +26,10 @@ def test_BEDPOSTX5_inputs(): usedefault=True, ), f0_ard=dict(argstr='--f0 --ardf0', - xor=['f0_noard', 'f0_ard', 'all_ard'], + xor=[u'f0_noard', u'f0_ard', u'all_ard'], ), f0_noard=dict(argstr='--f0', - xor=['f0_noard', 'f0_ard'], + xor=[u'f0_noard', u'f0_ard'], ), force_dir=dict(argstr='--forcedir', usedefault=True, @@ -55,13 +55,13 @@ def test_BEDPOSTX5_inputs(): n_jumps=dict(argstr='-j %d', ), no_ard=dict(argstr='--noard', - xor=('no_ard', 'all_ard'), + 
xor=(u'no_ard', u'all_ard'), ), no_spat=dict(argstr='--nospat', - xor=('no_spat', 'non_linear', 'cnlinear'), + xor=(u'no_spat', u'non_linear', u'cnlinear'), ), non_linear=dict(argstr='--nonlinear', - xor=('no_spat', 'non_linear', 'cnlinear'), + xor=(u'no_spat', u'non_linear', u'cnlinear'), ), out_dir=dict(argstr='%s', mandatory=True, diff --git a/nipype/interfaces/fsl/tests/test_auto_BET.py b/nipype/interfaces/fsl/tests/test_auto_BET.py index 9f91d76d2f..8c5bb1f672 100644 --- a/nipype/interfaces/fsl/tests/test_auto_BET.py +++ b/nipype/interfaces/fsl/tests/test_auto_BET.py @@ -15,7 +15,7 @@ def test_BET_inputs(): frac=dict(argstr='-f %.2f', ), functional=dict(argstr='-F', - xor=('functional', 'reduce_bias', 'robust', 'padding', 'remove_eyes', 'surfaces', 't2_guided'), + xor=(u'functional', u'reduce_bias', u'robust', u'padding', u'remove_eyes', u'surfaces', u't2_guided'), ), ignore_exception=dict(nohash=True, usedefault=True, @@ -39,27 +39,27 @@ def test_BET_inputs(): ), output_type=dict(), padding=dict(argstr='-Z', - xor=('functional', 'reduce_bias', 'robust', 'padding', 'remove_eyes', 'surfaces', 't2_guided'), + xor=(u'functional', u'reduce_bias', u'robust', u'padding', u'remove_eyes', u'surfaces', u't2_guided'), ), radius=dict(argstr='-r %d', units='mm', ), reduce_bias=dict(argstr='-B', - xor=('functional', 'reduce_bias', 'robust', 'padding', 'remove_eyes', 'surfaces', 't2_guided'), + xor=(u'functional', u'reduce_bias', u'robust', u'padding', u'remove_eyes', u'surfaces', u't2_guided'), ), remove_eyes=dict(argstr='-S', - xor=('functional', 'reduce_bias', 'robust', 'padding', 'remove_eyes', 'surfaces', 't2_guided'), + xor=(u'functional', u'reduce_bias', u'robust', u'padding', u'remove_eyes', u'surfaces', u't2_guided'), ), robust=dict(argstr='-R', - xor=('functional', 'reduce_bias', 'robust', 'padding', 'remove_eyes', 'surfaces', 't2_guided'), + xor=(u'functional', u'reduce_bias', u'robust', u'padding', u'remove_eyes', u'surfaces', u't2_guided'), ), 
skull=dict(argstr='-s', ), surfaces=dict(argstr='-A', - xor=('functional', 'reduce_bias', 'robust', 'padding', 'remove_eyes', 'surfaces', 't2_guided'), + xor=(u'functional', u'reduce_bias', u'robust', u'padding', u'remove_eyes', u'surfaces', u't2_guided'), ), t2_guided=dict(argstr='-A2 %s', - xor=('functional', 'reduce_bias', 'robust', 'padding', 'remove_eyes', 'surfaces', 't2_guided'), + xor=(u'functional', u'reduce_bias', u'robust', u'padding', u'remove_eyes', u'surfaces', u't2_guided'), ), terminal_output=dict(nohash=True, ), diff --git a/nipype/interfaces/fsl/tests/test_auto_BinaryMaths.py b/nipype/interfaces/fsl/tests/test_auto_BinaryMaths.py index dfc8dcec09..5a7b643712 100644 --- a/nipype/interfaces/fsl/tests/test_auto_BinaryMaths.py +++ b/nipype/interfaces/fsl/tests/test_auto_BinaryMaths.py @@ -25,12 +25,12 @@ def test_BinaryMaths_inputs(): operand_file=dict(argstr='%s', mandatory=True, position=5, - xor=['operand_value'], + xor=[u'operand_value'], ), operand_value=dict(argstr='%.8f', mandatory=True, position=5, - xor=['operand_file'], + xor=[u'operand_file'], ), operation=dict(argstr='-%s', mandatory=True, diff --git a/nipype/interfaces/fsl/tests/test_auto_Cluster.py b/nipype/interfaces/fsl/tests/test_auto_Cluster.py index 7b460a5fd6..726391670d 100644 --- a/nipype/interfaces/fsl/tests/test_auto_Cluster.py +++ b/nipype/interfaces/fsl/tests/test_auto_Cluster.py @@ -15,8 +15,12 @@ def test_Cluster_inputs(): environ=dict(nohash=True, usedefault=True, ), - find_min=dict(), - fractional=dict(), + find_min=dict(argstr='--min', + usedefault=True, + ), + fractional=dict(argstr='--fractional', + usedefault=True, + ), ignore_exception=dict(nohash=True, usedefault=True, ), @@ -24,8 +28,11 @@ def test_Cluster_inputs(): mandatory=True, ), minclustersize=dict(argstr='--minclustersize', + usedefault=True, + ), + no_table=dict(argstr='--no_table', + usedefault=True, ), - no_table=dict(), num_maxima=dict(argstr='--num=%d', ), out_index_file=dict(argstr='--oindex=%s', @@ 
-56,7 +63,7 @@ def test_Cluster_inputs(): peak_distance=dict(argstr='--peakdist=%.10f', ), pthreshold=dict(argstr='--pthresh=%.10f', - requires=['dlh', 'volume'], + requires=[u'dlh', u'volume'], ), std_space_file=dict(argstr='--stdvol=%s', ), @@ -65,7 +72,9 @@ def test_Cluster_inputs(): threshold=dict(argstr='--thresh=%.10f', mandatory=True, ), - use_mm=dict(), + use_mm=dict(argstr='--mm', + usedefault=True, + ), volume=dict(argstr='--volume=%d', ), warpfield_file=dict(argstr='--warpvol=%s', diff --git a/nipype/interfaces/fsl/tests/test_auto_Complex.py b/nipype/interfaces/fsl/tests/test_auto_Complex.py index eae95be846..293386f57d 100644 --- a/nipype/interfaces/fsl/tests/test_auto_Complex.py +++ b/nipype/interfaces/fsl/tests/test_auto_Complex.py @@ -8,7 +8,7 @@ def test_Complex_inputs(): ), complex_cartesian=dict(argstr='-complex', position=1, - xor=['real_polar', 'real_cartesian', 'complex_cartesian', 'complex_polar', 'complex_split', 'complex_merge'], + xor=[u'real_polar', u'real_cartesian', u'complex_cartesian', u'complex_polar', u'complex_split', u'complex_merge'], ), complex_in_file=dict(argstr='%s', position=2, @@ -18,20 +18,20 @@ def test_Complex_inputs(): ), complex_merge=dict(argstr='-complexmerge', position=1, - xor=['real_polar', 'real_cartesian', 'complex_cartesian', 'complex_polar', 'complex_split', 'complex_merge', 'start_vol', 'end_vol'], + xor=[u'real_polar', u'real_cartesian', u'complex_cartesian', u'complex_polar', u'complex_split', u'complex_merge', u'start_vol', u'end_vol'], ), complex_out_file=dict(argstr='%s', genfile=True, position=-3, - xor=['complex_out_file', 'magnitude_out_file', 'phase_out_file', 'real_out_file', 'imaginary_out_file', 'real_polar', 'real_cartesian'], + xor=[u'complex_out_file', u'magnitude_out_file', u'phase_out_file', u'real_out_file', u'imaginary_out_file', u'real_polar', u'real_cartesian'], ), complex_polar=dict(argstr='-complexpolar', position=1, - xor=['real_polar', 'real_cartesian', 'complex_cartesian', 
'complex_polar', 'complex_split', 'complex_merge'], + xor=[u'real_polar', u'real_cartesian', u'complex_cartesian', u'complex_polar', u'complex_split', u'complex_merge'], ), complex_split=dict(argstr='-complexsplit', position=1, - xor=['real_polar', 'real_cartesian', 'complex_cartesian', 'complex_polar', 'complex_split', 'complex_merge'], + xor=[u'real_polar', u'real_cartesian', u'complex_cartesian', u'complex_polar', u'complex_split', u'complex_merge'], ), end_vol=dict(argstr='%d', position=-1, @@ -48,7 +48,7 @@ def test_Complex_inputs(): imaginary_out_file=dict(argstr='%s', genfile=True, position=-3, - xor=['complex_out_file', 'magnitude_out_file', 'phase_out_file', 'real_polar', 'complex_cartesian', 'complex_polar', 'complex_split', 'complex_merge'], + xor=[u'complex_out_file', u'magnitude_out_file', u'phase_out_file', u'real_polar', u'complex_cartesian', u'complex_polar', u'complex_split', u'complex_merge'], ), magnitude_in_file=dict(argstr='%s', position=2, @@ -56,7 +56,7 @@ def test_Complex_inputs(): magnitude_out_file=dict(argstr='%s', genfile=True, position=-4, - xor=['complex_out_file', 'real_out_file', 'imaginary_out_file', 'real_cartesian', 'complex_cartesian', 'complex_polar', 'complex_split', 'complex_merge'], + xor=[u'complex_out_file', u'real_out_file', u'imaginary_out_file', u'real_cartesian', u'complex_cartesian', u'complex_polar', u'complex_split', u'complex_merge'], ), output_type=dict(), phase_in_file=dict(argstr='%s', @@ -65,11 +65,11 @@ def test_Complex_inputs(): phase_out_file=dict(argstr='%s', genfile=True, position=-3, - xor=['complex_out_file', 'real_out_file', 'imaginary_out_file', 'real_cartesian', 'complex_cartesian', 'complex_polar', 'complex_split', 'complex_merge'], + xor=[u'complex_out_file', u'real_out_file', u'imaginary_out_file', u'real_cartesian', u'complex_cartesian', u'complex_polar', u'complex_split', u'complex_merge'], ), real_cartesian=dict(argstr='-realcartesian', position=1, - xor=['real_polar', 'real_cartesian', 
'complex_cartesian', 'complex_polar', 'complex_split', 'complex_merge'], + xor=[u'real_polar', u'real_cartesian', u'complex_cartesian', u'complex_polar', u'complex_split', u'complex_merge'], ), real_in_file=dict(argstr='%s', position=2, @@ -77,11 +77,11 @@ def test_Complex_inputs(): real_out_file=dict(argstr='%s', genfile=True, position=-4, - xor=['complex_out_file', 'magnitude_out_file', 'phase_out_file', 'real_polar', 'complex_cartesian', 'complex_polar', 'complex_split', 'complex_merge'], + xor=[u'complex_out_file', u'magnitude_out_file', u'phase_out_file', u'real_polar', u'complex_cartesian', u'complex_polar', u'complex_split', u'complex_merge'], ), real_polar=dict(argstr='-realpolar', position=1, - xor=['real_polar', 'real_cartesian', 'complex_cartesian', 'complex_polar', 'complex_split', 'complex_merge'], + xor=[u'real_polar', u'real_cartesian', u'complex_cartesian', u'complex_polar', u'complex_split', u'complex_merge'], ), start_vol=dict(argstr='%d', position=-2, diff --git a/nipype/interfaces/fsl/tests/test_auto_ConvertWarp.py b/nipype/interfaces/fsl/tests/test_auto_ConvertWarp.py index d140396548..63d64a4914 100644 --- a/nipype/interfaces/fsl/tests/test_auto_ConvertWarp.py +++ b/nipype/interfaces/fsl/tests/test_auto_ConvertWarp.py @@ -5,7 +5,7 @@ def test_ConvertWarp_inputs(): input_map = dict(abswarp=dict(argstr='--abs', - xor=['relwarp'], + xor=[u'relwarp'], ), args=dict(argstr='%s', ), @@ -24,16 +24,16 @@ def test_ConvertWarp_inputs(): midmat=dict(argstr='--midmat=%s', ), out_abswarp=dict(argstr='--absout', - xor=['out_relwarp'], + xor=[u'out_relwarp'], ), out_file=dict(argstr='--out=%s', - name_source=['reference'], + name_source=[u'reference'], name_template='%s_concatwarp', output_name='out_file', position=-1, ), out_relwarp=dict(argstr='--relout', - xor=['out_abswarp'], + xor=[u'out_abswarp'], ), output_type=dict(), postmat=dict(argstr='--postmat=%s', @@ -45,10 +45,10 @@ def test_ConvertWarp_inputs(): position=1, ), relwarp=dict(argstr='--rel', - 
xor=['abswarp'], + xor=[u'abswarp'], ), shift_direction=dict(argstr='--shiftdir=%s', - requires=['shift_in_file'], + requires=[u'shift_in_file'], ), shift_in_file=dict(argstr='--shiftmap=%s', ), diff --git a/nipype/interfaces/fsl/tests/test_auto_ConvertXFM.py b/nipype/interfaces/fsl/tests/test_auto_ConvertXFM.py index 21bfe5ff1c..250b6f0a9f 100644 --- a/nipype/interfaces/fsl/tests/test_auto_ConvertXFM.py +++ b/nipype/interfaces/fsl/tests/test_auto_ConvertXFM.py @@ -8,16 +8,16 @@ def test_ConvertXFM_inputs(): ), concat_xfm=dict(argstr='-concat', position=-3, - requires=['in_file2'], - xor=['invert_xfm', 'concat_xfm', 'fix_scale_skew'], + requires=[u'in_file2'], + xor=[u'invert_xfm', u'concat_xfm', u'fix_scale_skew'], ), environ=dict(nohash=True, usedefault=True, ), fix_scale_skew=dict(argstr='-fixscaleskew', position=-3, - requires=['in_file2'], - xor=['invert_xfm', 'concat_xfm', 'fix_scale_skew'], + requires=[u'in_file2'], + xor=[u'invert_xfm', u'concat_xfm', u'fix_scale_skew'], ), ignore_exception=dict(nohash=True, usedefault=True, @@ -31,7 +31,7 @@ def test_ConvertXFM_inputs(): ), invert_xfm=dict(argstr='-inverse', position=-3, - xor=['invert_xfm', 'concat_xfm', 'fix_scale_skew'], + xor=[u'invert_xfm', u'concat_xfm', u'fix_scale_skew'], ), out_file=dict(argstr='-omat %s', genfile=True, diff --git a/nipype/interfaces/fsl/tests/test_auto_DilateImage.py b/nipype/interfaces/fsl/tests/test_auto_DilateImage.py index 7c0f3e9823..08db0833c9 100644 --- a/nipype/interfaces/fsl/tests/test_auto_DilateImage.py +++ b/nipype/interfaces/fsl/tests/test_auto_DilateImage.py @@ -21,14 +21,14 @@ def test_DilateImage_inputs(): ), kernel_file=dict(argstr='%s', position=5, - xor=['kernel_size'], + xor=[u'kernel_size'], ), kernel_shape=dict(argstr='-kernel %s', position=4, ), kernel_size=dict(argstr='%.4f', position=5, - xor=['kernel_file'], + xor=[u'kernel_file'], ), nan2zeros=dict(argstr='-nan', position=3, diff --git a/nipype/interfaces/fsl/tests/test_auto_Eddy.py 
b/nipype/interfaces/fsl/tests/test_auto_Eddy.py index 07b17244c9..4581fce029 100644 --- a/nipype/interfaces/fsl/tests/test_auto_Eddy.py +++ b/nipype/interfaces/fsl/tests/test_auto_Eddy.py @@ -35,9 +35,9 @@ def test_Eddy_inputs(): mandatory=True, ), in_topup_fieldcoef=dict(argstr='--topup=%s', - requires=['in_topup_movpar'], + requires=[u'in_topup_movpar'], ), - in_topup_movpar=dict(requires=['in_topup_fieldcoef'], + in_topup_movpar=dict(requires=[u'in_topup_fieldcoef'], ), method=dict(argstr='--resamp=%s', ), diff --git a/nipype/interfaces/fsl/tests/test_auto_EddyCorrect.py b/nipype/interfaces/fsl/tests/test_auto_EddyCorrect.py index b7f93f0b52..aab0b77983 100644 --- a/nipype/interfaces/fsl/tests/test_auto_EddyCorrect.py +++ b/nipype/interfaces/fsl/tests/test_auto_EddyCorrect.py @@ -17,7 +17,7 @@ def test_EddyCorrect_inputs(): position=0, ), out_file=dict(argstr='%s', - name_source=['in_file'], + name_source=[u'in_file'], name_template='%s_edc', output_name='eddy_corrected', position=1, diff --git a/nipype/interfaces/fsl/tests/test_auto_ErodeImage.py b/nipype/interfaces/fsl/tests/test_auto_ErodeImage.py index 3981afc1a5..a4649ada75 100644 --- a/nipype/interfaces/fsl/tests/test_auto_ErodeImage.py +++ b/nipype/interfaces/fsl/tests/test_auto_ErodeImage.py @@ -21,14 +21,14 @@ def test_ErodeImage_inputs(): ), kernel_file=dict(argstr='%s', position=5, - xor=['kernel_size'], + xor=[u'kernel_size'], ), kernel_shape=dict(argstr='-kernel %s', position=4, ), kernel_size=dict(argstr='%.4f', position=5, - xor=['kernel_file'], + xor=[u'kernel_file'], ), minimum_filter=dict(argstr='%s', position=6, diff --git a/nipype/interfaces/fsl/tests/test_auto_ExtractROI.py b/nipype/interfaces/fsl/tests/test_auto_ExtractROI.py index 4368a41256..7d0a407c17 100644 --- a/nipype/interfaces/fsl/tests/test_auto_ExtractROI.py +++ b/nipype/interfaces/fsl/tests/test_auto_ExtractROI.py @@ -8,7 +8,7 @@ def test_ExtractROI_inputs(): ), crop_list=dict(argstr='%s', position=2, - xor=['x_min', 'x_size', 
'y_min', 'y_size', 'z_min', 'z_size', 't_min', 't_size'], + xor=[u'x_min', u'x_size', u'y_min', u'y_size', u'z_min', u'z_size', u't_min', u't_size'], ), environ=dict(nohash=True, usedefault=True, diff --git a/nipype/interfaces/fsl/tests/test_auto_FIRST.py b/nipype/interfaces/fsl/tests/test_auto_FIRST.py index 876f89f5b6..344c0181f2 100644 --- a/nipype/interfaces/fsl/tests/test_auto_FIRST.py +++ b/nipype/interfaces/fsl/tests/test_auto_FIRST.py @@ -30,7 +30,7 @@ def test_FIRST_inputs(): method=dict(argstr='-m %s', position=4, usedefault=True, - xor=['method_as_numerical_threshold'], + xor=[u'method_as_numerical_threshold'], ), method_as_numerical_threshold=dict(argstr='-m %.4f', position=4, diff --git a/nipype/interfaces/fsl/tests/test_auto_FLIRT.py b/nipype/interfaces/fsl/tests/test_auto_FLIRT.py index 8bba532da8..3da1dff886 100644 --- a/nipype/interfaces/fsl/tests/test_auto_FLIRT.py +++ b/nipype/interfaces/fsl/tests/test_auto_FLIRT.py @@ -7,10 +7,10 @@ def test_FLIRT_inputs(): input_map = dict(angle_rep=dict(argstr='-anglerep %s', ), apply_isoxfm=dict(argstr='-applyisoxfm %f', - xor=['apply_xfm'], + xor=[u'apply_xfm'], ), apply_xfm=dict(argstr='-applyxfm', - requires=['in_matrix_file'], + requires=[u'in_matrix_file'], ), args=dict(argstr='%s', ), @@ -80,19 +80,19 @@ def test_FLIRT_inputs(): ), out_file=dict(argstr='-out %s', hash_files=False, - name_source=['in_file'], + name_source=[u'in_file'], name_template='%s_flirt', position=2, ), out_log=dict(keep_extension=True, - name_source=['in_file'], + name_source=[u'in_file'], name_template='%s_flirt.log', - requires=['save_log'], + requires=[u'save_log'], ), out_matrix_file=dict(argstr='-omat %s', hash_files=False, keep_extension=True, - name_source=['in_file'], + name_source=[u'in_file'], name_template='%s_flirt.mat', position=3, ), diff --git a/nipype/interfaces/fsl/tests/test_auto_FNIRT.py b/nipype/interfaces/fsl/tests/test_auto_FNIRT.py index f37e3b7eb2..316880f4c4 100644 --- 
a/nipype/interfaces/fsl/tests/test_auto_FNIRT.py +++ b/nipype/interfaces/fsl/tests/test_auto_FNIRT.py @@ -8,15 +8,15 @@ def test_FNIRT_inputs(): ), apply_inmask=dict(argstr='--applyinmask=%s', sep=',', - xor=['skip_inmask'], + xor=[u'skip_inmask'], ), apply_intensity_mapping=dict(argstr='--estint=%s', sep=',', - xor=['skip_intensity_mapping'], + xor=[u'skip_intensity_mapping'], ), apply_refmask=dict(argstr='--applyrefmask=%s', sep=',', - xor=['skip_refmask'], + xor=[u'skip_refmask'], ), args=dict(argstr='%s', ), @@ -98,15 +98,15 @@ def test_FNIRT_inputs(): skip_implicit_ref_masking=dict(argstr='--imprefm=0', ), skip_inmask=dict(argstr='--applyinmask=0', - xor=['apply_inmask'], + xor=[u'apply_inmask'], ), skip_intensity_mapping=dict(argstr='--estint=0', - xor=['apply_intensity_mapping'], + xor=[u'apply_intensity_mapping'], ), skip_lambda_ssq=dict(argstr='--ssqlambda=0', ), skip_refmask=dict(argstr='--applyrefmask=0', - xor=['apply_refmask'], + xor=[u'apply_refmask'], ), spline_order=dict(argstr='--splineorder=%d', ), diff --git a/nipype/interfaces/fsl/tests/test_auto_FSLXCommand.py b/nipype/interfaces/fsl/tests/test_auto_FSLXCommand.py index 57b06760d5..8a472a31f0 100644 --- a/nipype/interfaces/fsl/tests/test_auto_FSLXCommand.py +++ b/nipype/interfaces/fsl/tests/test_auto_FSLXCommand.py @@ -5,7 +5,7 @@ def test_FSLXCommand_inputs(): input_map = dict(all_ard=dict(argstr='--allard', - xor=('no_ard', 'all_ard'), + xor=(u'no_ard', u'all_ard'), ), args=dict(argstr='%s', ), @@ -20,7 +20,7 @@ def test_FSLXCommand_inputs(): mandatory=True, ), cnlinear=dict(argstr='--cnonlinear', - xor=('no_spat', 'non_linear', 'cnlinear'), + xor=(u'no_spat', u'non_linear', u'cnlinear'), ), dwi=dict(argstr='--data=%s', mandatory=True, @@ -29,10 +29,10 @@ def test_FSLXCommand_inputs(): usedefault=True, ), f0_ard=dict(argstr='--f0 --ardf0', - xor=['f0_noard', 'f0_ard', 'all_ard'], + xor=[u'f0_noard', u'f0_ard', u'all_ard'], ), f0_noard=dict(argstr='--f0', - xor=['f0_noard', 'f0_ard'], + 
xor=[u'f0_noard', u'f0_ard'], ), force_dir=dict(argstr='--forcedir', usedefault=True, @@ -57,13 +57,13 @@ def test_FSLXCommand_inputs(): n_jumps=dict(argstr='--njumps=%d', ), no_ard=dict(argstr='--noard', - xor=('no_ard', 'all_ard'), + xor=(u'no_ard', u'all_ard'), ), no_spat=dict(argstr='--nospat', - xor=('no_spat', 'non_linear', 'cnlinear'), + xor=(u'no_spat', u'non_linear', u'cnlinear'), ), non_linear=dict(argstr='--nonlinear', - xor=('no_spat', 'non_linear', 'cnlinear'), + xor=(u'no_spat', u'non_linear', u'cnlinear'), ), output_type=dict(), rician=dict(argstr='--rician', diff --git a/nipype/interfaces/fsl/tests/test_auto_FUGUE.py b/nipype/interfaces/fsl/tests/test_auto_FUGUE.py index 84de7126df..d9f1ef965c 100644 --- a/nipype/interfaces/fsl/tests/test_auto_FUGUE.py +++ b/nipype/interfaces/fsl/tests/test_auto_FUGUE.py @@ -28,10 +28,10 @@ def test_FUGUE_inputs(): fourier_order=dict(argstr='--fourier=%d', ), icorr=dict(argstr='--icorr', - requires=['shift_in_file'], + requires=[u'shift_in_file'], ), icorr_only=dict(argstr='--icorronly', - requires=['unwarped_file'], + requires=[u'unwarped_file'], ), ignore_exception=dict(nohash=True, usedefault=True, @@ -57,15 +57,15 @@ def test_FUGUE_inputs(): ), poly_order=dict(argstr='--poly=%d', ), - save_fmap=dict(xor=['save_unmasked_fmap'], + save_fmap=dict(xor=[u'save_unmasked_fmap'], ), - save_shift=dict(xor=['save_unmasked_shift'], + save_shift=dict(xor=[u'save_unmasked_shift'], ), save_unmasked_fmap=dict(argstr='--unmaskfmap', - xor=['save_fmap'], + xor=[u'save_fmap'], ), save_unmasked_shift=dict(argstr='--unmaskshift', - xor=['save_shift'], + xor=[u'save_shift'], ), shift_in_file=dict(argstr='--loadshift=%s', ), @@ -80,12 +80,12 @@ def test_FUGUE_inputs(): unwarp_direction=dict(argstr='--unwarpdir=%s', ), unwarped_file=dict(argstr='--unwarp=%s', - requires=['in_file'], - xor=['warped_file'], + requires=[u'in_file'], + xor=[u'warped_file'], ), warped_file=dict(argstr='--warp=%s', - requires=['in_file'], - 
xor=['unwarped_file'], + requires=[u'in_file'], + xor=[u'unwarped_file'], ), ) inputs = FUGUE.input_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_FilterRegressor.py b/nipype/interfaces/fsl/tests/test_auto_FilterRegressor.py index 2904b70798..664757a425 100644 --- a/nipype/interfaces/fsl/tests/test_auto_FilterRegressor.py +++ b/nipype/interfaces/fsl/tests/test_auto_FilterRegressor.py @@ -16,12 +16,12 @@ def test_FilterRegressor_inputs(): filter_all=dict(argstr="-f '%s'", mandatory=True, position=4, - xor=['filter_columns'], + xor=[u'filter_columns'], ), filter_columns=dict(argstr="-f '%s'", mandatory=True, position=4, - xor=['filter_all'], + xor=[u'filter_all'], ), ignore_exception=dict(nohash=True, usedefault=True, diff --git a/nipype/interfaces/fsl/tests/test_auto_InvWarp.py b/nipype/interfaces/fsl/tests/test_auto_InvWarp.py index ad367bf904..e719ec52bd 100644 --- a/nipype/interfaces/fsl/tests/test_auto_InvWarp.py +++ b/nipype/interfaces/fsl/tests/test_auto_InvWarp.py @@ -5,7 +5,7 @@ def test_InvWarp_inputs(): input_map = dict(absolute=dict(argstr='--abs', - xor=['relative'], + xor=[u'relative'], ), args=dict(argstr='%s', ), @@ -17,7 +17,7 @@ def test_InvWarp_inputs(): ), inverse_warp=dict(argstr='--out=%s', hash_files=False, - name_source=['warp'], + name_source=[u'warp'], name_template='%s_inverse', ), jacobian_max=dict(argstr='--jmax=%f', @@ -35,7 +35,7 @@ def test_InvWarp_inputs(): regularise=dict(argstr='--regularise=%f', ), relative=dict(argstr='--rel', - xor=['absolute'], + xor=[u'absolute'], ), terminal_output=dict(nohash=True, ), diff --git a/nipype/interfaces/fsl/tests/test_auto_IsotropicSmooth.py b/nipype/interfaces/fsl/tests/test_auto_IsotropicSmooth.py index 2d1023d674..ccff1d564a 100644 --- a/nipype/interfaces/fsl/tests/test_auto_IsotropicSmooth.py +++ b/nipype/interfaces/fsl/tests/test_auto_IsotropicSmooth.py @@ -12,7 +12,7 @@ def test_IsotropicSmooth_inputs(): fwhm=dict(argstr='-s %.5f', mandatory=True, position=4, - xor=['sigma'], + 
xor=[u'sigma'], ), ignore_exception=dict(nohash=True, usedefault=True, @@ -39,7 +39,7 @@ def test_IsotropicSmooth_inputs(): sigma=dict(argstr='-s %.5f', mandatory=True, position=4, - xor=['fwhm'], + xor=[u'fwhm'], ), terminal_output=dict(nohash=True, ), diff --git a/nipype/interfaces/fsl/tests/test_auto_Overlay.py b/nipype/interfaces/fsl/tests/test_auto_Overlay.py index 14257803be..568eaf9458 100644 --- a/nipype/interfaces/fsl/tests/test_auto_Overlay.py +++ b/nipype/interfaces/fsl/tests/test_auto_Overlay.py @@ -9,7 +9,7 @@ def test_Overlay_inputs(): auto_thresh_bg=dict(argstr='-a', mandatory=True, position=5, - xor=('auto_thresh_bg', 'full_bg_range', 'bg_thresh'), + xor=(u'auto_thresh_bg', u'full_bg_range', u'bg_thresh'), ), background_image=dict(argstr='%s', mandatory=True, @@ -18,7 +18,7 @@ def test_Overlay_inputs(): bg_thresh=dict(argstr='%.3f %.3f', mandatory=True, position=5, - xor=('auto_thresh_bg', 'full_bg_range', 'bg_thresh'), + xor=(u'auto_thresh_bg', u'full_bg_range', u'bg_thresh'), ), environ=dict(nohash=True, usedefault=True, @@ -26,7 +26,7 @@ def test_Overlay_inputs(): full_bg_range=dict(argstr='-A', mandatory=True, position=5, - xor=('auto_thresh_bg', 'full_bg_range', 'bg_thresh'), + xor=(u'auto_thresh_bg', u'full_bg_range', u'bg_thresh'), ), ignore_exception=dict(nohash=True, usedefault=True, @@ -43,7 +43,7 @@ def test_Overlay_inputs(): output_type=dict(), show_negative_stats=dict(argstr='%s', position=8, - xor=['stat_image2'], + xor=[u'stat_image2'], ), stat_image=dict(argstr='%s', mandatory=True, @@ -51,7 +51,7 @@ def test_Overlay_inputs(): ), stat_image2=dict(argstr='%s', position=9, - xor=['show_negative_stats'], + xor=[u'show_negative_stats'], ), stat_thresh=dict(argstr='%.2f %.2f', mandatory=True, diff --git a/nipype/interfaces/fsl/tests/test_auto_PRELUDE.py b/nipype/interfaces/fsl/tests/test_auto_PRELUDE.py index 434322da60..cbe934adb9 100644 --- a/nipype/interfaces/fsl/tests/test_auto_PRELUDE.py +++ 
b/nipype/interfaces/fsl/tests/test_auto_PRELUDE.py @@ -8,7 +8,7 @@ def test_PRELUDE_inputs(): ), complex_phase_file=dict(argstr='--complex=%s', mandatory=True, - xor=['magnitude_file', 'phase_file'], + xor=[u'magnitude_file', u'phase_file'], ), end=dict(argstr='--end=%d', ), @@ -25,7 +25,7 @@ def test_PRELUDE_inputs(): ), magnitude_file=dict(argstr='--abs=%s', mandatory=True, - xor=['complex_phase_file'], + xor=[u'complex_phase_file'], ), mask_file=dict(argstr='--mask=%s', ), @@ -34,13 +34,13 @@ def test_PRELUDE_inputs(): output_type=dict(), phase_file=dict(argstr='--phase=%s', mandatory=True, - xor=['complex_phase_file'], + xor=[u'complex_phase_file'], ), process2d=dict(argstr='--slices', - xor=['labelprocess2d'], + xor=[u'labelprocess2d'], ), process3d=dict(argstr='--force3D', - xor=['labelprocess2d', 'process2d'], + xor=[u'labelprocess2d', u'process2d'], ), rawphase_file=dict(argstr='--rawphase=%s', hash_files=False, diff --git a/nipype/interfaces/fsl/tests/test_auto_PlotTimeSeries.py b/nipype/interfaces/fsl/tests/test_auto_PlotTimeSeries.py index e8c28c68de..3eb196cbda 100644 --- a/nipype/interfaces/fsl/tests/test_auto_PlotTimeSeries.py +++ b/nipype/interfaces/fsl/tests/test_auto_PlotTimeSeries.py @@ -26,15 +26,15 @@ def test_PlotTimeSeries_inputs(): ), output_type=dict(), plot_finish=dict(argstr='--finish=%d', - xor=('plot_range',), + xor=(u'plot_range',), ), plot_range=dict(argstr='%s', - xor=('plot_start', 'plot_finish'), + xor=(u'plot_start', u'plot_finish'), ), plot_size=dict(argstr='%s', ), plot_start=dict(argstr='--start=%d', - xor=('plot_range',), + xor=(u'plot_range',), ), sci_notation=dict(argstr='--sci', ), @@ -48,13 +48,13 @@ def test_PlotTimeSeries_inputs(): usedefault=True, ), y_max=dict(argstr='--ymax=%.2f', - xor=('y_range',), + xor=(u'y_range',), ), y_min=dict(argstr='--ymin=%.2f', - xor=('y_range',), + xor=(u'y_range',), ), y_range=dict(argstr='%s', - xor=('y_min', 'y_max'), + xor=(u'y_min', u'y_max'), ), ) inputs = PlotTimeSeries.input_spec() 
diff --git a/nipype/interfaces/fsl/tests/test_auto_ProbTrackX2.py b/nipype/interfaces/fsl/tests/test_auto_ProbTrackX2.py index df69f76670..c507ab0223 100644 --- a/nipype/interfaces/fsl/tests/test_auto_ProbTrackX2.py +++ b/nipype/interfaces/fsl/tests/test_auto_ProbTrackX2.py @@ -58,10 +58,10 @@ def test_ProbTrackX2_inputs(): omatrix1=dict(argstr='--omatrix1', ), omatrix2=dict(argstr='--omatrix2', - requires=['target2'], + requires=[u'target2'], ), omatrix3=dict(argstr='--omatrix3', - requires=['target3', 'lrtarget3'], + requires=[u'target3', u'lrtarget3'], ), omatrix4=dict(argstr='--omatrix4', ), diff --git a/nipype/interfaces/fsl/tests/test_auto_RobustFOV.py b/nipype/interfaces/fsl/tests/test_auto_RobustFOV.py index d28c8845dd..114a6dad32 100644 --- a/nipype/interfaces/fsl/tests/test_auto_RobustFOV.py +++ b/nipype/interfaces/fsl/tests/test_auto_RobustFOV.py @@ -18,7 +18,7 @@ def test_RobustFOV_inputs(): ), out_roi=dict(argstr='-r %s', hash_files=False, - name_source=['in_file'], + name_source=[u'in_file'], name_template='%s_ROI', ), output_type=dict(), diff --git a/nipype/interfaces/fsl/tests/test_auto_Slicer.py b/nipype/interfaces/fsl/tests/test_auto_Slicer.py index edcaafaa30..d8801a102d 100644 --- a/nipype/interfaces/fsl/tests/test_auto_Slicer.py +++ b/nipype/interfaces/fsl/tests/test_auto_Slicer.py @@ -6,8 +6,8 @@ def test_Slicer_inputs(): input_map = dict(all_axial=dict(argstr='-A', position=10, - requires=['image_width'], - xor=('single_slice', 'middle_slices', 'all_axial', 'sample_axial'), + requires=[u'image_width'], + xor=(u'single_slice', u'middle_slices', u'all_axial', u'sample_axial'), ), args=dict(argstr='%s', ), @@ -42,7 +42,7 @@ def test_Slicer_inputs(): ), middle_slices=dict(argstr='-a', position=10, - xor=('single_slice', 'middle_slices', 'all_axial', 'sample_axial'), + xor=(u'single_slice', u'middle_slices', u'all_axial', u'sample_axial'), ), nearest_neighbour=dict(argstr='-n', position=8, @@ -55,8 +55,8 @@ def test_Slicer_inputs(): 
output_type=dict(), sample_axial=dict(argstr='-S %d', position=10, - requires=['image_width'], - xor=('single_slice', 'middle_slices', 'all_axial', 'sample_axial'), + requires=[u'image_width'], + xor=(u'single_slice', u'middle_slices', u'all_axial', u'sample_axial'), ), scaling=dict(argstr='-s %f', position=0, @@ -67,8 +67,8 @@ def test_Slicer_inputs(): ), single_slice=dict(argstr='-%s', position=10, - requires=['slice_number'], - xor=('single_slice', 'middle_slices', 'all_axial', 'sample_axial'), + requires=[u'slice_number'], + xor=(u'single_slice', u'middle_slices', u'all_axial', u'sample_axial'), ), slice_number=dict(argstr='-%d', position=11, diff --git a/nipype/interfaces/fsl/tests/test_auto_Smooth.py b/nipype/interfaces/fsl/tests/test_auto_Smooth.py index f1cebc39d7..69d6d3ebc4 100644 --- a/nipype/interfaces/fsl/tests/test_auto_Smooth.py +++ b/nipype/interfaces/fsl/tests/test_auto_Smooth.py @@ -12,7 +12,7 @@ def test_Smooth_inputs(): fwhm=dict(argstr='-kernel gauss %.03f -fmean', mandatory=True, position=1, - xor=['sigma'], + xor=[u'sigma'], ), ignore_exception=dict(nohash=True, usedefault=True, @@ -25,11 +25,11 @@ def test_Smooth_inputs(): sigma=dict(argstr='-kernel gauss %.03f -fmean', mandatory=True, position=1, - xor=['fwhm'], + xor=[u'fwhm'], ), smoothed_file=dict(argstr='%s', hash_files=False, - name_source=['in_file'], + name_source=[u'in_file'], name_template='%s_smooth', position=2, ), diff --git a/nipype/interfaces/fsl/tests/test_auto_SmoothEstimate.py b/nipype/interfaces/fsl/tests/test_auto_SmoothEstimate.py index 5c3f8c46b0..f9d6bae588 100644 --- a/nipype/interfaces/fsl/tests/test_auto_SmoothEstimate.py +++ b/nipype/interfaces/fsl/tests/test_auto_SmoothEstimate.py @@ -8,7 +8,7 @@ def test_SmoothEstimate_inputs(): ), dof=dict(argstr='--dof=%d', mandatory=True, - xor=['zstat_file'], + xor=[u'zstat_file'], ), environ=dict(nohash=True, usedefault=True, @@ -21,12 +21,12 @@ def test_SmoothEstimate_inputs(): ), output_type=dict(), 
residual_fit_file=dict(argstr='--res=%s', - requires=['dof'], + requires=[u'dof'], ), terminal_output=dict(nohash=True, ), zstat_file=dict(argstr='--zstat=%s', - xor=['dof'], + xor=[u'dof'], ), ) inputs = SmoothEstimate.input_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_SpatialFilter.py b/nipype/interfaces/fsl/tests/test_auto_SpatialFilter.py index ab605fed0b..dc32faef23 100644 --- a/nipype/interfaces/fsl/tests/test_auto_SpatialFilter.py +++ b/nipype/interfaces/fsl/tests/test_auto_SpatialFilter.py @@ -21,14 +21,14 @@ def test_SpatialFilter_inputs(): ), kernel_file=dict(argstr='%s', position=5, - xor=['kernel_size'], + xor=[u'kernel_size'], ), kernel_shape=dict(argstr='-kernel %s', position=4, ), kernel_size=dict(argstr='%.4f', position=5, - xor=['kernel_file'], + xor=[u'kernel_file'], ), nan2zeros=dict(argstr='-nan', position=3, diff --git a/nipype/interfaces/fsl/tests/test_auto_TOPUP.py b/nipype/interfaces/fsl/tests/test_auto_TOPUP.py index 3e097b26ab..b064a7e951 100644 --- a/nipype/interfaces/fsl/tests/test_auto_TOPUP.py +++ b/nipype/interfaces/fsl/tests/test_auto_TOPUP.py @@ -11,12 +11,12 @@ def test_TOPUP_inputs(): ), encoding_direction=dict(argstr='--datain=%s', mandatory=True, - requires=['readout_times'], - xor=['encoding_file'], + requires=[u'readout_times'], + xor=[u'encoding_file'], ), encoding_file=dict(argstr='--datain=%s', mandatory=True, - xor=['encoding_direction'], + xor=[u'encoding_direction'], ), environ=dict(nohash=True, usedefault=True, @@ -41,29 +41,29 @@ def test_TOPUP_inputs(): ), out_base=dict(argstr='--out=%s', hash_files=False, - name_source=['in_file'], + name_source=[u'in_file'], name_template='%s_base', ), out_corrected=dict(argstr='--iout=%s', hash_files=False, - name_source=['in_file'], + name_source=[u'in_file'], name_template='%s_corrected', ), out_field=dict(argstr='--fout=%s', hash_files=False, - name_source=['in_file'], + name_source=[u'in_file'], name_template='%s_field', ), out_logfile=dict(argstr='--logout=%s', 
hash_files=False, keep_extension=True, - name_source=['in_file'], + name_source=[u'in_file'], name_template='%s_topup.log', ), output_type=dict(), readout_times=dict(mandatory=True, - requires=['encoding_direction'], - xor=['encoding_file'], + requires=[u'encoding_direction'], + xor=[u'encoding_file'], ), reg_lambda=dict(argstr='--miter=%0.f', ), diff --git a/nipype/interfaces/fsl/tests/test_auto_Threshold.py b/nipype/interfaces/fsl/tests/test_auto_Threshold.py index dfaa3594bb..ca42e915d7 100644 --- a/nipype/interfaces/fsl/tests/test_auto_Threshold.py +++ b/nipype/interfaces/fsl/tests/test_auto_Threshold.py @@ -39,7 +39,7 @@ def test_Threshold_inputs(): mandatory=True, position=4, ), - use_nonzero_voxels=dict(requires=['use_robust_range'], + use_nonzero_voxels=dict(requires=[u'use_robust_range'], ), use_robust_range=dict(), ) diff --git a/nipype/interfaces/fsl/tests/test_auto_TractSkeleton.py b/nipype/interfaces/fsl/tests/test_auto_TractSkeleton.py index 3808504b9d..9f085d0065 100644 --- a/nipype/interfaces/fsl/tests/test_auto_TractSkeleton.py +++ b/nipype/interfaces/fsl/tests/test_auto_TractSkeleton.py @@ -23,10 +23,10 @@ def test_TractSkeleton_inputs(): ), output_type=dict(), project_data=dict(argstr='-p %.3f %s %s %s %s', - requires=['threshold', 'distance_map', 'data_file'], + requires=[u'threshold', u'distance_map', u'data_file'], ), projected_data=dict(), - search_mask_file=dict(xor=['use_cingulum_mask'], + search_mask_file=dict(xor=[u'use_cingulum_mask'], ), skeleton_file=dict(argstr='-o %s', ), @@ -34,7 +34,7 @@ def test_TractSkeleton_inputs(): ), threshold=dict(), use_cingulum_mask=dict(usedefault=True, - xor=['search_mask_file'], + xor=[u'search_mask_file'], ), ) inputs = TractSkeleton.input_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_WarpPoints.py b/nipype/interfaces/fsl/tests/test_auto_WarpPoints.py index 984b3f77a1..4731986dfa 100644 --- a/nipype/interfaces/fsl/tests/test_auto_WarpPoints.py +++ 
b/nipype/interfaces/fsl/tests/test_auto_WarpPoints.py @@ -7,10 +7,10 @@ def test_WarpPoints_inputs(): input_map = dict(args=dict(argstr='%s', ), coord_mm=dict(argstr='-mm', - xor=['coord_vox'], + xor=[u'coord_vox'], ), coord_vox=dict(argstr='-vox', - xor=['coord_mm'], + xor=[u'coord_mm'], ), dest_file=dict(argstr='-dest %s', mandatory=True, @@ -35,10 +35,10 @@ def test_WarpPoints_inputs(): terminal_output=dict(nohash=True, ), warp_file=dict(argstr='-warp %s', - xor=['xfm_file'], + xor=[u'xfm_file'], ), xfm_file=dict(argstr='-xfm %s', - xor=['warp_file'], + xor=[u'warp_file'], ), ) inputs = WarpPoints.input_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_WarpPointsToStd.py b/nipype/interfaces/fsl/tests/test_auto_WarpPointsToStd.py index f6ecd09f2e..ce27ac22ce 100644 --- a/nipype/interfaces/fsl/tests/test_auto_WarpPointsToStd.py +++ b/nipype/interfaces/fsl/tests/test_auto_WarpPointsToStd.py @@ -7,10 +7,10 @@ def test_WarpPointsToStd_inputs(): input_map = dict(args=dict(argstr='%s', ), coord_mm=dict(argstr='-mm', - xor=['coord_vox'], + xor=[u'coord_vox'], ), coord_vox=dict(argstr='-vox', - xor=['coord_mm'], + xor=[u'coord_mm'], ), environ=dict(nohash=True, usedefault=True, @@ -37,10 +37,10 @@ def test_WarpPointsToStd_inputs(): terminal_output=dict(nohash=True, ), warp_file=dict(argstr='-warp %s', - xor=['xfm_file'], + xor=[u'xfm_file'], ), xfm_file=dict(argstr='-xfm %s', - xor=['warp_file'], + xor=[u'warp_file'], ), ) inputs = WarpPointsToStd.input_spec() diff --git a/nipype/interfaces/fsl/tests/test_auto_WarpUtils.py b/nipype/interfaces/fsl/tests/test_auto_WarpUtils.py index 065e2b455b..7f8683b883 100644 --- a/nipype/interfaces/fsl/tests/test_auto_WarpUtils.py +++ b/nipype/interfaces/fsl/tests/test_auto_WarpUtils.py @@ -18,7 +18,7 @@ def test_WarpUtils_inputs(): knot_space=dict(argstr='--knotspace=%d,%d,%d', ), out_file=dict(argstr='--out=%s', - name_source=['in_file'], + name_source=[u'in_file'], output_name='out_file', position=-1, ), diff --git 
a/nipype/interfaces/fsl/tests/test_auto_XFibres5.py b/nipype/interfaces/fsl/tests/test_auto_XFibres5.py index f877d894e5..360e08061d 100644 --- a/nipype/interfaces/fsl/tests/test_auto_XFibres5.py +++ b/nipype/interfaces/fsl/tests/test_auto_XFibres5.py @@ -5,7 +5,7 @@ def test_XFibres5_inputs(): input_map = dict(all_ard=dict(argstr='--allard', - xor=('no_ard', 'all_ard'), + xor=(u'no_ard', u'all_ard'), ), args=dict(argstr='%s', ), @@ -20,7 +20,7 @@ def test_XFibres5_inputs(): mandatory=True, ), cnlinear=dict(argstr='--cnonlinear', - xor=('no_spat', 'non_linear', 'cnlinear'), + xor=(u'no_spat', u'non_linear', u'cnlinear'), ), dwi=dict(argstr='--data=%s', mandatory=True, @@ -29,10 +29,10 @@ def test_XFibres5_inputs(): usedefault=True, ), f0_ard=dict(argstr='--f0 --ardf0', - xor=['f0_noard', 'f0_ard', 'all_ard'], + xor=[u'f0_noard', u'f0_ard', u'all_ard'], ), f0_noard=dict(argstr='--f0', - xor=['f0_noard', 'f0_ard'], + xor=[u'f0_noard', u'f0_ard'], ), force_dir=dict(argstr='--forcedir', usedefault=True, @@ -59,13 +59,13 @@ def test_XFibres5_inputs(): n_jumps=dict(argstr='--njumps=%d', ), no_ard=dict(argstr='--noard', - xor=('no_ard', 'all_ard'), + xor=(u'no_ard', u'all_ard'), ), no_spat=dict(argstr='--nospat', - xor=('no_spat', 'non_linear', 'cnlinear'), + xor=(u'no_spat', u'non_linear', u'cnlinear'), ), non_linear=dict(argstr='--nonlinear', - xor=('no_spat', 'non_linear', 'cnlinear'), + xor=(u'no_spat', u'non_linear', u'cnlinear'), ), output_type=dict(), rician=dict(argstr='--rician', diff --git a/nipype/interfaces/fsl/tests/test_base.py b/nipype/interfaces/fsl/tests/test_base.py index a8ec012905..572b0297f8 100644 --- a/nipype/interfaces/fsl/tests/test_base.py +++ b/nipype/interfaces/fsl/tests/test_base.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os diff --git a/nipype/interfaces/fsl/tests/test_dti.py b/nipype/interfaces/fsl/tests/test_dti.py index 
414d79421b..00287ddd24 100644 --- a/nipype/interfaces/fsl/tests/test_dti.py +++ b/nipype/interfaces/fsl/tests/test_dti.py @@ -1,5 +1,9 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: +from builtins import open, range + import os import tempfile import shutil diff --git a/nipype/interfaces/fsl/tests/test_epi.py b/nipype/interfaces/fsl/tests/test_epi.py index 4f2b0ed2c3..32ab1b442e 100644 --- a/nipype/interfaces/fsl/tests/test_epi.py +++ b/nipype/interfaces/fsl/tests/test_epi.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os diff --git a/nipype/interfaces/fsl/tests/test_maths.py b/nipype/interfaces/fsl/tests/test_maths.py index 74189e2c10..d1affb8182 100644 --- a/nipype/interfaces/fsl/tests/test_maths.py +++ b/nipype/interfaces/fsl/tests/test_maths.py @@ -1,6 +1,9 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: from __future__ import division +from __future__ import unicode_literals +from builtins import open import os from tempfile import mkdtemp diff --git a/nipype/interfaces/fsl/tests/test_model.py b/nipype/interfaces/fsl/tests/test_model.py index c40814139c..b5a2ccda26 100644 --- a/nipype/interfaces/fsl/tests/test_model.py +++ b/nipype/interfaces/fsl/tests/test_model.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: from builtins import open diff --git a/nipype/interfaces/fsl/tests/test_preprocess.py b/nipype/interfaces/fsl/tests/test_preprocess.py index b52633b265..32f02642e5 100644 --- a/nipype/interfaces/fsl/tests/test_preprocess.py +++ 
b/nipype/interfaces/fsl/tests/test_preprocess.py @@ -1,6 +1,9 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals +from builtins import str # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from builtins import open +from builtins import open import os import tempfile diff --git a/nipype/interfaces/fsl/tests/test_utils.py b/nipype/interfaces/fsl/tests/test_utils.py index b55099c1a9..6a99ce480c 100644 --- a/nipype/interfaces/fsl/tests/test_utils.py +++ b/nipype/interfaces/fsl/tests/test_utils.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/interfaces/fsl/utils.py b/nipype/interfaces/fsl/utils.py index 6dfbbfb327..40f94a1c0f 100644 --- a/nipype/interfaces/fsl/utils.py +++ b/nipype/interfaces/fsl/utils.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The fsl module provides classes for interfacing with the `FSL @@ -15,27 +16,22 @@ datadir = os.path.realpath(os.path.join(filepath, '../testing/data')) os.chdir(datadir) """ - -from __future__ import division -from builtins import map -from builtins import range +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import map, range import os import os.path as op import re from glob import glob -import warnings import tempfile import numpy as np -from .base import FSLCommand, FSLCommandInputSpec, Info +from ...utils.filemanip import (load_json, save_json, split_filename, + fname_presuffix) from ..base import (traits, TraitedSpec, OutputMultiPath, File, CommandLine, CommandLineInputSpec, isdefined) -from ...utils.filemanip import (load_json, save_json, split_filename, - fname_presuffix, copyfile) - -warn = warnings.warn +from .base import FSLCommand,
FSLCommandInputSpec, Info class CopyGeomInputSpec(FSLCommandInputSpec): @@ -169,7 +165,7 @@ class Smooth(FSLCommand): >>> sm.inputs.output_type = 'NIFTI_GZ' >>> sm.inputs.in_file = 'functional2.nii' >>> sm.inputs.sigma = 8.0 - >>> sm.cmdline #doctest: +ELLIPSIS + >>> sm.cmdline # doctest: +ELLIPSIS +IGNORE_UNICODE 'fslmaths functional2.nii -kernel gauss 8.000 -fmean functional2_smooth.nii.gz' Setting the kernel width using fwhm: @@ -178,7 +174,7 @@ class Smooth(FSLCommand): >>> sm.inputs.output_type = 'NIFTI_GZ' >>> sm.inputs.in_file = 'functional2.nii' >>> sm.inputs.fwhm = 8.0 - >>> sm.cmdline #doctest: +ELLIPSIS + >>> sm.cmdline # doctest: +ELLIPSIS +IGNORE_UNICODE 'fslmaths functional2.nii -kernel gauss 3.397 -fmean functional2_smooth.nii.gz' One of sigma or fwhm must be set: @@ -241,10 +237,10 @@ class Merge(FSLCommand): >>> merger.inputs.in_files = ['functional2.nii', 'functional3.nii'] >>> merger.inputs.dimension = 't' >>> merger.inputs.output_type = 'NIFTI_GZ' - >>> merger.cmdline + >>> merger.cmdline # doctest: +IGNORE_UNICODE 'fslmerge -t functional2_merged.nii.gz functional2.nii functional3.nii' >>> merger.inputs.tr = 2.25 - >>> merger.cmdline + >>> merger.cmdline # doctest: +IGNORE_UNICODE 'fslmerge -tr functional2_merged.nii.gz functional2.nii functional3.nii 2.25' @@ -1155,7 +1151,7 @@ class ConvertXFM(FSLCommand): >>> invt.inputs.in_file = "flirt.mat" >>> invt.inputs.invert_xfm = True >>> invt.inputs.out_file = 'flirt_inv.mat' - >>> invt.cmdline + >>> invt.cmdline # doctest: +IGNORE_UNICODE 'convert_xfm -omat flirt_inv.mat -inverse flirt.mat' @@ -1459,7 +1455,7 @@ class InvWarp(FSLCommand): >>> invwarp.inputs.warp = "struct2mni.nii" >>> invwarp.inputs.reference = "anatomical.nii" >>> invwarp.inputs.output_type = "NIFTI_GZ" - >>> invwarp.cmdline + >>> invwarp.cmdline # doctest: +IGNORE_UNICODE 'invwarp --out=struct2mni_inverse.nii.gz --ref=anatomical.nii --warp=struct2mni.nii' >>> res = invwarp.run() # doctest: +SKIP @@ -1682,7 +1678,7 @@ class 
WarpUtils(FSLCommand): >>> warputils.inputs.out_format = 'spline' >>> warputils.inputs.warp_resolution = (10,10,10) >>> warputils.inputs.output_type = "NIFTI_GZ" - >>> warputils.cmdline # doctest: +ELLIPSIS + >>> warputils.cmdline # doctest: +ELLIPSIS +IGNORE_UNICODE 'fnirtfileutils --in=warpfield.nii --outformat=spline --ref=T1.nii --warpres=10.0000,10.0000,10.0000 --out=warpfield_coeffs.nii.gz' >>> res = invwarp.run() # doctest: +SKIP @@ -1813,7 +1809,7 @@ class ConvertWarp(FSLCommand): >>> warputils.inputs.reference = "T1.nii" >>> warputils.inputs.relwarp = True >>> warputils.inputs.output_type = "NIFTI_GZ" - >>> warputils.cmdline # doctest: +ELLIPSIS + >>> warputils.cmdline # doctest: +ELLIPSIS +IGNORE_UNICODE 'convertwarp --ref=T1.nii --rel --warp1=warpfield.nii --out=T1_concatwarp.nii.gz' >>> res = warputils.run() # doctest: +SKIP @@ -1871,7 +1867,7 @@ class WarpPoints(CommandLine): >>> warppoints.inputs.dest_file = 'T1.nii' >>> warppoints.inputs.warp_file = 'warpfield.nii' >>> warppoints.inputs.coord_mm = True - >>> warppoints.cmdline # doctest: +ELLIPSIS + >>> warppoints.cmdline # doctest: +ELLIPSIS +IGNORE_UNICODE 'img2imgcoord -mm -dest T1.nii -src epi.nii -warp warpfield.nii surf.txt' >>> res = warppoints.run() # doctest: +SKIP @@ -2029,7 +2025,7 @@ class WarpPointsToStd(WarpPoints): >>> warppoints.inputs.std_file = 'mni.nii' >>> warppoints.inputs.warp_file = 'warpfield.nii' >>> warppoints.inputs.coord_mm = True - >>> warppoints.cmdline # doctest: +ELLIPSIS + >>> warppoints.cmdline # doctest: +ELLIPSIS +IGNORE_UNICODE 'img2stdcoord -mm -img T1.nii -std mni.nii -warp warpfield.nii surf.txt' >>> res = warppoints.run() # doctest: +SKIP @@ -2079,7 +2075,7 @@ class MotionOutliers(FSLCommand): >>> from nipype.interfaces.fsl import MotionOutliers >>> mo = MotionOutliers() >>> mo.inputs.in_file = "epi.nii" - >>> mo.cmdline # doctest: +ELLIPSIS + >>> mo.cmdline # doctest: +ELLIPSIS +IGNORE_UNICODE 'fsl_motion_outliers -i epi.nii -o epi_outliers.txt -p 
epi_metrics.png -s epi_metrics.txt' >>> res = mo.run() # doctest: +SKIP """ diff --git a/nipype/interfaces/io.py b/nipype/interfaces/io.py index cf061ff84d..6986a519cd 100644 --- a/nipype/interfaces/io.py +++ b/nipype/interfaces/io.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Set of interfaces that allow interaction with data. Currently @@ -17,10 +18,8 @@ >>> os.chdir(datadir) """ - -from builtins import zip -from builtins import filter -from builtins import range +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import object, zip, filter, range, open, str import glob import fnmatch @@ -35,18 +34,12 @@ import sqlite3 -from .base import (TraitedSpec, traits, File, Directory, - BaseInterface, InputMultiPath, isdefined, - OutputMultiPath, DynamicTraitedSpec, - Undefined, BaseInterfaceInputSpec) -from .. import config -from ..external.six import string_types -from ..utils.filemanip import (copyfile, list_to_filename, - filename_to_list) -from ..utils.misc import human_order_sorted -from ..utils.misc import str2bool -from .. import logging -iflogger = logging.getLogger('interface') +from .. 
import config, logging +from ..utils.filemanip import copyfile, list_to_filename, filename_to_list +from ..utils.misc import human_order_sorted, str2bool +from .base import ( + TraitedSpec, traits, Str, File, Directory, BaseInterface, InputMultiPath, + isdefined, OutputMultiPath, DynamicTraitedSpec, Undefined, BaseInterfaceInputSpec) try: import pyxnat @@ -64,6 +57,7 @@ except: pass +iflogger = logging.getLogger('interface') def copytree(src, dst, use_hardlink=False): """Recursively copy a directory tree using @@ -165,7 +159,7 @@ def __call__(self, bytes_amount): with self._lock: self._seen_so_far += bytes_amount if self._size != 0: - percentage = (self._seen_so_far / self._size) * 100 + percentage = (self._seen_so_far // self._size) * 100 else: percentage = 0 progress_str = '%d / %d (%.2f%%)\r'\ @@ -184,27 +178,27 @@ class DataSinkInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): # Init inputspec data attributes base_directory = Directory( desc='Path to the base directory for storing data.') - container = traits.Str( + container = Str( desc='Folder within base directory in which to store output') parameterization = traits.Bool(True, usedefault=True, desc='store output in parametrized structure') strip_dir = Directory(desc='path to strip out of filename') - substitutions = InputMultiPath(traits.Tuple(traits.Str, traits.Str), + substitutions = InputMultiPath(traits.Tuple(Str, Str), desc=('List of 2-tuples reflecting string ' 'to substitute and string to replace ' 'it with')) regexp_substitutions = \ - InputMultiPath(traits.Tuple(traits.Str, traits.Str), + InputMultiPath(traits.Tuple(Str, Str), desc=('List of 2-tuples reflecting a pair of a '\ 'Python regexp pattern and a replacement '\ 'string. 
Invoked after string `substitutions`')) - _outputs = traits.Dict(traits.Str, value={}, usedefault=True) + _outputs = traits.Dict(Str, value={}, usedefault=True) remove_dest_dir = traits.Bool(False, usedefault=True, desc='remove dest directory when copying dirs') # AWS S3 data attributes - creds_path = traits.Str(desc='Filepath to AWS credentials file for S3 bucket '\ + creds_path = Str(desc='Filepath to AWS credentials file for S3 bucket '\ 'access; if not specified, the credentials will '\ 'be taken from the AWS_ACCESS_KEY_ID and '\ 'AWS_SECRET_ACCESS_KEY environment variables') @@ -213,7 +207,7 @@ class DataSinkInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): # Set this if user wishes to override the bucket with their own bucket = traits.Any(desc='Boto3 S3 bucket for manual override of bucket') # Set this if user wishes to have local copy of files as well - local_copy = traits.Str(desc='Copy files locally as well as to S3 bucket') + local_copy = Str(desc='Copy files locally as well as to S3 bucket') # Set call-able inputs attributes def __setattr__(self, key, value): @@ -699,7 +693,7 @@ def _list_outputs(self): raise(inst) # Iterate through outputs attributes {key : path(s)} - for key, files in self.inputs._outputs.items(): + for key, files in list(self.inputs._outputs.items()): if not isdefined(files): continue iflogger.debug("key: %s files: %s" % (key, str(files))) @@ -773,11 +767,11 @@ class S3DataGrabberInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): anon = traits.Bool(False, usedefault=True, desc='Use anonymous connection to s3. 
If this is set to True, boto may print' + ' a urlopen error, but this does not prevent data from being downloaded.') - region = traits.Str('us-east-1', usedefault=True, + region = Str('us-east-1', usedefault=True, desc='Region of s3 bucket') - bucket = traits.Str(mandatory=True, + bucket = Str(mandatory=True, desc='Amazon S3 bucket where your data is stored') - bucket_path = traits.Str('', usedefault=True, + bucket_path = Str('', usedefault=True, desc='Location within your bucket for subject data.') local_directory = Directory(exists=True, desc='Path to the local directory for subject data to be downloaded ' @@ -786,10 +780,10 @@ class S3DataGrabberInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): desc='Generate exception if list is empty for a given field') sort_filelist = traits.Bool(mandatory=True, desc='Sort the filelist that matches the template') - template = traits.Str(mandatory=True, + template = Str(mandatory=True, desc='Layout used to get files. Relative to bucket_path if defined.' 'Uses regex rather than glob style formatting.') - template_args = traits.Dict(key_trait=traits.Str, + template_args = traits.Dict(key_trait=Str, value_trait=traits.List(traits.List), desc='Information to plug into template') @@ -858,7 +852,7 @@ def _add_output_traits(self, base): Using traits.Any instead out OutputMultiPath till add_trait bug is fixed. 
""" - return add_traits(base, self.inputs.template_args.keys()) + return add_traits(base, list(self.inputs.template_args.keys())) def _list_outputs(self): # infields are mandatory, however I could not figure out how to set 'mandatory' flag dynamically @@ -878,7 +872,7 @@ def _list_outputs(self): bkt_files = list(k.key for k in bkt.list()) # keys are outfields, args are template args for the outfield - for key, args in self.inputs.template_args.items(): + for key, args in list(self.inputs.template_args.items()): outputs[key] = [] template = self.inputs.template if hasattr(self.inputs, 'field_template') and \ @@ -906,7 +900,7 @@ def _list_outputs(self): for argnum, arglist in enumerate(args): maxlen = 1 for arg in arglist: - if isinstance(arg, string_types) and hasattr(self.inputs, arg): + if isinstance(arg, (str, bytes)) and hasattr(self.inputs, arg): arg = getattr(self.inputs, arg) if isinstance(arg, list): if (maxlen > 1) and (len(arg) != maxlen): @@ -917,7 +911,7 @@ def _list_outputs(self): for i in range(maxlen): argtuple = [] for arg in arglist: - if isinstance(arg, string_types) and hasattr(self.inputs, arg): + if isinstance(arg, (str, bytes)) and hasattr(self.inputs, arg): arg = getattr(self.inputs, arg) if isinstance(arg, list): argtuple.append(arg[i]) @@ -953,7 +947,7 @@ def _list_outputs(self): # Outputs are currently stored as locations on S3. # We must convert to the local location specified # and download the files. 
- for key,val in outputs.iteritems(): + for key,val in outputs.items(): #This will basically be either list-like or string-like: #if it has the __iter__ attribute, it's list-like (list, #tuple, numpy array) and we iterate through each of its @@ -995,9 +989,9 @@ class DataGrabberInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): desc='Generate exception if list is empty for a given field') sort_filelist = traits.Bool(mandatory=True, desc='Sort the filelist that matches the template') - template = traits.Str(mandatory=True, + template = Str(mandatory=True, desc='Layout used to get files. relative to base directory if defined') - template_args = traits.Dict(key_trait=traits.Str, + template_args = traits.Dict(key_trait=Str, value_trait=traits.List(traits.List), desc='Information to plug into template') @@ -1145,7 +1139,7 @@ def _list_outputs(self): for argnum, arglist in enumerate(args): maxlen = 1 for arg in arglist: - if isinstance(arg, string_types) and hasattr(self.inputs, arg): + if isinstance(arg, (str, bytes)) and hasattr(self.inputs, arg): arg = getattr(self.inputs, arg) if isinstance(arg, list): if (maxlen > 1) and (len(arg) != maxlen): @@ -1156,7 +1150,7 @@ def _list_outputs(self): for i in range(maxlen): argtuple = [] for arg in arglist: - if isinstance(arg, string_types) and hasattr(self.inputs, arg): + if isinstance(arg, (str, bytes)) and hasattr(self.inputs, arg): arg = getattr(self.inputs, arg) if isinstance(arg, list): argtuple.append(arg[i]) @@ -1197,7 +1191,7 @@ class SelectFilesInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): desc="When matching mutliple files, return them in sorted order.") raise_on_empty = traits.Bool(True, usedefault=True, desc="Raise an exception if a template pattern matches no files.") - force_lists = traits.Either(traits.Bool(), traits.List(traits.Str()), + force_lists = traits.Either(traits.Bool(), traits.List(Str()), default=False, usedefault=True, desc=("Whether to return outputs as a list even when only one file 
" "matches the template. Either a boolean that applies to all " @@ -1225,7 +1219,7 @@ class SelectFiles(IOBase): ... "epi": "{subject_id}/func/f[0, 1].nii"} >>> dg = Node(SelectFiles(templates), "selectfiles") >>> dg.inputs.subject_id = "subj1" - >>> pprint.pprint(dg.outputs.get()) # doctest: +NORMALIZE_WHITESPACE + >>> pprint.pprint(dg.outputs.get()) # doctest: +NORMALIZE_WHITESPACE +IGNORE_UNICODE {'T1': , 'epi': } The same thing with dynamic grabbing of specific files: @@ -1260,7 +1254,7 @@ def __init__(self, templates, **kwargs): # Infer the infields and outfields from the template infields = [] - for name, template in templates.items(): + for name, template in list(templates.items()): for _, field_name, _, _ in string.Formatter().parse(template): if field_name is not None and field_name not in infields: infields.append(field_name) @@ -1298,7 +1292,7 @@ def _list_outputs(self): "'templates'.") % (plural, bad_fields, verb) raise ValueError(msg) - for field, template in self._templates.items(): + for field, template in list(self._templates.items()): # Build the full template path if isdefined(self.inputs.base_directory): @@ -1335,9 +1329,9 @@ def _list_outputs(self): class DataFinderInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): root_paths = traits.Either(traits.List(), - traits.Str(), + Str(), mandatory=True,) - match_regex = traits.Str('(.+)', + match_regex = Str('(.+)', usedefault=True, desc=("Regular expression for matching " "paths.")) @@ -1407,12 +1401,12 @@ def _match_path(self, target_path): for key in list(match_dict.keys()): self.result[key] = [] self.result['out_paths'].append(target_path) - for key, val in match_dict.items(): + for key, val in list(match_dict.items()): self.result[key].append(val) def _run_interface(self, runtime): # Prepare some of the inputs - if isinstance(self.inputs.root_paths, string_types): + if isinstance(self.inputs.root_paths, (str, bytes)): self.inputs.root_paths = [self.inputs.root_paths] self.match_regex = 
re.compile(self.inputs.match_regex) if self.inputs.max_depth is Undefined: @@ -1457,7 +1451,7 @@ def _run_interface(self, runtime): self._match_path(full_path) if (self.inputs.unpack_single and len(self.result['out_paths']) == 1): - for key, vals in self.result.items(): + for key, vals in list(self.result.items()): self.result[key] = vals[0] else: # sort all keys acording to out_paths @@ -1483,7 +1477,7 @@ def _list_outputs(self): class FSSourceInputSpec(BaseInterfaceInputSpec): subjects_dir = Directory(mandatory=True, desc='Freesurfer subjects directory.') - subject_id = traits.Str(mandatory=True, + subject_id = Str(mandatory=True, desc='Subject name for whom to retrieve data') hemi = traits.Enum('both', 'lh', 'rh', usedefault=True, desc='Selects hemisphere specific outputs') @@ -1626,26 +1620,26 @@ def _list_outputs(self): class XNATSourceInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): - query_template = traits.Str( + query_template = Str( mandatory=True, desc=('Layout used to get files. 
Relative to base ' 'directory if defined') ) query_template_args = traits.Dict( - traits.Str, + Str, traits.List(traits.List), value=dict(outfiles=[]), usedefault=True, desc='Information to plug into template' ) - server = traits.Str( + server = Str( mandatory=True, requires=['user', 'pwd'], xor=['config'] ) - user = traits.Str() + user = Str() pwd = traits.Password() config = File(mandatory=True, xor=['server']) @@ -1780,7 +1774,7 @@ def _list_outputs(self): for argnum, arglist in enumerate(args): maxlen = 1 for arg in arglist: - if isinstance(arg, string_types) and hasattr(self.inputs, arg): + if isinstance(arg, (str, bytes)) and hasattr(self.inputs, arg): arg = getattr(self.inputs, arg) if isinstance(arg, list): if (maxlen > 1) and (len(arg) != maxlen): @@ -1793,7 +1787,7 @@ def _list_outputs(self): for i in range(maxlen): argtuple = [] for arg in arglist: - if isinstance(arg, string_types) and \ + if isinstance(arg, (str, bytes)) and \ hasattr(self.inputs, arg): arg = getattr(self.inputs, arg) if isinstance(arg, list): @@ -1840,34 +1834,34 @@ def _list_outputs(self): class XNATSinkInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): - _outputs = traits.Dict(traits.Str, value={}, usedefault=True) + _outputs = traits.Dict(Str, value={}, usedefault=True) - server = traits.Str(mandatory=True, + server = Str(mandatory=True, requires=['user', 'pwd'], xor=['config'] ) - user = traits.Str() + user = Str() pwd = traits.Password() config = File(mandatory=True, xor=['server']) cache_dir = Directory(desc='') - project_id = traits.Str( + project_id = Str( desc='Project in which to store the outputs', mandatory=True) - subject_id = traits.Str( + subject_id = Str( desc='Set to subject id', mandatory=True) - experiment_id = traits.Str( + experiment_id = Str( desc='Set to workflow name', mandatory=True) - assessor_id = traits.Str( + assessor_id = Str( desc=('Option to customize ouputs representation in XNAT - ' 'assessor level will be used with specified id'), 
xor=['reconstruction_id'] ) - reconstruction_id = traits.Str( + reconstruction_id = Str( desc=('Option to customize ouputs representation in XNAT - ' 'reconstruction level will be used with specified id'), xor=['assessor_id'] @@ -2067,7 +2061,7 @@ def push_provenance(): class SQLiteSinkInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): database_file = File(exists=True, mandatory=True) - table_name = traits.Str(mandatory=True) + table_name = Str(mandatory=True) class SQLiteSink(IOBase): @@ -2114,16 +2108,16 @@ def _list_outputs(self): class MySQLSinkInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): - host = traits.Str('localhost', mandatory=True, + host = Str('localhost', mandatory=True, requires=['username', 'password'], xor=['config'], usedefault=True) config = File(mandatory=True, xor=['host'], desc="MySQL Options File (same format as my.cnf)") - database_name = traits.Str( + database_name = Str( mandatory=True, desc='Otherwise known as the schema name') - table_name = traits.Str(mandatory=True) - username = traits.Str() - password = traits.Str() + table_name = Str(mandatory=True) + username = Str() + password = Str() class MySQLSink(IOBase): @@ -2174,16 +2168,16 @@ def _list_outputs(self): class SSHDataGrabberInputSpec(DataGrabberInputSpec): - hostname = traits.Str(mandatory=True, desc='Server hostname.') - username = traits.Str(desc='Server username.') + hostname = Str(mandatory=True, desc='Server hostname.') + username = Str(desc='Server username.') password = traits.Password(desc='Server password.') download_files = traits.Bool(True, usedefault=True, desc='If false it will return the file names without downloading them') - base_directory = traits.Str(mandatory=True, + base_directory = Str(mandatory=True, desc='Path to the base directory consisting of subject data.') template_expression = traits.Enum(['fnmatch', 'regexp'], usedefault=True, desc='Use either fnmatch or regexp to express templates') - ssh_log_to_file = traits.Str('', usedefault=True, + 
ssh_log_to_file = Str('', usedefault=True, desc='If set SSH commands will be logged to the given file') @@ -2350,7 +2344,7 @@ def _list_outputs(self): for argnum, arglist in enumerate(args): maxlen = 1 for arg in arglist: - if isinstance(arg, string_types) and hasattr(self.inputs, arg): + if isinstance(arg, (str, bytes)) and hasattr(self.inputs, arg): arg = getattr(self.inputs, arg) if isinstance(arg, list): if (maxlen > 1) and (len(arg) != maxlen): @@ -2361,7 +2355,7 @@ def _list_outputs(self): for i in range(maxlen): argtuple = [] for arg in arglist: - if isinstance(arg, string_types) and hasattr(self.inputs, arg): + if isinstance(arg, (str, bytes)) and hasattr(self.inputs, arg): arg = getattr(self.inputs, arg) if isinstance(arg, list): argtuple.append(arg[i]) @@ -2454,12 +2448,12 @@ class JSONFileGrabber(IOBase): >>> jsonSource = JSONFileGrabber() >>> jsonSource.inputs.defaults = {'param1': 'overrideMe', 'param3': 1.0} >>> res = jsonSource.run() - >>> pprint.pprint(res.outputs.get()) + >>> pprint.pprint(res.outputs.get()) # doctest: +IGNORE_UNICODE {'param1': 'overrideMe', 'param3': 1.0} >>> jsonSource.inputs.in_file = 'jsongrabber.txt' >>> res = jsonSource.run() - >>> pprint.pprint(res.outputs.get()) # doctest: +NORMALIZE_WHITESPACE, +ELLIPSIS - {'param1': ...'exampleStr', 'param2': 4, 'param3': 1.0} + >>> pprint.pprint(res.outputs.get()) # doctest: +NORMALIZE_WHITESPACE, +ELLIPSIS +IGNORE_UNICODE + {'param1': 'exampleStr', 'param2': 4, 'param3': 1.0} """ @@ -2478,12 +2472,12 @@ def _list_outputs(self): if not isinstance(data, dict): raise RuntimeError('JSON input has no dictionary structure') - for key, value in data.items(): + for key, value in list(data.items()): outputs[key] = value if isdefined(self.inputs.defaults): defaults = self.inputs.defaults - for key, value in defaults.items(): + for key, value in list(defaults.items()): if key not in list(outputs.keys()): outputs[key] = value @@ -2588,7 +2582,8 @@ def _list_outputs(self): out_dict[key] = val with 
open(out_file, 'w') as f: - simplejson.dump(out_dict, f) + f.write(str(simplejson.dumps(out_dict, ensure_ascii=False))) + outputs = self.output_spec().get() outputs['out_file'] = out_file return outputs diff --git a/nipype/interfaces/matlab.py b/nipype/interfaces/matlab.py index a81076b31f..d3f6f26993 100644 --- a/nipype/interfaces/matlab.py +++ b/nipype/interfaces/matlab.py @@ -1,11 +1,14 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ General matlab interface code """ +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import open import os +from .. import config from .base import (CommandLineInputSpec, InputMultiPath, isdefined, CommandLine, traits, File, Directory) -from .. import config def get_matlab_command(): diff --git a/nipype/interfaces/meshfix.py b/nipype/interfaces/meshfix.py index f5a891465e..8cf2ae44d3 100644 --- a/nipype/interfaces/meshfix.py +++ b/nipype/interfaces/meshfix.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Fixes meshes: @@ -9,12 +10,12 @@ >>> os.chdir(datadir) """ +from __future__ import print_function, division, unicode_literals, absolute_import -from .base import (CommandLine, CommandLineInputSpec, - traits, TraitedSpec, isdefined, - File) import os.path as op from ..utils.filemanip import split_filename +from .base import (CommandLine, CommandLineInputSpec, + traits, TraitedSpec, isdefined, File) class MeshFixInputSpec(CommandLineInputSpec): @@ -104,7 +105,7 @@ class MeshFix(CommandLine): >>> fix.inputs.in_file1 = 'lh-pial.stl' >>> fix.inputs.in_file2 = 'rh-pial.stl' >>> fix.run() # doctest: +SKIP - >>> fix.cmdline + >>> fix.cmdline # doctest: +IGNORE_UNICODE 'meshfix lh-pial.stl rh-pial.stl -o lh-pial_fixed.off' """ _cmd = 'meshfix' @@ -127,7 +128,7 @@ def _list_outputs(self): 
return outputs def _gen_filename(self, name): - if name is 'out_filename': + if name == 'out_filename': return self._gen_outfilename() else: return None diff --git a/nipype/interfaces/minc/__init__.py b/nipype/interfaces/minc/__init__.py index 1f0ca08d50..d3235c987d 100644 --- a/nipype/interfaces/minc/__init__.py +++ b/nipype/interfaces/minc/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The minc module provides classes for interfacing with the `MINC diff --git a/nipype/interfaces/minc/base.py b/nipype/interfaces/minc/base.py index a557ed3e53..8edb87dce6 100644 --- a/nipype/interfaces/minc/base.py +++ b/nipype/interfaces/minc/base.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The minc module provides classes for interfacing with the `MINC @@ -7,28 +8,15 @@ Author: Carlo Hamalainen http://carlo-hamalainen.net """ - -from ..base import ( - TraitedSpec, - CommandLineInputSpec, - CommandLine, - StdOutCommandLineInputSpec, - StdOutCommandLine, - File, - Directory, - InputMultiPath, - OutputMultiPath, - traits, - isdefined, -) - -import glob +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import object import os import os.path -import re - import warnings -warn = warnings.warn + +from ..base import CommandLine + + warnings.filterwarnings('always', category=UserWarning) @@ -121,11 +109,11 @@ def aggregate_filename(files, new_suffix): >>> from nipype.interfaces.minc.base import aggregate_filename >>> f = aggregate_filename(['/tmp/foo1.mnc', '/tmp/foo2.mnc', '/tmp/foo3.mnc'], 'averaged') - >>> os.path.split(f)[1] # This has a full path, so just check the filename. + >>> os.path.split(f)[1] # This has a full path, so just check the filename. 
# doctest: +IGNORE_UNICODE 'foo_averaged.mnc' >>> f = aggregate_filename(['/tmp/foo1.mnc', '/tmp/blah1.mnc'], 'averaged') - >>> os.path.split(f)[1] # This has a full path, so just check the filename. + >>> os.path.split(f)[1] # This has a full path, so just check the filename. # doctest: +IGNORE_UNICODE 'foo1_averaged.mnc' """ diff --git a/nipype/interfaces/minc/minc.py b/nipype/interfaces/minc/minc.py index 36efd2eef3..a6fc9f5902 100644 --- a/nipype/interfaces/minc/minc.py +++ b/nipype/interfaces/minc/minc.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The minc module provides classes for interfacing with the `MINC @@ -15,30 +16,21 @@ >>> os.chdir(datadir) """ - -from ..base import ( - TraitedSpec, - CommandLineInputSpec, - CommandLine, - StdOutCommandLineInputSpec, - StdOutCommandLine, - File, - Directory, - InputMultiPath, - OutputMultiPath, - traits, - isdefined, -) +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import open import glob import os import os.path import re +import warnings -from ..minc.base import check_minc, no_minc, Info, aggregate_filename +from ..base import (TraitedSpec, CommandLineInputSpec, CommandLine, + StdOutCommandLineInputSpec, StdOutCommandLine, File, + Directory, InputMultiPath, OutputMultiPath, traits, + isdefined) +from .base import aggregate_filename -import warnings -warn = warnings.warn warnings.filterwarnings('always', category=UserWarning) diff --git a/nipype/interfaces/minc/testdata.py b/nipype/interfaces/minc/testdata.py index af62e42f88..c586bd6f99 100644 --- a/nipype/interfaces/minc/testdata.py +++ b/nipype/interfaces/minc/testdata.py @@ -1,3 +1,6 @@ +# -*- coding: utf-8 -*- +from __future__ import print_function, division, unicode_literals, absolute_import + import os from ...testing import example_data diff --git a/nipype/interfaces/minc/tests/__init__.py 
b/nipype/interfaces/minc/tests/__init__.py index e69de29bb2..40a96afc6f 100644 --- a/nipype/interfaces/minc/tests/__init__.py +++ b/nipype/interfaces/minc/tests/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/minc/tests/test_auto_Average.py b/nipype/interfaces/minc/tests/test_auto_Average.py index ffcc3d5a6b..614f16c1ad 100644 --- a/nipype/interfaces/minc/tests/test_auto_Average.py +++ b/nipype/interfaces/minc/tests/test_auto_Average.py @@ -15,13 +15,13 @@ def test_Average_inputs(): binvalue=dict(argstr='-binvalue %s', ), check_dimensions=dict(argstr='-check_dimensions', - xor=('check_dimensions', 'no_check_dimensions'), + xor=(u'check_dimensions', u'no_check_dimensions'), ), clobber=dict(argstr='-clobber', usedefault=True, ), copy_header=dict(argstr='-copy_header', - xor=('copy_header', 'no_copy_header'), + xor=(u'copy_header', u'no_copy_header'), ), debug=dict(argstr='-debug', ), @@ -30,34 +30,34 @@ def test_Average_inputs(): ), filelist=dict(argstr='-filelist %s', mandatory=True, - xor=('input_files', 'filelist'), + xor=(u'input_files', u'filelist'), ), format_byte=dict(argstr='-byte', - xor=('format_filetype', 'format_byte', 'format_short', 'format_int', 'format_long', 'format_float', 'format_double', 'format_signed', 'format_unsigned'), + xor=(u'format_filetype', u'format_byte', u'format_short', u'format_int', u'format_long', u'format_float', u'format_double', u'format_signed', u'format_unsigned'), ), format_double=dict(argstr='-double', - xor=('format_filetype', 'format_byte', 'format_short', 'format_int', 'format_long', 'format_float', 'format_double', 'format_signed', 'format_unsigned'), + xor=(u'format_filetype', u'format_byte', u'format_short', u'format_int', u'format_long', u'format_float', u'format_double', u'format_signed', u'format_unsigned'), ), format_filetype=dict(argstr='-filetype', - xor=('format_filetype', 'format_byte', 'format_short', 'format_int', 'format_long', 'format_float', 'format_double', 
'format_signed', 'format_unsigned'), + xor=(u'format_filetype', u'format_byte', u'format_short', u'format_int', u'format_long', u'format_float', u'format_double', u'format_signed', u'format_unsigned'), ), format_float=dict(argstr='-float', - xor=('format_filetype', 'format_byte', 'format_short', 'format_int', 'format_long', 'format_float', 'format_double', 'format_signed', 'format_unsigned'), + xor=(u'format_filetype', u'format_byte', u'format_short', u'format_int', u'format_long', u'format_float', u'format_double', u'format_signed', u'format_unsigned'), ), format_int=dict(argstr='-int', - xor=('format_filetype', 'format_byte', 'format_short', 'format_int', 'format_long', 'format_float', 'format_double', 'format_signed', 'format_unsigned'), + xor=(u'format_filetype', u'format_byte', u'format_short', u'format_int', u'format_long', u'format_float', u'format_double', u'format_signed', u'format_unsigned'), ), format_long=dict(argstr='-long', - xor=('format_filetype', 'format_byte', 'format_short', 'format_int', 'format_long', 'format_float', 'format_double', 'format_signed', 'format_unsigned'), + xor=(u'format_filetype', u'format_byte', u'format_short', u'format_int', u'format_long', u'format_float', u'format_double', u'format_signed', u'format_unsigned'), ), format_short=dict(argstr='-short', - xor=('format_filetype', 'format_byte', 'format_short', 'format_int', 'format_long', 'format_float', 'format_double', 'format_signed', 'format_unsigned'), + xor=(u'format_filetype', u'format_byte', u'format_short', u'format_int', u'format_long', u'format_float', u'format_double', u'format_signed', u'format_unsigned'), ), format_signed=dict(argstr='-signed', - xor=('format_filetype', 'format_byte', 'format_short', 'format_int', 'format_long', 'format_float', 'format_double', 'format_signed', 'format_unsigned'), + xor=(u'format_filetype', u'format_byte', u'format_short', u'format_int', u'format_long', u'format_float', u'format_double', u'format_signed', u'format_unsigned'), ), 
format_unsigned=dict(argstr='-unsigned', - xor=('format_filetype', 'format_byte', 'format_short', 'format_int', 'format_long', 'format_float', 'format_double', 'format_signed', 'format_unsigned'), + xor=(u'format_filetype', u'format_byte', u'format_short', u'format_int', u'format_long', u'format_float', u'format_double', u'format_signed', u'format_unsigned'), ), ignore_exception=dict(nohash=True, usedefault=True, @@ -66,31 +66,31 @@ def test_Average_inputs(): mandatory=True, position=-2, sep=' ', - xor=('input_files', 'filelist'), + xor=(u'input_files', u'filelist'), ), max_buffer_size_in_kb=dict(argstr='-max_buffer_size_in_kb %d', ), no_check_dimensions=dict(argstr='-nocheck_dimensions', - xor=('check_dimensions', 'no_check_dimensions'), + xor=(u'check_dimensions', u'no_check_dimensions'), ), no_copy_header=dict(argstr='-nocopy_header', - xor=('copy_header', 'no_copy_header'), + xor=(u'copy_header', u'no_copy_header'), ), nonormalize=dict(argstr='-nonormalize', - xor=('normalize', 'nonormalize'), + xor=(u'normalize', u'nonormalize'), ), normalize=dict(argstr='-normalize', - xor=('normalize', 'nonormalize'), + xor=(u'normalize', u'nonormalize'), ), output_file=dict(argstr='%s', genfile=True, hash_files=False, - name_source=['input_files'], + name_source=[u'input_files'], name_template='%s_averaged.mnc', position=-1, ), quiet=dict(argstr='-quiet', - xor=('verbose', 'quiet'), + xor=(u'verbose', u'quiet'), ), sdfile=dict(argstr='-sdfile %s', ), @@ -99,7 +99,7 @@ def test_Average_inputs(): two=dict(argstr='-2', ), verbose=dict(argstr='-verbose', - xor=('verbose', 'quiet'), + xor=(u'verbose', u'quiet'), ), voxel_range=dict(argstr='-range %d %d', ), @@ -107,7 +107,7 @@ def test_Average_inputs(): sep=',', ), width_weighted=dict(argstr='-width_weighted', - requires=('avgdim',), + requires=(u'avgdim',), ), ) inputs = Average.input_spec() diff --git a/nipype/interfaces/minc/tests/test_auto_BBox.py b/nipype/interfaces/minc/tests/test_auto_BBox.py index 8ea5f0b34b..cda7dbfb93 
100644 --- a/nipype/interfaces/minc/tests/test_auto_BBox.py +++ b/nipype/interfaces/minc/tests/test_auto_BBox.py @@ -23,7 +23,7 @@ def test_BBox_inputs(): position=-2, ), one_line=dict(argstr='-one_line', - xor=('one_line', 'two_lines'), + xor=(u'one_line', u'two_lines'), ), out_file=dict(argstr='> %s', genfile=True, @@ -31,7 +31,7 @@ def test_BBox_inputs(): ), output_file=dict(hash_files=False, keep_extension=False, - name_source=['input_file'], + name_source=[u'input_file'], name_template='%s_bbox.txt', position=-1, ), @@ -40,7 +40,7 @@ def test_BBox_inputs(): threshold=dict(argstr='-threshold', ), two_lines=dict(argstr='-two_lines', - xor=('one_line', 'two_lines'), + xor=(u'one_line', u'two_lines'), ), ) inputs = BBox.input_spec() diff --git a/nipype/interfaces/minc/tests/test_auto_Beast.py b/nipype/interfaces/minc/tests/test_auto_Beast.py index 563fa1eb79..edb859c367 100644 --- a/nipype/interfaces/minc/tests/test_auto_Beast.py +++ b/nipype/interfaces/minc/tests/test_auto_Beast.py @@ -44,7 +44,7 @@ def test_Beast_inputs(): ), output_file=dict(argstr='%s', hash_files=False, - name_source=['input_file'], + name_source=[u'input_file'], name_template='%s_beast_mask.mnc', position=-1, ), diff --git a/nipype/interfaces/minc/tests/test_auto_BestLinReg.py b/nipype/interfaces/minc/tests/test_auto_BestLinReg.py index 394a7d753a..dedb5d4108 100644 --- a/nipype/interfaces/minc/tests/test_auto_BestLinReg.py +++ b/nipype/interfaces/minc/tests/test_auto_BestLinReg.py @@ -19,7 +19,7 @@ def test_BestLinReg_inputs(): genfile=True, hash_files=False, keep_extension=False, - name_source=['source'], + name_source=[u'source'], name_template='%s_bestlinreg.mnc', position=-1, ), @@ -27,7 +27,7 @@ def test_BestLinReg_inputs(): genfile=True, hash_files=False, keep_extension=False, - name_source=['source'], + name_source=[u'source'], name_template='%s_bestlinreg.xfm', position=-2, ), diff --git a/nipype/interfaces/minc/tests/test_auto_BigAverage.py 
b/nipype/interfaces/minc/tests/test_auto_BigAverage.py index 1ddb3f2e08..b5fb561931 100644 --- a/nipype/interfaces/minc/tests/test_auto_BigAverage.py +++ b/nipype/interfaces/minc/tests/test_auto_BigAverage.py @@ -23,7 +23,7 @@ def test_BigAverage_inputs(): output_file=dict(argstr='%s', genfile=True, hash_files=False, - name_source=['input_files'], + name_source=[u'input_files'], name_template='%s_bigaverage.mnc', position=-1, ), @@ -33,7 +33,7 @@ def test_BigAverage_inputs(): ), sd_file=dict(argstr='--sdfile %s', hash_files=False, - name_source=['input_files'], + name_source=[u'input_files'], name_template='%s_bigaverage_stdev.mnc', ), terminal_output=dict(nohash=True, diff --git a/nipype/interfaces/minc/tests/test_auto_Blob.py b/nipype/interfaces/minc/tests/test_auto_Blob.py index 96050e4746..a4d92e3013 100644 --- a/nipype/interfaces/minc/tests/test_auto_Blob.py +++ b/nipype/interfaces/minc/tests/test_auto_Blob.py @@ -23,7 +23,7 @@ def test_Blob_inputs(): output_file=dict(argstr='%s', genfile=True, hash_files=False, - name_source=['input_file'], + name_source=[u'input_file'], name_template='%s_blob.mnc', position=-1, ), diff --git a/nipype/interfaces/minc/tests/test_auto_Blur.py b/nipype/interfaces/minc/tests/test_auto_Blur.py index 340523b0f4..c2a4eea061 100644 --- a/nipype/interfaces/minc/tests/test_auto_Blur.py +++ b/nipype/interfaces/minc/tests/test_auto_Blur.py @@ -16,14 +16,14 @@ def test_Blur_inputs(): ), fwhm=dict(argstr='-fwhm %s', mandatory=True, - xor=('fwhm', 'fwhm3d', 'standard_dev'), + xor=(u'fwhm', u'fwhm3d', u'standard_dev'), ), fwhm3d=dict(argstr='-3dfwhm %s %s %s', mandatory=True, - xor=('fwhm', 'fwhm3d', 'standard_dev'), + xor=(u'fwhm', u'fwhm3d', u'standard_dev'), ), gaussian=dict(argstr='-gaussian', - xor=('gaussian', 'rect'), + xor=(u'gaussian', u'rect'), ), gradient=dict(argstr='-gradient', ), @@ -42,11 +42,11 @@ def test_Blur_inputs(): partial=dict(argstr='-partial', ), rect=dict(argstr='-rect', - xor=('gaussian', 'rect'), + 
xor=(u'gaussian', u'rect'), ), standard_dev=dict(argstr='-standarddev %s', mandatory=True, - xor=('fwhm', 'fwhm3d', 'standard_dev'), + xor=(u'fwhm', u'fwhm3d', u'standard_dev'), ), terminal_output=dict(nohash=True, ), diff --git a/nipype/interfaces/minc/tests/test_auto_Calc.py b/nipype/interfaces/minc/tests/test_auto_Calc.py index 08168c0df2..58a18e6d7c 100644 --- a/nipype/interfaces/minc/tests/test_auto_Calc.py +++ b/nipype/interfaces/minc/tests/test_auto_Calc.py @@ -7,13 +7,13 @@ def test_Calc_inputs(): input_map = dict(args=dict(argstr='%s', ), check_dimensions=dict(argstr='-check_dimensions', - xor=('check_dimensions', 'no_check_dimensions'), + xor=(u'check_dimensions', u'no_check_dimensions'), ), clobber=dict(argstr='-clobber', usedefault=True, ), copy_header=dict(argstr='-copy_header', - xor=('copy_header', 'no_copy_header'), + xor=(u'copy_header', u'no_copy_header'), ), debug=dict(argstr='-debug', ), @@ -25,42 +25,42 @@ def test_Calc_inputs(): ), expfile=dict(argstr='-expfile %s', mandatory=True, - xor=('expression', 'expfile'), + xor=(u'expression', u'expfile'), ), expression=dict(argstr="-expression '%s'", mandatory=True, - xor=('expression', 'expfile'), + xor=(u'expression', u'expfile'), ), filelist=dict(argstr='-filelist %s', mandatory=True, - xor=('input_files', 'filelist'), + xor=(u'input_files', u'filelist'), ), format_byte=dict(argstr='-byte', - xor=('format_filetype', 'format_byte', 'format_short', 'format_int', 'format_long', 'format_float', 'format_double', 'format_signed', 'format_unsigned'), + xor=(u'format_filetype', u'format_byte', u'format_short', u'format_int', u'format_long', u'format_float', u'format_double', u'format_signed', u'format_unsigned'), ), format_double=dict(argstr='-double', - xor=('format_filetype', 'format_byte', 'format_short', 'format_int', 'format_long', 'format_float', 'format_double', 'format_signed', 'format_unsigned'), + xor=(u'format_filetype', u'format_byte', u'format_short', u'format_int', u'format_long', 
u'format_float', u'format_double', u'format_signed', u'format_unsigned'), ), format_filetype=dict(argstr='-filetype', - xor=('format_filetype', 'format_byte', 'format_short', 'format_int', 'format_long', 'format_float', 'format_double', 'format_signed', 'format_unsigned'), + xor=(u'format_filetype', u'format_byte', u'format_short', u'format_int', u'format_long', u'format_float', u'format_double', u'format_signed', u'format_unsigned'), ), format_float=dict(argstr='-float', - xor=('format_filetype', 'format_byte', 'format_short', 'format_int', 'format_long', 'format_float', 'format_double', 'format_signed', 'format_unsigned'), + xor=(u'format_filetype', u'format_byte', u'format_short', u'format_int', u'format_long', u'format_float', u'format_double', u'format_signed', u'format_unsigned'), ), format_int=dict(argstr='-int', - xor=('format_filetype', 'format_byte', 'format_short', 'format_int', 'format_long', 'format_float', 'format_double', 'format_signed', 'format_unsigned'), + xor=(u'format_filetype', u'format_byte', u'format_short', u'format_int', u'format_long', u'format_float', u'format_double', u'format_signed', u'format_unsigned'), ), format_long=dict(argstr='-long', - xor=('format_filetype', 'format_byte', 'format_short', 'format_int', 'format_long', 'format_float', 'format_double', 'format_signed', 'format_unsigned'), + xor=(u'format_filetype', u'format_byte', u'format_short', u'format_int', u'format_long', u'format_float', u'format_double', u'format_signed', u'format_unsigned'), ), format_short=dict(argstr='-short', - xor=('format_filetype', 'format_byte', 'format_short', 'format_int', 'format_long', 'format_float', 'format_double', 'format_signed', 'format_unsigned'), + xor=(u'format_filetype', u'format_byte', u'format_short', u'format_int', u'format_long', u'format_float', u'format_double', u'format_signed', u'format_unsigned'), ), format_signed=dict(argstr='-signed', - xor=('format_filetype', 'format_byte', 'format_short', 'format_int', 'format_long', 
'format_float', 'format_double', 'format_signed', 'format_unsigned'), + xor=(u'format_filetype', u'format_byte', u'format_short', u'format_int', u'format_long', u'format_float', u'format_double', u'format_signed', u'format_unsigned'), ), format_unsigned=dict(argstr='-unsigned', - xor=('format_filetype', 'format_byte', 'format_short', 'format_int', 'format_long', 'format_float', 'format_double', 'format_signed', 'format_unsigned'), + xor=(u'format_filetype', u'format_byte', u'format_short', u'format_int', u'format_long', u'format_float', u'format_double', u'format_signed', u'format_unsigned'), ), ignore_exception=dict(nohash=True, usedefault=True, @@ -76,39 +76,39 @@ def test_Calc_inputs(): usedefault=False, ), no_check_dimensions=dict(argstr='-nocheck_dimensions', - xor=('check_dimensions', 'no_check_dimensions'), + xor=(u'check_dimensions', u'no_check_dimensions'), ), no_copy_header=dict(argstr='-nocopy_header', - xor=('copy_header', 'no_copy_header'), + xor=(u'copy_header', u'no_copy_header'), ), outfiles=dict(), output_file=dict(argstr='%s', genfile=True, hash_files=False, - name_source=['input_files'], + name_source=[u'input_files'], name_template='%s_calc.mnc', position=-1, ), output_illegal=dict(argstr='-illegal_value', - xor=('output_nan', 'output_zero', 'output_illegal_value'), + xor=(u'output_nan', u'output_zero', u'output_illegal_value'), ), output_nan=dict(argstr='-nan', - xor=('output_nan', 'output_zero', 'output_illegal_value'), + xor=(u'output_nan', u'output_zero', u'output_illegal_value'), ), output_zero=dict(argstr='-zero', - xor=('output_nan', 'output_zero', 'output_illegal_value'), + xor=(u'output_nan', u'output_zero', u'output_illegal_value'), ), propagate_nan=dict(argstr='-propagate_nan', ), quiet=dict(argstr='-quiet', - xor=('verbose', 'quiet'), + xor=(u'verbose', u'quiet'), ), terminal_output=dict(nohash=True, ), two=dict(argstr='-2', ), verbose=dict(argstr='-verbose', - xor=('verbose', 'quiet'), + xor=(u'verbose', u'quiet'), ), 
voxel_range=dict(argstr='-range %d %d', ), diff --git a/nipype/interfaces/minc/tests/test_auto_Convert.py b/nipype/interfaces/minc/tests/test_auto_Convert.py index 74928bc100..df69156bd3 100644 --- a/nipype/interfaces/minc/tests/test_auto_Convert.py +++ b/nipype/interfaces/minc/tests/test_auto_Convert.py @@ -27,7 +27,7 @@ def test_Convert_inputs(): output_file=dict(argstr='%s', genfile=True, hash_files=False, - name_source=['input_file'], + name_source=[u'input_file'], name_template='%s_convert_output.mnc', position=-1, ), diff --git a/nipype/interfaces/minc/tests/test_auto_Copy.py b/nipype/interfaces/minc/tests/test_auto_Copy.py index a3f2edff48..2674d00a6c 100644 --- a/nipype/interfaces/minc/tests/test_auto_Copy.py +++ b/nipype/interfaces/minc/tests/test_auto_Copy.py @@ -19,15 +19,15 @@ def test_Copy_inputs(): output_file=dict(argstr='%s', genfile=True, hash_files=False, - name_source=['input_file'], + name_source=[u'input_file'], name_template='%s_copy.mnc', position=-1, ), pixel_values=dict(argstr='-pixel_values', - xor=('pixel_values', 'real_values'), + xor=(u'pixel_values', u'real_values'), ), real_values=dict(argstr='-real_values', - xor=('pixel_values', 'real_values'), + xor=(u'pixel_values', u'real_values'), ), terminal_output=dict(nohash=True, ), diff --git a/nipype/interfaces/minc/tests/test_auto_Dump.py b/nipype/interfaces/minc/tests/test_auto_Dump.py index 0a41c74c90..c1de6510cf 100644 --- a/nipype/interfaces/minc/tests/test_auto_Dump.py +++ b/nipype/interfaces/minc/tests/test_auto_Dump.py @@ -5,21 +5,21 @@ def test_Dump_inputs(): input_map = dict(annotations_brief=dict(argstr='-b %s', - xor=('annotations_brief', 'annotations_full'), + xor=(u'annotations_brief', u'annotations_full'), ), annotations_full=dict(argstr='-f %s', - xor=('annotations_brief', 'annotations_full'), + xor=(u'annotations_brief', u'annotations_full'), ), args=dict(argstr='%s', ), coordinate_data=dict(argstr='-c', - xor=('coordinate_data', 'header_data'), + xor=(u'coordinate_data', 
u'header_data'), ), environ=dict(nohash=True, usedefault=True, ), header_data=dict(argstr='-h', - xor=('coordinate_data', 'header_data'), + xor=(u'coordinate_data', u'header_data'), ), ignore_exception=dict(nohash=True, usedefault=True, @@ -39,7 +39,7 @@ def test_Dump_inputs(): ), output_file=dict(hash_files=False, keep_extension=False, - name_source=['input_file'], + name_source=[u'input_file'], name_template='%s_dump.txt', position=-1, ), diff --git a/nipype/interfaces/minc/tests/test_auto_Extract.py b/nipype/interfaces/minc/tests/test_auto_Extract.py index 04ecb3b7d3..4b634a7675 100644 --- a/nipype/interfaces/minc/tests/test_auto_Extract.py +++ b/nipype/interfaces/minc/tests/test_auto_Extract.py @@ -13,40 +13,40 @@ def test_Extract_inputs(): usedefault=True, ), flip_any_direction=dict(argstr='-any_direction', - xor=('flip_positive_direction', 'flip_negative_direction', 'flip_any_direction'), + xor=(u'flip_positive_direction', u'flip_negative_direction', u'flip_any_direction'), ), flip_negative_direction=dict(argstr='-negative_direction', - xor=('flip_positive_direction', 'flip_negative_direction', 'flip_any_direction'), + xor=(u'flip_positive_direction', u'flip_negative_direction', u'flip_any_direction'), ), flip_positive_direction=dict(argstr='-positive_direction', - xor=('flip_positive_direction', 'flip_negative_direction', 'flip_any_direction'), + xor=(u'flip_positive_direction', u'flip_negative_direction', u'flip_any_direction'), ), flip_x_any=dict(argstr='-xanydirection', - xor=('flip_x_positive', 'flip_x_negative', 'flip_x_any'), + xor=(u'flip_x_positive', u'flip_x_negative', u'flip_x_any'), ), flip_x_negative=dict(argstr='-xdirection', - xor=('flip_x_positive', 'flip_x_negative', 'flip_x_any'), + xor=(u'flip_x_positive', u'flip_x_negative', u'flip_x_any'), ), flip_x_positive=dict(argstr='+xdirection', - xor=('flip_x_positive', 'flip_x_negative', 'flip_x_any'), + xor=(u'flip_x_positive', u'flip_x_negative', u'flip_x_any'), ), 
flip_y_any=dict(argstr='-yanydirection', - xor=('flip_y_positive', 'flip_y_negative', 'flip_y_any'), + xor=(u'flip_y_positive', u'flip_y_negative', u'flip_y_any'), ), flip_y_negative=dict(argstr='-ydirection', - xor=('flip_y_positive', 'flip_y_negative', 'flip_y_any'), + xor=(u'flip_y_positive', u'flip_y_negative', u'flip_y_any'), ), flip_y_positive=dict(argstr='+ydirection', - xor=('flip_y_positive', 'flip_y_negative', 'flip_y_any'), + xor=(u'flip_y_positive', u'flip_y_negative', u'flip_y_any'), ), flip_z_any=dict(argstr='-zanydirection', - xor=('flip_z_positive', 'flip_z_negative', 'flip_z_any'), + xor=(u'flip_z_positive', u'flip_z_negative', u'flip_z_any'), ), flip_z_negative=dict(argstr='-zdirection', - xor=('flip_z_positive', 'flip_z_negative', 'flip_z_any'), + xor=(u'flip_z_positive', u'flip_z_negative', u'flip_z_any'), ), flip_z_positive=dict(argstr='+zdirection', - xor=('flip_z_positive', 'flip_z_negative', 'flip_z_any'), + xor=(u'flip_z_positive', u'flip_z_negative', u'flip_z_any'), ), ignore_exception=dict(nohash=True, usedefault=True, @@ -62,10 +62,10 @@ def test_Extract_inputs(): position=-2, ), nonormalize=dict(argstr='-nonormalize', - xor=('normalize', 'nonormalize'), + xor=(u'normalize', u'nonormalize'), ), normalize=dict(argstr='-normalize', - xor=('normalize', 'nonormalize'), + xor=(u'normalize', u'nonormalize'), ), out_file=dict(argstr='> %s', genfile=True, @@ -73,7 +73,7 @@ def test_Extract_inputs(): ), output_file=dict(hash_files=False, keep_extension=False, - name_source=['input_file'], + name_source=[u'input_file'], name_template='%s.raw', position=-1, ), @@ -83,33 +83,33 @@ def test_Extract_inputs(): terminal_output=dict(nohash=True, ), write_ascii=dict(argstr='-ascii', - xor=('write_ascii', 'write_ascii', 'write_byte', 'write_short', 'write_int', 'write_long', 'write_float', 'write_double', 'write_signed', 'write_unsigned'), + xor=(u'write_ascii', u'write_ascii', u'write_byte', u'write_short', u'write_int', u'write_long', u'write_float', 
u'write_double', u'write_signed', u'write_unsigned'), ), write_byte=dict(argstr='-byte', - xor=('write_ascii', 'write_ascii', 'write_byte', 'write_short', 'write_int', 'write_long', 'write_float', 'write_double', 'write_signed', 'write_unsigned'), + xor=(u'write_ascii', u'write_ascii', u'write_byte', u'write_short', u'write_int', u'write_long', u'write_float', u'write_double', u'write_signed', u'write_unsigned'), ), write_double=dict(argstr='-double', - xor=('write_ascii', 'write_ascii', 'write_byte', 'write_short', 'write_int', 'write_long', 'write_float', 'write_double', 'write_signed', 'write_unsigned'), + xor=(u'write_ascii', u'write_ascii', u'write_byte', u'write_short', u'write_int', u'write_long', u'write_float', u'write_double', u'write_signed', u'write_unsigned'), ), write_float=dict(argstr='-float', - xor=('write_ascii', 'write_ascii', 'write_byte', 'write_short', 'write_int', 'write_long', 'write_float', 'write_double', 'write_signed', 'write_unsigned'), + xor=(u'write_ascii', u'write_ascii', u'write_byte', u'write_short', u'write_int', u'write_long', u'write_float', u'write_double', u'write_signed', u'write_unsigned'), ), write_int=dict(argstr='-int', - xor=('write_ascii', 'write_ascii', 'write_byte', 'write_short', 'write_int', 'write_long', 'write_float', 'write_double', 'write_signed', 'write_unsigned'), + xor=(u'write_ascii', u'write_ascii', u'write_byte', u'write_short', u'write_int', u'write_long', u'write_float', u'write_double', u'write_signed', u'write_unsigned'), ), write_long=dict(argstr='-long', - xor=('write_ascii', 'write_ascii', 'write_byte', 'write_short', 'write_int', 'write_long', 'write_float', 'write_double', 'write_signed', 'write_unsigned'), + xor=(u'write_ascii', u'write_ascii', u'write_byte', u'write_short', u'write_int', u'write_long', u'write_float', u'write_double', u'write_signed', u'write_unsigned'), ), write_range=dict(argstr='-range %s %s', ), write_short=dict(argstr='-short', - xor=('write_ascii', 'write_ascii', 
'write_byte', 'write_short', 'write_int', 'write_long', 'write_float', 'write_double', 'write_signed', 'write_unsigned'), + xor=(u'write_ascii', u'write_ascii', u'write_byte', u'write_short', u'write_int', u'write_long', u'write_float', u'write_double', u'write_signed', u'write_unsigned'), ), write_signed=dict(argstr='-signed', - xor=('write_signed', 'write_unsigned'), + xor=(u'write_signed', u'write_unsigned'), ), write_unsigned=dict(argstr='-unsigned', - xor=('write_signed', 'write_unsigned'), + xor=(u'write_signed', u'write_unsigned'), ), ) inputs = Extract.input_spec() diff --git a/nipype/interfaces/minc/tests/test_auto_Gennlxfm.py b/nipype/interfaces/minc/tests/test_auto_Gennlxfm.py index 97d67c454a..4fb31a0015 100644 --- a/nipype/interfaces/minc/tests/test_auto_Gennlxfm.py +++ b/nipype/interfaces/minc/tests/test_auto_Gennlxfm.py @@ -22,7 +22,7 @@ def test_Gennlxfm_inputs(): output_file=dict(argstr='%s', genfile=True, hash_files=False, - name_source=['like'], + name_source=[u'like'], name_template='%s_gennlxfm.xfm', position=-1, ), diff --git a/nipype/interfaces/minc/tests/test_auto_Math.py b/nipype/interfaces/minc/tests/test_auto_Math.py index 52ee62b165..fb414daa1a 100644 --- a/nipype/interfaces/minc/tests/test_auto_Math.py +++ b/nipype/interfaces/minc/tests/test_auto_Math.py @@ -23,7 +23,7 @@ def test_Math_inputs(): calc_sub=dict(argstr='-sub', ), check_dimensions=dict(argstr='-check_dimensions', - xor=('check_dimensions', 'no_check_dimensions'), + xor=(u'check_dimensions', u'no_check_dimensions'), ), clamp=dict(argstr='-clamp -const2 %s %s', ), @@ -31,7 +31,7 @@ def test_Math_inputs(): usedefault=True, ), copy_header=dict(argstr='-copy_header', - xor=('copy_header', 'no_copy_header'), + xor=(u'copy_header', u'no_copy_header'), ), count_valid=dict(argstr='-count_valid', ), @@ -44,34 +44,34 @@ def test_Math_inputs(): ), filelist=dict(argstr='-filelist %s', mandatory=True, - xor=('input_files', 'filelist'), + xor=(u'input_files', u'filelist'), ), 
format_byte=dict(argstr='-byte', - xor=('format_filetype', 'format_byte', 'format_short', 'format_int', 'format_long', 'format_float', 'format_double', 'format_signed', 'format_unsigned'), + xor=(u'format_filetype', u'format_byte', u'format_short', u'format_int', u'format_long', u'format_float', u'format_double', u'format_signed', u'format_unsigned'), ), format_double=dict(argstr='-double', - xor=('format_filetype', 'format_byte', 'format_short', 'format_int', 'format_long', 'format_float', 'format_double', 'format_signed', 'format_unsigned'), + xor=(u'format_filetype', u'format_byte', u'format_short', u'format_int', u'format_long', u'format_float', u'format_double', u'format_signed', u'format_unsigned'), ), format_filetype=dict(argstr='-filetype', - xor=('format_filetype', 'format_byte', 'format_short', 'format_int', 'format_long', 'format_float', 'format_double', 'format_signed', 'format_unsigned'), + xor=(u'format_filetype', u'format_byte', u'format_short', u'format_int', u'format_long', u'format_float', u'format_double', u'format_signed', u'format_unsigned'), ), format_float=dict(argstr='-float', - xor=('format_filetype', 'format_byte', 'format_short', 'format_int', 'format_long', 'format_float', 'format_double', 'format_signed', 'format_unsigned'), + xor=(u'format_filetype', u'format_byte', u'format_short', u'format_int', u'format_long', u'format_float', u'format_double', u'format_signed', u'format_unsigned'), ), format_int=dict(argstr='-int', - xor=('format_filetype', 'format_byte', 'format_short', 'format_int', 'format_long', 'format_float', 'format_double', 'format_signed', 'format_unsigned'), + xor=(u'format_filetype', u'format_byte', u'format_short', u'format_int', u'format_long', u'format_float', u'format_double', u'format_signed', u'format_unsigned'), ), format_long=dict(argstr='-long', - xor=('format_filetype', 'format_byte', 'format_short', 'format_int', 'format_long', 'format_float', 'format_double', 'format_signed', 'format_unsigned'), + 
xor=(u'format_filetype', u'format_byte', u'format_short', u'format_int', u'format_long', u'format_float', u'format_double', u'format_signed', u'format_unsigned'), ), format_short=dict(argstr='-short', - xor=('format_filetype', 'format_byte', 'format_short', 'format_int', 'format_long', 'format_float', 'format_double', 'format_signed', 'format_unsigned'), + xor=(u'format_filetype', u'format_byte', u'format_short', u'format_int', u'format_long', u'format_float', u'format_double', u'format_signed', u'format_unsigned'), ), format_signed=dict(argstr='-signed', - xor=('format_filetype', 'format_byte', 'format_short', 'format_int', 'format_long', 'format_float', 'format_double', 'format_signed', 'format_unsigned'), + xor=(u'format_filetype', u'format_byte', u'format_short', u'format_int', u'format_long', u'format_float', u'format_double', u'format_signed', u'format_unsigned'), ), format_unsigned=dict(argstr='-unsigned', - xor=('format_filetype', 'format_byte', 'format_short', 'format_int', 'format_long', 'format_float', 'format_double', 'format_signed', 'format_unsigned'), + xor=(u'format_filetype', u'format_byte', u'format_short', u'format_int', u'format_long', u'format_float', u'format_double', u'format_signed', u'format_unsigned'), ), ignore_exception=dict(nohash=True, usedefault=True, @@ -82,7 +82,7 @@ def test_Math_inputs(): mandatory=True, position=-2, sep=' ', - xor=('input_files', 'filelist'), + xor=(u'input_files', u'filelist'), ), invert=dict(argstr='-invert -const %s', ), @@ -99,28 +99,28 @@ def test_Math_inputs(): nisnan=dict(argstr='-nisnan', ), no_check_dimensions=dict(argstr='-nocheck_dimensions', - xor=('check_dimensions', 'no_check_dimensions'), + xor=(u'check_dimensions', u'no_check_dimensions'), ), no_copy_header=dict(argstr='-nocopy_header', - xor=('copy_header', 'no_copy_header'), + xor=(u'copy_header', u'no_copy_header'), ), nsegment=dict(argstr='-nsegment -const2 %s %s', ), output_file=dict(argstr='%s', genfile=True, hash_files=False, - 
name_source=['input_files'], + name_source=[u'input_files'], name_template='%s_mincmath.mnc', position=-1, ), output_illegal=dict(argstr='-illegal_value', - xor=('output_nan', 'output_zero', 'output_illegal_value'), + xor=(u'output_nan', u'output_zero', u'output_illegal_value'), ), output_nan=dict(argstr='-nan', - xor=('output_nan', 'output_zero', 'output_illegal_value'), + xor=(u'output_nan', u'output_zero', u'output_illegal_value'), ), output_zero=dict(argstr='-zero', - xor=('output_nan', 'output_zero', 'output_illegal_value'), + xor=(u'output_nan', u'output_zero', u'output_illegal_value'), ), percentdiff=dict(argstr='-percentdiff', ), diff --git a/nipype/interfaces/minc/tests/test_auto_Norm.py b/nipype/interfaces/minc/tests/test_auto_Norm.py index d0044f6ee3..d9dbd80487 100644 --- a/nipype/interfaces/minc/tests/test_auto_Norm.py +++ b/nipype/interfaces/minc/tests/test_auto_Norm.py @@ -35,13 +35,13 @@ def test_Norm_inputs(): output_file=dict(argstr='%s', genfile=True, hash_files=False, - name_source=['input_file'], + name_source=[u'input_file'], name_template='%s_norm.mnc', position=-1, ), output_threshold_mask=dict(argstr='-threshold_mask %s', hash_files=False, - name_source=['input_file'], + name_source=[u'input_file'], name_template='%s_norm_threshold_mask.mnc', ), terminal_output=dict(nohash=True, diff --git a/nipype/interfaces/minc/tests/test_auto_Pik.py b/nipype/interfaces/minc/tests/test_auto_Pik.py index 89242d649b..768d215b4c 100644 --- a/nipype/interfaces/minc/tests/test_auto_Pik.py +++ b/nipype/interfaces/minc/tests/test_auto_Pik.py @@ -9,7 +9,7 @@ def test_Pik_inputs(): args=dict(argstr='%s', ), auto_range=dict(argstr='--auto_range', - xor=('image_range', 'auto_range'), + xor=(u'image_range', u'auto_range'), ), clobber=dict(argstr='-clobber', usedefault=True, @@ -20,19 +20,19 @@ def test_Pik_inputs(): usedefault=True, ), horizontal_triplanar_view=dict(argstr='--horizontal', - xor=('vertical_triplanar_view', 'horizontal_triplanar_view'), + 
xor=(u'vertical_triplanar_view', u'horizontal_triplanar_view'), ), ignore_exception=dict(nohash=True, usedefault=True, ), image_range=dict(argstr='--image_range %s %s', - xor=('image_range', 'auto_range'), + xor=(u'image_range', u'auto_range'), ), input_file=dict(argstr='%s', mandatory=True, position=-2, ), - jpg=dict(xor=('jpg', 'png'), + jpg=dict(xor=(u'jpg', u'png'), ), lookup=dict(argstr='--lookup %s', ), @@ -42,11 +42,11 @@ def test_Pik_inputs(): genfile=True, hash_files=False, keep_extension=False, - name_source=['input_file'], + name_source=[u'input_file'], name_template='%s.png', position=-1, ), - png=dict(xor=('jpg', 'png'), + png=dict(xor=(u'jpg', u'png'), ), sagittal_offset=dict(argstr='--sagittal_offset %s', ), @@ -55,13 +55,13 @@ def test_Pik_inputs(): scale=dict(argstr='--scale %s', ), slice_x=dict(argstr='-x', - xor=('slice_z', 'slice_y', 'slice_x'), + xor=(u'slice_z', u'slice_y', u'slice_x'), ), slice_y=dict(argstr='-y', - xor=('slice_z', 'slice_y', 'slice_x'), + xor=(u'slice_z', u'slice_y', u'slice_x'), ), slice_z=dict(argstr='-z', - xor=('slice_z', 'slice_y', 'slice_x'), + xor=(u'slice_z', u'slice_y', u'slice_x'), ), start=dict(argstr='--slice %s', ), @@ -72,12 +72,12 @@ def test_Pik_inputs(): title=dict(argstr='%s', ), title_size=dict(argstr='--title_size %s', - requires=['title'], + requires=[u'title'], ), triplanar=dict(argstr='--triplanar', ), vertical_triplanar_view=dict(argstr='--vertical', - xor=('vertical_triplanar_view', 'horizontal_triplanar_view'), + xor=(u'vertical_triplanar_view', u'horizontal_triplanar_view'), ), width=dict(argstr='--width %s', ), diff --git a/nipype/interfaces/minc/tests/test_auto_Resample.py b/nipype/interfaces/minc/tests/test_auto_Resample.py index 38c3ac1e8f..b2720e2080 100644 --- a/nipype/interfaces/minc/tests/test_auto_Resample.py +++ b/nipype/interfaces/minc/tests/test_auto_Resample.py @@ -10,46 +10,46 @@ def test_Resample_inputs(): usedefault=True, ), coronal_slices=dict(argstr='-coronal', - 
xor=('transverse', 'sagittal', 'coronal'), + xor=(u'transverse', u'sagittal', u'coronal'), ), dircos=dict(argstr='-dircos %s %s %s', - xor=('nelements', 'nelements_x_y_or_z'), + xor=(u'nelements', u'nelements_x_y_or_z'), ), environ=dict(nohash=True, usedefault=True, ), fill=dict(argstr='-fill', - xor=('nofill', 'fill'), + xor=(u'nofill', u'fill'), ), fill_value=dict(argstr='-fillvalue %s', - requires=['fill'], + requires=[u'fill'], ), format_byte=dict(argstr='-byte', - xor=('format_byte', 'format_short', 'format_int', 'format_long', 'format_float', 'format_double', 'format_signed', 'format_unsigned'), + xor=(u'format_byte', u'format_short', u'format_int', u'format_long', u'format_float', u'format_double', u'format_signed', u'format_unsigned'), ), format_double=dict(argstr='-double', - xor=('format_byte', 'format_short', 'format_int', 'format_long', 'format_float', 'format_double', 'format_signed', 'format_unsigned'), + xor=(u'format_byte', u'format_short', u'format_int', u'format_long', u'format_float', u'format_double', u'format_signed', u'format_unsigned'), ), format_float=dict(argstr='-float', - xor=('format_byte', 'format_short', 'format_int', 'format_long', 'format_float', 'format_double', 'format_signed', 'format_unsigned'), + xor=(u'format_byte', u'format_short', u'format_int', u'format_long', u'format_float', u'format_double', u'format_signed', u'format_unsigned'), ), format_int=dict(argstr='-int', - xor=('format_byte', 'format_short', 'format_int', 'format_long', 'format_float', 'format_double', 'format_signed', 'format_unsigned'), + xor=(u'format_byte', u'format_short', u'format_int', u'format_long', u'format_float', u'format_double', u'format_signed', u'format_unsigned'), ), format_long=dict(argstr='-long', - xor=('format_byte', 'format_short', 'format_int', 'format_long', 'format_float', 'format_double', 'format_signed', 'format_unsigned'), + xor=(u'format_byte', u'format_short', u'format_int', u'format_long', u'format_float', u'format_double', 
u'format_signed', u'format_unsigned'), ), format_short=dict(argstr='-short', - xor=('format_byte', 'format_short', 'format_int', 'format_long', 'format_float', 'format_double', 'format_signed', 'format_unsigned'), + xor=(u'format_byte', u'format_short', u'format_int', u'format_long', u'format_float', u'format_double', u'format_signed', u'format_unsigned'), ), format_signed=dict(argstr='-signed', - xor=('format_byte', 'format_short', 'format_int', 'format_long', 'format_float', 'format_double', 'format_signed', 'format_unsigned'), + xor=(u'format_byte', u'format_short', u'format_int', u'format_long', u'format_float', u'format_double', u'format_signed', u'format_unsigned'), ), format_unsigned=dict(argstr='-unsigned', - xor=('format_byte', 'format_short', 'format_int', 'format_long', 'format_float', 'format_double', 'format_signed', 'format_unsigned'), + xor=(u'format_byte', u'format_short', u'format_int', u'format_long', u'format_float', u'format_double', u'format_signed', u'format_unsigned'), ), half_width_sinc_window=dict(argstr='-width %s', - requires=['sinc_interpolation'], + requires=[u'sinc_interpolation'], ), ignore_exception=dict(nohash=True, usedefault=True, @@ -62,59 +62,59 @@ def test_Resample_inputs(): invert_transformation=dict(argstr='-invert_transformation', ), keep_real_range=dict(argstr='-keep_real_range', - xor=('keep_real_range', 'nokeep_real_range'), + xor=(u'keep_real_range', u'nokeep_real_range'), ), like=dict(argstr='-like %s', ), nearest_neighbour_interpolation=dict(argstr='-nearest_neighbour', - xor=('trilinear_interpolation', 'tricubic_interpolation', 'nearest_neighbour_interpolation', 'sinc_interpolation'), + xor=(u'trilinear_interpolation', u'tricubic_interpolation', u'nearest_neighbour_interpolation', u'sinc_interpolation'), ), nelements=dict(argstr='-nelements %s %s %s', - xor=('nelements', 'nelements_x_y_or_z'), + xor=(u'nelements', u'nelements_x_y_or_z'), ), no_fill=dict(argstr='-nofill', - xor=('nofill', 'fill'), + xor=(u'nofill', 
u'fill'), ), no_input_sampling=dict(argstr='-use_input_sampling', - xor=('vio_transform', 'no_input_sampling'), + xor=(u'vio_transform', u'no_input_sampling'), ), nokeep_real_range=dict(argstr='-nokeep_real_range', - xor=('keep_real_range', 'nokeep_real_range'), + xor=(u'keep_real_range', u'nokeep_real_range'), ), origin=dict(argstr='-origin %s %s %s', ), output_file=dict(argstr='%s', genfile=True, hash_files=False, - name_source=['input_file'], + name_source=[u'input_file'], name_template='%s_resample.mnc', position=-1, ), output_range=dict(argstr='-range %s %s', ), sagittal_slices=dict(argstr='-sagittal', - xor=('transverse', 'sagittal', 'coronal'), + xor=(u'transverse', u'sagittal', u'coronal'), ), sinc_interpolation=dict(argstr='-sinc', - xor=('trilinear_interpolation', 'tricubic_interpolation', 'nearest_neighbour_interpolation', 'sinc_interpolation'), + xor=(u'trilinear_interpolation', u'tricubic_interpolation', u'nearest_neighbour_interpolation', u'sinc_interpolation'), ), sinc_window_hamming=dict(argstr='-hamming', - requires=['sinc_interpolation'], - xor=('sinc_window_hanning', 'sinc_window_hamming'), + requires=[u'sinc_interpolation'], + xor=(u'sinc_window_hanning', u'sinc_window_hamming'), ), sinc_window_hanning=dict(argstr='-hanning', - requires=['sinc_interpolation'], - xor=('sinc_window_hanning', 'sinc_window_hamming'), + requires=[u'sinc_interpolation'], + xor=(u'sinc_window_hanning', u'sinc_window_hamming'), ), spacetype=dict(argstr='-spacetype %s', ), standard_sampling=dict(argstr='-standard_sampling', ), start=dict(argstr='-start %s %s %s', - xor=('nelements', 'nelements_x_y_or_z'), + xor=(u'nelements', u'nelements_x_y_or_z'), ), step=dict(argstr='-step %s %s %s', - xor=('nelements', 'nelements_x_y_or_z'), + xor=(u'nelements', u'nelements_x_y_or_z'), ), talairach=dict(argstr='-talairach', ), @@ -123,68 +123,68 @@ def test_Resample_inputs(): transformation=dict(argstr='-transformation %s', ), transverse_slices=dict(argstr='-transverse', - 
xor=('transverse', 'sagittal', 'coronal'), + xor=(u'transverse', u'sagittal', u'coronal'), ), tricubic_interpolation=dict(argstr='-tricubic', - xor=('trilinear_interpolation', 'tricubic_interpolation', 'nearest_neighbour_interpolation', 'sinc_interpolation'), + xor=(u'trilinear_interpolation', u'tricubic_interpolation', u'nearest_neighbour_interpolation', u'sinc_interpolation'), ), trilinear_interpolation=dict(argstr='-trilinear', - xor=('trilinear_interpolation', 'tricubic_interpolation', 'nearest_neighbour_interpolation', 'sinc_interpolation'), + xor=(u'trilinear_interpolation', u'tricubic_interpolation', u'nearest_neighbour_interpolation', u'sinc_interpolation'), ), two=dict(argstr='-2', ), units=dict(argstr='-units %s', ), vio_transform=dict(argstr='-tfm_input_sampling', - xor=('vio_transform', 'no_input_sampling'), + xor=(u'vio_transform', u'no_input_sampling'), ), xdircos=dict(argstr='-xdircos %s', - requires=('ydircos', 'zdircos'), - xor=('dircos', 'dircos_x_y_or_z'), + requires=(u'ydircos', u'zdircos'), + xor=(u'dircos', u'dircos_x_y_or_z'), ), xnelements=dict(argstr='-xnelements %s', - requires=('ynelements', 'znelements'), - xor=('nelements', 'nelements_x_y_or_z'), + requires=(u'ynelements', u'znelements'), + xor=(u'nelements', u'nelements_x_y_or_z'), ), xstart=dict(argstr='-xstart %s', - requires=('ystart', 'zstart'), - xor=('start', 'start_x_y_or_z'), + requires=(u'ystart', u'zstart'), + xor=(u'start', u'start_x_y_or_z'), ), xstep=dict(argstr='-xstep %s', - requires=('ystep', 'zstep'), - xor=('step', 'step_x_y_or_z'), + requires=(u'ystep', u'zstep'), + xor=(u'step', u'step_x_y_or_z'), ), ydircos=dict(argstr='-ydircos %s', - requires=('xdircos', 'zdircos'), - xor=('dircos', 'dircos_x_y_or_z'), + requires=(u'xdircos', u'zdircos'), + xor=(u'dircos', u'dircos_x_y_or_z'), ), ynelements=dict(argstr='-ynelements %s', - requires=('xnelements', 'znelements'), - xor=('nelements', 'nelements_x_y_or_z'), + requires=(u'xnelements', u'znelements'), + 
xor=(u'nelements', u'nelements_x_y_or_z'), ), ystart=dict(argstr='-ystart %s', - requires=('xstart', 'zstart'), - xor=('start', 'start_x_y_or_z'), + requires=(u'xstart', u'zstart'), + xor=(u'start', u'start_x_y_or_z'), ), ystep=dict(argstr='-ystep %s', - requires=('xstep', 'zstep'), - xor=('step', 'step_x_y_or_z'), + requires=(u'xstep', u'zstep'), + xor=(u'step', u'step_x_y_or_z'), ), zdircos=dict(argstr='-zdircos %s', - requires=('xdircos', 'ydircos'), - xor=('dircos', 'dircos_x_y_or_z'), + requires=(u'xdircos', u'ydircos'), + xor=(u'dircos', u'dircos_x_y_or_z'), ), znelements=dict(argstr='-znelements %s', - requires=('xnelements', 'ynelements'), - xor=('nelements', 'nelements_x_y_or_z'), + requires=(u'xnelements', u'ynelements'), + xor=(u'nelements', u'nelements_x_y_or_z'), ), zstart=dict(argstr='-zstart %s', - requires=('xstart', 'ystart'), - xor=('start', 'start_x_y_or_z'), + requires=(u'xstart', u'ystart'), + xor=(u'start', u'start_x_y_or_z'), ), zstep=dict(argstr='-zstep %s', - requires=('xstep', 'ystep'), - xor=('step', 'step_x_y_or_z'), + requires=(u'xstep', u'ystep'), + xor=(u'step', u'step_x_y_or_z'), ), ) inputs = Resample.input_spec() diff --git a/nipype/interfaces/minc/tests/test_auto_Reshape.py b/nipype/interfaces/minc/tests/test_auto_Reshape.py index 89fe7b10d8..f6e04fee2c 100644 --- a/nipype/interfaces/minc/tests/test_auto_Reshape.py +++ b/nipype/interfaces/minc/tests/test_auto_Reshape.py @@ -22,7 +22,7 @@ def test_Reshape_inputs(): output_file=dict(argstr='%s', genfile=True, hash_files=False, - name_source=['input_file'], + name_source=[u'input_file'], name_template='%s_reshape.mnc', position=-1, ), diff --git a/nipype/interfaces/minc/tests/test_auto_ToEcat.py b/nipype/interfaces/minc/tests/test_auto_ToEcat.py index cda7a12179..8150b9838c 100644 --- a/nipype/interfaces/minc/tests/test_auto_ToEcat.py +++ b/nipype/interfaces/minc/tests/test_auto_ToEcat.py @@ -34,7 +34,7 @@ def test_ToEcat_inputs(): genfile=True, hash_files=False, 
keep_extension=False, - name_source=['input_file'], + name_source=[u'input_file'], name_template='%s_to_ecat.v', position=-1, ), diff --git a/nipype/interfaces/minc/tests/test_auto_ToRaw.py b/nipype/interfaces/minc/tests/test_auto_ToRaw.py index a647ed48d0..8cc9ee1439 100644 --- a/nipype/interfaces/minc/tests/test_auto_ToRaw.py +++ b/nipype/interfaces/minc/tests/test_auto_ToRaw.py @@ -17,10 +17,10 @@ def test_ToRaw_inputs(): position=-2, ), nonormalize=dict(argstr='-nonormalize', - xor=('normalize', 'nonormalize'), + xor=(u'normalize', u'nonormalize'), ), normalize=dict(argstr='-normalize', - xor=('normalize', 'nonormalize'), + xor=(u'normalize', u'nonormalize'), ), out_file=dict(argstr='> %s', genfile=True, @@ -28,37 +28,37 @@ def test_ToRaw_inputs(): ), output_file=dict(hash_files=False, keep_extension=False, - name_source=['input_file'], + name_source=[u'input_file'], name_template='%s.raw', position=-1, ), terminal_output=dict(nohash=True, ), write_byte=dict(argstr='-byte', - xor=('write_byte', 'write_short', 'write_int', 'write_long', 'write_float', 'write_double'), + xor=(u'write_byte', u'write_short', u'write_int', u'write_long', u'write_float', u'write_double'), ), write_double=dict(argstr='-double', - xor=('write_byte', 'write_short', 'write_int', 'write_long', 'write_float', 'write_double'), + xor=(u'write_byte', u'write_short', u'write_int', u'write_long', u'write_float', u'write_double'), ), write_float=dict(argstr='-float', - xor=('write_byte', 'write_short', 'write_int', 'write_long', 'write_float', 'write_double'), + xor=(u'write_byte', u'write_short', u'write_int', u'write_long', u'write_float', u'write_double'), ), write_int=dict(argstr='-int', - xor=('write_byte', 'write_short', 'write_int', 'write_long', 'write_float', 'write_double'), + xor=(u'write_byte', u'write_short', u'write_int', u'write_long', u'write_float', u'write_double'), ), write_long=dict(argstr='-long', - xor=('write_byte', 'write_short', 'write_int', 'write_long', 'write_float', 
'write_double'), + xor=(u'write_byte', u'write_short', u'write_int', u'write_long', u'write_float', u'write_double'), ), write_range=dict(argstr='-range %s %s', ), write_short=dict(argstr='-short', - xor=('write_byte', 'write_short', 'write_int', 'write_long', 'write_float', 'write_double'), + xor=(u'write_byte', u'write_short', u'write_int', u'write_long', u'write_float', u'write_double'), ), write_signed=dict(argstr='-signed', - xor=('write_signed', 'write_unsigned'), + xor=(u'write_signed', u'write_unsigned'), ), write_unsigned=dict(argstr='-unsigned', - xor=('write_signed', 'write_unsigned'), + xor=(u'write_signed', u'write_unsigned'), ), ) inputs = ToRaw.input_spec() diff --git a/nipype/interfaces/minc/tests/test_auto_VolSymm.py b/nipype/interfaces/minc/tests/test_auto_VolSymm.py index 6200880c93..707b091480 100644 --- a/nipype/interfaces/minc/tests/test_auto_VolSymm.py +++ b/nipype/interfaces/minc/tests/test_auto_VolSymm.py @@ -31,7 +31,7 @@ def test_VolSymm_inputs(): output_file=dict(argstr='%s', genfile=True, hash_files=False, - name_source=['input_file'], + name_source=[u'input_file'], name_template='%s_vol_symm.mnc', position=-1, ), @@ -41,7 +41,7 @@ def test_VolSymm_inputs(): genfile=True, hash_files=False, keep_extension=False, - name_source=['input_file'], + name_source=[u'input_file'], name_template='%s_vol_symm.xfm', position=-2, ), diff --git a/nipype/interfaces/minc/tests/test_auto_Volcentre.py b/nipype/interfaces/minc/tests/test_auto_Volcentre.py index 02afcf871e..bd3b4bfac1 100644 --- a/nipype/interfaces/minc/tests/test_auto_Volcentre.py +++ b/nipype/interfaces/minc/tests/test_auto_Volcentre.py @@ -26,7 +26,7 @@ def test_Volcentre_inputs(): output_file=dict(argstr='%s', genfile=True, hash_files=False, - name_source=['input_file'], + name_source=[u'input_file'], name_template='%s_volcentre.mnc', position=-1, ), diff --git a/nipype/interfaces/minc/tests/test_auto_Voliso.py b/nipype/interfaces/minc/tests/test_auto_Voliso.py index 
e64d8f8dc1..201449c19d 100644 --- a/nipype/interfaces/minc/tests/test_auto_Voliso.py +++ b/nipype/interfaces/minc/tests/test_auto_Voliso.py @@ -28,7 +28,7 @@ def test_Voliso_inputs(): output_file=dict(argstr='%s', genfile=True, hash_files=False, - name_source=['input_file'], + name_source=[u'input_file'], name_template='%s_voliso.mnc', position=-1, ), diff --git a/nipype/interfaces/minc/tests/test_auto_Volpad.py b/nipype/interfaces/minc/tests/test_auto_Volpad.py index d7adf256f0..1fc37ece5f 100644 --- a/nipype/interfaces/minc/tests/test_auto_Volpad.py +++ b/nipype/interfaces/minc/tests/test_auto_Volpad.py @@ -28,7 +28,7 @@ def test_Volpad_inputs(): output_file=dict(argstr='%s', genfile=True, hash_files=False, - name_source=['input_file'], + name_source=[u'input_file'], name_template='%s_volpad.mnc', position=-1, ), diff --git a/nipype/interfaces/minc/tests/test_auto_XfmConcat.py b/nipype/interfaces/minc/tests/test_auto_XfmConcat.py index af46985fd3..075406b117 100644 --- a/nipype/interfaces/minc/tests/test_auto_XfmConcat.py +++ b/nipype/interfaces/minc/tests/test_auto_XfmConcat.py @@ -24,7 +24,7 @@ def test_XfmConcat_inputs(): output_file=dict(argstr='%s', genfile=True, hash_files=False, - name_source=['input_files'], + name_source=[u'input_files'], name_template='%s_xfmconcat.xfm', position=-1, ), diff --git a/nipype/interfaces/mipav/__init__.py b/nipype/interfaces/mipav/__init__.py index f7af18ca3c..d3a6b785fa 100644 --- a/nipype/interfaces/mipav/__init__.py +++ b/nipype/interfaces/mipav/__init__.py @@ -1,2 +1,3 @@ -from __future__ import absolute_import +# -*- coding: utf-8 -*- +from __future__ import print_function, division, unicode_literals, absolute_import from .developer import JistLaminarVolumetricLayering, JistBrainMgdmSegmentation, JistLaminarProfileGeometry, JistLaminarProfileCalculator, MedicAlgorithmN3, JistLaminarROIAveraging, MedicAlgorithmLesionToads, JistBrainMp2rageSkullStripping, JistCortexSurfaceMeshInflation, RandomVol, 
MedicAlgorithmImageCalculator, JistBrainMp2rageDuraEstimation, JistLaminarProfileSampling, MedicAlgorithmMipavReorient, MedicAlgorithmSPECTRE2010, JistBrainPartialVolumeFilter, JistIntensityMp2rageMasking, MedicAlgorithmThresholdToBinaryMask diff --git a/nipype/interfaces/mipav/developer.py b/nipype/interfaces/mipav/developer.py index c0762332cb..2e14f3a6b5 100644 --- a/nipype/interfaces/mipav/developer.py +++ b/nipype/interfaces/mipav/developer.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/mipav/generate_classes.py b/nipype/interfaces/mipav/generate_classes.py index d4c8f8d9f9..d348ef2546 100644 --- a/nipype/interfaces/mipav/generate_classes.py +++ b/nipype/interfaces/mipav/generate_classes.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- if __name__ == "__main__": from nipype.interfaces.slicer.generate_classes import generate_all_classes diff --git a/nipype/interfaces/mipav/tests/__init__.py b/nipype/interfaces/mipav/tests/__init__.py index e69de29bb2..40a96afc6f 100644 --- a/nipype/interfaces/mipav/tests/__init__.py +++ b/nipype/interfaces/mipav/tests/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/mne/__init__.py b/nipype/interfaces/mne/__init__.py index 16d95f8454..8bf3db28ed 100644 --- a/nipype/interfaces/mne/__init__.py +++ b/nipype/interfaces/mne/__init__.py @@ -1 +1,2 @@ +# -*- coding: utf-8 -*- from .base import WatershedBEM diff --git a/nipype/interfaces/mne/base.py b/nipype/interfaces/mne/base.py index 96d238f8ef..7415f22735 100644 --- a/nipype/interfaces/mne/base.py +++ b/nipype/interfaces/mne/base.py @@ -1,14 +1,16 @@ +# -*- coding: utf-8 -*- +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import str, bytes import os.path as op import glob -import logging -logging.basicConfig() -iflogger = 
logging.getLogger('interface') +from ... import logging +from ...utils.filemanip import list_to_filename from ..base import (traits, File, Directory, TraitedSpec, OutputMultiPath) from ..freesurfer.base import FSCommand, FSTraitedSpec -from ...external.six import string_types -from ...utils.filemanip import list_to_filename + +iflogger = logging.getLogger('interface') class WatershedBEMInputSpec(FSTraitedSpec): @@ -53,7 +55,7 @@ class WatershedBEM(FSCommand): >>> bem = WatershedBEM() >>> bem.inputs.subject_id = 'subj1' >>> bem.inputs.subjects_dir = '.' - >>> bem.cmdline + >>> bem.cmdline # doctest: +IGNORE_UNICODE 'mne_watershed_bem --overwrite --subject subj1 --volume T1' >>> bem.run() # doctest: +SKIP @@ -90,7 +92,7 @@ def _list_outputs(self): out_files = [] for value in value_list: out_files.append(op.abspath(value)) - elif isinstance(value_list, string_types): + elif isinstance(value_list, (str, bytes)): out_files = op.abspath(value_list) else: raise TypeError diff --git a/nipype/interfaces/mne/tests/__init__.py b/nipype/interfaces/mne/tests/__init__.py index e69de29bb2..40a96afc6f 100644 --- a/nipype/interfaces/mne/tests/__init__.py +++ b/nipype/interfaces/mne/tests/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/mrtrix/__init__.py b/nipype/interfaces/mrtrix/__init__.py index 4b5a4270d5..ea066d4cd8 100644 --- a/nipype/interfaces/mrtrix/__init__.py +++ b/nipype/interfaces/mrtrix/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: from .tracking import (Tracks2Prob, FilterTracks, StreamlineTrack, diff --git a/nipype/interfaces/mrtrix/convert.py b/nipype/interfaces/mrtrix/convert.py index 89cf1c2299..e76e847535 100644 --- a/nipype/interfaces/mrtrix/convert.py +++ b/nipype/interfaces/mrtrix/convert.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # 
vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -8,35 +9,33 @@ >>> os.chdir(datadir) """ +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import open -from __future__ import division - -# -*- coding: utf-8 -*- import os.path as op import nibabel as nb import nibabel.trackvis as trk import numpy as np from nibabel.trackvis import HeaderError from nibabel.volumeutils import native_code +from nibabel.orientations import aff2axcodes -from ..base import (TraitedSpec, BaseInterface, BaseInterfaceInputSpec, - File, isdefined, traits) +from ... import logging from ...utils.filemanip import split_filename from ...utils.misc import package_check from ...workflows.misc.utils import get_data_dims, get_vox_dims +from ..base import TraitedSpec, BaseInterface, File, isdefined import warnings have_dipy = True try: package_check('dipy') except Exception as e: - False + have_dipy = False else: from dipy.tracking.utils import move_streamlines, affine_from_fsl_mat_file -from nibabel.orientations import aff2axcodes -from ... 
import logging iflogger = logging.getLogger('interface') @@ -245,7 +244,7 @@ def _list_outputs(self): return outputs def _gen_filename(self, name): - if name is 'out_filename': + if name == 'out_filename': return self._gen_outfilename() else: return None diff --git a/nipype/interfaces/mrtrix/preprocess.py b/nipype/interfaces/mrtrix/preprocess.py index 67242e9705..1beb6bec2a 100644 --- a/nipype/interfaces/mrtrix/preprocess.py +++ b/nipype/interfaces/mrtrix/preprocess.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -8,11 +9,13 @@ >>> os.chdir(datadir) """ +from __future__ import print_function, division, unicode_literals, absolute_import + import os.path as op +from ...utils.filemanip import split_filename from ..base import (CommandLineInputSpec, CommandLine, traits, TraitedSpec, File, InputMultiPath, isdefined) -from ...utils.filemanip import split_filename class MRConvertInputSpec(CommandLineInputSpec): @@ -77,7 +80,7 @@ def _list_outputs(self): return outputs def _gen_filename(self, name): - if name is 'out_filename': + if name == 'out_filename': return self._gen_outfilename() else: return None @@ -141,7 +144,7 @@ class DWI2Tensor(CommandLine): >>> dwi2tensor = mrt.DWI2Tensor() >>> dwi2tensor.inputs.in_file = 'dwi.mif' >>> dwi2tensor.inputs.encoding_file = 'encoding.txt' - >>> dwi2tensor.cmdline + >>> dwi2tensor.cmdline # doctest: +IGNORE_UNICODE 'dwi2tensor -grad encoding.txt dwi.mif dwi_tensor.mif' >>> dwi2tensor.run() # doctest: +SKIP """ @@ -190,7 +193,7 @@ def _list_outputs(self): return outputs def _gen_filename(self, name): - if name is 'out_filename': + if name == 'out_filename': return self._gen_outfilename() else: return None @@ -239,7 +242,7 @@ def _list_outputs(self): return outputs def _gen_filename(self, name): - if name is 'out_filename': + if name == 'out_filename': return self._gen_outfilename() else: return None @@ -288,7 +291,7 
@@ def _list_outputs(self): return outputs def _gen_filename(self, name): - if name is 'out_filename': + if name == 'out_filename': return self._gen_outfilename() else: return None @@ -338,7 +341,7 @@ def _list_outputs(self): return outputs def _gen_filename(self, name): - if name is 'out_filename': + if name == 'out_filename': return self._gen_outfilename() else: return None @@ -448,7 +451,7 @@ def _list_outputs(self): return outputs def _gen_filename(self, name): - if name is 'out_WMProb_filename': + if name == 'out_WMProb_filename': return self._gen_outfilename() else: return None @@ -498,7 +501,7 @@ def _list_outputs(self): return outputs def _gen_filename(self, name): - if name is 'out_filename': + if name == 'out_filename': return self._gen_outfilename() else: return None @@ -556,7 +559,7 @@ def _list_outputs(self): return outputs def _gen_filename(self, name): - if name is 'out_filename': + if name == 'out_filename': return self._gen_outfilename() else: return None @@ -605,7 +608,7 @@ def _list_outputs(self): return outputs def _gen_filename(self, name): - if name is 'out_filename': + if name == 'out_filename': return self._gen_outfilename() else: return None @@ -663,7 +666,7 @@ def _list_outputs(self): return outputs def _gen_filename(self, name): - if name is 'out_filename': + if name == 'out_filename': return self._gen_outfilename() else: return None diff --git a/nipype/interfaces/mrtrix/tensors.py b/nipype/interfaces/mrtrix/tensors.py index 3ef2ecc901..025d45b2e5 100644 --- a/nipype/interfaces/mrtrix/tensors.py +++ b/nipype/interfaces/mrtrix/tensors.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -8,15 +9,15 @@ >>> os.chdir(datadir) """ -import os.path as op +from __future__ import print_function, division, unicode_literals, absolute_import +import os.path as op import numpy as np -from ..base import (CommandLineInputSpec, CommandLine, 
BaseInterface, - BaseInterfaceInputSpec, traits, File, TraitedSpec, - Directory, InputMultiPath, OutputMultiPath, isdefined) -from ...utils.filemanip import split_filename from ... import logging +from ...utils.filemanip import split_filename +from ..base import (CommandLineInputSpec, CommandLine, BaseInterface, + traits, File, TraitedSpec, isdefined) iflogger = logging.getLogger('interface') @@ -85,7 +86,7 @@ def _list_outputs(self): return outputs def _gen_filename(self, name): - if name is 'out_filename': + if name == 'out_filename': return self._gen_outfilename() else: return None @@ -168,7 +169,7 @@ def _list_outputs(self): return outputs def _gen_filename(self, name): - if name is 'out_filename': + if name == 'out_filename': return self._gen_outfilename() else: return None @@ -222,7 +223,7 @@ def _list_outputs(self): return outputs def _gen_filename(self, name): - if name is 'out_filename': + if name == 'out_filename': return self._gen_outfilename() else: return None @@ -302,7 +303,7 @@ def _list_outputs(self): return outputs def _gen_filename(self, name): - if name is 'out_encoding_file': + if name == 'out_encoding_file': return self._gen_outfilename() else: return None diff --git a/nipype/interfaces/mrtrix/tests/__init__.py b/nipype/interfaces/mrtrix/tests/__init__.py index e69de29bb2..40a96afc6f 100644 --- a/nipype/interfaces/mrtrix/tests/__init__.py +++ b/nipype/interfaces/mrtrix/tests/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/mrtrix/tests/test_auto_DiffusionTensorStreamlineTrack.py b/nipype/interfaces/mrtrix/tests/test_auto_DiffusionTensorStreamlineTrack.py index 42cb14de9f..ced38246ac 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_DiffusionTensorStreamlineTrack.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_DiffusionTensorStreamlineTrack.py @@ -17,13 +17,13 @@ def test_DiffusionTensorStreamlineTrack_inputs(): usedefault=True, ), exclude_file=dict(argstr='-exclude %s', - xor=['exclude_file', 
'exclude_spec'], + xor=[u'exclude_file', u'exclude_spec'], ), exclude_spec=dict(argstr='-exclude %s', position=2, sep=',', units='mm', - xor=['exclude_file', 'exclude_spec'], + xor=[u'exclude_file', u'exclude_spec'], ), gradient_encoding_file=dict(argstr='-grad %s', mandatory=True, @@ -37,13 +37,13 @@ def test_DiffusionTensorStreamlineTrack_inputs(): position=-2, ), include_file=dict(argstr='-include %s', - xor=['include_file', 'include_spec'], + xor=[u'include_file', u'include_spec'], ), include_spec=dict(argstr='-include %s', position=2, sep=',', units='mm', - xor=['include_file', 'include_spec'], + xor=[u'include_file', u'include_spec'], ), initial_cutoff_value=dict(argstr='-initcutoff %s', units='NA', @@ -56,13 +56,13 @@ def test_DiffusionTensorStreamlineTrack_inputs(): usedefault=True, ), mask_file=dict(argstr='-mask %s', - xor=['mask_file', 'mask_spec'], + xor=[u'mask_file', u'mask_spec'], ), mask_spec=dict(argstr='-mask %s', position=2, sep=',', units='mm', - xor=['mask_file', 'mask_spec'], + xor=[u'mask_file', u'mask_spec'], ), maximum_number_of_tracks=dict(argstr='-maxnum %d', ), @@ -78,19 +78,19 @@ def test_DiffusionTensorStreamlineTrack_inputs(): no_mask_interpolation=dict(argstr='-nomaskinterp', ), out_file=dict(argstr='%s', - name_source=['in_file'], + name_source=[u'in_file'], name_template='%s_tracked.tck', output_name='tracked', position=-1, ), seed_file=dict(argstr='-seed %s', - xor=['seed_file', 'seed_spec'], + xor=[u'seed_file', u'seed_spec'], ), seed_spec=dict(argstr='-seed %s', position=2, sep=',', units='mm', - xor=['seed_file', 'seed_spec'], + xor=[u'seed_file', u'seed_spec'], ), step_size=dict(argstr='-step %s', units='mm', diff --git a/nipype/interfaces/mrtrix/tests/test_auto_Directions2Amplitude.py b/nipype/interfaces/mrtrix/tests/test_auto_Directions2Amplitude.py index 1fb1d8d764..d80ad33e18 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_Directions2Amplitude.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_Directions2Amplitude.py 
@@ -25,7 +25,7 @@ def test_Directions2Amplitude_inputs(): out_file=dict(argstr='%s', hash_files=False, keep_extension=False, - name_source=['in_file'], + name_source=[u'in_file'], name_template='%s_amplitudes.mif', position=-1, ), diff --git a/nipype/interfaces/mrtrix/tests/test_auto_FilterTracks.py b/nipype/interfaces/mrtrix/tests/test_auto_FilterTracks.py index 099d08c2de..434ff3c90d 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_FilterTracks.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_FilterTracks.py @@ -13,13 +13,13 @@ def test_FilterTracks_inputs(): usedefault=True, ), exclude_file=dict(argstr='-exclude %s', - xor=['exclude_file', 'exclude_spec'], + xor=[u'exclude_file', u'exclude_spec'], ), exclude_spec=dict(argstr='-exclude %s', position=2, sep=',', units='mm', - xor=['exclude_file', 'exclude_spec'], + xor=[u'exclude_file', u'exclude_spec'], ), ignore_exception=dict(nohash=True, usedefault=True, @@ -29,13 +29,13 @@ def test_FilterTracks_inputs(): position=-2, ), include_file=dict(argstr='-include %s', - xor=['include_file', 'include_spec'], + xor=[u'include_file', u'include_spec'], ), include_spec=dict(argstr='-include %s', position=2, sep=',', units='mm', - xor=['include_file', 'include_spec'], + xor=[u'include_file', u'include_spec'], ), invert=dict(argstr='-invert', ), @@ -46,7 +46,7 @@ def test_FilterTracks_inputs(): ), out_file=dict(argstr='%s', hash_files=False, - name_source=['in_file'], + name_source=[u'in_file'], name_template='%s_filt', position=-1, ), diff --git a/nipype/interfaces/mrtrix/tests/test_auto_FindShPeaks.py b/nipype/interfaces/mrtrix/tests/test_auto_FindShPeaks.py index d4776cb4b3..75eb43d256 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_FindShPeaks.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_FindShPeaks.py @@ -29,7 +29,7 @@ def test_FindShPeaks_inputs(): out_file=dict(argstr='%s', hash_files=False, keep_extension=False, - name_source=['in_file'], + name_source=[u'in_file'], name_template='%s_peak_dirs.mif', 
position=-1, ), diff --git a/nipype/interfaces/mrtrix/tests/test_auto_GenerateDirections.py b/nipype/interfaces/mrtrix/tests/test_auto_GenerateDirections.py index bd54f78fb3..578a59e7c9 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_GenerateDirections.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_GenerateDirections.py @@ -24,7 +24,7 @@ def test_GenerateDirections_inputs(): ), out_file=dict(argstr='%s', hash_files=False, - name_source=['num_dirs'], + name_source=[u'num_dirs'], name_template='directions_%d.txt', position=-1, ), diff --git a/nipype/interfaces/mrtrix/tests/test_auto_ProbabilisticSphericallyDeconvolutedStreamlineTrack.py b/nipype/interfaces/mrtrix/tests/test_auto_ProbabilisticSphericallyDeconvolutedStreamlineTrack.py index dfb1b57ddc..da772b4e67 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_ProbabilisticSphericallyDeconvolutedStreamlineTrack.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_ProbabilisticSphericallyDeconvolutedStreamlineTrack.py @@ -17,13 +17,13 @@ def test_ProbabilisticSphericallyDeconvolutedStreamlineTrack_inputs(): usedefault=True, ), exclude_file=dict(argstr='-exclude %s', - xor=['exclude_file', 'exclude_spec'], + xor=[u'exclude_file', u'exclude_spec'], ), exclude_spec=dict(argstr='-exclude %s', position=2, sep=',', units='mm', - xor=['exclude_file', 'exclude_spec'], + xor=[u'exclude_file', u'exclude_spec'], ), ignore_exception=dict(nohash=True, usedefault=True, @@ -33,13 +33,13 @@ def test_ProbabilisticSphericallyDeconvolutedStreamlineTrack_inputs(): position=-2, ), include_file=dict(argstr='-include %s', - xor=['include_file', 'include_spec'], + xor=[u'include_file', u'include_spec'], ), include_spec=dict(argstr='-include %s', position=2, sep=',', units='mm', - xor=['include_file', 'include_spec'], + xor=[u'include_file', u'include_spec'], ), initial_cutoff_value=dict(argstr='-initcutoff %s', units='NA', @@ -52,13 +52,13 @@ def test_ProbabilisticSphericallyDeconvolutedStreamlineTrack_inputs(): usedefault=True, ), 
mask_file=dict(argstr='-mask %s', - xor=['mask_file', 'mask_spec'], + xor=[u'mask_file', u'mask_spec'], ), mask_spec=dict(argstr='-mask %s', position=2, sep=',', units='mm', - xor=['mask_file', 'mask_spec'], + xor=[u'mask_file', u'mask_spec'], ), maximum_number_of_tracks=dict(argstr='-maxnum %d', ), @@ -76,19 +76,19 @@ def test_ProbabilisticSphericallyDeconvolutedStreamlineTrack_inputs(): no_mask_interpolation=dict(argstr='-nomaskinterp', ), out_file=dict(argstr='%s', - name_source=['in_file'], + name_source=[u'in_file'], name_template='%s_tracked.tck', output_name='tracked', position=-1, ), seed_file=dict(argstr='-seed %s', - xor=['seed_file', 'seed_spec'], + xor=[u'seed_file', u'seed_spec'], ), seed_spec=dict(argstr='-seed %s', position=2, sep=',', units='mm', - xor=['seed_file', 'seed_spec'], + xor=[u'seed_file', u'seed_spec'], ), step_size=dict(argstr='-step %s', units='mm', diff --git a/nipype/interfaces/mrtrix/tests/test_auto_SphericallyDeconvolutedStreamlineTrack.py b/nipype/interfaces/mrtrix/tests/test_auto_SphericallyDeconvolutedStreamlineTrack.py index 2180af33eb..4a04d1409d 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_SphericallyDeconvolutedStreamlineTrack.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_SphericallyDeconvolutedStreamlineTrack.py @@ -17,13 +17,13 @@ def test_SphericallyDeconvolutedStreamlineTrack_inputs(): usedefault=True, ), exclude_file=dict(argstr='-exclude %s', - xor=['exclude_file', 'exclude_spec'], + xor=[u'exclude_file', u'exclude_spec'], ), exclude_spec=dict(argstr='-exclude %s', position=2, sep=',', units='mm', - xor=['exclude_file', 'exclude_spec'], + xor=[u'exclude_file', u'exclude_spec'], ), ignore_exception=dict(nohash=True, usedefault=True, @@ -33,13 +33,13 @@ def test_SphericallyDeconvolutedStreamlineTrack_inputs(): position=-2, ), include_file=dict(argstr='-include %s', - xor=['include_file', 'include_spec'], + xor=[u'include_file', u'include_spec'], ), include_spec=dict(argstr='-include %s', position=2, sep=',', 
units='mm', - xor=['include_file', 'include_spec'], + xor=[u'include_file', u'include_spec'], ), initial_cutoff_value=dict(argstr='-initcutoff %s', units='NA', @@ -52,13 +52,13 @@ def test_SphericallyDeconvolutedStreamlineTrack_inputs(): usedefault=True, ), mask_file=dict(argstr='-mask %s', - xor=['mask_file', 'mask_spec'], + xor=[u'mask_file', u'mask_spec'], ), mask_spec=dict(argstr='-mask %s', position=2, sep=',', units='mm', - xor=['mask_file', 'mask_spec'], + xor=[u'mask_file', u'mask_spec'], ), maximum_number_of_tracks=dict(argstr='-maxnum %d', ), @@ -74,19 +74,19 @@ def test_SphericallyDeconvolutedStreamlineTrack_inputs(): no_mask_interpolation=dict(argstr='-nomaskinterp', ), out_file=dict(argstr='%s', - name_source=['in_file'], + name_source=[u'in_file'], name_template='%s_tracked.tck', output_name='tracked', position=-1, ), seed_file=dict(argstr='-seed %s', - xor=['seed_file', 'seed_spec'], + xor=[u'seed_file', u'seed_spec'], ), seed_spec=dict(argstr='-seed %s', position=2, sep=',', units='mm', - xor=['seed_file', 'seed_spec'], + xor=[u'seed_file', u'seed_spec'], ), step_size=dict(argstr='-step %s', units='mm', diff --git a/nipype/interfaces/mrtrix/tests/test_auto_StreamlineTrack.py b/nipype/interfaces/mrtrix/tests/test_auto_StreamlineTrack.py index 86f3607f34..f3007603fb 100644 --- a/nipype/interfaces/mrtrix/tests/test_auto_StreamlineTrack.py +++ b/nipype/interfaces/mrtrix/tests/test_auto_StreamlineTrack.py @@ -17,13 +17,13 @@ def test_StreamlineTrack_inputs(): usedefault=True, ), exclude_file=dict(argstr='-exclude %s', - xor=['exclude_file', 'exclude_spec'], + xor=[u'exclude_file', u'exclude_spec'], ), exclude_spec=dict(argstr='-exclude %s', position=2, sep=',', units='mm', - xor=['exclude_file', 'exclude_spec'], + xor=[u'exclude_file', u'exclude_spec'], ), ignore_exception=dict(nohash=True, usedefault=True, @@ -33,13 +33,13 @@ def test_StreamlineTrack_inputs(): position=-2, ), include_file=dict(argstr='-include %s', - xor=['include_file', 
'include_spec'], + xor=[u'include_file', u'include_spec'], ), include_spec=dict(argstr='-include %s', position=2, sep=',', units='mm', - xor=['include_file', 'include_spec'], + xor=[u'include_file', u'include_spec'], ), initial_cutoff_value=dict(argstr='-initcutoff %s', units='NA', @@ -52,13 +52,13 @@ def test_StreamlineTrack_inputs(): usedefault=True, ), mask_file=dict(argstr='-mask %s', - xor=['mask_file', 'mask_spec'], + xor=[u'mask_file', u'mask_spec'], ), mask_spec=dict(argstr='-mask %s', position=2, sep=',', units='mm', - xor=['mask_file', 'mask_spec'], + xor=[u'mask_file', u'mask_spec'], ), maximum_number_of_tracks=dict(argstr='-maxnum %d', ), @@ -74,19 +74,19 @@ def test_StreamlineTrack_inputs(): no_mask_interpolation=dict(argstr='-nomaskinterp', ), out_file=dict(argstr='%s', - name_source=['in_file'], + name_source=[u'in_file'], name_template='%s_tracked.tck', output_name='tracked', position=-1, ), seed_file=dict(argstr='-seed %s', - xor=['seed_file', 'seed_spec'], + xor=[u'seed_file', u'seed_spec'], ), seed_spec=dict(argstr='-seed %s', position=2, sep=',', units='mm', - xor=['seed_file', 'seed_spec'], + xor=[u'seed_file', u'seed_spec'], ), step_size=dict(argstr='-step %s', units='mm', diff --git a/nipype/interfaces/mrtrix/tracking.py b/nipype/interfaces/mrtrix/tracking.py index c4d49118a8..b03c7814b4 100644 --- a/nipype/interfaces/mrtrix/tracking.py +++ b/nipype/interfaces/mrtrix/tracking.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -8,12 +9,14 @@ >>> os.chdir(datadir) """ +from __future__ import print_function, division, unicode_literals, absolute_import + import os import os.path as op +from ...utils.filemanip import split_filename from ..base import CommandLineInputSpec, CommandLine, traits, TraitedSpec, File from ..traits_extension import isdefined -from ...utils.filemanip import split_filename class 
FilterTracksInputSpec(CommandLineInputSpec): @@ -117,7 +120,7 @@ def _list_outputs(self): return outputs def _gen_filename(self, name): - if name is 'out_filename': + if name == 'out_filename': return self._gen_outfilename() else: return None @@ -207,7 +210,7 @@ class StreamlineTrack(CommandLine): >>> strack.inputs.in_file = 'data.Bfloat' >>> strack.inputs.seed_file = 'seed_mask.nii' >>> strack.inputs.mask_file = 'mask.nii' - >>> strack.cmdline + >>> strack.cmdline # doctest: +IGNORE_UNICODE 'streamtrack -mask mask.nii -seed seed_mask.nii SD_PROB data.Bfloat data_tracked.tck' >>> strack.run() # doctest: +SKIP """ diff --git a/nipype/interfaces/mrtrix3/base.py b/nipype/interfaces/mrtrix3/base.py index 40a8e93a88..ab982b816a 100644 --- a/nipype/interfaces/mrtrix3/base.py +++ b/nipype/interfaces/mrtrix3/base.py @@ -11,14 +11,11 @@ >>> os.chdir(datadir) """ -import os -import os.path as op +from __future__ import print_function, division, unicode_literals, absolute_import -from ..base import (CommandLineInputSpec, CommandLine, traits, TraitedSpec, File, - InputMultiPath) -from ..traits_extension import isdefined -from ...utils.filemanip import split_filename from ... 
import logging +from ..traits_extension import isdefined +from ..base import (CommandLineInputSpec, CommandLine, traits, File) logger = logging.getLogger('interface') diff --git a/nipype/interfaces/mrtrix3/connectivity.py b/nipype/interfaces/mrtrix3/connectivity.py index 64e73f8069..a213062a6a 100644 --- a/nipype/interfaces/mrtrix3/connectivity.py +++ b/nipype/interfaces/mrtrix3/connectivity.py @@ -11,13 +11,14 @@ >>> os.chdir(datadir) """ +from __future__ import print_function, division, unicode_literals, absolute_import + import os import os.path as op -from .base import MRTrix3BaseInputSpec, MRTrix3Base -from ..base import (CommandLineInputSpec, CommandLine, traits, TraitedSpec, File) from ..traits_extension import isdefined -from ...utils.filemanip import split_filename +from ..base import (CommandLineInputSpec, traits, TraitedSpec, File) +from .base import MRTrix3Base class BuildConnectomeInputSpec(CommandLineInputSpec): @@ -95,7 +96,7 @@ class BuildConnectome(MRTrix3Base): >>> mat = mrt.BuildConnectome() >>> mat.inputs.in_file = 'tracks.tck' >>> mat.inputs.in_parc = 'aparc+aseg.nii' - >>> mat.cmdline # doctest: +ELLIPSIS + >>> mat.cmdline # doctest: +ELLIPSIS +IGNORE_UNICODE 'tck2connectome tracks.tck aparc+aseg.nii connectome.csv' >>> mat.run() # doctest: +SKIP """ @@ -154,7 +155,7 @@ class LabelConfig(MRTrix3Base): >>> labels = mrt.LabelConfig() >>> labels.inputs.in_file = 'aparc+aseg.nii' >>> labels.inputs.in_config = 'mrtrix3_labelconfig.txt' - >>> labels.cmdline # doctest: +ELLIPSIS + >>> labels.cmdline # doctest: +ELLIPSIS +IGNORE_UNICODE 'labelconfig aparc+aseg.nii mrtrix3_labelconfig.txt parcellation.mif' >>> labels.run() # doctest: +SKIP """ diff --git a/nipype/interfaces/mrtrix3/preprocess.py b/nipype/interfaces/mrtrix3/preprocess.py index e52c84071d..8f96154909 100644 --- a/nipype/interfaces/mrtrix3/preprocess.py +++ b/nipype/interfaces/mrtrix3/preprocess.py @@ -11,14 +11,14 @@ >>> os.chdir(datadir) """ -import os +from __future__ import 
print_function, division, unicode_literals, absolute_import + import os.path as op -from .base import MRTrix3BaseInputSpec, MRTrix3Base +from ..traits_extension import isdefined from ..base import (CommandLineInputSpec, CommandLine, traits, TraitedSpec, File) -from ..traits_extension import isdefined -from ...utils.filemanip import split_filename +from .base import MRTrix3BaseInputSpec, MRTrix3Base class ResponseSDInputSpec(MRTrix3BaseInputSpec): @@ -96,7 +96,7 @@ class ResponseSD(MRTrix3Base): >>> resp.inputs.in_file = 'dwi.mif' >>> resp.inputs.in_mask = 'mask.nii.gz' >>> resp.inputs.grad_fsl = ('bvecs', 'bvals') - >>> resp.cmdline # doctest: +ELLIPSIS + >>> resp.cmdline # doctest: +ELLIPSIS +IGNORE_UNICODE 'dwi2response -fslgrad bvecs bvals -mask mask.nii.gz dwi.mif response.txt' >>> resp.run() # doctest: +SKIP """ @@ -139,7 +139,7 @@ class ACTPrepareFSL(CommandLine): >>> import nipype.interfaces.mrtrix3 as mrt >>> prep = mrt.ACTPrepareFSL() >>> prep.inputs.in_file = 'T1.nii.gz' - >>> prep.cmdline # doctest: +ELLIPSIS + >>> prep.cmdline # doctest: +ELLIPSIS +IGNORE_UNICODE 'act_anat_prepare_fsl T1.nii.gz act_5tt.mif' >>> prep.run() # doctest: +SKIP """ @@ -185,7 +185,7 @@ class ReplaceFSwithFIRST(CommandLine): >>> prep.inputs.in_file = 'aparc+aseg.nii' >>> prep.inputs.in_t1w = 'T1.nii.gz' >>> prep.inputs.in_config = 'mrtrix3_labelconfig.txt' - >>> prep.cmdline # doctest: +ELLIPSIS + >>> prep.cmdline # doctest: +ELLIPSIS +IGNORE_UNICODE 'fs_parc_replace_sgm_first aparc+aseg.nii T1.nii.gz \ mrtrix3_labelconfig.txt aparc+first.mif' >>> prep.run() # doctest: +SKIP diff --git a/nipype/interfaces/mrtrix3/reconst.py b/nipype/interfaces/mrtrix3/reconst.py index ce023fbdef..9341347dfe 100644 --- a/nipype/interfaces/mrtrix3/reconst.py +++ b/nipype/interfaces/mrtrix3/reconst.py @@ -11,16 +11,12 @@ >>> os.chdir(datadir) """ +from __future__ import print_function, division, unicode_literals, absolute_import -from __future__ import absolute_import -import os import os.path as 
op +from ..base import traits, TraitedSpec, File from .base import MRTrix3BaseInputSpec, MRTrix3Base -from ..base import (CommandLineInputSpec, CommandLine, traits, TraitedSpec, - File, InputMultiPath) -from ..traits_extension import isdefined -from ...utils.filemanip import split_filename class FitTensorInputSpec(MRTrix3BaseInputSpec): @@ -62,7 +58,7 @@ class FitTensor(MRTrix3Base): >>> tsr.inputs.in_file = 'dwi.mif' >>> tsr.inputs.in_mask = 'mask.nii.gz' >>> tsr.inputs.grad_fsl = ('bvecs', 'bvals') - >>> tsr.cmdline # doctest: +ELLIPSIS + >>> tsr.cmdline # doctest: +ELLIPSIS +IGNORE_UNICODE 'dwi2tensor -fslgrad bvecs bvals -mask mask.nii.gz dwi.mif dti.mif' >>> tsr.run() # doctest: +SKIP """ @@ -177,7 +173,7 @@ class EstimateFOD(MRTrix3Base): >>> fod.inputs.response = 'response.txt' >>> fod.inputs.in_mask = 'mask.nii.gz' >>> fod.inputs.grad_fsl = ('bvecs', 'bvals') - >>> fod.cmdline # doctest: +ELLIPSIS + >>> fod.cmdline # doctest: +ELLIPSIS +IGNORE_UNICODE 'dwi2fod -fslgrad bvecs bvals -mask mask.nii.gz dwi.mif response.txt\ fods.mif' >>> fod.run() # doctest: +SKIP diff --git a/nipype/interfaces/mrtrix3/tests/__init__.py b/nipype/interfaces/mrtrix3/tests/__init__.py index e69de29bb2..40a96afc6f 100644 --- a/nipype/interfaces/mrtrix3/tests/__init__.py +++ b/nipype/interfaces/mrtrix3/tests/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_Tractography.py b/nipype/interfaces/mrtrix3/tests/test_auto_Tractography.py index aeaff9b599..0ff10769be 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_Tractography.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_Tractography.py @@ -81,17 +81,17 @@ def test_Tractography_inputs(): seed_dynamic=dict(argstr='-seed_dynamic %s', ), seed_gmwmi=dict(argstr='-seed_gmwmi %s', - requires=['act_file'], + requires=[u'act_file'], ), seed_grid_voxel=dict(argstr='-seed_grid_per_voxel %s %d', - xor=['seed_image', 'seed_rnd_voxel'], + xor=[u'seed_image', u'seed_rnd_voxel'], ), 
seed_image=dict(argstr='-seed_image %s', ), seed_rejection=dict(argstr='-seed_rejection %s', ), seed_rnd_voxel=dict(argstr='-seed_random_per_voxel %s %d', - xor=['seed_image', 'seed_grid_voxel'], + xor=[u'seed_image', u'seed_grid_voxel'], ), seed_sphere=dict(argstr='-seed_sphere %f,%f,%f,%f', ), diff --git a/nipype/interfaces/mrtrix3/tracking.py b/nipype/interfaces/mrtrix3/tracking.py index 7495211543..7a5fe84b66 100644 --- a/nipype/interfaces/mrtrix3/tracking.py +++ b/nipype/interfaces/mrtrix3/tracking.py @@ -11,16 +11,12 @@ >>> os.chdir(datadir) """ +from __future__ import print_function, division, unicode_literals, absolute_import -from __future__ import absolute_import -import os import os.path as op +from ..base import traits, TraitedSpec, File from .base import MRTrix3BaseInputSpec, MRTrix3Base -from ..base import (CommandLineInputSpec, CommandLine, traits, - TraitedSpec, File) -from ..traits_extension import isdefined -from ...utils.filemanip import split_filename class TractographyInputSpec(MRTrix3BaseInputSpec): @@ -231,7 +227,7 @@ class Tractography(MRTrix3Base): >>> tk.inputs.in_file = 'fods.mif' >>> tk.inputs.roi_mask = 'mask.nii.gz' >>> tk.inputs.seed_sphere = (80, 100, 70, 10) - >>> tk.cmdline # doctest: +ELLIPSIS + >>> tk.cmdline # doctest: +ELLIPSIS +IGNORE_UNICODE 'tckgen -algorithm iFOD2 -mask mask.nii.gz -seed_sphere \ 80.000000,100.000000,70.000000,10.000000 fods.mif tracked.tck' >>> tk.run() # doctest: +SKIP diff --git a/nipype/interfaces/mrtrix3/utils.py b/nipype/interfaces/mrtrix3/utils.py index 6a5b68f521..5bc94bf3ca 100644 --- a/nipype/interfaces/mrtrix3/utils.py +++ b/nipype/interfaces/mrtrix3/utils.py @@ -11,16 +11,15 @@ >>> os.chdir(datadir) """ +from __future__ import print_function, division, unicode_literals, absolute_import -from __future__ import absolute_import -import os import os.path as op -from .base import MRTrix3BaseInputSpec, MRTrix3Base +from ..traits_extension import isdefined from ..base import (CommandLineInputSpec, 
CommandLine, traits, TraitedSpec, File, InputMultiPath) -from ..traits_extension import isdefined -from ...utils.filemanip import split_filename +from .base import MRTrix3BaseInputSpec, MRTrix3Base + class BrainMaskInputSpec(MRTrix3BaseInputSpec): @@ -47,7 +46,7 @@ class BrainMask(CommandLine): >>> import nipype.interfaces.mrtrix3 as mrt >>> bmsk = mrt.BrainMask() >>> bmsk.inputs.in_file = 'dwi.mif' - >>> bmsk.cmdline # doctest: +ELLIPSIS + >>> bmsk.cmdline # doctest: +ELLIPSIS +IGNORE_UNICODE 'dwi2mask dwi.mif brainmask.mif' >>> bmsk.run() # doctest: +SKIP """ @@ -94,7 +93,7 @@ class Mesh2PVE(CommandLine): >>> m2p.inputs.in_file = 'surf1.vtk' >>> m2p.inputs.reference = 'dwi.mif' >>> m2p.inputs.in_first = 'T1.nii.gz' - >>> m2p.cmdline # doctest: +ELLIPSIS + >>> m2p.cmdline # doctest: +ELLIPSIS +IGNORE_UNICODE 'mesh2pve -first T1.nii.gz surf1.vtk dwi.mif mesh2volume.nii.gz' >>> m2p.run() # doctest: +SKIP """ @@ -140,7 +139,7 @@ class Generate5tt(CommandLine): >>> seg.inputs.in_fast = ['tpm_00.nii.gz', ... 
'tpm_01.nii.gz', 'tpm_02.nii.gz'] >>> seg.inputs.in_first = 'first_merged.nii.gz' - >>> seg.cmdline # doctest: +ELLIPSIS + >>> seg.cmdline # doctest: +ELLIPSIS +IGNORE_UNICODE '5ttgen tpm_00.nii.gz tpm_01.nii.gz tpm_02.nii.gz first_merged.nii.gz\ act-5tt.mif' >>> seg.run() # doctest: +SKIP @@ -198,7 +197,7 @@ class TensorMetrics(CommandLine): >>> comp = mrt.TensorMetrics() >>> comp.inputs.in_file = 'dti.mif' >>> comp.inputs.out_fa = 'fa.mif' - >>> comp.cmdline # doctest: +ELLIPSIS + >>> comp.cmdline # doctest: +ELLIPSIS +IGNORE_UNICODE 'tensor2metric -fa fa.mif dti.mif' >>> comp.run() # doctest: +SKIP """ @@ -338,7 +337,7 @@ class ComputeTDI(MRTrix3Base): >>> import nipype.interfaces.mrtrix3 as mrt >>> tdi = mrt.ComputeTDI() >>> tdi.inputs.in_file = 'dti.mif' - >>> tdi.cmdline # doctest: +ELLIPSIS + >>> tdi.cmdline # doctest: +ELLIPSIS +IGNORE_UNICODE 'tckmap dti.mif tdi.mif' >>> tdi.run() # doctest: +SKIP """ @@ -389,7 +388,7 @@ class TCK2VTK(MRTrix3Base): >>> vtk = mrt.TCK2VTK() >>> vtk.inputs.in_file = 'tracks.tck' >>> vtk.inputs.reference = 'b0.nii' - >>> vtk.cmdline # doctest: +ELLIPSIS + >>> vtk.cmdline # doctest: +ELLIPSIS +IGNORE_UNICODE 'tck2vtk -image b0.nii tracks.tck tracks.vtk' >>> vtk.run() # doctest: +SKIP """ diff --git a/nipype/interfaces/nipy/__init__.py b/nipype/interfaces/nipy/__init__.py index d78ae64447..647850fe1b 100644 --- a/nipype/interfaces/nipy/__init__.py +++ b/nipype/interfaces/nipy/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from .model import FitGLM, EstimateContrast from .preprocess import ComputeMask, FmriRealign4d, SpaceTimeRealigner from .utils import Similarity diff --git a/nipype/interfaces/nipy/model.py b/nipype/interfaces/nipy/model.py index ff22465f6f..584152d01e 100644 --- a/nipype/interfaces/nipy/model.py +++ b/nipype/interfaces/nipy/model.py @@ -1,12 +1,15 @@ -from __future__ import division -from builtins import range +# -*- coding: utf-8 -*- +from __future__ import print_function, division, unicode_literals, 
absolute_import +from builtins import range, str, bytes + import os import nibabel as nb import numpy as np from ...utils.misc import package_check -from ...external.six import string_types +from ..base import (BaseInterface, TraitedSpec, traits, File, OutputMultiPath, + BaseInterfaceInputSpec, isdefined) have_nipy = True try: @@ -23,8 +26,6 @@ except AttributeError: from nipy.modalities.fmri.experimental_paradigm import BlockParadigm -from ..base import (BaseInterface, TraitedSpec, traits, File, OutputMultiPath, - BaseInterfaceInputSpec, isdefined) class FitGLMInputSpec(BaseInterfaceInputSpec): @@ -84,7 +85,7 @@ def _run_interface(self, runtime): session_info = self.inputs.session_info functional_runs = self.inputs.session_info[0]['scans'] - if isinstance(functional_runs, string_types): + if isinstance(functional_runs, (str, bytes)): functional_runs = [functional_runs] nii = nb.load(functional_runs[0]) data = nii.get_data() diff --git a/nipype/interfaces/nipy/preprocess.py b/nipype/interfaces/nipy/preprocess.py index 1328e28a18..579f9f7988 100644 --- a/nipype/interfaces/nipy/preprocess.py +++ b/nipype/interfaces/nipy/preprocess.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- """ Change directory to provide relative paths for doctests >>> import os @@ -6,14 +7,19 @@ >>> os.chdir(datadir) """ +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import open + import os -import warnings import nibabel as nb import numpy as np from ...utils.misc import package_check from ...utils.filemanip import split_filename, fname_presuffix +from ..base import (TraitedSpec, BaseInterface, traits, + BaseInterfaceInputSpec, isdefined, File, + InputMultiPath, OutputMultiPath) have_nipy = True @@ -26,9 +32,6 @@ from nipy import save_image, load_image nipy_version = nipy.__version__ -from ..base import (TraitedSpec, BaseInterface, traits, - BaseInterfaceInputSpec, isdefined, File, - InputMultiPath, OutputMultiPath) class 
ComputeMaskInputSpec(BaseInterfaceInputSpec): diff --git a/nipype/interfaces/nipy/tests/__init__.py b/nipype/interfaces/nipy/tests/__init__.py index e69de29bb2..40a96afc6f 100644 --- a/nipype/interfaces/nipy/tests/__init__.py +++ b/nipype/interfaces/nipy/tests/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/nipy/tests/test_auto_FmriRealign4d.py b/nipype/interfaces/nipy/tests/test_auto_FmriRealign4d.py index 917c1bbe9c..824dbf31de 100644 --- a/nipype/interfaces/nipy/tests/test_auto_FmriRealign4d.py +++ b/nipype/interfaces/nipy/tests/test_auto_FmriRealign4d.py @@ -13,17 +13,17 @@ def test_FmriRealign4d_inputs(): ), loops=dict(usedefault=True, ), - slice_order=dict(requires=['time_interp'], + slice_order=dict(requires=[u'time_interp'], ), speedup=dict(usedefault=True, ), start=dict(usedefault=True, ), - time_interp=dict(requires=['slice_order'], + time_interp=dict(requires=[u'slice_order'], ), tr=dict(mandatory=True, ), - tr_slices=dict(requires=['time_interp'], + tr_slices=dict(requires=[u'time_interp'], ), ) inputs = FmriRealign4d.input_spec() diff --git a/nipype/interfaces/nipy/tests/test_auto_SpaceTimeRealigner.py b/nipype/interfaces/nipy/tests/test_auto_SpaceTimeRealigner.py index 1dc0a5e6df..961756a800 100644 --- a/nipype/interfaces/nipy/tests/test_auto_SpaceTimeRealigner.py +++ b/nipype/interfaces/nipy/tests/test_auto_SpaceTimeRealigner.py @@ -10,10 +10,10 @@ def test_SpaceTimeRealigner_inputs(): in_file=dict(mandatory=True, min_ver='0.4.0.dev', ), - slice_info=dict(requires=['slice_times'], + slice_info=dict(requires=[u'slice_times'], ), slice_times=dict(), - tr=dict(requires=['slice_times'], + tr=dict(requires=[u'slice_times'], ), ) inputs = SpaceTimeRealigner.input_spec() diff --git a/nipype/interfaces/nipy/utils.py b/nipype/interfaces/nipy/utils.py index 0e78111c0e..0b1c6a2091 100644 --- a/nipype/interfaces/nipy/utils.py +++ b/nipype/interfaces/nipy/utils.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- """ Change directory to 
provide relative paths for doctests >>> import os @@ -6,11 +7,14 @@ >>> os.chdir(datadir) """ -import warnings +from __future__ import print_function, division, unicode_literals, absolute_import +import warnings import nibabel as nb from ...utils.misc import package_check +from ..base import (TraitedSpec, BaseInterface, traits, + BaseInterfaceInputSpec, File, isdefined) have_nipy = True try: @@ -21,8 +25,6 @@ from nipy.algorithms.registration.histogram_registration import HistogramRegistration from nipy.algorithms.registration.affine import Affine -from ..base import (TraitedSpec, BaseInterface, traits, - BaseInterfaceInputSpec, File, isdefined) class SimilarityInputSpec(BaseInterfaceInputSpec): diff --git a/nipype/interfaces/nitime/__init__.py b/nipype/interfaces/nitime/__init__.py index e1d0d17326..656d601b3e 100644 --- a/nipype/interfaces/nitime/__init__.py +++ b/nipype/interfaces/nitime/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/interfaces/nitime/analysis.py b/nipype/interfaces/nitime/analysis.py index da76a5882f..ee34271044 100644 --- a/nipype/interfaces/nitime/analysis.py +++ b/nipype/interfaces/nitime/analysis.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -9,19 +10,17 @@ - nitime.viz.drawmatrix_channels """ +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import zip, object, open -from builtins import zip -from builtins import object - -import warnings import numpy as np import tempfile -from ...utils.misc import package_check +from ...utils.misc import package_check +from ...utils.filemanip import fname_presuffix from ..base import (TraitedSpec, File, Undefined, traits, BaseInterface, isdefined, BaseInterfaceInputSpec) -from ...utils.filemanip import 
fname_presuffix have_nitime = True try: diff --git a/nipype/interfaces/nitime/tests/__init__.py b/nipype/interfaces/nitime/tests/__init__.py index e69de29bb2..40a96afc6f 100644 --- a/nipype/interfaces/nitime/tests/__init__.py +++ b/nipype/interfaces/nitime/tests/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/nitime/tests/test_auto_CoherenceAnalyzer.py b/nipype/interfaces/nitime/tests/test_auto_CoherenceAnalyzer.py index 2303b647c0..9766257e19 100644 --- a/nipype/interfaces/nitime/tests/test_auto_CoherenceAnalyzer.py +++ b/nipype/interfaces/nitime/tests/test_auto_CoherenceAnalyzer.py @@ -15,7 +15,7 @@ def test_CoherenceAnalyzer_inputs(): usedefault=True, ), in_TS=dict(), - in_file=dict(requires=('TR',), + in_file=dict(requires=(u'TR',), ), n_overlap=dict(usedefault=True, ), diff --git a/nipype/interfaces/nitime/tests/test_nitime.py b/nipype/interfaces/nitime/tests/test_nitime.py index a5bfe177bf..42ba9f8edf 100644 --- a/nipype/interfaces/nitime/tests/test_nitime.py +++ b/nipype/interfaces/nitime/tests/test_nitime.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os diff --git a/nipype/interfaces/petpvc.py b/nipype/interfaces/petpvc.py index b1f6405cd8..72389d9f50 100644 --- a/nipype/interfaces/petpvc.py +++ b/nipype/interfaces/petpvc.py @@ -8,23 +8,12 @@ >>> datadir = os.path.realpath(os.path.join(filepath, '../testing/data')) >>> os.chdir(datadir) """ - -from __future__ import print_function -from __future__ import division +from __future__ import print_function, division, unicode_literals, absolute_import import os -import warnings - -from nipype.interfaces.base import ( - TraitedSpec, - CommandLineInputSpec, - CommandLine, - File, - isdefined, - traits, -) - -warn = warnings.warn + +from .base import TraitedSpec, CommandLineInputSpec, CommandLine, File, isdefined, traits + pvc_methods = ['GTM', 'IY', diff --git 
a/nipype/interfaces/semtools/__init__.py b/nipype/interfaces/semtools/__init__.py index 33a57faab6..14473b8381 100644 --- a/nipype/interfaces/semtools/__init__.py +++ b/nipype/interfaces/semtools/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from __future__ import absolute_import from .diffusion import * from .featurecreator import GenerateCsfClippedFromClassifiedImage diff --git a/nipype/interfaces/semtools/brains/__init__.py b/nipype/interfaces/semtools/brains/__init__.py index a14dc3ba13..48aac3cda5 100644 --- a/nipype/interfaces/semtools/brains/__init__.py +++ b/nipype/interfaces/semtools/brains/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from __future__ import absolute_import from .segmentation import SimilarityIndex, BRAINSTalairach, BRAINSTalairachMask from .utilities import HistogramMatchingFilter diff --git a/nipype/interfaces/semtools/brains/classify.py b/nipype/interfaces/semtools/brains/classify.py index 191284b104..26b7c97dda 100644 --- a/nipype/interfaces/semtools/brains/classify.py +++ b/nipype/interfaces/semtools/brains/classify.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/brains/segmentation.py b/nipype/interfaces/semtools/brains/segmentation.py index a5f5c0031c..8de2d5e7d6 100644 --- a/nipype/interfaces/semtools/brains/segmentation.py +++ b/nipype/interfaces/semtools/brains/segmentation.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/brains/tests/__init__.py b/nipype/interfaces/semtools/brains/tests/__init__.py index e69de29bb2..40a96afc6f 100644 --- a/nipype/interfaces/semtools/brains/tests/__init__.py +++ b/nipype/interfaces/semtools/brains/tests/__init__.py @@ -0,0 
+1 @@ +# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/semtools/brains/utilities.py b/nipype/interfaces/semtools/brains/utilities.py index a296be671c..b24615bc2b 100644 --- a/nipype/interfaces/semtools/brains/utilities.py +++ b/nipype/interfaces/semtools/brains/utilities.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/converters.py b/nipype/interfaces/semtools/converters.py index 01ef06967b..64954fba1c 100644 --- a/nipype/interfaces/semtools/converters.py +++ b/nipype/interfaces/semtools/converters.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/diffusion/__init__.py b/nipype/interfaces/semtools/diffusion/__init__.py index f7b207f039..100af56cbd 100644 --- a/nipype/interfaces/semtools/diffusion/__init__.py +++ b/nipype/interfaces/semtools/diffusion/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from __future__ import absolute_import from .diffusion import dtiaverage, dtiestim, dtiprocess, DWIConvert from .tractography import * diff --git a/nipype/interfaces/semtools/diffusion/diffusion.py b/nipype/interfaces/semtools/diffusion/diffusion.py index ffb1e4d280..f449964da0 100644 --- a/nipype/interfaces/semtools/diffusion/diffusion.py +++ b/nipype/interfaces/semtools/diffusion/diffusion.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/diffusion/gtract.py b/nipype/interfaces/semtools/diffusion/gtract.py index c7f2db432c..d4f05b6630 100644 --- a/nipype/interfaces/semtools/diffusion/gtract.py +++ 
b/nipype/interfaces/semtools/diffusion/gtract.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/diffusion/maxcurvature.py b/nipype/interfaces/semtools/diffusion/maxcurvature.py index 4206844ea2..6629a006c9 100644 --- a/nipype/interfaces/semtools/diffusion/maxcurvature.py +++ b/nipype/interfaces/semtools/diffusion/maxcurvature.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/diffusion/tests/__init__.py b/nipype/interfaces/semtools/diffusion/tests/__init__.py index e69de29bb2..40a96afc6f 100644 --- a/nipype/interfaces/semtools/diffusion/tests/__init__.py +++ b/nipype/interfaces/semtools/diffusion/tests/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/semtools/diffusion/tractography/__init__.py b/nipype/interfaces/semtools/diffusion/tractography/__init__.py index 2d170ebcfe..f846b7fde5 100644 --- a/nipype/interfaces/semtools/diffusion/tractography/__init__.py +++ b/nipype/interfaces/semtools/diffusion/tractography/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from __future__ import absolute_import from .commandlineonly import fiberstats from .fiberprocess import fiberprocess diff --git a/nipype/interfaces/semtools/diffusion/tractography/commandlineonly.py b/nipype/interfaces/semtools/diffusion/tractography/commandlineonly.py index edf2f089a9..918fa113d8 100644 --- a/nipype/interfaces/semtools/diffusion/tractography/commandlineonly.py +++ b/nipype/interfaces/semtools/diffusion/tractography/commandlineonly.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or 
change the generator.""" diff --git a/nipype/interfaces/semtools/diffusion/tractography/fiberprocess.py b/nipype/interfaces/semtools/diffusion/tractography/fiberprocess.py index 28fbac5b24..bc003cebc4 100644 --- a/nipype/interfaces/semtools/diffusion/tractography/fiberprocess.py +++ b/nipype/interfaces/semtools/diffusion/tractography/fiberprocess.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/diffusion/tractography/fibertrack.py b/nipype/interfaces/semtools/diffusion/tractography/fibertrack.py index 893f3a9b7b..fb8532f87e 100644 --- a/nipype/interfaces/semtools/diffusion/tractography/fibertrack.py +++ b/nipype/interfaces/semtools/diffusion/tractography/fibertrack.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/diffusion/tractography/tests/__init__.py b/nipype/interfaces/semtools/diffusion/tractography/tests/__init__.py index e69de29bb2..40a96afc6f 100644 --- a/nipype/interfaces/semtools/diffusion/tractography/tests/__init__.py +++ b/nipype/interfaces/semtools/diffusion/tractography/tests/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/semtools/diffusion/tractography/ukftractography.py b/nipype/interfaces/semtools/diffusion/tractography/ukftractography.py index 63b1ecb33c..dd29991b97 100644 --- a/nipype/interfaces/semtools/diffusion/tractography/ukftractography.py +++ b/nipype/interfaces/semtools/diffusion/tractography/ukftractography.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git 
a/nipype/interfaces/semtools/featurecreator.py b/nipype/interfaces/semtools/featurecreator.py index 4147832b77..d194e2b9c1 100644 --- a/nipype/interfaces/semtools/featurecreator.py +++ b/nipype/interfaces/semtools/featurecreator.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/filtering/__init__.py b/nipype/interfaces/semtools/filtering/__init__.py index 7631224429..ac69328f10 100644 --- a/nipype/interfaces/semtools/filtering/__init__.py +++ b/nipype/interfaces/semtools/filtering/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from __future__ import absolute_import from .denoising import UnbiasedNonLocalMeans from .featuredetection import GenerateSummedGradientImage, CannySegmentationLevelSetImageFilter, DilateImage, TextureFromNoiseImageFilter, FlippedDifference, ErodeImage, GenerateBrainClippedImage, NeighborhoodMedian, GenerateTestImage, NeighborhoodMean, HammerAttributeCreator, TextureMeasureFilter, DilateMask, DumpBinaryTrainingVectors, DistanceMaps, STAPLEAnalysis, GradientAnisotropicDiffusionImageFilter, CannyEdge diff --git a/nipype/interfaces/semtools/filtering/denoising.py b/nipype/interfaces/semtools/filtering/denoising.py index 03536fd643..f98663df78 100644 --- a/nipype/interfaces/semtools/filtering/denoising.py +++ b/nipype/interfaces/semtools/filtering/denoising.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/filtering/featuredetection.py b/nipype/interfaces/semtools/filtering/featuredetection.py index 6f66a39ae4..99c78d3993 100644 --- a/nipype/interfaces/semtools/filtering/featuredetection.py +++ b/nipype/interfaces/semtools/filtering/featuredetection.py @@ -1,3 +1,4 @@ +# -*- coding: 
utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/filtering/tests/__init__.py b/nipype/interfaces/semtools/filtering/tests/__init__.py index e69de29bb2..40a96afc6f 100644 --- a/nipype/interfaces/semtools/filtering/tests/__init__.py +++ b/nipype/interfaces/semtools/filtering/tests/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/semtools/legacy/__init__.py b/nipype/interfaces/semtools/legacy/__init__.py index e026aab90e..3309d49d62 100644 --- a/nipype/interfaces/semtools/legacy/__init__.py +++ b/nipype/interfaces/semtools/legacy/__init__.py @@ -1,2 +1,3 @@ +# -*- coding: utf-8 -*- from __future__ import absolute_import from .registration import scalartransform diff --git a/nipype/interfaces/semtools/legacy/registration.py b/nipype/interfaces/semtools/legacy/registration.py index 8af1e20bfd..7461fe067a 100644 --- a/nipype/interfaces/semtools/legacy/registration.py +++ b/nipype/interfaces/semtools/legacy/registration.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/legacy/tests/__init__.py b/nipype/interfaces/semtools/legacy/tests/__init__.py index e69de29bb2..40a96afc6f 100644 --- a/nipype/interfaces/semtools/legacy/tests/__init__.py +++ b/nipype/interfaces/semtools/legacy/tests/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/semtools/registration/__init__.py b/nipype/interfaces/semtools/registration/__init__.py index 2471436c7a..3b9a8916d5 100644 --- a/nipype/interfaces/semtools/registration/__init__.py +++ b/nipype/interfaces/semtools/registration/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from __future__ import absolute_import from .specialized import 
VBRAINSDemonWarp, BRAINSDemonWarp, BRAINSTransformFromFiducials from .brainsresample import BRAINSResample diff --git a/nipype/interfaces/semtools/registration/brainsfit.py b/nipype/interfaces/semtools/registration/brainsfit.py index f80750d09a..9b6c0f5634 100644 --- a/nipype/interfaces/semtools/registration/brainsfit.py +++ b/nipype/interfaces/semtools/registration/brainsfit.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/registration/brainsresample.py b/nipype/interfaces/semtools/registration/brainsresample.py index 9a14b48589..988b83a56e 100644 --- a/nipype/interfaces/semtools/registration/brainsresample.py +++ b/nipype/interfaces/semtools/registration/brainsresample.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/registration/brainsresize.py b/nipype/interfaces/semtools/registration/brainsresize.py index dac094b6e4..3c05047109 100644 --- a/nipype/interfaces/semtools/registration/brainsresize.py +++ b/nipype/interfaces/semtools/registration/brainsresize.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/registration/specialized.py b/nipype/interfaces/semtools/registration/specialized.py index ed6e6c76d7..91cfa66a8e 100644 --- a/nipype/interfaces/semtools/registration/specialized.py +++ b/nipype/interfaces/semtools/registration/specialized.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" 
diff --git a/nipype/interfaces/semtools/registration/tests/__init__.py b/nipype/interfaces/semtools/registration/tests/__init__.py index e69de29bb2..40a96afc6f 100644 --- a/nipype/interfaces/semtools/registration/tests/__init__.py +++ b/nipype/interfaces/semtools/registration/tests/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/semtools/segmentation/__init__.py b/nipype/interfaces/semtools/segmentation/__init__.py index c35f11739a..9fbf33be3c 100644 --- a/nipype/interfaces/semtools/segmentation/__init__.py +++ b/nipype/interfaces/semtools/segmentation/__init__.py @@ -1,2 +1,3 @@ +# -*- coding: utf-8 -*- from __future__ import absolute_import from .specialized import BRAINSCut, BRAINSROIAuto, BRAINSConstellationDetector, BRAINSCreateLabelMapFromProbabilityMaps, BinaryMaskEditorBasedOnLandmarks, BRAINSMultiSTAPLE, BRAINSABC, ESLR diff --git a/nipype/interfaces/semtools/segmentation/specialized.py b/nipype/interfaces/semtools/segmentation/specialized.py index ec2395a200..2e4e73fccc 100644 --- a/nipype/interfaces/semtools/segmentation/specialized.py +++ b/nipype/interfaces/semtools/segmentation/specialized.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/segmentation/tests/__init__.py b/nipype/interfaces/semtools/segmentation/tests/__init__.py index e69de29bb2..40a96afc6f 100644 --- a/nipype/interfaces/semtools/segmentation/tests/__init__.py +++ b/nipype/interfaces/semtools/segmentation/tests/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/semtools/testing/__init__.py b/nipype/interfaces/semtools/testing/__init__.py index 83ddac53d7..66a4a2262e 100644 --- a/nipype/interfaces/semtools/testing/__init__.py +++ b/nipype/interfaces/semtools/testing/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from __future__ import 
absolute_import from .featuredetection import SphericalCoordinateGeneration from .landmarkscompare import LandmarksCompare diff --git a/nipype/interfaces/semtools/testing/featuredetection.py b/nipype/interfaces/semtools/testing/featuredetection.py index 2985e5bf1b..ec684d5907 100644 --- a/nipype/interfaces/semtools/testing/featuredetection.py +++ b/nipype/interfaces/semtools/testing/featuredetection.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/testing/generateaveragelmkfile.py b/nipype/interfaces/semtools/testing/generateaveragelmkfile.py index 0bd8188269..a308778415 100644 --- a/nipype/interfaces/semtools/testing/generateaveragelmkfile.py +++ b/nipype/interfaces/semtools/testing/generateaveragelmkfile.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/testing/landmarkscompare.py b/nipype/interfaces/semtools/testing/landmarkscompare.py index 1370bcb419..6d15778e76 100644 --- a/nipype/interfaces/semtools/testing/landmarkscompare.py +++ b/nipype/interfaces/semtools/testing/landmarkscompare.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/tests/__init__.py b/nipype/interfaces/semtools/tests/__init__.py index e69de29bb2..40a96afc6f 100644 --- a/nipype/interfaces/semtools/tests/__init__.py +++ b/nipype/interfaces/semtools/tests/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/semtools/utilities/__init__.py b/nipype/interfaces/semtools/utilities/__init__.py index 5bf1a40e4a..f647492fcc 100644 --- 
a/nipype/interfaces/semtools/utilities/__init__.py +++ b/nipype/interfaces/semtools/utilities/__init__.py @@ -1,2 +1,3 @@ +# -*- coding: utf-8 -*- from __future__ import absolute_import from .brains import BRAINSConstellationModeler, landmarksConstellationWeights, BRAINSTrimForegroundInDirection, BRAINSLmkTransform, BRAINSMush, BRAINSTransformConvert, landmarksConstellationAligner, BRAINSEyeDetector, BRAINSLinearModelerEPCA, BRAINSInitializedControlPoints, CleanUpOverlapLabels, BRAINSClipInferior, GenerateLabelMapFromProbabilityMap, BRAINSAlignMSP, BRAINSLandmarkInitializer, insertMidACPCpoint, BRAINSSnapShotWriter, JointHistogram, ShuffleVectorsModule, ImageRegionPlotter diff --git a/nipype/interfaces/semtools/utilities/brains.py b/nipype/interfaces/semtools/utilities/brains.py index e7402da5a4..4e64db7bd4 100644 --- a/nipype/interfaces/semtools/utilities/brains.py +++ b/nipype/interfaces/semtools/utilities/brains.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/utilities/tests/__init__.py b/nipype/interfaces/semtools/utilities/tests/__init__.py index e69de29bb2..40a96afc6f 100644 --- a/nipype/interfaces/semtools/utilities/tests/__init__.py +++ b/nipype/interfaces/semtools/utilities/tests/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/setup.py b/nipype/interfaces/setup.py index 4f7df34b1e..4c79456824 100644 --- a/nipype/interfaces/setup.py +++ b/nipype/interfaces/setup.py @@ -1,5 +1,7 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: +from __future__ import print_function, division, unicode_literals, absolute_import def configuration(parent_package='', top_path=None): diff --git a/nipype/interfaces/slicer/__init__.py b/nipype/interfaces/slicer/__init__.py index 
84fd7de4b7..c6b929c41e 100644 --- a/nipype/interfaces/slicer/__init__.py +++ b/nipype/interfaces/slicer/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from __future__ import absolute_import from .diffusion import * from .segmentation import * diff --git a/nipype/interfaces/slicer/base.py b/nipype/interfaces/slicer/base.py index de00883265..aae54ec00b 100644 --- a/nipype/interfaces/slicer/base.py +++ b/nipype/interfaces/slicer/base.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from ..base import SEMLikeCommandLine diff --git a/nipype/interfaces/slicer/converters.py b/nipype/interfaces/slicer/converters.py index c55656fd4b..cc110d9ceb 100644 --- a/nipype/interfaces/slicer/converters.py +++ b/nipype/interfaces/slicer/converters.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/diffusion/__init__.py b/nipype/interfaces/slicer/diffusion/__init__.py index e8ba6f01c9..85544b6594 100644 --- a/nipype/interfaces/slicer/diffusion/__init__.py +++ b/nipype/interfaces/slicer/diffusion/__init__.py @@ -1,2 +1,3 @@ +# -*- coding: utf-8 -*- from __future__ import absolute_import from .diffusion import ResampleDTIVolume, DWIRicianLMMSEFilter, TractographyLabelMapSeeding, DWIJointRicianLMMSEFilter, DiffusionWeightedVolumeMasking, DTIimport, DWIToDTIEstimation, DiffusionTensorScalarMeasurements, DTIexport diff --git a/nipype/interfaces/slicer/diffusion/diffusion.py b/nipype/interfaces/slicer/diffusion/diffusion.py index cb87deb4f5..1f669ac8d6 100644 --- a/nipype/interfaces/slicer/diffusion/diffusion.py +++ b/nipype/interfaces/slicer/diffusion/diffusion.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/diffusion/tests/__init__.py 
b/nipype/interfaces/slicer/diffusion/tests/__init__.py index e69de29bb2..40a96afc6f 100644 --- a/nipype/interfaces/slicer/diffusion/tests/__init__.py +++ b/nipype/interfaces/slicer/diffusion/tests/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/slicer/filtering/__init__.py b/nipype/interfaces/slicer/filtering/__init__.py index 33c18db51a..d4dbd9e220 100644 --- a/nipype/interfaces/slicer/filtering/__init__.py +++ b/nipype/interfaces/slicer/filtering/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from __future__ import absolute_import from .morphology import GrayscaleGrindPeakImageFilter, GrayscaleFillHoleImageFilter from .denoising import GradientAnisotropicDiffusion, CurvatureAnisotropicDiffusion, GaussianBlurImageFilter, MedianImageFilter diff --git a/nipype/interfaces/slicer/filtering/arithmetic.py b/nipype/interfaces/slicer/filtering/arithmetic.py index cfde7f5d02..c6bb024507 100644 --- a/nipype/interfaces/slicer/filtering/arithmetic.py +++ b/nipype/interfaces/slicer/filtering/arithmetic.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/filtering/checkerboardfilter.py b/nipype/interfaces/slicer/filtering/checkerboardfilter.py index 894777bbdf..9e4da45e1a 100644 --- a/nipype/interfaces/slicer/filtering/checkerboardfilter.py +++ b/nipype/interfaces/slicer/filtering/checkerboardfilter.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/filtering/denoising.py b/nipype/interfaces/slicer/filtering/denoising.py index 7c506d4839..9861585de9 100644 --- a/nipype/interfaces/slicer/filtering/denoising.py +++ b/nipype/interfaces/slicer/filtering/denoising.py @@ -1,3 +1,4 @@ +# -*- 
coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/filtering/extractskeleton.py b/nipype/interfaces/slicer/filtering/extractskeleton.py index 0c516850db..e536fe14b5 100644 --- a/nipype/interfaces/slicer/filtering/extractskeleton.py +++ b/nipype/interfaces/slicer/filtering/extractskeleton.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/filtering/histogrammatching.py b/nipype/interfaces/slicer/filtering/histogrammatching.py index beaeb044bc..d25b010842 100644 --- a/nipype/interfaces/slicer/filtering/histogrammatching.py +++ b/nipype/interfaces/slicer/filtering/histogrammatching.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/filtering/imagelabelcombine.py b/nipype/interfaces/slicer/filtering/imagelabelcombine.py index 36611de864..a9c8455740 100644 --- a/nipype/interfaces/slicer/filtering/imagelabelcombine.py +++ b/nipype/interfaces/slicer/filtering/imagelabelcombine.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/filtering/morphology.py b/nipype/interfaces/slicer/filtering/morphology.py index 7214828d95..31d31738a2 100644 --- a/nipype/interfaces/slicer/filtering/morphology.py +++ b/nipype/interfaces/slicer/filtering/morphology.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list 
and/or change the generator.""" diff --git a/nipype/interfaces/slicer/filtering/n4itkbiasfieldcorrection.py b/nipype/interfaces/slicer/filtering/n4itkbiasfieldcorrection.py index b56d4d3af2..b4102e8ede 100644 --- a/nipype/interfaces/slicer/filtering/n4itkbiasfieldcorrection.py +++ b/nipype/interfaces/slicer/filtering/n4itkbiasfieldcorrection.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/filtering/resamplescalarvectordwivolume.py b/nipype/interfaces/slicer/filtering/resamplescalarvectordwivolume.py index b90694118f..9f6ebe45bc 100644 --- a/nipype/interfaces/slicer/filtering/resamplescalarvectordwivolume.py +++ b/nipype/interfaces/slicer/filtering/resamplescalarvectordwivolume.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/filtering/tests/__init__.py b/nipype/interfaces/slicer/filtering/tests/__init__.py index e69de29bb2..40a96afc6f 100644 --- a/nipype/interfaces/slicer/filtering/tests/__init__.py +++ b/nipype/interfaces/slicer/filtering/tests/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/slicer/filtering/thresholdscalarvolume.py b/nipype/interfaces/slicer/filtering/thresholdscalarvolume.py index 63e7e5a7b7..7d4d8d9070 100644 --- a/nipype/interfaces/slicer/filtering/thresholdscalarvolume.py +++ b/nipype/interfaces/slicer/filtering/thresholdscalarvolume.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/filtering/votingbinaryholefillingimagefilter.py 
b/nipype/interfaces/slicer/filtering/votingbinaryholefillingimagefilter.py index 5dd42d9437..e885b76ec5 100644 --- a/nipype/interfaces/slicer/filtering/votingbinaryholefillingimagefilter.py +++ b/nipype/interfaces/slicer/filtering/votingbinaryholefillingimagefilter.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/generate_classes.py b/nipype/interfaces/slicer/generate_classes.py index 46b85af86a..31b04e4dd7 100644 --- a/nipype/interfaces/slicer/generate_classes.py +++ b/nipype/interfaces/slicer/generate_classes.py @@ -1,10 +1,10 @@ +# -*- coding: utf-8 -*- """This script generates Slicer Interfaces based on the CLI modules XML. CLI modules are selected from the hardcoded list below and generated code is placed in the cli_modules.py file (and imported in __init__.py). For this to work correctly you must have your CLI executabes in $PATH""" - -from __future__ import print_function - +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import str, bytes, open import xml.dom.minidom import subprocess import os @@ -13,16 +13,14 @@ import keyword python_keywords = keyword.kwlist # If c++ SEM module uses one of these key words as a command line parameter, we need to modify variable -from ...external.six import string_types - def force_to_valid_python_variable_name(old_name): """ Valid c++ names are not always valid in python, so provide alternate naming - >>> force_to_valid_python_variable_name('lambda') + >>> force_to_valid_python_variable_name('lambda') # doctest: +IGNORE_UNICODE 'opt_lambda' - >>> force_to_valid_python_variable_name('inputVolume') + >>> force_to_valid_python_variable_name('inputVolume') # doctest: +IGNORE_UNICODE 'inputVolume' """ new_name = old_name @@ -36,13 +34,15 @@ def add_class_to_package(class_codes, class_names, 
module_name, package_dir): module_python_filename = os.path.join(package_dir, "%s.py" % module_name) f_m = open(module_python_filename, 'w') f_i = open(os.path.join(package_dir, "__init__.py"), 'a+') - f_m.write("""# -*- coding: utf8 -*- + f_m.write("""# -*- coding: utf-8 -*- \"\"\"Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.\"\"\"\n\n""") - imports = """from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath + imports = """from __future__ import print_function, division, unicode_literals, absolute_import +from ..base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, + File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath) import os\n\n\n""" f_m.write(imports) - f_m.write("\n\n".join(class_codes).encode('utf8')) + f_m.write("\n\n".join(class_codes)) f_i.write("from %s import %s\n" % (module_name, ", ".join(class_names))) f_m.close() f_i.close() @@ -51,7 +51,7 @@ def add_class_to_package(class_codes, class_names, module_name, package_dir): def crawl_code_struct(code_struct, package_dir): subpackages = [] for k, v in code_struct.items(): - if isinstance(v, str) or isinstance(v, string_types): + if isinstance(v, str) or isinstance(v, (str, bytes)): module_name = k.lower() class_name = k class_code = v @@ -61,7 +61,7 @@ def crawl_code_struct(code_struct, package_dir): l1 = {} l2 = {} for key in list(v.keys()): - if (isinstance(v[key], str) or isinstance(v[key], string_types)): + if (isinstance(v[key], str) or isinstance(v[key], (str, bytes))): l1[key] = v[key] else: l2[key] = v[key] @@ -358,7 +358,7 @@ def grab_xml(module, launcher, mipav_hacks=False): def parse_params(params): list = [] for key, value in params.items(): - if isinstance(value, string_types): + if isinstance(value, (str, bytes)): list.append('%s="%s"' % (key, value.replace('"', 
"'"))) else: list.append('%s=%s' % (key, value)) diff --git a/nipype/interfaces/slicer/legacy/__init__.py b/nipype/interfaces/slicer/legacy/__init__.py index 8071dab173..ee9d79b38c 100644 --- a/nipype/interfaces/slicer/legacy/__init__.py +++ b/nipype/interfaces/slicer/legacy/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from __future__ import absolute_import from .diffusion import * from .segmentation import OtsuThresholdSegmentation diff --git a/nipype/interfaces/slicer/legacy/converters.py b/nipype/interfaces/slicer/legacy/converters.py index fd1817c06f..509dbd80cf 100644 --- a/nipype/interfaces/slicer/legacy/converters.py +++ b/nipype/interfaces/slicer/legacy/converters.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/legacy/diffusion/__init__.py b/nipype/interfaces/slicer/legacy/diffusion/__init__.py index f57a40fd2b..f66daabb5b 100644 --- a/nipype/interfaces/slicer/legacy/diffusion/__init__.py +++ b/nipype/interfaces/slicer/legacy/diffusion/__init__.py @@ -1,2 +1,3 @@ +# -*- coding: utf-8 -*- from __future__ import absolute_import from .denoising import DWIUnbiasedNonLocalMeansFilter diff --git a/nipype/interfaces/slicer/legacy/diffusion/denoising.py b/nipype/interfaces/slicer/legacy/diffusion/denoising.py index bbec5f7d9b..f306797a1a 100644 --- a/nipype/interfaces/slicer/legacy/diffusion/denoising.py +++ b/nipype/interfaces/slicer/legacy/diffusion/denoising.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/legacy/diffusion/tests/__init__.py b/nipype/interfaces/slicer/legacy/diffusion/tests/__init__.py index e69de29bb2..40a96afc6f 100644 --- 
a/nipype/interfaces/slicer/legacy/diffusion/tests/__init__.py +++ b/nipype/interfaces/slicer/legacy/diffusion/tests/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/slicer/legacy/filtering.py b/nipype/interfaces/slicer/legacy/filtering.py index b6684d2d64..1faa3191d8 100644 --- a/nipype/interfaces/slicer/legacy/filtering.py +++ b/nipype/interfaces/slicer/legacy/filtering.py @@ -1,4 +1,4 @@ -# -*- coding: utf8 -*- +# -*- coding: utf-8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" @@ -27,7 +27,7 @@ class OtsuThresholdImageFilter(SEMLikeCommandLine): The original reference is: -N.Otsu, ‘‘A threshold selection method from gray level histograms,’’ IEEE Trans.Syst.ManCybern.SMC-9,62–66 1979. +N.Otsu, A threshold selection method from gray level histograms, IEEE Trans.Syst.ManCybern.SMC-9,62–66 1979. version: 0.1.0.$Revision: 19608 $(alpha) diff --git a/nipype/interfaces/slicer/legacy/registration.py b/nipype/interfaces/slicer/legacy/registration.py index 5ba5baf3f5..7570de7431 100644 --- a/nipype/interfaces/slicer/legacy/registration.py +++ b/nipype/interfaces/slicer/legacy/registration.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/legacy/segmentation.py b/nipype/interfaces/slicer/legacy/segmentation.py index af724c9f96..b55e0b6245 100644 --- a/nipype/interfaces/slicer/legacy/segmentation.py +++ b/nipype/interfaces/slicer/legacy/segmentation.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/legacy/tests/__init__.py b/nipype/interfaces/slicer/legacy/tests/__init__.py index e69de29bb2..40a96afc6f 100644 
--- a/nipype/interfaces/slicer/legacy/tests/__init__.py +++ b/nipype/interfaces/slicer/legacy/tests/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/slicer/quantification/__init__.py b/nipype/interfaces/slicer/quantification/__init__.py index 3a0d67b038..6054dddd59 100644 --- a/nipype/interfaces/slicer/quantification/__init__.py +++ b/nipype/interfaces/slicer/quantification/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from __future__ import absolute_import from .changequantification import IntensityDifferenceMetric from .petstandarduptakevaluecomputation import PETStandardUptakeValueComputation diff --git a/nipype/interfaces/slicer/quantification/changequantification.py b/nipype/interfaces/slicer/quantification/changequantification.py index f5225065e2..51020f36c1 100644 --- a/nipype/interfaces/slicer/quantification/changequantification.py +++ b/nipype/interfaces/slicer/quantification/changequantification.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/quantification/petstandarduptakevaluecomputation.py b/nipype/interfaces/slicer/quantification/petstandarduptakevaluecomputation.py index 8dfe67b546..c566703c95 100644 --- a/nipype/interfaces/slicer/quantification/petstandarduptakevaluecomputation.py +++ b/nipype/interfaces/slicer/quantification/petstandarduptakevaluecomputation.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/quantification/tests/__init__.py b/nipype/interfaces/slicer/quantification/tests/__init__.py index e69de29bb2..40a96afc6f 100644 --- a/nipype/interfaces/slicer/quantification/tests/__init__.py +++ 
b/nipype/interfaces/slicer/quantification/tests/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/slicer/registration/__init__.py b/nipype/interfaces/slicer/registration/__init__.py index 1d5349c291..f19c1faff7 100644 --- a/nipype/interfaces/slicer/registration/__init__.py +++ b/nipype/interfaces/slicer/registration/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from __future__ import absolute_import from .specialized import ACPCTransform, FiducialRegistration, VBRAINSDemonWarp, BRAINSDemonWarp from .brainsresample import BRAINSResample diff --git a/nipype/interfaces/slicer/registration/brainsfit.py b/nipype/interfaces/slicer/registration/brainsfit.py index b093235b84..2e02879280 100644 --- a/nipype/interfaces/slicer/registration/brainsfit.py +++ b/nipype/interfaces/slicer/registration/brainsfit.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/registration/brainsresample.py b/nipype/interfaces/slicer/registration/brainsresample.py index c7f2ba63a2..fa87020b28 100644 --- a/nipype/interfaces/slicer/registration/brainsresample.py +++ b/nipype/interfaces/slicer/registration/brainsresample.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/registration/specialized.py b/nipype/interfaces/slicer/registration/specialized.py index 3123cd2e63..8ccf48176f 100644 --- a/nipype/interfaces/slicer/registration/specialized.py +++ b/nipype/interfaces/slicer/registration/specialized.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git 
a/nipype/interfaces/slicer/registration/tests/__init__.py b/nipype/interfaces/slicer/registration/tests/__init__.py index e69de29bb2..40a96afc6f 100644 --- a/nipype/interfaces/slicer/registration/tests/__init__.py +++ b/nipype/interfaces/slicer/registration/tests/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/slicer/segmentation/__init__.py b/nipype/interfaces/slicer/segmentation/__init__.py index a88073aceb..d4ebe74d7b 100644 --- a/nipype/interfaces/slicer/segmentation/__init__.py +++ b/nipype/interfaces/slicer/segmentation/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from __future__ import absolute_import from .specialized import RobustStatisticsSegmenter, EMSegmentCommandLine, BRAINSROIAuto from .simpleregiongrowingsegmentation import SimpleRegionGrowingSegmentation diff --git a/nipype/interfaces/slicer/segmentation/simpleregiongrowingsegmentation.py b/nipype/interfaces/slicer/segmentation/simpleregiongrowingsegmentation.py index ee2c9e11ea..6e444a56a3 100644 --- a/nipype/interfaces/slicer/segmentation/simpleregiongrowingsegmentation.py +++ b/nipype/interfaces/slicer/segmentation/simpleregiongrowingsegmentation.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/segmentation/specialized.py b/nipype/interfaces/slicer/segmentation/specialized.py index fef3cb7df3..2fb6131348 100644 --- a/nipype/interfaces/slicer/segmentation/specialized.py +++ b/nipype/interfaces/slicer/segmentation/specialized.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/segmentation/tests/__init__.py b/nipype/interfaces/slicer/segmentation/tests/__init__.py index e69de29bb2..40a96afc6f 100644 --- 
a/nipype/interfaces/slicer/segmentation/tests/__init__.py +++ b/nipype/interfaces/slicer/segmentation/tests/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/slicer/surface.py b/nipype/interfaces/slicer/surface.py index cd8edcf2cf..774b76eba0 100644 --- a/nipype/interfaces/slicer/surface.py +++ b/nipype/interfaces/slicer/surface.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/tests/__init__.py b/nipype/interfaces/slicer/tests/__init__.py index e69de29bb2..40a96afc6f 100644 --- a/nipype/interfaces/slicer/tests/__init__.py +++ b/nipype/interfaces/slicer/tests/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/slicer/utilities.py b/nipype/interfaces/slicer/utilities.py index bdc675e55a..7e3d690d68 100644 --- a/nipype/interfaces/slicer/utilities.py +++ b/nipype/interfaces/slicer/utilities.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/spm/__init__.py b/nipype/interfaces/spm/__init__.py index b60e0ae560..de829463fb 100644 --- a/nipype/interfaces/spm/__init__.py +++ b/nipype/interfaces/spm/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Top-level namespace for spm.""" diff --git a/nipype/interfaces/spm/base.py b/nipype/interfaces/spm/base.py index dbdad59b3f..bd67003ccc 100644 --- a/nipype/interfaces/spm/base.py +++ b/nipype/interfaces/spm/base.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The spm module provides basic 
functions for interfacing with SPM tools. @@ -13,11 +14,10 @@ spm.SPMCommand().version """ +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import range, object, str, bytes # Standard library imports -from __future__ import print_function -from builtins import range -from builtins import object import os from copy import deepcopy @@ -27,16 +27,16 @@ from scipy.io import savemat # Local imports +from ... import logging +from ...utils import spm_docs as sd from ..base import (BaseInterface, traits, isdefined, InputMultiPath, BaseInterfaceInputSpec, Directory, Undefined) from ..matlab import MatlabCommand -from ...utils import spm_docs as sd -from ...external.six import string_types from ...external.due import due, Doi, BibTeX -from ... import logging -logger = logging.getLogger('interface') + __docformat__ = 'restructuredtext' +logger = logging.getLogger('interface') def func_is_3d(in_file): @@ -432,8 +432,15 @@ def _generate_job(self, prefix='', contents=None): if isinstance(val, np.ndarray): jobstring += self._generate_job(prefix=None, contents=val) - elif isinstance(val, string_types): - jobstring += '\'%s\';...\n' % (val) + elif isinstance(val, list): + items_format = [] + for el in val: + items_format += ['{}' if not isinstance(el, (str, bytes)) + else '\'{}\''] + val_format = ', '.join(items_format).format + jobstring += '[{}];...\n'.format(val_format(*val)) + elif isinstance(val, (str, bytes)): + jobstring += '\'{}\';...\n'.format(val) else: jobstring += '%s;...\n' % str(val) jobstring += '};\n' @@ -447,7 +454,7 @@ def _generate_job(self, prefix='', contents=None): jobstring += self._generate_job(newprefix, val[field]) return jobstring - if isinstance(contents, string_types): + if isinstance(contents, (str, bytes)): jobstring += "%s = '%s';\n" % (prefix, contents) return jobstring jobstring += "%s = %s;\n" % (prefix, str(contents)) diff --git a/nipype/interfaces/spm/model.py b/nipype/interfaces/spm/model.py 
index 6e098cccc0..4a55a4ea79 100644 --- a/nipype/interfaces/spm/model.py +++ b/nipype/interfaces/spm/model.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The spm module provides basic functions for interfacing with matlab @@ -10,6 +11,8 @@ >>> os.chdir(datadir) """ +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import str, bytes # Standard library imports import os @@ -20,17 +23,16 @@ import scipy.io as sio # Local imports -from .base import (SPMCommand, SPMCommandInputSpec, - scans_for_fnames) -from ..base import (Bunch, traits, TraitedSpec, File, Directory, - OutputMultiPath, InputMultiPath, isdefined) -from ...external.six import string_types +from ... import logging from ...utils.filemanip import (filename_to_list, list_to_filename, split_filename) -from ... import logging +from ..base import (Bunch, traits, TraitedSpec, File, Directory, + OutputMultiPath, InputMultiPath, isdefined) +from .base import (SPMCommand, SPMCommandInputSpec, + scans_for_fnames) -logger = logging.getLogger('interface') __docformat__ = 'restructuredtext' +logger = logging.getLogger('interface') class Level1DesignInputSpec(SPMCommandInputSpec): @@ -222,7 +224,7 @@ def _format_arg(self, opt, spec, val): if opt == 'spm_mat_file': return np.array([str(val)], dtype=object) if opt == 'estimation_method': - if isinstance(val, string_types): + if isinstance(val, (str, bytes)): return {'%s' % val: 1} else: return val @@ -505,7 +507,7 @@ class ThresholdOutputSpec(TraitedSpec): class Threshold(SPMCommand): - '''Topological FDR thresholding based on cluster extent/size. Smoothness is + """Topological FDR thresholding based on cluster extent/size. Smoothness is estimated from GLM residuals but is assumed to be the same for all of the voxels. 
@@ -518,7 +520,7 @@ class Threshold(SPMCommand): >>> thresh.inputs.contrast_index = 1 >>> thresh.inputs.extent_fdr_p_threshold = 0.05 >>> thresh.run() # doctest: +SKIP - ''' + """ input_spec = ThresholdInputSpec output_spec = ThresholdOutputSpec @@ -701,7 +703,7 @@ class ThresholdStatisticsOutputSpec(TraitedSpec): class ThresholdStatistics(SPMCommand): - '''Given height and cluster size threshold calculate theoretical + """Given height and cluster size threshold calculate theoretical probabilities concerning false positives Examples @@ -713,7 +715,7 @@ class ThresholdStatistics(SPMCommand): >>> thresh.inputs.contrast_index = 1 >>> thresh.inputs.height_threshold = 4.56 >>> thresh.run() # doctest: +SKIP - ''' + """ input_spec = ThresholdStatisticsInputSpec output_spec = ThresholdStatisticsOutputSpec diff --git a/nipype/interfaces/spm/preprocess.py b/nipype/interfaces/spm/preprocess.py index 2aa4821205..3fa7608434 100644 --- a/nipype/interfaces/spm/preprocess.py +++ b/nipype/interfaces/spm/preprocess.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """SPM wrappers for preprocessing data @@ -8,24 +9,23 @@ >>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data')) >>> os.chdir(datadir) """ +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import range -# Standard library imports -from builtins import range -from copy import deepcopy import os +from copy import deepcopy # Third-party imports import numpy as np # Local imports +from ...utils.filemanip import (fname_presuffix, filename_to_list, + list_to_filename, split_filename) from ..base import (OutputMultiPath, TraitedSpec, isdefined, traits, InputMultiPath, File) -from .base import (SPMCommand, scans_for_fname, - func_is_3d, Info, +from .base import (SPMCommand, scans_for_fname, func_is_3d, scans_for_fnames, SPMCommandInputSpec) -from 
...utils.filemanip import (fname_presuffix, filename_to_list, - list_to_filename, split_filename) __docformat__ = 'restructuredtext' diff --git a/nipype/interfaces/spm/tests/__init__.py b/nipype/interfaces/spm/tests/__init__.py index e69de29bb2..40a96afc6f 100644 --- a/nipype/interfaces/spm/tests/__init__.py +++ b/nipype/interfaces/spm/tests/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/spm/tests/test_auto_ApplyInverseDeformation.py b/nipype/interfaces/spm/tests/test_auto_ApplyInverseDeformation.py index 8eca900dd9..31c70733a7 100644 --- a/nipype/interfaces/spm/tests/test_auto_ApplyInverseDeformation.py +++ b/nipype/interfaces/spm/tests/test_auto_ApplyInverseDeformation.py @@ -7,10 +7,10 @@ def test_ApplyInverseDeformation_inputs(): input_map = dict(bounding_box=dict(field='comp{1}.inv.comp{1}.sn2def.bb', ), deformation=dict(field='comp{1}.inv.comp{1}.sn2def.matname', - xor=['deformation_field'], + xor=[u'deformation_field'], ), deformation_field=dict(field='comp{1}.inv.comp{1}.def', - xor=['deformation'], + xor=[u'deformation'], ), ignore_exception=dict(nohash=True, usedefault=True, diff --git a/nipype/interfaces/spm/tests/test_auto_EstimateContrast.py b/nipype/interfaces/spm/tests/test_auto_EstimateContrast.py index 558790c20e..0332c7c622 100644 --- a/nipype/interfaces/spm/tests/test_auto_EstimateContrast.py +++ b/nipype/interfaces/spm/tests/test_auto_EstimateContrast.py @@ -9,7 +9,7 @@ def test_EstimateContrast_inputs(): ), contrasts=dict(mandatory=True, ), - group_contrast=dict(xor=['use_derivs'], + group_contrast=dict(xor=[u'use_derivs'], ), ignore_exception=dict(nohash=True, usedefault=True, @@ -25,7 +25,7 @@ def test_EstimateContrast_inputs(): field='spmmat', mandatory=True, ), - use_derivs=dict(xor=['group_contrast'], + use_derivs=dict(xor=[u'group_contrast'], ), use_mcr=dict(), use_v8struct=dict(min_ver='8', diff --git a/nipype/interfaces/spm/tests/test_auto_FactorialDesign.py 
b/nipype/interfaces/spm/tests/test_auto_FactorialDesign.py index c382ed3c8d..592d6d1ad5 100644 --- a/nipype/interfaces/spm/tests/test_auto_FactorialDesign.py +++ b/nipype/interfaces/spm/tests/test_auto_FactorialDesign.py @@ -9,13 +9,13 @@ def test_FactorialDesign_inputs(): explicit_mask_file=dict(field='masking.em', ), global_calc_mean=dict(field='globalc.g_mean', - xor=['global_calc_omit', 'global_calc_values'], + xor=[u'global_calc_omit', u'global_calc_values'], ), global_calc_omit=dict(field='globalc.g_omit', - xor=['global_calc_mean', 'global_calc_values'], + xor=[u'global_calc_mean', u'global_calc_values'], ), global_calc_values=dict(field='globalc.g_user.global_uval', - xor=['global_calc_mean', 'global_calc_omit'], + xor=[u'global_calc_mean', u'global_calc_omit'], ), global_normalization=dict(field='globalm.glonorm', ), @@ -31,13 +31,13 @@ def test_FactorialDesign_inputs(): spm_mat_dir=dict(field='dir', ), threshold_mask_absolute=dict(field='masking.tm.tma.athresh', - xor=['threshold_mask_none', 'threshold_mask_relative'], + xor=[u'threshold_mask_none', u'threshold_mask_relative'], ), threshold_mask_none=dict(field='masking.tm.tm_none', - xor=['threshold_mask_absolute', 'threshold_mask_relative'], + xor=[u'threshold_mask_absolute', u'threshold_mask_relative'], ), threshold_mask_relative=dict(field='masking.tm.tmr.rthresh', - xor=['threshold_mask_absolute', 'threshold_mask_none'], + xor=[u'threshold_mask_absolute', u'threshold_mask_none'], ), use_implicit_threshold=dict(field='masking.im', ), diff --git a/nipype/interfaces/spm/tests/test_auto_MultipleRegressionDesign.py b/nipype/interfaces/spm/tests/test_auto_MultipleRegressionDesign.py index 5fa47fadf2..52b9d8d1b0 100644 --- a/nipype/interfaces/spm/tests/test_auto_MultipleRegressionDesign.py +++ b/nipype/interfaces/spm/tests/test_auto_MultipleRegressionDesign.py @@ -9,13 +9,13 @@ def test_MultipleRegressionDesign_inputs(): explicit_mask_file=dict(field='masking.em', ), 
global_calc_mean=dict(field='globalc.g_mean', - xor=['global_calc_omit', 'global_calc_values'], + xor=[u'global_calc_omit', u'global_calc_values'], ), global_calc_omit=dict(field='globalc.g_omit', - xor=['global_calc_mean', 'global_calc_values'], + xor=[u'global_calc_mean', u'global_calc_values'], ), global_calc_values=dict(field='globalc.g_user.global_uval', - xor=['global_calc_mean', 'global_calc_omit'], + xor=[u'global_calc_mean', u'global_calc_omit'], ), global_normalization=dict(field='globalm.glonorm', ), @@ -37,13 +37,13 @@ def test_MultipleRegressionDesign_inputs(): spm_mat_dir=dict(field='dir', ), threshold_mask_absolute=dict(field='masking.tm.tma.athresh', - xor=['threshold_mask_none', 'threshold_mask_relative'], + xor=[u'threshold_mask_none', u'threshold_mask_relative'], ), threshold_mask_none=dict(field='masking.tm.tm_none', - xor=['threshold_mask_absolute', 'threshold_mask_relative'], + xor=[u'threshold_mask_absolute', u'threshold_mask_relative'], ), threshold_mask_relative=dict(field='masking.tm.tmr.rthresh', - xor=['threshold_mask_absolute', 'threshold_mask_none'], + xor=[u'threshold_mask_absolute', u'threshold_mask_none'], ), use_implicit_threshold=dict(field='masking.im', ), diff --git a/nipype/interfaces/spm/tests/test_auto_Normalize.py b/nipype/interfaces/spm/tests/test_auto_Normalize.py index 96ce55975e..cf44ee2edd 100644 --- a/nipype/interfaces/spm/tests/test_auto_Normalize.py +++ b/nipype/interfaces/spm/tests/test_auto_Normalize.py @@ -29,13 +29,13 @@ def test_Normalize_inputs(): parameter_file=dict(copyfile=False, field='subj.matname', mandatory=True, - xor=['source', 'template'], + xor=[u'source', u'template'], ), paths=dict(), source=dict(copyfile=True, field='subj.source', mandatory=True, - xor=['parameter_file'], + xor=[u'parameter_file'], ), source_image_smoothing=dict(field='eoptions.smosrc', ), @@ -45,7 +45,7 @@ def test_Normalize_inputs(): template=dict(copyfile=False, field='eoptions.template', mandatory=True, - 
xor=['parameter_file'], + xor=[u'parameter_file'], ), template_image_smoothing=dict(field='eoptions.smoref', ), diff --git a/nipype/interfaces/spm/tests/test_auto_Normalize12.py b/nipype/interfaces/spm/tests/test_auto_Normalize12.py index 42ee2b0fa8..651a072ba4 100644 --- a/nipype/interfaces/spm/tests/test_auto_Normalize12.py +++ b/nipype/interfaces/spm/tests/test_auto_Normalize12.py @@ -16,7 +16,7 @@ def test_Normalize12_inputs(): deformation_file=dict(copyfile=False, field='subj.def', mandatory=True, - xor=['image_to_align', 'tpm'], + xor=[u'image_to_align', u'tpm'], ), ignore_exception=dict(nohash=True, usedefault=True, @@ -24,7 +24,7 @@ def test_Normalize12_inputs(): image_to_align=dict(copyfile=True, field='subj.vol', mandatory=True, - xor=['deformation_file'], + xor=[u'deformation_file'], ), jobtype=dict(usedefault=True, ), @@ -41,7 +41,7 @@ def test_Normalize12_inputs(): ), tpm=dict(copyfile=False, field='eoptions.tpm', - xor=['deformation_file'], + xor=[u'deformation_file'], ), use_mcr=dict(), use_v8struct=dict(min_ver='8', diff --git a/nipype/interfaces/spm/tests/test_auto_OneSampleTTestDesign.py b/nipype/interfaces/spm/tests/test_auto_OneSampleTTestDesign.py index 51f4e9d4e2..010d4e47f7 100644 --- a/nipype/interfaces/spm/tests/test_auto_OneSampleTTestDesign.py +++ b/nipype/interfaces/spm/tests/test_auto_OneSampleTTestDesign.py @@ -9,13 +9,13 @@ def test_OneSampleTTestDesign_inputs(): explicit_mask_file=dict(field='masking.em', ), global_calc_mean=dict(field='globalc.g_mean', - xor=['global_calc_omit', 'global_calc_values'], + xor=[u'global_calc_omit', u'global_calc_values'], ), global_calc_omit=dict(field='globalc.g_omit', - xor=['global_calc_mean', 'global_calc_values'], + xor=[u'global_calc_mean', u'global_calc_values'], ), global_calc_values=dict(field='globalc.g_user.global_uval', - xor=['global_calc_mean', 'global_calc_omit'], + xor=[u'global_calc_mean', u'global_calc_omit'], ), global_normalization=dict(field='globalm.glonorm', ), @@ -34,13 +34,13 
@@ def test_OneSampleTTestDesign_inputs(): spm_mat_dir=dict(field='dir', ), threshold_mask_absolute=dict(field='masking.tm.tma.athresh', - xor=['threshold_mask_none', 'threshold_mask_relative'], + xor=[u'threshold_mask_none', u'threshold_mask_relative'], ), threshold_mask_none=dict(field='masking.tm.tm_none', - xor=['threshold_mask_absolute', 'threshold_mask_relative'], + xor=[u'threshold_mask_absolute', u'threshold_mask_relative'], ), threshold_mask_relative=dict(field='masking.tm.tmr.rthresh', - xor=['threshold_mask_absolute', 'threshold_mask_none'], + xor=[u'threshold_mask_absolute', u'threshold_mask_none'], ), use_implicit_threshold=dict(field='masking.im', ), diff --git a/nipype/interfaces/spm/tests/test_auto_PairedTTestDesign.py b/nipype/interfaces/spm/tests/test_auto_PairedTTestDesign.py index b053cb58d1..0e5eddf50b 100644 --- a/nipype/interfaces/spm/tests/test_auto_PairedTTestDesign.py +++ b/nipype/interfaces/spm/tests/test_auto_PairedTTestDesign.py @@ -11,13 +11,13 @@ def test_PairedTTestDesign_inputs(): explicit_mask_file=dict(field='masking.em', ), global_calc_mean=dict(field='globalc.g_mean', - xor=['global_calc_omit', 'global_calc_values'], + xor=[u'global_calc_omit', u'global_calc_values'], ), global_calc_omit=dict(field='globalc.g_omit', - xor=['global_calc_mean', 'global_calc_values'], + xor=[u'global_calc_mean', u'global_calc_values'], ), global_calc_values=dict(field='globalc.g_user.global_uval', - xor=['global_calc_mean', 'global_calc_omit'], + xor=[u'global_calc_mean', u'global_calc_omit'], ), global_normalization=dict(field='globalm.glonorm', ), @@ -38,13 +38,13 @@ def test_PairedTTestDesign_inputs(): spm_mat_dir=dict(field='dir', ), threshold_mask_absolute=dict(field='masking.tm.tma.athresh', - xor=['threshold_mask_none', 'threshold_mask_relative'], + xor=[u'threshold_mask_none', u'threshold_mask_relative'], ), threshold_mask_none=dict(field='masking.tm.tm_none', - xor=['threshold_mask_absolute', 'threshold_mask_relative'], + 
xor=[u'threshold_mask_absolute', u'threshold_mask_relative'], ), threshold_mask_relative=dict(field='masking.tm.tmr.rthresh', - xor=['threshold_mask_absolute', 'threshold_mask_none'], + xor=[u'threshold_mask_absolute', u'threshold_mask_none'], ), use_implicit_threshold=dict(field='masking.im', ), diff --git a/nipype/interfaces/spm/tests/test_auto_TwoSampleTTestDesign.py b/nipype/interfaces/spm/tests/test_auto_TwoSampleTTestDesign.py index c27ae684dc..dd5104afb6 100644 --- a/nipype/interfaces/spm/tests/test_auto_TwoSampleTTestDesign.py +++ b/nipype/interfaces/spm/tests/test_auto_TwoSampleTTestDesign.py @@ -11,13 +11,13 @@ def test_TwoSampleTTestDesign_inputs(): explicit_mask_file=dict(field='masking.em', ), global_calc_mean=dict(field='globalc.g_mean', - xor=['global_calc_omit', 'global_calc_values'], + xor=[u'global_calc_omit', u'global_calc_values'], ), global_calc_omit=dict(field='globalc.g_omit', - xor=['global_calc_mean', 'global_calc_values'], + xor=[u'global_calc_mean', u'global_calc_values'], ), global_calc_values=dict(field='globalc.g_user.global_uval', - xor=['global_calc_mean', 'global_calc_omit'], + xor=[u'global_calc_mean', u'global_calc_omit'], ), global_normalization=dict(field='globalm.glonorm', ), @@ -39,13 +39,13 @@ def test_TwoSampleTTestDesign_inputs(): spm_mat_dir=dict(field='dir', ), threshold_mask_absolute=dict(field='masking.tm.tma.athresh', - xor=['threshold_mask_none', 'threshold_mask_relative'], + xor=[u'threshold_mask_none', u'threshold_mask_relative'], ), threshold_mask_none=dict(field='masking.tm.tm_none', - xor=['threshold_mask_absolute', 'threshold_mask_relative'], + xor=[u'threshold_mask_absolute', u'threshold_mask_relative'], ), threshold_mask_relative=dict(field='masking.tm.tmr.rthresh', - xor=['threshold_mask_absolute', 'threshold_mask_none'], + xor=[u'threshold_mask_absolute', u'threshold_mask_none'], ), unequal_variance=dict(field='des.t2.variance', ), diff --git a/nipype/interfaces/spm/tests/test_base.py 
b/nipype/interfaces/spm/tests/test_base.py index 88dc8d7656..93a591aac2 100644 --- a/nipype/interfaces/spm/tests/test_base.py +++ b/nipype/interfaces/spm/tests/test_base.py @@ -1,5 +1,9 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: +from __future__ import unicode_literals +from builtins import str, bytes + import os from tempfile import mkdtemp from shutil import rmtree @@ -12,7 +16,7 @@ from nipype.interfaces.spm import no_spm import nipype.interfaces.matlab as mlab from nipype.interfaces.spm.base import SPMCommandInputSpec -from nipype.interfaces.base import traits, text_type +from nipype.interfaces.base import traits try: matlab_cmd = os.environ['MATLABCMD'] @@ -57,7 +61,7 @@ def test_scan_for_fnames(): def test_spm_path(): spm_path = spm.Info.version()['path'] if spm_path is not None: - yield assert_equal, type(spm_path), text_type + yield assert_true, isinstance(spm_path, (str, bytes)) yield assert_true, 'spm' in spm_path diff --git a/nipype/interfaces/spm/tests/test_model.py b/nipype/interfaces/spm/tests/test_model.py index a8f8bf0256..dd49474bed 100644 --- a/nipype/interfaces/spm/tests/test_model.py +++ b/nipype/interfaces/spm/tests/test_model.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os diff --git a/nipype/interfaces/spm/tests/test_preprocess.py b/nipype/interfaces/spm/tests/test_preprocess.py index 406ddf8e54..af82430e68 100644 --- a/nipype/interfaces/spm/tests/test_preprocess.py +++ b/nipype/interfaces/spm/tests/test_preprocess.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os diff --git a/nipype/interfaces/spm/tests/test_utils.py b/nipype/interfaces/spm/tests/test_utils.py index 38581b927f..bbb8c6b604 100644 --- 
a/nipype/interfaces/spm/tests/test_utils.py +++ b/nipype/interfaces/spm/tests/test_utils.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os diff --git a/nipype/interfaces/spm/utils.py b/nipype/interfaces/spm/utils.py index a5ddab3e58..5c91e1313b 100644 --- a/nipype/interfaces/spm/utils.py +++ b/nipype/interfaces/spm/utils.py @@ -1,17 +1,14 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: +from __future__ import print_function, division, unicode_literals, absolute_import + import os import numpy as np -from .base import (SPMCommandInputSpec, SPMCommand, Info, scans_for_fnames, - scans_for_fname) -from ..matlab import MatlabCommand -from ..base import (TraitedSpec, BaseInterface, - BaseInterfaceInputSpec, isdefined, - OutputMultiPath, InputMultiPath) -from ..base import File, traits -from ...utils.filemanip import (split_filename, fname_presuffix, - filename_to_list, list_to_filename) +from ...utils.filemanip import split_filename, fname_presuffix, filename_to_list, list_to_filename +from ..base import TraitedSpec, isdefined, File, traits, OutputMultiPath, InputMultiPath +from .base import SPMCommandInputSpec, SPMCommand, scans_for_fnames, scans_for_fname class Analyze2niiInputSpec(SPMCommandInputSpec): diff --git a/nipype/interfaces/tests/__init__.py b/nipype/interfaces/tests/__init__.py index e69de29bb2..40a96afc6f 100644 --- a/nipype/interfaces/tests/__init__.py +++ b/nipype/interfaces/tests/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/tests/test_auto_Dcm2nii.py b/nipype/interfaces/tests/test_auto_Dcm2nii.py index c322f76149..b674bf6a47 100644 --- a/nipype/interfaces/tests/test_auto_Dcm2nii.py +++ b/nipype/interfaces/tests/test_auto_Dcm2nii.py @@ -53,7 +53,7 @@ def test_Dcm2nii_inputs(): source_dir=dict(argstr='%s', 
mandatory=True, position=-1, - xor=['source_names'], + xor=[u'source_names'], ), source_in_filename=dict(argstr='-f', usedefault=True, @@ -62,10 +62,10 @@ def test_Dcm2nii_inputs(): copyfile=False, mandatory=True, position=-1, - xor=['source_dir'], + xor=[u'source_dir'], ), spm_analyze=dict(argstr='-s', - xor=['nii_output'], + xor=[u'nii_output'], ), terminal_output=dict(nohash=True, ), diff --git a/nipype/interfaces/tests/test_auto_Dcm2niix.py b/nipype/interfaces/tests/test_auto_Dcm2niix.py index 1e0eb9def1..ce1ca0fb81 100644 --- a/nipype/interfaces/tests/test_auto_Dcm2niix.py +++ b/nipype/interfaces/tests/test_auto_Dcm2niix.py @@ -39,13 +39,13 @@ def test_Dcm2niix_inputs(): source_dir=dict(argstr='%s', mandatory=True, position=-1, - xor=['source_names'], + xor=[u'source_names'], ), source_names=dict(argstr='%s', copyfile=False, mandatory=True, position=-1, - xor=['source_dir'], + xor=[u'source_dir'], ), terminal_output=dict(nohash=True, ), diff --git a/nipype/interfaces/tests/test_auto_MatlabCommand.py b/nipype/interfaces/tests/test_auto_MatlabCommand.py index bfc24cb064..5b879d8b3d 100644 --- a/nipype/interfaces/tests/test_auto_MatlabCommand.py +++ b/nipype/interfaces/tests/test_auto_MatlabCommand.py @@ -41,7 +41,7 @@ def test_MatlabCommand_inputs(): terminal_output=dict(nohash=True, ), uses_mcr=dict(nohash=True, - xor=['nodesktop', 'nosplash', 'single_comp_thread'], + xor=[u'nodesktop', u'nosplash', u'single_comp_thread'], ), ) inputs = MatlabCommand.input_spec() diff --git a/nipype/interfaces/tests/test_auto_MeshFix.py b/nipype/interfaces/tests/test_auto_MeshFix.py index 4c0fd67596..549a6b557e 100644 --- a/nipype/interfaces/tests/test_auto_MeshFix.py +++ b/nipype/interfaces/tests/test_auto_MeshFix.py @@ -26,17 +26,17 @@ def test_MeshFix_inputs(): epsilon_angle=dict(argstr='-a %f', ), finetuning_distance=dict(argstr='%f', - requires=['finetuning_substeps'], + requires=[u'finetuning_substeps'], ), finetuning_inwards=dict(argstr='--fineTuneIn ', - 
requires=['finetuning_distance', 'finetuning_substeps'], + requires=[u'finetuning_distance', u'finetuning_substeps'], ), finetuning_outwards=dict(argstr='--fineTuneIn ', - requires=['finetuning_distance', 'finetuning_substeps'], - xor=['finetuning_inwards'], + requires=[u'finetuning_distance', u'finetuning_substeps'], + xor=[u'finetuning_inwards'], ), finetuning_substeps=dict(argstr='%d', - requires=['finetuning_distance'], + requires=[u'finetuning_distance'], ), ignore_exception=dict(nohash=True, usedefault=True, @@ -49,10 +49,10 @@ def test_MeshFix_inputs(): position=2, ), join_closest_components=dict(argstr='-jc', - xor=['join_closest_components'], + xor=[u'join_closest_components'], ), join_overlapping_largest_components=dict(argstr='-j', - xor=['join_closest_components'], + xor=[u'join_closest_components'], ), laplacian_smoothing_steps=dict(argstr='--smooth %d', ), @@ -68,23 +68,23 @@ def test_MeshFix_inputs(): remove_handles=dict(argstr='--remove-handles', ), save_as_freesurfer_mesh=dict(argstr='--fsmesh', - xor=['save_as_vrml', 'save_as_stl'], + xor=[u'save_as_vrml', u'save_as_stl'], ), save_as_stl=dict(argstr='--stl', - xor=['save_as_vmrl', 'save_as_freesurfer_mesh'], + xor=[u'save_as_vmrl', u'save_as_freesurfer_mesh'], ), save_as_vmrl=dict(argstr='--wrl', - xor=['save_as_stl', 'save_as_freesurfer_mesh'], + xor=[u'save_as_stl', u'save_as_freesurfer_mesh'], ), set_intersections_to_one=dict(argstr='--intersect', ), terminal_output=dict(nohash=True, ), uniform_remeshing_steps=dict(argstr='-u %d', - requires=['uniform_remeshing_vertices'], + requires=[u'uniform_remeshing_vertices'], ), uniform_remeshing_vertices=dict(argstr='--vertices %d', - requires=['uniform_remeshing_steps'], + requires=[u'uniform_remeshing_steps'], ), x_shift=dict(argstr='--smooth %d', ), diff --git a/nipype/interfaces/tests/test_auto_MySQLSink.py b/nipype/interfaces/tests/test_auto_MySQLSink.py index 7b4ff10c0c..ea9904d8d0 100644 --- a/nipype/interfaces/tests/test_auto_MySQLSink.py +++ 
b/nipype/interfaces/tests/test_auto_MySQLSink.py @@ -5,14 +5,14 @@ def test_MySQLSink_inputs(): input_map = dict(config=dict(mandatory=True, - xor=['host'], + xor=[u'host'], ), database_name=dict(mandatory=True, ), host=dict(mandatory=True, - requires=['username', 'password'], + requires=[u'username', u'password'], usedefault=True, - xor=['config'], + xor=[u'config'], ), ignore_exception=dict(nohash=True, usedefault=True, diff --git a/nipype/interfaces/tests/test_auto_XNATSink.py b/nipype/interfaces/tests/test_auto_XNATSink.py index dd681af29f..a0ac549481 100644 --- a/nipype/interfaces/tests/test_auto_XNATSink.py +++ b/nipype/interfaces/tests/test_auto_XNATSink.py @@ -6,11 +6,11 @@ def test_XNATSink_inputs(): input_map = dict(_outputs=dict(usedefault=True, ), - assessor_id=dict(xor=['reconstruction_id'], + assessor_id=dict(xor=[u'reconstruction_id'], ), cache_dir=dict(), config=dict(mandatory=True, - xor=['server'], + xor=[u'server'], ), experiment_id=dict(mandatory=True, ), @@ -20,11 +20,11 @@ def test_XNATSink_inputs(): project_id=dict(mandatory=True, ), pwd=dict(), - reconstruction_id=dict(xor=['assessor_id'], + reconstruction_id=dict(xor=[u'assessor_id'], ), server=dict(mandatory=True, - requires=['user', 'pwd'], - xor=['config'], + requires=[u'user', u'pwd'], + xor=[u'config'], ), share=dict(usedefault=True, ), diff --git a/nipype/interfaces/tests/test_auto_XNATSource.py b/nipype/interfaces/tests/test_auto_XNATSource.py index 297c050a22..f25a735657 100644 --- a/nipype/interfaces/tests/test_auto_XNATSource.py +++ b/nipype/interfaces/tests/test_auto_XNATSource.py @@ -6,7 +6,7 @@ def test_XNATSource_inputs(): input_map = dict(cache_dir=dict(), config=dict(mandatory=True, - xor=['server'], + xor=[u'server'], ), ignore_exception=dict(nohash=True, usedefault=True, @@ -17,8 +17,8 @@ def test_XNATSource_inputs(): query_template_args=dict(usedefault=True, ), server=dict(mandatory=True, - requires=['user', 'pwd'], - xor=['config'], + requires=[u'user', u'pwd'], + 
xor=[u'config'], ), user=dict(), ) diff --git a/nipype/interfaces/tests/test_base.py b/nipype/interfaces/tests/test_base.py index 8f20e01287..c80fb221ae 100644 --- a/nipype/interfaces/tests/test_base.py +++ b/nipype/interfaces/tests/test_base.py @@ -1,9 +1,11 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from __future__ import print_function +from __future__ import print_function, unicode_literals from future import standard_library standard_library.install_aliases() +from builtins import open, str, bytes import os import tempfile import shutil @@ -14,7 +16,7 @@ skipif) import nipype.interfaces.base as nib from nipype.utils.filemanip import split_filename -from nipype.interfaces.base import Undefined, config, text_type +from nipype.interfaces.base import Undefined, config from traits.testing.nose_tools import skip import traits.api as traits @@ -62,7 +64,7 @@ def test_bunch_hash(): yield assert_equal, bhash, 'ddcc7b4ec5675df8cf317a48bd1857fa' # Make sure the hash stored in the json file for `infile` is correct. 
jshash = nib.md5() - with open(json_pth) as fp: + with open(json_pth, 'r') as fp: jshash.update(fp.read().encode('utf-8')) yield assert_equal, newbdict['infile'][0][1], jshash.hexdigest() yield assert_equal, newbdict['yat'], True @@ -588,7 +590,7 @@ def test_Commandline(): yield assert_equal, res.outputs, None class CommandLineInputSpec1(nib.CommandLineInputSpec): - foo = nib.traits.Str(argstr='%s', desc='a str') + foo = nib.Str(argstr='%s', desc='a str') goo = nib.traits.Bool(argstr='-g', desc='a bool', position=0) hoo = nib.traits.List(argstr='-l %s', desc='a list') moo = nib.traits.List(argstr='-i %d...', desc='a repeated list', @@ -661,7 +663,7 @@ def test_CommandLine_output(): ci.inputs.terminal_output = 'file' res = ci.run() yield assert_true, 'stdout.nipype' in res.runtime.stdout - yield assert_equal, type(res.runtime.stdout), text_type + yield assert_true, isinstance(res.runtime.stdout, (str, bytes)) ci = nib.CommandLine(command='ls -l') ci.inputs.terminal_output = 'none' res = ci.run() diff --git a/nipype/interfaces/tests/test_io.py b/nipype/interfaces/tests/test_io.py index c1f4ec35f5..63db195ebd 100644 --- a/nipype/interfaces/tests/test_io.py +++ b/nipype/interfaces/tests/test_io.py @@ -1,10 +1,9 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from __future__ import print_function -from builtins import zip -from builtins import range -from builtins import open - +from __future__ import print_function, unicode_literals +from builtins import str, zip, range, open +from future import standard_library import os import glob import shutil @@ -35,14 +34,12 @@ noboto3 = True # Check for fakes3 -import subprocess +standard_library.install_aliases() +from subprocess import check_call, CalledProcessError try: - ret_code = subprocess.check_call(['which', 'fakes3'], stdout=open(os.devnull, 'wb')) - if ret_code == 0: - fakes3 = True - else: - fakes3 = False -except 
subprocess.CalledProcessError: + ret_code = check_call(['which', 'fakes3'], stdout=open(os.devnull, 'wb')) + fakes3 = (ret_code == 0) +except CalledProcessError: fakes3 = False def test_datagrabber(): @@ -441,7 +438,7 @@ def test_datafinder_depth(): df.inputs.min_depth = min_depth df.inputs.max_depth = max_depth result = df.run() - expected = [str(x) for x in range(min_depth, max_depth + 1)] + expected = ['{}'.format(x) for x in range(min_depth, max_depth + 1)] for path, exp_fname in zip(result.outputs.out_paths, expected): _, fname = os.path.split(path) yield assert_equal, fname, exp_fname diff --git a/nipype/interfaces/tests/test_matlab.py b/nipype/interfaces/tests/test_matlab.py index 874e0bddd8..11e95d5615 100644 --- a/nipype/interfaces/tests/test_matlab.py +++ b/nipype/interfaces/tests/test_matlab.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os diff --git a/nipype/interfaces/tests/test_runtime_profiler.py b/nipype/interfaces/tests/test_runtime_profiler.py index a585d00f8f..e127232412 100644 --- a/nipype/interfaces/tests/test_runtime_profiler.py +++ b/nipype/interfaces/tests/test_runtime_profiler.py @@ -1,10 +1,14 @@ +# -*- coding: utf-8 -*- # test_runtime_profiler.py # # Author: Daniel Clark, 2016 -''' +""" Module to unit test the runtime_profiler in nipype -''' +""" + +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import open, str # Import packages import unittest @@ -68,9 +72,17 @@ def _use_gb_ram(num_gb): ''' Function to consume GB of memory ''' + import sys + + # Getsize of one character string + bsize = sys.getsizeof(' ') - sys.getsizeof(' ') + boffset = sys.getsizeof('') + + num_bytes = int(num_gb * (1024**3)) + # Eat num_gb GB of memory for 1 second + gb_str = ' ' * ((num_bytes - boffset) // bsize) - # Eat 1 GB of memory for 1 second - gb_str = ' ' * int(num_gb*1024.0**3) + assert 
sys.getsizeof(gb_str) == num_bytes # Spin CPU ctr = 0 @@ -139,7 +151,7 @@ def setUp(self): # Input number of sub-threads (not including parent threads) self.num_threads = 2 # Acceptable percent error for memory profiled against input - self.mem_err_gb = 0.25 + self.mem_err_gb = 0.3 # Increased to 30% for py2.7 # ! Only used for benchmarking the profiler over a range of # ! RAM usage and number of threads @@ -272,8 +284,10 @@ def _run_cmdline_workflow(self, num_gb, num_threads): wf.run(plugin='MultiProc', plugin_args=plugin_args) # Get runtime stats from log file - start_str = open(log_file, 'r').readlines()[0].rstrip('\n') - finish_str = open(log_file, 'r').readlines()[1].rstrip('\n') + with open(log_file, 'r') as log_handle: + lines = log_handle.readlines() + start_str = lines[0].rstrip('\n') + finish_str = lines[1].rstrip('\n') # Delete wf base dir shutil.rmtree(base_dir) @@ -350,8 +364,10 @@ def _run_function_workflow(self, num_gb, num_threads): wf.run(plugin='MultiProc', plugin_args=plugin_args) # Get runtime stats from log file - start_str = open(log_file, 'r').readlines()[0].rstrip('\n') - finish_str = open(log_file, 'r').readlines()[1].rstrip('\n') + with open(log_file, 'r') as log_handle: + lines = log_handle.readlines() + start_str = lines[0].rstrip('\n') + finish_str = lines[1].rstrip('\n') # Delete wf base dir shutil.rmtree(base_dir) diff --git a/nipype/interfaces/tests/test_utility.py b/nipype/interfaces/tests/test_utility.py index 0e9f8ad66c..208026a72d 100644 --- a/nipype/interfaces/tests/test_utility.py +++ b/nipype/interfaces/tests/test_utility.py @@ -1,5 +1,6 @@ -from __future__ import print_function -from builtins import range +# -*- coding: utf-8 -*- +from __future__ import print_function, unicode_literals +from builtins import range, open, str, bytes # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os diff --git a/nipype/interfaces/traits_extension.py 
b/nipype/interfaces/traits_extension.py index 49af1db164..5490b567f6 100644 --- a/nipype/interfaces/traits_extension.py +++ b/nipype/interfaces/traits_extension.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """This module contains Trait classes that we've pulled from the @@ -15,6 +16,9 @@ (usually by Robert Kern). """ +from __future__ import print_function, division, unicode_literals, absolute_import + +from builtins import filter, object, str, bytes import os # perform all external trait imports here @@ -24,10 +28,15 @@ import traits.api as traits from traits.trait_handlers import TraitDictObject, TraitListObject from traits.trait_errors import TraitError -from traits.trait_base import _Undefined +from traits.trait_base import _Undefined, class_of + +from traits.api import BaseUnicode +from traits.api import Unicode +DictStrStr = traits.Dict(str, (bytes, str)) +Str = Unicode -class BaseFile (traits.BaseStr): +class BaseFile(BaseUnicode): """ Defines a trait whose value must be the name of a file. """ @@ -81,8 +90,9 @@ def validate(self, object, name, value): class File (BaseFile): - """ Defines a trait whose value must be the name of a file using a C-level - fast validator. + """ + Defines a trait whose value must be the name of a file. + Disables the default C-level fast validator. 
""" def __init__(self, value='', filter=None, auto_set=False, @@ -107,9 +117,9 @@ def __init__(self, value='', filter=None, auto_set=False, ------------- *value* or '' """ - if not exists: - # Define the C-level fast validator to use: - fast_validate = (11, str) + # if not exists: + # # Define the C-level fast validator to use: + # fast_validate = (11, str) super(File, self).__init__(value, filter, auto_set, entries, exists, **metadata) @@ -119,8 +129,9 @@ def __init__(self, value='', filter=None, auto_set=False, # ------------------------------------------------------------------------------- -class BaseDirectory (traits.BaseStr): - """ Defines a trait whose value must be the name of a directory. +class BaseDirectory (BaseUnicode): + """ + Defines a trait whose value must be the name of a directory. """ # A description of the type of value this trait accepts: @@ -159,19 +170,25 @@ def validate(self, object, name, value): Note: The 'fast validator' version performs this check in C. """ - validated_value = super(BaseDirectory, self).validate(object, name, value) - if not self.exists: - return validated_value - - if os.path.isdir(value): - return validated_value + if isinstance(value, (str, bytes)): + if not self.exists: + return value + + if os.path.isdir(value): + return value + else: + raise TraitError( + args='The trait \'{}\' of {} instance is {}, but the path ' + ' \'{}\' does not exist.'.format(name, class_of(object), + self.info_text, value)) self.error(object, name, value) class Directory (BaseDirectory): - """ Defines a trait whose value must be the name of a directory using a - C-level fast validator. + """ + Defines a trait whose value must be the name of a directory. + Disables the default C-level fast validator. 
""" def __init__(self, value='', auto_set=False, entries=0, @@ -195,8 +212,8 @@ def __init__(self, value='', auto_set=False, entries=0, """ # Define the C-level fast validator to use if the directory existence # test is not required: - if not exists: - self.fast_validate = (11, str) + # if not exists: + # self.fast_validate = (11, str) super(Directory, self).__init__(value, auto_set, entries, exists, **metadata) diff --git a/nipype/interfaces/utility.py b/nipype/interfaces/utility.py index 8c0342190b..4289c7dc85 100644 --- a/nipype/interfaces/utility.py +++ b/nipype/interfaces/utility.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Various utilities @@ -8,28 +9,27 @@ >>> datadir = os.path.realpath(os.path.join(filepath, '../testing/data')) >>> os.chdir(datadir) """ +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import zip, range, str, open from future import standard_library standard_library.install_aliases() -from builtins import zip -from builtins import range import os import re -from pickle import dumps -from textwrap import dedent import numpy as np import nibabel as nb +from nipype import logging from .base import (traits, TraitedSpec, DynamicTraitedSpec, File, Undefined, isdefined, OutputMultiPath, runtime_profile, InputMultiPath, BaseInterface, BaseInterfaceInputSpec) from .io import IOBase, add_traits -from ..external.six import string_types from ..testing import assert_equal from ..utils.filemanip import (filename_to_list, copyfile, split_filename) from ..utils.misc import getsource, create_function_from_source +logger = logging.getLogger('interface') if runtime_profile: try: import psutil @@ -55,7 +55,7 @@ class IdentityInterface(IOBase): >>> out = ii.run() - >>> out.outputs.a + >>> out.outputs.a # doctest: +IGNORE_UNICODE 'foo' >>> ii2 = IdentityInterface(fields=['a', 'b'], 
mandatory_inputs=True) @@ -410,7 +410,7 @@ def __init__(self, input_names, output_names, function=None, imports=None, raise Exception('Interface Function does not accept ' 'function objects defined interactively ' 'in a python session') - elif isinstance(function, string_types): + elif isinstance(function, (str, bytes)): self.inputs.function_str = function else: raise Exception('Unknown type of function') @@ -428,7 +428,7 @@ def _set_function_string(self, obj, name, old, new): if name == 'function_str': if hasattr(new, '__call__'): function_source = getsource(new) - elif isinstance(new, string_types): + elif isinstance(new, (str, bytes)): function_source = new self.inputs.trait_set(trait_change_notify=False, **{'%s' % name: function_source}) @@ -493,7 +493,7 @@ def _function_handle_wrapper(queue, **kwargs): raise out # Function ran successfully, populate runtime stats - setattr(runtime, 'runtime_memory_gb', mem_mb/1024.0) + setattr(runtime, 'runtime_memory_gb', mem_mb / 1024.0) setattr(runtime, 'runtime_threads', num_threads) else: out = function_handle(**args) diff --git a/nipype/interfaces/vista/__init__.py b/nipype/interfaces/vista/__init__.py index e1777c408d..d0372042aa 100644 --- a/nipype/interfaces/vista/__init__.py +++ b/nipype/interfaces/vista/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: from .vista import (Vnifti2Image, VtoMat) diff --git a/nipype/interfaces/vista/tests/__init__.py b/nipype/interfaces/vista/tests/__init__.py index e69de29bb2..40a96afc6f 100644 --- a/nipype/interfaces/vista/tests/__init__.py +++ b/nipype/interfaces/vista/tests/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/vista/tests/test_auto_Vnifti2Image.py b/nipype/interfaces/vista/tests/test_auto_Vnifti2Image.py index 16abe83a0e..2fd2ad4407 100644 --- a/nipype/interfaces/vista/tests/test_auto_Vnifti2Image.py +++ 
b/nipype/interfaces/vista/tests/test_auto_Vnifti2Image.py @@ -22,7 +22,7 @@ def test_Vnifti2Image_inputs(): out_file=dict(argstr='-out %s', hash_files=False, keep_extension=False, - name_source=['in_file'], + name_source=[u'in_file'], name_template='%s.v', position=-1, ), diff --git a/nipype/interfaces/vista/tests/test_auto_VtoMat.py b/nipype/interfaces/vista/tests/test_auto_VtoMat.py index 77c814dab5..6a55d5e69c 100644 --- a/nipype/interfaces/vista/tests/test_auto_VtoMat.py +++ b/nipype/interfaces/vista/tests/test_auto_VtoMat.py @@ -19,7 +19,7 @@ def test_VtoMat_inputs(): out_file=dict(argstr='-out %s', hash_files=False, keep_extension=False, - name_source=['in_file'], + name_source=[u'in_file'], name_template='%s.mat', position=-1, ), diff --git a/nipype/interfaces/vista/vista.py b/nipype/interfaces/vista/vista.py index fdb054a5ad..0329404232 100644 --- a/nipype/interfaces/vista/vista.py +++ b/nipype/interfaces/vista/vista.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -8,12 +9,9 @@ >>> os.chdir(datadir) """ +from __future__ import print_function, division, unicode_literals, absolute_import -from nipype.interfaces.base import CommandLineInputSpec, CommandLine, traits, TraitedSpec, File -from nipype.utils.filemanip import split_filename -import os -import os.path as op -from nipype.interfaces.traits_extension import isdefined +from ..base import CommandLineInputSpec, CommandLine, TraitedSpec, File class Vnifti2ImageInputSpec(CommandLineInputSpec): @@ -36,7 +34,7 @@ class Vnifti2Image(CommandLine): >>> vimage = Vnifti2Image() >>> vimage.inputs.in_file = 'image.nii' - >>> vimage.cmdline + >>> vimage.cmdline # doctest: +IGNORE_UNICODE 'vnifti2image -in image.nii -out image.v' >>> vimage.run() # doctest: +SKIP """ @@ -65,7 +63,7 @@ class VtoMat(CommandLine): >>> vimage = VtoMat() >>> vimage.inputs.in_file = 'image.v' - >>> vimage.cmdline + >>> 
vimage.cmdline # doctest: +IGNORE_UNICODE 'vtomat -in image.v -out image.mat' >>> vimage.run() # doctest: +SKIP """ diff --git a/nipype/interfaces/vtkbase.py b/nipype/interfaces/vtkbase.py index 452203367c..458d4fa7b5 100644 --- a/nipype/interfaces/vtkbase.py +++ b/nipype/interfaces/vtkbase.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -5,6 +6,7 @@ Code using tvtk should import it through this module """ +from __future__ import print_function, division, unicode_literals, absolute_import import os from .. import logging diff --git a/nipype/pipeline/__init__.py b/nipype/pipeline/__init__.py index b7a6afe20e..f4e865980c 100644 --- a/nipype/pipeline/__init__.py +++ b/nipype/pipeline/__init__.py @@ -1,10 +1,10 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Package contains modules for generating pipelines using interfaces """ - -from __future__ import absolute_import +from __future__ import print_function, division, unicode_literals, absolute_import __docformat__ = 'restructuredtext' from .engine import Node, MapNode, JoinNode, Workflow diff --git a/nipype/pipeline/engine/base.py b/nipype/pipeline/engine/base.py index acd69cbffd..7bb319e315 100644 --- a/nipype/pipeline/engine/base.py +++ b/nipype/pipeline/engine/base.py @@ -14,26 +14,19 @@ os.chdir(datadir) """ - -from __future__ import absolute_import +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import object from future import standard_library standard_library.install_aliases() -from builtins import object - -try: - from collections import OrderedDict -except ImportError: - from ordereddict import OrderedDict from copy import deepcopy import re import numpy as np -from ...interfaces.traits_extension import traits, Undefined +from ... 
import logging from ...interfaces.base import DynamicTraitedSpec from ...utils.filemanip import loadpkl, savepkl -from ... import logging logger = logging.getLogger('workflow') diff --git a/nipype/pipeline/engine/nodes.py b/nipype/pipeline/engine/nodes.py index 9adc6b0c10..e19a41dcb8 100644 --- a/nipype/pipeline/engine/nodes.py +++ b/nipype/pipeline/engine/nodes.py @@ -14,66 +14,48 @@ os.chdir(datadir) """ - -from __future__ import absolute_import +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import range, object, str, bytes, open from future import standard_library standard_library.install_aliases() -from builtins import range -from builtins import object - -from datetime import datetime -from nipype.utils.misc import flatten, unflatten -try: - from collections import OrderedDict -except ImportError: - from ordereddict import OrderedDict +from collections import OrderedDict from copy import deepcopy import pickle from glob import glob import gzip -import inspect import os import os.path as op -import re import shutil import errno import socket from shutil import rmtree import sys from tempfile import mkdtemp -from warnings import warn from hashlib import sha1 -import numpy as np -import networkx as nx - -from ...utils.misc import package_check, str2bool -package_check('networkx', '1.3') - from ... 
import config, logging -logger = logging.getLogger('workflow') -from ...interfaces.base import (traits, InputMultiPath, CommandLine, - Undefined, TraitedSpec, DynamicTraitedSpec, - Bunch, InterfaceResult, md5, Interface, - TraitDictObject, TraitListObject, isdefined, - runtime_profile) -from ...utils.misc import (getsource, create_function_from_source, - flatten, unflatten) +from ...utils.misc import (flatten, unflatten, package_check, str2bool) from ...utils.filemanip import (save_json, FileNotFoundError, filename_to_list, list_to_filename, copyfiles, fnames_presuffix, loadpkl, split_filename, load_json, savepkl, write_rst_header, write_rst_dict, write_rst_list) -from ...external.six import string_types +from ...interfaces.base import (traits, InputMultiPath, CommandLine, + Undefined, TraitedSpec, DynamicTraitedSpec, + Bunch, InterfaceResult, md5, Interface, + TraitDictObject, TraitListObject, isdefined, + runtime_profile) from .utils import (generate_expanded_graph, modify_paths, export_graph, make_output_dir, write_workflow_prov, clean_working_directory, format_dot, topological_sort, get_print_name, merge_dict, evaluate_connect_function) from .base import EngineBase +package_check('networkx', '1.3') +logger = logging.getLogger('workflow') class Node(EngineBase): """Wraps interface objects for use in pipeline @@ -546,6 +528,10 @@ def _load_resultfile(self, cwd): pkl_file = gzip.open(resultsoutputfile, 'rb') try: result = pickle.load(pkl_file) + except UnicodeDecodeError: + # Was this pickle created with Python 2.x? 
+ pickle.load(pkl_file, fix_imports=True, encoding='utf-8') + logger.warn('Successfully loaded pickle in compatibility mode') except (traits.TraitError, AttributeError, ImportError) as err: if isinstance(err, (AttributeError, ImportError)): attribute_error = True @@ -820,7 +806,7 @@ def __init__(self, interface, name, joinsource, joinfield=None, if not joinfield: # default is the interface fields joinfield = self._interface.inputs.copyable_trait_names() - elif isinstance(joinfield, string_types): + elif isinstance(joinfield, (str, bytes)): joinfield = [joinfield] self.joinfield = joinfield """the fields to join""" @@ -869,7 +855,7 @@ def _add_join_item_fields(self): ... name='inputspec'), >>> join = JoinNode(IdentityInterface(fields=['images', 'mask']), ... joinsource='inputspec', joinfield='images', name='join') - >>> join._add_join_item_fields() + >>> join._add_join_item_fields() # doctest: +IGNORE_UNICODE {'images': 'imagesJ1'} Return the {base field: slot field} dictionary @@ -1030,7 +1016,7 @@ def __init__(self, interface, iterfield, name, serial=False, nested=False, **kwa """ super(MapNode, self).__init__(interface, name, **kwargs) - if isinstance(iterfield, string_types): + if isinstance(iterfield, (str, bytes)): iterfield = [iterfield] self.iterfield = iterfield self.nested = nested diff --git a/nipype/pipeline/engine/tests/__init__.py b/nipype/pipeline/engine/tests/__init__.py index e6c567036d..81bf04cc92 100644 --- a/nipype/pipeline/engine/tests/__init__.py +++ b/nipype/pipeline/engine/tests/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/pipeline/engine/tests/test_engine.py b/nipype/pipeline/engine/tests/test_engine.py index 5eaaa81fbf..6e0a6839b2 100644 --- a/nipype/pipeline/engine/tests/test_engine.py +++ b/nipype/pipeline/engine/tests/test_engine.py @@ -1,9 +1,13 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: 
python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Tests for the engine module """ from __future__ import print_function +from __future__ import unicode_literals +from builtins import str +from builtins import open from copy import deepcopy from glob import glob import os @@ -427,7 +431,7 @@ def test_doubleconnect(): import nipype.pipeline.engine as pe import nipype.interfaces.spm as spm import os -from nipype.external.six import StringIO +from six import StringIO from nipype.utils.config import config config.readfp(StringIO(""" diff --git a/nipype/pipeline/engine/tests/test_join.py b/nipype/pipeline/engine/tests/test_join.py index b0882de91e..63ef1041b5 100644 --- a/nipype/pipeline/engine/tests/test_join.py +++ b/nipype/pipeline/engine/tests/test_join.py @@ -1,7 +1,10 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Tests for join expansion """ +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import open import os from shutil import rmtree diff --git a/nipype/pipeline/engine/tests/test_utils.py b/nipype/pipeline/engine/tests/test_utils.py index 8420f587c2..4d37beef4d 100644 --- a/nipype/pipeline/engine/tests/test_utils.py +++ b/nipype/pipeline/engine/tests/test_utils.py @@ -1,9 +1,11 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Tests for the engine utils module """ +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import range, open -from builtins import range import os from copy import deepcopy from tempfile import mkdtemp diff --git a/nipype/pipeline/engine/utils.py b/nipype/pipeline/engine/utils.py index 8a3f6f2ce3..1d0519a68b 100644 --- a/nipype/pipeline/engine/utils.py +++ b/nipype/pipeline/engine/utils.py @@ -4,42 +4,31 @@ 
# vi: set ft=python sts=4 ts=4 sw=4 et: """Utility routines for workflow graphs """ - -from __future__ import absolute_import +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import str, open, map, next, zip, range from future import standard_library standard_library.install_aliases() +from collections import defaultdict -from builtins import map -from builtins import next -from builtins import zip -from builtins import range - -try: - import itertools.imap as map -except ImportError: - pass - -from collections import OrderedDict from copy import deepcopy from glob import glob -from collections import defaultdict try: from inspect import signature except ImportError: from funcsigs import signature + import os import re import pickle +from functools import reduce import numpy as np from nipype.utils.misc import package_check -from functools import reduce package_check('networkx', '1.3') import networkx as nx -from ...external.six import string_types from ...utils.filemanip import (fname_presuffix, FileNotFoundError, filename_to_list, get_related_files) from ...utils.misc import create_function_from_source, str2bool @@ -131,7 +120,7 @@ def format_node(node, format='python', include_config=False): klass.__class__.__name__) comment = '# Node: %s' % node.fullname spec = signature(node._interface.__init__) - args = [p.name for p in spec.parameters.values()] + args = [p.name for p in list(spec.parameters.values())] args = args[1:] if args: filled_args = [] @@ -152,7 +141,10 @@ def format_node(node, format='python', include_config=False): lines = [importline, comment, nodedef] if include_config: - lines = [importline, "from collections import OrderedDict", + lines = [importline, + "from future import standard_library", + "standard_library.install_aliases()", + "from collections import OrderedDict", comment, nodedef] lines.append('%s.config = %s' % (name, node.config)) @@ -194,7 +186,7 @@ def modify_paths(object, 
relative=True, basedir=None): out = tuple(out) else: if isdefined(object): - if isinstance(object, string_types) and os.path.isfile(object): + if isinstance(object, (str, bytes)) and os.path.isfile(object): if relative: if config.getboolean('execution', 'use_relative_paths'): out = relpath(object, start=basedir) @@ -277,7 +269,7 @@ def _write_detailed_dot(graph, dotfilename): inports = [] for u, v, d in graph.in_edges_iter(nbunch=n, data=True): for cd in d['connect']: - if isinstance(cd[0], string_types): + if isinstance(cd[0], (str, bytes)): outport = cd[0] else: outport = cd[0][0] @@ -297,7 +289,7 @@ def _write_detailed_dot(graph, dotfilename): outports = [] for u, v, d in graph.out_edges_iter(nbunch=n, data=True): for cd in d['connect']: - if isinstance(cd[0], string_types): + if isinstance(cd[0], (str, bytes)): outport = cd[0] else: outport = cd[0][0] @@ -362,7 +354,7 @@ def count_iterables(iterables, synchronize=False): op = max else: op = lambda x, y: x * y - return reduce(op, [len(func()) for _, func in iterables.items()]) + return reduce(op, [len(func()) for _, func in list(iterables.items())]) def walk(children, level=0, path=None, usename=True): @@ -502,7 +494,7 @@ def _merge_graphs(supergraph, nodes, subgraph, nodeid, iterables, for edge in supergraph.in_edges_iter(supernodes[nidx]): # make sure edge is not part of subgraph if edge[0] not in subgraph.nodes(): - if n._hierarchy + n._id not in edgeinfo.keys(): + if n._hierarchy + n._id not in list(edgeinfo.keys()): edgeinfo[n._hierarchy + n._id] = [] edgeinfo[n._hierarchy + n._id].append((edge[0], supergraph.get_edge_data(*edge))) @@ -547,7 +539,7 @@ def _merge_graphs(supergraph, nodes, subgraph, nodeid, iterables, supergraph.add_nodes_from(Gc.nodes()) supergraph.add_edges_from(Gc.edges(data=True)) for node in Gc.nodes(): - if node._hierarchy + node._id in edgeinfo.keys(): + if node._hierarchy + node._id in list(edgeinfo.keys()): for info in edgeinfo[node._hierarchy + node._id]: 
supergraph.add_edges_from([(info[0], node, info[1])]) node._id += template % i @@ -724,7 +716,7 @@ def generate_expanded_graph(graph_in): # the itersource is a (node name, fields) tuple src_name, src_fields = inode.itersource # convert a single field to a list - if isinstance(src_fields, string_types): + if isinstance(src_fields, (str, bytes)): src_fields = [src_fields] # find the unique iterable source node in the graph try: @@ -758,7 +750,7 @@ def generate_expanded_graph(graph_in): def make_field_func(*pair): return pair[0], lambda: pair[1] - iterables = dict([make_field_func(*pair) for pair in iter_dict.items()]) + iterables = dict([make_field_func(*pair) for pair in list(iter_dict.items())]) else: iterables = inode.iterables.copy() inode.iterables = None @@ -909,7 +901,7 @@ def _standardize_iterables(node): if node.synchronize: if len(iterables) == 2: first, last = iterables - if all((isinstance(item, string_types) and item in fields + if all((isinstance(item, (str, bytes)) and item in fields for item in first)): iterables = _transpose_iterables(first, last) @@ -1100,7 +1092,7 @@ def walk_outputs(object): if isdefined(val): out.extend(walk_outputs(val)) else: - if isdefined(object) and isinstance(object, string_types): + if isdefined(object) and isinstance(object, (str, bytes)): if os.path.islink(object) or os.path.isfile(object): out = [(filename, 'f') for filename in get_all_files(object)] elif os.path.isdir(object): diff --git a/nipype/pipeline/engine/workflows.py b/nipype/pipeline/engine/workflows.py index dd73608695..9eca5f7235 100644 --- a/nipype/pipeline/engine/workflows.py +++ b/nipype/pipeline/engine/workflows.py @@ -14,59 +14,41 @@ os.chdir(datadir) """ +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import range, object, str, bytes, open -from __future__ import absolute_import - +# Py2 compat: http://python-future.org/compatible_idioms.html#collections-counter-and-ordereddict from future import 
standard_library standard_library.install_aliases() -from builtins import range -from builtins import object from datetime import datetime -from nipype.utils.misc import flatten, unflatten -try: - from collections import OrderedDict -except ImportError: - from ordereddict import OrderedDict from copy import deepcopy import pickle -from glob import glob -import gzip -import inspect import os import os.path as op -import re import shutil -import errno -import socket -from shutil import rmtree import sys -from tempfile import mkdtemp from warnings import warn -from hashlib import sha1 import numpy as np import networkx as nx -from ...utils.misc import package_check, str2bool -package_check('networkx', '1.3') from ... import config, logging -logger = logging.getLogger('workflow') +from ...utils.misc import (unflatten, package_check, str2bool, + getsource, create_function_from_source) from ...interfaces.base import (traits, InputMultiPath, CommandLine, Undefined, TraitedSpec, DynamicTraitedSpec, Bunch, InterfaceResult, md5, Interface, TraitDictObject, TraitListObject, isdefined) -from ...utils.misc import (getsource, create_function_from_source, - flatten, unflatten) + from ...utils.filemanip import (save_json, FileNotFoundError, filename_to_list, list_to_filename, copyfiles, fnames_presuffix, loadpkl, split_filename, load_json, savepkl, write_rst_header, write_rst_dict, write_rst_list) -from ...external.six import string_types from .utils import (generate_expanded_graph, modify_paths, export_graph, make_output_dir, write_workflow_prov, clean_working_directory, format_dot, topological_sort, @@ -76,6 +58,8 @@ from .base import EngineBase from .nodes import Node, MapNode +package_check('networkx', '1.3') +logger = logging.getLogger('workflow') class Workflow(EngineBase): """Controls the setup and execution of a pipeline of processes.""" @@ -225,7 +209,7 @@ def connect(self, *args, **kwargs): # handles the case that source is specified # with a function sourcename = 
source[0] - elif isinstance(source, string_types): + elif isinstance(source, (str, bytes)): sourcename = source else: raise Exception(('Unknown source specification in ' @@ -246,7 +230,7 @@ def connect(self, *args, **kwargs): # turn functions into strings for srcnode, destnode, connects in connection_list: for idx, (src, dest) in enumerate(connects): - if isinstance(src, tuple) and not isinstance(src[1], string_types): + if isinstance(src, tuple) and not isinstance(src[1], (str, bytes)): function_source = getsource(src[1]) connects[idx] = ((src[0], function_source, src[2:]), dest) @@ -561,7 +545,7 @@ def run(self, plugin=None, plugin_args=None, updatehash=False): """ if plugin is None: plugin = config.get('execution', 'plugin') - if not isinstance(plugin, string_types): + if not isinstance(plugin, (str, bytes)): runner = plugin else: name = 'nipype.pipeline.plugins' @@ -799,7 +783,7 @@ def _set_input(self, object, name, newvalue): def _set_node_input(self, node, param, source, sourceinfo): """Set inputs of a node given the edge connection""" - if isinstance(sourceinfo, string_types): + if isinstance(sourceinfo, (str, bytes)): val = source.get_output(sourceinfo) elif isinstance(sourceinfo, tuple): if callable(sourceinfo[1]): diff --git a/nipype/pipeline/plugins/__init__.py b/nipype/pipeline/plugins/__init__.py index 0bf1a8d2f5..cb2c193004 100644 --- a/nipype/pipeline/plugins/__init__.py +++ b/nipype/pipeline/plugins/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/pipeline/plugins/base.py b/nipype/pipeline/plugins/base.py index 0881e9edf0..098ae5d636 100644 --- a/nipype/pipeline/plugins/base.py +++ b/nipype/pipeline/plugins/base.py @@ -1,10 +1,10 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Common graph operations for execution """ 
- -from builtins import range -from builtins import object +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import range, object, open from copy import deepcopy from glob import glob @@ -22,13 +22,13 @@ import scipy.sparse as ssp +from ... import logging from ...utils.filemanip import savepkl, loadpkl from ...utils.misc import str2bool from ..engine.utils import (nx, dfs_preorder, topological_sort) from ..engine import MapNode -from ... import logging logger = logging.getLogger('workflow') iflogger = logging.getLogger('interface') @@ -173,9 +173,8 @@ def create_pyscript(node, updatehash=False, store_exception=True): """ cmdstr = cmdstr % (mpl_backend, pkl_file, batch_dir, node.config, suffix) pyscript = os.path.join(batch_dir, 'pyscript_%s.py' % suffix) - fp = open(pyscript, 'wt') - fp.writelines(cmdstr) - fp.close() + with open(pyscript, 'wt') as fp: + fp.writelines(cmdstr) return pyscript @@ -491,7 +490,8 @@ def __init__(self, template, plugin_args=None): if 'template' in plugin_args: self._template = plugin_args['template'] if os.path.isfile(self._template): - self._template = open(self._template).read() + with open(self._template) as tpl_file: + self._template = tpl_file.read() if 'qsub_args' in plugin_args: self._qsub_args = plugin_args['qsub_args'] self._pending = {} @@ -567,9 +567,8 @@ def _submit_job(self, node, updatehash=False): batchscript = '\n'.join((self._template, '%s %s' % (sys.executable, pyscript))) batchscriptfile = os.path.join(batch_dir, 'batchscript_%s.sh' % name) - fp = open(batchscriptfile, 'wt') - fp.writelines(batchscript) - fp.close() + with open(batchscriptfile, 'wt') as fp: + fp.writelines(batchscript) return self._submit_batchtask(batchscriptfile, node) def _report_crash(self, node, result=None): @@ -613,13 +612,15 @@ def _get_args(self, node, keywords): for keyword in keywords: value = getattr(self, "_" + keyword) if keyword == "template" and os.path.isfile(value): - value = 
open(value).read() + with open(value) as f: + value = f.read() if (hasattr(node, "plugin_args") and isinstance(node.plugin_args, dict) and keyword in node.plugin_args): if (keyword == "template" and os.path.isfile(node.plugin_args[keyword])): - tmp_value = open(node.plugin_args[keyword]).read() + with open(node.plugin_args[keyword]) as f: + tmp_value = f.read() else: tmp_value = node.plugin_args[keyword] diff --git a/nipype/pipeline/plugins/callback_log.py b/nipype/pipeline/plugins/callback_log.py index 14287bda07..5ddc9eedd5 100644 --- a/nipype/pipeline/plugins/callback_log.py +++ b/nipype/pipeline/plugins/callback_log.py @@ -1,7 +1,9 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Callback logger for recording workflow and node run stats """ +from __future__ import print_function, division, unicode_literals, absolute_import # Log node stats function diff --git a/nipype/pipeline/plugins/condor.py b/nipype/pipeline/plugins/condor.py index 51c433df4f..9b8b5c218d 100644 --- a/nipype/pipeline/plugins/condor.py +++ b/nipype/pipeline/plugins/condor.py @@ -1,14 +1,14 @@ +# -*- coding: utf-8 -*- """Parallel workflow execution via Condor """ +from __future__ import print_function, division, unicode_literals, absolute_import import os +from time import sleep +from ...interfaces.base import CommandLine from .base import (SGELikeBatchManagerBase, logger, iflogger, logging) -from nipype.interfaces.base import CommandLine - -from time import sleep - class CondorPlugin(SGELikeBatchManagerBase): """Execute using Condor diff --git a/nipype/pipeline/plugins/dagman.py b/nipype/pipeline/plugins/dagman.py index 1ec5b4d424..1001ab5dac 100644 --- a/nipype/pipeline/plugins/dagman.py +++ b/nipype/pipeline/plugins/dagman.py @@ -1,5 +1,8 @@ +# -*- coding: utf-8 -*- """Parallel workflow execution via Condor DAGMan """ +from __future__ import print_function, division, unicode_literals, 
absolute_import +from builtins import open import os import sys @@ -8,7 +11,6 @@ from warnings import warn from .base import (GraphPluginBase, logger) - from ...interfaces.base import CommandLine @@ -52,7 +54,8 @@ class CondorDAGManPlugin(GraphPluginBase): def _get_str_or_file(self, arg): if os.path.isfile(arg): - content = open(arg).read() + with open(arg) as f: + content = f.read() else: content = arg return content diff --git a/nipype/pipeline/plugins/debug.py b/nipype/pipeline/plugins/debug.py index 9d219ac7df..7c8fd451aa 100644 --- a/nipype/pipeline/plugins/debug.py +++ b/nipype/pipeline/plugins/debug.py @@ -1,10 +1,12 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Debug plugin """ +from __future__ import print_function, division, unicode_literals, absolute_import -from .base import (PluginBase, logger) -from ..engine.utils import (nx) +import networkx as nx +from .base import PluginBase, logger class DebugPlugin(PluginBase): diff --git a/nipype/pipeline/plugins/ipython.py b/nipype/pipeline/plugins/ipython.py index 942a88e962..b19b4221f6 100644 --- a/nipype/pipeline/plugins/ipython.py +++ b/nipype/pipeline/plugins/ipython.py @@ -1,7 +1,9 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Parallel workflow execution via IPython controller """ +from __future__ import print_function, division, unicode_literals, absolute_import from future import standard_library standard_library.install_aliases() @@ -10,6 +12,7 @@ from pickle import dumps import sys +from .base import (DistributedPluginBase, logger, report_crash) IPython_not_loaded = False try: @@ -18,7 +21,6 @@ except: IPython_not_loaded = True -from .base import (DistributedPluginBase, logger, report_crash) def execute_task(pckld_task, node_config, updatehash): diff --git a/nipype/pipeline/plugins/ipythonx.py 
b/nipype/pipeline/plugins/ipythonx.py index f825f7f4fa..8cb3c4190a 100644 --- a/nipype/pipeline/plugins/ipythonx.py +++ b/nipype/pipeline/plugins/ipythonx.py @@ -1,12 +1,16 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Parallel workflow execution via IPython controller """ +from __future__ import print_function, division, unicode_literals, absolute_import import sys from future.utils import raise_from from ...interfaces.base import LooseVersion +from .base import (DistributedPluginBase, logger, report_crash) + IPython_not_loaded = False try: from IPython import __version__ as IPyversion @@ -16,7 +20,6 @@ IPython_not_loaded = True -from .base import (DistributedPluginBase, logger, report_crash) class IPythonXPlugin(DistributedPluginBase): diff --git a/nipype/pipeline/plugins/linear.py b/nipype/pipeline/plugins/linear.py index 48e8aba64c..bdb61b8c44 100644 --- a/nipype/pipeline/plugins/linear.py +++ b/nipype/pipeline/plugins/linear.py @@ -1,12 +1,16 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Local serial workflow execution """ +from __future__ import print_function, division, unicode_literals, absolute_import + import os +import networkx as nx from .base import (PluginBase, logger, report_crash, report_nodes_not_run, str2bool) -from ..engine.utils import (nx, dfs_preorder, topological_sort) +from ..engine.utils import dfs_preorder, topological_sort class LinearPlugin(PluginBase): diff --git a/nipype/pipeline/plugins/lsf.py b/nipype/pipeline/plugins/lsf.py index 1dbd91b944..6e27b3ab95 100644 --- a/nipype/pipeline/plugins/lsf.py +++ b/nipype/pipeline/plugins/lsf.py @@ -1,15 +1,14 @@ +# -*- coding: utf-8 -*- """Parallel workflow execution via LSF """ +from __future__ import print_function, division, unicode_literals, absolute_import import os - -from .base import 
(SGELikeBatchManagerBase, logger, iflogger, logging) - -from nipype.interfaces.base import CommandLine - +import re from time import sleep -import re +from .base import (SGELikeBatchManagerBase, logger, iflogger, logging) +from ...interfaces.base import CommandLine class LSFPlugin(SGELikeBatchManagerBase): diff --git a/nipype/pipeline/plugins/multiproc.py b/nipype/pipeline/plugins/multiproc.py index bc77296f0b..95e486f3b9 100644 --- a/nipype/pipeline/plugins/multiproc.py +++ b/nipype/pipeline/plugins/multiproc.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Parallel workflow execution via multiprocessing @@ -5,19 +6,21 @@ Support for child processes running as non-daemons based on http://stackoverflow.com/a/8963618/1183453 """ +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import open # Import packages from multiprocessing import Process, Pool, cpu_count, pool from traceback import format_exception -import os import sys -import numpy as np from copy import deepcopy -from ..engine import MapNode -from ...utils.misc import str2bool +import numpy as np + from ... 
import logging -from nipype.pipeline.plugins import semaphore_singleton +from ...utils.misc import str2bool +from ..engine import MapNode +from ..plugins import semaphore_singleton from .base import (DistributedPluginBase, report_crash) # Init logger diff --git a/nipype/pipeline/plugins/oar.py b/nipype/pipeline/plugins/oar.py index 1298a83cc9..ca77fade1e 100644 --- a/nipype/pipeline/plugins/oar.py +++ b/nipype/pipeline/plugins/oar.py @@ -1,15 +1,17 @@ +# -*- coding: utf-8 -*- """Parallel workflow execution via OAR http://oar.imag.fr """ +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import str, open import os import stat from time import sleep import subprocess -import json +import simplejson as json from .base import (SGELikeBatchManagerBase, logger, iflogger, logging) - -from nipype.interfaces.base import CommandLine +from ...interfaces.base import CommandLine class OARPlugin(SGELikeBatchManagerBase): diff --git a/nipype/pipeline/plugins/pbs.py b/nipype/pipeline/plugins/pbs.py index 0c298670fd..2c354fd950 100644 --- a/nipype/pipeline/plugins/pbs.py +++ b/nipype/pipeline/plugins/pbs.py @@ -1,13 +1,16 @@ +# -*- coding: utf-8 -*- """Parallel workflow execution via PBS/Torque """ +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import str, open import os from time import sleep import subprocess +from ...interfaces.base import CommandLine from .base import (SGELikeBatchManagerBase, logger, iflogger, logging) -from ...interfaces.base import CommandLine, text_type class PBSPlugin(SGELikeBatchManagerBase): @@ -95,15 +98,14 @@ def _submit_batchtask(self, scriptfile, node): sleep(self._retry_timeout) # sleep 2 seconds and try again. 
else: iflogger.setLevel(oldlevel) - raise RuntimeError('\n'.join((('Could not submit pbs task' - ' for node %s') % node._id, - text_type(e)))) + raise RuntimeError( + 'Could not submit pbs task for node {}\n{}'.format(node._id, e)) else: break iflogger.setLevel(oldlevel) # retrieve pbs taskid taskid = result.runtime.stdout.split('.')[0] self._pending[taskid] = node.output_dir() - logger.debug('submitted pbs task: %s for node %s' % (taskid, node._id)) + logger.debug('submitted pbs task: {} for node {}'.format(taskid, node._id)) return taskid diff --git a/nipype/pipeline/plugins/pbsgraph.py b/nipype/pipeline/plugins/pbsgraph.py index 0c35292f45..1aafd24e37 100644 --- a/nipype/pipeline/plugins/pbsgraph.py +++ b/nipype/pipeline/plugins/pbsgraph.py @@ -1,13 +1,14 @@ """Parallel workflow execution via PBS/Torque """ +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import open import os import sys -from .base import (GraphPluginBase, logger) - from ...interfaces.base import CommandLine from .sgegraph import SGEGraphPlugin +from .base import logger class PBSGraphPlugin(SGEGraphPlugin): diff --git a/nipype/pipeline/plugins/semaphore_singleton.py b/nipype/pipeline/plugins/semaphore_singleton.py index 99c7752b82..1b43e6652c 100644 --- a/nipype/pipeline/plugins/semaphore_singleton.py +++ b/nipype/pipeline/plugins/semaphore_singleton.py @@ -1,2 +1,4 @@ +# -*- coding: utf-8 -*- +from __future__ import print_function, division, unicode_literals, absolute_import import threading semaphore = threading.Semaphore(1) diff --git a/nipype/pipeline/plugins/sge.py b/nipype/pipeline/plugins/sge.py index bbe5be9db8..c87cb418a9 100644 --- a/nipype/pipeline/plugins/sge.py +++ b/nipype/pipeline/plugins/sge.py @@ -1,5 +1,7 @@ +# -*- coding: utf-8 -*- """Parallel workflow execution via SGE """ +from __future__ import print_function, division, unicode_literals, absolute_import from builtins import object @@ -13,8 +15,8 @@ import random +from 
...interfaces.base import CommandLine from .base import (SGELikeBatchManagerBase, logger, iflogger, logging) -from nipype.interfaces.base import CommandLine DEBUGGING_PREFIX = str(int(random.uniform(100, 999))) @@ -312,9 +314,9 @@ def qsub_sanitize_job_name(testjobname): Numbers and punctuation are not allowed. - >>> qsub_sanitize_job_name('01') + >>> qsub_sanitize_job_name('01') # doctest: +IGNORE_UNICODE 'J01' - >>> qsub_sanitize_job_name('a01') + >>> qsub_sanitize_job_name('a01') # doctest: +IGNORE_UNICODE 'a01' """ if testjobname[0].isalpha(): diff --git a/nipype/pipeline/plugins/sgegraph.py b/nipype/pipeline/plugins/sgegraph.py index c8d97a682f..dd4b8076e8 100644 --- a/nipype/pipeline/plugins/sgegraph.py +++ b/nipype/pipeline/plugins/sgegraph.py @@ -1,12 +1,14 @@ +# -*- coding: utf-8 -*- """Parallel workflow execution via SGE """ +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import open import os import sys -from .base import (GraphPluginBase, logger) - from ...interfaces.base import CommandLine +from .base import (GraphPluginBase, logger) def node_completed_status(checknode): diff --git a/nipype/pipeline/plugins/slurm.py b/nipype/pipeline/plugins/slurm.py index a0b6b9c529..e0b23fb335 100644 --- a/nipype/pipeline/plugins/slurm.py +++ b/nipype/pipeline/plugins/slurm.py @@ -5,15 +5,16 @@ Parallel workflow execution with SLURM ''' +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import open import os import re -import subprocess from time import sleep +from ...interfaces.base import CommandLine from .base import (SGELikeBatchManagerBase, logger, iflogger, logging) -from ...interfaces.base import CommandLine class SLURMPlugin(SGELikeBatchManagerBase): @@ -50,7 +51,8 @@ def __init__(self, **kwargs): if 'template' in kwargs['plugin_args']: self._template = kwargs['plugin_args']['template'] if os.path.isfile(self._template): - self._template = 
open(self._template).read() + with open(self._template) as f: + self._template = f.read() if 'sbatch_args' in kwargs['plugin_args']: self._sbatch_args = kwargs['plugin_args']['sbatch_args'] self._pending = {} diff --git a/nipype/pipeline/plugins/slurmgraph.py b/nipype/pipeline/plugins/slurmgraph.py index 87e1408d67..794a35bc84 100644 --- a/nipype/pipeline/plugins/slurmgraph.py +++ b/nipype/pipeline/plugins/slurmgraph.py @@ -1,12 +1,14 @@ +# -*- coding: utf-8 -*- """Parallel workflow execution via SLURM """ +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import open import os import sys -from .base import (GraphPluginBase, logger) - from ...interfaces.base import CommandLine +from .base import (GraphPluginBase, logger) def node_completed_status(checknode): diff --git a/nipype/pipeline/plugins/somaflow.py b/nipype/pipeline/plugins/somaflow.py index db7768703e..f384e35adf 100644 --- a/nipype/pipeline/plugins/somaflow.py +++ b/nipype/pipeline/plugins/somaflow.py @@ -1,9 +1,13 @@ +# -*- coding: utf-8 -*- """Parallel workflow execution via PBS/Torque """ +from __future__ import print_function, division, unicode_literals, absolute_import import os import sys +from .base import (GraphPluginBase, logger) + soma_not_loaded = False try: from soma.workflow.client import (Job, Workflow, WorkflowController, @@ -12,9 +16,6 @@ soma_not_loaded = True -from .base import (GraphPluginBase, logger) - - class SomaFlowPlugin(GraphPluginBase): """Execute using Soma workflow """ diff --git a/nipype/pipeline/plugins/tests/__init__.py b/nipype/pipeline/plugins/tests/__init__.py index 349937997e..99fb243f19 100644 --- a/nipype/pipeline/plugins/tests/__init__.py +++ b/nipype/pipeline/plugins/tests/__init__.py @@ -1,2 +1,3 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/pipeline/plugins/tests/test_base.py 
b/nipype/pipeline/plugins/tests/test_base.py index 07a25532f3..3e22019816 100644 --- a/nipype/pipeline/plugins/tests/test_base.py +++ b/nipype/pipeline/plugins/tests/test_base.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Tests for the engine module diff --git a/nipype/pipeline/plugins/tests/test_callback.py b/nipype/pipeline/plugins/tests/test_callback.py index 0769781e8a..78b48f6b32 100644 --- a/nipype/pipeline/plugins/tests/test_callback.py +++ b/nipype/pipeline/plugins/tests/test_callback.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/pipeline/plugins/tests/test_debug.py b/nipype/pipeline/plugins/tests/test_debug.py index f15fc62939..115d40c5d4 100644 --- a/nipype/pipeline/plugins/tests/test_debug.py +++ b/nipype/pipeline/plugins/tests/test_debug.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- import os import nipype.interfaces.base as nib from tempfile import mkdtemp diff --git a/nipype/pipeline/plugins/tests/test_linear.py b/nipype/pipeline/plugins/tests/test_linear.py index a59c7c1981..9c2568a89c 100644 --- a/nipype/pipeline/plugins/tests/test_linear.py +++ b/nipype/pipeline/plugins/tests/test_linear.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- import os import nipype.interfaces.base as nib from tempfile import mkdtemp diff --git a/nipype/pipeline/plugins/tests/test_multiproc.py b/nipype/pipeline/plugins/tests/test_multiproc.py index 64a50e51ba..19e876288d 100644 --- a/nipype/pipeline/plugins/tests/test_multiproc.py +++ b/nipype/pipeline/plugins/tests/test_multiproc.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- import logging import os from tempfile import mkdtemp diff --git a/nipype/pipeline/plugins/tests/test_multiproc_nondaemon.py b/nipype/pipeline/plugins/tests/test_multiproc_nondaemon.py index cdba9da5b5..4320b015a8 
100644 --- a/nipype/pipeline/plugins/tests/test_multiproc_nondaemon.py +++ b/nipype/pipeline/plugins/tests/test_multiproc_nondaemon.py @@ -1,10 +1,12 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Testing module for functions and classes from multiproc.py """ +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import range, open # Import packages -from builtins import range import os from tempfile import mkdtemp from shutil import rmtree diff --git a/nipype/pipeline/plugins/tests/test_oar.py b/nipype/pipeline/plugins/tests/test_oar.py index a5ef97fee3..faf62e9d6d 100644 --- a/nipype/pipeline/plugins/tests/test_oar.py +++ b/nipype/pipeline/plugins/tests/test_oar.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- import os from shutil import rmtree from tempfile import mkdtemp diff --git a/nipype/pipeline/plugins/tests/test_pbs.py b/nipype/pipeline/plugins/tests/test_pbs.py index 8aa52e1163..ed6be64519 100644 --- a/nipype/pipeline/plugins/tests/test_pbs.py +++ b/nipype/pipeline/plugins/tests/test_pbs.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- import os from shutil import rmtree from tempfile import mkdtemp diff --git a/nipype/pipeline/plugins/tests/test_somaflow.py b/nipype/pipeline/plugins/tests/test_somaflow.py index 27b2e30a83..36aa050a43 100644 --- a/nipype/pipeline/plugins/tests/test_somaflow.py +++ b/nipype/pipeline/plugins/tests/test_somaflow.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- import os from shutil import rmtree from tempfile import mkdtemp diff --git a/nipype/pkg_info.py b/nipype/pkg_info.py index 04ea874f7d..b158bed6e9 100644 --- a/nipype/pkg_info.py +++ b/nipype/pkg_info.py @@ -1,15 +1,15 @@ +# -*- coding: utf-8 -*- +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import open + from future import standard_library standard_library.install_aliases() +from 
configparser import ConfigParser import os import sys import subprocess -try: - from configparser import ConfigParser -except ImportError: - from configparser import ConfigParser # python 3 - COMMIT_INFO_FNAME = 'COMMIT_INFO.txt' diff --git a/nipype/testing/__init__.py b/nipype/testing/__init__.py index 97fa976ee6..e2f1e528fb 100644 --- a/nipype/testing/__init__.py +++ b/nipype/testing/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The testing directory contains a small set of imaging files to be diff --git a/nipype/testing/decorators.py b/nipype/testing/decorators.py index 7a76d26958..e4cdf29529 100644 --- a/nipype/testing/decorators.py +++ b/nipype/testing/decorators.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ diff --git a/nipype/testing/tests/test_utils.py b/nipype/testing/tests/test_utils.py index dbc14ebbba..f62389dd95 100644 --- a/nipype/testing/tests/test_utils.py +++ b/nipype/testing/tests/test_utils.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Test testing utilities diff --git a/nipype/testing/utils.py b/nipype/testing/utils.py index d98bda9de6..7cc3311dad 100644 --- a/nipype/testing/utils.py +++ b/nipype/testing/utils.py @@ -1,8 +1,10 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Additional handy utilities for testing """ -__docformat__ = 'restructuredtext' +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import range, object, open import os import time @@ -11,9 +13,11 @@ import subprocess from subprocess import CalledProcessError from tempfile import mkdtemp 
-from ..utils.misc import package_check from nose import SkipTest from future.utils import raise_from +from ..utils.misc import package_check + +__docformat__ = 'restructuredtext' def skip_if_no_package(*args, **kwargs): """Raise SkipTest if package_check fails diff --git a/nipype/utils/__init__.py b/nipype/utils/__init__.py index 926f228876..691947f82f 100644 --- a/nipype/utils/__init__.py +++ b/nipype/utils/__init__.py @@ -1,4 +1,5 @@ +# -*- coding: utf-8 -*- from __future__ import absolute_import -from .onetime import OneTimeProperty, setattr_on_read -from .tmpdirs import TemporaryDirectory, InTemporaryDirectory +from nipype.utils.onetime import OneTimeProperty, setattr_on_read +from nipype.utils.tmpdirs import TemporaryDirectory, InTemporaryDirectory diff --git a/nipype/utils/config.py b/nipype/utils/config.py index 2c3411feb0..d55515c5ec 100644 --- a/nipype/utils/config.py +++ b/nipype/utils/config.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: ''' @@ -8,19 +9,21 @@ @author: Chris Filo Gorgolewski ''' +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import str, object, open + from future import standard_library standard_library.install_aliases() -from builtins import object import configparser -from json import load, dump import os import shutil import errno from warnings import warn +from io import StringIO +from simplejson import load, dump from ..external import portalocker -from ..external.six import StringIO # Get home directory in platform-agnostic way diff --git a/nipype/utils/docparse.py b/nipype/utils/docparse.py index a445262a15..ebf52d06d3 100644 --- a/nipype/utils/docparse.py +++ b/nipype/utils/docparse.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Utilities to pull in 
documentation from command-line tools. @@ -12,11 +13,12 @@ docstring = docparse.get_doc(better.cmd, better.opt_map) """ +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import str, open, bytes import subprocess -from nipype.interfaces.base import CommandLine -from nipype.utils.misc import is_container -from nipype.external.six import string_types +from ..interfaces.base import CommandLine +from .misc import is_container def grab_doc(cmd, trap_error=True): @@ -282,7 +284,7 @@ def _parse_doc(doc, style=['--']): # individual flag/option. doclist = doc.split('\n') optmap = {} - if isinstance(style, string_types): + if isinstance(style, (str, bytes)): style = [style] for line in doclist: linelist = line.split() diff --git a/nipype/utils/draw_gantt_chart.py b/nipype/utils/draw_gantt_chart.py index 9d1ac3b0fd..75ea7dc3ea 100644 --- a/nipype/utils/draw_gantt_chart.py +++ b/nipype/utils/draw_gantt_chart.py @@ -1,15 +1,23 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Module to draw an html gantt chart from logfile produced by callback_log.log_nodes_cb() """ +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import str, range, open + +# Py2 compat: http://python-future.org/compatible_idioms.html#collections-counter-and-ordereddict +from future import standard_library +standard_library.install_aliases() +from collections import OrderedDict # Import packages -import json -from dateutil import parser -import datetime import random -from collections import OrderedDict +import datetime +import simplejson as json +from dateutil import parser + # Pandas try: import pandas as pd @@ -190,7 +198,7 @@ def calculate_resource_timeseries(events, resource): res[current_time] = all_res # Formulate the pandas timeseries - time_series = pd.Series(data=res.values(), index=res.keys()) + time_series = 
pd.Series(data=list(res.values()), index=list(res.keys())) # Downsample where there is only value-diff ts_diff = time_series.diff() time_series = time_series[ts_diff!=0] @@ -227,7 +235,7 @@ def draw_lines(start, total_duration, minute_scale, scale): result = '' next_line = 220 next_time = start - num_lines = int((total_duration/60) / minute_scale) + 2 + num_lines = ((total_duration // 60) // minute_scale) + 2 # Iterate through the lines and create html line markers string for line in range(num_lines): @@ -282,8 +290,8 @@ def draw_nodes(start, nodes_list, cores, minute_scale, space_between_minutes, # Init variables result = '' - scale = float(space_between_minutes/float(minute_scale)) - space_between_minutes = float(space_between_minutes/scale) + scale = space_between_minutes / minute_scale + space_between_minutes = space_between_minutes / scale end_times = [datetime.datetime(start.year, start.month, start.day, start.hour, start.minute, start.second) \ for core in range(cores)] @@ -325,7 +333,7 @@ def draw_nodes(start, nodes_list, cores, minute_scale, space_between_minutes, 'scale_duration' : scale_duration, 'color' : color, 'node_name' : node['name'], - 'node_dur' : node['duration']/60.0, + 'node_dur' : node['duration'] / 60.0, 'node_start' : node_start.strftime("%Y-%m-%d %H:%M:%S"), 'node_finish' : node_finish.strftime("%Y-%m-%d %H:%M:%S")} # Create new node string @@ -350,12 +358,12 @@ def draw_resource_bar(start_time, finish_time, time_series, space_between_minute result = "

%s

" \ % (left, resource) # Image scaling factors - scale = float(space_between_minutes/float(minute_scale)) - space_between_minutes = float(space_between_minutes/scale) + scale = space_between_minutes / minute_scale + space_between_minutes = space_between_minutes / scale # Iterate through time series ts_len = len(time_series) - for idx, (ts_start, amount) in enumerate(time_series.iteritems()): + for idx, (ts_start, amount) in enumerate(time_series.items()): if idx < ts_len-1: ts_end = time_series.index[idx+1] else: @@ -536,7 +544,7 @@ def generate_gantt_chart(logfile, cores, minute_scale=10, # Summary strings of workflow at top html_string += '

Start: ' + start_node['start'].strftime("%Y-%m-%d %H:%M:%S") + '

' html_string += '

Finish: ' + last_node['finish'].strftime("%Y-%m-%d %H:%M:%S") + '

' - html_string += '

Duration: ' + "{0:.2f}".format(duration/60) + ' minutes

' + html_string += '

Duration: ' + "{0:.2f}".format(duration / 60) + ' minutes

' html_string += '

Nodes: ' + str(len(nodes_list))+'

' html_string += '

Cores: ' + str(cores) + '

' html_string += close_header diff --git a/nipype/utils/filemanip.py b/nipype/utils/filemanip.py index 189528cd4d..2baabcac50 100644 --- a/nipype/utils/filemanip.py +++ b/nipype/utils/filemanip.py @@ -1,29 +1,31 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Miscellaneous file manipulation functions """ +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import str, bytes, open from future import standard_library standard_library.install_aliases() +import sys import pickle import gzip import hashlib from hashlib import md5 -import simplejson import os import re import shutil import posixpath - +import simplejson as json import numpy as np +from .. import logging, config from .misc import is_container -from ..external.six import string_types from ..interfaces.traits_extension import isdefined -from .. import logging, config fmlogger = logging.getLogger("filemanip") @@ -52,13 +54,13 @@ def split_filename(fname): -------- >>> from nipype.utils.filemanip import split_filename >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') - >>> pth + >>> pth # doctest: +IGNORE_UNICODE '/home/data' - >>> fname + >>> fname # doctest: +IGNORE_UNICODE 'subject' - >>> ext + >>> ext # doctest: +IGNORE_UNICODE '.nii.gz' """ @@ -81,6 +83,30 @@ def split_filename(fname): return pth, fname, ext +def encode_dict(value): + """ + Manipulates ordered dicts before they are hashed (Py2/3 compat.) 
+ + """ + if sys.version_info[0] > 2: + return str(value) + + if isinstance(value, str): + value = value.encode() + + if isinstance(value, tuple): + val0 = encode_dict(value[0]) + val1 = encode_dict(value[1]) + return '(' + val0 + ', ' + val1 + ')' + + if isinstance(value, list): + retval = '[' + for i, v in enumerate(value): + if i > 0: + retval += ', ' + retval += encode_dict(v) + return retval + ']' + return repr(value) def fname_presuffix(fname, prefix='', suffix='', newpath=None, use_ext=True): """Manipulates path and name of input filename @@ -105,7 +131,7 @@ def fname_presuffix(fname, prefix='', suffix='', newpath=None, use_ext=True): >>> from nipype.utils.filemanip import fname_presuffix >>> fname = 'foo.nii.gz' - >>> fname_presuffix(fname,'pre','post','/tmp') + >>> fname_presuffix(fname,'pre','post','/tmp') # doctest: +IGNORE_UNICODE '/tmp/prefoopost.nii.gz' """ @@ -225,17 +251,18 @@ def copyfile(originalfile, newfile, copy=False, create_new=False, # ------------- # Options: # symlink - # to originalfile (keep if not (use_hardlink or copy)) - # to other file (unlink) + # to regular file originalfile (keep if symlinking) + # to same dest as symlink originalfile (keep if symlinking) + # to other file (unlink) # regular file - # hard link to originalfile (keep) - # copy of file (same hash) (keep) - # different file (diff hash) (unlink) + # hard link to originalfile (keep) + # copy of file (same hash) (keep) + # different file (diff hash) (unlink) keep = False if os.path.lexists(newfile): if os.path.islink(newfile): - if all((os.readlink(newfile) == originalfile, not use_hardlink, - not copy)): + if all((os.readlink(newfile) == os.path.realpath(originalfile), + not use_hardlink, not copy)): keep = True elif posixpath.samefile(newfile, originalfile): keep = True @@ -363,7 +390,7 @@ def copyfiles(filelist, dest, copy=False, create_new=False): def filename_to_list(filename): """Returns a list given either a string or a list """ - if isinstance(filename, (str, 
string_types)): + if isinstance(filename, (str, bytes)): return [filename] elif isinstance(filename, list): return filename @@ -382,7 +409,6 @@ def list_to_filename(filelist): else: return filelist[0] - def save_json(filename, data): """Save data to a json file @@ -394,9 +420,11 @@ def save_json(filename, data): Dictionary to save in json file. """ - - with open(filename, 'w') as fp: - simplejson.dump(data, fp, sort_keys=True, indent=4) + mode = 'w' + if sys.version_info[0] < 3: + mode = 'wb' + with open(filename, mode) as fp: + json.dump(data, fp, sort_keys=True, indent=4) def load_json(filename): @@ -414,7 +442,7 @@ def load_json(filename): """ with open(filename, 'r') as fp: - data = simplejson.load(fp) + data = json.load(fp) return data @@ -438,11 +466,17 @@ def loadcrash(infile, *args): def loadpkl(infile): """Load a zipped or plain cPickled file """ + fmlogger.debug('Loading pkl: %s', infile) if infile.endswith('pklz'): pkl_file = gzip.open(infile, 'rb') else: - pkl_file = open(infile) - return pickle.load(pkl_file) + pkl_file = open(infile, 'rb') + + try: + unpkl = pickle.load(pkl_file) + except UnicodeDecodeError: + unpkl = pickle.load(pkl_file, fix_imports=True, encoding='utf-8') + return unpkl def savepkl(filename, record): @@ -464,12 +498,12 @@ def write_rst_header(header, level=0): def write_rst_list(items, prefix=''): out = [] for item in items: - out.append(prefix + ' ' + str(item)) + out.append('{} {}'.format(prefix, str(item))) return '\n'.join(out) + '\n\n' def write_rst_dict(info, prefix=''): out = [] for key, value in sorted(info.items()): - out.append(prefix + '* ' + key + ' : ' + str(value)) + out.append('{}* {} : {}'.format(prefix, key, str(value))) return '\n'.join(out) + '\n\n' diff --git a/nipype/utils/logger.py b/nipype/utils/logger.py index 38edd8e1e6..b30b50bc72 100644 --- a/nipype/utils/logger.py +++ b/nipype/utils/logger.py @@ -1,3 +1,6 @@ +# -*- coding: utf-8 -*- +from __future__ import print_function, division, unicode_literals, 
absolute_import + from builtins import object # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: @@ -5,6 +8,8 @@ import logging import os import sys +from .misc import str2bool + try: from ..external.cloghandler import ConcurrentRotatingFileHandler as \ RFHandler @@ -13,8 +18,6 @@ from warnings import warn warn("ConcurrentLogHandler not installed. Using builtin log handler") from logging.handlers import RotatingFileHandler as RFHandler -from .misc import str2bool -from .config import NipypeConfig class Logging(object): diff --git a/nipype/utils/matlabtools.py b/nipype/utils/matlabtools.py index e272288b75..03ce6270fd 100644 --- a/nipype/utils/matlabtools.py +++ b/nipype/utils/matlabtools.py @@ -1,6 +1,8 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Useful Functions for working with matlab""" +from __future__ import print_function, division, unicode_literals, absolute_import from builtins import range @@ -8,6 +10,7 @@ import os import re import tempfile +import numpy as np # Functions, classes and other top-level code diff --git a/nipype/utils/misc.py b/nipype/utils/misc.py index 8024b3a67d..f01af7a02e 100644 --- a/nipype/utils/misc.py +++ b/nipype/utils/misc.py @@ -1,24 +1,22 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Miscellaneous utility functions """ - +from __future__ import print_function, division, unicode_literals, absolute_import from future import standard_library standard_library.install_aliases() +from builtins import next, str from future.utils import raise_from -from builtins import next -from pickle import dumps, loads -import inspect -from distutils.version import LooseVersion -import numpy as np -from textwrap import dedent import sys import re from collections import Iterator +import 
inspect -from ..external.six import string_types - +from distutils.version import LooseVersion +from textwrap import dedent +import numpy as np def human_order_sorted(l): """Sorts string in human order (i.e. 'stat10' will go after 'stat2')""" @@ -34,6 +32,9 @@ def natural_keys(text): def trim(docstring, marker=None): + if isinstance(docstring, bytes): + docstring = str(docstring, 'utf-8') + if not docstring: return '' # Convert tabs to spaces (following the normal Python rules) @@ -126,7 +127,7 @@ def is_container(item): True if container False if not (eg string) """ - if isinstance(item, string_types): + if isinstance(item, str): return False elif hasattr(item, '__iter__'): return True @@ -154,9 +155,8 @@ def container_to_string(cont): Container elements joined into a string. """ - if hasattr(cont, '__iter__') and not isinstance(cont, string_types): - return str(' '.join(cont)) - + if hasattr(cont, '__iter__') and not isinstance(cont, str): + cont = ' '.join(cont) return str(cont) diff --git a/nipype/utils/nipype2boutiques.py b/nipype/utils/nipype2boutiques.py index 49fc1d755d..8696c321f7 100644 --- a/nipype/utils/nipype2boutiques.py +++ b/nipype/utils/nipype2boutiques.py @@ -1,4 +1,7 @@ -from __future__ import print_function +# -*- coding: utf-8 -*- +from __future__ import print_function, division, unicode_literals, absolute_import + +from builtins import str, open # This tool exports a Nipype interface in the Boutiques (https://github.com/boutiques) JSON format. # Boutiques tools can be imported in CBRAIN (https://github.com/aces/cbrain) among other platforms. 
# @@ -13,12 +16,9 @@ import os import argparse -import inspect import sys -import simplejson import tempfile - -from nipype.interfaces.base import Interface +import simplejson as json def main(argv): @@ -45,9 +45,8 @@ def main(argv): parsed.ignore_template_numbers) # Writes JSON string to file - f = open(parsed.output, 'w') - f.write(json_string) - f.close() + with open(parsed.output, 'w') as f: + f.write(json_string) def generate_boutiques_descriptor(module, interface_name, ignored_template_inputs, docker_image, docker_index, verbose, ignore_template_numbers): @@ -108,7 +107,7 @@ def generate_boutiques_descriptor(module, interface_name, ignored_template_input for input in tool_desc['inputs']: del input['tempvalue'] - return simplejson.dumps(tool_desc, indent=4, separators=(',', ': ')) + return json.dumps(tool_desc, indent=4, separators=(',', ': ')) def get_boutiques_input(inputs, interface, input_name, spec, ignored_template_inputs, verbose, ignore_template_numbers): diff --git a/nipype/utils/nipype_cmd.py b/nipype/utils/nipype_cmd.py index 9bdfd7df91..116dd5f18c 100644 --- a/nipype/utils/nipype_cmd.py +++ b/nipype/utils/nipype_cmd.py @@ -1,10 +1,13 @@ -from __future__ import print_function +# -*- coding: utf-8 -*- +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import str import os import argparse import inspect import sys -from nipype.interfaces.base import Interface, InputMultiPath, traits -from nipype.utils.misc import str2bool + +from ..interfaces.base import Interface, InputMultiPath, traits +from .misc import str2bool def listClasses(module=None): diff --git a/nipype/utils/onetime.py b/nipype/utils/onetime.py index 44ddd233c8..ade1309598 100644 --- a/nipype/utils/onetime.py +++ b/nipype/utils/onetime.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Descriptor support for NIPY. 
@@ -16,6 +17,7 @@ [2] Python data model, http://docs.python.org/reference/datamodel.html """ +from __future__ import print_function, division, unicode_literals, absolute_import from builtins import object diff --git a/nipype/utils/provenance.py b/nipype/utils/provenance.py index 0269bd1502..0fdf72e02a 100644 --- a/nipype/utils/provenance.py +++ b/nipype/utils/provenance.py @@ -1,29 +1,27 @@ +# -*- coding: utf-8 -*- +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import open, object, str, bytes + +# Py2 compat: http://python-future.org/compatible_idioms.html#collections-counter-and-ordereddict from future import standard_library standard_library.install_aliases() -from builtins import object, str +from collections import OrderedDict from copy import deepcopy from pickle import dumps -import simplejson import os import getpass from socket import getfqdn from uuid import uuid1 +import simplejson as json import numpy as np -try: - from collections import OrderedDict -except ImportError: - from ordereddict import OrderedDict - import prov.model as pm -from ..external.six import string_types, text_type -from .. import get_info +from .. import get_info, logging, __version__ from .filemanip import (md5, hashlib, hash_infile) -from .. 
import logging, __version__ -iflogger = logging.getLogger('interface') +iflogger = logging.getLogger('interface') foaf = pm.Namespace("foaf", "http://xmlns.com/foaf/0.1/") dcterms = pm.Namespace("dcterms", "http://purl.org/dc/terms/") nipype_ns = pm.Namespace("nipype", "http://nipy.org/nipype/terms/") @@ -33,6 +31,12 @@ "cryptographicHashFunctions/")) get_id = lambda: niiri[uuid1().hex] +PROV_ENVVARS = ['PATH', 'FSLDIR', 'FREESURFER_HOME', 'ANTSPATH', + 'CAMINOPATH', 'CLASSPATH', 'LD_LIBRARY_PATH', + 'DYLD_LIBRARY_PATH', 'FIX_VERTEX_AREA', + 'FSF_OUTPUT_FORMAT', 'FSLCONFDIR', 'FSLOUTPUTTYPE', + 'LOGNAME', 'USER', + 'MKL_NUM_THREADS', 'OMP_NUM_THREADS'] def get_attr_id(attr, skip=None): dictwithhash, hashval = get_hashval(attr, skip=skip) @@ -101,21 +105,22 @@ def _get_sorteddict(object, dictwithhash=False): if isinstance(object, tuple): out = tuple(out) else: - if isinstance(object, string_types) and os.path.isfile(object): + if isinstance(object, str) and os.path.isfile(object): hash = hash_infile(object) if dictwithhash: out = (object, hash) else: out = hash elif isinstance(object, float): - out = '%.10f' % object + out = '{:.10f}'.format(object) else: out = object return out def safe_encode(x, as_literal=True): - """Encodes a python value for prov + """ + Encodes a python value for prov """ if x is None: value = "Unknown" @@ -123,119 +128,157 @@ return pm.Literal(value, pm.XSD['string']) else: return value - try: - if isinstance(x, (str, string_types)): - if os.path.exists(x): - value = 'file://%s%s' % (getfqdn(), x) - if not as_literal: - return value - try: - return pm.URIRef(value) - except AttributeError: - return pm.Literal(value, pm.XSD['anyURI']) - else: - if len(x) > max_text_len: - value = x[:max_text_len - 13] + ['...Clipped...'] - else: - value = x - if not as_literal: - return value - if isinstance(value, str): - return pm.Literal(value, pm.XSD['string']) - else: - return pm.Literal(text_type(value, 'utf-8'),
pm.XSD['string']) - if isinstance(x, int): + + if isinstance(x, (str, bytes)): + if isinstance(x, bytes): + x = str(x, 'utf-8') + if os.path.exists(x): + value = 'file://{}{}'.format(getfqdn(), x) if not as_literal: - return x - return pm.Literal(int(x), pm.XSD['integer']) - if isinstance(x, float): + return value + try: + return pm.URIRef(value) + except AttributeError: + return pm.Literal(value, pm.XSD['anyURI']) + else: + value = x + if len(x) > max_text_len: + cliptxt = '...Clipped...' + value = x[:max_text_len - len(cliptxt)] + cliptxt + if not as_literal: - return x - return pm.Literal(x, pm.XSD['float']) - if isinstance(x, dict): - outdict = {} - for key, value in list(x.items()): + return value + + return pm.Literal(value, pm.XSD['string']) + if isinstance(x, int): + if not as_literal: + return x + return pm.Literal(int(x), pm.XSD['integer']) + if isinstance(x, float): + if not as_literal: + return x + return pm.Literal(x, pm.XSD['float']) + if isinstance(x, dict): + outdict = {} + for key, value in list(x.items()): + encoded_value = safe_encode(value, as_literal=False) + if isinstance(encoded_value, pm.Literal): + outdict[key] = encoded_value.json_representation() + else: + outdict[key] = encoded_value + + try: + jsonstr = json.dumps(outdict) + except UnicodeDecodeError as excp: + jsonstr = "Could not encode dictionary. 
{}".format(excp) + iflogger.warn('Prov: %s', jsonstr) + + if not as_literal: + return jsonstr + return pm.Literal(jsonstr, pm.XSD['string']) + if isinstance(x, (list, tuple)): + x = list(x) + is_object = False + try: + nptype = np.array(x).dtype + is_object = nptype == np.dtype(object) + except ValueError: + is_object = True + + # If the array contains an heterogeneous mixture of data types + # they should be encoded sequentially + if is_object: + outlist = [] + for value in x: encoded_value = safe_encode(value, as_literal=False) if isinstance(encoded_value, pm.Literal): - outdict[key] = encoded_value.json_representation() + outlist.append(encoded_value.json_representation()) else: - outdict[key] = encoded_value - if not as_literal: - return simplejson.dumps(outdict) - return pm.Literal(simplejson.dumps(outdict), pm.XSD['string']) - if isinstance(x, list): - try: - nptype = np.array(x).dtype - if nptype == np.dtype(object): - raise ValueError('dtype object') - except ValueError as e: - outlist = [] - for value in x: - encoded_value = safe_encode(value, as_literal=False) - if isinstance(encoded_value, pm.Literal): - outlist.append(encoded_value.json_representation()) - else: - outlist.append(encoded_value) - else: - outlist = x - if not as_literal: - return simplejson.dumps(outlist) - return pm.Literal(simplejson.dumps(outlist), pm.XSD['string']) - if not as_literal: - return dumps(x) - return pm.Literal(dumps(x), nipype_ns['pickle']) - except TypeError as e: - iflogger.debug(e) - value = "Could not encode: " + str(e) + outlist.append(encoded_value) + x = outlist + + try: + jsonstr = json.dumps(x) + except UnicodeDecodeError as excp: + jsonstr = "Could not encode list/tuple. {}".format(excp) + iflogger.warn('Prov: %s', jsonstr) + if not as_literal: - return value - return pm.Literal(value, pm.XSD['string']) + return jsonstr + return pm.Literal(jsonstr, pm.XSD['string']) + + # If is a literal, and as_literal do nothing. + # else bring back to json. 
+ if isinstance(x, pm.Literal): + if as_literal: + return x + return dumps(x.json_representation()) + + jsonstr = None + ltype = pm.XSD['string'] + try: + jsonstr = json.dumps(x.__dict__) + except AttributeError: + pass + + if jsonstr is None: + try: + jsonstr = dumps(x) + ltype = nipype_ns['pickle'] + except TypeError as excp: + jsonstr = 'Could not encode object. {}'.format(excp) + + if not as_literal: + return jsonstr + return pm.Literal(jsonstr, ltype) def prov_encode(graph, value, create_container=True): - if isinstance(value, list) and create_container: + if isinstance(value, (list, tuple)) and create_container: + value = list(value) if len(value) == 0: encoded_literal = safe_encode(value) attr = {pm.PROV['value']: encoded_literal} - id = get_attr_id(attr) - entity = graph.entity(id, attr) - elif len(value) > 1: - try: - entities = [] - for item in value: - item_entity = prov_encode(graph, item) - entities.append(item_entity) - if isinstance(item, list): - continue - if not isinstance(list(item_entity.value)[0], string_types): - raise ValueError('Not a string literal') - if 'file://' not in list(item_entity.value)[0]: - raise ValueError('No file found') - id = get_id() - entity = graph.collection(identifier=id) - for item_entity in entities: - graph.hadMember(id, item_entity) - except ValueError as e: - iflogger.debug(e) - entity = prov_encode(graph, value, create_container=False) - else: - entity = prov_encode(graph, value[0]) + eid = get_attr_id(attr) + return graph.entity(eid, attr) + + if len(value) == 1: + return prov_encode(graph, value[0]) + + entities = [] + for item in value: + item_entity = prov_encode(graph, item) + entities.append(item_entity) + if isinstance(item, (list, tuple)): + continue + + item_entity_val = list(item_entity.value)[0] + is_str = isinstance(item_entity_val, str) + if not is_str or (is_str and 'file://' not in item_entity_val): + return prov_encode(graph, value, create_container=False) + + eid = get_id() + entity = 
graph.collection(identifier=eid) + for item_entity in entities: + graph.hadMember(eid, item_entity) + + return entity else: encoded_literal = safe_encode(value) attr = {pm.PROV['value']: encoded_literal} - if isinstance(value, string_types) and os.path.exists(value): + if isinstance(value, str) and os.path.exists(value): attr.update({pm.PROV['location']: encoded_literal}) if not os.path.isdir(value): sha512 = hash_infile(value, crypto=hashlib.sha512) attr.update({crypto['sha512']: pm.Literal(sha512, pm.XSD['string'])}) - id = get_attr_id(attr, skip=[pm.PROV['location'], - pm.PROV['value']]) + eid = get_attr_id(attr, skip=[pm.PROV['location'], + pm.PROV['value']]) else: - id = get_attr_id(attr, skip=[pm.PROV['location']]) + eid = get_attr_id(attr, skip=[pm.PROV['location']]) else: - id = get_attr_id(attr) - entity = graph.entity(id, attr) + eid = get_attr_id(attr) + entity = graph.entity(eid, attr) return entity @@ -298,12 +341,7 @@ def add_results(self, results, keep_provenance=False): self.g.used(a0, id) # write environment entities for idx, (key, val) in enumerate(sorted(runtime.environ.items())): - if key not in ['PATH', 'FSLDIR', 'FREESURFER_HOME', 'ANTSPATH', - 'CAMINOPATH', 'CLASSPATH', 'LD_LIBRARY_PATH', - 'DYLD_LIBRARY_PATH', 'FIX_VERTEX_AREA', - 'FSF_OUTPUT_FORMAT', 'FSLCONFDIR', 'FSLOUTPUTTYPE', - 'LOGNAME', 'USER', - 'MKL_NUM_THREADS', 'OMP_NUM_THREADS']: + if key not in PROV_ENVVARS: continue in_attr = {pm.PROV["label"]: key, nipype_ns["environmentVariable"]: key, diff --git a/nipype/utils/spm_docs.py b/nipype/utils/spm_docs.py index 3dd5af528e..1b7a1a1dc4 100644 --- a/nipype/utils/spm_docs.py +++ b/nipype/utils/spm_docs.py @@ -1,11 +1,13 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Grab documentation from spm.""" +from __future__ import print_function, division, unicode_literals, absolute_import +from future.utils import raise_from import os -from 
future.utils import raise_from -from nipype.interfaces import matlab +from ..interfaces import matlab def grab_doc(task_name): diff --git a/nipype/utils/tests/__init__.py b/nipype/utils/tests/__init__.py index 00d7c65d5a..939910d6b6 100644 --- a/nipype/utils/tests/__init__.py +++ b/nipype/utils/tests/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ diff --git a/nipype/utils/tests/test_cmd.py b/nipype/utils/tests/test_cmd.py index a4bb0ae060..09f07c1862 100644 --- a/nipype/utils/tests/test_cmd.py +++ b/nipype/utils/tests/test_cmd.py @@ -1,4 +1,5 @@ #!/usr/bin/env python +from __future__ import print_function, division, unicode_literals, absolute_import from future import standard_library standard_library.install_aliases() @@ -7,9 +8,11 @@ import sys from contextlib import contextmanager -from nipype.external.six import PY2, PY3, StringIO -from nipype.utils import nipype_cmd +from io import StringIO +from ...utils import nipype_cmd +PY3 = sys.version_info[0] >= 3 +PY2 = sys.version_info[0] < 3 @contextmanager def capture_sys_output(): @@ -23,6 +26,7 @@ def capture_sys_output(): class TestNipypeCMD(unittest.TestCase): + maxDiff = None def test_main_returns_2_on_empty(self): with self.assertRaises(SystemExit) as cm: @@ -110,13 +114,13 @@ def test_run_4d_realign_without_arguments(self): in_file [in_file ...]
tr""" - if PY2: + if PY3: error_message += """ -nipype_cmd nipype.interfaces.nipy FmriRealign4d: error: too few arguments +nipype_cmd nipype.interfaces.nipy FmriRealign4d: error: the following arguments are required: in_file, tr """ - elif PY3: + else: error_message += """ -nipype_cmd nipype.interfaces.nipy FmriRealign4d: error: the following arguments are required: in_file, tr +nipype_cmd nipype.interfaces.nipy FmriRealign4d: error: too few arguments """ self.assertEqual(stderr.getvalue(), error_message) diff --git a/nipype/utils/tests/test_docparse.py b/nipype/utils/tests/test_docparse.py index 84f3d99097..ff659cecb8 100644 --- a/nipype/utils/tests/test_docparse.py +++ b/nipype/utils/tests/test_docparse.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from builtins import object # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/utils/tests/test_filemanip.py b/nipype/utils/tests/test_filemanip.py index 8b81e03d4d..08ae92472d 100644 --- a/nipype/utils/tests/test_filemanip.py +++ b/nipype/utils/tests/test_filemanip.py @@ -1,5 +1,7 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: +from __future__ import unicode_literals from builtins import open import os @@ -168,7 +170,6 @@ def test_linkchain(): os.unlink(orig_img) os.unlink(orig_hdr) - def test_recopy(): # Re-copying with the same parameters on an unchanged file should be # idempotent @@ -217,7 +218,6 @@ def test_recopy(): os.unlink(orig_img) os.unlink(orig_hdr) - def test_copyfallback(): if os.name is not 'posix': return diff --git a/nipype/utils/tests/test_misc.py b/nipype/utils/tests/test_misc.py index 5b458deb64..cf92bbb537 100644 --- a/nipype/utils/tests/test_misc.py +++ b/nipype/utils/tests/test_misc.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python 
sts=4 ts=4 sw=4 et: from future import standard_library diff --git a/nipype/utils/tests/test_provenance.py b/nipype/utils/tests/test_provenance.py index 7689ce729c..85f6e032f6 100644 --- a/nipype/utils/tests/test_provenance.py +++ b/nipype/utils/tests/test_provenance.py @@ -1,19 +1,21 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: +from __future__ import unicode_literals +from builtins import str, bytes from future import standard_library standard_library.install_aliases() -from builtins import str + import os from tempfile import mkdtemp -from ...testing import assert_equal, assert_true, assert_false - -from ..provenance import ProvStore, safe_encode, text_type +from nipype.testing import assert_equal, assert_true, assert_false +from nipype.utils.provenance import ProvStore, safe_encode def test_provenance(): ps = ProvStore() - from ...interfaces.base import CommandLine + from nipype.interfaces.base import CommandLine results = CommandLine('echo hello').run() ps.add_results(results) provn = ps.g.get_provn() @@ -24,8 +26,8 @@ def test_provenance_exists(): tempdir = mkdtemp() cwd = os.getcwd() os.chdir(tempdir) - from ...interfaces.base import CommandLine - from ... 
import config + from nipype import config + from nipype.interfaces.base import CommandLine provenance_state = config.get('execution', 'write_provenance') hash_state = config.get('execution', 'hash_method') config.enable_provenance() @@ -39,6 +41,4 @@ def test_provenance_exists(): def test_safe_encode(): a = '\xc3\xa9lg' out = safe_encode(a) - if not isinstance(a, str): - a = text_type(a, 'utf-8') - yield assert_equal, out.value, a \ No newline at end of file + yield assert_equal, out.value, a diff --git a/nipype/utils/tmpdirs.py b/nipype/utils/tmpdirs.py index aa7c5da67c..ef7bfd6950 100644 --- a/nipype/utils/tmpdirs.py +++ b/nipype/utils/tmpdirs.py @@ -1,3 +1,6 @@ +# -*- coding: utf-8 -*- +from __future__ import print_function, division, unicode_literals, absolute_import + from builtins import object # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/workflows/__init__.py b/nipype/workflows/__init__.py index 349937997e..99fb243f19 100644 --- a/nipype/workflows/__init__.py +++ b/nipype/workflows/__init__.py @@ -1,2 +1,3 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/workflows/data/__init__.py b/nipype/workflows/data/__init__.py index 3f0dd77db1..390dbe81f3 100644 --- a/nipype/workflows/data/__init__.py +++ b/nipype/workflows/data/__init__.py @@ -1,6 +1,9 @@ +# -*- coding: utf-8 -*- # coding: utf-8 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: +from __future__ import print_function, division, unicode_literals, absolute_import + import os.path as op diff --git a/nipype/workflows/dmri/__init__.py b/nipype/workflows/dmri/__init__.py index d138c9e02e..8a48c710a3 100644 --- a/nipype/workflows/dmri/__init__.py +++ b/nipype/workflows/dmri/__init__.py @@ -1,2 +1,3 @@ -from __future__ import absolute_import +# -*- 
coding: utf-8 -*- +from __future__ import print_function, division, unicode_literals, absolute_import from . import camino, mrtrix, fsl, dipy diff --git a/nipype/workflows/dmri/camino/__init__.py b/nipype/workflows/dmri/camino/__init__.py index fef8c6aae1..07ba37fc52 100644 --- a/nipype/workflows/dmri/camino/__init__.py +++ b/nipype/workflows/dmri/camino/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from __future__ import absolute_import from .diffusion import create_camino_dti_pipeline from .connectivity_mapping import create_connectivity_pipeline diff --git a/nipype/workflows/dmri/camino/connectivity_mapping.py b/nipype/workflows/dmri/camino/connectivity_mapping.py index a0838b4468..d0d2a1c820 100644 --- a/nipype/workflows/dmri/camino/connectivity_mapping.py +++ b/nipype/workflows/dmri/camino/connectivity_mapping.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- import inspect import os.path as op diff --git a/nipype/workflows/dmri/camino/diffusion.py b/nipype/workflows/dmri/camino/diffusion.py index 1c5c174e2d..71d1904bd6 100644 --- a/nipype/workflows/dmri/camino/diffusion.py +++ b/nipype/workflows/dmri/camino/diffusion.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from ....interfaces import utility as util # utility from ....pipeline import engine as pe # pypeline engine from ....interfaces import camino as camino diff --git a/nipype/workflows/dmri/camino/group_connectivity.py b/nipype/workflows/dmri/camino/group_connectivity.py index 1f323bda3d..633b5ba18e 100644 --- a/nipype/workflows/dmri/camino/group_connectivity.py +++ b/nipype/workflows/dmri/camino/group_connectivity.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- import os.path as op # system functions from .connectivity_mapping import create_connectivity_pipeline diff --git a/nipype/workflows/dmri/connectivity/__init__.py b/nipype/workflows/dmri/connectivity/__init__.py index 8b9ffea19a..5ef73de586 100644 --- a/nipype/workflows/dmri/connectivity/__init__.py +++ 
b/nipype/workflows/dmri/connectivity/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from __future__ import absolute_import from .nx import (create_networkx_pipeline, create_cmats_to_csv_pipeline) from .group_connectivity import (create_merge_networks_by_group_workflow, diff --git a/nipype/workflows/dmri/connectivity/group_connectivity.py b/nipype/workflows/dmri/connectivity/group_connectivity.py index 8ebf0a927d..9efd45bf50 100644 --- a/nipype/workflows/dmri/connectivity/group_connectivity.py +++ b/nipype/workflows/dmri/connectivity/group_connectivity.py @@ -1,4 +1,7 @@ -from __future__ import print_function +# -*- coding: utf-8 -*- +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import open + from future.utils import raise_from import os.path as op diff --git a/nipype/workflows/dmri/connectivity/nx.py b/nipype/workflows/dmri/connectivity/nx.py index 4fe2f89141..3d1a53a4d9 100644 --- a/nipype/workflows/dmri/connectivity/nx.py +++ b/nipype/workflows/dmri/connectivity/nx.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from ....pipeline import engine as pe from ....interfaces import utility as util from ....interfaces import cmtk as cmtk diff --git a/nipype/workflows/dmri/dipy/__init__.py b/nipype/workflows/dmri/dipy/__init__.py index 04b751f203..354ba7a7e6 100644 --- a/nipype/workflows/dmri/dipy/__init__.py +++ b/nipype/workflows/dmri/dipy/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # coding: utf-8 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/workflows/dmri/dipy/denoise.py b/nipype/workflows/dmri/dipy/denoise.py index 68c27d0d89..c060bb97c1 100644 --- a/nipype/workflows/dmri/dipy/denoise.py +++ b/nipype/workflows/dmri/dipy/denoise.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # coding: utf-8 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git 
a/nipype/workflows/dmri/fsl/__init__.py b/nipype/workflows/dmri/fsl/__init__.py index 9d76d17ba1..30cfa7b86a 100644 --- a/nipype/workflows/dmri/fsl/__init__.py +++ b/nipype/workflows/dmri/fsl/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from __future__ import absolute_import from .dti import create_bedpostx_pipeline, bedpostx_parallel diff --git a/nipype/workflows/dmri/fsl/artifacts.py b/nipype/workflows/dmri/fsl/artifacts.py index 03a1b0c31c..906541829f 100644 --- a/nipype/workflows/dmri/fsl/artifacts.py +++ b/nipype/workflows/dmri/fsl/artifacts.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # coding: utf-8 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/workflows/dmri/fsl/dti.py b/nipype/workflows/dmri/fsl/dti.py index 1d252430b5..ebcd46c84f 100644 --- a/nipype/workflows/dmri/fsl/dti.py +++ b/nipype/workflows/dmri/fsl/dti.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # coding: utf-8 from __future__ import absolute_import diff --git a/nipype/workflows/dmri/fsl/epi.py b/nipype/workflows/dmri/fsl/epi.py index f62fabb792..16bcab3ab6 100644 --- a/nipype/workflows/dmri/fsl/epi.py +++ b/nipype/workflows/dmri/fsl/epi.py @@ -1,6 +1,7 @@ +# -*- coding: utf-8 -*- # coding: utf-8 - -from __future__ import division +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import open, str import warnings @@ -680,7 +681,7 @@ def _rotate_bvecs(in_bvec, in_matrix): bvec = np.matrix(bvecs[:, i]) rot = np.matrix(np.loadtxt(vol_matrix)[0:3, 0:3]) new_bvecs[i] = (np.array(rot * bvec.T).T)[0] # fill each volume with x,y,z as we go along - np.savetxt(out_file, np.array(new_bvecs).T, fmt='%0.15f') + np.savetxt(out_file, np.array(new_bvecs).T, fmt=b'%0.15f') return out_file diff --git a/nipype/workflows/dmri/fsl/tbss.py b/nipype/workflows/dmri/fsl/tbss.py index c0e2faa096..feede0d223 100644 --- a/nipype/workflows/dmri/fsl/tbss.py +++ 
b/nipype/workflows/dmri/fsl/tbss.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/workflows/dmri/fsl/tests/__init__.py b/nipype/workflows/dmri/fsl/tests/__init__.py index 349937997e..99fb243f19 100644 --- a/nipype/workflows/dmri/fsl/tests/__init__.py +++ b/nipype/workflows/dmri/fsl/tests/__init__.py @@ -1,2 +1,3 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/workflows/dmri/fsl/tests/test_dti.py b/nipype/workflows/dmri/fsl/tests/test_dti.py index 346ac5c23a..9157b04947 100644 --- a/nipype/workflows/dmri/fsl/tests/test_dti.py +++ b/nipype/workflows/dmri/fsl/tests/test_dti.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals, print_function, absolute_import import os from nipype.testing import skipif diff --git a/nipype/workflows/dmri/fsl/tests/test_epi.py b/nipype/workflows/dmri/fsl/tests/test_epi.py index dbac8f2db8..f622b8304a 100644 --- a/nipype/workflows/dmri/fsl/tests/test_epi.py +++ b/nipype/workflows/dmri/fsl/tests/test_epi.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- import os from nipype.testing import (skipif) diff --git a/nipype/workflows/dmri/fsl/tests/test_tbss.py b/nipype/workflows/dmri/fsl/tests/test_tbss.py index 9bd401ea45..1900629d49 100644 --- a/nipype/workflows/dmri/fsl/tests/test_tbss.py +++ b/nipype/workflows/dmri/fsl/tests/test_tbss.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/workflows/dmri/fsl/utils.py b/nipype/workflows/dmri/fsl/utils.py index 72c92b565a..ba1658d468 100644 --- a/nipype/workflows/dmri/fsl/utils.py +++ b/nipype/workflows/dmri/fsl/utils.py @@ -1,11 +1,10 @@ +# -*- coding: utf-8 -*- # coding: utf-8 # emacs: -*- mode: python; 
py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from __future__ import division -from builtins import zip -from builtins import next -from builtins import range +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import zip, next, range, str from ....pipeline import engine as pe from ....interfaces import utility as niu @@ -500,7 +499,7 @@ def rotate_bvecs(in_bvec, in_matrix): newbvec = invrot.dot(bvec) new_bvecs.append((newbvec / np.linalg.norm(newbvec))) - np.savetxt(out_file, np.array(new_bvecs).T, fmt='%0.15f') + np.savetxt(out_file, np.array(new_bvecs).T, fmt=b'%0.15f') return out_file @@ -550,7 +549,7 @@ def eddy_rotate_bvecs(in_bvec, eddy_params): newbvec = invrot.dot(bvec) new_bvecs.append(newbvec / np.linalg.norm(newbvec)) - np.savetxt(out_file, np.array(new_bvecs).T, fmt='%0.15f') + np.savetxt(out_file, np.array(new_bvecs).T, fmt=b'%0.15f') return out_file @@ -712,7 +711,7 @@ def reorient_bvecs(in_dwi, old_dwi, in_bvec): R = RS / S new_bvecs = [R.dot(b) for b in bvecs] - np.savetxt(out_file, np.array(new_bvecs).T, fmt='%0.15f') + np.savetxt(out_file, np.array(new_bvecs).T, fmt=b'%0.15f') return out_file diff --git a/nipype/workflows/dmri/mrtrix/__init__.py b/nipype/workflows/dmri/mrtrix/__init__.py index 044007abb1..6851021111 100644 --- a/nipype/workflows/dmri/mrtrix/__init__.py +++ b/nipype/workflows/dmri/mrtrix/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from __future__ import absolute_import from .diffusion import create_mrtrix_dti_pipeline from .connectivity_mapping import create_connectivity_pipeline diff --git a/nipype/workflows/dmri/mrtrix/connectivity_mapping.py b/nipype/workflows/dmri/mrtrix/connectivity_mapping.py index 1bf1d53cc6..933ead416b 100644 --- a/nipype/workflows/dmri/mrtrix/connectivity_mapping.py +++ b/nipype/workflows/dmri/mrtrix/connectivity_mapping.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- import inspect import os.path as op # system 
functions diff --git a/nipype/workflows/dmri/mrtrix/diffusion.py b/nipype/workflows/dmri/mrtrix/diffusion.py index 86d098011b..7e5e7955f3 100644 --- a/nipype/workflows/dmri/mrtrix/diffusion.py +++ b/nipype/workflows/dmri/mrtrix/diffusion.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from ....interfaces import utility as util # utility from ....pipeline import engine as pe # pypeline engine from ....interfaces import fsl as fsl diff --git a/nipype/workflows/dmri/mrtrix/group_connectivity.py b/nipype/workflows/dmri/mrtrix/group_connectivity.py index b308e17137..976041440d 100644 --- a/nipype/workflows/dmri/mrtrix/group_connectivity.py +++ b/nipype/workflows/dmri/mrtrix/group_connectivity.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- import os.path as op import warnings diff --git a/nipype/workflows/fmri/__init__.py b/nipype/workflows/fmri/__init__.py index 9861e32e2d..938634da49 100644 --- a/nipype/workflows/fmri/__init__.py +++ b/nipype/workflows/fmri/__init__.py @@ -1 +1,3 @@ +# -*- coding: utf-8 -*- +from __future__ import print_function, division, unicode_literals, absolute_import from . 
import fsl, spm diff --git a/nipype/workflows/fmri/fsl/__init__.py b/nipype/workflows/fmri/fsl/__init__.py index c223176943..9f6ca78ee8 100644 --- a/nipype/workflows/fmri/fsl/__init__.py +++ b/nipype/workflows/fmri/fsl/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from .preprocess import (create_susan_smooth, create_fsl_fs_preproc, create_parallelfeat_preproc, create_featreg_preproc, create_reg_workflow) diff --git a/nipype/workflows/fmri/fsl/estimate.py b/nipype/workflows/fmri/fsl/estimate.py index a600f63350..462b4213db 100644 --- a/nipype/workflows/fmri/fsl/estimate.py +++ b/nipype/workflows/fmri/fsl/estimate.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: from builtins import range diff --git a/nipype/workflows/fmri/fsl/preprocess.py b/nipype/workflows/fmri/fsl/preprocess.py index 272b7118e2..d3f2d25ebe 100644 --- a/nipype/workflows/fmri/fsl/preprocess.py +++ b/nipype/workflows/fmri/fsl/preprocess.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: from __future__ import division diff --git a/nipype/workflows/fmri/fsl/tests/__init__.py b/nipype/workflows/fmri/fsl/tests/__init__.py index cfdb162e68..2986294d9d 100644 --- a/nipype/workflows/fmri/fsl/tests/__init__.py +++ b/nipype/workflows/fmri/fsl/tests/__init__.py @@ -1 +1,2 @@ +# -*- coding: utf-8 -*- __author__ = 'satra' diff --git a/nipype/workflows/fmri/spm/__init__.py b/nipype/workflows/fmri/spm/__init__.py index 8ddf878d12..f974a663db 100644 --- a/nipype/workflows/fmri/spm/__init__.py +++ b/nipype/workflows/fmri/spm/__init__.py @@ -1,2 +1,3 @@ +# -*- coding: utf-8 -*- from .preprocess import (create_spm_preproc, create_vbm_preproc, create_DARTEL_template) diff --git a/nipype/workflows/fmri/spm/estimate.py b/nipype/workflows/fmri/spm/estimate.py index 349937997e..99fb243f19 100644 --- 
a/nipype/workflows/fmri/spm/estimate.py +++ b/nipype/workflows/fmri/spm/estimate.py @@ -1,2 +1,3 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/workflows/fmri/spm/preprocess.py b/nipype/workflows/fmri/spm/preprocess.py index 1208b30c90..384284434d 100644 --- a/nipype/workflows/fmri/spm/preprocess.py +++ b/nipype/workflows/fmri/spm/preprocess.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/workflows/fmri/spm/tests/__init__.py b/nipype/workflows/fmri/spm/tests/__init__.py index cfdb162e68..2986294d9d 100644 --- a/nipype/workflows/fmri/spm/tests/__init__.py +++ b/nipype/workflows/fmri/spm/tests/__init__.py @@ -1 +1,2 @@ +# -*- coding: utf-8 -*- __author__ = 'satra' diff --git a/nipype/workflows/graph/__init__.py b/nipype/workflows/graph/__init__.py index e69de29bb2..2f5839ca6b 100644 --- a/nipype/workflows/graph/__init__.py +++ b/nipype/workflows/graph/__init__.py @@ -0,0 +1,2 @@ +# -*- coding: utf-8 -*- +from __future__ import print_function, division, unicode_literals, absolute_import diff --git a/nipype/workflows/misc/__init__.py b/nipype/workflows/misc/__init__.py index e69de29bb2..40a96afc6f 100644 --- a/nipype/workflows/misc/__init__.py +++ b/nipype/workflows/misc/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/nipype/workflows/misc/utils.py b/nipype/workflows/misc/utils.py index 640e6b0221..1d47be1e31 100644 --- a/nipype/workflows/misc/utils.py +++ b/nipype/workflows/misc/utils.py @@ -1,8 +1,9 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: +from __future__ import print_function, division, unicode_literals, absolute_import -from builtins import map -from builtins import range +from builtins import map, range def 
get_vox_dims(volume): diff --git a/nipype/workflows/rsfmri/__init__.py b/nipype/workflows/rsfmri/__init__.py index 81eb9af04b..cd892ab85f 100644 --- a/nipype/workflows/rsfmri/__init__.py +++ b/nipype/workflows/rsfmri/__init__.py @@ -1 +1,4 @@ +# -*- coding: utf-8 -*- +from __future__ import print_function, division, unicode_literals, absolute_import + from . import fsl diff --git a/nipype/workflows/rsfmri/fsl/__init__.py b/nipype/workflows/rsfmri/fsl/__init__.py index 850daa883d..2e17899066 100644 --- a/nipype/workflows/rsfmri/fsl/__init__.py +++ b/nipype/workflows/rsfmri/fsl/__init__.py @@ -1 +1,2 @@ +# -*- coding: utf-8 -*- from .resting import create_resting_preproc diff --git a/nipype/workflows/rsfmri/fsl/resting.py b/nipype/workflows/rsfmri/fsl/resting.py index 2bf8f28bd8..4c879a1fbb 100644 --- a/nipype/workflows/rsfmri/fsl/resting.py +++ b/nipype/workflows/rsfmri/fsl/resting.py @@ -1,6 +1,8 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from __future__ import division +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import str from ....interfaces import fsl as fsl # fsl from ....interfaces import utility as util # utility @@ -34,7 +36,7 @@ def extract_noise_components(realigned_file, noise_mask_file, num_components): else: components = np.hstack((components, u[:, :num_components])) components_file = os.path.join(os.getcwd(), 'noise_components.txt') - np.savetxt(components_file, components, fmt="%.10f") + np.savetxt(components_file, components, fmt=b"%.10f") return components_file diff --git a/nipype/workflows/smri/__init__.py b/nipype/workflows/smri/__init__.py index 06a1e16e33..64030857a9 100644 --- a/nipype/workflows/smri/__init__.py +++ b/nipype/workflows/smri/__init__.py @@ -1,2 +1,5 @@ +# -*- coding: utf-8 -*- +from __future__ import print_function, division, unicode_literals, absolute_import + from . 
import freesurfer from . import ants diff --git a/nipype/workflows/smri/ants/ANTSBuildTemplate.py b/nipype/workflows/smri/ants/ANTSBuildTemplate.py index 11bcbb9809..5742fa4e4f 100644 --- a/nipype/workflows/smri/ants/ANTSBuildTemplate.py +++ b/nipype/workflows/smri/ants/ANTSBuildTemplate.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- ################################################################################# # Program: Build Template Parallel # Language: Python diff --git a/nipype/workflows/smri/ants/__init__.py b/nipype/workflows/smri/ants/__init__.py index f22640eb1e..3cb140771c 100644 --- a/nipype/workflows/smri/ants/__init__.py +++ b/nipype/workflows/smri/ants/__init__.py @@ -1,2 +1,3 @@ +# -*- coding: utf-8 -*- from .ANTSBuildTemplate import ANTSTemplateBuildSingleIterationWF from .antsRegistrationBuildTemplate import antsRegistrationTemplateBuildSingleIterationWF diff --git a/nipype/workflows/smri/ants/antsRegistrationBuildTemplate.py b/nipype/workflows/smri/ants/antsRegistrationBuildTemplate.py index e58b62177d..6aeb990e39 100644 --- a/nipype/workflows/smri/ants/antsRegistrationBuildTemplate.py +++ b/nipype/workflows/smri/ants/antsRegistrationBuildTemplate.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- ################################################################################# # Program: Build Template Parallel # Language: Python diff --git a/nipype/workflows/smri/freesurfer/__init__.py b/nipype/workflows/smri/freesurfer/__init__.py index 8992ce715f..cbecb6e3e7 100644 --- a/nipype/workflows/smri/freesurfer/__init__.py +++ b/nipype/workflows/smri/freesurfer/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from .utils import (create_getmask_flow, create_get_stats_flow, create_tessellation_flow) from .bem import create_bem_flow from .recon import create_skullstripped_recon_flow, create_reconall_workflow diff --git a/nipype/workflows/smri/freesurfer/autorecon1.py b/nipype/workflows/smri/freesurfer/autorecon1.py index ddff03a64b..bd93b5b722 100644 --- 
a/nipype/workflows/smri/freesurfer/autorecon1.py +++ b/nipype/workflows/smri/freesurfer/autorecon1.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from nipype.interfaces.utility import Function,IdentityInterface import nipype.pipeline.engine as pe # pypeline engine from nipype.interfaces.freesurfer import * diff --git a/nipype/workflows/smri/freesurfer/autorecon2.py b/nipype/workflows/smri/freesurfer/autorecon2.py index 41f15a017b..a74b5681dd 100644 --- a/nipype/workflows/smri/freesurfer/autorecon2.py +++ b/nipype/workflows/smri/freesurfer/autorecon2.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from nipype.interfaces.utility import Function, IdentityInterface, Merge import nipype.pipeline.engine as pe # pypeline engine from nipype.interfaces.freesurfer import * diff --git a/nipype/workflows/smri/freesurfer/autorecon3.py b/nipype/workflows/smri/freesurfer/autorecon3.py index 1c5729c777..b5210cc046 100644 --- a/nipype/workflows/smri/freesurfer/autorecon3.py +++ b/nipype/workflows/smri/freesurfer/autorecon3.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from nipype.interfaces.utility import IdentityInterface, Merge, Function import nipype.pipeline.engine as pe # pypeline engine from nipype.interfaces.freesurfer import * diff --git a/nipype/workflows/smri/freesurfer/ba_maps.py b/nipype/workflows/smri/freesurfer/ba_maps.py index 7fa266250c..5661730687 100644 --- a/nipype/workflows/smri/freesurfer/ba_maps.py +++ b/nipype/workflows/smri/freesurfer/ba_maps.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- import os import nipype from nipype.interfaces.utility import Function,IdentityInterface diff --git a/nipype/workflows/smri/freesurfer/bem.py b/nipype/workflows/smri/freesurfer/bem.py index a9b0a0a325..743506c4d1 100644 --- a/nipype/workflows/smri/freesurfer/bem.py +++ b/nipype/workflows/smri/freesurfer/bem.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git 
a/nipype/workflows/smri/freesurfer/recon.py b/nipype/workflows/smri/freesurfer/recon.py index af4386ac8e..b1ebe1a4fc 100644 --- a/nipype/workflows/smri/freesurfer/recon.py +++ b/nipype/workflows/smri/freesurfer/recon.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from ....pipeline import engine as pe from ....interfaces import freesurfer as fs from ....interfaces import utility as niu diff --git a/nipype/workflows/smri/freesurfer/utils.py b/nipype/workflows/smri/freesurfer/utils.py index 65e352f5e5..6ee04c9673 100644 --- a/nipype/workflows/smri/freesurfer/utils.py +++ b/nipype/workflows/smri/freesurfer/utils.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/workflows/warp/__init__.py b/nipype/workflows/warp/__init__.py index e69de29bb2..40a96afc6f 100644 --- a/nipype/workflows/warp/__init__.py +++ b/nipype/workflows/warp/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 0000000000..515b2196f8 --- /dev/null +++ b/pytest.ini @@ -0,0 +1,2 @@ +[pytest] +doctest_optionflags = ALLOW_UNICODE ALLOW_BYTES diff --git a/requirements.txt b/requirements.txt index 71c43c91c9..ef66036744 100644 --- a/requirements.txt +++ b/requirements.txt @@ -11,3 +11,4 @@ prov>=1.4.0 xvfbwrapper psutil funcsigs +configparser diff --git a/setup.py b/setup.py index bc4b4bde89..2d827a36c3 100755 --- a/setup.py +++ b/setup.py @@ -11,11 +11,16 @@ nibabel denoted by ## START - COPIED FROM NIBABEL and a corresponding ## END """ -"""Build helper.""" +# Build helper +from __future__ import print_function +from builtins import str, bytes, open import os -from glob import glob +from os.path import join as pjoin import sys +from configparser import ConfigParser + +from glob import glob from functools import partial # BEFORE importing distutils, remove MANIFEST. 
distutils doesn't properly @@ -27,31 +32,19 @@ if len(set(('develop', 'bdist_egg', 'bdist_rpm', 'bdist', 'bdist_dumb', 'install_egg_info', 'egg_info', 'easy_install', 'bdist_wheel', 'bdist_mpkg')).intersection(sys.argv)) > 0: - # setup_egg imports setuptools setup, thus monkeypatching distutils. + # import setuptools setup, thus monkeypatching distutils. import setup_egg - -from distutils.core import setup + from setuptools import setup +else: + from distutils.core import setup # Commit hash writing, and dependency checking ''' Distutils / setuptools helpers from nibabel.nisext''' - -import os -from os.path import join as pjoin -import sys -PY3 = sys.version_info[0] >= 3 -if PY3: - string_types = str, -else: - string_types = basestring, -try: - from ConfigParser import ConfigParser -except ImportError: - from configparser import ConfigParser - from distutils.version import LooseVersion from distutils.command.build_py import build_py from distutils import log +PY3 = sys.version_info[0] >= 3 def get_comrec_build(pkg_dir, build_cmd=build_py): """ Return extended build command class for recording commit @@ -116,7 +109,7 @@ def _add_append_key(in_dict, key, value): # Append value to in_dict[key] list if key not in in_dict: in_dict[key] = [] - elif isinstance(in_dict[key], string_types): + elif isinstance(in_dict[key], (str, bytes)): in_dict[key] = [in_dict[key]] in_dict[key].append(value) @@ -213,7 +206,7 @@ def version_getter(pkg_name): msgs['opt suffix']) return # setuptools mode - if optional_tf and not isinstance(optional, string_types): + if optional_tf and not isinstance(optional, (str, bytes)): raise RuntimeError('Not-False optional arg should be string') dependency = pkg_name if version: @@ -287,6 +280,19 @@ def main(**extra_args): testdatafiles = [pjoin('testing', 'data', val) for val in os.listdir(pjoin(thispath, 'nipype', 'testing', 'data')) if not os.path.isdir(pjoin(thispath, 'nipype', 'testing', 'data', val))] + + testdatafiles+=[ + pjoin('testing', 
'data', 'dicomdir', '*'), + pjoin('testing', 'data', 'bedpostxout', '*'), + pjoin('testing', 'data', 'tbss_dir', '*'), + pjoin('workflows', 'data', '*'), + pjoin('pipeline', 'engine', 'report_template.html'), + pjoin('external', 'd3.js'), + pjoin('interfaces', 'script_templates', '*'), + pjoin('interfaces', 'tests', 'realign_json.json'), + pjoin('interfaces', 'tests', 'use_resources'), + ] + setup(name=NAME, maintainer=MAINTAINER, maintainer_email=MAINTAINER_EMAIL, @@ -427,19 +433,8 @@ def main(**extra_args): # above, but distutils is surely the worst piece of code in all of # python -- duplicating things into MANIFEST.in but this is admittedly # only a workaround to get things started -- not a solution - package_data={'nipype': - testdatafiles + [ - pjoin('testing', 'data', 'dicomdir', '*'), - pjoin('testing', 'data', 'bedpostxout', '*'), - pjoin('testing', 'data', 'tbss_dir', '*'), - pjoin('workflows', 'data', '*'), - pjoin('pipeline', 'engine', 'report_template.html'), - pjoin('external', 'd3.js'), - pjoin('interfaces', 'script_templates', '*'), - pjoin('interfaces', 'tests', 'realign_json.json'), - pjoin('interfaces', 'tests', 'use_resources'), - ]}, - scripts=glob('bin/*'), + package_data={'nipype': testdatafiles}, + scripts=glob('bin/*') + ['nipype/external/fsl_imglob.py'], cmdclass=cmdclass, **extra_args ) diff --git a/setup_egg.py b/setup_egg.py index 1534f8a2c2..4935e2957c 100755 --- a/setup_egg.py +++ b/setup_egg.py @@ -2,9 +2,9 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Wrapper to run setup.py using setuptools.""" - +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import open import os.path -from setuptools import setup ################################################################################ # Call the setup.py script, injecting the setuptools-specific arguments. 
diff --git a/tools/apigen.py b/tools/apigen.py index dba2ce0a37..48b11fff66 100644 --- a/tools/apigen.py +++ b/tools/apigen.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Attempt to generate templates for module reference with Sphinx @@ -17,11 +18,11 @@ NOTE: this is a modified version of a script originally shipped with the PyMVPA project, which we've adapted for NIPY use. PyMVPA is an MIT-licensed -project.""" +project. +""" +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import object, open -# Stdlib imports -from __future__ import print_function -from builtins import object import os import re @@ -102,11 +103,11 @@ def set_package_name(self, package_name): def _get_object_name(self, line): ''' Get second token in line >>> docwriter = ApiDocWriter('sphinx') - >>> docwriter._get_object_name(" def func(): ") - 'func' - >>> docwriter._get_object_name(" class Klass(object): ") + >>> docwriter._get_object_name(" def func(): ") # doctest: +IGNORE_UNICODE + u'func' + >>> docwriter._get_object_name(" class Klass(object): ") # doctest: +IGNORE_UNICODE 'Klass' - >>> docwriter._get_object_name(" class Klass: ") + >>> docwriter._get_object_name(" class Klass: ") # doctest: +IGNORE_UNICODE 'Klass' ''' name = line.split()[1].split('(')[0].strip() diff --git a/tools/build_interface_docs.py b/tools/build_interface_docs.py index 07fe607be3..a1189c63bb 100755 --- a/tools/build_interface_docs.py +++ b/tools/build_interface_docs.py @@ -3,8 +3,7 @@ # vi: set ft=python sts=4 ts=4 sw=4 et: """Script to auto-generate interface docs. 
""" - -from __future__ import print_function +from __future__ import print_function, unicode_literals # stdlib imports import os import sys diff --git a/tools/checkspecs.py b/tools/checkspecs.py index 8974428780..c284ee8b42 100644 --- a/tools/checkspecs.py +++ b/tools/checkspecs.py @@ -3,9 +3,8 @@ """Attempt to check each interface in nipype """ - -from __future__ import print_function -from builtins import object +from __future__ import print_function, unicode_literals +from builtins import object, str, bytes, open # Stdlib imports import os @@ -14,7 +13,6 @@ import warnings from nipype.interfaces.base import BaseInterface -from nipype.external.six import string_types # Functions and classes @@ -219,7 +217,7 @@ def test_specs(self, uri): for key, value in sorted(trait.__dict__.items()): if key in in_built or key == 'desc': continue - if isinstance(value, string_types): + if isinstance(value, (str, bytes)): quote = "'" if "'" in value: quote = '"' @@ -263,7 +261,7 @@ def test_specs(self, uri): for key, value in sorted(trait.__dict__.items()): if key in in_built or key == 'desc': continue - if isinstance(value, string_types): + if isinstance(value, (str, bytes)): quote = "'" if "'" in value: quote = '"' diff --git a/tools/ex2rst b/tools/ex2rst index f08539b2f6..6af04eda78 100755 --- a/tools/ex2rst +++ b/tools/ex2rst @@ -12,9 +12,8 @@ ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """Helper to automagically generate ReST versions of examples""" - -__docformat__ = 'restructuredtext' - +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import open, str import os import sys @@ -22,6 +21,8 @@ import re import glob from optparse import OptionParser +__docformat__ = 'restructuredtext' + def auto_image(line): """Automatically replace generic image markers with ones that have full diff --git a/tools/github.py b/tools/github.py index cc6c78e1a8..89d75fe298 100644 --- a/tools/github.py +++ 
b/tools/github.py @@ -1,5 +1,8 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals from future import standard_library standard_library.install_aliases() +from builtins import open import http.client import inspect import simplejson diff --git a/tools/gitwash_dumper.py b/tools/gitwash_dumper.py index 8803786c8c..bce492f039 100755 --- a/tools/gitwash_dumper.py +++ b/tools/gitwash_dumper.py @@ -1,6 +1,7 @@ #!/usr/bin/env python ''' Checkout gitwash repo into directory and do search replace on name ''' -from __future__ import print_function +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import open import os from os.path import join as pjoin diff --git a/tools/interfacedocgen.py b/tools/interfacedocgen.py index eda0c6a8b5..43f61d268e 100644 --- a/tools/interfacedocgen.py +++ b/tools/interfacedocgen.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Attempt to generate templates for module reference with Sphinx @@ -19,8 +20,8 @@ PyMVPA project, which we've adapted for NIPY use. 
PyMVPA is an MIT-licensed project.""" -from __future__ import print_function -from builtins import object +from __future__ import print_function, unicode_literals +from builtins import object, open # Stdlib imports import inspect @@ -123,11 +124,11 @@ def set_package_name(self, package_name): def _get_object_name(self, line): ''' Get second token in line >>> docwriter = ApiDocWriter('sphinx') - >>> docwriter._get_object_name(" def func(): ") - 'func' - >>> docwriter._get_object_name(" class Klass(object): ") + >>> docwriter._get_object_name(" def func(): ") # doctest: +IGNORE_UNICODE + u'func' + >>> docwriter._get_object_name(" class Klass(object): ") # doctest: +IGNORE_UNICODE 'Klass' - >>> docwriter._get_object_name(" class Klass: ") + >>> docwriter._get_object_name(" class Klass: ") # doctest: +IGNORE_UNICODE 'Klass' ''' name = line.split()[1].split('(')[0].strip() diff --git a/tools/make_examples.py b/tools/make_examples.py index 605746cc7a..64b61bc4a2 100755 --- a/tools/make_examples.py +++ b/tools/make_examples.py @@ -3,7 +3,8 @@ This also creates the index.rst file appropriately, makes figures, etc. 
""" - +from __future__ import print_function, division, unicode_literals, absolute_import +from builtins import open from past.builtins import execfile # ----------------------------------------------------------------------------- # Library imports diff --git a/tools/nipype_nightly.py b/tools/nipype_nightly.py index 5206c8afe9..9f647e903f 100644 --- a/tools/nipype_nightly.py +++ b/tools/nipype_nightly.py @@ -5,7 +5,8 @@ """ from __future__ import print_function - +from __future__ import unicode_literals +from builtins import open import os import sys import subprocess diff --git a/tools/report_coverage.py b/tools/report_coverage.py index 0009ccc20d..d02e2c7851 100644 --- a/tools/report_coverage.py +++ b/tools/report_coverage.py @@ -1,6 +1,7 @@ #!/usr/bin/env python - from __future__ import print_function +from __future__ import unicode_literals +from builtins import open import subprocess diff --git a/tools/run_examples.py b/tools/run_examples.py index 2308951426..c34085a8e7 100644 --- a/tools/run_examples.py +++ b/tools/run_examples.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- from __future__ import print_function import os import sys @@ -5,7 +6,7 @@ from multiprocessing import cpu_count -def run_examples(example, pipelines, data_path, plugin=None): +def run_examples(example, pipelines, data_path, plugin=None, rm_base_dir=True): from nipype import config from nipype.interfaces.base import CommandLine @@ -19,20 +20,21 @@ def run_examples(example, pipelines, data_path, plugin=None): plugin_args = {} if plugin == 'MultiProc': - plugin_args['n_procs'] = cpu_count() + plugin_args['n_procs'] = int(os.getenv('NIPYPE_NUMBER_OF_CPUS', cpu_count())) __import__(example) for pipeline in pipelines: wf = getattr(sys.modules[example], pipeline) wf.base_dir = os.path.join(os.getcwd(), 'output', example, plugin) - if os.path.exists(wf.base_dir): - rmtree(wf.base_dir) + + results_dir = os.path.join(wf.base_dir, wf.name) + if rm_base_dir and os.path.exists(results_dir): + 
rmtree(results_dir) # Handle a logging directory log_dir = os.path.join(os.getcwd(), 'logs', example) - if os.path.exists(log_dir): - rmtree(log_dir) - os.makedirs(log_dir) + if not os.path.exists(log_dir): + os.makedirs(log_dir) wf.config = {'execution': {'hash_method': 'timestamp', 'stop_on_first_rerun': 'true', 'write_provenance': 'true'}, diff --git a/tools/toollib.py b/tools/toollib.py index 089620de69..2434778e4d 100644 --- a/tools/toollib.py +++ b/tools/toollib.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- """Various utilities common to IPython release and maintenance tools. """