16 changes: 8 additions & 8 deletions .travis.yml
@@ -4,6 +4,10 @@ sudo: required
 
 language: python
 
+env:
+  global:
+    - TEST_RUN_FOLDER="/tmp"  # folder where the tests are run from
+
 matrix:
   # Do not wait for the allowed_failures entry to finish before
   # setting the status
@@ -39,12 +43,8 @@ virtualenv:
 
 install: source continuous_integration/install.sh
 
-script:
-  - make clean
-  - make test-code
+before_script: make clean
 
+script: source continuous_integration/test_script.sh
+
-after_success:
-  - |
-    if [[ "$COVERAGE" == "true" ]]; then
-      coveralls || echo "coverage upload failed"
-    fi
+after_success: source continuous_integration/after_success.sh
24 changes: 21 additions & 3 deletions Makefile
@@ -5,12 +5,15 @@
 PYTHON ?= python
 CYTHON ?= cython
 NOSETESTS ?= nosetests
+NOSETESTS_OPTIONS := $(shell pip list | grep nose-timer > /dev/null && \
+	echo '--with-timer --timer-top-n 50')
 CTAGS ?= ctags
 
-all: clean test
+all: clean test doc-noplot
 
 clean-pyc:
 	find . -name "*.pyc" | xargs rm -f
+	find . -name "__pycache__" | xargs rm -rf
 
 clean-so:
 	find . -name "*.so" | xargs rm -f
@@ -29,14 +32,17 @@ inplace:
 	$(PYTHON) setup.py build_ext -i
 
 test-code:
-	$(NOSETESTS) -s pypreprocess
+	$(NOSETESTS) -s pypreprocess $(NOSETESTS_OPTIONS)
+test-doc:
+	$(NOSETESTS) -s --with-doctest --doctest-tests --doctest-extension=rst \
+	--doctest-extension=inc --doctest-fixtures=_fixture `find doc/ -name '*.rst'`
 
 test-coverage:
 	rm -rf coverage .coverage
 	$(NOSETESTS) -s --with-coverage --cover-html --cover-html-dir=coverage \
 	--cover-package=pypreprocess pypreprocess
 
-test: test-code
+test: test-code test-doc
 
 trailing-spaces:
 	find . -name "*.py" | xargs perl -pi -e 's/[ \t]*$$//'
@@ -48,3 +54,15 @@ ctags:
 	# make tags for symbol based navigation in emacs and vim
 	# Install with: sudo apt-get install exuberant-ctags
 	$(CTAGS) -R *
+
+.PHONY : doc-plot
+doc-plot:
+	make -C doc html
+
+.PHONY : doc-noplot
+doc-noplot:
+	make -C doc html-noplot
+
+.PHONY : pdf
+pdf:
+	make -C doc pdf
21 changes: 18 additions & 3 deletions circle.yml
@@ -1,19 +1,34 @@
 machine:
   environment:
-    SPM_DIR: /home/ubuntu/opt/spm12/spm12_mcr/spm12
-    SPM_MCR: /home/ubuntu/opt/spm12/spm12.sh
+    SPM_VERSION: 8
+    SPM_DIR: /home/ubuntu/opt/spm8/spm8/spm8_mcr/spm8
+    SPM_MCR: /home/ubuntu/opt/spm8/spm8.sh
+    # SPM_DIR: /home/ubuntu/opt/spm12/spm12_mcr/spm12
+    # SPM_MCR: /home/ubuntu/opt/spm12/spm12.sh
 
 dependencies:
   cache_directories:
+    - "~/opt/spm8"
     - "~/opt/spm12"
     - "~/nilearn_data"
 
+  pre:
+    # Installing dependencies for SPM and MCR
+    - sudo apt-get update
+    - sudo apt-get install libxp6 libxpm4 libxmu6 libxt6 bc
+    # Installing SPM
+    - source continuous_integration/setup_spm.sh
+    # Installing Pypreprocess dependencies
+    - pip install --upgrade pip
+    - pip install nose coverage configparser
+
   override:
     # Installing pypreprocess
     - pip install -e .
+    # Fetching the auditory, multimodal and FSL-feeds datasets so they are cached for future builds
+    - python -c "from pypreprocess import datasets; datasets.fetch_spm_auditory(); datasets.fetch_spm_multimodal_fmri(); datasets.fetch_fsl_feeds()"
+    # Caching terminates here. Outputs from the tests won't be saved.
 
     # Moving to the pypreprocess directory before performing the installation.
    - source continuous_integration/install.sh:
        environment:
@@ -30,7 +45,7 @@ test:
     - make clean
     - make test-code
     - cd examples/easy_start && python nipype_preproc_spm_auditory.py
-    - cd examples/pipelining && python nipype_preproc_spm_multimodal_faces.py
+    # - cd examples/pipelining && python nipype_preproc_spm_multimodal_faces.py
     - cd examples/pipelining && python nistats_glm_fsl_feeds_fmri.py
     - sh continuous_integration/clean_output.sh
 
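The dataset pre-fetch in the override step exists purely to warm CircleCI's cache: the fetchers download into ~/nilearn_data, which is listed under cache_directories, so later builds skip the download entirely. A sketch of that step as a standalone script, assuming only the three fetchers shown in the diff (the ~/nilearn_data location is an assumption inherited from nilearn's data API):

    # Hedged sketch of the cache-warming step: download the demo datasets once
    # so CircleCI can cache ~/nilearn_data between builds.
    from pypreprocess import datasets

    datasets.fetch_spm_auditory()         # SPM single-subject auditory data
    datasets.fetch_spm_multimodal_fmri()  # SPM multimodal faces data
    datasets.fetch_fsl_feeds()            # FSL FEEDS example data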
12 changes: 12 additions & 0 deletions continuous_integration/after_success.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+
+set -e
+
+# Ignore codecov failures because we don't want Travis to report a failure
+# in the GitHub UI just because the coverage report failed to be published.
+# codecov needs to be run from the git checkout,
+# so we copy the coverage results over from TEST_RUN_FOLDER.
+if [[ "$SKIP_TESTS" != "true" && "$COVERAGE" == "true" ]]; then
+    cp "$TEST_RUN_FOLDER/.coverage" .
+    codecov || echo "Codecov upload failed"
+fi
6 changes: 3 additions & 3 deletions continuous_integration/install.sh
@@ -100,13 +100,13 @@ elif [[ "$DISTRIB" == "conda" ]]; then
     # useful if you happen to want a specific nibabel version rather
     # than the latest available one.
     if [ -n "$NIBABEL_VERSION" ]; then
-        pip install nibabel=="$NIBABEL_VERSION"
+        pip install nibabel=="$NIBABEL_VERSION" --upgrade
     fi
     if [ -n "$NIPYPE_VERSION" ]; then
-        pip install nipype=="$NIPYPE_VERSION"
+        pip install nipype=="$NIPYPE_VERSION" --upgrade
     fi
     if [ -n "$NILEARN_VERSION" ]; then
-        pip install nilearn=="$NILEARN_VERSION"
+        pip install nilearn=="$NILEARN_VERSION" --upgrade
     fi
 
 else
2 changes: 1 addition & 1 deletion continuous_integration/setup_spm.sh
@@ -1,7 +1,7 @@
 #!/bin/bash
 set -e
 
-SPM_INSTALL_SCRIPT=continuous_integration/install_spm12.sh
+SPM_INSTALL_SCRIPT=continuous_integration/install_spm${SPM_VERSION}.sh
 echo ""
 echo "SPM_INSTALL_SCRIPT: $SPM_INSTALL_SCRIPT"
 sudo bash $SPM_INSTALL_SCRIPT
25 changes: 25 additions & 0 deletions continuous_integration/show-python-packages-versions.py
@@ -0,0 +1,25 @@
+import sys
+
+DEPENDENCIES = ['numpy', 'scipy', 'sklearn', 'matplotlib', 'nibabel']
+
+
+def print_package_version(package_name, indent='  '):
+    try:
+        package = __import__(package_name)
+        version = getattr(package, '__version__', None)
+        package_file = getattr(package, '__file__', None)
+        provenance_info = '{0} from {1}'.format(version, package_file)
+    except ImportError:
+        provenance_info = 'not installed'
+
+    print('{0}{1}: {2}'.format(indent, package_name, provenance_info))
+
+if __name__ == '__main__':
+    print('=' * 120)
+    print('Python %s' % str(sys.version))
+    print('from: %s\n' % sys.executable)
+
+    print('Dependencies versions')
+    for package_name in DEPENDENCIES:
+        print_package_version(package_name)
+    print('=' * 120)
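
For context: `__import__(name)` returns the top-level module, and the getattr defaults keep the probe from raising on packages that lack a __version__ attribute. The same provenance trick with importlib, stdlib only ('json' is merely a stand-in package for illustration):

    # Hedged sketch of the provenance probe used above.
    import importlib

    def provenance(name):
        try:
            mod = importlib.import_module(name)
        except ImportError:
            return 'not installed'
        return '{0} from {1}'.format(getattr(mod, '__version__', None),
                                     getattr(mod, '__file__', None))

    print(provenance('json'))            # version (or None) plus the module path
    print(provenance('not_a_package'))   # 'not installed'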
21 changes: 21 additions & 0 deletions continuous_integration/test_script.sh
@@ -0,0 +1,21 @@
+#!/bin/bash
+
+set -e
+
+if [[ -n "$FLAKE8_VERSION" ]]; then
+    source continuous_integration/flake8_diff.sh
+fi
+
+if [[ "$SKIP_TESTS" != "true" ]]; then
+    python continuous_integration/show-python-packages-versions.py
+    # Copy setup.cfg to TEST_RUN_FOLDER, where we are going to run the tests
+    # from -- mainly for the nose config settings.
+    cp setup.cfg "$TEST_RUN_FOLDER"
+    # We want to back out of the current working directory to make
+    # sure we are using pypreprocess installed in site-packages rather
+    # than the one from the current working directory.
+    # Parentheses (run in a subshell) are used to leave
+    # the current directory unchanged.
+    (cd "$TEST_RUN_FOLDER" && make -f "$OLDPWD/Makefile" test-code)
+    test "$MATPLOTLIB_VERSION" == "" || make test-doc
+fi
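
The parenthesised cd is the whole trick here: the tests run from TEST_RUN_FOLDER, so Python resolves pypreprocess from site-packages, yet the calling shell never changes directory. The same idea from Python, for illustration (the /tmp path mirrors TEST_RUN_FOLDER from .travis.yml):

    # Hedged sketch: run `make test-code` from another directory without
    # changing our own working directory (subprocess's cwd only affects the child).
    import os
    import subprocess

    src = os.getcwd()  # the git checkout, where the Makefile lives
    subprocess.check_call(
        ['make', '-f', os.path.join(src, 'Makefile'), 'test-code'],
        cwd='/tmp')
    assert os.getcwd() == src  # the parent process never moved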
2 changes: 1 addition & 1 deletion pypreprocess/configure_spm.py
@@ -458,7 +458,7 @@ def _find_spm_mcr_and_spm_dir(cli_spm_mcr, config_spm_mcr,
             'nipype version {} too old.'
             ' No support for precompiled SPM'.format(nipype.__version__))
         return None
-
+    _logger.info("using nipype version {}".format(nipype.__version__))
     check_mcr = _IsValidMCR(cli_spm_dir, config_spm_dir, defaults)
     spm_mcr_envs = _get_exported('spm_mcr_env_template',
                                  templates_dict=defaults)
22 changes: 11 additions & 11 deletions pypreprocess/nipype_preproc_spm_utils.py
@@ -196,7 +196,7 @@ def _do_subject_slice_timing(subject_data, TR, TA=None, spm_dir=None,
         time_acquisition=TA, num_slices=nslices,
         ref_slice=ref_slice + 1,
         slice_order=list(slice_order + 1),  # SPM
-        ignore_exception=True
+        ignore_exception=False
     )
     if stc_result.outputs is None:
         subject_data.failed = True
@@ -495,7 +495,7 @@ def _do_subject_coregister(subject_data, reslice=False, spm_dir=None,
         source=coreg_source,
         apply_to_files=apply_to_files,
         jobtype=jobtype,
-        ignore_exception=True
+        ignore_exception=False
     )
 
     # failed node ?
@@ -633,7 +633,7 @@ def _do_subject_segment(subject_data, output_modulated_tpms=True, spm_dir=None,
         wm_output_type=wm_output_type,
         csf_output_type=csf_output_type,
         tissue_prob_maps=[GM_TEMPLATE, WM_TEMPLATE, CSF_TEMPLATE],
-        ignore_exception=True
+        ignore_exception=False
     )
 
     # failed node
@@ -746,7 +746,7 @@ def _do_subject_normalize(subject_data, fwhm=0., anat_fwhm=0., caching=True,
                 output_dir=subject_data.scratch)
             normalize_result = normalize(
                 source=subject_data.anat, template=t1_template,
-                write_preserve=False, ignore_exception=True)
+                write_preserve=False, ignore_exception=False)
             parameter_file = normalize_result.outputs.normalization_parameters
         else:
             parameter_file = subject_data.nipype_results[
@@ -777,7 +777,7 @@ def _do_subject_normalize(subject_data, fwhm=0., anat_fwhm=0., caching=True,
             apply_to_files=apply_to_files,
             write_voxel_sizes=list(write_voxel_sizes),
             # write_bounding_box=[[-78, -112, -50], [78, 76, 85]],
-            write_interp=1, jobtype='write', ignore_exception=True)
+            write_interp=1, jobtype='write', ignore_exception=False)
 
         # failed node ?
         if normalize_result.outputs is None:
@@ -808,7 +808,7 @@ def _do_subject_normalize(subject_data, fwhm=0., anat_fwhm=0., caching=True,
             write_wrap=[0, 0, 0],
             write_interp=1,
             jobtype='write',
-            ignore_exception=True
+            ignore_exception=False
         )
 
         # failed node
@@ -933,7 +933,7 @@ def _do_subject_smooth(subject_data, fwhm, anat_fwhm=None, spm_dir=None,
         in_files = [getattr(subject_data, x) for x in anat_like]
 
         smooth_result = smooth(
-            in_files=in_files, fwhm=width, ignore_exception=True)
+            in_files=in_files, fwhm=width, ignore_exception=False)
 
         # failed node ?
         subject_data.nipype_results['smooth'][brain_name] = smooth_result
@@ -1025,7 +1025,7 @@ def _do_subject_dartelnorm2mni(subject_data,
             flowfield_files=subject_data.dartel_flow_fields,
             template_file=template_file,
             modulate=output_modulated_tpms,  # modulate only if requested
-            fwhm=anat_fwhm, ignore_exception=True, **tricky_kwargs)
+            fwhm=anat_fwhm, ignore_exception=False, **tricky_kwargs)
         setattr(subject_data, "mw" + tissue,
                 dartelnorm2mni_result.outputs.normalized_files)
 
@@ -1034,7 +1034,7 @@
         apply_to_files=subject_data.anat,
         flowfield_files=subject_data.dartel_flow_fields,
         template_file=template_file,
-        ignore_exception=True,
+        ignore_exception=False,
         modulate=output_modulated_tpms,
         fwhm=anat_fwhm,
         **tricky_kwargs
@@ -1049,7 +1049,7 @@
     createwarped_result = createwarped(
         image_files=subject_data.func,
         flowfield_files=subject_data.dartel_flow_fields,
-        ignore_exception=True
+        ignore_exception=False
     )
     subject_data.func = createwarped_result.outputs.warped_files
 
@@ -1408,7 +1408,7 @@ def _do_subjects_newsegment(
     newsegment_result = newsegment(
         channel_files=[subject_data.anat for subject_data in subjects],
         tissues=TISSUES,
-        ignore_exception=True)
+        ignore_exception=False)
     if newsegment_result.outputs is None:
         return
     else:
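
The recurring True-to-False flip above is the substantive change in this file: with ignore_exception=True, a crashed SPM node returned a result whose .outputs was None and the pipeline limped on through the `if ... outputs is None` guards; with False, nipype raises at the failing node, so failures surface immediately in CI. A minimal sketch of the difference, assuming nipype's interface API (spm.Smooth is a real nipype interface; the input file name is hypothetical):

    # Hedged sketch: how nipype's ignore_exception input changes failure behavior.
    from nipype.interfaces import spm

    smooth = spm.Smooth(in_files='func.nii', fwhm=[8., 8., 8.])
    smooth.inputs.ignore_exception = False  # the new setting in this PR

    try:
        result = smooth.run()           # raises on failure when False ...
    except Exception as exc:
        print('node failed loudly:', exc)
    else:
        print(result.outputs)           # ... instead of silently yielding
                                        # outputs=None as with True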