Skip to content

Commit

Permalink
Remove explicit calls to nose from library code (nilearn#2277)
Browse files Browse the repository at this point in the history
* Fixed existing test for view_connectome's parameter deprecation

* Fixed existing tests for replace_parameters

* Removed final nose based fixture from code

 -  Renamed a fixture for clarity & non-ambiguity

* Simplified code for checking when pytest is running

* Placate flake8

* Removed remaining leftover instances of assert_raises, assert_warns

* Removed nose from requirements & config

* Added docstrings for the mocker fixture

* Typo corrected
  • Loading branch information
kchawla-pi authored Jan 9, 2020
1 parent fcc22e6 commit 9173aac
Show file tree
Hide file tree
Showing 13 changed files with 144 additions and 136 deletions.
10 changes: 4 additions & 6 deletions continuous_integration/install.sh
Original file line number Diff line number Diff line change
Expand Up @@ -22,18 +22,18 @@ create_new_venv() {
deactivate
virtualenv --system-site-packages testvenv
source testvenv/bin/activate
pip install nose pytest
pip install pytest
}

echo_requirements_string() {
# Echo a requirement string for example
# "pip nose python='2.7.3 scikit-learn=*". It has a hardcoded
# "pip pytest python='2.7.3 scikit-learn=*". It has a hardcoded
# list of possible packages to install and looks at _VERSION
# environment variables to know whether to install a given package and
# if yes which version to install. For example:
# - for numpy, NUMPY_VERSION is used
# - for scikit-learn, SCIKIT_LEARN_VERSION is used
TO_INSTALL_ALWAYS="pip nose pytest"
TO_INSTALL_ALWAYS="pip pytest"
REQUIREMENTS="$TO_INSTALL_ALWAYS"
TO_INSTALL_MAYBE="numpy scipy matplotlib scikit-learn pandas flake8 lxml joblib"
for PACKAGE in $TO_INSTALL_MAYBE; do
Expand Down Expand Up @@ -68,13 +68,11 @@ create_new_travisci_env() {

if [[ "$DISTRIB" == "neurodebian" ]]; then
create_new_venv
pip install nose-timer
bash <(wget -q -O- http://neuro.debian.net/_files/neurodebian-travis.sh)
sudo apt-get install -qq python-scipy python-nose python-nibabel python-sklearn python-joblib
sudo apt-get install -qq python-scipy python-pytest python-nibabel python-sklearn python-joblib

elif [[ "$DISTRIB" == "travisci" ]]; then
create_new_travisci_env
pip install nose-timer
# Note: nibabel is in setup.py install_requires so nibabel will
# always be installed eventually. Defining NIBABEL_VERSION is only
# useful if you happen to want a specific nibabel version rather
Expand Down
2 changes: 1 addition & 1 deletion continuous_integration/test_script.sh
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ fi
if [[ "$SKIP_TESTS" != "true" ]]; then
python continuous_integration/show-python-packages-versions.py
# Copy setup.cfg to TEST_RUN_FOLDER where we are going to run the tests from
# Mainly for nose config settings
# Mainly for test config settings
cp setup.cfg "$TEST_RUN_FOLDER"
cp .coveragerc "$TEST_RUN_FOLDER"
# We want to back out of the current working directory to make
Expand Down
18 changes: 2 additions & 16 deletions nilearn/_utils/testing.py
Original file line number Diff line number Diff line change
Expand Up @@ -244,23 +244,9 @@ def __call__(self, *args, **kwargs):


def are_tests_running():
"""Returns whether we are running the nose test loader
"""Returns whether we are running the pytest test loader
"""
if 'nose' not in sys.modules:
return
try:
import nose
except ImportError:
return False
    # Now check that we have the loader in the call stack
stack = inspect.stack()
loader_file_name = nose.loader.__file__
if loader_file_name.endswith('.pyc'):
loader_file_name = loader_file_name[:-1]
for _, file_name, _, _, _, _ in stack:
if file_name == loader_file_name:
return True
return False
return 'PYTEST_CURRENT_TEST' in os.environ


def skip_if_running_tests(msg=''):
Expand Down
26 changes: 14 additions & 12 deletions nilearn/datasets/tests/test_atlas.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,6 @@
import nibabel
import pytest

from nose import with_setup
from numpy.testing import assert_array_equal

from . import test_utils as tst
Expand All @@ -25,7 +24,10 @@


@pytest.fixture()
def request_mock():
def request_mocker():
""" Mocks URL calls for atlas fetchers during testing.
Tests the fetcher code without actually downloading the files.
"""
tst.setup_mock(utils, atlas)
yield
tst.teardown_mock(utils, atlas)
Expand Down Expand Up @@ -248,7 +250,7 @@ def test_fail_fetch_atlas_harvard_oxford(tmp_path):
assert ho.labels[6] == "Right R3"


def test_fetch_atlas_craddock_2012(tmp_path, request_mock):
def test_fetch_atlas_craddock_2012(tmp_path, request_mocker):
bunch = atlas.fetch_atlas_craddock_2012(data_dir=str(tmp_path),
verbose=0)

Expand All @@ -268,7 +270,7 @@ def test_fetch_atlas_craddock_2012(tmp_path, request_mock):
assert bunch.description != ''


def test_fetch_atlas_smith_2009(tmp_path, request_mock):
def test_fetch_atlas_smith_2009(tmp_path, request_mocker):
bunch = atlas.fetch_atlas_smith_2009(data_dir=str(tmp_path), verbose=0)

keys = ("rsn20", "rsn10", "rsn70",
Expand Down Expand Up @@ -311,7 +313,7 @@ def test_fetch_coords_seitzman_2018():
assert np.any(bunch.networks != np.sort(bunch.networks))


def test_fetch_atlas_destrieux_2009(tmp_path, request_mock):
def test_fetch_atlas_destrieux_2009(tmp_path, request_mocker):
datadir = str(tmp_path / 'destrieux_2009')
os.mkdir(datadir)
dummy = open(os.path.join(
Expand All @@ -337,7 +339,7 @@ def test_fetch_atlas_destrieux_2009(tmp_path, request_mock):
datadir, 'destrieux2009_rois.nii.gz')


def test_fetch_atlas_msdl(tmp_path, request_mock):
def test_fetch_atlas_msdl(tmp_path, request_mocker):
datadir = str(tmp_path / 'msdl_atlas')
os.mkdir(datadir)
os.mkdir(os.path.join(datadir, 'MSDL_rois'))
Expand All @@ -361,7 +363,7 @@ def test_fetch_atlas_msdl(tmp_path, request_mock):
assert dataset.description != ''


def test_fetch_atlas_yeo_2011(tmp_path, request_mock):
def test_fetch_atlas_yeo_2011(tmp_path, request_mocker):
dataset = atlas.fetch_atlas_yeo_2011(data_dir=str(tmp_path), verbose=0)
assert isinstance(dataset.anat, _basestring)
assert isinstance(dataset.colors_17, _basestring)
Expand All @@ -374,7 +376,7 @@ def test_fetch_atlas_yeo_2011(tmp_path, request_mock):
assert dataset.description != ''


def test_fetch_atlas_aal(tmp_path, request_mock):
def test_fetch_atlas_aal(tmp_path, request_mocker):
ho_dir = str(tmp_path / 'aal_SPM12' / 'aal' / 'atlas')
os.makedirs(ho_dir)
with open(os.path.join(ho_dir, 'AAL.xml'), 'w') as xml_file:
Expand All @@ -397,7 +399,7 @@ def test_fetch_atlas_aal(tmp_path, request_mock):
assert dataset.description != ''


def test_fetch_atlas_basc_multiscale_2015(tmp_path, request_mock):
def test_fetch_atlas_basc_multiscale_2015(tmp_path, request_mocker):
# default version='sym',
data_sym = atlas.fetch_atlas_basc_multiscale_2015(data_dir=str(tmp_path),
verbose=0)
Expand Down Expand Up @@ -449,7 +451,7 @@ def test_fetch_coords_dosenbach_2010():
assert np.any(bunch.networks != np.sort(bunch.networks))


def test_fetch_atlas_allen_2011(tmp_path, request_mock):
def test_fetch_atlas_allen_2011(tmp_path, request_mocker):
bunch = atlas.fetch_atlas_allen_2011(data_dir=str(tmp_path), verbose=0)
keys = ("maps",
"rsn28",
Expand All @@ -467,7 +469,7 @@ def test_fetch_atlas_allen_2011(tmp_path, request_mock):
assert bunch.description != ''


def test_fetch_atlas_surf_destrieux(tmp_path, request_mock, verbose=0):
def test_fetch_atlas_surf_destrieux(tmp_path, request_mocker, verbose=0):
data_dir = str(tmp_path / 'destrieux_surface')
os.mkdir(data_dir)
# Create mock annots
Expand Down Expand Up @@ -507,7 +509,7 @@ def _mock_talairach_fetch_files(data_dir, *args, **kwargs):
return [file_name]


def test_fetch_atlas_talairach(tmp_path, request_mock):
def test_fetch_atlas_talairach(tmp_path, request_mocker):
atlas._fetch_files = _mock_talairach_fetch_files
level_values = np.ones((81, 3)) * [0, 1, 2]
talairach = atlas.fetch_atlas_talairach('hemisphere',
Expand Down
Loading

0 comments on commit 9173aac

Please sign in to comment.