diff --git a/.github/workflows/nestbuildmatrix.yml b/.github/workflows/nestbuildmatrix.yml index 6ec8a79f0f..abd7e25bbb 100644 --- a/.github/workflows/nestbuildmatrix.yml +++ b/.github/workflows/nestbuildmatrix.yml @@ -10,7 +10,7 @@ jobs: clang-format: runs-on: "ubuntu-22.04" env: - CLANG_REQUIRE_VERSION: 17.0.4 + CLANG_REQUIRE_VERSION: 21.1.7 CLANG_FORMAT_FILE: ".clang-format" steps: - name: Harden Runner @@ -62,11 +62,11 @@ jobs: - name: "Set up Python 3.x" uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 with: - python-version: 3.9 + python-version: "3.10" - name: "Install dependencies" run: | - pip install pre-commit + pip install pre-commit pylint - name: "Run pre-commit checks..." run: | diff --git a/.mypy.ini b/.mypy.ini deleted file mode 100644 index b04439f171..0000000000 --- a/.mypy.ini +++ /dev/null @@ -1,10 +0,0 @@ -# For configuation details see -# - -[mypy] -exclude = .git/, .snakemake/, .pytest_cache/, sync-test-env/, conda/, env/ -explicit_package_bases = True - -# Check explanations of error codes -# https://mypy.readthedocs.io/en/stable/error_code_list.html -disable_error_code = attr-defined, index, import, call-arg, misc, dict-item, arg-type, operator, call-overload, assignment, list-item, var-annotated, valid-type diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f0b9728371..830185d3b1 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,8 @@ +fail_fast: true + repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v5.0.0 + rev: v6.0.0 hooks: - id: end-of-file-fixer - id: trailing-whitespace @@ -10,24 +12,34 @@ repos: - id: fix-byte-order-marker - repo: https://github.com/gitleaks/gitleaks - rev: v8.16.3 + rev: v8.10.0 hooks: - id: gitleaks - repo: https://github.com/pycqa/isort - rev: 5.12.0 + rev: 7.0.0 hooks: - id: isort args: ["--profile", "black", "--thirdparty", "nest"] - repo: https://github.com/psf/black - rev: 23.7.0 + rev: 25.11.0 hooks: - id: black 
language_version: python3 + - repo: https://github.com/PyCQA/flake8 + rev: 7.3.0 + hooks: + - id: flake8 + + - repo: https://github.com/PyCQA/pydocstyle + rev: 6.3.0 + hooks: + - id: pydocstyle + - repo: https://github.com/shellcheck-py/shellcheck-py - rev: v0.10.0.1 + rev: v0.11.0.1 hooks: - id: shellcheck # explicitly add input files that are not "*.sh" @@ -35,18 +47,60 @@ repos: # # note that these differ from arguments in .github/workflows/nestbuildmatrix.yml # IF YOU READ THIS: recheck for consistency with `find . -iname "*.sh" -o -iname "*.sh.in"` - "bin/nest_vars.sh.in", # testsuite/do_tests.sh:112 requires this for `. "${PREFIX}/bin/nest_vars.sh"` - "testsuite/run_test.sh", - "testsuite/do_tests.sh", - "testsuite/junit_xml.sh", - "examples/run_examples.sh", - "examples/list_examples.sh", + "./testsuite/junit_xml.sh", + "./testsuite/do_tests.sh", + "./testsuite/run_test.sh", + "./bin/nest_vars.sh.in", # testsuite/do_tests.sh:112 requires this for `. "${PREFIX}/bin/nest_vars.sh"` + "./cmake/CheckFiles/check_return_val.sh", + "./examples/run_examples.sh", + "./examples/list_examples.sh", + "./build_support/version_info.sh", + "./build_support/install_csa-libneurosim.sh", + "./build_support/install_music.sh", + "./build_support/check_forbidden_types.sh", + "./build_support/format_all_c_c++_files.sh", + "./build_support/install_sionlib.sh", ] - repo: https://github.com/pre-commit/mirrors-clang-format - rev: v17.0.4 + rev: v21.1.7 hooks: - id: clang-format types_or: [c++] + # ref https://pylint.pycqa.org/en/latest/user_guide/installation/pre-commit-integration.html + - repo: local + hooks: + - id: pylint + name: pylint + entry: pylint + language: system + require_serial: true + types: ["python"] + args: + - "-rn" # Only display messages + - "-sn" # Don't display the score + - "--rcfile=.pylintrc" # Link to your config file + - "--load-plugins=pylint.extensions.docparams" # Load an extension + + - repo: https://github.com/pre-commit/mirrors-mypy + rev: v1.19.0 + 
hooks: + - id: mypy + additional_dependencies: + - 'jinja2' + - 'python-dateutil' + - 'pytest' + - 'semver' + - 'types-python-dateutil' + - 'types-requests' + - 'matplotlib-stubs' + - 'pandas-stubs' + args: + - --show-error-codes + - --config-file + - pyproject.toml + - . + pass_filenames: false + exclude: 'build/.*|thirdparty/.*' diff --git a/.pylintrc b/.pylintrc index 9569543737..43ab16a8a1 100644 --- a/.pylintrc +++ b/.pylintrc @@ -6,7 +6,7 @@ options = unneeded-not, line-too-long, unnecessary-semicolon, trailing-whitespace, missing-final-newline, bad-indentation, multiple-statements, bare-except ignore = CVS .git conda env __pycache__ .pytest_cache .mypy_cache -disable = no-member, redefined-outer-name, invalid-name, consider-using-f-string, wrong-import-order, missing-function-docstring, missing-method-docstring, missing-class-docstring, attribute-defined-outside-init, no-else-return, cell-var-from-loop, import-error, pointless-string-statement, unused-import, redefined-builtin, superfluous-parens, unused-variable, too-many-locals, consider-using-from-import, consider-using-enumerate, no-name-in-module, too-many-arguments, too-many-instance-attributes, import-outside-toplevel, too-few-public-methods, cyclic-import, missing-module-docstring, unidiomatic-typecheck, dangerous-default-value, unused-argument, use-dict-literal, exec-used, no-self-use, too-many-statements, ungrouped-imports, consider-using-sys-exit, too-many-statements, redundant-u-string-prefix, protected-access, consider-using-dict-comprehension, no-else-raise, too-many-nested-blocks, use-a-generator, reimported, undefined-variable, too-many-branches, raise-missing-from, trailing-comma-tuple, unspecified-encoding, consider-using-with, f-string-without-interpolation, broad-except, unnecessary-pass, global-statement, too-many-lines, consider-merging-isinstance, redefined-argument-from-local, global-variable-undefined, use-implicit-booleaness-not-len, inconsistent-return-statements, 
consider-using-in, inconsistent-return-statements, keyword-arg-before-vararg, consider-using-dict-items, import-self, fixme, c-extension-no-member, too-many-public-methods, consider-iterating-dictionary, consider-using-max-builtin, super-with-arguments, expression-not-assigned, unnecessary-comprehension, no-self-argument, chained-comparison, undefined-loop-variable, empty-docstring, use-maxsplit-arg, pointless-statement, wrong-import-position, redundant-unittest-assert, eval-used, not-callable, invalid-unary-operand-type, consider-using-generator, R0801, unnecessary-dunder-call, logging-fstring-interpolation, consider-using-get, useless-object-inheritance, unrecognized-option, unknown-option-value, useless-option-value +disable = no-member, redefined-outer-name, invalid-name, consider-using-f-string, wrong-import-order, missing-function-docstring, missing-method-docstring, missing-class-docstring, attribute-defined-outside-init, no-else-return, cell-var-from-loop, import-error, pointless-string-statement, unused-import, redefined-builtin, superfluous-parens, unused-variable, too-many-locals, consider-using-from-import, consider-using-enumerate, no-name-in-module, too-many-arguments, too-many-instance-attributes, too-many-return-statements, import-outside-toplevel, too-few-public-methods, cyclic-import, missing-module-docstring, unidiomatic-typecheck, dangerous-default-value, unused-argument, use-dict-literal, exec-used, no-self-use, too-many-statements, ungrouped-imports, consider-using-sys-exit, too-many-statements, redundant-u-string-prefix, protected-access, consider-using-dict-comprehension, no-else-raise, too-many-nested-blocks, use-a-generator, reimported, undefined-variable, too-many-branches, raise-missing-from, trailing-comma-tuple, unspecified-encoding, consider-using-with, f-string-without-interpolation, broad-except, unnecessary-pass, global-statement, too-many-lines, consider-merging-isinstance, redefined-argument-from-local, global-variable-undefined, 
use-implicit-booleaness-not-len, inconsistent-return-statements, consider-using-in, inconsistent-return-statements, keyword-arg-before-vararg, consider-using-dict-items, import-self, fixme, c-extension-no-member, too-many-public-methods, consider-iterating-dictionary, consider-using-max-builtin, super-with-arguments, expression-not-assigned, unnecessary-comprehension, no-self-argument, chained-comparison, undefined-loop-variable, empty-docstring, use-maxsplit-arg, pointless-statement, wrong-import-position, redundant-unittest-assert, eval-used, not-callable, invalid-unary-operand-type, consider-using-generator, R0801, unnecessary-dunder-call, logging-fstring-interpolation, consider-using-get, useless-object-inheritance, unrecognized-option, unknown-option-value, useless-option-value, missing-type-doc, too-many-positional-arguments, missing-param-doc, logging-not-lazy, useless-return, duplicate-key, possibly-used-before-assignment, unnecessary-lambda, consider-using-min-builtin, using-constant-test, unexpected-keyword-arg const-naming-style = snake_case method-naming-style = PascalCase diff --git a/bin/nest-server-mpi b/bin/nest-server-mpi index 3af45073c7..fde56b6cfd 100755 --- a/bin/nest-server-mpi +++ b/bin/nest-server-mpi @@ -13,21 +13,17 @@ Options: """ -from docopt import docopt -from mpi4py import MPI - -if __name__ == "__main__": - opt = docopt(__doc__) - import logging import os import time -logger = logging.getLogger(__name__) -logger.setLevel(os.getenv("NEST_SERVER_MPI_LOGGER_LEVEL", "INFO")) - import nest import nest.server +from docopt import docopt +from mpi4py import MPI + +logger = logging.getLogger(__name__) +logger.setLevel(os.getenv("NEST_SERVER_MPI_LOGGER_LEVEL", "INFO")) HOST = os.getenv("NEST_SERVER_HOST", "127.0.0.1") PORT = os.getenv("NEST_SERVER_PORT", "52425") @@ -41,41 +37,47 @@ def log(call_name, msg): logger.debug(msg) -if rank == 0: - logger.info("==> Starting NEST Server Master on rank 0") - nest.server.set_mpi_comm(comm) - 
nest.server.run_mpi_app(host=opt.get("--host", HOST), port=opt.get("--port", PORT)) - -else: - logger.info(f"==> Starting NEST Server Worker on rank {rank}") - nest.server.set_mpi_comm(comm) - - while True: - log("spinwait", "waiting for call bcast") - call_name = comm.bcast(None, root=0) - - log(call_name, "received call bcast, waiting for data bcast") - data = comm.bcast(None, root=0) - - log(call_name, f"received data bcast, data={data}") - args, kwargs = data - - if call_name == "exec": - response = nest.server.do_exec(args, kwargs) - else: - call, args, kwargs = nest.server.nestify(call_name, args, kwargs) - log(call_name, f"local call, args={args}, kwargs={kwargs}") - - # The following exception handler is useful if an error - # occurs simulataneously on all processes. If only a - # subset of processes raises an exception, a deadlock due - # to mismatching MPI communication calls is inevitable on - # the next call. - try: - response = call(*args, **kwargs) - except Exception: - logger.error("Failed to execute call") - continue - - log(call_name, f"sending reponse gather, data={response}") - comm.gather(nest.serialize_data(response), root=0) +def main(): + opt = docopt(__doc__) + if rank == 0: + logger.info("==> Starting NEST Server Master on rank 0") + nest.server.set_mpi_comm(comm) + nest.server.run_mpi_app(host=opt.get("--host", HOST), port=opt.get("--port", PORT)) + + else: + logger.info(f"==> Starting NEST Server Worker on rank {rank}") + nest.server.set_mpi_comm(comm) + + while True: + log("spinwait", "waiting for call bcast") + call_name = comm.bcast(None, root=0) + + log(call_name, "received call bcast, waiting for data bcast") + data = comm.bcast(None, root=0) + + log(call_name, f"received data bcast, data={data}") + args, kwargs = data + + if call_name == "exec": + response = nest.server.do_exec(args, kwargs) + else: + call, args, kwargs = nest.server.nestify(call_name, args, kwargs) + log(call_name, f"local call, args={args}, kwargs={kwargs}") + + # 
The following exception handler is useful if an error + # occurs simultaneously on all processes. If only a + # subset of processes raises an exception, a deadlock due + # to mismatching MPI communication calls is inevitable on + # the next call. + try: + response = call(*args, **kwargs) + except Exception: + logger.error("Failed to execute call") + continue + + log(call_name, f"sending response gather, data={response}") + comm.gather(nest.serialize_data(response), root=0) + + +if __name__ == "__main__": + main() diff --git a/build_support/check_copyright_headers.py b/build_support/check_copyright_headers.py index b7c2147963..0e52418b49 100644 --- a/build_support/check_copyright_headers.py +++ b/build_support/check_copyright_headers.py @@ -118,6 +118,9 @@ def eprint(*args, **kwargs): if any([exclude_file in tested_file for exclude_file in exclude_files]): continue + if os.stat(tested_file).st_size == 0: + continue + with open(tested_file, encoding="utf-8") as source_file: total_files += 1 for template_line in template_contents[extension]: diff --git a/doc/htmldoc/_ext/extract_api_functions.py b/doc/htmldoc/_ext/extract_api_functions.py index 09b0f2e36f..29c81be651 100644 --- a/doc/htmldoc/_ext/extract_api_functions.py +++ b/doc/htmldoc/_ext/extract_api_functions.py @@ -35,9 +35,9 @@ def find_all_variables(file_path): """ - This function gets the names of all functions listed in ``__all__`` - in each of the PyNEST API files, along with the Kernel Attributes - found in ``__init__.py`` of ``pynest/nest/``. + Gets the names of all functions listed in ``__all__`` in each of the PyNEST + API files, along with the Kernel Attributes found in ``__init__.py`` of + ``pynest/nest/``. 
""" all_variables = None diff --git a/libnestutil/block_vector.h b/libnestutil/block_vector.h index 5648804bac..278977dda8 100644 --- a/libnestutil/block_vector.h +++ b/libnestutil/block_vector.h @@ -323,7 +323,7 @@ class BlockVector template < typename value_type_ > BlockVector< value_type_ >::BlockVector() : blockmap_( - std::vector< std::vector< value_type_ > >( 1, std::move( std::vector< value_type_ >( max_block_size ) ) ) ) + std::vector< std::vector< value_type_ > >( 1, std::move( std::vector< value_type_ >( max_block_size ) ) ) ) , finish_( begin() ) { } @@ -331,7 +331,7 @@ BlockVector< value_type_ >::BlockVector() template < typename value_type_ > BlockVector< value_type_ >::BlockVector( size_t n ) : blockmap_( - std::vector< std::vector< value_type_ > >( 1, std::move( std::vector< value_type_ >( max_block_size ) ) ) ) + std::vector< std::vector< value_type_ > >( 1, std::move( std::vector< value_type_ >( max_block_size ) ) ) ) , finish_( begin() ) { size_t num_blocks_needed = std::ceil( static_cast< double >( n ) / max_block_size ); diff --git a/libnestutil/sort.h b/libnestutil/sort.h index 4712cde804..6071cab267 100644 --- a/libnestutil/sort.h +++ b/libnestutil/sort.h @@ -49,12 +49,12 @@ template < typename T > inline size_t median3_( const BlockVector< T >& vec, const size_t i, const size_t j, const size_t k ) { - return ( ( vec[ i ] < vec[ j ] ) ? ( ( vec[ j ] < vec[ k ] ) ? j - : ( vec[ i ] < vec[ k ] ) ? k - : i ) - : ( ( vec[ k ] < vec[ j ] ) ? j - : ( vec[ k ] < vec[ i ] ) ? k - : i ) ); + return ( ( vec[ i ] < vec[ j ] ) ? ( ( vec[ j ] < vec[ k ] ) ? j + : ( vec[ i ] < vec[ k ] ) ? k + : i ) + : ( ( vec[ k ] < vec[ j ] ) ? j + : ( vec[ k ] < vec[ i ] ) ? 
k + : i ) ); } /** diff --git a/models/aeif_cond_alpha_multisynapse.cpp b/models/aeif_cond_alpha_multisynapse.cpp index f027713973..b4c4b97b9a 100644 --- a/models/aeif_cond_alpha_multisynapse.cpp +++ b/models/aeif_cond_alpha_multisynapse.cpp @@ -321,8 +321,8 @@ aeif_cond_alpha_multisynapse::State_::get( DictionaryDatum& d ) const std::vector< double >* g = new std::vector< double >(); for ( size_t i = 0; - i < ( ( y_.size() - State_::NUMBER_OF_FIXED_STATES_ELEMENTS ) / State_::NUM_STATE_ELEMENTS_PER_RECEPTOR ); - ++i ) + i < ( ( y_.size() - State_::NUMBER_OF_FIXED_STATES_ELEMENTS ) / State_::NUM_STATE_ELEMENTS_PER_RECEPTOR ); + ++i ) { dg->push_back( y_[ State_::DG + ( State_::NUM_STATE_ELEMENTS_PER_RECEPTOR * i ) ] ); g->push_back( y_[ State_::G + ( State_::NUM_STATE_ELEMENTS_PER_RECEPTOR * i ) ] ); diff --git a/models/aeif_cond_beta_multisynapse.cpp b/models/aeif_cond_beta_multisynapse.cpp index 5ef7e10717..a25a26705f 100644 --- a/models/aeif_cond_beta_multisynapse.cpp +++ b/models/aeif_cond_beta_multisynapse.cpp @@ -329,8 +329,8 @@ aeif_cond_beta_multisynapse::State_::get( DictionaryDatum& d ) const std::vector< double >* g = new std::vector< double >(); for ( size_t i = 0; - i < ( ( y_.size() - State_::NUMBER_OF_FIXED_STATES_ELEMENTS ) / State_::NUM_STATE_ELEMENTS_PER_RECEPTOR ); - ++i ) + i < ( ( y_.size() - State_::NUMBER_OF_FIXED_STATES_ELEMENTS ) / State_::NUM_STATE_ELEMENTS_PER_RECEPTOR ); + ++i ) { dg->push_back( y_[ State_::DG + ( State_::NUM_STATE_ELEMENTS_PER_RECEPTOR * i ) ] ); g->push_back( y_[ State_::G + ( State_::NUM_STATE_ELEMENTS_PER_RECEPTOR * i ) ] ); diff --git a/models/cm_tree.cpp b/models/cm_tree.cpp index 52121ca2fe..5cc134ff84 100644 --- a/models/cm_tree.cpp +++ b/models/cm_tree.cpp @@ -290,7 +290,7 @@ void nest::CompTree::set_parents() { for ( auto compartment_idx_it = compartment_indices_.begin(); compartment_idx_it != compartment_indices_.end(); - ++compartment_idx_it ) + ++compartment_idx_it ) { Compartment* comp_ptr = 
get_compartment( *compartment_idx_it ); // will be nullptr if root @@ -309,7 +309,7 @@ nest::CompTree::set_compartments() compartments_.clear(); for ( auto compartment_idx_it = compartment_indices_.begin(); compartment_idx_it != compartment_indices_.end(); - ++compartment_idx_it ) + ++compartment_idx_it ) { compartments_.push_back( get_compartment( *compartment_idx_it ) ); } diff --git a/models/gif_pop_psc_exp.cpp b/models/gif_pop_psc_exp.cpp index 43299c877f..aed27e072a 100644 --- a/models/gif_pop_psc_exp.cpp +++ b/models/gif_pop_psc_exp.cpp @@ -592,7 +592,7 @@ nest::gif_pop_psc_exp::update( Time const& origin, const long from, const long t const double ompl = ( 1. - P_lambda_ ); V_.v_[ k ] = ompl * ompl * V_.v_[ k ] + P_lambda_ * V_.m_[ k ]; V_.m_[ k ] = ompl * V_.m_[ k ]; // line 26 of [1] - } // line 27 of [1] + } // line 27 of [1] double P_Lambda_; if ( ( Z_ + V_.z_ ) > 0.0 ) diff --git a/models/glif_cond.cpp b/models/glif_cond.cpp index a1434786b9..bf943c2551 100644 --- a/models/glif_cond.cpp +++ b/models/glif_cond.cpp @@ -405,9 +405,9 @@ nest::glif_cond::State_::get( DictionaryDatum& d, const Parameters_& p ) const std::vector< double >* dg = new std::vector< double >(); std::vector< double >* g = new std::vector< double >(); - for ( size_t i = 0; i - < ( ( y_.size() - State_::NUMBER_OF_FIXED_STATES_ELEMENTS ) / State_::NUMBER_OF_STATES_ELEMENTS_PER_RECEPTOR ); - ++i ) + for ( size_t i = 0; + i < ( ( y_.size() - State_::NUMBER_OF_FIXED_STATES_ELEMENTS ) / State_::NUMBER_OF_STATES_ELEMENTS_PER_RECEPTOR ); + ++i ) { dg->push_back( y_[ State_::DG_SYN - State_::NUMBER_OF_RECORDABLES_ELEMENTS + ( i * State_::NUMBER_OF_STATES_ELEMENTS_PER_RECEPTOR ) ] ); diff --git a/models/poisson_generator_ps.cpp b/models/poisson_generator_ps.cpp index 7a913d15bc..c6b71fd3e7 100644 --- a/models/poisson_generator_ps.cpp +++ b/models/poisson_generator_ps.cpp @@ -155,8 +155,8 @@ nest::poisson_generator_ps::pre_run_hook() Time min_time = B_.next_spike_.begin()->first; for ( 
std::vector< Buffers_::SpikeTime >::const_iterator it = B_.next_spike_.begin() + 1; - it != B_.next_spike_.end(); - ++it ) + it != B_.next_spike_.end(); + ++it ) { min_time = std::min( min_time, it->first ); } diff --git a/models/pp_psc_delta.cpp b/models/pp_psc_delta.cpp index 482fde1dff..44fa53144f 100644 --- a/models/pp_psc_delta.cpp +++ b/models/pp_psc_delta.cpp @@ -447,7 +447,7 @@ nest::pp_psc_delta::update( Time const& origin, const long from, const long to ) S_.y3_ = 0.0; } } // S_.y3_ = P_.V_reset_; - } // if (rate > 0.0) + } // if (rate > 0.0) } else // Neuron is within dead time { diff --git a/nestkernel/archiving_node.cpp b/nestkernel/archiving_node.cpp index 7ad511eb3c..087fe7efff 100644 --- a/nestkernel/archiving_node.cpp +++ b/nestkernel/archiving_node.cpp @@ -71,8 +71,8 @@ ArchivingNode::register_stdp_connection( double t_first_read, double delay ) // For details see bug #218. MH 08-04-22 for ( std::deque< histentry >::iterator runner = history_.begin(); - runner != history_.end() and ( t_first_read - runner->t_ > -1.0 * kernel().connection_manager.get_stdp_eps() ); - ++runner ) + runner != history_.end() and ( t_first_read - runner->t_ > -1.0 * kernel().connection_manager.get_stdp_eps() ); + ++runner ) { ( runner->access_counter_ )++; } diff --git a/nestkernel/conn_builder.cpp b/nestkernel/conn_builder.cpp index f70f00f0b4..968a75c031 100644 --- a/nestkernel/conn_builder.cpp +++ b/nestkernel/conn_builder.cpp @@ -69,9 +69,9 @@ nest::ConnBuilder::ConnBuilder( const std::string& primary_rule, const DictionaryDatum& third_conn_spec, const std::map< Name, std::vector< DictionaryDatum > >& syn_specs ) : third_in_builder_( new ThirdInBuilder( sources, - third, - third_conn_spec, - const_cast< std::map< Name, std::vector< DictionaryDatum > >& >( syn_specs )[ names::third_in ] ) ) + third, + third_conn_spec, + const_cast< std::map< Name, std::vector< DictionaryDatum > >& >( syn_specs )[ names::third_in ] ) ) , third_out_builder_( 
kernel().connection_manager.get_third_conn_builder( third_rule, third, targets, diff --git a/nestkernel/conn_builder.h b/nestkernel/conn_builder.h index 801d4bb355..ba70751027 100644 --- a/nestkernel/conn_builder.h +++ b/nestkernel/conn_builder.h @@ -853,8 +853,8 @@ inline void BipartiteConnBuilder::skip_conn_parameter_( size_t target_thread, size_t n_skip ) { for ( std::vector< ConnParameter* >::iterator it = parameters_requiring_skipping_.begin(); - it != parameters_requiring_skipping_.end(); - ++it ) + it != parameters_requiring_skipping_.end(); + ++it ) { ( *it )->skip( target_thread, n_skip ); } diff --git a/nestkernel/connection_creator.cpp b/nestkernel/connection_creator.cpp index 21b94e3153..16bfac6da4 100644 --- a/nestkernel/connection_creator.cpp +++ b/nestkernel/connection_creator.cpp @@ -85,7 +85,7 @@ ConnectionCreator::ConnectionCreator( DictionaryDatum dict ) param_dicts_.resize( syn_params_dvd->size() ); auto param_dict = param_dicts_.begin(); for ( auto synapse_datum = syn_params_dvd->begin(); synapse_datum < syn_params_dvd->end(); - ++synapse_datum, ++param_dict ) + ++synapse_datum, ++param_dict ) { auto syn_param = dynamic_cast< DictionaryDatum* >( synapse_datum->datum() ); extract_params_( *syn_param, *param_dict ); diff --git a/nestkernel/connection_creator_impl.h b/nestkernel/connection_creator_impl.h index e094ab59d5..39e3945953 100644 --- a/nestkernel/connection_creator_impl.h +++ b/nestkernel/connection_creator_impl.h @@ -531,8 +531,8 @@ ConnectionCreator::fixed_indegree_( Layer< D >& source, // Collect probabilities for the sources for ( typename std::vector< std::pair< Position< D >, size_t > >::iterator iter = positions.begin(); - iter != positions.end(); - ++iter ) + iter != positions.end(); + ++iter ) { iter->first.get_vector( source_pos_vector ); probabilities.push_back( kernel_->value( rng, source_pos_vector, target_pos_vector, source, tgt ) ); @@ -664,8 +664,8 @@ ConnectionCreator::fixed_indegree_( Layer< D >& source, // Collect 
probabilities for the sources for ( typename std::vector< std::pair< Position< D >, size_t > >::iterator iter = positions->begin(); - iter != positions->end(); - ++iter ) + iter != positions->end(); + ++iter ) { iter->first.get_vector( source_pos_vector ); probabilities.push_back( kernel_->value( rng, source_pos_vector, target_pos_vector, source, tgt ) ); diff --git a/nestkernel/connection_manager.cpp b/nestkernel/connection_manager.cpp index de9307ce1a..a02598cfad 100644 --- a/nestkernel/connection_manager.cpp +++ b/nestkernel/connection_manager.cpp @@ -1028,7 +1028,7 @@ nest::ConnectionManager::trigger_update_weight( const long vt_id, const size_t tid = kernel().vp_manager.get_thread_id(); for ( std::vector< ConnectorBase* >::iterator it = connections_[ tid ].begin(); it != connections_[ tid ].end(); - ++it ) + ++it ) { if ( *it ) { @@ -1315,8 +1315,8 @@ nest::ConnectionManager::get_connections_from_sources_( const size_t tid, else { for ( std::vector< size_t >::const_iterator t_node_id = target_neuron_node_ids.begin(); - t_node_id != target_neuron_node_ids.end(); - ++t_node_id ) + t_node_id != target_neuron_node_ids.end(); + ++t_node_id ) { // target_table_devices_ contains connections both to and from // devices. First we get connections from devices. @@ -1324,8 +1324,8 @@ nest::ConnectionManager::get_connections_from_sources_( const size_t tid, source_node_id, *t_node_id, tid, syn_id, synapse_label, conns_in_thread ); } for ( std::vector< size_t >::const_iterator t_node_id = target_device_node_ids.begin(); - t_node_id != target_device_node_ids.end(); - ++t_node_id ) + t_node_id != target_device_node_ids.end(); + ++t_node_id ) { // Then, we get connections to devices. 
target_table_devices_.get_connections_to_devices_( diff --git a/nestkernel/eprop_archiving_node_impl.h b/nestkernel/eprop_archiving_node_impl.h index 7c4c60a52e..8b848585b2 100644 --- a/nestkernel/eprop_archiving_node_impl.h +++ b/nestkernel/eprop_archiving_node_impl.h @@ -140,8 +140,8 @@ EpropArchivingNode< HistEntryT >::erase_used_eprop_history() auto it_update_hist = update_history_.begin(); for ( long t = update_history_.begin()->t_; - t <= ( update_history_.end() - 1 )->t_ and it_update_hist != update_history_.end(); - t += update_interval ) + t <= ( update_history_.end() - 1 )->t_ and it_update_hist != update_history_.end(); + t += update_interval ) { if ( it_update_hist->t_ == t ) { diff --git a/nestkernel/event_delivery_manager.cpp b/nestkernel/event_delivery_manager.cpp index 8ba790b740..91f0c14684 100644 --- a/nestkernel/event_delivery_manager.cpp +++ b/nestkernel/event_delivery_manager.cpp @@ -771,7 +771,7 @@ EventDeliveryManager::deliver_events_( const size_t tid, const std::vector< Spik } } } // if-else not compressed - } // for rank + } // for rank } @@ -1003,7 +1003,7 @@ EventDeliveryManager::collocate_target_data_buffers_( const size_t tid, } return is_source_table_read; } // of else - } // of while(true) + } // of while(true) } bool diff --git a/nestkernel/layer_impl.h b/nestkernel/layer_impl.h index e24f571d08..81afe7741b 100644 --- a/nestkernel/layer_impl.h +++ b/nestkernel/layer_impl.h @@ -197,8 +197,8 @@ Layer< D >::do_get_global_positions_ntree_( NodeCollectionPTR node_collection ) typename std::insert_iterator< Ntree< D, size_t > > to = std::inserter( *cached_ntree_, cached_ntree_->end() ); for ( typename std::vector< std::pair< Position< D >, size_t > >::iterator from = cached_vector_->begin(); - from != cached_vector_->end(); - ++from ) + from != cached_vector_->end(); + ++from ) { *to = *from; } @@ -265,7 +265,7 @@ Layer< D >::get_global_positions_vector( const MaskDatum& mask, std::vector< std::pair< Position< D >, size_t > > positions; 
for ( typename Ntree< D, size_t >::masked_iterator iter = masked_layer.begin( anchor ); iter != masked_layer.end(); - ++iter ) + ++iter ) { positions.push_back( *iter ); } @@ -294,8 +294,8 @@ void Layer< D >::dump_nodes( std::ostream& out ) const { for ( NodeCollection::const_iterator it = this->node_collection_->rank_local_begin(); - it < this->node_collection_->end(); - ++it ) + it < this->node_collection_->end(); + ++it ) { out << ( *it ).node_id << ' '; get_position( ( *it ).nc_index ).print( out ); diff --git a/nestkernel/music_manager.cpp b/nestkernel/music_manager.cpp index ec237eca7d..3d68048329 100644 --- a/nestkernel/music_manager.cpp +++ b/nestkernel/music_manager.cpp @@ -256,15 +256,15 @@ void MUSICManager::publish_music_in_ports_() { for ( std::map< std::string, MusicEventHandler >::iterator it = music_event_in_portmap_.begin(); - it != music_event_in_portmap_.end(); - ++it ) + it != music_event_in_portmap_.end(); + ++it ) { it->second.publish_port(); } for ( std::map< std::string, MusicRateInHandler >::iterator it = music_rate_in_portmap_.begin(); - it != music_rate_in_portmap_.end(); - ++it ) + it != music_rate_in_portmap_.end(); + ++it ) { it->second.publish_port(); } @@ -274,15 +274,15 @@ void MUSICManager::update_music_event_handlers( Time const& origin, const long from, const long to ) { for ( std::map< std::string, MusicEventHandler >::iterator it = music_event_in_portmap_.begin(); - it != music_event_in_portmap_.end(); - ++it ) + it != music_event_in_portmap_.end(); + ++it ) { it->second.update( origin, from, to ); } for ( std::map< std::string, MusicRateInHandler >::iterator it = music_rate_in_portmap_.begin(); - it != music_rate_in_portmap_.end(); - ++it ) + it != music_rate_in_portmap_.end(); + ++it ) { it->second.update( origin, from, to ); } diff --git a/nestkernel/nest_time.h b/nestkernel/nest_time.h index f579af5659..ee88ed2fe9 100644 --- a/nestkernel/nest_time.h +++ b/nestkernel/nest_time.h @@ -297,21 +297,21 @@ class Time Time( tic t ) 
: tics( ( time_abs( t.t ) < LIM_MAX.tics ) ? t.t - : ( t.t < 0 ) ? LIM_NEG_INF.tics + : ( t.t < 0 ) ? LIM_NEG_INF.tics : LIM_POS_INF.tics ) { } Time( step t ) : tics( ( time_abs( t.t ) < LIM_MAX.steps ) ? t.t * Range::TICS_PER_STEP - : ( t.t < 0 ) ? LIM_NEG_INF.tics + : ( t.t < 0 ) ? LIM_NEG_INF.tics : LIM_POS_INF.tics ) { } Time( ms t ) : tics( ( time_abs( t.t ) < LIM_MAX.ms ) ? static_cast< tic_t >( t.t * Range::TICS_PER_MS + 0.5 ) - : ( t.t < 0 ) ? LIM_NEG_INF.tics + : ( t.t < 0 ) ? LIM_NEG_INF.tics : LIM_POS_INF.tics ) { } diff --git a/nestkernel/nestmodule.cpp b/nestkernel/nestmodule.cpp index 56fa7ec848..90832964f0 100644 --- a/nestkernel/nestmodule.cpp +++ b/nestkernel/nestmodule.cpp @@ -2007,8 +2007,8 @@ NestModule::SelectNodesByMask_g_a_MFunction::execute( SLIInterpreter* i ) const MaskedLayer< 2 > ml = MaskedLayer< 2 >( *layer, mask, false, layer_nc ); for ( Ntree< 2, size_t >::masked_iterator it = ml.begin( Position< 2 >( anchor[ 0 ], anchor[ 1 ] ) ); - it != ml.end(); - ++it ) + it != ml.end(); + ++it ) { mask_node_ids.push_back( it->second ); } @@ -2024,8 +2024,8 @@ NestModule::SelectNodesByMask_g_a_MFunction::execute( SLIInterpreter* i ) const MaskedLayer< 3 > ml = MaskedLayer< 3 >( *layer, mask, false, layer_nc ); for ( Ntree< 3, size_t >::masked_iterator it = ml.begin( Position< 3 >( anchor[ 0 ], anchor[ 1 ], anchor[ 2 ] ) ); - it != ml.end(); - ++it ) + it != ml.end(); + ++it ) { mask_node_ids.push_back( it->second ); } diff --git a/nestkernel/node_collection.cpp b/nestkernel/node_collection.cpp index b4323921f4..03336e2b41 100644 --- a/nestkernel/node_collection.cpp +++ b/nestkernel/node_collection.cpp @@ -543,7 +543,7 @@ NodeCollection::to_array( const std::string& selection ) const node_ids.push_back( ( *it ).node_id ); } } // end critical - } // end parallel + } // end parallel } else { diff --git a/nestkernel/node_manager.cpp b/nestkernel/node_manager.cpp index 03a637494f..735e5620ef 100644 --- a/nestkernel/node_manager.cpp +++ 
b/nestkernel/node_manager.cpp @@ -735,8 +735,8 @@ NodeManager::print( std::ostream& out ) const const double node_id_range_width = 6 + 2 * max_node_id_width; for ( std::vector< modelrange >::const_iterator it = kernel().modelrange_manager.begin(); - it != kernel().modelrange_manager.end(); - ++it ) + it != kernel().modelrange_manager.end(); + ++it ) { const size_t first_node_id = it->get_first_node_id(); const size_t last_node_id = it->get_last_node_id(); diff --git a/nestkernel/parameter.h b/nestkernel/parameter.h index dfae3cfcb9..de71a57d35 100644 --- a/nestkernel/parameter.h +++ b/nestkernel/parameter.h @@ -761,7 +761,7 @@ class ConditionalParameter : public Parameter std::shared_ptr< Parameter > if_true, std::shared_ptr< Parameter > if_false ) : Parameter( condition->is_spatial() or if_true->is_spatial() or if_false->is_spatial(), - if_true->returns_int_only() and if_false->returns_int_only() ) + if_true->returns_int_only() and if_false->returns_int_only() ) , condition_( condition ) , if_true_( if_true ) , if_false_( if_false ) diff --git a/nestkernel/position.h b/nestkernel/position.h index 812239be92..9a33a67a93 100644 --- a/nestkernel/position.h +++ b/nestkernel/position.h @@ -805,7 +805,8 @@ Position< D, T >::length() const } template < int D, class T > -Position< D, T >::operator std::string() const +Position< D, T >:: +operator std::string() const { std::stringstream ss; ss << *this; diff --git a/nestkernel/ring_buffer_impl.h b/nestkernel/ring_buffer_impl.h index 76db8a6fb6..668fed04ca 100644 --- a/nestkernel/ring_buffer_impl.h +++ b/nestkernel/ring_buffer_impl.h @@ -28,7 +28,7 @@ template < unsigned int num_channels > nest::MultiChannelInputBuffer< num_channels >::MultiChannelInputBuffer() : buffer_( kernel().connection_manager.get_min_delay() + kernel().connection_manager.get_max_delay(), - std::array< double, num_channels >() ) + std::array< double, num_channels >() ) { } diff --git a/nestkernel/simulation_manager.cpp 
b/nestkernel/simulation_manager.cpp index 44aeec3efd..3f0147ffd4 100644 --- a/nestkernel/simulation_manager.cpp +++ b/nestkernel/simulation_manager.cpp @@ -940,8 +940,8 @@ nest::SimulationManager::update_() // this loop may be empty for those threads // that do not have any nodes requiring wfr_update for ( std::vector< Node* >::const_iterator i = thread_local_wfr_nodes.begin(); - i != thread_local_wfr_nodes.end(); - ++i ) + i != thread_local_wfr_nodes.end(); + ++i ) { done_p = wfr_update_( *i ) and done_p; } @@ -1008,8 +1008,8 @@ nest::SimulationManager::update_() { #pragma omp barrier for ( SparseNodeArray::const_iterator i = kernel().node_manager.get_local_nodes( tid ).begin(); - i != kernel().node_manager.get_local_nodes( tid ).end(); - ++i ) + i != kernel().node_manager.get_local_nodes( tid ).end(); + ++i ) { Node* node = i->get_node(); node->update_synaptic_elements( Time( Time::step( clock_.get_steps() + from_step_ ) ).get_ms() ); @@ -1023,8 +1023,8 @@ nest::SimulationManager::update_() } // Remove 10% of the vacant elements for ( SparseNodeArray::const_iterator i = kernel().node_manager.get_local_nodes( tid ).begin(); - i != kernel().node_manager.get_local_nodes( tid ).end(); - ++i ) + i != kernel().node_manager.get_local_nodes( tid ).end(); + ++i ) { Node* node = i->get_node(); node->decay_synaptic_elements_vacant(); @@ -1122,8 +1122,8 @@ nest::SimulationManager::update_() // End of the slice, we update the number of synaptic elements for ( SparseNodeArray::const_iterator i = kernel().node_manager.get_local_nodes( tid ).begin(); - i != kernel().node_manager.get_local_nodes( tid ).end(); - ++i ) + i != kernel().node_manager.get_local_nodes( tid ).end(); + ++i ) { Node* node = i->get_node(); node->update_synaptic_elements( Time( Time::step( clock_.get_steps() + to_step_ ) ).get_ms() ); diff --git a/nestkernel/sonata_connector.cpp b/nestkernel/sonata_connector.cpp index b44ae89640..6320e437d3 100644 --- a/nestkernel/sonata_connector.cpp +++ 
b/nestkernel/sonata_connector.cpp @@ -479,7 +479,7 @@ SonataConnector::connect_chunk_( const hsize_t hyperslab_size, const hsize_t off weight ); } // end for - } // end try + } // end try catch ( std::exception& err ) { diff --git a/nestkernel/source_table.cpp b/nestkernel/source_table.cpp index 5be1b6fa4e..fd7bd75d76 100644 --- a/nestkernel/source_table.cpp +++ b/nestkernel/source_table.cpp @@ -252,8 +252,8 @@ nest::SourceTable::compute_buffer_pos_for_unique_secondary_sources( const size_t if ( not is_primary ) { for ( BlockVector< Source >::const_iterator source_cit = sources_[ tid ][ syn_id ].begin(); - source_cit != sources_[ tid ][ syn_id ].end(); - ++source_cit ) + source_cit != sources_[ tid ][ syn_id ].end(); + ++source_cit ) { #pragma omp critical { @@ -274,8 +274,8 @@ nest::SourceTable::compute_buffer_pos_for_unique_secondary_sources( const size_t for ( std::set< std::pair< size_t, size_t > >::const_iterator cit = ( *unique_secondary_source_node_id_syn_id ).begin(); - cit != ( *unique_secondary_source_node_id_syn_id ).end(); - ++cit ) + cit != ( *unique_secondary_source_node_id_syn_id ).end(); + ++cit ) { const size_t source_rank = kernel().mpi_manager.get_process_id_of_node_id( cit->first ); const size_t event_size = kernel().model_manager.get_secondary_event_prototype( cit->second, tid )->size(); @@ -561,7 +561,7 @@ nest::SourceTable::fill_compressed_spike_data( for ( synindex syn_id = 0; syn_id < kernel().model_manager.get_num_connection_models(); ++syn_id ) { for ( size_t target_thread = 0; target_thread < static_cast< size_t >( compressible_sources_.size() ); - ++target_thread ) + ++target_thread ) { for ( const auto& connection : compressible_sources_[ target_thread ][ syn_id ] ) { @@ -589,7 +589,7 @@ nest::SourceTable::fill_compressed_spike_data( compressible_sources_[ target_thread ][ syn_id ].clear(); } // for target_thread - } // for syn_id + } // for syn_id } // Argument name only needed if full logging is activated. 
Macro-protect to avoid unused argument warning. @@ -598,8 +598,7 @@ nest::SourceTable::dump_compressed_spike_data( const std::vector< std::vector< std::vector< SpikeData > > >& FULL_LOGGING_ONLY( compressed_spike_data ) ) const { FULL_LOGGING_ONLY( - for ( const auto& tab - : compressed_spike_data_map_ ) { + for ( const auto& tab : compressed_spike_data_map_ ) { for ( const auto& entry : tab ) { kernel().write_to_dump( String::compose( "csdm : r%1 t%2 s%3 sx%4 tt%5", @@ -611,8 +610,7 @@ nest::SourceTable::dump_compressed_spike_data( } } - for ( const auto& tab - : compressed_spike_data ) { + for ( const auto& tab : compressed_spike_data ) { for ( size_t six = 0; six < tab.size(); ++six ) { for ( size_t tx = 0; tx < tab[ six ].size(); ++tx ) diff --git a/nestkernel/source_table.h b/nestkernel/source_table.h index 55f48756ba..4e3618439e 100644 --- a/nestkernel/source_table.h +++ b/nestkernel/source_table.h @@ -500,8 +500,8 @@ SourceTable::num_unique_sources( const size_t tid, const synindex syn_id ) const size_t n = 0; size_t last_source = 0; for ( BlockVector< Source >::const_iterator cit = sources_[ tid ][ syn_id ].begin(); - cit != sources_[ tid ][ syn_id ].end(); - ++cit ) + cit != sources_[ tid ][ syn_id ].end(); + ++cit ) { if ( last_source != ( *cit ).get_node_id() ) { diff --git a/nestkernel/stopwatch_impl.h b/nestkernel/stopwatch_impl.h index 2fac293267..7853d1a66d 100644 --- a/nestkernel/stopwatch_impl.h +++ b/nestkernel/stopwatch_impl.h @@ -56,7 +56,7 @@ Stopwatch< detailed_timer, threaded_timer >::start() cputime_timer_.start(); } } // use_timer_array - } // enable_timer + } // enable_timer } template < StopwatchGranularity detailed_timer, StopwatchParallelism threaded_timer > @@ -79,7 +79,7 @@ Stopwatch< detailed_timer, threaded_timer >::stop() cputime_timer_.stop(); } } // use_timer_array - } // enable_timer + } // enable_timer } template < StopwatchGranularity detailed_timer, StopwatchParallelism threaded_timer > @@ -156,7 +156,7 @@ Stopwatch< 
detailed_timer, threaded_timer >::print( const std::string& msg, walltime_timer_.print( msg, timeunit, os ); } } // use_timer_array - } // enable_timer + } // enable_timer } template < StopwatchGranularity detailed_timer, StopwatchParallelism threaded_timer > @@ -189,7 +189,7 @@ Stopwatch< detailed_timer, threaded_timer >::get_status( DictionaryDatum& d, def< double >( d, walltime_name, walltime_timer_.elapsed() ); def< double >( d, cputime_name, cputime_timer_.elapsed() ); } // use_timer_array - } // enable_timer + } // enable_timer } template < StopwatchGranularity detailed_timer, StopwatchParallelism threaded_timer > @@ -218,7 +218,7 @@ Stopwatch< detailed_timer, threaded_timer >::reset() cputime_timer_.reset(); } } // use_timer_array - } // enable_timer + } // enable_timer } } // namespace nest diff --git a/nestkernel/structural_plasticity_node.cpp b/nestkernel/structural_plasticity_node.cpp index 07a3af88e4..e590849d83 100644 --- a/nestkernel/structural_plasticity_node.cpp +++ b/nestkernel/structural_plasticity_node.cpp @@ -63,8 +63,8 @@ nest::StructuralPlasticityNode::get_status( DictionaryDatum& d ) const synaptic_elements_d = DictionaryDatum( new Dictionary ); def< DictionaryDatum >( d, names::synaptic_elements, synaptic_elements_d ); for ( std::map< Name, SynapticElement >::const_iterator it = synaptic_elements_map_.begin(); - it != synaptic_elements_map_.end(); - ++it ) + it != synaptic_elements_map_.end(); + ++it ) { synaptic_element_d = DictionaryDatum( new Dictionary ); def< DictionaryDatum >( synaptic_elements_d, it->first, synaptic_element_d ); @@ -116,8 +116,8 @@ nest::StructuralPlasticityNode::set_status( const DictionaryDatum& d ) const DictionaryDatum synaptic_elements_dict = getValue< DictionaryDatum >( d, names::synaptic_elements_param ); for ( std::map< Name, SynapticElement >::iterator it = synaptic_elements_map_.begin(); - it != synaptic_elements_map_.end(); - ++it ) + it != synaptic_elements_map_.end(); + ++it ) { if ( 
synaptic_elements_dict->known( it->first ) ) { @@ -214,8 +214,8 @@ nest::StructuralPlasticityNode::get_synaptic_elements() const std::map< Name, double > n_map; for ( std::map< Name, SynapticElement >::const_iterator it = synaptic_elements_map_.begin(); - it != synaptic_elements_map_.end(); - ++it ) + it != synaptic_elements_map_.end(); + ++it ) { n_map.insert( std::pair< Name, double >( it->first, get_synaptic_elements( it->first ) ) ); } @@ -228,8 +228,8 @@ nest::StructuralPlasticityNode::update_synaptic_elements( double t ) assert( t >= Ca_t_ ); for ( std::map< Name, SynapticElement >::iterator it = synaptic_elements_map_.begin(); - it != synaptic_elements_map_.end(); - ++it ) + it != synaptic_elements_map_.end(); + ++it ) { it->second.update( t, Ca_t_, Ca_minus_, tau_Ca_ ); } @@ -242,8 +242,8 @@ void nest::StructuralPlasticityNode::decay_synaptic_elements_vacant() { for ( std::map< Name, SynapticElement >::iterator it = synaptic_elements_map_.begin(); - it != synaptic_elements_map_.end(); - ++it ) + it != synaptic_elements_map_.end(); + ++it ) { it->second.decay_z_vacant(); } diff --git a/nestkernel/target_table.cpp b/nestkernel/target_table.cpp index d9d06a8c4e..f17375cf2d 100644 --- a/nestkernel/target_table.cpp +++ b/nestkernel/target_table.cpp @@ -71,8 +71,8 @@ void nest::TargetTable::compress_secondary_send_buffer_pos( const size_t tid ) { for ( std::vector< std::vector< std::vector< size_t > > >::iterator it = secondary_send_buffer_pos_[ tid ].begin(); - it != secondary_send_buffer_pos_[ tid ].end(); - ++it ) + it != secondary_send_buffer_pos_[ tid ].end(); + ++it ) { for ( std::vector< std::vector< size_t > >::iterator iit = it->begin(); iit != it->end(); ++iit ) { diff --git a/nestkernel/target_table_devices.cpp b/nestkernel/target_table_devices.cpp index da106e9b43..940695d22b 100644 --- a/nestkernel/target_table_devices.cpp +++ b/nestkernel/target_table_devices.cpp @@ -158,8 +158,8 @@ nest::TargetTableDevices::get_connections_from_devices_( const 
size_t requested_ std::deque< ConnectionID >& conns ) const { for ( std::vector< size_t >::const_iterator it = sending_devices_node_ids_[ tid ].begin(); - it != sending_devices_node_ids_[ tid ].end(); - ++it ) + it != sending_devices_node_ids_[ tid ].end(); + ++it ) { const size_t source_node_id = *it; if ( source_node_id > 0 and ( requested_source_node_id == source_node_id or requested_source_node_id == 0 ) ) diff --git a/nestkernel/target_table_devices.h b/nestkernel/target_table_devices.h index de6d31db3d..ca147f01cb 100644 --- a/nestkernel/target_table_devices.h +++ b/nestkernel/target_table_devices.h @@ -236,8 +236,8 @@ TargetTableDevices::send_from_device( const size_t tid, const std::vector< ConnectorModel* >& cm ) { for ( std::vector< ConnectorBase* >::iterator it = target_from_devices_[ tid ][ ldid ].begin(); - it != target_from_devices_[ tid ][ ldid ].end(); - ++it ) + it != target_from_devices_[ tid ][ ldid ].end(); + ++it ) { if ( *it ) { diff --git a/nestkernel/target_table_devices_impl.h b/nestkernel/target_table_devices_impl.h index 26a4668fb1..53d052329d 100644 --- a/nestkernel/target_table_devices_impl.h +++ b/nestkernel/target_table_devices_impl.h @@ -80,8 +80,8 @@ nest::TargetTableDevices::send_to_device( const size_t tid, { const size_t lid = kernel().vp_manager.node_id_to_lid( source_node_id ); for ( std::vector< ConnectorBase* >::iterator it = target_to_devices_[ tid ][ lid ].begin(); - it != target_to_devices_[ tid ][ lid ].end(); - ++it ) + it != target_to_devices_[ tid ][ lid ].end(); + ++it ) { if ( *it ) { diff --git a/pyproject.toml b/pyproject.toml index 26a94cb932..f9b4efabaa 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,3 +14,40 @@ known_third_party = "nest" [tool.black] line-length = 120 + + +[tool.mypy] +# For configuation details see +# +python_version = "3.10" +warn_return_any = true +warn_unused_configs = true +exclude = [ + '.git/', + '.snakemake/', + '.pytest_cache/', + '^sync-test-env/', + '^conda/', + '^env/', + 
'^doc/htmldoc/developer_space/templates/pynest_example_template\.py$', +] +exclude_gitignore = true +explicit_package_bases = true + +# Check explanations of error codes +# https://mypy.readthedocs.io/en/stable/error_code_list.html +disable_error_code = [ + "attr-defined", + "index", + "import", + "call-arg", + "misc", + "dict-item", + "arg-type", + "operator", + "call-overload", + "assignment", + "list-item", + "var-annotated", + "valid-type", +] diff --git a/requirements_testing.txt b/requirements_testing.txt index c4d4b95fa0..4caeffe599 100644 --- a/requirements_testing.txt +++ b/requirements_testing.txt @@ -21,6 +21,7 @@ pytest-mypy pytest-cov data-science-types terminaltables +pre-commit pycodestyle pydocstyle rstcheck diff --git a/sli/booldatum.cc b/sli/booldatum.cc index 3a70a98e7b..0102f3c12f 100644 --- a/sli/booldatum.cc +++ b/sli/booldatum.cc @@ -35,12 +35,14 @@ BoolDatum::BoolDatum( const Name& val ) d = ( val == Name( true_string ) ); } -BoolDatum::operator Name() const +BoolDatum:: +operator Name() const { return ( d ? Name( true_string ) : Name( false_string ) ); } -BoolDatum::operator std::string() const +BoolDatum:: +operator std::string() const { return ( d ? 
std::string( true_string ) : std::string( false_string ) ); } diff --git a/sli/token.cc b/sli/token.cc index d565c5de41..63f33268cf 100644 --- a/sli/token.cc +++ b/sli/token.cc @@ -117,27 +117,32 @@ Token::operator Datum* () const } */ -Token::operator long() const +Token:: +operator long() const { return getValue< long >( *this ); } -Token::operator size_t() const +Token:: +operator size_t() const { return getValue< long >( *this ); } -Token::operator double() const +Token:: +operator double() const { return getValue< double >( *this ); } -Token::operator bool() const +Token:: +operator bool() const { return getValue< bool >( *this ); } -Token::operator std::string() const +Token:: +operator std::string() const { return getValue< std::string >( *this ); } diff --git a/testsuite/pytests/__init__.py b/testsuite/pytests/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/testsuite/pytests/mpi/__init__.py b/testsuite/pytests/mpi/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/testsuite/pytests/mpi/nproc2/__init__.py b/testsuite/pytests/mpi/nproc2/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/testsuite/pytests/mpi/2/connect_test_base.py b/testsuite/pytests/mpi/nproc2/connect_test_base.py similarity index 100% rename from testsuite/pytests/mpi/2/connect_test_base.py rename to testsuite/pytests/mpi/nproc2/connect_test_base.py diff --git a/testsuite/pytests/mpi/2/test_connect_all_to_all.py b/testsuite/pytests/mpi/nproc2/test_connect_all_to_all.py similarity index 100% rename from testsuite/pytests/mpi/2/test_connect_all_to_all.py rename to testsuite/pytests/mpi/nproc2/test_connect_all_to_all.py diff --git a/testsuite/pytests/mpi/2/test_connect_arrays_mpi.py b/testsuite/pytests/mpi/nproc2/test_connect_arrays_mpi.py similarity index 100% rename from testsuite/pytests/mpi/2/test_connect_arrays_mpi.py rename to testsuite/pytests/mpi/nproc2/test_connect_arrays_mpi.py diff --git 
a/testsuite/pytests/mpi/2/test_connect_fixed_indegree.py b/testsuite/pytests/mpi/nproc2/test_connect_fixed_indegree.py similarity index 100% rename from testsuite/pytests/mpi/2/test_connect_fixed_indegree.py rename to testsuite/pytests/mpi/nproc2/test_connect_fixed_indegree.py diff --git a/testsuite/pytests/mpi/2/test_connect_fixed_outdegree.py b/testsuite/pytests/mpi/nproc2/test_connect_fixed_outdegree.py similarity index 100% rename from testsuite/pytests/mpi/2/test_connect_fixed_outdegree.py rename to testsuite/pytests/mpi/nproc2/test_connect_fixed_outdegree.py diff --git a/testsuite/pytests/mpi/2/test_connect_fixed_total_number.py b/testsuite/pytests/mpi/nproc2/test_connect_fixed_total_number.py similarity index 100% rename from testsuite/pytests/mpi/2/test_connect_fixed_total_number.py rename to testsuite/pytests/mpi/nproc2/test_connect_fixed_total_number.py diff --git a/testsuite/pytests/mpi/2/test_connect_one_to_one.py b/testsuite/pytests/mpi/nproc2/test_connect_one_to_one.py similarity index 100% rename from testsuite/pytests/mpi/2/test_connect_one_to_one.py rename to testsuite/pytests/mpi/nproc2/test_connect_one_to_one.py diff --git a/testsuite/pytests/mpi/2/test_connect_pairwise_bernoulli.py b/testsuite/pytests/mpi/nproc2/test_connect_pairwise_bernoulli.py similarity index 100% rename from testsuite/pytests/mpi/2/test_connect_pairwise_bernoulli.py rename to testsuite/pytests/mpi/nproc2/test_connect_pairwise_bernoulli.py diff --git a/testsuite/pytests/mpi/2/test_connect_symmetric_pairwise_bernoulli.py b/testsuite/pytests/mpi/nproc2/test_connect_symmetric_pairwise_bernoulli.py similarity index 100% rename from testsuite/pytests/mpi/2/test_connect_symmetric_pairwise_bernoulli.py rename to testsuite/pytests/mpi/nproc2/test_connect_symmetric_pairwise_bernoulli.py diff --git a/testsuite/pytests/mpi/2/test_getnodes.py b/testsuite/pytests/mpi/nproc2/test_getnodes.py similarity index 100% rename from testsuite/pytests/mpi/2/test_getnodes.py rename to 
testsuite/pytests/mpi/nproc2/test_getnodes.py diff --git a/testsuite/pytests/mpi/2/test_issue_1974.py b/testsuite/pytests/mpi/nproc2/test_issue_1974.py similarity index 100% rename from testsuite/pytests/mpi/2/test_issue_1974.py rename to testsuite/pytests/mpi/nproc2/test_issue_1974.py diff --git a/testsuite/pytests/mpi/2/test_issue_3099.py b/testsuite/pytests/mpi/nproc2/test_issue_3099.py similarity index 100% rename from testsuite/pytests/mpi/2/test_issue_3099.py rename to testsuite/pytests/mpi/nproc2/test_issue_3099.py diff --git a/testsuite/pytests/mpi/2/test_issue_3108.py b/testsuite/pytests/mpi/nproc2/test_issue_3108.py similarity index 100% rename from testsuite/pytests/mpi/2/test_issue_3108.py rename to testsuite/pytests/mpi/nproc2/test_issue_3108.py diff --git a/testsuite/pytests/mpi/2/test_issue_576.py b/testsuite/pytests/mpi/nproc2/test_issue_576.py similarity index 100% rename from testsuite/pytests/mpi/2/test_issue_576.py rename to testsuite/pytests/mpi/nproc2/test_issue_576.py diff --git a/testsuite/pytests/mpi/2/test_multiplicity.py b/testsuite/pytests/mpi/nproc2/test_multiplicity.py similarity index 100% rename from testsuite/pytests/mpi/2/test_multiplicity.py rename to testsuite/pytests/mpi/nproc2/test_multiplicity.py diff --git a/testsuite/pytests/mpi/2/test_neuron_vp.py b/testsuite/pytests/mpi/nproc2/test_neuron_vp.py similarity index 100% rename from testsuite/pytests/mpi/2/test_neuron_vp.py rename to testsuite/pytests/mpi/nproc2/test_neuron_vp.py diff --git a/testsuite/pytests/mpi/2/test_spatial_positions.py b/testsuite/pytests/mpi/nproc2/test_spatial_positions.py similarity index 100% rename from testsuite/pytests/mpi/2/test_spatial_positions.py rename to testsuite/pytests/mpi/nproc2/test_spatial_positions.py diff --git a/testsuite/pytests/mpi/2/test_synapsecollection_mpi.py b/testsuite/pytests/mpi/nproc2/test_synapsecollection_mpi.py similarity index 100% rename from testsuite/pytests/mpi/2/test_synapsecollection_mpi.py rename to 
testsuite/pytests/mpi/nproc2/test_synapsecollection_mpi.py diff --git a/testsuite/pytests/mpi/nproc3/__init__.py b/testsuite/pytests/mpi/nproc3/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/testsuite/pytests/mpi/3/test_issue_3108.py b/testsuite/pytests/mpi/nproc3/test_issue_3108.py similarity index 100% rename from testsuite/pytests/mpi/3/test_issue_3108.py rename to testsuite/pytests/mpi/nproc3/test_issue_3108.py diff --git a/testsuite/pytests/mpi/3/test_neuron_vp.py b/testsuite/pytests/mpi/nproc3/test_neuron_vp.py similarity index 100% rename from testsuite/pytests/mpi/3/test_neuron_vp.py rename to testsuite/pytests/mpi/nproc3/test_neuron_vp.py diff --git a/testsuite/pytests/mpi/nproc4/__init__.py b/testsuite/pytests/mpi/nproc4/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/testsuite/pytests/mpi/4/test_consistent_local_vps.py b/testsuite/pytests/mpi/nproc4/test_consistent_local_vps.py similarity index 100% rename from testsuite/pytests/mpi/4/test_consistent_local_vps.py rename to testsuite/pytests/mpi/nproc4/test_consistent_local_vps.py diff --git a/testsuite/pytests/mpi/4/test_delay_exchange.py b/testsuite/pytests/mpi/nproc4/test_delay_exchange.py similarity index 100% rename from testsuite/pytests/mpi/4/test_delay_exchange.py rename to testsuite/pytests/mpi/nproc4/test_delay_exchange.py diff --git a/testsuite/pytests/mpi/4/test_getnodes.py b/testsuite/pytests/mpi/nproc4/test_getnodes.py similarity index 100% rename from testsuite/pytests/mpi/4/test_getnodes.py rename to testsuite/pytests/mpi/nproc4/test_getnodes.py diff --git a/testsuite/pytests/mpi/4/test_issue_1974.py b/testsuite/pytests/mpi/nproc4/test_issue_1974.py similarity index 100% rename from testsuite/pytests/mpi/4/test_issue_1974.py rename to testsuite/pytests/mpi/nproc4/test_issue_1974.py diff --git a/testsuite/pytests/mpi/4/test_issue_3108.py b/testsuite/pytests/mpi/nproc4/test_issue_3108.py similarity index 100% rename from 
testsuite/pytests/mpi/4/test_issue_3108.py rename to testsuite/pytests/mpi/nproc4/test_issue_3108.py diff --git a/testsuite/pytests/mpi/4/test_localonly.py b/testsuite/pytests/mpi/nproc4/test_localonly.py similarity index 100% rename from testsuite/pytests/mpi/4/test_localonly.py rename to testsuite/pytests/mpi/nproc4/test_localonly.py diff --git a/testsuite/pytests/mpi/4/test_set_status_resolution_nthreads.py b/testsuite/pytests/mpi/nproc4/test_set_status_resolution_nthreads.py similarity index 100% rename from testsuite/pytests/mpi/4/test_set_status_resolution_nthreads.py rename to testsuite/pytests/mpi/nproc4/test_set_status_resolution_nthreads.py diff --git a/testsuite/pytests/mpi/4/test_spatial_positions.py b/testsuite/pytests/mpi/nproc4/test_spatial_positions.py similarity index 100% rename from testsuite/pytests/mpi/4/test_spatial_positions.py rename to testsuite/pytests/mpi/nproc4/test_spatial_positions.py diff --git a/testsuite/pytests/mpi/4/test_spike_train_injector_mpi.py b/testsuite/pytests/mpi/nproc4/test_spike_train_injector_mpi.py similarity index 100% rename from testsuite/pytests/mpi/4/test_spike_train_injector_mpi.py rename to testsuite/pytests/mpi/nproc4/test_spike_train_injector_mpi.py diff --git a/testsuite/pytests/sli2py_regressions/test_ticket_638.py b/testsuite/pytests/sli2py_regressions/test_ticket_638.py index fc26de9859..afd803db95 100644 --- a/testsuite/pytests/sli2py_regressions/test_ticket_638.py +++ b/testsuite/pytests/sli2py_regressions/test_ticket_638.py @@ -43,7 +43,7 @@ def _run_simulation(model: str, t_ref: float) -> int: nest.Connect(neuron, spike_recorder) nest.Simulate(100.0) - n_events = spike_recorder.get("n_events") + n_events: int = spike_recorder.get("n_events") return n_events diff --git a/testsuite/pytests/sli2py_regressions/test_ticket_673.py b/testsuite/pytests/sli2py_regressions/test_ticket_673.py index 29c9843c31..9beff56370 100644 --- a/testsuite/pytests/sli2py_regressions/test_ticket_673.py +++ 
b/testsuite/pytests/sli2py_regressions/test_ticket_673.py @@ -46,7 +46,7 @@ def _second_spike_time(model: str, t_ref: float) -> float: nest.Simulate(20.0) events = nest.GetStatus(spike_recorder, "events")[0] - times = events["times"] + times: list[float] = events["times"] assert len(times) > 1, f"Model {model} did not fire at least twice." diff --git a/testsuite/pytests/test_connect_pairwise_poisson.py b/testsuite/pytests/test_connect_pairwise_poisson.py index f7a2c693ca..ab7138647e 100644 --- a/testsuite/pytests/test_connect_pairwise_poisson.py +++ b/testsuite/pytests/test_connect_pairwise_poisson.py @@ -26,7 +26,6 @@ import nest import numpy as np import scipy.stats -from connect_test_base import get_connectivity_matrix HAVE_OPENMP = nest.ll_api.sli_func("is_threaded") diff --git a/testsuite/pytests/test_sp/__init__.py b/testsuite/pytests/test_sp/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/testsuite/pytests/test_spatial/__init__.py b/testsuite/pytests/test_spatial/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/testsuite/pytests/test_spatial/spatial_test_references/__init__.py b/testsuite/pytests/test_spatial/spatial_test_references/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/testsuite/pytests/test_spatial/spatial_test_references/circular/__init__.py b/testsuite/pytests/test_spatial/spatial_test_references/circular/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/testsuite/pytests/test_spatial/spatial_test_references/free/__init__.py b/testsuite/pytests/test_spatial/spatial_test_references/free/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/testsuite/pytests/test_spatial/spatial_test_references/free/circular/__init__.py b/testsuite/pytests/test_spatial/spatial_test_references/free/circular/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git 
a/testsuite/pytests/test_spatial/spatial_test_references/free/doughnut/__init__.py b/testsuite/pytests/test_spatial/spatial_test_references/free/doughnut/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/testsuite/pytests/test_spatial/spatial_test_references/free/rectangular/__init__.py b/testsuite/pytests/test_spatial/spatial_test_references/free/rectangular/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/testsuite/pytests/test_spatial/spatial_test_references/grid/__init__.py b/testsuite/pytests/test_spatial/spatial_test_references/grid/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/testsuite/pytests/test_spatial/spatial_test_references/grid/circular/__init__.py b/testsuite/pytests/test_spatial/spatial_test_references/grid/circular/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/testsuite/pytests/test_spatial/spatial_test_references/grid/doughnut/__init__.py b/testsuite/pytests/test_spatial/spatial_test_references/grid/doughnut/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/testsuite/pytests/test_spatial/spatial_test_references/grid/grid/__init__.py b/testsuite/pytests/test_spatial/spatial_test_references/grid/grid/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/testsuite/pytests/test_spatial/spatial_test_references/grid/rectangular/__init__.py b/testsuite/pytests/test_spatial/spatial_test_references/grid/rectangular/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/testsuite/pytests/utilities/__init__.py b/testsuite/pytests/utilities/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/testsuite/regressiontests/sli2py_ignore/issue-779-1016.py b/testsuite/regressiontests/sli2py_ignore/issue-779-1016.py index e182bc8cdd..81f6f1f426 100644 --- a/testsuite/regressiontests/sli2py_ignore/issue-779-1016.py +++ 
b/testsuite/regressiontests/sli2py_ignore/issue-779-1016.py @@ -20,8 +20,8 @@ # along with NEST. If not, see . """ -This script ensures that NEST parses commandline arguments correctly -and makes all of them available in the argv array in the statusdict. +Ensures that NEST parses commandline arguments correctly and makes all of them +available in the argv array in the statusdict. This is a regression test for GitHub issues 779 and 1016. """ diff --git a/testsuite/run_test.sh b/testsuite/run_test.sh index 4cb3f96d0f..e7dab63089 100755 --- a/testsuite/run_test.sh +++ b/testsuite/run_test.sh @@ -156,7 +156,7 @@ run_test () echo "${explanation}" - if test "x${msg_error}" != x ; then + if test -n "${msg_error}"; then echo ================================================== echo "Following is the full output of the test:" echo ================================================== @@ -170,7 +170,7 @@ run_test () junit_write "${junit_class}" "${junit_name}" "${junit_status}" "${junit_failure}" "$(cat "${TEST_OUTFILE}")" # Panic on "unexpected" exit code - if test "x${unexpected_exitcode:-}" != x ; then + if test -n "${unexpected_exitcode:-}"; then echo "***" echo "*** An unexpected exit code usually hints at a bug in the test suite!" exit 2 diff --git a/testsuite/summarize_tests.py b/testsuite/summarize_tests.py index 5f084c05c7..bacd21e97f 100644 --- a/testsuite/summarize_tests.py +++ b/testsuite/summarize_tests.py @@ -49,9 +49,9 @@ "05 mpitests": 1, # set to 1 to avoid complications during porting to Pytest "06 musictests": 1, "07 pynesttests": 3719, # without thread-dependent cases - "07 pynesttests mpi 2": (230, 172), # first case without thread-dependent cases - "07 pynesttests mpi 3": (58, 0), - "07 pynesttests mpi 4": (65, 7), + "07 pynesttests mpi nproc2": (230, 172), # first case without thread-dependent cases + "07 pynesttests mpi nproc3": (58, 0), + "07 pynesttests mpi nproc4": (65, 7), "07 pynesttests sli2py mpi": 13, "08 cpptests": 29, }