From 8c8c72503c8077ef44ea44934e463cf72ac98b17 Mon Sep 17 00:00:00 2001 From: Hans Ekkehard Plesser Date: Wed, 16 Apr 2025 00:52:38 +0200 Subject: [PATCH 01/10] Hike ubuntu version for isort to 22.04 --- .github/workflows/nestbuildmatrix.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/nestbuildmatrix.yml b/.github/workflows/nestbuildmatrix.yml index d78e75c069..240c30c743 100644 --- a/.github/workflows/nestbuildmatrix.yml +++ b/.github/workflows/nestbuildmatrix.yml @@ -358,7 +358,7 @@ jobs: pylint --jobs=$(nproc) pynest/ testsuite/pytests/*.py testsuite/regressiontests/*.py isort: - runs-on: "ubuntu-20.04" + runs-on: "ubuntu-22.04" steps: - name: "Checkout repository content" uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 From 29f52dae4cf2a73cb4485a0380506e19d71e047f Mon Sep 17 00:00:00 2001 From: Hans Ekkehard Plesser Date: Sun, 13 Jul 2025 23:07:35 +0200 Subject: [PATCH 02/10] Properly null out pointer after object deletion --- models/eprop_synapse.h | 2 +- models/eprop_synapse_bsshslm_2020.h | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/models/eprop_synapse.h b/models/eprop_synapse.h index 5509019296..b072fcb222 100644 --- a/models/eprop_synapse.h +++ b/models/eprop_synapse.h @@ -481,7 +481,7 @@ inline void eprop_synapse< targetidentifierT >::delete_optimizer() { delete optimizer_; - // do not set to nullptr to allow detection of double deletion + optimizer_ = nullptr; } template < typename targetidentifierT > diff --git a/models/eprop_synapse_bsshslm_2020.h b/models/eprop_synapse_bsshslm_2020.h index 3ac9144827..7a655f8f95 100644 --- a/models/eprop_synapse_bsshslm_2020.h +++ b/models/eprop_synapse_bsshslm_2020.h @@ -511,7 +511,7 @@ inline void eprop_synapse_bsshslm_2020< targetidentifierT >::delete_optimizer() { delete optimizer_; - // do not set to nullptr to allow detection of double deletion + optimizer_ = nullptr; } template < typename targetidentifierT > From 
b8cc35bffd91de161eea59217ed84d857d93840f Mon Sep 17 00:00:00 2001 From: Hans Ekkehard Plesser Date: Sun, 13 Jul 2025 23:19:01 +0200 Subject: [PATCH 03/10] Attempt to fix #3489 by moving conn infra update after event delivery --- nestkernel/connection_manager.cpp | 9 +++------ nestkernel/simulation_manager.cpp | 23 ++++++++++++++++------- nestkernel/sp_manager.cpp | 9 --------- 3 files changed, 19 insertions(+), 22 deletions(-) diff --git a/nestkernel/connection_manager.cpp b/nestkernel/connection_manager.cpp index 0924fb73e2..4eac92411d 100644 --- a/nestkernel/connection_manager.cpp +++ b/nestkernel/connection_manager.cpp @@ -1131,6 +1131,9 @@ nest::ConnectionManager::get_connections( const DictionaryDatum& params ) // as this may involve sorting connections by source node IDs. if ( connections_have_changed() ) { + // TODO fix-3489: Not entirely sure if we need the next two calls here, + // but ran into trouble when I took both out, I think + // We need to update min_delay because it is used by check_wfr_use() below // to set secondary event data size. update_delay_extrema_(); @@ -1139,12 +1142,6 @@ nest::ConnectionManager::get_connections( const DictionaryDatum& params ) // needs to be called before update_connection_infrastructure since // it resizes coefficient arrays for secondary events kernel().node_manager.check_wfr_use(); - -#pragma omp parallel - { - const size_t tid = kernel().vp_manager.get_thread_id(); - kernel().simulation_manager.update_connection_infrastructure( tid ); - } } // We check, whether a synapse model is given. If not, we will iterate all. 
diff --git a/nestkernel/simulation_manager.cpp b/nestkernel/simulation_manager.cpp index 9f23d7f89b..6951ce7911 100644 --- a/nestkernel/simulation_manager.cpp +++ b/nestkernel/simulation_manager.cpp @@ -544,7 +544,13 @@ nest::SimulationManager::prepare() // it resizes coefficient arrays for secondary events kernel().node_manager.check_wfr_use(); - if ( kernel().node_manager.have_nodes_changed() or kernel().connection_manager.connections_have_changed() ) + // Only update connection infrastructure if we have not simulated yet. Otherwise, + // there might still be spikes in the event delivery data structures destined for + // connections that have been deleted. See #3489. + // We must do this here once before the first simulation round because WFR otherwise + // cannot do its pre-step in the first time slice. + if ( not simulated_ + and ( kernel().node_manager.have_nodes_changed() or kernel().connection_manager.connections_have_changed() ) ) { #pragma omp parallel { @@ -1028,14 +1034,17 @@ nest::SimulationManager::update_() node->decay_synaptic_elements_vacant(); } - // after structural plasticity has created and deleted - // connections, update the connection infrastructure; implies - // complete removal of presynaptic part and reconstruction - // from postsynaptic data - update_connection_infrastructure( tid ); - } // of structural plasticity + // If either the user or structual plasticity has modified the basis of connectivity, + // we need to re-build the connection infrastructure. At this point, all spikes from + // the previous time slice have been delivered, so we do not risk transmission via + // deleted connections. 
+ if ( kernel().node_manager.have_nodes_changed() or kernel().connection_manager.connections_have_changed() ) + { + update_connection_infrastructure( tid ); + } + sw_update_.start(); const SparseNodeArray& thread_local_nodes = kernel().node_manager.get_local_nodes( tid ); diff --git a/nestkernel/sp_manager.cpp b/nestkernel/sp_manager.cpp index 4705b7de64..3a1c358097 100644 --- a/nestkernel/sp_manager.cpp +++ b/nestkernel/sp_manager.cpp @@ -242,15 +242,6 @@ SPManager::disconnect( NodeCollectionPTR sources, DictionaryDatum& conn_spec, DictionaryDatum& syn_spec ) { - if ( kernel().connection_manager.connections_have_changed() ) - { -#pragma omp parallel - { - const size_t tid = kernel().vp_manager.get_thread_id(); - kernel().simulation_manager.update_connection_infrastructure( tid ); - } - } - BipartiteConnBuilder* cb = nullptr; conn_spec->clear_access_flags(); syn_spec->clear_access_flags(); From 0d2cde048a4a688d193332b2ccc71ceda44bae3b Mon Sep 17 00:00:00 2001 From: Hans Ekkehard Plesser Date: Sun, 13 Jul 2025 23:22:58 +0200 Subject: [PATCH 04/10] Adjust neuron number to make tests more robust. But the affected tests need revision, see Note. --- testsuite/mpitests/test_connect_array_all_to_all_mpi.sli | 7 ++++++- .../mpitests/test_connect_array_fixed_indegree_mpi.sli | 6 +++++- .../mpitests/test_connect_array_fixed_outdegree_mpi.sli | 6 +++++- 3 files changed, 16 insertions(+), 3 deletions(-) diff --git a/testsuite/mpitests/test_connect_array_all_to_all_mpi.sli b/testsuite/mpitests/test_connect_array_all_to_all_mpi.sli index 376d5d7ca7..53565c238d 100644 --- a/testsuite/mpitests/test_connect_array_all_to_all_mpi.sli +++ b/testsuite/mpitests/test_connect_array_all_to_all_mpi.sli @@ -37,6 +37,10 @@ SeeAlso: Connect */ +% NOTE: Test should be re-designed because the current design can lead to different numbers of GetConnection calls +% on different Ranks if N is not divisible by all process numbers. 
+ + (unittest) run /unittest using @@ -45,7 +49,7 @@ skip_if_not_threaded [2 3] % check for 2, 3 processes { -/N 10 def % number of neurons in each population +/N 12 def % number of neurons in each population ResetKernel @@ -72,6 +76,7 @@ ResetKernel sources targets conn_dict syn_dict Connect % connects source to target + /pass true def % error control flag /n_loc 0 def % used to check n. of neurons belonging to this MPI process targets % loop through all target neurons diff --git a/testsuite/mpitests/test_connect_array_fixed_indegree_mpi.sli b/testsuite/mpitests/test_connect_array_fixed_indegree_mpi.sli index bd1c6f1998..709c89c816 100644 --- a/testsuite/mpitests/test_connect_array_fixed_indegree_mpi.sli +++ b/testsuite/mpitests/test_connect_array_fixed_indegree_mpi.sli @@ -37,6 +37,10 @@ SeeAlso: Connect */ + +% NOTE: Test should be re-designed because the current design can lead to different numbers of GetConnection calls +% on different Ranks if N is not divisible by all process numbers. + (unittest) run /unittest using @@ -45,7 +49,7 @@ skip_if_not_threaded [2 3] % check for 2, 3 processes { -/N 20 def % number of neurons in each population +/N 24 def % number of neurons in each population /K 5 def % number of connections per neuron ResetKernel diff --git a/testsuite/mpitests/test_connect_array_fixed_outdegree_mpi.sli b/testsuite/mpitests/test_connect_array_fixed_outdegree_mpi.sli index 2f75219a38..2e4ae74005 100644 --- a/testsuite/mpitests/test_connect_array_fixed_outdegree_mpi.sli +++ b/testsuite/mpitests/test_connect_array_fixed_outdegree_mpi.sli @@ -37,6 +37,10 @@ SeeAlso: Connect */ +% NOTE: Test should be re-designed because the current design can lead to different numbers of GetConnection calls +% on different Ranks if N is not divisible by all process numbers. 
+ + (unittest) run /unittest using skip_if_not_threaded [1 2 3] % check for 1, 2, 3 processes { - /N 20 def % number of neurons in each population + /N 24 def % number of neurons in each population /K 5 def % number of connections per neuron ResetKernel From 5f18db34a6b4da66a7bde20d571b334c0a2818fc Mon Sep 17 00:00:00 2001 From: Hans Ekkehard Plesser Date: Sun, 13 Jul 2025 23:24:39 +0200 Subject: [PATCH 05/10] Add function to turn SynapseCollection into Counter object (multiset) for order-independent comparison --- testsuite/pytests/utilities/testutil.py | 32 +++++++++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/testsuite/pytests/utilities/testutil.py b/testsuite/pytests/utilities/testutil.py index 54a9e0dcdc..ec89438690 100644 --- a/testsuite/pytests/utilities/testutil.py +++ b/testsuite/pytests/utilities/testutil.py @@ -19,6 +19,7 @@ # You should have received a copy of the GNU General Public License # along with NEST. If not, see . +import collections import dataclasses import sys @@ -113,3 +114,34 @@ def default(d=attr): def use_simulation(cls): # If `mark` receives one argument that is a class, it decorates that arg. return pytest.mark.simulation(cls, "") + + +def synapse_counts(syn_collection, criteria=("source", "target", "target_thread", "synapse_id")): + """ + Returns Counter object representing synapse collection. + + This is useful for comparing whether two synapse collections contain the same connections + irrespective of ordering using source, target, target thread and synapse id as criteria. + By default, the "port" is ignored, since it is an implementation detail and affected by + synapse sorting. 
+ """ + + return collections.Counter(zip(*syn_collection.get(criteria).values())) + + +def expected_synapse_counts(sources, targets=None, target_threads=None, syn_ids=None, ports=None): + """ + From given lists of sources, targets and optionally target threads, synapse ids or ports + create a Counter object that can be compared to the object returned by synapse_counts. + """ + + n = len(sources) + data = [sources] + for c in [targets, target_threads, syn_ids, ports]: + if c is None: + continue + + assert len(c) == n + data.append(c) + + return collections.Counter(zip(*data)) From f047cbcc75bc8dd76104414a432f7a3068119cc8 Mon Sep 17 00:00:00 2001 From: Hans Ekkehard Plesser Date: Sun, 13 Jul 2025 23:29:37 +0200 Subject: [PATCH 06/10] Adapt to work correctly independent of synapse ordering --- .../sli2py_regressions/test_issue_2629.py | 20 ++++++++----------- 1 file changed, 8 insertions(+), 12 deletions(-) diff --git a/testsuite/pytests/sli2py_regressions/test_issue_2629.py b/testsuite/pytests/sli2py_regressions/test_issue_2629.py index dfc14e6b2d..6e968602d5 100644 --- a/testsuite/pytests/sli2py_regressions/test_issue_2629.py +++ b/testsuite/pytests/sli2py_regressions/test_issue_2629.py @@ -65,13 +65,10 @@ def test_dump_layer_connections_target_1(tmp_path, network): fname_1 = tmp_path / "conns_1.txt" nest.DumpLayerConnections(src_layer, tgt_layer_1, "static_synapse", fname_1) - expected_dump_1 = [ - "1 3 1 1 0 0", - "1 4 1 1 0.5 0", - "2 3 1 1 -0.5 0", - "2 4 1 1 0 0", - ] - actual_dump_1 = fname_1.read_text().splitlines() + + # Ordering of lines is irrelevant, therefore compare sorted lists + expected_dump_1 = sorted(["1 3 1 1 0 0", "1 4 1 1 0.5 0", "2 3 1 1 -0.5 0", "2 4 1 1 0 0"]) + actual_dump_1 = sorted(fname_1.read_text().splitlines()) assert actual_dump_1 == expected_dump_1 @@ -82,9 +79,8 @@ def test_dump_layer_connections_target_2(tmp_path, network): fname_2 = tmp_path / "conns_2.txt" nest.DumpLayerConnections(src_layer, tgt_layer_2, "static_synapse", 
fname_2) - expected_dump_2 = [ - "1 5 1 1 0 0", - "2 6 1 1 0 0", - ] - actual_dump_2 = fname_2.read_text().splitlines() + + # Ordering of lines is irrelevant, therefore compare sorted lists + expected_dump_2 = sorted(["1 5 1 1 0 0", "2 6 1 1 0 0"]) + actual_dump_2 = sorted(fname_2.read_text().splitlines()) assert actual_dump_2 == expected_dump_2 From fa8601872042891b5c75a7b0e14209c032f9d352 Mon Sep 17 00:00:00 2001 From: Hans Ekkehard Plesser Date: Sun, 13 Jul 2025 23:30:56 +0200 Subject: [PATCH 07/10] Make test robust against changes in synapse reordering and invalidation of SynapseCollection --- testsuite/pytests/test_connect_after_simulate.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/testsuite/pytests/test_connect_after_simulate.py b/testsuite/pytests/test_connect_after_simulate.py index 99a9685373..b7e47b6aa0 100644 --- a/testsuite/pytests/test_connect_after_simulate.py +++ b/testsuite/pytests/test_connect_after_simulate.py @@ -29,6 +29,7 @@ import nest import pytest +from testutil import synapse_counts @pytest.fixture(autouse=True) @@ -48,11 +49,15 @@ def test_connections_before_after_simulate(use_compressed_spikes): neurons = nest.Create("iaf_psc_alpha", 10) nest.Connect(neurons, neurons, conn_spec={"rule": "fixed_indegree", "indegree": 2}) - connections_before = nest.GetConnections() + # SynapseCollections are considered equal if they contain the same (source, target, target_thread, syn_id) + # tuples, irrespective of order. We do not consider port number, because it can change due to compression. + # Therefore, we need to compare explicitly. Since Simulate may invalidate SynapseCollections, we need to + # extract data right before calling simulate. 
+ connections_before = synapse_counts(nest.GetConnections()) nest.Simulate(1.0) - connections_after = nest.GetConnections() + connections_after = synapse_counts(nest.GetConnections()) assert connections_before == connections_after From 033e1eff2eb70ea2e406a9ff1851573f9c4bf0e7 Mon Sep 17 00:00:00 2001 From: Hans Ekkehard Plesser Date: Sun, 13 Jul 2025 23:31:35 +0200 Subject: [PATCH 08/10] Make test robust against changes in synapse reordering and invalidation of SynapseCollection --- testsuite/pytests/test_get_connections.py | 38 +++++++++++++---------- 1 file changed, 22 insertions(+), 16 deletions(-) diff --git a/testsuite/pytests/test_get_connections.py b/testsuite/pytests/test_get_connections.py index 06e67138e2..637940f430 100644 --- a/testsuite/pytests/test_get_connections.py +++ b/testsuite/pytests/test_get_connections.py @@ -31,6 +31,7 @@ import pandas as pd import pandas.testing as pdtest import pytest +from testutil import expected_synapse_counts, synapse_counts @pytest.fixture(autouse=True) @@ -60,15 +61,10 @@ def test_get_connections_with_node_collection_step(): nodes = nest.Create("iaf_psc_alpha", 3) nest.Connect(nodes, nodes) - conns = nest.GetConnections(nodes[::2]) - actual_sources = conns.get("source") - actual_targets = conns.get("target") - - expected_sources = [1, 1, 1, 3, 3, 3] - expected_targets = [1, 2, 3, 1, 2, 3] + actual = synapse_counts(nest.GetConnections(nodes[::2]), criteria=["source", "target"]) + expected = expected_synapse_counts(sources=[1, 1, 1, 3, 3, 3], targets=[1, 2, 3, 1, 2, 3]) - assert actual_sources == expected_sources - assert actual_targets == expected_targets + assert actual == expected def test_get_connections_with_sliced_node_collection(): @@ -77,11 +73,10 @@ def test_get_connections_with_sliced_node_collection(): nodes = nest.Create("iaf_psc_alpha", 11) nest.Connect(nodes, nodes) - conns = nest.GetConnections(nodes[1:9:3]) - actual_sources = conns.get("source") + actual = 
synapse_counts(nest.GetConnections(nodes[1:9:3]), criteria=["source"]) + expected = expected_synapse_counts(sources=[2] * 11 + [5] * 11 + [8] * 11) - expected_sources = [2] * 11 + [5] * 11 + [8] * 11 - assert actual_sources == expected_sources + assert actual == expected def test_get_connections_with_sliced_node_collection_2(): @@ -91,11 +86,10 @@ def test_get_connections_with_sliced_node_collection_2(): nest.Connect(nodes, nodes) # ([ 2 3 4 ] + [ 8 9 10 11 ])[::3] -> [2 8 11] - conns = nest.GetConnections((nodes[1:4] + nodes[7:])[::3]) - actual_sources = conns.get("source") + actual = synapse_counts(nest.GetConnections((nodes[1:4] + nodes[7:])[::3]), criteria=["source"]) + expected = expected_synapse_counts(sources=[2] * 11 + [8] * 11 + [11] * 11) - expected_sources = [2] * 11 + [8] * 11 + [11] * 11 - assert actual_sources == expected_sources + assert actual == expected def test_get_connections_bad_source_raises(): @@ -111,11 +105,17 @@ def test_get_connections_bad_source_raises(): def test_get_connections_correct_table_with_node_collection_step(): """ Test that ``GetConnections`` table from ``NodeCollection`` sliced in step match expectations. + + This test is for internal testing also of the synapse sorting process. It therefore requires + that the connection infrastructure has been updated before connections are read out. """ nodes = nest.Create("iaf_psc_alpha", 3) nest.Connect(nodes, nodes) + # Force synapse sorting + nest.Simulate(0.1) + conns = nest.GetConnections(nodes[::2]) actual_row = [ conns.get("source")[3], @@ -134,11 +134,17 @@ def test_get_connections_correct_table_with_node_collection_step(): def test_get_connections_correct_table_with_node_collection_index(): """ Test that ``GetConnections`` table from ``NodeCollection`` index match expectations. + + This test is for internal testing also of the synapse sorting process. It therefore requires + that the connection infrastructure has been updated before connections are read out. 
""" nodes = nest.Create("iaf_psc_alpha", 3) nest.Connect(nodes, nodes) + # Force synapse sorting + nest.Simulate(0.1) + actual_conns = pd.DataFrame( nest.GetConnections(nodes[0]).get(["source", "target", "target_thread", "synapse_id", "port"]) ) From 3efc80636f060cfe4487583b2dbe8f4cdfcb5f1b Mon Sep 17 00:00:00 2001 From: Hans Ekkehard Plesser Date: Mon, 14 Jul 2025 07:15:26 +0200 Subject: [PATCH 09/10] Add regression test for issue 3489 --- testsuite/pytests/test_issue_3489.py | 55 ++++++++++++++++++++++++++++ 1 file changed, 55 insertions(+) create mode 100644 testsuite/pytests/test_issue_3489.py diff --git a/testsuite/pytests/test_issue_3489.py b/testsuite/pytests/test_issue_3489.py new file mode 100644 index 0000000000..e256d47289 --- /dev/null +++ b/testsuite/pytests/test_issue_3489.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# +# test_issue_3489.py +# +# This file is part of NEST. +# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . + +import nest +import pytest + + +@pytest.mark.parametrize("compressed", [True, False]) +@pytest.mark.parametrize("simtime", [10.9, 11, 11.1]) +def test_spike_delivered_before_connection_removed(compressed, simtime): + """ + Test that spikes are properly transmitted before the connection infrastructure + is updated. 
+ + Simtime 10.9 means that Simulate() terminates before the test spike is delivered + and as we then step in 0.1 ms steps, we ensure that connection updates happen only + after spikes have been delivered at slice boundaries. + """ + + nest.ResetKernel() + nest.use_compressed_spikes = compressed + + nodes_e = nest.Create("parrot_neuron", 3) + sg = nest.Create("spike_generator", params={"spike_times": [9.7]}) + + nest.Connect(sg, nodes_e[1]) # only neuron with GID 2 gets input + nest.Connect(nodes_e[:2], nodes_e[2]) # neurons with GIDs 1 and 2 connect to 3 + + # Neuron 2 spikes at 10.7, spike is stored, but not yet delivered + # If we simulated here until 11.1, the spike would be delivered here and all is fine + nest.Simulate(simtime) + + nest.Disconnect(nodes_e[0], nodes_e[2]) + + while nest.biological_time < 11.1: + print("TIME:", nest.biological_time) + nest.Simulate(0.1) # This call will trigger delivery of the spike, but now the connection is gone -> segfault From 5c4f24e3b822c14dc97e925ba749422cde48374a Mon Sep 17 00:00:00 2001 From: Hans Ekkehard Plesser Date: Mon, 14 Jul 2025 07:17:41 +0200 Subject: [PATCH 10/10] Update conn infrastructure only at beginning of slice --- nestkernel/simulation_manager.cpp | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/nestkernel/simulation_manager.cpp b/nestkernel/simulation_manager.cpp index 6951ce7911..9bfeaf0178 100644 --- a/nestkernel/simulation_manager.cpp +++ b/nestkernel/simulation_manager.cpp @@ -1039,8 +1039,10 @@ nest::SimulationManager::update_() // If either the user or structual plasticity has modified the basis of connectivity, // we need to re-build the connection infrastructure. At this point, all spikes from // the previous time slice have been delivered, so we do not risk transmission via - // deleted connections. - if ( kernel().node_manager.have_nodes_changed() or kernel().connection_manager.connections_have_changed() ) + // deleted connections. 
We don't need to do this in slice_ == 0, because there + // prepare() has taken care of this. + if ( slice_ > 0 and from_step_ == 0 + and ( kernel().node_manager.have_nodes_changed() or kernel().connection_manager.connections_have_changed() ) ) { update_connection_infrastructure( tid ); }