2 changes: 1 addition & 1 deletion models/eprop_synapse.h
@@ -481,7 +481,7 @@ inline void
eprop_synapse< targetidentifierT >::delete_optimizer()
{
delete optimizer_;
// do not set to nullptr to allow detection of double deletion
optimizer_ = nullptr;
}

template < typename targetidentifierT >
2 changes: 1 addition & 1 deletion models/eprop_synapse_bsshslm_2020.h
@@ -511,7 +511,7 @@ inline void
eprop_synapse_bsshslm_2020< targetidentifierT >::delete_optimizer()
{
delete optimizer_;
// do not set to nullptr to allow detection of double deletion
optimizer_ = nullptr;
}

template < typename targetidentifierT >
9 changes: 3 additions & 6 deletions nestkernel/connection_manager.cpp
@@ -808,7 +808,7 @@
}

void
nest::ConnectionManager::connect_sonata( const DictionaryDatum& graph_specs, const long hyberslab_size )

Check warning on line 811 in nestkernel/connection_manager.cpp (GitHub Actions / build_linux): unused parameter ‘graph_specs’ [-Wunused-parameter]
Check warning on line 811 in nestkernel/connection_manager.cpp (GitHub Actions / build_linux): unused parameter ‘hyberslab_size’ [-Wunused-parameter]
{
#ifdef HAVE_HDF5
SonataConnector sonata_connector( graph_specs, hyberslab_size );
@@ -1131,6 +1131,9 @@
// as this may involve sorting connections by source node IDs.
if ( connections_have_changed() )
{
// TODO fix-3489: Not entirely sure if the next two calls are needed here,
// but removing both caused problems.

// We need to update min_delay because it is used by check_wfr_use() below
// to set secondary event data size.
update_delay_extrema_();
@@ -1139,12 +1142,6 @@
// needs to be called before update_connection_infrastructure since
// it resizes coefficient arrays for secondary events
kernel().node_manager.check_wfr_use();

#pragma omp parallel
{
const size_t tid = kernel().vp_manager.get_thread_id();
kernel().simulation_manager.update_connection_infrastructure( tid );
}
}

// We check, whether a synapse model is given. If not, we will iterate all.
25 changes: 18 additions & 7 deletions nestkernel/simulation_manager.cpp
@@ -544,7 +544,13 @@ nest::SimulationManager::prepare()
// it resizes coefficient arrays for secondary events
kernel().node_manager.check_wfr_use();

if ( kernel().node_manager.have_nodes_changed() or kernel().connection_manager.connections_have_changed() )
// Only update connection infrastructure if we have not simulated yet. Otherwise,
// there might still be spikes in the event delivery data structures destined for
// connections that have been deleted. See #3489.
// We must do this here once before the first simulation round because WFR otherwise
// cannot do its pre-step in the first time slice.
if ( not simulated_
and ( kernel().node_manager.have_nodes_changed() or kernel().connection_manager.connections_have_changed() ) )
{
#pragma omp parallel
{
@@ -1028,14 +1034,19 @@ nest::SimulationManager::update_()
node->decay_synaptic_elements_vacant();
}

// after structural plasticity has created and deleted
// connections, update the connection infrastructure; implies
// complete removal of presynaptic part and reconstruction
// from postsynaptic data
update_connection_infrastructure( tid );

} // of structural plasticity

// If either the user or structural plasticity has modified the basis of connectivity,
// we need to re-build the connection infrastructure. At this point, all spikes from
// the previous time slice have been delivered, so we do not risk transmission via
// deleted connections. We do not need to do this for slice_ == 0, because prepare()
// has already taken care of it.
if ( slice_ > 0 and from_step_ == 0
and ( kernel().node_manager.have_nodes_changed() or kernel().connection_manager.connections_have_changed() ) )
{
update_connection_infrastructure( tid );
}

sw_update_.start();
const SparseNodeArray& thread_local_nodes = kernel().node_manager.get_local_nodes( tid );

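Note: the comment in the hunk above summarizes the per-slice ordering that the fix relies on. The following toy model is a hypothetical illustration only (all names are invented; this is not NEST kernel code): spikes buffered during the previous slice are delivered before any connectivity rebuild, so no pending spike can be routed through a connection deleted in between.

def simulate_slices(n_slices, connectivity_changed):
    """Toy per-slice loop mimicking the ordering assumed above."""
    pending_spikes = []
    for slice_ in range(n_slices):
        # 1. Deliver spikes buffered during the previous slice.
        for spike in pending_spikes:
            print("deliver", spike)
        pending_spikes.clear()

        # 2. Only now is it safe to rebuild the connection infrastructure:
        #    no pending spike can reference a deleted connection anymore.
        if slice_ > 0 and connectivity_changed:
            print("rebuild connection infrastructure")
            connectivity_changed = False

        # 3. Advance neurons; newly emitted spikes are buffered for the next slice.
        pending_spikes.append(f"spike emitted in slice {slice_}")


simulate_slices(3, connectivity_changed=True)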
9 changes: 0 additions & 9 deletions nestkernel/sp_manager.cpp
@@ -242,15 +242,6 @@ SPManager::disconnect( NodeCollectionPTR sources,
DictionaryDatum& conn_spec,
DictionaryDatum& syn_spec )
{
if ( kernel().connection_manager.connections_have_changed() )
{
#pragma omp parallel
{
const size_t tid = kernel().vp_manager.get_thread_id();
kernel().simulation_manager.update_connection_infrastructure( tid );
}
}

BipartiteConnBuilder* cb = nullptr;
conn_spec->clear_access_flags();
syn_spec->clear_access_flags();
7 changes: 6 additions & 1 deletion testsuite/mpitests/test_connect_array_all_to_all_mpi.sli
@@ -37,6 +37,10 @@
SeeAlso: Connect
*/

% NOTE: This test should be re-designed: the current design can lead to different numbers of GetConnections calls
% on different ranks if N is not divisible by all tested process counts. N is therefore chosen to be divisible by 2 and 3.


(unittest) run
/unittest using

@@ -45,7 +49,7 @@ skip_if_not_threaded
[2 3] % check for 2, 3 processes
{

/N 10 def % number of neurons in each population
/N 12 def % number of neurons in each population

ResetKernel

@@ -72,6 +76,7 @@

sources targets conn_dict syn_dict Connect % connects source to target


/pass true def % error control flag
/n_loc 0 def % used to check n. of neurons belonging to this MPI process
targets % loop through all target neurons
6 changes: 5 additions & 1 deletion testsuite/mpitests/test_connect_array_fixed_indegree_mpi.sli
@@ -37,6 +37,10 @@
SeeAlso: Connect
*/


% NOTE: This test should be re-designed: the current design can lead to different numbers of GetConnections calls
% on different ranks if N is not divisible by all tested process counts. N is therefore chosen to be divisible by 2 and 3.

(unittest) run
/unittest using

@@ -45,7 +49,7 @@ skip_if_not_threaded
[2 3] % check for 2, 3 processes
{

/N 20 def % number of neurons in each population
/N 24 def % number of neurons in each population
/K 5 def % number of connections per neuron

ResetKernel
Original file line number Diff line number Diff line change
@@ -37,6 +37,10 @@
SeeAlso: Connect
*/

% NOTE: This test should be re-designed: the current design can lead to different numbers of GetConnections calls
% on different ranks if N is not divisible by all tested process counts. N is therefore chosen to be divisible by 2 and 3.


(unittest) run
/unittest using

@@ -45,7 +49,7 @@ skip_if_not_threaded
[1 2 3] % check for 1, 2, 3 processes
{

/N 20 def % number of neurons in each population
/N 24 def % number of neurons in each population
/K 5 def % number of connections per neuron

ResetKernel
20 changes: 8 additions & 12 deletions testsuite/pytests/sli2py_regressions/test_issue_2629.py
@@ -65,13 +65,10 @@ def test_dump_layer_connections_target_1(tmp_path, network):

fname_1 = tmp_path / "conns_1.txt"
nest.DumpLayerConnections(src_layer, tgt_layer_1, "static_synapse", fname_1)
expected_dump_1 = [
"1 3 1 1 0 0",
"1 4 1 1 0.5 0",
"2 3 1 1 -0.5 0",
"2 4 1 1 0 0",
]
actual_dump_1 = fname_1.read_text().splitlines()

# The ordering of lines is irrelevant; therefore, compare sorted lists
expected_dump_1 = sorted(["1 3 1 1 0 0", "1 4 1 1 0.5 0", "2 3 1 1 -0.5 0", "2 4 1 1 0 0"])
actual_dump_1 = sorted(fname_1.read_text().splitlines())
assert actual_dump_1 == expected_dump_1


@@ -82,9 +79,8 @@ def test_dump_layer_connections_target_2(tmp_path, network):

fname_2 = tmp_path / "conns_2.txt"
nest.DumpLayerConnections(src_layer, tgt_layer_2, "static_synapse", fname_2)
expected_dump_2 = [
"1 5 1 1 0 0",
"2 6 1 1 0 0",
]
actual_dump_2 = fname_2.read_text().splitlines()

# The ordering of lines is irrelevant; therefore, compare sorted lists
expected_dump_2 = sorted(["1 5 1 1 0 0", "2 6 1 1 0 0"])
actual_dump_2 = sorted(fname_2.read_text().splitlines())
assert actual_dump_2 == expected_dump_2
9 changes: 7 additions & 2 deletions testsuite/pytests/test_connect_after_simulate.py
@@ -29,6 +29,7 @@

import nest
import pytest
from testutil import synapse_counts


@pytest.fixture(autouse=True)
@@ -48,11 +49,15 @@ def test_connections_before_after_simulate(use_compressed_spikes):
neurons = nest.Create("iaf_psc_alpha", 10)
nest.Connect(neurons, neurons, conn_spec={"rule": "fixed_indegree", "indegree": 2})

connections_before = nest.GetConnections()
# SynapseCollections are considered equal here if they contain the same (source, target, target_thread, syn_id)
# tuples, irrespective of order. We do not consider the port number, because it can change due to spike
# compression; therefore we compare explicitly via synapse_counts(). Since Simulate() may invalidate
# SynapseCollections, we must extract the data right before calling Simulate().
connections_before = synapse_counts(nest.GetConnections())

nest.Simulate(1.0)

connections_after = nest.GetConnections()
connections_after = synapse_counts(nest.GetConnections())

assert connections_before == connections_after

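Note: the synapse_counts and expected_synapse_counts helpers imported from testutil are not part of this diff. A minimal sketch of how such helpers might look (an illustrative assumption, not the actual testutil code), building order-independent multisets of connection attributes:

from collections import Counter


def synapse_counts(conns, criteria=("source", "target", "target_thread", "synapse_id")):
    """Return a multiset of per-connection attribute tuples; order and port are ignored."""
    columns = [conns.get(c) for c in criteria]
    # conns.get() returns a scalar for a single connection and a list otherwise
    columns = [c if isinstance(c, list) else [c] for c in columns]
    return Counter(zip(*columns))


def expected_synapse_counts(sources=None, targets=None):
    """Build the same kind of multiset from explicitly listed expected values."""
    columns = [c for c in (sources, targets) if c is not None]
    return Counter(zip(*columns))

With this shape, synapse_counts(conns, criteria=["source"]) equals expected_synapse_counts(sources=[...]) exactly when the multisets of values match, which is how the test above and test_get_connections.py below use them.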
38 changes: 22 additions & 16 deletions testsuite/pytests/test_get_connections.py
@@ -31,6 +31,7 @@
import pandas as pd
import pandas.testing as pdtest
import pytest
from testutil import expected_synapse_counts, synapse_counts


@pytest.fixture(autouse=True)
@@ -60,15 +61,10 @@ def test_get_connections_with_node_collection_step():
nodes = nest.Create("iaf_psc_alpha", 3)
nest.Connect(nodes, nodes)

conns = nest.GetConnections(nodes[::2])
actual_sources = conns.get("source")
actual_targets = conns.get("target")

expected_sources = [1, 1, 1, 3, 3, 3]
expected_targets = [1, 2, 3, 1, 2, 3]
actual = synapse_counts(nest.GetConnections(nodes[::2]), criteria=["source", "target"])
expected = expected_synapse_counts(sources=[1, 1, 1, 3, 3, 3], targets=[1, 2, 3, 1, 2, 3])

assert actual_sources == expected_sources
assert actual_targets == expected_targets
assert actual == expected


def test_get_connections_with_sliced_node_collection():
@@ -77,11 +73,10 @@ def test_get_connections_with_sliced_node_collection():
nodes = nest.Create("iaf_psc_alpha", 11)
nest.Connect(nodes, nodes)

conns = nest.GetConnections(nodes[1:9:3])
actual_sources = conns.get("source")
actual = synapse_counts(nest.GetConnections(nodes[1:9:3]), criteria=["source"])
expected = expected_synapse_counts(sources=[2] * 11 + [5] * 11 + [8] * 11)

expected_sources = [2] * 11 + [5] * 11 + [8] * 11
assert actual_sources == expected_sources
assert actual == expected


def test_get_connections_with_sliced_node_collection_2():
@@ -91,11 +86,10 @@ def test_get_connections_with_sliced_node_collection_2():
nest.Connect(nodes, nodes)

# ([ 2 3 4 ] + [ 8 9 10 11 ])[::3] -> [2 8 11]
conns = nest.GetConnections((nodes[1:4] + nodes[7:])[::3])
actual_sources = conns.get("source")
actual = synapse_counts(nest.GetConnections((nodes[1:4] + nodes[7:])[::3]), criteria=["source"])
expected = expected_synapse_counts(sources=[2] * 11 + [8] * 11 + [11] * 11)

expected_sources = [2] * 11 + [8] * 11 + [11] * 11
assert actual_sources == expected_sources
assert actual == expected


def test_get_connections_bad_source_raises():
@@ -111,11 +105,17 @@ def test_get_connections_correct_table_with_node_collection_step():
def test_get_connections_correct_table_with_node_collection_step():
"""
Test that the ``GetConnections`` table from a ``NodeCollection`` sliced in steps matches expectations.

This test also exercises the internal synapse sorting process. It therefore requires
that the connection infrastructure has been updated before connections are read out.
"""

nodes = nest.Create("iaf_psc_alpha", 3)
nest.Connect(nodes, nodes)

# Force synapse sorting
nest.Simulate(0.1)

conns = nest.GetConnections(nodes[::2])
actual_row = [
conns.get("source")[3],
Expand All @@ -134,11 +134,17 @@ def test_get_connections_correct_table_with_node_collection_step():
def test_get_connections_correct_table_with_node_collection_index():
"""
Test that the ``GetConnections`` table from a ``NodeCollection`` index matches expectations.

This test also exercises the internal synapse sorting process. It therefore requires
that the connection infrastructure has been updated before connections are read out.
"""

nodes = nest.Create("iaf_psc_alpha", 3)
nest.Connect(nodes, nodes)

# Force synapse sorting
nest.Simulate(0.1)

actual_conns = pd.DataFrame(
nest.GetConnections(nodes[0]).get(["source", "target", "target_thread", "synapse_id", "port"])
)
55 changes: 55 additions & 0 deletions testsuite/pytests/test_issue_3489.py
@@ -0,0 +1,55 @@
# -*- coding: utf-8 -*-
#
# test_issue_3489.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.

import nest
import pytest


@pytest.mark.parametrize("compressed", [True, False])
@pytest.mark.parametrize("simtime", [10.9, 11, 11.1])
def test_spike_delivered_before_connection_removed(compressed, simtime):
"""
Test that spikes are properly transmitted before the connection infrastructure
is updated.

A simtime of 10.9 means that Simulate() terminates before the test spike is delivered;
because we then continue in 0.1 ms steps, we ensure that connection updates happen only
after spikes have been delivered at slice boundaries.
"""

nest.ResetKernel()
nest.use_compressed_spikes = compressed

nodes_e = nest.Create("parrot_neuron", 3)
sg = nest.Create("spike_generator", params={"spike_times": [9.7]})

nest.Connect(sg, nodes_e[1]) # only neuron with GID 2 gets input
nest.Connect(nodes_e[:2], nodes_e[2]) # neurons with GIDs 1 and 2 connect to 3

# Neuron 2 spikes at 10.7; the spike is stored, but not yet delivered.
# If we simulated until 11.1 right away, the spike would already be delivered and all would be fine.
nest.Simulate(simtime)

nest.Disconnect(nodes_e[0], nodes_e[2])

while nest.biological_time < 11.1:
print("TIME:", nest.biological_time)
nest.Simulate(0.1) # This call will trigger delivery of the spike, but now the connection is gone -> segfault