From 6b7b32b00d619616c6840200592d5b8d8556e44c Mon Sep 17 00:00:00 2001 From: "C.A.P. Linssen" Date: Tue, 5 Dec 2023 01:36:09 -0800 Subject: [PATCH 1/6] ensure compatibility with NESTML custom as well as NEST built-in synaptic plasticity models --- .../codegeneration/nest_code_generator.py | 2 +- .../point_neuron/common/NeuronClass.jinja2 | 78 ++++++++++--------- .../point_neuron/common/NeuronHeader.jinja2 | 13 +--- .../common/SynapseHeader.h.jinja2 | 2 +- 4 files changed, 47 insertions(+), 48 deletions(-) diff --git a/pynestml/codegeneration/nest_code_generator.py b/pynestml/codegeneration/nest_code_generator.py index b80eb2ca1..efa7e60ca 100644 --- a/pynestml/codegeneration/nest_code_generator.py +++ b/pynestml/codegeneration/nest_code_generator.py @@ -91,7 +91,7 @@ class NESTCodeGenerator(CodeGenerator): Options: - - **neuron_parent_class**: The C++ class from which the generated NESTML neuron class inherits. Examples: ``"ArchivingNode"``, ``"StructuralPlasticityNode"``. Default: ``"ArchivingNode"``. + - **neuron_parent_class**: The C++ class from which the generated NESTML neuron class inherits. Examples: ``"ArchivingNode"``, ``"StructuralPlasticityNode"``. To generate a model that has the smallest memory footprint, use ``"StructuralPlasticityNode"``. To ensure compatibility with the NEST built-in plastic synapses (like the ``stdp_synapse``), choose ``"ArchivingNode"``. Default: ``"ArchivingNode"``. - **neuron_parent_class_include**: The C++ header filename to include that contains **neuron_parent_class**. Default: ``"archiving_node.h"``. - **neuron_synapse_pairs**: List of pairs of (neuron, synapse) model names. - **preserve_expressions**: Set to True, or a list of strings corresponding to individual variable names, to disable internal rewriting of expressions, and return same output as input expression where possible. Only applies to variables specified as first-order differential equations. (This parameter is passed to ODE-toolbox.) 
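To illustrate the option documented above, here is a minimal sketch (not part of the patch) of passing ``neuron_parent_class`` through ``codegen_opts`` to ``generate_nest_target()``; the model path and target directory are placeholders:

    from pynestml.frontend.pynestml_frontend import generate_nest_target

    # Keep the default "ArchivingNode" parent class so that the generated neuron
    # remains compatible with NEST built-in plastic synapses such as stdp_synapse.
    generate_nest_target(
        input_path="models/neurons/iaf_psc_exp_neuron.nestml",  # placeholder path
        target_path="target",                                    # placeholder directory
        suffix="_nestml",
        codegen_opts={
            "neuron_parent_class": "ArchivingNode",
            "neuron_parent_class_include": "archiving_node.h",
        },
    )

Choosing ``"StructuralPlasticityNode"`` instead would drop the ArchivingNode spike history and yield a smaller memory footprint, at the cost of compatibility with the built-in plastic synapse models.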
diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 index b8d2f8ee7..f5b5b715b 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 @@ -265,7 +265,7 @@ std::vector< std::tuple< int, int > > {{neuronName}}::rport_to_nestml_buffer_idx {%- if paired_synapse is defined %} n_incoming_ = __n.n_incoming_; max_delay_ = __n.max_delay_; - last_spike_ = __n.last_spike_; + last_spike_nestml_ = __n.last_spike_nestml_; // cache initial values {%- for var_name in transferred_variables %} @@ -373,7 +373,7 @@ void {{neuronName}}::init_state_internal_() // state variables for archiving state for paired synapse n_incoming_ = 0; max_delay_ = 0; - last_spike_ = -1.; + last_spike_nestml_ = -1.; // cache initial values {%- for var_name in transferred_variables %} @@ -873,20 +873,24 @@ void {{neuronName}}::handle(nest::CurrentEvent& e) inline double {{neuronName}}::get_spiketime_ms() const { - return last_spike_; + return last_spike_nestml_; } void {{neuronName}}::register_stdp_connection( double t_first_read, double delay ) { +{%- if neuron_parent_class in ["ArchivingNode", "Archiving_Node"] %} + ArchivingNode::register_stdp_connection(t_first_read, delay); +{%- endif %} + // Mark all entries in the deque, which we will not read in future as read by // this input input, so that we safely increment the incoming number of // connections afterwards without leaving spikes in the history. // For details see bug #218. MH 08-04-22 - for ( std::deque< histentry__{{neuronName}} >::iterator runner = history_.begin(); - runner != history_.end() and ( t_first_read - runner->t_ > -1.0 * nest::kernel().connection_manager.get_stdp_eps() ); + for ( std::deque< histentry__{{neuronName}} >::iterator runner = history_nestml_.begin(); + runner != history_nestml_.end() and ( t_first_read - runner->t_ > -1.0 * nest::kernel().connection_manager.get_stdp_eps() ); ++runner ) { ( runner->access_counter_ )++; @@ -899,26 +903,26 @@ void void -{{neuronName}}::get_history__( double t1, +{{neuronName}}::get_history_nestml_( double t1, double t2, std::deque< histentry__{{neuronName}} >::iterator* start, std::deque< histentry__{{neuronName}} >::iterator* finish ) { - *finish = history_.end(); - if ( history_.empty() ) + *finish = history_nestml_.end(); + if ( history_nestml_.empty() ) { *start = *finish; return; } - std::deque< histentry__{{neuronName}} >::reverse_iterator runner = history_.rbegin(); + std::deque< histentry__{{neuronName}} >::reverse_iterator runner = history_nestml_.rbegin(); const double t2_lim = t2 + nest::kernel().connection_manager.get_stdp_eps(); const double t1_lim = t1 + nest::kernel().connection_manager.get_stdp_eps(); - while ( runner != history_.rend() and runner->t_ >= t2_lim ) + while ( runner != history_nestml_.rend() and runner->t_ >= t2_lim ) { ++runner; } *finish = runner.base(); - while ( runner != history_.rend() and runner->t_ >= t1_lim ) + while ( runner != history_nestml_.rend() and runner->t_ >= t1_lim ) { runner->access_counter_++; ++runner; @@ -946,13 +950,13 @@ void // STDP synapses, and // - there is another, later spike, that is strictly more than // (min_global_delay + max_delay_ + eps) away from the new spike (at t_sp_ms) - while ( history_.size() > 1 ) + while ( history_nestml_.size() > 1 ) { - const double next_t_sp = history_[ 1 ].t_; - if ( 
history_.front().access_counter_ >= n_incoming_ * num_transferred_variables + const double next_t_sp = history_nestml_[ 1 ].t_; + if ( history_nestml_.front().access_counter_ >= n_incoming_ * num_transferred_variables and t_sp_ms - next_t_sp > max_delay_ + nest::Time::delay_steps_to_ms(nest::kernel().connection_manager.get_min_delay()) + nest::kernel().connection_manager.get_stdp_eps() ) { - history_.pop_front(); + history_nestml_.pop_front(); } else { @@ -960,14 +964,14 @@ void } } - if (history_.size() > 0) { - assert(history_.back().t_ == last_spike_); + if (history_nestml_.size() > 0) { + assert(history_nestml_.back().t_ == last_spike_nestml_); {%- for var in purely_numeric_state_variables_moved|sort %} - {{ printer.print(utils.get_state_variable_by_name(astnode, var)) }} = history_.back().{{var}}_; + {{ printer.print(utils.get_state_variable_by_name(astnode, var)) }} = history_nestml_.back().{{var}}_; {%- endfor %} {%- for var in analytic_state_variables_moved|sort %} - {{ printer.print(utils.get_state_variable_by_name(astnode, var)) }} = history_.back().{{var}}_; + {{ printer.print(utils.get_state_variable_by_name(astnode, var)) }} = history_nestml_.back().{{var}}_; {%- endfor %} } else { @@ -981,11 +985,11 @@ void /** - * update state variables transferred from synapse from `last_spike_` to `t_sp_ms` + * update state variables transferred from synapse from `last_spike_nestml_` to `t_sp_ms` **/ const double old___h = V_.__h; - V_.__h = t_sp_ms - last_spike_; + V_.__h = t_sp_ms - last_spike_nestml_; if (V_.__h > 1E-12) { recompute_internal_variables(true); {# @@ -1041,8 +1045,8 @@ S_.ode_state[State_::{{variable_name}}] = ode_state_bak[State_::{{variable_name} {{ printer.print(utils.get_variable_by_name(astnode, spike_update.get_variable().get_complete_name())) }} += 1.; {%- endfor %} - last_spike_ = t_sp_ms; - history_.push_back( histentry__{{neuronName}}( last_spike_ + last_spike_nestml_ = t_sp_ms; + history_nestml_.push_back( histentry__{{neuronName}}( last_spike_nestml_ {%- for var in purely_numeric_state_variables_moved|sort %} , get_{{var}}() {%- endfor %} @@ -1054,7 +1058,7 @@ S_.ode_state[State_::{{variable_name}}] = ode_state_bak[State_::{{variable_name} } else { - last_spike_ = t_sp_ms; + last_spike_nestml_ = t_sp_ms; } } @@ -1062,8 +1066,8 @@ S_.ode_state[State_::{{variable_name}}] = ode_state_bak[State_::{{variable_name} void {{neuronName}}::clear_history() { - last_spike_ = -1.0; - history_.clear(); + last_spike_nestml_ = -1.0; + history_nestml_.clear(); } @@ -1086,7 +1090,7 @@ double #endif // case when the neuron has not yet spiked - if ( history_.empty() ) + if ( history_nestml_.empty() ) { #ifdef DEBUG std::cout << "{{neuronName}}::get_{{var}}: \thistory empty, returning initial value = " << {{var}}__iv << std::endl; @@ -1096,34 +1100,34 @@ double } // search for the latest post spike in the history buffer that came strictly before `t` - int i = history_.size() - 1; + int i = history_nestml_.size() - 1; double eps = 0.; if ( before_increment ) { eps = nest::kernel().connection_manager.get_stdp_eps(); } while ( i >= 0 ) { - if ( t - history_[ i ].t_ >= eps ) + if ( t - history_nestml_[ i ].t_ >= eps ) { #ifdef DEBUG - std::cout<<"{{neuronName}}::get_{{var}}: \tspike occurred at history[i].t_ = " << history_[i].t_ << std::endl; + std::cout<<"{{neuronName}}::get_{{var}}: \tspike occurred at history[i].t_ = " << history_nestml_[i].t_ << std::endl; #endif {%- for var_ in purely_numeric_state_variables_moved %} - {{ printer.print(utils.get_variable_by_name(astnode, var_)) 
}} = history_[ i ].{{var_}}_; + {{ printer.print(utils.get_variable_by_name(astnode, var_)) }} = history_nestml_[ i ].{{var_}}_; {%- endfor %} {%- for var_ in analytic_state_variables_moved %} - {{ printer.print(utils.get_variable_by_name(astnode, var_)) }} = history_[ i ].{{var_}}_; + {{ printer.print(utils.get_variable_by_name(astnode, var_)) }} = history_nestml_[ i ].{{var_}}_; {%- endfor %} /** * update state variables transferred from synapse from `history[i].t_` to `t` **/ - if ( t - history_[ i ].t_ >= nest::kernel().connection_manager.get_stdp_eps() ) + if ( t - history_nestml_[ i ].t_ >= nest::kernel().connection_manager.get_stdp_eps() ) { const double old___h = V_.__h; - V_.__h = t - history_[i].t_; + V_.__h = t - history_nestml_[i].t_; assert(V_.__h > 0); recompute_internal_variables(true); {# @@ -1171,13 +1175,13 @@ S_.ode_state[State_::{{variable_name}}] = ode_state_tmp[State_::{{variable_name} } // this case occurs when the trace was requested at a time precisely at that of the first spike in the history - if ( (!before_increment) and t == history_[ 0 ].t_) + if ( (!before_increment) and t == history_nestml_[ 0 ].t_) { {%- for var_ in purely_numeric_state_variables_moved %} - {{ printer.print(utils.get_state_variable_by_name(astnode, var_)) }} = history_[ 0 ].{{var_}}_; + {{ printer.print(utils.get_state_variable_by_name(astnode, var_)) }} = history_nestml_[ 0 ].{{var_}}_; {%- endfor %} {%- for var_ in analytic_state_variables_moved %} - {{ printer.print(utils.get_state_variable_by_name(astnode, var_)) }} = history_[ 0 ].{{var_}}_; + {{ printer.print(utils.get_state_variable_by_name(astnode, var_)) }} = history_nestml_[ 0 ].{{var_}}_; {%- endfor %} #ifdef DEBUG diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 index d99a6f406..7a1dd6af2 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 @@ -299,14 +299,9 @@ public: // support for spike archiving /** - * \fn void get_history(long t1, long t2, - * std::deque::iterator* start, - * std::deque::iterator* finish) - * return the spike times (in steps) of spikes which occurred in the range - * (t1,t2]. - * XXX: two underscores to differentiate it from nest::Node::get_history() + * Return the spike times (in steps) of spikes which occurred in the range (t1,t2]. 
*/ - void get_history__( double t1, + void get_history_nestml_( double t1, double t2, std::deque< histentry__{{neuronName}} >::iterator* start, std::deque< histentry__{{neuronName}} >::iterator* finish ); @@ -413,10 +408,10 @@ private: double max_delay_; - double last_spike_; + double last_spike_nestml_; // spiking history needed by stdp synapses - std::deque< histentry__{{neuronName}} > history_; + std::deque< histentry__{{neuronName}} > history_nestml_; // cache for initial values {%- for var in transferred_variables %} diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 index 47cf7e6c9..653e9fbd5 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 @@ -713,7 +713,7 @@ public: // history[0, ..., t_last_spike - dendritic_delay] have been // incremented by Archiving_Node::register_stdp_connection(). See bug #218 for // details. - __target->get_history__( t_lastspike_ - __dendritic_delay, + __target->get_history_nestml_( t_lastspike_ - __dendritic_delay, __t_spike - __dendritic_delay, &start, &finish ); From 974cbbee7f58f925ad05033a053c69db08aff5e7 Mon Sep 17 00:00:00 2001 From: "C.A.P. Linssen" Date: Tue, 5 Dec 2023 03:24:02 -0800 Subject: [PATCH 2/6] ensure compatibility with NESTML custom as well as NEST built-in synaptic plasticity models --- ...psc_exp_nonlineardendrite_alternate.nestml | 101 ++++++++++++ .../stdsp_synapse_no_permanence.nestml | 58 +++++++ ...est_built_in_and_nestml_plastic_synapse.py | 144 ++++++++++++++++++ 3 files changed, 303 insertions(+) create mode 100644 tests/nest_tests/resources/iaf_psc_exp_nonlineardendrite_alternate.nestml create mode 100644 tests/nest_tests/resources/stdsp_synapse_no_permanence.nestml create mode 100644 tests/nest_tests/test_built_in_and_nestml_plastic_synapse.py diff --git a/tests/nest_tests/resources/iaf_psc_exp_nonlineardendrite_alternate.nestml b/tests/nest_tests/resources/iaf_psc_exp_nonlineardendrite_alternate.nestml new file mode 100644 index 000000000..12a9b6262 --- /dev/null +++ b/tests/nest_tests/resources/iaf_psc_exp_nonlineardendrite_alternate.nestml @@ -0,0 +1,101 @@ +neuron iaf_psc_exp_nonlineardendrite: + + state: + V_m mV = 0mV # membrane potential in mV + z pA = 0pA # dAP trace + active_dendrite boolean = false + dAP_counts integer = 0 + ref_counts integer = 0 + + equations: + + # exponential shaped postsynaptic current kernel + kernel I_kernel1 = exp(-1/tau_syn1*t) + + # alpha shaped postsynaptic current kernel + kernel I_kernel2 = (e/tau_syn2) * t * exp(-t/tau_syn2) + + # exponential shaped postsynaptic current kernel + kernel I_kernel3 = exp(-1/tau_syn3*t) + + # exponential shaped postsynaptic current kernel + kernel I_kernel4 = exp(-1/tau_syn4*t) + + # diff. eq. for membrane potential + recordable inline I_dend pA = convolve(I_kernel2, I_2) * pA + inline I_syn pA = convolve(I_kernel1, I_1) * pA + I_dend - convolve(I_kernel3, I_3) * pA + convolve(I_kernel4, I_4) * pA + I_e + V_m' = -(V_m-E_L)/tau_m + I_syn/C_m + + # diff. eq. for dAP trace + z' = -z/tau_h + + parameters: + C_m pF = 250 pF # capacity of the membrane + tau_m ms = 20 ms # membrane time constant. 
+ tau_syn1 ms = 10 ms # time constant of synaptic current, port 1 + tau_syn2 ms = 10 ms # time constant of synaptic current, port 2 + tau_syn3 ms = 10 ms # time constant of synaptic current, port 3 + tau_syn4 ms = 10 ms # time constant of synaptic current, port 4 + tau_h ms = 400 ms # time constant of the dAP trace + V_th mV = 25 mV # spike threshold + V_reset mV = 0 mV # reset voltage + I_e pA = 0pA # external current. + E_L mV = 0mV # resting potential. + + # dendritic action potential + theta_dAP pA = 60pA # current threshold for a dendritic action potential + I_p pA = 250pA # current clamp value for I_dAP during a dendritic action potential + tau_dAP ms = 60ms # time window over which the dendritic current clamp is active + + # refractory parameters + t_ref ms = 10ms # refractory period + + internals: + dAP_timeout_ticks integer = steps(tau_dAP) + ref_timeout_ticks integer = steps(t_ref) + + input: + I_1 <- spike + I_2 <- spike + I_3 <- spike + I_4 <- spike + + output: + spike + + update: + # solve ODEs + integrate_odes() + + # current-threshold, emit a dendritic action potential + if I_dend > theta_dAP or active_dendrite: + if dAP_counts == 0: + + if active_dendrite == false: + z += 1pA + active_dendrite = true + I_dend = I_p + dAP_counts = dAP_timeout_ticks + else: + I_dend = 0pA + active_dendrite = false + + else: + dAP_counts -= 1 + I_dend = I_p + + # threshold crossing and refractoriness + if ref_counts == 0: + if V_m > V_th: + emit_spike() + ref_counts = ref_timeout_ticks + V_m = V_reset + dAP_counts = 0 + I_dend = 0pA + active_dendrite = false + else: + ref_counts -= 1 + V_m = V_reset + active_dendrite = false + dAP_counts = 0 + I_dend = 0pA diff --git a/tests/nest_tests/resources/stdsp_synapse_no_permanence.nestml b/tests/nest_tests/resources/stdsp_synapse_no_permanence.nestml new file mode 100644 index 000000000..016d54ffd --- /dev/null +++ b/tests/nest_tests/resources/stdsp_synapse_no_permanence.nestml @@ -0,0 +1,58 @@ + +synapse stdsp_synapse_no_permanence: + + state: + w real = 1. @nest::weight + t_last_pre_spike ms = -1ms + + parameters: + d ms = 2.0 ms @nest::delay # !!! cannot have a variable called "delay" + lambda real = .01 + lambda_minus real = .01 + tau_tr_pre ms = 20 ms + tau_tr_post ms = 20 ms + tau_perm ms = 18500 ms + mu_plus real = 1 + Wmax real = 100. + Wmin real = 0. + dt_min ms = - 5. ms + dt_max ms = - 50. ms + + equations: + kernel pre_trace_kernel = exp(-t / tau_tr_pre) + inline pre_trace real = convolve(pre_trace_kernel, pre_spikes) + + # all-to-all trace of postsynaptic neuron + kernel post_trace_kernel = exp(-t / tau_tr_post) + inline post_trace real = convolve(post_trace_kernel, post_spikes) + + w' = (Wmin-w) / tau_perm + + input: + pre_spikes <- spike + post_spikes <- spike + z_post pA <- continuous + + output: + spike + + onReceive(post_spikes): + delta_t ms = t_last_pre_spike - ( t + d ) + # potentiate synapse + w_ real = 0. 
+ if delta_t > dt_max and delta_t < dt_min: + w_ = w + Wmax * lambda * ( w / Wmax )**mu_plus * pre_trace + w = min(Wmax, w_) + elif delta_t > dt_min: + w_ = w + Wmax * lambda * ( w / Wmax )**mu_plus * ( pre_trace - exp( delta_t / tau_tr_pre) ) + w = min(Wmax, w_) + + onReceive(pre_spikes): + t_last_pre_spike = t + # depress synapse + w_ real = w - lambda_minus * Wmax + w = max(Wmin, w_) + + # deliver spike to postsynaptic partner + deliver_spike(w, d) + diff --git a/tests/nest_tests/test_built_in_and_nestml_plastic_synapse.py b/tests/nest_tests/test_built_in_and_nestml_plastic_synapse.py new file mode 100644 index 000000000..7bcc63d05 --- /dev/null +++ b/tests/nest_tests/test_built_in_and_nestml_plastic_synapse.py @@ -0,0 +1,144 @@ +# -*- coding: utf-8 -*- +# +# test_built_in_and_nestml_plastic_synapse.py +# +# This file is part of NEST. +# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . + +import os +import os.path + +import nest +import numpy as np +import pytest +from pynestml.codegeneration.nest_tools import NESTTools + +from pynestml.frontend.pynestml_frontend import generate_nest_target + + +@pytest.mark.skipif(NESTTools.detect_nest_version().startswith("v2"), + reason="This test does not support NEST 2") +class TestBuiltInAndNESTMLPlasticSynapse: + r"""Test that synaptic plasticity works with both a NEST built-in plastic synapse and a NESTML custom plastic synapse attached to the same neuron.""" + + neuron_model = "iaf_psc_exp_nonlineardendrite" + synapse_model = "stdsp_synapse_no_permanence" + + def setup_nest(self): + files = [f"{TestBuiltInAndNESTMLPlasticSynapse.neuron_model}_alternate.nestml", + f"{TestBuiltInAndNESTMLPlasticSynapse.synapse_model}.nestml"] + input_path = [os.path.realpath(os.path.join(os.path.dirname(__file__), "resources", s)) for s in files] + + generate_nest_target( + input_path=input_path, + target_path="module", + logging_level="DEBUG", + module_name=f"nestml_{TestBuiltInAndNESTMLPlasticSynapse.neuron_model}_{TestBuiltInAndNESTMLPlasticSynapse.synapse_model}_module", + suffix="_nestml", + codegen_opts={ + "neuron_synapse_pairs": [ + { + "neuron": TestBuiltInAndNESTMLPlasticSynapse.neuron_model, + "synapse": TestBuiltInAndNESTMLPlasticSynapse.synapse_model, + "post_ports": ["post_spikes", ["z_post", "z"]], + } + ], + }, + ) + + # install custom neuron models + nest.Install(f"nestml_{TestBuiltInAndNESTMLPlasticSynapse.neuron_model}_{TestBuiltInAndNESTMLPlasticSynapse.synapse_model}_module") + + def _test_plasticity(self, neuron_model, synapse_model): + + print("testing plasticity for synapse mode " + str(synapse_model)) + + # parameters + Jns = 3000.0 + t_stop = 500.0 # [ms] + initial_weight = 123. 
+ + nest.ResetKernel() + + # create pre and post neurons + pre_neuron = nest.Create(neuron_model) + post_neuron = nest.Create(neuron_model) + + syn_spec = { + "synapse_model": synapse_model, + "receptor_type": 1, # external input + "lambda": 1E-3, + "weight": initial_weight, + } + + if synapse_model != "stdp_synapse": + syn_spec["lambda_minus"] = 1E-4 + + # connect pre and post + nest.Connect( + pre_neuron, + post_neuron, + syn_spec=syn_spec, + ) + + # create and connect stimulus source + pre_stimulus = nest.Create( + "spike_generator", {"spike_times": [float(5 * i) for i in range(1, 200)]} + ) + post_stimulus = nest.Create( + "spike_generator", {"spike_times": [float(10 + 5 * i) for i in range(1, 200)]} + ) + sr_pre = nest.Create("spike_recorder") + nest.Connect(pre_neuron, sr_pre) + sr_post = nest.Create("spike_recorder") + nest.Connect(post_neuron, sr_post) + + nest.Connect(pre_stimulus, pre_neuron, syn_spec={"weight": Jns, "receptor_type": 1}) + nest.Connect( + post_stimulus, post_neuron, syn_spec={"weight": Jns, "receptor_type": 1} + ) + + connection_before = nest.GetConnections(synapse_model=synapse_model) + weight_before = connection_before.get("weight") + np.testing.assert_allclose(initial_weight, weight_before) + + print("\nconnections before learning:") + print(connection_before) + + # simulate + nest.Simulate(t_stop) + + connection_after = nest.GetConnections(synapse_model=synapse_model) + weight_after = connection_after.get("weight") + + print("\nconnections after learning:") + print(connection_after) + + assert np.abs(weight_before - weight_after) > 1., "Weight did not change during STDP induction protocol!" + + def test_plasticity(self): + self.setup_nest() + + self._test_plasticity( + neuron_model=f"{self.neuron_model}_nestml__with_{self.synapse_model}_nestml", + synapse_model=f"{self.synapse_model}_nestml__with_{self.neuron_model}_nestml", + ) + + self._test_plasticity( + neuron_model=f"{self.neuron_model}_nestml__with_{self.synapse_model}_nestml", + synapse_model="stdp_synapse", + ) From b04ecf1b0b63b7bb8fb1628db942cb48523aaec3 Mon Sep 17 00:00:00 2001 From: "C.A.P. Linssen" Date: Tue, 5 Dec 2023 04:24:33 -0800 Subject: [PATCH 3/6] ensure compatibility with NESTML custom as well as NEST built-in synaptic plasticity models --- .../resources_nest/point_neuron/common/SynapseHeader.h.jinja2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 index 653e9fbd5..bf1f89100 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/SynapseHeader.h.jinja2 @@ -1262,7 +1262,7 @@ inline void // get spike history in relevant range (t_last_update, t_trig] from postsyn. neuron std::deque< histentry__{{paired_neuron}} >::iterator start; std::deque< histentry__{{paired_neuron}} >::iterator finish; - static_cast<{{paired_neuron}}*>(get_target(t))->get_history__( t_last_update_ - dendritic_delay, t_trig - dendritic_delay, &start, &finish ); + static_cast<{{paired_neuron}}*>(get_target(t))->get_history_nestml_( t_last_update_ - dendritic_delay, t_trig - dendritic_delay, &start, &finish ); // facilitation due to postsyn. spikes since last update double t0 = t_last_update_; From 917bdc335f5571de3f5ae5a1dd01b6793384b197 Mon Sep 17 00:00:00 2001 From: "C.A.P. 
Linssen" Date: Tue, 5 Dec 2023 05:28:27 -0800 Subject: [PATCH 4/6] ensure compatibility with NESTML custom as well as NEST built-in synaptic plasticity models --- .../resources_nest/point_neuron/common/NeuronClass.jinja2 | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 index f5b5b715b..6eff876c6 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 @@ -1066,6 +1066,10 @@ S_.ode_state[State_::{{variable_name}}] = ode_state_bak[State_::{{variable_name} void {{neuronName}}::clear_history() { +{%- if neuron_parent_class in ["ArchivingNode", "Archiving_Node"] %} + ArchivingNode::clear_history(); +{%- endif %} + last_spike_nestml_ = -1.0; history_nestml_.clear(); } From f91df46b681b8ffafc82e6dc0205aefbcae80733 Mon Sep 17 00:00:00 2001 From: "C.A.P. Linssen" Date: Thu, 7 Dec 2023 03:43:32 -0800 Subject: [PATCH 5/6] ensure compatibility with NESTML custom as well as NEST built-in synaptic plasticity models --- .../resources_nest/point_neuron/common/NeuronClass.jinja2 | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 index 6eff876c6..323325b6c 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 @@ -881,7 +881,7 @@ void {{neuronName}}::register_stdp_connection( double t_first_read, double delay ) { {%- if neuron_parent_class in ["ArchivingNode", "Archiving_Node"] %} - ArchivingNode::register_stdp_connection(t_first_read, delay); + {{ neuron_parent_class }}::register_stdp_connection(t_first_read, delay); {%- endif %} // Mark all entries in the deque, which we will not read in future as read by @@ -1067,7 +1067,7 @@ void {{neuronName}}::clear_history() { {%- if neuron_parent_class in ["ArchivingNode", "Archiving_Node"] %} - ArchivingNode::clear_history(); + {{ neuron_parent_class }}::clear_history(); {%- endif %} last_spike_nestml_ = -1.0; From fcf3e68f7702ac0fa46778eb6e1e36a6f96e7715 Mon Sep 17 00:00:00 2001 From: "C.A.P. 
Linssen" Date: Thu, 20 Jun 2024 17:38:24 +0200 Subject: [PATCH 6/6] ensure compatibility with NESTML custom as well as NEST built-in synaptic plasticity models --- .../codegeneration/nest_code_generator.py | 1 + ...f_psc_exp_nonlineardendrite_neuron.nestml} | 28 +++++++++++++-- ...tml => stdsp_no_permanence_synapse.nestml} | 36 ++++++++++++++++--- ...est_built_in_and_nestml_plastic_synapse.py | 15 ++++---- 4 files changed, 65 insertions(+), 15 deletions(-) rename tests/nest_tests/resources/{iaf_psc_exp_nonlineardendrite_alternate.nestml => iaf_psc_exp_nonlineardendrite_neuron.nestml} (78%) rename tests/nest_tests/resources/{stdsp_synapse_no_permanence.nestml => stdsp_no_permanence_synapse.nestml} (58%) diff --git a/pynestml/codegeneration/nest_code_generator.py b/pynestml/codegeneration/nest_code_generator.py index a4514efde..64ad13f17 100644 --- a/pynestml/codegeneration/nest_code_generator.py +++ b/pynestml/codegeneration/nest_code_generator.py @@ -169,6 +169,7 @@ def __init__(self, options: Optional[Mapping[str, Any]] = None): def run_nest_target_specific_cocos(self, neurons: Sequence[ASTModel], synapses: Sequence[ASTModel]): for synapse in synapses: synapse_name_stripped = removesuffix(removesuffix(synapse.name.split("_with_")[0], "_"), FrontendConfiguration.suffix) + assert synapse_name_stripped in self.get_option("delay_variable").keys(), "Please specify a ``delay_variable`` for the synapse '" + synapse_name_stripped + "'" delay_variable = self.get_option("delay_variable")[synapse_name_stripped] CoCoNESTSynapseDelayNotAssignedTo.check_co_co(delay_variable, synapse) if Logger.has_errors(synapse): diff --git a/tests/nest_tests/resources/iaf_psc_exp_nonlineardendrite_alternate.nestml b/tests/nest_tests/resources/iaf_psc_exp_nonlineardendrite_neuron.nestml similarity index 78% rename from tests/nest_tests/resources/iaf_psc_exp_nonlineardendrite_alternate.nestml rename to tests/nest_tests/resources/iaf_psc_exp_nonlineardendrite_neuron.nestml index 12a9b6262..38e22066a 100644 --- a/tests/nest_tests/resources/iaf_psc_exp_nonlineardendrite_alternate.nestml +++ b/tests/nest_tests/resources/iaf_psc_exp_nonlineardendrite_neuron.nestml @@ -1,4 +1,29 @@ -neuron iaf_psc_exp_nonlineardendrite: +""" +iaf_psc_exp_nonlineardendrite_neuron.nestml +########################################### + + +Copyright statement ++++++++++++++++++++ + +This file is part of NEST. + +Copyright (C) 2004 The NEST Initiative + +NEST is free software: you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation, either version 2 of the License, or +(at your option) any later version. + +NEST is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with NEST. If not, see . 
+""" +model iaf_psc_exp_nonlineardendrite_neuron: state: V_m mV = 0mV # membrane potential in mV @@ -8,7 +33,6 @@ neuron iaf_psc_exp_nonlineardendrite: ref_counts integer = 0 equations: - # exponential shaped postsynaptic current kernel kernel I_kernel1 = exp(-1/tau_syn1*t) diff --git a/tests/nest_tests/resources/stdsp_synapse_no_permanence.nestml b/tests/nest_tests/resources/stdsp_no_permanence_synapse.nestml similarity index 58% rename from tests/nest_tests/resources/stdsp_synapse_no_permanence.nestml rename to tests/nest_tests/resources/stdsp_no_permanence_synapse.nestml index 016d54ffd..b099d4dfe 100644 --- a/tests/nest_tests/resources/stdsp_synapse_no_permanence.nestml +++ b/tests/nest_tests/resources/stdsp_no_permanence_synapse.nestml @@ -1,12 +1,36 @@ +""" +stdsp_no_permanence_synapse.nestml +################################## -synapse stdsp_synapse_no_permanence: + +Copyright statement ++++++++++++++++++++ + +This file is part of NEST. + +Copyright (C) 2004 The NEST Initiative + +NEST is free software: you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation, either version 2 of the License, or +(at your option) any later version. + +NEST is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with NEST. If not, see . +""" +model stdsp_no_permanence_synapse: state: - w real = 1. @nest::weight - t_last_pre_spike ms = -1ms + w real = 1 + t_last_pre_spike ms = -1 ms parameters: - d ms = 2.0 ms @nest::delay # !!! cannot have a variable called "delay" + d ms = 2.0 ms lambda real = .01 lambda_minus real = .01 tau_tr_pre ms = 20 ms @@ -54,5 +78,7 @@ synapse stdsp_synapse_no_permanence: w = max(Wmin, w_) # deliver spike to postsynaptic partner - deliver_spike(w, d) + emit_spike(w, d) + update: + integrate_odes() diff --git a/tests/nest_tests/test_built_in_and_nestml_plastic_synapse.py b/tests/nest_tests/test_built_in_and_nestml_plastic_synapse.py index 7bcc63d05..6b99fa08f 100644 --- a/tests/nest_tests/test_built_in_and_nestml_plastic_synapse.py +++ b/tests/nest_tests/test_built_in_and_nestml_plastic_synapse.py @@ -35,17 +35,16 @@ class TestBuiltInAndNESTMLPlasticSynapse: r"""Test that synaptic plasticity works with both a NEST built-in plastic synapse and a NESTML custom plastic synapse attached to the same neuron.""" - neuron_model = "iaf_psc_exp_nonlineardendrite" - synapse_model = "stdsp_synapse_no_permanence" + neuron_model = "iaf_psc_exp_nonlineardendrite_neuron" + synapse_model = "stdsp_no_permanence_synapse" def setup_nest(self): - files = [f"{TestBuiltInAndNESTMLPlasticSynapse.neuron_model}_alternate.nestml", + files = [f"{TestBuiltInAndNESTMLPlasticSynapse.neuron_model}.nestml", f"{TestBuiltInAndNESTMLPlasticSynapse.synapse_model}.nestml"] input_path = [os.path.realpath(os.path.join(os.path.dirname(__file__), "resources", s)) for s in files] generate_nest_target( input_path=input_path, - target_path="module", logging_level="DEBUG", module_name=f"nestml_{TestBuiltInAndNESTMLPlasticSynapse.neuron_model}_{TestBuiltInAndNESTMLPlasticSynapse.synapse_model}_module", suffix="_nestml", @@ -57,12 +56,11 @@ def setup_nest(self): "post_ports": ["post_spikes", ["z_post", "z"]], } ], - }, + "delay_variable": {"stdsp_no_permanence_synapse": "d"}, + "weight_variable": 
{"stdsp_no_permanence_synapse": "w"} + } ) - # install custom neuron models - nest.Install(f"nestml_{TestBuiltInAndNESTMLPlasticSynapse.neuron_model}_{TestBuiltInAndNESTMLPlasticSynapse.synapse_model}_module") - def _test_plasticity(self, neuron_model, synapse_model): print("testing plasticity for synapse mode " + str(synapse_model)) @@ -73,6 +71,7 @@ def _test_plasticity(self, neuron_model, synapse_model): initial_weight = 123. nest.ResetKernel() + nest.Install(f"nestml_{TestBuiltInAndNESTMLPlasticSynapse.neuron_model}_{TestBuiltInAndNESTMLPlasticSynapse.synapse_model}_module") # create pre and post neurons pre_neuron = nest.Create(neuron_model)