Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
24 commits
Select commit Hold shift + click to select a range
a618214
Merge pull request #3207 from JesusEV/eprop_bio_feature
heplesser Mar 17, 2025
c59b4d8
Add support for delays between the recurrent-to-readout and readout-to-recurrent connections
JesusEV May 27, 2024
d675806
Ensure consistency of delays throughout network
JesusEV Jun 26, 2024
fcea737
Fix format and remove debugging code
JesusEV Jun 26, 2024
b443834
Use duration dictionary
JesusEV Jun 26, 2024
ba5031b
Update print_time flag
JesusEV Jun 26, 2024
5be08dd
Use duration dictionary consistently
JesusEV Jun 26, 2024
95ff877
Update default number of virtual procs
JesusEV Jun 26, 2024
86141ea
Use duration dictionary consistently
JesusEV Jun 26, 2024
fd8d136
Update default save_path
JesusEV Jun 26, 2024
da5b767
Set delays as doubles
JesusEV Jun 27, 2024
c17cf58
Add optimize_each_step flag to overloaded compute_gradient function
JesusEV Jul 2, 2024
c798595
Remove overloaded compute_gradient function
JesusEV Jul 2, 2024
aa13386
Fix rebase remaining conflicts
JesusEV Mar 17, 2025
6b02193
Adjust documentation
JesusEV Mar 28, 2025
d74f893
Remove unnecessary local variable
JesusEV Mar 28, 2025
fd55934
Apply suggestions regarding documentation
JesusEV Mar 28, 2025
6d023be
Refactor code
JesusEV Mar 28, 2025
431cd10
Apply suggestions regarding documentation
JesusEV Mar 28, 2025
c11284d
Refactor code
JesusEV Mar 28, 2025
2bd047f
Apply suggestions regarding documentation
JesusEV Mar 28, 2025
b1c3bc7
Fix error messages
JesusEV Mar 28, 2025
b45cbc6
Fix remaining documentation suggestions
JesusEV Mar 28, 2025
11110ad
Adjust target signal offset
JesusEV Jun 9, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
63 changes: 51 additions & 12 deletions models/eprop_iaf.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -85,6 +85,9 @@ eprop_iaf::Parameters_::Parameters_()
, kappa_( 0.97 )
, kappa_reg_( 0.97 )
, eprop_isi_trace_cutoff_( 1000.0 )
, delay_rec_out_( 1 )
, delay_out_rec_( 1 )
, delay_total_( 1 )
{
}

Expand Down Expand Up @@ -131,6 +134,8 @@ eprop_iaf::Parameters_::get( DictionaryDatum& d ) const
def< double >( d, names::kappa, kappa_ );
def< double >( d, names::kappa_reg, kappa_reg_ );
def< double >( d, names::eprop_isi_trace_cutoff, eprop_isi_trace_cutoff_ );
def< double >( d, names::delay_rec_out, Time( Time::step( delay_rec_out_ ) ).get_ms() );
def< double >( d, names::delay_out_rec, Time( Time::step( delay_out_rec_ ) ).get_ms() );
}

double
Expand Down Expand Up @@ -169,6 +174,14 @@ eprop_iaf::Parameters_::set( const DictionaryDatum& d, Node* node )
updateValueParam< double >( d, names::kappa_reg, kappa_reg_, node );
updateValueParam< double >( d, names::eprop_isi_trace_cutoff, eprop_isi_trace_cutoff_, node );

double delay_rec_out_ms = Time( Time::step( delay_rec_out_ ) ).get_ms();
updateValueParam< double >( d, names::delay_rec_out, delay_rec_out_ms, node );
delay_rec_out_ = Time( Time::ms( delay_rec_out_ms ) ).get_steps();

double delay_out_rec_ms = Time( Time::step( delay_out_rec_ ) ).get_ms();
updateValueParam< double >( d, names::delay_out_rec, delay_out_rec_ms, node );
delay_out_rec_ = Time( Time::ms( delay_out_rec_ms ) ).get_steps();

if ( C_m_ <= 0 )
{
throw BadProperty( "Membrane capacitance C_m > 0 required." );
Expand Down Expand Up @@ -214,6 +227,18 @@ eprop_iaf::Parameters_::set( const DictionaryDatum& d, Node* node )
throw BadProperty( "Cutoff of integration of eprop trace between spikes eprop_isi_trace_cutoff ≥ 0 required." );
}

if ( delay_rec_out_ < 1 )
{
throw BadProperty( "Connection delay from recurrent to readout neuron ≥ 1 required." );
}

if ( delay_out_rec_ < 1 )
{
throw BadProperty( "Connection delay from readout to recurrent neuron ≥ 1 required." );
}

delay_total_ = delay_rec_out_ + ( delay_out_rec_ - 1 );

return delta_EL;
}

Expand Down Expand Up @@ -278,6 +303,14 @@ eprop_iaf::pre_run_hook()

V_.P_v_m_ = std::exp( -dt / P_.tau_m_ );
V_.P_i_in_ = P_.tau_m_ / P_.C_m_ * ( 1.0 - V_.P_v_m_ );

if ( eprop_history_.empty() )
{
for ( long t = -P_.delay_total_; t < 0; ++t )
{
append_new_eprop_history_entry( t );
}
}
}


Expand Down Expand Up @@ -373,7 +406,8 @@ eprop_iaf::handle( DataLoggingRequest& e )
void
eprop_iaf::compute_gradient( const long t_spike,
const long t_spike_previous,
double& z_previous_buffer,
std::queue< double >& z_previous_buffer,
double& z_previous,
double& z_bar,
double& e_bar,
double& e_bar_reg,
Expand All @@ -382,26 +416,31 @@ eprop_iaf::compute_gradient( const long t_spike,
const CommonSynapseProperties& cp,
WeightOptimizer* optimizer )
{
double e = 0.0; // eligibility trace
double z = 0.0; // spiking variable
double z_current_buffer = 1.0; // buffer containing the spike that triggered the current integration
double psi = 0.0; // surrogate gradient
double L = 0.0; // learning signal
double firing_rate_reg = 0.0; // firing rate regularization
double grad = 0.0; // gradient
double e = 0.0; // eligibility trace
double z = 0.0; // spiking variable
double z_current = 1.0; // spike state that triggered the current integration
double psi = 0.0; // surrogate gradient
double L = 0.0; // learning signal
double firing_rate_reg = 0.0; // firing rate regularization
double grad = 0.0; // gradient

const EpropSynapseCommonProperties& ecp = static_cast< const EpropSynapseCommonProperties& >( cp );
const auto optimize_each_step = ( *ecp.optimizer_cp_ ).optimize_each_step_;

auto eprop_hist_it = get_eprop_history( t_spike_previous - 1 );
auto eprop_hist_it = get_eprop_history( t_spike_previous - P_.delay_total_ );

const long t_compute_until = std::min( t_spike_previous + V_.eprop_isi_trace_cutoff_steps_, t_spike );

for ( long t = t_spike_previous; t < t_compute_until; ++t, ++eprop_hist_it )
{
z = z_previous_buffer;
z_previous_buffer = z_current_buffer;
z_current_buffer = 0.0;
if ( P_.delay_total_ > 1 )
{
update_pre_syn_buffer_multiple_entries( z, z_current, z_previous, z_previous_buffer, t_spike, t );
}
else
{
update_pre_syn_buffer_one_entry( z, z_current, z_previous, z_previous_buffer, t_spike, t );
}

psi = eprop_hist_it->surrogate_gradient_;
L = eprop_hist_it->learning_signal_;
Expand Down
38 changes: 38 additions & 0 deletions models/eprop_iaf.h
Original file line number Diff line number Diff line change
Expand Up @@ -390,6 +390,7 @@ class eprop_iaf : public EpropArchivingNodeRecurrent< false >

void compute_gradient( const long,
const long,
std::queue< double >&,
double&,
double&,
double&,
Expand All @@ -402,6 +403,9 @@ class eprop_iaf : public EpropArchivingNodeRecurrent< false >
long get_shift() const override;
bool is_eprop_recurrent_node() const override;
long get_eprop_isi_trace_cutoff() const override;
long get_delay_total() const override;
long get_delay_recurrent_to_readout() const override;
long get_delay_readout_to_recurrent() const override;

//! Map for storing a static set of recordables.
friend class RecordablesMap< eprop_iaf >;
Expand Down Expand Up @@ -458,6 +462,15 @@ class eprop_iaf : public EpropArchivingNodeRecurrent< false >
//! Time interval from the previous spike until the cutoff of e-prop update integration between two spikes (ms).
double eprop_isi_trace_cutoff_;

//! Connection delay from recurrent to readout neuron.
long delay_rec_out_;

//! Connection delay from readout to recurrent neuron.
long delay_out_rec_;

//! Sum of connection delays from recurrent to readout neuron and from readout to recurrent neuron.
long delay_total_;

//! Default constructor.
Parameters_();

Expand Down Expand Up @@ -594,10 +607,35 @@ eprop_iaf::get_eprop_isi_trace_cutoff() const
return V_.eprop_isi_trace_cutoff_steps_;
}

//! Returns the total delay of the learning-signal loop in steps,
//! i.e. delay_rec_out_ + ( delay_out_rec_ - 1 ).
inline long
eprop_iaf::get_delay_total() const
{
  return P_.delay_total_;
}

//! Returns the connection delay from recurrent to readout neurons in steps.
inline long
eprop_iaf::get_delay_recurrent_to_readout() const
{
  return P_.delay_rec_out_;
}

//! Returns the connection delay from readout to recurrent neurons in steps.
inline long
eprop_iaf::get_delay_readout_to_recurrent() const
{
  return P_.delay_out_rec_;
}

inline size_t
eprop_iaf::send_test_event( Node& target, size_t receptor_type, synindex, bool )
{
  SpikeEvent e;

  // To perform a consistency check on the delay parameter d_rec_out between recurrent
  // neurons and readout neurons, the recurrent neuron sends a test event with the delay
  // specified by its own d_rec_out. Upon receiving the test event from the recurrent
  // neuron, the readout neuron checks whether the delay with which the event was
  // received matches its own specified delay parameter d_rec_out.
  e.set_delay_steps( P_.delay_rec_out_ );
  e.set_sender( *this );
  return target.handles_test_event( e, receptor_type );
}
Expand Down
61 changes: 49 additions & 12 deletions models/eprop_iaf_adapt.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -89,6 +89,9 @@ eprop_iaf_adapt::Parameters_::Parameters_()
, kappa_( 0.97 )
, kappa_reg_( 0.97 )
, eprop_isi_trace_cutoff_( 1000.0 )
, delay_rec_out_( 1 )
, delay_out_rec_( 1 )
, delay_total_( 1 )
{
}

Expand Down Expand Up @@ -139,6 +142,8 @@ eprop_iaf_adapt::Parameters_::get( DictionaryDatum& d ) const
def< double >( d, names::kappa, kappa_ );
def< double >( d, names::kappa_reg, kappa_reg_ );
def< double >( d, names::eprop_isi_trace_cutoff, eprop_isi_trace_cutoff_ );
def< double >( d, names::delay_rec_out, Time( Time::step( delay_rec_out_ ) ).get_ms() );
def< double >( d, names::delay_out_rec, Time( Time::step( delay_out_rec_ ) ).get_ms() );
}

double
Expand Down Expand Up @@ -179,6 +184,12 @@ eprop_iaf_adapt::Parameters_::set( const DictionaryDatum& d, Node* node )
updateValueParam< double >( d, names::kappa_reg, kappa_reg_, node );
updateValueParam< double >( d, names::eprop_isi_trace_cutoff, eprop_isi_trace_cutoff_, node );

double delay_rec_out_ms = Time( Time::step( delay_rec_out_ ) ).get_ms();
updateValueParam< double >( d, names::delay_rec_out, delay_rec_out_ms, node );
delay_rec_out_ = Time( Time::ms( delay_rec_out_ms ) ).get_steps();

double delay_out_rec_ms = Time( Time::step( delay_out_rec_ ) ).get_ms();
updateValueParam< double >( d, names::delay_out_rec, delay_out_rec_ms, node );
delay_out_rec_ = Time( Time::ms( delay_out_rec_ms ) ).get_steps();

if ( adapt_beta_ < 0 )
{
throw BadProperty( "Threshold adaptation prefactor adapt_beta ≥ 0 required." );
Expand Down Expand Up @@ -234,6 +245,18 @@ eprop_iaf_adapt::Parameters_::set( const DictionaryDatum& d, Node* node )
throw BadProperty( "Cutoff of integration of eprop trace between spikes eprop_isi_trace_cutoff ≥ 0 required." );
}

if ( delay_rec_out_ < 1 )
{
throw BadProperty( "Connection delay from recurrent to readout neuron ≥ 1 required." );
}

if ( delay_out_rec_ < 1 )
{
throw BadProperty( "Connection delay from readout to recurrent neuron ≥ 1 required." );
}

delay_total_ = delay_rec_out_ + ( delay_out_rec_ - 1 );
Comment on lines +257 to +258
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Introduce a variable with a meaningful name for the `1` and/or the `delay_out_rec_ - 1` term, here and in the other locations where this expression appears.


return delta_EL;
}

Expand Down Expand Up @@ -313,6 +336,14 @@ eprop_iaf_adapt::pre_run_hook()
V_.P_v_m_ = std::exp( -dt / P_.tau_m_ );
V_.P_i_in_ = P_.tau_m_ / P_.C_m_ * ( 1.0 - V_.P_v_m_ );
V_.P_adapt_ = std::exp( -dt / P_.adapt_tau_ );

if ( eprop_history_.empty() )
{
for ( long t = -P_.delay_total_; t < 0; ++t )
{
append_new_eprop_history_entry( t );
}
}
}


Expand Down Expand Up @@ -412,7 +443,8 @@ eprop_iaf_adapt::handle( DataLoggingRequest& e )
void
eprop_iaf_adapt::compute_gradient( const long t_spike,
const long t_spike_previous,
double& z_previous_buffer,
std::queue< double >& z_previous_buffer,
double& z_previous,
double& z_bar,
double& e_bar,
double& e_bar_reg,
Expand All @@ -421,26 +453,31 @@ eprop_iaf_adapt::compute_gradient( const long t_spike,
const CommonSynapseProperties& cp,
WeightOptimizer* optimizer )
{
double e = 0.0; // eligibility trace
double z = 0.0; // spiking variable
double z_current_buffer = 1.0; // buffer containing the spike that triggered the current integration
double psi = 0.0; // surrogate gradient
double L = 0.0; // learning signal
double firing_rate_reg = 0.0; // firing rate regularization
double grad = 0.0; // gradient
double e = 0.0; // eligibility trace
double z = 0.0; // spiking variable
double z_current = 1.0; // spike state that triggered the current integration
double psi = 0.0; // surrogate gradient
double L = 0.0; // learning signal
double firing_rate_reg = 0.0; // firing rate regularization
double grad = 0.0; // gradient

const EpropSynapseCommonProperties& ecp = static_cast< const EpropSynapseCommonProperties& >( cp );
const auto optimize_each_step = ( *ecp.optimizer_cp_ ).optimize_each_step_;

auto eprop_hist_it = get_eprop_history( t_spike_previous - 1 );
auto eprop_hist_it = get_eprop_history( t_spike_previous - P_.delay_total_ );

const long t_compute_until = std::min( t_spike_previous + V_.eprop_isi_trace_cutoff_steps_, t_spike );

for ( long t = t_spike_previous; t < t_compute_until; ++t, ++eprop_hist_it )
{
z = z_previous_buffer;
z_previous_buffer = z_current_buffer;
z_current_buffer = 0.0;
if ( P_.delay_total_ > 1 )
{
update_pre_syn_buffer_multiple_entries( z, z_current, z_previous, z_previous_buffer, t_spike, t );
}
else
{
update_pre_syn_buffer_one_entry( z, z_current, z_previous, z_previous_buffer, t_spike, t );
}

psi = eprop_hist_it->surrogate_gradient_;
L = eprop_hist_it->learning_signal_;
Expand Down
38 changes: 38 additions & 0 deletions models/eprop_iaf_adapt.h
Original file line number Diff line number Diff line change
Expand Up @@ -358,6 +358,7 @@ class eprop_iaf_adapt : public EpropArchivingNodeRecurrent< false >

void compute_gradient( const long,
const long,
std::queue< double >&,
double&,
double&,
double&,
Expand All @@ -370,6 +371,9 @@ class eprop_iaf_adapt : public EpropArchivingNodeRecurrent< false >
long get_shift() const override;
bool is_eprop_recurrent_node() const override;
long get_eprop_isi_trace_cutoff() const override;
long get_delay_total() const override;
long get_delay_recurrent_to_readout() const override;
long get_delay_readout_to_recurrent() const override;

//! Map for storing a static set of recordables.
friend class RecordablesMap< eprop_iaf_adapt >;
Expand Down Expand Up @@ -432,6 +436,15 @@ class eprop_iaf_adapt : public EpropArchivingNodeRecurrent< false >
//! Time interval from the previous spike until the cutoff of e-prop update integration between two spikes (ms).
double eprop_isi_trace_cutoff_;

//! Connection delay from recurrent to readout neuron.
long delay_rec_out_;

//! Connection delay from readout to recurrent neuron.
long delay_out_rec_;

//! Sum of connection delays from recurrent to readout neuron and readout to recurrent neuron.
long delay_total_;

//! Default constructor.
Parameters_();

Expand Down Expand Up @@ -591,10 +604,35 @@ eprop_iaf_adapt::get_eprop_isi_trace_cutoff() const
return V_.eprop_isi_trace_cutoff_steps_;
}

//! Returns the total delay of the learning-signal loop in steps,
//! i.e. delay_rec_out_ + ( delay_out_rec_ - 1 ).
inline long
eprop_iaf_adapt::get_delay_total() const
{
  return P_.delay_total_;
}

//! Returns the connection delay from recurrent to readout neurons in steps.
inline long
eprop_iaf_adapt::get_delay_recurrent_to_readout() const
{
  return P_.delay_rec_out_;
}

//! Returns the connection delay from readout to recurrent neurons in steps.
inline long
eprop_iaf_adapt::get_delay_readout_to_recurrent() const
{
  return P_.delay_out_rec_;
}

inline size_t
eprop_iaf_adapt::send_test_event( Node& target, size_t receptor_type, synindex, bool )
{
  SpikeEvent e;

  // To perform a consistency check on the delay parameter d_rec_out between recurrent
  // neurons and readout neurons, the recurrent neuron sends a test event with the delay
  // specified by its own d_rec_out. Upon receiving the test event from the recurrent
  // neuron, the readout neuron checks whether the delay with which the event was
  // received matches its own specified delay parameter d_rec_out.
  e.set_delay_steps( P_.delay_rec_out_ );
  e.set_sender( *this );
  return target.handles_test_event( e, receptor_type );
}
Expand Down
Loading
Loading