/* Copyright (C) 2005, 2006, 2007, 2008 Frank Michler, Philipps-University Marburg, Germany

   This program is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by the
   Free Software Foundation; either version 2 of the License, or (at your
   option) any later version.

   This program is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License along
   with this program; if not, write to the Free Software Foundation, Inc.,
   51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. */

// NOTE(review): this file was recovered from a mangled dump: the original
// newlines were collapsed and every span between a '<' and the matching '>'
// (loop bounds, comparisons, XML tag strings) was stripped by an HTML-like
// sanitizer.  The code below restores what is unambiguous; spans that could
// not be recovered are kept verbatim inside [UNRECOVERABLE] comment blocks
// and must be restored from the upstream sources before this file builds.

#include "sys.hpp"    // for libcwd
#include "debug.hpp"  // for libcwd
#include "vlearn.hpp"

// Base class of vector-connection learning rules.  Caches the connection's
// geometry (layer pointers, sizes, delays, time step) and raw views into its
// synapse arrays so derived rules can update weights without re-querying.
// c          : connection whose synapses this rule will modify (not owned)
// _maxWeight : upper clamp for synaptic weights; lower clamp is fixed at 0
veclearning::veclearning(VecConnection* c, float _maxWeight)
  : SimElement(seLearning), con(c), maxWeight(_maxWeight), minWeight(0)
{
  Name = "VecLearning";
  cout << "Initialize VecLearning Object, maxWeight= " << maxWeight << "\n";

  VecConnectionInfo ConInf = con->GetConnectionInfo();
  Dmax          = ConInf.Dmax;
  maximumDelay  = ConInf.maximumDelay;
  cout << "Dmax=" << Dmax << "\n";

  TargetLayer   = ConInf.TargetLayer;
  SourceLayer   = ConInf.SourceLayer;
  nt            = TargetLayer->N;
  ns            = SourceLayer->N;
  dt            = TargetLayer->GetDt();
  MacroTimeStep = TargetLayer->GetMacroTimeStep();
  cout << "Initialize VecLearning Object: N_Target =" << nt << "\n";

  // Shared views into the connection's synapse bookkeeping.
  PSynWeights  = ConInf.PSynWeights;
  PSynTargetNr = ConInf.PSynTargetNr;
  PSynSourceNr = ConInf.PSynSourceNr;
  PSynDelays   = ConInf.PSynDelays;
  PPreSynNr    = ConInf.PPreSynNr;
  Pdelays      = ConInf.Pdelays;
}

veclearning::~veclearning()
{
  cout << "veclearning Destructor\n";
  fflush(stdout);
}

///////////////////////////////////////

// Hebbian learning rule with an exponentially decaying presynaptic trace.
// TauDec      : time constant of the trace decay (same unit as dt)
// _BaseLine   : subtracted from the trace, allowing depression as well as
//               potentiation
// _LearnSpeed : scales every weight update
// _Accumulate : true  -> each presynaptic spike ADDS LtpInc to the trace;
//               false -> each presynaptic spike RESETS the trace to LtpInc
VecLearnHebbLP2::VecLearnHebbLP2(VecConnection* con, float _MaxWeight,
                                 float TauDec, float _BaseLine,
                                 float _LearnSpeed, bool _Accumulate)
  : veclearning(con, _MaxWeight),
    BaseLine(_BaseLine),
    LearnSpeed(_LearnSpeed),
    LtpInc(1),
    Accumulate(_Accumulate)
{
  LtpDecFac = exp(-dt / TauDec);  // per-step decay factor of the trace

  NewArray2d(LTP, ns, 1001 + Dmax);  // presynaptic trace (source)
  // [RECONSTRUCTION] The zero-initialisation loop lost its bounds in the
  // dump ("for (i=0;i<..." was stripped); clearing the freshly allocated
  // trace array is the evident intent.  TODO(review): confirm upstream.
  for (int i = 0; i < ns; ++i)
    for (int j = 0; j < 1001 + Dmax; ++j)
      LTP[i][j] = 0;
}

/* [UNRECOVERABLE] An XML/status-writing method followed here; its signature
   and all of its tag strings were eaten by the sanitizer.  Surviving
   fragment, verbatim:

     \n"; fw << " \n"; fw << "IdNumber << "\"/> \n"; fw << " \n";
     fw << " \n"; fw << " \n"; fw << " \n"; fw << " \n"; fw << " \n";
     fw << " \n"; }

   Presumably a SaveSimInfo(fw)-style writer emitting XML tags around the
   object's IdNumber — restore from the upstream sources before building. */

// Apply one simulation time step of the Hebbian rule.
// TotalTime : global simulation time; t = TotalTime % MacroTimeStep indexes
//             the per-macro-step firing and trace buffers.
// Returns 0 (return statement reconstructed; see note at the end).
int VecLearnHebbLP2::proceede(int TotalTime)
{
  // cout <<"L";fflush(stdout); //remove
  int t = int(TotalTime % MacroTimeStep);
  int i, j;

  // Consistency check on the source layer's firing bookkeeping.
  int spike = SourceLayer->last_N_firings;
  if ((SourceLayer->firings[spike][0] != t)
      && (SourceLayer->last_N_firings < SourceLayer->N_firings)) {  // comparison reconstructed from the diagnostics below
    cerr << "this should never happen, I just don't know why ;-)\n";
    cerr << "programming error, wrong firing indices \n";
    cerr << "t=" << t << " SourceLayer->firings[spike][0] =" << SourceLayer->firings[spike][0] << "\n";
    cerr << "SourceLayer->N_firings=" << SourceLayer->N_firings << "\n";
    cerr << "SourceLayer->last_N_firings=" << SourceLayer->last_N_firings << "\n";
  }

  // Update presynaptic learning potential (for LTP): accumulate spikes into
  // the trace, or reset the trace to LtpInc on every spike.
  if (Accumulate)
    while (spike < SourceLayer->N_firings)
      LTP[SourceLayer->firings[spike++][1]][t + Dmax] += LtpInc;
  else
    while (spike < SourceLayer->N_firings)
      LTP[SourceLayer->firings[spike++][1]][t + Dmax] = LtpInc;

  // For every target spike this step, move each afferent weight by the
  // delay-shifted presynaptic trace minus BaseLine, then clamp the weight
  // into [minWeight, maxWeight].
  spike = TargetLayer->last_N_firings;
  while (spike < TargetLayer->N_firings) {
    i = TargetLayer->firings[spike++][1];
    for (j = 0; j < (*PPreSynNr)[i].size(); j++) {
      T_NSynapses SynNr = (*PPreSynNr)[i][j];
      (*PSynWeights)[SynNr] += LearnSpeed
        * (LTP[(*PSynSourceNr)[SynNr]][t + Dmax - (*PSynDelays)[SynNr] - 1] - BaseLine);
      if ((*PSynWeights)[SynNr] > maxWeight) {
        (*PSynWeights)[SynNr] = maxWeight;
      } else if ((*PSynWeights)[SynNr] < minWeight) {
        (*PSynWeights)[SynNr] = minWeight;
      }
    }
  }

  // [RECONSTRUCTION] The method's tail (presynaptic-trace decay into the
  // next time slot and the return statement) was stripped; only
  // "for (i=0;i" survives.  The carry-over below is the standard
  // spnet-style trace decay.  TODO(review): confirm against upstream.
  for (i = 0; i < ns; ++i)
    LTP[i][t + Dmax + 1] = LtpDecFac * LTP[i][t + Dmax];
  return 0;
}

/* [UNRECOVERABLE / TRUNCATED] A second proceede() (of a rule using LPpre /
   LPpost / LearnRate — likely a postsynaptically gated Hebbian variant)
   followed; its signature was eaten mid-file and its tail runs past the end
   of this chunk.  Surviving body, verbatim (eaten '<' marked as [<]):

   ... spike = SourceLayer->last_N_firings;
   if ((SourceLayer->firings[spike][0] != t)
       && (SourceLayer->last_N_firings [<] SourceLayer->N_firings )) {
     cout << "programming error, wrong firing indices \n";
     cout << "t=" << t << " SourceLayer->firings[spike][0] ="
          << SourceLayer->firings[spike][0] << "\n";
     cout << "SourceLayer->N_firings=" << SourceLayer->N_firings << "\n";
     cout << "SourceLayer->last_N_firings=" << SourceLayer->last_N_firings << "\n";
   }
   // update presynaptic learning potential (for ltp)
   while (spike < SourceLayer->N_firings)
     LPpre[SourceLayer->firings[spike++][1]][t+Dmax] += LearnRate;
   spike = TargetLayer->last_N_firings;
   while (spike < TargetLayer->N_firings) {
     i = TargetLayer->firings[spike++][1];
     LPpost[i] += 1;
     for (j=0; j<(*PPreSynNr)[i].size(); j++) {
       T_NSynapses SynNr = (*PPreSynNr)[i][j];
       (*PSynWeights)[SynNr] += LPpost[i]
         * (LPpre[(*PSynSourceNr)[SynNr]][t+Dmax-(*PSynDelays)[SynNr]-1] - BaseLine);
       if ((*PSynWeights)[SynNr] > maxWeight) (*PSynWeights)[SynNr] = maxWeight;
       if ((*PSynWeights)[SynNr] < minWeight) (*PSynWeights)[SynNr] = minWeight;
     }
   }
   // decrease synaptic potentials
   for (i=0;i[<]...        <-- chunk ends here (truncated)
*/