/* Copyright (C) 2005, 2006, 2007 Frank Michler, Philipps-University Marburg, Germany

   This program is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free Software
   Foundation; either version 2 of the License, or (at your option) any later
   version.

   This program is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
   FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.

   You should have received a copy of the GNU General Public License along with
   this program; if not, write to the Free Software Foundation, Inc.,
   51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. */

#include "sys.hpp"   // for libcwd
#include "debug.hpp" // for libcwd
#include "vnormalize.hpp"

////////Normalize//////////////////////////////////

VecNormalize::VecNormalize() : AbstractNormalize(), Target(0), NTarget(0), NSource(0)
{
}

int VecNormalize::AddConnection(VecConnection* newcon)
{
  if (Target == 0) {
    Target = newcon->GetTargetLayer();
    NTarget = Target->N;
  }
  if (Target == newcon->GetTargetLayer()) {
    ConList.push_back(newcon);
  } else {
    cerr << "ERROR: Target-Layer not the same\n";
    exit(1);
  }
  NSource += newcon->GetNSource();
  if (RewiringOn) {
    // if the VecNormalize object does the rewiring, the connection object shouldn't do it too!
    newcon->SetRewiringOff();
  }
  return 0;
}

int VecNormalize::WriteSimInfo(fstream &fw)
{
  stringstream sstr;
  // (placeholder element name; the original XML markup is not recoverable here)
  sstr << "<TargetLayer id=\"" << Target->IdNumber << "\"/> \n";
  SimElement::WriteSimInfo(fw, sstr.str());
  return 0;
}

int VecNormalize::WriteSimInfo(fstream &fw, const string &ChildInfo)
{
  stringstream sstr;
  // (placeholder element name; the XML markup of the following lines is not recoverable here)
  sstr << "<TargetLayer id=\"" << Target->IdNumber << "\"/> \n";
  sstr << " \n";
  sstr << " \n";
  sstr << " \n";
  sstr << "\n";
  sstr << ChildInfo;
  SimElement::WriteSimInfo(fw, sstr.str());
  return 0;
}

void VecNormalize::SetRewiring(float _SynDelThreshold, float _IncommingConnectivity, float _InitialWeights)
{
  RewiringOn = true;
  IncommingConnectivity = _IncommingConnectivity;
  SynDelThreshold = _SynDelThreshold;
  InitialWeights = _InitialWeights;

  // turn off rewiring in the VecConnection objects,
  // because now the normalization object handles the rewiring
  for (vector<VecConnection*>::iterator it = ConList.begin(); it != ConList.end(); ++it) {
    (*it)->SetRewiringOff();
  }
}

int VecNormalize::Rewire()
{
  int tar = 0;
  int NCon = ConList.size();

  // delete low weights
  int NDeletedSynapses = 0;
  for (vector<VecConnection*>::iterator it = ConList.begin(); it != ConList.end(); ++it) {
    NDeletedSynapses += (*it)->DeleteLowWeights(SynDelThreshold);
  }
  cout << "VecNormalize::Rewire() TotalDeletedSynapses=" << NDeletedSynapses << "\n";

  // target number of incoming synapses per target neuron
  int NMaxWeights = int(round(IncommingConnectivity*NSource));
  vector<int> NNewWeights(NTarget);
  for (tar=0; tar<NTarget; ++tar) NNewWeights[tar] = NMaxWeights;
  vector<vector<int> > NFreeWeights(NCon, vector<int>(NTarget));
  vector<vector<int> > NSglConNewWeights(NCon, vector<int>(NTarget));

  // count incoming weights
  vector<int> TotalFreeWeights(NTarget);
  for (int ConNr=0; ConNr<NCon; ++ConNr) {
    int CurNs = ConList[ConNr]->ns;
    for (int tar=0; tar<NTarget; ++tar) {
      int PreSynSize = (ConList[ConNr]->PreSynNr[tar]).size();
      NNewWeights[tar] -= PreSynSize;
      NFreeWeights[ConNr][tar] = CurNs-PreSynSize;
      TotalFreeWeights[tar] += NFreeWeights[ConNr][tar];
    }
  }

  if (NDeletedSynapses > 0) {
    for (tar=0; tar<NTarget; ++tar) {
      if (NNewWeights[tar] > 0) {
        cout << "NNewWeights[" << tar << "]=" << NNewWeights[tar] << "\n";
        if (TotalFreeWeights[tar] > 0) {
          // give each connection a share of the new synapses proportional to its free
          // synapse slots (loop body reconstructed; the original statement was lost)
          float CurNewWeights = float(NNewWeights[tar])/TotalFreeWeights[tar];
          for (int ConNr=0; ConNr<NCon; ++ConNr) {
            int CurN = int(CurNewWeights*NFreeWeights[ConNr][tar]);
            NSglConNewWeights[ConNr][tar] = CurN;
            NNewWeights[tar] -= CurN;
          }
        }
        // after the proportional assignment at most NCon-1 synapses can be left over
        if (NNewWeights[tar] >= NCon) {
          cerr << "fatal ERROR: NNewWeights[tar] should be less than NConnections! (exiting)\n";
          exit(1);
        }
        // distribute the remaining new synapses randomly over the connections
        while (NNewWeights[tar] > 0) {
          int Winner = gsl_rng_uniform_int(gslr, NCon);
          ++NSglConNewWeights[Winner][tar];
          --NNewWeights[tar];
        }
      }
    }
  }

  // setting new weights
  for (int ConNr=0; ConNr<NCon; ++ConNr) {
    ConList[ConNr]->SetNewWeights(&(NSglConNewWeights[ConNr]), InitialWeights);
  }
  return 0;
}

int VecNormalize::prepare(int Step)
{
  if (RewiringOn) Rewire();
  return 0;
}
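// Illustrative sketch (not part of the original file): the allocation scheme used in
// VecNormalize::Rewire() above, written out on plain vectors for one target neuron.
// NNewWeights is the number of incoming synapses still missing after low weights were
// deleted, and NFreeWeights[c] is the number of free synapse slots of connection c.
// Each connection first receives a share proportional to its free slots; the remainder
// lost to integer truncation (at most one synapse per connection) is assigned uniformly
// at random, mirroring the gsl_rng_uniform_int() loop above. The function name and the
// use of std::rand() instead of the GSL generator are our own choices, made only to keep
// the sketch self-contained; it assumes <vector> and <cstdlib> are already available
// through the headers included above.
static std::vector<int> ExampleDistributeNewSynapses(int NNewWeights,
                                                     const std::vector<int>& NFreeWeights)
{
  std::vector<int> NewPerConnection(NFreeWeights.size(), 0);
  int NCon = int(NFreeWeights.size());
  if (NCon == 0) return NewPerConnection;     // no connections: nothing to distribute

  int TotalFree = 0;
  for (int c = 0; c < NCon; ++c) TotalFree += NFreeWeights[c];

  if (TotalFree > 0) {
    // proportional share, truncated to an integer; leaves a remainder smaller than NCon
    float PerFreeSlot = float(NNewWeights)/TotalFree;
    for (int c = 0; c < NCon; ++c) {
      int CurN = int(PerFreeSlot*NFreeWeights[c]);
      NewPerConnection[c] = CurN;
      NNewWeights -= CurN;
    }
  }
  // assign the remaining synapses uniformly at random over the connections
  while (NNewWeights > 0) {
    ++NewPerConnection[std::rand() % NCon];
    --NNewWeights;
  }
  return NewPerConnection;
}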
//////////////////////////////

/*! \brief Normalize synaptic weights if firing rates are above a threshold.

    NormFrequency: above this spike frequency normalization occurs.
    Weights are multiplied with (1-NormFactor); the higher the spike frequency,
    the larger the weight reduction, up to a maximum of (1-NormFactor*MaxNormFactor).

    A look-up table is used to determine the current normalization factor, depending on
    the current spike frequency (time difference DeltaT between the current spike and the
    last spike):

        NormLut(DeltaT) = 1 - MaxNormFactor*NormFactor*exp(-DeltaT/Tau)

    The weight is multiplied with NormLut(DeltaT).

    @param _NormFrequency threshold frequency; above this frequency normalization occurs
    @param _NormFactor weight normalization factor; if the postsynaptic neuron fires with
           _NormFrequency, the synaptic weight is multiplied with 1-_NormFactor
    @param _MaxNormFactor maximal normalization: (1-NormFactor*MaxNormFactor).
           If the postsynaptic neuron fires with a frequency higher than _NormFrequency,
           the normalization is stronger; for an infinite firing rate the normalization
           strength can rise up to _MaxNormFactor times _NormFactor.
    @author (fm)
*/
VecFiringRateNormalize2::VecFiringRateNormalize2(float _NormFrequency, float _NormFactor, float _MaxNormFactor)
  : PostSynLastFirings(0), MaxNormFactor(_MaxNormFactor), NormFactor(_NormFactor), NormFrequency(_NormFrequency)
{
  NormDeltaT = 1000./(NormFrequency*dt);
  Tau = NormDeltaT/log(MaxNormFactor);
  NormLut = ExpDecayLut(NormLutN, Tau, -MaxNormFactor*NormFactor, 1, dt, NormDeltaT/Tau);
  cout << "VecFiringRateNormalization2\n";
  cout << "NormLut=" << "\n";
  // (print loop reconstructed; the original statement was lost in this copy)
  for (int i=0; i<NormLutN; ++i) cout << NormLut[i] << " ";
  cout << "\n";
}

int VecFiringRateNormalize2::WriteSimInfo(fstream &fw)
{
  stringstream sstr;
  // (the XML attribute output, presumably the three constructor parameters,
  //  is not recoverable here)
  sstr << "\n";
  sstr << "\n";
  sstr << "\n";
  VecNormalize::WriteSimInfo(fw, sstr.str());
  return 0;
}

int VecFiringRateNormalize2::AddConnection(VecConnection* newcon)
{
  VecNormalize::AddConnection(newcon);
  if (PostSynLastFirings == 0) {
    PostSynLastFirings = new int [NTarget];
    // (initial value reconstructed: far enough in the past that the first spike
    //  of each neuron triggers no normalization)
    int i;
    for (i=0; i<NTarget; ++i) PostSynLastFirings[i] = -NormLutN;
  }
  return 0;
}

// (parameter list reconstructed; t is the current time step)
int VecFiringRateNormalize2::proceede(int t)
{
  int spike = Target->last_N_firings;
  int CurTarget;
  int i, j, TDiff;
  float NormFactor = 1;
  while (spike < Target->N_firings) {
    CurTarget = Target->firings[spike][1];
    TDiff = t-PostSynLastFirings[CurTarget];
    if (TDiff < NormLutN) {
      // inter-spike interval within the range of the look-up table:
      // scale down the incoming weights of this neuron
      NormFactor = NormLut[TDiff];
      for (vector<VecConnection*>::iterator it = ConList.begin(); it != ConList.end(); ++it) {
        (*it)->MultiplyTargetWeights(CurTarget, NormFactor);
      }
    }
    PostSynLastFirings[CurTarget] = t;
    ++spike;
  }
  return 0;
}

int VecFiringRateNormalize2::prepare(int Step)
{
  VecNormalize::prepare(Step);
  int i;
  cout << "shifting lastspikes\n"; fflush(stdout);
  // shift the stored last-firing times back by the duration of the finished simulation
  // block; the original expression was lost, and MacroTimeStep is an assumed name for
  // that duration in time steps
  for (i=0; i<NTarget; ++i) PostSynLastFirings[i] -= MacroTimeStep;
  return 0;
}
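// Illustrative sketch (not part of the original file): the weight factor applied by
// VecFiringRateNormalize2, written out as a plain function of the inter-spike interval
// DeltaT (in time steps of length dt_ms milliseconds). It follows the formula documented
// above: with NormDeltaT = 1000/(NormFrequency*dt_ms) and Tau = NormDeltaT/log(MaxNormFactor),
//   factor(DeltaT) = 1 - MaxNormFactor*NormFactor*exp(-DeltaT/Tau),
// so factor(NormDeltaT) = 1-NormFactor (firing exactly at the threshold rate) and
// factor(0) = 1-MaxNormFactor*NormFactor (the strongest possible reduction). In the class
// these values are precomputed into the NormLut table; this free function only makes the
// mapping explicit. The function and parameter names are ours; dt_ms corresponds to the
// simulator's global dt.
static float ExampleFiringRateNormFactor(int DeltaT, float NormFrequency,
                                         float NormFactor, float MaxNormFactor, float dt_ms)
{
  float NormDeltaT = 1000.f/(NormFrequency*dt_ms);    // threshold inter-spike interval in time steps
  float Tau        = NormDeltaT/log(MaxNormFactor);   // decay constant of the look-up table
  return 1.f - MaxNormFactor*NormFactor*exp(-DeltaT/Tau);
}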
// (parameter list reconstructed; lost in this copy)
int VecConstSumNormalize::proceede(int t)
{
  int spike = Target->last_N_firings;
  int CurTarget;
  int i, j;
  float CurWeightSum, NormFactor;
  float tmpweight;
  if (quadratic) {
    cerr << " VecConstSumNormalize::proceede/QUADRATIC not implemented yet\n";
    exit(1);
//     while (spike < Target->N_firings) {
//       CurTarget = Target->firings[spike][1];
//       // calculate WeightSum
//       CurWeightSum=0;
//       for(vector<VecConnection*>::iterator it=ConList.begin(); it !=ConList.end(); ++it)
//       {
//         for (i=0;i<(*it)->N_pre[CurTarget];++i) {
//           tmpweight= (*((*it)->s_pre[CurTarget][i]));
//           if (tmpweight <0) {
//             // delete this thread
//             cout << "EEEEEEEEEERRRROOORRR, weight deletion didn't work\n";
//             fflush(stdout);
//             exit(2);
//           }
//           CurWeightSum += (*((*it)->s_pre[CurTarget][i]))*(*((*it)->s_pre[CurTarget][i]));
//         }
//       }
//       // DEBUG
//       if (CurWeightSum > 100) {
//         for(vector<VecConnection*>::iterator it=ConList.begin(); it !=ConList.end(); ++it)
//         {
//           cout << "N_pre=" << (*it)->N_pre[CurTarget] << "\n";
//           for (i=0;i<(*it)->N_pre[CurTarget];++i) {
//             tmpweight= (*((*it)->s_pre[CurTarget][i]));
//             cout << "w"<< i << "=" << tmpweight << " I_pre=" << (*it)->I_pre[CurTarget][i] << "\n";
//           }
//         }
//         fflush(stdout);
//         exit(2);
//       }
//       // END DEBUG
//       NormFactor = WeightSum/sqrt(CurWeightSum);
//       cout << "NormFactor=" << NormFactor << " WeightSum=" << CurWeightSum << "\n";
//       for(vector<VecConnection*>::iterator it=ConList.begin(); it !=ConList.end(); ++it)
//       {
//         for (i=0;i<(*it)->N_pre[CurTarget];++i) *((*it)->s_pre[CurTarget][i]) *= NormFactor;
//       }
//       ++spike;
//     }
  } else {
    while (spike < Target->N_firings) {
      CurTarget = Target->firings[spike][1];
      // calculate WeightSum
      CurWeightSum = 0;
      int Npre;
      for (TVecConnectionList::iterator it = ConList.begin(); it != ConList.end(); ++it) {
        // Npre=((*it)->PreSynNr)[CurTarget].size();
        // for (i=0;i<Npre;++i) CurWeightSum += ((*it)->SynWeights)[((*it)->PreSynNr)[CurTarget][i]];
        CurWeightSum += (*it)->GetWeightSum(CurTarget, false);
      }
      NormFactor = WeightSum/CurWeightSum;
      // multiplicatively normalize weights
      for (TVecConnectionList::iterator it = ConList.begin(); it != ConList.end(); ++it) {
        (*it)->MultiplyTargetWeights(CurTarget, NormFactor);
      }
      ++spike;
    }
  }
  return 0;
}

int VecConstSumNormalize::NormalizeAll()
{
  cout << "Normalizing All ...";
  int CurTarget;
  int i, j;
  float CurWeightSum, NormFactor;
  if (quadratic) {
    cerr << " VecConstSumNormalize::NormalizeAll/QUADRATIC not implemented yet\n";
    exit(1);
//     for (CurTarget=0;CurTarget<Target->N;++CurTarget)
//     {
//       CurWeightSum=0; // as above
//       for(vector<VecConnection*>::iterator it=ConList.begin(); it !=ConList.end(); ++it)
//       {
//         for (i=0;i<(*it)->N_pre[CurTarget];++i) CurWeightSum += (*((*it)->s_pre[CurTarget][i]))*(*((*it)->s_pre[CurTarget][i]));
//       }
//       NormFactor = WeightSum/sqrt(CurWeightSum);
//       for(vector<VecConnection*>::iterator it=ConList.begin(); it !=ConList.end(); ++it)
//       {
//         for (i=0;i<(*it)->N_pre[CurTarget];++i) *((*it)->s_pre[CurTarget][i]) *= NormFactor;
//       }
//     }
  } else {
    for (CurTarget=0; CurTarget<Target->N; ++CurTarget) {
      CurWeightSum = 0;
      // calculate Weight Sum
      for (TVecConnectionList::iterator it = ConList.begin(); it != ConList.end(); ++it) {
        CurWeightSum += (*it)->GetWeightSum(CurTarget, false);
      }
      NormFactor = WeightSum/CurWeightSum;
      // multiplicatively normalize weights
      for (TVecConnectionList::iterator it = ConList.begin(); it != ConList.end(); ++it) {
        (*it)->MultiplyTargetWeights(CurTarget, NormFactor);
      }
    }
  }
  cout << " [done]\n";
  return 0;
}
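// Illustrative sketch (not part of the original file): the linear (non-quadratic) branch
// of VecConstSumNormalize rescales all incoming weights of a target neuron so that their
// sum returns to the constant WeightSum, using NormFactor = WeightSum/CurWeightSum. On a
// plain weight vector the same operation looks as follows; the function name is ours, and
// the zero-sum guard is an addition (proceede()/NormalizeAll() above divide by CurWeightSum
// directly, so an all-zero weight vector would produce an inf/NaN factor there).
static void ExampleConstSumNormalize(std::vector<float>& IncomingWeights, float WeightSum)
{
  float CurWeightSum = 0;
  for (size_t i = 0; i < IncomingWeights.size(); ++i) CurWeightSum += IncomingWeights[i];
  if (CurWeightSum <= 0) return;              // nothing to rescale

  float NormFactor = WeightSum/CurWeightSum;  // same factor as in proceede()/NormalizeAll()
  for (size_t i = 0; i < IncomingWeights.size(); ++i) IncomingWeights[i] *= NormFactor;
}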
// ToDo: test this function
// sets the weight sum according to the average of all weights
void VecConstSumNormalize::CalcInitWeightSum()
{
  cout << "Calculating Initial Weight Sum ...";
  cerr << "VecConstSumNormalize::CalcInitWeightSum() is not tested\n";
  exit(1);
  int CurTarget;
  int i, j;
  float CurWeightSum, NormFactor;
  float TmpWeightSum = 0;
  if (quadratic) {
    cerr << " VecConstSumNormalize::CalcInitWeightSum/QUADRATIC not implemented yet\n";
    exit(1);
//     for (CurTarget=0;CurTarget<Target->N;++CurTarget)
//     {
//       CurWeightSum=0; // as above
//       for(vector<VecConnection*>::iterator it=ConList.begin(); it !=ConList.end(); ++it)
//       {
//         for (i=0;i<(*it)->N_pre[CurTarget];++i) CurWeightSum += (*((*it)->s_pre[CurTarget][i]))*(*((*it)->s_pre[CurTarget][i]));
//       }
//       TmpWeightSum += sqrt(CurWeightSum);
//     }
//     WeightSum = TmpWeightSum/Target->N;
  } else {
    for (CurTarget=0; CurTarget<Target->N; ++CurTarget) {
      CurWeightSum = 0;
      // calculate Weight Sum
      for (TVecConnectionList::iterator it = ConList.begin(); it != ConList.end(); ++it) {
        CurWeightSum += (*it)->GetWeightSum(CurTarget, false);
      }
      TmpWeightSum += CurWeightSum;
    }
    WeightSum = TmpWeightSum/Target->N;
  }
  cout << " [done]\n";
}

float VecConstSumNormalize::GetWeightSum()
{
  return WeightSum;
}

int VecConstSumNormalize::SetWeightSum(float NewWeightSum)
{
  WeightSum = NewWeightSum;
  return 0;
}

int VecConstSumNormalize::WriteSimInfo(fstream &fw)
{
  stringstream sstr;
  // (the XML output written here is not recoverable)
  sstr << " \n";
  sstr << " \n";
  VecNormalize::WriteSimInfo(fw, sstr.str());
  return 0;
}

//////////////////////////////
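// Illustrative sketch (not part of the original file): the quadratic variant that
// proceede()/NormalizeAll() above leave unimplemented. The commented-out code computes
// NormFactor = WeightSum/sqrt(sum of squared weights), i.e. it would keep the Euclidean
// (L2) norm of each neuron's incoming weight vector at WeightSum instead of the plain
// sum. A self-contained version of that rule (function name and zero guard are ours):
static void ExampleQuadraticConstSumNormalize(std::vector<float>& IncomingWeights, float WeightSum)
{
  float CurSquareSum = 0;
  for (size_t i = 0; i < IncomingWeights.size(); ++i)
    CurSquareSum += IncomingWeights[i]*IncomingWeights[i];
  if (CurSquareSum <= 0) return;                    // all-zero weight vector: nothing to rescale

  float NormFactor = WeightSum/sqrt(CurSquareSum);  // as in the commented-out quadratic branch
  for (size_t i = 0; i < IncomingWeights.size(); ++i) IncomingWeights[i] *= NormFactor;
}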