vconnection.cpp
  1. #include "sys.hpp" // for libcwd
  2. #include "debug.hpp" // for libcwd
  3. #include "vconnection.hpp"
  4. #include "vlearn.hpp"
  5. #include "layer.hpp"
  6. #include "matrix4d.hpp"
  7. #include "chunkfile/chunkfile/chunkfile.h"
  8. #include <algorithm>
  9. const T_Delays DELETED_SYNAPSE=-1;
  10. VecConnection::VecConnection(): NSynapses(0), learnobj(0), FileTypeString("VecConnection_1.0")
  11. {
  12. }
  13. VecConnection::VecConnection(layer* SL, layer* TL, csimInputChannel _InputChannel, bool _nonself)
  14. : Connection(SL,TL,_InputChannel,_nonself), NSynapses(0),
  15. learnobj(0), FileTypeString("VecConnection_1.0")
  16. {
  17. Name="VecConnection";
  18. WeightFileName ="vweights.dat";
  19. // initialize all pointers with 0
  20. // delays(SourceLayer->N, maximumDelay, 2);
  21. delays.resize(ns);
  22. PreSynNr.resize(nt);
  23. }
  24. VecConnection::~VecConnection()
  25. {
  26. Dout(dc::con, "VecConnection Destructor");fflush(stdout);
  27. if (learnobj != 0) delete learnobj;
  28. }
  29. VecConnectionInfo VecConnection::GetConnectionInfo()
  30. {
  31. VecConnectionInfo info;
  32. info.Dmax = Dmax;
  33. info.maximumDelay = maximumDelay;
  34. info.MaxWeight = maxWeight;
  35. info.TargetLayer = TargetLayer;
  36. info.SourceLayer = SourceLayer;
  37. info.PSynWeights = &SynWeights;
  38. info.PSynSourceNr = &SynSourceNr;
  39. info.PSynTargetNr = &SynTargetNr;
  40. info.PSynDelays = &SynDelays;
  41. info.PPreSynNr = &PreSynNr;
  42. info.Pdelays = &delays;
  43. return info;
  44. }
  45. int VecConnection::proceede(int TotalTime)
  46. {
  47. int t = TotalTime % MacroTimeStep;
  48. int i,j,k, mi, ipre;
  49. int CurDelay;
  50. // calculate input for target layer
  51. k=SourceLayer->N_firings;
  52. // if (rec) rec->record(dt*TotalTime, s[Observe_s][Observe_m]);
  53. // if (BinRec) BinRec->record();
  54. while (t-(SourceLayer->firings[--k][0]) < maximumDelay) // Only spikes that lie no more than maximumDelay time steps in the past are considered; the firings array consists of firings[SpikeNr][0]: spike time, firings[SpikeNr][1]: neuron number
  55. {
  56. CurDelay = t-SourceLayer->firings[k][0];
  57. ipre = SourceLayer->firings[k][1];
  58. for (vector <T_NSynapses>::iterator it=delays[ipre][CurDelay].begin();it != delays[ipre][CurDelay].end(); ++it)
  59. {
  60. InputPointer[SynTargetNr[(*it)]] += SynWeights[(*it)];
  61. }
  62. }
  63. if (learn == true) {
  64. learnobj->proceede(TotalTime);
  65. }
  66. }
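// Data-layout note (derived from the loop above and from SetupDelaysArray()):
// the synapses are stored as flat, parallel vectors SynSourceNr, SynTargetNr,
// SynWeights and SynDelays, all indexed by the synapse number. The nested
// vector delays[source][delay] holds the synapse numbers of all synapses that
// leave neuron 'source' with conduction delay 'delay', so a spike of neuron
// ipre that happened CurDelay steps ago is delivered by walking
// delays[ipre][CurDelay] and adding SynWeights[syn] to
// InputPointer[SynTargetNr[syn]] for every synapse number syn in that list.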
  67. int VecConnection::prepare(int step)
  68. {
  69. SimElement::prepare(step);
  70. if ((learn == true) && (learnobj != 0)) {
  71. learnobj->prepare();
  72. if (RewireOn) {
  73. Rewire(RewireThreshold, RewireMaxConnectivity);
  74. }
  75. if (AutoSave) Save();
  76. }
  77. }
  78. int VecConnection::PushBackNewSynapse(int source, int target, float weight, int delay)
  79. {
  80. SynSourceNr.push_back(source);
  81. SynTargetNr.push_back(target);
  82. SynWeights.push_back(weight);
  83. SynDelays.push_back(delay);
  84. PreSynNr[target].push_back(NSynapses);
  85. ++NSynapses;
  86. }
  87. int VecConnection::ScrambleSynTargets()
  88. {
  89. // Randomly shuffle the synapse targets (for the reviewers)
  90. // Scramble Targets
  91. for (vector<T_NNeurons>::iterator it=SynTargetNr.begin(); it != SynTargetNr.end();++it) {
  92. (*it) =gsl_rng_uniform_int(gslr, nt);
  93. }
  94. // rebuild delays array and PreSynNr array
  95. SetupDelaysArray();
  96. SetupPreSynNrArray();
  97. }
  98. ///////////// Connect routines for creating different types of connection matrices (usage sketch below)
  99. // ConnectFull
  100. // ConnectRandomIncomming
  101. // ConnectSelf
  102. // ConnectGaussian
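// Illustrative usage sketch (commented out, not part of the build). The layer
// pointers, the input channel value and all numeric arguments below are
// placeholder assumptions, not values taken from this file:
//
//   VecConnection con(SourceLayerPtr, TargetLayerPtr, SomeInputChannel, false);
//   con.ConnectRandomIncomming(100,    /* incoming synapses per target neuron */
//                              0.5f,   /* initial weight */
//                              20.0f,  /* maximum delay, same time unit as dt */
//                              1.0f,   /* minimum delay */
//                              true);  /* RandomDelays flag */
//   con.Save();                        /* writes "vweights.dat" by default */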
  103. int VecConnection::ConnectFull(float MaxWeight, float maxDelay, float minDelay, bool RandomWeights)
  104. {
  105. Dout(dc::con, "Connection::ConnectFull");
  106. SetMinMaxDelay(maxDelay, minDelay);
  107. Strength=MaxWeight;
  108. if (ns != nt) NonSelf = false; // NonSelf makes sense only for layers with same size
  109. int i,j;
  110. for (i=0;i<ns;i++) {
  111. for (j=0;j<nt;j++)
  112. {
  113. if ((!NonSelf) || (i != j)) {
  114. // if (RandomWeights)
  115. PushBackNewSynapse(i, j, MaxWeight, 0);
  116. }
  117. }
  118. }
  119. Dout(dc::con, "connected, NSynapses=" << NSynapses);
  120. SetRandomDelays();
  121. SetupDelaysArray();
  122. // SetupPresynapticInfo();
  123. Dout(dc::con, "Connection initialized");
  124. }
  125. int VecConnection::ConnectRandomIncomming(double _Connectivity, float _InitialWeights, float maxDelay, float minDelay, bool RandomDelays)
  126. {
  127. if ((_Connectivity <= 1) && (_Connectivity>=0)) {
  128. int NConnections = int(round(float(ns)*_Connectivity));
  129. ConnectRandomIncomming(NConnections, _InitialWeights, maxDelay, minDelay, RandomDelays);
  130. } else {
  131. cerr << "fatal ERROR in VecConnection::ConnectRandomIncomming: _Connectivity should be in [0..1] (exit simulation)\n";
  132. exit (1);
  133. }
  134. }
  135. int VecConnection::ConnectRandomIncomming(int NIncommingCon, float _InitialWeights, float maxDelay, float minDelay, bool RandomDelays)
  136. {
  137. Dout(dc::con, "VecConnection::ConnectRandomIncomming");
  138. SetMinMaxDelay(maxDelay, minDelay);
  139. if (ns != nt) NonSelf = false; // NonSelf makes sense only for layers with same size
  140. int i;
  141. float InitialWeights=1;
  142. connectivity=float(NIncommingCon)/float(ns);
  143. vector <int> SourceList;
  144. SourceList.reserve(ns);
  145. for (int i=0;i<ns;++i) {
  146. SourceList.push_back(i);
  147. }
  148. long SynCounter=0;
  149. Dout(dc::con, " nt=" << nt << "");
  150. Dout(dc::con, " NIncommingCon=" << NIncommingCon << "");
  151. for (int tar=0;tar<nt;++tar) {
  152. vector <int> AvailSrcNeurons (SourceList);
  153. for (int IncomCon=0;IncomCon<NIncommingCon;++IncomCon) {
  154. int rndNum=gsl_rng_uniform_int(gslr, AvailSrcNeurons.size());
  155. int SourceNum=AvailSrcNeurons[rndNum];
  156. PushBackNewSynapse(SourceNum, tar, _InitialWeights, 0);
  157. AvailSrcNeurons.erase(AvailSrcNeurons.begin()+rndNum);
  158. }
  159. }
  160. Dout(dc::con, "* connected, NSynapses=" << NSynapses << ""); fflush(stdout);
  161. SetRandomDelays();
  162. SetupDelaysArray();
  163. Dout(dc::con, " Tar0"<<PreSynNr[0].size()<<"");
  164. Dout(dc::con, " Tar1"<<PreSynNr[1].size()<<"");
  165. Dout(dc::con, " Tar2"<<PreSynNr[2].size()<<"");
  166. Dout(dc::con, " NSynapses=" << NSynapses <<"");
  167. Dout(dc::con, " VecConnection initialized\n");
  168. }
  169. // connect each neuron of the source layer to the corresponding
  170. // neuron in the target layer that has the same index
  171. int VecConnection::ConnectSelf(float MaxWeight, float maxDelay, float minDelay)
  172. {
  173. Strength=MaxWeight;
  174. Dout(dc::con, "VecConnection::ConnectSelf");
  175. SetMinMaxDelay(maxDelay, minDelay);
  176. int CurDelay;
  177. int n_connections = min(ns,nt);
  178. Dout(dc::con, " number of connections =" << n_connections << "");
  179. // generate delays
  180. vector<T_Delays> Delays(n_connections, minimumDelay);
  181. if (DelayDiff>=0) {
  182. Dout(dc::con, "generate delays for each synapse");
  183. for (int i=0;i<n_connections;i++) {
  184. Delays[i] = minimumDelay + gsl_rng_uniform_int(gslr, DelayDiff+1);
  185. }
  186. }
  187. for (int i=0;i<n_connections;i++)
  188. {
  189. PushBackNewSynapse(i,i,MaxWeight, Delays[i]);
  190. }
  191. SetupDelaysArray();
  192. Dout(dc::con, " VecConnection initialized\n"); fflush(stdout);
  193. }
  194. // ConnectGaussian: connect neurons depending on their distance
  195. // in a 2D plane; the connection strength falls off as a Gaussian
  196. int VecConnection::ConnectGaussian(float Sigma, float MaxWeight, float maxDelay, float minDelay, bool Cyclic)
  197. {
  198. Dout(dc::con, "VecConnection::ConnectGaussian");
  199. Strength=MaxWeight;
  200. vector<vector2d> TargetPositions;
  201. vector<vector2d> SourcePositions;
  202. try
  203. {
  204. TargetPositions = TargetLayer->getPositions();
  205. SourcePositions = SourceLayer->getPositions();
  206. }
  207. catch (std::bad_alloc) {
  208. cerr << "ERROR: Target or Source layer dead\n";
  209. throw;
  210. }
  211. Dout(dc::con, "sizeof TargetPositions=" << TargetPositions.size());
  212. if (ns != nt) NonSelf = false; // NonSelf makes sense only for layers with same size
  213. int i,j,k,exists, r;
  214. // double loop (ns, nt) goes through every possible synapse and checks whether to connect or not
  215. int CurDelay=0;
  216. SetMinMaxDelay(maxDelay, minDelay);
  217. float Distance;
  218. vector2d basis(1,1);
  219. SimpleTextProgressBar pgbar(ns);
  220. float MaxConDistance = 3*Sigma;
  221. for (i=0;i<ns;i++) {
  222. pgbar.Next(i);
  223. for (j=0;j<nt;j++)
  224. {
  225. // calculate Distance between neurons
  226. if (Cyclic) Distance = (TargetPositions[j]).CyclicDistance(SourcePositions[i], basis);
  227. else Distance = (TargetPositions[j] - SourcePositions[i]).abs() ;
  228. if ((Distance < MaxConDistance) && (!NonSelf || (i!=j))) {
  229. CurDelay = minimumDelay + int(Distance*DelayDiff/MaxConDistance);
  230. if (CurDelay>=maximumDelay)
  231. {
  232. CurDelay = maximumDelay - 1; // clamp to the last valid delay slot
  233. cerr << "ERROR: delay too high\n";
  234. }
  235. PushBackNewSynapse(i,j,MaxWeight*gauss(Distance,Sigma), CurDelay);
  236. }
  237. }
  238. }
  239. SetupDelaysArray();
  240. Dout(dc::con, " NSynapses=" << NSynapses);
  241. Dout(dc::con, " connected"); fflush(stdout);
  242. Dout(dc::con, " VecConnection initialized\n");
  243. }
  244. void VecConnection::ConnectCircular(const RangeConnectionParameters& Paras)
  245. {
  246. Dout(dc::con, "VecConnection::ConnectCircular");
  247. vector<vector2d> TargetPositions;
  248. vector<vector2d> SourcePositions;
  249. try
  250. {
  251. TargetPositions = TargetLayer->getPositions();
  252. SourcePositions = SourceLayer->getPositions();
  253. }
  254. catch (std::bad_alloc) {
  255. cerr << "ERROR: Target or Source layer dead\n";
  256. throw;
  257. }
  258. if (!TargetLayer->isPositionNormalized() || !SourceLayer->isPositionNormalized()) {
  259. throw std::runtime_error("Source or Target positions not normalized");
  260. }
  261. if (ns != nt) NonSelf = false; // NonSelf makes sense only for layers with same size
  262. int CurDelay=0;
  263. SetMinMaxDelay(Paras.MaxDelay, Paras.MinDelay);
  264. float Distance;
  265. vector2d basis(1,1);
  266. SimpleTextProgressBar pgbar(ns);
  267. // double loop (ns, nt) goes through every possible synapse
  268. // and checks whether to connect or not
  269. for (int i=0;i<ns;i++) {
  270. pgbar.Next(i);
  271. for (int j=0;j<nt;j++)
  272. {
  273. // calculate Distance between neurons
  274. if (Paras.Cyclic) {
  275. Distance = (TargetPositions[j]).CyclicDistance(SourcePositions[i], basis);
  276. } else {
  277. Distance = (TargetPositions[j] - SourcePositions[i]).abs() ;
  278. }
  279. if ((Distance < Paras.Range) && (!NonSelf || (i!=j))) {
  280. if (Paras.Connectivity>=1 || (gsl_rng_uniform(gslr) <= Paras.Connectivity)) {
  281. CurDelay = minimumDelay + int(Distance*DelayDiff/Paras.Range);
  282. if (CurDelay>=maximumDelay)
  283. {
  284. CurDelay = maximumDelay - 1; // clamp to the last valid delay slot
  285. cerr << "ERROR: delay too high\n";
  286. }
  287. PushBackNewSynapse(i,j,Paras.Strength, CurDelay);
  288. }
  289. }
  290. }
  291. }
  292. SetupDelaysArray();
  293. Dout(dc::con, " NSynapses=" << NSynapses);
  294. Dout(dc::con, " connected");
  295. Dout(dc::con, " VecConnection initialized\n");
  296. }
  297. ///////////// END connect routines //////////////////
  298. int VecConnection::SetNewWeights(float IncommingConnectivity, float InitialWeights)
  299. {
  300. Dout(dc::con, "VecConnection::SetNewWeights"); fflush(stdout);
  301. vector <int> SourceNeuronList (ns);
  302. for (int s=0;s<ns;++s) SourceNeuronList[s]=s;
  303. for (int tar=0; tar<nt;++tar) {
  304. int NNewWeights = int(ns*IncommingConnectivity) - PreSynNr[tar].size();
  305. // Dout(dc::con, "NNewWeights=" << NNewWeights << ""); fflush(stdout);
  306. // Dout(dc::con, "ns" << ns << ""); fflush(stdout);
  307. // Dout(dc::con, "IncommingConnectivity=" << IncommingConnectivity << ""); fflush(stdout);
  308. // Dout(dc::con, "PreSynNr[tar].size()=" << PreSynNr[tar].size() << ""); fflush(stdout);
  309. if (NNewWeights >0) {
  310. vector <int> AvailNeur(SourceNeuronList);
  311. // remove all presynaptic Neurons from available neurons list
  312. for (vector<T_NSynapses>::iterator it=PreSynNr[tar].begin();it!=PreSynNr[tar].end();++it) {
  313. AvailNeur.erase(remove(AvailNeur.begin(),
  314. AvailNeur.end(),
  315. SynSourceNr[*it]),
  316. AvailNeur.end());
  317. }
  318. for (int nw=0;nw<NNewWeights;++nw) {
  319. // Dout(dc::con, "AvailNeur=" << AvailNeur.size() << ""); fflush(stdout);
  320. int rndNum=gsl_rng_uniform_int(gslr, AvailNeur.size());
  321. int SourceNum=AvailNeur[rndNum];
  322. T_NSynapses NewSynNr = SynSourceNr.size();
  323. SynSourceNr.push_back(SourceNum);
  324. SynTargetNr.push_back(tar);
  325. SynWeights.push_back(InitialWeights);
  326. // random delay??
  327. SynDelays.push_back( minimumDelay + gsl_rng_uniform_int(gslr, DelayDiff+1));
  328. //! @ToDo: Check whether +1 is correct
  329. PreSynNr[tar].push_back(NewSynNr);
  330. delays[SynSourceNr[NewSynNr]][SynDelays[NewSynNr]].push_back(NewSynNr);
  331. ++NSynapses;
  332. AvailNeur.erase(AvailNeur.begin()+rndNum);
  333. }
  334. }
  335. }
  336. }
  337. // vector<int>* _NNewWeights specifies for each target neuron how many new incoming
  338. // synapses are created
  339. int VecConnection::SetNewWeights(vector<int>* _NNewWeights, float InitialWeights)
  340. {
  341. Dout(dc::con, "VecConnection::SetNewWeights");
  342. vector <int> SourceNeuronList (ns);
  343. for (int s=0;s<ns;++s) SourceNeuronList[s]=s;
  344. for (int tar=0; tar<nt;++tar) {
  345. int NNewWeights = (*_NNewWeights)[tar];
  346. // Dout(dc::con, "NNewWeights=" << NNewWeights << "");fflush(stdout);
  347. if (NNewWeights >0) {
  348. vector <int> AvailNeur(SourceNeuronList);
  349. // remove all presynaptic Neurons from available neurons list
  350. for (vector<T_NSynapses>::iterator it=PreSynNr[tar].begin();it!=PreSynNr[tar].end();++it) {
  351. AvailNeur.erase(remove(AvailNeur.begin(),AvailNeur.end(),SynSourceNr[*it]), AvailNeur.end());
  352. }
  353. for (int nw=0;nw<NNewWeights;++nw) {
  354. Dout(dc::con, "AvailNeur=" << AvailNeur.size() << ""); fflush(stdout);
  355. int rndNum=gsl_rng_uniform_int(gslr, AvailNeur.size());
  356. int SourceNum=AvailNeur[rndNum];
  357. int NewSynNr = SynSourceNr.size();
  358. SynSourceNr.push_back(SourceNum);
  359. SynTargetNr.push_back(tar);
  360. SynWeights.push_back(InitialWeights);
  361. // random delay??
  362. SynDelays.push_back( minimumDelay + gsl_rng_uniform_int(gslr, DelayDiff+1));
  363. //ToDo: Check whether +1 is correct
  364. PreSynNr[tar].push_back(NewSynNr);
  365. delays[SynSourceNr[NewSynNr]][SynDelays[NewSynNr]].push_back(NewSynNr);
  366. ++NSynapses;
  367. AvailNeur.erase(AvailNeur.begin()+rndNum);
  368. }
  369. }
  370. }
  371. }
  372. /** calculate maximumDelay and minimumDelay (in time steps) from maxDelay, minDelay (given in the same time unit as dt)
  373. @param maxDelay maximum delay (same time unit as dt)
  374. @param minDelay minimum delay (same time unit as dt)
  375. */
  376. int VecConnection::SetMinMaxDelay(float maxDelay, float minDelay)
  377. {
  378. maximumDelay = int(maxDelay/dt);
  379. if (maximumDelay == 0) maximumDelay = 1;
  380. // equivalent to Dmax; always compare with "<" (not with "<=")
  381. minimumDelay = int(minDelay/dt);
  382. if (maximumDelay >= Dmax) {
  383. cerr << "ERROR: SetMinMaxDelay: Parameter Error: maximumDelay="
  384. << (int) maximumDelay << " >= Dmax=" << (int) Dmax << "\n";
  385. throw RequestedDelayTooLarge(maximumDelay);
  386. }
  387. if (minimumDelay >= maximumDelay) {
  388. cerr << "ERROR: SetMinMaxDelay: Parameter Error:"
  389. << " minimumDelay=" << (int)minimumDelay << " >= maximumDelay =" << (int)maximumDelay << "\n";
  390. minimumDelay = maximumDelay-1;
  391. cerr << "setting minimumDelay to " << (int)minimumDelay << "\n";
  392. }
  393. DelayDiff = maximumDelay-minimumDelay-1;
  394. Dout(dc::con, "MaximumDelay=" << (int)maximumDelay
  395. << " MinimumDelay=" << (int)minimumDelay
  396. << " DelayDiff= " << (int)DelayDiff);
  397. }
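// Worked example (sketch): with dt = 1.0 (assumed here purely for illustration)
// a call SetMinMaxDelay(20.0, 1.0) yields maximumDelay = 20 steps,
// minimumDelay = 1 step and DelayDiff = 20 - 1 - 1 = 18, so the random delays
// drawn below as minimumDelay + gsl_rng_uniform_int(gslr, DelayDiff+1) lie in
// [1, 19] and always satisfy delay < maximumDelay, as required by the
// delays[source][delay] indexing.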
  398. int VecConnection::SetRandomDelays()
  399. {
  400. Dout(dc::con, "SetRandomDelays"); fflush(stdout);
  401. int NSynapses = SynWeights.size();
  402. for (int SynNr=0;SynNr<NSynapses;++SynNr) {
  403. // SynDelays[SynNr] = minimumDelay + getrandom(DelayDiff+1);
  404. SynDelays[SynNr] = minimumDelay + gsl_rng_uniform_int(gslr, DelayDiff+1);
  405. }
  406. }
  407. int VecConnection::SetupPreSynNrArray()
  408. {
  409. Dout(dc::con, "VecConnection::SetupPreSynNrArray");
  410. Dout(dc::con, "PreSynNr.size()=" << PreSynNr.size()
  411. << " NSynapses=" << NSynapses << " size()=" << SynDelays.size());
  412. if (PreSynNr.size() != nt) {
  413. PreSynNr.resize(nt);
  414. }
  415. if (SynWeights.size() != NSynapses) {
  416. cerr << "ERROR in SetupPreSynNrArray(): there are deleted Synapses \n";
  417. cerr << " or did you forget 'NSynapses=SynCounter;' in your Connect routine?\n";
  418. exit(1);
  419. }
  420. // clear PreSynNrArray
  421. for (vector< vector<T_NSynapses> >::iterator it=PreSynNr.begin();it!=PreSynNr.end();++it) {
  422. (*it).clear();
  423. }
  424. for (T_NSynapses i=0;i<NSynapses;++i) {
  425. if (SynDelays[i] == -1) {
  426. cerr << "ERROR in SetupPreSynNrArray(): there are deleted Synapses \n";
  427. exit(1);
  428. } else {
  429. PreSynNr[SynTargetNr[i]].push_back(i);
  430. }
  431. }
  432. }
  433. int VecConnection::SetupDelaysArray()
  434. {
  435. Dout(dc::con, "SetupDelaysArray"); fflush(stdout);
  436. if (SynWeights.size() != NSynapses) {
  437. cerr << "ERROR in SetupDelaysArray(): there are deleted Synapses \n";
  438. exit(1);
  439. }
  440. delays.resize(ns);
  441. Dout(dc::con, "maximumDelay=" << maximumDelay << "");
  442. for (int i=0;i<ns;++i) delays[i].resize(maximumDelay);
  443. Dout(dc::con, "Clear delays Array" << "");
  444. for (vector<vector<vector<T_NSynapses> > >::iterator ita=delays.begin();ita!=delays.end();++ita) {
  445. for (vector<vector<T_NSynapses> >::iterator itb=(*ita).begin();itb!=(*ita).end();++itb) {
  446. (*itb).clear();
  447. }
  448. }
  449. #ifdef DEBUG
  450. int CurDelay=0;
  451. int CurSource=0;
  452. Dout(dc::con, "DEBUGGING mode ");
  453. for (int SynNr=0;SynNr<NSynapses;++SynNr) {
  454. CurDelay = SynDelays[SynNr];
  455. CurSource=SynSourceNr[SynNr];
  456. if (CurDelay < delays[CurSource].size()) {
  457. delays[CurSource][CurDelay].push_back(SynNr);
  458. } else {
  459. cerr << "ERROR in SetupDelays() \n";
  460. exit (1);
  461. }
  462. }
  463. #else //DEBUG
  464. Dout(dc::con, "Rebuild delays Array" << ""); fflush(stdout);
  465. for (int SynNr=0;SynNr<NSynapses;++SynNr) {
  466. // cout << "["
  467. // << CountSyn++ << ":"
  468. // << delays[SynSourceNr[SynNr]][SynDelays[SynNr]].size() << ","
  469. // << SynSourceNr[SynNr] << ","
  470. // << SynTargetNr[SynNr] << ","
  471. // << SynDelays[SynNr] << ",c"
  472. // << delays[SynSourceNr[SynNr]][SynDelays[SynNr]].capacity() << "] .\n "; fflush(stdout);
  473. if (SynDelays[SynNr] >= 0) {
  474. delays[SynSourceNr[SynNr]][SynDelays[SynNr]].push_back(SynNr);
  475. } else {
  476. cerr << "WARNING in SetupDelays(): there are deleted synapses\n";
  477. }
  478. }
  479. #endif //DEBUG
  480. Dout(dc::con, "done rebuilding delays Array" << ""); fflush(stdout);
  481. }
  482. /**
  483. * delete a single synapse
  484. * @param SynNr is the number of the synapse (index in SynSourceNr,
  485. * SynTargetNr, SynDelays, SynWeights
  486. * @return
  487. */
  488. int VecConnection::DeleteSynapse(int SynNr)
  489. {
  490. // Dout(dc::con, "Deleting Synapse Nr:" << SynNr << " weight=" << SynWeights[SynNr] << " TargetNr=" << SynTargetNr[SynNr] << "");
  491. int DelSourceNr=SynSourceNr[SynNr];
  492. int DelTargetNr = SynTargetNr[SynNr];
  493. int DelDelay = SynDelays[SynNr];
  494. if (DelDelay <0) {
  495. cerr << "Deleting Synapse which is already deleted\n";
  496. exit (1);
  497. }
  498. // delete synapse in delays vector
  499. for (vector <T_NSynapses>::iterator it=delays[DelSourceNr][DelDelay].begin(); it != delays[DelSourceNr][DelDelay].end(); ++it)
  500. {
  501. if (*it == SynNr) {
  502. delays[DelSourceNr][DelDelay].erase(it);
  503. break;
  504. }
  505. }
  506. // delete synapse in PreSynNr vector
  507. for (vector<T_NSynapses>::iterator it=PreSynNr[DelTargetNr].begin(); it != PreSynNr[DelTargetNr].end(); ++it) {
  508. if (*it == SynNr) {
  509. PreSynNr[DelTargetNr].erase(it);
  510. break;
  511. }
  512. }
  513. // set synapse to -1
  514. SynSourceNr[SynNr] = -1;
  515. SynTargetNr[SynNr] = -1;
  516. SynDelays[SynNr] = -1;
  517. SynWeights[SynNr] = -1;
  518. --NSynapses;
  519. }
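// Note on the deletion scheme used above: DeleteSynapse() does not erase the
// entry from the flat synapse vectors; it only removes the synapse number from
// delays[][] and PreSynNr[] and marks the flat entry with -1 (DELETED_SYNAPSE
// for the delay). The flat vectors are only compacted later by CleanupArrays(),
// which erases all entries with SynDelays == -1 and then rebuilds the delays
// and PreSynNr lookup structures.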
  520. /**
  521. * set all valid synapses to new WeightValue
  522. * @param WeightValue new weight value
  523. * @return
  524. */
  525. int VecConnection::SetAllWeights(float WeightValue)
  526. {
  527. int NWeights=SynWeights.size();
  528. for (int i=0; i<NWeights; ++i) {
  529. if (SynDelays[i] != -1) { // invalid (deleted) synapses have delay -1
  530. SynWeights[i]=WeightValue;
  531. }
  532. }
  533. }
  534. int VecConnection::DeleteLowWeights(float Threshold)
  535. {
  536. vector <long> DeleteList;
  537. for (int SourceNr=0; SourceNr<ns;++SourceNr) {
  538. int NDelays=delays[SourceNr].size();
  539. for (int Delay=0;Delay<NDelays; ++Delay) {
  540. for (vector <T_NSynapses>::iterator it=delays[SourceNr][Delay].begin(); it != delays[SourceNr][Delay].end(); ++it) {
  541. if (SynWeights[*it ] < Threshold) {
  542. DeleteList.push_back(*it);
  543. if (SynDelays[*it] <0) {
  544. cerr << "Something is wrong here\n";
  545. cerr << "SynNr=" << (*it) << "\n";
  546. exit (1);
  547. }
  548. }
  549. }
  550. }
  551. }
  552. Dout(dc::con, "DeleteSynapses");fflush(stdout);
  553. for (vector <long>::iterator it=DeleteList.begin(); it != DeleteList.end(); ++it) {
  554. DeleteSynapse(*it);
  555. }
  556. int NDeletedSynapses = DeleteList.size();
  557. Dout(dc::con, "Deleted " << NDeletedSynapses << " synapses");
  558. Dout(dc::con, "Done DeleteSynapses");fflush(stdout);
  559. if (SynWeights.size() > 20*NSynapses) {
  560. CleanupArrays();
  561. }
  562. Dout(dc::con, "DeleteThreshold = " << Threshold << "");
  563. return NDeletedSynapses;
  564. }
  565. int VecConnection::CleanupArrays()
  566. {
  567. Dout(dc::con, "Renewing delays array and PreSynNr array");
  568. // remove deleted synapses
  569. int c=0;
  570. while (c < SynDelays.size()) {
  571. if (SynDelays[c] == -1) {
  572. SynDelays.erase(SynDelays.begin()+c);
  573. SynSourceNr.erase(SynSourceNr.begin()+c);
  574. SynTargetNr.erase(SynTargetNr.begin()+c);
  575. SynWeights.erase(SynWeights.begin()+c);
  576. } else {
  577. ++c;
  578. }
  579. }
  580. SetupDelaysArray();
  581. SetupPreSynNrArray();
  582. }
  583. void VecConnection::SetName(const char* _name)
  584. {
  585. SimElement::SetName(_name);
  586. WeightFileName = Name + "weights.dat";
  587. Dout(dc::con, "VecConnection, WeightFileName = " << WeightFileName << "");
  588. }
  589. int VecConnection::Save()
  590. {
  591. Save(WeightFileName.c_str());
  592. }
  593. int VecConnection::Save(int nr)
  594. {
  595. Save((WeightFileName + stringify(nr)).c_str());
  596. }
  597. void VecConnection::Save_VecConnection_2_0(const string& SaveWeightFileName)
  598. {
  599. FileFormat VecFileFormat("VecConnection", 2, 0);
  600. ChunkFileWriter MyFileWriter(SaveWeightFileName, VecFileFormat);
  601. SourceTargetDim MyVecConHeader(ns,
  602. SourceNx,
  603. SourceNy,
  604. nt,
  605. TargetNx,
  606. TargetNy);
  607. MyFileWriter.write("VecHeader", MyVecConHeader);
  608. MyFileWriter.writeVector("SourceNr", SynSourceNr);
  609. MyFileWriter.writeVector("TargetNr", SynTargetNr);
  610. MyFileWriter.writeVector("Delays", SynDelays);
  611. MyFileWriter.writeVector("Weights", SynWeights);
  612. MyFileWriter.close();
  613. }
  614. void VecConnection::Load_VecConnection_2_0(const string & FileName)
  615. {
  616. Dout(dc::con, "VecConnection::Load_VecConnection_2_0, FileName=" << FileName);
  617. ChunkFileReader FileReader(FileName, FileFormat("VecConnection", 2, 0));
  618. Dout(dc::con, "allocate SourceTargetDim");
  619. SourceTargetDim VecConHeader;
  620. Dout(dc::con, "read VecHeader");
  621. FileReader.read("VecHeader", VecConHeader);
  622. ns = VecConHeader.NSource;
  623. nt = VecConHeader.NTarget;
  624. SourceNx = VecConHeader.NSx;
  625. SourceNy = VecConHeader.NSy;
  626. TargetNx = VecConHeader.NTx;
  627. TargetNy = VecConHeader.NTy;
  628. Dout(dc::con, "read vectors");
  629. FileReader.readAndCastVector("SourceNr", SynSourceNr);
  630. FileReader.readAndCastVector("TargetNr", SynTargetNr);
  631. FileReader.readAndCastVector("Delays", SynDelays);
  632. FileReader.readVector("Weights", SynWeights);
  633. FileReader.close();
  634. Dout(dc::con, "Load_VecConnection_2_0: Data loaded");
  635. //Dout(dc::con, "Delays=" << stringifyVector(OriginalDelays));
  636. vector<T_Delays>::iterator it = max_element(SynDelays.begin(),SynDelays.end());
  637. maximumDelay = (*it)+1;
  638. if (maximumDelay >= Dmax) {
  639. throw RequestedDelayTooLarge(maximumDelay);
  640. }
  641. updateNSynapses();
  642. SetupDelaysArray();
  643. SetupPreSynNrArray();
  644. }
  645. void VecConnection::Load_VecConnection_2_1(const string & FileName)
  646. {
  647. Dout(dc::con, "VecConnection::Load_VecConnection_2_1, FileName=" << FileName);
  648. ChunkFileReader FileReader(FileName, FileFormat("VecConnection", 2, 1));
  649. Dout(dc::con, "allocate SourceTargetDim");
  650. SourceTargetDim1 VecConHeader;
  651. Dout(dc::con, "read VecHeader");
  652. FileReader.read("VecHeader", VecConHeader);
  653. ns = VecConHeader.NSource;
  654. nt = VecConHeader.NTarget;
  655. SourceNx = VecConHeader.NSx;
  656. SourceNy = VecConHeader.NSy;
  657. TargetNx = VecConHeader.NTx;
  658. TargetNy = VecConHeader.NTy;
  659. ArrayOrderXFast = VecConHeader.ArrayOrderXFast == 1;
  660. Dout(dc::con, "XFast=" << ArrayOrderXFast);
  661. Dout(dc::con, "read vectors");
  662. FileReader.readAndCastVector("SourceNr", SynSourceNr);
  663. FileReader.readAndCastVector("TargetNr", SynTargetNr);
  664. FileReader.readAndCastVector("Delays", SynDelays);
  665. FileReader.readVector("Weights", SynWeights);
  666. FileReader.close();
  667. Dout(dc::con, "Load_VecConnection_2_1: Data loaded");
  668. //Dout(dc::con, "Delays=" << stringifyVector(OriginalDelays));
  669. vector<T_Delays>::iterator it = max_element(SynDelays.begin(),SynDelays.end());
  670. maximumDelay = (*it)+1;
  671. if (maximumDelay >= Dmax) {
  672. throw RequestedDelayTooLarge(maximumDelay);
  673. }
  674. updateNSynapses();
  675. SetupDelaysArray();
  676. SetupPreSynNrArray();
  677. }
  678. int VecConnection::Save(const string& SaveWeightFileName)
  679. {
  680. string DirAndFileName = DataDirectory+SaveWeightFileName;
  681. Save_VecConnection_2_0(DirAndFileName);
  682. }
  683. void VecConnection::Save_VecConnection_1_0(const string& DirAndFileName)
  684. {
  685. if (compiledWithMemsave()) {
  686. throw NotSupportedWithMemsave("Save_VecConnection_1_0 not available when compiled with -sMEMSAVE=1");
  687. }
  688. int i,j,k;
  689. Dout(dc::con, "Save VecCon.file: " << DirAndFileName);
  690. FILE *fw;
  691. // save weights
  692. fw = fopen(DirAndFileName.c_str(),"w");
  693. // save file header
  694. const char *fts = FileTypeString.c_str();
  695. char ftsLength = strlen(fts);
  696. fwrite(&ftsLength, sizeof(ftsLength), 1, fw);
  697. fwrite(fts, ftsLength, 1, fw);
  698. Dout(dc::con, "SourceNx=" << SourceNx << " SourceLayer->Nx=" << SourceLayer->Nx);
  699. // save Synapses
  700. fwrite(&ns, sizeof(ns), 1, fw);
  701. fwrite(&(SourceNx), sizeof(SourceNx), 1, fw);
  702. fwrite(&(SourceNy), sizeof(SourceNy), 1, fw);
  703. fwrite(&nt, sizeof(nt), 1, fw);
  704. fwrite(&(TargetNx), sizeof(TargetNx), 1, fw);
  705. fwrite(&(TargetNy), sizeof(TargetNy), 1, fw);
  706. fwrite(&maximumDelay, sizeof(maximumDelay), 1, fw);
  707. fwrite(&NSynapses, sizeof(NSynapses), 1, fw);
  708. int SynSize = SynWeights.size();
  709. for (int SynNr=0;SynNr<SynSize;++SynNr) {
  710. if (SynSourceNr[SynNr] != -1) {
  711. fwrite(&SynWeights[SynNr], sizeof(SynWeights[SynNr]), 1, fw);
  712. fwrite(&SynSourceNr[SynNr], sizeof(SynSourceNr[SynNr]), 1, fw);
  713. fwrite(&SynTargetNr[SynNr], sizeof(SynTargetNr[SynNr]), 1, fw);
  714. fwrite(&SynDelays[SynNr], sizeof(SynDelays[SynNr]), 1, fw);
  715. } // else {Dout(dc::con, "not saving deleted Syn");}
  716. }
  717. fclose(fw);
  718. Dout(dc::con, " saved weights ");
  719. }
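// Layout of the VecConnection_1.0 file as written by the routine above:
//   1 byte          length L of the file type string
//   L bytes         file type string ("VecConnection_1.0", no terminator)
//   ns, SourceNx, SourceNy, nt, TargetNx, TargetNy   (one fwrite each)
//   maximumDelay, NSynapses                          (one fwrite each)
//   then, for every non-deleted synapse:
//     weight, source number, target number, delay
// The field widths are whatever sizeof() yields for the member types on the
// machine that wrote the file, so the format is not portable across type or
// endianness changes.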
  720. int VecConnection::DeleteSynapseArrays()
  721. {
  722. SynWeights.clear();
  723. SynSourceNr.clear();
  724. SynTargetNr.clear();
  725. SynDelays.clear();
  726. for (vector<vector<T_NSynapses> >::iterator it=PreSynNr.begin(); it!=PreSynNr.end(); ++it) {
  727. (*it).clear();
  728. }
  729. for (vector<vector<vector<T_NSynapses> > >::iterator ita=delays.begin(); ita!=delays.end(); ++ita) {
  730. for (vector< vector<T_NSynapses> >::iterator itb=(*ita).begin();itb!=(*ita).end();++itb) {
  731. (*itb).clear();
  732. }
  733. }
  734. NSynapses=0;maximumDelay=1;
  735. }
  736. int VecConnection::Load()
  737. {
  738. return Load(WeightFileName);
  739. }
  740. int VecConnection::Load(const string& FileName)
  741. {
  742. return Load(FileName, DataDirectory);
  743. }
  744. int VecConnection::ReserveSynapses(int _nsynapses)
  745. {
  746. SynWeights.reserve(_nsynapses);
  747. SynTargetNr.reserve(_nsynapses);
  748. SynSourceNr.reserve(_nsynapses);
  749. SynDelays.reserve(_nsynapses);
  750. }
  751. /** @brief load VecConnection from data file
  752. @todo design new weight file format, especially with respect
  753. to different data types for source and target neurons (T_NNeurons) and delays (T_Delays)
  754. hints for file format design:
  755. http://www.magicdb.org/filedesign.html
  756. http://decoy.iki.fi/texts/filefd/filefd
  757. */
  758. int VecConnection::Load(const string& FileName, const string& DirName)
  759. {
  760. std::string DirAndFileName = DirName+FileName;
  761. Dout(dc::con, "DirAndFileName=" << DirAndFileName << "");
  762. const char* DFileName = DirAndFileName.c_str();
  763. if (!fexist(DFileName)) {
  764. cerr << "\n\nERROR: connection file " << DFileName << " doesn't exist \n\n";
  765. fflush(stderr);
  766. return(2);
  767. }
  768. Dout(dc::con, "LoadWeightFile: " << DFileName << " ");
  769. FileFormat WeightFileFormat = readFileFormat(DirAndFileName);
  770. Dout(dc::con, "FileFormat = " << WeightFileFormat.print());
  771. //cout << "FileFormat = " << WeightFileFormat.print() << "\n";
  772. if (WeightFileFormat.isEqual("VecConnection_1.0")) {
  773. Dout(dc::con, "Load VecConnection_1.0");
  774. return Load_VecConnection_1_0(DirAndFileName);
  775. } else if (WeightFileFormat.isEqual(FileFormat("VecConnection",2,0))) {
  776. Dout(dc::con, "Load VecConnection_2.0");
  777. Load_VecConnection_2_0(DirAndFileName);
  778. return 0;
  779. } else if (WeightFileFormat.isEqual(FileFormat("VecConnection",2,1))) {
  780. Dout(dc::con, "Load VecConnection_2.1");
  781. Load_VecConnection_2_1(DirAndFileName);
  782. return 0;
  783. }
  784. return -1;
  785. }
  786. int VecConnection::Load_VecConnection_1_0(const string& FileNameWithDir)
  787. {
  788. if (compiledWithLowMemConfig()) {
  789. throw NotSupportedWithMemsave("Load_VecConnection_1_0 not available when compiled with -sMEMSAVE=1");
  790. }
  791. int i,j,k;
  792. FILE *fw;
  793. int dummy;
  794. fw = fopen(FileNameWithDir.c_str(), "r");
  795. // Load file header
  796. unsigned char ftsLength;
  797. fread(&ftsLength, sizeof(ftsLength), 1, fw);
  798. int FLength = static_cast<int>(ftsLength);
  799. Dout(dc::con, "StrLen of file type = " << FLength << "");
  800. char *LoadedFileType = new char [ftsLength+1];
  801. fread(LoadedFileType, ftsLength, 1, fw);
  802. LoadedFileType[ftsLength]=0;
  803. Dout(dc::con, "loaded file type:" << LoadedFileType << "");
  804. if (strcmp(LoadedFileType, FileTypeString.c_str())) {
  805. cerr << "VecConnection.load() tried to load wrong file type\n";
  806. cerr << "file type was: " << LoadedFileType <<"\n";
  807. cerr << "file type should have been: " << FileTypeString << "\n";
  808. return(-1);
  809. };
  810. delete[]LoadedFileType;
  811. DeleteSynapseArrays();
  812. fread(&ns, sizeof(ns), 1, fw);
  813. Dout(dc::con, "SourceN=" << ns << "");
  814. fread(&SourceNx, sizeof(SourceNx), 1, fw);
  815. fread(&SourceNy, sizeof(SourceNy), 1, fw);
  816. fread(&nt, sizeof(nt), 1, fw);
  817. Dout(dc::con, "TargetN=" << nt << "");
  818. fread(&TargetNx, sizeof(TargetNx), 1, fw);
  819. fread(&TargetNy, sizeof(TargetNy), 1, fw);
  820. fread(&maximumDelay, sizeof(maximumDelay), 1, fw);
  821. Dout(dc::con, "maximumDelay=" << maximumDelay << "");
  822. Dout(dc::con, "ConDimensions= (" << SourceNx << ", " << SourceNy << ", " << TargetNx << ", " << TargetNy << ")");
  823. if (CheckHeaderConsistency()==false) {
  824. return(-1);
  825. }
  826. delays.resize(ns);
  827. PreSynNr.resize(nt);
  828. int _nsynapses;
  829. fread(&_nsynapses, sizeof(_nsynapses), 1, fw);
  830. Dout(dc::con, "NSynapses=" << _nsynapses<< "");
  831. ReserveSynapses(_nsynapses);
  832. Dout(dc::con, "Synapses reserved ");
  833. float w;
  834. int tar,src,del;
  835. for (int i=0;i<_nsynapses;++i) {
  836. fread(&w,sizeof(w),1,fw);
  837. fread(&src,sizeof(src),1,fw);
  838. fread(&tar,sizeof(tar),1,fw);
  839. fread(&del,sizeof(del),1,fw);
  840. PushBackNewSynapse(src,tar,w,del);
  841. if (w>0) {
  842. // Dout(dc::con, "(x,src,tar,del)=(" <<w<<","<<src<<","<<tar<<","<<del<<")");
  843. }
  844. }
  845. if (_nsynapses != NSynapses) {
  846. cerr << "ERROR in VecConnection::Load\n";
  847. exit(1);
  848. }
  849. SetupDelaysArray();
  850. SetupPreSynNrArray();
  851. Dout(dc::con, "finished loading connection");
  852. return(0);
  853. }
  854. int VecConnection::MultiplyTargetWeights(int TargetNr, float Factor)
  855. {
  856. for (vector<T_NSynapses>::iterator it=PreSynNr[TargetNr].begin(); it!=PreSynNr[TargetNr].end(); ++it) {
  857. SynWeights[(*it)] *= Factor;
  858. }
  859. }
  860. int VecConnection::MultiplyAllTargetWeights(float Factor)
  861. {
  862. for (int t=0; t<nt;++t) MultiplyTargetWeights(t, Factor);
  863. }
  864. float VecConnection::GetWeightSum(int TargetNr, bool quadratic)
  865. {
  866. float CurWeightSum=0;
  867. for (vector<T_NSynapses>::iterator it=PreSynNr[TargetNr].begin(); it!=PreSynNr[TargetNr].end(); ++it) {
  868. CurWeightSum += SynWeights[(*it)];
  869. }
  870. return CurWeightSum;
  871. }
  872. /**
  873. * calculate mean of all weights
  874. *
  875. * @return MeanWeight; or -1 if there are no synapses
  876. */
  877. float VecConnection::GetMeanWeight()
  878. {
  879. int NumberOfSynapses = SynDelays.size();
  880. int SynCount=0;
  881. float WeightSum=0;
  882. for (int SynNr=0;SynNr<NumberOfSynapses;++SynNr) {
  883. if (SynDelays[SynNr] != -1) { // deleted synapses have delay -1
  884. WeightSum+=SynWeights[SynNr];
  885. ++SynCount;
  886. }
  887. }
  888. assert(SynCount==NSynapses);
  889. float MeanWeight=-1;
  890. if (SynCount>0) {
  891. MeanWeight=WeightSum/SynCount;
  892. }
  893. return MeanWeight;
  894. }
  895. int VecConnection::WriteSimInfo(fstream &fw)
  896. {
  897. stringstream sstr;
  898. sstr << "<Source id=\"" << SourceLayer->IdNumber << "\"/> \n";
  899. sstr << "<Target id=\"" << TargetLayer->IdNumber << "\"/> \n";
  900. sstr << "<MaxWeight value=\"" << maxWeight << "\"/> \n";
  901. sstr << "<Strength value=\"" << Strength << "\"/> \n";
  902. sstr << "<InputNumber value=\"" << int(InputChannel) << "\"/> \n";
  903. if (learnobj) sstr << learnobj->GetSimInfo();
  904. SimElement::WriteSimInfo(fw, sstr.str());
  905. }
  906. int VecConnection::WriteSimInfo(fstream &fw, const string &ChildInfo)
  907. {
  908. stringstream sstr;
  909. sstr << "<Source id=\"" << SourceLayer->IdNumber << "\"/> \n";
  910. sstr << "<Target id=\"" << TargetLayer->IdNumber << "\"/> \n";
  911. sstr << ChildInfo;
  912. sstr << "<MaxWeight value=\"" << maxWeight << "\"/> \n";
  913. sstr << "<Strength value=\"" << Strength << "\"/> \n";
  914. sstr << "<InputNumber value=\"" << int(InputChannel) << "\"/> \n";
  915. if (learnobj) sstr << learnobj->GetSimInfo();
  916. SimElement::WriteSimInfo(fw, sstr.str());
  917. }
  918. int VecConnection::Rewire(float minWeight, float maxConnectivity)
  919. {
  920. // first: delete low weights
  921. Dout(dc::con, "DeleteLowWeights"); fflush(stdout);
  922. int NFreeWeights = DeleteLowWeights(minWeight);
  923. // second: set new weights randomly
  924. Dout(dc::con, "SetNewWeights"); fflush(stdout);
  925. SetNewWeights(maxConnectivity, InitialWeight);
  926. }
  927. int VecConnection::SetLearn(bool l)
  928. {
  929. if (learnobj != 0) learn = l;
  930. }
  931. int VecConnection::SetLearnObj(veclearning* lo)
  932. {
  933. learnobj = lo;
  934. SetLearn(true);
  935. }
  936. float VecConnection::GetSourceWeights(int SourceNr, vector<float>& WeightMatrix)
  937. {
  938. WeightMatrix.clear();
  939. WeightMatrix.resize(nt);
  940. float MaxWeight=0;
  941. for(int CurDelay=0;CurDelay<maximumDelay;++CurDelay)
  942. {
  943. Dout(dc::con, "(Delay=" << CurDelay << ") ");
  944. for (vector <T_NSynapses>::iterator it=delays[SourceNr][CurDelay].begin();
  945. it != delays[SourceNr][CurDelay].end();++it)
  946. {
  947. int CurTarget=SynTargetNr[(*it)];
  948. Dout(dc::con, "(Target=" << CurTarget << ") ");
  949. float CurWeight=SynWeights[(*it)];
  950. WeightMatrix[CurTarget]=CurWeight;
  951. if (CurWeight>MaxWeight) {
  952. MaxWeight=CurWeight;
  953. }
  954. }
  955. }
  956. return MaxWeight;
  957. }
  958. /** returns list of synapses from source neuron
  959. * @param [IN] SourceNr
  960. * @param [OUT] SynList is a return vector containing a list of all synapses from source neuron SourceNr
  961. * @return
  962. */
  963. float VecConnection::GetSourceWeights(int SourceNr, vector<Synapse>& SynList, int &MaxDelay)
  964. {
  965. SynList.clear();
  966. float MaxWeight=0;
  967. MaxDelay=0;
  968. for(int CurDelay=0;CurDelay<maximumDelay;++CurDelay)
  969. {
  970. for (vector <T_NSynapses>::iterator it=delays[SourceNr][CurDelay].begin();
  971. it != delays[SourceNr][CurDelay].end();++it)
  972. {
  973. int CurTarget=SynTargetNr[(*it)];
  974. int CurSource=SynSourceNr[(*it)];
  975. if (SourceNr!=CurSource) {
  976. cerr << "*******ERROR*****************************************\n";
  977. cerr << "float GetSourceWeights(int SourceNr, vector<Synapse>& SynList)\n";
  978. cerr << "corrupt\n";
  979. }
  980. float CurWeight=SynWeights[(*it)];
  981. SynList.push_back(Synapse(CurSource, CurTarget, CurWeight, CurDelay));
  982. if (CurWeight>MaxWeight) {
  983. MaxWeight=CurWeight;
  984. }
  985. if (CurDelay>MaxDelay) {
  986. MaxDelay=CurDelay;
  987. }
  988. }
  989. }
  990. return MaxWeight;
  991. }
  992. float VecConnection::GetTargetWeights(int TargetNr, vector<float>& WeightMatrix)
  993. {
  994. WeightMatrix.clear();
  995. WeightMatrix.resize(ns);
  996. float MaxWeight=0;
  997. for (vector<T_NSynapses>::iterator it=PreSynNr[TargetNr].begin();
  998. it!=PreSynNr[TargetNr].end();++it)
  999. {
  1000. int CurSynNr=(*it);
  1001. float CurWeight=SynWeights[CurSynNr];
  1002. int SourceNr=SynSourceNr[CurSynNr];
  1003. Dout(dc::con, "SynNr=" << CurSynNr << " SourceNr=" << SourceNr << " CurWeight=" << CurWeight << "");
  1004. WeightMatrix[SourceNr]=CurWeight;
  1005. if (CurWeight>MaxWeight) {
  1006. MaxWeight=CurWeight;
  1007. }
  1008. }
  1009. return MaxWeight;
  1010. }
  1011. float VecConnection::GetTargetWeights(int TargetNr, vector<Synapse>& SynList, int &MaxDelay)
  1012. {
  1013. SynList.clear();
  1014. float MaxWeight=0;
  1015. MaxDelay=0;
  1016. for (vector<T_NSynapses>::iterator it=PreSynNr[TargetNr].begin();
  1017. it!=PreSynNr[TargetNr].end();++it)
  1018. {
  1019. int CurSynNr=(*it);
  1020. float CurWeight=SynWeights[CurSynNr];
  1021. int CurSource=SynSourceNr[CurSynNr];
  1022. int CurTarget = SynTargetNr[CurSynNr];
  1023. int CurDelay = SynDelays[CurSynNr];
  1024. if (CurTarget!=TargetNr) {
  1025. cerr << "*******ERROR*****************************************\n";
  1026. cerr << "float GetTargetWeights(int TargetNr, vector<Synapse>& SynList)\n";
  1027. cerr << "corrupt\n";
  1028. }
  1029. SynList.push_back(Synapse(CurSource, CurTarget, CurWeight, CurDelay));
  1030. if (CurWeight>MaxWeight) {
  1031. MaxWeight=CurWeight;
  1032. }
  1033. if (CurDelay>MaxDelay) {
  1034. MaxDelay=CurDelay;
  1035. }
  1036. }
  1037. return MaxWeight;
  1038. }
  1039. void VecConnection::GetWeightMatrix4D(Matrix4D& matrix)
  1040. {
  1041. matrix.Resize(SourceNx, SourceNy, TargetNx, TargetNy);
  1042. matrix.SetValue(-1);
  1043. for (vector<vector<vector<T_NSynapses> > >::iterator DelSource=delays.begin();
  1044. DelSource!=delays.end();++DelSource) {
  1045. for (vector<vector<T_NSynapses> >::iterator Delay=(*DelSource).begin();
  1046. Delay!=(*DelSource).end();++Delay) {
  1047. for (vector<T_NSynapses>::iterator Index=(*Delay).begin();
  1048. Index!=(*Delay).end(); ++Index) {
  1049. int SourceNr=SynSourceNr[(*Index)];
  1050. int TargetNr=SynTargetNr[(*Index)];
  1051. int SourceY = SourceNr/SourceNx;
  1052. int SourceX = SourceNr-(SourceY*SourceNx);
  1053. int TargetY = TargetNr/TargetNx;
  1054. int TargetX = TargetNr-(TargetY*TargetNx);
  1055. matrix.SetValue(SynWeights[(*Index)], SourceX, SourceY, TargetX, TargetY);
  1056. }
  1057. }
  1058. }
  1059. }
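// Index arithmetic used above (sketch): neuron numbers are decomposed row-wise
// with X running fastest. For example, with SourceNx = 4 (an assumed grid
// width, not a value from this file), SourceNr = 9 gives SourceY = 9/4 = 2 and
// SourceX = 9 - 2*4 = 1, i.e. the neuron sits at grid position (x=1, y=2).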
  1060. float VecConnection::GetMaxWeight()
  1061. {
  1062. float MaxWeight=0;
  1063. for (int SourceNr=0;SourceNr<ns;++SourceNr) {
  1064. for(int CurDelay=0;CurDelay<maximumDelay;++CurDelay)
  1065. {
  1066. for (vector <T_NSynapses>::iterator it=delays[SourceNr][CurDelay].begin();
  1067. it != delays[SourceNr][CurDelay].end();++it)
  1068. {
  1069. int CurTarget=SynTargetNr[(*it)];
  1070. int CurSource=SynSourceNr[(*it)];
  1071. if (SourceNr!=CurSource) {
  1072. cerr << "*******ERROR*****************************************\n";
  1073. cerr << "float VecConnection::GetMaxWeight()\n";
  1074. cerr << "corrupt\n";
  1075. }
  1076. float CurWeight=SynWeights[(*it)];
  1077. if (CurWeight>MaxWeight) {
  1078. MaxWeight=CurWeight;
  1079. }
  1080. }
  1081. }
  1082. }
  1083. return MaxWeight;
  1084. }
  1085. float VecConnection::GetMinWeight()
  1086. {
  1087. bool MinNotInitialized=true;
  1088. float MinWeight=0;
  1089. for (int SourceNr=0;SourceNr<ns;++SourceNr) {
  1090. for(int CurDelay=0;CurDelay<maximumDelay;++CurDelay)
  1091. {
  1092. for (vector <T_NSynapses>::iterator it=delays[SourceNr][CurDelay].begin();
  1093. it != delays[SourceNr][CurDelay].end();++it)
  1094. {
  1095. int CurTarget=SynTargetNr[(*it)];
  1096. int CurSource=SynSourceNr[(*it)];
  1097. if (SourceNr!=CurSource) {
  1098. cerr << "*******ERROR*****************************************\n";
  1099. cerr << "float VecConnection::GetMinWeight()\n";
  1100. cerr << "corrupt\n";
  1101. }
  1102. float CurWeight=SynWeights[(*it)];
  1103. if (MinNotInitialized) { MinWeight = CurWeight; MinNotInitialized = false; }
  1104. if (CurWeight<MinWeight) MinWeight = CurWeight;
  1105. }
  1106. }
  1107. }
  1108. return MinWeight;
  1109. }
  1110. int VecConnection::GetMaxDelay()
  1111. {
  1112. int MaxDelay=0;
  1113. for (int SourceNr=0;SourceNr<ns;++SourceNr) {
  1114. for(int CurDelay=MaxDelay;CurDelay<maximumDelay;++CurDelay) {
  1115. if (delays[SourceNr][CurDelay].size()>0) {
  1116. if (CurDelay>MaxDelay) {
  1117. MaxDelay=CurDelay;
  1118. }
  1119. }
  1120. }
  1121. }
  1122. return MaxDelay;
  1123. }
  1124. long VecConnection::calcMemoryConsumption()
  1125. {
  1126. long long MemSum=0;
  1127. MemSum += SynWeights.size()*sizeof(float);
  1128. MemSum += SynTargetNr.size()*sizeof(T_NNeurons);
  1129. MemSum += SynSourceNr.size()*sizeof(T_NNeurons);
  1130. MemSum += SynDelays.size()*sizeof(T_Delays);
  1131. int PreSynSum = 0;
  1132. PreSynSum += PreSynNr.size()*sizeof(vector<T_NSynapses>);
  1133. for (int i=0;i<PreSynNr.size();++i) {
  1134. PreSynSum += PreSynNr[i].size()*sizeof(T_NSynapses);
  1135. }
  1136. MemSum += PreSynSum;
  1137. cout << "PreSynSum=" << PreSynSum << "\n";
  1138. cout << "NSynapses = " << NSynapses << " MAXINT=" << INT_MAX << "\n";
  1139. int DelaySum=0;
  1140. DelaySum += delays.size()*sizeof(vector<vector<T_NSynapses> >);
  1141. for (int i=0;i<delays.size();++i) {
  1142. DelaySum += delays[i].size()*sizeof(vector<T_NSynapses>);
  1143. for (int j=0;j<delays[i].size();++j) {
  1144. DelaySum += delays[i][j].size()*sizeof(T_NSynapses);
  1145. }
  1146. }
  1147. MemSum += DelaySum;
  1148. cout << "DelaySum=" << DelaySum << "\n";
  1149. return MemSum;
  1150. }
  1151. ////////////////////////////
  1152. Connection* LoadConnection(const char* FileName)
  1153. {
  1154. // try loading VecConnection
  1155. VecConnection* vcon = new VecConnection();
  1156. int err=vcon->Load(FileName, "");
  1157. if (err==0) {
  1158. return vcon;
  1159. } else {
  1160. delete vcon;
  1161. vcon=0;
  1162. Dout(dc::con, "Could not load VecConnection file " << FileName);
  1163. Dout(dc::con, "try loading as connection");
  1164. connection* con = new connection();
  1165. err = con->Load(FileName, "");
  1166. if (err==0) {
  1167. return con;
  1168. } else {
  1169. delete con;
  1170. cerr << "ERROR: failed loading connection file " << FileName << "\n";
  1171. }
  1172. }
  1173. return 0;
  1174. }
  1175. ////////////////////////////
  1176. VecDepressiveConnection::VecDepressiveConnection(
  1177. layer* SL, layer* TL,
  1178. csimInputChannel InputNumber, float _TauRec, float _U_se, float InitializeFrequency)
  1179. : VecConnection(SL, TL, InputNumber),
  1180. U_SE(_U_se),
  1181. TauRec(_TauRec/dt), U_se_fac(1-_U_se)
  1182. {
  1183. E0=InitializeEfficacy(InitializeFrequency);
  1184. Dout(dc::con, " VecDepressiveConnection::VecDepressiveConnection ");
  1185. Dout(dc::con, " Initialize Efficacy values to E0=" << E0);
  1186. }
  1187. int VecDepressiveConnection::proceede(int TotalTime)
  1188. {
  1189. int t = TotalTime % MacroTimeStep;
  1190. int i,j,k,mi,ipre;
  1191. // calculate input for target layer
  1192. k=SourceLayer->N_firings;
  1193. // Only spikes that lie no more than maximumDelay time
  1194. // steps in the past are considered. The firings array
  1195. // consists of firings[SpikeNr][0]: spike time,
  1196. // firings[SpikeNr][1]: neuron number
  1197. while (t-SourceLayer->firings[--k][0] < maximumDelay)
  1198. {
  1199. int CurDelay = t-SourceLayer->firings[k][0];
  1200. ipre = SourceLayer->firings[k][1];
  1201. for (vector <T_NSynapses>::iterator it=delays[ipre][CurDelay].begin();
  1202. it != delays[ipre][CurDelay].end();
  1203. ++it)
  1204. {
  1205. // exponential recovery from last epsp
  1206. if (TauRec >0) {
  1207. Efficacy[(*it)] = (1-(1-Efficacy[(*it)])*exp(-(t-LastEpsp[(*it)])/TauRec));
  1208. } else {
  1209. Efficacy[(*it)]=1;
  1210. }
  1211. InputPointer[SynTargetNr[(*it)]] += SynWeights[(*it)]*Efficacy[(*it)];
  1212. Efficacy[(*it)] *= U_se_fac;
  1213. LastEpsp[(*it)] = t;
  1214. }
  1215. }
  1216. if (learn == true) {
  1217. learnobj->proceede(TotalTime);
  1218. }
  1219. }
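// Summary of the depression model implemented above (restating the code, not
// adding to it): for every delivered spike the synaptic efficacy first recovers
// exponentially towards 1 with time constant TauRec,
//   E <- 1 - (1 - E) * exp(-(t - LastEpsp)/TauRec),
// the postsynaptic input receives SynWeights * E, and the efficacy is then
// depressed by the utilization factor, E <- E * (1 - U_SE), before LastEpsp is
// updated to the current time step.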
  1220. int VecDepressiveConnection::PushBackNewSynapse(int source, int target, float weight, int delay)
  1221. {
  1222. SynSourceNr.push_back(source);
  1223. SynTargetNr.push_back(target);
  1224. SynWeights.push_back(weight);
  1225. SynDelays.push_back(delay);
  1226. Efficacy.push_back(E0);
  1227. LastEpsp.push_back(0);
  1228. PreSynNr[target].push_back(NSynapses);
  1229. ++NSynapses;
  1230. }
  1231. int VecDepressiveConnection::ReserveSynapses(int _nsynapses)
  1232. {
  1233. SynWeights.reserve(_nsynapses);
  1234. SynTargetNr.reserve(_nsynapses);
  1235. SynSourceNr.reserve(_nsynapses);
  1236. SynDelays.reserve(_nsynapses);
  1237. Efficacy.reserve(_nsynapses);
  1238. LastEpsp.reserve(_nsynapses);
  1239. }
  1240. int VecDepressiveConnection::reset(int t)
  1241. {
  1242. int nsyn=Efficacy.size();
  1243. if (nsyn!=LastEpsp.size()) {
  1244. Dout(dc::con, "ERROR in VecDepressiveConnection::reset"); exit(1);
  1245. }
  1246. for (int i=0;i<nsyn;++i) {
  1247. Efficacy[i]=E0;
  1248. LastEpsp[i]=1;
  1249. }
  1250. }
  1251. void VecDepressiveConnection::SetParameter(ParaType p, double value)
  1252. {
  1253. switch (p)
  1254. {
  1255. case PARA_U_SE:
  1256. U_SE=value;
  1257. U_se_fac =1-U_SE;
  1258. break;
  1259. case PARA_TAU_REC:
  1260. TauRec = value/dt;
  1261. break;
  1262. }
  1263. }
  1264. int VecDepressiveConnection::prepare(int step)
  1265. {
  1266. VecConnection::prepare(step);
  1267. for (vector <int>::iterator it=LastEpsp.begin(); it!=LastEpsp.end();++it) {
  1268. (*it) -=MacroTimeStep;
  1269. }
  1270. }
  1271. int VecDepressiveConnection::WriteSimInfo(fstream &fw)
  1272. {
  1273. stringstream sstr;
  1274. sstr << "<TauRec value=\"" << TauRec*dt << "\"/> \n";
  1275. sstr << "<U_SE value=\"" << U_SE << "\"/> \n";
  1276. VecConnection::WriteSimInfo(fw, sstr.str());
  1277. }
  1278. int VecDepressiveConnection::DeleteSynapseArrays()
  1279. {
  1280. LastEpsp.clear();
  1281. Efficacy.clear();
  1282. VecConnection::DeleteSynapseArrays();
  1283. }
  1284. /** initialize Efficacy according to assumed spike frequency
  1285. \f[
  1286. E_0=\frac{1-e^{\frac{-1}{f\tau}}-U_{SE}}{1-e^{\frac{-1}{f\tau}}}
  1287. \f]
  1288. @param f: frequency
  1289. */
  1290. float VecDepressiveConnection::InitializeEfficacy(float f)
  1291. {
  1292. float E0=1.0;
  1293. float EinsMinExp=0.0;
  1294. float EFact = exp(-1/(f*0.001*TauRec));
  1295. if (f>0.) {
  1296. E0=(U_se_fac-U_se_fac*EFact)/(1-U_se_fac*EFact);
  1297. Dout(dc::con, "E0=" << E0 << "");
  1298. }
  1299. if (E0>0) {
  1300. return E0;
  1301. } else {
  1302. return 0.0;
  1303. }
  1304. }
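// What InitializeEfficacy() computes (restating the code): with
// EFact = exp(-1/(f*0.001*TauRec)) and U_se_fac = 1 - U_SE, the initial
// efficacy is E0 = U_se_fac*(1 - EFact) / (1 - U_se_fac*EFact), clamped to 0
// from below; for f <= 0 it stays at 1. The factor 0.001 suggests f is meant
// in Hz while TauRec is held in simulation time steps, but that unit
// convention is an assumption, not something stated in this file.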
  1305. long VecDepressiveConnection::calcMemoryConsumption()
  1306. {
  1307. long long MemSum=VecConnection::calcMemoryConsumption();
  1308. MemSum += Efficacy.size()*sizeof(float);
  1309. MemSum += LastEpsp.size()*sizeof(int);
  1310. return MemSum;
  1311. }
  1312. bool VecConnection::hasEqualSynapses(const VecConnection & OtherVecCon)
  1313. {
  1314. bool WeightsAreEqual = vectorsAreEqual(SynWeights, OtherVecCon.SynWeights);
  1315. bool TargetNrAreEqual = vectorsAreEqual(SynTargetNr, OtherVecCon.SynTargetNr);
  1316. bool SourceNrAreEqual = vectorsAreEqual(SynSourceNr, OtherVecCon.SynSourceNr);
  1317. bool DelaysAreEqual = vectorsAreEqual(SynDelays, OtherVecCon.SynDelays);
  1318. Dout(dc::con, "VecConnection::hasEqualSynapses: NSyn=" << NSynapses
  1319. << " OtherNSyn=" << OtherVecCon.NSynapses
  1320. << " W=" << WeightsAreEqual
  1321. << " T=" << TargetNrAreEqual
  1322. << " S=" << SourceNrAreEqual
  1323. << " D=" << DelaysAreEqual);
  1324. //Dout(dc::con, stringifyVector(SynDelays));
  1325. //Dout(dc::con, stringifyVector(OtherVecCon.SynDelays));
  1326. return (NSynapses == OtherVecCon.NSynapses)
  1327. && WeightsAreEqual && TargetNrAreEqual && SourceNrAreEqual && DelaysAreEqual ;
  1328. }
  1329. void VecConnection::updateNSynapses()
  1330. {
  1331. NSynapses=0;
  1332. for (vector<T_Delays>::iterator it=SynDelays.begin(); it!=SynDelays.end(); ++it) {
  1333. if ((*it) != DELETED_SYNAPSE) {
  1334. ++NSynapses;
  1335. }
  1336. }
  1337. }