12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
727782779278027812782278327842785278627872788278927902791279227932794279527962797279827992800280128022803280428052806280728082809281028112812281328142815281628172818281928202821282228232824282528262827282828292830283128322833283428352836283728382839284028412842284328442845284628472848284928502851285228532854285528562857285828592860286128622863286428652866286728682869287028712872287328742875287628772878287928802881288228832884288528862887288828892890289128922893289428952896289728982899290029012902290329042905290629072908290929102911291229132914291529162917291829192920292129222923292429252926292729282929293029312932293329342935293629372938293929402941294229432944294529462947294829492950295129522953295429552956295729582959296029612962296329642965296629672968296929702971297229732974297529762977297829792980298129822983298429852986298729882989299029912992299329942995299629972998299930003001300230033004300530063007300830093010301130123013301430153016301730183019302030213022302330243025302630273028302930303031303230333034303530363037303830393040304130423043304430453046304730483049305030513052305330543055305630573058 |
- #include "sys.hpp" // for libcwd
- #include "debug.hpp" // for libcwd
- #include "connection.hpp"
- #include "learn.hpp"
- #include "layer.hpp"
- #include "simloop.hpp"
- #include <boost/multi_array.hpp>
- ////////////////////////////////////
- Connection::Connection(): SourceLayer(0), TargetLayer(0), InputPointer(0), ArrayOrderXFast(true)
- {
- Dmax=DMAX;
- maximumDelay = Dmax;
- }
/** Construct a Connection feeding spikes of source layer SL into one
 *  input channel of target layer TL.
 *
 *  Copies timing (dt, MacroTimeStep) and geometry (N, Nx, Ny) from the two
 *  layers and caches the target's input pointer for the given channel.
 *
 *  @param SL            presynaptic (source) layer.
 *  @param TL            postsynaptic (target) layer.
 *  @param _InputChannel input channel of the target layer written to.
 *  @param _nonself      if true, self-synapses are suppressed (only
 *                       meaningful when both layers have the same size).
 */
Connection::Connection(layer* SL, layer* TL, csimInputChannel _InputChannel, bool _nonself)
: SimElement(seConnection), SourceLayer(SL),
TargetLayer(TL), connectivity(0.1), Strength(0), cpu_time_used(0),
AutoSave(true), NonSelf(_nonself), RewireOn(false),
learn(false), InputChannel(_InputChannel), ArrayOrderXFast(true)
{
Name="Connection";
WeightFileName ="weights.dat";
dt = TargetLayer->GetDt();  // simulation time step [ms], taken from the target
MacroTimeStep = TargetLayer->GetMacroTimeStep();
InputPointer = TargetLayer->GetInputPointer(InputChannel);
Dmax=DMAX;           // compile-time maximum delay
maximumDelay = Dmax;
ns = SourceLayer->N; // number of source neurons
nt = TargetLayer->N; // number of target neurons
SourceNx=SourceLayer->Nx;
SourceNy=SourceLayer->Ny;
TargetNx=TargetLayer->Nx;
TargetNy=TargetLayer->Ny;

Dout(dc::con, "Connection::Connection " << SourceLayer->Name
<< "->"<<TargetLayer->Name);
Dout(dc::con, "Ntarget= " << TargetLayer->N << " Nsource= "
<< ns << "TimeStep=" << dt << "ms InputNumber=" << int(InputChannel));
}
/// Destructor; no resources to release here (base and members clean up).
Connection::~Connection()
{
Dout(dc::con, "Connection Destructor");
}
/// @return true if synaptic learning is currently enabled for this connection.
bool Connection::Learning()
{
return learn;
}
/// @return the postsynaptic (target) layer of this connection.
layer* Connection::GetTargetLayer()
{
return TargetLayer;
}
- int Connection::SetAutoSave(bool a)
- {
- AutoSave = a;
- }
/// Enable/disable suppression of self-synapses for subsequent connects.
void Connection::SetNonSelf(bool value)
{
NonSelf=value;
}
- int Connection::SetRewiring(float threshold, float _con, int _nnewcon, float _InitialWeight)
- {
- RewireOn=true;
- RewireMaxConnectivity=_con;
- RewireNNewConnections=_nnewcon;
- RewireThreshold=threshold;
- if (_InitialWeight == -1) {
- InitialWeight = 2*RewireThreshold;
- } else {
- InitialWeight=_InitialWeight;
- }
- Dout(dc::con, "Set up Rewiring, thres=" << RewireThreshold << " connectivity=" << RewireMaxConnectivity << "");
- }
/// Disable structural rewiring (parameters set via SetRewiring are kept).
void Connection::SetRewiringOff()
{
RewireOn=false;
}
/** Check consistency of file header data. Used on weight file loading.
 *
 *  Verifies that the stored geometry (SourceNx*SourceNy, TargetNx*TargetNy),
 *  the neuron counts (ns, nt) and the delay bound (maximumDelay) agree with
 *  the layers this connection is attached to. Each failed check prints a
 *  message to stderr but checking continues, so all problems are reported.
 *
 * @return true if consistent
 */
bool Connection::CheckHeaderConsistency()
{
bool consistent=true;
if (float(SourceNx)*float(SourceNy)>ns) {
cerr << "ERROR: float(SourceNx)*float(SourceNy)>ns \n";
consistent=false;
}
if (float(TargetNx)*float(TargetNy)>nt) {
cerr << "ERROR: float(TargetNx)*float(TargetNy)>nt \n";
consistent=false;
}
if (maximumDelay>Dmax) {
cerr << "ERROR: maximumDelay>Dmax \n";
consistent=false;
}

if (SourceLayer) { // consistency check, if connection is attached to a SourceLayer
Dout(dc::con, "Check consistency with SourceLayer");
if (ns != SourceLayer->N) {
cerr << "ERROR: SourceN doesn't fit\n"; fflush(stderr);
consistent=false;
}
}
if (TargetLayer) { // consistency check, if connection is attached to a TargetLayer
Dout(dc::con, "Check consistency with TargetLayer");
if (nt != TargetLayer->N) {
cerr << "ERROR: TargetN doesn't fit: nt=" << nt << "\n"; fflush(stderr);
consistent=false;
}
if (maximumDelay > TargetLayer->Dmax) {
cerr << "ERROR: maximumDelay > TargetDmax \n";fflush(stderr);
consistent=false;
}
}
return consistent;
}
- ///////////////////////////////////////
- connection::connection(): learnobj(0)
- {
- Name="connection";
- // initialize all pointers with 0
- post=0;
- s=sd=0;
- delays_length=0;
- delays=0;
- N_pre=0;
- I_pre=D_pre=0;
- s_pre=sd_pre=0;
- N_post=0;
- }
- connection::connection(layer* SL, layer* TL, csimInputChannel _InputChannel, bool _nonself)
- : Connection(SL, TL, _InputChannel, _nonself), learnobj(0)
- {
- Name="connection";
- // initialize all pointers with 0
- post=0;
- s=sd=0;
- delays_length=0;
- delays=0;
- N_pre=0;
- I_pre=D_pre=0;
- s_pre=sd_pre=0;
- N_post=0;
- }
- connection::~connection()
- {
- Dout(dc::con, "connection Destructor");
- if (learnobj != 0) delete learnobj;
- DeleteDynamicalArrays();
- DeletePresynapticInfo();
- }
- int connection::SetLearn(bool l)
- {
- if (learnobj != 0) learn = l;
- }
- int connection::SetLearnObj(learning* lo)
- {
- learnobj = lo;
- SetLearn(true);
- }
- int connection::ObserveSynapse(int SourceNumber, int MNumber, char* RecName)
- {
- StartRecorder(RecName);
- if (SourceNumber < ns) Observe_s = SourceNumber; else Observe_s=0;
- if (MNumber < M) Observe_m = MNumber; else Observe_m=0;
- }
- int connection::ObserveSynapsePrePost(int SourceNumber, int TargetNumber, char* RecName)
- {
- int TargetM=0;
- while((TargetM<M) && post[SourceNumber][TargetM] != TargetNumber) ++TargetM;
- if (TargetM<M) {
- ObserveSynapse(SourceNumber, TargetM, RecName);
- cout << "Observe Synapse: SourceNumber="
- << SourceNumber
- << " TargetNumber=" << TargetNumber
- << " TargetM=" << TargetM << "\n";
- }
- }
- int connection::StartBinRec(int PostsynNumber)
- {
- int NumPreSyn = N_pre[PostsynNumber];
- if (NumPreSyn > 0) {
- Dout(dc::con, "\n\nSSSSSSSSSSSSSSSSSSSSNumPreSyn=" << NumPreSyn << "\nSSSSSSSSSSSSS");
- float** Buffer = new float* [NumPreSyn];
- for (int i=0;i<NumPreSyn; ++i) {
- Buffer[i] = s_pre[PostsynNumber][i];
- // Dout(dc::con, "i=" << i << " " << *Buffer[i]);
- }
- string FileName("synweight.dat.bin");
- BinRec = new BinRecorder(MacroTimeStep, NumPreSyn, NumPreSyn, Buffer, dt, (DataDirectory+FileName).c_str());
- }
- }
- int connection::WriteSimInfo(fstream &fw)
- {
- stringstream sstr;
- sstr << "<Source id=\"" << SourceLayer->IdNumber << "\"/> \n";
- sstr << "<Target id=\"" << TargetLayer->IdNumber << "\"/> \n";
- sstr << "<MaxWeight value=\"" << maxWeight << "\"/> \n";
- sstr << "<Strength value=\"" << Strength << "\"/> \n";
- sstr << "<InputNumber value=\"" << int(InputChannel) << "\"/> \n";
- if (learnobj) sstr << learnobj->GetSimInfo();
- SimElement::WriteSimInfo(fw, sstr.str());
- }
- int connection::WriteSimInfo(fstream &fw, const string &ChildInfo)
- {
- stringstream sstr;
- sstr << "<Source id=\"" << SourceLayer->IdNumber << "\"/> \n";
- sstr << "<Target id=\"" << TargetLayer->IdNumber << "\"/> \n";
- sstr << ChildInfo;
- sstr << "<MaxWeight value=\"" << maxWeight << "\"/> \n";
- sstr << "<Strength value=\"" << Strength << "\"/> \n";
- sstr << "<InputNumber value=\"" << int(InputChannel) << "\"/> \n";
- if (learnobj) sstr << learnobj->GetSimInfo();
- SimElement::WriteSimInfo(fw, sstr.str());
- }
/** Bundle the connection's internal state into a ConnectionInfo struct.
 *
 *  Exposes raw pointers to the weight/delay/presynaptic arrays (shared, not
 *  copied) so that learning objects and recorders can operate on them.
 *
 * @return ConnectionInfo with layer pointers, dimensions and array pointers.
 */
ConnectionInfo connection::GetConnectionInfo()
{
ConnectionInfo info;
info.Dmax = Dmax;
info.maximumDelay = maximumDelay;
info.MaxWeight = maxWeight;
info.TargetLayer = TargetLayer;
info.SourceLayer = SourceLayer;
info.M = M;
info.WeightPointer = s;
info.WeightDerivativePointer = sd;  // may be null: sd is never allocated here
info.delays_length = delays_length;
info.delays = delays;
info.post = post;
info.maxN_pre = maxN_pre;
info.I_pre = I_pre;
info.N_pre = N_pre;
info.s_pre = s_pre;
info.sd_pre = sd_pre;
info.D_pre = D_pre;
info.m_pre = m_pre;
return info;
}
- int connection::InitializeDynamicalArrays(const int _N, const int _M, const int _Dmax)
- {
- // initialize dynamical arrays
- cout<<"initialize dynamical begins";
- MainSimLoop->SetMaximumDelay(_Dmax);
- // indeces of postsynaptic neurons
- NewArray2d(post, _N, _M);
- for (int i=0;i<_N;++i) for (int j=0;j<_M;++j) post[i][j]=-1; // initialize with -1 (not connected)
- // matrix of synaptic weights
- NewArray2d(s, _N,_M);
- // NewArray2d(sd,_N,_M); weight derivatives, not used yet
- NewArray2d(delays_length, _N, _Dmax); // distribution of delays
- // initialize with zero
- for (int i=0;i<_N;++i) for (int j=0;j<_Dmax;++j) delays_length[i][j] = 0;
- NewArray3d(delays, _N, _Dmax, _M); // arrangement of delays
- Dout(dc::con,"initialize dynamical ends");
- }
- int connection::DeleteDynamicalArrays()
- {
- if(post!=0) DeleteArray2d(post,ns);
- if(s!=0) DeleteArray2d(s,ns);
- if(sd!=0) DeleteArray2d(sd,ns);
- if(delays_length!=0) DeleteArray2d(delays_length,ns);
- if(delays!=0) DeleteArray3d(delays,ns,maximumDelay);
- }
- int connection::ConnectRandom(float c, float InitialWeights, float maxDelay, float minDelay, bool RandomDelays)
- {
- Strength=InitialWeights;
- maxWeight=1;
- connectivity=c;
- nt = TargetLayer->N;
- // float TAULEARN_pre=50;
- // dec_pre = exp(-1./TAULEARN_pre);
- // float TAULEARN_post=5;
- // dec_pre = exp(-1./TAULEARN_post);
- int N;
- ns = N = SourceLayer->N;
- if (ns != nt) NonSelf = false; // NonSelf makes sense only for layers with same size
- M= (int) floor(c*nt);
- maxN_pre = 3*ns*M/nt;
- if (maxN_pre >= ns) maxN_pre = ns; // because there are no double synapses
- Dout(dc::con, "M= " << M << " c= " << c << " ns= " << ns << "maxN_pre=" << maxN_pre);
-
- if (M>nt) Dout(dc::con, "ERROR: too many connections, reduce connectivity!!");
- SetMinMaxDelay(maxDelay, minDelay);
- // initialize dynamical arrays
- InitializeDynamicalArrays(N, M, maximumDelay);
- int i,j,k,exists, r;
- Dout(dc::con, "connect");
- for (i=0;i<ns;i++) for (j=0;j<M;j++)
- {
- do {
- exists = 0; // avoid multiple synapses
- r = getrandom(nt);
- if (r==i) exists=1; // no self-synapses
- for (k=0;k<j;k++) if (post[i][k]==r) exists = 1; // synapse already exists
- } while (exists == 1);
- post[i][j]=r;
- }
- Dout(dc::con, "connected");
- for (i=0;i<ns;i++) for (j=0;j<M;j++) s[i][j]=InitialWeights; // initial exc. synaptic weights
-
- if (RandomDelays) {
- SetupRandomDelays();
- } else {
- SetupDelays();
- }
- SetupPresynapticInfo();
- // for (i=0;i<ns;i++) for (j=0;j<1001+Dmax;j++) LTP[i][j]=0.0;
- // for (i=0;i<nt;i++) LTD[i]=0.0;
-
- // learnobj = new learning(this);
- Dout(dc::con, "Connection initialized");
- }
- // ueberladener Funktionsname ConnectRandom2 mit unterschiedlichen Parameter-Typen
- // float _Connectivity vs. int NIncommingConnections
- int connection::ConnectRandom2(float _Connectivity, float InitialWeights, float maxDelay, float minDelay, bool RandomDelays)
- {
- if (_Connectivity <= 1) {
- ns = SourceLayer->N;
- int NConnections = int(round(float(ns)*_Connectivity));
- ConnectRandom2(NConnections, InitialWeights, maxDelay, minDelay, RandomDelays);
- } else {
- cerr << "ERROR in connection::ConnectRandom2: _Connectivity should be [0..1]\n";
- }
- }
- int connection::ConnectRandom2(int NIncommingConnections, float InitialWeights, float maxDelay, float minDelay, bool RandomDelays)
- {
- // random connections, but connectivity from postsynaptic perspective
- // every postsynaptic neuron has the same number of incomming connections
- // source neurons have nearly the same number of outgoing connections (plusminus 1)
- Dout(dc::con, " connection::ConnectRandom2");
- Strength=InitialWeights;
- maxWeight=1;
- nt = TargetLayer->N;
- int N;
- ns = N = SourceLayer->N;
- if (ns != nt) NonSelf = false; // NonSelf makes sense only for layers with same size
- int NConnections = NIncommingConnections*nt;
- connectivity = float(NIncommingConnections)/ns;
- float c = connectivity; // remove this
- M= (int) ceil(float(NConnections)/ns);
- bool MultipleSynapses=false;
- maxN_pre = NIncommingConnections;
- if (maxN_pre >= ns) {
- cerr << "ERROR in connection::ConnectRandom2: more incomming connections than source neurons\n";
- MultipleSynapses = true;
- }
- Dout(dc::con, "M= " << M << " c= " << connectivity << " ns= " << ns << "maxN_pre=" << maxN_pre);
- Dout(dc::con, "NConnections=" << NConnections << "nt=" << nt << "ns=" << ns << "NIncommingConnections=" << NIncommingConnections << "");
- if (M>nt) cerr << "ERROR: too many connections, reduce connectivity!!\n";
- SetMinMaxDelay(maxDelay, minDelay);
- // initialize dynamical arrays
- InitializeDynamicalArrays(N, M, maximumDelay);
- for (int i=0;i<N;++i) for (int j=0;j<M;++j) post[i][j]=-1;
- // if post is -1 then this is not a valid connection
- int i,j,k,exists, r;
- vector<vector <int> > TmpPreI(nt);
- vector<int> TmpNPost(ns);
- vector<int> ConnectionPool(ns*M);
- for (i=0;i<ns;++i) for (j=0;j<M;++j) ConnectionPool[j+M*i]=i;
- Dout(dc::con, "connect");
- int MaxReTry=5;
- int isource;
- for (int MPre=0;MPre<maxN_pre;++MPre) for (int itarget=0;itarget<nt;++itarget)
- {
- int Try=0;
- do {
- exists = 0; // avoid multiple synapses
- r = gsl_rng_uniform_int(gslr, ConnectionPool.size());
- isource = ConnectionPool[r];
- if ((isource==itarget) && NonSelf) exists=1; // no self-synapses
- for (vector<int>::iterator it=TmpPreI[itarget].begin();it!=TmpPreI[itarget].end();++it) {
- if ((*it)==isource) {
- exists = 1; // synapse already exists
- // cerr << "synapse allready exists\n";
- }
- }
- ++Try;
- } while ((exists == 1) && (Try<MaxReTry));
- if (Try == MaxReTry) cerr << "ERROR: multiple synapses\n";
- post[isource][TmpNPost[isource]]=itarget;
- ++TmpNPost[isource];
- TmpPreI[itarget].push_back(isource);
- ConnectionPool.erase(ConnectionPool.begin()+r);
- }
- Dout(dc::con, "connected");
- for (i=0;i<ns;i++) for (j=0;j<M;j++) s[i][j]=InitialWeights; // initial exc. synaptic weights
- SetupRandomDelays();
- SetupPresynapticInfo();
- Dout(dc::con, "Connection initialized");
- }
- int connection::ConnectPartialRandom(float c, float InitialWeights, int _MaxTarget, int _MaxSource, float maxDelay, float minDelay)
- {
- Strength=InitialWeights;
- Dout(dc::con, "ConnectPartialRandom");
- // maxWeight=1;
- connectivity=c;
- nt = TargetLayer->N;
- // float TAULEARN_pre=50;
- // dec_pre = exp(-1./TAULEARN_pre);
- // float TAULEARN_post=5;
- // dec_pre = exp(-1./TAULEARN_post);
- int N;
- ns = N = SourceLayer->N;
- if (ns != nt) NonSelf = false; // NonSelf makes sense only for layers with same size
- int MaxTarget = (_MaxTarget<nt) ? _MaxTarget : nt;
- int MaxSource = (_MaxSource<ns) ? _MaxSource: ns;
- M= (int) floor(c*MaxTarget);
- maxN_pre = 3*ns*M/MaxTarget;
- if (maxN_pre >= ns) maxN_pre = ns; // because there are no double synapses
- Dout(dc::con, "M= " << M << " c= " << c << " ns= " << ns << "maxN_pre=" << maxN_pre);
-
- if (M>nt) Dout(dc::con, "ERROR: too many connections, reduce connectivity!!");
- SetMinMaxDelay(maxDelay, minDelay);
- // initialize dynamical arrays
- InitializeDynamicalArrays(N, M, maximumDelay);
- int i,j,k,exists, r;
- Dout(dc::con, "connect");
- for (i=0;i<ns;i++) for (j=0;j<M;j++)
- {
- do {
- exists = 0; // avoid multiple synapses
- r = getrandom(MaxTarget);
- if (r==i) exists=1; // no self-synapses
- for (k=0;k<j;k++) if (post[i][k]==r) exists = 1; // synapse already exists
- } while (exists == 1);
- post[i][j]=r;
- }
- Dout(dc::con, "connected");
- for (i=0;i<ns;i++) for (j=0;j<M;j++) s[i][j]=InitialWeights; // initial exc. synaptic weights
-
- SetupDelays();
- SetupPresynapticInfo();
- // for (i=0;i<ns;i++) for (j=0;j<1001+Dmax;j++) LTP[i][j]=0.0;
- // for (i=0;i<nt;i++) LTD[i]=0.0;
-
- // learnobj = new learning(this);
- Dout(dc::con, "Connection initialized");
- }
- int connection::ConnectGaussian(float Sigma, float MaxWeight, float maxDelay, float minDelay, bool Cyclic)
- {
- Strength=MaxWeight;
- Dout(dc::con, "Connection::ConnectGaussian");
- nt = TargetLayer->N;
- int ConnectionCount=0;
- int N;
- ns = N = SourceLayer->N;
- if (ns != nt) NonSelf = false; // NonSelf makes sense only for layers with same size
- int i,j,k,exists, r;
- // Dout(dc::con, "connect");
- // double loop (ns, nt) goes throug every possible synapse and checks wether to connect or not
- int TargetCount;
- M=0;
- // typedef boost::multi_array<int, 2> int2d_array;
- // typedef int2d_array::index index;
- boost::multi_array<int, 2> TempPost(boost::extents[ns][nt]);
- boost::multi_array<float, 2> TempDistance(boost::extents[ns][nt]);
- for (i=0;i<ns;++i) for (j=0;j<nt;++j)
- {
- TempPost[i][j]=-1;
- TempDistance[i][j]=-1;
- }
-
- vector<int> TempNpre (nt);
- for (i=0;i<nt;++i) TempNpre[i]=0;
- float InitialWeights=1;
- int CurDelay;
- SetMinMaxDelay(maxDelay, minDelay);
- maxN_pre = 0;
- float Distance;
- vector2d basis(1,1);
- SimpleTextProgressBar pgbar(ns);
- float MaxConDistance = 3*Sigma;
- for (i=0;i<ns;i++) {
- pgbar.Next(i);
- TargetCount=0;
- for (j=0;j<nt;j++)
- {
- // set connection in temporary boost multi_array
- if (Cyclic) Distance = (TargetLayer->Pos[j]).CyclicDistance(SourceLayer->Pos[i], basis);
- else Distance = (TargetLayer->Pos[j] - SourceLayer->Pos[i]).abs() ;
- if ((Distance < MaxConDistance) && (!NonSelf || (i!=j))) {
- TempDistance[i][TargetCount] = Distance;
- TempPost[i][TargetCount]=j;
- ++TempNpre[j]; // count presynaptic Targeting of postsynaptic neuron
- ++TargetCount;
- ++ConnectionCount;
- }
- }
- if (TargetCount > M) M=TargetCount;
- }
- for (j=0;j<nt;++j) if (TempNpre[j] > maxN_pre) maxN_pre = TempNpre[j];
- Dout(dc::con, "M= " << M << " ns= " << ns << "maxN_pre=" << maxN_pre);
- Dout(dc::con, "ConnectionCount=" << ConnectionCount);
-
- // initialize dynamical arrays
- InitializeDynamicalArrays(N, M, maximumDelay);
- // copy temporary TempPost to post array
- for (i=0;i<ns;++i) for (j=0;j<M;++j) post[i][j] = TempPost[i][j];
- // setup delays and initial weights
- pgbar.Reset(ns);
- for (i=0;i<ns;i++) {
- pgbar.Next(i);
- for (j=0;j<M;j++)
- {
- if (post[i][j] != -1) {
- s[i][j] = MaxWeight*gauss(TempDistance[i][j],Sigma);
- // CurDelay = minimumDelay + gsl_rng_uniform_int(gslr, DelayDiff+1);
- CurDelay = minimumDelay + int(TempDistance[i][j]*DelayDiff/MaxConDistance); // ToDo: FixMe!!!!!!!!!!!!!!!!!!! think a lot!!!
- if (CurDelay>=maximumDelay)
- {
- CurDelay = maximumDelay;
- Dout(dc::con, "ERROR: delay too high");
- }
- delays[i][CurDelay][delays_length[i][CurDelay]] = j;
- ++delays_length[i][CurDelay];
- }
- }
- }
- Dout(dc::con, "connected");
- SetupPresynapticInfo();
- Dout(dc::con, "Connection initialized");
-
- }
- int connection::ConnectGaussianColumnwise(float Sigma, float MaxWeight, int dimx, int dimy, bool Cyclic,bool Shifted, float maxDelay, float minDelay,bool divergent,bool notstraight)
- { Strength=MaxWeight;
-
- int shiftitornot=0;
- if (Shifted==true) shiftitornot=1;
- Dout(dc::con, "Connection::ConnectGaussianColumnwise");
- nt = TargetLayer->N;
- int ConnectionCount=0;
- int N;
- ns = N = SourceLayer->N;
- if (ns != nt) NonSelf = false; // NonSelf makes sense only for layers with same size
- float ntns;
- if (divergent = true) ntns=(nt/dimy)/ns;
- else ntns=1;
- float dimxdimy=dimx/dimy;
- int i,x,y,j,exists, r;
- // Dout(dc::con, "connect"); fflush(stdout);
- // double loop (ns, nt) goes throug every possible synapse and checks wether to connect or not
- int TargetCount;
- M=0;
- // typedef boost::multi_array<int, 2> int2d_array;
- // typedef int2d_array::index index;
- boost::multi_array<int, 2> TempPost(boost::extents[ns][nt]);
- boost::multi_array<float, 2> TempDistance(boost::extents[ns][nt]);
- for (i=0;i<ns;++i) for (j=0;j<nt;++j)
- {
- TempPost[i][j]=-1;
- TempDistance[i][j]=-1;
- }
- vector<int> TempNpre (nt);
- for (i=0;i<nt;++i) TempNpre[i]=0;
- float InitialWeights=1;
- int CurDelay;
- SetMinMaxDelay(maxDelay, minDelay);
- maxN_pre = 0;
- float Distance;
- vector2d basis(1,1);
- SimpleTextProgressBar pgbar(ns);
- float MaxConDistance = 3*Sigma;
- float yshift=0.;
- for (i=0;i<ns;i++)
- {
- pgbar.Next(i);
- TargetCount=0;
- for (y=0;y<dimy;y++)
- {
- for (x=0;x<dimx;x++)
- { int synapse_number=x+y*dimx;
- // set connection in temporary boost multi_array
- yshift=y*(dimxdimy);
- int Dist_help;
- if (notstraight) Dist_help=int(1000*(x+(yshift)*shiftitornot));
- else Dist_help=int(1000*((yshift)*shiftitornot));
- int dimx_help=1000*dimx;
- if (Cyclic) Distance =(TargetLayer->Pos[(Dist_help%dimx_help)/1000].CyclicDistance(SourceLayer->Pos[i], basis));
- else Distance = (TargetLayer->Pos[x] - SourceLayer->Pos[i]).abs();
- if ((Distance < MaxConDistance) && (!NonSelf || (i!=synapse_number)))
- {
- TempDistance[i][TargetCount] = Distance;
- TempPost[i][TargetCount]=synapse_number;
- // TempPost2[i][TargetCount][0]=k;
- // TempPost2[i][TargetCount][1]=j;
- ++TempNpre[synapse_number]; // count presynaptic Targeting of postsynaptic neuron
- ++TargetCount;
- ++ConnectionCount;
- }
- }
-
- }
- if (TargetCount > M) M=TargetCount;
- }
-
- for (j=0;j<nt;++j) if (TempNpre[j] > maxN_pre) maxN_pre = TempNpre[j];
- Dout(dc::con, "M= " << M << " ns= " << ns << "maxN_pre=" << maxN_pre);
- Dout(dc::con, "ConnectionCount=" << ConnectionCount);
-
- // initialize dynamical arrays
- InitializeDynamicalArrays(N, M, maximumDelay);
- // copy temporary TempPost to post array
- for (i=0;i<ns;++i) for (j=0;j<M;++j) post[i][j] = TempPost[i][j];
- // setup delays and initial weights
- pgbar.Reset(ns);
-
- for (i=0;i<ns;i++) {
-
- pgbar.Next(i);
- for (y=0;y<dimy;y++)
- {
- for (x=0;x<dimx;x++)
- { int synapse_number=x+y*dimx;
- if ((post[i][synapse_number] != -1) && (synapse_number < M)){
- s[i][synapse_number]=MaxWeight*gauss(TempDistance[i][synapse_number],Sigma);
- // CurDelay = minimumDelay + gsl_rng_uniform_int(gslr, DelayDiff+1);
- CurDelay = minimumDelay + int(TempDistance[i][synapse_number]*DelayDiff/MaxConDistance); // ToDo: FixMe!!!!!!!!!!!!!!!!!!! think a lot!!!
- if (CurDelay>=maximumDelay)
- {
- CurDelay = maximumDelay;
- cerr << "ERROR: delay too high\n";
- }
- delays[i][CurDelay][delays_length[i][CurDelay]] = synapse_number;
- ++delays_length[i][CurDelay];
- }
- }
-
- }
- }
- Dout(dc::con, "connected");
- SetupPresynapticInfo();
- Dout(dc::con, "Connection initialized");
-
- }
- int connection::ConnectIdenticalGaussian(float Sigma, float MaxWeight, float maxDelay, float minDelay, float mpx, float mpy)
- {
- Strength=MaxWeight;
- Dout(dc::con, "Connection::ConnectIdenticalGaussian");
- nt = TargetLayer->N;
- int ConnectionCount=0;
- int N;
- ns = N = SourceLayer->N;
- if (ns != nt) NonSelf = false; // NonSelf makes sense only for layers with same size
- int i,j,k,exists, r;
- // Dout(dc::con, "connect"); fflush(stdout);
- // double loop (ns, nt) goes throug every possible synapse and checks wether to connect or not
- int TargetCount;
- M=0;
- // typedef boost::multi_array<int, 2> int2d_array;
- // typedef int2d_array::index index;
- boost::multi_array<int, 2> TempPost(boost::extents[ns][nt]);
- boost::multi_array<float, 2> TempDistance(boost::extents[ns][nt]);
- for (i=0;i<ns;++i) for (j=0;j<nt;++j)
- {
- TempPost[i][j]=-1;
- TempDistance[i][j]=-1;
- }
-
- vector<int> TempNpre (nt);
- for (i=0;i<nt;++i) TempNpre[i]=0;
- float InitialWeights=1;
- int CurDelay;
- SetMinMaxDelay(maxDelay, minDelay);
- maxN_pre = 0;
- float Distance;
- vector2d basis(1,1);
- vector2d ConstMP(mpx, mpy);
- SimpleTextProgressBar pgbar(ns);
- float MaxConDistance = 3*Sigma;
- for (i=0;i<ns;i++) {
- pgbar.Next(i);
- TargetCount=0;
- Distance = (SourceLayer->Pos[i] - ConstMP).abs() ;
- for (j=0;j<nt;j++)
- {
- // set connection in temporary boost multi_array
- // Distance = (TargetLayer->Pos[j]).CyclicDistance(SourceLayer->Pos[i], basis);
- if ((Distance < MaxConDistance) && (!NonSelf || (i!=j))) {
- TempDistance[i][TargetCount] = Distance;
- TempPost[i][TargetCount]=j;
- ++TempNpre[j]; // count presynaptic Targeting of postsynaptic neuron
- ++TargetCount;
- ++ConnectionCount;
- }
- }
- if (TargetCount > M) M=TargetCount;
- }
- for (j=0;j<nt;++j) if (TempNpre[j] > maxN_pre) maxN_pre = TempNpre[j];
- Dout(dc::con, "M= " << M << " ns= " << ns << "maxN_pre=" << maxN_pre);
- Dout(dc::con, "ConnectionCount=" << ConnectionCount);
-
- // initialize dynamical arrays
- InitializeDynamicalArrays(N, M, maximumDelay);
- // copy temporary TempPost to post array
- for (i=0;i<ns;++i) for (j=0;j<M;++j) post[i][j] = TempPost[i][j];
- // setup delays and initial weights
- pgbar.Reset(ns);
- for (i=0;i<ns;i++) {
- pgbar.Next(i);
- for (j=0;j<M;j++)
- {
- if (post[i][j] != -1) {
- s[i][j] = MaxWeight*gauss(TempDistance[i][j],Sigma);
- // CurDelay = minimumDelay + gsl_rng_uniform_int(gslr, DelayDiff+1);
- CurDelay = minimumDelay + int(TempDistance[i][j]*DelayDiff/MaxConDistance); // ToDo: FixMe!!!!!!!!!!!!!!!!!!! think a lot!!!
- if (CurDelay>=maximumDelay)
- {
- CurDelay = maximumDelay;
- Dout(dc::con, "ERROR: delay too high");
- }
- delays[i][CurDelay][delays_length[i][CurDelay]] = j;
- ++delays_length[i][CurDelay];
- }
- }
- }
- Dout(dc::con, "connected");
- SetupPresynapticInfo();
- Dout(dc::con, "Connection initialized");
-
- }
- // Connect according to a profile function that is defined on distance
- int connection::ConnectProfile(DistanceProfile* profile, float MaxWeight, float maxDelay, float minDelay, bool Cyclic)
- {
- Strength=MaxWeight;
- Dout(dc::con, "Connection::ConnectProfile");
- nt = TargetLayer->N;
- int ConnectionCount=0;
- int N;
- ns = N = SourceLayer->N;
- if (ns != nt) NonSelf = false; // NonSelf makes sense only for layers with same size
-
- int i,j,k,exists, r;
- // Dout(dc::con, "connect"); fflush(stdout);
- // double loop (ns, nt) goes throug every possible synapse and checks wether to connect or not
- int TargetCount;
- M=0;
- // typedef boost::multi_array<int, 2> int2d_array;
- // typedef int2d_array::index index;
- Dout(dc::con, "MakeFirstTempArray, ns=" << ns << "nt=" << nt);
- // boost::multi_array<int, 2> TempPost(boost::extents[ns][nt]);
- int** TempPost;
- NewArray2d(TempPost, ns, nt);
- Dout(dc::con, "MakeSecondTempArray");
- boost::multi_array<float, 2> TempDistance(boost::extents[ns][nt]);
- for (i=0;i<ns;++i) for (j=0;j<nt;++j)
- {
- TempPost[i][j]=-1;
- TempDistance[i][j]=-1;
- }
-
- vector<int> TempNpre (nt);
- for (i=0;i<nt;++i) TempNpre[i]=0;
- float InitialWeights=1;
- int CurDelay;
- SetMinMaxDelay(maxDelay, minDelay);
- maxN_pre = 0;
- float Distance;
- vector2d basis(1,1);
- SimpleTextProgressBar pgbar(ns);
- Dout(dc::con, "Getmaxdist");fflush(stdout);
- float MaxConDistance = profile->GetMaxConDistance(0.05);
- float ConValue;
- for (i=0;i<ns;i++) {
- pgbar.Next(i);
- TargetCount=0;
- for (j=0;j<nt;j++)
- {
- // set Distance in temporary boost multi_array
- if (Cyclic) Distance = (TargetLayer->Pos[j]).CyclicDistance(SourceLayer->Pos[i], basis);
- else Distance = (TargetLayer->Pos[j] - SourceLayer->Pos[i]).abs() ;
- if ((Distance < MaxConDistance) && (!NonSelf || (i!=j))) {
- TempDistance[i][TargetCount] = Distance;
- TempPost[i][TargetCount]=j;
- ++TempNpre[j]; // count presynaptic Targeting of postsynaptic neuron
- ++TargetCount;
- ++ConnectionCount;
- }
- }
- if (TargetCount > M) M=TargetCount;
- }
- for (j=0;j<nt;++j) if (TempNpre[j] > maxN_pre) maxN_pre = TempNpre[j];
- Dout(dc::con, "M= " << M << " ns= " << ns << "maxN_pre=" << maxN_pre << ""); fflush(stdout);
- Dout(dc::con, "ConnectionCount=" << ConnectionCount << "");
-
- // initialize dynamical arrays
- InitializeDynamicalArrays(N, M, maximumDelay);
- // copy temporary TempPost to post array
- for (i=0;i<ns;++i) for (j=0;j<M;++j) post[i][j] = TempPost[i][j];
- // setup delays and initial weights
- pgbar.Reset(ns);
- for (i=0;i<ns;i++) {
- pgbar.Next(i);
- for (j=0;j<M;j++)
- {
- if (post[i][j] != -1) {
- s[i][j] = MaxWeight*profile->GetValue(TempDistance[i][j]);
- // CurDelay = minimumDelay + gsl_rng_uniform_int(gslr, DelayDiff+1);
- CurDelay = minimumDelay + int(TempDistance[i][j]*DelayDiff/MaxConDistance); // ToDo: FixMe!!!!!!!!!!!!!!!!!!! think a lot!!!
- if (CurDelay>=maximumDelay)
- {
- CurDelay = maximumDelay-1;
- Dout(dc::con, "ERROR: delay too high");
- }
- delays[i][CurDelay][delays_length[i][CurDelay]] = j;
- ++delays_length[i][CurDelay];
- }
- }
- }
- Dout(dc::con, "connected"); fflush(stdout);
- SetupPresynapticInfo();
- Dout(dc::con, "Connection initialized");
-
- }
- /////////
- // Connect with given Weight matrix
- int connection::ConnectMatrix(const vector<vector<float> > &WeightMatrix, float maxDelay, float minDelay)
- {
- // Strength=MaxWeight;
- Dout(dc::con, "Connection::ConnectMatrix");
- nt = TargetLayer->N;
- int ConnectionCount=0;
- int N;
- ns = N = SourceLayer->N;
- if (ns != nt) NonSelf = false; // NonSelf makes sense only for layers with same size
-
- int i,j,k,exists, r;
- // Dout(dc::con, "connect"); fflush(stdout);
- // double loop (ns, nt) goes throug every possible synapse and checks wether to connect or not
- int TargetCount;
- M=0;
- // typedef boost::multi_array<int, 2> int2d_array;
- // typedef int2d_array::index index;
- Dout(dc::con, "MakeFirstTempArray, ns=" << ns << "nt=" << nt << "");fflush(stdout);
- // boost::multi_array<int, 2> TempPost(boost::extents[ns][nt]);
- int** TempPost;
- NewArray2d(TempPost, ns, nt);
- Dout(dc::con, "MakeSecondTempArray");fflush(stdout);
- boost::multi_array<float, 2> TempDistance(boost::extents[ns][nt]);
- for (i=0;i<ns;++i) for (j=0;j<nt;++j)
- {
- TempPost[i][j]=-1;
- }
-
- vector<int> TempNpre (nt);
- for (i=0;i<nt;++i) TempNpre[i]=0;
- float InitialWeights=1;
- int CurDelay;
- SetMinMaxDelay(maxDelay, minDelay);
- // ToDo: care for the delays
- maxN_pre = 0;
- float Distance;
- vector2d basis(1,1);
- SimpleTextProgressBar pgbar(ns);
- float ConValue;
- for (i=0;i<ns;i++) {
- pgbar.Next(i);
- TargetCount=0;
- for (j=0;j<nt;j++)
- {
- //Dout(dc::con, "i=" << i << " j=" << j <<" wm[i][j]=" << WeightMatrix[i][j] << ""); fflush(stdout);
- if (WeightMatrix[i][j] != 0) {
- TempPost[i][TargetCount]=j;
- ++TempNpre[j]; // count presynaptic Targeting of postsynaptic neuron
- ++TargetCount;
- ++ConnectionCount;
- }
- }
- if (TargetCount > M) M=TargetCount;
- }
- for (j=0;j<nt;++j) if (TempNpre[j] > maxN_pre) maxN_pre = TempNpre[j];
- Dout(dc::con, "M= " << M << " ns= " << ns << "maxN_pre=" << maxN_pre << ""); fflush(stdout);
- Dout(dc::con, "ConnectionCount=" << ConnectionCount << "");
-
- // initialize dynamical arrays
- InitializeDynamicalArrays(N, M, maximumDelay);
- // copy temporary TempPost to post array
- for (i=0;i<ns;++i) for (j=0;j<M;++j) post[i][j] = TempPost[i][j];
- // setup delays and initial weights
- pgbar.Reset(ns);
- for (i=0;i<ns;i++) {
- pgbar.Next(i);
- for (j=0;j<M;j++)
- {
- if (post[i][j] != -1) {
- // s[i][j] = MaxWeight; // ?? TestIt: = WeightMatrix[i][post[i][j]]
- s[i][j] = WeightMatrix[i][post[i][j]];
- CurDelay = minimumDelay + gsl_rng_uniform_int(gslr, DelayDiff+1);
- if (CurDelay>=maximumDelay)
- {
- CurDelay = maximumDelay-1;
- Dout(dc::con, "ERROR: delay too high");
- }
- delays[i][CurDelay][delays_length[i][CurDelay]] = j;
- ++delays_length[i][CurDelay];
- }
- }
- }
- Dout(dc::con, "connected"); fflush(stdout);
- SetupPresynapticInfo();
- Dout(dc::con, "Connection initialized");
-
- }
- //////////
- // circular connections, const number of postsynaptic connections
- int connection::ConnectCircular(float PreConnectivity, float MaxWeight, float maxDelay, float minDelay)
- {
- Strength=MaxWeight;
- Dout(dc::con, "Connection::ConnectProfile");
- nt = TargetLayer->N;
- if (PreConnectivity > 1) PreConnectivity=1;
- if (PreConnectivity <0) Dout(dc::con, "connection::ConnectCircular: ERROR: PreConnectivity shouldn't be zero!!");
- int NTargetConnections = int(PreConnectivity*nt);
- int ConnectionCount=0;
- int N;
- ns = N = SourceLayer->N;
- if (ns != nt) NonSelf = false; // NonSelf makes sense only for layers with same size
-
- int i,j,k,exists, r;
- // Dout(dc::con, "connect"); fflush(stdout);
- // double loop (ns, nt) goes throug every possible synapse and checks wether to connect or not
- int TargetCount;
- M=0;
- // typedef boost::multi_array<int, 2> int2d_array;
- // typedef int2d_array::index index;
- Dout(dc::con, "MakeFirstTempArray, ns=" << ns << "nt=" << nt << "");fflush(stdout);
- // boost::multi_array<int, 2> TempPost(boost::extents[ns][nt]);
- int** TempPost;
- NewArray2d(TempPost, ns, nt);
- Dout(dc::con, "MakeSecondTempArray");fflush(stdout);
- boost::multi_array<float, 2> TempDistance(boost::extents[ns][nt]);
- for (i=0;i<ns;++i) for (j=0;j<nt;++j)
- {
- TempPost[i][j]=-1;
- TempDistance[i][j]=-1;
- }
-
- vector<int> TempNpre (nt);
- for (i=0;i<nt;++i) TempNpre[i]=0;
- float InitialWeights=1;
- int CurDelay;
- float MaxConDistance =0;
- SetMinMaxDelay(maxDelay, minDelay);
- maxN_pre = 0;
- float Distance;
- vector2d basis(1,1);
- SimpleTextProgressBar pgbar(ns);
- Dout(dc::con, "Getmaxdist");fflush(stdout);
- float ConValue;
- // for each presynaptic neuron calc a sorted Distance List, and then connect until NTargetConnections is reached
- // type map<double,int> DistMap;
- map<double,int> CurPostDistances;
- map<double,int>::iterator CurDist=CurPostDistances.begin();
- for (i=0;i<ns;i++) {
- pgbar.Next(i);
- TargetCount=0;
- CurPostDistances.clear();
- for (j=0;j<nt;j++)
- {
- // set Distance in temporary boost multi_array
- Distance = (TargetLayer->Pos[j] - SourceLayer->Pos[i]).abs() ;
- CurPostDistances[Distance+0.000001*j] = j; //add minimal value to Distance, because each Distance must be unique (map has no duplicate keys)
- }
- CurDist=CurPostDistances.begin();
- while((TargetCount<NTargetConnections) && (CurDist!=CurPostDistances.end()))
- {
- j=CurDist->second;
- if (!NonSelf || (i!=j))
- {
- TempDistance[i][TargetCount] = CurDist->first;
- TempPost[i][TargetCount]=j;
- ++TempNpre[j]; // count presynaptic Targeting of postsynaptic neuron
- ++TargetCount;
- ++ConnectionCount;
- if (CurDist->first > MaxConDistance) MaxConDistance=CurDist->first;
- }
- ++CurDist;
- }
- if (TargetCount > M) M=TargetCount;
- }
- for (j=0;j<nt;++j) if (TempNpre[j] > maxN_pre) maxN_pre = TempNpre[j];
- Dout(dc::con, "M= " << M << " ns= " << ns << "maxN_pre=" << maxN_pre << ""); fflush(stdout);
- Dout(dc::con, "ConnectionCount=" << ConnectionCount << "");
-
- // initialize dynamical arrays
- InitializeDynamicalArrays(N, M, maximumDelay);
- // copy temporary TempPost to post array
- for (i=0;i<ns;++i) for (j=0;j<M;++j) post[i][j] = TempPost[i][j];
- // setup delays and initial weights
- pgbar.Reset(ns);
- for (i=0;i<ns;i++) {
- pgbar.Next(i);
- for (j=0;j<M;j++)
- {
- if (post[i][j] != -1) {
- s[i][j] = MaxWeight;
- // CurDelay = minimumDelay + gsl_rng_uniform_int(gslr, DelayDiff+1);
- CurDelay = minimumDelay + int(TempDistance[i][j]*DelayDiff/MaxConDistance); // ToDo: FixMe!!!!!!!!!!!!!!!!!!! think a lot!!!
- if (CurDelay>=maximumDelay)
- {
- CurDelay = maximumDelay-1;
- Dout(dc::con, "ERROR: delay too high");
- }
- delays[i][CurDelay][delays_length[i][CurDelay]] = j;
- ++delays_length[i][CurDelay];
- }
- }
- }
- Dout(dc::con, "connected"); fflush(stdout);
- SetupPresynapticInfo();
- Dout(dc::con, "Connection initialized");
-
- }
- int connection::ConnectCircularPre(float Connectivity, float MaxWeight, float maxDelay, float minDelay)
- {
- Strength=MaxWeight;
- Dout(dc::con, "Connection::ConnectProfile");
- nt = TargetLayer->N;
- int N;
- ns = N = SourceLayer->N;
- if (Connectivity > 1) Connectivity=1;
- if (Connectivity <0) Dout(dc::con, "connection::ConnectCircular: ERROR: Connectivity shouldn't be zero!!");
- int NSourceConnections = int(Connectivity*ns);
- Dout(dc::con, "NSourceConnections=" << NSourceConnections << "");
- int ConnectionCount=0;
- if (ns != nt) NonSelf = false; // NonSelf makes sense only for layers with same size
-
- int i,j,k,exists, r;
- // Dout(dc::con, "connect"); fflush(stdout);
- // double loop (ns, nt) goes throug every possible synapse and checks wether to connect or not
- M=0;
- // typedef boost::multi_array<int, 2> int2d_array;
- // typedef int2d_array::index index;
- Dout(dc::con, "MakeFirstTempArray, ns=" << ns << "nt=" << nt << "");fflush(stdout);
- // boost::multi_array<int, 2> TempPost(boost::extents[ns][nt]);
- int** TempPost;
- NewArray2d(TempPost, ns, nt);
- Dout(dc::con, "MakeSecondTempArray");fflush(stdout);
- boost::multi_array<float, 2> TempDistance(boost::extents[ns][nt]);
- for (i=0;i<ns;++i) for (j=0;j<nt;++j)
- {
- TempPost[i][j]=-1;
- TempDistance[i][j]=-1;
- }
-
- vector<int> TargetCount(ns); //by default vectors are initialized with zero
- vector<int> TempNpre (nt);
- float InitialWeights=1;
- int CurDelay;
- float MaxConDistance =0;
- SetMinMaxDelay(maxDelay, minDelay);
- maxN_pre = 0;
- float Distance;
- vector2d basis(1,1);
- SimpleTextProgressBar pgbar(ns);
- Dout(dc::con, "Getmaxdist");fflush(stdout);
- float ConValue;
- // for each presynaptic neuron calc a sorted Distance List, and then connect until NTargetConnections is reached
- // type map<double,int> DistMap;
- map<double,int> CurPostDistances;
- map<double,int>::iterator CurDist=CurPostDistances.begin();
- for (j=0;j<nt;j++) {
- pgbar.Next(j);
- CurPostDistances.clear();
- for (i=0;i<ns;i++)
- {
- Distance = (TargetLayer->Pos[j] - SourceLayer->Pos[i]).abs() ;
- CurPostDistances[Distance+0.000001*i] = i; //add minimal value to Distance, because each Distance must be unique (map has no duplicate keys)
- }
- CurDist=CurPostDistances.begin();
- while((TempNpre[j]<NSourceConnections) && (CurDist!=CurPostDistances.end()))
- {
- i=CurDist->second;
- if (!NonSelf || (i!=j))
- {
- TempDistance[i][TargetCount[i]] = CurDist->first;
- TempPost[i][TargetCount[i]]=j;
- ++TempNpre[j]; // count presynaptic Targeting of postsynaptic neuron
- ++TargetCount[i];
- ++ConnectionCount;
- if (CurDist->first > MaxConDistance) MaxConDistance=CurDist->first;
- }
- ++CurDist;
- }
- }
- for (i=0;i<ns;++i) if (TargetCount[i] > M) M=TargetCount[i];
- for (j=0;j<nt;++j) if (TempNpre[j] > maxN_pre) maxN_pre = TempNpre[j];
- Dout(dc::con, "M= " << M << " ns= " << ns << "maxN_pre=" << maxN_pre << ""); fflush(stdout);
- Dout(dc::con, "ConnectionCount=" << ConnectionCount << "");
-
- // initialize dynamical arrays
- InitializeDynamicalArrays(N, M, maximumDelay);
- // copy temporary TempPost to post array
- for (i=0;i<ns;++i) for (j=0;j<M;++j) post[i][j] = TempPost[i][j];
- // setup delays and initial weights
- pgbar.Reset(ns);
- for (i=0;i<ns;i++) {
- pgbar.Next(i);
- for (j=0;j<M;j++)
- {
- if (post[i][j] != -1) {
- s[i][j] = MaxWeight;
- // CurDelay = minimumDelay + gsl_rng_uniform_int(gslr, DelayDiff+1);
- CurDelay = minimumDelay + int(TempDistance[i][j]*DelayDiff/MaxConDistance); // ToDo: FixMe!!!!!!!!!!!!!!!!!!! think a lot!!!
- if (CurDelay>=maximumDelay)
- {
- CurDelay = maximumDelay-1;
- Dout(dc::con, "ERROR: delay too high");
- }
- delays[i][CurDelay][delays_length[i][CurDelay]] = j;
- ++delays_length[i][CurDelay];
- }
- }
- }
- Dout(dc::con, "connected"); fflush(stdout);
- SetupPresynapticInfo();
- Dout(dc::con, "Connection initialized");
-
- }
// Probabilistic Gaussian connectivity: a synapse i->j is created with
// probability MaxConnectivity*gauss(Distance,Sigma), but only for pairs at
// least MinConDistance apart. All weights are constant (MaxWeight); delays
// are drawn uniformly from [minimumDelay, minimumDelay+DelayDiff].
// Cyclic selects torus distance instead of Euclidean.
// NOTE(review): declared int but returns nothing -- callers must not rely on
// the return value.
int connection::ConnectGaussianProb(float Sigma, float MaxWeight, float maxDelay, float minDelay, float MinConDistance, float MaxConnectivity, bool Cyclic)
{
    Strength=MaxWeight;
    Dout(dc::con, "Connection::ConnectGaussianProb");
    nt = TargetLayer->N;
    int ConnectionCount=0;
    int N;
    ns = N = SourceLayer->N;
    if (ns != nt) NonSelf = false; // NonSelf makes sense only for layers with same size
    int i,j,k,exists, r;
    // double loop (ns, nt) goes throug every possible synapse and checks wether to connect or not
    int TargetCount;
    M=0;
    Dout(dc::con, "make temporary arrays");fflush(stdout);
    // TempPost[i][c] holds the target index of source i's c-th accepted
    // connection, -1 where unused
    boost::multi_array<int, 2> TempPost(boost::extents[ns][nt]);
    Dout(dc::con, "ready temporary arrays");fflush(stdout);
    for (i=0;i<ns;++i) for (j=0;j<nt;++j) TempPost[i][j]=-1;
    vector<int> TempNpre (nt);
    for (i=0;i<nt;++i) TempNpre[i]=0;
    float InitialWeights=1;
    int CurDelay;
    SetMinMaxDelay(maxDelay, minDelay);
    maxN_pre = 0;
    float Distance;
    vector2d basis(1,1);
    // set connections and calculate M and maxN_pre;
    SimpleTextProgressBar pgbar(ns);
    for (i=0;i<ns;i++) {
        TargetCount=0;
        pgbar.Next(i);
        for (j=0;j<nt;j++)
        {
            // set connection in temporary boost multi_array
            if (Cyclic) {
                Distance = (TargetLayer->Pos[j]).CyclicDistance(SourceLayer->Pos[i], basis);
            } else {
                Distance = (TargetLayer->Pos[j] - SourceLayer->Pos[i]).abs() ;
            }
            // NOTE(review): NonSelf is not checked here; self-connections are
            // only excluded indirectly when MinConDistance > 0 -- confirm
            // this is intended.
            if ((Distance >= MinConDistance) && (gsl_rng_uniform(gslr) < MaxConnectivity*gauss(Distance,Sigma)))
            {
                TempPost[i][TargetCount]=j;
                ++TempNpre[j]; // count presynaptic Targeting of postsynaptic neuron
                ++TargetCount;
                ++ConnectionCount;
            }
        }
        if (TargetCount > M) M=TargetCount;
    }
    for (j=0;j<nt;++j) if (TempNpre[j] > maxN_pre) maxN_pre = TempNpre[j];
    // now M and maxN_pre are calculated
    Dout(dc::con, "M= " << M << " ns= " << ns << "maxN_pre=" << maxN_pre << ""); fflush(stdout);
    Dout(dc::con, "ConnectionCount=" << ConnectionCount << "");

    // initialize dynamical arrays
    InitializeDynamicalArrays(N, M, maximumDelay);

    // copy temporary TempPost to post array
    for (i=0;i<ns;++i) for (j=0;j<M;++j) post[i][j] = TempPost[i][j];
    // setup delays and initial weights
    pgbar.Reset(ns);
    for (i=0;i<ns;i++) {
        pgbar.Next(i);
        for (j=0;j<M;j++)
        {
            if (post[i][j] != -1) {
                s[i][j] = MaxWeight;
                // gsl_rng_uniform_int(gslr, DelayDiff+1) yields 0..DelayDiff,
                // so CurDelay stays below maximumDelay
                CurDelay = minimumDelay + gsl_rng_uniform_int(gslr, DelayDiff+1);
                delays[i][CurDelay][delays_length[i][CurDelay]] = j;
                ++delays_length[i][CurDelay];
            }
        }
    }
    Dout(dc::con, "connected"); fflush(stdout);
    SetupPresynapticInfo();
    Dout(dc::con, "Connection initialized");
}
- int connection::ConnectFull(float MaxWeight, float maxDelay, float minDelay, bool RandomWeights)
- {
- Strength=MaxWeight;
- Dout(dc::con, "Connection::ConnectFull");
- nt = TargetLayer->N;
- int ConnectionCount=0;
- int N;
- ns = N = SourceLayer->N;
- if (ns != nt) NonSelf = false; // NonSelf makes sense only for layers with same size
- int i,j,k,exists, r;
- if (NonSelf) M=nt-1; else M=nt;
- maxN_pre = ns;
- SetMinMaxDelay(maxDelay, minDelay);
- // initialize dynamical arrays
- InitializeDynamicalArrays(N, M, maximumDelay);
- float InitialWeights=1;
- int CurM;
- for (i=0;i<ns;i++) {
- CurM=0;
- for (j=0;j<nt;j++)
- {
- if ((!NonSelf) || (i != j)) {
- post[i][CurM] = j;
- if (RandomWeights) {
- s[i][CurM] = gsl_rng_uniform(gslr)*MaxWeight;
- } else {
- s[i][CurM] = MaxWeight;
- }
- ++CurM;
- }
- }
- }
- Dout(dc::con, "M= " << M << " ns= " << ns << "maxN_pre=" << maxN_pre << ""); fflush(stdout);
-
- Dout(dc::con, "connected"); fflush(stdout);
- SetupRandomDelays();
- SetupPresynapticInfo();
- Dout(dc::con, "Connection initialized ");
-
- }
- int connection::ConnectFullColumnwise(float MaxWeight, int tdimx, int tdimy, int sdimx, int sdimy, float maxDelay, float minDelay,bool divergentrow,float sigma_divrow, bool Cyclic, bool convergent)
- {
- Strength=MaxWeight;
- Dout(dc::con, "Connection::ConnectFull");
- nt = TargetLayer->N;
- int ConnectionCount=0;
- int N;
- ns = N = SourceLayer->N;
- if (ns != nt) NonSelf = false; // NonSelf makes sense only for layers with same size
- int x_s,y_s,x_t,y_t,i,j,k,exists, r;
- if (NonSelf) M=nt-1; else M=nt;
- maxN_pre = ns;
- int ntns;
- if (divergentrow = true) ntns=tdimy/sdimy;
- else ntns=1;
- SetMinMaxDelay(maxDelay, minDelay);
- // initialize dynamical arrays
- InitializeDynamicalArrays(N, M, maximumDelay);
- float InitialWeights=1;
- int CurM;
- CurM=0;
- /* for (y_source=0;y_source<sourcedimy;y_source++)
- {
- for (x_source=y_source*nsx; x_source<(y_source+1)*nsx; x_source++) {
-
- for (y_target=0;y_target<dimy; y_target++)
- for (x_target=dimx*y_target; x_target<dimx*(y_target+1);x_target++)
- {
- post[x_source+y_source][x_target+y_target]=CurM;
- s[x_source+y_source][x_target+y_target]=1;
- CurM++;
- }
- }
- } */
- float Distance;
- vector2d basis(1,1);
- for (y_s=0;y_s<sdimy;y_s++)
- {
- for (x_s=0;x_s<sdimx;x_s++)
- {
- CurM=0;
- int a=y_s*sdimx;
- int here= x_s+a;
-
- for (y_t=0;y_t<tdimy;y_t++)
- {
- for (x_t=0;x_t<tdimx;x_t++)
- {
- post[here][CurM] =x_t+y_t*tdimx;//+y_t*tdimx;
-
- if (y_t == y_s*ntns) s[here][CurM] = MaxWeight;
- else s[here][CurM]=0;
- if (Cyclic) Distance= (TargetLayer->Pos[y_t].CyclicDistance(SourceLayer->Pos[y_s*sdimx], basis));
- else Distance=y_t-y_s*ntns;
- if (divergentrow == true) {
- s[here][CurM]=MaxWeight*exp(-0.5*(Distance)*(Distance)/(sigma_divrow*sigma_divrow));
- } //1./(sigma_divrow*sqrt(2.*3.14))
- ++CurM;
- }
- }
-
- }
- }
-
- Dout(dc::con, "M= " << M << " ns= " << ns << "maxN_pre=" << maxN_pre << ""); fflush(stdout);
- Dout(dc::con, "connected"); fflush(stdout);
- SetupRandomDelays();
- SetupPresynapticInfo();
- Dout(dc::con, "Connection initialized ");
-
- }
- int connection::ConnectGradientFields(float MaxWeight, float xslope, float yslope, int tdimx, int tdimy, int sdimx, int sdimy, float maxDelay, float minDelay)
- {
- Strength=MaxWeight;
- Dout(dc::con, "Connection::ConnectGradientField ");
- nt = TargetLayer->N;
- int ConnectionCount=0;
- int N;
- ns = N = SourceLayer->N;
- if (ns != nt) NonSelf = false; // NonSelf makes sense only for layers with same size
- int x_s,y_s,x_t,y_t,i,j,k,exists, r;
- if (NonSelf) M=nt-1; else M=nt;
- maxN_pre = ns;
- SetMinMaxDelay(maxDelay, minDelay);
- // initialize dynamical arrays
- InitializeDynamicalArrays(N, M, maximumDelay);
- float InitialWeights=1;
- int CurM;
- CurM=0;
- float Distance;
- vector2d basis(1,1);
- for (x_s=0;x_s<sdimx;x_s++)
- {
- for (y_s=0;y_s<sdimy;y_s++)
- {
- CurM=0;
- for (x_t=0;x_t<tdimx;x_t++)
- {
- for (y_t=0;y_t<tdimy;y_t++)
- {
- post[x_s+sdimx*y_s][CurM]=x_t+y_t*tdimx;
- s[x_s+sdimx*y_s][CurM]=(-xslope*x_s+MaxWeight/2)+(-yslope*y_s+MaxWeight/2);
- ++CurM;
- }
- }
-
- }
- }
-
- Dout(dc::con, "M= " << M << " ns= " << ns << "maxN_pre=" << maxN_pre << ""); fflush(stdout);
- Dout(dc::con, "connected"); fflush(stdout);
- SetupRandomDelays();
- SetupPresynapticInfo();
- Dout(dc::con, "Connection initialized ");
-
- }
- int connection::SetMinMaxDelay(float maxDelay, float minDelay)
- {
- maximumDelay = int(maxDelay/dt);
- if (maximumDelay == 0) maximumDelay = 1;
- // equivalent to Dmax; compare allways with "<" (not with "<=")
- minimumDelay = int(minDelay/dt);
- if (maximumDelay >= Dmax) {
- cout << "connection::SetMinMaxDelay: Initialization "
- << "Parameter Error: maximumDelay="
- << maximumDelay << " > Dmax=" << "Dmax \n";
- maximumDelay = Dmax-1;
- }
- if (minimumDelay > maximumDelay) {
- cout << "connection::SetMinMaxDelay: Initialization Parameter Error:"
- << " minimumDelay > maximumDelay\n";
- minimumDelay = 0;
- }
- DelayDiff = maximumDelay-minimumDelay-1;
- cout << "MaximumDelay=" << maximumDelay
- << " MinimumDelay=" << minimumDelay
- << " DelayDiff= " << DelayDiff << "\n";
- }
- int connection::ConnectSelf(float MaxWeight, float maxDelay, float minDelay)
- {
- Strength=MaxWeight;
- Dout(dc::con, "Connection::ConnectSelf");
- nt = TargetLayer->N;
- int ConnectionCount=0;
- int N;
- ns = N = SourceLayer->N;
- int i,j,k,exists, r;
- M=1;
- maxN_pre = 1;
- SetMinMaxDelay(maxDelay, minDelay);
- // initialize dynamical arrays
- // InitializeDynamicalArrays(N, M, Dmax);
- InitializeDynamicalArrays(N, M, maximumDelay);
- int CurDelay;
- float InitialWeights=1;
- int MaxCon = min(ns,nt);
- int rnum;
- Dout(dc::con, "MaxCon=" << MaxCon << "");
- for (i=0;i<MaxCon;i++)
- {
- post[i][0] = i;
- s[i][0] = MaxWeight;
- CurDelay = minimumDelay +getrandom(DelayDiff+1);
- delays[i][CurDelay][delays_length[i][CurDelay]] = 0;
- ++delays_length[i][CurDelay];
- }
- Dout(dc::con, "M= " << M << " ns= " << ns << "maxN_pre=" << maxN_pre << ""); fflush(stdout);
- Dout(dc::con, "connected"); fflush(stdout);
- SetupPresynapticInfo();
- Dout(dc::con, "Connection initialized ");
-
- }
- int connection::ConnectPartial(int maxTarget, float MaxWeight, float maxDelay, float minDelay)
- {
- Strength=MaxWeight;
- Dout(dc::con, "Connection::ConnectPartial");
- nt = TargetLayer->N;
- if (maxTarget>nt)
- {
- maxTarget=nt;
- Dout(dc::con, "ERROR: maxTarget > TargetLayer->N");
- }
- int ConnectionCount=0;
- int N;
- ns = N = SourceLayer->N;
- if (ns != nt) NonSelf = false; // NonSelf makes sense only for layers with same size
- int i,j,k,exists, r;
- M=maxTarget;
- maxN_pre=ns;
- // initialize dynamical arrays
- SetMinMaxDelay(maxDelay, minDelay);
- InitializeDynamicalArrays(N, M, maximumDelay);
- float InitialWeights=1;
- for (i=0;i<ns;i++) {
- for (j=0;j<M;j++)
- {
- post[i][j] = j;
- s[i][j] = MaxWeight;
- }
- }
- Dout(dc::con, "M= " << M << " ns= " << ns << "maxN_pre=" << maxN_pre << ""); fflush(stdout);
-
- Dout(dc::con, "connected"); fflush(stdout);
- SetupRandomDelays();
- SetupPresynapticInfo();
- Dout(dc::con, "Connection initialized ");
-
- }
- int connection::SetWeights(vector<vector<float> >& s_new)
- {
- if(s_new.size()==ns && s_new[0].size()==M)
- {
- for(int i=0;i<ns;i++)
- for(int j=0;j<M;j++)
- s[i][j]=s_new[i][j];
- cout<<"Weights has been set\n";
- }else{
- cout<<"couldn't SetWeights because of wrong array-dimension\n";
- }
- }
- int connection::SetupRandomDelays(float maxDelay, float minDelay)
- {
- SetMinMaxDelay(maxDelay, minDelay);
- SetupRandomDelays();
- }
- int connection::SetupRandomDelays()
- {
- Dout(dc::con, "SetupRandomDelays");
- int i,j,k;
-
- int CurDelay;
- for (i=0;i<ns;i++)
- {
- for (j=0;j<M;++j)
- {
- // check if valid connection (invalid: post == -1)
- if (post[i][j] >= 0) {
- CurDelay = minimumDelay + getrandom(DelayDiff+1);
- delays[i][CurDelay][delays_length[i][CurDelay]] = j;
- ++delays_length[i][CurDelay];
- }
- }
- }
- }
- int connection::SetupDelays(float maxDelay, float minDelay)
- {
- SetMinMaxDelay(maxDelay, minDelay);
- SetupDelays();
- }
- int connection::SetupDelays()
- {
- // setup delays
- // connections must be allready set up!!!
- // ToDo: check this!!
- // ToDo: delays are set up systematicaly (ordered); this works only if the weights are chosen randomly!!
- // --> SetupDelaysRandom() needed!!!!!!!!!
- int i,j,k;
- int MeanDelays_Length = M/DelayDiff;
- cout << "MaximumDelay=" << maximumDelay << " MinimumDelay=" << minimumDelay
- << " MeanDelays_length= " << MeanDelays_Length << " DelayDiff= " << DelayDiff << "\n";
- for (i=0;i<ns;i++)
- {
- short ind=0;
- for (j=0;j<maximumDelay;j++)
- {
- if (j>=minimumDelay)
- {
- if ((M-ind)>MeanDelays_Length) delays_length[i][j]=MeanDelays_Length;
- else delays_length[i][j] = M-ind;
- } else {
- delays_length[i][j]=0;
- }
-
- for (k=0;k<delays_length[i][j];k++)
- delays[i][j][k]=ind++; // connection index of presynaptic neuron
- }
- }
- }
// Connect each source neuron i to M randomly chosen targets inside the index
// window [i, i+nt/10) (modulo nt), i.e. "directional" along increasing
// neuron index. Learning is enabled and all weights start at InitialWeights;
// delays are distributed uniformly in index blocks over [0, Dmax).
// c : connectivity fraction; M = floor(c*ns), truncated to a multiple of
//     maximumDelay if necessary.
// NOTE(review): declared int but returns nothing -- callers must not rely on
// the return value.
int connection::ConnectDirectional(float c, float InitialWeights)
{
    Dout(dc::con, "Directional Connections");
    learn=true;
    maxWeight=1;
    connectivity=c;
    nt = TargetLayer->N;
    // float TAULEARN_pre=50;
    // dec_pre = exp(-1./TAULEARN_pre);
    // float TAULEARN_post=5;
    // dec_pre = exp(-1./TAULEARN_post);
    int N;
    ns = N = SourceLayer->N;
    if (ns != nt) NonSelf = false; // NonSelf makes sense only for layers with same size
    M= (int) floor(c*ns);
    // heuristic upper bound for the presynaptic fan-in -- TODO confirm it is
    // always sufficient for this windowed connectivity
    maxN_pre = 3*ns*M/nt;
    Dout(dc::con, "M= " << M << " c= " << c << " ns= " << ns << "maxN_pre=" << maxN_pre << ""); fflush(stdout);

    minimumDelay=0;
    maximumDelay=Dmax;
    // SetMinMaxDelay(maxDelay, minDelay);
    if (M>nt) Dout(dc::con, "ERROR: too many connections, reduce connectivity!!");
    // M must be a multiple of maximumDelay for the block-wise uniform delay
    // distribution below; truncate it otherwise.
    if ( M%maximumDelay != 0) {
        Dout(dc::con, "ERROR: bad connectivity value!! M%D= " << M%maximumDelay << " !=0 \n" );
        // M += 20 - (M%Dmax);
        M -= M%maximumDelay;
        Dout(dc::con, "reducing weights to M= " << M << ""); // M must be a multiple of Dmax for this random weight initialization;
    }
    // initialize dynamical arrays
    InitializeDynamicalArrays(N, M, maximumDelay);
    int i,j,k,exists, r;
    for (i=0;i<ns;i++) for (j=0;j<M;j++)
    {
        // rejection-sample a target in the window of nt/10 indices at/after i,
        // skipping self-connections and duplicates.
        // NOTE(review): if M approaches nt/10 this loop can run very long
        // (forever when M exceeds the number of distinct candidates), and
        // nt < 10 makes the window empty -- verify against callers.
        do {
            exists = 0; // avoid multiple synapses
            r = (i+getrandom(nt/10))%nt;
            if (i==0) Dout(dc::con, r << " ");
            if (r==i) exists=1; // no self-synapses
            for (k=0;k<j;k++) if (post[i][k]==r) exists = 1; // synapse already exists
        } while (exists == 1);
        post[i][j]=r;
    }
    for (i=0;i<ns;i++) for (j=0;j<M;j++) s[i][j]=InitialWeights; // initial exc. synaptic weights


    // uniform delay distribution: the first M/maximumDelay connection indices
    // get delay 0, the next block delay 1, and so on
    for (i=0;i<N;i++)
    {
        short ind=0;
        for (j=0;j<maximumDelay;j++)
        { delays_length[i][j]=M/maximumDelay; // uniform distribution of exc. synaptic delays
            for (k=0;k<delays_length[i][j];k++)
                delays[i][j][k]=ind++;
        }
    }
    SetupPresynapticInfo();
    // for (i=0;i<ns;i++) for (j=0;j<1001+Dmax;j++) LTP[i][j]=0.0;
    // for (i=0;i<nt;i++) LTD[i]=0.0;

    // learnobj = new learning(this);
    Dout(dc::con, "Connection initialized ");
    Save();
}
- // ToDo: validate this routine!!
- int connection::SetupPresynapticInfo_old()
- {
- int i,j,k,dd,jj;
- Dout(dc::con, "Setup Presynaptic Information (this might take a while ");
- if ((N_pre != 0) || (I_pre != 0) || (D_pre != 0) || (s_pre != 0) || (sd_pre != 0))
- Dout(dc::con, "SetupPresynapticInfo-ERROR: Pointers must be 0");
-
- N_pre = new int[nt];
- NewArray2d(I_pre, nt,maxN_pre); NewArray2d(D_pre,nt,maxN_pre); // presynaptic information
- NewArray2d(s_pre,nt,maxN_pre); NewArray2d(sd_pre,nt,maxN_pre); // presynaptic weights
-
- int ConIndex;
- // get presynaptic information
- SimpleTextProgressBar pgbar(ns);
- for (i=0;i<nt;++i) N_pre[i]=0;
- for (j=0;j<ns;j++)
- {
- pgbar.Next(j);
-
- for (dd=0;dd<Dmax;++dd)
- {
- for(k=0;k<delays_length[j][dd];++k)
- {
- ConIndex = delays[j][dd][k]; // presynaptic connection index
- i = post[j][ConIndex]; // i: target neuron index
- I_pre[i][N_pre[i]]=j; // add this neuron to the list
- if (N_pre[i] > maxN_pre)
- cout << "programming error: N_pre[i]="
- << N_pre[i] <<" too high (higher than maxN_pre="
- << maxN_pre << "); j=" << j << " i=" << i << "\n";
- D_pre[i][N_pre[i]]=dd; // add the delay
- s_pre[i][N_pre[i]]=&s[j][ConIndex]; // pointer to the synaptic weight
- // sd_pre[i][N_pre[i]]=&sd[j][ConIndex];// pointer to the derivative
- ++N_pre[i];
- }
- }
- }
- CheckPresynapticInfo_old();
- }
- int connection::CheckPresynapticInfo_old()
- {
- // 1.
- // total number of connections: Sum of delays_length
- // should be equel to: Sum of N_pre
- Dout(dc::con, "Checking Presynaptic Info");
- int i, j, k;
- int countPre, countPost;
- countPre=countPost=0;
- for (i=0;i<ns;++i) for (j=0;j<Dmax;++j) countPre += delays_length[i][j];
- for (i=0;i<nt;++i) countPost += N_pre[i];
- if (countPre == countPost) Dout(dc::con, countPre << " bastscho");
- else Dout(dc::con, "countPre=" << countPre << " != countPost" << countPost << "!!!!!!!!!!!!!!");
- // 2.
- // take every synapse (via delays, delays_length, post)
- // and find the corresponding I_pre
- int ErrorSum=0;
- int CurTarget;
- int ppp;
- SimpleTextProgressBar pgbar(ns);
- for (i=0;i<ns;++i)
- {
- pgbar.Next(i);
- for (j=0;j<Dmax;++j) for (k=0;k<delays_length[i][j];++k)
- {
- CurTarget = post[i][delays[i][j][k]];
- // find Source in I_pre[CurTarget]
- ppp=0;
- while((I_pre[CurTarget][ppp] != i) && (ppp++<N_pre[CurTarget]));
- if (ppp == N_pre[CurTarget]) Dout(dc::con, "ERROR!!!!!!!!!! PresynapticInfo not correct!!!!!!");
- // else Dout(dc::con, ppp << " ");
- }
- }
- }
- // ToDo: validate this routine!!
- int connection::SetupPresynapticInfo()
- {
- int i,j,k,dd,jj;
- Dout(dc::con, "Setup Presynaptic Information (this might take a while "); fflush(stdout);
- if ((N_pre != 0) || (I_pre != 0) || (D_pre != 0) || (s_pre != 0) || (sd_pre != 0))
- Dout(dc::con, "SetupPresynapticInfo-ERROR: Pointers must be 0");
-
- N_pre = new int[nt];
- NewArray2d(I_pre, nt,maxN_pre); NewArray2d(D_pre,nt,maxN_pre); // presynaptic information
- NewArray2d(s_pre,nt,maxN_pre); // presynaptic weights
- // NewArray2d(sd_pre,nt,maxN_pre); // weight derivatives, not used
- NewArray2d(m_pre,nt,maxN_pre);
- N_post = new int[ns];
- int ConIndex;
- // get presynaptic information
- SimpleTextProgressBar pgbar(ns);
- for (i=0;i<nt;++i) N_pre[i]=0;
- for (j=0;j<ns;j++)
- {
- pgbar.Next(j);
-
- for (dd=0;dd<maximumDelay;++dd)
- {
- for(k=0;k<delays_length[j][dd];++k)
- {
- ++N_post[j]; // increase synapse count for presynaptic neuron
- // Dout(dc::con, "N_post[" <<j<<"]=" << N_post[j] << "");
- ConIndex = delays[j][dd][k]; // presynaptic connection index
- i = post[j][ConIndex]; // i: target neuron index
- I_pre[i][N_pre[i]]=j; // add this neuron to the list
- m_pre[i][N_pre[i]]=ConIndex;
- if (N_pre[i] > maxN_pre)
- cout << "programming error: N_pre[i]="
- << N_pre[i] <<" too high (higher than maxN_pre="
- << maxN_pre << "); j=" << j << " i=" << i << "\n";
- D_pre[i][N_pre[i]]=dd; // add the delay
- s_pre[i][N_pre[i]]=&s[j][ConIndex]; // pointer to the synaptic weight
- // sd_pre[i][N_pre[i]]=&sd[j][ConIndex];// pointer to the derivative
- ++N_pre[i];
- }
- }
- }
- CheckPresynapticInfo();
- }
- int connection::DeletePresynapticInfo()
- {
-
- if(N_pre!=0) {
- delete[] N_pre;
- N_pre=0;
- }
- if(I_pre!=0) {
- DeleteArray2d(I_pre,nt);
- I_pre=0;
- }
- if(D_pre!=0) {
- DeleteArray2d(D_pre,nt);
- D_pre=0;
- }
- if(s_pre!=0) {
- DeleteArray2d(s_pre,nt);
- s_pre=0;
- }
- if(sd_pre!=0) {
- DeleteArray2d(sd_pre,nt);
- sd_pre=0;
- }
-
- }
- int connection::Print()
- {
- for (int SourceNr=0;SourceNr<ns;++SourceNr) {
- Dout(dc::con, "S=" << SourceNr << "|");
- for (int SynNr=0;SynNr<N_post[SourceNr];++SynNr) {
- Dout(dc::con, " " << post[SourceNr][SynNr]);
- }
- Dout(dc::con, "");
- }
- for (int TargetNr=0;TargetNr<nt;++TargetNr) {
- Dout(dc::con, "T=" << TargetNr << "|");
- for (int PreSynNr=0;PreSynNr<N_pre[TargetNr];++PreSynNr) {
- Dout(dc::con, " " << I_pre[TargetNr][PreSynNr]);
- }
- Dout(dc::con, "");
- }
- for (int TargetNr=0;TargetNr<nt;++TargetNr) {
- Dout(dc::con, "T=" << TargetNr << "|");
- for (int PreSynNr=0;PreSynNr<N_pre[TargetNr];++PreSynNr) {
- Dout(dc::con, " " << m_pre[TargetNr][PreSynNr]);
- }
- Dout(dc::con, "");
- }
- }
- int connection::CheckConnection()
- {
- int TargetNr=0;
- int SynNr=0;
- for (int SourceNr=0;SourceNr<ns;++SourceNr) {
- for (int Delay=0;Delay<maximumDelay;++Delay) {
- for (int m=0;m<delays_length[SourceNr][Delay];++m) {
- SynNr=delays[SourceNr][Delay][m];
- TargetNr=post[SourceNr][SynNr];
- if (TargetNr <0) {
- cerr << "CheckConnection: ERROR, TargetNr <0\n";
- exit(1);
- }
-
- }
- }
- }
- }
/** Consistency check of the presynaptic tables against the postsynaptic ones.
 *  Step 1: the total synapse count from delays_length must equal the sum of
 *  N_pre. Step 2: every synapse found via delays/post must have an entry in
 *  s_pre/D_pre/m_pre of its target with matching weight pointer, delay and
 *  synapse index; any mismatch terminates the program.
 */
int connection::CheckPresynapticInfo()
{
// 1.
// total number of connections: Sum of delays_length
// should be equal to: Sum of N_pre
 Dout(dc::con, "Checking Presynaptic Info");
 int i, j, k;
 int countPre, countPost;
 countPre=countPost=0;
 for (i=0;i<ns;++i) for (j=0;j<maximumDelay;++j) countPre += delays_length[i][j];
 for (i=0;i<nt;++i) countPost += N_pre[i];
 if (countPre == countPost) Dout(dc::con, countPre << " bastscho");
 else Dout(dc::con, "countPre=" << countPre << " != countPost" << countPost << "!!!!!!!!!!!!!!");
// 2.
// take every synapse (via delays, delays_length, post)
// and find the corresponding I_pre
 int ErrorSum=0;
 int CurTarget;
 int PostIndex;
 SimpleTextProgressBar pgbar(ns);
 int SourceSynNumber;
 int TargetSynNumber;
 for (i=0;i<ns;++i) {
  pgbar.Next(i);
  for (j=0;j<maximumDelay;++j) for (k=0;k<delays_length[i][j];++k) {
   SourceSynNumber = delays[i][j][k];
   CurTarget = post[i][SourceSynNumber];
   if (CurTarget <0) {
    // a deleted synapse (post == -1) must never be referenced by delays[]
    cerr << "CheckPresynapticInfo-ERROR: CurTarget < 0\n";
    cerr << "DelaysLength = " << delays_length[i][j] << "\n";
    cerr << "CurTarget = " << CurTarget << "\n";
    cerr << "SourceSynNumber =" << SourceSynNumber << "\n";
    cerr << "i=" << i << " j=" << j << " k=" << k << "\n";
    fflush(stderr); fflush(stdout);
    exit(1);
   }
   // find Source in I_pre[CurTarget]:
   // linear search by weight-pointer identity; the pre-increment keeps every
   // read inside 0..N_pre-1, and PostIndex == N_pre signals "not found"
   PostIndex=0;
   while((s_pre[CurTarget][PostIndex] != &s[i][SourceSynNumber]) && (++PostIndex<N_pre[CurTarget]));
   // cout << "S=" << i << " --> T="
   // << CurTarget << " PostIndex="
   // << PostIndex << "m=" << SourceSynNumber
   // << " N_pre[T] =" << N_pre[CurTarget] << " ";
   // fflush(stdout);
   if (PostIndex == N_pre[CurTarget]) {
    cerr << "ERROR!!!!!!!!!! PresynapticInfo not correct!!!!!!\n";
    cerr << "Postindex = " << PostIndex << " N_pre[CurTarget] = "
     << N_pre[CurTarget] << "\n";
   } else {
    // entry found: weight pointer, delay and synapse index must all agree
    if ((s_pre[CurTarget][PostIndex] == &s[i][SourceSynNumber]) &&
     (D_pre[CurTarget][PostIndex] == j) &&
     (m_pre[CurTarget][PostIndex] == SourceSynNumber)
     ) {
     // Dout(dc::con, "correct ");
    } else {
     cerr << "ERROR \n";
     cerr << "SourceNr=" << i << " SourceSynNr="
      << SourceSynNumber << " TargetNr=" << CurTarget
      << "\n";
     cerr << "s=s: "
      << (s_pre[CurTarget][PostIndex] == &s[i][SourceSynNumber])
      << " Delay=" << j
      << " D_pre["<<CurTarget<<"][" << PostIndex << "]="
      << D_pre[CurTarget][PostIndex]
      << " d=d: " << (D_pre[CurTarget][PostIndex] == j)
      << " m=m: "
      << (m_pre[CurTarget][PostIndex] == SourceSynNumber)
      << "m=" << SourceSynNumber
      << " m_pre=" << m_pre[CurTarget][PostIndex]
      << "\n";
     cerr << "s=" << &s[i][SourceSynNumber]
      << " s_pre= " << s_pre[CurTarget][PostIndex] << "\n";
     cerr << "CheckPresynapticInfo: ERROR!!!!!!!!!!!!!!!!!!!!!!\n";
     fflush(stderr);
     exit (1);
    }
   }
  }
 }
}
/** Advance the connection by one time step: propagate all source-layer
 *  spikes whose delay expires now into the target layer's input, then run
 *  the learning rule if enabled.
 *  @param TotalTime absolute simulation step; t is the step within the
 *         current macro time step
 */
int connection::proceede(int TotalTime)
{
 int t = TotalTime % MacroTimeStep;
 int i,j,k, mi, ipre;
 // calculate input for target layer
 k=SourceLayer->N_firings;
 // cpu_start = clock();
 if (rec) rec->record(dt*TotalTime, s[Observe_s][Observe_m]);
 if (BinRec) BinRec->record();
 // Walk the firings list backwards: only spikes no more than maximumDelay
 // steps in the past are considered. firings[SpikeNr][0] is the spike time,
 // firings[SpikeNr][1] the index of the neuron that fired.
 while (t-(SourceLayer->firings[--k][0]) < maximumDelay)
 {
  ipre = SourceLayer->firings[k][1];
  // t - firing time selects the delay bin whose synapses arrive exactly now
  for (j=0; j< delays_length[ipre][t-SourceLayer->firings[k][0]]; j++)
  {
   mi = delays[ipre][t-SourceLayer->firings[k][0]][j];
   i=post[ipre][mi];
   InputPointer[i]+=s[ipre][mi];
   // ToDo: check wether the code above (new) does the same as the code below (old)
   // check performance!!
   // i=post[SourceLayer->firings[k][1]] [delays[SourceLayer->firings[k][1]][t-SourceLayer->firings[k][0]][j]];
   // InputPointer[i]+=s[SourceLayer->firings[k][1]][delays[SourceLayer->firings[k][1]][t-SourceLayer->firings[k][0]][j]];
  }
 }
 // cpu_end = clock();
 // cpu_time_used += ((double) (cpu_end - cpu_start)) / CLOCKS_PER_SEC;
 if (learn == true) {
  learnobj->proceede(TotalTime);
 }
}
- int connection::prepare(int step)
- {
- SimElement::prepare(step);
- if ((learn == true) && (learnobj != 0)) {
- learnobj->prepare();
- if (RewireOn) {
- Rewire(RewireThreshold, RewireMaxConnectivity);
- }
- if (AutoSave) Save();
- }
- // Dout(dc::con, "PERFORMANCE: cpu-time = " << cpu_time_used << " sec");
- }
- void connection::SetName(const char* _name)
- {
- SimElement::SetName(_name);
- WeightFileName = Name + "weights.dat";
- Dout(dc::con, "xxxxxxxxxxxxxxxxWeightFileName = " << WeightFileName << "");
- }
- int connection::SetFileName(char* FileName)
- {
- WeightFileName = FileName;
- Dout(dc::con, "Set WeightFileName to : " << WeightFileName << "");
- }
- int connection::Save()
- {
- Save(WeightFileName.c_str());
- }
- int connection::Save(int nr)
- {
- Save((WeightFileName + stringify(nr)).c_str());
- }
- int connection::Save(const string& SaveWeightFileName)
- {
- int i,j,k;
- Dout(dc::con, " Save Con.file: " << SaveWeightFileName);fflush(stdout);
- FILE *fw;
- // save weights
- fw = fopen((DataDirectory+SaveWeightFileName).c_str(),"w");
- fwrite(&ns, sizeof(ns), 1, fw);
- fwrite(&(SourceLayer->Nx), sizeof(SourceLayer->Nx), 1, fw);
- fwrite(&(SourceLayer->Ny), sizeof(SourceLayer->Ny), 1, fw);
- fwrite(&nt, sizeof(nt), 1, fw);
- fwrite(&(TargetLayer->Nx), sizeof(TargetLayer->Nx), 1, fw);
- fwrite(&(TargetLayer->Ny), sizeof(TargetLayer->Ny), 1, fw);
- fwrite(&M, sizeof(M), 1, fw);
- fwrite(&maximumDelay, sizeof(maximumDelay), 1, fw);
- fwrite(&maxN_pre, sizeof(maxN_pre), 1, fw);
- // NewArray2d(delays_length,N,Dmax); // distribution of delays
- // NewArray3d(delays,N,Dmax,M); // arrangement of delays
- for (i=0;i<ns;++i) fwrite(post[i], M*sizeof(post[0][0]), 1, fw);
- for (i=0;i<ns;++i) fwrite(s[i], M*sizeof(s[0][0]), 1, fw);
- for (i=0;i<ns;++i) fwrite(delays_length[i], maximumDelay*sizeof(delays_length[0][0]),1,fw);
- for (i=0;i<ns;++i) for (j=0;j<maximumDelay;++j) fwrite(delays[i][j], M*sizeof(delays[0][0][0]),1,fw);
-
- // fwrite(&N_pre, sizeof(N_pre), 1, fw);
- // fwrite(&I_pre, sizeof(I_pre), 1, fw);
- // fwrite(&D_pre, sizeof(D_pre), 1, fw);
- // fwrite(&s, sizeof(s), 1, fw);
- fclose(fw);
- Dout(dc::con, " saved weights ");
- }
- bool connection::CheckHeaderConsistency()
- {
- bool consistent = Connection::CheckHeaderConsistency();
- if (maxN_pre>ns) {
- cerr << "ERROR: maxN_pre>ns \n";
- consistent=false;
- }
- }
-
- int connection::Load()
- {
- Load(WeightFileName.c_str());
- }
- int connection::Load(const char* FileName)
- {
- Load(FileName, DataDirectory.c_str());
- }
-
-
- int connection::Load(const char* FileName, const char* DirName)
- {
- int i,j,k;
- FILE *fw;
-
- std::string DirAndFileName = (std::string(DirName)+FileName);
- Dout(dc::con, "DirAndFileName=" << DirAndFileName << "");
- fflush(stdout);
- const char* DFileName = DirAndFileName.c_str();
- if (!fexist(DFileName)) {
- cerr << "\n\nERROR: connection file " << DFileName << " doesn't exist \n\n";
- fflush(stderr);
- return(2);
- } else {
- Dout(dc::con, "\nLoadWeightFile: " << DFileName << ""); fflush(stdout);
- fw = fopen( (std::string(DirName)+FileName).c_str(), "r");
- fread(&ns, sizeof(ns), 1, fw);
- fread(&SourceNx, sizeof(SourceNx), 1, fw);
- fread(&SourceNy, sizeof(SourceNy), 1, fw);
- fread(&nt, sizeof(nt), 1, fw);
- fread(&TargetNx, sizeof(TargetNx), 1, fw);
- fread(&TargetNy, sizeof(TargetNy), 1, fw);
- fread(&M, sizeof(M), 1, fw);
- fread(&maximumDelay, sizeof(maximumDelay), 1, fw);
- fread(&maxN_pre, sizeof(maxN_pre), 1, fw);
- cout << "ns=" << ns << "\n"
- << "nt=" << nt << "\n"
- << "SourceNx=" << SourceNx << "\n"
- << "SourceNy=" << SourceNy << "\n"
- << "TargetNx=" << TargetNx << "\n"
- << "TargetNy=" << TargetNy << "\n"
- << "M=" << M << "\n"
- << "maxN_pre=" << maxN_pre << "\n"
- << "maximumDelay=" << maximumDelay <<"\n"
- << "Dmax=" << Dmax << "\n";
- fflush(stdout);
-
- bool SuccessfullyLoaded=CheckHeaderConsistency();
-
- if (!SuccessfullyLoaded) {
- cerr << "\n\nERROR while trying to load " << FileName <<"\n";
- cerr << "try option --NoLoadWeights \n\n";
- fflush(stderr);
- return(1);
- }
-
- if (post == 0 && (s ==0) && (sd==0) && (delays_length==0) && (delays==0))
- {
- // initialize dynamical arrays
-
- InitializeDynamicalArrays(ns, M, maximumDelay);
- // NewArray2d(post, ns, M); // indeces of postsynaptic neurons
- // NewArray2d(s, ns,M); NewArray2d(sd,ns,M);
- // NewArray2d(delays_length,ns,Dmax); // distribution of delays
- // NewArray3d(delays,ns,Dmax,M); // arrangement of delays
-
- for (i=0;i<ns;++i) fread(post[i], M*sizeof(post[0][0]), 1, fw);
- // for (i=0;i<ns;++i) for (j=0;j<M;++j) Dout(dc::con, post[i][j] << " ");
- for (i=0;i<ns;++i) fread(s[i], M*sizeof(s[0][0]), 1, fw);
- for (i=0;i<ns;++i) fread(delays_length[i], maximumDelay*sizeof(delays_length[0][0]),1,fw);
- for (i=0;i<ns;++i) for (j=0;j<maximumDelay;++j) fread(delays[i][j], M*sizeof(delays[0][0][0]),1,fw);
- } else Dout(dc::con, "WeightFileLoadingERROR: Pointers to dynamical arrays must be NULL");
-
-
- SetupPresynapticInfo();
- Dout(dc::con, "Loaded Connections");
- fflush(stdout);
- return(0);
- }
- }
- // not in use!!!
- int connection::DeleteWeight(int SourceNr, int ConnectionNr)
- {
- // delete weight in DelayArray
- // mark synapse in post array with -1
- // mark weight with NaN???
- // presynaptic arrays are not corrected but later rebuild from scratch
- bool found = false;
- int WeightDelay=-1;
- int DelayIndex=-1;
- if (SourceNr >= ns) return -1;
- // find Weight in delays array
- for (int delay=0;delay<maximumDelay;++delay) {
- for (int i=0; i<M;++i) {
- if (delays[SourceNr][delay][i] == ConnectionNr) {
- found=true;
- WeightDelay=delay;
- DelayIndex=i;
- i=M;
- delay=maximumDelay;
- }
- }
- }
- if (found) {
- // Verbindung aus Delay-Liste loeschen
- for (int pos=DelayIndex;pos<delays_length[SourceNr][WeightDelay]-1;++pos) {
- delays[SourceNr][WeightDelay][pos] = delays[SourceNr][WeightDelay][pos+1];
- }
- // delays_length anpassen
- --delays_length[SourceNr][WeightDelay];
-
- post[SourceNr][ConnectionNr]=-1;
- s[SourceNr][ConnectionNr]=0; // noetig??
- --N_post[SourceNr];
-
- } else return -1;
- return 0;
- }
- int connection::SetSystematicWeights()
- {
- int TargetNr=-1;
- for (int SourceNr=0; SourceNr<ns; ++SourceNr) {
- for (int delay=0;delay<maximumDelay;++delay) {
- for (int i=0; i<delays_length[SourceNr][delay];++i) {
- TargetNr= post[SourceNr][delays[SourceNr][delay][i]];
- s[SourceNr][delays[SourceNr][delay][i]]=float(SourceNr) + 0.001*TargetNr;
- }
- }
- }
- }
- int connection::CheckSystematicWeights()
- {
- Dout(dc::con, "checking systematic weights");
- int SourceNr=-1;
- int errors =0;
- for (int TargetNr=0; TargetNr<nt;++TargetNr) {
- for (int i=0; i<N_pre[TargetNr]; ++i) {
- SourceNr = I_pre[TargetNr][i];
- // Dout(dc::con, "t=" << TargetNr << " s=" << SourceNr << " i=" << i << " weight=" << *s_pre[TargetNr][i] << " "); fflush(stdout);
- if ((*s_pre[TargetNr][i] - (float(SourceNr) + 0.001*TargetNr)) > 0.00001) {
- ++errors;
- cerr << "ERROR nr. " << errors << "\n"; fflush(stderr);
- }
- }
- }
- Dout(dc::con, "Checked Systematic Weights, errors=" << errors << "");
- }
// 2007/12/11: routine tested with condelins.cpp, seems to work
/** Remove synapse ConnectionNr of neuron SourceNr and keep ALL bookkeeping
 *  consistent: the delay list is compacted, the slot is marked free in
 *  post[] (set to -1; s[] keeps its value) and the matching entry is removed
 *  from the presynaptic tables (I_pre/D_pre/s_pre/m_pre, N_pre decremented).
 *  @param SupposedDelay expected delay of the synapse, or -1 to skip the
 *         cross-check
 *  @return 0 on success, -1 if the synapse was not found
 */
int connection::DeleteWeightCorrectPreInfo(int SourceNr, int ConnectionNr, int SupposedDelay)
{
 bool found = false;
 int WeightDelay=-1;
 int m=-1;
 if (SourceNr >= ns) return -1;
 // find Weight in delays array
 for (int delay=0;delay<maximumDelay;++delay) {
  for (int i=0; i<delays_length[SourceNr][delay];++i) {
   if (delays[SourceNr][delay][i] == ConnectionNr) {
    found=true;
    WeightDelay=delay;
    m=i;
    if ((SupposedDelay !=-1) && (SupposedDelay != WeightDelay)) {
     cerr << "Delays don't fit!!!\n";
     cerr << "SupposedDelay=" << SupposedDelay << "\n";
     cerr << "WeightDelay=" << WeightDelay << "\n";
    }
    break;
   }
  }
  if (found) break; // the previous break ends only the inner loop
 }
 if (found) {
  // find the connection in the presynaptic arrays
  int TargetNr = post[SourceNr][ConnectionNr];
  Dout(dc::con, "Delete Connection ");
  Dout(dc::con, "SourceNr=" << SourceNr << "m=" << m << " ConNr=" << ConnectionNr << " TargetNr=" << TargetNr << " Delay=" << WeightDelay << "DelayLength=" << delays_length[SourceNr][WeightDelay] << "");
  int PostIndex = -1;
  // identify the presynaptic entry by weight-pointer identity
  for (int i=0;i<N_pre[TargetNr];++i) {
   if (s_pre[TargetNr][i] == &s[SourceNr][ConnectionNr]) {
    PostIndex = i;
   }
  }
  if (PostIndex == -1) {
   cerr << "DeleteWeightCorrectPreInfo: ERROR: connection not found in s_pre array\n";
   fflush(stderr);
   exit(1);
  }

  // remove the connection from the delay list (shift the tail left)
  for (int pos=m;pos<delays_length[SourceNr][WeightDelay]-1;++pos) {
   delays[SourceNr][WeightDelay][pos] = delays[SourceNr][WeightDelay][pos+1];
  }
  // adjust delays_length
  --delays_length[SourceNr][WeightDelay];
  // connection is not removed from post[N][M] and s[N][M] arrays
  // it is only marked as deleted by setting post[SourceNr][ConnectionNr] to -1

  post[SourceNr][ConnectionNr] = -1;
  --N_post[SourceNr];

  // correct the presynaptic info:
  // N_pre: -1
  // I_pre, D_pre, m_pre, s_pre: remove the entry (shift the tail left)
  for (int k=PostIndex; k<N_pre[TargetNr]-1;++k) {
   I_pre[TargetNr][k] = I_pre[TargetNr][k+1];
   D_pre[TargetNr][k] = D_pre[TargetNr][k+1];
   s_pre[TargetNr][k] = s_pre[TargetNr][k+1];
   m_pre[TargetNr][k] = m_pre[TargetNr][k+1];
  }
  --N_pre[TargetNr];

  fflush(stdout);fflush(stderr);
 } else {
  cerr << "Connection which to delete was not found\n";
  return -1;
 }
 return 0;
}
- int connection::InsertNewWeight(int SourceNr, int TargetNr, float InitialWeight, int delay)
- {
- // check parameter
- if (TargetNr >= nt) {
- cerr << "Error in InsertNewWeight: TargetNr too high\n";
- return -1;
- }
- if (delay >= maximumDelay) {
- cerr << "ERROR in InsertNewWeight: delay >= maximumDelay\n";
- return -1;
- }
- // find free position in post array
- int m=-1;
- for (int i=0;i<M;++i) {
- if (post[SourceNr][i] == -1) {
- m=i;
- break;
- }
- }
- if (m!=-1) {
- post[SourceNr][m]=TargetNr;
- s[SourceNr][m]=InitialWeight;
- delays[SourceNr][delay][delays_length[SourceNr][delay]] = m;
- ++delays_length[SourceNr][delay];
- ++N_post[SourceNr];
- // add presynaptic info
- I_pre[TargetNr][N_pre[TargetNr]] = SourceNr;
- D_pre[TargetNr][N_pre[TargetNr]] = delay;
- m_pre[TargetNr][N_pre[TargetNr]] = m;
- s_pre[TargetNr][N_pre[TargetNr]] = &s[SourceNr][m];
- ++N_pre[TargetNr];
- } else {
- cerr << "ERROR: no free position for additional connection found\n";
- return -1;
- }
- return 0;
- }
/** Delete every synapse whose weight is at or below threshold, keeping all
 *  bookkeeping consistent via DeleteWeightCorrectPreInfo().
 *  @return number of deleted synapses
 */
int connection::DeleteLowWeights(float threshold)
{
 // walk over all weights (via the delay lists) and compare with the threshold;
 // individual weights are deleted in place and the presynaptic info is
 // corrected per deletion (no full rebuild)
 int DelCount=0;
 for (int SourceNr=0; SourceNr<ns;++SourceNr) {
  for (int CurDelay=0;CurDelay<maximumDelay;++CurDelay) {
   for (int j=0;j<delays_length[SourceNr][CurDelay];++j) {
    int SynapseNr=delays[SourceNr][CurDelay][j];
    if (s[SourceNr][SynapseNr] <= threshold) {
     // note: deleting shrinks delays_length and shifts the list left
     DeleteWeightCorrectPreInfo(SourceNr,SynapseNr, CurDelay);
     ++DelCount;
     --j; // because synapse is deleted, the next synapse now has the position of the deleted synapse
    }
   }
  }
 }
 Dout(dc::con, "DelCount=" << DelCount << " weights deleted");
 // DeletePresynapticInfo();
 // SetupPresynapticInfo();
 // CheckPresynapticInfo();

 return DelCount;
}
- // set new outgoing connections for Source (presynaptic) neurons
- // problem: we want it the other way around
- // setting new incoming weights to a specific Target (postsynaptic) neuron
- // --> SetNewWeights2
- int connection::SetNewWeights(int NNewTargets, int NMaxTargets)
- {
- int i;
-
- int AvailableTargetNeurons [nt];
- for (int SourceNr=0;SourceNr<ns;++SourceNr) {
- if (N_post[SourceNr] >= NMaxTargets) {
- Dout(dc::con, "N_post[SourceNr] =" << N_post[SourceNr] << ">= NMaxTargets=" << NMaxTargets << "");
- } else {
- for (i=0;i<nt;++i) {
- AvailableTargetNeurons[i] = 1;
- }
- int NAvailableTargetNeurons = nt;
- int TargetNr=-1;
-
- // delete existing connections from available target list
- for (int delay=0;delay<maximumDelay;++delay) {
- for (i=0; i<delays_length[SourceNr][delay];++i) {
- TargetNr = post[SourceNr][delays[SourceNr][delay][i]];
- if (TargetNr <0) {
- cerr << "FEHLER: TargetNr<0, should never happen\n";
- fflush(stderr);
- exit(1);
- }
- AvailableTargetNeurons[TargetNr] = 0;
- --NAvailableTargetNeurons;
- }
- }
-
- for (int NewTargetNr=0;NewTargetNr<NNewTargets;++NewTargetNr) {
- if (NAvailableTargetNeurons>0) {
- int TargetIndex = gsl_rng_uniform_int(gslr, NAvailableTargetNeurons);
- int count=-1;
- TargetNr=-1;
- for (i=0;i<nt;++i) {
- if (AvailableTargetNeurons[i] ==1) {
- ++count; // because it started with -1
- if (count == TargetIndex) {
- TargetNr = i;
- break;
- }
- }
- }
- if (InsertNewWeight(SourceNr, TargetNr, InitialWeight) != -1) {
- AvailableTargetNeurons[TargetNr] = 0;
- --NAvailableTargetNeurons;
- Dout(dc::con, "Set new connection: "<< SourceNr << "to " << TargetNr << ", available Target neurons: " << NAvailableTargetNeurons << "");
- }
- } else {
- break;
- }
- }
- }
- }
- }
// TODO: work in progress — continue here
- int connection::SetNewWeights2(int _NNewSources, int NMaxSources)
- {
- int i;
-
- // find out, which source neurons can make connections (check N_post[SourceNr] < M)
- // make a list (vector or map: number of source neuron, number of free connections)
- int NAvailableSource = 0;
- vector <int> AvailableSourceList;
- for (int SourceNr=0;SourceNr<ns;++SourceNr) {
- if (N_post[SourceNr] <M) {
- NAvailableSource += 1;
- AvailableSourceList.push_back(SourceNr);
- }
- }
- for (int TargetNr=0; TargetNr<nt;++TargetNr) {
- int NNewSources = min(_NNewSources, NMaxSources-N_pre[TargetNr]);
- for (int NewConNr=0; NewConNr<NNewSources; ++NewConNr) {
- // choose source neuron (random, use number of available source neurons)
-
-
- }
- }
- // connect to selected source neuron
- // if (N_post[SourceNr] == 0) then delete source neuron from list of available source neurons
- // next connection
- // next target neuron
-
- // int AvailableTargetNeurons [nt];
- // for (int SourceNr=0;SourceNr<ns;++SourceNr) {
- // if (N_post[SourceNr] >= NMaxTargets) {
- // Dout(dc::con, "N_post[SourceNr] =" << N_post[SourceNr] << ">= NMaxTargets=" << NMaxTargets << "");
- // } else {
- // for (i=0;i<nt;++i) {
- // AvailableTargetNeurons[i] = 1;
- // }
- // int NAvailableTargetNeurons = nt;
- // int TargetNr=-1;
-
- // // delete existing connections from available target list
- // for (int delay=0;delay<maximumDelay;++delay) {
- // for (i=0; i<delays_length[SourceNr][delay];++i) {
- // TargetNr = post[SourceNr][delays[SourceNr][delay][i]];
- // if (TargetNr <0) {
- // cerr << "FEHLER: TargetNr<0, should never happen\n";
- // fflush(stderr);
- // exit(1);
- // }
- // AvailableTargetNeurons[TargetNr] = 0;
- // --NAvailableTargetNeurons;
- // }
- // }
-
- // for (int NewTargetNr=0;NewTargetNr<NNewTargets;++NewTargetNr) {
- // if (NAvailableTargetNeurons>0) {
- // int TargetIndex = gsl_rng_uniform_int(gslr, NAvailableTargetNeurons);
- // int count=-1;
- // TargetNr=-1;
- // for (i=0;i<nt;++i) {
- // if (AvailableTargetNeurons[i] ==1) {
- // ++count; // because it started with -1
- // if (count == TargetIndex) {
- // TargetNr = i;
- // break;
- // }
- // }
- // }
- // if (InsertNewWeight(SourceNr, TargetNr, InitialWeight) != -1) {
- // AvailableTargetNeurons[TargetNr] = 0;
- // --NAvailableTargetNeurons;
- // Dout(dc::con, "Set new connection: "<< SourceNr << "to " << TargetNr << ", available Target neurons: " << NAvailableTargetNeurons << "");
- // }
- // } else {
- // break;
- // }
- // }
- // }
- // }
- }
- int connection::Rewire(float minWeight, float maxConnectivity)
- {
- // first: delete low weights
- int NFreeWeights = DeleteLowWeights(minWeight);
- // second: set new weights randomly
- SetNewWeights(NFreeWeights, int(floor(maxConnectivity*nt)));
- }
- /** returns list of synapses form source neuron
- *
- * @param [IN] SourceNr
- * @param [OUT] SynList is a return vector containing a list of all synapses from source neuron SourceNr
- * @return maximum weight
- */
- float connection::GetSourceWeights(int CurSource, vector<Synapse>& SynList, int& MaxDelay)
- {
- // Dout(dc::con, "connection::GetSourceWeights");
- SynList.clear();
- float MaxWeight=0;
- MaxDelay=0;
- for(int CurDelay=0;CurDelay<maximumDelay;++CurDelay)
- {
- for (int j=0; j< delays_length[CurSource][CurDelay]; j++) {
- short SynIndex = delays[CurSource][CurDelay][j];
- int CurTarget=post[CurSource][SynIndex];
- float CurWeight=s[CurSource][SynIndex];
- SynList.push_back(Synapse(CurSource,CurTarget, CurWeight, CurDelay));
- if (CurWeight>MaxWeight) {
- MaxWeight=CurWeight;
- }
- if (CurDelay>MaxDelay) {
- MaxDelay=CurDelay;
- }
- }
- }
- // Dout(dc::con, "MaxWeight=" << MaxWeight << "");
- return MaxWeight;
- }
- /** return a vector of all synapses to target neuron
- *
- * @param [IN] index of target neuron
- * @param [OUT] output vector containing all synapses to target neuron nr CurTarget
- * @return maximum weight
- */
- float connection::GetTargetWeights(int CurTarget, vector<Synapse>& SynList, int &MaxDelay)
- {
- // Dout(dc::con, "connection::GetTargetWeights");
- SynList.clear();
- float MaxWeight=0;
- MaxDelay=0;
- for (int j=0;j<N_pre[CurTarget];j++) {
- int CurSource=I_pre[CurTarget][j];
- float CurWeight=*(s_pre[CurTarget][j]);
- int CurDelay = D_pre[CurTarget][j];
- SynList.push_back(Synapse(CurSource, CurTarget, CurWeight, CurDelay));
- if (CurWeight>MaxWeight) {
- MaxWeight=CurWeight;
- }
- if (CurDelay>MaxDelay) {
- MaxDelay=CurDelay;
- }
- }
- // Dout(dc::con, "MaxWeight=" << MaxWeight << "");
- return MaxWeight;
- }
- /** return maximum synaptic weight value
- *
- * @return maximum weight
- */
- float connection::GetMaxWeight()
- {
- float MaxWeight=0;
- for (int CurSource=0;CurSource<ns;++CurSource)
- {
- for(int CurDelay=0;CurDelay<maximumDelay;++CurDelay)
- {
- for (int j=0; j< delays_length[CurSource][CurDelay]; j++) {
- short SynIndex = delays[CurSource][CurDelay][j];
- float CurWeight=s[CurSource][SynIndex];
- if (CurWeight>MaxWeight) {
- MaxWeight=CurWeight;
- }
- }
- }
- }
- // Dout(dc::con, "MaxWeight=" << MaxWeight << "");
- return MaxWeight;
- }
- int connection::GetMaxDelay()
- {
- int MaxDelay=0;
- for (int CurSource=0;CurSource<ns;++CurSource)
- {
- for(int CurDelay=MaxDelay;CurDelay<maximumDelay;++CurDelay)
- {
- if (delays_length[CurSource][CurDelay]>0) {
- if (CurDelay>MaxDelay) {
- MaxDelay=CurDelay;
- }
- }
- }
- }
- return MaxDelay;
- }
- ////////////////////////
/** Connection with short-term synaptic depression: each spike uses a
 *  fraction U_SE of the synaptic resources, which recover with time
 *  constant TauRec (stored in units of dt).
 *  @param _TauRec recovery time constant (same unit as dt)
 *  @param _U_se   utilization of synaptic efficacy per spike
 */
DepressiveConnection::DepressiveConnection(
  layer* SL, layer* TL,
  csimInputChannel InputNumber, float _TauRec, float _U_se)
  : connection(SL, TL, InputNumber),
  // efficacy(boost::extents[SourceLayer->N][M]),
  // LastEpsp(boost::extents[SourceLayer->N][M]),
  U_SE(_U_se),
  TauRec(_TauRec/dt), U_se_fac(1-_U_se)
{
  // efficacy/LastEpsp arrays are allocated later in InitializeDynamicalArrays()
  Dout(dc::con, "Initialize DepressiveConnection");
}
/** Free the depression state arrays.
 *  NOTE(review): assumes efficacy/LastEpsp were either allocated by
 *  InitializeDynamicalArrays() or null-initialized elsewhere — if the
 *  connection is destroyed before initialization these pointers may be
 *  indeterminate; TODO confirm member initialization in the class.
 */
DepressiveConnection::~DepressiveConnection()
{
  DeleteArray2d(efficacy, SourceLayer->N);
  DeleteArray2d(LastEpsp, SourceLayer->N);
}
- int DepressiveConnection::WriteSimInfo(fstream &fw)
- {
- stringstream sstr;
- sstr << "<TauRec value=\"" << TauRec*dt << "\"/> \n";
- sstr << "<U_SE value=\"" << U_SE << "\"/> \n";
- connection::WriteSimInfo(fw, sstr.str());
- }
/** Advance one time step: like connection::proceede(), but each delivered
 *  spike is scaled by the recovered synaptic efficacy, which is then
 *  depressed by the utilization factor (short-term depression).
 */
int DepressiveConnection::proceede(int TotalTime)
{
 int t = TotalTime % MacroTimeStep;
 int i,j,k,mi,ipre;
 // calculate input for target layer
 k=SourceLayer->N_firings;
 // while (t-SourceLayer->firings[--k][0] < Dmax)
 // only spikes at most maximumDelay steps in the past are considered
 while (t-SourceLayer->firings[--k][0] < maximumDelay)
 {
  ipre = SourceLayer->firings[k][1];
  for (j=0; j< delays_length[SourceLayer->firings[k][1]][t-SourceLayer->firings[k][0]]; j++)
  {
   mi = delays[ipre][t-SourceLayer->firings[k][0]][j];
   i=post[ipre][mi];
   if (TauRec > 0) {
    // exponential recovery of the efficacy since the last EPSP
    efficacy[ipre][mi] = (1-(1-efficacy[ipre][mi])*exp(-(t-LastEpsp[ipre][mi])/TauRec));
   } else {
    // TauRec <= 0 disables depression: always fully recovered
    efficacy[ipre][mi] =1;
   }
   InputPointer[i]+= s[ipre][mi] * efficacy[ipre][mi] ;
   // depress: a fraction U_SE of the resources is consumed by this spike
   efficacy[ipre][mi] *= U_se_fac;
   LastEpsp[ipre][mi] = t;
  }
 }

 if (learn == true) {
  learnobj->proceede(TotalTime);
 }
}
- int DepressiveConnection::reset(int t)
- {
- for(int i=0;i<ns;++i) for (int j=0;j<M;++j) {
- efficacy[i][j]=1;
- LastEpsp[i][j]=0;
- }
- }
- int DepressiveConnection::prepare(int step)
- {
- // if ((learn == true) && (learnobj != 0)) {
- // learnobj->prepare();
- // Save();
- // }
- connection::prepare(step);
- // Dout(dc::con, "PERFORMANCE: cpu-time = " << cpu_time_used << " sec");
- int i,j;
-
- for (i=0;i<SourceLayer->N;++i) for (j=0;j<M;++j) {
- LastEpsp[i][j]-=MacroTimeStep;
- }
- }
- int DepressiveConnection::InitializeDynamicalArrays(
- const int _N, const int _M, const int Dmax)
- {
- cout <<"DepressiveConnection::InitializeDynamicalArrays\n";
- connection::InitializeDynamicalArrays(_N,_M,Dmax);
- int i,j;
- Dout(dc::con, "SN=" << SourceLayer->N << "");
- Dout(dc::con, "M=" << M << "");
- NewArray2d(efficacy, SourceLayer->N, M);
- NewArray2d(LastEpsp, SourceLayer->N, M);
- for (i=0;i<SourceLayer->N;++i) for (j=0;j<M;++j) {
- efficacy[i][j]=1;
- LastEpsp[i][j]=0;
- }
- }
- ///////////////////////////////////
- FacilitativeConnection::FacilitativeConnection(
- layer* SL, layer* TL, csimInputChannel InputNumber, float _TauRec,
- float _U_se, float _UseTauDec, float _UseInc):
- DepressiveConnection(SL, TL, InputNumber, _TauRec, _U_se), UseTauDec(_UseTauDec/dt), UseInc(_UseInc), UseConst(_U_se)
- {
- Dout(dc::con, "Initialize Facilitative Connection ");
- Dout(dc::con, " UseTauDec=" << UseTauDec << " UseInc=" << UseInc << " UseConst" << UseConst << "");
- }
/** Free the facilitation state array (base class frees efficacy/LastEpsp).
 *  NOTE(review): assumes U_SEvalue was allocated by
 *  InitializeDynamicalArrays() or null-initialized — TODO confirm.
 */
FacilitativeConnection::~FacilitativeConnection()
{
  DeleteArray2d(U_SEvalue,SourceLayer->N);
}
- int FacilitativeConnection::InitializeDynamicalArrays(
- const int _N, const int _M, const int Dmax)
- {
- cout <<"FacilitativeConnection::InitializeDynamicalArrays\n";
- DepressiveConnection::InitializeDynamicalArrays(_N,_M,Dmax);
- int i,j;
- NewArray2d(U_SEvalue, SourceLayer->N, M);
- for (i=0;i<SourceLayer->N;++i) for (j=0;j<M;++j) {
- U_SEvalue[i][j]=0;
- }
- }
- int FacilitativeConnection::reset(int t)
- {
- DepressiveConnection::reset(t);
- for(int i=0;i<ns;++i) for (int j=0;j<M;++j) {
- U_SEvalue[i][j] = 0;
- }
- }
/** Advance one time step with combined depression and facilitation: the
 *  effective utilization is UseConst plus a decaying per-synapse
 *  facilitation term that is bumped after every spike.
 */
int FacilitativeConnection::proceede(int TotalTime)
{
 int t = TotalTime % MacroTimeStep;
 int i,j,k,mi,ipre;
 // calculate input for target layer
 k=SourceLayer->N_firings;
 // while (t-SourceLayer->firings[--k][0] < Dmax)
 float CurU_SE;
 // only spikes at most maximumDelay steps in the past are considered
 while (t-SourceLayer->firings[--k][0] < maximumDelay)
 {
  ipre = SourceLayer->firings[k][1];
  for (j=0; j< delays_length[SourceLayer->firings[k][1]][t-SourceLayer->firings[k][0]]; j++)
  {
   mi = delays[ipre][t-SourceLayer->firings[k][0]][j];
   i=post[ipre][mi];
   // facilitation decays exponentially since the last EPSP ...
   U_SEvalue[ipre][mi] *= exp(-(t-LastEpsp[ipre][mi])/UseTauDec);
   // ... and raises the effective utilization above the constant baseline
   CurU_SE = UseConst + U_SEvalue[ipre][mi];
   // exponential recovery of the depressed efficacy
   efficacy[ipre][mi] = (1-(1-efficacy[ipre][mi])*exp(-float(t-LastEpsp[ipre][mi])/TauRec));
   // Dout(dc::con, efficacy[ipre][mi] << " "); //remove
   InputPointer[i]+= s[ipre][mi] * efficacy[ipre][mi] * CurU_SE;
   // depress by the current utilization and bump the facilitation term
   efficacy[ipre][mi] *= (1-CurU_SE);
   LastEpsp[ipre][mi] = t;
   U_SEvalue[ipre][mi] += (1-CurU_SE)*UseInc;
  }
 }
 if (BinRec) BinRec->record();

 if (learn == true) {
  learnobj->proceede(TotalTime);
 }
}
- int FacilitativeConnection::StartBinRec(int PreSynNumber)
- {
- int NumObserve = 2*M;
- float** Buffer = new float* [NumObserve];
- for (int i=0;i<M; ++i) {
- Buffer[i] = &efficacy[PreSynNumber][i];
-
- }
- for (int i=0;i<M; ++i) {
- Buffer[i+M] = &U_SEvalue[PreSynNumber][i];
-
- }
- string FileName("effsynweight.dat.bin");
- BinRec = new BinRecorder(MacroTimeStep, NumObserve, M, Buffer, dt, (DataDirectory+FileName).c_str());
- }
- ///////////////////////////////
- PspConnection::PspConnection(layer* SL, layer* TL, csimInputChannel InputNumber, bool _nonself): connection(SL,TL, InputNumber, _nonself), PspArrayPointer(0)
- {
- float tau2=20;
- float tau1=5;
- float _PspDuration = 50;
- PspDuration = int(_PspDuration/dt);
- PspTemplate = new float [PspDuration];
- NewArray2d(Psp, TargetLayer->N, PspDuration);
-
- }
/** Free the PSP template and the per-target PSP traces. */
PspConnection::~PspConnection()
{
  delete[] PspTemplate;
  DeleteArray2d(Psp,TargetLayer->N);
}
|