updateW_v5.m

function wT1 = updateW_v5(wT, yT, xT, dT1, etaW, capW, maxW)
%%%%%
% Run this for each timestep to update the weight matrix for the next timestep.
% Inputs:
%   wT,  weight matrix for the given timestep
%   yT,  outputs for the timestep
%   xT,  inputs for the timestep
%   dT1, memorized activity of each neuron up to and including the next
%        timestep (i.e., updateD must be run before this)
% Output:
%   wT1, weight matrix for the next timestep
%%%%%
%%% added modulatory term to slow learning... %%%
%%%

m = size(wT,1); % number of output units
n = size(wT,2); % number of input units

wT1 = wT;
for idx = 1:m
    for jdx = 1:n
        t1 = wT(idx,jdx); % term 1 of the weight update: current weight
        t2n = etaW*yT(idx)*xT(jdx); %- (wT(idx,jdx)*yT(idx))); numerator of term 2
        t2d = 1; %dT1(idx) + eps; denominator of term 2 (modulatory term disabled)
        t2 = t2n/t2d; % term 2 of the weight update
        % not originally rectified
        wT1(idx,jdx) = t1 + t2;
    end
end

wT1(wT1==0) = eps;              % don't kill synapses (negative weights okay)
wT1(wT1>maxW) = maxW;           % cap magnitude of individual weights
wT1(wT1 < (-1*maxW)) = -1*maxW; % cap magnitude of individual weights

for idx = 1:m % cap the norm of the weight row feeding each output
    if norm(wT1(idx,:)) > capW
        wT1(idx,:) = wT1(idx,:) ./ (norm(wT1(idx,:))/capW);
    end
end

if isnan(sum(wT1(:)))
    wT1(isnan(wT1)) = eps; % replace any NaNs so the matrix stays finite
end

end
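
A minimal usage sketch (not part of the original file): it shows how updateW_v5 might be driven inside a simulation loop. The dimensions, parameter values, the random input, and the linear readout y = w*x are illustrative assumptions, and d is only a placeholder here since the modulatory denominator is disabled in this version; the updateD routine mentioned in the header comments is not reproduced.

% --- hypothetical driver script; all values below are assumptions ---
nIn  = 50;     % assumed number of input units
nOut = 10;     % assumed number of output units
T    = 100;    % assumed number of timesteps
etaW = 0.01;   % assumed learning rate
capW = 2.0;    % assumed cap on the norm of each output's weight row
maxW = 1.0;    % assumed cap on individual weight magnitude

w = 0.1*randn(nOut, nIn);   % initial weight matrix
d = ones(nOut, 1);          % placeholder for memorized activity (dT1)

for t = 1:T
    x = rand(nIn, 1);       % stand-in input for timestep t
    y = w*x;                % stand-in output (linear readout)
    % updateD would normally refresh d here, before the weight update
    w = updateW_v5(w, y, x, d, etaW, capW, maxW);
end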