FunctionDefinitions.py

'''Import Packages'''
import sys
from brian2 import *
import sympy
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from pathlib import Path
import seaborn as sns
from scipy import signal
from scipy import optimize
from scipy import stats
from scipy.stats import gaussian_kde
import pickle
import random
import matplotlib
# matplotlib.rcParams['pdf.fonttype'] = 42

'''Figure Styles'''
def figsave(figure_folder, figname):
    plt.savefig(figure_folder / (figname + '.pdf'), format='pdf', dpi=300, transparent=True)

def create_fig():
    hfig = plt.figure(figsize=[1.5, 1.5])
    ax = hfig.add_axes([0.2, 0.2, 0.7, 0.7], frameon=False)
    return hfig, ax

def create_fig_tuning():
    figsize = [1.2, 1.5]
    hfig = plt.figure(figsize=figsize)
    ax = hfig.add_axes([0.3, 0.2, 0.6, 0.7])
    return hfig, ax
def set_fig_style():
    rc = matplotlib.rcParams
    fontsize = 7
    rc['font.size'] = fontsize
    rc['axes.labelsize'] = fontsize
    rc['axes.titlesize'] = fontsize
    rc['axes.linewidth'] = 0.5
    rc['axes.labelpad'] = 0
    rc['font.family'] = 'sans-serif'
    rc['font.sans-serif'] = ['Helvetica', 'Arial', 'sans-serif']
    rc['xtick.major.width'] = 0.5
    rc['ytick.major.width'] = 0.5
    rc['xtick.minor.width'] = 0.5
    rc['ytick.minor.width'] = 0.5
    rc['xtick.major.size'] = 2
    rc['ytick.major.size'] = 2
    rc['xtick.minor.size'] = 2
    rc['ytick.minor.size'] = 2
    rc['xtick.major.pad'] = 0
    rc['ytick.major.pad'] = 0
    rc['xtick.minor.pad'] = 0
    rc['ytick.minor.pad'] = 0
    rc['ytick.labelsize'] = fontsize
    rc['xtick.labelsize'] = fontsize
    rc['pdf.fonttype'] = 42  # embed TrueType fonts so saved PDFs remain editable
    rc['ps.fonttype'] = 42
    # matplotlib.rcParams.update(rc)
    return rc
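
# Hedged usage sketch of the style helpers above: apply the rc styles, build a
# panel, and save it. `figure_folder` here is a hypothetical output directory.
def _demo_figure_style():
    set_fig_style()                      # mutates matplotlib.rcParams in place
    hfig, ax = create_fig()
    ax.plot([0, 1], [0, 1])
    figure_folder = Path('figures')      # hypothetical path; created if missing
    figure_folder.mkdir(exist_ok=True)
    figsave(figure_folder, 'demo_panel')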
def plot_corr_matrix_multigauss(rv):
    hfig = plt.figure(figsize=(15, 5))
    ax1 = hfig.add_axes([0.1, 0.2, 0.4, 0.7])
    ax1.axis('scaled')
    ax2 = hfig.add_axes([0.5, 0.2, 0.4, 0.7])
    ax2.axis('scaled')
    covmat = rv.covariance
    sns.heatmap(covmat,
                ax=ax1,
                annot=True,
                cbar=False,
                fmt="0.3f",
                cmap="YlGnBu",
                xticklabels=['stretch', 'tau', 'offset'],  # range(np.shape(dataset)[1])
                yticklabels=['stretch', 'tau', 'offset'])
    ax1.set_title("Covariance matrix")
    corrmat = correlation_from_covariance(covmat)
    sns.heatmap(corrmat,
                ax=ax2,
                annot=True,
                cbar=False,
                fmt="0.3f",
                cmap="YlGnBu",
                xticklabels=['stretch', 'tau', 'offset'],
                yticklabels=['stretch', 'tau', 'offset'])
    ax2.set_title("Correlation matrix")
'''Analysis'''
def exp_fit(x, a, k, b):
    return a*np.exp(x*k) + b
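
# Hedged sketch of how exp_fit is used with scipy's curve_fit (the p0/bounds mirror
# the values used in exclude_HighThreshAff below; the data here are synthetic).
def _demo_exp_fit():
    x = np.asarray([-40, -30, -20, -10, -5, 0, 5, 10, 20, 30, 40], dtype=float)
    rng = np.random.default_rng(0)
    y = exp_fit(x, 2.0, -0.05, 3.0) + rng.normal(0, 0.05, x.size)  # noisy latencies
    params, pcov = optimize.curve_fit(exp_fit, x, y,
                                      p0=[1, -0.05, 3],
                                      bounds=([0, -1, 1], [np.inf, 0, 10]),
                                      maxfev=1000)
    return params  # [stretch, tau, offset]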
def get_example_cmd(exptname, boutstr, sweepdur):
    '''Format of inputs:
    exptname = '20191218_009'
    boutstr = "[expt.get_bout_win('R','Keyboard')[0],
                expt.get_bout_win('R','Keyboard')[1],
                expt.get_bout_win('N','Keyboard')[0],
                expt.get_bout_win('N','Keyboard')[1]]"
    sweepdur = 0.05
    NOTE: relies on AmpShift_Stable and data_folder being defined in the calling
    scope; neither is imported in this module.
    '''
    expt = AmpShift_Stable()
    expt.load_expt(exptname, data_folder)
    expt.set_channels('CmdTrig', 'lowgain', 'spikes', 'SIU', 'DigMark')
    bout = eval(boutstr)
    marker_df = expt.get_marker_table()
    dt = expt.get_dt('lowgain')
    bout_df = expt.filter_marker_df_time(marker_df, bout)
    b_df = expt.filter_marker_df_code(bout_df, ['B'])
    u_df = expt.filter_marker_df_code(bout_df, ['U'])
    # from uncoupled trials, get the times of the command responses
    cmd_t = np.asarray([np.max(b_df.time.values[b_df.time.values < t]) for t in u_df.time.values])
    xtime, sweeps = expt.get_sweepsmat('lowgain', cmd_t, sweepdur)
    cmd_ = np.mean(sweeps, 1) - np.mean(sweeps, 1)[0]
    return xtime, cmd_
def correlation_from_covariance(covariance):
    v = np.sqrt(np.diag(covariance))
    outer_v = np.outer(v, v)
    correlation = covariance / outer_v
    correlation[covariance == 0] = 0
    return correlation
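
# Worked sketch: with variances 4 and 1 and covariance 1, the off-diagonal
# correlation should be 1/(sqrt(4)*sqrt(1)) = 0.5.
def _demo_correlation():
    cov = np.array([[4.0, 1.0],
                    [1.0, 1.0]])
    corr = correlation_from_covariance(cov)
    assert np.allclose(corr, [[1.0, 0.5], [0.5, 1.0]])
    return corr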
def exclude_HighThreshAff(meta_df):
    x = np.unique(meta_df.ampshift)  # array([-40,-30,-20,-10,-5,0,5,10,20,30,40])
    spk_thresh = 12
    n_excluded = 0
    n_included = 0
    expt_excluded = []
    meta_params_df = pd.DataFrame()
    colors = ['blue', 'orange', 'brown', 'green']
    for i, animal in enumerate(np.unique(meta_df['animalID'])):
        animal_df = meta_df[meta_df['animalID'] == animal]
        print(str(animal) + ' (' + colors[i] + ') has a total of '
              + str(len(np.unique(animal_df['exptname']))) + ' afferents recorded')
        params_all = []
        sse_all = []
        df_all = []
        for name in np.unique(animal_df['exptname']):
            expt_df = animal_df[animal_df['exptname'] == name]
            try:
                x_data = expt_df.dropna(axis=0).ampshift.values
                y_data = expt_df.dropna(axis=0).fsl.values
                if len(np.unique(x_data)) > 5:
                    n_included += 1
                    spk_thresh = np.max(y_data)
                    params, params_covariance = optimize.curve_fit(exp_fit, x_data, y_data,
                                                                   p0=[1, -0.05, 3],
                                                                   bounds=([0, -1, 1], [np.inf, 0, 10]),
                                                                   maxfev=1000)
                    params_all.append(params)
                    c = []
                    for x_, y_ in zip(x_data, y_data):
                        yh = exp_fit(x_, params[0], params[1], params[2])
                        s = np.std(y_data[x_data == x_])  # std of y at each x to adjust chi2 (currently unused)
                        c.append(((y_ - yh)**2))
                    sse_all.append(np.sum(c))
                    df_all.append(len(y_data) - 3)
                if len(np.unique(x_data)) <= 5:
                    n_excluded += 1
                    expt_excluded.append(name)
                    params = np.array([np.nan, np.nan, np.nan])
                    params_all.append(params)
                    sse_all.append(np.nan)
                    df_all.append(len(y_data) - 3)
            except Exception:
                n_excluded += 1
                expt_excluded.append(name)
                params = np.array([np.nan, np.nan, np.nan])
                params_all.append(params)
                sse_all.append(np.nan)
                df_all.append(len(y_data) - 3)
        exptname = animal_df.groupby('exptname').exptname.describe().top.values
        max_fsl = animal_df.groupby('exptname').fsl.max().values
        a = [p[0] for p in params_all]
        k = [p[1] for p in params_all]
        b = [p[2] for p in params_all]
        meta_params = {
            'exptname': exptname,
            'animal': animal,
            'stretch': a,
            'tau': k,
            'offset': b,
            'max_fsl': max_fsl,
            'sse': sse_all,
            'df': df_all
        }
        animal_df = pd.DataFrame(meta_params)
        meta_params_df = pd.concat([meta_params_df, animal_df], sort=False)
    print('number of afferents excluded (first-spike latency threshold too high, or could not be fit): ' + str(n_excluded))
    print('number of afferents included (spike threshold at least 0%): ' + str(n_included))
    return meta_params_df, expt_excluded
def assess_fits(meta_params_df):
    un_fit = meta_params_df[meta_params_df['sse'].gt(meta_params_df['sse'].quantile(.9))]
    well_fit = meta_params_df[meta_params_df['sse'].lt(meta_params_df['sse'].quantile(.9))]
    plt.figure(figsize=(2, 4))
    well_fit.boxplot('sse')
    print(un_fit)
    return well_fit, un_fit
def calc_peaks(xtime, sweeps, order, min_peakt, threshold_h, dt):
    R = sweeps
    nsamp = int(order/dt)  # the comparison window in samples; 2 ms worked well
    epsp_ = signal.argrelextrema(R, np.greater_equal, order=nsamp)[0]
    epsp_ = epsp_[np.where((epsp_*dt) > min_peakt)[0]]
    epsp = []
    measure = epsp_
    for i in measure:
        lb = int(min_peakt/dt)
        rb = len(R) - 1
        min_height = np.min([abs(R[i]-R[lb]), abs(R[i]-R[rb])])
        if min_height > threshold_h:
            epsp.append(i)
    if len(epsp) > 0:
        epsp = np.min(epsp)
    elif len(epsp) == 0:
        epsp = np.nan
    R_filt = R
    y = signal.medfilt(np.concatenate([[0], np.diff(R_filt)]), [25])
    accel = signal.medfilt(np.concatenate([[0], np.diff(y)]), [11])
    dvdt_start = int(0.002/dt)
    if ~np.isnan([epsp]).any():
        epsp_t = xtime[epsp]
        max_dvdt = np.max(y[dvdt_start:epsp])
        dvdt_threshold = np.max([0.01, 0.15*max_dvdt])
        onset_options = np.where((np.sign(y-dvdt_threshold) > 0) & (np.sign(accel) >= 0))[0]
        valid_onsets = onset_options[(onset_options > dvdt_start) & (onset_options < epsp)]
        if len(valid_onsets) > 0:
            if (epsp_t - (np.min(valid_onsets)*dt)) > 0:  # ensure that onset is before peak
                epsp_onset_ind = np.min(valid_onsets)  # min after stim artifact
                epsp_amp = R[epsp] - R[epsp_onset_ind]
                epsp_onset = xtime[epsp_onset_ind]
            elif (epsp_t - (np.min(valid_onsets)*dt)) <= 0:
                epsp_t = np.nan
                epsp_onset = np.nan
                epsp_amp = 0
        elif len(valid_onsets) == 0:
            epsp_t = np.nan
            epsp_onset = np.nan
            epsp_amp = 0
    elif np.isnan([epsp]).any():
        epsp_t = np.nan
        epsp_onset = np.nan
        epsp_amp = 0
    return epsp_t, epsp_amp, epsp_onset
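
# Hedged sketch of calc_peaks on a synthetic EPSP-like trace. Units are assumed:
# xtime and dt in seconds (consistent with the index*dt arithmetic inside), trace in mV.
def _demo_calc_peaks():
    dt = 1e-4
    xtime = np.arange(0, 0.05, dt)                       # 50 ms trace starting at t = 0
    t0, tau, amp = 0.008, 0.003, 5.0                     # alpha-function EPSP, ~5 mV peak
    R = np.where(xtime > t0,
                 amp*((xtime-t0)/tau)*np.exp(1-(xtime-t0)/tau), 0.0)
    epsp_t, epsp_amp, epsp_onset = calc_peaks(xtime, R,
                                              order=0.002,      # 2 ms comparison window
                                              min_peakt=0.005,  # skip early stimulus artifact
                                              threshold_h=0.5,  # minimum peak height
                                              dt=dt)
    return epsp_t, epsp_amp, epsp_onset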
def exponential(x, a, k, b):
    # identical to exp_fit above; kept under its original name for existing callers
    return a*np.exp(x*k) + b
def get_afferents_subsampled(meta_df, n_inputs):
    # get spike times for afferent population
    subset = random.choices(np.unique(meta_df['exptname']), k=n_inputs)  # divide by 4 if multispike
    multispike = []
    fsl = []
    # get spike times across stimamps
    for exptname in subset:
        expt_df = meta_df[meta_df['exptname'] == exptname]
        multispike.append(expt_df.groupby('ampshift').mean()['s0t'].values)
        fsl.append(expt_df.groupby('ampshift').mean()['s0t'].values)
        multispike.append(expt_df.groupby('ampshift').mean()['s1t'].values)
        multispike.append(expt_df.groupby('ampshift').mean()['s2t'].values)
        multispike.append(expt_df.groupby('ampshift').mean()['s3t'].values)
    multispike = np.asarray(multispike).T
    fsl = np.asarray(fsl).T
    return multispike, fsl
def generate_y_fit_multispike_gauss(x, n_inputs, dataset, rv):
    # generate y_fit array on each iteration
    stretch_min = np.min(dataset[:, 0])
    stretch_max = np.max(dataset[:, 0])
    tau_min = np.min(dataset[:, 1])
    tau_max = np.max(dataset[:, 1])
    offset_min = np.min(dataset[:, 2])
    offset_max = np.max(dataset[:, 2])
    # max_fsl = np.max(dataset[:,3])
    max_fsl = 11
    offset_2 = 1.74  # these are averages from empirical data, in ms from first spikes
    offset_3 = 3.66
    offset_4 = 6.37
    i = 0
    y_fit = []
    fsl = []
    while i < n_inputs:
        [s, t, o] = rv.resample(1)
        # redraw until the sample falls inside the empirical parameter ranges
        while (s < stretch_min) \
                | (s > stretch_max) \
                | (t < tau_min) \
                | (t > tau_max) \
                | (o < offset_min) \
                | (o > offset_max):
            [s, t, o] = rv.resample(1)
        i += 1
        y = exp_fit(x, s, t, o)
        spk_thresh = max_fsl
        y[y > spk_thresh] = np.nan
        y_fit.append(y)
        fsl.append(y)
        y2 = y + offset_2
        y2[y2 > spk_thresh] = np.nan
        y_fit.append(y2)
        y3 = y + offset_3
        y3[y3 > spk_thresh] = np.nan
        y_fit.append(y3)
        y4 = y + offset_4
        y4[y4 > spk_thresh] = np.nan
        y_fit.append(y4)
    y_fit = np.asarray(y_fit).T
    fsl = np.asarray(fsl).T
    return y_fit, fsl
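
# Hedged sketch of what `dataset` and `rv` are assumed to be for the generators in
# this file: `dataset` holds the (stretch, tau, offset) fit parameters per afferent
# (e.g. from exclude_HighThreshAff), and `rv` is a scipy.stats.gaussian_kde over
# them (it must expose .resample and .covariance, which gaussian_kde does).
def _demo_make_rv(meta_params_df):
    dataset = meta_params_df[['stretch', 'tau', 'offset']].dropna().values
    rv = gaussian_kde(dataset.T)  # gaussian_kde expects shape (n_dims, n_samples)
    x = np.asarray([-40, -30, -20, -10, -5, 0, 5, 10, 20, 30, 40])
    y_fit, fsl = generate_y_fit_multispike_gauss(x, 4, dataset, rv)
    return dataset, rv, y_fit, fsl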
def generate_y_fit_multispike_homogisi(meta_df, x, n_inputs, dataset, rv):
    basis, _ = generate_y_fit_multispike_gauss(x, 1, dataset, rv)
    # select a subset of real afferent p(spike) to mimic
    subset = random.choices(np.unique(meta_df['exptname']), k=n_inputs)  # divide by 4 if multispike
    multispike = []
    # get spike times across stimamps
    for expt_ in subset:
        spkt = meta_df[meta_df['exptname'].isin([expt_])].groupby(['ampshift']).mean()[['s0t', 's1t', 's2t', 's3t']].values
        spk_mask = np.isnan(spkt)
        y = basis.copy()
        y[spk_mask] = np.nan
        multispike.extend(y.T)
    multispike = np.asarray(multispike).T
    return multispike
def generate_y_fit_multispike_homog(meta_df, x, n_inputs, dataset, rv):
    # generate y_fit array on each iteration
    stretch_static = np.median(dataset[:, 0])
    tau_static = np.median(dataset[:, 1])
    offset_static = np.median(dataset[:, 2])
    max_fsl = 11
    # offset_min = np.min(dataset[:,2])
    # offset_max = np.max(dataset[:,2])
    offset_2 = 1.74  # these are averages from empirical data, in ms from first spikes
    offset_3 = 3.66
    offset_4 = 6.37
    y_fit = []
    fsl = []
    for i in range(n_inputs):
        y = exp_fit(x, stretch_static, tau_static, offset_static)
        spk_thresh = max_fsl
        y[y > spk_thresh] = np.nan
        y_fit.append(y)
        fsl.append(y)
        y2 = y + offset_2
        y2[y2 > spk_thresh] = np.nan
        y_fit.append(y2)
        y3 = y + offset_3
        y3[y3 > spk_thresh] = np.nan
        y_fit.append(y3)
        y4 = y + offset_4
        y4[y4 > spk_thresh] = np.nan
        y_fit.append(y4)
    # earlier variant that resampled only the offset from the KDE:
    # while i < n_inputs:
    #     [s, t, o] = rv.resample(1)
    #     while (o < offset_min) | (o > offset_max):
    #         [s, t, o] = rv.resample(1)
    #     i += 1
    #     y = exp_fit(x, stretch_static, tau_static, o)
    #     ... (same spike-threshold masking and offsets as the loop above)
    y_fit = np.asarray(y_fit).T
    fsl = np.asarray(fsl).T
    return y_fit, fsl
def reset_namespace_defaults():
    namespace_sim = {
        'N_inputs': 4*4,  # 4 inputs with 4 possible spikes each
        'N_runs': 15,
        'sim_dt': 0.1*ms,
        'duration': 0.05*second,
        'Cm': 6*pF,
        'E_l': -70*mV,
        'g_l': 1*nS,  # 1*nS corresponds to a 1 GOhm input resistance
        'E_e': 0*mV,
        'E_e_lmi': -80*mV,
        'V_th': 0*mV,
        'V_r': -70*mV,
        'tau_r': 1*ms,
        'w_e': 0.1*nS,
        'w_e_lmi': 20*nS,  # inhibition on; set to 0*nS to turn it off (weight by logistic)
        'tau_e1': 4*ms,
        'tau_e2': 1*ms,
        'tau_e_lmi': 10*ms,
        'onset_offset': 0,  # ms; stimulus onset relative to cmd (5 ms used for figures plotted with 5 ms pre-onset)
        'e_lmi_delay': 4,  # ms (stored unitless; converted to a Brian2 quantity where used)
        'invpeak': 1
    }
    return namespace_sim
def initialize_model(namespace_sim, meta_params):
    start_scope()
    ################################################################################
    # Model definition
    ################################################################################
    ### Neurons
    neuron_eq = '''
    dv/dt = (g_l*(E_l-v) + g_e*(E_e-v) + g_e_lmi*(E_e_lmi-v))/Cm : volt
    dg_e/dt = (invpeak * s - g_e)/tau_e1 : siemens
    ds/dt = -s/tau_e2 : siemens
    dg_e_lmi/dt = -g_e_lmi/tau_e_lmi : siemens # post-synaptic electrosensory inhibitory conductance
    '''
    neurons = NeuronGroup(1, model=neuron_eq, method='euler',
                          name='grc', namespace=namespace_sim)
    neurons.v = -70*mV
    # set up monitor of post-synaptic neuron parameters
    state_mon = StateMonitor(neurons, ['v', 'g_e', 'g_e_lmi'],
                             record=0, name='state_mon')
    n_aff = meta_params['N_inputs']
    aff_ind = arange(n_aff)
    aff_t = sorted(np.zeros(n_aff))*ms
    G_e = SpikeGeneratorGroup(n_aff, aff_ind, aff_t, name='aff_input')
    lmi_t = np.zeros(1)*ms
    G_e_lmi = SpikeGeneratorGroup(1, array([0]), lmi_t, name='lmi_input')
    syn_e = Synapses(G_e, neurons, on_pre='s += w_e', namespace=namespace_sim, name='aff_syn')
    # e_lmi_delay is stored unitless (in ms), so attach Brian2 time units here
    syn_e_lmi = Synapses(G_e_lmi, neurons, on_pre='g_e_lmi_post += w_e_lmi',
                         delay=meta_params['e_lmi_delay']*ms, namespace=namespace_sim,
                         name='aff_lmi_syn')
    # now connect the synapses
    syn_e.connect(i=aff_ind, j=0)
    syn_e_lmi.connect(i=0, j=0)
    net = Network(collect())
    return net
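
# Hedged sketch: build the network, drive it with one afferent spike pattern, and
# read out the somatic voltage. It assumes the same dict returned by
# reset_namespace_defaults can serve as both namespace_sim and meta_params.
def _demo_initialize_model():
    ns = reset_namespace_defaults()
    net = initialize_model(ns, ns)
    spk_t = np.asarray([5.0, 6.7, 8.9])          # afferent spike times in ms
    net['aff_input'].set_spikes(np.arange(len(spk_t)), spk_t*ms)
    net['lmi_input'].set_spikes(np.array([0]), [np.min(spk_t)]*ms)
    net.run(duration=ns['duration'])
    v = (net['state_mon'].v/mV).reshape(-1)      # membrane potential trace in mV
    return net['state_mon'].t/ms, v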
def run_4_model_conditions(x, n_runs, n_inputs):
    # Simulate the model under four afferent population conditions, using a canonical
    # multispike tuning function from the fsl generative model and offsets, with a
    # basic spike threshold at 12 ms.
    # x = np.asarray([-40,-30,-20,-10,-5,0,5,10,20,30,40])
    # NOTE: relies on module-level globals defined by the calling script/notebook:
    # namespace_sim, meta_params, sim_filepath, meta_df, dataset, and rv.
    net = initialize_model(namespace_sim, meta_params)
    net.store('initialized', filename=sim_filepath)

    def run_population(multispike):
        # run one trial per stimulus amplitude for a sampled afferent population
        R_amp_ = []  # responses across the stimulus set
        dvdt_ = []
        for y in multispike:
            net.restore('initialized', filename=sim_filepath)  # resets simulation clock to 0;
            # the state of the system at the time of restore sets the network state accordingly
            y = y[~np.isnan(y)]  # remove nan values from the array of spike times
            spk_t_aff = np.asarray(y)*ms  # spike time array for the SpikeGeneratorGroup
            ind_aff = np.empty(0)  # default afferent index array in case all were nan
            spk_t_lmi = np.empty(0)*ms  # default lmi spike time in case of no afferent input
            ind_lmi = np.empty(0)  # default lmi index array in case all were nan
            if len(y) != 0:  # if not all nan, build index arrays for afferent and lmi spikes
                ind_aff = np.arange(len(y))
                ind_lmi = np.arange(1)
                spk_t_lmi = [np.min(y)]*ms
            # update SpikeGeneratorGroup neuron indices and spike times
            net['aff_input'].set_spikes(ind_aff, spk_t_aff)
            net['lmi_input'].set_spikes(ind_lmi, spk_t_lmi)
            net.run(duration=meta_params['duration'])
            r = (net['state_mon'].v/mV).reshape(-1)  # voltage trace from this trial
            R_amp_.append(np.max(r) - r[0])  # response amplitude for this stimamp
            xtime = net['state_mon'].t/ms
            if len(y) != 0:
                dv = r[np.argmax(r)] - r[xtime > np.min(y)][0]
                dt = xtime[np.argmax(r)] - xtime[xtime > np.min(y)][0]
            else:
                dv = np.nan
                dt = np.nan
            dvdt_.append(dv/dt)
        return (np.asarray(R_amp_).T).flatten(), np.asarray(dvdt_).T

    def run_condition(sample_population):
        # repeat the simulation n_runs times, resampling the afferent population each run
        R_amp = []
        dvdt = []
        for _ in range(n_runs):
            R_amp_, dvdt_ = run_population(sample_population())
            R_amp.append(R_amp_)
            dvdt.append(dvdt_)
        return {'amp': np.asarray(R_amp).T, 'dvdt': np.asarray(dvdt).T}

    # homogeneous ISIs on empirical p(spike)
    homog_isi = run_condition(
        lambda: generate_y_fit_multispike_homogisi(meta_df, x, n_inputs, dataset, rv))
    # subsample afferents directly from the data
    subsamp = run_condition(
        lambda: get_afferents_subsampled(meta_df, n_inputs)[0])
    # heterogeneous tuning drawn from the multivariate gaussian (KDE) fit
    homog_pspike = run_condition(
        lambda: generate_y_fit_multispike_gauss(x, n_inputs, dataset, rv)[0])
    # fully homogeneous tuning (median parameters)
    homog_all = run_condition(
        lambda: generate_y_fit_multispike_homog(meta_df, x, n_inputs, dataset, rv)[0])
    return subsamp, homog_all, homog_isi, homog_pspike
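
# Hedged end-to-end sketch of how the pieces above are assumed to fit together.
# The store/restore path is hypothetical, meta_params is assumed to be the same dict
# returned by reset_namespace_defaults, and dataset/rv are rebuilt from the fits
# as in _demo_make_rv (run_4_model_conditions reads all of these as globals).
def _demo_run_4_conditions(spike_table):
    # spike_table: the afferent spike DataFrame referred to as meta_df throughout
    global namespace_sim, meta_params, sim_filepath, meta_df, dataset, rv
    meta_df = spike_table
    namespace_sim = reset_namespace_defaults()
    meta_params = namespace_sim
    sim_filepath = 'sim_state.b2'  # hypothetical path for net.store/net.restore
    meta_params_df, _ = exclude_HighThreshAff(meta_df)
    well_fit, _ = assess_fits(meta_params_df)
    dataset = well_fit[['stretch', 'tau', 'offset']].dropna().values
    rv = gaussian_kde(dataset.T)
    x = np.asarray([-40, -30, -20, -10, -5, 0, 5, 10, 20, 30, 40])
    return run_4_model_conditions(x, n_runs=namespace_sim['N_runs'], n_inputs=4)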