#!/usr/bin/env python
# coding: utf-8
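# GRC_anal_Istep.py
# For each experiment in cell_list, extract current-step (Istep) sweeps, measure the peak
# response ('lowgain' channel) at step onset and offset relative to the pre-step baseline,
# and append the per-sweep results to DF_Istep.csv.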
#%%
print('initializing packages')
import platform
import sys

os_name = platform.system()
if os_name == 'Darwin':
    sys.path.append('/Users/kperks/mnt/engram/scripts/Python/Analysis/')
if os_name == 'Linux':
    sys.path.append('/mnt/engram/scripts/Python/Analysis/')

from ClassDef_AmplitudeShift_Stable import AmpShift_Stable
import matplotlib.pyplot as plt
from pathlib import Path
import numpy as np
import pandas as pd
import seaborn as sns
from scipy import signal
import random
import matplotlib

matplotlib.rcParams.update({'font.size': 22})
#%%
# print('changing to data_processed folder and defining folders used in script')
# chdir('/Users/kperks/mnt/engram/spikedata/data_processed/')
exptpath = Path.cwd().resolve().parents[0]  # assumes the script is run from /data_processed
data_folder = exptpath / 'data_raw'
figure_folder = exptpath / 'data_processed' / 'Figures_GRC_properties'
df_folder = exptpath / 'data_processed' / 'df_GRC_properties'
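# cell_list maps each experiment name to the indices of the Istep bouts
# (as returned by expt.get_bout_win("I", "Keyboard")) to analyze for that cell.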
cell_list = {
    '20170206_003': [0,1,2],
    '20170502_002': [0],
    '20170912_005': [0],
    '20171010_002': [0],
    '20171010_005': [0],
    '20171011_001': [0],
    '20171027_000': [0],
    '20171031_004': [0],
    '20171107_002': [0],
    '20180103_001': [0],
    '20180108_004': [0],
    '20180122_001': [0],
    '20180122_002': [0],
    '20180130_000': [0],
    '20181213_002': [0],
    '20190107_003': [0],
    '20190227_001': [0],
    '20191218_005': [0],
    '20191218_007': [0],
    '20191218_009': [0],
    '20200113_003': [0],
    '20200113_004': [0],
    '20200225_000': [0],
    '20200226_002': [0,1],
    '20200312_002': [0]
}
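# win_list gives the [onset, offset] window of the current step within each sweep,
# in ms from the trial trigger.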
win_list = {
    '20170206_003': [50,250],
    '20170502_002': [50,250],
    '20170912_005': [50,250],
    '20171010_002': [50,250],
    '20171010_005': [50,250],
    '20171011_001': [50,250],
    '20171027_000': [50,250],
    '20171031_004': [50,250],
    '20171107_002': [50,250],
    '20180103_001': [5,55],
    '20180108_004': [5,55],
    '20180122_001': [5,55],
    '20180122_002': [5,55],
    '20180130_000': [5,55],
    '20181213_002': [30,80],
    '20190107_003': [30,80],
    '20190227_001': [30,42],
    '20191218_005': [30,130],
    '20191218_007': [30,130],
    '20191218_009': [30,130],
    '20200113_003': [30,130],
    '20200113_004': [30,130],
    '20200225_000': [30,130],
    '20200226_002': [30,130],
    '20200312_002': [5,55]
}
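# scale_factor is a per-experiment multiplier applied to the 'current' channel so that
# injected current is comparable across recordings (presumably a gain/unit correction).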
scale_factor = {
    '20170206_003': [10000],
    '20170502_002': [1],
    '20170912_005': [1],
    '20171010_002': [1],
    '20171010_005': [1],
    '20171011_001': [1],
    '20171027_000': [1],
    '20171031_004': [1],
    '20171107_002': [1],
    '20180103_001': [1],
    '20180108_004': [1],
    '20180122_001': [1],
    '20180122_002': [1],
    '20180130_000': [1],
    '20181213_002': [1],
    '20190107_003': [1],
    '20190227_001': [1],
    '20191218_005': [10000],
    '20191218_007': [10000],
    '20191218_009': [10000],
    '20200113_003': [10000],
    '20200113_004': [10000],
    '20200225_000': [10000],
    '20200226_002': [1],
    '20200312_002': [1]
}
# fig1 = plt.figure(figsize=(10,5));
# ax1_0 = fig1.add_axes([0.2,0.2,0.3,0.7])
# ax1_0.margins(0.05, tight=True)
# ax1_1 = fig1.add_axes([0.625,0.3,0.3,0.5])
# ax1_1.axis('equal')
# fig2 = plt.figure(figsize=(5,8));
# ax2_0 = fig2.add_axes([0.3,0.45,0.6,0.3])
# ax2_1 = fig2.add_axes([0.3,0.1,0.6,0.3])
sweepdur = 0.3  # duration (s) of each sweep extracted around a trial trigger
subwin = 25     # window (ms) after step onset / before step offset used for peak detection
meta_df = pd.read_csv('DF_Istep.csv', index_col=0)  # or pd.DataFrame() to start from an empty table
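# Process the first 14 experiments: extract Istep sweeps, measure onset/offset peaks per
# sweep, and append the per-trial results to the running meta_df / DF_Istep.csv.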
for exptname, val, win, scale in zip(list(cell_list.keys())[0:14],
                                     list(cell_list.values())[0:14],
                                     list(win_list.values())[0:14],
                                     list(scale_factor.values())[0:14]):
    # exptname = list(cell_list.keys())[exptind]
    # val = list(cell_list.values())[exptind]
    # ax1_0.cla()
    # ax1_1.cla()
    # ax2_0.cla()
    # ax2_1.cla()
    scale = scale[0]
    # set up expt object instance
    expt = AmpShift_Stable()
    expt.load_expt(exptname, data_folder)
    expt.set_channels('CmdTrig', 'lowgain', 'spikes', 'SIU', 'DigMark')
    marker_df = expt.get_marker_table()
    dt = expt.get_dt('lowgain')
    # get time windows of Isteps (bout = [[start,stop]]) from dictionary index
    bout = [expt.get_bout_win("I", "Keyboard")[i] for i in val]
    # use time windows to filter 'trigevt' events that start Istep trials
    event_chan = 'trigevt'
    # first get events
    events = expt.get_events(event_chan)
    trialT = expt.filter_events(events, bout)
    xtime_R, R = expt.get_sweepsmat('lowgain', trialT, sweepdur)
    xtime_I, I = expt.get_sweepsmat('current', trialT, sweepdur)
    dt_I = expt.get_dt('current')
    dt_R = expt.get_dt('lowgain')
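    # For each sweep (one column of I and R): injected current = mean during the step minus
    # the pre-step mean, scaled by scale_factor. The onset/offset peak is the extreme value
    # (min for hyperpolarizing steps, max for depolarizing steps) within the first/last
    # `subwin` ms of the step, relative to the pre-step median baseline.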
    # convert window edges (ms) and subwin to sample indices on each channel's time base
    on_I, off_I = int(win[0]/dt_I/1000), int(win[1]/dt_I/1000)
    on_R, off_R = int(win[0]/dt_R/1000), int(win[1]/dt_R/1000)
    sub_R = int(subwin/dt_R/1000)
    current_u = []
    onset_peak = []
    offset_peak = []
    for i, r in zip(I.T, R.T):
        curr_inj = scale * (np.mean(i[on_I:off_I]) - np.mean(i[0:on_I]))
        current_u.append(curr_inj)
        baseline = np.median(r[0:on_R])
        peak_fn = np.min if curr_inj < 0 else np.max
        onset_peak.append(peak_fn(r[on_R:on_R + sub_R]) - baseline)
        offset_peak.append(peak_fn(r[off_R - sub_R:off_R]) - baseline)
    trial_df = pd.DataFrame({
        'exptname': exptname,
        'current_inj': current_u,
        'onset_peak': onset_peak,
        'offset_peak': offset_peak
    })
    meta_df = pd.concat([meta_df, trial_df], sort=False, ignore_index=False)
    meta_df.to_csv('DF_Istep.csv')
    # ax1_0.scatter(np.asarray(current_u), onset_peak)
    # ax1_0.set_ylabel('onset_peak')
    # ax1_0.set_xlabel('current injected')
    # ax1_1.scatter(onset_peak, offset_peak)
    # lim = [np.min([np.min(onset_peak),np.min(offset_peak)]), np.max([np.max(onset_peak),np.max(offset_peak)])]
    # ax1_1.plot(lim, lim)
    # ax1_1.set_ylabel('offset_peak')
    # ax1_1.set_xlabel('onset_peak')
    # plt.figure(fig1.number)
    # plt.savefig(('Figures_GRC_properties/Istep/' + exptname + '_quantified.png'), format='png')
    # plt.savefig(('Figures_GRC_properties/Istep/' + exptname + '_quantified.eps'), format='eps', dpi=1200)
    # ax2_0.plot(xtime_R, R);
    # ax2_1.plot(xtime_I, I*scale);
    # plt.figure(fig2.number)
    # plt.savefig(('Figures_GRC_properties/Istep/' + exptname + '.png'), format='png')
    # plt.savefig(('Figures_GRC_properties/Istep/' + exptname + '.eps'), format='eps', dpi=1200)
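#%%
# Optional summary sketch: one way to visualize the accumulated DF_Istep.csv across all
# cells in a single onset_peak vs. current_inj scatter. Column names match those written
# by trial_df above; the output filename is illustrative only.
summary_df = pd.read_csv('DF_Istep.csv', index_col=0)
fig3, ax3 = plt.subplots(figsize=(10, 5))
sns.scatterplot(data=summary_df, x='current_inj', y='onset_peak', hue='exptname',
                ax=ax3, legend=False)
ax3.set_xlabel('current injected')
ax3.set_ylabel('onset peak')
fig3.savefig(figure_folder / 'Istep' / 'summary_onset_vs_current.png', format='png')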