Validation_stability.py

#!/usr/bin/env python
# coding: utf-8

# ### Link to the file with meta information on recordings

# In[14]:

#import matplotlib.pyplot as plt
#plt.rcParams["figure.figsize"] = (20,3)

database_path = '/media/andrey/My Passport/GIN/Anesthesia_CA1/meta_data/meta_recordings_transition_state.xlsx'

# ### Select the range of recordings for the analysis (see "Number" row in the meta data file)

# In[4]:

rec = [x for x in range(0,198+1)]
#rec = [127,128]

# In[1]:

import numpy as np
import numpy.ma as ma
import matplotlib.pyplot as plt
import matplotlib.ticker as ticker
import pandas as pd
import seaborn as sns
import pickle
import os

sns.set()
sns.set_style("whitegrid")

from scipy.signal import medfilt
from scipy.stats import skew, kurtosis, zscore
from scipy import signal
from sklearn.linear_model import LinearRegression, TheilSenRegressor

plt.rcParams['figure.figsize'] = [16, 8]

color_awake = (0,191/255,255/255)
color_mmf = (245/255,143/255,32/255)
color_keta = (181./255,34./255,48./255)
color_iso = (143./255,39./255,143./255)

custom_palette = {'keta':color_keta, 'iso':color_iso, 'mmf':color_mmf, 'awake':color_awake}

# In[2]:

from capipeline import *
# ### Run the analysis

# /media/andrey/My Passport/GIN/Anesthesia_CA1/validation/calcium_imaging

# It creates a data frame *df_estimators* that contains basic information regarding the stability of the recordings, such as
#
# - the total number of identified neurons,
# - the median intensities of the traces and neuropils for each ROI,
# - their standard deviations,
# - the skewness of the signal,
# - an estimation of their baseline (defined as the bottom quartile of signal intensities),
# - their temporal stability (defined as the ratio between the median signals of all ROIs in the first and the second halves of the recording; a minimal illustration follows below).
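# The snippet below is a minimal, self-contained sketch of the last two estimators
# (baseline quartile and first-half/second-half stability) on synthetic data; the
# `toy_*` names are made up for illustration only and are not part of the pipeline.
toy_traces = np.random.default_rng(0).normal(loc=100.0, scale=5.0, size=(10, 9000))  # 10 ROIs x 9000 frames
toy_baseline = np.quantile(toy_traces, 0.25, axis=1)             # baseline: bottom quartile per ROI
toy_half = toy_traces.shape[1] // 2
toy_stability = ma.median(toy_traces[:, toy_half:]) / ma.median(toy_traces[:, :toy_half]) * 100
print("toy stability (%):", toy_stability)                       # ~100 % for a stationary signal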
# In[5]:

'''
df_estimators = pd.DataFrame()

for r in rec:

    Traces, Npils, n_accepted_and_rejected = traces_and_npils(r, database_path, concatenation=False)
    print(Traces)
    print("Shape: " + str(Traces.shape[0]) + " N_accept_reject: " + str(n_accepted_and_rejected))

    animal = get_animal_from_recording(r, database_path)
    condition = get_condition(r, database_path)
    print("#" + str(r) + " " + str(animal) + " " + str(condition) + " ")

    Traces_median = ma.median(Traces, axis=1)
    Npils_median = ma.median(Npils, axis=1)
    Traces_std = ma.std(Traces, axis=1)
    Npils_std = ma.std(Npils, axis=1)
    Traces_skewness = skew(Traces, axis=1)
    Npils_skewness = skew(Npils, axis=1)

    baseline = np.quantile(Traces, 0.25, axis=1)

    num_cells = np.shape(Traces)[0]
    decay_isol = np.zeros((num_cells))
    fs = 30

    for neuron in np.arange(num_cells):
        if np.all(np.isnan(Traces[neuron])):
            decay_isol[neuron] = np.nan
        else:
            _, _, _, decay_neuron_isolated10, _ = deconvolve(np.double(Traces[neuron, ] + 100000),
                                                             penalty=0, optimize_g=10)
            # convert the estimated per-frame decay factor into a time constant in seconds (fs = 30 Hz)
            decay_isol[neuron] = - 1 / (fs * np.log(decay_neuron_isolated10))

    recording_length = int(Traces.shape[1])
    half = int(recording_length/2)
    print("Recording: " + str(r) + " Recording length: " + str(recording_length))

    m1 = ma.median(Traces[:,:half])
    m2 = ma.median(Traces[:,half:])
    print("Stability:", m2/m1*100)

    norm_9000 = 9000/recording_length  # normalize to 9000 frames (5 min recording)
    traces_median_half_vs_half = norm_9000*(m2-m1)*100/m1 + 100
    print("Stability (9000 frames normalization):", traces_median_half_vs_half)

    df_e = pd.DataFrame({"animal": animal,
                         "recording": r,
                         "condition": condition,
                         "number.neurons": Traces.shape[0],
                         "traces.median": Traces_median,
                         "npils.median": Npils_median,
                         "traces.std": Traces_std,
                         "npils.std": Npils_std,
                         "traces.skewness": Traces_skewness,
                         "npils.skewness": Npils_skewness,
                         "baseline.quantile.25": baseline,
                         "decay": decay_isol,
                         "median.stability": traces_median_half_vs_half  # in percent
                         })

    df_estimators = pd.concat([df_estimators, df_e])
    print("*****")

# ### Save the result of the analysis

# In[7]:

df_estimators.to_pickle("./transition_state_calcium_imaging_stability_validation.pkl")
'''
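# Worked example of the 9000-frame normalization used above (numbers are illustrative
# only): an 18000-frame recording with half-medians m1 = 100 and m2 = 98 gives
# norm_9000 = 9000/18000 = 0.5, so the reported stability is
# 0.5*(98-100)*100/100 + 100 = 99 %, i.e. the raw 2 % drop between the halves is
# rescaled to a 9000-frame (5 min) equivalent before being expressed relative to 100 %.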
# ### Load the result of the analysis

# In[8]:

df_estimators = pd.read_pickle("./transition_state_calcium_imaging_stability_validation.pkl")

df_estimators['neuronID'] = df_estimators.index
#df_estimators["animal"] = df_estimators["animal"]

print(np.unique(df_estimators["condition"]))

# collapse every condition that is not iso/keta/mmf into 'awake'
df_estimators["CONDITION"] = df_estimators["condition"]
df_estimators.loc[:,"CONDITION"] = 'awake'
df_estimators.loc[(df_estimators.condition == 'iso'),"CONDITION"] = 'iso'
df_estimators.loc[(df_estimators.condition == 'keta'),"CONDITION"] = 'keta'
df_estimators.loc[(df_estimators.condition == 'mmf'),"CONDITION"] = 'mmf'

print(np.unique(df_estimators["CONDITION"]))

# one x-axis category per animal within each condition, used by the boxplots below
df_estimators["multihue"] = df_estimators["CONDITION"] + df_estimators["animal"].astype("string")

print(np.unique(df_estimators["multihue"]))
print(np.unique(df_estimators["CONDITION"]))
# ### Plot

# In[9]:

parameters = ['number.neurons','traces.median','traces.skewness','decay','median.stability']
labels = ['Extracted \n ROIs','Median, \n A.U.','Skewness','Decay time, \n s','1st/2nd \n ratio, %']

number_subplots = len(parameters)

recordings_ranges = [[0,198]]

for rmin, rmax in recordings_ranges:

    f, axes = plt.subplots(number_subplots, 1, figsize=(8, 5))  # sharex=True
    #plt.subplots_adjust(left=None, bottom=0.1, right=None, top=0.9, wspace=None, hspace=0.2)
    #f.tight_layout()
    sns.despine(left=True)

    for i, param in enumerate(parameters):
        lw = 0.8
        #else:
        sns.boxplot(x='multihue', y=param,
                    data=df_estimators[(df_estimators.recording>=rmin)&(df_estimators.recording<=rmax)],
                    width=0.9, hue="CONDITION", palette=custom_palette, dodge=False,
                    showfliers=False, ax=axes[i], linewidth=lw)
        #if (i == 0):
        #    param = "animal"
        #    print(np.unique(df_estimators[param]))
        #    axes[i].set_yticks(np.unique(df_estimators[param]))
        #    sns.swarmplot(x='recording', y=param, data=df_estimators[(df_estimators.recording>=rmin)&(df_estimators.recording<=rmax)&(df_estimators['neuronID'] == 0)], dodge=False, s=1, edgecolor='black', linewidth=1, ax=axes[i])
        #ax.set(ylabel="")
        if i > 0:
            axes[i].set_ylim([0.0, 1000.0])
        if i > 1:
            axes[i].set_ylim([0.0, 10.0])
        if i > 2:
            axes[i].set_ylim([0.0, 1.0])
        if i > 3:
            axes[i].set_ylim([80, 120])
            axes[i].get_xaxis().set_visible(True)
        else:
            axes[i].get_xaxis().set_visible(False)
        if i < number_subplots-1:
            axes[i].xaxis.label.set_visible(False)
        #if i==0:
        #    axes[i].set_title("Validation: stability check (recordings #%d-#%d)" % (rmin,rmax), fontsize=9, pad=30) #45
        axes[i].set_ylabel(labels[i], fontsize=9, labelpad=5) #40
        #axes[i].set_xlabel("Recording", fontsize=5, labelpad=5) #40
        #axes[i].axis('off')
        axes[i].xaxis.set_tick_params(labelsize=9) #35
        axes[i].yaxis.set_tick_params(labelsize=9) #30
        axes[i].get_legend().remove()
        #axes[i].xaxis.set_major_locator(ticker.MultipleLocator(10))
        #axes[i].xaxis.set_major_formatter(ticker.ScalarFormatter())

    plt.xlabel('xlabel', fontsize=6)
    plt.xticks(rotation=90)
    #plt.legend(bbox_to_anchor=(1.01, 1), loc=2, borderaxespad=0., fontsize=25)

    plt.savefig("Validation_stability_check_rec_#%d-#%d).png" % (rmin,rmax), dpi=400)
    plt.savefig("Validation_stability_check_rec_#%d-#%d).svg" % (rmin,rmax))
    #plt.show()
# In[13]:

'''
sns.displot(data=df_estimators, x="median.stability", kind="kde", hue = "animal")
plt.xlim([80,120])
plt.xlabel("Stability, %", fontsize = 15)
plt.title("Validation: summary on stability (recordings #%d-#%d)" % (min(rec),max(rec)), fontsize = 20, pad=20)
plt.grid(False)
plt.savefig("Validation_summary_stability_recordings_#%d-#%d)" % (min(rec),max(rec)))
#plt.show()

# In[62]:
'''