
gin commit from andrey-UX510UXK

New files: 16
Modified files: 12
Deleted files: 8
Andrey Formozov committed 3 years ago
commit a2ac046002
36 changed files with 42416 additions and 2870 deletions
  1. BIN
      validation/calcium_imaging/FOV_Validation_37527.png
  2. BIN
      validation/calcium_imaging/FOV_Validation_37528.png
  3. BIN
      validation/calcium_imaging/FOV_Validation_37529.png
  4. BIN
      validation/calcium_imaging/FOV_Validation_37530.png
  5. BIN
      validation/calcium_imaging/FOV_Validation_48.png
  6. BIN
      validation/calcium_imaging/FOV_Validation_51.png
  7. BIN
      validation/calcium_imaging/FOV_Validation_53.png
  8. BIN
      validation/calcium_imaging/Validation_stability_check_rec_#0-#188).png
  9. 39817 0
      validation/calcium_imaging/Validation_stability_check_rec_#0-#188).svg
  10. BIN
      validation/calcium_imaging/Validation_stability_check_rec_#0-#31).png
  11. BIN
      validation/calcium_imaging/Validation_stability_check_rec_#134-#160).png
  12. BIN
      validation/calcium_imaging/Validation_stability_check_rec_#161-#188).png
  13. BIN
      validation/calcium_imaging/Validation_stability_check_rec_#32-#55).png
  14. BIN
      validation/calcium_imaging/Validation_stability_check_rec_#56-#95).png
  15. BIN
      validation/calcium_imaging/Validation_stability_check_rec_#96-#133).png
  16. BIN
      validation/calcium_imaging/Validation_summary_stability_recordings_#0-#188).png
  17. BIN
      validation/calcium_imaging/calcium_imaging_stability_validation.pkl
  18. BIN
      validation/calcium_imaging/cmap.png
  19. 0 2870
      validation/calcium_imaging/registration_logs.txt
  20. BIN
      validation/calcium_imaging_transition_state/FOV_Validation_8237.png
  21. BIN
      validation/calcium_imaging_transition_state/FOV_Validation_TS_C_ISO_8237.png
  22. BIN
      validation/calcium_imaging_transition_state/StackReg/8237_best_methods.npy
  23. BIN
      validation/calcium_imaging_transition_state/StackReg/8237_best_score.npy
  24. BIN
      validation/calcium_imaging_transition_state/StackReg/8237_best_tmats.npy
  25. 457 0
      validation/calcium_imaging_transition_state/Validation_FOV_alignment.py
  26. 252 0
      validation/calcium_imaging_transition_state/Validation_stability.py
  27. BIN
      validation/calcium_imaging_transition_state/__pycache__/capipeline.cpython-38.pyc
  28. 485 0
      validation/calcium_imaging_transition_state/capipeline.py
  29. 1405 0
      validation/calcium_imaging_transition_state/registration_logs.txt
  30. BIN
      validation/calcium_imaging_transition_state/sleep_data_calcium_imaging_stability_validation.pkl
  31. BIN
      validation/calcium_imaging_transition_state/transition_state_calcium_imaging_stability_validation.pkl
  32. BIN
      validation/sleep_data_calcium_imaging/FOV_Validation_8235.png
  33. BIN
      validation/sleep_data_calcium_imaging/FOV_Validation_8237.png
  34. BIN
      validation/sleep_data_calcium_imaging/FOV_Validation_8238.png
  35. BIN
      validation/sleep_data_calcium_imaging/Validation_stability_check_rec_#0-#74).png
  36. BIN
      validation/sleep_data_calcium_imaging/Validation_summary_stability_recordings_#0-#74).png

BIN
validation/calcium_imaging/FOV_Validation_37527.png


BIN
validation/calcium_imaging/FOV_Validation_37528.png


BIN
validation/calcium_imaging/FOV_Validation_37529.png


BIN
validation/calcium_imaging/FOV_Validation_37530.png


BIN
validation/calcium_imaging/FOV_Validation_48.png


BIN
validation/calcium_imaging/FOV_Validation_51.png


BIN
validation/calcium_imaging/FOV_Validation_53.png


BIN
validation/calcium_imaging/Validation_stability_check_rec_#0-#188).png


File diff suppressed because it is too large
+ 39817 - 0
validation/calcium_imaging/Validation_stability_check_rec_#0-#188).svg


BIN
validation/calcium_imaging/Validation_stability_check_rec_#0-#31).png


BIN
validation/calcium_imaging/Validation_stability_check_rec_#134-#160).png


BIN
validation/calcium_imaging/Validation_stability_check_rec_#161-#188).png


BIN
validation/calcium_imaging/Validation_stability_check_rec_#32-#55).png


BIN
validation/calcium_imaging/Validation_stability_check_rec_#56-#95).png


BIN
validation/calcium_imaging/Validation_stability_check_rec_#96-#133).png


BIN
validation/calcium_imaging/Validation_summary_stability_recordings_#0-#188).png


BIN
validation/calcium_imaging/calcium_imaging_stability_validation.pkl


BIN
validation/calcium_imaging/cmap.png


File diff suppressed because it is too large
+ 0 - 2870
validation/calcium_imaging/registration_logs.txt


BIN
validation/calcium_imaging_transition_state/FOV_Validation_8237.png


BIN
validation/calcium_imaging_transition_state/FOV_Validation_TS_C_ISO_8237.png


BIN
validation/calcium_imaging_transition_state/StackReg/8237_best_methods.npy


BIN
validation/calcium_imaging_transition_state/StackReg/8237_best_score.npy


BIN
validation/calcium_imaging_transition_state/StackReg/8237_best_tmats.npy


+ 457 - 0
validation/calcium_imaging_transition_state/Validation_FOV_alignment.py

@@ -0,0 +1,457 @@
+#!/usr/bin/env python
+# coding: utf-8
+
+# ### Define paths and animals for the analysis
+
+# In[1]:
+
+
+path = '/media/andrey/My Passport/GIN/Anesthesia_CA1/meta_data/meta_recordings_transition_state.xlsx'
+path4results = '/media/andrey/My Passport/GIN/Anesthesia_CA1/validation/calcium_imaging_transition_state/' #To store transformation matrix
+save_plots_path = '/media/andrey/My Passport/GIN/Anesthesia_CA1/validation/calcium_imaging_transition_state/'
+log_file_path = save_plots_path + 'registration_logs.txt'
+
+animals_for_analysis = [8237]
+
+# ### Align FOVs for all recordings
+
+# In[2]:
+
+
+repeat_calc = 1
+silent_mode = False
+
+#######################
+
+import pandas as pd
+import numpy as np
+import os
+
+import matplotlib.pyplot as plt
+
+import sys
+np.set_printoptions(threshold=sys.maxsize)
+
+from pystackreg import StackReg
+
+# Sobel filter (not used)
+#from scipy import ndimage #https://docs.scipy.org/doc/scipy/reference/generated/scipy.ndimage.sobel.html
+
+meta_data = pd.read_excel(path)
+
+#%% compute transformation matrices between recordings
+
+recordings = meta_data['Number']
+
+animals = animals_for_analysis
+
+#print("Recordings: ", recordings)
+
+#ALIGNMENT ALGORITHM
+
+# log file
+f = open(log_file_path, "a")
+
+print("n, i, j, rigid_mean_enh, rigid_mean, affine_mean_enh, affine_mean, best_method ", file=f)
+
+if (silent_mode!=True):
+    print(" RMSD's: (rigid, mean_enh) | (rigid, mean) | (affine, mean_enh) | (affine, mean) | best method ")
+
+for animal in animals:
+    
+    if (silent_mode!=True):
+        print("Animal #", str(animal))
+    
+    
+    if not os.path.exists(path4results + 'StackReg/' +
+                          str(animal) + '.npy') or repeat_calc == 1:
+#    if not os.path.exists('Q:/Personal/Mattia/Calcium Imaging/results/StackRegEnhImage/' +
+#                          str(animal) + '.npy') or repeat_calc == 1:
+#    if not os.path.exists('Q:/Personal/Mattia/Calcium Imaging/results/StackReg/' +
+#                          str(animal) + '.npy') or repeat_calc == 1:
+       
+        meta_animal = meta_data[meta_data['Mouse'] == animal]
+        recordings = meta_animal['Number']
+        
+        images_mean = np.zeros((512, 512, np.shape(recordings)[0]))
+        images_mean_enh = np.zeros((512, 512, np.shape(recordings)[0]))
+
+        images_quality_check = np.zeros((512, 512, np.shape(recordings)[0]))
+
+        best_tmats = np.zeros((np.shape(recordings)[0], np.shape(recordings)[0], 3, 3))
+        best_score = np.zeros((np.shape(recordings)[0], np.shape(recordings)[0]))
+        best_methods =  np.zeros((np.shape(recordings)[0], np.shape(recordings)[0]))
+
+
+        tmats_affine = np.zeros((np.shape(recordings)[0], np.shape(recordings)[0], 3, 3))
+        tmats_rigid = np.zeros((np.shape(recordings)[0], np.shape(recordings)[0], 3, 3))
+        tmats_affine_enh = np.zeros((np.shape(recordings)[0], np.shape(recordings)[0], 3, 3))
+        tmats_rigid_enh = np.zeros((np.shape(recordings)[0], np.shape(recordings)[0], 3, 3))
+
+        # load all (enhanced) images
+        for idx, recording in enumerate(recordings):
+            print(recording)
+            options = np.load(str(meta_data['Folder'][recording]) + 
+                              str(meta_data['Subfolder'][recording]) +
+                             # str(int(meta_data['Recording idx'][recording])) +
+                              '/suite2p/plane0/ops.npy',
+                              allow_pickle=True)
+        # mean image or mean enhanced image
+            images_mean[:, :, idx] = options.item(0)['meanImg']
+            images_mean_enh[:, :, idx] = options.item(0)['meanImgE']
+            #cut_boarders=50
+            #quality check
+            images_quality_check[:, :, idx] = options.item(0)['meanImg']
+
+
+        # loop through every pair and compute the transformation matrix
+        
+        #conditions = [meta_data['Condition'][recording] for recording in recordings]
+        
+        for idx0 in range(np.shape(images_mean)[2]):
+            #if (idx0!=14):
+                    #continue
+            
+            for idx1 in range(idx0, np.shape(images_mean)[2]):
+                #if (idx1!=16):
+                    #continue
+           
+                      
+                fraction_of_non_zero_pixels = [0.0,0.0,0.0,0.0]
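+                # note: despite the name, each entry is the fraction of *near-zero*
+                # pixels (< 0.001) that a warp introduces; slots map to
+                # [rigid_mean_enh, rigid_mean, affine_mean_enh, affine_mean],
+                # and 262144 = 512 * 512 pixels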
+                
+### MEAN RIGID and AFFINE
+
+                reference_image = images_mean[:, :, idx0]
+                initial_image = images_mean[:, :, idx1]
+
+                #sx = ndimage.sobel(reference_image, axis=0, mode='constant')
+                #sy = ndimage.sobel(reference_image, axis=1, mode='constant')
+                #reference_image = np.hypot(sx, sy)
+
+                #sx = ndimage.sobel(initial_image, axis=0, mode='constant')
+                #sy = ndimage.sobel(initial_image, axis=1, mode='constant')
+                #initial_image = np.hypot(sx, sy)
+
+                boarder_cut = 100
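+                # a 100-px border is excluded from the RMSD so that padding
+                # introduced by the warp does not dominate the score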
+                sr = StackReg(StackReg.AFFINE)
+                tmats_affine[idx0, idx1, :, :] = sr.register(reference_image, initial_image)
+                
+                image_transformed =  sr.transform(images_quality_check[:, :, idx1], tmats_affine[idx0, idx1, :, :])
+                image_difference = images_quality_check[:, :, idx0] -  image_transformed
+                fraction_of_non_zero_pixels[3] = np.count_nonzero(image_transformed[:,:]<0.001)/262144
+                #plt.imshow(image_transformed)
+                #plt.show()
+                image_difference = image_difference[boarder_cut:-boarder_cut, boarder_cut:-boarder_cut]
+                image_difference = np.square(image_difference)
+                rmsd_affine = np.sqrt(image_difference.sum()/(512 - 2 * boarder_cut)**2)
+                if (silent_mode!=True):
+                    print("Fraction of non-zero pixels in 3 (mean affine): ", fraction_of_non_zero_pixels[3]," Score:",rmsd_affine)
+
+                sr = StackReg(StackReg.RIGID_BODY)
+                tmats_rigid[idx0, idx1, :, :] = sr.register(reference_image, initial_image)
+                image_transformed = sr.transform(images_quality_check[:, :, idx1], tmats_rigid[idx0, idx1, :, :])
+                image_difference = images_quality_check[:, :, idx0] - image_transformed
+                fraction_of_non_zero_pixels[1] = np.count_nonzero(image_transformed[:,:]<0.001)/262144
+
+                #plt.imshow(image_transformed)
+                #plt.show()
+                image_difference = image_difference[boarder_cut:-boarder_cut, boarder_cut:-boarder_cut]
+                image_difference = np.square(image_difference)
+                rmsd_rigid = np.sqrt(image_difference.sum()/(512 - 2 * boarder_cut)**2)
+                if (silent_mode!=True):
+                    print("Fraction of non-zero pixels in 1 (mean rigid): ", fraction_of_non_zero_pixels[1], "Score", rmsd_rigid)
+
+                #plt.imshow(image_difference)
+
+
+### MEAN_ENH RIGID and AFFINE
+
+                reference_image = images_mean_enh[:, :, idx0]
+                initial_image = images_mean_enh[:, :, idx1]
+
+                # sx = ndimage.sobel(reference_image, axis=0, mode='constant')
+                # sy = ndimage.sobel(reference_image, axis=1, mode='constant')
+                # reference_image = np.hypot(sx, sy)
+
+                # sx = ndimage.sobel(initial_image, axis=0, mode='constant')
+                # sy = ndimage.sobel(initial_image, axis=1, mode='constant')
+                # initial_image = np.hypot(sx, sy)
+
+                boarder_cut = 100
+                sr = StackReg(StackReg.AFFINE)
+                tmats_affine_enh[idx0, idx1, :, :] = sr.register(reference_image, initial_image)
+
+                image_transformed = sr.transform(images_quality_check[:, :, idx1], tmats_affine_enh[idx0, idx1, :, :])
+                image_difference = images_quality_check[:, :, idx0] - image_transformed  #TODO: delete image quality check! replace it with meanimage 
+                fraction_of_non_zero_pixels[2] = np.count_nonzero(image_transformed[:,:]<0.001)/262144
+
+                #plt.imshow(image_transformed)
+                #plt.show()
+                image_difference = image_difference[boarder_cut:-boarder_cut, boarder_cut:-boarder_cut]
+                image_difference = np.square(image_difference)
+                rmsd_affine_enh = np.sqrt(image_difference.sum()/(512 - 2 * boarder_cut)**2)
+                if (silent_mode!=True):
+                    print("Fraction of non-zero pixels in 2 (mean enh affine): ", fraction_of_non_zero_pixels[2],"Score:", rmsd_affine_enh)
+
+                sr = StackReg(StackReg.RIGID_BODY)
+                tmats_rigid_enh[idx0, idx1, :, :] = sr.register(reference_image, initial_image)
+                image_transformed =  sr.transform(images_quality_check[:, :, idx1], tmats_rigid_enh[idx0, idx1, :, :])
+                image_difference = images_quality_check[:, :, idx0] - image_transformed
+                fraction_of_non_zero_pixels[0] = np.count_nonzero(image_transformed[:,:]<0.001)/262144
+
+                #plt.imshow(image_transformed)
+                #plt.show()
+                image_difference = image_difference[boarder_cut:-boarder_cut, boarder_cut:-boarder_cut]
+                image_difference = np.square(image_difference)
+                rmsd_rigid_enh = np.sqrt(image_difference.sum()/(512 - 2 * boarder_cut)**2)
+                if (silent_mode!=True):
+                    print("Fraction of non-zero pixels in 0 (mean enh rigid): ", fraction_of_non_zero_pixels[0],"Score", rmsd_rigid_enh)
+
+                rmsds=[rmsd_rigid_enh,rmsd_rigid,rmsd_affine_enh,rmsd_affine]
+                tmatss=[tmats_rigid_enh[idx0, idx1, :, :],tmats_rigid[idx0, idx1, :, :],tmats_affine_enh[idx0, idx1, :, :],tmats_affine[idx0, idx1, :, :]]
+                methods=["rigid_mean_enh", "rigid_mean" ,"affine_mean_enh","affine_mean"]
+                #print(tmats_rigid_enh,tmats_rigid,tmats_affine_enh,tmats_affine)
+                #print(" ")
+                #best_method_idx = rmsds.index(min(rmsds))
+                #smaller_fraction_idx = fraction_of_non_zero_pixels.index(min(fraction_of_non_zero_pixels))
+                #smaller_fraction_idx = 1
+                #print(best_method_idx)
+                #print(smaller_fraction_idx)
+                
+                list_of_methods=np.argsort(rmsds)
+                
+                best_score[idx1, idx0] = np.sort(rmsds)[0]
+                best_score[idx0, idx1] = np.sort(rmsds)[0]
+                
+                the_best_idx = list_of_methods[0]
+
+                
+                if (fraction_of_non_zero_pixels[list_of_methods[0]] > 0.1):
+                    print("Warning: alignment with the best method failed. The second-best method is applied")
+                    the_best_idx = list_of_methods[1]
+                    if (fraction_of_non_zero_pixels[list_of_methods[1]] > 0.1):
+                        print("Warning: alignment with the second-best method failed. The third-best method is applied")
+                        the_best_idx = list_of_methods[2]
+                    
+                best_method = methods[the_best_idx]
+                best_tmats[idx0, idx1, :, :]=tmatss[the_best_idx]
+                best_methods[idx1, idx0]=the_best_idx
+                best_methods[idx0, idx1]=the_best_idx
+            
+                
+                best_tmats[idx1, idx0, :, :]=np.linalg.inv(best_tmats[idx0, idx1, :, :])
+
+
+                if(idx0==idx1):
+                    best_method="-,-"
+                
+                if (silent_mode!=True):
+                    print("{0:d}, {1:d},  {2:4.4f}, {3:4.4f}, {4:4.4f}, {5:4.4f}, {6:s}".format(idx0, idx1, rmsd_rigid_enh, rmsd_rigid, rmsd_affine_enh, rmsd_affine, best_method))
+                
+                print("{0:d}, {1:d},  {2:4.4f}, {3:4.4f}, {4:4.4f}, {5:4.4f}, {6:s}".format(idx0, idx1, rmsd_rigid_enh, rmsd_rigid,
+                                                                        rmsd_affine_enh, rmsd_affine, best_method), file=f)
+
+                #print(" " + str(idx0) + "-" + str(idx1) + " " + str(rmsd_rigid_enh) + " " + str(rmsd_rigid) + " " + str(rmsd_affine_enh) + " " +  str(rmsd_affine))
+                # plt.imshow(image_difference)
+
+                #plt.savefig(save_plots_path + "StackRegVisualInspection/" + file_title + "_d_reference_m_corrected.png")
+
+                #print(str(idx0) + '-' + str(idx1))
+        # save all the transformation matrices
+        if not os.path.exists(path4results+'StackReg'):
+            os.makedirs(path4results+'StackReg')
+        #print(best_tmats)
+        np.save(path4results+'StackReg/' + str(animal) + "_best_tmats", best_tmats)
+        np.save(path4results+'StackReg/' + str(animal) + "_best_methods", best_methods)
+        np.save(path4results+'StackReg/' + str(animal) + "_best_score", best_score)
+
+#        if not os.path.exists('Q:/Personal/Mattia/Calcium Imaging/results/StackRegEnhImage'):
+#            os.makedirs('Q:/Personal/Mattia/Calcium Imaging/results/StackRegEnhImage')
+#        np.save('Q:/Personal/Mattia/Calcium Imaging/results/StackRegEnhImage/' + str(animal), tmats)
+#        if not os.path.exists(save_plots_path+ 'StackRegAffine'):
+#            os.makedirs(save_plots_path + 'StackRegAffine')
+#        np.save(save_plots_path+ 'StackRegAffine/' + str(animal), tmats)
+f.close()
+
+# ### Install package for image comparison (similarity index)
+
+# In[79]:
+
+
+#get_ipython().system('conda install -c conda-forge imagehash --yes')
+#!pip install ImageHash # as alternative if anaconda is not installed
+
+
+# In[33]:
+
+from PIL import Image
+import imagehash
+
+
+for animal in animals:
+    print("Animal #", str(animal))
+    
+    meta_animal = meta_data[meta_data['Mouse'] == animal]
+    recordings = meta_animal['Number']
+    index_similarity = np.zeros((np.shape(recordings)[0], np.shape(recordings)[0]))
+    
+    cut = 100
+       
+    images_mean = np.zeros((512, 512, np.shape(recordings)[0]))
+    images_mean_enh = np.zeros((512, 512, np.shape(recordings)[0]))
+
+        # load all (enhanced) images
+    for idx, recording in enumerate(recordings):
+        options = np.load(str(meta_data['Folder'][recording]) + 
+                              str(meta_data['Subfolder'][recording]) +
+                              #str(int(meta_data['Recording idx'][recording])) +
+                              '/suite2p/plane0/ops.npy',
+                              allow_pickle=True)
+        # mean image or mean enhanced image
+        images_mean[:, :, idx] = options.item(0)['meanImg']
+        images_mean_enh[:, :, idx] = options.item(0)['meanImgE']
+ 
+
+    for idx0 in range(np.shape(images_mean)[2]):            
+        for idx1 in range(idx0, np.shape(images_mean)[2]):
+            hash1=imagehash.average_hash(Image.fromarray(images_mean[cut:-cut, cut:-cut, idx0]))
+            otherhash=imagehash.average_hash(Image.fromarray(images_mean[cut:-cut, cut:-cut, idx1]))
+            index_similarity[idx0,idx1] = (hash1 - otherhash)
+            index_similarity[idx1,idx0] = (hash1 - otherhash)
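+            # the difference of two ImageHash objects is the Hamming distance
+            # between their 64-bit average hashes (0 = identical FOVs)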
+    #index_similarity = (np.max(index_similarity)-index_similarity)/np.max(index_similarity)    
+    
+    best_tmats = np.load(path4results+'StackReg/' + str(animal) + "_best_tmats.npy")
+    best_score = np.load(path4results+'StackReg/' + str(animal) + "_best_score.npy")
+    
+    metric_best_tmats = np.abs(best_tmats)
+    metric_best_tmats = metric_best_tmats.sum(axis=(2,3))
+    metric_best_tmats = np.max(metric_best_tmats) - metric_best_tmats
+    
+    metric_best_score = (np.max(best_score)-best_score)/np.max(best_score)
+    #plt.xticks(np.arange(0, np.shape(images_mean)[2], 1));
+    #plt.yticks(np.arange(0, np.shape(images_mean)[2], 1));
+    
+    fig, ax = plt.subplots(1,3, figsize=(21, 7))
+
+    index_similarity[index_similarity > 30] = 30 
+    image = ax[0].imshow(index_similarity,cmap='viridis_r') #,cmap=cmap, norm=norm
+    cbar = fig.colorbar(image, ax=ax[0], orientation='horizontal', fraction=.1)
+    
+    ax[0].set_title("Similarity index", fontsize = 15)
+    ax[1].imshow(metric_best_tmats)
+    ax[1].set_title("Displacement index", fontsize = 15)
+    ax[2].imshow(metric_best_score)
+    ax[2].set_title("Distance function", fontsize = 15)
+    fig.suptitle('Animal #' + str(animal), fontsize = 20)
+    
+    plt.savefig("./FOV_Validation_" + str(animal) + ".png")
+
+
+# ### Plot all corrected FOVs for comparison (optional)
+# 
+# **Running this takes a considerable amount of time!**
+
+# In[23]:
+
+
+for animal in animals:
+
+    tmats_loaded = np.load(path4results + 'StackReg/' + str(animal) + "_best_tmats" + '.npy')
+    meta_animal = meta_data[meta_data['Mouse'] == animal]
+    recordings = meta_animal['Number']
+    images = np.zeros((512, 512, np.shape(meta_animal)[0]))
+    
+    images_mean = np.zeros((512, 512, np.shape(recordings)[0]))
+    images_mean_enh = np.zeros((512, 512, np.shape(recordings)[0])) 
+    
+        # load all (enhanced) images
+    for idx, recording in enumerate(recordings):
+        options = np.load(meta_data['Folder'][recording] + 
+                              meta_data['Subfolder'][recording] +
+                              str(int(meta_data['Recording idx'][recording])) +
+                              '/suite2p/plane0/ops.npy',
+                              allow_pickle=True)
+        # mean image or mean enhanced image
+        images_mean[:, :, idx] = options.item(0)['meanImg']
+        images_mean_enh[:, :, idx] = options.item(0)['meanImgE']
+            #cut_boarders=50
+            #quality check
+        #images_quality_check[:, :, idx] = options.item(0)['meanImg']
+
+
+        # loop through every pair and compute the transformation matrix
+        
+    conditions = [meta_data['Condition'][recording] for recording in recordings]
+    recording_idx = [meta_data['Recording idx'][recording] for recording in recordings]
+
+    for idx0 in range(np.shape(images_mean)[2]):
+        #if (idx0!=14):
+            #continue
+        for idx1 in range(idx0, np.shape(images_mean)[2]):
+            #if (idx1!=16):
+                #continue
+           
+            reference_image = images_mean_enh[:, :, idx0]
+            initial_image = images_mean_enh[:, :, idx1] 
+    
+            if not os.path.exists(save_plots_path + 'StackRegVisualInspection/'):
+                os.makedirs(save_plots_path + 'StackRegVisualInspection/')
+            if not os.path.exists(save_plots_path + 'StackRegVisualInspection/'  + str(animal) + '/'):
+                os.makedirs(save_plots_path + 'StackRegVisualInspection/'  + str(animal) + '/')
+
+ 
+            plt.imshow(reference_image)
+
+                # image_title = meta_data['Subfolder'][recording][:-1] + str(meta_data['Recording idx'][recording]) + "\n" + "_condition_" + \
+                #    meta_data['Condition'][recording]
+                # plt.title(image_title)
+                #file_title = meta_data['Subfolder'][recording][:-1] + str(
+                 #   meta_data['Recording idx'][recording]) + "_condition_" + \
+                 #            meta_data['Condition'][recording] + "_" + str(idx0) + "_" + str(idx1)
+        
+            file_title = str(str(idx0) + '_' + str(idx1) + '_' + conditions[idx0]) + '_' + str(recording_idx[idx0]) + '_' + str(conditions[idx1]) + "_" + str(recording_idx[idx1]) 
+            print(file_title)
+                
+            plt.savefig(save_plots_path + "StackRegVisualInspection/"  + str(animal) + '/' + file_title + "_b_reference.png")
+                #sx = ndimage.sobel(images[:, :, idx1], axis=0, mode='constant')
+                #sy = ndimage.sobel(images[:, :, idx1], axis=1, mode='constant')
+                #sob = np.hypot(sx, sy)
+                #plt.imshow(sob)
+            plt.imshow(initial_image)
+            plt.savefig(save_plots_path + "StackRegVisualInspection/"  + str(animal) + '/' + file_title + "_a_initial.png")
+                #grad = np.gradient(images[:, :, idx1])
+
+                #print(images[50:55, 50:55, idx1].shape)
+                #grad = np.gradient(images[50:55, 50:55, idx1])
+                #print(images[50:55, 50:55, idx1])
+                #print(" ")
+                #print(grad)
+
+                #image_inversed = sr.transform(reference_image, best_tmats[idx1, idx0, :, :])
+                #plt.imshow(image_inversed)
+                #plt.savefig(save_plots_path + "StackRegVisualInspection/"  + str(animal) + '/' + file_title + "_1_inversed.png")
+
+                #sx = ndimage.sobel(images[:, :, idx1], axis=0, mode='constant')
+                #sy = ndimage.sobel(images[:, :, idx1], axis=1, mode='constant')
+                #sob = np.hypot(sx, sy)
+                #plt.imshow(images[:, :, idx1])
+                #plt.savefig(save_plots_path + "StackRegVisualInspection/" + file_title + "_sobel.png")
+            # instantiate StackReg locally rather than relying on the 'sr' left over
+            # from the alignment cell; the loaded matrix fully defines the mapping
+            sr = StackReg(StackReg.RIGID_BODY)
+            image_corrected = sr.transform(initial_image, tmats_loaded[idx0, idx1, :, :])
+            plt.imshow(image_corrected)
+            plt.savefig(save_plots_path + "StackRegVisualInspection/"  + str(animal) + '/' + file_title + "_c_corrected.png")
+                #image_difference = images_quality_check[:, :, idx0] - sr.transform(images_quality_check[:, :, idx1], best_tmats[idx0, idx1, :, :])
+                #image_difference = reference_image - image_corrected
+                #plt.imshow(image_difference)
+                #plt.savefig(save_plots_path + "StackRegVisualInspection/"  + str(animal) + '/' + file_title + "_d_reference_m_corrected.png")
+
+
+# In[9]:
+
+
+###TODO Seaborn Scatterplot heatmap could be a nice option for plotting
+
+
+# In[ ]:
+
+
+

+ 252 - 0
validation/calcium_imaging_transition_state/Validation_stability.py

@@ -0,0 +1,252 @@
+#!/usr/bin/env python
+# coding: utf-8
+
+# ### Path to the file with meta information on recordings
+
+# In[14]:
+
+#import matplotlib.pyplot as plt
+#plt.rcParams["figure.figsize"] = (20,3)
+
+
+database_path = '/media/andrey/My Passport/GIN/Anesthesia_CA1/meta_data/meta_recordings_transition_state.xlsx'
+
+
+# ### Select the range of recordings for the analysis (see "Number" row in the meta data file)
+
+# In[4]:
+
+
+#rec = [x for x in range(0,171+1)]
+rec = [1,2]
+
+# In[1]:
+
+
+import numpy as np
+
+import numpy.ma as ma
+
+import matplotlib.pyplot as plt
+import matplotlib.ticker as ticker
+
+import pandas as pd
+import seaborn as sns
+import pickle
+import os
+
+sns.set()
+sns.set_style("whitegrid")
+
+from scipy.signal import medfilt 
+
+from scipy.stats import skew, kurtosis, zscore
+
+from scipy import signal
+
+from sklearn.linear_model import LinearRegression, TheilSenRegressor
+
+plt.rcParams['figure.figsize'] = [16, 8]
+
+color_awake = (0,191/255,255/255)
+color_mmf = (245/255,143/255,32/255)
+color_keta = (181./255,34./255,48./255)
+color_iso = (143./255,39./255,143./255)
+
+custom_palette ={'keta':color_keta, 'iso':color_iso,'fenta':color_mmf,'awa':color_awake}
+
+
+# In[2]:
+
+
+from capipeline import *
+
+
+# ### Run the analysis
+# 
+# It creates a data frame *df_estimators* that contains basic information on the stability of the recordings, such as:
+# 
+# - the total number of identified neurons,
+# - the median trace and neuropil intensities for each ROI,
+# - their standard deviations,
+# - the skewness of the signal,
+# - an estimate of their baseline (defined as the bottom quartile of signal intensities),
+# - their temporal stability (defined as the ratio between the median signals of all ROIs in the first and second halves of the recording; see the sketch below).
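+# 
+# A minimal sketch of the half-vs-half stability estimate computed below, on toy
+# data (the 9000-frame normalization assumes 30 fps, i.e. 9000 frames = 5 min):
+# 
+#     import numpy as np
+#     trace = np.random.rand(18000) + 10          # one toy ROI trace
+#     half = trace.size // 2
+#     m1, m2 = np.median(trace[:half]), np.median(trace[half:])
+#     stability = (9000 / trace.size) * (m2 - m1) * 100 / m1 + 100   # ~100% if stable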
+
+# In[5]:
+
+
+df_estimators = pd.DataFrame()
+
+
+for r in rec:
+    
+    Traces, Npils, n_accepted_and_rejected = traces_and_npils(r, database_path, concatenation=False)
+    
+    animal = get_animal_from_recording(r, database_path)
+        
+    condition = get_condition(r, database_path)
+    
+    print("#" +  str(r) + " " + str(animal) + " " + str(condition) + " ")
+    
+    Traces_median = ma.median(Traces, axis=1) 
+    Npils_median = ma.median(Npils, axis=1)
+    
+    Traces_std = ma.std(Traces, axis=1)
+    Npils_std = ma.std(Npils, axis=1)
+    
+    Traces_skewness = skew(Traces,axis=1)
+    Npils_skewness = skew(Npils,axis=1)
+    
+    baseline = np.quantile(Traces,0.25,axis=1)
+    
+    recording_length = int(Traces.shape[1])
+    
+    half = int(recording_length/2)
+    
+    print(recording_length)
+    m1 = ma.median(Traces[:,:half])
+    m2 = ma.median(Traces[:,half:])
+
+    print("Stability:",m2/m1*100)
+
+    norm_9000 = 9000/recording_length   # normalize to 9000 frames (5 min recording)
+    
+    traces_median_half_vs_half = norm_9000*(m2-m1)*100/m1 + 100
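+    # 100% means identical medians in both halves; values below 100% indicate decay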
+
+    print("Stability (9000 frames normalization)",traces_median_half_vs_half)
+
+    df_e = pd.DataFrame({ "animal":animal,
+                        "recording":r,
+                        "condition":condition,
+                        "number.neurons":n_accepted_and_rejected,
+                        "traces.median":Traces_median,
+                        "npils.median":Npils_median,
+                        "traces.std":Traces_std,
+                        "npils.std":Npils_std,
+    
+                        "traces.skewness":Traces_skewness,
+                        "npils.skewness":Npils_skewness,
+     
+                        "baseline.quantile.25":baseline,
+                         
+                        "median.stability":traces_median_half_vs_half # in percent
+
+                      })
+        
+    df_estimators = pd.concat([df_estimators,df_e])
+
+    print("*****")
+   
+
+
+
+# ### Save the result of the analysis
+
+# In[7]:
+
+
+df_estimators.to_pickle("./transition_state_calcium_imaging_stability_validation.pkl") 
+
+
+# ### Load the result of the analysis
+
+# In[8]:
+
+
+df_estimators = pd.read_pickle("./transition_state_calcium_imaging_stability_validation.pkl") 
+df_estimators['neuronID'] = df_estimators.index
+
+#df_estimators["animal"] = df_estimators["animal"]
+
+#print(df_estimators["animal"])
+
+#df_estimators["animal_cat"] = df_estimators["animal"].astype("category")
+
+
+# ### Plot 
+
+# In[9]:
+
+
+parameters = ["animal",'number.neurons','baseline.quantile.25','traces.median','traces.skewness','median.stability']
+labels = ["Animal \n ID",'Extracted \n ROIs','Baseline, \n A.U.','Median, \n A.U.','Skewness','Stability, \n %']
+number_subplots = len(parameters)
+
+recordings_ranges = [[0,74]]
+
+for rmin,rmax in recordings_ranges:
+
+    f, axes = plt.subplots(number_subplots, 1, figsize=(8, 5))  # sharex=True
+    #plt.subplots_adjust(left=None, bottom=0.1, right=None, top=0.9, wspace=None, hspace=0.2)
+    #f.tight_layout()
+    sns.despine(left=True)
+
+    for i, param in enumerate(parameters):
+        
+        lw = 1  # same line width for every panel
+
+        sns.boxplot(x='recording', y=param, data=df_estimators[(df_estimators.recording>=rmin)&(df_estimators.recording<=rmax)], width=0.9, dodge=False, showfliers = False,ax=axes[i],linewidth=lw)
+        if (i == 0):
+            param = "animal"
+            print(np.unique(df_estimators[param]))
+            axes[i].set_yticks(np.unique(df_estimators[param]))
+ 
+            sns.swarmplot(x='recording', y=param, data=df_estimators[(df_estimators.recording>=rmin)&(df_estimators.recording<=rmax)&(df_estimators['neuronID'] == 0)],dodge=False,  s=1,  edgecolor='black', linewidth=1, ax=axes[i])
+            #ax.set(ylabel="")
+      
+        if i > 1:
+            axes[i].set_ylim([0.0,2000.0])
+        if i > 3:
+            axes[i].set_ylim([0.0,10.0])
+        if i > 4:
+            axes[i].set_ylim([80,120])
+            axes[i].get_xaxis().set_visible(True)
+        else:
+            axes[i].get_xaxis().set_visible(False)
+           
+        if i < number_subplots-1:
+            axes[i].xaxis.label.set_visible(False)
+        if i==0:
+            axes[i].set_title("Validation: stability check (recordings #%d-#%d)" % (rmin,rmax), fontsize=9, pad=30) #45
+        axes[i].set_ylabel(labels[i], fontsize=9,labelpad=5) #40
+        axes[i].set_xlabel("Recording", fontsize=9,labelpad=5) #40
+        #axes[i].axis('off')
+
+        axes[i].xaxis.set_tick_params(labelsize=9) #35
+        axes[i].yaxis.set_tick_params(labelsize=9) #30
+
+        #axes[i].get_legend().remove()
+        axes[i].xaxis.set_major_locator(ticker.MultipleLocator(10))
+        axes[i].xaxis.set_major_formatter(ticker.ScalarFormatter())
+
+    #plt.legend(loc='upper right',fontsize=35)
+    #plt.legend(bbox_to_anchor=(1.01, 1), loc=2, borderaxespad=0.,fontsize=25)
+    plt.savefig("Validation_stability_check_rec_#%d-#%d).png" % (rmin,rmax),dpi=400)
+    plt.savefig("Validation_stability_check_rec_#%d-#%d).svg" % (rmin,rmax))
+    #plt.show()
+
+
+# In[13]:
+
+
+sns.displot(data=df_estimators, x="median.stability", kind="kde", hue = "animal")
+plt.xlim([80,120])
+plt.xlabel("Stability, %", fontsize = 15)
+plt.title("Validation: summary on stability (recordings #%d-#%d)" % (min(rec),max(rec)), fontsize = 20, pad=20)
+plt.grid(False)
+plt.savefig("Validation_summary_stability_recordings_#%d-#%d)" % (min(rec),max(rec)))
+#plt.show()
+
+
+# In[62]:
+
+
+df_estimators["median.stability"].describe()
+

BIN
validation/calcium_imaging_transition_state/__pycache__/capipeline.cpython-38.pyc


+ 485 - 0
validation/calcium_imaging_transition_state/capipeline.py

@@ -0,0 +1,485 @@
+import numpy as np
+import numpy.ma as ma
+import matplotlib.pyplot as plt
+import pandas as pd
+import seaborn as sns
+import pickle
+import os
+
+sns.set()
+sns.set_style("whitegrid")
+
+from scipy.signal import medfilt 
+from scipy.stats import skew, kurtosis, zscore
+
+from scipy import signal
+
+from sklearn.linear_model import LinearRegression, TheilSenRegressor
+
+from sys import path
+### set path to where OASIS is located
+### make sure to run ' python setup.py build_ext --inplace ' in OASIS-master folder
+path.append(r'/media/andrey/My Passport/OASIS-master')
+from oasis.functions import deconvolve
+from oasis.functions import estimate_time_constant
+
+
+def get_recordings_for_animals(animals, path):
+    
+    recordings = []
+        
+    for animal in animals:
+        meta_data = pd.read_excel(path)
+        meta_animal = meta_data[meta_data['Mouse'] == animal]
+        recs = meta_animal['Number'].to_list()
+        for r in recs:
+            recordings.append(r)
+        
+    return recordings
+
+def get_animal_from_recording(recording, path):
+    meta_data = pd.read_excel(path)
+    meta_recording = meta_data[meta_data['Number'] == recording]
+    animal = (meta_recording['Mouse'])
+    animal = animal.to_numpy()[0]
+    return animal
+
+def get_condition(recording, path):
+    meta_data = pd.read_excel(path)
+    condition = meta_data['Condition'][meta_data['Number'] == recording].values[0]
+    return condition
+
+
+def traces_and_npils(recording, path, concatenation=True):
+    
+    meta_data = pd.read_excel(path)
+    
+    if (concatenation==True):
+        
+        path_excel_rec = str(meta_data['Folder'][recording]) + str(meta_data['Subfolder'][recording]) + 'suite2p/plane0'
+    
+        stats = np.load(path_excel_rec + '/stat.npy',allow_pickle=True)
+        Traces = np.load(path_excel_rec + '/F.npy')
+        Npil = np.load(path_excel_rec + '/Fneu.npy')
+        iscell = np.load(path_excel_rec + '/iscell.npy')
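+        # Suite2p outputs: F.npy / Fneu.npy hold (nROIs, nFrames) fluorescence and
+        # neuropil traces; iscell.npy is (nROIs, 2) with the accept/reject flag in
+        # column 0 and the classifier probability in column 1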
+        
+        print("Total trace length: " + str(Traces.shape[1]))
+        
+        starting_frame = int(meta_data['Starting frame'][recording])
+        recording_length = int(meta_data['Recording length'][recording])
+        
+        #analysis_period = meta_data['Analysis period'][recording]
+        #analysis_period = [int(s) for s in analysis_period.split(',')]
+        
+        n_accepted_rejected = Traces.shape[0] 
+        
+        #print("Period for the analysis (absolute): " + str(analysis_period[0]+starting_frame)+" "+str(analysis_period[1]+starting_frame))
+        
+        print("Recording length: " + str(recording_length))
+        
+        #print("Period for the analysis (relative): " + str(analysis_period[0])+" "+str(analysis_period[1]))
+        
+        good_recording = np.zeros(shape=(recording_length))
+        
+        
+        
+        if isinstance(meta_data['Analysis period'][recording], str): # as previous script
+            #good_recording = np.zeros((18000))
+            recording2keep = [int(s) for s in meta_data['Analysis period'][recording].split(',')]
+            print("Analysis periods: " + str(recording2keep))
+            begin = recording2keep[0::2]
+            ending = recording2keep[1::2]
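+            # 'Analysis period' lists start/stop frame pairs, e.g. "0,9000,12000,18000"
+            # keeps frames [0:9000) and [12000:18000)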
+            for idx_begin in range(int(len(begin))):
+                good_recording[begin[idx_begin] : ending[idx_begin]] = 1
+        #else:
+        #    good_recording = np.ones_like(Spikes[0, :])
+        
+        good_recording = good_recording > 0  
+        
+    
+        
+        Traces = Traces[:,starting_frame:starting_frame+recording_length]
+        Npil = Npil[:,starting_frame:starting_frame+recording_length]
+        
+   
+        
+        Traces = Traces[:, good_recording]     
+        Npil = Npil[:, good_recording]
+
+        print("Analysis period total frames: ", Traces.shape[1])
+        
+        #Traces = Traces[:,analysis_period[0]+starting_frame:analysis_period[1]+starting_frame]
+        #Npil = Npil[:,analysis_period[0]+starting_frame:analysis_period[1]+starting_frame]
+
+        Traces = Traces[iscell[:, 0].astype(bool), :]
+        Npil = Npil[iscell[:, 0].astype(bool), :] 
+         
+    else:
+        print("record",recording)
+        path_excel_rec = str(meta_data['Folder'][recording]) + str(meta_data['Subfolder'][recording]) + '/suite2p/plane0'
+        
+        stats = np.load(path_excel_rec + '/stat.npy',allow_pickle=True)
+        Traces = np.load(path_excel_rec + '/F.npy')
+        Npil = np.load(path_excel_rec + '/Fneu.npy')
+        iscell = np.load(path_excel_rec + '/iscell.npy')
+        
+        n_accepted_rejected = Traces.shape[0]
+        Traces = Traces[iscell[:, 0].astype(bool), :]
+        Npil = Npil[iscell[:, 0].astype(bool), :]   
+
+        
+    return Traces, Npil, n_accepted_rejected # n_accepted_rejected = Accepted + Rejected
+
+def median_stabilities(Npils):
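+    """Per-ROI drift estimate: split each neuropil trace into 1000-frame bins,
+    take the median of each bin, and return the mean absolute deviation of the
+    bin medians from the median of the whole (truncated) trace."""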
+
+    number_of_neurons = Npils.shape[0] 
+    length = Npils.shape[1]
+    
+    #print(length)
+    
+    l = int(length / 1000)
+
+    Npils = Npils[:,:l*1000]
+    
+    npils_medians = ma.median(Npils, axis=1)
+    
+    #Npils = Npils - np.tile(ma.expand_dims(ma.median(Npils, axis=1), axis=1),
+    #                              (1, ma.shape(Npils)[1]))
+    
+    Npils = Npils.reshape(Npils.shape[0],l,1000)
+    
+    #npils_medians = npils_medians.reshape(Npils.shape[0],l)
+    #print(Npils.shape)
+    
+    ###TODO npils_median
+    
+    median_stabilities = ma.median(Npils,axis=2)
+    median_for_all_trace = ma.median(Npils,axis=[1,2])
+
+    median_stabilities = ma.abs(median_stabilities-median_for_all_trace[:,np.newaxis])
+
+    median_stabilities = ma.sum(median_stabilities,axis=1)/l
+
+    return median_stabilities
+
+def get_data_frame(recording, path, threshold=200, baseline_correction=True, concatenation=True, correlations = True):
+    
+    df_estimators = pd.DataFrame()
+
+    df_corr =  pd.DataFrame()
+
+    r = recording
+    
+    animal = get_animal_from_recording(r, path)
+        
+    #condition = get_condition(r, path)
+    
+    #print(str(r)+" "+str(animal)+" "+str(condition))
+    
+    plt.cla()
+    
+    Traces, Npils, n_accepted_and_rejected = traces_and_npils(r, path, concatenation)
+
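+    # neuropil-corrected signal: F - 0.7 * Fneu, with 0.7 the conventional
+    # neuropil contamination coefficient for Suite2p data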
+    Tm0p7N = Traces - 0.7*Npils
+    
+    if (baseline_correction==True):
+    
+        #median of all traces
+        med_of_all_traces = np.median(Tm0p7N,axis=0)
+
+        plt.plot(med_of_all_traces,'k')
+
+        #filtering 
+        Tm0p7N_midfilt = medfilt(med_of_all_traces,31)
+
+        plt.plot( Tm0p7N_midfilt, 'b')
+
+        #regression
+        TSreg = TheilSenRegressor(random_state=42)
+        x = np.arange(Tm0p7N.shape[1])
+        X = x[:, np.newaxis]
+        fit = TSreg.fit(X, Tm0p7N_midfilt) 
+                #print(fit.get_params())
+        y_pred =  TSreg.predict(X)
+
+
+        plt.plot(  y_pred, 'w')
+
+        #subtract
+        y_pred =  y_pred - y_pred[0]
+        
+        ab,resid,_,_,_ = ma.polyfit(x, y_pred, 1,full = True)    
+
+        #print(ab,resid)    
+
+        Tm0p7N[:,:] -= y_pred[np.newaxis, :]
+
+
+        plt.title("median of all traces in the recording")
+        plt.show()
+  
+    recording_length = Traces.shape[1]
+    
+    # old baseline, worked well 
+    
+    baseline = np.quantile(Tm0p7N,0.25,axis=1)
+    
+    print("Median baseline: {:.2f}".format(np.median(baseline)))
+    
+    Baseline_subtracted = Tm0p7N.copy() 
+    
+    for i in range(Baseline_subtracted.shape[0]):
+        Baseline_subtracted[i,:] -= baseline[i]
+    
+    integral = Baseline_subtracted.sum(axis=1)/recording_length
+    
+    
+    #print(r)
+    #Npil_regression = np.polyfit(np.arange(Npil.shape[1]),Npil,1)
+    
+    #Npil_stability[neuron_id] = Npil_regression[0]*9000/Npil_regression[1]
+
+    #for npil in Npil:
+    #    print(npil.shape)
+ 
+    
+    n_accepted = Traces.shape[0]
+    
+    neuronID = ma.arange(n_accepted)
+    
+    
+    Traces_median = ma.median(Traces, axis=1) 
+    Npils_median = ma.median(Npils, axis=1) 
+    
+    Tm0p7N_median = ma.median(Tm0p7N, axis=1)   
+    
+    Traces_std = ma.std(Traces, axis=1)
+    Npils_std = ma.std(Npils, axis=1)
+    Tm0p7N_std = ma.std(Tm0p7N, axis=1) 
+    
+    Traces_mins = ma.min(Traces, axis=1)
+    Traces_maxs = ma.max(Traces, axis=1)
+    Traces_peak_to_peak = Traces_maxs - Traces_mins
+    
+    Npils_mins = ma.min(Npils, axis=1)
+    Npils_maxs = ma.max(Npils, axis=1)
+    Npils_peak_to_peak = Npils_maxs - Npils_mins
+    
+    #median subtraction
+    #Traces = Traces - np.tile(ma.expand_dims(ma.median(Traces, axis=1), axis=1),
+     #                             (1, ma.shape(Traces)[1]))
+    #Npil = Npil - np.tile(ma.expand_dims(ma.median(Npil, axis=1), axis=1),
+      #                            (1, ma.shape(Npil)[1]))
+    
+    Traces_skewness = skew(Traces,axis=1)
+    Npils_skewness = skew(Npils,axis=1)
+    
+    Tm0p7N_skewness = skew(Tm0p7N,axis=1)
+    
+    
+    
+    Traces_kurtosis = kurtosis(Traces, axis=1)
+    Npils_kurtosis = kurtosis(Npils, axis=1)    
+
+    
+    Npils_median_stabilities = median_stabilities(Npils)
+    
+    slope = ma.zeros(Npils.shape[0])
+    intercept = ma.zeros(Npils.shape[0])
+    residuals = ma.zeros(Npils.shape[0])
+
+    if correlations:
+        # Replace with a smarter solution to take into account correlations with other neurons.
+        Tcorr = np.corrcoef(Traces).flatten()
+        Ncorr = np.corrcoef(Npils).flatten()
+
+        Tm0p7Ncorr_mean = np.mean(np.corrcoef(Tm0p7N), axis=1)
+
+        Tm0p7Ncorr = np.corrcoef(Tm0p7N).flatten()
+
+        Tm0p7Ncorr[Tm0p7Ncorr > 0.99999] = np.nan
+
+        Tm0p7Ncorr_first100 = np.corrcoef(Tm0p7N[:100, :]).flatten()
+
+        Tm0p7Ncorr_first100[Tm0p7Ncorr_first100 > 0.99999] = np.nan
+
+        # quick trick
+        #print(Tm0p7N.shape)
+        #Tm0p7N_10bins = Tm0p7N.reshape(Tm0p7N.shape[0],int(Tm0p7N.shape[1]/10),10).mean(axis=2)
+        #print(Tm0p7N_10bins.shape)
+        #Tm0p7Ncorr_10bins = np.corrcoef(Tm0p7N_10bins).flatten()
+        #Tm0p7Ncorr_10bins[Tm0p7Ncorr_10bins>0.99999] = np.nan
+
+        df_corr = pd.DataFrame({"animal": animal,
+                                "recording": r,
+                                #"condition":condition,
+                                #"Tcorr":Tcorr,
+                                #"Ncorr":Ncorr,
+                                "Tm0p7Ncorr": Tm0p7Ncorr,
+                                "Tm0p7Ncorr.abs": np.absolute(Tm0p7Ncorr)
+                                #"Tm0p7Ncorr_10bins":Tm0p7Ncorr_10bins,
+                                #"Tm0p7Ncorr_10bins.abs":np.absolute(Tm0p7Ncorr_10bins)
+                                })
+
+    for i, npil in enumerate(Npils):
+        ab, resid, _, _, _ = ma.polyfit(np.arange(npil.shape[0]), npil, 1, full=True)
+        slope[i] = ab[0]
+        intercept[i] = ab[1]
+        residuals[i] = resid
+        
+    slope_per_median = ma.divide(slope,Npils_median)    
+    slope_in_percent = ma.divide(ma.multiply(slope,Npils.shape[1]),Npils_median)
+    
+    print("Number of neurons accepted: " + str(Npils_std.shape[0]))
+    
+    ### decay constant and peak characterization
+    
+    num_cells = np.shape(Traces)[0]
+    decay_isol = np.zeros((num_cells))
+    decay_no_isol = np.zeros((num_cells))
+    n_peaks = np.zeros((num_cells))
+    height = np.zeros((num_cells))
+    width = np.zeros((num_cells))
+    
+    baseline_oasis = np.zeros((num_cells))
+    
+    Baseline_subtracted = Tm0p7N.copy() 
+    
+    integral = np.zeros((num_cells))
+
+    #from oasis.functions import deconvolve
+    #from oasis.functions import estimate_time_constant
+    
+    for neuron in range(num_cells):
+       
+        fs=30
+        _, _, b, decay_neuron_isolated10, _ = deconvolve(np.double(Tm0p7N[neuron, ]),
+                                                                 penalty = 0, sn=25, optimize_g = 10)
+        _, _, _, decay_neuron_no_isolated, _ = deconvolve(np.double(Tm0p7N[neuron, ]), sn=25,
+                                                              penalty = 0)
+        
+        baseline_oasis[neuron] = b
+        
+        Baseline_subtracted[neuron] -= b
+        
+        integral[neuron] = Baseline_subtracted[neuron].sum()/recording_length
+        #ARcoef = estimate_time_constant(Tm0p7N[neuron, ], p=2, sn=None, lags=10, fudge_factor=1.0, nonlinear_fit=False)
+        
+        #print(ARcoef)
+    
+        peak_ind, peaks = signal.find_peaks(Baseline_subtracted[neuron, ], height = threshold,
+                                         distance = 10, prominence = threshold,
+                                         width = (None, None),
+                                         rel_height = 0.9)
+        
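+        # the deconvolution returns an AR(1) coefficient g; convert it to a decay
+        # time constant in seconds as tau = -1 / (fs * ln(g)) with fs = 30 Hz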
+        decay_isol[neuron] = - 1 / (fs * np.log(decay_neuron_isolated10))
+        decay_no_isol[neuron] = - 1 / (fs * np.log(decay_neuron_no_isolated))
+        
+        if not decay_isol[neuron] > 0:
+            decay_isol[neuron] = np.nan  # non-positive decay estimates are invalid
+            
+        
+    
+        #print(decay_neuron_isolated10,decay_isol[neuron]," s")
+
+        fs = 30
+        trace_len = np.shape(Traces)[1] / (fs * 60) # in minutes  
+        
+        n_peaks[neuron] = len(peaks['peak_heights']) / trace_len
+    
+        if n_peaks[neuron] > 0:
+            height[neuron, ] = np.median(peaks['peak_heights'])
+            width[neuron, ] = np.median(peaks['widths'])
+        else:
+            height[neuron, ] = np.nan
+            width[neuron, ] = np.nan
+    
+
+    df_estimators = pd.DataFrame({ "animal":animal,
+                        "recording":r,
+                        #"condition":condition,
+                        "neuronID":neuronID,
+                        "n.accepted":n_accepted,
+                        "length.frames":recording_lenght,
+                        "length.minutes":trace_len,
+                        "n.accepted_and_rejected":n_accepted_and_rejected,
+                        "traces.median":Traces_median,
+                        "npil.median":Npils_median,
+                        "trace.std":Traces_std,
+                        "npil.std":Npils_std,
+    
+                        "trace.mins":Traces_mins,
+                        "trace.maxs":Traces_maxs,
+                        "trace.peak_to_peak":Traces_peak_to_peak,
+    
+                        "npil.mins":Npils_mins,
+                        "npil.maxs":Npils_maxs,
+                        "npil.peak_to_peak":Npils_peak_to_peak,
+    
+                        "trace.skewness":Traces_skewness,
+                        "npil.skewness":Npils_skewness,
+                       
+                        "Tm0p7N.skewness":Tm0p7N_skewness,
+                        "Tm0p7N.median":Tm0p7N_median,
+                        "Tm0p7N.std":Tm0p7N_std,
+    
+                        "trace.kurtosis":Traces_kurtosis,
+                        "npil.kurtosis": Npils_kurtosis, 
+    
+                        "npil.slope":slope,
+                        "npil.intercept":intercept,
+                        "npil.residual":residuals,
+                       
+                        "npil.slope_per_median":slope_in_percent, 
+                        "npil.slope_in_percent":slope_in_percent,
+     
+                        "npil.mstab.1000":Npils_median_stabilities,
+                      
+                        "baseline.quantile.25":baseline,
+                        "baseline.oasis":baseline_oasis,
+                        "integral":integral,
+                       
+                        #"Tm0p7Ncorr.mean":Tm0p7Ncorr_mean,
+                       
+                           ### decay constant and peak characterization
+                        "peak_detection_threshold":threshold,
+                        "decay_isol":decay_isol,
+                        "decay_no_isol":decay_no_isol,
+                        "n_peaks":n_peaks,
+                        "n_peaks_per_recording":n_peaks*trace_len,
+                        "height.median":height,
+                        "width.median":width
+                       
+                      })
+    
+    return  df_estimators ,df_corr
+
+def get_raster(starting_recording, n_neurons_max,database_path, concatenation):
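+    """Build a compact raster for display: z-score the neuropil-corrected traces,
+    average into 500 bins of 10 frames (assumes 5000-frame recordings), clip to
+    [-4, 8] z-units and rescale to [0, 1]."""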
+    
+    traces, npil, number_of_neurons = traces_and_npils(starting_recording, database_path, concatenation)
+                                                       
+    Tm0p7N = traces - 0.7*npil
+
+    Tm0p7N = zscore(Tm0p7N,axis=1)   
+    Tm0p7N = np.reshape(Tm0p7N,(Tm0p7N.shape[0], 500,10)).mean(axis=2)
+    Tm0p7N = np.maximum(-4, np.minimum(8,  Tm0p7N)) + 4
+    Tm0p7N/= 12
+    
+    return Tm0p7N[:n_neurons_max,:]

File diff suppressed because it is too large
+ 1405 - 0
validation/calcium_imaging_transition_state/registration_logs.txt


BIN
validation/calcium_imaging_transition_state/sleep_data_calcium_imaging_stability_validation.pkl


BIN
validation/calcium_imaging_transition_state/transition_state_calcium_imaging_stability_validation.pkl


BIN
validation/sleep_data_calcium_imaging/FOV_Validation_8235.png


BIN
validation/sleep_data_calcium_imaging/FOV_Validation_8237.png


BIN
validation/sleep_data_calcium_imaging/FOV_Validation_8238.png


BIN
validation/sleep_data_calcium_imaging/Validation_stability_check_rec_#0-#74).png


BIN
validation/sleep_data_calcium_imaging/Validation_summary_stability_recordings_#0-#74).png