Browse Source

some new analyses and refactoring

asobolev 8 months ago
parent
commit
e91c6c514a
36 changed files with 532549 additions and 40680 deletions
  1. 113 162
      analysis/AEPs/A1 + PPC AEPs.ipynb
  2. 193 31
      analysis/AEPs/AEP - Unit Timeline.ipynb
  3. 100 85
      analysis/AEPs/AEPs - overview.ipynb
  4. 75 198
      analysis/AEPs/AEPs - single.ipynb
  5. 37 16
      analysis/AEPs/Metrics (P1 - N1 - P2 - P3).ipynb
  6. 538 142
      analysis/AEPs/preprocessing.ipynb
  7. 0 218
      analysis/AEPs/spectrum.ipynb
  8. 269 0
      analysis/AL/ITI units by state.ipynb
  9. 0 0
      analysis/AL/active listening.ipynb
  10. 1458 0
      analysis/Behavior/syllables.ipynb
  11. 280 0
      analysis/CCR/AEP metrics.ipynb
  12. 493 0
      analysis/CCR/Events.ipynb
  13. 294 0
      analysis/CCR/LFP4Hz - AEP Metrics.ipynb
  14. 190 0
      analysis/CCR/LFP4Hz - Brain Areas.ipynb
  15. 286 0
      analysis/CCR/LFP4Hz - Units.ipynb
  16. 0 573
      analysis/CCRs.ipynb
  17. 755 0
      analysis/LFP Spectrum.ipynb
  18. 321 0
      analysis/PSTH/By unit.ipynb
  19. 70 182
      analysis/PSTH.ipynb
  20. 219 0
      analysis/PSTH/noise.ipynb
  21. 223 0
      analysis/PSTH/reward.ipynb
  22. 360 0
      analysis/PSTH/shuffle.ipynb
  23. 222 0
      analysis/PSTH/silence.ipynb
  24. 245 0
      analysis/Target Onset/AEP profile first pulse success - miss.ipynb
  25. 504 0
      analysis/Target Onset/Based on state.ipynb
  26. 383 0
      analysis/Target Onset/Based on success - miss.ipynb
  27. 315 0
      analysis/Target Onset/Target Onset firing between brain areas.ipynb
  28. 704 0
      analysis/Target Onset/population - micro.ipynb
  29. 97 0
      analysis/loading.py
  30. 41 12
      analysis/performance - general.ipynb
  31. 48 17
      analysis/target.py
  32. 274 742
      postprocessing/MoSeq.ipynb
  33. 21 6
      postprocessing/execute.ipynb
  34. 529 578
      session/overview.ipynb
  35. 16 8
      session/sessions.py
  36. 522876 37710
      sorting/ss-neurosuite.ipynb

File diff suppressed because it is too large
+ 113 - 162
analysis/AEPs/A1 + PPC AEPs.ipynb


File diff suppressed because it is too large
+ 193 - 31
analysis/AEPs/AEP - Unit Timeline.ipynb


File diff suppressed because it is too large
+ 100 - 85
analysis/AEPs/AEPs - overview.ipynb


File diff suppressed because it is too large
+ 75 - 198
analysis/AEPs/AEPs - single.ipynb


File diff suppressed because it is too large
+ 37 - 16
analysis/AEPs/Metrics (P1 - N1 - P2 - P3).ipynb


File diff suppressed because it is too large
+ 538 - 142
analysis/AEPs/preprocessing.ipynb


File diff suppressed because it is too large
+ 0 - 218
analysis/AEPs/spectrum.ipynb


File diff suppressed because it is too large
+ 269 - 0
analysis/AL/ITI units by state.ipynb


analysis/active listening.ipynb → analysis/AL/active listening.ipynb


File diff suppressed because it is too large
+ 1458 - 0
analysis/Behavior/syllables.ipynb


File diff suppressed because it is too large
+ 280 - 0
analysis/CCR/AEP metrics.ipynb


File diff suppressed because it is too large
+ 493 - 0
analysis/CCR/Events.ipynb


File diff suppressed because it is too large
+ 294 - 0
analysis/CCR/LFP4Hz - AEP Metrics.ipynb


File diff suppressed because it is too large
+ 190 - 0
analysis/CCR/LFP4Hz - Brain Areas.ipynb


File diff suppressed because it is too large
+ 286 - 0
analysis/CCR/LFP4Hz - Units.ipynb


File diff suppressed because it is too large
+ 0 - 573
analysis/CCRs.ipynb


File diff suppressed because it is too large
+ 755 - 0
analysis/LFP Spectrum.ipynb


File diff suppressed because it is too large
+ 321 - 0
analysis/PSTH/By unit.ipynb


File diff suppressed because it is too large
+ 70 - 182
analysis/PSTH.ipynb


File diff suppressed because it is too large
+ 219 - 0
analysis/PSTH/noise.ipynb


File diff suppressed because it is too large
+ 223 - 0
analysis/PSTH/reward.ipynb


File diff suppressed because it is too large
+ 360 - 0
analysis/PSTH/shuffle.ipynb


File diff suppressed because it is too large
+ 222 - 0
analysis/PSTH/silence.ipynb


File diff suppressed because it is too large
+ 245 - 0
analysis/Target Onset/AEP profile first pulse success - miss.ipynb


File diff suppressed because it is too large
+ 504 - 0
analysis/Target Onset/Based on state.ipynb


File diff suppressed because it is too large
+ 383 - 0
analysis/Target Onset/Based on success - miss.ipynb


File diff suppressed because it is too large
+ 315 - 0
analysis/Target Onset/Target Onset firing between brain areas.ipynb


File diff suppressed because it is too large
+ 704 - 0
analysis/Target Onset/population - micro.ipynb


+ 97 - 0
analysis/loading.py

@@ -0,0 +1,97 @@
+import sys, os
+sys.path.append(os.path.join(os.getcwd(), '..'))
+sys.path.append(os.path.join(os.getcwd(), '..', '..'))
+
+from imports import *
+from target import build_tgt_matrix
+
+
def load_session_data(session):
    """Load all processed data for one session into a single dict.

    Reads the session H5 file (timeline, trials, units) and the AEPs.h5
    file (AEPs, AEP metrics, pulse events), builds the target matrix, and
    creates the session report directory if it is missing.

    Relies on names pulled in at module level via ``from imports import *``:
    ``source``, ``report``, ``h5py``, ``json``, ``np``, ``H5NAMES`` —
    NOTE(review): assumed, confirm against ``imports.py``.

    Args:
        session: session name such as '009266_hippoSIT_2023-04-17_17-04-17';
            the animal ID is the part before the first underscore.

    Returns:
        dict with keys: tl, trials, cfg, areas, aeps, aeps_events,
        AEP_metrics_lims, AEP_metrics_raw, AEP_metrics_norm, tgt_matrix,
        single_units, spike_times, unit_names, animal, aeps_file,
        h5_file, report_path.
    """
    all_areas = ['A1', 'PPC', 'HPC']

    animal      = session.split('_')[0]
    sessionpath = os.path.join(source, animal, session)
    aeps_file   = os.path.join(sessionpath, 'AEPs.h5')
    h5_file     = os.path.join(sessionpath, session + '.h5')
    report_path = os.path.join(report, 'PSTH', session)
    if not os.path.exists(report_path):
        os.makedirs(report_path)

    # load timeline and configuration
    with h5py.File(h5_file, 'r') as f:
        tl = np.array(f['processed']['timeline'])  # time, X, Y, speed, etc.
        trials = np.array(f['processed']['trial_idxs'])  # t_start_idx, t_end_idx, x_tgt, y_tgt, r_tgt, result
        cfg = json.loads(f['processed'].attrs['parameters'])

    # load AEPs — only the brain areas actually recorded in this session
    aeps = {}
    with h5py.File(aeps_file, 'r') as f:
        for area in all_areas:
            if not area in f:
                continue
            aeps[area] = np.array(f[area]['aeps'])
        aeps_events = np.array(f['aeps_events'])

    areas = list(aeps.keys())

    # TODO find better way. Remove outliers by clipping each area's AEPs
    # to a fixed per-area amplitude limit (in place).
    clip_limits = {'A1': 5000, 'PPC': 1500, 'HPC': 1500}
    for area in areas:
        lim = clip_limits[area]
        np.clip(aeps[area], -lim, lim, out=aeps[area])

    # load AEP metrics: raw / normalized values plus the [start, end]
    # window limits stored as a comma-separated attribute
    AEP_metrics_lims = dict([(area, {}) for area in areas])
    AEP_metrics_raw  = dict([(area, {}) for area in areas])
    AEP_metrics_norm = dict([(area, {}) for area in areas])
    with h5py.File(aeps_file, 'r') as f:
        for area in areas:
            grp = f[area]

            for metric_name in grp['raw']:
                AEP_metrics_raw[area][metric_name]  = np.array(grp['raw'][metric_name])
                AEP_metrics_norm[area][metric_name] = np.array(grp['norm'][metric_name])
                AEP_metrics_lims[area][metric_name] = [int(x) for x in grp['raw'][metric_name].attrs['limits'].split(',')]

    # build target matrix (entrances / exits to the target zone)
    tgt_matrix = build_tgt_matrix(tl, trials, aeps_events)

    # read single units: spike times and precomputed instantaneous rates
    single_units = {}
    spike_times = {}
    with h5py.File(h5_file, 'r') as f:
        unit_names = [x for x in f['units']]
        for unit_name in unit_names:
            spike_times[unit_name] = np.array(f['units'][unit_name][H5NAMES.spike_times['name']])
            single_units[unit_name] = np.array(f['units'][unit_name][H5NAMES.inst_rate['name']])

    return {
        'tl': tl,
        'trials': trials,
        'cfg': cfg,
        'areas': areas,
        'aeps': aeps,
        'aeps_events': aeps_events,
        'AEP_metrics_lims': AEP_metrics_lims,
        'AEP_metrics_raw': AEP_metrics_raw,
        'AEP_metrics_norm': AEP_metrics_norm,
        'tgt_matrix': tgt_matrix,
        'single_units': single_units,
        'spike_times': spike_times,
        'unit_names': unit_names,
        'animal': animal,
        'aeps_file': aeps_file,
        'h5_file': h5_file,
        'report_path': report_path
    }

+ 41 - 12
analysis/performance - general.ipynb

@@ -1000,7 +1000,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 8,
+   "execution_count": 3,
    "id": "0ed0e8e4",
    "metadata": {
     "scrolled": true
@@ -1009,14 +1009,14 @@
     {
      "data": {
       "text/plain": [
-       "['009266_hippoSIT_2023-02-28_19-53-56',\n",
-       " '009266_hippoSIT_2023-03-01_17-53-11',\n",
-       " '009266_hippoSIT_2023-03-06_15-10-36',\n",
-       " '009266_hippoSIT_2023-03-06_20-43-19',\n",
-       " '009266_hippoSIT_2023-03-08_17-06-45']"
+       "['009266_hippoSIT_2022-12-20_16-33-45',\n",
+       " '009266_hippoSIT_2022-12-21_16-59-56',\n",
+       " '009266_hippoSIT_2022-12-22_10-27-35',\n",
+       " '009266_hippoSIT_2022-12-23_10-10-41',\n",
+       " '009266_hippoSIT_2022-12-26_12-18-22']"
       ]
      },
-     "execution_count": 8,
+     "execution_count": 3,
      "metadata": {},
      "output_type": "execute_result"
     }
@@ -1032,7 +1032,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 9,
+   "execution_count": 5,
    "id": "1496cfad",
    "metadata": {},
    "outputs": [],
@@ -1046,6 +1046,8 @@
     "    with h5py.File(h5name, 'r') as f:\n",
     "        #cfg = json.loads(f['processed'].attrs['parameters'])\n",
     "        #tl = np.array(f['processed']['timeline'])  # time, X, Y, speed\n",
+    "        if not 'processed' in f:\n",
+    "            continue\n",
     "        trial_idxs = np.array(f['processed']['trial_idxs']) # idx start, idx end, X, Y, R, trial result (idx to tl)\n",
     "\n",
     "    success_rates[session] = trial_idxs[:, 5].sum() / len(trial_idxs)"
@@ -1053,20 +1055,38 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 22,
+   "execution_count": 6,
    "id": "13c14c76",
    "metadata": {},
    "outputs": [
     {
      "data": {
       "text/plain": [
-       "{'009266_hippoSIT_2023-02-28_19-53-56': 0.40625,\n",
+       "{'009266_hippoSIT_2022-12-20_16-33-45': 0.675,\n",
+       " '009266_hippoSIT_2022-12-21_16-59-56': 0.5142857142857142,\n",
+       " '009266_hippoSIT_2022-12-22_10-27-35': 0.36363636363636365,\n",
+       " '009266_hippoSIT_2022-12-23_10-10-41': 0.9056603773584906,\n",
+       " '009266_hippoSIT_2022-12-26_12-21-18': 0.9433962264150944,\n",
+       " '009266_hippoSIT_2022-12-27_11-29-41': 0.7222222222222222,\n",
+       " '009266_hippoSIT_2022-12-27_17-22-28': 0.6086956521739131,\n",
+       " '009266_hippoSIT_2022-12-28_11-51-50': 0.7115384615384616,\n",
+       " '009266_hippoSIT_2022-12-28_15-51-10': 0.7741935483870968,\n",
+       " '009266_hippoSIT_2022-12-29_11-52-42': 0.78125,\n",
+       " '009266_hippoSIT_2023-02-02_17-05-17': 0.75,\n",
+       " '009266_hippoSIT_2023-02-27_09-33-24': 0.25,\n",
+       " '009266_hippoSIT_2023-02-28_10-11-03': 0.2727272727272727,\n",
+       " '009266_hippoSIT_2023-02-28_19-53-56': 0.40625,\n",
        " '009266_hippoSIT_2023-03-01_17-53-11': 0.16129032258064516,\n",
+       " '009266_hippoSIT_2023-03-02_21-07-57': 0.3125,\n",
+       " '009266_hippoSIT_2023-03-03_10-24-59': 0.6744186046511628,\n",
+       " '009266_hippoSIT_2023-03-03_16-53-46': 0.6153846153846154,\n",
+       " '009266_hippoSIT_2023-03-05_17-43-50': 0.6153846153846154,\n",
        " '009266_hippoSIT_2023-03-06_15-10-36': 0.6842105263157895,\n",
        " '009266_hippoSIT_2023-03-06_20-43-19': 0.3333333333333333,\n",
        " '009266_hippoSIT_2023-03-08_17-06-45': 0.6388888888888888,\n",
        " '009266_hippoSIT_2023-03-09_09-37-07': 0.6052631578947368,\n",
        " '009266_hippoSIT_2023-03-09_19-12-22': 0.4411764705882353,\n",
+       " '009266_hippoSIT_2023-03-10_08-59-25': 0.4411764705882353,\n",
        " '009266_hippoSIT_2023-04-12_15-49-49': 0.30303030303030304,\n",
        " '009266_hippoSIT_2023-04-13_08-57-46': 0.5,\n",
        " '009266_hippoSIT_2023-04-14_09-17-34': 0.5142857142857142,\n",
@@ -1105,10 +1125,19 @@
        " '009266_hippoSIT_2023-05-23_09-18-05': 0.7857142857142857,\n",
        " '009266_hippoSIT_2023-05-23_17-48-12': 0.7297297297297297,\n",
        " '009266_hippoSIT_2023-05-25_09-56-32': 0.7804878048780488,\n",
-       " '009266_hippoSIT_2023-05-25_15-55-57': 0.7}"
+       " '009266_hippoSIT_2023-05-25_15-55-57': 0.7,\n",
+       " '009266_hippoSIT_2023-06-13_08-49-11': 0.6904761904761905,\n",
+       " '009266_hippoSIT_2023-06-14_08-21-23': 0.7674418604651163,\n",
+       " '009266_hippoSIT_2023-06-15_09-25-15': 0.5142857142857142,\n",
+       " '009266_hippoSIT_2023-06-15_17-30-45': 0.45454545454545453,\n",
+       " '009266_hippoSIT_2023-06-16_08-49-13': 0.42424242424242425,\n",
+       " '009266_hippoSIT_2023-06-19_08-58-35': 0.6410256410256411,\n",
+       " '009266_hippoSIT_2023-06-20_08-26-29': 0.4117647058823529,\n",
+       " '009266_hippoSIT_2023-06-21_08-15-10': 0.5135135135135135,\n",
+       " '009266_hippoSIT_2023-06-21_20-39-34': 0.5142857142857142}"
       ]
      },
-     "execution_count": 22,
+     "execution_count": 6,
      "metadata": {},
      "output_type": "execute_result"
     }

+ 48 - 17
analysis/target.py

@@ -3,33 +3,33 @@ import numpy as np
 
 def build_tgt_matrix(tl, trials, aeps_events):
     # compute timeline / AEP indices of entrances / exist to the target
-    tl_tgt_start_idxs = []  # timeline indices of entrance in target
-    tl_tgt_end_idxs   = []  # timeline indices of exit from target
-
+    tl_tgt_start_idxs   = []  # timeline indices of first target pulse
+    tl_tgt_end_idxs     = []  # timeline indices of last target pulse
+    aeps_tgt_start_idxs = []  # indices of first AEPs in target
+    aeps_tgt_end_idxs   = []  # indices of last AEPs in target
+    
     for i in range(len(tl) - 1):
         if tl[i][6] < 2 and tl[i+1][6] == 2:
-            tl_tgt_start_idxs.append(i + 1)
+            nearest_aep_idx = np.abs(aeps_events[:, 0] - tl[i+1][0]).argmin()
+            aeps_tgt_start_idxs.append(nearest_aep_idx)
+            t_event = aeps_events[nearest_aep_idx][0]
+            tl_tgt_start_idxs.append(np.abs(tl[:, 0] - t_event).argmin())
         if tl[i][6] == 2 and tl[i+1][6] < 2:
-            tl_tgt_end_idxs.append(i)
-
+            nearest_aep_idx = np.abs(aeps_events[:, 0] - tl[i][0]).argmin()
+            aeps_tgt_end_idxs.append(nearest_aep_idx)
+            t_event = aeps_events[nearest_aep_idx][0]
+            tl_tgt_end_idxs.append(np.abs(tl[:, 0] - t_event).argmin())
+            
     # ignore first/last target if not ended
     if tl_tgt_start_idxs[-1] > tl_tgt_end_idxs[-1]:
         tl_tgt_start_idxs = tl_tgt_start_idxs[:-1]
+        aeps_tgt_start_idxs = aeps_tgt_start_idxs[:-1]
     if tl_tgt_end_idxs[0] < tl_tgt_start_idxs[0]:
         tl_tgt_end_idxs = tl_tgt_end_idxs[1:]
+        aeps_tgt_end_idxs = aeps_tgt_end_idxs[1:]
     tl_tgt_start_idxs = np.array(tl_tgt_start_idxs)
     tl_tgt_end_idxs   = np.array(tl_tgt_end_idxs)
 
-    aeps_tgt_start_idxs = []  # indices of first AEPs in target
-    aeps_tgt_end_idxs   = []  # indices of last AEPs in target
-
-    for idx in tl_tgt_start_idxs:
-        aeps_tgt_start_idxs.append(np.abs(aeps_events[:, 0] - tl[idx][0]).argmin())
-    for idx in tl_tgt_end_idxs:
-        aeps_tgt_end_idxs.append(np.abs(aeps_events[:, 0] - tl[idx][0]).argmin())
-    aeps_tgt_start_idxs = np.array(aeps_tgt_start_idxs)
-    aeps_tgt_end_idxs = np.array(aeps_tgt_end_idxs)
-
     # successful / missed
     tgt_results = np.zeros(len(tl_tgt_start_idxs))
     for idx_tl_success_end in trials[trials[:, 5] == 1][:, 1]:
@@ -43,4 +43,35 @@ def build_tgt_matrix(tl, trials, aeps_events):
         aeps_tgt_start_idxs,
         aeps_tgt_end_idxs,
         tgt_results
-    ]).astype(np.int32)
+    ]).astype(np.int32)
+
+
def build_silence_matrix(tl):
    """Build a matrix of contiguous silence periods from the timeline.

    Args:
        tl: 2D timeline array; column 6 holds the sound state
            (0 = silence, non-zero = some sound playing).

    Returns:
        np.ndarray of shape (n_periods, 2) with [start_idx, end_idx]
        timeline indices (inclusive) for each complete silence period.
        Periods already running at recording start or still running at
        recording end are dropped, since one of their boundaries was
        never observed.
    """
    idxs_silence_start, idxs_silence_end = [], []
    for i in range(len(tl) - 1):
        if tl[i][6] != 0 and tl[i+1][6] == 0:    # silence start
            idxs_silence_start.append(i + 1)
        elif tl[i][6] == 0 and tl[i+1][6] != 0:  # silence end
            idxs_silence_end.append(i)

    # FIX: recording that begins in silence yields a leading 'end' with no
    # matching 'start' — previously this mispaired periods or crashed in
    # column_stack; drop that orphan end.
    if idxs_silence_end and (not idxs_silence_start
                             or idxs_silence_end[0] < idxs_silence_start[0]):
        idxs_silence_end = idxs_silence_end[1:]
    # drop a trailing silence period that never ended
    if len(idxs_silence_start) > len(idxs_silence_end):
        idxs_silence_start = idxs_silence_start[:-1]

    return np.column_stack([
        np.array(idxs_silence_start),
        np.array(idxs_silence_end),
    ])
+
+
def get_spike_counts(spk_times, pulse_times, hw=0.25, bin_count=51):
    """Peri-event histogram of spike times around a set of pulses.

    Args:
        spk_times: 1D np.ndarray of spike timestamps (seconds).
        pulse_times: iterable of event timestamps to align on.
        hw: half-width of the window around each pulse (seconds).
        bin_count: number of bin EDGES (yields bin_count - 1 bins).

    Returns:
        (edges, rate): the bin edges from -hw to +hw, and the firing
        rate per bin — spike counts averaged over pulses and divided
        by the bin width.
    """
    edges = np.linspace(-hw, hw, bin_count)

    # pulse-relative offsets of every spike falling inside each window
    per_pulse = [
        spk_times[(spk_times > t - hw) & (spk_times < t + hw)] - t
        for t in pulse_times
    ]
    offsets = np.concatenate(per_pulse) if per_pulse else np.array([])

    counts, _ = np.histogram(offsets, bins=edges)
    rate = counts / len(pulse_times) / (edges[1] - edges[0])

    return edges, rate

File diff suppressed because it is too large
+ 274 - 742
postprocessing/MoSeq.ipynb


+ 21 - 6
postprocessing/execute.ipynb

@@ -2,7 +2,7 @@
  "cells": [
   {
    "cell_type": "code",
-   "execution_count": 5,
+   "execution_count": 1,
    "id": "c37dce82",
    "metadata": {
     "scrolled": true
@@ -39,7 +39,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 6,
+   "execution_count": 2,
    "id": "a8ea2991",
    "metadata": {},
    "outputs": [
@@ -75,7 +75,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 7,
+   "execution_count": 3,
    "id": "3891b792",
    "metadata": {},
    "outputs": [],
@@ -106,10 +106,21 @@
     "#'009266_hippoSIT_2023-04-19_10-33-51',  # ch17, 4 + 55 correction, 6424 events: very weird 1-2nd in target, find out\n",
     "#'009266_hippoSIT_2023-04-20_08-57-39',  # ch1, 1 + 55 correction, 6424 events. Showcase for N2 / N3 mod in target\n",
     "#'009266_hippoSIT_2023-04-24_16-56-55',  # ch17, 5 + 55* correction, 6165 events, frequency\n",
-    "'009266_hippoSIT_2023-04-26_08-20-17',  # ch17, 12 + 55* correction, 6095 events, duration - showcase for N2 \n",
+    "#'009266_hippoSIT_2023-04-26_08-20-17',  # ch17, 12 + 55* correction, 6095 events, duration - showcase for N2 \n",
     "#'009266_hippoSIT_2023-05-02_12-22-14',  # ch20, 10 + 55 correction, 5976 events, FIXME very weird 1-2nd in target, find out\n",
     "#'009266_hippoSIT_2023-05-04_09-11-06',  # ch17, 5 + 55* correction, 4487 events, coma session with baseline AEPs\n",
     "#'009266_hippoSIT_2023-05-04_19-47-15',\n",
+    "    # PPC\n",
+    "#'009266_hippoSIT_2023-04-20_15-24-14',  # A1 ch20, PPC ch32, 60 + 55 correction, 5612 events\n",
+    "#'009266_hippoSIT_2023-04-21_08-43-00',  # A1 ch20, PPC ch32, 72 + 55 correction, 6282 events\n",
+    "#'009266_hippoSIT_2023-04-21_13-12-31',  # A1 ch20, PPC ch32, 72 + 55 correction, 6041 events\n",
+    "#'009266_hippoSIT_2023-04-24_10-08-11',  # A1 ch20, PPC ch40, 80 + 55 correction, 5579 events\n",
+    "    # HPC\n",
+    "'009266_hippoSIT_2023-05-22_09-27-22',\n",
+    "'009266_hippoSIT_2023-05-23_09-18-05',\n",
+    "'009266_hippoSIT_2023-05-25_15-55-57',\n",
+    "'009266_hippoSIT_2023-06-14_08-21-23',\n",
+    "'009266_hippoSIT_2023-06-19_08-58-35',\n",
     "]\n",
     "\n",
     "# FIXME move occupancy outside units\n",
@@ -149,7 +160,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 8,
+   "execution_count": 4,
    "id": "24cd2f6e",
    "metadata": {},
    "outputs": [
@@ -157,7 +168,11 @@
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "session 009266_hippoSIT_2023-04-26_08-20-17 done\n"
+      "session 009266_hippoSIT_2023-05-22_09-27-22 done\n",
+      "session 009266_hippoSIT_2023-05-23_09-18-05 done\n",
+      "session 009266_hippoSIT_2023-05-25_15-55-57 done\n",
+      "session 009266_hippoSIT_2023-06-14_08-21-23 done\n",
+      "session 009266_hippoSIT_2023-06-19_08-58-35 done\n"
      ]
     }
    ],

File diff suppressed because it is too large
+ 529 - 578
session/overview.ipynb


+ 16 - 8
session/sessions.py

@@ -1,19 +1,27 @@
 selected_009266 = [
-# frequency
+
+# PPC
 '009266_hippoSIT_2023-04-17_17-04-17',  # ch17, 20 + 55 correction, 5067 events. Showcase for N2 / N3 mod in target
 '009266_hippoSIT_2023-04-18_10-10-37',  # ch17, 10 + 55 correction, 5682 events
 '009266_hippoSIT_2023-04-18_17-03-10',  # ch17, 6 + 55 correction, 5494 events: FIXME very weird 1-2nd in target, find out
 '009266_hippoSIT_2023-04-19_10-33-51',  # ch17, 4 + 55 correction, 6424 events: very weird 1-2nd in target, find out
-'009266_hippoSIT_2023-04-24_16-56-55',  # ch17, 5 + 55* correction, 6165 events, frequency
-'009266_hippoSIT_2023-05-02_12-22-14',  # ch20, 10 + 55 correction, 5976 events, FIXME very weird 1-2nd in target, find out
-    
-# duration
 '009266_hippoSIT_2023-04-20_08-57-39',  # ch1, 1 + 55 correction, 6424 events. Showcase for N2 / N3 mod in target
+'009266_hippoSIT_2023-04-20_15-24-14',  # A1 ch20, PPC ch32, 60 + 55 correction, 5612 events
+'009266_hippoSIT_2023-04-21_08-43-00',  # A1 ch20, PPC ch32, 72 + 55 correction, 6282 events
+'009266_hippoSIT_2023-04-21_13-12-31',  # A1 ch20, PPC ch32, 72 + 55 correction, 6041 events
+'009266_hippoSIT_2023-04-24_10-08-11',  # A1 ch20, PPC ch40, 80 + 55 correction, 5579 events
+'009266_hippoSIT_2023-04-24_16-56-55',  # ch17, 5 + 55* correction, 6165 events, frequency
 '009266_hippoSIT_2023-04-26_08-20-17',  # ch17, 12 + 55* correction, 6095 events, duration - showcase for N2 
-'009266_hippoSIT_2023-05-04_19-47-15',  # ch20, 2 + 55 correction, 5678 events, duration
-
-# COMA
+'009266_hippoSIT_2023-05-02_12-22-14',  # ch20, 10 + 55 correction, 5976 events, FIXME very weird 1-2nd in target, find out
 '009266_hippoSIT_2023-05-04_09-11-06',  # ch17, 5 + 55* correction, 4487 events, COMA session with baseline AEPs
+'009266_hippoSIT_2023-05-04_19-47-15',  # ch20, 2 + 55 correction, 5678 events, duration
+    
+# HPC
+'009266_hippoSIT_2023-05-22_09-27-22',
+'009266_hippoSIT_2023-05-23_09-18-05',
+'009266_hippoSIT_2023-05-25_15-55-57',
+'009266_hippoSIT_2023-06-14_08-21-23',
+'009266_hippoSIT_2023-06-19_08-58-35',
 ]
 
 selected_008229 = [

File diff suppressed because it is too large
+ 522876 - 37710
sorting/ss-neurosuite.ipynb