{ "cells": [ { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [], "source": [ "# -*- coding: utf-8 -*-\n", "\"\"\"\n", "Created on Mon Aug 6 13:16:08 2018\n", "\n", "@author: kperks\n", "\"\"\"\n", "# INITIALIZATION\n", "from neo import Spike2IO\n", "import os\n", "import numpy as np\n", "import scipy.io as sio\n", "import scipy \n", "import h5py\n", "from scipy import stats\n", "from scipy import signal\n", "import matplotlib.pyplot as plt\n", "import matplotlib.axes as ax\n", "from pathlib import Path\n", "import random\n", "import sys\n", "import pandas as pd\n", "import seaborn as sns\n", "from scipy.stats import spearmanr, ks_2samp " ] }, { "cell_type": "code", "execution_count": 36, "metadata": {}, "outputs": [], "source": [ "def get_range_bounds(win_times,trigger):\n", " start = np.min(np.where(trigger>win_times[0])[0])\n", " stop= np.max(np.where(triggert1)&(analeventt)&(data= np.min(tin)) & (x <= np.max(tin))]\n", " dx = np.sort(np.diff(np.sort(x)))\n", " dt_samp = dx[np.nonzero(dx)][0]\n", " if dt_samp > np.min(np.diff(tin)):\n", " t = np.linspace(np.min(tin), np.max(tin), np.min([int(np.ceil(T / dt_samp)), int(1e3)]))\n", " else:\n", " t = tin\n", "\n", " # calculate delta t\n", " dt = min(np.diff(t))\n", "\n", " # create the finest histogram\n", " thist = np.concatenate((t, (t[-1]+dt)[np.newaxis]))\n", " y_hist = np.histogram(x_ab, thist-dt/2)[0]\n", " N = sum(y_hist).astype(np.float)\n", " y_hist = y_hist / N / dt\n", "\n", " # always provide W\n", " C = np.zeros((1, len(W)))[0]\n", " C_min = np.Inf\n", " ymat=[]\n", " for k, w in enumerate(W):\n", " C[k], yh = CostFunction(y_hist, N, w, dt)\n", " ymat.append(yh)\n", " if((C[k] < C_min).any()):\n", " C_min = C[k]\n", " optw = w\n", " y = yh\n", " ymat = np.asarray(ymat) \n", " \n", " return y, t, optw, C, C_min\n", "\n", "def get_spikes_kde(trigger_times,data_times,trigger,trial_duration):\n", " aligned_times = []\n", " for t in trigger_times:\n", " thesetimes = data_times[\n", " np.where((data_times>(trigger[t]))&(data_times<(trigger[t]+trial_duration)))[0]\n", " ]-(trigger[t])\n", " aligned_times = np.concatenate((aligned_times,thesetimes)) \n", "\n", " return np.asarray(aligned_times)\n", "\n", "def get_optw(these_trials,spikes,xtime,trigger,trial_duration):\n", " W=np.arange(0.006,0.2,0.002)\n", " aligned_times = get_spikes_kde(these_trials,spikes,trigger,trial_duration)\n", " y, t, optw, C, C_min = sskernel(aligned_times, xtime, W)\n", " \n", " return y, t, optw, C, C_min" ] }, { "cell_type": "code", "execution_count": 48, "metadata": {}, "outputs": [], "source": [ "meta = pd.read_csv('MetaData.csv')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "What are the summary statistics for the trial duration in paired experiments? (for Methods section)" ] }, { "cell_type": "code", "execution_count": 58, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "count 33.000000\n", "mean 2.757576\n", "std 1.565603\n", "min 1.000000\n", "25% 1.500000\n", "50% 2.000000\n", "75% 4.000000\n", "max 6.000000\n", "Name: trial_duration, dtype: float64" ] }, "execution_count": 58, "metadata": {}, "output_type": "execute_result" } ], "source": [ "meta[((meta['trigger_type']=='paired')\n", " &(meta['phase']=='phase1')\n", " &((meta['condition']=='aen_vent') \n", " | (meta['condition']=='aen_proprio') \n", " |(meta['condition']=='aff_proprio')))].trial_duration.describe()" ] }, { "cell_type": "code", "execution_count": 47, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "0 20050727_3224_SPK.mat\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "/anaconda3/lib/python3.6/site-packages/neo-0.8.0.dev0-py3.6.egg/neo/rawio/spike2rawio.py:609: RuntimeWarning: overflow encountered in short_scalars\n", " info['time_per_adc']) * 1e-6\n", "/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:61: DeprecationWarning: object of type cannot be safely interpreted as an integer.\n", "/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:96: DeprecationWarning: object of type cannot be safely interpreted as an integer.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "1 20050728_3404_latency1_SPK.mat\n", "2 20050728_3404_latency2_SPK.mat\n", "3 20050808_3629_latency1_SPK.mat\n", "4 20050808_3629_latency2_SPK.mat\n", "5 20050811_3221_SPK.mat\n", "6 20050818_2874_SPK.mat\n", "7 20050819_2601_SPK.mat\n", "8 20050820_2960_trial1_SPK.mat\n", "9 20050824_2627_control_SPK.mat\n", "10 20050824_2627_trial1_SPK.mat\n", "11 20060626_2815_SPK.mat\n", "12 20060627_2673_SPK.mat\n", "13 20060708_3004_SPK.mat\n", "14 20060710_2767_SPK.mat\n", "15 20060710_3196_SPK.mat\n", "16 20060804_2107_SPK.mat\n", "17 20060812_2921_SPK.mat\n", "18 20060812_3066_SPK.mat\n", "19 20060812_3205_SPK.mat\n", "20 20060816_3893_SPK.mat\n", "21 20070718_598_coupled_SPK.mat\n", "22 20070719_91_coupled_SPK.mat\n", "23 20070720_3352_SPK.mat\n", "24 20070723_287_SPK.mat\n", "25 20070723_311_SPK.mat\n", "26 20070723_339_SPK.mat\n", "27 20070723_415_SPK.mat\n", "28 20070727_3798_SPK.mat\n", "29 20070727_3798_control_SPK.mat\n", "30 20070816_3375_SPK.mat\n", "31 20070816_3509_SPK.mat\n", "32 20071025_3335_SPK.mat\n", "33 20071025_3335_control_SPK.mat\n", "using pre pairing expt to get baseline (recovery end)\n", "34 20071026_3108_SPK.mat\n", "35 20071026_3108_control_SPK.mat\n", "using pre pairing expt to get baseline\n", "36 20170810_expt5_vent_SPK.mat\n", "37 20180124_expt1_swim_SPK.mat\n", "38 20170815_expt8_vent_SPK.mat\n", "39 20170815_expt9_vent_SPK.mat\n", "40 20180124_expt2_swim_SPK.mat\n", "41 20180407_freerun2_SPK.mat\n", "42 20180407_freerun3_SPK.mat\n", "43 20180407_freerun4_SPK.mat\n", "44 20180407_freerun5_SPK.mat\n", "45 20170815_expt10_swim_SPK.mat\n", "46 20170810_expt4_vent_SPK.mat\n", "47 20050808_3629_latency2b_SPK.mat\n", "48 20050808_3629_latency1b_SPK.mat\n", "49 20070801_3838_SPK.mat\n", "50 20070801_3838_control_SPK.mat\n", "51 20070808_3727_SPK.mat\n" ] } ], "source": [ "######MAIN##########\n", "\n", "meta = pd.read_csv('MetaData.csv')\n", "\n", "for iexpt,thisexpt_name in enumerate(meta.exptname):\n", " dt = 1/1000 \n", " ntrials_per_period = 75 #results did not depend on choice of 50, 75, or 100\n", "\n", " print(iexpt,thisexpt_name)\n", "\n", " thisexpt = meta.loc[meta['exptname'] == thisexpt_name]\n", "\n", " trial_duration = thisexpt.trial_duration.values[0]\n", "\n", " basename = thisexpt_name[0:-8]\n", " data_folder = Path.cwd() / basename\n", " file_to_open = data_folder / Path(basename + '.smr')\n", " bl = Spike2IO(file_to_open,try_signal_grouping=False).read_block()\n", "\n", " trigger = []\n", " for sublist in np.asarray([seg.events[[s.name for \n", " s in seg.events].index(thisexpt.chan_trigger.values[0])].magnitude \n", " for seg in bl.segments]):\n", " for item in sublist:\n", " trigger.append(item)\n", " trigger = np.asarray(trigger)\n", "\n", " if thisexpt.spike_times_from.values[0] == 'Spyking-Circus':\n", " spikes = np.load(data_folder / 'spikes.npy')\n", "\n", " if thisexpt.spike_times_from.values[0] == 'Spike2':\n", " spikes = []\n", " for sublist in np.asarray([seg.events[[s.name for \n", " s in seg.events].index(thisexpt.chan_spikes.values[0])].magnitude \n", " for seg in bl.segments]):\n", " for item in sublist:\n", " spikes.append(item)\n", " spikes = np.asarray(spikes)\n", "\n", " postpairing_range = get_range_bounds([thisexpt.post_start.values[0],\n", " thisexpt.post_stop.values[0]],trigger)\n", " stim_range = get_range_bounds([thisexpt.stim_start.values[0],\n", " thisexpt.stim_stop.values[0]],trigger)\n", " base_range = get_range_bounds([thisexpt.base_start.values[0],\n", " thisexpt.base_stop.values[0]],trigger)\n", "\n", " if thisexpt_name == '20060812_3066_SPK.mat':\n", " #removing a few trials in which spike detection was clearly mis-triggered due to movement artifacts\n", " removeinds = np.arange(360,np.max(postpairing_range)-1,1)\n", " [postpairing_range.remove(x) for x in removeinds]\n", " removeinds = [263, 264, 265, 326, 327] \n", " [postpairing_range.remove(x) for x in removeinds]\n", "\n", " xtime = np.linspace(0,trial_duration,trial_duration/dt)\n", "\n", " iei = [(TrialEventTimes(trigger,trigger,trial,10))[0] if len(TrialEventTimes(trigger,trigger,trial,10))>0 else 0 for trial in list(range(0,len(trigger)-1))]\n", "\n", " these_trials = base_range\n", " y, t, optw, C, C_min= get_optw(these_trials,spikes,xtime,trigger,trial_duration)\n", "\n", " data_func = filtered_response(spikes, optw)\n", "\n", " baseline_r = get_response(trigger[these_trials],data_func,trial_duration,optw)\n", " baseline_mean = np.mean(baseline_r,0)\n", "\n", " n_trials = len(base_range)\n", " base_spkrt = [np.shape(np.where((spikes>(trigger[t]))&(spikes<(trigger[t]+trial_duration)))[0])[0]\n", " /trial_duration for t in base_range]\n", "\n", " spikemat_base = [spikes[np.where((spikes>trigger[t])&\n", " (spikes(supptrigger[t]))&\n", " (suppspikes<(supptrigger[t]+trial_duration)))[0])[0]\n", " /trial_duration for t in suppbase_range]\n", " spikemat_base = [suppspikes[np.where((suppspikes>supptrigger[t])&\n", " (suppspikes(supptrigger[t]))&\n", " (suppspikes<(supptrigger[t]+trial_duration)))[0])[0]\n", " /trial_duration for t in suppbase_range]\n", " spikemat_base = [suppspikes[np.where((suppspikes>supptrigger[t])&\n", " (suppspikes(trigger[t]))&(spikes<(trigger[t]+trial_duration)))[0])[0]\n", " /trial_duration for t in these_trials]\n", " y, t, optw_stim, C, C_min= get_optw(these_trials,spikes,xtime,trigger,trial_duration)\n", " stiminitial_r = get_response(trigger[these_trials],data_func,trial_duration,optw)\n", " stiminitial_response = np.mean(stiminitial_r,0)\n", "\n", " stimfinal_trials = stim_range[-int(ntrials/2):]\n", " these_trials = stimfinal_trials\n", " stimf_spkrt = [np.shape(np.where((spikes>(trigger[t]))&(spikes<(trigger[t]+trial_duration)))[0])[0]\n", " /trial_duration for t in these_trials]\n", " stimfinal_r = get_response(trigger[these_trials],data_func,trial_duration,optw)\n", " stimfinal_response = np.mean(stimfinal_r,0)\n", "\n", " npost = len(postpairing_range)\n", " ntrials = np.min([npost,ntrials_per_period])\n", " postpairing_trials = postpairing_range[0:ntrials]\n", " these_trials = postpairing_trials\n", " post_spkrt = [np.shape(np.where((spikes>(trigger[t]))&(spikes<(trigger[t]+trial_duration)))[0])[0]\n", " /trial_duration for t in these_trials]\n", " postpairing_r = get_response(trigger[these_trials],data_func,trial_duration,optw)\n", " postpairing_response = np.mean(postpairing_r,0)\n", "\n", " recovery_spkrt = np.nan\n", " recovery_response = np.full_like(np.empty(len(baseline_mean)),np.nan)\n", " recovery_trials = [np.nan]\n", " if npost>=(300+ntrials_per_period):\n", " recovery_trials = postpairing_range[300:300+ntrials_per_period]\n", " recovery_spkrt = [np.shape(np.where((spikes>(trigger[t]))&(spikes<(trigger[t]+trial_duration)))[0])[0]\n", " /trial_duration for t in recovery_trials]\n", " these_trials = recovery_trials\n", " recovery_r = get_response(trigger[these_trials],data_func,trial_duration,optw)\n", " recovery_response = np.mean(recovery_r,0)\n", "\n", "\n", " spikemat_stim = [spikes[np.where((spikes>trigger[t])&\n", " (spikestrigger[t])&\n", " (spikes