% NOTE: recovered from a diff dump (original hunk header: @@ -0,0 +1,550 @@);
% '+' line prefixes and '|' filler are transcription artifacts, not code.
clear;

%% IMPORTANT NOTES

% Voltage conversion equation: Force = m * ([Voltage] - b)
% These constants will change if you use a different pull module or
% recalibrate it (use Check_Pull_Calibration_Values script to recalculate)

% paper position for saving .pdfs
% x = -0.8, y = 1.0, width = 11.0, height = 7.1875

%% Set Variables

fr = 29.6;      % Framerate (imaging frames per second)
frames = 10000; % Number of frames in the session
m = 25.045;     % Constants from Pull Calibration Script:
b = 1.628;      % Force = m * ([Voltage] - b)

% m = 21.973, b = 0.049 (Newer values, but recalibrate for future experiments)

%% Select one h5 file:

foldername = uigetdir('C:\', 'Choose folder to save results');
if isequal(foldername, 0)
    % Fix: previously an unchecked cancel left foldername = 0 and every
    % later save(fullfile(foldername, ...)) call would fail.
    disp('User pressed cancel')
    return
end

[filename, pathname] = uigetfile('*.h5', 'Pick a Sync Episode file');
if isequal(filename, 0) || isequal(pathname, 0)
    disp('User pressed cancel')
    return
else
    disp(['User selected ', fullfile(pathname, filename)])
end

fnam = fullfile(pathname, filename);

% Project helper: loads the ThorSync episode (provides at least
% .time, .FrameIn and .Pull fields, as used below).
dataOut = ThorsyncLoadSimple(fnam);
time = dataOut.time;
trial = dataOut.FrameIn;

% Keep only samples where the frame-in trigger line is high.
time(trial(1,:) == 0) = [];

% One entry per trial onset (tolerance collapses repeated samples of the
% same edge); the 0.05 s offset backs up to just before the trigger.
Frame_In = uniquetol(time', 0.0001) - 0.05;
pTA = 2;                  % frames of padding before/after a pull
end_frames = frames - 1;  % last usable frame index

% Voltage thresholds for the force bins (inverse of Force = m*(V - b)).
% NOTE(review): 'lower' and 'upper' shadow MATLAB's built-in lower()/upper();
% names kept because the rest of the script references them.
lower = (15/m) + b;  % 15 g
upper = (18/m) + b;  % 18 g
three = (3/m) + b;   %  3 g
nine  = (9/m) + b;   %  9 g
|
|
|
%% Extract and Classify Pulls from Thorsync File

% Finds peaks (and their locations/widths) for pulls over 3 grams,
% at least 0.4 s apart and no wider than 1 s.
[pks, locs, widths] = findpeaks(dataOut.Pull, dataOut.time, 'MinPeakHeight', three, 'MinPeakDistance', 0.4, 'MaxPeakWidth', 1);
peakTimes = locs;              % peak times in seconds
peakFrames = peakTimes * fr;   % peak times in (fractional) imaging frames
pks = pks(1: length(peakTimes));
NumPeaks = length(peakFrames);

widthFrames = (widths/2) * fr;   % half-width of each pull, in frames
widthFrames_full = widths * fr;  % full width of each pull, in frames
avg_pull_frame = round(mean(widthFrames_full), 0);
if avg_pull_frame == 0
    avg_pull_frame = 1;  % never allow a zero-length averaging window
end

intFrames = round(peakFrames);
% Convert frames to the 1e-5 s integer timebase used to index dataOut.Pull.
% Fix: uses fr instead of a hard-coded 29.6 so a framerate change propagates.
intTimes = round((peakFrames/fr) * 100000);
avg_pull_time = round((avg_pull_frame/fr) * 100000);
if avg_pull_time == 0
    avg_pull_time = 1;
end

% Start/end of each pull: half-width either side of its peak (vectorized;
% replaces an element-wise loop with identical results).
trial_start = peakFrames - widthFrames;
trial_end   = peakFrames + widthFrames;
R_trial_start = floor(trial_start);
R_trial_end = ceil(trial_end);

% Find start and end times of pulls based on the width of the peak,
% interleaved as [start1 end1 start2 end2 ...].
pullFrames = reshape([R_trial_start(:) R_trial_end(:)]', [1, 2* NumPeaks]);

if pullFrames(1,1) < 3 % excludes first trial if there are an insufficient number of pull frames for it
    pullFrames(:,1) = [];
    pullFrames(:,1) = [];
    pks(:,1) = [];
end

% Excludes pulls that are cut off when ThorSync recording stops.
pullFrames(pullFrames > end_frames - pTA) = [];
if mod(length(pullFrames), 2) > 0
    pullFrames(end) = [];
    pks(end) = [];
end

pullForce = m * (pks - b);  % convert peak voltages back to grams

save(fullfile(foldername,'pullForce'),'pullForce','-v7.3');

% NOTE(review): if the first peak was dropped above, pks and peakTimes are
% misaligned by one entry in the bin classifications below — confirm intent.

% Unsuccessful pulls: everything outside the 15-18 g success band.
[row, col] = find(pks > lower & pks < upper);
temp = peakTimes;
temp(:,unique(col)) = [];
widthFrames(:,unique(col)) = [];
unsuccessWidth = widthFrames;
unsuccessPeaks = temp * fr;

% Force bins (peak times converted to frames) for later saving.
[row, col] = find(pks > three & pks < nine);
threetonine = peakTimes(:,col);
threetonineFrames = threetonine * fr;

[row, col] = find(pks > nine & pks < lower);
ninetofifteen = peakTimes(:,col);
ninetofifteenFrames = ninetofifteen * fr;

[row, col] = find(pks > upper);
eighteenplus = peakTimes(:,col);
eighteenplusFrames = eighteenplus * fr;
|
|
|
%% Classify Successful and Unsuccessful Peaks based on Thorsync Voltage

% Unsuccessful pulls: window each peak by its half-width (vectorized;
% replaces an element-wise loop with identical results).
trial_start = unsuccessPeaks - unsuccessWidth;
trial_end   = unsuccessPeaks + unsuccessWidth;
R_trial_start = floor(trial_start);
R_trial_end = ceil(trial_end);

unsuccessFrames = reshape([R_trial_start(:) R_trial_end(:)]', [1, 2* length(unsuccessPeaks)]);

if unsuccessFrames(1,1) < 3 % excludes first trial if there are an insufficient number of pull frames for it
    unsuccessFrames(:,1) = [];
    unsuccessFrames(:,1) = [];
end

% Excludes pulls that are cut off when ThorSync recording stops.
unsuccessFrames(unsuccessFrames > frames - pTA) = [];
if mod(length(unsuccessFrames), 2) > 0
    unsuccessFrames(end) = [];
end

% Re-detect, keeping only peaks above the 15 g (success) threshold.
[pks, locs, widths] = findpeaks(dataOut.Pull, dataOut.time, 'MinPeakHeight', lower , 'MinPeakDistance', 0.4, 'MaxPeakWidth', 1);

if ~isempty(pks)
    % Successful pulls are those not exceeding 18 g.  Fix: the previous
    % code wrote the sentinel 1 into locs/widths and then deleted every
    % element equal to 1, which would also have removed a legitimate peak
    % at exactly t = 1 s or of width exactly 1 s.  A logical mask deletes
    % only the overshooting peaks.
    overshoot = pks > upper;
    locs(overshoot) = [];
    widths(overshoot) = [];

    if ~isempty(locs)
        successPeaks = locs * fr;        % peak frames
        successWidth = (widths/2) * fr;  % half-widths in frames

        trial_start = successPeaks - successWidth;
        trial_end   = successPeaks + successWidth;
        R_trial_start = floor(trial_start);
        R_trial_end = ceil(trial_end);

        successFrames = reshape([R_trial_start(:) R_trial_end(:)]', [1, 2* length(locs)]);

        if successFrames(1,1) < 3 % excludes first trial if there are an insufficient number of pull frames for it
            successFrames(:,1) = [];
            successFrames(:,1) = [];
        end

        % Excludes pulls that are cut off when ThorSync recording stops.
        successFrames(successFrames > frames - pTA) = [];
        if mod(length(successFrames), 2) > 0
            successFrames(end) = [];
        end
    else
        successPeaks = [];
        successFrames = [];
    end
else
    successPeaks = [];
    successFrames = [];
end
|
|
|
+%outputs an array of pulls between 15 and 18 grams
|
|
|
+
|
|
|
% Build fixed-width windows (average pull width) around every peak, both
% in imaging frames (meanFrames) and in the 1e-5 s timebase (meanTimes).
% Vectorized; replaces an element-wise loop with identical results.
mean_start = intFrames - avg_pull_frame;
mean_end   = intFrames + avg_pull_frame;
time_start = intTimes - avg_pull_time;
time_end   = intTimes + avg_pull_time;

meanFrames = reshape([mean_start(:) mean_end(:)]', [1, 2*NumPeaks]);
meanTimes  = reshape([time_start(:) time_end(:)]', [1, 2*NumPeaks]);

% Drop the first window if it starts too close to the recording start.
if meanFrames(1,1) < 3
    meanFrames(:,1) = [];
    meanFrames(:,1) = [];
end

% Drop windows past the end of the recording; keep start/end pairs intact.
meanFrames(meanFrames > end_frames) = [];
if mod(length(meanFrames), 2) > 0
    meanFrames(end) = [];
end

% Same trimming for the time-based windows.  Fix: uses fr instead of a
% hard-coded 29.6 so a framerate change propagates.
if meanTimes(1,1) < (3/fr * 100000)
    meanTimes(:,1) = [];
    meanTimes(:,1) = [];
end

meanTimes(meanTimes > (end_frames/fr * 100000)) = [];
if mod(length(meanTimes), 2) > 0
    meanTimes(end) = [];
end

window = 5; % # of seconds in each trial
Frame_In  = round(Frame_In * 100000);      % trial starts, 1e-5 s ticks
Frame_Out = Frame_In + (window * 100000);  % trial ends, 1e-5 s ticks

Frame_times = reshape([Frame_In(:) Frame_Out(:)]', [1, 2*length(Frame_In)]);

% Trim trials that run past the end of the recording; keep pairs intact.
Frame_times(Frame_times > ((end_frames/fr)*100000)) = [];
if mod(length(Frame_times), 2) > 0
    Frame_times(end) = [];
end
|
|
|
%% Initialize variables for Correlations and Classification

% Fix: renamed from 'dir' so MATLAB's built-in dir() is no longer shadowed.
dataDir = '';
autoClassifyNeurons = true;
pTA = 2; % frames before and after pull that should be included in the average

% Project helper: classifies neurons and returns fluorescence data.
[newNeurons,fluorescenceData,classifications,binaryPullTimes,pulls,options] = processDFFVars(dataDir,pullFrames,fr,autoClassifyNeurons,pTA);

% Unpack data from function call.
numFrames = options.numFrames;
numNeurons = options.numNeurons;
xpoints = options.xpoints;
framerate = options.framerate;

Fdf = fluorescenceData.Fdf;
Cd = fluorescenceData.Cd;
Sp = fluorescenceData.Sp;

% Binary (0/1) per-frame mask of successful-pull windows, padded by pTA.
if ~isempty(locs)
    binarySuccessTimes = zeros(1,numFrames);
    for i = 1:2:length(successFrames)
        binarySuccessTimes(successFrames(i) - pTA:successFrames(i+1) + pTA) = 1;
    end
else
    binarySuccessTimes = [];
end

% Binary per-frame mask of unsuccessful-pull windows, padded by pTA.
binaryUnsuccessTimes = zeros(1,numFrames);
for i = 1:2:length(unsuccessFrames)
    binaryUnsuccessTimes(unsuccessFrames(i) - pTA:unsuccessFrames(i+1) + pTA) = 1;
end

% Min-max normalize each neuron's trace to [0, 1].
% NOTE(review): a perfectly flat trace would divide by zero here (NaNs) —
% presumably such traces never occur; confirm upstream.
Cd_N = [];
for i = 1:numNeurons
    Cd_N(i,:) = (Cd(i,:) - min(Cd(i,:))) / (max(Cd(i,:)) - min(Cd(i,:)));
end

fulldff = Cd_N';  % frames x neurons, normalized
fullCd = Cd;

% Per-class subsets of the normalized traces.
activeCd = Cd_N(classifications.active,:);
quiescentCd = Cd_N(classifications.quiescent,:);
indiscCd = Cd_N(classifications.indisc,:);

% Restrict to classified cells only, in ascending index order.
cells = cat(1, classifications.active, classifications.quiescent, classifications.indisc);
cells = sortrows(cells, 'ascend');

Cd_N = Cd_N(cells,:);
Cd = Cd(cells,:);

numNeurons = size(Cd_N,1);
|
|
|
+
|
|
|
% Build a fixed +/- n frame window around every pull peak.
numPulls = length(peakFrames);
peakFrames_2 = round(peakFrames');  % column-vector snapshot of peak frames
n = 10;                             % frames kept before/after each peak
% NOTE(review): this trims peakFrames AFTER peakFrames_2 was snapshotted,
% so it only changes the peakFrames variable that gets saved later.
peakFrames(peakFrames < n + 1) = [];

% Window edges, interleaved as [start1 stop1 start2 stop2 ...].
start  = (peakFrames_2 - n)';
finish = (peakFrames_2 + n)';
pullFrames_2 = reshape([start(:) finish(:)]', [1, 2* numPulls]);

% Clip windows that run past the trace; keep start/stop pairs intact.
% NOTE(review): length() of a matrix returns its longest dimension —
% presumably size(Cd_N,2) (number of frames) is what is intended here.
pullFrames_2(pullFrames_2 > length(Cd_N)) = [];
if mod(length(pullFrames_2), 2) > 0
    pullFrames_2(end) = [];
end

% Per-pull behavior snippets (raw pull signal) and per-neuron activity
% snippets (normalized traces).  Only odd j indices are filled; the empty
% even cells are squeezed out below.
behavior = cell(1, length(meanFrames) / 2);
activity = cell(numNeurons, length(meanFrames) / 2);

for neuron = 1:numNeurons
    for j = 1:2:length(meanFrames)
        behavior{1,j} = dataOut.Pull(1, meanTimes(j) : meanTimes(j+1));
        activity{neuron,j} = Cd_N(neuron, pullFrames_2(j) : pullFrames_2(j+1));
    end
end

% Drop the empty even-index cells and restore the neurons-by-pulls shape.
activity(cellfun('isempty',activity)) = [];
activity = reshape(activity, [numNeurons, length(meanFrames)/2]);
behavior(cellfun('isempty',behavior)) = [];
activity_cat = cell2mat(activity);
|
|
|
+
|
|
|
% [coeff,score,latent,tsquared,explained,mu] = pca(activity_cat);
% Preliminary pca analysis for activity

% Correlation between each pull's behavior trace and the next pull's.
% Fix: the original wrapped this in a for-loop over neurons, recomputing
% the identical values numNeurons times; the loop body was invariant in i
% and the redundant outer loop is removed.
for j = 1:size(behavior,2) - 1
    temp = corrcoef(behavior{1,j}, behavior{1,j+1});
    pull_corr{1,j} = temp(1,2);
end

% Correlation between each neuron's activity and the next neuron's.
for i = 1:numNeurons - 1
    temp2 = corrcoef(activity_cat(i,:), activity_cat(i+1,:));
    activity_corr(i,:) = temp2(1,2);
end

% Summary statistics of behavioral and neural similarity.
avg_pull_corr = mean(abs(cell2mat(pull_corr)));
avg_act_corr = mean(activity_corr);
Correlations = struct('pull_corr',avg_pull_corr, 'activity_corr', avg_act_corr);
|
|
|
+
|
|
|
+%% Code for activity correlation between successful pulls and previous or following pull
|
|
|
+
|
|
|
+% [~, index_successPeaks, index_peakTimes] = intersect(successPeaks',peakFrames','rows');
|
|
|
+% successloc = zeros(1, length(peakTimes));
|
|
|
+%
|
|
|
+% successloc(:,index_peakTimes) = 1;
|
|
|
+% successloc(1,1) = 0;
|
|
|
+%
|
|
|
+% for i = 1:size(behavior,2) - 1
|
|
|
+% if successloc(:,i) == 1
|
|
|
+% temp = corrcoef(behavior{1,i}, behavior{1,i-1});
|
|
|
+% suc_prevcorr{1,i} = temp(1,2);
|
|
|
+% else
|
|
|
+% temp = NaN;
|
|
|
+% suc_prevcorr{1,i} = NaN;
|
|
|
+% end
|
|
|
+% end
|
|
|
+%
|
|
|
+% for i = 1:size(behavior,2) - 1
|
|
|
+% if successloc(:,i) == 1
|
|
|
+% temp = corrcoef(behavior{1,i}, behavior{1,i+1});
|
|
|
+% suc_postcorr{1,i} = temp(1,2);
|
|
|
+% else
|
|
|
+% temp = NaN;
|
|
|
+% suc_postcorr{1,i} = NaN;
|
|
|
+% end
|
|
|
+% end
|
|
|
+%
|
|
|
+% avg_presucc_corr = nanmean(abs(cell2mat(suc_prevcorr)));
|
|
|
+% avg_postsucc_corr = nanmean(abs(cell2mat(suc_postcorr)));
|
|
|
+%
|
|
|
+%
|
|
|
+% x1 = [1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]';
|
|
|
+% temp = repmat(x1, size(activity,2));
|
|
|
+% x1 = temp(:,1);
|
|
|
+% x2 = [0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0]';
|
|
|
+% temp = repmat(x2, size(activity,2));
|
|
|
+% x2 = temp(:,1);
|
|
|
+% x3 = [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1]';
|
|
|
+% temp = repmat(x3, size(activity,2));
|
|
|
+% x3 = temp(:,1);
|
|
|
+%
|
|
|
+% activity_cat = cell2mat(activity);
|
|
|
+%
|
|
|
+% coefficients = cell(numNeurons,1);
|
|
|
+% for i = 1:numNeurons
|
|
|
+% x4 = activity_cat(i,:)';
|
|
|
+% tbl = table(x1,x2,x3,x4, 'VariableNames', {'pre', 'pull', 'post', 'activity'});
|
|
|
+% mdl = fitlm(tbl);
|
|
|
+% coefficients{i,1} = mdl.Coefficients;
|
|
|
+% %r_square{i,1} = mdl.Rsquared;
|
|
|
+% end
|
|
|
+
|
|
|
%% Preliminary Analysis of Peaks during Pulls vs Quiescent Periods

flourFrames = cell(1,numNeurons);       % per-neuron fluorescence-peak frames
behaviorPeaks = zeros(1,numNeurons);    % peaks during any pull window
nonBehaviorPeaks = zeros(1,numNeurons); % peaks outside pull windows
SbehaviorPeaks = zeros(1,numNeurons);   % peaks during successful pulls
UbehaviorPeaks = zeros(1,numNeurons);   % peaks during unsuccessful pulls
allPeaks = zeros(1,numNeurons);         % all detected peaks

for i = 1:numNeurons
    ROI = Cd_N(i,:)';
    [pks, locs, widths] = findpeaks(ROI, xpoints/framerate, 'MinPeakHeight', 0.1);
    flourFrames{1,i} = round(locs * fr, 0);  % peak times -> frame indices

    % Fix: the original merged the success and unsuccess flags into one
    % array, so SbehaviorPeaks and UbehaviorPeaks were always identical
    % (and it duplicated the temp2 = locs * fr assignment).  Separate
    % logical masks give each counter its own meaning.
    nPk = length(locs);
    inPull    = false(1, nPk);
    inSuccess = false(1, nPk);
    inUnsucc  = false(1, nPk);
    for k = 1:nPk
        frameIdx = flourFrames{1,i}(1,k);
        inPull(k) = binaryPullTimes(1,frameIdx) > 0;
        if ~isempty(binarySuccessTimes)
            inSuccess(k) = binarySuccessTimes(1,frameIdx) > 0;
        end
        inUnsucc(k) = binaryUnsuccessTimes(1,frameIdx) > 0;
    end

    behaviorPeaks(1,i)    = sum(inPull);
    nonBehaviorPeaks(1,i) = sum(~inPull);
    SbehaviorPeaks(1,i)   = sum(inSuccess);
    UbehaviorPeaks(1,i)   = sum(inUnsucc);
    allPeaks(1,i)         = nPk;
end

% Fraction of each neuron's peaks that fall inside pull windows; neurons
% with fewer than 5 peaks are excluded as too noisy to estimate.
peakPercent = behaviorPeaks ./ allPeaks;
peakPercent(allPeaks < 5) = [];

[plotPercent,index] = sortrows(peakPercent', 'descend');
x = 1:length(peakPercent);
% figure
% scatter(x, plotPercent)

behavior_ca = struct('behaviorPeaks', behaviorPeaks, 'nonBehaviorPeaks', nonBehaviorPeaks, 'SbehaviorPeaks', SbehaviorPeaks, 'UbehaviorPeaks', UbehaviorPeaks, 'allPeaks', allPeaks);
|
|
|
+
|
|
|
% Choose which data we want to use for everything.
dff = Cd_N';  % frames x neurons
% Cd_N = normalized, Cd = not normalized

% Quick-look heatmap of the (unnormalized) traces.
figure;
K = heatmap(Cd);
K.GridVisible = 'off';
K.Colormap = hot;

%% Save files for segmentation

% Each variable is written to its own -v7.3 MAT-file in the results folder.
toSave = {'Cd_N', 'classifications', 'activeCd', 'quiescentCd', ...
          'indiscCd', 'peakFrames', 'successPeaks', 'threetonineFrames', ...
          'ninetofifteenFrames', 'eighteenplusFrames', 'behavior_ca', ...
          'Correlations'};
for s = 1:numel(toSave)
    save(fullfile(foldername, toSave{s}), toSave{s}, '-v7.3');
end

% NOTE(review): the file named 'unsuccessFrames' stores the variable
% 'unsuccessPeaks' (as in the original script) — confirm which one
% downstream code expects before renaming either.
save(fullfile(foldername,'unsuccessFrames'),'unsuccessPeaks','-v7.3');
|
|
|
+
|
|
|
%% Start by looking at all neurons plotted on same plot and stacked
% Colors (MATLAB's default line-color order)
co = ...
    [0      0.4470 0.7410;
     0.8500 0.3250 0.0980;
     0.9290 0.6940 0.1250;
     0.4940 0.1840 0.5560;
     0.4660 0.6740 0.1880;
     0.3010 0.7450 0.9330;
     0.6350 0.0780 0.1840];

%% Plot shows all neuron plots stacked.

figure;
% Offset each neuron's trace vertically by its index so traces stack.
plot(xpoints/framerate, bsxfun(@plus,dff,0:numNeurons-1))
hold on

% Shade unsuccessful-pull windows in grey.
p = patch(xpoints/framerate, binaryUnsuccessTimes * numNeurons, 'k');
set(p, 'FaceAlpha', 0.2)
set(p, 'EdgeAlpha', 0)

% Shade successful-pull windows in red.
if ~isempty(successPeaks)
    % Fix: this patch handle was previously assigned to 'm', silently
    % clobbering the pull-calibration constant of the same name.
    hSucc = patch(xpoints/framerate, binarySuccessTimes * numNeurons, 'r');
    set(hSucc, 'FaceAlpha', 0.4)
    set(hSucc, 'EdgeAlpha', 0)
end

nvs = gca;
nvs.XTick = 0:50:max(xpoints/framerate);
nvs.YTick = 0:10:numNeurons;
if framerate ~= 1
    xlabel('Time (seconds)');
else
    xlabel('Frame');
end
ylabel('Neuron ID');
title('All Neurons Stacked');
set(gca,'fontsize',24)
set(gca,'LooseInset',get(gca,'TightInset'));

saveas(gcf,strcat(foldername,'/dff','.fig'));
|
|
|
+
|
|
|
+
|
|
|
%% Population, Active, Quiescent, Indiscriminant Averages
active = classifications.active;
quiesc = classifications.quiescent;
indisc = classifications.indisc;

% Shared time axis for all traces.
t = xpoints/framerate;

% Each group average is offset vertically (+3, +2, +1, 0) so traces stack.
figure;
plot(t, mean(fulldff,2)+3,           'Color', co(1,:)) % Population Average
hold on;
plot(t, mean(fulldff(:,active),2)+2, 'Color', co(2,:)) % Active Average
plot(t, mean(fulldff(:,quiesc),2)+1, 'Color', co(3,:)) % Quiescent Average
plot(t, mean(fulldff(:,indisc),2),   'Color', co(4,:)) % Indiscriminant Active Average

lgd = legend(['Population Average (n=' num2str(length(newNeurons)) ')'], ...
             ['Active Average (n=' num2str(length(active)) ')'], ...
             ['Quiescent Average (n=' num2str(length(quiesc)) ')'], ...
             ['Indiscriminant Average (n=' num2str(length(indisc)) ')'], ...
             'Location', 'eastoutside');
% plot(xpoints/framerate, mean(dffActive,2)+2,'color', [0,0,0]+0.8)
% plot(xpoints/framerate, mean(dffActive,2)+2, 'col', co(2,:)) % Active Average

lgd.FontSize = 24;

% Blue bars marking pull windows.
p = patch(t, binaryPullTimes * 4, 'b');
set(p, 'FaceAlpha', 0.4)
set(p, 'EdgeAlpha', 0)

set(gca,'YTick',[])
xlabel('Time (seconds)');
set(gca,'fontsize',24)
set(gca,'LooseInset',get(gca,'TightInset'));
|
|
|
+
|