- import matplotlib.pyplot as plt
- import noise
- import numpy as np
- from brian2.units import *
- from scripts.spatial_maps.orientation_map import OrientationMap
- from scripts.interneuron_placement import create_interneuron_sheet_entropy_max_orientation
- import scripts.models as modellib
- from scripts.ring_network.head_direction import get_head_direction_input, \
- ex_in_network
- from scipy.optimize import curve_fit
- from scripts.interneuron_placement import create_grid_of_excitatory_neurons, \
- create_interneuron_sheet_by_repulsive_force, get_excitatory_neurons_in_inhibitory_axonal_clouds
- import multiprocessing
- import itertools
# NOTE(review): trials_per_scale appears unused in this script — confirm before removing.
trials_per_scale = 1
# Population sizes: excitatory and inhibitory neuron counts.
N_E = 900
N_I = 90
# Extent of the simulated sheet.
sheet_x = 450 * um
sheet_y = 450 * um
# Axis lengths of the elliptic inhibitory axonal cloud; the "circle" variant
# below derives an area-matched radius from these two values.
inhibitory_axon_long_axis = 100 * um
inhibitory_axon_short_axis = 25 * um
# N_E = 900 -> 30 excitatory neurons per row on a square grid.
number_of_excitatory_neurons_per_row = int(np.sqrt(N_E))
'''
Neuron and synapse models
'''
excitatory_eqs = modellib.hodgkin_huxley_eqs_with_synaptic_conductance + modellib.eqs_ih
# Copy the parameter dict before mutating it: the original code updated
# modellib.hodgkin_huxley_params in place, silently changing the shared
# module-level dictionary for every other user of scripts.models.
excitatory_params = dict(modellib.hodgkin_huxley_params)
excitatory_params.update(modellib.ih_params)
excitatory_params.update({"E_i": -80 * mV})
excitatory_params['ghbar'] = 0. * nS  # h-current disabled for this experiment

# Leaky integrate-and-fire interneuron driven by a constant external drive.
lif_interneuron_eqs = """
dv/dt =1.0/tau* (-v + u_ext) :volt (unless refractory)
u_ext = u_ext_const : volt
"""
lif_interneuron_params = {
    "tau": 7 * ms,
    "v_threshold": -40 * mV,
    "v_reset": -60 * mV,
    "tau_refractory": 0.0 * ms,
    "u_ext_const": -50 * mV
}
lif_interneuron_options = {
    "threshold": "v>v_threshold",
    "reset": "v=v_reset",
    "refractory": "tau_refractory",
    'method': 'euler'
}

# E->I synapses use the delta model, I->E synapses the exponential model.
ei_synapse_model = modellib.delta_synapse_model
ei_synapse_on_pre = modellib.delta_synapse_on_pre
ei_synapse_param = modellib.delta_synapse_param
ie_synapse_model = modellib.exponential_synapse
ie_synapse_on_pre = modellib.exponential_synapse_on_pre
# Copy before overriding tau_syn so modellib.exponential_synapse_params
# keeps its original value for other scripts.
ie_synapse_param = dict(modellib.exponential_synapse_params)
ie_synapse_param["tau_syn"] = 2 * ms
'''
Tuning Maps
'''
# Tuning-map generator selection; only the "Orientation map" branch is active.
# tuning_label = "Perlin"
tuning_label = "Orientation map"
# Interneuron placement strategy; "Entropy Optimization" is active.
# optimization_label = "Repulsive"
optimization_label = "Entropy Optimization"
# NOTE(review): corr_len_list appears unused — the sweep below builds its own
# corr_len_range. Confirm before removing.
corr_len_list = range(0,301,45) #For these values maps exist
# NOTE(review): these three lists are rebound after the parallel sweep and
# never read in between — they look like dead initializations; confirm.
ellipse_trial_sharpening_list = []
circle_trial_sharpening_list = []
no_conn_trial_sharpening_list = []
def get_fwhm_for_corr_len_and_seed(corr_len, seed):
    """Simulate all three network variants for one orientation map.

    Builds a grid of excitatory neurons whose tunings come from a
    pre-generated orientation map (pixel correlation length ``corr_len``,
    random ``seed``), places interneurons (elliptic and area-matched
    circular axonal clouds), runs the "no synapse", "ellipsoid" and
    "circle" networks under head-direction input, and fits a Gaussian to
    each rate-vs-tuning profile.

    Returns a list with the fitted contrast (max - min of the Gaussian
    fit) for [no connection, ellipsoid, circle], or ``(-1, -1, -1)`` when
    the required orientation map file does not exist yet.
    Note: despite the name, the returned measure is the fitted contrast;
    the FWHM computation was abandoned in an earlier revision.
    """
    print(corr_len, seed)

    # --- Tuning map -------------------------------------------------------
    if tuning_label == "Perlin":  # TODO: How to handle scale in Perlin
        tuning_map = lambda x, y: noise.pnoise2(x / 100.0, y / 100.0, octaves=2) * np.pi
    elif tuning_label == "Orientation map":
        # Renamed from `map`, which shadowed the builtin.
        orientation_map = OrientationMap(number_of_excitatory_neurons_per_row + 1,
                                         number_of_excitatory_neurons_per_row + 1,
                                         corr_len, sheet_x / um, sheet_y / um, seed)
        try:
            orientation_map.load_orientation_map()
        except Exception:
            # Was a bare `except:`; keep the best-effort skip of missing maps
            # but no longer swallow SystemExit/KeyboardInterrupt.
            print(
                'No map yet with {}x{} pixels and {} pixel correllation length and {} seed'.format(
                    orientation_map.x_dim, orientation_map.y_dim,
                    orientation_map.corr_len, orientation_map.rnd_seed))
            return -1, -1, -1
        tuning_map = lambda x, y: orientation_map.tuning(x, y)

    ex_positions, ex_tunings = create_grid_of_excitatory_neurons(
        sheet_x / um, sheet_y / um, number_of_excitatory_neurons_per_row, tuning_map)

    # Radius giving a circle with the same area as the ellipse (pi*a*b).
    inhibitory_radial_axis = np.sqrt(inhibitory_axon_long_axis * inhibitory_axon_short_axis)
    if optimization_label == "Repulsive":
        inhibitory_axonal_clouds = create_interneuron_sheet_by_repulsive_force(
            N_I, inhibitory_axon_long_axis / um, inhibitory_axon_short_axis / um,
            sheet_x / um, sheet_y / um, random_seed=2, n_iterations=1000)
        inhibitory_axonal_circles = create_interneuron_sheet_by_repulsive_force(
            N_I, inhibitory_radial_axis / um, inhibitory_radial_axis / um,
            sheet_x / um, sheet_y / um, random_seed=2, n_iterations=1000)
    elif optimization_label == "Entropy Optimization":
        inhibitory_axonal_clouds, ellipse_single_trial_entropy = create_interneuron_sheet_entropy_max_orientation(
            ex_positions, ex_tunings, N_I, inhibitory_axon_long_axis / um,
            inhibitory_axon_short_axis / um, sheet_x / um,
            sheet_y / um, trial_orientations=30)
        inhibitory_axonal_circles, circle_single_trial_entropy = create_interneuron_sheet_entropy_max_orientation(
            ex_positions, ex_tunings, N_I, inhibitory_radial_axis / um,
            inhibitory_radial_axis / um, sheet_x / um,
            sheet_y / um, trial_orientations=1)

    # --- Connectivities ---------------------------------------------------
    # Ellipsoid axons: an interneuron inhibits every excitatory neuron inside
    # its axonal cloud and receives excitation back from exactly those cells.
    ie_connections = get_excitatory_neurons_in_inhibitory_axonal_clouds(ex_positions, inhibitory_axonal_clouds)
    inhibitory_synapse_strength = 30 * nS
    in_ex_weights = np.zeros((N_I, N_E)) * nS
    for interneuron_idx, connected_excitatory_idxs in enumerate(ie_connections):
        in_ex_weights[interneuron_idx, connected_excitatory_idxs] = inhibitory_synapse_strength
    excitatory_synapse_strength = 1 * mV
    ex_in_weights = np.where(in_ex_weights > 0 * nS, excitatory_synapse_strength, 0 * mV).T * volt

    # Circular (area-matched) axons, wired the same way.
    ie_connections_circle = get_excitatory_neurons_in_inhibitory_axonal_clouds(ex_positions,
                                                                              inhibitory_axonal_circles)
    in_ex_weights_circle = np.zeros((N_I, N_E)) * nS
    for interneuron_idx, connected_excitatory_idxs in enumerate(ie_connections_circle):
        in_ex_weights_circle[interneuron_idx, connected_excitatory_idxs] = inhibitory_synapse_strength
    ex_in_weights_circle = np.where(in_ex_weights_circle > 0 * nS, excitatory_synapse_strength, 0 * mV).T * volt

    # Control network without any synapses.
    no_conn_ie = np.zeros((N_I, N_E)) * nS
    no_conn_ei = np.zeros((N_E, N_I)) * mV

    # --- Prepare nets -----------------------------------------------------
    nets = []
    connectivity_label = ["No synapse", "Ellipsoid", "Circle"]
    connectivities = [(no_conn_ei, no_conn_ie), (ex_in_weights, in_ex_weights),
                      (ex_in_weights_circle, in_ex_weights_circle)]
    for ei_weights, ie_weights in connectivities:
        net = ex_in_network(N_E, N_I, excitatory_eqs, excitatory_params, lif_interneuron_eqs,
                            lif_interneuron_params, lif_interneuron_options,
                            ei_synapse_model, ei_synapse_on_pre, ei_synapse_param, ei_weights,
                            ie_synapse_model, ie_synapse_on_pre, ie_synapse_param, ie_weights,
                            random_seed=2)
        nets.append(net)

    # --- Head direction input ---------------------------------------------
    ex_input_baseline = 0.0 * nA
    peak_phase = 0
    input_sharpness = 1
    direction_input = get_head_direction_input(peak_phase, input_sharpness)
    max_head_direction_input_amplitude = 0.5 * nA
    # Per-neuron current determined by each cell's tuning direction.
    input_to_excitatory_population = ex_input_baseline + max_head_direction_input_amplitude * direction_input(
        np.array(ex_tunings))

    # --- Run simulation ----------------------------------------------------
    skip = 100 * ms    # initial transient excluded from the rate estimate
    length = 1100 * ms
    duration = skip + length
    for net in nets:
        excitatory_neurons = net["excitatory_neurons"]
        excitatory_neurons.I = input_to_excitatory_population
        net.run(duration)

    # --- Spatial map of rates ----------------------------------------------
    def get_rates(spike_monitor, min_time=skip):
        """Per-neuron mean firing rate from ISIs of spikes after min_time.

        ISIs are measured in ms, so 1/mean(ISI) is in kHz. Neurons with
        fewer than two spikes after the cutoff get rate 0.
        Fix: the original declared min_time but ignored it and hard-coded
        `skip`; defaulting min_time to `skip` keeps the call-site behavior.
        """
        spike_trains = spike_monitor.spike_trains()
        isis = [np.ediff1d(np.extract(spike_times / ms > min_time / ms, spike_times / ms)) * ms
                for spike_times in spike_trains.values()]
        rates = np.array([1.0 / np.mean(isi / ms) if isi.shape[0] != 0 else 0 for isi in isis]) * khertz
        return rates

    excitatory_rates = [get_rates(net["excitatory_spike_monitor"]) for net in nets]

    # --- Rate distribution over angles --------------------------------------
    def gauss(x, *p):
        """Gaussian with amplitude A, mean mu, width sigma and offset B."""
        A, mu, sigma, B = p
        return A * np.exp(-(x - mu) ** 2 / (2. * sigma ** 2)) + B

    p0 = [30., 0., 1., 10.]  # initial guess: A, mu, sigma, B
    fwhm_list = []
    for rates in excitatory_rates:
        coeff, _ = curve_fit(gauss, ex_tunings, rates / hertz, p0=p0)
        # Contrast (peak minus baseline of the fit) is the sharpening measure.
        contrast = np.max(gauss(ex_tunings, *coeff)) - np.min(gauss(ex_tunings, *coeff))
        print('Fitted contrast = ', contrast)
        fwhm_list.append(contrast)
    return fwhm_list
# Sweep over map correlation lengths x seeds, then plot contrast per variant.
corr_len_range = range(0, 451, 15)
corr_len_range_len = len(corr_len_range)
seed_range = range(10)
seed_range_len = len(seed_range)

# Toggle to re-use a previously saved sweep instead of re-simulating.
use_saved_array = False
data_file = '../../simulations/2020_02_27_sharpening_over_noise_scale/data_test_save.npy'

if __name__ == '__main__':
    if not use_saved_array:
        # The __main__ guard is required: multiprocessing workers (re-)import
        # this module on spawn-start platforms, and an unguarded Pool would
        # recursively launch new pools. The `with` block also closes and
        # joins the pool (the original leaked it).
        pool_arguments = itertools.product(corr_len_range, seed_range)
        with multiprocessing.Pool() as pool:
            data = pool.starmap(get_fwhm_for_corr_len_and_seed, list(pool_arguments))
        # One (no_conn, ellipse, circle) triple per (corr_len, seed) pair.
        data_array = np.reshape(np.array(data), (corr_len_range_len, seed_range_len, 3))
        np.save(data_file, data_array)
    else:
        data_array = np.load(data_file)

    # Mean and spread over seeds for each correlation length.
    # NOTE(review): runs without a map file contribute (-1, -1, -1) triples
    # and are averaged in unfiltered — confirm this is intended.
    no_conn_sharpening_mean = data_array[:, :, 0].mean(axis=1)
    ellipse_sharpening_mean = data_array[:, :, 1].mean(axis=1)
    circle_sharpening_mean = data_array[:, :, 2].mean(axis=1)
    no_conn_sharpening_std_dev = data_array[:, :, 0].std(axis=1)
    ellipse_sharpening_std_dev = data_array[:, :, 1].std(axis=1)
    circle_sharpening_std_dev = data_array[:, :, 2].std(axis=1)

    # Mean contrast with +/- one standard deviation bands per variant.
    plt.figure()
    plt.plot(corr_len_range, circle_sharpening_mean, label='Circle', marker='o', color='C1')
    plt.fill_between(corr_len_range, circle_sharpening_mean - circle_sharpening_std_dev,
                     circle_sharpening_mean + circle_sharpening_std_dev, color='C1', alpha=0.4)
    plt.plot(corr_len_range, ellipse_sharpening_mean, label='Ellipse', marker='o', color='C2')
    plt.fill_between(corr_len_range, ellipse_sharpening_mean - ellipse_sharpening_std_dev,
                     ellipse_sharpening_mean + ellipse_sharpening_std_dev, color='C2', alpha=0.4)
    plt.plot(corr_len_range, no_conn_sharpening_mean, label='No Conn.', marker='o', color='C3')
    plt.fill_between(corr_len_range, no_conn_sharpening_mean - no_conn_sharpening_std_dev,
                     no_conn_sharpening_mean + no_conn_sharpening_std_dev, color='C3', alpha=0.4)
    plt.xlabel('Correlation length')
    plt.ylabel('Contrast')
    plt.legend()
    plt.show()