nml_main.py 59 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509
  1. #!/usr/bin/env python3
  2. """
  3. Main script for creating scaled versions of NeuroML network and simulating it.
  4. This parses population and connectivity information exported from circuit.py to
  5. create a NeuroML representation of the network.
  6. File: nml_main.py
  7. Copyright 2023 Ankur Sinha
  8. """
  9. import argparse
  10. import bisect
  11. import copy
  12. import inspect
  13. import logging
  14. import math
  15. import pathlib
  16. import random
  17. import sys
  18. import textwrap
  19. import time
  20. import typing
  21. import h5py
  22. import lems.api as lems
  23. import neuroml
  24. import numpy
  25. import pandas
  26. import progressbar
  27. from neuroml.loaders import read_neuroml2_file
  28. from neuroml.utils import component_factory
  29. from neuroml.writers import NeuroMLHdf5Writer
  30. from pyneuroml.lems.LEMSSimulation import LEMSSimulation
  31. from pyneuroml.neuron.nrn_export_utils import get_segment_group_name
  32. from pyneuroml.nsgr import run_on_nsg
  33. from pyneuroml.plot.Plot import generate_plot
  34. from pyneuroml.plot.PlotMorphology import plot_2D
  35. from pyneuroml.plot.PlotMorphologyVispy import plot_interactive_3D
  36. from pyneuroml.pynml import (
  37. generate_sim_scripts_in_folder,
  38. reload_saved_data,
  39. run_lems_with,
  40. write_neuroml2_file,
  41. )
  42. from pyneuroml.utils import rotate_cell
  43. from pyneuroml.utils.units import convert_to_units, get_value_in_si
# Module-level logger; INFO by default so debug-level connection/cell traces
# below stay silent unless explicitly enabled.
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
  46. # https://stackoverflow.com/questions/3410976/how-to-round-a-number-to-significant-figures-in-python
  47. # currently unused
  48. def round_to_sig(x):
  49. one_less = round(x, int(math.floor(math.log10(abs(x))))) / 10
  50. return round(x + one_less, int(math.floor(math.log10(abs(one_less)))))
  51. class HL23Net(object):
  52. """HL23 network"""
  53. def __init__(
  54. self,
  55. scale: float = 0.01,
  56. connections: bool = True,
  57. network_input: str = "background",
  58. stimulus: bool = True,
  59. biophysics: bool = True,
  60. tonic_inhibition: bool = True,
  61. new_cells: bool = True,
  62. max_memory: str = "8G",
  63. hdf5=True,
  64. rotate_cells=False,
  65. ):
  66. """Init
  67. :param scale: network scale
  68. :type scale: float
  69. :param connections: toggle creation of network connections
  70. :type connections: bool
  71. :param network_input: select input to provide to cells
  72. - "background": for OU background input
  73. - "step": for a constant step current
  74. - "none": for no input
  75. :type network_input: str
  76. :param stimulus: toggle addition of stimulus to cells for stim protocol
  77. :type stimulus: bool
  78. :param biophysics: toggle addition of biophysics to cells (otherwise
  79. the cells only include morphology)
  80. :type biophysics: bool
  81. :param tonic_inhibition: toggle addition of tonic inhibition to cells
  82. note: requires biophysics to be enabled
  83. :type tonic_inhibition: bool
  84. :param new_cells: toggle regeneration of rotated cells, otherwise
  85. existing rotated cells are used
  86. :type new_cells: bool
  87. :param max_memory: max memory for JVM when running pyneuroml
  88. :type max_memory: str
  89. :param hdf5: toggle exporting to HDF5 format
  90. :type hdf5: bool
  91. :param rotate_cells: generate rotated cells, useful for visualization
  92. but does not affect the spiking simulation (will affect LPF, but
  93. we're not generating those here)
  94. :type rotate_cells: bool
  95. """
  96. object.__init__(self)
  97. self.network_scale = scale
  98. self.connections = connections
  99. self.network_input = network_input
  100. self.stimulus = stimulus
  101. self.biophysics = biophysics
  102. self.tonic_inhibition = tonic_inhibition
  103. self.new_cells = new_cells
  104. self.max_memory = max_memory
  105. self.hdf5 = hdf5
  106. self.rotate_cells = rotate_cells
  107. # data dumped from the simulation
  108. try:
  109. self.cell_data = h5py.File(
  110. "../L23Net/Circuit_output/cell_positions_and_rotations.h5", "r"
  111. )
  112. self.connectivity_data = h5py.File(
  113. "../L23Net/Circuit_output/synapse_connections.h5", "r"
  114. )
  115. self.circuit_params = pandas.read_excel(
  116. "../L23Net/Circuit_param.xls", sheet_name=None, index_col=0
  117. )
  118. # default synaptic parameters: are read from exported H5 files for
  119. # creation of synapses (above)
  120. self.circuit_params["syn_params"] = {
  121. "none": {
  122. "tau_r_AMPA": 0,
  123. "tau_d_AMPA": 0,
  124. "tau_r_NMDA": 0,
  125. "tau_d_NMDA": 0,
  126. "e": 0,
  127. "Dep": 0,
  128. "Fac": 0,
  129. "Use": 0,
  130. "u0": 0,
  131. "gmax": 0,
  132. }
  133. }
  134. # confirmed from self.cell_data.keys()
  135. self.cell_types = [i for i in self.circuit_params["conn_probs"].axes[0]]
  136. except FileNotFoundError:
  137. print("Original simulation datea files not found. Will not create model.")
  138. self.create = False
  139. self.pop_colors = {
  140. "HL23PV": "0 0 1",
  141. "HL23PYR": "1 0 0",
  142. "HL23SST": "0 1 0",
  143. "HL23VIP": "0.5 0.5 0.5",
  144. }
  145. self.simulation_id = f"HL23Sim_{self.network_scale}"
  146. if self.rotate_cells is True:
  147. self.lems_simulation_file = (
  148. f"LEMS_HL23_{self.network_scale}_Sim.rotated.xml"
  149. )
  150. else:
  151. self.lems_simulation_file = f"LEMS_HL23_{self.network_scale}_Sim.xml"
  152. self.netdoc = None
  153. self.network_id = "HL23Network"
  154. if self.rotate_cells is True:
  155. self.netdoc_file_name = f"HL23Net_{self.network_scale}.rotated.net.nml"
  156. else:
  157. self.netdoc_file_name = f"HL23Net_{self.network_scale}.net.nml"
  158. if self.hdf5 is True:
  159. self.netdoc_file_name += ".h5"
  160. self.lems_components_file_name = f"lems_components_{self.network_scale}.xml"
  161. self.stim_start = "200ms"
  162. self.sim_length = "1000ms"
  163. self.sim_end = convert_to_units(
  164. f"{get_value_in_si(self.stim_start) + get_value_in_si(self.sim_length)} s",
  165. "ms",
  166. )
  167. self.dt = "0.025ms"
  168. self.seed = 4587
  169. def create_network(self):
  170. # set the scale of the network
  171. if self.create is False:
  172. print("Not creating network")
  173. return
  174. start = time.time()
  175. print(f"Creating network with scale {self.network_scale}")
  176. self.netdoc = component_factory(neuroml.NeuroMLDocument, id="HL23Network")
  177. self.network = self.netdoc.add(
  178. neuroml.Network,
  179. id=self.network_id,
  180. temperature="34.0 degC",
  181. notes=f"L23 network at {self.network_scale} scale",
  182. validate=False,
  183. )
  184. # synapse types
  185. # LEMS component definitions will be included in simulation file later
  186. self.lems_components = lems.Model()
  187. self.lems_components.add(lems.Include("CaDynamics_E2_NML2.nml"))
  188. self.lems_components.add(lems.Include("synapses/ProbAMPANMDA.synapse.nml"))
  189. self.lems_components.add(lems.Include("synapses/ProbUDF.synapse.nml"))
  190. if self.tonic_inhibition:
  191. self.lems_components.add(lems.Include("channels/Tonic.nml"))
  192. # add all the channel definitions to
  193. channel_files = pathlib.Path("channels").glob("**/*.channel.nml")
  194. for afile in channel_files:
  195. logger.debug(f"Including {afile}")
  196. self.lems_components.add(lems.Include(str(afile)))
  197. self.create_cells()
  198. if self.connections is True:
  199. self.create_connections()
  200. if self.network_input == "background":
  201. self.add_background_input()
  202. elif self.network_input == "step":
  203. self.add_step_current()
  204. elif self.network_input == "none":
  205. pass
  206. else:
  207. print(f"Invalid network_input value: {self.network_input}, not adding any")
  208. pass
  209. if self.stimulus is True:
  210. self.add_stimulus()
  211. print(self.netdoc.summary())
  212. self.netdoc.validate(recursive=True)
  213. print(f"Writing {self.lems_components_file_name} ")
  214. self.lems_components.export_to_file(self.lems_components_file_name)
  215. print(f"Writing {self.netdoc_file_name} ")
  216. if self.hdf5:
  217. NeuroMLHdf5Writer.write(
  218. self.netdoc, self.netdoc_file_name, embed_xml=False, compress=True
  219. )
  220. else:
  221. write_neuroml2_file(self.netdoc, self.netdoc_file_name, validate=False)
  222. end = time.time()
  223. print(f"Creating network took: {(end - start)} seconds.")
    def create_cells(self):
        """Create cells and add them to the network.

        Each rotated cell is a new component in NeuroML, and since each
        population can only have one component attached to it when cells are
        rotated, each cell will belong to a different single celled population.

        However, for simulations where one isn't looking at LFPs etc., one does
        not need to rotate the cells. So, one cell component will be used to
        create a single population of each cell type.
        """
        start = time.time()
        print("Creating cells")
        # make a directory for storing rotated cells
        # we include these cells in the network document to ensure that the network
        # document doesn't get too large
        self.temp_cell_dir = "rotated_cells"
        cellfilesdir = pathlib.Path(self.temp_cell_dir)
        cellfilesdir.mkdir(exist_ok=True)

        # keep track of cell gids for our connections later, required for scaled down
        # versions when not all cells are included
        self.nml_cell = {}  # type: typing.Dict[str, neuroml.Cell]
        self.cell_list = {}  # type: typing.Dict[str, int]
        self.cell_list_by_type = {}
        for ctype in self.cell_types:
            self.cell_list_by_type[ctype] = []

        # create the cell populations
        for ctype in self.cell_types:
            celldataset = self.cell_data[ctype]
            # ['gid', 'x', 'y', 'z', 'x_rot', 'y_rot', 'z_rot']
            logger.debug(f"table headers are: {celldataset.dtype.fields.keys()}")

            # one template cell per type, shared by all instances of that type
            self.nml_cell[ctype] = neuroml.loaders.read_neuroml2_file(
                f"{ctype}.cell.nml"
            ).cells[0]  # type: neuroml.Cell

            # replace biophys with empty object
            if self.biophysics is False:
                self.nml_cell[
                    ctype
                ].biophysical_properties = neuroml.BiophysicalProperties(id="biophys")
                self.nml_cell[ctype].biophysical_properties.add(
                    neuroml.MembraneProperties
                )
                self.nml_cell[ctype].biophysical_properties.add(
                    neuroml.IntracellularProperties
                )

            # include the cell to ensure the ion channel files are included
            # self.netdoc.add(neuroml.IncludeType, href=f"{ctype}.cell.nml")

            # If we don't rotate cells, we only need one population per cell
            # type.
            # Make a copy anyway because we're modifying the original cell
            if self.rotate_cells is False:
                unrotated_cell = copy.deepcopy(self.nml_cell[ctype])
                unrotated_cell.id = unrotated_cell.id + "_sim"
                unrotated_cell_file = (
                    f"{self.temp_cell_dir}/{unrotated_cell.id}.cell.nml"
                )
                unrotated_cell_doc = component_factory(
                    neuroml.NeuroMLDocument, id=f"{unrotated_cell.id}_doc"
                )
                unrotated_cell_doc.add(unrotated_cell)
                if self.biophysics is True:
                    self.__add_tonic_inhibition(
                        ctype, unrotated_cell, unrotated_cell_doc
                    )
                write_neuroml2_file(
                    unrotated_cell_doc, unrotated_cell_file, validate=False
                )
                self.netdoc.add(neuroml.IncludeType, href=unrotated_cell_file)

                pop = self.network.add(
                    neuroml.Population,
                    id=f"{ctype}_pop",
                    type="populationList",
                    component=unrotated_cell.id,
                )
                pop.add(neuroml.Property(tag="color", value=self.pop_colors[ctype]))
                pop.add(neuroml.Property(tag="region", value="L23"))

            # sub-sample the full population: take every `step`th cell so the
            # number of instances scales with network_scale
            i = 0
            step = int(1 / self.network_scale)
            maxcell = len(celldataset)
            # put in minimum 2 cells of each type
            if step >= maxcell:
                step = 1
                maxcell = 2
            for j in range(0, maxcell, step):
                acell = celldataset[j]
                gid = acell[0]
                x = acell[1]
                y = acell[2]
                z = acell[3]
                xrot = acell[4]
                yrot = acell[5]
                zrot = acell[6]

                if self.rotate_cells is True:
                    rotated_cell = None
                    rotated_cell_file = (
                        f"{self.temp_cell_dir}/{self.nml_cell[ctype].id}_{gid}.cell.nml"
                    )
                    rotated_cell_id = self.nml_cell[ctype].id + f"_{gid}"
                    # reuse a previously generated rotated cell file unless
                    # regeneration was requested
                    if (
                        self.new_cells is False
                        and pathlib.Path(rotated_cell_file).exists()
                    ):
                        print(f"{rotated_cell_file} already exists, not overwriting.")
                        print("Set new_cells=True to regenerate all rotated cell files")
                    else:
                        rotated_cell = rotate_cell(
                            self.nml_cell[ctype],
                            xrot,
                            yrot,
                            zrot,
                            order="xyz",
                            relative_to_soma=True,
                        )
                        rotated_cell.id = rotated_cell_id
                        rotated_cell_doc = component_factory(
                            neuroml.NeuroMLDocument, id=f"{rotated_cell_id}_doc"
                        )
                        if self.biophysics is True:
                            self.__add_tonic_inhibition(
                                ctype, rotated_cell, rotated_cell_doc
                            )
                        rotated_cell_doc.add(rotated_cell)
                        write_neuroml2_file(
                            rotated_cell_doc, rotated_cell_file, validate=False
                        )

                    # the include and the single-cell population are needed
                    # whether or not the file was regenerated above
                    self.netdoc.add(neuroml.IncludeType, href=rotated_cell_file)
                    pop = self.network.add(
                        neuroml.Population,
                        id=f"{ctype}_pop_{gid}",
                        type="populationList",
                        component=rotated_cell_id,
                    )
                    pop.add(neuroml.Property(tag="color", value=self.pop_colors[ctype]))
                    pop.add(neuroml.Property(tag="region", value="L23"))
                    pop.add(
                        neuroml.Instance, id=0, location=neuroml.Location(x=x, y=y, z=z)
                    )
                    # track what gids we're including in our network
                    # used later when creating connections
                    self.cell_list[gid] = 0
                else:
                    pop.add(
                        neuroml.Instance, id=i, location=neuroml.Location(x=x, y=y, z=z)
                    )
                    self.cell_list[gid] = i
                    i += 1

                # currently unused
                self.cell_list_by_type[ctype].append(gid)

        print(self.netdoc.summary())
        self.netdoc.validate(recursive=True)
        end = time.time()
        print(f"Creating cells took: {(end - start)} seconds")
  374. def __add_tonic_inhibition(self, ctype, cell, cell_doc):
  375. """Add tonic inhibition to provided cell
  376. :param ctype: cell type
  377. :type ctype: str
  378. :param cell: cell object to add to
  379. :type cell: neuroml.Cell
  380. :param cell_doc: cell doc object
  381. :type cell_doc: neuroml.NeuroMLDocument
  382. """
  383. if self.tonic_inhibition is True:
  384. # add gaba tonic inhibition to cells
  385. # definition file is included in the network, so we don't
  386. # re-include it here.
  387. if ctype == "HL23PYR" or ctype == "HL23SST":
  388. cell.add_channel_density(
  389. nml_cell_doc=cell_doc,
  390. cd_id="TonicInhibition",
  391. ion_channel="TonicPavlov2009",
  392. cond_density="0.000938 S_per_cm2",
  393. erev="-75 mV",
  394. group_id="all_minus_myelin",
  395. ion="non_specific",
  396. ion_chan_def_file="",
  397. )
  398. elif ctype == "HL23PV" or ctype == "HL23VIP":
  399. cell.add_channel_density(
  400. nml_cell_doc=cell_doc,
  401. cd_id="TonicInhibition",
  402. ion_channel="TonicPavlov2009",
  403. cond_density="0.000938 S_per_cm2",
  404. erev="-75 mV",
  405. group_id="all",
  406. ion="non_specific",
  407. ion_chan_def_file="",
  408. )
    def create_connections(self):
        """Create synaptic connections between the cell populations.

        For every pretype/posttype pair, reads the synapse parameters from
        the exported H5 data, creates one LEMS synapse component, and adds a
        ConnectionWD per exported pre/post cell pair whose cells are present
        in this (possibly scaled-down) network.  Connection weights are
        scaled by 1/network_scale to compensate for the reduced cell count.
        """
        start = time.time()
        print("Creating connections")
        # count how many connections we have in total
        conn_count = 0

        # create dicts to hold values for caching: prevents us from having to
        # get this info again and again
        ordered_segments = {}
        cumulative_lengths = {}
        for atype in self.cell_types:
            ordered_segments[f"{atype}"] = {}
            cumulative_lengths[f"{atype}"] = {}

        # create connections
        for pretype in self.cell_types:
            for posttype in self.cell_types:
                conndataset = self.connectivity_data[f"{pretype}:{posttype}"]
                # string
                mechanism = self.connectivity_data[f"synparams/{pretype}:{posttype}"][
                    "mechanism"
                ][()].decode("utf-8")
                # all ints/floats
                if "UDF" in mechanism:
                    tau_r = self.connectivity_data[f"synparams/{pretype}:{posttype}"][
                        "tau_r"
                    ][()]
                    tau_d = self.connectivity_data[f"synparams/{pretype}:{posttype}"][
                        "tau_d"
                    ][()]
                elif "AMPANMDA" in mechanism:
                    tau_r_AMPA = self.connectivity_data[
                        f"synparams/{pretype}:{posttype}"
                    ]["tau_r_AMPA"][()]
                    tau_r_NMDA = self.connectivity_data[
                        f"synparams/{pretype}:{posttype}"
                    ]["tau_r_NMDA"][()]
                    tau_d_AMPA = self.connectivity_data[
                        f"synparams/{pretype}:{posttype}"
                    ]["tau_d_AMPA"][()]
                    tau_d_NMDA = self.connectivity_data[
                        f"synparams/{pretype}:{posttype}"
                    ]["tau_d_NMDA"][()]
                else:
                    raise ValueError(f"Unknown mechanism found: {mechanism}")

                # common to both synapses
                Use = self.connectivity_data[f"synparams/{pretype}:{posttype}"]["Use"][
                    ()
                ]
                Dep = self.connectivity_data[f"synparams/{pretype}:{posttype}"]["Dep"][
                    ()
                ]
                Fac = self.connectivity_data[f"synparams/{pretype}:{posttype}"]["Fac"][
                    ()
                ]
                gbase = self.connectivity_data[f"synparams/{pretype}:{posttype}"][
                    "gmax"
                ][()]
                u0 = self.connectivity_data[f"synparams/{pretype}:{posttype}"]["u0"][()]
                erev = self.connectivity_data[f"synparams/{pretype}:{posttype}"]["e"][
                    ()
                ]

                print(
                    f"Creating synapse component: {pretype} -> {posttype}: {pretype}_{posttype}_{mechanism}."
                )
                # one LEMS synapse component per pretype/posttype pair
                if "UDF" in mechanism:
                    syn = lems.Component(
                        id_=f"{pretype}_{posttype}_{mechanism}",
                        type_=f"{mechanism}",
                        tau_r=f"{tau_r} ms",
                        tau_d=f"{tau_d} ms",
                        Use=Use,
                        Dep=f"{Dep} ms",
                        Fac=f"{Fac} ms",
                        gbase=f"{gbase} uS",
                        u0=u0,
                        erev=f"{erev} mV",
                    )
                elif "AMPANMDA" in mechanism:
                    syn = lems.Component(
                        id_=f"{pretype}_{posttype}_{mechanism}",
                        type_=f"{mechanism}",
                        tau_r_AMPA=f"{tau_r_AMPA} ms",
                        tau_d_AMPA=f"{tau_d_AMPA} ms",
                        tau_r_NMDA=f"{tau_r_NMDA} ms",
                        tau_d_NMDA=f"{tau_d_NMDA} ms",
                        Use=Use,
                        Dep=f"{Dep} ms",
                        Fac=f"{Fac} ms",
                        gbase=f"{gbase} uS",
                        u0=u0,
                        erev=f"{erev} mV",
                        mg="1 mM",
                        weight_factor_NMDA="1",
                    )
                self.lems_components.add(syn)

                anml_cell = self.nml_cell[f"{posttype}"]  # type: neuroml.Cell
                syn_count = 0
                cur_precell = None
                cur_postcell = None
                print(
                    f"Creating connections: {pretype} -> {posttype} (~{int(conndataset.shape[0])} with scale of {self.network_scale})."
                )

                # if we're not rotating cells, we only need one project between
                # the single populations of different cell types
                if self.rotate_cells is False:
                    proj = component_factory(
                        neuroml.Projection,
                        id=f"proj_{pretype}_{posttype}",
                        presynaptic_population=f"{pretype}_pop",
                        postsynaptic_population=f"{posttype}_pop",
                        synapse=f"{pretype}_{posttype}_{mechanism}",
                    )  # type: neuroml.Projection

                # show a progress bar so we have some idea of what's going on
                max_value_possible = int(conndataset.shape[0])
                # rounded = round_to_sig(approx_max_value)

                # NOTE(review): progressbar2's constructor keyword is
                # ``max_value``; confirm ``max_val`` is accepted by the
                # progressbar package actually installed here
                bar = progressbar.ProgressBar(
                    max_val=max_value_possible, poll_interval=30
                )
                bar_count = 0
                for conn in conndataset:
                    precell = conn[0]
                    postcell = conn[1]
                    weight = conn[2]
                    delay = conn[3]
                    section = conn[4]
                    sectionx = conn[5]

                    # if both cells are not in our population, skip this connection
                    if (
                        precell not in self.cell_list.keys()
                        or postcell not in self.cell_list.keys()
                    ):
                        logger.debug(
                            f"{precell} or {postcell} are not included in the network. Skipping"
                        )
                        continue

                    bar.update(bar_count)
                    bar_count += 1

                    # keep only the part of the section name after the first "."
                    section = (section.decode("utf-8")).split(".")[1]
                    neuroml_seggrp_id = get_segment_group_name(section)

                    # use info if it's cached, otherwise get new info and cache
                    # it
                    try:
                        ord_segs = ordered_segments[f"{posttype}"][neuroml_seggrp_id]
                        cumul_lengths = cumulative_lengths[f"{posttype}"][
                            neuroml_seggrp_id
                        ]
                    except KeyError:
                        [
                            ord_segs,
                            cumul_lengths,
                        ] = anml_cell.get_ordered_segments_in_groups(
                            group_list=[neuroml_seggrp_id],
                            include_cumulative_lengths=True,
                        )
                        ordered_segments[f"{posttype}"][neuroml_seggrp_id] = ord_segs
                        cumulative_lengths[f"{posttype}"][neuroml_seggrp_id] = (
                            cumul_lengths
                        )

                    list_ord_segs = ord_segs[neuroml_seggrp_id]
                    list_cumul_lengths = cumul_lengths[neuroml_seggrp_id]
                    total_len = list_cumul_lengths[-1]

                    # map the group-relative location (sectionx, a fraction)
                    # onto a segment id plus a fraction along that segment
                    section_loc = total_len * sectionx
                    ind = bisect.bisect_left(list_cumul_lengths, section_loc)
                    post_seg = list_ord_segs[ind]

                    # if it's the first segment, with ind 0, [ind - 1] is not its
                    # parent segment
                    if ind != 0:
                        frac_along = (section_loc - list_cumul_lengths[ind - 1]) / (
                            list_cumul_lengths[ind] - list_cumul_lengths[ind - 1]
                        )
                        logger.debug(
                            f"frac_along: ({section_loc} - {list_cumul_lengths[ind - 1]}) / ({list_cumul_lengths[ind]} - {list_cumul_lengths[ind - 1]}) = {frac_along}"
                        )
                    else:
                        frac_along = section_loc / list_cumul_lengths[ind]
                        logger.debug(
                            f"frac_along: ({section_loc} / {list_cumul_lengths[ind]}) = {frac_along}"
                        )

                    # for zero length segments
                    # NOTE(review): this only catches -inf; presumably the
                    # lengths are numpy floats (plain Python floats would
                    # raise ZeroDivisionError above) — confirm
                    if frac_along == -float("inf"):
                        frac_along = 1

                    conn_count += 1
                    logger.debug(
                        f"{conn_count}: {pretype}:{precell} -> {posttype}:{postcell} {neuroml_seggrp_id}: segment {post_seg.id} ({list_cumul_lengths[ind - 1]} - {list_cumul_lengths[ind]}) at {frac_along} with mechanism {mechanism}"
                    )

                    # a new projection is only required when the pre or post cell
                    # change
                    if self.rotate_cells is True:
                        if precell != cur_precell or postcell != cur_postcell:
                            proj = self.network.add(
                                neuroml.Projection,
                                id=f"proj_{precell}_{postcell}",
                                presynaptic_population=f"{pretype}_pop_{precell}",
                                postsynaptic_population=f"{posttype}_pop_{postcell}",
                                synapse=f"{pretype}_{posttype}_{mechanism}",
                            )
                            cur_precell = precell
                            cur_postcell = postcell
                            syn_count = 0

                        try:
                            proj.add(
                                neuroml.ConnectionWD,
                                id=syn_count,
                                pre_cell_id=f"../{pretype}_pop_{precell}/0/{pretype}_{precell}",
                                pre_segment_id=0,
                                post_cell_id=f"../{posttype}_pop_{postcell}/0/{posttype}_{postcell}",
                                post_segment_id=post_seg.id,
                                post_fraction_along=frac_along,
                                weight=str(float(weight) / self.network_scale),
                                delay=f"{delay} ms",
                            )
                        except ValueError as e:
                            print(f"list of cumulative lengths: {list_cumul_lengths}")
                            print(
                                f"frac_along: ({section_loc} - {list_cumul_lengths[ind - 1]}) / ({list_cumul_lengths[ind]} - {list_cumul_lengths[ind - 1]}) = {frac_along}"
                            )
                            raise e
                    else:
                        try:
                            proj.add(
                                neuroml.ConnectionWD,
                                id=syn_count,
                                pre_cell_id=f"../{pretype}_pop/{self.cell_list[precell]}/{pretype}_sim",
                                pre_segment_id=0,
                                post_cell_id=f"../{posttype}_pop/{self.cell_list[postcell]}/{posttype}_sim",
                                post_segment_id=post_seg.id,
                                post_fraction_along=frac_along,
                                weight=str(float(weight) / self.network_scale),
                                delay=f"{delay} ms",
                            )
                        except ValueError as e:
                            print(f"list of cumulative lengths: {list_cumul_lengths}")
                            print(
                                f"frac_along: ({section_loc} - {list_cumul_lengths[ind - 1]}) / ({list_cumul_lengths[ind]} - {list_cumul_lengths[ind - 1]}) = {frac_along}"
                            )
                            raise e

                    syn_count += 1
                bar.finish()

                # Only add projection to network if there's at least one
                # connection in it.
                # Not required with rotated cells because projection creation
                # and connection creation go hand in hand
                if self.rotate_cells is False:
                    if len(proj.connection_wds) > 0:
                        self.network.add(proj)

        end = time.time()
        print(f"Creating connections took: {(end - start)} seconds.")
  655. def add_background_input(self):
  656. """Add background input to cells."""
  657. start = time.time()
  658. # Component Type definition
  659. self.lems_components.add(lems.Include("synapses/Gfluct.nml"))
  660. # base excitatory conductances (from paper, given here in uS)
  661. cell_type_ge0 = {
  662. "HL23PYR": 0.000028,
  663. "HL23PV": 0.00028,
  664. "HL23SST": 0.00003,
  665. "HL23VIP": 0.000066,
  666. }
  667. # store input locations per cell type and create input components for
  668. # each cell type also, since all cells are indentical
  669. # values: { 'rel distance' : [(seg id, frac along)] }
  670. cell_type_input_locations = {} # type: typing.Dict[str, typing.Dict[float, typing.Tuple[int, float]]]
  671. for cell_type, cell in self.nml_cell.items():
  672. cell_type_input_locations[cell_type] = {}
  673. extremeties = cell.get_extremeties()
  674. try:
  675. basal_segs = cell.get_all_segments_in_group("basal_dendrite_group")
  676. except Exception:
  677. # PV cell doesn't have basal, only a dendrite group
  678. basal_segs = cell.get_all_segments_in_group("dendrite_group")
  679. # input points on basal dendrites
  680. longest_basal_branch_length = 0
  681. for segid, distance in extremeties.items():
  682. if distance > longest_basal_branch_length and segid in basal_segs:
  683. longest_basal_branch_length = distance
  684. half_way_basal = longest_basal_branch_length / 2
  685. segs_basal = cell.get_segments_at_distance(half_way_basal)
  686. # create input component for 0.5
  687. g_e0 = cell_type_ge0[cell_type] * math.exp(0.5)
  688. gfluct_component = lems.Component(
  689. id_=f"Gfluct_{cell_type}_0_5",
  690. type_="Gfluct",
  691. start=self.stim_start,
  692. stop=self.sim_end,
  693. dt=self.dt,
  694. E_e="0mV",
  695. E_i="-80mV",
  696. g_e0=f"{g_e0} uS",
  697. g_i0="0pS",
  698. tau_e="65ms",
  699. tau_i="20ms",
  700. std_e=f"{g_e0} uS",
  701. std_i="0pS",
  702. )
  703. self.lems_components.add(gfluct_component)
  704. # get segments to place input at
  705. cell_type_input_locations[cell_type][0.5] = []
  706. for seg, frac_along in segs_basal.items():
  707. if seg in basal_segs:
  708. cell_type_input_locations[cell_type][0.5].append((seg, frac_along))
  709. # additional for pyr apical cells
  710. pyr_cell = self.nml_cell["HL23PYR"]
  711. extremeties = pyr_cell.get_extremeties()
  712. longest_apical_branch_length = 0
  713. pyr_apical_segs = pyr_cell.get_all_segments_in_group("apical_dendrite_group")
  714. for segid, distance in extremeties.items():
  715. if distance > longest_apical_branch_length and segid in pyr_apical_segs:
  716. longest_apical_branch_length = distance
  717. apical_input_distances = [0.1, 0.3, 0.5, 0.7, 0.9]
  718. for d in apical_input_distances:
  719. # create the input component
  720. g_e0 = cell_type_ge0[cell_type] * math.exp(d)
  721. # create input component for use at each distance point
  722. gfluct_component = lems.Component(
  723. id_=f"Gfluct_HL23PYR_{str(d).replace('.', '_')}",
  724. type_="Gfluct",
  725. start=self.stim_start,
  726. stop=self.sim_end,
  727. dt=self.dt,
  728. E_e="0mV",
  729. E_i="-80mV",
  730. g_e0=f"{g_e0} uS",
  731. g_i0="0pS",
  732. tau_e="65ms",
  733. tau_i="20ms",
  734. std_e=f"{g_e0} uS",
  735. std_i="0pS",
  736. )
  737. self.lems_components.add(gfluct_component)
  738. # get segments to place at
  739. segs_apical = pyr_cell.get_segments_at_distance(
  740. d * longest_apical_branch_length
  741. )
  742. for seg, frac_along in segs_apical.items():
  743. if seg in pyr_apical_segs:
  744. try:
  745. cell_type_input_locations["HL23PYR"][d].append(
  746. (seg, frac_along)
  747. )
  748. except KeyError:
  749. cell_type_input_locations["HL23PYR"][d] = []
  750. cell_type_input_locations["HL23PYR"][d].append(
  751. (seg, frac_along)
  752. )
  753. # create a new input list for each population, and each location
  754. # because input list takes a component as an argument, and a different
  755. # component is required for each location
  756. input_list_ctr = 0
  757. input_ctr = 0
  758. for pop in self.network.populations:
  759. # cell name
  760. cell_type = pop.component.split("_")[0]
  761. input_segs = cell_type_input_locations[cell_type]
  762. for rel_dist, seginfos in input_segs.items():
  763. # one input list per population per component
  764. inputlist = self.network.add(
  765. "InputList",
  766. id=f"Gfluct_{input_list_ctr}",
  767. component=f"Gfluct_{cell_type}_{str(rel_dist).replace('.', '_')}",
  768. populations=pop.id,
  769. validate=False,
  770. )
  771. input_list_ctr += 1
  772. for seg, frac_along in seginfos:
  773. if self.rotate_cells is True:
  774. inputlist.add(
  775. "Input",
  776. id=f"{input_ctr}",
  777. target=f"../{pop.id}/0/{pop.component}",
  778. destination="synapses",
  779. segment_id=seg,
  780. fraction_along=frac_along,
  781. )
  782. input_ctr += 1
  783. else:
  784. for inst in pop.instances:
  785. inputlist.add(
  786. "Input",
  787. id=f"{input_ctr}",
  788. target=f"../{pop.id}/{inst.id}/{pop.component}",
  789. destination="synapses",
  790. segment_id=seg,
  791. fraction_along=frac_along,
  792. )
  793. input_ctr += 1
  794. end = time.time()
  795. print(f"Adding background input took: {(end - start)} seconds.")
    # TODO: incomplete, to be done after bg input implementation
    def add_stimulus(self):
        """Add additional stimulus to cells.

        Reads stimulus parameters from the "STIM_PARAM" sheet of
        ``self.circuit_params`` and, for each cell type, attaches
        ``SpikeGenerator`` driven inputs to randomly chosen dendritic
        segments (weighted by segment surface area) of a subset of cells.

        NOTE(review): marked TODO/incomplete upstream — several values read
        from the sheet (num_stim, start_time, loc, gmax, stim_type,
        syn_params) are currently unused below.
        """
        logger.setLevel(logging.DEBUG)
        print("Adding stimuli")
        # cell_nums = [self.circuit_params['SING_CELL_PARAM'].at['cell_num', name] for name in self.cell_types]
        # from circuit.py
        input_ctr = 0
        for acell_type in self.cell_types:
            # iterate over rows
            for row in self.circuit_params["STIM_PARAM"].axes[0]:
                # initialise parameters
                num_cells = 0  # number of cells to provide input to
                start_index = 0  # unused, is set to 0
                num_stim = 0  # number of spikes
                interval = 0  # spike interval
                start_time = 0  # spikes start time
                delay = 0  # spikes delay
                delay_range = 0  # spikes delay range
                loc_num = 0  # number of locations on cell to provide spike to
                loc = "all"  # what locations to provide spikes to
                gmax = 0.0  # gmax
                stim_type = ""  # type of synapse for stimulus
                syn_params = ""  # parameters of synapse
                for col in self.circuit_params["STIM_PARAM"].axes[1]:
                    # found the cell row
                    if (
                        "cell_name" == col
                        and self.circuit_params["STIM_PARAM"].at[row, col] == acell_type
                    ):
                        # scale the number of stimulated cells with the
                        # network scale so smaller models get fewer inputs
                        num_cells = int(
                            self.circuit_params["STIM_PARAM"].at[row, "num_cells"]
                            * self.network_scale
                        )
                        start_index = self.circuit_params["STIM_PARAM"].at[
                            row, "start_index"
                        ]
                        num_stim = self.circuit_params["STIM_PARAM"].at[row, "num_stim"]
                        interval = self.circuit_params["STIM_PARAM"].at[row, "interval"]
                        # currently unused: NeuroML does not have a
                        # SpikeGenerator that allows setting a start time
                        start_time = self.circuit_params["STIM_PARAM"].at[
                            row, "start_time"
                        ]
                        delay = self.circuit_params["STIM_PARAM"].at[row, "delay"]
                        delay_range = self.circuit_params["STIM_PARAM"].at[
                            row, "delay_range"
                        ]
                        loc_num = self.circuit_params["STIM_PARAM"].at[row, "loc_num"]
                        # loc: unused: "dend", which corresponds to all
                        # dendritic segments
                        loc = self.circuit_params["STIM_PARAM"].at[row, "loc"]
                        gmax = self.circuit_params["STIM_PARAM"].at[row, "gmax"]
                        stim_type = self.circuit_params["STIM_PARAM"].at[
                            row, "stim_type"
                        ]
                        syn_params = self.circuit_params["STIM_PARAM"].at[
                            row, "syn_params"
                        ]
                        # load the single template cell, since choosing sections does
                        # not depend on rotation
                        a_nml_cell = self.nml_cell[acell_type]  # type: neuroml.Cell
                        # loc is always "dend"
                        logger.debug(f"loc: {loc}")
                        dendritic_segments_ids = a_nml_cell.get_all_segments_in_group(
                            "dendrite_group"
                        )
                        # dendritic_segments = [nml_cell.get_segment(seg) for seg in dendritic_segments_ids]
                        # weight the random segment choice by surface area so
                        # larger segments are more likely to receive an input
                        segment_areas = numpy.array(
                            [
                                a_nml_cell.get_segment_surface_area(seg)
                                for seg in dendritic_segments_ids
                            ]
                        )
                        segment_areas = segment_areas / segment_areas.sum()
                        cells = self.cell_list_by_type[acell_type]
                        ctr = 0
                        # if it's too small a model, at least give one cell input
                        if num_cells == 0:
                            num_cells = 1
                        # NOTE(review): "<=" runs num_cells + 1 iterations —
                        # looks like an off-by-one; confirm the intended count
                        while ctr <= num_cells:
                            cell_id = f"{acell_type}_{cells[ctr]}"
                            pop_id = f"{acell_type}_pop_{cells[ctr]}"
                            # get segments
                            input_segments = numpy.random.choice(
                                dendritic_segments_ids,
                                size=loc_num,
                                replace=False,
                                p=segment_areas,
                            )
                            for aseg in input_segments:
                                print(
                                    f"Adding sg_{input_ctr}: i_{input_ctr}/{input_ctr} to {pop_id}/{cell_id}/{aseg}"
                                )
                                # currently SpikeGenerator does not allow a start time, so
                                # this is unused
                                # NOTE(review): this loops forever if delay and
                                # delay_range are both 0 — confirm sheet values
                                time_delay = 0
                                while time_delay <= 0:
                                    time_delay = numpy.random.uniform(
                                        low=delay, high=delay + delay_range
                                    )
                                gen = self.netdoc.add(
                                    neuroml.SpikeGenerator,
                                    id=f"sg_{input_ctr}",
                                    period=f"{interval} ms",
                                )
                                input_list = self.network.add(
                                    neuroml.InputList,
                                    id=f"i_{input_ctr}",
                                    component=gen.id,
                                    populations=pop_id,
                                )
                                input_list.add(
                                    neuroml.Input,
                                    id=f"{input_ctr}",
                                    target=f"../{pop_id}/0/{cell_id}",
                                    destination="synapses",
                                    segment_id=aseg,
                                )
                                input_ctr += 1
                            ctr += 1
                        # matching row handled; stop scanning remaining columns
                        break
        logger.setLevel(logging.INFO)
  919. def visualize_network(self, min_cells=5):
  920. """Generate morph plots"""
  921. print(
  922. f"Visualising network at scale {self.network_scale} with {min_cells} in full"
  923. )
  924. # if the network has been constructed, copy over the populations and
  925. # include the cells
  926. if self.netdoc is not None:
  927. nml_file = neuroml.NeuroMLDocument(id="network_copy")
  928. nml_file.add(neuroml.Network, id="dummy_network")
  929. # copy over populations
  930. nml_file.networks[0].populations = self.netdoc.networks[0].populations
  931. nml_file.includes = self.netdoc.includes
  932. # include cells
  933. for inc in self.nml_file.includes:
  934. incfile = read_neuroml2_file(inc.href)
  935. for cell in incfile.cells:
  936. nml_file.add(cell)
  937. else:
  938. # otherwise read the file in once so it's not read in repeatedly
  939. print(f"Reading {self.netdoc_file_name}")
  940. nml_file = read_neuroml2_file(self.netdoc_file_name)
  941. PYR_cells = []
  942. SST_cells = []
  943. PV_cells = []
  944. VIP_cells = []
  945. PYR_point_cells = []
  946. SST_point_cells = []
  947. PV_point_cells = []
  948. VIP_point_cells = []
  949. # because each cell is in a separate population
  950. print("Picking point cells")
  951. for inc in nml_file.includes:
  952. cell = inc.href.split("/")[1].replace(".cell.nml", "")
  953. if "PYR" in cell:
  954. PYR_cells.append(cell)
  955. if "PV" in cell:
  956. PV_cells.append(cell)
  957. if "VIP" in cell:
  958. VIP_cells.append(cell)
  959. if "SST" in cell:
  960. SST_cells.append(cell)
  961. print(f"Got {len(PYR_cells)} PYR cells")
  962. print(f"Got {len(SST_cells)} SST cells")
  963. print(f"Got {len(PV_cells)} PV cells")
  964. print(f"Got {len(VIP_cells)} VIP cells")
  965. # at least plot min_cells cells of each type, and all the rest as points
  966. if (len(PYR_cells) - min_cells) > 0:
  967. PYR_point_cells = random.sample(PYR_cells, len(PYR_cells) - min_cells)
  968. if (len(SST_cells) - min_cells) > 0:
  969. SST_point_cells = random.sample(SST_cells, len(SST_cells) - min_cells)
  970. if (len(PV_cells) - min_cells) > 0:
  971. PV_point_cells = random.sample(PV_cells, len(PV_cells) - min_cells)
  972. if (len(VIP_cells) - min_cells) > 0:
  973. VIP_point_cells = random.sample(VIP_cells, len(VIP_cells) - min_cells)
  974. print(
  975. f"Picked {len(PYR_point_cells) + len(SST_point_cells) + len(PV_point_cells) + len(VIP_point_cells)} point cells"
  976. )
  977. """
  978. print("Removing axons from PYR cells")
  979. for ac in nml_file.cells:
  980. if ac.id not in PYR_point_cells:
  981. axons = ac.get_all_segments_in_group(ac.get_segment_group("axon_group"))
  982. for s in ac.morphology.segments:
  983. if s.id in axons:
  984. ac.morphology.segments.remove(s)
  985. """
  986. """
  987. for plane in ["xy", "yz", "zx"]:
  988. print(f"Plotting {plane} with {point_fraction} fraction as point cells")
  989. plot_2D(
  990. nml_file=nml_file,
  991. plane2d=plane,
  992. min_width=1,
  993. nogui=True,
  994. title="",
  995. plot_type="constant",
  996. save_to_file=f"{self.netdoc_file_name.replace('.nml', '')}.{plane}.png",
  997. plot_spec={
  998. "point_cells": PYR_point_cells + SST_point_cells + VIP_point_cells + PV_point_cells
  999. }
  1000. )
  1001. print(f"Plotting {plane} with all cells as point cells")
  1002. plot_2D(
  1003. nml_file=nml_file,
  1004. plane2d=plane,
  1005. min_width=1,
  1006. nogui=True,
  1007. title="",
  1008. plot_type="constant",
  1009. save_to_file=f"{self.netdoc_file_name.replace('.nml', '')}.{plane}.allpoints.png",
  1010. plot_spec={
  1011. "point_fraction": 1.0
  1012. }
  1013. )
  1014. print(f"Plotting {plane} with a single cells of each type in detail")
  1015. plot_2D(
  1016. nml_file=nml_file,
  1017. plane2d=plane,
  1018. min_width=1,
  1019. nogui=True,
  1020. title="",
  1021. plot_type="constant",
  1022. save_to_file=f"{self.netdoc_file_name.replace('.nml', '')}.{plane}.points.png",
  1023. plot_spec={
  1024. "point_cells": PYR_cells[1:] + SST_cells[1:] + VIP_cells[1:] + PV_cells[1:]
  1025. }
  1026. )
  1027. """
  1028. plot_interactive_3D(
  1029. self.netdoc_file_name,
  1030. plot_type="detailed",
  1031. plot_spec={
  1032. "point_cells": PYR_point_cells
  1033. + SST_point_cells
  1034. + VIP_point_cells
  1035. + PV_point_cells
  1036. },
  1037. min_width=1.0,
  1038. )
  1039. def create_simulation(self, dt=None, seed=123, record_data=True):
  1040. """Create simulation, record data.
  1041. :param dt: override dt value (in ms)
  1042. :type dt: str
  1043. :param seed: seed
  1044. :type seed: int
  1045. :param record_data: toggle whether data should be recorded
  1046. :type record_data: bool
  1047. """
  1048. start = time.time()
  1049. if dt is None:
  1050. dt = self.dt
  1051. if seed is None:
  1052. seed = self.seed
  1053. simulation = LEMSSimulation(
  1054. sim_id=self.simulation_id,
  1055. duration=float(self.sim_length.replace("ms", "")),
  1056. dt=float(dt.replace("ms", "")),
  1057. simulation_seed=seed,
  1058. )
  1059. simulation.assign_simulation_target(self.network_id)
  1060. simulation.include_neuroml2_file(f"HL23Net_{self.network_scale}.net.nml")
  1061. simulation.include_lems_file(self.lems_components_file_name)
  1062. if record_data is True:
  1063. simulation.create_output_file(
  1064. "output1", f"HL23Net_{self.network_scale}.v.dat"
  1065. )
  1066. print(f"Saving data to: HL23Net_{self.network_scale}.v.dat")
  1067. for apop in self.network.populations:
  1068. for inst in apop.instances:
  1069. simulation.add_column_to_output_file(
  1070. "output1",
  1071. f"{apop.id}_{inst.id}",
  1072. f"{apop.id}/{inst.id}/{apop.component}/0/v",
  1073. )
  1074. simulation.save_to_file(self.lems_simulation_file)
  1075. print(f"Saved simulation to {self.lems_simulation_file}")
  1076. end = time.time()
  1077. print(f"Creating simulation took: {(end - start)} seconds.")
  1078. def run_sim(
  1079. self,
  1080. engine: str = "jneuroml_neuron",
  1081. cluster: typing.Union[str, bool] = False,
  1082. **kwargs,
  1083. ):
  1084. """Run the sim
  1085. :param engine: engine to use (jneuroml_neuron/jneuroml_netpyne)
  1086. :type engine: str
  1087. :param cluster: toggle submitting to nsg or qsub
  1088. Use "nsg_dry" for a dry NSG run (won't submit)
  1089. Use "qsub" to generate a qsub script instead
  1090. :type cluster: bool or str
  1091. :param **kwargs: other engine + nsg specific args
  1092. """
  1093. if cluster is False:
  1094. print(f"Running simulation: {self.lems_simulation_file}")
  1095. run_lems_with(
  1096. engine,
  1097. self.lems_simulation_file,
  1098. max_memory=self.max_memory,
  1099. nogui=True,
  1100. show_plot_already=False,
  1101. **kwargs,
  1102. )
  1103. elif cluster == "qsub":
  1104. print(f"Generating files but not running: {self.lems_simulation_file}")
  1105. # TODO: create in a different folder like NSG
  1106. tdir = generate_sim_scripts_in_folder(
  1107. engine=engine,
  1108. lems_file_name=self.lems_simulation_file,
  1109. root_dir=".",
  1110. max_memory=self.max_memory,
  1111. nogui=True,
  1112. show_plot_already=False,
  1113. **kwargs,
  1114. )
  1115. # remove trailing backslash
  1116. if tdir[-1] == "/":
  1117. tdir = tdir[:-1]
  1118. qsub_fn = f"{tdir}/{tdir}_generated/{self.lems_simulation_file}.sh"
  1119. netpyne_simfile = self.lems_simulation_file.replace(".xml", "_netpyne.py")
  1120. print(f"Generating qsub script for use on cluster: {qsub_fn}")
  1121. with open(qsub_fn, "w") as f:
  1122. print(
  1123. inspect.cleandoc(
  1124. textwrap.dedent(
  1125. f"""
  1126. #!/bin/bash -l
  1127. #$ -pe mpi 1024
  1128. #$ -l mem=4G
  1129. #$ -l h_rt=6:00:00
  1130. #$ -cwd
  1131. #$ -m be
  1132. source ~/.venv/bin/activate
  1133. nrnivmodl
  1134. gerun python3 {netpyne_simfile} -nogui
  1135. """
  1136. )
  1137. ),
  1138. file=f,
  1139. )
  1140. elif cluster == "nsg_dry":
  1141. print(
  1142. f"Preparing to run on NSG (but not submitting): {self.lems_simulation_file}"
  1143. )
  1144. run_on_nsg(
  1145. engine,
  1146. self.lems_simulation_file,
  1147. dry_run=True,
  1148. max_memory=self.max_memory,
  1149. **kwargs,
  1150. )
  1151. elif cluster == "nsg":
  1152. print(f"Running simulation on NSG: {self.lems_simulation_file}")
  1153. run_on_nsg(
  1154. engine, self.lems_simulation_file, max_memory=self.max_memory, **kwargs
  1155. )
  1156. print("Press Ctrl C to interrupt the waiting process")
  1157. print(
  1158. "You can use `nsg_job -l` to check the status of the job and download the simulation reults"
  1159. )
    def plot_v_graphs(self):
        """Plot membrane potential graphs"""
        # reload_saved_data does the plotting itself here (plot=True,
        # show_plot_already=True); spike/event files are not reloaded
        data = reload_saved_data(
            self.lems_simulation_file,
            plot=True,
            show_plot_already=True,
            reload_events=False,
        )
        # NOTE(review): the block below is an inert string literal (dead
        # code kept for reference) — a manual generate_plot alternative
        """
        logger.debug(data.keys())

        xvals = [data["t"]]
        yvals = list(data.values())[1:]

        logger.debug(len(xvals * len(yvals)))
        logger.debug(yvals)

        labels = [a.split("/")[0] for a in data.keys()][1:]

        generate_plot(
            xvalues=xvals * len(yvals),
            yvalues=yvals,
            title="Membrane potentials",
            labels=labels,
            xaxis="time (ms)",
            yaxis="v (mV)",
            cols_in_legend_box=2,
            save_figure_to=f"{self.lems_simulation_file.replace('.xml', '')}_v.png",
        )
        """
  1186. def add_step_current(self):
  1187. """Add a constant step current to all cells.
  1188. Useful for testing the cells to ensure they produce correct behaviour,
  1189. in an unconnected network, for example
  1190. """
  1191. print("Adding step current to each cell")
  1192. # a single pulse generator
  1193. pg = self.netdoc.add(
  1194. neuroml.PulseGenerator,
  1195. id="pg_0",
  1196. delay="100ms",
  1197. duration=self.sim_length,
  1198. amplitude="0.2nA",
  1199. )
  1200. for pop in self.network.populations:
  1201. self.network.add(neuroml.ExplicitInput, target=f"{pop.id}[0]", input=pg.id)
if __name__ == "__main__":
    # command line interface: build, visualise, and simulate the network
    parser = argparse.ArgumentParser(
        prog="NeuroML_YaoEtAl2022",
        description="Generate and simulate NeuroML version of Yao et al 2022",
    )
    # general options
    general_args = parser.add_argument_group("General options")
    # NOTE(review): required=True makes the default value redundant here
    general_args.add_argument(
        "-s",
        "--scale",
        action="store",
        help="Scale of network",
        default="1",
        required=True,
        type=float,
    )
    general_args.add_argument(
        "-n",
        "--new_cells",
        action="store_true",
        help="Toggle creation of new cells or re-use of existing ones",
        default=False,
    )
    # NOTE(review): store_true combined with default=True means the next
    # three flags can never be switched off from the command line — confirm
    general_args.add_argument(
        "-b",
        "--biophysics",
        action="store_true",
        help="Toggle inclusion of biophysics",
        default=True,
    )
    general_args.add_argument(
        "-t",
        "--tonic_inhibition",
        action="store_true",
        help="Toggle tonic inhibition in the network",
        default=True,
    )
    general_args.add_argument(
        "-c",
        "--connections",
        action="store_true",
        help="Toggle creation of network connections",
        default=True,
    )
    general_args.add_argument(
        "-i",
        "--network_input",
        action="store",
        help="Type of input to give to network",
        choices=["background", "step", "none"],
        default="background",
    )
    general_args.add_argument(
        "-l",
        "--stimulus",
        action="store_true",
        help="Toggle external stimulus",
        default=False,
    )
    general_args.add_argument(
        "--hdf5",
        action="store_true",
        help="Toggle exporting as HDF5",
    )
    general_args.add_argument(
        "-r",
        "--rotate_cells",
        action="store_true",
        help="Toggle rotation of cells: not required for simulation since placements of cells in space does not affect it",
        default=False,
    )

    # actions
    action_args = parser.add_argument_group("Actions")
    action_args.add_argument(
        "--create_network",
        action="store_true",
        help="Toggle creation of new network",
        default=False,
    )
    action_args.add_argument(
        "--create_simulation",
        action="store_true",
        help="Toggle creation of new simulation",
        default=False,
    )
    action_args.add_argument(
        "--visualize_network",
        action="store",
        metavar="<min number of cells to visualise with full morphology>",
        help="Visualise network with provided minimum number of cells",
        type=int,
        default=25,
    )

    # only one simulation backend may be chosen at a time
    run_parser = action_args.add_mutually_exclusive_group()
    run_parser.add_argument(
        "--simulate_neuron",
        action="store_true",
        help="Simulation using single threaded NEURON simulation",
    )
    run_parser.add_argument(
        "--simulate_netpyne_nsg",
        action="store_true",
        help="Generate configuration to run using NetPyNE on NSG",
    )
    run_parser.add_argument(
        "--simulate_netpyne_qsub",
        action="store_true",
        help="Generate configuration to run using NetPyNE on a cluster using qsub",
    )

    # NSG options
    nsg_args = parser.add_argument_group("NSG options")
    # parse
    nsg_args.add_argument(
        "--number_cores", action="store", default="64", help="Number of cores requested"
    )
    nsg_args.add_argument(
        "--number_of_nodes", action="store", default="8", help="Number of nodes"
    )
    nsg_args.add_argument(
        "--tasks_per_node",
        action="store",
        default="64",
        help="Number of tasks per node (cores * number nodes?)",
    )
    nsg_args.add_argument(
        "--memory_per_node",
        action="store",
        default="240",
        help="Memory requested per node in GB",
    )
    nsg_args.add_argument(
        "--runtime", action="store", default="5", help="Run time requested in hours"
    )
    nsg_args.add_argument(
        "--environment",
        action="store",
        default="OSBv2_EXPANSE_0_7_3",
        help="Environment to request",
    )

    args = parser.parse_args()

    # construct the model object from the parsed general options
    model = HL23Net(
        scale=args.scale,
        new_cells=args.new_cells,
        biophysics=args.biophysics,
        tonic_inhibition=args.tonic_inhibition,
        connections=args.connections,
        network_input=args.network_input,
        stimulus=args.stimulus,
        hdf5=args.hdf5,
        rotate_cells=args.rotate_cells,
    )
    if args.create_network:
        model.create_network()
    if args.create_simulation:
        model.create_simulation()
    # NOTE(review): default=25 is truthy, so visualisation always runs even
    # when --visualize_network is not given — confirm intent
    if args.visualize_network:
        model.visualize_network(min_cells=args.visualize_network)

    # simulation
    if args.simulate_neuron:
        model.run_sim(engine="jneuroml_neuron", cluster=False, skip_run=False)
    elif args.simulate_netpyne_nsg:
        model.run_sim(
            engine="jneuroml_netpyne",
            cluster="nsg",
            nsg_sim_config={
                "number_cores_": args.number_cores,
                "tasks_per_node_": args.tasks_per_node,
                "number_nodes_": args.number_nodes,
                "number_gbmemorypernode_": args.memory_per_node,
                "runtime_": args.runtime,
                "toolId": args.environment,
                "nrnivmodl_o_": "1",
                "cmdlineopts_": "-nogui",
            },
            nogui=True,
        )
    elif args.simulate_netpyne_qsub:
        model.run_sim(engine="jneuroml_netpyne", cluster="qsub")