nml_main.py 55 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
712781279128012811282128312841285128612871288128912901291129212931294129512961297129812991300130113021303130413051306130713081309131013111312131313141315131613171318131913201321132213231324132513261327132813291330133113321333133413351336133713381339134013411342134313441345134613471348134913501351135213531354135513561357135813591360136113621363136413651366
  1. #!/usr/bin/env python3
  2. """
  3. Main script for creating scaled versions of NeuroML network and simulating it.
  4. This parses population and connectivity information exported from circuit.py to
  5. create a NeuroML representation of the network.
  6. File: nml_main.py
  7. Copyright 2023 Ankur Sinha
  8. """
  9. import copy
  10. import logging
  11. import math
  12. import pathlib
  13. import random
  14. import sys
  15. import time
  16. import typing
  17. import bisect
  18. import textwrap
  19. import inspect
  20. import h5py
  21. import lems.api as lems
  22. import neuroml
  23. import numpy
  24. import pandas
  25. import progressbar
  26. from neuroml.loaders import read_neuroml2_file
  27. from neuroml.utils import component_factory
  28. from neuroml.writers import NeuroMLHdf5Writer
  29. from pyneuroml.lems.LEMSSimulation import LEMSSimulation
  30. from pyneuroml.neuron.nrn_export_utils import get_segment_group_name
  31. from pyneuroml.nsgr import run_on_nsg
  32. from pyneuroml.plot.Plot import generate_plot
  33. from pyneuroml.plot.PlotMorphology import plot_2D
  34. from pyneuroml.plot.PlotMorphologyVispy import plot_interactive_3D
  35. from pyneuroml.pynml import (
  36. reload_saved_data,
  37. run_lems_with,
  38. write_neuroml2_file,
  39. generate_sim_scripts_in_folder,
  40. )
  41. from pyneuroml.utils import rotate_cell
  42. from pyneuroml.utils.units import get_value_in_si, convert_to_units
  43. logger = logging.getLogger(__name__)
  44. logger.setLevel(logging.INFO)
  45. # https://stackoverflow.com/questions/3410976/how-to-round-a-number-to-significant-figures-in-python
  46. # currently unused
  47. def round_to_sig(x):
  48. one_less = round(x, int(math.floor(math.log10(abs(x))))) / 10
  49. return round(x + one_less, int(math.floor(math.log10(abs(one_less)))))
  50. class HL23Net(object):
  51. """HL23 network"""
  52. def __init__(
  53. self,
  54. scale: float = 0.01,
  55. connections: bool = True,
  56. network_input: str = "background",
  57. stimulus: bool = True,
  58. biophysics: bool = True,
  59. tonic_inhibition: bool = True,
  60. new_cells: bool = True,
  61. max_memory: str = "8G",
  62. hdf5=True,
  63. rotate_cells=False,
  64. ):
  65. """Init
  66. :param scale: network scale
  67. :type scale: float
  68. :param connections: toggle creation of network connections
  69. :type connections: bool
  70. :param network_input: select input to provide to cells
  71. - "background": for OU background input
  72. - "step": for a constant step current
  73. - "none": for no input
  74. :type network_input: str
  75. :param stimulus: toggle addition of stimulus to cells for stim protocol
  76. :type stimulus: bool
  77. :param biophysics: toggle addition of biophysics to cells (otherwise
  78. the cells only include morphology)
  79. :type biophysics: bool
  80. :param tonic_inhibition: toggle addition of tonic inhibition to cells
  81. note: requires biophysics to be enabled
  82. :type tonic_inhibition: bool
  83. :param new_cells: toggle regeneration of rotated cells, otherwise
  84. existing rotated cells are used
  85. :type new_cells: bool
  86. :param max_memory: max memory for JVM when running pyneuroml
  87. :type max_memory: str
  88. :param hdf5: toggle exporting to HDF5 format
  89. :type hdf5: bool
  90. :param rotate_cells: generate rotated cells, useful for visualization
  91. but does not affect the spiking simulation (will affect LPF, but
  92. we're not generating those here)
  93. :type rotate_cells: bool
  94. """
  95. object.__init__(self)
  96. self.network_scale = scale
  97. self.connections = connections
  98. self.network_input = network_input
  99. self.stimulus = stimulus
  100. self.biophysics = biophysics
  101. self.tonic_inhibition = tonic_inhibition
  102. self.new_cells = new_cells
  103. self.max_memory = max_memory
  104. self.hdf5 = hdf5
  105. self.rotate_cells = rotate_cells
  106. # data dumped from the simulation
  107. try:
  108. self.cell_data = h5py.File(
  109. "../L23Net/Circuit_output/cell_positions_and_rotations.h5", "r"
  110. )
  111. self.connectivity_data = h5py.File(
  112. "../L23Net/Circuit_output/synapse_connections.h5", "r"
  113. )
  114. self.circuit_params = pandas.read_excel(
  115. "../L23Net/Circuit_param.xls", sheet_name=None, index_col=0
  116. )
  117. # default synaptic parameters: are read from exported H5 files for
  118. # creation of synapses (above)
  119. self.circuit_params["syn_params"] = {
  120. "none": {
  121. "tau_r_AMPA": 0,
  122. "tau_d_AMPA": 0,
  123. "tau_r_NMDA": 0,
  124. "tau_d_NMDA": 0,
  125. "e": 0,
  126. "Dep": 0,
  127. "Fac": 0,
  128. "Use": 0,
  129. "u0": 0,
  130. "gmax": 0,
  131. }
  132. }
  133. # confirmed from self.cell_data.keys()
  134. self.cell_types = [i for i in self.circuit_params["conn_probs"].axes[0]]
  135. except FileNotFoundError:
  136. print("Files not found")
  137. self.create = False
  138. self.pop_colors = {
  139. "HL23PV": "0 0 1",
  140. "HL23PYR": "1 0 0",
  141. "HL23SST": "0 1 0",
  142. "HL23VIP": "0.5 0.5 0.5",
  143. }
  144. self.simulation_id = f"HL23Sim_{self.network_scale}"
  145. if self.rotate_cells is True:
  146. self.lems_simulation_file = (
  147. f"LEMS_HL23_{self.network_scale}_Sim.rotated.xml"
  148. )
  149. else:
  150. self.lems_simulation_file = f"LEMS_HL23_{self.network_scale}_Sim.xml"
  151. self.netdoc = None
  152. self.network_id = "HL23Network"
  153. if self.rotate_cells is True:
  154. self.netdoc_file_name = f"HL23Net_{self.network_scale}.rotated.net.nml"
  155. else:
  156. self.netdoc_file_name = f"HL23Net_{self.network_scale}.net.nml"
  157. if self.hdf5 is True:
  158. self.netdoc_file_name += ".h5"
  159. self.lems_components_file_name = f"lems_components_{self.network_scale}.xml"
  160. self.stim_start = "200ms"
  161. self.sim_length = "1000ms"
  162. self.sim_end = convert_to_units(
  163. f"{get_value_in_si(self.stim_start) + get_value_in_si(self.sim_length)} s",
  164. "ms",
  165. )
  166. self.dt = "0.025ms"
  167. self.seed = 4587
  168. def create_network(self):
  169. # set the scale of the network
  170. if self.create is False:
  171. print("Not creating network")
  172. return
  173. start = time.time()
  174. print(f"Creating network with scale {self.network_scale}")
  175. self.netdoc = component_factory(neuroml.NeuroMLDocument, id="HL23Network")
  176. self.network = self.netdoc.add(
  177. neuroml.Network,
  178. id=self.network_id,
  179. temperature="34.0 degC",
  180. notes=f"L23 network at {self.network_scale} scale",
  181. validate=False,
  182. )
  183. # synapse types
  184. # LEMS component definitions will be included in simulation file later
  185. self.lems_components = lems.Model()
  186. self.lems_components.add(lems.Include("CaDynamics_E2_NML2.nml"))
  187. self.lems_components.add(lems.Include("synapses/ProbAMPANMDA.synapse.nml"))
  188. self.lems_components.add(lems.Include("synapses/ProbUDF.synapse.nml"))
  189. if self.tonic_inhibition:
  190. self.lems_components.add(lems.Include("channels/Tonic.nml"))
  191. # add all the channel definitions to
  192. channel_files = pathlib.Path("channels").glob("**/*.channel.nml")
  193. for afile in channel_files:
  194. logger.debug(f"Including {afile}")
  195. self.lems_components.add(lems.Include(str(afile)))
  196. self.create_cells()
  197. if self.connections is True:
  198. self.create_connections()
  199. if self.network_input == "background":
  200. self.add_background_input()
  201. elif self.network_input == "step":
  202. self.add_step_current()
  203. elif self.network_input == "none":
  204. pass
  205. else:
  206. print(f"Invalid network_input value: {self.network_input}, not adding any")
  207. pass
  208. if self.stimulus is True:
  209. self.add_stimulus()
  210. print(self.netdoc.summary())
  211. self.netdoc.validate(recursive=True)
  212. print(f"Writing {self.lems_components_file_name} ")
  213. self.lems_components.export_to_file(self.lems_components_file_name)
  214. print(f"Writing {self.netdoc_file_name} ")
  215. if self.hdf5:
  216. NeuroMLHdf5Writer.write(
  217. self.netdoc, self.netdoc_file_name, embed_xml=False, compress=True
  218. )
  219. else:
  220. write_neuroml2_file(self.netdoc, self.netdoc_file_name, validate=False)
  221. end = time.time()
  222. print(f"Creating network took: {(end - start)} seconds.")
    def create_cells(self):
        """Create cells and add them to the network.

        Each rotated cell is a new component in NeuroML, and since each
        population can only have one component attached to it when cells are
        rotated, each cell will belong to a different single celled
        population.

        However, for simulations where one isn't looking at LFPs etc., one
        does not need to rotate the cells. So, one cell component will be
        used to create a single population of each cell type.
        """
        start = time.time()
        print("Creating cells")
        # make a directory for storing rotated cells
        # cells are written to individual files and pulled into the network
        # document by reference (IncludeType), so the network document itself
        # doesn't get too large
        self.temp_cell_dir = "rotated_cells"
        cellfilesdir = pathlib.Path(self.temp_cell_dir)
        cellfilesdir.mkdir(exist_ok=True)
        # keep track of cell gids for our connections later, required for
        # scaled down versions when not all cells are included
        self.nml_cell = {}  # type: typing.Dict[str, neuroml.Cell]
        self.cell_list = {}  # type: typing.Dict[str, int]
        # gids per cell type; populated below but currently unused elsewhere
        self.cell_list_by_type = {}
        for ctype in self.cell_types:
            self.cell_list_by_type[ctype] = []
        # create the cell populations
        for ctype in self.cell_types:
            celldataset = self.cell_data[ctype]
            # ['gid', 'x', 'y', 'z', 'x_rot', 'y_rot', 'z_rot']
            logger.debug(f"table headers are: {celldataset.dtype.fields.keys()}")
            # load the template cell for this type from its NeuroML file
            self.nml_cell[ctype] = neuroml.loaders.read_neuroml2_file(
                f"{ctype}.cell.nml"
            ).cells[0]  # type: neuroml.Cell
            # replace biophys with empty object (morphology-only simulations)
            if self.biophysics is False:
                self.nml_cell[
                    ctype
                ].biophysical_properties = neuroml.BiophysicalProperties(id="biophys")
                self.nml_cell[ctype].biophysical_properties.add(
                    neuroml.MembraneProperties
                )
                self.nml_cell[ctype].biophysical_properties.add(
                    neuroml.IntracellularProperties
                )
            # include the cell to ensure the ion channel files are included
            # self.netdoc.add(neuroml.IncludeType, href=f"{ctype}.cell.nml")
            # If we don't rotate cells, we only need one population per cell
            # type.
            # Make a copy anyway because we're modifying the original cell
            if self.rotate_cells is False:
                unrotated_cell = copy.deepcopy(self.nml_cell[ctype])
                unrotated_cell.id = unrotated_cell.id + "_sim"
                unrotated_cell_file = (
                    f"{self.temp_cell_dir}/{unrotated_cell.id}.cell.nml"
                )
                unrotated_cell_doc = component_factory(
                    neuroml.NeuroMLDocument, id=f"{unrotated_cell.id}_doc"
                )
                unrotated_cell_doc.add(unrotated_cell)
                if self.biophysics is True:
                    self.__add_tonic_inhibition(
                        ctype, unrotated_cell, unrotated_cell_doc
                    )
                write_neuroml2_file(
                    unrotated_cell_doc, unrotated_cell_file, validate=False
                )
                self.netdoc.add(neuroml.IncludeType, href=unrotated_cell_file)
                # one population holding all instances of this cell type
                pop = self.network.add(
                    neuroml.Population,
                    id=f"{ctype}_pop",
                    type="populationList",
                    component=unrotated_cell.id,
                )
                pop.add(neuroml.Property(tag="color", value=self.pop_colors[ctype]))
                pop.add(neuroml.Property(tag="region", value="L23"))
            # i: instance id within the (single) unrotated population
            i = 0
            # sub-sample the exported cells: keep every `step`th entry so
            # that roughly network_scale * N cells are instantiated
            step = int(1 / self.network_scale)
            maxcell = len(celldataset)
            # put in minimum 2 cells of each type
            if step >= maxcell:
                step = 1
                maxcell = 2
            for j in range(0, maxcell, step):
                acell = celldataset[j]
                gid = acell[0]
                x = acell[1]
                y = acell[2]
                z = acell[3]
                xrot = acell[4]
                yrot = acell[5]
                zrot = acell[6]
                if self.rotate_cells is True:
                    rotated_cell = None
                    rotated_cell_file = (
                        f"{self.temp_cell_dir}/{self.nml_cell[ctype].id}_{gid}.cell.nml"
                    )
                    rotated_cell_id = self.nml_cell[ctype].id + f"_{gid}"
                    # reuse a previously generated rotated cell file unless
                    # regeneration was requested
                    if (
                        self.new_cells is False
                        and pathlib.Path(rotated_cell_file).exists()
                    ):
                        print(f"{rotated_cell_file} already exists, not overwriting.")
                        print("Set new_cells=True to regenerate all rotated cell files")
                    else:
                        rotated_cell = rotate_cell(
                            self.nml_cell[ctype],
                            xrot,
                            yrot,
                            zrot,
                            order="xyz",
                            relative_to_soma=True,
                        )
                        rotated_cell.id = rotated_cell_id
                        rotated_cell_doc = component_factory(
                            neuroml.NeuroMLDocument, id=f"{rotated_cell_id}_doc"
                        )
                        if self.biophysics is True:
                            self.__add_tonic_inhibition(
                                ctype, rotated_cell, rotated_cell_doc
                            )
                        rotated_cell_doc.add(rotated_cell)
                        write_neuroml2_file(
                            rotated_cell_doc, rotated_cell_file, validate=False
                        )
                    self.netdoc.add(neuroml.IncludeType, href=rotated_cell_file)
                    # one single-cell population per rotated cell, since a
                    # population can only reference one component
                    pop = self.network.add(
                        neuroml.Population,
                        id=f"{ctype}_pop_{gid}",
                        type="populationList",
                        component=rotated_cell_id,
                    )
                    pop.add(neuroml.Property(tag="color", value=self.pop_colors[ctype]))
                    pop.add(neuroml.Property(tag="region", value="L23"))
                    pop.add(
                        neuroml.Instance, id=0, location=neuroml.Location(x=x, y=y, z=z)
                    )
                    # track what gids we're including in our network
                    # used later when creating connections
                    self.cell_list[gid] = 0
                else:
                    # unrotated: all instances share one population; map the
                    # original gid to its instance id in that population
                    pop.add(
                        neuroml.Instance, id=i, location=neuroml.Location(x=x, y=y, z=z)
                    )
                    self.cell_list[gid] = i
                    i += 1
                # currently unused
                self.cell_list_by_type[ctype].append(gid)
        print(self.netdoc.summary())
        self.netdoc.validate(recursive=True)
        end = time.time()
        print(f"Creating cells took: {(end - start)} seconds")
  373. def __add_tonic_inhibition(self, ctype, cell, cell_doc):
  374. """Add tonic inhibition to provided cell
  375. :param ctype: cell type
  376. :type ctype: str
  377. :param cell: cell object to add to
  378. :type cell: neuroml.Cell
  379. :param cell_doc: cell doc object
  380. :type cell_doc: neuroml.NeuroMLDocument
  381. """
  382. if self.tonic_inhibition is True:
  383. # add gaba tonic inhibition to cells
  384. # definition file is included in the network, so we don't
  385. # re-include it here.
  386. if ctype == "HL23PYR" or ctype == "HL23SST":
  387. cell.add_channel_density(
  388. nml_cell_doc=cell_doc,
  389. cd_id="TonicInhibition",
  390. ion_channel="TonicPavlov2009",
  391. cond_density="0.000938 S_per_cm2",
  392. erev="-75 mV",
  393. group_id="all_minus_myelin",
  394. ion="non_specific",
  395. ion_chan_def_file="",
  396. )
  397. elif ctype == "HL23PV" or ctype == "HL23VIP":
  398. cell.add_channel_density(
  399. nml_cell_doc=cell_doc,
  400. cd_id="TonicInhibition",
  401. ion_channel="TonicPavlov2009",
  402. cond_density="0.000938 S_per_cm2",
  403. erev="-75 mV",
  404. group_id="all",
  405. ion="non_specific",
  406. ion_chan_def_file="",
  407. )
    def create_connections(self):
        """Create synaptic connections between cell populations.

        For every (pretype, posttype) pair this reads the synapse parameters
        from the exported connectivity HDF5 data, creates one LEMS synapse
        component per pair, and then adds one weighted/delayed connection per
        exported (precell, postcell) entry whose cells exist in this scaled
        network. The post-synaptic segment and fraction-along are derived
        from the exported section name and relative location.
        """
        start = time.time()
        print("Creating connections")
        # count how many connections we have in total
        conn_count = 0
        # create dicts to hold values for caching: prevents us from having to
        # get this info again and again
        ordered_segments = {}
        cumulative_lengths = {}
        for atype in self.cell_types:
            ordered_segments[f"{atype}"] = {}
            cumulative_lengths[f"{atype}"] = {}
        # create connections
        for pretype in self.cell_types:
            for posttype in self.cell_types:
                conndataset = self.connectivity_data[f"{pretype}:{posttype}"]
                # string: the NEURON synapse mechanism name exported by
                # circuit.py
                mechanism = self.connectivity_data[f"synparams/{pretype}:{posttype}"][
                    "mechanism"
                ][()].decode("utf-8")
                # all ints/floats
                if "UDF" in mechanism:
                    tau_r = self.connectivity_data[f"synparams/{pretype}:{posttype}"][
                        "tau_r"
                    ][()]
                    tau_d = self.connectivity_data[f"synparams/{pretype}:{posttype}"][
                        "tau_d"
                    ][()]
                elif "AMPANMDA" in mechanism:
                    tau_r_AMPA = self.connectivity_data[
                        f"synparams/{pretype}:{posttype}"
                    ]["tau_r_AMPA"][()]
                    tau_r_NMDA = self.connectivity_data[
                        f"synparams/{pretype}:{posttype}"
                    ]["tau_r_NMDA"][()]
                    tau_d_AMPA = self.connectivity_data[
                        f"synparams/{pretype}:{posttype}"
                    ]["tau_d_AMPA"][()]
                    tau_d_NMDA = self.connectivity_data[
                        f"synparams/{pretype}:{posttype}"
                    ]["tau_d_NMDA"][()]
                else:
                    raise ValueError(f"Unknown mechanism found: {mechanism}")
                # common to both synapses
                Use = self.connectivity_data[f"synparams/{pretype}:{posttype}"]["Use"][
                    ()
                ]
                Dep = self.connectivity_data[f"synparams/{pretype}:{posttype}"]["Dep"][
                    ()
                ]
                Fac = self.connectivity_data[f"synparams/{pretype}:{posttype}"]["Fac"][
                    ()
                ]
                gbase = self.connectivity_data[f"synparams/{pretype}:{posttype}"][
                    "gmax"
                ][()]
                u0 = self.connectivity_data[f"synparams/{pretype}:{posttype}"]["u0"][()]
                erev = self.connectivity_data[f"synparams/{pretype}:{posttype}"]["e"][
                    ()
                ]
                print(
                    f"Creating synapse component: {pretype} -> {posttype}: {pretype}_{posttype}_{mechanism}."
                )
                # one LEMS synapse component per pretype->posttype pair,
                # parameterised from the exported values above
                if "UDF" in mechanism:
                    syn = lems.Component(
                        id_=f"{pretype}_{posttype}_{mechanism}",
                        type_=f"{mechanism}",
                        tau_r=f"{tau_r} ms",
                        tau_d=f"{tau_d} ms",
                        Use=Use,
                        Dep=f"{Dep} ms",
                        Fac=f"{Fac} ms",
                        gbase=f"{gbase} uS",
                        u0=u0,
                        erev=f"{erev} mV",
                    )
                elif "AMPANMDA" in mechanism:
                    syn = lems.Component(
                        id_=f"{pretype}_{posttype}_{mechanism}",
                        type_=f"{mechanism}",
                        tau_r_AMPA=f"{tau_r_AMPA} ms",
                        tau_d_AMPA=f"{tau_d_AMPA} ms",
                        tau_r_NMDA=f"{tau_r_NMDA} ms",
                        tau_d_NMDA=f"{tau_d_NMDA} ms",
                        Use=Use,
                        Dep=f"{Dep} ms",
                        Fac=f"{Fac} ms",
                        gbase=f"{gbase} uS",
                        u0=u0,
                        erev=f"{erev} mV",
                        mg="1 mM",
                        weight_factor_NMDA="1",
                    )
                self.lems_components.add(syn)
                anml_cell = self.nml_cell[f"{posttype}"]  # type: neuroml.Cell
                syn_count = 0
                cur_precell = None
                cur_postcell = None
                print(
                    f"Creating connections: {pretype} -> {posttype} (~{int(conndataset.shape[0])} with scale of {self.network_scale})."
                )
                # if we're not rotating cells, we only need one projection
                # between the single populations of different cell types
                if self.rotate_cells is False:
                    proj = component_factory(
                        neuroml.Projection,
                        id=f"proj_{pretype}_{posttype}",
                        presynaptic_population=f"{pretype}_pop",
                        postsynaptic_population=f"{posttype}_pop",
                        synapse=f"{pretype}_{posttype}_{mechanism}",
                    )  # type: neuroml.Projection
                # show a progress bar so we have some idea of what's going on
                max_value_possible = int(conndataset.shape[0])
                # rounded = round_to_sig(approx_max_value)
                # NOTE(review): progressbar2's documented kwarg is
                # ``max_value`` — confirm ``max_val`` is accepted by the
                # installed progressbar package
                bar = progressbar.ProgressBar(
                    max_val=max_value_possible, poll_interval=30
                )
                bar_count = 0
                for conn in conndataset:
                    # exported columns: pre gid, post gid, weight, delay,
                    # NEURON section name, relative location along section
                    precell = conn[0]
                    postcell = conn[1]
                    weight = conn[2]
                    delay = conn[3]
                    section = conn[4]
                    sectionx = conn[5]
                    # if both cells are not in our population, skip this connection
                    if (
                        precell not in self.cell_list.keys()
                        or postcell not in self.cell_list.keys()
                    ):
                        logger.debug(
                            f"{precell} or {postcell} are not included in the network. Skipping"
                        )
                        continue
                    bar.update(bar_count)
                    bar_count += 1
                    # strip the "CellName." prefix off the section name
                    section = (section.decode("utf-8")).split(".")[1]
                    neuroml_seggrp_id = get_segment_group_name(section)
                    # use info if it's cached, otherwise get new info and cache
                    # it
                    try:
                        ord_segs = ordered_segments[f"{posttype}"][neuroml_seggrp_id]
                        cumul_lengths = cumulative_lengths[f"{posttype}"][
                            neuroml_seggrp_id
                        ]
                    except KeyError:
                        [
                            ord_segs,
                            cumul_lengths,
                        ] = anml_cell.get_ordered_segments_in_groups(
                            group_list=[neuroml_seggrp_id],
                            include_cumulative_lengths=True,
                        )
                        ordered_segments[f"{posttype}"][neuroml_seggrp_id] = ord_segs
                        cumulative_lengths[f"{posttype}"][neuroml_seggrp_id] = (
                            cumul_lengths
                        )
                    list_ord_segs = ord_segs[neuroml_seggrp_id]
                    list_cumul_lengths = cumul_lengths[neuroml_seggrp_id]
                    total_len = list_cumul_lengths[-1]
                    # translate "fraction along the NEURON section" into a
                    # (segment id, fraction along segment) pair
                    section_loc = total_len * sectionx
                    ind = bisect.bisect_left(list_cumul_lengths, section_loc)
                    post_seg = list_ord_segs[ind]
                    # if it's the first segment, with ind 0, [ind - 1] is not its
                    # parent segment
                    if ind != 0:
                        frac_along = (section_loc - list_cumul_lengths[ind - 1]) / (
                            list_cumul_lengths[ind] - list_cumul_lengths[ind - 1]
                        )
                        logger.debug(
                            f"frac_along: ({section_loc} - {list_cumul_lengths[ind - 1]}) / ({list_cumul_lengths[ind]} - {list_cumul_lengths[ind - 1]}) = {frac_along}"
                        )
                    else:
                        frac_along = section_loc / list_cumul_lengths[ind]
                        logger.debug(
                            f"frac_along: ({section_loc} / {list_cumul_lengths[ind]}) = {frac_along}"
                        )
                    # for zero length segments
                    # NOTE(review): this assumes numpy-float division by zero
                    # yields -inf instead of raising ZeroDivisionError —
                    # confirm the operands are numpy scalars
                    if frac_along == -float("inf"):
                        frac_along = 1
                    conn_count += 1
                    logger.debug(
                        f"{conn_count}: {pretype}:{precell} -> {posttype}:{postcell} {neuroml_seggrp_id}: segment {post_seg.id} ({list_cumul_lengths[ind - 1]} - {list_cumul_lengths[ind]}) at {frac_along} with mechanism {mechanism}"
                    )
                    # a new projection is only required when the pre or post cell
                    # change
                    if self.rotate_cells is True:
                        if precell != cur_precell or postcell != cur_postcell:
                            proj = self.network.add(
                                neuroml.Projection,
                                id=f"proj_{precell}_{postcell}",
                                presynaptic_population=f"{pretype}_pop_{precell}",
                                postsynaptic_population=f"{posttype}_pop_{postcell}",
                                synapse=f"{pretype}_{posttype}_{mechanism}",
                            )
                            cur_precell = precell
                            cur_postcell = postcell
                            syn_count = 0
                        try:
                            # weight is scaled up so that total synaptic input
                            # stays comparable in scaled-down networks
                            proj.add(
                                neuroml.ConnectionWD,
                                id=syn_count,
                                pre_cell_id=f"../{pretype}_pop_{precell}/0/{pretype}_{precell}",
                                pre_segment_id=0,
                                post_cell_id=f"../{posttype}_pop_{postcell}/0/{posttype}_{postcell}",
                                post_segment_id=post_seg.id,
                                post_fraction_along=frac_along,
                                weight=str(float(weight) / self.network_scale),
                                delay=f"{delay} ms",
                            )
                        except ValueError as e:
                            print(f"list of cumulative lengths: {list_cumul_lengths}")
                            print(
                                f"frac_along: ({section_loc} - {list_cumul_lengths[ind - 1]}) / ({list_cumul_lengths[ind]} - {list_cumul_lengths[ind - 1]}) = {frac_along}"
                            )
                            raise e
                    else:
                        try:
                            proj.add(
                                neuroml.ConnectionWD,
                                id=syn_count,
                                pre_cell_id=f"../{pretype}_pop/{self.cell_list[precell]}/{pretype}_sim",
                                pre_segment_id=0,
                                post_cell_id=f"../{posttype}_pop/{self.cell_list[postcell]}/{posttype}_sim",
                                post_segment_id=post_seg.id,
                                post_fraction_along=frac_along,
                                weight=str(float(weight) / self.network_scale),
                                delay=f"{delay} ms",
                            )
                        except ValueError as e:
                            print(f"list of cumulative lengths: {list_cumul_lengths}")
                            print(
                                f"frac_along: ({section_loc} - {list_cumul_lengths[ind - 1]}) / ({list_cumul_lengths[ind]} - {list_cumul_lengths[ind - 1]}) = {frac_along}"
                            )
                            raise e
                    syn_count += 1
                bar.finish()
                # Only add projection to network if there's at least one
                # connection in it.
                # Not required with rotated cells because projection creation
                # and connection creation go hand in hand
                if self.rotate_cells is False:
                    if len(proj.connection_wds) > 0:
                        self.network.add(proj)
        end = time.time()
        print(f"Creating connections took: {(end - start)} seconds.")
  654. def add_background_input(self):
  655. """Add background input to cells."""
  656. start = time.time()
  657. # Component Type definition
  658. self.lems_components.add(lems.Include("synapses/Gfluct.nml"))
  659. # base excitatory conductances (from paper, given here in uS)
  660. cell_type_ge0 = {
  661. "HL23PYR": 0.000028,
  662. "HL23PV": 0.00028,
  663. "HL23SST": 0.00003,
  664. "HL23VIP": 0.000066,
  665. }
  666. # store input locations per cell type and create input components for
  667. # each cell type also, since all cells are indentical
  668. # values: { 'rel distance' : [(seg id, frac along)] }
  669. cell_type_input_locations = {} # type: typing.Dict[str, typing.Dict[float, typing.Tuple[int, float]]]
  670. for cell_type, cell in self.nml_cell.items():
  671. cell_type_input_locations[cell_type] = {}
  672. extremeties = cell.get_extremeties()
  673. try:
  674. basal_segs = cell.get_all_segments_in_group("basal_dendrite_group")
  675. except Exception:
  676. # PV cell doesn't have basal, only a dendrite group
  677. basal_segs = cell.get_all_segments_in_group("dendrite_group")
  678. # input points on basal dendrites
  679. longest_basal_branch_length = 0
  680. for segid, distance in extremeties.items():
  681. if distance > longest_basal_branch_length and segid in basal_segs:
  682. longest_basal_branch_length = distance
  683. half_way_basal = longest_basal_branch_length / 2
  684. segs_basal = cell.get_segments_at_distance(half_way_basal)
  685. # create input component for 0.5
  686. g_e0 = cell_type_ge0[cell_type] * math.exp(0.5)
  687. gfluct_component = lems.Component(
  688. id_=f"Gfluct_{cell_type}_0_5",
  689. type_="Gfluct",
  690. start=self.stim_start,
  691. stop=self.sim_end,
  692. dt=self.dt,
  693. E_e="0mV",
  694. E_i="-80mV",
  695. g_e0=f"{g_e0} uS",
  696. g_i0="0pS",
  697. tau_e="65ms",
  698. tau_i="20ms",
  699. std_e=f"{g_e0} uS",
  700. std_i="0pS",
  701. )
  702. self.lems_components.add(gfluct_component)
  703. # get segments to place input at
  704. cell_type_input_locations[cell_type][0.5] = []
  705. for seg, frac_along in segs_basal.items():
  706. if seg in basal_segs:
  707. cell_type_input_locations[cell_type][0.5].append((seg, frac_along))
  708. # additional for pyr apical cells
  709. pyr_cell = self.nml_cell["HL23PYR"]
  710. extremeties = pyr_cell.get_extremeties()
  711. longest_apical_branch_length = 0
  712. pyr_apical_segs = pyr_cell.get_all_segments_in_group("apical_dendrite_group")
  713. for segid, distance in extremeties.items():
  714. if distance > longest_apical_branch_length and segid in pyr_apical_segs:
  715. longest_apical_branch_length = distance
  716. apical_input_distances = [0.1, 0.3, 0.5, 0.7, 0.9]
  717. for d in apical_input_distances:
  718. # create the input component
  719. g_e0 = cell_type_ge0[cell_type] * math.exp(d)
  720. # create input component for use at each distance point
  721. gfluct_component = lems.Component(
  722. id_=f"Gfluct_HL23PYR_{str(d).replace('.', '_')}",
  723. type_="Gfluct",
  724. start=self.stim_start,
  725. stop=self.sim_end,
  726. dt=self.dt,
  727. E_e="0mV",
  728. E_i="-80mV",
  729. g_e0=f"{g_e0} uS",
  730. g_i0="0pS",
  731. tau_e="65ms",
  732. tau_i="20ms",
  733. std_e=f"{g_e0} uS",
  734. std_i="0pS",
  735. )
  736. self.lems_components.add(gfluct_component)
  737. # get segments to place at
  738. segs_apical = pyr_cell.get_segments_at_distance(
  739. d * longest_apical_branch_length
  740. )
  741. for seg, frac_along in segs_apical.items():
  742. if seg in pyr_apical_segs:
  743. try:
  744. cell_type_input_locations["HL23PYR"][d].append(
  745. (seg, frac_along)
  746. )
  747. except KeyError:
  748. cell_type_input_locations["HL23PYR"][d] = []
  749. cell_type_input_locations["HL23PYR"][d].append(
  750. (seg, frac_along)
  751. )
  752. # create a new input list for each population, and each location
  753. # because input list takes a component as an argument, and a different
  754. # component is required for each location
  755. input_list_ctr = 0
  756. input_ctr = 0
  757. for pop in self.network.populations:
  758. # cell name
  759. cell_type = pop.component.split("_")[0]
  760. input_segs = cell_type_input_locations[cell_type]
  761. for rel_dist, seginfos in input_segs.items():
  762. # one input list per population per component
  763. inputlist = self.network.add(
  764. "InputList",
  765. id=f"Gfluct_{input_list_ctr}",
  766. component=f"Gfluct_{cell_type}_{str(rel_dist).replace('.', '_')}",
  767. populations=pop.id,
  768. validate=False,
  769. )
  770. input_list_ctr += 1
  771. for seg, frac_along in seginfos:
  772. if self.rotate_cells is True:
  773. inputlist.add(
  774. "Input",
  775. id=f"{input_ctr}",
  776. target=f"../{pop.id}/0/{pop.component}",
  777. destination="synapses",
  778. segment_id=seg,
  779. fraction_along=frac_along,
  780. )
  781. input_ctr += 1
  782. else:
  783. for inst in pop.instances:
  784. inputlist.add(
  785. "Input",
  786. id=f"{input_ctr}",
  787. target=f"../{pop.id}/{inst.id}/{pop.component}",
  788. destination="synapses",
  789. segment_id=seg,
  790. fraction_along=frac_along,
  791. )
  792. input_ctr += 1
  793. end = time.time()
  794. print(f"Adding background input took: {(end - start)} seconds.")
    # TODO: incomplete, to be done after bg input implementation
    def add_stimulus(self):
        """Add additional stimulus to cells.

        Scans the "STIM_PARAM" table in ``self.circuit_params`` for a row
        matching each cell type, then attaches SpikeGenerator-driven
        InputLists to area-weighted random dendritic segments of the first
        ``num_cells`` cells of that type.

        NOTE(review): marked incomplete by the author (see TODO above):
        ``start_index``, ``num_stim``, ``start_time``, ``gmax``,
        ``stim_type``, ``syn_params`` and ``time_delay`` are read/computed
        but never used below.
        """
        # temporarily raise verbosity while stimuli are wired up;
        # restored to INFO at the end of this method
        logger.setLevel(logging.DEBUG)
        print("Adding stimuli")
        # cell_nums = [self.circuit_params['SING_CELL_PARAM'].at['cell_num', name] for name in self.cell_types]
        # from circuit.py
        input_ctr = 0  # global counter: ids for SpikeGenerators/InputLists/Inputs
        for acell_type in self.cell_types:
            # iterate over rows
            for row in self.circuit_params["STIM_PARAM"].axes[0]:
                # initialise parameters
                num_cells = 0  # number of cells to provide input to
                start_index = 0  # unused, is set to 0
                num_stim = 0  # number of spikes
                interval = 0  # spike interval
                start_time = 0  # spikes start time
                delay = 0  # spikes delay
                delay_range = 0  # spikes delay range
                loc_num = 0  # number of locations on cell to provide spike to
                loc = "all"  # what locations to provide spikes to
                gmax = 0.0  # gmax
                stim_type = ""  # type of synapse for stimulus
                syn_params = ""  # parameters of synapse
                for col in self.circuit_params["STIM_PARAM"].axes[1]:
                    # found the cell row
                    if (
                        "cell_name" == col
                        and self.circuit_params["STIM_PARAM"].at[row, col] == acell_type
                    ):
                        # number of stimulated cells is scaled along with the
                        # network scale
                        num_cells = int(
                            self.circuit_params["STIM_PARAM"].at[row, "num_cells"]
                            * self.network_scale
                        )
                        start_index = self.circuit_params["STIM_PARAM"].at[
                            row, "start_index"
                        ]
                        num_stim = self.circuit_params["STIM_PARAM"].at[row, "num_stim"]
                        interval = self.circuit_params["STIM_PARAM"].at[row, "interval"]
                        # currently unused: NeuroML does not have a
                        # SpikeGenerator that allows setting a start time
                        start_time = self.circuit_params["STIM_PARAM"].at[
                            row, "start_time"
                        ]
                        delay = self.circuit_params["STIM_PARAM"].at[row, "delay"]
                        delay_range = self.circuit_params["STIM_PARAM"].at[
                            row, "delay_range"
                        ]
                        loc_num = self.circuit_params["STIM_PARAM"].at[row, "loc_num"]
                        # loc: unused: "dend", which corresponds to all
                        # dendritic segments
                        loc = self.circuit_params["STIM_PARAM"].at[row, "loc"]
                        gmax = self.circuit_params["STIM_PARAM"].at[row, "gmax"]
                        stim_type = self.circuit_params["STIM_PARAM"].at[
                            row, "stim_type"
                        ]
                        syn_params = self.circuit_params["STIM_PARAM"].at[
                            row, "syn_params"
                        ]

                        # load the single template cell, since choosing sections does
                        # not depend on rotation
                        a_nml_cell = self.nml_cell[acell_type]  # type: neuroml.Cell
                        # loc is always "dend"
                        logger.debug(f"loc: {loc}")
                        dendritic_segments_ids = a_nml_cell.get_all_segments_in_group(
                            "dendrite_group"
                        )
                        # dendritic_segments = [nml_cell.get_segment(seg) for seg in dendritic_segments_ids]
                        # normalised surface areas are used as probabilities so
                        # that larger segments are more likely to get an input
                        segment_areas = numpy.array(
                            [
                                a_nml_cell.get_segment_surface_area(seg)
                                for seg in dendritic_segments_ids
                            ]
                        )
                        segment_areas = segment_areas / segment_areas.sum()

                        cells = self.cell_list_by_type[acell_type]
                        ctr = 0
                        # if it's too small a model, at least give one cell input
                        if num_cells == 0:
                            num_cells = 1
                        # NOTE(review): `<=` makes this run num_cells + 1 times
                        # (ctr goes 0..num_cells inclusive) — confirm whether
                        # `<` was intended
                        while ctr <= num_cells:
                            cell_id = f"{acell_type}_{cells[ctr]}"
                            pop_id = f"{acell_type}_pop_{cells[ctr]}"
                            # get segments
                            input_segments = numpy.random.choice(
                                dendritic_segments_ids,
                                size=loc_num,
                                replace=False,
                                p=segment_areas,
                            )
                            for aseg in input_segments:
                                print(
                                    f"Adding sg_{input_ctr}: i_{input_ctr}/{input_ctr} to {pop_id}/{cell_id}/{aseg}"
                                )
                                # currently SpikeGenerator does not allow a start time, so
                                # this is unused
                                # NOTE(review): time_delay is computed but never
                                # read afterwards
                                time_delay = 0
                                while time_delay <= 0:
                                    time_delay = numpy.random.uniform(
                                        low=delay, high=delay + delay_range
                                    )
                                # periodic spike source for this input point
                                gen = self.netdoc.add(
                                    neuroml.SpikeGenerator,
                                    id=f"sg_{input_ctr}",
                                    period=f"{interval} ms",
                                )
                                input_list = self.network.add(
                                    neuroml.InputList,
                                    id=f"i_{input_ctr}",
                                    component=gen.id,
                                    populations=pop_id,
                                )
                                input_list.add(
                                    neuroml.Input,
                                    id=f"{input_ctr}",
                                    target=f"../{pop_id}/0/{cell_id}",
                                    destination="synapses",
                                    segment_id=aseg,
                                )
                                input_ctr += 1
                            ctr += 1
                        # matched row handled; stop scanning further columns
                        break
        logger.setLevel(logging.INFO)
  918. def visualize_network(self, min_cells=5):
  919. """Generate morph plots"""
  920. print(
  921. f"Visualising network at scale {self.network_scale} with {min_cells} in full"
  922. )
  923. # if the network has been constructed, copy over the populations and
  924. # include the cells
  925. if self.netdoc is not None:
  926. nml_file = neuroml.NeuroMLDocument(id="network_copy")
  927. nml_file.add(neuroml.Network, id="dummy_network")
  928. # copy over populations
  929. nml_file.networks[0].populations = self.netdoc.networks[0].populations
  930. nml_file.includes = self.netdoc.includes
  931. # include cells
  932. for inc in self.nml_file.includes:
  933. incfile = read_neuroml2_file(inc.href)
  934. for cell in incfile.cells:
  935. nml_file.add(cell)
  936. else:
  937. # otherwise read the file in once so it's not read in repeatedly
  938. print(f"Reading {self.netdoc_file_name}")
  939. nml_file = read_neuroml2_file(self.netdoc_file_name)
  940. PYR_cells = []
  941. SST_cells = []
  942. PV_cells = []
  943. VIP_cells = []
  944. PYR_point_cells = []
  945. SST_point_cells = []
  946. PV_point_cells = []
  947. VIP_point_cells = []
  948. # because each cell is in a separate population
  949. print("Picking point cells")
  950. for inc in nml_file.includes:
  951. cell = inc.href.split("/")[1].replace(".cell.nml", "")
  952. if "PYR" in cell:
  953. PYR_cells.append(cell)
  954. if "PV" in cell:
  955. PV_cells.append(cell)
  956. if "VIP" in cell:
  957. VIP_cells.append(cell)
  958. if "SST" in cell:
  959. SST_cells.append(cell)
  960. print(f"Got {len(PYR_cells)} PYR cells")
  961. print(f"Got {len(SST_cells)} SST cells")
  962. print(f"Got {len(PV_cells)} PV cells")
  963. print(f"Got {len(VIP_cells)} VIP cells")
  964. # at least plot min_cells cells of each type, and all the rest as points
  965. if (len(PYR_cells) - min_cells) > 0:
  966. PYR_point_cells = random.sample(PYR_cells, len(PYR_cells) - min_cells)
  967. if (len(SST_cells) - min_cells) > 0:
  968. SST_point_cells = random.sample(SST_cells, len(SST_cells) - min_cells)
  969. if (len(PV_cells) - min_cells) > 0:
  970. PV_point_cells = random.sample(PV_cells, len(PV_cells) - min_cells)
  971. if (len(VIP_cells) - min_cells) > 0:
  972. VIP_point_cells = random.sample(VIP_cells, len(VIP_cells) - min_cells)
  973. print(
  974. f"Picked {len(PYR_point_cells) + len(SST_point_cells) + len(PV_point_cells) + len(VIP_point_cells)} point cells"
  975. )
  976. """
  977. print("Removing axons from PYR cells")
  978. for ac in nml_file.cells:
  979. if ac.id not in PYR_point_cells:
  980. axons = ac.get_all_segments_in_group(ac.get_segment_group("axon_group"))
  981. for s in ac.morphology.segments:
  982. if s.id in axons:
  983. ac.morphology.segments.remove(s)
  984. """
  985. """
  986. for plane in ["xy", "yz", "zx"]:
  987. print(f"Plotting {plane} with {point_fraction} fraction as point cells")
  988. plot_2D(
  989. nml_file=nml_file,
  990. plane2d=plane,
  991. min_width=1,
  992. nogui=True,
  993. title="",
  994. plot_type="constant",
  995. save_to_file=f"{self.netdoc_file_name.replace('.nml', '')}.{plane}.png",
  996. plot_spec={
  997. "point_cells": PYR_point_cells + SST_point_cells + VIP_point_cells + PV_point_cells
  998. }
  999. )
  1000. print(f"Plotting {plane} with all cells as point cells")
  1001. plot_2D(
  1002. nml_file=nml_file,
  1003. plane2d=plane,
  1004. min_width=1,
  1005. nogui=True,
  1006. title="",
  1007. plot_type="constant",
  1008. save_to_file=f"{self.netdoc_file_name.replace('.nml', '')}.{plane}.allpoints.png",
  1009. plot_spec={
  1010. "point_fraction": 1.0
  1011. }
  1012. )
  1013. print(f"Plotting {plane} with a single cells of each type in detail")
  1014. plot_2D(
  1015. nml_file=nml_file,
  1016. plane2d=plane,
  1017. min_width=1,
  1018. nogui=True,
  1019. title="",
  1020. plot_type="constant",
  1021. save_to_file=f"{self.netdoc_file_name.replace('.nml', '')}.{plane}.points.png",
  1022. plot_spec={
  1023. "point_cells": PYR_cells[1:] + SST_cells[1:] + VIP_cells[1:] + PV_cells[1:]
  1024. }
  1025. )
  1026. """
  1027. plot_interactive_3D(
  1028. self.netdoc_file_name,
  1029. plot_type="detailed",
  1030. plot_spec={
  1031. "point_cells": PYR_point_cells
  1032. + SST_point_cells
  1033. + VIP_point_cells
  1034. + PV_point_cells
  1035. },
  1036. min_width=1.0,
  1037. )
  1038. def create_simulation(self, dt=None, seed=123, record_data=True):
  1039. """Create simulation, record data.
  1040. :param dt: override dt value (in ms)
  1041. :type dt: str
  1042. :param seed: seed
  1043. :type seed: int
  1044. :param record_data: toggle whether data should be recorded
  1045. :type record_data: bool
  1046. """
  1047. start = time.time()
  1048. if dt is None:
  1049. dt = self.dt
  1050. if seed is None:
  1051. seed = self.seed
  1052. simulation = LEMSSimulation(
  1053. sim_id=self.simulation_id,
  1054. duration=float(self.sim_length.replace("ms", "")),
  1055. dt=float(dt.replace("ms", "")),
  1056. simulation_seed=seed,
  1057. )
  1058. simulation.assign_simulation_target(self.network_id)
  1059. simulation.include_neuroml2_file(f"HL23Net_{self.network_scale}.net.nml")
  1060. simulation.include_lems_file(self.lems_components_file_name)
  1061. if record_data is True:
  1062. simulation.create_output_file(
  1063. "output1", f"HL23Net_{self.network_scale}.v.dat"
  1064. )
  1065. print(f"Saving data to: HL23Net_{self.network_scale}.v.dat")
  1066. for apop in self.network.populations:
  1067. for inst in apop.instances:
  1068. simulation.add_column_to_output_file(
  1069. "output1",
  1070. f"{apop.id}_{inst.id}",
  1071. f"{apop.id}/{inst.id}/{apop.component}/0/v",
  1072. )
  1073. simulation.save_to_file(self.lems_simulation_file)
  1074. print(f"Saved simulation to {self.lems_simulation_file}")
  1075. end = time.time()
  1076. print(f"Creating simulation took: {(end - start)} seconds.")
  1077. def run_sim(
  1078. self,
  1079. engine: str = "jneuroml_neuron",
  1080. cluster: typing.Union[str, bool] = False,
  1081. **kwargs,
  1082. ):
  1083. """Run the sim
  1084. :param engine: engine to use (jneuroml_neuron/jneuroml_netpyne)
  1085. :type engine: str
  1086. :param cluster: toggle submitting to nsg or qsub
  1087. Use "nsg_dry" for a dry NSG run (won't submit)
  1088. Use "qsub" to generate a qsub script instead
  1089. :type cluster: bool or str
  1090. :param **kwargs: other engine + nsg specific args
  1091. """
  1092. if cluster is False:
  1093. print(f"Running simulation: {self.lems_simulation_file}")
  1094. run_lems_with(
  1095. engine,
  1096. self.lems_simulation_file,
  1097. max_memory=self.max_memory,
  1098. nogui=True,
  1099. show_plot_already=False,
  1100. **kwargs,
  1101. )
  1102. elif cluster == "qsub":
  1103. print(f"Generating files but not running: {self.lems_simulation_file}")
  1104. # TODO: create in a different folder like NSG
  1105. tdir = generate_sim_scripts_in_folder(
  1106. engine=engine,
  1107. lems_file_name=self.lems_simulation_file,
  1108. root_dir=".",
  1109. max_memory=self.max_memory,
  1110. nogui=True,
  1111. show_plot_already=False,
  1112. **kwargs,
  1113. )
  1114. # remove trailing backslash
  1115. if tdir[-1] == "/":
  1116. tdir = tdir[:-1]
  1117. qsub_fn = f"{tdir}/{tdir}_generated/{self.lems_simulation_file}.sh"
  1118. netpyne_simfile = self.lems_simulation_file.replace(".xml", "_netpyne.py")
  1119. print(f"Generating qsub script for use on cluster: {qsub_fn}")
  1120. with open(qsub_fn, "w") as f:
  1121. print(
  1122. inspect.cleandoc(
  1123. textwrap.dedent(
  1124. f"""
  1125. #!/bin/bash -l
  1126. #$ -pe mpi 1024
  1127. #$ -l mem=4G
  1128. #$ -l h_rt=6:00:00
  1129. #$ -cwd
  1130. #$ -m be
  1131. source ~/.venv/bin/activate
  1132. nrnivmodl
  1133. gerun python3 {netpyne_simfile} -nogui
  1134. """
  1135. )
  1136. ),
  1137. file=f,
  1138. )
  1139. elif cluster == "nsg_dry":
  1140. print(
  1141. f"Preparing to run on NSG (but not submitting): {self.lems_simulation_file}"
  1142. )
  1143. run_on_nsg(
  1144. engine,
  1145. self.lems_simulation_file,
  1146. dry_run=True,
  1147. max_memory=self.max_memory,
  1148. **kwargs,
  1149. )
  1150. elif cluster == "nsg":
  1151. print(f"Running simulation on NSG: {self.lems_simulation_file}")
  1152. run_on_nsg(
  1153. engine, self.lems_simulation_file, max_memory=self.max_memory, **kwargs
  1154. )
  1155. print("Press Ctrl C to interrupt the waiting process")
  1156. print(
  1157. "You can use `nsg_job -l` to check the status of the job and download the simulation reults"
  1158. )
  1159. def plot_v_graphs(self):
  1160. """Plot membrane potential graphs"""
  1161. data = reload_saved_data(
  1162. self.lems_simulation_file,
  1163. plot=True,
  1164. show_plot_already=True,
  1165. reload_events=False,
  1166. )
  1167. """
  1168. logger.debug(data.keys())
  1169. xvals = [data["t"]]
  1170. yvals = list(data.values())[1:]
  1171. logger.debug(len(xvals * len(yvals)))
  1172. logger.debug(yvals)
  1173. labels = [a.split("/")[0] for a in data.keys()][1:]
  1174. generate_plot(
  1175. xvalues=xvals * len(yvals),
  1176. yvalues=yvals,
  1177. title="Membrane potentials",
  1178. labels=labels,
  1179. xaxis="time (ms)",
  1180. yaxis="v (mV)",
  1181. cols_in_legend_box=2,
  1182. save_figure_to=f"{self.lems_simulation_file.replace('.xml', '')}_v.png",
  1183. )
  1184. """
  1185. def add_step_current(self):
  1186. """Add a constant step current to all cells.
  1187. Useful for testing the cells to ensure they produce correct behaviour,
  1188. in an unconnected network, for example
  1189. """
  1190. print("Adding step current to each cell")
  1191. # a single pulse generator
  1192. pg = self.netdoc.add(
  1193. neuroml.PulseGenerator,
  1194. id="pg_0",
  1195. delay="100ms",
  1196. duration=self.sim_length,
  1197. amplitude="0.2nA",
  1198. )
  1199. for pop in self.network.populations:
  1200. self.network.add(neuroml.ExplicitInput, target=f"{pop.id}[0]", input=pg.id)
  1201. if __name__ == "__main__":
  1202. scale = 0.02
  1203. if len(sys.argv) == 2:
  1204. scale = float(sys.argv[1])
  1205. elif len(sys.argv) > 2:
  1206. print("Only one argument accepted, using first as scale value")
  1207. scale = float(sys.argv[1])
  1208. model = HL23Net(
  1209. scale=scale,
  1210. new_cells=False,
  1211. biophysics=True,
  1212. tonic_inhibition=True,
  1213. connections=True,
  1214. network_input="background",
  1215. stimulus=False,
  1216. hdf5=False,
  1217. rotate_cells=False,
  1218. )
  1219. """
  1220. model.create_network()
  1221. model.create_simulation()
  1222. # model.visualize_network(min_cells=25)
  1223. # For normal run
  1224. model.run_sim(engine="jneuroml_neuron", cluster=False,
  1225. skip_run=False)
  1226. # for NSG
  1227. # with netpyne, use `-nogui` to prevent matplotlib import
  1228. model.run_sim(engine="jneuroml_netpyne", cluster="nsg",
  1229. nsg_sim_config={
  1230. "number_cores_": "64",
  1231. "tasks_per_node_": "64",
  1232. "number_nodes_": "8",
  1233. "number_gbmemorypernode_": "240",
  1234. "runtime_": "5",
  1235. 'toolId': "OSBv2_EXPANSE_0_7_3",
  1236. 'nrnivmodl_o_': "1",
  1237. "cmdlineopts_": "-nogui"
  1238. },
  1239. nogui=True)
  1240. """
  1241. model.run_sim(engine="jneuroml_netpyne", cluster="qsub")
  1242. # model.plot_v_graphs()