Scheduled service maintenance on November 22


On Friday, November 22, 2024, between 06:00 CET and 18:00 CET, GIN services will undergo planned maintenance. Extended service interruptions should be expected. We will try to keep downtimes to a minimum, but recommend that users avoid critical tasks, large data uploads, or DOI requests during this time.

We apologize for any inconvenience.

test_brainwaredamio.py (5.5 KB)
  1. # -*- coding: utf-8 -*-
  2. """
  3. Tests of neo.io.brainwaredamio
  4. """
  5. # needed for python 3 compatibility
  6. from __future__ import absolute_import, division, print_function
  7. import os.path
  8. import sys
  9. import unittest
  10. import numpy as np
  11. import quantities as pq
  12. from neo.core import (AnalogSignal, Block,
  13. ChannelIndex, Segment)
  14. from neo.io import BrainwareDamIO
  15. from neo.test.iotest.common_io_test import BaseTestIO
  16. from neo.test.tools import (assert_same_sub_schema,
  17. assert_neo_object_is_compliant)
  18. from neo.test.iotest.tools import create_generic_reader
  19. PY_VER = sys.version_info[0]
  20. def proc_dam(filename):
  21. '''Load an dam file that has already been processed by the official matlab
  22. file converter. That matlab data is saved to an m-file, which is then
  23. converted to a numpy '.npz' file. This numpy file is the file actually
  24. loaded. This function converts it to a neo block and returns the block.
  25. This block can be compared to the block produced by BrainwareDamIO to
  26. make sure BrainwareDamIO is working properly
  27. block = proc_dam(filename)
  28. filename: The file name of the numpy file to load. It should end with
  29. '*_dam_py?.npz'. This will be converted to a neo 'file_origin' property
  30. with the value '*.dam', so the filename to compare should fit that pattern.
  31. 'py?' should be 'py2' for the python 2 version of the numpy file or 'py3'
  32. for the python 3 version of the numpy file.
  33. example: filename = 'file1_dam_py2.npz'
  34. dam file name = 'file1.dam'
  35. '''
  36. with np.load(filename) as damobj:
  37. damfile = damobj.items()[0][1].flatten()
  38. filename = os.path.basename(filename[:-12]+'.dam')
  39. signals = [res.flatten() for res in damfile['signal']]
  40. stimIndexes = [int(res[0, 0].tolist()) for res in damfile['stimIndex']]
  41. timestamps = [res[0, 0] for res in damfile['timestamp']]
  42. block = Block(file_origin=filename)
  43. chx = ChannelIndex(file_origin=filename,
  44. index=np.array([0]),
  45. channel_ids=np.array([1]),
  46. channel_names=np.array(['Chan1'], dtype='S'))
  47. block.channel_indexes.append(chx)
  48. params = [res['params'][0, 0].flatten() for res in damfile['stim']]
  49. values = [res['values'][0, 0].flatten() for res in damfile['stim']]
  50. params = [[res1[0] for res1 in res] for res in params]
  51. values = [[res1 for res1 in res] for res in values]
  52. stims = [dict(zip(param, value)) for param, value in zip(params, values)]
  53. fulldam = zip(stimIndexes, timestamps, signals, stims)
  54. for stimIndex, timestamp, signal, stim in fulldam:
  55. sig = AnalogSignal(signal=signal*pq.mV,
  56. t_start=timestamp*pq.d,
  57. file_origin=filename,
  58. sampling_period=1.*pq.s)
  59. segment = Segment(file_origin=filename,
  60. index=stimIndex,
  61. **stim)
  62. segment.analogsignals = [sig]
  63. block.segments.append(segment)
  64. block.create_many_to_one_relationship()
  65. return block
  66. class BrainwareDamIOTestCase(BaseTestIO, unittest.TestCase):
  67. '''
  68. Unit test testcase for neo.io.BrainwareDamIO
  69. '''
  70. ioclass = BrainwareDamIO
  71. read_and_write_is_bijective = False
  72. # These are the files it tries to read and test for compliance
  73. files_to_test = ['block_300ms_4rep_1clust_part_ch1.dam',
  74. 'interleaved_500ms_5rep_ch2.dam',
  75. 'long_170s_1rep_1clust_ch2.dam',
  76. 'multi_500ms_mulitrep_ch1.dam',
  77. 'random_500ms_12rep_noclust_part_ch2.dam',
  78. 'sequence_500ms_5rep_ch2.dam']
  79. # these are reference files to compare to
  80. files_to_compare = ['block_300ms_4rep_1clust_part_ch1',
  81. 'interleaved_500ms_5rep_ch2',
  82. '',
  83. 'multi_500ms_mulitrep_ch1',
  84. 'random_500ms_12rep_noclust_part_ch2',
  85. 'sequence_500ms_5rep_ch2']
  86. # add the appropriate suffix depending on the python version
  87. for i, fname in enumerate(files_to_compare):
  88. if fname:
  89. files_to_compare[i] += '_dam_py%s.npz' % PY_VER
  90. # Will fetch from g-node if they don't already exist locally
  91. # How does it know to do this before any of the other tests?
  92. files_to_download = files_to_test + files_to_compare
  93. def test_reading_same(self):
  94. for ioobj, path in self.iter_io_objects(return_path=True):
  95. obj_reader_base = create_generic_reader(ioobj, target=False)
  96. obj_reader_single = create_generic_reader(ioobj)
  97. obj_base = obj_reader_base()
  98. obj_single = obj_reader_single()
  99. try:
  100. assert_same_sub_schema(obj_base, obj_single)
  101. except BaseException as exc:
  102. exc.args += ('from ' + os.path.basename(path),)
  103. raise
  104. def test_against_reference(self):
  105. for filename, refname in zip(self.files_to_test,
  106. self.files_to_compare):
  107. if not refname:
  108. continue
  109. obj = self.read_file(filename=filename)
  110. refobj = proc_dam(self.get_filename_path(refname))
  111. try:
  112. assert_neo_object_is_compliant(obj)
  113. assert_neo_object_is_compliant(refobj)
  114. assert_same_sub_schema(obj, refobj)
  115. except BaseException as exc:
  116. exc.args += ('from ' + filename,)
  117. raise
# Allow the tests in this module to be run directly as a script.
if __name__ == '__main__':
    unittest.main()