common_io_test.py

# -*- coding: utf-8 -*-
'''
Common tests for IOs:
 * check presence of all necessary attr
 * check types
 * write/read consistency

See BaseTestIO.

The public URL is in url_for_tests.
The private url for writing is
ssh://gate.g-node.org/groups/neo/io_test_files/
'''

# needed for python 3 compatibility
from __future__ import absolute_import

__test__ = False

url_for_tests = "https://portal.g-node.org/neo/"

import os

try:
    import unittest2 as unittest
except ImportError:
    import unittest

from neo.core import Block, Segment
from neo.test.tools import (assert_same_sub_schema,
                            assert_neo_object_is_compliant,
                            assert_sub_schema_is_lazy_loaded,
                            assert_lazy_sub_schema_can_be_loaded,
                            assert_children_empty)
from neo.test.iotest.tools import (can_use_network, cleanup_test_file,
                                   close_object_safe, create_generic_io_object,
                                   create_generic_reader,
                                   create_generic_writer,
                                   create_local_temp_dir, download_test_file,
                                   iter_generic_io_objects,
                                   iter_generic_readers, iter_read_objects,
                                   make_all_directories, read_generic,
                                   write_generic)
from neo.test.generate_datasets import generate_from_supported_objects

class BaseTestIO(object):
    '''
    This class makes common tests for all IOs.

    Several strategies:
      * for IOs able to read and write: test_write_then_read
      * for IOs able to read and write with hash conservation (optional):
        test_read_then_write
      * for all IOs: test_assert_readed_neo_object_is_compliant

    2 cases:
      * files are at G-Node and downloaded:
        download_test_files_if_not_present
      * files are generated by MyIO.write()
    '''
    #~ __test__ = False

    # all IO test classes need to set these attributes
    # (see the example sketch below):
    ioclass = None  # the IO class to be tested
    files_to_test = []  # list of files to test compliance
    files_to_download = []  # when files are at G-Node

    # when reading then writing produces files with identical hashes
    hash_conserved_when_write_read = False
    # when writing then reading creates an identical neo object
    read_and_write_is_bijective = True

    # allow the environment to disable use of the network
    use_network = can_use_network()

    local_test_dir = None
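
    # Usage: a concrete IO test typically just subclasses BaseTestIO together
    # with unittest.TestCase and fills in the attributes above.  A minimal
    # illustrative sketch (``ExampleIO`` and the file name are hypothetical,
    # not defined in this module):
    #
    #     from neo.io import ExampleIO
    #
    #     class TestExampleIO(BaseTestIO, unittest.TestCase):
    #         ioclass = ExampleIO
    #         files_to_test = ['example_file_1.dat']
    #         files_to_download = ['example_file_1.dat']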

    def setUp(self):
        '''
        Set up the test fixture.  This is run for every test.
        '''
        self.higher = self.ioclass.supported_objects[0]
        self.shortname = self.ioclass.__name__.lower().strip('io')
        # these objects can both be written and read
        self.io_readandwrite = list(set(self.ioclass.readable_objects) &
                                    set(self.ioclass.writeable_objects))
        # these objects can be either written or read
        self.io_readorwrite = list(set(self.ioclass.readable_objects) |
                                   set(self.ioclass.writeable_objects))

        self.create_local_dir_if_not_exists()
        self.download_test_files_if_not_present()
        self.files_generated = []
        self.generate_files_for_io_able_to_write()
        self.files_to_test.extend(self.files_generated)

        self.cascade_modes = [True]
        if hasattr(self.ioclass, 'load_lazy_cascade'):
            self.cascade_modes.append('lazy')

    def create_local_dir_if_not_exists(self):
        '''
        Create a local directory to store testing files and return it.

        The directory path is also written to self.local_test_dir
        '''
        self.local_test_dir = create_local_temp_dir(self.shortname)
        return self.local_test_dir

    def download_test_files_if_not_present(self):
        '''
        Download %s files from G-Node for testing.

        url_for_tests is a global at the beginning of this file.
        ''' % self.ioclass.__name__
        if not self.use_network:
            raise unittest.SkipTest("Requires download of data from the web")

        url = url_for_tests + self.shortname
        try:
            make_all_directories(self.files_to_download, self.local_test_dir)
            download_test_file(self.files_to_download,
                               self.local_test_dir, url)
        except IOError as exc:
            raise unittest.SkipTest(exc)
    download_test_files_if_not_present.__test__ = False

    def cleanup_file(self, path):
        '''
        Remove test files or directories safely.
        '''
        cleanup_test_file(self.ioclass, path, directory=self.local_test_dir)

    def able_to_write_or_read(self, writeread=False, readwrite=False):
        '''
        Return True if generalized writing or reading is possible.

        If writeread=True, return True if writing then reading is
        possible and produces identical neo objects.

        If readwrite=True, return True if reading then writing is possible
        and produces files with identical hashes.
        '''
        # Find the highest object that is supported by the IO.
        # Test only if it is a Block or Segment, and if the IO can both read
        # and write this object.
        if self.higher not in self.io_readandwrite:
            return False
        if self.higher not in [Block, Segment]:
            return False

        # when the IO needs external knowledge for writing or reading, such
        # as sampling_rate (RawBinaryIO...), the test is too complex to
        # design generically.
        if (self.higher in self.ioclass.read_params and
                len(self.ioclass.read_params[self.higher]) != 0):
            return False

        # handle cases where the test should write then read
        if writeread and not self.read_and_write_is_bijective:
            return False

        # handle cases where the test should read then write
        if readwrite and not self.hash_conserved_when_write_read:
            return False

        return True

    def get_filename_path(self, filename):
        '''
        Get the path to a filename in the current temporary file directory.
        '''
        return os.path.join(self.local_test_dir, filename)

    def generic_io_object(self, filename=None, return_path=False, clean=False):
        '''
        Create an io object in a generic way that can work with both
        file-based and directory-based io objects.

        If filename is None, create a filename (default).

        If return_path is True, return the full path of the file along with
        the io object.  return ioobj, path.  Default is False.

        If clean is True, try to delete existing versions of the file
        before creating the io object.  Default is False.
        '''
        return create_generic_io_object(ioclass=self.ioclass,
                                        filename=filename,
                                        directory=self.local_test_dir,
                                        return_path=return_path,
                                        clean=clean)

    def create_file_reader(self, filename=None, return_path=False,
                           clean=False, target=None, readall=False):
        '''
        Create a function that can read from the specified filename.

        If filename is None, create a filename (default).

        If return_path is True, return the full path of the file along with
        the reader function.  return reader, path.  Default is False.

        If clean is True, try to delete existing versions of the file
        before creating the io object.  Default is False.

        If target is None, use the first supported_objects from ioobj
        If target is False, use the 'read' method.
        If target is the Block or Segment class, use read_block or
        read_segment, respectively.
        If target is a string, use 'read_'+target.

        If readall is True, use the read_all_ method instead of the read_
        method.  Default is False.
        '''
        ioobj, path = self.generic_io_object(filename=filename,
                                             return_path=True, clean=clean)

        res = create_generic_reader(ioobj, target=target, readall=readall)
        if return_path:
            return res, path
        return res

    def create_file_writer(self, filename=None, return_path=False,
                           clean=False, target=None):
        '''
        Create a function that can write to the specified filename.

        If filename is None, create a filename (default).

        If return_path is True, return the full path of the file along with
        the writer function.  return writer, path.  Default is False.

        If clean is True, try to delete existing versions of the file
        before creating the io object.  Default is False.

        If target is None, use the first supported_objects from ioobj
        If target is False, use the 'write' method.
        If target is the Block or Segment class, use write_block or
        write_segment, respectively.
        If target is a string, use 'write_'+target.
        '''
        ioobj, path = self.generic_io_object(filename=filename,
                                             return_path=True, clean=clean)

        res = create_generic_writer(ioobj, target=target)
        if return_path:
            return res, path
        return res

    def read_file(self, filename=None, return_path=False, clean=False,
                  target=None, readall=False, cascade=True, lazy=False):
        '''
        Read from the specified filename.

        If filename is None, create a filename (default).

        If return_path is True, return the full path of the file along with
        the object.  return obj, path.  Default is False.

        If clean is True, try to delete existing versions of the file
        before creating the io object.  Default is False.

        If target is None, use the first supported_objects from ioobj
        If target is False, use the 'read' method.
        If target is the Block or Segment class, use read_block or
        read_segment, respectively.
        If target is a string, use 'read_'+target.

        The cascade and lazy parameters are passed to the reader.  Defaults
        are True and False, respectively.

        If readall is True, use the read_all_ method instead of the read_
        method.  Default is False.
        '''
        ioobj, path = self.generic_io_object(filename=filename,
                                             return_path=True, clean=clean)

        obj = read_generic(ioobj, target=target, cascade=cascade, lazy=lazy,
                           readall=readall, return_reader=False)
        if return_path:
            return obj, path
        return obj

    def write_file(self, obj=None, filename=None, return_path=False,
                   clean=False, target=None):
        '''
        Write the target object to a file using the given neo io object ioobj.

        If filename is None, create a filename (default).

        If return_path is True, return the full path of the file along with
        the object.  return obj, path.  Default is False.

        If clean is True, try to delete existing versions of the file
        before creating the io object.  Default is False.

        If target is None, use the first supported_objects from ioobj
        If target is False, use the 'write' method.
        If target is the Block or Segment class, use write_block or
        write_segment, respectively.
        If target is a string, use 'write_'+target.

        obj is the object to write.  If obj is None, an object is created
        automatically for the io class.
        '''
        ioobj, path = self.generic_io_object(filename=filename,
                                             return_path=True, clean=clean)

        obj = write_generic(ioobj, target=target, return_reader=False)
        if return_path:
            return obj, path
        return obj

    def iter_io_objects(self, return_path=False, clean=False):
        '''
        Return an iterable over the io objects created from files_to_test.

        If return_path is True, yield the full path of the file along with
        the io object.  yield ioobj, path.  Default is False.

        If clean is True, try to delete existing versions of the file
        before creating the io object.  Default is False.
        '''
        return iter_generic_io_objects(ioclass=self.ioclass,
                                       filenames=self.files_to_test,
                                       directory=self.local_test_dir,
                                       return_path=return_path,
                                       clean=clean)

    def iter_readers(self, target=None, readall=False,
                     return_path=False, return_ioobj=False, clean=False):
        '''
        Return an iterable over readers created from files_to_test.

        If return_path is True, return the full path of the file along with
        the reader object.  return reader, path.

        If return_ioobj is True, return the io object as well as the reader.
        return reader, ioobj.  Default is False.

        If both return_path and return_ioobj are True,
        return reader, path, ioobj.  Default is False.

        If clean is True, try to delete existing versions of the file
        before creating the io object.  Default is False.

        If readall is True, use the read_all_ method instead of the
        read_ method.  Default is False.
        '''
        return iter_generic_readers(ioclass=self.ioclass,
                                    filenames=self.files_to_test,
                                    directory=self.local_test_dir,
                                    return_path=return_path,
                                    return_ioobj=return_ioobj,
                                    target=target,
                                    clean=clean,
                                    readall=readall)

    def iter_objects(self, target=None, return_path=False, return_ioobj=False,
                     return_reader=False, clean=False, readall=False,
                     cascade=True, lazy=False):
        '''
        Iterate over objects read from the list of filenames in files_to_test.

        If target is None, use the first supported_objects from ioobj
        If target is False, use the 'read' method.
        If target is the Block or Segment class, use read_block or
        read_segment, respectively.
        If target is a string, use 'read_'+target.

        If return_path is True, yield the full path of the file along with
        the object.  yield obj, path.

        If return_ioobj is True, yield the io object as well as the object.
        yield obj, ioobj.  Default is False.

        If return_reader is True, yield the io reader function as well as the
        object.  yield obj, reader.  Default is False.

        If some combination of return_path, return_ioobj, and return_reader
        is True, they are yielded in the order: obj, path, ioobj, reader.

        If clean is True, try to delete existing versions of the file
        before creating the io object.  Default is False.

        The cascade and lazy parameters are passed to the reader.  Defaults
        are True and False, respectively.

        If readall is True, use the read_all_ method instead of the read_
        method.  Default is False.
        '''
        return iter_read_objects(ioclass=self.ioclass,
                                 filenames=self.files_to_test,
                                 directory=self.local_test_dir,
                                 target=target,
                                 return_path=return_path,
                                 return_ioobj=return_ioobj,
                                 return_reader=return_reader,
                                 clean=clean, readall=readall,
                                 cascade=cascade, lazy=lazy)

    def generate_files_for_io_able_to_write(self):
        '''
        Write files for use in testing.
        '''
        self.files_generated = []
        if not self.able_to_write_or_read():
            return

        generate_from_supported_objects(self.ioclass.supported_objects)

        ioobj, path = self.generic_io_object(return_path=True, clean=True)
        if ioobj is None:
            return

        self.files_generated.append(path)

        write_generic(ioobj, target=self.higher)
        close_object_safe(ioobj)

    def test_write_then_read(self):
        '''
        Test for IOs that are able to write and read - here %s:
          1 - Generate a full schema with supported objects.
          2 - Write to a file
          3 - Read from the file
          4 - Check the hierarchy
          5 - Check the data

        Works only for IOs whose highest supported object is Block or Segment
        (the main cases).
        ''' % self.ioclass.__name__
        if not self.able_to_write_or_read(writeread=True):
            return

        for cascade in self.cascade_modes:
            ioobj1 = self.generic_io_object(clean=True)

            if ioobj1 is None:
                return

            ob1 = write_generic(ioobj1, target=self.higher)
            close_object_safe(ioobj1)

            ioobj2 = self.generic_io_object()

            # Read the highest supported object from the file
            obj_reader = create_generic_reader(ioobj2, target=False)
            ob2 = obj_reader(cascade=cascade)[0]
            if self.higher == Segment:
                ob2 = ob2.segments[0]

            # some formats (e.g. elphy) do not support double floating
            # point spiketrains
            try:
                assert_same_sub_schema(ob1, ob2, True, 1e-8)
                assert_neo_object_is_compliant(ob1)
                assert_neo_object_is_compliant(ob2)
            # intercept exceptions and add more information
            except BaseException as exc:
                exc.args += ('with cascade=%s ' % cascade,)
                raise

            close_object_safe(ioobj2)

    def test_read_then_write(self):
        '''
        Test for IOs that are able to read and write, here %s:
          1 - Read a file
          2 - Write the object set to another file
          3 - Compare the hashes of the 2 files

        NOTE: TODO: Not implemented yet
        ''' % self.ioclass.__name__
        if not self.able_to_write_or_read(readwrite=True):
            return
        #assert_file_contents_equal(a, b)

    def test_assert_readed_neo_object_is_compliant(self):
        '''
        Reading %s files in `files_to_test` produces compliant objects.

        Compliance test: neo.test.tools.assert_neo_object_is_compliant for
        all cascade and lazy modes
        ''' % self.ioclass.__name__
        # This is for files present at G-Node or generated locally
        for cascade in self.cascade_modes:
            for lazy in [True, False]:
                for obj, path in self.iter_objects(cascade=cascade, lazy=lazy,
                                                   return_path=True):
                    try:
                        # Check compliance of the block
                        assert_neo_object_is_compliant(obj)
                    # intercept exceptions and add more information
                    except BaseException as exc:
                        exc.args += ('from %s with cascade=%s and lazy=%s' %
                                     (os.path.basename(path), cascade, lazy),)
                        raise

    def test_readed_with_cascade_is_compliant(self):
        '''
        Reading %s files in `files_to_test` with `cascade` is compliant.

        A reader with cascade = False should return empty children.
        ''' % self.ioclass.__name__
        # This is for files present at G-Node or generated locally
        for obj, path in self.iter_objects(cascade=False, lazy=False,
                                           return_path=True):
            try:
                # Check compliance of the block or segment
                assert_neo_object_is_compliant(obj)
                assert_children_empty(obj, self.ioclass)
            # intercept exceptions and add more information
            except BaseException as exc:
                exc.args += ('from %s ' % os.path.basename(path),)
                raise

    def test_readed_with_lazy_is_compliant(self):
        '''
        Reading %s files in `files_to_test` with `lazy` is compliant.

        Test the reader with lazy = True.  All objects derived from ndarray
        or Quantity should have a size of 0.  Also, AnalogSignal,
        AnalogSignalArray, SpikeTrain, Epoch, and Event should
        contain the lazy_shape attribute.
        ''' % self.ioclass.__name__
        # This is for files present at G-Node or generated locally
        for cascade in self.cascade_modes:
            for obj, path in self.iter_objects(cascade=cascade, lazy=True,
                                               return_path=True):
                try:
                    assert_sub_schema_is_lazy_loaded(obj)
                # intercept exceptions and add more information
                except BaseException as exc:
                    exc.args += ('from %s with cascade=%s ' %
                                 (os.path.basename(path), cascade),)
                    raise

    def test_load_lazy_objects(self):
        '''
        Reading %s files in `files_to_test` with `lazy` works.

        Test the reader with lazy = True.  All objects derived from ndarray
        or Quantity should have a size of 0.  Also, AnalogSignal,
        AnalogSignalArray, SpikeTrain, Epoch, and Event should
        contain the lazy_shape attribute.
        ''' % self.ioclass.__name__
        if not hasattr(self.ioclass, 'load_lazy_object'):
            return

        # This is for files present at G-Node or generated locally
        for cascade in self.cascade_modes:
            for obj, path, ioobj in self.iter_objects(cascade=cascade,
                                                      lazy=True,
                                                      return_ioobj=True,
                                                      return_path=True):
                try:
                    assert_lazy_sub_schema_can_be_loaded(obj, ioobj)
                # intercept exceptions and add more information
                except BaseException as exc:
                    exc.args += ('from %s with cascade=%s ' %
                                 (os.path.basename(path), cascade),)
                    raise
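
# Concrete subclasses of BaseTestIO (see the sketch in the class body) are
# picked up by normal unittest discovery; an illustrative invocation, assuming
# the standard neo source layout, would be:
#
#     python -m unittest discover neo/test/iotest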