# -*- coding: utf-8 -*-

import os
import sys
import shutil
import platform
import tempfile
import warnings
import threading
import subprocess
import queue

try:
    import multiprocessing as mp
    multiprocessing_imported = True
except ImportError:
    multiprocessing_imported = False

import numpy

import tables
import tables.flavor

from tables import (
    Description, IsDescription, Float64Atom, Col, IntCol, Int16Col, Int32Col,
    FloatCol, Float64Col,
    ClosedFileError, FileModeError, FlavorError, FlavorWarning,
    NaturalNameWarning, ClosedNodeError, NodeError, NoSuchNodeError,
    UnImplemented,
)

from tables.flavor import all_flavors, array_of_flavor
from tables.parameters import NODE_CACHE_SLOTS
from tables.description import descr_from_dtype, dtype_from_descr
from tables.tests import common
from tables.tests.common import unittest, test_filename
from tables.tests.common import PyTablesTestCase as TestCase


class OpenFileFailureTestCase(TestCase):
    """Check that failed open attempts do not leak entries in the
    module-level registry of open files."""

    def setUp(self):
        super(OpenFileFailureTestCase, self).setUp()

        import tables.file

        # Snapshot the registry of open files so the tests below can
        # verify that a failed open leaves it unchanged.
        self.N = len(tables.file._open_files)
        self.open_files = tables.file._open_files

    def test01_open_file(self):
        """Checking opening of a non existing file."""

        # NOTE: mktemp is used deliberately: the test needs a path that
        # does NOT exist on disk.
        h5fname = tempfile.mktemp(".h5")
        with self.assertRaises(IOError):
            h5file = tables.open_file(h5fname)
            h5file.close()

        self.assertEqual(self.N, len(self.open_files))

    def test02_open_file(self):
        """Checking opening of an existing non HDF5 file."""

        # create a dummy (empty, non-HDF5) file
        h5fname = tempfile.mktemp(".h5")
        open(h5fname, 'wb').close()

        # Try to open the dummy file
        try:
            with self.assertRaises(tables.HDF5ExtError):
                h5file = tables.open_file(h5fname)
                h5file.close()

            self.assertEqual(self.N, len(self.open_files))
        finally:
            os.remove(h5fname)

    def test03_open_file(self):
        """Checking opening of an existing file with invalid mode."""

        # See gh-318

        # create a dummy file
        h5fname = tempfile.mktemp(".h5")
        h5file = tables.open_file(h5fname, "w")
        h5file.close()

        try:
            # Try to open the dummy file with an unsupported mode string
            self.assertRaises(ValueError, tables.open_file, h5fname, "ab")
        finally:
            os.remove(h5fname)


class OpenFileTestCase(common.TempFileMixin, TestCase):
    """Exercise File-level operations (open/append/remove/rename/move/copy)
    on a small pre-populated object tree."""

    def setUp(self):
        super(OpenFileTestCase, self).setUp()
        self.populateFile()

    def populateFile(self):
        """Build the fixture tree used by every test in this case."""
        root = self.h5file.root

        # Create an array
        self.h5file.create_array(root, 'array', [1, 2], title="Array example")
        self.h5file.create_table(root, 'table', {'var1': IntCol()},
                                 "Table example")
        root._v_attrs.testattr = 41

        # Create another array object
        self.h5file.create_array(root, 'anarray', [1], "Array title")
        self.h5file.create_table(root, 'atable', {'var1': IntCol()},
                                 "Table title")

        # Create a group object
        group = self.h5file.create_group(root, 'agroup', "Group title")
        group._v_attrs.testattr = 42

        # Create a some objects there
        array1 = self.h5file.create_array(group, 'anarray1',
                                          [1, 2, 3, 4, 5, 6, 7],
                                          "Array title 1")
        array1.attrs.testattr = 42
        self.h5file.create_array(group, 'anarray2', [2], "Array title 2")
        self.h5file.create_table(group, 'atable1', {
            'var1': IntCol()}, "Table title 1")
        ra = numpy.rec.array([(1, 11, 'a')], formats='u1,f4,a1')
        self.h5file.create_table(group, 'atable2', ra, "Table title 2")

        # Create a lonely group in first level
        self.h5file.create_group(root, 'agroup2', "Group title 2")

        # Create a new group in the second level
        group3 = self.h5file.create_group(group, 'agroup3', "Group title 3")

        # Create a new group in the third level
        self.h5file.create_group(group3, 'agroup4', "Group title 4")

        # Create an array in the root with the same name as one in 'agroup'
        self.h5file.create_array(root, 'anarray1', [1, 2],
                                 title="Array example")
141 def test00_newFile(self): 142 """Checking creation of a new file.""" 143 144 self.h5file.create_array(self.h5file.root, 'array_new', [1, 2], 145 title="Array example") 146 147 # Get the CLASS attribute of the arr object 148 class_ = self.h5file.root.array.attrs.CLASS 149 150 self.assertEqual(class_.capitalize(), "Array") 151 152 def test00_newFile_unicode_filename(self): 153 temp_dir = tempfile.mkdtemp() 154 try: 155 h5fname = str(os.path.join(temp_dir, 'test.h5')) 156 with tables.open_file(h5fname, 'w') as h5file: 157 self.assertTrue(h5file, tables.File) 158 finally: 159 shutil.rmtree(temp_dir) 160 161 def test00_newFile_numpy_str_filename(self): 162 temp_dir = tempfile.mkdtemp() 163 try: 164 h5fname = numpy.str_(os.path.join(temp_dir, 'test.h5')) 165 with tables.open_file(h5fname, 'w') as h5file: 166 self.assertTrue(h5file, tables.File) 167 finally: 168 shutil.rmtree(temp_dir) 169 170 def test00_newFile_numpy_unicode_filename(self): 171 temp_dir = tempfile.mkdtemp() 172 try: 173 h5fname = numpy.unicode_(os.path.join(temp_dir, 'test.h5')) 174 with tables.open_file(h5fname, 'w') as h5file: 175 self.assertTrue(h5file, tables.File) 176 finally: 177 shutil.rmtree(temp_dir) 178 179 def test01_openFile(self): 180 """Checking opening of an existing file.""" 181 182 # Open the old HDF5 file 183 self._reopen(node_cache_slots=self.node_cache_slots) 184 185 # Get the CLASS attribute of the arr object 186 title = self.h5file.root.array.get_attr("TITLE") 187 188 self.assertEqual(title, "Array example") 189 190 def test02_appendFile(self): 191 """Checking appending objects to an existing file.""" 192 193 # Append a new array to the existing file 194 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 195 self.h5file.create_array(self.h5file.root, 'array2', [3, 4], 196 title="Title example 2") 197 198 # Open this file in read-only mode 199 self._reopen(node_cache_slots=self.node_cache_slots) 200 201 # Get the CLASS attribute of the arr object 202 title = 
self.h5file.root.array2.get_attr("TITLE") 203 204 self.assertEqual(title, "Title example 2") 205 206 def test02b_appendFile2(self): 207 """Checking appending objects to an existing file ("a" version)""" 208 209 # Append a new array to the existing file 210 self._reopen(mode="a", node_cache_slots=self.node_cache_slots) 211 self.h5file.create_array(self.h5file.root, 'array2', [3, 4], 212 title="Title example 2") 213 214 # Open this file in read-only mode 215 self._reopen(node_cache_slots=self.node_cache_slots) 216 217 # Get the CLASS attribute of the arr object 218 title = self.h5file.root.array2.get_attr("TITLE") 219 220 self.assertEqual(title, "Title example 2") 221 222 # Begin to raise errors... 223 224 def test03_appendErrorFile(self): 225 """Checking appending objects to an existing file in "w" mode.""" 226 227 # Append a new array to the existing file but in write mode 228 # so, the existing file should be deleted! 229 self._reopen(mode="w", node_cache_slots=self.node_cache_slots) 230 self.h5file.create_array(self.h5file.root, 'array2', [3, 4], 231 title="Title example 2") 232 233 # Open this file in read-only mode 234 self._reopen(node_cache_slots=self.node_cache_slots) 235 236 with self.assertRaises(LookupError): 237 # Try to get the 'array' object in the old existing file 238 self.h5file.root.array 239 240 def test04a_openErrorFile(self): 241 """Checking opening a non-existing file for reading""" 242 243 with self.assertRaises(IOError): 244 tables.open_file("nonexistent.h5", mode="r", 245 node_cache_slots=self.node_cache_slots) 246 247 def test04b_alternateRootFile(self): 248 """Checking alternate root access to the object tree.""" 249 250 # Open the existent HDF5 file 251 self._reopen(root_uep="/agroup", 252 node_cache_slots=self.node_cache_slots) 253 254 # Get the CLASS attribute of the arr object 255 if common.verbose: 256 print("\nFile tree dump:", self.h5file) 257 title = self.h5file.root.anarray1.get_attr("TITLE") 258 259 # Get the node again, as this 
can trigger errors in some situations 260 anarray1 = self.h5file.root.anarray1 261 self.assertIsNotNone(anarray1) 262 263 self.assertEqual(title, "Array title 1") 264 265 # This test works well, but HDF5 emits a series of messages that 266 # may loose the user. It is better to deactivate it. 267 def notest04c_alternateRootFile(self): 268 """Checking non-existent alternate root access to the object tree""" 269 270 with self.assertRaises(RuntimeError): 271 self._reopen(root_uep="/nonexistent", 272 node_cache_slots=self.node_cache_slots) 273 274 def test05a_removeGroupRecursively(self): 275 """Checking removing a group recursively.""" 276 277 # Delete a group with leafs 278 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 279 280 with self.assertRaises(NodeError): 281 self.h5file.remove_node(self.h5file.root.agroup) 282 283 # This should work now 284 self.h5file.remove_node(self.h5file.root, 'agroup', recursive=1) 285 286 # Open this file in read-only mode 287 self._reopen(node_cache_slots=self.node_cache_slots) 288 289 # Try to get the removed object 290 with self.assertRaises(LookupError): 291 self.h5file.root.agroup 292 293 # Try to get a child of the removed object 294 with self.assertRaises(LookupError): 295 self.h5file.get_node("/agroup/agroup3") 296 297 def test05b_removeGroupRecursively(self): 298 """Checking removing a group recursively and access to it 299 immediately.""" 300 301 if common.verbose: 302 print('\n', '-=' * 30) 303 print("Running %s.test05b_removeGroupRecursively..." 
% 304 self.__class__.__name__) 305 306 # Delete a group with leafs 307 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 308 309 with self.assertRaises(NodeError): 310 self.h5file.remove_node(self.h5file.root, 'agroup') 311 312 # This should work now 313 self.h5file.remove_node(self.h5file.root, 'agroup', recursive=1) 314 315 # Try to get the removed object 316 with self.assertRaises(LookupError): 317 self.h5file.root.agroup 318 319 # Try to get a child of the removed object 320 with self.assertRaises(LookupError): 321 self.h5file.get_node("/agroup/agroup3") 322 323 def test06_removeNodeWithDel(self): 324 """Checking removing a node using ``__delattr__()``""" 325 326 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 327 328 with self.assertRaises(AttributeError): 329 # This should fail because there is no *Python attribute* 330 # called ``agroup``. 331 del self.h5file.root.agroup 332 333 def test06a_removeGroup(self): 334 """Checking removing a lonely group from an existing file.""" 335 336 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 337 338 self.h5file.remove_node(self.h5file.root, 'agroup2') 339 340 # Open this file in read-only mode 341 self._reopen(node_cache_slots=self.node_cache_slots) 342 343 # Try to get the removed object 344 with self.assertRaises(LookupError): 345 self.h5file.root.agroup2 346 347 def test06b_removeLeaf(self): 348 """Checking removing Leaves from an existing file.""" 349 350 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 351 self.h5file.remove_node(self.h5file.root, 'anarray') 352 353 # Open this file in read-only mode 354 self._reopen(node_cache_slots=self.node_cache_slots) 355 356 # Try to get the removed object 357 with self.assertRaises(LookupError): 358 self.h5file.root.anarray 359 360 def test06c_removeLeaf(self): 361 """Checking removing Leaves and access it immediately.""" 362 363 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 364 
self.h5file.remove_node(self.h5file.root, 'anarray') 365 366 # Try to get the removed object 367 with self.assertRaises(LookupError): 368 self.h5file.root.anarray 369 370 def test06d_removeLeaf(self): 371 """Checking removing a non-existent node""" 372 373 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 374 375 # Try to get the removed object 376 with self.assertRaises(LookupError): 377 self.h5file.remove_node(self.h5file.root, 'nonexistent') 378 379 def test06e_removeTable(self): 380 """Checking removing Tables from an existing file.""" 381 382 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 383 self.h5file.remove_node(self.h5file.root, 'atable') 384 385 # Open this file in read-only mode 386 self._reopen(node_cache_slots=self.node_cache_slots) 387 388 # Try to get the removed object 389 with self.assertRaises(LookupError): 390 self.h5file.root.atable 391 392 def test07_renameLeaf(self): 393 """Checking renaming a leave and access it after a close/open.""" 394 395 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 396 self.h5file.rename_node(self.h5file.root.anarray, 'anarray2') 397 398 # Open this file in read-only mode 399 self._reopen(node_cache_slots=self.node_cache_slots) 400 401 # Ensure that the new name exists 402 array_ = self.h5file.root.anarray2 403 self.assertEqual(array_.name, "anarray2") 404 self.assertEqual(array_._v_pathname, "/anarray2") 405 self.assertEqual(array_._v_depth, 1) 406 407 # Try to get the previous object with the old name 408 with self.assertRaises(LookupError): 409 self.h5file.root.anarray 410 411 def test07b_renameLeaf(self): 412 """Checking renaming Leaves and accesing them immediately.""" 413 414 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 415 self.h5file.rename_node(self.h5file.root.anarray, 'anarray2') 416 417 # Ensure that the new name exists 418 array_ = self.h5file.root.anarray2 419 self.assertEqual(array_.name, "anarray2") 420 
self.assertEqual(array_._v_pathname, "/anarray2") 421 self.assertEqual(array_._v_depth, 1) 422 423 # Try to get the previous object with the old name 424 with self.assertRaises(LookupError): 425 self.h5file.root.anarray 426 427 def test07c_renameLeaf(self): 428 """Checking renaming Leaves and modify attributes after that.""" 429 430 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 431 self.h5file.rename_node(self.h5file.root.anarray, 'anarray2') 432 array_ = self.h5file.root.anarray2 433 array_.attrs.TITLE = "hello" 434 435 # Ensure that the new attribute has been written correctly 436 self.assertEqual(array_.title, "hello") 437 self.assertEqual(array_.attrs.TITLE, "hello") 438 439 def test07d_renameLeaf(self): 440 """Checking renaming a Group under a nested group.""" 441 442 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 443 self.h5file.rename_node(self.h5file.root.agroup.anarray2, 'anarray3') 444 445 # Ensure that we can access n attributes in the new group 446 node = self.h5file.root.agroup.anarray3 447 self.assertEqual(node._v_title, "Array title 2") 448 449 def test08_renameToExistingLeaf(self): 450 """Checking renaming a node to an existing name.""" 451 452 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 453 454 # Try to get the previous object with the old name 455 with self.assertRaises(NodeError): 456 self.h5file.rename_node(self.h5file.root.anarray, 'array') 457 458 # Now overwrite the destination node. 
459 anarray = self.h5file.root.anarray 460 self.h5file.rename_node(anarray, 'array', overwrite=True) 461 self.assertNotIn('/anarray', self.h5file) 462 self.assertIs(self.h5file.root.array, anarray) 463 464 def test08b_renameToNotValidNaturalName(self): 465 """Checking renaming a node to a non-valid natural name""" 466 467 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 468 469 with warnings.catch_warnings(): 470 warnings.filterwarnings("error", category=NaturalNameWarning) 471 472 # Try to get the previous object with the old name 473 with self.assertRaises(NaturalNameWarning): 474 self.h5file.rename_node(self.h5file.root.anarray, 'array 2') 475 476 def test09_renameGroup(self): 477 """Checking renaming a Group and access it after a close/open.""" 478 479 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 480 self.h5file.rename_node(self.h5file.root.agroup, 'agroup3') 481 482 # Open this file in read-only mode 483 self._reopen(node_cache_slots=self.node_cache_slots) 484 485 # Ensure that the new name exists 486 group = self.h5file.root.agroup3 487 self.assertEqual(group._v_name, "agroup3") 488 self.assertEqual(group._v_pathname, "/agroup3") 489 490 # The children of this group also must be accessible through the 491 # new name path 492 group2 = self.h5file.get_node("/agroup3/agroup3") 493 self.assertEqual(group2._v_name, "agroup3") 494 self.assertEqual(group2._v_pathname, "/agroup3/agroup3") 495 496 # Try to get the previous object with the old name 497 with self.assertRaises(LookupError): 498 self.h5file.root.agroup 499 500 # Try to get a child with the old pathname 501 with self.assertRaises(LookupError): 502 self.h5file.get_node("/agroup/agroup3") 503 504 def test09b_renameGroup(self): 505 """Checking renaming a Group and access it immediately.""" 506 507 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 508 self.h5file.rename_node(self.h5file.root.agroup, 'agroup3') 509 510 # Ensure that the new name exists 511 group = 
self.h5file.root.agroup3 512 self.assertEqual(group._v_name, "agroup3") 513 self.assertEqual(group._v_pathname, "/agroup3") 514 515 # The children of this group also must be accessible through the 516 # new name path 517 group2 = self.h5file.get_node("/agroup3/agroup3") 518 self.assertEqual(group2._v_name, "agroup3") 519 self.assertEqual(group2._v_pathname, "/agroup3/agroup3") 520 521 # Try to get the previous object with the old name 522 with self.assertRaises(LookupError): 523 self.h5file.root.agroup 524 525 # Try to get a child with the old pathname 526 with self.assertRaises(LookupError): 527 self.h5file.get_node("/agroup/agroup3") 528 529 def test09c_renameGroup(self): 530 """Checking renaming a Group and modify attributes afterwards.""" 531 532 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 533 self.h5file.rename_node(self.h5file.root.agroup, 'agroup3') 534 535 # Ensure that we can modify attributes in the new group 536 group = self.h5file.root.agroup3 537 group._v_attrs.TITLE = "Hello" 538 self.assertEqual(group._v_title, "Hello") 539 self.assertEqual(group._v_attrs.TITLE, "Hello") 540 541 def test09d_renameGroup(self): 542 """Checking renaming a Group under a nested group.""" 543 544 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 545 self.h5file.rename_node(self.h5file.root.agroup.agroup3, 'agroup4') 546 547 # Ensure that we can access n attributes in the new group 548 group = self.h5file.root.agroup.agroup4 549 self.assertEqual(group._v_title, "Group title 3") 550 551 def test09e_renameGroup(self): 552 """Checking renaming a Group with nested groups in the LRU cache.""" 553 # This checks for ticket #126. 554 555 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 556 557 # Load intermediate groups and keep a nested one alive. 
558 g = self.h5file.root.agroup.agroup3.agroup4 559 self.assertIsNotNone(g) 560 self.h5file.rename_node('/', name='agroup', newname='agroup_') 561 562 # see ticket #126 563 self.assertNotIn('/agroup_/agroup4', self.h5file) 564 565 self.assertNotIn('/agroup', self.h5file) 566 for newpath in ['/agroup_', '/agroup_/agroup3', 567 '/agroup_/agroup3/agroup4']: 568 self.assertIn(newpath, self.h5file) 569 self.assertEqual( 570 newpath, self.h5file.get_node(newpath)._v_pathname) 571 572 def test10_moveLeaf(self): 573 """Checking moving a leave and access it after a close/open.""" 574 575 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 576 newgroup = self.h5file.create_group("/", "newgroup") 577 self.h5file.move_node(self.h5file.root.anarray, newgroup, 'anarray2') 578 579 # Open this file in read-only mode 580 self._reopen(node_cache_slots=self.node_cache_slots) 581 582 # Ensure that the new name exists 583 array_ = self.h5file.root.newgroup.anarray2 584 self.assertEqual(array_.name, "anarray2") 585 self.assertEqual(array_._v_pathname, "/newgroup/anarray2") 586 self.assertEqual(array_._v_depth, 2) 587 588 # Try to get the previous object with the old name 589 with self.assertRaises(LookupError): 590 self.h5file.root.anarray 591 592 def test10b_moveLeaf(self): 593 """Checking moving a leave and access it without a close/open.""" 594 595 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 596 newgroup = self.h5file.create_group("/", "newgroup") 597 self.h5file.move_node(self.h5file.root.anarray, newgroup, 'anarray2') 598 599 # Ensure that the new name exists 600 array_ = self.h5file.root.newgroup.anarray2 601 self.assertEqual(array_.name, "anarray2") 602 self.assertEqual(array_._v_pathname, "/newgroup/anarray2") 603 self.assertEqual(array_._v_depth, 2) 604 605 # Try to get the previous object with the old name 606 with self.assertRaises(LookupError): 607 self.h5file.root.anarray 608 609 def test10c_moveLeaf(self): 610 """Checking moving Leaves and 
modify attributes after that.""" 611 612 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 613 newgroup = self.h5file.create_group("/", "newgroup") 614 self.h5file.move_node(self.h5file.root.anarray, newgroup, 'anarray2') 615 array_ = self.h5file.root.newgroup.anarray2 616 array_.attrs.TITLE = "hello" 617 618 # Ensure that the new attribute has been written correctly 619 self.assertEqual(array_.title, "hello") 620 self.assertEqual(array_.attrs.TITLE, "hello") 621 622 def test10d_moveToExistingLeaf(self): 623 """Checking moving a leaf to an existing name.""" 624 625 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 626 627 # Try to get the previous object with the old name 628 with self.assertRaises(NodeError): 629 self.h5file.move_node( 630 self.h5file.root.anarray, self.h5file.root, 'array') 631 632 def test10_2_moveTable(self): 633 """Checking moving a table and access it after a close/open.""" 634 635 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 636 newgroup = self.h5file.create_group("/", "newgroup") 637 self.h5file.move_node(self.h5file.root.atable, newgroup, 'atable2') 638 639 # Open this file in read-only mode 640 self._reopen(node_cache_slots=self.node_cache_slots) 641 642 # Ensure that the new name exists 643 table_ = self.h5file.root.newgroup.atable2 644 self.assertEqual(table_.name, "atable2") 645 self.assertEqual(table_._v_pathname, "/newgroup/atable2") 646 self.assertEqual(table_._v_depth, 2) 647 648 # Try to get the previous object with the old name 649 with self.assertRaises(LookupError): 650 self.h5file.root.atable 651 652 def test10_2b_moveTable(self): 653 """Checking moving a table and access it without a close/open.""" 654 655 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 656 newgroup = self.h5file.create_group("/", "newgroup") 657 self.h5file.move_node(self.h5file.root.atable, newgroup, 'atable2') 658 659 # Ensure that the new name exists 660 table_ = 
self.h5file.root.newgroup.atable2 661 self.assertEqual(table_.name, "atable2") 662 self.assertEqual(table_._v_pathname, "/newgroup/atable2") 663 self.assertEqual(table_._v_depth, 2) 664 665 # Try to get the previous object with the old name 666 with self.assertRaises(LookupError): 667 self.h5file.root.atable 668 669 def test10_2b_bis_moveTable(self): 670 """Checking moving a table and use cached row without a close/open.""" 671 672 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 673 newgroup = self.h5file.create_group("/", "newgroup") 674 675 # Cache the Row attribute prior to the move 676 row = self.h5file.root.atable.row 677 self.h5file.move_node(self.h5file.root.atable, newgroup, 'atable2') 678 679 # Ensure that the new name exists 680 table_ = self.h5file.root.newgroup.atable2 681 self.assertEqual(table_.name, "atable2") 682 self.assertEqual(table_._v_pathname, "/newgroup/atable2") 683 self.assertEqual(table_._v_depth, 2) 684 685 # Ensure that cache Row attribute has been updated 686 row = table_.row 687 self.assertEqual(table_._v_pathname, row.table._v_pathname) 688 nrows = table_.nrows 689 690 # Add a new row just to make sure that this works 691 row.append() 692 table_.flush() 693 self.assertEqual(table_.nrows, nrows + 1) 694 695 def test10_2c_moveTable(self): 696 """Checking moving tables and modify attributes after that.""" 697 698 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 699 newgroup = self.h5file.create_group("/", "newgroup") 700 self.h5file.move_node(self.h5file.root.atable, newgroup, 'atable2') 701 table_ = self.h5file.root.newgroup.atable2 702 table_.attrs.TITLE = "hello" 703 704 # Ensure that the new attribute has been written correctly 705 self.assertEqual(table_.title, "hello") 706 self.assertEqual(table_.attrs.TITLE, "hello") 707 708 def test10_2d_moveToExistingTable(self): 709 """Checking moving a table to an existing name.""" 710 711 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 712 713 # 
Try to get the previous object with the old name 714 with self.assertRaises(NodeError): 715 self.h5file.move_node(self.h5file.root.atable, self.h5file.root, 716 'table') 717 718 def test10_2e_moveToExistingTableOverwrite(self): 719 """Checking moving a table to an existing name, overwriting it.""" 720 721 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 722 723 srcNode = self.h5file.root.atable 724 self.h5file.move_node(srcNode, self.h5file.root, 'table', 725 overwrite=True) 726 dstNode = self.h5file.root.table 727 728 self.assertIs(srcNode, dstNode) 729 730 def test11_moveGroup(self): 731 """Checking moving a Group and access it after a close/open.""" 732 733 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 734 newgroup = self.h5file.create_group(self.h5file.root, 'newgroup') 735 self.h5file.move_node(self.h5file.root.agroup, newgroup, 'agroup3') 736 737 # Open this file in read-only mode 738 self._reopen(node_cache_slots=self.node_cache_slots) 739 740 # Ensure that the new name exists 741 group = self.h5file.root.newgroup.agroup3 742 self.assertEqual(group._v_name, "agroup3") 743 self.assertEqual(group._v_pathname, "/newgroup/agroup3") 744 self.assertEqual(group._v_depth, 2) 745 746 # The children of this group must also be accessible through the 747 # new name path 748 group2 = self.h5file.get_node("/newgroup/agroup3/agroup3") 749 self.assertEqual(group2._v_name, "agroup3") 750 self.assertEqual(group2._v_pathname, "/newgroup/agroup3/agroup3") 751 self.assertEqual(group2._v_depth, 3) 752 753 # Try to get the previous object with the old name 754 with self.assertRaises(LookupError): 755 self.h5file.root.agroup 756 757 # Try to get a child with the old pathname 758 with self.assertRaises(LookupError): 759 self.h5file.get_node("/agroup/agroup3") 760 761 def test11b_moveGroup(self): 762 """Checking moving a Group and access it immediately.""" 763 764 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 765 newgroup = 
self.h5file.create_group(self.h5file.root, 'newgroup') 766 self.h5file.move_node(self.h5file.root.agroup, newgroup, 'agroup3') 767 768 # Ensure that the new name exists 769 group = self.h5file.root.newgroup.agroup3 770 self.assertEqual(group._v_name, "agroup3") 771 self.assertEqual(group._v_pathname, "/newgroup/agroup3") 772 self.assertEqual(group._v_depth, 2) 773 774 # The children of this group must also be accessible through the 775 # new name path 776 group2 = self.h5file.get_node("/newgroup/agroup3/agroup3") 777 self.assertEqual(group2._v_name, "agroup3") 778 self.assertEqual(group2._v_pathname, "/newgroup/agroup3/agroup3") 779 self.assertEqual(group2._v_depth, 3) 780 781 # Try to get the previous object with the old name 782 with self.assertRaises(LookupError): 783 self.h5file.root.agroup 784 785 # Try to get a child with the old pathname 786 with self.assertRaises(LookupError): 787 self.h5file.get_node("/agroup/agroup3") 788 789 def test11c_moveGroup(self): 790 """Checking moving a Group and modify attributes afterwards.""" 791 792 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 793 newgroup = self.h5file.create_group(self.h5file.root, 'newgroup') 794 self.h5file.move_node(self.h5file.root.agroup, newgroup, 'agroup3') 795 796 # Ensure that we can modify attributes in the new group 797 group = self.h5file.root.newgroup.agroup3 798 group._v_attrs.TITLE = "Hello" 799 group._v_attrs.hola = "Hello" 800 self.assertEqual(group._v_title, "Hello") 801 self.assertEqual(group._v_attrs.TITLE, "Hello") 802 self.assertEqual(group._v_attrs.hola, "Hello") 803 804 def test11d_moveToExistingGroup(self): 805 """Checking moving a group to an existing name.""" 806 807 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 808 809 # Try to get the previous object with the old name 810 with self.assertRaises(NodeError): 811 self.h5file.move_node(self.h5file.root.agroup, self.h5file.root, 812 'agroup2') 813 814 def 
test11e_moveToExistingGroupOverwrite(self): 815 """Checking moving a group to an existing name, overwriting it.""" 816 817 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 818 819 # agroup2 -> agroup 820 srcNode = self.h5file.root.agroup2 821 self.h5file.move_node(srcNode, self.h5file.root, 'agroup', 822 overwrite=True) 823 dstNode = self.h5file.root.agroup 824 825 self.assertIs(srcNode, dstNode) 826 827 def test12a_moveNodeOverItself(self): 828 """Checking moving a node over itself.""" 829 830 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 831 832 # array -> array 833 srcNode = self.h5file.root.array 834 self.h5file.move_node(srcNode, self.h5file.root, 'array') 835 dstNode = self.h5file.root.array 836 837 self.assertIs(srcNode, dstNode) 838 839 def test12b_moveGroupIntoItself(self): 840 """Checking moving a group into itself.""" 841 842 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 843 with self.assertRaises(NodeError): 844 # agroup2 -> agroup2/ 845 self.h5file.move_node(self.h5file.root.agroup2, 846 self.h5file.root.agroup2) 847 848 def test13a_copyLeaf(self): 849 """Copying a leaf.""" 850 851 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 852 853 # array => agroup2/ 854 new_node = self.h5file.copy_node(self.h5file.root.array, 855 self.h5file.root.agroup2) 856 dstNode = self.h5file.root.agroup2.array 857 858 self.assertIs(new_node, dstNode) 859 860 def test13b_copyGroup(self): 861 """Copying a group.""" 862 863 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 864 865 # agroup2 => agroup/ 866 new_node = self.h5file.copy_node(self.h5file.root.agroup2, 867 self.h5file.root.agroup) 868 dstNode = self.h5file.root.agroup.agroup2 869 870 self.assertIs(new_node, dstNode) 871 872 def test13c_copyGroupSelf(self): 873 """Copying a group into itself.""" 874 875 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 876 877 # agroup2 => agroup2/ 878 new_node = 
self.h5file.copy_node(self.h5file.root.agroup2, 879 self.h5file.root.agroup2) 880 dstNode = self.h5file.root.agroup2.agroup2 881 882 self.assertIs(new_node, dstNode) 883 884 def test13d_copyGroupRecursive(self): 885 """Recursively copying a group.""" 886 887 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 888 889 # agroup => agroup2/ 890 new_node = self.h5file.copy_node( 891 self.h5file.root.agroup, self.h5file.root.agroup2, recursive=True) 892 dstNode = self.h5file.root.agroup2.agroup 893 894 self.assertIs(new_node, dstNode) 895 dstChild1 = dstNode.anarray1 896 self.assertIsNotNone(dstChild1) 897 dstChild2 = dstNode.anarray2 898 self.assertIsNotNone(dstChild2) 899 dstChild3 = dstNode.agroup3 900 self.assertIsNotNone(dstChild3) 901 902 def test13e_copyRootRecursive(self): 903 """Recursively copying the root group into the root of another file.""" 904 905 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 906 h5fname2 = tempfile.mktemp(".h5") 907 h5file2 = tables.open_file( 908 h5fname2, mode="w", node_cache_slots=self.node_cache_slots) 909 try: 910 # h5file.root => h5file2.root 911 new_node = self.h5file.copy_node( 912 self.h5file.root, h5file2.root, recursive=True) 913 dstNode = h5file2.root 914 915 self.assertIs(new_node, dstNode) 916 self.assertIn("/agroup", h5file2) 917 self.assertIn("/agroup/anarray1", h5file2) 918 self.assertIn("/agroup/agroup3", h5file2) 919 920 finally: 921 h5file2.close() 922 os.remove(h5fname2) 923 924 def test13f_copyRootRecursive(self): 925 """Recursively copying the root group into a group in another file.""" 926 927 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 928 h5fname2 = tempfile.mktemp(".h5") 929 h5file2 = tables.open_file( 930 h5fname2, mode="w", node_cache_slots=self.node_cache_slots) 931 try: 932 h5file2.create_group('/', 'agroup2') 933 934 # fileh.root => h5file2.root.agroup2 935 new_node = self.h5file.copy_node( 936 self.h5file.root, h5file2.root.agroup2, recursive=True) 937 
dstNode = h5file2.root.agroup2 938 939 self.assertIs(new_node, dstNode) 940 self.assertIn("/agroup2/agroup", h5file2) 941 self.assertIn("/agroup2/agroup/anarray1", h5file2) 942 self.assertIn("/agroup2/agroup/agroup3", h5file2) 943 944 finally: 945 h5file2.close() 946 os.remove(h5fname2) 947 948 def test13g_copyRootItself(self): 949 """Recursively copying the root group into itself.""" 950 951 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 952 agroup2 = self.h5file.root 953 self.assertIsNotNone(agroup2) 954 955 # h5file.root => h5file.root 956 self.assertRaises(IOError, self.h5file.copy_node, 957 self.h5file.root, self.h5file.root, recursive=True) 958 959 def test14a_copyNodeExisting(self): 960 """Copying over an existing node.""" 961 962 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 963 964 with self.assertRaises(NodeError): 965 # agroup2 => agroup 966 self.h5file.copy_node(self.h5file.root.agroup2, newname='agroup') 967 968 def test14b_copyNodeExistingOverwrite(self): 969 """Copying over an existing node, overwriting it.""" 970 971 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 972 973 # agroup2 => agroup 974 new_node = self.h5file.copy_node(self.h5file.root.agroup2, 975 newname='agroup', overwrite=True) 976 dstNode = self.h5file.root.agroup 977 978 self.assertIs(new_node, dstNode) 979 980 def test14b2_copyNodeExistingOverwrite(self): 981 """Copying over an existing node in other file, overwriting it.""" 982 983 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 984 985 h5fname2 = tempfile.mktemp(".h5") 986 h5file2 = tables.open_file( 987 h5fname2, mode="w", node_cache_slots=self.node_cache_slots) 988 989 try: 990 # file1:/anarray1 => h5fname2:/anarray1 991 new_node = self.h5file.copy_node(self.h5file.root.agroup.anarray1, 992 newparent=h5file2.root) 993 # file1:/ => h5fname2:/ 994 new_node = self.h5file.copy_node(self.h5file.root, h5file2.root, 995 overwrite=True, recursive=True) 996 dstNode = 
h5file2.root 997 998 self.assertIs(new_node, dstNode) 999 finally: 1000 h5file2.close() 1001 os.remove(h5fname2) 1002 1003 def test14c_copyNodeExistingSelf(self): 1004 """Copying over self.""" 1005 1006 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 1007 1008 with self.assertRaises(NodeError): 1009 # agroup => agroup 1010 self.h5file.copy_node(self.h5file.root.agroup, newname='agroup') 1011 1012 def test14d_copyNodeExistingOverwriteSelf(self): 1013 """Copying over self, trying to overwrite.""" 1014 1015 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 1016 1017 with self.assertRaises(NodeError): 1018 # agroup => agroup 1019 self.h5file.copy_node( 1020 self.h5file.root.agroup, newname='agroup', overwrite=True) 1021 1022 def test14e_copyGroupSelfRecursive(self): 1023 """Recursively copying a group into itself.""" 1024 1025 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 1026 1027 with self.assertRaises(NodeError): 1028 # agroup => agroup/ 1029 self.h5file.copy_node(self.h5file.root.agroup, 1030 self.h5file.root.agroup, recursive=True) 1031 1032 def test15a_oneStepMove(self): 1033 """Moving and renaming a node in a single action.""" 1034 1035 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 1036 1037 # anarray1 -> agroup/array 1038 srcNode = self.h5file.root.anarray1 1039 self.h5file.move_node(srcNode, self.h5file.root.agroup, 'array') 1040 dstNode = self.h5file.root.agroup.array 1041 1042 self.assertIs(srcNode, dstNode) 1043 1044 def test15b_oneStepCopy(self): 1045 """Copying and renaming a node in a single action.""" 1046 1047 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 1048 1049 # anarray1 => agroup/array 1050 new_node = self.h5file.copy_node( 1051 self.h5file.root.anarray1, self.h5file.root.agroup, 'array') 1052 dstNode = self.h5file.root.agroup.array 1053 1054 self.assertIs(new_node, dstNode) 1055 1056 def test16a_fullCopy(self): 1057 """Copying full data and user attributes.""" 
1058 1059 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 1060 1061 # agroup => groupcopy 1062 srcNode = self.h5file.root.agroup 1063 new_node = self.h5file.copy_node( 1064 srcNode, newname='groupcopy', recursive=True) 1065 dstNode = self.h5file.root.groupcopy 1066 1067 self.assertIs(new_node, dstNode) 1068 self.assertEqual(srcNode._v_attrs.testattr, dstNode._v_attrs.testattr) 1069 self.assertEqual( 1070 srcNode.anarray1.attrs.testattr, dstNode.anarray1.attrs.testattr) 1071 self.assertEqual(srcNode.anarray1.read(), dstNode.anarray1.read()) 1072 1073 def test16b_partialCopy(self): 1074 """Copying partial data and no user attributes.""" 1075 1076 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 1077 1078 # agroup => groupcopy 1079 srcNode = self.h5file.root.agroup 1080 new_node = self.h5file.copy_node( 1081 srcNode, newname='groupcopy', 1082 recursive=True, copyuserattrs=False, 1083 start=0, stop=5, step=2) 1084 dstNode = self.h5file.root.groupcopy 1085 1086 self.assertIs(new_node, dstNode) 1087 self.assertFalse(hasattr(dstNode._v_attrs, 'testattr')) 1088 self.assertFalse(hasattr(dstNode.anarray1.attrs, 'testattr')) 1089 self.assertEqual(srcNode.anarray1.read()[ 1090 0:5:2], dstNode.anarray1.read()) 1091 1092 def test16c_fullCopy(self): 1093 """Copying full data and user attributes (from file to file).""" 1094 1095 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 1096 1097 h5fname2 = tempfile.mktemp(".h5") 1098 h5file2 = tables.open_file( 1099 h5fname2, mode="w", node_cache_slots=self.node_cache_slots) 1100 1101 try: 1102 # file1:/ => h5fname2:groupcopy 1103 srcNode = self.h5file.root 1104 new_node = self.h5file.copy_node( 1105 srcNode, h5file2.root, newname='groupcopy', recursive=True) 1106 dstNode = h5file2.root.groupcopy 1107 1108 self.assertIs(new_node, dstNode) 1109 self.assertEqual(srcNode._v_attrs.testattr, 1110 dstNode._v_attrs.testattr) 1111 self.assertEqual( 1112 srcNode.agroup.anarray1.attrs.testattr, 1113 
dstNode.agroup.anarray1.attrs.testattr) 1114 self.assertEqual(srcNode.agroup.anarray1.read(), 1115 dstNode.agroup.anarray1.read()) 1116 finally: 1117 h5file2.close() 1118 os.remove(h5fname2) 1119 1120 def test17a_CopyChunkshape(self): 1121 """Copying dataset with a chunkshape.""" 1122 1123 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 1124 srcTable = self.h5file.root.table 1125 newTable = self.h5file.copy_node( 1126 srcTable, newname='tablecopy', chunkshape=11) 1127 1128 self.assertEqual(newTable.chunkshape, (11,)) 1129 self.assertNotEqual(srcTable.chunkshape, newTable.chunkshape) 1130 1131 def test17b_CopyChunkshape(self): 1132 """Copying dataset with a chunkshape with 'keep' value.""" 1133 1134 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 1135 srcTable = self.h5file.root.table 1136 newTable = self.h5file.copy_node( 1137 srcTable, newname='tablecopy', chunkshape='keep') 1138 1139 self.assertEqual(srcTable.chunkshape, newTable.chunkshape) 1140 1141 def test17c_CopyChunkshape(self): 1142 """Copying dataset with a chunkshape with 'auto' value.""" 1143 1144 self._reopen(mode="r+", node_cache_slots=self.node_cache_slots) 1145 srcTable = self.h5file.root.table 1146 newTable = self.h5file.copy_node( 1147 srcTable, newname='tablecopy', chunkshape=11) 1148 newTable2 = self.h5file.copy_node( 1149 newTable, newname='tablecopy2', chunkshape='auto') 1150 1151 self.assertEqual(srcTable.chunkshape, newTable2.chunkshape) 1152 1153 def test18_closedRepr(self): 1154 """Representing a closed node as a string.""" 1155 1156 self._reopen(node_cache_slots=self.node_cache_slots) 1157 1158 for node in [self.h5file.root.agroup, self.h5file.root.anarray]: 1159 node._f_close() 1160 self.assertIn('closed', str(node)) 1161 self.assertIn('closed', repr(node)) 1162 1163 def test19_fileno(self): 1164 """Checking that the 'fileno()' method works.""" 1165 1166 # Open the old HDF5 file 1167 self._reopen(mode="r", node_cache_slots=self.node_cache_slots) 1168 1169 
class NodeCacheOpenFile(OpenFileTestCase):
    # Standard (positive) LRU node cache.
    node_cache_slots = NODE_CACHE_SLOTS
    open_kwargs = dict(node_cache_slots=node_cache_slots)


class NoNodeCacheOpenFile(OpenFileTestCase):
    # Node cache disabled.
    node_cache_slots = 0
    open_kwargs = dict(node_cache_slots=node_cache_slots)


class DictNodeCacheOpenFile(OpenFileTestCase):
    # Negative value selects the dict-based (unbounded) node cache.
    node_cache_slots = -NODE_CACHE_SLOTS
    open_kwargs = dict(node_cache_slots=node_cache_slots)


class CheckFileTestCase(common.TempFileMixin, TestCase):
    def setUp(self):
        super(CheckFileTestCase, self).setUp()

        # Create a regular (text) file
        self.txtfile = tempfile.mktemp(".h5")
        self.fileh = open(self.txtfile, "w")
        self.fileh.write("Hello!")
        self.fileh.close()

    def tearDown(self):
        self.fileh.close()
        os.remove(self.txtfile)
        super(CheckFileTestCase, self).tearDown()

    def test00_isHDF5File(self):
        """Checking tables.is_hdf5_file function (TRUE case)"""

        # Create a PyTables file (and by so, an HDF5 file)
        self.h5file.create_array(self.h5file.root, 'array', [1, 2],
                                 title="Title example")

        # For this method to run, it needs a closed file
        self.h5file.close()

        # When file has an HDF5 format, always returns 1
        if common.verbose:
            print("\nisHDF5File(%s) ==> %d" % (
                self.h5fname, tables.is_hdf5_file(self.h5fname)))
        self.assertEqual(tables.is_hdf5_file(self.h5fname), 1)

    def test01_isHDF5File(self):
        """Checking tables.is_hdf5_file function (FALSE case)"""

        version = tables.is_hdf5_file(self.txtfile)

        # When file is not an HDF5 format, always returns 0 or
        # negative value
        self.assertLessEqual(version, 0)

    def test01x_isHDF5File_nonexistent(self):
        """Identifying a nonexistent HDF5 file."""
        self.assertRaises(IOError, tables.is_hdf5_file, 'nonexistent')
"""Identifying a nonexistent HDF5 file.""" 1233 self.assertRaises(IOError, tables.is_hdf5_file, 'nonexistent') 1234 1235 @unittest.skipUnless(hasattr(os, 'getuid') and os.getuid() != 0, "no UID") 1236 def test01x_isHDF5File_unreadable(self): 1237 """Identifying an unreadable HDF5 file.""" 1238 1239 self.h5file.close() 1240 os.chmod(self.h5fname, 0) # no permissions at all 1241 self.assertRaises(IOError, tables.is_hdf5_file, self.h5fname) 1242 1243 def test02_isPyTablesFile(self): 1244 """Checking is_pytables_file function (TRUE case)""" 1245 1246 # Create a PyTables h5fname 1247 self.h5file.create_array(self.h5file.root, 'array', 1248 [1, 2], title="Title example") 1249 1250 # For this method to run, it needs a closed h5fname 1251 self.h5file.close() 1252 1253 version = tables.is_pytables_file(self.h5fname) 1254 1255 # When h5fname has a PyTables format, always returns "1.0" string or 1256 # greater 1257 if common.verbose: 1258 print() 1259 print("\nPyTables format version number ==> %s" % version) 1260 self.assertGreaterEqual(version, "1.0") 1261 1262 def test03_isPyTablesFile(self): 1263 """Checking is_pytables_file function (FALSE case)""" 1264 1265 version = tables.is_pytables_file(self.txtfile) 1266 1267 # When file is not a PyTables format, always returns 0 or 1268 # negative value 1269 if common.verbose: 1270 print() 1271 print("\nPyTables format version number ==> %s" % version) 1272 self.assertIsNone(version) 1273 1274 def test04_openGenericHDF5File(self): 1275 """Checking opening of a generic HDF5 file.""" 1276 1277 # Open an existing generic HDF5 file 1278 h5fname = test_filename("ex-noattr.h5") 1279 with tables.open_file(h5fname, mode="r") as h5file: 1280 # Check for some objects inside 1281 1282 # A group 1283 columns = h5file.get_node("/columns", classname="Group") 1284 self.assertEqual(columns._v_name, "columns") 1285 1286 # An Array 1287 array_ = h5file.get_node(columns, "TDC", classname="Array") 1288 self.assertEqual(array_._v_name, "TDC") 1289 
1290 # The new LRU code defers the appearance of a warning to this point 1291 1292 # Here comes an Array of H5T_ARRAY type 1293 ui = h5file.get_node(columns, "pressure", classname="Array") 1294 self.assertEqual(ui._v_name, "pressure") 1295 if common.verbose: 1296 print("Array object with type H5T_ARRAY -->", repr(ui)) 1297 print("Array contents -->", ui[:]) 1298 1299 # A Table 1300 table = h5file.get_node("/detector", "table", classname="Table") 1301 self.assertEqual(table._v_name, "table") 1302 1303 def test04b_UnImplementedOnLoading(self): 1304 """Checking failure loading resulting in an ``UnImplemented`` node.""" 1305 1306 ############### Note for developers ############################### 1307 # This test fails if you have the line: # 1308 # ##return ChildClass(self, childname) # uncomment for debugging # 1309 # uncommented in Group.py! # 1310 ################################################################### 1311 1312 h5fname = test_filename('smpl_unsupptype.h5') 1313 with tables.open_file(h5fname) as h5file: 1314 with self.assertWarns(UserWarning): 1315 node = h5file.get_node('/CompoundChunked') 1316 self.assertIsInstance(node, UnImplemented) 1317 1318 def test04c_UnImplementedScalar(self): 1319 """Checking opening of HDF5 files containing scalar dataset of 1320 UnImlemented type.""" 1321 1322 with tables.open_file(test_filename("scalar.h5")) as h5file: 1323 with self.assertWarns(UserWarning): 1324 node = h5file.get_node('/variable length string') 1325 self.assertIsInstance(node, UnImplemented) 1326 1327 def test05_copyUnimplemented(self): 1328 """Checking that an UnImplemented object cannot be copied.""" 1329 1330 # Open an existing generic HDF5 file 1331 h5fname = test_filename("smpl_unsupptype.h5") 1332 with tables.open_file(h5fname, mode="r") as h5file: 1333 self.assertWarns(UserWarning, h5file.get_node, '/CompoundChunked') 1334 with warnings.catch_warnings(): 1335 warnings.simplefilter("ignore") 1336 ui = h5file.get_node('/CompoundChunked') 1337 
self.assertEqual(ui._v_name, 'CompoundChunked') 1338 if common.verbose: 1339 print("UnImplement object -->", repr(ui)) 1340 1341 # Check that it cannot be copied to another file: 1342 self.assertWarns(UserWarning, ui.copy, self.h5file.root, "newui") 1343 1344 # The next can be used to check the copy of Array objects with H5T_ARRAY 1345 # in the future 1346 def _test05_copyUnimplemented(self): 1347 """Checking that an UnImplemented object cannot be copied.""" 1348 1349 # Open an existing generic HDF5 file 1350 # We don't need to wrap this in a try clause because 1351 # it has already been tried and the warning will not happen again 1352 h5fname2 = test_filename("ex-noattr.h5") 1353 with tables.open_file(h5fname2, mode="r") as h5file2: 1354 # An unsupported object (the deprecated H5T_ARRAY type in 1355 # Array, from pytables 0.8 on) 1356 ui = h5file2.get_node(h5file2.root.columns, "pressure") 1357 self.assertEqual(ui._v_name, "pressure") 1358 if common.verbose: 1359 print("UnImplement object -->", repr(ui)) 1360 1361 # Check that it cannot be copied to another file 1362 with warnings.catch_warnings(): 1363 # Force the userwarning to issue an error 1364 warnings.filterwarnings("error", category=UserWarning) 1365 with self.assertRaises(UserWarning): 1366 ui.copy(self.h5file.root, "newui") 1367 1368 1369@unittest.skipIf((os.name == 'nt' and sys.version_info < (3,)) 1370 or tables.file._FILE_OPEN_POLICY == 'strict', 1371 'FILE_OPEN_POLICY = "strict"') 1372class ThreadingTestCase(common.TempFileMixin, TestCase): 1373 def setUp(self): 1374 super(ThreadingTestCase, self).setUp() 1375 self.h5file.create_carray('/', 'test_array', tables.Int64Atom(), 1376 (200, 300)) 1377 self.h5file.close() 1378 1379 def test(self): 1380 lock = threading.Lock() 1381 1382 def syncronized_open_file(*args, **kwargs): 1383 with lock: 1384 return tables.open_file(*args, **kwargs) 1385 1386 def syncronized_close_file(self, *args, **kwargs): 1387 with lock: 1388 return self.close(*args, **kwargs) 
class PythonAttrsTestCase(common.TempFileMixin, TestCase):
    """Test interactions of Python attributes and child nodes."""

    def test00_attrOverChild(self):
        """Setting a Python attribute over a child node."""

        root = self.h5file.root

        # Create ``/test`` and overshadow it with ``root.test``.
        child = self.h5file.create_array(root, 'test', [1])
        attr = 'foobar'
        self.assertWarns(NaturalNameWarning, setattr, root, 'test', attr)

        self.assertIs(root.test, attr)
        self.assertIs(root._f_get_child('test'), child)

        # Now bring ``/test`` again to light.
        del root.test

        self.assertIs(root.test, child)

        # Now there is no *attribute* named ``test``.
        self.assertRaises(AttributeError,
                          delattr, root, 'test')

    def test01_childUnderAttr(self):
        """Creating a child node under a Python attribute."""

        h5file = self.h5file
        root = h5file.root

        # Create ``root.test`` and an overshadowed ``/test``.
        attr = 'foobar'
        root.test = attr
        self.assertWarns(NaturalNameWarning,
                         h5file.create_array, root, 'test', [1])
        child = h5file.get_node('/test')

        self.assertIs(root.test, attr)
        self.assertIs(root._f_get_child('test'), child)

        # Now bring ``/test`` again to light.
        del root.test

        self.assertIs(root.test, child)

        # Now there is no *attribute* named ``test``.
        self.assertRaises(AttributeError, delattr, root, 'test')

    def test02_nodeAttrInLeaf(self):
        """Assigning a ``Node`` value as an attribute to a ``Leaf``."""

        h5file = self.h5file

        array1 = h5file.create_array('/', 'array1', [1])
        array2 = h5file.create_array('/', 'array2', [1])

        # This may make the garbage collector work a little.
        array1.array2 = array2
        array2.array1 = array1

        # Check the assignments.
        self.assertIs(array1.array2, array2)
        self.assertIs(array2.array1, array1)
        self.assertRaises(NoSuchNodeError,  # ``/array1`` is not a group
                          h5file.get_node, '/array1/array2')
        self.assertRaises(NoSuchNodeError,  # ``/array2`` is not a group
                          h5file.get_node, '/array2/array3')

    def test03_nodeAttrInGroup(self):
        """Assigning a ``Node`` value as an attribute to a ``Group``."""

        h5file = self.h5file
        root = h5file.root

        array = h5file.create_array('/', 'array', [1])

        # Assign the array to a pair of attributes,
        # one of them overshadowing the original.
        root.arrayAlias = array
        self.assertWarns(NaturalNameWarning, setattr, root, 'array', array)

        # Check the assignments.
        self.assertIs(root.arrayAlias, array)
        self.assertIs(root.array, array)
        self.assertRaises(NoSuchNodeError, h5file.get_node, '/arrayAlias')
        self.assertIs(h5file.get_node('/array'), array)

        # Remove the attribute overshadowing the child.
        del root.array

        # Now there is no *attribute* named ``array``.
        self.assertRaises(AttributeError, delattr, root, 'array')
class StateTestCase(common.TempFileMixin, TestCase):
    """Test that ``File`` and ``Node`` operations check their state (open or
    closed, readable or writable) before proceeding."""

    def test00_fileCopyFileClosed(self):
        """Test copying a closed file."""

        self.h5file.close()
        h5cfname = tempfile.mktemp(suffix='.h5')

        try:
            self.assertRaises(ClosedFileError,
                              self.h5file.copy_file, h5cfname)
        finally:
            if os.path.exists(h5cfname):
                os.remove(h5cfname)

    def test01_fileCloseClosed(self):
        """Test closing an already closed file."""

        self.h5file.close()

        try:
            self.h5file.close()
        except ClosedFileError:
            self.fail("could not close an already closed file")

    def test02_fileFlushClosed(self):
        """Test flushing a closed file."""

        self.h5file.close()
        self.assertRaises(ClosedFileError, self.h5file.flush)

    def test03_fileFlushRO(self):
        """Flushing a read-only file."""

        self._reopen('r')

        try:
            self.h5file.flush()
        except FileModeError:
            self.fail("could not flush a read-only file")

    def test04_fileCreateNodeClosed(self):
        """Test creating a node in a closed file."""

        self.h5file.close()
        self.assertRaises(ClosedFileError,
                          self.h5file.create_group, '/', 'test')

    def test05_fileCreateNodeRO(self):
        """Test creating a node in a read-only file."""

        self._reopen('r')
        self.assertRaises(FileModeError,
                          self.h5file.create_group, '/', 'test')

    def test06_fileRemoveNodeClosed(self):
        """Test removing a node from a closed file."""

        self.h5file.create_group('/', 'test')
        self.h5file.close()
        self.assertRaises(ClosedFileError,
                          self.h5file.remove_node, '/', 'test')

    def test07_fileRemoveNodeRO(self):
        """Test removing a node from a read-only file."""

        self.h5file.create_group('/', 'test')
        self._reopen('r')
        self.assertRaises(FileModeError,
                          self.h5file.remove_node, '/', 'test')

    def test08_fileMoveNodeClosed(self):
        """Test moving a node in a closed file."""

        self.h5file.create_group('/', 'test1')
        self.h5file.create_group('/', 'test2')
        self.h5file.close()
        self.assertRaises(ClosedFileError,
                          self.h5file.move_node, '/test1', '/', 'test2')

    def test09_fileMoveNodeRO(self):
        """Test moving a node in a read-only file."""

        self.h5file.create_group('/', 'test1')
        self.h5file.create_group('/', 'test2')
        self._reopen('r')
        self.assertRaises(FileModeError,
                          self.h5file.move_node, '/test1', '/', 'test2')

    def test10_fileCopyNodeClosed(self):
        """Test copying a node in a closed file."""

        self.h5file.create_group('/', 'test1')
        self.h5file.create_group('/', 'test2')
        self.h5file.close()
        self.assertRaises(ClosedFileError,
                          self.h5file.copy_node, '/test1', '/', 'test2')

    def test11_fileCopyNodeRO(self):
        """Test copying a node in a read-only file."""

        self.h5file.create_group('/', 'test1')
        self._reopen('r')
        self.assertRaises(FileModeError,
                          self.h5file.copy_node, '/test1', '/', 'test2')

    def test13_fileGetNodeClosed(self):
        """Test getting a node from a closed file."""

        self.h5file.create_group('/', 'test')
        self.h5file.close()
        self.assertRaises(ClosedFileError, self.h5file.get_node, '/test')

    def test14_fileWalkNodesClosed(self):
        """Test walking a closed file."""

        self.h5file.create_group('/', 'test1')
        self.h5file.create_group('/', 'test2')
        self.h5file.close()
        self.assertRaises(ClosedFileError, next, self.h5file.walk_nodes())

    def test15_fileAttrClosed(self):
        """Test setting and deleting a node attribute in a closed file."""

        self.h5file.create_group('/', 'test')
        self.h5file.close()
        self.assertRaises(ClosedFileError,
                          self.h5file.set_node_attr, '/test', 'foo', 'bar')
        self.assertRaises(ClosedFileError,
                          self.h5file.del_node_attr, '/test', 'foo')

    def test16_fileAttrRO(self):
        """Test setting and deleting a node attribute in a read-only file."""

        self.h5file.create_group('/', 'test')
        self.h5file.set_node_attr('/test', 'foo', 'foo')
        self._reopen('r')
        self.assertRaises(FileModeError,
                          self.h5file.set_node_attr, '/test', 'foo', 'bar')
        self.assertRaises(FileModeError,
                          self.h5file.del_node_attr, '/test', 'foo')

    def test17_fileUndoClosed(self):
        """Test undo operations in a closed file."""

        self.h5file.enable_undo()
        self.h5file.create_group('/', 'test2')
        self.h5file.close()
        self.assertRaises(ClosedFileError, self.h5file.is_undo_enabled)
        self.assertRaises(ClosedFileError, self.h5file.get_current_mark)
        self.assertRaises(ClosedFileError, self.h5file.undo)
        self.assertRaises(ClosedFileError, self.h5file.disable_undo)

    def test18_fileUndoRO(self):
        """Test undo operations in a read-only file."""

        self.h5file.enable_undo()
        self.h5file.create_group('/', 'test')
        self._reopen('r')
        self.assertEqual(self.h5file._undoEnabled, False)
        # self.assertRaises(FileModeError, self.h5file.undo)
        # self.assertRaises(FileModeError, self.h5file.disable_undo)

    def test19a_getNode(self):
        """Test getting a child of a closed node."""

        g1 = self.h5file.create_group('/', 'g1')
        g2 = self.h5file.create_group('/g1', 'g2')

        # Close this *object* so that it should not be used.
        g1._f_close()
        self.assertRaises(ClosedNodeError, g1._f_get_child, 'g2')

        # Getting a node by its closed object is not allowed.
        self.assertRaises(ClosedNodeError,
                          self.h5file.get_node, g1)

        # Going through that *node* should reopen it automatically.
        try:
            g2_ = self.h5file.get_node('/g1/g2')
        except ClosedNodeError:
            self.fail("closed parent group has not been reopened")

        # Already open nodes should be closed now, but not the new ones.
        self.assertIs(g2._v_isopen, False,
                      "open child of closed group has not been closed")
        self.assertIs(g2_._v_isopen, True,
                      "open child of closed group has not been closed")

        # And existing closed ones should remain closed, but not the new
        # ones.
        g1_ = self.h5file.get_node('/g1')
        self.assertIs(g1._v_isopen, False,
                      "already closed group is not closed anymore")
        self.assertIs(g1_._v_isopen, True,
                      "newly opened group is still closed")

    def test19b_getNode(self):
        """Test getting a node that does not start with a slash ('/')."""

        # Create an array in the root
        self.h5file.create_array('/', 'array', [1, 2], title="Title example")

        # Get the array without specifying a leading slash
        self.assertRaises(NameError, self.h5file.get_node, "array")

    def test20_removeNode(self):
        """Test removing a closed node."""

        # This test is a little redundant once we know that
        # ``File.get_node()`` will reload a closed node, but anyway...

        group = self.h5file.create_group('/', 'group')
        array = self.h5file.create_array('/group', 'array', [1])

        # The closed *object* can not be used.
        group._f_close()
        self.assertRaises(ClosedNodeError, group._f_remove)
        self.assertRaises(ClosedNodeError, self.h5file.remove_node, group)

        # Still, the *node* is reloaded when necessary.
        try:
            self.h5file.remove_node('/group', recursive=True)
        except ClosedNodeError:
            self.fail("closed node has not been reloaded")

        # Objects of descendent removed nodes
        # should have been automatically closed when removed.
        self.assertRaises(ClosedNodeError, array._f_remove)

        self.assertNotIn('/group/array', self.h5file)  # just in case
        self.assertNotIn('/group', self.h5file)  # just in case

    def test21_attrsOfNode(self):
        """Test manipulating the attributes of a closed node."""

        node = self.h5file.create_group('/', 'test')
        nodeAttrs = node._v_attrs

        nodeAttrs.test = attr = 'foo'

        node._f_close()
        self.assertRaises(ClosedNodeError, getattr, node, '_v_attrs')
        # The design of ``AttributeSet`` does not yet allow this test.
        ## self.assertRaises(ClosedNodeError, getattr, nodeAttrs, 'test')

        self.assertEqual(self.h5file.get_node_attr('/test', 'test'), attr)

    def test21b_attrsOfNode(self):
        """Test manipulating the attributes of a node in a read-only file."""

        self.h5file.create_group('/', 'test')
        self.h5file.set_node_attr('/test', 'test', 'foo')

        self._reopen('r')
        self.assertRaises(FileModeError,
                          self.h5file.set_node_attr, '/test', 'test', 'bar')

    def test22_fileClosesNode(self):
        """Test node closing because of file closing."""

        node = self.h5file.create_group('/', 'test')

        self.h5file.close()
        self.assertRaises(ClosedNodeError, getattr, node, '_v_attrs')

    def test23_reopenFile(self):
        """Testing reopening a file and closing it several times."""

        self.h5file.create_array('/', 'test', [1, 2, 3])
        self.h5file.close()

        with tables.open_file(self.h5fname, "r") as h5file1:
            self.assertEqual(h5file1.open_count, 1)
            if tables.file._FILE_OPEN_POLICY == 'strict':
                self.assertRaises(ValueError,
                                  tables.open_file, self.h5fname, "r")
            else:
                with tables.open_file(self.h5fname, "r") as h5file2:
                    self.assertEqual(h5file1.open_count, 1)
                    self.assertEqual(h5file2.open_count, 1)
                    if common.verbose:
                        print("(h5file1) open_count:", h5file1.open_count)
                        print("(h5file1) test[1]:", h5file1.root.test[1])
                    self.assertEqual(h5file1.root.test[1], 2)
                    h5file1.close()

                    self.assertEqual(h5file2.open_count, 1)
                    if common.verbose:
                        print("(h5file2) open_count:", h5file2.open_count)
                        print("(h5file2) test[1]:", h5file2.root.test[1])
                    self.assertEqual(h5file2.root.test[1], 2)
class FlavorTestCase(common.TempFileMixin, TestCase):
    """Test that setting, getting and changing the ``flavor`` attribute of a
    leaf works as expected."""

    array_data = numpy.arange(10)
    scalar_data = numpy.int32(10)

    def _reopen(self, mode='r'):
        super(FlavorTestCase, self)._reopen(mode)
        self.array = self.h5file.get_node('/array')
        self.scalar = self.h5file.get_node('/scalar')
        return True

    def setUp(self):
        super(FlavorTestCase, self).setUp()
        self.array = self.h5file.create_array('/', 'array', self.array_data)
        self.scalar = self.h5file.create_array('/', 'scalar',
                                               self.scalar_data)

    def test00_invalid(self):
        """Setting an invalid flavor."""

        self.assertRaises(FlavorError, setattr, self.array, 'flavor', 'foo')

    def test01_readonly(self):
        """Setting a flavor in a read-only file."""

        self._reopen(mode='r')
        self.assertRaises(FileModeError,
                          setattr, self.array, 'flavor',
                          tables.flavor.internal_flavor)

    def test02_change(self):
        """Changing the flavor and reading data."""

        for flavor in all_flavors:
            self.array.flavor = flavor
            self.assertEqual(self.array.flavor, flavor)
            idata = array_of_flavor(self.array_data, flavor)
            odata = self.array[:]
            self.assertTrue(common.allequal(odata, idata, flavor))

    def test03_store(self):
        """Storing a changed flavor."""

        for flavor in all_flavors:
            self.array.flavor = flavor
            self.assertEqual(self.array.flavor, flavor)
            self._reopen(mode='r+')
            self.assertEqual(self.array.flavor, flavor)

    def test04_missing(self):
        """Reading a dataset of a missing flavor."""

        flavor = self.array.flavor  # default is internal
        self.array._v_attrs.FLAVOR = 'foobar'  # breaks flavor
        self._reopen(mode='r')
        idata = array_of_flavor(self.array_data, flavor)
        with self.assertWarns(FlavorWarning):
            odata = self.array.read()
        self.assertTrue(common.allequal(odata, idata, flavor))

    def test05_delete(self):
        """Deleting the flavor of a dataset."""

        self.array.flavor = 'python'  # non-default
        self.assertEqual(self.array.flavor, 'python')
        self.assertEqual(self.array.attrs.FLAVOR, 'python')
        del self.array.flavor
        self.assertEqual(self.array.flavor, tables.flavor.internal_flavor)
        self.assertRaises(AttributeError, getattr, self.array.attrs, 'FLAVOR')

    def test06_copyDeleted(self):
        """Copying a node with a deleted flavor (see #100)."""

        snames = [node._v_name for node in [self.array, self.scalar]]
        dnames = ['%s_copy' % name for name in snames]
        for name in snames:
            node = self.h5file.get_node('/', name)
            del node.flavor
        # Check the copied flavors right after copying and after reopening.
        for fmode in ['r+', 'r']:
            self._reopen(fmode)
            for sname, dname in zip(snames, dnames):
                if fmode == 'r+':
                    snode = self.h5file.get_node('/', sname)
                    node = snode.copy('/', dname)
                elif fmode == 'r':
                    node = self.h5file.get_node('/', dname)
                self.assertEqual(node.flavor, tables.flavor.internal_flavor,
                                 "flavor of node ``%s`` is not internal: %r"
                                 % (node._v_pathname, node.flavor))
1887 for fmode in ['r+', 'r']: 1888 self._reopen(fmode) 1889 for sname, dname in zip(snames, dnames): 1890 if fmode == 'r+': 1891 snode = self.h5file.get_node('/', sname) 1892 node = snode.copy('/', dname) 1893 elif fmode == 'r': 1894 node = self.h5file.get_node('/', dname) 1895 self.assertEqual(node.flavor, tables.flavor.internal_flavor, 1896 "flavor of node ``%s`` is not internal: %r" 1897 % (node._v_pathname, node.flavor)) 1898 1899 def test07_restrict_flavors(self): 1900 # regression test for gh-163 1901 1902 all_flavors = list(tables.flavor.all_flavors) 1903 alias_map = tables.flavor.alias_map.copy() 1904 converter_map = tables.flavor.converter_map.copy() 1905 identifier_map = tables.flavor.identifier_map.copy() 1906 description_map = tables.flavor.description_map.copy() 1907 1908 try: 1909 tables.flavor.restrict_flavors(keep=[]) 1910 self.assertLess(len(tables.flavor.alias_map), len(alias_map)) 1911 self.assertLess( 1912 len(tables.flavor.converter_map), 1913 len(converter_map)) 1914 finally: 1915 tables.flavor.all_flavors[:] = all_flavors[:] 1916 tables.flavor.alias_map.update(alias_map) 1917 tables.flavor.converter_map.update(converter_map) 1918 tables.flavor.identifier_map.update(identifier_map) 1919 tables.flavor.description_map.update(description_map) 1920 1921 1922@unittest.skipIf('win' in platform.system().lower(), 'known bug: gh-389') 1923@unittest.skipIf(sys.getfilesystemencoding() != 'utf-8', 1924 'need utf-8 file-system encoding') 1925class UnicodeFilename(common.TempFileMixin, TestCase): 1926 unicode_prefix = u'para\u0140lel' 1927 1928 def _getTempFileName(self): 1929 return tempfile.mktemp(prefix=self.unicode_prefix, suffix='.h5') 1930 1931 def setUp(self): 1932 super(UnicodeFilename, self).setUp() 1933 1934 self.test = self.h5file.create_array('/', 'test', [1, 2]) 1935 1936 # So as to check the reading 1937 self._reopen() 1938 1939 def test01(self): 1940 """Checking creating a filename with Unicode chars.""" 1941 1942 test = 
self.h5file.root.test 1943 if common.verbose: 1944 print("Filename:", self.h5fname) 1945 print("Array:", test[:]) 1946 print("Should look like:", [1, 2]) 1947 self.assertEqual(test[:], [1, 2], "Values does not match.") 1948 1949 def test02(self): 1950 """Checking tables.is_hdf5_file with a Unicode filename.""" 1951 1952 self.h5file.close() 1953 if common.verbose: 1954 print("Filename:", self.h5fname) 1955 print(" tables.is_hdf5_file?:", tables.is_hdf5_file(self.h5fname)) 1956 self.assertTrue(tables.is_hdf5_file(self.h5fname)) 1957 1958 def test03(self): 1959 """Checking is_pytables_file with a Unicode filename.""" 1960 1961 self.h5file.close() 1962 if common.verbose: 1963 print("Filename:", self.h5fname) 1964 print("is_pytables_file?:", tables.is_pytables_file(self.h5fname)) 1965 self.assertNotEqual(tables.is_pytables_file(self.h5fname), False) 1966 1967 @staticmethod 1968 def _store_carray(name, data, group): 1969 atom = tables.Atom.from_dtype(data.dtype) 1970 node = tables.CArray(group, name, shape=data.shape, atom=atom) 1971 node[:] = data 1972 1973 def test_store_and_load_with_non_ascii_attributes(self): 1974 self.h5file.close() 1975 self.h5file = tables.open_file(self.h5fname, "a") 1976 root = self.h5file.root 1977 group = self.h5file.create_group(root, 'face_data') 1978 array_name = u'data at 40\N{DEGREE SIGN}C' 1979 data = numpy.sinh(numpy.linspace(-1.4, 1.4, 500)) 1980 with warnings.catch_warnings(): 1981 warnings.simplefilter('ignore', NaturalNameWarning) 1982 self._store_carray(array_name, data, group) 1983 group = self.h5file.create_group(root, 'vertex_data') 1984 1985 1986@unittest.skipIf(sys.version_info < (3, 6), 1987 'PEP 519 was implemented in Python 3.6') 1988class PathLikeFilename(common.TempFileMixin, TestCase): 1989 1990 def _getTempFileName(self): 1991 from pathlib import Path 1992 return Path(tempfile.mktemp(suffix='.h5')) 1993 1994 def setUp(self): 1995 super(PathLikeFilename, self).setUp() 1996 1997 self.test = self.h5file.create_array('/', 
'test', [1, 2]) 1998 1999 # So as to check the reading 2000 self._reopen() 2001 2002 def test01(self): 2003 """Checking creating a file with a PathLike object as the filename.""" 2004 2005 test = self.h5file.root.test 2006 if common.verbose: 2007 print("Filename:", self.h5fname) 2008 print("Array:", test[:]) 2009 print("Should look like:", [1, 2]) 2010 self.assertEqual(test[:], [1, 2], "Values does not match.") 2011 2012 def test02(self): 2013 """Checking tables.is_hdf5_file with a PathLike object as the filename.""" 2014 2015 self.h5file.close() 2016 if common.verbose: 2017 print("Filename:", self.h5fname) 2018 print(" tables.is_hdf5_file?:", tables.is_hdf5_file(self.h5fname)) 2019 self.assertTrue(tables.is_hdf5_file(self.h5fname)) 2020 2021 def test03(self): 2022 """Checking is_pytables_file with a PathLike object as the filename.""" 2023 2024 self.h5file.close() 2025 if common.verbose: 2026 print("Filename:", self.h5fname) 2027 print("is_pytables_file?:", tables.is_pytables_file(self.h5fname)) 2028 self.assertNotEqual(tables.is_pytables_file(self.h5fname), False) 2029 2030 2031class FilePropertyTestCase(TestCase): 2032 def setUp(self): 2033 super(FilePropertyTestCase, self).setUp() 2034 self.h5fname = tempfile.mktemp(".h5") 2035 self.h5file = None 2036 2037 def tearDown(self): 2038 if self.h5file: 2039 self.h5file.close() 2040 2041 if os.path.exists(self.h5fname): 2042 os.remove(self.h5fname) 2043 super(FilePropertyTestCase, self).tearDown() 2044 2045 def test_get_filesize(self): 2046 data = numpy.zeros((2000, 2000)) 2047 datasize = numpy.prod(data.shape) * data.dtype.itemsize 2048 2049 self.h5file = tables.open_file(self.h5fname, mode="w") 2050 self.h5file.create_array(self.h5file.root, 'array', data) 2051 h5_filesize = self.h5file.get_filesize() 2052 self.h5file.close() 2053 2054 fs_filesize = os.stat(self.h5fname)[6] 2055 2056 self.assertGreaterEqual(h5_filesize, datasize) 2057 self.assertEqual(h5_filesize, fs_filesize) 2058 2059 def 
test01_null_userblock_size(self): 2060 self.h5file = tables.open_file(self.h5fname, mode="w") 2061 self.h5file.create_array(self.h5file.root, 'array', [1, 2]) 2062 self.assertEqual(self.h5file.get_userblock_size(), 0) 2063 2064 def test02_null_userblock_size(self): 2065 self.h5file = tables.open_file(self.h5fname, mode="w") 2066 self.h5file.create_array(self.h5file.root, 'array', [1, 2]) 2067 self.h5file.close() 2068 self.h5file = tables.open_file(self.h5fname, mode="r") 2069 self.assertEqual(self.h5file.get_userblock_size(), 0) 2070 2071 def test03_null_userblock_size(self): 2072 USER_BLOCK_SIZE = 0 2073 self.h5file = tables.open_file( 2074 self.h5fname, mode="w", user_block_size=USER_BLOCK_SIZE) 2075 self.h5file.create_array(self.h5file.root, 'array', [1, 2]) 2076 self.assertEqual(self.h5file.get_userblock_size(), 0) 2077 2078 def test01_userblock_size(self): 2079 USER_BLOCK_SIZE = 512 2080 self.h5file = tables.open_file( 2081 self.h5fname, mode="w", user_block_size=USER_BLOCK_SIZE) 2082 self.h5file.create_array(self.h5file.root, 'array', [1, 2]) 2083 self.assertEqual(self.h5file.get_userblock_size(), USER_BLOCK_SIZE) 2084 2085 def test02_userblock_size(self): 2086 USER_BLOCK_SIZE = 512 2087 self.h5file = tables.open_file( 2088 self.h5fname, mode="w", user_block_size=USER_BLOCK_SIZE) 2089 self.h5file.create_array(self.h5file.root, 'array', [1, 2]) 2090 self.h5file.close() 2091 self.h5file = tables.open_file(self.h5fname, mode="r") 2092 self.assertEqual(self.h5file.get_userblock_size(), USER_BLOCK_SIZE) 2093 2094 def test_small_userblock_size(self): 2095 USER_BLOCK_SIZE = 12 2096 self.assertRaises(ValueError, tables.open_file, self.h5fname, mode="w", 2097 user_block_size=USER_BLOCK_SIZE) 2098 2099 def test_invalid_userblock_size(self): 2100 USER_BLOCK_SIZE = 1025 2101 self.assertRaises(ValueError, tables.open_file, self.h5fname, mode="w", 2102 user_block_size=USER_BLOCK_SIZE) 2103 2104 2105# Test for reading a file that uses Blosc and created on a big-endian 
platform 2106@unittest.skipIf(not common.blosc_avail, 'Blosc not available') 2107class BloscBigEndian(common.TestFileMixin, TestCase): 2108 h5fname = test_filename("blosc_bigendian.h5") 2109 2110 def test00_bigendian(self): 2111 """Checking compatibility with Blosc on big-endian machines.""" 2112 2113 # Check that we can read the contents without problems (nor warnings!) 2114 for dset_name in ('i1', 'i2', 'i4', 'i8'): 2115 a = numpy.arange(10, dtype=dset_name) 2116 dset = self.h5file.get_node('/'+dset_name) 2117 self.assertTrue(common.allequal(a, dset[:]), 2118 "Error in big-endian data!") 2119 2120 2121# Case test for Blosc and subprocesses (via multiprocessing module) 2122 2123# The worker function for the subprocess (needs to be here because Windows 2124# has problems pickling nested functions with the multiprocess module :-/) 2125def _worker(fn, qout=None): 2126 fp = tables.open_file(fn) 2127 if common.verbose: 2128 print("About to load: ", fn) 2129 rows = fp.root.table.where('(f0 < 10)') 2130 if common.verbose: 2131 print("Got the iterator, about to iterate") 2132 next(rows) 2133 if common.verbose: 2134 print("Succeeded in one iteration\n") 2135 fp.close() 2136 2137 if qout is not None: 2138 qout.put("Done") 2139 2140 2141# From: Yaroslav Halchenko <debian@onerussian.com> 2142# Subject: Skip the unittest on kFreeBSD and Hurd -- locking seems to 2143# be N/A 2144# 2145# on kfreebsd /dev/shm is N/A 2146# on Hurd -- inter-process semaphore locking is N/A 2147@unittest.skipIf(not multiprocessing_imported, 2148 'multiprocessing module not available') 2149@unittest.skipIf(platform.system().lower() in ('gnu', 'gnu/kfreebsd'), 2150 "multiprocessing module is not supported on Hurd/kFreeBSD") 2151@unittest.skipIf(not common.blosc_avail, 'Blosc not available') 2152class BloscSubprocess(TestCase): 2153 def test_multiprocess(self): 2154 # Create a relatively large table with Blosc level 9 (large blocks) 2155 h5fname = tempfile.mktemp(prefix="multiproc-blosc9-", 
suffix=".h5") 2156 try: 2157 size = int(3e5) 2158 sa = numpy.fromiter(((i, i**2, i//3) 2159 for i in range(size)), 'i4,i8,f8') 2160 with tables.open_file(h5fname, 'w') as h5file: 2161 h5file.create_table( 2162 h5file.root, 'table', sa, 2163 filters=tables.Filters(complevel=9, complib="blosc"), 2164 chunkshape=(size // 3,)) 2165 2166 if common.verbose: 2167 print("**** Running from main process:") 2168 _worker(h5fname) 2169 2170 if common.verbose: 2171 print("**** Running from subprocess:") 2172 2173 try: 2174 qout = mp.Queue() 2175 except OSError: 2176 print("Permission denied due to /dev/shm settings") 2177 else: 2178 ps = mp.Process(target=_worker, args=(h5fname, qout,)) 2179 ps.daemon = True 2180 ps.start() 2181 2182 result = qout.get() 2183 if common.verbose: 2184 print(result) 2185 finally: 2186 os.remove(h5fname) 2187 2188 2189class HDF5ErrorHandling(TestCase): 2190 def setUp(self): 2191 super(HDF5ErrorHandling, self).setUp() 2192 self._old_policy = tables.HDF5ExtError.DEFAULT_H5_BACKTRACE_POLICY 2193 2194 def tearDown(self): 2195 tables.HDF5ExtError.DEFAULT_H5_BACKTRACE_POLICY = self._old_policy 2196 super(HDF5ErrorHandling, self).tearDown() 2197 2198 def test_silence_messages(self): 2199 code = """ 2200import tables 2201tables.silence_hdf5_messages(False) 2202tables.silence_hdf5_messages() 2203try: 2204 tables.open_file(r'%s') 2205except tables.HDF5ExtError, e: 2206 pass 2207""" 2208 2209 filename = tempfile.mktemp(prefix="hdf5-error-handling-", suffix=".py") 2210 try: 2211 with open(filename, 'w') as fp: 2212 fp.write(code % filename) 2213 2214 p = subprocess.Popen([sys.executable, filename], 2215 stdout=subprocess.PIPE, 2216 stderr=subprocess.PIPE) 2217 (stdout, stderr) = p.communicate() 2218 2219 self.assertNotIn("HDF5-DIAG", stderr.decode('ascii')) 2220 finally: 2221 os.remove(filename) 2222 2223 def test_enable_messages(self): 2224 code = """ 2225import tables 2226tables.silence_hdf5_messages() 2227tables.silence_hdf5_messages(False) 2228try: 2229 
tables.open_file(r'%s') 2230except tables.HDF5ExtError as e: 2231 pass 2232""" 2233 2234 filename = tempfile.mktemp(prefix="hdf5-error-handling-", suffix=".py") 2235 try: 2236 with open(filename, 'w') as fp: 2237 fp.write(code % filename) 2238 2239 p = subprocess.Popen([sys.executable, filename], 2240 stdout=subprocess.PIPE, 2241 stderr=subprocess.PIPE) 2242 (stdout, stderr) = p.communicate() 2243 2244 self.assertIn("HDF5-DIAG", stderr.decode('ascii')) 2245 finally: 2246 os.remove(filename) 2247 2248 def _raise_exterror(self): 2249 h5fname = tempfile.mktemp(".h5") 2250 open(h5fname, 'wb').close() 2251 2252 try: 2253 h5file = tables.open_file(h5fname) 2254 h5file.close() 2255 finally: 2256 os.remove(h5fname) 2257 2258 def test_h5_backtrace_quiet(self): 2259 tables.HDF5ExtError.DEFAULT_H5_BACKTRACE_POLICY = True 2260 2261 with self.assertRaises(tables.HDF5ExtError) as cm: 2262 self._raise_exterror() 2263 2264 self.assertIsNotNone(cm.exception.h5backtrace) 2265 2266 def test_h5_backtrace_verbose(self): 2267 tables.HDF5ExtError.DEFAULT_H5_BACKTRACE_POLICY = "VERBOSE" 2268 2269 with self.assertRaises(tables.HDF5ExtError) as cm: 2270 self._raise_exterror() 2271 2272 self.assertIsNotNone(cm.exception.h5backtrace) 2273 msg = str(cm.exception) 2274 self.assertIn(cm.exception.h5backtrace[-1][-1], msg) 2275 2276 def test_h5_backtrace_ignore(self): 2277 tables.HDF5ExtError.DEFAULT_H5_BACKTRACE_POLICY = False 2278 2279 with self.assertRaises(tables.HDF5ExtError) as cm: 2280 self._raise_exterror() 2281 2282 self.assertIsNone(cm.exception.h5backtrace) 2283 2284 2285class TestDescription(TestCase): 2286 def test_isdescription_inheritance(self): 2287 # Regression test for gh-65 2288 class TestDescParent(IsDescription): 2289 c = Int32Col() 2290 2291 class TestDesc(TestDescParent): 2292 pass 2293 2294 self.assertIn('c', TestDesc.columns) 2295 2296 def test_descr_from_dtype(self): 2297 t = numpy.dtype([('col1', 'int16'), ('col2', float)]) 2298 descr, byteorder = descr_from_dtype(t) 
2299 2300 self.assertIn('col1', descr._v_colobjects) 2301 self.assertIn('col2', descr._v_colobjects) 2302 self.assertEqual(len(descr._v_colobjects), 2) 2303 self.assertIsInstance(descr._v_colobjects['col1'], Col) 2304 self.assertIsInstance(descr._v_colobjects['col2'], Col) 2305 self.assertEqual(descr._v_colobjects['col1'].dtype, numpy.int16) 2306 self.assertEqual(descr._v_colobjects['col2'].dtype, float) 2307 2308 def test_descr_from_dtype_rich_dtype(self): 2309 header = [(('timestamp', 't'), 'u4'), 2310 (('unit (cluster) id', 'unit'), 'u2')] 2311 t = numpy.dtype(header) 2312 2313 descr, byteorder = descr_from_dtype(t) 2314 self.assertEqual(len(descr._v_names), 2) 2315 self.assertEqual(sorted(descr._v_names), ['t', 'unit']) 2316 2317 def test_descr_from_dtype_comp_01(self): 2318 d1 = numpy.dtype([ 2319 ('x', 'int16'), 2320 ('y', 'int16'), 2321 ]) 2322 2323 d_comp = numpy.dtype([ 2324 ('time', 'float64'), 2325 ('value', d1) 2326 #('value', (d1, (1,))) 2327 ]) 2328 2329 descr, byteorder = descr_from_dtype(d_comp) 2330 2331 self.assertTrue(descr._v_is_nested) 2332 self.assertIn('time', descr._v_colobjects) 2333 self.assertIn('value', descr._v_colobjects) 2334 self.assertEqual(len(descr._v_colobjects), 2) 2335 self.assertIsInstance(descr._v_colobjects['time'], Col) 2336 self.assertTrue(isinstance(descr._v_colobjects['value'], 2337 tables.Description)) 2338 self.assertEqual(descr._v_colobjects['time'].dtype, numpy.float64) 2339 2340 def test_descr_from_dtype_comp_02(self): 2341 d1 = numpy.dtype([ 2342 ('x', 'int16'), 2343 ('y', 'int16'), 2344 ]) 2345 2346 d_comp = numpy.dtype([ 2347 ('time', 'float64'), 2348 ('value', (d1, (1,))) 2349 ]) 2350 2351 with self.assertWarns(UserWarning): 2352 descr, byteorder = descr_from_dtype(d_comp) 2353 2354 self.assertTrue(descr._v_is_nested) 2355 self.assertIn('time', descr._v_colobjects) 2356 self.assertIn('value', descr._v_colobjects) 2357 self.assertEqual(len(descr._v_colobjects), 2) 2358 
self.assertIsInstance(descr._v_colobjects['time'], Col) 2359 self.assertTrue(isinstance(descr._v_colobjects['value'], 2360 tables.Description)) 2361 self.assertEqual(descr._v_colobjects['time'].dtype, numpy.float64) 2362 2363 def test_dtype_from_descr_is_description(self): 2364 # See gh-152 2365 class TestDescParent(IsDescription): 2366 col1 = Int16Col() 2367 col2 = FloatCol() 2368 2369 dtype = numpy.dtype([('col1', 'int16'), ('col2', float)]) 2370 t = dtype_from_descr(TestDescParent) 2371 2372 self.assertEqual(t, dtype) 2373 2374 def test_dtype_from_descr_is_description_instance(self): 2375 # See gh-152 2376 class TestDescParent(IsDescription): 2377 col1 = Int16Col() 2378 col2 = FloatCol() 2379 2380 dtype = numpy.dtype([('col1', 'int16'), ('col2', float)]) 2381 t = dtype_from_descr(TestDescParent()) 2382 2383 self.assertEqual(t, dtype) 2384 2385 def test_dtype_from_descr_description_instance(self): 2386 # See gh-152 2387 class TestDescParent(IsDescription): 2388 col1 = Int16Col() 2389 col2 = FloatCol() 2390 2391 dtype = numpy.dtype([('col1', 'int16'), ('col2', float)]) 2392 desctiption = Description(TestDescParent().columns) 2393 t = dtype_from_descr(desctiption) 2394 2395 self.assertEqual(t, dtype) 2396 2397 def test_dtype_from_descr_dict(self): 2398 # See gh-152 2399 dtype = numpy.dtype([('col1', 'int16'), ('col2', float)]) 2400 t = dtype_from_descr({'col1': Int16Col(), 'col2': FloatCol()}) 2401 2402 self.assertEqual(t, dtype) 2403 2404 def test_dtype_from_descr_invalid_type(self): 2405 # See gh-152 2406 self.assertRaises(ValueError, dtype_from_descr, []) 2407 2408 def test_dtype_from_descr_byteorder(self): 2409 # See gh-152 2410 class TestDescParent(IsDescription): 2411 col1 = Int16Col() 2412 col2 = FloatCol() 2413 2414 t = dtype_from_descr(TestDescParent, byteorder='>') 2415 2416 self.assertEqual(t['col1'].byteorder, '>') 2417 self.assertEqual(t['col2'].byteorder, '>') 2418 2419 def test_str_names(self): 2420 # see gh-42 2421 d = {'name': tables.Int16Col()} 
2422 descr = Description(d) 2423 self.assertEqual(sorted(descr._v_names), sorted(d.keys())) 2424 self.assertIsInstance(descr._v_dtype, numpy.dtype) 2425 self.assertTrue(sorted(descr._v_dtype.fields.keys()), 2426 sorted(d.keys())) 2427 2428 2429class TestAtom(TestCase): 2430 def test_atom_attributes01(self): 2431 shape = (10, 10) 2432 a = Float64Atom(shape=shape) 2433 2434 self.assertEqual(a.dflt, 0.) 2435 self.assertEqual(a.dtype, numpy.dtype((numpy.float64, shape))) 2436 self.assertEqual(a.itemsize, a.dtype.base.itemsize) 2437 self.assertEqual(a.kind, 'float') 2438 self.assertEqual(a.ndim, len(shape)) 2439 # self.assertEqual(a.recarrtype, ) 2440 self.assertEqual(a.shape, shape) 2441 self.assertEqual(a.size, a.itemsize * numpy.prod(shape)) 2442 self.assertEqual(a.type, 'float64') 2443 2444 def test_atom_copy01(self): 2445 shape = (10, 10) 2446 a = Float64Atom(shape=shape) 2447 aa = a.copy() 2448 self.assertEqual(aa.shape, shape) 2449 2450 def test_atom_copy02(self): 2451 dflt = 2.0 2452 a = Float64Atom(dflt=dflt) 2453 aa = a.copy() 2454 self.assertEqual(aa.dflt, dflt) 2455 2456 def test_atom_copy_override(self): 2457 shape = (10, 10) 2458 dflt = 2.0 2459 a = Float64Atom(shape=shape, dflt=dflt) 2460 aa = a.copy(dflt=-dflt) 2461 self.assertEqual(aa.shape, shape) 2462 self.assertNotEqual(aa.dflt, dflt) 2463 self.assertEqual(aa.dflt, -dflt) 2464 2465 2466class TestCol(TestCase): 2467 def test_col_copy01(self): 2468 shape = (10, 10) 2469 c = Float64Col(shape=shape) 2470 cc = c.copy() 2471 self.assertEqual(cc.shape, shape) 2472 2473 def test_col_copy02(self): 2474 dflt = 2.0 2475 c = Float64Col(dflt=dflt) 2476 cc = c.copy() 2477 self.assertEqual(cc.dflt, dflt) 2478 2479 def test_col_copy_override(self): 2480 shape = (10, 10) 2481 dflt = 2.0 2482 pos = 3 2483 c = Float64Col(shape=shape, dflt=dflt, pos=pos) 2484 cc = c.copy(pos=2) 2485 self.assertEqual(cc.shape, shape) 2486 self.assertEqual(cc.dflt, dflt) 2487 self.assertNotEqual(cc._v_pos, pos) 2488 
self.assertEqual(cc._v_pos, 2) 2489 2490 2491class TestSysattrCompatibility(TestCase): 2492 def test_open_python2(self): 2493 h5fname = test_filename("python2.h5") 2494 with tables.open_file(h5fname, "r") as h5file: 2495 self.assertTrue(h5file.isopen) 2496 2497 def test_open_python3(self): 2498 h5fname = test_filename("python3.h5") 2499 with tables.open_file(h5fname, "r") as h5file: 2500 self.assertTrue(h5file.isopen) 2501 2502 2503def suite(): 2504 theSuite = unittest.TestSuite() 2505 niter = 1 2506 2507 for i in range(niter): 2508 theSuite.addTest(unittest.makeSuite(OpenFileFailureTestCase)) 2509 theSuite.addTest(unittest.makeSuite(NodeCacheOpenFile)) 2510 theSuite.addTest(unittest.makeSuite(NoNodeCacheOpenFile)) 2511 theSuite.addTest(unittest.makeSuite(DictNodeCacheOpenFile)) 2512 theSuite.addTest(unittest.makeSuite(CheckFileTestCase)) 2513 theSuite.addTest(unittest.makeSuite(ThreadingTestCase)) 2514 theSuite.addTest(unittest.makeSuite(PythonAttrsTestCase)) 2515 theSuite.addTest(unittest.makeSuite(StateTestCase)) 2516 theSuite.addTest(unittest.makeSuite(FlavorTestCase)) 2517 theSuite.addTest(unittest.makeSuite(UnicodeFilename)) 2518 theSuite.addTest(unittest.makeSuite(PathLikeFilename)) 2519 theSuite.addTest(unittest.makeSuite(FilePropertyTestCase)) 2520 theSuite.addTest(unittest.makeSuite(BloscBigEndian)) 2521 theSuite.addTest(unittest.makeSuite(BloscSubprocess)) 2522 theSuite.addTest(unittest.makeSuite(HDF5ErrorHandling)) 2523 theSuite.addTest(unittest.makeSuite(TestDescription)) 2524 theSuite.addTest(unittest.makeSuite(TestAtom)) 2525 theSuite.addTest(unittest.makeSuite(TestCol)) 2526 theSuite.addTest(unittest.makeSuite(TestSysattrCompatibility)) 2527 2528 return theSuite 2529 2530 2531if __name__ == '__main__': 2532 common.parse_argv(sys.argv) 2533 common.print_versions() 2534 unittest.main(defaultTest='suite') 2535 2536## Local Variables: 2537## mode: python 2538## End: 2539