# -*- coding: utf-8 -*-

########################################################################
#
# License: BSD
# Created: 2005-02-11
# Author: Ivan Vilata i Balaguer - ivan@selidor.net
#
# $Id$
#
########################################################################

"""PyTables nodes."""

import warnings
import functools

from .registry import class_name_dict, class_id_dict
from .exceptions import (ClosedNodeError, NodeError, UndoRedoWarning,
                         PerformanceWarning)
from .path import join_path, split_path, isvisiblepath
from .utils import lazyattr
from .undoredo import move_to_shadow
from .attributeset import AttributeSet, NotLoggedAttributeSet


__docformat__ = 'reStructuredText'
"""The format of documentation strings in this module."""


def _closedrepr(oldmethod):
    """Decorate string representation method to handle closed nodes.

    If the node is closed, a string like this is returned::

      <closed MODULE.CLASS at ADDRESS>

    instead of calling `oldmethod` and returning its result.

    """

    @functools.wraps(oldmethod)
    def newmethod(self):
        if not self._v_isopen:
            cmod = self.__class__.__module__
            cname = self.__class__.__name__
            addr = hex(id(self))
            return '<closed %s.%s at %s>' % (cmod, cname, addr)
        return oldmethod(self)

    return newmethod
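
# Illustrative sketch (not executed here): once a node has been closed, its
# decorated representation methods return the generic form described above
# instead of delegating to ``oldmethod``.  For a hypothetical ``node``:
#
#   node._f_close()
#   repr(node)   # e.g. '<closed tables.group.Group at 0x...>'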


class MetaNode(type):
    """Node metaclass.

    This metaclass ensures that classes using it get registered into
    several dictionaries (namely the `tables.registry.class_name_dict`
    class name dictionary and the `tables.registry.class_id_dict` class
    identifier dictionary).

    It also adds sanity checks to some methods:

      * Check that the node is open when computing its string
        representation, and provide a default string if it is closed.

    """

    def __new__(class_, name, bases, dict_):
        # Add default behaviour for representing closed nodes.
        for mname in ['__str__', '__repr__']:
            if mname in dict_:
                dict_[mname] = _closedrepr(dict_[mname])

        return type.__new__(class_, name, bases, dict_)

    def __init__(class_, name, bases, dict_):
        super(MetaNode, class_).__init__(name, bases, dict_)

        # Always register into class name dictionary.
        class_name_dict[class_.__name__] = class_

        # Register into class identifier dictionary only if the class
        # has an identifier and it is different from its parents'.
        cid = getattr(class_, '_c_classid', None)
        if cid is not None:
            for base in bases:
                pcid = getattr(base, '_c_classid', None)
                if pcid == cid:
                    break
            else:
                class_id_dict[cid] = class_
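
    # Registration sketch (illustrative only, never executed here): any class
    # created through this metaclass is added to ``class_name_dict``, and to
    # ``class_id_dict`` as well when it defines its own ``_c_classid``:
    #
    #   class MyArray(Node):                   # hypothetical subclass
    #       _c_classid = 'MYARRAY'
    #
    #   class_name_dict['MyArray'] is MyArray  # True
    #   class_id_dict['MYARRAY'] is MyArray    # True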


class Node(metaclass=MetaNode):
    """Abstract base class for all PyTables nodes.

    This is the base class for *all* nodes in a PyTables hierarchy. It is an
    abstract class, i.e. it may not be directly instantiated; however, every
    node in the hierarchy is an instance of this class.

    A PyTables node is always hosted in a PyTables *file*, under a *parent
    group*, at a certain *depth* in the node hierarchy. A node knows its own
    *name* in the parent group and its own *path name* in the file.

    All the previous information is location-dependent, i.e. it may change when
    moving or renaming a node in the hierarchy. A node also has
    location-independent information, such as its *HDF5 object identifier* and
    its *attribute set*.

    This class gathers the operations and attributes (both location-dependent
    and independent) which are common to all PyTables nodes, whatever their
    type is. Nonetheless, due to natural naming restrictions, the names of all
    of these members start with a reserved prefix (see the Group class
    in :ref:`GroupClassDescr`).

    Sub-classes with no children (e.g. *leaf nodes*) may define new methods,
    attributes and properties to avoid natural naming restrictions. For
    instance, _v_attrs may be shortened to attrs and _f_rename to
    rename. However, the original methods and attributes should still be
    available.

    .. rubric:: Node attributes

    .. attribute:: _v_depth

        The depth of this node in the tree (a non-negative integer value).

    .. attribute:: _v_file

        The hosting File instance (see :ref:`FileClassDescr`).

    .. attribute:: _v_name

        The name of this node in its parent group (a string).

    .. attribute:: _v_pathname

        The path of this node in the tree (a string).

    .. attribute:: _v_objectid

        A node identifier (may change from run to run).

        .. versionchanged:: 3.0
           The *_v_objectID* attribute has been renamed into *_v_objectid*.

    """

    # By default, attributes accept Undo/Redo.
    _AttributeSet = AttributeSet

    # `_v_parent` is accessed via its file to avoid upwards references.
    def _g_getparent(self):
        "The parent :class:`Group` instance"
        (parentpath, nodename) = split_path(self._v_pathname)
        return self._v_file._get_node(parentpath)

    _v_parent = property(_g_getparent)

    # '_v_attrs' is defined as a lazy read-only attribute.
    # This saves 0.7s/3.8s.
    @lazyattr
    def _v_attrs(self):
        """The associated `AttributeSet` instance.

        See Also
        --------
        tables.attributeset.AttributeSet : container for the HDF5 attributes

        """

        return self._AttributeSet(self)

    # '_v_title' is a direct read-write shorthand for the 'TITLE' attribute
    # with the empty string as a default value.
    def _g_gettitle(self):
        "A description of this node. A shorthand for the TITLE attribute."
        if hasattr(self._v_attrs, 'TITLE'):
            return self._v_attrs.TITLE
        else:
            return ''

    def _g_settitle(self, title):
        self._v_attrs.TITLE = title

    _v_title = property(_g_gettitle, _g_settitle)
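
    # Usage sketch (illustrative): ``_v_title`` simply proxies the ``TITLE``
    # HDF5 attribute, so for a hypothetical open ``node`` both forms below
    # have the same effect:
    #
    #   node._v_title = 'Particle data'
    #   node._v_attrs.TITLE = 'Particle data'
    #   node._v_title   # -> 'Particle data' (or '' when the attribute is unset)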

    # This may be looked up by ``__del__`` when ``__init__`` doesn't get
    # to be called.  See ticket #144 for more info.
    _v_isopen = False
    """Whether this node is open or not."""


    # The ``_log`` argument is only meant to be used by ``_g_copy_as_child()``
    # to avoid logging the creation of children nodes of a copied sub-tree.
    def __init__(self, parentnode, name, _log=True):
        # Remember to assign these values in the root group constructor
        # as it does not use this method implementation!

        # if the parent node is a softlink, dereference it
        if isinstance(parentnode, class_name_dict['SoftLink']):
            parentnode = parentnode.dereference()

        self._v_file = None
        """The hosting File instance (see :ref:`FileClassDescr`)."""

        self._v_isopen = False
        """Whether this node is open or not."""

        self._v_pathname = None
        """The path of this node in the tree (a string)."""

        self._v_name = None
        """The name of this node in its parent group (a string)."""

        self._v_depth = None
        """The depth of this node in the tree (a non-negative integer value).
        """

        self._v_maxtreedepth = parentnode._v_file.params['MAX_TREE_DEPTH']
        """Maximum tree depth before warning the user.

        .. versionchanged:: 3.0
           Renamed into *_v_maxtreedepth* from *_v_maxTreeDepth*.

        """

        self._v__deleting = False
        """Is the node being deleted?"""

        self._v_objectid = None
        """A node identifier (may change from run to run).

        .. versionchanged:: 3.0
           The *_v_objectID* attribute has been renamed into *_v_objectid*.

        """

        validate = new = self._v_new  # set by subclass constructor

        # Is the parent node a group?  Is it open?
        self._g_check_group(parentnode)
        parentnode._g_check_open()
        file_ = parentnode._v_file

        # Will the file be able to host a new node?
        if new:
            file_._check_writable()

        # Bind to the parent node and set location-dependent information.
        if new:
            # Only new nodes need to be referenced.
            # Opened nodes are already known by their parent group.
            parentnode._g_refnode(self, name, validate)
        self._g_set_location(parentnode, name)

        try:
            # hdf5extension operations:
            #   Update node attributes.
            self._g_new(parentnode, name, init=True)
            #   Create or open the node and get its object ID.
            if new:
                self._v_objectid = self._g_create()
            else:
                self._v_objectid = self._g_open()

            # The node *has* been created, log that.
            if new and _log and file_.is_undo_enabled():
                self._g_log_create()

            # This allows extra operations after creating the node.
            self._g_post_init_hook()
        except:
            # If anything happens, the node must be closed
            # to undo every possible registration made so far.
            # We do *not* rely on ``__del__()`` doing it later,
            # since it might never be called anyway.
            self._f_close()
            raise

    def _g_log_create(self):
        self._v_file._log('CREATE', self._v_pathname)


    def __del__(self):
        # Closed `Node` instances can not be killed and revived.
        # Instead, accessing a closed and deleted (from memory, not
        # disk) one yields a *new*, open `Node` instance.  This is
        # because of two reasons:
        #
        # 1. Predictability.  After closing a `Node` and deleting it,
        #    only one thing can happen when accessing it again: a new,
        #    open `Node` instance is returned.  If closed nodes could be
        #    revived, one could get either a closed or an open `Node`.
        #
        # 2. Ease of use.  If the user wants to access a closed node
        #    again, the only condition would be that no references to
        #    the `Node` instance were left.  If closed nodes could be
        #    revived, the user would also need to force the closed
        #    `Node` out of memory, which is not a trivial task.
        #

        if not self._v_isopen:
            return  # the node is already closed or not initialized

        self._v__deleting = True

        # If we get here, the `Node` is still open.
        try:
            node_manager = self._v_file._node_manager
            node_manager.drop_node(self, check_unregistered=False)
        finally:
            # At this point the node can still be open if there is still some
            # alive reference around (e.g. if the __del__ method is called
            # explicitly by the user).
            if self._v_isopen:
                self._v__deleting = True
                self._f_close()

    def _g_pre_kill_hook(self):
        """Code to be called before killing the node."""
        pass


    def _g_create(self):
        """Create a new HDF5 node and return its object identifier."""
        raise NotImplementedError

    def _g_open(self):
        """Open an existing HDF5 node and return its object identifier."""
        raise NotImplementedError

    def _g_check_open(self):
        """Check that the node is open.

        If the node is closed, a `ClosedNodeError` is raised.

        """

        if not self._v_isopen:
            raise ClosedNodeError("the node object is closed")
        assert self._v_file.isopen, "found an open node in a closed file"


    def _g_set_location(self, parentnode, name):
        """Set location-dependent attributes.

        Sets the location-dependent attributes of this node to reflect
        that it is placed under the specified `parentnode`, with the
        specified `name`.

        This also triggers the insertion of file references to this
        node.  If the maximum recommended tree depth is exceeded, a
        `PerformanceWarning` is issued.

        """

        file_ = parentnode._v_file
        parentdepth = parentnode._v_depth

        self._v_file = file_
        self._v_isopen = True

        root_uep = file_.root_uep
        if name.startswith(root_uep):
            # This has been called from File._get_node()
            assert parentdepth == 0
            if root_uep == "/":
                self._v_pathname = name
            else:
                self._v_pathname = name[len(root_uep):]
            _, self._v_name = split_path(name)
            self._v_depth = name.count("/") - root_uep.count("/") + 1
        else:
            # If we enter here, it is because this has been called from elsewhere.
            self._v_name = name
            self._v_pathname = join_path(parentnode._v_pathname, name)
            self._v_depth = parentdepth + 1

        # Check if the node is too deep in the tree.
        if parentdepth >= self._v_maxtreedepth:
            warnings.warn("""\
node ``%s`` is exceeding the recommended maximum depth (%d); \
be ready to see PyTables asking for *lots* of memory and possibly slow I/O"""
                          % (self._v_pathname, self._v_maxtreedepth),
                          PerformanceWarning)

        if self._v_pathname != '/':
            file_._node_manager.cache_node(self, self._v_pathname)


    def _g_update_location(self, newparentpath):
        """Update location-dependent attributes.

        Updates location data when an ancestor node has changed its
        location in the hierarchy to `newparentpath`.  In fact, this
        method is expected to be called by an ancestor of this node.

        This also triggers the update of file references to this node.
        If the maximum recommended node depth is exceeded, a
        `PerformanceWarning` is issued.  This warning is assured to be
        unique.

        """

        oldpath = self._v_pathname
        newpath = join_path(newparentpath, self._v_name)
        newdepth = newpath.count('/')

        self._v_pathname = newpath
        self._v_depth = newdepth

        # Check if the node is too deep in the tree.
        if newdepth > self._v_maxtreedepth:
            warnings.warn("""\
moved descendant node is exceeding the recommended maximum depth (%d); \
be ready to see PyTables asking for *lots* of memory and possibly slow I/O"""
                          % (self._v_maxtreedepth,), PerformanceWarning)

        node_manager = self._v_file._node_manager
        node_manager.rename_node(oldpath, newpath)

        # Tell dependent objects about the new location of this node.
        self._g_update_dependent()


    def _g_del_location(self):
        """Clear location-dependent attributes.

        This also triggers the removal of file references to this node.

        """

        node_manager = self._v_file._node_manager
        pathname = self._v_pathname

        if not self._v__deleting:
            node_manager.drop_from_cache(pathname)
            # Note: node_manager.drop_node does not remove the node from the
            # registry if it is still open
            node_manager.registry.pop(pathname, None)

        self._v_file = None
        self._v_isopen = False
        self._v_pathname = None
        self._v_name = None
        self._v_depth = None


    def _g_post_init_hook(self):
        """Code to be run after node creation and before creation logging."""
        pass


    def _g_update_dependent(self):
        """Update dependent objects after a location change.

        All dependent objects (but not nodes!) referencing this node
        must be updated here.

        """

        if '_v_attrs' in self.__dict__:
            self._v_attrs._g_update_node_location(self)


    def _f_close(self):
        """Close this node in the tree.

        This releases all resources held by the node, so it should not
        be used again.  On nodes with data, the data may be flushed to disk.

        You should not need to close nodes manually because they are
        automatically opened/closed when they are loaded/evicted from
        the integrated LRU cache.

        """

        # After calling ``_f_close()``, two conditions are met:
        #
        #   1. The node object is detached from the tree.
        #   2. *Every* attribute of the node is removed.
        #
        # Thus, cleanup operations used in ``_f_close()`` in sub-classes
        # must be run *before* calling the method in the superclass.

        if not self._v_isopen:
            return  # the node is already closed

        myDict = self.__dict__

        # Close the associated `AttributeSet`
        # only if it has already been placed in the object's dictionary.
        if '_v_attrs' in myDict:
            self._v_attrs._g_close()

        # Detach the node from the tree if necessary.
        self._g_del_location()

        # Finally, clear all remaining attributes from the object.
        myDict.clear()

        # Just add a final flag to signal that the node is closed:
        self._v_isopen = False
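
    # Usage sketch (illustrative): nodes are normally closed for you when they
    # are evicted from the cache, but closing one explicitly is allowed; any
    # later operation on the closed node raises ``ClosedNodeError``
    # (``node`` is hypothetical):
    #
    #   node._f_close()
    #   node._v_isopen          # -> False
    #   node._f_rename('new')   # raises ClosedNodeError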

    def _g_remove(self, recursive, force):
        """Remove this node from the hierarchy.

        If the node has children, recursive removal must be stated by
        giving `recursive` a true value; otherwise, a `NodeError` will
        be raised.

        If `force` is set to true, the node will be removed whether it
        has children or not (useful for deleting hard links).

        It does not log the change.

        """

        # Remove the node from the PyTables hierarchy.
        parent = self._v_parent
        parent._g_unrefnode(self._v_name)
        # Close the node itself.
        self._f_close()
        # hdf5extension operations:
        # Remove the node from the HDF5 hierarchy.
        self._g_delete(parent)

    def _f_remove(self, recursive=False, force=False):
        """Remove this node from the hierarchy.

        If the node has children, recursive removal must be stated by giving
        recursive a true value; otherwise, a NodeError will be raised.

        If the node is a link to a Group object, and you are sure that you want
        to delete it, you can do this by setting the force flag to true.

        """

        self._g_check_open()
        file_ = self._v_file
        file_._check_writable()

        if file_.is_undo_enabled():
            self._g_remove_and_log(recursive, force)
        else:
            self._g_remove(recursive, force)
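
    # Usage sketch (illustrative; ``h5f`` and the child names are
    # hypothetical):
    #
    #   h5f.root.scratch._f_remove()                 # a leaf node
    #   h5f.root.results._f_remove(recursive=True)   # a group with children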

    def _g_remove_and_log(self, recursive, force):
        file_ = self._v_file
        oldpathname = self._v_pathname
        # Log *before* moving to use the right shadow name.
        file_._log('REMOVE', oldpathname)
        move_to_shadow(file_, oldpathname)


    def _g_move(self, newparent, newname):
        """Move this node in the hierarchy.

        Moves the node into the given `newparent`, with the given
        `newname`.

        It does not log the change.

        """

        oldparent = self._v_parent
        oldname = self._v_name
        oldpathname = self._v_pathname  # to move the HDF5 node

        # Try to insert the node into the new parent.
        newparent._g_refnode(self, newname)
        # Remove the node from the old parent.
        oldparent._g_unrefnode(oldname)

        # Remove location information for this node.
        self._g_del_location()
        # Set new location information for this node.
        self._g_set_location(newparent, newname)

        # hdf5extension operations:
        #   Update node attributes.
        self._g_new(newparent, self._v_name, init=False)
        #   Move the node.
        # self._v_parent._g_move_node(oldpathname, self._v_pathname)
        self._v_parent._g_move_node(oldparent._v_objectid, oldname,
                                    newparent._v_objectid, newname,
                                    oldpathname, self._v_pathname)

        # Tell dependent objects about the new location of this node.
        self._g_update_dependent()

    def _f_rename(self, newname, overwrite=False):
        """Rename this node in place.

        Changes the name of a node to *newname* (a string).  If a node with the
        same newname already exists and overwrite is true, it is recursively
        removed before renaming.

        """

        self._f_move(newname=newname, overwrite=overwrite)
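
    # Usage sketch (illustrative; ``h5f`` and the paths are hypothetical):
    #
    #   node = h5f.get_node('/old_name')
    #   node._f_rename('new_name')
    #   node._v_pathname   # -> '/new_name'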

    def _f_move(self, newparent=None, newname=None,
                overwrite=False, createparents=False):
        """Move or rename this node.

        Moves a node into a new parent group, or changes the name of the
        node. newparent can be a Group object (see :ref:`GroupClassDescr`) or a
        pathname in string form. If it is not specified or None, the current
        parent group is chosen as the new parent.  newname must be a string
        with a new name. If it is not specified or None, the current name is
        chosen as the new name. If createparents is true, the intermediate
        groups required for the new parent group path will be created.

        Moving a node across databases is not allowed, nor is moving a node
        *into* itself. These result in a NodeError. However, moving a node
        *over* itself is allowed and simply does nothing. Moving over another
        existing node is similarly not allowed, unless the optional overwrite
        argument is true, in which case that node is recursively removed before
        moving.

        Usually, only the first argument will be used, effectively moving the
        node to a new location without changing its name.  Using only the
        second argument is equivalent to renaming the node in place.

        """

        self._g_check_open()
        file_ = self._v_file
        oldparent = self._v_parent
        oldname = self._v_name

        # Set default arguments.
        if newparent is None and newname is None:
            raise NodeError("you should specify at least "
                            "a ``newparent`` or a ``newname`` parameter")
        if newparent is None:
            newparent = oldparent
        if newname is None:
            newname = oldname

        # Get destination location.
        if hasattr(newparent, '_v_file'):  # from node
            newfile = newparent._v_file
            newpath = newparent._v_pathname
        elif hasattr(newparent, 'startswith'):  # from path
            newfile = file_
            newpath = newparent
        else:
            raise TypeError("new parent is not a node nor a path: %r"
                            % (newparent,))

        # Validity checks on arguments.
        # Is it in the same file?
        if newfile is not file_:
            raise NodeError("nodes can not be moved across databases; "
                            "please make a copy of the node")

        # The movement always fails if the hosting file can not be modified.
        file_._check_writable()

        # Moving over itself?
        oldpath = oldparent._v_pathname
        if newpath == oldpath and newname == oldname:
            # This is equivalent to renaming the node to its current name,
            # and it does not change the referenced object,
            # so it is an allowed no-op.
            return

        # Moving into itself?
        self._g_check_not_contains(newpath)

        # Note that the previous checks allow us to go ahead and create
        # the parent groups if `createparents` is true.  `newparent` is
        # used instead of `newpath` to avoid accepting `Node` objects
        # when `createparents` is true.
        newparent = file_._get_or_create_path(newparent, createparents)
        self._g_check_group(newparent)  # Is it a group?

        # Moving over an existing node?
        self._g_maybe_remove(newparent, newname, overwrite)

        # Move the node.
        oldpathname = self._v_pathname
        self._g_move(newparent, newname)

        # Log the change.
        if file_.is_undo_enabled():
            self._g_log_move(oldpathname)
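
    # Usage sketch (illustrative; the destination paths are hypothetical):
    #
    #   node._f_move('/results/2024', createparents=True)          # keep name
    #   node._f_move('/archive', newname='run_1', overwrite=True)  # move and rename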

    def _g_log_move(self, oldpathname):
        self._v_file._log('MOVE', oldpathname, self._v_pathname)


    def _g_copy(self, newparent, newname, recursive, _log=True, **kwargs):
        """Copy this node and return the new one.

        Creates and returns a copy of the node in the given `newparent`,
        with the given `newname`.  If `recursive` copy is stated, all
        descendants are copied as well.  Additional keyword arguments may
        affect the way that the copy is made.  Unknown arguments must be
        ignored.  On recursive copies, all keyword arguments must be
        passed on to the children invocation of this method.

        If `_log` is false, the change is not logged.  This is *only*
        intended to be used by ``_g_copy_as_child()`` as a means of
        optimising sub-tree copies.

        """

        raise NotImplementedError

    def _g_copy_as_child(self, newparent, **kwargs):
        """Copy this node as a child of another group.

        Copies just this node into `newparent`, not recursing children
        nor overwriting nodes nor logging the copy.  This is intended to
        be used when copying whole sub-trees.

        """

        return self._g_copy(newparent, self._v_name,
                            recursive=False, _log=False, **kwargs)


    def _f_copy(self, newparent=None, newname=None,
                overwrite=False, recursive=False, createparents=False,
                **kwargs):
        """Copy this node and return the new node.

        Creates and returns a copy of the node, maybe in a different place in
        the hierarchy. newparent can be a Group object (see
        :ref:`GroupClassDescr`) or a pathname in string form. If it is not
        specified or None, the current parent group is chosen as the new
        parent.  newname must be a string with a new name. If it is not
        specified or None, the current name is chosen as the new name. If
        recursive copy is stated, all descendants are copied as well. If
        createparents is true, the intermediate groups required for the new
        parent group path will be created.

        Copying a node across databases is supported but can not be
        undone. Copying a node over itself is not allowed, nor is
        recursively copying a node into itself. These result in a
        NodeError. Copying over another existing node is similarly not allowed,
        unless the optional overwrite argument is true, in which case that node
        is recursively removed before copying.

        Additional keyword arguments may be passed to customize the copying
        process. For instance, title and filters may be changed, user
        attributes may or may not be copied, data may be sub-sampled, stats
        may be collected, etc. See the documentation for the particular node
        type.

        Using only the first argument is equivalent to copying the node to a
        new location without changing its name. Using only the second argument
        is equivalent to making a copy of the node in the same group.

        """

        self._g_check_open()
        srcfile = self._v_file
        srcparent = self._v_parent
        srcname = self._v_name

        dstparent = newparent
        dstname = newname

        # Set default arguments.
        if dstparent is None and dstname is None:
            raise NodeError("you should specify at least "
                            "a ``newparent`` or a ``newname`` parameter")
        if dstparent is None:
            dstparent = srcparent
        if dstname is None:
            dstname = srcname

        # Get destination location.
        if hasattr(dstparent, '_v_file'):  # from node
            dstfile = dstparent._v_file
            dstpath = dstparent._v_pathname
        elif hasattr(dstparent, 'startswith'):  # from path
            dstfile = srcfile
            dstpath = dstparent
        else:
            raise TypeError("new parent is not a node nor a path: %r"
                            % (dstparent,))

        # Validity checks on arguments.
        if dstfile is srcfile:
            # Copying over itself?
            srcpath = srcparent._v_pathname
            if dstpath == srcpath and dstname == srcname:
                raise NodeError(
                    "source and destination nodes are the same node: ``%s``"
                    % self._v_pathname)

            # Recursively copying into itself?
            if recursive:
                self._g_check_not_contains(dstpath)

        # Note that the previous checks allow us to go ahead and create
        # the parent groups if `createparents` is true.  `dstparent` is
        # used instead of `dstpath` because it may be in another file, and
        # to avoid accepting `Node` objects when `createparents` is
        # true.
        dstparent = srcfile._get_or_create_path(dstparent, createparents)
        self._g_check_group(dstparent)  # Is it a group?

        # Copying to another file with undo enabled?
        if dstfile is not srcfile and srcfile.is_undo_enabled():
            warnings.warn("copying across databases can not be undone "
                          "nor redone from this database",
                          UndoRedoWarning)

        # Copying over an existing node?
        self._g_maybe_remove(dstparent, dstname, overwrite)

        # Copy the node.
        # The constructor of the new node takes care of logging.
        return self._g_copy(dstparent, dstname, recursive, **kwargs)
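
    # Usage sketch (illustrative; names and destinations are hypothetical):
    #
    #   backup = node._f_copy(newname='backup')   # same group, new name
    #   archived = node._f_copy('/archive', recursive=True, createparents=True)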

    def _f_isvisible(self):
        """Is this node visible?"""

        self._g_check_open()
        return isvisiblepath(self._v_pathname)


    def _g_check_group(self, node):
        # Node must be defined in order to define a Group.
        # However, we need to know Group here.
        # Using class_name_dict avoids a circular import.
        if not isinstance(node, class_name_dict['Node']):
            raise TypeError("new parent is not a registered node: %s"
                            % node._v_pathname)
        if not isinstance(node, class_name_dict['Group']):
            raise TypeError("new parent node ``%s`` is not a group"
                            % node._v_pathname)


    def _g_check_not_contains(self, pathname):
        # The not-a-TARDIS test. ;)
        mypathname = self._v_pathname
        if (mypathname == '/'  # all nodes fall below the root group
           or pathname == mypathname
           or pathname.startswith(mypathname + '/')):
            raise NodeError("can not move or recursively copy node ``%s`` "
                            "into itself" % mypathname)


    def _g_maybe_remove(self, parent, name, overwrite):
        if name in parent:
            if not overwrite:
                raise NodeError("""\
destination group ``%s`` already has a node named ``%s``; \
you may want to use the ``overwrite`` argument""" % (parent._v_pathname, name))
            parent._f_get_child(name)._f_remove(True)


    def _g_check_name(self, name):
        """Check validity of name for this particular kind of node.

        This is invoked once the standard HDF5 and natural naming checks
        have successfully passed.

        """

        if name.startswith('_i_'):
            # This is reserved for table index groups.
            raise ValueError(
                "node name starts with reserved prefix ``_i_``: %s" % name)


    # <attribute handling>
    def _f_getattr(self, name):
        """Get a PyTables attribute from this node.

        If the named attribute does not exist, an AttributeError is
        raised.

        """

        return getattr(self._v_attrs, name)


    def _f_setattr(self, name, value):
        """Set a PyTables attribute for this node.

        If the node already has a large number of attributes, a
        PerformanceWarning is issued.

        """

        setattr(self._v_attrs, name, value)


    def _f_delattr(self, name):
        """Delete a PyTables attribute from this node.

        If the named attribute does not exist, an AttributeError is
        raised.

        """

        delattr(self._v_attrs, name)
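
    # Usage sketch (illustrative): these helpers mirror Python's built-in
    # ``setattr``/``getattr``/``delattr`` on the node's attribute set
    # (``node`` is hypothetical):
    #
    #   node._f_setattr('temperature', 25.5)
    #   node._f_getattr('temperature')   # -> 25.5
    #   node._f_delattr('temperature')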


    # </attribute handling>


class NotLoggedMixin:
    # Include this class in your inheritance tree
    # to prevent changes to instances of your class from being logged.

    _AttributeSet = NotLoggedAttributeSet

    def _g_log_create(self):
        pass


    def _g_log_move(self, oldpathname):
        pass


    def _g_remove_and_log(self, recursive, force):
        self._g_remove(recursive, force)
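
# Usage sketch (illustrative): a hypothetical group class whose creation,
# moves and removals should never be recorded in the undo/redo log.  The
# mixin must precede the node class in the bases so its overrides take
# priority:
#
#   from .group import Group   # assumed location of the Group class
#
#   class ScratchGroup(NotLoggedMixin, Group):
#       _c_classid = 'SCRATCHGROUP'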



## Local Variables:
## mode: python
## py-indent-offset: 4
## tab-width: 4
## fill-column: 72
## End:
