from .error import *

from .tokens import *
from .events import *
from .nodes import *

from .loader import *
from .dumper import *

__version__ = '5.1'
try:
    from .cyaml import *
    __with_libyaml__ = True
except ImportError:
    __with_libyaml__ = False

import io

#------------------------------------------------------------------------------
# Warnings control
#------------------------------------------------------------------------------

# 'Global' warnings state:
_warnings_enabled = {
    'YAMLLoadWarning': True,
}

# Get or set global warnings' state
def warnings(settings=None):
    if settings is None:
        return _warnings_enabled

    if type(settings) is dict:
        for key in settings:
            if key in _warnings_enabled:
                _warnings_enabled[key] = settings[key]

# Warn when load() is called without Loader=...
class YAMLLoadWarning(RuntimeWarning):
    pass

def load_warning(method):
    if _warnings_enabled['YAMLLoadWarning'] is False:
        return

    import warnings

    message = (
        "calling yaml.%s() without Loader=... is deprecated, as the "
        "default Loader is unsafe. Please read "
        "https://msg.pyyaml.org/load for full details."
    ) % method

    warnings.warn(message, YAMLLoadWarning, stacklevel=3)

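# Usage sketch (comment only, not executed): the state above can be read back or
# changed through warnings(), e.g. to silence the load() deprecation warning:
#
#     yaml.warnings({'YAMLLoadWarning': False})
#     yaml.warnings()   # -> {'YAMLLoadWarning': False}
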
#------------------------------------------------------------------------------
def scan(stream, Loader=Loader):
    """
    Scan a YAML stream and produce scanning tokens.
    """
    loader = Loader(stream)
    try:
        while loader.check_token():
            yield loader.get_token()
    finally:
        loader.dispose()

def parse(stream, Loader=Loader):
    """
    Parse a YAML stream and produce parsing events.
    """
    loader = Loader(stream)
    try:
        while loader.check_event():
            yield loader.get_event()
    finally:
        loader.dispose()

def compose(stream, Loader=Loader):
    """
    Parse the first YAML document in a stream
    and produce the corresponding representation tree.
    """
    loader = Loader(stream)
    try:
        return loader.get_single_node()
    finally:
        loader.dispose()

def compose_all(stream, Loader=Loader):
    """
    Parse all YAML documents in a stream
    and produce corresponding representation trees.
    """
    loader = Loader(stream)
    try:
        while loader.check_node():
            yield loader.get_node()
    finally:
        loader.dispose()

def load(stream, Loader=None):
    """
    Parse the first YAML document in a stream
    and produce the corresponding Python object.
    """
    if Loader is None:
        load_warning('load')
        Loader = FullLoader

    loader = Loader(stream)
    try:
        return loader.get_single_data()
    finally:
        loader.dispose()

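# Usage sketch (comment only): passing an explicit Loader avoids the
# YAMLLoadWarning issued above, e.g.
#
#     data = yaml.load("a: 1", Loader=yaml.SafeLoader)   # -> {'a': 1}
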
def load_all(stream, Loader=None):
    """
    Parse all YAML documents in a stream
    and produce corresponding Python objects.
    """
    if Loader is None:
        load_warning('load_all')
        Loader = FullLoader

    loader = Loader(stream)
    try:
        while loader.check_data():
            yield loader.get_data()
    finally:
        loader.dispose()

def full_load(stream):
    """
    Parse the first YAML document in a stream
    and produce the corresponding Python object.

    Resolve all tags except those known to be
    unsafe on untrusted input.
    """
    return load(stream, FullLoader)

def full_load_all(stream):
    """
    Parse all YAML documents in a stream
    and produce corresponding Python objects.

    Resolve all tags except those known to be
    unsafe on untrusted input.
    """
    return load_all(stream, FullLoader)

def safe_load(stream):
    """
    Parse the first YAML document in a stream
    and produce the corresponding Python object.

    Resolve only basic YAML tags. This is known
    to be safe for untrusted input.
    """
    return load(stream, SafeLoader)

def safe_load_all(stream):
    """
    Parse all YAML documents in a stream
    and produce corresponding Python objects.

    Resolve only basic YAML tags. This is known
    to be safe for untrusted input.
    """
    return load_all(stream, SafeLoader)

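# Usage sketch (comment only):
#
#     yaml.safe_load("a: 1")                        # -> {'a': 1}
#     list(yaml.safe_load_all("a: 1\n---\nb: 2"))   # -> [{'a': 1}, {'b': 2}]
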
def unsafe_load(stream):
    """
    Parse the first YAML document in a stream
    and produce the corresponding Python object.

    Resolve all tags, even those known to be
    unsafe on untrusted input.
    """
    return load(stream, UnsafeLoader)

def unsafe_load_all(stream):
    """
    Parse all YAML documents in a stream
    and produce corresponding Python objects.

    Resolve all tags, even those known to be
    unsafe on untrusted input.
    """
    return load_all(stream, UnsafeLoader)

def emit(events, stream=None, Dumper=Dumper,
        canonical=None, indent=None, width=None,
        allow_unicode=None, line_break=None):
    """
    Emit YAML parsing events into a stream.
    If stream is None, return the produced string instead.
    """
    getvalue = None
    if stream is None:
        stream = io.StringIO()
        getvalue = stream.getvalue
    dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
            allow_unicode=allow_unicode, line_break=line_break)
    try:
        for event in events:
            dumper.emit(event)
    finally:
        dumper.dispose()
    if getvalue:
        return getvalue()

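# Usage sketch (comment only): emit() consumes the same event stream that
# parse() produces, so the two can round-trip a document, e.g.
#
#     text = yaml.emit(yaml.parse("a: 1"))
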
def serialize_all(nodes, stream=None, Dumper=Dumper,
        canonical=None, indent=None, width=None,
        allow_unicode=None, line_break=None,
        encoding=None, explicit_start=None, explicit_end=None,
        version=None, tags=None):
    """
    Serialize a sequence of representation trees into a YAML stream.
    If stream is None, return the produced string instead.
    """
    getvalue = None
    if stream is None:
        if encoding is None:
            stream = io.StringIO()
        else:
            stream = io.BytesIO()
        getvalue = stream.getvalue
    dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
            allow_unicode=allow_unicode, line_break=line_break,
            encoding=encoding, version=version, tags=tags,
            explicit_start=explicit_start, explicit_end=explicit_end)
    try:
        dumper.open()
        for node in nodes:
            dumper.serialize(node)
        dumper.close()
    finally:
        dumper.dispose()
    if getvalue:
        return getvalue()

def serialize(node, stream=None, Dumper=Dumper, **kwds):
    """
    Serialize a representation tree into a YAML stream.
    If stream is None, return the produced string instead.
    """
    return serialize_all([node], stream, Dumper=Dumper, **kwds)

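# Usage sketch (comment only): serialize() is the counterpart of compose(), so a
# representation tree can be turned back into text, e.g.
#
#     text = yaml.serialize(yaml.compose("a: 1"))
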
def dump_all(documents, stream=None, Dumper=Dumper,
        default_style=None, default_flow_style=False,
        canonical=None, indent=None, width=None,
        allow_unicode=None, line_break=None,
        encoding=None, explicit_start=None, explicit_end=None,
        version=None, tags=None, sort_keys=True):
    """
    Serialize a sequence of Python objects into a YAML stream.
    If stream is None, return the produced string instead.
    """
    getvalue = None
    if stream is None:
        if encoding is None:
            stream = io.StringIO()
        else:
            stream = io.BytesIO()
        getvalue = stream.getvalue
    dumper = Dumper(stream, default_style=default_style,
            default_flow_style=default_flow_style,
            canonical=canonical, indent=indent, width=width,
            allow_unicode=allow_unicode, line_break=line_break,
            encoding=encoding, version=version, tags=tags,
            explicit_start=explicit_start, explicit_end=explicit_end, sort_keys=sort_keys)
    try:
        dumper.open()
        for data in documents:
            dumper.represent(data)
        dumper.close()
    finally:
        dumper.dispose()
    if getvalue:
        return getvalue()

def dump(data, stream=None, Dumper=Dumper, **kwds):
    """
    Serialize a Python object into a YAML stream.
    If stream is None, return the produced string instead.
    """
    return dump_all([data], stream, Dumper=Dumper, **kwds)

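# Usage sketch (comment only): with the default Dumper, mapping keys are sorted
# unless sort_keys=False is passed, e.g.
#
#     yaml.dump({'b': 2, 'a': 1})                    # -> 'a: 1\nb: 2\n'
#     yaml.dump({'b': 2, 'a': 1}, sort_keys=False)   # -> 'b: 2\na: 1\n'
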
def safe_dump_all(documents, stream=None, **kwds):
    """
    Serialize a sequence of Python objects into a YAML stream.
    Produce only basic YAML tags.
    If stream is None, return the produced string instead.
    """
    return dump_all(documents, stream, Dumper=SafeDumper, **kwds)

def safe_dump(data, stream=None, **kwds):
    """
    Serialize a Python object into a YAML stream.
    Produce only basic YAML tags.
    If stream is None, return the produced string instead.
    """
    return dump_all([data], stream, Dumper=SafeDumper, **kwds)

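# Usage sketch (comment only): safe_dump() emits only basic YAML tags, so
# arbitrary Python objects are rejected rather than serialized, e.g.
#
#     yaml.safe_dump({'a': 1})   # -> 'a: 1\n'
#     yaml.safe_dump(object())   # raises yaml.representer.RepresenterError
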
def add_implicit_resolver(tag, regexp, first=None,
        Loader=Loader, Dumper=Dumper):
    """
    Add an implicit scalar detector.
    If an implicit scalar value matches the given regexp,
    the corresponding tag is assigned to the scalar.
    first is a sequence of possible initial characters or None.
    """
    Loader.add_implicit_resolver(tag, regexp, first)
    Dumper.add_implicit_resolver(tag, regexp, first)

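# Usage sketch with hypothetical names (the '!env' tag and regexp are only an
# illustration, not part of this module): scalars shaped like ${VAR} could be
# tagged implicitly on both load and dump; a matching constructor/representer
# is still needed to handle the tag:
#
#     import re
#     yaml.add_implicit_resolver('!env', re.compile(r'^\$\{[^}]+\}$'), first=['$'])
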
def add_path_resolver(tag, path, kind=None, Loader=Loader, Dumper=Dumper):
    """
    Add a path based resolver for the given tag.
    A path is a list of keys that forms a path
    to a node in the representation tree.
    Keys can be string values, integers, or None.
    """
    Loader.add_path_resolver(tag, path, kind)
    Dumper.add_path_resolver(tag, path, kind)

def add_constructor(tag, constructor, Loader=Loader):
    """
    Add a constructor for the given tag.
    Constructor is a function that accepts a Loader instance
    and a node object and produces the corresponding Python object.
    """
    Loader.add_constructor(tag, constructor)

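# Usage sketch with a hypothetical '!upper' tag (illustration only):
#
#     yaml.add_constructor('!upper',
#             lambda loader, node: loader.construct_scalar(node).upper())
#     yaml.load('!upper hello', Loader=yaml.Loader)   # -> 'HELLO'
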
def add_multi_constructor(tag_prefix, multi_constructor, Loader=Loader):
    """
    Add a multi-constructor for the given tag prefix.
    Multi-constructor is called for a node if its tag starts with tag_prefix.
    Multi-constructor accepts a Loader instance, a tag suffix,
    and a node object and produces the corresponding Python object.
    """
    Loader.add_multi_constructor(tag_prefix, multi_constructor)

def add_representer(data_type, representer, Dumper=Dumper):
    """
    Add a representer for the given type.
    Representer is a function accepting a Dumper instance
    and an instance of the given data type
    and producing the corresponding representation node.
    """
    Dumper.add_representer(data_type, representer)

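# Usage sketch with a hypothetical '!complex' tag (illustration only):
#
#     yaml.add_representer(complex,
#             lambda dumper, data: dumper.represent_scalar('!complex', str(data)))
#     yaml.dump(1 + 2j)   # emits the scalar with the '!complex' tag
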
def add_multi_representer(data_type, multi_representer, Dumper=Dumper):
    """
    Add a representer for the given type.
    Multi-representer is a function accepting a Dumper instance
    and an instance of the given data type or subtype
    and producing the corresponding representation node.
    """
    Dumper.add_multi_representer(data_type, multi_representer)

class YAMLObjectMetaclass(type):
    """
    The metaclass for YAMLObject.
    """
    def __init__(cls, name, bases, kwds):
        super(YAMLObjectMetaclass, cls).__init__(name, bases, kwds)
        if 'yaml_tag' in kwds and kwds['yaml_tag'] is not None:
            cls.yaml_loader.add_constructor(cls.yaml_tag, cls.from_yaml)
            cls.yaml_dumper.add_representer(cls, cls.to_yaml)

class YAMLObject(metaclass=YAMLObjectMetaclass):
    """
    An object that can dump itself to a YAML stream
    and load itself from a YAML stream.
    """

    __slots__ = ()  # no direct instantiation, so allow immutable subclasses

    yaml_loader = Loader
    yaml_dumper = Dumper

    yaml_tag = None
    yaml_flow_style = None

    @classmethod
    def from_yaml(cls, loader, node):
        """
        Convert a representation node to a Python object.
        """
        return loader.construct_yaml_object(node, cls)

    @classmethod
    def to_yaml(cls, dumper, data):
        """
        Convert a Python object to a representation node.
        """
        return dumper.represent_yaml_object(cls.yaml_tag, data, cls,
                flow_style=cls.yaml_flow_style)

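# Usage sketch with a hypothetical Monster class (illustration only): setting
# yaml_tag in a subclass registers from_yaml()/to_yaml() on yaml_loader and
# yaml_dumper via the metaclass above, so tagged documents load straight into
# instances:
#
#     class Monster(yaml.YAMLObject):
#         yaml_tag = '!Monster'
#         yaml_loader = yaml.SafeLoader
#         yaml_dumper = yaml.SafeDumper
#
#         def __init__(self, name, hp):
#             self.name = name
#             self.hp = hp
#
#     yaml.safe_load('!Monster {name: Dragon, hp: 16}')   # -> a Monster instance
#     yaml.safe_dump(Monster('Dragon', 16))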