source: pyyaml/trunk/lib/yaml/__init__.py @ 136

Revision 136, 8.2 KB, checked in by xi

Major refactoring.

from error import *

from reader import *
from scanner import *
from parser import *
from composer import *
from constructor import *

from emitter import *
from serializer import *
from representer import *

from detector import *

from tokens import *
from events import *
from nodes import *

from loader import *
from dumper import *

def scan(stream, Loader=Loader):
    """
    Scan a YAML stream and produce scanning tokens.
    """
    loader = Loader(stream)
    while loader.check_token():
        yield loader.get_token()

def parse(stream, Loader=Loader):
    """
    Parse a YAML stream and produce parsing events.
    """
    loader = Loader(stream)
    while loader.check_event():
        yield loader.get_event()

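# Usage sketch (illustrative): iterate over the low-level tokens or events
# of a document; the token and event classes come from tokens.py and
# events.py above. Assumes this package is imported as `yaml`.
#
#     import yaml
#     for token in yaml.scan("- one\n- two\n"):
#         print token
#     for event in yaml.parse("- one\n- two\n"):
#         print event
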
def compose(stream, Loader=Loader):
    """
    Parse the first YAML document in a stream
    and produce the corresponding representation tree.
    """
    loader = Loader(stream)
    if loader.check_node():
        return loader.get_node()

def compose_all(stream, Loader=Loader):
    """
    Parse all YAML documents in a stream
    and produce corresponding representation trees.
    """
    loader = Loader(stream)
    while loader.check_node():
        yield loader.get_node()

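# Usage sketch (illustrative): compose() returns the root node of the
# representation tree (a node from nodes.py) rather than a native object.
#
#     import yaml
#     node = yaml.compose("a: 1\nb: 2\n")
#     print node.tag                       # e.g. tag:yaml.org,2002:map
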
def load_all(stream, Loader=Loader):
    """
    Parse all YAML documents in a stream
    and produce corresponding Python objects.
    """
    loader = Loader(stream)
    while loader.check_data():
        yield loader.get_data()

def load(stream, Loader=Loader):
    """
    Parse the first YAML document in a stream
    and produce the corresponding Python object.
    """
    loader = Loader(stream)
    if loader.check_data():
        return loader.get_data()

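# Usage sketch (illustrative): the stream may be a string or an open file.
#
#     import yaml
#     yaml.load("a: 1\nb: [2, 3]\n")       # -> {'a': 1, 'b': [2, 3]}
#     for doc in yaml.load_all("--- 1\n--- 2\n"):
#         print doc                        # 1, then 2
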
def safe_load_all(stream):
    """
    Parse all YAML documents in a stream
    and produce corresponding Python objects.
    Resolve only basic YAML tags.
    """
    return load_all(stream, SafeLoader)

def safe_load(stream):
    """
    Parse the first YAML document in a stream
    and produce the corresponding Python object.
    Resolve only basic YAML tags.
    """
    return load(stream, SafeLoader)

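# Usage sketch (illustrative): safe_load() behaves like load() but, through
# SafeLoader, constructs only standard YAML types (strings, numbers, lists,
# maps, and so on).
#
#     import yaml
#     yaml.safe_load("a: [1, 2]\n")        # -> {'a': [1, 2]}
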
def emit(events, stream=None, Dumper=Dumper,
        canonical=None, indent=None, width=None,
        allow_unicode=None, line_break=None):
    """
    Emit YAML parsing events into a stream.
    If stream is None, return the produced string instead.
    """
    getvalue = None
    if stream is None:
        try:
            from cStringIO import StringIO
        except ImportError:
            from StringIO import StringIO
        stream = StringIO()
        getvalue = stream.getvalue
    dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
            allow_unicode=allow_unicode, line_break=line_break)
    for event in events:
        dumper.emit(event)
    if getvalue:
        return getvalue()

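# Usage sketch (illustrative, assuming the emitter accepts the parser's
# event stream unchanged): events produced by parse() can be fed back into
# emit() to re-serialize a document at the event level.
#
#     import yaml
#     text = yaml.emit(yaml.parse("- 1\n- 2\n"))
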
def serialize_all(nodes, stream=None, Dumper=Dumper,
        canonical=None, indent=None, width=None,
        allow_unicode=None, line_break=None,
        encoding='utf-8', explicit_start=None, explicit_end=None,
        version=None, tags=None):
    """
    Serialize a sequence of representation trees into a YAML stream.
    If stream is None, return the produced string instead.
    """
    getvalue = None
    if stream is None:
        try:
            from cStringIO import StringIO
        except ImportError:
            from StringIO import StringIO
        stream = StringIO()
        getvalue = stream.getvalue
    dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
            allow_unicode=allow_unicode, line_break=line_break,
            encoding=encoding, version=version, tags=tags,
            explicit_start=explicit_start, explicit_end=explicit_end)
    dumper.open()
    for node in nodes:
        dumper.serialize(node)
    dumper.close()
    if getvalue:
        return getvalue()

def serialize(node, stream=None, Dumper=Dumper, **kwds):
    """
    Serialize a representation tree into a YAML stream.
    If stream is None, return the produced string instead.
    """
    return serialize_all([node], stream, Dumper=Dumper, **kwds)

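# Usage sketch (illustrative): serialize() is the node-level counterpart of
# dump(); composing and then re-serializing round-trips a document through
# the representation tree.
#
#     import yaml
#     text = yaml.serialize(yaml.compose("a: 1\n"))
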
def dump_all(documents, stream=None, Dumper=Dumper,
        canonical=None, indent=None, width=None,
        allow_unicode=None, line_break=None,
        encoding='utf-8', explicit_start=None, explicit_end=None,
        version=None, tags=None):
    """
    Serialize a sequence of Python objects into a YAML stream.
    If stream is None, return the produced string instead.
    """
    getvalue = None
    if stream is None:
        try:
            from cStringIO import StringIO
        except ImportError:
            from StringIO import StringIO
        stream = StringIO()
        getvalue = stream.getvalue
    dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
            allow_unicode=allow_unicode, line_break=line_break,
            encoding=encoding, version=version, tags=tags,
            explicit_start=explicit_start, explicit_end=explicit_end)
    dumper.open()
    for data in documents:
        dumper.represent(data)
    dumper.close()
    if getvalue:
        return getvalue()

def dump(data, stream=None, Dumper=Dumper, **kwds):
    """
    Serialize a Python object into a YAML stream.
    If stream is None, return the produced string instead.
    """
    return dump_all([data], stream, Dumper=Dumper, **kwds)

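# Usage sketch (illustrative): with stream=None the produced text is
# returned (UTF-8 encoded, since encoding defaults to 'utf-8').
#
#     import yaml
#     print yaml.dump({'a': 1, 'b': [2, 3]})
#     print yaml.dump_all([{'a': 1}, {'b': 2}], explicit_start=True)
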
def safe_dump_all(documents, stream=None, **kwds):
    """
    Serialize a sequence of Python objects into a YAML stream.
    Produce only basic YAML tags.
    If stream is None, return the produced string instead.
    """
    return dump_all(documents, stream, Dumper=SafeDumper, **kwds)

def safe_dump(data, stream=None, **kwds):
    """
    Serialize a Python object into a YAML stream.
    Produce only basic YAML tags.
    If stream is None, return the produced string instead.
    """
    return dump_all([data], stream, Dumper=SafeDumper, **kwds)

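# Usage sketch (illustrative): safe_dump() uses SafeDumper, so only plain
# Python values that map onto basic YAML tags are emitted.
#
#     import yaml
#     print yaml.safe_dump({'a': 1})
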
def add_detector(tag, regexp, first=None, Loader=Loader, Dumper=Dumper):
    """
    Add an implicit scalar detector.
    If an implicit scalar value matches the given regexp,
    the corresponding tag is assigned to the scalar.
    first is a sequence of possible initial characters or None.
    """
    Loader.add_detector(tag, regexp, first)
    Dumper.add_detector(tag, regexp, first)

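# Usage sketch (illustrative; the '!hex' tag is made up for the example):
# plain scalars that look like 0x... would be tagged '!hex' and handed to
# whatever constructor is registered for that tag.
#
#     import re, yaml
#     yaml.add_detector(u'!hex', re.compile(r'^0x[0-9a-fA-F]+$'), list(u'0'))
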
def add_resolver(tag, path, Loader=Loader):
    """
    Add a path-based resolver for the given tag.
    A path is a list of keys that forms a path
    to a node in the representation tree.
    Keys can be string values, integers, or None.
    """
    Loader.add_resolver(tag, path)

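# Usage sketch (illustrative; the '!phone' tag and path are made up, and
# None is assumed to match any key): tag every node reached through the
# top-level 'phones' key as '!phone'.
#
#     import yaml
#     yaml.add_resolver(u'!phone', [u'phones', None])
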
def add_constructor(tag, constructor, Loader=Loader):
    """
    Add a constructor for the given tag.
    Constructor is a function that accepts a Loader instance
    and a node object and produces the corresponding Python object.
    """
    Loader.add_constructor(tag, constructor)

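# Usage sketch (illustrative; '!point' and construct_point are made up, and
# construct_sequence is assumed to be available on the loader):
#
#     import yaml
#
#     def construct_point(loader, node):
#         x, y = loader.construct_sequence(node)
#         return (x, y)
#
#     yaml.add_constructor(u'!point', construct_point)
#     yaml.load("!point [1, 2]")           # -> (1, 2)
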
def add_multi_constructor(tag_prefix, multi_constructor, Loader=Loader):
    """
    Add a multi-constructor for the given tag prefix.
    Multi-constructor is called for a node if its tag starts with tag_prefix.
    Multi-constructor accepts a Loader instance, a tag suffix,
    and a node object and produces the corresponding Python object.
    """
    Loader.add_multi_constructor(tag_prefix, multi_constructor)

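# Usage sketch (illustrative; the '!unit/' prefix and construct_unit are
# made up, and construct_scalar is assumed to be available on the loader):
#
#     import yaml
#
#     def construct_unit(loader, suffix, node):
#         return (suffix, loader.construct_scalar(node))
#
#     yaml.add_multi_constructor(u'!unit/', construct_unit)
#     yaml.load("!unit/cm 42")             # -> (u'cm', u'42')
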
class YAMLObjectMetaclass(type):
    """
    The metaclass for YAMLObject.
    """
    def __init__(cls, name, bases, kwds):
        super(YAMLObjectMetaclass, cls).__init__(name, bases, kwds)
        if 'yaml_tag' in kwds and kwds['yaml_tag'] is not None:
            cls.yaml_loader.add_constructor(cls.yaml_tag, cls.from_yaml)
            cls.yaml_dumper.add_representer(cls, cls.to_yaml)

class YAMLObject(object):
    """
    An object that can dump itself to a YAML stream
    and load itself from a YAML stream.
    """

    __metaclass__ = YAMLObjectMetaclass

    yaml_loader = Loader
    yaml_dumper = Dumper

    yaml_tag = None
    yaml_flow_style = None

    def from_yaml(cls, loader, node):
        """
        Convert a representation node to a Python object.
        """
        return loader.construct_yaml_object(node, cls)
    from_yaml = classmethod(from_yaml)

    def to_yaml(cls, dumper, data):
        """
        Convert a Python object to a representation node.
        """
        return dumper.represent_yaml_object(cls.yaml_tag, data, cls,
                flow_style=cls.yaml_flow_style)
    to_yaml = classmethod(to_yaml)

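# Usage sketch (illustrative; the Monster class and '!Monster' tag are made
# up): subclassing YAMLObject with a yaml_tag registers the class with the
# default Loader and Dumper, so tagged mappings load as instances and
# instances dump back with the same tag.
#
#     import yaml
#
#     class Monster(yaml.YAMLObject):
#         yaml_tag = u'!Monster'
#
#     m = yaml.load("!Monster {name: Dragon, hp: 40}")
#     print yaml.dump(m)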