source: pyyaml/trunk/lib/yaml/__init__.py @ 137

Revision 137, 8.7 KB checked in by xi, 9 years ago (diff)

Refactor resolver.

Line 
1
2from error import *
3
4from reader import *
5from scanner import *
6from parser import *
7from composer import *
8from constructor import *
9
10from emitter import *
11from serializer import *
12from representer import *
13
14from resolver import *
15
16from tokens import *
17from events import *
18from nodes import *
19
20from loader import *
21from dumper import *
22
def scan(stream, Loader=Loader):
    """
    Yield the scanner tokens of a YAML stream, one at a time.
    """
    token_source = Loader(stream)
    while token_source.check_token():
        yield token_source.get_token()

def parse(stream, Loader=Loader):
    """
    Yield the parsing events of a YAML stream, one at a time.
    """
    event_source = Loader(stream)
    while event_source.check_event():
        yield event_source.get_event()

def compose(stream, Loader=Loader):
    """
    Build the representation tree for the first document
    in a YAML stream; return None if the stream holds no document.
    """
    composer = Loader(stream)
    if not composer.check_node():
        return None
    return composer.get_node()

def compose_all(stream, Loader=Loader):
    """
    Parse all YAML documents in a stream
    and produce corresponding representation trees.
    """
    loader = Loader(stream)
    # Yield one representation tree per document until the stream is exhausted.
    while loader.check_node():
        yield loader.get_node()

def load_all(stream, Loader=Loader):
    """
    Iterate over every document in a YAML stream, yielding
    the Python object each one deserializes to.
    """
    document_loader = Loader(stream)
    while document_loader.check_data():
        yield document_loader.get_data()

def load(stream, Loader=Loader):
    """
    Deserialize the first document in a YAML stream into the
    corresponding Python object; return None for an empty stream.

    NOTE: the default Loader can construct arbitrary Python objects;
    prefer safe_load for untrusted input.
    """
    loader = Loader(stream)
    if not loader.check_data():
        return None
    return loader.get_data()

def safe_load_all(stream):
    """
    Like load_all, but restricted to the basic YAML tags, so only
    plain Python data structures are produced from the documents.
    """
    return load_all(stream, Loader=SafeLoader)

def safe_load(stream):
    """
    Like load, but restricted to the basic YAML tags, so only
    plain Python data structures are produced from the document.
    """
    return load(stream, Loader=SafeLoader)

def emit(events, stream=None, Dumper=Dumper,
        canonical=None, indent=None, width=None,
        allow_unicode=None, line_break=None):
    """
    Emit a sequence of YAML parsing events into a stream.
    If stream is None, the output is collected in memory
    and returned as a string.
    """
    return_string = stream is None
    if return_string:
        # Prefer the C implementation of StringIO when it is available.
        try:
            from cStringIO import StringIO
        except ImportError:
            from StringIO import StringIO
        stream = StringIO()
    emitter = Dumper(stream, canonical=canonical, indent=indent, width=width,
            allow_unicode=allow_unicode, line_break=line_break)
    for event in events:
        emitter.emit(event)
    if return_string:
        return stream.getvalue()

def serialize_all(nodes, stream=None, Dumper=Dumper,
        canonical=None, indent=None, width=None,
        allow_unicode=None, line_break=None,
        encoding='utf-8', explicit_start=None, explicit_end=None,
        version=None, tags=None):
    """
    Serialize a sequence of representation trees into a YAML stream.
    If stream is None, the output is collected in memory
    and returned as a string.
    """
    return_string = stream is None
    if return_string:
        # Prefer the C implementation of StringIO when it is available.
        try:
            from cStringIO import StringIO
        except ImportError:
            from StringIO import StringIO
        stream = StringIO()
    serializer = Dumper(stream, canonical=canonical, indent=indent, width=width,
            allow_unicode=allow_unicode, line_break=line_break,
            encoding=encoding, version=version, tags=tags,
            explicit_start=explicit_start, explicit_end=explicit_end)
    serializer.open()
    for node in nodes:
        serializer.serialize(node)
    serializer.close()
    if return_string:
        return stream.getvalue()

def serialize(node, stream=None, Dumper=Dumper, **kwds):
    """
    Serialize a single representation tree into a YAML stream.
    If stream is None, return the produced string instead.
    Extra keyword arguments are forwarded to serialize_all.
    """
    return serialize_all((node,), stream, Dumper=Dumper, **kwds)

def dump_all(documents, stream=None, Dumper=Dumper,
        canonical=None, indent=None, width=None,
        allow_unicode=None, line_break=None,
        encoding='utf-8', explicit_start=None, explicit_end=None,
        version=None, tags=None):
    """
    Serialize a sequence of Python objects into a YAML stream.
    If stream is None, the output is collected in memory
    and returned as a string.
    """
    return_string = stream is None
    if return_string:
        # Prefer the C implementation of StringIO when it is available.
        try:
            from cStringIO import StringIO
        except ImportError:
            from StringIO import StringIO
        stream = StringIO()
    representer = Dumper(stream, canonical=canonical, indent=indent, width=width,
            allow_unicode=allow_unicode, line_break=line_break,
            encoding=encoding, version=version, tags=tags,
            explicit_start=explicit_start, explicit_end=explicit_end)
    representer.open()
    for data in documents:
        representer.represent(data)
    representer.close()
    if return_string:
        return stream.getvalue()

def dump(data, stream=None, Dumper=Dumper, **kwds):
    """
    Serialize a single Python object into a YAML stream.
    If stream is None, return the produced string instead.
    Extra keyword arguments are forwarded to dump_all.
    """
    return dump_all((data,), stream, Dumper=Dumper, **kwds)

def safe_dump_all(documents, stream=None, **kwds):
    """
    Like dump_all, but emits only basic YAML tags.
    If stream is None, return the produced string instead.
    """
    return dump_all(documents, stream, Dumper=SafeDumper, **kwds)

def safe_dump(data, stream=None, **kwds):
    """
    Like dump, but emits only basic YAML tags.
    If stream is None, return the produced string instead.
    """
    return dump_all((data,), stream, Dumper=SafeDumper, **kwds)

def add_implicit_detector(tag, regexp, first=None,
        Loader=Loader, Dumper=Dumper):
    """
    Register an implicit scalar detector on both the loader and the
    dumper: a plain scalar whose value matches regexp is assigned the
    given tag.  first is a sequence of possible initial characters
    of such a scalar, or None.
    """
    for cls in (Loader, Dumper):
        cls.add_implicit_resolver(tag, regexp, first)

def add_path_resolver(tag, path, kind=None, Loader=Loader, Dumper=Dumper):
    """
    Register a path-based resolver for the given tag on both the loader
    and the dumper.  A path is a list of keys forming a route to a node
    in the representation tree; keys may be strings, integers, or None.
    """
    for cls in (Loader, Dumper):
        cls.add_path_resolver(tag, path, kind)

def add_constructor(tag, constructor, Loader=Loader):
    """
    Register a constructor for the given tag.  A constructor is a
    function taking a Loader instance and a node, and returning the
    Python object the node represents.
    """
    Loader.add_constructor(tag, constructor)

def add_multi_constructor(tag_prefix, multi_constructor, Loader=Loader):
    """
    Register a multi-constructor for the given tag prefix.  It is
    invoked for any node whose tag starts with tag_prefix, receiving
    a Loader instance, the tag suffix, and the node, and returning
    the corresponding Python object.
    """
    Loader.add_multi_constructor(tag_prefix, multi_constructor)

def add_representer(data_type, representer, Dumper=Dumper):
    """
    Register a representer for the given type.  A representer is a
    function taking a Dumper instance and an instance of data_type,
    and returning the corresponding representation node.
    """
    Dumper.add_representer(data_type, representer)

class YAMLObjectMetaclass(type):
    """
    Metaclass for YAMLObject: as soon as a subclass that declares a
    yaml_tag is defined, its from_yaml/to_yaml hooks are registered
    with the class's loader and dumper.
    """
    def __init__(cls, name, bases, kwds):
        super(YAMLObjectMetaclass, cls).__init__(name, bases, kwds)
        # Register only classes that declare their own non-None yaml_tag;
        # .get() covers both the missing-key and the explicit-None cases.
        if kwds.get('yaml_tag') is not None:
            cls.yaml_loader.add_constructor(cls.yaml_tag, cls.from_yaml)
            cls.yaml_dumper.add_representer(cls, cls.to_yaml)

class YAMLObject(object):
    """
    An object that can dump itself to a YAML stream
    and load itself from a YAML stream.

    Subclasses set yaml_tag; the metaclass then registers
    from_yaml/to_yaml with yaml_loader/yaml_dumper automatically.
    """

    # Hooks the registration above into class creation; kept as a
    # __metaclass__ attribute (pre-Python-3 syntax used by this file).
    __metaclass__ = YAMLObjectMetaclass

    # Loader/Dumper classes used for (de)serialization; subclasses may
    # override these (e.g. with SafeLoader/SafeDumper).
    yaml_loader = Loader
    yaml_dumper = Dumper

    # The YAML tag identifying this class; None disables registration.
    yaml_tag = None
    # Flow style passed to the dumper for this class's nodes (None = auto).
    yaml_flow_style = None

    def from_yaml(cls, loader, node):
        """
        Convert a representation node to a Python object.
        """
        return loader.construct_yaml_object(node, cls)
    # classmethod() call form rather than a decorator — presumably for
    # compatibility with older Python versions; keep as-is.
    from_yaml = classmethod(from_yaml)

    def to_yaml(cls, dumper, data):
        """
        Convert a Python object to a representation node.
        """
        return dumper.represent_yaml_object(cls.yaml_tag, data, cls,
                flow_style=cls.yaml_flow_style)
    to_yaml = classmethod(to_yaml)

Note: See TracBrowser for help on using the repository browser.