source: pyyaml/trunk/lib/yaml/__init__.py @ 146

Revision 146, 8.5 KB checked in by xi, 8 years ago

Fix !!python/name for Python 2.3. Clear the yaml module namespace.

from error import *

from tokens import *
from events import *
from nodes import *

from loader import *
from dumper import *

def scan(stream, Loader=Loader):
    """
    Scan a YAML stream and produce scanning tokens.
    """
    loader = Loader(stream)
    while loader.check_token():
        yield loader.get_token()

def parse(stream, Loader=Loader):
    """
    Parse a YAML stream and produce parsing events.
    """
    loader = Loader(stream)
    while loader.check_event():
        yield loader.get_event()

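# Usage sketch (not part of the original module): scan() exposes the raw token
# stream and parse() the event stream, which is mainly useful for tools that
# inspect YAML structure without building Python objects.  Assuming the
# package is importable as `yaml` (Python 2, as in this revision):
#
#     import yaml
#     for token in yaml.scan("- one\n- two\n"):
#         print token      # e.g. BlockSequenceStartToken(), ScalarToken(...)
#     for event in yaml.parse("- one\n- two\n"):
#         print event      # e.g. StreamStartEvent(), SequenceStartEvent(...)
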
def compose(stream, Loader=Loader):
    """
    Parse the first YAML document in a stream
    and produce the corresponding representation tree.
    """
    loader = Loader(stream)
    if loader.check_node():
        return loader.get_node()

def compose_all(stream, Loader=Loader):
    """
    Parse all YAML documents in a stream
    and produce corresponding representation trees.
    """
    loader = Loader(stream)
    while loader.check_node():
        yield loader.get_node()

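# Usage sketch (not part of the original module): compose() stops one level
# below load() and returns the representation graph (nodes from nodes.py)
# rather than native Python objects.  Assuming the package is importable as
# `yaml`:
#
#     import yaml
#     node = yaml.compose("a: 1\nb: 2\n")
#     print node.tag      # the resolved tag of the root node
#     print node.value    # child nodes (layout depends on the node kind)
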
def load_all(stream, Loader=Loader):
    """
    Parse all YAML documents in a stream
    and produce corresponding Python objects.
    """
    loader = Loader(stream)
    while loader.check_data():
        yield loader.get_data()

def load(stream, Loader=Loader):
    """
    Parse the first YAML document in a stream
    and produce the corresponding Python object.
    """
    loader = Loader(stream)
    if loader.check_data():
        return loader.get_data()

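# Usage sketch (not part of the original module): load() and load_all() are
# the high-level entry points; load_all() is a generator over the documents
# of a multi-document stream.  Assuming the package is importable as `yaml`:
#
#     import yaml
#     data = yaml.load("a: 1\nb: [2, 3]\n")      # {'a': 1, 'b': [2, 3]}
#     for document in yaml.load_all("---\none\n---\ntwo\n"):
#         print document                         # 'one', then 'two'
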
def safe_load_all(stream):
    """
    Parse all YAML documents in a stream
    and produce corresponding Python objects.
    Resolve only basic YAML tags.
    """
    return load_all(stream, SafeLoader)

def safe_load(stream):
    """
    Parse the first YAML document in a stream
    and produce the corresponding Python object.
    Resolve only basic YAML tags.
    """
    return load(stream, SafeLoader)

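# Usage sketch (not part of the original module): safe_load() restricts
# construction to basic YAML tags, so Python-specific tags such as
# !!python/object are rejected rather than instantiated.  Assuming the
# package is importable as `yaml`:
#
#     import yaml
#     config = yaml.safe_load("port: 8080\nhosts: [a, b]\n")
#     # only plain dicts, lists, and scalars come back
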
def emit(events, stream=None, Dumper=Dumper,
        canonical=None, indent=None, width=None,
        allow_unicode=None, line_break=None):
    """
    Emit YAML parsing events into a stream.
    If stream is None, return the produced string instead.
    """
    getvalue = None
    if stream is None:
        try:
            from cStringIO import StringIO
        except ImportError:
            from StringIO import StringIO
        stream = StringIO()
        getvalue = stream.getvalue
    dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
            allow_unicode=allow_unicode, line_break=line_break)
    for event in events:
        dumper.emit(event)
    if getvalue:
        return getvalue()

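# Usage sketch (not part of the original module): emit() is the inverse of
# parse(), so feeding parse() events straight back into emit() gives a simple
# reformatting round trip.  Assuming the package is importable as `yaml`:
#
#     import yaml
#     text = yaml.emit(yaml.parse("{a: 1, b: 2}\n"), canonical=True)
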
def serialize_all(nodes, stream=None, Dumper=Dumper,
        canonical=None, indent=None, width=None,
        allow_unicode=None, line_break=None,
        encoding='utf-8', explicit_start=None, explicit_end=None,
        version=None, tags=None):
    """
    Serialize a sequence of representation trees into a YAML stream.
    If stream is None, return the produced string instead.
    """
    getvalue = None
    if stream is None:
        try:
            from cStringIO import StringIO
        except ImportError:
            from StringIO import StringIO
        stream = StringIO()
        getvalue = stream.getvalue
    dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
            allow_unicode=allow_unicode, line_break=line_break,
            encoding=encoding, version=version, tags=tags,
            explicit_start=explicit_start, explicit_end=explicit_end)
    dumper.open()
    for node in nodes:
        dumper.serialize(node)
    dumper.close()
    if getvalue:
        return getvalue()

def serialize(node, stream=None, Dumper=Dumper, **kwds):
    """
    Serialize a representation tree into a YAML stream.
    If stream is None, return the produced string instead.
    """
    return serialize_all([node], stream, Dumper=Dumper, **kwds)

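# Usage sketch (not part of the original module): serialize() is the inverse
# of compose(); a compose()/serialize() round trip rewrites a document from
# its representation tree without constructing Python objects.  Assuming the
# package is importable as `yaml`:
#
#     import yaml
#     node = yaml.compose("a: 1\n")
#     text = yaml.serialize(node, explicit_start=True)
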
def dump_all(documents, stream=None, Dumper=Dumper,
        canonical=None, indent=None, width=None,
        allow_unicode=None, line_break=None,
        encoding='utf-8', explicit_start=None, explicit_end=None,
        version=None, tags=None):
    """
    Serialize a sequence of Python objects into a YAML stream.
    If stream is None, return the produced string instead.
    """
    getvalue = None
    if stream is None:
        try:
            from cStringIO import StringIO
        except ImportError:
            from StringIO import StringIO
        stream = StringIO()
        getvalue = stream.getvalue
    dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
            allow_unicode=allow_unicode, line_break=line_break,
            encoding=encoding, version=version, tags=tags,
            explicit_start=explicit_start, explicit_end=explicit_end)
    dumper.open()
    for data in documents:
        dumper.represent(data)
    dumper.close()
    if getvalue:
        return getvalue()

def dump(data, stream=None, Dumper=Dumper, **kwds):
    """
    Serialize a Python object into a YAML stream.
    If stream is None, return the produced string instead.
    """
    return dump_all([data], stream, Dumper=Dumper, **kwds)

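# Usage sketch (not part of the original module): dump() and dump_all()
# mirror load() and load_all().  With stream=None (the default) the produced
# YAML text is returned, encoded with the given encoding.  Assuming the
# package is importable as `yaml`:
#
#     import yaml
#     print yaml.dump({'a': 1, 'b': [2, 3]})
#     print yaml.dump_all([{'a': 1}, {'b': 2}], explicit_start=True)
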
def safe_dump_all(documents, stream=None, **kwds):
    """
    Serialize a sequence of Python objects into a YAML stream.
    Produce only basic YAML tags.
    If stream is None, return the produced string instead.
    """
    return dump_all(documents, stream, Dumper=SafeDumper, **kwds)

def safe_dump(data, stream=None, **kwds):
    """
    Serialize a Python object into a YAML stream.
    Produce only basic YAML tags.
    If stream is None, return the produced string instead.
    """
    return dump_all([data], stream, Dumper=SafeDumper, **kwds)

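# Usage sketch (not part of the original module): safe_dump() refuses to emit
# Python-specific tags, so arbitrary objects raise an error instead of being
# written with !!python/... tags.  Assuming the package is importable as
# `yaml`:
#
#     import yaml
#     print yaml.safe_dump({'name': 'web', 'replicas': 3})
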
def add_implicit_detector(tag, regexp, first=None,
        Loader=Loader, Dumper=Dumper):
    """
    Add an implicit scalar detector.
    If an implicit scalar value matches the given regexp,
    the corresponding tag is assigned to the scalar.
    first is a sequence of possible initial characters or None.
    """
    Loader.add_implicit_resolver(tag, regexp, first)
    Dumper.add_implicit_resolver(tag, regexp, first)

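# Usage sketch (not part of the original module): the tag `!semver` and its
# pattern are hypothetical.  Plain (untagged) scalars matching the regexp are
# resolved to the given tag on both the Loader and Dumper side; `first`
# limits the check to scalars starting with one of those characters.
#
#     import re, yaml
#     yaml.add_implicit_detector(u'!semver',
#             re.compile(r'^\d+\.\d+\.\d+$'), first='0123456789')
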
def add_path_resolver(tag, path, kind=None, Loader=Loader, Dumper=Dumper):
    """
    Add a path based resolver for the given tag.
    A path is a list of keys that forms a path
    to a node in the representation tree.
    Keys can be string values, integers, or None.
    """
    Loader.add_path_resolver(tag, path, kind)
    Dumper.add_path_resolver(tag, path, kind)

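# Usage sketch (not part of the original module): the tag `!Coordinate` and
# the path below are hypothetical, and the accepted `kind` values are an
# assumption (a Python type such as dict, list, or str selecting the node
# kind).  This would tag every mapping reached through a top-level `points`
# key (None matching any index under it):
#
#     import yaml
#     yaml.add_path_resolver(u'!Coordinate', ['points', None], dict)
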
def add_constructor(tag, constructor, Loader=Loader):
    """
    Add a constructor for the given tag.
    Constructor is a function that accepts a Loader instance
    and a node object and produces the corresponding Python object.
    """
    Loader.add_constructor(tag, constructor)

def add_multi_constructor(tag_prefix, multi_constructor, Loader=Loader):
    """
    Add a multi-constructor for the given tag prefix.
    Multi-constructor is called for a node if its tag starts with tag_prefix.
    Multi-constructor accepts a Loader instance, a tag suffix,
    and a node object and produces the corresponding Python object.
    """
    Loader.add_multi_constructor(tag_prefix, multi_constructor)

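# Usage sketch (not part of the original module): the tags and helpers below
# are hypothetical; construct_scalar() is assumed to be the scalar helper of
# the Loader's constructor.  Paired with the `!semver` detector above, a
# plain `1.2.3` would be constructed as a tuple of integers:
#
#     import yaml
#     def construct_semver(loader, node):
#         return tuple(map(int, loader.construct_scalar(node).split('.')))
#     yaml.add_constructor(u'!semver', construct_semver)
#
#     def construct_unit(loader, tag_suffix, node):
#         # called for any tag starting with `!unit/`, e.g. `!unit/seconds`
#         return (tag_suffix, loader.construct_scalar(node))
#     yaml.add_multi_constructor(u'!unit/', construct_unit)
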
def add_representer(data_type, representer, Dumper=Dumper):
    """
    Add a representer for the given type.
    Representer is a function accepting a Dumper instance
    and an instance of the given data type
    and producing the corresponding representation node.
    """
    Dumper.add_representer(data_type, representer)

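# Usage sketch (not part of the original module): the Version class and tag
# are hypothetical; represent_scalar() is assumed to be the scalar helper of
# the Dumper's representer.
#
#     import yaml
#     class Version(tuple):
#         pass
#     def represent_version(dumper, data):
#         return dumper.represent_scalar(u'!semver',
#                 u'.'.join([str(part) for part in data]))
#     yaml.add_representer(Version, represent_version)
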
class YAMLObjectMetaclass(type):
    """
    The metaclass for YAMLObject.
    """
    def __init__(cls, name, bases, kwds):
        super(YAMLObjectMetaclass, cls).__init__(name, bases, kwds)
        if 'yaml_tag' in kwds and kwds['yaml_tag'] is not None:
            cls.yaml_loader.add_constructor(cls.yaml_tag, cls.from_yaml)
            cls.yaml_dumper.add_representer(cls, cls.to_yaml)

class YAMLObject(object):
    """
    An object that can dump itself to a YAML stream
    and load itself from a YAML stream.
    """

    __metaclass__ = YAMLObjectMetaclass

    yaml_loader = Loader
    yaml_dumper = Dumper

    yaml_tag = None
    yaml_flow_style = None

    def from_yaml(cls, loader, node):
        """
        Convert a representation node to a Python object.
        """
        return loader.construct_yaml_object(node, cls)
    from_yaml = classmethod(from_yaml)

    def to_yaml(cls, dumper, data):
        """
        Convert a Python object to a representation node.
        """
        return dumper.represent_yaml_object(cls.yaml_tag, data, cls,
                flow_style=cls.yaml_flow_style)
    to_yaml = classmethod(to_yaml)
