source: pyyaml/trunk/lib/yaml/parser.py @ 222

Revision 222, 24.8 KB checked in by xi, 8 years ago (diff)

Subclass all base classes from object.

Hold references to the objects being represented (should fix #22).

The value of a mapping node is represented as a list of pairs (key, value)
now.

Sort dictionary items (fix #23).

Recursive structures are now loaded and dumped correctly, including complex
structures like recursive tuples (fix #5). Thanks Peter Murphy for the patches.
To make it possible, representer functions are allowed to be generators.
In this case, the first generated value is an object. Other values produced
by the representer are ignored.

Make Representer not try to guess !!pairs when a list is represented.
You need to construct a !!pairs node explicitly now.

Do not check for duplicate mapping keys as it didn't work correctly anyway.

RevLine 
[43]1
[198]2# The following YAML grammar is LL(1) and is parsed by a recursive descent
3# parser.
[51]4#
[118]5# stream            ::= STREAM-START implicit_document? explicit_document* STREAM-END
[198]6# implicit_document ::= block_node DOCUMENT-END*
7# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
8# block_node_or_indentless_sequence ::=
9#                       ALIAS
10#                       | properties (block_content | indentless_block_sequence)?
11#                       | block_content
12#                       | indentless_block_sequence
13# block_node        ::= ALIAS
14#                       | properties block_content?
15#                       | block_content
16# flow_node         ::= ALIAS
17#                       | properties flow_content?
18#                       | flow_content
19# properties        ::= TAG ANCHOR? | ANCHOR TAG?
[43]20# block_content     ::= block_collection | flow_collection | SCALAR
21# flow_content      ::= flow_collection | SCALAR
22# block_collection  ::= block_sequence | block_mapping
[198]23# flow_collection   ::= flow_sequence | flow_mapping
[51]24# block_sequence    ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
[198]25# indentless_sequence   ::= (BLOCK-ENTRY block_node?)+
26# block_mapping     ::= BLOCK-MAPPING_START
27#                       ((KEY block_node_or_indentless_sequence?)?
28#                       (VALUE block_node_or_indentless_sequence?)?)*
29#                       BLOCK-END
30# flow_sequence     ::= FLOW-SEQUENCE-START
31#                       (flow_sequence_entry FLOW-ENTRY)*
32#                       flow_sequence_entry?
33#                       FLOW-SEQUENCE-END
[51]34# flow_sequence_entry   ::= flow_node | KEY flow_node? (VALUE flow_node?)?
[198]35# flow_mapping      ::= FLOW-MAPPING-START
36#                       (flow_mapping_entry FLOW-ENTRY)*
37#                       flow_mapping_entry?
38#                       FLOW-MAPPING-END
[51]39# flow_mapping_entry    ::= flow_node | KEY flow_node? (VALUE flow_node?)?
[198]40#
[51]41# FIRST sets:
[198]42#
[118]43# stream: { STREAM-START }
[43]44# explicit_document: { DIRECTIVE DOCUMENT-START }
[51]45# implicit_document: FIRST(block_node)
[43]46# block_node: { ALIAS TAG ANCHOR SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START }
47# flow_node: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START }
48# block_content: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
49# flow_content: { FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
50# block_collection: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START }
51# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
52# block_sequence: { BLOCK-SEQUENCE-START }
53# block_mapping: { BLOCK-MAPPING-START }
[51]54# block_node_or_indentless_sequence: { ALIAS ANCHOR TAG SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START BLOCK-ENTRY }
[43]55# indentless_sequence: { BLOCK-ENTRY }
56# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
57# flow_sequence: { FLOW-SEQUENCE-START }
58# flow_mapping: { FLOW-MAPPING-START }
59# flow_sequence_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
60# flow_mapping_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
61
[57]62__all__ = ['Parser', 'ParserError']
63
[52]64from error import MarkedYAMLError
[46]65from tokens import *
[51]66from events import *
[136]67from scanner import *
[44]68
class ParserError(MarkedYAMLError):
    """Raised when the token stream does not form a valid YAML document."""
    pass
[44]71
class Parser(object):
    # Since writing a recursive descent parser is a straightforward task, we
    # do not give many comments here.
    # Note that we use Python generators. If you rewrite the parser in another
    # language, you may replace all 'yield'-s with event handler calls.

    # Tag handles that are always available even without %TAG directives:
    # '!' maps to itself and '!!' maps to the standard YAML tag prefix.
    DEFAULT_TAGS = {
        u'!':   u'!',
        u'!!':  u'tag:yaml.org,2002:',
    }
    def __init__(self):
        # The next event to be returned; produced lazily by check/peek/get.
        self.current_event = None
        # Version pair from a %YAML directive, or None.
        self.yaml_version = None
        # Tag handle -> prefix mapping (%TAG directives plus DEFAULT_TAGS).
        self.tag_handles = {}
        # Stack of parser states (bound methods) to resume after a node.
        self.states = []
        # Stack of start marks of the collections being parsed (for errors).
        self.marks = []
        # The current state: a bound method that produces the next event.
        self.state = self.parse_stream_start
[43]90
[136]91    def check_event(self, *choices):
[51]92        # Check the type of the next event.
93        if self.current_event is None:
[198]94            if self.state:
95                self.current_event = self.state()
[51]96        if self.current_event is not None:
[136]97            if not choices:
98                return True
[51]99            for choice in choices:
100                if isinstance(self.current_event, choice):
101                    return True
[44]102        return False
103
[136]104    def peek_event(self):
[53]105        # Get the next event.
106        if self.current_event is None:
[198]107            if self.state:
108                self.current_event = self.state()
[53]109        return self.current_event
110
[136]111    def get_event(self):
[198]112        # Get the next event and proceed further.
[51]113        if self.current_event is None:
[198]114            if self.state:
115                self.current_event = self.state()
[51]116        value = self.current_event
117        self.current_event = None
118        return value
[44]119
[198]120    # stream    ::= STREAM-START implicit_document? explicit_document* STREAM-END
121    # implicit_document ::= block_node DOCUMENT-END*
122    # explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
[44]123
[198]124    def parse_stream_start(self):
[51]125
[198]126        # Parse the stream start.
[136]127        token = self.get_token()
[198]128        event = StreamStartEvent(token.start_mark, token.end_mark,
[130]129                encoding=token.encoding)
[118]130
[198]131        # Prepare the next state.
132        self.state = self.parse_implicit_document_start
133
134        return event
135
136    def parse_implicit_document_start(self):
137
138        # Parse an implicit document.
[136]139        if not self.check_token(DirectiveToken, DocumentStartToken,
[51]140                StreamEndToken):
141            self.tag_handles = self.DEFAULT_TAGS
[136]142            token = self.peek_token()
[118]143            start_mark = end_mark = token.start_mark
[198]144            event = DocumentStartEvent(start_mark, end_mark,
[132]145                    explicit=False)
[51]146
[198]147            # Prepare the next state.
148            self.states.append(self.parse_document_end)
149            self.state = self.parse_block_node
150
151            return event
152
153        else:
154            return self.parse_document_start()
155
    def parse_document_start(self):
        # Parse an explicit document, or the end of the stream.
        if not self.check_token(StreamEndToken):
            token = self.peek_token()
            start_mark = token.start_mark
            # Collect any %YAML/%TAG directives preceding the document.
            version, tags = self.process_directives()
            # An explicit document must begin with '---'.
            if not self.check_token(DocumentStartToken):
                raise ParserError(None, None,
                        "expected '<document start>', but found %r"
                        % self.peek_token().id,
                        self.peek_token().start_mark)
            token = self.get_token()
            end_mark = token.end_mark
            event = DocumentStartEvent(start_mark, end_mark,
                    explicit=True, version=version, tags=tags)
            self.states.append(self.parse_document_end)
            self.state = self.parse_document_content
        else:
            # Parse the end of the stream.
            token = self.get_token()
            event = StreamEndEvent(token.start_mark, token.end_mark)
            # Every collection and pending state must be closed by now.
            assert not self.states
            assert not self.marks
            self.state = None
        return event
[43]182
[198]183    def parse_document_end(self):
[51]184
[198]185        # Parse the document end.
186        token = self.peek_token()
187        start_mark = end_mark = token.start_mark
188        explicit = False
189        while self.check_token(DocumentEndToken):
190            token = self.get_token()
191            end_mark = token.end_mark
192            explicit = True
193        event = DocumentEndEvent(start_mark, end_mark,
194                explicit=explicit)
195
196        # Prepare the next state.
197        self.state = self.parse_document_start
198
199        return event
200
201    def parse_document_content(self):
202        if self.check_token(DirectiveToken,
203                DocumentStartToken, DocumentEndToken, StreamEndToken):
204            event = self.process_empty_scalar(self.peek_token().start_mark)
205            self.state = self.states.pop()
206            return event
207        else:
208            return self.parse_block_node()
209
    def process_directives(self):
        # Consume all %YAML and %TAG directives and return a pair
        # (version, tags) for DocumentStartEvent; each element is None
        # when the corresponding directive kind was absent.
        self.yaml_version = None
        self.tag_handles = {}
        while self.check_token(DirectiveToken):
            token = self.get_token()
            if token.name == u'YAML':
                if self.yaml_version is not None:
                    raise ParserError(None, None,
                            "found duplicate YAML directive", token.start_mark)
                major, minor = token.value
                # Only the 1.x series is supported.
                if major != 1:
                    raise ParserError(None, None,
                            "found incompatible YAML document (version 1.* is required)",
                            token.start_mark)
                self.yaml_version = token.value
            elif token.name == u'TAG':
                handle, prefix = token.value
                if handle in self.tag_handles:
                    raise ParserError(None, None,
                            "duplicate tag handle %r" % handle.encode('utf-8'),
                            token.start_mark)
                self.tag_handles[handle] = prefix
        # Snapshot the explicit handles before merging in the defaults,
        # so the event only carries directives actually present in input.
        if self.tag_handles:
            value = self.yaml_version, self.tag_handles.copy()
        else:
            value = self.yaml_version, None
        for key in self.DEFAULT_TAGS:
            if key not in self.tag_handles:
                self.tag_handles[key] = self.DEFAULT_TAGS[key]
        return value
[51]240
[198]241    # block_node_or_indentless_sequence ::= ALIAS
242    #               | properties (block_content | indentless_block_sequence)?
243    #               | block_content
244    #               | indentless_block_sequence
245    # block_node    ::= ALIAS
246    #                   | properties block_content?
247    #                   | block_content
248    # flow_node     ::= ALIAS
249    #                   | properties flow_content?
250    #                   | flow_content
251    # properties    ::= TAG ANCHOR? | ANCHOR TAG?
252    # block_content     ::= block_collection | flow_collection | SCALAR
253    # flow_content      ::= flow_collection | SCALAR
254    # block_collection  ::= block_sequence | block_mapping
255    # flow_collection   ::= flow_sequence | flow_mapping
256
    def parse_block_node(self):
        # Parse a node in a block context.
        return self.parse_node(block=True)

    def parse_flow_node(self):
        # Parse a node in a flow context.
        return self.parse_node()

    def parse_block_node_or_indentless_sequence(self):
        # Parse a node that may also be an indentless block sequence
        # (used for block mapping values).
        return self.parse_node(block=True, indentless_sequence=True)
[43]265
    def parse_node(self, block=False, indentless_sequence=False):
        # Parse an alias, or a (possibly anchored and/or tagged) scalar,
        # sequence or mapping node.  `block` enables block collections;
        # `indentless_sequence` additionally allows a '-' entry at the
        # current indentation level.
        if self.check_token(AliasToken):
            token = self.get_token()
            event = AliasEvent(token.value, token.start_mark, token.end_mark)
            self.state = self.states.pop()
        else:
            anchor = None
            tag = None
            start_mark = end_mark = tag_mark = None
            # Node properties may appear as ANCHOR TAG? or TAG ANCHOR?.
            if self.check_token(AnchorToken):
                token = self.get_token()
                start_mark = token.start_mark
                end_mark = token.end_mark
                anchor = token.value
                if self.check_token(TagToken):
                    token = self.get_token()
                    tag_mark = token.start_mark
                    end_mark = token.end_mark
                    tag = token.value
            elif self.check_token(TagToken):
                token = self.get_token()
                start_mark = tag_mark = token.start_mark
                end_mark = token.end_mark
                tag = token.value
                if self.check_token(AnchorToken):
                    token = self.get_token()
                    end_mark = token.end_mark
                    anchor = token.value
            # Resolve a (handle, suffix) tag pair against declared handles.
            if tag is not None and tag != u'!':
                handle, suffix = tag
                if handle is not None:
                    if handle not in self.tag_handles:
                        raise ParserError("while parsing a node", start_mark,
                                "found undefined tag handle %r" % handle.encode('utf-8'),
                                tag_mark)
                    tag = self.tag_handles[handle]+suffix
                else:
                    tag = suffix
            #if tag == u'!':
            #    raise ParserError("while parsing a node", start_mark,
            #            "found non-specific tag '!'", tag_mark,
            #            "Please check 'http://pyyaml.org/wiki/YAMLNonSpecificTag' and share your opinion.")
            # No properties at all: the node starts at the next token.
            if start_mark is None:
                start_mark = end_mark = self.peek_token().start_mark
            event = None
            # An untagged node (or the non-specific '!' tag) is resolved
            # implicitly later, by the resolver.
            implicit = (tag is None or tag == u'!')
            if indentless_sequence and self.check_token(BlockEntryToken):
                end_mark = self.peek_token().end_mark
                event = SequenceStartEvent(anchor, tag, implicit,
                        start_mark, end_mark)
                self.state = self.parse_indentless_sequence_entry
            else:
                if self.check_token(ScalarToken):
                    token = self.get_token()
                    end_mark = token.end_mark
                    # For scalars, `implicit` is a pair of flags:
                    # (resolvable as plain, resolvable as non-plain).
                    if (token.plain and tag is None) or tag == u'!':
                        implicit = (True, False)
                    elif tag is None:
                        implicit = (False, True)
                    else:
                        implicit = (False, False)
                    event = ScalarEvent(anchor, tag, implicit, token.value,
                            start_mark, end_mark, style=token.style)
                    self.state = self.states.pop()
                elif self.check_token(FlowSequenceStartToken):
                    end_mark = self.peek_token().end_mark
                    event = SequenceStartEvent(anchor, tag, implicit,
                            start_mark, end_mark, flow_style=True)
                    self.state = self.parse_flow_sequence_first_entry
                elif self.check_token(FlowMappingStartToken):
                    end_mark = self.peek_token().end_mark
                    event = MappingStartEvent(anchor, tag, implicit,
                            start_mark, end_mark, flow_style=True)
                    self.state = self.parse_flow_mapping_first_key
                elif block and self.check_token(BlockSequenceStartToken):
                    end_mark = self.peek_token().start_mark
                    event = SequenceStartEvent(anchor, tag, implicit,
                            start_mark, end_mark, flow_style=False)
                    self.state = self.parse_block_sequence_first_entry
                elif block and self.check_token(BlockMappingStartToken):
                    end_mark = self.peek_token().start_mark
                    event = MappingStartEvent(anchor, tag, implicit,
                            start_mark, end_mark, flow_style=False)
                    self.state = self.parse_block_mapping_first_key
                elif anchor is not None or tag is not None:
                    # Empty scalars are allowed even if a tag or an anchor is
                    # specified.
                    event = ScalarEvent(anchor, tag, (implicit, False), u'',
                            start_mark, end_mark)
                    self.state = self.states.pop()
                else:
                    if block:
                        node = 'block'
                    else:
                        node = 'flow'
                    token = self.peek_token()
                    raise ParserError("while parsing a %s node" % node, start_mark,
                            "expected the node content, but found %r" % token.id,
                            token.start_mark)
        return event
[44]366
[198]367    # block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
368
369    def parse_block_sequence_first_entry(self):
[136]370        token = self.get_token()
[198]371        self.marks.append(token.start_mark)
372        return self.parse_block_sequence_entry()
373
    def parse_block_sequence_entry(self):
        # block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
        if self.check_token(BlockEntryToken):
            token = self.get_token()
            if not self.check_token(BlockEntryToken, BlockEndToken):
                self.states.append(self.parse_block_sequence_entry)
                return self.parse_block_node()
            else:
                # '-' with no content: the entry is an empty scalar.
                self.state = self.parse_block_sequence_entry
                return self.process_empty_scalar(token.end_mark)
        if not self.check_token(BlockEndToken):
            token = self.peek_token()
            raise ParserError("while parsing a block collection", self.marks[-1],
                    "expected <block end>, but found %r" % token.id, token.start_mark)
        token = self.get_token()
        event = SequenceEndEvent(token.start_mark, token.end_mark)
        self.state = self.states.pop()
        self.marks.pop()
        return event
[43]392
[198]393    # indentless_sequence ::= (BLOCK-ENTRY block_node?)+
394
    def parse_indentless_sequence_entry(self):
        # indentless_sequence ::= (BLOCK-ENTRY block_node?)+
        # An indentless sequence has no BLOCK-END token; it ends at the
        # first KEY, VALUE or BLOCK-END of the enclosing mapping.
        if self.check_token(BlockEntryToken):
            token = self.get_token()
            if not self.check_token(BlockEntryToken,
                    KeyToken, ValueToken, BlockEndToken):
                self.states.append(self.parse_indentless_sequence_entry)
                return self.parse_block_node()
            else:
                # '-' with no content: the entry is an empty scalar.
                self.state = self.parse_indentless_sequence_entry
                return self.process_empty_scalar(token.end_mark)
        token = self.peek_token()
        # Zero-width end event: the terminating token is not consumed.
        event = SequenceEndEvent(token.start_mark, token.start_mark)
        self.state = self.states.pop()
        return event
[43]409
[198]410    # block_mapping     ::= BLOCK-MAPPING_START
411    #                       ((KEY block_node_or_indentless_sequence?)?
412    #                       (VALUE block_node_or_indentless_sequence?)?)*
413    #                       BLOCK-END
414
415    def parse_block_mapping_first_key(self):
[136]416        token = self.get_token()
[198]417        self.marks.append(token.start_mark)
418        return self.parse_block_mapping_key()
419
    def parse_block_mapping_key(self):
        # block_mapping ::= BLOCK-MAPPING_START
        #                   ((KEY block_node_or_indentless_sequence?)?
        #                   (VALUE block_node_or_indentless_sequence?)?)*
        #                   BLOCK-END
        if self.check_token(KeyToken):
            token = self.get_token()
            if not self.check_token(KeyToken, ValueToken, BlockEndToken):
                self.states.append(self.parse_block_mapping_value)
                return self.parse_block_node_or_indentless_sequence()
            else:
                # '?' with no key node: the key is an empty scalar.
                self.state = self.parse_block_mapping_value
                return self.process_empty_scalar(token.end_mark)
        if not self.check_token(BlockEndToken):
            token = self.peek_token()
            raise ParserError("while parsing a block mapping", self.marks[-1],
                    "expected <block end>, but found %r" % token.id, token.start_mark)
        token = self.get_token()
        event = MappingEndEvent(token.start_mark, token.end_mark)
        self.state = self.states.pop()
        self.marks.pop()
        return event
[43]438
[198]439    def parse_block_mapping_value(self):
440        if self.check_token(ValueToken):
441            token = self.get_token()
442            if not self.check_token(KeyToken, ValueToken, BlockEndToken):
443                self.states.append(self.parse_block_mapping_key)
444                return self.parse_block_node_or_indentless_sequence()
445            else:
446                self.state = self.parse_block_mapping_key
447                return self.process_empty_scalar(token.end_mark)
448        else:
449            self.state = self.parse_block_mapping_key
450            token = self.peek_token()
451            return self.process_empty_scalar(token.start_mark)
452
453    # flow_sequence     ::= FLOW-SEQUENCE-START
454    #                       (flow_sequence_entry FLOW-ENTRY)*
455    #                       flow_sequence_entry?
456    #                       FLOW-SEQUENCE-END
457    # flow_sequence_entry   ::= flow_node | KEY flow_node? (VALUE flow_node?)?
458    #
459    # Note that while production rules for both flow_sequence_entry and
460    # flow_mapping_entry are equal, their interpretations are different.
461    # For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?`
462    # generate an inline mapping (set syntax).
463
464    def parse_flow_sequence_first_entry(self):
[136]465        token = self.get_token()
[198]466        self.marks.append(token.start_mark)
467        return self.parse_flow_sequence_entry(first=True)
468
    def parse_flow_sequence_entry(self, first=False):
        # flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
        if not self.check_token(FlowSequenceEndToken):
            if not first:
                # Entries after the first must be separated by ','.
                if self.check_token(FlowEntryToken):
                    self.get_token()
                else:
                    token = self.peek_token()
                    raise ParserError("while parsing a flow sequence", self.marks[-1],
                            "expected ',' or ']', but got %r" % token.id, token.start_mark)

            if self.check_token(KeyToken):
                # '?' inside a flow sequence starts an inline single-pair
                # mapping; the KEY token itself is consumed later.
                token = self.peek_token()
                event = MappingStartEvent(None, None, True,
                        token.start_mark, token.end_mark,
                        flow_style=True)
                self.state = self.parse_flow_sequence_entry_mapping_key
                return event
            elif not self.check_token(FlowSequenceEndToken):
                self.states.append(self.parse_flow_sequence_entry)
                return self.parse_flow_node()
        token = self.get_token()
        event = SequenceEndEvent(token.start_mark, token.end_mark)
        self.state = self.states.pop()
        self.marks.pop()
        return event
494
495    def parse_flow_sequence_entry_mapping_key(self):
[204]496        token = self.get_token()
[198]497        if not self.check_token(ValueToken,
498                FlowEntryToken, FlowSequenceEndToken):
499            self.states.append(self.parse_flow_sequence_entry_mapping_value)
500            return self.parse_flow_node()
501        else:
502            self.state = self.parse_flow_sequence_entry_mapping_value
503            return self.process_empty_scalar(token.end_mark)
504
505    def parse_flow_sequence_entry_mapping_value(self):
506        if self.check_token(ValueToken):
507            token = self.get_token()
508            if not self.check_token(FlowEntryToken, FlowSequenceEndToken):
509                self.states.append(self.parse_flow_sequence_entry_mapping_end)
510                return self.parse_flow_node()
[43]511            else:
[198]512                self.state = self.parse_flow_sequence_entry_mapping_end
513                return self.process_empty_scalar(token.end_mark)
514        else:
515            self.state = self.parse_flow_sequence_entry_mapping_end
516            token = self.peek_token()
517            return self.process_empty_scalar(token.start_mark)
518
519    def parse_flow_sequence_entry_mapping_end(self):
520        self.state = self.parse_flow_sequence_entry
521        token = self.peek_token()
522        return MappingEndEvent(token.start_mark, token.start_mark)
523
524    # flow_mapping  ::= FLOW-MAPPING-START
525    #                   (flow_mapping_entry FLOW-ENTRY)*
526    #                   flow_mapping_entry?
527    #                   FLOW-MAPPING-END
528    # flow_mapping_entry    ::= flow_node | KEY flow_node? (VALUE flow_node?)?
529
530    def parse_flow_mapping_first_key(self):
[136]531        token = self.get_token()
[198]532        self.marks.append(token.start_mark)
533        return self.parse_flow_mapping_key(first=True)
[43]534
    def parse_flow_mapping_key(self, first=False):
        # flow_mapping ::= FLOW-MAPPING-START
        #                  (flow_mapping_entry FLOW-ENTRY)*
        #                  flow_mapping_entry?
        #                  FLOW-MAPPING-END
        if not self.check_token(FlowMappingEndToken):
            if not first:
                # Entries after the first must be separated by ','.
                if self.check_token(FlowEntryToken):
                    self.get_token()
                else:
                    token = self.peek_token()
                    raise ParserError("while parsing a flow mapping", self.marks[-1],
                            "expected ',' or '}', but got %r" % token.id, token.start_mark)
            if self.check_token(KeyToken):
                token = self.get_token()
                if not self.check_token(ValueToken,
                        FlowEntryToken, FlowMappingEndToken):
                    self.states.append(self.parse_flow_mapping_value)
                    return self.parse_flow_node()
                else:
                    # '?' with no key node: the key is an empty scalar.
                    self.state = self.parse_flow_mapping_value
                    return self.process_empty_scalar(token.end_mark)
            elif not self.check_token(FlowMappingEndToken):
                # A bare node acts as a key with an empty value.
                self.states.append(self.parse_flow_mapping_empty_value)
                return self.parse_flow_node()
        token = self.get_token()
        event = MappingEndEvent(token.start_mark, token.end_mark)
        self.state = self.states.pop()
        self.marks.pop()
        return event
561
562    def parse_flow_mapping_value(self):
563        if self.check_token(ValueToken):
564            token = self.get_token()
565            if not self.check_token(FlowEntryToken, FlowMappingEndToken):
566                self.states.append(self.parse_flow_mapping_key)
567                return self.parse_flow_node()
[43]568            else:
[198]569                self.state = self.parse_flow_mapping_key
570                return self.process_empty_scalar(token.end_mark)
571        else:
572            self.state = self.parse_flow_mapping_key
[136]573            token = self.peek_token()
[198]574            return self.process_empty_scalar(token.start_mark)
[43]575
[198]576    def parse_flow_mapping_empty_value(self):
577        self.state = self.parse_flow_mapping_key
578        return self.process_empty_scalar(self.peek_token().start_mark)
579
    def process_empty_scalar(self, mark):
        # Produce a zero-width, plain, implicitly-typed empty scalar
        # at the given mark.
        return ScalarEvent(None, None, (True, False), u'', mark, mark)
[43]582
Note: See TracBrowser for help on using the repository browser.