source: branches/pyyaml3000/lib/yaml/scanner.py @ 44

Revision 44, 26.3 KB, checked in by xi, 8 years ago

All tests passed! Scanner and Parser seem to be correct.


# Tokens:
# YAML-DIRECTIVE(major_version, minor_version), TAG-DIRECTIVE(handle, prefix)
# RESERVED-DIRECTIVE(name)
# DOCUMENT-START, DOCUMENT-END
# BLOCK-SEQUENCE-START, BLOCK-MAPPING-START, BLOCK-END
# FLOW-SEQUENCE-START, FLOW-MAPPING-START, FLOW-SEQUENCE-END, FLOW-MAPPING-END
# ENTRY, KEY, VALUE
# ALIAS(name), ANCHOR(name), TAG(value), SCALAR(value, plain)
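#
# Illustrative sketch of the output: the document
#   key: value
# is scanned, roughly, as the token stream
#   BLOCK-MAPPING-START, KEY, SCALAR('key', plain), VALUE,
#   SCALAR('value', plain), BLOCK-END, END
# (the scan_* methods at this revision are stubs and leave scalar values
# empty, so the values above are shown only for illustration).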


from marker import Marker
#from error import YAMLError
from stream import Stream

#class ScannerError(YAMLError):
class ScannerError(Exception):
    pass

class Token:
    def __init__(self, start_marker, end_marker):
        self.start_marker = start_marker
        self.end_marker = end_marker

class DirectiveToken(Token):
    pass

class YAMLDirectiveToken(DirectiveToken):
    def __init__(self, major_version, minor_version, start_marker, end_marker):
        self.major_version = major_version
        self.minor_version = minor_version
        self.start_marker = start_marker
        self.end_marker = end_marker

class TagDirectiveToken(DirectiveToken):
    pass

class ReservedDirectiveToken(DirectiveToken):
    def __init__(self, name, start_marker, end_marker):
        self.name = name
        self.start_marker = start_marker
        self.end_marker = end_marker

class DocumentStartToken(Token):
    pass

class DocumentEndToken(Token):
    pass

class EndToken(Token):
    pass

class BlockSequenceStartToken(Token):
    pass

class BlockMappingStartToken(Token):
    pass

class BlockEndToken(Token):
    pass

class FlowSequenceStartToken(Token):
    pass

class FlowMappingStartToken(Token):
    pass

class FlowSequenceEndToken(Token):
    pass

class FlowMappingEndToken(Token):
    pass

class KeyToken(Token):
    pass

class ValueToken(Token):
    pass

class EntryToken(Token):
    pass

class AliasToken(Token):
    def __init__(self, value, start_marker, end_marker):
        self.value = value
        self.start_marker = start_marker
        self.end_marker = end_marker

class AnchorToken(Token):
    def __init__(self, value, start_marker, end_marker):
        self.value = value
        self.start_marker = start_marker
        self.end_marker = end_marker

class TagToken(Token):
    def __init__(self, value, start_marker, end_marker):
        self.value = value
        self.start_marker = start_marker
        self.end_marker = end_marker

class ScalarToken(Token):
    def __init__(self, value, plain, start_marker, end_marker):
        self.value = value
        self.plain = plain
        self.start_marker = start_marker
        self.end_marker = end_marker

class SimpleKey:
    def __init__(self, token_number, required, index, line, column, marker):
        self.token_number = token_number
        self.required = required
        self.index = index
        self.line = line
        self.column = column
        self.marker = marker

class Scanner:

    def __init__(self, source, data):
        """Initialize the scanner."""
        # The input stream. The Stream class does the dirty work of checking
        # for a BOM and converting the input data to Unicode. It also adds
        # NUL to the end.
        #
        # Stream supports the following methods:
        #   self.stream.peek(k=1)   # peek the next k characters
        #   self.stream.read(k=1)   # read the next k characters and move the
        #                           # pointer
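        #
        # Illustrative sketch (an assumption about Stream, not taken from
        # stream.py): if the buffer starts with u'foo', then
        #   self.stream.peek(2)   # -> u'fo', the pointer does not move
        #   self.stream.read(2)   # -> u'fo', the pointer advances by 2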
        self.stream = Stream(source, data)

        # Have we reached the end of the stream?
        self.done = False

        # The number of unclosed '{' and '['. `flow_level == 0` means block
        # context.
        self.flow_level = 0

        # List of processed tokens that are not yet emitted.
        self.tokens = []

        # Number of tokens that were emitted through the `get_token` method.
        self.tokens_taken = 0

        # The current indentation level.
        self.indent = -1

        # Past indentation levels.
        self.indents = []

        # Variables related to simple keys treatment.

        # A simple key is a key that is not denoted by the '?' indicator.
        # Examples of simple keys:
        #   ---
        #   block simple key: value
        #   ? not a simple key:
        #   : { flow simple key: value }
        # We emit the KEY token before all keys, so when we find a potential
        # simple key, we try to locate the corresponding ':' indicator.
        # Simple keys should be limited to a single line and 1024 characters.

        # Can a simple key start at the current position? A simple key may
        # start:
        # - at the beginning of the line, not counting indentation spaces
        #       (in block context),
        # - after '{', '[', ',' (in the flow context),
        # - after '?', ':', '-' (in the block context).
        # In the block context, this flag also signifies whether a block
        # collection may start at the current position.
        self.allow_simple_key = True

        # Keep track of possible simple keys. This is a dictionary. The key
        # is `flow_level`; there can be no more than one possible simple key
        # for each level. The value is a SimpleKey record:
        #   (token_number, required, index, line, column, marker)
        # A simple key may start with ALIAS, ANCHOR, TAG, SCALAR(flow),
        # '[', or '{' tokens.
        self.possible_simple_keys = {}

    # Two public methods.

    def peek_token(self):
        """Get the current token."""
        while self.need_more_tokens():
            self.fetch_more_tokens()
        if self.tokens:
            return self.tokens[0]

    def get_token(self):
        """Get the current token and remove it from the list of pending tokens."""
        while self.need_more_tokens():
            self.fetch_more_tokens()
        if self.tokens:
            self.tokens_taken += 1
            return self.tokens.pop(0)

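    # Illustrative sketch of the contract of the two methods above:
    #   scanner.peek_token() is scanner.peek_token()   # same pending token
    #   scanner.get_token()                            # consumes that token
    # Both return None once the stream is exhausted.
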
    # Private methods.

    def need_more_tokens(self):
        if self.done:
            return False
        if not self.tokens:
            return True
        # The current token may be a potential simple key, so we
        # need to look further.
        self.stale_possible_simple_keys()
        if self.next_possible_simple_key() == self.tokens_taken:
            return True

    def fetch_more_tokens(self):

        # Eat whitespaces and comments until we reach the next token.
        self.scan_to_next_token()

        # Remove obsolete possible simple keys.
        self.stale_possible_simple_keys()

        # Compare the current indentation and column. It may add some tokens
        # and decrease the current indentation level.
        self.unwind_indent(self.stream.column)

        #print
        #print self.stream.get_marker().get_snippet()

        # Peek the next character.
        ch = self.stream.peek()

        # Is it the end of stream?
        if ch == u'\0':
            return self.fetch_end()

        # Is it a directive?
        if ch == u'%' and self.check_directive():
            return self.fetch_directive()

        # Is it the document start?
        if ch == u'-' and self.check_document_start():
            return self.fetch_document_start()

        # Is it the document end?
        if ch == u'.' and self.check_document_end():
            return self.fetch_document_end()

        # Note: the order of the following checks is NOT significant.

        # Is it the flow sequence start indicator?
        if ch == u'[':
            return self.fetch_flow_sequence_start()

        # Is it the flow mapping start indicator?
        if ch == u'{':
            return self.fetch_flow_mapping_start()

        # Is it the flow sequence end indicator?
        if ch == u']':
            return self.fetch_flow_sequence_end()

        # Is it the flow mapping end indicator?
        if ch == u'}':
            return self.fetch_flow_mapping_end()

        # Is it the entry indicator?
        if ch in u'-,' and self.check_entry():
            return self.fetch_entry()

        # Is it the key indicator?
        if ch == u'?' and self.check_key():
            return self.fetch_key()

        # Is it the value indicator?
        if ch == u':' and self.check_value():
            return self.fetch_value()

        # Is it an alias?
        if ch == u'*':
            return self.fetch_alias()

        # Is it an anchor?
        if ch == u'&':
            return self.fetch_anchor()

        # Is it a tag?
        if ch == u'!':
            return self.fetch_tag()

        # Is it a literal scalar?
        if ch == u'|' and not self.flow_level:
            return self.fetch_literal()

        # Is it a folded scalar?
        if ch == u'>' and not self.flow_level:
            return self.fetch_folded()

        # Is it a single quoted scalar?
        if ch == u'\'':
            return self.fetch_single()

        # Is it a double quoted scalar?
        if ch == u'\"':
            return self.fetch_double()

        # It must be a plain scalar then.
        if self.check_plain():
            return self.fetch_plain()

        # No? It's an error. Let's produce a nice error message.
        self.invalid_token()

    # Simple keys treatment.

    def next_possible_simple_key(self):
        # Return the number of the nearest possible simple key. Actually we
        # don't need to loop through the whole dictionary. We may replace it
        # with the following code:
        #   if not self.possible_simple_keys:
        #       return None
        #   return self.possible_simple_keys[
        #           min(self.possible_simple_keys.keys())].token_number
        min_token_number = None
        for level in self.possible_simple_keys:
            key = self.possible_simple_keys[level]
            if min_token_number is None or key.token_number < min_token_number:
                min_token_number = key.token_number
        return min_token_number

    def stale_possible_simple_keys(self):
        # Remove entries that are no longer possible simple keys. According to
        # the YAML specification, simple keys
        # - should be limited to a single line,
        # - should be no longer than 1024 characters.
        # Disabling this procedure will allow simple keys of any length and
        # height (may cause problems if indentation is broken though).
        for level in self.possible_simple_keys.keys():
            key = self.possible_simple_keys[level]
            if key.line != self.stream.line \
                    or self.stream.index-key.index > 1024:
                if key.required:
                    self.fail("simple key is required")
                del self.possible_simple_keys[level]

    def save_possible_simple_key(self):
        # The next token may start a simple key. We check if it's possible
        # and save its position. This function is called for
        #   ALIAS, ANCHOR, TAG, SCALAR(flow), '[', and '{'.

        # Check if a simple key is required at the current position.
        required = not self.flow_level and self.indent == self.stream.column

        # The next token might be a simple key. Let's save its number and
        # position.
        if self.allow_simple_key:
            self.remove_possible_simple_key()
            token_number = self.tokens_taken+len(self.tokens)
            index = self.stream.index
            line = self.stream.line
            column = self.stream.column
            marker = self.stream.get_marker()
            key = SimpleKey(token_number, required,
                    index, line, column, marker)
            self.possible_simple_keys[self.flow_level] = key

        # A simple key is required at the current position.
        elif required:
            self.fail("simple key is required")

    def remove_possible_simple_key(self):
        # Remove the saved possible key position at the current flow level.
        if self.flow_level in self.possible_simple_keys:
            key = self.possible_simple_keys[self.flow_level]
            if key.required:
                self.fail("simple key is required")
            del self.possible_simple_keys[self.flow_level]

    # Indentation functions.

    def unwind_indent(self, column):

        # In flow context, tokens should respect indentation.
        if self.flow_level and self.indent > column:
            self.fail("invalid indentation in the flow context")

        # In block context, we may need to issue the BLOCK-END tokens.
        while self.indent > column:
            marker = self.stream.get_marker()
            self.indent = self.indents.pop()
            self.tokens.append(BlockEndToken(marker, marker))

    def add_indent(self, column):
        # Check if we need to increase indentation.
        if self.indent < column:
            self.indents.append(self.indent)
            self.indent = column
            return True
        return False

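    # Illustrative trace of the two methods above: starting from indent == -1
    # and indents == [], add_indent(0) pushes -1 and sets indent to 0;
    # add_indent(2) then pushes 0 and sets indent to 2; a later
    # unwind_indent(-1) pops both levels and emits two BLOCK-END tokens.
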
    # Fetchers.

    def fetch_end(self):

        # Set the current indentation to -1.
        self.unwind_indent(-1)

        # Reset everything (not really needed).
        self.allow_simple_key = False
        self.possible_simple_keys = {}

        # Read the token.
        marker = self.stream.get_marker()

        # Add END.
        self.tokens.append(EndToken(marker, marker))

        # The stream is ended.
        self.done = True

    def fetch_directive(self):

        # Set the current indentation to -1.
        self.unwind_indent(-1)

        # Reset simple keys.
        self.remove_possible_simple_key()
        self.allow_simple_key = False

        # Scan and add DIRECTIVE.
        self.scan_directive()

    def fetch_document_start(self):
        self.fetch_document_indicator(DocumentStartToken)

    def fetch_document_end(self):
        self.fetch_document_indicator(DocumentEndToken)

    def fetch_document_indicator(self, TokenClass):

        # Set the current indentation to -1.
        self.unwind_indent(-1)

        # Reset simple keys. Note that there cannot be a block collection
        # after '---'.
        self.remove_possible_simple_key()
        self.allow_simple_key = False

        # Add DOCUMENT-START or DOCUMENT-END.
        start_marker = self.stream.get_marker()
        self.stream.read(3)
        end_marker = self.stream.get_marker()
        self.tokens.append(TokenClass(start_marker, end_marker))

    def fetch_flow_sequence_start(self):
        self.fetch_flow_collection_start(FlowSequenceStartToken)

    def fetch_flow_mapping_start(self):
        self.fetch_flow_collection_start(FlowMappingStartToken)

    def fetch_flow_collection_start(self, TokenClass):

        # '[' and '{' may start a simple key.
        self.save_possible_simple_key()

        # Increase the flow level.
        self.flow_level += 1

        # Simple keys are allowed after '[' and '{'.
        self.allow_simple_key = True

        # Add FLOW-SEQUENCE-START or FLOW-MAPPING-START.
        start_marker = self.stream.get_marker()
        self.stream.read()
        end_marker = self.stream.get_marker()
        self.tokens.append(TokenClass(start_marker, end_marker))

    def fetch_flow_sequence_end(self):
        self.fetch_flow_collection_end(FlowSequenceEndToken)

    def fetch_flow_mapping_end(self):
        self.fetch_flow_collection_end(FlowMappingEndToken)

    def fetch_flow_collection_end(self, TokenClass):

        # Reset possible simple key on the current level.
        self.remove_possible_simple_key()

        # Decrease the flow level.
        self.flow_level -= 1

        # No simple keys after ']' or '}'.
        self.allow_simple_key = False

        # Add FLOW-SEQUENCE-END or FLOW-MAPPING-END.
        start_marker = self.stream.get_marker()
        self.stream.read()
        end_marker = self.stream.get_marker()
        self.tokens.append(TokenClass(start_marker, end_marker))

    def fetch_entry(self):

        # Block context needs additional checks.
        if not self.flow_level:

            # Are we allowed to start a new entry?
            if not self.allow_simple_key:
                self.fail("Cannot start a new entry here")

            # We may need to add BLOCK-SEQUENCE-START.
            if self.add_indent(self.stream.column):
                marker = self.stream.get_marker()
                self.tokens.append(BlockSequenceStartToken(marker, marker))

        # Simple keys are allowed after '-' and ','.
        self.allow_simple_key = True

        # Reset possible simple key on the current level.
        self.remove_possible_simple_key()

        # Add ENTRY.
        start_marker = self.stream.get_marker()
        self.stream.read()
        end_marker = self.stream.get_marker()
        self.tokens.append(EntryToken(start_marker, end_marker))

    def fetch_key(self):

        # Block context needs additional checks.
        if not self.flow_level:

            # Are we allowed to start a key (not necessarily a simple one)?
            if not self.allow_simple_key:
                self.fail("Cannot start a new key here")

            # We may need to add BLOCK-MAPPING-START.
            if self.add_indent(self.stream.column):
                marker = self.stream.get_marker()
                self.tokens.append(BlockMappingStartToken(marker, marker))

        # Simple keys are allowed after '?' in the block context.
        self.allow_simple_key = not self.flow_level

        # Reset possible simple key on the current level.
        self.remove_possible_simple_key()

        # Add KEY.
        start_marker = self.stream.get_marker()
        self.stream.read()
        end_marker = self.stream.get_marker()
        self.tokens.append(KeyToken(start_marker, end_marker))

    def fetch_value(self):

        # Do we have a pending simple key?
        if self.flow_level in self.possible_simple_keys:

            # Add KEY.
            key = self.possible_simple_keys[self.flow_level]
            del self.possible_simple_keys[self.flow_level]
            self.tokens.insert(key.token_number-self.tokens_taken,
                    KeyToken(key.marker, key.marker))

            # If this key starts a new block mapping, we need to add
            # BLOCK-MAPPING-START.
            if not self.flow_level:
                if self.add_indent(key.column):
                    self.tokens.insert(key.token_number-self.tokens_taken,
                            BlockMappingStartToken(key.marker, key.marker))

            # There cannot be two simple keys one after another.
            self.allow_simple_key = False

        # It must be a part of a complex key.
        else:

            # Simple keys are allowed after ':' in the block context.
            self.allow_simple_key = not self.flow_level

            # Reset possible simple key on the current level.
            self.remove_possible_simple_key()

        # Add VALUE.
        start_marker = self.stream.get_marker()
        self.stream.read()
        end_marker = self.stream.get_marker()
        self.tokens.append(ValueToken(start_marker, end_marker))
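        # Illustrative worked example: for the input u'a: 1' the pending
        # tokens just before ':' are [SCALAR for 'a']; this method inserts
        # KEY in front of it (and BLOCK-MAPPING-START, since a new block
        # mapping starts at column 0) and appends VALUE, giving
        # [BLOCK-MAPPING-START, KEY, SCALAR, VALUE].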

    def fetch_alias(self):

        # ALIAS could be a simple key.
        self.save_possible_simple_key()

        # No simple keys after ALIAS.
        self.allow_simple_key = False

        # Scan and add ALIAS.
        self.scan_anchor(AliasToken)

    def fetch_anchor(self):

        # ANCHOR could start a simple key.
        self.save_possible_simple_key()

        # No simple keys after ANCHOR.
        self.allow_simple_key = False

        # Scan and add ANCHOR.
        self.scan_anchor(AnchorToken)

    def fetch_tag(self):

        # TAG could start a simple key.
        self.save_possible_simple_key()

        # No simple keys after TAG.
        self.allow_simple_key = False

        # Scan and add TAG.
        self.scan_tag()

    def fetch_literal(self):
        self.fetch_block_scalar(folded=False)

    def fetch_folded(self):
        self.fetch_block_scalar(folded=True)

    def fetch_block_scalar(self, folded):

        # A simple key may follow a block scalar.
        self.allow_simple_key = True

        # Reset possible simple key on the current level.
        self.remove_possible_simple_key()

        # Scan and add SCALAR.
        self.scan_block_scalar(folded)

    def fetch_single(self):
        self.fetch_flow_scalar(double=False)

    def fetch_double(self):
        self.fetch_flow_scalar(double=True)

    def fetch_flow_scalar(self, double):

        # A flow scalar could be a simple key.
        self.save_possible_simple_key()

        # No simple keys after flow scalars.
        self.allow_simple_key = False

        # Scan and add SCALAR.
        self.scan_flow_scalar(double)

    def fetch_plain(self):

        # A plain scalar could be a simple key.
        self.save_possible_simple_key()

        # No simple keys after plain scalars. But note that `scan_plain` will
        # change this flag if the scan is finished at the beginning of the
        # line.
        self.allow_simple_key = False

        # Scan and add SCALAR. May change `allow_simple_key`.
        self.scan_plain()

    # Checkers.

    def check_directive(self):

        # DIRECTIVE:        ^ '%' ...
        # The '%' indicator is already checked.
        if self.stream.column == 0:
            return True

    def check_document_start(self):

        # DOCUMENT-START:   ^ '---' (' '|'\n')
        if self.stream.column == 0:
            prefix = self.stream.peek(4)
            if prefix[:3] == u'---' and prefix[3] in u'\0 \t\r\n\x85\u2028\u2029':
                return True

    def check_document_end(self):

        # DOCUMENT-END:     ^ '...' (' '|'\n')
        if self.stream.column == 0:
            prefix = self.stream.peek(4)
            if prefix[:3] == u'...' and prefix[3] in u'\0 \t\r\n\x85\u2028\u2029':
                return True

    def check_entry(self):

        # ENTRY(flow context):      ','
        if self.flow_level:
            return self.stream.peek() == u','

        # ENTRY(block context):     '-' (' '|'\n')
        else:
            prefix = self.stream.peek(2)
            return prefix[0] == u'-' and prefix[1] in u'\0 \t\r\n\x85\u2028\u2029'

    def check_key(self):

        # KEY(flow context):    '?'
        if self.flow_level:
            return True

        # KEY(block context):   '?' (' '|'\n')
        else:
            prefix = self.stream.peek(2)
            return prefix[1] in u'\0 \t\r\n\x85\u2028\u2029'

    def check_value(self):

        # VALUE(flow context):  ':'
        if self.flow_level:
            return True

        # VALUE(block context): ':' (' '|'\n')
        else:
            prefix = self.stream.peek(2)
            return prefix[1] in u'\0 \t\r\n\x85\u2028\u2029'

    def check_plain(self):
        return True

    # Scanners.

    def scan_to_next_token(self):
        found = False
        while not found:
            while self.stream.peek() == u' ':
                self.stream.read()
            if self.stream.peek() == u'#':
                while self.stream.peek() not in u'\0\r\n':
                    self.stream.read()
            if self.stream.peek() in u'\r\n':
                self.stream.read()
                if not self.flow_level:
                    self.allow_simple_key = True
            else:
                found = True

    def scan_directive(self):
        marker = self.stream.get_marker()
        if self.stream.peek(6) == u'%YAML ':
            self.tokens.append(YAMLDirectiveToken(1, 1, marker, marker))
        elif self.stream.peek(5) == u'%TAG ':
            self.tokens.append(TagDirectiveToken(marker, marker))
        else:
            self.tokens.append(ReservedDirectiveToken('', marker, marker))
        while self.stream.peek() not in u'\0\r\n':
            self.stream.read()
        self.stream.read()

    def scan_anchor(self, TokenClass):
        start_marker = self.stream.get_marker()
        while self.stream.peek() not in u'\0 \t\r\n,:':
            self.stream.read()
        end_marker = self.stream.get_marker()
        self.tokens.append(TokenClass('', start_marker, end_marker))

    def scan_tag(self):
        start_marker = self.stream.get_marker()
        while self.stream.peek() not in u'\0 \t\r\n':
            self.stream.read()
        end_marker = self.stream.get_marker()
        self.tokens.append(TagToken('', start_marker, end_marker))

    def scan_block_scalar(self, folded):
        start_marker = self.stream.get_marker()
        indent = self.indent+1
        if indent < 1:
            indent = 1
        while True:
            while self.stream.peek() not in u'\0\r\n\x85\u2028\u2029':
                self.stream.read()
            if self.stream.peek() != u'\0':
                self.stream.read()
            count = 0
            while count < indent and self.stream.peek() == u' ':
                self.stream.read()
                count += 1
            if count < indent and self.stream.peek() not in u'#\r\n\x85\u2028\u2029':
                break
        self.tokens.append(ScalarToken('', False, start_marker, start_marker))

    def scan_flow_scalar(self, double):
        marker = self.stream.get_marker()
        quote = self.stream.read()
        while self.stream.peek() != quote:
            if double and self.stream.peek() == u'\\':
                self.stream.read(2)
            elif not double and self.stream.peek(3)[1:] == u'\'\'':
                self.stream.read(3)
            else:
                self.stream.read(1)
        self.stream.read(1)
        self.tokens.append(ScalarToken('', False, marker, marker))

    def scan_plain(self):
        indent = self.indent+1
        if indent < 1:
            indent = 1
        space = False
        marker = self.stream.get_marker()
        while True:
            while self.stream.peek() == u' ':
                self.stream.read()
                space = True
            while self.stream.peek() not in u'\0\r\n?:,[]{}#' \
                    or (not space and self.stream.peek() == '#') \
                    or (not self.flow_level and self.stream.peek() in '?,[]{}') \
                    or (not self.flow_level and self.stream.peek() == ':' and self.stream.peek(2)[1] not in u' \0\r\n'):
                space = self.stream.peek() not in u' \t'
                self.stream.read()
                self.allow_simple_key = False
            if self.stream.peek() not in u'\r\n':
                break
            while self.stream.peek() in u'\r\n':
                self.stream.read()
                if not self.flow_level:
                    self.allow_simple_key = True
            count = 0
            while self.stream.peek() == u' ' and count < indent:
                self.stream.read()
                count += 1
            if count < indent:
                break
            space = True
        self.tokens.append(ScalarToken('', True, marker, marker))

    def invalid_token(self):
        self.fail("invalid token")

    def fail(self, message):
        raise ScannerError(message)

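# Minimal driver sketch (illustrative; not part of the original module). It
# assumes that `Stream(source, data)` accepts a plain unicode string as
# `data`; adjust if the actual Stream interface differs.
if __name__ == '__main__':
    scanner = Scanner('<string>', u"key: value\n")
    while True:
        token = scanner.get_token()
        if token is None:
            break
        # Print the token class name, e.g. BlockMappingStartToken, KeyToken.
        print token.__class__.__name__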