# The following YAML grammar is LL(1) and is parsed by a recursive descent
# parser.
#
# stream            ::= STREAM-START implicit_document? explicit_document* STREAM-END
# implicit_document ::= block_node DOCUMENT-END*
# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
# block_node_or_indentless_sequence ::=
#                       ALIAS
#                       | properties (block_content | indentless_block_sequence)?
#                       | block_content
#                       | indentless_block_sequence
# block_node        ::= ALIAS
#                       | properties block_content?
#                       | block_content
# flow_node         ::= ALIAS
#                       | properties flow_content?
#                       | flow_content
# properties        ::= TAG ANCHOR? | ANCHOR TAG?
# block_content     ::= block_collection | flow_collection | SCALAR
# flow_content      ::= flow_collection | SCALAR
# block_collection  ::= block_sequence | block_mapping
# flow_collection   ::= flow_sequence | flow_mapping
# block_sequence    ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
# indentless_sequence ::= (BLOCK-ENTRY block_node?)+
# block_mapping     ::= BLOCK-MAPPING-START
#                       ((KEY block_node_or_indentless_sequence?)?
#                       (VALUE block_node_or_indentless_sequence?)?)*
#                       BLOCK-END
# flow_sequence     ::= FLOW-SEQUENCE-START
#                       (flow_sequence_entry FLOW-ENTRY)*
#                       flow_sequence_entry?
#                       FLOW-SEQUENCE-END
# flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
# flow_mapping      ::= FLOW-MAPPING-START
#                       (flow_mapping_entry FLOW-ENTRY)*
#                       flow_mapping_entry?
#                       FLOW-MAPPING-END
# flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
#
# FIRST sets:
#
# stream: { STREAM-START }
# explicit_document: { DIRECTIVE DOCUMENT-START }
# implicit_document: FIRST(block_node)
# block_node: { ALIAS TAG ANCHOR SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START }
# flow_node: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START }
# block_content: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
# flow_content: { FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
# block_collection: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START }
# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
# block_sequence: { BLOCK-SEQUENCE-START }
# block_mapping: { BLOCK-MAPPING-START }
# block_node_or_indentless_sequence: { ALIAS ANCHOR TAG SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START BLOCK-ENTRY }
# indentless_sequence: { ENTRY }
# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
# flow_sequence: { FLOW-SEQUENCE-START }
# flow_mapping: { FLOW-MAPPING-START }
# flow_sequence_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
# flow_mapping_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
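#
# As a quick illustration (added commentary, not part of the original header):
# for the input document
#
#     - foo
#     - bar: baz
#
# the parser produces roughly this event stream:
#
#     StreamStart, DocumentStart(implicit), SequenceStart, Scalar('foo'),
#     MappingStart, Scalar('bar'), Scalar('baz'), MappingEnd, SequenceEnd,
#     DocumentEnd(implicit), StreamEnd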

__all__ = ['Parser', 'ParserError']

from .error import MarkedYAMLError
from .tokens import *
from .events import *
from .scanner import *

class ParserError(MarkedYAMLError):
    pass

class Parser:
    # Since writing a recursive descent parser is a straightforward task, we
    # do not give many comments here.

    DEFAULT_TAGS = {
        '!':  '!',
        '!!': 'tag:yaml.org,2002:',
    }

    def __init__(self):
        self.current_event = None
        self.yaml_version = None
        self.tag_handles = {}
        self.states = []
        self.marks = []
        self.state = self.parse_stream_start

    def dispose(self):
        # Reset the state attributes (to clear self-references)
        self.states = []
        self.state = None

    def check_event(self, *choices):
        # Check the type of the next event.
        if self.current_event is None:
            if self.state:
                self.current_event = self.state()
        if self.current_event is not None:
            if not choices:
                return True
            for choice in choices:
                if isinstance(self.current_event, choice):
                    return True
        return False

    def peek_event(self):
        # Get the next event.
        if self.current_event is None:
            if self.state:
                self.current_event = self.state()
        return self.current_event

    def get_event(self):
        # Get the next event and proceed further.
        if self.current_event is None:
            if self.state:
                self.current_event = self.state()
        value = self.current_event
        self.current_event = None
        return value
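
    # The three methods above are the entire event interface used by the
    # Composer and by yaml.parse().  A typical driver loop looks roughly like
    # the following sketch (it assumes the usual Loader mix-in stack, which
    # also supplies check_token/peek_token/get_token from the Scanner):
    #
    #     while parser.check_event():
    #         event = parser.get_event()
    #         ...  # dispatch on the event type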

    # stream    ::= STREAM-START implicit_document? explicit_document* STREAM-END
    # implicit_document ::= block_node DOCUMENT-END*
    # explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*

    def parse_stream_start(self):

        # Parse the stream start.
        token = self.get_token()
        event = StreamStartEvent(token.start_mark, token.end_mark,
                encoding=token.encoding)

        # Prepare the next state.
        self.state = self.parse_implicit_document_start

        return event

    def parse_implicit_document_start(self):

        # Parse an implicit document.
        if not self.check_token(DirectiveToken, DocumentStartToken,
                StreamEndToken):
            self.tag_handles = self.DEFAULT_TAGS
            token = self.peek_token()
            start_mark = end_mark = token.start_mark
            event = DocumentStartEvent(start_mark, end_mark,
                    explicit=False)

            # Prepare the next state.
            self.states.append(self.parse_document_end)
            self.state = self.parse_block_node

            return event

        else:
            return self.parse_document_start()

    def parse_document_start(self):

        # Parse any extra document end indicators.
        while self.check_token(DocumentEndToken):
            self.get_token()

        # Parse an explicit document.
        if not self.check_token(StreamEndToken):
            token = self.peek_token()
            start_mark = token.start_mark
            version, tags = self.process_directives()
            if not self.check_token(DocumentStartToken):
                raise ParserError(None, None,
                        "expected '<document start>', but found %r"
                        % self.peek_token().id,
                        self.peek_token().start_mark)
            token = self.get_token()
            end_mark = token.end_mark
            event = DocumentStartEvent(start_mark, end_mark,
                    explicit=True, version=version, tags=tags)
            self.states.append(self.parse_document_end)
            self.state = self.parse_document_content
        else:
            # Parse the end of the stream.
            token = self.get_token()
            event = StreamEndEvent(token.start_mark, token.end_mark)
            assert not self.states
            assert not self.marks
            self.state = None
        return event

    def parse_document_end(self):

        # Parse the document end.
        token = self.peek_token()
        start_mark = end_mark = token.start_mark
        explicit = False
        if self.check_token(DocumentEndToken):
            token = self.get_token()
            end_mark = token.end_mark
            explicit = True
        event = DocumentEndEvent(start_mark, end_mark,
                explicit=explicit)

        # Prepare the next state.
        self.state = self.parse_document_start

        return event
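
    # Illustrative note (added): '---' introduces an explicit document and
    # '...' closes one, so a stream such as
    #
    #     --- a
    #     ...
    #     --- b
    #
    # produces two DocumentStart/DocumentEnd pairs, one around each scalar.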

    def parse_document_content(self):
        if self.check_token(DirectiveToken,
                DocumentStartToken, DocumentEndToken, StreamEndToken):
            event = self.process_empty_scalar(self.peek_token().start_mark)
            self.state = self.states.pop()
            return event
        else:
            return self.parse_block_node()

    def process_directives(self):
        self.yaml_version = None
        self.tag_handles = {}
        while self.check_token(DirectiveToken):
            token = self.get_token()
            if token.name == 'YAML':
                if self.yaml_version is not None:
                    raise ParserError(None, None,
                            "found duplicate YAML directive", token.start_mark)
                major, minor = token.value
                if major != 1:
                    raise ParserError(None, None,
                            "found incompatible YAML document (version 1.* is required)",
                            token.start_mark)
                self.yaml_version = token.value
            elif token.name == 'TAG':
                handle, prefix = token.value
                if handle in self.tag_handles:
                    raise ParserError(None, None,
                            "duplicate tag handle %r" % handle,
                            token.start_mark)
                self.tag_handles[handle] = prefix
        if self.tag_handles:
            value = self.yaml_version, self.tag_handles.copy()
        else:
            value = self.yaml_version, None
        for key in self.DEFAULT_TAGS:
            if key not in self.tag_handles:
                self.tag_handles[key] = self.DEFAULT_TAGS[key]
        return value
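
    # For example (added commentary): after the directive
    #
    #     %TAG !e! tag:example.com,2000:app/
    #
    # process_directives() records {'!e!': 'tag:example.com,2000:app/'}, and a
    # node written as '!e!point' later resolves to
    # 'tag:example.com,2000:app/point' via the handle+suffix concatenation in
    # parse_node() below.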

    # block_node_or_indentless_sequence ::= ALIAS
    #               | properties (block_content | indentless_block_sequence)?
    #               | block_content
    #               | indentless_block_sequence
    # block_node    ::= ALIAS
    #                   | properties block_content?
    #                   | block_content
    # flow_node     ::= ALIAS
    #                   | properties flow_content?
    #                   | flow_content
    # properties    ::= TAG ANCHOR? | ANCHOR TAG?
    # block_content     ::= block_collection | flow_collection | SCALAR
    # flow_content      ::= flow_collection | SCALAR
    # block_collection  ::= block_sequence | block_mapping
    # flow_collection   ::= flow_sequence | flow_mapping

    def parse_block_node(self):
        return self.parse_node(block=True)

    def parse_flow_node(self):
        return self.parse_node()

    def parse_block_node_or_indentless_sequence(self):
        return self.parse_node(block=True, indentless_sequence=True)

    def parse_node(self, block=False, indentless_sequence=False):
        if self.check_token(AliasToken):
            token = self.get_token()
            event = AliasEvent(token.value, token.start_mark, token.end_mark)
            self.state = self.states.pop()
        else:
            anchor = None
            tag = None
            start_mark = end_mark = tag_mark = None
            if self.check_token(AnchorToken):
                token = self.get_token()
                start_mark = token.start_mark
                end_mark = token.end_mark
                anchor = token.value
                if self.check_token(TagToken):
                    token = self.get_token()
                    tag_mark = token.start_mark
                    end_mark = token.end_mark
                    tag = token.value
            elif self.check_token(TagToken):
                token = self.get_token()
                start_mark = tag_mark = token.start_mark
                end_mark = token.end_mark
                tag = token.value
                if self.check_token(AnchorToken):
                    token = self.get_token()
                    end_mark = token.end_mark
                    anchor = token.value
            if tag is not None:
                handle, suffix = tag
                if handle is not None:
                    if handle not in self.tag_handles:
                        raise ParserError("while parsing a node", start_mark,
                                "found undefined tag handle %r" % handle,
                                tag_mark)
                    tag = self.tag_handles[handle]+suffix
                else:
                    tag = suffix
            #if tag == '!':
            #    raise ParserError("while parsing a node", start_mark,
            #            "found non-specific tag '!'", tag_mark,
            #            "Please check 'http://pyyaml.org/wiki/YAMLNonSpecificTag' and share your opinion.")
            if start_mark is None:
                start_mark = end_mark = self.peek_token().start_mark
            event = None
            implicit = (tag is None or tag == '!')
            if indentless_sequence and self.check_token(BlockEntryToken):
                end_mark = self.peek_token().end_mark
                event = SequenceStartEvent(anchor, tag, implicit,
                        start_mark, end_mark)
                self.state = self.parse_indentless_sequence_entry
            else:
                if self.check_token(ScalarToken):
                    token = self.get_token()
                    end_mark = token.end_mark
                    if (token.plain and tag is None) or tag == '!':
                        implicit = (True, False)
                    elif tag is None:
                        implicit = (False, True)
                    else:
                        implicit = (False, False)
                    event = ScalarEvent(anchor, tag, implicit, token.value,
                            start_mark, end_mark, style=token.style)
                    self.state = self.states.pop()
                elif self.check_token(FlowSequenceStartToken):
                    end_mark = self.peek_token().end_mark
                    event = SequenceStartEvent(anchor, tag, implicit,
                            start_mark, end_mark, flow_style=True)
                    self.state = self.parse_flow_sequence_first_entry
                elif self.check_token(FlowMappingStartToken):
                    end_mark = self.peek_token().end_mark
                    event = MappingStartEvent(anchor, tag, implicit,
                            start_mark, end_mark, flow_style=True)
                    self.state = self.parse_flow_mapping_first_key
                elif block and self.check_token(BlockSequenceStartToken):
                    end_mark = self.peek_token().start_mark
                    event = SequenceStartEvent(anchor, tag, implicit,
                            start_mark, end_mark, flow_style=False)
                    self.state = self.parse_block_sequence_first_entry
                elif block and self.check_token(BlockMappingStartToken):
                    end_mark = self.peek_token().start_mark
                    event = MappingStartEvent(anchor, tag, implicit,
                            start_mark, end_mark, flow_style=False)
                    self.state = self.parse_block_mapping_first_key
                elif anchor is not None or tag is not None:
                    # Empty scalars are allowed even if a tag or an anchor is
                    # specified.
                    event = ScalarEvent(anchor, tag, (implicit, False), '',
                            start_mark, end_mark)
                    self.state = self.states.pop()
                else:
                    if block:
                        node = 'block'
                    else:
                        node = 'flow'
                    token = self.peek_token()
                    raise ParserError("while parsing a %s node" % node, start_mark,
                            "expected the node content, but found %r" % token.id,
                            token.start_mark)
        return event
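
    # Added note: for ScalarEvent the `implicit` value computed above is a pair
    # of flags: the first says the tag may be omitted when the scalar is written
    # in plain style, the second when it is written in any quoted style.
    # Collection start events carry a single boolean flag instead.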

    # block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END

    def parse_block_sequence_first_entry(self):
        token = self.get_token()
        self.marks.append(token.start_mark)
        return self.parse_block_sequence_entry()

    def parse_block_sequence_entry(self):
        if self.check_token(BlockEntryToken):
            token = self.get_token()
            if not self.check_token(BlockEntryToken, BlockEndToken):
                self.states.append(self.parse_block_sequence_entry)
                return self.parse_block_node()
            else:
                self.state = self.parse_block_sequence_entry
                return self.process_empty_scalar(token.end_mark)
        if not self.check_token(BlockEndToken):
            token = self.peek_token()
            raise ParserError("while parsing a block collection", self.marks[-1],
                    "expected <block end>, but found %r" % token.id, token.start_mark)
        token = self.get_token()
        event = SequenceEndEvent(token.start_mark, token.end_mark)
        self.state = self.states.pop()
        self.marks.pop()
        return event

    # indentless_sequence ::= (BLOCK-ENTRY block_node?)+

    def parse_indentless_sequence_entry(self):
        if self.check_token(BlockEntryToken):
            token = self.get_token()
            if not self.check_token(BlockEntryToken,
                    KeyToken, ValueToken, BlockEndToken):
                self.states.append(self.parse_indentless_sequence_entry)
                return self.parse_block_node()
            else:
                self.state = self.parse_indentless_sequence_entry
                return self.process_empty_scalar(token.end_mark)
        token = self.peek_token()
        event = SequenceEndEvent(token.start_mark, token.start_mark)
        self.state = self.states.pop()
        return event
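
    # Added illustration: an "indentless" sequence starts at the same column as
    # its mapping key, e.g.
    #
    #     key:
    #     - a
    #     - b
    #
    # which is why BLOCK-ENTRY appears in the FIRST set of
    # block_node_or_indentless_sequence.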

    # block_mapping     ::= BLOCK-MAPPING-START
    #                       ((KEY block_node_or_indentless_sequence?)?
    #                       (VALUE block_node_or_indentless_sequence?)?)*
    #                       BLOCK-END

    def parse_block_mapping_first_key(self):
        token = self.get_token()
        self.marks.append(token.start_mark)
        return self.parse_block_mapping_key()

    def parse_block_mapping_key(self):
        if self.check_token(KeyToken):
            token = self.get_token()
            if not self.check_token(KeyToken, ValueToken, BlockEndToken):
                self.states.append(self.parse_block_mapping_value)
                return self.parse_block_node_or_indentless_sequence()
            else:
                self.state = self.parse_block_mapping_value
                return self.process_empty_scalar(token.end_mark)
        if not self.check_token(BlockEndToken):
            token = self.peek_token()
            raise ParserError("while parsing a block mapping", self.marks[-1],
                    "expected <block end>, but found %r" % token.id, token.start_mark)
        token = self.get_token()
        event = MappingEndEvent(token.start_mark, token.end_mark)
        self.state = self.states.pop()
        self.marks.pop()
        return event

    def parse_block_mapping_value(self):
        if self.check_token(ValueToken):
            token = self.get_token()
            if not self.check_token(KeyToken, ValueToken, BlockEndToken):
                self.states.append(self.parse_block_mapping_key)
                return self.parse_block_node_or_indentless_sequence()
            else:
                self.state = self.parse_block_mapping_key
                return self.process_empty_scalar(token.end_mark)
        else:
            self.state = self.parse_block_mapping_key
            token = self.peek_token()
            return self.process_empty_scalar(token.start_mark)

    # flow_sequence     ::= FLOW-SEQUENCE-START
    #                       (flow_sequence_entry FLOW-ENTRY)*
    #                       flow_sequence_entry?
    #                       FLOW-SEQUENCE-END
    # flow_sequence_entry   ::= flow_node | KEY flow_node? (VALUE flow_node?)?
    #
    # Note that while production rules for both flow_sequence_entry and
    # flow_mapping_entry are equal, their interpretations are different.
    # For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?`
    # generates an inline mapping (set syntax).
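    #
    # For instance (added): `[ foo: bar, baz ]` is a two-entry sequence whose
    # first entry is the single-pair mapping {foo: bar}; the KEY branch of
    # parse_flow_sequence_entry() wraps it in its own MappingStart/MappingEnd
    # events.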

    def parse_flow_sequence_first_entry(self):
        token = self.get_token()
        self.marks.append(token.start_mark)
        return self.parse_flow_sequence_entry(first=True)

    def parse_flow_sequence_entry(self, first=False):
        if not self.check_token(FlowSequenceEndToken):
            if not first:
                if self.check_token(FlowEntryToken):
                    self.get_token()
                else:
                    token = self.peek_token()
                    raise ParserError("while parsing a flow sequence", self.marks[-1],
                            "expected ',' or ']', but got %r" % token.id, token.start_mark)

            if self.check_token(KeyToken):
                token = self.peek_token()
                event = MappingStartEvent(None, None, True,
                        token.start_mark, token.end_mark,
                        flow_style=True)
                self.state = self.parse_flow_sequence_entry_mapping_key
                return event
            elif not self.check_token(FlowSequenceEndToken):
                self.states.append(self.parse_flow_sequence_entry)
                return self.parse_flow_node()
        token = self.get_token()
        event = SequenceEndEvent(token.start_mark, token.end_mark)
        self.state = self.states.pop()
        self.marks.pop()
        return event

    def parse_flow_sequence_entry_mapping_key(self):
        token = self.get_token()
        if not self.check_token(ValueToken,
                FlowEntryToken, FlowSequenceEndToken):
            self.states.append(self.parse_flow_sequence_entry_mapping_value)
            return self.parse_flow_node()
        else:
            self.state = self.parse_flow_sequence_entry_mapping_value
            return self.process_empty_scalar(token.end_mark)

    def parse_flow_sequence_entry_mapping_value(self):
        if self.check_token(ValueToken):
            token = self.get_token()
            if not self.check_token(FlowEntryToken, FlowSequenceEndToken):
                self.states.append(self.parse_flow_sequence_entry_mapping_end)
                return self.parse_flow_node()
            else:
                self.state = self.parse_flow_sequence_entry_mapping_end
                return self.process_empty_scalar(token.end_mark)
        else:
            self.state = self.parse_flow_sequence_entry_mapping_end
            token = self.peek_token()
            return self.process_empty_scalar(token.start_mark)

    def parse_flow_sequence_entry_mapping_end(self):
        self.state = self.parse_flow_sequence_entry
        token = self.peek_token()
        return MappingEndEvent(token.start_mark, token.start_mark)

    # flow_mapping  ::= FLOW-MAPPING-START
    #                   (flow_mapping_entry FLOW-ENTRY)*
    #                   flow_mapping_entry?
    #                   FLOW-MAPPING-END
    # flow_mapping_entry    ::= flow_node | KEY flow_node? (VALUE flow_node?)?

    def parse_flow_mapping_first_key(self):
        token = self.get_token()
        self.marks.append(token.start_mark)
        return self.parse_flow_mapping_key(first=True)

    def parse_flow_mapping_key(self, first=False):
        if not self.check_token(FlowMappingEndToken):
            if not first:
                if self.check_token(FlowEntryToken):
                    self.get_token()
                else:
                    token = self.peek_token()
                    raise ParserError("while parsing a flow mapping", self.marks[-1],
                            "expected ',' or '}', but got %r" % token.id, token.start_mark)
            if self.check_token(KeyToken):
                token = self.get_token()
                if not self.check_token(ValueToken,
                        FlowEntryToken, FlowMappingEndToken):
                    self.states.append(self.parse_flow_mapping_value)
                    return self.parse_flow_node()
                else:
                    self.state = self.parse_flow_mapping_value
                    return self.process_empty_scalar(token.end_mark)
            elif not self.check_token(FlowMappingEndToken):
                self.states.append(self.parse_flow_mapping_empty_value)
                return self.parse_flow_node()
        token = self.get_token()
        event = MappingEndEvent(token.start_mark, token.end_mark)
        self.state = self.states.pop()
        self.marks.pop()
        return event

    def parse_flow_mapping_value(self):
        if self.check_token(ValueToken):
            token = self.get_token()
            if not self.check_token(FlowEntryToken, FlowMappingEndToken):
                self.states.append(self.parse_flow_mapping_key)
                return self.parse_flow_node()
            else:
                self.state = self.parse_flow_mapping_key
                return self.process_empty_scalar(token.end_mark)
        else:
            self.state = self.parse_flow_mapping_key
            token = self.peek_token()
            return self.process_empty_scalar(token.start_mark)

    def parse_flow_mapping_empty_value(self):
        self.state = self.parse_flow_mapping_key
        return self.process_empty_scalar(self.peek_token().start_mark)
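
    # Added note: keys written without a value, as in `{foo, bar}`, are parsed
    # as plain flow nodes and then routed through
    # parse_flow_mapping_empty_value(), which supplies an empty scalar as the
    # value.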

    def process_empty_scalar(self, mark):
        return ScalarEvent(None, None, (True, False), '', mark, mark)
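
# Typical use of this module goes through the package-level helpers rather than
# by instantiating Parser directly.  A minimal sketch (assuming the standard
# PyYAML loader stack is importable as `yaml`):
#
#     import yaml
#     for event in yaml.parse("- foo\n- bar: baz\n"):
#         print(event)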