# parser.py
# SPDX-License-Identifier: MIT

# The following YAML grammar is LL(1) and is parsed by a recursive descent
# parser.
#
# stream            ::= STREAM-START implicit_document? explicit_document* STREAM-END
# implicit_document ::= block_node DOCUMENT-END*
# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
# block_node_or_indentless_sequence ::=
#                       ALIAS
#                       | properties (block_content | indentless_block_sequence)?
#                       | block_content
#                       | indentless_block_sequence
# block_node        ::= ALIAS
#                       | properties block_content?
#                       | block_content
# flow_node         ::= ALIAS
#                       | properties flow_content?
#                       | flow_content
# properties        ::= TAG ANCHOR? | ANCHOR TAG?
# block_content     ::= block_collection | flow_collection | SCALAR
# flow_content      ::= flow_collection | SCALAR
# block_collection  ::= block_sequence | block_mapping
# flow_collection   ::= flow_sequence | flow_mapping
# block_sequence    ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
# indentless_sequence   ::= (BLOCK-ENTRY block_node?)+
# block_mapping     ::= BLOCK-MAPPING_START
#                       ((KEY block_node_or_indentless_sequence?)?
#                       (VALUE block_node_or_indentless_sequence?)?)*
#                       BLOCK-END
# flow_sequence     ::= FLOW-SEQUENCE-START
#                       (flow_sequence_entry FLOW-ENTRY)*
#                       flow_sequence_entry?
#                       FLOW-SEQUENCE-END
# flow_sequence_entry   ::= flow_node | KEY flow_node? (VALUE flow_node?)?
# flow_mapping      ::= FLOW-MAPPING-START
#                       (flow_mapping_entry FLOW-ENTRY)*
#                       flow_mapping_entry?
#                       FLOW-MAPPING-END
# flow_mapping_entry    ::= flow_node | KEY flow_node? (VALUE flow_node?)?
#
# FIRST sets:
#
# stream: { STREAM-START }
# explicit_document: { DIRECTIVE DOCUMENT-START }
# implicit_document: FIRST(block_node)
# block_node: { ALIAS TAG ANCHOR SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START }
# flow_node: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START }
# block_content: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
# flow_content: { FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
# block_collection: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START }
# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
# block_sequence: { BLOCK-SEQUENCE-START }
# block_mapping: { BLOCK-MAPPING-START }
# block_node_or_indentless_sequence: { ALIAS ANCHOR TAG SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START BLOCK-ENTRY }
# indentless_sequence: { ENTRY }
# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
# flow_sequence: { FLOW-SEQUENCE-START }
# flow_mapping: { FLOW-MAPPING-START }
# flow_sequence_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
# flow_mapping_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
  61. __all__ = ['Parser', 'ParserError']
  62. from error import MarkedYAMLError
  63. from tokens import *
  64. from events import *
  65. from scanner import *
  66. class ParserError(MarkedYAMLError):
  67. pass
  68. class Parser(object):
  69. # Since writing a recursive-descendant parser is a straightforward task, we
  70. # do not give many comments here.
  71. DEFAULT_TAGS = {
  72. u'!': u'!',
  73. u'!!': u'tag:yaml.org,2002:',
  74. }
  75. def __init__(self):
  76. self.current_event = None
  77. self.yaml_version = None
  78. self.tag_handles = {}
  79. self.states = []
  80. self.marks = []
  81. self.state = self.parse_stream_start
  82. def dispose(self):
  83. # Reset the state attributes (to clear self-references)
  84. self.states = []
  85. self.state = None
  86. def check_event(self, *choices):
  87. # Check the type of the next event.
  88. if self.current_event is None:
  89. if self.state:
  90. self.current_event = self.state()
  91. if self.current_event is not None:
  92. if not choices:
  93. return True
  94. for choice in choices:
  95. if isinstance(self.current_event, choice):
  96. return True
  97. return False
  98. def peek_event(self):
  99. # Get the next event.
  100. if self.current_event is None:
  101. if self.state:
  102. self.current_event = self.state()
  103. return self.current_event
  104. def get_event(self):
  105. # Get the next event and proceed further.
  106. if self.current_event is None:
  107. if self.state:
  108. self.current_event = self.state()
  109. value = self.current_event
  110. self.current_event = None
  111. return value
  112. # stream ::= STREAM-START implicit_document? explicit_document* STREAM-END
  113. # implicit_document ::= block_node DOCUMENT-END*
  114. # explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
  115. def parse_stream_start(self):
  116. # Parse the stream start.
  117. token = self.get_token()
  118. event = StreamStartEvent(token.start_mark, token.end_mark,
  119. encoding=token.encoding)
  120. # Prepare the next state.
  121. self.state = self.parse_implicit_document_start
  122. return event
  123. def parse_implicit_document_start(self):
  124. # Parse an implicit document.
  125. if not self.check_token(DirectiveToken, DocumentStartToken,
  126. StreamEndToken):
  127. self.tag_handles = self.DEFAULT_TAGS
  128. token = self.peek_token()
  129. start_mark = end_mark = token.start_mark
  130. event = DocumentStartEvent(start_mark, end_mark,
  131. explicit=False)
  132. # Prepare the next state.
  133. self.states.append(self.parse_document_end)
  134. self.state = self.parse_block_node
  135. return event
  136. else:
  137. return self.parse_document_start()
  138. def parse_document_start(self):
  139. # Parse any extra document end indicators.
  140. while self.check_token(DocumentEndToken):
  141. self.get_token()
  142. # Parse an explicit document.
  143. if not self.check_token(StreamEndToken):
  144. token = self.peek_token()
  145. start_mark = token.start_mark
  146. version, tags = self.process_directives()
  147. if not self.check_token(DocumentStartToken):
  148. raise ParserError(None, None,
  149. "expected '<document start>', but found %r"
  150. % self.peek_token().id,
  151. self.peek_token().start_mark)
  152. token = self.get_token()
  153. end_mark = token.end_mark
  154. event = DocumentStartEvent(start_mark, end_mark,
  155. explicit=True, version=version, tags=tags)
  156. self.states.append(self.parse_document_end)
  157. self.state = self.parse_document_content
  158. else:
  159. # Parse the end of the stream.
  160. token = self.get_token()
  161. event = StreamEndEvent(token.start_mark, token.end_mark)
  162. assert not self.states
  163. assert not self.marks
  164. self.state = None
  165. return event
  166. def parse_document_end(self):
  167. # Parse the document end.
  168. token = self.peek_token()
  169. start_mark = end_mark = token.start_mark
  170. explicit = False
  171. if self.check_token(DocumentEndToken):
  172. token = self.get_token()
  173. end_mark = token.end_mark
  174. explicit = True
  175. event = DocumentEndEvent(start_mark, end_mark,
  176. explicit=explicit)
  177. # Prepare the next state.
  178. self.state = self.parse_document_start
  179. return event
  180. def parse_document_content(self):
  181. if self.check_token(DirectiveToken,
  182. DocumentStartToken, DocumentEndToken, StreamEndToken):
  183. event = self.process_empty_scalar(self.peek_token().start_mark)
  184. self.state = self.states.pop()
  185. return event
  186. else:
  187. return self.parse_block_node()
  188. def process_directives(self):
  189. self.yaml_version = None
  190. self.tag_handles = {}
  191. while self.check_token(DirectiveToken):
  192. token = self.get_token()
  193. if token.name == u'YAML':
  194. if self.yaml_version is not None:
  195. raise ParserError(None, None,
  196. "found duplicate YAML directive", token.start_mark)
  197. major, minor = token.value
  198. if major != 1:
  199. raise ParserError(None, None,
  200. "found incompatible YAML document (version 1.* is required)",
  201. token.start_mark)
  202. self.yaml_version = token.value
  203. elif token.name == u'TAG':
  204. handle, prefix = token.value
  205. if handle in self.tag_handles:
  206. raise ParserError(None, None,
  207. "duplicate tag handle %r" % handle.encode('utf-8'),
  208. token.start_mark)
  209. self.tag_handles[handle] = prefix
  210. if self.tag_handles:
  211. value = self.yaml_version, self.tag_handles.copy()
  212. else:
  213. value = self.yaml_version, None
  214. for key in self.DEFAULT_TAGS:
  215. if key not in self.tag_handles:
  216. self.tag_handles[key] = self.DEFAULT_TAGS[key]
  217. return value
  218. # block_node_or_indentless_sequence ::= ALIAS
  219. # | properties (block_content | indentless_block_sequence)?
  220. # | block_content
  221. # | indentless_block_sequence
  222. # block_node ::= ALIAS
  223. # | properties block_content?
  224. # | block_content
  225. # flow_node ::= ALIAS
  226. # | properties flow_content?
  227. # | flow_content
  228. # properties ::= TAG ANCHOR? | ANCHOR TAG?
  229. # block_content ::= block_collection | flow_collection | SCALAR
  230. # flow_content ::= flow_collection | SCALAR
  231. # block_collection ::= block_sequence | block_mapping
  232. # flow_collection ::= flow_sequence | flow_mapping
  233. def parse_block_node(self):
  234. return self.parse_node(block=True)
  235. def parse_flow_node(self):
  236. return self.parse_node()
  237. def parse_block_node_or_indentless_sequence(self):
  238. return self.parse_node(block=True, indentless_sequence=True)
  239. def parse_node(self, block=False, indentless_sequence=False):
  240. if self.check_token(AliasToken):
  241. token = self.get_token()
  242. event = AliasEvent(token.value, token.start_mark, token.end_mark)
  243. self.state = self.states.pop()
  244. else:
  245. anchor = None
  246. tag = None
  247. start_mark = end_mark = tag_mark = None
  248. if self.check_token(AnchorToken):
  249. token = self.get_token()
  250. start_mark = token.start_mark
  251. end_mark = token.end_mark
  252. anchor = token.value
  253. if self.check_token(TagToken):
  254. token = self.get_token()
  255. tag_mark = token.start_mark
  256. end_mark = token.end_mark
  257. tag = token.value
  258. elif self.check_token(TagToken):
  259. token = self.get_token()
  260. start_mark = tag_mark = token.start_mark
  261. end_mark = token.end_mark
  262. tag = token.value
  263. if self.check_token(AnchorToken):
  264. token = self.get_token()
  265. end_mark = token.end_mark
  266. anchor = token.value
  267. if tag is not None:
  268. handle, suffix = tag
  269. if handle is not None:
  270. if handle not in self.tag_handles:
  271. raise ParserError("while parsing a node", start_mark,
  272. "found undefined tag handle %r" % handle.encode('utf-8'),
  273. tag_mark)
  274. tag = self.tag_handles[handle]+suffix
  275. else:
  276. tag = suffix
  277. #if tag == u'!':
  278. # raise ParserError("while parsing a node", start_mark,
  279. # "found non-specific tag '!'", tag_mark,
  280. # "Please check 'http://pyyaml.org/wiki/YAMLNonSpecificTag' and share your opinion.")
  281. if start_mark is None:
  282. start_mark = end_mark = self.peek_token().start_mark
  283. event = None
  284. implicit = (tag is None or tag == u'!')
  285. if indentless_sequence and self.check_token(BlockEntryToken):
  286. end_mark = self.peek_token().end_mark
  287. event = SequenceStartEvent(anchor, tag, implicit,
  288. start_mark, end_mark)
  289. self.state = self.parse_indentless_sequence_entry
  290. else:
  291. if self.check_token(ScalarToken):
  292. token = self.get_token()
  293. end_mark = token.end_mark
  294. if (token.plain and tag is None) or tag == u'!':
  295. implicit = (True, False)
  296. elif tag is None:
  297. implicit = (False, True)
  298. else:
  299. implicit = (False, False)
  300. event = ScalarEvent(anchor, tag, implicit, token.value,
  301. start_mark, end_mark, style=token.style)
  302. self.state = self.states.pop()
  303. elif self.check_token(FlowSequenceStartToken):
  304. end_mark = self.peek_token().end_mark
  305. event = SequenceStartEvent(anchor, tag, implicit,
  306. start_mark, end_mark, flow_style=True)
  307. self.state = self.parse_flow_sequence_first_entry
  308. elif self.check_token(FlowMappingStartToken):
  309. end_mark = self.peek_token().end_mark
  310. event = MappingStartEvent(anchor, tag, implicit,
  311. start_mark, end_mark, flow_style=True)
  312. self.state = self.parse_flow_mapping_first_key
  313. elif block and self.check_token(BlockSequenceStartToken):
  314. end_mark = self.peek_token().start_mark
  315. event = SequenceStartEvent(anchor, tag, implicit,
  316. start_mark, end_mark, flow_style=False)
  317. self.state = self.parse_block_sequence_first_entry
  318. elif block and self.check_token(BlockMappingStartToken):
  319. end_mark = self.peek_token().start_mark
  320. event = MappingStartEvent(anchor, tag, implicit,
  321. start_mark, end_mark, flow_style=False)
  322. self.state = self.parse_block_mapping_first_key
  323. elif anchor is not None or tag is not None:
  324. # Empty scalars are allowed even if a tag or an anchor is
  325. # specified.
  326. event = ScalarEvent(anchor, tag, (implicit, False), u'',
  327. start_mark, end_mark)
  328. self.state = self.states.pop()
  329. else:
  330. if block:
  331. node = 'block'
  332. else:
  333. node = 'flow'
  334. token = self.peek_token()
  335. raise ParserError("while parsing a %s node" % node, start_mark,
  336. "expected the node content, but found %r" % token.id,
  337. token.start_mark)
  338. return event
  339. # block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
  340. def parse_block_sequence_first_entry(self):
  341. token = self.get_token()
  342. self.marks.append(token.start_mark)
  343. return self.parse_block_sequence_entry()
  344. def parse_block_sequence_entry(self):
  345. if self.check_token(BlockEntryToken):
  346. token = self.get_token()
  347. if not self.check_token(BlockEntryToken, BlockEndToken):
  348. self.states.append(self.parse_block_sequence_entry)
  349. return self.parse_block_node()
  350. else:
  351. self.state = self.parse_block_sequence_entry
  352. return self.process_empty_scalar(token.end_mark)
  353. if not self.check_token(BlockEndToken):
  354. token = self.peek_token()
  355. raise ParserError("while parsing a block collection", self.marks[-1],
  356. "expected <block end>, but found %r" % token.id, token.start_mark)
  357. token = self.get_token()
  358. event = SequenceEndEvent(token.start_mark, token.end_mark)
  359. self.state = self.states.pop()
  360. self.marks.pop()
  361. return event
  362. # indentless_sequence ::= (BLOCK-ENTRY block_node?)+
  363. def parse_indentless_sequence_entry(self):
  364. if self.check_token(BlockEntryToken):
  365. token = self.get_token()
  366. if not self.check_token(BlockEntryToken,
  367. KeyToken, ValueToken, BlockEndToken):
  368. self.states.append(self.parse_indentless_sequence_entry)
  369. return self.parse_block_node()
  370. else:
  371. self.state = self.parse_indentless_sequence_entry
  372. return self.process_empty_scalar(token.end_mark)
  373. token = self.peek_token()
  374. event = SequenceEndEvent(token.start_mark, token.start_mark)
  375. self.state = self.states.pop()
  376. return event
  377. # block_mapping ::= BLOCK-MAPPING_START
  378. # ((KEY block_node_or_indentless_sequence?)?
  379. # (VALUE block_node_or_indentless_sequence?)?)*
  380. # BLOCK-END
  381. def parse_block_mapping_first_key(self):
  382. token = self.get_token()
  383. self.marks.append(token.start_mark)
  384. return self.parse_block_mapping_key()
  385. def parse_block_mapping_key(self):
  386. if self.check_token(KeyToken):
  387. token = self.get_token()
  388. if not self.check_token(KeyToken, ValueToken, BlockEndToken):
  389. self.states.append(self.parse_block_mapping_value)
  390. return self.parse_block_node_or_indentless_sequence()
  391. else:
  392. self.state = self.parse_block_mapping_value
  393. return self.process_empty_scalar(token.end_mark)
  394. if not self.check_token(BlockEndToken):
  395. token = self.peek_token()
  396. raise ParserError("while parsing a block mapping", self.marks[-1],
  397. "expected <block end>, but found %r" % token.id, token.start_mark)
  398. token = self.get_token()
  399. event = MappingEndEvent(token.start_mark, token.end_mark)
  400. self.state = self.states.pop()
  401. self.marks.pop()
  402. return event
  403. def parse_block_mapping_value(self):
  404. if self.check_token(ValueToken):
  405. token = self.get_token()
  406. if not self.check_token(KeyToken, ValueToken, BlockEndToken):
  407. self.states.append(self.parse_block_mapping_key)
  408. return self.parse_block_node_or_indentless_sequence()
  409. else:
  410. self.state = self.parse_block_mapping_key
  411. return self.process_empty_scalar(token.end_mark)
  412. else:
  413. self.state = self.parse_block_mapping_key
  414. token = self.peek_token()
  415. return self.process_empty_scalar(token.start_mark)
  416. # flow_sequence ::= FLOW-SEQUENCE-START
  417. # (flow_sequence_entry FLOW-ENTRY)*
  418. # flow_sequence_entry?
  419. # FLOW-SEQUENCE-END
  420. # flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
  421. #
  422. # Note that while production rules for both flow_sequence_entry and
  423. # flow_mapping_entry are equal, their interpretations are different.
  424. # For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?`
  425. # generate an inline mapping (set syntax).
  426. def parse_flow_sequence_first_entry(self):
  427. token = self.get_token()
  428. self.marks.append(token.start_mark)
  429. return self.parse_flow_sequence_entry(first=True)
  430. def parse_flow_sequence_entry(self, first=False):
  431. if not self.check_token(FlowSequenceEndToken):
  432. if not first:
  433. if self.check_token(FlowEntryToken):
  434. self.get_token()
  435. else:
  436. token = self.peek_token()
  437. raise ParserError("while parsing a flow sequence", self.marks[-1],
  438. "expected ',' or ']', but got %r" % token.id, token.start_mark)
  439. if self.check_token(KeyToken):
  440. token = self.peek_token()
  441. event = MappingStartEvent(None, None, True,
  442. token.start_mark, token.end_mark,
  443. flow_style=True)
  444. self.state = self.parse_flow_sequence_entry_mapping_key
  445. return event
  446. elif not self.check_token(FlowSequenceEndToken):
  447. self.states.append(self.parse_flow_sequence_entry)
  448. return self.parse_flow_node()
  449. token = self.get_token()
  450. event = SequenceEndEvent(token.start_mark, token.end_mark)
  451. self.state = self.states.pop()
  452. self.marks.pop()
  453. return event
  454. def parse_flow_sequence_entry_mapping_key(self):
  455. token = self.get_token()
  456. if not self.check_token(ValueToken,
  457. FlowEntryToken, FlowSequenceEndToken):
  458. self.states.append(self.parse_flow_sequence_entry_mapping_value)
  459. return self.parse_flow_node()
  460. else:
  461. self.state = self.parse_flow_sequence_entry_mapping_value
  462. return self.process_empty_scalar(token.end_mark)
  463. def parse_flow_sequence_entry_mapping_value(self):
  464. if self.check_token(ValueToken):
  465. token = self.get_token()
  466. if not self.check_token(FlowEntryToken, FlowSequenceEndToken):
  467. self.states.append(self.parse_flow_sequence_entry_mapping_end)
  468. return self.parse_flow_node()
  469. else:
  470. self.state = self.parse_flow_sequence_entry_mapping_end
  471. return self.process_empty_scalar(token.end_mark)
  472. else:
  473. self.state = self.parse_flow_sequence_entry_mapping_end
  474. token = self.peek_token()
  475. return self.process_empty_scalar(token.start_mark)
  476. def parse_flow_sequence_entry_mapping_end(self):
  477. self.state = self.parse_flow_sequence_entry
  478. token = self.peek_token()
  479. return MappingEndEvent(token.start_mark, token.start_mark)
  480. # flow_mapping ::= FLOW-MAPPING-START
  481. # (flow_mapping_entry FLOW-ENTRY)*
  482. # flow_mapping_entry?
  483. # FLOW-MAPPING-END
  484. # flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
  485. def parse_flow_mapping_first_key(self):
  486. token = self.get_token()
  487. self.marks.append(token.start_mark)
  488. return self.parse_flow_mapping_key(first=True)
  489. def parse_flow_mapping_key(self, first=False):
  490. if not self.check_token(FlowMappingEndToken):
  491. if not first:
  492. if self.check_token(FlowEntryToken):
  493. self.get_token()
  494. else:
  495. token = self.peek_token()
  496. raise ParserError("while parsing a flow mapping", self.marks[-1],
  497. "expected ',' or '}', but got %r" % token.id, token.start_mark)
  498. if self.check_token(KeyToken):
  499. token = self.get_token()
  500. if not self.check_token(ValueToken,
  501. FlowEntryToken, FlowMappingEndToken):
  502. self.states.append(self.parse_flow_mapping_value)
  503. return self.parse_flow_node()
  504. else:
  505. self.state = self.parse_flow_mapping_value
  506. return self.process_empty_scalar(token.end_mark)
  507. elif not self.check_token(FlowMappingEndToken):
  508. self.states.append(self.parse_flow_mapping_empty_value)
  509. return self.parse_flow_node()
  510. token = self.get_token()
  511. event = MappingEndEvent(token.start_mark, token.end_mark)
  512. self.state = self.states.pop()
  513. self.marks.pop()
  514. return event
  515. def parse_flow_mapping_value(self):
  516. if self.check_token(ValueToken):
  517. token = self.get_token()
  518. if not self.check_token(FlowEntryToken, FlowMappingEndToken):
  519. self.states.append(self.parse_flow_mapping_key)
  520. return self.parse_flow_node()
  521. else:
  522. self.state = self.parse_flow_mapping_key
  523. return self.process_empty_scalar(token.end_mark)
  524. else:
  525. self.state = self.parse_flow_mapping_key
  526. token = self.peek_token()
  527. return self.process_empty_scalar(token.start_mark)
  528. def parse_flow_mapping_empty_value(self):
  529. self.state = self.parse_flow_mapping_key
  530. return self.process_empty_scalar(self.peek_token().start_mark)
  531. def process_empty_scalar(self, mark):
  532. return ScalarEvent(None, None, (True, False), u'', mark, mark)