__init__.py

# SPDX-License-Identifier: MIT

from .error import *

from .tokens import *
from .events import *
from .nodes import *

from .loader import *
from .dumper import *

__version__ = '3.11'
try:
    from .cyaml import *
    __with_libyaml__ = True
except ImportError:
    __with_libyaml__ = False

import io

def scan(stream, Loader=Loader):
    """
    Scan a YAML stream and produce scanning tokens.
    """
    loader = Loader(stream)
    try:
        while loader.check_token():
            yield loader.get_token()
    finally:
        loader.dispose()

def parse(stream, Loader=Loader):
    """
    Parse a YAML stream and produce parsing events.
    """
    loader = Loader(stream)
    try:
        while loader.check_event():
            yield loader.get_event()
    finally:
        loader.dispose()
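# Illustrative usage (not part of the original module), assuming this package
# is installed as `yaml` (PyYAML): scan() and parse() are lazy generators over
# the low-level token and event streams.
#
#   >>> import yaml
#   >>> for token in yaml.scan("a: 1"):
#   ...     print(token)
#   >>> for event in yaml.parse("a: 1"):
#   ...     print(event)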
def compose(stream, Loader=Loader):
    """
    Parse the first YAML document in a stream
    and produce the corresponding representation tree.
    """
    loader = Loader(stream)
    try:
        return loader.get_single_node()
    finally:
        loader.dispose()

def compose_all(stream, Loader=Loader):
    """
    Parse all YAML documents in a stream
    and produce corresponding representation trees.
    """
    loader = Loader(stream)
    try:
        while loader.check_node():
            yield loader.get_node()
    finally:
        loader.dispose()
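# Illustrative usage (not part of the original module): compose() returns the
# representation tree (node objects from .nodes) rather than native Python
# values.
#
#   >>> import yaml
#   >>> node = yaml.compose("a: 1")
#   >>> node.tag
#   'tag:yaml.org,2002:map'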
def load(stream, Loader=Loader):
    """
    Parse the first YAML document in a stream
    and produce the corresponding Python object.
    """
    loader = Loader(stream)
    try:
        return loader.get_single_data()
    finally:
        loader.dispose()

def load_all(stream, Loader=Loader):
    """
    Parse all YAML documents in a stream
    and produce corresponding Python objects.
    """
    loader = Loader(stream)
    try:
        while loader.check_data():
            yield loader.get_data()
    finally:
        loader.dispose()

def safe_load(stream):
    """
    Parse the first YAML document in a stream
    and produce the corresponding Python object.
    Resolve only basic YAML tags.
    """
    return load(stream, SafeLoader)

def safe_load_all(stream):
    """
    Parse all YAML documents in a stream
    and produce corresponding Python objects.
    Resolve only basic YAML tags.
    """
    return load_all(stream, SafeLoader)
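# Illustrative usage (not part of the original module): safe_load() is the
# sensible entry point for untrusted input, since the full Loader can
# construct arbitrary Python objects from tagged nodes.
#
#   >>> import yaml
#   >>> yaml.safe_load("a: 1\nb: [2, 3]")
#   {'a': 1, 'b': [2, 3]}
#   >>> list(yaml.safe_load_all("--- 1\n--- 2\n"))
#   [1, 2]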
def emit(events, stream=None, Dumper=Dumper,
        canonical=None, indent=None, width=None,
        allow_unicode=None, line_break=None):
    """
    Emit YAML parsing events into a stream.
    If stream is None, return the produced string instead.
    """
    getvalue = None
    if stream is None:
        stream = io.StringIO()
        getvalue = stream.getvalue
    dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
            allow_unicode=allow_unicode, line_break=line_break)
    try:
        for event in events:
            dumper.emit(event)
    finally:
        dumper.dispose()
    if getvalue:
        return getvalue()

def serialize_all(nodes, stream=None, Dumper=Dumper,
        canonical=None, indent=None, width=None,
        allow_unicode=None, line_break=None,
        encoding=None, explicit_start=None, explicit_end=None,
        version=None, tags=None):
    """
    Serialize a sequence of representation trees into a YAML stream.
    If stream is None, return the produced string instead.
    """
    getvalue = None
    if stream is None:
        if encoding is None:
            stream = io.StringIO()
        else:
            stream = io.BytesIO()
        getvalue = stream.getvalue
    dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
            allow_unicode=allow_unicode, line_break=line_break,
            encoding=encoding, version=version, tags=tags,
            explicit_start=explicit_start, explicit_end=explicit_end)
    try:
        dumper.open()
        for node in nodes:
            dumper.serialize(node)
        dumper.close()
    finally:
        dumper.dispose()
    if getvalue:
        return getvalue()

def serialize(node, stream=None, Dumper=Dumper, **kwds):
    """
    Serialize a representation tree into a YAML stream.
    If stream is None, return the produced string instead.
    """
    return serialize_all([node], stream, Dumper=Dumper, **kwds)
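# Illustrative round trips (not part of the original module): the event and
# node streams produced by parse()/compose() can be fed back to emit() and
# serialize() to regenerate YAML text.
#
#   >>> import yaml
#   >>> text_from_events = yaml.emit(yaml.parse("a: 1"))      # via events
#   >>> text_from_nodes = yaml.serialize(yaml.compose("a: 1"))  # via nodes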
def dump_all(documents, stream=None, Dumper=Dumper,
        default_style=None, default_flow_style=None,
        canonical=None, indent=None, width=None,
        allow_unicode=None, line_break=None,
        encoding=None, explicit_start=None, explicit_end=None,
        version=None, tags=None):
    """
    Serialize a sequence of Python objects into a YAML stream.
    If stream is None, return the produced string instead.
    """
    getvalue = None
    if stream is None:
        if encoding is None:
            stream = io.StringIO()
        else:
            stream = io.BytesIO()
        getvalue = stream.getvalue
    dumper = Dumper(stream, default_style=default_style,
            default_flow_style=default_flow_style,
            canonical=canonical, indent=indent, width=width,
            allow_unicode=allow_unicode, line_break=line_break,
            encoding=encoding, version=version, tags=tags,
            explicit_start=explicit_start, explicit_end=explicit_end)
    try:
        dumper.open()
        for data in documents:
            dumper.represent(data)
        dumper.close()
    finally:
        dumper.dispose()
    if getvalue:
        return getvalue()

def dump(data, stream=None, Dumper=Dumper, **kwds):
    """
    Serialize a Python object into a YAML stream.
    If stream is None, return the produced string instead.
    """
    return dump_all([data], stream, Dumper=Dumper, **kwds)

def safe_dump_all(documents, stream=None, **kwds):
    """
    Serialize a sequence of Python objects into a YAML stream.
    Produce only basic YAML tags.
    If stream is None, return the produced string instead.
    """
    return dump_all(documents, stream, Dumper=SafeDumper, **kwds)

def safe_dump(data, stream=None, **kwds):
    """
    Serialize a Python object into a YAML stream.
    Produce only basic YAML tags.
    If stream is None, return the produced string instead.
    """
    return dump_all([data], stream, Dumper=SafeDumper, **kwds)
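# Illustrative usage (not part of the original module): with stream=None the
# YAML text is returned as a str (or bytes when an encoding is given);
# otherwise it is written to the given stream.
#
#   >>> import yaml
#   >>> yaml.safe_dump({'a': 1, 'b': [2, 3]}, default_flow_style=False)
#   'a: 1\nb:\n- 2\n- 3\n'
#   >>> with open('out.yaml', 'w') as f:    # write directly to a file object
#   ...     yaml.safe_dump({'a': 1}, f)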
def add_implicit_resolver(tag, regexp, first=None,
        Loader=Loader, Dumper=Dumper):
    """
    Add an implicit scalar detector.
    If an implicit scalar value matches the given regexp,
    the corresponding tag is assigned to the scalar.
    first is a sequence of possible initial characters or None.
    """
    Loader.add_implicit_resolver(tag, regexp, first)
    Dumper.add_implicit_resolver(tag, regexp, first)

def add_path_resolver(tag, path, kind=None, Loader=Loader, Dumper=Dumper):
    """
    Add a path based resolver for the given tag.
    A path is a list of keys that forms a path
    to a node in the representation tree.
    Keys can be string values, integers, or None.
    """
    Loader.add_path_resolver(tag, path, kind)
    Dumper.add_path_resolver(tag, path, kind)

def add_constructor(tag, constructor, Loader=Loader):
    """
    Add a constructor for the given tag.
    Constructor is a function that accepts a Loader instance
    and a node object and produces the corresponding Python object.
    """
    Loader.add_constructor(tag, constructor)

def add_multi_constructor(tag_prefix, multi_constructor, Loader=Loader):
    """
    Add a multi-constructor for the given tag prefix.
    Multi-constructor is called for a node if its tag starts with tag_prefix.
    Multi-constructor accepts a Loader instance, a tag suffix,
    and a node object and produces the corresponding Python object.
    """
    Loader.add_multi_constructor(tag_prefix, multi_constructor)
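# Illustrative usage (not part of the original module): register a constructor
# for a custom tag. The tag name '!point' and the constructor are made up for
# this example.
#
#   >>> import yaml
#   >>> def point_constructor(loader, node):
#   ...     x, y = loader.construct_sequence(node)
#   ...     return (x, y)
#   >>> yaml.add_constructor('!point', point_constructor)
#   >>> yaml.load('!point [1, 2]', Loader=yaml.Loader)
#   (1, 2)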
def add_representer(data_type, representer, Dumper=Dumper):
    """
    Add a representer for the given type.
    Representer is a function accepting a Dumper instance
    and an instance of the given data type
    and producing the corresponding representation node.
    """
    Dumper.add_representer(data_type, representer)

def add_multi_representer(data_type, multi_representer, Dumper=Dumper):
    """
    Add a representer for the given type.
    Multi-representer is a function accepting a Dumper instance
    and an instance of the given data type or subtype
    and producing the corresponding representation node.
    """
    Dumper.add_multi_representer(data_type, multi_representer)
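# Illustrative usage (not part of the original module): register a representer
# so a custom class (the class `Point` and tag '!point' are made up here)
# dumps as a tagged sequence.
#
#   >>> import yaml
#   >>> class Point:
#   ...     def __init__(self, x, y):
#   ...         self.x, self.y = x, y
#   >>> def point_representer(dumper, data):
#   ...     return dumper.represent_sequence('!point', [data.x, data.y])
#   >>> yaml.add_representer(Point, point_representer)
#   >>> text = yaml.dump(Point(1, 2))    # emits a '!point' tagged sequence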
class YAMLObjectMetaclass(type):
    """
    The metaclass for YAMLObject.
    """
    def __init__(cls, name, bases, kwds):
        super(YAMLObjectMetaclass, cls).__init__(name, bases, kwds)
        if 'yaml_tag' in kwds and kwds['yaml_tag'] is not None:
            cls.yaml_loader.add_constructor(cls.yaml_tag, cls.from_yaml)
            cls.yaml_dumper.add_representer(cls, cls.to_yaml)

class YAMLObject(metaclass=YAMLObjectMetaclass):
    """
    An object that can dump itself to a YAML stream
    and load itself from a YAML stream.
    """

    __slots__ = ()  # no direct instantiation, so allow immutable subclasses

    yaml_loader = Loader
    yaml_dumper = Dumper

    yaml_tag = None
    yaml_flow_style = None

    @classmethod
    def from_yaml(cls, loader, node):
        """
        Convert a representation node to a Python object.
        """
        return loader.construct_yaml_object(node, cls)

    @classmethod
    def to_yaml(cls, dumper, data):
        """
        Convert a Python object to a representation node.
        """
        return dumper.represent_yaml_object(cls.yaml_tag, data, cls,
                flow_style=cls.yaml_flow_style)
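# Illustrative usage (not part of the original module): subclassing YAMLObject
# with a non-None yaml_tag registers the constructor and representer
# automatically via the metaclass. The class `Monster` and tag '!Monster' are
# made up for this example.
#
#   >>> import yaml
#   >>> class Monster(yaml.YAMLObject):
#   ...     yaml_tag = '!Monster'
#   ...     def __init__(self, name, hp):
#   ...         self.name = name
#   ...         self.hp = hp
#   >>> m = yaml.load("!Monster {name: Cave lizard, hp: 16}", Loader=yaml.Loader)
#   >>> m.name
#   'Cave lizard'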