__init__.py

from .error import *

from .tokens import *
from .events import *
from .nodes import *

from .loader import *
from .dumper import *

__version__ = '6.0.2'
try:
    from .cyaml import *
    __with_libyaml__ = True
except ImportError:
    __with_libyaml__ = False

import io
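
# Illustrative sketch (not part of the module's API): __with_libyaml__ reports
# whether the libyaml-backed C classes (CSafeLoader, CDumper, ...) were imported
# above, so callers can prefer them when the extension is available.
#
#     import yaml
#     BestLoader = yaml.CSafeLoader if yaml.__with_libyaml__ else yaml.SafeLoader
#     data = yaml.load("name: demo\nversion: 1", Loader=BestLoader)
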
#------------------------------------------------------------------------------
# XXX "Warnings control" is now deprecated. Leaving in the API function to not
# break code that uses it.
#------------------------------------------------------------------------------
def warnings(settings=None):
    if settings is None:
        return {}

#------------------------------------------------------------------------------
def scan(stream, Loader=Loader):
    """
    Scan a YAML stream and produce scanning tokens.
    """
    loader = Loader(stream)
    try:
        while loader.check_token():
            yield loader.get_token()
    finally:
        loader.dispose()

def parse(stream, Loader=Loader):
    """
    Parse a YAML stream and produce parsing events.
    """
    loader = Loader(stream)
    try:
        while loader.check_event():
            yield loader.get_event()
    finally:
        loader.dispose()
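
# Illustrative sketch: scan() and parse() are generators, so tokens and events
# can be consumed lazily from a string or an open file.
#
#     import yaml
#     for event in yaml.parse("a: 1\nb: [2, 3]"):
#         print(type(event).__name__)   # StreamStartEvent, DocumentStartEvent, ...
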
def compose(stream, Loader=Loader):
    """
    Parse the first YAML document in a stream
    and produce the corresponding representation tree.
    """
    loader = Loader(stream)
    try:
        return loader.get_single_node()
    finally:
        loader.dispose()

def compose_all(stream, Loader=Loader):
    """
    Parse all YAML documents in a stream
    and produce corresponding representation trees.
    """
    loader = Loader(stream)
    try:
        while loader.check_node():
            yield loader.get_node()
    finally:
        loader.dispose()

def load(stream, Loader):
    """
    Parse the first YAML document in a stream
    and produce the corresponding Python object.
    """
    loader = Loader(stream)
    try:
        return loader.get_single_data()
    finally:
        loader.dispose()

def load_all(stream, Loader):
    """
    Parse all YAML documents in a stream
    and produce corresponding Python objects.
    """
    loader = Loader(stream)
    try:
        while loader.check_data():
            yield loader.get_data()
    finally:
        loader.dispose()
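
# Illustrative sketch: load() and load_all() take an explicit Loader class;
# SafeLoader is the usual choice for untrusted input.
#
#     import yaml
#     doc = yaml.load("a: 1", Loader=yaml.SafeLoader)                      # {'a': 1}
#     docs = yaml.load_all("---\na: 1\n---\nb: 2\n", Loader=yaml.SafeLoader)
#     print(list(docs))                                                    # [{'a': 1}, {'b': 2}]
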
def full_load(stream):
    """
    Parse the first YAML document in a stream
    and produce the corresponding Python object.
    Resolve all tags except those known to be
    unsafe on untrusted input.
    """
    return load(stream, FullLoader)

def full_load_all(stream):
    """
    Parse all YAML documents in a stream
    and produce corresponding Python objects.
    Resolve all tags except those known to be
    unsafe on untrusted input.
    """
    return load_all(stream, FullLoader)

def safe_load(stream):
    """
    Parse the first YAML document in a stream
    and produce the corresponding Python object.
    Resolve only basic YAML tags. This is known
    to be safe for untrusted input.
    """
    return load(stream, SafeLoader)

def safe_load_all(stream):
    """
    Parse all YAML documents in a stream
    and produce corresponding Python objects.
    Resolve only basic YAML tags. This is known
    to be safe for untrusted input.
    """
    return load_all(stream, SafeLoader)
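
# Illustrative sketch: the *_load convenience wrappers above differ only in the
# Loader they select.
#
#     import yaml
#     yaml.safe_load("x: [1, 2]")                   # {'x': [1, 2]}
#     list(yaml.safe_load_all("---\n1\n---\n2\n"))  # [1, 2]
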
def unsafe_load(stream):
    """
    Parse the first YAML document in a stream
    and produce the corresponding Python object.
    Resolve all tags, even those known to be
    unsafe on untrusted input.
    """
    return load(stream, UnsafeLoader)

def unsafe_load_all(stream):
    """
    Parse all YAML documents in a stream
    and produce corresponding Python objects.
    Resolve all tags, even those known to be
    unsafe on untrusted input.
    """
    return load_all(stream, UnsafeLoader)

def emit(events, stream=None, Dumper=Dumper,
        canonical=None, indent=None, width=None,
        allow_unicode=None, line_break=None):
    """
    Emit YAML parsing events into a stream.
    If stream is None, return the produced string instead.
    """
    getvalue = None
    if stream is None:
        stream = io.StringIO()
        getvalue = stream.getvalue
    dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
            allow_unicode=allow_unicode, line_break=line_break)
    try:
        for event in events:
            dumper.emit(event)
    finally:
        dumper.dispose()
    if getvalue:
        return getvalue()
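
# Illustrative sketch: emit() is the inverse of parse(), so an event stream can
# be round-tripped back into YAML text.
#
#     import yaml
#     text = yaml.emit(yaml.parse("a: 1\n"))   # YAML text equivalent to the input
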
def serialize_all(nodes, stream=None, Dumper=Dumper,
        canonical=None, indent=None, width=None,
        allow_unicode=None, line_break=None,
        encoding=None, explicit_start=None, explicit_end=None,
        version=None, tags=None):
    """
    Serialize a sequence of representation trees into a YAML stream.
    If stream is None, return the produced string instead.
    """
    getvalue = None
    if stream is None:
        if encoding is None:
            stream = io.StringIO()
        else:
            stream = io.BytesIO()
        getvalue = stream.getvalue
    dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
            allow_unicode=allow_unicode, line_break=line_break,
            encoding=encoding, version=version, tags=tags,
            explicit_start=explicit_start, explicit_end=explicit_end)
    try:
        dumper.open()
        for node in nodes:
            dumper.serialize(node)
        dumper.close()
    finally:
        dumper.dispose()
    if getvalue:
        return getvalue()
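
# Illustrative sketch: serialize()/serialize_all() are the node-level
# counterparts of emit(), pairing with compose()/compose_all().
#
#     import yaml
#     node = yaml.compose("a: [1, 2]")
#     text = yaml.serialize(node)   # YAML text for the same representation tree
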
def serialize(node, stream=None, Dumper=Dumper, **kwds):
    """
    Serialize a representation tree into a YAML stream.
    If stream is None, return the produced string instead.
    """
    return serialize_all([node], stream, Dumper=Dumper, **kwds)

def dump_all(documents, stream=None, Dumper=Dumper,
        default_style=None, default_flow_style=False,
        canonical=None, indent=None, width=None,
        allow_unicode=None, line_break=None,
        encoding=None, explicit_start=None, explicit_end=None,
        version=None, tags=None, sort_keys=True):
    """
    Serialize a sequence of Python objects into a YAML stream.
    If stream is None, return the produced string instead.
    """
    getvalue = None
    if stream is None:
        if encoding is None:
            stream = io.StringIO()
        else:
            stream = io.BytesIO()
        getvalue = stream.getvalue
    dumper = Dumper(stream, default_style=default_style,
            default_flow_style=default_flow_style,
            canonical=canonical, indent=indent, width=width,
            allow_unicode=allow_unicode, line_break=line_break,
            encoding=encoding, version=version, tags=tags,
            explicit_start=explicit_start, explicit_end=explicit_end, sort_keys=sort_keys)
    try:
        dumper.open()
        for data in documents:
            dumper.represent(data)
        dumper.close()
    finally:
        dumper.dispose()
    if getvalue:
        return getvalue()

def dump(data, stream=None, Dumper=Dumper, **kwds):
    """
    Serialize a Python object into a YAML stream.
    If stream is None, return the produced string instead.
    """
    return dump_all([data], stream, Dumper=Dumper, **kwds)
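
# Illustrative sketch: with stream=None the produced text is returned; passing
# an open file (a hypothetical out.yaml here) writes to it instead, and the
# encoding argument switches the output to bytes.
#
#     import yaml
#     yaml.dump({'a': 1, 'b': [2, 3]})       # "a: 1\nb:\n- 2\n- 3\n"
#     with open("out.yaml", "w") as f:
#         yaml.dump({'a': 1}, stream=f)
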
def safe_dump_all(documents, stream=None, **kwds):
    """
    Serialize a sequence of Python objects into a YAML stream.
    Produce only basic YAML tags.
    If stream is None, return the produced string instead.
    """
    return dump_all(documents, stream, Dumper=SafeDumper, **kwds)

def safe_dump(data, stream=None, **kwds):
    """
    Serialize a Python object into a YAML stream.
    Produce only basic YAML tags.
    If stream is None, return the produced string instead.
    """
    return dump_all([data], stream, Dumper=SafeDumper, **kwds)

def add_implicit_resolver(tag, regexp, first=None,
        Loader=None, Dumper=Dumper):
    """
    Add an implicit scalar detector.
    If an implicit scalar value matches the given regexp,
    the corresponding tag is assigned to the scalar.
    first is a sequence of possible initial characters or None.
    """
    if Loader is None:
        loader.Loader.add_implicit_resolver(tag, regexp, first)
        loader.FullLoader.add_implicit_resolver(tag, regexp, first)
        loader.UnsafeLoader.add_implicit_resolver(tag, regexp, first)
    else:
        Loader.add_implicit_resolver(tag, regexp, first)
    Dumper.add_implicit_resolver(tag, regexp, first)
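
# Illustrative sketch with a hypothetical !dimension tag: scalars matching the
# pattern resolve to that tag, which a matching constructor and representer
# would then handle.
#
#     import re, yaml
#     yaml.add_implicit_resolver('!dimension', re.compile(r'^\d+x\d+$'),
#                                first=list('0123456789'))
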
def add_path_resolver(tag, path, kind=None, Loader=None, Dumper=Dumper):
    """
    Add a path based resolver for the given tag.
    A path is a list of keys that forms a path
    to a node in the representation tree.
    Keys can be string values, integers, or None.
    """
    if Loader is None:
        loader.Loader.add_path_resolver(tag, path, kind)
        loader.FullLoader.add_path_resolver(tag, path, kind)
        loader.UnsafeLoader.add_path_resolver(tag, path, kind)
    else:
        Loader.add_path_resolver(tag, path, kind)
    Dumper.add_path_resolver(tag, path, kind)

def add_constructor(tag, constructor, Loader=None):
    """
    Add a constructor for the given tag.
    Constructor is a function that accepts a Loader instance
    and a node object and produces the corresponding Python object.
    """
    if Loader is None:
        loader.Loader.add_constructor(tag, constructor)
        loader.FullLoader.add_constructor(tag, constructor)
        loader.UnsafeLoader.add_constructor(tag, constructor)
    else:
        Loader.add_constructor(tag, constructor)
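
# Illustrative sketch with a hypothetical !point tag: a constructor turns nodes
# carrying that tag into Python objects during loading.
#
#     import yaml
#     def construct_point(loader, node):
#         x, y = loader.construct_sequence(node)
#         return (x, y)
#     yaml.add_constructor('!point', construct_point, Loader=yaml.SafeLoader)
#     yaml.load("!point [1, 2]", Loader=yaml.SafeLoader)   # (1, 2)
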
def add_multi_constructor(tag_prefix, multi_constructor, Loader=None):
    """
    Add a multi-constructor for the given tag prefix.
    Multi-constructor is called for a node if its tag starts with tag_prefix.
    Multi-constructor accepts a Loader instance, a tag suffix,
    and a node object and produces the corresponding Python object.
    """
    if Loader is None:
        loader.Loader.add_multi_constructor(tag_prefix, multi_constructor)
        loader.FullLoader.add_multi_constructor(tag_prefix, multi_constructor)
        loader.UnsafeLoader.add_multi_constructor(tag_prefix, multi_constructor)
    else:
        Loader.add_multi_constructor(tag_prefix, multi_constructor)

def add_representer(data_type, representer, Dumper=Dumper):
    """
    Add a representer for the given type.
    Representer is a function accepting a Dumper instance
    and an instance of the given data type
    and producing the corresponding representation node.
    """
    Dumper.add_representer(data_type, representer)

def add_multi_representer(data_type, multi_representer, Dumper=Dumper):
    """
    Add a multi-representer for the given type.
    Multi-representer is a function accepting a Dumper instance
    and an instance of the given data type or subtype
    and producing the corresponding representation node.
    """
    Dumper.add_multi_representer(data_type, multi_representer)
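
# Illustrative sketch with a hypothetical Point class: a representer maps
# instances of a Python type to representation nodes during dumping.
#
#     import yaml
#     class Point:
#         def __init__(self, x, y):
#             self.x, self.y = x, y
#     def represent_point(dumper, data):
#         return dumper.represent_sequence('!point', [data.x, data.y])
#     yaml.add_representer(Point, represent_point)
#     yaml.dump(Point(1, 2))   # "!point\n- 1\n- 2\n"
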
class YAMLObjectMetaclass(type):
    """
    The metaclass for YAMLObject.
    """
    def __init__(cls, name, bases, kwds):
        super(YAMLObjectMetaclass, cls).__init__(name, bases, kwds)
        if 'yaml_tag' in kwds and kwds['yaml_tag'] is not None:
            if isinstance(cls.yaml_loader, list):
                for loader in cls.yaml_loader:
                    loader.add_constructor(cls.yaml_tag, cls.from_yaml)
            else:
                cls.yaml_loader.add_constructor(cls.yaml_tag, cls.from_yaml)
            cls.yaml_dumper.add_representer(cls, cls.to_yaml)

class YAMLObject(metaclass=YAMLObjectMetaclass):
    """
    An object that can dump itself to a YAML stream
    and load itself from a YAML stream.
    """

    __slots__ = ()  # no direct instantiation, so allow immutable subclasses

    yaml_loader = [Loader, FullLoader, UnsafeLoader]
    yaml_dumper = Dumper

    yaml_tag = None
    yaml_flow_style = None

    @classmethod
    def from_yaml(cls, loader, node):
        """
        Convert a representation node to a Python object.
        """
        return loader.construct_yaml_object(node, cls)

    @classmethod
    def to_yaml(cls, dumper, data):
        """
        Convert a Python object to a representation node.
        """
        return dumper.represent_yaml_object(cls.yaml_tag, data, cls,
                flow_style=cls.yaml_flow_style)
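
# Illustrative sketch with a hypothetical Monster subclass: setting yaml_tag is
# enough for the metaclass above to register the constructor and representer.
#
#     import yaml
#     class Monster(yaml.YAMLObject):
#         yaml_tag = '!Monster'
#         yaml_loader = yaml.SafeLoader
#         def __init__(self, name, hp):
#             self.name, self.hp = name, hp
#     yaml.safe_load("!Monster {name: dragon, hp: 50}")   # a Monster instance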