__init__.py

from error import *

from tokens import *
from events import *
from nodes import *

from loader import *
from dumper import *

__version__ = '3.12'

# Use the LibYAML-based C extension (CLoader, CDumper, etc.) when available.
try:
    from cyaml import *
    __with_libyaml__ = True
except ImportError:
    __with_libyaml__ = False

def scan(stream, Loader=Loader):
    """
    Scan a YAML stream and produce scanning tokens.
    """
    loader = Loader(stream)
    try:
        while loader.check_token():
            yield loader.get_token()
    finally:
        loader.dispose()
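
# A minimal usage sketch for scan(), assuming the package is imported as
# `yaml`; the token sequence shown is illustrative:
#
#     >>> import yaml
#     >>> for token in yaml.scan("a: 1"):
#     ...     print token.__class__.__name__
#     StreamStartToken
#     BlockMappingStartToken
#     KeyToken
#     ScalarToken
#     ValueToken
#     ScalarToken
#     BlockEndToken
#     StreamEndToken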

def parse(stream, Loader=Loader):
    """
    Parse a YAML stream and produce parsing events.
    """
    loader = Loader(stream)
    try:
        while loader.check_event():
            yield loader.get_event()
    finally:
        loader.dispose()
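
# A minimal usage sketch for parse(); the event sequence shown is
# illustrative:
#
#     >>> for event in yaml.parse("a: 1"):
#     ...     print event.__class__.__name__
#     StreamStartEvent
#     DocumentStartEvent
#     MappingStartEvent
#     ScalarEvent
#     ScalarEvent
#     MappingEndEvent
#     DocumentEndEvent
#     StreamEndEvent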

def compose(stream, Loader=Loader):
    """
    Parse the first YAML document in a stream
    and produce the corresponding representation tree.
    """
    loader = Loader(stream)
    try:
        return loader.get_single_node()
    finally:
        loader.dispose()

def compose_all(stream, Loader=Loader):
    """
    Parse all YAML documents in a stream
    and produce corresponding representation trees.
    """
    loader = Loader(stream)
    try:
        while loader.check_node():
            yield loader.get_node()
    finally:
        loader.dispose()
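
# A sketch of compose(): it returns the representation graph (Node objects)
# for the first document rather than native Python values:
#
#     >>> node = yaml.compose("a: 1")
#     >>> node.tag
#     u'tag:yaml.org,2002:map'
#     >>> key, value = node.value[0]
#     >>> key.value, value.value
#     (u'a', u'1')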

def load(stream, Loader=Loader):
    """
    Parse the first YAML document in a stream
    and produce the corresponding Python object.
    """
    loader = Loader(stream)
    try:
        return loader.get_single_data()
    finally:
        loader.dispose()

def load_all(stream, Loader=Loader):
    """
    Parse all YAML documents in a stream
    and produce corresponding Python objects.
    """
    loader = Loader(stream)
    try:
        while loader.check_data():
            yield loader.get_data()
    finally:
        loader.dispose()
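
# A minimal sketch of load() and load_all(); note that load() with the
# default Loader can construct arbitrary Python objects from tagged input,
# so it should only be used on trusted documents:
#
#     >>> yaml.load("a: 1")
#     {'a': 1}
#     >>> list(yaml.load_all("---\na: 1\n---\nb: 2\n"))
#     [{'a': 1}, {'b': 2}]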

def safe_load(stream):
    """
    Parse the first YAML document in a stream
    and produce the corresponding Python object.
    Resolve only basic YAML tags.
    """
    return load(stream, SafeLoader)
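
# safe_load() is the counterpart of load() restricted to basic YAML tags,
# which makes it the appropriate entry point for untrusted input:
#
#     >>> yaml.safe_load("- 1\n- two\n- null")
#     [1, 'two', None]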

def safe_load_all(stream):
    """
    Parse all YAML documents in a stream
    and produce corresponding Python objects.
    Resolve only basic YAML tags.
    """
    return load_all(stream, SafeLoader)

def emit(events, stream=None, Dumper=Dumper,
        canonical=None, indent=None, width=None,
        allow_unicode=None, line_break=None):
    """
    Emit YAML parsing events into a stream.
    If stream is None, return the produced string instead.
    """
    getvalue = None
    if stream is None:
        from StringIO import StringIO
        stream = StringIO()
        getvalue = stream.getvalue
    dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
            allow_unicode=allow_unicode, line_break=line_break)
    try:
        for event in events:
            dumper.emit(event)
    finally:
        dumper.dispose()
    if getvalue:
        return getvalue()
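
# A sketch of a parse/emit round trip: the events produced by parse() can be
# fed straight back into emit() to re-serialize the stream:
#
#     >>> print yaml.emit(yaml.parse("a: 1")),
#     a: 1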

def serialize_all(nodes, stream=None, Dumper=Dumper,
        canonical=None, indent=None, width=None,
        allow_unicode=None, line_break=None,
        encoding='utf-8', explicit_start=None, explicit_end=None,
        version=None, tags=None):
    """
    Serialize a sequence of representation trees into a YAML stream.
    If stream is None, return the produced string instead.
    """
    getvalue = None
    if stream is None:
        if encoding is None:
            from StringIO import StringIO
        else:
            from cStringIO import StringIO
        stream = StringIO()
        getvalue = stream.getvalue
    dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
            allow_unicode=allow_unicode, line_break=line_break,
            encoding=encoding, version=version, tags=tags,
            explicit_start=explicit_start, explicit_end=explicit_end)
    try:
        dumper.open()
        for node in nodes:
            dumper.serialize(node)
        dumper.close()
    finally:
        dumper.dispose()
    if getvalue:
        return getvalue()

def serialize(node, stream=None, Dumper=Dumper, **kwds):
    """
    Serialize a representation tree into a YAML stream.
    If stream is None, return the produced string instead.
    """
    return serialize_all([node], stream, Dumper=Dumper, **kwds)
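
# A sketch of serialize(): it is the inverse of compose(), turning a
# representation tree back into a YAML character stream:
#
#     >>> print yaml.serialize(yaml.compose("a: 1")),
#     a: 1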

def dump_all(documents, stream=None, Dumper=Dumper,
        default_style=None, default_flow_style=None,
        canonical=None, indent=None, width=None,
        allow_unicode=None, line_break=None,
        encoding='utf-8', explicit_start=None, explicit_end=None,
        version=None, tags=None):
    """
    Serialize a sequence of Python objects into a YAML stream.
    If stream is None, return the produced string instead.
    """
    getvalue = None
    if stream is None:
        if encoding is None:
            from StringIO import StringIO
        else:
            from cStringIO import StringIO
        stream = StringIO()
        getvalue = stream.getvalue
    dumper = Dumper(stream, default_style=default_style,
            default_flow_style=default_flow_style,
            canonical=canonical, indent=indent, width=width,
            allow_unicode=allow_unicode, line_break=line_break,
            encoding=encoding, version=version, tags=tags,
            explicit_start=explicit_start, explicit_end=explicit_end)
    try:
        dumper.open()
        for data in documents:
            dumper.represent(data)
        dumper.close()
    finally:
        dumper.dispose()
    if getvalue:
        return getvalue()

def dump(data, stream=None, Dumper=Dumper, **kwds):
    """
    Serialize a Python object into a YAML stream.
    If stream is None, return the produced string instead.
    """
    return dump_all([data], stream, Dumper=Dumper, **kwds)
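
# A minimal sketch of dump(); keyword arguments such as default_flow_style
# are forwarded to the Dumper:
#
#     >>> print yaml.dump({'a': 1, 'b': [2, 3]}, default_flow_style=False),
#     a: 1
#     b:
#     - 2
#     - 3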

def safe_dump_all(documents, stream=None, **kwds):
    """
    Serialize a sequence of Python objects into a YAML stream.
    Produce only basic YAML tags.
    If stream is None, return the produced string instead.
    """
    return dump_all(documents, stream, Dumper=SafeDumper, **kwds)

def safe_dump(data, stream=None, **kwds):
    """
    Serialize a Python object into a YAML stream.
    Produce only basic YAML tags.
    If stream is None, return the produced string instead.
    """
    return dump_all([data], stream, Dumper=SafeDumper, **kwds)
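
# safe_dump() mirrors safe_load(): it emits only basic YAML tags and raises a
# representer error for objects it cannot express with them:
#
#     >>> print yaml.safe_dump({'a': 1}),
#     a: 1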

def add_implicit_resolver(tag, regexp, first=None,
        Loader=Loader, Dumper=Dumper):
    """
    Add an implicit scalar detector.
    If an implicit scalar value matches the given regexp,
    the corresponding tag is assigned to the scalar.
    first is a sequence of possible initial characters or None.
    """
    Loader.add_implicit_resolver(tag, regexp, first)
    Dumper.add_implicit_resolver(tag, regexp, first)
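
# A sketch of add_implicit_resolver(), adapted from the PyYAML documentation's
# dice example; the !dice tag and the pattern are illustrative:
#
#     >>> import re
#     >>> yaml.add_implicit_resolver(u'!dice', re.compile(r'^\d+d\d+$'),
#     ...                            list(u'0123456789'))
#
# After this, a plain scalar such as 10d6 is resolved to the !dice tag on
# loading and dumping; a matching constructor/representer is still needed to
# map it to a Python value.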

def add_path_resolver(tag, path, kind=None, Loader=Loader, Dumper=Dumper):
    """
    Add a path based resolver for the given tag.
    A path is a list of keys that forms a path
    to a node in the representation tree.
    Keys can be string values, integers, or None.
    """
    Loader.add_path_resolver(tag, path, kind)
    Dumper.add_path_resolver(tag, path, kind)

def add_constructor(tag, constructor, Loader=Loader):
    """
    Add a constructor for the given tag.
    Constructor is a function that accepts a Loader instance
    and a node object and produces the corresponding Python object.
    """
    Loader.add_constructor(tag, constructor)
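
# A sketch of add_constructor(), following the documentation's dice example;
# dice_constructor is an illustrative helper, not part of this module:
#
#     >>> def dice_constructor(loader, node):
#     ...     value = loader.construct_scalar(node)
#     ...     a, b = value.split('d')
#     ...     return int(a), int(b)
#     >>> yaml.add_constructor(u'!dice', dice_constructor)
#     >>> yaml.load("gold: !dice 10d6")
#     {'gold': (10, 6)}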

def add_multi_constructor(tag_prefix, multi_constructor, Loader=Loader):
    """
    Add a multi-constructor for the given tag prefix.
    Multi-constructor is called for a node if its tag starts with tag_prefix.
    Multi-constructor accepts a Loader instance, a tag suffix,
    and a node object and produces the corresponding Python object.
    """
    Loader.add_multi_constructor(tag_prefix, multi_constructor)

def add_representer(data_type, representer, Dumper=Dumper):
    """
    Add a representer for the given type.
    Representer is a function accepting a Dumper instance
    and an instance of the given data type
    and producing the corresponding representation node.
    """
    Dumper.add_representer(data_type, representer)
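
# A sketch of add_representer(); the Dice class and dice_representer are
# illustrative, not part of this module:
#
#     >>> class Dice(tuple):
#     ...     def __repr__(self):
#     ...         return "Dice(%sd%s)" % self
#     >>> def dice_representer(dumper, data):
#     ...     return dumper.represent_scalar(u'!dice', u'%dd%d' % data)
#     >>> yaml.add_representer(Dice, dice_representer)
#     >>> print yaml.dump({'gold': Dice((10, 6))}),
#     {gold: !dice '10d6'}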

def add_multi_representer(data_type, multi_representer, Dumper=Dumper):
    """
    Add a representer for the given type.
    Multi-representer is a function accepting a Dumper instance
    and an instance of the given data type or subtype
    and producing the corresponding representation node.
    """
    Dumper.add_multi_representer(data_type, multi_representer)

class YAMLObjectMetaclass(type):
    """
    The metaclass for YAMLObject.
    """
    def __init__(cls, name, bases, kwds):
        super(YAMLObjectMetaclass, cls).__init__(name, bases, kwds)
        if 'yaml_tag' in kwds and kwds['yaml_tag'] is not None:
            cls.yaml_loader.add_constructor(cls.yaml_tag, cls.from_yaml)
            cls.yaml_dumper.add_representer(cls, cls.to_yaml)

class YAMLObject(object):
    """
    An object that can dump itself to a YAML stream
    and load itself from a YAML stream.
    """
    __metaclass__ = YAMLObjectMetaclass
    __slots__ = ()  # no direct instantiation, so allow immutable subclasses

    yaml_loader = Loader
    yaml_dumper = Dumper

    yaml_tag = None
    yaml_flow_style = None

    def from_yaml(cls, loader, node):
        """
        Convert a representation node to a Python object.
        """
        return loader.construct_yaml_object(node, cls)
    from_yaml = classmethod(from_yaml)

    def to_yaml(cls, dumper, data):
        """
        Convert a Python object to a representation node.
        """
        return dumper.represent_yaml_object(cls.yaml_tag, data, cls,
                flow_style=cls.yaml_flow_style)
    to_yaml = classmethod(to_yaml)
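
# A sketch of a YAMLObject subclass, following the documentation's Monster
# example; the Monster class is illustrative and not part of this module.
# Defining yaml_tag is enough for the metaclass to register from_yaml() as the
# constructor and to_yaml() as the representer with the default Loader/Dumper:
#
#     >>> class Monster(yaml.YAMLObject):
#     ...     yaml_tag = u'!Monster'
#     ...     def __init__(self, name, hp):
#     ...         self.name = name
#     ...         self.hp = hp
#     ...     def __repr__(self):
#     ...         return "Monster(name=%r, hp=%r)" % (self.name, self.hp)
#     >>> yaml.load("!Monster {name: Cave lizard, hp: 5}")
#     Monster(name='Cave lizard', hp=5)
#     >>> print yaml.dump(Monster('Cave lizard', 5)),
#     !Monster {hp: 5, name: Cave lizard}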