author     xi <xi@18f92427-320e-0410-9341-c67f048884a3>    2006-08-03 16:07:29 +0000
committer  xi <xi@18f92427-320e-0410-9341-c67f048884a3>    2006-08-03 16:07:29 +0000
commit     728d62da5c3915c4c2d76a00c93cfae031639391 (patch)
tree       07db11c9a08e9a4790c04a86fbc7d47db61b12be
parent     9459ede7f9520011e44371c98a9d6173bfeb7cf6 (diff)
Subclass all base classes from `object`.
Hold references to the objects being represented (should fix #22).
The value of a mapping node is now represented as a list of `(key, value)` pairs.
Sort dictionary items (fix #23).
Recursive structures are now loaded and dumped correctly, including complex structures such as recursive tuples (fix #5). Thanks to Peter Murphy for the patches.
To make this possible, constructor functions are allowed to be generators; in that case, the first generated value is the object, and any further values produced by the constructor are ignored.
Make the Representer no longer try to guess `!!pairs` when a list is represented; a `!!pairs` node must now be constructed explicitly.
Do not check for duplicate mapping keys, as the check did not work correctly anyway.

git-svn-id: http://svn.pyyaml.org/pyyaml/trunk@222 18f92427-320e-0410-9341-c67f048884a3
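A minimal sketch of the behaviour this commit enables, assuming the post-commit `yaml.dump`/`yaml.load` API; the emitted anchor name is illustrative only:

    import yaml

    # A self-referencing list: before this change, dumping a recursive object
    # raised an error and loading an alias that points back to its own anchor
    # could not be resolved.
    value = []
    value.append(value)

    text = yaml.dump(value)        # emits something like "&id001\n- *id001\n"
    loaded = yaml.load(text)
    assert loaded[0] is loaded     # the cycle survives a dump/load round trip

On the loading side the mechanism is the generator-constructor protocol described above: a constructor may yield its still-empty object first, so the object is registered for the node before its children are constructed, and the remaining iterations of the generator only finish populating it. A hedged sketch of a custom constructor written in that style (the `!box` tag and `Box` class are made up for illustration; registration on the Loader class follows the pattern used in the tests below):

    import yaml

    class Box(object):
        pass

    def construct_box(loader, node):
        box = Box()
        yield box                                        # expose the object before recursing
        state = loader.construct_mapping(node, deep=True)
        box.__dict__.update(state)                       # fill it in afterwards

    yaml.Loader.add_constructor(u'!box', construct_box)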
-rw-r--r--  lib/yaml/composer.py  15
-rw-r--r--  lib/yaml/constructor.py  226
-rw-r--r--  lib/yaml/emitter.py  4
-rw-r--r--  lib/yaml/error.py  2
-rw-r--r--  lib/yaml/events.py  2
-rw-r--r--  lib/yaml/nodes.py  2
-rw-r--r--  lib/yaml/parser.py  2
-rw-r--r--  lib/yaml/reader.py  2
-rw-r--r--  lib/yaml/representer.py  128
-rw-r--r--  lib/yaml/resolver.py  2
-rw-r--r--  lib/yaml/scanner.py  6
-rw-r--r--  lib/yaml/serializer.py  24
-rw-r--r--  lib/yaml/tokens.py  2
-rw-r--r--  tests/data/duplicate-key.former-loader-error.code  1
-rw-r--r--  tests/data/duplicate-key.former-loader-error.data (renamed from tests/data/duplicate-key.loader-error)  0
-rw-r--r--  tests/data/duplicate-mapping-key.former-loader-error.code  1
-rw-r--r--  tests/data/duplicate-mapping-key.former-loader-error.data (renamed from tests/data/duplicate-mapping-key.loader-error)  0
-rw-r--r--  tests/data/duplicate-merge-key.former-loader-error.code  1
-rw-r--r--  tests/data/duplicate-merge-key.former-loader-error.data (renamed from tests/data/duplicate-merge-key.loader-error)  0
-rw-r--r--  tests/data/duplicate-value-key.former-loader-error.code  1
-rw-r--r--  tests/data/duplicate-value-key.former-loader-error.data (renamed from tests/data/duplicate-value-key.loader-error)  0
-rw-r--r--  tests/data/recurive-list.recursive  2
-rw-r--r--  tests/data/recursive-anchor.former-loader-error (renamed from tests/data/recursive-anchor.loader-error)  0
-rw-r--r--  tests/data/recursive-dict.recursive  3
-rw-r--r--  tests/data/recursive-set.recursive  3
-rw-r--r--  tests/data/recursive-state.recursive  2
-rw-r--r--  tests/data/recursive-tuple.recursive  3
-rw-r--r--  tests/data/recursive.former-dumper-error (renamed from tests/data/recursive.dumper-error)  0
-rw-r--r--  tests/test_constructor.py  4
-rw-r--r--  tests/test_recursive.py  60
-rw-r--r--  tests/test_resolver.py  3
-rw-r--r--  tests/test_structure.py  2
32 files changed, 295 insertions, 208 deletions
diff --git a/lib/yaml/composer.py b/lib/yaml/composer.py
index ed27dfb..1e380f4 100644
--- a/lib/yaml/composer.py
+++ b/lib/yaml/composer.py
@@ -8,7 +8,7 @@ from nodes import *
class ComposerError(MarkedYAMLError):
pass
-class Composer:
+class Composer(object):
def __init__(self):
self.anchors = {}
@@ -99,19 +99,20 @@ class Composer:
tag = start_event.tag
if tag is None or tag == u'!':
tag = self.resolve(MappingNode, None, start_event.implicit)
- node = MappingNode(tag, {},
+ node = MappingNode(tag, [],
start_event.start_mark, None,
flow_style=start_event.flow_style)
if anchor is not None:
self.anchors[anchor] = node
while not self.check_event(MappingEndEvent):
- key_event = self.peek_event()
+ #key_event = self.peek_event()
item_key = self.compose_node(node, None)
- if item_key in node.value:
- raise ComposerError("while composing a mapping", start_event.start_mark,
- "found duplicate key", key_event.start_mark)
+ #if item_key in node.value:
+ # raise ComposerError("while composing a mapping", start_event.start_mark,
+ # "found duplicate key", key_event.start_mark)
item_value = self.compose_node(node, item_key)
- node.value[item_key] = item_value
+ #node.value[item_key] = item_value
+ node.value.append((item_key, item_value))
end_event = self.get_event()
node.end_mark = end_event.end_mark
return node
diff --git a/lib/yaml/constructor.py b/lib/yaml/constructor.py
index 7e47463..cbbcd78 100644
--- a/lib/yaml/constructor.py
+++ b/lib/yaml/constructor.py
@@ -21,7 +21,7 @@ import binascii, re, sys
class ConstructorError(MarkedYAMLError):
pass
-class BaseConstructor:
+class BaseConstructor(object):
yaml_constructors = {}
yaml_multi_constructors = {}
@@ -29,6 +29,8 @@ class BaseConstructor:
def __init__(self):
self.constructed_objects = {}
self.recursive_objects = {}
+ self.state_generators = []
+ self.deep_construct = False
def check_data(self):
# If there are more documents available?
@@ -39,130 +41,115 @@ class BaseConstructor:
if self.check_node():
return self.construct_document(self.get_node())
+ def g(): yield None
+ generator_type = type(g())
+ del g
+
def construct_document(self, node):
data = self.construct_object(node)
+ while self.state_generators:
+ state_generators = self.state_generators
+ self.state_generators = []
+ for generator in state_generators:
+ for dummy in generator:
+ pass
self.constructed_objects = {}
self.recursive_objects = {}
+ self.deep_construct = False
return data
- def construct_object(self, node):
+ def construct_object(self, node, deep=False):
+ if deep:
+ old_deep = self.deep_construct
+ self.deep_construct = True
if node in self.constructed_objects:
return self.constructed_objects[node]
if node in self.recursive_objects:
raise ConstructorError(None, None,
- "found recursive node", node.start_mark)
+ "found unconstructable recursive node", node.start_mark)
self.recursive_objects[node] = None
constructor = None
+ state_constructor = None
+ tag_suffix = None
if node.tag in self.yaml_constructors:
- constructor = lambda node: self.yaml_constructors[node.tag](self, node)
+ constructor = self.yaml_constructors[node.tag]
else:
for tag_prefix in self.yaml_multi_constructors:
if node.tag.startswith(tag_prefix):
tag_suffix = node.tag[len(tag_prefix):]
- constructor = lambda node: \
- self.yaml_multi_constructors[tag_prefix](self, tag_suffix, node)
+ constructor = self.yaml_multi_constructors[tag_prefix]
break
else:
if None in self.yaml_multi_constructors:
- constructor = lambda node: \
- self.yaml_multi_constructors[None](self, node.tag, node)
+ tag_suffix = node.tag
+ constructor = self.yaml_multi_constructors[None]
elif None in self.yaml_constructors:
- constructor = lambda node: \
- self.yaml_constructors[None](self, node)
+ constructor = self.yaml_constructors[None]
elif isinstance(node, ScalarNode):
- constructor = self.construct_scalar
+ constructor = self.__class__.construct_scalar
elif isinstance(node, SequenceNode):
- constructor = self.construct_sequence
+ constructor = self.__class__.construct_sequence
elif isinstance(node, MappingNode):
- constructor = self.construct_mapping
- else:
- print node.tag
- data = constructor(node)
+ constructor = self.__class__.construct_mapping
+ if tag_suffix is None:
+ data = constructor(self, node)
+ else:
+ data = constructor(self, tag_suffix, node)
+ if isinstance(data, self.generator_type):
+ generator = data
+ data = generator.next()
+ if self.deep_construct:
+ for dummy in generator:
+ pass
+ else:
+ self.state_generators.append(generator)
self.constructed_objects[node] = data
del self.recursive_objects[node]
+ if deep:
+ self.deep_construct = old_deep
return data
def construct_scalar(self, node):
if not isinstance(node, ScalarNode):
- if isinstance(node, MappingNode):
- for key_node in node.value:
- if key_node.tag == u'tag:yaml.org,2002:value':
- return self.construct_scalar(node.value[key_node])
raise ConstructorError(None, None,
"expected a scalar node, but found %s" % node.id,
node.start_mark)
return node.value
- def construct_sequence(self, node):
+ def construct_sequence(self, node, deep=False):
if not isinstance(node, SequenceNode):
raise ConstructorError(None, None,
"expected a sequence node, but found %s" % node.id,
node.start_mark)
- return [self.construct_object(child) for child in node.value]
+ return [self.construct_object(child, deep=deep)
+ for child in node.value]
- def construct_mapping(self, node):
+ def construct_mapping(self, node, deep=False):
if not isinstance(node, MappingNode):
raise ConstructorError(None, None,
"expected a mapping node, but found %s" % node.id,
node.start_mark)
mapping = {}
- merge = None
- for key_node in node.value:
- if key_node.tag == u'tag:yaml.org,2002:merge':
- if merge is not None:
- raise ConstructorError("while constructing a mapping", node.start_mark,
- "found duplicate merge key", key_node.start_mark)
- value_node = node.value[key_node]
- if isinstance(value_node, MappingNode):
- merge = [self.construct_mapping(value_node)]
- elif isinstance(value_node, SequenceNode):
- merge = []
- for subnode in value_node.value:
- if not isinstance(subnode, MappingNode):
- raise ConstructorError("while constructing a mapping",
- node.start_mark,
- "expected a mapping for merging, but found %s"
- % subnode.id, subnode.start_mark)
- merge.append(self.construct_mapping(subnode))
- merge.reverse()
- else:
- raise ConstructorError("while constructing a mapping", node.start_mark,
- "expected a mapping or list of mappings for merging, but found %s"
- % value_node.id, value_node.start_mark)
- elif key_node.tag == u'tag:yaml.org,2002:value':
- if '=' in mapping:
- raise ConstructorError("while construction a mapping", node.start_mark,
- "found duplicate value key", key_node.start_mark)
- value = self.construct_object(node.value[key_node])
- mapping['='] = value
- else:
- key = self.construct_object(key_node)
- try:
- duplicate_key = key in mapping
- except TypeError, exc:
- raise ConstructorError("while constructing a mapping", node.start_mark,
- "found unacceptable key (%s)" % exc, key_node.start_mark)
- if duplicate_key:
- raise ConstructorError("while constructing a mapping", node.start_mark,
- "found duplicate key", key_node.start_mark)
- value = self.construct_object(node.value[key_node])
- mapping[key] = value
- if merge is not None:
- merge.append(mapping)
- mapping = {}
- for submapping in merge:
- mapping.update(submapping)
+ for key_node, value_node in node.value:
+ key = self.construct_object(key_node, deep=deep)
+ try:
+ hash(key)
+ except TypeError, exc:
+ raise ConstructorError("while constructing a mapping", node.start_mark,
+ "found unacceptable key (%s)" % exc, key_node.start_mark)
+ value = self.construct_object(value_node, deep=deep)
+ mapping[key] = value
return mapping
- def construct_pairs(self, node):
+ def construct_pairs(self, node, deep=False):
if not isinstance(node, MappingNode):
raise ConstructorError(None, None,
"expected a mapping node, but found %s" % node.id,
node.start_mark)
pairs = []
- for key_node in node.value:
- key = self.construct_object(key_node)
- value = self.construct_object(node.value[key_node])
+ for key_node, value_node in node.value:
+ key = self.construct_object(key_node, deep=deep)
+ value = self.construct_object(value_node, deep=deep)
pairs.append((key, value))
return pairs
@@ -180,6 +167,53 @@ class BaseConstructor:
class SafeConstructor(BaseConstructor):
+ def construct_scalar(self, node):
+ if isinstance(node, MappingNode):
+ for key_node, value_node in node.value:
+ if key_node.tag == u'tag:yaml.org,2002:value':
+ return self.construct_scalar(value_node)
+ return BaseConstructor.construct_scalar(self, node)
+
+ def flatten_mapping(self, node):
+ merge = []
+ index = 0
+ while index < len(node.value):
+ key_node, value_node = node.value[index]
+ if key_node.tag == u'tag:yaml.org,2002:merge':
+ del node.value[index]
+ if isinstance(value_node, MappingNode):
+ self.flatten_mapping(value_node)
+ merge.extend(value_node.value)
+ elif isinstance(value_node, SequenceNode):
+ submerge = []
+ for subnode in value_node.value:
+ if not isinstance(subnode, MappingNode):
+ raise ConstructorError("while constructing a mapping",
+ node.start_mark,
+ "expected a mapping for merging, but found %s"
+ % subnode.id, subnode.start_mark)
+ self.flatten_mapping(subnode)
+ submerge.append(subnode.value)
+ submerge.reverse()
+ for value in submerge:
+ merge.extend(value)
+ else:
+ raise ConstructorError("while constructing a mapping", node.start_mark,
+ "expected a mapping or list of mappings for merging, but found %s"
+ % value_node.id, value_node.start_mark)
+ elif key_node.tag == u'tag:yaml.org,2002:value':
+ key_node.tag = u'tag:yaml.org,2002:str'
+ index += 1
+ else:
+ index += 1
+ if merge:
+ node.value = merge + node.value
+
+ def construct_mapping(self, node, deep=False):
+ if isinstance(node, MappingNode):
+ self.flatten_mapping(node)
+ return BaseConstructor.construct_mapping(self, node, deep=deep)
+
def construct_yaml_null(self, node):
self.construct_scalar(node)
return None
@@ -296,10 +330,11 @@ class SafeConstructor(BaseConstructor):
def construct_yaml_omap(self, node):
# Note: we do not check for duplicate keys, because it's too
# CPU-expensive.
+ omap = []
+ yield omap
if not isinstance(node, SequenceNode):
raise ConstructorError("while constructing an ordered map", node.start_mark,
"expected a sequence, but found %s" % node.id, node.start_mark)
- omap = []
for subnode in node.value:
if not isinstance(subnode, MappingNode):
raise ConstructorError("while constructing an ordered map", node.start_mark,
@@ -309,18 +344,18 @@ class SafeConstructor(BaseConstructor):
raise ConstructorError("while constructing an ordered map", node.start_mark,
"expected a single mapping item, but found %d items" % len(subnode.value),
subnode.start_mark)
- key_node = subnode.value.keys()[0]
+ key_node, value_node = subnode.value[0]
key = self.construct_object(key_node)
- value = self.construct_object(subnode.value[key_node])
+ value = self.construct_object(value_node)
omap.append((key, value))
- return omap
def construct_yaml_pairs(self, node):
# Note: the same code as `construct_yaml_omap`.
+ pairs = []
+ yield pairs
if not isinstance(node, SequenceNode):
raise ConstructorError("while constructing pairs", node.start_mark,
"expected a sequence, but found %s" % node.id, node.start_mark)
- pairs = []
for subnode in node.value:
if not isinstance(subnode, MappingNode):
raise ConstructorError("while constructing pairs", node.start_mark,
@@ -330,15 +365,16 @@ class SafeConstructor(BaseConstructor):
raise ConstructorError("while constructing pairs", node.start_mark,
"expected a single mapping item, but found %d items" % len(subnode.value),
subnode.start_mark)
- key_node = subnode.value.keys()[0]
+ key_node, value_node = subnode.value[0]
key = self.construct_object(key_node)
- value = self.construct_object(subnode.value[key_node])
+ value = self.construct_object(value_node)
pairs.append((key, value))
- return pairs
def construct_yaml_set(self, node):
+ data = set()
+ yield data
value = self.construct_mapping(node)
- return set(value)
+ data.update(value)
def construct_yaml_str(self, node):
value = self.construct_scalar(node)
@@ -348,19 +384,25 @@ class SafeConstructor(BaseConstructor):
return value
def construct_yaml_seq(self, node):
- return self.construct_sequence(node)
+ data = []
+ yield data
+ data.extend(self.construct_sequence(node))
def construct_yaml_map(self, node):
- return self.construct_mapping(node)
+ data = {}
+ yield data
+ value = self.construct_mapping(node)
+ data.update(value)
def construct_yaml_object(self, node, cls):
- state = self.construct_mapping(node)
data = cls.__new__(cls)
+ yield data
if hasattr(data, '__setstate__'):
+ state = self.construct_mapping(node, deep=True)
data.__setstate__(state)
else:
+ state = self.construct_mapping(node)
data.__dict__.update(state)
- return data
def construct_undefined(self, node):
raise ConstructorError(None, None,
@@ -434,7 +476,7 @@ class Constructor(SafeConstructor):
return complex(self.construct_scalar(node))
def construct_python_tuple(self, node):
- return tuple(self.construct_yaml_seq(node))
+ return tuple(self.construct_sequence(node))
def find_python_module(self, name, mark):
if not name:
@@ -525,9 +567,10 @@ class Constructor(SafeConstructor):
# Format:
# !!python/object:module.name { ... state ... }
instance = self.make_python_instance(suffix, node, newobj=True)
- state = self.construct_mapping(node)
+ yield instance
+ deep = hasattr(instance, '__setstate__')
+ state = self.construct_mapping(node, deep=deep)
self.set_python_instance_state(instance, state)
- return instance
def construct_python_object_apply(self, suffix, node, newobj=False):
# Format:
@@ -542,13 +585,13 @@ class Constructor(SafeConstructor):
# The difference between !!python/object/apply and !!python/object/new
# is how an object is created, check make_python_instance for details.
if isinstance(node, SequenceNode):
- args = self.construct_sequence(node)
+ args = self.construct_sequence(node, deep=True)
kwds = {}
state = {}
listitems = []
dictitems = {}
else:
- value = self.construct_mapping(node)
+ value = self.construct_mapping(node, deep=True)
args = value.get('args', [])
kwds = value.get('kwds', {})
state = value.get('state', {})
@@ -567,7 +610,6 @@ class Constructor(SafeConstructor):
def construct_python_object_new(self, suffix, node):
return self.construct_python_object_apply(suffix, node, newobj=True)
-
Constructor.add_constructor(
u'tag:yaml.org,2002:python/none',
Constructor.construct_yaml_null)
diff --git a/lib/yaml/emitter.py b/lib/yaml/emitter.py
index 2ac6c3b..d9d1bf8 100644
--- a/lib/yaml/emitter.py
+++ b/lib/yaml/emitter.py
@@ -16,7 +16,7 @@ import re
class EmitterError(YAMLError):
pass
-class ScalarAnalysis:
+class ScalarAnalysis(object):
def __init__(self, scalar, empty, multiline,
allow_flow_plain, allow_block_plain,
allow_single_quoted, allow_double_quoted,
@@ -30,7 +30,7 @@ class ScalarAnalysis:
self.allow_double_quoted = allow_double_quoted
self.allow_block = allow_block
-class Emitter:
+class Emitter(object):
DEFAULT_TAG_PREFIXES = {
u'!' : u'!',
diff --git a/lib/yaml/error.py b/lib/yaml/error.py
index 8fa916b..577686d 100644
--- a/lib/yaml/error.py
+++ b/lib/yaml/error.py
@@ -1,7 +1,7 @@
__all__ = ['Mark', 'YAMLError', 'MarkedYAMLError']
-class Mark:
+class Mark(object):
def __init__(self, name, index, line, column, buffer, pointer):
self.name = name
diff --git a/lib/yaml/events.py b/lib/yaml/events.py
index 3f244fa..f79ad38 100644
--- a/lib/yaml/events.py
+++ b/lib/yaml/events.py
@@ -1,7 +1,7 @@
# Abstract classes.
-class Event:
+class Event(object):
def __init__(self, start_mark=None, end_mark=None):
self.start_mark = start_mark
self.end_mark = end_mark
diff --git a/lib/yaml/nodes.py b/lib/yaml/nodes.py
index cb8c1cb..c4f070c 100644
--- a/lib/yaml/nodes.py
+++ b/lib/yaml/nodes.py
@@ -1,5 +1,5 @@
-class Node:
+class Node(object):
def __init__(self, tag, value, start_mark, end_mark):
self.tag = tag
self.value = value
diff --git a/lib/yaml/parser.py b/lib/yaml/parser.py
index 861c57b..eef0882 100644
--- a/lib/yaml/parser.py
+++ b/lib/yaml/parser.py
@@ -69,7 +69,7 @@ from scanner import *
class ParserError(MarkedYAMLError):
pass
-class Parser:
+class Parser(object):
# Since writing a recursive-descendant parser is a straightforward task, we
# do not give many comments here.
# Note that we use Python generators. If you rewrite the parser in another
diff --git a/lib/yaml/reader.py b/lib/yaml/reader.py
index 9048ea8..1d4667c 100644
--- a/lib/yaml/reader.py
+++ b/lib/yaml/reader.py
@@ -77,7 +77,7 @@ class ReaderError(YAMLError):
% (ord(self.character), self.reason,
self.name, self.position)
-class Reader:
+class Reader(object):
# Reader:
# - determines the data encoding and converts it to unicode,
# - checks if characters are in allowed range,
diff --git a/lib/yaml/representer.py b/lib/yaml/representer.py
index 2516813..44957c4 100644
--- a/lib/yaml/representer.py
+++ b/lib/yaml/representer.py
@@ -21,7 +21,7 @@ import sys, copy_reg
class RepresenterError(YAMLError):
pass
-class BaseRepresenter:
+class BaseRepresenter(object):
yaml_representers = {}
yaml_multi_representers = {}
@@ -30,21 +30,27 @@ class BaseRepresenter:
self.default_style = default_style
self.default_flow_style = default_flow_style
self.represented_objects = {}
+ self.object_keeper = []
+ self.alias_key = None
def represent(self, data):
node = self.represent_data(data)
self.serialize(node)
self.represented_objects = {}
+ self.object_keeper = []
+ self.alias_key = None
class C: pass
c = C()
def f(): pass
+ def g(): yield None
classobj_type = type(C)
instance_type = type(c)
function_type = type(f)
+ generator_type = type(g())
builtin_function_type = type(abs)
module_type = type(sys)
- del C, c, f
+ del C, c, f, g
def get_classobj_bases(self, cls):
bases = [cls]
@@ -54,16 +60,17 @@ class BaseRepresenter:
def represent_data(self, data):
if self.ignore_aliases(data):
- alias_key = None
+ self.alias_key = None
else:
- alias_key = id(data)
- if alias_key is not None:
- if alias_key in self.represented_objects:
- node = self.represented_objects[alias_key]
- if node is None:
- raise RepresenterError("recursive objects are not allowed: %r" % data)
+ self.alias_key = id(data)
+ if self.alias_key is not None:
+ if self.alias_key in self.represented_objects:
+ node = self.represented_objects[self.alias_key]
+ #if node is None:
+ # raise RepresenterError("recursive objects are not allowed: %r" % data)
return node
- self.represented_objects[alias_key] = None
+ #self.represented_objects[alias_key] = None
+ self.object_keeper.append(data)
data_types = type(data).__mro__
if type(data) is self.instance_type:
data_types = self.get_classobj_bases(data.__class__)+list(data_types)
@@ -81,8 +88,8 @@ class BaseRepresenter:
node = self.yaml_representers[None](self, data)
else:
node = ScalarNode(None, unicode(data))
- if alias_key is not None:
- self.represented_objects[alias_key] = node
+ #if alias_key is not None:
+ # self.represented_objects[alias_key] = node
return node
def add_representer(cls, data_type, representer):
@@ -100,50 +107,52 @@ class BaseRepresenter:
def represent_scalar(self, tag, value, style=None):
if style is None:
style = self.default_style
- return ScalarNode(tag, value, style=style)
+ node = ScalarNode(tag, value, style=style)
+ if self.alias_key is not None:
+ self.represented_objects[self.alias_key] = node
+ return node
def represent_sequence(self, tag, sequence, flow_style=None):
- best_style = True
value = []
+ node = SequenceNode(tag, value, flow_style=flow_style)
+ if self.alias_key is not None:
+ self.represented_objects[self.alias_key] = node
+ best_style = True
for item in sequence:
node_item = self.represent_data(item)
if not (isinstance(node_item, ScalarNode) and not node_item.style):
best_style = False
- value.append(self.represent_data(item))
- if flow_style is None:
- flow_style = self.default_flow_style
+ value.append(node_item)
if flow_style is None:
- flow_style = best_style
- return SequenceNode(tag, value, flow_style=flow_style)
+ if self.default_flow_style is not None:
+ node.flow_style = self.default_flow_style
+ else:
+ node.flow_style = best_style
+ return node
def represent_mapping(self, tag, mapping, flow_style=None):
+ value = []
+ node = MappingNode(tag, value, flow_style=flow_style)
+ if self.alias_key is not None:
+ self.represented_objects[self.alias_key] = node
best_style = True
- if hasattr(mapping, 'keys'):
- value = {}
- for item_key in mapping.keys():
- item_value = mapping[item_key]
- node_key = self.represent_data(item_key)
- node_value = self.represent_data(item_value)
- if not (isinstance(node_key, ScalarNode) and not node_key.style):
- best_style = False
- if not (isinstance(node_value, ScalarNode) and not node_value.style):
- best_style = False
- value[node_key] = node_value
- else:
- value = []
- for item_key, item_value in mapping:
- node_key = self.represent_data(item_key)
- node_value = self.represent_data(item_value)
- if not (isinstance(node_key, ScalarNode) and not node_key.style):
- best_style = False
- if not (isinstance(node_value, ScalarNode) and not node_value.style):
- best_style = False
- value.append((node_key, node_value))
- if flow_style is None:
- flow_style = self.default_flow_style
+ if hasattr(mapping, 'items'):
+ mapping = mapping.items()
+ mapping.sort()
+ for item_key, item_value in mapping:
+ node_key = self.represent_data(item_key)
+ node_value = self.represent_data(item_value)
+ if not (isinstance(node_key, ScalarNode) and not node_key.style):
+ best_style = False
+ if not (isinstance(node_value, ScalarNode) and not node_value.style):
+ best_style = False
+ value.append((node_key, node_value))
if flow_style is None:
- flow_style = best_style
- return MappingNode(tag, value, flow_style=flow_style)
+ if self.default_flow_style is not None:
+ node.flow_style = self.default_flow_style
+ else:
+ node.flow_style = best_style
+ return node
def ignore_aliases(self, data):
return False
@@ -208,19 +217,19 @@ class SafeRepresenter(BaseRepresenter):
return self.represent_scalar(u'tag:yaml.org,2002:float', value)
def represent_list(self, data):
- pairs = (len(data) > 0 and isinstance(data, list))
- if pairs:
- for item in data:
- if not isinstance(item, tuple) or len(item) != 2:
- pairs = False
- break
- if not pairs:
+ #pairs = (len(data) > 0 and isinstance(data, list))
+ #if pairs:
+ # for item in data:
+ # if not isinstance(item, tuple) or len(item) != 2:
+ # pairs = False
+ # break
+ #if not pairs:
return self.represent_sequence(u'tag:yaml.org,2002:seq', data)
- value = []
- for item_key, item_value in data:
- value.append(self.represent_mapping(u'tag:yaml.org,2002:map',
- [(item_key, item_value)]))
- return SequenceNode(u'tag:yaml.org,2002:pairs', value)
+ #value = []
+ #for item_key, item_value in data:
+ # value.append(self.represent_mapping(u'tag:yaml.org,2002:map',
+ # [(item_key, item_value)]))
+ #return SequenceNode(u'tag:yaml.org,2002:pairs', value)
def represent_dict(self, data):
return self.represent_mapping(u'tag:yaml.org,2002:map', data)
@@ -250,9 +259,6 @@ class SafeRepresenter(BaseRepresenter):
state = data.__getstate__()
else:
state = data.__dict__.copy()
- if isinstance(state, dict):
- state = state.items()
- state.sort()
return self.represent_mapping(tag, state, flow_style=flow_style)
def represent_undefined(self, data):
@@ -384,8 +390,6 @@ class Representer(SafeRepresenter):
else:
state = data.__dict__
if args is None and isinstance(state, dict):
- state = state.items()
- state.sort()
return self.represent_mapping(
u'tag:yaml.org,2002:python/object:'+class_name, state)
if isinstance(state, dict) and not state:
@@ -444,8 +448,6 @@ class Representer(SafeRepresenter):
function_name = u'%s.%s' % (function.__module__, function.__name__)
if not args and not listitems and not dictitems \
and isinstance(state, dict) and newobj:
- state = state.items()
- state.sort()
return self.represent_mapping(
u'tag:yaml.org,2002:python/object:'+function_name, state)
if not listitems and not dictitems \
diff --git a/lib/yaml/resolver.py b/lib/yaml/resolver.py
index 779f27a..3f405ed 100644
--- a/lib/yaml/resolver.py
+++ b/lib/yaml/resolver.py
@@ -9,7 +9,7 @@ import re
class ResolverError(YAMLError):
pass
-class BaseResolver:
+class BaseResolver(object):
DEFAULT_SCALAR_TAG = u'tag:yaml.org,2002:str'
DEFAULT_SEQUENCE_TAG = u'tag:yaml.org,2002:seq'
diff --git a/lib/yaml/scanner.py b/lib/yaml/scanner.py
index b36d35f..a3ecdd0 100644
--- a/lib/yaml/scanner.py
+++ b/lib/yaml/scanner.py
@@ -19,7 +19,7 @@
# ALIAS(value)
# ANCHOR(value)
# TAG(value)
-# SCALAR(value, plain)
+# SCALAR(value, plain, style)
#
# Read comments in the Scanner code for more details.
#
@@ -32,7 +32,7 @@ from tokens import *
class ScannerError(MarkedYAMLError):
pass
-class SimpleKey:
+class SimpleKey(object):
# See below simple keys treatment.
def __init__(self, token_number, required, index, line, column, mark):
@@ -43,7 +43,7 @@ class SimpleKey:
self.column = column
self.mark = mark
-class Scanner:
+class Scanner(object):
def __init__(self):
"""Initialize the scanner."""
diff --git a/lib/yaml/serializer.py b/lib/yaml/serializer.py
index 937be9a..2101f95 100644
--- a/lib/yaml/serializer.py
+++ b/lib/yaml/serializer.py
@@ -8,7 +8,7 @@ from nodes import *
class SerializerError(YAMLError):
pass
-class Serializer:
+class Serializer(object):
ANCHOR_TEMPLATE = u'id%03d'
@@ -67,14 +67,9 @@ class Serializer:
for item in node.value:
self.anchor_node(item)
elif isinstance(node, MappingNode):
- if hasattr(node.value, 'keys'):
- for key in node.value.keys():
- self.anchor_node(key)
- self.anchor_node(node.value[key])
- else:
- for key, value in node.value:
- self.anchor_node(key)
- self.anchor_node(value)
+ for key, value in node.value:
+ self.anchor_node(key)
+ self.anchor_node(value)
def generate_anchor(self, node):
self.last_anchor_id += 1
@@ -108,14 +103,9 @@ class Serializer:
== self.resolve(MappingNode, node.value, True))
self.emit(MappingStartEvent(alias, node.tag, implicit,
flow_style=node.flow_style))
- if hasattr(node.value, 'keys'):
- for key in node.value.keys():
- self.serialize_node(key, node, None)
- self.serialize_node(node.value[key], node, key)
- else:
- for key, value in node.value:
- self.serialize_node(key, node, None)
- self.serialize_node(value, node, key)
+ for key, value in node.value:
+ self.serialize_node(key, node, None)
+ self.serialize_node(value, node, key)
self.emit(MappingEndEvent())
self.ascend_resolver()
diff --git a/lib/yaml/tokens.py b/lib/yaml/tokens.py
index 4fe4522..4d0b48a 100644
--- a/lib/yaml/tokens.py
+++ b/lib/yaml/tokens.py
@@ -1,5 +1,5 @@
-class Token:
+class Token(object):
def __init__(self, start_mark, end_mark):
self.start_mark = start_mark
self.end_mark = end_mark
diff --git a/tests/data/duplicate-key.former-loader-error.code b/tests/data/duplicate-key.former-loader-error.code
new file mode 100644
index 0000000..cb73906
--- /dev/null
+++ b/tests/data/duplicate-key.former-loader-error.code
@@ -0,0 +1 @@
+{ 'foo': 'baz' }
diff --git a/tests/data/duplicate-key.loader-error b/tests/data/duplicate-key.former-loader-error.data
index 84deb8f..84deb8f 100644
--- a/tests/data/duplicate-key.loader-error
+++ b/tests/data/duplicate-key.former-loader-error.data
diff --git a/tests/data/duplicate-mapping-key.former-loader-error.code b/tests/data/duplicate-mapping-key.former-loader-error.code
new file mode 100644
index 0000000..17a6285
--- /dev/null
+++ b/tests/data/duplicate-mapping-key.former-loader-error.code
@@ -0,0 +1 @@
+{ 'foo': { 'baz': 'bat', 'foo': 'duplicate key' } }
diff --git a/tests/data/duplicate-mapping-key.loader-error b/tests/data/duplicate-mapping-key.former-loader-error.data
index 7e7b4d1..7e7b4d1 100644
--- a/tests/data/duplicate-mapping-key.loader-error
+++ b/tests/data/duplicate-mapping-key.former-loader-error.data
diff --git a/tests/data/duplicate-merge-key.former-loader-error.code b/tests/data/duplicate-merge-key.former-loader-error.code
new file mode 100644
index 0000000..6a757f3
--- /dev/null
+++ b/tests/data/duplicate-merge-key.former-loader-error.code
@@ -0,0 +1 @@
+{ 'x': 1, 'y': 2, 'foo': 'bar', 'z': 3, 't': 4 }
diff --git a/tests/data/duplicate-merge-key.loader-error b/tests/data/duplicate-merge-key.former-loader-error.data
index cebc3a1..cebc3a1 100644
--- a/tests/data/duplicate-merge-key.loader-error
+++ b/tests/data/duplicate-merge-key.former-loader-error.data
diff --git a/tests/data/duplicate-value-key.former-loader-error.code b/tests/data/duplicate-value-key.former-loader-error.code
new file mode 100644
index 0000000..12f48c1
--- /dev/null
+++ b/tests/data/duplicate-value-key.former-loader-error.code
@@ -0,0 +1 @@
+{ 'foo': 'bar', '=': 2 }
diff --git a/tests/data/duplicate-value-key.loader-error b/tests/data/duplicate-value-key.former-loader-error.data
index b34a1d6..b34a1d6 100644
--- a/tests/data/duplicate-value-key.loader-error
+++ b/tests/data/duplicate-value-key.former-loader-error.data
diff --git a/tests/data/recurive-list.recursive b/tests/data/recurive-list.recursive
new file mode 100644
index 0000000..27a4ae5
--- /dev/null
+++ b/tests/data/recurive-list.recursive
@@ -0,0 +1,2 @@
+value = []
+value.append(value)
diff --git a/tests/data/recursive-anchor.loader-error b/tests/data/recursive-anchor.former-loader-error
index 661166c..661166c 100644
--- a/tests/data/recursive-anchor.loader-error
+++ b/tests/data/recursive-anchor.former-loader-error
diff --git a/tests/data/recursive-dict.recursive b/tests/data/recursive-dict.recursive
new file mode 100644
index 0000000..8f326f5
--- /dev/null
+++ b/tests/data/recursive-dict.recursive
@@ -0,0 +1,3 @@
+value = {}
+instance = AnInstance(value, value)
+value[instance] = instance
diff --git a/tests/data/recursive-set.recursive b/tests/data/recursive-set.recursive
new file mode 100644
index 0000000..d1a7ca3
--- /dev/null
+++ b/tests/data/recursive-set.recursive
@@ -0,0 +1,3 @@
+value = set()
+value.add(AnInstance(foo=value, bar=value))
+value.add(AnInstance(foo=value, bar=value))
diff --git a/tests/data/recursive-state.recursive b/tests/data/recursive-state.recursive
new file mode 100644
index 0000000..bffe61e
--- /dev/null
+++ b/tests/data/recursive-state.recursive
@@ -0,0 +1,2 @@
+value = []
+value.append(AnInstanceWithState(value, value))
diff --git a/tests/data/recursive-tuple.recursive b/tests/data/recursive-tuple.recursive
new file mode 100644
index 0000000..dc08d02
--- /dev/null
+++ b/tests/data/recursive-tuple.recursive
@@ -0,0 +1,3 @@
+value = ([], [])
+value[0].append(value)
+value[1].append(value[0])
diff --git a/tests/data/recursive.dumper-error b/tests/data/recursive.former-dumper-error
index 3c7cc2f..3c7cc2f 100644
--- a/tests/data/recursive.dumper-error
+++ b/tests/data/recursive.former-dumper-error
diff --git a/tests/test_constructor.py b/tests/test_constructor.py
index cd6695f..200b112 100644
--- a/tests/test_constructor.py
+++ b/tests/test_constructor.py
@@ -240,6 +240,10 @@ class MyDict(dict):
def __eq__(self, other):
return type(self) is type(other) and dict(self) == dict(other)
+def execute(code):
+ exec code
+ return value
+
class TestConstructorTypes(test_appliance.TestAppliance):
def _testTypes(self, test_name, data_filename, code_filename):
diff --git a/tests/test_recursive.py b/tests/test_recursive.py
index acbd01b..3c09264 100644
--- a/tests/test_recursive.py
+++ b/tests/test_recursive.py
@@ -1,22 +1,52 @@
-import unittest
+import test_appliance
+
from yaml import *
-RECURSIVE = """
---- &A
-- *A: *A
-"""
+class AnInstance:
+
+ def __init__(self, foo, bar):
+ self.foo = foo
+ self.bar = bar
+
+ def __repr__(self):
+ try:
+ return "%s(foo=%r, bar=%r)" % (self.__class__.__name__,
+ self.foo, self.bar)
+ except RuntimeError:
+ return "%s(foo=..., bar=...)" % self.__class__.__name__
+
+class AnInstanceWithState(AnInstance):
+
+ def __getstate__(self):
+ return {'attributes': [self.foo, self.bar]}
+
+ def __setstate__(self, state):
+ self.foo, self.bar = state['attributes']
-class TestRecursive(unittest.TestCase):
+class TestRecursive(test_appliance.TestAppliance):
- def testRecursive(self):
- node = compose(RECURSIVE)
- self._check(node)
- document = serialize(node)
- node = compose(document)
- self._check(node)
+ def _testRecursive(self, test_name, recursive_filename):
+ exec file(recursive_filename, 'r').read()
+ value1 = value
+ output1 = None
+ value2 = None
+ output2 = None
+ try:
+ output1 = dump(value1)
+ #print "OUTPUT %s:" % test_name
+ #print output1
+ value2 = load(output1)
+ output2 = dump(value2)
+ self.failUnlessEqual(output1, output2)
+ except:
+ print "VALUE1:", value1
+ print "VALUE2:", value2
+ print "OUTPUT1:"
+ print output1
+ print "OUTPUT2:"
+ print output2
+ raise
- def _check(self, node):
- self.failUnless(node in node.value[0].value)
- self.failUnless(node.value[0].value[node] is node)
+TestRecursive.add_tests('testRecursive', '.recursive')
diff --git a/tests/test_resolver.py b/tests/test_resolver.py
index 4e14010..a1845d8 100644
--- a/tests/test_resolver.py
+++ b/tests/test_resolver.py
@@ -72,8 +72,7 @@ class TestResolver(test_appliance.TestAppliance):
return node.tag, value
elif isinstance(node, MappingNode):
value = []
- for key in node.value:
- item = node.value[key]
+ for key, item in node.value:
value.append((self._convert(key), self._convert(item)))
value.sort()
return node.tag, value
diff --git a/tests/test_structure.py b/tests/test_structure.py
index 0573512..0aef982 100644
--- a/tests/test_structure.py
+++ b/tests/test_structure.py
@@ -153,6 +153,7 @@ class MyLoader(Loader):
def construct_undefined(self, node):
return self.construct_scalar(node)
+MyLoader.add_constructor(u'tag:yaml.org,2002:map', MyLoader.construct_mapping)
MyLoader.add_constructor(None, MyLoader.construct_undefined)
class MyCanonicalLoader(test_appliance.CanonicalLoader):
@@ -168,6 +169,7 @@ class MyCanonicalLoader(test_appliance.CanonicalLoader):
def construct_undefined(self, node):
return self.construct_scalar(node)
+MyCanonicalLoader.add_constructor(u'tag:yaml.org,2002:map', MyCanonicalLoader.construct_mapping)
MyCanonicalLoader.add_constructor(None, MyCanonicalLoader.construct_undefined)
class TestConstructor(test_appliance.TestAppliance):