author     tusharmakkar08 <tusharmakkar08@gmail.com>   2016-02-22 16:36:26 +0530
committer  tusharmakkar08 <tusharmakkar08@gmail.com>   2016-02-22 16:36:26 +0530
commit     d515724817111d9dc60db15e1b54e149f807fba1 (patch)
tree       de507ff397ddd44458e1cde114d839eaf0e208ab /tablib
parent     2814fbc38100e00b2e78d47cb05defa8e9854126 (diff)
download   tablib-d515724817111d9dc60db15e1b54e149f807fba1.tar.gz
PEP-8 standards followed
Diffstat (limited to 'tablib')
-rw-r--r--  tablib/packages/dbfpy/dbf.py            47
-rw-r--r--  tablib/packages/dbfpy/dbfnew.py         31
-rw-r--r--  tablib/packages/dbfpy3/dbf.py           43
-rw-r--r--  tablib/packages/dbfpy3/dbfnew.py        29
-rw-r--r--  tablib/packages/yaml/composer.py        39
-rw-r--r--  tablib/packages/yaml/constructor.py    195
-rw-r--r--  tablib/packages/yaml3/composer.py       40
-rw-r--r--  tablib/packages/yaml3/constructor.py     8
8 files changed, 228 insertions, 204 deletions
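
The hunks below are mechanical PEP 8 cleanups: a single space in "from . import record", continuation lines given a consistent hanging indent, block comments written as "# " instead of "##", spaces around binary operators, two blank lines before top-level definitions, and snake_case names for the demo helpers. A minimal before/after sketch of the conventions being enforced (hypothetical code, not taken from the patch itself):

# Before: the style flagged across dbfpy, dbfpy3 and the vendored yaml packages.
def demoCreate(value):
    ## double the input
    return 2*value

# After: the PEP 8 form this commit moves the code toward.


def demo_create(value):
    # double the input
    return 2 * value
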
diff --git a/tablib/packages/dbfpy/dbf.py b/tablib/packages/dbfpy/dbf.py
index b3d2e21..add74da 100644
--- a/tablib/packages/dbfpy/dbf.py
+++ b/tablib/packages/dbfpy/dbf.py
@@ -63,9 +63,10 @@ __author__ = "Jeff Kunce <kuncej@mail.conservation.state.mo.us>"
__all__ = ["Dbf"]
from . import header
-from .import record
+from . import record
from utils import INVALID_VALUE
+
class Dbf(object):
"""DBF accessor.
@@ -82,13 +83,13 @@ class Dbf(object):
"""
__slots__ = ("name", "header", "stream",
- "_changed", "_new", "_ignore_errors")
+ "_changed", "_new", "_ignore_errors")
HeaderClass = header.DbfHeader
RecordClass = record.DbfRecord
INVALID_VALUE = INVALID_VALUE
- ## initialization and creation helpers
+ # initialization and creation helpers
def __init__(self, f, readOnly=False, new=False, ignoreErrors=False):
"""Initialize instance.
@@ -137,7 +138,7 @@ class Dbf(object):
self._new = bool(new)
self._changed = False
- ## properties
+ # properties
closed = property(lambda self: self.stream.closed)
recordCount = property(lambda self: self.header.recordCount)
@@ -149,6 +150,7 @@ class Dbf(object):
def ignoreErrors(self, value):
"""Update `ignoreErrors` flag on the header object and self"""
self.header.ignoreErrors = self._ignore_errors = bool(value)
+
ignoreErrors = property(
lambda self: self._ignore_errors,
ignoreErrors,
@@ -159,7 +161,7 @@ class Dbf(object):
""")
- ## protected methods
+ # protected methods
def _fixIndex(self, index):
"""Return fixed index.
@@ -185,7 +187,7 @@ class Dbf(object):
raise IndexError("Record index out of range")
return index
- ## iterface methods
+ # iterface methods
def close(self):
self.flush()
@@ -226,9 +228,9 @@ class Dbf(object):
self.header.addField(*defs)
else:
raise TypeError("At least one record was added, "
- "structure can't be changed")
+ "structure can't be changed")
- ## 'magic' methods (representation and sequence interface)
+ # 'magic' methods (representation and sequence interface)
def __repr__(self):
return "Dbf stream '%s'\n" % self.stream + repr(self.header)
@@ -248,19 +250,20 @@ class Dbf(object):
self._changed = True
self._new = False
- #def __del__(self):
- # """Flush stream upon deletion of the object."""
- # self.flush()
+ # def __del__(self):
+ # """Flush stream upon deletion of the object."""
+ # self.flush()
-def demoRead(filename):
+def demo_read(filename):
_dbf = Dbf(filename, True)
for _rec in _dbf:
print
print(repr(_rec))
_dbf.close()
-def demoCreate(filename):
+
+def demo_create(filename):
_dbf = Dbf(filename, new=True)
_dbf.addField(
("NAME", "C", 15),
@@ -269,10 +272,10 @@ def demoCreate(filename):
("BIRTHDATE", "D"),
)
for (_n, _s, _i, _b) in (
- ("John", "Miller", "YC", (1981, 1, 2)),
- ("Andy", "Larkin", "AL", (1982, 3, 4)),
- ("Bill", "Clinth", "", (1983, 5, 6)),
- ("Bobb", "McNail", "", (1984, 7, 8)),
+ ("John", "Miller", "YC", (1981, 1, 2)),
+ ("Andy", "Larkin", "AL", (1982, 3, 4)),
+ ("Bill", "Clinth", "", (1983, 5, 6)),
+ ("Bobb", "McNail", "", (1984, 7, 8)),
):
_rec = _dbf.newRecord()
_rec["NAME"] = _n
@@ -283,10 +286,12 @@ def demoCreate(filename):
print(repr(_dbf))
_dbf.close()
-if (__name__=='__main__'):
+
+if __name__ == '__main__':
import sys
+
_name = len(sys.argv) > 1 and sys.argv[1] or "county.dbf"
- demoCreate(_name)
- demoRead(_name)
+ demo_create(_name)
+ demo_read(_name)
-# vim: set et sw=4 sts=4 :
+ # vim: set et sw=4 sts=4 :
diff --git a/tablib/packages/dbfpy/dbfnew.py b/tablib/packages/dbfpy/dbfnew.py
index dea7e52..7a06d00 100644
--- a/tablib/packages/dbfpy/dbfnew.py
+++ b/tablib/packages/dbfpy/dbfnew.py
@@ -29,6 +29,7 @@ from fields import *
from header import *
from record import *
+
class _FieldDefinition(object):
"""Field definition.
@@ -151,28 +152,28 @@ class dbf_new(object):
_dbfh.write(stream)
-if (__name__=='__main__'):
+if __name__ == '__main__':
# create a new DBF-File
- dbfn=dbf_new()
- dbfn.add_field("name",'C',80)
- dbfn.add_field("price",'N',10,2)
- dbfn.add_field("date",'D',8)
+ dbfn = dbf_new()
+ dbfn.add_field("name", 'C', 80)
+ dbfn.add_field("price", 'N', 10, 2)
+ dbfn.add_field("date", 'D', 8)
dbfn.write("tst.dbf")
# test new dbf
print "*** created tst.dbf: ***"
dbft = Dbf('tst.dbf', readOnly=0)
print repr(dbft)
# add a record
- rec=DbfRecord(dbft)
- rec['name']='something'
- rec['price']=10.5
- rec['date']=(2000,1,12)
+ rec = DbfRecord(dbft)
+ rec['name'] = 'something'
+ rec['price'] = 10.5
+ rec['date'] = (2000, 1, 12)
rec.store()
# add another record
- rec=DbfRecord(dbft)
- rec['name']='foo and bar'
- rec['price']=12234
- rec['date']=(1992,7,15)
+ rec = DbfRecord(dbft)
+ rec['name'] = 'foo and bar'
+ rec['price'] = 12234
+ rec['date'] = (1992, 7, 15)
rec.store()
# show the records
@@ -181,8 +182,8 @@ if (__name__=='__main__'):
for i1 in range(len(dbft)):
rec = dbft[i1]
for fldName in dbft.fieldNames:
- print '%s:\t %s'%(fldName, rec[fldName])
+ print '%s:\t %s' % (fldName, rec[fldName])
print
dbft.close()
-# vim: set et sts=4 sw=4 :
+ # vim: set et sts=4 sw=4 :
diff --git a/tablib/packages/dbfpy3/dbf.py b/tablib/packages/dbfpy3/dbf.py
index 42de8a4..6fee457 100644
--- a/tablib/packages/dbfpy3/dbf.py
+++ b/tablib/packages/dbfpy3/dbf.py
@@ -66,6 +66,7 @@ from . import header
from . import record
from .utils import INVALID_VALUE
+
class Dbf(object):
"""DBF accessor.
@@ -82,13 +83,13 @@ class Dbf(object):
"""
__slots__ = ("name", "header", "stream",
- "_changed", "_new", "_ignore_errors")
+ "_changed", "_new", "_ignore_errors")
HeaderClass = header.DbfHeader
RecordClass = record.DbfRecord
INVALID_VALUE = INVALID_VALUE
- ## initialization and creation helpers
+ # initialization and creation helpers
def __init__(self, f, readOnly=False, new=False, ignoreErrors=False):
"""Initialize instance.
@@ -137,7 +138,7 @@ class Dbf(object):
self._new = bool(new)
self._changed = False
- ## properties
+ # properties
closed = property(lambda self: self.stream.closed)
recordCount = property(lambda self: self.header.recordCount)
@@ -149,6 +150,7 @@ class Dbf(object):
def ignoreErrors(self, value):
"""Update `ignoreErrors` flag on the header object and self"""
self.header.ignoreErrors = self._ignore_errors = bool(value)
+
ignoreErrors = property(
lambda self: self._ignore_errors,
ignoreErrors,
@@ -159,7 +161,7 @@ class Dbf(object):
""")
- ## protected methods
+ # protected methods
def _fixIndex(self, index):
"""Return fixed index.
@@ -185,7 +187,7 @@ class Dbf(object):
raise IndexError("Record index out of range")
return index
- ## iterface methods
+ # iterface methods
def close(self):
self.flush()
@@ -227,9 +229,9 @@ class Dbf(object):
self.header.addField(*defs)
else:
raise TypeError("At least one record was added, "
- "structure can't be changed")
+ "structure can't be changed")
- ## 'magic' methods (representation and sequence interface)
+ # 'magic' methods (representation and sequence interface)
def __repr__(self):
return "Dbf stream '%s'\n" % self.stream + repr(self.header)
@@ -249,19 +251,20 @@ class Dbf(object):
self._changed = True
self._new = False
- #def __del__(self):
- # """Flush stream upon deletion of the object."""
- # self.flush()
+ # def __del__(self):
+ # """Flush stream upon deletion of the object."""
+ # self.flush()
-def demoRead(filename):
+def demo_read(filename):
_dbf = Dbf(filename, True)
for _rec in _dbf:
print()
print(repr(_rec))
_dbf.close()
-def demoCreate(filename):
+
+def demo_create(filename):
_dbf = Dbf(filename, new=True)
_dbf.addField(
("NAME", "C", 15),
@@ -270,10 +273,10 @@ def demoCreate(filename):
("BIRTHDATE", "D"),
)
for (_n, _s, _i, _b) in (
- ("John", "Miller", "YC", (1981, 1, 2)),
- ("Andy", "Larkin", "AL", (1982, 3, 4)),
- ("Bill", "Clinth", "", (1983, 5, 6)),
- ("Bobb", "McNail", "", (1984, 7, 8)),
+ ("John", "Miller", "YC", (1981, 1, 2)),
+ ("Andy", "Larkin", "AL", (1982, 3, 4)),
+ ("Bill", "Clinth", "", (1983, 5, 6)),
+ ("Bobb", "McNail", "", (1984, 7, 8)),
):
_rec = _dbf.newRecord()
_rec["NAME"] = _n
@@ -284,10 +287,12 @@ def demoCreate(filename):
print(repr(_dbf))
_dbf.close()
-if (__name__=='__main__'):
+
+if __name__ == '__main__':
import sys
+
_name = len(sys.argv) > 1 and sys.argv[1] or "county.dbf"
- demoCreate(_name)
- demoRead(_name)
+ demo_create(_name)
+ demo_read(_name)
# vim: set et sw=4 sts=4 :
diff --git a/tablib/packages/dbfpy3/dbfnew.py b/tablib/packages/dbfpy3/dbfnew.py
index 4051bc6..8fab275 100644
--- a/tablib/packages/dbfpy3/dbfnew.py
+++ b/tablib/packages/dbfpy3/dbfnew.py
@@ -29,6 +29,7 @@ from .fields import *
from .header import *
from .record import *
+
class _FieldDefinition(object):
"""Field definition.
@@ -145,28 +146,28 @@ class dbf_new(object):
_dbfStream.close()
-if (__name__=='__main__'):
+if __name__ == '__main__':
# create a new DBF-File
- dbfn=dbf_new()
- dbfn.add_field("name",'C',80)
- dbfn.add_field("price",'N',10,2)
- dbfn.add_field("date",'D',8)
+ dbfn = dbf_new()
+ dbfn.add_field("name", 'C', 80)
+ dbfn.add_field("price", 'N', 10, 2)
+ dbfn.add_field("date", 'D', 8)
dbfn.write("tst.dbf")
# test new dbf
print("*** created tst.dbf: ***")
dbft = Dbf('tst.dbf', readOnly=0)
print(repr(dbft))
# add a record
- rec=DbfRecord(dbft)
- rec['name']='something'
- rec['price']=10.5
- rec['date']=(2000,1,12)
+ rec = DbfRecord(dbft)
+ rec['name'] = 'something'
+ rec['price'] = 10.5
+ rec['date'] = (2000, 1, 12)
rec.store()
# add another record
- rec=DbfRecord(dbft)
- rec['name']='foo and bar'
- rec['price']=12234
- rec['date']=(1992,7,15)
+ rec = DbfRecord(dbft)
+ rec['name'] = 'foo and bar'
+ rec['price'] = 12234
+ rec['date'] = (1992, 7, 15)
rec.store()
# show the records
@@ -175,7 +176,7 @@ if (__name__=='__main__'):
for i1 in range(len(dbft)):
rec = dbft[i1]
for fldName in dbft.fieldNames:
- print('%s:\t %s'%(fldName, rec[fldName]))
+ print('%s:\t %s' % (fldName, rec[fldName]))
print()
dbft.close()
diff --git a/tablib/packages/yaml/composer.py b/tablib/packages/yaml/composer.py
index 06e5ac7..d4ef0f1 100644
--- a/tablib/packages/yaml/composer.py
+++ b/tablib/packages/yaml/composer.py
@@ -1,15 +1,16 @@
-
__all__ = ['Composer', 'ComposerError']
from error import MarkedYAMLError
-from events import *
-from nodes import *
+from events import StreamEndEvent, StreamStartEvent, AliasEvent, SequenceEndEvent, SequenceStartEvent, MappingEndEvent,\
+ MappingStartEvent, ScalarEvent
+from nodes import MappingNode, ScalarNode, SequenceNode
+
class ComposerError(MarkedYAMLError):
pass
-class Composer(object):
+class Composer(object):
def __init__(self):
self.anchors = {}
@@ -39,8 +40,8 @@ class Composer(object):
if not self.check_event(StreamEndEvent):
event = self.get_event()
raise ComposerError("expected a single document in the stream",
- document.start_mark, "but found another document",
- event.start_mark)
+ document.start_mark, "but found another document",
+ event.start_mark)
# Drop the STREAM-END event.
self.get_event()
@@ -66,15 +67,14 @@ class Composer(object):
anchor = event.anchor
if anchor not in self.anchors:
raise ComposerError(None, None, "found undefined alias %r"
- % anchor.encode('utf-8'), event.start_mark)
+ % anchor.encode('utf-8'), event.start_mark)
return self.anchors[anchor]
event = self.peek_event()
anchor = event.anchor
- if anchor is not None:
- if anchor in self.anchors:
+ if anchor is not None and anchor in self.anchors:
raise ComposerError("found duplicate anchor %r; first occurence"
- % anchor.encode('utf-8'), self.anchors[anchor].start_mark,
- "second occurence", event.start_mark)
+ % anchor.encode('utf-8'), self.anchors[anchor].start_mark,
+ "second occurence", event.start_mark)
self.descend_resolver(parent, index)
if self.check_event(ScalarEvent):
node = self.compose_scalar_node(anchor)
@@ -91,7 +91,7 @@ class Composer(object):
if tag is None or tag == u'!':
tag = self.resolve(ScalarNode, event.value, event.implicit)
node = ScalarNode(tag, event.value,
- event.start_mark, event.end_mark, style=event.style)
+ event.start_mark, event.end_mark, style=event.style)
if anchor is not None:
self.anchors[anchor] = node
return node
@@ -102,8 +102,8 @@ class Composer(object):
if tag is None or tag == u'!':
tag = self.resolve(SequenceNode, None, start_event.implicit)
node = SequenceNode(tag, [],
- start_event.start_mark, None,
- flow_style=start_event.flow_style)
+ start_event.start_mark, None,
+ flow_style=start_event.flow_style)
if anchor is not None:
self.anchors[anchor] = node
index = 0
@@ -120,20 +120,19 @@ class Composer(object):
if tag is None or tag == u'!':
tag = self.resolve(MappingNode, None, start_event.implicit)
node = MappingNode(tag, [],
- start_event.start_mark, None,
- flow_style=start_event.flow_style)
+ start_event.start_mark, None,
+ flow_style=start_event.flow_style)
if anchor is not None:
self.anchors[anchor] = node
while not self.check_event(MappingEndEvent):
- #key_event = self.peek_event()
+ # key_event = self.peek_event()
item_key = self.compose_node(node, None)
- #if item_key in node.value:
+ # if item_key in node.value:
# raise ComposerError("while composing a mapping", start_event.start_mark,
# "found duplicate key", key_event.start_mark)
item_value = self.compose_node(node, item_key)
- #node.value[item_key] = item_value
+ # node.value[item_key] = item_value
node.value.append((item_key, item_value))
end_event = self.get_event()
node.end_mark = end_event.end_mark
return node
-
diff --git a/tablib/packages/yaml/constructor.py b/tablib/packages/yaml/constructor.py
index 420c434..d44c7bd 100644
--- a/tablib/packages/yaml/constructor.py
+++ b/tablib/packages/yaml/constructor.py
@@ -1,6 +1,5 @@
-
__all__ = ['BaseConstructor', 'SafeConstructor', 'Constructor',
- 'ConstructorError']
+ 'ConstructorError']
from error import *
from nodes import *
@@ -12,13 +11,17 @@ try:
except NameError:
from sets import Set as set
-import binascii, re, sys, types
+import binascii
+import re
+import sys
+import types
+
class ConstructorError(MarkedYAMLError):
pass
-class BaseConstructor(object):
+class BaseConstructor(object):
yaml_constructors = {}
yaml_multi_constructors = {}
@@ -65,7 +68,7 @@ class BaseConstructor(object):
return self.constructed_objects[node]
if node in self.recursive_objects:
raise ConstructorError(None, None,
- "found unconstructable recursive node", node.start_mark)
+ "found unconstructable recursive node", node.start_mark)
self.recursive_objects[node] = None
constructor = None
tag_suffix = None
@@ -110,23 +113,23 @@ class BaseConstructor(object):
def construct_scalar(self, node):
if not isinstance(node, ScalarNode):
raise ConstructorError(None, None,
- "expected a scalar node, but found %s" % node.id,
- node.start_mark)
+ "expected a scalar node, but found %s" % node.id,
+ node.start_mark)
return node.value
def construct_sequence(self, node, deep=False):
if not isinstance(node, SequenceNode):
raise ConstructorError(None, None,
- "expected a sequence node, but found %s" % node.id,
- node.start_mark)
+ "expected a sequence node, but found %s" % node.id,
+ node.start_mark)
return [self.construct_object(child, deep=deep)
for child in node.value]
def construct_mapping(self, node, deep=False):
if not isinstance(node, MappingNode):
raise ConstructorError(None, None,
- "expected a mapping node, but found %s" % node.id,
- node.start_mark)
+ "expected a mapping node, but found %s" % node.id,
+ node.start_mark)
mapping = {}
for key_node, value_node in node.value:
key = self.construct_object(key_node, deep=deep)
@@ -134,7 +137,7 @@ class BaseConstructor(object):
hash(key)
except TypeError, exc:
raise ConstructorError("while constructing a mapping", node.start_mark,
- "found unacceptable key (%s)" % exc, key_node.start_mark)
+ "found unacceptable key (%s)" % exc, key_node.start_mark)
value = self.construct_object(value_node, deep=deep)
mapping[key] = value
return mapping
@@ -142,8 +145,8 @@ class BaseConstructor(object):
def construct_pairs(self, node, deep=False):
if not isinstance(node, MappingNode):
raise ConstructorError(None, None,
- "expected a mapping node, but found %s" % node.id,
- node.start_mark)
+ "expected a mapping node, but found %s" % node.id,
+ node.start_mark)
pairs = []
for key_node, value_node in node.value:
key = self.construct_object(key_node, deep=deep)
@@ -155,16 +158,18 @@ class BaseConstructor(object):
if not 'yaml_constructors' in cls.__dict__:
cls.yaml_constructors = cls.yaml_constructors.copy()
cls.yaml_constructors[tag] = constructor
+
add_constructor = classmethod(add_constructor)
def add_multi_constructor(cls, tag_prefix, multi_constructor):
if not 'yaml_multi_constructors' in cls.__dict__:
cls.yaml_multi_constructors = cls.yaml_multi_constructors.copy()
cls.yaml_multi_constructors[tag_prefix] = multi_constructor
+
add_multi_constructor = classmethod(add_multi_constructor)
-class SafeConstructor(BaseConstructor):
+class SafeConstructor(BaseConstructor):
def construct_scalar(self, node):
if isinstance(node, MappingNode):
for key_node, value_node in node.value:
@@ -187,9 +192,9 @@ class SafeConstructor(BaseConstructor):
for subnode in value_node.value:
if not isinstance(subnode, MappingNode):
raise ConstructorError("while constructing a mapping",
- node.start_mark,
- "expected a mapping for merging, but found %s"
- % subnode.id, subnode.start_mark)
+ node.start_mark,
+ "expected a mapping for merging, but found %s"
+ % subnode.id, subnode.start_mark)
self.flatten_mapping(subnode)
submerge.append(subnode.value)
submerge.reverse()
@@ -197,8 +202,8 @@ class SafeConstructor(BaseConstructor):
merge.extend(value)
else:
raise ConstructorError("while constructing a mapping", node.start_mark,
- "expected a mapping or list of mappings for merging, but found %s"
- % value_node.id, value_node.start_mark)
+ "expected a mapping or list of mappings for merging, but found %s"
+ % value_node.id, value_node.start_mark)
elif key_node.tag == u'tag:yaml.org,2002:value':
key_node.tag = u'tag:yaml.org,2002:str'
index += 1
@@ -217,12 +222,12 @@ class SafeConstructor(BaseConstructor):
return None
bool_values = {
- u'yes': True,
- u'no': False,
- u'true': True,
- u'false': False,
- u'on': True,
- u'off': False,
+ u'yes': True,
+ u'no': False,
+ u'true': True,
+ u'false': False,
+ u'on': True,
+ u'off': False,
}
def construct_yaml_bool(self, node):
@@ -240,27 +245,27 @@ class SafeConstructor(BaseConstructor):
if value == '0':
return 0
elif value.startswith('0b'):
- return sign*int(value[2:], 2)
+ return sign * int(value[2:], 2)
elif value.startswith('0x'):
- return sign*int(value[2:], 16)
+ return sign * int(value[2:], 16)
elif value[0] == '0':
- return sign*int(value, 8)
+ return sign * int(value, 8)
elif ':' in value:
digits = [int(part) for part in value.split(':')]
digits.reverse()
base = 1
value = 0
for digit in digits:
- value += digit*base
+ value += digit * base
base *= 60
- return sign*value
+ return sign * value
else:
- return sign*int(value)
+ return sign * int(value)
inf_value = 1e300
- while inf_value != inf_value*inf_value:
+ while inf_value != inf_value * inf_value:
inf_value *= inf_value
- nan_value = -inf_value/inf_value # Trying to make a quiet NaN (like C99).
+ nan_value = -inf_value / inf_value # Trying to make a quiet NaN (like C99).
def construct_yaml_float(self, node):
value = str(self.construct_scalar(node))
@@ -271,7 +276,7 @@ class SafeConstructor(BaseConstructor):
if value[0] in '+-':
value = value[1:]
if value == '.inf':
- return sign*self.inf_value
+ return sign * self.inf_value
elif value == '.nan':
return self.nan_value
elif ':' in value:
@@ -280,11 +285,11 @@ class SafeConstructor(BaseConstructor):
base = 1
value = 0.0
for digit in digits:
- value += digit*base
+ value += digit * base
base *= 60
- return sign*value
+ return sign * value
else:
- return sign*float(value)
+ return sign * float(value)
def construct_yaml_binary(self, node):
value = self.construct_scalar(node)
@@ -292,10 +297,10 @@ class SafeConstructor(BaseConstructor):
return str(value).decode('base64')
except (binascii.Error, UnicodeEncodeError), exc:
raise ConstructorError(None, None,
- "failed to decode base64 data: %s" % exc, node.start_mark)
+ "failed to decode base64 data: %s" % exc, node.start_mark)
timestamp_regexp = re.compile(
- ur'''^(?P<year>[0-9][0-9][0-9][0-9])
+ ur'''^(?P<year>[0-9][0-9][0-9][0-9])
-(?P<month>[0-9][0-9]?)
-(?P<day>[0-9][0-9]?)
(?:(?:[Tt]|[ \t]+)
@@ -343,16 +348,16 @@ class SafeConstructor(BaseConstructor):
yield omap
if not isinstance(node, SequenceNode):
raise ConstructorError("while constructing an ordered map", node.start_mark,
- "expected a sequence, but found %s" % node.id, node.start_mark)
+ "expected a sequence, but found %s" % node.id, node.start_mark)
for subnode in node.value:
if not isinstance(subnode, MappingNode):
raise ConstructorError("while constructing an ordered map", node.start_mark,
- "expected a mapping of length 1, but found %s" % subnode.id,
- subnode.start_mark)
+ "expected a mapping of length 1, but found %s" % subnode.id,
+ subnode.start_mark)
if len(subnode.value) != 1:
raise ConstructorError("while constructing an ordered map", node.start_mark,
- "expected a single mapping item, but found %d items" % len(subnode.value),
- subnode.start_mark)
+ "expected a single mapping item, but found %d items" % len(subnode.value),
+ subnode.start_mark)
key_node, value_node = subnode.value[0]
key = self.construct_object(key_node)
value = self.construct_object(value_node)
@@ -364,16 +369,16 @@ class SafeConstructor(BaseConstructor):
yield pairs
if not isinstance(node, SequenceNode):
raise ConstructorError("while constructing pairs", node.start_mark,
- "expected a sequence, but found %s" % node.id, node.start_mark)
+ "expected a sequence, but found %s" % node.id, node.start_mark)
for subnode in node.value:
if not isinstance(subnode, MappingNode):
raise ConstructorError("while constructing pairs", node.start_mark,
- "expected a mapping of length 1, but found %s" % subnode.id,
- subnode.start_mark)
+ "expected a mapping of length 1, but found %s" % subnode.id,
+ subnode.start_mark)
if len(subnode.value) != 1:
raise ConstructorError("while constructing pairs", node.start_mark,
- "expected a single mapping item, but found %d items" % len(subnode.value),
- subnode.start_mark)
+ "expected a single mapping item, but found %d items" % len(subnode.value),
+ subnode.start_mark)
key_node, value_node = subnode.value[0]
key = self.construct_object(key_node)
value = self.construct_object(value_node)
@@ -415,62 +420,63 @@ class SafeConstructor(BaseConstructor):
def construct_undefined(self, node):
raise ConstructorError(None, None,
- "could not determine a constructor for the tag %r" % node.tag.encode('utf-8'),
- node.start_mark)
+ "could not determine a constructor for the tag %r" % node.tag.encode('utf-8'),
+ node.start_mark)
+
SafeConstructor.add_constructor(
- u'tag:yaml.org,2002:null',
- SafeConstructor.construct_yaml_null)
+ u'tag:yaml.org,2002:null',
+ SafeConstructor.construct_yaml_null)
SafeConstructor.add_constructor(
- u'tag:yaml.org,2002:bool',
- SafeConstructor.construct_yaml_bool)
+ u'tag:yaml.org,2002:bool',
+ SafeConstructor.construct_yaml_bool)
SafeConstructor.add_constructor(
- u'tag:yaml.org,2002:int',
- SafeConstructor.construct_yaml_int)
+ u'tag:yaml.org,2002:int',
+ SafeConstructor.construct_yaml_int)
SafeConstructor.add_constructor(
- u'tag:yaml.org,2002:float',
- SafeConstructor.construct_yaml_float)
+ u'tag:yaml.org,2002:float',
+ SafeConstructor.construct_yaml_float)
SafeConstructor.add_constructor(
- u'tag:yaml.org,2002:binary',
- SafeConstructor.construct_yaml_binary)
+ u'tag:yaml.org,2002:binary',
+ SafeConstructor.construct_yaml_binary)
SafeConstructor.add_constructor(
- u'tag:yaml.org,2002:timestamp',
- SafeConstructor.construct_yaml_timestamp)
+ u'tag:yaml.org,2002:timestamp',
+ SafeConstructor.construct_yaml_timestamp)
SafeConstructor.add_constructor(
- u'tag:yaml.org,2002:omap',
- SafeConstructor.construct_yaml_omap)
+ u'tag:yaml.org,2002:omap',
+ SafeConstructor.construct_yaml_omap)
SafeConstructor.add_constructor(
- u'tag:yaml.org,2002:pairs',
- SafeConstructor.construct_yaml_pairs)
+ u'tag:yaml.org,2002:pairs',
+ SafeConstructor.construct_yaml_pairs)
SafeConstructor.add_constructor(
- u'tag:yaml.org,2002:set',
- SafeConstructor.construct_yaml_set)
+ u'tag:yaml.org,2002:set',
+ SafeConstructor.construct_yaml_set)
SafeConstructor.add_constructor(
- u'tag:yaml.org,2002:str',
- SafeConstructor.construct_yaml_str)
+ u'tag:yaml.org,2002:str',
+ SafeConstructor.construct_yaml_str)
SafeConstructor.add_constructor(
- u'tag:yaml.org,2002:seq',
- SafeConstructor.construct_yaml_seq)
+ u'tag:yaml.org,2002:seq',
+ SafeConstructor.construct_yaml_seq)
SafeConstructor.add_constructor(
- u'tag:yaml.org,2002:map',
- SafeConstructor.construct_yaml_map)
+ u'tag:yaml.org,2002:map',
+ SafeConstructor.construct_yaml_map)
SafeConstructor.add_constructor(None,
- SafeConstructor.construct_undefined)
+ SafeConstructor.construct_undefined)
-class Constructor(SafeConstructor):
+class Constructor(SafeConstructor):
def construct_python_str(self, node):
return self.construct_scalar(node).encode('utf-8')
@@ -481,7 +487,7 @@ class Constructor(SafeConstructor):
return long(self.construct_yaml_int(node))
def construct_python_complex(self, node):
- return complex(self.construct_scalar(node))
+ return complex(self.construct_scalar(node))
def construct_python_tuple(self, node):
return tuple(self.construct_sequence(node))
@@ -489,21 +495,21 @@ class Constructor(SafeConstructor):
def find_python_module(self, name, mark):
if not name:
raise ConstructorError("while constructing a Python module", mark,
- "expected non-empty name appended to the tag", mark)
+ "expected non-empty name appended to the tag", mark)
try:
__import__(name)
except ImportError, exc:
raise ConstructorError("while constructing a Python module", mark,
- "cannot find module %r (%s)" % (name.encode('utf-8'), exc), mark)
+ "cannot find module %r (%s)" % (name.encode('utf-8'), exc), mark)
return sys.modules[name]
def find_python_name(self, name, mark):
if not name:
raise ConstructorError("while constructing a Python object", mark,
- "expected non-empty name appended to the tag", mark)
+ "expected non-empty name appended to the tag", mark)
if u'.' in name:
# Python 2.4 only
- #module_name, object_name = name.rsplit('.', 1)
+ # module_name, object_name = name.rsplit('.', 1)
items = name.split('.')
object_name = items.pop()
module_name = '.'.join(items)
@@ -514,40 +520,41 @@ class Constructor(SafeConstructor):
__import__(module_name)
except ImportError, exc:
raise ConstructorError("while constructing a Python object", mark,
- "cannot find module %r (%s)" % (module_name.encode('utf-8'), exc), mark)
+ "cannot find module %r (%s)" % (module_name.encode('utf-8'), exc), mark)
module = sys.modules[module_name]
if not hasattr(module, object_name):
raise ConstructorError("while constructing a Python object", mark,
- "cannot find %r in the module %r" % (object_name.encode('utf-8'),
- module.__name__), mark)
+ "cannot find %r in the module %r" % (object_name.encode('utf-8'),
+ module.__name__), mark)
return getattr(module, object_name)
def construct_python_name(self, suffix, node):
value = self.construct_scalar(node)
if value:
raise ConstructorError("while constructing a Python name", node.start_mark,
- "expected the empty value, but found %r" % value.encode('utf-8'),
- node.start_mark)
+ "expected the empty value, but found %r" % value.encode('utf-8'),
+ node.start_mark)
return self.find_python_name(suffix, node.start_mark)
def construct_python_module(self, suffix, node):
value = self.construct_scalar(node)
if value:
raise ConstructorError("while constructing a Python module", node.start_mark,
- "expected the empty value, but found %r" % value.encode('utf-8'),
- node.start_mark)
+ "expected the empty value, but found %r" % value.encode('utf-8'),
+ node.start_mark)
return self.find_python_module(suffix, node.start_mark)
- class classobj: pass
+ class classobj:
+ pass
def make_python_instance(self, suffix, node,
- args=None, kwds=None, newobj=False):
+ args=None, kwds=None, newobj=False):
if not args:
args = []
if not kwds:
kwds = {}
cls = self.find_python_name(suffix, node.start_mark)
- if newobj and isinstance(cls, type(self.classobj)) \
+ if newobj and isinstance(cls, type(self.classobj)) \
and not args and not kwds:
instance = self.classobj()
instance.__class__ = cls
@@ -618,6 +625,7 @@ class Constructor(SafeConstructor):
def construct_python_object_new(self, suffix, node):
return self.construct_python_object_apply(suffix, node, newobj=True)
+
Constructor.add_constructor(
u'tag:yaml.org,2002:python/none',
Constructor.construct_yaml_null)
@@ -681,4 +689,3 @@ Constructor.add_multi_constructor(
Constructor.add_multi_constructor(
u'tag:yaml.org,2002:python/object/new:',
Constructor.construct_python_object_new)
-
diff --git a/tablib/packages/yaml3/composer.py b/tablib/packages/yaml3/composer.py
index d5c6a7a..97c1306 100644
--- a/tablib/packages/yaml3/composer.py
+++ b/tablib/packages/yaml3/composer.py
@@ -1,15 +1,17 @@
-
__all__ = ['Composer', 'ComposerError']
from .error import MarkedYAMLError
-from .events import *
-from .nodes import *
+from error import MarkedYAMLError
+from events import StreamEndEvent, StreamStartEvent, AliasEvent, SequenceEndEvent, SequenceStartEvent, MappingEndEvent,\
+ MappingStartEvent, ScalarEvent
+from nodes import MappingNode, ScalarNode, SequenceNode
+
class ComposerError(MarkedYAMLError):
pass
-class Composer:
+class Composer:
def __init__(self):
self.anchors = {}
@@ -39,8 +41,8 @@ class Composer:
if not self.check_event(StreamEndEvent):
event = self.get_event()
raise ComposerError("expected a single document in the stream",
- document.start_mark, "but found another document",
- event.start_mark)
+ document.start_mark, "but found another document",
+ event.start_mark)
# Drop the STREAM-END event.
self.get_event()
@@ -66,15 +68,14 @@ class Composer:
anchor = event.anchor
if anchor not in self.anchors:
raise ComposerError(None, None, "found undefined alias %r"
- % anchor, event.start_mark)
+ % anchor, event.start_mark)
return self.anchors[anchor]
event = self.peek_event()
anchor = event.anchor
- if anchor is not None:
- if anchor in self.anchors:
+ if anchor is not None and anchor in self.anchors:
raise ComposerError("found duplicate anchor %r; first occurence"
- % anchor, self.anchors[anchor].start_mark,
- "second occurence", event.start_mark)
+ % anchor, self.anchors[anchor].start_mark,
+ "second occurence", event.start_mark)
self.descend_resolver(parent, index)
if self.check_event(ScalarEvent):
node = self.compose_scalar_node(anchor)
@@ -91,7 +92,7 @@ class Composer:
if tag is None or tag == '!':
tag = self.resolve(ScalarNode, event.value, event.implicit)
node = ScalarNode(tag, event.value,
- event.start_mark, event.end_mark, style=event.style)
+ event.start_mark, event.end_mark, style=event.style)
if anchor is not None:
self.anchors[anchor] = node
return node
@@ -102,8 +103,8 @@ class Composer:
if tag is None or tag == '!':
tag = self.resolve(SequenceNode, None, start_event.implicit)
node = SequenceNode(tag, [],
- start_event.start_mark, None,
- flow_style=start_event.flow_style)
+ start_event.start_mark, None,
+ flow_style=start_event.flow_style)
if anchor is not None:
self.anchors[anchor] = node
index = 0
@@ -120,20 +121,19 @@ class Composer:
if tag is None or tag == '!':
tag = self.resolve(MappingNode, None, start_event.implicit)
node = MappingNode(tag, [],
- start_event.start_mark, None,
- flow_style=start_event.flow_style)
+ start_event.start_mark, None,
+ flow_style=start_event.flow_style)
if anchor is not None:
self.anchors[anchor] = node
while not self.check_event(MappingEndEvent):
- #key_event = self.peek_event()
+ # key_event = self.peek_event()
item_key = self.compose_node(node, None)
- #if item_key in node.value:
+ # if item_key in node.value:
# raise ComposerError("while composing a mapping", start_event.start_mark,
# "found duplicate key", key_event.start_mark)
item_value = self.compose_node(node, item_key)
- #node.value[item_key] = item_value
+ # node.value[item_key] = item_value
node.value.append((item_key, item_value))
end_event = self.get_event()
node.end_mark = end_event.end_mark
return node
-
diff --git a/tablib/packages/yaml3/constructor.py b/tablib/packages/yaml3/constructor.py
index bd25b79..3fc9c7f 100644
--- a/tablib/packages/yaml3/constructor.py
+++ b/tablib/packages/yaml3/constructor.py
@@ -5,7 +5,13 @@ __all__ = ['BaseConstructor', 'SafeConstructor', 'Constructor',
from .error import *
from .nodes import *
-import collections, datetime, base64, binascii, re, sys, types
+import base64
+import binascii
+import collections
+import datetime
+import re
+import sys
+import types
class ConstructorError(MarkedYAMLError):
pass
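
One way to confirm the cleanup locally is to run a style checker over the packages listed in the diffstat. A small sketch, assuming pycodestyle is installed and the working directory is the repository root; neither the tool nor this script is part of the commit:

import subprocess

# Check the four vendored packages touched by this patch; a generous line
# length is used because several continuation lines still exceed 79 columns.
subprocess.run(
    [
        "pycodestyle",
        "--max-line-length=120",
        "tablib/packages/dbfpy",
        "tablib/packages/dbfpy3",
        "tablib/packages/yaml",
        "tablib/packages/yaml3",
    ],
    check=False,  # report violations on stdout without raising an exception
)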