| author | tusharmakkar08 <tusharmakkar08@gmail.com> | 2016-02-22 16:36:26 +0530 |
|---|---|---|
| committer | tusharmakkar08 <tusharmakkar08@gmail.com> | 2016-02-22 16:36:26 +0530 |
| commit | d515724817111d9dc60db15e1b54e149f807fba1 (patch) | |
| tree | de507ff397ddd44458e1cde114d839eaf0e208ab | |
| parent | 2814fbc38100e00b2e78d47cb05defa8e9854126 (diff) | |
| download | tablib-d515724817111d9dc60db15e1b54e149f807fba1.tar.gz | |
PEP-8 standards followed
| -rw-r--r-- | AUTHORS | 1 |
|---|---|---|
| -rw-r--r-- | tablib/packages/dbfpy/dbf.py | 47 |
| -rw-r--r-- | tablib/packages/dbfpy/dbfnew.py | 31 |
| -rw-r--r-- | tablib/packages/dbfpy3/dbf.py | 43 |
| -rw-r--r-- | tablib/packages/dbfpy3/dbfnew.py | 29 |
| -rw-r--r-- | tablib/packages/yaml/composer.py | 39 |
| -rw-r--r-- | tablib/packages/yaml/constructor.py | 195 |
| -rw-r--r-- | tablib/packages/yaml3/composer.py | 40 |
| -rw-r--r-- | tablib/packages/yaml3/constructor.py | 8 |
| -rwxr-xr-x | test_tablib.py | 123 |
10 files changed, 262 insertions, 294 deletions
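
The patch itself is a mechanical PEP-8 cleanup: `##` block comments become single-`#` comments, camelCase helpers such as `demoRead`/`demoCreate` are renamed to `demo_read`/`demo_create`, operators and commas gain surrounding whitespace, continuation lines are re-aligned with their opening bracket, and top-level definitions get the two blank lines PEP 8 asks for. A minimal before/after sketch of those conventions (the function below is illustrative, not code from tablib):

```python
## Old style removed by this commit: double-hash comment, camelCase
## name, no whitespace around operators.
def demoRead(rows):
    return [r*2 for r in rows]


# New style the commit converges on: single-hash comment, snake_case
# name, spaced operators, two blank lines between top-level definitions.
def demo_read(rows):
    return [r * 2 for r in rows]


if __name__ == '__main__':
    # Both spellings behave identically; only the style differs.
    assert demoRead([1, 2]) == demo_read([1, 2]) == [2, 4]
```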
```diff
@@ -32,3 +32,4 @@ Patches and Suggestions
 - Rabin Nankhwa
 - Marco Dallagiacoma
 - Mathias Loesch
+- Tushar Makkar
diff --git a/tablib/packages/dbfpy/dbf.py b/tablib/packages/dbfpy/dbf.py
index b3d2e21..add74da 100644
--- a/tablib/packages/dbfpy/dbf.py
+++ b/tablib/packages/dbfpy/dbf.py
@@ -63,9 +63,10 @@
 __author__ = "Jeff Kunce <kuncej@mail.conservation.state.mo.us>"
 __all__ = ["Dbf"]
 
 from . import header
-from .import record
+from . import record
 from utils import INVALID_VALUE
 
+
 class Dbf(object):
     """DBF accessor.
@@ -82,13 +83,13 @@ class Dbf(object):
     """
 
     __slots__ = ("name", "header", "stream",
-        "_changed", "_new", "_ignore_errors")
+                 "_changed", "_new", "_ignore_errors")
 
     HeaderClass = header.DbfHeader
     RecordClass = record.DbfRecord
     INVALID_VALUE = INVALID_VALUE
 
-    ## initialization and creation helpers
+    # initialization and creation helpers
 
     def __init__(self, f, readOnly=False, new=False, ignoreErrors=False):
         """Initialize instance.
@@ -137,7 +138,7 @@ class Dbf(object):
         self._new = bool(new)
         self._changed = False
 
-    ## properties
+    # properties
 
     closed = property(lambda self: self.stream.closed)
     recordCount = property(lambda self: self.header.recordCount)
@@ -149,6 +150,7 @@ class Dbf(object):
     def ignoreErrors(self, value):
         """Update `ignoreErrors` flag on the header object and self"""
         self.header.ignoreErrors = self._ignore_errors = bool(value)
+
     ignoreErrors = property(
         lambda self: self._ignore_errors,
         ignoreErrors,
         """
@@ -159,7 +161,7 @@ class Dbf(object):
         """)
 
-    ## protected methods
+    # protected methods
 
     def _fixIndex(self, index):
         """Return fixed index.
@@ -185,7 +187,7 @@ class Dbf(object):
             raise IndexError("Record index out of range")
         return index
 
-    ## iterface methods
+    # iterface methods
 
     def close(self):
         self.flush()
@@ -226,9 +228,9 @@ class Dbf(object):
             self.header.addField(*defs)
         else:
             raise TypeError("At least one record was added, "
-                "structure can't be changed")
+                            "structure can't be changed")
 
-    ## 'magic' methods (representation and sequence interface)
+    # 'magic' methods (representation and sequence interface)
 
     def __repr__(self):
         return "Dbf stream '%s'\n" % self.stream + repr(self.header)
@@ -248,19 +250,20 @@ class Dbf(object):
         self._changed = True
         self._new = False
 
-    #def __del__(self):
-    #    """Flush stream upon deletion of the object."""
-    #    self.flush()
+    # def __del__(self):
+    #     """Flush stream upon deletion of the object."""
+    #     self.flush()
 
-def demoRead(filename):
+def demo_read(filename):
     _dbf = Dbf(filename, True)
     for _rec in _dbf:
         print
         print(repr(_rec))
     _dbf.close()
 
-def demoCreate(filename):
+
+def demo_create(filename):
     _dbf = Dbf(filename, new=True)
     _dbf.addField(
         ("NAME", "C", 15),
@@ -269,10 +272,10 @@ def demoCreate(filename):
         ("BIRTHDATE", "D"),
     )
     for (_n, _s, _i, _b) in (
-        ("John", "Miller", "YC", (1981, 1, 2)),
-        ("Andy", "Larkin", "AL", (1982, 3, 4)),
-        ("Bill", "Clinth", "", (1983, 5, 6)),
-        ("Bobb", "McNail", "", (1984, 7, 8)),
+            ("John", "Miller", "YC", (1981, 1, 2)),
+            ("Andy", "Larkin", "AL", (1982, 3, 4)),
+            ("Bill", "Clinth", "", (1983, 5, 6)),
+            ("Bobb", "McNail", "", (1984, 7, 8)),
     ):
         _rec = _dbf.newRecord()
         _rec["NAME"] = _n
@@ -283,10 +286,12 @@ def demoCreate(filename):
     print(repr(_dbf))
     _dbf.close()
 
-if (__name__=='__main__'):
+
+if __name__ == '__main__':
     import sys
+
     _name = len(sys.argv) > 1 and sys.argv[1] or "county.dbf"
-    demoCreate(_name)
-    demoRead(_name)
+    demo_create(_name)
+    demo_read(_name)
 
-# vim: set et sw=4 sts=4 :
+    # vim: set et sw=4 sts=4 :
diff --git a/tablib/packages/dbfpy/dbfnew.py b/tablib/packages/dbfpy/dbfnew.py
index dea7e52..7a06d00 100644
--- a/tablib/packages/dbfpy/dbfnew.py
+++ b/tablib/packages/dbfpy/dbfnew.py
@@ -29,6 +29,7 @@
 from fields import *
 from header import *
 from record import *
 
+
 class _FieldDefinition(object):
     """Field definition.
@@ -151,28 +152,28 @@ class dbf_new(object):
         _dbfh.write(stream)
 
-if (__name__=='__main__'):
+if __name__ == '__main__':
     # create a new DBF-File
-    dbfn=dbf_new()
-    dbfn.add_field("name",'C',80)
-    dbfn.add_field("price",'N',10,2)
-    dbfn.add_field("date",'D',8)
+    dbfn = dbf_new()
+    dbfn.add_field("name", 'C', 80)
+    dbfn.add_field("price", 'N', 10, 2)
+    dbfn.add_field("date", 'D', 8)
     dbfn.write("tst.dbf")
     # test new dbf
     print "*** created tst.dbf: ***"
     dbft = Dbf('tst.dbf', readOnly=0)
     print repr(dbft)
     # add a record
-    rec=DbfRecord(dbft)
-    rec['name']='something'
-    rec['price']=10.5
-    rec['date']=(2000,1,12)
+    rec = DbfRecord(dbft)
+    rec['name'] = 'something'
+    rec['price'] = 10.5
+    rec['date'] = (2000, 1, 12)
     rec.store()
     # add another record
-    rec=DbfRecord(dbft)
-    rec['name']='foo and bar'
-    rec['price']=12234
-    rec['date']=(1992,7,15)
+    rec = DbfRecord(dbft)
+    rec['name'] = 'foo and bar'
+    rec['price'] = 12234
+    rec['date'] = (1992, 7, 15)
     rec.store()
 
     # show the records
@@ -181,8 +182,8 @@ if (__name__=='__main__'):
     for i1 in range(len(dbft)):
         rec = dbft[i1]
         for fldName in dbft.fieldNames:
-            print '%s:\t %s'%(fldName, rec[fldName])
+            print '%s:\t %s' % (fldName, rec[fldName])
         print
     dbft.close()
 
-# vim: set et sts=4 sw=4 :
+    # vim: set et sts=4 sw=4 :
diff --git a/tablib/packages/dbfpy3/dbf.py b/tablib/packages/dbfpy3/dbf.py
index 42de8a4..6fee457 100644
--- a/tablib/packages/dbfpy3/dbf.py
+++ b/tablib/packages/dbfpy3/dbf.py
@@ -66,6 +66,7 @@
 from . import header
 from . import record
 from .utils import INVALID_VALUE
 
+
 class Dbf(object):
     """DBF accessor.
@@ -82,13 +83,13 @@ class Dbf(object):
     """
 
     __slots__ = ("name", "header", "stream",
-        "_changed", "_new", "_ignore_errors")
+                 "_changed", "_new", "_ignore_errors")
 
     HeaderClass = header.DbfHeader
     RecordClass = record.DbfRecord
     INVALID_VALUE = INVALID_VALUE
 
-    ## initialization and creation helpers
+    # initialization and creation helpers
 
    def __init__(self, f, readOnly=False, new=False, ignoreErrors=False):
         """Initialize instance.
@@ -137,7 +138,7 @@ class Dbf(object):
         self._new = bool(new)
         self._changed = False
 
-    ## properties
+    # properties
 
     closed = property(lambda self: self.stream.closed)
     recordCount = property(lambda self: self.header.recordCount)
@@ -149,6 +150,7 @@ class Dbf(object):
     def ignoreErrors(self, value):
         """Update `ignoreErrors` flag on the header object and self"""
         self.header.ignoreErrors = self._ignore_errors = bool(value)
+
     ignoreErrors = property(
         lambda self: self._ignore_errors,
         ignoreErrors,
         """
@@ -159,7 +161,7 @@ class Dbf(object):
         """)
 
-    ## protected methods
+    # protected methods
 
     def _fixIndex(self, index):
         """Return fixed index.
@@ -185,7 +187,7 @@ class Dbf(object):
             raise IndexError("Record index out of range")
         return index
 
-    ## iterface methods
+    # iterface methods
 
     def close(self):
         self.flush()
@@ -227,9 +229,9 @@ class Dbf(object):
             self.header.addField(*defs)
         else:
             raise TypeError("At least one record was added, "
-                "structure can't be changed")
+                            "structure can't be changed")
 
-    ## 'magic' methods (representation and sequence interface)
+    # 'magic' methods (representation and sequence interface)
 
     def __repr__(self):
         return "Dbf stream '%s'\n" % self.stream + repr(self.header)
@@ -249,19 +251,20 @@ class Dbf(object):
         self._changed = True
         self._new = False
 
-    #def __del__(self):
-    #    """Flush stream upon deletion of the object."""
-    #    self.flush()
+    # def __del__(self):
+    #     """Flush stream upon deletion of the object."""
+    #     self.flush()
 
-def demoRead(filename):
+def demo_read(filename):
     _dbf = Dbf(filename, True)
     for _rec in _dbf:
         print()
         print(repr(_rec))
     _dbf.close()
 
-def demoCreate(filename):
+
+def demo_create(filename):
     _dbf = Dbf(filename, new=True)
     _dbf.addField(
         ("NAME", "C", 15),
@@ -270,10 +273,10 @@ def demoCreate(filename):
         ("BIRTHDATE", "D"),
     )
     for (_n, _s, _i, _b) in (
-        ("John", "Miller", "YC", (1981, 1, 2)),
-        ("Andy", "Larkin", "AL", (1982, 3, 4)),
-        ("Bill", "Clinth", "", (1983, 5, 6)),
-        ("Bobb", "McNail", "", (1984, 7, 8)),
+            ("John", "Miller", "YC", (1981, 1, 2)),
+            ("Andy", "Larkin", "AL", (1982, 3, 4)),
+            ("Bill", "Clinth", "", (1983, 5, 6)),
+            ("Bobb", "McNail", "", (1984, 7, 8)),
     ):
         _rec = _dbf.newRecord()
         _rec["NAME"] = _n
@@ -284,10 +287,12 @@ def demoCreate(filename):
     print(repr(_dbf))
     _dbf.close()
 
-if (__name__=='__main__'):
+
+if __name__ == '__main__':
     import sys
+
     _name = len(sys.argv) > 1 and sys.argv[1] or "county.dbf"
-    demoCreate(_name)
-    demoRead(_name)
+    demo_create(_name)
+    demo_read(_name)
 
 # vim: set et sw=4 sts=4 :
diff --git a/tablib/packages/dbfpy3/dbfnew.py b/tablib/packages/dbfpy3/dbfnew.py
index 4051bc6..8fab275 100644
--- a/tablib/packages/dbfpy3/dbfnew.py
+++ b/tablib/packages/dbfpy3/dbfnew.py
@@ -29,6 +29,7 @@
 from .fields import *
 from .header import *
 from .record import *
 
+
 class _FieldDefinition(object):
     """Field definition.
@@ -145,28 +146,28 @@ class dbf_new(object):
         _dbfStream.close()
 
-if (__name__=='__main__'):
+if __name__ == '__main__':
     # create a new DBF-File
-    dbfn=dbf_new()
-    dbfn.add_field("name",'C',80)
-    dbfn.add_field("price",'N',10,2)
-    dbfn.add_field("date",'D',8)
+    dbfn = dbf_new()
+    dbfn.add_field("name", 'C', 80)
+    dbfn.add_field("price", 'N', 10, 2)
+    dbfn.add_field("date", 'D', 8)
     dbfn.write("tst.dbf")
     # test new dbf
     print("*** created tst.dbf: ***")
     dbft = Dbf('tst.dbf', readOnly=0)
     print(repr(dbft))
     # add a record
-    rec=DbfRecord(dbft)
-    rec['name']='something'
-    rec['price']=10.5
-    rec['date']=(2000,1,12)
+    rec = DbfRecord(dbft)
+    rec['name'] = 'something'
+    rec['price'] = 10.5
+    rec['date'] = (2000, 1, 12)
     rec.store()
     # add another record
-    rec=DbfRecord(dbft)
-    rec['name']='foo and bar'
-    rec['price']=12234
-    rec['date']=(1992,7,15)
+    rec = DbfRecord(dbft)
+    rec['name'] = 'foo and bar'
+    rec['price'] = 12234
+    rec['date'] = (1992, 7, 15)
     rec.store()
 
     # show the records
@@ -175,7 +176,7 @@ if (__name__=='__main__'):
     for i1 in range(len(dbft)):
         rec = dbft[i1]
         for fldName in dbft.fieldNames:
-            print('%s:\t %s'%(fldName, rec[fldName]))
+            print('%s:\t %s' % (fldName, rec[fldName]))
         print()
     dbft.close()
diff --git a/tablib/packages/yaml/composer.py b/tablib/packages/yaml/composer.py
index 06e5ac7..d4ef0f1 100644
--- a/tablib/packages/yaml/composer.py
+++ b/tablib/packages/yaml/composer.py
@@ -1,15 +1,16 @@
-
 __all__ = ['Composer', 'ComposerError']
 
 from error import MarkedYAMLError
-from events import *
-from nodes import *
+from events import StreamEndEvent, StreamStartEvent, AliasEvent, SequenceEndEvent, SequenceStartEvent, MappingEndEvent,\
+    MappingStartEvent, ScalarEvent
+from nodes import MappingNode, ScalarNode, SequenceNode
+
 
 class ComposerError(MarkedYAMLError):
     pass
 
-class Composer(object):
+class Composer(object):
     def __init__(self):
         self.anchors = {}
@@ -39,8 +40,8 @@ class Composer(object):
         if not self.check_event(StreamEndEvent):
             event = self.get_event()
             raise ComposerError("expected a single document in the stream",
-                    document.start_mark, "but found another document",
-                    event.start_mark)
+                                document.start_mark, "but found another document",
+                                event.start_mark)
 
         # Drop the STREAM-END event.
         self.get_event()
@@ -66,15 +67,14 @@ class Composer(object):
             anchor = event.anchor
             if anchor not in self.anchors:
                 raise ComposerError(None, None, "found undefined alias %r"
-                        % anchor.encode('utf-8'), event.start_mark)
+                                    % anchor.encode('utf-8'), event.start_mark)
             return self.anchors[anchor]
         event = self.peek_event()
         anchor = event.anchor
-        if anchor is not None:
-            if anchor in self.anchors:
+        if anchor is not None and anchor in self.anchors:
             raise ComposerError("found duplicate anchor %r; first occurence"
-                    % anchor.encode('utf-8'), self.anchors[anchor].start_mark,
-                    "second occurence", event.start_mark)
+                                % anchor.encode('utf-8'), self.anchors[anchor].start_mark,
+                                "second occurence", event.start_mark)
         self.descend_resolver(parent, index)
         if self.check_event(ScalarEvent):
             node = self.compose_scalar_node(anchor)
@@ -91,7 +91,7 @@ class Composer(object):
         if tag is None or tag == u'!':
             tag = self.resolve(ScalarNode, event.value, event.implicit)
         node = ScalarNode(tag, event.value,
-                event.start_mark, event.end_mark, style=event.style)
+                          event.start_mark, event.end_mark, style=event.style)
         if anchor is not None:
             self.anchors[anchor] = node
         return node
@@ -102,8 +102,8 @@ class Composer(object):
         if tag is None or tag == u'!':
             tag = self.resolve(SequenceNode, None, start_event.implicit)
         node = SequenceNode(tag, [],
-                start_event.start_mark, None,
-                flow_style=start_event.flow_style)
+                            start_event.start_mark, None,
+                            flow_style=start_event.flow_style)
         if anchor is not None:
             self.anchors[anchor] = node
         index = 0
@@ -120,20 +120,19 @@ class Composer(object):
         if tag is None or tag == u'!':
             tag = self.resolve(MappingNode, None, start_event.implicit)
         node = MappingNode(tag, [],
-                start_event.start_mark, None,
-                flow_style=start_event.flow_style)
+                           start_event.start_mark, None,
+                           flow_style=start_event.flow_style)
         if anchor is not None:
             self.anchors[anchor] = node
         while not self.check_event(MappingEndEvent):
-            #key_event = self.peek_event()
+            # key_event = self.peek_event()
             item_key = self.compose_node(node, None)
-            #if item_key in node.value:
+            # if item_key in node.value:
             #     raise ComposerError("while composing a mapping", start_event.start_mark,
             #             "found duplicate key", key_event.start_mark)
             item_value = self.compose_node(node, item_key)
-            #node.value[item_key] = item_value
+            # node.value[item_key] = item_value
             node.value.append((item_key, item_value))
         end_event = self.get_event()
         node.end_mark = end_event.end_mark
         return node
-
diff --git a/tablib/packages/yaml/constructor.py b/tablib/packages/yaml/constructor.py
index 420c434..d44c7bd 100644
--- a/tablib/packages/yaml/constructor.py
+++ b/tablib/packages/yaml/constructor.py
@@ -1,6 +1,5 @@
-
 __all__ = ['BaseConstructor', 'SafeConstructor', 'Constructor',
-    'ConstructorError']
+           'ConstructorError']
 
 from error import *
 from nodes import *
@@ -12,13 +11,17 @@ try:
 except NameError:
     from sets import Set as set
 
-import binascii, re, sys, types
+import binascii
+import re
+import sys
+import types
+
 
 class ConstructorError(MarkedYAMLError):
     pass
 
-class BaseConstructor(object):
+class BaseConstructor(object):
 
     yaml_constructors = {}
     yaml_multi_constructors = {}
@@ -65,7 +68,7 @@ class BaseConstructor(object):
             return self.constructed_objects[node]
         if node in self.recursive_objects:
             raise ConstructorError(None, None,
-                    "found unconstructable recursive node", node.start_mark)
+                                   "found unconstructable recursive node", node.start_mark)
         self.recursive_objects[node] = None
         constructor = None
         tag_suffix = None
@@ -110,23 +113,23 @@
     def construct_scalar(self, node):
         if not isinstance(node, ScalarNode):
             raise ConstructorError(None, None,
-                    "expected a scalar node, but found %s" % node.id,
-                    node.start_mark)
+                                   "expected a scalar node, but found %s" % node.id,
+                                   node.start_mark)
         return node.value
 
     def construct_sequence(self, node, deep=False):
         if not isinstance(node, SequenceNode):
             raise ConstructorError(None, None,
-                    "expected a sequence node, but found %s" % node.id,
-                    node.start_mark)
+                                   "expected a sequence node, but found %s" % node.id,
+                                   node.start_mark)
         return [self.construct_object(child, deep=deep)
                 for child in node.value]
 
     def construct_mapping(self, node, deep=False):
         if not isinstance(node, MappingNode):
             raise ConstructorError(None, None,
-                    "expected a mapping node, but found %s" % node.id,
-                    node.start_mark)
+                                   "expected a mapping node, but found %s" % node.id,
+                                   node.start_mark)
         mapping = {}
         for key_node, value_node in node.value:
             key = self.construct_object(key_node, deep=deep)
@@ -134,7 +137,7 @@ class BaseConstructor(object):
                 hash(key)
             except TypeError, exc:
                 raise ConstructorError("while constructing a mapping", node.start_mark,
-                        "found unacceptable key (%s)" % exc, key_node.start_mark)
+                                       "found unacceptable key (%s)" % exc, key_node.start_mark)
             value = self.construct_object(value_node, deep=deep)
             mapping[key] = value
         return mapping
@@ -142,8 +145,8 @@ class BaseConstructor(object):
     def construct_pairs(self, node, deep=False):
         if not isinstance(node, MappingNode):
             raise ConstructorError(None, None,
-                    "expected a mapping node, but found %s" % node.id,
-                    node.start_mark)
+                                   "expected a mapping node, but found %s" % node.id,
+                                   node.start_mark)
         pairs = []
         for key_node, value_node in node.value:
             key = self.construct_object(key_node, deep=deep)
@@ -155,16 +158,18 @@ class BaseConstructor(object):
         if not 'yaml_constructors' in cls.__dict__:
             cls.yaml_constructors = cls.yaml_constructors.copy()
         cls.yaml_constructors[tag] = constructor
+
     add_constructor = classmethod(add_constructor)
 
     def add_multi_constructor(cls, tag_prefix, multi_constructor):
         if not 'yaml_multi_constructors' in cls.__dict__:
             cls.yaml_multi_constructors = cls.yaml_multi_constructors.copy()
         cls.yaml_multi_constructors[tag_prefix] = multi_constructor
+
    add_multi_constructor = classmethod(add_multi_constructor)
 
-class SafeConstructor(BaseConstructor):
+class SafeConstructor(BaseConstructor):
     def construct_scalar(self, node):
         if isinstance(node, MappingNode):
             for key_node, value_node in node.value:
@@ -187,9 +192,9 @@ class SafeConstructor(BaseConstructor):
                     for subnode in value_node.value:
                         if not isinstance(subnode, MappingNode):
                             raise ConstructorError("while constructing a mapping",
-                                    node.start_mark,
-                                    "expected a mapping for merging, but found %s"
-                                    % subnode.id, subnode.start_mark)
+                                                   node.start_mark,
+                                                   "expected a mapping for merging, but found %s"
+                                                   % subnode.id, subnode.start_mark)
                         self.flatten_mapping(subnode)
                         submerge.append(subnode.value)
                     submerge.reverse()
@@ -197,8 +202,8 @@ class SafeConstructor(BaseConstructor):
                     merge.extend(value)
                 else:
                     raise ConstructorError("while constructing a mapping", node.start_mark,
-                            "expected a mapping or list of mappings for merging, but found %s"
-                            % value_node.id, value_node.start_mark)
+                                           "expected a mapping or list of mappings for merging, but found %s"
+                                           % value_node.id, value_node.start_mark)
             elif key_node.tag == u'tag:yaml.org,2002:value':
                 key_node.tag = u'tag:yaml.org,2002:str'
                 index += 1
@@ -217,12 +222,12 @@ class SafeConstructor(BaseConstructor):
         return None
 
     bool_values = {
-        u'yes':     True,
-        u'no':      False,
-        u'true':    True,
-        u'false':   False,
-        u'on':      True,
-        u'off':     False,
+        u'yes': True,
+        u'no': False,
+        u'true': True,
+        u'false': False,
+        u'on': True,
+        u'off': False,
     }
 
     def construct_yaml_bool(self, node):
@@ -240,27 +245,27 @@ class SafeConstructor(BaseConstructor):
         if value == '0':
             return 0
         elif value.startswith('0b'):
-            return sign*int(value[2:], 2)
+            return sign * int(value[2:], 2)
         elif value.startswith('0x'):
-            return sign*int(value[2:], 16)
+            return sign * int(value[2:], 16)
         elif value[0] == '0':
-            return sign*int(value, 8)
+            return sign * int(value, 8)
         elif ':' in value:
             digits = [int(part) for part in value.split(':')]
             digits.reverse()
             base = 1
             value = 0
             for digit in digits:
-                value += digit*base
+                value += digit * base
                 base *= 60
-            return sign*value
+            return sign * value
         else:
-            return sign*int(value)
+            return sign * int(value)
 
     inf_value = 1e300
-    while inf_value != inf_value*inf_value:
+    while inf_value != inf_value * inf_value:
         inf_value *= inf_value
-    nan_value = -inf_value/inf_value # Trying to make a quiet NaN (like C99).
+    nan_value = -inf_value / inf_value  # Trying to make a quiet NaN (like C99).
 
     def construct_yaml_float(self, node):
         value = str(self.construct_scalar(node))
@@ -271,7 +276,7 @@ class SafeConstructor(BaseConstructor):
         if value[0] in '+-':
             value = value[1:]
         if value == '.inf':
-            return sign*self.inf_value
+            return sign * self.inf_value
         elif value == '.nan':
             return self.nan_value
         elif ':' in value:
@@ -280,11 +285,11 @@ class SafeConstructor(BaseConstructor):
             base = 1
             value = 0.0
             for digit in digits:
-                value += digit*base
+                value += digit * base
                 base *= 60
-            return sign*value
+            return sign * value
         else:
-            return sign*float(value)
+            return sign * float(value)
 
     def construct_yaml_binary(self, node):
         value = self.construct_scalar(node)
@@ -292,10 +297,10 @@ class SafeConstructor(BaseConstructor):
             return str(value).decode('base64')
         except (binascii.Error, UnicodeEncodeError), exc:
             raise ConstructorError(None, None,
-                    "failed to decode base64 data: %s" % exc, node.start_mark)
+                                   "failed to decode base64 data: %s" % exc, node.start_mark)
 
     timestamp_regexp = re.compile(
-            ur'''^(?P<year>[0-9][0-9][0-9][0-9])
+        ur'''^(?P<year>[0-9][0-9][0-9][0-9])
                 -(?P<month>[0-9][0-9]?)
                 -(?P<day>[0-9][0-9]?)
                 (?:(?:[Tt]|[ \t]+)
@@ -343,16 +348,16 @@ class SafeConstructor(BaseConstructor):
         yield omap
         if not isinstance(node, SequenceNode):
             raise ConstructorError("while constructing an ordered map", node.start_mark,
-                    "expected a sequence, but found %s" % node.id, node.start_mark)
+                                   "expected a sequence, but found %s" % node.id, node.start_mark)
         for subnode in node.value:
             if not isinstance(subnode, MappingNode):
                 raise ConstructorError("while constructing an ordered map", node.start_mark,
-                        "expected a mapping of length 1, but found %s" % subnode.id,
-                        subnode.start_mark)
+                                       "expected a mapping of length 1, but found %s" % subnode.id,
+                                       subnode.start_mark)
             if len(subnode.value) != 1:
                 raise ConstructorError("while constructing an ordered map", node.start_mark,
-                        "expected a single mapping item, but found %d items" % len(subnode.value),
-                        subnode.start_mark)
+                                       "expected a single mapping item, but found %d items" % len(subnode.value),
+                                       subnode.start_mark)
             key_node, value_node = subnode.value[0]
             key = self.construct_object(key_node)
             value = self.construct_object(value_node)
@@ -364,16 +369,16 @@ class SafeConstructor(BaseConstructor):
         yield pairs
         if not isinstance(node, SequenceNode):
             raise ConstructorError("while constructing pairs", node.start_mark,
-                    "expected a sequence, but found %s" % node.id, node.start_mark)
+                                   "expected a sequence, but found %s" % node.id, node.start_mark)
         for subnode in node.value:
             if not isinstance(subnode, MappingNode):
                 raise ConstructorError("while constructing pairs", node.start_mark,
-                        "expected a mapping of length 1, but found %s" % subnode.id,
-                        subnode.start_mark)
+                                       "expected a mapping of length 1, but found %s" % subnode.id,
+                                       subnode.start_mark)
             if len(subnode.value) != 1:
                 raise ConstructorError("while constructing pairs", node.start_mark,
-                        "expected a single mapping item, but found %d items" % len(subnode.value),
-                        subnode.start_mark)
+                                       "expected a single mapping item, but found %d items" % len(subnode.value),
+                                       subnode.start_mark)
             key_node, value_node = subnode.value[0]
             key = self.construct_object(key_node)
             value = self.construct_object(value_node)
@@ -415,62 +420,63 @@ class SafeConstructor(BaseConstructor):
     def construct_undefined(self, node):
         raise ConstructorError(None, None,
-                "could not determine a constructor for the tag %r" % node.tag.encode('utf-8'),
-                node.start_mark)
+                               "could not determine a constructor for the tag %r" % node.tag.encode('utf-8'),
+                               node.start_mark)
+
 
 SafeConstructor.add_constructor(
-        u'tag:yaml.org,2002:null',
-        SafeConstructor.construct_yaml_null)
+    u'tag:yaml.org,2002:null',
+    SafeConstructor.construct_yaml_null)
 
 SafeConstructor.add_constructor(
-        u'tag:yaml.org,2002:bool',
-        SafeConstructor.construct_yaml_bool)
+    u'tag:yaml.org,2002:bool',
+    SafeConstructor.construct_yaml_bool)
 
 SafeConstructor.add_constructor(
-        u'tag:yaml.org,2002:int',
-        SafeConstructor.construct_yaml_int)
+    u'tag:yaml.org,2002:int',
+    SafeConstructor.construct_yaml_int)
 
 SafeConstructor.add_constructor(
-        u'tag:yaml.org,2002:float',
-        SafeConstructor.construct_yaml_float)
+    u'tag:yaml.org,2002:float',
+    SafeConstructor.construct_yaml_float)
 
 SafeConstructor.add_constructor(
-        u'tag:yaml.org,2002:binary',
-        SafeConstructor.construct_yaml_binary)
+    u'tag:yaml.org,2002:binary',
+    SafeConstructor.construct_yaml_binary)
 
 SafeConstructor.add_constructor(
-        u'tag:yaml.org,2002:timestamp',
-        SafeConstructor.construct_yaml_timestamp)
+    u'tag:yaml.org,2002:timestamp',
+    SafeConstructor.construct_yaml_timestamp)
 
 SafeConstructor.add_constructor(
-        u'tag:yaml.org,2002:omap',
-        SafeConstructor.construct_yaml_omap)
+    u'tag:yaml.org,2002:omap',
+    SafeConstructor.construct_yaml_omap)
 
 SafeConstructor.add_constructor(
-        u'tag:yaml.org,2002:pairs',
-        SafeConstructor.construct_yaml_pairs)
+    u'tag:yaml.org,2002:pairs',
+    SafeConstructor.construct_yaml_pairs)
 
 SafeConstructor.add_constructor(
-        u'tag:yaml.org,2002:set',
-        SafeConstructor.construct_yaml_set)
+    u'tag:yaml.org,2002:set',
+    SafeConstructor.construct_yaml_set)
 
 SafeConstructor.add_constructor(
-        u'tag:yaml.org,2002:str',
-        SafeConstructor.construct_yaml_str)
+    u'tag:yaml.org,2002:str',
+    SafeConstructor.construct_yaml_str)
 
 SafeConstructor.add_constructor(
-        u'tag:yaml.org,2002:seq',
-        SafeConstructor.construct_yaml_seq)
+    u'tag:yaml.org,2002:seq',
+    SafeConstructor.construct_yaml_seq)
 
 SafeConstructor.add_constructor(
-        u'tag:yaml.org,2002:map',
-        SafeConstructor.construct_yaml_map)
+    u'tag:yaml.org,2002:map',
+    SafeConstructor.construct_yaml_map)
 
 SafeConstructor.add_constructor(None,
-        SafeConstructor.construct_undefined)
+                                SafeConstructor.construct_undefined)
 
-class Constructor(SafeConstructor):
+class Constructor(SafeConstructor):
     def construct_python_str(self, node):
         return self.construct_scalar(node).encode('utf-8')
@@ -481,7 +487,7 @@ class Constructor(SafeConstructor):
         return long(self.construct_yaml_int(node))
 
     def construct_python_complex(self, node):
-       return complex(self.construct_scalar(node))
+        return complex(self.construct_scalar(node))
 
     def construct_python_tuple(self, node):
         return tuple(self.construct_sequence(node))
@@ -489,21 +495,21 @@ class Constructor(SafeConstructor):
     def find_python_module(self, name, mark):
         if not name:
             raise ConstructorError("while constructing a Python module", mark,
-                    "expected non-empty name appended to the tag", mark)
+                                   "expected non-empty name appended to the tag", mark)
         try:
             __import__(name)
         except ImportError, exc:
             raise ConstructorError("while constructing a Python module", mark,
-                    "cannot find module %r (%s)" % (name.encode('utf-8'), exc), mark)
+                                   "cannot find module %r (%s)" % (name.encode('utf-8'), exc), mark)
         return sys.modules[name]
 
     def find_python_name(self, name, mark):
         if not name:
             raise ConstructorError("while constructing a Python object", mark,
-                    "expected non-empty name appended to the tag", mark)
+                                   "expected non-empty name appended to the tag", mark)
         if u'.' in name:
             # Python 2.4 only
-            #module_name, object_name = name.rsplit('.', 1)
+            # module_name, object_name = name.rsplit('.', 1)
             items = name.split('.')
             object_name = items.pop()
             module_name = '.'.join(items)
@@ -514,40 +520,41 @@ class Constructor(SafeConstructor):
             __import__(module_name)
         except ImportError, exc:
             raise ConstructorError("while constructing a Python object", mark,
-                    "cannot find module %r (%s)" % (module_name.encode('utf-8'), exc), mark)
+                                   "cannot find module %r (%s)" % (module_name.encode('utf-8'), exc), mark)
         module = sys.modules[module_name]
         if not hasattr(module, object_name):
             raise ConstructorError("while constructing a Python object", mark,
-                    "cannot find %r in the module %r" % (object_name.encode('utf-8'),
-                    module.__name__), mark)
+                                   "cannot find %r in the module %r" % (object_name.encode('utf-8'),
+                                                                        module.__name__), mark)
         return getattr(module, object_name)
 
     def construct_python_name(self, suffix, node):
         value = self.construct_scalar(node)
         if value:
             raise ConstructorError("while constructing a Python name", node.start_mark,
-                    "expected the empty value, but found %r" % value.encode('utf-8'),
-                    node.start_mark)
+                                   "expected the empty value, but found %r" % value.encode('utf-8'),
+                                   node.start_mark)
         return self.find_python_name(suffix, node.start_mark)
 
     def construct_python_module(self, suffix, node):
         value = self.construct_scalar(node)
         if value:
             raise ConstructorError("while constructing a Python module", node.start_mark,
-                    "expected the empty value, but found %r" % value.encode('utf-8'),
-                    node.start_mark)
+                                   "expected the empty value, but found %r" % value.encode('utf-8'),
+                                   node.start_mark)
         return self.find_python_module(suffix, node.start_mark)
 
-    class classobj: pass
+    class classobj:
+        pass
 
     def make_python_instance(self, suffix, node,
-            args=None, kwds=None, newobj=False):
+                             args=None, kwds=None, newobj=False):
         if not args:
             args = []
         if not kwds:
             kwds = {}
         cls = self.find_python_name(suffix, node.start_mark)
-        if newobj and isinstance(cls, type(self.classobj))  \
+        if newobj and isinstance(cls, type(self.classobj)) \
                 and not args and not kwds:
             instance = self.classobj()
             instance.__class__ = cls
@@ -618,6 +625,7 @@ class Constructor(SafeConstructor):
     def construct_python_object_new(self, suffix, node):
         return self.construct_python_object_apply(suffix, node, newobj=True)
 
+
 Constructor.add_constructor(
     u'tag:yaml.org,2002:python/none',
     Constructor.construct_yaml_null)
@@ -681,4 +689,3 @@
 Constructor.add_multi_constructor(
     u'tag:yaml.org,2002:python/object/new:',
     Constructor.construct_python_object_new)
-
diff --git a/tablib/packages/yaml3/composer.py b/tablib/packages/yaml3/composer.py
index d5c6a7a..97c1306 100644
--- a/tablib/packages/yaml3/composer.py
+++ b/tablib/packages/yaml3/composer.py
@@ -1,15 +1,17 @@
-
 __all__ = ['Composer', 'ComposerError']
 
 from .error import MarkedYAMLError
-from .events import *
-from .nodes import *
+from error import MarkedYAMLError
+from events import StreamEndEvent, StreamStartEvent, AliasEvent, SequenceEndEvent, SequenceStartEvent, MappingEndEvent,\
+    MappingStartEvent, ScalarEvent
+from nodes import MappingNode, ScalarNode, SequenceNode
+
 
 class ComposerError(MarkedYAMLError):
     pass
 
-class Composer:
+class Composer:
     def __init__(self):
         self.anchors = {}
@@ -39,8 +41,8 @@ class Composer:
         if not self.check_event(StreamEndEvent):
             event = self.get_event()
             raise ComposerError("expected a single document in the stream",
-                    document.start_mark, "but found another document",
-                    event.start_mark)
+                                document.start_mark, "but found another document",
+                                event.start_mark)
 
         # Drop the STREAM-END event.
         self.get_event()
@@ -66,15 +68,14 @@ class Composer:
             anchor = event.anchor
             if anchor not in self.anchors:
                 raise ComposerError(None, None, "found undefined alias %r"
-                        % anchor, event.start_mark)
+                                    % anchor, event.start_mark)
             return self.anchors[anchor]
         event = self.peek_event()
         anchor = event.anchor
-        if anchor is not None:
-            if anchor in self.anchors:
+        if anchor is not None and anchor in self.anchors:
             raise ComposerError("found duplicate anchor %r; first occurence"
-                    % anchor, self.anchors[anchor].start_mark,
-                    "second occurence", event.start_mark)
+                                % anchor, self.anchors[anchor].start_mark,
+                                "second occurence", event.start_mark)
         self.descend_resolver(parent, index)
         if self.check_event(ScalarEvent):
             node = self.compose_scalar_node(anchor)
@@ -91,7 +92,7 @@ class Composer:
         if tag is None or tag == '!':
             tag = self.resolve(ScalarNode, event.value, event.implicit)
         node = ScalarNode(tag, event.value,
-                event.start_mark, event.end_mark, style=event.style)
+                          event.start_mark, event.end_mark, style=event.style)
         if anchor is not None:
             self.anchors[anchor] = node
         return node
@@ -102,8 +103,8 @@ class Composer:
         if tag is None or tag == '!':
             tag = self.resolve(SequenceNode, None, start_event.implicit)
         node = SequenceNode(tag, [],
-                start_event.start_mark, None,
-                flow_style=start_event.flow_style)
+                            start_event.start_mark, None,
+                            flow_style=start_event.flow_style)
         if anchor is not None:
             self.anchors[anchor] = node
         index = 0
@@ -120,20 +121,19 @@ class Composer:
         if tag is None or tag == '!':
             tag = self.resolve(MappingNode, None, start_event.implicit)
         node = MappingNode(tag, [],
-                start_event.start_mark, None,
-                flow_style=start_event.flow_style)
+                           start_event.start_mark, None,
+                           flow_style=start_event.flow_style)
         if anchor is not None:
             self.anchors[anchor] = node
         while not self.check_event(MappingEndEvent):
-            #key_event = self.peek_event()
+            # key_event = self.peek_event()
             item_key = self.compose_node(node, None)
-            #if item_key in node.value:
+            # if item_key in node.value:
             #     raise ComposerError("while composing a mapping", start_event.start_mark,
             #             "found duplicate key", key_event.start_mark)
             item_value = self.compose_node(node, item_key)
-            #node.value[item_key] = item_value
+            # node.value[item_key] = item_value
             node.value.append((item_key, item_value))
         end_event = self.get_event()
         node.end_mark = end_event.end_mark
         return node
-
diff --git a/tablib/packages/yaml3/constructor.py b/tablib/packages/yaml3/constructor.py
index bd25b79..3fc9c7f 100644
--- a/tablib/packages/yaml3/constructor.py
+++ b/tablib/packages/yaml3/constructor.py
@@ -5,7 +5,13 @@ __all__ = ['BaseConstructor', 'SafeConstructor', 'Constructor',
 from .error import *
 from .nodes import *
 
-import collections, datetime, base64, binascii, re, sys, types
+import base64
+import binascii
+import collections
+import datetime
+import re
+import sys
+import types
 
 class ConstructorError(MarkedYAMLError):
     pass
diff --git a/test_tablib.py b/test_tablib.py
index f7f7036..63ddf93 100755
--- a/test_tablib.py
+++ b/test_tablib.py
@@ -11,9 +11,6 @@
 from tablib.compat import markup, unicode, is_py3
 from tablib.core import Row
 
-
-
-
 class TablibTestCase(unittest.TestCase):
     """Tablib test cases."""
@@ -35,12 +32,10 @@ class TablibTestCase(unittest.TestCase):
         self.founders.append(self.george)
         self.founders.append(self.tom)
 
-
     def tearDown(self):
         """Teardown."""
         pass
 
-
     def test_empty_append(self):
         """Verify append() correctly adds tuple with no headers."""
 
         new_row = (1, 2, 3)
@@ -50,7 +45,6 @@ class TablibTestCase(unittest.TestCase):
         self.assertTrue(data.width == len(new_row))
         self.assertTrue(data[0] == new_row)
 
-
     def test_empty_append_with_headers(self):
         """Verify append() correctly detects mismatch of number of headers
         and data.
@@ -72,7 +66,6 @@ class TablibTestCase(unittest.TestCase):
 
         self.assertRaises(tablib.InvalidDimensions, set_header_callable)
 
-
     def test_add_column(self):
         """Verify adding column works with/without headers."""
@@ -93,7 +86,6 @@ class TablibTestCase(unittest.TestCase):
 
         self.assertEqual(data['age'], new_col)
 
-
     def test_add_column_no_data_no_headers(self):
         """Verify adding new column with no headers."""
@@ -105,7 +97,6 @@ class TablibTestCase(unittest.TestCase):
         self.assertEqual(data.width, 1)
         self.assertEqual(data.height, len(new_col))
 
-
     def test_add_column_with_header_ignored(self):
         """Verify append_col() ignores the header if data.headers has
         not previously been set
@@ -120,14 +111,13 @@ class TablibTestCase(unittest.TestCase):
         self.assertEqual(data.height, len(new_col))
         self.assertEqual(data.headers, None)
 
-
     def test_add_column_with_header_and_headers_only_exist(self):
         """Verify append_col() with header correctly detects mismatch when
         headers exist but there is no existing row data
         """
 
         data.headers = ['first_name']
-        #no data
+        # no data
 
         new_col = ('allen')
@@ -136,7 +126,6 @@ class TablibTestCase(unittest.TestCase):
 
         self.assertRaises(tablib.InvalidDimensions, append_col_callable)
 
-
     def test_add_column_with_header_and_data_exists(self):
         """Verify append_col() works when headers and rows exists"""
@@ -152,7 +141,6 @@ class TablibTestCase(unittest.TestCase):
         self.assertEqual(data['age'], new_col)
         self.assertEqual(len(data.headers), len(self.headers) + 1)
 
-
     def test_add_callable_column(self):
         """Verify adding column with values specified as callable."""
@@ -160,19 +148,17 @@ class TablibTestCase(unittest.TestCase):
 
         self.founders.append_col(new_col, header='first_again')
 
-
     def test_header_slicing(self):
         """Verify slicing by headers."""
 
         self.assertEqual(self.founders['first_name'],
-            [self.john[0], self.george[0], self.tom[0]])
+                         [self.john[0], self.george[0], self.tom[0]])
 
         self.assertEqual(self.founders['last_name'],
-            [self.john[1], self.george[1], self.tom[1]])
+                         [self.john[1], self.george[1], self.tom[1]])
 
         self.assertEqual(self.founders['gpa'],
-            [self.john[2], self.george[2], self.tom[2]])
-
+                         [self.john[2], self.george[2], self.tom[2]])
 
     def test_get_col(self):
         """Verify getting columns by index"""
@@ -189,7 +175,6 @@ class TablibTestCase(unittest.TestCase):
             self.founders.get_col(list(self.headers).index('gpa')),
             [self.john[2], self.george[2], self.tom[2]])
 
-
     def test_data_slicing(self):
         """Verify slicing by data."""
@@ -206,7 +191,6 @@ class TablibTestCase(unittest.TestCase):
         self.assertEqual(self.founders[1:3], [self.george, self.tom])
         self.assertEqual(self.founders[2:], [self.tom])
 
-
     def test_row_slicing(self):
         """Verify Row's __getslice__ method. Issue #184."""
@@ -218,7 +202,6 @@ class TablibTestCase(unittest.TestCase):
         self.assertEqual(john[0:2], list(self.john[0:2]))
         self.assertEqual(john[0:-1], list(self.john[0:-1]))
 
-
     def test_delete(self):
         """Verify deleting from dataset works."""
@@ -258,7 +241,6 @@ class TablibTestCase(unittest.TestCase):
 
         self.assertEqual(csv, self.founders.csv)
 
-
     def test_tsv_export(self):
         """Verify exporting dataset object as TSV."""
@@ -276,7 +258,6 @@ class TablibTestCase(unittest.TestCase):
 
         self.assertEqual(tsv, self.founders.tsv)
 
-
     def test_html_export(self):
         """HTML export"""
@@ -288,7 +269,6 @@ class TablibTestCase(unittest.TestCase):
         html.thead.close()
 
         for founder in self.founders:
-
             html.tr(markup.oneliner.td(founder))
 
         html.table.close()
@@ -296,7 +276,6 @@ class TablibTestCase(unittest.TestCase):
 
         self.assertEqual(html, self.founders.html)
 
-
     def test_html_export_none_value(self):
         """HTML export"""
@@ -304,10 +283,10 @@ class TablibTestCase(unittest.TestCase):
         html.table.open()
         html.thead.open()
 
-        html.tr(markup.oneliner.th(['foo','', 'bar']))
+        html.tr(markup.oneliner.th(['foo', '', 'bar']))
         html.thead.close()
 
-        html.tr(markup.oneliner.td(['foo','', 'bar']))
+        html.tr(markup.oneliner.td(['foo', '', 'bar']))
 
         html.table.close()
         html = str(html)
@@ -317,7 +296,6 @@ class TablibTestCase(unittest.TestCase):
 
         self.assertEqual(html, d.html)
 
-
     def test_latex_export(self):
         """LaTeX export"""
@@ -341,17 +319,14 @@ class TablibTestCase(unittest.TestCase):
         output = self.founders.latex
         self.assertEqual(output, expected)
 
-
     def test_latex_export_empty_dataset(self):
         self.assertTrue(tablib.Dataset().latex is not None)
 
-
     def test_latex_export_no_headers(self):
         d = tablib.Dataset()
         d.append(('one', 'two', 'three'))
         self.assertTrue('one' in d.latex)
 
-
     def test_latex_export_caption(self):
         d = tablib.Dataset()
         d.append(('one', 'two', 'three'))
@@ -360,7 +335,6 @@ class TablibTestCase(unittest.TestCase):
         d.title = 'Title'
         self.assertTrue('\\caption{Title}' in d.latex)
 
-
     def test_latex_export_none_values(self):
         headers = ['foo', None, 'bar']
         d = tablib.Dataset(['foo', None, 'bar'], headers=headers)
@@ -368,7 +342,6 @@ class TablibTestCase(unittest.TestCase):
         self.assertTrue('foo' in output)
         self.assertFalse('None' in output)
 
-
     def test_latex_escaping(self):
         d = tablib.Dataset(['~', '^'])
         output = d.latex
@@ -378,15 +351,13 @@ class TablibTestCase(unittest.TestCase):
         self.assertFalse('^' in output)
         self.assertTrue('textasciicircum' in output)
 
-
     def test_unicode_append(self):
         """Passes in a single unicode character and exports."""
 
         if is_py3:
             new_row = ('å', 'é')
         else:
-            exec("new_row = (u'å', u'é')")
-
+            exec ("new_row = (u'å', u'é')")
 
         data.append(new_row)
@@ -400,7 +371,6 @@ class TablibTestCase(unittest.TestCase):
         data.html
         data.latex
 
-
     def test_book_export_no_exceptions(self):
         """Test that various exports don't error out."""
@@ -413,7 +383,6 @@ class TablibTestCase(unittest.TestCase):
         book.xlsx
         book.ods
 
-
     def test_json_import_set(self):
         """Generate and import JSON set serialization."""
         data.append(self.john)
@@ -426,7 +395,6 @@ class TablibTestCase(unittest.TestCase):
 
         self.assertEqual(json.loads(_json), json.loads(data.json))
 
-
     def test_json_import_book(self):
         """Generate and import JSON book serialization."""
         data.append(self.john)
@@ -440,7 +408,6 @@ class TablibTestCase(unittest.TestCase):
 
         self.assertEqual(json.loads(_json), json.loads(book.json))
 
-
     def test_yaml_import_set(self):
         """Generate and import YAML set serialization."""
         data.append(self.john)
@@ -453,7 +420,6 @@ class TablibTestCase(unittest.TestCase):
 
         self.assertEqual(_yaml, data.yaml)
 
-
     def test_yaml_import_book(self):
         """Generate and import YAML book serialization."""
         data.append(self.john)
@@ -467,7 +433,6 @@ class TablibTestCase(unittest.TestCase):
 
         self.assertEqual(_yaml, book.yaml)
 
-
     def test_csv_import_set(self):
         """Generate and import CSV set serialization."""
         data.append(self.john)
@@ -518,7 +483,6 @@ class TablibTestCase(unittest.TestCase):
 
         self.assertEqual(_csv, data.get_csv(delimiter=';'))
 
-
     def test_csv_import_set_with_newlines(self):
         """Generate and import CSV set serialization when row values have
         newlines."""
@@ -533,7 +497,6 @@ class TablibTestCase(unittest.TestCase):
 
         self.assertEqual(_csv, data.csv)
 
-
     def test_tsv_import_set(self):
         """Generate and import TSV set serialization."""
         data.append(self.john)
@@ -546,7 +509,6 @@ class TablibTestCase(unittest.TestCase):
 
         self.assertEqual(_tsv, data.tsv)
 
-
     def test_dbf_import_set(self):
         data.append(self.john)
         data.append(self.george)
@@ -555,7 +517,7 @@
         _dbf = data.dbf
         data.dbf = _dbf
 
-        #self.assertEqual(_dbf, data.dbf)
+        # self.assertEqual(_dbf, data.dbf)
         try:
             self.assertEqual(_dbf, data.dbf)
         except AssertionError:
@@ -576,13 +538,13 @@ class TablibTestCase(unittest.TestCase):
         data.headers = self.headers
 
         _regression_dbf = (b'\x03r\x06\x06\x03\x00\x00\x00\x81\x00\xab\x00\x00'
-            b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
-            b'\x00\x00\x00FIRST_NAME\x00C\x00\x00\x00\x00P\x00\x00\x00\x00\x00'
-            b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00LAST_NAME\x00\x00C\x00'
-            b'\x00\x00\x00P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
-            b'\x00\x00GPA\x00\x00\x00\x00\x00\x00\x00\x00N\x00\x00\x00\x00\n'
-            b'\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\r'
-        )
+                           b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
+                           b'\x00\x00\x00FIRST_NAME\x00C\x00\x00\x00\x00P\x00\x00\x00\x00\x00'
+                           b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00LAST_NAME\x00\x00C\x00'
+                           b'\x00\x00\x00P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
+                           b'\x00\x00GPA\x00\x00\x00\x00\x00\x00\x00\x00N\x00\x00\x00\x00\n'
+                           b'\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\r'
+                           )
         _regression_dbf += b' John' + (b' ' * 75)
         _regression_dbf += b' Adams' + (b' ' * 74)
         _regression_dbf += b' 90.0000000'
@@ -596,8 +558,8 @@ class TablibTestCase(unittest.TestCase):
 
         if is_py3:
             # If in python3, decode regression string to binary.
-            #_regression_dbf = bytes(_regression_dbf, 'utf-8')
-            #_regression_dbf = _regression_dbf.replace(b'\n', b'\r')
+            # _regression_dbf = bytes(_regression_dbf, 'utf-8')
+            # _regression_dbf = _regression_dbf.replace(b'\n', b'\r')
             pass
 
         try:
@@ -606,23 +568,23 @@ class TablibTestCase(unittest.TestCase):
             index = 0
             found_so_far = ''
             for reg_char, data_char in zip(_regression_dbf, data.dbf):
-                #found_so_far += chr(data_char)
+                # found_so_far += chr(data_char)
                 if reg_char != data_char and index not in [1, 2, 3]:
                     raise AssertionError(
                         'Failing at char %s: %s vs %s (found %s)' % (
-                        index, reg_char, data_char, found_so_far))
+                            index, reg_char, data_char, found_so_far))
                 index += 1
 
     def test_dbf_format_detect(self):
         """Test the DBF format detection."""
         _dbf = (b'\x03r\x06\x03\x03\x00\x00\x00\x81\x00\xab\x00\x00'
-            b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
-            b'\x00\x00\x00FIRST_NAME\x00C\x00\x00\x00\x00P\x00\x00\x00\x00\x00'
-            b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00LAST_NAME\x00\x00C\x00'
-            b'\x00\x00\x00P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
-            b'\x00\x00GPA\x00\x00\x00\x00\x00\x00\x00\x00N\x00\x00\x00\x00\n'
-            b'\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\r'
-        )
+                b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
+                b'\x00\x00\x00FIRST_NAME\x00C\x00\x00\x00\x00P\x00\x00\x00\x00\x00'
+                b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00LAST_NAME\x00\x00C\x00'
+                b'\x00\x00\x00P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
+                b'\x00\x00GPA\x00\x00\x00\x00\x00\x00\x00\x00N\x00\x00\x00\x00\n'
+                b'\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\r'
+                )
         _dbf += b' John' + (b' ' * 75)
         _dbf += b' Adams' + (b' ' * 74)
         _dbf += b' 90.0000000'
@@ -664,7 +626,6 @@ class TablibTestCase(unittest.TestCase):
         self.assertTrue(tablib.formats.csv.detect(_csv))
         self.assertFalse(tablib.formats.csv.detect(_bunk))
 
-
     def test_tsv_format_detect(self):
         """Test TSV format detection."""
@@ -680,7 +641,6 @@ class TablibTestCase(unittest.TestCase):
         self.assertTrue(tablib.formats.tsv.detect(_tsv))
         self.assertFalse(tablib.formats.tsv.detect(_bunk))
 
-
     def test_json_format_detect(self):
         """Test JSON format detection."""
@@ -692,7 +652,6 @@ class TablibTestCase(unittest.TestCase):
         self.assertTrue(tablib.formats.json.detect(_json))
         self.assertFalse(tablib.formats.json.detect(_bunk))
 
-
     def test_yaml_format_detect(self):
         """Test YAML format detection."""
@@ -706,7 +665,6 @@ class TablibTestCase(unittest.TestCase):
         self.assertFalse(tablib.formats.yaml.detect(_bunk))
         self.assertFalse(tablib.formats.yaml.detect(_tsv))
 
-
     def test_auto_format_detect(self):
         """Test auto format detection."""
@@ -722,7 +680,6 @@ class TablibTestCase(unittest.TestCase):
         self.assertEqual(tablib.detect_format(_json), 'json')
         self.assertEqual(tablib.detect_format(_bunk), None)
 
-
     def test_transpose(self):
         """Transpose a dataset."""
@@ -731,11 +688,11 @@ class TablibTestCase(unittest.TestCase):
         second_row = transposed_founders[1]
 
         self.assertEqual(transposed_founders.headers,
-            ["first_name","John", "George", "Thomas"])
+                         ["first_name", "John", "George", "Thomas"])
         self.assertEqual(first_row,
-            ("last_name","Adams", "Washington", "Jefferson"))
+                         ("last_name", "Adams", "Washington", "Jefferson"))
         self.assertEqual(second_row,
-            ("gpa",90, 67, 50))
+                         ("gpa", 90, 67, 50))
 
     def test_transpose_multiple_headers(self):
@@ -746,7 +703,6 @@ class TablibTestCase(unittest.TestCase):
         data.append(('John', 'Tyler', 71))
         self.assertEqual(data.transpose().transpose().dict, data.dict)
 
-
     def test_row_stacking(self):
         """Row stacking."""
@@ -758,12 +714,10 @@ class TablibTestCase(unittest.TestCase):
         row_stacked = self.founders.stack(to_join)
 
         for column in row_stacked.headers:
-
             original_data = self.founders[column]
             expected_data = original_data + original_data
             self.assertEqual(row_stacked[column], expected_data)
 
-
     def test_column_stacking(self):
         """Column stacking"""
@@ -775,14 +729,12 @@ class TablibTestCase(unittest.TestCase):
         column_stacked = self.founders.stack_cols(to_join)
 
         for index, row in enumerate(column_stacked):
-
             original_data = self.founders[index]
             expected_data = original_data + original_data
             self.assertEqual(row, expected_data)
 
         self.assertEqual(column_stacked[0],
-            ("John", "Adams", 90, "John", "Adams", 90))
-
+                         ("John", "Adams", 90, "John", "Adams", 90))
 
     def test_sorting(self):
         """Sort columns."""
@@ -801,7 +753,6 @@ class TablibTestCase(unittest.TestCase):
         self.assertEqual(second_row, expected_second)
         self.assertEqual(third_row, expected_third)
 
-
     def test_remove_duplicates(self):
         """Unique Rows."""
@@ -820,7 +771,6 @@ class TablibTestCase(unittest.TestCase):
         self.assertEqual(self.founders[2], self.tom)
         self.assertEqual(self.founders.height, 3)
 
-
     def test_wipe(self):
         """Purge a dataset."""
@@ -837,12 +787,11 @@ class TablibTestCase(unittest.TestCase):
         self.assertTrue(data.width == len(new_row))
         self.assertTrue(data[0] == new_row)
 
-
     def test_subset(self):
         """Create a subset of a dataset"""
 
         rows = (0, 2)
-        columns = ('first_name','gpa')
+        columns = ('first_name', 'gpa')
 
         data.headers = self.headers
@@ -850,14 +799,13 @@ class TablibTestCase(unittest.TestCase):
         data.append(self.george)
         data.append(self.tom)
 
-        #Verify data is truncated
+        # Verify data is truncated
         subset = data.subset(rows=rows, cols=columns)
 
         self.assertEqual(type(subset), tablib.Dataset)
         self.assertEqual(subset.headers, list(columns))
         self.assertEqual(subset._data[0].list, ['John', 90])
         self.assertEqual(subset._data[1].list, ['Thomas', 50])
 
-
     def test_formatters(self):
         """Confirm formatters are being triggered."""
@@ -877,8 +825,7 @@ class TablibTestCase(unittest.TestCase):
         if sys.version_info[0] > 2:
             data.append(['\xfc', '\xfd'])
         else:
-            exec("data.append([u'\xfc', u'\xfd'])")
-
+            exec ("data.append([u'\xfc', u'\xfd'])")
 
         data.csv
@@ -895,7 +842,6 @@ class TablibTestCase(unittest.TestCase):
         csv_first_name = data[headers[0]]
         self.assertEqual(orig_first_name, csv_first_name)
 
-
     def test_csv_column_delete(self):
         """Build up a CSV and test deleting a column"""
@@ -929,7 +875,6 @@ class TablibTestCase(unittest.TestCase):
         self.founders.append(('Old', 'Man', 100500))
         self.assertEqual('first_name|last_name |gpa ',
                          unicode(self.founders).split('\n')[0])
 
-
     def test_databook_add_sheet_accepts_only_dataset_instances(self):
         class NotDataset(object):
             def append(self, item):
@@ -940,7 +885,6 @@ class TablibTestCase(unittest.TestCase):
 
         self.assertRaises(tablib.InvalidDatasetType, book.add_sheet, dataset)
 
-
     def test_databook_add_sheet_accepts_dataset_subclasses(self):
         class DatasetSubclass(tablib.Dataset):
             pass
@@ -955,7 +899,6 @@ class TablibTestCase(unittest.TestCase):
         except tablib.InvalidDatasetType:
             self.fail("Subclass of tablib.Dataset should be accepted by Databook.add_sheet")
 
-
     def test_csv_formatter_support_kwargs(self):
         """Test CSV import and export with formatter configuration."""
         data.append(self.john)
```
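
The commit message does not say how conformance was verified. A plausible check at the time would have been the `pep8` tool (later renamed `pycodestyle`); a hypothetical sketch, assuming a relaxed line-length limit since several of the re-wrapped lines above still exceed 79 characters:

```python
# Hypothetical verification script; the commit does not say which checker
# (if any) was run. Requires `pip install pycodestyle`.
import pycodestyle

# max_line_length=120 is an assumption: some reformatted lines in this
# patch (e.g. the long `from events import ...` line) are over 79 chars.
style = pycodestyle.StyleGuide(max_line_length=120)
report = style.check_files(['test_tablib.py', 'tablib/packages/dbfpy/dbf.py'])
print('%d PEP-8 violations remaining' % report.total_errors)
```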
