diff options
| author | Kenneth Reitz <me@kennethreitz.org> | 2016-02-25 12:58:35 -0500 |
|---|---|---|
| committer | Kenneth Reitz <me@kennethreitz.org> | 2016-02-25 12:58:35 -0500 |
| commit | 7ee924b5a69279db3115b30e11d102fb96da2ff3 (patch) | |
| tree | 6378d8b3b72a5945daea38fdb70f7ba3c0754e3d | |
| parent | ee9666a146514573a2a159baaabb0481a7e2c5c0 (diff) | |
| parent | d720beadac14ae051a424f1ad1add17cf7b6d240 (diff) | |
| download | tablib-7ee924b5a69279db3115b30e11d102fb96da2ff3.tar.gz | |
Merge pull request #228 from tomchristie/print-dataset-with-no-headers
Fixed textual representation for Dataset with no headers
| -rw-r--r-- | tablib/core.py | 6 | ||||
| -rwxr-xr-x | test_tablib.py | 10 |
2 files changed, 14 insertions, 2 deletions
diff --git a/tablib/core.py b/tablib/core.py
index 3db24f3..aa787f4 100644
--- a/tablib/core.py
+++ b/tablib/core.py
@@ -223,7 +223,8 @@ class Dataset(object):
         result = []
 
         # Add unicode representation of headers.
-        result.append([unicode(h) for h in self.__headers])
+        if self.__headers:
+            result.append([unicode(h) for h in self.__headers])
 
         # Add unicode representation of rows.
         result.extend(list(map(unicode, row)) for row in self._data)
@@ -232,7 +233,8 @@ class Dataset(object):
         field_lens = list(map(max, zip(*lens)))
 
         # delimiter between header and data
-        result.insert(1, ['-' * length for length in field_lens])
+        if self.__headers:
+            result.insert(1, ['-' * length for length in field_lens])
 
         format_string = '|'.join('{%s:%s}' % item for item in enumerate(field_lens))
 
diff --git a/test_tablib.py b/test_tablib.py
index 63ddf93..30bbacf 100755
--- a/test_tablib.py
+++ b/test_tablib.py
@@ -351,6 +351,16 @@ class TablibTestCase(unittest.TestCase):
         self.assertFalse('^' in output)
         self.assertTrue('textasciicircum' in output)
 
+    def test_str_no_columns(self):
+        d = tablib.Dataset(['a', 1], ['b', 2], ['c', 3])
+        output = '%s' % d
+
+        self.assertEqual(output.splitlines(), [
+            'a|1',
+            'b|2',
+            'c|3'
+        ])
+
     def test_unicode_append(self):
         """Passes in a single unicode character and exports."""
