author     Jason Pellerin <jpellerin@gmail.com>  2009-04-18 19:00:45 +0000
committer  Jason Pellerin <jpellerin@gmail.com>  2009-04-18 19:00:45 +0000
commit     c5bd03442781ce1c974920708e958d58d1ff6289 (patch)
tree       04041c48ada439b6ca6510bb34007f7dd94b33be /unit_tests
parent     296fee10bf942cf860326c88d5c5df6e906a8c7b (diff)
download   nose-c5bd03442781ce1c974920708e958d58d1ff6289.tar.gz
Committed PyCon sprint work.
Diffstat (limited to 'unit_tests')
-rw-r--r--  unit_tests/test_logcapture_plugin.py    36
-rw-r--r--  unit_tests/test_multiprocess_runner.py  10
-rw-r--r--  unit_tests/test_xunit.py               205
3 files changed, 244 insertions(+), 7 deletions(-)
diff --git a/unit_tests/test_logcapture_plugin.py b/unit_tests/test_logcapture_plugin.py
index e6d3b34..c540491 100644
--- a/unit_tests/test_logcapture_plugin.py
+++ b/unit_tests/test_logcapture_plugin.py
@@ -50,11 +50,20 @@ class TestLogCapturePlugin(object):
c.configure(options, Config())
eq_('++%(message)s++', c.logformat)
+ def test_logging_datefmt_option(self):
+ env = {'NOSE_LOGDATEFMT': '%H:%M:%S'}
+ c = LogCapture()
+ parser = OptionParser()
+ c.addOptions(parser, env)
+ options, args = parser.parse_args(['logging_datefmt'])
+ c.configure(options, Config())
+ eq_('%H:%M:%S', c.logdatefmt)
+
def test_captures_logging(self):
c = LogCapture()
parser = OptionParser()
c.addOptions(parser, {})
- options, args = parser.parse_args()
+ options, args = parser.parse_args([])
c.configure(options, Config())
c.start()
log = logging.getLogger("foobar.something")
@@ -73,7 +82,7 @@ class TestLogCapturePlugin(object):
records = c.formatLogRecords()
eq_(1, len(records))
eq_("++Hello++", records[0])
-
+
def test_logging_filter(self):
env = {'NOSE_LOGFILTER': 'foo,bar'}
c = LogCapture()
@@ -93,3 +102,26 @@ class TestLogCapturePlugin(object):
assert records[1].startswith('foo.x:'), records[1]
assert records[2].startswith('bar.quux:'), records[2]
+ def test_unicode_messages_handled(self):
+ msg = u'Ivan Krsti\u0107'
+ c = LogCapture()
+ parser = OptionParser()
+ c.addOptions(parser, {})
+ options, args = parser.parse_args([])
+ c.configure(options, Config())
+ c.start()
+ log = logging.getLogger("foobar.something")
+ log.debug(msg)
+ log.debug("ordinary string log")
+ c.end()
+
+ class Dummy:
+ pass
+ test = Dummy()
+ try:
+ raise Exception(msg)
+ except:
+ err = sys.exc_info()
+ (ec, ev, tb) = c.formatError(test, err)
+ print ev
+ assert msg.encode('utf-8') in ev
diff --git a/unit_tests/test_multiprocess_runner.py b/unit_tests/test_multiprocess_runner.py
index 9e2ee8d..71ee398 100644
--- a/unit_tests/test_multiprocess_runner.py
+++ b/unit_tests/test_multiprocess_runner.py
@@ -28,7 +28,7 @@ class TestMultiProcessTestRunner(unittest.TestCase):
def test_next_batch_with_classes(self):
r = multiprocess.MultiProcessTestRunner()
l = TestLoader()
- tests = list(r.next_batch(ContextSuite(
+ tests = list(r.nextBatch(ContextSuite(
tests=[l.makeTest(T_fixt), l.makeTest(T)])))
print tests
self.assertEqual(len(tests), 3)
@@ -49,7 +49,7 @@ class TestMultiProcessTestRunner(unittest.TestCase):
r = multiprocess.MultiProcessTestRunner()
l = TestLoader()
- tests = list(r.next_batch(l.loadTestsFromModule(mod_with_fixt)))
+ tests = list(r.nextBatch(l.loadTestsFromModule(mod_with_fixt)))
print tests
self.assertEqual(len(tests), 1)
@@ -70,7 +70,7 @@ class TestMultiProcessTestRunner(unittest.TestCase):
r = multiprocess.MultiProcessTestRunner()
l = TestLoader()
- tests = list(r.next_batch(l.loadTestsFromModule(mod_no_fixt)))
+ tests = list(r.nextBatch(l.loadTestsFromModule(mod_no_fixt)))
print tests
self.assertEqual(len(tests), 3)
@@ -83,7 +83,7 @@ class TestMultiProcessTestRunner(unittest.TestCase):
pass
r = multiprocess.MultiProcessTestRunner()
l = TestLoader()
- tests = list(r.next_batch(l.makeTest(Tg)))
+ tests = list(r.nextBatch(l.makeTest(Tg)))
print tests
print [r.address(t) for t in tests]
self.assertEqual(len(tests), 1)
@@ -111,7 +111,7 @@ class TestMultiProcessTestRunner(unittest.TestCase):
r = multiprocess.MultiProcessTestRunner()
l = TestLoader()
- tests = list(r.next_batch(l.loadTestsFromModule(mod_with_fixt2)))
+ tests = list(r.nextBatch(l.loadTestsFromModule(mod_with_fixt2)))
print tests
self.assertEqual(len(tests), 3)
diff --git a/unit_tests/test_xunit.py b/unit_tests/test_xunit.py
new file mode 100644
index 0000000..a10b2a0
--- /dev/null
+++ b/unit_tests/test_xunit.py
@@ -0,0 +1,205 @@
+
+import sys
+import os
+import optparse
+import unittest
+from nose.tools import eq_
+from nose.plugins.xunit import Xunit
+from nose.exc import SkipTest
+from nose.config import Config
+
+def mktest():
+ class TC(unittest.TestCase):
+ def runTest(self):
+ pass
+ test = TC()
+ return test
+
+mktest.__test__ = False
+
+class TestEscaping(unittest.TestCase):
+
+ def setUp(self):
+ self.x = Xunit()
+
+ def test_all(self):
+ eq_(self.x._xmlsafe('<baz src="http://foo?f=1&b=2" />'),
+ '&lt;baz src=&quot;http://foo?f=1&amp;b=2&quot; /&gt;')
+
+
+ def test_unicode_is_utf8_by_default(self):
+ eq_(self.x._xmlsafe(u'Ivan Krsti\u0107'),
+ 'Ivan Krsti\xc4\x87')
+
+
+ def test_unicode_custom_utf16_madness(self):
+ self.x.encoding = 'utf-16'
+ utf16 = self.x._xmlsafe(u'Ivan Krsti\u0107')
+
+ # to avoid big/little endian bytes, assert that we can put it back:
+ eq_(utf16.decode('utf16'), u'Ivan Krsti\u0107')
+
+class TestOptions(unittest.TestCase):
+
+ def test_defaults(self):
+ parser = optparse.OptionParser()
+ x = Xunit()
+ x.add_options(parser, env={})
+ (options, args) = parser.parse_args([])
+ eq_(options.xunit_file, "nosetests.xml")
+
+ def test_file_from_environ(self):
+ parser = optparse.OptionParser()
+ x = Xunit()
+ x.add_options(parser, env={'NOSE_XUNIT_FILE': "kangaroo.xml"})
+ (options, args) = parser.parse_args([])
+ eq_(options.xunit_file, "kangaroo.xml")
+
+ def test_file_from_opt(self):
+ parser = optparse.OptionParser()
+ x = Xunit()
+ x.add_options(parser, env={})
+ (options, args) = parser.parse_args(["--xunit-file=blagojevich.xml"])
+ eq_(options.xunit_file, "blagojevich.xml")
+
+class TestXMLOutputWithXML(unittest.TestCase):
+
+ def setUp(self):
+ self.xmlfile = os.path.abspath(
+ os.path.join(os.path.dirname(__file__),
+ 'support', 'xunit.xml'))
+ parser = optparse.OptionParser()
+ self.x = Xunit()
+ self.x.add_options(parser, env={})
+ (options, args) = parser.parse_args([
+ "--with-xunit",
+ "--xunit-file=%s" % self.xmlfile
+ ])
+ self.x.configure(options, Config())
+
+ try:
+ import xml.etree.ElementTree
+ except ImportError:
+ self.ET = False
+ else:
+ self.ET = xml.etree.ElementTree
+
+ def tearDown(self):
+ os.unlink(self.xmlfile)
+
+ def get_xml_report(self):
+ class DummyStream:
+ pass
+ self.x.report(DummyStream())
+ f = open(self.xmlfile, 'r')
+ report = f.read()
+ f.close()
+ return report
+
+ def test_addFailure(self):
+ test = mktest()
+ self.x.startTest(test)
+ try:
+ raise AssertionError("one is not equal to two")
+ except AssertionError:
+ some_err = sys.exc_info()
+
+ self.x.addFailure(test, some_err)
+
+ result = self.get_xml_report()
+ print result
+
+ if self.ET:
+ tree = self.ET.fromstring(result)
+ eq_(tree.attrib['name'], "nosetests")
+ eq_(tree.attrib['tests'], "1")
+ eq_(tree.attrib['errors'], "0")
+ eq_(tree.attrib['failures'], "1")
+ eq_(tree.attrib['skip'], "0")
+
+ tc = tree.find("testcase")
+ eq_(tc.attrib['classname'], "test_xunit.TC")
+ eq_(tc.attrib['name'], "test_xunit.TC.runTest")
+ assert int(tc.attrib['time']) >= 0
+
+ err = tc.find("failure")
+ eq_(err.attrib['type'], "exceptions.AssertionError")
+ err_lines = err.text.strip().split("\n")
+ eq_(err_lines[0], 'Traceback (most recent call last):')
+ eq_(err_lines[-1], 'AssertionError: one is not equal to two')
+ eq_(err_lines[-2], ' raise AssertionError("one is not equal to two")')
+ else:
+ # this is a dumb test for 2.4-
+ assert '<?xml version="1.0" encoding="UTF-8"?>' in result
+ assert '<testsuite name="nosetests" tests="1" errors="0" failures="1" skip="0">' in result
+ assert '<testcase classname="test_xunit.TC" name="test_xunit.TC.runTest"' in result
+ assert '<failure type="exceptions.AssertionError">' in result
+ assert 'AssertionError: one is not equal to two' in result
+ assert '</failure></testcase></testsuite>' in result
+
+ def test_addError(self):
+ test = mktest()
+ self.x.startTest(test)
+ try:
+ raise RuntimeError("some error happened")
+ except RuntimeError:
+ some_err = sys.exc_info()
+
+ self.x.addError(test, some_err)
+
+ result = self.get_xml_report()
+ print result
+
+ if self.ET:
+ tree = self.ET.fromstring(result)
+ eq_(tree.attrib['name'], "nosetests")
+ eq_(tree.attrib['tests'], "1")
+ eq_(tree.attrib['errors'], "1")
+ eq_(tree.attrib['failures'], "0")
+ eq_(tree.attrib['skip'], "0")
+
+ tc = tree.find("testcase")
+ eq_(tc.attrib['classname'], "test_xunit.TC")
+ eq_(tc.attrib['name'], "test_xunit.TC.runTest")
+ assert int(tc.attrib['time']) >= 0
+
+ err = tc.find("error")
+ eq_(err.attrib['type'], "exceptions.RuntimeError")
+ err_lines = err.text.strip().split("\n")
+ eq_(err_lines[0], 'Traceback (most recent call last):')
+ eq_(err_lines[-1], 'RuntimeError: some error happened')
+ eq_(err_lines[-2], ' raise RuntimeError("some error happened")')
+ else:
+ # this is a dumb test for 2.4-
+ assert '<?xml version="1.0" encoding="UTF-8"?>' in result
+ assert '<testsuite name="nosetests" tests="1" errors="1" failures="0" skip="0">' in result
+ assert '<testcase classname="test_xunit.TC" name="test_xunit.TC.runTest"' in result
+ assert '<error type="exceptions.RuntimeError">' in result
+ assert 'RuntimeError: some error happened' in result
+ assert '</error></testcase></testsuite>' in result
+
+ def test_addSuccess(self):
+ test = mktest()
+ self.x.startTest(test)
+ self.x.addSuccess(test, (None,None,None))
+
+ result = self.get_xml_report()
+ print result
+
+ if self.ET:
+ tree = self.ET.fromstring(result)
+ eq_(tree.attrib['name'], "nosetests")
+ eq_(tree.attrib['tests'], "1")
+ eq_(tree.attrib['errors'], "0")
+ eq_(tree.attrib['failures'], "0")
+ eq_(tree.attrib['skip'], "0")
+
+ tc = tree.find("testcase")
+ eq_(tc.attrib['classname'], "test_xunit.TC")
+ eq_(tc.attrib['name'], "test_xunit.TC.runTest")
+ assert int(tc.attrib['time']) >= 0
+ else:
+ # this is a dumb test for 2.4-
+ assert '<?xml version="1.0" encoding="UTF-8"?>' in result
+ assert '<testsuite name="nosetests" tests="1" errors="0" failures="0" skip="0">' in result
+ assert '<testcase classname="test_xunit.TC" name="test_xunit.TC.runTest"' in result
+ assert '</testsuite>' in result