author     Jeff Widman <jeff@jeffwidman.com>    2017-12-07 15:07:31 -0800
committer  Jeff Widman <jeff@jeffwidman.com>    2017-12-12 11:06:44 -0800
commit     580520bcb9dc2ed9725e4b4871cd275f6826a182 (patch)
tree       19b7dbdb15a8d5e2dd10bae3261321f033444b28
parent     a699f6a347591076b5c427fc245e5ed5f5ffdd49 (diff)
download   kafka-python-580520bcb9dc2ed9725e4b4871cd275f6826a182.tar.gz
Minor Exception cleanup
-rw-r--r--  kafka/conn.py                       2
-rw-r--r--  kafka/consumer/fetcher.py           4
-rw-r--r--  kafka/metrics/metric_name.py        4
-rw-r--r--  kafka/protocol/types.py             2
-rw-r--r--  test/fixtures.py                    4
-rw-r--r--  test/test_failover_integration.py   2
-rw-r--r--  test/testutil.py                    2
7 files changed, 10 insertions(+), 10 deletions(-)
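
The theme of the patch: replace bare except: clauses with the narrowest exception that the guarded code can actually raise, and raise specific built-ins (ValueError, RuntimeError) instead of the generic Exception. A minimal before/after sketch of that style, for illustration only (parse_old and parse_new are not kafka-python functions):

# Before: a bare except catches everything, including KeyboardInterrupt and
# SystemExit, and raising the generic Exception forces callers to catch the
# broadest possible type.
def parse_old(value):
    try:
        return int(value)
    except:
        raise Exception('bad value')

# After: handle only what can realistically be raised here, and raise the
# most specific built-in that describes the failure.
def parse_new(value):
    try:
        return int(value)
    except (TypeError, ValueError):
        raise ValueError('bad value: %r' % (value,))
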
diff --git a/kafka/conn.py b/kafka/conn.py
index 2926e2f..68f2659 100644
--- a/kafka/conn.py
+++ b/kafka/conn.py
@@ -47,7 +47,7 @@ try:
SSLWantReadError = ssl.SSLWantReadError
SSLWantWriteError = ssl.SSLWantWriteError
SSLZeroReturnError = ssl.SSLZeroReturnError
- except:
+ except AttributeError:
# support older ssl libraries
log.warning('Old SSL module detected.'
' SSL error handling may not operate cleanly.'
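
The conn.py change keeps the feature-detection intent intact: looking up the newer SSL error classes on an old ssl module raises AttributeError, so that is the only exception this block needs to handle. A simplified sketch of the pattern (the fallback assignments are illustrative, not copied from conn.py):

import logging
import ssl

log = logging.getLogger(__name__)

try:
    # These names only exist on newer ssl modules (Python 2.7.9+ / 3.3+);
    # on an older module the attribute lookup raises AttributeError.
    SSLWantReadError = ssl.SSLWantReadError
    SSLWantWriteError = ssl.SSLWantWriteError
    SSLZeroReturnError = ssl.SSLZeroReturnError
except AttributeError:
    log.warning('Old SSL module detected; error handling may not operate cleanly.')
    # Illustrative fallback: map the missing names to the base SSLError.
    SSLWantReadError = SSLWantWriteError = SSLZeroReturnError = ssl.SSLError
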
diff --git a/kafka/consumer/fetcher.py b/kafka/consumer/fetcher.py
index e4d76cf..f9251fd 100644
--- a/kafka/consumer/fetcher.py
+++ b/kafka/consumer/fetcher.py
@@ -478,8 +478,8 @@ class Fetcher(six.Iterator):
# caught by the generator. We want all exceptions to be raised
# back to the user. See Issue 545
except StopIteration as e:
- log.exception('StopIteration raised unpacking messageset: %s', e)
- raise Exception('StopIteration raised unpacking messageset')
+ log.exception('StopIteration raised unpacking messageset')
+ raise RuntimeError('StopIteration raised unpacking messageset')
def __iter__(self): # pylint: disable=non-iterator-returned
return self
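
Swapping Exception for RuntimeError matters here because the unpacking code runs inside a generator: as the comment above notes (Issue 545), a StopIteration that escapes would be absorbed by the iteration machinery and silently end the message stream instead of reporting the error. Converting it to RuntimeError matches the behavior PEP 479 later made the default. A standalone illustration, not kafka-python code:

def unpack(records):
    for record in records:
        if record is None:
            # If this raised StopIteration (pre-PEP 479 semantics), the caller's
            # loop would simply stop early with no error reported.
            raise RuntimeError('error while unpacking record')
        yield record

# list(unpack([1, None, 3])) raises RuntimeError instead of quietly
# returning [1].
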
diff --git a/kafka/metrics/metric_name.py b/kafka/metrics/metric_name.py
index a475d6c..b5acd16 100644
--- a/kafka/metrics/metric_name.py
+++ b/kafka/metrics/metric_name.py
@@ -50,9 +50,9 @@ class MetricName(object):
tags (dict, optional): Additional key/val attributes of the metric.
"""
if not (name and group):
- raise Exception('name and group must be non-empty.')
+ raise ValueError('name and group must be non-empty.')
if tags is not None and not isinstance(tags, dict):
- raise Exception('tags must be a dict if present.')
+ raise ValueError('tags must be a dict if present.')
self._name = name
self._group = group
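
Raising ValueError for bad constructor arguments lets callers catch invalid metric definitions specifically rather than trapping every possible Exception. A usage sketch, assuming MetricName is importable from kafka.metrics:

from kafka.metrics import MetricName

try:
    MetricName('', 'consumer-metrics')   # empty name is rejected
except ValueError as e:
    print('invalid metric definition: %s' % e)
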
diff --git a/kafka/protocol/types.py b/kafka/protocol/types.py
index 516b957..6a6e89e 100644
--- a/kafka/protocol/types.py
+++ b/kafka/protocol/types.py
@@ -148,7 +148,7 @@ class Schema(AbstractType):
field_val = value[i]
key_vals.append('%s=%s' % (self.names[i], self.fields[i].repr(field_val)))
return '(' + ', '.join(key_vals) + ')'
- except:
+ except Exception:
return repr(value)
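
Using except Exception keeps this repr fallback from swallowing KeyboardInterrupt and SystemExit, which derive from BaseException but not Exception, while still catching any formatting error. For illustration (not kafka-python code):

issubclass(KeyboardInterrupt, Exception)      # False
issubclass(KeyboardInterrupt, BaseException)  # True

def safe_repr(values):
    try:
        return '(' + ', '.join(repr(v) for v in values) + ')'
    except Exception:
        # TypeError and similar are handled; Ctrl-C still propagates.
        return repr(values)
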
diff --git a/test/fixtures.py b/test/fixtures.py
index 661a631..b49a160 100644
--- a/test/fixtures.py
+++ b/test/fixtures.py
@@ -162,7 +162,7 @@ class ZookeeperFixture(Fixture):
time.sleep(backoff)
tries += 1
else:
- raise Exception('Failed to start Zookeeper before max_timeout')
+ raise RuntimeError('Failed to start Zookeeper before max_timeout')
self.out("Done!")
atexit.register(self.close)
@@ -302,7 +302,7 @@ class KafkaFixture(Fixture):
time.sleep(backoff)
tries += 1
else:
- raise Exception('Failed to start KafkaInstance before max_timeout')
+ raise RuntimeError('Failed to start KafkaInstance before max_timeout')
self.out("Done!")
self.running = True
atexit.register(self.close)
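
Both fixtures poll for the service to come up and now signal a startup timeout with RuntimeError, which tests can catch deliberately instead of relying on a catch-all. A sketch of the retry-then-raise pattern under assumed names (try_start, max_timeout, backoff are illustrative, not the fixture API):

import time

def start_with_retries(try_start, max_timeout=30, backoff=1.0):
    """Poll try_start() until it succeeds or max_timeout elapses."""
    deadline = time.time() + max_timeout
    while time.time() < deadline:
        if try_start():
            return
        time.sleep(backoff)
    # A specific built-in rather than the generic Exception, mirroring the
    # fixture changes above.
    raise RuntimeError('Failed to start service before max_timeout')
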
diff --git a/test/test_failover_integration.py b/test/test_failover_integration.py
index 2439b58..9141947 100644
--- a/test/test_failover_integration.py
+++ b/test/test_failover_integration.py
@@ -197,7 +197,7 @@ class TestFailover(KafkaIntegrationTestCase):
while True:
try:
producer.send_messages(topic, partition, msg.encode('utf-8'))
- except:
+ except Exception:
log.exception('failure in _send_random_messages - retrying')
continue
else:
diff --git a/test/testutil.py b/test/testutil.py
index c247e6a..0bacac4 100644
--- a/test/testutil.py
+++ b/test/testutil.py
@@ -113,7 +113,7 @@ class KafkaIntegrationTestCase(unittest.TestCase):
def current_offset(self, topic, partition):
try:
offsets, = self.client.send_offset_request([OffsetRequestPayload(topic, partition, -1, 1)])
- except:
+ except Exception:
# XXX: We've seen some UnknownErrors here and can't debug w/o server logs
self.zk.child.dump_logs()
self.server.child.dump_logs()