summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorJeff Widman <jeff@jeffwidman.com>2017-09-12 13:56:06 -0700
committerJeff Widman <jeff@jeffwidman.com>2017-09-12 13:56:06 -0700
commit84aec293f7074b4fe46741cd2be694d1efd91386 (patch)
tree3f3d17b526653c45f062aed8b8ae2d4984e0d86a
parent370169029a9104f1963227005349f1dc6420a924 (diff)
downloadkafka-python-1212-stop-using-mutable-types-for-default-arg-values.tar.gz
Stop using mutable types for default arg values (branch: 1212-stop-using-mutable-types-for-default-arg-values)
Using mutable types for default args is typically a no-no unless their surprising behavior is being explicitly abused; for an explanation, see: http://effbot.org/zone/default-values.htm. Fixes #1212
-rw-r--r--kafka/client.py18
-rw-r--r--test/test_client.py2
-rw-r--r--test/test_conn.py2
-rw-r--r--test/test_fetcher.py2
-rw-r--r--test/test_sender.py2
5 files changed, 13 insertions, 13 deletions
diff --git a/kafka/client.py b/kafka/client.py
index c233ea6..cdc57f7 100644
--- a/kafka/client.py
+++ b/kafka/client.py
@@ -588,21 +588,21 @@ class SimpleClient(object):
leader, None, None, None
)
- def send_metadata_request(self, payloads=[], fail_on_error=True,
+ def send_metadata_request(self, payloads=(), fail_on_error=True,
callback=None):
encoder = KafkaProtocol.encode_metadata_request
decoder = KafkaProtocol.decode_metadata_response
return self._send_broker_unaware_request(payloads, encoder, decoder)
- def send_consumer_metadata_request(self, payloads=[], fail_on_error=True,
+ def send_consumer_metadata_request(self, payloads=(), fail_on_error=True,
callback=None):
encoder = KafkaProtocol.encode_consumer_metadata_request
decoder = KafkaProtocol.decode_consumer_metadata_response
return self._send_broker_unaware_request(payloads, encoder, decoder)
- def send_produce_request(self, payloads=[], acks=1, timeout=1000,
+ def send_produce_request(self, payloads=(), acks=1, timeout=1000,
fail_on_error=True, callback=None):
"""
Encode and send some ProduceRequests
@@ -652,7 +652,7 @@ class SimpleClient(object):
if resp is not None and
(not fail_on_error or not self._raise_on_response_error(resp))]
- def send_fetch_request(self, payloads=[], fail_on_error=True,
+ def send_fetch_request(self, payloads=(), fail_on_error=True,
callback=None, max_wait_time=100, min_bytes=4096):
"""
Encode and send a FetchRequest
@@ -672,7 +672,7 @@ class SimpleClient(object):
return [resp if not callback else callback(resp) for resp in resps
if not fail_on_error or not self._raise_on_response_error(resp)]
- def send_offset_request(self, payloads=[], fail_on_error=True,
+ def send_offset_request(self, payloads=(), fail_on_error=True,
callback=None):
resps = self._send_broker_aware_request(
payloads,
@@ -682,7 +682,7 @@ class SimpleClient(object):
return [resp if not callback else callback(resp) for resp in resps
if not fail_on_error or not self._raise_on_response_error(resp)]
- def send_list_offset_request(self, payloads=[], fail_on_error=True,
+ def send_list_offset_request(self, payloads=(), fail_on_error=True,
callback=None):
resps = self._send_broker_aware_request(
payloads,
@@ -692,7 +692,7 @@ class SimpleClient(object):
return [resp if not callback else callback(resp) for resp in resps
if not fail_on_error or not self._raise_on_response_error(resp)]
- def send_offset_commit_request(self, group, payloads=[],
+ def send_offset_commit_request(self, group, payloads=(),
fail_on_error=True, callback=None):
encoder = functools.partial(KafkaProtocol.encode_offset_commit_request,
group=group)
@@ -702,7 +702,7 @@ class SimpleClient(object):
return [resp if not callback else callback(resp) for resp in resps
if not fail_on_error or not self._raise_on_response_error(resp)]
- def send_offset_fetch_request(self, group, payloads=[],
+ def send_offset_fetch_request(self, group, payloads=(),
fail_on_error=True, callback=None):
encoder = functools.partial(KafkaProtocol.encode_offset_fetch_request,
@@ -713,7 +713,7 @@ class SimpleClient(object):
return [resp if not callback else callback(resp) for resp in resps
if not fail_on_error or not self._raise_on_response_error(resp)]
- def send_offset_fetch_request_kafka(self, group, payloads=[],
+ def send_offset_fetch_request_kafka(self, group, payloads=(),
fail_on_error=True, callback=None):
encoder = functools.partial(KafkaProtocol.encode_offset_fetch_request,
diff --git a/test/test_client.py b/test/test_client.py
index 79ac8be..42a1623 100644
--- a/test/test_client.py
+++ b/test/test_client.py
@@ -398,7 +398,7 @@ class TestSimpleClient(unittest.TestCase):
def test_correlation_rollover(self):
with patch.object(SimpleClient, 'load_metadata_for_topics'):
big_num = 2**31 - 3
- client = SimpleClient(hosts=[], correlation_id=big_num)
+ client = SimpleClient(hosts=(), correlation_id=big_num)
self.assertEqual(big_num + 1, client._next_id())
self.assertEqual(big_num + 2, client._next_id())
self.assertEqual(0, client._next_id())
diff --git a/test/test_conn.py b/test/test_conn.py
index 2c418d4..1621e60 100644
--- a/test/test_conn.py
+++ b/test/test_conn.py
@@ -113,7 +113,7 @@ def test_send_max_ifr(conn):
def test_send_no_response(_socket, conn):
conn.connect()
assert conn.state is ConnectionStates.CONNECTED
- req = ProduceRequest[0](required_acks=0, timeout=0, topics=[])
+ req = ProduceRequest[0](required_acks=0, timeout=0, topics=())
header = RequestHeader(req, client_id=conn.config['client_id'])
payload_bytes = len(header.encode()) + len(req.encode())
third = payload_bytes // 3
diff --git a/test/test_fetcher.py b/test/test_fetcher.py
index 0562ec5..64eec1b 100644
--- a/test/test_fetcher.py
+++ b/test/test_fetcher.py
@@ -22,7 +22,7 @@ from kafka.errors import (
@pytest.fixture
def client(mocker):
- return mocker.Mock(spec=KafkaClient(bootstrap_servers=[], api_version=(0, 9)))
+ return mocker.Mock(spec=KafkaClient(bootstrap_servers=(), api_version=(0, 9)))
@pytest.fixture
diff --git a/test/test_sender.py b/test/test_sender.py
index cf911e1..f37e194 100644
--- a/test/test_sender.py
+++ b/test/test_sender.py
@@ -19,7 +19,7 @@ from kafka.structs import TopicPartition, OffsetAndMetadata
@pytest.fixture
def client(mocker):
- _cli = mocker.Mock(spec=KafkaClient(bootstrap_servers=[], api_version=(0, 9)))
+ _cli = mocker.Mock(spec=KafkaClient(bootstrap_servers=(), api_version=(0, 9)))
_cli.cluster = mocker.Mock(spec=ClusterMetadata())
return _cli