author     Taras Voinarovskyi <voyn1991@gmail.com>    2017-09-13 22:03:51 +0300
committer  GitHub <noreply@github.com>                2017-09-13 22:03:51 +0300
commit     b1ae45c10d46e881492b0fd37f0919cac6d79224 (patch)
tree       2dd4f71b4c841bea4662c3926848c296666280e1
parent     71efe2c2be3bd9729ec02df6bd6ce1b0cd6b4eba (diff)
parent     84aec293f7074b4fe46741cd2be694d1efd91386 (diff)
download   kafka-python-b1ae45c10d46e881492b0fd37f0919cac6d79224.tar.gz
Merge pull request #1213 from dpkp/1212-stop-using-mutable-types-for-default-arg-values
Stop using mutable types for default arg values
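
The rationale is the standard Python pitfall: a default argument value is evaluated once, when the def statement runs, so a mutable default such as an empty list is shared across every call that omits the argument. A minimal sketch of the failure mode and the usual fix follows; append_broker is an illustrative helper invented for this note, not kafka-python code.

# Illustrative only -- append_broker is hypothetical, not part of kafka-python.
def append_broker(broker, brokers=[]):      # BUG: the list is created once, at definition time
    brokers.append(broker)
    return brokers

print(append_broker('kafka1:9092'))   # ['kafka1:9092']
print(append_broker('kafka2:9092'))   # ['kafka1:9092', 'kafka2:9092'] -- state leaked between calls


def append_broker_fixed(broker, brokers=None):   # common fix: use a None sentinel
    brokers = list(brokers) if brokers is not None else []
    brokers.append(broker)
    return brokers

print(append_broker_fixed('kafka1:9092'))   # ['kafka1:9092']
print(append_broker_fixed('kafka2:9092'))   # ['kafka2:9092']
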
-rw-r--r--  kafka/client.py      | 18
-rw-r--r--  test/test_client.py  |  2
-rw-r--r--  test/test_conn.py    |  2
-rw-r--r--  test/test_fetcher.py |  2
-rw-r--r--  test/test_sender.py  |  2
5 files changed, 13 insertions, 13 deletions
diff --git a/kafka/client.py b/kafka/client.py
index 680c860..75b05bf 100644
--- a/kafka/client.py
+++ b/kafka/client.py
@@ -588,21 +588,21 @@ class SimpleClient(object):
                 leader, None, None, None
             )

-    def send_metadata_request(self, payloads=[], fail_on_error=True,
+    def send_metadata_request(self, payloads=(), fail_on_error=True,
                               callback=None):
         encoder = KafkaProtocol.encode_metadata_request
         decoder = KafkaProtocol.decode_metadata_response

         return self._send_broker_unaware_request(payloads, encoder, decoder)

-    def send_consumer_metadata_request(self, payloads=[], fail_on_error=True,
+    def send_consumer_metadata_request(self, payloads=(), fail_on_error=True,
                                        callback=None):
         encoder = KafkaProtocol.encode_consumer_metadata_request
         decoder = KafkaProtocol.decode_consumer_metadata_response

         return self._send_broker_unaware_request(payloads, encoder, decoder)

-    def send_produce_request(self, payloads=[], acks=1, timeout=1000,
+    def send_produce_request(self, payloads=(), acks=1, timeout=1000,
                              fail_on_error=True, callback=None):
         """
         Encode and send some ProduceRequests
@@ -652,7 +652,7 @@ class SimpleClient(object):
                 if resp is not None and
                 (not fail_on_error or not self._raise_on_response_error(resp))]

-    def send_fetch_request(self, payloads=[], fail_on_error=True,
+    def send_fetch_request(self, payloads=(), fail_on_error=True,
                            callback=None, max_wait_time=100, min_bytes=4096):
         """
         Encode and send a FetchRequest
@@ -672,7 +672,7 @@ class SimpleClient(object):
         return [resp if not callback else callback(resp) for resp in resps
                 if not fail_on_error or not self._raise_on_response_error(resp)]

-    def send_offset_request(self, payloads=[], fail_on_error=True,
+    def send_offset_request(self, payloads=(), fail_on_error=True,
                             callback=None):
         resps = self._send_broker_aware_request(
             payloads,
@@ -682,7 +682,7 @@ class SimpleClient(object):
         return [resp if not callback else callback(resp) for resp in resps
                 if not fail_on_error or not self._raise_on_response_error(resp)]

-    def send_list_offset_request(self, payloads=[], fail_on_error=True,
+    def send_list_offset_request(self, payloads=(), fail_on_error=True,
                                  callback=None):
         resps = self._send_broker_aware_request(
             payloads,
@@ -692,7 +692,7 @@ class SimpleClient(object):
         return [resp if not callback else callback(resp) for resp in resps
                 if not fail_on_error or not self._raise_on_response_error(resp)]

-    def send_offset_commit_request(self, group, payloads=[],
+    def send_offset_commit_request(self, group, payloads=(),
                                    fail_on_error=True, callback=None):
         encoder = functools.partial(KafkaProtocol.encode_offset_commit_request,
                                     group=group)
@@ -702,7 +702,7 @@ class SimpleClient(object):
         return [resp if not callback else callback(resp) for resp in resps
                 if not fail_on_error or not self._raise_on_response_error(resp)]

-    def send_offset_fetch_request(self, group, payloads=[],
+    def send_offset_fetch_request(self, group, payloads=(),
                                   fail_on_error=True, callback=None):
         encoder = functools.partial(KafkaProtocol.encode_offset_fetch_request,
@@ -713,7 +713,7 @@ class SimpleClient(object):
         return [resp if not callback else callback(resp) for resp in resps
                 if not fail_on_error or not self._raise_on_response_error(resp)]

-    def send_offset_fetch_request_kafka(self, group, payloads=[],
+    def send_offset_fetch_request_kafka(self, group, payloads=(),
                                         fail_on_error=True, callback=None):
         encoder = functools.partial(KafkaProtocol.encode_offset_fetch_request,
diff --git a/test/test_client.py b/test/test_client.py
index 79ac8be..42a1623 100644
--- a/test/test_client.py
+++ b/test/test_client.py
@@ -398,7 +398,7 @@ class TestSimpleClient(unittest.TestCase):

     def test_correlation_rollover(self):
         with patch.object(SimpleClient, 'load_metadata_for_topics'):
             big_num = 2**31 - 3
-            client = SimpleClient(hosts=[], correlation_id=big_num)
+            client = SimpleClient(hosts=(), correlation_id=big_num)
             self.assertEqual(big_num + 1, client._next_id())
             self.assertEqual(big_num + 2, client._next_id())
             self.assertEqual(0, client._next_id())
diff --git a/test/test_conn.py b/test/test_conn.py
index 2c418d4..1621e60 100644
--- a/test/test_conn.py
+++ b/test/test_conn.py
@@ -113,7 +113,7 @@ def test_send_max_ifr(conn):
 def test_send_no_response(_socket, conn):
     conn.connect()
     assert conn.state is ConnectionStates.CONNECTED
-    req = ProduceRequest[0](required_acks=0, timeout=0, topics=[])
+    req = ProduceRequest[0](required_acks=0, timeout=0, topics=())
     header = RequestHeader(req, client_id=conn.config['client_id'])
     payload_bytes = len(header.encode()) + len(req.encode())
     third = payload_bytes // 3
diff --git a/test/test_fetcher.py b/test/test_fetcher.py
index 0562ec5..64eec1b 100644
--- a/test/test_fetcher.py
+++ b/test/test_fetcher.py
@@ -22,7 +22,7 @@ from kafka.errors import (

 @pytest.fixture
 def client(mocker):
-    return mocker.Mock(spec=KafkaClient(bootstrap_servers=[], api_version=(0, 9)))
+    return mocker.Mock(spec=KafkaClient(bootstrap_servers=(), api_version=(0, 9)))


 @pytest.fixture
diff --git a/test/test_sender.py b/test/test_sender.py
index cf911e1..f37e194 100644
--- a/test/test_sender.py
+++ b/test/test_sender.py
@@ -19,7 +19,7 @@ from kafka.structs import TopicPartition, OffsetAndMetadata

 @pytest.fixture
 def client(mocker):
-    _cli = mocker.Mock(spec=KafkaClient(bootstrap_servers=[], api_version=(0, 9)))
+    _cli = mocker.Mock(spec=KafkaClient(bootstrap_servers=(), api_version=(0, 9)))
     _cli.cluster = mocker.Mock(spec=ClusterMetadata())
     return _cli
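
The replacement default is an empty tuple rather than the None-sentinel idiom because these methods only read from payloads: they iterate over it, take its length, or test its truthiness. For read-only use, () behaves the same as [] while being immutable, so it can never accumulate state between calls. A rough sketch of that pattern, using a simplified send_request that is not the actual kafka-python API:

# Simplified stand-in for the send_*_request methods above; the encoding step
# and callback handling are placeholders, not kafka-python functions.
def send_request(payloads=(), fail_on_error=True, callback=None):
    # fail_on_error is kept only to mirror the signature shape shown in the diff.
    if not payloads:                                    # () is falsy, exactly like []
        return []
    responses = [('encoded', p) for p in payloads]      # iteration works the same as over a list
    return responses if callback is None else [callback(r) for r in responses]

print(send_request())                                   # []
print(send_request(payloads=['topic-a', 'topic-b']))    # [('encoded', 'topic-a'), ('encoded', 'topic-b')]
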