author     Shay Erlichmen <erlichmen@gmail.com>  2018-02-21 21:03:40 +0200
committer  Shay Erlichmen <erlichmen@gmail.com>  2018-02-21 21:03:40 +0200
commit     ef46f403db4069b472bf07c9e21774475de4f524 (patch)
tree       92f74a95fca5c2d73261a4091ac244da3ae70daf
parent     368717c6b2ccf908170808aaf5891be28e80f0da (diff)
parent     db1c1b6ade859120469c45de479bcac758ab173e (diff)
Merge remote-tracking branch 'upstream/master'
* upstream/master: (28 commits)
  Separate collections from collections.abc
  Remove trailing comma
  Make header parsing more RFC-7230-compliant
  raising the singleton NotImplemented actually raises a TypeError and not NotImplementedError - this issue can cause wrong exception handling or even no handling
  Pass python_requires argument to setuptools
  Add test for ConnectionPool.close
  Prevent AttributeError from being raised when pool is already None
  Add pyenv install for Python 3.7 on macOS
  Add macOS+Python 3.7 to Travis
  Add py37 to tox.ini envlist
  Begin testing against Python 3.7
  Prefer readthedocs.io instead of readthedocs.org for doc links
  Move RECENT_DATE to 2017-06-30
  Add spacing between sections on README
  Add Akamai as a sponsor; update sponsorship text to include time-based contributions
  Handle gzip responses with multiple members
  Use FQDN only for DNS and drop trailing dot for other operations
  Credit HPE for my work
  Change use of deprecated assertEquals to pytest style assert
  ...
-rw-r--r--  .travis.yml                                    7
-rw-r--r--  CHANGES.rst                                    2
-rw-r--r--  CONTRIBUTORS.txt                               6
-rw-r--r--  README.rst                                     9
-rwxr-xr-x  _travis/install.sh                            20
-rw-r--r--  docs/contributing.rst                          4
-rw-r--r--  docs/user-guide.rst                            4
-rwxr-xr-x  dummyserver/server.py                          2
-rw-r--r--  dummyserver/testcase.py                       24
-rwxr-xr-x  setup.py                                       1
-rw-r--r--  test/socketpair_helper.py                      2
-rw-r--r--  test/test_collections.py                      15
-rw-r--r--  test/test_connectionpool.py                   19
-rw-r--r--  test/test_poolmanager.py                       8
-rw-r--r--  test/test_response.py                         60
-rw-r--r--  test/with_dummyserver/test_connectionpool.py   4
-rw-r--r--  test/with_dummyserver/test_https.py            5
-rw-r--r--  test/with_dummyserver/test_socketlevel.py     33
-rw-r--r--  tox.ini                                        9
-rw-r--r--  urllib3/_collections.py                       23
-rw-r--r--  urllib3/connection.py                         40
-rw-r--r--  urllib3/connectionpool.py                      2
-rw-r--r--  urllib3/request.py                             4
-rw-r--r--  urllib3/response.py                           30
-rw-r--r--  urllib3/util/selectors.py                      7
25 files changed, 282 insertions, 58 deletions
diff --git a/.travis.yml b/.travis.yml
index ea095824..d2afd240 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -33,6 +33,8 @@ matrix:
env: TOXENV=py35
- python: 3.6
env: TOXENV=py36
+ - python: 3.7-dev
+ env: TOXENV=py37
- python: pypy-5.4
env: TOXENV=pypy
- language: generic
@@ -50,6 +52,11 @@ matrix:
- language: generic
os: osx
env: TOXENV=py36
+ - language: generic
+ os: osx
+ env: TOXENV=py37
allow_failures:
- python: pypy-5.4
+ - python: 3.7-dev
+ - env: TOXENV=py37
sudo: false
diff --git a/CHANGES.rst b/CHANGES.rst
index bdeb1a1f..c65a1a20 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -872,7 +872,7 @@ dev (master)
* Refactored code to be even more decoupled, reusable, and extendable.
* License header added to ``.py`` files.
* Embiggened the documentation: Lots of Sphinx-friendly docstrings in the code
- and docs in ``docs/`` and on urllib3.readthedocs.org.
+ and docs in ``docs/`` and on https://urllib3.readthedocs.io/.
* Embettered all the things!
* Started writing this file.
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt
index 99fff7a4..22b4c04a 100644
--- a/CONTRIBUTORS.txt
+++ b/CONTRIBUTORS.txt
@@ -243,5 +243,11 @@ In chronological order:
* Fix ``util.selectors._fileobj_to_fd`` to accept ``long``.
* Update appveyor tox setup to use the 64bit python.
+* Akamai (through Jesse Shapiro) <jshapiro@akamai.com>
+ * Ongoing maintenance
+
+* Dominique Leuenberger <dimstar@opensuse.org>
+ * Minor fixes in the test suite
+
* [Your name or handle] <[email or website]>
* [Brief summary of your changes]
diff --git a/README.rst b/README.rst
index 0e4fdf29..68c8832e 100644
--- a/README.rst
+++ b/README.rst
@@ -49,6 +49,7 @@ urllib3 is powerful and easy to use::
>>> r.data
'User-agent: *\nDisallow: /deny\n'
+
Installing
----------
@@ -75,17 +76,25 @@ urllib3 happily accepts contributions. Please see our
`contributing documentation <https://urllib3.readthedocs.io/en/latest/contributing.html>`_
for some tips on getting started.
+
Maintainers
-----------
+- `@haikuginger <https://github.com/haikuginger>`_ (Jesse Shapiro)
- `@lukasa <https://github.com/lukasa>`_ (Cory Benfield)
- `@sigmavirus24 <https://github.com/sigmavirus24>`_ (Ian Cordasco)
- `@shazow <https://github.com/shazow>`_ (Andrey Petrov)
👋
+
Sponsorship
-----------
If your company benefits from this library, please consider `sponsoring its
development <https://urllib3.readthedocs.io/en/latest/contributing.html#sponsorship>`_.
+
+Sponsors include:
+
+- Akamai (2017-present), sponsors `@haikuginger <https://github.com/haikuginger>`_'s work on an ongoing basis
+- Hewlett Packard Enterprise (2016-2017), sponsored `@Lukasa’s <https://github.com/Lukasa>`_ work on urllib3
diff --git a/_travis/install.sh b/_travis/install.sh
index 430dbeaa..dac22049 100755
--- a/_travis/install.sh
+++ b/_travis/install.sh
@@ -22,24 +22,28 @@ if [[ "$(uname -s)" == 'Darwin' ]]; then
pyenv global 2.6.9
;;
py27)
- curl -O https://bootstrap.pypa.io/get-pip.py
- python get-pip.py --user
+ pyenv install 2.7.14
+ pyenv global 2.7.14
;;
py33)
pyenv install 3.3.6
pyenv global 3.3.6
;;
py34)
- pyenv install 3.4.5
- pyenv global 3.4.5
+ pyenv install 3.4.7
+ pyenv global 3.4.7
;;
py35)
- pyenv install 3.5.2
- pyenv global 3.5.2
+ pyenv install 3.5.4
+ pyenv global 3.5.4
;;
py36)
- pyenv install 3.6.0
- pyenv global 3.6.0
+ pyenv install 3.6.3
+ pyenv global 3.6.3
+ ;;
+ py37)
+ pyenv install 3.7-dev
+ pyenv global 3.7-dev
;;
pypy*)
pyenv install "pypy-5.4.1"
diff --git a/docs/contributing.rst b/docs/contributing.rst
index ad3dd2e9..51fce4d9 100644
--- a/docs/contributing.rst
+++ b/docs/contributing.rst
@@ -62,6 +62,10 @@ We welcome your patronage on `Bountysource <https://www.bountysource.com/teams/u
Your contribution will go towards adding new features to urllib3 and making
sure all functionality continues to meet our high quality standards.
+We also welcome sponsorship in the form of time. We greatly appreciate companies
+who encourage employees to contribute on an ongoing basis during their work hours.
+Please let us know and we'll be glad to add you to our sponsors list!
+
Project Grant
-------------
diff --git a/docs/user-guide.rst b/docs/user-guide.rst
index a7f7da5b..a7084c6c 100644
--- a/docs/user-guide.rst
+++ b/docs/user-guide.rst
@@ -371,7 +371,7 @@ For example, to do a total of 3 retries, but limit to only 2 redirects::
>>> http.request(
... 'GET',
... 'http://httpbin.org/redirect/3',
- ... retries=urllib3.Retries(3, redirect=2))
+ ... retries=urllib3.Retry(3, redirect=2))
MaxRetryError
You can also disable exceptions for too many redirects and just return the
@@ -380,7 +380,7 @@ You can also disable exceptions for too many redirects and just return the
>>> r = http.request(
... 'GET',
... 'http://httpbin.org/redirect/3',
- ... retries=urllib3.Retries(
+ ... retries=urllib3.Retry(
... redirect=2, raise_on_redirect=False))
>>> r.status
302
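
The corrected class is urllib3.Retry (there is no urllib3.Retries). A short sketch of the second pattern above, returning the final 302 instead of raising; httpbin.org is just the example endpoint from the docs:

    import urllib3

    http = urllib3.PoolManager()

    # Allow 2 redirects and return the last response instead of raising
    # MaxRetryError once the redirect budget is exhausted.
    r = http.request(
        'GET',
        'http://httpbin.org/redirect/3',
        retries=urllib3.Retry(redirect=2, raise_on_redirect=False))
    print(r.status)  # 302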
diff --git a/dummyserver/server.py b/dummyserver/server.py
index 113324b3..6625934b 100755
--- a/dummyserver/server.py
+++ b/dummyserver/server.py
@@ -81,7 +81,7 @@ def _has_ipv6(host):
sock = socket.socket(socket.AF_INET6)
sock.bind((host, 0))
has_ipv6 = True
- except:
+ except Exception:
pass
if sock:
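
socket.has_ipv6 only reflects compile-time support, which is why the test suite switches to a runtime HAS_IPV6 probe built on a helper like _has_ipv6 above. A minimal standalone sketch of such a probe; details of the real helper may differ:

    import socket

    def has_ipv6(host='::1'):
        """True only if an IPv6 socket can actually be bound at runtime."""
        sock = None
        usable = False
        if socket.has_ipv6:  # compile-time support: necessary, not sufficient
            try:
                sock = socket.socket(socket.AF_INET6)
                sock.bind((host, 0))
                usable = True
            except Exception:
                pass
        if sock:
            sock.close()
        return usable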
diff --git a/dummyserver/testcase.py b/dummyserver/testcase.py
index 41ec202c..f73f028e 100644
--- a/dummyserver/testcase.py
+++ b/dummyserver/testcase.py
@@ -1,5 +1,4 @@
import sys
-import socket
import threading
import pytest
@@ -10,6 +9,7 @@ from dummyserver.server import (
run_tornado_app,
run_loop_in_thread,
DEFAULT_CERTS,
+ HAS_IPV6,
)
from dummyserver.handlers import TestingApp
from dummyserver.proxy import ProxyHandler
@@ -76,6 +76,23 @@ class SocketDummyServerTestCase(unittest.TestCase):
if hasattr(cls, 'server_thread'):
cls.server_thread.join(0.1)
+ def assert_header_received(
+ self,
+ received_headers,
+ header_name,
+ expected_value=None
+ ):
+ header_name = header_name.encode('ascii')
+ if expected_value is not None:
+ expected_value = expected_value.encode('ascii')
+ header_titles = []
+ for header in received_headers:
+ key, value = header.split(b': ')
+ header_titles.append(key)
+ if key == header_name and expected_value is not None:
+ self.assertEqual(value, expected_value)
+ self.assertIn(header_name, header_titles)
+
class IPV4SocketDummyServerTestCase(SocketDummyServerTestCase):
@classmethod
@@ -134,7 +151,7 @@ class HTTPSDummyServerTestCase(HTTPDummyServerTestCase):
certs = DEFAULT_CERTS
-@pytest.mark.skipif(not socket.has_ipv6, reason='IPv6 not available')
+@pytest.mark.skipif(not HAS_IPV6, reason='IPv6 not available')
class IPV6HTTPSDummyServerTestCase(HTTPSDummyServerTestCase):
host = '::1'
@@ -178,11 +195,12 @@ class HTTPDummyProxyTestCase(unittest.TestCase):
cls.server_thread.join()
-@pytest.mark.skipif(not socket.has_ipv6, reason='IPv6 not available')
+@pytest.mark.skipif(not HAS_IPV6, reason='IPv6 not available')
class IPv6HTTPDummyServerTestCase(HTTPDummyServerTestCase):
host = '::1'
+@pytest.mark.skipif(not HAS_IPV6, reason='IPv6 not available')
class IPv6HTTPDummyProxyTestCase(HTTPDummyProxyTestCase):
http_host = 'localhost'
diff --git a/setup.py b/setup.py
index 064eb577..3aa9ccf0 100755
--- a/setup.py
+++ b/setup.py
@@ -52,6 +52,7 @@ setup(name='urllib3',
'urllib3.contrib._securetransport', 'urllib3.util',
],
requires=[],
+ python_requires=">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4",
tests_require=[
# These are a less-specific subset of dev-requirements.txt, for the
# convenience of distro package maintainers.
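
With python_requires in the metadata, pip 9+ refuses to install a release on an unsupported interpreter. A minimal sketch of the same constraint in a hypothetical project's setup.py:

    from setuptools import setup

    setup(
        name='example-package',  # hypothetical project name
        version='1.0',
        # pip 9+ reads this field and skips releases whose requirement the
        # running interpreter does not satisfy.
        python_requires=">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4",
    )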
diff --git a/test/socketpair_helper.py b/test/socketpair_helper.py
index 4bea16fe..eb692d39 100644
--- a/test/socketpair_helper.py
+++ b/test/socketpair_helper.py
@@ -54,7 +54,7 @@ else:
pass
csock.setblocking(True)
ssock, _ = lsock.accept()
- except:
+ except Exception:
csock.close()
raise
finally:
diff --git a/test/test_collections.py b/test/test_collections.py
index 3a960c57..7e425247 100644
--- a/test/test_collections.py
+++ b/test/test_collections.py
@@ -4,6 +4,7 @@ from urllib3._collections import (
)
import pytest
+from urllib3.exceptions import InvalidHeader
from urllib3.packages import six
xrange = six.moves.xrange
@@ -340,8 +341,8 @@ Server: nginx
Content-Type: text/html; charset=windows-1251
Connection: keep-alive
X-Some-Multiline: asdf
- asdf
- asdf
+ asdf\t
+\t asdf
Set-Cookie: bb_lastvisit=1348253375; expires=Sat, 21-Sep-2013 18:49:35 GMT; path=/
Set-Cookie: bb_lastactivity=0; expires=Sat, 21-Sep-2013 18:49:35 GMT; path=/
www-authenticate: asdf
@@ -356,6 +357,14 @@ www-authenticate: bla
assert len(cookies) == 2
assert cookies[0].startswith("bb_lastvisit")
assert cookies[1].startswith("bb_lastactivity")
- assert d['x-some-multiline'].split() == ['asdf', 'asdf', 'asdf']
+ assert d['x-some-multiline'] == 'asdf asdf asdf'
assert d['www-authenticate'] == 'asdf, bla'
assert d.getlist('www-authenticate') == ['asdf', 'bla']
+ with_invalid_multiline = """\tthis-is-not-a-header: but it has a pretend value
+Authorization: Bearer 123
+
+"""
+ buffer = six.moves.StringIO(with_invalid_multiline.replace('\n', '\r\n'))
+ msg = six.moves.http_client.HTTPMessage(buffer)
+ with pytest.raises(InvalidHeader):
+ HTTPHeaderDict.from_httplib(msg)
diff --git a/test/test_connectionpool.py b/test/test_connectionpool.py
index d8b8a839..d4f4407a 100644
--- a/test/test_connectionpool.py
+++ b/test/test_connectionpool.py
@@ -118,6 +118,7 @@ class TestConnectionPool(object):
('google.com', 'https://google.com/'),
('yahoo.com', 'http://google.com/'),
('google.com', 'https://google.net/'),
+ ('google.com', 'http://google.com./'),
])
def test_not_same_host_no_port_http(self, a, b):
with HTTPConnectionPool(a) as c:
@@ -130,6 +131,7 @@ class TestConnectionPool(object):
('google.com', 'http://google.com/'),
('yahoo.com', 'https://google.com/'),
('google.com', 'https://google.net/'),
+ ('google.com', 'https://google.com./'),
])
def test_not_same_host_no_port_https(self, a, b):
with HTTPSConnectionPool(a) as c:
@@ -247,6 +249,23 @@ class TestConnectionPool(object):
with pytest.raises(Empty):
old_pool_queue.get(block=False)
+ def test_pool_close_twice(self):
+ pool = connection_from_url('http://google.com:80')
+
+ # Populate with some connections
+ conn1 = pool._get_conn()
+ conn2 = pool._get_conn()
+ pool._put_conn(conn1)
+ pool._put_conn(conn2)
+
+ pool.close()
+ assert pool.pool is None
+
+ try:
+ pool.close()
+ except AttributeError:
+ pytest.fail("Pool of the ConnectionPool is None and has no attribute get.")
+
def test_pool_timeouts(self):
with HTTPConnectionPool(host='localhost') as pool:
conn = pool._new_conn()
diff --git a/test/test_poolmanager.py b/test/test_poolmanager.py
index 8e253c7c..a233f013 100644
--- a/test/test_poolmanager.py
+++ b/test/test_poolmanager.py
@@ -31,6 +31,14 @@ class TestPoolManager(object):
assert conn1 == conn2
+ # Ensure that FQDNs are handled separately from relative domains
+ p = PoolManager(2)
+
+ conn1 = p.connection_from_url('http://localhost.:8081/foo')
+ conn2 = p.connection_from_url('http://localhost:8081/bar')
+
+ assert conn1 != conn2
+
def test_many_urls(self):
urls = [
"http://localhost:8081/foo",
diff --git a/test/test_response.py b/test/test_response.py
index b255ec1e..86ac786b 100644
--- a/test/test_response.py
+++ b/test/test_response.py
@@ -1,4 +1,5 @@
import socket
+import zlib
from io import BytesIO, BufferedReader
@@ -96,7 +97,6 @@ class TestResponse(object):
assert r.read() == b''
def test_decode_deflate(self):
- import zlib
data = zlib.compress(b'foo')
fp = BytesIO(data)
@@ -105,7 +105,6 @@ class TestResponse(object):
assert r.data == b'foo'
def test_decode_deflate_case_insensitve(self):
- import zlib
data = zlib.compress(b'foo')
fp = BytesIO(data)
@@ -114,7 +113,6 @@ class TestResponse(object):
assert r.data == b'foo'
def test_chunked_decoding_deflate(self):
- import zlib
data = zlib.compress(b'foo')
fp = BytesIO(data)
@@ -132,7 +130,6 @@ class TestResponse(object):
assert r.read() == b''
def test_chunked_decoding_deflate2(self):
- import zlib
compress = zlib.compressobj(6, zlib.DEFLATED, -zlib.MAX_WBITS)
data = compress.compress(b'foo')
data += compress.flush()
@@ -150,7 +147,6 @@ class TestResponse(object):
assert r.read() == b''
def test_chunked_decoding_gzip(self):
- import zlib
compress = zlib.compressobj(6, zlib.DEFLATED, 16 + zlib.MAX_WBITS)
data = compress.compress(b'foo')
data += compress.flush()
@@ -165,6 +161,53 @@ class TestResponse(object):
assert r.read() == b''
assert r.read() == b''
+ def test_decode_gzip_multi_member(self):
+ compress = zlib.compressobj(6, zlib.DEFLATED, 16 + zlib.MAX_WBITS)
+ data = compress.compress(b'foo')
+ data += compress.flush()
+ data = data * 3
+
+ fp = BytesIO(data)
+ r = HTTPResponse(fp, headers={'content-encoding': 'gzip'})
+
+ assert r.data == b'foofoofoo'
+
+ def test_decode_gzip_error(self):
+ fp = BytesIO(b'foo')
+ with pytest.raises(DecodeError):
+ HTTPResponse(fp, headers={'content-encoding': 'gzip'})
+
+ def test_decode_gzip_swallow_garbage(self):
+ # When data comes from multiple calls to read(), data after
+ # the first zlib error (here triggered by garbage) should be
+ # ignored.
+ compress = zlib.compressobj(6, zlib.DEFLATED, 16 + zlib.MAX_WBITS)
+ data = compress.compress(b'foo')
+ data += compress.flush()
+ data = data * 3 + b'foo'
+
+ fp = BytesIO(data)
+ r = HTTPResponse(
+ fp, headers={'content-encoding': 'gzip'}, preload_content=False)
+ ret = b''
+ for _ in range(100):
+ ret += r.read(1)
+ if r.closed:
+ break
+
+ assert ret == b'foofoofoo'
+
+ def test_chunked_decoding_gzip_swallow_garbage(self):
+ compress = zlib.compressobj(6, zlib.DEFLATED, 16 + zlib.MAX_WBITS)
+ data = compress.compress(b'foo')
+ data += compress.flush()
+ data = data * 3 + b'foo'
+
+ fp = BytesIO(data)
+ r = HTTPResponse(fp, headers={'content-encoding': 'gzip'})
+
+ assert r.data == b'foofoofoo'
+
def test_body_blob(self):
resp = HTTPResponse(b'foo')
assert resp.data == b'foo'
@@ -294,7 +337,6 @@ class TestResponse(object):
next(stream)
def test_gzipped_streaming(self):
- import zlib
compress = zlib.compressobj(6, zlib.DEFLATED, 16 + zlib.MAX_WBITS)
data = compress.compress(b'foo')
data += compress.flush()
@@ -310,7 +352,6 @@ class TestResponse(object):
next(stream)
def test_gzipped_streaming_tell(self):
- import zlib
compress = zlib.compressobj(6, zlib.DEFLATED, 16 + zlib.MAX_WBITS)
uncompressed_data = b'foo'
data = compress.compress(uncompressed_data)
@@ -333,8 +374,6 @@ class TestResponse(object):
def test_deflate_streaming_tell_intermediate_point(self):
# Ensure that ``tell()`` returns the correct number of bytes when
# part-way through streaming compressed content.
- import zlib
-
NUMBER_OF_READS = 10
class MockCompressedDataReading(BytesIO):
@@ -384,7 +423,6 @@ class TestResponse(object):
assert len(ZLIB_PAYLOAD) == end_of_stream
def test_deflate_streaming(self):
- import zlib
data = zlib.compress(b'foo')
fp = BytesIO(data)
@@ -398,7 +436,6 @@ class TestResponse(object):
next(stream)
def test_deflate2_streaming(self):
- import zlib
compress = zlib.compressobj(6, zlib.DEFLATED, -zlib.MAX_WBITS)
data = compress.compress(b'foo')
data += compress.flush()
@@ -530,7 +567,6 @@ class TestResponse(object):
"""Show that we can decode the gizpped and chunked body."""
def stream():
# Set up a generator to chunk the gzipped body
- import zlib
compress = zlib.compressobj(6, zlib.DEFLATED, 16 + zlib.MAX_WBITS)
data = compress.compress(b'foobar')
data += compress.flush()
diff --git a/test/with_dummyserver/test_connectionpool.py b/test/with_dummyserver/test_connectionpool.py
index cf46392d..0eacfa2c 100644
--- a/test/with_dummyserver/test_connectionpool.py
+++ b/test/with_dummyserver/test_connectionpool.py
@@ -1000,7 +1000,7 @@ class TestRetryPoolSize(HTTPDummyServerTestCase):
def test_pool_size_retry(self):
self.pool.urlopen('GET', '/not_found', preload_content=False)
- self.assertEquals(self.pool.num_connections, 1)
+ assert self.pool.num_connections == 1
class TestRedirectPoolSize(HTTPDummyServerTestCase):
@@ -1017,7 +1017,7 @@ class TestRedirectPoolSize(HTTPDummyServerTestCase):
def test_pool_size_redirect(self):
self.pool.urlopen('GET', '/redirect', preload_content=False)
- self.assertEquals(self.pool.num_connections, 1)
+ assert self.pool.num_connections == 1
if __name__ == '__main__':
diff --git a/test/with_dummyserver/test_https.py b/test/with_dummyserver/test_https.py
index ba947ea9..7b8d57b1 100644
--- a/test/with_dummyserver/test_https.py
+++ b/test/with_dummyserver/test_https.py
@@ -65,6 +65,11 @@ class TestHTTPS(HTTPSDummyServerTestCase):
r = self._pool.request('GET', '/')
self.assertEqual(r.status, 200, r.data)
+ def test_dotted_fqdn(self):
+ pool = HTTPSConnectionPool(self.host + '.', self.port)
+ r = pool.request('GET', '/')
+ self.assertEqual(r.status, 200, r.data)
+
def test_set_ssl_version_to_tlsv1(self):
self._pool.ssl_version = ssl.PROTOCOL_TLSv1
r = self._pool.request('GET', '/')
diff --git a/test/with_dummyserver/test_socketlevel.py b/test/with_dummyserver/test_socketlevel.py
index 5656c218..3135d371 100644
--- a/test/with_dummyserver/test_socketlevel.py
+++ b/test/with_dummyserver/test_socketlevel.py
@@ -1225,6 +1225,37 @@ class TestHeaders(SocketDummyServerTestCase):
pool.request('GET', '/', headers=OrderedDict(expected_request_headers))
self.assertEqual(expected_request_headers, actual_request_headers)
+ def test_request_host_header_ignores_fqdn_dot(self):
+
+ received_headers = []
+
+ def socket_handler(listener):
+ sock = listener.accept()[0]
+
+ buf = b''
+ while not buf.endswith(b'\r\n\r\n'):
+ buf += sock.recv(65536)
+
+ for header in buf.split(b'\r\n')[1:]:
+ if header:
+ received_headers.append(header)
+
+ sock.send((
+ u'HTTP/1.1 204 No Content\r\n'
+ u'Content-Length: 0\r\n'
+ u'\r\n').encode('ascii'))
+
+ sock.close()
+
+ self._start_server(socket_handler)
+
+ pool = HTTPConnectionPool(self.host + '.', self.port, retries=False)
+ self.addCleanup(pool.close)
+ pool.request('GET', '/')
+ self.assert_header_received(
+ received_headers, 'Host', '%s:%s' % (self.host, self.port)
+ )
+
def test_response_headers_are_returned_in_the_original_order(self):
# NOTE: Probability this test gives a false negative is 1/(K!)
K = 16
@@ -1468,4 +1499,4 @@ class TestRetryPoolSizeDrainFail(SocketDummyServerTestCase):
self.addCleanup(pool.close)
pool.urlopen('GET', '/not_found', preload_content=False)
- self.assertEquals(pool.num_connections, 1)
+ assert pool.num_connections == 1
diff --git a/tox.ini b/tox.ini
index a8b9deb3..d352f52c 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,14 +1,7 @@
[tox]
-envlist = flake8-py3, py26, py27, py34, py35, py36, pypy
+envlist = flake8-py3, py26, py27, py34, py35, py36, py37, pypy
[testenv]
-basepython =
- py26: {env:TOXPY26:python2.6}
- py27: {env:TOXPY27:python2.7}
- py34: {env:TOXPY34:python3.4}
- py35: {env:TOXPY35:python3.5}
- py36: {env:TOXPY36:python3.6}
- pypy: {env:TOXPYPY:pypy}
deps= -r{toxinidir}/dev-requirements.txt
commands=
# Print out the python version and bitness
diff --git a/urllib3/_collections.py b/urllib3/_collections.py
index 5df2372c..6e36b84e 100644
--- a/urllib3/_collections.py
+++ b/urllib3/_collections.py
@@ -1,5 +1,8 @@
from __future__ import absolute_import
-from collections import Mapping, MutableMapping
+try:
+ from collections.abc import Mapping, MutableMapping
+except ImportError:
+ from collections import Mapping, MutableMapping
try:
from threading import RLock
except ImportError: # Platform-specific: No threads available
@@ -15,6 +18,7 @@ try: # Python 2.7+
from collections import OrderedDict
except ImportError:
from .packages.ordered_dict import OrderedDict
+from .exceptions import InvalidHeader
from .packages.six import iterkeys, itervalues, PY3
@@ -305,13 +309,22 @@ class HTTPHeaderDict(MutableMapping):
# python2.7 does not expose a proper API for exporting multiheaders
# efficiently. This function re-reads raw lines from the message
# object and extracts the multiheaders properly.
+ obs_fold_continued_leaders = (' ', '\t')
headers = []
for line in message.headers:
- if line.startswith((' ', '\t')):
- key, value = headers[-1]
- headers[-1] = (key, value + '\r\n' + line.rstrip())
- continue
+ if line.startswith(obs_fold_continued_leaders):
+ if not headers:
+ # We received a header line that starts with OWS as described
+ # in RFC-7230 S3.2.4. This indicates a multiline header, but
+ # there exists no previous header to which we can attach it.
+ raise InvalidHeader(
+ 'Header continuation with no previous header: %s' % line
+ )
+ else:
+ key, value = headers[-1]
+ headers[-1] = (key, value + ' ' + line.strip())
+ continue
key, value = line.split(':', 1)
headers.append((key, value.strip()))
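
The rewritten loop joins obs-fold continuation lines with a single space, per RFC 7230 section 3.2.4, and rejects a continuation that has no header to attach to. A standalone sketch of the same folding logic; the names and the ValueError stand-in are illustrative, not the library's API:

    # Illustrative re-implementation of the folding rules added above.
    def fold_headers(lines):
        headers = []
        for line in lines:
            if line.startswith((' ', '\t')):  # obs-fold continuation
                if not headers:
                    # the library raises InvalidHeader here
                    raise ValueError('continuation with no previous header')
                key, value = headers[-1]
                headers[-1] = (key, value + ' ' + line.strip())
                continue
            key, value = line.split(':', 1)
            headers.append((key, value.strip()))
        return headers

    print(fold_headers(['X-Some-Multiline: asdf', '\tasdf', ' asdf']))
    # [('X-Some-Multiline', 'asdf asdf asdf')]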
diff --git a/urllib3/connection.py b/urllib3/connection.py
index c0d83299..a03b573f 100644
--- a/urllib3/connection.py
+++ b/urllib3/connection.py
@@ -56,10 +56,11 @@ port_by_scheme = {
'https': 443,
}
-# When updating RECENT_DATE, move it to
-# within two years of the current date, and no
-# earlier than 6 months ago.
-RECENT_DATE = datetime.date(2016, 1, 1)
+# When updating RECENT_DATE, move it to within two years of the current date,
+# and not less than 6 months ago.
+# Example: if Today is 2018-01-01, then RECENT_DATE should be any date on or
+# after 2016-01-01 (today - 2 years) AND before 2017-07-01 (today - 6 months)
+RECENT_DATE = datetime.date(2017, 6, 30)
class DummyConnection(object):
@@ -124,6 +125,35 @@ class HTTPConnection(_HTTPConnection, object):
# Superclass also sets self.source_address in Python 2.7+.
_HTTPConnection.__init__(self, *args, **kw)
+ @property
+ def host(self):
+ """
+ Getter method to remove any trailing dots that indicate the hostname is an FQDN.
+
+ In general, SSL certificates don't include the trailing dot indicating a
+ fully-qualified domain name, and thus, they don't validate properly when
+ checked against a domain name that includes the dot. In addition, some
+ servers may not expect to receive the trailing dot when provided.
+
+ However, the hostname with trailing dot is critical to DNS resolution; doing a
+ lookup with the trailing dot will properly only resolve the appropriate FQDN,
+ whereas a lookup without a trailing dot will search the system's search domain
+ list. Thus, it's important to keep the original host around for use only in
+ those cases where it's appropriate (i.e., when doing DNS lookup to establish the
+ actual TCP connection across which we're going to send HTTP requests).
+ """
+ return self._dns_host.rstrip('.')
+
+ @host.setter
+ def host(self, value):
+ """
+ Setter for the `host` property.
+
+ We assume that only urllib3 uses the _dns_host attribute; httplib itself
+ only uses `host`, and it seems reasonable that other libraries follow suit.
+ """
+ self._dns_host = value
+
def _new_conn(self):
""" Establish a socket connection and set nodelay settings on it.
@@ -138,7 +168,7 @@ class HTTPConnection(_HTTPConnection, object):
try:
conn = connection.create_connection(
- (self.host, self.port), self.timeout, **extra_kw)
+ (self._dns_host, self.port), self.timeout, **extra_kw)
except SocketTimeout as e:
raise ConnectTimeoutError(
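
After this change the trailing dot is kept only where it matters: _dns_host (used to open the TCP connection) preserves it, while host (used for the Host header and TLS hostname matching) strips it. A quick illustration against the new property; output comments show the expected values:

    from urllib3.connection import HTTPConnection

    conn = HTTPConnection('example.com.', 80)
    print(conn._dns_host)  # 'example.com.' -- kept for DNS resolution
    print(conn.host)       # 'example.com'  -- used for Host header and TLS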
diff --git a/urllib3/connectionpool.py b/urllib3/connectionpool.py
index 85be0b46..b7b757dd 100644
--- a/urllib3/connectionpool.py
+++ b/urllib3/connectionpool.py
@@ -411,6 +411,8 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
"""
Close all pooled connections and disable the pool.
"""
+ if self.pool is None:
+ return
# Disable access to the pool
old_pool, self.pool = self.pool, None
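
The early return makes close() idempotent; calling it a second time previously raised AttributeError because self.pool was already None. A minimal illustration:

    from urllib3 import HTTPConnectionPool

    pool = HTTPConnectionPool('localhost')
    pool.close()
    pool.close()  # now a no-op; previously raised AttributeError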
diff --git a/urllib3/request.py b/urllib3/request.py
index c0fddff0..bfd7a245 100644
--- a/urllib3/request.py
+++ b/urllib3/request.py
@@ -44,8 +44,8 @@ class RequestMethods(object):
def urlopen(self, method, url, body=None, headers=None,
encode_multipart=True, multipart_boundary=None,
**kw): # Abstract
- raise NotImplemented("Classes extending RequestMethods must implement "
- "their own ``urlopen`` method.")
+ raise NotImplementedError("Classes extending RequestMethods must implement "
+ "their own ``urlopen`` method.")
def request(self, method, url, fields=None, headers=None, **urlopen_kw):
"""
diff --git a/urllib3/response.py b/urllib3/response.py
index 7db771ee..3079836f 100644
--- a/urllib3/response.py
+++ b/urllib3/response.py
@@ -52,18 +52,42 @@ class DeflateDecoder(object):
self._data = None
+class GzipDecoderState(object):
+
+ FIRST_MEMBER = 0
+ OTHER_MEMBERS = 1
+ SWALLOW_DATA = 2
+
+
class GzipDecoder(object):
def __init__(self):
self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
+ self._state = GzipDecoderState.FIRST_MEMBER
def __getattr__(self, name):
return getattr(self._obj, name)
def decompress(self, data):
- if not data:
- return data
- return self._obj.decompress(data)
+ ret = binary_type()
+ if self._state == GzipDecoderState.SWALLOW_DATA or not data:
+ return ret
+ while True:
+ try:
+ ret += self._obj.decompress(data)
+ except zlib.error:
+ previous_state = self._state
+ # Ignore data after the first error
+ self._state = GzipDecoderState.SWALLOW_DATA
+ if previous_state == GzipDecoderState.OTHER_MEMBERS:
+ # Allow trailing garbage acceptable in other gzip clients
+ return ret
+ raise
+ data = self._obj.unused_data
+ if not data:
+ return ret
+ self._state = GzipDecoderState.OTHER_MEMBERS
+ self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
def _get_decoder(mode):
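
The decoder now restarts zlib on each gzip member's unused_data, so a stream of concatenated members decodes fully instead of stopping after the first. A quick check of the new behavior, mirroring the test added above:

    import zlib
    from io import BytesIO
    from urllib3.response import HTTPResponse

    def gzip_member(payload):
        c = zlib.compressobj(6, zlib.DEFLATED, 16 + zlib.MAX_WBITS)
        return c.compress(payload) + c.flush()

    body = gzip_member(b'foo') * 3  # three members, one valid gzip stream
    r = HTTPResponse(BytesIO(body), headers={'content-encoding': 'gzip'})
    print(r.data)  # b'foofoofoo'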
diff --git a/urllib3/util/selectors.py b/urllib3/util/selectors.py
index 57334473..5cb772f6 100644
--- a/urllib3/util/selectors.py
+++ b/urllib3/util/selectors.py
@@ -11,10 +11,15 @@ import select
import socket
import sys
import time
-from collections import namedtuple, Mapping
+from collections import namedtuple
from ..packages.six import integer_types
try:
+ from collections.abc import Mapping
+except ImportError:
+ from collections import Mapping
+
+try:
monotonic = time.monotonic
except (AttributeError, ImportError): # Python 3.3<
monotonic = time.time