author    Kenneth Reitz <me@kennethreitz.org>  2014-12-01 14:45:27 -0500
committer Kenneth Reitz <me@kennethreitz.org>  2014-12-01 14:45:27 -0500
commit    90857b8b59020397e5064df9aaedbef817f0ce3b (patch)
tree      016cc58b4fb4ef34366773dc331f2d04fc067a9b
parent    7aa6c62d6d917e11f81b166d1d6c9e60340783ac (diff)
parent    adf475ef82cbd29f63814c0626f64926deb2355b (diff)
download  python-requests-90857b8b59020397e5064df9aaedbef817f0ce3b.tar.gz
Merge pull request #2216 from sigmavirus24/retries-take-2
Retries logic
-rw-r--r--  requests/adapters.py   | 17
-rw-r--r--  requests/exceptions.py |  5
-rwxr-xr-x  test_requests.py       | 14
3 files changed, 31 insertions(+), 5 deletions(-)
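A minimal usage sketch of the merged behaviour, assembled from the names in the diff below; the httpbin URL mirrors the new test and is illustrative only::

    import requests
    from requests.adapters import HTTPAdapter
    from requests.packages.urllib3.util import Retry
    from requests.exceptions import RetryError

    # max_retries now accepts a urllib3 Retry instance as well as an int.
    session = requests.Session()
    session.mount('https://', HTTPAdapter(
        max_retries=Retry(total=2, status_forcelist=[500])))

    try:
        session.get('https://httpbin.org/status/500')
    except RetryError:
        # Raised by the adapter once the configured retries are exhausted.
        pass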
diff --git a/requests/adapters.py b/requests/adapters.py
index 40088900..c892853b 100644
--- a/requests/adapters.py
+++ b/requests/adapters.py
@@ -26,9 +26,10 @@ from .packages.urllib3.exceptions import ProxyError as _ProxyError
from .packages.urllib3.exceptions import ProtocolError
from .packages.urllib3.exceptions import ReadTimeoutError
from .packages.urllib3.exceptions import SSLError as _SSLError
+from .packages.urllib3.exceptions import ResponseError
from .cookies import extract_cookies_to_jar
from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError,
- ProxyError)
+ ProxyError, RetryError)
from .auth import _basic_auth_str
DEFAULT_POOLBLOCK = False
@@ -63,7 +64,9 @@ class HTTPAdapter(BaseAdapter):
should attempt. Note, this applies only to failed DNS lookups, socket
connections and connection timeouts, never to requests where data has
made it to the server. By default, Requests does not retry failed
- connections.
+ connections. If you need granular control over the conditions under
+ which we retry a request, import urllib3's ``Retry`` class and pass
+ that instead.
:param pool_block: Whether the connection pool should block for connections.
Usage::
@@ -79,7 +82,10 @@ class HTTPAdapter(BaseAdapter):
def __init__(self, pool_connections=DEFAULT_POOLSIZE,
pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES,
pool_block=DEFAULT_POOLBLOCK):
- self.max_retries = max_retries
+ if max_retries == DEFAULT_RETRIES:
+ self.max_retries = Retry(0, read=False)
+ else:
+ self.max_retries = Retry.from_int(max_retries)
self.config = {}
self.proxy_manager = {}
@@ -360,7 +366,7 @@ class HTTPAdapter(BaseAdapter):
assert_same_host=False,
preload_content=False,
decode_content=False,
- retries=Retry(self.max_retries, read=False),
+ retries=self.max_retries,
timeout=timeout
)
@@ -412,6 +418,9 @@ class HTTPAdapter(BaseAdapter):
if isinstance(e.reason, ConnectTimeoutError):
raise ConnectTimeout(e, request=request)
+ if isinstance(e.reason, ResponseError):
+ raise RetryError(e, request=request)
+
raise ConnectionError(e, request=request)
except _ProxyError as e:
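With the adapter changes above, an integer ``max_retries`` is converted via ``Retry.from_int`` while a ``Retry`` instance is handed to ``urlopen`` untouched. A sketch of both forms (parameter values are examples, not recommendations)::

    from requests.adapters import HTTPAdapter
    from requests.packages.urllib3.util import Retry

    # An integer still works: the adapter wraps it with Retry.from_int().
    int_adapter = HTTPAdapter(max_retries=3)

    # A Retry instance is forwarded to urllib3 unchanged, allowing
    # granular control over connect retries, read retries and backoff.
    retry_adapter = HTTPAdapter(max_retries=Retry(
        connect=3, read=0, backoff_factor=0.5))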
diff --git a/requests/exceptions.py b/requests/exceptions.py
index 34c7a0db..89135a80 100644
--- a/requests/exceptions.py
+++ b/requests/exceptions.py
@@ -90,5 +90,10 @@ class ChunkedEncodingError(RequestException):
class ContentDecodingError(RequestException, BaseHTTPError):
"""Failed to decode response content"""
+
class StreamConsumedError(RequestException, TypeError):
"""The content for this response was already consumed"""
+
+
+class RetryError(RequestException):
+ """Custom retries logic failed"""
diff --git a/test_requests.py b/test_requests.py
index 4a05cb2e..4624f095 100755
--- a/test_requests.py
+++ b/test_requests.py
@@ -20,7 +20,7 @@ from requests.compat import (
from requests.cookies import cookiejar_from_dict, morsel_to_cookie
from requests.exceptions import (ConnectionError, ConnectTimeout,
InvalidSchema, InvalidURL, MissingSchema,
- ReadTimeout, Timeout)
+ ReadTimeout, Timeout, RetryError)
from requests.models import PreparedRequest
from requests.structures import CaseInsensitiveDict
from requests.sessions import SessionRedirectMixin
@@ -1520,6 +1520,7 @@ def test_prepared_request_complete_copy():
)
assert_copy(p, p.copy())
+
def test_prepare_unicode_url():
p = PreparedRequest()
p.prepare(
@@ -1529,5 +1530,16 @@ def test_prepare_unicode_url():
)
assert_copy(p, p.copy())
+
+def test_urllib3_retries():
+ from requests.packages.urllib3.util import Retry
+ s = requests.Session()
+ s.mount('https://', HTTPAdapter(max_retries=Retry(
+ total=2, status_forcelist=[500]
+ )))
+
+ with pytest.raises(RetryError):
+ s.get('https://httpbin.org/status/500')
+
if __name__ == '__main__':
unittest.main()