summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorNate Prewitt <Nate.Prewitt@gmail.com>2018-04-07 15:11:00 -0700
committerNate Prewitt <Nate.Prewitt@gmail.com>2018-04-07 15:11:00 -0700
commitc7cea32304c05ef5890bcb8941edc8b6fbe04a03 (patch)
tree7a5d670da7f5b7a8403537f2099435ce2407194e
parent9c6bd54b44c0b05c6907522e8d9998a87b69c1cd (diff)
parentb66908e7b647689793e299edc111bf9910e93ad3 (diff)
downloadpython-requests-updating_3.0.0.tar.gz
Merge remote-tracking branch 'upstream/master' into updating_3.0.0updating_3.0.0
-rw-r--r--.gitignore2
-rw-r--r--.travis.yml4
-rw-r--r--AUTHORS.rst16
-rw-r--r--HISTORY.rst9
-rw-r--r--README.rst2
-rw-r--r--docs/_static/custom.css8
-rw-r--r--docs/_templates/hacks.html7
-rw-r--r--docs/_templates/sidebarintro.html19
-rw-r--r--docs/_templates/sidebarlogo.html17
-rw-r--r--docs/api.rst14
-rw-r--r--docs/community/out-there.rst2
-rw-r--r--docs/community/recommended.rst7
-rw-r--r--docs/community/release-process.rst17
-rw-r--r--docs/community/sponsors.rst96
-rw-r--r--docs/community/updates.rst1
-rw-r--r--docs/conf.py4
-rw-r--r--docs/dev/todo.rst3
-rw-r--r--docs/index.rst17
-rw-r--r--docs/user/advanced.rst61
-rw-r--r--docs/user/authentication.rst10
-rw-r--r--docs/user/quickstart.rst15
-rw-r--r--requests/adapters.py15
-rw-r--r--requests/exceptions.py4
-rw-r--r--requests/help.py2
-rw-r--r--requests/models.py4
-rw-r--r--requests/sessions.py13
-rw-r--r--requests/status_codes.py38
-rw-r--r--requests/utils.py103
-rw-r--r--setup.cfg3
-rwxr-xr-xsetup.py1
-rw-r--r--tests/test_lowlevel.py72
-rw-r--r--tests/test_requests.py61
-rw-r--r--tests/test_utils.py103
-rw-r--r--tox.ini2
34 files changed, 617 insertions, 135 deletions
diff --git a/.gitignore b/.gitignore
index 19ebfd79..cd0c32e9 100644
--- a/.gitignore
+++ b/.gitignore
@@ -21,3 +21,5 @@ t.py
t2.py
dist
+
+/.mypy_cache/
diff --git a/.travis.yml b/.travis.yml
index 9f2397d0..1968ae52 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -5,7 +5,7 @@ python:
- "3.4"
- "3.5"
- "3.6"
- # - "3.7-dev"
+ - "3.7-dev"
# - "pypy" -- appears to hang
# - "pypy3"
matrix:
@@ -27,5 +27,3 @@ jobs:
- stage: coverage
python: 3.6
script: codecov
-
-
diff --git a/AUTHORS.rst b/AUTHORS.rst
index b35b2595..eb580a4d 100644
--- a/AUTHORS.rst
+++ b/AUTHORS.rst
@@ -1,13 +1,17 @@
Requests is written and maintained by Kenneth Reitz and
various contributors:
-Keepers of the Four Crystals
-````````````````````````````
+Keepers of the Crystals
+```````````````````````
- Kenneth Reitz <me@kennethreitz.org> `@kennethreitz <https://github.com/kennethreitz>`_, Keeper of the Master Crystal.
+- Ian Cordasco <graffatcolmingov@gmail.com> `@sigmavirus24 <https://github.com/sigmavirus24>`_.
+- Nate Prewitt <nate.prewitt@gmail.com> `@nateprewitt <https://github.com/nateprewitt>`_.
+
+Previous Keepers of Crystals
+````````````````````````````
+
- Cory Benfield <cory@lukasa.co.uk> `@lukasa <https://github.com/lukasa>`_
-- Ian Cordasco <graffatcolmingov@gmail.com> `@sigmavirus24 <https://github.com/sigmavirus24>`_
-- Nate Prewitt <nate.prewitt@gmail.com> `@nateprewitt <https://github.com/nateprewitt>`_
Patches and Suggestions
@@ -183,3 +187,7 @@ Patches and Suggestions
- Matt Liu <liumatt@gmail.com> (`@mlcrazy <https://github.com/mlcrazy>`_)
- Taylor Hoff <primdevs@protonmail.com> (`@PrimordialHelios <https://github.com/PrimordialHelios>`_)
- Hugo van Kemenade (`@hugovk <https://github.com/hugovk>`_)
+- Arthur Vigil (`@ahvigil <https://github.com/ahvigil>`_)
+- Nehal J Wani (`@nehaljwani <https://github.com/nehaljwani>`_)
+- Demetrios Bairaktaris (`@DemetriosBairaktaris <https://github.com/demetriosbairaktaris>`_)
+- Darren Dormer (`@ddormer <https://github.com/ddormer>`_)
diff --git a/HISTORY.rst b/HISTORY.rst
index efb9b6f2..fc71d0b8 100644
--- a/HISTORY.rst
+++ b/HISTORY.rst
@@ -9,10 +9,17 @@ dev
**Improvements**
- Warn user about possible slowdown when using cryptography version < 1.3.4
+- Check for invalid host in proxy URL, before forwarding request to adapter.
+- Fragments are now properly maintained across redirects. (RFC7231 7.1.2)
**Bugfixes**
- Parsing empty ``Link`` headers with ``parse_header_links()`` no longer return one bogus entry
+- Fixed issue where loading the default certificate bundle from a zip archive
+ would raise an ``IOError``
+- Fixed issue with unexpected ``ImportError`` on windows system which do not support ``winreg`` module
+- DNS resolution in proxy bypass no longer includes the username and password in
+ the request. This also fixes the issue of DNS queries failing on macOS.
2.18.4 (2017-08-15)
+++++++++++++++++++
@@ -124,7 +131,7 @@ dev
- Further restored the ``requests.packages`` namespace for compatibility reasons.
-No code modification (noted below) should be neccessary any longer.
+No code modification (noted below) should be necessary any longer.
2.16.1 (2017-05-27)
+++++++++++++++++++
diff --git a/README.rst b/README.rst
index 24d3a671..292b14fb 100644
--- a/README.rst
+++ b/README.rst
@@ -105,6 +105,6 @@ How to Contribute
#. Write a test which shows that the bug was fixed or that the feature works as expected.
#. Send a pull request and bug the maintainer until it gets merged and published. :) Make sure to add yourself to AUTHORS_.
-.. _`the repository`: http://github.com/requests/requests
+.. _`the repository`: https://github.com/requests/requests
.. _AUTHORS: https://github.com/requests/requests/blob/master/AUTHORS.rst
.. _Contributor Friendly: https://github.com/requests/requests/issues?direction=desc&labels=Contributor+Friendly&page=1&sort=updated&state=open
diff --git a/docs/_static/custom.css b/docs/_static/custom.css
index 3a8af312..54def686 100644
--- a/docs/_static/custom.css
+++ b/docs/_static/custom.css
@@ -1,3 +1,11 @@
+body > div.document > div.sphinxsidebar > div > form > table > tbody > tr:nth-child(2) > td > select {
+ width: 100%!important;
+}
+
+#python27 > a {
+ color: white;
+}
+
#carbonads {
display: block;
overflow: hidden;
diff --git a/docs/_templates/hacks.html b/docs/_templates/hacks.html
index 0d88a6ec..c3fe2d1e 100644
--- a/docs/_templates/hacks.html
+++ b/docs/_templates/hacks.html
@@ -1,7 +1,6 @@
<!-- Alabaster (krTheme++) Hacks -->
<aside id="python27">
- Python 3, the new best practice, is here to stay.
- Python 2 will retire in only <time></time> months!
+ <a href="https://www.kennethreitz.org/requests3"><strong>Requests 3.0</strong> development is underway, and your financial help is appreciated!</a>
</aside>
<!-- Python 2 Death Clock™ -->
@@ -20,14 +19,14 @@
}
</style>
-<script src="https://cdnjs.cloudflare.com/ajax/libs/moment.js/2.18.1/moment.min.js"></script>
+<!-- <script src="https://cdnjs.cloudflare.com/ajax/libs/moment.js/2.18.1/moment.min.js"></script>
<script>
var death = new Date('2020-04-12');
var diff = moment.duration(death - moment());
document.querySelector('#python27 time').innerText = (diff.years() * 12) + diff.months();
-</script>
+</script> -->
<!-- CSS Adjustments (I'm very picky.) -->
diff --git a/docs/_templates/sidebarintro.html b/docs/_templates/sidebarintro.html
index 5b437d85..047e6524 100644
--- a/docs/_templates/sidebarintro.html
+++ b/docs/_templates/sidebarintro.html
@@ -5,7 +5,7 @@
</p>
<p>
- <iframe src="http://ghbtns.com/github-btn.html?user=requests&repo=requests&type=watch&count=true&size=large"
+ <iframe src="https://ghbtns.com/github-btn.html?user=requests&repo=requests&type=watch&count=true&size=large"
allowtransparency="true" frameborder="0" scrolling="0" width="200px" height="35px"></iframe>
</p>
@@ -13,24 +13,26 @@
Requests is an elegant and simple HTTP library for Python, built for
human beings.
</p>
+<p>Sponsored by <strong><a href="https://linode.com/">Linode</a></strong> and <a href="http://docs.python-requests.org/en/master/community/sponsors/#patron-sponsors">other wonderful organizations</a>.</p>
+
<script async type="text/javascript" src="//cdn.carbonads.com/carbon.js?zoneid=1673&serve=C6AILKT&placement=pythonrequestsorg" id="_carbonads_js"></script>
-<h3><a href="http://www.unixstickers.com/stickers/coding_stickers/requests-shaped-sticker">Stickers!</a></h3>
+<p><a href="http://www.unixstickers.com/stickers/coding_stickers/requests-shaped-sticker">Requests Stickers!</a></p>
<h3>Stay Informed</h3>
<p>Receive updates on new releases and upcoming projects.</p>
-<p><iframe src="http://ghbtns.com/github-btn.html?user=kennethreitz&type=follow&count=false"
+<p><iframe src="https://ghbtns.com/github-btn.html?user=kennethreitz&type=follow&count=false"
allowtransparency="true" frameborder="0" scrolling="0" width="200" height="20"></iframe></p>
<p><a href="https://twitter.com/kennethreitz" class="twitter-follow-button" data-show-count="false">Follow @kennethreitz</a> <script>!function(d,s,id){var js,fjs=d.getElementsByTagName(s)[0],p=/^http:/.test(d.location)?'http':'https';if(!d.getElementById(id)){js=d.createElement(s);js.id=id;js.src=p+'://platform.twitter.com/widgets.js';fjs.parentNode.insertBefore(js,fjs);}}(document, 'script', 'twitter-wjs');</script></p>
-<p><a href="https://saythanks.io/to/kennethreitz">Say Thanks!</a></p>
<p><a href="http://tinyletter.com/kennethreitz">Join Mailing List</a>.</p>
<h3>Other Projects</h3>
<p>More <a href="http://kennethreitz.org/">Kenneth Reitz</a> projects:</p>
<ul>
- <li><a href="http://edmsynths.com/">edmsynths.com</a></li>
+ <li><a href="http://html.python-requests.org/">Requests-HTML</a></li>
+ <li><a href="http://howtopython.org/">howtopython.org</a></li>
<li><a href="http://pipenv.org/">pipenv</a></li>
<li><a href="http://pep8.org/">pep8.org</a></li>
<li><a href="http://httpbin.org/">httpbin.org</a></li>
@@ -48,9 +50,9 @@
<p></p>
- <li><a href="http://github.com/requests/requests">Requests @ GitHub</a></li>
- <li><a href="http://pypi.python.org/pypi/requests">Requests @ PyPI</a></li>
- <li><a href="http://github.com/requests/requests/issues">Issue Tracker</a></li>
+ <li><a href="https://github.com/requests/requests">Requests @ GitHub</a></li>
+ <li><a href="https://pypi.python.org/pypi/requests">Requests @ PyPI</a></li>
+ <li><a href="https://github.com/requests/requests/issues">Issue Tracker</a></li>
<li><a href="http://docs.python-requests.org/en/latest/community/updates/#software-updates">Release History</a></li>
</ul>
@@ -67,4 +69,3 @@
<li><a href="http://it.python-requests.org/">Italian</a></li>
<li><a href="http://es.python-requests.org/">Spanish</a></li>
</ul>
-
diff --git a/docs/_templates/sidebarlogo.html b/docs/_templates/sidebarlogo.html
index b31c3477..1b7afbd8 100644
--- a/docs/_templates/sidebarlogo.html
+++ b/docs/_templates/sidebarlogo.html
@@ -4,28 +4,31 @@
</a>
</p>
<p>
-<iframe src="http://ghbtns.com/github-btn.html?user=requests&repo=requests&type=watch&count=true&size=large"
+<iframe src="https://ghbtns.com/github-btn.html?user=requests&repo=requests&type=watch&count=true&size=large"
allowtransparency="true" frameborder="0" scrolling="0" width="200px" height="35px"></iframe>
</p>
+
<p>
Requests is an elegant and simple HTTP library for Python, built for
human beings. You are currently looking at the documentation of the
development release.
</p>
-<h3><a href="http://www.unixstickers.com/stickers/coding_stickers/requests-shaped-sticker">Stickers!</a></h3>
+
+<p>Sponsored by <strong><a href="https://linode.com/">Linode</a></strong> and <a href="http://docs.python-requests.org/en/master/community/sponsors/#patron-sponsors">other wonderful organizations</a>.</p>
+
<h3>Stay Informed</h3>
<p>Receive updates on new releases and upcoming projects.</p>
+<p><a href="http://www.unixstickers.com/stickers/coding_stickers/requests-shaped-sticker">Requests Stickers!</a></p>
+
<p><a href="http://tinyletter.com/kennethreitz">Join Mailing List</a>.</p>
<hr/>
<script async type="text/javascript" src="//cdn.carbonads.com/carbon.js?zoneid=1673&serve=C6AILKT&placement=pythonrequestsorg" id="_carbonads_js"></script>
-<p>If you enjoy using this project, <a href="https://saythanks.io/to/kennethreitz">Say Thanks!</a></p>
-
-<p><iframe src="http://ghbtns.com/github-btn.html?user=kennethreitz&type=follow&count=false"
+<p><iframe src="https://ghbtns.com/github-btn.html?user=kennethreitz&type=follow&count=false"
allowtransparency="true" frameborder="0" scrolling="0" width="200" height="20"></iframe></p>
<p><a href="https://twitter.com/kennethreitz" class="twitter-follow-button" data-show-count="false">Follow @kennethreitz</a> <script>!function(d,s,id){var js,fjs=d.getElementsByTagName(s)[0],p=/^http:/.test(d.location)?'http':'https';if(!d.getElementById(id)){js=d.createElement(s);js.id=id;js.src=p+'://platform.twitter.com/widgets.js';fjs.parentNode.insertBefore(js,fjs);}}(document, 'script', 'twitter-wjs');</script></p>
@@ -35,7 +38,8 @@
<p>More <a href="http://kennethreitz.org/">Kenneth Reitz</a> projects:</p>
<ul>
- <li><a href="http://edmsynths.com/">edmsynths.com</a></li>
+ <li><a href="http://html.python-requests.org/">Requests-HTML</a></li>
+ <li><a href="http://howtopython.org/">howtopython.org</a></li>
<li><a href="http://pipenv.org/">pipenv</a></li>
<li><a href="http://pep8.org/">pep8.org</a></li>
<li><a href="http://httpbin.org/">httpbin.org</a></li>
@@ -59,4 +63,3 @@
<li><a href="http://it.python-requests.org/">Italian</a></li>
<li><a href="http://es.python-requests.org/">Spanish</a></li>
</ul>
-
diff --git a/docs/api.rst b/docs/api.rst
index 5f258944..cecb0fe9 100644
--- a/docs/api.rst
+++ b/docs/api.rst
@@ -108,17 +108,7 @@ Status Code Lookup
.. autoclass:: requests.codes
-::
-
- >>> requests.codes['temporary_redirect']
- 307
-
- >>> requests.codes.teapot
- 418
-
- >>> requests.codes['\o/']
- 200
-
+.. automodule:: requests.status_codes
Migrating to 1.x
@@ -190,7 +180,7 @@ API Changes
logging.basicConfig() # you need to initialize logging, otherwise you will not see anything from requests
logging.getLogger().setLevel(logging.DEBUG)
- requests_log = logging.getLogger("requests.packages.urllib3")
+ requests_log = logging.getLogger("urllib3")
requests_log.setLevel(logging.DEBUG)
requests_log.propagate = True
diff --git a/docs/community/out-there.rst b/docs/community/out-there.rst
index 5ce5f79f..63e70169 100644
--- a/docs/community/out-there.rst
+++ b/docs/community/out-there.rst
@@ -18,7 +18,7 @@ Articles & Talks
- `Python for the Web <http://gun.io/blog/python-for-the-web/>`_ teaches how to use Python to interact with the web, using Requests.
- `Daniel Greenfeld's Review of Requests <http://pydanny.blogspot.com/2011/05/python-http-requests-for-humans.html>`_
- `My 'Python for Humans' talk <http://python-for-humans.heroku.com>`_ ( `audio <http://codeconf.s3.amazonaws.com/2011/pycodeconf/talks/PyCodeConf2011%20-%20Kenneth%20Reitz.m4a>`_ )
-- `Issac Kelly's 'Consuming Web APIs' talk <http://issackelly.github.com/Consuming-Web-APIs-with-Python-Talk/slides/slides.html>`_
+- `Issac Kelly's 'Consuming Web APIs' talk <https://issackelly.github.com/Consuming-Web-APIs-with-Python-Talk/slides/slides.html>`_
- `Blog post about Requests via Yum <http://arunsag.wordpress.com/2011/08/17/new-package-python-requests-http-for-humans/>`_
- `Russian blog post introducing Requests <http://habrahabr.ru/blogs/python/126262/>`_
- `Sending JSON in Requests <http://www.coglib.com/~icordasc/blog/2014/11/sending-json-in-requests.html>`_
diff --git a/docs/community/recommended.rst b/docs/community/recommended.rst
index 0f652d54..8fcd47a4 100644
--- a/docs/community/recommended.rst
+++ b/docs/community/recommended.rst
@@ -15,7 +15,7 @@ Certifi CA Bundle
validating the trustworthiness of SSL certificates while verifying the
identity of TLS hosts. It has been extracted from the Requests project.
-.. _Certifi: http://certifi.io/en/latest/
+.. _Certifi: https://github.com/certifi/python-certifi
CacheControl
------------
@@ -34,7 +34,7 @@ but do not belong in Requests proper. This library is actively maintained
by members of the Requests core team, and reflects the functionality most
requested by users within the community.
-.. _Requests-Toolbelt: http://toolbelt.readthedocs.io/en/latest/index.html
+.. _Requests-Toolbelt: https://toolbelt.readthedocs.io/en/latest/index.html
Requests-Threads
@@ -62,6 +62,3 @@ Betamax
A VCR imitation designed only for Python-Requests.
.. _betamax: https://github.com/sigmavirus24/betamax
-
-
-
diff --git a/docs/community/release-process.rst b/docs/community/release-process.rst
index 2e317ceb..18f71168 100644
--- a/docs/community/release-process.rst
+++ b/docs/community/release-process.rst
@@ -19,19 +19,18 @@ Breaking changes are changes that break backwards compatibility with prior
versions. If the project were to change the ``text`` attribute on a
``Response`` object to a method, that would only happen in a Major release.
-Major releases may also include miscellaneous bug fixes and upgrades to
-vendored packages. The core developers of Requests are committed to providing
-a good user experience. This means we're also committed to preserving
-backwards compatibility as much as possible. Major releases will be infrequent
-and will need strong justifications before they are considered.
+Major releases may also include miscellaneous bug fixes. The core developers of
+Requests are committed to providing a good user experience. This means we're
+also committed to preserving backwards compatibility as much as possible. Major
+releases will be infrequent and will need strong justifications before they are
+considered.
Minor Releases
--------------
-A minor release will not include breaking changes but may include
-miscellaneous bug fixes and upgrades to vendored packages. If the previous
-version of Requests released was ``v10.2.7`` a minor release would be
-versioned as ``v10.3.0``.
+A minor release will not include breaking changes but may include miscellaneous
+bug fixes. If the previous version of Requests released was ``v10.2.7`` a minor
+release would be versioned as ``v10.3.0``.
Minor releases will be backwards compatible with releases that have the same
major version number. In other words, all versions that would start with
diff --git a/docs/community/sponsors.rst b/docs/community/sponsors.rst
new file mode 100644
index 00000000..f1e11efd
--- /dev/null
+++ b/docs/community/sponsors.rst
@@ -0,0 +1,96 @@
+Community Sponsors
+==================
+
+**tl;dr**: Requests development is currently `funded by the Python community <https://www.kennethreitz.org/requests3>`_, and
+some wonderful organizations that utilize the software in their businesses.
+
+
+-------------------
+
+
+Requests is one of the most heavily–utilized Python packages in the world.
+
+It is used by major corporations worldwide for all tasks, both small and large — from writing one–off scripts to orchestrating millions of dollars of critical infrastructure.
+
+It's even embedded within pip, the tool that you use to install packages and deploy with every day!
+
+After losing our primary open source maintainer (who was sponsored by a company to work on Requests, and other projects, full–time), we are seeking community financial contributions towards the development of Requests 3.0.
+
+Patron Sponsors
+----------------
+
+
+`Linode — SSD Cloud Hosting & Linux Servers <https://www.linode.com>`_
+//////////////////////////////////////////////////////////////////////
+
+Whether you’re just getting started or deploying a complex system, launching a Linode cloud server has never been easier. They offer the fastest hardware and network in the industry with scalable environments, and their 24x7 customer support team is always standing by to help with any questions.
+
+✨🍰✨
+//////
+
+----------------------------------
+
+This slot is reserved for ethical organizations willing to invest $10,000 or more in Requests per year.
+
+By becoming a patron–level sponsor, your organization will receive the following benefits:
+
+- Prominent placement on the Requests documentation sidebar (~11,000 uniques / day).
+- Honorable mention here, with logo.
+- Peace of mind knowing that the infrastructure you rely on is being actively maintained.
+
+Organizations that sign up will be listed in order — first come first serve!
+
+Major Sponsors
+--------------
+
+The following organizations have significantly contributed towards Requests' sustainability:
+
+`Slack — Bring your team together <https://slack.com>`_
+///////////////////////////////////////////////////////
+
+Slack was extremely kind to be the first organization to generously donate a large sum towards the `2018 Requests 3.0 fundraiser <https://www.kennethreitz.org/requests3>`_, surpassing our entire fundraising goal immediately! They are helping the world become a better place through connectiveness, and reducing the amount of email we all have
+to deal with on a daily basis.
+
+P.S. They're `hiring <https://slack.com/careers#openings>`_!
+
+
+`Twilio — Voice, SMS, and Video for Humans <https://www.twilio.com>`_
+/////////////////////////////////////////////////////////////////////
+
+Twilio was the second organization to generously donate a large sum towards the `2018 Requests 3.0 fundraiser <https://www.kennethreitz.org/requests3>`_, matching the donation of Slack! They are helping the world become a better place through interconnectivity,
+providing easy–to–use APIs, and empowering developers world-over to help humans communicate in meaningful and effective ways.
+
+
+`Azure Cloud Developer Advocates <https://developer.microsoft.com/en-us/advocates/>`_
+/////////////////////////////////////////////////////////////////////////////////////
+
+Azure was the third organization to generously donate a large sum towards the `2018 Requests 3.0 fundraiser <https://www.kennethreitz.org/requests3>`_, matching the donation of Twilio! Awesome group of generous folks :)
+
+
+`Niteo — Web Systems Development <https://www.niteoweb.com>`_
+/////////////////////////////////////////////////////////////
+
+Niteo was the fourth company to generously donate towards the `2018 Requests 3.0 fundraiser <https://www.kennethreitz.org/requests3>`_. Niteo is a company employing tech enthusiasts from all over the world
+who love to build great stuff.
+
+
+`Heroku <https://heroku.com/python>`_
+/////////////////////////////////////
+
+Heroku has allowed Kenneth Reitz to work on some open source projects during work hours,
+including Requests (but mostly Pipenv), from time–to–time, so they are listed
+here as an honorable mention.
+
+----------------
+
+If your organization is interested in becoming either a sponsor or a patron, please `send us an email <mailto:me@kennethreitz.org>`_.
+
+
+Individual Sponsors
+-------------------
+
+Countless individuals, too many to list here, have individually contributed towards the sustainability of the Requests
+project over the years. Some, financially, others, with code. Contributions (from humans) of all kinds are greatly
+appreciated.
+
+✨🍰✨ \ No newline at end of file
diff --git a/docs/community/updates.rst b/docs/community/updates.rst
index f755a493..3b9a3097 100644
--- a/docs/community/updates.rst
+++ b/docs/community/updates.rst
@@ -29,4 +29,3 @@ Release and Version History
===========================
.. include:: ../../HISTORY.rst
-
diff --git a/docs/conf.py b/docs/conf.py
index 4bda98b0..c952fe79 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -58,7 +58,7 @@ master_doc = 'index'
# General information about the project.
project = u'Requests'
-copyright = u'MMXVII. A <a href="http://kennethreitz.com/pages/open-projects.html">Kenneth Reitz</a> Project'
+copyright = u'MMXVIII. A <a href="http://kennethreitz.com/pages/open-projects.html">Kenneth Reitz</a> Project'
author = u'Kenneth Reitz'
# The version info for the project you're documenting, acts as replacement for
@@ -376,4 +376,4 @@ epub_exclude_files = ['search.html']
# If false, no index is generated.
#epub_use_index = True
-intersphinx_mapping = {'urllib3': ('http://urllib3.readthedocs.io/en/latest', None)}
+intersphinx_mapping = {'urllib3': ('https://urllib3.readthedocs.io/en/latest', None)}
diff --git a/docs/dev/todo.rst b/docs/dev/todo.rst
index d960d305..b1a3f7eb 100644
--- a/docs/dev/todo.rst
+++ b/docs/dev/todo.rst
@@ -60,5 +60,4 @@ Requests currently supports the following versions of Python:
Google AppEngine is not officially supported although support is available
with the `Requests-Toolbelt`_.
-.. _Requests-Toolbelt: http://toolbelt.readthedocs.io/
-
+.. _Requests-Toolbelt: https://toolbelt.readthedocs.io/
diff --git a/docs/index.rst b/docs/index.rst
index 9ee0fc79..5ffe739c 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -30,8 +30,8 @@ consumption.
.. note:: The use of **Python 3** is *highly* preferred over Python 2. Consider upgrading your applications and infrastructure if you find yourself *still* using Python 2 in production today. If you are using Python 3, congratulations — you are indeed a person of excellent taste.
—*Kenneth Reitz*
-
-
+
+
-------------------
**Behold, the power of Requests**::
@@ -59,12 +59,12 @@ are 100% automatic, thanks to `urllib3 <https://github.com/shazow/urllib3>`_.
User Testimonials
-----------------
-Twitter, Spotify, Microsoft, Amazon, Lyft, BuzzFeed, Reddit, The NSA, Her Majesty's Government, Google, Twilio, Runscope, Mozilla, Heroku,
+Nike, Twitter, Spotify, Microsoft, Amazon, Lyft, BuzzFeed, Reddit, The NSA, Her Majesty's Government, Google, Twilio, Runscope, Mozilla, Heroku,
PayPal, NPR, Obama for America, Transifex, Native Instruments, The Washington
Post, SoundCloud, Kippt, Sony, and Federal U.S.
Institutions that prefer to be unnamed claim to use Requests internally.
-**Armin Ronacher**—
+**Armin Ronacher**, creator of Flask—
*Requests is the perfect example how beautiful an API can be with the
right level of abstraction.*
@@ -74,14 +74,18 @@ Institutions that prefer to be unnamed claim to use Requests internally.
**Daniel Greenfeld**—
*Nuked a 1200 LOC spaghetti code library with 10 lines of code thanks to
- Kenneth Reitz's request library. Today has been AWESOME.*
+ Kenneth Reitz's Requests library. Today has been AWESOME.*
**Kenny Meyers**—
*Python HTTP: When in doubt, or when not in doubt, use Requests. Beautiful,
simple, Pythonic.*
Requests is one of the most downloaded Python packages of all time, pulling in
-over 13,000,000 downloads every month. All the cool kids are doing it!
+over 400,000 downloads **each day**. Join the party!
+
+If your organization uses Requests internally, consider `supporting the development of 3.0 <https://www.kennethreitz.org/requests3>`_. Your
+generosity will be greatly appreciated, and help drive the project forward
+into the future.
Beloved Features
----------------
@@ -133,6 +137,7 @@ Requests ecosystem and community.
.. toctree::
:maxdepth: 2
+ community/sponsors
community/recommended
community/faq
community/out-there
diff --git a/docs/user/advanced.rst b/docs/user/advanced.rst
index 613df205..e5f7f297 100644
--- a/docs/user/advanced.rst
+++ b/docs/user/advanced.rst
@@ -196,18 +196,18 @@ As a result an ``SSL: CERTIFICATE_VERIFY_FAILED`` is thrown.
You can get around this behaviour by explicitly merging the environment settings into your session::
from requests import Request, Session
-
+
s = Session()
req = Request('GET', url)
-
+
prepped = s.prepare_request(req)
-
+
# Merge environment settings into session
settings = s.merge_environment_settings(prepped.url, None, None, None, None)
resp = s.send(prepped, **settings)
-
+
print(resp.status_code)
-
+
.. _verification:
SSL Cert Verification
@@ -274,20 +274,19 @@ If you specify a wrong path or an invalid cert, you'll get a SSLError::
CA Certificates
---------------
-By default, Requests bundles a set of root CAs that it trusts, sourced from the
-`Mozilla trust store`_. However, these are only updated once for each Requests
-version. This means that if you pin a Requests version your certificates can
-become extremely out of date.
+Requests uses certificates from the package `certifi`_. This allows for users
+to update their trusted certificates without changing the version of Requests.
-From Requests version 2.4.0 onwards, Requests will attempt to use certificates
-from `certifi`_ if it is present on the system. This allows for users to update
-their trusted certificates without having to change the code that runs on their
-system.
+Before version 2.16, Requests bundled a set of root CAs that it trusted,
+sourced from the `Mozilla trust store`_. The certificates were only updated
+once for each Requests version. When ``certifi`` was not installed, this led to
+extremely out-of-date certificate bundles when using significantly older
+versions of Requests.
For the sake of security we recommend upgrading certifi frequently!
.. _HTTP persistent connection: https://en.wikipedia.org/wiki/HTTP_persistent_connection
-.. _connection pooling: http://urllib3.readthedocs.io/en/latest/reference/index.html#module-urllib3.connectionpool
+.. _connection pooling: https://urllib3.readthedocs.io/en/latest/reference/index.html#module-urllib3.connectionpool
.. _certifi: http://certifi.io/
.. _Mozilla trust store: https://hg.mozilla.org/mozilla-central/raw-file/tip/security/nss/lib/ckfw/builtins/certdata.txt
@@ -436,7 +435,7 @@ You can assign a hook function on a per-request basis by passing a
``{hook_name: callback_function}`` dictionary to the ``hooks`` request
parameter::
- hooks=dict(response=print_url)
+ hooks={'response': print_url}
That ``callback_function`` will receive a chunk of data as its first
argument.
@@ -452,12 +451,36 @@ If the callback function returns a value, it is assumed that it is to
replace the data that was passed in. If the function doesn't return
anything, nothing else is effected.
+::
+
+ def record_hook(r, *args, **kwargs):
+ r.hook_called = True
+ return r
+
Let's print some request method arguments at runtime::
- >>> requests.get('http://httpbin.org', hooks=dict(response=print_url))
+ >>> requests.get('http://httpbin.org', hooks={'response': print_url})
http://httpbin.org
<Response [200]>
+You can add multiple hooks to a single request. Let's call two hooks at once::
+
+ >>> r = requests.get('http://httpbin.org', hooks={'response': [print_url, record_hook]})
+ >>> r.hook_called
+ True
+
+You can also add hooks to a ``Session`` instance. Any hooks you add will then
+be called on every request made to the session. For example::
+
+ >>> s = requests.Session()
+ >>> s.hooks['response'].append(print_url)
+ >>> s.get('http://httpbin.org')
+ http://httpbin.org
+ <Response [200]>
+
+A ``Session`` can have multiple hooks, which will be called in the order
+they are added.
+
.. _custom-auth:
Custom Authentication
@@ -633,7 +656,7 @@ When you receive a response, Requests makes a guess at the encoding to
use for decoding the response when you access the :attr:`Response.text
<requests.Response.text>` attribute. Requests will first check for an
encoding in the HTTP header, and if none is present, will use `chardet
-<http://pypi.python.org/pypi/chardet>`_ to attempt to guess the encoding.
+<https://pypi.python.org/pypi/chardet>`_ to attempt to guess the encoding.
The only time Requests will not do this is if no explicit charset
is present in the HTTP headers **and** the ``Content-Type``
@@ -860,7 +883,7 @@ Link Headers
Many HTTP APIs feature Link headers. They make APIs more self describing and
discoverable.
-GitHub uses these for `pagination <http://developer.github.com/v3/#pagination>`_
+GitHub uses these for `pagination <https://developer.github.com/v3/#pagination>`_
in their API, for example::
>>> url = 'https://api.github.com/users/kennethreitz/repos?page=1&per_page=10'
@@ -927,9 +950,9 @@ passed-through to `urllib3`. We'll make a Transport Adapter that instructs the
library to use SSLv3::
import ssl
+ from urllib3.poolmanager import PoolManager
from requests.adapters import HTTPAdapter
- from requests.packages.urllib3.poolmanager import PoolManager
class Ssl3HttpAdapter(HTTPAdapter):
diff --git a/docs/user/authentication.rst b/docs/user/authentication.rst
index 8ffab504..411f79fd 100644
--- a/docs/user/authentication.rst
+++ b/docs/user/authentication.rst
@@ -136,11 +136,11 @@ Further examples can be found under the `Requests organization`_ and in the
.. _OAuth: http://oauth.net/
.. _requests_oauthlib: https://github.com/requests/requests-oauthlib
-.. _requests-oauthlib OAuth2 documentation: http://requests-oauthlib.readthedocs.io/en/latest/oauth2_workflow.html
-.. _Web Application Flow: http://requests-oauthlib.readthedocs.io/en/latest/oauth2_workflow.html#web-application-flow
-.. _Mobile Application Flow: http://requests-oauthlib.readthedocs.io/en/latest/oauth2_workflow.html#mobile-application-flow
-.. _Legacy Application Flow: http://requests-oauthlib.readthedocs.io/en/latest/oauth2_workflow.html#legacy-application-flow
-.. _Backend Application Flow: http://requests-oauthlib.readthedocs.io/en/latest/oauth2_workflow.html#backend-application-flow
+.. _requests-oauthlib OAuth2 documentation: https://requests-oauthlib.readthedocs.io/en/latest/oauth2_workflow.html
+.. _Web Application Flow: https://requests-oauthlib.readthedocs.io/en/latest/oauth2_workflow.html#web-application-flow
+.. _Mobile Application Flow: https://requests-oauthlib.readthedocs.io/en/latest/oauth2_workflow.html#mobile-application-flow
+.. _Legacy Application Flow: https://requests-oauthlib.readthedocs.io/en/latest/oauth2_workflow.html#legacy-application-flow
+.. _Backend Application Flow: https://requests-oauthlib.readthedocs.io/en/latest/oauth2_workflow.html#backend-application-flow
.. _Kerberos: https://github.com/requests/requests-kerberos
.. _NTLM: https://github.com/requests/requests-ntlm
.. _Requests organization: https://github.com/requests
diff --git a/docs/user/quickstart.rst b/docs/user/quickstart.rst
index 109c3415..d393bf05 100644
--- a/docs/user/quickstart.rst
+++ b/docs/user/quickstart.rst
@@ -110,7 +110,7 @@ using, and change it, using the ``r.encoding`` property::
If you change the encoding, Requests will use the new value of ``r.encoding``
whenever you call ``r.text``. You might want to do this in any situation where
you can apply special logic to work out what the encoding of the content will
-be. For example, HTTP and XML have the ability to specify their encoding in
+be. For example, HTML and XML have the ability to specify their encoding in
their body. In situations like this, you should use ``r.content`` to find the
encoding, and then set ``r.encoding``. This will let you use ``r.text`` with
the correct encoding.
@@ -171,7 +171,7 @@ server, you can access ``r.raw``. If you want to do this, make sure you set
>>> r = requests.get('https://api.github.com/events', stream=True)
>>> r.raw
- <requests.packages.urllib3.response.HTTPResponse object at 0x101194810>
+ <urllib3.response.HTTPResponse object at 0x101194810>
>>> r.raw.read(10)
'\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\x03'
@@ -189,6 +189,14 @@ download, the above is the preferred and recommended way to retrieve the
content. Note that ``chunk_size`` can be freely adjusted to a number that
may better fit your use cases.
+.. note::
+
+   An important note about using ``Response.iter_content`` versus ``Response.raw``:
+ ``Response.iter_content`` will automatically decode the ``gzip`` and ``deflate``
+ transfer-encodings. ``Response.raw`` is a raw stream of bytes -- it does not
+ transform the response content. If you really need access to the bytes as they
+ were returned, use ``Response.raw``.
+
Custom Headers
--------------
@@ -273,6 +281,9 @@ the ``json`` parameter (added in version 2.4.2) and it will be encoded automatic
>>> r = requests.post(url, json=payload)
+Note, the ``json`` parameter is ignored if either ``data`` or ``files`` is passed.
+
+Using the ``json`` parameter in the request will change the ``Content-Type`` in the header to ``application/json``.
POST a Multipart-Encoded File
-----------------------------
diff --git a/requests/adapters.py b/requests/adapters.py
index 5bf80eb5..fe0f9049 100644
--- a/requests/adapters.py
+++ b/requests/adapters.py
@@ -13,6 +13,7 @@ import socket
from urllib3.poolmanager import PoolManager, proxy_from_url
from urllib3.response import HTTPResponse
+from urllib3.util import parse_url
from urllib3.util import Timeout as TimeoutSauce
from urllib3.util.retry import Retry
from urllib3.exceptions import ClosedPoolError
@@ -28,13 +29,13 @@ from urllib3.exceptions import ResponseError
from .models import Response
from .compat import urlparse, basestring
-from .utils import (DEFAULT_CA_BUNDLE_PATH, get_encoding_from_headers,
- prepend_scheme_if_needed, get_auth_from_url, urldefragauth,
- select_proxy)
+from .utils import (DEFAULT_CA_BUNDLE_PATH, extract_zipped_paths,
+ get_encoding_from_headers, prepend_scheme_if_needed,
+ get_auth_from_url, urldefragauth, select_proxy)
from .structures import CaseInsensitiveDict
from .cookies import extract_cookies_to_jar
from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError,
- ProxyError, RetryError, InvalidScheme)
+ ProxyError, RetryError, InvalidScheme, InvalidProxyURL)
from .auth import _basic_auth_str
try:
@@ -309,6 +310,10 @@ class HTTPAdapter(BaseAdapter):
if proxy:
proxy = prepend_scheme_if_needed(proxy, 'http')
+ proxy_url = parse_url(proxy)
+ if not proxy_url.host:
+ raise InvalidProxyURL("Please check proxy URL. It is malformed"
+ " and could be missing the host.")
proxy_manager = self.proxy_manager_for(proxy)
conn = proxy_manager.connection_from_url(url, pool_kwargs=pool_kwargs)
else:
@@ -413,7 +418,7 @@ class HTTPAdapter(BaseAdapter):
conn = self.get_connection(request.url, proxies, verify, cert)
url = self.request_url(request, proxies)
- self.add_headers(request)
+ self.add_headers(request, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies)
chunked = not (request.body is None or 'Content-Length' in request.headers)
diff --git a/requests/exceptions.py b/requests/exceptions.py
index ebf4cc34..1c61bf87 100644
--- a/requests/exceptions.py
+++ b/requests/exceptions.py
@@ -85,6 +85,10 @@ class InvalidHeader(RequestException, ValueError):
"""The header value provided was somehow invalid."""
+class InvalidProxyURL(InvalidURL):
+ """The proxy URL provided is invalid."""
+
+
class ChunkedEncodingError(RequestException):
"""The server declared chunked encoding but sent an invalid chunk."""
diff --git a/requests/help.py b/requests/help.py
index 5440ee61..06e06b2a 100644
--- a/requests/help.py
+++ b/requests/help.py
@@ -13,7 +13,7 @@ import chardet
from . import __version__ as requests_version
try:
- from .packages.urllib3.contrib import pyopenssl
+ from urllib3.contrib import pyopenssl
except ImportError:
pyopenssl = None
OpenSSL = None
diff --git a/requests/models.py b/requests/models.py
index e776bc34..c3391ad2 100644
--- a/requests/models.py
+++ b/requests/models.py
@@ -676,11 +676,11 @@ class Response(object):
@property
def ok(self):
- """Returns True if :attr:`status_code` is less than 400.
+ """Returns True if :attr:`status_code` is less than 400, False if not.
This attribute checks if the status code of the response is between
400 and 600 to see if there was a client error or a server error. If
- the status code, is between 200 and 400, this will return True. This
+ the status code is between 200 and 400, this will return True. This
is **not** a check to see if the response code is ``200 OK``.
"""
try:
diff --git a/requests/sessions.py b/requests/sessions.py
index a3f59133..66ed53ea 100644
--- a/requests/sessions.py
+++ b/requests/sessions.py
@@ -8,7 +8,7 @@ This module provides a Session object to manage and persist settings across
requests (cookies, auth, proxies).
"""
import os
-import platform
+import sys
import time
from collections import Mapping, OrderedDict
from datetime import timedelta
@@ -40,7 +40,7 @@ from .status_codes import codes
from .models import REDIRECT_STATI
# Preferred clock, based on which one is more accurate on a given system.
-if platform.system() == 'Windows':
+if sys.platform == 'win32':
try: # Python 3.4+
preferred_clock = time.perf_counter
except AttributeError: # Earlier than Python 3.
@@ -134,6 +134,7 @@ class SessionRedirectMixin(object):
history = [response] # keep track of history; seed it with the original response
location_url = self.get_redirect_target(response)
+ previous_fragment = urlparse(request.url).fragment
while location_url:
prepared_request = request.copy()
@@ -154,8 +155,12 @@ class SessionRedirectMixin(object):
parsed_rurl = urlparse(response.url)
location_url = '%s:%s' % (to_native_string(parsed_rurl.scheme), location_url)
- # The scheme should be lower case...
+ # Normalize url case and attach previous fragment if needed (RFC 7231 7.1.2)
parsed = urlparse(location_url)
+ if parsed.fragment == '' and previous_fragment:
+ parsed = parsed._replace(fragment=previous_fragment)
+ elif parsed.fragment:
+ previous_fragment = parsed.fragment
location_url = parsed.geturl()
# Facilitate relative 'location' headers, as allowed by RFC 7231.
@@ -720,7 +725,7 @@ class Session(SessionRedirectMixin):
"""
for (prefix, adapter) in self.adapters.items():
- if url.lower().startswith(prefix):
+ if url.lower().startswith(prefix.lower()):
return adapter
# Nothing matches :-/
diff --git a/requests/status_codes.py b/requests/status_codes.py
index dee89190..96b86ddb 100644
--- a/requests/status_codes.py
+++ b/requests/status_codes.py
@@ -1,5 +1,22 @@
# -*- coding: utf-8 -*-
+"""
+The ``codes`` object defines a mapping from common names for HTTP statuses
+to their numerical codes, accessible either as attributes or as dictionary
+items.
+
+>>> requests.codes['temporary_redirect']
+307
+>>> requests.codes.teapot
+418
+>>> requests.codes['\o/']
+200
+
+Some codes have multiple names, and both upper- and lower-case versions of
+the names are allowed. For example, ``codes.ok``, ``codes.OK``, and
+``codes.okay`` all correspond to the HTTP status code 200.
+"""
+
from .structures import LookupDict
_codes = {
@@ -84,8 +101,19 @@ _codes = {
codes = LookupDict(name='status_codes')
-for code, titles in _codes.items():
- for title in titles:
- setattr(codes, title, code)
- if not title.startswith(('\\', '/')):
- setattr(codes, title.upper(), code)
+def _init():
+ for code, titles in _codes.items():
+ for title in titles:
+ setattr(codes, title, code)
+ if not title.startswith(('\\', '/')):
+ setattr(codes, title.upper(), code)
+
+ def doc(code):
+ names = ', '.join('``%s``' % n for n in _codes[code])
+ return '* %d: %s' % (code, names)
+
+ global __doc__
+ __doc__ = (__doc__ + '\n' +
+ '\n'.join(doc(code) for code in sorted(_codes)))
+
+_init()
diff --git a/requests/utils.py b/requests/utils.py
index 745858d1..c718a783 100644
--- a/requests/utils.py
+++ b/requests/utils.py
@@ -8,17 +8,18 @@ This module provides utility functions that are used within Requests
that are also useful for external consumption.
"""
-import cgi
import codecs
import collections
import contextlib
import io
import os
-import platform
import re
import socket
import struct
+import sys
+import tempfile
import warnings
+import zipfile
from .__version__ import __version__
from . import certs
@@ -39,19 +40,25 @@ NETRC_FILES = ('.netrc', '_netrc')
DEFAULT_CA_BUNDLE_PATH = certs.where()
-if platform.system() == 'Windows':
+if sys.platform == 'win32':
# provide a proxy_bypass version on Windows without DNS lookups
def proxy_bypass_registry(host):
- if is_py3:
- import winreg
- else:
- import _winreg as winreg
+ try:
+ if is_py3:
+ import winreg
+ else:
+ import _winreg as winreg
+ except ImportError:
+ return False
+
try:
internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER,
r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
- proxyEnable = winreg.QueryValueEx(internetSettings,
- 'ProxyEnable')[0]
+ # ProxyEnable could be REG_SZ or REG_DWORD, normalizing it
+ proxyEnable = int(winreg.QueryValueEx(internetSettings,
+ 'ProxyEnable')[0])
+ # ProxyOverride is almost always a string
proxyOverride = winreg.QueryValueEx(internetSettings,
'ProxyOverride')[0]
except OSError:
@@ -216,6 +223,38 @@ def guess_filename(obj):
return os.path.basename(name)
+def extract_zipped_paths(path):
+ """Replace nonexistent paths that look like they refer to a member of a zip
+ archive with the location of an extracted copy of the target, or else
+ just return the provided path unchanged.
+ """
+ if os.path.exists(path):
+ # this is already a valid path, no need to do anything further
+ return path
+
+ # find the first valid part of the provided path and treat that as a zip archive
+ # assume the rest of the path is the name of a member in the archive
+ archive, member = os.path.split(path)
+ while archive and not os.path.exists(archive):
+ archive, prefix = os.path.split(archive)
+ member = '/'.join([prefix, member])
+
+ if not zipfile.is_zipfile(archive):
+ return path
+
+ zip_file = zipfile.ZipFile(archive)
+ if member not in zip_file.namelist():
+ return path
+
+ # we have a valid zip archive and a valid member of that archive
+ tmp = tempfile.gettempdir()
+ extracted_path = os.path.join(tmp, *member.split('/'))
+ if not os.path.exists(extracted_path):
+ extracted_path = zip_file.extract(member, path=tmp)
+
+ return extracted_path
+
+
def from_key_val_list(value):
"""Take an object and test to see if it can be represented as a
dictionary. Unless it can not be represented as such, return an
@@ -407,6 +446,31 @@ def get_encodings_from_content(content):
xml_re.findall(content))
+def _parse_content_type_header(header):
+ """Returns content type and parameters from given header
+
+ :param header: string
+ :return: tuple containing content type and dictionary of
+ parameters
+ """
+
+ tokens = header.split(';')
+ content_type, params = tokens[0].strip(), tokens[1:]
+ params_dict = {}
+ items_to_strip = "\"' "
+
+ for param in params:
+ param = param.strip()
+ if param:
+ key, value = param, True
+ index_of_equals = param.find("=")
+ if index_of_equals != -1:
+ key = param[:index_of_equals].strip(items_to_strip)
+ value = param[index_of_equals + 1:].strip(items_to_strip)
+ params_dict[key] = value
+ return content_type, params_dict
+
+
def get_encoding_from_headers(headers):
"""Returns encodings from given HTTP Header Dict.
@@ -419,7 +483,7 @@ def get_encoding_from_headers(headers):
if not content_type:
return None
- content_type, params = cgi.parse_header(content_type)
+ content_type, params = _parse_content_type_header(content_type)
if 'charset' in params:
return params['charset'].strip("'\"")
@@ -653,34 +717,37 @@ def should_bypass_proxies(url, no_proxy):
no_proxy_arg = no_proxy
if no_proxy is None:
no_proxy = get_proxy('no_proxy')
- netloc = urlparse(url).netloc
+ parsed = urlparse(url)
if no_proxy:
# We need to check whether we match here. We need to see if we match
- # the end of the netloc, both with and without the port.
+ # the end of the hostname, both with and without the port.
no_proxy = (
host for host in no_proxy.replace(' ', '').split(',') if host
)
- ip = netloc.split(':')[0]
- if is_ipv4_address(ip):
+ if is_ipv4_address(parsed.hostname):
for proxy_ip in no_proxy:
if is_valid_cidr(proxy_ip):
- if address_in_network(ip, proxy_ip):
+ if address_in_network(parsed.hostname, proxy_ip):
return True
- elif ip == proxy_ip:
+ elif parsed.hostname == proxy_ip:
# If no_proxy ip was defined in plain IP notation instead of cidr notation &
# matches the IP of the index
return True
else:
+ host_with_port = parsed.hostname
+ if parsed.port:
+ host_with_port += ':{0}'.format(parsed.port)
+
for host in no_proxy:
- if netloc.endswith(host) or netloc.split(':')[0].endswith(host):
+ if parsed.hostname.endswith(host) or host_with_port.endswith(host):
# The URL does match something in no_proxy, so we don't want
# to apply the proxies on this URL.
return True
with set_environ('no_proxy', no_proxy_arg):
- return bool(proxy_bypass(netloc))
+ return bool(proxy_bypass(parsed.hostname))
def get_environ_proxies(url, no_proxy=None):
diff --git a/setup.cfg b/setup.cfg
index 2a9acf13..ed8a958e 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,2 +1,5 @@
[bdist_wheel]
universal = 1
+
+[metadata]
+license_file = LICENSE
diff --git a/setup.py b/setup.py
index 2782b1c7..617e528f 100755
--- a/setup.py
+++ b/setup.py
@@ -72,6 +72,7 @@ setup(
package_data={'': ['LICENSE', 'NOTICE'], 'requests': ['*.pem']},
package_dir={'requests': 'requests'},
include_package_data=True,
+ python_requires=">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*",
install_requires=requires,
license=about['__license__'],
zip_safe=False,
diff --git a/tests/test_lowlevel.py b/tests/test_lowlevel.py
index 4161f875..6c6b0863 100644
--- a/tests/test_lowlevel.py
+++ b/tests/test_lowlevel.py
@@ -252,3 +252,75 @@ def test_redirect_rfc1808_to_non_ascii_location():
assert r.url == u'{0}/{1}'.format(url, expected_path.decode('ascii'))
close_server.set()
+
+def test_fragment_not_sent_with_request():
+ """Verify that the fragment portion of a URI isn't sent to the server."""
+ def response_handler(sock):
+ req = consume_socket_content(sock, timeout=0.5)
+ sock.send(
+ b'HTTP/1.1 200 OK\r\n'
+ b'Content-Length: '+bytes(len(req))+b'\r\n'
+ b'\r\n'+req
+ )
+
+ close_server = threading.Event()
+ server = Server(response_handler, wait_to_close_event=close_server)
+
+ with server as (host, port):
+ url = 'http://{0}:{1}/path/to/thing/#view=edit&token=hunter2'.format(host, port)
+ r = requests.get(url)
+ raw_request = r.content
+
+ assert r.status_code == 200
+ headers, body = raw_request.split(b'\r\n\r\n', 1)
+ status_line, headers = headers.split(b'\r\n', 1)
+
+ assert status_line == b'GET /path/to/thing/ HTTP/1.1'
+ for frag in (b'view', b'edit', b'token', b'hunter2'):
+ assert frag not in headers
+ assert frag not in body
+
+ close_server.set()
+
+def test_fragment_update_on_redirect():
+ """Verify we only append previous fragment if one doesn't exist on new
+ location. If a new fragment is encounterd in a Location header, it should
+ be added to all subsequent requests.
+ """
+
+ def response_handler(sock):
+ consume_socket_content(sock, timeout=0.5)
+ sock.send(
+ b'HTTP/1.1 302 FOUND\r\n'
+ b'Content-Length: 0\r\n'
+ b'Location: /get#relevant-section\r\n\r\n'
+ )
+ consume_socket_content(sock, timeout=0.5)
+ sock.send(
+ b'HTTP/1.1 302 FOUND\r\n'
+ b'Content-Length: 0\r\n'
+ b'Location: /final-url/\r\n\r\n'
+ )
+ consume_socket_content(sock, timeout=0.5)
+ sock.send(
+ b'HTTP/1.1 200 OK\r\n\r\n'
+ )
+
+ close_server = threading.Event()
+ server = Server(response_handler, wait_to_close_event=close_server)
+
+ with server as (host, port):
+ url = 'http://{0}:{1}/path/to/thing/#view=edit&token=hunter2'.format(host, port)
+ r = requests.get(url)
+ raw_request = r.content
+
+ assert r.status_code == 200
+ assert len(r.history) == 2
+ assert r.history[0].request.url == url
+
+ # Verify we haven't overwritten the location with our previous fragment.
+ assert r.history[1].request.url == 'http://{0}:{1}/get#relevant-section'.format(host, port)
+ # Verify previous fragment is used and not the original.
+ assert r.url == 'http://{0}:{1}/final-url/#relevant-section'.format(host, port)
+
+ close_server.set()
diff --git a/tests/test_requests.py b/tests/test_requests.py
index 6d8a1d84..3c61cdfa 100644
--- a/tests/test_requests.py
+++ b/tests/test_requests.py
@@ -25,7 +25,7 @@ from requests.exceptions import (
ConnectionError, ConnectTimeout, InvalidScheme, InvalidURL,
MissingScheme, ReadTimeout, Timeout, RetryError, TooManyRedirects,
ProxyError, InvalidHeader, UnrewindableBodyError, InvalidBodyError,
- SSLError)
+ SSLError, InvalidProxyURL)
from requests.models import PreparedRequest
from requests.structures import CaseInsensitiveDict
from requests.sessions import SessionRedirectMixin
@@ -369,6 +369,14 @@ class TestRequests:
for header in purged_headers:
assert header not in next_resp.request.headers
+ def test_fragment_maintained_on_redirect(self, httpbin):
+ fragment = "#view=edit&token=hunter2"
+ r = requests.get(httpbin('redirect-to?url=get')+fragment)
+
+ assert len(r.history) > 0
+ assert r.history[0].request.url == httpbin('redirect-to?url=get')+fragment
+ assert r.url == httpbin('get')+fragment
+
def test_HTTP_200_OK_GET_WITH_PARAMS(self, httpbin):
heads = {'User-agent': 'Mozilla/5.0'}
@@ -640,6 +648,19 @@ class TestRequests:
with pytest.raises(ProxyError):
requests.get('http://localhost:1', proxies={'http': 'non-resolvable-address'})
+ def test_proxy_error_on_bad_url(self, httpbin, httpbin_secure):
+ with pytest.raises(InvalidProxyURL):
+ requests.get(httpbin_secure(), proxies={'https': 'http:/badproxyurl:3128'})
+
+ with pytest.raises(InvalidProxyURL):
+ requests.get(httpbin(), proxies={'http': 'http://:8080'})
+
+ with pytest.raises(InvalidProxyURL):
+ requests.get(httpbin_secure(), proxies={'https': 'https://'})
+
+ with pytest.raises(InvalidProxyURL):
+ requests.get(httpbin(), proxies={'http': 'http:///example.com:8080'})
+
def test_basicauth_with_netrc(self, httpbin):
auth = ('user', 'pass')
wrong_auth = ('wronguser', 'wrongpass')
@@ -1587,6 +1608,44 @@ class TestRequests:
assert 'http://' in s2.adapters
assert 'https://' in s2.adapters
+ def test_session_get_adapter_prefix_matching(self, httpbin):
+ prefix = 'https://example.com'
+ more_specific_prefix = prefix + '/some/path'
+
+ url_matching_only_prefix = prefix + '/another/path'
+ url_matching_more_specific_prefix = more_specific_prefix + '/longer/path'
+ url_not_matching_prefix = 'https://another.example.com/'
+
+ s = requests.Session()
+ prefix_adapter = HTTPAdapter()
+ more_specific_prefix_adapter = HTTPAdapter()
+ s.mount(prefix, prefix_adapter)
+ s.mount(more_specific_prefix, more_specific_prefix_adapter)
+
+ assert s.get_adapter(url_matching_only_prefix) is prefix_adapter
+ assert s.get_adapter(url_matching_more_specific_prefix) is more_specific_prefix_adapter
+ assert s.get_adapter(url_not_matching_prefix) not in (prefix_adapter, more_specific_prefix_adapter)
+
+ def test_session_get_adapter_prefix_matching_mixed_case(self, httpbin):
+ mixed_case_prefix = 'hTtPs://eXamPle.CoM/MixEd_CAse_PREfix'
+ url_matching_prefix = mixed_case_prefix + '/full_url'
+
+ s = requests.Session()
+ my_adapter = HTTPAdapter()
+ s.mount(mixed_case_prefix, my_adapter)
+
+ assert s.get_adapter(url_matching_prefix) is my_adapter
+
+ def test_session_get_adapter_prefix_matching_is_case_insensitive(self, httpbin):
+ mixed_case_prefix = 'hTtPs://eXamPle.CoM/MixEd_CAse_PREfix'
+ url_matching_prefix_with_different_case = 'HtTpS://exaMPLe.cOm/MiXeD_caSE_preFIX/another_url'
+
+ s = requests.Session()
+ my_adapter = HTTPAdapter()
+ s.mount(mixed_case_prefix, my_adapter)
+
+ assert s.get_adapter(url_matching_prefix_with_different_case) is my_adapter
+
def test_header_remove_is_case_insensitive(self, httpbin):
# From issue #1321
s = requests.Session()
diff --git a/tests/test_utils.py b/tests/test_utils.py
index 0cd93d7d..54b83335 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -2,15 +2,18 @@
import os
import copy
+import filecmp
from io import BytesIO
+import zipfile
+from collections import deque
import pytest
from requests import compat
from requests.cookies import RequestsCookieJar
from requests.structures import CaseInsensitiveDict
from requests.utils import (
- address_in_network, dotted_netmask,
- get_auth_from_url, get_encoding_from_headers,
+ address_in_network, dotted_netmask, extract_zipped_paths,
+ get_auth_from_url, _parse_content_type_header, get_encoding_from_headers,
get_encodings_from_content, get_environ_proxies,
guess_filename, guess_json_utf, is_ipv4_address,
is_valid_cidr, iter_slices, parse_dict_header,
@@ -256,6 +259,32 @@ class TestGuessFilename:
assert isinstance(result, expected_type)
+class TestExtractZippedPaths:
+
+ @pytest.mark.parametrize(
+ 'path', (
+ '/',
+ __file__,
+ pytest.__file__,
+ '/etc/invalid/location',
+ ))
+ def test_unzipped_paths_unchanged(self, path):
+ assert path == extract_zipped_paths(path)
+
+ def test_zipped_paths_extracted(self, tmpdir):
+ zipped_py = tmpdir.join('test.zip')
+ with zipfile.ZipFile(zipped_py.strpath, 'w') as f:
+ f.write(__file__)
+
+ _, name = os.path.splitdrive(__file__)
+ zipped_path = os.path.join(zipped_py.strpath, name.lstrip(r'\/'))
+ extracted_path = extract_zipped_paths(zipped_path)
+
+ assert extracted_path != zipped_path
+ assert os.path.exists(extracted_path)
+ assert filecmp.cmp(extracted_path, __file__)
+
+
class TestContentEncodingDetection:
def test_none(self):
@@ -444,6 +473,45 @@ def test_parse_dict_header(value, expected):
@pytest.mark.parametrize(
'value, expected', (
(
+ 'application/xml',
+ ('application/xml', {})
+ ),
+ (
+ 'application/json ; charset=utf-8',
+ ('application/json', {'charset': 'utf-8'})
+ ),
+ (
+ 'text/plain',
+ ('text/plain', {})
+ ),
+ (
+ 'multipart/form-data; boundary = something ; boundary2=\'something_else\' ; no_equals ',
+ ('multipart/form-data', {'boundary': 'something', 'boundary2': 'something_else', 'no_equals': True})
+ ),
+ (
+ 'multipart/form-data; boundary = something ; boundary2="something_else" ; no_equals ',
+ ('multipart/form-data', {'boundary': 'something', 'boundary2': 'something_else', 'no_equals': True})
+ ),
+ (
+ 'multipart/form-data; boundary = something ; \'boundary2=something_else\' ; no_equals ',
+ ('multipart/form-data', {'boundary': 'something', 'boundary2': 'something_else', 'no_equals': True})
+ ),
+ (
+ 'multipart/form-data; boundary = something ; "boundary2=something_else" ; no_equals ',
+ ('multipart/form-data', {'boundary': 'something', 'boundary2': 'something_else', 'no_equals': True})
+ ),
+ (
+ 'application/json ; ; ',
+ ('application/json', {})
+ )
+ ))
+def test__parse_content_type_header(value, expected):
+ assert _parse_content_type_header(value) == expected
+
+
+@pytest.mark.parametrize(
+ 'value, expected', (
+ (
CaseInsensitiveDict(),
None
),
@@ -546,6 +614,7 @@ def test_urldefragauth(url, expected):
('http://172.16.1.1/', True),
('http://172.16.1.1:5000/', True),
('http://localhost.localdomain:5000/v1.0/', True),
+ ('http://google.com:6000/', True),
('http://172.16.1.12/', False),
('http://172.16.1.12:5000/', False),
('http://google.com:5000/v1.0/', False),
@@ -554,12 +623,32 @@ def test_should_bypass_proxies(url, expected, monkeypatch):
"""Tests for function should_bypass_proxies to check if proxy
can be bypassed or not
"""
- monkeypatch.setenv('no_proxy', '192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1')
- monkeypatch.setenv('NO_PROXY', '192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1')
+ monkeypatch.setenv('no_proxy', '192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1, google.com:6000')
+ monkeypatch.setenv('NO_PROXY', '192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1, google.com:6000')
assert should_bypass_proxies(url, no_proxy=None) == expected
@pytest.mark.parametrize(
+ 'url, expected', (
+ ('http://172.16.1.1/', '172.16.1.1'),
+ ('http://172.16.1.1:5000/', '172.16.1.1'),
+ ('http://user:pass@172.16.1.1', '172.16.1.1'),
+ ('http://user:pass@172.16.1.1:5000', '172.16.1.1'),
+ ('http://hostname/', 'hostname'),
+ ('http://hostname:5000/', 'hostname'),
+ ('http://user:pass@hostname', 'hostname'),
+ ('http://user:pass@hostname:5000', 'hostname'),
+ ))
+def test_should_bypass_proxies_pass_only_hostname(url, expected, mocker):
+ """The proxy_bypass function should be called with a hostname or IP without
+ a port number or auth credentials.
+ """
+ proxy_bypass = mocker.patch('requests.utils.proxy_bypass')
+ should_bypass_proxies(url, no_proxy=None)
+ proxy_bypass.assert_called_once_with(expected)
+
+
+@pytest.mark.parametrize(
'cookiejar', (
compat.cookielib.CookieJar(),
RequestsCookieJar()
@@ -638,6 +727,7 @@ def test_should_bypass_proxies_win_registry(url, expected, override,
pass
ie_settings = RegHandle()
+ proxyEnableValues = deque([1, "1"])
def OpenKey(key, subkey):
return ie_settings
@@ -645,7 +735,9 @@ def test_should_bypass_proxies_win_registry(url, expected, override,
def QueryValueEx(key, value_name):
if key is ie_settings:
if value_name == 'ProxyEnable':
- return [1]
+ # this could be a string (REG_SZ) or a 32-bit number (REG_DWORD)
+ proxyEnableValues.rotate()
+ return [proxyEnableValues[0]]
elif value_name == 'ProxyOverride':
return [override]
@@ -656,6 +748,7 @@ def test_should_bypass_proxies_win_registry(url, expected, override,
monkeypatch.setenv('NO_PROXY', '')
monkeypatch.setattr(winreg, 'OpenKey', OpenKey)
monkeypatch.setattr(winreg, 'QueryValueEx', QueryValueEx)
+ assert should_bypass_proxies(url, None) == expected
@pytest.mark.parametrize(
diff --git a/tox.ini b/tox.ini
index 03f069ba..47b68ba5 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,5 +1,5 @@
[tox]
-envlist = py27,py33,py34,py35,py36
+envlist = py27,py34,py35,py36
[testenv]