author     Brian Cristante <brcrista@microsoft.com>  2018-10-15 09:44:40 -0400
committer  Brian Cristante <brcrista@microsoft.com>  2018-10-15 09:44:40 -0400
commit     543a4613f4ce4be37b3b11d0f3f03246f6d54e18 (patch)
tree       249c5797aedbae2e6278826c3dbc693db6a59528
parent     f454f609be1b63a1da9b41bf42ed53fb87dfd5be (diff)
parent     962cbf89a7d3915fc7042b1cea8c16cddc995440 (diff)
download   pip-543a4613f4ce4be37b3b11d0f3f03246f6d54e18.tar.gz
Merge branch 'master' into topic/remove-conditionals
-rw-r--r--  .travis.yml | 5
-rw-r--r--  AUTHORS.txt | 13
-rw-r--r--  NEWS.rst | 53
-rw-r--r--  appveyor.yml | 6
-rw-r--r--  docs/html/installing.rst | 2
-rw-r--r--  news/3037AE5E-B9C6-4BCE-BD7A-D68DD4529386.trivial | 0
-rw-r--r--  news/3249E95B-9AAF-4885-972B-268BEA9E0E5F.trivial | 0
-rw-r--r--  news/4187.feature | 5
-rw-r--r--  news/5124.trivial | 1
-rw-r--r--  news/5270.bugfix | 2
-rw-r--r--  news/5355.feature | 1
-rw-r--r--  news/5375.feature | 1
-rw-r--r--  news/5433.bugfix | 2
-rw-r--r--  news/5483.bugfix | 2
-rw-r--r--  news/5525.feature | 1
-rw-r--r--  news/5561.feature | 1
-rw-r--r--  news/5624.bugfix | 1
-rw-r--r--  news/5644.bugfix | 1
-rw-r--r--  news/5679.bugfix | 1
-rw-r--r--  news/5748.trivial | 1
-rw-r--r--  news/5753.trivial | 1
-rw-r--r--  news/5827.feature | 1
-rw-r--r--  news/5838.bugfix | 1
-rw-r--r--  news/5866.removal | 2
-rw-r--r--  news/5CACBF2F-4917-4C85-9F41-32C2F998E8AD.trivial | 0
-rw-r--r--  news/8EA012C5-DBB9-4909-A724-3E375CBF4D3A.trivial | 0
-rw-r--r--  news/8EC51B95-AFCC-4955-A416-5435650C8D15.trivial | 0
-rw-r--r--  news/94B8EE86-500E-4C27-BC8E-136550E30A62.trivial | 0
-rw-r--r--  news/ACEFD11F-E9D5-4EFD-901F-D11BAF89808A.trivial | 0
-rw-r--r--  news/BE5B0FA7-1A6E-47F6-AF80-E26B679352A0.trivial | 0
-rw-r--r--  news/CD18D0D2-2C80-43CD-9A26-ED2535E2E840.trivial | 0
-rw-r--r--  news/CF11FF8C-C348-4523-9DFC-A7FEBCADF154.trivial | 0
-rw-r--r--  news/D7B6B89D-2437-428E-A94B-56A3210F1C6F.trivial | 0
-rw-r--r--  news/D9A540D9-7665-48A5-A1C8-5141ED49E404.trivial (renamed from news/143AE47B-6AC8-490A-B9F5-2F30022A6918.trivial) | 0
-rw-r--r--  news/E74EEB42-0067-4C7E-B02A-01FAE49D1D98.trivial (renamed from news/15771DE2-0EE8-4776-84E3-5496E9C9C9CC.trivial) | 0
-rw-r--r--  news/pep517.vendor | 1
-rw-r--r--  news/user_guide_fix_requirements_file_ref.doc | 1
-rw-r--r--  src/pip/__init__.py | 2
-rw-r--r--  src/pip/_internal/cli/autocompletion.py | 3
-rw-r--r--  src/pip/_internal/cli/parser.py | 5
-rw-r--r--  src/pip/_internal/configuration.py | 19
-rw-r--r--  src/pip/_internal/exceptions.py | 19
-rw-r--r--  src/pip/_internal/index.py | 379
-rw-r--r--  src/pip/_internal/operations/freeze.py | 119
-rw-r--r--  src/pip/_internal/pep425tags.py | 8
-rw-r--r--  src/pip/_internal/utils/compat.py | 13
-rw-r--r--  src/pip/_internal/utils/misc.py | 18
-rw-r--r--  src/pip/_internal/utils/ui.py | 10
-rw-r--r--  src/pip/_internal/vcs/__init__.py | 45
-rw-r--r--  src/pip/_internal/vcs/bazaar.py | 8
-rw-r--r--  src/pip/_internal/vcs/git.py | 65
-rw-r--r--  src/pip/_internal/vcs/mercurial.py | 9
-rw-r--r--  src/pip/_internal/vcs/subversion.py | 7
-rw-r--r--  src/pip/_internal/wheel.py | 2
-rw-r--r--  src/pip/_vendor/certifi/__init__.py | 2
-rw-r--r--  src/pip/_vendor/certifi/cacert.pem | 226
-rw-r--r--  src/pip/_vendor/packaging/__about__.py | 4
-rw-r--r--  src/pip/_vendor/packaging/requirements.py | 8
-rw-r--r--  src/pip/_vendor/packaging/specifiers.py | 2
-rw-r--r--  src/pip/_vendor/pkg_resources/__init__.py | 67
-rw-r--r--  src/pip/_vendor/pkg_resources/py31compat.py | 5
-rw-r--r--  src/pip/_vendor/pyparsing.py | 46
-rw-r--r--  src/pip/_vendor/pytoml/parser.py | 4
-rw-r--r--  src/pip/_vendor/vendor.txt | 10
-rw-r--r--  tasks/vendoring/__init__.py | 3
-rw-r--r--  tests/data/packages/README.txt | 4
-rw-r--r--  tests/data/packages/paxpkg.tar.bz2 | bin 1094 -> 0 bytes
-rw-r--r--  tests/data/packages3/dinner/index.html | 4
-rw-r--r--  tests/data/packages3/index.html | 4
-rw-r--r--  tests/data/packages3/requiredinner/index.html | 4
-rw-r--r--  tests/functional/test_completion.py | 24
-rw-r--r--  tests/functional/test_download.py | 2
-rw-r--r--  tests/functional/test_install.py | 9
-rw-r--r--  tests/functional/test_install_vcs_git.py | 86
-rw-r--r--  tests/functional/test_search.py | 2
-rw-r--r--  tests/functional/test_vcs_git.py | 33
-rw-r--r--  tests/lib/configuration_helpers.py | 4
-rw-r--r--  tests/unit/test_configuration.py | 17
-rw-r--r--  tests/unit/test_finder.py | 2
-rw-r--r--  tests/unit/test_index.py | 69
-rw-r--r--  tests/unit/test_index_html_page.py | 162
-rw-r--r--  tests/unit/test_req.py | 3
-rw-r--r--  tests/unit/test_utils.py | 20
-rw-r--r--  tests/unit/test_vcs.py | 6
-rw-r--r--  tools/mypy-requirements.txt | 1
-rw-r--r--  tools/tox_pip.py | 28
-rw-r--r--  tox.ini | 10
87 files changed, 1098 insertions, 610 deletions
diff --git a/.travis.yml b/.travis.yml
index 6ecfd9b64..058209841 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,5 +1,4 @@
language: python
-sudo: false
cache: pip
dist: trusty
@@ -41,11 +40,9 @@ jobs:
- env: GROUP=1
python: 3.7
dist: xenial
- sudo: required
- env: GROUP=2
python: 3.7
dist: xenial
- sudo: required
- env: GROUP=1
python: 3.5
- env: GROUP=2
@@ -57,8 +54,10 @@ jobs:
- env: GROUP=1
python: 3.8-dev
+ dist: xenial
- env: GROUP=2
python: 3.8-dev
+ dist: xenial
# It's okay to fail on the in-development CPython version.
fast_finish: true
diff --git a/AUTHORS.txt b/AUTHORS.txt
index f02621e55..e845ac715 100644
--- a/AUTHORS.txt
+++ b/AUTHORS.txt
@@ -12,6 +12,7 @@ Alexandre Conrad <alexandre.conrad@gmail.com>
Alli <alzeih@users.noreply.github.com>
Anatoly Techtonik <techtonik@gmail.com>
Andrei Geacar <andrei.geacar@gmail.com>
+Andrew Gaul <andrew@gaul.org>
Andrey Bulgakov <mail@andreiko.ru>
Andrés Delfino <34587441+andresdelfino@users.noreply.github.com>
Andrés Delfino <adelfino@gmail.com>
@@ -36,6 +37,8 @@ Ashley Manton <ajd.manton@googlemail.com>
Atsushi Odagiri <aodagx@gmail.com>
Avner Cohen <israbirding@gmail.com>
Baptiste Mispelon <bmispelon@gmail.com>
+Barney Gale <barney.gale@gmail.com>
+barneygale <barney.gale@gmail.com>
Bartek Ogryczak <b.ogryczak@gmail.com>
Bastian Venthur <mail@venthur.de>
Ben Darnell <ben@bendarnell.com>
@@ -46,6 +49,7 @@ Benjamin VanEvery <ben@simondata.com>
Benoit Pierre <benoit.pierre@gmail.com>
Berker Peksag <berker.peksag@gmail.com>
Bernardo B. Marques <bernardo.fire@gmail.com>
+Bernhard M. Wiedemann <bwiedemann@suse.de>
Bogdan Opanchuk <bogdan@opanchuk.net>
Brad Erickson <eosrei@gmail.com>
Bradley Ayers <bradley.ayers@gmail.com>
@@ -55,6 +59,7 @@ Brian Rosner <brosner@gmail.com>
BrownTruck <BrownTruck@users.noreply.github.com>
Bruno Oliveira <nicoddemus@gmail.com>
Bruno Renié <brutasse@gmail.com>
+Bstrdsmkr <bstrdsmkr@gmail.com>
Buck Golemon <buck@yelp.com>
burrows <burrows@preveil.com>
Bussonnier Matthias <bussonniermatthias@gmail.com>
@@ -157,6 +162,7 @@ Herbert Pfennig <herbert@albinen.com>
Hsiaoming Yang <lepture@me.com>
Hugo <hugovk@users.noreply.github.com>
Hugo Lopes Tavares <hltbra@gmail.com>
+hugovk <hugovk@users.noreply.github.com>
Hynek Schlawack <hs@ox.cx>
Ian Bicking <ianb@colorstudy.com>
Ian Cordasco <graffatcolmingov@gmail.com>
@@ -182,6 +188,7 @@ Jannis Leidel <jannis@leidel.info>
jarondl <me@jarondl.net>
Jason R. Coombs <jaraco@jaraco.com>
Jay Graves <jay@skabber.com>
+Jean-Christophe Fillion-Robin <jchris.fillionr@kitware.com>
Jeff Barber <jbarber@computer.org>
Jeff Dairiki <dairiki@dairiki.org>
Jeremy Stanley <fungi@yuggoth.org>
@@ -194,6 +201,7 @@ Jon Dufresne <jon.dufresne@gmail.com>
Jon Parise <jon@indelible.org>
Jon Wayne Parrott <jjramone13@gmail.com>
Jonas Nockert <jonasnockert@gmail.com>
+Jonathan Herbert <foohyfooh@gmail.com>
Joost Molenaar <j.j.molenaar@gmail.com>
Jorge Niedbalski <niedbalski@gmail.com>
Joseph Long <jdl@fastmail.fm>
@@ -219,10 +227,12 @@ kpinc <kop@meme.com>
Kumar McMillan <kumar.mcmillan@gmail.com>
Kyle Persohn <kyle.persohn@gmail.com>
Laurent Bristiel <laurent@bristiel.com>
+Laurie Opperman <laurie@sitesee.com.au>
Leon Sasson <leonsassonha@gmail.com>
Lev Givon <lev@columbia.edu>
Lincoln de Sousa <lincoln@comum.org>
Lipis <lipiridis@gmail.com>
+Loren Carvalho <lcarvalho@linkedin.com>
Lucas Cimon <lucas.cimon@gmail.com>
Ludovic Gasc <gmludo@gmail.com>
Luke Macken <lmacken@redhat.com>
@@ -259,6 +269,7 @@ Michael E. Karpeles <michael.karpeles@gmail.com>
Michael Klich <michal@michalklich.com>
Michael Williamson <mike@zwobble.org>
michaelpacer <michaelpacer@gmail.com>
+Mickaël Schoentgen <mschoentgen@nuxeo.com>
Miguel Araujo Perez <miguel.araujo.perez@gmail.com>
Mihir Singh <git.service@mihirsingh.com>
Min RK <benjaminrk@gmail.com>
@@ -272,6 +283,7 @@ Nehal J Wani <nehaljw.kkd1@gmail.com>
Nick Coghlan <ncoghlan@gmail.com>
Nick Stenning <nick@whiteink.com>
Nikhil Benesch <nikhil.benesch@gmail.com>
+Nitesh Sharma <nbsharma@outlook.com>
Nowell Strite <nowell@strite.org>
nvdv <modestdev@gmail.com>
Ofekmeister <ofekmeister@gmail.com>
@@ -380,6 +392,7 @@ Tomer Chachamu <tomer.chachamu@gmail.com>
Tony Zhaocheng Tan <tony@tonytan.io>
Toshio Kuratomi <toshio@fedoraproject.org>
Travis Swicegood <development@domain51.com>
+Tzu-ping Chung <uranusjr@gmail.com>
Valentin Haenel <valentin.haenel@gmx.de>
Victor Stinner <victor.stinner@gmail.com>
Viktor Szépe <viktor@szepe.net>
diff --git a/NEWS.rst b/NEWS.rst
index b7fc3f7a2..da44faec5 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -7,6 +7,53 @@
.. towncrier release notes start
+18.1 (2018-10-05)
+=================
+
+Features
+--------
+
+- Allow PEP 508 URL requirements to be used as dependencies.
+
+ As a security measure, pip will raise an exception when installing packages from
+ PyPI if those packages depend on packages not also hosted on PyPI.
+ In the future, PyPI will block uploading packages with such external URL dependencies directly. (`#4187 <https://github.com/pypa/pip/issues/4187>`_)
+- Upgrade pyparsing to 2.2.1. (`#5013 <https://github.com/pypa/pip/issues/5013>`_)
+- Allows dist options (--abi, --python-version, --platform, --implementation) when installing with --target (`#5355 <https://github.com/pypa/pip/issues/5355>`_)
+- Support passing ``svn+ssh`` URLs with a username to ``pip install -e``. (`#5375 <https://github.com/pypa/pip/issues/5375>`_)
+- pip now ensures that the RECORD file is sorted when installing from a wheel file. (`#5525 <https://github.com/pypa/pip/issues/5525>`_)
+- Add support for Python 3.7. (`#5561 <https://github.com/pypa/pip/issues/5561>`_)
+- Malformed configuration files now show helpful error messages, instead of tracebacks. (`#5798 <https://github.com/pypa/pip/issues/5798>`_)
+
+Bug Fixes
+---------
+
+- Checkout the correct branch when doing an editable Git install. (`#2037 <https://github.com/pypa/pip/issues/2037>`_)
+- Run self-version-check only on commands that may access the index, instead of
+ trying on every run and failing to do so due to missing options. (`#5433 <https://github.com/pypa/pip/issues/5433>`_)
+- Allow a Git ref to be installed over an existing installation. (`#5624 <https://github.com/pypa/pip/issues/5624>`_)
+- Show a better error message when a configuration option has an invalid value. (`#5644 <https://github.com/pypa/pip/issues/5644>`_)
+- Always revalidate cached simple API pages instead of blindly caching them for up to 10
+ minutes. (`#5670 <https://github.com/pypa/pip/issues/5670>`_)
+- Avoid caching self-version-check information when cache is disabled. (`#5679 <https://github.com/pypa/pip/issues/5679>`_)
+- Avoid traceback printing on autocomplete after flags in the CLI. (`#5751 <https://github.com/pypa/pip/issues/5751>`_)
+- Fix incorrect parsing of egg names if pip needs to guess the package name. (`#5819 <https://github.com/pypa/pip/issues/5819>`_)
+
+Vendored Libraries
+------------------
+
+- Upgrade certifi to 2018.8.24
+- Upgrade packaging to 18.0
+- Add pep517 version 0.2
+- Upgrade pytoml to 0.1.19
+- Upgrade pkg_resources to 40.4.3 (via setuptools)
+
+Improved Documentation
+----------------------
+
+- Fix "Requirements Files" reference in User Guide (`#user_guide_fix_requirements_file_ref <https://github.com/pypa/pip/issues/user_guide_fix_requirements_file_ref>`_)
+
+
18.0 (2018-07-22)
=================
@@ -17,7 +64,7 @@ Process
- Formally document our deprecation process as a minimum of 6 months of deprecation
warnings.
- Adopt and document NEWS fragment writing style.
-- Switch to releasing a new, non bug fix version of pip every 3 months.
+- Switch to releasing a new, non-bug fix version of pip every 3 months.
Deprecations and Removals
-------------------------
@@ -118,7 +165,7 @@ Bug Fixes
---------
- Prevent false-positive installation warnings due to incomplete name
- normalizaton. (#5134)
+ normalization. (#5134)
- Fix issue where installing from Git with a short SHA would fail. (#5140)
- Accept pre-release versions when checking for conflicts with pip check or pip
install. (#5141)
@@ -1207,7 +1254,7 @@ Improved Documentation
- **Dropped support for Python 2.4** The minimum supported Python version is
now Python 2.5.
-- Fixed pypi mirror support being broken on some DNS responses. Thanks
+- Fixed PyPI mirror support being broken on some DNS responses. Thanks
philwhin. (#605)
- Fixed pip uninstall removing files it didn't install. Thanks pjdelport.
(#355)
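
Illustration (not part of the patch): the "PEP 508 URL requirements" feature released in 18.1 accepts the ``name @ URL`` form as a dependency specifier. A minimal, hypothetical setup.py fragment; the project name and archive URL below are invented:

    # Hypothetical example of a PEP 508 direct-URL dependency that pip >= 18.1
    # can resolve; the package name and URL are illustrative only.
    from setuptools import setup

    setup(
        name="example-project",
        version="0.1",
        install_requires=[
            # "name @ URL" per PEP 508
            "somepkg @ https://example.com/downloads/somepkg-1.0.tar.gz",
        ],
    )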
diff --git a/appveyor.yml b/appveyor.yml
index 7fe18eacb..cfad2a52f 100644
--- a/appveyor.yml
+++ b/appveyor.yml
@@ -2,6 +2,8 @@ environment:
matrix:
# Unit and integration tests.
- PYTHON: "C:\\Python27"
+ - PYTHON: "C:\\Python36-x64"
+ - PYTHON: "C:\\Python27"
RUN_INTEGRATION_TESTS: "True"
- PYTHON: "C:\\Python36-x64"
RUN_INTEGRATION_TESTS: "True"
@@ -58,8 +60,10 @@ test_script:
subst T: $env:TEMP
$env:TEMP = "T:\"
$env:TMP = "T:\"
- tox -e py -- -m unit -n 3
if ($env:RUN_INTEGRATION_TESTS -eq "True") {
tox -e py -- -m integration -n 3 --duration=5
}
+ else {
+ tox -e py -- -m unit -n 3
+ }
}
diff --git a/docs/html/installing.rst b/docs/html/installing.rst
index 99eef7360..35ba05db9 100644
--- a/docs/html/installing.rst
+++ b/docs/html/installing.rst
@@ -23,8 +23,6 @@ To install pip, securely download `get-pip.py
curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py
-As when running any script downloaded from the web, ensure that you have
-reviewed the code and are happy that it works as you expect.
Then run the following::
python get-pip.py
diff --git a/news/3037AE5E-B9C6-4BCE-BD7A-D68DD4529386.trivial b/news/3037AE5E-B9C6-4BCE-BD7A-D68DD4529386.trivial
deleted file mode 100644
index e69de29bb..000000000
--- a/news/3037AE5E-B9C6-4BCE-BD7A-D68DD4529386.trivial
+++ /dev/null
diff --git a/news/3249E95B-9AAF-4885-972B-268BEA9E0E5F.trivial b/news/3249E95B-9AAF-4885-972B-268BEA9E0E5F.trivial
deleted file mode 100644
index e69de29bb..000000000
--- a/news/3249E95B-9AAF-4885-972B-268BEA9E0E5F.trivial
+++ /dev/null
diff --git a/news/4187.feature b/news/4187.feature
deleted file mode 100644
index 03a874bc3..000000000
--- a/news/4187.feature
+++ /dev/null
@@ -1,5 +0,0 @@
-Allow PEP 508 URL requirements to be used as dependencies.
-
-As a security measure, pip will raise an exception when installing packages from
-PyPI if those packages depend on packages not also hosted on PyPI.
-In the future, PyPI will block uploading packages with such external URL dependencies directly.
diff --git a/news/5124.trivial b/news/5124.trivial
deleted file mode 100644
index bc0a19b87..000000000
--- a/news/5124.trivial
+++ /dev/null
@@ -1 +0,0 @@
-Limit progress bar update interval to 200 ms.
diff --git a/news/5270.bugfix b/news/5270.bugfix
new file mode 100644
index 000000000..9db8184c3
--- /dev/null
+++ b/news/5270.bugfix
@@ -0,0 +1,2 @@
+Handle `requests.exceptions.RetryError` raised in `PackageFinder` that was
+causing pip to fail silently when some indexes were unreachable.
diff --git a/news/5355.feature b/news/5355.feature
deleted file mode 100644
index fc2e108f2..000000000
--- a/news/5355.feature
+++ /dev/null
@@ -1 +0,0 @@
-Allows dist options (--abi, --python-version, --platform, --implementation) when installing with --target
diff --git a/news/5375.feature b/news/5375.feature
deleted file mode 100644
index 5e6460d3d..000000000
--- a/news/5375.feature
+++ /dev/null
@@ -1 +0,0 @@
-Support passing ``svn+ssh`` URLs with a username to ``pip install -e``.
diff --git a/news/5433.bugfix b/news/5433.bugfix
deleted file mode 100644
index bed41fce8..000000000
--- a/news/5433.bugfix
+++ /dev/null
@@ -1,2 +0,0 @@
-Run self-version-check only on commands that may access the index, instead of
-trying on every run and failing to do so due to missing options.
diff --git a/news/5483.bugfix b/news/5483.bugfix
new file mode 100644
index 000000000..9db8184c3
--- /dev/null
+++ b/news/5483.bugfix
@@ -0,0 +1,2 @@
+Handle `requests.exceptions.RetryError` raised in `PackageFinder` that was
+causing pip to fail silently when some indexes were unreachable.
diff --git a/news/5525.feature b/news/5525.feature
deleted file mode 100644
index 1af8be6f9..000000000
--- a/news/5525.feature
+++ /dev/null
@@ -1 +0,0 @@
-pip now ensures that the RECORD file is sorted when installing from a wheel file.
diff --git a/news/5561.feature b/news/5561.feature
deleted file mode 100644
index 30e37cb99..000000000
--- a/news/5561.feature
+++ /dev/null
@@ -1 +0,0 @@
-Add support for Python 3.7.
diff --git a/news/5624.bugfix b/news/5624.bugfix
deleted file mode 100644
index fc6b58fe8..000000000
--- a/news/5624.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Allow a Git ref to be installed over an existing installation.
diff --git a/news/5644.bugfix b/news/5644.bugfix
deleted file mode 100644
index 39b720772..000000000
--- a/news/5644.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Show a better error message when a configuration option has an invalid value.
\ No newline at end of file
diff --git a/news/5679.bugfix b/news/5679.bugfix
deleted file mode 100644
index 250166925..000000000
--- a/news/5679.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Avoid caching self-version-check information when cache is disabled.
diff --git a/news/5748.trivial b/news/5748.trivial
deleted file mode 100644
index 6ca14b372..000000000
--- a/news/5748.trivial
+++ /dev/null
@@ -1 +0,0 @@
-Remove the unmatched bracket in the --no-clean option's help text.
\ No newline at end of file
diff --git a/news/5753.trivial b/news/5753.trivial
deleted file mode 100644
index 4e6860c24..000000000
--- a/news/5753.trivial
+++ /dev/null
@@ -1 +0,0 @@
-Fix links to NEWS entry guidelines.
diff --git a/news/5827.feature b/news/5827.feature
new file mode 100644
index 000000000..2ef8d45be
--- /dev/null
+++ b/news/5827.feature
@@ -0,0 +1 @@
+A warning message is emitted when dropping an ``--[extra-]index-url`` value that points to an existing local directory.
diff --git a/news/5838.bugfix b/news/5838.bugfix
new file mode 100644
index 000000000..b83a9fa91
--- /dev/null
+++ b/news/5838.bugfix
@@ -0,0 +1 @@
+Fix content type detection if a directory named like an archive is used as a package source.
diff --git a/news/5866.removal b/news/5866.removal
new file mode 100644
index 000000000..f9bbd0549
--- /dev/null
+++ b/news/5866.removal
@@ -0,0 +1,2 @@
+Remove the deprecated SVN editable detection based on dependency links
+during freeze.
diff --git a/news/5CACBF2F-4917-4C85-9F41-32C2F998E8AD.trivial b/news/5CACBF2F-4917-4C85-9F41-32C2F998E8AD.trivial
deleted file mode 100644
index e69de29bb..000000000
--- a/news/5CACBF2F-4917-4C85-9F41-32C2F998E8AD.trivial
+++ /dev/null
diff --git a/news/8EA012C5-DBB9-4909-A724-3E375CBF4D3A.trivial b/news/8EA012C5-DBB9-4909-A724-3E375CBF4D3A.trivial
deleted file mode 100644
index e69de29bb..000000000
--- a/news/8EA012C5-DBB9-4909-A724-3E375CBF4D3A.trivial
+++ /dev/null
diff --git a/news/8EC51B95-AFCC-4955-A416-5435650C8D15.trivial b/news/8EC51B95-AFCC-4955-A416-5435650C8D15.trivial
deleted file mode 100644
index e69de29bb..000000000
--- a/news/8EC51B95-AFCC-4955-A416-5435650C8D15.trivial
+++ /dev/null
diff --git a/news/94B8EE86-500E-4C27-BC8E-136550E30A62.trivial b/news/94B8EE86-500E-4C27-BC8E-136550E30A62.trivial
deleted file mode 100644
index e69de29bb..000000000
--- a/news/94B8EE86-500E-4C27-BC8E-136550E30A62.trivial
+++ /dev/null
diff --git a/news/ACEFD11F-E9D5-4EFD-901F-D11BAF89808A.trivial b/news/ACEFD11F-E9D5-4EFD-901F-D11BAF89808A.trivial
deleted file mode 100644
index e69de29bb..000000000
--- a/news/ACEFD11F-E9D5-4EFD-901F-D11BAF89808A.trivial
+++ /dev/null
diff --git a/news/BE5B0FA7-1A6E-47F6-AF80-E26B679352A0.trivial b/news/BE5B0FA7-1A6E-47F6-AF80-E26B679352A0.trivial
deleted file mode 100644
index e69de29bb..000000000
--- a/news/BE5B0FA7-1A6E-47F6-AF80-E26B679352A0.trivial
+++ /dev/null
diff --git a/news/CD18D0D2-2C80-43CD-9A26-ED2535E2E840.trivial b/news/CD18D0D2-2C80-43CD-9A26-ED2535E2E840.trivial
deleted file mode 100644
index e69de29bb..000000000
--- a/news/CD18D0D2-2C80-43CD-9A26-ED2535E2E840.trivial
+++ /dev/null
diff --git a/news/CF11FF8C-C348-4523-9DFC-A7FEBCADF154.trivial b/news/CF11FF8C-C348-4523-9DFC-A7FEBCADF154.trivial
deleted file mode 100644
index e69de29bb..000000000
--- a/news/CF11FF8C-C348-4523-9DFC-A7FEBCADF154.trivial
+++ /dev/null
diff --git a/news/D7B6B89D-2437-428E-A94B-56A3210F1C6F.trivial b/news/D7B6B89D-2437-428E-A94B-56A3210F1C6F.trivial
deleted file mode 100644
index e69de29bb..000000000
--- a/news/D7B6B89D-2437-428E-A94B-56A3210F1C6F.trivial
+++ /dev/null
diff --git a/news/143AE47B-6AC8-490A-B9F5-2F30022A6918.trivial b/news/D9A540D9-7665-48A5-A1C8-5141ED49E404.trivial
index e69de29bb..e69de29bb 100644
--- a/news/143AE47B-6AC8-490A-B9F5-2F30022A6918.trivial
+++ b/news/D9A540D9-7665-48A5-A1C8-5141ED49E404.trivial
diff --git a/news/15771DE2-0EE8-4776-84E3-5496E9C9C9CC.trivial b/news/E74EEB42-0067-4C7E-B02A-01FAE49D1D98.trivial
index e69de29bb..e69de29bb 100644
--- a/news/15771DE2-0EE8-4776-84E3-5496E9C9C9CC.trivial
+++ b/news/E74EEB42-0067-4C7E-B02A-01FAE49D1D98.trivial
diff --git a/news/pep517.vendor b/news/pep517.vendor
deleted file mode 100644
index 53f5c85d4..000000000
--- a/news/pep517.vendor
+++ /dev/null
@@ -1 +0,0 @@
-Add pep517 version 0.2
diff --git a/news/user_guide_fix_requirements_file_ref.doc b/news/user_guide_fix_requirements_file_ref.doc
deleted file mode 100644
index 07112d375..000000000
--- a/news/user_guide_fix_requirements_file_ref.doc
+++ /dev/null
@@ -1 +0,0 @@
-Fix "Requirements Files" reference in User Guide
diff --git a/src/pip/__init__.py b/src/pip/__init__.py
index 2e8c4e43d..98e17e8d3 100644
--- a/src/pip/__init__.py
+++ b/src/pip/__init__.py
@@ -1 +1 @@
-__version__ = "18.1.dev0"
+__version__ = "19.0.dev0"
diff --git a/src/pip/_internal/cli/autocompletion.py b/src/pip/_internal/cli/autocompletion.py
index de97463fa..0a04199e6 100644
--- a/src/pip/_internal/cli/autocompletion.py
+++ b/src/pip/_internal/cli/autocompletion.py
@@ -116,7 +116,8 @@ def get_path_completion_type(cwords, cword, opts):
continue
for o in str(opt).split('/'):
if cwords[cword - 2].split('=')[0] == o:
- if any(x in ('path', 'file', 'dir')
+ if not opt.metavar or any(
+ x in ('path', 'file', 'dir')
for x in opt.metavar.split('/')):
return opt.metavar
diff --git a/src/pip/_internal/cli/parser.py b/src/pip/_internal/cli/parser.py
index 269190b59..e1eaac420 100644
--- a/src/pip/_internal/cli/parser.py
+++ b/src/pip/_internal/cli/parser.py
@@ -9,6 +9,7 @@ from distutils.util import strtobool
from pip._vendor.six import string_types
+from pip._internal.cli.status_codes import UNKNOWN_ERROR
from pip._internal.configuration import Configuration, ConfigurationError
from pip._internal.utils.compat import get_terminal_size
@@ -232,7 +233,7 @@ class ConfigOptionParser(CustomOptionParser):
try:
self.config.load()
except ConfigurationError as err:
- self.exit(2, err.args[0])
+ self.exit(UNKNOWN_ERROR, str(err))
defaults = self._update_defaults(self.defaults.copy()) # ours
for option in self._get_all_options():
@@ -244,7 +245,7 @@ class ConfigOptionParser(CustomOptionParser):
def error(self, msg):
self.print_usage(sys.stderr)
- self.exit(2, "%s\n" % msg)
+ self.exit(UNKNOWN_ERROR, "%s\n" % msg)
def invalid_config_error_message(action, key, val):
diff --git a/src/pip/_internal/configuration.py b/src/pip/_internal/configuration.py
index 32133de01..fe6df9b75 100644
--- a/src/pip/_internal/configuration.py
+++ b/src/pip/_internal/configuration.py
@@ -18,7 +18,9 @@ import os
from pip._vendor import six
from pip._vendor.six.moves import configparser
-from pip._internal.exceptions import ConfigurationError
+from pip._internal.exceptions import (
+ ConfigurationError, ConfigurationFileCouldNotBeLoaded,
+)
from pip._internal.locations import (
legacy_config_file, new_config_file, running_under_virtualenv,
site_config_files, venv_config_file,
@@ -289,11 +291,16 @@ class Configuration(object):
try:
parser.read(fname)
except UnicodeDecodeError:
- raise ConfigurationError((
- "ERROR: "
- "Configuration file contains invalid %s characters.\n"
- "Please fix your configuration, located at %s\n"
- ) % (locale.getpreferredencoding(False), fname))
+ # See https://github.com/pypa/pip/issues/4963
+ raise ConfigurationFileCouldNotBeLoaded(
+ reason="contains invalid {} characters".format(
+ locale.getpreferredencoding(False)
+ ),
+ fname=fname,
+ )
+ except configparser.Error as error:
+ # See https://github.com/pypa/pip/issues/4893
+ raise ConfigurationFileCouldNotBeLoaded(error=error)
return parser
def _load_environment_vars(self):
diff --git a/src/pip/_internal/exceptions.py b/src/pip/_internal/exceptions.py
index ad6f41253..f1ca6f36d 100644
--- a/src/pip/_internal/exceptions.py
+++ b/src/pip/_internal/exceptions.py
@@ -247,3 +247,22 @@ class HashMismatch(HashError):
class UnsupportedPythonVersion(InstallationError):
"""Unsupported python version according to Requires-Python package
metadata."""
+
+
+class ConfigurationFileCouldNotBeLoaded(ConfigurationError):
+ """When there are errors while loading a configuration file
+ """
+
+ def __init__(self, reason="could not be loaded", fname=None, error=None):
+ super(ConfigurationFileCouldNotBeLoaded, self).__init__(error)
+ self.reason = reason
+ self.fname = fname
+ self.error = error
+
+ def __str__(self):
+ if self.fname is not None:
+ message_part = " in {}.".format(self.fname)
+ else:
+ assert self.error is not None
+ message_part = ".\n{}\n".format(self.error.message)
+ return "Configuration file {}{}".format(self.reason, message_part)
diff --git a/src/pip/_internal/index.py b/src/pip/_internal/index.py
index ea8363e78..012e87a82 100644
--- a/src/pip/_internal/index.py
+++ b/src/pip/_internal/index.py
@@ -16,7 +16,7 @@ from pip._vendor.distlib.compat import unescape
from pip._vendor.packaging import specifiers
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.packaging.version import parse as parse_version
-from pip._vendor.requests.exceptions import SSLError
+from pip._vendor.requests.exceptions import RetryError, SSLError
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._vendor.six.moves.urllib import request as urllib_request
@@ -34,7 +34,7 @@ from pip._internal.utils.compat import ipaddress
from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import (
- ARCHIVE_EXTENSIONS, SUPPORTED_EXTENSIONS, cached_property, normalize_path,
+ ARCHIVE_EXTENSIONS, SUPPORTED_EXTENSIONS, normalize_path,
remove_auth_from_url,
)
from pip._internal.utils.packaging import check_requires_python
@@ -59,6 +59,173 @@ SECURE_ORIGINS = [
logger = logging.getLogger(__name__)
+def _match_vcs_scheme(url):
+ """Look for VCS schemes in the URL.
+
+ Returns the matched VCS scheme, or None if there's no match.
+ """
+ from pip._internal.vcs import VcsSupport
+ for scheme in VcsSupport.schemes:
+ if url.lower().startswith(scheme) and url[len(scheme)] in '+:':
+ return scheme
+ return None
+
+
+def _is_url_like_archive(url):
+ """Return whether the URL looks like an archive.
+ """
+ filename = Link(url).filename
+ for bad_ext in ARCHIVE_EXTENSIONS:
+ if filename.endswith(bad_ext):
+ return True
+ return False
+
+
+class _NotHTML(Exception):
+ def __init__(self, content_type, request_desc):
+ super(_NotHTML, self).__init__(content_type, request_desc)
+ self.content_type = content_type
+ self.request_desc = request_desc
+
+
+def _ensure_html_header(response):
+ """Check the Content-Type header to ensure the response contains HTML.
+
+ Raises `_NotHTML` if the content type is not text/html.
+ """
+ content_type = response.headers.get("Content-Type", "")
+ if not content_type.lower().startswith("text/html"):
+ raise _NotHTML(content_type, response.request.method)
+
+
+class _NotHTTP(Exception):
+ pass
+
+
+def _ensure_html_response(url, session):
+ """Send a HEAD request to the URL, and ensure the response contains HTML.
+
+ Raises `_NotHTTP` if the URL is not available for a HEAD request, or
+ `_NotHTML` if the content type is not text/html.
+ """
+ scheme, netloc, path, query, fragment = urllib_parse.urlsplit(url)
+ if scheme not in {'http', 'https'}:
+ raise _NotHTTP()
+
+ resp = session.head(url, allow_redirects=True)
+ resp.raise_for_status()
+
+ _ensure_html_header(resp)
+
+
+def _get_html_response(url, session):
+ """Access an HTML page with GET, and return the response.
+
+ This consists of three parts:
+
+ 1. If the URL looks suspiciously like an archive, send a HEAD first to
+ check the Content-Type is HTML, to avoid downloading a large file.
+ Raise `_NotHTTP` if the content type cannot be determined, or
+ `_NotHTML` if it is not HTML.
+ 2. Actually perform the request. Raise HTTP exceptions on network failures.
+ 3. Check the Content-Type header to make sure we got HTML, and raise
+ `_NotHTML` otherwise.
+ """
+ if _is_url_like_archive(url):
+ _ensure_html_response(url, session=session)
+
+ logger.debug('Getting page %s', url)
+
+ resp = session.get(
+ url,
+ headers={
+ "Accept": "text/html",
+ # We don't want to blindly returned cached data for
+ # /simple/, because authors generally expecting that
+ # twine upload && pip install will function, but if
+ # they've done a pip install in the last ~10 minutes
+ # it won't. Thus by setting this to zero we will not
+ # blindly use any cached data, however the benefit of
+ # using max-age=0 instead of no-cache, is that we will
+ # still support conditional requests, so we will still
+ # minimize traffic sent in cases where the page hasn't
+ # changed at all, we will just always incur the round
+ # trip for the conditional GET now instead of only
+ # once per 10 minutes.
+ # For more information, please see pypa/pip#5670.
+ "Cache-Control": "max-age=0",
+ },
+ )
+ resp.raise_for_status()
+
+ # The check for archives above only works if the url ends with
+ # something that looks like an archive. However that is not a
+ # requirement of an url. Unless we issue a HEAD request on every
+ # url we cannot know ahead of time for sure if something is HTML
+ # or not. However we can check after we've downloaded it.
+ _ensure_html_header(resp)
+
+ return resp
+
+
+def _handle_get_page_fail(link, reason, url, meth=None):
+ if meth is None:
+ meth = logger.debug
+ meth("Could not fetch URL %s: %s - skipping", link, reason)
+
+
+def _get_html_page(link, session=None):
+ if session is None:
+ raise TypeError(
+ "_get_html_page() missing 1 required keyword argument: 'session'"
+ )
+
+ url = link.url.split('#', 1)[0]
+
+ # Check for VCS schemes that do not support lookup as web pages.
+ vcs_scheme = _match_vcs_scheme(url)
+ if vcs_scheme:
+ logger.debug('Cannot look at %s URL %s', vcs_scheme, link)
+ return None
+
+ # Tack index.html onto file:// URLs that point to directories
+ scheme, _, path, _, _, _ = urllib_parse.urlparse(url)
+ if (scheme == 'file' and os.path.isdir(urllib_request.url2pathname(path))):
+ # add trailing slash if not present so urljoin doesn't trim
+ # final segment
+ if not url.endswith('/'):
+ url += '/'
+ url = urllib_parse.urljoin(url, 'index.html')
+ logger.debug(' file: URL is directory, getting %s', url)
+
+ try:
+ resp = _get_html_response(url, session=session)
+ except _NotHTTP as exc:
+ logger.debug(
+ 'Skipping page %s because it looks like an archive, and cannot '
+ 'be checked by HEAD.', link,
+ )
+ except _NotHTML as exc:
+ logger.debug(
+ 'Skipping page %s because the %s request got Content-Type: %s',
+ link, exc.request_desc, exc.content_type,
+ )
+ except requests.HTTPError as exc:
+ _handle_get_page_fail(link, exc, url)
+ except RetryError as exc:
+ _handle_get_page_fail(link, exc, url)
+ except SSLError as exc:
+ reason = "There was a problem confirming the ssl certificate: "
+ reason += str(exc)
+ _handle_get_page_fail(link, reason, url, meth=logger.info)
+ except requests.ConnectionError as exc:
+ _handle_get_page_fail(link, "connection error: %s" % exc, url)
+ except requests.Timeout:
+ _handle_get_page_fail(link, "timed out", url)
+ else:
+ return HTMLPage(resp.content, resp.url, resp.headers)
+
+
class PackageFinder(object):
"""This finds packages.
@@ -177,7 +344,7 @@ class PackageFinder(object):
"Dependency Links processing has been deprecated and will be "
"removed in a future release.",
replacement="PEP 508 URL dependencies",
- gone_in="18.2",
+ gone_in="19.0",
issue=4187,
)
self.dependency_links.extend(links)
@@ -216,6 +383,11 @@ class PackageFinder(object):
sort_path(os.path.join(path, item))
elif is_file_url:
urls.append(url)
+ else:
+ logger.warning(
+ "Path '{0}' is ignored: "
+ "it is a directory.".format(path),
+ )
elif os.path.isfile(path):
sort_path(path)
else:
@@ -415,7 +587,7 @@ class PackageFinder(object):
logger.debug('Analyzing links from page %s', page.url)
with indent_log():
page_versions.extend(
- self._package_versions(page.links, search)
+ self._package_versions(page.iter_links(), search)
)
dependency_versions = self._package_versions(
@@ -556,7 +728,7 @@ class PackageFinder(object):
continue
seen.add(location)
- page = self._get_page(location)
+ page = _get_html_page(location, session=self.session)
if page is None:
continue
@@ -673,9 +845,6 @@ class PackageFinder(object):
return InstallationCandidate(search.supplied, version, link)
- def _get_page(self, link):
- return HTMLPage.get_page(link, session=self.session)
-
def egg_info_matches(
egg_info, search_name, link,
@@ -694,7 +863,7 @@ def egg_info_matches(
return None
if search_name is None:
full_match = match.group(0)
- return full_match[full_match.index('-'):]
+ return full_match.split('-', 1)[-1]
name = match.group(0).lower()
# To match the "safe" name that pkg_resources creates:
name = name.replace('_', '-')
@@ -706,169 +875,71 @@ def egg_info_matches(
return None
-class HTMLPage(object):
- """Represents one page, along with its URL"""
+def _determine_base_url(document, page_url):
+ """Determine the HTML document's base URL.
- def __init__(self, content, url, headers=None):
- # Determine if we have any encoding information in our headers
- encoding = None
- if headers and "Content-Type" in headers:
- content_type, params = cgi.parse_header(headers["Content-Type"])
+ This looks for a ``<base>`` tag in the HTML document. If present, its href
+ attribute denotes the base URL of anchor tags in the document. If there is
+ no such tag (or if it does not have a valid href attribute), the HTML
+ file's URL is used as the base URL.
- if "charset" in params:
- encoding = params['charset']
+ :param document: An HTML document representation. The current
+ implementation expects the result of ``html5lib.parse()``.
+ :param page_url: The URL of the HTML document.
+ """
+ for base in document.findall(".//base"):
+ href = base.get("href")
+ if href is not None:
+ return href
+ return page_url
- self.content = content
- self.parsed = html5lib.parse(
- self.content,
- transport_encoding=encoding,
- namespaceHTMLElements=False,
- )
- self.url = url
- self.headers = headers
- def __str__(self):
- return self.url
+def _get_encoding_from_headers(headers):
+ """Determine if we have any encoding information in our headers.
+ """
+ if headers and "Content-Type" in headers:
+ content_type, params = cgi.parse_header(headers["Content-Type"])
+ if "charset" in params:
+ return params['charset']
+ return None
- @classmethod
- def get_page(cls, link, skip_archives=True, session=None):
- if session is None:
- raise TypeError(
- "get_page() missing 1 required keyword argument: 'session'"
- )
- url = link.url
- url = url.split('#', 1)[0]
+_CLEAN_LINK_RE = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I)
- # Check for VCS schemes that do not support lookup as web pages.
- from pip._internal.vcs import VcsSupport
- for scheme in VcsSupport.schemes:
- if url.lower().startswith(scheme) and url[len(scheme)] in '+:':
- logger.debug('Cannot look at %s URL %s', scheme, link)
- return None
- try:
- if skip_archives:
- filename = link.filename
- for bad_ext in ARCHIVE_EXTENSIONS:
- if filename.endswith(bad_ext):
- content_type = cls._get_content_type(
- url, session=session,
- )
- if content_type.lower().startswith('text/html'):
- break
- else:
- logger.debug(
- 'Skipping page %s because of Content-Type: %s',
- link,
- content_type,
- )
- return
-
- logger.debug('Getting page %s', url)
-
- # Tack index.html onto file:// URLs that point to directories
- (scheme, netloc, path, params, query, fragment) = \
- urllib_parse.urlparse(url)
- if (scheme == 'file' and
- os.path.isdir(urllib_request.url2pathname(path))):
- # add trailing slash if not present so urljoin doesn't trim
- # final segment
- if not url.endswith('/'):
- url += '/'
- url = urllib_parse.urljoin(url, 'index.html')
- logger.debug(' file: URL is directory, getting %s', url)
-
- resp = session.get(
- url,
- headers={
- "Accept": "text/html",
- "Cache-Control": "max-age=600",
- },
- )
- resp.raise_for_status()
-
- # The check for archives above only works if the url ends with
- # something that looks like an archive. However that is not a
- # requirement of an url. Unless we issue a HEAD request on every
- # url we cannot know ahead of time for sure if something is HTML
- # or not. However we can check after we've downloaded it.
- content_type = resp.headers.get('Content-Type', 'unknown')
- if not content_type.lower().startswith("text/html"):
- logger.debug(
- 'Skipping page %s because of Content-Type: %s',
- link,
- content_type,
- )
- return
+def _clean_link(url):
+ """Makes sure a link is fully encoded. That is, if a ' ' shows up in
+ the link, it will be rewritten to %20 (while not over-quoting
+ % or other characters)."""
+ return _CLEAN_LINK_RE.sub(lambda match: '%%%2x' % ord(match.group(0)), url)
- inst = cls(resp.content, resp.url, resp.headers)
- except requests.HTTPError as exc:
- cls._handle_fail(link, exc, url)
- except SSLError as exc:
- reason = "There was a problem confirming the ssl certificate: "
- reason += str(exc)
- cls._handle_fail(link, reason, url, meth=logger.info)
- except requests.ConnectionError as exc:
- cls._handle_fail(link, "connection error: %s" % exc, url)
- except requests.Timeout:
- cls._handle_fail(link, "timed out", url)
- else:
- return inst
- @staticmethod
- def _handle_fail(link, reason, url, meth=None):
- if meth is None:
- meth = logger.debug
+class HTMLPage(object):
+ """Represents one page, along with its URL"""
- meth("Could not fetch URL %s: %s - skipping", link, reason)
+ def __init__(self, content, url, headers=None):
+ self.content = content
+ self.url = url
+ self.headers = headers
- @staticmethod
- def _get_content_type(url, session):
- """Get the Content-Type of the given url, using a HEAD request"""
- scheme, netloc, path, query, fragment = urllib_parse.urlsplit(url)
- if scheme not in {'http', 'https'}:
- # FIXME: some warning or something?
- # assertion error?
- return ''
-
- resp = session.head(url, allow_redirects=True)
- resp.raise_for_status()
-
- return resp.headers.get("Content-Type", "")
-
- @cached_property
- def base_url(self):
- bases = [
- x for x in self.parsed.findall(".//base")
- if x.get("href") is not None
- ]
- if bases and bases[0].get("href"):
- return bases[0].get("href")
- else:
- return self.url
+ def __str__(self):
+ return self.url
- @property
- def links(self):
+ def iter_links(self):
"""Yields all links in the page"""
- for anchor in self.parsed.findall(".//a"):
+ document = html5lib.parse(
+ self.content,
+ transport_encoding=_get_encoding_from_headers(self.headers),
+ namespaceHTMLElements=False,
+ )
+ base_url = _determine_base_url(document, self.url)
+ for anchor in document.findall(".//a"):
if anchor.get("href"):
href = anchor.get("href")
- url = self.clean_link(
- urllib_parse.urljoin(self.base_url, href)
- )
+ url = _clean_link(urllib_parse.urljoin(base_url, href))
pyrequire = anchor.get('data-requires-python')
pyrequire = unescape(pyrequire) if pyrequire else None
- yield Link(url, self, requires_python=pyrequire)
-
- _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I)
-
- def clean_link(self, url):
- """Makes sure a link is fully encoded. That is, if a ' ' shows up in
- the link, it will be rewritten to %20 (while not over-quoting
- % or other characters)."""
- return self._clean_re.sub(
- lambda match: '%%%2x' % ord(match.group(0)), url)
+ yield Link(url, self.url, requires_python=pyrequire)
Search = namedtuple('Search', 'supplied canonical formats')
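
Illustration (not part of the patch): a standalone sketch of what the refactored helpers do -- base-URL resolution via a <base> tag and percent-encoding of raw hrefs. The page URL and link are invented, and Python 3 imports are used for brevity:

    import re
    import html5lib                      # same parser pip vendors
    from urllib.parse import urljoin

    _CLEAN_LINK_RE = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I)

    def clean_link(url):
        # Same substitution as _clean_link() above: anything outside the
        # allowed set (e.g. a space) is rewritten as a percent escape.
        return _CLEAN_LINK_RE.sub(lambda match: '%%%2x' % ord(match.group(0)), url)

    page_url = "https://pypi.example.org/simple/pkg/"          # hypothetical
    content = (
        '<html><head><base href="https://mirror.example.org/simple/pkg/"></head>'
        '<body><a href="pkg-1.0 beta.tar.gz">pkg</a></body></html>'
    )

    document = html5lib.parse(content, namespaceHTMLElements=False)

    # _determine_base_url(): a <base href="..."> wins over the page URL.
    base_url = page_url
    for base in document.findall(".//base"):
        if base.get("href"):
            base_url = base.get("href")
            break

    for anchor in document.findall(".//a"):
        href = anchor.get("href")
        if href:
            print(clean_link(urljoin(base_url, href)))
    # -> https://mirror.example.org/simple/pkg/pkg-1.0%20beta.tar.gz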
diff --git a/src/pip/_internal/operations/freeze.py b/src/pip/_internal/operations/freeze.py
index 1ceb7fedb..af484f2c1 100644
--- a/src/pip/_internal/operations/freeze.py
+++ b/src/pip/_internal/operations/freeze.py
@@ -14,7 +14,6 @@ from pip._internal.req.constructors import (
install_req_from_editable, install_req_from_line,
)
from pip._internal.req.req_file import COMMENT_RE
-from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.misc import (
dist_is_editable, get_installed_distributions,
)
@@ -164,89 +163,49 @@ class FrozenRequirement(object):
self.editable = editable
self.comments = comments
- _rev_re = re.compile(r'-r(\d+)$')
- _date_re = re.compile(r'-(20\d\d\d\d\d\d)$')
-
@classmethod
- def from_dist(cls, dist, dependency_links):
+ def _init_args_from_dist(cls, dist, dependency_links):
+ """
+ Compute and return arguments (req, editable, comments) to pass to
+ FrozenRequirement.__init__().
+
+ This method is for use in FrozenRequirement.from_dist().
+ """
location = os.path.normcase(os.path.abspath(dist.location))
- comments = []
from pip._internal.vcs import vcs, get_src_requirement
- if dist_is_editable(dist) and vcs.get_backend_name(location):
- editable = True
- try:
- req = get_src_requirement(dist, location)
- except InstallationError as exc:
- logger.warning(
- "Error when trying to get requirement for VCS system %s, "
- "falling back to uneditable format", exc
- )
- req = None
- if req is None:
- logger.warning(
- 'Could not determine repository location of %s', location
- )
- comments.append(
- '## !! Could not determine repository location'
- )
- req = dist.as_requirement()
- editable = False
- else:
- editable = False
+ if not dist_is_editable(dist):
req = dist.as_requirement()
- specs = req.specs
- assert len(specs) == 1 and specs[0][0] in ["==", "==="], \
- 'Expected 1 spec with == or ===; specs = %r; dist = %r' % \
- (specs, dist)
- version = specs[0][1]
- ver_match = cls._rev_re.search(version)
- date_match = cls._date_re.search(version)
- if ver_match or date_match:
- svn_backend = vcs.get_backend('svn')
- if svn_backend:
- svn_location = svn_backend().get_location(
- dist,
- dependency_links,
- )
- if not svn_location:
- logger.warning(
- 'Warning: cannot find svn location for %s', req,
- )
- comments.append(
- '## FIXME: could not find svn URL in dependency_links '
- 'for this package:'
- )
- else:
- deprecated(
- "SVN editable detection based on dependency links "
- "will be dropped in the future.",
- replacement=None,
- gone_in="18.2",
- issue=4187,
- )
- comments.append(
- '# Installing as editable to satisfy requirement %s:' %
- req
- )
- if ver_match:
- rev = ver_match.group(1)
- else:
- rev = '{%s}' % date_match.group(1)
- editable = True
- req = '%s@%s#egg=%s' % (
- svn_location,
- rev,
- cls.egg_name(dist)
- )
- return cls(dist.project_name, req, editable, comments)
-
- @staticmethod
- def egg_name(dist):
- name = dist.egg_name()
- match = re.search(r'-py\d\.\d$', name)
- if match:
- name = name[:match.start()]
- return name
+ return (req, False, [])
+
+ vc_type = vcs.get_backend_type(location)
+
+ if not vc_type:
+ req = dist.as_requirement()
+ return (req, False, [])
+
+ try:
+ req = get_src_requirement(vc_type, dist, location)
+ except InstallationError as exc:
+ logger.warning(
+ "Error when trying to get requirement for VCS system %s, "
+ "falling back to uneditable format", exc
+ )
+ else:
+ if req is not None:
+ return (req, True, [])
+
+ logger.warning(
+ 'Could not determine repository location of %s', location
+ )
+ comments = ['## !! Could not determine repository location']
+ req = dist.as_requirement()
+
+ return (req, False, comments)
+
+ @classmethod
+ def from_dist(cls, dist, dependency_links):
+ args = cls._init_args_from_dist(dist, dependency_links)
+ return cls(dist.project_name, *args)
def __str__(self):
req = self.req
diff --git a/src/pip/_internal/pep425tags.py b/src/pip/_internal/pep425tags.py
index 0b5c7832d..ab1a02985 100644
--- a/src/pip/_internal/pep425tags.py
+++ b/src/pip/_internal/pep425tags.py
@@ -11,6 +11,7 @@ import warnings
from collections import OrderedDict
import pip._internal.utils.glibc
+from pip._internal.utils.compat import get_extension_suffixes
logger = logging.getLogger(__name__)
@@ -252,10 +253,9 @@ def get_supported(versions=None, noarch=False, platform=None,
abis[0:0] = [abi]
abi3s = set()
- import imp
- for suffix in imp.get_suffixes():
- if suffix[0].startswith('.abi'):
- abi3s.add(suffix[0].split('.', 2)[1])
+ for suffix in get_extension_suffixes():
+ if suffix.startswith('.abi'):
+ abi3s.add(suffix.split('.', 2)[1])
abis.extend(sorted(list(abi3s)))
diff --git a/src/pip/_internal/utils/compat.py b/src/pip/_internal/utils/compat.py
index e6c008d3b..3114f2da4 100644
--- a/src/pip/_internal/utils/compat.py
+++ b/src/pip/_internal/utils/compat.py
@@ -25,6 +25,7 @@ except ImportError:
__all__ = [
"ipaddress", "uses_pycache", "console_to_str", "native_str",
"get_path_uid", "stdlib_pkgs", "WINDOWS", "samefile", "get_terminal_size",
+ "get_extension_suffixes",
]
@@ -160,6 +161,18 @@ def get_path_uid(path):
return file_uid
+if sys.version_info >= (3, 4):
+ from importlib.machinery import EXTENSION_SUFFIXES
+
+ def get_extension_suffixes():
+ return EXTENSION_SUFFIXES
+else:
+ from imp import get_suffixes
+
+ def get_extension_suffixes():
+ return [suffix[0] for suffix in get_suffixes()]
+
+
def expanduser(path):
"""
Expand ~ and ~user constructions.
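
Illustration (not part of the patch): what the new get_extension_suffixes() shim returns on CPython 3 and how the pep425tags hunk uses it; the exact suffix values vary by interpreter and platform:

    from importlib.machinery import EXTENSION_SUFFIXES

    # On a typical CPython 3.7 Linux build this prints something like
    # ['.cpython-37m-x86_64-linux-gnu.so', '.abi3.so', '.so'].
    print(EXTENSION_SUFFIXES)

    # The pep425tags change above keeps only the stable-ABI suffixes:
    abi3s = {s.split('.', 2)[1] for s in EXTENSION_SUFFIXES if s.startswith('.abi')}
    print(sorted(abi3s))   # ['abi3']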
diff --git a/src/pip/_internal/utils/misc.py b/src/pip/_internal/utils/misc.py
index f7afd1df5..6998e16f0 100644
--- a/src/pip/_internal/utils/misc.py
+++ b/src/pip/_internal/utils/misc.py
@@ -520,15 +520,11 @@ def untar_file(filename, location):
mode = 'r:*'
tar = tarfile.open(filename, mode)
try:
- # note: python<=2.5 doesn't seem to know about pax headers, filter them
leading = has_leading_dir([
member.name for member in tar.getmembers()
- if member.name != 'pax_global_header'
])
for member in tar.getmembers():
fn = member.name
- if fn == 'pax_global_header':
- continue
if leading:
fn = split_leading_dir(fn)[1]
path = os.path.join(location, fn)
@@ -856,6 +852,20 @@ def enum(*sequential, **named):
return type('Enum', (), enums)
+def make_vcs_requirement_url(repo_url, rev, egg_project_name, subdir=None):
+ """
+ Return the URL for a VCS requirement.
+
+ Args:
+ repo_url: the remote VCS url, with any needed VCS prefix (e.g. "git+").
+ """
+ req = '{}@{}#egg={}'.format(repo_url, rev, egg_project_name)
+ if subdir:
+ req += '&subdirectory={}'.format(subdir)
+
+ return req
+
+
def split_auth_from_netloc(netloc):
"""
Parse out and remove the auth information from a netloc.
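
Illustration (not part of the patch): the requirement string the new make_vcs_requirement_url() helper builds; the repository URL, revision and project name are invented:

    from pip._internal.utils.misc import make_vcs_requirement_url  # patched pip

    req = make_vcs_requirement_url(
        'git+https://example.com/someuser/someproject',   # hypothetical repo
        '0123abcd',                                        # hypothetical revision
        'someproject',
        subdir='subpkg',
    )
    print(req)
    # git+https://example.com/someuser/someproject@0123abcd#egg=someproject&subdirectory=subpkg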
diff --git a/src/pip/_internal/utils/ui.py b/src/pip/_internal/utils/ui.py
index 4a337241d..6bab904ab 100644
--- a/src/pip/_internal/utils/ui.py
+++ b/src/pip/_internal/utils/ui.py
@@ -137,7 +137,6 @@ class DownloadProgressMixin(object):
def __init__(self, *args, **kwargs):
super(DownloadProgressMixin, self).__init__(*args, **kwargs)
self.message = (" " * (get_indentation() + 2)) + self.message
- self.last_update = 0.0
@property
def downloaded(self):
@@ -162,15 +161,6 @@ class DownloadProgressMixin(object):
self.next(n)
self.finish()
- def update(self):
- # limit updates to avoid swamping the TTY
- now = time.time()
- if now < self.last_update + 0.2:
- return
- self.last_update = now
-
- super(DownloadProgressMixin, self).update()
-
class WindowsMixin(object):
diff --git a/src/pip/_internal/vcs/__init__.py b/src/pip/_internal/vcs/__init__.py
index 794b35d65..d34c8be0b 100644
--- a/src/pip/_internal/vcs/__init__.py
+++ b/src/pip/_internal/vcs/__init__.py
@@ -133,16 +133,16 @@ class VcsSupport(object):
else:
logger.warning('Cannot unregister because no class or name given')
- def get_backend_name(self, location):
+ def get_backend_type(self, location):
"""
- Return the name of the version control backend if found at given
- location, e.g. vcs.get_backend_name('/path/to/vcs/checkout')
+ Return the type of the version control backend if found at given
+ location, e.g. vcs.get_backend_type('/path/to/vcs/checkout')
"""
for vc_type in self._registry.values():
if vc_type.controls_location(location):
logger.debug('Determine that %s uses VCS: %s',
location, vc_type.name)
- return vc_type.name
+ return vc_type
return None
def get_backend(self, name):
@@ -150,12 +150,6 @@ class VcsSupport(object):
if name in self._registry:
return self._registry[name]
- def get_backend_from_location(self, location):
- vc_type = self.get_backend_name(location)
- if vc_type:
- return self.get_backend(vc_type)
- return None
-
vcs = VcsSupport()
@@ -487,23 +481,14 @@ class VersionControl(object):
return cls.is_repository_directory(location)
-def get_src_requirement(dist, location):
- version_control = vcs.get_backend_from_location(location)
- if version_control:
- try:
- return version_control().get_src_requirement(dist,
- location)
- except BadCommand:
- logger.warning(
- 'cannot determine version of editable source in %s '
- '(%s command not found in path)',
- location,
- version_control.name,
- )
- return dist.as_requirement()
- logger.warning(
- 'cannot determine version of editable source in %s (is not SVN '
- 'checkout, Git clone, Mercurial clone or Bazaar branch)',
- location,
- )
- return dist.as_requirement()
+def get_src_requirement(vc_type, dist, location):
+ try:
+ return vc_type().get_src_requirement(dist, location)
+ except BadCommand:
+ logger.warning(
+ 'cannot determine version of editable source in %s '
+ '(%s command not found in path)',
+ location,
+ vc_type.name,
+ )
+ return dist.as_requirement()
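
Illustration (not part of the patch): how a caller uses the renamed get_backend_type(), which now returns the backend class (or None) rather than its name; the checkout path is hypothetical:

    from pip._internal.vcs import vcs   # patched pip

    vc_type = vcs.get_backend_type('/path/to/some/checkout')   # hypothetical path
    if vc_type is not None:
        print(vc_type.name)   # e.g. 'git' for a Git clone
        backend = vc_type()   # callers instantiate the class themselves now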
diff --git a/src/pip/_internal/vcs/bazaar.py b/src/pip/_internal/vcs/bazaar.py
index d5c6efaf5..3cc66c9dc 100644
--- a/src/pip/_internal/vcs/bazaar.py
+++ b/src/pip/_internal/vcs/bazaar.py
@@ -6,7 +6,9 @@ import os
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._internal.download import path_to_url
-from pip._internal.utils.misc import display_path, rmtree
+from pip._internal.utils.misc import (
+ display_path, make_vcs_requirement_url, rmtree,
+)
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.vcs import VersionControl, vcs
@@ -98,9 +100,9 @@ class Bazaar(VersionControl):
return None
if not repo.lower().startswith('bzr:'):
repo = 'bzr+' + repo
- egg_project_name = dist.egg_name().split('-', 1)[0]
current_rev = self.get_revision(location)
- return '%s@%s#egg=%s' % (repo, current_rev, egg_project_name)
+ egg_project_name = dist.egg_name().split('-', 1)[0]
+ return make_vcs_requirement_url(repo, current_rev, egg_project_name)
def is_commit_id_equal(self, dest, name):
"""Always assume the versions don't match"""
diff --git a/src/pip/_internal/vcs/git.py b/src/pip/_internal/vcs/git.py
index bacc037f7..977853946 100644
--- a/src/pip/_internal/vcs/git.py
+++ b/src/pip/_internal/vcs/git.py
@@ -10,7 +10,7 @@ from pip._vendor.six.moves.urllib import request as urllib_request
from pip._internal.exceptions import BadCommand
from pip._internal.utils.compat import samefile
-from pip._internal.utils.misc import display_path
+from pip._internal.utils.misc import display_path, make_vcs_requirement_url
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.vcs import VersionControl, vcs
@@ -77,6 +77,20 @@ class Git(VersionControl):
version = '.'.join(version.split('.')[:3])
return parse_version(version)
+ def get_branch(self, location):
+ """
+ Return the current branch, or None if HEAD isn't at a branch
+ (e.g. detached HEAD).
+ """
+ args = ['rev-parse', '--abbrev-ref', 'HEAD']
+ output = self.run_command(args, show_stdout=False, cwd=location)
+ branch = output.strip()
+
+ if branch == 'HEAD':
+ return None
+
+ return branch
+
def export(self, location):
"""Export the Git repository at the url to the destination location"""
if not location.endswith('/'):
@@ -91,8 +105,8 @@ class Git(VersionControl):
def get_revision_sha(self, dest, rev):
"""
- Return a commit hash for the given revision if it names a remote
- branch or tag. Otherwise, return None.
+ Return (sha_or_none, is_branch), where sha_or_none is a commit hash
+ if the revision names a remote branch or tag, otherwise None.
Args:
dest: the repository directory.
@@ -115,7 +129,13 @@ class Git(VersionControl):
branch_ref = 'refs/remotes/origin/{}'.format(rev)
tag_ref = 'refs/tags/{}'.format(rev)
- return refs.get(branch_ref) or refs.get(tag_ref)
+ sha = refs.get(branch_ref)
+ if sha is not None:
+ return (sha, True)
+
+ sha = refs.get(tag_ref)
+
+ return (sha, False)
def resolve_revision(self, dest, url, rev_options):
"""
@@ -126,10 +146,13 @@ class Git(VersionControl):
rev_options: a RevOptions object.
"""
rev = rev_options.arg_rev
- sha = self.get_revision_sha(dest, rev)
+ sha, is_branch = self.get_revision_sha(dest, rev)
if sha is not None:
- return rev_options.make_new(sha)
+ rev_options = rev_options.make_new(sha)
+ rev_options.branch_name = rev if is_branch else None
+
+ return rev_options
# Do not show a warning for the common case of something that has
# the form of a Git commit hash.
@@ -177,10 +200,20 @@ class Git(VersionControl):
if rev_options.rev:
# Then a specific revision was requested.
rev_options = self.resolve_revision(dest, url, rev_options)
- # Only do a checkout if the current commit id doesn't match
- # the requested revision.
- if not self.is_commit_id_equal(dest, rev_options.rev):
- cmd_args = ['checkout', '-q'] + rev_options.to_args()
+ branch_name = getattr(rev_options, 'branch_name', None)
+ if branch_name is None:
+ # Only do a checkout if the current commit id doesn't match
+ # the requested revision.
+ if not self.is_commit_id_equal(dest, rev_options.rev):
+ cmd_args = ['checkout', '-q'] + rev_options.to_args()
+ self.run_command(cmd_args, cwd=dest)
+ elif self.get_branch(dest) != branch_name:
+ # Then a specific branch was requested, and that branch
+ # is not yet checked out.
+ track_branch = 'origin/{}'.format(branch_name)
+ cmd_args = [
+ 'checkout', '-b', branch_name, '--track', track_branch,
+ ]
self.run_command(cmd_args, cwd=dest)
#: repo may contain submodules
@@ -261,14 +294,12 @@ class Git(VersionControl):
repo = self.get_url(location)
if not repo.lower().startswith('git:'):
repo = 'git+' + repo
- egg_project_name = dist.egg_name().split('-', 1)[0]
- if not repo:
- return None
current_rev = self.get_revision(location)
- req = '%s@%s#egg=%s' % (repo, current_rev, egg_project_name)
- subdirectory = self._get_subdirectory(location)
- if subdirectory:
- req += '&subdirectory=' + subdirectory
+ egg_project_name = dist.egg_name().split('-', 1)[0]
+ subdir = self._get_subdirectory(location)
+ req = make_vcs_requirement_url(repo, current_rev, egg_project_name,
+ subdir=subdir)
+
return req
def get_url_rev_and_auth(self, url):
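
Illustration (not part of the patch): the branch probe behind the new Git.get_branch() reduces to a single git invocation; a standalone equivalent, with a made-up repository path:

    import subprocess

    def current_branch(repo_dir):
        # 'git rev-parse --abbrev-ref HEAD' prints the branch name, or the
        # literal string 'HEAD' when the checkout is in detached-HEAD state.
        output = subprocess.check_output(
            ['git', 'rev-parse', '--abbrev-ref', 'HEAD'],
            cwd=repo_dir,
        )
        branch = output.decode().strip()
        return None if branch == 'HEAD' else branch

    print(current_branch('/path/to/clone'))   # hypothetical path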
diff --git a/src/pip/_internal/vcs/mercurial.py b/src/pip/_internal/vcs/mercurial.py
index 86d71ef2d..17cfb67d1 100644
--- a/src/pip/_internal/vcs/mercurial.py
+++ b/src/pip/_internal/vcs/mercurial.py
@@ -6,7 +6,7 @@ import os
from pip._vendor.six.moves import configparser
from pip._internal.download import path_to_url
-from pip._internal.utils.misc import display_path
+from pip._internal.utils.misc import display_path, make_vcs_requirement_url
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.vcs import VersionControl, vcs
@@ -88,11 +88,10 @@ class Mercurial(VersionControl):
repo = self.get_url(location)
if not repo.lower().startswith('hg:'):
repo = 'hg+' + repo
- egg_project_name = dist.egg_name().split('-', 1)[0]
- if not repo:
- return None
current_rev_hash = self.get_revision_hash(location)
- return '%s@%s#egg=%s' % (repo, current_rev_hash, egg_project_name)
+ egg_project_name = dist.egg_name().split('-', 1)[0]
+ return make_vcs_requirement_url(repo, current_rev_hash,
+ egg_project_name)
def is_commit_id_equal(self, dest, name):
"""Always assume the versions don't match"""
diff --git a/src/pip/_internal/vcs/subversion.py b/src/pip/_internal/vcs/subversion.py
index 19e2e70dc..6f7cb5d94 100644
--- a/src/pip/_internal/vcs/subversion.py
+++ b/src/pip/_internal/vcs/subversion.py
@@ -7,7 +7,7 @@ import re
from pip._internal.models.link import Link
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import (
- display_path, rmtree, split_auth_from_netloc,
+ display_path, make_vcs_requirement_url, rmtree, split_auth_from_netloc,
)
from pip._internal.vcs import VersionControl, vcs
@@ -199,10 +199,11 @@ class Subversion(VersionControl):
repo = self.get_url(location)
if repo is None:
return None
+ repo = 'svn+' + repo
+ rev = self.get_revision(location)
# FIXME: why not project name?
egg_project_name = dist.egg_name().split('-', 1)[0]
- rev = self.get_revision(location)
- return 'svn+%s@%s#egg=%s' % (repo, rev, egg_project_name)
+ return make_vcs_requirement_url(repo, rev, egg_project_name)
def is_commit_id_equal(self, dest, name):
"""Always assume the versions don't match"""
diff --git a/src/pip/_internal/wheel.py b/src/pip/_internal/wheel.py
index 9a5cce33e..5ce890eba 100644
--- a/src/pip/_internal/wheel.py
+++ b/src/pip/_internal/wheel.py
@@ -475,7 +475,7 @@ if __name__ == '__main__':
if warn_script_location:
msg = message_about_scripts_not_on_PATH(generated_console_scripts)
if msg is not None:
- logger.warn(msg)
+ logger.warning(msg)
if len(gui) > 0:
generated.extend(
diff --git a/src/pip/_vendor/certifi/__init__.py b/src/pip/_vendor/certifi/__init__.py
index 0c4963ef6..aa329fbb4 100644
--- a/src/pip/_vendor/certifi/__init__.py
+++ b/src/pip/_vendor/certifi/__init__.py
@@ -1,3 +1,3 @@
from .core import where, old_where
-__version__ = "2018.04.16"
+__version__ = "2018.08.24"
diff --git a/src/pip/_vendor/certifi/cacert.pem b/src/pip/_vendor/certifi/cacert.pem
index 2713f541c..85de024e7 100644
--- a/src/pip/_vendor/certifi/cacert.pem
+++ b/src/pip/_vendor/certifi/cacert.pem
@@ -3692,169 +3692,6 @@ lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof
TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR
-----END CERTIFICATE-----
-# Issuer: CN=Certplus Root CA G1 O=Certplus
-# Subject: CN=Certplus Root CA G1 O=Certplus
-# Label: "Certplus Root CA G1"
-# Serial: 1491911565779898356709731176965615564637713
-# MD5 Fingerprint: 7f:09:9c:f7:d9:b9:5c:69:69:56:d5:37:3e:14:0d:42
-# SHA1 Fingerprint: 22:fd:d0:b7:fd:a2:4e:0d:ac:49:2c:a0:ac:a6:7b:6a:1f:e3:f7:66
-# SHA256 Fingerprint: 15:2a:40:2b:fc:df:2c:d5:48:05:4d:22:75:b3:9c:7f:ca:3e:c0:97:80:78:b0:f0:ea:76:e5:61:a6:c7:43:3e
------BEGIN CERTIFICATE-----
-MIIFazCCA1OgAwIBAgISESBVg+QtPlRWhS2DN7cs3EYRMA0GCSqGSIb3DQEBDQUA
-MD4xCzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2Vy
-dHBsdXMgUm9vdCBDQSBHMTAeFw0xNDA1MjYwMDAwMDBaFw0zODAxMTUwMDAwMDBa
-MD4xCzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2Vy
-dHBsdXMgUm9vdCBDQSBHMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB
-ANpQh7bauKk+nWT6VjOaVj0W5QOVsjQcmm1iBdTYj+eJZJ+622SLZOZ5KmHNr49a
-iZFluVj8tANfkT8tEBXgfs+8/H9DZ6itXjYj2JizTfNDnjl8KvzsiNWI7nC9hRYt
-6kuJPKNxQv4c/dMcLRC4hlTqQ7jbxofaqK6AJc96Jh2qkbBIb6613p7Y1/oA/caP
-0FG7Yn2ksYyy/yARujVjBYZHYEMzkPZHogNPlk2dT8Hq6pyi/jQu3rfKG3akt62f
-6ajUeD94/vI4CTYd0hYCyOwqaK/1jpTvLRN6HkJKHRUxrgwEV/xhc/MxVoYxgKDE
-EW4wduOU8F8ExKyHcomYxZ3MVwia9Az8fXoFOvpHgDm2z4QTd28n6v+WZxcIbekN
-1iNQMLAVdBM+5S//Ds3EC0pd8NgAM0lm66EYfFkuPSi5YXHLtaW6uOrc4nBvCGrc
-h2c0798wct3zyT8j/zXhviEpIDCB5BmlIOklynMxdCm+4kLV87ImZsdo/Rmz5yCT
-mehd4F6H50boJZwKKSTUzViGUkAksnsPmBIgJPaQbEfIDbsYIC7Z/fyL8inqh3SV
-4EJQeIQEQWGw9CEjjy3LKCHyamz0GqbFFLQ3ZU+V/YDI+HLlJWvEYLF7bY5KinPO
-WftwenMGE9nTdDckQQoRb5fc5+R+ob0V8rqHDz1oihYHAgMBAAGjYzBhMA4GA1Ud
-DwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBSowcCbkahDFXxd
-Bie0KlHYlwuBsTAfBgNVHSMEGDAWgBSowcCbkahDFXxdBie0KlHYlwuBsTANBgkq
-hkiG9w0BAQ0FAAOCAgEAnFZvAX7RvUz1isbwJh/k4DgYzDLDKTudQSk0YcbX8ACh
-66Ryj5QXvBMsdbRX7gp8CXrc1cqh0DQT+Hern+X+2B50ioUHj3/MeXrKls3N/U/7
-/SMNkPX0XtPGYX2eEeAC7gkE2Qfdpoq3DIMku4NQkv5gdRE+2J2winq14J2by5BS
-S7CTKtQ+FjPlnsZlFT5kOwQ/2wyPX1wdaR+v8+khjPPvl/aatxm2hHSco1S1cE5j
-2FddUyGbQJJD+tZ3VTNPZNX70Cxqjm0lpu+F6ALEUz65noe8zDUa3qHpimOHZR4R
-Kttjd5cUvpoUmRGywO6wT/gUITJDT5+rosuoD6o7BlXGEilXCNQ314cnrUlZp5Gr
-RHpejXDbl85IULFzk/bwg2D5zfHhMf1bfHEhYxQUqq/F3pN+aLHsIqKqkHWetUNy
-6mSjhEv9DKgma3GX7lZjZuhCVPnHHd/Qj1vfyDBviP4NxDMcU6ij/UgQ8uQKTuEV
-V/xuZDDCVRHc6qnNSlSsKWNEz0pAoNZoWRsz+e86i9sgktxChL8Bq4fA1SCC28a5
-g4VCXA9DO2pJNdWY9BW/+mGBDAkgGNLQFwzLSABQ6XaCjGTXOqAHVcweMcDvOrRl
-++O/QmueD6i9a5jc2NvLi6Td11n0bt3+qsOR0C5CB8AMTVPNJLFMWx5R9N/pkvo=
------END CERTIFICATE-----
-
-# Issuer: CN=Certplus Root CA G2 O=Certplus
-# Subject: CN=Certplus Root CA G2 O=Certplus
-# Label: "Certplus Root CA G2"
-# Serial: 1492087096131536844209563509228951875861589
-# MD5 Fingerprint: a7:ee:c4:78:2d:1b:ee:2d:b9:29:ce:d6:a7:96:32:31
-# SHA1 Fingerprint: 4f:65:8e:1f:e9:06:d8:28:02:e9:54:47:41:c9:54:25:5d:69:cc:1a
-# SHA256 Fingerprint: 6c:c0:50:41:e6:44:5e:74:69:6c:4c:fb:c9:f8:0f:54:3b:7e:ab:bb:44:b4:ce:6f:78:7c:6a:99:71:c4:2f:17
------BEGIN CERTIFICATE-----
-MIICHDCCAaKgAwIBAgISESDZkc6uo+jF5//pAq/Pc7xVMAoGCCqGSM49BAMDMD4x
-CzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2VydHBs
-dXMgUm9vdCBDQSBHMjAeFw0xNDA1MjYwMDAwMDBaFw0zODAxMTUwMDAwMDBaMD4x
-CzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2VydHBs
-dXMgUm9vdCBDQSBHMjB2MBAGByqGSM49AgEGBSuBBAAiA2IABM0PW1aC3/BFGtat
-93nwHcmsltaeTpwftEIRyoa/bfuFo8XlGVzX7qY/aWfYeOKmycTbLXku54uNAm8x
-Ik0G42ByRZ0OQneezs/lf4WbGOT8zC5y0xaTTsqZY1yhBSpsBqNjMGEwDgYDVR0P
-AQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNqDYwJ5jtpMxjwj
-FNiPwyCrKGBZMB8GA1UdIwQYMBaAFNqDYwJ5jtpMxjwjFNiPwyCrKGBZMAoGCCqG
-SM49BAMDA2gAMGUCMHD+sAvZ94OX7PNVHdTcswYO/jOYnYs5kGuUIe22113WTNch
-p+e/IQ8rzfcq3IUHnQIxAIYUFuXcsGXCwI4Un78kFmjlvPl5adytRSv3tjFzzAal
-U5ORGpOucGpnutee5WEaXw==
------END CERTIFICATE-----
-
-# Issuer: CN=OpenTrust Root CA G1 O=OpenTrust
-# Subject: CN=OpenTrust Root CA G1 O=OpenTrust
-# Label: "OpenTrust Root CA G1"
-# Serial: 1492036577811947013770400127034825178844775
-# MD5 Fingerprint: 76:00:cc:81:29:cd:55:5e:88:6a:7a:2e:f7:4d:39:da
-# SHA1 Fingerprint: 79:91:e8:34:f7:e2:ee:dd:08:95:01:52:e9:55:2d:14:e9:58:d5:7e
-# SHA256 Fingerprint: 56:c7:71:28:d9:8c:18:d9:1b:4c:fd:ff:bc:25:ee:91:03:d4:75:8e:a2:ab:ad:82:6a:90:f3:45:7d:46:0e:b4
------BEGIN CERTIFICATE-----
-MIIFbzCCA1egAwIBAgISESCzkFU5fX82bWTCp59rY45nMA0GCSqGSIb3DQEBCwUA
-MEAxCzAJBgNVBAYTAkZSMRIwEAYDVQQKDAlPcGVuVHJ1c3QxHTAbBgNVBAMMFE9w
-ZW5UcnVzdCBSb290IENBIEcxMB4XDTE0MDUyNjA4NDU1MFoXDTM4MDExNTAwMDAw
-MFowQDELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCU9wZW5UcnVzdDEdMBsGA1UEAwwU
-T3BlblRydXN0IFJvb3QgQ0EgRzEwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
-AoICAQD4eUbalsUwXopxAy1wpLuwxQjczeY1wICkES3d5oeuXT2R0odsN7faYp6b
-wiTXj/HbpqbfRm9RpnHLPhsxZ2L3EVs0J9V5ToybWL0iEA1cJwzdMOWo010hOHQX
-/uMftk87ay3bfWAfjH1MBcLrARYVmBSO0ZB3Ij/swjm4eTrwSSTilZHcYTSSjFR0
-77F9jAHiOH3BX2pfJLKOYheteSCtqx234LSWSE9mQxAGFiQD4eCcjsZGT44ameGP
-uY4zbGneWK2gDqdkVBFpRGZPTBKnjix9xNRbxQA0MMHZmf4yzgeEtE7NCv82TWLx
-p2NX5Ntqp66/K7nJ5rInieV+mhxNaMbBGN4zK1FGSxyO9z0M+Yo0FMT7MzUj8czx
-Kselu7Cizv5Ta01BG2Yospb6p64KTrk5M0ScdMGTHPjgniQlQ/GbI4Kq3ywgsNw2
-TgOzfALU5nsaqocTvz6hdLubDuHAk5/XpGbKuxs74zD0M1mKB3IDVedzagMxbm+W
-G+Oin6+Sx+31QrclTDsTBM8clq8cIqPQqwWyTBIjUtz9GVsnnB47ev1CI9sjgBPw
-vFEVVJSmdz7QdFG9URQIOTfLHzSpMJ1ShC5VkLG631UAC9hWLbFJSXKAqWLXwPYY
-EQRVzXR7z2FwefR7LFxckvzluFqrTJOVoSfupb7PcSNCupt2LQIDAQABo2MwYTAO
-BgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUl0YhVyE1
-2jZVx/PxN3DlCPaTKbYwHwYDVR0jBBgwFoAUl0YhVyE12jZVx/PxN3DlCPaTKbYw
-DQYJKoZIhvcNAQELBQADggIBAB3dAmB84DWn5ph76kTOZ0BP8pNuZtQ5iSas000E
-PLuHIT839HEl2ku6q5aCgZG27dmxpGWX4m9kWaSW7mDKHyP7Rbr/jyTwyqkxf3kf
-gLMtMrpkZ2CvuVnN35pJ06iCsfmYlIrM4LvgBBuZYLFGZdwIorJGnkSI6pN+VxbS
-FXJfLkur1J1juONI5f6ELlgKn0Md/rcYkoZDSw6cMoYsYPXpSOqV7XAp8dUv/TW0
-V8/bhUiZucJvbI/NeJWsZCj9VrDDb8O+WVLhX4SPgPL0DTatdrOjteFkdjpY3H1P
-XlZs5VVZV6Xf8YpmMIzUUmI4d7S+KNfKNsSbBfD4Fdvb8e80nR14SohWZ25g/4/I
-i+GOvUKpMwpZQhISKvqxnUOOBZuZ2mKtVzazHbYNeS2WuOvyDEsMpZTGMKcmGS3t
-TAZQMPH9WD25SxdfGbRqhFS0OE85og2WaMMolP3tLR9Ka0OWLpABEPs4poEL0L91
-09S5zvE/bw4cHjdx5RiHdRk/ULlepEU0rbDK5uUTdg8xFKmOLZTW1YVNcxVPS/Ky
-Pu1svf0OnWZzsD2097+o4BGkxK51CUpjAEggpsadCwmKtODmzj7HPiY46SvepghJ
-AwSQiumPv+i2tCqjI40cHLI5kqiPAlxAOXXUc0ECd97N4EOH1uS6SsNsEn/+KuYj
-1oxx
------END CERTIFICATE-----
-
-# Issuer: CN=OpenTrust Root CA G2 O=OpenTrust
-# Subject: CN=OpenTrust Root CA G2 O=OpenTrust
-# Label: "OpenTrust Root CA G2"
-# Serial: 1492012448042702096986875987676935573415441
-# MD5 Fingerprint: 57:24:b6:59:24:6b:ae:c8:fe:1c:0c:20:f2:c0:4e:eb
-# SHA1 Fingerprint: 79:5f:88:60:c5:ab:7c:3d:92:e6:cb:f4:8d:e1:45:cd:11:ef:60:0b
-# SHA256 Fingerprint: 27:99:58:29:fe:6a:75:15:c1:bf:e8:48:f9:c4:76:1d:b1:6c:22:59:29:25:7b:f4:0d:08:94:f2:9e:a8:ba:f2
------BEGIN CERTIFICATE-----
-MIIFbzCCA1egAwIBAgISESChaRu/vbm9UpaPI+hIvyYRMA0GCSqGSIb3DQEBDQUA
-MEAxCzAJBgNVBAYTAkZSMRIwEAYDVQQKDAlPcGVuVHJ1c3QxHTAbBgNVBAMMFE9w
-ZW5UcnVzdCBSb290IENBIEcyMB4XDTE0MDUyNjAwMDAwMFoXDTM4MDExNTAwMDAw
-MFowQDELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCU9wZW5UcnVzdDEdMBsGA1UEAwwU
-T3BlblRydXN0IFJvb3QgQ0EgRzIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
-AoICAQDMtlelM5QQgTJT32F+D3Y5z1zCU3UdSXqWON2ic2rxb95eolq5cSG+Ntmh
-/LzubKh8NBpxGuga2F8ORAbtp+Dz0mEL4DKiltE48MLaARf85KxP6O6JHnSrT78e
-CbY2albz4e6WiWYkBuTNQjpK3eCasMSCRbP+yatcfD7J6xcvDH1urqWPyKwlCm/6
-1UWY0jUJ9gNDlP7ZvyCVeYCYitmJNbtRG6Q3ffyZO6v/v6wNj0OxmXsWEH4db0fE
-FY8ElggGQgT4hNYdvJGmQr5J1WqIP7wtUdGejeBSzFfdNTVY27SPJIjki9/ca1TS
-gSuyzpJLHB9G+h3Ykst2Z7UJmQnlrBcUVXDGPKBWCgOz3GIZ38i1MH/1PCZ1Eb3X
-G7OHngevZXHloM8apwkQHZOJZlvoPGIytbU6bumFAYueQ4xncyhZW+vj3CzMpSZy
-YhK05pyDRPZRpOLAeiRXyg6lPzq1O4vldu5w5pLeFlwoW5cZJ5L+epJUzpM5ChaH
-vGOz9bGTXOBut9Dq+WIyiET7vycotjCVXRIouZW+j1MY5aIYFuJWpLIsEPUdN6b4
-t/bQWVyJ98LVtZR00dX+G7bw5tYee9I8y6jj9RjzIR9u701oBnstXW5DiabA+aC/
-gh7PU3+06yzbXfZqfUAkBXKJOAGTy3HCOV0GEfZvePg3DTmEJwIDAQABo2MwYTAO
-BgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUajn6QiL3
-5okATV59M4PLuG53hq8wHwYDVR0jBBgwFoAUajn6QiL35okATV59M4PLuG53hq8w
-DQYJKoZIhvcNAQENBQADggIBAJjLq0A85TMCl38th6aP1F5Kr7ge57tx+4BkJamz
-Gj5oXScmp7oq4fBXgwpkTx4idBvpkF/wrM//T2h6OKQQbA2xx6R3gBi2oihEdqc0
-nXGEL8pZ0keImUEiyTCYYW49qKgFbdEfwFFEVn8nNQLdXpgKQuswv42hm1GqO+qT
-RmTFAHneIWv2V6CG1wZy7HBGS4tz3aAhdT7cHcCP009zHIXZ/n9iyJVvttN7jLpT
-wm+bREx50B1ws9efAvSyB7DH5fitIw6mVskpEndI2S9G/Tvw/HRwkqWOOAgfZDC2
-t0v7NqwQjqBSM2OdAzVWxWm9xiNaJ5T2pBL4LTM8oValX9YZ6e18CL13zSdkzJTa
-TkZQh+D5wVOAHrut+0dSixv9ovneDiK3PTNZbNTe9ZUGMg1RGUFcPk8G97krgCf2
-o6p6fAbhQ8MTOWIaNr3gKC6UAuQpLmBVrkA9sHSSXvAgZJY/X0VdiLWK2gKgW0VU
-3jg9CcCoSmVGFvyqv1ROTVu+OEO3KMqLM6oaJbolXCkvW0pujOotnCr2BXbgd5eA
-iN1nE28daCSLT7d0geX0YJ96Vdc+N9oWaz53rK4YcJUIeSkDiv7BO7M/Gg+kO14f
-WKGVyasvc0rQLW6aWQ9VGHgtPFGml4vmu7JwqkwR3v98KzfUetF3NI/n+UL3PIEM
-S1IK
------END CERTIFICATE-----
-
-# Issuer: CN=OpenTrust Root CA G3 O=OpenTrust
-# Subject: CN=OpenTrust Root CA G3 O=OpenTrust
-# Label: "OpenTrust Root CA G3"
-# Serial: 1492104908271485653071219941864171170455615
-# MD5 Fingerprint: 21:37:b4:17:16:92:7b:67:46:70:a9:96:d7:a8:13:24
-# SHA1 Fingerprint: 6e:26:64:f3:56:bf:34:55:bf:d1:93:3f:7c:01:de:d8:13:da:8a:a6
-# SHA256 Fingerprint: b7:c3:62:31:70:6e:81:07:8c:36:7c:b8:96:19:8f:1e:32:08:dd:92:69:49:dd:8f:57:09:a4:10:f7:5b:62:92
------BEGIN CERTIFICATE-----
-MIICITCCAaagAwIBAgISESDm+Ez8JLC+BUCs2oMbNGA/MAoGCCqGSM49BAMDMEAx
-CzAJBgNVBAYTAkZSMRIwEAYDVQQKDAlPcGVuVHJ1c3QxHTAbBgNVBAMMFE9wZW5U
-cnVzdCBSb290IENBIEczMB4XDTE0MDUyNjAwMDAwMFoXDTM4MDExNTAwMDAwMFow
-QDELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCU9wZW5UcnVzdDEdMBsGA1UEAwwUT3Bl
-blRydXN0IFJvb3QgQ0EgRzMwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAARK7liuTcpm
-3gY6oxH84Bjwbhy6LTAMidnW7ptzg6kjFYwvWYpa3RTqnVkrQ7cG7DK2uu5Bta1d
-oYXM6h0UZqNnfkbilPPntlahFVmhTzeXuSIevRHr9LIfXsMUmuXZl5mjYzBhMA4G
-A1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRHd8MUi2I5
-DMlv4VBN0BBY3JWIbTAfBgNVHSMEGDAWgBRHd8MUi2I5DMlv4VBN0BBY3JWIbTAK
-BggqhkjOPQQDAwNpADBmAjEAj6jcnboMBBf6Fek9LykBl7+BFjNAk2z8+e2AcG+q
-j9uEwov1NcoG3GRvaBbhj5G5AjEA2Euly8LQCGzpGPta3U1fJAuwACEl74+nBCZx
-4nxp5V2a+EEfOzmTk51V6s2N8fvB
------END CERTIFICATE-----
-
# Issuer: CN=ISRG Root X1 O=Internet Security Research Group
# Subject: CN=ISRG Root X1 O=Internet Security Research Group
# Label: "ISRG Root X1"
@@ -4398,3 +4235,66 @@ MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX
ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg
h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg==
-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6
+# Label: "GlobalSign Root CA - R6"
+# Serial: 1417766617973444989252670301619537
+# MD5 Fingerprint: 4f:dd:07:e4:d4:22:64:39:1e:0c:37:42:ea:d1:c6:ae
+# SHA1 Fingerprint: 80:94:64:0e:b5:a7:a1:ca:11:9c:1f:dd:d5:9f:81:02:63:a7:fb:d1
+# SHA256 Fingerprint: 2c:ab:ea:fe:37:d0:6c:a2:2a:ba:73:91:c0:03:3d:25:98:29:52:c4:53:64:73:49:76:3a:3a:b5:ad:6c:cf:69
+-----BEGIN CERTIFICATE-----
+MIIFgzCCA2ugAwIBAgIORea7A4Mzw4VlSOb/RVEwDQYJKoZIhvcNAQEMBQAwTDEg
+MB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjYxEzARBgNVBAoTCkdsb2Jh
+bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTQxMjEwMDAwMDAwWhcNMzQx
+MjEwMDAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSNjET
+MBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCAiIwDQYJ
+KoZIhvcNAQEBBQADggIPADCCAgoCggIBAJUH6HPKZvnsFMp7PPcNCPG0RQssgrRI
+xutbPK6DuEGSMxSkb3/pKszGsIhrxbaJ0cay/xTOURQh7ErdG1rG1ofuTToVBu1k
+ZguSgMpE3nOUTvOniX9PeGMIyBJQbUJmL025eShNUhqKGoC3GYEOfsSKvGRMIRxD
+aNc9PIrFsmbVkJq3MQbFvuJtMgamHvm566qjuL++gmNQ0PAYid/kD3n16qIfKtJw
+LnvnvJO7bVPiSHyMEAc4/2ayd2F+4OqMPKq0pPbzlUoSB239jLKJz9CgYXfIWHSw
+1CM69106yqLbnQneXUQtkPGBzVeS+n68UARjNN9rkxi+azayOeSsJDa38O+2HBNX
+k7besvjihbdzorg1qkXy4J02oW9UivFyVm4uiMVRQkQVlO6jxTiWm05OWgtH8wY2
+SXcwvHE35absIQh1/OZhFj931dmRl4QKbNQCTXTAFO39OfuD8l4UoQSwC+n+7o/h
+bguyCLNhZglqsQY6ZZZZwPA1/cnaKI0aEYdwgQqomnUdnjqGBQCe24DWJfncBZ4n
+WUx2OVvq+aWh2IMP0f/fMBH5hc8zSPXKbWQULHpYT9NLCEnFlWQaYw55PfWzjMpY
+rZxCRXluDocZXFSxZba/jJvcE+kNb7gu3GduyYsRtYQUigAZcIN5kZeR1Bonvzce
+MgfYFGM8KEyvAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTAD
+AQH/MB0GA1UdDgQWBBSubAWjkxPioufi1xzWx/B/yGdToDAfBgNVHSMEGDAWgBSu
+bAWjkxPioufi1xzWx/B/yGdToDANBgkqhkiG9w0BAQwFAAOCAgEAgyXt6NH9lVLN
+nsAEoJFp5lzQhN7craJP6Ed41mWYqVuoPId8AorRbrcWc+ZfwFSY1XS+wc3iEZGt
+Ixg93eFyRJa0lV7Ae46ZeBZDE1ZXs6KzO7V33EByrKPrmzU+sQghoefEQzd5Mr61
+55wsTLxDKZmOMNOsIeDjHfrYBzN2VAAiKrlNIC5waNrlU/yDXNOd8v9EDERm8tLj
+vUYAGm0CuiVdjaExUd1URhxN25mW7xocBFymFe944Hn+Xds+qkxV/ZoVqW/hpvvf
+cDDpw+5CRu3CkwWJ+n1jez/QcYF8AOiYrg54NMMl+68KnyBr3TsTjxKM4kEaSHpz
+oHdpx7Zcf4LIHv5YGygrqGytXm3ABdJ7t+uA/iU3/gKbaKxCXcPu9czc8FB10jZp
+nOZ7BN9uBmm23goJSFmH63sUYHpkqmlD75HHTOwY3WzvUy2MmeFe8nI+z1TIvWfs
+pA9MRf/TuTAjB0yPEL+GltmZWrSZVxykzLsViVO6LAUP5MSeGbEYNNVMnbrt9x+v
+JJUEeKgDu+6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R
+8k8HWV+LLUNS60YMlOH1Zkd5d9VUWx+tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4
+5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA=
+-----END CERTIFICATE-----
+
+# Issuer: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed
+# Subject: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed
+# Label: "OISTE WISeKey Global Root GC CA"
+# Serial: 44084345621038548146064804565436152554
+# MD5 Fingerprint: a9:d6:b9:2d:2f:93:64:f8:a5:69:ca:91:e9:68:07:23
+# SHA1 Fingerprint: e0:11:84:5e:34:de:be:88:81:b9:9c:f6:16:26:d1:96:1f:c3:b9:31
+# SHA256 Fingerprint: 85:60:f9:1c:36:24:da:ba:95:70:b5:fe:a0:db:e3:6f:f1:1a:83:23:be:94:86:85:4f:b3:f3:4a:55:71:19:8d
+-----BEGIN CERTIFICATE-----
+MIICaTCCAe+gAwIBAgIQISpWDK7aDKtARb8roi066jAKBggqhkjOPQQDAzBtMQsw
+CQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUgRm91
+bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwg
+Um9vdCBHQyBDQTAeFw0xNzA1MDkwOTQ4MzRaFw00MjA1MDkwOTU4MzNaMG0xCzAJ
+BgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBGb3Vu
+ZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2JhbCBS
+b290IEdDIENBMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAETOlQwMYPchi82PG6s4ni
+eUqjFqdrVCTbUf/q9Akkwwsin8tqJ4KBDdLArzHkdIJuyiXZjHWd8dvQmqJLIX4W
+p2OQ0jnUsYd4XxiWD1AbNTcPasbc2RNNpI6QN+a9WzGRo1QwUjAOBgNVHQ8BAf8E
+BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUSIcUrOPDnpBgOtfKie7T
+rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV
+57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg
+Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9
+-----END CERTIFICATE-----
diff --git a/src/pip/_vendor/packaging/__about__.py b/src/pip/_vendor/packaging/__about__.py
index 4255c5b55..21fc6ce3e 100644
--- a/src/pip/_vendor/packaging/__about__.py
+++ b/src/pip/_vendor/packaging/__about__.py
@@ -12,10 +12,10 @@ __title__ = "packaging"
__summary__ = "Core utilities for Python packages"
__uri__ = "https://github.com/pypa/packaging"
-__version__ = "17.1"
+__version__ = "18.0"
__author__ = "Donald Stufft and individual contributors"
__email__ = "donald@stufft.io"
__license__ = "BSD or Apache License, Version 2.0"
-__copyright__ = "Copyright 2014-2016 %s" % __author__
+__copyright__ = "Copyright 2014-2018 %s" % __author__
diff --git a/src/pip/_vendor/packaging/requirements.py b/src/pip/_vendor/packaging/requirements.py
index 2760483a7..d40bd8c5c 100644
--- a/src/pip/_vendor/packaging/requirements.py
+++ b/src/pip/_vendor/packaging/requirements.py
@@ -92,16 +92,16 @@ class Requirement(object):
try:
req = REQUIREMENT.parseString(requirement_string)
except ParseException as e:
- raise InvalidRequirement(
- "Invalid requirement, parse error at \"{0!r}\"".format(
- requirement_string[e.loc:e.loc + 8]))
+ raise InvalidRequirement("Parse error at \"{0!r}\": {1}".format(
+ requirement_string[e.loc:e.loc + 8], e.msg
+ ))
self.name = req.name
if req.url:
parsed_url = urlparse.urlparse(req.url)
if not (parsed_url.scheme and parsed_url.netloc) or (
not parsed_url.scheme and not parsed_url.netloc):
- raise InvalidRequirement("Invalid URL given")
+ raise InvalidRequirement("Invalid URL: {0}".format(req.url))
self.url = req.url
else:
self.url = None
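
The two message changes above make InvalidRequirement report the pyparsing
diagnosis and the offending URL instead of a generic complaint. A quick
illustration (the exact wording depends on the packaging version in use; the
vendored 18.0 code formats it as shown in the hunks):

from packaging.requirements import InvalidRequirement, Requirement

try:
    Requirement('pip >=')   # comparison operator with no version after it
except InvalidRequirement as exc:
    # With the change above, something like:
    #   Parse error at "' >='": Expected stringEnd
    print(exc)
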
diff --git a/src/pip/_vendor/packaging/specifiers.py b/src/pip/_vendor/packaging/specifiers.py
index 9b6353f05..4c798999d 100644
--- a/src/pip/_vendor/packaging/specifiers.py
+++ b/src/pip/_vendor/packaging/specifiers.py
@@ -503,7 +503,7 @@ class Specifier(_IndividualSpecifier):
return False
# Ensure that we do not allow a local version of the version mentioned
- # in the specifier, which is techincally greater than, to match.
+ # in the specifier, which is technically greater than, to match.
if prospective.local is not None:
if Version(prospective.base_version) == Version(spec.base_version):
return False
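
For context, the comment being fixed documents PEP 440's rule that an
exclusive ordered comparison such as >1.7 must not match a local version of
1.7, even though 1.7+anything sorts above 1.7. A quick check with the
packaging API (assuming packaging is importable):

from packaging.specifiers import Specifier

spec = Specifier('>1.7')
print(spec.contains('1.7+local.1'))   # False: local build of 1.7 is excluded
print(spec.contains('1.8'))           # True
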
diff --git a/src/pip/_vendor/pkg_resources/__init__.py b/src/pip/_vendor/pkg_resources/__init__.py
index f2815cc6b..0b432f689 100644
--- a/src/pip/_vendor/pkg_resources/__init__.py
+++ b/src/pip/_vendor/pkg_resources/__init__.py
@@ -47,6 +47,11 @@ except ImportError:
# Python 3.2 compatibility
import imp as _imp
+try:
+ FileExistsError
+except NameError:
+ FileExistsError = OSError
+
from pip._vendor import six
from pip._vendor.six.moves import urllib, map, filter
@@ -78,8 +83,11 @@ __import__('pip._vendor.packaging.requirements')
__import__('pip._vendor.packaging.markers')
-if (3, 0) < sys.version_info < (3, 3):
- raise RuntimeError("Python 3.3 or later is required")
+__metaclass__ = type
+
+
+if (3, 0) < sys.version_info < (3, 4):
+ raise RuntimeError("Python 3.4 or later is required")
if six.PY2:
# Those builtin exceptions are only defined in Python 3
@@ -537,7 +545,7 @@ class IResourceProvider(IMetadataProvider):
"""List of resource names in the directory (like ``os.listdir()``)"""
-class WorkingSet(object):
+class WorkingSet:
"""A collection of active distributions on sys.path (or a similar list)"""
def __init__(self, entries=None):
@@ -637,13 +645,12 @@ class WorkingSet(object):
distributions in the working set, otherwise only ones matching
both `group` and `name` are yielded (in distribution order).
"""
- for dist in self:
- entries = dist.get_entry_map(group)
- if name is None:
- for ep in entries.values():
- yield ep
- elif name in entries:
- yield entries[name]
+ return (
+ entry
+ for dist in self
+ for entry in dist.get_entry_map(group).values()
+ if name is None or name == entry.name
+ )
def run_script(self, requires, script_name):
"""Locate distribution for `requires` and run `script_name` script"""
@@ -944,7 +951,7 @@ class _ReqExtras(dict):
return not req.marker or any(extra_evals)
-class Environment(object):
+class Environment:
"""Searchable snapshot of distributions on a search path"""
def __init__(
@@ -959,7 +966,7 @@ class Environment(object):
`platform` is an optional string specifying the name of the platform
that platform-specific distributions must be compatible with. If
unspecified, it defaults to the current platform. `python` is an
- optional string naming the desired version of Python (e.g. ``'3.3'``);
+ optional string naming the desired version of Python (e.g. ``'3.6'``);
it defaults to the current version.
You may explicitly set `platform` (and/or `python`) to ``None`` if you
@@ -2087,7 +2094,12 @@ def _handle_ns(packageName, path_item):
importer = get_importer(path_item)
if importer is None:
return None
- loader = importer.find_module(packageName)
+
+ # capture warnings due to #1111
+ with warnings.catch_warnings():
+ warnings.simplefilter("ignore")
+ loader = importer.find_module(packageName)
+
if loader is None:
return None
module = sys.modules.get(packageName)
@@ -2132,12 +2144,13 @@ def _rebuild_mod_path(orig_path, package_name, module):
parts = path_parts[:-module_parts]
return safe_sys_path_index(_normalize_cached(os.sep.join(parts)))
- if not isinstance(orig_path, list):
- # Is this behavior useful when module.__path__ is not a list?
- return
+ new_path = sorted(orig_path, key=position_in_sys_path)
+ new_path = [_normalize_cached(p) for p in new_path]
- orig_path.sort(key=position_in_sys_path)
- module.__path__[:] = [_normalize_cached(p) for p in orig_path]
+ if isinstance(module.__path__, list):
+ module.__path__[:] = new_path
+ else:
+ module.__path__ = new_path
def declare_namespace(packageName):
@@ -2148,9 +2161,10 @@ def declare_namespace(packageName):
if packageName in _namespace_packages:
return
- path, parent = sys.path, None
- if '.' in packageName:
- parent = '.'.join(packageName.split('.')[:-1])
+ path = sys.path
+ parent, _, _ = packageName.rpartition('.')
+
+ if parent:
declare_namespace(parent)
if parent not in _namespace_packages:
__import__(parent)
@@ -2161,7 +2175,7 @@ def declare_namespace(packageName):
# Track what packages are namespaces, so when new path items are added,
# they can be updated
- _namespace_packages.setdefault(parent, []).append(packageName)
+ _namespace_packages.setdefault(parent or None, []).append(packageName)
_namespace_packages.setdefault(packageName, [])
for path_item in path:
@@ -2279,7 +2293,7 @@ EGG_NAME = re.compile(
).match
-class EntryPoint(object):
+class EntryPoint:
"""Object representing an advertised importable object"""
def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
@@ -2433,7 +2447,7 @@ def _version_from_file(lines):
return safe_version(value.strip()) or None
-class Distribution(object):
+class Distribution:
"""Wrap an actual or potential sys.path entry w/metadata"""
PKG_INFO = 'PKG-INFO'
@@ -3027,7 +3041,10 @@ def _bypass_ensure_directory(path):
dirname, filename = split(path)
if dirname and filename and not isdir(dirname):
_bypass_ensure_directory(dirname)
- mkdir(dirname, 0o755)
+ try:
+ mkdir(dirname, 0o755)
+ except FileExistsError:
+ pass
def split_sections(s):
diff --git a/src/pip/_vendor/pkg_resources/py31compat.py b/src/pip/_vendor/pkg_resources/py31compat.py
index 331a51bb0..a2d3007ce 100644
--- a/src/pip/_vendor/pkg_resources/py31compat.py
+++ b/src/pip/_vendor/pkg_resources/py31compat.py
@@ -2,6 +2,8 @@ import os
import errno
import sys
+from pip._vendor import six
+
def _makedirs_31(path, exist_ok=False):
try:
@@ -15,8 +17,7 @@ def _makedirs_31(path, exist_ok=False):
# and exists_ok considerations are disentangled.
# See https://github.com/pypa/setuptools/pull/1083#issuecomment-315168663
needs_makedirs = (
- sys.version_info < (3, 2, 5) or
- (3, 3) <= sys.version_info < (3, 3, 6) or
+ six.PY2 or
(3, 4) <= sys.version_info < (3, 4, 1)
)
makedirs = _makedirs_31 if needs_makedirs else os.makedirs
diff --git a/src/pip/_vendor/pyparsing.py b/src/pip/_vendor/pyparsing.py
index ba2619c23..865152d7c 100644
--- a/src/pip/_vendor/pyparsing.py
+++ b/src/pip/_vendor/pyparsing.py
@@ -1,6 +1,6 @@
# module pyparsing.py
#
-# Copyright (c) 2003-2016 Paul T. McGuire
+# Copyright (c) 2003-2018 Paul T. McGuire
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@@ -25,6 +25,7 @@
__doc__ = \
"""
pyparsing module - Classes and methods to define and execute parsing grammars
+=============================================================================
The pyparsing module is an alternative approach to creating and executing simple grammars,
vs. the traditional lex/yacc approach, or the use of regular expressions. With pyparsing, you
@@ -58,10 +59,23 @@ The pyparsing module handles some of the problems that are typically vexing when
- extra or missing whitespace (the above program will also handle "Hello,World!", "Hello , World !", etc.)
- quoted strings
- embedded comments
+
+
+Getting Started -
+-----------------
+Visit the classes L{ParserElement} and L{ParseResults} to see the base classes that most other pyparsing
+classes inherit from. Use the docstrings for examples of how to:
+ - construct literal match expressions from L{Literal} and L{CaselessLiteral} classes
+ - construct character word-group expressions using the L{Word} class
+ - see how to create repetitive expressions using L{ZeroOrMore} and L{OneOrMore} classes
+ - use L{'+'<And>}, L{'|'<MatchFirst>}, L{'^'<Or>}, and L{'&'<Each>} operators to combine simple expressions into more complex ones
+ - associate names with your parsed results using L{ParserElement.setResultsName}
+ - find some helpful expression short-cuts like L{delimitedList} and L{oneOf}
+ - find more useful common expressions in the L{pyparsing_common} namespace class
"""
-__version__ = "2.2.0"
-__versionTime__ = "06 Mar 2017 02:06 UTC"
+__version__ = "2.2.1"
+__versionTime__ = "18 Sep 2018 00:49 UTC"
__author__ = "Paul McGuire <ptmcg@users.sourceforge.net>"
import string
@@ -83,6 +97,15 @@ except ImportError:
from threading import RLock
try:
+ # Python 3
+ from collections.abc import Iterable
+ from collections.abc import MutableMapping
+except ImportError:
+ # Python 2.7
+ from collections import Iterable
+ from collections import MutableMapping
+
+try:
from collections import OrderedDict as _OrderedDict
except ImportError:
try:
@@ -940,7 +963,7 @@ class ParseResults(object):
def __dir__(self):
return (dir(type(self)) + list(self.keys()))
-collections.MutableMapping.register(ParseResults)
+MutableMapping.register(ParseResults)
def col (loc,strg):
"""Returns current column within a string, counting newlines as line separators.
@@ -1025,11 +1048,11 @@ def _trim_arity(func, maxargs=2):
# special handling for Python 3.5.0 - extra deep call stack by 1
offset = -3 if system_version == (3,5,0) else -2
frame_summary = traceback.extract_stack(limit=-offset+limit-1)[offset]
- return [(frame_summary.filename, frame_summary.lineno)]
+ return [frame_summary[:2]]
def extract_tb(tb, limit=0):
frames = traceback.extract_tb(tb, limit=limit)
frame_summary = frames[-1]
- return [(frame_summary.filename, frame_summary.lineno)]
+ return [frame_summary[:2]]
else:
extract_stack = traceback.extract_stack
extract_tb = traceback.extract_tb
@@ -1374,7 +1397,7 @@ class ParserElement(object):
else:
preloc = loc
tokensStart = preloc
- if self.mayIndexError or loc >= len(instring):
+ if self.mayIndexError or preloc >= len(instring):
try:
loc,tokens = self.parseImpl( instring, preloc, doActions )
except IndexError:
@@ -1408,7 +1431,6 @@ class ParserElement(object):
self.resultsName,
asList=self.saveAsList and isinstance(tokens,(ParseResults,list)),
modal=self.modalResults )
-
if debugging:
#~ print ("Matched",self,"->",retTokens.asList())
if (self.debugActions[1] ):
@@ -3242,7 +3264,7 @@ class ParseExpression(ParserElement):
if isinstance( exprs, basestring ):
self.exprs = [ ParserElement._literalStringClass( exprs ) ]
- elif isinstance( exprs, collections.Iterable ):
+ elif isinstance( exprs, Iterable ):
exprs = list(exprs)
# if sequence of strings provided, wrap with Literal
if all(isinstance(expr, basestring) for expr in exprs):
@@ -4393,7 +4415,7 @@ def traceParseAction(f):
@traceParseAction
def remove_duplicate_chars(tokens):
- return ''.join(sorted(set(''.join(tokens)))
+ return ''.join(sorted(set(''.join(tokens))))
wds = OneOrMore(wd).setParseAction(remove_duplicate_chars)
print(wds.parseString("slkdjs sld sldd sdlf sdljf"))
@@ -4583,7 +4605,7 @@ def oneOf( strs, caseless=False, useRegex=True ):
symbols = []
if isinstance(strs,basestring):
symbols = strs.split()
- elif isinstance(strs, collections.Iterable):
+ elif isinstance(strs, Iterable):
symbols = list(strs)
else:
warnings.warn("Invalid argument to oneOf, expected string or iterable",
@@ -4734,7 +4756,7 @@ stringEnd = StringEnd().setName("stringEnd")
_escapedPunc = Word( _bslash, r"\[]-*.$+^?()~ ", exact=2 ).setParseAction(lambda s,l,t:t[0][1])
_escapedHexChar = Regex(r"\\0?[xX][0-9a-fA-F]+").setParseAction(lambda s,l,t:unichr(int(t[0].lstrip(r'\0x'),16)))
_escapedOctChar = Regex(r"\\0[0-7]+").setParseAction(lambda s,l,t:unichr(int(t[0][1:],8)))
-_singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | Word(printables, excludeChars=r'\]', exact=1) | Regex(r"\w", re.UNICODE)
+_singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | CharsNotIn(r'\]', exact=1)
_charRange = Group(_singleChar + Suppress("-") + _singleChar)
_reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group( OneOrMore( _charRange | _singleChar ) ).setResultsName("body") + "]"
diff --git a/src/pip/_vendor/pytoml/parser.py b/src/pip/_vendor/pytoml/parser.py
index e03a03fbd..9f94e9230 100644
--- a/src/pip/_vendor/pytoml/parser.py
+++ b/src/pip/_vendor/pytoml/parser.py
@@ -223,8 +223,8 @@ _float_re = re.compile(r'[+-]?(?:0|[1-9](?:_?\d)*)(?:\.\d(?:_?\d)*)?(?:[eE][+-]?
_datetime_re = re.compile(r'(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2})(\.\d+)?(?:Z|([+-]\d{2}):(\d{2}))')
_basicstr_ml_re = re.compile(r'(?:(?:|"|"")[^"\\\000-\011\013-\037])*')
-_litstr_re = re.compile(r"[^'\000-\037]*")
-_litstr_ml_re = re.compile(r"(?:(?:|'|'')(?:[^'\000-\011\013-\037]))*")
+_litstr_re = re.compile(r"[^'\000\010\012-\037]*")
+_litstr_ml_re = re.compile(r"(?:(?:|'|'')(?:[^'\000-\010\013-\037]))*")
def _p_value(s, object_pairs_hook):
pos = s.pos()
diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt
index 48cbf8c6d..9389dd947 100644
--- a/src/pip/_vendor/vendor.txt
+++ b/src/pip/_vendor/vendor.txt
@@ -9,15 +9,15 @@ msgpack-python==0.5.6
lockfile==0.12.2
progress==1.4
ipaddress==1.0.22 # Only needed on 2.6 and 2.7
-packaging==17.1
+packaging==18.0
pep517==0.2
-pyparsing==2.2.0
-pytoml==0.1.16
+pyparsing==2.2.1
+pytoml==0.1.19
retrying==1.3.3
requests==2.19.1
chardet==3.0.4
idna==2.7
urllib3==1.23
- certifi==2018.4.16
-setuptools==39.2.0
+ certifi==2018.8.24
+setuptools==40.4.3
webencodings==0.5.1
diff --git a/tasks/vendoring/__init__.py b/tasks/vendoring/__init__.py
index 554c53541..8bcdfcfbd 100644
--- a/tasks/vendoring/__init__.py
+++ b/tasks/vendoring/__init__.py
@@ -75,7 +75,8 @@ def rewrite_file_imports(item, vendored_libs):
"""Rewrite 'import xxx' and 'from xxx import' for vendored_libs"""
text = item.read_text(encoding='utf-8')
# Revendor pkg_resources.extern first
- text = re.sub(r'pkg_resources.extern', r'pip._vendor', text)
+ text = re.sub(r'pkg_resources\.extern', r'pip._vendor', text)
+ text = re.sub(r'from \.extern', r'from pip._vendor', text)
for lib in vendored_libs:
text = re.sub(
r'(\n\s*|^)import %s(\n\s*)' % lib,
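
Escaping the dot keeps the first substitution from matching an arbitrary
character between "pkg_resources" and "extern", and the new second rule also
rewrites setuptools-style relative imports. A standalone example of the two
rewrites shown above:

import re

text = 'from pkg_resources.extern import six\nfrom .extern import packaging\n'
text = re.sub(r'pkg_resources\.extern', r'pip._vendor', text)
text = re.sub(r'from \.extern', r'from pip._vendor', text)
print(text)
# from pip._vendor import six
# from pip._vendor import packaging
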
diff --git a/tests/data/packages/README.txt b/tests/data/packages/README.txt
index b2f61b513..a7fccc5bb 100644
--- a/tests/data/packages/README.txt
+++ b/tests/data/packages/README.txt
@@ -59,10 +59,6 @@ parent/child-0.1.tar.gz
The parent-0.1.tar.gz and child-0.1.tar.gz packages are used by
test_uninstall:test_uninstall_overlapping_package.
-paxpkg.tar.bz2
---------------
-tar with pax headers
-
pkgwithmpkg-1.0.tar.gz; pkgwithmpkg-1.0-py2.7-macosx10.7.mpkg.zip
-----------------------------------------------------------------
used for osx test case (tests.test_finder:test_no_mpkg)
diff --git a/tests/data/packages/paxpkg.tar.bz2 b/tests/data/packages/paxpkg.tar.bz2
deleted file mode 100644
index d4fe6f4a9..000000000
--- a/tests/data/packages/paxpkg.tar.bz2
+++ /dev/null
Binary files differ
diff --git a/tests/data/packages3/dinner/index.html b/tests/data/packages3/dinner/index.html
index 6afabea66..e258eb16b 100644
--- a/tests/data/packages3/dinner/index.html
+++ b/tests/data/packages3/dinner/index.html
@@ -1,6 +1,6 @@
-<html><head><title>PyPi Mirror</title></head>
+<html><head><title>PyPI Mirror</title></head>
<body>
- <h1>PyPi Mirror</h1>
+ <h1>PyPI Mirror</h1>
<h2>For testing --index-url with a file:// url for the index</h2>
<a href='Dinner-1.0.tar.gz'>Dinner-1.0.tar.gz</a><br />
<a href='Dinner-2.0.tar.gz'>Dinner-2.0.tar.gz</a><br />
diff --git a/tests/data/packages3/index.html b/tests/data/packages3/index.html
index a59811ba0..d66e70ec6 100644
--- a/tests/data/packages3/index.html
+++ b/tests/data/packages3/index.html
@@ -1,6 +1,6 @@
-<html><head><title>PyPi Mirror</title></head>
+<html><head><title>PyPI Mirror</title></head>
<body>
- <h1>PyPi Mirror</h1>
+ <h1>PyPI Mirror</h1>
<h2>For testing --index-url with a file:// url for the index</h2>
<a href='requiredinner'>requiredinner</a><br />
<a href='Dinner'>Dinner</a><br />
diff --git a/tests/data/packages3/requiredinner/index.html b/tests/data/packages3/requiredinner/index.html
index 52701cd5c..0981c9c72 100644
--- a/tests/data/packages3/requiredinner/index.html
+++ b/tests/data/packages3/requiredinner/index.html
@@ -1,6 +1,6 @@
-<html><head><title>PyPi Mirror</title></head>
+<html><head><title>PyPI Mirror</title></head>
<body>
- <h1>PyPi Mirror</h1>
+ <h1>PyPI Mirror</h1>
<h2>For testing --index-url with a file:// url for the index</h2>
<a href='requiredinner-1.0.tar.gz'>requiredinner=1.0.tar.gz</a><br />
</body>
diff --git a/tests/functional/test_completion.py b/tests/functional/test_completion.py
index bd7a5afc4..586a0a89b 100644
--- a/tests/functional/test_completion.py
+++ b/tests/functional/test_completion.py
@@ -204,6 +204,30 @@ def test_completion_not_files_after_option(script, data):
)
+@pytest.mark.parametrize("cl_opts", ["-U", "--user", "-h"])
+def test_completion_not_files_after_nonexpecting_option(script, data, cl_opts):
+ """
+    Test not getting completion files after options which are not applicable
+ (e.g. ``pip install``)
+ """
+ res, env = setup_completion(
+ script=script,
+ words=('pip install %s r' % cl_opts),
+ cword='2',
+ cwd=data.completion_paths,
+ )
+ assert not any(out in res.stdout for out in
+ ('requirements.txt', 'readme.txt',)), (
+ "autocomplete function completed <file> when "
+ "it should not complete"
+ )
+ assert not any(os.path.join(out, '') in res.stdout
+ for out in ('replay', 'resources')), (
+ "autocomplete function completed <dir> when "
+ "it should not complete"
+ )
+
+
def test_completion_directories_after_option(script, data):
"""
Test getting completion <dir> after options in command
diff --git a/tests/functional/test_download.py b/tests/functional/test_download.py
index 5b03324be..40f370127 100644
--- a/tests/functional/test_download.py
+++ b/tests/functional/test_download.py
@@ -57,7 +57,7 @@ def test_download_wheel(script, data):
@pytest.mark.network
def test_single_download_from_requirements_file(script):
"""
- It should support download (in the scratch path) from PyPi from a
+ It should support download (in the scratch path) from PyPI from a
requirements file
"""
script.scratch_path.join("test-req.txt").write(textwrap.dedent("""
diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py
index fa586c0ae..3cbc2fe96 100644
--- a/tests/functional/test_install.py
+++ b/tests/functional/test_install.py
@@ -522,13 +522,6 @@ def test_install_global_option(script):
assert '0.1\n' in result.stdout
-def test_install_with_pax_header(script, data):
- """
- test installing from a tarball with pax header for python<2.6
- """
- script.pip('install', 'paxpkg.tar.bz2', cwd=data.packages)
-
-
def test_install_with_hacked_egg_info(script, data):
"""
test installing a package which defines its own egg_info class
@@ -629,7 +622,7 @@ def test_install_folder_using_relative_path(script):
@pytest.mark.network
def test_install_package_which_contains_dev_in_name(script):
"""
- Test installing package from pypi which contains 'dev' in name
+ Test installing package from PyPI which contains 'dev' in name
"""
result = script.pip('install', 'django-devserver==0.0.4')
devserver_folder = script.site_packages / 'devserver'
diff --git a/tests/functional/test_install_vcs_git.py b/tests/functional/test_install_vcs_git.py
index 6648e44fe..77296baf1 100644
--- a/tests/functional/test_install_vcs_git.py
+++ b/tests/functional/test_install_vcs_git.py
@@ -10,15 +10,32 @@ from tests.lib.git_submodule_helpers import (
from tests.lib.local_repos import local_checkout
+def _get_editable_repo_dir(script, package_name):
+ """
+ Return the repository directory for an editable install.
+ """
+ return script.venv_path / 'src' / package_name
+
+
def _get_editable_branch(script, package_name):
"""
Return the current branch of an editable install.
"""
- repo_dir = script.venv_path / 'src' / package_name
+ repo_dir = _get_editable_repo_dir(script, package_name)
result = script.run(
'git', 'rev-parse', '--abbrev-ref', 'HEAD', cwd=repo_dir
)
+ return result.stdout.strip()
+
+
+def _get_branch_remote(script, package_name, branch):
+ """
+    Return the remote that the given branch of an editable install tracks.
+ """
+ repo_dir = _get_editable_repo_dir(script, package_name)
+ result = script.run(
+ 'git', 'config', 'branch.{}.remote'.format(branch), cwd=repo_dir
+ )
return result.stdout.strip()
@@ -363,7 +380,69 @@ def test_git_works_with_editable_non_origin_repo(script):
assert "version-pkg==0.1" in result.stdout
-def test_editable_non_master_default_branch(script):
+def test_editable__no_revision(script):
+ """
+ Test a basic install in editable mode specifying no revision.
+ """
+ version_pkg_path = _create_test_package(script)
+ _install_version_pkg_only(script, version_pkg_path)
+
+ branch = _get_editable_branch(script, 'version-pkg')
+ assert branch == 'master'
+
+ remote = _get_branch_remote(script, 'version-pkg', 'master')
+ assert remote == 'origin'
+
+
+def test_editable__branch_with_sha_same_as_default(script):
+ """
+ Test installing in editable mode a branch whose sha matches the sha
+ of the default branch, but is different from the default branch.
+ """
+ version_pkg_path = _create_test_package(script)
+ # Create a second branch with the same SHA.
+ script.run(
+ 'git', 'branch', 'develop', expect_stderr=True,
+ cwd=version_pkg_path,
+ )
+ _install_version_pkg_only(
+ script, version_pkg_path, rev='develop', expect_stderr=True
+ )
+
+ branch = _get_editable_branch(script, 'version-pkg')
+ assert branch == 'develop'
+
+ remote = _get_branch_remote(script, 'version-pkg', 'develop')
+ assert remote == 'origin'
+
+
+def test_editable__branch_with_sha_different_from_default(script):
+ """
+ Test installing in editable mode a branch whose sha is different from
+ the sha of the default branch.
+ """
+ version_pkg_path = _create_test_package(script)
+ # Create a second branch.
+ script.run(
+ 'git', 'branch', 'develop', expect_stderr=True,
+ cwd=version_pkg_path,
+ )
+ # Add another commit to the master branch to give it a different sha.
+ _change_test_package_version(script, version_pkg_path)
+
+ version = _install_version_pkg(
+ script, version_pkg_path, rev='develop', expect_stderr=True
+ )
+ assert version == '0.1'
+
+ branch = _get_editable_branch(script, 'version-pkg')
+ assert branch == 'develop'
+
+ remote = _get_branch_remote(script, 'version-pkg', 'develop')
+ assert remote == 'origin'
+
+
+def test_editable__non_master_default_branch(script):
"""
Test the branch you get after an editable install from a remote repo
with a non-master default branch.
@@ -376,8 +455,9 @@ def test_editable_non_master_default_branch(script):
cwd=version_pkg_path,
)
_install_version_pkg_only(script, version_pkg_path)
+
branch = _get_editable_branch(script, 'version-pkg')
- assert 'release' == branch
+ assert branch == 'release'
def test_reinstalling_works_with_editable_non_master_branch(script):
diff --git a/tests/functional/test_search.py b/tests/functional/test_search.py
index e65e72fb0..46e6a6647 100644
--- a/tests/functional/test_search.py
+++ b/tests/functional/test_search.py
@@ -25,7 +25,7 @@ def test_version_compare():
def test_pypi_xml_transformation():
"""
- Test transformation of data structures (pypi xmlrpc to custom list).
+ Test transformation of data structures (PyPI xmlrpc to custom list).
"""
pypi_hits = [
diff --git a/tests/functional/test_vcs_git.py b/tests/functional/test_vcs_git.py
index 656cc33ed..2a54af58f 100644
--- a/tests/functional/test_vcs_git.py
+++ b/tests/functional/test_vcs_git.py
@@ -37,9 +37,9 @@ def add_commits(script, dest, count):
return shas
-def check_rev(repo_dir, rev, expected_sha):
+def check_rev(repo_dir, rev, expected):
git = Git()
- assert git.get_revision_sha(repo_dir, rev) == expected_sha
+ assert git.get_revision_sha(repo_dir, rev) == expected
def test_git_dir_ignored():
@@ -70,6 +70,27 @@ def test_git_work_tree_ignored():
git.run_command(['status', temp_dir], extra_environ=env, cwd=temp_dir)
+def test_get_branch(script, tmpdir):
+ repo_dir = str(tmpdir)
+ script.run('git', 'init', cwd=repo_dir)
+ sha = do_commit(script, repo_dir)
+
+ git = Git()
+ assert git.get_branch(repo_dir) == 'master'
+
+ # Switch to a branch with the same SHA as "master" but whose name
+ # is alphabetically after.
+ script.run(
+ 'git', 'checkout', '-b', 'release', cwd=repo_dir,
+ expect_stderr=True,
+ )
+ assert git.get_branch(repo_dir) == 'release'
+
+ # Also test the detached HEAD case.
+ script.run('git', 'checkout', sha, cwd=repo_dir, expect_stderr=True)
+ assert git.get_branch(repo_dir) is None
+
+
def test_get_revision_sha(script):
with TempDirectory(kind="testing") as temp:
repo_dir = temp.path
@@ -102,9 +123,9 @@ def test_get_revision_sha(script):
script.run('git', 'tag', 'aaa/v1.0', head_sha, cwd=repo_dir)
script.run('git', 'tag', 'zzz/v1.0', head_sha, cwd=repo_dir)
- check_rev(repo_dir, 'v1.0', tag_sha)
- check_rev(repo_dir, 'v2.0', tag_sha)
- check_rev(repo_dir, 'origin-branch', origin_sha)
+ check_rev(repo_dir, 'v1.0', (tag_sha, False))
+ check_rev(repo_dir, 'v2.0', (tag_sha, False))
+ check_rev(repo_dir, 'origin-branch', (origin_sha, True))
ignored_names = [
# Local branches should be ignored.
@@ -122,7 +143,7 @@ def test_get_revision_sha(script):
'does-not-exist',
]
for name in ignored_names:
- check_rev(repo_dir, name, None)
+ check_rev(repo_dir, name, (None, False))
@pytest.mark.network
diff --git a/tests/lib/configuration_helpers.py b/tests/lib/configuration_helpers.py
index 1164be8a7..bd9ab79b5 100644
--- a/tests/lib/configuration_helpers.py
+++ b/tests/lib/configuration_helpers.py
@@ -34,13 +34,13 @@ class ConfigurationMixin(object):
old = self.configuration._load_config_files
@functools.wraps(old)
- def overidden():
+ def overridden():
# Manual Overload
self.configuration._config[variant].update(di)
self.configuration._parsers[variant].append((None, None))
return old()
- self.configuration._load_config_files = overidden
+ self.configuration._load_config_files = overridden
@contextlib.contextmanager
def tmpfile(self, contents):
diff --git a/tests/unit/test_configuration.py b/tests/unit/test_configuration.py
index ef1ddf96a..9fe5bd98a 100644
--- a/tests/unit/test_configuration.py
+++ b/tests/unit/test_configuration.py
@@ -68,6 +68,23 @@ class TestConfigurationLoading(ConfigurationMixin):
with pytest.raises(ConfigurationError):
self.configuration.get_value(":env:.version")
+ def test_environment_config_errors_if_malformed(self):
+ contents = """
+ test]
+ hello = 4
+ """
+ with self.tmpfile(contents) as config_file:
+ os.environ["PIP_CONFIG_FILE"] = config_file
+ with pytest.raises(ConfigurationError) as err:
+ self.configuration.load()
+
+ assert "section header" in str(err.value) # error kind
+ assert "1" in str(err.value) # line number
+ assert ( # file name
+ config_file in str(err.value) or
+ repr(config_file) in str(err.value)
+ )
+
class TestConfigurationPrecedence(ConfigurationMixin):
# Tests for methods to that determine the order of precedence of
diff --git a/tests/unit/test_finder.py b/tests/unit/test_finder.py
index e719b21f1..05ebed3b5 100644
--- a/tests/unit/test_finder.py
+++ b/tests/unit/test_finder.py
@@ -95,7 +95,7 @@ def test_finder_detects_latest_already_satisfied_find_links(data):
def test_finder_detects_latest_already_satisfied_pypi_links():
"""Test PackageFinder detects latest already satisfied using pypi links"""
req = install_req_from_line('initools', None)
- # the latest initools on pypi is 0.3.1
+ # the latest initools on PyPI is 0.3.1
latest_version = "0.3.1"
satisfied_by = Mock(
location="/path",
diff --git a/tests/unit/test_index.py b/tests/unit/test_index.py
index 4ede29589..ce81a9deb 100644
--- a/tests/unit/test_index.py
+++ b/tests/unit/test_index.py
@@ -1,9 +1,14 @@
+import logging
import os.path
import pytest
+from mock import Mock
+from pip._vendor import html5lib, requests
from pip._internal.download import PipSession
-from pip._internal.index import HTMLPage, Link, PackageFinder
+from pip._internal.index import (
+ Link, PackageFinder, _determine_base_url, _get_html_page, egg_info_matches,
+)
def test_sort_locations_file_expand_dir(data):
@@ -107,8 +112,11 @@ class TestLink(object):
),
],
)
-def test_base_url(html, url, expected):
- assert HTMLPage(html, url).base_url == expected
+def test_determine_base_url(html, url, expected):
+ document = html5lib.parse(
+ html, transport_encoding=None, namespaceHTMLElements=False,
+ )
+ assert _determine_base_url(document, url) == expected
class MockLogger(object):
@@ -154,3 +162,58 @@ def test_get_formatted_locations_basic_auth():
result = finder.get_formatted_locations()
assert 'user' not in result and 'pass' not in result
+
+
+@pytest.mark.parametrize(
+ ("egg_info", "search_name", "expected"),
+ [
+ # Trivial.
+ ("pip-18.0", "pip", "18.0"),
+ ("pip-18.0", None, "18.0"),
+
+ # Non-canonical names.
+ ("Jinja2-2.10", "jinja2", "2.10"),
+ ("jinja2-2.10", "Jinja2", "2.10"),
+
+ # Ambiguous names. Should be smart enough if the package name is
+ # provided, otherwise make a guess.
+ ("foo-2-2", "foo", "2-2"),
+ ("foo-2-2", "foo-2", "2"),
+ ("foo-2-2", None, "2-2"),
+ ("im-valid", None, "valid"),
+
+ # Invalid names.
+ ("invalid", None, None),
+ ("im_invalid", None, None),
+ ("the-package-name-8.19", "does-not-match", None),
+ ],
+)
+def test_egg_info_matches(egg_info, search_name, expected):
+ link = None # Only used for reporting.
+ version = egg_info_matches(egg_info, search_name, link)
+ assert version == expected
+
+
+def test_request_http_error(caplog):
+ caplog.set_level(logging.DEBUG)
+ link = Link('http://localhost')
+ session = Mock(PipSession)
+ session.get.return_value = resp = Mock()
+ resp.raise_for_status.side_effect = requests.HTTPError('Http error')
+ assert _get_html_page(link, session=session) is None
+ assert (
+ 'Could not fetch URL http://localhost: Http error - skipping'
+ in caplog.text
+ )
+
+
+def test_request_retries(caplog):
+ caplog.set_level(logging.DEBUG)
+ link = Link('http://localhost')
+ session = Mock(PipSession)
+ session.get.side_effect = requests.exceptions.RetryError('Retry error')
+ assert _get_html_page(link, session=session) is None
+ assert (
+ 'Could not fetch URL http://localhost: Retry error - skipping'
+ in caplog.text
+ )
diff --git a/tests/unit/test_index_html_page.py b/tests/unit/test_index_html_page.py
new file mode 100644
index 000000000..c872ad065
--- /dev/null
+++ b/tests/unit/test_index_html_page.py
@@ -0,0 +1,162 @@
+import logging
+
+import mock
+import pytest
+from pip._vendor.six.moves.urllib import request as urllib_request
+
+from pip._internal.download import PipSession
+from pip._internal.index import (
+ Link, _get_html_page, _get_html_response, _NotHTML, _NotHTTP,
+)
+
+
+@pytest.mark.parametrize(
+ "url",
+ [
+ "ftp://python.org/python-3.7.1.zip",
+ "file:///opt/data/pip-18.0.tar.gz",
+ ],
+)
+def test_get_html_response_archive_to_naive_scheme(url):
+ """
+ `_get_html_response()` should error on an archive-like URL if the scheme
+ does not allow "poking" without getting data.
+ """
+ with pytest.raises(_NotHTTP):
+ _get_html_response(url, session=mock.Mock(PipSession))
+
+
+@pytest.mark.parametrize(
+ "url, content_type",
+ [
+ ("http://python.org/python-3.7.1.zip", "application/zip"),
+ ("https://pypi.org/pip-18.0.tar.gz", "application/gzip"),
+ ],
+)
+def test_get_html_response_archive_to_http_scheme(url, content_type):
+ """
+ `_get_html_response()` should send a HEAD request on an archive-like URL
+ if the scheme supports it, and raise `_NotHTML` if the response isn't HTML.
+ """
+ session = mock.Mock(PipSession)
+ session.head.return_value = mock.Mock(**{
+ "request.method": "HEAD",
+ "headers": {"Content-Type": content_type},
+ })
+
+ with pytest.raises(_NotHTML) as ctx:
+ _get_html_response(url, session=session)
+
+ session.assert_has_calls([
+ mock.call.head(url, allow_redirects=True),
+ ])
+ assert ctx.value.args == (content_type, "HEAD")
+
+
+@pytest.mark.parametrize(
+ "url",
+ [
+ "http://python.org/python-3.7.1.zip",
+ "https://pypi.org/pip-18.0.tar.gz",
+ ],
+)
+def test_get_html_response_archive_to_http_scheme_is_html(url):
+ """
+ `_get_html_response()` should work with archive-like URLs if the HEAD
+ request is responded with text/html.
+ """
+ session = mock.Mock(PipSession)
+ session.head.return_value = mock.Mock(**{
+ "request.method": "HEAD",
+ "headers": {"Content-Type": "text/html"},
+ })
+ session.get.return_value = mock.Mock(headers={"Content-Type": "text/html"})
+
+ resp = _get_html_response(url, session=session)
+
+ assert resp is not None
+ assert session.mock_calls == [
+ mock.call.head(url, allow_redirects=True),
+ mock.call.head().raise_for_status(),
+ mock.call.get(url, headers={
+ "Accept": "text/html", "Cache-Control": "max-age=0",
+ }),
+ mock.call.get().raise_for_status(),
+ ]
+
+
+@pytest.mark.parametrize(
+ "url",
+ [
+ "https://pypi.org/simple/pip",
+ "https://pypi.org/simple/pip/",
+ "https://python.org/sitemap.xml",
+ ],
+)
+def test_get_html_response_no_head(url):
+ """
+ `_get_html_response()` shouldn't send a HEAD request if the URL does not
+ look like an archive, only the GET request that retrieves data.
+ """
+ session = mock.Mock(PipSession)
+
+ # Mock the headers dict to ensure it is accessed.
+ session.get.return_value = mock.Mock(headers=mock.Mock(**{
+ "get.return_value": "text/html",
+ }))
+
+ resp = _get_html_response(url, session=session)
+
+ assert resp is not None
+ assert session.head.call_count == 0
+ assert session.get.mock_calls == [
+ mock.call(url, headers={
+ "Accept": "text/html", "Cache-Control": "max-age=0",
+ }),
+ mock.call().raise_for_status(),
+ mock.call().headers.get("Content-Type", ""),
+ ]
+
+
+@pytest.mark.parametrize(
+ "url, vcs_scheme",
+ [
+ ("svn+http://pypi.org/something", "svn"),
+ ("git+https://github.com/pypa/pip.git", "git"),
+ ],
+)
+def test_get_html_page_invalid_scheme(caplog, url, vcs_scheme):
+ """`_get_html_page()` should error if an invalid scheme is given.
+
+ Only file:, http:, https:, and ftp: are allowed.
+ """
+ with caplog.at_level(logging.DEBUG):
+ page = _get_html_page(Link(url), session=mock.Mock(PipSession))
+
+ assert page is None
+ assert caplog.record_tuples == [
+ (
+ "pip._internal.index",
+ logging.DEBUG,
+ "Cannot look at {} URL {}".format(vcs_scheme, url),
+ ),
+ ]
+
+
+def test_get_html_page_directory_append_index(tmpdir):
+ """`_get_html_page()` should append "index.html" to a directory URL.
+ """
+ dirpath = tmpdir.mkdir("something")
+ dir_url = "file:///{}".format(
+ urllib_request.pathname2url(dirpath).lstrip("/"),
+ )
+
+ session = mock.Mock(PipSession)
+ with mock.patch("pip._internal.index._get_html_response") as mock_func:
+ _get_html_page(Link(dir_url), session=session)
+ assert mock_func.mock_calls == [
+ mock.call(
+ "{}/index.html".format(dir_url.rstrip("/")),
+ session=session,
+ ),
+ ]
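
Taken together, these tests pin down a small request protocol for
_get_html_response(): archive-looking URLs on schemes that cannot be probed
raise _NotHTTP, archive-looking http(s) URLs get a HEAD request that must
report text/html before the real GET, and everything else goes straight to GET
with an "Accept: text/html" header. A standalone sketch of that decision
(hypothetical helper names, Python 3 stdlib only, not pip's implementation):

from urllib.parse import urlparse

# Illustrative subset of archive suffixes.
ARCHIVE_SUFFIXES = ('.zip', '.whl', '.tar.gz', '.tar.bz2', '.tgz')


def looks_like_archive(url):
    return urlparse(url).path.rstrip('/').endswith(ARCHIVE_SUFFIXES)


def plan_requests(url):
    """Return the request sequence the tests above expect for `url`."""
    scheme = urlparse(url).scheme
    if looks_like_archive(url):
        if scheme not in ('http', 'https'):
            # Corresponds to _NotHTTP in the tests above.
            raise ValueError('cannot probe %s without downloading it' % url)
        return ['HEAD (require Content-Type: text/html)',
                'GET (Accept: text/html)']
    return ['GET (Accept: text/html)']
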
diff --git a/tests/unit/test_req.py b/tests/unit/test_req.py
index 09941d0f9..10a20b7ac 100644
--- a/tests/unit/test_req.py
+++ b/tests/unit/test_req.py
@@ -66,7 +66,8 @@ class TestRequirementSet(object):
build_dir = os.path.join(self.tempdir, 'build', 'simple')
os.makedirs(build_dir)
- open(os.path.join(build_dir, "setup.py"), 'w')
+ with open(os.path.join(build_dir, "setup.py"), 'w'):
+ pass
reqset = RequirementSet()
req = install_req_from_line('simple')
req.is_direct = True
diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py
index 947d25892..591c8ab54 100644
--- a/tests/unit/test_utils.py
+++ b/tests/unit/test_utils.py
@@ -24,8 +24,8 @@ from pip._internal.utils.glibc import check_glibc_version
from pip._internal.utils.hashes import Hashes, MissingHashes
from pip._internal.utils.misc import (
call_subprocess, egg_link_path, ensure_dir, get_installed_distributions,
- get_prog, normalize_path, remove_auth_from_url, rmtree,
- split_auth_from_netloc, untar_file, unzip_file,
+ get_prog, make_vcs_requirement_url, normalize_path, remove_auth_from_url,
+ rmtree, split_auth_from_netloc, untar_file, unzip_file,
)
from pip._internal.utils.packaging import check_dist_requires_python
from pip._internal.utils.temp_dir import TempDirectory
@@ -627,6 +627,22 @@ def test_call_subprocess_closes_stdin():
call_subprocess([sys.executable, '-c', 'input()'])
+@pytest.mark.parametrize('args, expected', [
+ # Test without subdir.
+ (('git+https://example.com/pkg', 'dev', 'myproj'),
+ 'git+https://example.com/pkg@dev#egg=myproj'),
+ # Test with subdir.
+ (('git+https://example.com/pkg', 'dev', 'myproj', 'sub/dir'),
+ 'git+https://example.com/pkg@dev#egg=myproj&subdirectory=sub/dir'),
+ # Test with None subdir.
+ (('git+https://example.com/pkg', 'dev', 'myproj', None),
+ 'git+https://example.com/pkg@dev#egg=myproj'),
+])
+def test_make_vcs_requirement_url(args, expected):
+ actual = make_vcs_requirement_url(*args)
+ assert actual == expected
+
+
@pytest.mark.parametrize('netloc, expected', [
# Test a basic case.
('example.com', ('example.com', (None, None))),
diff --git a/tests/unit/test_vcs.py b/tests/unit/test_vcs.py
index 8d1b51afe..61661d61f 100644
--- a/tests/unit/test_vcs.py
+++ b/tests/unit/test_vcs.py
@@ -109,7 +109,7 @@ def test_git_get_src_requirements(git, dist):
@patch('pip._internal.vcs.git.Git.get_revision_sha')
def test_git_resolve_revision_rev_exists(get_sha_mock):
- get_sha_mock.return_value = '123456'
+ get_sha_mock.return_value = ('123456', False)
git = Git()
rev_options = git.make_rev_options('develop')
@@ -120,7 +120,7 @@ def test_git_resolve_revision_rev_exists(get_sha_mock):
@patch('pip._internal.vcs.git.Git.get_revision_sha')
def test_git_resolve_revision_rev_not_found(get_sha_mock):
- get_sha_mock.return_value = None
+ get_sha_mock.return_value = (None, False)
git = Git()
rev_options = git.make_rev_options('develop')
@@ -131,7 +131,7 @@ def test_git_resolve_revision_rev_not_found(get_sha_mock):
@patch('pip._internal.vcs.git.Git.get_revision_sha')
def test_git_resolve_revision_not_found_warning(get_sha_mock, caplog):
- get_sha_mock.return_value = None
+ get_sha_mock.return_value = (None, False)
git = Git()
url = 'git+https://git.example.com'
diff --git a/tools/mypy-requirements.txt b/tools/mypy-requirements.txt
new file mode 100644
index 000000000..47f2fcd86
--- /dev/null
+++ b/tools/mypy-requirements.txt
@@ -0,0 +1 @@
+mypy == 0.620
diff --git a/tools/tox_pip.py b/tools/tox_pip.py
new file mode 100644
index 000000000..8d91fbf56
--- /dev/null
+++ b/tools/tox_pip.py
@@ -0,0 +1,28 @@
+import os
+import shutil
+import subprocess
+import sys
+from glob import glob
+
+VIRTUAL_ENV = os.environ['VIRTUAL_ENV']
+TOX_PIP_DIR = os.path.join(VIRTUAL_ENV, 'pip')
+
+
+def pip(args):
+ # First things first, get a recent (stable) version of pip.
+ if not os.path.exists(TOX_PIP_DIR):
+ subprocess.check_call([sys.executable, '-m', 'pip',
+ '--disable-pip-version-check',
+ 'install', '-t', TOX_PIP_DIR,
+ 'pip'])
+ shutil.rmtree(glob(os.path.join(TOX_PIP_DIR, 'pip-*.dist-info'))[0])
+ # And use that version.
+ pypath = os.environ.get('PYTHONPATH')
+ pypath = pypath.split(os.pathsep) if pypath is not None else []
+ pypath.insert(0, TOX_PIP_DIR)
+ os.environ['PYTHONPATH'] = os.pathsep.join(pypath)
+ subprocess.check_call([sys.executable, '-m', 'pip'] + args)
+
+
+if __name__ == '__main__':
+ pip(sys.argv[1:])
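
The tox.ini hunk below routes install_command and list_dependencies_command
through this script, so every in-environment pip call uses the bootstrapped
stable pip rather than the checkout under test. Roughly what tox ends up
executing after substitution (arguments and working directory here are only
illustrative):

import subprocess
import sys

# Illustrative stand-in for '{[helpers]pip} install {opts} {packages}' once
# tox substitutes the [helpers] entry; tox_pip.py then re-invokes
# 'python -m pip' with the stable copy forced onto PYTHONPATH.
subprocess.check_call(
    [sys.executable, 'tools/tox_pip.py', 'install', '-e', '.']
)
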
diff --git a/tox.ini b/tox.ini
index 082af73ef..c48d695de 100644
--- a/tox.ini
+++ b/tox.ini
@@ -3,6 +3,11 @@ envlist =
docs, packaging, lint-py2, lint-py3, mypy,
py27, py34, py35, py36, py37, py38, pypy, pypy3
+[helpers]
+# Wrapper for calls to pip that make sure the version being used is the
+# original virtualenv (stable) version, and not the code being tested.
+pip = python {toxinidir}/tools/tox_pip.py
+
[testenv]
passenv = CI GIT_SSL_CAINFO
setenv =
@@ -11,7 +16,8 @@ setenv =
LC_CTYPE = en_US.UTF-8
deps = -r{toxinidir}/tools/tests-requirements.txt
commands = pytest --timeout 300 []
-install_command = python -m pip install {opts} {packages}
+install_command = {[helpers]pip} install {opts} {packages}
+list_dependencies_command = {[helpers]pip} freeze --all
usedevelop = True
[testenv:coverage-py3]
@@ -60,7 +66,7 @@ commands = {[lint]commands}
[testenv:mypy]
skip_install = True
basepython = python3
-deps = mypy
+deps = -r{toxinidir}/tools/mypy-requirements.txt
commands =
mypy src
mypy src -2