summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorDonald Stufft <donald@stufft.io>2014-02-20 19:36:56 -0500
committerDonald Stufft <donald@stufft.io>2014-02-20 19:36:56 -0500
commit3435cf36ebe675c7a32eef40d2a201f33706230a (patch)
tree22fcdcf538bbe758c0e6fa9d79bafa5df688fa98
parent9e39d9d8ca701d40bf329c780049d30dac394dc3 (diff)
parent8c6a6a01dfcdf09409f527e94d81b1b8c941d464 (diff)
downloadpip-3435cf36ebe675c7a32eef40d2a201f33706230a.tar.gz
Merge branch '1.5.X'1.5.3
Conflicts: CHANGES.txt
-rw-r--r--CHANGES.txt21
-rwxr-xr-xpip/__init__.py2
-rw-r--r--pip/_vendor/six.py286
-rw-r--r--pip/_vendor/vendor.txt2
-rw-r--r--pip/baseparser.py5
-rw-r--r--pip/commands/install.py10
-rw-r--r--pip/commands/wheel.py16
-rw-r--r--pip/download.py82
-rw-r--r--pip/req.py133
-rw-r--r--pip/wheel.py70
-rw-r--r--tests/data/packages/README.txt5
-rw-r--r--tests/data/packages/brokenwheel-1.0-py2.py3-none-any.whlbin0 -> 1345 bytes
-rw-r--r--tests/data/packages/colander-0.9.9-py2.py3-none-any.whlbin0 -> 83733 bytes
-rw-r--r--tests/data/packages/futurewheel-1.9-py2.py3-none-any.whlbin0 -> 1778 bytes
-rw-r--r--tests/data/packages/futurewheel-3.0-py2.py3-none-any.whlbin0 -> 1770 bytes
-rw-r--r--tests/data/packages/translationstring-1.1.tar.gzbin0 -> 28524 bytes
-rw-r--r--tests/functional/test_install_download.py29
-rw-r--r--tests/functional/test_install_wheel.py28
-rw-r--r--tests/functional/test_wheel.py17
-rw-r--r--tests/unit/test_download.py102
-rw-r--r--tests/unit/test_wheel.py43
21 files changed, 744 insertions, 107 deletions
diff --git a/CHANGES.txt b/CHANGES.txt
index 604ec5000..afad818af 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -1,3 +1,24 @@
+**1.5.3 (2014-02-20)**
+
+
+* **DEPRECATION** ``pip install --build`` and ``pip install --no-clean`` are now
+ deprecated. See Issue #906 for discussion.
+
+* Fixed #1112. Couldn't download directly from wheel paths/urls, and when wheel
+ downloads did occur using requirement specifiers, dependencies weren't
+ downloaded (PR #1527)
+
+* Fixed #1320. ``pip wheel`` was not downloading wheels that already existed (PR
+ #1524)
+
+* Fixed #1111. ``pip install --download`` was failing using local
+ ``--find-links`` (PR #1524)
+
+* Workaround for Python bug http://bugs.python.org/issue20053 (PR #1544)
+
+* Don't pass a unicode __file__ to setup.py on Python 2.x (PR #1583)
+
+* Verify that the Wheel version is compatible with this pip (PR #1569)
**1.5.2 (2014-01-26)**
diff --git a/pip/__init__.py b/pip/__init__.py
index e169fa8e7..5466c5e76 100755
--- a/pip/__init__.py
+++ b/pip/__init__.py
@@ -19,7 +19,7 @@ import pip.cmdoptions
cmdoptions = pip.cmdoptions
# The version as used in the setup.py and the docs conf.py
-__version__ = "1.5.2"
+__version__ = "1.5.3"
def autocomplete():
diff --git a/pip/_vendor/six.py b/pip/_vendor/six.py
index eae31454a..7ec7f1bec 100644
--- a/pip/_vendor/six.py
+++ b/pip/_vendor/six.py
@@ -1,33 +1,35 @@
"""Utilities for writing code that runs on Python 2 and 3"""
-# Copyright (c) 2010-2013 Benjamin Peterson
+# Copyright (c) 2010-2014 Benjamin Peterson
#
-# Permission is hereby granted, free of charge, to any person obtaining a copy of
-# this software and associated documentation files (the "Software"), to deal in
-# the Software without restriction, including without limitation the rights to
-# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
-# the Software, and to permit persons to whom the Software is furnished to do so,
-# subject to the following conditions:
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
-# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
-# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
-# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
-# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
import operator
import sys
import types
__author__ = "Benjamin Peterson <benjamin@python.org>"
-__version__ = "1.3.0"
+__version__ = "1.5.2"
-# True if we are running on Python 3.
+# Useful for very coarse version differentiation.
+PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
if PY3:
@@ -61,7 +63,7 @@ else:
else:
# 64-bit
MAXSIZE = int((1 << 63) - 1)
- del X
+ del X
def _add_doc(func, doc):
@@ -82,9 +84,9 @@ class _LazyDescr(object):
def __get__(self, obj, tp):
result = self._resolve()
- setattr(obj, self.name, result)
+ setattr(obj, self.name, result) # Invokes __set__.
# This is a bit ugly, but it avoids running this again.
- delattr(tp, self.name)
+ delattr(obj.__class__, self.name)
return result
@@ -102,6 +104,35 @@ class MovedModule(_LazyDescr):
def _resolve(self):
return _import_module(self.mod)
+ def __getattr__(self, attr):
+ # Hack around the Django autoreloader. The reloader tries to get
+ # __file__ or __name__ of every module in sys.modules. This doesn't work
+ # well if this MovedModule is for an module that is unavailable on this
+ # machine (like winreg on Unix systems). Thus, we pretend __file__ and
+ # __name__ don't exist if the module hasn't been loaded yet. See issues
+ # #51 and #53.
+ if attr in ("__file__", "__name__") and self.mod not in sys.modules:
+ raise AttributeError
+ _module = self._resolve()
+ value = getattr(_module, attr)
+ setattr(self, attr, value)
+ return value
+
+
+class _LazyModule(types.ModuleType):
+
+ def __init__(self, name):
+ super(_LazyModule, self).__init__(name)
+ self.__doc__ = self.__class__.__doc__
+
+ def __dir__(self):
+ attrs = ["__doc__", "__name__"]
+ attrs += [attr.name for attr in self._moved_attributes]
+ return attrs
+
+ # Subclasses should override this
+ _moved_attributes = []
+
class MovedAttribute(_LazyDescr):
@@ -129,24 +160,29 @@ class MovedAttribute(_LazyDescr):
-class _MovedItems(types.ModuleType):
+class _MovedItems(_LazyModule):
"""Lazy loading of moved objects"""
_moved_attributes = [
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
+ MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
+ MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
MovedAttribute("reduce", "__builtin__", "functools"),
MovedAttribute("StringIO", "StringIO", "io"),
+ MovedAttribute("UserString", "UserString", "collections"),
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
+ MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
MovedModule("builtins", "__builtin__"),
MovedModule("configparser", "ConfigParser"),
MovedModule("copyreg", "copy_reg"),
+ MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
MovedModule("http_cookies", "Cookie", "http.cookies"),
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
@@ -162,12 +198,14 @@ _moved_attributes = [
MovedModule("queue", "Queue"),
MovedModule("reprlib", "repr"),
MovedModule("socketserver", "SocketServer"),
+ MovedModule("_thread", "thread", "_thread"),
MovedModule("tkinter", "Tkinter"),
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
+ MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
MovedModule("tkinter_colorchooser", "tkColorChooser",
@@ -179,14 +217,167 @@ _moved_attributes = [
MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
"tkinter.simpledialog"),
+ MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
+ MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
+ MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
+ MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
MovedModule("winreg", "_winreg"),
]
for attr in _moved_attributes:
setattr(_MovedItems, attr.name, attr)
+ if isinstance(attr, MovedModule):
+ sys.modules[__name__ + ".moves." + attr.name] = attr
+del attr
+
+_MovedItems._moved_attributes = _moved_attributes
+
+moves = sys.modules[__name__ + ".moves"] = _MovedItems(__name__ + ".moves")
+
+
+class Module_six_moves_urllib_parse(_LazyModule):
+ """Lazy loading of moved objects in six.moves.urllib_parse"""
+
+
+_urllib_parse_moved_attributes = [
+ MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
+ MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
+ MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
+ MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
+ MovedAttribute("urljoin", "urlparse", "urllib.parse"),
+ MovedAttribute("urlparse", "urlparse", "urllib.parse"),
+ MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
+ MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
+ MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
+ MovedAttribute("quote", "urllib", "urllib.parse"),
+ MovedAttribute("quote_plus", "urllib", "urllib.parse"),
+ MovedAttribute("unquote", "urllib", "urllib.parse"),
+ MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
+ MovedAttribute("urlencode", "urllib", "urllib.parse"),
+]
+for attr in _urllib_parse_moved_attributes:
+ setattr(Module_six_moves_urllib_parse, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
+
+sys.modules[__name__ + ".moves.urllib_parse"] = sys.modules[__name__ + ".moves.urllib.parse"] = Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse")
+
+
+class Module_six_moves_urllib_error(_LazyModule):
+ """Lazy loading of moved objects in six.moves.urllib_error"""
+
+
+_urllib_error_moved_attributes = [
+ MovedAttribute("URLError", "urllib2", "urllib.error"),
+ MovedAttribute("HTTPError", "urllib2", "urllib.error"),
+ MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
+]
+for attr in _urllib_error_moved_attributes:
+ setattr(Module_six_moves_urllib_error, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
+
+sys.modules[__name__ + ".moves.urllib_error"] = sys.modules[__name__ + ".moves.urllib.error"] = Module_six_moves_urllib_error(__name__ + ".moves.urllib.error")
+
+
+class Module_six_moves_urllib_request(_LazyModule):
+ """Lazy loading of moved objects in six.moves.urllib_request"""
+
+
+_urllib_request_moved_attributes = [
+ MovedAttribute("urlopen", "urllib2", "urllib.request"),
+ MovedAttribute("install_opener", "urllib2", "urllib.request"),
+ MovedAttribute("build_opener", "urllib2", "urllib.request"),
+ MovedAttribute("pathname2url", "urllib", "urllib.request"),
+ MovedAttribute("url2pathname", "urllib", "urllib.request"),
+ MovedAttribute("getproxies", "urllib", "urllib.request"),
+ MovedAttribute("Request", "urllib2", "urllib.request"),
+ MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
+ MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
+ MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
+ MovedAttribute("FileHandler", "urllib2", "urllib.request"),
+ MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
+ MovedAttribute("urlretrieve", "urllib", "urllib.request"),
+ MovedAttribute("urlcleanup", "urllib", "urllib.request"),
+ MovedAttribute("URLopener", "urllib", "urllib.request"),
+ MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
+ MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
+]
+for attr in _urllib_request_moved_attributes:
+ setattr(Module_six_moves_urllib_request, attr.name, attr)
del attr
-moves = sys.modules[__name__ + ".moves"] = _MovedItems("moves")
+Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
+
+sys.modules[__name__ + ".moves.urllib_request"] = sys.modules[__name__ + ".moves.urllib.request"] = Module_six_moves_urllib_request(__name__ + ".moves.urllib.request")
+
+
+class Module_six_moves_urllib_response(_LazyModule):
+ """Lazy loading of moved objects in six.moves.urllib_response"""
+
+
+_urllib_response_moved_attributes = [
+ MovedAttribute("addbase", "urllib", "urllib.response"),
+ MovedAttribute("addclosehook", "urllib", "urllib.response"),
+ MovedAttribute("addinfo", "urllib", "urllib.response"),
+ MovedAttribute("addinfourl", "urllib", "urllib.response"),
+]
+for attr in _urllib_response_moved_attributes:
+ setattr(Module_six_moves_urllib_response, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
+
+sys.modules[__name__ + ".moves.urllib_response"] = sys.modules[__name__ + ".moves.urllib.response"] = Module_six_moves_urllib_response(__name__ + ".moves.urllib.response")
+
+
+class Module_six_moves_urllib_robotparser(_LazyModule):
+ """Lazy loading of moved objects in six.moves.urllib_robotparser"""
+
+
+_urllib_robotparser_moved_attributes = [
+ MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
+]
+for attr in _urllib_robotparser_moved_attributes:
+ setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
+
+sys.modules[__name__ + ".moves.urllib_robotparser"] = sys.modules[__name__ + ".moves.urllib.robotparser"] = Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser")
+
+
+class Module_six_moves_urllib(types.ModuleType):
+ """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
+ parse = sys.modules[__name__ + ".moves.urllib_parse"]
+ error = sys.modules[__name__ + ".moves.urllib_error"]
+ request = sys.modules[__name__ + ".moves.urllib_request"]
+ response = sys.modules[__name__ + ".moves.urllib_response"]
+ robotparser = sys.modules[__name__ + ".moves.urllib_robotparser"]
+
+ def __dir__(self):
+ return ['parse', 'error', 'request', 'response', 'robotparser']
+
+
+sys.modules[__name__ + ".moves.urllib"] = Module_six_moves_urllib(__name__ + ".moves.urllib")
def add_move(move):
@@ -252,11 +443,16 @@ if PY3:
def get_unbound_function(unbound):
return unbound
+ create_bound_method = types.MethodType
+
Iterator = object
else:
def get_unbound_function(unbound):
return unbound.im_func
+ def create_bound_method(func, obj):
+ return types.MethodType(func, obj, obj.__class__)
+
class Iterator(object):
def next(self):
@@ -297,21 +493,33 @@ if PY3:
return s.encode("latin-1")
def u(s):
return s
+ unichr = chr
if sys.version_info[1] <= 1:
def int2byte(i):
return bytes((i,))
else:
# This is about 2x faster than the implementation above on 3.2+
int2byte = operator.methodcaller("to_bytes", 1, "big")
+ byte2int = operator.itemgetter(0)
+ indexbytes = operator.getitem
+ iterbytes = iter
import io
StringIO = io.StringIO
BytesIO = io.BytesIO
else:
def b(s):
return s
+ # Workaround for standalone backslash
def u(s):
- return unicode(s, "unicode_escape")
+ return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
+ unichr = unichr
int2byte = chr
+ def byte2int(bs):
+ return ord(bs[0])
+ def indexbytes(buf, i):
+ return ord(buf[i])
+ def iterbytes(buf):
+ return (ord(byte) for byte in buf)
import StringIO
StringIO = BytesIO = StringIO.StringIO
_add_doc(b, """Byte literal""")
@@ -319,8 +527,7 @@ _add_doc(u, """Text literal""")
if PY3:
- import builtins
- exec_ = getattr(builtins, "exec")
+ exec_ = getattr(moves.builtins, "exec")
def reraise(tp, value, tb=None):
@@ -328,10 +535,6 @@ if PY3:
raise value.with_traceback(tb)
raise value
-
- print_ = getattr(builtins, "print")
- del builtins
-
else:
def exec_(_code_, _globs_=None, _locs_=None):
"""Execute code in a namespace."""
@@ -351,14 +554,24 @@ else:
""")
+print_ = getattr(moves.builtins, "print", None)
+if print_ is None:
def print_(*args, **kwargs):
- """The new-style print function."""
+ """The new-style print function for Python 2.4 and 2.5."""
fp = kwargs.pop("file", sys.stdout)
if fp is None:
return
def write(data):
if not isinstance(data, basestring):
data = str(data)
+ # If the file has an encoding, encode unicode with it.
+ if (isinstance(fp, file) and
+ isinstance(data, unicode) and
+ fp.encoding is not None):
+ errors = getattr(fp, "errors", None)
+ if errors is None:
+ errors = "strict"
+ data = data.encode(fp.encoding, errors)
fp.write(data)
want_unicode = False
sep = kwargs.pop("sep", None)
@@ -399,6 +612,21 @@ else:
_add_doc(reraise, """Reraise an exception.""")
-def with_metaclass(meta, base=object):
+def with_metaclass(meta, *bases):
"""Create a base class with a metaclass."""
- return meta("NewBase", (base,), {})
+ return meta("NewBase", bases, {})
+
+def add_metaclass(metaclass):
+ """Class decorator for creating a class with a metaclass."""
+ def wrapper(cls):
+ orig_vars = cls.__dict__.copy()
+ orig_vars.pop('__dict__', None)
+ orig_vars.pop('__weakref__', None)
+ slots = orig_vars.get('__slots__')
+ if slots is not None:
+ if isinstance(slots, str):
+ slots = [slots]
+ for slots_var in slots:
+ orig_vars.pop(slots_var)
+ return metaclass(cls.__name__, cls.__bases__, orig_vars)
+ return wrapper
diff --git a/pip/_vendor/vendor.txt b/pip/_vendor/vendor.txt
index dbbfa46f7..1ce0bc05c 100644
--- a/pip/_vendor/vendor.txt
+++ b/pip/_vendor/vendor.txt
@@ -1,5 +1,5 @@
distlib==0.1.7
html5lib==1.0b1
-six==1.3.0
+six==1.5.2
colorama==0.2.7
requests==2.2.1
diff --git a/pip/baseparser.py b/pip/baseparser.py
index c71e07183..dd39ed632 100644
--- a/pip/baseparser.py
+++ b/pip/baseparser.py
@@ -130,12 +130,15 @@ class ConfigOptionParser(CustomOptionParser):
self.config = ConfigParser.RawConfigParser()
self.name = kwargs.pop('name')
self.files = self.get_config_files()
- self.config.read(self.files)
+ if self.files:
+ self.config.read(self.files)
assert self.name
optparse.OptionParser.__init__(self, *args, **kwargs)
def get_config_files(self):
config_file = os.environ.get('PIP_CONFIG_FILE', False)
+ if config_file == os.devnull:
+ return []
if config_file and os.path.exists(config_file):
return [config_file]
return [default_config_file]
diff --git a/pip/commands/install.py b/pip/commands/install.py
index d4432278a..911238abc 100644
--- a/pip/commands/install.py
+++ b/pip/commands/install.py
@@ -184,8 +184,14 @@ class InstallCommand(Command):
def run(self, options, args):
- if options.no_install or options.no_download:
- logger.deprecated('1.7', "DEPRECATION: '--no-install' and '--no-download` are deprecated. See https://github.com/pypa/pip/issues/906.")
+ if (
+ options.no_install or
+ options.no_download or
+ options.build_dir or
+ options.no_clean
+ ):
+ logger.deprecated('1.7', 'DEPRECATION: --no-install, --no-download, --build, '
+ 'and --no-clean are deprecated. See https://github.com/pypa/pip/issues/906.')
if options.download_dir:
options.no_install = True
diff --git a/pip/commands/wheel.py b/pip/commands/wheel.py
index 110fff417..6527063ca 100644
--- a/pip/commands/wheel.py
+++ b/pip/commands/wheel.py
@@ -146,19 +146,25 @@ class WheelCommand(Command):
ignore_dependencies=options.ignore_dependencies,
ignore_installed=True,
session=session,
+ wheel_download_dir=options.wheel_dir
)
+ # make the wheelhouse
+ if not os.path.exists(options.wheel_dir):
+ os.makedirs(options.wheel_dir)
+
#parse args and/or requirements files
for name in args:
- if name.endswith(".whl"):
- logger.notify("ignoring %s" % name)
- continue
requirement_set.add_requirement(
InstallRequirement.from_line(name, None))
for filename in options.requirements:
- for req in parse_requirements(filename, finder=finder, options=options, session=session):
- if req.editable or (req.name is None and req.url.endswith(".whl")):
+ for req in parse_requirements(
+ filename,
+ finder=finder,
+ options=options,
+ session=session):
+ if req.editable:
logger.notify("ignoring %s" % req.url)
continue
requirement_set.add_requirement(req)
diff --git a/pip/download.py b/pip/download.py
index 4a8a49248..b8cfb791a 100644
--- a/pip/download.py
+++ b/pip/download.py
@@ -344,18 +344,6 @@ def unpack_vcs_link(link, location, only_download=False):
vcs_backend.unpack(location)
-def unpack_file_url(link, location):
- source = url_to_path(link.url)
- content_type = mimetypes.guess_type(source)[0]
- if os.path.isdir(source):
- # delete the location since shutil will create it again :(
- if os.path.isdir(location):
- rmtree(location)
- shutil.copytree(source, location, symlinks=True)
- else:
- unpack_file(source, location, content_type, link)
-
-
def _get_used_vcs_backend(link):
for backend in vcs.backends:
if link.scheme in backend.schemes:
@@ -478,7 +466,6 @@ def _copy_file(filename, location, content_type, link):
shutil.move(download_location, dest_file)
if copy:
shutil.copy(filename, download_location)
- logger.indent -= 2
logger.notify('Saved %s' % display_path(download_location))
@@ -490,11 +477,12 @@ def unpack_http_url(link, location, download_cache, download_dir=None,
temp_dir = tempfile.mkdtemp('-unpack', 'pip-')
temp_location = None
target_url = link.url.split('#', 1)[0]
-
already_cached = False
cache_file = None
cache_content_type_file = None
download_hash = None
+
+ # If a download cache is specified, is the file cached there?
if download_cache:
cache_file = os.path.join(download_cache,
urllib.quote(target_url, ''))
@@ -506,12 +494,14 @@ def unpack_http_url(link, location, download_cache, download_dir=None,
if not os.path.isdir(download_cache):
create_download_cache_folder(download_cache)
+ # If a download dir is specified, is the file already downloaded there?
already_downloaded = None
if download_dir:
already_downloaded = os.path.join(download_dir, link.filename)
if not os.path.exists(already_downloaded):
already_downloaded = None
+ # If already downloaded, does it's hash match?
if already_downloaded:
temp_location = already_downloaded
content_type = mimetypes.guess_type(already_downloaded)[0]
@@ -529,7 +519,7 @@ def unpack_http_url(link, location, download_cache, download_dir=None,
os.unlink(already_downloaded)
already_downloaded = None
- # We have a cached file, and we haven't already found a good downloaded copy
+ # If not a valid download, let's confirm the cached file is valid
if already_cached and not temp_location:
with open(cache_content_type_file) as fp:
content_type = fp.read().strip()
@@ -550,6 +540,7 @@ def unpack_http_url(link, location, download_cache, download_dir=None,
already_cached = False
# We don't have either a cached or a downloaded copy
+ # let's download to a tmp dir
if not temp_location:
try:
resp = session.get(target_url, stream=True)
@@ -582,11 +573,72 @@ def unpack_http_url(link, location, download_cache, download_dir=None,
if link.hash and link.hash_name:
_check_hash(download_hash, link)
+ # a download dir is specified; let's copy the archive there
if download_dir and not already_downloaded:
_copy_file(temp_location, download_dir, content_type, link)
+
+ # unpack the archive to the build dir location. even when only downloading
+ # archives, they have to be unpacked to parse dependencies
unpack_file(temp_location, location, content_type, link)
+
+ # if using a download cache, cache it, if needed
if cache_file and not already_cached:
cache_download(cache_file, temp_location, content_type)
+
if not (already_cached or already_downloaded):
os.unlink(temp_location)
+
os.rmdir(temp_dir)
+
+
+def unpack_file_url(link, location, download_dir=None):
+
+ link_path = url_to_path(link.url_without_fragment)
+ already_downloaded = False
+
+ # If it's a url to a local directory
+ if os.path.isdir(link_path):
+ if os.path.isdir(location):
+ rmtree(location)
+ shutil.copytree(link_path, location, symlinks=True)
+ return
+
+ # if link has a hash, let's confirm it matches
+ if link.hash:
+ link_path_hash = _get_hash_from_file(link_path, link)
+ _check_hash(link_path_hash, link)
+
+ # If a download dir is specified, is the file already there and valid?
+ if download_dir:
+ download_path = os.path.join(download_dir, link.filename)
+ if os.path.exists(download_path):
+ content_type = mimetypes.guess_type(download_path)[0]
+ logger.notify('File was already downloaded %s' % download_path)
+ if link.hash:
+ download_hash = _get_hash_from_file(download_path, link)
+ try:
+ _check_hash(download_hash, link)
+ already_downloaded = True
+ except HashMismatch:
+ logger.warn(
+ 'Previously-downloaded file %s has bad hash, '
+ 're-downloading.' % link_path
+ )
+ os.unlink(download_path)
+ else:
+ already_downloaded = True
+
+ if already_downloaded:
+ from_path = download_path
+ else:
+ from_path = link_path
+
+ content_type = mimetypes.guess_type(from_path)[0]
+
+ # unpack the archive to the build dir location. even when only downloading
+ # archives, they have to be unpacked to parse dependencies
+ unpack_file(from_path, location, content_type, link)
+
+ # a download dir is specified and not already downloaded
+ if download_dir and not already_downloaded:
+ _copy_file(from_path, download_dir, content_type, link)
diff --git a/pip/req.py b/pip/req.py
index 06cabeada..e9ea20f3b 100644
--- a/pip/req.py
+++ b/pip/req.py
@@ -33,7 +33,7 @@ from pip.download import (PipSession, get_file_content, is_url, url_to_path,
unpack_file_url, unpack_http_url)
import pip.wheel
from pip.wheel import move_wheel_files, Wheel, wheel_ext
-from pip._vendor import pkg_resources
+from pip._vendor import pkg_resources, six
def read_text_file(filename):
@@ -280,6 +280,10 @@ class InstallRequirement(object):
else:
setup_py = os.path.join(self.source_dir, setup_file)
+ # Python2 __file__ should not be unicode
+ if six.PY2 and isinstance(setup_py, six.text_type):
+ setup_py = setup_py.encode(sys.getfilesystemencoding())
+
return setup_py
def run_egg_info(self, force_root_egg_info=False):
@@ -661,6 +665,9 @@ exec(compile(getattr(tokenize, 'open', open)(__file__).read().replace('\\r\\n',
self.install_editable(install_options, global_options)
return
if self.is_wheel:
+ version = pip.wheel.wheel_version(self.source_dir)
+ pip.wheel.check_compatibility(version, self.name)
+
self.move_wheel_files(self.source_dir, root=root)
self.install_succeeded = True
return
@@ -931,12 +938,15 @@ class Requirements(object):
class RequirementSet(object):
def __init__(self, build_dir, src_dir, download_dir, download_cache=None,
- upgrade=False, ignore_installed=False, as_egg=False, target_dir=None,
- ignore_dependencies=False, force_reinstall=False, use_user_site=False,
- session=None, pycompile=True):
+ upgrade=False, ignore_installed=False, as_egg=False,
+ target_dir=None, ignore_dependencies=False,
+ force_reinstall=False, use_user_site=False, session=None,
+ pycompile=True, wheel_download_dir=None):
self.build_dir = build_dir
self.src_dir = src_dir
self.download_dir = download_dir
+ if download_cache:
+ download_cache = os.path.expanduser(download_cache)
self.download_cache = download_cache
self.upgrade = upgrade
self.ignore_installed = ignore_installed
@@ -954,6 +964,7 @@ class RequirementSet(object):
self.target_dir = target_dir #set from --target option
self.session = session or PipSession()
self.pycompile = pycompile
+ self.wheel_download_dir = wheel_download_dir
def __str__(self):
reqs = [req for req in self.requirements.values()
@@ -1170,11 +1181,26 @@ class RequirementSet(object):
assert url
if url:
try:
- self.unpack_url(url, location, self.is_download)
- except HTTPError:
- e = sys.exc_info()[1]
- logger.fatal('Could not install requirement %s because of error %s'
- % (req_to_install, e))
+
+ if (
+ url.filename.endswith(wheel_ext)
+ and self.wheel_download_dir
+ ):
+ # when doing 'pip wheel`
+ download_dir = self.wheel_download_dir
+ do_download = True
+ else:
+ download_dir = self.download_dir
+ do_download = self.is_download
+ self.unpack_url(
+ url, location, download_dir,
+ do_download,
+ )
+ except HTTPError as exc:
+ logger.fatal(
+ 'Could not install requirement %s because '
+ 'of error %s' % (req_to_install, exc)
+ )
raise InstallationError(
'Could not install requirement %s because of HTTP error %s for URL %s'
% (req_to_install, e, url))
@@ -1182,7 +1208,7 @@ class RequirementSet(object):
unpack = False
if unpack:
is_bundle = req_to_install.is_bundle
- is_wheel = url and url.filename.endswith('.whl')
+ is_wheel = url and url.filename.endswith(wheel_ext)
if is_bundle:
req_to_install.move_bundle_files(self.build_dir, self.src_dir)
for subreq in req_to_install.bundle_requirements():
@@ -1198,18 +1224,6 @@ class RequirementSet(object):
elif is_wheel:
req_to_install.source_dir = location
req_to_install.url = url.url
- dist = list(pkg_resources.find_distributions(location))[0]
- if not req_to_install.req:
- req_to_install.req = dist.as_requirement()
- self.add_requirement(req_to_install)
- if not self.ignore_dependencies:
- for subreq in dist.requires(req_to_install.extras):
- if self.has_requirement(subreq.project_name):
- continue
- subreq = InstallRequirement(str(subreq),
- req_to_install)
- reqs.append(subreq)
- self.add_requirement(subreq)
else:
req_to_install.source_dir = location
req_to_install.run_egg_info()
@@ -1233,8 +1247,32 @@ class RequirementSet(object):
req_to_install.conflicts_with = req_to_install.satisfied_by
req_to_install.satisfied_by = None
else:
+ logger.notify(
+ 'Requirement already satisfied (use '
+ '--upgrade to upgrade): %s' %
+ req_to_install
+ )
install = False
- if not (is_bundle or is_wheel):
+ if is_wheel:
+ dist = list(
+ pkg_resources.find_distributions(location)
+ )[0]
+ if not req_to_install.req:
+ req_to_install.req = dist.as_requirement()
+ self.add_requirement(req_to_install)
+ if not self.ignore_dependencies:
+ for subreq in dist.requires(
+ req_to_install.extras):
+ if self.has_requirement(
+ subreq.project_name):
+ continue
+ subreq = InstallRequirement(str(subreq),
+ req_to_install)
+ reqs.append(subreq)
+ self.add_requirement(subreq)
+
+ # sdists
+ elif not is_bundle:
## FIXME: shouldn't be globally added:
finder.add_dependency_links(req_to_install.dependency_links)
if (req_to_install.extras):
@@ -1257,10 +1295,14 @@ class RequirementSet(object):
if not self.has_requirement(req_to_install.name):
#'unnamed' requirements will get added here
self.add_requirement(req_to_install)
- if self.is_download or req_to_install._temp_build_dir is not None:
+
+ # cleanup tmp src
+ if not is_bundle:
+ if (
+ self.is_download or
+ req_to_install._temp_build_dir is not None
+ ):
self.reqs_to_cleanup.append(req_to_install)
- else:
- self.reqs_to_cleanup.append(req_to_install)
if install:
self.successfully_downloaded.append(req_to_install)
@@ -1304,23 +1346,36 @@ class RequirementSet(object):
call_subprocess(["python", "%s/setup.py" % dest, "clean"], cwd=dest,
command_desc='python setup.py clean')
- def unpack_url(self, link, location, only_download=False):
- if only_download:
- loc = self.download_dir
- else:
- loc = location
+ def unpack_url(self, link, location, download_dir=None,
+ only_download=False):
+ if download_dir is None:
+ download_dir = self.download_dir
+
+ # non-editable vcs urls
if is_vcs_url(link):
- return unpack_vcs_link(link, loc, only_download)
- # a local file:// index could have links with hashes
- elif not link.hash and is_file_url(link):
- return unpack_file_url(link, loc)
+ if only_download:
+ loc = download_dir
+ else:
+ loc = location
+ unpack_vcs_link(link, loc, only_download)
+
+ # file urls
+ elif is_file_url(link):
+ unpack_file_url(link, location, download_dir)
+ if only_download:
+ write_delete_marker_file(location)
+
+ # http urls
else:
- if self.download_cache:
- self.download_cache = os.path.expanduser(self.download_cache)
- retval = unpack_http_url(link, location, self.download_cache, self.download_dir, self.session)
+ unpack_http_url(
+ link,
+ location,
+ self.download_cache,
+ download_dir,
+ self.session,
+ )
if only_download:
write_delete_marker_file(location)
- return retval
def install(self, install_options, global_options=(), *args, **kwargs):
"""Install everything in this set (after having downloaded and unpacked the packages)"""
diff --git a/pip/wheel.py b/pip/wheel.py
index 8847644de..839259df4 100644
--- a/pip/wheel.py
+++ b/pip/wheel.py
@@ -13,18 +13,23 @@ import shutil
import sys
from base64 import urlsafe_b64encode
+from email.parser import Parser
from pip.backwardcompat import ConfigParser, StringIO
-from pip.exceptions import InvalidWheelFilename
+from pip.exceptions import InvalidWheelFilename, UnsupportedWheel
from pip.locations import distutils_scheme
from pip.log import logger
from pip import pep425tags
from pip.util import call_subprocess, normalize_path, make_path_relative
from pip._vendor import pkg_resources
from pip._vendor.distlib.scripts import ScriptMaker
+from pip._vendor import pkg_resources
+
wheel_ext = '.whl'
+VERSION_COMPATIBLE = (1, 0)
+
def rehash(path, algo='sha256', blocksize=1<<20):
"""Return (hash, length) for path using hashlib.new(algo)"""
@@ -388,6 +393,52 @@ def uninstallation_paths(dist):
yield path
+def wheel_version(source_dir):
+ """
+ Return the Wheel-Version of an extracted wheel, if possible.
+
+ Otherwise, return False if we couldn't parse / extract it.
+ """
+ try:
+ dist = [d for d in pkg_resources.find_on_path(None, source_dir)][0]
+
+ wheel_data = dist.get_metadata('WHEEL')
+ wheel_data = Parser().parsestr(wheel_data)
+
+ version = wheel_data['Wheel-Version'].strip()
+ version = tuple(map(int, version.split('.')))
+ return version
+ except:
+ return False
+
+
+def check_compatibility(version, name):
+ """
+ Raises errors or warns if called with an incompatible Wheel-Version.
+
+ Pip should refuse to install a Wheel-Version that's a major series
+ ahead of what it's compatible with (e.g. 2.0 > 1.1); and warn when
+ installing a version only minor version ahead (e.g. 1.2 > 1.1).
+
+ version: a 2-tuple representing a Wheel-Version (Major, Minor)
+ name: name of wheel or package to raise exception about
+
+ :raises UnsupportedWheel: when an incompatible Wheel-Version is given
+ """
+ if not version:
+ raise UnsupportedWheel(
+ "%s is in an unsupported or invalid wheel" % name
+ )
+ if version[0] > VERSION_COMPATIBLE[0]:
+ raise UnsupportedWheel(
+ "%s's Wheel-Version (%s) is not compatible with this version "
+ "of pip" % (name, '.'.join(map(str, version)))
+ )
+ elif version > VERSION_COMPATIBLE:
+ logger.warn('Installing from a newer Wheel-Version (%s)'
+ % '.'.join(map(str, version)))
+
+
class Wheel(object):
"""A wheel file"""
@@ -475,18 +526,19 @@ class WheelBuilder(object):
reqset = self.requirement_set.requirements.values()
- #make the wheelhouse
- if not os.path.exists(self.wheel_dir):
- os.makedirs(self.wheel_dir)
+ buildset = [req for req in reqset if not req.is_wheel]
+
+ if not buildset:
+ return
#build the wheels
- logger.notify('Building wheels for collected packages: %s' % ', '.join([req.name for req in reqset]))
+ logger.notify(
+ 'Building wheels for collected packages: %s' %
+ ','.join([req.name for req in buildset])
+ )
logger.indent += 2
build_success, build_failure = [], []
- for req in reqset:
- if req.is_wheel:
- logger.notify("Skipping building wheel: %s", req.url)
- continue
+ for req in buildset:
if self._build_one(req):
build_success.append(req)
else:
diff --git a/tests/data/packages/README.txt b/tests/data/packages/README.txt
index 9393466ff..3fd0c69bd 100644
--- a/tests/data/packages/README.txt
+++ b/tests/data/packages/README.txt
@@ -29,6 +29,11 @@ FSPkg
-----
for installing from the file system
+futurewheel
+-----------
+Wheels of a Wheel-Version that is newer in minor and major series.
+Their version coincides with the apparent Wheel-Version they indicate.
+
gmpy-1.15.tar.gz
----------------
hash testing (although this pkg isn't needed explicitly)
diff --git a/tests/data/packages/brokenwheel-1.0-py2.py3-none-any.whl b/tests/data/packages/brokenwheel-1.0-py2.py3-none-any.whl
new file mode 100644
index 000000000..3ae77cf03
--- /dev/null
+++ b/tests/data/packages/brokenwheel-1.0-py2.py3-none-any.whl
Binary files differ
diff --git a/tests/data/packages/colander-0.9.9-py2.py3-none-any.whl b/tests/data/packages/colander-0.9.9-py2.py3-none-any.whl
new file mode 100644
index 000000000..031718f9c
--- /dev/null
+++ b/tests/data/packages/colander-0.9.9-py2.py3-none-any.whl
Binary files differ
diff --git a/tests/data/packages/futurewheel-1.9-py2.py3-none-any.whl b/tests/data/packages/futurewheel-1.9-py2.py3-none-any.whl
new file mode 100644
index 000000000..703243cbb
--- /dev/null
+++ b/tests/data/packages/futurewheel-1.9-py2.py3-none-any.whl
Binary files differ
diff --git a/tests/data/packages/futurewheel-3.0-py2.py3-none-any.whl b/tests/data/packages/futurewheel-3.0-py2.py3-none-any.whl
new file mode 100644
index 000000000..372b1be79
--- /dev/null
+++ b/tests/data/packages/futurewheel-3.0-py2.py3-none-any.whl
Binary files differ
diff --git a/tests/data/packages/translationstring-1.1.tar.gz b/tests/data/packages/translationstring-1.1.tar.gz
new file mode 100644
index 000000000..25370b8f9
--- /dev/null
+++ b/tests/data/packages/translationstring-1.1.tar.gz
Binary files differ
diff --git a/tests/functional/test_install_download.py b/tests/functional/test_install_download.py
index 03ca24135..dd2f8f602 100644
--- a/tests/functional/test_install_download.py
+++ b/tests/functional/test_install_download.py
@@ -1,3 +1,4 @@
+import os
import textwrap
from tests.lib.path import Path
@@ -48,6 +49,34 @@ def test_download_should_download_dependencies(script):
assert script.site_packages/ 'openid' not in result.files_created
+def test_download_wheel_archive(script, data):
+ """
+ It should download a wheel archive path
+ """
+ wheel_filename = 'colander-0.9.9-py2.py3-none-any.whl'
+ wheel_path = os.path.join(data.find_links, wheel_filename)
+ result = script.pip(
+ 'install', wheel_path,
+ '-d', '.', '--no-deps'
+ )
+ assert Path('scratch') / wheel_filename in result.files_created
+
+
+def test_download_should_download_wheel_deps(script, data):
+ """
+ It should download dependencies for wheels(in the scratch path)
+ """
+ wheel_filename = 'colander-0.9.9-py2.py3-none-any.whl'
+ dep_filename = 'translationstring-1.1.tar.gz'
+ wheel_path = os.path.join(data.find_links, wheel_filename)
+ result = script.pip(
+ 'install', wheel_path,
+ '-d', '.', '--find-links', data.find_links, '--no-index'
+ )
+ assert Path('scratch') / wheel_filename in result.files_created
+ assert Path('scratch') / dep_filename in result.files_created
+
+
def test_download_should_skip_existing_files(script):
"""
It should not download files already existing in the scratch dir
diff --git a/tests/functional/test_install_wheel.py b/tests/functional/test_install_wheel.py
index c3c00f60f..1a602c644 100644
--- a/tests/functional/test_install_wheel.py
+++ b/tests/functional/test_install_wheel.py
@@ -3,6 +3,34 @@ import pytest
import glob
from tests.lib.path import Path
+from tests.lib import TestFailure
+
+
+def test_install_from_future_wheel_version(script, data):
+ """
+ Test installing a future wheel
+ """
+ package = data.packages.join("futurewheel-3.0-py2.py3-none-any.whl")
+ result = script.pip('install', package, '--no-index', expect_error=True)
+ with pytest.raises(TestFailure):
+ result.assert_installed('futurewheel', without_egg_link=True,
+ editable=False)
+
+ package = data.packages.join("futurewheel-1.9-py2.py3-none-any.whl")
+ result = script.pip('install', package, '--no-index', expect_error=False)
+ result.assert_installed('futurewheel', without_egg_link=True,
+ editable=False)
+
+
+def test_install_from_broken_wheel(script, data):
+ """
+ Test that installing a broken wheel fails properly
+ """
+ package = data.packages.join("brokenwheel-1.0-py2.py3-none-any.whl")
+ result = script.pip('install', package, '--no-index', expect_error=True)
+ with pytest.raises(TestFailure):
+ result.assert_installed('futurewheel', without_egg_link=True,
+ editable=False)
def test_install_from_wheel(script, data):
diff --git a/tests/functional/test_wheel.py b/tests/functional/test_wheel.py
index bc7136f44..7f0607fba 100644
--- a/tests/functional/test_wheel.py
+++ b/tests/functional/test_wheel.py
@@ -32,6 +32,20 @@ def test_pip_wheel_success(script, data):
assert "Successfully built simple" in result.stdout, result.stdout
+def test_pip_wheel_downloads_wheels(script, data):
+ """
+ Test 'pip wheel' downloads wheels
+ """
+ script.pip('install', 'wheel')
+ result = script.pip(
+ 'wheel', '--no-index', '-f', data.find_links, 'simple.dist',
+ )
+ wheel_file_name = 'simple.dist-0.1-py2.py3-none-any.whl'
+ wheel_file_path = script.scratch/'wheelhouse'/wheel_file_name
+ assert wheel_file_path in result.files_created, result.stdout
+ assert "Saved" in result.stdout, result.stdout
+
+
def test_pip_wheel_fail(script, data):
"""
Test 'pip wheel' failure.
@@ -47,7 +61,7 @@ def test_pip_wheel_fail(script, data):
def test_pip_wheel_ignore_wheels_editables(script, data):
"""
- Test 'pip wheel' ignores editables and *.whl files in requirements
+ Test 'pip wheel' ignores editables
"""
script.pip('install', 'wheel')
@@ -64,7 +78,6 @@ def test_pip_wheel_ignore_wheels_editables(script, data):
assert wheel_file_path in result.files_created, (wheel_file_path, result.files_created)
assert "Successfully built simple" in result.stdout, result.stdout
assert "Failed to build" not in result.stdout, result.stdout
- assert "ignoring %s" % local_wheel in result.stdout
ignore_editable = "ignoring %s" % path_to_url(local_editable)
#TODO: understand this divergence
if sys.platform == 'win32':
diff --git a/tests/unit/test_download.py b/tests/unit/test_download.py
index acbc9b4a6..695a10dbb 100644
--- a/tests/unit/test_download.py
+++ b/tests/unit/test_download.py
@@ -1,6 +1,6 @@
import hashlib
import os
-from shutil import rmtree
+from shutil import rmtree, copy
from tempfile import mkdtemp
from mock import Mock, patch
@@ -8,7 +8,9 @@ import pytest
import pip
from pip.backwardcompat import urllib, BytesIO, b, pathname2url
-from pip.download import PipSession, path_to_url, unpack_http_url, url_to_path
+from pip.exceptions import HashMismatch
+from pip.download import (PipSession, path_to_url, unpack_http_url,
+ url_to_path, unpack_file_url)
from pip.index import Link
@@ -174,3 +176,99 @@ def test_path_to_url_win():
@pytest.mark.skipif("sys.platform != 'win32'")
def test_url_to_path_win():
assert url_to_path('file:///c:/tmp/file') == 'c:/tmp/file'
+
+
+class Test_unpack_file_url(object):
+
+ def prep(self, tmpdir, data):
+ self.build_dir = tmpdir.join('build')
+ self.download_dir = tmpdir.join('download')
+ os.mkdir(self.build_dir)
+ os.mkdir(self.download_dir)
+ self.dist_file = "simple-1.0.tar.gz"
+ self.dist_file2 = "simple-2.0.tar.gz"
+ self.dist_path = data.packages.join(self.dist_file)
+ self.dist_path2 = data.packages.join(self.dist_file2)
+ self.dist_url = Link(path_to_url(self.dist_path))
+ self.dist_url2 = Link(path_to_url(self.dist_path2))
+
+ def test_unpack_file_url_no_download(self, tmpdir, data):
+ self.prep(tmpdir, data)
+ unpack_file_url(self.dist_url, self.build_dir)
+ assert os.path.isdir(os.path.join(self.build_dir, 'simple'))
+ assert not os.path.isfile(
+ os.path.join(self.download_dir, self.dist_file))
+
+ def test_unpack_file_url_and_download(self, tmpdir, data):
+ self.prep(tmpdir, data)
+ unpack_file_url(self.dist_url, self.build_dir,
+ download_dir=self.download_dir)
+ assert os.path.isdir(os.path.join(self.build_dir, 'simple'))
+ assert os.path.isfile(os.path.join(self.download_dir, self.dist_file))
+
+ def test_unpack_file_url_download_already_exists(self, tmpdir,
+ data, monkeypatch):
+ self.prep(tmpdir, data)
+ # add in previous download (copy simple-2.0 as simple-1.0)
+ # so we can tell it didn't get overwritten
+ dest_file = os.path.join(self.download_dir, self.dist_file)
+ copy(self.dist_path2, dest_file)
+ dist_path2_md5 = hashlib.md5(
+ open(self.dist_path2, 'rb').read()).hexdigest()
+
+ unpack_file_url(self.dist_url, self.build_dir,
+ download_dir=self.download_dir)
+ # our hash should be the same, i.e. not overwritten by simple-1.0 hash
+ assert dist_path2_md5 == hashlib.md5(
+ open(dest_file, 'rb').read()).hexdigest()
+
+ def test_unpack_file_url_bad_hash(self, tmpdir, data,
+ monkeypatch):
+ """
+ Test when the file url hash fragment is wrong
+ """
+ self.prep(tmpdir, data)
+ self.dist_url.url = "%s#md5=bogus" % self.dist_url.url
+ with pytest.raises(HashMismatch):
+ unpack_file_url(self.dist_url, self.build_dir)
+
+ def test_unpack_file_url_download_bad_hash(self, tmpdir, data,
+ monkeypatch):
+ """
+ Test when existing download has different hash from the file url
+ fragment
+ """
+ self.prep(tmpdir, data)
+
+ # add in previous download (copy simple-2.0 as simple-1.0 so it's wrong
+ # hash)
+ dest_file = os.path.join(self.download_dir, self.dist_file)
+ copy(self.dist_path2, dest_file)
+
+ dist_path_md5 = hashlib.md5(
+ open(self.dist_path, 'rb').read()).hexdigest()
+ dist_path2_md5 = hashlib.md5(open(dest_file, 'rb').read()).hexdigest()
+
+ assert dist_path_md5 != dist_path2_md5
+
+ self.dist_url.url = "%s#md5=%s" % (
+ self.dist_url.url,
+ dist_path_md5
+ )
+ unpack_file_url(self.dist_url, self.build_dir,
+ download_dir=self.download_dir)
+
+ # confirm hash is for simple-1.0
+ # the previous bad download has been removed
+ assert (hashlib.md5(open(dest_file, 'rb').read()).hexdigest()
+ ==
+ dist_path_md5
+ ), hashlib.md5(open(dest_file, 'rb').read()).hexdigest()
+
+ def test_unpack_file_url_thats_a_dir(self, tmpdir, data):
+ self.prep(tmpdir, data)
+ dist_path = data.packages.join("FSPkg")
+ dist_url = Link(path_to_url(dist_path))
+ unpack_file_url(dist_url, self.build_dir,
+ download_dir=self.download_dir)
+ assert os.path.isdir(os.path.join(self.build_dir, 'fspkg'))
diff --git a/tests/unit/test_wheel.py b/tests/unit/test_wheel.py
index 5eb21dc7b..9b1061b9f 100644
--- a/tests/unit/test_wheel.py
+++ b/tests/unit/test_wheel.py
@@ -6,8 +6,11 @@ import pytest
import pkg_resources
from mock import patch, Mock
from pip import wheel
-from pip.exceptions import InstallationError, InvalidWheelFilename
+from pip.exceptions import (
+ InstallationError, InvalidWheelFilename, UnsupportedWheel,
+)
from pip.index import PackageFinder
+from pip.util import unpack_file
from tests.lib import assert_raises_regexp
@@ -51,6 +54,44 @@ def test_uninstallation_paths():
assert paths2 == paths
+def test_wheel_version(tmpdir, data):
+ future_wheel = 'futurewheel-1.9-py2.py3-none-any.whl'
+ broken_wheel = 'brokenwheel-1.0-py2.py3-none-any.whl'
+ future_version = (1, 9)
+
+ unpack_file(data.packages.join(future_wheel),
+ tmpdir + 'future', None, None)
+ unpack_file(data.packages.join(broken_wheel),
+ tmpdir + 'broken', None, None)
+
+ assert wheel.wheel_version(tmpdir + 'future') == future_version
+ assert not wheel.wheel_version(tmpdir + 'broken')
+
+
+def test_check_compatibility():
+ name = 'test'
+ vc = wheel.VERSION_COMPATIBLE
+
+ # Major version is higher - should be incompatible
+ higher_v = (vc[0] + 1, vc[1])
+
+ # test raises with correct error
+ with pytest.raises(UnsupportedWheel) as e:
+ wheel.check_compatibility(higher_v, name)
+ assert 'is not compatible' in str(e)
+
+ # Should only log.warn - minor version is greater
+ higher_v = (vc[0], vc[1] + 1)
+ wheel.check_compatibility(higher_v, name)
+
+ # These should work fine
+ wheel.check_compatibility(wheel.VERSION_COMPATIBLE, name)
+
+ # E.g if wheel to install is 1.0 and we support up to 1.2
+ lower_v = (vc[0], max(0, vc[1] - 1))
+ wheel.check_compatibility(lower_v, name)
+
+
class TestWheelFile(object):
def test_std_wheel_pattern(self):