author     Donald Stufft <donald@stufft.io>  2014-05-02 19:51:48 -0400
committer  Donald Stufft <donald@stufft.io>  2014-05-02 21:01:00 -0400
commit     b3ac7b22401e8f83cd09deaffe2fd10a9f3cf0c2 (patch)
tree       9652901506ec99b2ec601682df152533483f8bff
parent     fb8738edd671a1762ca0e5041eb6b09dca54b980 (diff)
download   pip-b3ac7b22401e8f83cd09deaffe2fd10a9f3cf0c2.tar.gz
Upgrade distlib to 0.1.8
-rw-r--r--  pip/_vendor/distlib/__init__.py               5
-rw-r--r--  pip/_vendor/distlib/_backport/sysconfig.py    2
-rw-r--r--  pip/_vendor/distlib/index.py                 16
-rw-r--r--  pip/_vendor/distlib/locators.py              33
-rw-r--r--  pip/_vendor/distlib/metadata.py              13
-rw-r--r--  pip/_vendor/distlib/resources.py             50
-rw-r--r--  pip/_vendor/distlib/scripts.py                6
-rw-r--r--  pip/_vendor/distlib/util.py                  51
-rw-r--r--  pip/_vendor/distlib/version.py               35
-rw-r--r--  pip/_vendor/distlib/wheel.py                325
-rw-r--r--  pip/_vendor/vendor.txt                        2
11 files changed, 419 insertions, 119 deletions
diff --git a/pip/_vendor/distlib/__init__.py b/pip/_vendor/distlib/__init__.py
index 56a56ba68..f9081bb84 100644
--- a/pip/_vendor/distlib/__init__.py
+++ b/pip/_vendor/distlib/__init__.py
@@ -1,12 +1,12 @@
# -*- coding: utf-8 -*-
#
-# Copyright (C) 2012-2013 Vinay Sajip.
+# Copyright (C) 2012-2014 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
import logging
-__version__ = '0.1.7'
+__version__ = '0.1.8'
class DistlibException(Exception):
pass
@@ -17,6 +17,7 @@ except ImportError: # pragma: no cover
class NullHandler(logging.Handler):
def handle(self, record): pass
def emit(self, record): pass
+ def createLock(self): self.lock = None
logger = logging.getLogger(__name__)
logger.addHandler(NullHandler())
diff --git a/pip/_vendor/distlib/_backport/sysconfig.py b/pip/_vendor/distlib/_backport/sysconfig.py
index a5b55feb4..1d3132679 100644
--- a/pip/_vendor/distlib/_backport/sysconfig.py
+++ b/pip/_vendor/distlib/_backport/sysconfig.py
@@ -68,7 +68,7 @@ _cfg_read = False
def _ensure_cfg_read():
global _cfg_read
if not _cfg_read:
- from distlib.resources import finder
+ from ..resources import finder
backport_package = __name__.rsplit('.', 1)[0]
_finder = finder(backport_package)
_cfgfile = _finder.find('sysconfig.cfg')
diff --git a/pip/_vendor/distlib/index.py b/pip/_vendor/distlib/index.py
index 30b2aa5a5..83004b13f 100644
--- a/pip/_vendor/distlib/index.py
+++ b/pip/_vendor/distlib/index.py
@@ -15,10 +15,10 @@ try:
except ImportError:
from dummy_threading import Thread
-from distlib import DistlibException
-from distlib.compat import (HTTPBasicAuthHandler, Request, HTTPPasswordMgr,
- urlparse, build_opener)
-from distlib.util import cached_property, zip_dir
+from . import DistlibException
+from .compat import (HTTPBasicAuthHandler, Request, HTTPPasswordMgr,
+ urlparse, build_opener, string_types)
+from .util import cached_property, zip_dir, ServerProxy
logger = logging.getLogger(__name__)
@@ -49,6 +49,7 @@ class PackageIndex(object):
self.ssl_verifier = None
self.gpg = None
self.gpg_home = None
+ self.rpc_proxy = None
with open(os.devnull, 'w') as sink:
for s in ('gpg2', 'gpg'):
try:
@@ -478,3 +479,10 @@ class PackageIndex(object):
'Content-length': str(len(body))
}
return Request(self.url, body, headers)
+
+ def search(self, terms, operator=None):
+ if isinstance(terms, string_types):
+ terms = {'name': terms}
+ if self.rpc_proxy is None:
+ self.rpc_proxy = ServerProxy(self.url, timeout=3.0)
+ return self.rpc_proxy.search(terms, operator or 'and')
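A hedged usage sketch of the new PackageIndex.search(): a bare string is treated as a name query, a mapping searches specific fields, and the XML-RPC ServerProxy is created lazily on first use. The sketch imports upstream distlib rather than pip's vendored copy, and the index URL is illustrative.

from distlib.index import PackageIndex

# Assumes an index exposing PyPI's legacy XML-RPC search endpoint.
index = PackageIndex('https://pypi.python.org/pypi')

# A plain string is normalised to {'name': 'distlib'} before the RPC call.
hits = index.search('distlib')

# A mapping searches specific fields; operator defaults to 'and'.
hits = index.search({'name': 'distlib', 'summary': 'packaging'}, operator='or')

for hit in hits:
    print(hit['name'], hit['version'])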
diff --git a/pip/_vendor/distlib/locators.py b/pip/_vendor/distlib/locators.py
index ffe99310f..07bc1fd43 100644
--- a/pip/_vendor/distlib/locators.py
+++ b/pip/_vendor/distlib/locators.py
@@ -534,6 +534,11 @@ class SimpleScrapingLocator(Locator):
self.skip_externals = False
self.num_workers = num_workers
self._lock = threading.RLock()
+ # See issue #45: we need to be resilient when the locator is used
+ # in a thread, e.g. with concurrent.futures. We can't use self._lock
+ # as it is for coordinating our internal threads - the ones created
+ # in _prepare_threads.
+ self._gplock = threading.RLock()
def _prepare_threads(self):
"""
@@ -562,19 +567,21 @@ class SimpleScrapingLocator(Locator):
self._threads = []
def _get_project(self, name):
- self.result = result = {}
- self.project_name = name
- url = urljoin(self.base_url, '%s/' % quote(name))
- self._seen.clear()
- self._page_cache.clear()
- self._prepare_threads()
- try:
- logger.debug('Queueing %s', url)
- self._to_fetch.put(url)
- self._to_fetch.join()
- finally:
- self._wait_threads()
- del self.result
+ result = {}
+ with self._gplock:
+ self.result = result
+ self.project_name = name
+ url = urljoin(self.base_url, '%s/' % quote(name))
+ self._seen.clear()
+ self._page_cache.clear()
+ self._prepare_threads()
+ try:
+ logger.debug('Queueing %s', url)
+ self._to_fetch.put(url)
+ self._to_fetch.join()
+ finally:
+ self._wait_threads()
+ del self.result
return result
platform_dependent = re.compile(r'\b(linux-(i\d86|x86_64|arm\w+)|'
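The extra _gplock exists so a single SimpleScrapingLocator can be shared between caller threads (issue #45): _get_project now runs under the lock instead of letting concurrent callers clobber self.result and self.project_name. A minimal sketch under that assumption, using upstream distlib and the public get_project() entry point; the URL and project names are illustrative.

from concurrent.futures import ThreadPoolExecutor
from distlib.locators import SimpleScrapingLocator

locator = SimpleScrapingLocator('https://pypi.python.org/simple/')

# Each call funnels through _get_project, which now serialises on _gplock.
with ThreadPoolExecutor(max_workers=4) as pool:
    futures = [pool.submit(locator.get_project, name)
               for name in ('pip', 'distlib', 'wheel', 'setuptools')]
    for future in futures:
        print(sorted(future.result()))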
diff --git a/pip/_vendor/distlib/metadata.py b/pip/_vendor/distlib/metadata.py
index ef3b1d6b3..8441d8fef 100644
--- a/pip/_vendor/distlib/metadata.py
+++ b/pip/_vendor/distlib/metadata.py
@@ -1006,7 +1006,18 @@ class Metadata(object):
if self._legacy:
self._legacy.add_requirements(requirements)
else:
- self._data.setdefault('run_requires', []).extend(requirements)
+ run_requires = self._data.setdefault('run_requires', [])
+ always = None
+ for entry in run_requires:
+ if 'environment' not in entry and 'extra' not in entry:
+ always = entry
+ break
+ if always is None:
+ always = { 'requires': requirements }
+ run_requires.insert(0, always)
+ else:
+ rset = set(always['requires']) | set(requirements)
+ always['requires'] = sorted(rset)
def __repr__(self):
name = self.name or '(no name)'
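The reworked add_requirements() no longer appends blindly to run_requires: it merges new requirements into the single unconditional entry (one carrying neither an 'environment' marker nor an 'extra' name), creating that entry if necessary. A standalone sketch mirroring that merge, not a call into distlib itself:

def merge_run_requires(run_requires, requirements):
    # Find the entry that applies unconditionally.
    always = None
    for entry in run_requires:
        if 'environment' not in entry and 'extra' not in entry:
            always = entry
            break
    if always is None:
        run_requires.insert(0, {'requires': list(requirements)})
    else:
        always['requires'] = sorted(set(always['requires']) | set(requirements))
    return run_requires

rr = [{'requires': ['six']}, {'extra': 'test', 'requires': ['pytest']}]
merge_run_requires(rr, ['requests (>=2.0)'])
# rr[0]['requires'] is now ['requests (>=2.0)', 'six'] (merged and sorted)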
diff --git a/pip/_vendor/distlib/resources.py b/pip/_vendor/distlib/resources.py
index 7c2673720..567840e7b 100644
--- a/pip/_vendor/distlib/resources.py
+++ b/pip/_vendor/distlib/resources.py
@@ -17,39 +17,20 @@ import types
import zipimport
from . import DistlibException
-from .util import cached_property, get_cache_base, path_to_cache_dir
+from .util import cached_property, get_cache_base, path_to_cache_dir, Cache
logger = logging.getLogger(__name__)
-class Cache(object):
- """
- A class implementing a cache for resources that need to live in the file system
- e.g. shared libraries.
- """
+cache = None # created when needed
- def __init__(self, base=None):
- """
- Initialise an instance.
- :param base: The base directory where the cache should be located. If
- not specified, this will be the ``resource-cache``
- directory under whatever :func:`get_cache_base` returns.
- """
+class ResourceCache(Cache):
+ def __init__(self, base=None):
if base is None:
# Use native string to avoid issues on 2.x: see Python #20140.
base = os.path.join(get_cache_base(), str('resource-cache'))
- # we use 'isdir' instead of 'exists', because we want to
- # fail if there's a file with that name
- if not os.path.isdir(base):
- os.makedirs(base)
- self.base = os.path.abspath(os.path.normpath(base))
-
- def prefix_to_dir(self, prefix):
- """
- Converts a resource prefix to a directory name in the cache.
- """
- return path_to_cache_dir(prefix)
+ super(ResourceCache, self).__init__(base)
def is_stale(self, resource, path):
"""
@@ -87,24 +68,6 @@ class Cache(object):
f.write(resource.bytes)
return result
- def clear(self):
- """
- Clear the cache.
- """
- not_removed = []
- for fn in os.listdir(self.base):
- fn = os.path.join(self.base, fn)
- try:
- if os.path.islink(fn) or os.path.isfile(fn):
- os.remove(fn)
- elif os.path.isdir(fn):
- shutil.rmtree(fn)
- except Exception:
- not_removed.append(fn)
- return not_removed
-
-cache = Cache()
-
class ResourceBase(object):
def __init__(self, finder, name):
@@ -131,6 +94,9 @@ class Resource(ResourceBase):
@cached_property
def file_path(self):
+ global cache
+ if cache is None:
+ cache = ResourceCache()
return cache.get(self)
@cached_property
diff --git a/pip/_vendor/distlib/scripts.py b/pip/_vendor/distlib/scripts.py
index ba0e5202b..36850b2a3 100644
--- a/pip/_vendor/distlib/scripts.py
+++ b/pip/_vendor/distlib/scripts.py
@@ -92,8 +92,10 @@ class ScriptMaker(object):
return executable
def _get_shebang(self, encoding, post_interp=b'', options=None):
+ enquote = True
if self.executable:
executable = self.executable
+ enquote = False # assume this will be taken care of
elif not sysconfig.is_python_build():
executable = get_executable()
elif in_venv():
@@ -107,6 +109,10 @@ class ScriptMaker(object):
if options:
executable = self._get_alternate_executable(executable, options)
+ # If the user didn't specify an executable, it may be necessary to
+ # cater for executable paths with spaces (not uncommon on Windows)
+ if enquote and ' ' in executable:
+ executable = '"%s"' % executable
executable = fsencode(executable)
shebang = b'#!' + executable + post_interp + b'\n'
# Python parser starts to read a script using UTF-8 until
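The shebang change only quotes interpreter paths that ScriptMaker derived itself; an explicitly set self.executable is assumed to be handled by the caller. A standalone sketch of the effect, mirroring the logic rather than calling the private _get_shebang; the paths are illustrative.

def shebang_for(executable, user_specified=False):
    # Only enquote when the path was derived, not when the caller set
    # ScriptMaker.executable explicitly.
    if not user_specified and ' ' in executable:
        executable = '"%s"' % executable
    return '#!' + executable + '\n'

print(shebang_for(r'C:\Program Files\Python34\python.exe'))
# #!"C:\Program Files\Python34\python.exe"
print(shebang_for(r'C:\Program Files\Python34\python.exe', user_specified=True))
# #!C:\Program Files\Python34\python.exe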
diff --git a/pip/_vendor/distlib/util.py b/pip/_vendor/distlib/util.py
index e64d078b6..29ec519ab 100644
--- a/pip/_vendor/distlib/util.py
+++ b/pip/_vendor/distlib/util.py
@@ -154,9 +154,9 @@ def in_venv():
def get_executable():
- if sys.platform == 'darwin' and ('__VENV_LAUNCHER__'
+ if sys.platform == 'darwin' and ('__PYVENV_LAUNCHER__'
in os.environ):
- result = os.environ['__VENV_LAUNCHER__']
+ result = os.environ['__PYVENV_LAUNCHER__']
else:
result = sys.executable
return result
@@ -595,7 +595,6 @@ def get_cache_base(suffix=None):
else:
# Assume posix, or old Windows
result = os.path.expanduser('~')
- result = os.path.join(result, suffix)
# we use 'isdir' instead of 'exists', because we want to
# fail if there's a file with that name
if os.path.isdir(result):
@@ -612,7 +611,7 @@ def get_cache_base(suffix=None):
if not usable:
result = tempfile.mkdtemp()
logger.warning('Default location unusable, using %s', result)
- return result
+ return os.path.join(result, suffix)
def path_to_cache_dir(path):
@@ -768,6 +767,50 @@ def get_package_data(name, version):
return _get_external_data(url)
+class Cache(object):
+ """
+ A class implementing a cache for resources that need to live in the file system
+ e.g. shared libraries. This class was moved from resources to here because it
+ could be used by other modules, e.g. the wheel module.
+ """
+
+ def __init__(self, base):
+ """
+ Initialise an instance.
+
+ :param base: The base directory where the cache should be located.
+ """
+ # we use 'isdir' instead of 'exists', because we want to
+ # fail if there's a file with that name
+ if not os.path.isdir(base):
+ os.makedirs(base)
+ if (os.stat(base).st_mode & 0o77) != 0:
+ logger.warning('Directory \'%s\' is not private', base)
+ self.base = os.path.abspath(os.path.normpath(base))
+
+ def prefix_to_dir(self, prefix):
+ """
+ Converts a resource prefix to a directory name in the cache.
+ """
+ return path_to_cache_dir(prefix)
+
+ def clear(self):
+ """
+ Clear the cache.
+ """
+ not_removed = []
+ for fn in os.listdir(self.base):
+ fn = os.path.join(self.base, fn)
+ try:
+ if os.path.islink(fn) or os.path.isfile(fn):
+ os.remove(fn)
+ elif os.path.isdir(fn):
+ shutil.rmtree(fn)
+ except Exception:
+ not_removed.append(fn)
+ return not_removed
+
+
class EventMixin(object):
"""
A very simple publish/subscribe system.
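Cache moves into util so that resources (via ResourceCache) and the new wheel dylib cache can share one implementation; the constructor now also warns when the cache directory is group- or world-accessible. A minimal usage sketch of the relocated class, importing upstream distlib, with the base directory name illustrative:

import os
from distlib.util import Cache, get_cache_base

base = os.path.join(get_cache_base(), 'example-cache')   # illustrative name
cache = Cache(base)              # creates the directory, warns if not private

print(cache.base)                                   # normalised absolute path
print(cache.prefix_to_dir('/tmp/example/prefix'))   # prefix mapped to a dir name
leftovers = cache.clear()        # best-effort removal; returns what survived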
diff --git a/pip/_vendor/distlib/version.py b/pip/_vendor/distlib/version.py
index a2192274c..f0e62c4ee 100644
--- a/pip/_vendor/distlib/version.py
+++ b/pip/_vendor/distlib/version.py
@@ -181,7 +181,7 @@ class Matcher(object):
return self._string
-PEP426_VERSION_RE = re.compile(r'^(\d+\.\d+(\.\d+)*)((a|b|c|rc)(\d+))?'
+PEP426_VERSION_RE = re.compile(r'^(\d+(\.\d+)*)((a|b|c|rc)(\d+))?'
r'(\.(post)(\d+))?(\.(dev)(\d+))?'
r'(-(\d+(\.\d+)?))?$')
@@ -297,7 +297,22 @@ class NormalizedMatcher(Matcher):
'!=': '_match_ne',
}
+ def _adjust_local(self, version, constraint, prefix):
+ if prefix:
+ strip_local = '-' not in constraint and version._parts[-1]
+ else:
+ # both constraint and version are
+ # NormalizedVersion instances.
+ # If constraint does not have a local component,
+ # ensure the version doesn't, either.
+ strip_local = not constraint._parts[-1] and version._parts[-1]
+ if strip_local:
+ s = version._string.split('-', 1)[0]
+ version = self.version_class(s)
+ return version, constraint
+
def _match_lt(self, version, constraint, prefix):
+ version, constraint = self._adjust_local(version, constraint, prefix)
if version >= constraint:
return False
release_clause = constraint._release_clause
@@ -305,6 +320,7 @@ class NormalizedMatcher(Matcher):
return not _match_prefix(version, pfx)
def _match_gt(self, version, constraint, prefix):
+ version, constraint = self._adjust_local(version, constraint, prefix)
if version <= constraint:
return False
release_clause = constraint._release_clause
@@ -312,12 +328,15 @@ class NormalizedMatcher(Matcher):
return not _match_prefix(version, pfx)
def _match_le(self, version, constraint, prefix):
+ version, constraint = self._adjust_local(version, constraint, prefix)
return version <= constraint
def _match_ge(self, version, constraint, prefix):
+ version, constraint = self._adjust_local(version, constraint, prefix)
return version >= constraint
def _match_eq(self, version, constraint, prefix):
+ version, constraint = self._adjust_local(version, constraint, prefix)
if not prefix:
result = (version == constraint)
else:
@@ -325,6 +344,7 @@ class NormalizedMatcher(Matcher):
return result
def _match_ne(self, version, constraint, prefix):
+ version, constraint = self._adjust_local(version, constraint, prefix)
if not prefix:
result = (version != constraint)
else:
@@ -332,6 +352,7 @@ class NormalizedMatcher(Matcher):
return result
def _match_compatible(self, version, constraint, prefix):
+ version, constraint = self._adjust_local(version, constraint, prefix)
if version == constraint:
return True
if version < constraint:
@@ -569,13 +590,15 @@ class LegacyVersion(Version):
def parse(self, s):
return _legacy_key(s)
- PREREL_TAGS = set(
- ['*a', '*alpha', '*b', '*beta', '*c', '*rc', '*r', '*@', '*pre']
- )
-
@property
def is_prerelease(self):
- return any(x in self.PREREL_TAGS for x in self._parts)
+ result = False
+ for x in self._parts:
+ if (isinstance(x, string_types) and x.startswith('*') and
+ x < '*final'):
+ result = True
+ break
+ return result
class LegacyMatcher(Matcher):
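Two behavioural changes here: the matcher now strips a local version segment (the part after '-') from the candidate when the constraint carries none, and LegacyVersion.is_prerelease no longer relies on a fixed PREREL_TAGS set. A standalone sketch of the new pre-release test over legacy-parsed parts; the parts shown are illustrative of what the legacy parser produces, and the real code uses string_types for 2.x compatibility.

def looks_prerelease(parts):
    # Any '*'-prefixed string part that sorts before '*final' marks a
    # pre-release; '*final' itself and plain numeric parts do not.
    return any(isinstance(x, str) and x.startswith('*') and x < '*final'
               for x in parts)

print(looks_prerelease(('00000001', '*a', '00000002', '*final')))  # True
print(looks_prerelease(('00000001', '00000002', '*final')))        # False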
diff --git a/pip/_vendor/distlib/wheel.py b/pip/_vendor/distlib/wheel.py
index 6dbca81f1..d67d4bc5d 100644
--- a/pip/_vendor/distlib/wheel.py
+++ b/pip/_vendor/distlib/wheel.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
#
-# Copyright (C) 2013 Vinay Sajip.
+# Copyright (C) 2013-2014 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
@@ -27,12 +27,13 @@ from . import __version__, DistlibException
from .compat import sysconfig, ZipFile, fsdecode, text_type, filter
from .database import InstalledDistribution
from .metadata import Metadata, METADATA_FILENAME
-from .util import (FileOperator, convert_path, CSVReader, CSVWriter,
- cached_property, get_cache_base, read_exports)
-
+from .util import (FileOperator, convert_path, CSVReader, CSVWriter, Cache,
+ cached_property, get_cache_base, read_exports, tempdir)
+from .version import NormalizedVersion, UnsupportedVersionError
logger = logging.getLogger(__name__)
+cache = None # created when needed
if hasattr(sys, 'pypy_version_info'):
IMP_PREFIX = 'pp'
@@ -55,7 +56,17 @@ ABI = sysconfig.get_config_var('SOABI')
if ABI and ABI.startswith('cpython-'):
ABI = ABI.replace('cpython-', 'cp')
else:
- ABI = 'none'
+ def _derive_abi():
+ parts = ['cp', VER_SUFFIX]
+ if sysconfig.get_config_var('Py_DEBUG'):
+ parts.append('d')
+ if sysconfig.get_config_var('WITH_PYMALLOC'):
+ parts.append('m')
+ if sysconfig.get_config_var('Py_UNICODE_SIZE') == 4:
+ parts.append('u')
+ return ''.join(parts)
+ ABI = _derive_abi()
+ del _derive_abi
FILENAME_RE = re.compile(r'''
(?P<nm>[^-]+)
@@ -132,7 +143,7 @@ class Wheel(object):
Initialise an instance using a (valid) filename.
"""
self.sign = sign
- self.verify = verify
+ self.should_verify = verify
self.buildver = ''
self.pyver = [PYVER]
self.abi = ['none']
@@ -147,7 +158,8 @@ class Wheel(object):
if m:
info = m.groupdict('')
self.name = info['nm']
- self.version = info['vn']
+ # Reinstate the local version separator
+ self.version = info['vn'].replace('_', '-')
self.buildver = info['bn']
self._filename = self.filename
else:
@@ -179,10 +191,17 @@ class Wheel(object):
pyver = '.'.join(self.pyver)
abi = '.'.join(self.abi)
arch = '.'.join(self.arch)
- return '%s-%s%s-%s-%s-%s.whl' % (self.name, self.version, buildver,
+ # replace - with _ as a local version separator
+ version = self.version.replace('-', '_')
+ return '%s-%s%s-%s-%s-%s.whl' % (self.name, version, buildver,
pyver, abi, arch)
@property
+ def exists(self):
+ path = os.path.join(self.dirname, self.filename)
+ return os.path.isfile(path)
+
+ @property
def tags(self):
for pyver in self.pyver:
for abi in self.abi:
@@ -195,29 +214,38 @@ class Wheel(object):
name_ver = '%s-%s' % (self.name, self.version)
info_dir = '%s.dist-info' % name_ver
wrapper = codecs.getreader('utf-8')
- metadata_filename = posixpath.join(info_dir, METADATA_FILENAME)
with ZipFile(pathname, 'r') as zf:
+ wheel_metadata = self.get_wheel_metadata(zf)
+ wv = wheel_metadata['Wheel-Version'].split('.', 1)
+ file_version = tuple([int(i) for i in wv])
+ if file_version < (1, 1):
+ fn = 'METADATA'
+ else:
+ fn = METADATA_FILENAME
try:
+ metadata_filename = posixpath.join(info_dir, fn)
with zf.open(metadata_filename) as bf:
wf = wrapper(bf)
result = Metadata(fileobj=wf)
except KeyError:
raise ValueError('Invalid wheel, because %s is '
- 'missing' % METADATA_FILENAME)
+ 'missing' % fn)
return result
- @cached_property
- def info(self):
- pathname = os.path.join(self.dirname, self.filename)
+ def get_wheel_metadata(self, zf):
name_ver = '%s-%s' % (self.name, self.version)
info_dir = '%s.dist-info' % name_ver
metadata_filename = posixpath.join(info_dir, 'WHEEL')
- wrapper = codecs.getreader('utf-8')
+ with zf.open(metadata_filename) as bf:
+ wf = codecs.getreader('utf-8')(bf)
+ message = message_from_file(wf)
+ return dict(message)
+
+ @cached_property
+ def info(self):
+ pathname = os.path.join(self.dirname, self.filename)
with ZipFile(pathname, 'r') as zf:
- with zf.open(metadata_filename) as bf:
- wf = wrapper(bf)
- message = message_from_file(wf)
- result = dict(message)
+ result = self.get_wheel_metadata(zf)
return result
def process_shebang(self, data):
@@ -255,6 +283,28 @@ class Wheel(object):
p = to_posix(os.path.relpath(record_path, base))
writer.writerow((p, '', ''))
+ def write_records(self, info, libdir, archive_paths):
+ records = []
+ distinfo, info_dir = info
+ hasher = getattr(hashlib, self.hash_kind)
+ for ap, p in archive_paths:
+ with open(p, 'rb') as f:
+ data = f.read()
+ digest = '%s=%s' % self.get_hash(data)
+ size = os.path.getsize(p)
+ records.append((ap, digest, size))
+
+ p = os.path.join(distinfo, 'RECORD')
+ self.write_record(records, p, libdir)
+ ap = to_posix(os.path.join(info_dir, 'RECORD'))
+ archive_paths.append((ap, p))
+
+ def build_zip(self, pathname, archive_paths):
+ with ZipFile(pathname, 'w', zipfile.ZIP_DEFLATED) as zf:
+ for ap, p in archive_paths:
+ logger.debug('Wrote %s to %s in wheel', p, ap)
+ zf.write(p, ap)
+
def build(self, paths, tags=None, wheel_version=None):
"""
Build a wheel from files in specified paths, and use any specified tags
@@ -353,25 +403,10 @@ class Wheel(object):
# Now, at last, RECORD.
# Paths in here are archive paths - nothing else makes sense.
- records = []
- hasher = getattr(hashlib, self.hash_kind)
- for ap, p in archive_paths:
- with open(p, 'rb') as f:
- data = f.read()
- digest = '%s=%s' % self.get_hash(data)
- size = os.path.getsize(p)
- records.append((ap, digest, size))
-
- p = os.path.join(distinfo, 'RECORD')
- self.write_record(records, p, libdir)
- ap = to_posix(os.path.join(info_dir, 'RECORD'))
- archive_paths.append((ap, p))
+ self.write_records((distinfo, info_dir), libdir, archive_paths)
# Now, ready to build the zip file
pathname = os.path.join(self.dirname, self.filename)
- with ZipFile(pathname, 'w', zipfile.ZIP_DEFLATED) as zf:
- for ap, p in archive_paths:
- logger.debug('Wrote %s to %s in wheel', p, ap)
- zf.write(p, ap)
+ self.build_zip(pathname, archive_paths)
return pathname
def install(self, paths, maker, **kwargs):
@@ -601,11 +636,13 @@ class Wheel(object):
shutil.rmtree(workdir)
def _get_dylib_cache(self):
- # Use native string to avoid issues on 2.x: see Python #20140.
- result = os.path.join(get_cache_base(), str('dylib-cache'), sys.version[:3])
- if not os.path.isdir(result):
- os.makedirs(result)
- return result
+ global cache
+ if cache is None:
+ # Use native string to avoid issues on 2.x: see Python #20140.
+ base = os.path.join(get_cache_base(), str('dylib-cache'),
+ sys.version[:3])
+ cache = Cache(base)
+ return cache
def _get_extensions(self):
pathname = os.path.join(self.dirname, self.filename)
@@ -619,7 +656,11 @@ class Wheel(object):
with zf.open(arcname) as bf:
wf = wrapper(bf)
extensions = json.load(wf)
- cache_base = self._get_dylib_cache()
+ cache = self._get_dylib_cache()
+ prefix = cache.prefix_to_dir(pathname)
+ cache_base = os.path.join(cache.base, prefix)
+ if not os.path.isdir(cache_base):
+ os.makedirs(cache_base)
for name, relpath in extensions.items():
dest = os.path.join(cache_base, convert_path(relpath))
if not os.path.exists(dest):
@@ -637,10 +678,25 @@ class Wheel(object):
pass
return result
+ def is_compatible(self):
+ """
+ Determine if a wheel is compatible with the running system.
+ """
+ return is_compatible(self)
+
+ def is_mountable(self):
+ """
+ Determine if a wheel is asserted as mountable by its metadata.
+ """
+ return True # for now - metadata details TBD
+
def mount(self, append=False):
pathname = os.path.abspath(os.path.join(self.dirname, self.filename))
- if not is_compatible(self):
- msg = 'Wheel %s not mountable in this Python.' % pathname
+ if not self.is_compatible():
+ msg = 'Wheel %s not compatible with this Python.' % pathname
+ raise DistlibException(msg)
+ if not self.is_mountable():
+ msg = 'Wheel %s is marked as not mountable.' % pathname
raise DistlibException(msg)
if pathname in sys.path:
logger.debug('%s already in path', pathname)
@@ -667,6 +723,160 @@ class Wheel(object):
if _hook in sys.meta_path:
sys.meta_path.remove(_hook)
+ def verify(self):
+ pathname = os.path.join(self.dirname, self.filename)
+ name_ver = '%s-%s' % (self.name, self.version)
+ data_dir = '%s.data' % name_ver
+ info_dir = '%s.dist-info' % name_ver
+
+ metadata_name = posixpath.join(info_dir, METADATA_FILENAME)
+ wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
+ record_name = posixpath.join(info_dir, 'RECORD')
+
+ wrapper = codecs.getreader('utf-8')
+
+ with ZipFile(pathname, 'r') as zf:
+ with zf.open(wheel_metadata_name) as bwf:
+ wf = wrapper(bwf)
+ message = message_from_file(wf)
+ wv = message['Wheel-Version'].split('.', 1)
+ file_version = tuple([int(i) for i in wv])
+ # TODO version verification
+
+ records = {}
+ with zf.open(record_name) as bf:
+ with CSVReader(stream=bf) as reader:
+ for row in reader:
+ p = row[0]
+ records[p] = row
+
+ for zinfo in zf.infolist():
+ arcname = zinfo.filename
+ if isinstance(arcname, text_type):
+ u_arcname = arcname
+ else:
+ u_arcname = arcname.decode('utf-8')
+ if '..' in u_arcname:
+ raise DistlibException('invalid entry in '
+ 'wheel: %r' % u_arcname)
+
+ # The signature file won't be in RECORD,
+ # and we don't currently do anything with it
+ if u_arcname.endswith('/RECORD.jws'):
+ continue
+ row = records[u_arcname]
+ if row[2] and str(zinfo.file_size) != row[2]:
+ raise DistlibException('size mismatch for '
+ '%s' % u_arcname)
+ if row[1]:
+ kind, value = row[1].split('=', 1)
+ with zf.open(arcname) as bf:
+ data = bf.read()
+ _, digest = self.get_hash(data, kind)
+ if digest != value:
+ raise DistlibException('digest mismatch for '
+ '%s' % arcname)
+
+ def update(self, modifier, dest_dir=None, **kwargs):
+ """
+ Update the contents of a wheel in a generic way. The modifier should
+ be a callable which expects a dictionary argument: its keys are
+ archive-entry paths, and its values are absolute filesystem paths
+ where the contents the corresponding archive entries can be found. The
+ modifier is free to change the contents of the files pointed to, add
+ new entries and remove entries, before returning. This method will
+ extract the entire contents of the wheel to a temporary location, call
+ the modifier, and then use the passed (and possibly updated)
+ dictionary to write a new wheel. If ``dest_dir`` is specified, the new
+ wheel is written there -- otherwise, the original wheel is overwritten.
+
+ The modifier should return True if it updated the wheel, else False.
+ This method returns the same value the modifier returns.
+ """
+
+ def get_version(path_map, info_dir):
+ version = path = None
+ key = '%s/%s' % (info_dir, METADATA_FILENAME)
+ if key not in path_map:
+ key = '%s/PKG-INFO' % info_dir
+ if key in path_map:
+ path = path_map[key]
+ version = Metadata(path=path).version
+ return version, path
+
+ def update_version(version, path):
+ updated = None
+ try:
+ v = NormalizedVersion(version)
+ i = version.find('-')
+ if i < 0:
+ updated = '%s-1' % version
+ else:
+ parts = [int(s) for s in version[i + 1:].split('.')]
+ parts[-1] += 1
+ updated = '%s-%s' % (version[:i],
+ '.'.join(str(i) for i in parts))
+ except UnsupportedVersionError:
+ logger.debug('Cannot update non-compliant (PEP-440) '
+ 'version %r', version)
+ if updated:
+ md = Metadata(path=path)
+ md.version = updated
+ legacy = not path.endswith(METADATA_FILENAME)
+ md.write(path=path, legacy=legacy)
+ logger.debug('Version updated from %r to %r', version,
+ updated)
+
+ pathname = os.path.join(self.dirname, self.filename)
+ name_ver = '%s-%s' % (self.name, self.version)
+ info_dir = '%s.dist-info' % name_ver
+ record_name = posixpath.join(info_dir, 'RECORD')
+ with tempdir() as workdir:
+ with ZipFile(pathname, 'r') as zf:
+ path_map = {}
+ for zinfo in zf.infolist():
+ arcname = zinfo.filename
+ if isinstance(arcname, text_type):
+ u_arcname = arcname
+ else:
+ u_arcname = arcname.decode('utf-8')
+ if u_arcname == record_name:
+ continue
+ if '..' in u_arcname:
+ raise DistlibException('invalid entry in '
+ 'wheel: %r' % u_arcname)
+ zf.extract(zinfo, workdir)
+ path = os.path.join(workdir, convert_path(u_arcname))
+ path_map[u_arcname] = path
+
+ # Remember the version.
+ original_version, _ = get_version(path_map, info_dir)
+ # Files extracted. Call the modifier.
+ modified = modifier(path_map, **kwargs)
+ if modified:
+ # Something changed - need to build a new wheel.
+ current_version, path = get_version(path_map, info_dir)
+ if current_version and (current_version == original_version):
+ # Add or update local version to signify changes.
+ update_version(current_version, path)
+ # Decide where the new wheel goes.
+ if dest_dir is None:
+ fd, newpath = tempfile.mkstemp(suffix='.whl',
+ prefix='wheel-update-',
+ dir=workdir)
+ os.close(fd)
+ else:
+ if not os.path.isdir(dest_dir):
+ raise DistlibException('Not a directory: %r' % dest_dir)
+ newpath = os.path.join(dest_dir, self.filename)
+ archive_paths = list(path_map.items())
+ distinfo = os.path.join(workdir, info_dir)
+ info = distinfo, info_dir
+ self.write_records(info, workdir, archive_paths)
+ self.build_zip(newpath, archive_paths)
+ if dest_dir is None:
+ shutil.copyfile(newpath, pathname)
+ return modified
def compatible_tags():
"""
@@ -687,9 +897,34 @@ def compatible_tags():
abis.append('none')
result = []
+ arches = [ARCH]
+ if sys.platform == 'darwin':
+ m = re.match('(\w+)_(\d+)_(\d+)_(\w+)$', ARCH)
+ if m:
+ name, major, minor, arch = m.groups()
+ minor = int(minor)
+ matches = [arch]
+ if arch in ('i386', 'ppc'):
+ matches.append('fat')
+ if arch in ('i386', 'ppc', 'x86_64'):
+ matches.append('fat3')
+ if arch in ('ppc64', 'x86_64'):
+ matches.append('fat64')
+ if arch in ('i386', 'x86_64'):
+ matches.append('intel')
+ if arch in ('i386', 'x86_64', 'intel', 'ppc', 'ppc64'):
+ matches.append('universal')
+ while minor >= 0:
+ for match in matches:
+ s = '%s_%s_%s_%s' % (name, major, minor, match)
+ if s != ARCH: # already there
+ arches.append(s)
+ minor -= 1
+
# Most specific - our Python version, ABI and arch
for abi in abis:
- result.append((''.join((IMP_PREFIX, versions[0])), abi, ARCH))
+ for arch in arches:
+ result.append((''.join((IMP_PREFIX, versions[0])), abi, arch))
# where no ABI / arch dependency, but IMP_PREFIX dependency
for i, version in enumerate(versions):
@@ -702,7 +937,7 @@ def compatible_tags():
result.append((''.join(('py', version)), 'none', 'any'))
if i == 0:
result.append((''.join(('py', version[0])), 'none', 'any'))
- return result
+ return set(result)
COMPATIBLE_TAGS = compatible_tags()
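The new Wheel.update() drives a rebuild through write_records() and build_zip(): the modifier receives a mapping of archive entry names to extracted filesystem paths, edits the files (or the mapping) in place, and returns True if anything changed, in which case the wheel is rebuilt and its local version bumped unless the modifier already changed the version. A hedged usage sketch using upstream distlib; the wheel filename and the entry being patched are illustrative.

from distlib.wheel import Wheel

def add_marker(path_map):
    # path_map maps archive paths (e.g. 'demo/__init__.py') to the
    # filesystem paths they were extracted to.
    target = 'demo/__init__.py'
    if target not in path_map:
        return False
    with open(path_map[target], 'a') as f:
        f.write('\nPATCHED = True\n')
    return True        # tells update() to rebuild the wheel

w = Wheel('demo-1.0-py2.py3-none-any.whl')
changed = w.update(add_marker)      # dest_dir=None: overwrite the original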
diff --git a/pip/_vendor/vendor.txt b/pip/_vendor/vendor.txt
index 1ce0bc05c..8385545e5 100644
--- a/pip/_vendor/vendor.txt
+++ b/pip/_vendor/vendor.txt
@@ -1,4 +1,4 @@
-distlib==0.1.7
+distlib==0.1.8
html5lib==1.0b1
six==1.5.2
colorama==0.2.7