diff options
author | Jason R. Coombs <jaraco@jaraco.com> | 2017-11-12 13:40:27 -0500 |
---|---|---|
committer | Jason R. Coombs <jaraco@jaraco.com> | 2017-11-12 13:40:27 -0500 |
commit | ff497abc7d922edf8a0221946dd41445be7f968c (patch) | |
tree | 8b831f0f305268c9429fe4bcd1afe13452a0934b | |
parent | 5eb85d0e1f9064a645c490cfdde9bb246af986b2 (diff) | |
download | wheel-ff497abc7d922edf8a0221946dd41445be7f968c.tar.gz |
Removing everything
63 files changed, 0 insertions, 6160 deletions
diff --git a/CHANGES.txt b/CHANGES.txt deleted file mode 100644 index dfbb12b..0000000 --- a/CHANGES.txt +++ /dev/null @@ -1,281 +0,0 @@ -0.30.0 -====== -- Add py-limited-api {cp32|cp33|cp34|...} flag to produce cpNN.abi3.{arch} - tags on CPython 3. -- Improve Python, abi tagging for `wheel convert`. Thanks Ales Erjavec. -- Much improved use of context managers for file handling. Thanks Kyle - Stewart. -- Convert absolute imports to relative. Thanks Ashish Bhate. -- Remove support for Python 2.6. - -0.29.0 -====== -- Fix compression type of files in archive (Issue #155, Pull Request #62, - thanks Xavier Fernandez) - -0.28.0 -====== -- Fix file modes in archive (Issue #154) - -0.27.0 -====== -- Support forcing a platform tag using `--plat-name` on pure-Python wheels, as - well as nonstandard platform tags on non-pure wheels (Pull Request #60, Issue - #144, thanks Andrés Díaz) -- Add SOABI tags to platform-specific wheels built for Python 2.X (Pull Request - #55, Issue #63, Issue #101) -- Support reproducible wheel files, wheels that can be rebuilt and will hash to - the same values as previous builds (Pull Request #52, Issue #143, thanks - Barry Warsaw) -- Support for changes in keyring >= 8.0 (Pull Request #61, thanks Jason R. 
- Coombs) -- Use the file context manager when checking if dependency_links.txt is empty, - fixes problems building wheels under PyPy on Windows (Issue #150, thanks - Cosimo Lupo) -- Don't attempt to (recursively) create a build directory ending with `..` - (invalid on all platforms, but code was only executed on Windows) (Issue #91) -- Added the PyPA Code of Conduct (Pull Request #56) - -0.26.0 -====== -- Fix multiple entrypoint comparison failure on Python 3 (Issue #148) - -0.25.0 -====== -- Add Python 3.5 to tox configuration -- Deterministic (sorted) metadata -- Fix tagging for Python 3.5 compatibility -- Support py2-none-'arch' and py3-none-'arch' tags -- Treat data-only wheels as pure -- Write to temporary file and rename when using wheel install --force - -0.24.0 -====== -- The python tag used for pure-python packages is now .pyN (major version - only). This change actually occurred in 0.23.0 when the --python-tag - option was added, but was not explicitly mentioned in the changelog then. -- wininst2wheel and egg2wheel removed. Use "wheel convert [archive]" - instead. -- Wheel now supports setuptools style conditional requirements via the - extras_require={} syntax. Separate 'extra' names from conditions using - the : character. Wheel's own setup.py does this. (The empty-string - extra is the same as install_requires.) These conditional requirements - should work the same whether the package is installed by wheel or - by setup.py. - -0.23.0 -====== -- Compatibility tag flags added to the bdist_wheel command -- sdist should include files necessary for tests -- 'wheel convert' can now also convert unpacked eggs to wheel -- Rename pydist.json to metadata.json to avoid stepping on the PEP -- The --skip-scripts option has been removed, and not generating scripts is now - the default. The option was a temporary approach until installers could - generate scripts themselves. That is now the case with pip 1.5 and later. 
- Note that using pip 1.4 to install a wheel without scripts will leave the - installation without entry-point wrappers. The "wheel install-scripts" - command can be used to generate the scripts in such cases. -- Thank you contributors - -0.22.0 -====== -- Include entry_points.txt, scripts a.k.a. commands, in experimental - pydist.json -- Improved test_requires parsing -- Python 2.6 fixes, "wheel version" command courtesy pombredanne - -0.21.0 -====== -- Pregenerated scripts are the default again. -- "setup.py bdist_wheel --skip-scripts" turns them off. -- setuptools is no longer a listed requirement for the 'wheel' - package. It is of course still required in order for bdist_wheel - to work. -- "python -m wheel" avoids importing pkg_resources until it's necessary. - -0.20.0 -====== -- No longer include console_scripts in wheels. Ordinary scripts (shell files, - standalone Python files) are included as usual. -- Include new command "python -m wheel install-scripts [distribution - [distribution ...]]" to install the console_scripts (setuptools-style - scripts using pkg_resources) for a distribution. - -0.19.0 -====== -- pymeta.json becomes pydist.json - -0.18.0 -====== -- Python 3 Unicode improvements - -0.17.0 -====== -- Support latest PEP-426 "pymeta.json" (json-format metadata) - -0.16.0 -====== -- Python 2.6 compatibility bugfix (thanks John McFarlane) -- Non-prerelease version number - -1.0.0a2 -======= -- Bugfix for C-extension tags for CPython 3.3 (using SOABI) - -1.0.0a1 -======= -- Bugfix for bdist_wininst converter "wheel convert" -- Bugfix for dists where "is pure" is None instead of True or False - -1.0.0a0 -======= -- Update for version 1.0 of Wheel (PEP accepted). -- Python 3 fix for moving Unicode Description to metadata body -- Include rudimentary API documentation in Sphinx (thanks Kevin Horn) - -0.15.0 -====== -- Various improvements - -0.14.0 -====== -- Changed the signature format to better comply with the current JWS spec. 
- Breaks all existing signatures. -- Include ``wheel unsign`` command to remove RECORD.jws from an archive. -- Put the description in the newly allowed payload section of PKG-INFO - (METADATA) files. - -0.13.0 -====== -- Use distutils instead of sysconfig to get installation paths; can install - headers. -- Improve WheelFile() sort. -- Allow bootstrap installs without any pkg_resources. - -0.12.0 -====== -- Unit test for wheel.tool.install - -0.11.0 -====== -- API cleanup - -0.10.3 -====== -- Scripts fixer fix - -0.10.2 -====== -- Fix keygen - -0.10.1 -====== -- Preserve attributes on install. - -0.10.0 -====== -- Include a copy of pkg_resources. Wheel can now install into a virtualenv - that does not have distribute (though most packages still require - pkg_resources to actually work; wheel install distribute) -- Define a new setup.cfg section [wheel]. universal=1 will - apply the py2.py3-none-any tag for pure python wheels. - -0.9.7 -===== -- Only import dirspec when needed. dirspec is only needed to find the - configuration for keygen/signing operations. - -0.9.6 -===== -- requires-dist from setup.cfg overwrites any requirements from setup.py - Care must be taken that the requirements are the same in both cases, - or just always install from wheel. -- drop dirspec requirement on win32 -- improved command line utility, adds 'wheel convert [egg or wininst]' to - convert legacy binary formats to wheel - -0.9.5 -===== -- Wheel's own wheel file can be executed by Python, and can install itself: - ``python wheel-0.9.5-py27-none-any/wheel install ...`` -- Use argparse; basic ``wheel install`` command should run with only stdlib - dependencies. -- Allow requires_dist in setup.cfg's [metadata] section. In addition to - dependencies in setup.py, but will only be interpreted when installing - from wheel, not from sdist. Can be qualified with environment markers. 
- -0.9.4 -===== -- Fix wheel.signatures in sdist - -0.9.3 -===== -- Integrated digital signatures support without C extensions. -- Integrated "wheel install" command (single package, no dependency - resolution) including compatibility check. -- Support Python 3.3 -- Use Metadata 1.3 (PEP 426) - -0.9.2 -===== -- Automatic signing if WHEEL_TOOL points to the wheel binary -- Even more Python 3 fixes - -0.9.1 -===== -- 'wheel sign' uses the keys generated by 'wheel keygen' (instead of generating - a new key at random each time) -- Python 2/3 encoding/decoding fixes -- Run tests on Python 2.6 (without signature verification) - -0.9 -=== -- Updated digital signatures scheme -- Python 3 support for digital signatures -- Always verify RECORD hashes on extract -- "wheel" command line tool to sign, verify, unpack wheel files - -0.8 -=== -- none/any draft pep tags update -- improved wininst2wheel script -- doc changes and other improvements - -0.7 -=== -- sort .dist-info at end of wheel archive -- Windows & Python 3 fixes from Paul Moore -- pep8 -- scripts to convert wininst & egg to wheel - -0.6 -=== -- require distribute >= 0.6.28 -- stop using verlib - -0.5 -=== -- working pretty well - -0.4.2 -===== -- hyphenated name fix - -0.4 -=== -- improve test coverage -- improve Windows compatibility -- include tox.ini courtesy of Marc Abramowitz -- draft hmac sha-256 signing function - -0.3 -=== -- prototype egg2wheel conversion script - -0.2 -=== -- Python 3 compatibility - -0.1 -=== -- Initial version diff --git a/LICENSE.txt b/LICENSE.txt deleted file mode 100644 index c3441e6..0000000 --- a/LICENSE.txt +++ /dev/null @@ -1,22 +0,0 @@ -"wheel" copyright (c) 2012-2014 Daniel Holth <dholth@fastmail.fm> and -contributors. 
- -The MIT License - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the "Software"), -to deal in the Software without restriction, including without limitation -the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included -in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL -THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR -OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, -ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index 990e55d..0000000 --- a/MANIFEST.in +++ /dev/null @@ -1,7 +0,0 @@ -include wheel/*.txt *.txt *.sh -recursive-include wheel/test *.py -include wheel/test/test-1.0-py2.py3-none-win32.whl -include wheel/test/headers.dist/header.h -include wheel/test/pydist-schema.json -prune wheel/test/*/dist -prune wheel/test/*/build diff --git a/METADATA.in b/METADATA.in deleted file mode 100644 index 18ebf32..0000000 --- a/METADATA.in +++ /dev/null @@ -1,34 +0,0 @@ -Metadata-Version: 2.0 -Name: wheel -Version: ${VERSION} -Summary: A built-package format for Python. 
-Home-page: http://bitbucket.org/pypa/wheel/ -Author: Daniel Holth -Author-email: dholth@fastmail.fm -License: MIT -Keywords: wheel,packaging -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.2 -Classifier: Programming Language :: Python :: 3.3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Provides-Extra: tool -Provides-Extra: signatures -Requires-Dist: keyring; extra == 'signatures' -Provides-Extra: faster-signatures -Requires-Dist: ed25519ll; extra == 'faster-signatures' -Requires-Dist: argparse; python_version=="2.6" -Provides-Extra: signatures -Requires-Dist: pyxdg; sys_platform!="win32" and extra == 'signatures' - -${DESCRIPTION} diff --git a/README.txt b/README.txt deleted file mode 100644 index 7b37ad9..0000000 --- a/README.txt +++ /dev/null @@ -1,50 +0,0 @@ -Wheel -===== - -A built-package format for Python. - -A wheel is a ZIP-format archive with a specially formatted filename -and the .whl extension. It is designed to contain all the files for a -PEP 376 compatible install in a way that is very close to the on-disk -format. Many packages will be properly installed with only the "Unpack" -step (simply extracting the file onto sys.path), and the unpacked archive -preserves enough information to "Spread" (copy data and scripts to their -final locations) at any later time. 
- -The wheel project provides a `bdist_wheel` command for setuptools -(requires setuptools >= 0.8.0). Wheel files can be installed with a -newer `pip` from https://github.com/pypa/pip or with wheel's own command -line utility. - -The wheel documentation is at http://wheel.rtfd.org/. The file format -is documented in PEP 427 (http://www.python.org/dev/peps/pep-0427/). - -The reference implementation is at https://bitbucket.org/pypa/wheel - -Why not egg? ------------- - -Python's egg format predates the packaging related standards we have -today, the most important being PEP 376 "Database of Installed Python -Distributions" which specifies the .dist-info directory (instead of -.egg-info) and PEP 426 "Metadata for Python Software Packages 2.0" -which specifies how to express dependencies (instead of requires.txt -in .egg-info). - -Wheel implements these things. It also provides a richer file naming -convention that communicates the Python implementation and ABI as well -as simply the language version used in a particular package. - -Unlike .egg, wheel will be a fully-documented standard at the binary -level that is truly easy to install even if you do not want to use the -reference implementation. - - -Code of Conduct ---------------- - -Everyone interacting in the wheel project's codebases, issue trackers, chat -rooms, and mailing lists is expected to follow the `PyPA Code of Conduct`_. - -.. 
_PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/ - diff --git a/appveyor.yml b/appveyor.yml deleted file mode 100644 index 37ffa8e..0000000 --- a/appveyor.yml +++ /dev/null @@ -1,5 +0,0 @@ -version: "{build}" -install: -- cmd: python -m pip install tox -build_script: -- cmd: python -m tox diff --git a/bento.info b/bento.info deleted file mode 100644 index cd2e496..0000000 --- a/bento.info +++ /dev/null @@ -1,11 +0,0 @@ -# Experimental bento.info for wheel -Name: wheel -Version: 0.12.0 -Library: - Packages: - wheel, wheel.tool, wheel.test - -ExtraSourceFiles: - wheel/test/simple.dist/*.py, - wheel/test/simple.dist/simpledist/*.py - diff --git a/docs/Makefile b/docs/Makefile deleted file mode 100644 index 54b44f9..0000000 --- a/docs/Makefile +++ /dev/null @@ -1,153 +0,0 @@ -# Makefile for Sphinx documentation -# - -# You can set these variables from the command line. -SPHINXOPTS = -SPHINXBUILD = sphinx-build -PAPER = -BUILDDIR = _build - -# Internal variables. -PAPEROPT_a4 = -D latex_paper_size=a4 -PAPEROPT_letter = -D latex_paper_size=letter -ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . -# the i18n builder cannot share the environment and doctrees with the others -I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
- -.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext - -help: - @echo "Please use \`make <target>' where <target> is one of" - @echo " html to make standalone HTML files" - @echo " dirhtml to make HTML files named index.html in directories" - @echo " singlehtml to make a single large HTML file" - @echo " pickle to make pickle files" - @echo " json to make JSON files" - @echo " htmlhelp to make HTML files and a HTML help project" - @echo " qthelp to make HTML files and a qthelp project" - @echo " devhelp to make HTML files and a Devhelp project" - @echo " epub to make an epub" - @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" - @echo " latexpdf to make LaTeX files and run them through pdflatex" - @echo " text to make text files" - @echo " man to make manual pages" - @echo " texinfo to make Texinfo files" - @echo " info to make Texinfo files and run them through makeinfo" - @echo " gettext to make PO message catalogs" - @echo " changes to make an overview of all changed/added/deprecated items" - @echo " linkcheck to check all external links for integrity" - @echo " doctest to run all doctests embedded in the documentation (if enabled)" - -clean: - -rm -rf $(BUILDDIR)/* - -html: - $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." - -dirhtml: - $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." - -singlehtml: - $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml - @echo - @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." - -pickle: - $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle - @echo - @echo "Build finished; now you can process the pickle files." 
- -json: - $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json - @echo - @echo "Build finished; now you can process the JSON files." - -htmlhelp: - $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp - @echo - @echo "Build finished; now you can run HTML Help Workshop with the" \ - ".hhp project file in $(BUILDDIR)/htmlhelp." - -qthelp: - $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp - @echo - @echo "Build finished; now you can run "qcollectiongenerator" with the" \ - ".qhcp project file in $(BUILDDIR)/qthelp, like this:" - @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/wheel.qhcp" - @echo "To view the help file:" - @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/wheel.qhc" - -devhelp: - $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp - @echo - @echo "Build finished." - @echo "To view the help file:" - @echo "# mkdir -p $$HOME/.local/share/devhelp/wheel" - @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/wheel" - @echo "# devhelp" - -epub: - $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub - @echo - @echo "Build finished. The epub file is in $(BUILDDIR)/epub." - -latex: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo - @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." - @echo "Run \`make' in that directory to run these through (pdf)latex" \ - "(use \`make latexpdf' here to do that automatically)." - -latexpdf: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through pdflatex..." - $(MAKE) -C $(BUILDDIR)/latex all-pdf - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -text: - $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text - @echo - @echo "Build finished. The text files are in $(BUILDDIR)/text." - -man: - $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man - @echo - @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 
- -texinfo: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo - @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." - @echo "Run \`make' in that directory to run these through makeinfo" \ - "(use \`make info' here to do that automatically)." - -info: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo "Running Texinfo files through makeinfo..." - make -C $(BUILDDIR)/texinfo info - @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." - -gettext: - $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale - @echo - @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." - -changes: - $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes - @echo - @echo "The overview file is in $(BUILDDIR)/changes." - -linkcheck: - $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck - @echo - @echo "Link check complete; look for any errors in the above output " \ - "or in $(BUILDDIR)/linkcheck/output.txt." - -doctest: - $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest - @echo "Testing of doctests in the sources finished, look at the " \ - "results in $(BUILDDIR)/doctest/output.txt." diff --git a/docs/api.rst b/docs/api.rst deleted file mode 100644 index 152b2f5..0000000 --- a/docs/api.rst +++ /dev/null @@ -1,30 +0,0 @@ -API Documentation -================= - -wheel.archive -------------- - -.. automodule:: wheel.archive - :members: - - -wheel.install -------------- - -.. automodule:: wheel.install - :members: - - -wheel.pkginfo -------------- - -.. automodule:: wheel.pkginfo - :members: - - -wheel.util ----------- - -.. automodule:: wheel.util - :members: - diff --git a/docs/conf.py b/docs/conf.py deleted file mode 100644 index 62b1001..0000000 --- a/docs/conf.py +++ /dev/null @@ -1,242 +0,0 @@ -# -*- coding: utf-8 -*- -# -# wheel documentation build configuration file, created by -# sphinx-quickstart on Thu Jul 12 00:14:09 2012. 
-# -# This file is execfile()d with the current directory set to its containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys, os, pkg_resources - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -#sys.path.insert(0, os.path.abspath('.')) - -# -- General configuration ----------------------------------------------------- - -# If your documentation needs a minimal Sphinx version, state it here. -#needs_sphinx = '1.0' - -# Add any Sphinx extension module names here, as strings. They can be extensions -# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = ['sphinx.ext.autodoc'] - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# The suffix of source filenames. -source_suffix = '.rst' - -# The encoding of source files. -#source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = 'index' - -# General information about the project. -project = u'wheel' -copyright = u'2012, Daniel Holth' - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The short X.Y version. -version = pkg_resources.working_set.by_key['wheel'].version -# The full version, including alpha/beta/rc tags. -release = version - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. 
-#language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -#today = '' -# Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ['_build'] - -# The reST default role (used for this markup: `text`) to use for all documents. -#default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -#add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -#show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' - -# A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] - - -# -- Options for HTML output --------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = 'default' - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -#html_theme_options = {} - -# Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# "<project> v<release> documentation". -#html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. 
-#html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -#html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -#html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -#html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -#html_additional_pages = {} - -# If false, no module index is generated. -#html_domain_indices = True - -# If false, no index is generated. -#html_use_index = True - -# If true, the index is split into individual pages for each letter. -#html_split_index = False - -# If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a <link> tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -#html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None - -# Output file base name for HTML help builder. 
-htmlhelp_basename = 'wheeldoc' - - -# -- Options for LaTeX output -------------------------------------------------- - -latex_elements = { -# The paper size ('letterpaper' or 'a4paper'). -#'papersize': 'letterpaper', - -# The font size ('10pt', '11pt' or '12pt'). -#'pointsize': '10pt', - -# Additional stuff for the LaTeX preamble. -#'preamble': '', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, author, documentclass [howto/manual]). -latex_documents = [ - ('index', 'wheel.tex', u'wheel Documentation', - u'Daniel Holth', 'manual'), -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -#latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -#latex_use_parts = False - -# If true, show page references after internal links. -#latex_show_pagerefs = False - -# If true, show URL addresses after external links. -#latex_show_urls = False - -# Documents to append as an appendix to all manuals. -#latex_appendices = [] - -# If false, no module index is generated. -#latex_domain_indices = True - - -# -- Options for manual page output -------------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ('index', 'wheel', u'wheel Documentation', - [u'Daniel Holth'], 1) -] - -# If true, show URL addresses after external links. -#man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------------ - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ('index', 'wheel', u'wheel Documentation', - u'Daniel Holth', 'wheel', 'One line description of project.', - 'Miscellaneous'), -] - -# Documents to append as an appendix to all manuals. 
-#texinfo_appendices = [] - -# If false, no module index is generated. -#texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -#texinfo_show_urls = 'footnote' diff --git a/docs/index.rst b/docs/index.rst deleted file mode 100644 index f1dbce6..0000000 --- a/docs/index.rst +++ /dev/null @@ -1,225 +0,0 @@ -.. wheel documentation master file, created by - sphinx-quickstart on Thu Jul 12 00:14:09 2012. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -.. include:: ../README.txt - -Usage ------ - -The current version of wheel can be used to speed up repeated -installations by reducing the number of times you have to compile your -software. When you are creating a virtualenv for each revision of your -software the savings can be dramatic. This example packages pyramid -and all its dependencies as wheels, and then installs pyramid from the -built packages:: - - # Make sure you have the latest pip that supports wheel - pip install --upgrade pip - - # Install wheel - pip install wheel - - # Build a directory of wheels for pyramid and all its dependencies - pip wheel --wheel-dir=/tmp/wheelhouse pyramid - - # Install from cached wheels - pip install --use-wheel --no-index --find-links=/tmp/wheelhouse pyramid - - # Install from cached wheels remotely - pip install --use-wheel --no-index --find-links=https://wheelhouse.example.com/ pyramid - - -For lxml, an up to 3-minute "search for the newest version and compile" -can become a less-than-1 second "unpack from wheel". - -As a side effect the wheel directory, "/tmp/wheelhouse" in the example, -contains installable copies of the exact versions of your application's -dependencies. By installing from those cached wheels -you can recreate that environment quickly and with no surprises. - -To build an individual wheel, run ``python setup.py bdist_wheel``. 
Note that -``bdist_wheel`` only works with distribute (``import setuptools``); plain -``distutils`` does not support pluggable commands like ``bdist_wheel``. On -the other hand ``pip`` always runs ``setup.py`` with setuptools enabled. - -Wheel also includes its own installer that can only install wheels (not -sdists) from a local file or folder, but has the advantage of working -even when distribute or pip has not been installed. - -Wheel's builtin utility can be invoked directly from wheel's own wheel:: - - $ python wheel-0.21.0-py2.py3-none-any.whl/wheel -h - usage: wheel [-h] - - {keygen,sign,unsign,verify,unpack,install,install-scripts,convert,help} - ... - - positional arguments: - {keygen,sign,unsign,verify,unpack,install,install-scripts,convert,help} - commands - keygen Generate signing key - sign Sign wheel - unsign Remove RECORD.jws from a wheel by truncating the zip - file. RECORD.jws must be at the end of the archive. - The zip file must be an ordinary archive, with the - compressed files and the directory in the same order, - and without any non-zip content after the truncation - point. - verify Verify a wheel. The signature will be verified for - internal consistency ONLY and printed. Wheel's own - unpack/install commands verify the manifest against - the signature and file contents. - unpack Unpack wheel - install Install wheels - install-scripts Install console_scripts - convert Convert egg or wininst to wheel - help Show this help - - optional arguments: - -h, --help show this help message and exit - -Setuptools scripts handling ---------------------------- - -Setuptools' popular `console_scripts` and `gui_scripts` entry points can -be used to generate platform-specific scripts wrappers. Most usefully -these wrappers include `.exe` launchers if they are generated on a -Windows machine. 
- -As of 0.23.0, `bdist_wheel` no longer places pre-generated versions of these -wrappers into the `*.data/scripts/` directory of the archive (non-setuptools -scripts are still present, of course). - -If the scripts are needed, use a real installer like `pip`. The wheel tool -`python -m wheel install-scripts package [package ...]` can also be used at -any time to call setuptools to write the appropriate scripts wrappers. - -Defining the Python version ---------------------------- - -The `bdist_wheel` command automatically determines the correct tags to use for -the generated wheel. These are based on the Python interpreter used to -generate the wheel and whether the project contains C extension code or not. -While this is usually correct for C code, it can be too conservative for pure -Python code. The bdist_wheel command therefore supports two flags that can be -used to specify the Python version tag to use more precisely:: - - --universal Specifies that a pure-python wheel is "universal" - (i.e., it works on any version of Python). This - equates to the tag "py2.py3". - --python-tag XXX Specifies the precise python version tag to use for - a pure-python wheel. - --py-limited-api {cp32|cp33|cp34|...} - Specifies Python Py_LIMITED_API compatibility with - the version of CPython passed and later versions. - The wheel will be tagged cpNN.abi3.{arch} on CPython 3. - This flag does not affect Python 2 builds or alternate - Python implementations. - - To conform to the limited API, all your C - extensions must use only functions from the limited - API, pass Extension(py_limited_api=True) and e.g. - #define Py_LIMITED_API=0x03020000 depending on - the exact minimum Python you wish to support. - -The --universal and --python-tag flags have no effect when used on a -project that includes C extension code. - -The default for a pure Python project (if no explicit flags are given) is "pyN" -where N is the major version of the Python interpreter used to build the wheel. 
-This is generally the correct choice, as projects would not typically ship -different wheels for different minor versions of Python. - -A reasonable use of the `--python-tag` argument would be for a project that -uses Python syntax only introduced in a particular Python version. There are -no current examples of this, but if wheels had been available when Python 2.5 -was released (the first version containing the `with` statement), wheels for a -project that used the `with` statement would typically use `--python-tag py25`. -However, unless a separate version of the wheel was shipped which avoided the -use of the new syntax, there is little benefit in explicitly marking the tag in -this manner. - -Typically, projects would not specify Python tags on the command line, but -would use `setup.cfg` to set them as a project default:: - - [bdist_wheel] - universal=1 - -or:: - - [bdist_wheel] - python-tag = py32 - -Defining conditional dependencies ---------------------------------- - -In wheel, the only way to have conditional dependencies (that might only be -needed on certain platforms) is to use environment markers as defined by -PEP 426. - -As of wheel 0.24.0, the recommended way to do this is in the setuptools -`extras_require` parameter. A `:` separates the extra name from the marker. -Wheel's own setup.py has an example:: - extras_require={ - ':python_version=="2.6"': ['argparse'], - 'signatures': ['keyring'], - 'signatures:sys_platform!="win32"': ['pyxdg'], - 'faster-signatures': ['ed25519ll'], - 'tool': [] - }, - -The extra named '' signifies a default requirement, as if it was passed to -`install_requires`. - -Older versions of bdist_wheel supported passing requirements in a -now-deprecated [metadata] section in setup.cfg. 
- -Automatically sign wheel files ------------------------------- - -Wheel contains an experimental digital signatures scheme based on Ed25519 -signatures; these signatures are unrelated to pgp/gpg signatures and do not -include a trust model. - -`python setup.py bdist_wheel` will automatically sign wheel files if -the environment variable `WHEEL_TOOL` is set to the path of the `wheel` -command line tool.:: - - # Install the wheel tool and its dependencies - $ pip install wheel[tool] - # Generate a signing key (only once) - $ wheel keygen - - $ export WHEEL_TOOL=/path/to/wheel - $ python setup.py bdist_wheel - -Signing is done in a subprocess because it is not convenient for the -build environment to contain bindings to the keyring and cryptography -libraries. The keyring library may not be able to find your keys (choosing -a different key storage back end based on available dependencies) unless -you run it from the same environment used for keygen. - -Format ------- - -The wheel format is documented as PEP 427 "The Wheel Binary Package -Format..." (http://www.python.org/dev/peps/pep-0427/). - -Slogans -------- - -Wheel - -* Because ‘newegg’ was taken. -* Python packaging - reinvented. -* A container for cheese. -* It makes it easier to roll out software. - -.. toctree:: - :maxdepth: 2 - - story - api - diff --git a/docs/make.bat b/docs/make.bat deleted file mode 100644 index 8083236..0000000 --- a/docs/make.bat +++ /dev/null @@ -1,190 +0,0 @@ -@ECHO OFF - -REM Command file for Sphinx documentation - -if "%SPHINXBUILD%" == "" ( - set SPHINXBUILD=sphinx-build -) -set BUILDDIR=_build -set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . -set I18NSPHINXOPTS=%SPHINXOPTS% . -if NOT "%PAPER%" == "" ( - set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% - set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% -) - -if "%1" == "" goto help - -if "%1" == "help" ( - :help - echo.Please use `make ^<target^>` where ^<target^> is one of - echo. 
html to make standalone HTML files - echo. dirhtml to make HTML files named index.html in directories - echo. singlehtml to make a single large HTML file - echo. pickle to make pickle files - echo. json to make JSON files - echo. htmlhelp to make HTML files and a HTML help project - echo. qthelp to make HTML files and a qthelp project - echo. devhelp to make HTML files and a Devhelp project - echo. epub to make an epub - echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter - echo. text to make text files - echo. man to make manual pages - echo. texinfo to make Texinfo files - echo. gettext to make PO message catalogs - echo. changes to make an overview over all changed/added/deprecated items - echo. linkcheck to check all external links for integrity - echo. doctest to run all doctests embedded in the documentation if enabled - goto end -) - -if "%1" == "clean" ( - for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i - del /q /s %BUILDDIR%\* - goto end -) - -if "%1" == "html" ( - %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/html. - goto end -) - -if "%1" == "dirhtml" ( - %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. - goto end -) - -if "%1" == "singlehtml" ( - %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. - goto end -) - -if "%1" == "pickle" ( - %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can process the pickle files. - goto end -) - -if "%1" == "json" ( - %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can process the JSON files. 
- goto end -) - -if "%1" == "htmlhelp" ( - %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can run HTML Help Workshop with the ^ -.hhp project file in %BUILDDIR%/htmlhelp. - goto end -) - -if "%1" == "qthelp" ( - %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can run "qcollectiongenerator" with the ^ -.qhcp project file in %BUILDDIR%/qthelp, like this: - echo.^> qcollectiongenerator %BUILDDIR%\qthelp\wheel.qhcp - echo.To view the help file: - echo.^> assistant -collectionFile %BUILDDIR%\qthelp\wheel.ghc - goto end -) - -if "%1" == "devhelp" ( - %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. - goto end -) - -if "%1" == "epub" ( - %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The epub file is in %BUILDDIR%/epub. - goto end -) - -if "%1" == "latex" ( - %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. - goto end -) - -if "%1" == "text" ( - %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The text files are in %BUILDDIR%/text. - goto end -) - -if "%1" == "man" ( - %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The manual pages are in %BUILDDIR%/man. - goto end -) - -if "%1" == "texinfo" ( - %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. - goto end -) - -if "%1" == "gettext" ( - %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. 
The message catalogs are in %BUILDDIR%/locale. - goto end -) - -if "%1" == "changes" ( - %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes - if errorlevel 1 exit /b 1 - echo. - echo.The overview file is in %BUILDDIR%/changes. - goto end -) - -if "%1" == "linkcheck" ( - %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck - if errorlevel 1 exit /b 1 - echo. - echo.Link check complete; look for any errors in the above output ^ -or in %BUILDDIR%/linkcheck/output.txt. - goto end -) - -if "%1" == "doctest" ( - %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest - if errorlevel 1 exit /b 1 - echo. - echo.Testing of doctests in the sources finished, look at the ^ -results in %BUILDDIR%/doctest/output.txt. - goto end -) - -:end diff --git a/docs/story.rst b/docs/story.rst deleted file mode 100644 index 2c150c2..0000000 --- a/docs/story.rst +++ /dev/null @@ -1,62 +0,0 @@ -The Story of Wheel -================== - -I was impressed with Tarek’s packaging talk at PyCon 2010, and I -admire PEP 345 (Metadata for Python Software Packages 1.2) and PEP 376 -(Database of Installed Python Distributions) which standardize a richer -metadata format and show how distributions should be installed on disk. So -naturally with all the hubbub about `packaging` in Python 3.3, I decided -to try it to reap the benefits of a more standardized and predictable -Python packaging experience. - -I began by converting `cryptacular`, a password hashing package which -has a simple C extension, to use setup.cfg. I downloaded the Python 3.3 -source, struggled with the difference between setup.py and setup.cfg -syntax, fixed the `define_macros` feature, stopped using the missing -`extras` functionality, and several hours later I was able to generate my -`METADATA` from `setup.cfg`. I rejoiced at my newfound freedom from the -tyranny of arbitrary code execution during the build and install process. - -It was a lot of work. 
The package is worse off than before, and it can’t -be built or installed without patching the Python source code itself. - -It was about that time that distutils-sig had a discussion about the -need to include a generated setup.cfg from setup.cfg because setup.cfg -wasn’t static enough. Wait, what? - -Of course there is a different way to massively simplify the install -process. It’s called built or binary packages. You never have to run -`setup.py` because there is no `setup.py`. There is only METADATA aka -PKG-INFO. Installation has two steps: ‘build package’; ‘install -package’, and you can skip the first step, have someone else do it -for you, do it on another machine, or install the build system from a -binary package and let the build system handle the building. The build -is still complicated, but installation is simple. - -With the binary package strategy people who want to install use a simple, -compatible installer, and people who want to package use whatever is -convenient for them for as long as it meets their needs. No one has -to rewrite `setup.py` for their own or the 20k+ other packages on PyPi -unless a different build system does a better job. - -Wheel is my attempt to benefit from the excellent distutils-sig work -without having to fix the intractable `distutils` software itself. Like -METADATA and .dist-info directories but unlike Extension(), it’s -simple enough that there really could be alternate implementations; the -simplest (but less than ideal) installer is nothing more than “unzip -archive.whl” somewhere on sys.path. - -If you’ve made it this far you probably wonder whether I’ve heard -of eggs. Some comparisons: - -* Wheel is an installation format; egg is importable. Wheel archives do not need to include .pyc and are less tied to a specific Python version or implementation. Wheel can install (pure Python) packages built with previous versions of Python so you don’t always have to wait for the packager to catch up. 
- -* Wheel uses .dist-info directories; egg uses .egg-info. Wheel is compatible with the new world of Python `packaging` and the new concepts it brings. - -* Wheel has a richer file naming convention for today’s multi-implementation world. A single wheel archive can indicate its compatibility with a number of Python language versions and implementations, ABIs, and system architectures. Historically the ABI has been specific to a CPython release, but when we get a longer-term ABI, wheel will be ready. - -* Wheel is lossless. The first wheel implementation `bdist_wheel` always generates `egg-info`, and then converts it to a `.whl`. Later tools will allow for the conversion of existing eggs and bdist_wininst distributions. - -* Wheel is versioned. Every wheel file contains the version of the wheel specification and the implementation that packaged it. Hopefully the next migration can simply be to Wheel 2.0. - -I hope you will benefit from wheel. diff --git a/entry_points.txt b/entry_points.txt deleted file mode 100644 index f57b8c0..0000000 --- a/entry_points.txt +++ /dev/null @@ -1,5 +0,0 @@ -[console_scripts] -wheel = wheel.tool:main - -[distutils.commands] -bdist_wheel = wheel.bdist_wheel:bdist_wheel
\ No newline at end of file diff --git a/pylintrc b/pylintrc deleted file mode 100644 index ebd9658..0000000 --- a/pylintrc +++ /dev/null @@ -1,249 +0,0 @@ -[MASTER] - -# Specify a configuration file. -#rcfile= - -# Python code to execute, usually for sys.path manipulation such as -# pygtk.require(). -#init-hook= - -# Profiled execution. -profile=no - -# Add files or directories to the blacklist. They should be base names, not -# paths. -ignore=CVS - -# Pickle collected data for later comparisons. -persistent=yes - -# List of plugins (as comma separated values of python modules names) to load, -# usually to register additional checkers. -load-plugins= - - -[MESSAGES CONTROL] - -# Enable the message, report, category or checker with the given id(s). You can -# either give multiple identifier separated by comma (,) or put this option -# multiple time. -#enable= - -# Disable the message, report, category or checker with the given id(s). You -# can either give multiple identifier separated by comma (,) or put this option -# multiple time (only on the command line, not in the configuration file where -# it should appear only once). -disable=F0401,E0611,E1101,C0103 - - -[REPORTS] - -# Set the output format. Available formats are text, parseable, colorized, msvs -# (visual studio) and html -output-format=colorized - -# Include message's id in output -include-ids=yes - -# Put messages in a separate file for each module / package specified on the -# command line instead of printing them on stdout. Reports (if any) will be -# written in a file name "pylint_global.[txt|html]". -files-output=no - -# Tells whether to display a full report or only the messages -reports=yes - -# Python expression which should return a note less than 10 (10 is the highest -# note). You have access to the variables errors warning, statement which -# respectively contain the number of errors / warnings messages and the total -# number of statements analyzed. 
This is used by the global evaluation report -# (RP0004). -evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) - -# Add a comment according to your evaluation note. This is used by the global -# evaluation report (RP0004). -comment=yes - - -[BASIC] - -# Required attributes for module, separated by a comma -required-attributes= - -# List of builtins function names that should not be used, separated by a comma -bad-functions=map,filter,apply,input - -# Regular expression which should only match correct module names -module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ - -# Regular expression which should only match correct module level names -const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$ - -# Regular expression which should only match correct class names -class-rgx=[A-Z_][a-zA-Z0-9]+$ - -# Regular expression which should only match correct function names -function-rgx=[a-z_][a-z0-9_]{2,30}$ - -# Regular expression which should only match correct method names -method-rgx=[a-z_][a-z0-9_]{2,30}$ - -# Regular expression which should only match correct instance attribute names -attr-rgx=[a-z_][a-z0-9_]{2,30}$ - -# Regular expression which should only match correct argument names -argument-rgx=[a-z_][a-z0-9_]{2,30}$ - -# Regular expression which should only match correct variable names -variable-rgx=[a-z_][a-z0-9_]{2,30}$ - -# Regular expression which should only match correct list comprehension / -# generator expression variable names -inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ - -# Good variable names which should always be accepted, separated by a comma -good-names=i,j,k,ex,Run,_ - -# Bad variable names which should always be refused, separated by a comma -bad-names=foo,bar,baz,toto,tutu,tata - -# Regular expression which should only match functions or classes name which do -# not require a docstring -no-docstring-rgx=__.*__ - - -[TYPECHECK] - -# Tells whether missing members accessed in mixin class should be ignored. 
A -# mixin class is detected if its name ends with "mixin" (case insensitive). -ignore-mixin-members=yes - -# List of classes names for which member attributes should not be checked -# (useful for classes with attributes dynamically set). -ignored-classes=SQLObject - -# When zope mode is activated, add a predefined set of Zope acquired attributes -# to generated-members. -zope=no - -# List of members which are set dynamically and missed by pylint inference -# system, and so shouldn't trigger E0201 when accessed. Python regular -# expressions are accepted. -generated-members=REQUEST,acl_users,aq_parent - - -[FORMAT] - -# Maximum number of characters on a single line. -max-line-length=80 - -# Maximum number of lines in a module -max-module-lines=1000 - -# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 -# tab). -indent-string=' ' - - -[SIMILARITIES] - -# Minimum lines number of a similarity. -min-similarity-lines=2 - -# Ignore comments when computing similarities. -ignore-comments=yes - -# Ignore docstrings when computing similarities. -ignore-docstrings=yes - - -[MISCELLANEOUS] - -# List of note tags to take in consideration, separated by a comma. -notes=FIXME,XXX,TODO - - -[VARIABLES] - -# Tells whether we should check for unused import in __init__ files. -init-import=no - -# A regular expression matching the beginning of the name of dummy variables -# (i.e. not used). -dummy-variables-rgx=_|dummy - -# List of additional names supposed to be defined in builtins. Remember that -# you should avoid to define new builtins when possible. -additional-builtins= - - -[CLASSES] - -# List of interface methods to ignore, separated by a comma. This is used for -# instance to not check methods defines in Zope's Interface base class. 
-ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by - -# List of method names used to declare (i.e. assign) instance attributes. -defining-attr-methods=__init__,__new__,setUp - -# List of valid names for the first argument in a class method. -valid-classmethod-first-arg=cls - - -[DESIGN] - -# Maximum number of arguments for function / method -max-args=5 - -# Argument names that match this expression will be ignored. Default to name -# with leading underscore -ignored-argument-names=_.* - -# Maximum number of locals for function / method body -max-locals=15 - -# Maximum number of return / yield for function / method body -max-returns=6 - -# Maximum number of branch for function / method body -max-branchs=12 - -# Maximum number of statements in function / method body -max-statements=50 - -# Maximum number of parents for a class (see R0901). -max-parents=7 - -# Maximum number of attributes for a class (see R0902). -max-attributes=7 - -# Minimum number of public methods for a class (see R0903). -min-public-methods=2 - -# Maximum number of public methods for a class (see R0904). -max-public-methods=20 - - -[IMPORTS] - -# Deprecated modules which should not be used, separated by a comma -deprecated-modules=regsub,string,TERMIOS,Bastion,rexec - -# Create a graph of every (i.e. internal and external) dependencies in the -# given file (report RP0402 must not be disabled) -import-graph= - -# Create a graph of external dependencies in the given file (report RP0402 must -# not be disabled) -ext-import-graph= - -# Create a graph of internal dependencies in the given file (report RP0402 must -# not be disabled) -int-import-graph= - - -[EXCEPTIONS] - -# Exceptions that will emit a warning when being caught. 
Defaults to -# "Exception" -overgeneral-exceptions=Exception diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 99207b9..0000000 --- a/setup.cfg +++ /dev/null @@ -1,9 +0,0 @@ -[pytest] -addopts=--ignore=dist --ignore=build --cov=wheel - -[metadata] -license-file = LICENSE.txt - -[bdist_wheel] -# use py2.py3 tag for pure-python dist: -universal=1 diff --git a/setup.py b/setup.py deleted file mode 100644 index e187cba..0000000 --- a/setup.py +++ /dev/null @@ -1,66 +0,0 @@ -import os.path, codecs, re - -from setuptools import setup - -here = os.path.abspath(os.path.dirname(__file__)) -README = codecs.open(os.path.join(here, 'README.txt'), encoding='utf8').read() -CHANGES = codecs.open(os.path.join(here, 'CHANGES.txt'), encoding='utf8').read() - -with codecs.open(os.path.join(os.path.dirname(__file__), 'wheel', '__init__.py'), - encoding='utf8') as version_file: - metadata = dict(re.findall(r"""__([a-z]+)__ = "([^"]+)""", version_file.read())) - -setup(name='wheel', - version=metadata['version'], - description='A built-package format for Python.', - long_description=README + '\n\n' + CHANGES, - classifiers=[ - "Development Status :: 5 - Production/Stable", - "Intended Audience :: Developers", - "License :: OSI Approved :: MIT License", - "Programming Language :: Python", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.2", - "Programming Language :: Python :: 3.3", - "Programming Language :: Python :: 3.4", - "Programming Language :: Python :: 3.5", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: Implementation :: CPython", - "Programming Language :: Python :: Implementation :: PyPy", - ], - author='Daniel Holth', - author_email='dholth@fastmail.fm', - url='https://bitbucket.org/pypa/wheel/', - keywords=['wheel', 'packaging'], - license='MIT', - packages=[ - 'wheel', - 'wheel.test', - 'wheel.tool', - 
'wheel.signatures' - ], - extras_require={ - 'signatures': ['keyring', 'keyrings.alt'], - 'signatures:sys_platform!="win32"': ['pyxdg'], - 'faster-signatures': ['ed25519ll'], - 'tool': [] - }, - tests_require=[ - 'jsonschema', - 'pytest', - 'coverage', - 'pytest-cov', - ], - include_package_data=True, - zip_safe=False, - entry_points = { - 'console_scripts': [ - 'wheel=wheel.tool:main' - ], - 'distutils.commands': [ - 'bdist_wheel=wheel.bdist_wheel:bdist_wheel' - ] - } - ) diff --git a/tox.ini b/tox.ini deleted file mode 100644 index 309d58d..0000000 --- a/tox.ini +++ /dev/null @@ -1,17 +0,0 @@ -# Tox (http://tox.testrun.org/) is a tool for running tests -# in multiple virtualenvs. This configuration file will run the -# test suite on all supported python versions. To use it, "pip install tox" -# and then run "tox" from this directory. - -[tox] -envlist = py27, pypy, py33, py34, py35, py36 - -[testenv] -commands = - py.test -deps = - .[tool,signatures] - jsonschema - pytest - pytest-cov - setuptools>3.0 diff --git a/vendorize.sh b/vendorize.sh deleted file mode 100755 index 3402172..0000000 --- a/vendorize.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/sh -# Vendorize pep 425 tagging scheme -cp ../pep425/pep425tags.py wheel diff --git a/wheel/__init__.py b/wheel/__init__.py deleted file mode 100644 index 7b3d278..0000000 --- a/wheel/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# __variables__ with double-quoted values will be available in setup.py: -__version__ = "0.30.0.a0" diff --git a/wheel/__main__.py b/wheel/__main__.py deleted file mode 100644 index 889359c..0000000 --- a/wheel/__main__.py +++ /dev/null @@ -1,17 +0,0 @@ -""" -Wheel command line tool (enable python -m wheel syntax) -""" - -import sys - -def main(): # needed for console script - if __package__ == '': - # To be able to run 'python wheel-0.9.whl/wheel': - import os.path - path = os.path.dirname(os.path.dirname(__file__)) - sys.path[0:0] = [path] - import wheel.tool - sys.exit(wheel.tool.main()) - -if 
__name__ == "__main__": - sys.exit(main()) diff --git a/wheel/archive.py b/wheel/archive.py deleted file mode 100644 index 403d45b..0000000 --- a/wheel/archive.py +++ /dev/null @@ -1,79 +0,0 @@ -""" -Archive tools for wheel. -""" - -import os -import time -import logging -import os.path -import zipfile - -from distutils import log - - -def archive_wheelfile(base_name, base_dir): - '''Archive all files under `base_dir` in a whl file and name it like - `base_name`. - ''' - olddir = os.path.abspath(os.curdir) - base_name = os.path.abspath(base_name) - try: - os.chdir(base_dir) - return make_wheelfile_inner(base_name) - finally: - os.chdir(olddir) - - -def make_wheelfile_inner(base_name, base_dir='.'): - """Create a whl file from all the files under 'base_dir'. - - Places .dist-info at the end of the archive.""" - - zip_filename = base_name + ".whl" - - log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir) - - # Some applications need reproducible .whl files, but they can't do this - # without forcing the timestamp of the individual ZipInfo objects. See - # issue #143. 
- timestamp = os.environ.get('SOURCE_DATE_EPOCH') - if timestamp is None: - date_time = None - else: - date_time = time.gmtime(int(timestamp))[0:6] - - # XXX support bz2, xz when available - zip = zipfile.ZipFile(zip_filename, "w", compression=zipfile.ZIP_DEFLATED) - - score = {'WHEEL': 1, 'METADATA': 2, 'RECORD': 3} - deferred = [] - - def writefile(path, date_time): - st = os.stat(path) - if date_time is None: - mtime = time.gmtime(st.st_mtime) - date_time = mtime[0:6] - zinfo = zipfile.ZipInfo(path, date_time) - zinfo.external_attr = st.st_mode << 16 - zinfo.compress_type = zipfile.ZIP_DEFLATED - with open(path, 'rb') as fp: - zip.writestr(zinfo, fp.read()) - log.info("adding '%s'" % path) - - for dirpath, dirnames, filenames in os.walk(base_dir): - for name in filenames: - path = os.path.normpath(os.path.join(dirpath, name)) - - if os.path.isfile(path): - if dirpath.endswith('.dist-info'): - deferred.append((score.get(name, 0), path)) - else: - writefile(path, date_time) - - deferred.sort() - for score, path in deferred: - writefile(path, date_time) - - zip.close() - - return zip_filename diff --git a/wheel/bdist_wheel.py b/wheel/bdist_wheel.py deleted file mode 100644 index dfcc910..0000000 --- a/wheel/bdist_wheel.py +++ /dev/null @@ -1,467 +0,0 @@ -""" -Create a wheel (.whl) distribution. - -A wheel is a built archive format. 
-""" - -import csv -import hashlib -import os -import subprocess -import warnings -import shutil -import json -import sys -import re - -import pkg_resources - -safe_name = pkg_resources.safe_name -safe_version = pkg_resources.safe_version - -from shutil import rmtree -from email.generator import Generator - -from distutils.core import Command -from distutils.sysconfig import get_python_version - -from distutils import log as logger - -from .pep425tags import get_abbr_impl, get_impl_ver, get_abi_tag, get_platform -from .util import native, open_for_csv -from .archive import archive_wheelfile -from .pkginfo import read_pkg_info, write_pkg_info -from .metadata import pkginfo_to_dict -from . import pep425tags, metadata -from . import __version__ as wheel_version - -PY_LIMITED_API_PATTERN = r'cp3\d' - -def safer_name(name): - return safe_name(name).replace('-', '_') - -def safer_version(version): - return safe_version(version).replace('-', '_') - -class bdist_wheel(Command): - - description = 'create a wheel distribution' - - user_options = [('bdist-dir=', 'b', - "temporary directory for creating the distribution"), - ('plat-name=', 'p', - "platform name to embed in generated filenames " - "(default: %s)" % get_platform()), - ('keep-temp', 'k', - "keep the pseudo-installation tree around after " + - "creating the distribution archive"), - ('dist-dir=', 'd', - "directory to put final built distributions in"), - ('skip-build', None, - "skip rebuilding everything (for testing/debugging)"), - ('relative', None, - "build the archive using relative paths" - "(default: false)"), - ('owner=', 'u', - "Owner name used when creating a tar file" - " [default: current user]"), - ('group=', 'g', - "Group name used when creating a tar file" - " [default: current group]"), - ('universal', None, - "make a universal wheel" - " (default: false)"), - ('python-tag=', None, - "Python implementation compatibility tag" - " (default: py%s)" % get_impl_ver()[0]), - ('py-limited-api=', None, - 
"Python tag (cp32|cp33|cpNN) for abi3 wheel tag" - " (default: false)"), - ] - - boolean_options = ['keep-temp', 'skip-build', 'relative', 'universal'] - - def initialize_options(self): - self.bdist_dir = None - self.data_dir = None - self.plat_name = None - self.plat_tag = None - self.format = 'zip' - self.keep_temp = False - self.dist_dir = None - self.distinfo_dir = None - self.egginfo_dir = None - self.root_is_pure = None - self.skip_build = None - self.relative = False - self.owner = None - self.group = None - self.universal = False - self.python_tag = 'py' + get_impl_ver()[0] - self.py_limited_api = False - self.plat_name_supplied = False - - def finalize_options(self): - if self.bdist_dir is None: - bdist_base = self.get_finalized_command('bdist').bdist_base - self.bdist_dir = os.path.join(bdist_base, 'wheel') - - self.data_dir = self.wheel_dist_name + '.data' - self.plat_name_supplied = self.plat_name is not None - - need_options = ('dist_dir', 'plat_name', 'skip_build') - - self.set_undefined_options('bdist', - *zip(need_options, need_options)) - - self.root_is_pure = not (self.distribution.has_ext_modules() - or self.distribution.has_c_libraries()) - - if self.py_limited_api and not re.match(PY_LIMITED_API_PATTERN, self.py_limited_api): - raise ValueError("py-limited-api must match '%s'" % PY_LIMITED_API_PATTERN) - - # Support legacy [wheel] section for setting universal - wheel = self.distribution.get_option_dict('wheel') - if 'universal' in wheel: - # please don't define this in your global configs - val = wheel['universal'][1].strip() - if val.lower() in ('1', 'true', 'yes'): - self.universal = True - - @property - def wheel_dist_name(self): - """Return distribution full name with - replaced with _""" - return '-'.join((safer_name(self.distribution.get_name()), - safer_version(self.distribution.get_version()))) - - def get_tag(self): - # bdist sets self.plat_name if unset, we should only use it for purepy - # wheels if the user supplied it. 
- if self.plat_name_supplied: - plat_name = self.plat_name - elif self.root_is_pure: - plat_name = 'any' - else: - plat_name = self.plat_name or get_platform() - if plat_name in ('linux-x86_64', 'linux_x86_64') and sys.maxsize == 2147483647: - plat_name = 'linux_i686' - plat_name = plat_name.replace('-', '_').replace('.', '_') - - - if self.root_is_pure: - if self.universal: - impl = 'py2.py3' - else: - impl = self.python_tag - tag = (impl, 'none', plat_name) - else: - impl_name = get_abbr_impl() - impl_ver = get_impl_ver() - impl = impl_name + impl_ver - # We don't work on CPython 3.1, 3.0. - if self.py_limited_api and (impl_name + impl_ver).startswith('cp3'): - impl = self.py_limited_api - abi_tag = 'abi3' - else: - abi_tag = str(get_abi_tag()).lower() - tag = (impl, abi_tag, plat_name) - supported_tags = pep425tags.get_supported( - supplied_platform=plat_name if self.plat_name_supplied else None) - # XXX switch to this alternate implementation for non-pure: - if not self.py_limited_api: - assert tag == supported_tags[0], "%s != %s" % (tag, supported_tags[0]) - assert tag in supported_tags, "would build wheel with unsupported tag %s" % tag - return tag - - def get_archive_basename(self): - """Return archive name without extension""" - - impl_tag, abi_tag, plat_tag = self.get_tag() - - archive_basename = "%s-%s-%s-%s" % ( - self.wheel_dist_name, - impl_tag, - abi_tag, - plat_tag) - return archive_basename - - def run(self): - build_scripts = self.reinitialize_command('build_scripts') - build_scripts.executable = 'python' - - if not self.skip_build: - self.run_command('build') - - install = self.reinitialize_command('install', - reinit_subcommands=True) - install.root = self.bdist_dir - install.compile = False - install.skip_build = self.skip_build - install.warn_dir = False - - # A wheel without setuptools scripts is more cross-platform. - # Use the (undocumented) `no_ep` option to setuptools' - # install_scripts command to avoid creating entry point scripts. 
- install_scripts = self.reinitialize_command('install_scripts') - install_scripts.no_ep = True - - # Use a custom scheme for the archive, because we have to decide - # at installation time which scheme to use. - for key in ('headers', 'scripts', 'data', 'purelib', 'platlib'): - setattr(install, - 'install_' + key, - os.path.join(self.data_dir, key)) - - basedir_observed = '' - - if os.name == 'nt': - # win32 barfs if any of these are ''; could be '.'? - # (distutils.command.install:change_roots bug) - basedir_observed = os.path.normpath(os.path.join(self.data_dir, '..')) - self.install_libbase = self.install_lib = basedir_observed - - setattr(install, - 'install_purelib' if self.root_is_pure else 'install_platlib', - basedir_observed) - - logger.info("installing to %s", self.bdist_dir) - - self.run_command('install') - - archive_basename = self.get_archive_basename() - - pseudoinstall_root = os.path.join(self.dist_dir, archive_basename) - if not self.relative: - archive_root = self.bdist_dir - else: - archive_root = os.path.join( - self.bdist_dir, - self._ensure_relative(install.install_base)) - - self.set_undefined_options( - 'install_egg_info', ('target', 'egginfo_dir')) - self.distinfo_dir = os.path.join(self.bdist_dir, - '%s.dist-info' % self.wheel_dist_name) - self.egg2dist(self.egginfo_dir, - self.distinfo_dir) - - self.write_wheelfile(self.distinfo_dir) - - self.write_record(self.bdist_dir, self.distinfo_dir) - - # Make the archive - if not os.path.exists(self.dist_dir): - os.makedirs(self.dist_dir) - wheel_name = archive_wheelfile(pseudoinstall_root, archive_root) - - # Sign the archive - if 'WHEEL_TOOL' in os.environ: - subprocess.call([os.environ['WHEEL_TOOL'], 'sign', wheel_name]) - - # Add to 'Distribution.dist_files' so that the "upload" command works - getattr(self.distribution, 'dist_files', []).append( - ('bdist_wheel', get_python_version(), wheel_name)) - - if not self.keep_temp: - if self.dry_run: - logger.info('removing %s', self.bdist_dir) - 
else: - rmtree(self.bdist_dir) - - def write_wheelfile(self, wheelfile_base, generator='bdist_wheel (' + wheel_version + ')'): - from email.message import Message - msg = Message() - msg['Wheel-Version'] = '1.0' # of the spec - msg['Generator'] = generator - msg['Root-Is-Purelib'] = str(self.root_is_pure).lower() - - # Doesn't work for bdist_wininst - impl_tag, abi_tag, plat_tag = self.get_tag() - for impl in impl_tag.split('.'): - for abi in abi_tag.split('.'): - for plat in plat_tag.split('.'): - msg['Tag'] = '-'.join((impl, abi, plat)) - - wheelfile_path = os.path.join(wheelfile_base, 'WHEEL') - logger.info('creating %s', wheelfile_path) - with open(wheelfile_path, 'w') as f: - Generator(f, maxheaderlen=0).flatten(msg) - - def _ensure_relative(self, path): - # copied from dir_util, deleted - drive, path = os.path.splitdrive(path) - if path[0:1] == os.sep: - path = drive + path[1:] - return path - - def _pkginfo_to_metadata(self, egg_info_path, pkginfo_path): - return metadata.pkginfo_to_metadata(egg_info_path, pkginfo_path) - - def license_file(self): - """Return license filename from a license-file key in setup.cfg, or None.""" - metadata = self.distribution.get_option_dict('metadata') - if not 'license_file' in metadata: - return None - return metadata['license_file'][1] - - def setupcfg_requirements(self): - """Generate requirements from setup.cfg as - ('Requires-Dist', 'requirement; qualifier') tuples. 
From a metadata - section in setup.cfg: - - [metadata] - provides-extra = extra1 - extra2 - requires-dist = requirement; qualifier - another; qualifier2 - unqualified - - Yields - - ('Provides-Extra', 'extra1'), - ('Provides-Extra', 'extra2'), - ('Requires-Dist', 'requirement; qualifier'), - ('Requires-Dist', 'another; qualifier2'), - ('Requires-Dist', 'unqualified') - """ - metadata = self.distribution.get_option_dict('metadata') - - # our .ini parser folds - to _ in key names: - for key, title in (('provides_extra', 'Provides-Extra'), - ('requires_dist', 'Requires-Dist')): - if not key in metadata: - continue - field = metadata[key] - for line in field[1].splitlines(): - line = line.strip() - if not line: - continue - yield (title, line) - - def add_requirements(self, metadata_path): - """Add additional requirements from setup.cfg to file metadata_path""" - additional = list(self.setupcfg_requirements()) - if not additional: return - pkg_info = read_pkg_info(metadata_path) - if 'Provides-Extra' in pkg_info or 'Requires-Dist' in pkg_info: - warnings.warn('setup.cfg requirements overwrite values from setup.py') - del pkg_info['Provides-Extra'] - del pkg_info['Requires-Dist'] - for k, v in additional: - pkg_info[k] = v - write_pkg_info(metadata_path, pkg_info) - - def egg2dist(self, egginfo_path, distinfo_path): - """Convert an .egg-info directory into a .dist-info directory""" - def adios(p): - """Appropriately delete directory, file or link.""" - if os.path.exists(p) and not os.path.islink(p) and os.path.isdir(p): - shutil.rmtree(p) - elif os.path.exists(p): - os.unlink(p) - - adios(distinfo_path) - - if not os.path.exists(egginfo_path): - # There is no egg-info. This is probably because the egg-info - # file/directory is not named matching the distribution name used - # to name the archive file. Check for this case and report - # accordingly. 
- import glob - pat = os.path.join(os.path.dirname(egginfo_path), '*.egg-info') - possible = glob.glob(pat) - err = "Egg metadata expected at %s but not found" % (egginfo_path,) - if possible: - alt = os.path.basename(possible[0]) - err += " (%s found - possible misnamed archive file?)" % (alt,) - - raise ValueError(err) - - if os.path.isfile(egginfo_path): - # .egg-info is a single file - pkginfo_path = egginfo_path - pkg_info = self._pkginfo_to_metadata(egginfo_path, egginfo_path) - os.mkdir(distinfo_path) - else: - # .egg-info is a directory - pkginfo_path = os.path.join(egginfo_path, 'PKG-INFO') - pkg_info = self._pkginfo_to_metadata(egginfo_path, pkginfo_path) - - # ignore common egg metadata that is useless to wheel - shutil.copytree(egginfo_path, distinfo_path, - ignore=lambda x, y: set(('PKG-INFO', - 'requires.txt', - 'SOURCES.txt', - 'not-zip-safe',))) - - # delete dependency_links if it is only whitespace - dependency_links_path = os.path.join(distinfo_path, 'dependency_links.txt') - with open(dependency_links_path, 'r') as dependency_links_file: - dependency_links = dependency_links_file.read().strip() - if not dependency_links: - adios(dependency_links_path) - - write_pkg_info(os.path.join(distinfo_path, 'METADATA'), pkg_info) - - # XXX deprecated. Still useful for current distribute/setuptools. - metadata_path = os.path.join(distinfo_path, 'METADATA') - self.add_requirements(metadata_path) - - # XXX intentionally a different path than the PEP. 
- metadata_json_path = os.path.join(distinfo_path, 'metadata.json') - pymeta = pkginfo_to_dict(metadata_path, - distribution=self.distribution) - - if 'description' in pymeta: - description_filename = 'DESCRIPTION.rst' - description_text = pymeta.pop('description') - description_path = os.path.join(distinfo_path, - description_filename) - with open(description_path, "wb") as description_file: - description_file.write(description_text.encode('utf-8')) - pymeta['extensions']['python.details']['document_names']['description'] = description_filename - - # XXX heuristically copy any LICENSE/LICENSE.txt? - license = self.license_file() - if license: - license_filename = 'LICENSE.txt' - shutil.copy(license, os.path.join(self.distinfo_dir, license_filename)) - pymeta['extensions']['python.details']['document_names']['license'] = license_filename - - with open(metadata_json_path, "w") as metadata_json: - json.dump(pymeta, metadata_json, sort_keys=True) - - adios(egginfo_path) - - def write_record(self, bdist_dir, distinfo_dir): - from .util import urlsafe_b64encode - - record_path = os.path.join(distinfo_dir, 'RECORD') - record_relpath = os.path.relpath(record_path, bdist_dir) - - def walk(): - for dir, dirs, files in os.walk(bdist_dir): - dirs.sort() - for f in sorted(files): - yield os.path.join(dir, f) - - def skip(path): - """Wheel hashes every possible file.""" - return (path == record_relpath) - - with open_for_csv(record_path, 'w+') as record_file: - writer = csv.writer(record_file) - for path in walk(): - relpath = os.path.relpath(path, bdist_dir) - if skip(relpath): - hash = '' - size = '' - else: - with open(path, 'rb') as f: - data = f.read() - digest = hashlib.sha256(data).digest() - hash = 'sha256=' + native(urlsafe_b64encode(digest)) - size = len(data) - record_path = os.path.relpath( - path, bdist_dir).replace(os.path.sep, '/') - writer.writerow((record_path, hash, size)) diff --git a/wheel/decorator.py b/wheel/decorator.py deleted file mode 100644 index 
# from Pyramid


class reify(object):
    """Cache a method's result on the instance after the first access.

    A non-data descriptor: the first attribute lookup invokes the wrapped
    method and stores its return value in the instance __dict__ under the
    method's name, so subsequent lookups bypass this descriptor entirely
    and read a plain instance attribute.
    """

    def __init__(self, wrapped):
        self.wrapped = wrapped
        self.__doc__ = wrapped.__doc__

    def __get__(self, inst, objtype=None):
        # Class-level access returns the descriptor itself.
        if inst is None:
            return self
        result = self.wrapped(inst)
        # Shadow this descriptor with the computed value.
        setattr(inst, self.wrapped.__name__, result)
        return result
def egg2wheel(egg_path, dest_dir):
    """Convert the egg at *egg_path* into a wheel written to *dest_dir*.

    Accepts either a zipped bdist_egg file or an unpacked (buildout-style)
    egg directory.  Tags are derived from the egg filename via egg_info_re.
    """
    egg_info = egg_info_re.match(os.path.basename(egg_path)).groupdict()
    # Staging area; the wheel is assembled here, then archived.
    dir = tempfile.mkdtemp(suffix="_e2w")
    if os.path.isfile(egg_path):
        # assume we have a bdist_egg otherwise
        egg = zipfile.ZipFile(egg_path)
        egg.extractall(dir)
    else:
        # support buildout-style installed eggs directories
        for pth in os.listdir(egg_path):
            src = os.path.join(egg_path, pth)
            if os.path.isfile(src):
                shutil.copy2(src, dir)
            else:
                shutil.copytree(src, os.path.join(dir, pth))

    dist_info = "%s-%s" % (egg_info['name'], egg_info['ver'])
    abi = 'none'
    # NOTE(review): the regex makes 'pyver' optional, so this could be None
    # for oddly named eggs — TODO confirm upstream inputs always carry it.
    pyver = egg_info['pyver'].replace('.', '')
    arch = (egg_info['arch'] or 'any').replace('.', '_').replace('-', '_')
    if arch != 'any':
        # assume all binary eggs are for CPython
        pyver = 'cp' + pyver[2:]
    wheel_name = '-'.join((
        dist_info,
        pyver,
        abi,
        arch
    ))
    # A pure egg has no arch component in its filename.
    root_is_purelib = egg_info['arch'] is None
    if root_is_purelib:
        bw = wheel.bdist_wheel.bdist_wheel(distutils.dist.Distribution())
    else:
        # Non-pure eggs need the tag-overriding bdist_wheel subclass.
        bw = _bdist_wheel_tag(distutils.dist.Distribution())

    bw.root_is_pure = root_is_purelib
    bw.python_tag = pyver
    bw.plat_name_supplied = True
    bw.plat_name = egg_info['arch'] or 'any'
    if not root_is_purelib:
        bw.full_tag_supplied = True
        bw.full_tag = (pyver, abi, arch)

    dist_info_dir = os.path.join(dir, '%s.dist-info' % dist_info)
    # Reuse bdist_wheel's machinery to build the .dist-info contents.
    bw.egg2dist(os.path.join(dir, 'EGG-INFO'),
                dist_info_dir)
    bw.write_wheelfile(dist_info_dir, generator='egg2wheel')
    bw.write_record(dir, dist_info_dir)
    # make_archive produces '<name>.zip'; rename the suffix to '.whl'.
    filename = make_archive(os.path.join(dest_dir, wheel_name), 'zip', root_dir=dir)
    os.rename(filename, filename[:-3] + 'whl')
    shutil.rmtree(dir)
action='store_true') - args = parser.parse_args() - for pat in args.eggs: - for egg in iglob(pat): - if args.verbose: - sys.stdout.write("{0}... ".format(egg)) - egg2wheel(egg, args.dest_dir) - if args.verbose: - sys.stdout.write("OK\n") - -if __name__ == "__main__": - main() diff --git a/wheel/eggnames.txt b/wheel/eggnames.txt deleted file mode 100644 index d422120..0000000 --- a/wheel/eggnames.txt +++ /dev/null @@ -1,87 +0,0 @@ -vcard-0.7.8-py2.7.egg -qtalchemy-0.7.1-py2.7.egg -AMQPDeliver-0.1-py2.7.egg -infi.registry-0.1.1-py2.7.egg -infi.instruct-0.5.5-py2.7.egg -infi.devicemanager-0.1.2-py2.7.egg -TracTixSummary-1.0-py2.7.egg -ToscaWidgets-0.9.12-py2.7.egg -archipel_agent_iphone_notification-0.5.0beta-py2.7.egg -archipel_agent_action_scheduler-0.5.0beta-py2.7.egg -ao.social-1.0.2-py2.7.egg -apgl-0.7-py2.7.egg -satchmo_payment_payworld-0.1.1-py2.7.egg -snmpsim-0.1.3-py2.7.egg -sshim-0.2-py2.7.egg -shove-0.3.4-py2.7.egg -simpleavro-0.3.0-py2.7.egg -wkhtmltopdf-0.2-py2.7.egg -wokkel-0.7.0-py2.7.egg -jmbo_social-0.0.6-py2.7.egg -jmbo_post-0.0.6-py2.7.egg -jcrack-0.0.2-py2.7.egg -riak-1.4.0-py2.7.egg -restclient-0.10.2-py2.7.egg -Sutekh-0.8.1-py2.7.egg -trayify-0.0.1-py2.7.egg -tweepy-1.9-py2.7.egg -topzootools-0.2.1-py2.7.egg -haystack-0.16-py2.7.egg -zope.interface-4.0.1-py2.7-win32.egg -neuroshare-0.8.5-py2.7-macosx-10.7-intel.egg -ndg_httpsclient-0.2.0-py2.7.egg -libtele-0.3-py2.7.egg -litex.cxpool-1.0.2-py2.7.egg -obspy.iris-0.5.1-py2.7.egg -obspy.mseed-0.6.1-py2.7-win32.egg -obspy.core-0.6.2-py2.7.egg -CorePost-0.0.3-py2.7.egg -fnordstalk-0.0.3-py2.7.egg -Persistence-2.13.2-py2.7-win32.egg -Pydap-3.1.RC1-py2.7.egg -PyExecJS-1.0.4-py2.7.egg -Wally-0.7.2-py2.7.egg -ExtensionClass-4.0a1-py2.7-win32.egg -Feedjack-0.9.16-py2.7.egg -Mars24-0.3.9-py2.7.egg -HalWeb-0.6.0-py2.7.egg -DARE-0.7.140-py2.7.egg -macholib-1.3-py2.7.egg -marrow.wsgi.egress.compression-1.1-py2.7.egg -mcs-0.3.7-py2.7.egg -Kook-0.6.0-py2.7.egg -er-0.1-py2.7.egg -evasion_director-1.1.4-py2.7.egg 
-djquery-0.1a-py2.7.egg -django_factory-0.7-py2.7.egg -django_gizmo-0.0.3-py2.7.egg -django_category-0.1-py2.7.egg -dbwrap-0.3.2-py2.7.egg -django_supergeneric-1.0-py2.7.egg -django_dynamo-0.25-py2.7.egg -django_acollabauth-0.1-py2.7.egg -django_qrlink-0.1.0-py2.7.egg -django_addons-0.6.6-py2.7.egg -cover_grabber-1.1.2-py2.7.egg -chem-1.1-py2.7.egg -crud-0.1-py2.7.egg -bongo-0.1-py2.7.egg -bytecodehacks-April2000-py2.7.egg -greenlet-0.3.4-py2.7-win32.egg -ginvoke-0.3.1-py2.7.egg -pyobjc_framework_ScriptingBridge-2.3-py2.7.egg -pecan-0.2.0a-py2.7.egg -pyress-0.2.0-py2.7.egg -pyobjc_framework_PubSub-2.3-py2.7.egg -pyobjc_framework_ExceptionHandling-2.3-py2.7.egg -pywps-trunk-py2.7.egg -pyobjc_framework_CFNetwork-2.3-py2.7-macosx-10.6-fat.egg -py.saunter-0.40-py2.7.egg -pyfnordmetric-0.0.1-py2.7.egg -pyws-1.1.1-py2.7.egg -prestapyt-0.4.0-py2.7.egg -passlib-1.5.3-py2.7.egg -pyga-2.1-py2.7.egg -pygithub3-0.3-py2.7.egg -pyobjc_framework_OpenDirectory-2.3-py2.7.egg -yaposib-0.2.75-py2.7-linux-x86_64.egg diff --git a/wheel/install.py b/wheel/install.py deleted file mode 100644 index a422b0e..0000000 --- a/wheel/install.py +++ /dev/null @@ -1,481 +0,0 @@ -""" -Operations on existing wheel files, including basic installation. -""" -# XXX see patched pip to install - -import sys -import warnings -import os.path -import re -import zipfile -import hashlib -import csv - -import shutil - -try: - _big_number = sys.maxsize -except NameError: - _big_number = sys.maxint - -from .decorator import reify -from .util import (urlsafe_b64encode, from_json, urlsafe_b64decode, - native, binary, HashingFile) -from . import signatures -from .pkginfo import read_pkg_info_bytes -from .util import open_for_csv - -from .pep425tags import get_supported -from .paths import get_install_paths - -# The next major version after this version of the 'wheel' tool: -VERSION_TOO_HIGH = (1, 0) - -# Non-greedy matching of an optional build number may be too clever (more -# invalid wheel filenames will match). 
def parse_version(version):
    """Use parse_version from pkg_resources or distutils as available."""
    # NOTE: rebinds the module-level name on the first call, so the import
    # probe below only ever runs once; later calls go straight to the
    # imported parser.
    global parse_version
    try:
        from pkg_resources import parse_version
    except ImportError:
        # setuptools unavailable; fall back to distutils' looser parser.
        from distutils.version import LooseVersion as parse_version
    return parse_version(version)
    @property
    def tags(self):
        """A wheel file is compatible with the Cartesian product of the
        period-delimited tags in its filename.
        To choose a wheel file among several candidates having the same
        distribution version 'ver', an installer ranks each triple of
        (pyver, abi, plat) that its Python installation can run, sorting
        the wheels by the best-ranked tag it supports and then by their
        arity which is just len(list(compatibility_tags)).
        """
        tags = self.parsed_filename.groupdict()
        # Compressed tag sets such as 'py2.py3-none-any' expand to one
        # (pyver, abi, plat) triple per combination of dotted components.
        for pyver in tags['pyver'].split('.'):
            for abi in tags['abi'].split('.'):
                for plat in tags['plat'].split('.'):
                    yield (pyver, abi, plat)
- - :param supported: A list of compatibility tags that the current - Python implemenation can run. - """ - preferences = [] - for tag in self.compatibility_tags: - try: - preferences.append(supported.index(tag)) - # Tag not present - except ValueError: - pass - if len(preferences): - return (min(preferences), self.arity) - return (_big_number, 0) - - # deprecated - def supports_current_python(self, x): - assert self.context == x, 'context mismatch' - return self.compatible - - # Comparability. - # Wheels are equal if they refer to the same file. - # If two wheels are not equal, compare based on (in this order): - # 1. Name - # 2. Version - # 3. Compatibility rank - # 4. Filename (as a tiebreaker) - @property - def _sort_key(self): - return (self.parsed_filename.group('name'), - parse_version(self.parsed_filename.group('ver')), - tuple(-x for x in self.rank), - self.filename) - - def __eq__(self, other): - return self.filename == other.filename - - def __ne__(self, other): - return self.filename != other.filename - - def __lt__(self, other): - if self.context != other.context: - raise TypeError("{0}.context != {1}.context".format(self, other)) - - return self._sort_key < other._sort_key - - # XXX prune - - sn = self.parsed_filename.group('name') - on = other.parsed_filename.group('name') - if sn != on: - return sn < on - sv = parse_version(self.parsed_filename.group('ver')) - ov = parse_version(other.parsed_filename.group('ver')) - if sv != ov: - return sv < ov - # Compatibility - if self.context != other.context: - raise TypeError("{0}.context != {1}.context".format(self, other)) - sc = self.rank - oc = other.rank - if sc != None and oc != None and sc != oc: - # Smaller compatibility ranks are "better" than larger ones, - # so we have to reverse the sense of the comparison here! 
- return sc > oc - elif sc == None and oc != None: - return False - return self.filename < other.filename - - def __gt__(self, other): - return other < self - - def __le__(self, other): - return self == other or self < other - - def __ge__(self, other): - return self == other or other < self - - # - # Methods using the file's contents: - # - - @reify - def zipfile(self): - mode = "r" - if self.append: - mode = "a" - vzf = VerifyingZipFile(self.fp if self.fp else self.filename, mode) - if not self.append: - self.verify(vzf) - return vzf - - @reify - def parsed_wheel_info(self): - """Parse wheel metadata (the .data/WHEEL file)""" - return read_pkg_info_bytes(self.zipfile.read(self.wheelinfo_name)) - - def check_version(self): - version = self.parsed_wheel_info['Wheel-Version'] - if tuple(map(int, version.split('.'))) >= VERSION_TOO_HIGH: - raise ValueError("Wheel version is too high") - - @reify - def install_paths(self): - """ - Consult distutils to get the install paths for our dist. A dict with - ('purelib', 'platlib', 'headers', 'scripts', 'data'). - - We use the name from our filename as the dist name, which means headers - could be installed in the wrong place if the filesystem-escaped name - is different than the Name. Who cares? - """ - name = self.parsed_filename.group('name') - return get_install_paths(name) - - def install(self, force=False, overrides={}): - """ - Install the wheel into site-packages. - """ - - # Utility to get the target directory for a particular key - def get_path(key): - return overrides.get(key) or self.install_paths[key] - - # The base target location is either purelib or platlib - if self.parsed_wheel_info['Root-Is-Purelib'] == 'true': - root = get_path('purelib') - else: - root = get_path('platlib') - - # Parse all the names in the archive - name_trans = {} - for info in self.zipfile.infolist(): - name = info.filename - # Zip files can contain entries representing directories. - # These end in a '/'. 
- # We ignore these, as we create directories on demand. - if name.endswith('/'): - continue - - # Pathnames in a zipfile namelist are always /-separated. - # In theory, paths could start with ./ or have other oddities - # but this won't happen in practical cases of well-formed wheels. - # We'll cover the simple case of an initial './' as it's both easy - # to do and more common than most other oddities. - if name.startswith('./'): - name = name[2:] - - # Split off the base directory to identify files that are to be - # installed in non-root locations - basedir, sep, filename = name.partition('/') - if sep and basedir == self.datadir_name: - # Data file. Target destination is elsewhere - key, sep, filename = filename.partition('/') - if not sep: - raise ValueError("Invalid filename in wheel: {0}".format(name)) - target = get_path(key) - else: - # Normal file. Target destination is root - key = '' - target = root - filename = name - - # Map the actual filename from the zipfile to its intended target - # directory and the pathname relative to that directory. - dest = os.path.normpath(os.path.join(target, filename)) - name_trans[info] = (key, target, filename, dest) - - # We're now ready to start processing the actual install. The process - # is as follows: - # 1. Prechecks - is the wheel valid, is its declared architecture - # OK, etc. [[Responsibility of the caller]] - # 2. Overwrite check - do any of the files to be installed already - # exist? - # 3. Actual install - put the files in their target locations. - # 4. Update RECORD - write a suitably modified RECORD file to - # reflect the actual installed paths. - - if not force: - for info, v in name_trans.items(): - k = info.filename - key, target, filename, dest = v - if os.path.exists(dest): - raise ValueError("Wheel file {0} would overwrite {1}. Use force if this is intended".format(k, dest)) - - # Get the name of our executable, for use when replacing script - # wrapper hashbang lines. 
    def verify(self, zipfile=None):
        """Configure the VerifyingZipFile `zipfile` by verifying its signature
        and setting expected hashes for every hash in RECORD.
        Caller must complete the verification process by completely reading
        every file in the archive (e.g. with extractall)."""
        sig = None
        if zipfile is None:
            zipfile = self.zipfile
        # Strict mode: entries with no registered hash refuse to open.
        zipfile.strict = True

        record_name = '/'.join((self.distinfo_name, 'RECORD'))
        sig_name = '/'.join((self.distinfo_name, 'RECORD.jws'))
        # tolerate s/mime signatures:
        smime_sig_name = '/'.join((self.distinfo_name, 'RECORD.p7s'))
        # RECORD and its signature files cannot list their own hashes, so
        # register them with hash None ("don't care").
        zipfile.set_expected_hash(record_name, None)
        zipfile.set_expected_hash(sig_name, None)
        zipfile.set_expected_hash(smime_sig_name, None)
        record = zipfile.read(record_name)

        record_digest = urlsafe_b64encode(hashlib.sha256(record).digest())
        try:
            sig = from_json(native(zipfile.read(sig_name)))
        except KeyError:  # no signature
            pass
        if sig:
            # A JWS signature, when present, must vouch for this RECORD.
            headers, payload = signatures.verify(sig)
            if payload['hash'] != "sha256=" + native(record_digest):
                msg = "RECORD.sig claimed RECORD hash {0} != computed hash {1}."
                raise BadWheelFile(msg.format(payload['hash'],
                                              native(record_digest)))

        reader = csv.reader((native(r) for r in record.splitlines()))

        # Register the expected sha256 for every hashed entry in RECORD.
        for row in reader:
            filename = row[0]
            hash = row[1]
            if not hash:
                # Only RECORD and its signature may legitimately lack a hash.
                if filename not in (record_name, sig_name):
                    sys.stderr.write("%s has no hash!\n" % filename)
                continue
            algo, data = row[1].split('=', 1)
            assert algo == "sha256", "Unsupported hash algorithm"
            zipfile.set_expected_hash(filename, urlsafe_b64decode(binary(data)))
Note that each file must be completly read in - order for its hash to be checked.""" - - def __init__(self, file, mode="r", - compression=zipfile.ZIP_STORED, - allowZip64=False): - zipfile.ZipFile.__init__(self, file, mode, compression, allowZip64) - - self.strict = False - self._expected_hashes = {} - self._hash_algorithm = hashlib.sha256 - - def set_expected_hash(self, name, hash): - """ - :param name: name of zip entry - :param hash: bytes of hash (or None for "don't care") - """ - self._expected_hashes[name] = hash - - def open(self, name_or_info, mode="r", pwd=None): - """Return file-like object for 'name'.""" - # A non-monkey-patched version would contain most of zipfile.py - ef = zipfile.ZipFile.open(self, name_or_info, mode, pwd) - if isinstance(name_or_info, zipfile.ZipInfo): - name = name_or_info.filename - else: - name = name_or_info - if (name in self._expected_hashes - and self._expected_hashes[name] != None): - expected_hash = self._expected_hashes[name] - try: - _update_crc_orig = ef._update_crc - except AttributeError: - warnings.warn('Need ZipExtFile._update_crc to implement ' - 'file hash verification (in Python >= 2.7)') - return ef - running_hash = self._hash_algorithm() - if hasattr(ef, '_eof'): # py33 - def _update_crc(data): - _update_crc_orig(data) - running_hash.update(data) - if ef._eof and running_hash.digest() != expected_hash: - raise BadWheelFile("Bad hash for file %r" % ef.name) - else: - def _update_crc(data, eof=None): - _update_crc_orig(data, eof=eof) - running_hash.update(data) - if eof and running_hash.digest() != expected_hash: - raise BadWheelFile("Bad hash for file %r" % ef.name) - ef._update_crc = _update_crc - elif self.strict and name not in self._expected_hashes: - raise BadWheelFile("No expected hash for file %r" % ef.name) - return ef - - def pop(self): - """Truncate the last file off this zipfile. 
def unique(iterable):
    """
    Yield unique values in iterable, preserving order.

    Membership is tracked in a set, so values must be hashable.
    """
    seen = set()
    for value in iterable:
        # PEP 8 idiom: 'not in' rather than 'not value in seen'.
        if value not in seen:
            seen.add(value)
            yield value
- """ - may_requires = OrderedDefaultDict(list) - for value in sorted(pkg_info.get_all(key)): - extra_match = EXTRA_RE.search(value) - if extra_match: - groupdict = extra_match.groupdict() - condition = groupdict['condition'] - extra = groupdict['extra'] - package = groupdict['package'] - if condition.endswith(' and '): - condition = condition[:-5] - else: - condition, extra = None, None - package = value - key = MayRequiresKey(condition, extra) - may_requires[key].append(package) - - if may_requires: - metadata['run_requires'] = [] - def sort_key(item): - # Both condition and extra could be None, which can't be compared - # against strings in Python 3. - key, value = item - if key.condition is None: - return '' - return key.condition - for key, value in sorted(may_requires.items(), key=sort_key): - may_requirement = OrderedDict((('requires', value),)) - if key.extra: - may_requirement['extra'] = key.extra - if key.condition: - may_requirement['environment'] = key.condition - metadata['run_requires'].append(may_requirement) - - if not 'extras' in metadata: - metadata['extras'] = [] - metadata['extras'].extend([key.extra for key in may_requires.keys() if key.extra]) - - -def pkginfo_to_dict(path, distribution=None): - """ - Convert PKG-INFO to a prototype Metadata 2.0 (PEP 426) dict. - - The description is included under the key ['description'] rather than - being written to a separate file. 
- - path: path to PKG-INFO file - distribution: optional distutils Distribution() - """ - - metadata = OrderedDefaultDict(lambda: OrderedDefaultDict(lambda: OrderedDefaultDict(OrderedDict))) - metadata["generator"] = "bdist_wheel (" + wheel_version + ")" - try: - unicode - pkg_info = read_pkg_info(path) - except NameError: - with open(path, 'rb') as pkg_info_file: - pkg_info = email.parser.Parser().parsestr(pkg_info_file.read().decode('utf-8')) - description = None - - if pkg_info['Summary']: - metadata['summary'] = pkginfo_unicode(pkg_info, 'Summary') - del pkg_info['Summary'] - - if pkg_info['Description']: - description = dedent_description(pkg_info) - del pkg_info['Description'] - else: - payload = pkg_info.get_payload() - if isinstance(payload, bytes): - # Avoid a Python 2 Unicode error. - # We still suffer ? glyphs on Python 3. - payload = payload.decode('utf-8') - if payload: - description = payload - - if description: - pkg_info['description'] = description - - for key in sorted(unique(k.lower() for k in pkg_info.keys())): - low_key = key.replace('-', '_') - - if low_key in SKIP_FIELDS: - continue - - if low_key in UNKNOWN_FIELDS and pkg_info.get(key) == 'UNKNOWN': - continue - - if low_key in sorted(PLURAL_FIELDS): - metadata[PLURAL_FIELDS[low_key]] = pkg_info.get_all(key) - - elif low_key == "requires_dist": - handle_requires(metadata, pkg_info, key) - - elif low_key == 'provides_extra': - if not 'extras' in metadata: - metadata['extras'] = [] - metadata['extras'].extend(pkg_info.get_all(key)) - - elif low_key == 'home_page': - metadata['extensions']['python.details']['project_urls'] = {'Home':pkg_info[key]} - - elif low_key == 'keywords': - metadata['keywords'] = KEYWORDS_RE.split(pkg_info[key]) - - else: - metadata[low_key] = pkg_info[key] - - metadata['metadata_version'] = METADATA_VERSION - - if 'extras' in metadata: - metadata['extras'] = sorted(set(metadata['extras'])) - - # include more information if distribution is available - if distribution: - 
for requires, attr in (('test_requires', 'tests_require'),): - try: - requirements = getattr(distribution, attr) - if isinstance(requirements, list): - new_requirements = sorted(convert_requirements(requirements)) - metadata[requires] = [{'requires':new_requirements}] - except AttributeError: - pass - - # handle contacts - contacts = [] - for contact_type, role in CONTACT_FIELDS: - contact = OrderedDict() - for key in sorted(contact_type): - if contact_type[key] in metadata: - contact[key] = metadata.pop(contact_type[key]) - if contact: - contact['role'] = role - contacts.append(contact) - if contacts: - metadata['extensions']['python.details']['contacts'] = contacts - - # convert entry points to exports - try: - with open(os.path.join(os.path.dirname(path), "entry_points.txt"), "r") as ep_file: - ep_map = pkg_resources.EntryPoint.parse_map(ep_file.read()) - exports = OrderedDict() - for group, items in sorted(ep_map.items()): - exports[group] = OrderedDict() - for item in sorted(map(str, items.values())): - name, export = item.split(' = ', 1) - exports[group][name] = export - if exports: - metadata['extensions']['python.exports'] = exports - except IOError: - pass - - # copy console_scripts entry points to commands - if 'python.exports' in metadata['extensions']: - for (ep_script, wrap_script) in (('console_scripts', 'wrap_console'), - ('gui_scripts', 'wrap_gui')): - if ep_script in metadata['extensions']['python.exports']: - metadata['extensions']['python.commands'][wrap_script] = \ - metadata['extensions']['python.exports'][ep_script] - - return metadata - -def requires_to_requires_dist(requirement): - """Compose the version predicates for requirement in PEP 345 fashion.""" - requires_dist = [] - for op, ver in requirement.specs: - requires_dist.append(op + ver) - if not requires_dist: - return '' - return " (%s)" % ','.join(requires_dist) - -def convert_requirements(requirements): - """Yield Requires-Dist: strings for parsed requirements strings.""" - for req 
in requirements: - parsed_requirement = pkg_resources.Requirement.parse(req) - spec = requires_to_requires_dist(parsed_requirement) - extras = ",".join(parsed_requirement.extras) - if extras: - extras = "[%s]" % extras - yield (parsed_requirement.project_name + extras + spec) - -def generate_requirements(extras_require): - """ - Convert requirements from a setup()-style dictionary to ('Requires-Dist', 'requirement') - and ('Provides-Extra', 'extra') tuples. - - extras_require is a dictionary of {extra: [requirements]} as passed to setup(), - using the empty extra {'': [requirements]} to hold install_requires. - """ - for extra, depends in extras_require.items(): - condition = '' - if extra and ':' in extra: # setuptools extra:condition syntax - extra, condition = extra.split(':', 1) - extra = pkg_resources.safe_extra(extra) - if extra: - yield ('Provides-Extra', extra) - if condition: - condition += " and " - condition += "extra == '%s'" % extra - if condition: - condition = '; ' + condition - for new_req in convert_requirements(depends): - yield ('Requires-Dist', new_req + condition) - -def pkginfo_to_metadata(egg_info_path, pkginfo_path): - """ - Convert .egg-info directory with PKG-INFO to the Metadata 1.3 aka - old-draft Metadata 2.0 format. 
- """ - pkg_info = read_pkg_info(pkginfo_path) - pkg_info.replace_header('Metadata-Version', '2.0') - requires_path = os.path.join(egg_info_path, 'requires.txt') - if os.path.exists(requires_path): - with open(requires_path) as requires_file: - requires = requires_file.read() - for extra, reqs in sorted(pkg_resources.split_sections(requires), - key=lambda x: x[0] or ''): - for item in generate_requirements({extra: reqs}): - pkg_info[item[0]] = item[1] - - description = pkg_info['Description'] - if description: - pkg_info.set_payload(dedent_description(pkg_info)) - del pkg_info['Description'] - - return pkg_info - - -def pkginfo_unicode(pkg_info, field): - """Hack to coax Unicode out of an email Message() - Python 3.3+""" - text = pkg_info[field] - field = field.lower() - if not isinstance(text, str): - if not hasattr(pkg_info, 'raw_items'): # Python 3.2 - return str(text) - for item in pkg_info.raw_items(): - if item[0].lower() == field: - text = item[1].encode('ascii', 'surrogateescape')\ - .decode('utf-8') - break - - return text - - -def dedent_description(pkg_info): - """ - Dedent and convert pkg_info['Description'] to Unicode. - """ - description = pkg_info['Description'] - - # Python 3 Unicode handling, sorta. - surrogates = False - if not isinstance(description, str): - surrogates = True - description = pkginfo_unicode(pkg_info, 'Description') - - description_lines = description.splitlines() - description_dedent = '\n'.join( - # if the first line of long_description is blank, - # the first line here will be indented. 
- (description_lines[0].lstrip(), - textwrap.dedent('\n'.join(description_lines[1:])), - '\n')) - - if surrogates: - description_dedent = description_dedent\ - .encode("utf8")\ - .decode("ascii", "surrogateescape") - - return description_dedent - - -if __name__ == "__main__": - import sys, pprint - pprint.pprint(pkginfo_to_dict(sys.argv[1])) diff --git a/wheel/paths.py b/wheel/paths.py deleted file mode 100644 index fe3dfd6..0000000 --- a/wheel/paths.py +++ /dev/null @@ -1,41 +0,0 @@ -""" -Installation paths. - -Map the .data/ subdirectory names to install paths. -""" - -import os.path -import sys -import distutils.dist as dist -import distutils.command.install as install - -def get_install_command(name): - # late binding due to potential monkeypatching - d = dist.Distribution({'name':name}) - i = install.install(d) - i.finalize_options() - return i - -def get_install_paths(name): - """ - Return the (distutils) install paths for the named dist. - - A dict with ('purelib', 'platlib', 'headers', 'scripts', 'data') keys. 
- """ - paths = {} - - i = get_install_command(name) - - for key in install.SCHEME_KEYS: - paths[key] = getattr(i, 'install_' + key) - - # pip uses a similar path as an alternative to the system's (read-only) - # include directory: - if hasattr(sys, 'real_prefix'): # virtualenv - paths['headers'] = os.path.join(sys.prefix, - 'include', - 'site', - 'python' + sys.version[:3], - name) - - return paths diff --git a/wheel/pep425tags.py b/wheel/pep425tags.py deleted file mode 100644 index a7bd4a9..0000000 --- a/wheel/pep425tags.py +++ /dev/null @@ -1,178 +0,0 @@ -"""Generate and work with PEP 425 Compatibility Tags.""" - -import sys -import warnings - -import sysconfig -import distutils.util - - -def get_config_var(var): - try: - return sysconfig.get_config_var(var) - except IOError as e: # pip Issue #1074 - warnings.warn("{0}".format(e), RuntimeWarning) - return None - - -def get_abbr_impl(): - """Return abbreviated implementation name.""" - if hasattr(sys, 'pypy_version_info'): - pyimpl = 'pp' - elif sys.platform.startswith('java'): - pyimpl = 'jy' - elif sys.platform == 'cli': - pyimpl = 'ip' - else: - pyimpl = 'cp' - return pyimpl - - -def get_impl_ver(): - """Return implementation version.""" - impl_ver = get_config_var("py_version_nodot") - if not impl_ver or get_abbr_impl() == 'pp': - impl_ver = ''.join(map(str, get_impl_version_info())) - return impl_ver - - -def get_impl_version_info(): - """Return sys.version_info-like tuple for use in decrementing the minor - version.""" - if get_abbr_impl() == 'pp': - # as per https://github.com/pypa/pip/issues/2882 - return (sys.version_info[0], sys.pypy_version_info.major, - sys.pypy_version_info.minor) - else: - return sys.version_info[0], sys.version_info[1] - - -def get_flag(var, fallback, expected=True, warn=True): - """Use a fallback method for determining SOABI flags if the needed config - var is unset or unavailable.""" - val = get_config_var(var) - if val is None: - if warn: - warnings.warn("Config variable '{0}' 
is unset, Python ABI tag may " - "be incorrect".format(var), RuntimeWarning, 2) - return fallback() - return val == expected - - -def get_abi_tag(): - """Return the ABI tag based on SOABI (if available) or emulate SOABI - (CPython 2, PyPy).""" - soabi = get_config_var('SOABI') - impl = get_abbr_impl() - if not soabi and impl in ('cp', 'pp') and hasattr(sys, 'maxunicode'): - d = '' - m = '' - u = '' - if get_flag('Py_DEBUG', - lambda: hasattr(sys, 'gettotalrefcount'), - warn=(impl == 'cp')): - d = 'd' - if get_flag('WITH_PYMALLOC', - lambda: impl == 'cp', - warn=(impl == 'cp')): - m = 'm' - if get_flag('Py_UNICODE_SIZE', - lambda: sys.maxunicode == 0x10ffff, - expected=4, - warn=(impl == 'cp' and - sys.version_info < (3, 3))) \ - and sys.version_info < (3, 3): - u = 'u' - abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u) - elif soabi and soabi.startswith('cpython-'): - abi = 'cp' + soabi.split('-')[1] - elif soabi: - abi = soabi.replace('.', '_').replace('-', '_') - else: - abi = None - return abi - - -def get_platform(): - """Return our platform name 'win32', 'linux_x86_64'""" - # XXX remove distutils dependency - result = distutils.util.get_platform().replace('.', '_').replace('-', '_') - if result == "linux_x86_64" and sys.maxsize == 2147483647: - # pip pull request #3497 - result = "linux_i686" - return result - - -def get_supported(versions=None, supplied_platform=None): - """Return a list of supported tags for each version specified in - `versions`. - - :param versions: a list of string versions, of the form ["33", "32"], - or None. The first version will be assumed to support our ABI. - """ - supported = [] - - # Versions must be given with respect to the preference - if versions is None: - versions = [] - version_info = get_impl_version_info() - major = version_info[:-1] - # Support all previous minor Python versions. 
- for minor in range(version_info[-1], -1, -1): - versions.append(''.join(map(str, major + (minor,)))) - - impl = get_abbr_impl() - - abis = [] - - abi = get_abi_tag() - if abi: - abis[0:0] = [abi] - - abi3s = set() - import imp - for suffix in imp.get_suffixes(): - if suffix[0].startswith('.abi'): - abi3s.add(suffix[0].split('.', 2)[1]) - - abis.extend(sorted(list(abi3s))) - - abis.append('none') - - platforms = [] - if supplied_platform: - platforms.append(supplied_platform) - platforms.append(get_platform()) - - # Current version, current API (built specifically for our Python): - for abi in abis: - for arch in platforms: - supported.append(('%s%s' % (impl, versions[0]), abi, arch)) - - # abi3 modules compatible with older version of Python - for version in versions[1:]: - # abi3 was introduced in Python 3.2 - if version in ('31', '30'): - break - for abi in abi3s: # empty set if not Python 3 - for arch in platforms: - supported.append(("%s%s" % (impl, version), abi, arch)) - - # No abi / arch, but requires our implementation: - for i, version in enumerate(versions): - supported.append(('%s%s' % (impl, version), 'none', 'any')) - if i == 0: - # Tagged specifically as being cross-version compatible - # (with just the major version specified) - supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any')) - - # Major Python version + platform; e.g. 
binaries not using the Python API - supported.append(('py%s' % (versions[0][0]), 'none', arch)) - - # No abi / arch, generic Python - for i, version in enumerate(versions): - supported.append(('py%s' % (version,), 'none', 'any')) - if i == 0: - supported.append(('py%s' % (version[0]), 'none', 'any')) - - return supported diff --git a/wheel/pkginfo.py b/wheel/pkginfo.py deleted file mode 100644 index 8a4aca3..0000000 --- a/wheel/pkginfo.py +++ /dev/null @@ -1,44 +0,0 @@ -"""Tools for reading and writing PKG-INFO / METADATA without caring -about the encoding.""" - -from email.parser import Parser - -try: - unicode - _PY3 = False -except NameError: - _PY3 = True - -if not _PY3: - from email.generator import Generator - - def read_pkg_info_bytes(bytestr): - return Parser().parsestr(bytestr) - - def read_pkg_info(path): - with open(path, "r") as headers: - message = Parser().parse(headers) - return message - - def write_pkg_info(path, message): - with open(path, 'w') as metadata: - Generator(metadata, maxheaderlen=0).flatten(message) - -else: - from email.generator import BytesGenerator - def read_pkg_info_bytes(bytestr): - headers = bytestr.decode(encoding="ascii", errors="surrogateescape") - message = Parser().parsestr(headers) - return message - - def read_pkg_info(path): - with open(path, "r", - encoding="ascii", - errors="surrogateescape") as headers: - message = Parser().parse(headers) - return message - - def write_pkg_info(path, message): - with open(path, "wb") as out: - BytesGenerator(out, maxheaderlen=0).flatten(message) - diff --git a/wheel/signatures/__init__.py b/wheel/signatures/__init__.py deleted file mode 100644 index 3f21b50..0000000 --- a/wheel/signatures/__init__.py +++ /dev/null @@ -1,106 +0,0 @@ -""" -Create and verify jws-js format Ed25519 signatures. 
-""" - -__all__ = [ 'sign', 'verify' ] - -import json -from ..util import urlsafe_b64decode, urlsafe_b64encode, native, binary - -ed25519ll = None - -ALG = "Ed25519" - -def get_ed25519ll(): - """Lazy import-and-test of ed25519 module""" - global ed25519ll - - if not ed25519ll: - try: - import ed25519ll # fast (thousands / s) - except (ImportError, OSError): # pragma nocover - from . import ed25519py as ed25519ll # pure Python (hundreds / s) - test() - - return ed25519ll - -def sign(payload, keypair): - """Return a JWS-JS format signature given a JSON-serializable payload and - an Ed25519 keypair.""" - get_ed25519ll() - # - header = { - "alg": ALG, - "jwk": { - "kty": ALG, # alg -> kty in jwk-08. - "vk": native(urlsafe_b64encode(keypair.vk)) - } - } - - encoded_header = urlsafe_b64encode(binary(json.dumps(header, sort_keys=True))) - encoded_payload = urlsafe_b64encode(binary(json.dumps(payload, sort_keys=True))) - secured_input = b".".join((encoded_header, encoded_payload)) - sig_msg = ed25519ll.crypto_sign(secured_input, keypair.sk) - signature = sig_msg[:ed25519ll.SIGNATUREBYTES] - encoded_signature = urlsafe_b64encode(signature) - - return {"recipients": - [{"header":native(encoded_header), - "signature":native(encoded_signature)}], - "payload": native(encoded_payload)} - -def assertTrue(condition, message=""): - if not condition: - raise ValueError(message) - -def verify(jwsjs): - """Return (decoded headers, payload) if all signatures in jwsjs are - consistent, else raise ValueError. 
- - Caller must decide whether the keys are actually trusted.""" - get_ed25519ll() - # XXX forbid duplicate keys in JSON input using object_pairs_hook (2.7+) - recipients = jwsjs["recipients"] - encoded_payload = binary(jwsjs["payload"]) - headers = [] - for recipient in recipients: - assertTrue(len(recipient) == 2, "Unknown recipient key {0}".format(recipient)) - h = binary(recipient["header"]) - s = binary(recipient["signature"]) - header = json.loads(native(urlsafe_b64decode(h))) - assertTrue(header["alg"] == ALG, - "Unexpected algorithm {0}".format(header["alg"])) - if "alg" in header["jwk"] and not "kty" in header["jwk"]: - header["jwk"]["kty"] = header["jwk"]["alg"] # b/w for JWK < -08 - assertTrue(header["jwk"]["kty"] == ALG, # true for Ed25519 - "Unexpected key type {0}".format(header["jwk"]["kty"])) - vk = urlsafe_b64decode(binary(header["jwk"]["vk"])) - secured_input = b".".join((h, encoded_payload)) - sig = urlsafe_b64decode(s) - sig_msg = sig+secured_input - verified_input = native(ed25519ll.crypto_sign_open(sig_msg, vk)) - verified_header, verified_payload = verified_input.split('.') - verified_header = binary(verified_header) - decoded_header = native(urlsafe_b64decode(verified_header)) - headers.append(json.loads(decoded_header)) - - verified_payload = binary(verified_payload) - - # only return header, payload that have passed through the crypto library. 
- payload = json.loads(native(urlsafe_b64decode(verified_payload))) - - return headers, payload - -def test(): - kp = ed25519ll.crypto_sign_keypair() - payload = {'test': 'onstartup'} - jwsjs = json.loads(json.dumps(sign(payload, kp))) - verify(jwsjs) - jwsjs['payload'] += 'x' - try: - verify(jwsjs) - except ValueError: - pass - else: # pragma no cover - raise RuntimeError("No error from bad wheel.signatures payload.") - diff --git a/wheel/signatures/djbec.py b/wheel/signatures/djbec.py deleted file mode 100644 index 56efe44..0000000 --- a/wheel/signatures/djbec.py +++ /dev/null @@ -1,270 +0,0 @@ -# Ed25519 digital signatures -# Based on http://ed25519.cr.yp.to/python/ed25519.py -# See also http://ed25519.cr.yp.to/software.html -# Adapted by Ron Garret -# Sped up considerably using coordinate transforms found on: -# http://www.hyperelliptic.org/EFD/g1p/auto-twisted-extended-1.html -# Specifically add-2008-hwcd-4 and dbl-2008-hwcd - -try: # pragma nocover - unicode - PY3 = False - def asbytes(b): - """Convert array of integers to byte string""" - return ''.join(chr(x) for x in b) - def joinbytes(b): - """Convert array of bytes to byte string""" - return ''.join(b) - def bit(h, i): - """Return i'th bit of bytestring h""" - return (ord(h[i//8]) >> (i%8)) & 1 - -except NameError: # pragma nocover - PY3 = True - asbytes = bytes - joinbytes = bytes - def bit(h, i): - return (h[i//8] >> (i%8)) & 1 - -import hashlib - -b = 256 -q = 2**255 - 19 -l = 2**252 + 27742317777372353535851937790883648493 - -def H(m): - return hashlib.sha512(m).digest() - -def expmod(b, e, m): - if e == 0: return 1 - t = expmod(b, e // 2, m) ** 2 % m - if e & 1: t = (t * b) % m - return t - -# Can probably get some extra speedup here by replacing this with -# an extended-euclidean, but performance seems OK without that -def inv(x): - return expmod(x, q-2, q) - -d = -121665 * inv(121666) -I = expmod(2,(q-1)//4,q) - -def xrecover(y): - xx = (y*y-1) * inv(d*y*y+1) - x = expmod(xx,(q+3)//8,q) - if (x*x 
- xx) % q != 0: x = (x*I) % q - if x % 2 != 0: x = q-x - return x - -By = 4 * inv(5) -Bx = xrecover(By) -B = [Bx % q,By % q] - -#def edwards(P,Q): -# x1 = P[0] -# y1 = P[1] -# x2 = Q[0] -# y2 = Q[1] -# x3 = (x1*y2+x2*y1) * inv(1+d*x1*x2*y1*y2) -# y3 = (y1*y2+x1*x2) * inv(1-d*x1*x2*y1*y2) -# return (x3 % q,y3 % q) - -#def scalarmult(P,e): -# if e == 0: return [0,1] -# Q = scalarmult(P,e/2) -# Q = edwards(Q,Q) -# if e & 1: Q = edwards(Q,P) -# return Q - -# Faster (!) version based on: -# http://www.hyperelliptic.org/EFD/g1p/auto-twisted-extended-1.html - -def xpt_add(pt1, pt2): - (X1, Y1, Z1, T1) = pt1 - (X2, Y2, Z2, T2) = pt2 - A = ((Y1-X1)*(Y2+X2)) % q - B = ((Y1+X1)*(Y2-X2)) % q - C = (Z1*2*T2) % q - D = (T1*2*Z2) % q - E = (D+C) % q - F = (B-A) % q - G = (B+A) % q - H = (D-C) % q - X3 = (E*F) % q - Y3 = (G*H) % q - Z3 = (F*G) % q - T3 = (E*H) % q - return (X3, Y3, Z3, T3) - -def xpt_double (pt): - (X1, Y1, Z1, _) = pt - A = (X1*X1) - B = (Y1*Y1) - C = (2*Z1*Z1) - D = (-A) % q - J = (X1+Y1) % q - E = (J*J-A-B) % q - G = (D+B) % q - F = (G-C) % q - H = (D-B) % q - X3 = (E*F) % q - Y3 = (G*H) % q - Z3 = (F*G) % q - T3 = (E*H) % q - return (X3, Y3, Z3, T3) - -def pt_xform (pt): - (x, y) = pt - return (x, y, 1, (x*y)%q) - -def pt_unxform (pt): - (x, y, z, _) = pt - return ((x*inv(z))%q, (y*inv(z))%q) - -def xpt_mult (pt, n): - if n==0: return pt_xform((0,1)) - _ = xpt_double(xpt_mult(pt, n>>1)) - return xpt_add(_, pt) if n&1 else _ - -def scalarmult(pt, e): - return pt_unxform(xpt_mult(pt_xform(pt), e)) - -def encodeint(y): - bits = [(y >> i) & 1 for i in range(b)] - e = [(sum([bits[i * 8 + j] << j for j in range(8)])) - for i in range(b//8)] - return asbytes(e) - -def encodepoint(P): - x = P[0] - y = P[1] - bits = [(y >> i) & 1 for i in range(b - 1)] + [x & 1] - e = [(sum([bits[i * 8 + j] << j for j in range(8)])) - for i in range(b//8)] - return asbytes(e) - -def publickey(sk): - h = H(sk) - a = 2**(b-2) + sum(2**i * bit(h,i) for i in range(3,b-2)) - A = 
scalarmult(B,a) - return encodepoint(A) - -def Hint(m): - h = H(m) - return sum(2**i * bit(h,i) for i in range(2*b)) - -def signature(m,sk,pk): - h = H(sk) - a = 2**(b-2) + sum(2**i * bit(h,i) for i in range(3,b-2)) - inter = joinbytes([h[i] for i in range(b//8,b//4)]) - r = Hint(inter + m) - R = scalarmult(B,r) - S = (r + Hint(encodepoint(R) + pk + m) * a) % l - return encodepoint(R) + encodeint(S) - -def isoncurve(P): - x = P[0] - y = P[1] - return (-x*x + y*y - 1 - d*x*x*y*y) % q == 0 - -def decodeint(s): - return sum(2**i * bit(s,i) for i in range(0,b)) - -def decodepoint(s): - y = sum(2**i * bit(s,i) for i in range(0,b-1)) - x = xrecover(y) - if x & 1 != bit(s,b-1): x = q-x - P = [x,y] - if not isoncurve(P): raise Exception("decoding point that is not on curve") - return P - -def checkvalid(s, m, pk): - if len(s) != b//4: raise Exception("signature length is wrong") - if len(pk) != b//8: raise Exception("public-key length is wrong") - R = decodepoint(s[0:b//8]) - A = decodepoint(pk) - S = decodeint(s[b//8:b//4]) - h = Hint(encodepoint(R) + pk + m) - v1 = scalarmult(B,S) -# v2 = edwards(R,scalarmult(A,h)) - v2 = pt_unxform(xpt_add(pt_xform(R), pt_xform(scalarmult(A, h)))) - return v1==v2 - -########################################################## -# -# Curve25519 reference implementation by Matthew Dempsky, from: -# http://cr.yp.to/highspeed/naclcrypto-20090310.pdf - -# P = 2 ** 255 - 19 -P = q -A = 486662 - -#def expmod(b, e, m): -# if e == 0: return 1 -# t = expmod(b, e / 2, m) ** 2 % m -# if e & 1: t = (t * b) % m -# return t - -# def inv(x): return expmod(x, P - 2, P) - -def add(n, m, d): - (xn, zn) = n - (xm, zm) = m - (xd, zd) = d - x = 4 * (xm * xn - zm * zn) ** 2 * zd - z = 4 * (xm * zn - zm * xn) ** 2 * xd - return (x % P, z % P) - -def double(n): - (xn, zn) = n - x = (xn ** 2 - zn ** 2) ** 2 - z = 4 * xn * zn * (xn ** 2 + A * xn * zn + zn ** 2) - return (x % P, z % P) - -def curve25519(n, base=9): - one = (base,1) - two = double(one) - # f(m) 
evaluates to a tuple - # containing the mth multiple and the - # (m+1)th multiple of base. - def f(m): - if m == 1: return (one, two) - (pm, pm1) = f(m // 2) - if (m & 1): - return (add(pm, pm1, one), double(pm1)) - return (double(pm), add(pm, pm1, one)) - ((x,z), _) = f(n) - return (x * inv(z)) % P - -import random - -def genkey(n=0): - n = n or random.randint(0,P) - n &= ~7 - n &= ~(128 << 8 * 31) - n |= 64 << 8 * 31 - return n - -#def str2int(s): -# return int(hexlify(s), 16) -# # return sum(ord(s[i]) << (8 * i) for i in range(32)) -# -#def int2str(n): -# return unhexlify("%x" % n) -# # return ''.join([chr((n >> (8 * i)) & 255) for i in range(32)]) - -################################################# - -def dsa_test(): - import os - msg = str(random.randint(q,q+q)).encode('utf-8') - sk = os.urandom(32) - pk = publickey(sk) - sig = signature(msg, sk, pk) - return checkvalid(sig, msg, pk) - -def dh_test(): - sk1 = genkey() - sk2 = genkey() - return curve25519(sk1, curve25519(sk2)) == curve25519(sk2, curve25519(sk1)) - diff --git a/wheel/signatures/ed25519py.py b/wheel/signatures/ed25519py.py deleted file mode 100644 index 55eba2e..0000000 --- a/wheel/signatures/ed25519py.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- - -import warnings -import os - -from collections import namedtuple -from . 
import djbec - -__all__ = ['crypto_sign', 'crypto_sign_open', 'crypto_sign_keypair', 'Keypair', - 'PUBLICKEYBYTES', 'SECRETKEYBYTES', 'SIGNATUREBYTES'] - -PUBLICKEYBYTES=32 -SECRETKEYBYTES=64 -SIGNATUREBYTES=64 - -Keypair = namedtuple('Keypair', ('vk', 'sk')) # verifying key, secret key - -def crypto_sign_keypair(seed=None): - """Return (verifying, secret) key from a given seed, or os.urandom(32)""" - if seed is None: - seed = os.urandom(PUBLICKEYBYTES) - else: - warnings.warn("ed25519ll should choose random seed.", - RuntimeWarning) - if len(seed) != 32: - raise ValueError("seed must be 32 random bytes or None.") - skbytes = seed - vkbytes = djbec.publickey(skbytes) - return Keypair(vkbytes, skbytes+vkbytes) - - -def crypto_sign(msg, sk): - """Return signature+message given message and secret key. - The signature is the first SIGNATUREBYTES bytes of the return value. - A copy of msg is in the remainder.""" - if len(sk) != SECRETKEYBYTES: - raise ValueError("Bad signing key length %d" % len(sk)) - vkbytes = sk[PUBLICKEYBYTES:] - skbytes = sk[:PUBLICKEYBYTES] - sig = djbec.signature(msg, skbytes, vkbytes) - return sig + msg - - -def crypto_sign_open(signed, vk): - """Return message given signature+message and the verifying key.""" - if len(vk) != PUBLICKEYBYTES: - raise ValueError("Bad verifying key length %d" % len(vk)) - rc = djbec.checkvalid(signed[:SIGNATUREBYTES], signed[SIGNATUREBYTES:], vk) - if not rc: - raise ValueError("rc != True", rc) - return signed[SIGNATUREBYTES:] - diff --git a/wheel/signatures/keys.py b/wheel/signatures/keys.py deleted file mode 100644 index 57d7feb..0000000 --- a/wheel/signatures/keys.py +++ /dev/null @@ -1,99 +0,0 @@ -"""Store and retrieve wheel signing / verifying keys. - -Given a scope (a package name, + meaning "all packages", or - meaning -"no packages"), return a list of verifying keys that are trusted for that -scope. 
- -Given a package name, return a list of (scope, key) suggested keys to sign -that package (only the verifying keys; the private signing key is stored -elsewhere). - -Keys here are represented as urlsafe_b64encoded strings with no padding. - -Tentative command line interface: - -# list trusts -wheel trust -# trust a particular key for all -wheel trust + key -# trust key for beaglevote -wheel trust beaglevote key -# stop trusting a key for all -wheel untrust + key - -# generate a key pair -wheel keygen - -# import a signing key from a file -wheel import keyfile - -# export a signing key -wheel export key -""" - -import json -import os.path -from ..util import native, load_config_paths, save_config_path - -class WheelKeys(object): - SCHEMA = 1 - CONFIG_NAME = 'wheel.json' - - def __init__(self): - self.data = {'signers':[], 'verifiers':[]} - - def load(self): - # XXX JSON is not a great database - for path in load_config_paths('wheel'): - conf = os.path.join(native(path), self.CONFIG_NAME) - if os.path.exists(conf): - with open(conf, 'r') as infile: - self.data = json.load(infile) - for x in ('signers', 'verifiers'): - if not x in self.data: - self.data[x] = [] - if 'schema' not in self.data: - self.data['schema'] = self.SCHEMA - elif self.data['schema'] != self.SCHEMA: - raise ValueError( - "Bad wheel.json version {0}, expected {1}".format( - self.data['schema'], self.SCHEMA)) - break - return self - - def save(self): - # Try not to call this a very long time after load() - path = save_config_path('wheel') - conf = os.path.join(native(path), self.CONFIG_NAME) - with open(conf, 'w+') as out: - json.dump(self.data, out, indent=2) - return self - - def trust(self, scope, vk): - """Start trusting a particular key for given scope.""" - self.data['verifiers'].append({'scope':scope, 'vk':vk}) - return self - - def untrust(self, scope, vk): - """Stop trusting a particular key for given scope.""" - self.data['verifiers'].remove({'scope':scope, 'vk':vk}) - return self - - 
def trusted(self, scope=None): - """Return list of [(scope, trusted key), ...] for given scope.""" - trust = [(x['scope'], x['vk']) for x in self.data['verifiers'] if x['scope'] in (scope, '+')] - trust.sort(key=lambda x: x[0]) - trust.reverse() - return trust - - def signers(self, scope): - """Return list of signing key(s).""" - sign = [(x['scope'], x['vk']) for x in self.data['signers'] if x['scope'] in (scope, '+')] - sign.sort(key=lambda x: x[0]) - sign.reverse() - return sign - - def add_signer(self, scope, vk): - """Remember verifying key vk as being valid for signing in scope.""" - self.data['signers'].append({'scope':scope, 'vk':vk}) - diff --git a/wheel/test/__init__.py b/wheel/test/__init__.py deleted file mode 100644 index 4287ca8..0000000 --- a/wheel/test/__init__.py +++ /dev/null @@ -1 +0,0 @@ -#
\ No newline at end of file diff --git a/wheel/test/complex-dist/complexdist/__init__.py b/wheel/test/complex-dist/complexdist/__init__.py deleted file mode 100644 index 559fbb7..0000000 --- a/wheel/test/complex-dist/complexdist/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -def main(): - return diff --git a/wheel/test/complex-dist/setup.py b/wheel/test/complex-dist/setup.py deleted file mode 100644 index 615d5dc..0000000 --- a/wheel/test/complex-dist/setup.py +++ /dev/null @@ -1,30 +0,0 @@ -from setuptools import setup - -try: - unicode - def u8(s): - return s.decode('unicode-escape') -except NameError: - def u8(s): - return s - -setup(name='complex-dist', - version='0.1', - description=u8('Another testing distribution \N{SNOWMAN}'), - long_description=u8('Another testing distribution \N{SNOWMAN}'), - author="Illustrious Author", - author_email="illustrious@example.org", - url="http://example.org/exemplary", - packages=['complexdist'], - setup_requires=["wheel", "setuptools"], - install_requires=["quux", "splort"], - extras_require={'simple':['simple.dist']}, - tests_require=["foo", "bar>=10.0.0"], - entry_points={ - 'console_scripts': [ - 'complex-dist=complexdist:main', - 'complex-dist2=complexdist:main', - ], - }, - ) - diff --git a/wheel/test/conftest.py b/wheel/test/conftest.py deleted file mode 100644 index d14cc47..0000000 --- a/wheel/test/conftest.py +++ /dev/null @@ -1,45 +0,0 @@ -"""
-pytest local configuration plug-in
-"""
-
-import gc
-import warnings
-
-import pytest
-
-@pytest.yield_fixture(scope='function', autouse=True)
-def error_on_ResourceWarning():
- """This fixture captures ResourceWarning's and reports an "error"
- describing the file handles left open.
-
- This is shown regardless of how successful the test was, if a test fails
- and leaves files open then those files will be reported. Ideally, even
- those files should be closed properly after a test failure or exception.
-
- Since only Python 3 and PyPy3 have ResourceWarning's, this context will
- have no effect when running tests on Python 2 or PyPy.
-
- Because of autouse=True, this function will be automatically enabled for
- all test_* functions in this module.
-
- This code is primarily based on the examples found here:
- https://stackoverflow.com/questions/24717027/convert-python-3-resourcewarnings-into-exception
- """
- try:
- ResourceWarning
- except NameError:
- # Python 2, PyPy
- yield
- return
- # Python 3, PyPy3
- with warnings.catch_warnings(record=True) as caught:
- warnings.resetwarnings() # clear all filters
- warnings.simplefilter('ignore') # ignore all
- warnings.simplefilter('always', ResourceWarning) # add filter
- yield # run tests in this context
- gc.collect() # run garbage collection (for pypy3)
- if not caught:
- return
- pytest.fail('The following file descriptors were not closed properly:\n' +
- '\n'.join((str(warning.message) for warning in caught)),
- pytrace=False)
diff --git a/wheel/test/extension.dist/extension.c b/wheel/test/extension.dist/extension.c deleted file mode 100644 index a37c3fa..0000000 --- a/wheel/test/extension.dist/extension.c +++ /dev/null @@ -1,2 +0,0 @@ -#define Py_LIMITED_API 0x03020000 -#include <Python.h> diff --git a/wheel/test/extension.dist/setup.cfg b/wheel/test/extension.dist/setup.cfg deleted file mode 100644 index 9f6ff39..0000000 --- a/wheel/test/extension.dist/setup.cfg +++ /dev/null @@ -1,2 +0,0 @@ -[bdist_wheel] -py_limited_api=cp32 diff --git a/wheel/test/extension.dist/setup.py b/wheel/test/extension.dist/setup.py deleted file mode 100644 index 7a66845..0000000 --- a/wheel/test/extension.dist/setup.py +++ /dev/null @@ -1,20 +0,0 @@ -from setuptools import setup, Extension - -try: - unicode - def u8(s): - return s.decode('unicode-escape').encode('utf-8') -except NameError: - def u8(s): - return s.encode('utf-8') - -setup(name='extension.dist', - version='0.1', - description=u8('A testing distribution \N{SNOWMAN}'), - ext_modules=[ - Extension(name='extension', - sources=['extension.c'], - py_limited_api=True) - ], - ) - diff --git a/wheel/test/headers.dist/header.h b/wheel/test/headers.dist/header.h deleted file mode 100644 index e69de29..0000000 --- a/wheel/test/headers.dist/header.h +++ /dev/null diff --git a/wheel/test/headers.dist/headersdist.py b/wheel/test/headers.dist/headersdist.py deleted file mode 100644 index e69de29..0000000 --- a/wheel/test/headers.dist/headersdist.py +++ /dev/null diff --git a/wheel/test/headers.dist/setup.cfg b/wheel/test/headers.dist/setup.cfg deleted file mode 100644 index 7c964b4..0000000 --- a/wheel/test/headers.dist/setup.cfg +++ /dev/null @@ -1,2 +0,0 @@ -[wheel] -universal=1 diff --git a/wheel/test/headers.dist/setup.py b/wheel/test/headers.dist/setup.py deleted file mode 100644 index 2704f01..0000000 --- a/wheel/test/headers.dist/setup.py +++ /dev/null @@ -1,16 +0,0 @@ -from setuptools import setup - -try: - unicode - def u8(s): - return 
s.decode('unicode-escape').encode('utf-8') -except NameError: - def u8(s): - return s.encode('utf-8') - -setup(name='headers.dist', - version='0.1', - description=u8('A distribution with headers'), - headers=['header.h'] - ) - diff --git a/wheel/test/pydist-schema.json b/wheel/test/pydist-schema.json deleted file mode 100644 index 566f3a4..0000000 --- a/wheel/test/pydist-schema.json +++ /dev/null @@ -1,362 +0,0 @@ -{ - "id": "http://www.python.org/dev/peps/pep-0426/", - "$schema": "http://json-schema.org/draft-04/schema#", - "title": "Metadata for Python Software Packages 2.0", - "type": "object", - "properties": { - "metadata_version": { - "description": "Version of the file format", - "type": "string", - "pattern": "^(\\d+(\\.\\d+)*)$" - }, - "generator": { - "description": "Name and version of the program that produced this file.", - "type": "string", - "pattern": "^[0-9A-Za-z]([0-9A-Za-z_.-]*[0-9A-Za-z])( \\(.*\\))?$" - }, - "name": { - "description": "The name of the distribution.", - "type": "string", - "$ref": "#/definitions/distribution_name" - }, - "version": { - "description": "The distribution's public version identifier", - "type": "string", - "pattern": "^(\\d+(\\.\\d+)*)((a|b|c|rc)(\\d+))?(\\.(post)(\\d+))?(\\.(dev)(\\d+))?$" - }, - "source_label": { - "description": "A constrained identifying text string", - "type": "string", - "pattern": "^[0-9a-z_.-+]+$" - }, - "source_url": { - "description": "A string containing a full URL where the source for this specific version of the distribution can be downloaded.", - "type": "string", - "format": "uri" - }, - "summary": { - "description": "A one-line summary of what the distribution does.", - "type": "string" - }, - "extras": { - "description": "A list of optional sets of dependencies that may be used to define conditional dependencies in \"may_require\" and similar fields.", - "type": "array", - "items": { - "type": "string", - "$ref": "#/definitions/extra_name" - } - }, - "meta_requires": { - 
"description": "A list of subdistributions made available through this metadistribution.", - "type": "array", - "$ref": "#/definitions/dependencies" - }, - "run_requires": { - "description": "A list of other distributions needed to run this distribution.", - "type": "array", - "$ref": "#/definitions/dependencies" - }, - "test_requires": { - "description": "A list of other distributions needed when this distribution is tested.", - "type": "array", - "$ref": "#/definitions/dependencies" - }, - "build_requires": { - "description": "A list of other distributions needed when this distribution is built.", - "type": "array", - "$ref": "#/definitions/dependencies" - }, - "dev_requires": { - "description": "A list of other distributions needed when this distribution is developed.", - "type": "array", - "$ref": "#/definitions/dependencies" - }, - "provides": { - "description": "A list of strings naming additional dependency requirements that are satisfied by installing this distribution. These strings must be of the form Name or Name (Version)", - "type": "array", - "items": { - "type": "string", - "$ref": "#/definitions/provides_declaration" - } - }, - "modules": { - "description": "A list of modules and/or packages available for import after installing this distribution.", - "type": "array", - "items": { - "type": "string", - "$ref": "#/definitions/qualified_name" - } - }, - "namespaces": { - "description": "A list of namespace packages this distribution contributes to", - "type": "array", - "items": { - "type": "string", - "$ref": "#/definitions/qualified_name" - } - }, - "obsoleted_by": { - "description": "A string that indicates that this project is no longer being developed. 
The named project provides a substitute or replacement.", - "type": "string", - "$ref": "#/definitions/requirement" - }, - "supports_environments": { - "description": "A list of strings specifying the environments that the distribution explicitly supports.", - "type": "array", - "items": { - "type": "string", - "$ref": "#/definitions/environment_marker" - } - }, - "install_hooks": { - "description": "The install_hooks field is used to define various operations that may be invoked on a distribution in a platform independent manner.", - "type": "object", - "properties": { - "postinstall": { - "type": "string", - "$ref": "#/definitions/export_specifier" - }, - "preuninstall": { - "type": "string", - "$ref": "#/definitions/export_specifier" - } - } - }, - "extensions": { - "description": "Extensions to the metadata may be present in a mapping under the 'extensions' key.", - "type": "object", - "$ref": "#/definitions/extensions" - } - }, - - "required": ["metadata_version", "name", "version", "summary"], - "additionalProperties": false, - - "definitions": { - "contact": { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "email": { - "type": "string" - }, - "url": { - "type": "string" - }, - "role": { - "type": "string" - } - }, - "required": ["name"], - "additionalProperties": false - }, - "dependencies": { - "type": "array", - "items": { - "type": "object", - "$ref": "#/definitions/dependency" - } - }, - "dependency": { - "type": "object", - "properties": { - "extra": { - "type": "string", - "$ref": "#/definitions/extra_name" - }, - "environment": { - "type": "string", - "$ref": "#/definitions/environment_marker" - }, - "requires": { - "type": "array", - "items": { - "type": "string", - "$ref": "#/definitions/requirement" - } - } - }, - "required": ["requires"], - "additionalProperties": false - }, - "extensions": { - "type": "object", - "patternProperties": { - "^[A-Za-z][0-9A-Za-z_]*([.][0-9A-Za-z_]*)*$": {} - }, - "properties": { - 
"python.details" : { - "description": "More information regarding the distribution.", - "type": "object", - "properties": { - "document_names": { - "description": "Names of supporting metadata documents", - "type": "object", - "properties": { - "description": { - "type": "string", - "$ref": "#/definitions/document_name" - }, - "changelog": { - "type": "string", - "$ref": "#/definitions/document_name" - }, - "license": { - "type": "string", - "$ref": "#/definitions/document_name" - } - }, - "additionalProperties": false - }, - "keywords": { - "description": "A list of additional keywords to be used to assist searching for the distribution in a larger catalog.", - "type": "array", - "items": { - "type": "string" - } - }, - "license": { - "description": "A string indicating the license covering the distribution.", - "type": "string" - }, - "classifiers": { - "description": "A list of strings, with each giving a single classification value for the distribution.", - "type": "array", - "items": { - "type": "string" - } - } - } - }, - "python.project" : { - "description": "More information regarding the creation and maintenance of the distribution.", - "$ref": "#/definitions/project_or_integrator" - }, - "python.integrator" : { - "description": "More information regarding the downstream redistributor of the distribution.", - "$ref": "#/definitions/project_or_integrator" - }, - "python.commands" : { - "description": "Command line interfaces provided by this distribution", - "type": "object", - "$ref": "#/definitions/commands" - }, - "python.exports" : { - "description": "Other exported interfaces provided by this distribution", - "type": "object", - "$ref": "#/definitions/exports" - } - }, - "additionalProperties": false - }, - "commands": { - "type": "object", - "properties": { - "wrap_console": { - "type": "object", - "$ref": "#/definitions/command_map" - }, - "wrap_gui": { - "type": "object", - "$ref": "#/definitions/command_map" - }, - "prebuilt": { - "type": "array", 
- "items": { - "type": "string", - "$ref": "#/definitions/relative_path" - } - } - }, - "additionalProperties": false - }, - "exports": { - "type": "object", - "patternProperties": { - "^[A-Za-z][0-9A-Za-z_]*([.][0-9A-Za-z_]*)*$": { - "type": "object", - "patternProperties": { - ".": { - "type": "string", - "$ref": "#/definitions/export_specifier" - } - }, - "additionalProperties": false - } - }, - "additionalProperties": false - }, - "command_map": { - "type": "object", - "patternProperties": { - "^[0-9A-Za-z]([0-9A-Za-z_.-]*[0-9A-Za-z])?$": { - "type": "string", - "$ref": "#/definitions/export_specifier" - } - }, - "additionalProperties": false - }, - "project_or_integrator" : { - "type": "object", - "properties" : { - "contacts": { - "description": "A list of contributor entries giving the recommended contact points for getting more information about the project.", - "type": "array", - "items": { - "type": "object", - "$ref": "#/definitions/contact" - } - }, - "contributors": { - "description": "A list of contributor entries for other contributors not already listed as current project points of contact.", - "type": "array", - "items": { - "type": "object", - "$ref": "#/definitions/contact" - } - }, - "project_urls": { - "description": "A mapping of arbitrary text labels to additional URLs relevant to the project.", - "type": "object" - } - } - }, - "distribution_name": { - "type": "string", - "pattern": "^[0-9A-Za-z]([0-9A-Za-z_.-]*[0-9A-Za-z])?$" - }, - "requirement": { - "type": "string" - }, - "provides_declaration": { - "type": "string" - }, - "environment_marker": { - "type": "string" - }, - "document_name": { - "type": "string" - }, - "extra_name" : { - "type": "string", - "pattern": "^[0-9A-Za-z]([0-9A-Za-z_.-]*[0-9A-Za-z])?$" - }, - "relative_path" : { - "type": "string" - }, - "export_specifier": { - "type": "string", - "pattern": 
"^([A-Za-z_][A-Za-z_0-9]*([.][A-Za-z_][A-Za-z_0-9]*)*)(:[A-Za-z_][A-Za-z_0-9]*([.][A-Za-z_][A-Za-z_0-9]*)*)?(\\[[0-9A-Za-z]([0-9A-Za-z_.-]*[0-9A-Za-z])?\\])?$" - }, - "qualified_name" : { - "type": "string", - "pattern": "^[A-Za-z_][A-Za-z_0-9]*([.][A-Za-z_][A-Za-z_0-9]*)*$" - }, - "prefixed_name" : { - "type": "string", - "pattern": "^[A-Za-z_][A-Za-z_0-9]*([.][A-Za-z_0-9]*)*$" - } - } -} diff --git a/wheel/test/simple.dist/setup.py b/wheel/test/simple.dist/setup.py deleted file mode 100644 index 50c909f..0000000 --- a/wheel/test/simple.dist/setup.py +++ /dev/null @@ -1,17 +0,0 @@ -from setuptools import setup - -try: - unicode - def u8(s): - return s.decode('unicode-escape').encode('utf-8') -except NameError: - def u8(s): - return s.encode('utf-8') - -setup(name='simple.dist', - version='0.1', - description=u8('A testing distribution \N{SNOWMAN}'), - packages=['simpledist'], - extras_require={'voting': ['beaglevote']}, - ) - diff --git a/wheel/test/simple.dist/simpledist/__init__.py b/wheel/test/simple.dist/simpledist/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/wheel/test/simple.dist/simpledist/__init__.py +++ /dev/null diff --git a/wheel/test/test-1.0-py2.py3-none-win32.whl b/wheel/test/test-1.0-py2.py3-none-win32.whl Binary files differdeleted file mode 100644 index dfd3070..0000000 --- a/wheel/test/test-1.0-py2.py3-none-win32.whl +++ /dev/null diff --git a/wheel/test/test_basic.py b/wheel/test/test_basic.py deleted file mode 100644 index 6bd46b1..0000000 --- a/wheel/test/test_basic.py +++ /dev/null @@ -1,178 +0,0 @@ -""" -Basic wheel tests. 
-""" - -import os -import pkg_resources -import json -import sys - -from pkg_resources import resource_filename - -import wheel.util -import wheel.tool - -from wheel import egg2wheel -from wheel.install import WheelFile -from zipfile import ZipFile -from shutil import rmtree - -test_distributions = ("complex-dist", "simple.dist", "headers.dist") - -def teardown_module(): - """Delete eggs/wheels created by tests.""" - base = pkg_resources.resource_filename('wheel.test', '') - for dist in test_distributions: - for subdir in ('build', 'dist'): - try: - rmtree(os.path.join(base, dist, subdir)) - except OSError: - pass - -def setup_module(): - build_wheel() - build_egg() - -def build_wheel(): - """Build wheels from test distributions.""" - for dist in test_distributions: - pwd = os.path.abspath(os.curdir) - distdir = pkg_resources.resource_filename('wheel.test', dist) - os.chdir(distdir) - try: - sys.argv = ['', 'bdist_wheel'] - exec(compile(open('setup.py').read(), 'setup.py', 'exec')) - finally: - os.chdir(pwd) - -def build_egg(): - """Build eggs from test distributions.""" - for dist in test_distributions: - pwd = os.path.abspath(os.curdir) - distdir = pkg_resources.resource_filename('wheel.test', dist) - os.chdir(distdir) - try: - sys.argv = ['', 'bdist_egg'] - exec(compile(open('setup.py').read(), 'setup.py', 'exec')) - finally: - os.chdir(pwd) - -def test_findable(): - """Make sure pkg_resources can find us.""" - assert pkg_resources.working_set.by_key['wheel'].version - -def test_egg_re(): - """Make sure egg_info_re matches.""" - egg_names_path = pkg_resources.resource_filename('wheel', 'eggnames.txt') - with open(egg_names_path) as egg_names: - for line in egg_names: - line = line.strip() - if not line: - continue - assert egg2wheel.egg_info_re.match(line), line - -def test_compatibility_tags(): - """Test compatibilty tags are working.""" - wf = WheelFile("package-1.0.0-cp32.cp33-noabi-noarch.whl") - assert (list(wf.compatibility_tags) == - [('cp32', 'noabi', 
'noarch'), ('cp33', 'noabi', 'noarch')]) - assert (wf.arity == 2) - - wf2 = WheelFile("package-1.0.0-1st-cp33-noabi-noarch.whl") - wf2_info = wf2.parsed_filename.groupdict() - assert wf2_info['build'] == '1st', wf2_info - -def test_convert_egg(): - base = pkg_resources.resource_filename('wheel.test', '') - for dist in test_distributions: - distdir = os.path.join(base, dist, 'dist') - eggs = [e for e in os.listdir(distdir) if e.endswith('.egg')] - wheel.tool.convert(eggs, distdir, verbose=False) - -def test_unpack(): - """ - Make sure 'wheel unpack' works. - This also verifies the integrity of our testing wheel files. - """ - for dist in test_distributions: - distdir = pkg_resources.resource_filename('wheel.test', - os.path.join(dist, 'dist')) - for wheelfile in (w for w in os.listdir(distdir) if w.endswith('.whl')): - wheel.tool.unpack(os.path.join(distdir, wheelfile), distdir) - -def test_no_scripts(): - """Make sure entry point scripts are not generated.""" - dist = "complex-dist" - basedir = pkg_resources.resource_filename('wheel.test', dist) - for (dirname, subdirs, filenames) in os.walk(basedir): - for filename in filenames: - if filename.endswith('.whl'): - whl = ZipFile(os.path.join(dirname, filename)) - for entry in whl.infolist(): - assert not '.data/scripts/' in entry.filename - -def test_pydist(): - """Make sure pydist.json exists and validates against our schema.""" - # XXX this test may need manual cleanup of older wheels - - import jsonschema - - def open_json(filename): - with open(filename, 'rb') as json_file: - return json.loads(json_file.read().decode('utf-8')) - - pymeta_schema = open_json(resource_filename('wheel.test', - 'pydist-schema.json')) - valid = 0 - for dist in ("simple.dist", "complex-dist"): - basedir = pkg_resources.resource_filename('wheel.test', dist) - for (dirname, subdirs, filenames) in os.walk(basedir): - for filename in filenames: - if filename.endswith('.whl'): - whl = ZipFile(os.path.join(dirname, filename)) - for entry in 
whl.infolist(): - if entry.filename.endswith('/metadata.json'): - pymeta = json.loads(whl.read(entry).decode('utf-8')) - jsonschema.validate(pymeta, pymeta_schema) - valid += 1 - assert valid > 0, "No metadata.json found" - -def test_util(): - """Test functions in util.py.""" - for i in range(10): - before = b'*' * i - encoded = wheel.util.urlsafe_b64encode(before) - assert not encoded.endswith(b'=') - after = wheel.util.urlsafe_b64decode(encoded) - assert before == after - - -def test_pick_best(): - """Test the wheel ranking algorithm.""" - def get_tags(res): - info = res[-1].parsed_filename.groupdict() - return info['pyver'], info['abi'], info['plat'] - - cand_tags = [('py27', 'noabi', 'noarch'), ('py26', 'noabi', 'noarch'), - ('cp27', 'noabi', 'linux_i686'), - ('cp26', 'noabi', 'linux_i686'), - ('cp27', 'noabi', 'linux_x86_64'), - ('cp26', 'noabi', 'linux_x86_64')] - cand_wheels = [WheelFile('testpkg-1.0-%s-%s-%s.whl' % t) - for t in cand_tags] - - supported = [('cp27', 'noabi', 'linux_i686'), ('py27', 'noabi', 'noarch')] - supported2 = [('cp27', 'noabi', 'linux_i686'), ('py27', 'noabi', 'noarch'), - ('cp26', 'noabi', 'linux_i686'), ('py26', 'noabi', 'noarch')] - supported3 = [('cp26', 'noabi', 'linux_i686'), ('py26', 'noabi', 'noarch'), - ('cp27', 'noabi', 'linux_i686'), ('py27', 'noabi', 'noarch')] - - for supp in (supported, supported2, supported3): - context = lambda: list(supp) - for wheel in cand_wheels: - wheel.context = context - best = max(cand_wheels) - assert list(best.tags)[0] == supp[0] - - # assert_equal( - # list(map(get_tags, pick_best(cand_wheels, supp, top=False))), supp) diff --git a/wheel/test/test_install.py b/wheel/test/test_install.py deleted file mode 100644 index ddcddf5..0000000 --- a/wheel/test/test_install.py +++ /dev/null @@ -1,55 +0,0 @@ -# Test wheel.
-# The file has the following contents:
-# hello.pyd
-# hello/hello.py
-# hello/__init__.py
-# test-1.0.data/data/hello.dat
-# test-1.0.data/headers/hello.dat
-# test-1.0.data/scripts/hello.sh
-# test-1.0.dist-info/WHEEL
-# test-1.0.dist-info/METADATA
-# test-1.0.dist-info/RECORD
-# The root is PLATLIB
-# So, some in PLATLIB, and one in each of DATA, HEADERS and SCRIPTS.
-
-import wheel.tool
-import wheel.pep425tags
-from wheel.install import WheelFile
-from tempfile import mkdtemp
-import shutil
-import os
-
-THISDIR = os.path.dirname(__file__)
-TESTWHEEL = os.path.join(THISDIR, 'test-1.0-py2.py3-none-win32.whl')
-
-def check(*path):
- return os.path.exists(os.path.join(*path))
-
-def test_install():
- tempdir = mkdtemp()
- def get_supported():
- return list(wheel.pep425tags.get_supported()) + [('py3', 'none', 'win32')]
- whl = WheelFile(TESTWHEEL, context=get_supported)
- assert whl.supports_current_python(get_supported)
- try:
- locs = {}
- for key in ('purelib', 'platlib', 'scripts', 'headers', 'data'):
- locs[key] = os.path.join(tempdir, key)
- os.mkdir(locs[key])
- whl.install(overrides=locs)
- assert len(os.listdir(locs['purelib'])) == 0
- assert check(locs['platlib'], 'hello.pyd')
- assert check(locs['platlib'], 'hello', 'hello.py')
- assert check(locs['platlib'], 'hello', '__init__.py')
- assert check(locs['data'], 'hello.dat')
- assert check(locs['headers'], 'hello.dat')
- assert check(locs['scripts'], 'hello.sh')
- assert check(locs['platlib'], 'test-1.0.dist-info', 'RECORD')
- finally:
- shutil.rmtree(tempdir)
-
-def test_install_tool():
- """Slightly improve coverage of wheel.install"""
- wheel.tool.install([TESTWHEEL], force=True, dry_run=True)
-
-
\ No newline at end of file diff --git a/wheel/test/test_keys.py b/wheel/test/test_keys.py deleted file mode 100644 index f96166b..0000000 --- a/wheel/test/test_keys.py +++ /dev/null @@ -1,98 +0,0 @@ -import tempfile -import os.path -import unittest -import json - -from wheel.signatures import keys - -wheel_json = """ -{ - "verifiers": [ - { - "scope": "+", - "vk": "bp-bjK2fFgtA-8DhKKAAPm9-eAZcX_u03oBv2RlKOBc" - }, - { - "scope": "+", - "vk": "KAHZBfyqFW3OcFDbLSG4nPCjXxUPy72phP9I4Rn9MAo" - }, - { - "scope": "+", - "vk": "tmAYCrSfj8gtJ10v3VkvW7jOndKmQIYE12hgnFu3cvk" - } - ], - "signers": [ - { - "scope": "+", - "vk": "tmAYCrSfj8gtJ10v3VkvW7jOndKmQIYE12hgnFu3cvk" - }, - { - "scope": "+", - "vk": "KAHZBfyqFW3OcFDbLSG4nPCjXxUPy72phP9I4Rn9MAo" - } - ], - "schema": 1 -} -""" - -class TestWheelKeys(unittest.TestCase): - def setUp(self): - self.config = tempfile.NamedTemporaryFile(suffix='.json') - self.config.close() - - self.config_path, self.config_filename = os.path.split(self.config.name) - def load(*args): - return [self.config_path] - def save(*args): - return self.config_path - keys.load_config_paths = load - keys.save_config_path = save - self.wk = keys.WheelKeys() - self.wk.CONFIG_NAME = self.config_filename - - def tearDown(self): - os.unlink(self.config.name) - - def test_load_save(self): - self.wk.data = json.loads(wheel_json) - - self.wk.add_signer('+', '67890') - self.wk.add_signer('scope', 'abcdefg') - - self.wk.trust('epocs', 'gfedcba') - self.wk.trust('+', '12345') - - self.wk.save() - - del self.wk.data - self.wk.load() - - signers = self.wk.signers('scope') - self.assertTrue(signers[0] == ('scope', 'abcdefg'), self.wk.data['signers']) - self.assertTrue(signers[1][0] == '+', self.wk.data['signers']) - - trusted = self.wk.trusted('epocs') - self.assertTrue(trusted[0] == ('epocs', 'gfedcba')) - self.assertTrue(trusted[1][0] == '+') - - self.wk.untrust('epocs', 'gfedcba') - trusted = self.wk.trusted('epocs') - self.assertTrue(('epocs', 'gfedcba') not in 
trusted) - - def test_load_save_incomplete(self): - self.wk.data = json.loads(wheel_json) - del self.wk.data['signers'] - self.wk.data['schema'] = self.wk.SCHEMA+1 - self.wk.save() - try: - self.wk.load() - except ValueError: - pass - else: - raise Exception("Expected ValueError") - - del self.wk.data['schema'] - self.wk.save() - self.wk.load() - - diff --git a/wheel/test/test_paths.py b/wheel/test/test_paths.py deleted file mode 100644 index a23d506..0000000 --- a/wheel/test/test_paths.py +++ /dev/null @@ -1,6 +0,0 @@ -import wheel.paths -from distutils.command.install import SCHEME_KEYS - -def test_path(): - d = wheel.paths.get_install_paths('wheel') - assert len(d) == len(SCHEME_KEYS) diff --git a/wheel/test/test_ranking.py b/wheel/test/test_ranking.py deleted file mode 100644 index 1632a13..0000000 --- a/wheel/test/test_ranking.py +++ /dev/null @@ -1,43 +0,0 @@ -import unittest - -from wheel.pep425tags import get_supported -from wheel.install import WheelFile - -WHEELPAT = "%(name)s-%(ver)s-%(pyver)s-%(abi)s-%(arch)s.whl" -def make_wheel(name, ver, pyver, abi, arch): - name = WHEELPAT % dict(name=name, ver=ver, pyver=pyver, abi=abi, - arch=arch) - return WheelFile(name) - -# This relies on the fact that generate_supported will always return the -# exact pyver, abi, and architecture for its first (best) match. -sup = get_supported() -pyver, abi, arch = sup[0] -genver = 'py' + pyver[2:] -majver = genver[:3] - -COMBINATIONS = ( - ('bar', '0.9', 'py2.py3', 'none', 'any'), - ('bar', '0.9', majver, 'none', 'any'), - ('bar', '0.9', genver, 'none', 'any'), - ('bar', '0.9', pyver, abi, arch), - ('bar', '1.3.2', majver, 'none', 'any'), - ('bar', '3.1', genver, 'none', 'any'), - ('bar', '3.1', pyver, abi, arch), - ('foo', '1.0', majver, 'none', 'any'), - ('foo', '1.1', pyver, abi, arch), - ('foo', '2.1', majver + '0', 'none', 'any'), - # This will not be compatible for Python x.0. Beware when we hit Python - # 4.0, and don't test with 3.0!!! 
- ('foo', '2.1', majver + '1', 'none', 'any'), - ('foo', '2.1', pyver , 'none', 'any'), - ('foo', '2.1', pyver , abi, arch), -) - -WHEELS = [ make_wheel(*args) for args in COMBINATIONS ] - -class TestRanking(unittest.TestCase): - def test_comparison(self): - for i in range(len(WHEELS)-1): - for j in range(i): - self.assertTrue(WHEELS[j]<WHEELS[i]) diff --git a/wheel/test/test_signatures.py b/wheel/test/test_signatures.py deleted file mode 100644 index 0af19a7..0000000 --- a/wheel/test/test_signatures.py +++ /dev/null @@ -1,47 +0,0 @@ -from wheel import signatures -from wheel.signatures import djbec, ed25519py -from wheel.util import binary - -def test_getlib(): - signatures.get_ed25519ll() - -def test_djbec(): - djbec.dsa_test() - djbec.dh_test() - -def test_ed25519py(): - kp0 = ed25519py.crypto_sign_keypair(binary(' '*32)) - kp = ed25519py.crypto_sign_keypair() - - signed = ed25519py.crypto_sign(binary('test'), kp.sk) - - ed25519py.crypto_sign_open(signed, kp.vk) - - try: - ed25519py.crypto_sign_open(signed, kp0.vk) - except ValueError: - pass - else: - raise Exception("Expected ValueError") - - try: - ed25519py.crypto_sign_keypair(binary(' '*33)) - except ValueError: - pass - else: - raise Exception("Expected ValueError") - - try: - ed25519py.crypto_sign(binary(''), binary(' ')*31) - except ValueError: - pass - else: - raise Exception("Expected ValueError") - - try: - ed25519py.crypto_sign_open(binary(''), binary(' ')*31) - except ValueError: - pass - else: - raise Exception("Expected ValueError") -
\ No newline at end of file diff --git a/wheel/test/test_tagopt.py b/wheel/test/test_tagopt.py deleted file mode 100644 index b0d083e..0000000 --- a/wheel/test/test_tagopt.py +++ /dev/null @@ -1,176 +0,0 @@ -""" -Tests for the bdist_wheel tag options (--python-tag, --universal, and ---plat-name) -""" - -import sys -import shutil -import pytest -import py.path -import tempfile -import subprocess - -SETUP_PY = """\ -from setuptools import setup, Extension - -setup( - name="Test", - version="1.0", - author_email="author@example.com", - py_modules=["test"], - {ext_modules} -) -""" - -EXT_MODULES = "ext_modules=[Extension('_test', sources=['test.c'])]," - -@pytest.fixture -def temp_pkg(request, ext=False): - tempdir = tempfile.mkdtemp() - def fin(): - shutil.rmtree(tempdir) - request.addfinalizer(fin) - temppath = py.path.local(tempdir) - temppath.join('test.py').write('print("Hello, world")') - if ext: - temppath.join('test.c').write('#include <stdio.h>') - setup_py = SETUP_PY.format(ext_modules=EXT_MODULES) - else: - setup_py = SETUP_PY.format(ext_modules='') - temppath.join('setup.py').write(setup_py) - return temppath - -@pytest.fixture -def temp_ext_pkg(request): - return temp_pkg(request, ext=True) - -def test_default_tag(temp_pkg): - subprocess.check_call([sys.executable, 'setup.py', 'bdist_wheel'], - cwd=str(temp_pkg)) - dist_dir = temp_pkg.join('dist') - assert dist_dir.check(dir=1) - wheels = dist_dir.listdir() - assert len(wheels) == 1 - assert wheels[0].basename == 'Test-1.0-py%s-none-any.whl' % (sys.version[0],) - assert wheels[0].ext == '.whl' - -def test_explicit_tag(temp_pkg): - subprocess.check_call( - [sys.executable, 'setup.py', 'bdist_wheel', '--python-tag=py32'], - cwd=str(temp_pkg)) - dist_dir = temp_pkg.join('dist') - assert dist_dir.check(dir=1) - wheels = dist_dir.listdir() - assert len(wheels) == 1 - assert wheels[0].basename.startswith('Test-1.0-py32-') - assert wheels[0].ext == '.whl' - -def test_universal_tag(temp_pkg): - 
subprocess.check_call( - [sys.executable, 'setup.py', 'bdist_wheel', '--universal'], - cwd=str(temp_pkg)) - dist_dir = temp_pkg.join('dist') - assert dist_dir.check(dir=1) - wheels = dist_dir.listdir() - assert len(wheels) == 1 - assert wheels[0].basename.startswith('Test-1.0-py2.py3-') - assert wheels[0].ext == '.whl' - -def test_universal_beats_explicit_tag(temp_pkg): - subprocess.check_call( - [sys.executable, 'setup.py', 'bdist_wheel', '--universal', '--python-tag=py32'], - cwd=str(temp_pkg)) - dist_dir = temp_pkg.join('dist') - assert dist_dir.check(dir=1) - wheels = dist_dir.listdir() - assert len(wheels) == 1 - assert wheels[0].basename.startswith('Test-1.0-py2.py3-') - assert wheels[0].ext == '.whl' - -def test_universal_in_setup_cfg(temp_pkg): - temp_pkg.join('setup.cfg').write('[bdist_wheel]\nuniversal=1') - subprocess.check_call( - [sys.executable, 'setup.py', 'bdist_wheel'], - cwd=str(temp_pkg)) - dist_dir = temp_pkg.join('dist') - assert dist_dir.check(dir=1) - wheels = dist_dir.listdir() - assert len(wheels) == 1 - assert wheels[0].basename.startswith('Test-1.0-py2.py3-') - assert wheels[0].ext == '.whl' - -def test_pythontag_in_setup_cfg(temp_pkg): - temp_pkg.join('setup.cfg').write('[bdist_wheel]\npython_tag=py32') - subprocess.check_call( - [sys.executable, 'setup.py', 'bdist_wheel'], - cwd=str(temp_pkg)) - dist_dir = temp_pkg.join('dist') - assert dist_dir.check(dir=1) - wheels = dist_dir.listdir() - assert len(wheels) == 1 - assert wheels[0].basename.startswith('Test-1.0-py32-') - assert wheels[0].ext == '.whl' - -def test_legacy_wheel_section_in_setup_cfg(temp_pkg): - temp_pkg.join('setup.cfg').write('[wheel]\nuniversal=1') - subprocess.check_call( - [sys.executable, 'setup.py', 'bdist_wheel'], - cwd=str(temp_pkg)) - dist_dir = temp_pkg.join('dist') - assert dist_dir.check(dir=1) - wheels = dist_dir.listdir() - assert len(wheels) == 1 - assert wheels[0].basename.startswith('Test-1.0-py2.py3-') - assert wheels[0].ext == '.whl' - -def 
test_plat_name_purepy(temp_pkg): - subprocess.check_call( - [sys.executable, 'setup.py', 'bdist_wheel', '--plat-name=testplat.pure'], - cwd=str(temp_pkg)) - dist_dir = temp_pkg.join('dist') - assert dist_dir.check(dir=1) - wheels = dist_dir.listdir() - assert len(wheels) == 1 - assert wheels[0].basename.endswith('-testplat_pure.whl') - assert wheels[0].ext == '.whl' - -def test_plat_name_ext(temp_ext_pkg): - try: - subprocess.check_call( - [sys.executable, 'setup.py', 'bdist_wheel', '--plat-name=testplat.arch'], - cwd=str(temp_ext_pkg)) - except subprocess.CalledProcessError: - pytest.skip("Cannot compile C Extensions") - dist_dir = temp_ext_pkg.join('dist') - assert dist_dir.check(dir=1) - wheels = dist_dir.listdir() - assert len(wheels) == 1 - assert wheels[0].basename.endswith('-testplat_arch.whl') - assert wheels[0].ext == '.whl' - -def test_plat_name_purepy_in_setupcfg(temp_pkg): - temp_pkg.join('setup.cfg').write('[bdist_wheel]\nplat_name=testplat.pure') - subprocess.check_call( - [sys.executable, 'setup.py', 'bdist_wheel'], - cwd=str(temp_pkg)) - dist_dir = temp_pkg.join('dist') - assert dist_dir.check(dir=1) - wheels = dist_dir.listdir() - assert len(wheels) == 1 - assert wheels[0].basename.endswith('-testplat_pure.whl') - assert wheels[0].ext == '.whl' - -def test_plat_name_ext_in_setupcfg(temp_ext_pkg): - temp_ext_pkg.join('setup.cfg').write('[bdist_wheel]\nplat_name=testplat.arch') - try: - subprocess.check_call( - [sys.executable, 'setup.py', 'bdist_wheel'], - cwd=str(temp_ext_pkg)) - except subprocess.CalledProcessError: - pytest.skip("Cannot compile C Extensions") - dist_dir = temp_ext_pkg.join('dist') - assert dist_dir.check(dir=1) - wheels = dist_dir.listdir() - assert len(wheels) == 1 - assert wheels[0].basename.endswith('-testplat_arch.whl') - assert wheels[0].ext == '.whl' diff --git a/wheel/test/test_tool.py b/wheel/test/test_tool.py deleted file mode 100644 index 078f1ed..0000000 --- a/wheel/test/test_tool.py +++ /dev/null @@ -1,25 +0,0 @@ 
@contextmanager
def environ(key, value):
    """Temporarily set the environment variable *key* to *value*.

    On exit the previous value is restored; if the variable did not
    exist before entering, it is removed again.
    """
    saved = os.environ.get(key)
    try:
        os.environ[key] = value
        yield
    finally:
        if saved is not None:
            os.environ[key] = saved
        else:
            del os.environ[key]
Files not mentioned with - # set_expected_hash() are not checked. - vzf = wheel.install.VerifyingZipFile(sio, 'r') - vzf.set_expected_hash("one", hashlib.sha256(b"first file").digest()) - vzf.set_expected_hash("three", "blurble") - vzf.open("one").read() - vzf.open("two").read() - try: - vzf.open("three").read() - except wheel.install.BadWheelFile: - pass - else: - raise Exception("expected exception 'BadWheelFile()'") - - # In strict mode, VerifyingZipFile requires every read file to be - # mentioned with set_expected_hash(). - vzf.strict = True - try: - vzf.open("two").read() - except wheel.install.BadWheelFile: - pass - else: - raise Exception("expected exception 'BadWheelFile()'") - - vzf.set_expected_hash("two", None) - vzf.open("two").read() - -def test_pop_zipfile(): - sio = StringIO() - zf = wheel.install.VerifyingZipFile(sio, 'w') - zf.writestr("one", b"first file") - zf.writestr("two", b"second file") - zf.close() - - try: - zf.pop() - except RuntimeError: - pass # already closed - else: - raise Exception("expected RuntimeError") - - zf = wheel.install.VerifyingZipFile(sio, 'a') - zf.pop() - zf.close() - - zf = wheel.install.VerifyingZipFile(sio, 'r') - assert len(zf.infolist()) == 1 - -def test_zipfile_timestamp(): - # An environment variable can be used to influence the timestamp on - # TarInfo objects inside the zip. See issue #143. TemporaryDirectory is - # not a context manager under Python 3. 
- with temporary_directory() as tempdir: - for filename in ('one', 'two', 'three'): - path = os.path.join(tempdir, filename) - with codecs.open(path, 'w', encoding='utf-8') as fp: - fp.write(filename + '\n') - zip_base_name = os.path.join(tempdir, 'dummy') - # The earliest date representable in TarInfos, 1980-01-01 - with environ('SOURCE_DATE_EPOCH', '315576060'): - zip_filename = wheel.archive.make_wheelfile_inner( - zip_base_name, tempdir) - with readable_zipfile(zip_filename) as zf: - for info in zf.infolist(): - assert info.date_time[:3] == (1980, 1, 1) - -def test_zipfile_attributes(): - # With the change from ZipFile.write() to .writestr(), we need to manually - # set member attributes. - with temporary_directory() as tempdir: - files = (('foo', 0o644), ('bar', 0o755)) - for filename, mode in files: - path = os.path.join(tempdir, filename) - with codecs.open(path, 'w', encoding='utf-8') as fp: - fp.write(filename + '\n') - os.chmod(path, mode) - zip_base_name = os.path.join(tempdir, 'dummy') - zip_filename = wheel.archive.make_wheelfile_inner( - zip_base_name, tempdir) - with readable_zipfile(zip_filename) as zf: - for filename, mode in files: - info = zf.getinfo(os.path.join(tempdir, filename)) - assert info.external_attr == (mode | 0o100000) << 16 - assert info.compress_type == zipfile.ZIP_DEFLATED diff --git a/wheel/tool/__init__.py b/wheel/tool/__init__.py deleted file mode 100644 index 4c0187b..0000000 --- a/wheel/tool/__init__.py +++ /dev/null @@ -1,359 +0,0 @@ -""" -Wheel command-line utility. -""" - -import os -import hashlib -import sys -import json - -from glob import iglob -from .. 
def require_pkgresources(name):
    """Raise RuntimeError unless pkg_resources can be imported.

    :param name: Command name, used only in the error message.
    """
    try:
        import pkg_resources  # noqa: F401 -- presence check only
    except ImportError:
        msg = "'{0}' needs pkg_resources (part of setuptools).".format(name)
        raise RuntimeError(msg)
Could not retrieve secret key.") - - sys.stdout.write("Trusting {0} to sign and verify all packages.\n".format(vk)) - wk.add_signer('+', vk) - wk.trust('+', vk) - wk.save() - -def sign(wheelfile, replace=False, get_keyring=get_keyring): - """Sign a wheel""" - WheelKeys, keyring = get_keyring() - - ed25519ll = signatures.get_ed25519ll() - - wf = WheelFile(wheelfile, append=True) - wk = WheelKeys().load() - - name = wf.parsed_filename.group('name') - sign_with = wk.signers(name)[0] - sys.stdout.write("Signing {0} with {1}\n".format(name, sign_with[1])) - - vk = sign_with[1] - kr = keyring.get_keyring() - sk = kr.get_password('wheel', vk) - keypair = ed25519ll.Keypair(urlsafe_b64decode(binary(vk)), - urlsafe_b64decode(binary(sk))) - - - record_name = wf.distinfo_name + '/RECORD' - sig_name = wf.distinfo_name + '/RECORD.jws' - if sig_name in wf.zipfile.namelist(): - raise WheelError("Wheel is already signed.") - record_data = wf.zipfile.read(record_name) - payload = {"hash":"sha256=" + native(urlsafe_b64encode(hashlib.sha256(record_data).digest()))} - sig = signatures.sign(payload, keypair) - wf.zipfile.writestr(sig_name, json.dumps(sig, sort_keys=True)) - wf.zipfile.close() - -def unsign(wheelfile): - """ - Remove RECORD.jws from a wheel by truncating the zip file. - - RECORD.jws must be at the end of the archive. The zip file must be an - ordinary archive, with the compressed files and the directory in the same - order, and without any non-zip content after the truncation point. - """ - vzf = VerifyingZipFile(wheelfile, "a") - info = vzf.infolist() - if not (len(info) and info[-1].filename.endswith('/RECORD.jws')): - raise WheelError("RECORD.jws not found at end of archive.") - vzf.pop() - vzf.close() - -def verify(wheelfile): - """Verify a wheel. - - The signature will be verified for internal consistency ONLY and printed. - Wheel's own unpack/install commands verify the manifest against the - signature and file contents. 
- """ - wf = WheelFile(wheelfile) - sig_name = wf.distinfo_name + '/RECORD.jws' - sig = json.loads(native(wf.zipfile.open(sig_name).read())) - verified = signatures.verify(sig) - sys.stderr.write("Signatures are internally consistent.\n") - sys.stdout.write(json.dumps(verified, indent=2)) - sys.stdout.write('\n') - -def unpack(wheelfile, dest='.'): - """Unpack a wheel. - - Wheel content will be unpacked to {dest}/{name}-{ver}, where {name} - is the package name and {ver} its version. - - :param wheelfile: The path to the wheel. - :param dest: Destination directory (default to current directory). - """ - wf = WheelFile(wheelfile) - namever = wf.parsed_filename.group('namever') - destination = os.path.join(dest, namever) - sys.stderr.write("Unpacking to: %s\n" % (destination)) - wf.zipfile.extractall(destination) - wf.zipfile.close() - -def install(requirements, requirements_file=None, - wheel_dirs=None, force=False, list_files=False, - dry_run=False): - """Install wheels. - - :param requirements: A list of requirements or wheel files to install. - :param requirements_file: A file containing requirements to install. - :param wheel_dirs: A list of directories to search for wheels. - :param force: Install a wheel file even if it is not compatible. - :param list_files: Only list the files to install, don't install them. - :param dry_run: Do everything but the actual install. - """ - - # If no wheel directories specified, use the WHEELPATH environment - # variable, or the current directory if that is not set. 
- if not wheel_dirs: - wheelpath = os.getenv("WHEELPATH") - if wheelpath: - wheel_dirs = wheelpath.split(os.pathsep) - else: - wheel_dirs = [ os.path.curdir ] - - # Get a list of all valid wheels in wheel_dirs - all_wheels = [] - for d in wheel_dirs: - for w in os.listdir(d): - if w.endswith('.whl'): - wf = WheelFile(os.path.join(d, w)) - if wf.compatible: - all_wheels.append(wf) - - # If there is a requirements file, add it to the list of requirements - if requirements_file: - # If the file doesn't exist, search for it in wheel_dirs - # This allows standard requirements files to be stored with the - # wheels. - if not os.path.exists(requirements_file): - for d in wheel_dirs: - name = os.path.join(d, requirements_file) - if os.path.exists(name): - requirements_file = name - break - - with open(requirements_file) as fd: - requirements.extend(fd) - - to_install = [] - for req in requirements: - if req.endswith('.whl'): - # Explicitly specified wheel filename - if os.path.exists(req): - wf = WheelFile(req) - if wf.compatible or force: - to_install.append(wf) - else: - msg = ("{0} is not compatible with this Python. " - "--force to install anyway.".format(req)) - raise WheelError(msg) - else: - # We could search on wheel_dirs, but it's probably OK to - # assume the user has made an error. 
- raise WheelError("No such wheel file: {}".format(req)) - continue - - # We have a requirement spec - # If we don't have pkg_resources, this will raise an exception - matches = matches_requirement(req, all_wheels) - if not matches: - raise WheelError("No match for requirement {}".format(req)) - to_install.append(max(matches)) - - # We now have a list of wheels to install - if list_files: - sys.stdout.write("Installing:\n") - - if dry_run: - return - - for wf in to_install: - if list_files: - sys.stdout.write(" {0}\n".format(wf.filename)) - continue - wf.install(force=force) - wf.zipfile.close() - -def install_scripts(distributions): - """ - Regenerate the entry_points console_scripts for the named distribution. - """ - try: - from setuptools.command import easy_install - import pkg_resources - except ImportError: - raise RuntimeError("'wheel install_scripts' needs setuptools.") - - for dist in distributions: - pkg_resources_dist = pkg_resources.get_distribution(dist) - install = get_install_command(dist) - command = easy_install.easy_install(install.distribution) - command.args = ['wheel'] # dummy argument - command.finalize_options() - command.install_egg_scripts(pkg_resources_dist) - -def convert(installers, dest_dir, verbose): - require_pkgresources('wheel convert') - - # Only support wheel convert if pkg_resources is present - from ..wininst2wheel import bdist_wininst2wheel - from ..egg2wheel import egg2wheel - - for pat in installers: - for installer in iglob(pat): - if os.path.splitext(installer)[1] == '.egg': - conv = egg2wheel - else: - conv = bdist_wininst2wheel - if verbose: - sys.stdout.write("{0}... 
".format(installer)) - sys.stdout.flush() - conv(installer, dest_dir) - if verbose: - sys.stdout.write("OK\n") - -def parser(): - p = argparse.ArgumentParser() - s = p.add_subparsers(help="commands") - - def keygen_f(args): - keygen() - keygen_parser = s.add_parser('keygen', help='Generate signing key') - keygen_parser.set_defaults(func=keygen_f) - - def sign_f(args): - sign(args.wheelfile) - sign_parser = s.add_parser('sign', help='Sign wheel') - sign_parser.add_argument('wheelfile', help='Wheel file') - sign_parser.set_defaults(func=sign_f) - - def unsign_f(args): - unsign(args.wheelfile) - unsign_parser = s.add_parser('unsign', help=unsign.__doc__) - unsign_parser.add_argument('wheelfile', help='Wheel file') - unsign_parser.set_defaults(func=unsign_f) - - def verify_f(args): - verify(args.wheelfile) - verify_parser = s.add_parser('verify', help=verify.__doc__) - verify_parser.add_argument('wheelfile', help='Wheel file') - verify_parser.set_defaults(func=verify_f) - - def unpack_f(args): - unpack(args.wheelfile, args.dest) - unpack_parser = s.add_parser('unpack', help='Unpack wheel') - unpack_parser.add_argument('--dest', '-d', help='Destination directory', - default='.') - unpack_parser.add_argument('wheelfile', help='Wheel file') - unpack_parser.set_defaults(func=unpack_f) - - def install_f(args): - install(args.requirements, args.requirements_file, - args.wheel_dirs, args.force, args.list_files) - install_parser = s.add_parser('install', help='Install wheels') - install_parser.add_argument('requirements', nargs='*', - help='Requirements to install.') - install_parser.add_argument('--force', default=False, - action='store_true', - help='Install incompatible wheel files.') - install_parser.add_argument('--wheel-dir', '-d', action='append', - dest='wheel_dirs', - help='Directories containing wheels.') - install_parser.add_argument('--requirements-file', '-r', - help="A file containing requirements to " - "install.") - install_parser.add_argument('--list', '-l', 
default=False, - dest='list_files', - action='store_true', - help="List wheels which would be installed, " - "but don't actually install anything.") - install_parser.set_defaults(func=install_f) - - def install_scripts_f(args): - install_scripts(args.distributions) - install_scripts_parser = s.add_parser('install-scripts', help='Install console_scripts') - install_scripts_parser.add_argument('distributions', nargs='*', - help='Regenerate console_scripts for these distributions') - install_scripts_parser.set_defaults(func=install_scripts_f) - - def convert_f(args): - convert(args.installers, args.dest_dir, args.verbose) - convert_parser = s.add_parser('convert', help='Convert egg or wininst to wheel') - convert_parser.add_argument('installers', nargs='*', help='Installers to convert') - convert_parser.add_argument('--dest-dir', '-d', default=os.path.curdir, - help="Directory to store wheels (default %(default)s)") - convert_parser.add_argument('--verbose', '-v', action='store_true') - convert_parser.set_defaults(func=convert_f) - - def version_f(args): - from .. import __version__ - sys.stdout.write("wheel %s\n" % __version__) - version_parser = s.add_parser('version', help='Print version and exit') - version_parser.set_defaults(func=version_f) - - def help_f(args): - p.print_help() - help_parser = s.add_parser('help', help='Show this help') - help_parser.set_defaults(func=help_f) - - return p - -def main(): - p = parser() - args = p.parse_args() - if not hasattr(args, 'func'): - p.print_help() - else: - # XXX on Python 3.3 we get 'args has no func' rather than short help. 
try:
    # Python 2: text is ``unicode`` and ``str`` is already a byte string.
    unicode

    def native(s):
        """Return *s* unchanged (``str`` is already the native type here)."""
        return s

    def binary(s):
        """Return *s* as bytes, ASCII-encoding text input."""
        if isinstance(s, unicode):
            return s.encode('ascii')
        return s
except NameError:
    # Python 3: ``unicode`` is gone; the native string type is ``str``.
    def native(s):
        """Return *s* as str, ASCII-decoding bytes input."""
        if isinstance(s, bytes):
            return s.decode('ascii')
        return s

    def binary(s):
        """Return *s* as bytes, ASCII-encoding str input.

        Bug fix: the original Python 3 branch had no trailing ``return s``,
        so passing bytes silently returned ``None`` instead of the bytes
        themselves — unlike the symmetric Python 2 branch above.
        """
        if isinstance(s, str):
            return s.encode('ascii')
        return s
class OrderedDefaultDict(OrderedDict):
    """An OrderedDict that, like defaultdict, fabricates missing values.

    The first positional argument (if given) is the factory callable
    used to create values for missing keys; any remaining arguments are
    forwarded to OrderedDict.
    """

    def __init__(self, *args, **kwargs):
        factory = None
        if args:
            factory = args[0]
            if factory is not None and not callable(factory):
                raise TypeError('first argument must be callable or None')
            args = args[1:]
        self.default_factory = factory
        super(OrderedDefaultDict, self).__init__(*args, **kwargs)

    def __missing__(self, key):
        # With no factory, behave exactly like a plain dict lookup miss.
        if self.default_factory is None:
            raise KeyError(key)
        value = self.default_factory()
        self[key] = value
        return value
def parse_info(wininfo_name, egginfo_name):
    """Extract (name, version, pyversion, arch) metadata from filenames.

    ``wininfo_name`` is the bdist_wininst installer filename, formatted
    as ``name-ver.arch(-pyver).exe``; ``egginfo_name`` is the name of
    the egg-info directory embedded in the zipfile, if any.

    Name and version are preferred from the egg-info data when present,
    since those come straight from the mandatory distutils metadata.
    The architecture always comes from the installer filename (it is not
    in the egg-info data), as does the Python tag, which defaults to
    ``py2.py3`` when the filename carries no version-specific tag.

    Returns a dict with keys ``name``, ``ver``, ``arch`` and ``pyver``.
    """
    egginfo = None
    if egginfo_name:
        egginfo = egg_info_re.search(egginfo_name)
        if not egginfo:
            raise ValueError("Egg info filename %s is not valid" %
                             (egginfo_name,))

    # 1. Distribution name runs up to the first '-' of the installer name.
    dist_name, sep, rest = wininfo_name.partition('-')
    if not sep:
        raise ValueError("Installer filename %s is not valid" %
                         (wininfo_name,))

    # Drop the trailing '.exe', then peel the optional Python tag off the
    # end (it follows the last '-' and starts with 'py').
    rest = rest[:-4]
    remainder, sep, pyver = rest.rpartition('-')
    if sep and pyver.startswith('py'):
        rest = remainder
        pyver = pyver.replace('.', '')
    else:
        # No tag in the filename: the wininst format cannot tell us more,
        # so assume the content works on both Python 2 and 3 (the user can
        # always rename the wheel to be more restrictive if needed).
        pyver = 'py2.py3'

    # 2. What remains is '<version>.<architecture>'.
    dist_ver, sep, arch = rest.rpartition('.')
    if not sep:
        raise ValueError("Installer filename %s is not valid" %
                         (wininfo_name,))

    # 3. Prefer the mandatory distutils metadata for name and version.
    if egginfo:
        dist_name = egginfo.group('name')
        dist_ver = egginfo.group('ver')

    return dict(name=dist_name, ver=dist_ver, arch=arch, pyver=pyver)
- members = [] - egginfo_name = '' - for zipinfo in bdw.infolist(): - key, basename = zipinfo.filename.split('/', 1) - key = key.lower() - basepath = paths.get(key, None) - if basepath is None: - basepath = datadir + key.lower() + '/' - oldname = zipinfo.filename - newname = basepath + basename - zipinfo.filename = newname - del bdw.NameToInfo[oldname] - bdw.NameToInfo[newname] = zipinfo - # Collect member names, but omit '' (from an entry like "PLATLIB/" - if newname: - members.append(newname) - # Remember egg-info name for the egg2dist call below - if not egginfo_name: - if newname.endswith('.egg-info'): - egginfo_name = newname - elif '.egg-info/' in newname: - egginfo_name, sep, _ = newname.rpartition('/') - dir = tempfile.mkdtemp(suffix="_b2w") - bdw.extractall(dir, members) - - # egg2wheel - abi = 'none' - pyver = info['pyver'] - arch = (info['arch'] or 'any').replace('.', '_').replace('-', '_') - # Wininst installers always have arch even if they are not - # architecture-specific (because the format itself is). - # So, assume the content is architecture-neutral if root is purelib. - if root_is_purelib: - arch = 'any' - # If the installer is architecture-specific, it's almost certainly also - # CPython-specific. 
- if arch != 'any': - pyver = pyver.replace('py', 'cp') - wheel_name = '-'.join(( - dist_info, - pyver, - abi, - arch - )) - if root_is_purelib: - bw = wheel.bdist_wheel.bdist_wheel(distutils.dist.Distribution()) - else: - bw = _bdist_wheel_tag(distutils.dist.Distribution()) - - bw.root_is_pure = root_is_purelib - bw.python_tag = pyver - bw.plat_name_supplied = True - bw.plat_name = info['arch'] or 'any' - - if not root_is_purelib: - bw.full_tag_supplied = True - bw.full_tag = (pyver, abi, arch) - - dist_info_dir = os.path.join(dir, '%s.dist-info' % dist_info) - bw.egg2dist(os.path.join(dir, egginfo_name), dist_info_dir) - bw.write_wheelfile(dist_info_dir, generator='wininst2wheel') - bw.write_record(dir, dist_info_dir) - - archive_wheelfile(os.path.join(dest_dir, wheel_name), dir) - rmtree(dir) - - -class _bdist_wheel_tag(wheel.bdist_wheel.bdist_wheel): - # allow the client to override the default generated wheel tag - # The default bdist_wheel implementation uses python and abi tags - # of the running python process. This is not suitable for - # generating/repackaging prebuild binaries. - - full_tag_supplied = False - full_tag = None # None or a (pytag, soabitag, plattag) triple - - def get_tag(self): - if self.full_tag_supplied and self.full_tag is not None: - return self.full_tag - else: - return super(_bdist_wheel_tag, self).get_tag() - - -def main(): - parser = ArgumentParser() - parser.add_argument('installers', nargs='*', help="Installers to convert") - parser.add_argument('--dest-dir', '-d', default=os.path.curdir, - help="Directory to store wheels (default %(default)s)") - parser.add_argument('--verbose', '-v', action='store_true') - args = parser.parse_args() - for pat in args.installers: - for installer in iglob(pat): - if args.verbose: - sys.stdout.write("{0}... 
".format(installer)) - bdist_wininst2wheel(installer, args.dest_dir) - if args.verbose: - sys.stdout.write("OK\n") - -if __name__ == "__main__": - main() diff --git a/wscript b/wscript deleted file mode 100644 index c1ee262..0000000 --- a/wscript +++ /dev/null @@ -1,133 +0,0 @@ -APPNAME = 'wheel' -WHEEL_TAG = 'py2.py3-none-any' -VERSION = '0.24.0' - -top = '.' -out = 'build' - -from waflib import Utils - -def options(opt): - opt.load('python') - -def configure(ctx): - ctx.load('python') - ctx.check_python_version() - -def build(bld): - bld(features='py', - source=bld.path.ant_glob('wheel/**/*.py', excl="wheel/test"), - install_from='.') - - # build the wheel: - - DIST_INFO = '%s-%s.dist-info' % (APPNAME, VERSION) - - node = bld.path.get_bld().make_node(DIST_INFO) - if not os.path.exists(node.abspath()): - os.mkdir(node.abspath()) - - metadata = node.make_node('METADATA') - - import codecs, string - README = codecs.open('README.txt', encoding='utf8').read() - CHANGES = codecs.open('CHANGES.txt', encoding='utf8').read() - METADATA = codecs.open('METADATA.in', encoding='utf8').read() - METADATA = string.Template(METADATA).substitute( - VERSION=VERSION, - DESCRIPTION='\n\n'.join((README, CHANGES)) - ) - - bld(source='METADATA.in', - target=metadata, - rule=lambda tsk: Utils.writef(tsk.outputs[0].abspath(), METADATA)) - - wheel = node.make_node('WHEEL') - - WHEEL="""Wheel-Version: 1.0 -Generator: waf (0.0.1) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any -""" - bld(target=wheel, - rule=lambda tsk: Utils.writef(tsk.outputs[0].abspath(), WHEEL)) - - # globs don't work, since here the files may not exist: - bld.install_files('${PYTHONDIR}/'+DIST_INFO, [DIST_INFO+'/WHEEL', DIST_INFO+'/METADATA']) - - # only if entry_points.txt exists: - bld.install_files('${PYTHONDIR}/'+DIST_INFO, ['entry_points.txt']) - -import shutil, os, base64 - -def urlsafe_b64encode(data): - """urlsafe_b64encode without padding""" - return base64.urlsafe_b64encode(data).rstrip(b'=') - 
-from waflib import Scripting -class WheelDist(Scripting.Dist): - def manifest(self): - """ - Add the wheel manifest. - """ - import hashlib - files = self.get_files() - lines = [] - for f in files: - print("File: %s" % f.relpath()) - size = os.stat(f.abspath()).st_size - digest = hashlib.sha256(open(f.abspath(), 'rb').read()).digest() - digest = "sha256="+(urlsafe_b64encode(digest).decode('ascii')) - lines.append("%s,%s,%s" % (f.path_from(self.base_path).replace(',', ',,'), digest, size)) - - record_path = '%s-%s.dist-info/RECORD' % (APPNAME, VERSION) - lines.append(record_path+',,') - RECORD = '\n'.join(lines) - - import zipfile - zip = zipfile.ZipFile(self.get_arch_name(), 'a') - zip.writestr(record_path, RECORD, zipfile.ZIP_DEFLATED) - zip.close() - -from waflib import Build -class package_cls(Build.InstallContext): - cmd = 'package' - fun = 'build' - - def init_dirs(self, *k, **kw): - super(package_cls, self).init_dirs(*k, **kw) - self.tmp = self.bldnode.make_node('package_tmp_dir') - try: - shutil.rmtree(self.tmp.abspath()) - except: - pass - if os.path.exists(self.tmp.abspath()): - self.fatal('Could not remove the temporary directory %r' % self.tmp) - self.tmp.mkdir() - self.options.destdir = self.tmp.abspath() - - def execute(self, *k, **kw): - back = self.options.destdir - try: - super(package_cls, self).execute(*k, **kw) - finally: - self.options.destdir = back - - files = self.tmp.ant_glob('**', excl=" **/*.pyc **/*.pyo") - - # we could mess with multiple inheritance but this is probably unnecessary - ctx = WheelDist() - ctx.algo = 'zip' - ctx.arch_name = '%s-%s-%s.whl' % (APPNAME, VERSION, WHEEL_TAG) - ctx.files = files - ctx.tar_prefix = '' - ctx.base_path = self.tmp - ctx.base_name = '' - ctx.archive() - - # add manifest... - ctx.manifest() - - shutil.rmtree(self.tmp.abspath()) - |