2013-08-22 06:38:23 +02:00
|
|
|
from __future__ import absolute_import
|
|
|
|
|
Add checks against requirements-file-dwelling hashes for most kinds of packages. Close #1175.
* Add --require-hashes option. This is handy in deployment scripts to force application authors to hash their requirements. It is also a convenient way to get pip to show computed hashes for a virgin, unhashed requirements file. Eventually, additions to `pip freeze` should fill a superset of this use case.
* In --require-hashes mode, at least one hash is required to match for each requirement.
* Option-based requirements (--sha256=...) turn on --require-hashes mode implicitly.
* Internet-derived URL-based hashes are "necessary but not sufficient": they do not satisfy --require-hashes mode when they match, but they are still used to guard against transmission errors.
* Other URL-based requirements (#md5=...) are treated just like flag-based ones, except they don't turn on --require-hashes.
* Complain informatively, with the most devastating errors first so you don't chase your tail all day only to run up against a brick wall at the end. This also means we don't complain that a hash is missing, only for the user to find, after fixing it, that we have no idea how to even compute a hash for that type of requirement.
* Complain about unpinned requirements when hash-checking mode is on, lest they cause the user surprise later.
* Complain about missing hashes.
* Complain about requirement types we don't know how to hash (like VCS ones and local dirs).
* Have InstallRequirement keep its original Link around (original_link) so we can differentiate between URL hashes from requirements files and ones downloaded from the (untrustworthy) internet.
* Remove test_download_hashes, which is obsolete. Similar coverage is provided in test_utils.TestHashes and the various hash cases in test_req.py.
2015-09-09 19:01:53 +02:00
|
|
|
from contextlib import contextmanager
|
2010-06-03 04:25:26 +02:00
|
|
|
import os
|
|
|
|
import sys
|
2013-02-08 07:17:27 +01:00
|
|
|
import re
|
2010-06-03 04:25:26 +02:00
|
|
|
import textwrap
|
2011-03-23 02:41:22 +01:00
|
|
|
import site
|
2017-08-07 18:03:43 +02:00
|
|
|
import shutil
|
2013-08-21 11:16:07 +02:00
|
|
|
|
|
|
|
import scripttest
|
2017-08-10 16:02:40 +02:00
|
|
|
import six
|
2013-08-22 07:54:29 +02:00
|
|
|
import virtualenv
|
2011-03-23 02:41:22 +01:00
|
|
|
|
2017-03-24 19:21:22 +01:00
|
|
|
from tests.lib.path import Path, curdir
|
2013-08-21 11:16:07 +02:00
|
|
|
|
2013-08-23 13:09:53 +02:00
|
|
|
# Absolute path to the bundled test data directory ("data" beside this
# package's parent).
DATA_DIR = Path(__file__).folder.folder.join("data").abspath
# Absolute path to the repository checkout root.
SRC_DIR = Path(__file__).abspath.folder.folder.folder

# "major.minor" of the running interpreter.  Built from sys.version_info
# rather than sys.version[:3], which silently truncates two-digit minor
# versions (e.g. "3.10" would become "3.1").
pyversion = '{0}.{1}'.format(*sys.version_info)
pyversion_tuple = sys.version_info
|
2013-08-21 11:16:07 +02:00
|
|
|
|
2010-04-26 08:52:46 +02:00
|
|
|
|
2010-06-10 00:01:01 +02:00
|
|
|
def path_to_url(path):
    """Return a ``file:`` URI for *path*.

    The path is made absolute and normalized first; path parts are not
    percent-quoted.

    (adapted from pip.util)
    """
    absolute = os.path.normpath(os.path.abspath(path))
    drive, remainder = os.path.splitdrive(absolute)
    url_path = '/'.join(remainder.split(os.path.sep))
    # A drive letter (Windows) forces the three-slash authority form.
    prefix = 'file:///' + drive if drive else 'file://'
    return prefix + url_path
|
2010-06-10 00:01:01 +02:00
|
|
|
|
2013-08-23 13:09:53 +02:00
|
|
|
|
2017-08-10 16:02:40 +02:00
|
|
|
def virtualenv_lib_path(venv_home, venv_lib):
    """Return the library directory to inspect for a virtualenv.

    Workaround for https://github.com/pypa/virtualenv/issues/306: on PyPy
    the libraries live under ``<home>/lib-python/<version>`` rather than
    the lib path virtualenv reports.  On CPython *venv_lib* is returned
    unchanged.
    """
    if not hasattr(sys, "pypy_version_info"):
        return venv_lib
    # PyPy3 uses just the major version; PyPy2 uses major.minor.
    template = '{0}' if six.PY3 else '{0}.{1}'
    return os.path.join(
        venv_home, 'lib-python', template.format(*sys.version_info))
|
|
|
|
|
|
|
|
|
2017-06-05 13:48:23 +02:00
|
|
|
def create_file(path, contents=None):
    """Create a file at *path* holding *contents*.

    Missing parent directories are created first.  When *contents* is
    None a single newline is written instead.
    """
    from pip._internal.utils.misc import ensure_dir

    ensure_dir(os.path.dirname(path))
    text = "\n" if contents is None else contents
    with open(path, "w") as handle:
        handle.write(text)
|
|
|
|
|
|
|
|
|
2013-08-23 13:09:53 +02:00
|
|
|
class TestData(object):
    """A bundle of pre-created test data, copied per test.

    A pristine copy of the shared test data is made into a root location
    that is test specific.  Running tests concurrently against the
    shared originals generates errors because the related tooling uses
    the directory as a work space, so two processes end up trampling
    over each other.  Copying everything into a private directory and
    operating on the copy avoids that.
    """

    def __init__(self, root, source=None):
        self.source = source or DATA_DIR
        self.root = Path(root).abspath

    @classmethod
    def copy(cls, root):
        """Build a TestData rooted at *root* with the data already copied."""
        data = cls(root)
        data.reset()
        return data

    def reset(self):
        """Throw away any existing copy and clone the pristine data anew."""
        self.root.rmtree()
        self.source.copytree(self.root)

    @property
    def packages(self):
        """Path of the primary local packages directory."""
        return self.root.join("packages")

    @property
    def packages2(self):
        """Path of the second local packages directory."""
        return self.root.join("packages2")

    @property
    def packages3(self):
        """Path of the third local packages directory."""
        return self.root.join("packages3")

    @property
    def src(self):
        """Path of the source checkouts directory."""
        return self.root.join("src")

    @property
    def indexes(self):
        """Path of the local package indexes directory."""
        return self.root.join("indexes")

    @property
    def reqfiles(self):
        """Path of the requirements-files directory."""
        return self.root.join("reqfiles")

    @property
    def find_links(self):
        """``file:`` URL for the primary packages directory."""
        return path_to_url(self.packages)

    @property
    def find_links2(self):
        """``file:`` URL for the second packages directory."""
        return path_to_url(self.packages2)

    @property
    def find_links3(self):
        """``file:`` URL for the third packages directory."""
        return path_to_url(self.packages3)

    def index_url(self, index="simple"):
        """``file:`` URL for the named local index (default: "simple")."""
        return path_to_url(self.root.join("indexes", index))
|
2010-06-09 01:58:14 +02:00
|
|
|
|
2010-06-03 04:25:26 +02:00
|
|
|
|
2010-04-28 22:55:10 +02:00
|
|
|
class TestFailure(AssertionError):
    """Raised when a pip-test "assertion" fails during testing."""
|
|
|
|
|
|
|
|
|
|
|
|
class TestPipResult(object):
    """Wrapper around a scripttest result with pip-specific assertions.

    Attribute access is forwarded to the wrapped result object; on
    Windows, stdout/stderr/str() are normalized to Unix line endings.
    """

    def __init__(self, impl, verbose=False):
        """Wrap *impl* (a scripttest result); echo its output if *verbose*."""
        self._impl = impl

        if verbose:
            print(self.stdout)
            if self.stderr:
                print('======= stderr ========')
                print(self.stderr)
                print('=======================')

    def __getattr__(self, attr):
        # Forward everything else (files_created, files_updated, ...) to
        # the underlying scripttest result.
        return getattr(self._impl, attr)

    if sys.platform == 'win32':

        @property
        def stdout(self):
            return self._impl.stdout.replace('\r\n', '\n')

        @property
        def stderr(self):
            return self._impl.stderr.replace('\r\n', '\n')

        def __str__(self):
            return str(self._impl).replace('\r\n', '\n')
    else:
        # Python doesn't automatically forward __str__ through __getattr__

        def __str__(self):
            return str(self._impl)

    def assert_installed(self, pkg_name, editable=True, with_files=None,
                         without_files=None, without_egg_link=False,
                         use_user_site=False, sub_dir=False):
        """Assert that this result installed *pkg_name* as expected.

        :param pkg_name: name of the package that should have been
            installed.
        :param editable: when True, expect an editable install (a src
            checkout plus an egg-link file); otherwise a plain
            site-packages install (which implies ``without_egg_link``).
        :param with_files: paths relative to the package directory that
            must appear in ``files_created``.
        :param without_files: paths relative to the package directory
            that must NOT appear (``curdir`` means the package directory
            itself must be absent).
        :param without_egg_link: expect no egg-link file to be created.
        :param use_user_site: inspect the user site instead of the
            virtualenv's site-packages.
        :param sub_dir: optional subdirectory of the src checkout that
            actually holds the package.
        :raises TestFailure: when any expectation is violated.
        """
        # Use None sentinels instead of mutable default arguments, which
        # would be shared across calls.
        with_files = with_files or []
        without_files = without_files or []
        e = self.test_env

        if editable:
            pkg_dir = e.venv / 'src' / pkg_name.lower()
            # If package was installed in a sub directory
            if sub_dir:
                pkg_dir = pkg_dir / sub_dir
        else:
            # A non-editable install never creates an egg-link.
            without_egg_link = True
            pkg_dir = e.site_packages / pkg_name

        if use_user_site:
            egg_link_path = e.user_site / pkg_name + '.egg-link'
        else:
            egg_link_path = e.site_packages / pkg_name + '.egg-link'

        if without_egg_link:
            if egg_link_path in self.files_created:
                raise TestFailure(
                    'unexpected egg link file created: %r\n%s' %
                    (egg_link_path, self)
                )
        else:
            if egg_link_path not in self.files_created:
                raise TestFailure(
                    'expected egg link file missing: %r\n%s' %
                    (egg_link_path, self)
                )

            egg_link_file = self.files_created[egg_link_path]

            # FIXME: I don't understand why there's a trailing . here
            if not (egg_link_file.bytes.endswith('\n.') and
                    egg_link_file.bytes[:-2].endswith(pkg_dir)):
                raise TestFailure(textwrap.dedent(u'''\
                Incorrect egg_link file %r
                Expected ending: %r
                ------- Actual contents -------
                %s
                -------------------------------''' % (
                    egg_link_file,
                    pkg_dir + '\n.',
                    repr(egg_link_file.bytes))
                ))

        if use_user_site:
            pth_file = e.user_site / 'easy-install.pth'
        else:
            pth_file = e.site_packages / 'easy-install.pth'

        if (pth_file in self.files_updated) == without_egg_link:
            raise TestFailure('%r unexpectedly %supdated by install' % (
                pth_file, (not without_egg_link and 'not ' or '')))

        if (pkg_dir in self.files_created) == (curdir in without_files):
            raise TestFailure(textwrap.dedent('''\
            expected package directory %r %sto be created
            actually created:
            %s
            ''') % (
                pkg_dir,
                (curdir in without_files and 'not ' or ''),
                sorted(self.files_created.keys())))

        for f in with_files:
            if not (pkg_dir / f).normpath in self.files_created:
                raise TestFailure(
                    'Package directory %r missing expected content %r' %
                    (pkg_dir, f)
                )

        for f in without_files:
            if (pkg_dir / f).normpath in self.files_created:
                # Was '%f', which raised TypeError on the string path
                # instead of reporting the actual failure.
                raise TestFailure(
                    'Package directory %r has unexpected content %r' %
                    (pkg_dir, f)
                )
|
2010-06-03 04:25:26 +02:00
|
|
|
|
2010-04-28 22:55:10 +02:00
|
|
|
|
2013-08-22 06:38:23 +02:00
|
|
|
class PipTestEnvironment(scripttest.TestFileEnvironment):
    """
    A specialized TestFileEnvironment for testing pip
    """

    #
    # Attribute naming convention
    # ---------------------------
    #
    # Instances of this class have many attributes representing paths
    # in the filesystem. To keep things straight, absolute paths have
    # a name of the form xxxx_path and relative paths have a name that
    # does not end in '_path'.

    # Executable suffix for the current platform ('.exe' on Windows).
    exe = sys.platform == 'win32' and '.exe' or ''
    # When True, run() echoes each command and its output.
    verbose = False

    def __init__(self, base_path, *args, **kwargs):
        # Set up a test environment rooted at base_path around the
        # virtualenv passed as the required 'virtualenv' keyword argument.
        # Make our base_path a test.lib.path.Path object
        base_path = Path(base_path)

        # Store paths related to the virtual environment
        _virtualenv = kwargs.pop("virtualenv")
        path_locations = virtualenv.path_locations(_virtualenv)
        # Make sure we have test.lib.path.Path objects
        venv, lib, include, bin = map(Path, path_locations)
        self.venv_path = venv
        self.lib_path = virtualenv_lib_path(venv, lib)
        self.include_path = include
        self.bin_path = bin

        # PyPy keeps site-packages directly under the venv home.
        if hasattr(sys, "pypy_version_info"):
            self.site_packages_path = self.venv_path.join("site-packages")
        else:
            self.site_packages_path = self.lib_path.join("site-packages")

        # A fake "user" base inside the venv, so --user installs are
        # isolated from the real user site.
        self.user_base_path = self.venv_path.join("user")
        self.user_bin_path = self.user_base_path.join(
            self.bin_path - self.venv_path
        )
        self.user_site_path = self.venv_path.join(
            "user",
            site.USER_SITE[len(site.USER_BASE) + 1:],
        )

        # Create a Directory to use as a scratch pad
        self.scratch_path = base_path.join("scratch").mkdir()

        # Set our default working directory
        kwargs.setdefault("cwd", self.scratch_path)

        # Setup our environment
        environ = kwargs.get("environ")
        if environ is None:
            environ = os.environ.copy()

        # Put the venv's bin dir first on PATH so its pip/python win.
        environ["PATH"] = Path.pathsep.join(
            [self.bin_path] + [environ.get("PATH", [])],
        )
        environ["PYTHONUSERBASE"] = self.user_base_path
        # Writing bytecode can mess up updated file detection
        environ["PYTHONDONTWRITEBYTECODE"] = "1"
        kwargs["environ"] = environ

        # Call the TestFileEnvironment __init__
        super(PipTestEnvironment, self).__init__(base_path, *args, **kwargs)

        # Expand our absolute path directories into relative
        for name in ["base", "venv", "lib", "include", "bin", "site_packages",
                     "user_base", "user_site", "user_bin", "scratch"]:
            real_name = "%s_path" % name
            setattr(self, name, getattr(self, real_name) - self.base_path)

        # Make sure temp_path is a Path object
        self.temp_path = Path(self.temp_path)
        # Ensure the tmp dir exists, things break horribly if it doesn't
        self.temp_path.mkdir()

        # create easy-install.pth in user_site, so we always have it updated
        # instead of created
        self.user_site_path.makedirs()
        self.user_site_path.join("easy-install.pth").touch()

    def _ignore_file(self, fn):
        """Exclude compiled bytecode (__pycache__/.pyc) from file diffs."""
        if fn.endswith('__pycache__') or fn.endswith(".pyc"):
            result = True
        else:
            result = super(PipTestEnvironment, self)._ignore_file(fn)
        return result

    def run(self, *args, **kw):
        """Run a command in this environment, returning a TestPipResult.

        ``cwd`` defaults to the scratch directory; ``run_from`` is a
        deprecated alias that must not be combined with ``cwd``.
        """
        if self.verbose:
            print('>> running %s %s' % (args, kw))
        cwd = kw.pop('cwd', None)
        run_from = kw.pop('run_from', None)
        assert not cwd or not run_from, "Don't use run_from; it's going away"
        cwd = cwd or run_from or self.cwd
        return TestPipResult(
            super(PipTestEnvironment, self).run(cwd=cwd, *args, **kw),
            verbose=self.verbose,
        )

    def pip(self, *args, **kwargs):
        """Run ``pip`` with the given arguments, returning a TestPipResult."""
        # On old versions of Python, urllib3/requests will raise a warning
        # about the lack of an SSLContext. Expect it when running commands
        # that will touch the outside world.
        if (pyversion_tuple < (2, 7, 9) and
                args and args[0] in ('search', 'install', 'download')):
            kwargs['expect_stderr'] = True
        # Python 3.3 is deprecated and we emit a warning on it.
        if pyversion_tuple[:2] == (3, 3):
            kwargs['expect_stderr'] = True

        return self.run("pip", *args, **kwargs)

    def pip_install_local(self, *args, **kwargs):
        """Run ``pip install`` against the bundled packages dir, no index."""
        return self.pip(
            "install", "--no-index",
            "--find-links", path_to_url(os.path.join(DATA_DIR, "packages")),
            *args, **kwargs
        )
|
2008-10-16 00:02:57 +02:00
|
|
|
|
2010-06-03 04:25:26 +02:00
|
|
|
|
2009-04-01 00:17:08 +02:00
|
|
|
# FIXME ScriptTest does something similar, but only within a single
|
|
|
|
# ProcResult; this generalizes it so states can be compared across
|
|
|
|
# multiple commands. Maybe should be rolled into ScriptTest?
|
2009-04-06 19:59:20 +02:00
|
|
|
def diff_states(start, end, ignore=None):
    """
    Compute the difference of two "filesystem states", each a dict of
    FoundFile and FoundDir objects keyed by path.

    Returns a dict with three keys:

    ``deleted``
        Entries present only in the start state.

    ``created``
        Entries present only in the end state.

    ``updated``
        Files whose size changed (FIXME not entirely reliable, but
        comparing contents is not possible because FoundFile.bytes is
        lazy, and comparing mtime doesn't help if we want to know if a
        file has been returned to its earlier state).

    Ignores mtime and other file attributes; only presence/absence and
    size are considered.
    """
    ignored = ignore or []

    def _is_ignored(path):
        # A path is ignored when it equals an ignore entry exactly or
        # lives underneath one (ignoring a directory covers its contents).
        for prefix in ignored:
            if path == prefix:
                return True
            if path.startswith(prefix.rstrip(os.path.sep) + os.path.sep):
                return True
        return False

    start_keys = set(k for k in start if not _is_ignored(k))
    end_keys = set(k for k in end if not _is_ignored(k))

    deleted = dict((k, start[k]) for k in start_keys - end_keys)
    created = dict((k, end[k]) for k in end_keys - start_keys)
    updated = dict(
        (k, end[k])
        for k in start_keys & end_keys
        if start[k].size != end[k].size
    )
    return dict(deleted=deleted, created=created, updated=updated)
|
|
|
|
|
2010-06-03 04:25:26 +02:00
|
|
|
|
|
|
|
def assert_all_changes(start_state, end_state, expected_changes):
    """
    Fail (raise TestFailure) if anything changed between the two states
    that is not listed in expected_changes.

    start_state is either a dict mapping paths to
    scripttest.[FoundFile|FoundDir] objects or a TestPipResult whose
    files_before we'll test. end_state is either a similar dict or a
    TestPipResult whose files_after we'll test.

    Note: listing a directory means anything below
    that directory can be expected to have changed.
    """
    __tracebackhide__ = True

    if isinstance(start_state, TestPipResult):
        start_files = start_state.files_before
    else:
        start_files = start_state
    if isinstance(end_state, TestPipResult):
        end_files = end_state.files_after
    else:
        end_files = end_state

    diff = diff_states(start_files, end_files, ignore=expected_changes)
    # Any non-empty deleted/created/updated bucket is an unexpected change.
    if any(diff.values()):
        raise TestFailure('Unexpected changes:\n' + '\n'.join(
            k + ': ' + ', '.join(v.keys()) for k, v in diff.items()))

    # Don't throw away this potentially useful information
    return diff
|
|
|
|
|
2014-01-28 15:17:51 +01:00
|
|
|
|
2013-09-27 20:42:05 +02:00
|
|
|
def _create_test_package_with_subdirectory(script, subdirectory):
    """Create a git-committed 'version_pkg' project containing a nested one.

    The top-level project is ``version_pkg``; *subdirectory* (relative to
    it) receives a second project, ``version_subpkg``, with its own
    setup.py.  Everything is committed in a single initial git commit.

    Returns the path of the top-level project (the repository root).
    """
    script.scratch_path.join("version_pkg").mkdir()
    version_pkg_path = script.scratch_path / 'version_pkg'
    version_pkg_path.join("version_pkg.py").write(textwrap.dedent("""
        def main():
            print('0.1')
        """))
    version_pkg_path.join("setup.py").write(
        textwrap.dedent("""
            from setuptools import setup, find_packages
            setup(name='version_pkg',
                  version='0.1',
                  packages=find_packages(),
                  py_modules=['version_pkg'],
                  entry_points=dict(console_scripts=['version_pkg=version_pkg:main']))
        """))

    subdirectory_path = version_pkg_path.join(subdirectory)
    subdirectory_path.mkdir()
    subdirectory_path.join('version_subpkg.py').write(textwrap.dedent("""
        def main():
            print('0.1')
        """))

    # NOTE: the nested project reuses the 'version_pkg' console-script
    # name, pointing it at version_subpkg instead.
    subdirectory_path.join('setup.py').write(
        textwrap.dedent("""
            from setuptools import setup, find_packages
            setup(name='version_subpkg',
                  version='0.1',
                  packages=find_packages(),
                  py_modules=['version_subpkg'],
                  entry_points=dict(console_scripts=['version_pkg=version_subpkg:main']))
        """))

    # One initial commit so VCS-based installs have a revision to fetch.
    script.run('git', 'init', cwd=version_pkg_path)
    script.run('git', 'add', '.', cwd=version_pkg_path)
    script.run(
        'git', 'commit', '-q',
        '--author', 'pip <pypa-dev@googlegroups.com>',
        '-am', 'initial version', cwd=version_pkg_path
    )

    return version_pkg_path
|
2010-06-03 04:25:26 +02:00
|
|
|
|
2014-01-28 15:17:51 +01:00
|
|
|
|
2015-11-22 13:00:14 +01:00
|
|
|
def _create_test_package_with_srcdir(script, name='version_pkg', vcs='git'):
    """Create a VCS-committed project whose setup.py lives in a subdir.

    Layout: ``<name>/subdir/setup.py`` with the package sources under
    ``<name>/subdir/src/pkg`` (via ``package_dir={'': 'src'}``).

    Returns the committed project path; for svn this is the checked-out
    working copy (see _vcs_add).
    """
    script.scratch_path.join(name).mkdir()
    version_pkg_path = script.scratch_path / name
    subdir_path = version_pkg_path.join('subdir')
    subdir_path.mkdir()
    src_path = subdir_path.join('src')
    src_path.mkdir()
    pkg_path = src_path.join('pkg')
    pkg_path.mkdir()
    pkg_path.join('__init__.py').write('')
    subdir_path.join("setup.py").write(textwrap.dedent("""
        from setuptools import setup, find_packages
        setup(
            name='{name}',
            version='0.1',
            packages=find_packages(),
            package_dir={{'': 'src'}},
        )
    """.format(name=name)))
    return _vcs_add(script, version_pkg_path, vcs)
|
|
|
|
|
|
|
|
|
2015-03-19 11:54:09 +01:00
|
|
|
def _create_test_package(script, name='version_pkg', vcs='git'):
    """Create a minimal committed project named *name* in the scratch dir.

    The project contains one module, a setup.py, and a console-script
    entry point, all committed with the given *vcs* (see _vcs_add).

    Returns the project path; for svn this is the checked-out working
    copy rather than the original directory.
    """
    script.scratch_path.join(name).mkdir()
    version_pkg_path = script.scratch_path / name
    version_pkg_path.join("%s.py" % name).write(textwrap.dedent("""
        def main():
            print('0.1')
        """))
    version_pkg_path.join("setup.py").write(textwrap.dedent("""
        from setuptools import setup, find_packages
        setup(
            name='{name}',
            version='0.1',
            packages=find_packages(),
            py_modules=['{name}'],
            entry_points=dict(console_scripts=['{name}={name}:main'])
        )
    """.format(name=name)))
    return _vcs_add(script, version_pkg_path, vcs)
|
|
|
|
|
|
|
|
|
|
|
|
def _vcs_add(script, version_pkg_path, vcs='git'):
    """Put *version_pkg_path* under version control and commit it.

    Supported *vcs* values: 'git', 'hg', 'svn', 'bazaar'.  For svn the
    tree is imported into a fresh local repository and a working copy is
    checked out; the returned path is that working copy.  For every
    other VCS the original path is returned.

    :raises ValueError: for an unknown *vcs*.
    """
    if vcs == 'git':
        script.run('git', 'init', cwd=version_pkg_path)
        script.run('git', 'add', '.', cwd=version_pkg_path)
        script.run(
            'git', 'commit', '-q',
            '--author', 'pip <pypa-dev@googlegroups.com>',
            '-am', 'initial version', cwd=version_pkg_path,
        )
    elif vcs == 'hg':
        script.run('hg', 'init', cwd=version_pkg_path)
        script.run('hg', 'add', '.', cwd=version_pkg_path)
        script.run(
            'hg', 'commit', '-q',
            '--user', 'pip <pypa-dev@googlegroups.com>',
            '-m', 'initial version', cwd=version_pkg_path,
        )
    elif vcs == 'svn':
        repo_url = _create_svn_repo(script, version_pkg_path)
        script.run(
            'svn', 'checkout', repo_url, 'pip-test-package',
            cwd=script.scratch_path
        )
        checkout_path = script.scratch_path / 'pip-test-package'

        # svn internally stores windows drives as uppercase; we'll match that.
        checkout_path = checkout_path.replace('c:', 'C:')

        version_pkg_path = checkout_path
    elif vcs == 'bazaar':
        script.run('bzr', 'init', cwd=version_pkg_path)
        script.run('bzr', 'add', '.', cwd=version_pkg_path)
        script.run(
            'bzr', 'whoami', 'pip <pypa-dev@googlegroups.com>',
            cwd=version_pkg_path)
        script.run(
            'bzr', 'commit', '-q',
            '--author', 'pip <pypa-dev@googlegroups.com>',
            '-m', 'initial version', cwd=version_pkg_path,
        )
    else:
        raise ValueError('Unknown vcs: %r' % vcs)
    return version_pkg_path
|
|
|
|
|
|
|
|
|
2015-03-14 20:47:55 +01:00
|
|
|
def _create_svn_repo(script, version_pkg_path):
    """Create a local svn repository and import *version_pkg_path* into it.

    The repository is created in the scratch dir as
    'pip-test-package-repo'; the tree is imported as its trunk.

    Returns the ``file:`` URL of the imported trunk.
    """
    repo_url = path_to_url(
        script.scratch_path / 'pip-test-package-repo' / 'trunk')
    script.run(
        'svnadmin', 'create', 'pip-test-package-repo',
        cwd=script.scratch_path
    )
    script.run(
        'svn', 'import', version_pkg_path, repo_url,
        '-m', 'Initial import of pip-test-package',
        cwd=script.scratch_path
    )
    return repo_url
|
|
|
|
|
|
|
|
|
2013-08-21 11:16:07 +02:00
|
|
|
def _change_test_package_version(script, version_pkg_path):
    """Rewrite version_pkg.py in the git checkout and commit the change.

    Used to simulate an upstream update of the test package after the
    initial commit created by _create_test_package.
    """
    version_pkg_path.join("version_pkg.py").write(textwrap.dedent('''\
        def main():
            print("some different version")'''))
    # Drop untracked/ignored leftovers (e.g. build artifacts) so the
    # commit below contains only the edit above; git may write to stderr.
    script.run(
        'git', 'clean', '-qfdx',
        cwd=version_pkg_path,
        expect_stderr=True,
    )
    script.run(
        'git', 'commit', '-q',
        '--author', 'pip <pypa-dev@googlegroups.com>',
        '-am', 'messed version',
        cwd=version_pkg_path,
        expect_stderr=True,
    )
|
2010-08-05 16:08:25 +02:00
|
|
|
|
2010-06-03 04:25:26 +02:00
|
|
|
|
2013-02-08 07:17:27 +01:00
|
|
|
def assert_raises_regexp(exception, reg, run, *args, **kwargs):
    """Assert that ``run(*args, **kwargs)`` raises *exception* and that the
    regex *reg* matches somewhere in ``str()`` of the raised error.

    Like assertRaisesRegexp in unittest.

    :param exception: expected exception class (or tuple of classes)
    :param reg: regular expression pattern searched against str(error)
    :param run: the callable to invoke
    :raises AssertionError: if nothing is raised, or the pattern is absent
    """
    __tracebackhide__ = True

    try:
        run(*args, **kwargs)
    except exception as e:
        # The raised error's message must match the pattern somewhere.
        assert re.search(reg, str(e)), str(e)
    else:
        # NOTE: this failure must live in the `else` clause. Previously it
        # sat inside the `try`, so when `exception` was AssertionError (or a
        # base class such as Exception) the "should have been thrown" assert
        # was itself caught by `except exception:` and silently regex-checked
        # instead of failing the test.
        assert False, "%s should have been thrown" % exception
|
Add checks against requirements-file-dwelling hashes for most kinds of packages. Close #1175.
* Add --require-hashes option. This is handy in deployment scripts to force application authors to hash their requirements. It is also a convenient way to get pip to show computed hashes for a virgin, unhashed requirements file. Eventually, additions to `pip freeze` should fill a superset of this use case.
* In --require-hashes mode, at least one hash is required to match for each requirement.
* Option-based requirements (--sha256=...) turn on --require-hashes mode implicitly.
* Internet-derived URL-based hashes are "necessary but not sufficient": they do not satisfy --require-hashes mode when they match, but they are still used to guard against transmission errors.
* Other URL-based requirements (#md5=...) are treated just like flag-based ones, except they don't turn on --require-hashes.
* Complain informatively, with the most devastating errors first so you don't chase your tail all day only to run up against a brick wall at the end. This also means we don't complain that a hash is missing, only for the user to find, after fixing it, that we have no idea how to even compute a hash for that type of requirement.
* Complain about unpinned requirements when hash-checking mode is on, lest they cause the user surprise later.
* Complain about missing hashes.
* Complain about requirement types we don't know how to hash (like VCS ones and local dirs).
* Have InstallRequirement keep its original Link around (original_link) so we can differentiate between URL hashes from requirements files and ones downloaded from the (untrustworthy) internet.
* Remove test_download_hashes, which is obsolete. Similar coverage is provided in test_utils.TestHashes and the various hash cases in test_req.py.
2015-09-09 19:01:53 +02:00
|
|
|
|
|
|
|
|
|
|
|
@contextmanager
def requirements_file(contents, tmpdir):
    """Return a Path to a requirements file of given contents.

    As long as the context manager is open, the requirements file will exist.

    :param contents: text to write into the requirements file
    :param tmpdir: A Path to the folder in which to create the file

    """
    path = tmpdir / 'reqs.txt'
    path.write(contents)
    try:
        yield path
    finally:
        # Previously the remove() ran only on a clean exit, leaking the
        # file into tmpdir whenever the managed block raised.
        path.remove()
|
2016-11-04 15:15:47 +01:00
|
|
|
|
|
|
|
|
|
|
|
def create_test_package_with_setup(script, **setup_kwargs):
    """Create a source package directory in scratch_path whose setup.py
    passes *setup_kwargs* straight to ``setuptools.setup()``.

    :param script: test environment providing ``scratch_path``
    :param setup_kwargs: keyword arguments for setup(); must include 'name',
        which is also used as the directory name
    :return: Path to the newly created package directory
    """
    assert 'name' in setup_kwargs, setup_kwargs
    package_dir = script.scratch_path / setup_kwargs['name']
    package_dir.mkdir()
    setup_py_template = textwrap.dedent("""
        from setuptools import setup
        kwargs = %r
        setup(**kwargs)
    """)
    package_dir.join("setup.py").write(setup_py_template % setup_kwargs)
    return package_dir
|
2017-08-07 18:03:43 +02:00
|
|
|
|
|
|
|
|
|
|
|
def create_basic_wheel_for_package(script, name, version, depends, extras):
    """Build a minimal pure-Python wheel for a fake package and return its Path.

    The wheel's contents are written as text files under ``script.temp_path``
    (which is emptied and recreated afterwards) and then zipped into
    ``script.scratch_path``.

    :param script: test environment exposing ``temp_path`` and ``scratch_path``
    :param name: distribution name, used for the module, metadata and filename
    :param version: distribution version string
    :param depends: iterable of requirement strings; each becomes a
        ``Requires-Dist`` line in METADATA
    :param extras: mapping of extra name -> list of requirement strings; each
        extra yields a ``Provides-Extra`` line plus conditional
        ``Requires-Dist`` lines
    :return: Path to ``<name>-<version>-py2.py3-none-any.whl`` in scratch_path
    """
    # File templates keyed by archive-relative path. Both the keys and the
    # bodies contain {name}/{version}/{dist_info}/{requires_dist}
    # placeholders that are filled in further down.
    files = {
        "{name}/__init__.py": """
            def hello():
                return "Hello From {name}"
        """,
        "{dist_info}/DESCRIPTION": """
            UNKNOWN
        """,
        "{dist_info}/WHEEL": """
            Wheel-Version: 1.0
            Generator: pip-test-suite
            Root-Is-Purelib: true
            Tag: py2-none-any
            Tag: py3-none-any


        """,
        "{dist_info}/METADATA": """
            Metadata-Version: 2.0
            Name: {name}
            Version: {version}
            Summary: UNKNOWN
            Home-page: UNKNOWN
            Author: UNKNOWN
            Author-email: UNKNOWN
            License: UNKNOWN
            Platform: UNKNOWN
            {requires_dist}

            UNKNOWN
        """,
        "{dist_info}/top_level.txt": """
            {name}
        """,
        # Have an empty RECORD because we don't want to be checking hashes.
        "{dist_info}/RECORD": ""
    }

    # Some useful shorthands
    archive_name = "{name}-{version}-py2.py3-none-any.whl".format(
        name=name, version=version
    )
    dist_info = "{name}-{version}.dist-info".format(
        name=name, version=version
    )

    # Flatten plain dependencies, extra declarations, and per-extra
    # dependencies into the Requires-Dist / Provides-Extra block of METADATA.
    requires_dist = "\n".join([
        "Requires-Dist: {}".format(pkg) for pkg in depends
    ] + [
        "Provides-Extra: {}".format(pkg) for pkg in extras.keys()
    ] + [
        "Requires-Dist: {}; extra == \"{}\"".format(pkg, extra)
        for extra in extras for pkg in extras[extra]
    ])

    # Replace key-values with formatted values: substitute placeholders in
    # both the paths (keys) and the dedented, stripped file bodies (values).
    for key, value in list(files.items()):
        del files[key]
        key = key.format(name=name, dist_info=dist_info)
        files[key] = textwrap.dedent(value).format(
            name=name, version=version, requires_dist=requires_dist
        ).strip()

    # Lay the files out in the (empty) temp tree.
    for fname in files:
        path = script.temp_path / fname
        path.folder.mkdir()
        path.write(files[fname])

    # A wheel is just a zip archive: build it next to scratch_path, then
    # rename away the ".zip" suffix that make_archive appends.
    retval = script.scratch_path / archive_name
    generated = shutil.make_archive(retval, 'zip', script.temp_path)
    shutil.move(generated, retval)

    # Leave temp_path empty for the next caller.
    script.temp_path.rmtree()
    script.temp_path.mkdir()

    return retval
|