2021-07-10 02:38:41 +02:00
|
|
|
import json
|
2010-06-03 04:25:26 +02:00
|
|
|
import os
|
2013-02-08 07:17:27 +01:00
|
|
|
import re
|
2017-08-07 18:03:43 +02:00
|
|
|
import shutil
|
2019-09-24 13:49:29 +02:00
|
|
|
import site
|
2017-10-06 21:51:42 +02:00
|
|
|
import subprocess
|
2019-09-24 13:49:29 +02:00
|
|
|
import sys
|
|
|
|
import textwrap
|
2020-01-02 02:29:10 +01:00
|
|
|
from base64 import urlsafe_b64encode
|
2019-09-24 13:49:29 +02:00
|
|
|
from contextlib import contextmanager
|
2020-01-02 02:29:10 +01:00
|
|
|
from hashlib import sha256
|
|
|
|
from io import BytesIO
|
2019-09-24 13:49:29 +02:00
|
|
|
from textwrap import dedent
|
2021-02-19 13:56:59 +01:00
|
|
|
from typing import List, Optional
|
2020-01-02 02:29:10 +01:00
|
|
|
from zipfile import ZipFile
|
2013-08-21 11:16:07 +02:00
|
|
|
|
2017-10-06 21:51:42 +02:00
|
|
|
import pytest
|
2021-07-10 02:38:41 +02:00
|
|
|
from pip._vendor.packaging.utils import canonicalize_name
|
2018-10-09 08:23:03 +02:00
|
|
|
from scripttest import FoundDir, TestFileEnvironment
|
2011-03-23 02:41:22 +01:00
|
|
|
|
2019-10-06 18:59:05 +02:00
|
|
|
from pip._internal.index.collector import LinkCollector
|
|
|
|
from pip._internal.index.package_finder import PackageFinder
|
2019-07-18 19:03:01 +02:00
|
|
|
from pip._internal.locations import get_major_minor_version
|
2019-06-28 20:14:55 +02:00
|
|
|
from pip._internal.models.search_scope import SearchScope
|
|
|
|
from pip._internal.models.selection_prefs import SelectionPreferences
|
2021-02-19 13:56:59 +01:00
|
|
|
from pip._internal.models.target_python import TargetPython
|
2019-09-27 00:39:53 +02:00
|
|
|
from pip._internal.network.session import PipSession
|
2019-06-28 20:14:55 +02:00
|
|
|
from pip._internal.utils.deprecation import DEPRECATION_MSG_PREFIX
|
2017-03-24 19:21:22 +01:00
|
|
|
from tests.lib.path import Path, curdir
|
2020-03-29 21:20:00 +02:00
|
|
|
from tests.lib.wheel import make_wheel
|
2013-08-21 11:16:07 +02:00
|
|
|
|
2019-10-07 14:30:59 +02:00
|
|
|
# Directory holding the pristine, bundled test data ("<repo>/tests/data").
DATA_DIR = Path(__file__).parent.parent.joinpath("data").resolve()
# Root of the source checkout (three levels above this file).
SRC_DIR = Path(__file__).resolve().parent.parent.parent

# "major.minor" string for the interpreter running the tests.
pyversion = get_major_minor_version()

# (major, minor, micro) tuple for the running interpreter.
CURRENT_PY_VERSION_INFO = sys.version_info[:3]
|
|
|
|
|
2010-04-26 08:52:46 +02:00
|
|
|
|
2019-02-10 21:36:59 +01:00
|
|
|
def assert_paths_equal(actual, expected):
    """Assert that two filesystem paths are equal once normalized.

    Normalization (os.path.normpath) collapses redundant separators and
    up-level references, so logically identical paths compare equal.
    """
    normalized_actual = os.path.normpath(actual)
    normalized_expected = os.path.normpath(expected)
    assert normalized_actual == normalized_expected
|
2019-02-10 21:36:59 +01:00
|
|
|
|
|
|
|
|
2010-06-10 00:01:01 +02:00
|
|
|
def path_to_url(path):
    """
    Convert a path to URI. The path will be made absolute and
    will not have quoted path parts.

    (adapted from pip.util)
    """
    absolute = os.path.normpath(os.path.abspath(path))
    drive, tail = os.path.splitdrive(absolute)
    url_path = "/".join(tail.split(os.path.sep))
    if not drive:
        return "file://" + url_path
    # Note: match urllib.request.pathname2url's
    # behavior: uppercase the drive letter.
    return "file:///" + drive.upper() + url_path
|
2010-06-10 00:01:01 +02:00
|
|
|
|
2013-08-23 13:09:53 +02:00
|
|
|
|
2018-11-10 08:37:21 +01:00
|
|
|
def _test_path_to_file_url(path):
|
|
|
|
"""
|
|
|
|
Convert a test Path to a "file://" URL.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
path: a tests.lib.path.Path object.
|
|
|
|
"""
|
2021-04-02 11:21:40 +02:00
|
|
|
return "file://" + path.resolve().replace("\\", "/")
|
2018-11-10 08:37:21 +01:00
|
|
|
|
|
|
|
|
2017-06-05 13:48:23 +02:00
|
|
|
def create_file(path, contents=None):
    """Create a file on the path, with the given contents"""
    # Imported lazily, mirroring the original, to avoid import-time
    # dependence on pip internals at module load.
    from pip._internal.utils.misc import ensure_dir

    ensure_dir(os.path.dirname(path))
    with open(path, "w") as f:
        # A missing `contents` produces a file containing a single newline.
        f.write(contents if contents is not None else "\n")
|
|
|
|
|
|
|
|
|
2019-09-10 18:46:04 +02:00
|
|
|
def make_test_search_scope(
    find_links: Optional[List[str]] = None,
    index_urls: Optional[List[str]] = None,
):
    """Create a SearchScope for testing, defaulting both link lists to empty."""
    return SearchScope.create(
        find_links=find_links if find_links is not None else [],
        index_urls=index_urls if index_urls is not None else [],
    )
|
|
|
|
|
|
|
|
|
|
|
|
def make_test_link_collector(
    find_links: Optional[List[str]] = None,
    index_urls: Optional[List[str]] = None,
    session: Optional[PipSession] = None,
) -> LinkCollector:
    """
    Create a LinkCollector object for testing purposes.
    """
    # Fall back to a fresh PipSession when the caller did not supply one.
    if session is None:
        session = PipSession()

    scope = make_test_search_scope(
        find_links=find_links,
        index_urls=index_urls,
    )
    return LinkCollector(session=session, search_scope=scope)
|
|
|
|
|
2019-09-10 18:46:04 +02:00
|
|
|
|
2019-06-01 18:45:53 +02:00
|
|
|
def make_test_finder(
    find_links: Optional[List[str]] = None,
    index_urls: Optional[List[str]] = None,
    allow_all_prereleases: bool = False,
    session: Optional[PipSession] = None,
    target_python: Optional[TargetPython] = None,
) -> PackageFinder:
    """
    Create a PackageFinder for testing purposes.
    """
    collector = make_test_link_collector(
        find_links=find_links,
        index_urls=index_urls,
        session=session,
    )
    # Yanked files are allowed so fixtures can exercise them directly.
    prefs = SelectionPreferences(
        allow_yanked=True,
        allow_all_prereleases=allow_all_prereleases,
    )
    return PackageFinder.create(
        link_collector=collector,
        selection_prefs=prefs,
        target_python=target_python,
    )
|
|
|
|
|
|
|
|
|
2020-12-24 22:23:07 +01:00
|
|
|
class TestData:
    """
    Represents a bundle of pre-created test data.

    This copies a pristine set of test data into a root location that is
    designed to be test specific. The reason for this is when running the tests
    concurrently errors can be generated because the related tooling uses
    the directory as a work space. This leads to two concurrent processes
    trampling over each other. This class gets around that by copying all
    data into a directory and operating on the copied data.
    """

    # Tell pytest not to collect this class despite the "Test" prefix.
    __test__ = False

    def __init__(self, root, source=None):
        # Pristine source of data to copy from; defaults to the repo data dir.
        self.source = source or DATA_DIR
        # Test-specific destination this instance operates on.
        self.root = Path(root).resolve()

    @classmethod
    def copy(cls, root):
        # Alternate constructor: build an instance and populate it right away.
        obj = cls(root)
        obj.reset()
        return obj

    def reset(self):
        # Wipe any prior copy, then re-copy the pristine data into root.
        # Check explicitly for the target directory to avoid overly-broad
        # try/except.
        if self.root.exists():
            shutil.rmtree(self.root)
        shutil.copytree(self.source, self.root, symlinks=True)

    @property
    def packages(self):
        # Directory of package fixtures within the copied data.
        return self.root.joinpath("packages")

    @property
    def packages2(self):
        return self.root.joinpath("packages2")

    @property
    def packages3(self):
        return self.root.joinpath("packages3")

    @property
    def src(self):
        return self.root.joinpath("src")

    @property
    def indexes(self):
        return self.root.joinpath("indexes")

    @property
    def reqfiles(self):
        return self.root.joinpath("reqfiles")

    @property
    def completion_paths(self):
        return self.root.joinpath("completion_paths")

    @property
    def find_links(self):
        # "file://" URL form of the packages directory, for --find-links.
        return path_to_url(self.packages)

    @property
    def find_links2(self):
        return path_to_url(self.packages2)

    @property
    def find_links3(self):
        return path_to_url(self.packages3)

    @property
    def backends(self):
        # "file://" URL of the build-backend fixtures directory.
        return path_to_url(self.root.joinpath("backends"))

    def index_url(self, index="simple"):
        # "file://" URL for one of the bundled package indexes.
        return path_to_url(self.root.joinpath("indexes", index))
|
2010-06-09 01:58:14 +02:00
|
|
|
|
2010-06-03 04:25:26 +02:00
|
|
|
|
2010-04-28 22:55:10 +02:00
|
|
|
class TestFailure(AssertionError):
    """
    An "assertion" failed during testing.
    """
|
|
|
|
|
|
|
|
|
2020-12-24 22:23:07 +01:00
|
|
|
class TestPipResult:
    """Wrapper around a scripttest run result.

    Delegates attribute access to the wrapped result object and, on
    Windows, normalizes "\r\n" line endings to "\n" in stdout/stderr/str.
    """

    def __init__(self, impl, verbose=False):
        # The underlying scripttest result being wrapped.
        self._impl = impl

        if verbose:
            print(self.stdout)
            if self.stderr:
                print("======= stderr ========")
                print(self.stderr)
                print("=======================")

    def __getattr__(self, attr):
        # Fall through to the wrapped result for anything not defined here.
        return getattr(self._impl, attr)

    if sys.platform == "win32":

        @property
        def stdout(self):
            # Normalize Windows line endings for stable comparisons.
            return self._impl.stdout.replace("\r\n", "\n")

        @property
        def stderr(self):
            return self._impl.stderr.replace("\r\n", "\n")

        def __str__(self):
            return str(self._impl).replace("\r\n", "\n")

    else:
        # Python doesn't automatically forward __str__ through __getattr__

        def __str__(self):
            return str(self._impl)

    def assert_installed(
        self,
        pkg_name,
        editable=True,
        with_files=None,
        without_files=None,
        without_egg_link=False,
        use_user_site=False,
        sub_dir=False,
    ):
        """Assert that this run installed `pkg_name` as expected.

        Checks the egg-link file, easy-install.pth updates, the package
        directory, and any expected/forbidden files beneath it.  Raises
        TestFailure on any mismatch.
        """
        with_files = with_files or []
        without_files = without_files or []
        e = self.test_env

        if editable:
            # Editable installs are checked out under <venv>/src.
            pkg_dir = e.venv / "src" / pkg_name.lower()
            # If package was installed in a sub directory
            if sub_dir:
                pkg_dir = pkg_dir / sub_dir
        else:
            # Non-editable installs never create an egg-link.
            without_egg_link = True
            pkg_dir = e.site_packages / pkg_name

        if use_user_site:
            egg_link_path = e.user_site / pkg_name + ".egg-link"
        else:
            egg_link_path = e.site_packages / pkg_name + ".egg-link"

        if without_egg_link:
            if egg_link_path in self.files_created:
                raise TestFailure(
                    f"unexpected egg link file created: {egg_link_path!r}\n{self}"
                )
        else:
            if egg_link_path not in self.files_created:
                raise TestFailure(
                    f"expected egg link file missing: {egg_link_path!r}\n{self}"
                )

            egg_link_file = self.files_created[egg_link_path]
            egg_link_contents = egg_link_file.bytes.replace(os.linesep, "\n")

            # FIXME: I don't understand why there's a trailing . here
            if not (
                egg_link_contents.endswith("\n.")
                and egg_link_contents[:-2].endswith(pkg_dir)
            ):
                expected_ending = pkg_dir + "\n."
                raise TestFailure(
                    textwrap.dedent(
                        f"""
                        Incorrect egg_link file {egg_link_file!r}
                        Expected ending: {expected_ending!r}
                        ------- Actual contents -------
                        {egg_link_contents!r}
                        -------------------------------
                        """
                    ).strip()
                )

        if use_user_site:
            pth_file = e.user_site / "easy-install.pth"
        else:
            pth_file = e.site_packages / "easy-install.pth"

        # easy-install.pth must be updated exactly when an egg-link exists.
        if (pth_file in self.files_updated) == without_egg_link:
            maybe = "" if without_egg_link else "not "
            raise TestFailure(f"{pth_file} unexpectedly {maybe}updated by install")

        # The package directory must be created unless curdir was excluded.
        if (pkg_dir in self.files_created) == (curdir in without_files):
            maybe = "not " if curdir in without_files else ""
            files = sorted(self.files_created)
            raise TestFailure(
                textwrap.dedent(
                    f"""
                    expected package directory {pkg_dir!r} {maybe}to be created
                    actually created:
                    {files}
                    """
                )
            )

        for f in with_files:
            normalized_path = os.path.normpath(pkg_dir / f)
            if normalized_path not in self.files_created:
                raise TestFailure(
                    f"Package directory {pkg_dir!r} missing expected content {f!r}"
                )

        for f in without_files:
            normalized_path = os.path.normpath(pkg_dir / f)
            if normalized_path in self.files_created:
                raise TestFailure(
                    f"Package directory {pkg_dir!r} has unexpected content {f}"
                )

    def did_create(self, path, message=None):
        # Assert the run created `path`; `message` augments the failure text.
        assert str(path) in self.files_created, _one_or_both(message, self)

    def did_not_create(self, path, message=None):
        assert str(path) not in self.files_created, _one_or_both(message, self)

    def did_update(self, path, message=None):
        assert str(path) in self.files_updated, _one_or_both(message, self)

    def did_not_update(self, path, message=None):
        assert str(path) not in self.files_updated, _one_or_both(message, self)
|
|
|
|
|
|
|
|
|
|
|
|
def _one_or_both(a, b):
|
2021-04-02 11:21:40 +02:00
|
|
|
"""Returns f"{a}\n{b}" if a is truthy, else returns str(b)."""
|
2020-05-22 13:57:44 +02:00
|
|
|
if not a:
|
|
|
|
return str(b)
|
|
|
|
|
2020-12-23 20:25:12 +01:00
|
|
|
return f"{a}\n{b}"
|
2020-05-22 13:57:44 +02:00
|
|
|
|
2010-04-28 22:55:10 +02:00
|
|
|
|
2019-03-30 11:11:16 +01:00
|
|
|
def make_check_stderr_message(stderr, line, reason):
    """
    Create an exception message to use inside check_stderr().
    """
    # Template is dedented BEFORE formatting so multi-line stderr content
    # keeps its own indentation intact.
    return dedent(
        """\
    {reason}:
     Caused by line: {line!r}
     Complete stderr: {stderr}
    """
    ).format(stderr=stderr, line=line, reason=reason)
|
2019-03-30 11:11:16 +01:00
|
|
|
|
|
|
|
|
2019-08-11 06:02:44 +02:00
|
|
|
def _check_stderr(
    stderr,
    allow_stderr_warning,
    allow_stderr_error,
):
    """
    Check the given stderr for logged warnings and errors.

    :param stderr: stderr output as a string.
    :param allow_stderr_warning: whether a logged warning (or deprecation
        message) is allowed. Must be True if allow_stderr_error is True.
    :param allow_stderr_error: whether a logged error is allowed.

    :raises RuntimeError: if a disallowed error/warning (or any logging
        error) appears in stderr.
    """
    # Errors are strictly stronger than warnings, so allowing errors while
    # forbidding warnings is a contradiction.
    assert not (allow_stderr_error and not allow_stderr_warning)

    lines = stderr.splitlines()
    for line in lines:
        # First check for logging errors, which we don't allow during
        # tests even if allow_stderr_error=True (since a logging error
        # would signal a bug in pip's code).
        # Unlike errors logged with logger.error(), these errors are
        # sent directly to stderr and so bypass any configured log formatter.
        # The "--- Logging error ---" string is used in Python 3.4+, and
        # "Logged from file " is used in Python 2.
        if line.startswith("--- Logging error ---") or line.startswith(
            "Logged from file "
        ):
            reason = "stderr has a logging error, which is never allowed"
            msg = make_check_stderr_message(stderr, line=line, reason=reason)
            raise RuntimeError(msg)
        # Everything below the logging-error check is skippable when errors
        # are permitted.
        if allow_stderr_error:
            continue

        if line.startswith("ERROR: "):
            reason = (
                "stderr has an unexpected error "
                "(pass allow_stderr_error=True to permit this)"
            )
            msg = make_check_stderr_message(stderr, line=line, reason=reason)
            raise RuntimeError(msg)
        if allow_stderr_warning:
            continue

        if line.startswith("WARNING: ") or line.startswith(DEPRECATION_MSG_PREFIX):
            reason = (
                "stderr has an unexpected warning "
                "(pass allow_stderr_warning=True to permit this)"
            )
            msg = make_check_stderr_message(stderr, line=line, reason=reason)
            raise RuntimeError(msg)
|
2019-03-16 11:49:42 +01:00
|
|
|
|
|
|
|
|
2018-10-09 08:23:03 +02:00
|
|
|
class PipTestEnvironment(TestFileEnvironment):
|
2013-08-21 11:16:07 +02:00
|
|
|
"""
|
|
|
|
A specialized TestFileEnvironment for testing pip
|
|
|
|
"""
|
2010-05-02 20:11:45 +02:00
|
|
|
|
|
|
|
#
|
|
|
|
# Attribute naming convention
|
|
|
|
# ---------------------------
|
2010-06-03 04:25:26 +02:00
|
|
|
#
|
2010-05-02 20:11:45 +02:00
|
|
|
# Instances of this class have many attributes representing paths
|
|
|
|
# in the filesystem. To keep things straight, absolute paths have
|
|
|
|
# a name of the form xxxx_path and relative paths have a name that
|
|
|
|
# does not end in '_path'.
|
|
|
|
|
2021-04-02 11:21:40 +02:00
|
|
|
exe = sys.platform == "win32" and ".exe" or ""
|
2010-05-19 12:13:07 +02:00
|
|
|
verbose = False
|
2010-06-03 04:25:26 +02:00
|
|
|
|
2021-01-01 19:55:24 +01:00
|
|
|
def __init__(self, base_path, *args, virtualenv, pip_expect_warning=None, **kwargs):
|
2013-08-21 11:16:07 +02:00
|
|
|
# Make our base_path a test.lib.path.Path object
|
|
|
|
base_path = Path(base_path)
|
2010-04-27 17:35:48 +02:00
|
|
|
|
2013-08-21 11:16:07 +02:00
|
|
|
# Store paths related to the virtual environment
|
2021-01-01 19:55:24 +01:00
|
|
|
self.venv_path = virtualenv.location
|
|
|
|
self.lib_path = virtualenv.lib
|
|
|
|
self.site_packages_path = virtualenv.site
|
|
|
|
self.bin_path = virtualenv.bin
|
2010-04-28 17:41:55 +02:00
|
|
|
|
2019-07-02 07:00:32 +02:00
|
|
|
self.user_base_path = self.venv_path.joinpath("user")
|
|
|
|
self.user_site_path = self.venv_path.joinpath(
|
2013-08-21 11:16:07 +02:00
|
|
|
"user",
|
2021-04-02 11:21:40 +02:00
|
|
|
site.USER_SITE[len(site.USER_BASE) + 1 :],
|
2013-08-21 11:16:07 +02:00
|
|
|
)
|
2021-04-02 11:21:40 +02:00
|
|
|
if sys.platform == "win32":
|
2021-08-20 17:38:58 +02:00
|
|
|
scripts_base = Path(os.path.normpath(self.user_site_path.joinpath("..")))
|
2021-04-02 11:21:40 +02:00
|
|
|
self.user_bin_path = scripts_base.joinpath("Scripts")
|
2017-10-06 21:51:42 +02:00
|
|
|
else:
|
2019-07-02 07:00:32 +02:00
|
|
|
self.user_bin_path = self.user_base_path.joinpath(
|
2019-10-07 09:08:22 +02:00
|
|
|
os.path.relpath(self.bin_path, self.venv_path)
|
2017-10-06 21:51:42 +02:00
|
|
|
)
|
2013-04-12 08:13:01 +02:00
|
|
|
|
2013-08-21 11:16:07 +02:00
|
|
|
# Create a Directory to use as a scratch pad
|
2019-08-17 03:34:17 +02:00
|
|
|
self.scratch_path = base_path.joinpath("scratch")
|
|
|
|
self.scratch_path.mkdir()
|
2010-04-17 23:49:29 +02:00
|
|
|
|
2013-08-21 11:16:07 +02:00
|
|
|
# Set our default working directory
|
|
|
|
kwargs.setdefault("cwd", self.scratch_path)
|
2010-04-13 02:48:37 +02:00
|
|
|
|
2013-08-21 11:16:07 +02:00
|
|
|
# Setup our environment
|
2020-07-23 10:01:37 +02:00
|
|
|
environ = kwargs.setdefault("environ", os.environ.copy())
|
2013-08-21 11:16:07 +02:00
|
|
|
environ["PATH"] = Path.pathsep.join(
|
|
|
|
[self.bin_path] + [environ.get("PATH", [])],
|
|
|
|
)
|
|
|
|
environ["PYTHONUSERBASE"] = self.user_base_path
|
2013-08-22 08:44:21 +02:00
|
|
|
# Writing bytecode can mess up updated file detection
|
|
|
|
environ["PYTHONDONTWRITEBYTECODE"] = "1"
|
2017-10-06 21:51:42 +02:00
|
|
|
# Make sure we get UTF-8 on output, even on Windows...
|
|
|
|
environ["PYTHONIOENCODING"] = "UTF-8"
|
2012-06-14 07:17:55 +02:00
|
|
|
|
2019-01-11 11:12:44 +01:00
|
|
|
# Whether all pip invocations should expect stderr
|
|
|
|
# (useful for Python version deprecation)
|
2021-01-01 19:55:24 +01:00
|
|
|
self.pip_expect_warning = pip_expect_warning
|
2019-01-11 11:12:44 +01:00
|
|
|
|
2013-08-21 11:16:07 +02:00
|
|
|
# Call the TestFileEnvironment __init__
|
2020-12-25 00:00:05 +01:00
|
|
|
super().__init__(base_path, *args, **kwargs)
|
2010-04-27 17:35:48 +02:00
|
|
|
|
2013-08-21 11:16:07 +02:00
|
|
|
# Expand our absolute path directories into relative
|
2021-04-02 11:21:40 +02:00
|
|
|
for name in [
|
|
|
|
"base",
|
|
|
|
"venv",
|
|
|
|
"bin",
|
|
|
|
"lib",
|
|
|
|
"site_packages",
|
|
|
|
"user_base",
|
|
|
|
"user_site",
|
|
|
|
"user_bin",
|
|
|
|
"scratch",
|
|
|
|
]:
|
2021-02-13 07:27:17 +01:00
|
|
|
real_name = f"{name}_path"
|
2021-04-02 11:21:40 +02:00
|
|
|
relative_path = Path(
|
|
|
|
os.path.relpath(getattr(self, real_name), self.base_path)
|
|
|
|
)
|
2019-10-07 09:08:22 +02:00
|
|
|
setattr(self, name, relative_path)
|
2013-04-12 08:13:01 +02:00
|
|
|
|
2014-04-28 16:38:39 +02:00
|
|
|
# Make sure temp_path is a Path object
|
|
|
|
self.temp_path = Path(self.temp_path)
|
2013-08-22 07:51:14 +02:00
|
|
|
# Ensure the tmp dir exists, things break horribly if it doesn't
|
2013-08-21 11:16:07 +02:00
|
|
|
self.temp_path.mkdir()
|
2012-07-13 00:59:44 +02:00
|
|
|
|
2013-08-21 11:16:07 +02:00
|
|
|
# create easy-install.pth in user_site, so we always have it updated
|
|
|
|
# instead of created
|
2019-07-12 10:00:18 +02:00
|
|
|
self.user_site_path.mkdir(parents=True)
|
2019-07-02 07:00:32 +02:00
|
|
|
self.user_site_path.joinpath("easy-install.pth").touch()
|
2013-04-12 08:13:01 +02:00
|
|
|
|
2011-03-15 20:49:48 +01:00
|
|
|
def _ignore_file(self, fn):
|
2021-04-02 11:21:40 +02:00
|
|
|
if fn.endswith("__pycache__") or fn.endswith(".pyc"):
|
2011-03-15 20:49:48 +01:00
|
|
|
result = True
|
|
|
|
else:
|
2020-12-25 00:00:05 +01:00
|
|
|
result = super()._ignore_file(fn)
|
2011-03-15 20:49:48 +01:00
|
|
|
return result
|
|
|
|
|
2018-10-09 08:23:03 +02:00
|
|
|
def _find_traverse(self, path, result):
|
|
|
|
# Ignore symlinked directories to avoid duplicates in `run()`
|
|
|
|
# results because of venv `lib64 -> lib/` symlink on Linux.
|
|
|
|
full = os.path.join(self.base_path, path)
|
|
|
|
if os.path.isdir(full) and os.path.islink(full):
|
2021-04-02 11:21:40 +02:00
|
|
|
if not self.temp_path or path != "tmp":
|
2018-10-09 08:23:03 +02:00
|
|
|
result[path] = FoundDir(self.base_path, path)
|
|
|
|
else:
|
2020-12-25 00:00:05 +01:00
|
|
|
super()._find_traverse(path, result)
|
2018-10-09 08:23:03 +02:00
|
|
|
|
2021-01-01 19:55:24 +01:00
|
|
|
def run(
|
|
|
|
self,
|
|
|
|
*args,
|
|
|
|
cwd=None,
|
|
|
|
allow_stderr_error=None,
|
|
|
|
allow_stderr_warning=None,
|
|
|
|
allow_error=None,
|
|
|
|
**kw,
|
|
|
|
):
|
2019-03-16 11:49:42 +01:00
|
|
|
"""
|
2019-03-18 19:38:48 +01:00
|
|
|
:param allow_stderr_error: whether a logged error is allowed in
|
2019-03-16 11:49:42 +01:00
|
|
|
stderr. Passing True for this argument implies
|
2019-03-18 19:38:48 +01:00
|
|
|
`allow_stderr_warning` since warnings are weaker than errors.
|
2019-08-11 06:02:44 +02:00
|
|
|
:param allow_stderr_warning: whether a logged warning (or
|
|
|
|
deprecation message) is allowed in stderr.
|
2020-04-29 05:49:18 +02:00
|
|
|
:param allow_error: if True (default is False) does not raise
|
|
|
|
exception when the command exit value is non-zero. Implies
|
|
|
|
expect_error, but in contrast to expect_error will not assert
|
|
|
|
that the exit value is zero.
|
2019-08-11 06:02:44 +02:00
|
|
|
:param expect_error: if False (the default), asserts that the command
|
|
|
|
exits with 0. Otherwise, asserts that the command exits with a
|
|
|
|
non-zero exit code. Passing True also implies allow_stderr_error
|
|
|
|
and allow_stderr_warning.
|
2019-03-20 20:48:40 +01:00
|
|
|
:param expect_stderr: whether to allow warnings in stderr (equivalent
|
|
|
|
to `allow_stderr_warning`). This argument is an abbreviated
|
|
|
|
version of `allow_stderr_warning` and is also kept for backwards
|
2019-03-16 11:49:42 +01:00
|
|
|
compatibility.
|
|
|
|
"""
|
2010-05-19 12:13:07 +02:00
|
|
|
if self.verbose:
|
2021-04-02 11:21:40 +02:00
|
|
|
print(f">> running {args} {kw}")
|
2019-08-11 06:02:44 +02:00
|
|
|
|
2021-08-27 03:50:50 +02:00
|
|
|
cwd = cwd or self.cwd
|
2021-04-02 11:21:40 +02:00
|
|
|
if sys.platform == "win32":
|
2017-10-06 21:51:42 +02:00
|
|
|
# Partial fix for ScriptTest.run using `shell=True` on Windows.
|
2021-04-02 11:21:40 +02:00
|
|
|
args = [str(a).replace("^", "^^").replace("&", "^&") for a in args]
|
2019-03-16 11:49:42 +01:00
|
|
|
|
2020-04-29 05:49:18 +02:00
|
|
|
if allow_error:
|
2021-04-02 11:21:40 +02:00
|
|
|
kw["expect_error"] = True
|
2019-03-16 11:49:42 +01:00
|
|
|
|
2019-08-11 06:02:44 +02:00
|
|
|
# Propagate default values.
|
2021-04-02 11:21:40 +02:00
|
|
|
expect_error = kw.get("expect_error")
|
2019-08-11 06:02:44 +02:00
|
|
|
if expect_error:
|
2019-03-16 11:49:42 +01:00
|
|
|
# Then default to allowing logged errors.
|
2019-03-21 13:22:07 +01:00
|
|
|
if allow_stderr_error is not None and not allow_stderr_error:
|
|
|
|
raise RuntimeError(
|
2021-07-23 21:55:14 +02:00
|
|
|
"cannot pass allow_stderr_error=False with expect_error=True"
|
2019-03-21 13:22:07 +01:00
|
|
|
)
|
2019-03-18 19:38:48 +01:00
|
|
|
allow_stderr_error = True
|
2019-08-11 06:02:44 +02:00
|
|
|
|
2021-04-02 11:21:40 +02:00
|
|
|
elif kw.get("expect_stderr"):
|
2019-03-20 20:48:40 +01:00
|
|
|
# Then default to allowing logged warnings.
|
2019-03-21 13:22:07 +01:00
|
|
|
if allow_stderr_warning is not None and not allow_stderr_warning:
|
|
|
|
raise RuntimeError(
|
2021-07-23 21:55:14 +02:00
|
|
|
"cannot pass allow_stderr_warning=False with expect_stderr=True"
|
2019-03-21 13:22:07 +01:00
|
|
|
)
|
2019-03-20 20:48:40 +01:00
|
|
|
allow_stderr_warning = True
|
2019-03-16 11:49:42 +01:00
|
|
|
|
2019-08-11 06:02:44 +02:00
|
|
|
if allow_stderr_error:
|
|
|
|
if allow_stderr_warning is not None and not allow_stderr_warning:
|
|
|
|
raise RuntimeError(
|
2021-04-02 11:21:40 +02:00
|
|
|
"cannot pass allow_stderr_warning=False with "
|
|
|
|
"allow_stderr_error=True"
|
2019-08-11 06:02:44 +02:00
|
|
|
)
|
|
|
|
|
|
|
|
# Default values if not set.
|
|
|
|
if allow_stderr_error is None:
|
|
|
|
allow_stderr_error = False
|
|
|
|
if allow_stderr_warning is None:
|
|
|
|
allow_stderr_warning = allow_stderr_error
|
|
|
|
|
2019-03-16 11:49:42 +01:00
|
|
|
# Pass expect_stderr=True to allow any stderr. We do this because
|
|
|
|
# we do our checking of stderr further on in check_stderr().
|
2021-04-02 11:21:40 +02:00
|
|
|
kw["expect_stderr"] = True
|
2020-12-25 00:00:05 +01:00
|
|
|
result = super().run(cwd=cwd, *args, **kw)
|
2019-03-16 11:49:42 +01:00
|
|
|
|
2020-04-29 05:49:18 +02:00
|
|
|
if expect_error and not allow_error:
|
2019-08-11 06:02:44 +02:00
|
|
|
if result.returncode == 0:
|
|
|
|
__tracebackhide__ = True
|
|
|
|
raise AssertionError("Script passed unexpectedly.")
|
|
|
|
|
|
|
|
_check_stderr(
|
2021-04-02 11:21:40 +02:00
|
|
|
result.stderr,
|
|
|
|
allow_stderr_error=allow_stderr_error,
|
2019-03-18 19:38:48 +01:00
|
|
|
allow_stderr_warning=allow_stderr_warning,
|
2014-01-28 15:17:51 +01:00
|
|
|
)
|
2010-04-28 22:55:10 +02:00
|
|
|
|
2019-03-16 11:49:42 +01:00
|
|
|
return TestPipResult(result, verbose=self.verbose)
|
|
|
|
|
2021-01-01 19:55:24 +01:00
|
|
|
def pip(self, *args, use_module=True, **kwargs):
|
2019-08-11 06:02:44 +02:00
|
|
|
__tracebackhide__ = True
|
2019-03-20 14:35:28 +01:00
|
|
|
if self.pip_expect_warning:
|
2021-04-02 11:21:40 +02:00
|
|
|
kwargs["allow_stderr_warning"] = True
|
2021-01-01 19:55:24 +01:00
|
|
|
if use_module:
|
2021-04-02 11:21:40 +02:00
|
|
|
exe = "python"
|
|
|
|
args = ("-m", "pip") + args
|
2017-10-06 21:51:42 +02:00
|
|
|
else:
|
2021-04-02 11:21:40 +02:00
|
|
|
exe = "pip"
|
2017-10-06 21:51:42 +02:00
|
|
|
return self.run(exe, *args, **kwargs)
|
2010-04-15 13:01:36 +02:00
|
|
|
|
2013-08-21 11:16:07 +02:00
|
|
|
def pip_install_local(self, *args, **kwargs):
    """Run ``pip install`` restricted to the bundled test package index
    (no network access; packages come from DATA_DIR/packages)."""
    find_links = path_to_url(os.path.join(DATA_DIR, "packages"))
    base_args = ("install", "--no-index", "--find-links", find_links)
    return self.pip(*base_args, *args, **kwargs)
|
2008-10-16 00:02:57 +02:00
|
|
|
|
2018-10-08 18:09:53 +02:00
|
|
|
def easy_install(self, *args, **kwargs):
    """Run easy_install through this environment's interpreter."""
    return self.run("python", "-m", "easy_install", *args, **kwargs)
|
2018-10-08 18:09:53 +02:00
|
|
|
|
2021-07-10 02:38:41 +02:00
|
|
|
def assert_installed(self, **kwargs):
    """Assert that every ``name=version`` pair given is installed,
    comparing canonicalized project names against ``pip list``."""
    listing = json.loads(self.pip("list", "--format=json").stdout)
    installed = {
        (canonicalize_name(entry["name"]), entry["version"]) for entry in listing
    }
    expected = {
        (canonicalize_name(project), version) for project, version in kwargs.items()
    }
    assert expected <= installed, "{!r} not all in {!r}".format(expected, installed)
|
|
|
|
|
|
|
|
def assert_not_installed(self, *args):
    """Assert that none of the given project names appears in
    ``pip list`` (canonicalized comparison)."""
    listing = json.loads(self.pip("list", "--format=json").stdout)
    installed = {canonicalize_name(entry["name"]) for entry in listing}
    expected = {canonicalize_name(project) for project in args}
    # None of the given names should be listed as installed, i.e. their
    # intersection should be empty.
    assert not (expected & installed), "{!r} contained in {!r}".format(
        expected, installed
    )
|
|
|
|
|
2010-06-03 04:25:26 +02:00
|
|
|
|
2009-04-01 00:17:08 +02:00
|
|
|
# FIXME ScriptTest does something similar, but only within a single
|
|
|
|
# ProcResult; this generalizes it so states can be compared across
|
|
|
|
# multiple commands. Maybe should be rolled into ScriptTest?
|
2009-04-06 19:59:20 +02:00
|
|
|
def diff_states(start, end, ignore=None):
    """
    Compare two "filesystem states" (dicts mapping path -> FoundFile /
    FoundDir) and report what changed between them.

    Returns a dict with three keys:

    ``deleted``
        Entries present only in ``start``.

    ``created``
        Entries present only in ``end``.

    ``updated``
        Entries in both whose ``size`` differs.  (Size is an imperfect
        proxy: FoundFile.bytes is lazy so contents can't be compared,
        and mtime can't tell us whether a file was restored to an
        earlier state.)

    Paths equal to, or nested below, any entry of ``ignore`` are
    excluded from the comparison.  Only presence/absence and size are
    considered; mtime and other attributes are ignored.
    """
    ignored_prefixes = ignore or []

    def is_ignored(path):
        # Ignore a path when it equals an ignored entry, or lives below
        # one (directory-style prefix match).
        for prefix in ignored_prefixes:
            if path == prefix:
                return True
            if path.startswith(prefix.rstrip(os.path.sep) + os.path.sep):
                return True
        return False

    before = {k for k in start if not is_ignored(k)}
    after = {k for k in end if not is_ignored(k)}

    deleted = {k: start[k] for k in before - after}
    created = {k: end[k] for k in after - before}
    updated = {k: end[k] for k in before & after if start[k].size != end[k].size}

    return dict(deleted=deleted, created=created, updated=updated)
|
|
|
|
|
2010-06-03 04:25:26 +02:00
|
|
|
|
|
|
|
def assert_all_changes(start_state, end_state, expected_changes):
    """
    Fail unless the only filesystem differences between the two states
    fall under ``expected_changes``.

    ``start_state`` is either a dict mapping paths to
    scripttest.[FoundFile|FoundDir] objects or a TestPipResult whose
    ``files_before`` we'll test.  ``end_state`` is either a similar dict
    or a TestPipResult whose ``files_after`` we'll test.

    Note: listing a directory means anything below that directory can be
    expected to have changed.

    Returns the (expected-empty) diff for further inspection.
    """
    __tracebackhide__ = True

    start_files = (
        start_state.files_before
        if isinstance(start_state, TestPipResult)
        else start_state
    )
    end_files = (
        end_state.files_after if isinstance(end_state, TestPipResult) else end_state
    )

    diff = diff_states(start_files, end_files, ignore=expected_changes)
    if any(diff.values()):
        summary = [k + ": " + ", ".join(v.keys()) for k, v in diff.items()]
        raise TestFailure("Unexpected changes:\n" + "\n".join(summary))

    # Don't throw away this potentially useful information
    return diff
|
|
|
|
|
2014-01-28 15:17:51 +01:00
|
|
|
|
2018-10-23 02:38:34 +02:00
|
|
|
def _create_main_file(dir_path, name=None, output=None):
|
|
|
|
"""
|
|
|
|
Create a module with a main() function that prints the given output.
|
|
|
|
"""
|
|
|
|
if name is None:
|
2021-04-02 11:21:40 +02:00
|
|
|
name = "version_pkg"
|
2018-10-23 02:38:34 +02:00
|
|
|
if output is None:
|
2021-04-02 11:21:40 +02:00
|
|
|
output = "0.1"
|
|
|
|
text = textwrap.dedent(
|
2021-04-02 12:03:39 +02:00
|
|
|
f"""
|
|
|
|
def main():
|
|
|
|
print({output!r})
|
|
|
|
"""
|
2021-04-02 11:21:40 +02:00
|
|
|
)
|
|
|
|
filename = f"{name}.py"
|
2019-07-02 07:00:32 +02:00
|
|
|
dir_path.joinpath(filename).write_text(text)
|
2018-10-23 02:38:34 +02:00
|
|
|
|
|
|
|
|
2020-01-04 01:19:18 +01:00
|
|
|
def _git_commit(
|
2020-01-04 01:21:36 +01:00
|
|
|
env_or_script,
|
|
|
|
repo_dir,
|
|
|
|
message=None,
|
|
|
|
allow_empty=False,
|
|
|
|
stage_modified=False,
|
2020-01-04 01:19:18 +01:00
|
|
|
):
|
2018-11-12 09:13:00 +01:00
|
|
|
"""
|
|
|
|
Run git-commit.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
env_or_script: pytest's `script` or `env` argument.
|
|
|
|
repo_dir: a path to a Git repository.
|
|
|
|
message: an optional commit message.
|
|
|
|
"""
|
|
|
|
if message is None:
|
2021-04-02 11:21:40 +02:00
|
|
|
message = "test commit"
|
2020-01-04 01:22:31 +01:00
|
|
|
|
|
|
|
args = []
|
2018-11-12 09:13:00 +01:00
|
|
|
|
2020-01-04 01:19:18 +01:00
|
|
|
if allow_empty:
|
|
|
|
args.append("--allow-empty")
|
|
|
|
|
2020-01-04 01:21:36 +01:00
|
|
|
if stage_modified:
|
|
|
|
args.append("--all")
|
|
|
|
|
2018-11-12 09:13:00 +01:00
|
|
|
new_args = [
|
2021-04-02 11:21:40 +02:00
|
|
|
"git",
|
|
|
|
"commit",
|
|
|
|
"-q",
|
|
|
|
"--author",
|
|
|
|
"pip <distutils-sig@python.org>",
|
2018-11-12 09:13:00 +01:00
|
|
|
]
|
|
|
|
new_args.extend(args)
|
2021-04-02 11:21:40 +02:00
|
|
|
new_args.extend(["-m", message])
|
2020-01-04 01:13:08 +01:00
|
|
|
env_or_script.run(*new_args, cwd=repo_dir)
|
2018-11-12 09:13:00 +01:00
|
|
|
|
|
|
|
|
2021-04-02 11:21:40 +02:00
|
|
|
def _vcs_add(script, version_pkg_path, vcs="git"):
    """
    Put ``version_pkg_path`` under version control and make an initial
    commit using the given ``vcs`` ("git", "hg", "svn" or "bazaar").

    Returns the working-copy path: the original ``version_pkg_path`` for
    git/hg/bazaar, or a fresh svn checkout under the scratch dir for svn.

    Raises ValueError for an unknown ``vcs``.
    """
    if vcs == "git":
        script.run("git", "init", cwd=version_pkg_path)
        script.run("git", "add", ".", cwd=version_pkg_path)
        _git_commit(script, version_pkg_path, message="initial version")
    elif vcs == "hg":
        script.run("hg", "init", cwd=version_pkg_path)
        script.run("hg", "add", ".", cwd=version_pkg_path)
        script.run(
            "hg",
            "commit",
            "-q",
            "--user",
            "pip <distutils-sig@python.org>",
            "-m",
            "initial version",
            cwd=version_pkg_path,
        )
    elif vcs == "svn":
        # Import the tree into a new local repository, then check out a
        # working copy in the scratch dir; that checkout is what callers
        # get back, not the original path.
        repo_url = _create_svn_repo(script, version_pkg_path)
        script.run(
            "svn", "checkout", repo_url, "pip-test-package", cwd=script.scratch_path
        )
        checkout_path = script.scratch_path / "pip-test-package"

        # svn internally stores windows drives as uppercase; we'll match that.
        checkout_path = checkout_path.replace("c:", "C:")

        version_pkg_path = checkout_path
    elif vcs == "bazaar":
        script.run("bzr", "init", cwd=version_pkg_path)
        script.run("bzr", "add", ".", cwd=version_pkg_path)
        # bzr needs a configured identity before it will commit.
        script.run(
            "bzr", "whoami", "pip <distutils-sig@python.org>", cwd=version_pkg_path
        )
        script.run(
            "bzr",
            "commit",
            "-q",
            "--author",
            "pip <distutils-sig@python.org>",
            "-m",
            "initial version",
            cwd=version_pkg_path,
        )
    else:
        raise ValueError(f"Unknown vcs: {vcs}")
    return version_pkg_path
|
|
|
|
|
|
|
|
|
2013-09-27 20:42:05 +02:00
|
|
|
def _create_test_package_with_subdirectory(script, subdirectory):
    """
    Create a git-tracked "version_pkg" project in the scratch dir that
    also contains a second installable project under ``subdirectory``;
    return the repository path.
    """
    root = script.scratch_path / "version_pkg"
    root.mkdir()
    _create_main_file(root, name="version_pkg", output="0.1")
    root.joinpath("setup.py").write_text(
        textwrap.dedent(
            """
            from setuptools import setup, find_packages

            setup(
                name="version_pkg",
                version="0.1",
                packages=find_packages(),
                py_modules=["version_pkg"],
                entry_points=dict(console_scripts=["version_pkg=version_pkg:main"]),
            )
            """
        )
    )

    subdir = root.joinpath(subdirectory)
    subdir.mkdir()
    _create_main_file(subdir, name="version_subpkg", output="0.1")
    subdir.joinpath("setup.py").write_text(
        textwrap.dedent(
            """
            from setuptools import find_packages, setup

            setup(
                name="version_subpkg",
                version="0.1",
                packages=find_packages(),
                py_modules=["version_subpkg"],
                entry_points=dict(console_scripts=["version_pkg=version_subpkg:main"]),
            )
            """
        )
    )

    script.run("git", "init", cwd=root)
    script.run("git", "add", ".", cwd=root)
    _git_commit(script, root, message="initial version")

    return root
|
2010-06-03 04:25:26 +02:00
|
|
|
|
2014-01-28 15:17:51 +01:00
|
|
|
|
2021-04-02 11:21:40 +02:00
|
|
|
def _create_test_package_with_srcdir(script, name="version_pkg", vcs="git"):
    """
    Create a VCS-tracked project whose setup.py lives in ``subdir`` and
    whose package sources live under ``subdir/src/pkg`` (a src-layout);
    return the working-copy path from ``_vcs_add``.
    """
    root = script.scratch_path / name
    root.mkdir()
    subdir = root.joinpath("subdir")
    subdir.mkdir()
    src = subdir.joinpath("src")
    src.mkdir()
    pkg = src.joinpath("pkg")
    pkg.mkdir()
    pkg.joinpath("__init__.py").write_text("")
    setup_py = textwrap.dedent(
        """
        from setuptools import setup, find_packages
        setup(
            name="{name}",
            version="0.1",
            packages=find_packages(),
            package_dir={{"": "src"}},
        )
        """
    ).format(name=name)
    subdir.joinpath("setup.py").write_text(setup_py)
    return _vcs_add(script, root, vcs)
|
|
|
|
|
|
|
|
|
2021-04-02 11:21:40 +02:00
|
|
|
def _create_test_package(script, name="version_pkg", vcs="git"):
    """
    Create a minimal VCS-tracked installable project named ``name``
    exposing a same-named console script; return the working-copy path
    from ``_vcs_add``.
    """
    root = script.scratch_path / name
    root.mkdir()
    _create_main_file(root, name=name, output="0.1")
    setup_py = textwrap.dedent(
        """
        from setuptools import setup, find_packages
        setup(
            name="{name}",
            version="0.1",
            packages=find_packages(),
            py_modules=["{name}"],
            entry_points=dict(console_scripts=["{name}={name}:main"]),
        )
        """
    ).format(name=name)
    root.joinpath("setup.py").write_text(setup_py)
    return _vcs_add(script, root, vcs)
|
|
|
|
|
|
|
|
|
2015-03-14 20:47:55 +01:00
|
|
|
def _create_svn_repo(script, version_pkg_path):
    """
    Create a local svn repository in the scratch dir and import
    ``version_pkg_path`` as its trunk; return the trunk URL.
    """
    repo_url = path_to_url(script.scratch_path / "pip-test-package-repo" / "trunk")
    script.run("svnadmin", "create", "pip-test-package-repo", cwd=script.scratch_path)
    import_cmd = (
        "svn",
        "import",
        version_pkg_path,
        repo_url,
        "-m",
        "Initial import of pip-test-package",
    )
    script.run(*import_cmd, cwd=script.scratch_path)
    return repo_url
|
|
|
|
|
|
|
|
|
2013-08-21 11:16:07 +02:00
|
|
|
def _change_test_package_version(script, version_pkg_path):
    """
    Rewrite version_pkg's main module to print a different version
    string and commit the change (staging it via ``--all``, i.e. the
    equivalent of ``git commit -a``).
    """
    _create_main_file(
        version_pkg_path,
        name="version_pkg",
        output="some different version",
    )
    _git_commit(
        script,
        version_pkg_path,
        message="messed version",
        stage_modified=True,
    )
|
2010-08-05 16:08:25 +02:00
|
|
|
|
2010-06-03 04:25:26 +02:00
|
|
|
|
Add checks against requirements-file-dwelling hashes for most kinds of packages. Close #1175.
* Add --require-hashes option. This is handy in deployment scripts to force application authors to hash their requirements. It is also a convenient way to get pip to show computed hashes for a virgin, unhashed requirements file. Eventually, additions to `pip freeze` should fill a superset of this use case.
* In --require-hashes mode, at least one hash is required to match for each requirement.
* Option-based requirements (--sha256=...) turn on --require-hashes mode implicitly.
* Internet-derived URL-based hashes are "necessary but not sufficient": they do not satisfy --require-hashes mode when they match, but they are still used to guard against transmission errors.
* Other URL-based requirements (#md5=...) are treated just like flag-based ones, except they don't turn on --require-hashes.
* Complain informatively, with the most devastating errors first so you don't chase your tail all day only to run up against a brick wall at the end. This also means we don't complain that a hash is missing, only for the user to find, after fixing it, that we have no idea how to even compute a hash for that type of requirement.
* Complain about unpinned requirements when hash-checking mode is on, lest they cause the user surprise later.
* Complain about missing hashes.
* Complain about requirement types we don't know how to hash (like VCS ones and local dirs).
* Have InstallRequirement keep its original Link around (original_link) so we can differentiate between URL hashes from requirements files and ones downloaded from the (untrustworthy) internet.
* Remove test_download_hashes, which is obsolete. Similar coverage is provided in test_utils.TestHashes and the various hash cases in test_req.py.
2015-09-09 19:01:53 +02:00
|
|
|
@contextmanager
def requirements_file(contents, tmpdir):
    """Return a Path to a requirements file of given contents.

    As long as the context manager is open, the requirements file will
    exist; it is removed on exit even if the with-block raises.

    :param contents: the text to write into the file
    :param tmpdir: A Path to the folder in which to create the file
    """
    path = tmpdir / "reqs.txt"
    path.write_text(contents)
    try:
        yield path
    finally:
        # Clean up even on error so a failing test doesn't leak the file
        # into the next test's tmpdir.
        path.unlink()
|
2016-11-04 15:15:47 +01:00
|
|
|
|
|
|
|
|
|
|
|
def create_test_package_with_setup(script, **setup_kwargs):
    """
    Create a source project in the scratch dir whose setup.py calls
    ``setup(**setup_kwargs)``.  A ``name`` kwarg is required; the
    project directory is named after it.  Returns the project path.
    """
    assert "name" in setup_kwargs, setup_kwargs
    project_dir = script.scratch_path / setup_kwargs["name"]
    project_dir.mkdir()
    setup_py = textwrap.dedent(
        f"""
        from setuptools import setup
        kwargs = {setup_kwargs!r}
        setup(**kwargs)
        """
    )
    project_dir.joinpath("setup.py").write_text(setup_py)
    return project_dir
|
2017-08-07 18:03:43 +02:00
|
|
|
|
|
|
|
|
2021-08-08 02:14:17 +02:00
|
|
|
def urlsafe_b64encode_nopad(data: bytes) -> str:
    """URL-safe base64-encode ``data`` without trailing ``=`` padding."""
    encoded = urlsafe_b64encode(data).decode("ascii")
    return encoded.rstrip("=")
|
|
|
|
|
|
|
|
|
2021-08-08 02:14:17 +02:00
|
|
|
def create_really_basic_wheel(name: str, version: str) -> bytes:
    """
    Build a minimal wheel in memory (only WHEEL, METADATA and RECORD
    members) and return the archive's bytes.
    """
    dist_info = f"{name}-{version}.dist-info"
    record_path = f"{dist_info}/RECORD"
    # RECORD lists itself with empty hash/size fields, like real tools.
    records = [(record_path, "", "")]

    def _digest(payload):
        return "sha256={}".format(urlsafe_b64encode_nopad(sha256(payload).digest()))

    def _add(zf, path, text):
        payload = text.encode("utf-8")
        zf.writestr(path, payload)
        records.append((path, _digest(payload), str(len(payload))))

    buf = BytesIO()
    with ZipFile(buf, "w") as zf:
        _add(zf, f"{dist_info}/WHEEL", "Wheel-Version: 1.0")
        metadata = dedent(
            """\
            Metadata-Version: 2.1
            Name: {}
            Version: {}
            """.format(
                name, version
            )
        )
        _add(zf, f"{dist_info}/METADATA", metadata)
        zf.writestr(record_path, "\n".join(",".join(row) for row in records))
    buf.seek(0)
    return buf.read()
|
|
|
|
|
|
|
|
|
2019-12-21 04:38:12 +01:00
|
|
|
def create_basic_wheel_for_package(
    script,
    name,
    version,
    depends=None,
    extras=None,
    requires_python=None,
    extra_files=None,
):
    """
    Build a simple py2.py3-none-any wheel for ``name`` in the scratch dir.

    Args:
        script: the test environment (provides ``scratch_path``).
        name: project name; runs of non-alphanumeric characters are
            normalized to ``_`` per PEP 491.
        version: the project version string.
        depends: optional list of Requires-Dist entries.
        extras: optional mapping of extra name -> list of packages.
        requires_python: optional Requires-Python specifier.
        extra_files: optional mapping of archive path -> file contents.

    Returns:
        The path of the generated wheel archive.
    """
    if depends is None:
        depends = []
    if extras is None:
        extras = {}
    # Copy so we never mutate the caller's dict when injecting the
    # package __init__.py below.
    extra_files = dict(extra_files) if extra_files else {}

    # Fix wheel distribution name by replacing runs of non-alphanumeric
    # characters with an underscore _ as per PEP 491.
    # The fourth positional argument of re.sub is `count`, not `flags`,
    # so re.UNICODE must be passed as a keyword argument.
    name = re.sub(r"[^\w\d.]+", "_", name, flags=re.UNICODE)
    archive_name = f"{name}-{version}-py2.py3-none-any.whl"
    archive_path = script.scratch_path / archive_name

    package_init_py = f"{name}/__init__.py"
    assert package_init_py not in extra_files
    extra_files[package_init_py] = textwrap.dedent(
        """
        __version__ = {version!r}
        def hello():
            return "Hello From {name}"
        """,
    ).format(version=version, name=name)

    # One Requires-Dist entry per extra package, conditional on the extra.
    requires_dist = depends + [
        f'{package}; extra == "{extra}"'
        for extra, packages in extras.items()
        for package in packages
    ]

    metadata_updates = {
        "Provides-Extra": list(extras),
        "Requires-Dist": requires_dist,
    }
    if requires_python is not None:
        metadata_updates["Requires-Python"] = requires_python

    wheel_builder = make_wheel(
        name=name,
        version=version,
        wheel_metadata_updates={"Tag": ["py2-none-any", "py3-none-any"]},
        metadata_updates=metadata_updates,
        extra_metadata_files={"top_level.txt": name},
        extra_files=extra_files,
        # Have an empty RECORD because we don't want to be checking hashes.
        record="",
    )
    wheel_builder.save_to(archive_path)

    return archive_path
|
2017-10-06 21:51:42 +02:00
|
|
|
|
|
|
|
|
2021-04-02 11:21:40 +02:00
|
|
|
def create_basic_sdist_for_package(script, name, version, extra_files=None):
    """
    Build ``<name>-<version>.tar.gz`` in the scratch dir containing a
    minimal setup.py plus any ``extra_files``; return the archive path.
    The environment's temp_path is used as a staging area and emptied
    afterwards.
    """
    files = {
        "setup.py": """
            from setuptools import find_packages, setup
            setup(name={name!r}, version={version!r})
        """,
    }

    # Some useful shorthands
    archive_name = "{name}-{version}.tar.gz".format(name=name, version=version)

    # Render the templates: keys and contents may reference name/version.
    rendered = {}
    for key, template in files.items():
        rendered[key.format(name=name)] = (
            textwrap.dedent(template).format(name=name, version=version).strip()
        )
    files = rendered

    # Add new files after formatting
    if extra_files:
        files.update(extra_files)

    # Stage everything under temp_path.
    for fname, contents in files.items():
        path = script.temp_path / fname
        path.parent.mkdir(exist_ok=True, parents=True)
        path.write_bytes(contents.encode("utf-8"))

    retval = script.scratch_path / archive_name
    generated = shutil.make_archive(
        retval,
        "gztar",
        root_dir=script.temp_path,
        base_dir=os.curdir,
    )
    # make_archive appends its own extension; move to the exact name.
    shutil.move(generated, retval)

    # Leave temp_path empty for the next use.
    shutil.rmtree(script.temp_path)
    script.temp_path.mkdir()

    return retval
|
|
|
|
|
|
|
|
|
2017-10-06 21:51:42 +02:00
|
|
|
def need_executable(name, check_cmd):
    """
    Decorator factory: probe for an executable by running ``check_cmd``
    and skip-mark the decorated test when the probe fails (missing
    binary or non-zero exit).
    """

    def decorate(fn):
        try:
            subprocess.check_output(check_cmd)
        except (OSError, subprocess.CalledProcessError):
            reason = f"{name} is not available"
            return pytest.mark.skip(reason=reason)(fn)
        return fn

    return decorate
|
|
|
|
|
|
|
|
|
2019-01-21 23:42:41 +01:00
|
|
|
def is_bzr_installed():
    """Return True if a working ``bzr`` executable is on PATH."""
    try:
        subprocess.check_output(("bzr", "version", "--short"))
    except (OSError, subprocess.CalledProcessError):
        # Treat a present-but-broken bzr the same as a missing one,
        # matching need_executable()'s behavior.
        return False
    return True
|
|
|
|
|
|
|
|
|
2019-04-17 09:34:19 +02:00
|
|
|
def is_svn_installed():
    """Return True if a working ``svn`` executable is on PATH."""
    try:
        subprocess.check_output(("svn", "--version"))
    except (OSError, subprocess.CalledProcessError):
        # Treat a present-but-broken svn the same as a missing one,
        # matching need_executable()'s behavior.
        return False
    return True
|
|
|
|
|
|
|
|
|
2017-10-06 21:51:42 +02:00
|
|
|
def need_bzr(fn):
    """Skip-mark ``fn`` unless Bazaar is available; also tag it ``bzr``."""
    checked = need_executable("Bazaar", ("bzr", "version", "--short"))(fn)
    return pytest.mark.bzr(checked)
|
2017-10-06 21:51:42 +02:00
|
|
|
|
|
|
|
|
2019-10-14 09:26:10 +02:00
|
|
|
def need_svn(fn):
    """Skip-mark ``fn`` unless both ``svn`` and ``svnadmin`` are
    available; also tag it ``svn``."""
    checked = need_executable("Subversion Admin", ("svnadmin", "--version"))(fn)
    checked = need_executable("Subversion", ("svn", "--version"))(checked)
    return pytest.mark.svn(checked)
|
2019-10-14 09:26:10 +02:00
|
|
|
|
|
|
|
|
2017-10-06 21:51:42 +02:00
|
|
|
def need_mercurial(fn):
    """Skip-mark ``fn`` unless Mercurial (``hg``) is available; also tag
    it ``mercurial``."""
    checked = need_executable("Mercurial", ("hg", "version"))(fn)
    return pytest.mark.mercurial(checked)
|