From cc9f03dea92af21a5446141c0030658b1e9b8b86 Mon Sep 17 00:00:00 2001 From: Chris Jerdonek Date: Sat, 20 Oct 2018 21:25:07 -0700 Subject: [PATCH 01/69] Rename get_branch() to get_current_branch(). --- src/pip/_internal/vcs/git.py | 4 ++-- tests/functional/test_vcs_git.py | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/pip/_internal/vcs/git.py b/src/pip/_internal/vcs/git.py index 65722ceab..dc80a0532 100644 --- a/src/pip/_internal/vcs/git.py +++ b/src/pip/_internal/vcs/git.py @@ -79,7 +79,7 @@ class Git(VersionControl): version = '.'.join(version.split('.')[:3]) return parse_version(version) - def get_branch(self, location): + def get_current_branch(self, location): """ Return the current branch, or None if HEAD isn't at a branch (e.g. detached HEAD). @@ -210,7 +210,7 @@ class Git(VersionControl): if not self.is_commit_id_equal(dest, rev_options.rev): cmd_args = ['checkout', '-q'] + rev_options.to_args() self.run_command(cmd_args, cwd=dest) - elif self.get_branch(dest) != branch_name: + elif self.get_current_branch(dest) != branch_name: # Then a specific branch was requested, and that branch # is not yet checked out. track_branch = 'origin/{}'.format(branch_name) diff --git a/tests/functional/test_vcs_git.py b/tests/functional/test_vcs_git.py index be9020e21..9a51bf594 100644 --- a/tests/functional/test_vcs_git.py +++ b/tests/functional/test_vcs_git.py @@ -83,14 +83,14 @@ def test_get_remote_url(script, tmpdir): assert remote_url == source_url -def test_get_branch(script): +def test_get_current_branch(script): repo_dir = str(script.scratch_path) script.run('git', 'init', cwd=repo_dir) sha = do_commit(script, repo_dir) git = Git() - assert git.get_branch(repo_dir) == 'master' + assert git.get_current_branch(repo_dir) == 'master' # Switch to a branch with the same SHA as "master" but whose name # is alphabetically after. @@ -98,11 +98,11 @@ def test_get_branch(script): 'git', 'checkout', '-b', 'release', cwd=repo_dir, expect_stderr=True, ) - assert git.get_branch(repo_dir) == 'release' + assert git.get_current_branch(repo_dir) == 'release' # Also test the detached HEAD case. script.run('git', 'checkout', sha, cwd=repo_dir, expect_stderr=True) - assert git.get_branch(repo_dir) is None + assert git.get_current_branch(repo_dir) is None def test_get_revision_sha(script): From 9693bbec5adc465bfd6fdb5f31c83ddfd92374fa Mon Sep 17 00:00:00 2001 From: Chris Jerdonek Date: Sat, 20 Oct 2018 21:19:26 -0700 Subject: [PATCH 02/69] Add failing test. --- tests/functional/test_vcs_git.py | 37 +++++++++++++++++++++++++++----- 1 file changed, 32 insertions(+), 5 deletions(-) diff --git a/tests/functional/test_vcs_git.py b/tests/functional/test_vcs_git.py index 9a51bf594..6dad082f7 100644 --- a/tests/functional/test_vcs_git.py +++ b/tests/functional/test_vcs_git.py @@ -16,6 +16,16 @@ def get_head_sha(script, dest): return sha +def checkout_ref(script, repo_dir, ref): + script.run('git', 'checkout', ref, cwd=repo_dir, expect_stderr=True) + + +def checkout_new_branch(script, repo_dir, branch): + script.run( + 'git', 'checkout', '-b', branch, cwd=repo_dir, expect_stderr=True, + ) + + def do_commit(script, dest): _git_commit(script, dest, message='test commit', args=['--allow-empty']) return get_head_sha(script, dest) @@ -94,14 +104,31 @@ def test_get_current_branch(script): # Switch to a branch with the same SHA as "master" but whose name # is alphabetically after. 
- script.run( - 'git', 'checkout', '-b', 'release', cwd=repo_dir, - expect_stderr=True, - ) + checkout_new_branch(script, repo_dir, 'release') assert git.get_current_branch(repo_dir) == 'release' # Also test the detached HEAD case. - script.run('git', 'checkout', sha, cwd=repo_dir, expect_stderr=True) + checkout_ref(script, repo_dir, sha) + assert git.get_current_branch(repo_dir) is None + + +def test_get_current_branch__branch_and_tag_same_name(script, tmpdir): + """ + Check calling get_current_branch() from a branch or tag when the branch + and tag have the same name. + """ + repo_dir = str(tmpdir) + script.run('git', 'init', cwd=repo_dir) + do_commit(script, repo_dir) + checkout_new_branch(script, repo_dir, 'dev') + # Create a tag with the same name as the branch. + script.run('git', 'tag', 'dev', cwd=repo_dir) + + git = Git() + assert git.get_current_branch(repo_dir) == 'dev' + + # Now try with the tag checked out. + checkout_ref(script, repo_dir, 'refs/tags/dev') assert git.get_current_branch(repo_dir) is None From 1e903eab5ad7349812af2748b3cd779128837770 Mon Sep 17 00:00:00 2001 From: Chris Jerdonek Date: Tue, 23 Oct 2018 23:40:48 -0700 Subject: [PATCH 03/69] Improve branch detection to work if a tag exists with the same name. --- src/pip/_internal/utils/misc.py | 7 ++++++- src/pip/_internal/vcs/__init__.py | 9 ++++++--- src/pip/_internal/vcs/git.py | 16 ++++++++++------ 3 files changed, 22 insertions(+), 10 deletions(-) diff --git a/src/pip/_internal/utils/misc.py b/src/pip/_internal/utils/misc.py index c1faee394..fdd7de908 100644 --- a/src/pip/_internal/utils/misc.py +++ b/src/pip/_internal/utils/misc.py @@ -653,6 +653,7 @@ def call_subprocess( show_stdout=True, # type: bool cwd=None, # type: Optional[str] on_returncode='raise', # type: str + returncodes=None, # type: Optional[Iterable[int]] command_desc=None, # type: Optional[str] extra_environ=None, # type: Optional[Mapping[str, Any]] unset_environ=None, # type: Optional[Iterable[str]] @@ -661,9 +662,13 @@ def call_subprocess( # type: (...) -> Optional[Text] """ Args: + returncodes: an iterable of integer return codes that are acceptable, + in addition to 0. Defaults to None, which means []. unset_environ: an iterable of environment variable names to unset prior to calling subprocess.Popen(). 
""" + if returncodes is None: + returncodes = [] if unset_environ is None: unset_environ = [] # This function's handling of subprocess output is confusing and I @@ -740,7 +745,7 @@ def call_subprocess( spinner.finish("error") else: spinner.finish("done") - if proc.returncode: + if proc.returncode and proc.returncode not in returncodes: if on_returncode == 'raise': if (logger.getEffectiveLevel() > std_logging.DEBUG and not show_stdout): diff --git a/src/pip/_internal/vcs/__init__.py b/src/pip/_internal/vcs/__init__.py index 7fc535261..a432c3868 100644 --- a/src/pip/_internal/vcs/__init__.py +++ b/src/pip/_internal/vcs/__init__.py @@ -17,7 +17,7 @@ from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: from typing import ( # noqa: F401 - Dict, Optional, Tuple, List, Type, Any, Mapping, Text + Any, Dict, Iterable, List, Mapping, Optional, Text, Tuple, Type ) from pip._internal.utils.ui import SpinnerInterface # noqa: F401 @@ -467,6 +467,7 @@ class VersionControl(object): show_stdout=True, # type: bool cwd=None, # type: Optional[str] on_returncode='raise', # type: str + returncodes=None, # type: Optional[Iterable[int]] command_desc=None, # type: Optional[str] extra_environ=None, # type: Optional[Mapping[str, Any]] spinner=None # type: Optional[SpinnerInterface] @@ -480,8 +481,10 @@ class VersionControl(object): cmd = [self.name] + cmd try: return call_subprocess(cmd, show_stdout, cwd, - on_returncode, - command_desc, extra_environ, + on_returncode=on_returncode, + returncodes=returncodes, + command_desc=command_desc, + extra_environ=extra_environ, unset_environ=self.unset_environ, spinner=spinner) except OSError as e: diff --git a/src/pip/_internal/vcs/git.py b/src/pip/_internal/vcs/git.py index dc80a0532..e32bba8c1 100644 --- a/src/pip/_internal/vcs/git.py +++ b/src/pip/_internal/vcs/git.py @@ -84,14 +84,18 @@ class Git(VersionControl): Return the current branch, or None if HEAD isn't at a branch (e.g. detached HEAD). """ - args = ['rev-parse', '--abbrev-ref', 'HEAD'] - output = self.run_command(args, show_stdout=False, cwd=location) - branch = output.strip() + # The -q causes the command to exit with status code 1 instead of + # 128 if "HEAD" is not a symbolic ref but a detached HEAD. + args = ['symbolic-ref', '-q', 'HEAD'] + output = self.run_command( + args, returncodes=(1, ), show_stdout=False, cwd=location, + ) + ref = output.strip() - if branch == 'HEAD': - return None + if ref.startswith('refs/heads/'): + return ref[len('refs/heads/'):] - return branch + return None def export(self, location): """Export the Git repository at the url to the destination location""" From 929c95833d2214631a8493c4822a78d88766f44d Mon Sep 17 00:00:00 2001 From: Chris Jerdonek Date: Thu, 20 Dec 2018 17:36:28 -0800 Subject: [PATCH 04/69] Address review comments. 
--- src/pip/_internal/utils/misc.py | 12 ++++++------ src/pip/_internal/vcs/__init__.py | 4 ++-- src/pip/_internal/vcs/git.py | 8 +++++--- 3 files changed, 13 insertions(+), 11 deletions(-) diff --git a/src/pip/_internal/utils/misc.py b/src/pip/_internal/utils/misc.py index fdd7de908..bb6e51517 100644 --- a/src/pip/_internal/utils/misc.py +++ b/src/pip/_internal/utils/misc.py @@ -653,7 +653,7 @@ def call_subprocess( show_stdout=True, # type: bool cwd=None, # type: Optional[str] on_returncode='raise', # type: str - returncodes=None, # type: Optional[Iterable[int]] + extra_ok_returncodes=None, # type: Optional[Iterable[int]] command_desc=None, # type: Optional[str] extra_environ=None, # type: Optional[Mapping[str, Any]] unset_environ=None, # type: Optional[Iterable[str]] @@ -662,13 +662,13 @@ def call_subprocess( # type: (...) -> Optional[Text] """ Args: - returncodes: an iterable of integer return codes that are acceptable, - in addition to 0. Defaults to None, which means []. + extra_ok_returncodes: an iterable of integer return codes that are + acceptable, in addition to 0. Defaults to None, which means []. unset_environ: an iterable of environment variable names to unset prior to calling subprocess.Popen(). """ - if returncodes is None: - returncodes = [] + if extra_ok_returncodes is None: + extra_ok_returncodes = [] if unset_environ is None: unset_environ = [] # This function's handling of subprocess output is confusing and I @@ -745,7 +745,7 @@ def call_subprocess( spinner.finish("error") else: spinner.finish("done") - if proc.returncode and proc.returncode not in returncodes: + if proc.returncode and proc.returncode not in extra_ok_returncodes: if on_returncode == 'raise': if (logger.getEffectiveLevel() > std_logging.DEBUG and not show_stdout): diff --git a/src/pip/_internal/vcs/__init__.py b/src/pip/_internal/vcs/__init__.py index a432c3868..c9353f088 100644 --- a/src/pip/_internal/vcs/__init__.py +++ b/src/pip/_internal/vcs/__init__.py @@ -467,7 +467,7 @@ class VersionControl(object): show_stdout=True, # type: bool cwd=None, # type: Optional[str] on_returncode='raise', # type: str - returncodes=None, # type: Optional[Iterable[int]] + extra_ok_returncodes=None, # type: Optional[Iterable[int]] command_desc=None, # type: Optional[str] extra_environ=None, # type: Optional[Mapping[str, Any]] spinner=None # type: Optional[SpinnerInterface] @@ -482,7 +482,7 @@ class VersionControl(object): try: return call_subprocess(cmd, show_stdout, cwd, on_returncode=on_returncode, - returncodes=returncodes, + extra_ok_returncodes=extra_ok_returncodes, command_desc=command_desc, extra_environ=extra_environ, unset_environ=self.unset_environ, diff --git a/src/pip/_internal/vcs/git.py b/src/pip/_internal/vcs/git.py index e32bba8c1..cae01610f 100644 --- a/src/pip/_internal/vcs/git.py +++ b/src/pip/_internal/vcs/git.py @@ -84,11 +84,13 @@ class Git(VersionControl): Return the current branch, or None if HEAD isn't at a branch (e.g. detached HEAD). """ - # The -q causes the command to exit with status code 1 instead of - # 128 if "HEAD" is not a symbolic ref but a detached HEAD. + # git-symbolic-ref exits with empty stdout if "HEAD" is a detached + # HEAD rather than a symbolic ref. In addition, the -q causes the + # command to exit with status code 1 instead of 128 in this case + # and to suppress the message to stderr. 
args = ['symbolic-ref', '-q', 'HEAD'] output = self.run_command( - args, returncodes=(1, ), show_stdout=False, cwd=location, + args, extra_ok_returncodes=(1, ), show_stdout=False, cwd=location, ) ref = output.strip() From 1764fbbdab78c5c59b4f8603a74b9d7bdf3127ce Mon Sep 17 00:00:00 2001 From: Chris Jerdonek Date: Thu, 20 Dec 2018 23:59:10 -0800 Subject: [PATCH 05/69] Add failing tests. --- src/pip/_internal/vcs/__init__.py | 4 ++++ tests/functional/test_freeze.py | 18 ++++++++++++++++++ tests/functional/test_vcs_git.py | 18 +++++++++++++++++- 3 files changed, 39 insertions(+), 1 deletion(-) diff --git a/src/pip/_internal/vcs/__init__.py b/src/pip/_internal/vcs/__init__.py index c9353f088..dc93028dc 100644 --- a/src/pip/_internal/vcs/__init__.py +++ b/src/pip/_internal/vcs/__init__.py @@ -29,6 +29,10 @@ __all__ = ['vcs'] logger = logging.getLogger(__name__) +class RemoteNotFoundError(Exception): + pass + + class RevOptions(object): """ diff --git a/tests/functional/test_freeze.py b/tests/functional/test_freeze.py index fa761e465..d8b0b9e68 100644 --- a/tests/functional/test_freeze.py +++ b/tests/functional/test_freeze.py @@ -137,6 +137,24 @@ def test_freeze_editable_not_vcs(script, tmpdir): _check_output(result.stdout, expected) +@pytest.mark.git +def test_freeze_editable_git_with_no_remote(script, tmpdir): + """ + Test an editable install that is not version controlled. + """ + pkg_path = _create_test_package(script) + script.pip('install', '-e', pkg_path) + result = script.pip('freeze') + + # We need to apply os.path.normcase() to the path since that is what + # the freeze code does. + expected = textwrap.dedent("""\ + ...# Editable, version-controlled with no remote (version-pkg==0.1) + -e {} + ...""".format(os.path.normcase(pkg_path))) + _check_output(result.stdout, expected) + + @pytest.mark.svn def test_freeze_svn(script, tmpdir): """Test freezing a svn checkout""" diff --git a/tests/functional/test_vcs_git.py b/tests/functional/test_vcs_git.py index 6dad082f7..c6cada2c0 100644 --- a/tests/functional/test_vcs_git.py +++ b/tests/functional/test_vcs_git.py @@ -4,7 +4,9 @@ Contains functional tests of the Git class. import os -from pip._internal.vcs.git import Git +import pytest + +from pip._internal.vcs.git import Git, RemoteNotFoundError from tests.lib import _create_test_package, _git_commit, _test_path_to_file_url @@ -93,6 +95,20 @@ def test_get_remote_url(script, tmpdir): assert remote_url == source_url +def test_get_remote_url__no_remote(script, tmpdir): + """ + Test a repo with no remote. + """ + repo_dir = tmpdir / 'temp-repo' + repo_dir.mkdir() + repo_dir = str(repo_dir) + + script.run('git', 'init', cwd=repo_dir) + + with pytest.raises(RemoteNotFoundError): + Git().get_remote_url(repo_dir) + + def test_get_current_branch(script): repo_dir = str(script.scratch_path) From 526ac40e63734d78b195407c7bf3a19a87f396df Mon Sep 17 00:00:00 2001 From: Chris Jerdonek Date: Fri, 21 Dec 2018 00:22:32 -0800 Subject: [PATCH 06/69] Fix freeze to handle Git repos with no remote. --- news/4759.bugfix | 1 + src/pip/_internal/operations/freeze.py | 11 ++++++++++- src/pip/_internal/vcs/__init__.py | 3 +++ src/pip/_internal/vcs/git.py | 23 +++++++++++++++++------ tests/functional/test_freeze.py | 4 ++-- 5 files changed, 33 insertions(+), 9 deletions(-) create mode 100644 news/4759.bugfix diff --git a/news/4759.bugfix b/news/4759.bugfix new file mode 100644 index 000000000..405fbe8db --- /dev/null +++ b/news/4759.bugfix @@ -0,0 +1 @@ +Editable Git installs without a remote now freeze as editable. 
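The vcs/git.py hunk further down leans on a detail of git itself: "git config --get-regexp remote\..*\.url" exits with status 1 when no key matches, which is exactly the "repository has no remote" case, so that status is whitelisted via extra_ok_returncodes and an empty result becomes RemoteNotFoundError. A rough standalone illustration (the helper name is not pip's):

    import subprocess

    class RemoteNotFoundError(Exception):   # defined in pip._internal.vcs
        pass

    def first_remote_url(location):         # illustrative name, not pip's
        proc = subprocess.run(
            ['git', 'config', '--get-regexp', r'remote\..*\.url'],
            cwd=location, stdout=subprocess.PIPE, universal_newlines=True,
        )
        if proc.returncode not in (0, 1):
            raise RuntimeError('git config failed')
        # Status 1 just means "no matching keys", i.e. no remote configured.
        if not proc.stdout.strip():
            raise RemoteNotFoundError
        # Each matching line looks like:
        #   remote.origin.url https://example.com/repo.git
        return proc.stdout.splitlines()[0].split(' ', 1)[1]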
diff --git a/src/pip/_internal/operations/freeze.py b/src/pip/_internal/operations/freeze.py index 09141c254..c9ab4ade1 100644 --- a/src/pip/_internal/operations/freeze.py +++ b/src/pip/_internal/operations/freeze.py @@ -172,7 +172,7 @@ def get_requirement_info(dist): location = os.path.normcase(os.path.abspath(dist.location)) - from pip._internal.vcs import vcs + from pip._internal.vcs import vcs, RemoteNotFoundError vc_type = vcs.get_backend_type(location) if not vc_type: @@ -188,6 +188,15 @@ def get_requirement_info(dist): try: req = vc_type().get_src_requirement(location, dist.project_name) + except RemoteNotFoundError: + req = dist.as_requirement() + comments = [ + '# Editable {} install with no remote ({})'.format( + vc_type.__name__, req, + ) + ] + return (location, True, comments) + except BadCommand: logger.warning( 'cannot determine version of editable source in %s ' diff --git a/src/pip/_internal/vcs/__init__.py b/src/pip/_internal/vcs/__init__.py index dc93028dc..3e3e72f9a 100644 --- a/src/pip/_internal/vcs/__init__.py +++ b/src/pip/_internal/vcs/__init__.py @@ -456,6 +456,9 @@ class VersionControl(object): def get_remote_url(self, location): """ Return the url used at location + + Raises RemoteNotFoundError if the repository does not have a remote + url configured. """ raise NotImplementedError diff --git a/src/pip/_internal/vcs/git.py b/src/pip/_internal/vcs/git.py index cae01610f..9ebf0d19d 100644 --- a/src/pip/_internal/vcs/git.py +++ b/src/pip/_internal/vcs/git.py @@ -14,7 +14,7 @@ from pip._internal.utils.misc import ( display_path, make_vcs_requirement_url, redact_password_from_url, ) from pip._internal.utils.temp_dir import TempDirectory -from pip._internal.vcs import VersionControl, vcs +from pip._internal.vcs import RemoteNotFoundError, VersionControl, vcs urlsplit = urllib_parse.urlsplit urlunsplit = urllib_parse.urlunsplit @@ -250,13 +250,24 @@ class Git(VersionControl): self.update_submodules(dest) def get_remote_url(self, location): - """Return URL of the first remote encountered.""" - remotes = self.run_command( + """ + Return URL of the first remote encountered. + + Raises RemoteNotFoundError if the repository does not have a remote + url configured. + """ + # We need to pass 1 for extra_ok_returncodes since the command + # exits with return code 1 if there are no matching lines. + stdout = self.run_command( ['config', '--get-regexp', r'remote\..*\.url'], - show_stdout=False, cwd=location, + extra_ok_returncodes=(1, ), show_stdout=False, cwd=location, ) - remotes = remotes.splitlines() - found_remote = remotes[0] + remotes = stdout.splitlines() + try: + found_remote = remotes[0] + except IndexError: + raise RemoteNotFoundError + for remote in remotes: if remote.startswith('remote.origin.url '): found_remote = remote diff --git a/tests/functional/test_freeze.py b/tests/functional/test_freeze.py index d8b0b9e68..56c00baef 100644 --- a/tests/functional/test_freeze.py +++ b/tests/functional/test_freeze.py @@ -140,7 +140,7 @@ def test_freeze_editable_not_vcs(script, tmpdir): @pytest.mark.git def test_freeze_editable_git_with_no_remote(script, tmpdir): """ - Test an editable install that is not version controlled. + Test an editable Git install with no remote url. """ pkg_path = _create_test_package(script) script.pip('install', '-e', pkg_path) @@ -149,7 +149,7 @@ def test_freeze_editable_git_with_no_remote(script, tmpdir): # We need to apply os.path.normcase() to the path since that is what # the freeze code does. 
expected = textwrap.dedent("""\ - ...# Editable, version-controlled with no remote (version-pkg==0.1) + ...# Editable Git install with no remote (version-pkg==0.1) -e {} ...""".format(os.path.normcase(pkg_path))) _check_output(result.stdout, expected) From 61bb651bf168a0d6b7786c18f94a3cc0bc938cc6 Mon Sep 17 00:00:00 2001 From: Chris Jerdonek Date: Fri, 21 Dec 2018 01:19:25 -0800 Subject: [PATCH 07/69] Remove outdated test_git_works_with_editable_non_origin_repo(). This test has been replaced by test_freeze_editable_git_with_no_remote() in test_freeze.py, with an updated and different test expectation. --- tests/functional/test_freeze.py | 2 ++ tests/functional/test_install_vcs_git.py | 14 -------------- 2 files changed, 2 insertions(+), 14 deletions(-) diff --git a/tests/functional/test_freeze.py b/tests/functional/test_freeze.py index 56c00baef..101164e48 100644 --- a/tests/functional/test_freeze.py +++ b/tests/functional/test_freeze.py @@ -146,6 +146,8 @@ def test_freeze_editable_git_with_no_remote(script, tmpdir): script.pip('install', '-e', pkg_path) result = script.pip('freeze') + assert result.stderr == '' + # We need to apply os.path.normcase() to the path since that is what # the freeze code does. expected = textwrap.dedent("""\ diff --git a/tests/functional/test_install_vcs_git.py b/tests/functional/test_install_vcs_git.py index 767503502..b9691d742 100644 --- a/tests/functional/test_install_vcs_git.py +++ b/tests/functional/test_install_vcs_git.py @@ -367,20 +367,6 @@ def test_git_with_ambiguous_revs(script): result.assert_installed('version-pkg', with_files=['.git']) -def test_git_works_with_editable_non_origin_repo(script): - # set up, create a git repo and install it as editable from a local - # directory path - version_pkg_path = _create_test_package(script) - script.pip('install', '-e', version_pkg_path.abspath) - - # 'freeze'ing this should not fall over, but should result in stderr output - # warning - result = script.pip('freeze', expect_stderr=True) - assert "Error when trying to get requirement" in result.stderr - assert "Could not determine repository location" in result.stdout - assert "version-pkg==0.1" in result.stdout - - def test_editable__no_revision(script): """ Test a basic install in editable mode specifying no revision. 
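Taken together, patches 05 through 07 change what pip freeze emits for an editable Git checkout with no remote configured: instead of the old stderr warning asserted by the test removed above, the package stays an editable requirement and gains an explanatory comment line. A condensed sketch of the fallback now performed in operations/freeze.py (simplified, with an illustrative helper name; the real get_requirement_info() also handles BadCommand and InstallationError):

    from pip._internal.vcs import RemoteNotFoundError

    def editable_requirement_lines(vc_type, location, dist):
        # Illustrative helper, not pip's API.
        try:
            # e.g. 'git+https://example.com/pkg.git@<sha>#egg=pkg'
            return [vc_type().get_src_requirement(location, dist.project_name)]
        except RemoteNotFoundError:
            comment = '# Editable {} install with no remote ({})'.format(
                vc_type.__name__, dist.as_requirement(),
            )
            return [comment, '-e {}'.format(location)]

This is the output shape asserted in test_freeze_editable_git_with_no_remote().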
From 7222cb8fdb7855ddd0e6b82ad45fe5b9c4bf7f73 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Sun, 2 Dec 2018 17:25:59 +0530 Subject: [PATCH 08/69] Remove --process-dependency-links and related support code --- src/pip/_internal/build_env.py | 2 -- src/pip/_internal/cli/base_command.py | 1 - src/pip/_internal/cli/cmdoptions.py | 12 ------- src/pip/_internal/commands/list.py | 9 ----- src/pip/_internal/index.py | 47 +++---------------------- src/pip/_internal/operations/prepare.py | 10 ++---- src/pip/_internal/req/req_file.py | 3 -- src/pip/_internal/utils/outdated.py | 1 - src/pip/_internal/vcs/subversion.py | 1 + tests/functional/test_install_reqs.py | 4 +-- tests/unit/test_finder.py | 44 ----------------------- tests/unit/test_req.py | 1 + tests/unit/test_req_file.py | 5 --- tests/unit/test_unit_outdated.py | 3 +- 14 files changed, 11 insertions(+), 132 deletions(-) diff --git a/src/pip/_internal/build_env.py b/src/pip/_internal/build_env.py index d8c7acc48..d744cc78a 100644 --- a/src/pip/_internal/build_env.py +++ b/src/pip/_internal/build_env.py @@ -189,8 +189,6 @@ class BuildEnvironment(object): args.extend(['--trusted-host', host]) if finder.allow_all_prereleases: args.append('--pre') - if finder.process_dependency_links: - args.append('--process-dependency-links') args.append('--') args.extend(requirements) with open_spinner(message) as spinner: diff --git a/src/pip/_internal/cli/base_command.py b/src/pip/_internal/cli/base_command.py index 1212cd5f4..175dbadc1 100644 --- a/src/pip/_internal/cli/base_command.py +++ b/src/pip/_internal/cli/base_command.py @@ -296,7 +296,6 @@ class RequirementCommand(Command): index_urls=index_urls, trusted_hosts=options.trusted_hosts, allow_all_prereleases=options.pre, - process_dependency_links=options.process_dependency_links, session=session, platform=platform, versions=python_versions, diff --git a/src/pip/_internal/cli/cmdoptions.py b/src/pip/_internal/cli/cmdoptions.py index 563cd58cd..0aebf75e8 100644 --- a/src/pip/_internal/cli/cmdoptions.py +++ b/src/pip/_internal/cli/cmdoptions.py @@ -347,17 +347,6 @@ def trusted_host(): ) -# Remove after 1.5 -process_dependency_links = partial( - Option, - "--process-dependency-links", - dest="process_dependency_links", - action="store_true", - default=False, - help="Enable the processing of dependency links.", -) # type: Callable[..., Option] - - def constraints(): # type: () -> Option return Option( @@ -773,6 +762,5 @@ index_group = { extra_index_url, no_index, find_links, - process_dependency_links, ] } # type: Dict[str, Any] diff --git a/src/pip/_internal/commands/list.py b/src/pip/_internal/commands/list.py index c6eeca79e..019f85dcd 100644 --- a/src/pip/_internal/commands/list.py +++ b/src/pip/_internal/commands/list.py @@ -118,7 +118,6 @@ class ListCommand(Command): index_urls=index_urls, allow_all_prereleases=options.pre, trusted_hosts=options.trusted_hosts, - process_dependency_links=options.process_dependency_links, session=session, ) @@ -168,16 +167,8 @@ class ListCommand(Command): logger.debug('Ignoring indexes: %s', ','.join(index_urls)) index_urls = [] - dependency_links = [] - for dist in packages: - if dist.has_metadata('dependency_links.txt'): - dependency_links.extend( - dist.get_metadata_lines('dependency_links.txt'), - ) - with self._build_session(options) as session: finder = self._build_package_finder(options, index_urls, session) - finder.add_dependency_links(dependency_links) for dist in packages: typ = 'unknown' diff --git a/src/pip/_internal/index.py 
b/src/pip/_internal/index.py index 1f2723e38..9eda3a351 100644 --- a/src/pip/_internal/index.py +++ b/src/pip/_internal/index.py @@ -31,7 +31,6 @@ from pip._internal.models.index import PyPI from pip._internal.models.link import Link from pip._internal.pep425tags import get_supported from pip._internal.utils.compat import ipaddress -from pip._internal.utils.deprecation import deprecated from pip._internal.utils.logging import indent_log from pip._internal.utils.misc import ( ARCHIVE_EXTENSIONS, SUPPORTED_EXTENSIONS, WHEEL_EXTENSION, normalize_path, @@ -268,7 +267,6 @@ class PackageFinder(object): index_urls, # type: List[str] allow_all_prereleases=False, # type: bool trusted_hosts=None, # type: Optional[Iterable[str]] - process_dependency_links=False, # type: bool session=None, # type: Optional[PipSession] format_control=None, # type: Optional[FormatControl] platform=None, # type: Optional[str] @@ -315,7 +313,6 @@ class PackageFinder(object): self.find_links.append(link) self.index_urls = index_urls - self.dependency_links = [] # type: List[str] # These are boring links that have already been logged somehow: self.logged_links = set() # type: Set[Link] @@ -331,9 +328,6 @@ class PackageFinder(object): # Do we want to allow _all_ pre-releases? self.allow_all_prereleases = allow_all_prereleases - # Do we process dependency links? - self.process_dependency_links = process_dependency_links - # The Session we'll use to make requests self.session = session @@ -375,22 +369,6 @@ class PackageFinder(object): ) return "\n".join(lines) - def add_dependency_links(self, links): - # type: (Iterable[str]) -> None - # FIXME: this shouldn't be global list this, it should only - # apply to requirements of the package that specifies the - # dependency_links value - # FIXME: also, we should track comes_from (i.e., use Link) - if self.process_dependency_links: - deprecated( - "Dependency Links processing has been deprecated and will be " - "removed in a future release.", - replacement="PEP 508 URL dependencies", - gone_in="19.0", - issue=4187, - ) - self.dependency_links.extend(links) - @staticmethod def _sort_locations(locations, expand_dir=False): # type: (Sequence[str], bool) -> Tuple[List[str], List[str]] @@ -587,7 +565,7 @@ class PackageFinder(object): # type: (str) -> List[Optional[InstallationCandidate]] """Find all available InstallationCandidate for project_name - This checks index_urls, find_links and dependency_links. + This checks index_urls and find_links. All versions found are returned as an InstallationCandidate list. See _link_package_versions for details on which files are accepted @@ -597,21 +575,18 @@ class PackageFinder(object): fl_file_loc, fl_url_loc = self._sort_locations( self.find_links, expand_dir=True, ) - dep_file_loc, dep_url_loc = self._sort_locations(self.dependency_links) file_locations = (Link(url) for url in itertools.chain( - index_file_loc, fl_file_loc, dep_file_loc, + index_file_loc, fl_file_loc, )) # We trust every url that the user has given us whether it was given - # via --index-url or --find-links - # We explicitly do not trust links that came from dependency_links + # via --index-url or --find-links. # We want to filter out any thing which does not have a secure origin. 
url_locations = [ link for link in itertools.chain( (Link(url) for url in index_url_loc), (Link(url) for url in fl_url_loc), - (Link(url) for url in dep_url_loc), ) if self._validate_secure_origin(logger, link) ] @@ -639,17 +614,6 @@ class PackageFinder(object): self._package_versions(page.iter_links(), search) ) - dependency_versions = self._package_versions( - (Link(url) for url in self.dependency_links), search - ) - if dependency_versions: - logger.debug( - 'dependency_links found: %s', - ', '.join([ - version.location.url for version in dependency_versions - ]) - ) - file_versions = self._package_versions(file_locations, search) if file_versions: file_versions.sort(reverse=True) @@ -662,10 +626,7 @@ class PackageFinder(object): ) # This is an intentional priority ordering - return ( - file_versions + find_links_versions + page_versions + - dependency_versions - ) + return file_versions + find_links_versions + page_versions def find_requirement(self, req, upgrade): # type: (InstallRequirement, bool) -> Optional[Link] diff --git a/src/pip/_internal/operations/prepare.py b/src/pip/_internal/operations/prepare.py index 23383b1af..8ec55db00 100644 --- a/src/pip/_internal/operations/prepare.py +++ b/src/pip/_internal/operations/prepare.py @@ -97,15 +97,9 @@ class IsWheel(DistAbstraction): class IsSDist(DistAbstraction): + # TODO: Remove 'finder' and the note in operations/check.py def dist(self, finder): - # type: (PackageFinder) -> pkg_resources.Distribution - dist = self.req.get_dist() - # FIXME: shouldn't be globally added. - if finder and dist.has_metadata('dependency_links.txt'): - finder.add_dependency_links( - dist.get_metadata_lines('dependency_links.txt') - ) - return dist + return self.req.get_dist() def prep_for_dist(self, finder, build_isolation): # type: (PackageFinder, bool) -> None diff --git a/src/pip/_internal/req/req_file.py b/src/pip/_internal/req/req_file.py index e92b7968b..726f2f6a0 100644 --- a/src/pip/_internal/req/req_file.py +++ b/src/pip/_internal/req/req_file.py @@ -55,7 +55,6 @@ SUPPORTED_OPTIONS = [ cmdoptions.no_binary, cmdoptions.only_binary, cmdoptions.pre, - cmdoptions.process_dependency_links, cmdoptions.trusted_host, cmdoptions.require_hashes, ] # type: List[Callable[..., optparse.Option]] @@ -251,8 +250,6 @@ def process_line( finder.find_links.append(value) if opts.pre: finder.allow_all_prereleases = True - if opts.process_dependency_links: - finder.process_dependency_links = True if opts.trusted_hosts: finder.secure_origins.extend( ("*", host, "*") for host in opts.trusted_hosts) diff --git a/src/pip/_internal/utils/outdated.py b/src/pip/_internal/utils/outdated.py index 03360c712..37c47a4a5 100644 --- a/src/pip/_internal/utils/outdated.py +++ b/src/pip/_internal/utils/outdated.py @@ -127,7 +127,6 @@ def pip_version_check(session, options): index_urls=[options.index_url] + options.extra_index_urls, allow_all_prereleases=False, # Explicitly set to False trusted_hosts=options.trusted_hosts, - process_dependency_links=options.process_dependency_links, session=session, ) all_candidates = finder.find_all_candidates("pip") diff --git a/src/pip/_internal/vcs/subversion.py b/src/pip/_internal/vcs/subversion.py index 87970f1bc..1bdc9ca79 100644 --- a/src/pip/_internal/vcs/subversion.py +++ b/src/pip/_internal/vcs/subversion.py @@ -61,6 +61,7 @@ class Subversion(VersionControl): cmd_args = ['update'] + rev_options.to_args() + [dest] self.run_command(cmd_args) + # TODO: Remove def get_location(self, dist, dependency_links): for url in dependency_links: 
egg_fragment = Link(url).egg_fragment diff --git a/tests/functional/test_install_reqs.py b/tests/functional/test_install_reqs.py index bda45b107..5c56a0e12 100644 --- a/tests/functional/test_install_reqs.py +++ b/tests/functional/test_install_reqs.py @@ -175,8 +175,8 @@ def test_respect_order_in_requirements_file(script, data): def test_install_local_editable_with_extras(script, data): to_install = data.packages.join("LocalExtras") - res = script.pip( - 'install', '-e', to_install + '[bar]', '--process-dependency-links', + res = script.pip_install_local( + '-e', to_install + '[bar]', expect_error=False, expect_stderr=True, ) diff --git a/tests/unit/test_finder.py b/tests/unit/test_finder.py index 9f82a18a8..c8ae693dd 100644 --- a/tests/unit/test_finder.py +++ b/tests/unit/test_finder.py @@ -300,50 +300,6 @@ def test_finder_priority_file_over_page(data): assert link.url.startswith("file://") -def test_finder_deplink(): - """ - Test PackageFinder with dependency links only - """ - req = install_req_from_line('gmpy==1.15', None) - finder = PackageFinder( - [], - [], - process_dependency_links=True, - session=PipSession(), - ) - finder.add_dependency_links( - ['https://files.pythonhosted.org/packages/source/g/gmpy/gmpy-1.15.zip'] - ) - link = finder.find_requirement(req, False) - assert link.url.startswith("https://files.pythonhosted.org/"), link - - -@pytest.mark.network -def test_finder_priority_page_over_deplink(): - """ - Test PackageFinder prefers page links over equivalent dependency links - """ - req = install_req_from_line('pip==1.5.6', None) - finder = PackageFinder( - [], - ["https://pypi.org/simple/"], - process_dependency_links=True, - session=PipSession(), - ) - finder.add_dependency_links([ - 'https://files.pythonhosted.org/packages/source/p/pip/pip-1.5.6.tar.gz' - ]) - all_versions = finder.find_all_candidates(req.name) - # Check that the dependency_link is last - assert all_versions[-1].location.url.startswith( - 'https://files.pythonhosted.org/' - ) - link = finder.find_requirement(req, False) - assert link.url.startswith( - "https://files.pythonhosted.org/packages/3f/08/7347ca4" - ), link - - def test_finder_priority_nonegg_over_eggfragments(): """Test PackageFinder prefers non-egg links over "#egg=" links""" req = install_req_from_line('bar==1.0', None) diff --git a/tests/unit/test_req.py b/tests/unit/test_req.py index 9d146e36c..32cc1b36d 100644 --- a/tests/unit/test_req.py +++ b/tests/unit/test_req.py @@ -82,6 +82,7 @@ class TestRequirementSet(object): reqset, ) + # TODO: Update test when Python 2.7 or Python 3.4 is dropped. 
def test_environment_marker_extras(self, data): """ Test that the environment marker extras are used with diff --git a/tests/unit/test_req_file.py b/tests/unit/test_req_file.py index bdf721794..b71ca68d0 100644 --- a/tests/unit/test_req_file.py +++ b/tests/unit/test_req_file.py @@ -413,11 +413,6 @@ class TestProcessLine(object): call = mock_parse.mock_calls[0] assert call[1][0] == 'http://me.com/me/reqs.txt' - def test_set_finder_process_dependency_links(self, finder): - list(process_line( - "--process-dependency-links", "file", 1, finder=finder)) - assert finder.process_dependency_links - class TestBreakOptionsArgs(object): diff --git a/tests/unit/test_unit_outdated.py b/tests/unit/test_unit_outdated.py index a9441a4d3..31b5a7fa3 100644 --- a/tests/unit/test_unit_outdated.py +++ b/tests/unit/test_unit_outdated.py @@ -50,8 +50,7 @@ def _options(): ''' Some default options that we pass to outdated.pip_version_check ''' return pretend.stub( find_links=False, extra_index_urls=[], index_url='default_url', - pre=False, trusted_hosts=False, process_dependency_links=False, - cache_dir='', + pre=False, trusted_hosts=False, cache_dir='', ) From 838984b372e368478ebb05589236b9c6a047b60b Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Sun, 2 Dec 2018 17:26:51 +0530 Subject: [PATCH 09/69] Drop dependency links from test packages --- tests/data/packages/LocalEnvironMarker/setup.py | 9 +++------ tests/data/packages/LocalExtras-0.0.2/setup.py | 7 +------ tests/data/packages/LocalExtras/setup.py | 7 +------ 3 files changed, 5 insertions(+), 18 deletions(-) diff --git a/tests/data/packages/LocalEnvironMarker/setup.py b/tests/data/packages/LocalEnvironMarker/setup.py index 00f18d583..cc2cd317a 100644 --- a/tests/data/packages/LocalEnvironMarker/setup.py +++ b/tests/data/packages/LocalEnvironMarker/setup.py @@ -17,14 +17,11 @@ def path_to_url(path): return 'file://' + url -HERE = os.path.dirname(__file__) -DEP_PATH = os.path.join(HERE, '..', '..', 'indexes', 'simple', 'simple') -DEP_URL = path_to_url(DEP_PATH) - setup( name='LocalEnvironMarker', version='0.0.1', packages=find_packages(), - extras_require={":python_version == '2.7' or python_version == '3.4'": ['simple']}, - dependency_links=[DEP_URL] + extras_require={ + ":python_version == '2.7' or python_version == '3.4'": ['simple'], + } ) diff --git a/tests/data/packages/LocalExtras-0.0.2/setup.py b/tests/data/packages/LocalExtras-0.0.2/setup.py index acc55f967..cc5c83283 100644 --- a/tests/data/packages/LocalExtras-0.0.2/setup.py +++ b/tests/data/packages/LocalExtras-0.0.2/setup.py @@ -17,15 +17,10 @@ def path_to_url(path): return 'file://' + url -HERE = os.path.dirname(__file__) -DEP_PATH = os.path.join(HERE, '..', '..', 'indexes', 'simple', 'simple') -DEP_URL = path_to_url(DEP_PATH) - setup( name='LocalExtras', version='0.0.2', packages=find_packages(), install_requires=['simple==1.0'], - extras_require={'bar': ['simple==2.0'], 'baz': ['singlemodule']}, - dependency_links=[DEP_URL] + extras_require={'bar': ['simple==2.0'], 'baz': ['singlemodule']} ) diff --git a/tests/data/packages/LocalExtras/setup.py b/tests/data/packages/LocalExtras/setup.py index f25cc16c6..eb390e32e 100644 --- a/tests/data/packages/LocalExtras/setup.py +++ b/tests/data/packages/LocalExtras/setup.py @@ -17,14 +17,9 @@ def path_to_url(path): return 'file://' + url -HERE = os.path.dirname(__file__) -DEP_PATH = os.path.join(HERE, '..', '..', 'indexes', 'simple', 'simple') -DEP_URL = path_to_url(DEP_PATH) - setup( name='LocalExtras', version='0.0.1', packages=find_packages(), - 
extras_require={'bar': ['simple'], 'baz': ['singlemodule']}, - dependency_links=[DEP_URL] + extras_require={'bar': ['simple'], 'baz': ['singlemodule']} ) From 71b6e95322d2e62cc8dd6c27ed3e047fd69c9bb9 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Sun, 2 Dec 2018 17:27:34 +0530 Subject: [PATCH 10/69] Remove an unused Subversion method that expects dependency links --- src/pip/_internal/vcs/subversion.py | 16 ---------------- 1 file changed, 16 deletions(-) diff --git a/src/pip/_internal/vcs/subversion.py b/src/pip/_internal/vcs/subversion.py index 1bdc9ca79..3aea1c184 100644 --- a/src/pip/_internal/vcs/subversion.py +++ b/src/pip/_internal/vcs/subversion.py @@ -4,7 +4,6 @@ import logging import os import re -from pip._internal.models.link import Link from pip._internal.utils.logging import indent_log from pip._internal.utils.misc import ( display_path, make_vcs_requirement_url, rmtree, split_auth_from_netloc, @@ -61,21 +60,6 @@ class Subversion(VersionControl): cmd_args = ['update'] + rev_options.to_args() + [dest] self.run_command(cmd_args) - # TODO: Remove - def get_location(self, dist, dependency_links): - for url in dependency_links: - egg_fragment = Link(url).egg_fragment - if not egg_fragment: - continue - if '-' in egg_fragment: - # FIXME: will this work when a package has - in the name? - key = '-'.join(egg_fragment.split('-')[:-1]).lower() - else: - key = egg_fragment - if key == dist.key: - return url.split('#', 1)[0] - return None - def get_revision(self, location): """ Return the maximum revision for all files under a given location From f9b66cacb37f81faf4f1636ac906d22a62d7857f Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Sun, 2 Dec 2018 17:29:57 +0530 Subject: [PATCH 11/69] Stop passing finder to DistAbstraction.dist --- src/pip/_internal/operations/check.py | 5 +---- src/pip/_internal/operations/prepare.py | 15 +++++++-------- src/pip/_internal/resolve.py | 2 +- 3 files changed, 9 insertions(+), 13 deletions(-) diff --git a/src/pip/_internal/operations/check.py b/src/pip/_internal/operations/check.py index 494b0e219..0b56eda45 100644 --- a/src/pip/_internal/operations/check.py +++ b/src/pip/_internal/operations/check.py @@ -120,9 +120,6 @@ def check_install_conflicts(to_install): ) -# NOTE from @pradyunsg -# This required a minor update in dependency link handling logic over at -# operations.prepare.IsSDist.dist() to get it working def _simulate_installation_of(to_install, package_set): # type: (List[InstallRequirement], PackageSet) -> Set[str] """Computes the version of packages after installing to_install. 
@@ -133,7 +130,7 @@ def _simulate_installation_of(to_install, package_set): # Modify it as installing requirement_set would (assuming no errors) for inst_req in to_install: - dist = make_abstract_dist(inst_req).dist(finder=None) + dist = make_abstract_dist(inst_req).dist() name = canonicalize_name(dist.key) package_set[name] = PackageDetails(dist.version, dist.requires()) diff --git a/src/pip/_internal/operations/prepare.py b/src/pip/_internal/operations/prepare.py index 8ec55db00..742925557 100644 --- a/src/pip/_internal/operations/prepare.py +++ b/src/pip/_internal/operations/prepare.py @@ -71,8 +71,8 @@ class DistAbstraction(object): # type: (InstallRequirement) -> None self.req = req # type: InstallRequirement - def dist(self, finder): - # type: (PackageFinder) -> Any + def dist(self): + # type: () -> Any """Return a setuptools Dist object.""" raise NotImplementedError(self.dist) @@ -84,8 +84,8 @@ class DistAbstraction(object): class IsWheel(DistAbstraction): - def dist(self, finder): - # type: (PackageFinder) -> pkg_resources.Distribution + def dist(self): + # type: () -> pkg_resources.Distribution return list(pkg_resources.find_distributions( self.req.source_dir))[0] @@ -97,8 +97,7 @@ class IsWheel(DistAbstraction): class IsSDist(DistAbstraction): - # TODO: Remove 'finder' and the note in operations/check.py - def dist(self, finder): + def dist(self): return self.req.get_dist() def prep_for_dist(self, finder, build_isolation): @@ -162,8 +161,8 @@ class IsSDist(DistAbstraction): class Installed(DistAbstraction): - def dist(self, finder): - # type: (PackageFinder) -> pkg_resources.Distribution + def dist(self): + # type: () -> pkg_resources.Distribution return self.req.satisfied_by def prep_for_dist(self, finder, build_isolation): diff --git a/src/pip/_internal/resolve.py b/src/pip/_internal/resolve.py index 7452af041..33f572f1e 100644 --- a/src/pip/_internal/resolve.py +++ b/src/pip/_internal/resolve.py @@ -294,7 +294,7 @@ class Resolver(object): abstract_dist = self._get_abstract_dist_for(req_to_install) # Parse and return dependencies - dist = abstract_dist.dist(self.finder) + dist = abstract_dist.dist() try: check_dist_requires_python(dist) except UnsupportedPythonVersion as err: From b7a4d70b25c9d6b2915b0f78cdb55e180e5fed9b Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Sun, 2 Dec 2018 17:30:44 +0530 Subject: [PATCH 12/69] Don't instantiate NotImplementedError --- src/pip/_internal/operations/prepare.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/pip/_internal/operations/prepare.py b/src/pip/_internal/operations/prepare.py index 742925557..4f31dd5a6 100644 --- a/src/pip/_internal/operations/prepare.py +++ b/src/pip/_internal/operations/prepare.py @@ -74,12 +74,12 @@ class DistAbstraction(object): def dist(self): # type: () -> Any """Return a setuptools Dist object.""" - raise NotImplementedError(self.dist) + raise NotImplementedError def prep_for_dist(self, finder, build_isolation): # type: (PackageFinder, bool) -> Any """Ensure that we can get a Dist for this requirement.""" - raise NotImplementedError(self.dist) + raise NotImplementedError class IsWheel(DistAbstraction): From f6f4eb78085756c7c9a34e457d9787cf51f16503 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Sun, 2 Dec 2018 17:37:42 +0530 Subject: [PATCH 13/69] :newspaper: --- news/6060.removal | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/6060.removal diff --git a/news/6060.removal b/news/6060.removal new file mode 100644 index 000000000..1b531c8c8 --- /dev/null +++ 
b/news/6060.removal @@ -0,0 +1 @@ +Remove the deprecated --process-dependency-links option. From 65270e53f84a01b6998a16dbc65fb724e392218f Mon Sep 17 00:00:00 2001 From: Chih-Hsuan Yen Date: Sat, 5 Jan 2019 22:14:48 +0800 Subject: [PATCH 14/69] Add a note in Vendoring Policies to remind of `vendored()` entries To avoid future breakages like #4660, #5418 or #6056 --- news/dbe44acc-c569-46e2-9348-2e55a2816d5c.trivial | 0 src/pip/_vendor/README.rst | 2 ++ 2 files changed, 2 insertions(+) create mode 100644 news/dbe44acc-c569-46e2-9348-2e55a2816d5c.trivial diff --git a/news/dbe44acc-c569-46e2-9348-2e55a2816d5c.trivial b/news/dbe44acc-c569-46e2-9348-2e55a2816d5c.trivial new file mode 100644 index 000000000..e69de29bb diff --git a/src/pip/_vendor/README.rst b/src/pip/_vendor/README.rst index 5c3f3fe65..d9e9b7f40 100644 --- a/src/pip/_vendor/README.rst +++ b/src/pip/_vendor/README.rst @@ -20,6 +20,8 @@ Vendoring Policy ``pip/_vendor/README.rst`` and their corresponding patches **MUST** be included ``tasks/vendoring/patches``. +* Vendored libraries should have corresponding ``vendored()`` entries in + ``pip/_vendor/__init__.py``. Rationale --------- From 4f359810855ef140cb93b908786519996a4f17e2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Peter=20Lis=C3=A1k?= Date: Sat, 5 Jan 2019 21:58:16 +0100 Subject: [PATCH 15/69] pip list --not-required --outdated should list only outdated packages that are not dependencies of installed packages --- news/5737.bugfix | 1 + src/pip/_internal/commands/list.py | 6 +++--- tests/functional/test_list.py | 16 ++++++++++++++++ 3 files changed, 20 insertions(+), 3 deletions(-) create mode 100644 news/5737.bugfix diff --git a/news/5737.bugfix b/news/5737.bugfix new file mode 100644 index 000000000..d9e170389 --- /dev/null +++ b/news/5737.bugfix @@ -0,0 +1 @@ +`pip list --outdated --not-required` should list only outdated packages that are not dependencies of installed packages diff --git a/src/pip/_internal/commands/list.py b/src/pip/_internal/commands/list.py index c6eeca79e..e95e01ca6 100644 --- a/src/pip/_internal/commands/list.py +++ b/src/pip/_internal/commands/list.py @@ -134,14 +134,14 @@ class ListCommand(Command): include_editables=options.include_editable, ) + if options.not_required: + packages = self.get_not_required(packages, options) + if options.outdated: packages = self.get_outdated(packages, options) elif options.uptodate: packages = self.get_uptodate(packages, options) - if options.not_required: - packages = self.get_not_required(packages, options) - self.output_package_listing(packages, options) def get_outdated(self, packages, options): diff --git a/tests/functional/test_list.py b/tests/functional/test_list.py index c376049c5..e091e8d7c 100644 --- a/tests/functional/test_list.py +++ b/tests/functional/test_list.py @@ -384,6 +384,22 @@ def test_outdated_editables_columns_flag(script, data): ) +@pytest.mark.network +def test_outdated_not_required_flag(script, data): + """ + test the behavior of --outdated --not-required flag in the list command + """ + script.pip( + 'install', '-f', data.find_links, '--no-index', + 'simple==2.0', 'require_simple==1.0' + ) + result = script.pip( + 'list', '-f', data.find_links, '--no-index', '--outdated', + '--not-required', '--format=json', + ) + assert [] == json.loads(result.stdout) + + def test_outdated_pre(script, data): script.pip('install', '-f', data.find_links, '--no-index', 'simple==1.0') From 1a87e577d0266f724ad1f2cbd37d8cf26dd6988c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Peter=20Lis=C3=A1k?= Date: Mon, 7 Jan 
2019 17:10:08 +0100 Subject: [PATCH 16/69] Add comments --- src/pip/_internal/commands/list.py | 4 ++++ tests/functional/test_list.py | 1 - 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/src/pip/_internal/commands/list.py b/src/pip/_internal/commands/list.py index e95e01ca6..a2846b5ae 100644 --- a/src/pip/_internal/commands/list.py +++ b/src/pip/_internal/commands/list.py @@ -134,6 +134,10 @@ class ListCommand(Command): include_editables=options.include_editable, ) + # get_not_required must be called firstly in order to find and + # filter out all dependencies correctly. Otherwise a package + # can't be identified as requirement because some parent packages + # could be filtered out before. if options.not_required: packages = self.get_not_required(packages, options) diff --git a/tests/functional/test_list.py b/tests/functional/test_list.py index e091e8d7c..05f419ccc 100644 --- a/tests/functional/test_list.py +++ b/tests/functional/test_list.py @@ -384,7 +384,6 @@ def test_outdated_editables_columns_flag(script, data): ) -@pytest.mark.network def test_outdated_not_required_flag(script, data): """ test the behavior of --outdated --not-required flag in the list command From 00ae3f594d6374f6aaf02e1a441e0b4c18292299 Mon Sep 17 00:00:00 2001 From: Chris Jerdonek Date: Sun, 6 Jan 2019 16:22:04 -0800 Subject: [PATCH 17/69] Make VersionControl.run_command() a class method. --- src/pip/_internal/vcs/__init__.py | 9 +++++---- src/pip/_internal/vcs/git.py | 8 ++++---- tests/functional/test_vcs_git.py | 7 +++---- 3 files changed, 12 insertions(+), 12 deletions(-) diff --git a/src/pip/_internal/vcs/__init__.py b/src/pip/_internal/vcs/__init__.py index 3e3e72f9a..71703c6f7 100644 --- a/src/pip/_internal/vcs/__init__.py +++ b/src/pip/_internal/vcs/__init__.py @@ -468,8 +468,9 @@ class VersionControl(object): """ raise NotImplementedError + @classmethod def run_command( - self, + cls, cmd, # type: List[str] show_stdout=True, # type: bool cwd=None, # type: Optional[str] @@ -485,14 +486,14 @@ class VersionControl(object): This is simply a wrapper around call_subprocess that adds the VCS command name, and checks that the VCS is available """ - cmd = [self.name] + cmd + cmd = [cls.name] + cmd try: return call_subprocess(cmd, show_stdout, cwd, on_returncode=on_returncode, extra_ok_returncodes=extra_ok_returncodes, command_desc=command_desc, extra_environ=extra_environ, - unset_environ=self.unset_environ, + unset_environ=cls.unset_environ, spinner=spinner) except OSError as e: # errno.ENOENT = no such file or directory @@ -501,7 +502,7 @@ class VersionControl(object): raise BadCommand( 'Cannot find command %r - do you have ' '%r installed and in your ' - 'PATH?' % (self.name, self.name)) + 'PATH?' 
% (cls.name, cls.name)) else: raise # re-raise exception if a different error occurred diff --git a/src/pip/_internal/vcs/git.py b/src/pip/_internal/vcs/git.py index 9ebf0d19d..0b31beed8 100644 --- a/src/pip/_internal/vcs/git.py +++ b/src/pip/_internal/vcs/git.py @@ -351,10 +351,10 @@ class Git(VersionControl): if super(Git, cls).controls_location(location): return True try: - r = cls().run_command(['rev-parse'], - cwd=location, - show_stdout=False, - on_returncode='ignore') + r = cls.run_command(['rev-parse'], + cwd=location, + show_stdout=False, + on_returncode='ignore') return not r except BadCommand: logger.debug("could not determine if %s is under git control " diff --git a/tests/functional/test_vcs_git.py b/tests/functional/test_vcs_git.py index c6cada2c0..efac2b5d6 100644 --- a/tests/functional/test_vcs_git.py +++ b/tests/functional/test_vcs_git.py @@ -58,7 +58,7 @@ def test_git_dir_ignored(tmpdir): env = {'GIT_DIR': 'foo'} # If GIT_DIR is not ignored, then os.listdir() will return ['foo']. - Git().run_command(['init', repo_dir], cwd=repo_dir, extra_environ=env) + Git.run_command(['init', repo_dir], cwd=repo_dir, extra_environ=env) assert os.listdir(repo_dir) == ['.git'] @@ -70,13 +70,12 @@ def test_git_work_tree_ignored(tmpdir): repo_path.mkdir() repo_dir = str(repo_path) - git = Git() - git.run_command(['init', repo_dir], cwd=repo_dir) + Git.run_command(['init', repo_dir], cwd=repo_dir) # Choose a directory relative to the cwd that does not exist. # If GIT_WORK_TREE is not ignored, then the command will error out # with: "fatal: This operation must be run in a work tree". env = {'GIT_WORK_TREE': 'foo'} - git.run_command(['status', repo_dir], extra_environ=env, cwd=repo_dir) + Git.run_command(['status', repo_dir], extra_environ=env, cwd=repo_dir) def test_get_remote_url(script, tmpdir): From 41f058b4f5b0f2452fbf018e9024ac693b6a1a20 Mon Sep 17 00:00:00 2001 From: Chris Jerdonek Date: Sun, 6 Jan 2019 16:27:27 -0800 Subject: [PATCH 18/69] Make _get_subdirectory(), _get_svn_url_rev(), and _is_local_repository() class methods. 
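As the test changes above already show for run_command(), the point of these conversions is the calling convention: helpers that only consult class-level attributes (name, dirname, unset_environ) or other classmethods no longer need a throwaway instance. A condensed illustration (the repository path is a placeholder):

    from pip._internal.vcs.git import Git

    repo_dir = '/path/to/some/checkout'   # placeholder

    # Before patch 17, a temporary instance was required:
    #     Git().run_command(['status'], cwd=repo_dir)
    # After the conversion, the class itself is enough:
    Git.run_command(['status'], cwd=repo_dir)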
--- src/pip/_internal/vcs/__init__.py | 3 ++- src/pip/_internal/vcs/git.py | 7 ++++--- src/pip/_internal/vcs/subversion.py | 7 ++++--- 3 files changed, 10 insertions(+), 7 deletions(-) diff --git a/src/pip/_internal/vcs/__init__.py b/src/pip/_internal/vcs/__init__.py index 71703c6f7..32df79608 100644 --- a/src/pip/_internal/vcs/__init__.py +++ b/src/pip/_internal/vcs/__init__.py @@ -210,7 +210,8 @@ class VersionControl(object): """ return RevOptions(self, rev, extra_args=extra_args) - def _is_local_repository(self, repo): + @classmethod + def _is_local_repository(cls, repo): # type: (str) -> bool """ posix absolute paths start with os.path.sep, diff --git a/src/pip/_internal/vcs/git.py b/src/pip/_internal/vcs/git.py index 0b31beed8..e7d4e2b16 100644 --- a/src/pip/_internal/vcs/git.py +++ b/src/pip/_internal/vcs/git.py @@ -283,11 +283,12 @@ class Git(VersionControl): ) return current_rev.strip() - def _get_subdirectory(self, location): + @classmethod + def _get_subdirectory(cls, location): """Return the relative path of setup.py to the git repo root.""" # find the repo root - git_dir = self.run_command(['rev-parse', '--git-dir'], - show_stdout=False, cwd=location).strip() + git_dir = cls.run_command(['rev-parse', '--git-dir'], + show_stdout=False, cwd=location).strip() if not os.path.isabs(git_dir): git_dir = os.path.join(location, git_dir) root_dir = os.path.join(git_dir, '..') diff --git a/src/pip/_internal/vcs/subversion.py b/src/pip/_internal/vcs/subversion.py index 3aea1c184..6840474eb 100644 --- a/src/pip/_internal/vcs/subversion.py +++ b/src/pip/_internal/vcs/subversion.py @@ -136,10 +136,11 @@ class Subversion(VersionControl): return self._get_svn_url_rev(location)[0] - def _get_svn_url_rev(self, location): + @classmethod + def _get_svn_url_rev(cls, location): from pip._internal.exceptions import InstallationError - entries_path = os.path.join(location, self.dirname, 'entries') + entries_path = os.path.join(location, cls.dirname, 'entries') if os.path.exists(entries_path): with open(entries_path) as f: data = f.read() @@ -162,7 +163,7 @@ class Subversion(VersionControl): else: try: # subversion >= 1.7 - xml = self.run_command( + xml = cls.run_command( ['info', '--xml', location], show_stdout=False, ) From 8a771d027d9ab16a88e4bd048a6caf48bdaa447c Mon Sep 17 00:00:00 2001 From: Chris Jerdonek Date: Sun, 6 Jan 2019 16:31:06 -0800 Subject: [PATCH 19/69] Make get_remote_url() a class method. 
--- src/pip/_internal/vcs/__init__.py | 3 ++- src/pip/_internal/vcs/bazaar.py | 7 ++++--- src/pip/_internal/vcs/git.py | 5 +++-- src/pip/_internal/vcs/mercurial.py | 7 ++++--- src/pip/_internal/vcs/subversion.py | 5 +++-- tests/functional/test_vcs_git.py | 4 ++-- 6 files changed, 18 insertions(+), 13 deletions(-) diff --git a/src/pip/_internal/vcs/__init__.py b/src/pip/_internal/vcs/__init__.py index 32df79608..349882e21 100644 --- a/src/pip/_internal/vcs/__init__.py +++ b/src/pip/_internal/vcs/__init__.py @@ -454,7 +454,8 @@ class VersionControl(object): """ raise NotImplementedError - def get_remote_url(self, location): + @classmethod + def get_remote_url(cls, location): """ Return the url used at location diff --git a/src/pip/_internal/vcs/bazaar.py b/src/pip/_internal/vcs/bazaar.py index 4552b1173..f25236751 100644 --- a/src/pip/_internal/vcs/bazaar.py +++ b/src/pip/_internal/vcs/bazaar.py @@ -75,15 +75,16 @@ class Bazaar(VersionControl): url = 'bzr+' + url return url, rev, user_pass - def get_remote_url(self, location): - urls = self.run_command(['info'], show_stdout=False, cwd=location) + @classmethod + def get_remote_url(cls, location): + urls = cls.run_command(['info'], show_stdout=False, cwd=location) for line in urls.splitlines(): line = line.strip() for x in ('checkout of branch: ', 'parent branch: '): if line.startswith(x): repo = line.split(x)[1] - if self._is_local_repository(repo): + if cls._is_local_repository(repo): return path_to_url(repo) return repo return None diff --git a/src/pip/_internal/vcs/git.py b/src/pip/_internal/vcs/git.py index e7d4e2b16..3eae16e70 100644 --- a/src/pip/_internal/vcs/git.py +++ b/src/pip/_internal/vcs/git.py @@ -249,7 +249,8 @@ class Git(VersionControl): #: update submodules self.update_submodules(dest) - def get_remote_url(self, location): + @classmethod + def get_remote_url(cls, location): """ Return URL of the first remote encountered. @@ -258,7 +259,7 @@ class Git(VersionControl): """ # We need to pass 1 for extra_ok_returncodes since the command # exits with return code 1 if there are no matching lines. 
- stdout = self.run_command( + stdout = cls.run_command( ['config', '--get-regexp', r'remote\..*\.url'], extra_ok_returncodes=(1, ), show_stdout=False, cwd=location, ) diff --git a/src/pip/_internal/vcs/mercurial.py b/src/pip/_internal/vcs/mercurial.py index 4f106d5ef..7b6b78d03 100644 --- a/src/pip/_internal/vcs/mercurial.py +++ b/src/pip/_internal/vcs/mercurial.py @@ -64,11 +64,12 @@ class Mercurial(VersionControl): cmd_args = ['update', '-q'] + rev_options.to_args() self.run_command(cmd_args, cwd=dest) - def get_remote_url(self, location): - url = self.run_command( + @classmethod + def get_remote_url(cls, location): + url = cls.run_command( ['showconfig', 'paths.default'], show_stdout=False, cwd=location).strip() - if self._is_local_repository(url): + if cls._is_local_repository(url): url = path_to_url(url) return url.strip() diff --git a/src/pip/_internal/vcs/subversion.py b/src/pip/_internal/vcs/subversion.py index 6840474eb..01e216a08 100644 --- a/src/pip/_internal/vcs/subversion.py +++ b/src/pip/_internal/vcs/subversion.py @@ -116,7 +116,8 @@ class Subversion(VersionControl): return extra_args - def get_remote_url(self, location): + @classmethod + def get_remote_url(cls, location): # In cases where the source is in a subdirectory, not alongside # setup.py we have to look up in the location until we find a real # setup.py @@ -134,7 +135,7 @@ class Subversion(VersionControl): ) return None - return self._get_svn_url_rev(location)[0] + return cls._get_svn_url_rev(location)[0] @classmethod def _get_svn_url_rev(cls, location): diff --git a/tests/functional/test_vcs_git.py b/tests/functional/test_vcs_git.py index efac2b5d6..aa1d663ee 100644 --- a/tests/functional/test_vcs_git.py +++ b/tests/functional/test_vcs_git.py @@ -90,7 +90,7 @@ def test_get_remote_url(script, tmpdir): repo_dir = str(tmpdir / 'repo') script.run('git', 'clone', source_url, repo_dir, expect_stderr=True) - remote_url = Git().get_remote_url(repo_dir) + remote_url = Git.get_remote_url(repo_dir) assert remote_url == source_url @@ -105,7 +105,7 @@ def test_get_remote_url__no_remote(script, tmpdir): script.run('git', 'init', cwd=repo_dir) with pytest.raises(RemoteNotFoundError): - Git().get_remote_url(repo_dir) + Git.get_remote_url(repo_dir) def test_get_current_branch(script): From 8a036105dd9435199bc1cc2fcb02b3dfd2bea96b Mon Sep 17 00:00:00 2001 From: Chris Jerdonek Date: Sun, 6 Jan 2019 16:35:56 -0800 Subject: [PATCH 20/69] Make get_revision() and get_revision_hash() class methods. --- src/pip/_internal/vcs/__init__.py | 3 ++- src/pip/_internal/vcs/bazaar.py | 5 +++-- src/pip/_internal/vcs/git.py | 5 +++-- src/pip/_internal/vcs/mercurial.py | 10 ++++++---- src/pip/_internal/vcs/subversion.py | 11 ++++++----- 5 files changed, 20 insertions(+), 14 deletions(-) diff --git a/src/pip/_internal/vcs/__init__.py b/src/pip/_internal/vcs/__init__.py index 349882e21..0c8c9acbf 100644 --- a/src/pip/_internal/vcs/__init__.py +++ b/src/pip/_internal/vcs/__init__.py @@ -464,7 +464,8 @@ class VersionControl(object): """ raise NotImplementedError - def get_revision(self, location): + @classmethod + def get_revision(cls, location): """ Return the current commit id of the files at the given location. 
""" diff --git a/src/pip/_internal/vcs/bazaar.py b/src/pip/_internal/vcs/bazaar.py index f25236751..9623a443d 100644 --- a/src/pip/_internal/vcs/bazaar.py +++ b/src/pip/_internal/vcs/bazaar.py @@ -89,8 +89,9 @@ class Bazaar(VersionControl): return repo return None - def get_revision(self, location): - revision = self.run_command( + @classmethod + def get_revision(cls, location): + revision = cls.run_command( ['revno'], show_stdout=False, cwd=location, ) return revision.splitlines()[-1] diff --git a/src/pip/_internal/vcs/git.py b/src/pip/_internal/vcs/git.py index 3eae16e70..02e20c769 100644 --- a/src/pip/_internal/vcs/git.py +++ b/src/pip/_internal/vcs/git.py @@ -276,10 +276,11 @@ class Git(VersionControl): url = found_remote.split(' ')[1] return url.strip() - def get_revision(self, location, rev=None): + @classmethod + def get_revision(cls, location, rev=None): if rev is None: rev = 'HEAD' - current_rev = self.run_command( + current_rev = cls.run_command( ['rev-parse', rev], show_stdout=False, cwd=location, ) return current_rev.strip() diff --git a/src/pip/_internal/vcs/mercurial.py b/src/pip/_internal/vcs/mercurial.py index 7b6b78d03..3a180beba 100644 --- a/src/pip/_internal/vcs/mercurial.py +++ b/src/pip/_internal/vcs/mercurial.py @@ -73,14 +73,16 @@ class Mercurial(VersionControl): url = path_to_url(url) return url.strip() - def get_revision(self, location): - current_revision = self.run_command( + @classmethod + def get_revision(cls, location): + current_revision = cls.run_command( ['parents', '--template={rev}'], show_stdout=False, cwd=location).strip() return current_revision - def get_revision_hash(self, location): - current_rev_hash = self.run_command( + @classmethod + def get_revision_hash(cls, location): + current_rev_hash = cls.run_command( ['parents', '--template={node}'], show_stdout=False, cwd=location).strip() return current_rev_hash diff --git a/src/pip/_internal/vcs/subversion.py b/src/pip/_internal/vcs/subversion.py index 01e216a08..a926b6e58 100644 --- a/src/pip/_internal/vcs/subversion.py +++ b/src/pip/_internal/vcs/subversion.py @@ -60,7 +60,8 @@ class Subversion(VersionControl): cmd_args = ['update'] + rev_options.to_args() + [dest] self.run_command(cmd_args) - def get_revision(self, location): + @classmethod + def get_revision(cls, location): """ Return the maximum revision for all files under a given location """ @@ -68,16 +69,16 @@ class Subversion(VersionControl): revision = 0 for base, dirs, files in os.walk(location): - if self.dirname not in dirs: + if cls.dirname not in dirs: dirs[:] = [] continue # no sense walking uncontrolled subdirs - dirs.remove(self.dirname) - entries_fn = os.path.join(base, self.dirname, 'entries') + dirs.remove(cls.dirname) + entries_fn = os.path.join(base, cls.dirname, 'entries') if not os.path.exists(entries_fn): # FIXME: should we warn? continue - dirurl, localrev = self._get_svn_url_rev(base) + dirurl, localrev = cls._get_svn_url_rev(base) if base == location: base = dirurl + '/' # save the root url From 8d45557ff17ab63dbccc1418a061e7161013a206 Mon Sep 17 00:00:00 2001 From: Chris Jerdonek Date: Sun, 6 Jan 2019 16:48:43 -0800 Subject: [PATCH 21/69] Make get_src_requirement() a class method. 
--- src/pip/_internal/operations/freeze.py | 2 +- src/pip/_internal/vcs/__init__.py | 3 ++- src/pip/_internal/vcs/bazaar.py | 7 ++++--- src/pip/_internal/vcs/git.py | 9 +++++---- src/pip/_internal/vcs/mercurial.py | 7 ++++--- src/pip/_internal/vcs/subversion.py | 7 ++++--- 6 files changed, 20 insertions(+), 15 deletions(-) diff --git a/src/pip/_internal/operations/freeze.py b/src/pip/_internal/operations/freeze.py index c9ab4ade1..bf40d78b4 100644 --- a/src/pip/_internal/operations/freeze.py +++ b/src/pip/_internal/operations/freeze.py @@ -187,7 +187,7 @@ def get_requirement_info(dist): return (location, True, comments) try: - req = vc_type().get_src_requirement(location, dist.project_name) + req = vc_type.get_src_requirement(location, dist.project_name) except RemoteNotFoundError: req = dist.as_requirement() comments = [ diff --git a/src/pip/_internal/vcs/__init__.py b/src/pip/_internal/vcs/__init__.py index 0c8c9acbf..9cba76464 100644 --- a/src/pip/_internal/vcs/__init__.py +++ b/src/pip/_internal/vcs/__init__.py @@ -445,7 +445,8 @@ class VersionControl(object): rmtree(location) self.obtain(location) - def get_src_requirement(self, location, project_name): + @classmethod + def get_src_requirement(cls, location, project_name): """ Return a string representing the requirement needed to redownload the files currently present in location, something diff --git a/src/pip/_internal/vcs/bazaar.py b/src/pip/_internal/vcs/bazaar.py index 9623a443d..4c6ac79d1 100644 --- a/src/pip/_internal/vcs/bazaar.py +++ b/src/pip/_internal/vcs/bazaar.py @@ -96,13 +96,14 @@ class Bazaar(VersionControl): ) return revision.splitlines()[-1] - def get_src_requirement(self, location, project_name): - repo = self.get_remote_url(location) + @classmethod + def get_src_requirement(cls, location, project_name): + repo = cls.get_remote_url(location) if not repo: return None if not repo.lower().startswith('bzr:'): repo = 'bzr+' + repo - current_rev = self.get_revision(location) + current_rev = cls.get_revision(location) return make_vcs_requirement_url(repo, current_rev, project_name) def is_commit_id_equal(self, dest, name): diff --git a/src/pip/_internal/vcs/git.py b/src/pip/_internal/vcs/git.py index 02e20c769..dd2bd61e8 100644 --- a/src/pip/_internal/vcs/git.py +++ b/src/pip/_internal/vcs/git.py @@ -313,12 +313,13 @@ class Git(VersionControl): return None return os.path.relpath(location, root_dir) - def get_src_requirement(self, location, project_name): - repo = self.get_remote_url(location) + @classmethod + def get_src_requirement(cls, location, project_name): + repo = cls.get_remote_url(location) if not repo.lower().startswith('git:'): repo = 'git+' + repo - current_rev = self.get_revision(location) - subdir = self._get_subdirectory(location) + current_rev = cls.get_revision(location) + subdir = cls._get_subdirectory(location) req = make_vcs_requirement_url(repo, current_rev, project_name, subdir=subdir) diff --git a/src/pip/_internal/vcs/mercurial.py b/src/pip/_internal/vcs/mercurial.py index 3a180beba..26e75dee4 100644 --- a/src/pip/_internal/vcs/mercurial.py +++ b/src/pip/_internal/vcs/mercurial.py @@ -87,11 +87,12 @@ class Mercurial(VersionControl): show_stdout=False, cwd=location).strip() return current_rev_hash - def get_src_requirement(self, location, project_name): - repo = self.get_remote_url(location) + @classmethod + def get_src_requirement(cls, location, project_name): + repo = cls.get_remote_url(location) if not repo.lower().startswith('hg:'): repo = 'hg+' + repo - current_rev_hash = 
self.get_revision_hash(location) + current_rev_hash = cls.get_revision_hash(location) return make_vcs_requirement_url(repo, current_rev_hash, project_name) def is_commit_id_equal(self, dest, name): diff --git a/src/pip/_internal/vcs/subversion.py b/src/pip/_internal/vcs/subversion.py index a926b6e58..42ac5ac35 100644 --- a/src/pip/_internal/vcs/subversion.py +++ b/src/pip/_internal/vcs/subversion.py @@ -183,12 +183,13 @@ class Subversion(VersionControl): return url, rev - def get_src_requirement(self, location, project_name): - repo = self.get_remote_url(location) + @classmethod + def get_src_requirement(cls, location, project_name): + repo = cls.get_remote_url(location) if repo is None: return None repo = 'svn+' + repo - rev = self.get_revision(location) + rev = cls.get_revision(location) return make_vcs_requirement_url(repo, rev, project_name) def is_commit_id_equal(self, dest, name): From 6a882b6d9113d36905639d74c5e14b0327aba849 Mon Sep 17 00:00:00 2001 From: Chris Jerdonek Date: Wed, 9 Jan 2019 01:41:58 -0800 Subject: [PATCH 22/69] Fix tests. --- tests/unit/test_vcs.py | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/tests/unit/test_vcs.py b/tests/unit/test_vcs.py index e84035341..c9bbf37c7 100644 --- a/tests/unit/test_vcs.py +++ b/tests/unit/test_vcs.py @@ -1,5 +1,5 @@ import pytest -from mock import Mock, patch +from mock import patch from pip._vendor.packaging.version import parse as parse_version from pip._internal.vcs import RevOptions, VersionControl @@ -70,16 +70,6 @@ def test_rev_options_make_new(): assert new_options.vcs is vcs -@pytest.fixture -def git(): - git_url = 'https://github.com/pypa/pip-test-package' - sha = '5547fa909e83df8bd743d3978d6667497983a4b7' - git = Git() - git.get_remote_url = Mock(return_value=git_url) - git.get_revision = Mock(return_value=sha) - return git - - def test_looks_like_hash(): assert looks_like_hash(40 * 'a') assert looks_like_hash(40 * 'A') @@ -89,9 +79,17 @@ def test_looks_like_hash(): assert not looks_like_hash(39 * 'a') +@patch('pip._internal.vcs.git.Git.get_revision') +@patch('pip._internal.vcs.git.Git.get_remote_url') @pytest.mark.network -def test_git_get_src_requirements(git): - ret = git.get_src_requirement('.', 'pip-test-package') +def test_git_get_src_requirements(mock_get_remote_url, mock_get_revision): + git_url = 'https://github.com/pypa/pip-test-package' + sha = '5547fa909e83df8bd743d3978d6667497983a4b7' + + mock_get_remote_url.return_value = git_url + mock_get_revision.return_value = sha + + ret = Git.get_src_requirement('.', 'pip-test-package') assert ret == ( 'git+https://github.com/pypa/pip-test-package' @@ -152,11 +150,13 @@ def test_git_resolve_revision_not_found_warning(get_sha_mock, caplog): ('foo', False), (None, False), )) -def test_git_is_commit_id_equal(git, rev_name, result): +@patch('pip._internal.vcs.git.Git.get_revision') +def test_git_is_commit_id_equal(mock_get_revision, rev_name, result): """ Test Git.is_commit_id_equal(). 
""" - assert git.is_commit_id_equal('/path', rev_name) is result + mock_get_revision.return_value = '5547fa909e83df8bd743d3978d6667497983a4b7' + assert Git().is_commit_id_equal('/path', rev_name) is result # The non-SVN backends all use the same get_netloc_and_auth(), so only test From a09913673f267326c1471336c15f9c099a476969 Mon Sep 17 00:00:00 2001 From: Xavier Fernandez Date: Wed, 9 Jan 2019 22:59:59 +0100 Subject: [PATCH 23/69] Redact index password from logs Closes #6124 --- news/6124.bugfix | 1 + src/pip/_internal/cli/base_command.py | 9 +++++++-- 2 files changed, 8 insertions(+), 2 deletions(-) create mode 100644 news/6124.bugfix diff --git a/news/6124.bugfix b/news/6124.bugfix new file mode 100644 index 000000000..94d1339e3 --- /dev/null +++ b/news/6124.bugfix @@ -0,0 +1 @@ +Redact the password from index urls in a debug message (using --no-index & --verbose options together) diff --git a/src/pip/_internal/cli/base_command.py b/src/pip/_internal/cli/base_command.py index 175dbadc1..a8371aa87 100644 --- a/src/pip/_internal/cli/base_command.py +++ b/src/pip/_internal/cli/base_command.py @@ -27,7 +27,9 @@ from pip._internal.req.constructors import ( ) from pip._internal.req.req_file import parse_requirements from pip._internal.utils.logging import setup_logging -from pip._internal.utils.misc import get_prog, normalize_path +from pip._internal.utils.misc import ( + get_prog, normalize_path, redact_password_from_url, +) from pip._internal.utils.outdated import pip_version_check from pip._internal.utils.typing import MYPY_CHECK_RUNNING @@ -287,7 +289,10 @@ class RequirementCommand(Command): """ index_urls = [options.index_url] + options.extra_index_urls if options.no_index: - logger.debug('Ignoring indexes: %s', ','.join(index_urls)) + logger.debug( + 'Ignoring indexes: %s', + ','.join(redact_password_from_url(url) for url in index_urls), + ) index_urls = [] return PackageFinder( From 4dc8710654e02c75824d81b039dec00937f65674 Mon Sep 17 00:00:00 2001 From: Chris Jerdonek Date: Sat, 12 Jan 2019 12:44:45 -0800 Subject: [PATCH 24/69] Tweak freeze comment when no version control detected. (#6128) --- src/pip/_internal/operations/freeze.py | 2 +- tests/functional/test_freeze.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/pip/_internal/operations/freeze.py b/src/pip/_internal/operations/freeze.py index bf40d78b4..388bb73ab 100644 --- a/src/pip/_internal/operations/freeze.py +++ b/src/pip/_internal/operations/freeze.py @@ -182,7 +182,7 @@ def get_requirement_info(dist): location, ) comments = [ - '# Editable, no version control detected ({})'.format(req) + '# Editable install with no version control ({})'.format(req) ] return (location, True, comments) diff --git a/tests/functional/test_freeze.py b/tests/functional/test_freeze.py index 101164e48..61bcc0ffc 100644 --- a/tests/functional/test_freeze.py +++ b/tests/functional/test_freeze.py @@ -131,7 +131,7 @@ def test_freeze_editable_not_vcs(script, tmpdir): # We need to apply os.path.normcase() to the path since that is what # the freeze code does. 
expected = textwrap.dedent("""\ - ...# Editable, no version control detected (version-pkg==0.1) + ...# Editable install with no version control (version-pkg==0.1) -e {} ...""".format(os.path.normcase(pkg_path))) _check_output(result.stdout, expected) From 5e573df69c47064e94e99b847bb6644633399ca6 Mon Sep 17 00:00:00 2001 From: Xavier Fernandez Date: Fri, 11 Jan 2019 11:12:44 +0100 Subject: [PATCH 25/69] Add deprecate_python fixture to easily deprecate python versions pip usually emits a warning and tests have to be adapted --- tests/conftest.py | 11 +++++++++- tests/functional/test_completion.py | 6 +++--- tests/functional/test_freeze.py | 13 ++++++++---- tests/functional/test_install.py | 10 +++++---- tests/functional/test_install_check.py | 28 +++++++++++++++++--------- tests/lib/__init__.py | 6 ++++++ 6 files changed, 53 insertions(+), 21 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 45e888393..a7bf81c5b 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -277,7 +277,7 @@ def with_wheel(virtualenv, wheel_install): @pytest.fixture -def script(tmpdir, virtualenv): +def script(tmpdir, virtualenv, deprecated_python): """ Return a PipTestEnvironment which is unique to each test function and will execute all commands inside of the unique virtual environment for this @@ -301,6 +301,9 @@ def script(tmpdir, virtualenv): # PipTestEnvironment needs to capture and assert against temp capture_temp=True, assert_no_temp=True, + + # Deprecated python versions produce an extra deprecation warning + pip_expect_stderr=deprecated_python, ) @@ -341,3 +344,9 @@ class InMemoryPip(object): @pytest.fixture def in_memory_pip(): return InMemoryPip() + + +@pytest.fixture +def deprecated_python(): + """Used to indicate wheither pip deprecated this python version""" + return False diff --git a/tests/functional/test_completion.py b/tests/functional/test_completion.py index 53380bc37..b4e93da40 100644 --- a/tests/functional/test_completion.py +++ b/tests/functional/test_completion.py @@ -283,10 +283,10 @@ def test_completion_path_after_option(script, data): @pytest.mark.parametrize('flag', ['--bash', '--zsh', '--fish']) -def test_completion_uses_same_executable_name(script, flag): - expect_stderr = sys.version_info[:2] == (3, 3) +def test_completion_uses_same_executable_name(script, flag, deprecated_python): executable_name = 'pip{}'.format(sys.version_info[0]) + # Deprecated python versions produce an extra deprecation warning result = script.run( - executable_name, 'completion', flag, expect_stderr=expect_stderr + executable_name, 'completion', flag, expect_stderr=deprecated_python, ) assert executable_name in result.stdout diff --git a/tests/functional/test_freeze.py b/tests/functional/test_freeze.py index 101164e48..6948a15b6 100644 --- a/tests/functional/test_freeze.py +++ b/tests/functional/test_freeze.py @@ -138,7 +138,7 @@ def test_freeze_editable_not_vcs(script, tmpdir): @pytest.mark.git -def test_freeze_editable_git_with_no_remote(script, tmpdir): +def test_freeze_editable_git_with_no_remote(script, tmpdir, deprecated_python): """ Test an editable Git install with no remote url. """ @@ -146,7 +146,8 @@ def test_freeze_editable_git_with_no_remote(script, tmpdir): script.pip('install', '-e', pkg_path) result = script.pip('freeze') - assert result.stderr == '' + if not deprecated_python: + assert result.stderr == '' # We need to apply os.path.normcase() to the path since that is what # the freeze code does. 
@@ -460,7 +461,8 @@ _freeze_req_opts = textwrap.dedent("""\ """) -def test_freeze_with_requirement_option_file_url_egg_not_installed(script): +def test_freeze_with_requirement_option_file_url_egg_not_installed( + script, deprecated_python): """ Test "freeze -r requirements.txt" with a local file URL whose egg name is not installed. @@ -477,7 +479,10 @@ def test_freeze_with_requirement_option_file_url_egg_not_installed(script): 'Requirement file [requirements.txt] contains {}, but package ' "'Does.Not-Exist' is not installed\n" ).format(url) - assert result.stderr == expected_err + if deprecated_python: + assert expected_err in result.stderr + else: + assert expected_err == result.stderr def test_freeze_with_requirement_option(script): diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py index 4ee64dd8e..19794ef55 100644 --- a/tests/functional/test_install.py +++ b/tests/functional/test_install.py @@ -37,7 +37,8 @@ def test_pep518_uses_build_env(script, data, common_wheels, command, variant): ) -def test_pep518_build_env_uses_same_pip(script, data, pip_src, common_wheels): +def test_pep518_build_env_uses_same_pip( + script, data, pip_src, common_wheels, deprecated_python): """Ensure the subprocess call to pip for installing the build dependencies is using the same version of pip. """ @@ -47,6 +48,7 @@ def test_pep518_build_env_uses_same_pip(script, data, pip_src, common_wheels): 'python', pip_src / 'src/pip', 'install', '--no-index', '-f', common_wheels, '-f', data.packages, data.src.join("pep518-3.0"), + expect_stderr=deprecated_python, ) @@ -162,8 +164,8 @@ def test_pep518_forkbombs(script, data, common_wheels, command, package): @pytest.mark.network -def test_pip_second_command_line_interface_works(script, pip_src, data, - common_wheels): +def test_pip_second_command_line_interface_works( + script, pip_src, data, common_wheels, deprecated_python): """ Check if ``pip`` commands behaves equally """ @@ -171,7 +173,7 @@ def test_pip_second_command_line_interface_works(script, pip_src, data, script.pip_install_local('-f', common_wheels, pip_src) # On old versions of Python, urllib3/requests will raise a warning about # the lack of an SSLContext. 
- kwargs = {} + kwargs = {'expect_stderr': deprecated_python} if pyversion_tuple < (2, 7, 9): kwargs['expect_stderr'] = True diff --git a/tests/functional/test_install_check.py b/tests/functional/test_install_check.py index b7d987289..01032e08b 100644 --- a/tests/functional/test_install_check.py +++ b/tests/functional/test_install_check.py @@ -1,11 +1,14 @@ from tests.lib import create_test_package_with_setup -def matches_expected_lines(string, expected_lines): - return set(string.splitlines()) == set(expected_lines) +def matches_expected_lines(string, expected_lines, exact=True): + if exact: + return set(string.splitlines()) == set(expected_lines) + # If not exact, check that all expected lines are present + return set(expected_lines) <= set(string.splitlines()) -def test_check_install_canonicalization(script): +def test_check_install_canonicalization(script, deprecated_python): pkga_path = create_test_package_with_setup( script, name='pkgA', @@ -33,7 +36,9 @@ def test_check_install_canonicalization(script): expected_lines = [ "pkga 1.0 requires SPECIAL.missing, which is not installed.", ] - assert matches_expected_lines(result.stderr, expected_lines) + # Deprecated python versions produce an extra warning on stderr + assert matches_expected_lines( + result.stderr, expected_lines, exact=not deprecated_python) assert result.returncode == 0 # Install the second missing package and expect that there is no warning @@ -42,7 +47,8 @@ def test_check_install_canonicalization(script): result = script.pip( 'install', '--no-index', special_path, '--quiet', ) - assert matches_expected_lines(result.stderr, []) + assert matches_expected_lines( + result.stderr, [], exact=not deprecated_python) assert result.returncode == 0 # Double check that all errors are resolved in the end @@ -54,7 +60,8 @@ def test_check_install_canonicalization(script): assert result.returncode == 0 -def test_check_install_does_not_warn_for_out_of_graph_issues(script): +def test_check_install_does_not_warn_for_out_of_graph_issues( + script, deprecated_python): pkg_broken_path = create_test_package_with_setup( script, name='broken', @@ -74,7 +81,9 @@ def test_check_install_does_not_warn_for_out_of_graph_issues(script): # Install a package without it's dependencies result = script.pip('install', '--no-index', pkg_broken_path, '--no-deps') - assert matches_expected_lines(result.stderr, []) + # Deprecated python versions produce an extra warning on stderr + assert matches_expected_lines( + result.stderr, [], exact=not deprecated_python) # Install conflict package result = script.pip( @@ -86,14 +95,15 @@ def test_check_install_does_not_warn_for_out_of_graph_issues(script): "broken 1.0 has requirement conflict<1.0, but " "you'll have conflict 1.0 which is incompatible." 
), - ]) + ], exact=not deprecated_python) # Install unrelated package result = script.pip( 'install', '--no-index', pkg_unrelated_path, '--quiet', ) # should not warn about broken's deps when installing unrelated package - assert matches_expected_lines(result.stderr, []) + assert matches_expected_lines( + result.stderr, [], exact=not deprecated_python) result = script.pip('check', expect_error=True) expected_lines = [ diff --git a/tests/lib/__init__.py b/tests/lib/__init__.py index 935c8f2cf..5269b8409 100644 --- a/tests/lib/__init__.py +++ b/tests/lib/__init__.py @@ -323,6 +323,10 @@ class PipTestEnvironment(TestFileEnvironment): environ["PYTHONIOENCODING"] = "UTF-8" kwargs["environ"] = environ + # Whether all pip invocations should expect stderr + # (useful for Python version deprecation) + self.pip_expect_stderr = kwargs.pop('pip_expect_stderr', None) + # Call the TestFileEnvironment __init__ super(PipTestEnvironment, self).__init__(base_path, *args, **kwargs) @@ -375,6 +379,8 @@ class PipTestEnvironment(TestFileEnvironment): ) def pip(self, *args, **kwargs): + if self.pip_expect_stderr: + kwargs['expect_stderr'] = True # On old versions of Python, urllib3/requests will raise a warning # about the lack of an SSLContext. Expect it when running commands # that will touch the outside world. From b268c67f84b1e4ef93c67831c8250d3d5bb1d3c4 Mon Sep 17 00:00:00 2001 From: Xavier Fernandez Date: Fri, 11 Jan 2019 11:14:25 +0100 Subject: [PATCH 26/69] Deprecate Python 3.4 --- news/6106.removal | 1 + src/pip/_internal/cli/base_command.py | 10 ++++++++++ tests/conftest.py | 2 +- 3 files changed, 12 insertions(+), 1 deletion(-) create mode 100644 news/6106.removal diff --git a/news/6106.removal b/news/6106.removal new file mode 100644 index 000000000..56bd9a9c7 --- /dev/null +++ b/news/6106.removal @@ -0,0 +1 @@ +Deprecate support for Python 3.4 diff --git a/src/pip/_internal/cli/base_command.py b/src/pip/_internal/cli/base_command.py index a8371aa87..e1f45826c 100644 --- a/src/pip/_internal/cli/base_command.py +++ b/src/pip/_internal/cli/base_command.py @@ -26,6 +26,7 @@ from pip._internal.req.constructors import ( install_req_from_editable, install_req_from_line, ) from pip._internal.req.req_file import parse_requirements +from pip._internal.utils.deprecation import deprecated from pip._internal.utils.logging import setup_logging from pip._internal.utils.misc import ( get_prog, normalize_path, redact_password_from_url, @@ -134,6 +135,15 @@ class Command(object): user_log_file=options.log, ) + if sys.version_info[:2] == (3, 4): + deprecated( + "Python 3.4 support has been deprecated. pip 19.1 will be the " + "last one supporting it. Please upgrade your Python as Python " + "3.4 won't be maintained after March 2019 (cf PEP 429).", + replacement=None, + gone_in='19.2', + ) + # TODO: Try to get these passing down from the command? # without resorting to os.environ to hold these. # This also affects isolated builds and it should. diff --git a/tests/conftest.py b/tests/conftest.py index a7bf81c5b..1b51fc8d3 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -349,4 +349,4 @@ def in_memory_pip(): @pytest.fixture def deprecated_python(): """Used to indicate wheither pip deprecated this python version""" - return False + return sys.version_info[:2] == (3, 4) From 35b1cc1c97fc69347ec4140660031a0ecfab0d1e Mon Sep 17 00:00:00 2001 From: Chris Jerdonek Date: Mon, 14 Jan 2019 23:25:20 -0800 Subject: [PATCH 27/69] Tweak two use_pep517 identity checks. 
(#6135) --- src/pip/_internal/pyproject.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/src/pip/_internal/pyproject.py b/src/pip/_internal/pyproject.py index c99b6c641..d3e1bbe7d 100644 --- a/src/pip/_internal/pyproject.py +++ b/src/pip/_internal/pyproject.py @@ -57,17 +57,20 @@ def load_pyproject_toml( build_system = None # The following cases must use PEP 517 - # We check for use_pep517 equalling False because that - # means the user explicitly requested --no-use-pep517 + # We check for use_pep517 being non-None and falsey because that means + # the user explicitly requested --no-use-pep517. The value 0 as + # opposed to False can occur when the value is provided via an + # environment variable or config file option (due to the quirk of + # strtobool() returning an integer in pip's configuration code). if has_pyproject and not has_setup: - if use_pep517 is False: + if use_pep517 is not None and not use_pep517: raise InstallationError( "Disabling PEP 517 processing is invalid: " "project does not have a setup.py" ) use_pep517 = True elif build_system and "build-backend" in build_system: - if use_pep517 is False: + if use_pep517 is not None and not use_pep517: raise InstallationError( "Disabling PEP 517 processing is invalid: " "project specifies a build backend of {} " From c0cc004ca82f14a19282f6e742ff2ac976febcba Mon Sep 17 00:00:00 2001 From: Chris Jerdonek Date: Sun, 13 Jan 2019 07:59:17 -0800 Subject: [PATCH 28/69] Raise an error if the user tries to use PIP_NO_USE_PEP517. --- src/pip/_internal/cli/cmdoptions.py | 42 +++++++++- tests/unit/test_options.py | 117 ++++++++++++++++++++++++++++ 2 files changed, 158 insertions(+), 1 deletion(-) diff --git a/src/pip/_internal/cli/cmdoptions.py b/src/pip/_internal/cli/cmdoptions.py index 0aebf75e8..3000b078d 100644 --- a/src/pip/_internal/cli/cmdoptions.py +++ b/src/pip/_internal/cli/cmdoptions.py @@ -9,6 +9,7 @@ pass on state. To be consistent, all options will follow this design. """ from __future__ import absolute_import +import textwrap import warnings from distutils.util import strtobool from functools import partial @@ -28,6 +29,20 @@ if MYPY_CHECK_RUNNING: from pip._internal.cli.parser import ConfigOptionParser # noqa: F401 +def raise_option_error(parser, option, msg): + """ + Raise an option parsing error using parser.error(). + + Args: + parser: an OptionParser instance. + option: an Option instance. + msg: the error text. + """ + msg = '{} error: {}'.format(option, msg) + msg = textwrap.fill(' '.join(msg.split())) + parser.error(msg) + + def make_option_group(group, parser): # type: (Dict[str, Any], ConfigOptionParser) -> OptionGroup """ @@ -601,6 +616,30 @@ no_build_isolation = partial( 'if this option is used.' ) # type: Callable[..., Option] + +def no_use_pep517_callback(option, opt, value, parser): + """ + Process a value provided for the --no-use-pep517 option. + + This is an optparse.Option callback for the no_use_pep517 option. + """ + # Since --no-use-pep517 doesn't accept arguments, the value argument + # will be None if --no-use-pep517 is passed via the command-line. + # However, the value can be non-None if the option is triggered e.g. + # by an environment variable, for example "PIP_NO_USE_PEP517=true". + if value is not None: + msg = """A value was passed for --no-use-pep517, + probably using either the PIP_NO_USE_PEP517 environment variable + or the "no-use-pep517" config file option. 
Use an appropriate value + of the PIP_USE_PEP517 environment variable or the "use-pep517" + config file option instead. + """ + raise_option_error(parser, option=option, msg=msg) + + # Otherwise, --no-use-pep517 was passed via the command-line. + parser.values.use_pep517 = False + + use_pep517 = partial( Option, '--use-pep517', @@ -615,7 +654,8 @@ no_use_pep517 = partial( Option, '--no-use-pep517', dest='use_pep517', - action='store_false', + action='callback', + callback=no_use_pep517_callback, default=None, help=SUPPRESS_HELP ) # type: Any diff --git a/tests/unit/test_options.py b/tests/unit/test_options.py index e0a0885b3..b8da7b073 100644 --- a/tests/unit/test_options.py +++ b/tests/unit/test_options.py @@ -5,9 +5,27 @@ import pytest import pip._internal.configuration from pip._internal import main +from pip._internal.commands import DownloadCommand from tests.lib.options_helpers import AddFakeCommandMixin +@contextmanager +def temp_environment_variable(name, value): + not_set = object() + original = os.environ[name] if name in os.environ else not_set + os.environ[name] = value + + try: + yield + finally: + # Return the environment variable to its original state. + if original is not_set: + if name in os.environ: + del os.environ[name] + else: + os.environ[name] = original + + @contextmanager def assert_raises_message(exc_class, expected): """ @@ -19,6 +37,22 @@ def assert_raises_message(exc_class, expected): assert str(excinfo.value) == expected +@contextmanager +def assert_option_error(capsys, expected): + """ + Assert that a SystemExit occurred because of a parsing error. + + Args: + expected: an expected substring of stderr. + """ + with pytest.raises(SystemExit) as excinfo: + yield + + assert excinfo.value.code == 2 + stderr = capsys.readouterr().err + assert expected in stderr + + def assert_is_default_cache_dir(value): # This path looks different on different platforms, but the path always # has the substring "pip". @@ -157,6 +191,89 @@ class TestOptionPrecedence(AddFakeCommandMixin): main(['--no-cache-dir', 'fake']) +class TestUsePEP517Options(object): + + """ + Test options related to using --use-pep517. + """ + + def parse_args(self, args): + # We use DownloadCommand since that is one of the few Command + # classes with the use_pep517 options. + command = DownloadCommand() + options, args = command.parse_args(args) + + return options + + def test_no_option(self): + """ + Test passing no option. + """ + options = self.parse_args([]) + assert options.use_pep517 is None + + def test_use_pep517(self): + """ + Test passing --use-pep517. + """ + options = self.parse_args(['--use-pep517']) + assert options.use_pep517 is True + + def test_no_use_pep517(self): + """ + Test passing --no-use-pep517. + """ + options = self.parse_args(['--no-use-pep517']) + assert options.use_pep517 is False + + def test_PIP_USE_PEP517_true(self): + """ + Test setting PIP_USE_PEP517 to "true". + """ + with temp_environment_variable('PIP_USE_PEP517', 'true'): + options = self.parse_args([]) + # This is an int rather than a boolean because strtobool() in pip's + # configuration code returns an int. + assert options.use_pep517 == 1 + + def test_PIP_USE_PEP517_false(self): + """ + Test setting PIP_USE_PEP517 to "false". + """ + with temp_environment_variable('PIP_USE_PEP517', 'false'): + options = self.parse_args([]) + # This is an int rather than a boolean because strtobool() in pip's + # configuration code returns an int. 
+ assert options.use_pep517 == 0 + + def test_use_pep517_and_PIP_USE_PEP517_false(self): + """ + Test passing --use-pep517 and setting PIP_USE_PEP517 to "false". + """ + with temp_environment_variable('PIP_USE_PEP517', 'false'): + options = self.parse_args(['--use-pep517']) + assert options.use_pep517 is True + + def test_no_use_pep517_and_PIP_USE_PEP517_true(self): + """ + Test passing --no-use-pep517 and setting PIP_USE_PEP517 to "true". + """ + with temp_environment_variable('PIP_USE_PEP517', 'true'): + options = self.parse_args(['--no-use-pep517']) + assert options.use_pep517 is False + + def test_PIP_NO_USE_PEP517(self, capsys): + """ + Test setting PIP_NO_USE_PEP517, which isn't allowed. + """ + expected_err = ( + '--no-use-pep517 error: A value was passed for --no-use-pep517,\n' + ) + with temp_environment_variable('PIP_NO_USE_PEP517', 'true'): + with assert_option_error(capsys, expected=expected_err): + self.parse_args([]) + + class TestOptionsInterspersed(AddFakeCommandMixin): def test_general_option_after_subcommand(self): From 5fe31579177dd3d221c41e1e6002cd831bb66a9e Mon Sep 17 00:00:00 2001 From: Chris Jerdonek Date: Sun, 13 Jan 2019 02:06:34 -0800 Subject: [PATCH 29/69] Change the --no-cache-dir error to use raise_option_error(). --- src/pip/_internal/cli/cmdoptions.py | 5 ++++- tests/unit/test_options.py | 18 +++++------------- 2 files changed, 9 insertions(+), 14 deletions(-) diff --git a/src/pip/_internal/cli/cmdoptions.py b/src/pip/_internal/cli/cmdoptions.py index 3000b078d..5cf5ee970 100644 --- a/src/pip/_internal/cli/cmdoptions.py +++ b/src/pip/_internal/cli/cmdoptions.py @@ -555,7 +555,10 @@ def no_cache_dir_callback(option, opt, value, parser): # environment variable, like PIP_NO_CACHE_DIR=true. if value is not None: # Then parse the string value to get argument error-checking. - strtobool(value) + try: + strtobool(value) + except ValueError as exc: + raise_option_error(parser, option=option, msg=str(exc)) # Originally, setting PIP_NO_CACHE_DIR to a value that strtobool() # converted to 0 (like "false" or "no") caused cache_dir to be disabled diff --git a/tests/unit/test_options.py b/tests/unit/test_options.py index b8da7b073..3215a9540 100644 --- a/tests/unit/test_options.py +++ b/tests/unit/test_options.py @@ -26,17 +26,6 @@ def temp_environment_variable(name, value): os.environ[name] = original -@contextmanager -def assert_raises_message(exc_class, expected): - """ - Assert that an exception with the given type and message is raised. - """ - with pytest.raises(exc_class) as excinfo: - yield - - assert str(excinfo.value) == expected - - @contextmanager def assert_option_error(capsys, expected): """ @@ -181,13 +170,16 @@ class TestOptionPrecedence(AddFakeCommandMixin): # value in this case). assert options.cache_dir is False - def test_cache_dir__PIP_NO_CACHE_DIR_invalid__with_no_cache_dir(self): + def test_cache_dir__PIP_NO_CACHE_DIR_invalid__with_no_cache_dir( + self, capsys, + ): """ Test setting PIP_NO_CACHE_DIR to an invalid value while also passing --no-cache-dir. 
""" os.environ['PIP_NO_CACHE_DIR'] = 'maybe' - with assert_raises_message(ValueError, "invalid truth value 'maybe'"): + expected_err = "--no-cache-dir error: invalid truth value 'maybe'" + with assert_option_error(capsys, expected=expected_err): main(['--no-cache-dir', 'fake']) From a06d846a1d5567b5298406a52cfd3d052b0f2c6f Mon Sep 17 00:00:00 2001 From: Jon Dufresne Date: Tue, 15 Jan 2019 19:44:05 -0800 Subject: [PATCH 30/69] Remove workaround for unsupported Pythons subprocess.check_call() was added in Python 2.5. https://docs.python.org/2/library/subprocess.html#subprocess.check_call --- tests/lib/local_repos.py | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/tests/lib/local_repos.py b/tests/lib/local_repos.py index 54a330bb2..612e45509 100644 --- a/tests/lib/local_repos.py +++ b/tests/lib/local_repos.py @@ -8,14 +8,9 @@ from pip._vendor.six.moves.urllib import request as urllib_request from pip._internal.vcs import bazaar, git, mercurial, subversion from tests.lib import path_to_url -if hasattr(subprocess, "check_call"): - subprocess_call = subprocess.check_call -else: - subprocess_call = subprocess.call - def _create_initools_repository(directory): - subprocess_call('svnadmin create INITools'.split(), cwd=directory) + subprocess.check_call('svnadmin create INITools'.split(), cwd=directory) def _dump_initools_repository(directory): @@ -26,7 +21,7 @@ def _dump_initools_repository(directory): initools_folder = os.path.join(directory, 'INITools') devnull = open(os.devnull, 'w') dump = open(filename) - subprocess_call( + subprocess.check_call( ['svnadmin', 'load', initools_folder], stdin=dump, stdout=devnull, From 96a0ac24bda9db81b81ad662b8a158a65285dba6 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Fri, 18 Jan 2019 09:23:03 +0530 Subject: [PATCH 31/69] Alphabetically sort the vendor requirements --- src/pip/_vendor/vendor.txt | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 2e6de2a92..2e0914c8b 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -1,23 +1,23 @@ appdirs==1.4.3 +CacheControl==0.12.5 +colorama==0.3.9 distlib==0.2.7 distro==1.3.0 html5lib==1.0.1 -six==1.11.0 -colorama==0.3.9 -CacheControl==0.12.5 -msgpack==0.5.6 -lockfile==0.12.2 -progress==1.4 ipaddress==1.0.22 # Only needed on 2.6 and 2.7 +lockfile==0.12.2 +msgpack==0.5.6 packaging==18.0 pep517==0.3 +progress==1.4 pyparsing==2.2.1 pytoml==0.1.19 -retrying==1.3.3 requests==2.19.1 + certifi==2018.8.24 chardet==3.0.4 idna==2.7 urllib3==1.23 - certifi==2018.8.24 +retrying==1.3.3 setuptools==40.4.3 +six==1.11.0 webencodings==0.5.1 From e35524ae5d77217b387c24e995aee65d3cd6a185 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Fri, 18 Jan 2019 10:40:11 +0530 Subject: [PATCH 32/69] Update colorama to 0.4.1 --- news/colorama.vendor | 1 + src/pip/_vendor/colorama/LICENSE.txt | 1 - src/pip/_vendor/colorama/__init__.py | 3 +- src/pip/_vendor/colorama/ansitowin32.py | 43 ++++++++++++++++++------- src/pip/_vendor/colorama/initialise.py | 2 -- src/pip/_vendor/colorama/win32.py | 18 ++++------- src/pip/_vendor/colorama/winterm.py | 11 +++++-- src/pip/_vendor/vendor.txt | 2 +- 8 files changed, 51 insertions(+), 30 deletions(-) create mode 100644 news/colorama.vendor diff --git a/news/colorama.vendor b/news/colorama.vendor new file mode 100644 index 000000000..ee550c5fc --- /dev/null +++ b/news/colorama.vendor @@ -0,0 +1 @@ +Update colorama to 0.4.1 diff --git 
a/src/pip/_vendor/colorama/LICENSE.txt b/src/pip/_vendor/colorama/LICENSE.txt index 5f567799f..3105888ec 100644 --- a/src/pip/_vendor/colorama/LICENSE.txt +++ b/src/pip/_vendor/colorama/LICENSE.txt @@ -25,4 +25,3 @@ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - diff --git a/src/pip/_vendor/colorama/__init__.py b/src/pip/_vendor/colorama/__init__.py index f4d9ce210..2a3bf4714 100644 --- a/src/pip/_vendor/colorama/__init__.py +++ b/src/pip/_vendor/colorama/__init__.py @@ -3,5 +3,4 @@ from .initialise import init, deinit, reinit, colorama_text from .ansi import Fore, Back, Style, Cursor from .ansitowin32 import AnsiToWin32 -__version__ = '0.3.9' - +__version__ = '0.4.1' diff --git a/src/pip/_vendor/colorama/ansitowin32.py b/src/pip/_vendor/colorama/ansitowin32.py index 1d6e6059c..359c92be5 100644 --- a/src/pip/_vendor/colorama/ansitowin32.py +++ b/src/pip/_vendor/colorama/ansitowin32.py @@ -13,14 +13,6 @@ if windll is not None: winterm = WinTerm() -def is_stream_closed(stream): - return not hasattr(stream, 'closed') or stream.closed - - -def is_a_tty(stream): - return hasattr(stream, 'isatty') and stream.isatty() - - class StreamWrapper(object): ''' Wraps a stream (such as stdout), acting as a transparent proxy for all @@ -36,9 +28,38 @@ class StreamWrapper(object): def __getattr__(self, name): return getattr(self.__wrapped, name) + def __enter__(self, *args, **kwargs): + # special method lookup bypasses __getattr__/__getattribute__, see + # https://stackoverflow.com/questions/12632894/why-doesnt-getattr-work-with-exit + # thus, contextlib magic methods are not proxied via __getattr__ + return self.__wrapped.__enter__(*args, **kwargs) + + def __exit__(self, *args, **kwargs): + return self.__wrapped.__exit__(*args, **kwargs) + def write(self, text): self.__convertor.write(text) + def isatty(self): + stream = self.__wrapped + if 'PYCHARM_HOSTED' in os.environ: + if stream is not None and (stream is sys.__stdout__ or stream is sys.__stderr__): + return True + try: + stream_isatty = stream.isatty + except AttributeError: + return False + else: + return stream_isatty() + + @property + def closed(self): + stream = self.__wrapped + try: + return stream.closed + except AttributeError: + return True + class AnsiToWin32(object): ''' @@ -68,12 +89,12 @@ class AnsiToWin32(object): # should we strip ANSI sequences from our output? if strip is None: - strip = conversion_supported or (not is_stream_closed(wrapped) and not is_a_tty(wrapped)) + strip = conversion_supported or (not self.stream.closed and not self.stream.isatty()) self.strip = strip # should we should convert ANSI sequences into win32 calls? 
if convert is None: - convert = conversion_supported and not is_stream_closed(wrapped) and is_a_tty(wrapped) + convert = conversion_supported and not self.stream.closed and self.stream.isatty() self.convert = convert # dict of ansi codes to win32 functions and parameters @@ -149,7 +170,7 @@ class AnsiToWin32(object): def reset_all(self): if self.convert: self.call_win32('m', (0,)) - elif not self.strip and not is_stream_closed(self.wrapped): + elif not self.strip and not self.stream.closed: self.wrapped.write(Style.RESET_ALL) diff --git a/src/pip/_vendor/colorama/initialise.py b/src/pip/_vendor/colorama/initialise.py index 834962a35..430d06687 100644 --- a/src/pip/_vendor/colorama/initialise.py +++ b/src/pip/_vendor/colorama/initialise.py @@ -78,5 +78,3 @@ def wrap_stream(stream, convert, strip, autoreset, wrap): if wrapper.should_wrap(): stream = wrapper.stream return stream - - diff --git a/src/pip/_vendor/colorama/win32.py b/src/pip/_vendor/colorama/win32.py index 8262e350a..c2d836033 100644 --- a/src/pip/_vendor/colorama/win32.py +++ b/src/pip/_vendor/colorama/win32.py @@ -89,11 +89,6 @@ else: ] _SetConsoleTitleW.restype = wintypes.BOOL - handles = { - STDOUT: _GetStdHandle(STDOUT), - STDERR: _GetStdHandle(STDERR), - } - def _winapi_test(handle): csbi = CONSOLE_SCREEN_BUFFER_INFO() success = _GetConsoleScreenBufferInfo( @@ -101,17 +96,18 @@ else: return bool(success) def winapi_test(): - return any(_winapi_test(h) for h in handles.values()) + return any(_winapi_test(h) for h in + (_GetStdHandle(STDOUT), _GetStdHandle(STDERR))) def GetConsoleScreenBufferInfo(stream_id=STDOUT): - handle = handles[stream_id] + handle = _GetStdHandle(stream_id) csbi = CONSOLE_SCREEN_BUFFER_INFO() success = _GetConsoleScreenBufferInfo( handle, byref(csbi)) return csbi def SetConsoleTextAttribute(stream_id, attrs): - handle = handles[stream_id] + handle = _GetStdHandle(stream_id) return _SetConsoleTextAttribute(handle, attrs) def SetConsoleCursorPosition(stream_id, position, adjust=True): @@ -129,11 +125,11 @@ else: adjusted_position.Y += sr.Top adjusted_position.X += sr.Left # Resume normal processing - handle = handles[stream_id] + handle = _GetStdHandle(stream_id) return _SetConsoleCursorPosition(handle, adjusted_position) def FillConsoleOutputCharacter(stream_id, char, length, start): - handle = handles[stream_id] + handle = _GetStdHandle(stream_id) char = c_char(char.encode()) length = wintypes.DWORD(length) num_written = wintypes.DWORD(0) @@ -144,7 +140,7 @@ else: def FillConsoleOutputAttribute(stream_id, attr, length, start): ''' FillConsoleOutputAttribute( hConsole, csbi.wAttributes, dwConSize, coordScreen, &cCharsWritten )''' - handle = handles[stream_id] + handle = _GetStdHandle(stream_id) attribute = wintypes.WORD(attr) length = wintypes.DWORD(length) num_written = wintypes.DWORD(0) diff --git a/src/pip/_vendor/colorama/winterm.py b/src/pip/_vendor/colorama/winterm.py index 60309d3c0..0fdb4ec4e 100644 --- a/src/pip/_vendor/colorama/winterm.py +++ b/src/pip/_vendor/colorama/winterm.py @@ -44,6 +44,7 @@ class WinTerm(object): def reset_all(self, on_stderr=None): self.set_attrs(self._default) self.set_console(attrs=self._default) + self._light = 0 def fore(self, fore=None, light=False, on_stderr=False): if fore is None: @@ -122,12 +123,15 @@ class WinTerm(object): if mode == 0: from_coord = csbi.dwCursorPosition cells_to_erase = cells_in_screen - cells_before_cursor - if mode == 1: + elif mode == 1: from_coord = win32.COORD(0, 0) cells_to_erase = cells_before_cursor elif mode == 2: from_coord = 
win32.COORD(0, 0) cells_to_erase = cells_in_screen + else: + # invalid mode + return # fill the entire screen with blanks win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord) # now set the buffer's attributes accordingly @@ -147,12 +151,15 @@ class WinTerm(object): if mode == 0: from_coord = csbi.dwCursorPosition cells_to_erase = csbi.dwSize.X - csbi.dwCursorPosition.X - if mode == 1: + elif mode == 1: from_coord = win32.COORD(0, csbi.dwCursorPosition.Y) cells_to_erase = csbi.dwCursorPosition.X elif mode == 2: from_coord = win32.COORD(0, csbi.dwCursorPosition.Y) cells_to_erase = csbi.dwSize.X + else: + # invalid mode + return # fill the entire screen with blanks win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord) # now set the buffer's attributes accordingly diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 2e0914c8b..25e02fd7f 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -1,6 +1,6 @@ appdirs==1.4.3 CacheControl==0.12.5 -colorama==0.3.9 +colorama==0.4.1 distlib==0.2.7 distro==1.3.0 html5lib==1.0.1 From 606c0edd630383bc34532db65163a3ea2ad2c037 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Fri, 18 Jan 2019 10:41:41 +0530 Subject: [PATCH 33/69] Update urllib3 to 1.24.1 --- news/urllib3.vendor | 1 + src/pip/_vendor/urllib3/__init__.py | 9 +- src/pip/_vendor/urllib3/_collections.py | 5 +- src/pip/_vendor/urllib3/connection.py | 40 +-- src/pip/_vendor/urllib3/connectionpool.py | 18 +- .../urllib3/contrib/_appengine_environ.py | 30 ++ src/pip/_vendor/urllib3/contrib/appengine.py | 36 +-- src/pip/_vendor/urllib3/contrib/ntlmpool.py | 3 +- src/pip/_vendor/urllib3/contrib/pyopenssl.py | 25 +- .../urllib3/packages/backports/makefile.py | 2 +- .../_vendor/urllib3/packages/ordered_dict.py | 259 ------------------ .../ssl_match_hostname/_implementation.py | 3 +- src/pip/_vendor/urllib3/poolmanager.py | 1 + src/pip/_vendor/urllib3/request.py | 2 +- src/pip/_vendor/urllib3/response.py | 47 +++- src/pip/_vendor/urllib3/util/connection.py | 8 + src/pip/_vendor/urllib3/util/response.py | 10 +- src/pip/_vendor/urllib3/util/retry.py | 2 +- src/pip/_vendor/urllib3/util/ssl_.py | 35 +-- src/pip/_vendor/urllib3/util/wait.py | 5 +- src/pip/_vendor/vendor.txt | 2 +- 21 files changed, 151 insertions(+), 392 deletions(-) create mode 100644 news/urllib3.vendor create mode 100755 src/pip/_vendor/urllib3/contrib/_appengine_environ.py delete mode 100755 src/pip/_vendor/urllib3/packages/ordered_dict.py diff --git a/news/urllib3.vendor b/news/urllib3.vendor new file mode 100644 index 000000000..ba188f653 --- /dev/null +++ b/news/urllib3.vendor @@ -0,0 +1 @@ +Update urllib3 to 1.24.1 diff --git a/src/pip/_vendor/urllib3/__init__.py b/src/pip/_vendor/urllib3/__init__.py index 4bd533b5b..148a9c31a 100755 --- a/src/pip/_vendor/urllib3/__init__.py +++ b/src/pip/_vendor/urllib3/__init__.py @@ -23,16 +23,11 @@ from .util.retry import Retry # Set default logging handler to avoid "No handler found" warnings. 
import logging -try: # Python 2.7+ - from logging import NullHandler -except ImportError: - class NullHandler(logging.Handler): - def emit(self, record): - pass +from logging import NullHandler __author__ = 'Andrey Petrov (andrey.petrov@shazow.net)' __license__ = 'MIT' -__version__ = '1.23' +__version__ = '1.24.1' __all__ = ( 'HTTPConnectionPool', diff --git a/src/pip/_vendor/urllib3/_collections.py b/src/pip/_vendor/urllib3/_collections.py index 6e36b84e5..34f23811c 100755 --- a/src/pip/_vendor/urllib3/_collections.py +++ b/src/pip/_vendor/urllib3/_collections.py @@ -14,10 +14,7 @@ except ImportError: # Platform-specific: No threads available pass -try: # Python 2.7+ - from collections import OrderedDict -except ImportError: - from .packages.ordered_dict import OrderedDict +from collections import OrderedDict from .exceptions import InvalidHeader from .packages.six import iterkeys, itervalues, PY3 diff --git a/src/pip/_vendor/urllib3/connection.py b/src/pip/_vendor/urllib3/connection.py index a03b573f0..02b36654b 100755 --- a/src/pip/_vendor/urllib3/connection.py +++ b/src/pip/_vendor/urllib3/connection.py @@ -2,7 +2,6 @@ from __future__ import absolute_import import datetime import logging import os -import sys import socket from socket import error as SocketError, timeout as SocketTimeout import warnings @@ -78,9 +77,6 @@ class HTTPConnection(_HTTPConnection, object): - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool` - ``source_address``: Set the source address for the current connection. - - .. note:: This is ignored for Python 2.6. It is only applied for 2.7 and 3.x - - ``socket_options``: Set specific options on the underlying socket. If not specified, then defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy. @@ -108,21 +104,13 @@ class HTTPConnection(_HTTPConnection, object): if six.PY3: # Python 3 kw.pop('strict', None) - # Pre-set source_address in case we have an older Python like 2.6. + # Pre-set source_address. self.source_address = kw.get('source_address') - if sys.version_info < (2, 7): # Python 2.6 - # _HTTPConnection on Python 2.6 will balk at this keyword arg, but - # not newer versions. We can still use it when creating a - # connection though, so we pop it *after* we have saved it as - # self.source_address. - kw.pop('source_address', None) - #: The socket options provided by the user. If no options are #: provided, we use the default options. self.socket_options = kw.pop('socket_options', self.default_socket_options) - # Superclass also sets self.source_address in Python 2.7+. _HTTPConnection.__init__(self, *args, **kw) @property @@ -183,10 +171,7 @@ class HTTPConnection(_HTTPConnection, object): def _prepare_conn(self, conn): self.sock = conn - # the _tunnel_host attribute was added in python 2.6.3 (via - # http://hg.python.org/cpython/rev/0f57b30a152f) so pythons 2.6(0-2) do - # not have them. - if getattr(self, '_tunnel_host', None): + if self._tunnel_host: # TODO: Fix tunnel so it doesn't depend on self.sock state. 
self._tunnel() # Mark this connection as not reusable @@ -217,13 +202,13 @@ class HTTPConnection(_HTTPConnection, object): self.endheaders() if body is not None: - stringish_types = six.string_types + (six.binary_type,) + stringish_types = six.string_types + (bytes,) if isinstance(body, stringish_types): body = (body,) for chunk in body: if not chunk: continue - if not isinstance(chunk, six.binary_type): + if not isinstance(chunk, bytes): chunk = chunk.encode('utf8') len_str = hex(len(chunk))[2:] self.send(len_str.encode('utf-8')) @@ -242,7 +227,7 @@ class HTTPSConnection(HTTPConnection): def __init__(self, host, port=None, key_file=None, cert_file=None, strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, - ssl_context=None, **kw): + ssl_context=None, server_hostname=None, **kw): HTTPConnection.__init__(self, host, port, strict=strict, timeout=timeout, **kw) @@ -250,6 +235,7 @@ class HTTPSConnection(HTTPConnection): self.key_file = key_file self.cert_file = cert_file self.ssl_context = ssl_context + self.server_hostname = server_hostname # Required property for Google AppEngine 1.9.0 which otherwise causes # HTTPS requests to go out as HTTP. (See Issue #356) @@ -270,6 +256,7 @@ class HTTPSConnection(HTTPConnection): keyfile=self.key_file, certfile=self.cert_file, ssl_context=self.ssl_context, + server_hostname=self.server_hostname ) @@ -312,12 +299,9 @@ class VerifiedHTTPSConnection(HTTPSConnection): def connect(self): # Add certificate verification conn = self._new_conn() - hostname = self.host - if getattr(self, '_tunnel_host', None): - # _tunnel_host was added in Python 2.6.3 - # (See: http://hg.python.org/cpython/rev/0f57b30a152f) + if self._tunnel_host: self.sock = conn # Calls self._set_hostport(), so self.host is # self._tunnel_host below. @@ -328,6 +312,10 @@ class VerifiedHTTPSConnection(HTTPSConnection): # Override the host with the one we're requesting data from. hostname = self._tunnel_host + server_hostname = hostname + if self.server_hostname is not None: + server_hostname = self.server_hostname + is_time_off = datetime.date.today() < RECENT_DATE if is_time_off: warnings.warn(( @@ -352,7 +340,7 @@ class VerifiedHTTPSConnection(HTTPSConnection): certfile=self.cert_file, ca_certs=self.ca_certs, ca_cert_dir=self.ca_cert_dir, - server_hostname=hostname, + server_hostname=server_hostname, ssl_context=context) if self.assert_fingerprint: @@ -373,7 +361,7 @@ class VerifiedHTTPSConnection(HTTPSConnection): 'for details.)'.format(hostname)), SubjectAltNameWarning ) - _match_hostname(cert, self.assert_hostname or hostname) + _match_hostname(cert, self.assert_hostname or server_hostname) self.is_verified = ( context.verify_mode == ssl.CERT_REQUIRED or diff --git a/src/pip/_vendor/urllib3/connectionpool.py b/src/pip/_vendor/urllib3/connectionpool.py index 8fcb0bce7..f7a8f193d 100755 --- a/src/pip/_vendor/urllib3/connectionpool.py +++ b/src/pip/_vendor/urllib3/connectionpool.py @@ -89,7 +89,7 @@ class ConnectionPool(object): # This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252 -_blocking_errnos = set([errno.EAGAIN, errno.EWOULDBLOCK]) +_blocking_errnos = {errno.EAGAIN, errno.EWOULDBLOCK} class HTTPConnectionPool(ConnectionPool, RequestMethods): @@ -313,7 +313,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): # Catch possible read timeouts thrown as SSL errors. If not the # case, rethrow the original. 
We need to do this because of: # http://bugs.python.org/issue10272 - if 'timed out' in str(err) or 'did not complete (read)' in str(err): # Python 2.6 + if 'timed out' in str(err) or 'did not complete (read)' in str(err): # Python < 2.7.4 raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value) def _make_request(self, conn, method, url, timeout=_Default, chunked=False, @@ -375,7 +375,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): try: try: # Python 2.7, use buffering of HTTP responses httplib_response = conn.getresponse(buffering=True) - except TypeError: # Python 2.6 and older, Python 3 + except TypeError: # Python 3 try: httplib_response = conn.getresponse() except Exception as e: @@ -801,17 +801,7 @@ class HTTPSConnectionPool(HTTPConnectionPool): Establish tunnel connection early, because otherwise httplib would improperly set Host: header to proxy's IP:port. """ - # Python 2.7+ - try: - set_tunnel = conn.set_tunnel - except AttributeError: # Platform-specific: Python 2.6 - set_tunnel = conn._set_tunnel - - if sys.version_info <= (2, 6, 4) and not self.proxy_headers: # Python 2.6.4 and older - set_tunnel(self._proxy_host, self.port) - else: - set_tunnel(self._proxy_host, self.port, self.proxy_headers) - + conn.set_tunnel(self._proxy_host, self.port, self.proxy_headers) conn.connect() def _new_conn(self): diff --git a/src/pip/_vendor/urllib3/contrib/_appengine_environ.py b/src/pip/_vendor/urllib3/contrib/_appengine_environ.py new file mode 100755 index 000000000..f3e00942c --- /dev/null +++ b/src/pip/_vendor/urllib3/contrib/_appengine_environ.py @@ -0,0 +1,30 @@ +""" +This module provides means to detect the App Engine environment. +""" + +import os + + +def is_appengine(): + return (is_local_appengine() or + is_prod_appengine() or + is_prod_appengine_mvms()) + + +def is_appengine_sandbox(): + return is_appengine() and not is_prod_appengine_mvms() + + +def is_local_appengine(): + return ('APPENGINE_RUNTIME' in os.environ and + 'Development/' in os.environ['SERVER_SOFTWARE']) + + +def is_prod_appengine(): + return ('APPENGINE_RUNTIME' in os.environ and + 'Google App Engine/' in os.environ['SERVER_SOFTWARE'] and + not is_prod_appengine_mvms()) + + +def is_prod_appengine_mvms(): + return os.environ.get('GAE_VM', False) == 'true' diff --git a/src/pip/_vendor/urllib3/contrib/appengine.py b/src/pip/_vendor/urllib3/contrib/appengine.py index 59f2a617c..9b42952d7 100755 --- a/src/pip/_vendor/urllib3/contrib/appengine.py +++ b/src/pip/_vendor/urllib3/contrib/appengine.py @@ -39,8 +39,8 @@ urllib3 on Google App Engine: """ from __future__ import absolute_import +import io import logging -import os import warnings from ..packages.six.moves.urllib.parse import urljoin @@ -53,11 +53,11 @@ from ..exceptions import ( SSLError ) -from ..packages.six import BytesIO from ..request import RequestMethods from ..response import HTTPResponse from ..util.timeout import Timeout from ..util.retry import Retry +from . import _appengine_environ try: from google.appengine.api import urlfetch @@ -239,7 +239,7 @@ class AppEngineManager(RequestMethods): original_response = HTTPResponse( # In order for decoding to work, we must present the content as # a file-like object. 
- body=BytesIO(urlfetch_resp.content), + body=io.BytesIO(urlfetch_resp.content), msg=urlfetch_resp.header_msg, headers=urlfetch_resp.headers, status=urlfetch_resp.status_code, @@ -247,7 +247,7 @@ class AppEngineManager(RequestMethods): ) return HTTPResponse( - body=BytesIO(urlfetch_resp.content), + body=io.BytesIO(urlfetch_resp.content), headers=urlfetch_resp.headers, status=urlfetch_resp.status_code, original_response=original_response, @@ -280,26 +280,10 @@ class AppEngineManager(RequestMethods): return retries -def is_appengine(): - return (is_local_appengine() or - is_prod_appengine() or - is_prod_appengine_mvms()) +# Alias methods from _appengine_environ to maintain public API interface. - -def is_appengine_sandbox(): - return is_appengine() and not is_prod_appengine_mvms() - - -def is_local_appengine(): - return ('APPENGINE_RUNTIME' in os.environ and - 'Development/' in os.environ['SERVER_SOFTWARE']) - - -def is_prod_appengine(): - return ('APPENGINE_RUNTIME' in os.environ and - 'Google App Engine/' in os.environ['SERVER_SOFTWARE'] and - not is_prod_appengine_mvms()) - - -def is_prod_appengine_mvms(): - return os.environ.get('GAE_VM', False) == 'true' +is_appengine = _appengine_environ.is_appengine +is_appengine_sandbox = _appengine_environ.is_appengine_sandbox +is_local_appengine = _appengine_environ.is_local_appengine +is_prod_appengine = _appengine_environ.is_prod_appengine +is_prod_appengine_mvms = _appengine_environ.is_prod_appengine_mvms diff --git a/src/pip/_vendor/urllib3/contrib/ntlmpool.py b/src/pip/_vendor/urllib3/contrib/ntlmpool.py index 642e99ed2..8ea127c58 100755 --- a/src/pip/_vendor/urllib3/contrib/ntlmpool.py +++ b/src/pip/_vendor/urllib3/contrib/ntlmpool.py @@ -43,8 +43,7 @@ class NTLMConnectionPool(HTTPSConnectionPool): log.debug('Starting NTLM HTTPS connection no. %d: https://%s%s', self.num_connections, self.host, self.authurl) - headers = {} - headers['Connection'] = 'Keep-Alive' + headers = {'Connection': 'Keep-Alive'} req_header = 'Authorization' resp_header = 'www-authenticate' diff --git a/src/pip/_vendor/urllib3/contrib/pyopenssl.py b/src/pip/_vendor/urllib3/contrib/pyopenssl.py index 6dd3a01e2..363667cb5 100755 --- a/src/pip/_vendor/urllib3/contrib/pyopenssl.py +++ b/src/pip/_vendor/urllib3/contrib/pyopenssl.py @@ -163,6 +163,9 @@ def _dnsname_to_stdlib(name): from ASCII bytes. We need to idna-encode that string to get it back, and then on Python 3 we also need to convert to unicode via UTF-8 (the stdlib uses PyUnicode_FromStringAndSize on it, which decodes via UTF-8). + + If the name cannot be idna-encoded then we return None signalling that + the name given should be skipped. """ def idna_encode(name): """ @@ -172,14 +175,19 @@ def _dnsname_to_stdlib(name): """ from pip._vendor import idna - for prefix in [u'*.', u'.']: - if name.startswith(prefix): - name = name[len(prefix):] - return prefix.encode('ascii') + idna.encode(name) - return idna.encode(name) + try: + for prefix in [u'*.', u'.']: + if name.startswith(prefix): + name = name[len(prefix):] + return prefix.encode('ascii') + idna.encode(name) + return idna.encode(name) + except idna.core.IDNAError: + return None name = idna_encode(name) - if sys.version_info >= (3, 0): + if name is None: + return None + elif sys.version_info >= (3, 0): name = name.decode('utf-8') return name @@ -223,9 +231,10 @@ def get_subj_alt_name(peer_cert): # Sadly the DNS names need to be idna encoded and then, on Python 3, UTF-8 # decoded. 
This is pretty frustrating, but that's what the standard library # does with certificates, and so we need to attempt to do the same. + # We also want to skip over names which cannot be idna encoded. names = [ - ('DNS', _dnsname_to_stdlib(name)) - for name in ext.get_values_for_type(x509.DNSName) + ('DNS', name) for name in map(_dnsname_to_stdlib, ext.get_values_for_type(x509.DNSName)) + if name is not None ] names.extend( ('IP Address', str(name)) diff --git a/src/pip/_vendor/urllib3/packages/backports/makefile.py b/src/pip/_vendor/urllib3/packages/backports/makefile.py index 75b80dcf8..740db377d 100755 --- a/src/pip/_vendor/urllib3/packages/backports/makefile.py +++ b/src/pip/_vendor/urllib3/packages/backports/makefile.py @@ -16,7 +16,7 @@ def backport_makefile(self, mode="r", buffering=None, encoding=None, """ Backport of ``socket.makefile`` from Python 3.5. """ - if not set(mode) <= set(["r", "w", "b"]): + if not set(mode) <= {"r", "w", "b"}: raise ValueError( "invalid mode %r (only r, w, b allowed)" % (mode,) ) diff --git a/src/pip/_vendor/urllib3/packages/ordered_dict.py b/src/pip/_vendor/urllib3/packages/ordered_dict.py deleted file mode 100755 index 4479363cc..000000000 --- a/src/pip/_vendor/urllib3/packages/ordered_dict.py +++ /dev/null @@ -1,259 +0,0 @@ -# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy. -# Passes Python2.7's test suite and incorporates all the latest updates. -# Copyright 2009 Raymond Hettinger, released under the MIT License. -# http://code.activestate.com/recipes/576693/ -try: - from thread import get_ident as _get_ident -except ImportError: - from dummy_thread import get_ident as _get_ident - -try: - from _abcoll import KeysView, ValuesView, ItemsView -except ImportError: - pass - - -class OrderedDict(dict): - 'Dictionary that remembers insertion order' - # An inherited dict maps keys to values. - # The inherited dict provides __getitem__, __len__, __contains__, and get. - # The remaining methods are order-aware. - # Big-O running times for all methods are the same as for regular dictionaries. - - # The internal self.__map dictionary maps keys to links in a doubly linked list. - # The circular doubly linked list starts and ends with a sentinel element. - # The sentinel element never gets deleted (this simplifies the algorithm). - # Each link is stored as a list of length three: [PREV, NEXT, KEY]. - - def __init__(self, *args, **kwds): - '''Initialize an ordered dictionary. Signature is the same as for - regular dictionaries, but keyword arguments are not recommended - because their insertion order is arbitrary. - - ''' - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - try: - self.__root - except AttributeError: - self.__root = root = [] # sentinel node - root[:] = [root, root, None] - self.__map = {} - self.__update(*args, **kwds) - - def __setitem__(self, key, value, dict_setitem=dict.__setitem__): - 'od.__setitem__(i, y) <==> od[i]=y' - # Setting a new item creates a new link which goes at the end of the linked - # list, and the inherited dictionary is updated with the new key/value pair. - if key not in self: - root = self.__root - last = root[0] - last[1] = root[0] = self.__map[key] = [last, root, key] - dict_setitem(self, key, value) - - def __delitem__(self, key, dict_delitem=dict.__delitem__): - 'od.__delitem__(y) <==> del od[y]' - # Deleting an existing item uses self.__map to find the link which is - # then removed by updating the links in the predecessor and successor nodes. 
- dict_delitem(self, key) - link_prev, link_next, key = self.__map.pop(key) - link_prev[1] = link_next - link_next[0] = link_prev - - def __iter__(self): - 'od.__iter__() <==> iter(od)' - root = self.__root - curr = root[1] - while curr is not root: - yield curr[2] - curr = curr[1] - - def __reversed__(self): - 'od.__reversed__() <==> reversed(od)' - root = self.__root - curr = root[0] - while curr is not root: - yield curr[2] - curr = curr[0] - - def clear(self): - 'od.clear() -> None. Remove all items from od.' - try: - for node in self.__map.itervalues(): - del node[:] - root = self.__root - root[:] = [root, root, None] - self.__map.clear() - except AttributeError: - pass - dict.clear(self) - - def popitem(self, last=True): - '''od.popitem() -> (k, v), return and remove a (key, value) pair. - Pairs are returned in LIFO order if last is true or FIFO order if false. - - ''' - if not self: - raise KeyError('dictionary is empty') - root = self.__root - if last: - link = root[0] - link_prev = link[0] - link_prev[1] = root - root[0] = link_prev - else: - link = root[1] - link_next = link[1] - root[1] = link_next - link_next[0] = root - key = link[2] - del self.__map[key] - value = dict.pop(self, key) - return key, value - - # -- the following methods do not depend on the internal structure -- - - def keys(self): - 'od.keys() -> list of keys in od' - return list(self) - - def values(self): - 'od.values() -> list of values in od' - return [self[key] for key in self] - - def items(self): - 'od.items() -> list of (key, value) pairs in od' - return [(key, self[key]) for key in self] - - def iterkeys(self): - 'od.iterkeys() -> an iterator over the keys in od' - return iter(self) - - def itervalues(self): - 'od.itervalues -> an iterator over the values in od' - for k in self: - yield self[k] - - def iteritems(self): - 'od.iteritems -> an iterator over the (key, value) items in od' - for k in self: - yield (k, self[k]) - - def update(*args, **kwds): - '''od.update(E, **F) -> None. Update od from dict/iterable E and F. - - If E is a dict instance, does: for k in E: od[k] = E[k] - If E has a .keys() method, does: for k in E.keys(): od[k] = E[k] - Or if E is an iterable of items, does: for k, v in E: od[k] = v - In either case, this is followed by: for k, v in F.items(): od[k] = v - - ''' - if len(args) > 2: - raise TypeError('update() takes at most 2 positional ' - 'arguments (%d given)' % (len(args),)) - elif not args: - raise TypeError('update() takes at least 1 argument (0 given)') - self = args[0] - # Make progressively weaker assumptions about "other" - other = () - if len(args) == 2: - other = args[1] - if isinstance(other, dict): - for key in other: - self[key] = other[key] - elif hasattr(other, 'keys'): - for key in other.keys(): - self[key] = other[key] - else: - for key, value in other: - self[key] = value - for key, value in kwds.items(): - self[key] = value - - __update = update # let subclasses override update without breaking __init__ - - __marker = object() - - def pop(self, key, default=__marker): - '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value. - If key is not found, d is returned if given, otherwise KeyError is raised. 
- - ''' - if key in self: - result = self[key] - del self[key] - return result - if default is self.__marker: - raise KeyError(key) - return default - - def setdefault(self, key, default=None): - 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' - if key in self: - return self[key] - self[key] = default - return default - - def __repr__(self, _repr_running={}): - 'od.__repr__() <==> repr(od)' - call_key = id(self), _get_ident() - if call_key in _repr_running: - return '...' - _repr_running[call_key] = 1 - try: - if not self: - return '%s()' % (self.__class__.__name__,) - return '%s(%r)' % (self.__class__.__name__, self.items()) - finally: - del _repr_running[call_key] - - def __reduce__(self): - 'Return state information for pickling' - items = [[k, self[k]] for k in self] - inst_dict = vars(self).copy() - for k in vars(OrderedDict()): - inst_dict.pop(k, None) - if inst_dict: - return (self.__class__, (items,), inst_dict) - return self.__class__, (items,) - - def copy(self): - 'od.copy() -> a shallow copy of od' - return self.__class__(self) - - @classmethod - def fromkeys(cls, iterable, value=None): - '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S - and values equal to v (which defaults to None). - - ''' - d = cls() - for key in iterable: - d[key] = value - return d - - def __eq__(self, other): - '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive - while comparison to a regular mapping is order-insensitive. - - ''' - if isinstance(other, OrderedDict): - return len(self)==len(other) and self.items() == other.items() - return dict.__eq__(self, other) - - def __ne__(self, other): - return not self == other - - # -- the following methods are only used in Python 2.7 -- - - def viewkeys(self): - "od.viewkeys() -> a set-like object providing a view on od's keys" - return KeysView(self) - - def viewvalues(self): - "od.viewvalues() -> an object providing a view on od's values" - return ValuesView(self) - - def viewitems(self): - "od.viewitems() -> a set-like object providing a view on od's items" - return ItemsView(self) diff --git a/src/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py b/src/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py index 92c9bc7ea..970cf653e 100755 --- a/src/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py +++ b/src/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py @@ -9,8 +9,7 @@ import sys # ipaddress has been backported to 2.6+ in pypi. If it is installed on the # system, use it to handle IPAddress ServerAltnames (this was added in # python-3.5) otherwise only do DNS matching. This allows -# backports.ssl_match_hostname to continue to be used all the way back to -# python-2.4. +# backports.ssl_match_hostname to continue to be used in Python 2.7. try: from pip._vendor import ipaddress except ImportError: diff --git a/src/pip/_vendor/urllib3/poolmanager.py b/src/pip/_vendor/urllib3/poolmanager.py index 506a3c9b8..fe5491cfd 100755 --- a/src/pip/_vendor/urllib3/poolmanager.py +++ b/src/pip/_vendor/urllib3/poolmanager.py @@ -47,6 +47,7 @@ _key_fields = ( 'key__socks_options', # dict 'key_assert_hostname', # bool or string 'key_assert_fingerprint', # str + 'key_server_hostname', #str ) #: The namedtuple class used to construct keys for the connection pool. 
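The urllib3 changes above thread a new server_hostname option through the stack: HTTPSConnection stores it, VerifiedHTTPSConnection.connect() prefers it over the connection host when doing SNI and certificate matching, and the new key_server_hostname pool-key field keeps connections with different overrides in separate pools. A minimal sketch of how the option can be used, assuming a standalone urllib3 >= 1.24 plus certifi installation (the address and hostname below are placeholders, not values taken from this patch):

    import urllib3
    import certifi

    # Connect to a fixed address but present "example.org" for SNI and
    # verify the returned certificate against that name instead of the host.
    pool = urllib3.HTTPSConnectionPool(
        "198.51.100.10", port=443,          # placeholder address
        server_hostname="example.org",      # passed through to the underlying HTTPSConnection
        cert_reqs="CERT_REQUIRED",
        ca_certs=certifi.where(),
    )
    resp = pool.request("GET", "/")
    print(resp.status)

Because server_hostname is now part of the pool key, a PoolManager will not reuse this pool for requests that carry a different override.
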
diff --git a/src/pip/_vendor/urllib3/request.py b/src/pip/_vendor/urllib3/request.py index 1be333411..8f2f44bb2 100755 --- a/src/pip/_vendor/urllib3/request.py +++ b/src/pip/_vendor/urllib3/request.py @@ -36,7 +36,7 @@ class RequestMethods(object): explicitly. """ - _encode_url_methods = set(['DELETE', 'GET', 'HEAD', 'OPTIONS']) + _encode_url_methods = {'DELETE', 'GET', 'HEAD', 'OPTIONS'} def __init__(self, headers=None): self.headers = headers or {} diff --git a/src/pip/_vendor/urllib3/response.py b/src/pip/_vendor/urllib3/response.py index 9873cb942..c112690b0 100755 --- a/src/pip/_vendor/urllib3/response.py +++ b/src/pip/_vendor/urllib3/response.py @@ -11,7 +11,7 @@ from .exceptions import ( BodyNotHttplibCompatible, ProtocolError, DecodeError, ReadTimeoutError, ResponseNotChunked, IncompleteRead, InvalidHeader ) -from .packages.six import string_types as basestring, binary_type, PY3 +from .packages.six import string_types as basestring, PY3 from .packages.six.moves import http_client as httplib from .connection import HTTPException, BaseSSLError from .util.response import is_fp_closed, is_response_to_head @@ -23,7 +23,7 @@ class DeflateDecoder(object): def __init__(self): self._first_try = True - self._data = binary_type() + self._data = b'' self._obj = zlib.decompressobj() def __getattr__(self, name): @@ -69,9 +69,9 @@ class GzipDecoder(object): return getattr(self._obj, name) def decompress(self, data): - ret = binary_type() + ret = bytearray() if self._state == GzipDecoderState.SWALLOW_DATA or not data: - return ret + return bytes(ret) while True: try: ret += self._obj.decompress(data) @@ -81,16 +81,40 @@ class GzipDecoder(object): self._state = GzipDecoderState.SWALLOW_DATA if previous_state == GzipDecoderState.OTHER_MEMBERS: # Allow trailing garbage acceptable in other gzip clients - return ret + return bytes(ret) raise data = self._obj.unused_data if not data: - return ret + return bytes(ret) self._state = GzipDecoderState.OTHER_MEMBERS self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS) +class MultiDecoder(object): + """ + From RFC7231: + If one or more encodings have been applied to a representation, the + sender that applied the encodings MUST generate a Content-Encoding + header field that lists the content codings in the order in which + they were applied. 
+ """ + + def __init__(self, modes): + self._decoders = [_get_decoder(m.strip()) for m in modes.split(',')] + + def flush(self): + return self._decoders[0].flush() + + def decompress(self, data): + for d in reversed(self._decoders): + data = d.decompress(data) + return data + + def _get_decoder(mode): + if ',' in mode: + return MultiDecoder(mode) + if mode == 'gzip': return GzipDecoder() @@ -159,7 +183,7 @@ class HTTPResponse(io.IOBase): self.msg = msg self._request_url = request_url - if body and isinstance(body, (basestring, binary_type)): + if body and isinstance(body, (basestring, bytes)): self._body = body self._pool = pool @@ -283,8 +307,13 @@ class HTTPResponse(io.IOBase): # Note: content-encoding value should be case-insensitive, per RFC 7230 # Section 3.2 content_encoding = self.headers.get('content-encoding', '').lower() - if self._decoder is None and content_encoding in self.CONTENT_DECODERS: - self._decoder = _get_decoder(content_encoding) + if self._decoder is None: + if content_encoding in self.CONTENT_DECODERS: + self._decoder = _get_decoder(content_encoding) + elif ',' in content_encoding: + encodings = [e.strip() for e in content_encoding.split(',') if e.strip() in self.CONTENT_DECODERS] + if len(encodings): + self._decoder = _get_decoder(content_encoding) def _decode(self, data, decode_content, flush_decoder): """ diff --git a/src/pip/_vendor/urllib3/util/connection.py b/src/pip/_vendor/urllib3/util/connection.py index 5cf488f4b..5ad70b2f1 100755 --- a/src/pip/_vendor/urllib3/util/connection.py +++ b/src/pip/_vendor/urllib3/util/connection.py @@ -1,6 +1,7 @@ from __future__ import absolute_import import socket from .wait import NoWayToWaitForSocketError, wait_for_read +from ..contrib import _appengine_environ def is_connection_dropped(conn): # Platform-specific @@ -105,6 +106,13 @@ def _has_ipv6(host): sock = None has_ipv6 = False + # App Engine doesn't support IPV6 sockets and actually has a quota on the + # number of sockets that can be used, so just early out here instead of + # creating a socket needlessly. + # See https://github.com/urllib3/urllib3/issues/1446 + if _appengine_environ.is_appengine_sandbox(): + return False + if socket.has_ipv6: # has_ipv6 returns true if cPython was compiled with IPv6 support. # It does not tell us if the system has IPv6 support enabled. To diff --git a/src/pip/_vendor/urllib3/util/response.py b/src/pip/_vendor/urllib3/util/response.py index 67cf730ab..3d5486485 100755 --- a/src/pip/_vendor/urllib3/util/response.py +++ b/src/pip/_vendor/urllib3/util/response.py @@ -59,8 +59,14 @@ def assert_header_parsing(headers): get_payload = getattr(headers, 'get_payload', None) unparsed_data = None - if get_payload: # Platform-specific: Python 3. - unparsed_data = get_payload() + if get_payload: + # get_payload is actually email.message.Message.get_payload; + # we're only interested in the result if it's not a multipart message + if not headers.is_multipart(): + payload = get_payload() + + if isinstance(payload, (bytes, str)): + unparsed_data = payload if defects or unparsed_data: raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data) diff --git a/src/pip/_vendor/urllib3/util/retry.py b/src/pip/_vendor/urllib3/util/retry.py index 7ad3dc660..e7d0abd61 100755 --- a/src/pip/_vendor/urllib3/util/retry.py +++ b/src/pip/_vendor/urllib3/util/retry.py @@ -115,7 +115,7 @@ class Retry(object): (most errors are resolved immediately by a second try without a delay). 
urllib3 will sleep for:: - {backoff factor} * (2 ^ ({number of total retries} - 1)) + {backoff factor} * (2 ** ({number of total retries} - 1)) seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep for [0.0s, 0.2s, 0.4s, ...] between retries. It will never be longer diff --git a/src/pip/_vendor/urllib3/util/ssl_.py b/src/pip/_vendor/urllib3/util/ssl_.py index 325428052..dfc553ff4 100755 --- a/src/pip/_vendor/urllib3/util/ssl_.py +++ b/src/pip/_vendor/urllib3/util/ssl_.py @@ -56,9 +56,8 @@ except ImportError: OP_NO_COMPRESSION = 0x20000 -# Python 2.7 and earlier didn't have inet_pton on non-Linux -# so we fallback on inet_aton in those cases. This means that -# we can only detect IPv4 addresses in this case. +# Python 2.7 doesn't have inet_pton on non-Linux so we fallback on inet_aton in +# those cases. This means that we can only detect IPv4 addresses in this case. if hasattr(socket, 'inet_pton'): inet_pton = socket.inet_pton else: @@ -67,7 +66,7 @@ else: from pip._vendor import ipaddress def inet_pton(_, host): - if isinstance(host, six.binary_type): + if isinstance(host, bytes): host = host.decode('ascii') return ipaddress.ip_address(host) @@ -115,10 +114,7 @@ try: except ImportError: import sys - class SSLContext(object): # Platform-specific: Python 2 & 3.1 - supports_set_ciphers = ((2, 7) <= sys.version_info < (3,) or - (3, 2) <= sys.version_info) - + class SSLContext(object): # Platform-specific: Python 2 def __init__(self, protocol_version): self.protocol = protocol_version # Use default values from a real SSLContext @@ -141,12 +137,6 @@ except ImportError: raise SSLError("CA directories not supported in older Pythons") def set_ciphers(self, cipher_suite): - if not self.supports_set_ciphers: - raise TypeError( - 'Your version of Python does not support setting ' - 'a custom cipher suite. Please upgrade to Python ' - '2.7, 3.2, or later if you need this functionality.' - ) self.ciphers = cipher_suite def wrap_socket(self, socket, server_hostname=None, server_side=False): @@ -167,10 +157,7 @@ except ImportError: 'ssl_version': self.protocol, 'server_side': server_side, } - if self.supports_set_ciphers: # Platform-specific: Python 2.7+ - return wrap_socket(socket, ciphers=self.ciphers, **kwargs) - else: # Platform-specific: Python 2.6 - return wrap_socket(socket, **kwargs) + return wrap_socket(socket, ciphers=self.ciphers, **kwargs) def assert_fingerprint(cert, fingerprint): @@ -276,6 +263,8 @@ def create_urllib3_context(ssl_version=None, cert_reqs=None, """ context = SSLContext(ssl_version or ssl.PROTOCOL_SSLv23) + context.set_ciphers(ciphers or DEFAULT_CIPHERS) + # Setting the default here, as we may have no ssl module on import cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs @@ -291,9 +280,6 @@ def create_urllib3_context(ssl_version=None, cert_reqs=None, context.options |= options - if getattr(context, 'supports_set_ciphers', True): # Platform-specific: Python 2.6 - context.set_ciphers(ciphers or DEFAULT_CIPHERS) - context.verify_mode = cert_reqs if getattr(context, 'check_hostname', None) is not None: # Platform-specific: Python 3.2 # We do our own verification, including fingerprints and alternative @@ -316,8 +302,7 @@ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, A pre-made :class:`SSLContext` object. If none is provided, one will be created using :func:`create_urllib3_context`. :param ciphers: - A string of ciphers we wish the client to support. This is not - supported on Python 2.6 as the ssl module does not support it. 
+ A string of ciphers we wish the client to support. :param ca_cert_dir: A directory containing CA certificates in multiple separate files, as supported by OpenSSL's -CApath flag or the capath argument to @@ -334,7 +319,7 @@ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, if ca_certs or ca_cert_dir: try: context.load_verify_locations(ca_certs, ca_cert_dir) - except IOError as e: # Platform-specific: Python 2.6, 2.7, 3.2 + except IOError as e: # Platform-specific: Python 2.7 raise SSLError(e) # Py33 raises FileNotFoundError which subclasses OSError # These are not equivalent unless we check the errno attribute @@ -378,7 +363,7 @@ def is_ipaddress(hostname): :param str hostname: Hostname to examine. :return: True if the hostname is an IP address, False otherwise. """ - if six.PY3 and isinstance(hostname, six.binary_type): + if six.PY3 and isinstance(hostname, bytes): # IDN A-label bytes are ASCII compatible. hostname = hostname.decode('ascii') diff --git a/src/pip/_vendor/urllib3/util/wait.py b/src/pip/_vendor/urllib3/util/wait.py index fa686eff4..4db71bafd 100755 --- a/src/pip/_vendor/urllib3/util/wait.py +++ b/src/pip/_vendor/urllib3/util/wait.py @@ -43,9 +43,6 @@ if sys.version_info >= (3, 5): else: # Old and broken Pythons. def _retry_on_intr(fn, timeout): - if timeout is not None and timeout <= 0: - return fn(timeout) - if timeout is None: deadline = float("inf") else: @@ -117,7 +114,7 @@ def _have_working_poll(): # from libraries like eventlet/greenlet. try: poll_obj = select.poll() - poll_obj.poll(0) + _retry_on_intr(poll_obj.poll, 0) except (AttributeError, OSError): return False else: diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 25e02fd7f..60b8d6ef5 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -16,7 +16,7 @@ requests==2.19.1 certifi==2018.8.24 chardet==3.0.4 idna==2.7 - urllib3==1.23 + urllib3==1.24.1 retrying==1.3.3 setuptools==40.4.3 six==1.11.0 From a90c04eee83a73c03226c1558e60e045bf7cd413 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Fri, 18 Jan 2019 10:43:46 +0530 Subject: [PATCH 34/69] Update certifi to 2018.11.29 --- news/certifi.vendor | 1 + src/pip/_vendor/certifi/__init__.py | 4 +- src/pip/_vendor/certifi/cacert.pem | 272 +++++++++++++++++++++++++--- src/pip/_vendor/certifi/core.py | 17 -- src/pip/_vendor/vendor.txt | 2 +- 5 files changed, 246 insertions(+), 50 deletions(-) create mode 100644 news/certifi.vendor diff --git a/news/certifi.vendor b/news/certifi.vendor new file mode 100644 index 000000000..276053ed7 --- /dev/null +++ b/news/certifi.vendor @@ -0,0 +1 @@ +Update certifi to 2018.11.29 diff --git a/src/pip/_vendor/certifi/__init__.py b/src/pip/_vendor/certifi/__init__.py index aa329fbb4..ef71f3af3 100644 --- a/src/pip/_vendor/certifi/__init__.py +++ b/src/pip/_vendor/certifi/__init__.py @@ -1,3 +1,3 @@ -from .core import where, old_where +from .core import where -__version__ = "2018.08.24" +__version__ = "2018.11.29" diff --git a/src/pip/_vendor/certifi/cacert.pem b/src/pip/_vendor/certifi/cacert.pem index 85de024e7..db68797e2 100644 --- a/src/pip/_vendor/certifi/cacert.pem +++ b/src/pip/_vendor/certifi/cacert.pem @@ -326,36 +326,6 @@ OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS -----END CERTIFICATE----- -# Issuer: CN=Visa eCommerce Root O=VISA OU=Visa International Service Association -# Subject: CN=Visa eCommerce Root O=VISA OU=Visa International Service Association -# Label: "Visa 
eCommerce Root" -# Serial: 25952180776285836048024890241505565794 -# MD5 Fingerprint: fc:11:b8:d8:08:93:30:00:6d:23:f9:7e:eb:52:1e:02 -# SHA1 Fingerprint: 70:17:9b:86:8c:00:a4:fa:60:91:52:22:3f:9f:3e:32:bd:e0:05:62 -# SHA256 Fingerprint: 69:fa:c9:bd:55:fb:0a:c7:8d:53:bb:ee:5c:f1:d5:97:98:9f:d0:aa:ab:20:a2:51:51:bd:f1:73:3e:e7:d1:22 ------BEGIN CERTIFICATE----- -MIIDojCCAoqgAwIBAgIQE4Y1TR0/BvLB+WUF1ZAcYjANBgkqhkiG9w0BAQUFADBr -MQswCQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRl -cm5hdGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNv -bW1lcmNlIFJvb3QwHhcNMDIwNjI2MDIxODM2WhcNMjIwNjI0MDAxNjEyWjBrMQsw -CQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRlcm5h -dGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNvbW1l -cmNlIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvV95WHm6h -2mCxlCfLF9sHP4CFT8icttD0b0/Pmdjh28JIXDqsOTPHH2qLJj0rNfVIsZHBAk4E -lpF7sDPwsRROEW+1QK8bRaVK7362rPKgH1g/EkZgPI2h4H3PVz4zHvtH8aoVlwdV -ZqW1LS7YgFmypw23RuwhY/81q6UCzyr0TP579ZRdhE2o8mCP2w4lPJ9zcc+U30rq -299yOIzzlr3xF7zSujtFWsan9sYXiwGd/BmoKoMWuDpI/k4+oKsGGelT84ATB+0t -vz8KPFUgOSwsAGl0lUq8ILKpeeUYiZGo3BxN77t+Nwtd/jmliFKMAGzsGHxBvfaL -dXe6YJ2E5/4tAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD -AgEGMB0GA1UdDgQWBBQVOIMPPyw/cDMezUb+B4wg4NfDtzANBgkqhkiG9w0BAQUF -AAOCAQEAX/FBfXxcCLkr4NWSR/pnXKUTwwMhmytMiUbPWU3J/qVAtmPN3XEolWcR -zCSs00Rsca4BIGsDoo8Ytyk6feUWYFN4PMCvFYP3j1IzJL1kk5fui/fbGKhtcbP3 -LBfQdCVp9/5rPJS+TUtBjE7ic9DjkCJzQ83z7+pzzkWKsKZJ/0x9nXGIxHYdkFsd -7v3M9+79YKWxehZx0RbQfBI8bGmX265fOZpwLwU8GUYEmSA20GBuYQa7FkKMcPcw -++DbZqMAAb3mLNqRX6BGi01qnD093QVG/na/oAo85ADmJ7f/hC3euiInlhBx6yLt -398znM/jra6O1I7mT1GvFpLgXPYHDw== ------END CERTIFICATE----- - # Issuer: CN=AAA Certificate Services O=Comodo CA Limited # Subject: CN=AAA Certificate Services O=Comodo CA Limited # Label: "Comodo AAA Services root" @@ -4298,3 +4268,245 @@ rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV 57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9 -----END CERTIFICATE----- + +# Issuer: CN=GTS Root R1 O=Google Trust Services LLC +# Subject: CN=GTS Root R1 O=Google Trust Services LLC +# Label: "GTS Root R1" +# Serial: 146587175971765017618439757810265552097 +# MD5 Fingerprint: 82:1a:ef:d4:d2:4a:f2:9f:e2:3d:97:06:14:70:72:85 +# SHA1 Fingerprint: e1:c9:50:e6:ef:22:f8:4c:56:45:72:8b:92:20:60:d7:d5:a7:a3:e8 +# SHA256 Fingerprint: 2a:57:54:71:e3:13:40:bc:21:58:1c:bd:2c:f1:3e:15:84:63:20:3e:ce:94:bc:f9:d3:cc:19:6b:f0:9a:54:72 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgIQbkepxUtHDA3sM9CJuRz04TANBgkqhkiG9w0BAQwFADBH +MQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExM +QzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIy +MDAwMDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNl +cnZpY2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaM +f/vo27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vX +mX7wCl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7 +zUjwTcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0P +fyblqAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtc +vfaHszVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4 +Zor8Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUsp +zBmkMiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOO +Rc92wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYW +k70paDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+ 
+DVrNVjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgF +lQIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBADiW +Cu49tJYeX++dnAsznyvgyv3SjgofQXSlfKqE1OXyHuY3UjKcC9FhHb8owbZEKTV1 +d5iyfNm9dKyKaOOpMQkpAWBz40d8U6iQSifvS9efk+eCNs6aaAyC58/UEBZvXw6Z +XPYfcX3v73svfuo21pdwCxXu11xWajOl40k4DLh9+42FpLFZXvRq4d2h9mREruZR +gyFmxhE+885H7pwoHyXa/6xmld01D1zvICxi/ZG6qcz8WpyTgYMpl0p8WnK0OdC3 +d8t5/Wk6kjftbjhlRn7pYL15iJdfOBL07q9bgsiG1eGZbYwE8na6SfZu6W0eX6Dv +J4J2QPim01hcDyxC2kLGe4g0x8HYRZvBPsVhHdljUEn2NIVq4BjFbkerQUIpm/Zg +DdIx02OYI5NaAIFItO/Nis3Jz5nu2Z6qNuFoS3FJFDYoOj0dzpqPJeaAcWErtXvM ++SUWgeExX6GjfhaknBZqlxi9dnKlC54dNuYvoS++cJEPqOba+MSSQGwlfnuzCdyy +F62ARPBopY+Udf90WuioAnwMCeKpSwughQtiue+hMZL77/ZRBIls6Kl0obsXs7X9 +SQ98POyDGCBDTtWTurQ0sR8WNh8M5mQ5Fkzc4P4dyKliPUDqysU0ArSuiYgzNdws +E3PYJ/HQcu51OyLemGhmW/HGY0dVHLqlCFF1pkgl +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R2 O=Google Trust Services LLC +# Subject: CN=GTS Root R2 O=Google Trust Services LLC +# Label: "GTS Root R2" +# Serial: 146587176055767053814479386953112547951 +# MD5 Fingerprint: 44:ed:9a:0e:a4:09:3b:00:f2:ae:4c:a3:c6:61:b0:8b +# SHA1 Fingerprint: d2:73:96:2a:2a:5e:39:9f:73:3f:e1:c7:1e:64:3f:03:38:34:fc:4d +# SHA256 Fingerprint: c4:5d:7b:b0:8e:6d:67:e6:2e:42:35:11:0b:56:4e:5f:78:fd:92:ef:05:8c:84:0a:ea:4e:64:55:d7:58:5c:60 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgIQbkepxlqz5yDFMJo/aFLybzANBgkqhkiG9w0BAQwFADBH +MQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExM +QzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIy +MDAwMDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNl +cnZpY2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3Lv +CvptnfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3Kg +GjSY6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9Bu +XvAuMC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOd +re7kRXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXu +PuWgf9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1 +mKPV+3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K +8YzodDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqj +x5RWIr9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsR +nTKaG73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0 +kzCqgc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9Ok +twIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBALZp +8KZ3/p7uC4Gt4cCpx/k1HUCCq+YEtN/L9x0Pg/B+E02NjO7jMyLDOfxA325BS0JT +vhaI8dI4XsRomRyYUpOM52jtG2pzegVATX9lO9ZY8c6DR2Dj/5epnGB3GFW1fgiT +z9D2PGcDFWEJ+YF59exTpJ/JjwGLc8R3dtyDovUMSRqodt6Sm2T4syzFJ9MHwAiA +pJiS4wGWAqoC7o87xdFtCjMwc3i5T1QWvwsHoaRc5svJXISPD+AVdyx+Jn7axEvb +pxZ3B7DNdehyQtaVhJ2Gg/LkkM0JR9SLA3DaWsYDQvTtN6LwG1BUSw7YhN4ZKJmB +R64JGz9I0cNv4rBgF/XuIwKl2gBbbZCr7qLpGzvpx0QnRY5rn/WkhLx3+WuXrD5R +RaIRpsyF7gpo8j5QOHokYh4XIDdtak23CZvJ/KRY9bb7nE4Yu5UC56GtmwfuNmsk +0jmGwZODUNKBRqhfYlcsu2xkiAhu7xNUX90txGdj08+JN7+dIPT7eoOboB6BAFDC +5AwiWVIQ7UNWhwD4FFKnHYuTjKJNRn8nxnGbJN7k2oaLDX5rIMHAnuFl2GqjpuiF +izoHCBy69Y9Vmhh1fuXsgWbRIXOhNUQLgD1bnF5vKheW0YMjiGZt5obicDIvUiLn +yOd/xCxgXS/Dr55FBcOEArf9LAhST4Ldo/DUhgkC +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R3 O=Google Trust Services LLC +# Subject: CN=GTS Root R3 O=Google Trust Services LLC +# Label: "GTS Root R3" +# Serial: 146587176140553309517047991083707763997 +# MD5 Fingerprint: 
1a:79:5b:6b:04:52:9c:5d:c7:74:33:1b:25:9a:f9:25 +# SHA1 Fingerprint: 30:d4:24:6f:07:ff:db:91:89:8a:0b:e9:49:66:11:eb:8c:5e:46:e5 +# SHA256 Fingerprint: 15:d5:b8:77:46:19:ea:7d:54:ce:1c:a6:d0:b0:c4:03:e0:37:a9:17:f1:31:e8:a0:4e:1e:6b:7a:71:ba:bc:e5 +-----BEGIN CERTIFICATE----- +MIICDDCCAZGgAwIBAgIQbkepx2ypcyRAiQ8DVd2NHTAKBggqhkjOPQQDAzBHMQsw +CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU +MBIGA1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw +MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp +Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout +736GjOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2A +DDL24CejQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud +DgQWBBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEAgFuk +fCPAlaUs3L6JbyO5o91lAFJekazInXJ0glMLfalAvWhgxeG4VDvBNhcl2MG9AjEA +njWSdIUlUfUk7GRSJFClH9voy8l27OyCbvWFGFPouOOaKaqW04MjyaR7YbPMAuhd +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R4 O=Google Trust Services LLC +# Subject: CN=GTS Root R4 O=Google Trust Services LLC +# Label: "GTS Root R4" +# Serial: 146587176229350439916519468929765261721 +# MD5 Fingerprint: 5d:b6:6a:c4:60:17:24:6a:1a:99:a8:4b:ee:5e:b4:26 +# SHA1 Fingerprint: 2a:1d:60:27:d9:4a:b1:0a:1c:4d:91:5c:cd:33:a0:cb:3e:2d:54:cb +# SHA256 Fingerprint: 71:cc:a5:39:1f:9e:79:4b:04:80:25:30:b3:63:e1:21:da:8a:30:43:bb:26:66:2f:ea:4d:ca:7f:c9:51:a4:bd +-----BEGIN CERTIFICATE----- +MIICCjCCAZGgAwIBAgIQbkepyIuUtui7OyrYorLBmTAKBggqhkjOPQQDAzBHMQsw +CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU +MBIGA1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw +MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp +Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzu +hXyiQHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/l +xKvRHYqjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud +DgQWBBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNnADBkAjBqUFJ0 +CMRw3J5QdCHojXohw0+WbhXRIjVhLfoIN+4Zba3bssx9BzT1YBkstTTZbyACMANx +sbqjYAuG7ZoIapVon+Kz4ZNkfF6Tpt95LY2F45TPI11xzPKwTdb+mciUqXWi4w== +-----END CERTIFICATE----- + +# Issuer: CN=UCA Global G2 Root O=UniTrust +# Subject: CN=UCA Global G2 Root O=UniTrust +# Label: "UCA Global G2 Root" +# Serial: 124779693093741543919145257850076631279 +# MD5 Fingerprint: 80:fe:f0:c4:4a:f0:5c:62:32:9f:1c:ba:78:a9:50:f8 +# SHA1 Fingerprint: 28:f9:78:16:19:7a:ff:18:25:18:aa:44:fe:c1:a0:ce:5c:b6:4c:8a +# SHA256 Fingerprint: 9b:ea:11:c9:76:fe:01:47:64:c1:be:56:a6:f9:14:b5:a5:60:31:7a:bd:99:88:39:33:82:e5:16:1a:a0:49:3c +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIQXd+x2lqj7V2+WmUgZQOQ7zANBgkqhkiG9w0BAQsFADA9 +MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxGzAZBgNVBAMMElVDQSBH +bG9iYWwgRzIgUm9vdDAeFw0xNjAzMTEwMDAwMDBaFw00MDEyMzEwMDAwMDBaMD0x +CzAJBgNVBAYTAkNOMREwDwYDVQQKDAhVbmlUcnVzdDEbMBkGA1UEAwwSVUNBIEds +b2JhbCBHMiBSb290MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxeYr +b3zvJgUno4Ek2m/LAfmZmqkywiKHYUGRO8vDaBsGxUypK8FnFyIdK+35KYmToni9 +kmugow2ifsqTs6bRjDXVdfkX9s9FxeV67HeToI8jrg4aA3++1NDtLnurRiNb/yzm +VHqUwCoV8MmNsHo7JOHXaOIxPAYzRrZUEaalLyJUKlgNAQLx+hVRZ2zA+te2G3/R +VogvGjqNO7uCEeBHANBSh6v7hn4PJGtAnTRnvI3HLYZveT6OqTwXS3+wmeOwcWDc +C/Vkw85DvG1xudLeJ1uK6NjGruFZfc8oLTW4lVYa8bJYS7cSN8h8s+1LgOGN+jIj +tm+3SJUIsUROhYw6AlQgL9+/V087OpAh18EmNVQg7Mc/R+zvWr9LesGtOxdQXGLY +D0tK3Cv6brxzks3sx1DoQZbXqX5t2Okdj4q1uViSukqSKwxW/YDrCPBeKW4bHAyv 
+j5OJrdu9o54hyokZ7N+1wxrrFv54NkzWbtA+FxyQF2smuvt6L78RHBgOLXMDj6Dl +NaBa4kx1HXHhOThTeEDMg5PXCp6dW4+K5OXgSORIskfNTip1KnvyIvbJvgmRlld6 +iIis7nCs+dwp4wwcOxJORNanTrAmyPPZGpeRaOrvjUYG0lZFWJo8DA+DuAUlwznP +O6Q0ibd5Ei9Hxeepl2n8pndntd978XplFeRhVmUCAwEAAaNCMEAwDgYDVR0PAQH/ +BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFIHEjMz15DD/pQwIX4wV +ZyF0Ad/fMA0GCSqGSIb3DQEBCwUAA4ICAQATZSL1jiutROTL/7lo5sOASD0Ee/oj +L3rtNtqyzm325p7lX1iPyzcyochltq44PTUbPrw7tgTQvPlJ9Zv3hcU2tsu8+Mg5 +1eRfB70VVJd0ysrtT7q6ZHafgbiERUlMjW+i67HM0cOU2kTC5uLqGOiiHycFutfl +1qnN3e92mI0ADs0b+gO3joBYDic/UvuUospeZcnWhNq5NXHzJsBPd+aBJ9J3O5oU +b3n09tDh05S60FdRvScFDcH9yBIw7m+NESsIndTUv4BFFJqIRNow6rSn4+7vW4LV +PtateJLbXDzz2K36uGt/xDYotgIVilQsnLAXc47QN6MUPJiVAAwpBVueSUmxX8fj +y88nZY41F7dXyDDZQVu5FLbowg+UMaeUmMxq67XhJ/UQqAHojhJi6IjMtX9Gl8Cb +EGY4GjZGXyJoPd/JxhMnq1MGrKI8hgZlb7F+sSlEmqO6SWkoaY/X5V+tBIZkbxqg +DMUIYs6Ao9Dz7GjevjPHF1t/gMRMTLGmhIrDO7gJzRSBuhjjVFc2/tsvfEehOjPI ++Vg7RE+xygKJBJYoaMVLuCaJu9YzL1DV/pqJuhgyklTGW+Cd+V7lDSKb9triyCGy +YiGqhkCyLmTTX8jjfhFnRR8F/uOi77Oos/N9j/gMHyIfLXC0uAE0djAA5SN4p1bX +UB+K+wb1whnw0A== +-----END CERTIFICATE----- + +# Issuer: CN=UCA Extended Validation Root O=UniTrust +# Subject: CN=UCA Extended Validation Root O=UniTrust +# Label: "UCA Extended Validation Root" +# Serial: 106100277556486529736699587978573607008 +# MD5 Fingerprint: a1:f3:5f:43:c6:34:9b:da:bf:8c:7e:05:53:ad:96:e2 +# SHA1 Fingerprint: a3:a1:b0:6f:24:61:23:4a:e3:36:a5:c2:37:fc:a6:ff:dd:f0:d7:3a +# SHA256 Fingerprint: d4:3a:f9:b3:54:73:75:5c:96:84:fc:06:d7:d8:cb:70:ee:5c:28:e7:73:fb:29:4e:b4:1e:e7:17:22:92:4d:24 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgIQT9Irj/VkyDOeTzRYZiNwYDANBgkqhkiG9w0BAQsFADBH +MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNVBAMMHFVDQSBF +eHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwHhcNMTUwMzEzMDAwMDAwWhcNMzgxMjMx +MDAwMDAwWjBHMQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNV +BAMMHFVDQSBFeHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCpCQcoEwKwmeBkqh5DFnpzsZGgdT6o+uM4AHrsiWog +D4vFsJszA1qGxliG1cGFu0/GnEBNyr7uaZa4rYEwmnySBesFK5pI0Lh2PpbIILvS +sPGP2KxFRv+qZ2C0d35qHzwaUnoEPQc8hQ2E0B92CvdqFN9y4zR8V05WAT558aop +O2z6+I9tTcg1367r3CTueUWnhbYFiN6IXSV8l2RnCdm/WhUFhvMJHuxYMjMR83dk +sHYf5BA1FxvyDrFspCqjc/wJHx4yGVMR59mzLC52LqGj3n5qiAno8geK+LLNEOfi +c0CTuwjRP+H8C5SzJe98ptfRr5//lpr1kXuYC3fUfugH0mK1lTnj8/FtDw5lhIpj +VMWAtuCeS31HJqcBCF3RiJ7XwzJE+oJKCmhUfzhTA8ykADNkUVkLo4KRel7sFsLz +KuZi2irbWWIQJUoqgQtHB0MGcIfS+pMRKXpITeuUx3BNr2fVUbGAIAEBtHoIppB/ +TuDvB0GHr2qlXov7z1CymlSvw4m6WC31MJixNnI5fkkE/SmnTHnkBVfblLkWU41G +sx2VYVdWf6/wFlthWG82UBEL2KwrlRYaDh8IzTY0ZRBiZtWAXxQgXy0MoHgKaNYs +1+lvK9JKBZP8nm9rZ/+I8U6laUpSNwXqxhaN0sSZ0YIrO7o1dfdRUVjzyAfd5LQD +fwIDAQABo0IwQDAdBgNVHQ4EFgQU2XQ65DA9DfcS3H5aBZ8eNJr34RQwDwYDVR0T +AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQADggIBADaN +l8xCFWQpN5smLNb7rhVpLGsaGvdftvkHTFnq88nIua7Mui563MD1sC3AO6+fcAUR +ap8lTwEpcOPlDOHqWnzcSbvBHiqB9RZLcpHIojG5qtr8nR/zXUACE/xOHAbKsxSQ +VBcZEhrxH9cMaVr2cXj0lH2RC47skFSOvG+hTKv8dGT9cZr4QQehzZHkPJrgmzI5 +c6sq1WnIeJEmMX3ixzDx/BR4dxIOE/TdFpS/S2d7cFOFyrC78zhNLJA5wA3CXWvp +4uXViI3WLL+rG761KIcSF3Ru/H38j9CHJrAb+7lsq+KePRXBOy5nAliRn+/4Qh8s +t2j1da3Ptfb/EX3C8CSlrdP6oDyp+l3cpaDvRKS+1ujl5BOWF3sGPjLtx7dCvHaj +2GU4Kzg1USEODm8uNBNA4StnDG1KQTAYI1oyVZnJF+A83vbsea0rWBmirSwiGpWO +vpaQXUJXxPkUAzUrHC1RVwinOt4/5Mi0A3PCwSaAuwtCH60NryZy2sy+s6ODWA2C +xR9GUeOcGMyNm43sSet1UNWMKFnKdDTajAshqx7qG+XH/RU+wBeq+yNuJkbL+vmx +cmtpzyKEC2IPrNkZAJSidjzULZrtBJ4tBmIQN1IchXIbJ+XMxjHsN+xjWZsLHXbM +fjKaiJUINlK73nZfdklJrX+9ZSCyycErdhh2n1ax +-----END CERTIFICATE----- + +# Issuer: CN=Certigna Root CA 
O=Dhimyotis OU=0002 48146308100036 +# Subject: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 +# Label: "Certigna Root CA" +# Serial: 269714418870597844693661054334862075617 +# MD5 Fingerprint: 0e:5c:30:62:27:eb:5b:bc:d7:ae:62:ba:e9:d5:df:77 +# SHA1 Fingerprint: 2d:0d:52:14:ff:9e:ad:99:24:01:74:20:47:6e:6c:85:27:27:f5:43 +# SHA256 Fingerprint: d4:8d:3d:23:ee:db:50:a4:59:e5:51:97:60:1c:27:77:4b:9d:7b:18:c9:4d:5a:05:95:11:a1:02:50:b9:31:68 +-----BEGIN CERTIFICATE----- +MIIGWzCCBEOgAwIBAgIRAMrpG4nxVQMNo+ZBbcTjpuEwDQYJKoZIhvcNAQELBQAw +WjELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczEcMBoGA1UECwwTMDAw +MiA0ODE0NjMwODEwMDAzNjEZMBcGA1UEAwwQQ2VydGlnbmEgUm9vdCBDQTAeFw0x +MzEwMDEwODMyMjdaFw0zMzEwMDEwODMyMjdaMFoxCzAJBgNVBAYTAkZSMRIwEAYD +VQQKDAlEaGlteW90aXMxHDAaBgNVBAsMEzAwMDIgNDgxNDYzMDgxMDAwMzYxGTAX +BgNVBAMMEENlcnRpZ25hIFJvb3QgQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw +ggIKAoICAQDNGDllGlmx6mQWDoyUJJV8g9PFOSbcDO8WV43X2KyjQn+Cyu3NW9sO +ty3tRQgXstmzy9YXUnIo245Onoq2C/mehJpNdt4iKVzSs9IGPjA5qXSjklYcoW9M +CiBtnyN6tMbaLOQdLNyzKNAT8kxOAkmhVECe5uUFoC2EyP+YbNDrihqECB63aCPu +I9Vwzm1RaRDuoXrC0SIxwoKF0vJVdlB8JXrJhFwLrN1CTivngqIkicuQstDuI7pm +TLtipPlTWmR7fJj6o0ieD5Wupxj0auwuA0Wv8HT4Ks16XdG+RCYyKfHx9WzMfgIh +C59vpD++nVPiz32pLHxYGpfhPTc3GGYo0kDFUYqMwy3OU4gkWGQwFsWq4NYKpkDf +ePb1BHxpE4S80dGnBs8B92jAqFe7OmGtBIyT46388NtEbVncSVmurJqZNjBBe3Yz +IoejwpKGbvlw7q6Hh5UbxHq9MfPU0uWZ/75I7HX1eBYdpnDBfzwboZL7z8g81sWT +Co/1VTp2lc5ZmIoJlXcymoO6LAQ6l73UL77XbJuiyn1tJslV1c/DeVIICZkHJC1k +JWumIWmbat10TWuXekG9qxf5kBdIjzb5LdXF2+6qhUVB+s06RbFo5jZMm5BX7CO5 +hwjCxAnxl4YqKE3idMDaxIzb3+KhF1nOJFl0Mdp//TBt2dzhauH8XwIDAQABo4IB +GjCCARYwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FBiHVuBud+4kNTxOc5of1uHieX4rMB8GA1UdIwQYMBaAFBiHVuBud+4kNTxOc5of +1uHieX4rMEQGA1UdIAQ9MDswOQYEVR0gADAxMC8GCCsGAQUFBwIBFiNodHRwczov +L3d3d3cuY2VydGlnbmEuZnIvYXV0b3JpdGVzLzBtBgNVHR8EZjBkMC+gLaArhilo +dHRwOi8vY3JsLmNlcnRpZ25hLmZyL2NlcnRpZ25hcm9vdGNhLmNybDAxoC+gLYYr +aHR0cDovL2NybC5kaGlteW90aXMuY29tL2NlcnRpZ25hcm9vdGNhLmNybDANBgkq +hkiG9w0BAQsFAAOCAgEAlLieT/DjlQgi581oQfccVdV8AOItOoldaDgvUSILSo3L +6btdPrtcPbEo/uRTVRPPoZAbAh1fZkYJMyjhDSSXcNMQH+pkV5a7XdrnxIxPTGRG +HVyH41neQtGbqH6mid2PHMkwgu07nM3A6RngatgCdTer9zQoKJHyBApPNeNgJgH6 +0BGM+RFq7q89w1DTj18zeTyGqHNFkIwgtnJzFyO+B2XleJINugHA64wcZr+shncB +lA2c5uk5jR+mUYyZDDl34bSb+hxnV29qao6pK0xXeXpXIs/NX2NGjVxZOob4Mkdi +o2cNGJHc+6Zr9UhhcyNZjgKnvETq9Emd8VRY+WCv2hikLyhF3HqgiIZd8zvn/yk1 +gPxkQ5Tm4xxvvq0OKmOZK8l+hfZx6AYDlf7ej0gcWtSS6Cvu5zHbugRqh5jnxV/v +faci9wHYTfmJ0A6aBVmknpjZbyvKcL5kwlWj9Omvw5Ip3IgWJJk8jSaYtlu3zM63 +Nwf9JtmYhST/WSMDmu2dnajkXjjO11INb9I/bbEFa0nOipFGc/T2L/Coc3cOZayh +jWZSaX5LaAzHHjcng6WMxwLkFM1JAbBzs/3GkDpv0mztO+7skb6iQ12LAEpmJURw +3kAP+HwV96LOPNdeE4yBFxgX0b3xdxA61GU5wSesVywlVP+i2k+KYTlerj1KjL0= +-----END CERTIFICATE----- diff --git a/src/pip/_vendor/certifi/core.py b/src/pip/_vendor/certifi/core.py index eab9d1d17..2d02ea44c 100644 --- a/src/pip/_vendor/certifi/core.py +++ b/src/pip/_vendor/certifi/core.py @@ -8,14 +8,6 @@ certifi.py This module returns the installation location of cacert.pem. """ import os -import warnings - - -class DeprecatedBundleWarning(DeprecationWarning): - """ - The weak security bundle is being deprecated. Please bother your service - provider to get them to stop using cross-signed roots. - """ def where(): @@ -24,14 +16,5 @@ def where(): return os.path.join(f, 'cacert.pem') -def old_where(): - warnings.warn( - "The weak security bundle has been removed. certifi.old_where() is now an alias " - "of certifi.where(). Please update your code to use certifi.where() instead. 
" - "certifi.old_where() will be removed in 2018.", - DeprecatedBundleWarning - ) - return where() - if __name__ == '__main__': print(where()) diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 60b8d6ef5..2d2684d0a 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -13,7 +13,7 @@ progress==1.4 pyparsing==2.2.1 pytoml==0.1.19 requests==2.19.1 - certifi==2018.8.24 + certifi==2018.11.29 chardet==3.0.4 idna==2.7 urllib3==1.24.1 From 7591a54b893f36b2cd17ec23e371a71e8c31101c Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Fri, 18 Jan 2019 10:44:13 +0530 Subject: [PATCH 35/69] Update pyparsing to 2.3.1 --- news/pyparsing.vendor | 1 + src/pip/_vendor/pyparsing.LICENSE | 36 +- src/pip/_vendor/pyparsing.py | 2342 +++++++++++++++++++---------- src/pip/_vendor/vendor.txt | 2 +- 4 files changed, 1546 insertions(+), 835 deletions(-) create mode 100644 news/pyparsing.vendor diff --git a/news/pyparsing.vendor b/news/pyparsing.vendor new file mode 100644 index 000000000..956534864 --- /dev/null +++ b/news/pyparsing.vendor @@ -0,0 +1 @@ +Update pyparsing to 2.3.1 diff --git a/src/pip/_vendor/pyparsing.LICENSE b/src/pip/_vendor/pyparsing.LICENSE index bbc959e0d..1bf98523e 100644 --- a/src/pip/_vendor/pyparsing.LICENSE +++ b/src/pip/_vendor/pyparsing.LICENSE @@ -1,18 +1,18 @@ -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/src/pip/_vendor/pyparsing.py b/src/pip/_vendor/pyparsing.py index 865152d7c..bea4d9c75 100644 --- a/src/pip/_vendor/pyparsing.py +++ b/src/pip/_vendor/pyparsing.py @@ -1,6 +1,7 @@ +#-*- coding: utf-8 -*- # module pyparsing.py # -# Copyright (c) 2003-2018 Paul T. McGuire +# Copyright (c) 2003-2019 Paul T. McGuire # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -27,15 +28,18 @@ __doc__ = \ pyparsing module - Classes and methods to define and execute parsing grammars ============================================================================= -The pyparsing module is an alternative approach to creating and executing simple grammars, -vs. the traditional lex/yacc approach, or the use of regular expressions. With pyparsing, you -don't need to learn a new syntax for defining grammars or matching expressions - the parsing module -provides a library of classes that you use to construct the grammar directly in Python. +The pyparsing module is an alternative approach to creating and +executing simple grammars, vs. the traditional lex/yacc approach, or the +use of regular expressions. With pyparsing, you don't need to learn +a new syntax for defining grammars or matching expressions - the parsing +module provides a library of classes that you use to construct the +grammar directly in Python. -Here is a program to parse "Hello, World!" (or any greeting of the form -C{", !"}), built up using L{Word}, L{Literal}, and L{And} elements -(L{'+'} operator gives L{And} expressions, strings are auto-converted to -L{Literal} expressions):: +Here is a program to parse "Hello, World!" (or any greeting of the form +``", !"``), built up using :class:`Word`, +:class:`Literal`, and :class:`And` elements +(the :class:`'+'` operators create :class:`And` expressions, +and the strings are auto-converted to :class:`Literal` expressions):: from pip._vendor.pyparsing import Word, alphas @@ -49,33 +53,48 @@ The program outputs the following:: Hello, World! -> ['Hello', ',', 'World', '!'] -The Python representation of the grammar is quite readable, owing to the self-explanatory -class names, and the use of '+', '|' and '^' operators. +The Python representation of the grammar is quite readable, owing to the +self-explanatory class names, and the use of '+', '|' and '^' operators. -The L{ParseResults} object returned from L{ParserElement.parseString} can be accessed as a nested list, a dictionary, or an -object with named attributes. +The :class:`ParseResults` object returned from +:class:`ParserElement.parseString` can be +accessed as a nested list, a dictionary, or an object with named +attributes. -The pyparsing module handles some of the problems that are typically vexing when writing text parsers: - - extra or missing whitespace (the above program will also handle "Hello,World!", "Hello , World !", etc.) - - quoted strings - - embedded comments +The pyparsing module handles some of the problems that are typically +vexing when writing text parsers: + + - extra or missing whitespace (the above program will also handle + "Hello,World!", "Hello , World !", etc.) + - quoted strings + - embedded comments Getting Started - ----------------- -Visit the classes L{ParserElement} and L{ParseResults} to see the base classes that most other pyparsing +Visit the classes :class:`ParserElement` and :class:`ParseResults` to +see the base classes that most other pyparsing classes inherit from. 
Use the docstrings for examples of how to: - - construct literal match expressions from L{Literal} and L{CaselessLiteral} classes - - construct character word-group expressions using the L{Word} class - - see how to create repetitive expressions using L{ZeroOrMore} and L{OneOrMore} classes - - use L{'+'}, L{'|'}, L{'^'}, and L{'&'} operators to combine simple expressions into more complex ones - - associate names with your parsed results using L{ParserElement.setResultsName} - - find some helpful expression short-cuts like L{delimitedList} and L{oneOf} - - find more useful common expressions in the L{pyparsing_common} namespace class + + - construct literal match expressions from :class:`Literal` and + :class:`CaselessLiteral` classes + - construct character word-group expressions using the :class:`Word` + class + - see how to create repetitive expressions using :class:`ZeroOrMore` + and :class:`OneOrMore` classes + - use :class:`'+'`, :class:`'|'`, :class:`'^'`, + and :class:`'&'` operators to combine simple expressions into + more complex ones + - associate names with your parsed results using + :class:`ParserElement.setResultsName` + - find some helpful expression short-cuts like :class:`delimitedList` + and :class:`oneOf` + - find more useful common expressions in the :class:`pyparsing_common` + namespace class """ -__version__ = "2.2.1" -__versionTime__ = "18 Sep 2018 00:49 UTC" +__version__ = "2.3.1" +__versionTime__ = "09 Jan 2019 23:26 UTC" __author__ = "Paul McGuire " import string @@ -91,6 +110,12 @@ import traceback import types from datetime import datetime +try: + # Python 3 + from itertools import filterfalse +except ImportError: + from itertools import ifilterfalse as filterfalse + try: from _thread import RLock except ImportError: @@ -113,27 +138,33 @@ except ImportError: except ImportError: _OrderedDict = None +try: + from types import SimpleNamespace +except ImportError: + class SimpleNamespace: pass + + #~ sys.stderr.write( "testing pyparsing module, version %s, %s\n" % (__version__,__versionTime__ ) ) __all__ = [ 'And', 'CaselessKeyword', 'CaselessLiteral', 'CharsNotIn', 'Combine', 'Dict', 'Each', 'Empty', 'FollowedBy', 'Forward', 'GoToColumn', 'Group', 'Keyword', 'LineEnd', 'LineStart', 'Literal', -'MatchFirst', 'NoMatch', 'NotAny', 'OneOrMore', 'OnlyOnce', 'Optional', 'Or', +'PrecededBy', 'MatchFirst', 'NoMatch', 'NotAny', 'OneOrMore', 'OnlyOnce', 'Optional', 'Or', 'ParseBaseException', 'ParseElementEnhance', 'ParseException', 'ParseExpression', 'ParseFatalException', 'ParseResults', 'ParseSyntaxException', 'ParserElement', 'QuotedString', 'RecursiveGrammarException', -'Regex', 'SkipTo', 'StringEnd', 'StringStart', 'Suppress', 'Token', 'TokenConverter', -'White', 'Word', 'WordEnd', 'WordStart', 'ZeroOrMore', +'Regex', 'SkipTo', 'StringEnd', 'StringStart', 'Suppress', 'Token', 'TokenConverter', +'White', 'Word', 'WordEnd', 'WordStart', 'ZeroOrMore', 'Char', 'alphanums', 'alphas', 'alphas8bit', 'anyCloseTag', 'anyOpenTag', 'cStyleComment', 'col', 'commaSeparatedList', 'commonHTMLEntity', 'countedArray', 'cppStyleComment', 'dblQuotedString', 'dblSlashComment', 'delimitedList', 'dictOf', 'downcaseTokens', 'empty', 'hexnums', 'htmlComment', 'javaStyleComment', 'line', 'lineEnd', 'lineStart', 'lineno', 'makeHTMLTags', 'makeXMLTags', 'matchOnlyAtCol', 'matchPreviousExpr', 'matchPreviousLiteral', 'nestedExpr', 'nullDebugAction', 'nums', 'oneOf', 'opAssoc', 'operatorPrecedence', 'printables', -'punc8bit', 'pythonStyleComment', 'quotedString', 'removeQuotes', 
'replaceHTMLEntity', +'punc8bit', 'pythonStyleComment', 'quotedString', 'removeQuotes', 'replaceHTMLEntity', 'replaceWith', 'restOfLine', 'sglQuotedString', 'srange', 'stringEnd', 'stringStart', 'traceParseAction', 'unicodeString', 'upcaseTokens', 'withAttribute', 'indentedBlock', 'originalTextFor', 'ungroup', 'infixNotation','locatedExpr', 'withClass', -'CloseMatch', 'tokenMap', 'pyparsing_common', +'CloseMatch', 'tokenMap', 'pyparsing_common', 'pyparsing_unicode', 'unicode_set', ] system_version = tuple(sys.version_info)[:3] @@ -142,6 +173,7 @@ if PY_3: _MAX_INT = sys.maxsize basestring = str unichr = chr + unicode = str _ustr = str # build list of single arg builtins, that can be used as parse actions @@ -152,9 +184,11 @@ else: range = xrange def _ustr(obj): - """Drop-in replacement for str(obj) that tries to be Unicode friendly. It first tries - str(obj). If that fails with a UnicodeEncodeError, then it tries unicode(obj). It - then < returns the unicode object | encodes it with the default encoding | ... >. + """Drop-in replacement for str(obj) that tries to be Unicode + friendly. It first tries str(obj). If that fails with + a UnicodeEncodeError, then it tries unicode(obj). It then + < returns the unicode object | encodes it with the default + encoding | ... >. """ if isinstance(obj,unicode): return obj @@ -179,9 +213,9 @@ else: singleArgBuiltins.append(getattr(__builtin__,fname)) except AttributeError: continue - + _generatorType = type((y for y in range(1))) - + def _xml_escape(data): """Escape &, <, >, ", ', etc. in a string of data.""" @@ -192,9 +226,6 @@ def _xml_escape(data): data = data.replace(from_, to_) return data -class _Constants(object): - pass - alphas = string.ascii_uppercase + string.ascii_lowercase nums = "0123456789" hexnums = nums + "ABCDEFabcdef" @@ -220,16 +251,16 @@ class ParseBaseException(Exception): @classmethod def _from_exception(cls, pe): """ - internal factory method to simplify creating one type of ParseException + internal factory method to simplify creating one type of ParseException from another - avoids having __init__ signature conflicts among subclasses """ return cls(pe.pstr, pe.loc, pe.msg, pe.parserElement) def __getattr__( self, aname ): """supported attributes by name are: - - lineno - returns the line number of the exception text - - col - returns the column number of the exception text - - line - returns the line containing the exception text + - lineno - returns the line number of the exception text + - col - returns the column number of the exception text + - line - returns the line containing the exception text """ if( aname == "lineno" ): return lineno( self.loc, self.pstr ) @@ -262,22 +293,94 @@ class ParseException(ParseBaseException): """ Exception thrown when parse expressions don't match class; supported attributes by name are: - - lineno - returns the line number of the exception text - - col - returns the column number of the exception text - - line - returns the line containing the exception text - + - lineno - returns the line number of the exception text + - col - returns the column number of the exception text + - line - returns the line containing the exception text + Example:: + try: Word(nums).setName("integer").parseString("ABC") except ParseException as pe: print(pe) print("column: {}".format(pe.col)) - + prints:: + Expected integer (at char 0), (line:1, col:1) column: 1 + """ - pass + + @staticmethod + def explain(exc, depth=16): + """ + Method to take an exception and translate the Python internal traceback into a list 
+ of the pyparsing expressions that caused the exception to be raised. + + Parameters: + + - exc - exception raised during parsing (need not be a ParseException, in support + of Python exceptions that might be raised in a parse action) + - depth (default=16) - number of levels back in the stack trace to list expression + and function names; if None, the full stack trace names will be listed; if 0, only + the failing input line, marker, and exception string will be shown + + Returns a multi-line string listing the ParserElements and/or function names in the + exception's stack trace. + + Note: the diagnostic output will include string representations of the expressions + that failed to parse. These representations will be more helpful if you use `setName` to + give identifiable names to your expressions. Otherwise they will use the default string + forms, which may be cryptic to read. + + explain() is only supported under Python 3. + """ + import inspect + + if depth is None: + depth = sys.getrecursionlimit() + ret = [] + if isinstance(exc, ParseBaseException): + ret.append(exc.line) + ret.append(' ' * (exc.col - 1) + '^') + ret.append("{0}: {1}".format(type(exc).__name__, exc)) + + if depth > 0: + callers = inspect.getinnerframes(exc.__traceback__, context=depth) + seen = set() + for i, ff in enumerate(callers[-depth:]): + frm = ff.frame + + f_self = frm.f_locals.get('self', None) + if isinstance(f_self, ParserElement): + if frm.f_code.co_name not in ('parseImpl', '_parseNoCache'): + continue + if f_self in seen: + continue + seen.add(f_self) + + self_type = type(f_self) + ret.append("{0}.{1} - {2}".format(self_type.__module__, + self_type.__name__, + f_self)) + elif f_self is not None: + self_type = type(f_self) + ret.append("{0}.{1}".format(self_type.__module__, + self_type.__name__)) + else: + code = frm.f_code + if code.co_name in ('wrapper', ''): + continue + + ret.append("{0}".format(code.co_name)) + + depth -= 1 + if not depth: + break + + return '\n'.join(ret) + class ParseFatalException(ParseBaseException): """user-throwable exception thrown when inconsistent parse content @@ -285,9 +388,11 @@ class ParseFatalException(ParseBaseException): pass class ParseSyntaxException(ParseFatalException): - """just like L{ParseFatalException}, but thrown internally when an - L{ErrorStop} ('-' operator) indicates that parsing is to stop - immediately because an unbacktrackable syntax error has been found""" + """just like :class:`ParseFatalException`, but thrown internally + when an :class:`ErrorStop` ('-' operator) indicates + that parsing is to stop immediately because an unbacktrackable + syntax error has been found. + """ pass #~ class ReparseException(ParseBaseException): @@ -304,7 +409,9 @@ class ParseSyntaxException(ParseFatalException): #~ self.reparseLoc = restartLoc class RecursiveGrammarException(Exception): - """exception thrown by L{ParserElement.validate} if the grammar could be improperly recursive""" + """exception thrown by :class:`ParserElement.validate` if the + grammar could be improperly recursive + """ def __init__( self, parseElementList ): self.parseElementTrace = parseElementList @@ -322,16 +429,18 @@ class _ParseResultsWithOffset(object): self.tup = (self.tup[0],i) class ParseResults(object): - """ - Structured parse results, to provide multiple means of access to the parsed data: - - as a list (C{len(results)}) - - by list index (C{results[0], results[1]}, etc.) 
- - by attribute (C{results.} - see L{ParserElement.setResultsName}) + """Structured parse results, to provide multiple means of access to + the parsed data: + + - as a list (``len(results)``) + - by list index (``results[0], results[1]``, etc.) + - by attribute (``results.`` - see :class:`ParserElement.setResultsName`) Example:: + integer = Word(nums) - date_str = (integer.setResultsName("year") + '/' - + integer.setResultsName("month") + '/' + date_str = (integer.setResultsName("year") + '/' + + integer.setResultsName("month") + '/' + integer.setResultsName("day")) # equivalent form: # date_str = integer("year") + '/' + integer("month") + '/' + integer("day") @@ -348,7 +457,9 @@ class ParseResults(object): test("'month' in result") test("'minutes' in result") test("result.dump()", str) + prints:: + list(result) -> ['1999', '/', '12', '/', '31'] result[0] -> '1999' result['month'] -> '12' @@ -398,7 +509,7 @@ class ParseResults(object): toklist = [ toklist ] if asList: if isinstance(toklist,ParseResults): - self[name] = _ParseResultsWithOffset(toklist.copy(),0) + self[name] = _ParseResultsWithOffset(ParseResults(toklist.__toklist), 0) else: self[name] = _ParseResultsWithOffset(ParseResults(toklist[0]),0) self[name].__name = name @@ -467,19 +578,19 @@ class ParseResults(object): def _itervalues( self ): return (self[k] for k in self._iterkeys()) - + def _iteritems( self ): return ((k, self[k]) for k in self._iterkeys()) if PY_3: - keys = _iterkeys - """Returns an iterator of all named result keys (Python 3.x only).""" + keys = _iterkeys + """Returns an iterator of all named result keys.""" values = _itervalues - """Returns an iterator of all named result values (Python 3.x only).""" + """Returns an iterator of all named result values.""" items = _iteritems - """Returns an iterator of all named result key-value tuples (Python 3.x only).""" + """Returns an iterator of all named result key-value tuples.""" else: iterkeys = _iterkeys @@ -498,7 +609,7 @@ class ParseResults(object): def values( self ): """Returns all named result values (as a list in Python 2.x, as an iterator in Python 3.x).""" return list(self.itervalues()) - + def items( self ): """Returns all named result key-values (as a list of tuples in Python 2.x, as an iterator in Python 3.x).""" return list(self.iteritems()) @@ -507,19 +618,20 @@ class ParseResults(object): """Since keys() returns an iterator, this method is helpful in bypassing code that looks for the existence of any defined results names.""" return bool(self.__tokdict) - + def pop( self, *args, **kwargs): """ - Removes and returns item at specified index (default=C{last}). - Supports both C{list} and C{dict} semantics for C{pop()}. If passed no - argument or an integer argument, it will use C{list} semantics - and pop tokens from the list of parsed tokens. If passed a - non-integer argument (most likely a string), it will use C{dict} - semantics and pop the corresponding value from any defined - results names. A second default return value argument is - supported, just as in C{dict.pop()}. + Removes and returns item at specified index (default= ``last``). + Supports both ``list`` and ``dict`` semantics for ``pop()``. If + passed no argument or an integer argument, it will use ``list`` + semantics and pop tokens from the list of parsed tokens. If passed + a non-integer argument (most likely a string), it will use ``dict`` + semantics and pop the corresponding value from any defined results + names. 
A second default return value argument is supported, just as in + ``dict.pop()``. Example:: + def remove_first(tokens): tokens.pop(0) print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] @@ -536,7 +648,9 @@ class ParseResults(object): return tokens patt.addParseAction(remove_LABEL) print(patt.parseString("AAB 123 321").dump()) + prints:: + ['AAB', '123', '321'] - LABEL: AAB @@ -549,8 +663,8 @@ class ParseResults(object): args = (args[0], v) else: raise TypeError("pop() got an unexpected keyword argument '%s'" % k) - if (isinstance(args[0], int) or - len(args) == 1 or + if (isinstance(args[0], int) or + len(args) == 1 or args[0] in self): index = args[0] ret = self[index] @@ -563,14 +677,15 @@ class ParseResults(object): def get(self, key, defaultValue=None): """ Returns named result matching the given key, or if there is no - such name, then returns the given C{defaultValue} or C{None} if no - C{defaultValue} is specified. + such name, then returns the given ``defaultValue`` or ``None`` if no + ``defaultValue`` is specified. + + Similar to ``dict.get()``. - Similar to C{dict.get()}. - Example:: + integer = Word(nums) - date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + date_str = integer("year") + '/' + integer("month") + '/' + integer("day") result = date_str.parseString("1999/12/31") print(result.get("year")) # -> '1999' @@ -585,10 +700,11 @@ class ParseResults(object): def insert( self, index, insStr ): """ Inserts new element at location index in the list of parsed tokens. - - Similar to C{list.insert()}. + + Similar to ``list.insert()``. Example:: + print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] # use a parse action to insert the parse location in the front of the parsed results @@ -607,8 +723,9 @@ class ParseResults(object): Add single element to end of ParseResults list of elements. Example:: + print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] - + # use a parse action to compute the sum of the parsed integers, and add it to the end def append_sum(tokens): tokens.append(sum(map(int, tokens))) @@ -621,8 +738,9 @@ class ParseResults(object): Add sequence of elements to end of ParseResults list of elements. Example:: + patt = OneOrMore(Word(alphas)) - + # use a parse action to append the reverse of the matched strings, to make a palindrome def make_palindrome(tokens): tokens.extend(reversed([t[::-1] for t in tokens])) @@ -646,7 +764,7 @@ class ParseResults(object): return self[name] except KeyError: return "" - + if name in self.__tokdict: if name not in self.__accumNames: return self.__tokdict[name][-1][0] @@ -671,7 +789,7 @@ class ParseResults(object): self[k] = v if isinstance(v[0],ParseResults): v[0].__parent = wkref(self) - + self.__toklist += other.__toklist self.__accumNames.update( other.__accumNames ) return self @@ -683,7 +801,7 @@ class ParseResults(object): else: # this may raise a TypeError - so be it return other + self - + def __repr__( self ): return "(%s, %s)" % ( repr( self.__toklist ), repr( self.__tokdict ) ) @@ -706,11 +824,12 @@ class ParseResults(object): Returns the parse results as a nested list of matching tokens, all converted to strings. 
Example:: + patt = OneOrMore(Word(alphas)) result = patt.parseString("sldkj lsdkj sldkj") # even though the result prints in string-like form, it is actually a pyparsing ParseResults print(type(result), result) # -> ['sldkj', 'lsdkj', 'sldkj'] - + # Use asList() to create an actual list result_list = result.asList() print(type(result_list), result_list) # -> ['sldkj', 'lsdkj', 'sldkj'] @@ -722,12 +841,13 @@ class ParseResults(object): Returns the named parse results as a nested dictionary. Example:: + integer = Word(nums) date_str = integer("year") + '/' + integer("month") + '/' + integer("day") - + result = date_str.parseString('12/31/1999') print(type(result), repr(result)) # -> (['12', '/', '31', '/', '1999'], {'day': [('1999', 4)], 'year': [('12', 0)], 'month': [('31', 2)]}) - + result_dict = result.asDict() print(type(result_dict), repr(result_dict)) # -> {'day': '1999', 'year': '12', 'month': '31'} @@ -740,7 +860,7 @@ class ParseResults(object): item_fn = self.items else: item_fn = self.iteritems - + def toItem(obj): if isinstance(obj, ParseResults): if obj.haskeys(): @@ -749,15 +869,15 @@ class ParseResults(object): return [toItem(v) for v in obj] else: return obj - + return dict((k,toItem(v)) for k,v in item_fn()) def copy( self ): """ - Returns a new copy of a C{ParseResults} object. + Returns a new copy of a :class:`ParseResults` object. """ ret = ParseResults( self.__toklist ) - ret.__tokdict = self.__tokdict.copy() + ret.__tokdict = dict(self.__tokdict.items()) ret.__parent = self.__parent ret.__accumNames.update( self.__accumNames ) ret.__name = self.__name @@ -833,22 +953,25 @@ class ParseResults(object): def getName(self): r""" - Returns the results name for this token expression. Useful when several + Returns the results name for this token expression. Useful when several different expressions might match at a particular location. Example:: + integer = Word(nums) ssn_expr = Regex(r"\d\d\d-\d\d-\d\d\d\d") house_number_expr = Suppress('#') + Word(nums, alphanums) - user_data = (Group(house_number_expr)("house_number") + user_data = (Group(house_number_expr)("house_number") | Group(ssn_expr)("ssn") | Group(integer)("age")) user_info = OneOrMore(user_data) - + result = user_info.parseString("22 111-22-3333 #221B") for item in result: print(item.getName(), ':', item[0]) + prints:: + age : 22 ssn : 111-22-3333 house_number : 221B @@ -870,17 +993,20 @@ class ParseResults(object): def dump(self, indent='', depth=0, full=True): """ - Diagnostic method for listing out the contents of a C{ParseResults}. - Accepts an optional C{indent} argument so that this string can be embedded - in a nested display of other data. + Diagnostic method for listing out the contents of + a :class:`ParseResults`. Accepts an optional ``indent`` argument so + that this string can be embedded in a nested display of other data. Example:: + integer = Word(nums) date_str = integer("year") + '/' + integer("month") + '/' + integer("day") - + result = date_str.parseString('12/31/1999') print(result.dump()) + prints:: + ['12', '/', '31', '/', '1999'] - day: 1999 - month: 31 @@ -910,16 +1036,18 @@ class ParseResults(object): out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),vv.dump(indent,depth+1) )) else: out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),_ustr(vv))) - + return "".join(out) def pprint(self, *args, **kwargs): """ - Pretty-printer for parsed results as a list, using the C{pprint} module. 
- Accepts additional positional or keyword args as defined for the - C{pprint.pprint} method. (U{http://docs.python.org/3/library/pprint.html#pprint.pprint}) + Pretty-printer for parsed results as a list, using the + `pprint `_ module. + Accepts additional positional or keyword args as defined for + `pprint.pprint `_ . Example:: + ident = Word(alphas, alphanums) num = Word(nums) func = Forward() @@ -927,7 +1055,9 @@ class ParseResults(object): func <<= ident + Group(Optional(delimitedList(term))) result = func.parseString("fna a,b,(fnb c,d,200),100") result.pprint(width=40) + prints:: + ['fna', ['a', 'b', @@ -970,24 +1100,25 @@ def col (loc,strg): The first column is number 1. Note: the default parsing behavior is to expand tabs in the input string - before starting the parsing process. See L{I{ParserElement.parseString}} for more information - on parsing strings containing C{}s, and suggested methods to maintain a - consistent view of the parsed string, the parse location, and line and column - positions within the parsed string. + before starting the parsing process. See + :class:`ParserElement.parseString` for more + information on parsing strings containing ```` s, and suggested + methods to maintain a consistent view of the parsed string, the parse + location, and line and column positions within the parsed string. """ s = strg return 1 if 0} for more information - on parsing strings containing C{}s, and suggested methods to maintain a - consistent view of the parsed string, the parse location, and line and column - positions within the parsed string. - """ + Note - the default parsing behavior is to expand tabs in the input string + before starting the parsing process. See :class:`ParserElement.parseString` + for more information on parsing strings containing ```` s, and + suggested methods to maintain a consistent view of the parsed string, the + parse location, and line and column positions within the parsed string. + """ return strg.count("\n",0,loc) + 1 def line( loc, strg ): @@ -1041,7 +1172,7 @@ def _trim_arity(func, maxargs=2): return lambda s,l,t: func(t) limit = [0] foundArity = [False] - + # traceback return data structure changed in Py3.5 - normalize back to plain tuples if system_version[:2] >= (3,5): def extract_stack(limit=0): @@ -1056,12 +1187,12 @@ def _trim_arity(func, maxargs=2): else: extract_stack = traceback.extract_stack extract_tb = traceback.extract_tb - - # synthesize what would be returned by traceback.extract_stack at the call to + + # synthesize what would be returned by traceback.extract_stack at the call to # user's parse action 'func', so that we don't incur call penalty at parse time - + LINE_DIFF = 6 - # IF ANY CODE CHANGES, EVEN JUST COMMENTS OR BLANK LINES, BETWEEN THE NEXT LINE AND + # IF ANY CODE CHANGES, EVEN JUST COMMENTS OR BLANK LINES, BETWEEN THE NEXT LINE AND # THE CALL TO FUNC INSIDE WRAPPER, LINE_DIFF MUST BE MODIFIED!!!! 
this_line = extract_stack(limit=2)[-1] pa_call_line_synth = (this_line[0], this_line[1]+LINE_DIFF) @@ -1092,7 +1223,7 @@ def _trim_arity(func, maxargs=2): # copy func name to wrapper for sensible debug output func_name = "" try: - func_name = getattr(func, '__name__', + func_name = getattr(func, '__name__', getattr(func, '__class__').__name__) except Exception: func_name = str(func) @@ -1111,9 +1242,10 @@ class ParserElement(object): Overrides the default whitespace chars Example:: + # default whitespace chars are space, and newline OneOrMore(Word(alphas)).parseString("abc def\nghi jkl") # -> ['abc', 'def', 'ghi', 'jkl'] - + # change to just treat newline as significant ParserElement.setDefaultWhitespaceChars(" \t") OneOrMore(Word(alphas)).parseString("abc def\nghi jkl") # -> ['abc', 'def'] @@ -1124,18 +1256,19 @@ class ParserElement(object): def inlineLiteralsUsing(cls): """ Set class to be used for inclusion of string literals into a parser. - + Example:: + # default literal class used is Literal integer = Word(nums) - date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + date_str = integer("year") + '/' + integer("month") + '/' + integer("day") date_str.parseString("1999/12/31") # -> ['1999', '/', '12', '/', '31'] # change to Suppress ParserElement.inlineLiteralsUsing(Suppress) - date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + date_str = integer("year") + '/' + integer("month") + '/' + integer("day") date_str.parseString("1999/12/31") # -> ['1999', '12', '31'] """ @@ -1149,7 +1282,7 @@ class ParserElement(object): self.resultsName = None self.saveAsList = savelist self.skipWhitespace = True - self.whiteChars = ParserElement.DEFAULT_WHITE_CHARS + self.whiteChars = set(ParserElement.DEFAULT_WHITE_CHARS) self.copyDefaultWhiteChars = True self.mayReturnEmpty = False # used when checking for left-recursion self.keepTabs = False @@ -1166,18 +1299,24 @@ class ParserElement(object): def copy( self ): """ - Make a copy of this C{ParserElement}. Useful for defining different parse actions - for the same parsing pattern, using copies of the original parse element. - + Make a copy of this :class:`ParserElement`. Useful for defining + different parse actions for the same parsing pattern, using copies of + the original parse element. + Example:: + integer = Word(nums).setParseAction(lambda toks: int(toks[0])) integerK = integer.copy().addParseAction(lambda toks: toks[0]*1024) + Suppress("K") integerM = integer.copy().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M") - + print(OneOrMore(integerK | integerM | integer).parseString("5K 100 640K 256M")) + prints:: + [5120, 100, 655360, 268435456] - Equivalent form of C{expr.copy()} is just C{expr()}:: + + Equivalent form of ``expr.copy()`` is just ``expr()``:: + integerM = integer().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M") """ cpy = copy.copy( self ) @@ -1190,8 +1329,9 @@ class ParserElement(object): def setName( self, name ): """ Define name for this expression, makes debugging and exception messages clearer. - + Example:: + Word(nums).parseString("ABC") # -> Exception: Expected W:(0123...) (at char 0), (line:1, col:1) Word(nums).setName("integer").parseString("ABC") # -> Exception: Expected integer (at char 0), (line:1, col:1) """ @@ -1205,17 +1345,18 @@ class ParserElement(object): """ Define name for referencing matching tokens as a nested attribute of the returned parse results. 
- NOTE: this returns a *copy* of the original C{ParserElement} object; + NOTE: this returns a *copy* of the original :class:`ParserElement` object; this is so that the client can define a basic element, such as an integer, and reference it in multiple places with different names. You can also set results names using the abbreviated syntax, - C{expr("name")} in place of C{expr.setResultsName("name")} - - see L{I{__call__}<__call__>}. + ``expr("name")`` in place of ``expr.setResultsName("name")`` + - see :class:`__call__`. Example:: - date_str = (integer.setResultsName("year") + '/' - + integer.setResultsName("month") + '/' + + date_str = (integer.setResultsName("year") + '/' + + integer.setResultsName("month") + '/' + integer.setResultsName("day")) # equivalent form: @@ -1231,7 +1372,7 @@ class ParserElement(object): def setBreak(self,breakFlag = True): """Method to invoke the Python pdb debugger when this element is - about to be parsed. Set C{breakFlag} to True to enable, False to + about to be parsed. Set ``breakFlag`` to True to enable, False to disable. """ if breakFlag: @@ -1250,25 +1391,28 @@ class ParserElement(object): def setParseAction( self, *fns, **kwargs ): """ Define one or more actions to perform when successfully matching parse element definition. - Parse action fn is a callable method with 0-3 arguments, called as C{fn(s,loc,toks)}, - C{fn(loc,toks)}, C{fn(toks)}, or just C{fn()}, where: - - s = the original string being parsed (see note below) - - loc = the location of the matching substring - - toks = a list of the matched tokens, packaged as a C{L{ParseResults}} object + Parse action fn is a callable method with 0-3 arguments, called as ``fn(s,loc,toks)`` , + ``fn(loc,toks)`` , ``fn(toks)`` , or just ``fn()`` , where: + + - s = the original string being parsed (see note below) + - loc = the location of the matching substring + - toks = a list of the matched tokens, packaged as a :class:`ParseResults` object + If the functions in fns modify the tokens, they can return them as the return value from fn, and the modified list of tokens will replace the original. Otherwise, fn does not need to return any value. Optional keyword arguments: - - callDuringTry = (default=C{False}) indicate if parse action should be run during lookaheads and alternate testing + - callDuringTry = (default= ``False`` ) indicate if parse action should be run during lookaheads and alternate testing Note: the default parsing behavior is to expand tabs in the input string - before starting the parsing process. See L{I{parseString}} for more information - on parsing strings containing C{}s, and suggested methods to maintain a - consistent view of the parsed string, the parse location, and line and column - positions within the parsed string. - + before starting the parsing process. See :class:`parseString for more + information on parsing strings containing ```` s, and suggested + methods to maintain a consistent view of the parsed string, the parse + location, and line and column positions within the parsed string. + Example:: + integer = Word(nums) date_str = integer + '/' + integer + '/' + integer @@ -1287,24 +1431,25 @@ class ParserElement(object): def addParseAction( self, *fns, **kwargs ): """ - Add one or more parse actions to expression's list of parse actions. See L{I{setParseAction}}. - - See examples in L{I{copy}}. + Add one or more parse actions to expression's list of parse actions. See :class:`setParseAction`. + + See examples in :class:`copy`. 
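A minimal sketch of the parse-action and condition hooks described above, assuming the vendored module is importable as ``pip._vendor.pyparsing`` (the ``integer`` and ``year`` expressions are illustrative only)::

    from pip._vendor.pyparsing import Word, nums, ParseException

    # setParseAction replaces the matched tokens; addCondition only accepts or rejects them
    integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
    year = integer.copy().addCondition(lambda toks: toks[0] >= 2000,
                                       message="only years 2000 and later are supported")

    print(integer.parseString("1999"))      # -> [1999]
    try:
        year.parseString("1999")
    except ParseException as err:
        print(err)                          # -> only years 2000 and later are supported ...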
""" self.parseAction += list(map(_trim_arity, list(fns))) self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False) return self def addCondition(self, *fns, **kwargs): - """Add a boolean predicate function to expression's list of parse actions. See - L{I{setParseAction}} for function call signatures. Unlike C{setParseAction}, - functions passed to C{addCondition} need to return boolean success/fail of the condition. + """Add a boolean predicate function to expression's list of parse actions. See + :class:`setParseAction` for function call signatures. Unlike ``setParseAction``, + functions passed to ``addCondition`` need to return boolean success/fail of the condition. Optional keyword arguments: - - message = define a custom message to be used in the raised exception - - fatal = if True, will raise ParseFatalException to stop parsing immediately; otherwise will raise ParseException - + - message = define a custom message to be used in the raised exception + - fatal = if True, will raise ParseFatalException to stop parsing immediately; otherwise will raise ParseException + Example:: + integer = Word(nums).setParseAction(lambda toks: int(toks[0])) year_int = integer.copy() year_int.addCondition(lambda toks: toks[0] >= 2000, message="Only support years 2000 and later") @@ -1315,8 +1460,9 @@ class ParserElement(object): msg = kwargs.get("message", "failed user-defined condition") exc_type = ParseFatalException if kwargs.get("fatal", False) else ParseException for fn in fns: + fn = _trim_arity(fn) def pa(s,l,t): - if not bool(_trim_arity(fn)(s,l,t)): + if not bool(fn(s,l,t)): raise exc_type(s,l,msg) self.parseAction.append(pa) self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False) @@ -1325,12 +1471,12 @@ class ParserElement(object): def setFailAction( self, fn ): """Define action to perform if parsing fails at this expression. Fail acton fn is a callable function that takes the arguments - C{fn(s,loc,expr,err)} where: - - s = string being parsed - - loc = location where expression match was attempted and failed - - expr = the parse expression that failed - - err = the exception thrown - The function returns no value. It may throw C{L{ParseFatalException}} + ``fn(s,loc,expr,err)`` where: + - s = string being parsed + - loc = location where expression match was attempted and failed + - expr = the parse expression that failed + - err = the exception thrown + The function returns no value. 
It may throw :class:`ParseFatalException` if it is desired to stop parsing immediately.""" self.failAction = fn return self @@ -1412,8 +1558,14 @@ class ParserElement(object): if debugging: try: for fn in self.parseAction: - tokens = fn( instring, tokensStart, retTokens ) - if tokens is not None: + try: + tokens = fn( instring, tokensStart, retTokens ) + except IndexError as parse_action_exc: + exc = ParseException("exception raised in parse action") + exc.__cause__ = parse_action_exc + raise exc + + if tokens is not None and tokens is not retTokens: retTokens = ParseResults( tokens, self.resultsName, asList=self.saveAsList and isinstance(tokens,(ParseResults,list)), @@ -1425,8 +1577,14 @@ class ParserElement(object): raise else: for fn in self.parseAction: - tokens = fn( instring, tokensStart, retTokens ) - if tokens is not None: + try: + tokens = fn( instring, tokensStart, retTokens ) + except IndexError as parse_action_exc: + exc = ParseException("exception raised in parse action") + exc.__cause__ = parse_action_exc + raise exc + + if tokens is not None and tokens is not retTokens: retTokens = ParseResults( tokens, self.resultsName, asList=self.saveAsList and isinstance(tokens,(ParseResults,list)), @@ -1443,7 +1601,7 @@ class ParserElement(object): return self._parse( instring, loc, doActions=False )[0] except ParseFatalException: raise ParseException( instring, loc, self.errmsg, self) - + def canParseNext(self, instring, loc): try: self.tryParse(instring, loc) @@ -1465,7 +1623,7 @@ class ParserElement(object): def clear(self): cache.clear() - + def cache_len(self): return len(cache) @@ -1577,23 +1735,23 @@ class ParserElement(object): often in many complex grammars) can immediately return a cached value, instead of re-executing parsing/validating code. Memoizing is done of both valid results and parsing exceptions. - + Parameters: - - cache_size_limit - (default=C{128}) - if an integer value is provided - will limit the size of the packrat cache; if None is passed, then - the cache size will be unbounded; if 0 is passed, the cache will - be effectively disabled. - + + - cache_size_limit - (default= ``128``) - if an integer value is provided + will limit the size of the packrat cache; if None is passed, then + the cache size will be unbounded; if 0 is passed, the cache will + be effectively disabled. + This speedup may break existing programs that use parse actions that have side-effects. For this reason, packrat parsing is disabled when you first import pyparsing. To activate the packrat feature, your - program must call the class method C{ParserElement.enablePackrat()}. If - your program uses C{psyco} to "compile as you go", you must call - C{enablePackrat} before calling C{psyco.full()}. If you do not do this, - Python will crash. For best results, call C{enablePackrat()} immediately - after importing pyparsing. - + program must call the class method :class:`ParserElement.enablePackrat`. + For best results, call ``enablePackrat()`` immediately after + importing pyparsing. + Example:: + from pip._vendor import pyparsing pyparsing.ParserElement.enablePackrat() """ @@ -1612,23 +1770,25 @@ class ParserElement(object): expression has been built. If you want the grammar to require that the entire input string be - successfully parsed, then set C{parseAll} to True (equivalent to ending - the grammar with C{L{StringEnd()}}). + successfully parsed, then set ``parseAll`` to True (equivalent to ending + the grammar with ``StringEnd()``). 
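The ``parseAll`` behaviour can be illustrated with a small sketch (imports assume the vendored path ``pip._vendor.pyparsing``; the ``greet`` grammar is illustrative only)::

    from pip._vendor.pyparsing import Word, alphas, ParseException

    greet = Word(alphas) + ',' + Word(alphas) + '!'
    print(greet.parseString("Hello, World!"))   # -> ['Hello', ',', 'World', '!']

    try:
        # with parseAll=True the grammar must consume the entire input
        greet.parseString("Hello, World! and more", parseAll=True)
    except ParseException as err:
        print(err)                              # -> Expected end of text ...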
- Note: C{parseString} implicitly calls C{expandtabs()} on the input string, + Note: ``parseString`` implicitly calls ``expandtabs()`` on the input string, in order to report proper column numbers in parse actions. If the input string contains tabs and - the grammar uses parse actions that use the C{loc} argument to index into the + the grammar uses parse actions that use the ``loc`` argument to index into the string being parsed, you can ensure you have a consistent view of the input string by: - - calling C{parseWithTabs} on your grammar before calling C{parseString} - (see L{I{parseWithTabs}}) - - define your parse action using the full C{(s,loc,toks)} signature, and - reference the input string using the parse action's C{s} argument - - explictly expand the tabs in your input string before calling - C{parseString} - + + - calling ``parseWithTabs`` on your grammar before calling ``parseString`` + (see :class:`parseWithTabs`) + - define your parse action using the full ``(s,loc,toks)`` signature, and + reference the input string using the parse action's ``s`` argument + - explictly expand the tabs in your input string before calling + ``parseString`` + Example:: + Word('a').parseString('aaaaabaaa') # -> ['aaaaa'] Word('a').parseString('aaaaabaaa', parseAll=True) # -> Exception: Expected end of text """ @@ -1659,22 +1819,23 @@ class ParserElement(object): """ Scan the input string for expression matches. Each match will return the matching tokens, start location, and end location. May be called with optional - C{maxMatches} argument, to clip scanning after 'n' matches are found. If - C{overlap} is specified, then overlapping matches will be reported. + ``maxMatches`` argument, to clip scanning after 'n' matches are found. If + ``overlap`` is specified, then overlapping matches will be reported. Note that the start and end locations are reported relative to the string - being parsed. See L{I{parseString}} for more information on parsing + being parsed. See :class:`parseString` for more information on parsing strings with embedded tabs. Example:: + source = "sldjf123lsdjjkf345sldkjf879lkjsfd987" print(source) for tokens,start,end in Word(alphas).scanString(source): print(' '*start + '^'*(end-start)) print(' '*start + tokens[0]) - + prints:: - + sldjf123lsdjjkf345sldkjf879lkjsfd987 ^^^^^ sldjf @@ -1728,19 +1889,22 @@ class ParserElement(object): def transformString( self, instring ): """ - Extension to C{L{scanString}}, to modify matching text with modified tokens that may - be returned from a parse action. To use C{transformString}, define a grammar and + Extension to :class:`scanString`, to modify matching text with modified tokens that may + be returned from a parse action. To use ``transformString``, define a grammar and attach a parse action to it that modifies the returned token list. - Invoking C{transformString()} on a target string will then scan for matches, + Invoking ``transformString()`` on a target string will then scan for matches, and replace the matched text patterns according to the logic in the parse - action. C{transformString()} returns the resulting transformed string. - + action. ``transformString()`` returns the resulting transformed string. + Example:: + wd = Word(alphas) wd.setParseAction(lambda toks: toks[0].title()) - + print(wd.transformString("now is the winter of our discontent made glorious summer by this sun of york.")) - Prints:: + + prints:: + Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York. 
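As a further illustrative sketch of the same mechanism (imports assume the vendored path), a parse action that returns replacement text rewrites each match in place::

    from pip._vendor.pyparsing import Word, nums

    # double every integer found in the input string
    number = Word(nums).setParseAction(lambda toks: str(int(toks[0]) * 2))
    print(number.transformString("item 10 of 25"))   # -> 'item 20 of 50'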
""" out = [] @@ -1771,19 +1935,22 @@ class ParserElement(object): def searchString( self, instring, maxMatches=_MAX_INT ): """ - Another extension to C{L{scanString}}, simplifying the access to the tokens found + Another extension to :class:`scanString`, simplifying the access to the tokens found to match the given parse expression. May be called with optional - C{maxMatches} argument, to clip searching after 'n' matches are found. - + ``maxMatches`` argument, to clip searching after 'n' matches are found. + Example:: + # a capitalized word starts with an uppercase letter, followed by zero or more lowercase letters cap_word = Word(alphas.upper(), alphas.lower()) - + print(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity")) # the sum() builtin can be used to merge results into a single ParseResults object print(sum(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity"))) + prints:: + [['More'], ['Iron'], ['Lead'], ['Gold'], ['I'], ['Electricity']] ['More', 'Iron', 'Lead', 'Gold', 'I', 'Electricity'] """ @@ -1799,14 +1966,17 @@ class ParserElement(object): def split(self, instring, maxsplit=_MAX_INT, includeSeparators=False): """ Generator method to split a string using the given expression as a separator. - May be called with optional C{maxsplit} argument, to limit the number of splits; - and the optional C{includeSeparators} argument (default=C{False}), if the separating + May be called with optional ``maxsplit`` argument, to limit the number of splits; + and the optional ``includeSeparators`` argument (default= ``False``), if the separating matching text should be included in the split results. - - Example:: + + Example:: + punc = oneOf(list(".,;:/-!?")) print(list(punc.split("This, this?, this sentence, is badly punctuated!"))) + prints:: + ['This', ' this', '', ' this sentence', ' is badly punctuated', ''] """ splits = 0 @@ -1820,14 +1990,17 @@ class ParserElement(object): def __add__(self, other ): """ - Implementation of + operator - returns C{L{And}}. Adding strings to a ParserElement - converts them to L{Literal}s by default. - + Implementation of + operator - returns :class:`And`. Adding strings to a ParserElement + converts them to :class:`Literal`s by default. + Example:: + greet = Word(alphas) + "," + Word(alphas) + "!" hello = "Hello, World!" print (hello, "->", greet.parseString(hello)) - Prints:: + + prints:: + Hello, World! 
-> ['Hello', ',', 'World', '!'] """ if isinstance( other, basestring ): @@ -1840,7 +2013,7 @@ class ParserElement(object): def __radd__(self, other ): """ - Implementation of + operator when left operand is not a C{L{ParserElement}} + Implementation of + operator when left operand is not a :class:`ParserElement` """ if isinstance( other, basestring ): other = ParserElement._literalStringClass( other ) @@ -1852,7 +2025,7 @@ class ParserElement(object): def __sub__(self, other): """ - Implementation of - operator, returns C{L{And}} with error stop + Implementation of - operator, returns :class:`And` with error stop """ if isinstance( other, basestring ): other = ParserElement._literalStringClass( other ) @@ -1864,7 +2037,7 @@ class ParserElement(object): def __rsub__(self, other ): """ - Implementation of - operator when left operand is not a C{L{ParserElement}} + Implementation of - operator when left operand is not a :class:`ParserElement` """ if isinstance( other, basestring ): other = ParserElement._literalStringClass( other ) @@ -1876,23 +2049,23 @@ class ParserElement(object): def __mul__(self,other): """ - Implementation of * operator, allows use of C{expr * 3} in place of - C{expr + expr + expr}. Expressions may also me multiplied by a 2-integer - tuple, similar to C{{min,max}} multipliers in regular expressions. Tuples - may also include C{None} as in: - - C{expr*(n,None)} or C{expr*(n,)} is equivalent - to C{expr*n + L{ZeroOrMore}(expr)} - (read as "at least n instances of C{expr}") - - C{expr*(None,n)} is equivalent to C{expr*(0,n)} - (read as "0 to n instances of C{expr}") - - C{expr*(None,None)} is equivalent to C{L{ZeroOrMore}(expr)} - - C{expr*(1,None)} is equivalent to C{L{OneOrMore}(expr)} + Implementation of * operator, allows use of ``expr * 3`` in place of + ``expr + expr + expr``. Expressions may also me multiplied by a 2-integer + tuple, similar to ``{min,max}`` multipliers in regular expressions. Tuples + may also include ``None`` as in: + - ``expr*(n,None)`` or ``expr*(n,)`` is equivalent + to ``expr*n + ZeroOrMore(expr)`` + (read as "at least n instances of ``expr``") + - ``expr*(None,n)`` is equivalent to ``expr*(0,n)`` + (read as "0 to n instances of ``expr``") + - ``expr*(None,None)`` is equivalent to ``ZeroOrMore(expr)`` + - ``expr*(1,None)`` is equivalent to ``OneOrMore(expr)`` - Note that C{expr*(None,n)} does not raise an exception if + Note that ``expr*(None,n)`` does not raise an exception if more than n exprs exist in the input stream; that is, - C{expr*(None,n)} does not enforce a maximum number of expr + ``expr*(None,n)`` does not enforce a maximum number of expr occurrences. 
If this behavior is desired, then write - C{expr*(None,n) + ~expr} + ``expr*(None,n) + ~expr`` """ if isinstance(other,int): minElements, optElements = other,0 @@ -1947,7 +2120,7 @@ class ParserElement(object): def __or__(self, other ): """ - Implementation of | operator - returns C{L{MatchFirst}} + Implementation of | operator - returns :class:`MatchFirst` """ if isinstance( other, basestring ): other = ParserElement._literalStringClass( other ) @@ -1959,7 +2132,7 @@ class ParserElement(object): def __ror__(self, other ): """ - Implementation of | operator when left operand is not a C{L{ParserElement}} + Implementation of | operator when left operand is not a :class:`ParserElement` """ if isinstance( other, basestring ): other = ParserElement._literalStringClass( other ) @@ -1971,7 +2144,7 @@ class ParserElement(object): def __xor__(self, other ): """ - Implementation of ^ operator - returns C{L{Or}} + Implementation of ^ operator - returns :class:`Or` """ if isinstance( other, basestring ): other = ParserElement._literalStringClass( other ) @@ -1983,7 +2156,7 @@ class ParserElement(object): def __rxor__(self, other ): """ - Implementation of ^ operator when left operand is not a C{L{ParserElement}} + Implementation of ^ operator when left operand is not a :class:`ParserElement` """ if isinstance( other, basestring ): other = ParserElement._literalStringClass( other ) @@ -1995,7 +2168,7 @@ class ParserElement(object): def __and__(self, other ): """ - Implementation of & operator - returns C{L{Each}} + Implementation of & operator - returns :class:`Each` """ if isinstance( other, basestring ): other = ParserElement._literalStringClass( other ) @@ -2007,7 +2180,7 @@ class ParserElement(object): def __rand__(self, other ): """ - Implementation of & operator when left operand is not a C{L{ParserElement}} + Implementation of & operator when left operand is not a :class:`ParserElement` """ if isinstance( other, basestring ): other = ParserElement._literalStringClass( other ) @@ -2019,23 +2192,24 @@ class ParserElement(object): def __invert__( self ): """ - Implementation of ~ operator - returns C{L{NotAny}} + Implementation of ~ operator - returns :class:`NotAny` """ return NotAny( self ) def __call__(self, name=None): """ - Shortcut for C{L{setResultsName}}, with C{listAllMatches=False}. - - If C{name} is given with a trailing C{'*'} character, then C{listAllMatches} will be - passed as C{True}. - - If C{name} is omitted, same as calling C{L{copy}}. + Shortcut for :class:`setResultsName`, with ``listAllMatches=False``. + + If ``name`` is given with a trailing ``'*'`` character, then ``listAllMatches`` will be + passed as ``True``. + + If ``name` is omitted, same as calling :class:`copy`. Example:: + # these are equivalent userdata = Word(alphas).setResultsName("name") + Word(nums+"-").setResultsName("socsecno") - userdata = Word(alphas)("name") + Word(nums+"-")("socsecno") + userdata = Word(alphas)("name") + Word(nums+"-")("socsecno") """ if name is not None: return self.setResultsName(name) @@ -2044,7 +2218,7 @@ class ParserElement(object): def suppress( self ): """ - Suppresses the output of this C{ParserElement}; useful to keep punctuation from + Suppresses the output of this :class:`ParserElement`; useful to keep punctuation from cluttering up returned output. """ return Suppress( self ) @@ -2052,7 +2226,7 @@ class ParserElement(object): def leaveWhitespace( self ): """ Disables the skipping of whitespace before matching the characters in the - C{ParserElement}'s defined pattern. 
This is normally only used internally by + :class:`ParserElement`'s defined pattern. This is normally only used internally by the pyparsing module, but may be needed in some whitespace-sensitive grammars. """ self.skipWhitespace = False @@ -2069,9 +2243,9 @@ class ParserElement(object): def parseWithTabs( self ): """ - Overrides default behavior to expand C{}s to spaces before parsing the input string. - Must be called before C{parseString} when the input grammar contains elements that - match C{} characters. + Overrides default behavior to expand ````s to spaces before parsing the input string. + Must be called before ``parseString`` when the input grammar contains elements that + match ```` characters. """ self.keepTabs = True return self @@ -2081,11 +2255,12 @@ class ParserElement(object): Define expression to be ignored (e.g., comments) while doing pattern matching; may be called repeatedly, to define multiple comment or other ignorable patterns. - + Example:: + patt = OneOrMore(Word(alphas)) patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj'] - + patt.ignore(cStyleComment) patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj', 'lskjd'] """ @@ -2112,19 +2287,21 @@ class ParserElement(object): def setDebug( self, flag=True ): """ Enable display of debugging messages while doing pattern matching. - Set C{flag} to True to enable, False to disable. + Set ``flag`` to True to enable, False to disable. Example:: + wd = Word(alphas).setName("alphaword") integer = Word(nums).setName("numword") term = wd | integer - + # turn on debugging for wd wd.setDebug() OneOrMore(term).parseString("abc 123 xyz 890") - + prints:: + Match alphaword at loc 0(1,1) Matched alphaword -> ['abc'] Match alphaword at loc 3(1,4) @@ -2137,12 +2314,12 @@ class ParserElement(object): Exception raised:Expected alphaword (at char 15), (line:1, col:16) The output shown is that produced by the default debug actions - custom debug actions can be - specified using L{setDebugActions}. Prior to attempting - to match the C{wd} expression, the debugging message C{"Match at loc (,)"} - is shown. Then if the parse succeeds, a C{"Matched"} message is shown, or an C{"Exception raised"} - message is shown. Also note the use of L{setName} to assign a human-readable name to the expression, + specified using :class:`setDebugActions`. Prior to attempting + to match the ``wd`` expression, the debugging message ``"Match at loc (,)"`` + is shown. Then if the parse succeeds, a ``"Matched"`` message is shown, or an ``"Exception raised"`` + message is shown. Also note the use of :class:`setName` to assign a human-readable name to the expression, which makes debugging and exception messages easier to understand - for instance, the default - name created for the C{Word} expression without calling C{setName} is C{"W:(ABCD...)"}. + name created for the :class:`Word` expression without calling ``setName`` is ``"W:(ABCD...)"``. """ if flag: self.setDebugActions( _defaultStartDebugAction, _defaultSuccessDebugAction, _defaultExceptionDebugAction ) @@ -2212,14 +2389,15 @@ class ParserElement(object): def matches(self, testString, parseAll=True): """ - Method for quick testing of a parser against a test string. Good for simple + Method for quick testing of a parser against a test string. Good for simple inline microtests of sub expressions while building up larger parser. 
- + Parameters: - testString - to test against this expression for a match - - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests - + - parseAll - (default= ``True``) - flag to pass to :class:`parseString` when running tests + Example:: + expr = Word(nums) assert expr.matches("100") """ @@ -2228,28 +2406,32 @@ class ParserElement(object): return True except ParseBaseException: return False - - def runTests(self, tests, parseAll=True, comment='#', fullDump=True, printResults=True, failureTests=False): + + def runTests(self, tests, parseAll=True, comment='#', + fullDump=True, printResults=True, failureTests=False, postParse=None): """ Execute the parse expression on a series of test strings, showing each test, the parsed results or where the parse failed. Quick and easy way to run a parse expression against a list of sample strings. - + Parameters: - tests - a list of separate test strings, or a multiline string of test strings - - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests - - comment - (default=C{'#'}) - expression for indicating embedded comments in the test + - parseAll - (default= ``True``) - flag to pass to :class:`parseString` when running tests + - comment - (default= ``'#'``) - expression for indicating embedded comments in the test string; pass None to disable comment filtering - - fullDump - (default=C{True}) - dump results as list followed by results names in nested outline; + - fullDump - (default= ``True``) - dump results as list followed by results names in nested outline; if False, only dump nested list - - printResults - (default=C{True}) prints test output to stdout - - failureTests - (default=C{False}) indicates if these tests are expected to fail parsing + - printResults - (default= ``True``) prints test output to stdout + - failureTests - (default= ``False``) indicates if these tests are expected to fail parsing + - postParse - (default= ``None``) optional callback for successful parse results; called as + `fn(test_string, parse_results)` and returns a string to be added to the test output Returns: a (success, results) tuple, where success indicates that all tests succeeded - (or failed if C{failureTests} is True), and the results contain a list of lines of each + (or failed if ``failureTests`` is True), and the results contain a list of lines of each test's output - + Example:: + number_expr = pyparsing_common.number.copy() result = number_expr.runTests(''' @@ -2273,7 +2455,9 @@ class ParserElement(object): 3.14.159 ''', failureTests=True) print("Success" if result[0] else "Failed!") + prints:: + # unsigned integer 100 [100] @@ -2291,7 +2475,7 @@ class ParserElement(object): [1e-12] Success - + # stray character 100Z ^ @@ -2313,7 +2497,7 @@ class ParserElement(object): lines, create a test like this:: expr.runTest(r"this is a test\\n of strings that spans \\n 3 lines") - + (Note that this is a raw string literal, you must include the leading 'r'.) 
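A small sketch of the new ``postParse`` hook (the ``show_type`` callback is illustrative only; imports assume the vendored path ``pip._vendor.pyparsing``)::

    from pip._vendor.pyparsing import pyparsing_common

    # postParse is called as fn(test_string, parse_results); any string it
    # returns is appended to that test's printed output
    def show_type(test_string, result):
        return "value parsed as {}".format(type(result[0]).__name__)

    pyparsing_common.number.runTests('''
        100
        3.14159
        ''', postParse=show_type)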
""" if isinstance(tests, basestring): @@ -2332,10 +2516,18 @@ class ParserElement(object): out = ['\n'.join(comments), t] comments = [] try: - t = t.replace(r'\n','\n') + # convert newline marks to actual newlines, and strip leading BOM if present + t = t.replace(r'\n','\n').lstrip('\ufeff') result = self.parseString(t, parseAll=parseAll) out.append(result.dump(full=fullDump)) success = success and not failureTests + if postParse is not None: + try: + pp_value = postParse(t, result) + if pp_value is not None: + out.append(str(pp_value)) + except Exception as e: + out.append("{0} failed: {1}: {2}".format(postParse.__name__, type(e).__name__, e)) except ParseBaseException as pe: fatal = "(FATAL)" if isinstance(pe, ParseFatalException) else "" if '\n' in t: @@ -2357,21 +2549,20 @@ class ParserElement(object): print('\n'.join(out)) allResults.append((t, result)) - + return success, allResults - + class Token(ParserElement): - """ - Abstract C{ParserElement} subclass, for defining atomic matching patterns. + """Abstract :class:`ParserElement` subclass, for defining atomic + matching patterns. """ def __init__( self ): super(Token,self).__init__( savelist=False ) class Empty(Token): - """ - An empty token, will always match. + """An empty token, will always match. """ def __init__( self ): super(Empty,self).__init__() @@ -2381,8 +2572,7 @@ class Empty(Token): class NoMatch(Token): - """ - A token that will never match. + """A token that will never match. """ def __init__( self ): super(NoMatch,self).__init__() @@ -2396,18 +2586,18 @@ class NoMatch(Token): class Literal(Token): - """ - Token to exactly match a specified string. - + """Token to exactly match a specified string. + Example:: + Literal('blah').parseString('blah') # -> ['blah'] Literal('blah').parseString('blahfooblah') # -> ['blah'] Literal('blah').parseString('bla') # -> Exception: Expected "blah" - - For case-insensitive matching, use L{CaselessLiteral}. - + + For case-insensitive matching, use :class:`CaselessLiteral`. + For keyword matching (force word break before and after the matched string), - use L{Keyword} or L{CaselessKeyword}. + use :class:`Keyword` or :class:`CaselessKeyword`. """ def __init__( self, matchString ): super(Literal,self).__init__() @@ -2437,21 +2627,29 @@ _L = Literal ParserElement._literalStringClass = Literal class Keyword(Token): - """ - Token to exactly match a specified string as a keyword, that is, it must be - immediately followed by a non-keyword character. Compare with C{L{Literal}}: - - C{Literal("if")} will match the leading C{'if'} in C{'ifAndOnlyIf'}. - - C{Keyword("if")} will not; it will only match the leading C{'if'} in C{'if x=1'}, or C{'if(y==2)'} - Accepts two optional constructor arguments in addition to the keyword string: - - C{identChars} is a string of characters that would be valid identifier characters, - defaulting to all alphanumerics + "_" and "$" - - C{caseless} allows case-insensitive matching, default is C{False}. - + """Token to exactly match a specified string as a keyword, that is, + it must be immediately followed by a non-keyword character. Compare + with :class:`Literal`: + + - ``Literal("if")`` will match the leading ``'if'`` in + ``'ifAndOnlyIf'``. 
+ - ``Keyword("if")`` will not; it will only match the leading + ``'if'`` in ``'if x=1'``, or ``'if(y==2)'`` + + Accepts two optional constructor arguments in addition to the + keyword string: + + - ``identChars`` is a string of characters that would be valid + identifier characters, defaulting to all alphanumerics + "_" and + "$" + - ``caseless`` allows case-insensitive matching, default is ``False``. + Example:: + Keyword("start").parseString("start") # -> ['start'] Keyword("start").parseString("starting") # -> Exception - For case-insensitive matching, use L{CaselessKeyword}. + For case-insensitive matching, use :class:`CaselessKeyword`. """ DEFAULT_KEYWORD_CHARS = alphanums+"_$" @@ -2502,15 +2700,15 @@ class Keyword(Token): Keyword.DEFAULT_KEYWORD_CHARS = chars class CaselessLiteral(Literal): - """ - Token to match a specified string, ignoring case of letters. + """Token to match a specified string, ignoring case of letters. Note: the matched results will always be in the case of the given match string, NOT the case of the input text. Example:: + OneOrMore(CaselessLiteral("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD', 'CMD'] - - (Contrast with example for L{CaselessKeyword}.) + + (Contrast with example for :class:`CaselessKeyword`.) """ def __init__( self, matchString ): super(CaselessLiteral,self).__init__( matchString.upper() ) @@ -2526,36 +2724,39 @@ class CaselessLiteral(Literal): class CaselessKeyword(Keyword): """ - Caseless version of L{Keyword}. + Caseless version of :class:`Keyword`. Example:: + OneOrMore(CaselessKeyword("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD'] - - (Contrast with example for L{CaselessLiteral}.) + + (Contrast with example for :class:`CaselessLiteral`.) """ def __init__( self, matchString, identChars=None ): super(CaselessKeyword,self).__init__( matchString, identChars, caseless=True ) - def parseImpl( self, instring, loc, doActions=True ): - if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and - (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) ): - return loc+self.matchLen, self.match - raise ParseException(instring, loc, self.errmsg, self) - class CloseMatch(Token): - """ - A variation on L{Literal} which matches "close" matches, that is, - strings with at most 'n' mismatching characters. C{CloseMatch} takes parameters: - - C{match_string} - string to be matched - - C{maxMismatches} - (C{default=1}) maximum number of mismatches allowed to count as a match - - The results from a successful parse will contain the matched text from the input string and the following named results: - - C{mismatches} - a list of the positions within the match_string where mismatches were found - - C{original} - the original match_string used to compare against the input string - - If C{mismatches} is an empty list, then the match was an exact match. - + """A variation on :class:`Literal` which matches "close" matches, + that is, strings with at most 'n' mismatching characters. 
+ :class:`CloseMatch` takes parameters: + + - ``match_string`` - string to be matched + - ``maxMismatches`` - (``default=1``) maximum number of + mismatches allowed to count as a match + + The results from a successful parse will contain the matched text + from the input string and the following named results: + + - ``mismatches`` - a list of the positions within the + match_string where mismatches were found + - ``original`` - the original match_string used to compare + against the input string + + If ``mismatches`` is an empty list, then the match was an exact + match. + Example:: + patt = CloseMatch("ATCATCGAATGGA") patt.parseString("ATCATCGAAXGGA") # -> (['ATCATCGAAXGGA'], {'mismatches': [[9]], 'original': ['ATCATCGAATGGA']}) patt.parseString("ATCAXCGAAXGGA") # -> Exception: Expected 'ATCATCGAATGGA' (with up to 1 mismatches) (at char 0), (line:1, col:1) @@ -2604,49 +2805,55 @@ class CloseMatch(Token): class Word(Token): - """ - Token for matching words composed of allowed character sets. - Defined with string containing all allowed initial characters, - an optional string containing allowed body characters (if omitted, + """Token for matching words composed of allowed character sets. + Defined with string containing all allowed initial characters, an + optional string containing allowed body characters (if omitted, defaults to the initial character set), and an optional minimum, - maximum, and/or exact length. The default value for C{min} is 1 (a - minimum value < 1 is not valid); the default values for C{max} and C{exact} - are 0, meaning no maximum or exact length restriction. An optional - C{excludeChars} parameter can list characters that might be found in - the input C{bodyChars} string; useful to define a word of all printables - except for one or two characters, for instance. - - L{srange} is useful for defining custom character set strings for defining - C{Word} expressions, using range notation from regular expression character sets. - - A common mistake is to use C{Word} to match a specific literal string, as in - C{Word("Address")}. Remember that C{Word} uses the string argument to define - I{sets} of matchable characters. This expression would match "Add", "AAA", - "dAred", or any other word made up of the characters 'A', 'd', 'r', 'e', and 's'. - To match an exact literal string, use L{Literal} or L{Keyword}. + maximum, and/or exact length. The default value for ``min`` is + 1 (a minimum value < 1 is not valid); the default values for + ``max`` and ``exact`` are 0, meaning no maximum or exact + length restriction. An optional ``excludeChars`` parameter can + list characters that might be found in the input ``bodyChars`` + string; useful to define a word of all printables except for one or + two characters, for instance. + + :class:`srange` is useful for defining custom character set strings + for defining ``Word`` expressions, using range notation from + regular expression character sets. + + A common mistake is to use :class:`Word` to match a specific literal + string, as in ``Word("Address")``. Remember that :class:`Word` + uses the string argument to define *sets* of matchable characters. + This expression would match "Add", "AAA", "dAred", or any other word + made up of the characters 'A', 'd', 'r', 'e', and 's'. To match an + exact literal string, use :class:`Literal` or :class:`Keyword`. 
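The "sets of matchable characters" point is easy to demonstrate with a hedged sketch (imports assume the vendored path)::

    from pip._vendor.pyparsing import Word, Literal

    # Word("Address") matches any run of the characters A, d, r, e, s ...
    print(Word("Address").parseString("dAred"))       # -> ['dAred']
    # ... while Literal("Address") matches only the exact string
    print(Literal("Address").parseString("Address"))  # -> ['Address']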
pyparsing includes helper strings for building Words: - - L{alphas} - - L{nums} - - L{alphanums} - - L{hexnums} - - L{alphas8bit} (alphabetic characters in ASCII range 128-255 - accented, tilded, umlauted, etc.) - - L{punc8bit} (non-alphabetic characters in ASCII range 128-255 - currency, symbols, superscripts, diacriticals, etc.) - - L{printables} (any non-whitespace character) + + - :class:`alphas` + - :class:`nums` + - :class:`alphanums` + - :class:`hexnums` + - :class:`alphas8bit` (alphabetic characters in ASCII range 128-255 + - accented, tilded, umlauted, etc.) + - :class:`punc8bit` (non-alphabetic characters in ASCII range + 128-255 - currency, symbols, superscripts, diacriticals, etc.) + - :class:`printables` (any non-whitespace character) Example:: + # a word composed of digits integer = Word(nums) # equivalent to Word("0123456789") or Word(srange("0-9")) - + # a word with a leading capital, and zero or more lowercase capital_word = Word(alphas.upper(), alphas.lower()) # hostnames are alphanumeric, with leading alpha, and '-' hostname = Word(alphas, alphanums+'-') - + # roman numeral (not a strict parser, accepts invalid mix of characters) roman = Word("IVXLCDM") - + # any string of non-whitespace characters, except for ',' csv_value = Word(printables, excludeChars=",") """ @@ -2762,22 +2969,38 @@ class Word(Token): return self.strRepr +class Char(Word): + """A short-cut class for defining ``Word(characters, exact=1)``, + when defining a match of any single character in a string of + characters. + """ + def __init__(self, charset): + super(Char, self).__init__(charset, exact=1) + self.reString = "[%s]" % _escapeRegexRangeChars(self.initCharsOrig) + self.re = re.compile( self.reString ) + + class Regex(Token): - r""" - Token for matching strings that match a given regular expression. - Defined with string specifying the regular expression in a form recognized by the inbuilt Python re module. - If the given regex contains named groups (defined using C{(?P...)}), these will be preserved as - named parse results. + r"""Token for matching strings that match a given regular + expression. Defined with string specifying the regular expression in + a form recognized by the stdlib Python `re module `_. + If the given regex contains named groups (defined using ``(?P...)``), + these will be preserved as named parse results. Example:: + realnum = Regex(r"[+-]?\d+\.\d*") date = Regex(r'(?P\d{4})-(?P\d\d?)-(?P\d\d?)') - # ref: http://stackoverflow.com/questions/267399/how-do-you-match-only-valid-roman-numerals-with-a-regular-expression - roman = Regex(r"M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})") + # ref: https://stackoverflow.com/questions/267399/how-do-you-match-only-valid-roman-numerals-with-a-regular-expression + roman = Regex(r"M{0,4}(CM|CD|D?{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})") """ compiledREtype = type(re.compile("[A-Z]")) - def __init__( self, pattern, flags=0): - """The parameters C{pattern} and C{flags} are passed to the C{re.compile()} function as-is. See the Python C{re} module for an explanation of the acceptable patterns and flags.""" + def __init__( self, pattern, flags=0, asGroupList=False, asMatch=False): + """The parameters ``pattern`` and ``flags`` are passed + to the ``re.compile()`` function as-is. See the Python + `re module `_ module for an + explanation of the acceptable patterns and flags. 
+ """ super(Regex,self).__init__() if isinstance(pattern, basestring): @@ -2801,7 +3024,7 @@ class Regex(Token): self.pattern = \ self.reString = str(pattern) self.flags = flags - + else: raise ValueError("Regex may only be constructed with a string or a compiled RE object") @@ -2809,6 +3032,8 @@ class Regex(Token): self.errmsg = "Expected " + self.name self.mayIndexError = False self.mayReturnEmpty = True + self.asGroupList = asGroupList + self.asMatch = asMatch def parseImpl( self, instring, loc, doActions=True ): result = self.re.match(instring,loc) @@ -2816,11 +3041,16 @@ class Regex(Token): raise ParseException(instring, loc, self.errmsg, self) loc = result.end() - d = result.groupdict() - ret = ParseResults(result.group()) - if d: - for k in d: - ret[k] = d[k] + if self.asMatch: + ret = result + elif self.asGroupList: + ret = result.groups() + else: + ret = ParseResults(result.group()) + d = result.groupdict() + if d: + for k, v in d.items(): + ret[k] = v return loc,ret def __str__( self ): @@ -2834,28 +3064,70 @@ class Regex(Token): return self.strRepr + def sub(self, repl): + """ + Return Regex with an attached parse action to transform the parsed + result as if called using `re.sub(expr, repl, string) `_. + + Example:: + + make_html = Regex(r"(\w+):(.*?):").sub(r"<\1>\2") + print(make_html.transformString("h1:main title:")) + # prints "
<h1>main title</h1>
" + """ + if self.asGroupList: + warnings.warn("cannot use sub() with Regex(asGroupList=True)", + SyntaxWarning, stacklevel=2) + raise SyntaxError() + + if self.asMatch and callable(repl): + warnings.warn("cannot use sub() with a callable with Regex(asMatch=True)", + SyntaxWarning, stacklevel=2) + raise SyntaxError() + + if self.asMatch: + def pa(tokens): + return tokens[0].expand(repl) + else: + def pa(tokens): + return self.re.sub(repl, tokens[0]) + return self.addParseAction(pa) class QuotedString(Token): r""" Token for matching strings that are delimited by quoting characters. - + Defined with the following parameters: - - quoteChar - string of one or more characters defining the quote delimiting string - - escChar - character to escape quotes, typically backslash (default=C{None}) - - escQuote - special quote sequence to escape an embedded quote string (such as SQL's "" to escape an embedded ") (default=C{None}) - - multiline - boolean indicating whether quotes can span multiple lines (default=C{False}) - - unquoteResults - boolean indicating whether the matched text should be unquoted (default=C{True}) - - endQuoteChar - string of one or more characters defining the end of the quote delimited string (default=C{None} => same as quoteChar) - - convertWhitespaceEscapes - convert escaped whitespace (C{'\t'}, C{'\n'}, etc.) to actual whitespace (default=C{True}) + + - quoteChar - string of one or more characters defining the + quote delimiting string + - escChar - character to escape quotes, typically backslash + (default= ``None`` ) + - escQuote - special quote sequence to escape an embedded quote + string (such as SQL's ``""`` to escape an embedded ``"``) + (default= ``None`` ) + - multiline - boolean indicating whether quotes can span + multiple lines (default= ``False`` ) + - unquoteResults - boolean indicating whether the matched text + should be unquoted (default= ``True`` ) + - endQuoteChar - string of one or more characters defining the + end of the quote delimited string (default= ``None`` => same as + quoteChar) + - convertWhitespaceEscapes - convert escaped whitespace + (``'\t'``, ``'\n'``, etc.) to actual whitespace + (default= ``True`` ) Example:: + qs = QuotedString('"') print(qs.searchString('lsjdf "This is the quote" sldjf')) complex_qs = QuotedString('{{', endQuoteChar='}}') print(complex_qs.searchString('lsjdf {{This is the "quote"}} sldjf')) sql_qs = QuotedString('"', escQuote='""') print(sql_qs.searchString('lsjdf "This is the quote with ""embedded"" quotes" sldjf')) + prints:: + [['This is the quote']] [['This is the "quote"']] [['This is the quote with "embedded" quotes']] @@ -2973,19 +3245,23 @@ class QuotedString(Token): class CharsNotIn(Token): - """ - Token for matching words composed of characters I{not} in a given set (will - include whitespace in matched characters if not listed in the provided exclusion set - see example). - Defined with string containing all disallowed characters, and an optional - minimum, maximum, and/or exact length. The default value for C{min} is 1 (a - minimum value < 1 is not valid); the default values for C{max} and C{exact} - are 0, meaning no maximum or exact length restriction. + """Token for matching words composed of characters *not* in a given + set (will include whitespace in matched characters if not listed in + the provided exclusion set - see example). Defined with string + containing all disallowed characters, and an optional minimum, + maximum, and/or exact length. 
The default value for ``min`` is + 1 (a minimum value < 1 is not valid); the default values for + ``max`` and ``exact`` are 0, meaning no maximum or exact + length restriction. Example:: + # define a comma-separated-value as anything that is not a ',' csv_value = CharsNotIn(',') print(delimitedList(csv_value).parseString("dkls,lsdkjf,s12 34,@!#,213")) + prints:: + ['dkls', 'lsdkjf', 's12 34', '@!#', '213'] """ def __init__( self, notChars, min=1, max=0, exact=0 ): @@ -2994,7 +3270,9 @@ class CharsNotIn(Token): self.notChars = notChars if min < 1: - raise ValueError("cannot specify a minimum length < 1; use Optional(CharsNotIn()) if zero-length char group is permitted") + raise ValueError( + "cannot specify a minimum length < 1; use " + + "Optional(CharsNotIn()) if zero-length char group is permitted") self.minLen = min @@ -3044,19 +3322,38 @@ class CharsNotIn(Token): return self.strRepr class White(Token): - """ - Special matching class for matching whitespace. Normally, whitespace is ignored - by pyparsing grammars. This class is included when some whitespace structures - are significant. Define with a string containing the whitespace characters to be - matched; default is C{" \\t\\r\\n"}. Also takes optional C{min}, C{max}, and C{exact} arguments, - as defined for the C{L{Word}} class. + """Special matching class for matching whitespace. Normally, + whitespace is ignored by pyparsing grammars. This class is included + when some whitespace structures are significant. Define with + a string containing the whitespace characters to be matched; default + is ``" \\t\\r\\n"``. Also takes optional ``min``, + ``max``, and ``exact`` arguments, as defined for the + :class:`Word` class. """ whiteStrs = { - " " : "", - "\t": "", - "\n": "", - "\r": "", - "\f": "", + ' ' : '', + '\t': '', + '\n': '', + '\r': '', + '\f': '', + 'u\00A0': '', + 'u\1680': '', + 'u\180E': '', + 'u\2000': '', + 'u\2001': '', + 'u\2002': '', + 'u\2003': '', + 'u\2004': '', + 'u\2005': '', + 'u\2006': '', + 'u\2007': '', + 'u\2008': '', + 'u\2009': '', + 'u\200A': '', + 'u\200B': '', + 'u\202F': '', + 'u\205F': '', + 'u\3000': '', } def __init__(self, ws=" \t\r\n", min=1, max=0, exact=0): super(White,self).__init__() @@ -3102,8 +3399,8 @@ class _PositionToken(Token): self.mayIndexError = False class GoToColumn(_PositionToken): - """ - Token to advance to a specific column of input text; useful for tabular report scraping. + """Token to advance to a specific column of input text; useful for + tabular report scraping. 
""" def __init__( self, colno ): super(GoToColumn,self).__init__() @@ -3128,11 +3425,11 @@ class GoToColumn(_PositionToken): class LineStart(_PositionToken): - """ - Matches if current position is at the beginning of a line within the parse string - + """Matches if current position is at the beginning of a line within + the parse string + Example:: - + test = '''\ AAA this line AAA and this line @@ -3142,10 +3439,11 @@ class LineStart(_PositionToken): for t in (LineStart() + 'AAA' + restOfLine).searchString(test): print(t) - - Prints:: + + prints:: + ['AAA', ' this line'] - ['AAA', ' and this line'] + ['AAA', ' and this line'] """ def __init__( self ): @@ -3158,8 +3456,8 @@ class LineStart(_PositionToken): raise ParseException(instring, loc, self.errmsg, self) class LineEnd(_PositionToken): - """ - Matches if current position is at the end of a line within the parse string + """Matches if current position is at the end of a line within the + parse string """ def __init__( self ): super(LineEnd,self).__init__() @@ -3178,8 +3476,8 @@ class LineEnd(_PositionToken): raise ParseException(instring, loc, self.errmsg, self) class StringStart(_PositionToken): - """ - Matches if current position is at the beginning of the parse string + """Matches if current position is at the beginning of the parse + string """ def __init__( self ): super(StringStart,self).__init__() @@ -3193,8 +3491,7 @@ class StringStart(_PositionToken): return loc, [] class StringEnd(_PositionToken): - """ - Matches if current position is at the end of the parse string + """Matches if current position is at the end of the parse string """ def __init__( self ): super(StringEnd,self).__init__() @@ -3211,12 +3508,13 @@ class StringEnd(_PositionToken): raise ParseException(instring, loc, self.errmsg, self) class WordStart(_PositionToken): - """ - Matches if the current position is at the beginning of a Word, and - is not preceded by any character in a given set of C{wordChars} - (default=C{printables}). To emulate the C{\b} behavior of regular expressions, - use C{WordStart(alphanums)}. C{WordStart} will also match at the beginning of - the string being parsed, or at the beginning of a line. + """Matches if the current position is at the beginning of a Word, + and is not preceded by any character in a given set of + ``wordChars`` (default= ``printables``). To emulate the + ``\b`` behavior of regular expressions, use + ``WordStart(alphanums)``. ``WordStart`` will also match at + the beginning of the string being parsed, or at the beginning of + a line. """ def __init__(self, wordChars = printables): super(WordStart,self).__init__() @@ -3231,12 +3529,12 @@ class WordStart(_PositionToken): return loc, [] class WordEnd(_PositionToken): - """ - Matches if the current position is at the end of a Word, and - is not followed by any character in a given set of C{wordChars} - (default=C{printables}). To emulate the C{\b} behavior of regular expressions, - use C{WordEnd(alphanums)}. C{WordEnd} will also match at the end of - the string being parsed, or at the end of a line. + """Matches if the current position is at the end of a Word, and is + not followed by any character in a given set of ``wordChars`` + (default= ``printables``). To emulate the ``\b`` behavior of + regular expressions, use ``WordEnd(alphanums)``. ``WordEnd`` + will also match at the end of the string being parsed, or at the end + of a line. 
""" def __init__(self, wordChars = printables): super(WordEnd,self).__init__() @@ -3254,8 +3552,8 @@ class WordEnd(_PositionToken): class ParseExpression(ParserElement): - """ - Abstract subclass of ParserElement, for combining and post-processing parsed tokens. + """Abstract subclass of ParserElement, for combining and + post-processing parsed tokens. """ def __init__( self, exprs, savelist = False ): super(ParseExpression,self).__init__(savelist) @@ -3286,7 +3584,7 @@ class ParseExpression(ParserElement): return self def leaveWhitespace( self ): - """Extends C{leaveWhitespace} defined in base class, and also invokes C{leaveWhitespace} on + """Extends ``leaveWhitespace`` defined in base class, and also invokes ``leaveWhitespace`` on all contained expressions.""" self.skipWhitespace = False self.exprs = [ e.copy() for e in self.exprs ] @@ -3347,7 +3645,7 @@ class ParseExpression(ParserElement): self.mayIndexError |= other.mayIndexError self.errmsg = "Expected " + _ustr(self) - + return self def setResultsName( self, name, listAllMatches=False ): @@ -3359,7 +3657,7 @@ class ParseExpression(ParserElement): for e in self.exprs: e.validate(tmp) self.checkRecursion( [] ) - + def copy(self): ret = super(ParseExpression,self).copy() ret.exprs = [e.copy() for e in self.exprs] @@ -3367,12 +3665,14 @@ class ParseExpression(ParserElement): class And(ParseExpression): """ - Requires all given C{ParseExpression}s to be found in the given order. + Requires all given :class:`ParseExpression` s to be found in the given order. Expressions may be separated by whitespace. - May be constructed using the C{'+'} operator. - May also be constructed using the C{'-'} operator, which will suppress backtracking. + May be constructed using the ``'+'`` operator. + May also be constructed using the ``'-'`` operator, which will + suppress backtracking. Example:: + integer = Word(nums) name_expr = OneOrMore(Word(alphas)) @@ -3394,6 +3694,11 @@ class And(ParseExpression): self.skipWhitespace = self.exprs[0].skipWhitespace self.callPreparse = True + def streamline(self): + super(And, self).streamline() + self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) + return self + def parseImpl( self, instring, loc, doActions=True ): # pass False as last arg to _parse for first element, since we already # pre-parsed the string as part of our And pre-parsing @@ -3442,17 +3747,20 @@ class And(ParseExpression): class Or(ParseExpression): - """ - Requires that at least one C{ParseExpression} is found. - If two expressions match, the expression that matches the longest string will be used. - May be constructed using the C{'^'} operator. + """Requires that at least one :class:`ParseExpression` is found. If + two expressions match, the expression that matches the longest + string will be used. May be constructed using the ``'^'`` + operator. Example:: + # construct Or using '^' operator - + number = Word(nums) ^ Combine(Word(nums) + '.' 
+ Word(nums)) print(number.searchString("123 3.1416 789")) + prints:: + [['123'], ['3.1416'], ['789']] """ def __init__( self, exprs, savelist = False ): @@ -3462,6 +3770,11 @@ class Or(ParseExpression): else: self.mayReturnEmpty = True + def streamline(self): + super(Or, self).streamline() + self.saveAsList = any(e.saveAsList for e in self.exprs) + return self + def parseImpl( self, instring, loc, doActions=True ): maxExcLoc = -1 maxException = None @@ -3521,14 +3834,14 @@ class Or(ParseExpression): class MatchFirst(ParseExpression): - """ - Requires that at least one C{ParseExpression} is found. - If two expressions match, the first one listed is the one that will match. - May be constructed using the C{'|'} operator. + """Requires that at least one :class:`ParseExpression` is found. If + two expressions match, the first one listed is the one that will + match. May be constructed using the ``'|'`` operator. Example:: + # construct MatchFirst using '|' operator - + # watch the order of expressions to match number = Word(nums) | Combine(Word(nums) + '.' + Word(nums)) print(number.searchString("123 3.1416 789")) # Fail! -> [['123'], ['3'], ['1416'], ['789']] @@ -3541,9 +3854,15 @@ class MatchFirst(ParseExpression): super(MatchFirst,self).__init__(exprs, savelist) if self.exprs: self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs) + # self.saveAsList = any(e.saveAsList for e in self.exprs) else: self.mayReturnEmpty = True + def streamline(self): + super(MatchFirst, self).streamline() + self.saveAsList = any(e.saveAsList for e in self.exprs) + return self + def parseImpl( self, instring, loc, doActions=True ): maxExcLoc = -1 maxException = None @@ -3589,12 +3908,13 @@ class MatchFirst(ParseExpression): class Each(ParseExpression): - """ - Requires all given C{ParseExpression}s to be found, but in any order. - Expressions may be separated by whitespace. - May be constructed using the C{'&'} operator. + """Requires all given :class:`ParseExpression` s to be found, but in + any order. Expressions may be separated by whitespace. + + May be constructed using the ``'&'`` operator. Example:: + color = oneOf("RED ORANGE YELLOW GREEN BLUE PURPLE BLACK WHITE BROWN") shape_type = oneOf("SQUARE CIRCLE TRIANGLE STAR HEXAGON OCTAGON") integer = Word(nums) @@ -3603,7 +3923,7 @@ class Each(ParseExpression): color_attr = "color:" + color("color") size_attr = "size:" + integer("size") - # use Each (using operator '&') to accept attributes in any order + # use Each (using operator '&') to accept attributes in any order # (shape and posn are required, color and size are optional) shape_spec = shape_attr & posn_attr & Optional(color_attr) & Optional(size_attr) @@ -3613,7 +3933,9 @@ class Each(ParseExpression): color:GREEN size:20 shape:TRIANGLE posn:20,40 ''' ) + prints:: + shape: SQUARE color: BLACK posn: 100, 120 ['shape:', 'SQUARE', 'color:', 'BLACK', 'posn:', ['100', ',', '120']] - color: BLACK @@ -3647,6 +3969,12 @@ class Each(ParseExpression): self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) self.skipWhitespace = True self.initExprGroups = True + self.saveAsList = True + + def streamline(self): + super(Each, self).streamline() + self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) + return self def parseImpl( self, instring, loc, doActions=True ): if self.initExprGroups: @@ -3713,8 +4041,8 @@ class Each(ParseExpression): class ParseElementEnhance(ParserElement): - """ - Abstract subclass of C{ParserElement}, for combining and post-processing parsed tokens. 
+ """Abstract subclass of :class:`ParserElement`, for combining and + post-processing parsed tokens. """ def __init__( self, expr, savelist=False ): super(ParseElementEnhance,self).__init__(savelist) @@ -3790,20 +4118,25 @@ class ParseElementEnhance(ParserElement): class FollowedBy(ParseElementEnhance): - """ - Lookahead matching of the given parse expression. C{FollowedBy} - does I{not} advance the parsing position within the input string, it only - verifies that the specified parse expression matches at the current - position. C{FollowedBy} always returns a null token list. + """Lookahead matching of the given parse expression. + ``FollowedBy`` does *not* advance the parsing position within + the input string, it only verifies that the specified parse + expression matches at the current position. ``FollowedBy`` + always returns a null token list. If any results names are defined + in the lookahead expression, those *will* be returned for access by + name. Example:: + # use FollowedBy to match a label only if it is followed by a ':' data_word = Word(alphas) label = data_word + FollowedBy(':') attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) - + OneOrMore(attr_expr).parseString("shape: SQUARE color: BLACK posn: upper left").pprint() + prints:: + [['shape', 'SQUARE'], ['color', 'BLACK'], ['posn', 'upper left']] """ def __init__( self, expr ): @@ -3811,20 +4144,108 @@ class FollowedBy(ParseElementEnhance): self.mayReturnEmpty = True def parseImpl( self, instring, loc, doActions=True ): - self.expr.tryParse( instring, loc ) - return loc, [] + _, ret = self.expr._parse(instring, loc, doActions=doActions) + del ret[:] + return loc, ret + + +class PrecededBy(ParseElementEnhance): + """Lookbehind matching of the given parse expression. + ``PrecededBy`` does not advance the parsing position within the + input string, it only verifies that the specified parse expression + matches prior to the current position. ``PrecededBy`` always + returns a null token list, but if a results name is defined on the + given expression, it is returned. + + Parameters: + + - expr - expression that must match prior to the current parse + location + - retreat - (default= ``None``) - (int) maximum number of characters + to lookbehind prior to the current parse location + + If the lookbehind expression is a string, Literal, Keyword, or + a Word or CharsNotIn with a specified exact or maximum length, then + the retreat parameter is not required. Otherwise, retreat must be + specified to give a maximum number of characters to look back from + the current parse position for a lookbehind match. 
+ + Example:: + + # VB-style variable names with type prefixes + int_var = PrecededBy("#") + pyparsing_common.identifier + str_var = PrecededBy("$") + pyparsing_common.identifier + + """ + def __init__(self, expr, retreat=None): + super(PrecededBy, self).__init__(expr) + self.expr = self.expr().leaveWhitespace() + self.mayReturnEmpty = True + self.mayIndexError = False + self.exact = False + if isinstance(expr, str): + retreat = len(expr) + self.exact = True + elif isinstance(expr, (Literal, Keyword)): + retreat = expr.matchLen + self.exact = True + elif isinstance(expr, (Word, CharsNotIn)) and expr.maxLen != _MAX_INT: + retreat = expr.maxLen + self.exact = True + elif isinstance(expr, _PositionToken): + retreat = 0 + self.exact = True + self.retreat = retreat + self.errmsg = "not preceded by " + str(expr) + self.skipWhitespace = False + + def parseImpl(self, instring, loc=0, doActions=True): + if self.exact: + if loc < self.retreat: + raise ParseException(instring, loc, self.errmsg) + start = loc - self.retreat + _, ret = self.expr._parse(instring, start) + else: + # retreat specified a maximum lookbehind window, iterate + test_expr = self.expr + StringEnd() + instring_slice = instring[:loc] + last_expr = ParseException(instring, loc, self.errmsg) + for offset in range(1, min(loc, self.retreat+1)): + try: + _, ret = test_expr._parse(instring_slice, loc-offset) + except ParseBaseException as pbe: + last_expr = pbe + else: + break + else: + raise last_expr + # return empty list of tokens, but preserve any defined results names + del ret[:] + return loc, ret class NotAny(ParseElementEnhance): - """ - Lookahead to disallow matching with the given parse expression. C{NotAny} - does I{not} advance the parsing position within the input string, it only - verifies that the specified parse expression does I{not} match at the current - position. Also, C{NotAny} does I{not} skip over leading whitespace. C{NotAny} - always returns a null token list. May be constructed using the '~' operator. + """Lookahead to disallow matching with the given parse expression. + ``NotAny`` does *not* advance the parsing position within the + input string, it only verifies that the specified parse expression + does *not* match at the current position. Also, ``NotAny`` does + *not* skip over leading whitespace. ``NotAny`` always returns + a null token list. May be constructed using the '~' operator. Example:: - + + AND, OR, NOT = map(CaselessKeyword, "AND OR NOT".split()) + + # take care not to mistake keywords for identifiers + ident = ~(AND | OR | NOT) + Word(alphas) + boolean_term = Optional(NOT) + ident + + # very crude boolean expression - to support parenthesis groups and + # operation hierarchy, use infixNotation + boolean_expr = boolean_term + ZeroOrMore((AND | OR) + boolean_term) + + # integers that are followed by "." are actually floats + integer = Word(nums) + ~Char(".") """ def __init__( self, expr ): super(NotAny,self).__init__(expr) @@ -3862,7 +4283,7 @@ class _MultipleMatch(ParseElementEnhance): check_ender = self.not_ender is not None if check_ender: try_not_ender = self.not_ender.tryParse - + # must be at least one (but first see if we are the stopOn sentinel; # if so, fail) if check_ender: @@ -3884,18 +4305,18 @@ class _MultipleMatch(ParseElementEnhance): pass return loc, tokens - + class OneOrMore(_MultipleMatch): - """ - Repetition of one or more of the given expression. - + """Repetition of one or more of the given expression. 
+ Parameters: - expr - expression that must match one or more times - - stopOn - (default=C{None}) - expression for a terminating sentinel - (only required if the sentinel would ordinarily match the repetition - expression) + - stopOn - (default= ``None``) - expression for a terminating sentinel + (only required if the sentinel would ordinarily match the repetition + expression) Example:: + data_word = Word(alphas) label = data_word + FollowedBy(':') attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join)) @@ -3906,7 +4327,7 @@ class OneOrMore(_MultipleMatch): # use stopOn attribute for OneOrMore to avoid reading label string as part of the data attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) OneOrMore(attr_expr).parseString(text).pprint() # Better -> [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'BLACK']] - + # could also be written as (attr_expr * (1,)).parseString(text).pprint() """ @@ -3921,21 +4342,20 @@ class OneOrMore(_MultipleMatch): return self.strRepr class ZeroOrMore(_MultipleMatch): - """ - Optional repetition of zero or more of the given expression. - + """Optional repetition of zero or more of the given expression. + Parameters: - expr - expression that must match zero or more times - - stopOn - (default=C{None}) - expression for a terminating sentinel - (only required if the sentinel would ordinarily match the repetition - expression) + - stopOn - (default= ``None``) - expression for a terminating sentinel + (only required if the sentinel would ordinarily match the repetition + expression) - Example: similar to L{OneOrMore} + Example: similar to :class:`OneOrMore` """ def __init__( self, expr, stopOn=None): super(ZeroOrMore,self).__init__(expr, stopOn=stopOn) self.mayReturnEmpty = True - + def parseImpl( self, instring, loc, doActions=True ): try: return super(ZeroOrMore, self).parseImpl(instring, loc, doActions) @@ -3960,27 +4380,29 @@ class _NullToken(object): _optionalNotMatched = _NullToken() class Optional(ParseElementEnhance): - """ - Optional matching of the given expression. + """Optional matching of the given expression. Parameters: - expr - expression that must match zero or more times - default (optional) - value to be returned if the optional expression is not found. Example:: + # US postal code can be a 5-digit zip, plus optional 4-digit qualifier zip = Combine(Word(nums, exact=5) + Optional('-' + Word(nums, exact=4))) zip.runTests(''' # traditional ZIP code 12345 - + # ZIP+4 form 12101-0001 - + # invalid ZIP 98765- ''') + prints:: + # traditional ZIP code 12345 ['12345'] @@ -4024,20 +4446,21 @@ class Optional(ParseElementEnhance): return self.strRepr class SkipTo(ParseElementEnhance): - """ - Token for skipping over all undefined text until the matched expression is found. + """Token for skipping over all undefined text until the matched + expression is found. Parameters: - expr - target expression marking the end of the data to be skipped - - include - (default=C{False}) if True, the target expression is also parsed + - include - (default= ``False``) if True, the target expression is also parsed (the skipped text and target expression are returned as a 2-element list). 
- - ignore - (default=C{None}) used to define grammars (typically quoted strings and + - ignore - (default= ``None``) used to define grammars (typically quoted strings and comments) that might contain false matches to the target expression - - failOn - (default=C{None}) define expressions that are not allowed to be - included in the skipped test; if found before the target expression is found, + - failOn - (default= ``None``) define expressions that are not allowed to be + included in the skipped test; if found before the target expression is found, the SkipTo is not a match Example:: + report = ''' Outstanding Issues Report - 1 Jan 2000 @@ -4054,14 +4477,16 @@ class SkipTo(ParseElementEnhance): # - parse action will call token.strip() for each matched token, i.e., the description body string_data = SkipTo(SEP, ignore=quotedString) string_data.setParseAction(tokenMap(str.strip)) - ticket_expr = (integer("issue_num") + SEP - + string_data("sev") + SEP - + string_data("desc") + SEP + ticket_expr = (integer("issue_num") + SEP + + string_data("sev") + SEP + + string_data("desc") + SEP + integer("days_open")) - + for tkt in ticket_expr.searchString(report): print tkt.dump() + prints:: + ['101', 'Critical', 'Intermittent system crash', '6'] - days_open: 6 - desc: Intermittent system crash @@ -4084,7 +4509,7 @@ class SkipTo(ParseElementEnhance): self.mayReturnEmpty = True self.mayIndexError = False self.includeMatch = include - self.asList = False + self.saveAsList = False if isinstance(failOn, basestring): self.failOn = ParserElement._literalStringClass(failOn) else: @@ -4098,14 +4523,14 @@ class SkipTo(ParseElementEnhance): expr_parse = self.expr._parse self_failOn_canParseNext = self.failOn.canParseNext if self.failOn is not None else None self_ignoreExpr_tryParse = self.ignoreExpr.tryParse if self.ignoreExpr is not None else None - + tmploc = loc while tmploc <= instrlen: if self_failOn_canParseNext is not None: # break if failOn expression matches if self_failOn_canParseNext(instring, tmploc): break - + if self_ignoreExpr_tryParse is not None: # advance past ignore expressions while 1: @@ -4113,7 +4538,7 @@ class SkipTo(ParseElementEnhance): tmploc = self_ignoreExpr_tryParse(instring, tmploc) except ParseBaseException: break - + try: expr_parse(instring, tmploc, doActions=False, callPreParse=False) except (ParseException, IndexError): @@ -4131,7 +4556,7 @@ class SkipTo(ParseElementEnhance): loc = tmploc skiptext = instring[startloc:loc] skipresult = ParseResults(skiptext) - + if self.includeMatch: loc, mat = expr_parse(instring,loc,doActions,callPreParse=False) skipresult += mat @@ -4139,23 +4564,31 @@ class SkipTo(ParseElementEnhance): return loc, skipresult class Forward(ParseElementEnhance): - """ - Forward declaration of an expression to be defined later - + """Forward declaration of an expression to be defined later - used for recursive grammars, such as algebraic infix notation. - When the expression is known, it is assigned to the C{Forward} variable using the '<<' operator. + When the expression is known, it is assigned to the ``Forward`` + variable using the '<<' operator. + + Note: take care when assigning to ``Forward`` not to overlook + precedence of operators. - Note: take care when assigning to C{Forward} not to overlook precedence of operators. Specifically, '|' has a lower precedence than '<<', so that:: + fwdExpr << a | b | c + will actually be evaluated as:: + (fwdExpr << a) | b | c + thereby leaving b and c out as parseable alternatives. 
It is recommended that you - explicitly group the values inserted into the C{Forward}:: + explicitly group the values inserted into the ``Forward``:: + fwdExpr << (a | b | c) + Converting to use the '<<=' operator instead will avoid this problem. - See L{ParseResults.pprint} for an example of a recursive parser created using - C{Forward}. + See :class:`ParseResults.pprint` for an example of a recursive + parser created using ``Forward``. """ def __init__( self, other=None ): super(Forward,self).__init__( other, savelist=False ) @@ -4172,10 +4605,10 @@ class Forward(ParseElementEnhance): self.saveAsList = self.expr.saveAsList self.ignoreExprs.extend(self.expr.ignoreExprs) return self - + def __ilshift__(self, other): return self << other - + def leaveWhitespace( self ): self.skipWhitespace = False return self @@ -4225,19 +4658,20 @@ class _ForwardNoRecurse(Forward): class TokenConverter(ParseElementEnhance): """ - Abstract subclass of C{ParseExpression}, for converting parsed results. + Abstract subclass of :class:`ParseExpression`, for converting parsed results. """ def __init__( self, expr, savelist=False ): super(TokenConverter,self).__init__( expr )#, savelist ) self.saveAsList = False class Combine(TokenConverter): - """ - Converter to concatenate all matching tokens to a single string. - By default, the matching patterns must also be contiguous in the input string; - this can be disabled by specifying C{'adjacent=False'} in the constructor. + """Converter to concatenate all matching tokens to a single string. + By default, the matching patterns must also be contiguous in the + input string; this can be disabled by specifying + ``'adjacent=False'`` in the constructor. Example:: + real = Word(nums) + '.' + Word(nums) print(real.parseString('3.1416')) # -> ['3', '.', '1416'] # will also erroneously match the following @@ -4276,10 +4710,11 @@ class Combine(TokenConverter): return retToks class Group(TokenConverter): - """ - Converter to return the matched tokens as a list - useful for returning tokens of C{L{ZeroOrMore}} and C{L{OneOrMore}} expressions. + """Converter to return the matched tokens as a list - useful for + returning tokens of :class:`ZeroOrMore` and :class:`OneOrMore` expressions. Example:: + ident = Word(alphas) num = Word(nums) term = ident | num @@ -4291,38 +4726,40 @@ class Group(TokenConverter): """ def __init__( self, expr ): super(Group,self).__init__( expr ) - self.saveAsList = True + self.saveAsList = expr.saveAsList def postParse( self, instring, loc, tokenlist ): return [ tokenlist ] class Dict(TokenConverter): - """ - Converter to return a repetitive expression as a list, but also as a dictionary. - Each element can also be referenced using the first token in the expression as its key. - Useful for tabular report scraping when the first column can be used as a item key. + """Converter to return a repetitive expression as a list, but also + as a dictionary. Each element can also be referenced using the first + token in the expression as its key. Useful for tabular report + scraping when the first column can be used as a item key. 
Example:: + data_word = Word(alphas) label = data_word + FollowedBy(':') attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join)) text = "shape: SQUARE posn: upper left color: light blue texture: burlap" attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) - + # print attributes as plain groups print(OneOrMore(attr_expr).parseString(text).dump()) - + # instead of OneOrMore(expr), parse using Dict(OneOrMore(Group(expr))) - Dict will auto-assign names result = Dict(OneOrMore(Group(attr_expr))).parseString(text) print(result.dump()) - - # access named fields as dict entries, or output as dict - print(result['shape']) - print(result.asDict()) - prints:: - ['shape', 'SQUARE', 'posn', 'upper left', 'color', 'light blue', 'texture', 'burlap'] + # access named fields as dict entries, or output as dict + print(result['shape']) + print(result.asDict()) + + prints:: + + ['shape', 'SQUARE', 'posn', 'upper left', 'color', 'light blue', 'texture', 'burlap'] [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']] - color: light blue - posn: upper left @@ -4330,7 +4767,8 @@ class Dict(TokenConverter): - texture: burlap SQUARE {'color': 'light blue', 'posn': 'upper left', 'texture': 'burlap', 'shape': 'SQUARE'} - See more examples at L{ParseResults} of accessing fields by results name. + + See more examples at :class:`ParseResults` of accessing fields by results name. """ def __init__( self, expr ): super(Dict,self).__init__( expr ) @@ -4362,10 +4800,10 @@ class Dict(TokenConverter): class Suppress(TokenConverter): - """ - Converter for ignoring the results of a parsed expression. + """Converter for ignoring the results of a parsed expression. Example:: + source = "a, b, c,d" wd = Word(alphas) wd_list1 = wd + ZeroOrMore(',' + wd) @@ -4375,10 +4813,13 @@ class Suppress(TokenConverter): # way afterward - use Suppress to keep them out of the parsed output wd_list2 = wd + ZeroOrMore(Suppress(',') + wd) print(wd_list2.parseString(source)) + prints:: + ['a', ',', 'b', ',', 'c', ',', 'd'] ['a', 'b', 'c', 'd'] - (See also L{delimitedList}.) + + (See also :class:`delimitedList`.) """ def postParse( self, instring, loc, tokenlist ): return [] @@ -4388,8 +4829,7 @@ class Suppress(TokenConverter): class OnlyOnce(object): - """ - Wrapper for parse actions, to ensure they are only called once. + """Wrapper for parse actions, to ensure they are only called once. """ def __init__(self, methodCall): self.callable = _trim_arity(methodCall) @@ -4404,13 +4844,15 @@ class OnlyOnce(object): self.called = False def traceParseAction(f): - """ - Decorator for debugging parse actions. - - When the parse action is called, this decorator will print C{">> entering I{method-name}(line:I{current_source_line}, I{parse_location}, I{matched_tokens})".} - When the parse action completes, the decorator will print C{"<<"} followed by the returned value, or any exception that the parse action raised. + """Decorator for debugging parse actions. + + When the parse action is called, this decorator will print + ``">> entering method-name(line:, , )"``. + When the parse action completes, the decorator will print + ``"<<"`` followed by the returned value, or any exception that the parse action raised. 
Example:: + wd = Word(alphas) @traceParseAction @@ -4419,7 +4861,9 @@ def traceParseAction(f): wds = OneOrMore(wd).setParseAction(remove_duplicate_chars) print(wds.parseString("slkdjs sld sldd sdlf sdljf")) + prints:: + >>entering remove_duplicate_chars(line: 'slkdjs sld sldd sdlf sdljf', 0, (['slkdjs', 'sld', 'sldd', 'sdlf', 'sdljf'], {})) < ['aa', 'bb', 'cc'] delimitedList(Word(hexnums), delim=':', combine=True).parseString("AA:BB:CC:DD:EE") # -> ['AA:BB:CC:DD:EE'] """ @@ -4467,16 +4913,21 @@ def delimitedList( expr, delim=",", combine=False ): return ( expr + ZeroOrMore( Suppress( delim ) + expr ) ).setName(dlName) def countedArray( expr, intExpr=None ): - """ - Helper to define a counted list of expressions. + """Helper to define a counted list of expressions. + This helper defines a pattern of the form:: + integer expr expr expr... + where the leading integer tells how many expr expressions follow. - The matched tokens returns the array of expr tokens as a list - the leading count token is suppressed. - - If C{intExpr} is specified, it should be a pyparsing expression that produces an integer value. + The matched tokens returns the array of expr tokens as a list - the + leading count token is suppressed. + + If ``intExpr`` is specified, it should be a pyparsing expression + that produces an integer value. Example:: + countedArray(Word(alphas)).parseString('2 ab cd ef') # -> ['ab', 'cd'] # in this parser, the leading integer value is given in binary, @@ -4507,17 +4958,19 @@ def _flatten(L): return ret def matchPreviousLiteral(expr): - """ - Helper to define an expression that is indirectly defined from - the tokens matched in a previous expression, that is, it looks - for a 'repeat' of a previous expression. For example:: + """Helper to define an expression that is indirectly defined from + the tokens matched in a previous expression, that is, it looks for + a 'repeat' of a previous expression. For example:: + first = Word(nums) second = matchPreviousLiteral(first) matchExpr = first + ":" + second - will match C{"1:1"}, but not C{"1:2"}. Because this matches a - previous literal, will also match the leading C{"1:1"} in C{"1:10"}. - If this is not desired, use C{matchPreviousExpr}. - Do I{not} use with packrat parsing enabled. + + will match ``"1:1"``, but not ``"1:2"``. Because this + matches a previous literal, will also match the leading + ``"1:1"`` in ``"1:10"``. If this is not desired, use + :class:`matchPreviousExpr`. Do *not* use with packrat parsing + enabled. """ rep = Forward() def copyTokenToRepeater(s,l,t): @@ -4535,18 +4988,19 @@ def matchPreviousLiteral(expr): return rep def matchPreviousExpr(expr): - """ - Helper to define an expression that is indirectly defined from - the tokens matched in a previous expression, that is, it looks - for a 'repeat' of a previous expression. For example:: + """Helper to define an expression that is indirectly defined from + the tokens matched in a previous expression, that is, it looks for + a 'repeat' of a previous expression. For example:: + first = Word(nums) second = matchPreviousExpr(first) matchExpr = first + ":" + second - will match C{"1:1"}, but not C{"1:2"}. Because this matches by - expressions, will I{not} match the leading C{"1:1"} in C{"1:10"}; - the expressions are evaluated first, and then compared, so - C{"1"} is compared with C{"10"}. - Do I{not} use with packrat parsing enabled. + + will match ``"1:1"``, but not ``"1:2"``. 
Because this + matches by expressions, will *not* match the leading ``"1:1"`` + in ``"1:10"``; the expressions are evaluated first, and then + compared, so ``"1"`` is compared with ``"10"``. Do *not* use + with packrat parsing enabled. """ rep = Forward() e2 = expr.copy() @@ -4571,26 +5025,33 @@ def _escapeRegexRangeChars(s): return _ustr(s) def oneOf( strs, caseless=False, useRegex=True ): - """ - Helper to quickly define a set of alternative Literals, and makes sure to do - longest-first testing when there is a conflict, regardless of the input order, - but returns a C{L{MatchFirst}} for best performance. + """Helper to quickly define a set of alternative Literals, and makes + sure to do longest-first testing when there is a conflict, + regardless of the input order, but returns + a :class:`MatchFirst` for best performance. Parameters: - - strs - a string of space-delimited literals, or a collection of string literals - - caseless - (default=C{False}) - treat all literals as caseless - - useRegex - (default=C{True}) - as an optimization, will generate a Regex - object; otherwise, will generate a C{MatchFirst} object (if C{caseless=True}, or - if creating a C{Regex} raises an exception) + + - strs - a string of space-delimited literals, or a collection of + string literals + - caseless - (default= ``False``) - treat all literals as + caseless + - useRegex - (default= ``True``) - as an optimization, will + generate a Regex object; otherwise, will generate + a :class:`MatchFirst` object (if ``caseless=True``, or if + creating a :class:`Regex` raises an exception) Example:: + comp_oper = oneOf("< = > <= >= !=") var = Word(alphas) number = Word(nums) term = var | number comparison_expr = term + comp_oper + term print(comparison_expr.searchString("B = 12 AA=23 B<=AA AA>12")) + prints:: + [['B', '=', '12'], ['AA', '=', '23'], ['B', '<=', 'AA'], ['AA', '>', '12']] """ if caseless: @@ -4644,19 +5105,21 @@ def oneOf( strs, caseless=False, useRegex=True ): return MatchFirst(parseElementClass(sym) for sym in symbols).setName(' | '.join(symbols)) def dictOf( key, value ): - """ - Helper to easily and clearly define a dictionary by specifying the respective patterns - for the key and value. Takes care of defining the C{L{Dict}}, C{L{ZeroOrMore}}, and C{L{Group}} tokens - in the proper order. The key pattern can include delimiting markers or punctuation, - as long as they are suppressed, thereby leaving the significant key text. The value - pattern can include named results, so that the C{Dict} results can include named token - fields. + """Helper to easily and clearly define a dictionary by specifying + the respective patterns for the key and value. Takes care of + defining the :class:`Dict`, :class:`ZeroOrMore`, and + :class:`Group` tokens in the proper order. The key pattern + can include delimiting markers or punctuation, as long as they are + suppressed, thereby leaving the significant key text. The value + pattern can include named results, so that the :class:`Dict` results + can include named token fields. 
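(Illustrative aside, not part of the diff: a self-contained sketch of dictOf built from the key/value pattern described above; the names and sample text are arbitrary, and it assumes a recent pyparsing release.)

    from pyparsing import Word, alphas, nums, Suppress, dictOf

    text = "red:1 green:2 blue:3"
    # key = word plus suppressed ':', value = number
    color_map = dictOf(Word(alphas) + Suppress(":"), Word(nums))
    print(color_map.parseString(text).asDict())
    # -> {'red': '1', 'green': '2', 'blue': '3'}  (key order may vary)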
Example:: + text = "shape: SQUARE posn: upper left color: light blue texture: burlap" attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) print(OneOrMore(attr_expr).parseString(text).dump()) - + attr_label = label attr_value = Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join) @@ -4666,7 +5129,9 @@ def dictOf( key, value ): print(result['shape']) print(result.shape) # object attribute access works too print(result.asDict()) + prints:: + [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']] - color: light blue - posn: upper left @@ -4676,29 +5141,34 @@ def dictOf( key, value ): SQUARE {'color': 'light blue', 'shape': 'SQUARE', 'posn': 'upper left', 'texture': 'burlap'} """ - return Dict( ZeroOrMore( Group ( key + value ) ) ) + return Dict(OneOrMore(Group(key + value))) def originalTextFor(expr, asString=True): - """ - Helper to return the original, untokenized text for a given expression. Useful to - restore the parsed fields of an HTML start tag into the raw tag text itself, or to - revert separate tokens with intervening whitespace back to the original matching - input text. By default, returns astring containing the original parsed text. - - If the optional C{asString} argument is passed as C{False}, then the return value is a - C{L{ParseResults}} containing any results names that were originally matched, and a - single token containing the original matched text from the input string. So if - the expression passed to C{L{originalTextFor}} contains expressions with defined - results names, you must set C{asString} to C{False} if you want to preserve those - results name values. + """Helper to return the original, untokenized text for a given + expression. Useful to restore the parsed fields of an HTML start + tag into the raw tag text itself, or to revert separate tokens with + intervening whitespace back to the original matching input text. By + default, returns astring containing the original parsed text. + + If the optional ``asString`` argument is passed as + ``False``, then the return value is + a :class:`ParseResults` containing any results names that + were originally matched, and a single token containing the original + matched text from the input string. So if the expression passed to + :class:`originalTextFor` contains expressions with defined + results names, you must set ``asString`` to ``False`` if you + want to preserve those results name values. Example:: + src = "this is test bold text normal text " for tag in ("b","i"): opener,closer = makeHTMLTags(tag) patt = originalTextFor(opener + SkipTo(closer) + closer) print(patt.searchString(src)[0]) + prints:: + [' bold text '] ['text'] """ @@ -4715,29 +5185,33 @@ def originalTextFor(expr, asString=True): matchExpr.ignoreExprs = expr.ignoreExprs return matchExpr -def ungroup(expr): - """ - Helper to undo pyparsing's default grouping of And expressions, even - if all but one are non-empty. +def ungroup(expr): + """Helper to undo pyparsing's default grouping of And expressions, + even if all but one are non-empty. """ return TokenConverter(expr).setParseAction(lambda t:t[0]) def locatedExpr(expr): - """ - Helper to decorate a returned token with its starting and ending locations in the input string. + """Helper to decorate a returned token with its starting and ending + locations in the input string. 
+ This helper adds the following results names: + - locn_start = location where matched expression begins - locn_end = location where matched expression ends - value = the actual parsed results - Be careful if the input text contains C{} characters, you may want to call - C{L{ParserElement.parseWithTabs}} + Be careful if the input text contains ```` characters, you + may want to call :class:`ParserElement.parseWithTabs` Example:: + wd = Word(alphas) for match in locatedExpr(wd).searchString("ljsdf123lksdjjf123lkkjj1222"): print(match) + prints:: + [[0, 'ljsdf', 5]] [[8, 'lksdjjf', 15]] [[18, 'lkkjj', 23]] @@ -4761,22 +5235,30 @@ _charRange = Group(_singleChar + Suppress("-") + _singleChar) _reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group( OneOrMore( _charRange | _singleChar ) ).setResultsName("body") + "]" def srange(s): - r""" - Helper to easily define string ranges for use in Word construction. Borrows - syntax from regexp '[]' string range definitions:: + r"""Helper to easily define string ranges for use in Word + construction. Borrows syntax from regexp '[]' string range + definitions:: + srange("[0-9]") -> "0123456789" srange("[a-z]") -> "abcdefghijklmnopqrstuvwxyz" srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_" - The input string must be enclosed in []'s, and the returned string is the expanded - character set joined into a single string. - The values enclosed in the []'s may be: + + The input string must be enclosed in []'s, and the returned string + is the expanded character set joined into a single string. The + values enclosed in the []'s may be: + - a single character - - an escaped character with a leading backslash (such as C{\-} or C{\]}) - - an escaped hex character with a leading C{'\x'} (C{\x21}, which is a C{'!'} character) - (C{\0x##} is also supported for backwards compatibility) - - an escaped octal character with a leading C{'\0'} (C{\041}, which is a C{'!'} character) - - a range of any of the above, separated by a dash (C{'a-z'}, etc.) - - any combination of the above (C{'aeiouy'}, C{'a-zA-Z0-9_$'}, etc.) + - an escaped character with a leading backslash (such as ``\-`` + or ``\]``) + - an escaped hex character with a leading ``'\x'`` + (``\x21``, which is a ``'!'`` character) (``\0x##`` + is also supported for backwards compatibility) + - an escaped octal character with a leading ``'\0'`` + (``\041``, which is a ``'!'`` character) + - a range of any of the above, separated by a dash (``'a-z'``, + etc.) + - any combination of the above (``'aeiouy'``, + ``'a-zA-Z0-9_$'``, etc.) """ _expanded = lambda p: p if not isinstance(p,ParseResults) else ''.join(unichr(c) for c in range(ord(p[0]),ord(p[1])+1)) try: @@ -4785,9 +5267,8 @@ def srange(s): return "" def matchOnlyAtCol(n): - """ - Helper method for defining parse actions that require matching at a specific - column in the input text. + """Helper method for defining parse actions that require matching at + a specific column in the input text. """ def verifyCol(strg,locn,toks): if col(locn,strg) != n: @@ -4795,24 +5276,26 @@ def matchOnlyAtCol(n): return verifyCol def replaceWith(replStr): - """ - Helper method for common parse actions that simply return a literal value. Especially - useful when used with C{L{transformString}()}. + """Helper method for common parse actions that simply return + a literal value. Especially useful when used with + :class:`transformString` (). 
Example:: + num = Word(nums).setParseAction(lambda toks: int(toks[0])) na = oneOf("N/A NA").setParseAction(replaceWith(math.nan)) term = na | num - + OneOrMore(term).parseString("324 234 N/A 234") # -> [324, 234, nan, 234] """ return lambda s,l,t: [replStr] def removeQuotes(s,l,t): - """ - Helper parse action for removing quotation marks from parsed quoted strings. + """Helper parse action for removing quotation marks from parsed + quoted strings. Example:: + # by default, quotation marks are included in parsed results quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["'Now is the Winter of our Discontent'"] @@ -4823,18 +5306,20 @@ def removeQuotes(s,l,t): return t[0][1:-1] def tokenMap(func, *args): - """ - Helper to define a parse action by mapping a function to all elements of a ParseResults list.If any additional - args are passed, they are forwarded to the given function as additional arguments after - the token, as in C{hex_integer = Word(hexnums).setParseAction(tokenMap(int, 16))}, which will convert the - parsed data to an integer using base 16. + """Helper to define a parse action by mapping a function to all + elements of a ParseResults list. If any additional args are passed, + they are forwarded to the given function as additional arguments + after the token, as in + ``hex_integer = Word(hexnums).setParseAction(tokenMap(int, 16))``, + which will convert the parsed data to an integer using base 16. + + Example (compare the last to example in :class:`ParserElement.transformString`:: - Example (compare the last to example in L{ParserElement.transformString}:: hex_ints = OneOrMore(Word(hexnums)).setParseAction(tokenMap(int, 16)) hex_ints.runTests(''' 00 11 22 aa FF 0a 0d 1a ''') - + upperword = Word(alphas).setParseAction(tokenMap(str.upper)) OneOrMore(upperword).runTests(''' my kingdom for a horse @@ -4844,7 +5329,9 @@ def tokenMap(func, *args): OneOrMore(wd).setParseAction(' '.join).runTests(''' now is the winter of our discontent made glorious summer by this sun of york ''') + prints:: + 00 11 22 aa FF 0a 0d 1a [0, 17, 34, 170, 255, 10, 13, 26] @@ -4858,7 +5345,7 @@ def tokenMap(func, *args): return [func(tokn, *args) for tokn in t] try: - func_name = getattr(func, '__name__', + func_name = getattr(func, '__name__', getattr(func, '__class__').__name__) except Exception: func_name = str(func) @@ -4867,11 +5354,13 @@ def tokenMap(func, *args): return pa upcaseTokens = tokenMap(lambda t: _ustr(t).upper()) -"""(Deprecated) Helper parse action to convert tokens to upper case. Deprecated in favor of L{pyparsing_common.upcaseTokens}""" +"""(Deprecated) Helper parse action to convert tokens to upper case. +Deprecated in favor of :class:`pyparsing_common.upcaseTokens`""" downcaseTokens = tokenMap(lambda t: _ustr(t).lower()) -"""(Deprecated) Helper parse action to convert tokens to lower case. Deprecated in favor of L{pyparsing_common.downcaseTokens}""" - +"""(Deprecated) Helper parse action to convert tokens to lower case. +Deprecated in favor of :class:`pyparsing_common.downcaseTokens`""" + def _makeTags(tagStr, xml): """Internal helper to construct opening and closing tag expressions, given a tag name""" if isinstance(tagStr,basestring): @@ -4902,55 +5391,63 @@ def _makeTags(tagStr, xml): return openTag, closeTag def makeHTMLTags(tagStr): - """ - Helper to construct opening and closing tag expressions for HTML, given a tag name. Matches - tags in either upper or lower case, attributes with namespaces and with quoted or unquoted values. 
+ """Helper to construct opening and closing tag expressions for HTML, + given a tag name. Matches tags in either upper or lower case, + attributes with namespaces and with quoted or unquoted values. Example:: - text = 'More info at the pyparsing wiki page' - # makeHTMLTags returns pyparsing expressions for the opening and closing tags as a 2-tuple + + text = 'More info at the pyparsing wiki page' + # makeHTMLTags returns pyparsing expressions for the opening and + # closing tags as a 2-tuple a,a_end = makeHTMLTags("A") link_expr = a + SkipTo(a_end)("link_text") + a_end - + for link in link_expr.searchString(text): - # attributes in the tag (like "href" shown here) are also accessible as named results + # attributes in the tag (like "href" shown here) are + # also accessible as named results print(link.link_text, '->', link.href) + prints:: - pyparsing -> http://pyparsing.wikispaces.com + + pyparsing -> https://github.com/pyparsing/pyparsing/wiki """ return _makeTags( tagStr, False ) def makeXMLTags(tagStr): - """ - Helper to construct opening and closing tag expressions for XML, given a tag name. Matches - tags only in the given upper/lower case. + """Helper to construct opening and closing tag expressions for XML, + given a tag name. Matches tags only in the given upper/lower case. - Example: similar to L{makeHTMLTags} + Example: similar to :class:`makeHTMLTags` """ return _makeTags( tagStr, True ) def withAttribute(*args,**attrDict): - """ - Helper to create a validating parse action to be used with start tags created - with C{L{makeXMLTags}} or C{L{makeHTMLTags}}. Use C{withAttribute} to qualify a starting tag - with a required attribute value, to avoid false matches on common tags such as - C{} or C{
}. + """Helper to create a validating parse action to be used with start + tags created with :class:`makeXMLTags` or + :class:`makeHTMLTags`. Use ``withAttribute`` to qualify + a starting tag with a required attribute value, to avoid false + matches on common tags such as ```` or ``
``. - Call C{withAttribute} with a series of attribute names and values. Specify the list - of filter attributes names and values as: - - keyword arguments, as in C{(align="right")}, or - - as an explicit dict with C{**} operator, when an attribute name is also a Python - reserved word, as in C{**{"class":"Customer", "align":"right"}} - - a list of name-value tuples, as in ( ("ns1:class", "Customer"), ("ns2:align","right") ) - For attribute names with a namespace prefix, you must use the second form. Attribute - names are matched insensitive to upper/lower case. - - If just testing for C{class} (with or without a namespace), use C{L{withClass}}. + Call ``withAttribute`` with a series of attribute names and + values. Specify the list of filter attributes names and values as: - To verify that the attribute exists, but without specifying a value, pass - C{withAttribute.ANY_VALUE} as the value. + - keyword arguments, as in ``(align="right")``, or + - as an explicit dict with ``**`` operator, when an attribute + name is also a Python reserved word, as in ``**{"class":"Customer", "align":"right"}`` + - a list of name-value tuples, as in ``(("ns1:class", "Customer"), ("ns2:align","right"))`` + + For attribute names with a namespace prefix, you must use the second + form. Attribute names are matched insensitive to upper/lower case. + + If just testing for ``class`` (with or without a namespace), use + :class:`withClass`. + + To verify that the attribute exists, but without specifying a value, + pass ``withAttribute.ANY_VALUE`` as the value. Example:: + html = '''
Some text @@ -4958,7 +5455,7 @@ def withAttribute(*args,**attrDict):
1,3 2,3 1,1
this has no type
- + ''' div,div_end = makeHTMLTags("div") @@ -4967,13 +5464,15 @@ def withAttribute(*args,**attrDict): grid_expr = div_grid + SkipTo(div | div_end)("body") for grid_header in grid_expr.searchString(html): print(grid_header.body) - + # construct a match with any div tag having a type attribute, regardless of the value div_any_type = div().setParseAction(withAttribute(type=withAttribute.ANY_VALUE)) div_expr = div_any_type + SkipTo(div | div_end)("body") for div_header in div_expr.searchString(html): print(div_header.body) + prints:: + 1 4 0 1 0 1 4 0 1 0 @@ -4995,11 +5494,12 @@ def withAttribute(*args,**attrDict): withAttribute.ANY_VALUE = object() def withClass(classname, namespace=''): - """ - Simplified version of C{L{withAttribute}} when matching on a div class - made - difficult because C{class} is a reserved word in Python. + """Simplified version of :class:`withAttribute` when + matching on a div class - made difficult because ``class`` is + a reserved word in Python. Example:: + html = '''
Some text @@ -5007,84 +5507,96 @@ def withClass(classname, namespace=''):
1,3 2,3 1,1
this <div> has no class
- + ''' div,div_end = makeHTMLTags("div") div_grid = div().setParseAction(withClass("grid")) - + grid_expr = div_grid + SkipTo(div | div_end)("body") for grid_header in grid_expr.searchString(html): print(grid_header.body) - + div_any_type = div().setParseAction(withClass(withAttribute.ANY_VALUE)) div_expr = div_any_type + SkipTo(div | div_end)("body") for div_header in div_expr.searchString(html): print(div_header.body) + prints:: + 1 4 0 1 0 1 4 0 1 0 1,3 2,3 1,1 """ classattr = "%s:class" % namespace if namespace else "class" - return withAttribute(**{classattr : classname}) + return withAttribute(**{classattr : classname}) -opAssoc = _Constants() +opAssoc = SimpleNamespace() opAssoc.LEFT = object() opAssoc.RIGHT = object() def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ): - """ - Helper method for constructing grammars of expressions made up of - operators working in a precedence hierarchy. Operators may be unary or - binary, left- or right-associative. Parse actions can also be attached - to operator expressions. The generated parser will also recognize the use - of parentheses to override operator precedences (see example below). - - Note: if you define a deep operator list, you may see performance issues - when using infixNotation. See L{ParserElement.enablePackrat} for a - mechanism to potentially improve your parser performance. + """Helper method for constructing grammars of expressions made up of + operators working in a precedence hierarchy. Operators may be unary + or binary, left- or right-associative. Parse actions can also be + attached to operator expressions. The generated parser will also + recognize the use of parentheses to override operator precedences + (see example below). + + Note: if you define a deep operator list, you may see performance + issues when using infixNotation. See + :class:`ParserElement.enablePackrat` for a mechanism to potentially + improve your parser performance. Parameters: - - baseExpr - expression representing the most basic element for the nested - - opList - list of tuples, one for each operator precedence level in the - expression grammar; each tuple is of the form - (opExpr, numTerms, rightLeftAssoc, parseAction), where: - - opExpr is the pyparsing expression for the operator; - may also be a string, which will be converted to a Literal; - if numTerms is 3, opExpr is a tuple of two expressions, for the - two operators separating the 3 terms - - numTerms is the number of terms for this operator (must - be 1, 2, or 3) - - rightLeftAssoc is the indicator whether the operator is - right or left associative, using the pyparsing-defined - constants C{opAssoc.RIGHT} and C{opAssoc.LEFT}. + - baseExpr - expression representing the most basic element for the + nested + - opList - list of tuples, one for each operator precedence level + in the expression grammar; each tuple is of the form ``(opExpr, + numTerms, rightLeftAssoc, parseAction)``, where: + + - opExpr is the pyparsing expression for the operator; may also + be a string, which will be converted to a Literal; if numTerms + is 3, opExpr is a tuple of two expressions, for the two + operators separating the 3 terms + - numTerms is the number of terms for this operator (must be 1, + 2, or 3) + - rightLeftAssoc is the indicator whether the operator is right + or left associative, using the pyparsing-defined constants + ``opAssoc.RIGHT`` and ``opAssoc.LEFT``. 
- parseAction is the parse action to be associated with - expressions matching this operator expression (the - parse action tuple member may be omitted); if the parse action - is passed a tuple or list of functions, this is equivalent to - calling C{setParseAction(*fn)} (L{ParserElement.setParseAction}) - - lpar - expression for matching left-parentheses (default=C{Suppress('(')}) - - rpar - expression for matching right-parentheses (default=C{Suppress(')')}) + expressions matching this operator expression (the parse action + tuple member may be omitted); if the parse action is passed + a tuple or list of functions, this is equivalent to calling + ``setParseAction(*fn)`` + (:class:`ParserElement.setParseAction`) + - lpar - expression for matching left-parentheses + (default= ``Suppress('(')``) + - rpar - expression for matching right-parentheses + (default= ``Suppress(')')``) Example:: - # simple example of four-function arithmetic with ints and variable names + + # simple example of four-function arithmetic with ints and + # variable names integer = pyparsing_common.signed_integer - varname = pyparsing_common.identifier - + varname = pyparsing_common.identifier + arith_expr = infixNotation(integer | varname, [ ('-', 1, opAssoc.RIGHT), (oneOf('* /'), 2, opAssoc.LEFT), (oneOf('+ -'), 2, opAssoc.LEFT), ]) - + arith_expr.runTests(''' 5+3*6 (5+3)*6 -2--11 ''', fullDump=False) + prints:: + 5+3*6 [[5, '+', [3, '*', 6]]] @@ -5094,6 +5606,12 @@ def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ): -2--11 [[['-', 2], '-', ['-', 11]]] """ + # captive version of FollowedBy that does not do parse actions or capture results names + class _FB(FollowedBy): + def parseImpl(self, instring, loc, doActions=True): + self.expr.tryParse(instring, loc) + return loc, [] + ret = Forward() lastExpr = baseExpr | ( lpar + ret + rpar ) for i,operDef in enumerate(opList): @@ -5101,19 +5619,20 @@ def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ): termName = "%s term" % opExpr if arity < 3 else "%s%s term" % opExpr if arity == 3: if opExpr is None or len(opExpr) != 2: - raise ValueError("if numterms=3, opExpr must be a tuple or list of two expressions") + raise ValueError( + "if numterms=3, opExpr must be a tuple or list of two expressions") opExpr1, opExpr2 = opExpr thisExpr = Forward().setName(termName) if rightLeftAssoc == opAssoc.LEFT: if arity == 1: - matchExpr = FollowedBy(lastExpr + opExpr) + Group( lastExpr + OneOrMore( opExpr ) ) + matchExpr = _FB(lastExpr + opExpr) + Group( lastExpr + OneOrMore( opExpr ) ) elif arity == 2: if opExpr is not None: - matchExpr = FollowedBy(lastExpr + opExpr + lastExpr) + Group( lastExpr + OneOrMore( opExpr + lastExpr ) ) + matchExpr = _FB(lastExpr + opExpr + lastExpr) + Group( lastExpr + OneOrMore( opExpr + lastExpr ) ) else: - matchExpr = FollowedBy(lastExpr+lastExpr) + Group( lastExpr + OneOrMore(lastExpr) ) + matchExpr = _FB(lastExpr+lastExpr) + Group( lastExpr + OneOrMore(lastExpr) ) elif arity == 3: - matchExpr = FollowedBy(lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr) + \ + matchExpr = _FB(lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr) + \ Group( lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr ) else: raise ValueError("operator must be unary (1), binary (2), or ternary (3)") @@ -5122,14 +5641,14 @@ def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ): # try to avoid LR with this extra test if not isinstance(opExpr, Optional): opExpr = Optional(opExpr) - matchExpr = 
FollowedBy(opExpr.expr + thisExpr) + Group( opExpr + thisExpr ) + matchExpr = _FB(opExpr.expr + thisExpr) + Group( opExpr + thisExpr ) elif arity == 2: if opExpr is not None: - matchExpr = FollowedBy(lastExpr + opExpr + thisExpr) + Group( lastExpr + OneOrMore( opExpr + thisExpr ) ) + matchExpr = _FB(lastExpr + opExpr + thisExpr) + Group( lastExpr + OneOrMore( opExpr + thisExpr ) ) else: - matchExpr = FollowedBy(lastExpr + thisExpr) + Group( lastExpr + OneOrMore( thisExpr ) ) + matchExpr = _FB(lastExpr + thisExpr) + Group( lastExpr + OneOrMore( thisExpr ) ) elif arity == 3: - matchExpr = FollowedBy(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr) + \ + matchExpr = _FB(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr) + \ Group( lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr ) else: raise ValueError("operator must be unary (1), binary (2), or ternary (3)") @@ -5146,7 +5665,8 @@ def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ): return ret operatorPrecedence = infixNotation -"""(Deprecated) Former name of C{L{infixNotation}}, will be dropped in a future release.""" +"""(Deprecated) Former name of :class:`infixNotation`, will be +dropped in a future release.""" dblQuotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"').setName("string enclosed in double quotes") sglQuotedString = Combine(Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("string enclosed in single quotes") @@ -5155,28 +5675,33 @@ quotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+) unicodeString = Combine(_L('u') + quotedString.copy()).setName("unicode string literal") def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.copy()): - """ - Helper method for defining nested lists enclosed in opening and closing - delimiters ("(" and ")" are the default). + """Helper method for defining nested lists enclosed in opening and + closing delimiters ("(" and ")" are the default). Parameters: - - opener - opening character for a nested list (default=C{"("}); can also be a pyparsing expression - - closer - closing character for a nested list (default=C{")"}); can also be a pyparsing expression - - content - expression for items within the nested lists (default=C{None}) - - ignoreExpr - expression for ignoring opening and closing delimiters (default=C{quotedString}) + - opener - opening character for a nested list + (default= ``"("``); can also be a pyparsing expression + - closer - closing character for a nested list + (default= ``")"``); can also be a pyparsing expression + - content - expression for items within the nested lists + (default= ``None``) + - ignoreExpr - expression for ignoring opening and closing + delimiters (default= :class:`quotedString`) - If an expression is not provided for the content argument, the nested - expression will capture all whitespace-delimited content between delimiters - as a list of separate values. + If an expression is not provided for the content argument, the + nested expression will capture all whitespace-delimited content + between delimiters as a list of separate values. - Use the C{ignoreExpr} argument to define expressions that may contain - opening or closing characters that should not be treated as opening - or closing characters for nesting, such as quotedString or a comment - expression. Specify multiple expressions using an C{L{Or}} or C{L{MatchFirst}}. 
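For reference, a minimal sketch of infixNotation (whose implementation above now uses the lighter _FB lookahead), assuming the vendored import path pip._vendor.pyparsing::

    from pip._vendor.pyparsing import infixNotation, oneOf, opAssoc, pyparsing_common

    # '*' and '/' bind tighter than '+' and '-', so the parse tree groups them first
    arith = infixNotation(pyparsing_common.integer,
                          [(oneOf('* /'), 2, opAssoc.LEFT),
                           (oneOf('+ -'), 2, opAssoc.LEFT)])
    print(arith.parseString("1+2*3").asList())   # -> [[1, '+', [2, '*', 3]]]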
- The default is L{quotedString}, but if no expressions are to be ignored, - then pass C{None} for this argument. + Use the ``ignoreExpr`` argument to define expressions that may + contain opening or closing characters that should not be treated as + opening or closing characters for nesting, such as quotedString or + a comment expression. Specify multiple expressions using an + :class:`Or` or :class:`MatchFirst`. The default is + :class:`quotedString`, but if no expressions are to be ignored, then + pass ``None`` for this argument. Example:: + data_type = oneOf("void int short long char float double") decl_data_type = Combine(data_type + Optional(Word('*'))) ident = Word(alphas+'_', alphanums+'_') @@ -5186,29 +5711,31 @@ def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.cop code_body = nestedExpr('{', '}', ignoreExpr=(quotedString | cStyleComment)) - c_function = (decl_data_type("type") + c_function = (decl_data_type("type") + ident("name") - + LPAR + Optional(delimitedList(arg), [])("args") + RPAR + + LPAR + Optional(delimitedList(arg), [])("args") + RPAR + code_body("body")) c_function.ignore(cStyleComment) - + source_code = ''' - int is_odd(int x) { - return (x%2); + int is_odd(int x) { + return (x%2); } - - int dec_to_hex(char hchar) { - if (hchar >= '0' && hchar <= '9') { - return (ord(hchar)-ord('0')); - } else { + + int dec_to_hex(char hchar) { + if (hchar >= '0' && hchar <= '9') { + return (ord(hchar)-ord('0')); + } else { return (10+ord(hchar)-ord('A')); - } + } } ''' for func in c_function.searchString(source_code): print("%(name)s (%(type)s) args: %(args)s" % func) + prints:: + is_odd (int) args: [['int', 'x']] dec_to_hex (int) args: [['char', 'hchar']] """ @@ -5226,7 +5753,7 @@ def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.cop ).setParseAction(lambda t:t[0].strip())) else: if ignoreExpr is not None: - content = (Combine(OneOrMore(~ignoreExpr + + content = (Combine(OneOrMore(~ignoreExpr + ~Literal(opener) + ~Literal(closer) + CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1)) ).setParseAction(lambda t:t[0].strip())) @@ -5245,23 +5772,24 @@ def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.cop return ret def indentedBlock(blockStatementExpr, indentStack, indent=True): - """ - Helper method for defining space-delimited indentation blocks, such as - those used to define block statements in Python source code. + """Helper method for defining space-delimited indentation blocks, + such as those used to define block statements in Python source code. Parameters: - - blockStatementExpr - expression defining syntax of statement that - is repeated within the indented block - - indentStack - list created by caller to manage indentation stack - (multiple statementWithIndentedBlock expressions within a single grammar - should share a common indentStack) - - indent - boolean indicating whether block must be indented beyond the - the current level; set to False for block of left-most statements - (default=C{True}) - A valid block must contain at least one C{blockStatement}. 
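A minimal sketch of nestedExpr with its default '(' and ')' delimiters, assuming the vendored import path pip._vendor.pyparsing::

    from pip._vendor.pyparsing import nestedExpr

    # whitespace-delimited content between the delimiters comes back as nested lists
    print(nestedExpr().parseString("(a (b c) d)").asList())   # -> [['a', ['b', 'c'], 'd']]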
+ - blockStatementExpr - expression defining syntax of statement that + is repeated within the indented block + - indentStack - list created by caller to manage indentation stack + (multiple statementWithIndentedBlock expressions within a single + grammar should share a common indentStack) + - indent - boolean indicating whether block must be indented beyond + the the current level; set to False for block of left-most + statements (default= ``True``) + + A valid block must contain at least one ``blockStatement``. Example:: + data = ''' def A(z): A1 @@ -5302,7 +5830,9 @@ def indentedBlock(blockStatementExpr, indentStack, indent=True): parseTree = module_body.parseString(data) parseTree.pprint() + prints:: + [['def', 'A', ['(', 'z', ')'], @@ -5320,7 +5850,7 @@ def indentedBlock(blockStatementExpr, indentStack, indent=True): 'spam', ['(', 'x', 'y', ')'], ':', - [[['def', 'eggs', ['(', 'z', ')'], ':', [['pass']]]]]]] + [[['def', 'eggs', ['(', 'z', ')'], ':', [['pass']]]]]]] """ def checkPeerIndent(s,l,t): if l >= len(s): return @@ -5370,51 +5900,61 @@ def replaceHTMLEntity(t): # it's easy to get these comment structures wrong - they're very common, so may as well make them available cStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/').setName("C style comment") -"Comment of the form C{/* ... */}" +"Comment of the form ``/* ... */``" htmlComment = Regex(r"").setName("HTML comment") -"Comment of the form C{}" +"Comment of the form ````" restOfLine = Regex(r".*").leaveWhitespace().setName("rest of line") dblSlashComment = Regex(r"//(?:\\\n|[^\n])*").setName("// comment") -"Comment of the form C{// ... (to end of line)}" +"Comment of the form ``// ... (to end of line)``" cppStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/'| dblSlashComment).setName("C++ style comment") -"Comment of either form C{L{cStyleComment}} or C{L{dblSlashComment}}" +"Comment of either form :class:`cStyleComment` or :class:`dblSlashComment`" javaStyleComment = cppStyleComment -"Same as C{L{cppStyleComment}}" +"Same as :class:`cppStyleComment`" pythonStyleComment = Regex(r"#.*").setName("Python style comment") -"Comment of the form C{# ... (to end of line)}" +"Comment of the form ``# ... (to end of line)``" _commasepitem = Combine(OneOrMore(Word(printables, excludeChars=',') + Optional( Word(" \t") + ~Literal(",") + ~LineEnd() ) ) ).streamline().setName("commaItem") commaSeparatedList = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("commaSeparatedList") -"""(Deprecated) Predefined expression of 1 or more printable words or quoted strings, separated by commas. - This expression is deprecated in favor of L{pyparsing_common.comma_separated_list}.""" +"""(Deprecated) Predefined expression of 1 or more printable words or +quoted strings, separated by commas. + +This expression is deprecated in favor of :class:`pyparsing_common.comma_separated_list`. 
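A small sketch of the non-deprecated replacement referred to here, pyparsing_common.comma_separated_list, assuming the vendored import path pip._vendor.pyparsing::

    from pip._vendor.pyparsing import pyparsing_common

    # quoted items keep their quotes; commas inside quotes do not split the list
    items = pyparsing_common.comma_separated_list.parseString("a, 'b, c', d")
    print(items.asList())   # -> ['a', "'b, c'", 'd']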
+""" # some other useful expressions - using lower-case class name since we are really using this as a namespace class pyparsing_common: - """ - Here are some common low-level expressions that may be useful in jump-starting parser development: - - numeric forms (L{integers}, L{reals}, L{scientific notation}) - - common L{programming identifiers} - - network addresses (L{MAC}, L{IPv4}, L{IPv6}) - - ISO8601 L{dates} and L{datetime} - - L{UUID} - - L{comma-separated list} + """Here are some common low-level expressions that may be useful in + jump-starting parser development: + + - numeric forms (:class:`integers`, :class:`reals`, + :class:`scientific notation`) + - common :class:`programming identifiers` + - network addresses (:class:`MAC`, + :class:`IPv4`, :class:`IPv6`) + - ISO8601 :class:`dates` and + :class:`datetime` + - :class:`UUID` + - :class:`comma-separated list` + Parse actions: - - C{L{convertToInteger}} - - C{L{convertToFloat}} - - C{L{convertToDate}} - - C{L{convertToDatetime}} - - C{L{stripHTMLTags}} - - C{L{upcaseTokens}} - - C{L{downcaseTokens}} + + - :class:`convertToInteger` + - :class:`convertToFloat` + - :class:`convertToDate` + - :class:`convertToDatetime` + - :class:`stripHTMLTags` + - :class:`upcaseTokens` + - :class:`downcaseTokens` Example:: + pyparsing_common.number.runTests(''' # any int or real number, returned as the appropriate type 100 @@ -5461,7 +6001,9 @@ class pyparsing_common: # uuid 12345678-1234-5678-1234-567812345678 ''') + prints:: + # any int or real number, returned as the appropriate type 100 [100] @@ -5563,7 +6105,8 @@ class pyparsing_common: """expression that parses a floating point number and returns a float""" sci_real = Regex(r'[+-]?\d+([eE][+-]?\d+|\.\d*([eE][+-]?\d+)?)').setName("real number with scientific notation").setParseAction(convertToFloat) - """expression that parses a floating point number with optional scientific notation and returns a float""" + """expression that parses a floating point number with optional + scientific notation and returns a float""" # streamlining this expression makes the docs nicer-looking number = (sci_real | real | signed_integer).streamline() @@ -5571,12 +6114,12 @@ class pyparsing_common: fnumber = Regex(r'[+-]?\d+\.?\d*([eE][+-]?\d+)?').setName("fnumber").setParseAction(convertToFloat) """any int or real number, returned as float""" - + identifier = Word(alphas+'_', alphanums+'_').setName("identifier") """typical code identifier (leading alpha or '_', followed by 0 or more alphas, nums, or '_')""" - + ipv4_address = Regex(r'(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})(\.(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})){3}').setName("IPv4 address") - "IPv4 address (C{0.0.0.0 - 255.255.255.255})" + "IPv4 address (``0.0.0.0 - 255.255.255.255``)" _ipv6_part = Regex(r'[0-9a-fA-F]{1,4}').setName("hex_integer") _full_ipv6_address = (_ipv6_part + (':' + _ipv6_part)*7).setName("full IPv6 address") @@ -5585,7 +6128,7 @@ class pyparsing_common: _mixed_ipv6_address = ("::ffff:" + ipv4_address).setName("mixed IPv6 address") ipv6_address = Combine((_full_ipv6_address | _mixed_ipv6_address | _short_ipv6_address).setName("IPv6 address")).setName("IPv6 address") "IPv6 address (long, short, or mixed form)" - + mac_address = Regex(r'[0-9a-fA-F]{2}([:.-])[0-9a-fA-F]{2}(?:\1[0-9a-fA-F]{2}){4}').setName("MAC address") "MAC address xx:xx:xx:xx:xx (may also have '-' or '.' 
delimiters)" @@ -5595,13 +6138,16 @@ class pyparsing_common: Helper to create a parse action for converting parsed date string to Python datetime.date Params - - - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%d"}) + - fmt - format to be passed to datetime.strptime (default= ``"%Y-%m-%d"``) Example:: + date_expr = pyparsing_common.iso8601_date.copy() date_expr.setParseAction(pyparsing_common.convertToDate()) print(date_expr.parseString("1999-12-31")) + prints:: + [datetime.date(1999, 12, 31)] """ def cvt_fn(s,l,t): @@ -5613,17 +6159,20 @@ class pyparsing_common: @staticmethod def convertToDatetime(fmt="%Y-%m-%dT%H:%M:%S.%f"): - """ - Helper to create a parse action for converting parsed datetime string to Python datetime.datetime + """Helper to create a parse action for converting parsed + datetime string to Python datetime.datetime Params - - - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%dT%H:%M:%S.%f"}) + - fmt - format to be passed to datetime.strptime (default= ``"%Y-%m-%dT%H:%M:%S.%f"``) Example:: + dt_expr = pyparsing_common.iso8601_datetime.copy() dt_expr.setParseAction(pyparsing_common.convertToDatetime()) print(dt_expr.parseString("1999-12-31T23:59:59.999")) + prints:: + [datetime.datetime(1999, 12, 31, 23, 59, 59, 999000)] """ def cvt_fn(s,l,t): @@ -5634,31 +6183,34 @@ class pyparsing_common: return cvt_fn iso8601_date = Regex(r'(?P\d{4})(?:-(?P\d\d)(?:-(?P\d\d))?)?').setName("ISO8601 date") - "ISO8601 date (C{yyyy-mm-dd})" + "ISO8601 date (``yyyy-mm-dd``)" iso8601_datetime = Regex(r'(?P\d{4})-(?P\d\d)-(?P\d\d)[T ](?P\d\d):(?P\d\d)(:(?P\d\d(\.\d*)?)?)?(?PZ|[+-]\d\d:?\d\d)?').setName("ISO8601 datetime") - "ISO8601 datetime (C{yyyy-mm-ddThh:mm:ss.s(Z|+-00:00)}) - trailing seconds, milliseconds, and timezone optional; accepts separating C{'T'} or C{' '}" + "ISO8601 datetime (``yyyy-mm-ddThh:mm:ss.s(Z|+-00:00)``) - trailing seconds, milliseconds, and timezone optional; accepts separating ``'T'`` or ``' '``" uuid = Regex(r'[0-9a-fA-F]{8}(-[0-9a-fA-F]{4}){3}-[0-9a-fA-F]{12}').setName("UUID") - "UUID (C{xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx})" + "UUID (``xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx``)" _html_stripper = anyOpenTag.suppress() | anyCloseTag.suppress() @staticmethod def stripHTMLTags(s, l, tokens): - """ - Parse action to remove HTML tags from web page HTML source + """Parse action to remove HTML tags from web page HTML source Example:: - # strip HTML links from normal text - text = 'More info at the
pyparsing wiki page' + + # strip HTML links from normal text + text = 'More info at the pyparsing wiki page' td,td_end = makeHTMLTags("TD") table_text = td + SkipTo(td_end).setParseAction(pyparsing_common.stripHTMLTags)("body") + td_end - - print(table_text.parseString(text).body) # -> 'More info at the pyparsing wiki page' + print(table_text.parseString(text).body) + + Prints:: + + More info at the pyparsing wiki page """ return pyparsing_common._html_stripper.transformString(tokens[0]) - _commasepitem = Combine(OneOrMore(~Literal(",") + ~LineEnd() + Word(printables, excludeChars=',') + _commasepitem = Combine(OneOrMore(~Literal(",") + ~LineEnd() + Word(printables, excludeChars=',') + Optional( White(" \t") ) ) ).streamline().setName("commaItem") comma_separated_list = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("comma separated list") """Predefined expression of 1 or more printable words or quoted strings, separated by commas.""" @@ -5670,6 +6222,164 @@ class pyparsing_common: """Parse action to convert tokens to lower case.""" +class _lazyclassproperty(object): + def __init__(self, fn): + self.fn = fn + self.__doc__ = fn.__doc__ + self.__name__ = fn.__name__ + + def __get__(self, obj, cls): + if cls is None: + cls = type(obj) + if not hasattr(cls, '_intern') or any(cls._intern is getattr(superclass, '_intern', []) for superclass in cls.__mro__[1:]): + cls._intern = {} + attrname = self.fn.__name__ + if attrname not in cls._intern: + cls._intern[attrname] = self.fn(cls) + return cls._intern[attrname] + + +class unicode_set(object): + """ + A set of Unicode characters, for language-specific strings for + ``alphas``, ``nums``, ``alphanums``, and ``printables``. + A unicode_set is defined by a list of ranges in the Unicode character + set, in a class attribute ``_ranges``, such as:: + + _ranges = [(0x0020, 0x007e), (0x00a0, 0x00ff),] + + A unicode set can also be defined using multiple inheritance of other unicode sets:: + + class CJK(Chinese, Japanese, Korean): + pass + """ + _ranges = [] + + @classmethod + def _get_chars_for_ranges(cls): + ret = [] + for cc in cls.__mro__: + if cc is unicode_set: + break + for rr in cc._ranges: + ret.extend(range(rr[0], rr[-1]+1)) + return [unichr(c) for c in sorted(set(ret))] + + @_lazyclassproperty + def printables(cls): + "all non-whitespace characters in this range" + return u''.join(filterfalse(unicode.isspace, cls._get_chars_for_ranges())) + + @_lazyclassproperty + def alphas(cls): + "all alphabetic characters in this range" + return u''.join(filter(unicode.isalpha, cls._get_chars_for_ranges())) + + @_lazyclassproperty + def nums(cls): + "all numeric digit characters in this range" + return u''.join(filter(unicode.isdigit, cls._get_chars_for_ranges())) + + @_lazyclassproperty + def alphanums(cls): + "all alphanumeric characters in this range" + return cls.alphas + cls.nums + + +class pyparsing_unicode(unicode_set): + """ + A namespace class for defining common language unicode_sets. 
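A minimal sketch of how these unicode_set classes can feed Word once the language sets below are defined, assuming the vendored import path pip._vendor.pyparsing (Greek is one of the sets added here)::

    from pip._vendor.pyparsing import Word, pyparsing_unicode as ppu

    # ppu.Greek.alphas is built lazily from the Greek code-point ranges
    greek_word = Word(ppu.Greek.alphas)
    print(greek_word.parseString(u"αβγ δε").asList())   # -> ['αβγ']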
+ """ + _ranges = [(32, sys.maxunicode)] + + class Latin1(unicode_set): + "Unicode set for Latin-1 Unicode Character Range" + _ranges = [(0x0020, 0x007e), (0x00a0, 0x00ff),] + + class LatinA(unicode_set): + "Unicode set for Latin-A Unicode Character Range" + _ranges = [(0x0100, 0x017f),] + + class LatinB(unicode_set): + "Unicode set for Latin-B Unicode Character Range" + _ranges = [(0x0180, 0x024f),] + + class Greek(unicode_set): + "Unicode set for Greek Unicode Character Ranges" + _ranges = [ + (0x0370, 0x03ff), (0x1f00, 0x1f15), (0x1f18, 0x1f1d), (0x1f20, 0x1f45), (0x1f48, 0x1f4d), + (0x1f50, 0x1f57), (0x1f59,), (0x1f5b,), (0x1f5d,), (0x1f5f, 0x1f7d), (0x1f80, 0x1fb4), (0x1fb6, 0x1fc4), + (0x1fc6, 0x1fd3), (0x1fd6, 0x1fdb), (0x1fdd, 0x1fef), (0x1ff2, 0x1ff4), (0x1ff6, 0x1ffe), + ] + + class Cyrillic(unicode_set): + "Unicode set for Cyrillic Unicode Character Range" + _ranges = [(0x0400, 0x04ff)] + + class Chinese(unicode_set): + "Unicode set for Chinese Unicode Character Range" + _ranges = [(0x4e00, 0x9fff), (0x3000, 0x303f), ] + + class Japanese(unicode_set): + "Unicode set for Japanese Unicode Character Range, combining Kanji, Hiragana, and Katakana ranges" + _ranges = [ ] + + class Kanji(unicode_set): + "Unicode set for Kanji Unicode Character Range" + _ranges = [(0x4E00, 0x9Fbf), (0x3000, 0x303f), ] + + class Hiragana(unicode_set): + "Unicode set for Hiragana Unicode Character Range" + _ranges = [(0x3040, 0x309f), ] + + class Katakana(unicode_set): + "Unicode set for Katakana Unicode Character Range" + _ranges = [(0x30a0, 0x30ff), ] + + class Korean(unicode_set): + "Unicode set for Korean Unicode Character Range" + _ranges = [(0xac00, 0xd7af), (0x1100, 0x11ff), (0x3130, 0x318f), (0xa960, 0xa97f), (0xd7b0, 0xd7ff), (0x3000, 0x303f), ] + + class CJK(Chinese, Japanese, Korean): + "Unicode set for combined Chinese, Japanese, and Korean (CJK) Unicode Character Range" + pass + + class Thai(unicode_set): + "Unicode set for Thai Unicode Character Range" + _ranges = [(0x0e01, 0x0e3a), (0x0e3f, 0x0e5b), ] + + class Arabic(unicode_set): + "Unicode set for Arabic Unicode Character Range" + _ranges = [(0x0600, 0x061b), (0x061e, 0x06ff), (0x0700, 0x077f), ] + + class Hebrew(unicode_set): + "Unicode set for Hebrew Unicode Character Range" + _ranges = [(0x0590, 0x05ff), ] + + class Devanagari(unicode_set): + "Unicode set for Devanagari Unicode Character Range" + _ranges = [(0x0900, 0x097f), (0xa8e0, 0xa8ff)] + +pyparsing_unicode.Japanese._ranges = (pyparsing_unicode.Japanese.Kanji._ranges + + pyparsing_unicode.Japanese.Hiragana._ranges + + pyparsing_unicode.Japanese.Katakana._ranges) + +# define ranges in language character sets +if PY_3: + setattr(pyparsing_unicode, "العربية", pyparsing_unicode.Arabic) + setattr(pyparsing_unicode, "中文", pyparsing_unicode.Chinese) + setattr(pyparsing_unicode, "кириллица", pyparsing_unicode.Cyrillic) + setattr(pyparsing_unicode, "Ελληνικά", pyparsing_unicode.Greek) + setattr(pyparsing_unicode, "עִברִית", pyparsing_unicode.Hebrew) + setattr(pyparsing_unicode, "日本語", pyparsing_unicode.Japanese) + setattr(pyparsing_unicode.Japanese, "漢字", pyparsing_unicode.Japanese.Kanji) + setattr(pyparsing_unicode.Japanese, "カタカナ", pyparsing_unicode.Japanese.Katakana) + setattr(pyparsing_unicode.Japanese, "ひらがな", pyparsing_unicode.Japanese.Hiragana) + setattr(pyparsing_unicode, "한국어", pyparsing_unicode.Korean) + setattr(pyparsing_unicode, "ไทย", pyparsing_unicode.Thai) + setattr(pyparsing_unicode, "देवनागरी", pyparsing_unicode.Devanagari) + + if __name__ == "__main__": selectToken = 
CaselessLiteral("select") @@ -5683,7 +6393,7 @@ if __name__ == "__main__": tableName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens) tableNameList = Group(delimitedList(tableName)).setName("tables") - + simpleSQL = selectToken("command") + columnSpec("columns") + fromToken + tableNameList("tables") # demo runTests method, including embedded comments in test string diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 2d2684d0a..1e514fa79 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -10,7 +10,7 @@ msgpack==0.5.6 packaging==18.0 pep517==0.3 progress==1.4 -pyparsing==2.2.1 +pyparsing==2.3.1 pytoml==0.1.19 requests==2.19.1 certifi==2018.11.29 From 0071af5c8e165c3b74dea30a9bfe99d4520e07cf Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Fri, 18 Jan 2019 10:44:33 +0530 Subject: [PATCH 36/69] Update six to 1.12.0 --- news/six.vendor | 1 + src/pip/_vendor/six.LICENSE | 2 +- src/pip/_vendor/six.py | 65 +++++++++++++++++++++++++++++++++++-- src/pip/_vendor/vendor.txt | 2 +- 4 files changed, 66 insertions(+), 4 deletions(-) create mode 100644 news/six.vendor diff --git a/news/six.vendor b/news/six.vendor new file mode 100644 index 000000000..ca2d82131 --- /dev/null +++ b/news/six.vendor @@ -0,0 +1 @@ +Update six to 1.12.0 diff --git a/src/pip/_vendor/six.LICENSE b/src/pip/_vendor/six.LICENSE index f3068bfd9..365d10741 100644 --- a/src/pip/_vendor/six.LICENSE +++ b/src/pip/_vendor/six.LICENSE @@ -1,4 +1,4 @@ -Copyright (c) 2010-2017 Benjamin Peterson +Copyright (c) 2010-2018 Benjamin Peterson Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in diff --git a/src/pip/_vendor/six.py b/src/pip/_vendor/six.py index 6bf4fd381..89b2188fd 100644 --- a/src/pip/_vendor/six.py +++ b/src/pip/_vendor/six.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010-2017 Benjamin Peterson +# Copyright (c) 2010-2018 Benjamin Peterson # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal @@ -29,7 +29,7 @@ import sys import types __author__ = "Benjamin Peterson " -__version__ = "1.11.0" +__version__ = "1.12.0" # Useful for very coarse version differentiation. @@ -844,10 +844,71 @@ def add_metaclass(metaclass): orig_vars.pop(slots_var) orig_vars.pop('__dict__', None) orig_vars.pop('__weakref__', None) + if hasattr(cls, '__qualname__'): + orig_vars['__qualname__'] = cls.__qualname__ return metaclass(cls.__name__, cls.__bases__, orig_vars) return wrapper +def ensure_binary(s, encoding='utf-8', errors='strict'): + """Coerce **s** to six.binary_type. + + For Python 2: + - `unicode` -> encoded to `str` + - `str` -> `str` + + For Python 3: + - `str` -> encoded to `bytes` + - `bytes` -> `bytes` + """ + if isinstance(s, text_type): + return s.encode(encoding, errors) + elif isinstance(s, binary_type): + return s + else: + raise TypeError("not expecting type '%s'" % type(s)) + + +def ensure_str(s, encoding='utf-8', errors='strict'): + """Coerce *s* to `str`. 
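A quick sketch of the ensure_binary / ensure_text / ensure_str helpers added in this six update, assuming the vendored import path pip._vendor.six::

    from pip._vendor import six

    # round-tripping between text and bytes behaves the same on Python 2 and 3
    assert six.ensure_binary(u"caf\xe9") == b"caf\xc3\xa9"    # text -> UTF-8 bytes
    assert six.ensure_text(b"caf\xc3\xa9") == u"caf\xe9"      # bytes -> text
    assert six.ensure_str("plain") == "plain"                 # native str passes through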
+ + For Python 2: + - `unicode` -> encoded to `str` + - `str` -> `str` + + For Python 3: + - `str` -> `str` + - `bytes` -> decoded to `str` + """ + if not isinstance(s, (text_type, binary_type)): + raise TypeError("not expecting type '%s'" % type(s)) + if PY2 and isinstance(s, text_type): + s = s.encode(encoding, errors) + elif PY3 and isinstance(s, binary_type): + s = s.decode(encoding, errors) + return s + + +def ensure_text(s, encoding='utf-8', errors='strict'): + """Coerce *s* to six.text_type. + + For Python 2: + - `unicode` -> `unicode` + - `str` -> `unicode` + + For Python 3: + - `str` -> `str` + - `bytes` -> decoded to `str` + """ + if isinstance(s, binary_type): + return s.decode(encoding, errors) + elif isinstance(s, text_type): + return s + else: + raise TypeError("not expecting type '%s'" % type(s)) + + + def python_2_unicode_compatible(klass): """ A decorator that defines __unicode__ and __str__ methods under Python 2. diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 1e514fa79..7027c182c 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -19,5 +19,5 @@ requests==2.19.1 urllib3==1.24.1 retrying==1.3.3 setuptools==40.4.3 -six==1.11.0 +six==1.12.0 webencodings==0.5.1 From c42f16d7078ff0042f6ad5186c43e68fe5223727 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Fri, 18 Jan 2019 10:45:18 +0530 Subject: [PATCH 37/69] Update distlib to 0.2.8 --- news/distlib.vendor | 1 + src/pip/_vendor/distlib/__init__.py | 2 +- src/pip/_vendor/distlib/database.py | 7 +++++-- src/pip/_vendor/distlib/locators.py | 11 +++++++---- src/pip/_vendor/distlib/metadata.py | 9 ++++++--- src/pip/_vendor/distlib/scripts.py | 6 ++++-- src/pip/_vendor/distlib/util.py | 15 ++++++++------- src/pip/_vendor/distlib/wheel.py | 8 ++++++-- src/pip/_vendor/vendor.txt | 2 +- 9 files changed, 39 insertions(+), 22 deletions(-) create mode 100644 news/distlib.vendor diff --git a/news/distlib.vendor b/news/distlib.vendor new file mode 100644 index 000000000..8ed44bd74 --- /dev/null +++ b/news/distlib.vendor @@ -0,0 +1 @@ +Update distlib to 0.2.8 diff --git a/src/pip/_vendor/distlib/__init__.py b/src/pip/_vendor/distlib/__init__.py index d4aab453a..a786b4d3b 100644 --- a/src/pip/_vendor/distlib/__init__.py +++ b/src/pip/_vendor/distlib/__init__.py @@ -6,7 +6,7 @@ # import logging -__version__ = '0.2.7' +__version__ = '0.2.8' class DistlibException(Exception): pass diff --git a/src/pip/_vendor/distlib/database.py b/src/pip/_vendor/distlib/database.py index a19905e21..b13cdac92 100644 --- a/src/pip/_vendor/distlib/database.py +++ b/src/pip/_vendor/distlib/database.py @@ -20,7 +20,8 @@ import zipimport from . 
import DistlibException, resources from .compat import StringIO from .version import get_scheme, UnsupportedVersionError -from .metadata import Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME +from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME, + LEGACY_METADATA_FILENAME) from .util import (parse_requirement, cached_property, parse_name_and_version, read_exports, write_exports, CSVReader, CSVWriter) @@ -132,7 +133,9 @@ class DistributionPath(object): if not r or r.path in seen: continue if self._include_dist and entry.endswith(DISTINFO_EXT): - possible_filenames = [METADATA_FILENAME, WHEEL_METADATA_FILENAME] + possible_filenames = [METADATA_FILENAME, + WHEEL_METADATA_FILENAME, + LEGACY_METADATA_FILENAME] for metadata_filename in possible_filenames: metadata_path = posixpath.join(entry, metadata_filename) pydist = finder.find(metadata_path) diff --git a/src/pip/_vendor/distlib/locators.py b/src/pip/_vendor/distlib/locators.py index 11d26361c..5c655c3e5 100644 --- a/src/pip/_vendor/distlib/locators.py +++ b/src/pip/_vendor/distlib/locators.py @@ -255,7 +255,9 @@ class Locator(object): if path.endswith('.whl'): try: wheel = Wheel(path) - if is_compatible(wheel, self.wheel_tags): + if not is_compatible(wheel, self.wheel_tags): + logger.debug('Wheel not compatible: %s', path) + else: if project_name is None: include = True else: @@ -613,6 +615,7 @@ class SimpleScrapingLocator(Locator): # as it is for coordinating our internal threads - the ones created # in _prepare_threads. self._gplock = threading.RLock() + self.platform_check = False # See issue #112 def _prepare_threads(self): """ @@ -658,8 +661,8 @@ class SimpleScrapingLocator(Locator): del self.result return result - platform_dependent = re.compile(r'\b(linux-(i\d86|x86_64|arm\w+)|' - r'win(32|-amd64)|macosx-?\d+)\b', re.I) + platform_dependent = re.compile(r'\b(linux_(i\d86|x86_64|arm\w+)|' + r'win(32|_amd64)|macosx_?\d+)\b', re.I) def _is_platform_dependent(self, url): """ @@ -677,7 +680,7 @@ class SimpleScrapingLocator(Locator): Note that the return value isn't actually used other than as a boolean value. """ - if self._is_platform_dependent(url): + if self.platform_check and self._is_platform_dependent(url): info = None else: info = self.convert_url_to_download_info(url, self.project_name) diff --git a/src/pip/_vendor/distlib/metadata.py b/src/pip/_vendor/distlib/metadata.py index 6d6470fff..77eed7f96 100644 --- a/src/pip/_vendor/distlib/metadata.py +++ b/src/pip/_vendor/distlib/metadata.py @@ -91,7 +91,9 @@ _426_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', _426_MARKERS = ('Private-Version', 'Provides-Extra', 'Obsoleted-By', 'Setup-Requires-Dist', 'Extension') -_566_FIELDS = _426_FIELDS + ('Description-Content-Type',) +# See issue #106: Sometimes 'Requires' occurs wrongly in the metadata. 
Include +# it in the tuple literal below to allow it (for now) +_566_FIELDS = _426_FIELDS + ('Description-Content-Type', 'Requires') _566_MARKERS = ('Description-Content-Type',) @@ -377,8 +379,8 @@ class LegacyMetadata(object): value = msg[field] if value is not None and value != 'UNKNOWN': self.set(field, value) - logger.debug('Attempting to set metadata for %s', self) - self.set_metadata_version() + # logger.debug('Attempting to set metadata for %s', self) + # self.set_metadata_version() def write(self, filepath, skip_unknown=False): """Write the metadata fields to filepath.""" @@ -648,6 +650,7 @@ class LegacyMetadata(object): METADATA_FILENAME = 'pydist.json' WHEEL_METADATA_FILENAME = 'metadata.json' +LEGACY_METADATA_FILENAME = 'METADATA' class Metadata(object): diff --git a/src/pip/_vendor/distlib/scripts.py b/src/pip/_vendor/distlib/scripts.py index 0b7c3d0b3..8e22cb916 100644 --- a/src/pip/_vendor/distlib/scripts.py +++ b/src/pip/_vendor/distlib/scripts.py @@ -236,8 +236,10 @@ class ScriptMaker(object): def _write_script(self, names, shebang, script_bytes, filenames, ext): use_launcher = self.add_launchers and self._is_nt linesep = os.linesep.encode('utf-8') + if not shebang.endswith(linesep): + shebang += linesep if not use_launcher: - script_bytes = shebang + linesep + script_bytes + script_bytes = shebang + script_bytes else: # pragma: no cover if ext == 'py': launcher = self._get_launcher('t') @@ -247,7 +249,7 @@ class ScriptMaker(object): with ZipFile(stream, 'w') as zf: zf.writestr('__main__.py', script_bytes) zip_data = stream.getvalue() - script_bytes = launcher + shebang + linesep + zip_data + script_bytes = launcher + shebang + zip_data for name in names: outname = os.path.join(self.target_dir, name) if use_launcher: # pragma: no cover diff --git a/src/pip/_vendor/distlib/util.py b/src/pip/_vendor/distlib/util.py index 0b14a93b3..9d4bfd3be 100644 --- a/src/pip/_vendor/distlib/util.py +++ b/src/pip/_vendor/distlib/util.py @@ -545,16 +545,14 @@ class FileOperator(object): def write_binary_file(self, path, data): self.ensure_dir(os.path.dirname(path)) if not self.dry_run: + if os.path.exists(path): + os.remove(path) with open(path, 'wb') as f: f.write(data) self.record_as_written(path) def write_text_file(self, path, data, encoding): - self.ensure_dir(os.path.dirname(path)) - if not self.dry_run: - with open(path, 'wb') as f: - f.write(data.encode(encoding)) - self.record_as_written(path) + self.write_binary_file(path, data.encode(encoding)) def set_mode(self, bits, mask, files): if os.name == 'posix' or (os.name == 'java' and os._name == 'posix'): @@ -582,7 +580,7 @@ class FileOperator(object): if self.record: self.dirs_created.add(path) - def byte_compile(self, path, optimize=False, force=False, prefix=None): + def byte_compile(self, path, optimize=False, force=False, prefix=None, hashed_invalidation=False): dpath = cache_from_source(path, not optimize) logger.info('Byte-compiling %s to %s', path, dpath) if not self.dry_run: @@ -592,7 +590,10 @@ class FileOperator(object): else: assert path.startswith(prefix) diagpath = path[len(prefix):] - py_compile.compile(path, dpath, diagpath, True) # raise error + compile_kwargs = {} + if hashed_invalidation and hasattr(py_compile, 'PycInvalidationMode'): + compile_kwargs['invalidation_mode'] = py_compile.PycInvalidationMode.CHECKED_HASH + py_compile.compile(path, dpath, diagpath, True, **compile_kwargs) # raise error self.record_as_written(dpath) return dpath diff --git a/src/pip/_vendor/distlib/wheel.py 
b/src/pip/_vendor/distlib/wheel.py index 77372235c..b04bfaefe 100644 --- a/src/pip/_vendor/distlib/wheel.py +++ b/src/pip/_vendor/distlib/wheel.py @@ -442,7 +442,9 @@ class Wheel(object): This can be used to issue any warnings to raise any exceptions. If kwarg ``lib_only`` is True, only the purelib/platlib files are installed, and the headers, scripts, data and dist-info metadata are - not written. + not written. If kwarg ``bytecode_hashed_invalidation`` is True, written + bytecode will try to use file-hash based invalidation (PEP-552) on + supported interpreter versions (CPython 2.7+). The return value is a :class:`InstalledDistribution` instance unless ``options.lib_only`` is True, in which case the return value is ``None``. @@ -451,6 +453,7 @@ class Wheel(object): dry_run = maker.dry_run warner = kwargs.get('warner') lib_only = kwargs.get('lib_only', False) + bc_hashed_invalidation = kwargs.get('bytecode_hashed_invalidation', False) pathname = os.path.join(self.dirname, self.filename) name_ver = '%s-%s' % (self.name, self.version) @@ -557,7 +560,8 @@ class Wheel(object): '%s' % outfile) if bc and outfile.endswith('.py'): try: - pyc = fileop.byte_compile(outfile) + pyc = fileop.byte_compile(outfile, + hashed_invalidation=bc_hashed_invalidation) outfiles.append(pyc) except Exception: # Don't give up if byte-compilation fails, diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 7027c182c..c7df334e4 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -1,7 +1,7 @@ appdirs==1.4.3 CacheControl==0.12.5 colorama==0.4.1 -distlib==0.2.7 +distlib==0.2.8 distro==1.3.0 html5lib==1.0.1 ipaddress==1.0.22 # Only needed on 2.6 and 2.7 From f0de122e3d336acb3851b49e29c0856a5666569a Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Fri, 18 Jan 2019 10:45:57 +0530 Subject: [PATCH 38/69] Update idna to 2.8 --- news/idna.vendor | 1 + src/pip/_vendor/idna/LICENSE.rst | 2 +- src/pip/_vendor/idna/core.py | 5 +- src/pip/_vendor/idna/idnadata.py | 122 ++++- src/pip/_vendor/idna/package_data.py | 2 +- src/pip/_vendor/idna/uts46data.py | 660 ++++++++++++++------------- src/pip/_vendor/vendor.txt | 2 +- 7 files changed, 452 insertions(+), 342 deletions(-) create mode 100644 news/idna.vendor mode change 100644 => 100755 src/pip/_vendor/idna/LICENSE.rst diff --git a/news/idna.vendor b/news/idna.vendor new file mode 100644 index 000000000..565c55efb --- /dev/null +++ b/news/idna.vendor @@ -0,0 +1 @@ +Update idna to 2.8 diff --git a/src/pip/_vendor/idna/LICENSE.rst b/src/pip/_vendor/idna/LICENSE.rst old mode 100644 new mode 100755 index 9d38815eb..3ee64fba2 --- a/src/pip/_vendor/idna/LICENSE.rst +++ b/src/pip/_vendor/idna/LICENSE.rst @@ -1,7 +1,7 @@ License ------- -Copyright (c) 2013-2017, Kim Davies. All rights reserved. +Copyright (c) 2013-2018, Kim Davies. All rights reserved. 
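For context on the distlib byte-compilation change above: on CPython 3.7+ the new hashed_invalidation flag amounts to requesting PEP 552 hash-based .pyc files. A rough standalone sketch (the example.py path is hypothetical)::

    import py_compile

    # write a CHECKED_HASH .pyc instead of a timestamp-based one, when supported
    if hasattr(py_compile, "PycInvalidationMode"):
        py_compile.compile("example.py", doraise=True,
                           invalidation_mode=py_compile.PycInvalidationMode.CHECKED_HASH)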
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: diff --git a/src/pip/_vendor/idna/core.py b/src/pip/_vendor/idna/core.py index 090c2c18d..104624ad2 100755 --- a/src/pip/_vendor/idna/core.py +++ b/src/pip/_vendor/idna/core.py @@ -267,10 +267,7 @@ def alabel(label): try: label = label.encode('ascii') - try: - ulabel(label) - except IDNAError: - raise IDNAError('The label {0} is not a valid A-label'.format(label)) + ulabel(label) if not valid_label_length(label): raise IDNAError('Label too long') return label diff --git a/src/pip/_vendor/idna/idnadata.py b/src/pip/_vendor/idna/idnadata.py index 17974e233..a80c959d2 100755 --- a/src/pip/_vendor/idna/idnadata.py +++ b/src/pip/_vendor/idna/idnadata.py @@ -1,6 +1,6 @@ # This file is automatically generated by tools/idna-data -__version__ = "10.0.0" +__version__ = "11.0.0" scripts = { 'Greek': ( 0x37000000374, @@ -49,7 +49,7 @@ scripts = { 0x30210000302a, 0x30380000303c, 0x340000004db6, - 0x4e0000009feb, + 0x4e0000009ff0, 0xf9000000fa6e, 0xfa700000fada, 0x200000002a6d7, @@ -62,7 +62,7 @@ scripts = { 'Hebrew': ( 0x591000005c8, 0x5d0000005eb, - 0x5f0000005f5, + 0x5ef000005f5, 0xfb1d0000fb37, 0xfb380000fb3d, 0xfb3e0000fb3f, @@ -248,6 +248,7 @@ joining_types = { 0x6fb: 68, 0x6fc: 68, 0x6ff: 68, + 0x70f: 84, 0x710: 82, 0x712: 68, 0x713: 68, @@ -522,6 +523,7 @@ joining_types = { 0x1875: 68, 0x1876: 68, 0x1877: 68, + 0x1878: 68, 0x1880: 85, 0x1881: 85, 0x1882: 85, @@ -690,6 +692,70 @@ joining_types = { 0x10bad: 68, 0x10bae: 68, 0x10baf: 85, + 0x10d00: 76, + 0x10d01: 68, + 0x10d02: 68, + 0x10d03: 68, + 0x10d04: 68, + 0x10d05: 68, + 0x10d06: 68, + 0x10d07: 68, + 0x10d08: 68, + 0x10d09: 68, + 0x10d0a: 68, + 0x10d0b: 68, + 0x10d0c: 68, + 0x10d0d: 68, + 0x10d0e: 68, + 0x10d0f: 68, + 0x10d10: 68, + 0x10d11: 68, + 0x10d12: 68, + 0x10d13: 68, + 0x10d14: 68, + 0x10d15: 68, + 0x10d16: 68, + 0x10d17: 68, + 0x10d18: 68, + 0x10d19: 68, + 0x10d1a: 68, + 0x10d1b: 68, + 0x10d1c: 68, + 0x10d1d: 68, + 0x10d1e: 68, + 0x10d1f: 68, + 0x10d20: 68, + 0x10d21: 68, + 0x10d22: 82, + 0x10d23: 68, + 0x10f30: 68, + 0x10f31: 68, + 0x10f32: 68, + 0x10f33: 82, + 0x10f34: 68, + 0x10f35: 68, + 0x10f36: 68, + 0x10f37: 68, + 0x10f38: 68, + 0x10f39: 68, + 0x10f3a: 68, + 0x10f3b: 68, + 0x10f3c: 68, + 0x10f3d: 68, + 0x10f3e: 68, + 0x10f3f: 68, + 0x10f40: 68, + 0x10f41: 68, + 0x10f42: 68, + 0x10f43: 68, + 0x10f44: 68, + 0x10f45: 85, + 0x10f51: 68, + 0x10f52: 68, + 0x10f53: 68, + 0x10f54: 82, + 0x110bd: 85, + 0x110cd: 85, 0x1e900: 68, 0x1e901: 68, 0x1e902: 68, @@ -1034,14 +1100,15 @@ codepoint_classes = { 0x52d0000052e, 0x52f00000530, 0x5590000055a, - 0x56100000587, + 0x56000000587, + 0x58800000589, 0x591000005be, 0x5bf000005c0, 0x5c1000005c3, 0x5c4000005c6, 0x5c7000005c8, 0x5d0000005eb, - 0x5f0000005f3, + 0x5ef000005f3, 0x6100000061b, 0x62000000640, 0x64100000660, @@ -1054,12 +1121,13 @@ codepoint_classes = { 0x7100000074b, 0x74d000007b2, 0x7c0000007f6, + 0x7fd000007fe, 0x8000000082e, 0x8400000085c, 0x8600000086b, 0x8a0000008b5, 0x8b6000008be, - 0x8d4000008e2, + 0x8d3000008e2, 0x8e300000958, 0x96000000964, 0x96600000970, @@ -1077,6 +1145,7 @@ codepoint_classes = { 0x9e0000009e4, 0x9e6000009f2, 0x9fc000009fd, + 0x9fe000009ff, 0xa0100000a04, 0xa0500000a0b, 0xa0f00000a11, @@ -1136,8 +1205,7 @@ codepoint_classes = { 0xbd000000bd1, 0xbd700000bd8, 0xbe600000bf0, - 0xc0000000c04, - 0xc0500000c0d, + 0xc0000000c0d, 0xc0e00000c11, 0xc1200000c29, 0xc2a00000c3a, @@ -1276,7 +1344,7 @@ codepoint_classes = { 
0x17dc000017de, 0x17e0000017ea, 0x18100000181a, - 0x182000001878, + 0x182000001879, 0x1880000018ab, 0x18b0000018f6, 0x19000000191f, @@ -1544,11 +1612,11 @@ codepoint_classes = { 0x309d0000309f, 0x30a1000030fb, 0x30fc000030ff, - 0x31050000312f, + 0x310500003130, 0x31a0000031bb, 0x31f000003200, 0x340000004db6, - 0x4e0000009feb, + 0x4e0000009ff0, 0xa0000000a48d, 0xa4d00000a4fe, 0xa5000000a60d, @@ -1655,8 +1723,10 @@ codepoint_classes = { 0xa7a50000a7a6, 0xa7a70000a7a8, 0xa7a90000a7aa, + 0xa7af0000a7b0, 0xa7b50000a7b6, 0xa7b70000a7b8, + 0xa7b90000a7ba, 0xa7f70000a7f8, 0xa7fa0000a828, 0xa8400000a874, @@ -1664,8 +1734,7 @@ codepoint_classes = { 0xa8d00000a8da, 0xa8e00000a8f8, 0xa8fb0000a8fc, - 0xa8fd0000a8fe, - 0xa9000000a92e, + 0xa8fd0000a92e, 0xa9300000a954, 0xa9800000a9c1, 0xa9cf0000a9da, @@ -1743,7 +1812,7 @@ codepoint_classes = { 0x10a0500010a07, 0x10a0c00010a14, 0x10a1500010a18, - 0x10a1900010a34, + 0x10a1900010a36, 0x10a3800010a3b, 0x10a3f00010a40, 0x10a6000010a7d, @@ -1756,6 +1825,11 @@ codepoint_classes = { 0x10b8000010b92, 0x10c0000010c49, 0x10cc000010cf3, + 0x10d0000010d28, + 0x10d3000010d3a, + 0x10f0000010f1d, + 0x10f2700010f28, + 0x10f3000010f51, 0x1100000011047, 0x1106600011070, 0x1107f000110bb, @@ -1763,10 +1837,11 @@ codepoint_classes = { 0x110f0000110fa, 0x1110000011135, 0x1113600011140, + 0x1114400011147, 0x1115000011174, 0x1117600011177, 0x11180000111c5, - 0x111ca000111cd, + 0x111c9000111cd, 0x111d0000111db, 0x111dc000111dd, 0x1120000011212, @@ -1786,7 +1861,7 @@ codepoint_classes = { 0x1132a00011331, 0x1133200011334, 0x113350001133a, - 0x1133c00011345, + 0x1133b00011345, 0x1134700011349, 0x1134b0001134e, 0x1135000011351, @@ -1796,6 +1871,7 @@ codepoint_classes = { 0x1137000011375, 0x114000001144b, 0x114500001145a, + 0x1145e0001145f, 0x11480000114c6, 0x114c7000114c8, 0x114d0000114da, @@ -1807,15 +1883,17 @@ codepoint_classes = { 0x116500001165a, 0x11680000116b8, 0x116c0000116ca, - 0x117000001171a, + 0x117000001171b, 0x1171d0001172c, 0x117300001173a, + 0x118000001183b, 0x118c0000118ea, 0x118ff00011900, 0x11a0000011a3f, 0x11a4700011a48, 0x11a5000011a84, 0x11a8600011a9a, + 0x11a9d00011a9e, 0x11ac000011af9, 0x11c0000011c09, 0x11c0a00011c37, @@ -1831,6 +1909,13 @@ codepoint_classes = { 0x11d3c00011d3e, 0x11d3f00011d48, 0x11d5000011d5a, + 0x11d6000011d66, + 0x11d6700011d69, + 0x11d6a00011d8f, + 0x11d9000011d92, + 0x11d9300011d99, + 0x11da000011daa, + 0x11ee000011ef7, 0x120000001239a, 0x1248000012544, 0x130000001342f, @@ -1845,11 +1930,12 @@ codepoint_classes = { 0x16b5000016b5a, 0x16b6300016b78, 0x16b7d00016b90, + 0x16e6000016e80, 0x16f0000016f45, 0x16f5000016f7f, 0x16f8f00016fa0, 0x16fe000016fe2, - 0x17000000187ed, + 0x17000000187f2, 0x1880000018af3, 0x1b0000001b11f, 0x1b1700001b2fc, diff --git a/src/pip/_vendor/idna/package_data.py b/src/pip/_vendor/idna/package_data.py index 39c192bae..257e89893 100755 --- a/src/pip/_vendor/idna/package_data.py +++ b/src/pip/_vendor/idna/package_data.py @@ -1,2 +1,2 @@ -__version__ = '2.7' +__version__ = '2.8' diff --git a/src/pip/_vendor/idna/uts46data.py b/src/pip/_vendor/idna/uts46data.py index 79731cb9e..a68ed4c0e 100755 --- a/src/pip/_vendor/idna/uts46data.py +++ b/src/pip/_vendor/idna/uts46data.py @@ -4,7 +4,7 @@ """IDNA Mapping Table from UTS46.""" -__version__ = "10.0.0" +__version__ = "11.0.0" def _seg_0(): return [ (0x0, '3'), @@ -1029,11 +1029,8 @@ def _seg_9(): (0x556, 'M', u'ֆ'), (0x557, 'X'), (0x559, 'V'), - (0x560, 'X'), - (0x561, 'V'), (0x587, 'M', u'եւ'), - (0x588, 'X'), - (0x589, 'V'), + (0x588, 'V'), (0x58B, 'X'), (0x58D, 'V'), 
(0x590, 'X'), @@ -1041,15 +1038,15 @@ def _seg_9(): (0x5C8, 'X'), (0x5D0, 'V'), (0x5EB, 'X'), - (0x5F0, 'V'), + (0x5EF, 'V'), (0x5F5, 'X'), + (0x606, 'V'), + (0x61C, 'X'), + (0x61E, 'V'), ] def _seg_10(): return [ - (0x606, 'V'), - (0x61C, 'X'), - (0x61E, 'V'), (0x675, 'M', u'اٴ'), (0x676, 'M', u'وٴ'), (0x677, 'M', u'ۇٴ'), @@ -1064,7 +1061,7 @@ def _seg_10(): (0x7B2, 'X'), (0x7C0, 'V'), (0x7FB, 'X'), - (0x800, 'V'), + (0x7FD, 'V'), (0x82E, 'X'), (0x830, 'V'), (0x83F, 'X'), @@ -1078,7 +1075,7 @@ def _seg_10(): (0x8B5, 'X'), (0x8B6, 'V'), (0x8BE, 'X'), - (0x8D4, 'V'), + (0x8D3, 'V'), (0x8E2, 'X'), (0x8E3, 'V'), (0x958, 'M', u'क़'), @@ -1118,7 +1115,7 @@ def _seg_10(): (0x9E0, 'V'), (0x9E4, 'X'), (0x9E6, 'V'), - (0x9FE, 'X'), + (0x9FF, 'X'), (0xA01, 'V'), (0xA04, 'X'), (0xA05, 'V'), @@ -1147,19 +1144,19 @@ def _seg_10(): (0xA4E, 'X'), (0xA51, 'V'), (0xA52, 'X'), + (0xA59, 'M', u'ਖ਼'), + (0xA5A, 'M', u'ਗ਼'), + (0xA5B, 'M', u'ਜ਼'), ] def _seg_11(): return [ - (0xA59, 'M', u'ਖ਼'), - (0xA5A, 'M', u'ਗ਼'), - (0xA5B, 'M', u'ਜ਼'), (0xA5C, 'V'), (0xA5D, 'X'), (0xA5E, 'M', u'ਫ਼'), (0xA5F, 'X'), (0xA66, 'V'), - (0xA76, 'X'), + (0xA77, 'X'), (0xA81, 'V'), (0xA84, 'X'), (0xA85, 'V'), @@ -1250,16 +1247,14 @@ def _seg_11(): (0xBE6, 'V'), (0xBFB, 'X'), (0xC00, 'V'), - (0xC04, 'X'), - ] - -def _seg_12(): - return [ - (0xC05, 'V'), (0xC0D, 'X'), (0xC0E, 'V'), (0xC11, 'X'), (0xC12, 'V'), + ] + +def _seg_12(): + return [ (0xC29, 'X'), (0xC2A, 'V'), (0xC3A, 'X'), @@ -1278,8 +1273,6 @@ def _seg_12(): (0xC66, 'V'), (0xC70, 'X'), (0xC78, 'V'), - (0xC84, 'X'), - (0xC85, 'V'), (0xC8D, 'X'), (0xC8E, 'V'), (0xC91, 'X'), @@ -1355,10 +1348,6 @@ def _seg_12(): (0xE83, 'X'), (0xE84, 'V'), (0xE85, 'X'), - ] - -def _seg_13(): - return [ (0xE87, 'V'), (0xE89, 'X'), (0xE8A, 'V'), @@ -1366,6 +1355,10 @@ def _seg_13(): (0xE8D, 'V'), (0xE8E, 'X'), (0xE94, 'V'), + ] + +def _seg_13(): + return [ (0xE98, 'X'), (0xE99, 'V'), (0xEA0, 'X'), @@ -1459,10 +1452,6 @@ def _seg_13(): (0x124E, 'X'), (0x1250, 'V'), (0x1257, 'X'), - ] - -def _seg_14(): - return [ (0x1258, 'V'), (0x1259, 'X'), (0x125A, 'V'), @@ -1470,6 +1459,10 @@ def _seg_14(): (0x1260, 'V'), (0x1289, 'X'), (0x128A, 'V'), + ] + +def _seg_14(): + return [ (0x128E, 'X'), (0x1290, 'V'), (0x12B1, 'X'), @@ -1538,7 +1531,7 @@ def _seg_14(): (0x1810, 'V'), (0x181A, 'X'), (0x1820, 'V'), - (0x1878, 'X'), + (0x1879, 'X'), (0x1880, 'V'), (0x18AB, 'X'), (0x18B0, 'V'), @@ -1563,10 +1556,6 @@ def _seg_14(): (0x19DB, 'X'), (0x19DE, 'V'), (0x1A1C, 'X'), - ] - -def _seg_15(): - return [ (0x1A1E, 'V'), (0x1A5F, 'X'), (0x1A60, 'V'), @@ -1574,6 +1563,10 @@ def _seg_15(): (0x1A7F, 'V'), (0x1A8A, 'X'), (0x1A90, 'V'), + ] + +def _seg_15(): + return [ (0x1A9A, 'X'), (0x1AA0, 'V'), (0x1AAE, 'X'), @@ -1667,10 +1660,6 @@ def _seg_15(): (0x1D68, 'M', u'ρ'), (0x1D69, 'M', u'φ'), (0x1D6A, 'M', u'χ'), - ] - -def _seg_16(): - return [ (0x1D6B, 'V'), (0x1D78, 'M', u'н'), (0x1D79, 'V'), @@ -1678,6 +1667,10 @@ def _seg_16(): (0x1D9C, 'M', u'c'), (0x1D9D, 'M', u'ɕ'), (0x1D9E, 'M', u'ð'), + ] + +def _seg_16(): + return [ (0x1D9F, 'M', u'ɜ'), (0x1DA0, 'M', u'f'), (0x1DA1, 'M', u'ɟ'), @@ -1771,10 +1764,6 @@ def _seg_16(): (0x1E36, 'M', u'ḷ'), (0x1E37, 'V'), (0x1E38, 'M', u'ḹ'), - ] - -def _seg_17(): - return [ (0x1E39, 'V'), (0x1E3A, 'M', u'ḻ'), (0x1E3B, 'V'), @@ -1782,6 +1771,10 @@ def _seg_17(): (0x1E3D, 'V'), (0x1E3E, 'M', u'ḿ'), (0x1E3F, 'V'), + ] + +def _seg_17(): + return [ (0x1E40, 'M', u'ṁ'), (0x1E41, 'V'), (0x1E42, 'M', u'ṃ'), @@ -1875,10 +1868,6 @@ def _seg_17(): (0x1E9F, 'V'), (0x1EA0, 'M', u'ạ'), (0x1EA1, 
'V'), - ] - -def _seg_18(): - return [ (0x1EA2, 'M', u'ả'), (0x1EA3, 'V'), (0x1EA4, 'M', u'ấ'), @@ -1886,6 +1875,10 @@ def _seg_18(): (0x1EA6, 'M', u'ầ'), (0x1EA7, 'V'), (0x1EA8, 'M', u'ẩ'), + ] + +def _seg_18(): + return [ (0x1EA9, 'V'), (0x1EAA, 'M', u'ẫ'), (0x1EAB, 'V'), @@ -1979,10 +1972,6 @@ def _seg_18(): (0x1F0B, 'M', u'ἃ'), (0x1F0C, 'M', u'ἄ'), (0x1F0D, 'M', u'ἅ'), - ] - -def _seg_19(): - return [ (0x1F0E, 'M', u'ἆ'), (0x1F0F, 'M', u'ἇ'), (0x1F10, 'V'), @@ -1990,6 +1979,10 @@ def _seg_19(): (0x1F18, 'M', u'ἐ'), (0x1F19, 'M', u'ἑ'), (0x1F1A, 'M', u'ἒ'), + ] + +def _seg_19(): + return [ (0x1F1B, 'M', u'ἓ'), (0x1F1C, 'M', u'ἔ'), (0x1F1D, 'M', u'ἕ'), @@ -2083,10 +2076,6 @@ def _seg_19(): (0x1F9A, 'M', u'ἢι'), (0x1F9B, 'M', u'ἣι'), (0x1F9C, 'M', u'ἤι'), - ] - -def _seg_20(): - return [ (0x1F9D, 'M', u'ἥι'), (0x1F9E, 'M', u'ἦι'), (0x1F9F, 'M', u'ἧι'), @@ -2094,6 +2083,10 @@ def _seg_20(): (0x1FA1, 'M', u'ὡι'), (0x1FA2, 'M', u'ὢι'), (0x1FA3, 'M', u'ὣι'), + ] + +def _seg_20(): + return [ (0x1FA4, 'M', u'ὤι'), (0x1FA5, 'M', u'ὥι'), (0x1FA6, 'M', u'ὦι'), @@ -2187,10 +2180,6 @@ def _seg_20(): (0x2024, 'X'), (0x2027, 'V'), (0x2028, 'X'), - ] - -def _seg_21(): - return [ (0x202F, '3', u' '), (0x2030, 'V'), (0x2033, 'M', u'′′'), @@ -2198,6 +2187,10 @@ def _seg_21(): (0x2035, 'V'), (0x2036, 'M', u'‵‵'), (0x2037, 'M', u'‵‵‵'), + ] + +def _seg_21(): + return [ (0x2038, 'V'), (0x203C, '3', u'!!'), (0x203D, 'V'), @@ -2291,10 +2284,6 @@ def _seg_21(): (0x2120, 'M', u'sm'), (0x2121, 'M', u'tel'), (0x2122, 'M', u'tm'), - ] - -def _seg_22(): - return [ (0x2123, 'V'), (0x2124, 'M', u'z'), (0x2125, 'V'), @@ -2302,6 +2291,10 @@ def _seg_22(): (0x2127, 'V'), (0x2128, 'M', u'z'), (0x2129, 'V'), + ] + +def _seg_22(): + return [ (0x212A, 'M', u'k'), (0x212B, 'M', u'å'), (0x212C, 'M', u'b'), @@ -2395,10 +2388,6 @@ def _seg_22(): (0x226E, '3'), (0x2270, 'V'), (0x2329, 'M', u'〈'), - ] - -def _seg_23(): - return [ (0x232A, 'M', u'〉'), (0x232B, 'V'), (0x2427, 'X'), @@ -2406,6 +2395,10 @@ def _seg_23(): (0x244B, 'X'), (0x2460, 'M', u'1'), (0x2461, 'M', u'2'), + ] + +def _seg_23(): + return [ (0x2462, 'M', u'3'), (0x2463, 'M', u'4'), (0x2464, 'M', u'5'), @@ -2499,10 +2492,6 @@ def _seg_23(): (0x24CF, 'M', u'z'), (0x24D0, 'M', u'a'), (0x24D1, 'M', u'b'), - ] - -def _seg_24(): - return [ (0x24D2, 'M', u'c'), (0x24D3, 'M', u'd'), (0x24D4, 'M', u'e'), @@ -2510,6 +2499,10 @@ def _seg_24(): (0x24D6, 'M', u'g'), (0x24D7, 'M', u'h'), (0x24D8, 'M', u'i'), + ] + +def _seg_24(): + return [ (0x24D9, 'M', u'j'), (0x24DA, 'M', u'k'), (0x24DB, 'M', u'l'), @@ -2541,13 +2534,9 @@ def _seg_24(): (0x2B76, 'V'), (0x2B96, 'X'), (0x2B98, 'V'), - (0x2BBA, 'X'), - (0x2BBD, 'V'), (0x2BC9, 'X'), (0x2BCA, 'V'), - (0x2BD3, 'X'), - (0x2BEC, 'V'), - (0x2BF0, 'X'), + (0x2BFF, 'X'), (0x2C00, 'M', u'ⰰ'), (0x2C01, 'M', u'ⰱ'), (0x2C02, 'M', u'ⰲ'), @@ -2603,10 +2592,6 @@ def _seg_24(): (0x2C62, 'M', u'ɫ'), (0x2C63, 'M', u'ᵽ'), (0x2C64, 'M', u'ɽ'), - ] - -def _seg_25(): - return [ (0x2C65, 'V'), (0x2C67, 'M', u'ⱨ'), (0x2C68, 'V'), @@ -2618,6 +2603,10 @@ def _seg_25(): (0x2C6E, 'M', u'ɱ'), (0x2C6F, 'M', u'ɐ'), (0x2C70, 'M', u'ɒ'), + ] + +def _seg_25(): + return [ (0x2C71, 'V'), (0x2C72, 'M', u'ⱳ'), (0x2C73, 'V'), @@ -2707,10 +2696,6 @@ def _seg_25(): (0x2CCD, 'V'), (0x2CCE, 'M', u'ⳏ'), (0x2CCF, 'V'), - ] - -def _seg_26(): - return [ (0x2CD0, 'M', u'ⳑ'), (0x2CD1, 'V'), (0x2CD2, 'M', u'ⳓ'), @@ -2722,6 +2707,10 @@ def _seg_26(): (0x2CD8, 'M', u'ⳙ'), (0x2CD9, 'V'), (0x2CDA, 'M', u'ⳛ'), + ] + +def _seg_26(): + return [ (0x2CDB, 'V'), (0x2CDC, 'M', u'ⳝ'), 
(0x2CDD, 'V'), @@ -2768,7 +2757,7 @@ def _seg_26(): (0x2DD8, 'V'), (0x2DDF, 'X'), (0x2DE0, 'V'), - (0x2E4A, 'X'), + (0x2E4F, 'X'), (0x2E80, 'V'), (0x2E9A, 'X'), (0x2E9B, 'V'), @@ -2811,10 +2800,6 @@ def _seg_26(): (0x2F20, 'M', u'士'), (0x2F21, 'M', u'夂'), (0x2F22, 'M', u'夊'), - ] - -def _seg_27(): - return [ (0x2F23, 'M', u'夕'), (0x2F24, 'M', u'大'), (0x2F25, 'M', u'女'), @@ -2826,6 +2811,10 @@ def _seg_27(): (0x2F2B, 'M', u'尸'), (0x2F2C, 'M', u'屮'), (0x2F2D, 'M', u'山'), + ] + +def _seg_27(): + return [ (0x2F2E, 'M', u'巛'), (0x2F2F, 'M', u'工'), (0x2F30, 'M', u'己'), @@ -2915,10 +2904,6 @@ def _seg_27(): (0x2F84, 'M', u'至'), (0x2F85, 'M', u'臼'), (0x2F86, 'M', u'舌'), - ] - -def _seg_28(): - return [ (0x2F87, 'M', u'舛'), (0x2F88, 'M', u'舟'), (0x2F89, 'M', u'艮'), @@ -2930,6 +2915,10 @@ def _seg_28(): (0x2F8F, 'M', u'行'), (0x2F90, 'M', u'衣'), (0x2F91, 'M', u'襾'), + ] + +def _seg_28(): + return [ (0x2F92, 'M', u'見'), (0x2F93, 'M', u'角'), (0x2F94, 'M', u'言'), @@ -3019,13 +3008,9 @@ def _seg_28(): (0x309F, 'M', u'より'), (0x30A0, 'V'), (0x30FF, 'M', u'コト'), - ] - -def _seg_29(): - return [ (0x3100, 'X'), (0x3105, 'V'), - (0x312F, 'X'), + (0x3130, 'X'), (0x3131, 'M', u'ᄀ'), (0x3132, 'M', u'ᄁ'), (0x3133, 'M', u'ᆪ'), @@ -3034,6 +3019,10 @@ def _seg_29(): (0x3136, 'M', u'ᆭ'), (0x3137, 'M', u'ᄃ'), (0x3138, 'M', u'ᄄ'), + ] + +def _seg_29(): + return [ (0x3139, 'M', u'ᄅ'), (0x313A, 'M', u'ᆰ'), (0x313B, 'M', u'ᆱ'), @@ -3123,10 +3112,6 @@ def _seg_29(): (0x318F, 'X'), (0x3190, 'V'), (0x3192, 'M', u'一'), - ] - -def _seg_30(): - return [ (0x3193, 'M', u'二'), (0x3194, 'M', u'三'), (0x3195, 'M', u'四'), @@ -3138,6 +3123,10 @@ def _seg_30(): (0x319B, 'M', u'丙'), (0x319C, 'M', u'丁'), (0x319D, 'M', u'天'), + ] + +def _seg_30(): + return [ (0x319E, 'M', u'地'), (0x319F, 'M', u'人'), (0x31A0, 'V'), @@ -3227,10 +3216,6 @@ def _seg_30(): (0x3256, 'M', u'26'), (0x3257, 'M', u'27'), (0x3258, 'M', u'28'), - ] - -def _seg_31(): - return [ (0x3259, 'M', u'29'), (0x325A, 'M', u'30'), (0x325B, 'M', u'31'), @@ -3242,6 +3227,10 @@ def _seg_31(): (0x3261, 'M', u'ᄂ'), (0x3262, 'M', u'ᄃ'), (0x3263, 'M', u'ᄅ'), + ] + +def _seg_31(): + return [ (0x3264, 'M', u'ᄆ'), (0x3265, 'M', u'ᄇ'), (0x3266, 'M', u'ᄉ'), @@ -3331,10 +3320,6 @@ def _seg_31(): (0x32BA, 'M', u'45'), (0x32BB, 'M', u'46'), (0x32BC, 'M', u'47'), - ] - -def _seg_32(): - return [ (0x32BD, 'M', u'48'), (0x32BE, 'M', u'49'), (0x32BF, 'M', u'50'), @@ -3346,6 +3331,10 @@ def _seg_32(): (0x32C5, 'M', u'6月'), (0x32C6, 'M', u'7月'), (0x32C7, 'M', u'8月'), + ] + +def _seg_32(): + return [ (0x32C8, 'M', u'9月'), (0x32C9, 'M', u'10月'), (0x32CA, 'M', u'11月'), @@ -3435,10 +3424,6 @@ def _seg_32(): (0x331E, 'M', u'コーポ'), (0x331F, 'M', u'サイクル'), (0x3320, 'M', u'サンチーム'), - ] - -def _seg_33(): - return [ (0x3321, 'M', u'シリング'), (0x3322, 'M', u'センチ'), (0x3323, 'M', u'セント'), @@ -3450,6 +3435,10 @@ def _seg_33(): (0x3329, 'M', u'ノット'), (0x332A, 'M', u'ハイツ'), (0x332B, 'M', u'パーセント'), + ] + +def _seg_33(): + return [ (0x332C, 'M', u'パーツ'), (0x332D, 'M', u'バーレル'), (0x332E, 'M', u'ピアストル'), @@ -3539,10 +3528,6 @@ def _seg_33(): (0x3382, 'M', u'μa'), (0x3383, 'M', u'ma'), (0x3384, 'M', u'ka'), - ] - -def _seg_34(): - return [ (0x3385, 'M', u'kb'), (0x3386, 'M', u'mb'), (0x3387, 'M', u'gb'), @@ -3554,6 +3539,10 @@ def _seg_34(): (0x338D, 'M', u'μg'), (0x338E, 'M', u'mg'), (0x338F, 'M', u'kg'), + ] + +def _seg_34(): + return [ (0x3390, 'M', u'hz'), (0x3391, 'M', u'khz'), (0x3392, 'M', u'mhz'), @@ -3643,10 +3632,6 @@ def _seg_34(): (0x33E6, 'M', u'7日'), (0x33E7, 'M', u'8日'), (0x33E8, 'M', u'9日'), - ] - -def 
_seg_35(): - return [ (0x33E9, 'M', u'10日'), (0x33EA, 'M', u'11日'), (0x33EB, 'M', u'12日'), @@ -3658,6 +3643,10 @@ def _seg_35(): (0x33F1, 'M', u'18日'), (0x33F2, 'M', u'19日'), (0x33F3, 'M', u'20日'), + ] + +def _seg_35(): + return [ (0x33F4, 'M', u'21日'), (0x33F5, 'M', u'22日'), (0x33F6, 'M', u'23日'), @@ -3673,7 +3662,7 @@ def _seg_35(): (0x3400, 'V'), (0x4DB6, 'X'), (0x4DC0, 'V'), - (0x9FEB, 'X'), + (0x9FF0, 'X'), (0xA000, 'V'), (0xA48D, 'X'), (0xA490, 'V'), @@ -3747,10 +3736,6 @@ def _seg_35(): (0xA692, 'M', u'ꚓ'), (0xA693, 'V'), (0xA694, 'M', u'ꚕ'), - ] - -def _seg_36(): - return [ (0xA695, 'V'), (0xA696, 'M', u'ꚗ'), (0xA697, 'V'), @@ -3762,6 +3747,10 @@ def _seg_36(): (0xA69D, 'M', u'ь'), (0xA69E, 'V'), (0xA6F8, 'X'), + ] + +def _seg_36(): + return [ (0xA700, 'V'), (0xA722, 'M', u'ꜣ'), (0xA723, 'V'), @@ -3851,10 +3840,6 @@ def _seg_36(): (0xA780, 'M', u'ꞁ'), (0xA781, 'V'), (0xA782, 'M', u'ꞃ'), - ] - -def _seg_37(): - return [ (0xA783, 'V'), (0xA784, 'M', u'ꞅ'), (0xA785, 'V'), @@ -3866,6 +3851,10 @@ def _seg_37(): (0xA78E, 'V'), (0xA790, 'M', u'ꞑ'), (0xA791, 'V'), + ] + +def _seg_37(): + return [ (0xA792, 'M', u'ꞓ'), (0xA793, 'V'), (0xA796, 'M', u'ꞗ'), @@ -3893,7 +3882,7 @@ def _seg_37(): (0xA7AC, 'M', u'ɡ'), (0xA7AD, 'M', u'ɬ'), (0xA7AE, 'M', u'ɪ'), - (0xA7AF, 'X'), + (0xA7AF, 'V'), (0xA7B0, 'M', u'ʞ'), (0xA7B1, 'M', u'ʇ'), (0xA7B2, 'M', u'ʝ'), @@ -3903,6 +3892,8 @@ def _seg_37(): (0xA7B6, 'M', u'ꞷ'), (0xA7B7, 'V'), (0xA7B8, 'X'), + (0xA7B9, 'V'), + (0xA7BA, 'X'), (0xA7F7, 'V'), (0xA7F8, 'M', u'ħ'), (0xA7F9, 'M', u'œ'), @@ -3917,8 +3908,6 @@ def _seg_37(): (0xA8CE, 'V'), (0xA8DA, 'X'), (0xA8E0, 'V'), - (0xA8FE, 'X'), - (0xA900, 'V'), (0xA954, 'X'), (0xA95F, 'V'), (0xA97D, 'X'), @@ -3955,10 +3944,6 @@ def _seg_37(): (0xAB5F, 'M', u'ꭒ'), (0xAB60, 'V'), (0xAB66, 'X'), - ] - -def _seg_38(): - return [ (0xAB70, 'M', u'Ꭰ'), (0xAB71, 'M', u'Ꭱ'), (0xAB72, 'M', u'Ꭲ'), @@ -3970,6 +3955,10 @@ def _seg_38(): (0xAB78, 'M', u'Ꭸ'), (0xAB79, 'M', u'Ꭹ'), (0xAB7A, 'M', u'Ꭺ'), + ] + +def _seg_38(): + return [ (0xAB7B, 'M', u'Ꭻ'), (0xAB7C, 'M', u'Ꭼ'), (0xAB7D, 'M', u'Ꭽ'), @@ -4059,10 +4048,6 @@ def _seg_38(): (0xF907, 'M', u'龜'), (0xF909, 'M', u'契'), (0xF90A, 'M', u'金'), - ] - -def _seg_39(): - return [ (0xF90B, 'M', u'喇'), (0xF90C, 'M', u'奈'), (0xF90D, 'M', u'懶'), @@ -4074,6 +4059,10 @@ def _seg_39(): (0xF913, 'M', u'邏'), (0xF914, 'M', u'樂'), (0xF915, 'M', u'洛'), + ] + +def _seg_39(): + return [ (0xF916, 'M', u'烙'), (0xF917, 'M', u'珞'), (0xF918, 'M', u'落'), @@ -4163,10 +4152,6 @@ def _seg_39(): (0xF96C, 'M', u'塞'), (0xF96D, 'M', u'省'), (0xF96E, 'M', u'葉'), - ] - -def _seg_40(): - return [ (0xF96F, 'M', u'說'), (0xF970, 'M', u'殺'), (0xF971, 'M', u'辰'), @@ -4178,6 +4163,10 @@ def _seg_40(): (0xF977, 'M', u'亮'), (0xF978, 'M', u'兩'), (0xF979, 'M', u'凉'), + ] + +def _seg_40(): + return [ (0xF97A, 'M', u'梁'), (0xF97B, 'M', u'糧'), (0xF97C, 'M', u'良'), @@ -4267,10 +4256,6 @@ def _seg_40(): (0xF9D0, 'M', u'類'), (0xF9D1, 'M', u'六'), (0xF9D2, 'M', u'戮'), - ] - -def _seg_41(): - return [ (0xF9D3, 'M', u'陸'), (0xF9D4, 'M', u'倫'), (0xF9D5, 'M', u'崙'), @@ -4282,6 +4267,10 @@ def _seg_41(): (0xF9DB, 'M', u'率'), (0xF9DC, 'M', u'隆'), (0xF9DD, 'M', u'利'), + ] + +def _seg_41(): + return [ (0xF9DE, 'M', u'吏'), (0xF9DF, 'M', u'履'), (0xF9E0, 'M', u'易'), @@ -4371,10 +4360,6 @@ def _seg_41(): (0xFA39, 'M', u'塀'), (0xFA3A, 'M', u'墨'), (0xFA3B, 'M', u'層'), - ] - -def _seg_42(): - return [ (0xFA3C, 'M', u'屮'), (0xFA3D, 'M', u'悔'), (0xFA3E, 'M', u'慨'), @@ -4386,6 +4371,10 @@ def _seg_42(): (0xFA44, 'M', u'梅'), (0xFA45, 'M', u'海'), 
(0xFA46, 'M', u'渚'), + ] + +def _seg_42(): + return [ (0xFA47, 'M', u'漢'), (0xFA48, 'M', u'煮'), (0xFA49, 'M', u'爫'), @@ -4475,10 +4464,6 @@ def _seg_42(): (0xFA9F, 'M', u'犯'), (0xFAA0, 'M', u'猪'), (0xFAA1, 'M', u'瑱'), - ] - -def _seg_43(): - return [ (0xFAA2, 'M', u'甆'), (0xFAA3, 'M', u'画'), (0xFAA4, 'M', u'瘝'), @@ -4490,6 +4475,10 @@ def _seg_43(): (0xFAAA, 'M', u'着'), (0xFAAB, 'M', u'磌'), (0xFAAC, 'M', u'窱'), + ] + +def _seg_43(): + return [ (0xFAAD, 'M', u'節'), (0xFAAE, 'M', u'类'), (0xFAAF, 'M', u'絛'), @@ -4579,10 +4568,6 @@ def _seg_43(): (0xFB38, 'M', u'טּ'), (0xFB39, 'M', u'יּ'), (0xFB3A, 'M', u'ךּ'), - ] - -def _seg_44(): - return [ (0xFB3B, 'M', u'כּ'), (0xFB3C, 'M', u'לּ'), (0xFB3D, 'X'), @@ -4594,6 +4579,10 @@ def _seg_44(): (0xFB43, 'M', u'ףּ'), (0xFB44, 'M', u'פּ'), (0xFB45, 'X'), + ] + +def _seg_44(): + return [ (0xFB46, 'M', u'צּ'), (0xFB47, 'M', u'קּ'), (0xFB48, 'M', u'רּ'), @@ -4683,10 +4672,6 @@ def _seg_44(): (0xFC19, 'M', u'خج'), (0xFC1A, 'M', u'خح'), (0xFC1B, 'M', u'خم'), - ] - -def _seg_45(): - return [ (0xFC1C, 'M', u'سج'), (0xFC1D, 'M', u'سح'), (0xFC1E, 'M', u'سخ'), @@ -4698,6 +4683,10 @@ def _seg_45(): (0xFC24, 'M', u'ضخ'), (0xFC25, 'M', u'ضم'), (0xFC26, 'M', u'طح'), + ] + +def _seg_45(): + return [ (0xFC27, 'M', u'طم'), (0xFC28, 'M', u'ظم'), (0xFC29, 'M', u'عج'), @@ -4787,10 +4776,6 @@ def _seg_45(): (0xFC7D, 'M', u'في'), (0xFC7E, 'M', u'قى'), (0xFC7F, 'M', u'قي'), - ] - -def _seg_46(): - return [ (0xFC80, 'M', u'كا'), (0xFC81, 'M', u'كل'), (0xFC82, 'M', u'كم'), @@ -4802,6 +4787,10 @@ def _seg_46(): (0xFC88, 'M', u'ما'), (0xFC89, 'M', u'مم'), (0xFC8A, 'M', u'نر'), + ] + +def _seg_46(): + return [ (0xFC8B, 'M', u'نز'), (0xFC8C, 'M', u'نم'), (0xFC8D, 'M', u'نن'), @@ -4891,10 +4880,6 @@ def _seg_46(): (0xFCE1, 'M', u'بم'), (0xFCE2, 'M', u'به'), (0xFCE3, 'M', u'تم'), - ] - -def _seg_47(): - return [ (0xFCE4, 'M', u'ته'), (0xFCE5, 'M', u'ثم'), (0xFCE6, 'M', u'ثه'), @@ -4906,6 +4891,10 @@ def _seg_47(): (0xFCEC, 'M', u'كم'), (0xFCED, 'M', u'لم'), (0xFCEE, 'M', u'نم'), + ] + +def _seg_47(): + return [ (0xFCEF, 'M', u'نه'), (0xFCF0, 'M', u'يم'), (0xFCF1, 'M', u'يه'), @@ -4995,10 +4984,6 @@ def _seg_47(): (0xFD57, 'M', u'تمخ'), (0xFD58, 'M', u'جمح'), (0xFD5A, 'M', u'حمي'), - ] - -def _seg_48(): - return [ (0xFD5B, 'M', u'حمى'), (0xFD5C, 'M', u'سحج'), (0xFD5D, 'M', u'سجح'), @@ -5010,6 +4995,10 @@ def _seg_48(): (0xFD66, 'M', u'صمم'), (0xFD67, 'M', u'شحم'), (0xFD69, 'M', u'شجي'), + ] + +def _seg_48(): + return [ (0xFD6A, 'M', u'شمخ'), (0xFD6C, 'M', u'شمم'), (0xFD6E, 'M', u'ضحى'), @@ -5099,10 +5088,6 @@ def _seg_48(): (0xFDF3, 'M', u'اكبر'), (0xFDF4, 'M', u'محمد'), (0xFDF5, 'M', u'صلعم'), - ] - -def _seg_49(): - return [ (0xFDF6, 'M', u'رسول'), (0xFDF7, 'M', u'عليه'), (0xFDF8, 'M', u'وسلم'), @@ -5114,6 +5099,10 @@ def _seg_49(): (0xFDFE, 'X'), (0xFE00, 'I'), (0xFE10, '3', u','), + ] + +def _seg_49(): + return [ (0xFE11, 'M', u'、'), (0xFE12, 'X'), (0xFE13, '3', u':'), @@ -5203,10 +5192,6 @@ def _seg_49(): (0xFE8F, 'M', u'ب'), (0xFE93, 'M', u'ة'), (0xFE95, 'M', u'ت'), - ] - -def _seg_50(): - return [ (0xFE99, 'M', u'ث'), (0xFE9D, 'M', u'ج'), (0xFEA1, 'M', u'ح'), @@ -5218,6 +5203,10 @@ def _seg_50(): (0xFEB1, 'M', u'س'), (0xFEB5, 'M', u'ش'), (0xFEB9, 'M', u'ص'), + ] + +def _seg_50(): + return [ (0xFEBD, 'M', u'ض'), (0xFEC1, 'M', u'ط'), (0xFEC5, 'M', u'ظ'), @@ -5307,10 +5296,6 @@ def _seg_50(): (0xFF41, 'M', u'a'), (0xFF42, 'M', u'b'), (0xFF43, 'M', u'c'), - ] - -def _seg_51(): - return [ (0xFF44, 'M', u'd'), (0xFF45, 'M', u'e'), (0xFF46, 'M', u'f'), @@ -5322,6 +5307,10 @@ def 
_seg_51(): (0xFF4C, 'M', u'l'), (0xFF4D, 'M', u'm'), (0xFF4E, 'M', u'n'), + ] + +def _seg_51(): + return [ (0xFF4F, 'M', u'o'), (0xFF50, 'M', u'p'), (0xFF51, 'M', u'q'), @@ -5411,10 +5400,6 @@ def _seg_51(): (0xFFA5, 'M', u'ᆬ'), (0xFFA6, 'M', u'ᆭ'), (0xFFA7, 'M', u'ᄃ'), - ] - -def _seg_52(): - return [ (0xFFA8, 'M', u'ᄄ'), (0xFFA9, 'M', u'ᄅ'), (0xFFAA, 'M', u'ᆰ'), @@ -5426,6 +5411,10 @@ def _seg_52(): (0xFFB0, 'M', u'ᄚ'), (0xFFB1, 'M', u'ᄆ'), (0xFFB2, 'M', u'ᄇ'), + ] + +def _seg_52(): + return [ (0xFFB3, 'M', u'ᄈ'), (0xFFB4, 'M', u'ᄡ'), (0xFFB5, 'M', u'ᄉ'), @@ -5515,10 +5504,6 @@ def _seg_52(): (0x10300, 'V'), (0x10324, 'X'), (0x1032D, 'V'), - ] - -def _seg_53(): - return [ (0x1034B, 'X'), (0x10350, 'V'), (0x1037B, 'X'), @@ -5530,6 +5515,10 @@ def _seg_53(): (0x103D6, 'X'), (0x10400, 'M', u'𐐨'), (0x10401, 'M', u'𐐩'), + ] + +def _seg_53(): + return [ (0x10402, 'M', u'𐐪'), (0x10403, 'M', u'𐐫'), (0x10404, 'M', u'𐐬'), @@ -5619,10 +5608,6 @@ def _seg_53(): (0x10570, 'X'), (0x10600, 'V'), (0x10737, 'X'), - ] - -def _seg_54(): - return [ (0x10740, 'V'), (0x10756, 'X'), (0x10760, 'V'), @@ -5634,6 +5619,10 @@ def _seg_54(): (0x1080A, 'V'), (0x10836, 'X'), (0x10837, 'V'), + ] + +def _seg_54(): + return [ (0x10839, 'X'), (0x1083C, 'V'), (0x1083D, 'X'), @@ -5666,11 +5655,11 @@ def _seg_54(): (0x10A15, 'V'), (0x10A18, 'X'), (0x10A19, 'V'), - (0x10A34, 'X'), + (0x10A36, 'X'), (0x10A38, 'V'), (0x10A3B, 'X'), (0x10A3F, 'V'), - (0x10A48, 'X'), + (0x10A49, 'X'), (0x10A50, 'V'), (0x10A59, 'X'), (0x10A60, 'V'), @@ -5723,10 +5712,6 @@ def _seg_54(): (0x10C9B, 'M', u'𐳛'), (0x10C9C, 'M', u'𐳜'), (0x10C9D, 'M', u'𐳝'), - ] - -def _seg_55(): - return [ (0x10C9E, 'M', u'𐳞'), (0x10C9F, 'M', u'𐳟'), (0x10CA0, 'M', u'𐳠'), @@ -5738,6 +5723,10 @@ def _seg_55(): (0x10CA6, 'M', u'𐳦'), (0x10CA7, 'M', u'𐳧'), (0x10CA8, 'M', u'𐳨'), + ] + +def _seg_55(): + return [ (0x10CA9, 'M', u'𐳩'), (0x10CAA, 'M', u'𐳪'), (0x10CAB, 'M', u'𐳫'), @@ -5752,9 +5741,15 @@ def _seg_55(): (0x10CC0, 'V'), (0x10CF3, 'X'), (0x10CFA, 'V'), - (0x10D00, 'X'), + (0x10D28, 'X'), + (0x10D30, 'V'), + (0x10D3A, 'X'), (0x10E60, 'V'), (0x10E7F, 'X'), + (0x10F00, 'V'), + (0x10F28, 'X'), + (0x10F30, 'V'), + (0x10F5A, 'X'), (0x11000, 'V'), (0x1104E, 'X'), (0x11052, 'V'), @@ -5770,7 +5765,7 @@ def _seg_55(): (0x11100, 'V'), (0x11135, 'X'), (0x11136, 'V'), - (0x11144, 'X'), + (0x11147, 'X'), (0x11150, 'V'), (0x11177, 'X'), (0x11180, 'V'), @@ -5811,7 +5806,7 @@ def _seg_55(): (0x11334, 'X'), (0x11335, 'V'), (0x1133A, 'X'), - (0x1133C, 'V'), + (0x1133B, 'V'), (0x11345, 'X'), (0x11347, 'V'), (0x11349, 'X'), @@ -5827,16 +5822,16 @@ def _seg_55(): (0x1136D, 'X'), (0x11370, 'V'), (0x11375, 'X'), - ] - -def _seg_56(): - return [ (0x11400, 'V'), (0x1145A, 'X'), (0x1145B, 'V'), (0x1145C, 'X'), (0x1145D, 'V'), - (0x1145E, 'X'), + ] + +def _seg_56(): + return [ + (0x1145F, 'X'), (0x11480, 'V'), (0x114C8, 'X'), (0x114D0, 'V'), @@ -5856,11 +5851,13 @@ def _seg_56(): (0x116C0, 'V'), (0x116CA, 'X'), (0x11700, 'V'), - (0x1171A, 'X'), + (0x1171B, 'X'), (0x1171D, 'V'), (0x1172C, 'X'), (0x11730, 'V'), (0x11740, 'X'), + (0x11800, 'V'), + (0x1183C, 'X'), (0x118A0, 'M', u'𑣀'), (0x118A1, 'M', u'𑣁'), (0x118A2, 'M', u'𑣂'), @@ -5902,8 +5899,6 @@ def _seg_56(): (0x11A50, 'V'), (0x11A84, 'X'), (0x11A86, 'V'), - (0x11A9D, 'X'), - (0x11A9E, 'V'), (0x11AA3, 'X'), (0x11AC0, 'V'), (0x11AF9, 'X'), @@ -5931,14 +5926,28 @@ def _seg_56(): (0x11D3B, 'X'), (0x11D3C, 'V'), (0x11D3E, 'X'), - ] - -def _seg_57(): - return [ (0x11D3F, 'V'), (0x11D48, 'X'), (0x11D50, 'V'), (0x11D5A, 'X'), + (0x11D60, 'V'), + ] + 
+def _seg_57(): + return [ + (0x11D66, 'X'), + (0x11D67, 'V'), + (0x11D69, 'X'), + (0x11D6A, 'V'), + (0x11D8F, 'X'), + (0x11D90, 'V'), + (0x11D92, 'X'), + (0x11D93, 'V'), + (0x11D99, 'X'), + (0x11DA0, 'V'), + (0x11DAA, 'X'), + (0x11EE0, 'V'), + (0x11EF9, 'X'), (0x12000, 'V'), (0x1239A, 'X'), (0x12400, 'V'), @@ -5973,6 +5982,8 @@ def _seg_57(): (0x16B78, 'X'), (0x16B7D, 'V'), (0x16B90, 'X'), + (0x16E60, 'V'), + (0x16E9B, 'X'), (0x16F00, 'V'), (0x16F45, 'X'), (0x16F50, 'V'), @@ -5982,7 +5993,7 @@ def _seg_57(): (0x16FE0, 'V'), (0x16FE2, 'X'), (0x17000, 'V'), - (0x187ED, 'X'), + (0x187F2, 'X'), (0x18800, 'V'), (0x18AF3, 'X'), (0x1B000, 'V'), @@ -6024,21 +6035,23 @@ def _seg_57(): (0x1D1C1, 'V'), (0x1D1E9, 'X'), (0x1D200, 'V'), + ] + +def _seg_58(): + return [ (0x1D246, 'X'), + (0x1D2E0, 'V'), + (0x1D2F4, 'X'), (0x1D300, 'V'), (0x1D357, 'X'), (0x1D360, 'V'), - (0x1D372, 'X'), + (0x1D379, 'X'), (0x1D400, 'M', u'a'), (0x1D401, 'M', u'b'), (0x1D402, 'M', u'c'), (0x1D403, 'M', u'd'), (0x1D404, 'M', u'e'), (0x1D405, 'M', u'f'), - ] - -def _seg_58(): - return [ (0x1D406, 'M', u'g'), (0x1D407, 'M', u'h'), (0x1D408, 'M', u'i'), @@ -6126,6 +6139,10 @@ def _seg_58(): (0x1D45A, 'M', u'm'), (0x1D45B, 'M', u'n'), (0x1D45C, 'M', u'o'), + ] + +def _seg_59(): + return [ (0x1D45D, 'M', u'p'), (0x1D45E, 'M', u'q'), (0x1D45F, 'M', u'r'), @@ -6139,10 +6156,6 @@ def _seg_58(): (0x1D467, 'M', u'z'), (0x1D468, 'M', u'a'), (0x1D469, 'M', u'b'), - ] - -def _seg_59(): - return [ (0x1D46A, 'M', u'c'), (0x1D46B, 'M', u'd'), (0x1D46C, 'M', u'e'), @@ -6230,6 +6243,10 @@ def _seg_59(): (0x1D4C1, 'M', u'l'), (0x1D4C2, 'M', u'm'), (0x1D4C3, 'M', u'n'), + ] + +def _seg_60(): + return [ (0x1D4C4, 'X'), (0x1D4C5, 'M', u'p'), (0x1D4C6, 'M', u'q'), @@ -6243,10 +6260,6 @@ def _seg_59(): (0x1D4CE, 'M', u'y'), (0x1D4CF, 'M', u'z'), (0x1D4D0, 'M', u'a'), - ] - -def _seg_60(): - return [ (0x1D4D1, 'M', u'b'), (0x1D4D2, 'M', u'c'), (0x1D4D3, 'M', u'd'), @@ -6334,6 +6347,10 @@ def _seg_60(): (0x1D526, 'M', u'i'), (0x1D527, 'M', u'j'), (0x1D528, 'M', u'k'), + ] + +def _seg_61(): + return [ (0x1D529, 'M', u'l'), (0x1D52A, 'M', u'm'), (0x1D52B, 'M', u'n'), @@ -6347,10 +6364,6 @@ def _seg_60(): (0x1D533, 'M', u'v'), (0x1D534, 'M', u'w'), (0x1D535, 'M', u'x'), - ] - -def _seg_61(): - return [ (0x1D536, 'M', u'y'), (0x1D537, 'M', u'z'), (0x1D538, 'M', u'a'), @@ -6438,6 +6451,10 @@ def _seg_61(): (0x1D58C, 'M', u'g'), (0x1D58D, 'M', u'h'), (0x1D58E, 'M', u'i'), + ] + +def _seg_62(): + return [ (0x1D58F, 'M', u'j'), (0x1D590, 'M', u'k'), (0x1D591, 'M', u'l'), @@ -6451,10 +6468,6 @@ def _seg_61(): (0x1D599, 'M', u't'), (0x1D59A, 'M', u'u'), (0x1D59B, 'M', u'v'), - ] - -def _seg_62(): - return [ (0x1D59C, 'M', u'w'), (0x1D59D, 'M', u'x'), (0x1D59E, 'M', u'y'), @@ -6542,6 +6555,10 @@ def _seg_62(): (0x1D5F0, 'M', u'c'), (0x1D5F1, 'M', u'd'), (0x1D5F2, 'M', u'e'), + ] + +def _seg_63(): + return [ (0x1D5F3, 'M', u'f'), (0x1D5F4, 'M', u'g'), (0x1D5F5, 'M', u'h'), @@ -6555,10 +6572,6 @@ def _seg_62(): (0x1D5FD, 'M', u'p'), (0x1D5FE, 'M', u'q'), (0x1D5FF, 'M', u'r'), - ] - -def _seg_63(): - return [ (0x1D600, 'M', u's'), (0x1D601, 'M', u't'), (0x1D602, 'M', u'u'), @@ -6646,6 +6659,10 @@ def _seg_63(): (0x1D654, 'M', u'y'), (0x1D655, 'M', u'z'), (0x1D656, 'M', u'a'), + ] + +def _seg_64(): + return [ (0x1D657, 'M', u'b'), (0x1D658, 'M', u'c'), (0x1D659, 'M', u'd'), @@ -6659,10 +6676,6 @@ def _seg_63(): (0x1D661, 'M', u'l'), (0x1D662, 'M', u'm'), (0x1D663, 'M', u'n'), - ] - -def _seg_64(): - return [ (0x1D664, 'M', u'o'), (0x1D665, 'M', u'p'), (0x1D666, 
'M', u'q'), @@ -6750,6 +6763,10 @@ def _seg_64(): (0x1D6B9, 'M', u'θ'), (0x1D6BA, 'M', u'σ'), (0x1D6BB, 'M', u'τ'), + ] + +def _seg_65(): + return [ (0x1D6BC, 'M', u'υ'), (0x1D6BD, 'M', u'φ'), (0x1D6BE, 'M', u'χ'), @@ -6763,10 +6780,6 @@ def _seg_64(): (0x1D6C6, 'M', u'ε'), (0x1D6C7, 'M', u'ζ'), (0x1D6C8, 'M', u'η'), - ] - -def _seg_65(): - return [ (0x1D6C9, 'M', u'θ'), (0x1D6CA, 'M', u'ι'), (0x1D6CB, 'M', u'κ'), @@ -6854,6 +6867,10 @@ def _seg_65(): (0x1D71F, 'M', u'δ'), (0x1D720, 'M', u'ε'), (0x1D721, 'M', u'ζ'), + ] + +def _seg_66(): + return [ (0x1D722, 'M', u'η'), (0x1D723, 'M', u'θ'), (0x1D724, 'M', u'ι'), @@ -6867,10 +6884,6 @@ def _seg_65(): (0x1D72C, 'M', u'ρ'), (0x1D72D, 'M', u'θ'), (0x1D72E, 'M', u'σ'), - ] - -def _seg_66(): - return [ (0x1D72F, 'M', u'τ'), (0x1D730, 'M', u'υ'), (0x1D731, 'M', u'φ'), @@ -6958,6 +6971,10 @@ def _seg_66(): (0x1D785, 'M', u'φ'), (0x1D786, 'M', u'χ'), (0x1D787, 'M', u'ψ'), + ] + +def _seg_67(): + return [ (0x1D788, 'M', u'ω'), (0x1D789, 'M', u'∂'), (0x1D78A, 'M', u'ε'), @@ -6971,10 +6988,6 @@ def _seg_66(): (0x1D792, 'M', u'γ'), (0x1D793, 'M', u'δ'), (0x1D794, 'M', u'ε'), - ] - -def _seg_67(): - return [ (0x1D795, 'M', u'ζ'), (0x1D796, 'M', u'η'), (0x1D797, 'M', u'θ'), @@ -7062,6 +7075,10 @@ def _seg_67(): (0x1D7EC, 'M', u'0'), (0x1D7ED, 'M', u'1'), (0x1D7EE, 'M', u'2'), + ] + +def _seg_68(): + return [ (0x1D7EF, 'M', u'3'), (0x1D7F0, 'M', u'4'), (0x1D7F1, 'M', u'5'), @@ -7075,10 +7092,6 @@ def _seg_67(): (0x1D7F9, 'M', u'3'), (0x1D7FA, 'M', u'4'), (0x1D7FB, 'M', u'5'), - ] - -def _seg_68(): - return [ (0x1D7FC, 'M', u'6'), (0x1D7FD, 'M', u'7'), (0x1D7FE, 'M', u'8'), @@ -7143,6 +7156,8 @@ def _seg_68(): (0x1E95A, 'X'), (0x1E95E, 'V'), (0x1E960, 'X'), + (0x1EC71, 'V'), + (0x1ECB5, 'X'), (0x1EE00, 'M', u'ا'), (0x1EE01, 'M', u'ب'), (0x1EE02, 'M', u'ج'), @@ -7164,6 +7179,10 @@ def _seg_68(): (0x1EE12, 'M', u'ق'), (0x1EE13, 'M', u'ر'), (0x1EE14, 'M', u'ش'), + ] + +def _seg_69(): + return [ (0x1EE15, 'M', u'ت'), (0x1EE16, 'M', u'ث'), (0x1EE17, 'M', u'خ'), @@ -7179,10 +7198,6 @@ def _seg_68(): (0x1EE21, 'M', u'ب'), (0x1EE22, 'M', u'ج'), (0x1EE23, 'X'), - ] - -def _seg_69(): - return [ (0x1EE24, 'M', u'ه'), (0x1EE25, 'X'), (0x1EE27, 'M', u'ح'), @@ -7268,6 +7283,10 @@ def _seg_69(): (0x1EE81, 'M', u'ب'), (0x1EE82, 'M', u'ج'), (0x1EE83, 'M', u'د'), + ] + +def _seg_70(): + return [ (0x1EE84, 'M', u'ه'), (0x1EE85, 'M', u'و'), (0x1EE86, 'M', u'ز'), @@ -7283,10 +7302,6 @@ def _seg_69(): (0x1EE90, 'M', u'ف'), (0x1EE91, 'M', u'ص'), (0x1EE92, 'M', u'ق'), - ] - -def _seg_70(): - return [ (0x1EE93, 'M', u'ر'), (0x1EE94, 'M', u'ش'), (0x1EE95, 'M', u'ت'), @@ -7372,6 +7387,10 @@ def _seg_70(): (0x1F122, '3', u'(s)'), (0x1F123, '3', u'(t)'), (0x1F124, '3', u'(u)'), + ] + +def _seg_71(): + return [ (0x1F125, '3', u'(v)'), (0x1F126, '3', u'(w)'), (0x1F127, '3', u'(x)'), @@ -7382,15 +7401,11 @@ def _seg_70(): (0x1F12C, 'M', u'r'), (0x1F12D, 'M', u'cd'), (0x1F12E, 'M', u'wz'), - (0x1F12F, 'X'), + (0x1F12F, 'V'), (0x1F130, 'M', u'a'), (0x1F131, 'M', u'b'), (0x1F132, 'M', u'c'), (0x1F133, 'M', u'd'), - ] - -def _seg_71(): - return [ (0x1F134, 'M', u'e'), (0x1F135, 'M', u'f'), (0x1F136, 'M', u'g'), @@ -7476,6 +7491,10 @@ def _seg_71(): (0x1F239, 'M', u'割'), (0x1F23A, 'M', u'営'), (0x1F23B, 'M', u'配'), + ] + +def _seg_72(): + return [ (0x1F23C, 'X'), (0x1F240, 'M', u'〔本〕'), (0x1F241, 'M', u'〔三〕'), @@ -7491,21 +7510,17 @@ def _seg_71(): (0x1F251, 'M', u'可'), (0x1F252, 'X'), (0x1F260, 'V'), - ] - -def _seg_72(): - return [ (0x1F266, 'X'), (0x1F300, 'V'), (0x1F6D5, 'X'), (0x1F6E0, 
'V'), (0x1F6ED, 'X'), (0x1F6F0, 'V'), - (0x1F6F9, 'X'), + (0x1F6FA, 'X'), (0x1F700, 'V'), (0x1F774, 'X'), (0x1F780, 'V'), - (0x1F7D5, 'X'), + (0x1F7D9, 'X'), (0x1F800, 'V'), (0x1F80C, 'X'), (0x1F810, 'V'), @@ -7521,15 +7536,21 @@ def _seg_72(): (0x1F910, 'V'), (0x1F93F, 'X'), (0x1F940, 'V'), - (0x1F94D, 'X'), - (0x1F950, 'V'), - (0x1F96C, 'X'), - (0x1F980, 'V'), - (0x1F998, 'X'), + (0x1F971, 'X'), + (0x1F973, 'V'), + (0x1F977, 'X'), + (0x1F97A, 'V'), + (0x1F97B, 'X'), + (0x1F97C, 'V'), + (0x1F9A3, 'X'), + (0x1F9B0, 'V'), + (0x1F9BA, 'X'), (0x1F9C0, 'V'), - (0x1F9C1, 'X'), + (0x1F9C3, 'X'), (0x1F9D0, 'V'), - (0x1F9E7, 'X'), + (0x1FA00, 'X'), + (0x1FA60, 'V'), + (0x1FA6E, 'X'), (0x20000, 'V'), (0x2A6D7, 'X'), (0x2A700, 'V'), @@ -7574,6 +7595,10 @@ def _seg_72(): (0x2F81F, 'M', u'㓟'), (0x2F820, 'M', u'刻'), (0x2F821, 'M', u'剆'), + ] + +def _seg_73(): + return [ (0x2F822, 'M', u'割'), (0x2F823, 'M', u'剷'), (0x2F824, 'M', u'㔕'), @@ -7595,10 +7620,6 @@ def _seg_72(): (0x2F836, 'M', u'及'), (0x2F837, 'M', u'叟'), (0x2F838, 'M', u'𠭣'), - ] - -def _seg_73(): - return [ (0x2F839, 'M', u'叫'), (0x2F83A, 'M', u'叱'), (0x2F83B, 'M', u'吆'), @@ -7678,6 +7699,10 @@ def _seg_73(): (0x2F887, 'M', u'幩'), (0x2F888, 'M', u'㡢'), (0x2F889, 'M', u'𢆃'), + ] + +def _seg_74(): + return [ (0x2F88A, 'M', u'㡼'), (0x2F88B, 'M', u'庰'), (0x2F88C, 'M', u'庳'), @@ -7699,10 +7724,6 @@ def _seg_73(): (0x2F89E, 'M', u'志'), (0x2F89F, 'M', u'忹'), (0x2F8A0, 'M', u'悁'), - ] - -def _seg_74(): - return [ (0x2F8A1, 'M', u'㤺'), (0x2F8A2, 'M', u'㤜'), (0x2F8A3, 'M', u'悔'), @@ -7782,6 +7803,10 @@ def _seg_74(): (0x2F8ED, 'M', u'櫛'), (0x2F8EE, 'M', u'㰘'), (0x2F8EF, 'M', u'次'), + ] + +def _seg_75(): + return [ (0x2F8F0, 'M', u'𣢧'), (0x2F8F1, 'M', u'歔'), (0x2F8F2, 'M', u'㱎'), @@ -7803,10 +7828,6 @@ def _seg_74(): (0x2F902, 'M', u'流'), (0x2F903, 'M', u'浩'), (0x2F904, 'M', u'浸'), - ] - -def _seg_75(): - return [ (0x2F905, 'M', u'涅'), (0x2F906, 'M', u'𣴞'), (0x2F907, 'M', u'洴'), @@ -7886,6 +7907,10 @@ def _seg_75(): (0x2F953, 'M', u'祖'), (0x2F954, 'M', u'𥚚'), (0x2F955, 'M', u'𥛅'), + ] + +def _seg_76(): + return [ (0x2F956, 'M', u'福'), (0x2F957, 'M', u'秫'), (0x2F958, 'M', u'䄯'), @@ -7907,10 +7932,6 @@ def _seg_75(): (0x2F969, 'M', u'糣'), (0x2F96A, 'M', u'紀'), (0x2F96B, 'M', u'𥾆'), - ] - -def _seg_76(): - return [ (0x2F96C, 'M', u'絣'), (0x2F96D, 'M', u'䌁'), (0x2F96E, 'M', u'緇'), @@ -7990,6 +8011,10 @@ def _seg_76(): (0x2F9B8, 'M', u'蚈'), (0x2F9B9, 'M', u'蜎'), (0x2F9BA, 'M', u'蛢'), + ] + +def _seg_77(): + return [ (0x2F9BB, 'M', u'蝹'), (0x2F9BC, 'M', u'蜨'), (0x2F9BD, 'M', u'蝫'), @@ -8011,10 +8036,6 @@ def _seg_76(): (0x2F9CD, 'M', u'䚾'), (0x2F9CE, 'M', u'䛇'), (0x2F9CF, 'M', u'誠'), - ] - -def _seg_77(): - return [ (0x2F9D0, 'M', u'諭'), (0x2F9D1, 'M', u'變'), (0x2F9D2, 'M', u'豕'), @@ -8094,6 +8115,10 @@ def _seg_77(): (0x2FA1D, 'M', u'𪘀'), (0x2FA1E, 'X'), (0xE0100, 'I'), + ] + +def _seg_78(): + return [ (0xE01F0, 'X'), ] @@ -8176,4 +8201,5 @@ uts46data = tuple( + _seg_75() + _seg_76() + _seg_77() + + _seg_78() ) diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index c7df334e4..38ae237c8 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -15,7 +15,7 @@ pytoml==0.1.19 requests==2.19.1 certifi==2018.11.29 chardet==3.0.4 - idna==2.7 + idna==2.8 urllib3==1.24.1 retrying==1.3.3 setuptools==40.4.3 From 5f2a93685976635a233999a55b6201a253e3d62e Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Fri, 18 Jan 2019 10:46:31 +0530 Subject: [PATCH 39/69] Update pep517 to 0.5.0 --- news/pep517.vendor | 1 + 
src/pip/_vendor/pep517/LICENSE | 21 +++++ src/pip/_vendor/pep517/__init__.py | 2 +- src/pip/_vendor/pep517/_in_process.py | 37 ++++++--- src/pip/_vendor/pep517/build.py | 108 ++++++++++++++++++++++++++ src/pip/_vendor/pep517/check.py | 32 +++++--- src/pip/_vendor/pep517/colorlog.py | 21 +++-- src/pip/_vendor/pep517/envbuild.py | 14 +++- src/pip/_vendor/pep517/wrappers.py | 14 +++- src/pip/_vendor/vendor.txt | 2 +- 10 files changed, 213 insertions(+), 39 deletions(-) create mode 100644 news/pep517.vendor create mode 100644 src/pip/_vendor/pep517/LICENSE create mode 100644 src/pip/_vendor/pep517/build.py diff --git a/news/pep517.vendor b/news/pep517.vendor new file mode 100644 index 000000000..c8c69f8cf --- /dev/null +++ b/news/pep517.vendor @@ -0,0 +1 @@ +Update pep517 to 0.5.0 diff --git a/src/pip/_vendor/pep517/LICENSE b/src/pip/_vendor/pep517/LICENSE new file mode 100644 index 000000000..b0ae9dbc2 --- /dev/null +++ b/src/pip/_vendor/pep517/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2017 Thomas Kluyver + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/src/pip/_vendor/pep517/__init__.py b/src/pip/_vendor/pep517/__init__.py index 3d46629c2..9c1a098f7 100644 --- a/src/pip/_vendor/pep517/__init__.py +++ b/src/pip/_vendor/pep517/__init__.py @@ -1,4 +1,4 @@ """Wrappers to build Python packages using PEP 517 hooks """ -__version__ = '0.3' +__version__ = '0.5.0' diff --git a/src/pip/_vendor/pep517/_in_process.py b/src/pip/_vendor/pep517/_in_process.py index d1ad7b7e5..d6524b660 100644 --- a/src/pip/_vendor/pep517/_in_process.py +++ b/src/pip/_vendor/pep517/_in_process.py @@ -21,9 +21,11 @@ import sys # This is run as a script, not a module, so it can't do a relative import import compat + class BackendUnavailable(Exception): """Raised if we cannot import the backend""" + def _build_backend(): """Find and load the build backend""" ep = os.environ['PEP517_BUILD_BACKEND'] @@ -37,9 +39,10 @@ def _build_backend(): obj = getattr(obj, path_part) return obj + def get_requires_for_build_wheel(config_settings): """Invoke the optional get_requires_for_build_wheel hook - + Returns [] if the hook is not defined. """ backend = _build_backend() @@ -50,9 +53,10 @@ def get_requires_for_build_wheel(config_settings): else: return hook(config_settings) + def prepare_metadata_for_build_wheel(metadata_directory, config_settings): """Invoke optional prepare_metadata_for_build_wheel - + Implements a fallback by building a wheel if the hook isn't defined. 
""" backend = _build_backend() @@ -64,8 +68,10 @@ def prepare_metadata_for_build_wheel(metadata_directory, config_settings): else: return hook(metadata_directory, config_settings) + WHEEL_BUILT_MARKER = 'PEP517_ALREADY_BUILT_WHEEL' + def _dist_info_files(whl_zip): """Identify the .dist-info folder inside a wheel ZipFile.""" res = [] @@ -77,11 +83,13 @@ def _dist_info_files(whl_zip): return res raise Exception("No .dist-info folder found in wheel") -def _get_wheel_metadata_from_wheel(backend, metadata_directory, config_settings): + +def _get_wheel_metadata_from_wheel( + backend, metadata_directory, config_settings): """Build a wheel and extract the metadata from it. - - Fallback for when the build backend does not define the 'get_wheel_metadata' - hook. + + Fallback for when the build backend does not + define the 'get_wheel_metadata' hook. """ from zipfile import ZipFile whl_basename = backend.build_wheel(metadata_directory, config_settings) @@ -94,6 +102,7 @@ def _get_wheel_metadata_from_wheel(backend, metadata_directory, config_settings) zipf.extractall(path=metadata_directory, members=dist_info) return dist_info[0].split('/')[0] + def _find_already_built_wheel(metadata_directory): """Check for a wheel already built during the get_wheel_metadata hook. """ @@ -111,14 +120,16 @@ def _find_already_built_wheel(metadata_directory): print('Found multiple .whl files; unspecified behaviour. ' 'Will call build_wheel.') return None - + # Exactly one .whl file return whl_files[0] + def build_wheel(wheel_directory, config_settings, metadata_directory=None): """Invoke the mandatory build_wheel hook. - - If a wheel was already built in the prepare_metadata_for_build_wheel fallback, this + + If a wheel was already built in the + prepare_metadata_for_build_wheel fallback, this will copy it rather than rebuilding the wheel. """ prebuilt_whl = _find_already_built_wheel(metadata_directory) @@ -143,12 +154,15 @@ def get_requires_for_build_sdist(config_settings): else: return hook(config_settings) + class _DummyException(Exception): """Nothing should ever raise this exception""" + class GotUnsupportedOperation(Exception): """For internal use when backend raises UnsupportedOperation""" + def build_sdist(sdist_directory, config_settings): """Invoke the mandatory build_sdist hook.""" backend = _build_backend() @@ -157,6 +171,7 @@ def build_sdist(sdist_directory, config_settings): except getattr(backend, 'UnsupportedOperation', _DummyException): raise GotUnsupportedOperation + HOOK_NAMES = { 'get_requires_for_build_wheel', 'prepare_metadata_for_build_wheel', @@ -165,6 +180,7 @@ HOOK_NAMES = { 'build_sdist', } + def main(): if len(sys.argv) < 3: sys.exit("Needs args: hook_name, control_dir") @@ -183,8 +199,9 @@ def main(): json_out['no_backend'] = True except GotUnsupportedOperation: json_out['unsupported'] = True - + compat.write_json(json_out, pjoin(control_dir, 'output.json'), indent=2) + if __name__ == '__main__': main() diff --git a/src/pip/_vendor/pep517/build.py b/src/pip/_vendor/pep517/build.py new file mode 100644 index 000000000..ac6c9495c --- /dev/null +++ b/src/pip/_vendor/pep517/build.py @@ -0,0 +1,108 @@ +"""Build a project using PEP 517 hooks. 
+""" +import argparse +import logging +import os +import contextlib +from pip._vendor import pytoml +import shutil +import errno +import tempfile + +from .envbuild import BuildEnvironment +from .wrappers import Pep517HookCaller + +log = logging.getLogger(__name__) + + +@contextlib.contextmanager +def tempdir(): + td = tempfile.mkdtemp() + try: + yield td + finally: + shutil.rmtree(td) + + +def _do_build(hooks, env, dist, dest): + get_requires_name = 'get_requires_for_build_{dist}'.format(**locals()) + get_requires = getattr(hooks, get_requires_name) + reqs = get_requires({}) + log.info('Got build requires: %s', reqs) + + env.pip_install(reqs) + log.info('Installed dynamic build dependencies') + + with tempdir() as td: + log.info('Trying to build %s in %s', dist, td) + build_name = 'build_{dist}'.format(**locals()) + build = getattr(hooks, build_name) + filename = build(td, {}) + source = os.path.join(td, filename) + shutil.move(source, os.path.join(dest, os.path.basename(filename))) + + +def mkdir_p(*args, **kwargs): + """Like `mkdir`, but does not raise an exception if the + directory already exists. + """ + try: + return os.mkdir(*args, **kwargs) + except OSError as exc: + if exc.errno != errno.EEXIST: + raise + + +def build(source_dir, dist, dest=None): + pyproject = os.path.join(source_dir, 'pyproject.toml') + dest = os.path.join(source_dir, dest or 'dist') + mkdir_p(dest) + + with open(pyproject) as f: + pyproject_data = pytoml.load(f) + # Ensure the mandatory data can be loaded + buildsys = pyproject_data['build-system'] + requires = buildsys['requires'] + backend = buildsys['build-backend'] + + hooks = Pep517HookCaller(source_dir, backend) + + with BuildEnvironment() as env: + env.pip_install(requires) + _do_build(hooks, env, dist, dest) + + +parser = argparse.ArgumentParser() +parser.add_argument( + 'source_dir', + help="A directory containing pyproject.toml", +) +parser.add_argument( + '--binary', '-b', + action='store_true', + default=False, +) +parser.add_argument( + '--source', '-s', + action='store_true', + default=False, +) +parser.add_argument( + '--out-dir', '-o', + help="Destination in which to save the builds relative to source dir", +) + + +def main(args): + # determine which dists to build + dists = list(filter(None, ( + 'sdist' if args.source or not args.binary else None, + 'wheel' if args.binary or not args.source else None, + ))) + + for dist in dists: + build(args.source_dir, dist, args.out_dir) + + +if __name__ == '__main__': + main(parser.parse_args()) diff --git a/src/pip/_vendor/pep517/check.py b/src/pip/_vendor/pep517/check.py index c65d51cf3..f4cdc6bec 100644 --- a/src/pip/_vendor/pep517/check.py +++ b/src/pip/_vendor/pep517/check.py @@ -18,10 +18,11 @@ from .wrappers import Pep517HookCaller log = logging.getLogger(__name__) -def check_build_sdist(hooks): + +def check_build_sdist(hooks, build_sys_requires): with BuildEnvironment() as env: try: - env.pip_install(hooks.build_sys_requires) + env.pip_install(build_sys_requires) log.info('Installed static build dependencies') except CalledProcessError: log.error('Failed to install static build dependencies') @@ -30,7 +31,7 @@ def check_build_sdist(hooks): try: reqs = hooks.get_requires_for_build_sdist({}) log.info('Got build requires: %s', reqs) - except: + except Exception: log.error('Failure in get_requires_for_build_sdist', exc_info=True) return False @@ -47,12 +48,13 @@ def check_build_sdist(hooks): try: filename = hooks.build_sdist(td, {}) log.info('build_sdist returned %r', filename) - except: + except 
Exception: log.info('Failure in build_sdist', exc_info=True) return False if not filename.endswith('.tar.gz'): - log.error("Filename %s doesn't have .tar.gz extension", filename) + log.error( + "Filename %s doesn't have .tar.gz extension", filename) return False path = pjoin(td, filename) @@ -73,10 +75,11 @@ def check_build_sdist(hooks): return True -def check_build_wheel(hooks): + +def check_build_wheel(hooks, build_sys_requires): with BuildEnvironment() as env: try: - env.pip_install(hooks.build_sys_requires) + env.pip_install(build_sys_requires) log.info('Installed static build dependencies') except CalledProcessError: log.error('Failed to install static build dependencies') @@ -85,7 +88,7 @@ def check_build_wheel(hooks): try: reqs = hooks.get_requires_for_build_wheel({}) log.info('Got build requires: %s', reqs) - except: + except Exception: log.error('Failure in get_requires_for_build_sdist', exc_info=True) return False @@ -102,7 +105,7 @@ def check_build_wheel(hooks): try: filename = hooks.build_wheel(td, {}) log.info('build_wheel returned %r', filename) - except: + except Exception: log.info('Failure in build_wheel', exc_info=True) return False @@ -151,8 +154,8 @@ def check(source_dir): hooks = Pep517HookCaller(source_dir, backend) - sdist_ok = check_build_sdist(hooks) - wheel_ok = check_build_wheel(hooks) + sdist_ok = check_build_sdist(hooks, requires) + wheel_ok = check_build_wheel(hooks, requires) if not sdist_ok: log.warning('Sdist checks failed; scroll up to see') @@ -164,7 +167,8 @@ def check(source_dir): def main(argv=None): ap = argparse.ArgumentParser() - ap.add_argument('source_dir', + ap.add_argument( + 'source_dir', help="A directory containing pyproject.toml") args = ap.parse_args(argv) @@ -178,17 +182,21 @@ def main(argv=None): print(ansi('Checks failed', 'red')) sys.exit(1) + ansi_codes = { 'reset': '\x1b[0m', 'bold': '\x1b[1m', 'red': '\x1b[31m', 'green': '\x1b[32m', } + + def ansi(s, attr): if os.name != 'nt' and sys.stdout.isatty(): return ansi_codes[attr] + str(s) + ansi_codes['reset'] else: return str(s) + if __name__ == '__main__': main() diff --git a/src/pip/_vendor/pep517/colorlog.py b/src/pip/_vendor/pep517/colorlog.py index 26cf7480d..69c8a59d3 100644 --- a/src/pip/_vendor/pep517/colorlog.py +++ b/src/pip/_vendor/pep517/colorlog.py @@ -24,6 +24,7 @@ try: except ImportError: curses = None + def _stderr_supports_color(): color = False if curses and hasattr(sys.stderr, 'isatty') and sys.stderr.isatty(): @@ -35,13 +36,14 @@ def _stderr_supports_color(): pass return color + class LogFormatter(logging.Formatter): """Log formatter with colour support """ DEFAULT_COLORS = { - logging.INFO: 2, # Green - logging.WARNING: 3, # Yellow - logging.ERROR: 1, # Red + logging.INFO: 2, # Green + logging.WARNING: 3, # Yellow + logging.ERROR: 1, # Red logging.CRITICAL: 1, } @@ -75,7 +77,8 @@ class LogFormatter(logging.Formatter): fg_color = str(fg_color, "ascii") for levelno, code in self.DEFAULT_COLORS.items(): - self._colors[levelno] = str(curses.tparm(fg_color, code), "ascii") + self._colors[levelno] = str( + curses.tparm(fg_color, code), "ascii") self._normal = str(curses.tigetstr("sgr0"), "ascii") scr = curses.initscr() @@ -83,15 +86,16 @@ class LogFormatter(logging.Formatter): curses.endwin() else: self._normal = '' - # Default width is usually 80, but too wide is worse than too narrow + # Default width is usually 80, but too wide is + # worse than too narrow self.termwidth = 70 def formatMessage(self, record): - l = len(record.message) + mlen = len(record.message) 
right_text = '{initial}-{name}'.format(initial=record.levelname[0], name=record.name) - if l + len(right_text) < self.termwidth: - space = ' ' * (self.termwidth - (l + len(right_text))) + if mlen + len(right_text) < self.termwidth: + space = ' ' * (self.termwidth - (mlen + len(right_text))) else: space = ' ' @@ -103,6 +107,7 @@ class LogFormatter(logging.Formatter): return record.message + space + start_color + right_text + end_color + def enable_colourful_output(level=logging.INFO): handler = logging.StreamHandler() handler.setFormatter(LogFormatter()) diff --git a/src/pip/_vendor/pep517/envbuild.py b/src/pip/_vendor/pep517/envbuild.py index c264f4630..f7ac5f46f 100644 --- a/src/pip/_vendor/pep517/envbuild.py +++ b/src/pip/_vendor/pep517/envbuild.py @@ -14,6 +14,7 @@ from .wrappers import Pep517HookCaller log = logging.getLogger(__name__) + def _load_pyproject(source_dir): with open(os.path.join(source_dir, 'pyproject.toml')) as f: pyproject_data = pytoml.load(f) @@ -89,11 +90,17 @@ class BuildEnvironment(object): if not reqs: return log.info('Calling pip to install %s', reqs) - check_call([sys.executable, '-m', 'pip', 'install', '--ignore-installed', - '--prefix', self.path] + list(reqs)) + check_call([ + sys.executable, '-m', 'pip', 'install', '--ignore-installed', + '--prefix', self.path] + list(reqs)) def __exit__(self, exc_type, exc_val, exc_tb): - if self._cleanup and (self.path is not None) and os.path.isdir(self.path): + needs_cleanup = ( + self._cleanup and + self.path is not None and + os.path.isdir(self.path) + ) + if needs_cleanup: shutil.rmtree(self.path) if self.save_path is None: @@ -106,6 +113,7 @@ class BuildEnvironment(object): else: os.environ['PYTHONPATH'] = self.save_pythonpath + def build_wheel(source_dir, wheel_dir, config_settings=None): """Build a wheel from a source directory using PEP 517 hooks. diff --git a/src/pip/_vendor/pep517/wrappers.py b/src/pip/_vendor/pep517/wrappers.py index d14338ba3..b14b89915 100644 --- a/src/pip/_vendor/pep517/wrappers.py +++ b/src/pip/_vendor/pep517/wrappers.py @@ -10,6 +10,7 @@ from . import compat _in_proc_script = pjoin(dirname(abspath(__file__)), '_in_process.py') + @contextmanager def tempdir(): td = mkdtemp() @@ -18,12 +19,15 @@ def tempdir(): finally: shutil.rmtree(td) + class BackendUnavailable(Exception): """Will be raised if the backend cannot be imported in the hook process.""" + class UnsupportedOperation(Exception): """May be raised by build_sdist if the backend indicates that it can't.""" + def default_subprocess_runner(cmd, cwd=None, extra_environ=None): """The default method of calling the wrapper subprocess.""" env = os.environ.copy() @@ -32,6 +36,7 @@ def default_subprocess_runner(cmd, cwd=None, extra_environ=None): check_call(cmd, cwd=cwd, env=env) + class Pep517HookCaller(object): """A wrapper around a source directory to be built with a PEP 517 backend. @@ -66,7 +71,8 @@ class Pep517HookCaller(object): 'config_settings': config_settings }) - def prepare_metadata_for_build_wheel(self, metadata_directory, config_settings=None): + def prepare_metadata_for_build_wheel( + self, metadata_directory, config_settings=None): """Prepare a *.dist-info folder with metadata for this project. Returns the name of the newly created folder. 
@@ -80,7 +86,9 @@ class Pep517HookCaller(object): 'config_settings': config_settings, }) - def build_wheel(self, wheel_directory, config_settings=None, metadata_directory=None): + def build_wheel( + self, wheel_directory, config_settings=None, + metadata_directory=None): """Build a wheel from this project. Returns the name of the newly created file. @@ -124,7 +132,6 @@ class Pep517HookCaller(object): 'config_settings': config_settings, }) - def _call_hook(self, hook_name, kwargs): # On Python 2, pytoml returns Unicode values (which is correct) but the # environment passed to check_call needs to contain string values. We @@ -154,4 +161,3 @@ class Pep517HookCaller(object): if data.get('no_backend'): raise BackendUnavailable return data['return_val'] - diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 38ae237c8..f23409089 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -8,7 +8,7 @@ ipaddress==1.0.22 # Only needed on 2.6 and 2.7 lockfile==0.12.2 msgpack==0.5.6 packaging==18.0 -pep517==0.3 +pep517==0.5.0 progress==1.4 pyparsing==2.3.1 pytoml==0.1.19 From e772c0c9b49ddcf0380a2b3465489d76568d530b Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Fri, 18 Jan 2019 10:47:41 +0530 Subject: [PATCH 40/69] Update pytoml to 0.1.20 --- news/pytoml.vendor | 1 + src/pip/_vendor/pytoml/__init__.py | 3 +- src/pip/_vendor/pytoml/parser.py | 55 +++++------------------- src/pip/_vendor/pytoml/test.py | 30 +++++++++++++ src/pip/_vendor/pytoml/utils.py | 67 ++++++++++++++++++++++++++++++ src/pip/_vendor/pytoml/writer.py | 39 ++++------------- src/pip/_vendor/vendor.txt | 2 +- 7 files changed, 121 insertions(+), 76 deletions(-) create mode 100644 news/pytoml.vendor create mode 100644 src/pip/_vendor/pytoml/test.py create mode 100644 src/pip/_vendor/pytoml/utils.py diff --git a/news/pytoml.vendor b/news/pytoml.vendor new file mode 100644 index 000000000..2ecdb0b3b --- /dev/null +++ b/news/pytoml.vendor @@ -0,0 +1 @@ +Update pytoml to 0.1.20 diff --git a/src/pip/_vendor/pytoml/__init__.py b/src/pip/_vendor/pytoml/__init__.py index 8dc731553..8ed060ff5 100644 --- a/src/pip/_vendor/pytoml/__init__.py +++ b/src/pip/_vendor/pytoml/__init__.py @@ -1,3 +1,4 @@ from .core import TomlError from .parser import load, loads -from .writer import dump, dumps +from .test import translate_to_test +from .writer import dump, dumps \ No newline at end of file diff --git a/src/pip/_vendor/pytoml/parser.py b/src/pip/_vendor/pytoml/parser.py index 9f94e9230..3493aa644 100644 --- a/src/pip/_vendor/pytoml/parser.py +++ b/src/pip/_vendor/pytoml/parser.py @@ -1,5 +1,6 @@ import string, re, sys, datetime from .core import TomlError +from .utils import rfc3339_re, parse_rfc3339_re if sys.version_info[0] == 2: _chr = unichr @@ -179,13 +180,13 @@ _ws_re = re.compile(r'[ \t]*') def _p_ws(s): s.expect_re(_ws_re) -_escapes = { 'b': '\b', 'n': '\n', 'r': '\r', 't': '\t', '"': '"', '\'': '\'', - '\\': '\\', '/': '/', 'f': '\f' } +_escapes = { 'b': '\b', 'n': '\n', 'r': '\r', 't': '\t', '"': '"', + '\\': '\\', 'f': '\f' } _basicstr_re = re.compile(r'[^"\\\000-\037]*') _short_uni_re = re.compile(r'u([0-9a-fA-F]{4})') _long_uni_re = re.compile(r'U([0-9a-fA-F]{8})') -_escapes_re = re.compile('[bnrt"\'\\\\/f]') +_escapes_re = re.compile(r'[btnfr\"\\]') _newline_esc_re = re.compile('\n[ \t\n]*') def _p_basicstr_content(s, content=_basicstr_re): res = [] @@ -196,7 +197,10 @@ def _p_basicstr_content(s, content=_basicstr_re): if s.consume_re(_newline_esc_re): pass elif s.consume_re(_short_uni_re) or 
s.consume_re(_long_uni_re): - res.append(_chr(int(s.last().group(1), 16))) + v = int(s.last().group(1), 16) + if 0xd800 <= v < 0xe000: + s.fail() + res.append(_chr(v)) else: s.expect_re(_escapes_re) res.append(_escapes[s.last().group(0)]) @@ -220,9 +224,8 @@ def _p_key(s): return s.expect_re(_key_re).group(0) _float_re = re.compile(r'[+-]?(?:0|[1-9](?:_?\d)*)(?:\.\d(?:_?\d)*)?(?:[eE][+-]?(?:\d(?:_?\d)*))?') -_datetime_re = re.compile(r'(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2})(\.\d+)?(?:Z|([+-]\d{2}):(\d{2}))') -_basicstr_ml_re = re.compile(r'(?:(?:|"|"")[^"\\\000-\011\013-\037])*') +_basicstr_ml_re = re.compile(r'(?:""?(?!")|[^"\\\000-\011\013-\037])*') _litstr_re = re.compile(r"[^'\000\010\012-\037]*") _litstr_ml_re = re.compile(r"(?:(?:|'|'')(?:[^'\000-\010\013-\037]))*") def _p_value(s, object_pairs_hook): @@ -251,24 +254,9 @@ def _p_value(s, object_pairs_hook): s.expect('\'') return 'str', r, r, pos - if s.consume_re(_datetime_re): + if s.consume_re(rfc3339_re): m = s.last() - s0 = m.group(0) - r = map(int, m.groups()[:6]) - if m.group(7): - micro = float(m.group(7)) - else: - micro = 0 - - if m.group(8): - g = int(m.group(8), 10) * 60 + int(m.group(9), 10) - tz = _TimeZone(datetime.timedelta(0, g * 60)) - else: - tz = _TimeZone(datetime.timedelta(0, 0)) - - y, m, d, H, M, S = r - dt = datetime.datetime(y, m, d, H, M, S, int(micro * 1000000), tz) - return 'datetime', s0, dt, pos + return 'datetime', m.group(0), parse_rfc3339_re(m), pos if s.consume_re(_float_re): m = s.last().group(0) @@ -351,24 +339,3 @@ def _p_toml(s, object_pairs_hook): _p_ews(s) s.expect_eof() return stmts - -class _TimeZone(datetime.tzinfo): - def __init__(self, offset): - self._offset = offset - - def utcoffset(self, dt): - return self._offset - - def dst(self, dt): - return None - - def tzname(self, dt): - m = self._offset.total_seconds() // 60 - if m < 0: - res = '-' - m = -m - else: - res = '+' - h = m // 60 - m = m - h * 60 - return '{}{:.02}{:.02}'.format(res, h, m) diff --git a/src/pip/_vendor/pytoml/test.py b/src/pip/_vendor/pytoml/test.py new file mode 100644 index 000000000..ec8abfc65 --- /dev/null +++ b/src/pip/_vendor/pytoml/test.py @@ -0,0 +1,30 @@ +import datetime +from .utils import format_rfc3339 + +try: + _string_types = (str, unicode) + _int_types = (int, long) +except NameError: + _string_types = str + _int_types = int + +def translate_to_test(v): + if isinstance(v, dict): + return { k: translate_to_test(v) for k, v in v.items() } + if isinstance(v, list): + a = [translate_to_test(x) for x in v] + if v and isinstance(v[0], dict): + return a + else: + return {'type': 'array', 'value': a} + if isinstance(v, datetime.datetime): + return {'type': 'datetime', 'value': format_rfc3339(v)} + if isinstance(v, bool): + return {'type': 'bool', 'value': 'true' if v else 'false'} + if isinstance(v, _int_types): + return {'type': 'integer', 'value': str(v)} + if isinstance(v, float): + return {'type': 'float', 'value': '{:.17}'.format(v)} + if isinstance(v, _string_types): + return {'type': 'string', 'value': v} + raise RuntimeError('unexpected value: {!r}'.format(v)) diff --git a/src/pip/_vendor/pytoml/utils.py b/src/pip/_vendor/pytoml/utils.py new file mode 100644 index 000000000..636a680b0 --- /dev/null +++ b/src/pip/_vendor/pytoml/utils.py @@ -0,0 +1,67 @@ +import datetime +import re + +rfc3339_re = re.compile(r'(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2})(\.\d+)?(?:Z|([+-]\d{2}):(\d{2}))') + +def parse_rfc3339(v): + m = rfc3339_re.match(v) + if not m or m.group(0) != v: + return None + return 
parse_rfc3339_re(m) + +def parse_rfc3339_re(m): + r = map(int, m.groups()[:6]) + if m.group(7): + micro = float(m.group(7)) + else: + micro = 0 + + if m.group(8): + g = int(m.group(8), 10) * 60 + int(m.group(9), 10) + tz = _TimeZone(datetime.timedelta(0, g * 60)) + else: + tz = _TimeZone(datetime.timedelta(0, 0)) + + y, m, d, H, M, S = r + return datetime.datetime(y, m, d, H, M, S, int(micro * 1000000), tz) + + +def format_rfc3339(v): + offs = v.utcoffset() + offs = int(offs.total_seconds()) // 60 if offs is not None else 0 + + if offs == 0: + suffix = 'Z' + else: + if offs > 0: + suffix = '+' + else: + suffix = '-' + offs = -offs + suffix = '{0}{1:02}:{2:02}'.format(suffix, offs // 60, offs % 60) + + if v.microsecond: + return v.strftime('%Y-%m-%dT%H:%M:%S.%f') + suffix + else: + return v.strftime('%Y-%m-%dT%H:%M:%S') + suffix + +class _TimeZone(datetime.tzinfo): + def __init__(self, offset): + self._offset = offset + + def utcoffset(self, dt): + return self._offset + + def dst(self, dt): + return None + + def tzname(self, dt): + m = self._offset.total_seconds() // 60 + if m < 0: + res = '-' + m = -m + else: + res = '+' + h = m // 60 + m = m - h * 60 + return '{}{:.02}{:.02}'.format(res, h, m) diff --git a/src/pip/_vendor/pytoml/writer.py b/src/pip/_vendor/pytoml/writer.py index 6eaf5d76a..73b5089c2 100644 --- a/src/pip/_vendor/pytoml/writer.py +++ b/src/pip/_vendor/pytoml/writer.py @@ -1,5 +1,7 @@ from __future__ import unicode_literals -import io, datetime, math, sys +import io, datetime, math, string, sys + +from .utils import format_rfc3339 if sys.version_info[0] == 3: long = int @@ -39,22 +41,13 @@ def _escape_string(s): return '"' + ''.join(res) + '"' +_key_chars = string.digits + string.ascii_letters + '-_' def _escape_id(s): - if any(not c.isalnum() and c not in '-_' for c in s): + if any(c not in _key_chars for c in s): return _escape_string(s) return s -def _format_list(v): - return '[{0}]'.format(', '.join(_format_value(obj) for obj in v)) - -# Formula from: -# https://docs.python.org/2/library/datetime.html#datetime.timedelta.total_seconds -# Once support for py26 is dropped, this can be replaced by td.total_seconds() -def _total_seconds(td): - return ((td.microseconds - + (td.seconds + td.days * 24 * 3600) * 10**6) / 10.0**6) - def _format_value(v): if isinstance(v, bool): return 'true' if v else 'false' @@ -68,25 +61,11 @@ def _format_value(v): elif isinstance(v, unicode) or isinstance(v, bytes): return _escape_string(v) elif isinstance(v, datetime.datetime): - offs = v.utcoffset() - offs = _total_seconds(offs) // 60 if offs is not None else 0 - - if offs == 0: - suffix = 'Z' - else: - if offs > 0: - suffix = '+' - else: - suffix = '-' - offs = -offs - suffix = '{0}{1:.02}{2:.02}'.format(suffix, offs // 60, offs % 60) - - if v.microsecond: - return v.strftime('%Y-%m-%dT%H:%M:%S.%f') + suffix - else: - return v.strftime('%Y-%m-%dT%H:%M:%S') + suffix + return format_rfc3339(v) elif isinstance(v, list): - return _format_list(v) + return '[{0}]'.format(', '.join(_format_value(obj) for obj in v)) + elif isinstance(v, dict): + return '{{{0}}}'.format(', '.join('{} = {}'.format(_escape_id(k), _format_value(obj)) for k, obj in v.items())) else: raise RuntimeError(v) diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index f23409089..abbe8d694 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -11,7 +11,7 @@ packaging==18.0 pep517==0.5.0 progress==1.4 pyparsing==2.3.1 -pytoml==0.1.19 +pytoml==0.1.20 requests==2.19.1 certifi==2018.11.29 
chardet==3.0.4 From 45e356b1fa6d2a753435d4f072d7b00379d58db4 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Fri, 18 Jan 2019 10:48:32 +0530 Subject: [PATCH 41/69] Update pkg_resources to 40.6.3 (via setuptools) --- news/pkg_resources.vendor | 1 + src/pip/_vendor/pkg_resources/__init__.py | 26 +++++++++++++++++++++-- src/pip/_vendor/vendor.txt | 2 +- 3 files changed, 26 insertions(+), 3 deletions(-) create mode 100644 news/pkg_resources.vendor diff --git a/news/pkg_resources.vendor b/news/pkg_resources.vendor new file mode 100644 index 000000000..d5015732a --- /dev/null +++ b/news/pkg_resources.vendor @@ -0,0 +1 @@ +Update pkg_resources to 40.6.3 (via setuptools) diff --git a/src/pip/_vendor/pkg_resources/__init__.py b/src/pip/_vendor/pkg_resources/__init__.py index 0b432f689..9c4fd8ea1 100644 --- a/src/pip/_vendor/pkg_resources/__init__.py +++ b/src/pip/_vendor/pkg_resources/__init__.py @@ -238,6 +238,9 @@ __all__ = [ 'register_finder', 'register_namespace_handler', 'register_loader_type', 'fixup_namespace_packages', 'get_importer', + # Warnings + 'PkgResourcesDeprecationWarning', + # Deprecated/backward compatibility only 'run_main', 'AvailableDistributions', ] @@ -2228,7 +2231,18 @@ register_namespace_handler(object, null_ns_handler) def normalize_path(filename): """Normalize a file/dir name for comparison purposes""" - return os.path.normcase(os.path.realpath(filename)) + return os.path.normcase(os.path.realpath(os.path.normpath(_cygwin_patch(filename)))) + + +def _cygwin_patch(filename): # pragma: nocover + """ + Contrary to POSIX 2008, on Cygwin, getcwd (3) contains + symlink components. Using + os.path.abspath() works around this limitation. A fix in os.getcwd() + would probably better, in Cygwin even more so, except + that this seems to be by design... + """ + return os.path.abspath(filename) if sys.platform == 'cygwin' else filename def _normalize_cached(filename, _cache={}): @@ -2324,7 +2338,7 @@ class EntryPoint: warnings.warn( "Parameters to load are deprecated. Call .resolve and " ".require separately.", - DeprecationWarning, + PkgResourcesDeprecationWarning, stacklevel=2, ) if require: @@ -3147,3 +3161,11 @@ def _initialize_master_working_set(): # match order list(map(working_set.add_entry, sys.path)) globals().update(locals()) + +class PkgResourcesDeprecationWarning(Warning): + """ + Base class for warning about deprecations in ``pkg_resources`` + + This class is not derived from ``DeprecationWarning``, and as such is + visible by default. 
+ """ diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index abbe8d694..8b8ec3c85 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -18,6 +18,6 @@ requests==2.19.1 idna==2.8 urllib3==1.24.1 retrying==1.3.3 -setuptools==40.4.3 +setuptools==40.6.3 six==1.12.0 webencodings==0.5.1 From 785ecf476a63e00819ef1b14bc8ee758dc9c12cb Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Fri, 18 Jan 2019 10:48:57 +0530 Subject: [PATCH 42/69] Update requests to 2.21.0 --- news/requests.vendor | 1 + src/pip/_vendor/requests/LICENSE | 4 +- src/pip/_vendor/requests/__init__.py | 17 +++---- src/pip/_vendor/requests/__version__.py | 4 +- src/pip/_vendor/requests/adapters.py | 27 +++++----- src/pip/_vendor/requests/api.py | 20 +++++--- src/pip/_vendor/requests/auth.py | 4 +- src/pip/_vendor/requests/compat.py | 3 +- src/pip/_vendor/requests/cookies.py | 31 ++++++------ src/pip/_vendor/requests/help.py | 3 +- src/pip/_vendor/requests/hooks.py | 4 +- src/pip/_vendor/requests/models.py | 19 +++---- src/pip/_vendor/requests/sessions.py | 63 +++++++++++++++++------- src/pip/_vendor/requests/status_codes.py | 2 +- src/pip/_vendor/requests/utils.py | 23 ++++----- src/pip/_vendor/vendor.txt | 2 +- 16 files changed, 132 insertions(+), 95 deletions(-) create mode 100644 news/requests.vendor diff --git a/news/requests.vendor b/news/requests.vendor new file mode 100644 index 000000000..1d22bfe1b --- /dev/null +++ b/news/requests.vendor @@ -0,0 +1 @@ +Update requests to 2.21.0 diff --git a/src/pip/_vendor/requests/LICENSE b/src/pip/_vendor/requests/LICENSE index db78ea69f..841c6023b 100644 --- a/src/pip/_vendor/requests/LICENSE +++ b/src/pip/_vendor/requests/LICENSE @@ -1,10 +1,10 @@ -Copyright 2017 Kenneth Reitz +Copyright 2018 Kenneth Reitz Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at - http://www.apache.org/licenses/LICENSE-2.0 + https://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, diff --git a/src/pip/_vendor/requests/__init__.py b/src/pip/_vendor/requests/__init__.py index 3f3f4f2d3..80c4ce1d2 100644 --- a/src/pip/_vendor/requests/__init__.py +++ b/src/pip/_vendor/requests/__init__.py @@ -22,7 +22,7 @@ usage: ... or POST: >>> payload = dict(key1='value1', key2='value2') - >>> r = requests.post('http://httpbin.org/post', data=payload) + >>> r = requests.post('https://httpbin.org/post', data=payload) >>> print(r.text) { ... @@ -57,10 +57,10 @@ def check_compatibility(urllib3_version, chardet_version): # Check urllib3 for compatibility. major, minor, patch = urllib3_version # noqa: F811 major, minor, patch = int(major), int(minor), int(patch) - # urllib3 >= 1.21.1, <= 1.23 + # urllib3 >= 1.21.1, <= 1.24 assert major == 1 assert minor >= 21 - assert minor <= 23 + assert minor <= 24 # Check chardet for compatibility. major, minor, patch = chardet_version.split('.')[:3] @@ -79,14 +79,14 @@ def _check_cryptography(cryptography_version): return if cryptography_version < [1, 3, 4]: - warning = 'Old version of cryptography ({0}) may cause slowdown.'.format(cryptography_version) + warning = 'Old version of cryptography ({}) may cause slowdown.'.format(cryptography_version) warnings.warn(warning, RequestsDependencyWarning) # Check imported dependencies for compatibility. 
try: check_compatibility(urllib3.__version__, chardet.__version__) except (AssertionError, ValueError): - warnings.warn("urllib3 ({0}) or chardet ({1}) doesn't match a supported " + warnings.warn("urllib3 ({}) or chardet ({}) doesn't match a supported " "version!".format(urllib3.__version__, chardet.__version__), RequestsDependencyWarning) @@ -125,12 +125,7 @@ from .exceptions import ( # Set default logging handler to avoid "No handler found" warnings. import logging -try: # Python 2.7+ - from logging import NullHandler -except ImportError: - class NullHandler(logging.Handler): - def emit(self, record): - pass +from logging import NullHandler logging.getLogger(__name__).addHandler(NullHandler()) diff --git a/src/pip/_vendor/requests/__version__.py b/src/pip/_vendor/requests/__version__.py index ef61ec0f5..f5b5d0367 100644 --- a/src/pip/_vendor/requests/__version__.py +++ b/src/pip/_vendor/requests/__version__.py @@ -5,8 +5,8 @@ __title__ = 'requests' __description__ = 'Python HTTP for Humans.' __url__ = 'http://python-requests.org' -__version__ = '2.19.1' -__build__ = 0x021901 +__version__ = '2.21.0' +__build__ = 0x022100 __author__ = 'Kenneth Reitz' __author_email__ = 'me@kennethreitz.org' __license__ = 'Apache 2.0' diff --git a/src/pip/_vendor/requests/adapters.py b/src/pip/_vendor/requests/adapters.py index f6f3f9965..c30e7c92d 100644 --- a/src/pip/_vendor/requests/adapters.py +++ b/src/pip/_vendor/requests/adapters.py @@ -26,6 +26,7 @@ from pip._vendor.urllib3.exceptions import ProtocolError from pip._vendor.urllib3.exceptions import ReadTimeoutError from pip._vendor.urllib3.exceptions import SSLError as _SSLError from pip._vendor.urllib3.exceptions import ResponseError +from pip._vendor.urllib3.exceptions import LocationValueError from .models import Response from .compat import urlparse, basestring @@ -35,7 +36,8 @@ from .utils import (DEFAULT_CA_BUNDLE_PATH, extract_zipped_paths, from .structures import CaseInsensitiveDict from .cookies import extract_cookies_to_jar from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError, - ProxyError, RetryError, InvalidSchema, InvalidProxyURL) + ProxyError, RetryError, InvalidSchema, InvalidProxyURL, + InvalidURL) from .auth import _basic_auth_str try: @@ -127,8 +129,7 @@ class HTTPAdapter(BaseAdapter): self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block) def __getstate__(self): - return dict((attr, getattr(self, attr, None)) for attr in - self.__attrs__) + return {attr: getattr(self, attr, None) for attr in self.__attrs__} def __setstate__(self, state): # Can't handle by adding 'proxy_manager' to self.__attrs__ because @@ -224,7 +225,7 @@ class HTTPAdapter(BaseAdapter): if not cert_loc or not os.path.exists(cert_loc): raise IOError("Could not find a suitable TLS CA certificate bundle, " - "invalid path: {0}".format(cert_loc)) + "invalid path: {}".format(cert_loc)) conn.cert_reqs = 'CERT_REQUIRED' @@ -246,10 +247,10 @@ class HTTPAdapter(BaseAdapter): conn.key_file = None if conn.cert_file and not os.path.exists(conn.cert_file): raise IOError("Could not find the TLS certificate file, " - "invalid path: {0}".format(conn.cert_file)) + "invalid path: {}".format(conn.cert_file)) if conn.key_file and not os.path.exists(conn.key_file): raise IOError("Could not find the TLS key file, " - "invalid path: {0}".format(conn.key_file)) + "invalid path: {}".format(conn.key_file)) def build_response(self, req, resp): """Builds a :class:`Response ` object from a urllib3 @@ -378,7 +379,7 @@ class HTTPAdapter(BaseAdapter): 
when subclassing the :class:`HTTPAdapter `. - :param proxies: The url of the proxy being used for this request. + :param proxy: The url of the proxy being used for this request. :rtype: dict """ headers = {} @@ -407,7 +408,10 @@ class HTTPAdapter(BaseAdapter): :rtype: requests.Response """ - conn = self.get_connection(request.url, proxies) + try: + conn = self.get_connection(request.url, proxies) + except LocationValueError as e: + raise InvalidURL(e, request=request) self.cert_verify(conn, request.url, verify, cert) url = self.request_url(request, proxies) @@ -421,7 +425,7 @@ class HTTPAdapter(BaseAdapter): timeout = TimeoutSauce(connect=connect, read=read) except ValueError as e: # this may raise a string formatting error. - err = ("Invalid timeout {0}. Pass a (connect, read) " + err = ("Invalid timeout {}. Pass a (connect, read) " "timeout tuple, or a single float to set " "both timeouts to the same value".format(timeout)) raise ValueError(err) @@ -471,11 +475,10 @@ class HTTPAdapter(BaseAdapter): # Receive the response from the server try: - # For Python 2.7+ versions, use buffering of HTTP - # responses + # For Python 2.7, use buffering of HTTP responses r = low_conn.getresponse(buffering=True) except TypeError: - # For compatibility with Python 2.6 versions and back + # For compatibility with Python 3.3+ r = low_conn.getresponse() resp = HTTPResponse.from_httplib( diff --git a/src/pip/_vendor/requests/api.py b/src/pip/_vendor/requests/api.py index a2cc84d76..abada96d4 100644 --- a/src/pip/_vendor/requests/api.py +++ b/src/pip/_vendor/requests/api.py @@ -18,8 +18,10 @@ def request(method, url, **kwargs): :param method: method for the new :class:`Request` object. :param url: URL for the new :class:`Request` object. - :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`. - :param data: (optional) Dictionary or list of tuples ``[(key, value)]`` (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`. + :param params: (optional) Dictionary, list of tuples or bytes to send + in the body of the :class:`Request`. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`. :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`. @@ -47,7 +49,7 @@ def request(method, url, **kwargs): Usage:: >>> import requests - >>> req = requests.request('GET', 'http://httpbin.org/get') + >>> req = requests.request('GET', 'https://httpbin.org/get') """ @@ -62,7 +64,8 @@ def get(url, params=None, **kwargs): r"""Sends a GET request. :param url: URL for the new :class:`Request` object. - :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`. + :param params: (optional) Dictionary, list of tuples or bytes to send + in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. :return: :class:`Response ` object :rtype: requests.Response @@ -102,7 +105,8 @@ def post(url, data=None, json=None, **kwargs): r"""Sends a POST request. :param url: URL for the new :class:`Request` object. - :param data: (optional) Dictionary (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`. 
+ :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. :param json: (optional) json data to send in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. :return: :class:`Response ` object @@ -116,7 +120,8 @@ def put(url, data=None, **kwargs): r"""Sends a PUT request. :param url: URL for the new :class:`Request` object. - :param data: (optional) Dictionary (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. :param json: (optional) json data to send in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. :return: :class:`Response ` object @@ -130,7 +135,8 @@ def patch(url, data=None, **kwargs): r"""Sends a PATCH request. :param url: URL for the new :class:`Request` object. - :param data: (optional) Dictionary (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. :param json: (optional) json data to send in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. :return: :class:`Response ` object diff --git a/src/pip/_vendor/requests/auth.py b/src/pip/_vendor/requests/auth.py index 4ae459474..bdde51c7f 100644 --- a/src/pip/_vendor/requests/auth.py +++ b/src/pip/_vendor/requests/auth.py @@ -38,7 +38,7 @@ def _basic_auth_str(username, password): if not isinstance(username, basestring): warnings.warn( "Non-string usernames will no longer be supported in Requests " - "3.0.0. Please convert the object you've passed in ({0!r}) to " + "3.0.0. Please convert the object you've passed in ({!r}) to " "a string or bytes object in the near future to avoid " "problems.".format(username), category=DeprecationWarning, @@ -48,7 +48,7 @@ def _basic_auth_str(username, password): if not isinstance(password, basestring): warnings.warn( "Non-string passwords will no longer be supported in Requests " - "3.0.0. Please convert the object you've passed in ({0!r}) to " + "3.0.0. Please convert the object you've passed in ({!r}) to " "a string or bytes object in the near future to avoid " "problems.".format(password), category=DeprecationWarning, diff --git a/src/pip/_vendor/requests/compat.py b/src/pip/_vendor/requests/compat.py index ec5d30585..6a86893dc 100644 --- a/src/pip/_vendor/requests/compat.py +++ b/src/pip/_vendor/requests/compat.py @@ -47,9 +47,8 @@ if is_py2: import cookielib from Cookie import Morsel from StringIO import StringIO - from collections import Callable, Mapping, MutableMapping + from collections import Callable, Mapping, MutableMapping, OrderedDict - from pip._vendor.urllib3.packages.ordered_dict import OrderedDict builtin_str = str bytes = str diff --git a/src/pip/_vendor/requests/cookies.py b/src/pip/_vendor/requests/cookies.py index 50883a84f..56fccd9c2 100644 --- a/src/pip/_vendor/requests/cookies.py +++ b/src/pip/_vendor/requests/cookies.py @@ -444,20 +444,21 @@ def create_cookie(name, value, **kwargs): By default, the pair of `name` and `value` will be set for the domain '' and sent on every request (this is sometimes called a "supercookie"). 
""" - result = dict( - version=0, - name=name, - value=value, - port=None, - domain='', - path='/', - secure=False, - expires=None, - discard=True, - comment=None, - comment_url=None, - rest={'HttpOnly': None}, - rfc2109=False,) + result = { + 'version': 0, + 'name': name, + 'value': value, + 'port': None, + 'domain': '', + 'path': '/', + 'secure': False, + 'expires': None, + 'discard': True, + 'comment': None, + 'comment_url': None, + 'rest': {'HttpOnly': None}, + 'rfc2109': False, + } badargs = set(kwargs) - set(result) if badargs: @@ -511,6 +512,7 @@ def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True): :param cookiejar: (optional) A cookiejar to add the cookies to. :param overwrite: (optional) If False, will not replace cookies already in the jar with new ones. + :rtype: CookieJar """ if cookiejar is None: cookiejar = RequestsCookieJar() @@ -529,6 +531,7 @@ def merge_cookies(cookiejar, cookies): :param cookiejar: CookieJar object to add the cookies to. :param cookies: Dictionary or CookieJar object to be added. + :rtype: CookieJar """ if not isinstance(cookiejar, cookielib.CookieJar): raise ValueError('You can only merge into CookieJar') diff --git a/src/pip/_vendor/requests/help.py b/src/pip/_vendor/requests/help.py index df1b4ebcf..3c3072ba1 100644 --- a/src/pip/_vendor/requests/help.py +++ b/src/pip/_vendor/requests/help.py @@ -89,8 +89,7 @@ def info(): 'version': getattr(idna, '__version__', ''), } - # OPENSSL_VERSION_NUMBER doesn't exist in the Python 2.6 ssl module. - system_ssl = getattr(ssl, 'OPENSSL_VERSION_NUMBER', None) + system_ssl = ssl.OPENSSL_VERSION_NUMBER system_ssl_info = { 'version': '%x' % system_ssl if system_ssl is not None else '' } diff --git a/src/pip/_vendor/requests/hooks.py b/src/pip/_vendor/requests/hooks.py index 32b32de75..7a51f212c 100644 --- a/src/pip/_vendor/requests/hooks.py +++ b/src/pip/_vendor/requests/hooks.py @@ -15,14 +15,14 @@ HOOKS = ['response'] def default_hooks(): - return dict((event, []) for event in HOOKS) + return {event: [] for event in HOOKS} # TODO: response is the only one def dispatch_hook(key, hooks, hook_data, **kwargs): """Dispatches a hook dictionary on a given piece of data.""" - hooks = hooks or dict() + hooks = hooks or {} hooks = hooks.get(key) if hooks: if hasattr(hooks, '__call__'): diff --git a/src/pip/_vendor/requests/models.py b/src/pip/_vendor/requests/models.py index 4230535d2..083995747 100644 --- a/src/pip/_vendor/requests/models.py +++ b/src/pip/_vendor/requests/models.py @@ -204,9 +204,13 @@ class Request(RequestHooksMixin): :param url: URL to send. :param headers: dictionary of headers to send. :param files: dictionary of {filename: fileobject} files to multipart upload. - :param data: the body to attach to the request. If a dictionary is provided, form-encoding will take place. + :param data: the body to attach to the request. If a dictionary or + list of tuples ``[(key, value)]`` is provided, form-encoding will + take place. :param json: json for the body to attach to the request (if files or data is not specified). - :param params: dictionary of URL parameters to append to the URL. + :param params: URL parameters to append to the URL. If a dictionary or + list of tuples ``[(key, value)]`` is provided, form-encoding will + take place. :param auth: Auth handler or (user, pass) tuple. :param cookies: dictionary or CookieJar of cookies to attach to this request. :param hooks: dictionary of callback hooks, for internal usage. 
@@ -214,7 +218,7 @@ class Request(RequestHooksMixin): Usage:: >>> import requests - >>> req = requests.Request('GET', 'http://httpbin.org/get') + >>> req = requests.Request('GET', 'https://httpbin.org/get') >>> req.prepare() """ @@ -274,7 +278,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): Usage:: >>> import requests - >>> req = requests.Request('GET', 'http://httpbin.org/get') + >>> req = requests.Request('GET', 'https://httpbin.org/get') >>> r = req.prepare() @@ -648,10 +652,7 @@ class Response(object): if not self._content_consumed: self.content - return dict( - (attr, getattr(self, attr, None)) - for attr in self.__attrs__ - ) + return {attr: getattr(self, attr, None) for attr in self.__attrs__} def __setstate__(self, state): for name, value in state.items(): @@ -780,7 +781,7 @@ class Response(object): return chunks - def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=None, delimiter=None): + def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=False, delimiter=None): """Iterates over the response data, one line at a time. When stream=True is set on the request, this avoids reading the content at once into memory for large responses. diff --git a/src/pip/_vendor/requests/sessions.py b/src/pip/_vendor/requests/sessions.py index ba135268a..d73d700fa 100644 --- a/src/pip/_vendor/requests/sessions.py +++ b/src/pip/_vendor/requests/sessions.py @@ -19,7 +19,7 @@ from .cookies import ( from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT from .hooks import default_hooks, dispatch_hook from ._internal_utils import to_native_string -from .utils import to_key_val_list, default_headers +from .utils import to_key_val_list, default_headers, DEFAULT_PORTS from .exceptions import ( TooManyRedirects, InvalidSchema, ChunkedEncodingError, ContentDecodingError) @@ -115,6 +115,31 @@ class SessionRedirectMixin(object): return to_native_string(location, 'utf8') return None + def should_strip_auth(self, old_url, new_url): + """Decide whether Authorization header should be removed when redirecting""" + old_parsed = urlparse(old_url) + new_parsed = urlparse(new_url) + if old_parsed.hostname != new_parsed.hostname: + return True + # Special case: allow http -> https redirect when using the standard + # ports. This isn't specified by RFC 7235, but is kept to avoid + # breaking backwards compatibility with older versions of requests + # that allowed any redirects on the same host. + if (old_parsed.scheme == 'http' and old_parsed.port in (80, None) + and new_parsed.scheme == 'https' and new_parsed.port in (443, None)): + return False + + # Handle default port usage corresponding to scheme. + changed_port = old_parsed.port != new_parsed.port + changed_scheme = old_parsed.scheme != new_parsed.scheme + default_port = (DEFAULT_PORTS.get(old_parsed.scheme, None), None) + if (not changed_scheme and old_parsed.port in default_port + and new_parsed.port in default_port): + return False + + # Standard case: root URI must match + return changed_port or changed_scheme + def resolve_redirects(self, resp, req, stream=False, timeout=None, verify=True, cert=None, proxies=None, yield_requests=False, **adapter_kwargs): """Receives a Response. 
Returns a generator of Responses or Requests.""" @@ -236,14 +261,10 @@ class SessionRedirectMixin(object): headers = prepared_request.headers url = prepared_request.url - if 'Authorization' in headers: + if 'Authorization' in headers and self.should_strip_auth(response.request.url, url): # If we get redirected to a new host, we should strip out any # authentication headers. - original_parsed = urlparse(response.request.url) - redirect_parsed = urlparse(url) - - if (original_parsed.hostname != redirect_parsed.hostname): - del headers['Authorization'] + del headers['Authorization'] # .netrc might have more auth for us on our new host. new_auth = get_netrc_auth(url) if self.trust_env else None @@ -299,7 +320,7 @@ class SessionRedirectMixin(object): """ method = prepared_request.method - # http://tools.ietf.org/html/rfc7231#section-6.4.4 + # https://tools.ietf.org/html/rfc7231#section-6.4.4 if response.status_code == codes.see_other and method != 'HEAD': method = 'GET' @@ -325,13 +346,13 @@ class Session(SessionRedirectMixin): >>> import requests >>> s = requests.Session() - >>> s.get('http://httpbin.org/get') + >>> s.get('https://httpbin.org/get') Or as a context manager:: >>> with requests.Session() as s: - >>> s.get('http://httpbin.org/get') + >>> s.get('https://httpbin.org/get') """ @@ -453,8 +474,8 @@ class Session(SessionRedirectMixin): :param url: URL for the new :class:`Request` object. :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`. - :param data: (optional) Dictionary, bytes, or file-like object to send - in the body of the :class:`Request`. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. :param json: (optional) json to send in the body of the :class:`Request`. :param headers: (optional) Dictionary of HTTP Headers to send with the @@ -550,7 +571,8 @@ class Session(SessionRedirectMixin): r"""Sends a POST request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. - :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. :param json: (optional) json to send in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. :rtype: requests.Response @@ -562,7 +584,8 @@ class Session(SessionRedirectMixin): r"""Sends a PUT request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. - :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. :rtype: requests.Response """ @@ -573,7 +596,8 @@ class Session(SessionRedirectMixin): r"""Sends a PATCH request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. - :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. 
:rtype: requests.Response """ @@ -723,7 +747,7 @@ class Session(SessionRedirectMixin): self.adapters[key] = self.adapters.pop(key) def __getstate__(self): - state = dict((attr, getattr(self, attr, None)) for attr in self.__attrs__) + state = {attr: getattr(self, attr, None) for attr in self.__attrs__} return state def __setstate__(self, state): @@ -735,7 +759,12 @@ def session(): """ Returns a :class:`Session` for context-management. + .. deprecated:: 1.0.0 + + This method has been deprecated since version 1.0.0 and is only kept for + backwards compatibility. New code should use :class:`~requests.sessions.Session` + to create a session. This may be removed at a future date. + :rtype: Session """ - return Session() diff --git a/src/pip/_vendor/requests/status_codes.py b/src/pip/_vendor/requests/status_codes.py index ff462c6c6..813e8c4e6 100644 --- a/src/pip/_vendor/requests/status_codes.py +++ b/src/pip/_vendor/requests/status_codes.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -""" +r""" The ``codes`` object defines a mapping from common names for HTTP statuses to their numerical codes, accessible either as attributes or as dictionary items. diff --git a/src/pip/_vendor/requests/utils.py b/src/pip/_vendor/requests/utils.py index 431f6be07..8170a8d2c 100644 --- a/src/pip/_vendor/requests/utils.py +++ b/src/pip/_vendor/requests/utils.py @@ -38,6 +38,8 @@ NETRC_FILES = ('.netrc', '_netrc') DEFAULT_CA_BUNDLE_PATH = certs.where() +DEFAULT_PORTS = {'http': 80, 'https': 443} + if sys.platform == 'win32': # provide a proxy_bypass version on Windows without DNS lookups @@ -173,10 +175,10 @@ def get_netrc_auth(url, raise_errors=False): for f in NETRC_FILES: try: - loc = os.path.expanduser('~/{0}'.format(f)) + loc = os.path.expanduser('~/{}'.format(f)) except KeyError: # os.path.expanduser can fail when $HOME is undefined and - # getpwuid fails. See http://bugs.python.org/issue20164 & + # getpwuid fails. See https://bugs.python.org/issue20164 & # https://github.com/requests/requests/issues/1846 return @@ -264,7 +266,7 @@ def from_key_val_list(value): >>> from_key_val_list([('key', 'val')]) OrderedDict([('key', 'val')]) >>> from_key_val_list('string') - ValueError: need more than 1 value to unpack + ValueError: cannot encode objects that are not 2-tuples >>> from_key_val_list({'key': 'val'}) OrderedDict([('key', 'val')]) @@ -466,7 +468,7 @@ def _parse_content_type_header(header): if index_of_equals != -1: key = param[:index_of_equals].strip(items_to_strip) value = param[index_of_equals + 1:].strip(items_to_strip) - params_dict[key] = value + params_dict[key.lower()] = value return content_type, params_dict @@ -706,6 +708,10 @@ def should_bypass_proxies(url, no_proxy): no_proxy = get_proxy('no_proxy') parsed = urlparse(url) + if parsed.hostname is None: + # URLs don't always have hostnames, e.g. file:/// urls. + return True + if no_proxy: # We need to check whether we match here. We need to see if we match # the end of the hostname, both with and without the port. @@ -725,7 +731,7 @@ def should_bypass_proxies(url, no_proxy): else: host_with_port = parsed.hostname if parsed.port: - host_with_port += ':{0}'.format(parsed.port) + host_with_port += ':{}'.format(parsed.port) for host in no_proxy: if parsed.hostname.endswith(host) or host_with_port.endswith(host): @@ -733,13 +739,8 @@ def should_bypass_proxies(url, no_proxy): # to apply the proxies on this URL. return True - # If the system proxy settings indicate that this URL should be bypassed, - # don't proxy. 
- # The proxy_bypass function is incredibly buggy on OS X in early versions - # of Python 2.6, so allow this call to fail. Only catch the specific - # exceptions we've seen, though: this call failing in other ways can reveal - # legitimate problems. with set_environ('no_proxy', no_proxy_arg): + # parsed.hostname can be `None` in cases such as a file URI. try: bypass = proxy_bypass(parsed.hostname) except (TypeError, socket.gaierror): diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 8b8ec3c85..6a6f6f6b0 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -12,7 +12,7 @@ pep517==0.5.0 progress==1.4 pyparsing==2.3.1 pytoml==0.1.20 -requests==2.19.1 +requests==2.21.0 certifi==2018.11.29 chardet==3.0.4 idna==2.8 From 07b318ff08eb09d3eb2362143e0a1abd7941fc4b Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Sun, 20 Jan 2019 00:04:56 +0530 Subject: [PATCH 43/69] Update license year to 2019 --- LICENSE.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/LICENSE.txt b/LICENSE.txt index d3379faca..737fec5c5 100644 --- a/LICENSE.txt +++ b/LICENSE.txt @@ -1,4 +1,4 @@ -Copyright (c) 2008-2018 The pip developers (see AUTHORS.txt file) +Copyright (c) 2008-2019 The pip developers (see AUTHORS.txt file) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the From 71dbc9c3d51bee333acf43143dd3d44ad0f37450 Mon Sep 17 00:00:00 2001 From: Donald Stufft Date: Sat, 19 Jan 2019 14:48:55 -0500 Subject: [PATCH 44/69] Warn on Python 2.7 --- news/6148.removal | 2 ++ src/pip/_internal/cli/base_command.py | 8 ++++++++ 2 files changed, 10 insertions(+) create mode 100644 news/6148.removal diff --git a/news/6148.removal b/news/6148.removal new file mode 100644 index 000000000..1be44f454 --- /dev/null +++ b/news/6148.removal @@ -0,0 +1,2 @@ +Start printing a warning for Python 2.7 to warn of impending Python 2.7 End-of-life and +prompt users to start migrating to Python 3. diff --git a/src/pip/_internal/cli/base_command.py b/src/pip/_internal/cli/base_command.py index e1f45826c..648b7f7cc 100644 --- a/src/pip/_internal/cli/base_command.py +++ b/src/pip/_internal/cli/base_command.py @@ -143,6 +143,14 @@ class Command(object): replacement=None, gone_in='19.2', ) + elif sys.version_info[:2] == (2, 7): + deprecated( + "Python 2.7 will reach the end of it's life on January 1st, 2020. " + "Please upgrade your Python as Python 2.7 won't be maintained after " + "that date. A future version of pip will drop support for Python 2.7.", + replacement=None, + gone_in=None, + ) # TODO: Try to get these passing down from the command? # without resorting to os.environ to hold these. 
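The branch added above relies on pip's internal deprecated() helper, called here
with gone_in=None so the warning carries no removal deadline yet. As a rough
standard-library-only sketch of the same idea (an illustration, not the code pip
ships), the check amounts to:

    import sys
    import warnings

    if sys.version_info[:2] == (2, 7):
        # Warn that Python 2.7 is approaching end of life.
        warnings.warn(
            "Python 2.7 will reach the end of its life on January 1st, 2020. "
            "Please upgrade your Python as Python 2.7 won't be maintained "
            "after that date. A future version of pip will drop support for "
            "Python 2.7.",
            DeprecationWarning,
        )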
From a44579ca2dcf4498d71654a3a0ccedec9292a128 Mon Sep 17 00:00:00 2001 From: Donald Stufft Date: Sat, 19 Jan 2019 15:01:33 -0500 Subject: [PATCH 45/69] Fix tests for deprecated Pythons --- tests/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index 1b51fc8d3..c2d8fc328 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -349,4 +349,4 @@ def in_memory_pip(): @pytest.fixture def deprecated_python(): """Used to indicate wheither pip deprecated this python version""" - return sys.version_info[:2] == (3, 4) + return sys.version_info[:2] in [(3, 4), (2, 7)] From f3b9cf2e8f03b22c4e9b2d5754d69cf2a55b8896 Mon Sep 17 00:00:00 2001 From: Donald Stufft Date: Sat, 19 Jan 2019 19:09:04 -0500 Subject: [PATCH 46/69] fix grammar, line lengths --- src/pip/_internal/cli/base_command.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/pip/_internal/cli/base_command.py b/src/pip/_internal/cli/base_command.py index 648b7f7cc..a4b6aa1ef 100644 --- a/src/pip/_internal/cli/base_command.py +++ b/src/pip/_internal/cli/base_command.py @@ -145,9 +145,10 @@ class Command(object): ) elif sys.version_info[:2] == (2, 7): deprecated( - "Python 2.7 will reach the end of it's life on January 1st, 2020. " - "Please upgrade your Python as Python 2.7 won't be maintained after " - "that date. A future version of pip will drop support for Python 2.7.", + "Python 2.7 will reach the end of its life on January 1st, " + "2020. Please upgrade your Python as Python 2.7 won't be " + "maintained after that date. A future version of pip will " + "drop support for Python 2.7.", replacement=None, gone_in=None, ) From 2a90808387982a17d8c8bb9776b9acafcd528a75 Mon Sep 17 00:00:00 2001 From: Ami Fischman Date: Sun, 20 Jan 2019 03:02:22 -0800 Subject: [PATCH 47/69] Prefix user_log (--log) entries with timestamp (#6142) Why? Eases post-facto analysis of time spent in different phases of pip operation. Historical note: https://github.com/pypa/pip/commit/767d11e49cb916e2d4637421d524efcb8d02ae8d#diff-b670e3b192038c9ffe810c1a12c0c51fL219 made it so that pip invocations emit zero timestamp information to the log file. Prior to that each pip invocation's start time was written out (search that commit's diff for [strftime]). Result: https://gist.github.com/fischman/f570886219de5c64a3b695300195c70a Resolves https://github.com/pypa/pip/issues/6141 --- news/6141.feature | 1 + src/pip/_internal/utils/logging.py | 24 ++++++++++++++-- tests/unit/test_base_command.py | 28 +++++++++++++++++-- tests/unit/test_logging.py | 45 ++++++++++++++++++++++++++++++ 4 files changed, 92 insertions(+), 6 deletions(-) create mode 100644 news/6141.feature create mode 100644 tests/unit/test_logging.py diff --git a/news/6141.feature b/news/6141.feature new file mode 100644 index 000000000..de26edf7d --- /dev/null +++ b/news/6141.feature @@ -0,0 +1 @@ +Prefix pip's ``--log`` file lines with their timestamp. diff --git a/src/pip/_internal/utils/logging.py b/src/pip/_internal/utils/logging.py index d9b954144..bc8b5fbd7 100644 --- a/src/pip/_internal/utils/logging.py +++ b/src/pip/_internal/utils/logging.py @@ -44,15 +44,28 @@ def get_indentation(): class IndentingFormatter(logging.Formatter): + def __init__(self, *args, **kwargs): + """ + A logging.Formatter obeying containing indent_log contexts. + + :param add_timestamp: A bool indicating output lines should be prefixed + with their record's timestamp. 
+ """ + self.add_timestamp = kwargs.pop("add_timestamp", False) + super(IndentingFormatter, self).__init__(*args, **kwargs) def format(self, record): """ Calls the standard formatter, but will indent all of the log messages by our current indentation level. """ - formatted = logging.Formatter.format(self, record) + formatted = super(IndentingFormatter, self).format(record) + prefix = '' + if self.add_timestamp: + prefix = self.formatTime(record, "%Y-%m-%dT%H:%M:%S ") + prefix += " " * get_indentation() formatted = "".join([ - (" " * get_indentation()) + line + prefix + line for line in formatted.splitlines(True) ]) return formatted @@ -186,6 +199,11 @@ def setup_logging(verbosity, no_color, user_log_file): "()": IndentingFormatter, "format": "%(message)s", }, + "indent_with_timestamp": { + "()": IndentingFormatter, + "format": "%(message)s", + "add_timestamp": True, + }, }, "handlers": { "console": { @@ -208,7 +226,7 @@ def setup_logging(verbosity, no_color, user_log_file): "class": handler_classes["file"], "filename": additional_log_file, "delay": True, - "formatter": "indent", + "formatter": "indent_with_timestamp", }, }, "root": { diff --git a/tests/unit/test_base_command.py b/tests/unit/test_base_command.py index 1cd7277c4..c64143629 100644 --- a/tests/unit/test_base_command.py +++ b/tests/unit/test_base_command.py @@ -1,4 +1,6 @@ import logging +import os +import time from pip._internal.cli.base_command import Command @@ -38,6 +40,26 @@ class Test_base_command_logging(object): options """ + def setup(self): + self.old_time = time.time + time.time = lambda: 1547704837.4 + # Robustify the tests below to the ambient timezone by setting it + # explicitly here. + self.old_tz = getattr(os.environ, 'TZ', None) + os.environ['TZ'] = 'UTC' + # time.tzset() is not implemented on some platforms (notably, Windows). + if hasattr(time, 'tzset'): + time.tzset() + + def teardown(self): + if self.old_tz: + os.environ['TZ'] = self.old_tz + else: + del os.environ['TZ'] + if 'tzset' in dir(time): + time.tzset() + time.time = self.old_time + def test_log_command_success(self, tmpdir): """ Test the --log option logs when command succeeds @@ -46,7 +68,7 @@ class Test_base_command_logging(object): log_path = tmpdir.join('log') cmd.main(['fake', '--log', log_path]) with open(log_path) as f: - assert 'fake' == f.read().strip()[:4] + assert f.read().rstrip() == '2019-01-17T06:00:37 fake' def test_log_command_error(self, tmpdir): """ @@ -56,7 +78,7 @@ class Test_base_command_logging(object): log_path = tmpdir.join('log') cmd.main(['fake', '--log', log_path]) with open(log_path) as f: - assert 'fake' == f.read().strip()[:4] + assert f.read().startswith('2019-01-17T06:00:37 fake') def test_log_file_command_error(self, tmpdir): """ @@ -66,7 +88,7 @@ class Test_base_command_logging(object): log_file_path = tmpdir.join('log_file') cmd.main(['fake', '--log-file', log_file_path]) with open(log_file_path) as f: - assert 'fake' == f.read().strip()[:4] + assert f.read().startswith('2019-01-17T06:00:37 fake') def test_unicode_messages(self, tmpdir): """ diff --git a/tests/unit/test_logging.py b/tests/unit/test_logging.py new file mode 100644 index 000000000..e2f407e36 --- /dev/null +++ b/tests/unit/test_logging.py @@ -0,0 +1,45 @@ +import logging +import os +import time + +from pip._internal.utils.logging import IndentingFormatter + + +class TestIndentingFormatter(object): + """ + Test `pip._internal.utils.logging.IndentingFormatter`. 
+ """ + + def setup(self): + # Robustify the tests below to the ambient timezone by setting it + # explicitly here. + self.old_tz = getattr(os.environ, 'TZ', None) + os.environ['TZ'] = 'UTC' + # time.tzset() is not implemented on some platforms (notably, Windows). + if hasattr(time, 'tzset'): + time.tzset() + + def teardown(self): + if self.old_tz: + os.environ['TZ'] = self.old_tz + else: + del os.environ['TZ'] + if 'tzset' in dir(time): + time.tzset() + + def test_format(self, tmpdir): + record = logging.makeLogRecord(dict( + created=1547704837.4, + msg='hello\nworld', + )) + f = IndentingFormatter(fmt="%(message)s") + assert f.format(record) == 'hello\nworld' + + def test_format_with_timestamp(self, tmpdir): + record = logging.makeLogRecord(dict( + created=1547704837.4, + msg='hello\nworld', + )) + f = IndentingFormatter(fmt="%(message)s", add_timestamp=True) + expected = '2019-01-17T06:00:37 hello\n2019-01-17T06:00:37 world' + assert f.format(record) == expected From c90a3ff00327b1a5357c1f816a0218ac94fc3f4f Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Sun, 20 Jan 2019 16:52:52 +0530 Subject: [PATCH 48/69] Update packaging to 19.0 --- news/packaging.vendor | 1 + src/pip/_vendor/packaging/__about__.py | 14 +++- src/pip/_vendor/packaging/__init__.py | 20 ++++- src/pip/_vendor/packaging/_compat.py | 7 +- src/pip/_vendor/packaging/_structures.py | 2 - src/pip/_vendor/packaging/markers.py | 89 +++++++++++------------ src/pip/_vendor/packaging/requirements.py | 34 +++++---- src/pip/_vendor/packaging/specifiers.py | 67 ++++++----------- src/pip/_vendor/packaging/utils.py | 8 +- src/pip/_vendor/packaging/version.py | 49 ++++--------- src/pip/_vendor/vendor.txt | 2 +- 11 files changed, 131 insertions(+), 162 deletions(-) create mode 100644 news/packaging.vendor diff --git a/news/packaging.vendor b/news/packaging.vendor new file mode 100644 index 000000000..a64a114d4 --- /dev/null +++ b/news/packaging.vendor @@ -0,0 +1 @@ +Update packaging to 19.0 diff --git a/src/pip/_vendor/packaging/__about__.py b/src/pip/_vendor/packaging/__about__.py index 21fc6ce3e..7481c9e29 100644 --- a/src/pip/_vendor/packaging/__about__.py +++ b/src/pip/_vendor/packaging/__about__.py @@ -4,18 +4,24 @@ from __future__ import absolute_import, division, print_function __all__ = [ - "__title__", "__summary__", "__uri__", "__version__", "__author__", - "__email__", "__license__", "__copyright__", + "__title__", + "__summary__", + "__uri__", + "__version__", + "__author__", + "__email__", + "__license__", + "__copyright__", ] __title__ = "packaging" __summary__ = "Core utilities for Python packages" __uri__ = "https://github.com/pypa/packaging" -__version__ = "18.0" +__version__ = "19.0" __author__ = "Donald Stufft and individual contributors" __email__ = "donald@stufft.io" __license__ = "BSD or Apache License, Version 2.0" -__copyright__ = "Copyright 2014-2018 %s" % __author__ +__copyright__ = "Copyright 2014-2019 %s" % __author__ diff --git a/src/pip/_vendor/packaging/__init__.py b/src/pip/_vendor/packaging/__init__.py index 5ee622020..a0cf67df5 100644 --- a/src/pip/_vendor/packaging/__init__.py +++ b/src/pip/_vendor/packaging/__init__.py @@ -4,11 +4,23 @@ from __future__ import absolute_import, division, print_function from .__about__ import ( - __author__, __copyright__, __email__, __license__, __summary__, __title__, - __uri__, __version__ + __author__, + __copyright__, + __email__, + __license__, + __summary__, + __title__, + __uri__, + __version__, ) __all__ = [ - "__title__", "__summary__", "__uri__", 
"__version__", "__author__", - "__email__", "__license__", "__copyright__", + "__title__", + "__summary__", + "__uri__", + "__version__", + "__author__", + "__email__", + "__license__", + "__copyright__", ] diff --git a/src/pip/_vendor/packaging/_compat.py b/src/pip/_vendor/packaging/_compat.py index 210bb80b7..25da473c1 100644 --- a/src/pip/_vendor/packaging/_compat.py +++ b/src/pip/_vendor/packaging/_compat.py @@ -12,9 +12,9 @@ PY3 = sys.version_info[0] == 3 # flake8: noqa if PY3: - string_types = str, + string_types = (str,) else: - string_types = basestring, + string_types = (basestring,) def with_metaclass(meta, *bases): @@ -27,4 +27,5 @@ def with_metaclass(meta, *bases): class metaclass(meta): def __new__(cls, name, this_bases, d): return meta(name, bases, d) - return type.__new__(metaclass, 'temporary_class', (), {}) + + return type.__new__(metaclass, "temporary_class", (), {}) diff --git a/src/pip/_vendor/packaging/_structures.py b/src/pip/_vendor/packaging/_structures.py index e9fc4a049..68dcca634 100644 --- a/src/pip/_vendor/packaging/_structures.py +++ b/src/pip/_vendor/packaging/_structures.py @@ -5,7 +5,6 @@ from __future__ import absolute_import, division, print_function class Infinity(object): - def __repr__(self): return "Infinity" @@ -38,7 +37,6 @@ Infinity = Infinity() class NegativeInfinity(object): - def __repr__(self): return "-Infinity" diff --git a/src/pip/_vendor/packaging/markers.py b/src/pip/_vendor/packaging/markers.py index e5834ce62..548247681 100644 --- a/src/pip/_vendor/packaging/markers.py +++ b/src/pip/_vendor/packaging/markers.py @@ -17,8 +17,11 @@ from .specifiers import Specifier, InvalidSpecifier __all__ = [ - "InvalidMarker", "UndefinedComparison", "UndefinedEnvironmentName", - "Marker", "default_environment", + "InvalidMarker", + "UndefinedComparison", + "UndefinedEnvironmentName", + "Marker", + "default_environment", ] @@ -42,7 +45,6 @@ class UndefinedEnvironmentName(ValueError): class Node(object): - def __init__(self, value): self.value = value @@ -57,62 +59,52 @@ class Node(object): class Variable(Node): - def serialize(self): return str(self) class Value(Node): - def serialize(self): return '"{0}"'.format(self) class Op(Node): - def serialize(self): return str(self) VARIABLE = ( - L("implementation_version") | - L("platform_python_implementation") | - L("implementation_name") | - L("python_full_version") | - L("platform_release") | - L("platform_version") | - L("platform_machine") | - L("platform_system") | - L("python_version") | - L("sys_platform") | - L("os_name") | - L("os.name") | # PEP-345 - L("sys.platform") | # PEP-345 - L("platform.version") | # PEP-345 - L("platform.machine") | # PEP-345 - L("platform.python_implementation") | # PEP-345 - L("python_implementation") | # undocumented setuptools legacy - L("extra") + L("implementation_version") + | L("platform_python_implementation") + | L("implementation_name") + | L("python_full_version") + | L("platform_release") + | L("platform_version") + | L("platform_machine") + | L("platform_system") + | L("python_version") + | L("sys_platform") + | L("os_name") + | L("os.name") + | L("sys.platform") # PEP-345 + | L("platform.version") # PEP-345 + | L("platform.machine") # PEP-345 + | L("platform.python_implementation") # PEP-345 + | L("python_implementation") # PEP-345 + | L("extra") # undocumented setuptools legacy ) ALIASES = { - 'os.name': 'os_name', - 'sys.platform': 'sys_platform', - 'platform.version': 'platform_version', - 'platform.machine': 'platform_machine', - 
'platform.python_implementation': 'platform_python_implementation', - 'python_implementation': 'platform_python_implementation' + "os.name": "os_name", + "sys.platform": "sys_platform", + "platform.version": "platform_version", + "platform.machine": "platform_machine", + "platform.python_implementation": "platform_python_implementation", + "python_implementation": "platform_python_implementation", } VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0]))) VERSION_CMP = ( - L("===") | - L("==") | - L(">=") | - L("<=") | - L("!=") | - L("~=") | - L(">") | - L("<") + L("===") | L("==") | L(">=") | L("<=") | L("!=") | L("~=") | L(">") | L("<") ) MARKER_OP = VERSION_CMP | L("not in") | L("in") @@ -152,8 +144,11 @@ def _format_marker(marker, first=True): # where the single item is itself it's own list. In that case we want skip # the rest of this function so that we don't get extraneous () on the # outside. - if (isinstance(marker, list) and len(marker) == 1 and - isinstance(marker[0], (list, tuple))): + if ( + isinstance(marker, list) + and len(marker) == 1 + and isinstance(marker[0], (list, tuple)) + ): return _format_marker(marker[0]) if isinstance(marker, list): @@ -239,20 +234,20 @@ def _evaluate_markers(markers, environment): def format_full_version(info): - version = '{0.major}.{0.minor}.{0.micro}'.format(info) + version = "{0.major}.{0.minor}.{0.micro}".format(info) kind = info.releaselevel - if kind != 'final': + if kind != "final": version += kind[0] + str(info.serial) return version def default_environment(): - if hasattr(sys, 'implementation'): + if hasattr(sys, "implementation"): iver = format_full_version(sys.implementation.version) implementation_name = sys.implementation.name else: - iver = '0' - implementation_name = '' + iver = "0" + implementation_name = "" return { "implementation_name": implementation_name, @@ -270,13 +265,13 @@ def default_environment(): class Marker(object): - def __init__(self, marker): try: self._markers = _coerce_parse_result(MARKER.parseString(marker)) except ParseException as e: err_str = "Invalid marker: {0!r}, parse error at {1!r}".format( - marker, marker[e.loc:e.loc + 8]) + marker, marker[e.loc : e.loc + 8] + ) raise InvalidMarker(err_str) def __str__(self): diff --git a/src/pip/_vendor/packaging/requirements.py b/src/pip/_vendor/packaging/requirements.py index d40bd8c5c..dbc5f11db 100644 --- a/src/pip/_vendor/packaging/requirements.py +++ b/src/pip/_vendor/packaging/requirements.py @@ -38,8 +38,8 @@ IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END)) NAME = IDENTIFIER("name") EXTRA = IDENTIFIER -URI = Regex(r'[^ ]+')("url") -URL = (AT + URI) +URI = Regex(r"[^ ]+")("url") +URL = AT + URI EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA) EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras") @@ -48,17 +48,18 @@ VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE) VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE) VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY -VERSION_MANY = Combine(VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), - joinString=",", adjacent=False)("_raw_spec") +VERSION_MANY = Combine( + VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), joinString=",", adjacent=False +)("_raw_spec") _VERSION_SPEC = Optional(((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY)) -_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or '') +_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or "") VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier") 
VERSION_SPEC.setParseAction(lambda s, l, t: t[1]) MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker") MARKER_EXPR.setParseAction( - lambda s, l, t: Marker(s[t._original_start:t._original_end]) + lambda s, l, t: Marker(s[t._original_start : t._original_end]) ) MARKER_SEPARATOR = SEMICOLON MARKER = MARKER_SEPARATOR + MARKER_EXPR @@ -66,8 +67,7 @@ MARKER = MARKER_SEPARATOR + MARKER_EXPR VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER) URL_AND_MARKER = URL + Optional(MARKER) -NAMED_REQUIREMENT = \ - NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER) +NAMED_REQUIREMENT = NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER) REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd # pyparsing isn't thread safe during initialization, so we do it eagerly, see @@ -92,15 +92,21 @@ class Requirement(object): try: req = REQUIREMENT.parseString(requirement_string) except ParseException as e: - raise InvalidRequirement("Parse error at \"{0!r}\": {1}".format( - requirement_string[e.loc:e.loc + 8], e.msg - )) + raise InvalidRequirement( + 'Parse error at "{0!r}": {1}'.format( + requirement_string[e.loc : e.loc + 8], e.msg + ) + ) self.name = req.name if req.url: parsed_url = urlparse.urlparse(req.url) - if not (parsed_url.scheme and parsed_url.netloc) or ( - not parsed_url.scheme and not parsed_url.netloc): + if parsed_url.scheme == "file": + if urlparse.urlunparse(parsed_url) != req.url: + raise InvalidRequirement("Invalid URL given") + elif not (parsed_url.scheme and parsed_url.netloc) or ( + not parsed_url.scheme and not parsed_url.netloc + ): raise InvalidRequirement("Invalid URL: {0}".format(req.url)) self.url = req.url else: @@ -120,6 +126,8 @@ class Requirement(object): if self.url: parts.append("@ {0}".format(self.url)) + if self.marker: + parts.append(" ") if self.marker: parts.append("; {0}".format(self.marker)) diff --git a/src/pip/_vendor/packaging/specifiers.py b/src/pip/_vendor/packaging/specifiers.py index 4c798999d..743576a08 100644 --- a/src/pip/_vendor/packaging/specifiers.py +++ b/src/pip/_vendor/packaging/specifiers.py @@ -19,7 +19,6 @@ class InvalidSpecifier(ValueError): class BaseSpecifier(with_metaclass(abc.ABCMeta, object)): - @abc.abstractmethod def __str__(self): """ @@ -84,10 +83,7 @@ class _IndividualSpecifier(BaseSpecifier): if not match: raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec)) - self._spec = ( - match.group("operator").strip(), - match.group("version").strip(), - ) + self._spec = (match.group("operator").strip(), match.group("version").strip()) # Store whether or not this Specifier should accept prereleases self._prereleases = prereleases @@ -99,11 +95,7 @@ class _IndividualSpecifier(BaseSpecifier): else "" ) - return "<{0}({1!r}{2})>".format( - self.__class__.__name__, - str(self), - pre, - ) + return "<{0}({1!r}{2})>".format(self.__class__.__name__, str(self), pre) def __str__(self): return "{0}{1}".format(*self._spec) @@ -194,8 +186,9 @@ class _IndividualSpecifier(BaseSpecifier): # If our version is a prerelease, and we were not set to allow # prereleases, then we'll store it for later incase nothing # else matches this specifier. - if (parsed_version.is_prerelease and not - (prereleases or self.prereleases)): + if parsed_version.is_prerelease and not ( + prereleases or self.prereleases + ): found_prereleases.append(version) # Either this is not a prerelease, or we should have been # accepting prereleases from the beginning. 
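# --- Illustrative aside, not part of the vendored packaging diff: the
# prerelease handling reformatted above drives how specifiers filter candidate
# versions. A small sketch of the public behaviour, using the vendored import
# path:
from pip._vendor.packaging.specifiers import SpecifierSet

spec = SpecifierSet(">=1.0")
print(spec.contains("1.1a1"))                      # False: prereleases excluded by default
print(spec.contains("1.1a1", prereleases=True))    # True
print(list(spec.filter(["0.9", "1.0", "1.1a1"])))  # ['1.0']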
@@ -213,8 +206,7 @@ class _IndividualSpecifier(BaseSpecifier): class LegacySpecifier(_IndividualSpecifier): - _regex_str = ( - r""" + _regex_str = r""" (?P(==|!=|<=|>=|<|>)) \s* (?P @@ -225,10 +217,8 @@ class LegacySpecifier(_IndividualSpecifier): # them, and a comma since it's a version separator. ) """ - ) - _regex = re.compile( - r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE) + _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE) _operators = { "==": "equal", @@ -269,13 +259,13 @@ def _require_version_compare(fn): if not isinstance(prospective, Version): return False return fn(self, prospective, spec) + return wrapped class Specifier(_IndividualSpecifier): - _regex_str = ( - r""" + _regex_str = r""" (?P(~=|==|!=|<=|>=|<|>|===)) (?P (?: @@ -367,10 +357,8 @@ class Specifier(_IndividualSpecifier): ) ) """ - ) - _regex = re.compile( - r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE) + _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE) _operators = { "~=": "compatible", @@ -397,8 +385,7 @@ class Specifier(_IndividualSpecifier): prefix = ".".join( list( itertools.takewhile( - lambda x: (not x.startswith("post") and not - x.startswith("dev")), + lambda x: (not x.startswith("post") and not x.startswith("dev")), _version_split(spec), ) )[:-1] @@ -407,8 +394,9 @@ class Specifier(_IndividualSpecifier): # Add the prefix notation to the end of our string prefix += ".*" - return (self._get_operator(">=")(prospective, spec) and - self._get_operator("==")(prospective, prefix)) + return self._get_operator(">=")(prospective, spec) and self._get_operator("==")( + prospective, prefix + ) @_require_version_compare def _compare_equal(self, prospective, spec): @@ -428,7 +416,7 @@ class Specifier(_IndividualSpecifier): # Shorten the prospective version to be the same length as the spec # so that we can determine if the specifier is a prefix of the # prospective version or not. - prospective = prospective[:len(spec)] + prospective = prospective[: len(spec)] # Pad out our two sides with zeros so that they both equal the same # length. @@ -567,27 +555,17 @@ def _pad_version(left, right): right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right))) # Get the rest of our versions - left_split.append(left[len(left_split[0]):]) - right_split.append(right[len(right_split[0]):]) + left_split.append(left[len(left_split[0]) :]) + right_split.append(right[len(right_split[0]) :]) # Insert our padding - left_split.insert( - 1, - ["0"] * max(0, len(right_split[0]) - len(left_split[0])), - ) - right_split.insert( - 1, - ["0"] * max(0, len(left_split[0]) - len(right_split[0])), - ) + left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0]))) + right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0]))) - return ( - list(itertools.chain(*left_split)), - list(itertools.chain(*right_split)), - ) + return (list(itertools.chain(*left_split)), list(itertools.chain(*right_split))) class SpecifierSet(BaseSpecifier): - def __init__(self, specifiers="", prereleases=None): # Split on , to break each indidivual specifier into it's own item, and # strip each item to remove leading/trailing whitespace. @@ -721,10 +699,7 @@ class SpecifierSet(BaseSpecifier): # given version is contained within all of them. # Note: This use of all() here means that an empty set of specifiers # will always return True, this is an explicit design decision. 
- return all( - s.contains(item, prereleases=prereleases) - for s in self._specs - ) + return all(s.contains(item, prereleases=prereleases) for s in self._specs) def filter(self, iterable, prereleases=None): # Determine if we're forcing a prerelease or not, if we're not forcing diff --git a/src/pip/_vendor/packaging/utils.py b/src/pip/_vendor/packaging/utils.py index 4b94a82fb..884187869 100644 --- a/src/pip/_vendor/packaging/utils.py +++ b/src/pip/_vendor/packaging/utils.py @@ -36,13 +36,7 @@ def canonicalize_version(version): # Release segment # NB: This strips trailing '.0's to normalize - parts.append( - re.sub( - r'(\.0)+$', - '', - ".".join(str(x) for x in version.release) - ) - ) + parts.append(re.sub(r"(\.0)+$", "", ".".join(str(x) for x in version.release))) # Pre-release if version.pre is not None: diff --git a/src/pip/_vendor/packaging/version.py b/src/pip/_vendor/packaging/version.py index 6ed5cbbdc..95157a1f7 100644 --- a/src/pip/_vendor/packaging/version.py +++ b/src/pip/_vendor/packaging/version.py @@ -10,14 +10,11 @@ import re from ._structures import Infinity -__all__ = [ - "parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN" -] +__all__ = ["parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"] _Version = collections.namedtuple( - "_Version", - ["epoch", "release", "dev", "pre", "post", "local"], + "_Version", ["epoch", "release", "dev", "pre", "post", "local"] ) @@ -40,7 +37,6 @@ class InvalidVersion(ValueError): class _BaseVersion(object): - def __hash__(self): return hash(self._key) @@ -70,7 +66,6 @@ class _BaseVersion(object): class LegacyVersion(_BaseVersion): - def __init__(self, version): self._version = str(version) self._key = _legacy_cmpkey(self._version) @@ -126,12 +121,14 @@ class LegacyVersion(_BaseVersion): return False -_legacy_version_component_re = re.compile( - r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE, -) +_legacy_version_component_re = re.compile(r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE) _legacy_version_replacement_map = { - "pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@", + "pre": "c", + "preview": "c", + "-": "final-", + "rc": "c", + "dev": "@", } @@ -215,10 +212,7 @@ VERSION_PATTERN = r""" class Version(_BaseVersion): - _regex = re.compile( - r"^\s*" + VERSION_PATTERN + r"\s*$", - re.VERBOSE | re.IGNORECASE, - ) + _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE) def __init__(self, version): # Validate the version and parse it into pieces @@ -230,18 +224,11 @@ class Version(_BaseVersion): self._version = _Version( epoch=int(match.group("epoch")) if match.group("epoch") else 0, release=tuple(int(i) for i in match.group("release").split(".")), - pre=_parse_letter_version( - match.group("pre_l"), - match.group("pre_n"), - ), + pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")), post=_parse_letter_version( - match.group("post_l"), - match.group("post_n1") or match.group("post_n2"), - ), - dev=_parse_letter_version( - match.group("dev_l"), - match.group("dev_n"), + match.group("post_l"), match.group("post_n1") or match.group("post_n2") ), + dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")), local=_parse_local_version(match.group("local")), ) @@ -395,12 +382,7 @@ def _cmpkey(epoch, release, pre, post, dev, local): # re-reverse it back into the correct order and make it a tuple and use # that for our sorting key. 
release = tuple( - reversed(list( - itertools.dropwhile( - lambda x: x == 0, - reversed(release), - ) - )) + reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release)))) ) # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0. @@ -433,9 +415,6 @@ def _cmpkey(epoch, release, pre, post, dev, local): # - Numeric segments sort numerically # - Shorter versions sort before longer versions when the prefixes # match exactly - local = tuple( - (i, "") if isinstance(i, int) else (-Infinity, i) - for i in local - ) + local = tuple((i, "") if isinstance(i, int) else (-Infinity, i) for i in local) return epoch, release, pre, post, dev, local diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 6a6f6f6b0..c7d3a3822 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -7,7 +7,7 @@ html5lib==1.0.1 ipaddress==1.0.22 # Only needed on 2.6 and 2.7 lockfile==0.12.2 msgpack==0.5.6 -packaging==18.0 +packaging==19.0 pep517==0.5.0 progress==1.4 pyparsing==2.3.1 From e6d01279859d18a654a0b44113316096db374f37 Mon Sep 17 00:00:00 2001 From: Chris Jerdonek Date: Tue, 23 Oct 2018 00:46:43 -0700 Subject: [PATCH 49/69] Add failing tests. --- src/pip/_internal/utils/logging.py | 7 ++ src/pip/_internal/utils/misc.py | 7 ++ tests/functional/test_broken_stdout.py | 65 +++++++++++++++++ tests/unit/test_base_command.py | 47 ++++++++++-- tests/unit/test_logging.py | 99 +++++++++++++++++++++++++- 5 files changed, 220 insertions(+), 5 deletions(-) create mode 100644 tests/functional/test_broken_stdout.py diff --git a/src/pip/_internal/utils/logging.py b/src/pip/_internal/utils/logging.py index bc8b5fbd7..a12c21e6c 100644 --- a/src/pip/_internal/utils/logging.py +++ b/src/pip/_internal/utils/logging.py @@ -26,6 +26,13 @@ _log_state = threading.local() _log_state.indentation = 0 +class BrokenStdoutLoggingError(Exception): + """ + Raised if BrokenPipeError occurs for the stdout stream while logging. + """ + pass + + @contextlib.contextmanager def indent_log(num=2): """ diff --git a/src/pip/_internal/utils/misc.py b/src/pip/_internal/utils/misc.py index bb6e51517..84605ee36 100644 --- a/src/pip/_internal/utils/misc.py +++ b/src/pip/_internal/utils/misc.py @@ -864,6 +864,13 @@ def captured_stdout(): return captured_output('stdout') +def captured_stderr(): + """ + See captured_stdout(). + """ + return captured_output('stderr') + + class cached_property(object): """A property that is only computed once per instance and then replaces itself with an ordinary attribute. Deleting the attribute resets the diff --git a/tests/functional/test_broken_stdout.py b/tests/functional/test_broken_stdout.py new file mode 100644 index 000000000..788ab3a3c --- /dev/null +++ b/tests/functional/test_broken_stdout.py @@ -0,0 +1,65 @@ +import subprocess +import sys + +import pytest + +from pip._internal.utils.compat import WINDOWS + +if sys.version_info < (3, 6): + _BROKEN_STDOUT_RETURN_CODE = 1 +else: + # The new exit status was added in Python 3.6 as a result of: + # https://bugs.python.org/issue5319 + _BROKEN_STDOUT_RETURN_CODE = 120 + + +def setup_broken_stdout_test(args, deprecated_python): + proc = subprocess.Popen( + args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, + ) + # Call close() on stdout to cause a broken pipe. + proc.stdout.close() + # This line causes a timeout on Windows. 
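    # With the read end of the pipe closed, the child's next write or flush
    # of stdout fails with a broken pipe (EPIPE on POSIX), which Python 3
    # raises as BrokenPipeError -- the condition these tests exercise.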
+ returncode = proc.wait() + stderr = proc.stderr.read().decode('utf-8') + + expected_msg = 'ERROR: Pipe to stdout was broken' + if deprecated_python: + assert expected_msg in stderr + else: + assert stderr.startswith(expected_msg) + + return stderr, returncode + + +@pytest.mark.skipif(WINDOWS, reason="test times out on Windows") +def test_broken_stdout_pipe(deprecated_python): + """ + Test a broken pipe to stdout. + """ + stderr, returncode = setup_broken_stdout_test( + ['pip', 'list'], deprecated_python=deprecated_python, + ) + + # Check that no traceback occurs. + assert 'raise BrokenStdoutLoggingError()' not in stderr + assert stderr.count('Traceback') == 0 + + assert returncode == _BROKEN_STDOUT_RETURN_CODE + + +@pytest.mark.skipif(WINDOWS, reason="test times out on Windows") +def test_broken_stdout_pipe__verbose(deprecated_python): + """ + Test a broken pipe to stdout with verbose logging enabled. + """ + stderr, returncode = setup_broken_stdout_test( + ['pip', '-v', 'list'], deprecated_python=deprecated_python, + ) + + # Check that a traceback occurs and that it occurs at most once. + # We permit up to two because the exception can be chained. + assert 'raise BrokenStdoutLoggingError()' in stderr + assert 1 <= stderr.count('Traceback') <= 2 + + assert returncode == _BROKEN_STDOUT_RETURN_CODE diff --git a/tests/unit/test_base_command.py b/tests/unit/test_base_command.py index c64143629..e25df4a80 100644 --- a/tests/unit/test_base_command.py +++ b/tests/unit/test_base_command.py @@ -3,14 +3,19 @@ import os import time from pip._internal.cli.base_command import Command +from pip._internal.utils.logging import BrokenStdoutLoggingError class FakeCommand(Command): name = 'fake' summary = name - def __init__(self, error=False): - self.error = error + def __init__(self, run_func=None, error=False): + if error: + def run_func(): + raise SystemExit(1) + + self.run_func = run_func super(FakeCommand, self).__init__() def main(self, args): @@ -19,8 +24,8 @@ class FakeCommand(Command): def run(self, options, args): logging.getLogger("pip.tests").info("fake") - if self.error: - raise SystemExit(1) + if self.run_func: + return self.run_func() class FakeCommandWithUnicode(FakeCommand): @@ -34,6 +39,40 @@ class FakeCommandWithUnicode(FakeCommand): ) +class TestCommand(object): + + def call_main(self, capsys, args): + """ + Call command.main(), and return the command's stderr. + """ + def raise_broken_stdout(): + raise BrokenStdoutLoggingError() + + cmd = FakeCommand(run_func=raise_broken_stdout) + status = cmd.main(args) + assert status == 1 + stderr = capsys.readouterr().err + + return stderr + + def test_raise_broken_stdout(self, capsys): + """ + Test raising BrokenStdoutLoggingError. + """ + stderr = self.call_main(capsys, []) + + assert stderr.rstrip() == 'ERROR: Pipe to stdout was broken' + + def test_raise_broken_stdout__debug_logging(self, capsys): + """ + Test raising BrokenStdoutLoggingError with debug logging enabled. 
+ """ + stderr = self.call_main(capsys, ['-v']) + + assert 'ERROR: Pipe to stdout was broken' in stderr + assert 'Traceback (most recent call last):' in stderr + + class Test_base_command_logging(object): """ Test `pip.base_command.Command` setting up logging consumers based on diff --git a/tests/unit/test_logging.py b/tests/unit/test_logging.py index e2f407e36..af70f568f 100644 --- a/tests/unit/test_logging.py +++ b/tests/unit/test_logging.py @@ -1,8 +1,31 @@ +import errno import logging import os import time -from pip._internal.utils.logging import IndentingFormatter +import pytest +from mock import patch +from pip._vendor.six import PY2 + +from pip._internal.utils.logging import ( + BrokenStdoutLoggingError, ColorizedStreamHandler, IndentingFormatter, +) +from pip._internal.utils.misc import captured_stderr, captured_stdout + +logger = logging.getLogger(__name__) + + +# This is a Python 2/3 compatibility helper. +def _make_broken_pipe_error(): + """ + Return an exception object representing a broken pipe. + """ + if PY2: + # This is one way a broken pipe error can show up in Python 2 + # (a non-Windows example in this case). + return IOError(errno.EPIPE, 'Broken pipe') + + return BrokenPipeError() # noqa: F821 class TestIndentingFormatter(object): @@ -43,3 +66,77 @@ class TestIndentingFormatter(object): f = IndentingFormatter(fmt="%(message)s", add_timestamp=True) expected = '2019-01-17T06:00:37 hello\n2019-01-17T06:00:37 world' assert f.format(record) == expected + + +class TestColorizedStreamHandler(object): + + def _make_log_record(self): + attrs = { + 'msg': 'my error', + } + record = logging.makeLogRecord(attrs) + + return record + + def test_broken_pipe_in_stderr_flush(self): + """ + Test sys.stderr.flush() raising BrokenPipeError. + + This error should _not_ trigger an error in the logging framework. + """ + record = self._make_log_record() + + with captured_stderr() as stderr: + handler = ColorizedStreamHandler(stream=stderr) + with patch('sys.stderr.flush') as mock_flush: + mock_flush.side_effect = _make_broken_pipe_error() + # The emit() call raises no exception. + handler.emit(record) + + err_text = stderr.getvalue() + + assert err_text.startswith('my error') + # Check that the logging framework tried to log the exception. + if PY2: + assert 'IOError: [Errno 32] Broken pipe' in err_text + assert 'Logged from file' in err_text + else: + assert 'Logging error' in err_text + assert 'BrokenPipeError' in err_text + assert "Message: 'my error'" in err_text + + def test_broken_pipe_in_stdout_write(self): + """ + Test sys.stdout.write() raising BrokenPipeError. + + This error _should_ trigger an error in the logging framework. + """ + record = self._make_log_record() + + with captured_stdout() as stdout: + handler = ColorizedStreamHandler(stream=stdout) + with patch('sys.stdout.write') as mock_write: + mock_write.side_effect = _make_broken_pipe_error() + with pytest.raises(BrokenStdoutLoggingError): + handler.emit(record) + + def test_broken_pipe_in_stdout_flush(self): + """ + Test sys.stdout.flush() raising BrokenPipeError. + + This error _should_ trigger an error in the logging framework. 
+ """ + record = self._make_log_record() + + with captured_stdout() as stdout: + handler = ColorizedStreamHandler(stream=stdout) + with patch('sys.stdout.flush') as mock_flush: + mock_flush.side_effect = _make_broken_pipe_error() + with pytest.raises(BrokenStdoutLoggingError): + handler.emit(record) + + output = stdout.getvalue() + + # Sanity check that the log record was written, since flush() happens + # after write(). + assert output.startswith('my error') From 7a9e1f344b6b24a737b5076a2161add1cedffb89 Mon Sep 17 00:00:00 2001 From: Chris Jerdonek Date: Tue, 23 Oct 2018 01:55:07 -0700 Subject: [PATCH 50/69] Handle BrokenPipeError gracefully. --- news/4170.bugfix | 1 + src/pip/_internal/cli/base_command.py | 13 ++++++-- src/pip/_internal/utils/logging.py | 44 +++++++++++++++++++++++++++ 3 files changed, 56 insertions(+), 2 deletions(-) create mode 100644 news/4170.bugfix diff --git a/news/4170.bugfix b/news/4170.bugfix new file mode 100644 index 000000000..a8e8d07b2 --- /dev/null +++ b/news/4170.bugfix @@ -0,0 +1 @@ +Handle a broken stdout pipe more gracefully (e.g. when running ``pip list | head``). diff --git a/src/pip/_internal/cli/base_command.py b/src/pip/_internal/cli/base_command.py index a4b6aa1ef..8a602bab4 100644 --- a/src/pip/_internal/cli/base_command.py +++ b/src/pip/_internal/cli/base_command.py @@ -1,11 +1,12 @@ """Base Command class, and related routines""" -from __future__ import absolute_import +from __future__ import absolute_import, print_function import logging import logging.config import optparse import os import sys +import traceback from pip._internal.cli import cmdoptions from pip._internal.cli.parser import ( @@ -27,7 +28,7 @@ from pip._internal.req.constructors import ( ) from pip._internal.req.req_file import parse_requirements from pip._internal.utils.deprecation import deprecated -from pip._internal.utils.logging import setup_logging +from pip._internal.utils.logging import BrokenStdoutLoggingError, setup_logging from pip._internal.utils.misc import ( get_prog, normalize_path, redact_password_from_url, ) @@ -191,6 +192,14 @@ class Command(object): logger.critical('ERROR: %s', exc) logger.debug('Exception information:', exc_info=True) + return ERROR + except BrokenStdoutLoggingError: + # Bypass our logger and write any remaining messages to stderr + # because stdout no longer works. + print('ERROR: Pipe to stdout was broken', file=sys.stderr) + if logger.getEffectiveLevel() <= logging.DEBUG: + traceback.print_exc(file=sys.stderr) + return ERROR except KeyboardInterrupt: logger.critical('Operation cancelled by user') diff --git a/src/pip/_internal/utils/logging.py b/src/pip/_internal/utils/logging.py index a12c21e6c..5776fc9fd 100644 --- a/src/pip/_internal/utils/logging.py +++ b/src/pip/_internal/utils/logging.py @@ -1,9 +1,13 @@ from __future__ import absolute_import import contextlib +import errno import logging import logging.handlers import os +import sys + +from pip._vendor.six import PY2 from pip._internal.utils.compat import WINDOWS from pip._internal.utils.misc import ensure_dir @@ -33,6 +37,23 @@ class BrokenStdoutLoggingError(Exception): pass +if PY2: + # BrokenPipeError does not exist in Python 2. + def _is_broken_pipe_error(exc_class, exc): + """ + Return whether an exception is a broken pipe error. + + Args: + exc_class: an exception class. + exc: an exception instance. 
+ """ + return (exc_class is IOError and exc.errno == errno.EPIPE) + +else: + def _is_broken_pipe_error(exc_class, exc): + return (exc_class is BrokenPipeError) # noqa: F821 + + @contextlib.contextmanager def indent_log(num=2): """ @@ -103,6 +124,16 @@ class ColorizedStreamHandler(logging.StreamHandler): if WINDOWS and colorama: self.stream = colorama.AnsiToWin32(self.stream) + def _using_stdout(self): + """ + Return whether the handler is using sys.stdout. + """ + if WINDOWS and colorama: + # Then self.stream is an AnsiToWin32 object. + return self.stream.wrapped is sys.stdout + + return self.stream is sys.stdout + def should_color(self): # Don't colorize things if we do not have colorama or if told not to if not colorama or self._no_color: @@ -135,6 +166,19 @@ class ColorizedStreamHandler(logging.StreamHandler): return msg + # The logging module says handleError() can be customized. + def handleError(self, record): + exc_class, exc = sys.exc_info()[:2] + # If a broken pipe occurred while calling write() or flush() on the + # stdout stream in logging's Handler.emit(), then raise our special + # exception so we can handle it in main() instead of logging the + # broken pipe error and continuing. + if (exc_class and self._using_stdout() and + _is_broken_pipe_error(exc_class, exc)): + raise BrokenStdoutLoggingError() + + return super(ColorizedStreamHandler, self).handleError(record) + class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler): From eef02fd6665ab8d4fd8bbed9dd70447f67386143 Mon Sep 17 00:00:00 2001 From: Chris Jerdonek Date: Wed, 16 Jan 2019 01:51:25 -0800 Subject: [PATCH 51/69] Unskip / get the end-to-end tests working for Windows. --- src/pip/_internal/utils/logging.py | 30 ++++++++++++++++++++------ tests/functional/test_broken_stdout.py | 7 ------ 2 files changed, 24 insertions(+), 13 deletions(-) diff --git a/src/pip/_internal/utils/logging.py b/src/pip/_internal/utils/logging.py index 5776fc9fd..a86aaf1df 100644 --- a/src/pip/_internal/utils/logging.py +++ b/src/pip/_internal/utils/logging.py @@ -37,8 +37,30 @@ class BrokenStdoutLoggingError(Exception): pass -if PY2: - # BrokenPipeError does not exist in Python 2. +# BrokenPipeError does not exist in Python 2 and, in addition, manifests +# differently in Windows and non-Windows. +if WINDOWS: + # In Windows, a broken pipe can show up as EINVAL rather than EPIPE: + # https://bugs.python.org/issue19612 + # https://bugs.python.org/issue30418 + if PY2: + def _is_broken_pipe_error(exc_class, exc): + """See the docstring for non-Windows Python 3 below.""" + return (exc_class is IOError and + exc.errno in (errno.EINVAL, errno.EPIPE)) + else: + # In Windows, a broken pipe IOError became OSError in Python 3. + def _is_broken_pipe_error(exc_class, exc): + """See the docstring for non-Windows Python 3 below.""" + return ((exc_class is BrokenPipeError) or # noqa: F821 + (exc_class is OSError and + exc.errno in (errno.EINVAL, errno.EPIPE))) +elif PY2: + def _is_broken_pipe_error(exc_class, exc): + """See the docstring for non-Windows Python 3 below.""" + return (exc_class is IOError and exc.errno == errno.EPIPE) +else: + # Then we are in the non-Windows Python 3 case. def _is_broken_pipe_error(exc_class, exc): """ Return whether an exception is a broken pipe error. @@ -47,10 +69,6 @@ if PY2: exc_class: an exception class. exc: an exception instance. 
""" - return (exc_class is IOError and exc.errno == errno.EPIPE) - -else: - def _is_broken_pipe_error(exc_class, exc): return (exc_class is BrokenPipeError) # noqa: F821 diff --git a/tests/functional/test_broken_stdout.py b/tests/functional/test_broken_stdout.py index 788ab3a3c..afb66f5a5 100644 --- a/tests/functional/test_broken_stdout.py +++ b/tests/functional/test_broken_stdout.py @@ -1,10 +1,6 @@ import subprocess import sys -import pytest - -from pip._internal.utils.compat import WINDOWS - if sys.version_info < (3, 6): _BROKEN_STDOUT_RETURN_CODE = 1 else: @@ -19,7 +15,6 @@ def setup_broken_stdout_test(args, deprecated_python): ) # Call close() on stdout to cause a broken pipe. proc.stdout.close() - # This line causes a timeout on Windows. returncode = proc.wait() stderr = proc.stderr.read().decode('utf-8') @@ -32,7 +27,6 @@ def setup_broken_stdout_test(args, deprecated_python): return stderr, returncode -@pytest.mark.skipif(WINDOWS, reason="test times out on Windows") def test_broken_stdout_pipe(deprecated_python): """ Test a broken pipe to stdout. @@ -48,7 +42,6 @@ def test_broken_stdout_pipe(deprecated_python): assert returncode == _BROKEN_STDOUT_RETURN_CODE -@pytest.mark.skipif(WINDOWS, reason="test times out on Windows") def test_broken_stdout_pipe__verbose(deprecated_python): """ Test a broken pipe to stdout with verbose logging enabled. From 9d00420c164200d2249ee588940704423c41bd09 Mon Sep 17 00:00:00 2001 From: Chris Jerdonek Date: Mon, 21 Jan 2019 04:56:12 -0800 Subject: [PATCH 52/69] Use the requested log level when handling broken stdout pipe. --- src/pip/_internal/cli/base_command.py | 4 ++-- src/pip/_internal/utils/logging.py | 6 ++++++ tests/functional/test_broken_stdout.py | 18 ++++++++++++++++++ 3 files changed, 26 insertions(+), 2 deletions(-) diff --git a/src/pip/_internal/cli/base_command.py b/src/pip/_internal/cli/base_command.py index 8a602bab4..975f3fe2c 100644 --- a/src/pip/_internal/cli/base_command.py +++ b/src/pip/_internal/cli/base_command.py @@ -130,7 +130,7 @@ class Command(object): # Set verbosity so that it can be used elsewhere. self.verbosity = options.verbose - options.quiet - setup_logging( + level_number = setup_logging( verbosity=self.verbosity, no_color=options.no_color, user_log_file=options.log, @@ -197,7 +197,7 @@ class Command(object): # Bypass our logger and write any remaining messages to stderr # because stdout no longer works. print('ERROR: Pipe to stdout was broken', file=sys.stderr) - if logger.getEffectiveLevel() <= logging.DEBUG: + if level_number <= logging.DEBUG: traceback.print_exc(file=sys.stderr) return ERROR diff --git a/src/pip/_internal/utils/logging.py b/src/pip/_internal/utils/logging.py index a86aaf1df..579d69621 100644 --- a/src/pip/_internal/utils/logging.py +++ b/src/pip/_internal/utils/logging.py @@ -216,6 +216,8 @@ class MaxLevelFilter(logging.Filter): def setup_logging(verbosity, no_color, user_log_file): """Configures and sets up all of the logging + + Returns the requested logging level, as its integer value. """ # Determine the level to be logging at. @@ -230,6 +232,8 @@ def setup_logging(verbosity, no_color, user_log_file): else: level = "INFO" + level_number = getattr(logging, level) + # The "root" logger should match the "console" level *unless* we also need # to log to a user log file. 
include_user_log = user_log_file is not None @@ -310,3 +314,5 @@ def setup_logging(verbosity, no_color, user_log_file): } }, }) + + return level_number diff --git a/tests/functional/test_broken_stdout.py b/tests/functional/test_broken_stdout.py index afb66f5a5..cb98e31f0 100644 --- a/tests/functional/test_broken_stdout.py +++ b/tests/functional/test_broken_stdout.py @@ -1,3 +1,4 @@ +import os import subprocess import sys @@ -42,6 +43,23 @@ def test_broken_stdout_pipe(deprecated_python): assert returncode == _BROKEN_STDOUT_RETURN_CODE +def test_broken_stdout_pipe__log_option(deprecated_python, tmpdir): + """ + Test a broken pipe to stdout when --log is passed. + """ + log_path = os.path.join(str(tmpdir), 'log.txt') + stderr, returncode = setup_broken_stdout_test( + ['pip', '--log', log_path, 'list'], + deprecated_python=deprecated_python, + ) + + # Check that no traceback occurs. + assert 'raise BrokenStdoutLoggingError()' not in stderr + assert stderr.count('Traceback') == 0 + + assert returncode == _BROKEN_STDOUT_RETURN_CODE + + def test_broken_stdout_pipe__verbose(deprecated_python): """ Test a broken pipe to stdout with verbose logging enabled. From 8de0ce82454760ce9eb904d6b6cf24df8367db01 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Tue, 22 Jan 2019 21:53:19 +0530 Subject: [PATCH 53/69] Generate AUTHORS for 19.0 --- AUTHORS.txt | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/AUTHORS.txt b/AUTHORS.txt index aff7dc3fb..5a312d62f 100644 --- a/AUTHORS.txt +++ b/AUTHORS.txt @@ -1,8 +1,11 @@ AceGentile Adam Chainz +Adam Tse Adam Wentz Adrien Morison Alan Yee +Albert-Guan +albertg Aleks Bunin Alethea Flowers Alex Gaynor @@ -14,6 +17,7 @@ Alexandre Conrad Alexey Popravka Alexey Popravka Alli +Ami Fischman Anatoly Techtonik Andrei Geacar Andrew Gaul @@ -49,6 +53,7 @@ Ben Darnell Ben Hoyt Ben Rosser Bence Nagy +Benjamin Peterson Benjamin VanEvery Benoit Pierre Berker Peksag @@ -59,6 +64,8 @@ Brad Erickson Bradley Ayers Brandon L. Reiss Brett Randall +Brian Cristante <33549821+brcrista@users.noreply.github.com> +Brian Cristante Brian Rosner BrownTruck Bruno Oliveira @@ -75,6 +82,7 @@ Carol Willing Carter Thayer Cass Chandrasekhar Atina +Chih-Hsuan Yen Chris Brinker Chris Jerdonek Chris McDonough @@ -133,9 +141,11 @@ Ed Morley <501702+edmorley@users.noreply.github.com> Ed Morley elainechan Eli Schwartz +Eli Schwartz Emil Styrke Endoh Takanao enoch +Erdinc Mutlu Eric Gillingham Eric Hanchrow Eric Hopper @@ -149,6 +159,7 @@ Felix Yan fiber-space Filip Kokosiński Florian Briand +Florian Rathgeber Francesco Francesco Montesano Gabriel Curio @@ -253,6 +264,8 @@ Marc Tamlyn Marcus Smith Mariatta Mark Kohler +Mark Williams +Mark Williams Markus Hametner Masklinn Matej Stuchlik @@ -271,6 +284,7 @@ Matthias Bussonnier mattip Maxim Kurnikov Maxime Rouyrre +mbaluna <44498973+mbaluna@users.noreply.github.com> memoselyk Michael Michael Aquilina @@ -284,6 +298,7 @@ Mihir Singh Min RK MinRK Miro Hrončok +Monica Baluna montefra Monty Taylor Nate Coraor @@ -318,6 +333,7 @@ Paulus Schoutsen Pavithra Eswaramoorthy <33131404+QueenCoffee@users.noreply.github.com> Pawel Jasinski Pekka Klärck +Peter Lisák Peter Waller petr-tik Phaneendra Chiruvella @@ -329,6 +345,7 @@ Philippe Ombredanne Pi Delport Pierre-Yves Rofes pip +Prabhjyotsing Surjit Singh Sodhi Pradyun Gedam Pratik Mallya Preston Holmes @@ -379,6 +396,7 @@ Stephan Erb stepshal Steve (Gadget) Barnes Steve Barnes +Steve Dower Steve Kowalik Steven Myint stonebig @@ -418,11 +436,13 @@ W. 
Trevor King Wil Tan Wilfred Hughes William ML Leslie +William T Olson wim glenn Wolfgang Maier Xavier Fernandez Xavier Fernandez xoviat +xtreak YAMAMOTO Takashi Yen Chi Hsuan Yoval P From c38805ab59e4df53153a544bfe32f671cc82be25 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Tue, 22 Jan 2019 21:55:04 +0530 Subject: [PATCH 54/69] Bump version to 19.0 --- src/pip/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/__init__.py b/src/pip/__init__.py index 98e17e8d3..16de4ad0f 100644 --- a/src/pip/__init__.py +++ b/src/pip/__init__.py @@ -1 +1 @@ -__version__ = "19.0.dev0" +__version__ = "19.0" From 71945093f7a7652aee11a3950cfdd1486976bb6f Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Tue, 22 Jan 2019 22:05:42 +0530 Subject: [PATCH 55/69] Generate NEWS for 19.0 --- NEWS.rst | 76 +++++++++++++++++++ ...D1F422-0CB9-450F-B4DD-9486524712E5.feature | 1 - ...be0039-1489-4442-94fc-ffe0790c4d73.trivial | 0 ...72f685-7454-4242-a350-2e7334dc7924.trivial | 0 news/3055.bugfix | 1 - ...88AD7C-9097-4941-849B-3709C172E640.trivial | 0 ...E4C9AC-13F6-4772-8222-61E495F4CC5C.trivial | 0 news/4746.bugfix | 1 - news/4759.bugfix | 1 - news/4833.bugfix | 1 - news/5008.feature | 3 - news/5031.feature | 1 - ...6D2F71-CA51-40F5-9D4B-F5502026CFCF.trivial | 0 news/5147.bugfix | 1 - news/5213.feature | 1 - news/5270.bugfix | 2 - news/5385.bugfix | 1 - news/5483.bugfix | 2 - news/5656.bugfix | 1 - news/5735.feature | 2 - news/5737.bugfix | 1 - news/5743.feature | 1 - news/5827.feature | 1 - news/5838.bugfix | 1 - news/5839.bugfix | 1 - news/5841.bugfix | 1 - news/5848.bugfix | 1 - news/5866.removal | 2 - news/5868.bugfix | 1 - news/5870.bugfix | 1 - news/5888.doc | 1 - news/5949.doc | 1 - news/5958.doc | 1 - news/5961.trivial | 2 - news/5968.bugfix | 1 - news/5984.doc | 1 - news/6060.removal | 1 - news/6106.removal | 1 - news/6124.bugfix | 1 - news/6141.feature | 1 - news/6148.removal | 2 - ...23E2F2-6D0A-46E1-8378-BB116BFDBC9C.trivial | 0 ...FB6292-DB3F-4DF2-AADE-7DE25A678686.trivial | 0 ...39276D-88B8-4AB5-9AA4-6B5DD25B306D.trivial | 0 ...91612D-8317-4A33-B373-9AAAD1CDA4DA.trivial | 0 ...14E56D-726C-4417-AA87-3AACF3677289.trivial | 0 ...C1E675-2DF0-4D59-9183-19CF908F5CCF.trivial | 0 ...BB707A-F48C-4FC9-8459-9B23C2D07FBE.trivial | 0 ...2E6CBD-7420-43A9-9A9B-607F7F7B5D90.trivial | 0 ...A540D9-7665-48A5-A1C8-5141ED49E404.trivial | 0 ...4EEB42-0067-4C7E-B02A-01FAE49D1D98.trivial | 0 ...E4CACD-41B3-478D-926D-2069D76A6059.trivial | 0 ...35c999-332b-4096-aa90-455f8d603129.trivial | 0 news/certifi.vendor | 1 - news/colorama.vendor | 1 - ...e44acc-c569-46e2-9348-2e55a2816d5c.trivial | 0 news/deadbeef.trivial | 1 - news/distlib.vendor | 1 - news/idna.vendor | 1 - news/pep517.vendor | 1 - news/pkg_resources.vendor | 1 - news/pyparsing.vendor | 1 - news/pytoml.vendor | 1 - news/requests.vendor | 1 - news/six.vendor | 1 - news/urllib3.vendor | 1 - 66 files changed, 76 insertions(+), 55 deletions(-) delete mode 100644 news/13D1F422-0CB9-450F-B4DD-9486524712E5.feature delete mode 100644 news/15be0039-1489-4442-94fc-ffe0790c4d73.trivial delete mode 100644 news/2972f685-7454-4242-a350-2e7334dc7924.trivial delete mode 100644 news/3055.bugfix delete mode 100644 news/4088AD7C-9097-4941-849B-3709C172E640.trivial delete mode 100644 news/43E4C9AC-13F6-4772-8222-61E495F4CC5C.trivial delete mode 100644 news/4746.bugfix delete mode 100644 news/4759.bugfix delete mode 100644 news/4833.bugfix delete mode 100644 news/5008.feature delete mode 100644 news/5031.feature delete mode 100644 
news/506D2F71-CA51-40F5-9D4B-F5502026CFCF.trivial delete mode 100644 news/5147.bugfix delete mode 100644 news/5213.feature delete mode 100644 news/5270.bugfix delete mode 100644 news/5385.bugfix delete mode 100644 news/5483.bugfix delete mode 100644 news/5656.bugfix delete mode 100644 news/5735.feature delete mode 100644 news/5737.bugfix delete mode 100644 news/5743.feature delete mode 100644 news/5827.feature delete mode 100644 news/5838.bugfix delete mode 100644 news/5839.bugfix delete mode 100644 news/5841.bugfix delete mode 100644 news/5848.bugfix delete mode 100644 news/5866.removal delete mode 100644 news/5868.bugfix delete mode 100644 news/5870.bugfix delete mode 100644 news/5888.doc delete mode 100644 news/5949.doc delete mode 100644 news/5958.doc delete mode 100644 news/5961.trivial delete mode 100644 news/5968.bugfix delete mode 100644 news/5984.doc delete mode 100644 news/6060.removal delete mode 100644 news/6106.removal delete mode 100644 news/6124.bugfix delete mode 100644 news/6141.feature delete mode 100644 news/6148.removal delete mode 100644 news/6923E2F2-6D0A-46E1-8378-BB116BFDBC9C.trivial delete mode 100644 news/75FB6292-DB3F-4DF2-AADE-7DE25A678686.trivial delete mode 100644 news/8339276D-88B8-4AB5-9AA4-6B5DD25B306D.trivial delete mode 100644 news/9891612D-8317-4A33-B373-9AAAD1CDA4DA.trivial delete mode 100644 news/A514E56D-726C-4417-AA87-3AACF3677289.trivial delete mode 100644 news/A6C1E675-2DF0-4D59-9183-19CF908F5CCF.trivial delete mode 100644 news/B4BB707A-F48C-4FC9-8459-9B23C2D07FBE.trivial delete mode 100644 news/C32E6CBD-7420-43A9-9A9B-607F7F7B5D90.trivial delete mode 100644 news/D9A540D9-7665-48A5-A1C8-5141ED49E404.trivial delete mode 100644 news/E74EEB42-0067-4C7E-B02A-01FAE49D1D98.trivial delete mode 100644 news/EFE4CACD-41B3-478D-926D-2069D76A6059.trivial delete mode 100644 news/a035c999-332b-4096-aa90-455f8d603129.trivial delete mode 100644 news/certifi.vendor delete mode 100644 news/colorama.vendor delete mode 100644 news/dbe44acc-c569-46e2-9348-2e55a2816d5c.trivial delete mode 100644 news/deadbeef.trivial delete mode 100644 news/distlib.vendor delete mode 100644 news/idna.vendor delete mode 100644 news/pep517.vendor delete mode 100644 news/pkg_resources.vendor delete mode 100644 news/pyparsing.vendor delete mode 100644 news/pytoml.vendor delete mode 100644 news/requests.vendor delete mode 100644 news/six.vendor delete mode 100644 news/urllib3.vendor diff --git a/NEWS.rst b/NEWS.rst index 8e728d2b8..7b2948317 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -7,6 +7,82 @@ .. towncrier release notes start +19.0 (2019-01-22) +================= + +Deprecations and Removals +------------------------- + +- Remove the deprecated SVN editable detection based on dependency links + during freeze. (`#5866 `_) +- Remove the deprecated --process-dependency-links option. (`#6060 `_) +- Deprecate support for Python 3.4 (`#6106 `_) +- Start printing a warning for Python 2.7 to warn of impending Python 2.7 End-of-life and + prompt users to start migrating to Python 3. (`#6148 `_) + +Features +-------- + +- Include the package name in a freeze warning if the package is not installed. (`#13D1F422-0CB9-450F-B4DD-9486524712E5 `_) +- Implement manylinux2010 platform tag support. manylinux2010 is the successor + to manylinux1. It allows carefully compiled binary wheels to be installed + on compatible Linux platforms. (`#5008 `_) +- Editable, non-VCS installs now freeze as editable. (`#5031 `_) +- Pip now includes license text of 3rd party libraries. 
(`#5213 `_) +- Make ``PIP_NO_CACHE_DIR`` disable the cache also for truthy values like + ``"true"``, ``"yes"``, ``"1"``, etc. (`#5735 `_) +- Implement PEP 517 (allow projects to specify a build backend via pyproject.toml). (`#5743 `_) +- A warning message is emitted when dropping an ``--[extra-]index-url`` value that points to an existing local directory. (`#5827 `_) +- Prefix pip's ``--log`` file lines with their timestamp. (`#6141 `_) + +Bug Fixes +--------- + +- Handle a broken stdout pipe more gracefully (e.g. when running ``pip list | head``). (`#4170 `_) +- Avoids creating excessively long temporary paths when uninstalling packages. (`#3055 `_) +- Redact the password from the URL in various log messages. (`#4746 `_) +- Editable Git installs without a remote now freeze as editable. (`#4759 `_) +- give 401 warning if username/password do not work for URL (`#4833 `_) +- Invalid requirement no longer causes stack trace to be printed. (`#5147 `_) +- Handle `requests.exceptions.RetryError` raised in `PackageFinder` that was + causing pip to fail silently when some indexes were unreachable. (`#5270 `_, `#5483 `_) +- Setting ``PIP_NO_CACHE_DIR=yes`` no longer causes pip to crash. (`#5385 `_) +- - Improve PEP 518 build isolation: handle .pth files, so namespace packages are correctly supported under Python 3.2 and earlier. (`#5656 `_) +- `pip list --outdated --not-required` should list only outdated packages that are not dependencies of installed packages (`#5737 `_) +- Fix content type detection if a directory named like an archive is used as a package source. (`#5838 `_) +- Fix crashes from unparseable requirements when checking installed packages. (`#5839 `_) +- Fix support for invoking pip using `python src/pip ...`. (`#5841 `_) +- Greatly reduce memory usage when installing wheels containing large files. (`#5848 `_) +- Fix sorting `TypeError` in `move_wheel_files()` when installing some packages. (`#5868 `_) +- Canonicalize sdist file names so they can be matched to a canonicalized package name passed to ``pip install``. (`#5870 `_) +- Percent-decode special characters in SVN URL credentials. (`#5968 `_) +- Redact the password from index urls in a debug message (using --no-index & --verbose options together) (`#6124 `_) + +Vendored Libraries +------------------ + +- Update certifi to 2018.11.29 +- Update colorama to 0.4.1 +- Update distlib to 0.2.8 +- Update idna to 2.8 +- Update packaging to 19.0 +- Update pep517 to 0.5.0 +- Update pkg_resources to 40.6.3 (via setuptools) +- Update pyparsing to 2.3.1 +- Update pytoml to 0.1.20 +- Update requests to 2.21.0 +- Update six to 1.12.0 +- Update urllib3 to 1.24.1 + +Improved Documentation +---------------------- + +- Remove references to removed #egg=- functionality (`#5888 `_) +- Adds instructions for running pip from source to Development documentation. (`#5949 `_) +- Include the Vendoring Policy in the documentation. (`#5958 `_) +- Add command information in usage document for pip cmd (`#5984 `_) + + 18.1 (2018-10-05) ================= diff --git a/news/13D1F422-0CB9-450F-B4DD-9486524712E5.feature b/news/13D1F422-0CB9-450F-B4DD-9486524712E5.feature deleted file mode 100644 index dbe3638c6..000000000 --- a/news/13D1F422-0CB9-450F-B4DD-9486524712E5.feature +++ /dev/null @@ -1 +0,0 @@ -Include the package name in a freeze warning if the package is not installed. 
\ No newline at end of file diff --git a/news/15be0039-1489-4442-94fc-ffe0790c4d73.trivial b/news/15be0039-1489-4442-94fc-ffe0790c4d73.trivial deleted file mode 100644 index e69de29bb..000000000 diff --git a/news/2972f685-7454-4242-a350-2e7334dc7924.trivial b/news/2972f685-7454-4242-a350-2e7334dc7924.trivial deleted file mode 100644 index e69de29bb..000000000 diff --git a/news/3055.bugfix b/news/3055.bugfix deleted file mode 100644 index 380c57390..000000000 --- a/news/3055.bugfix +++ /dev/null @@ -1 +0,0 @@ -Avoids creating excessively long temporary paths when uninstalling packages. \ No newline at end of file diff --git a/news/4088AD7C-9097-4941-849B-3709C172E640.trivial b/news/4088AD7C-9097-4941-849B-3709C172E640.trivial deleted file mode 100644 index e69de29bb..000000000 diff --git a/news/43E4C9AC-13F6-4772-8222-61E495F4CC5C.trivial b/news/43E4C9AC-13F6-4772-8222-61E495F4CC5C.trivial deleted file mode 100644 index e69de29bb..000000000 diff --git a/news/4746.bugfix b/news/4746.bugfix deleted file mode 100644 index 3ad37679f..000000000 --- a/news/4746.bugfix +++ /dev/null @@ -1 +0,0 @@ -Redact the password from the URL in various log messages. \ No newline at end of file diff --git a/news/4759.bugfix b/news/4759.bugfix deleted file mode 100644 index 405fbe8db..000000000 --- a/news/4759.bugfix +++ /dev/null @@ -1 +0,0 @@ -Editable Git installs without a remote now freeze as editable. diff --git a/news/4833.bugfix b/news/4833.bugfix deleted file mode 100644 index 9bb9fdaa9..000000000 --- a/news/4833.bugfix +++ /dev/null @@ -1 +0,0 @@ -give 401 warning if username/password do not work for URL diff --git a/news/5008.feature b/news/5008.feature deleted file mode 100644 index b42457c85..000000000 --- a/news/5008.feature +++ /dev/null @@ -1,3 +0,0 @@ -Implement manylinux2010 platform tag support. manylinux2010 is the successor -to manylinux1. It allows carefully compiled binary wheels to be installed -on compatible Linux platforms. diff --git a/news/5031.feature b/news/5031.feature deleted file mode 100644 index 23b71677e..000000000 --- a/news/5031.feature +++ /dev/null @@ -1 +0,0 @@ -Editable, non-VCS installs now freeze as editable. diff --git a/news/506D2F71-CA51-40F5-9D4B-F5502026CFCF.trivial b/news/506D2F71-CA51-40F5-9D4B-F5502026CFCF.trivial deleted file mode 100644 index e69de29bb..000000000 diff --git a/news/5147.bugfix b/news/5147.bugfix deleted file mode 100644 index 59f3846e9..000000000 --- a/news/5147.bugfix +++ /dev/null @@ -1 +0,0 @@ -Invalid requirement no longer causes stack trace to be printed. diff --git a/news/5213.feature b/news/5213.feature deleted file mode 100644 index c0266d1af..000000000 --- a/news/5213.feature +++ /dev/null @@ -1 +0,0 @@ -Pip now includes license text of 3rd party libraries. diff --git a/news/5270.bugfix b/news/5270.bugfix deleted file mode 100644 index 9db8184c3..000000000 --- a/news/5270.bugfix +++ /dev/null @@ -1,2 +0,0 @@ -Handle `requests.exceptions.RetryError` raised in `PackageFinder` that was -causing pip to fail silently when some indexes were unreachable. diff --git a/news/5385.bugfix b/news/5385.bugfix deleted file mode 100644 index b880318cf..000000000 --- a/news/5385.bugfix +++ /dev/null @@ -1 +0,0 @@ -Setting ``PIP_NO_CACHE_DIR=yes`` no longer causes pip to crash. 
diff --git a/news/5483.bugfix b/news/5483.bugfix deleted file mode 100644 index 9db8184c3..000000000 --- a/news/5483.bugfix +++ /dev/null @@ -1,2 +0,0 @@ -Handle `requests.exceptions.RetryError` raised in `PackageFinder` that was -causing pip to fail silently when some indexes were unreachable. diff --git a/news/5656.bugfix b/news/5656.bugfix deleted file mode 100644 index 5bf365a8d..000000000 --- a/news/5656.bugfix +++ /dev/null @@ -1 +0,0 @@ -- Improve PEP 518 build isolation: handle .pth files, so namespace packages are correctly supported under Python 3.2 and earlier. diff --git a/news/5735.feature b/news/5735.feature deleted file mode 100644 index 823bdcb6d..000000000 --- a/news/5735.feature +++ /dev/null @@ -1,2 +0,0 @@ -Make ``PIP_NO_CACHE_DIR`` disable the cache also for truthy values like -``"true"``, ``"yes"``, ``"1"``, etc. diff --git a/news/5737.bugfix b/news/5737.bugfix deleted file mode 100644 index d9e170389..000000000 --- a/news/5737.bugfix +++ /dev/null @@ -1 +0,0 @@ -`pip list --outdated --not-required` should list only outdated packages that are not dependencies of installed packages diff --git a/news/5743.feature b/news/5743.feature deleted file mode 100644 index 1181b3479..000000000 --- a/news/5743.feature +++ /dev/null @@ -1 +0,0 @@ -Implement PEP 517 (allow projects to specify a build backend via pyproject.toml). diff --git a/news/5827.feature b/news/5827.feature deleted file mode 100644 index 2ef8d45be..000000000 --- a/news/5827.feature +++ /dev/null @@ -1 +0,0 @@ -A warning message is emitted when dropping an ``--[extra-]index-url`` value that points to an existing local directory. diff --git a/news/5838.bugfix b/news/5838.bugfix deleted file mode 100644 index b83a9fa91..000000000 --- a/news/5838.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix content type detection if a directory named like an archive is used as a package source. diff --git a/news/5839.bugfix b/news/5839.bugfix deleted file mode 100644 index a9ce698d6..000000000 --- a/news/5839.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix crashes from unparseable requirements when checking installed packages. diff --git a/news/5841.bugfix b/news/5841.bugfix deleted file mode 100644 index ba6f9b282..000000000 --- a/news/5841.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix support for invoking pip using `python src/pip ...`. diff --git a/news/5848.bugfix b/news/5848.bugfix deleted file mode 100644 index f525bc791..000000000 --- a/news/5848.bugfix +++ /dev/null @@ -1 +0,0 @@ -Greatly reduce memory usage when installing wheels containing large files. diff --git a/news/5866.removal b/news/5866.removal deleted file mode 100644 index f9bbd0549..000000000 --- a/news/5866.removal +++ /dev/null @@ -1,2 +0,0 @@ -Remove the deprecated SVN editable detection based on dependency links -during freeze. diff --git a/news/5868.bugfix b/news/5868.bugfix deleted file mode 100644 index 05befeeec..000000000 --- a/news/5868.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix sorting `TypeError` in `move_wheel_files()` when installing some packages. \ No newline at end of file diff --git a/news/5870.bugfix b/news/5870.bugfix deleted file mode 100644 index 7498015ab..000000000 --- a/news/5870.bugfix +++ /dev/null @@ -1 +0,0 @@ -Canonicalize sdist file names so they can be matched to a canonicalized package name passed to ``pip install``. 
diff --git a/news/5888.doc b/news/5888.doc deleted file mode 100644 index 3a19f3755..000000000 --- a/news/5888.doc +++ /dev/null @@ -1 +0,0 @@ -Remove references to removed #egg=- functionality diff --git a/news/5949.doc b/news/5949.doc deleted file mode 100644 index fae71a81d..000000000 --- a/news/5949.doc +++ /dev/null @@ -1 +0,0 @@ -Adds instructions for running pip from source to Development documentation. \ No newline at end of file diff --git a/news/5958.doc b/news/5958.doc deleted file mode 100644 index ff0c893c7..000000000 --- a/news/5958.doc +++ /dev/null @@ -1 +0,0 @@ -Include the Vendoring Policy in the documentation. diff --git a/news/5961.trivial b/news/5961.trivial deleted file mode 100644 index 8e4a895c0..000000000 --- a/news/5961.trivial +++ /dev/null @@ -1,2 +0,0 @@ - -Adds hyperlinks to User IRC and Dev IRC in README. \ No newline at end of file diff --git a/news/5968.bugfix b/news/5968.bugfix deleted file mode 100644 index c4401b3d1..000000000 --- a/news/5968.bugfix +++ /dev/null @@ -1 +0,0 @@ -Percent-decode special characters in SVN URL credentials. diff --git a/news/5984.doc b/news/5984.doc deleted file mode 100644 index 945f54a1d..000000000 --- a/news/5984.doc +++ /dev/null @@ -1 +0,0 @@ -Add command information in usage document for pip cmd diff --git a/news/6060.removal b/news/6060.removal deleted file mode 100644 index 1b531c8c8..000000000 --- a/news/6060.removal +++ /dev/null @@ -1 +0,0 @@ -Remove the deprecated --process-dependency-links option. diff --git a/news/6106.removal b/news/6106.removal deleted file mode 100644 index 56bd9a9c7..000000000 --- a/news/6106.removal +++ /dev/null @@ -1 +0,0 @@ -Deprecate support for Python 3.4 diff --git a/news/6124.bugfix b/news/6124.bugfix deleted file mode 100644 index 94d1339e3..000000000 --- a/news/6124.bugfix +++ /dev/null @@ -1 +0,0 @@ -Redact the password from index urls in a debug message (using --no-index & --verbose options together) diff --git a/news/6141.feature b/news/6141.feature deleted file mode 100644 index de26edf7d..000000000 --- a/news/6141.feature +++ /dev/null @@ -1 +0,0 @@ -Prefix pip's ``--log`` file lines with their timestamp. diff --git a/news/6148.removal b/news/6148.removal deleted file mode 100644 index 1be44f454..000000000 --- a/news/6148.removal +++ /dev/null @@ -1,2 +0,0 @@ -Start printing a warning for Python 2.7 to warn of impending Python 2.7 End-of-life and -prompt users to start migrating to Python 3. 
diff --git a/news/6923E2F2-6D0A-46E1-8378-BB116BFDBC9C.trivial b/news/6923E2F2-6D0A-46E1-8378-BB116BFDBC9C.trivial deleted file mode 100644 index e69de29bb..000000000 diff --git a/news/75FB6292-DB3F-4DF2-AADE-7DE25A678686.trivial b/news/75FB6292-DB3F-4DF2-AADE-7DE25A678686.trivial deleted file mode 100644 index e69de29bb..000000000 diff --git a/news/8339276D-88B8-4AB5-9AA4-6B5DD25B306D.trivial b/news/8339276D-88B8-4AB5-9AA4-6B5DD25B306D.trivial deleted file mode 100644 index e69de29bb..000000000 diff --git a/news/9891612D-8317-4A33-B373-9AAAD1CDA4DA.trivial b/news/9891612D-8317-4A33-B373-9AAAD1CDA4DA.trivial deleted file mode 100644 index e69de29bb..000000000 diff --git a/news/A514E56D-726C-4417-AA87-3AACF3677289.trivial b/news/A514E56D-726C-4417-AA87-3AACF3677289.trivial deleted file mode 100644 index e69de29bb..000000000 diff --git a/news/A6C1E675-2DF0-4D59-9183-19CF908F5CCF.trivial b/news/A6C1E675-2DF0-4D59-9183-19CF908F5CCF.trivial deleted file mode 100644 index e69de29bb..000000000 diff --git a/news/B4BB707A-F48C-4FC9-8459-9B23C2D07FBE.trivial b/news/B4BB707A-F48C-4FC9-8459-9B23C2D07FBE.trivial deleted file mode 100644 index e69de29bb..000000000 diff --git a/news/C32E6CBD-7420-43A9-9A9B-607F7F7B5D90.trivial b/news/C32E6CBD-7420-43A9-9A9B-607F7F7B5D90.trivial deleted file mode 100644 index e69de29bb..000000000 diff --git a/news/D9A540D9-7665-48A5-A1C8-5141ED49E404.trivial b/news/D9A540D9-7665-48A5-A1C8-5141ED49E404.trivial deleted file mode 100644 index e69de29bb..000000000 diff --git a/news/E74EEB42-0067-4C7E-B02A-01FAE49D1D98.trivial b/news/E74EEB42-0067-4C7E-B02A-01FAE49D1D98.trivial deleted file mode 100644 index e69de29bb..000000000 diff --git a/news/EFE4CACD-41B3-478D-926D-2069D76A6059.trivial b/news/EFE4CACD-41B3-478D-926D-2069D76A6059.trivial deleted file mode 100644 index e69de29bb..000000000 diff --git a/news/a035c999-332b-4096-aa90-455f8d603129.trivial b/news/a035c999-332b-4096-aa90-455f8d603129.trivial deleted file mode 100644 index e69de29bb..000000000 diff --git a/news/certifi.vendor b/news/certifi.vendor deleted file mode 100644 index 276053ed7..000000000 --- a/news/certifi.vendor +++ /dev/null @@ -1 +0,0 @@ -Update certifi to 2018.11.29 diff --git a/news/colorama.vendor b/news/colorama.vendor deleted file mode 100644 index ee550c5fc..000000000 --- a/news/colorama.vendor +++ /dev/null @@ -1 +0,0 @@ -Update colorama to 0.4.1 diff --git a/news/dbe44acc-c569-46e2-9348-2e55a2816d5c.trivial b/news/dbe44acc-c569-46e2-9348-2e55a2816d5c.trivial deleted file mode 100644 index e69de29bb..000000000 diff --git a/news/deadbeef.trivial b/news/deadbeef.trivial deleted file mode 100644 index 8b1378917..000000000 --- a/news/deadbeef.trivial +++ /dev/null @@ -1 +0,0 @@ - diff --git a/news/distlib.vendor b/news/distlib.vendor deleted file mode 100644 index 8ed44bd74..000000000 --- a/news/distlib.vendor +++ /dev/null @@ -1 +0,0 @@ -Update distlib to 0.2.8 diff --git a/news/idna.vendor b/news/idna.vendor deleted file mode 100644 index 565c55efb..000000000 --- a/news/idna.vendor +++ /dev/null @@ -1 +0,0 @@ -Update idna to 2.8 diff --git a/news/pep517.vendor b/news/pep517.vendor deleted file mode 100644 index c8c69f8cf..000000000 --- a/news/pep517.vendor +++ /dev/null @@ -1 +0,0 @@ -Update pep517 to 0.5.0 diff --git a/news/pkg_resources.vendor b/news/pkg_resources.vendor deleted file mode 100644 index d5015732a..000000000 --- a/news/pkg_resources.vendor +++ /dev/null @@ -1 +0,0 @@ -Update pkg_resources to 40.6.3 (via setuptools) diff --git a/news/pyparsing.vendor b/news/pyparsing.vendor 
deleted file mode 100644 index 956534864..000000000 --- a/news/pyparsing.vendor +++ /dev/null @@ -1 +0,0 @@ -Update pyparsing to 2.3.1 diff --git a/news/pytoml.vendor b/news/pytoml.vendor deleted file mode 100644 index 2ecdb0b3b..000000000 --- a/news/pytoml.vendor +++ /dev/null @@ -1 +0,0 @@ -Update pytoml to 0.1.20 diff --git a/news/requests.vendor b/news/requests.vendor deleted file mode 100644 index 1d22bfe1b..000000000 --- a/news/requests.vendor +++ /dev/null @@ -1 +0,0 @@ -Update requests to 2.21.0 diff --git a/news/six.vendor b/news/six.vendor deleted file mode 100644 index ca2d82131..000000000 --- a/news/six.vendor +++ /dev/null @@ -1 +0,0 @@ -Update six to 1.12.0 diff --git a/news/urllib3.vendor b/news/urllib3.vendor deleted file mode 100644 index ba188f653..000000000 --- a/news/urllib3.vendor +++ /dev/null @@ -1 +0,0 @@ -Update urllib3 to 1.24.1 From 42bedbf82456ddecf851d21e5489f29050e3efb1 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Tue, 22 Jan 2019 22:13:35 +0530 Subject: [PATCH 56/69] Fix backticks in NEWS --- NEWS.rst | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/NEWS.rst b/NEWS.rst index 7b2948317..b8323a67c 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -44,16 +44,15 @@ Bug Fixes - Editable Git installs without a remote now freeze as editable. (`#4759 `_) - give 401 warning if username/password do not work for URL (`#4833 `_) - Invalid requirement no longer causes stack trace to be printed. (`#5147 `_) -- Handle `requests.exceptions.RetryError` raised in `PackageFinder` that was - causing pip to fail silently when some indexes were unreachable. (`#5270 `_, `#5483 `_) +- Handle ``requests.exceptions.RetryError`` raised in ``PackageFinder`` that was causing pip to fail silently when some indexes were unreachable. (`#5270 `_, `#5483 `_) - Setting ``PIP_NO_CACHE_DIR=yes`` no longer causes pip to crash. (`#5385 `_) - - Improve PEP 518 build isolation: handle .pth files, so namespace packages are correctly supported under Python 3.2 and earlier. (`#5656 `_) -- `pip list --outdated --not-required` should list only outdated packages that are not dependencies of installed packages (`#5737 `_) +- ``pip list --outdated --not-required`` should list only outdated packages that are not dependencies of installed packages (`#5737 `_) - Fix content type detection if a directory named like an archive is used as a package source. (`#5838 `_) - Fix crashes from unparseable requirements when checking installed packages. (`#5839 `_) -- Fix support for invoking pip using `python src/pip ...`. (`#5841 `_) +- Fix support for invoking pip using ``python src/pip ...``. (`#5841 `_) - Greatly reduce memory usage when installing wheels containing large files. (`#5848 `_) -- Fix sorting `TypeError` in `move_wheel_files()` when installing some packages. (`#5868 `_) +- Fix sorting ``TypeError`` in ``move_wheel_files()`` when installing some packages. (`#5868 `_) - Canonicalize sdist file names so they can be matched to a canonicalized package name passed to ``pip install``. (`#5870 `_) - Percent-decode special characters in SVN URL credentials. 
(`#5968 `_) - Redact the password from index urls in a debug message (using --no-index & --verbose options together) (`#6124 `_) From a851c980a6066065bdc69fa6789ea436d8f2808d Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Tue, 22 Jan 2019 22:26:47 +0530 Subject: [PATCH 57/69] Reword a lot of NEWS entries --- NEWS.rst | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/NEWS.rst b/NEWS.rst index b8323a67c..6236bae04 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -15,7 +15,7 @@ Deprecations and Removals - Remove the deprecated SVN editable detection based on dependency links during freeze. (`#5866 `_) -- Remove the deprecated --process-dependency-links option. (`#6060 `_) +- Remove the deprecated ``--process-dependency-links`` option. (`#6060 `_) - Deprecate support for Python 3.4 (`#6106 `_) - Start printing a warning for Python 2.7 to warn of impending Python 2.7 End-of-life and prompt users to start migrating to Python 3. (`#6148 `_) @@ -23,38 +23,38 @@ Deprecations and Removals Features -------- -- Include the package name in a freeze warning if the package is not installed. (`#13D1F422-0CB9-450F-B4DD-9486524712E5 `_) +- Include the package name in a freeze warning if the package is not installed. (`#5943 `_) - Implement manylinux2010 platform tag support. manylinux2010 is the successor to manylinux1. It allows carefully compiled binary wheels to be installed on compatible Linux platforms. (`#5008 `_) - Editable, non-VCS installs now freeze as editable. (`#5031 `_) -- Pip now includes license text of 3rd party libraries. (`#5213 `_) +- Include license text of vendored 3rd party libraries. (`#5213 `_) - Make ``PIP_NO_CACHE_DIR`` disable the cache also for truthy values like ``"true"``, ``"yes"``, ``"1"``, etc. (`#5735 `_) - Implement PEP 517 (allow projects to specify a build backend via pyproject.toml). (`#5743 `_) -- A warning message is emitted when dropping an ``--[extra-]index-url`` value that points to an existing local directory. (`#5827 `_) +- Warn when dropping an ``--[extra-]index-url`` value that points to an existing local directory. (`#5827 `_) - Prefix pip's ``--log`` file lines with their timestamp. (`#6141 `_) Bug Fixes --------- - Handle a broken stdout pipe more gracefully (e.g. when running ``pip list | head``). (`#4170 `_) -- Avoids creating excessively long temporary paths when uninstalling packages. (`#3055 `_) +- Avoid creating excessively long temporary paths when uninstalling packages. (`#3055 `_) - Redact the password from the URL in various log messages. (`#4746 `_) - Editable Git installs without a remote now freeze as editable. (`#4759 `_) -- give 401 warning if username/password do not work for URL (`#4833 `_) -- Invalid requirement no longer causes stack trace to be printed. (`#5147 `_) +- Present 401 warning if username/password do not work for URL (`#4833 `_) +- Avoid printing a stack trace when given an invalid requirement. (`#5147 `_) - Handle ``requests.exceptions.RetryError`` raised in ``PackageFinder`` that was causing pip to fail silently when some indexes were unreachable. (`#5270 `_, `#5483 `_) -- Setting ``PIP_NO_CACHE_DIR=yes`` no longer causes pip to crash. (`#5385 `_) -- - Improve PEP 518 build isolation: handle .pth files, so namespace packages are correctly supported under Python 3.2 and earlier. (`#5656 `_) -- ``pip list --outdated --not-required`` should list only outdated packages that are not dependencies of installed packages (`#5737 `_) +- Fix crash from setting ``PIP_NO_CACHE_DIR=yes``. 
(`#5385 `_) +- Improve build isolation: handle ``.pth`` files, so namespace packages are correctly supported under Python 3.2 and earlier. (`#5656 `_) +- Fix listing of outdated packages that are not dependencies of installed packages in ``pip list --outdated --not-required`` (`#5737 `_) - Fix content type detection if a directory named like an archive is used as a package source. (`#5838 `_) -- Fix crashes from unparseable requirements when checking installed packages. (`#5839 `_) +- Fix crash from unparseable requirements when checking installed packages. (`#5839 `_) - Fix support for invoking pip using ``python src/pip ...``. (`#5841 `_) - Greatly reduce memory usage when installing wheels containing large files. (`#5848 `_) - Fix sorting ``TypeError`` in ``move_wheel_files()`` when installing some packages. (`#5868 `_) - Canonicalize sdist file names so they can be matched to a canonicalized package name passed to ``pip install``. (`#5870 `_) -- Percent-decode special characters in SVN URL credentials. (`#5968 `_) +- Properly decode special characters in SVN URL credentials. (`#5968 `_) - Redact the password from index urls in a debug message (using --no-index & --verbose options together) (`#6124 `_) Vendored Libraries @@ -76,10 +76,10 @@ Vendored Libraries Improved Documentation ---------------------- -- Remove references to removed #egg=- functionality (`#5888 `_) -- Adds instructions for running pip from source to Development documentation. (`#5949 `_) +- Remove references to removed ``#egg=-`` functionality (`#5888 `_) +- Add instructions for running pip from source to Development documentation. (`#5949 `_) - Include the Vendoring Policy in the documentation. (`#5958 `_) -- Add command information in usage document for pip cmd (`#5984 `_) +- Fix omission of command name in HTML usage documentation (`#5984 `_) 18.1 (2018-10-05) From 5b93b49a88377caf09dddbdd8adee218c3c834de Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Tue, 22 Jan 2019 22:39:30 +0530 Subject: [PATCH 58/69] Merge related NEWS entries --- NEWS.rst | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/NEWS.rst b/NEWS.rst index 6236bae04..c9d3ccece 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -40,7 +40,7 @@ Bug Fixes - Handle a broken stdout pipe more gracefully (e.g. when running ``pip list | head``). (`#4170 `_) - Avoid creating excessively long temporary paths when uninstalling packages. (`#3055 `_) -- Redact the password from the URL in various log messages. (`#4746 `_) +- Redact the password from the URL in various log messages. (`#4746 `_, `#6124 `_) - Editable Git installs without a remote now freeze as editable. (`#4759 `_) - Present 401 warning if username/password do not work for URL (`#4833 `_) - Avoid printing a stack trace when given an invalid requirement. (`#5147 `_) @@ -55,7 +55,6 @@ Bug Fixes - Fix sorting ``TypeError`` in ``move_wheel_files()`` when installing some packages. (`#5868 `_) - Canonicalize sdist file names so they can be matched to a canonicalized package name passed to ``pip install``. (`#5870 `_) - Properly decode special characters in SVN URL credentials. 
(`#5968 `_) -- Redact the password from index urls in a debug message (using --no-index & --verbose options together) (`#6124 `_) Vendored Libraries ------------------ From e6c3f099e65422dd4a867c7e637f2ac6edfe8fe7 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Tue, 22 Jan 2019 22:49:12 +0530 Subject: [PATCH 59/69] Reorder NEWS entries - Moved into appropriate sections as @pradyunsg felt - Sort by "importance" and bundle together similar entries --- NEWS.rst | 36 ++++++++++++++++++------------------ 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/NEWS.rst b/NEWS.rst index c9d3ccece..fc509f7b8 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -13,52 +13,52 @@ Deprecations and Removals ------------------------- -- Remove the deprecated SVN editable detection based on dependency links - during freeze. (`#5866 `_) -- Remove the deprecated ``--process-dependency-links`` option. (`#6060 `_) - Deprecate support for Python 3.4 (`#6106 `_) - Start printing a warning for Python 2.7 to warn of impending Python 2.7 End-of-life and prompt users to start migrating to Python 3. (`#6148 `_) +- Remove the deprecated ``--process-dependency-links`` option. (`#6060 `_) +- Remove the deprecated SVN editable detection based on dependency links + during freeze. (`#5866 `_) Features -------- -- Include the package name in a freeze warning if the package is not installed. (`#5943 `_) +- Implement PEP 517 (allow projects to specify a build backend via pyproject.toml). (`#5743 `_) - Implement manylinux2010 platform tag support. manylinux2010 is the successor to manylinux1. It allows carefully compiled binary wheels to be installed on compatible Linux platforms. (`#5008 `_) -- Editable, non-VCS installs now freeze as editable. (`#5031 `_) -- Include license text of vendored 3rd party libraries. (`#5213 `_) -- Make ``PIP_NO_CACHE_DIR`` disable the cache also for truthy values like - ``"true"``, ``"yes"``, ``"1"``, etc. (`#5735 `_) -- Implement PEP 517 (allow projects to specify a build backend via pyproject.toml). (`#5743 `_) +- Improve build isolation: handle ``.pth`` files, so namespace packages are correctly supported under Python 3.2 and earlier. (`#5656 `_) +- Include the package name in a freeze warning if the package is not installed. (`#5943 `_) - Warn when dropping an ``--[extra-]index-url`` value that points to an existing local directory. (`#5827 `_) - Prefix pip's ``--log`` file lines with their timestamp. (`#6141 `_) Bug Fixes --------- -- Handle a broken stdout pipe more gracefully (e.g. when running ``pip list | head``). (`#4170 `_) - Avoid creating excessively long temporary paths when uninstalling packages. (`#3055 `_) - Redact the password from the URL in various log messages. (`#4746 `_, `#6124 `_) -- Editable Git installs without a remote now freeze as editable. (`#4759 `_) -- Present 401 warning if username/password do not work for URL (`#4833 `_) +- Avoid creating excessively long temporary paths when uninstalling packages. (`#3055 `_) - Avoid printing a stack trace when given an invalid requirement. (`#5147 `_) +- Present 401 warning if username/password do not work for URL (`#4833 `_) - Handle ``requests.exceptions.RetryError`` raised in ``PackageFinder`` that was causing pip to fail silently when some indexes were unreachable. (`#5270 `_, `#5483 `_) +- Handle a broken stdout pipe more gracefully (e.g. when running ``pip list | head``). (`#4170 `_) - Fix crash from setting ``PIP_NO_CACHE_DIR=yes``. 
(`#5385 `_) -- Improve build isolation: handle ``.pth`` files, so namespace packages are correctly supported under Python 3.2 and earlier. (`#5656 `_) -- Fix listing of outdated packages that are not dependencies of installed packages in ``pip list --outdated --not-required`` (`#5737 `_) -- Fix content type detection if a directory named like an archive is used as a package source. (`#5838 `_) - Fix crash from unparseable requirements when checking installed packages. (`#5839 `_) +- Fix content type detection if a directory named like an archive is used as a package source. (`#5838 `_) +- Fix listing of outdated packages that are not dependencies of installed packages in ``pip list --outdated --not-required`` (`#5737 `_) +- Fix sorting ``TypeError`` in ``move_wheel_files()`` when installing some packages. (`#5868 `_) - Fix support for invoking pip using ``python src/pip ...``. (`#5841 `_) - Greatly reduce memory usage when installing wheels containing large files. (`#5848 `_) -- Fix sorting ``TypeError`` in ``move_wheel_files()`` when installing some packages. (`#5868 `_) +- Editable non-VCS installs now freeze as editable. (`#5031 `_) +- Editable Git installs without a remote now freeze as editable. (`#4759 `_) - Canonicalize sdist file names so they can be matched to a canonicalized package name passed to ``pip install``. (`#5870 `_) - Properly decode special characters in SVN URL credentials. (`#5968 `_) +- Make ``PIP_NO_CACHE_DIR`` disable the cache also for truthy values like ``"true"``, ``"yes"``, ``"1"``, etc. (`#5735 `_) Vendored Libraries ------------------ +- Include license text of vendored 3rd party libraries. (`#5213 `_) - Update certifi to 2018.11.29 - Update colorama to 0.4.1 - Update distlib to 0.2.8 @@ -75,9 +75,9 @@ Vendored Libraries Improved Documentation ---------------------- -- Remove references to removed ``#egg=-`` functionality (`#5888 `_) -- Add instructions for running pip from source to Development documentation. (`#5949 `_) - Include the Vendoring Policy in the documentation. (`#5958 `_) +- Add instructions for running pip from source to Development documentation. 
(`#5949 `_) +- Remove references to removed ``#egg=-`` functionality (`#5888 `_) - Fix omission of command name in HTML usage documentation (`#5984 `_) From 3a2546e7ccbf13e536e1709210f9cf1ab069849d Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Wed, 23 Jan 2019 01:14:27 +0530 Subject: [PATCH 60/69] Bump for development --- src/pip/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/__init__.py b/src/pip/__init__.py index 16de4ad0f..6791d0720 100644 --- a/src/pip/__init__.py +++ b/src/pip/__init__.py @@ -1 +1 @@ -__version__ = "19.0" +__version__ = "19.1.dev0" From e55fcea956749e3943f2fd97a634726e53e13686 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Wed, 23 Jan 2019 08:01:29 +0530 Subject: [PATCH 61/69] Add tests for the bug --- tests/functional/test_install_wheel.py | 8 ++++++++ tests/functional/test_pep517.py | 14 ++++++++++++++ 2 files changed, 22 insertions(+) diff --git a/tests/functional/test_install_wheel.py b/tests/functional/test_install_wheel.py index 9937d5a0a..e09fdce85 100644 --- a/tests/functional/test_install_wheel.py +++ b/tests/functional/test_install_wheel.py @@ -420,3 +420,11 @@ def test_wheel_compile_syntax_error(script, data): result = script.pip('install', '--compile', package, '--no-index') assert 'yield from' not in result.stdout assert 'SyntaxError: ' not in result.stdout + + +def test_wheel_install_with_no_cache_dir(script, tmpdir, data): + """Check wheel installations work, even with no cache. + """ + package = data.packages.join("simple.dist-0.1-py2.py3-none-any.whl") + result = script.pip('install', '--no-cache-dir', '--no-index', package) + result.assert_installed('simpledist', editable=False) diff --git a/tests/functional/test_pep517.py b/tests/functional/test_pep517.py index 52e3acded..a1a45d27b 100644 --- a/tests/functional/test_pep517.py +++ b/tests/functional/test_pep517.py @@ -109,3 +109,17 @@ def test_pep517_backend_requirements_already_satisfied(script, tmpdir, data): project_dir, ) assert 'Installing backend dependencies:' not in result.stdout + + +def test_pep517_install_with_no_cache_dir(script, tmpdir, data): + """Check builds with a custom backends work, even with no cache. + """ + project_dir = make_project( + tmpdir, requires=['test_backend'], + backend="test_backend" + ) + result = script.pip( + 'install', '--no-cache-dir', '--no-index', '-f', data.backends, + project_dir, + ) + result.assert_installed('project', editable=False) From 7db266687cb6304b0708eb408c8f15efb78eedeb Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Wed, 23 Jan 2019 08:07:23 +0530 Subject: [PATCH 62/69] Check wheel build locations only when ephem cache isn't used When the ephemeral cache is used, the build can always occur. There is no need to check for those. --- src/pip/_internal/wheel.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/src/pip/_internal/wheel.py b/src/pip/_internal/wheel.py index 93b476862..3f200b77e 100644 --- a/src/pip/_internal/wheel.py +++ b/src/pip/_internal/wheel.py @@ -840,12 +840,6 @@ class WheelBuilder(object): newly built wheel, in preparation for installation. :return: True if all the wheels built correctly. """ - # TODO: This check fails if --no-cache-dir is set. And yet we - # might be able to build into the ephemeral cache, surely? 
- building_is_possible = self._wheel_dir or ( - autobuilding and self.wheel_cache.cache_dir - ) - assert building_is_possible buildset = [] format_control = self.finder.format_control @@ -884,6 +878,13 @@ class WheelBuilder(object): if not buildset: return [] + # Is any wheel build not using the ephemeral cache? + if any(not ephem_cache for _, ephem_cache in buildset): + have_directory_for_build = self._wheel_dir or ( + autobuilding and self.wheel_cache.cache_dir + ) + assert have_directory_for_build + # TODO by @pradyunsg # Should break up this method into 2 separate methods. From a1cd49abeb88f295bb3376ac39516c5ff2adae41 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Wed, 23 Jan 2019 09:56:43 +0530 Subject: [PATCH 63/69] :newspaper: --- news/6158.bugfix | 1 + news/6171.bugfix | 1 + 2 files changed, 2 insertions(+) create mode 100644 news/6158.bugfix create mode 100644 news/6171.bugfix diff --git a/news/6158.bugfix b/news/6158.bugfix new file mode 100644 index 000000000..fe5a0352f --- /dev/null +++ b/news/6158.bugfix @@ -0,0 +1 @@ +Fix a crash when using --no-cache-dir with PEP 517 distributions diff --git a/news/6171.bugfix b/news/6171.bugfix new file mode 100644 index 000000000..fe5a0352f --- /dev/null +++ b/news/6171.bugfix @@ -0,0 +1 @@ +Fix a crash when using --no-cache-dir with PEP 517 distributions From 80976e0fcf3bd89771db2f1cc0fdf18af3e89c19 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Wed, 23 Jan 2019 10:02:04 +0530 Subject: [PATCH 64/69] Remove NEWS fragments that didn't get deleted --- news/4170.bugfix | 1 - news/packaging.vendor | 1 - 2 files changed, 2 deletions(-) delete mode 100644 news/4170.bugfix delete mode 100644 news/packaging.vendor diff --git a/news/4170.bugfix b/news/4170.bugfix deleted file mode 100644 index a8e8d07b2..000000000 --- a/news/4170.bugfix +++ /dev/null @@ -1 +0,0 @@ -Handle a broken stdout pipe more gracefully (e.g. when running ``pip list | head``). diff --git a/news/packaging.vendor b/news/packaging.vendor deleted file mode 100644 index a64a114d4..000000000 --- a/news/packaging.vendor +++ /dev/null @@ -1 +0,0 @@ -Update packaging to 19.0 From 94ffc8bd1f368448e4b12a63901d1faa3740dce5 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Wed, 23 Jan 2019 19:25:23 +0530 Subject: [PATCH 65/69] Bump version for bugfix release --- src/pip/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/__init__.py b/src/pip/__init__.py index 6791d0720..39b5f3fcf 100644 --- a/src/pip/__init__.py +++ b/src/pip/__init__.py @@ -1 +1 @@ -__version__ = "19.1.dev0" +__version__ = "19.0.1" From a006602b8c0f9ec1829223117912f09a60e778d0 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Wed, 23 Jan 2019 19:26:01 +0530 Subject: [PATCH 66/69] Generate NEWS --- NEWS.rst | 9 +++++++++ news/6158.bugfix | 1 - news/6171.bugfix | 1 - 3 files changed, 9 insertions(+), 2 deletions(-) delete mode 100644 news/6158.bugfix delete mode 100644 news/6171.bugfix diff --git a/NEWS.rst b/NEWS.rst index fc509f7b8..6ea4be59d 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -7,6 +7,15 @@ .. 
towncrier release notes start +19.0.1 (2019-01-23) +=================== + +Bug Fixes +--------- + +- Fix a crash when using --no-cache-dir with PEP 517 distributions (`#6158 `_, `#6171 `_) + + 19.0 (2019-01-22) ================= diff --git a/news/6158.bugfix b/news/6158.bugfix deleted file mode 100644 index fe5a0352f..000000000 --- a/news/6158.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix a crash when using --no-cache-dir with PEP 517 distributions diff --git a/news/6171.bugfix b/news/6171.bugfix deleted file mode 100644 index fe5a0352f..000000000 --- a/news/6171.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix a crash when using --no-cache-dir with PEP 517 distributions From 43e196388f7646042afc5b74d47426b3aa698d2f Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Wed, 23 Jan 2019 19:27:27 +0530 Subject: [PATCH 67/69] Bump version for development --- src/pip/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/__init__.py b/src/pip/__init__.py index 39b5f3fcf..6791d0720 100644 --- a/src/pip/__init__.py +++ b/src/pip/__init__.py @@ -1 +1 @@ -__version__ = "19.0.1" +__version__ = "19.1.dev0" From 8074db86d6ce8ff16175186bf6a6cbd582cc0425 Mon Sep 17 00:00:00 2001 From: Chris Jerdonek Date: Wed, 23 Jan 2019 18:44:54 -0800 Subject: [PATCH 68/69] Add failing test. --- src/pip/_internal/wheel.py | 51 +++++++++++++++++++++++--------------- tests/unit/test_wheel.py | 42 +++++++++++++++++++++++++++++++ 2 files changed, 73 insertions(+), 20 deletions(-) diff --git a/src/pip/_internal/wheel.py b/src/pip/_internal/wheel.py index 93b476862..db5858a10 100644 --- a/src/pip/_internal/wheel.py +++ b/src/pip/_internal/wheel.py @@ -55,7 +55,7 @@ if MYPY_CHECK_RUNNING: from pip._internal.cache import WheelCache # noqa: F401 from pip._internal.pep425tags import Pep425Tag # noqa: F401 - InstalledCSVRow = Tuple[str, Union[str, Text], str] + InstalledCSVRow = Tuple[str, ...] VERSION_COMPATIBLE = (1, 0) @@ -64,6 +64,10 @@ VERSION_COMPATIBLE = (1, 0) logger = logging.getLogger(__name__) +def normpath(src, p): + return os.path.relpath(src, p).replace(os.path.sep, '/') + + def rehash(path, blocksize=1 << 20): # type: (str, int) -> Tuple[str, str] """Return (hash, length) for path using hashlib.sha256()""" @@ -255,6 +259,28 @@ def sorted_outrows(outrows): return sorted(outrows, key=lambda row: tuple(str(x) for x in row)) +def get_csv_rows_for_installed( + old_csv_rows, # type: Iterable[List[str]] + installed, # type: Dict[str, str] + changed, # set + generated, # type: List[str] + lib_dir, +): + # type: (...) 
-> List[InstalledCSVRow] + installed_rows = [] # type: List[InstalledCSVRow] + for fpath, digest, length in old_csv_rows: + fpath = installed.pop(fpath, fpath) + if fpath in changed: + digest, length = rehash(fpath) + installed_rows.append((fpath, digest, str(length))) + for f in generated: + digest, length = rehash(f) + installed_rows.append((normpath(f, lib_dir), digest, str(length))) + for f in installed: + installed_rows.append((installed[f], '', '')) + return installed_rows + + def move_wheel_files( name, # type: str req, # type: Requirement @@ -305,9 +331,6 @@ def move_wheel_files( compileall.compile_dir(source, force=True, quiet=True) logger.debug(stdout.getvalue()) - def normpath(src, p): - return os.path.relpath(src, p).replace(os.path.sep, '/') - def record_installed(srcfile, destfile, modified=False): """Map archive RECORD paths to installation RECORD paths.""" oldpath = normpath(srcfile, wheeldir) @@ -559,28 +582,16 @@ if __name__ == '__main__': shutil.move(temp_installer, installer) generated.append(installer) - def get_csv_rows_for_installed(old_csv_rows): - # type: (Iterable[List[str]]) -> List[InstalledCSVRow] - installed_rows = [] # type: List[InstalledCSVRow] - for fpath, digest, length in old_csv_rows: - fpath = installed.pop(fpath, fpath) - if fpath in changed: - digest, length = rehash(fpath) - installed_rows.append((fpath, digest, str(length))) - for f in generated: - digest, length = rehash(f) - installed_rows.append((normpath(f, lib_dir), digest, str(length))) - for f in installed: - installed_rows.append((installed[f], '', '')) - return installed_rows - # Record details of all files installed record = os.path.join(info_dir[0], 'RECORD') temp_record = os.path.join(info_dir[0], 'RECORD.pip') with open_for_csv(record, 'r') as record_in: with open_for_csv(temp_record, 'w+') as record_out: reader = csv.reader(record_in) - outrows = get_csv_rows_for_installed(reader) + outrows = get_csv_rows_for_installed( + reader, installed=installed, changed=changed, + generated=generated, lib_dir=lib_dir, + ) writer = csv.writer(record_out) # Sort to simplify testing. 
for row in sorted_outrows(outrows): diff --git a/tests/unit/test_wheel.py b/tests/unit/test_wheel.py index bf316e772..c62034e5d 100644 --- a/tests/unit/test_wheel.py +++ b/tests/unit/test_wheel.py @@ -1,6 +1,8 @@ """Tests for wheel binary packages and .dist-info.""" +import csv import logging import os +import textwrap import pytest from mock import Mock, patch @@ -76,6 +78,46 @@ def test_sorted_outrows(outrows, expected): assert actual == expected +def call_get_csv_rows_for_installed(tmpdir, text): + path = tmpdir.join('temp.txt') + path.write(text) + + installed = {} + changed = set() + generated = [] + lib_dir = '/lib/dir' + + with wheel.open_for_csv(path, 'r') as f: + reader = csv.reader(f) + outrows = wheel.get_csv_rows_for_installed( + reader, installed=installed, changed=changed, + generated=generated, lib_dir=lib_dir, + ) + return outrows + + +def test_get_csv_rows_for_installed(tmpdir): + text = textwrap.dedent("""\ + a,b,c + d,e,f + """) + outrows = call_get_csv_rows_for_installed(tmpdir, text) + + expected = [ + ('a', 'b', 'c'), + ('d', 'e', 'f'), + ] + assert outrows == expected + + +def test_get_csv_rows_for_installed__long_lines(tmpdir): + text = textwrap.dedent("""\ + a,b,c,d + e,f,g,h + """) + outrows = call_get_csv_rows_for_installed(tmpdir, text) + + def test_wheel_version(tmpdir, data): future_wheel = 'futurewheel-1.9-py2.py3-none-any.whl' broken_wheel = 'brokenwheel-1.0-py2.py3-none-any.whl' From 7f25059bf7dbfe5df3a6752c9431416a10eab3d8 Mon Sep 17 00:00:00 2001 From: Chris Jerdonek Date: Wed, 23 Jan 2019 19:16:10 -0800 Subject: [PATCH 69/69] Allow and warn on RECORD lines with more than three elements. --- news/6165.bugfix | 1 + src/pip/_internal/wheel.py | 15 +++++++++++---- tests/unit/test_wheel.py | 23 ++++++++++++++++++++--- 3 files changed, 32 insertions(+), 7 deletions(-) create mode 100644 news/6165.bugfix diff --git a/news/6165.bugfix b/news/6165.bugfix new file mode 100644 index 000000000..2031b40e4 --- /dev/null +++ b/news/6165.bugfix @@ -0,0 +1 @@ +Allow ``RECORD`` lines with more than three elements, and display a warning. diff --git a/src/pip/_internal/wheel.py b/src/pip/_internal/wheel.py index db5858a10..e14ae651f 100644 --- a/src/pip/_internal/wheel.py +++ b/src/pip/_internal/wheel.py @@ -262,17 +262,24 @@ def sorted_outrows(outrows): def get_csv_rows_for_installed( old_csv_rows, # type: Iterable[List[str]] installed, # type: Dict[str, str] - changed, # set + changed, # type: set generated, # type: List[str] - lib_dir, + lib_dir, # type: str ): # type: (...) 
-> List[InstalledCSVRow] installed_rows = [] # type: List[InstalledCSVRow] - for fpath, digest, length in old_csv_rows: + for row in old_csv_rows: + if len(row) > 3: + logger.warning( + 'RECORD line has more than three elements: {}'.format(row) + ) + fpath = row[0] fpath = installed.pop(fpath, fpath) if fpath in changed: digest, length = rehash(fpath) - installed_rows.append((fpath, digest, str(length))) + row[1] = digest + row[2] = length + installed_rows.append(tuple(row)) for f in generated: digest, length = rehash(f) installed_rows.append((normpath(f, lib_dir), digest, str(length))) diff --git a/tests/unit/test_wheel.py b/tests/unit/test_wheel.py index c62034e5d..6fe125da8 100644 --- a/tests/unit/test_wheel.py +++ b/tests/unit/test_wheel.py @@ -96,7 +96,7 @@ def call_get_csv_rows_for_installed(tmpdir, text): return outrows -def test_get_csv_rows_for_installed(tmpdir): +def test_get_csv_rows_for_installed(tmpdir, caplog): text = textwrap.dedent("""\ a,b,c d,e,f @@ -108,15 +108,32 @@ def test_get_csv_rows_for_installed(tmpdir): ('d', 'e', 'f'), ] assert outrows == expected + # Check there were no warnings. + assert len(caplog.records) == 0 -def test_get_csv_rows_for_installed__long_lines(tmpdir): +def test_get_csv_rows_for_installed__long_lines(tmpdir, caplog): text = textwrap.dedent("""\ a,b,c,d - e,f,g,h + e,f,g + h,i,j,k """) outrows = call_get_csv_rows_for_installed(tmpdir, text) + expected = [ + ('a', 'b', 'c', 'd'), + ('e', 'f', 'g'), + ('h', 'i', 'j', 'k'), + ] + assert outrows == expected + + messages = [rec.message for rec in caplog.records] + expected = [ + "RECORD line has more than three elements: ['a', 'b', 'c', 'd']", + "RECORD line has more than three elements: ['h', 'i', 'j', 'k']" + ] + assert messages == expected + def test_wheel_version(tmpdir, data): future_wheel = 'futurewheel-1.9-py2.py3-none-any.whl'
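
For reference, the behaviour the final patch introduces can be summarised in isolation: a ``RECORD`` row with more than three elements no longer breaks the unpacking loop in ``get_csv_rows_for_installed()``; the extra columns are carried through unchanged and a warning is logged instead of a crash. Below is a rough, standalone sketch of that idea, not pip's actual code: the ``rows_for_installed`` name is invented for illustration, and ``rehash`` and the ``changed`` set are passed in as parameters rather than taken from the surrounding install machinery::

    import logging

    logger = logging.getLogger(__name__)

    def rows_for_installed(old_rows, changed, rehash):
        """Pass RECORD rows through, tolerating rows with extra columns."""
        out = []
        for row in old_rows:
            if len(row) > 3:
                # Warn instead of raising ValueError, as plain tuple
                # unpacking of (fpath, digest, length) would.
                logger.warning(
                    'RECORD line has more than three elements: %r', row)
            fpath = row[0]
            if fpath in changed:
                # Re-hash files that were modified during install; RECORD
                # stores the length as text, so keep it a string.
                digest, length = rehash(fpath)
                row[1] = digest
                row[2] = str(length)
            out.append(tuple(row))
        return out

    # A well-formed row plus one with a stray extra column: both survive,
    # and only the second triggers a warning.
    rows = [['a', 'b', 'c'], ['d', 'e', 'f', 'g']]
    print(rows_for_installed(rows, changed=set(), rehash=None))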