Pass PEP 8 checks.

Erik Rose 2015-09-24 18:53:39 -04:00
parent 11dbb92440
commit 0c17248998
7 changed files with 48 additions and 43 deletions
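The diffs below are mostly mechanical PEP 8 cleanups: long lines wrapped via implicit string concatenation inside parentheses (which leaves the runtime value unchanged), unused imports dropped, and call arguments split across lines. As a minimal sketch of how such a change might be verified locally, the snippet below runs the pycodestyle checker (the reference PEP 8 tool) over a set of files; having pycodestyle installed and the placeholder paths are assumptions, not part of this commit.

# Hypothetical verification, not part of the commit: run pycodestyle over
# the touched modules and report the number of PEP 8 violations found.
import pycodestyle

checker = pycodestyle.StyleGuide(max_line_length=79)
report = checker.check_files([
    'path/to/changed_module.py',  # placeholder paths, not the real file names
    'path/to/changed_tests.py',
])
print('PEP 8 violations found:', report.total_errors)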

View File

@@ -523,6 +523,7 @@ always_unzip = partial(
help=SUPPRESS_HELP,
)
def _good_hashes():
"""Return names of hashlib algorithms at least as strong as sha256."""
# Remove getattr when 2.6 dies.
@@ -550,6 +551,7 @@ def _merge_hash(option, opt_str, value, parser):
(opt_str, ', '.join(sorted(goods))))
parser.values.hashes.setdefault(algo, []).append(digest)
hash = partial(
Option,
'-H', '--hash',

View File

@@ -2,7 +2,6 @@ from __future__ import absolute_import
import cgi
import email.utils
import hashlib
import getpass
import json
import logging
@@ -29,9 +28,8 @@ from pip.exceptions import InstallationError, HashMismatch
from pip.models import PyPI
from pip.utils import (splitext, rmtree, format_size, display_path,
backup_dir, ask_path_exists, unpack_file,
call_subprocess, ARCHIVE_EXTENSIONS, consume)
ARCHIVE_EXTENSIONS, consume)
from pip.utils.filesystem import check_path_owner
from pip.utils.logging import indent_log
from pip.utils.ui import DownloadProgressBar, DownloadProgressSpinner
from pip.locations import write_delete_marker_file
from pip.vcs import vcs
@@ -587,7 +585,7 @@ def _download_url(resp, link, content_file, hashes):
downloaded_chunks = written_chunks(progress_indicator(resp_read(4096),
4096))
if hashes:
hashes.check_against_chunks(downloaded_chunks)
hashes.check_against_chunks(downloaded_chunks)
else:
consume(downloaded_chunks)
@@ -686,7 +684,9 @@ def unpack_file_url(link, location, download_dir=None, hashes=None):
# If a download dir is specified, is the file already there and valid?
already_downloaded_path = None
if download_dir:
already_downloaded_path = _check_download_dir(link, download_dir, hashes)
already_downloaded_path = _check_download_dir(link,
download_dir,
hashes)
if already_downloaded_path:
from_path = already_downloaded_path

View File

@@ -115,7 +115,7 @@ class HashError(InstallationError):
def __str__(self):
return '%s\n%s' % (self.head(), self.body())
def _requirement_name(self): # TODO: Make sure this is the best it can be and is DRY with subclasses.
def _requirement_name(self):
"""Return a description of the requirement that triggered me.
This default implementation returns long description of the req, with

View File

@@ -1,7 +1,6 @@
from __future__ import absolute_import
from collections import defaultdict
import functools
from itertools import chain
import logging
import os
@@ -359,7 +358,6 @@ class RequirementSet(object):
if hash_errors:
raise hash_errors
def _check_skip_installed(self, req_to_install, finder):
"""Check if req_to_install should be skipped.
@@ -529,7 +527,7 @@ class RequirementSet(object):
elif is_file_url(link) and is_dir_url(link):
raise DirectoryUrlHashUnsupported()
if (not req_to_install.original_link and
not req_to_install.is_pinned):
not req_to_install.is_pinned):
# Unpinned packages are asking for trouble when a new
# version is uploaded. This isn't a security check, but
# it saves users a surprising hash mismatch in the

View File

@@ -228,12 +228,13 @@ def test_hashed_install_success(script, data, tmpdir):
"""
file_url = path_to_url(
(data.packages / 'simple-1.0.tar.gz').abspath)
with requirements_file('simple2==1.0 --hash=sha256:9336af72ca661e6336eb87bc7de3e8844d853e3848c2b9bbd2e8bf01db88c2c7\n'
'{simple} --hash=sha256:393043e672415891885c9a2a0929b1af95fb866d6ca016b42d2e6ce53619b653'.format(simple=file_url),
tmpdir) as reqs_file:
result = script.pip_install_local('-r',
reqs_file.abspath,
expect_error=False)
with requirements_file(
'simple2==1.0 --hash=sha256:9336af72ca661e6336eb87bc7de3e8844d853e'
'3848c2b9bbd2e8bf01db88c2c7\n'
'{simple} --hash=sha256:393043e672415891885c9a2a0929b1af95fb866d6c'
'a016b42d2e6ce53619b653'.format(simple=file_url),
tmpdir) as reqs_file:
script.pip_install_local('-r', reqs_file.abspath, expect_error=False)
def test_hashed_install_failure(script, data, tmpdir):
@@ -244,9 +245,8 @@ def test_hashed_install_failure(script, data, tmpdir):
kinds of hashes are in test_req.py.
"""
file_url = path_to_url(
(data.packages / 'simple-1.0.tar.gz').abspath)
with requirements_file('simple2==1.0 --hash=sha256:9336af72ca661e6336eb87bc7de3e8844d853e3848c2b9bbd2e8bf01db88c2c\n',
with requirements_file('simple2==1.0 --hash=sha256:9336af72ca661e6336eb87b'
'c7de3e8844d853e3848c2b9bbd2e8bf01db88c2c\n',
tmpdir) as reqs_file:
result = script.pip_install_local('-r',
reqs_file.abspath,
@@ -254,7 +254,6 @@ def test_hashed_install_failure(script, data, tmpdir):
assert len(result.files_created) == 0
def test_install_from_local_directory_with_symlinks_to_directories(
script, data):
"""

View File

@@ -9,8 +9,7 @@ from mock import Mock, patch, mock_open
from pip.exceptions import (PreviousBuildDirError, InvalidWheelFilename,
UnsupportedWheel)
from pip.download import path_to_url, PipSession
from pip.exceptions import (HashMissing, HashUnpinned, VcsHashUnsupported,
HashErrors, InstallationError)
from pip.exceptions import HashErrors, InstallationError
from pip.index import PackageFinder
from pip.req import (InstallRequirement, RequirementSet, Requirements)
from pip.req.req_file import process_line
@@ -85,7 +84,9 @@ class TestRequirementSet(object):
list(process_line('blessings==1.0', 'file', 1))[0])
# This flag activates --require-hashes mode:
reqset.add_requirement(
list(process_line('tracefront==0.1 --hash=sha256:somehash', 'file', 2))[0])
list(process_line('tracefront==0.1 --hash=sha256:somehash',
'file',
2))[0])
# This hash should be accepted because it came from the reqs file, not
# from the internet:
reqset.add_requirement(
@@ -117,12 +118,12 @@ class TestRequirementSet(object):
assert_raises_regexp(
HashErrors,
r'These requirements were missing hashes.*\n'
r' simple==1.0 --hash=sha256:393043e672415891885c9a2a0929b1af95fb866'
r'd6ca016b42d2e6ce53619b653$',
r' simple==1.0 --hash=sha256:393043e672415891885c9a2a0929b1af95'
r'fb866d6ca016b42d2e6ce53619b653$',
reqset.prepare_files,
finder)
def test_unsupported_hashes(self, data): # NEXT: Add any other test cases needed, probably delete the ones in test_install or just have one or two functional tests to make sure prepare_files() gets called when we expect (so we can actually stop on hash errors), clean up, and call it a day. Make sure we test that hashes are checked all 3 places in pip.download. Test http success.
def test_unsupported_hashes(self, data):
"""VCS and dir links should raise errors when --require-hashes is
on.
@@ -133,7 +134,7 @@ class TestRequirementSet(object):
reqset = self.basic_reqset(require_hashes=True)
reqset.add_requirement(
list(process_line(
'git+git://github.com/pypa/pip-test-package --hash=sha256:12345',
'git+git://github.com/pypa/pip-test-package -H sha256:12345',
'file',
1))[0])
dir_path = data.packages.join('FSPkg')
@@ -162,16 +163,16 @@ class TestRequirementSet(object):
reqset = self.basic_reqset()
# Test that there must be exactly 1 specifier:
reqset.add_requirement(
list(process_line('simple --hash=sha256:a90427ae31f5d1d0d7ec06ee97d9fcf'
'2d0fc9a786985250c1c83fd68df5911dd',
list(process_line('simple --hash=sha256:a90427ae31f5d1d0d7ec06ee97'
'd9fcf2d0fc9a786985250c1c83fd68df5911dd',
'file',
1))[0])
# Test that the operator must be ==:
reqset.add_requirement(
list(process_line('simple2>1.0 --hash=sha256:3ad45e1e9aa48b4462af0123f6'
'a7e44a9115db1ef945d4d92c123dfe21815a06',
'file',
2))[0])
reqset.add_requirement(list(process_line(
'simple2>1.0 --hash=sha256:3ad45e1e9aa48b4462af0'
'123f6a7e44a9115db1ef945d4d92c123dfe21815a06',
'file',
2))[0])
finder = PackageFinder([data.find_links], [], session=PipSession())
assert_raises_regexp(
HashErrors,
@@ -188,7 +189,7 @@ class TestRequirementSet(object):
(data.packages / 'simple-1.0.tar.gz').abspath)
reqset = self.basic_reqset(require_hashes=True)
reqset.add_requirement(
list(process_line('%s --hash=sha256:badbad' % file_url, 'file', 1))[0])
list(process_line('%s -H sha256:badbad' % file_url, 'file', 1))[0])
finder = PackageFinder([data.find_links], [], session=PipSession())
assert_raises_regexp(
HashErrors,
@@ -196,7 +197,7 @@ class TestRequirementSet(object):
r' file:///.*/data/packages/simple-1\.0\.tar\.gz .*:\n'
r' Expected sha256 badbad\n'
r' Got 393043e672415891885c9a2a0929b1af95fb866d'
r'6ca016b42d2e6ce53619b653$',
r'6ca016b42d2e6ce53619b653$',
reqset.prepare_files,
finder)
@@ -207,7 +208,7 @@ class TestRequirementSet(object):
req = list(process_line(
'TopoRequires2==0.0.1 '
'--hash=sha256:eaf9a01242c9f2f42cf2bd82a6a848cd'
'e3591d14f7896bdbefcf48543720c970',
'e3591d14f7896bdbefcf48543720c970',
'file', 1))[0]
deps = reqset._prepare_file(finder, req, require_hashes=True)
assert deps == [], ('_prepare_files() resolved dependencies even '

View File

@@ -168,16 +168,21 @@ class TestProcessLine(object):
Make sure it reads and preserve multiple hashes.
"""
line = ('SomeProject '
'--hash=sha256:2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824 '
'-H sha384:59e1748777448c69de6b800d7a33bbfb9ff1b463e44354c3553bcdb9c666fa90125a3c79f90397bdf5f6a13de828684f '
'--hash=sha256:486ea46224d1bb4fb680f34f7c9ad96a8f24ec88be73ea8e5a6c65260e9cb8a7')
line = ('SomeProject --hash=sha256:2cf24dba5fb0a30e26e83b2ac5b9e29e1b1'
'61e5c1fa7425e73043362938b9824 '
'-H sha384:59e1748777448c69de6b800d7a33bbfb9ff1b463e44354c3553'
'bcdb9c666fa90125a3c79f90397bdf5f6a13de828684f '
'--hash=sha256:486ea46224d1bb4fb680f34f7c9ad96a8f24ec88be73ea8'
'e5a6c65260e9cb8a7')
filename = 'filename'
req = list(process_line(line, filename, 1))[0]
assert req.options == {'hashes': {
'sha256': ['2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824',
'486ea46224d1bb4fb680f34f7c9ad96a8f24ec88be73ea8e5a6c65260e9cb8a7'],
'sha384': ['59e1748777448c69de6b800d7a33bbfb9ff1b463e44354c3553bcdb9c666fa90125a3c79f90397bdf5f6a13de828684f']}}
'sha256': ['2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e730433'
'62938b9824',
'486ea46224d1bb4fb680f34f7c9ad96a8f24ec88be73ea8e5a6c65'
'260e9cb8a7'],
'sha384': ['59e1748777448c69de6b800d7a33bbfb9ff1b463e44354c3553bcd'
'b9c666fa90125a3c79f90397bdf5f6a13de828684f']}}
def test_set_isolated(self, options):
line = 'SomeProject'