Style fixes for the latest flake8

Donald Stufft 2015-02-24 07:46:10 -05:00
parent c1c638bd88
commit 0bc8aa5622
17 changed files with 160 additions and 159 deletions
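Every hunk below applies the same mechanical change: multi-line boolean and arithmetic expressions that used to break the line before a binary operator now break after it, the style that the pep8/flake8 release current at the time appears to flag (W503, "line break before binary operator"). A minimal sketch of the pattern, using hypothetical names, not code from this commit:

    # Old style: the continuation line starts with the operator
    # (reported by the newer flake8 default).
    if (first_condition
            and second_condition):
        do_work()

    # New style: the operator closes the first line instead,
    # and the continuation line holds only the next operand.
    if (first_condition and
            second_condition):
        do_work()

No runtime behaviour changes; only the placement of line breaks moves.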


@@ -220,8 +220,8 @@ class Command(object):
             sys.exit(VIRTUALENV_NOT_FOUND)
         # Check if we're using the latest version of pip available
-        if (not options.disable_pip_version_check
-                and not getattr(options, "no_index", False)):
+        if (not options.disable_pip_version_check and not
+                getattr(options, "no_index", False)):
             with self._build_session(
                 options,
                 retries=0,


@@ -211,8 +211,8 @@ class InstallCommand(Command):
         # If we have --no-install or --no-download and no --build we use the
         # legacy static build dir
-        if (options.build_dir is None
-                and (options.no_install or options.no_download)):
+        if (options.build_dir is None and
+                (options.no_install or options.no_download)):
             options.build_dir = build_prefix
         if options.download_dir:
@@ -237,8 +237,8 @@ class InstallCommand(Command):
             options.ignore_installed = True
             temp_target_dir = tempfile.mkdtemp()
             options.target_dir = os.path.abspath(options.target_dir)
-            if (os.path.exists(options.target_dir)
-                    and not os.path.isdir(options.target_dir)):
+            if (os.path.exists(options.target_dir) and not
+                    os.path.isdir(options.target_dir)):
                 raise CommandError(
                     "Target path exists but is not a directory, will not "
                     "continue."
@@ -376,9 +376,9 @@ class InstallCommand(Command):
                 raise
         finally:
             # Clean up
-            if ((not options.no_clean)
-                    and ((not options.no_install)
-                         or options.download_dir)):
+            if ((not options.no_clean) and
+                    ((not options.no_install) or
+                        options.download_dir)):
                 requirement_set.cleanup_files()
         if options.target_dir:


@@ -152,8 +152,8 @@ class ZipCommand(Command):
     def unzip_package(self, module_name, filename):
         zip_filename = os.path.dirname(filename)
-        if (not os.path.isfile(zip_filename)
-                and zipfile.is_zipfile(zip_filename)):
+        if (not os.path.isfile(zip_filename) and
+                zipfile.is_zipfile(zip_filename)):
             raise InstallationError(
                 'Module %s (in %s) isn\'t located in a zip file in %s'
                 % (module_name, filename, zip_filename))
@@ -355,9 +355,9 @@ class ZipCommand(Command):
             if os.path.dirname(path) not in self.paths():
                 logger.info('Zipped egg: %s', display_path(path))
                 continue
-            if (basename != 'site-packages'
-                    and basename != 'dist-packages'
-                    and not path.replace('\\', '/').endswith('lib/python')):
+            if (basename != 'site-packages' and
+                    basename != 'dist-packages' and not
+                    path.replace('\\', '/').endswith('lib/python')):
                 continue
             logger.info('In %s:', display_path(path))


@@ -99,5 +99,5 @@ if sys.version_info >= (2, 7):
 # windows detection, covers cpython and ironpython
-WINDOWS = (sys.platform.startswith("win")
-           or (sys.platform == 'cli' and os.name == 'nt'))
+WINDOWS = (sys.platform.startswith("win") or
+           (sys.platform == 'cli' and os.name == 'nt'))


@@ -376,8 +376,8 @@ def get_file_content(url, comes_from=None, session=None):
     match = _scheme_re.search(url)
     if match:
         scheme = match.group(1).lower()
-        if (scheme == 'file' and comes_from
-                and comes_from.startswith('http')):
+        if (scheme == 'file' and comes_from and
+                comes_from.startswith('http')):
             raise InstallationError(
                 'Requirements file %s references URL %s, which is local'
                 % (comes_from, url))


@@ -228,8 +228,8 @@ class PackageFinder(object):
                 addr = ipaddress.ip_address(
                     origin[1]
                     if (
-                        isinstance(origin[1], six.text_type)
-                        or origin[1] is None
+                        isinstance(origin[1], six.text_type) or
+                        origin[1] is None
                     )
                     else origin[1].decode("utf8")
                 )
@@ -250,9 +250,9 @@ class PackageFinder(object):
                 continue
             # Check to see if the port patches
-            if (origin[2] != secure_origin[2]
-                    and secure_origin[2] != "*"
-                    and secure_origin[2] is not None):
+            if (origin[2] != secure_origin[2] and
+                    secure_origin[2] != "*" and
+                    secure_origin[2] is not None):
                 continue
             # If we've gotten here, then this origin matches the current
@@ -369,10 +369,10 @@ class PackageFinder(object):
                     req.name.lower()
                 )
             )
-        if (not found_versions
-                and not page_versions
-                and not dependency_versions
-                and not file_versions):
+        if (not found_versions and not
+                page_versions and not
+                dependency_versions and not
+                file_versions):
             logger.critical(
                 'Could not find any downloads that satisfy the requirement %s',
                 req,
@@ -417,8 +417,8 @@ class PackageFinder(object):
         # This is an intentional priority ordering
         all_versions = (
-            file_versions + found_versions + page_versions
-            + dependency_versions
+            file_versions + found_versions + page_versions +
+            dependency_versions
         )
         # Filter out anything which doesn't match our specifier
@@ -496,8 +496,8 @@ class PackageFinder(object):
                 'Installed version (%s) is most up-to-date (past versions: '
                 '%s)',
                 req.satisfied_by.version,
-                ', '.join(str(i.version) for i in applicable_versions[1:])
-                or "none",
+                ', '.join(str(i.version) for i in applicable_versions[1:]) or
+                "none",
             )
             raise BestVersionAlreadyInstalled
@@ -510,8 +510,8 @@ class PackageFinder(object):
         selected_version = applicable_versions[0].location
-        if (selected_version.verifiable is not None
-                and not selected_version.verifiable):
+        if (selected_version.verifiable is not None and not
+                selected_version.verifiable):
             logger.warning(
                 "%s is potentially insecure and unverifiable.", req.name,
             )
@@ -572,8 +572,8 @@ class PackageFinder(object):
         for link in page.rel_links():
             normalized = normalize_name(req.name).lower()
-            if (normalized not in self.allow_external
-                    and not self.allow_all_external):
+            if (normalized not in self.allow_external and not
+                    self.allow_all_external):
                 self.need_warn_external = True
                 logger.debug(
                     "Not searching %s for files because external "
@@ -582,9 +582,9 @@ class PackageFinder(object):
                 )
                 continue
-            if (link.trusted is not None
-                    and not link.trusted
-                    and normalized not in self.allow_unverified):
+            if (link.trusted is not None and not
+                    link.trusted and
+                    normalized not in self.allow_unverified):
                 logger.debug(
                     "Not searching %s for urls, it is an "
                     "untrusted link and cannot produce safe or "
@@ -675,8 +675,8 @@ class PackageFinder(object):
                     link
                 )
                 return
-            if (pkg_resources.safe_name(wheel.name).lower()
-                    != pkg_resources.safe_name(search_name).lower()):
+            if (pkg_resources.safe_name(wheel.name).lower() !=
+                    pkg_resources.safe_name(search_name).lower()):
                 logger.debug(
                     'Skipping link %s; wrong project name (not %s)',
                     link,
@@ -699,12 +699,12 @@ class PackageFinder(object):
             comes_from = getattr(link, "comes_from", None)
             if (
                 (
-                    not platform.startswith('win')
-                    and not platform.startswith('macosx')
-                    and not platform == 'cli'
-                )
-                and comes_from is not None
-                and urllib_parse.urlparse(
+                    not platform.startswith('win') and not
+                    platform.startswith('macosx') and not
+                    platform == 'cli'
+                ) and
+                comes_from is not None and
+                urllib_parse.urlparse(
                     comes_from.url
                 ).netloc.endswith(PyPI.netloc)):
                 if not wheel.supported(tags=supported_tags_noarch):
@@ -726,21 +726,21 @@ class PackageFinder(object):
                 )
                 return
-            if (link.internal is not None
-                    and not link.internal
-                    and not normalize_name(search_name).lower()
-                    in self.allow_external
-                    and not self.allow_all_external):
+            if (link.internal is not None and not
+                    link.internal and not
+                    normalize_name(search_name).lower()
+                    in self.allow_external and not
+                    self.allow_all_external):
                 # We have a link that we are sure is external, so we should skip
                 # it unless we are allowing externals
                 logger.debug("Skipping %s because it is externally hosted.", link)
                 self.need_warn_external = True
                 return
-            if (link.verifiable is not None
-                    and not link.verifiable
-                    and not (normalize_name(search_name).lower()
-                             in self.allow_unverified)):
+            if (link.verifiable is not None and not
+                    link.verifiable and not
+                    (normalize_name(search_name).lower()
+                        in self.allow_unverified)):
                 # We have a link that we are sure we cannot verify its integrity,
                 # so we should skip it unless we are allowing unsafe installs
                 # for this requirement.
@@ -856,8 +856,8 @@ class HTMLPage(object):
                 # Tack index.html onto file:// URLs that point to directories
                 (scheme, netloc, path, params, query, fragment) = \
                     urllib_parse.urlparse(url)
-                if (scheme == 'file'
-                        and os.path.isdir(urllib_request.url2pathname(path))):
+                if (scheme == 'file' and
+                        os.path.isdir(urllib_request.url2pathname(path))):
                     # add trailing slash if not present so urljoin doesn't trim
                     # final segment
                     if not url.endswith('/'):
@@ -978,8 +978,8 @@ class HTMLPage(object):
                 # Only api_versions >= 2 have a distinction between
                 # external and internal links
                 internal = bool(
-                    anchor.get("rel")
-                    and "internal" in anchor.get("rel").split()
+                    anchor.get("rel") and
+                    "internal" in anchor.get("rel").split()
                 )
                 yield Link(url, self, internal=internal)
@@ -1016,9 +1016,9 @@ class HTMLPage(object):
             if not href_match:
                 continue
             url = (
-                href_match.group(1)
-                or href_match.group(2)
-                or href_match.group(3)
+                href_match.group(1) or
+                href_match.group(2) or
+                href_match.group(3)
             )
             if not url:
                 continue


@@ -54,10 +54,10 @@ def freeze(
     if requirement:
         with open(requirement) as req_file:
             for line in req_file:
-                if (not line.strip()
-                        or line.strip().startswith('#')
-                        or (skip_match and skip_match.search(line))
-                        or line.startswith((
+                if (not line.strip() or
+                        line.strip().startswith('#') or
+                        (skip_match and skip_match.search(line)) or
+                        line.startswith((
                             '-r', '--requirement',
                             '-Z', '--always-unzip',
                             '-f', '--find-links',


@@ -134,8 +134,8 @@ class InstallRequirement(object):
         if is_url(name):
             link = Link(name)
-        elif (os.path.isdir(path)
-                and (os.path.sep in name or name.startswith('.'))):
+        elif (os.path.isdir(path) and
+                (os.path.sep in name or name.startswith('.'))):
             if not is_installable_dir(path):
                 raise InstallationError(
                     "Directory %r is not installable. File 'setup.py' not "
@@ -323,8 +323,8 @@ class InstallRequirement(object):
         # NOTE: this self.name check only works when installing from a
         # specifier (not archive path/urls)
         # TODO: take this out later
-        if (self.name == 'distribute'
-                and not os.path.isdir(
+        if (self.name == 'distribute' and not
+                os.path.isdir(
                     os.path.join(self.source_dir, 'setuptools'))):
             rmtree(os.path.join(self.source_dir, 'distribute.egg-info'))
@@ -431,8 +431,8 @@ exec(compile(
                 if (
                     os.path.exists(
                         os.path.join(root, dir, 'bin', 'python')
-                    )
-                    or os.path.exists(
+                    ) or
+                    os.path.exists(
                         os.path.join(
                             root, dir, 'Scripts', 'Python.exe'
                         )
@@ -457,8 +457,8 @@ exec(compile(
             # an extracted tarball for testing purposes.
             if len(filenames) > 1:
                 filenames.sort(
-                    key=lambda x: x.count(os.path.sep)
-                    + (os.path.altsep and x.count(os.path.altsep) or 0)
+                    key=lambda x: x.count(os.path.sep) +
+                    (os.path.altsep and x.count(os.path.altsep) or 0)
                 )
             self._egg_info_path = os.path.join(base, filenames[0])
         return os.path.join(self._egg_info_path, filename)
@@ -878,10 +878,10 @@ exec(compile(
                 "import setuptools, tokenize; __file__=%r; exec(compile("
                 "getattr(tokenize, 'open', open)(__file__).read().replace"
                 "('\\r\\n', '\\n'), __file__, 'exec'))" % self.setup_py
-            ]
-            + list(global_options)
-            + ['develop', '--no-deps']
-            + list(install_options),
+            ] +
+            list(global_options) +
+            ['develop', '--no-deps'] +
+            list(install_options),
             cwd=cwd, filter_stdout=self._filter_install,
             show_stdout=False)
@@ -924,9 +924,9 @@ exec(compile(
                 # then this check has already run before. we don't want it to
                 # run again, and return False, since it would block the uninstall
                 # TODO: remove this later
-                if (self.req.project_name == 'setuptools'
-                        and self.conflicts_with
-                        and self.conflicts_with.project_name == 'distribute'):
+                if (self.req.project_name == 'setuptools' and
+                        self.conflicts_with and
+                        self.conflicts_with.project_name == 'distribute'):
                     return True
             else:
                 self.satisfied_by = pkg_resources.get_distribution(self.req)
@@ -939,8 +939,8 @@ exec(compile(
             if self.use_user_site:
                 if dist_in_usersite(existing_dist):
                     self.conflicts_with = existing_dist
-                elif (running_under_virtualenv()
-                        and dist_in_site_packages(existing_dist)):
+                elif (running_under_virtualenv() and
+                        dist_in_site_packages(existing_dist)):
                     raise InstallationError(
                         "Will not install to the user site because it will "
                         "lack sys.path precedence to %s in %s" %


@@ -169,8 +169,8 @@ class RequirementSet(object):
                 if self.upgrade:
                     # don't uninstall conflict if user install and
                     # conflict is not user install
-                    if not (self.use_user_site
-                            and not dist_in_usersite(
+                    if not (self.use_user_site and
+                            not dist_in_usersite(
                                 req_to_install.satisfied_by
                             )):
                         req_to_install.conflicts_with = \
@@ -195,8 +195,8 @@ class RequirementSet(object):
                     self.build_dir,
                 )
-            if (req_to_install.source_dir is not None
-                    and not os.path.isdir(req_to_install.source_dir)):
+            if (req_to_install.source_dir is not None and not
+                    os.path.isdir(req_to_install.source_dir)):
                 raise InstallationError(
                     'Could not install requirement %s because source folder %s'
                     ' does not exist (perhaps --no-download was used without '
@@ -245,8 +245,8 @@ class RequirementSet(object):
                     if not best_installed:
                         # don't uninstall conflict if user install and
                         # conflict is not user install
-                        if not (self.use_user_site
-                                and not dist_in_usersite(
+                        if not (self.use_user_site and not
+                                dist_in_usersite(
                                     req_to_install.satisfied_by
                                 )):
                             req_to_install.conflicts_with = \
@@ -269,8 +269,8 @@ class RequirementSet(object):
             if req_to_install.editable:
                 logger.info('Obtaining %s', req_to_install)
             elif install:
-                if (req_to_install.url
-                        and req_to_install.url.lower().startswith('file:')):
+                if (req_to_install.url and
+                        req_to_install.url.lower().startswith('file:')):
                     path = url_to_path(req_to_install.url)
                     logger.info('Processing %s', display_path(path))
                 else:
@@ -341,8 +341,8 @@ class RequirementSet(object):
                 try:
                     if (
-                        url.filename.endswith(wheel_ext)
-                        and self.wheel_download_dir
+                        url.filename.endswith(wheel_ext) and
+                        self.wheel_download_dir
                     ):
                         # when doing 'pip wheel`
                         download_dir = self.wheel_download_dir
@@ -393,8 +393,8 @@ class RequirementSet(object):
                 if self.upgrade or self.ignore_installed:
                     # don't uninstall conflict if user install and
                     # conflict is not user install
-                    if not (self.use_user_site
-                            and not dist_in_usersite(
+                    if not (self.use_user_site and not
+                            dist_in_usersite(
                                 req_to_install.satisfied_by)):
                         req_to_install.conflicts_with = \
                             req_to_install.satisfied_by
@@ -471,8 +471,8 @@ class RequirementSet(object):
     def _pip_has_created_build_dir(self):
         return (
-            self.build_dir == build_prefix
-            and os.path.exists(
+            self.build_dir == build_prefix and
+            os.path.exists(
                 os.path.join(self.build_dir, PIP_DELETE_MARKER_FILENAME)
             )
         )
@@ -493,9 +493,9 @@ class RequirementSet(object):
         # TODO: take this out later
         distribute_req = pkg_resources.Requirement.parse("distribute>=0.7")
         for req in to_install:
-            if (req.name == 'distribute'
-                    and req.installed_version is not None
-                    and req.installed_version in distribute_req):
+            if (req.name == 'distribute' and
+                    req.installed_version is not None and
+                    req.installed_version in distribute_req):
                 to_install.remove(req)
                 to_install.append(req)
@@ -550,13 +550,13 @@ class RequirementSet(object):
                 )
             except:
                 # if install did not succeed, rollback previous uninstall
-                if (requirement.conflicts_with
-                        and not requirement.install_succeeded):
+                if (requirement.conflicts_with and not
+                        requirement.install_succeeded):
                     requirement.rollback_uninstall()
                 raise
             else:
-                if (requirement.conflicts_with
-                        and requirement.install_succeeded):
+                if (requirement.conflicts_with and
+                        requirement.install_succeeded):
                     requirement.commit_uninstall()
             requirement.remove_temporary_source()


@@ -224,8 +224,8 @@ def is_svn_page(html):
     """
     Returns true if the page appears to be the index page of an svn repository
     """
-    return (re.search(r'<title>[^<]*Revision \d+:', html)
-            and re.search(r'Powered by (?:<a[^>]*?>)?Subversion', html, re.I))
+    return (re.search(r'<title>[^<]*Revision \d+:', html) and
+            re.search(r'Powered by (?:<a[^>]*?>)?Subversion', html, re.I))
 def file_contents(filename):
@@ -236,8 +236,8 @@ def file_contents(filename):
 def split_leading_dir(path):
     path = str(path)
     path = path.lstrip('/').lstrip('\\')
-    if '/' in path and (('\\' in path and path.find('/') < path.find('\\'))
-                        or '\\' not in path):
+    if '/' in path and (('\\' in path and path.find('/') < path.find('\\')) or
+                        '\\' not in path):
         return path.split('/', 1)
     elif '\\' in path:
         return path.split('\\', 1)
@@ -421,11 +421,11 @@ def get_installed_distributions(local_only=True,
             return True
     return [d for d in pkg_resources.working_set
-            if local_test(d)
-            and d.key not in skip
-            and editable_test(d)
-            and editables_only_test(d)
-            and user_test(d)
+            if local_test(d) and
+            d.key not in skip and
+            editable_test(d) and
+            editables_only_test(d) and
+            user_test(d)
             ]
@@ -576,8 +576,8 @@ def untar_file(filename, location):
         os.makedirs(location)
     if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'):
         mode = 'r:gz'
-    elif (filename.lower().endswith('.bz2')
-            or filename.lower().endswith('.tbz')):
+    elif (filename.lower().endswith('.bz2') or
+            filename.lower().endswith('.tbz')):
         mode = 'r:bz2'
     elif filename.lower().endswith('.tar'):
         mode = 'r'
@@ -644,22 +644,22 @@ def untar_file(filename, location):
 def unpack_file(filename, location, content_type, link):
     filename = os.path.realpath(filename)
-    if (content_type == 'application/zip'
-            or filename.endswith('.zip')
-            or filename.endswith('.whl')
-            or zipfile.is_zipfile(filename)):
+    if (content_type == 'application/zip' or
+            filename.endswith('.zip') or
+            filename.endswith('.whl') or
+            zipfile.is_zipfile(filename)):
         unzip_file(
             filename,
             location,
             flatten=not filename.endswith('.whl')
         )
-    elif (content_type == 'application/x-gzip'
-            or tarfile.is_tarfile(filename)
-            or splitext(filename)[1].lower() in (
+    elif (content_type == 'application/x-gzip' or
+            tarfile.is_tarfile(filename) or
+            splitext(filename)[1].lower() in (
                 '.tar', '.tar.gz', '.tar.bz2', '.tgz', '.tbz')):
         untar_file(filename, location)
-    elif (content_type and content_type.startswith('text/html')
-            and is_svn_page(file_contents(filename))):
+    elif (content_type and content_type.startswith('text/html') and
+            is_svn_page(file_contents(filename))):
         # We don't really care about this
         from pip.vcs.subversion import Subversion
         Subversion('svn+' + link.url).unpack(location)


@@ -168,9 +168,9 @@ class Subversion(VersionControl):
         with open(os.path.join(location, self.dirname, 'entries')) as f:
             data = f.read()
-        if (data.startswith('8')
-                or data.startswith('9')
-                or data.startswith('10')):
+        if (data.startswith('8') or
+                data.startswith('9') or
+                data.startswith('10')):
             data = list(map(str.splitlines, data.split('\n\x0c\n')))
             del data[0][0]  # get rid of the '8'
             url = data[0][3]


@@ -185,10 +185,10 @@ def move_wheel_files(name, req, wheeldir, user=False, home=None, root=None,
         if is_base and basedir == '' and destsubdir.endswith('.data'):
             data_dirs.append(s)
             continue
-        elif (is_base
-              and s.endswith('.dist-info')
+        elif (is_base and
+              s.endswith('.dist-info') and
               # is self.req.project_name case preserving?
-              and s.lower().startswith(
+              s.lower().startswith(
                   req.project_name.replace('-', '_').lower())):
             assert not info_dir, 'Multiple .dist-info directories'
             info_dir.append(destsubdir)


@@ -18,9 +18,9 @@ def pytest_collection_modifyitems(items):
         )
         module_root_dir = module_path.split(os.pathsep)[0]
-        if (module_root_dir.startswith("functional")
-                or module_root_dir.startswith("integration")
-                or module_root_dir.startswith("lib")):
+        if (module_root_dir.startswith("functional") or
+                module_root_dir.startswith("integration") or
+                module_root_dir.startswith("lib")):
            item.add_marker(pytest.mark.integration)
        elif module_root_dir.startswith("unit"):
            item.add_marker(pytest.mark.unit)


@@ -84,6 +84,6 @@ def test_help_commands_equally_functional(script):
             continue
         assert (
-            script.pip('help', name).stdout
-            == script.pip(name, '--help').stdout
+            script.pip('help', name).stdout ==
+            script.pip(name, '--help').stdout
         )


@@ -173,8 +173,8 @@ class TestPipResult(object):
         egg_link_file = self.files_created[egg_link_path]
         # FIXME: I don't understand why there's a trailing . here
-        if not (egg_link_file.bytes.endswith('\n.')
-                and egg_link_file.bytes[:-2].endswith(pkg_dir)):
+        if not (egg_link_file.bytes.endswith('\n.') and
+                egg_link_file.bytes[:-2].endswith(pkg_dir)):
             raise TestFailure(textwrap.dedent(u('''\
             Incorrect egg_link file %r
             Expected ending: %r


@@ -20,8 +20,8 @@ class TestUserCacheDir:
         )
         monkeypatch.setattr(appdirs, "WINDOWS", True)
-        assert (appdirs.user_cache_dir("pip").replace("/", "\\")
-                == "C:\\Users\\test\\AppData\\Local\\pip\\Cache")
+        assert (appdirs.user_cache_dir("pip").replace("/", "\\") ==
+                "C:\\Users\\test\\AppData\\Local\\pip\\Cache")
         assert _get_win_folder.calls == [pretend.call("CSIDL_LOCAL_APPDATA")]
     def test_user_cache_dir_osx(self, monkeypatch):
@@ -110,8 +110,8 @@ class TestUserDataDir:
         )
         monkeypatch.setattr(appdirs, "WINDOWS", True)
-        assert (appdirs.user_data_dir("pip").replace("/", "\\")
-                == "C:\\Users\\test\\AppData\\Local\\pip")
+        assert (appdirs.user_data_dir("pip").replace("/", "\\") ==
+                "C:\\Users\\test\\AppData\\Local\\pip")
         assert _get_win_folder.calls == [pretend.call("CSIDL_LOCAL_APPDATA")]
     def test_user_data_dir_win_yes_roaming(self, monkeypatch):
@@ -127,16 +127,18 @@ class TestUserDataDir:
         )
         monkeypatch.setattr(appdirs, "WINDOWS", True)
-        assert (appdirs.user_data_dir("pip", roaming=True).replace("/", "\\")
-                == "C:\\Users\\test\\AppData\\Roaming\\pip")
+        assert (
+            appdirs.user_data_dir("pip", roaming=True).replace("/", "\\") ==
+            "C:\\Users\\test\\AppData\\Roaming\\pip"
+        )
         assert _get_win_folder.calls == [pretend.call("CSIDL_APPDATA")]
     def test_user_data_dir_osx(self, monkeypatch):
         monkeypatch.setenv("HOME", "/home/test")
         monkeypatch.setattr(sys, "platform", "darwin")
-        assert (appdirs.user_data_dir("pip")
-                == "/home/test/Library/Application Support/pip")
+        assert (appdirs.user_data_dir("pip") ==
+                "/home/test/Library/Application Support/pip")
     def test_user_data_dir_linux(self, monkeypatch):
         monkeypatch.delenv("XDG_DATA_HOME")
@@ -169,8 +171,8 @@ class TestUserConfigDir:
         monkeypatch.setattr(appdirs, "WINDOWS", True)
         assert (
-            appdirs.user_config_dir("pip", roaming=False).replace("/", "\\")
-            == "C:\\Users\\test\\AppData\\Local\\pip"
+            appdirs.user_config_dir("pip", roaming=False).replace("/", "\\") ==
+            "C:\\Users\\test\\AppData\\Local\\pip"
         )
         assert _get_win_folder.calls == [pretend.call("CSIDL_LOCAL_APPDATA")]
@@ -187,16 +189,16 @@ class TestUserConfigDir:
         )
         monkeypatch.setattr(appdirs, "WINDOWS", True)
-        assert (appdirs.user_config_dir("pip").replace("/", "\\")
-                == "C:\\Users\\test\\AppData\\Roaming\\pip")
+        assert (appdirs.user_config_dir("pip").replace("/", "\\") ==
+                "C:\\Users\\test\\AppData\\Roaming\\pip")
         assert _get_win_folder.calls == [pretend.call("CSIDL_APPDATA")]
     def test_user_config_dir_osx(self, monkeypatch):
         monkeypatch.setenv("HOME", "/home/test")
         monkeypatch.setattr(sys, "platform", "darwin")
-        assert (appdirs.user_config_dir("pip")
-                == "/home/test/Library/Application Support/pip")
+        assert (appdirs.user_config_dir("pip") ==
+                "/home/test/Library/Application Support/pip")
     def test_user_config_dir_linux(self, monkeypatch):
         monkeypatch.delenv("XDG_CONFIG_HOME")
@@ -228,16 +230,16 @@ class TestUserLogDir:
         )
         monkeypatch.setattr(appdirs, "WINDOWS", True)
-        assert (appdirs.user_log_dir("pip").replace("/", "\\")
-                == "C:\\Users\\test\\AppData\\Local\\pip\\Logs")
+        assert (appdirs.user_log_dir("pip").replace("/", "\\") ==
+                "C:\\Users\\test\\AppData\\Local\\pip\\Logs")
         assert _get_win_folder.calls == [pretend.call("CSIDL_LOCAL_APPDATA")]
     def test_user_log_dir_osx(self, monkeypatch):
         monkeypatch.setenv("HOME", "/home/test")
         monkeypatch.setattr(sys, "platform", "darwin")
-        assert (appdirs.user_log_dir("pip")
-                == "/home/test/Library/Logs/pip")
+        assert (appdirs.user_log_dir("pip") ==
+                "/home/test/Library/Logs/pip")
     def test_uuser_log_dir_linux(self, monkeypatch):
         monkeypatch.delenv("XDG_CACHE_HOME")


@@ -239,8 +239,7 @@ class Test_unpack_file_url(object):
         # confirm hash is for simple1-1.0
         # the previous bad download has been removed
-        assert (hashlib.md5(open(dest_file, 'rb').read()).hexdigest()
-                ==
+        assert (hashlib.md5(open(dest_file, 'rb').read()).hexdigest() ==
                 dist_path_md5
                 ), hashlib.md5(open(dest_file, 'rb').read()).hexdigest()
@@ -306,8 +305,8 @@ class TestPipSession:
         assert hasattr(session.adapters["https://"], "cache")
-        assert (session.adapters["https://"].cache.directory
-                == tmpdir.join("test-cache"))
+        assert (session.adapters["https://"].cache.directory ==
+                tmpdir.join("test-cache"))
     def test_http_cache_is_not_enabled(self, tmpdir):
         session = PipSession(cache=tmpdir.join("test-cache"))