
Merge pull request #8722 from McSinyx/late-dl-indent

Dedent late download logs
Authored by Xavier Fernandez on 2020-09-16 14:02:51 +02:00, committed by GitHub
commit 33890bf825
2 changed files with 42 additions and 45 deletions
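The hunks below touch pip's RequirementPreparer (defined in src/pip/_internal/operations/prepare.py) and revolve around indent_log(), a context manager from pip._internal.utils.logging that indents every log message emitted inside its block so that sub-steps line up under their parent "Collecting ..." line. As a rough, self-contained illustration of that behaviour, here is a minimal sketch using only the standard logging module; the indent_log and IndentingFormatter defined here are stand-ins written for this example, not pip's actual implementation:

import contextlib
import logging

_indentation = 0  # current indent width in spaces (example state, not pip's)


@contextlib.contextmanager
def indent_log(num=2):
    # Stand-in for pip's indent_log(): records logged inside the block
    # are prefixed with `num` extra spaces.
    global _indentation
    _indentation += num
    try:
        yield
    finally:
        _indentation -= num


class IndentingFormatter(logging.Formatter):
    # Stand-in formatter that prepends the current indentation to each line.
    def format(self, record):
        formatted = super().format(record)
        prefix = " " * _indentation
        return "".join(prefix + line for line in formatted.splitlines(True))


handler = logging.StreamHandler()
handler.setFormatter(IndentingFormatter("%(message)s"))
logger = logging.getLogger("demo")
logger.addHandler(handler)
logger.setLevel(logging.INFO)

logger.info("Collecting example-package")        # parent line, column 0
with indent_log():
    logger.info("Downloading example files")     # indented two spaces
logger.info("Saved example-package-1.0.tar.gz")  # outside the block, back at column 0

pip's real helper works along the same lines (it tracks an indentation level that its log formatter prepends to each message), which is why moving code into or out of a with indent_log(): block changes only how far the corresponding log lines are indented.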


@@ -487,10 +487,10 @@ class RequirementPreparer(object):
         self._log_preparing_link(req)
         with indent_log():
             wheel_dist = self._fetch_metadata_using_lazy_wheel(link)
-        if wheel_dist is not None:
-            req.needs_more_preparation = True
-            return wheel_dist
-        return self._prepare_linked_requirement(req, parallel_builds)
+            if wheel_dist is not None:
+                req.needs_more_preparation = True
+                return wheel_dist
+            return self._prepare_linked_requirement(req, parallel_builds)
 
     def prepare_linked_requirements_more(self, reqs, parallel_builds=False):
         # type: (Iterable[InstallRequirement], bool) -> None
@@ -519,51 +519,48 @@ class RequirementPreparer(object):
         link = req.link
         download_dir = self._get_download_dir(link)
 
-        with indent_log():
-            self._ensure_link_req_src_dir(req, download_dir, parallel_builds)
-            hashes = self._get_linked_req_hashes(req)
-            if link.url not in self._downloaded:
-                try:
-                    local_file = unpack_url(
-                        link, req.source_dir, self._download,
-                        download_dir, hashes,
-                    )
-                except NetworkConnectionError as exc:
-                    raise InstallationError(
-                        'Could not install requirement {} because of HTTP '
-                        'error {} for URL {}'.format(req, exc, link)
-                    )
-            else:
-                file_path, content_type = self._downloaded[link.url]
-                if hashes:
-                    hashes.check_against_path(file_path)
-                local_file = File(file_path, content_type)
+        self._ensure_link_req_src_dir(req, download_dir, parallel_builds)
+        hashes = self._get_linked_req_hashes(req)
+        if link.url not in self._downloaded:
+            try:
+                local_file = unpack_url(
+                    link, req.source_dir, self._download,
+                    download_dir, hashes,
+                )
+            except NetworkConnectionError as exc:
+                raise InstallationError(
+                    'Could not install requirement {} because of HTTP '
+                    'error {} for URL {}'.format(req, exc, link)
+                )
+        else:
+            file_path, content_type = self._downloaded[link.url]
+            if hashes:
+                hashes.check_against_path(file_path)
+            local_file = File(file_path, content_type)
 
-            # For use in later processing, preserve the file path on the
-            # requirement.
-            if local_file:
-                req.local_file_path = local_file.path
+        # For use in later processing,
+        # preserve the file path on the requirement.
+        if local_file:
+            req.local_file_path = local_file.path
 
-            dist = _get_prepared_distribution(
-                req, self.req_tracker, self.finder, self.build_isolation,
-            )
+        dist = _get_prepared_distribution(
+            req, self.req_tracker, self.finder, self.build_isolation,
+        )
 
-            if download_dir:
-                if link.is_existing_dir():
-                    logger.info('Link is a directory, ignoring download_dir')
-                elif local_file:
-                    download_location = os.path.join(
-                        download_dir, link.filename
-                    )
-                    if not os.path.exists(download_location):
-                        shutil.copy(local_file.path, download_location)
-                        download_path = display_path(download_location)
-                        logger.info('Saved %s', download_path)
+        if download_dir:
+            if link.is_existing_dir():
+                logger.info('Link is a directory, ignoring download_dir')
+            elif local_file:
+                download_location = os.path.join(download_dir, link.filename)
+                if not os.path.exists(download_location):
+                    shutil.copy(local_file.path, download_location)
+                    download_path = display_path(download_location)
+                    logger.info('Saved %s', download_path)
 
-            if self._download_should_save:
-                # Make a .zip of the source_dir we already created.
-                if link.is_vcs:
-                    req.archive(self.download_dir)
+        if self._download_should_save:
+            # Make a .zip of the source_dir we already created.
+            if link.is_vcs:
+                req.archive(self.download_dir)
         return dist
 
     def prepare_editable_requirement(
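Taken together, the two hunks change where the indentation comes from rather than what gets logged. In prepare_linked_requirement(), the lazy-wheel fast path and the call to _prepare_linked_requirement() now both sit inside the with indent_log(): block, so the early, per-requirement preparation logs stay indented under their "Collecting" line, while _prepare_linked_requirement() itself (the second hunk) no longer opens its own indent_log(). When that method instead runs late, for requirements flagged with needs_more_preparation and finished off in prepare_linked_requirements_more(), messages such as 'Saved %s' are emitted at the left margin rather than indented under a long-gone parent line, which is the dedent the commit title refers to.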