Revert "Merge pull request #1519 from dstufft/remove-dependency-links"

This reverts commit da02f073da, reversing
changes made to 2ad8888901.

Conflicts:
	CHANGES.txt
	pip/commands/freeze.py
	pip/commands/list.py
	pip/commands/wheel.py
	pip/index.py
	pip/req/req_set.py
	pip/vcs/subversion.py
Donald Stufft 2014-08-01 16:20:23 -04:00
parent fcdde73e9f
commit 90688e82b1
18 changed files with 197 additions and 76 deletions

View File

@ -14,10 +14,6 @@
until their removal in pip v1.8. For more information please see
https://pip.pypa.io/en/latest/reference/pip_install.html#caching
* Removed the deprecated support for dependency links and the
``--process-dependency-links`` flag that turned them on. For alternatives to
dependency links please see http://www.pip-installer.org/en/latest/dependency_links.html
* `wsgiref` and `argparse` (for >py26) are now excluded from `pip list` and `pip
freeze` (:pull:`1606`, :pull:`1369`)
@ -167,9 +163,6 @@
* **BACKWARD INCOMPATIBLE** pip no longer respects dependency links by default.
Users may opt into respecting them again using ``--process-dependency-links``.
* **DEPRECATION** ``pip install --process-dependency-links`` and the ability to
use dependency links at all has been deprecated and will be removed in 1.6.
* **DEPRECATION** ``pip install --no-install`` and ``pip install
--no-download`` are now formally deprecated. See :issue:`906` for discussion on
possible alternatives, or lack thereof, in future releases.

View File

@ -1,64 +0,0 @@
:orphan:
Dependency Links
================
In pip 1.5 processing of dependency links was deprecated, and it was removed
completely in pip 1.6. Dependency links support a few different scenarios.
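For context, a project that relied on this feature would typically declare the
links in its ``setup.py``. The snippet below is only an illustrative sketch (the
project name and URL are made up), not taken from any real package:

.. code::

    from setuptools import setup

    setup(
        name='myproject',
        version='1.0',
        install_requires=['foobar'],
        dependency_links=[
            # consulted only when dependency link processing is enabled
            'https://github.com/example/foobar/tarball/master#egg=foobar-1.0',
        ],
    )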
Depending on a Fork of a Project
--------------------------------
If you need to depend on a forked version of a project for your own personal
use, then you can simply use a requirements.txt file that points to the fork.
.. code::
# We need this fork instead of the foobar that exists on PyPI
git+https://github.com/example/foobar.git#egg=foobar
myproject==1.0 # myproject has a setup.py dependency on foobar
If you need to depend on a forked version of a project for something you want
to distribute to other people, then you should rename the project and upload it
to PyPI under the new name. This way people can depend on it and install it
normally.
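As a sketch of that approach (``foobar-fork`` below is a hypothetical name for
the republished fork), dependents then require the renamed project like any
other PyPI package:

.. code::

    from setuptools import setup

    setup(
        name='myproject',
        version='1.0',
        # 'foobar-fork' is the hypothetical new name of the republished fork
        install_requires=['foobar-fork'],
    )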
Deploying Directly from VCS
---------------------------
If you're using dependency_links to deploy a tree of dependencies directly
from VCS, then you have two primary options. You can either set up a
requirements.txt file that lists all of the repositories, such as:
.. code::
# These are the locations of the git repos
git+https://github.com/example/foobar.git#egg=foobar
git+https://github.com/example/super.git#egg=super
git+https://github.com/example/duper.git#egg=duper
# This is my main package
myproject==1.0 # This depends on foobar, super, and duper from git repos
Or you can set up a private package index and point pip at it instead. This
can be as simple as a directory full of packages exposed using Apache2 or Nginx
with an auto index, or as complex as a full-blown index using software such as
`devpi <http://devpi.net/>`_.
If you're using a simple autoindex, then you can add it to pip using:
.. code:: console
$ pip install --find-links https://example.com/deploy/ myproject
Or, if you're using a full-blown index, it could be:
.. code:: console
# Replace PyPI with the custom index
$ pip install --index-url https://example.com/simple/ myproject
# Add a custom index in addition to PyPI
$ pip install --extra-index-url https://example.com/simple/ myproject

View File

@ -199,7 +199,7 @@ class FrozenRequirement(object):
_date_re = re.compile(r'-(20\d\d\d\d\d\d)$')
@classmethod
def from_dist(cls, dist, find_tags=False):
def from_dist(cls, dist, dependency_links, find_tags=False):
location = os.path.normcase(os.path.abspath(dist.location))
comments = []
from pip.vcs import vcs, get_src_requirement
@ -227,7 +227,38 @@ class FrozenRequirement(object):
req = dist.as_requirement()
specs = req.specs
assert len(specs) == 1 and specs[0][0] == '=='
version = specs[0][1]
ver_match = cls._rev_re.search(version)
date_match = cls._date_re.search(version)
if ver_match or date_match:
svn_backend = vcs.get_backend('svn')
if svn_backend:
svn_location = svn_backend().get_location(
dist,
dependency_links,
)
if not svn_location:
logger.warn(
'Warning: cannot find svn location for %s' % req)
comments.append(
'## FIXME: could not find svn URL in dependency_links '
'for this package:'
)
else:
comments.append(
'# Installing as editable to satisfy requirement %s:' %
req
)
if ver_match:
rev = ver_match.group(1)
else:
rev = '{%s}' % date_match.group(1)
editable = True
req = '%s@%s#egg=%s' % (
svn_location,
rev,
cls.egg_name(dist)
)
return cls(dist.project_name, req, editable, comments)
@staticmethod

View File

@ -279,6 +279,15 @@ no_allow_unsafe = OptionMaker(
help=SUPPRESS_HELP
)
# Remove after 1.5
process_dependency_links = OptionMaker(
"--process-dependency-links",
dest="process_dependency_links",
action="store_true",
default=False,
help="Enable the processing of dependency links.",
)
requirements = OptionMaker(
'-r', '--requirement',
dest='requirements',
@ -431,5 +440,6 @@ index_group = {
no_allow_external,
allow_unsafe,
no_allow_unsafe,
process_dependency_links,
]
}

View File

@ -7,6 +7,7 @@ from pip.req import InstallRequirement
from pip.log import logger
from pip.basecommand import Command
from pip.util import get_installed_distributions
from pip._vendor import pkg_resources
# packages to exclude from freeze output
freeze_excludes = stdlib_pkgs + ['setuptools', 'pip', 'distribute']
@ -67,14 +68,28 @@ class FreezeCommand(Command):
if skip_regex:
skip_match = re.compile(skip_regex)
dependency_links = []
f = sys.stdout
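# Collect dependency links declared by installed distributions, plus any
# --find-links entries that carry an #egg= fragment; FrozenRequirement uses
# them below to reconstruct editable svn URLs.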
for dist in pkg_resources.working_set:
if dist.has_metadata('dependency_links.txt'):
dependency_links.extend(
dist.get_metadata_lines('dependency_links.txt')
)
for link in find_links:
if '#egg=' in link:
dependency_links.append(link)
for link in find_links:
f.write('-f %s\n' % link)
installations = {}
for dist in get_installed_distributions(local_only=local_only,
skip=freeze_excludes):
req = pip.FrozenRequirement.from_dist(dist, find_tags=find_tags)
req = pip.FrozenRequirement.from_dist(
dist,
dependency_links,
find_tags=find_tags,
)
installations[req.name] = req
if requirement:
req_f = open(requirement)

View File

@ -180,6 +180,7 @@ class InstallCommand(Command):
allow_unverified=options.allow_unverified,
allow_all_external=options.allow_all_external,
allow_all_prereleases=options.pre,
process_dependency_links=options.process_dependency_links,
session=session,
)

View File

@ -70,6 +70,7 @@ class ListCommand(Command):
allow_unverified=options.allow_unverified,
allow_all_external=options.allow_all_external,
allow_all_prereleases=options.pre,
process_dependency_links=options.process_dependency_links,
session=session,
)
@ -115,8 +116,16 @@ class ListCommand(Command):
)
index_urls += options.mirrors
dependency_links = []
for dist in get_installed_distributions(local_only=options.local):
if dist.has_metadata('dependency_links.txt'):
dependency_links.extend(
dist.get_metadata_lines('dependency_links.txt'),
)
with self._build_session(options) as session:
finder = self._build_package_finder(options, index_urls, session)
finder.add_dependency_links(dependency_links)
installed_packages = get_installed_distributions(
local_only=options.local,

View File

@ -162,6 +162,7 @@ class WheelCommand(Command):
allow_unverified=options.allow_unverified,
allow_all_external=options.allow_all_external,
allow_all_prereleases=options.pre,
process_dependency_links=options.process_dependency_links,
session=session,
)

View File

@ -38,7 +38,7 @@ class PackageFinder(object):
def __init__(self, find_links, index_urls,
use_wheel=True, allow_external=[], allow_unverified=[],
allow_all_external=False, allow_all_prereleases=False,
session=None):
process_dependency_links=False, session=None):
if session is None:
raise TypeError(
"PackageFinder() missing 1 required keyword argument: "
@ -47,6 +47,7 @@ class PackageFinder(object):
self.find_links = find_links
self.index_urls = index_urls
self.dependency_links = []
# These are boring links that have already been logged somehow:
self.logged_links = set()
@ -78,9 +79,28 @@ class PackageFinder(object):
# Do we want to allow _all_ pre-releases?
self.allow_all_prereleases = allow_all_prereleases
# Do we process dependency links?
self.process_dependency_links = process_dependency_links
self._have_warned_dependency_links = False
# The Session we'll use to make requests
self.session = session
def add_dependency_links(self, links):
# # FIXME: this shouldn't be global list this, it should only
# # apply to requirements of the package that specifies the
# # dependency_links value
# # FIXME: also, we should track comes_from (i.e., use Link)
if self.process_dependency_links:
if not self._have_warned_dependency_links:
logger.deprecated(
"1.6",
"Dependency Links processing has been deprecated with an "
"accelerated time schedule and will be removed in pip 1.6",
)
self._have_warned_dependency_links = True
self.dependency_links.extend(links)
def _sort_locations(self, locations):
"""
Sort locations into "files" (archives) and "urls", and return
@ -207,11 +227,16 @@ class PackageFinder(object):
posixpath.join(main_index_url.url, version)] + locations
file_locations, url_locations = self._sort_locations(locations)
_flocations, _ulocations = self._sort_locations(self.dependency_links)
file_locations.extend(_flocations)
# We trust every url that the user has given us whether it was given
# via --index-url or --find-links
locations = [Link(url, trusted=True) for url in url_locations]
# We explicitly do not trust links that came from dependency_links
locations.extend([Link(url) for url in _ulocations])
logger.debug('URLs to search for versions for %s:' % req)
for location in locations:
logger.debug('* %s' % location)
@ -260,6 +285,15 @@ class PackageFinder(object):
)
finally:
logger.indent -= 2
dependency_versions = list(self._package_versions(
[Link(url) for url in self.dependency_links], req.name.lower()))
if dependency_versions:
logger.info(
'dependency_links found: %s' %
', '.join([
link.url for p, link, version in dependency_versions
])
)
file_versions = list(
self._package_versions(
[Link(url) for url in file_locations],
@ -268,6 +302,7 @@ class PackageFinder(object):
)
if (not found_versions
and not page_versions
and not dependency_versions
and not file_versions):
logger.fatal(
'Could not find any downloads that satisfy the requirement'
@ -307,7 +342,7 @@ class PackageFinder(object):
)
# this is an intentional priority ordering
all_versions = installed_version + file_versions + found_versions \
+ page_versions
+ page_versions + dependency_versions
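# dependency link candidates come last, so an equivalent link found on an
# index page wins (covered by test_finder_priority_page_over_deplink)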
applicable_versions = []
for (parsed_version, link, version) in all_versions:
if version not in req.req:

View File

@ -448,6 +448,10 @@ exec(compile(
p.feed(data or '')
return p.close()
@property
def dependency_links(self):
return self.egg_info_lines('dependency_links.txt')
_requirements_section_re = re.compile(r'\[(.*?)\]')
def requirements(self, extras=()):

View File

@ -425,6 +425,10 @@ class RequirementSet(object):
# sdists
else:
# FIXME: shouldn't be globally added:
finder.add_dependency_links(
req_to_install.dependency_links
)
if (req_to_install.extras):
logger.notify(
"Installing extra requirements: %r" %

View File

@ -1,6 +1,8 @@
import os
import re
from pip.compat import urlparse
from pip.index import Link
from pip.util import rmtree, display_path, call_subprocess
from pip.log import logger
from pip.vcs import vcs, VersionControl
@ -34,6 +36,7 @@ class Subversion(VersionControl):
'Cannot determine URL of svn checkout %s' %
display_path(location)
)
logger.info('Output that cannot be parsed: \n%s' % output)
return None, None
url = match.group(1).strip()
match = _svn_revision_re.search(output)
@ -84,6 +87,20 @@ class Subversion(VersionControl):
call_subprocess(
[self.cmd, 'checkout', '-q'] + rev_options + [url, dest])
def get_location(self, dist, dependency_links):
for url in dependency_links:
egg_fragment = Link(url).egg_fragment
if not egg_fragment:
continue
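# an egg fragment may embed a version (e.g. 'foobar-1.0'); strip the
# trailing part to recover the project key for comparison with dist.key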
if '-' in egg_fragment:
# FIXME: will this work when a package has - in the name?
key = '-'.join(egg_fragment.split('-')[:-1]).lower()
else:
key = egg_fragment
if key == dist.key:
return url.split('#', 1)[0]
return None
def get_revision(self, location):
"""
Return the maximum revision for all files under a given location

View File

@ -0,0 +1 @@
/LocalExtras.egg-info

View File

@ -0,0 +1,29 @@
import os
from setuptools import setup, find_packages
def path_to_url(path):
"""
Convert a path to URI. The path will be made absolute and
will not have quoted path parts.
"""
path = os.path.normpath(os.path.abspath(path))
drive, path = os.path.splitdrive(path)
filepath = path.split(os.path.sep)
url = '/'.join(filepath)
if drive:
return 'file:///' + drive + url
return 'file://' + url
HERE = os.path.dirname(__file__)
DEP_PATH = os.path.join(HERE, '..', '..', 'indexes', 'simple', 'simple')
DEP_URL = path_to_url(DEP_PATH)
setup(
name='LocalExtras',
version='0.0.1',
packages=find_packages(),
extras_require={ 'bar': ['simple'] },
dependency_links=[DEP_URL]
)

View File

@ -50,6 +50,10 @@ LineEndings
-----------
contains DOS line endings
LocalExtras
-----------
has an extra in a local file:// dependency link
parent/child-0.1.tar.gz
-----------------------
The parent-0.1.tar.gz and child-0.1.tar.gz packages are used by

View File

@ -130,6 +130,21 @@ def test_respect_order_in_requirements_file(script, data):
)
def test_install_local_editable_with_extras(script, data):
to_install = data.packages.join("LocalExtras")
res = script.pip(
'install', '-e', to_install + '[bar]', '--process-dependency-links',
expect_error=False,
)
assert script.site_packages / 'easy-install.pth' in res.files_updated, (
str(res)
)
assert (
script.site_packages / 'LocalExtras.egg-link' in res.files_created
), str(res)
assert script.site_packages / 'simple' in res.files_created, str(res)
def test_install_local_editable_with_subdirectory(script):
version_pkg_path = _create_test_package_with_subdirectory(script,
'version_subdir')

View File

@ -254,6 +254,22 @@ def test_finder_priority_file_over_page(data):
assert link.url.startswith("file://")
def test_finder_priority_page_over_deplink():
"""
Test PackageFinder prefers page links over equivalent dependency links
"""
req = InstallRequirement.from_line('gmpy==1.15', None)
finder = PackageFinder(
[],
["https://pypi.python.org/simple"],
process_dependency_links=True,
session=PipSession(),
)
finder.add_dependency_links(['http://c.pypi.python.org/simple/gmpy/'])
link = finder.find_requirement(req, False)
assert link.url.startswith("https://pypi"), link
def test_finder_priority_nonegg_over_eggfragments():
"""Test PackageFinder prefers non-egg links over "#egg=" links"""
req = InstallRequirement.from_line('bar==1.0', None)