Mirror of https://github.com/pypa/pip

Merge pull request #1243 from pfmoore/distlib-revendor

Re-vendor distlib and modify normalized version handling to match
Paul Moore committed 6ece87808d on 2013-10-22 08:11:14 -07:00
20 changed files with 2021 additions and 946 deletions


@@ -709,11 +709,11 @@ def is_prerelease(vers):
Will return True if it is a pre-release and False if not. Versions are
assumed to be pre-releases if they cannot be parsed.
"""
normalized = version.suggest_normalized_version(vers)
normalized = version._suggest_normalized_version(vers)
if normalized is None:
# Cannot normalize, assume it is a pre-release
return True
parsed = version.normalized_key(normalized)
parsed = version._normalized_key(normalized)
return any([any([y in set(["a", "b", "c", "rc", "dev"]) for y in x]) for x in parsed])
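The replacement helpers are private to the re-vendored distlib, so pip now calls them under their new underscored names. As a rough standalone illustration of the test being applied (not distlib's actual parsing; the tag set mirrors the one in the hunk above):

import re

PRE_TAGS = set(["a", "b", "c", "rc", "dev"])

def looks_like_prerelease(vers):
    # Scan each dotted component for an embedded alphabetic tag,
    # e.g. "1.0a2" -> components ["1", "0a2"] -> tag "a".
    for part in vers.split('.'):
        for tag in re.findall(r'[a-z]+', part.lower()):
            if tag in PRE_TAGS:
                return True
    return False

assert looks_like_prerelease('1.0a2')
assert looks_like_prerelease('2.0.dev1')
assert not looks_like_prerelease('1.0.3')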


@@ -6,7 +6,7 @@
#
import logging
__version__ = '0.1.1'
__version__ = '0.1.3'
class DistlibException(Exception):
pass


@@ -0,0 +1,84 @@
[posix_prefix]
# Configuration directories. Some of these come straight out of the
# configure script. They are for implementing the other variables, not to
# be used directly in [resource_locations].
confdir = /etc
datadir = /usr/share
libdir = /usr/lib
statedir = /var
# User resource directory
local = ~/.local/{distribution.name}
stdlib = {base}/lib/python{py_version_short}
platstdlib = {platbase}/lib/python{py_version_short}
purelib = {base}/lib/python{py_version_short}/site-packages
platlib = {platbase}/lib/python{py_version_short}/site-packages
include = {base}/include/python{py_version_short}{abiflags}
platinclude = {platbase}/include/python{py_version_short}{abiflags}
data = {base}
[posix_home]
stdlib = {base}/lib/python
platstdlib = {base}/lib/python
purelib = {base}/lib/python
platlib = {base}/lib/python
include = {base}/include/python
platinclude = {base}/include/python
scripts = {base}/bin
data = {base}
[nt]
stdlib = {base}/Lib
platstdlib = {base}/Lib
purelib = {base}/Lib/site-packages
platlib = {base}/Lib/site-packages
include = {base}/Include
platinclude = {base}/Include
scripts = {base}/Scripts
data = {base}
[os2]
stdlib = {base}/Lib
platstdlib = {base}/Lib
purelib = {base}/Lib/site-packages
platlib = {base}/Lib/site-packages
include = {base}/Include
platinclude = {base}/Include
scripts = {base}/Scripts
data = {base}
[os2_home]
stdlib = {userbase}/lib/python{py_version_short}
platstdlib = {userbase}/lib/python{py_version_short}
purelib = {userbase}/lib/python{py_version_short}/site-packages
platlib = {userbase}/lib/python{py_version_short}/site-packages
include = {userbase}/include/python{py_version_short}
scripts = {userbase}/bin
data = {userbase}
[nt_user]
stdlib = {userbase}/Python{py_version_nodot}
platstdlib = {userbase}/Python{py_version_nodot}
purelib = {userbase}/Python{py_version_nodot}/site-packages
platlib = {userbase}/Python{py_version_nodot}/site-packages
include = {userbase}/Python{py_version_nodot}/Include
scripts = {userbase}/Scripts
data = {userbase}
[posix_user]
stdlib = {userbase}/lib/python{py_version_short}
platstdlib = {userbase}/lib/python{py_version_short}
purelib = {userbase}/lib/python{py_version_short}/site-packages
platlib = {userbase}/lib/python{py_version_short}/site-packages
include = {userbase}/include/python{py_version_short}
scripts = {userbase}/bin
data = {userbase}
[osx_framework_user]
stdlib = {userbase}/lib/python
platstdlib = {userbase}/lib/python
purelib = {userbase}/lib/python/site-packages
platlib = {userbase}/lib/python/site-packages
include = {userbase}/include
scripts = {userbase}/bin
data = {userbase}
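The {placeholder} fields in the new sysconfig.cfg above are expanded against interpreter-specific variables. A minimal sketch of that expansion, with illustrative variable values rather than distlib's actual substitution code:

import sys

values = {
    'base': sys.prefix,
    'platbase': sys.exec_prefix,
    'py_version_short': '%s.%s' % sys.version_info[:2],
    'abiflags': getattr(sys, 'abiflags', ''),  # empty on Windows / Python 2
}

template = '{base}/lib/python{py_version_short}/site-packages'
print(template.format(**values))  # e.g. /usr/lib/python3.11/site-packages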


@@ -53,6 +53,35 @@ if sys.version_info[0] < 3:
if match: return match.group(1, 2)
return None, host
else:
from io import StringIO
string_types = str,
text_type = str
from io import TextIOWrapper as file_type
import builtins
import configparser
import shutil
from urllib.parse import (urlparse, urlunparse, urljoin, splituser, quote,
unquote, urlsplit, urlunsplit, splittype)
from urllib.request import (urlopen, urlretrieve, Request, url2pathname,
pathname2url,
HTTPBasicAuthHandler, HTTPPasswordMgr,
HTTPSHandler, HTTPHandler, HTTPRedirectHandler,
build_opener)
from urllib.error import HTTPError, URLError, ContentTooShortError
import http.client as httplib
import urllib.request as urllib2
import xmlrpc.client as xmlrpclib
import queue
from html.parser import HTMLParser
import html.entities as htmlentitydefs
raw_input = input
from itertools import filterfalse
filter = filter
try:
from ssl import match_hostname, CertificateError
except ImportError:
class CertificateError(ValueError):
pass
@@ -111,33 +140,84 @@ if sys.version_info[0] < 3:
raise CertificateError("no appropriate commonName or "
"subjectAltName fields were found")
else:
from io import StringIO
string_types = str,
text_type = str
from io import TextIOWrapper as file_type
import builtins
import configparser
import shutil
from urllib.parse import (urlparse, urlunparse, urljoin, splituser, quote,
unquote, urlsplit, urlunsplit, splittype)
from urllib.request import (urlopen, urlretrieve, Request, url2pathname,
pathname2url,
HTTPBasicAuthHandler, HTTPPasswordMgr,
HTTPSHandler, HTTPHandler, HTTPRedirectHandler,
build_opener)
from urllib.error import HTTPError, URLError, ContentTooShortError
import http.client as httplib
import urllib.request as urllib2
import xmlrpc.client as xmlrpclib
import queue
from html.parser import HTMLParser
import html.entities as htmlentitydefs
raw_input = input
from itertools import filterfalse
filter = filter
from ssl import match_hostname, CertificateError
try:
from types import SimpleNamespace as Container
except ImportError:
class Container(object):
"""
A generic container for when multiple values need to be returned
"""
def __init__(self, **kwargs):
self.__dict__.update(kwargs)
try:
from shutil import which
except ImportError:
# Implementation from Python 3.3
def which(cmd, mode=os.F_OK | os.X_OK, path=None):
"""Given a command, mode, and a PATH string, return the path which
conforms to the given mode on the PATH, or None if there is no such
file.
`mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result
of os.environ.get("PATH"), or can be overridden with a custom search
path.
"""
# Check that a given file can be accessed with the correct mode.
# Additionally check that `file` is not a directory, as on Windows
# directories pass the os.access check.
def _access_check(fn, mode):
return (os.path.exists(fn) and os.access(fn, mode)
and not os.path.isdir(fn))
# If we're given a path with a directory part, look it up directly rather
# than referring to PATH directories. This includes checking relative to the
# current directory, e.g. ./script
if os.path.dirname(cmd):
if _access_check(cmd, mode):
return cmd
return None
if path is None:
path = os.environ.get("PATH", os.defpath)
if not path:
return None
path = path.split(os.pathsep)
if sys.platform == "win32":
# The current directory takes precedence on Windows.
if not os.curdir in path:
path.insert(0, os.curdir)
# PATHEXT is necessary to check on Windows.
pathext = os.environ.get("PATHEXT", "").split(os.pathsep)
# See if the given file matches any of the expected path extensions.
# This will allow us to short circuit when given "python.exe".
# If it does match, only test that one, otherwise we have to try
# others.
if any(cmd.lower().endswith(ext.lower()) for ext in pathext):
files = [cmd]
else:
files = [cmd + ext for ext in pathext]
else:
# On other platforms you don't have things like PATHEXT to tell you
# what file suffixes are executable, so just pass on cmd as-is.
files = [cmd]
seen = set()
for dir in path:
normdir = os.path.normcase(dir)
if not normdir in seen:
seen.add(normdir)
for thefile in files:
name = os.path.join(dir, thefile)
if _access_check(name, mode):
return name
return None
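The backport above mirrors the shutil.which() API added in Python 3.3, so usage is identical either way; a small sketch (the command name is just an example):

from shutil import which  # on 2.x, the backport above provides the same API

path = which('git')       # full path to the executable, or None if not found
print(path or 'git is not on PATH')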
# ZipFile is a context manager in 2.7, but not in 2.6
@@ -752,3 +832,230 @@ except ImportError: # pragma: no cover
def viewitems(self):
"od.viewitems() -> a set-like object providing a view on od's items"
return ItemsView(self)
try:
from logging.config import BaseConfigurator, valid_ident
except ImportError: # pragma: no cover
IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I)
def valid_ident(s):
m = IDENTIFIER.match(s)
if not m:
raise ValueError('Not a valid Python identifier: %r' % s)
return True
# The ConvertingXXX classes are wrappers around standard Python containers,
# and they serve to convert any suitable values in the container. The
# conversion converts base dicts, lists and tuples to their wrapped
# equivalents, whereas strings which match a conversion format are converted
# appropriately.
#
# Each wrapper should have a configurator attribute holding the actual
# configurator to use for conversion.
class ConvertingDict(dict):
"""A converting dictionary wrapper."""
def __getitem__(self, key):
value = dict.__getitem__(self, key)
result = self.configurator.convert(value)
#If the converted value is different, save for next time
if value is not result:
self[key] = result
if type(result) in (ConvertingDict, ConvertingList,
ConvertingTuple):
result.parent = self
result.key = key
return result
def get(self, key, default=None):
value = dict.get(self, key, default)
result = self.configurator.convert(value)
#If the converted value is different, save for next time
if value is not result:
self[key] = result
if type(result) in (ConvertingDict, ConvertingList,
ConvertingTuple):
result.parent = self
result.key = key
return result
def pop(self, key, default=None):
value = dict.pop(self, key, default)
result = self.configurator.convert(value)
if value is not result:
if type(result) in (ConvertingDict, ConvertingList,
ConvertingTuple):
result.parent = self
result.key = key
return result
class ConvertingList(list):
"""A converting list wrapper."""
def __getitem__(self, key):
value = list.__getitem__(self, key)
result = self.configurator.convert(value)
#If the converted value is different, save for next time
if value is not result:
self[key] = result
if type(result) in (ConvertingDict, ConvertingList,
ConvertingTuple):
result.parent = self
result.key = key
return result
def pop(self, idx=-1):
value = list.pop(self, idx)
result = self.configurator.convert(value)
if value is not result:
if type(result) in (ConvertingDict, ConvertingList,
ConvertingTuple):
result.parent = self
return result
class ConvertingTuple(tuple):
"""A converting tuple wrapper."""
def __getitem__(self, key):
value = tuple.__getitem__(self, key)
result = self.configurator.convert(value)
if value is not result:
if type(result) in (ConvertingDict, ConvertingList,
ConvertingTuple):
result.parent = self
result.key = key
return result
class BaseConfigurator(object):
"""
The configurator base class which defines some useful defaults.
"""
CONVERT_PATTERN = re.compile(r'^(?P<prefix>[a-z]+)://(?P<suffix>.*)$')
WORD_PATTERN = re.compile(r'^\s*(\w+)\s*')
DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*')
INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*')
DIGIT_PATTERN = re.compile(r'^\d+$')
value_converters = {
'ext' : 'ext_convert',
'cfg' : 'cfg_convert',
}
# We might want to use a different one, e.g. importlib
importer = staticmethod(__import__)
def __init__(self, config):
self.config = ConvertingDict(config)
self.config.configurator = self
def resolve(self, s):
"""
Resolve strings to objects using standard import and attribute
syntax.
"""
name = s.split('.')
used = name.pop(0)
try:
found = self.importer(used)
for frag in name:
used += '.' + frag
try:
found = getattr(found, frag)
except AttributeError:
self.importer(used)
found = getattr(found, frag)
return found
except ImportError:
e, tb = sys.exc_info()[1:]
v = ValueError('Cannot resolve %r: %s' % (s, e))
v.__cause__, v.__traceback__ = e, tb
raise v
def ext_convert(self, value):
"""Default converter for the ext:// protocol."""
return self.resolve(value)
def cfg_convert(self, value):
"""Default converter for the cfg:// protocol."""
rest = value
m = self.WORD_PATTERN.match(rest)
if m is None:
raise ValueError("Unable to convert %r" % value)
else:
rest = rest[m.end():]
d = self.config[m.groups()[0]]
#print d, rest
while rest:
m = self.DOT_PATTERN.match(rest)
if m:
d = d[m.groups()[0]]
else:
m = self.INDEX_PATTERN.match(rest)
if m:
idx = m.groups()[0]
if not self.DIGIT_PATTERN.match(idx):
d = d[idx]
else:
try:
n = int(idx) # try as number first (most likely)
d = d[n]
except TypeError:
d = d[idx]
if m:
rest = rest[m.end():]
else:
raise ValueError('Unable to convert '
'%r at %r' % (value, rest))
#rest should be empty
return d
def convert(self, value):
"""
Convert values to an appropriate type. dicts, lists and tuples are
replaced by their converting alternatives. Strings are checked to
see if they have a conversion format and are converted if they do.
"""
if not isinstance(value, ConvertingDict) and isinstance(value, dict):
value = ConvertingDict(value)
value.configurator = self
elif not isinstance(value, ConvertingList) and isinstance(value, list):
value = ConvertingList(value)
value.configurator = self
elif not isinstance(value, ConvertingTuple) and\
isinstance(value, tuple):
value = ConvertingTuple(value)
value.configurator = self
elif isinstance(value, string_types):
m = self.CONVERT_PATTERN.match(value)
if m:
d = m.groupdict()
prefix = d['prefix']
converter = self.value_converters.get(prefix, None)
if converter:
suffix = d['suffix']
converter = getattr(self, converter)
value = converter(suffix)
return value
def configure_custom(self, config):
"""Configure an object with a user-supplied factory."""
c = config.pop('()')
if not callable(c):
c = self.resolve(c)
props = config.pop('.', None)
# Check for valid identifiers
kwargs = dict([(k, config[k]) for k in config if valid_ident(k)])
result = c(**kwargs)
if props:
for name, value in props.items():
setattr(result, name, value)
return result
def as_tuple(self, value):
"""Utility function which converts lists to tuples."""
if isinstance(value, list):
value = tuple(value)
return value
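A short usage sketch for the configurator machinery above, via the stdlib BaseConfigurator that the fallback mirrors; the config dict and keys are made up for illustration:

from logging.config import BaseConfigurator  # or the fallback defined above

cfg = BaseConfigurator({
    'handlers': ['console', 'file'],
    'root': {'level': 'DEBUG'},
})

# ext:// resolves dotted import paths to objects...
assert cfg.convert('ext://os.path.join') is __import__('os').path.join
# ...while cfg:// looks values up inside the configuration itself.
assert cfg.convert('cfg://root.level') == 'DEBUG'
assert cfg.convert('cfg://handlers[0]') == 'console'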


@@ -9,19 +9,20 @@ from __future__ import unicode_literals
import base64
import codecs
import contextlib
import hashlib
import logging
import os
import posixpath
import sys
import zipimport
from . import DistlibException
from .compat import StringIO, configparser, string_types
from . import DistlibException, resources
from .compat import StringIO
from .version import get_scheme, UnsupportedVersionError
from .markers import interpret
from .metadata import Metadata
from .util import (parse_requirement, cached_property, get_export_entry,
CSVReader, CSVWriter)
from .metadata import Metadata, METADATA_FILENAME
from .util import (parse_requirement, cached_property, parse_name_and_version,
read_exports, write_exports, CSVReader, CSVWriter)
__all__ = ['Distribution', 'BaseInstalledDistribution',
@@ -31,8 +32,11 @@ __all__ = ['Distribution', 'BaseInstalledDistribution',
logger = logging.getLogger(__name__)
DIST_FILES = ('INSTALLER', 'METADATA', 'RECORD', 'REQUESTED', 'RESOURCES',
'EXPORTS', 'SHARED')
EXPORTS_FILENAME = 'pydist-exports.json'
COMMANDS_FILENAME = 'pydist-commands.json'
DIST_FILES = ('INSTALLER', METADATA_FILENAME, 'RECORD', 'REQUESTED',
'RESOURCES', EXPORTS_FILENAME, 'SHARED')
DISTINFO_EXT = '.dist-info'
@@ -66,6 +70,7 @@ class _Cache(object):
self.path[dist.path] = dist
self.name.setdefault(dist.key, []).append(dist)
class DistributionPath(object):
"""
Represents a set of distributions installed on a path (typically sys.path).
@@ -110,17 +115,39 @@ class DistributionPath(object):
"""
Yield .dist-info and/or .egg(-info) distributions.
"""
# We need to check if we've seen some resources already, because on
# some Linux systems (e.g. some Debian/Ubuntu variants) there are
# symlinks which alias other files in the environment.
seen = set()
for path in self.path:
realpath = os.path.realpath(path)
if not os.path.isdir(realpath):
finder = resources.finder_for_path(path)
if finder is None:
continue
for dir in os.listdir(realpath):
dist_path = os.path.join(realpath, dir)
if self._include_dist and dir.endswith(DISTINFO_EXT):
yield new_dist_class(dist_path, env=self)
elif self._include_egg and dir.endswith(('.egg-info',
'.egg')):
yield old_dist_class(dist_path, self)
r = finder.find('')
if not r or not r.is_container:
continue
rset = sorted(r.resources)
for entry in rset:
r = finder.find(entry)
if not r or r.path in seen:
continue
if self._include_dist and entry.endswith(DISTINFO_EXT):
metadata_path = posixpath.join(entry, METADATA_FILENAME)
pydist = finder.find(metadata_path)
if not pydist:
continue
metadata = Metadata(fileobj=pydist.as_stream(),
scheme='legacy')
logger.debug('Found %s', r.path)
seen.add(r.path)
yield new_dist_class(r.path, metadata=metadata,
env=self)
elif self._include_egg and entry.endswith(('.egg-info',
'.egg')):
logger.debug('Found %s', r.path)
seen.add(r.path)
yield old_dist_class(r.path, self)
def _generate_cache(self):
"""
@@ -163,7 +190,6 @@ class DistributionPath(object):
name = name.replace('-', '_')
return '-'.join([name, version]) + DISTINFO_EXT
def get_distributions(self):
"""
Provides an iterator that looks for distributions and returns
@@ -186,7 +212,6 @@ class DistributionPath(object):
for dist in self._cache_egg.path.values():
yield dist
def get_distribution(self, name):
"""
Looks for a named distribution on the path.
@@ -239,18 +264,12 @@ class DistributionPath(object):
provided = dist.provides
for p in provided:
p_components = p.rsplit(' ', 1)
if len(p_components) == 1 or matcher is None:
if name == p_components[0]:
p_name, p_ver = parse_name_and_version(p)
if matcher is None:
if p_name == name:
yield dist
break
else:
p_name, p_ver = p_components
if len(p_ver) < 2 or p_ver[0] != '(' or p_ver[-1] != ')':
raise DistlibException(
'distribution %r has invalid Provides field: %r' %
(dist.name, p))
p_ver = p_ver[1:-1] # trim off the parenthesis
if p_name == name and matcher.match(p_ver):
yield dist
break
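This hunk replaces ad-hoc splitting of "name (version)" strings with util.parse_name_and_version. A standalone mimic of that parsing, for illustration only (distlib's own regex may differ):

import re

NAME_VERSION = re.compile(r'^(?P<name>[\w .-]+?)\s*\(\s*(?P<ver>[^\s)]+)\s*\)$')

def split_provides(p):
    # "choxie (2.0.0.9)" -> ('choxie', '2.0.0.9'), name lower-cased
    m = NAME_VERSION.match(p.strip())
    if not m:
        raise ValueError('ill-formed provides entry: %r' % p)
    return m.group('name').lower(), m.group('ver')

assert split_provides('choxie (2.0.0.9)') == ('choxie', '2.0.0.9')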
@@ -282,6 +301,7 @@ class DistributionPath(object):
for v in d.values():
yield v
class Distribution(object):
"""
A base class for distributions, whether installed or from indexes.
@@ -311,15 +331,18 @@ class Distribution(object):
self.key = self.name.lower() # for case-insensitive comparisons
self.version = metadata.version
self.locator = None
self.md5_digest = None
self.extras = None # additional features requested during installation
self.digest = None
self.extras = None # additional features requested
self.context = None # environment marker overrides
@property
def download_url(self):
def source_url(self):
"""
The download URL for this distribution.
The source archive download URL for this distribution.
"""
return self.metadata.download_url
return self.metadata.source_url
download_url = source_url # Backward compatibility
@property
def name_and_version(self):
@@ -334,56 +357,36 @@ class Distribution(object):
A set of distribution names and versions provided by this distribution.
:return: A set of "name (version)" strings.
"""
plist = self.metadata['Provides-Dist']
plist = self.metadata.provides
s = '%s (%s)' % (self.name, self.version)
if s not in plist:
plist.append(s)
return self.filter_requirements(plist)
return plist
def _get_requirements(self, req_attr):
reqts = getattr(self.metadata, req_attr)
return set(self.metadata.get_requirements(reqts, extras=self.extras,
env=self.context))
@property
def requires(self):
rlist = self.metadata['Requires-Dist']
return self.filter_requirements(rlist)
def run_requires(self):
return self._get_requirements('run_requires')
@property
def setup_requires(self):
rlist = self.metadata['Setup-Requires-Dist']
return self.filter_requirements(rlist)
def meta_requires(self):
return self._get_requirements('meta_requires')
@property
def build_requires(self):
return self._get_requirements('build_requires')
@property
def test_requires(self):
rlist = self.metadata['Requires-Dist']
return self.filter_requirements(rlist, extras=['test'])
return self._get_requirements('test_requires')
@property
def doc_requires(self):
rlist = self.metadata['Requires-Dist']
return self.filter_requirements(rlist, extras=['doc'])
def filter_requirements(self, rlist, context=None, extras=None):
result = set()
marked = []
for req in rlist:
if ';' not in req:
result.add(req)
else:
marked.append(req.split(';', 1))
if marked:
if context is None:
context = {}
if extras is None:
extras = self.extras
if not extras:
extras = [None]
else:
extras = list(extras) # leave original alone
extras.append(None)
for extra in extras:
context['extra'] = extra
for r, marker in marked:
if interpret(marker, context):
result.add(r.strip())
return result
def dev_requires(self):
return self._get_requirements('dev_requires')
def matches_requirement(self, req):
"""
@@ -392,9 +395,12 @@ class Distribution(object):
:rtype req: str
:return: True if it matches, else False.
"""
# Requirement may contain extras - parse to lose those
# from what's passed to the matcher
r = parse_requirement(req)
scheme = get_scheme(self.metadata.scheme)
try:
matcher = scheme.matcher(req)
matcher = scheme.matcher(r.requirement)
except UnsupportedVersionError:
# XXX compat-mode if cannot read the version
logger.warning('could not read version %r - using name only',
@@ -405,15 +411,12 @@ class Distribution(object):
name = matcher.key # case-insensitive
result = False
# Note this is similar to code in make_graph - to be refactored
for p in self.provides:
vm = scheme.matcher(p)
if vm.key != name:
p_name, p_ver = parse_name_and_version(p)
if p_name != name:
continue
version = vm.exact_version
assert version
try:
result = matcher.match(version)
result = matcher.match(p_ver)
break
except UnsupportedVersionError:
pass
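The rewritten matches_requirement() leans on the version-scheme matcher plus parse_name_and_version. A hedged sketch of the matcher API involved; the requirement and version values are illustrative:

from distlib.version import get_scheme

matcher = get_scheme('default').matcher('foo (>= 1.0)')
print(matcher.key)           # 'foo' (lower-cased for case-insensitive use)
print(matcher.match('1.2'))  # True
print(matcher.match('0.9'))  # False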
@@ -423,8 +426,8 @@
"""
Return a textual representation of this instance.
"""
if self.download_url:
suffix = ' [%s]' % self.download_url
if self.source_url:
suffix = ' [%s]' % self.source_url
else:
suffix = ''
return '<Distribution %s (%s)%s>' % (self.name, self.version, suffix)
@@ -434,7 +437,7 @@
See if this distribution is the same as another.
:param other: The distribution to compare with. To be equal to one
another, distributions must have the same type, name,
version and download_url.
version and source_url.
:return: True if it is the same, else False.
"""
if type(other) is not type(self):
@@ -442,14 +445,14 @@
else:
result = (self.name == other.name and
self.version == other.version and
self.download_url == other.download_url)
self.source_url == other.source_url)
return result
def __hash__(self):
"""
Compute hash in a way which matches the equality test.
"""
return hash(self.name) + hash(self.version) + hash(self.download_url)
return hash(self.name) + hash(self.version) + hash(self.source_url)
class BaseInstalledDistribution(Distribution):
@@ -473,7 +476,7 @@ class BaseInstalledDistribution(Distribution):
"""
super(BaseInstalledDistribution, self).__init__(metadata)
self.path = path
self.dist_path = env
self.dist_path = env
def get_hash(self, data, hasher=None):
"""
@@ -506,28 +509,44 @@ class BaseInstalledDistribution(Distribution):
digest = base64.urlsafe_b64encode(digest).rstrip(b'=').decode('ascii')
return '%s%s' % (prefix, digest)
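get_hash() above emits the PEP 376 / wheel RECORD style value "<algo>=<urlsafe-base64 digest, padding stripped>". A self-contained sketch of the same encoding:

import base64
import hashlib

def record_hash(data, algo='sha256'):
    # Hash the bytes, then base64-encode URL-safely and strip '=' padding.
    digest = getattr(hashlib, algo)(data).digest()
    b64 = base64.urlsafe_b64encode(digest).rstrip(b'=').decode('ascii')
    return '%s=%s' % (algo, b64)

print(record_hash(b'hello world'))
# sha256=uU0nuZNNPgilLlLX2n2r-sSE7-N6U4DukIj3rOLvzek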
class InstalledDistribution(BaseInstalledDistribution):
"""Created with the *path* of the ``.dist-info`` directory provided to the
constructor. It reads the metadata contained in ``METADATA`` when it is
"""
Created with the *path* of the ``.dist-info`` directory provided to the
constructor. It reads the metadata contained in ``pydist.json`` when it is
instantiated, or uses a passed-in Metadata instance (useful for when
dry-run mode is being used)."""
dry-run mode is being used).
"""
hasher = 'sha256'
def __init__(self, path, metadata=None, env=None):
self.finder = finder = resources.finder_for_path(path)
if finder is None:
raise ValueError('finder unavailable for %s' % path)
if env and env._cache_enabled and path in env._cache.path:
metadata = env._cache.path[path].metadata
elif metadata is None:
metadata_path = os.path.join(path, 'METADATA')
metadata = Metadata(path=metadata_path, scheme='legacy')
r = finder.find(METADATA_FILENAME)
# Temporary - for legacy support
if r is None:
r = finder.find('METADATA')
if r is None:
raise ValueError('no %s found in %s' % (METADATA_FILENAME,
path))
with contextlib.closing(r.as_stream()) as stream:
metadata = Metadata(fileobj=stream, scheme='legacy')
super(InstalledDistribution, self).__init__(metadata, path, env)
if env and env._cache_enabled:
env._cache.add(self)
path = self.get_distinfo_file('REQUESTED')
self.requested = os.path.exists(path)
r = finder.find('REQUESTED')
self.requested = r is not None
def __repr__(self):
return '<InstalledDistribution %r %s at %r>' % (
@@ -544,84 +563,60 @@ class InstalledDistribution(BaseInstalledDistribution):
as stored in the file (which is as in PEP 376).
"""
results = []
path = self.get_distinfo_file('RECORD')
with CSVReader(path) as record_reader:
# Base location is parent dir of .dist-info dir
#base_location = os.path.dirname(self.path)
#base_location = os.path.abspath(base_location)
for row in record_reader:
missing = [None for i in range(len(row), 3)]
path, checksum, size = row + missing
#if not os.path.isabs(path):
# path = path.replace('/', os.sep)
# path = os.path.join(base_location, path)
results.append((path, checksum, size))
r = self.get_distinfo_resource('RECORD')
with contextlib.closing(r.as_stream()) as stream:
with CSVReader(stream=stream) as record_reader:
# Base location is parent dir of .dist-info dir
#base_location = os.path.dirname(self.path)
#base_location = os.path.abspath(base_location)
for row in record_reader:
missing = [None for i in range(len(row), 3)]
path, checksum, size = row + missing
#if not os.path.isabs(path):
# path = path.replace('/', os.sep)
# path = os.path.join(base_location, path)
results.append((path, checksum, size))
return results
@cached_property
def exports(self):
"""
Return the information exported by this distribution.
:return: A dictionary of exports, mapping an export category to a list
:return: A dictionary of exports, mapping an export category to a dict
of :class:`ExportEntry` instances describing the individual
export entries.
export entries, and keyed by name.
"""
result = {}
rf = self.get_distinfo_file('EXPORTS')
if os.path.exists(rf):
result = self.read_exports(rf)
r = self.get_distinfo_resource(EXPORTS_FILENAME)
if r:
result = self.read_exports()
return result
def read_exports(self, filename=None):
def read_exports(self):
"""
Read exports data from a file in .ini format.
:param filename: An absolute pathname of the file to read. If not
specified, the EXPORTS file in the .dist-info
directory of the distribution is read.
:return: A dictionary of exports, mapping an export category to a list
of :class:`ExportEntry` instances describing the individual
export entries.
"""
result = {}
rf = filename or self.get_distinfo_file('EXPORTS')
if os.path.exists(rf):
cp = configparser.ConfigParser()
cp.read(rf)
for key in cp.sections():
result[key] = entries = {}
for name, value in cp.items(key):
s = '%s = %s' % (name, value)
entry = get_export_entry(s)
assert entry is not None
entry.dist = self
entries[name] = entry
r = self.get_distinfo_resource(EXPORTS_FILENAME)
if r:
with contextlib.closing(r.as_stream()) as stream:
result = read_exports(stream)
return result
def write_exports(self, exports, filename=None):
def write_exports(self, exports):
"""
Write a dictionary of exports to a file in .ini format.
:param exports: A dictionary of exports, mapping an export category to
a list of :class:`ExportEntry` instances describing the
individual export entries.
:param filename: The absolute pathname of the file to write to. If not
specified, the EXPORTS file in the .dist-info
directory is written to.
"""
rf = filename or self.get_distinfo_file('EXPORTS')
cp = configparser.ConfigParser()
for k, v in exports.items():
# TODO check k, v for valid values
cp.add_section(k)
for entry in v.values():
if entry.suffix is None:
s = entry.prefix
else:
s = '%s:%s' % (entry.prefix, entry.suffix)
if entry.flags:
s = '%s [%s]' % (s, ', '.join(entry.flags))
cp.set(k, entry.name, s)
rf = self.get_distinfo_file(EXPORTS_FILENAME)
with open(rf, 'w') as f:
cp.write(f)
write_exports(exports, f)
def get_resource_path(self, relative_path):
"""
@@ -634,11 +629,12 @@ class InstalledDistribution(BaseInstalledDistribution):
of interest.
:return: The absolute path where the resource is to be found.
"""
path = self.get_distinfo_file('RESOURCES')
with CSVReader(path) as resources_reader:
for relative, destination in resources_reader:
if relative == relative_path:
return destination
r = self.get_distinfo_resource('RESOURCES')
with contextlib.closing(r.as_stream()) as stream:
with CSVReader(stream=stream) as resources_reader:
for relative, destination in resources_reader:
if relative == relative_path:
return destination
raise KeyError('no resource file with relative path %r '
'is installed' % relative_path)
@@ -663,10 +659,10 @@ class InstalledDistribution(BaseInstalledDistribution):
base = os.path.dirname(self.path)
base_under_prefix = base.startswith(prefix)
base = os.path.join(base, '')
record_path = os.path.join(self.path, 'RECORD')
record_path = self.get_distinfo_file('RECORD')
logger.info('creating %s', record_path)
if dry_run:
return
return None
with CSVWriter(record_path) as writer:
for path in paths:
if os.path.isdir(path) or path.endswith(('.pyc', '.pyo')):
@@ -677,7 +673,7 @@ class InstalledDistribution(BaseInstalledDistribution):
with open(path, 'rb') as fp:
hash_value = self.get_hash(fp.read())
if path.startswith(base) or (base_under_prefix and
path.startswith(prefix)):
path.startswith(prefix)):
path = os.path.relpath(path, base)
writer.writerow((path, hash_value, size))
@@ -685,6 +681,7 @@ class InstalledDistribution(BaseInstalledDistribution):
if record_path.startswith(base):
record_path = os.path.relpath(record_path, base)
writer.writerow((record_path, '', ''))
return record_path
def check_installed_files(self):
"""
@@ -697,7 +694,7 @@ class InstalledDistribution(BaseInstalledDistribution):
"""
mismatches = []
base = os.path.dirname(self.path)
record_path = os.path.join(self.path, 'RECORD')
record_path = self.get_distinfo_file('RECORD')
for path, hash_value, size in self.list_installed_files():
if not os.path.isabs(path):
path = os.path.join(base, path)
@@ -760,7 +757,7 @@ class InstalledDistribution(BaseInstalledDistribution):
shared_path = os.path.join(self.path, 'SHARED')
logger.info('creating %s', shared_path)
if dry_run:
return
return None
lines = []
for key in ('prefix', 'lib', 'headers', 'scripts', 'data'):
path = paths[key]
@@ -773,6 +770,15 @@ class InstalledDistribution(BaseInstalledDistribution):
f.write('\n'.join(lines))
return shared_path
def get_distinfo_resource(self, path):
if path not in DIST_FILES:
raise DistlibException('invalid path for a dist-info file: '
'%r at %r' % (path, self.path))
finder = resources.finder_for_path(self.path)
if finder is None:
raise DistlibException('Unable to get a finder for %s' % self.path)
return finder.find(path)
def get_distinfo_file(self, path):
"""
Returns a path located under the ``.dist-info`` directory. Returns a
@@ -783,7 +789,7 @@ class InstalledDistribution(BaseInstalledDistribution):
If *path* is an absolute path and doesn't start
with the ``.dist-info`` directory path,
a :class:`DistlibException` is raised.
:type path: string
:type path: str
:rtype: str
"""
# Check if it is an absolute path # XXX use relpath, add tests
@@ -797,8 +803,8 @@ class InstalledDistribution(BaseInstalledDistribution):
# The file must be relative
if path not in DIST_FILES:
raise DistlibException('invalid path for a dist-info file: %r' %
path)
raise DistlibException('invalid path for a dist-info file: '
'%r at %r' % (path, self.path))
return os.path.join(self.path, path)
@@ -842,15 +848,15 @@ class EggInfoDistribution(BaseInstalledDistribution):
s.version = v
self.path = path
self.dist_path = env
self.dist_path = env
if env and env._cache_enabled and path in env._cache_egg.path:
metadata = env._cache_egg.path[path].metadata
set_name_and_version(self, metadata['Name'], metadata['Version'])
set_name_and_version(self, metadata.name, metadata.version)
else:
metadata = self._get_metadata(path)
# Need to be set before caching
set_name_and_version(self, metadata['Name'], metadata['Version'])
set_name_and_version(self, metadata.name, metadata.version)
if env and env._cache_enabled:
env._cache_egg.add(self)
@@ -859,19 +865,13 @@ class EggInfoDistribution(BaseInstalledDistribution):
def _get_metadata(self, path):
requires = None
def parse_requires(req_path):
def parse_requires_data(data):
"""Create a list of dependencies from a requires.txt file.
*req_path* must be the path to a setuptools-produced requires.txt file.
*data*: the contents of a setuptools-produced requires.txt file.
"""
reqs = []
try:
with open(req_path, 'r') as fp:
lines = fp.read().splitlines()
except IOError:
return reqs
lines = data.splitlines()
for line in lines:
line = line.strip()
if line.startswith('['):
@@ -892,12 +892,26 @@ class EggInfoDistribution(BaseInstalledDistribution):
reqs.append('%s (%s)' % (r.name, cons))
return reqs
def parse_requires_path(req_path):
"""Create a list of dependencies from a requires.txt file.
*req_path*: the path to a setuptools-produced requires.txt file.
"""
reqs = []
try:
with codecs.open(req_path, 'r', 'utf-8') as fp:
reqs = parse_requires_data(fp.read())
except IOError:
pass
return reqs
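parse_requires_data() keeps only the main (un-bracketed) section of a setuptools requires.txt and normalises each line into "name (constraints)" form. An illustrative standalone mimic:

import re

def requires_from_text(data):
    reqs = []
    for line in data.splitlines():
        line = line.strip()
        if line.startswith('['):
            break              # extras sections follow; main section is done
        if not line:
            continue
        m = re.match(r'^([\w.-]+)\s*(.*)$', line)
        name, cons = m.group(1), m.group(2).strip()
        reqs.append('%s (%s)' % (name, cons) if cons else name)
    return reqs

print(requires_from_text('requests>=1.2\nsix\n\n[test]\npytest\n'))
# ['requests (>=1.2)', 'six']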
if path.endswith('.egg'):
if os.path.isdir(path):
meta_path = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
metadata = Metadata(path=meta_path, scheme='legacy')
req_path = os.path.join(path, 'EGG-INFO', 'requires.txt')
requires = parse_requires(req_path)
requires = parse_requires_path(req_path)
else:
# FIXME handle the case where zipfile is not available
zipf = zipimport.zipimporter(path)
@@ -905,26 +919,22 @@ class EggInfoDistribution(BaseInstalledDistribution):
zipf.get_data('EGG-INFO/PKG-INFO').decode('utf8'))
metadata = Metadata(fileobj=fileobj, scheme='legacy')
try:
requires = zipf.get_data('EGG-INFO/requires.txt')
data = zipf.get_data('EGG-INFO/requires.txt')
requires = parse_requires_data(data.decode('utf-8'))
except IOError:
requires = None
elif path.endswith('.egg-info'):
if os.path.isdir(path):
path = os.path.join(path, 'PKG-INFO')
req_path = os.path.join(path, 'requires.txt')
requires = parse_requires(req_path)
requires = parse_requires_path(req_path)
metadata = Metadata(path=path, scheme='legacy')
else:
raise DistlibException('path must end with .egg-info or .egg, '
'got %r' % path)
if requires:
if metadata['Metadata-Version'] == '1.1':
# we can't have 1.1 metadata *and* Setuptools requires
for field in ('Obsoletes', 'Requires', 'Provides'):
if field in metadata:
del metadata[field]
metadata['Requires-Dist'] += requires
metadata.add_requirements(requires)
return metadata
def __repr__(self):
@@ -946,14 +956,14 @@ class EggInfoDistribution(BaseInstalledDistribution):
mismatches = []
record_path = os.path.join(self.path, 'installed-files.txt')
if os.path.exists(record_path):
for path, hash, size in self.list_installed_files():
for path, _, _ in self.list_installed_files():
if path == record_path:
continue
if not os.path.exists(path):
mismatches.append((path, 'exists', True, False))
return mismatches
def list_installed_files(self, local=False):
def list_installed_files(self):
"""
Iterates over the ``installed-files.txt`` entries and returns a tuple
``(path, hash, size)`` for each line.
@@ -991,16 +1001,16 @@ class EggInfoDistribution(BaseInstalledDistribution):
result.append((record_path, None, None))
return result
def list_distinfo_files(self, local=False):
def list_distinfo_files(self, absolute=False):
"""
Iterates over the ``installed-files.txt`` entries and returns paths for
each line if the path is pointing to a file located in the
``.egg-info`` directory or one of its subdirectories.
:parameter local: If *local* is ``True``, each returned path is
:parameter absolute: If *absolute* is ``True``, each returned path is
transformed into a local absolute path. Otherwise the
raw value from ``installed-files.txt`` is returned.
:type local: boolean
:type absolute: boolean
:returns: iterator of paths
"""
record_path = os.path.join(self.path, 'installed-files.txt')
@@ -1014,7 +1024,7 @@ class EggInfoDistribution(BaseInstalledDistribution):
if not skip:
p = os.path.normpath(os.path.join(self.path, line))
if p.startswith(self.path):
if local:
if absolute:
yield p
else:
yield line
@@ -1121,7 +1131,7 @@ class DependencyGraph(object):
for other, label in adjs:
if not label is None:
f.write('"%s" -> "%s" [label="%s"]\n' %
(dist.name, other.name, label))
(dist.name, other.name, label))
else:
f.write('"%s" -> "%s"\n' % (dist.name, other.name))
if not skip_disconnected and len(disconnected) > 0:
@@ -1162,7 +1172,7 @@ class DependencyGraph(object):
for k, v in alist.items():
alist[k] = [(d, r) for d, r in v if d not in to_remove]
logger.debug('Moving to result: %s',
['%s (%s)' % (d.name, d.version) for d in to_remove])
['%s (%s)' % (d.name, d.version) for d in to_remove])
result.extend(to_remove)
return result, list(alist.keys())
@@ -1191,28 +1201,14 @@ def make_graph(dists, scheme='default'):
graph.add_distribution(dist)
for p in dist.provides:
comps = p.strip().rsplit(" ", 1)
name = comps[0]
version = None
if len(comps) == 2:
version = comps[1]
if len(version) < 3 or version[0] != '(' or version[-1] != ')':
logger.warning('distribution %r has ill-formed '
'provides field: %r', dist.name, p)
continue
# don't raise an exception. Legacy installed distributions
# could have all manner of metadata
#raise DistlibException('distribution %r has ill-formed '
# 'provides field: %r' % (dist.name, p))
version = version[1:-1] # trim off parenthesis
# Add name in lower case for case-insensitivity
name = name.lower()
name, version = parse_name_and_version(p)
logger.debug('Add to provided: %s, %s, %s', name, version, dist)
provided.setdefault(name, []).append((version, dist))
# now make the edges
for dist in dists:
requires = (dist.requires | dist.setup_requires)
requires = (dist.run_requires | dist.meta_requires |
dist.build_requires | dist.dev_requires)
for req in requires:
try:
matcher = scheme.matcher(req)
@@ -1267,6 +1263,7 @@ def get_dependent_dists(dists, dist):
dep.pop(0) # remove dist from dep, was there to prevent infinite loops
return dep
def get_required_dists(dists, dist):
"""Recursively generate a list of distributions from *dists* that are
required by *dist*.
@@ -1291,11 +1288,14 @@ def get_required_dists(dists, dist):
return req
def make_dist(name, version, **kwargs):
"""
A convenience method for making a dist given just a name and version.
"""
summary = kwargs.pop('summary', 'Placeholder for summary')
md = Metadata(**kwargs)
md['Name'] = name
md['Version'] = version
md.name = name
md.version = version
md.summary = summary or 'Placeholder for summary'
return Distribution(md)


@@ -5,12 +5,9 @@
# See LICENSE.txt and CONTRIBUTORS.txt.
#
import hashlib
import itertools
import logging
import os
import shutil
import socket
from string import ascii_lowercase
import subprocess
import tempfile
from threading import Thread
@@ -22,7 +19,6 @@ from distlib.util import cached_property, zip_dir
logger = logging.getLogger(__name__)
DEFAULT_MIRROR_HOST = 'last.pypi.python.org'
DEFAULT_INDEX = 'http://pypi.python.org/pypi'
DEFAULT_REALM = 'pypi'
@@ -34,23 +30,14 @@ class PackageIndex(object):
boundary = b'----------ThIs_Is_tHe_distlib_index_bouNdaRY_$'
def __init__(self, url=None, mirror_host=None):
def __init__(self, url=None):
"""
Initialise an instance.
:param url: The URL of the index. If not specified, the URL for PyPI is
used.
:param mirror_host: If not specified, ``last.pypi.python.org`` is used.
This is expected to have a canonical name which
allows all mirror hostnames to be divined (e.g. if
the canonical hostname for ``last.pypi.python.org``
is ``g.pypi.python.org``, then the mirrors that are
available would be assumed to be
``a.pypi.python.org``, ``b.pypi.python.org``, ...
up to and including ``g.pypi.python.org``.
"""
self.url = url or DEFAULT_INDEX
self.mirror_host = mirror_host or DEFAULT_MIRROR_HOST
self.read_configuration()
scheme, netloc, path, params, query, frag = urlparse(self.url)
if params or query or frag or scheme not in ('http', 'https'):
@@ -130,10 +117,8 @@ class PackageIndex(object):
request.
"""
self.check_credentials()
missing, warnings = metadata.check(True) # strict check
logger.debug('result of check: missing: %s, warnings: %s',
missing, warnings)
d = metadata.todict(True)
metadata.validate()
d = metadata.todict()
d[':action'] = 'verify'
request = self.encode_request(d.items(), [])
response = self.send_request(request)
@@ -259,10 +244,8 @@ class PackageIndex(object):
self.check_credentials()
if not os.path.exists(filename):
raise DistlibException('not found: %s' % filename)
missing, warnings = metadata.check(True) # strict check
logger.debug('result of check: missing: %s, warnings: %s',
missing, warnings)
d = metadata.todict(True)
metadata.validate()
d = metadata.todict()
sig_file = None
if signer:
if not self.gpg:
@@ -271,13 +254,15 @@ class PackageIndex(object):
sig_file = self.sign_file(filename, signer, sign_password)
with open(filename, 'rb') as f:
file_data = f.read()
digest = hashlib.md5(file_data).hexdigest()
md5_digest = hashlib.md5(file_data).hexdigest()
sha256_digest = hashlib.sha256(file_data).hexdigest()
d.update({
':action': 'file_upload',
'protcol_version': '1',
'filetype': filetype,
'pyversion': pyversion,
'md5_digest': digest,
'md5_digest': md5_digest,
'sha256_digest': sha256_digest,
})
files = [('content', os.path.basename(filename), file_data)]
if sig_file:
@@ -309,9 +294,7 @@ class PackageIndex(object):
fn = os.path.join(doc_dir, 'index.html')
if not os.path.exists(fn):
raise DistlibException('not found: %r' % fn)
missing, warnings = metadata.check(True) # strict check
logger.debug('result of check: missing: %s, warnings: %s',
missing, warnings)
metadata.validate()
name, version = metadata.name, metadata.version
zip_data = zip_dir(doc_dir).getvalue()
fields = [(':action', 'doc_upload'),
@@ -382,12 +365,14 @@ class PackageIndex(object):
"""
if digest is None:
digester = None
logger.debug('No digest specified')
else:
if isinstance(digest, (list, tuple)):
hasher, digest = digest
else:
hasher = 'md5'
digester = getattr(hashlib, hasher)()
logger.debug('Digest specified: %s', digest)
# The following code is equivalent to urlretrieve.
# We need to do it this way so that we can compute the
# digest of the file as we go.
@@ -428,9 +413,10 @@ class PackageIndex(object):
if digester:
actual = digester.hexdigest()
if digest != actual:
raise DistlibException('MD5 digest mismatch for %s: expected '
'%s, got %s' % (destfile, digest,
actual))
raise DistlibException('%s digest mismatch for %s: expected '
'%s, got %s' % (hasher, destfile,
digest, actual))
logger.debug('Digest verified: %s', digest)
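The mismatch message now names the hasher rather than assuming MD5. A condensed sketch of the same verify-while-reading idea (the path and digest handling is illustrative, not distlib's download code):

import hashlib

def verify_digest(path, algo, expected):
    digester = getattr(hashlib, algo)()
    with open(path, 'rb') as f:
        for block in iter(lambda: f.read(8192), b''):
            digester.update(block)    # hash the file as it is read
    actual = digester.hexdigest()
    if actual != expected:
        raise ValueError('%s digest mismatch for %s: expected %s, got %s'
                         % (algo, path, expected, actual))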
def send_request(self, req):
"""
@@ -490,26 +476,3 @@ class PackageIndex(object):
'Content-length': str(len(body))
}
return Request(self.url, body, headers)
@cached_property
def mirrors(self):
"""
Return the list of hostnames which are mirrors for this index.
:return: A (possibly empty) list of hostnames of mirrors.
"""
result = []
try:
host = socket.gethostbyname_ex(self.mirror_host)[0]
except socket.gaierror: # pragma: no cover
host = None
if host:
last, rest = host.split('.', 1)
n = len(last)
host_list = (''.join(w) for w in itertools.chain.from_iterable(
itertools.product(ascii_lowercase, repeat=i)
for i in range(1, n + 1)))
for s in host_list:
result.append('.'.join((s, rest)))
if s == last:
break
return result


@@ -24,13 +24,13 @@ from .database import Distribution, DistributionPath, make_dist
from .metadata import Metadata
from .util import (cached_property, parse_credentials, ensure_slash,
split_filename, get_project_data, parse_requirement,
ServerProxy)
parse_name_and_version, ServerProxy)
from .version import get_scheme, UnsupportedVersionError
from .wheel import Wheel, is_compatible
logger = logging.getLogger(__name__)
MD5_HASH = re.compile('^md5=([a-f0-9]+)$')
HASHER_HASH = re.compile('^(\w+)=([a-f0-9]+)')
CHARSET = re.compile(r';\s*charset\s*=\s*(.*)\s*$', re.I)
HTML_CONTENT_TYPE = re.compile('text/html|application/x(ht)?ml')
DEFAULT_INDEX = 'http://python.org/pypi'
@@ -106,6 +106,10 @@ class Locator(object):
# Because of bugs in some of the handlers on some of the platforms,
# we use our own opener rather than just using urlopen.
self.opener = build_opener(RedirectHandler())
# If get_project() is called from locate(), the matcher instance
# is set from the requirement passed to locate(). See issue #18 for
# why this can be useful to know.
self.matcher = None
def clear_cache(self):
self._cache.clear()
@@ -124,6 +128,9 @@ class Locator(object):
instances.
This should be implemented in subclasses.
If called from a locate() request, self.matcher will be set to a
matcher for the requirement to satisfy, otherwise it will be None.
"""
raise NotImplementedError('Please implement in the subclass')
@@ -167,10 +174,8 @@ class Locator(object):
The current implementation favours http:// URLs over https://, archives
from PyPI over those from other locations and then the archive name.
"""
if url1 == 'UNKNOWN':
result = url2
else:
result = url2
result = url2
if url1:
s1 = self.score_url(url1)
s2 = self.score_url(url2)
if s1 > s2:
@@ -210,6 +215,11 @@ class Locator(object):
if frag.lower().startswith('egg='):
logger.debug('%s: version hint in fragment: %r',
project_name, frag)
m = HASHER_HASH.match(frag)
if m:
algo, digest = m.groups()
else:
algo, digest = None, None
origpath = path
if path and path[-1] == '/':
path = path[:-1]
@@ -231,9 +241,6 @@ class Locator(object):
'python-version': ', '.join(
['.'.join(list(v[2:])) for v in wheel.pyver]),
}
m = MD5_HASH.match(frag)
if m:
result['md5_digest'] = m.group(1)
except Exception as e:
logger.warning('invalid path for wheel: %s', path)
elif path.endswith(self.downloadable_extensions):
@@ -257,10 +264,25 @@ class Locator(object):
}
if pyver:
result['python-version'] = pyver
m = MD5_HASH.match(frag)
if m:
result['md5_digest'] = m.group(1)
break
if result and algo:
result['%s_digest' % algo] = digest
return result
def _get_digest(self, info):
"""
Get a digest from a dictionary by looking at keys of the form
'algo_digest'.
Returns a 2-tuple (algo, digest) if found, else None. Currently
looks only for SHA256, then MD5.
"""
result = None
for algo in ('sha256', 'md5'):
key = '%s_digest' % algo
if key in info:
result = (algo, info[key])
break
return result
def _update_version_data(self, result, info):
@@ -277,12 +299,9 @@ class Locator(object):
else:
dist = make_dist(name, version, scheme=self.scheme)
md = dist.metadata
dist.md5_digest = info.get('md5_digest')
if 'python-version' in info:
md['Requires-Python'] = info['python-version']
if md['Download-URL'] != info['url']:
md['Download-URL'] = self.prefer_url(md['Download-URL'],
info['url'])
dist.digest = self._get_digest(info)
if md.source_url != info['url']:
md.source_url = self.prefer_url(md.source_url, info['url'])
dist.locator = self
result[version] = dist
@@ -300,20 +319,17 @@ class Locator(object):
distribution could be located.
"""
result = None
scheme = get_scheme(self.scheme)
r = parse_requirement(requirement)
if r is None:
raise DistlibException('Not a valid requirement: %r' % requirement)
if r.extras:
# lose the extras part of the requirement
requirement = r.requirement
matcher = scheme.matcher(requirement)
vcls = matcher.version_class
scheme = get_scheme(self.scheme)
self.matcher = matcher = scheme.matcher(r.requirement)
logger.debug('matcher: %s (%s)', matcher, type(matcher).__name__)
versions = self.get_project(matcher.name)
versions = self.get_project(r.name)
if versions:
# sometimes, versions are invalid
slist = []
vcls = matcher.version_class
for k in versions:
try:
if not matcher.match(k):
@@ -322,7 +338,8 @@ class Locator(object):
if prereleases or not vcls(k).is_prerelease:
slist.append(k)
else:
logger.debug('skipping pre-release version %s', k)
logger.debug('skipping pre-release '
'version %s of %s', k, matcher.name)
except Exception:
logger.warning('error matching %s with %r', matcher, k)
pass # slist.append(k)
@@ -333,13 +350,14 @@ class Locator(object):
result = versions[slist[-1]]
if result and r.extras:
result.extras = r.extras
self.matcher = None
return result
class PyPIRPCLocator(Locator):
"""
This locator uses XML-RPC to locate distributions. It therefore cannot be
used with simple mirrors (that only mirror file content).
This locator uses XML-RPC to locate distributions. It therefore
cannot be used with simple mirrors (that only mirror file content).
"""
def __init__(self, url, **kwargs):
"""
@@ -365,12 +383,16 @@ class PyPIRPCLocator(Locator):
urls = self.client.release_urls(name, v)
data = self.client.release_data(name, v)
metadata = Metadata(scheme=self.scheme)
metadata.update(data)
metadata.name = data['name']
metadata.version = data['version']
metadata.license = data.get('license')
metadata.keywords = data.get('keywords', [])
metadata.summary = data.get('summary')
dist = Distribution(metadata)
if urls:
info = urls[0]
metadata['Download-URL'] = info['url']
dist.md5_digest = info.get('md5_digest')
metadata.source_url = info['url']
dist.digest = self._get_digest(info)
dist.locator = self
result[v] = dist
return result
@@ -398,13 +420,18 @@ class PyPIJSONLocator(Locator):
data = resp.read().decode() # for now
d = json.loads(data)
md = Metadata(scheme=self.scheme)
md.update(d['info'])
data = d['info']
md.name = data['name']
md.version = data['version']
md.license = data.get('license')
md.keywords = data.get('keywords', [])
md.summary = data.get('summary')
dist = Distribution(md)
urls = d['urls']
if urls:
info = urls[0]
md['Download-URL'] = info['url']
dist.md5_digest = info.get('md5_digest')
md.source_url = info['url']
dist.digest = self._get_digest(info)
dist.locator = self
result[md.version] = dist
except Exception as e:
@@ -791,11 +818,18 @@ class JSONLocator(Locator):
for info in data.get('files', []):
if info['ptype'] != 'sdist' or info['pyversion'] != 'source':
continue
# We don't store summary in project metadata as it makes
# the data bigger for no benefit during dependency
# resolution
dist = make_dist(data['name'], info['version'],
summary=data.get('summary',
'Placeholder for summary'),
scheme=self.scheme)
md = dist.metadata
md['Download-URL'] = info['url']
dist.md5_digest = info.get('digest')
md.source_url = info['url']
# TODO SHA256 digest
if 'digest' in info and info['digest']:
dist.digest = ('md5', info['digest'])
md.dependencies = info.get('requirements', {})
dist.exports = info.get('exports', {})
result[dist.version] = dist
@@ -860,13 +894,32 @@ class AggregatingLocator(Locator):
def _get_project(self, name):
result = {}
for locator in self.locators:
r = locator.get_project(name)
if r:
d = locator.get_project(name)
if d:
if self.merge:
result.update(r)
result.update(d)
else:
result = r
break
# See issue #18. If any dists are found and we're looking
# for specific constraints, we only return something if
# a match is found. For example, if a DirectoryLocator
# returns just foo (1.0) while we're looking for
# foo (>= 2.0), we'll pretend there was nothing there so
# that subsequent locators can be queried. Otherwise we
# would just return foo (1.0) which would then lead to a
# failure to find foo (>= 2.0), because other locators
# weren't searched. Note that this only matters when
# merge=False.
if self.matcher is None:
found = True
else:
found = False
for k in d:
if self.matcher.match(k):
found = True
break
if found:
result = d
break
return result
def get_distribution_names(self):
@@ -882,13 +935,19 @@ class AggregatingLocator(Locator):
return result
# We use a legacy scheme simply because most of the dists on PyPI use legacy
# versions which don't conform to PEP 426 / PEP 440.
default_locator = AggregatingLocator(
JSONLocator(),
SimpleScrapingLocator('https://pypi.python.org/simple/',
timeout=3.0))
timeout=3.0),
scheme='legacy')
locate = default_locator.locate
NAME_VERSION_RE = re.compile(r'(?P<name>[\w-]+)\s*'
r'\(\s*(==\s*)?(?P<ver>[^)]+)\)$')
class DependencyFinder(object):
"""
Locate dependencies for distributions.
@@ -902,25 +961,6 @@ class DependencyFinder(object):
self.locator = locator or default_locator
self.scheme = get_scheme(self.locator.scheme)
def _get_name_and_version(self, p):
"""
A utility method used to get name and version from e.g. a Provides-Dist
value.
:param p: A value in a form foo (1.0)
:return: The name and version as a tuple.
"""
comps = p.strip().rsplit(' ', 1)
name = comps[0]
version = None
if len(comps) == 2:
version = comps[1]
if len(version) < 3 or version[0] != '(' or version[-1] != ')':
raise DistlibException('Ill-formed provides field: %r' % p)
version = version[1:-1] # trim off parentheses
# Name in lower case for case-insensitivity
return name.lower(), version
def add_distribution(self, dist):
"""
Add a distribution to the finder. This will update internal information
@@ -932,7 +972,7 @@ class DependencyFinder(object):
self.dists_by_name[name] = dist
self.dists[(name, dist.version)] = dist
for p in dist.provides:
name, version = self._get_name_and_version(p)
name, version = parse_name_and_version(p)
logger.debug('Add to provided: %s, %s, %s', name, version, dist)
self.provided.setdefault(name, set()).add((version, dist))
@@ -947,7 +987,7 @@ class DependencyFinder(object):
del self.dists_by_name[name]
del self.dists[(name, dist.version)]
for p in dist.provides:
name, version = self._get_name_and_version(p)
name, version = parse_name_and_version(p)
logger.debug('Remove from provided: %s, %s, %s', name, version, dist)
s = self.provided[name]
s.remove((version, dist))
@@ -1033,15 +1073,17 @@ class DependencyFinder(object):
result = True
return result
def find(self, requirement, tests=False, prereleases=False):
def find(self, requirement, meta_extras=None, prereleases=False):
"""
Find a distribution matching requirement and all distributions
it depends on. Use the ``tests`` argument to determine whether
distributions used only for testing should be included in the
results. Allow ``requirement`` to be either a :class:`Distribution`
instance or a string expressing a requirement. If ``prereleases``
is True, allow pre-release versions to be returned - otherwise,
don't.
Find a distribution and all distributions it depends on.
:param requirement: The requirement specifying the distribution to
find, or a Distribution instance.
:param meta_extras: A list of meta extras such as :test:, :build: and
so on.
:param prereleases: If ``True``, allow pre-release versions to be
returned - otherwise, don't return prereleases
unless they're all that's available.
Return a set of :class:`Distribution` instances and a set of
problems.
@@ -1062,6 +1104,12 @@ class DependencyFinder(object):
self.dists_by_name = {}
self.reqts = {}
meta_extras = set(meta_extras or [])
if ':*:' in meta_extras:
meta_extras.remove(':*:')
# :meta: and :run: are implicitly included
meta_extras |= set([':test:', ':build:', ':dev:'])
if isinstance(requirement, Distribution):
dist = odist = requirement
logger.debug('passed %s as requirement', odist)
@@ -1077,7 +1125,7 @@ class DependencyFinder(object):
install_dists = set([odist])
while todo:
dist = todo.pop()
name = dist.key # case-insensitive
name = dist.key # case-insensitive
if name not in self.dists_by_name:
self.add_distribution(dist)
else:
@@ -1086,19 +1134,24 @@ class DependencyFinder(object):
if other != dist:
self.try_to_replace(dist, other, problems)
ireqts = dist.requires
sreqts = dist.setup_requires
ireqts = dist.run_requires | dist.meta_requires
sreqts = dist.build_requires
ereqts = set()
if not tests or dist not in install_dists:
treqts = set()
else:
treqts = dist.test_requires
all_reqts = ireqts | sreqts | treqts | ereqts
if dist in install_dists:
for key in ('test', 'build', 'dev'):
e = ':%s:' % key
if e in meta_extras:
ereqts |= getattr(dist, '%s_requires' % key)
all_reqts = ireqts | sreqts | ereqts
for r in all_reqts:
providers = self.find_providers(r)
if not providers:
logger.debug('No providers found for %r', r)
provider = self.locator.locate(r, prereleases=prereleases)
# If no provider is found and we didn't consider
# prereleases, consider them now.
if provider is None and not prereleases:
provider = self.locator.locate(r, prereleases=True)
if provider is None:
logger.debug('Cannot satisfy %r', r)
problems.add(('unsatisfied', r))
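A hedged usage sketch for the reworked find() signature; the requirement and meta extras shown are illustrative, and resolution queries the index via the default locator:

from distlib.locators import DependencyFinder, default_locator

finder = DependencyFinder(default_locator)
dists, problems = finder.find('requests (>= 1.2)',
                              meta_extras=[':test:'],
                              prereleases=False)
for dist in sorted(dists, key=lambda d: d.key):
    print(dist.name, dist.version)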


@@ -97,6 +97,7 @@ class Manifest(object):
"""
Return sorted files in directory order
"""
def add_dir(dirs, d):
dirs.add(d)
logger.debug('add_dir added %s', d)
@@ -185,7 +186,7 @@ class Manifest(object):
if not self._exclude_pattern(None, prefix=dirpattern):
logger.warning('no previously-included directories found '
'matching %r', dirpattern)
else: #pragma: no cover
else: # pragma: no cover
# This should never happen, as it should be caught in
# _parse_template_line
raise DistlibException(
@ -202,9 +203,12 @@ class Manifest(object):
:return: A tuple of action, patterns, thedir, dir_patterns
"""
words = directive.split()
if len(words) == 1 and words[0] not in (
'include', 'exclude', 'global-include', 'global-exclude',
'recursive-include', 'recursive-exclude', 'graft', 'prune'):
if len(words) == 1 and words[0] not in ('include', 'exclude',
'global-include',
'global-exclude',
'recursive-include',
'recursive-exclude',
'graft', 'prune'):
# no action given, let's use the default 'include'
words.insert(0, 'include')
@ -281,7 +285,7 @@ class Manifest(object):
return found
def _exclude_pattern(self, pattern, anchor=True, prefix=None,
is_regex=False):
is_regex=False):
"""Remove strings (presumably filenames) from 'files' that match
'pattern'.
@ -300,7 +304,6 @@ class Manifest(object):
found = True
return found
def _translate_pattern(self, pattern, anchor=True, prefix=None,
is_regex=False):
"""Translate a shell-like wildcard pattern to a compiled regular
@ -330,7 +333,7 @@ class Manifest(object):
if os.sep == '\\':
sep = r'\\'
pattern_re = '^' + base + sep.join((prefix_re,
'.*' + pattern_re))
'.*' + pattern_re))
else: # no prefix -- respect anchor flag
if anchor:
pattern_re = '^' + base + pattern_re
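
For orientation, these directives are the MANIFEST.in template language; a
hedged sketch of driving it through the public API (findall, process_directive
and sorted are upstream Manifest methods, assumed unchanged here; the project
root is hypothetical):

    from pip.vendor.distlib.manifest import Manifest

    m = Manifest('/path/to/project')    # hypothetical project root
    m.findall()                         # scan the tree into m.allfiles
    m.process_directive('include README.rst')
    m.process_directive('*.txt')        # bare pattern: 'include' is assumed
    m.process_directive('recursive-include docs *.rst')
    print(m.sorted())                   # matched files, in directory order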

View file

@ -16,6 +16,7 @@ from .util import in_venv
__all__ = ['interpret']
class Evaluator(object):
"""
A limited evaluator for Python expressions.
@ -34,16 +35,17 @@ class Evaluator(object):
}
allowed_values = {
'sys.platform': sys.platform,
'sys_platform': sys.platform,
'python_version': '%s.%s' % sys.version_info[:2],
# parsing sys.version is not reliable, but there is no other
# way to get e.g. 2.7.2+, and the PEP is defined with sys.version
'python_full_version': sys.version.split(' ', 1)[0],
'os.name': os.name,
'platform.in_venv': str(in_venv()),
'platform.version': platform.version(),
'platform.machine': platform.machine(),
'platform.python_implementation': platform.python_implementation(),
'os_name': os.name,
'platform_in_venv': str(in_venv()),
'platform_release': platform.release(),
'platform_version': platform.version(),
'platform_machine': platform.machine(),
'platform_python_implementation': python_implementation(),
}
def __init__(self, context=None):
@ -102,7 +104,6 @@ class Evaluator(object):
return '%s.%s' % (node.value.id, node.attr)
def do_attribute(self, node):
valid = True
if not isinstance(node.value, ast.Name):
valid = False
else:
@ -133,11 +134,11 @@ class Evaluator(object):
valid = True
if isinstance(lhsnode, ast.Str) and isinstance(rhsnode, ast.Str):
valid = False
elif (isinstance(lhsnode, ast.Attribute)
and isinstance(rhsnode, ast.Attribute)):
klhs = self.get_attr_key(lhsnode)
krhs = self.get_attr_key(rhsnode)
valid = klhs != krhs
#elif (isinstance(lhsnode, ast.Attribute)
# and isinstance(rhsnode, ast.Attribute)):
# klhs = self.get_attr_key(lhsnode)
# krhs = self.get_attr_key(rhsnode)
# valid = klhs != krhs
if not valid:
s = self.get_fragment(node.col_offset)
raise SyntaxError('Invalid comparison: %s' % s)
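
The effect of the renamed marker variables can be checked with interpret(); a
minimal sketch (the optional second argument, an execution context dict that
overrides the Evaluator defaults, follows upstream distlib and is an
assumption here):

    from pip.vendor.distlib.markers import interpret

    interpret('python_version >= "2.6"')    # evaluated against this interpreter
    interpret('sys_platform == "win32" or os_name == "posix"')
    interpret('python_version < "3.0"', {'python_version': '2.7'})    # True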

View file

@ -11,17 +11,20 @@ from __future__ import unicode_literals
import codecs
from email import message_from_file
import json
import logging
import re
from . import DistlibException
from .compat import StringIO, string_types
from . import DistlibException, __version__
from .compat import StringIO, string_types, text_type
from .markers import interpret
from .version import get_scheme
from .util import extract_by_key, get_extras
from .version import get_scheme, PEP426_VERSION_RE
logger = logging.getLogger(__name__)
class MetadataMissingError(DistlibException):
"""A required metadata is missing"""
@ -34,31 +37,8 @@ class MetadataUnrecognizedVersionError(DistlibException):
"""Unknown metadata version number."""
try:
# docutils is installed
from docutils.utils import Reporter
from docutils.parsers.rst import Parser
from docutils import frontend
from docutils import nodes
class SilentReporter(Reporter, object):
def __init__(self, source, report_level, halt_level, stream=None,
debug=0, encoding='ascii', error_handler='replace'):
self.messages = []
super(SilentReporter, self).__init__(
source, report_level, halt_level, stream,
debug, encoding, error_handler)
def system_message(self, level, message, *children, **kwargs):
self.messages.append((level, message, children, kwargs))
return nodes.system_message(message, level=level, type=self.
levels[level], *children, **kwargs)
_HAS_DOCUTILS = True
except ImportError:
# docutils is not installed
_HAS_DOCUTILS = False
class MetadataInvalidError(DistlibException):
"""A metadata value is invalid"""
# public API of this module
__all__ = ['Metadata', 'PKG_INFO_ENCODING', 'PKG_INFO_PREFERRED_VERSION']
@ -118,6 +98,7 @@ _ALL_FIELDS.update(_426_FIELDS)
EXTRA_RE = re.compile(r'''extra\s*==\s*("([^"]+)"|'([^']+)')''')
def _version2fieldlist(version):
if version == '1.0':
return _241_FIELDS
@ -238,32 +219,39 @@ _MISSING = object()
_FILESAFE = re.compile('[^A-Za-z0-9.]+')
class Metadata(object):
"""The metadata of a release.
def _get_name_and_version(name, version, for_filename=False):
"""Return the distribution name with version.
If for_filename is true, return a filename-escaped form."""
if for_filename:
# For both name and version any runs of non-alphanumeric or '.'
# characters are replaced with a single '-'. Additionally any
# spaces in the version string become '.'
name = _FILESAFE.sub('-', name)
version = _FILESAFE.sub('-', version.replace(' ', '.'))
return '%s-%s' % (name, version)
class LegacyMetadata(object):
"""The legacy metadata of a release.
Supports versions 1.0, 1.1 and 1.2 (auto-detected). You can
instantiate the class with one of these arguments (or none):
- *path*, the path to a METADATA file
- *fileobj* give a file-like object with METADATA as content
- *path*, the path to a metadata file
- *fileobj*, a file-like object with metadata as content
- *mapping* is a dict-like object
- *scheme* is a version scheme name
"""
# TODO document that execution_context and platform_dependent are used
# to filter on query, not when setting a key
# also document the mapping API and UNKNOWN default key
# TODO document the mapping API and UNKNOWN default key
def __init__(self, path=None, platform_dependent=False,
execution_context=None, fileobj=None, mapping=None,
def __init__(self, path=None, fileobj=None, mapping=None,
scheme='default'):
self._fields = {}
self.requires_files = []
self.docutils_support = _HAS_DOCUTILS
self.platform_dependent = platform_dependent
self.execution_context = execution_context
self._dependencies = None
self.scheme = scheme
if [path, fileobj, mapping].count(None) < 2:
raise TypeError('path, fileobj and mapping are exclusive')
self._fields = {}
self.requires_files = []
self._dependencies = None
self.scheme = scheme
if path is not None:
self.read(path)
elif fileobj is not None:
@ -275,8 +263,8 @@ class Metadata(object):
def set_metadata_version(self):
self._fields['Metadata-Version'] = _best_version(self._fields)
def _write_field(self, file, name, value):
file.write('%s: %s\n' % (name, value))
def _write_field(self, fileobj, name, value):
fileobj.write('%s: %s\n' % (name, value))
def __getitem__(self, name):
return self.get(name)
@ -306,38 +294,6 @@ class Metadata(object):
return []
return 'UNKNOWN'
def _check_rst_data(self, data):
"""Return warnings when the provided data has syntax errors."""
source_path = StringIO()
parser = Parser()
settings = frontend.OptionParser().get_default_values()
settings.tab_width = 4
settings.pep_references = None
settings.rfc_references = None
reporter = SilentReporter(source_path,
settings.report_level,
settings.halt_level,
stream=settings.warning_stream,
debug=settings.debug,
encoding=settings.error_encoding,
error_handler=settings.error_encoding_error_handler)
document = nodes.document(settings, reporter, source=source_path)
document.note_source(source_path, -1)
try:
parser.parse(data, document)
except AttributeError:
reporter.messages.append((-1, 'Could not finish the parsing.',
'', {}))
return reporter.messages
def _platform(self, value):
if not self.platform_dependent or ';' not in value:
return True, value
value, marker = value.split(';')
return interpret(marker, self.execution_context), value
def _remove_line_prefix(self, value):
return _LINE_PREFIX.sub('\n', value)
@ -346,69 +302,17 @@ class Metadata(object):
return self[name]
raise AttributeError(name)
def _get_dependencies(self):
def handle_req(req, rlist, extras):
if ';' not in req:
rlist.append(req)
else:
r, marker = req.split(';')
m = EXTRA_RE.search(marker)
if m:
extra = m.groups()[0][1:-1]
extras.setdefault(extra, []).append(r)
result = self._dependencies
if result is None:
self._dependencies = result = {}
extras = {}
setup_reqs = self['Setup-Requires-Dist']
if setup_reqs:
result['setup'] = setup_reqs
install_reqs = []
for req in self['Requires-Dist']:
handle_req(req, install_reqs, extras)
if install_reqs:
result['install'] = install_reqs
if extras:
result['extras'] = extras
return result
def _set_dependencies(self, value):
if 'test' in value:
value = dict(value) # don't change value passed in
value.setdefault('extras', {})['test'] = value.pop('test')
self._dependencies = value
setup_reqs = value.get('setup', [])
install_reqs = value.get('install', [])
klist = []
for k, rlist in value.get('extras', {}).items():
klist.append(k)
for r in rlist:
install_reqs.append('%s; extra == "%s"' % (r, k))
if setup_reqs:
self['Setup-Requires-Dist'] = setup_reqs
if install_reqs:
self['Requires-Dist'] = install_reqs
if klist:
self['Provides-Extra'] = klist
#
# Public API
#
dependencies = property(_get_dependencies, _set_dependencies)
# dependencies = property(_get_dependencies, _set_dependencies)
def get_fullname(self, filesafe=False):
"""Return the distribution name with version.
If filesafe is true, return a filename-escaped form."""
name, version = self['Name'], self['Version']
if filesafe:
# For both name and version any runs of non-alphanumeric or '.'
# characters are replaced with a single '-'. Additionally any
# spaces in the version string become '.'
name = _FILESAFE.sub('-', name)
version = _FILESAFE.sub('-', version.replace(' ', '.'))
return '%s-%s' % (name, version)
return _get_name_and_version(self['Name'], self['Version'], filesafe)
def is_field(self, name):
"""return True if name is a valid metadata key"""
@ -432,7 +336,10 @@ class Metadata(object):
msg = message_from_file(fileob)
self._fields['Metadata-Version'] = msg['metadata-version']
for field in _version2fieldlist(self['Metadata-Version']):
# When reading, get all the fields we can
for field in _ALL_FIELDS:
if field not in msg:
continue
if field in _LISTFIELDS:
# we can have multiple lines
values = msg.get_all(field)
@ -564,9 +471,6 @@ class Metadata(object):
return []
res = []
for val in value:
valid, val = self._platform(val)
if not valid:
continue
if name not in _LISTTUPLEFIELDS:
res.append(val)
else:
@ -575,17 +479,12 @@ class Metadata(object):
return res
elif name in _ELEMENTSFIELD:
valid, value = self._platform(self._fields[name])
if not valid:
return []
value = self._fields[name]
if isinstance(value, string_types):
return value.split(',')
valid, value = self._platform(self._fields[name])
if not valid:
return None
return value
return self._fields[name]
def check(self, strict=False, restructuredtext=False):
def check(self, strict=False):
"""Check if the metadata is compliant. If strict is True then raise if
no Name or Version are provided"""
self.set_metadata_version()
@ -605,9 +504,6 @@ class Metadata(object):
if attr not in self:
missing.append(attr)
if _HAS_DOCUTILS and restructuredtext:
warnings.extend(self._check_rst_data(self['Description']))
# checking metadata 1.2 (XXX needs to check 1.1, 1.0)
if self['Metadata-Version'] != '1.2':
return missing, warnings
@ -669,6 +565,8 @@ class Metadata(object):
('provides_dist', 'Provides-Dist'),
('obsoletes_dist', 'Obsoletes-Dist'),
('project_url', 'Project-URL'),
('maintainer', 'Maintainer'),
('maintainer_email', 'Maintainer-email'),
)
for key, field_name in mapping_1_2:
if not skip_missing or field_name in self._fields:
@ -683,11 +581,20 @@ class Metadata(object):
('requires', 'Requires'),
('obsoletes', 'Obsoletes'),
)
if not skip_missing or field_name in self._fields:
data[key] = self[field_name]
for key, field_name in mapping_1_1:
if not skip_missing or field_name in self._fields:
data[key] = self[field_name]
return data
def add_requirements(self, requirements):
if self['Metadata-Version'] == '1.1':
# we can't have 1.1 metadata *and* Setuptools requires
for field in ('Obsoletes', 'Requires', 'Provides'):
if field in self:
del self[field]
self['Requires-Dist'] += requirements
# Mapping API
# TODO could add iter* variants
@ -705,4 +612,402 @@ class Metadata(object):
return [(key, self[key]) for key in self.keys()]
def __repr__(self):
return '<Metadata %s %s>' % (self.name, self.version)
return '<%s %s %s>' % (self.__class__.__name__, self.name,
self.version)
METADATA_FILENAME = 'pydist.json'
class Metadata(object):
"""
The metadata of a release. This implementation uses 2.0 (JSON)
metadata where possible. If not possible, it wraps a LegacyMetadata
instance which handles the key-value metadata format.
"""
METADATA_VERSION_MATCHER = re.compile('^\d+(\.\d+)*$')
NAME_MATCHER = re.compile('^[0-9A-Z]([0-9A-Z_.-]*[0-9A-Z])?$', re.I)
VERSION_MATCHER = PEP426_VERSION_RE
SUMMARY_MATCHER = re.compile('.{1,2047}')
METADATA_VERSION = '2.0'
GENERATOR = 'distlib (%s)' % __version__
MANDATORY_KEYS = {
'name': (),
'version': (),
'summary': ('legacy',),
}
INDEX_KEYS = 'name version license summary description'
DEPENDENCY_KEYS = ('extras run_requires test_requires build_requires '
'dev_requires provides meta_requires obsoleted_by '
'supports_environments')
SYNTAX_VALIDATORS = {
'metadata_version': (METADATA_VERSION_MATCHER, ()),
'name': (NAME_MATCHER, ('legacy',)),
'version': (VERSION_MATCHER, ('legacy',)),
'summary': (SUMMARY_MATCHER, ('legacy',)),
}
__slots__ = ('_legacy', '_data', 'scheme')
def __init__(self, path=None, fileobj=None, mapping=None,
scheme='default'):
if [path, fileobj, mapping].count(None) < 2:
raise TypeError('path, fileobj and mapping are exclusive')
self._legacy = None
self._data = None
self.scheme = scheme
#import pdb; pdb.set_trace()
if mapping is not None:
try:
self._validate_mapping(mapping, scheme)
self._data = mapping
except MetadataUnrecognizedVersionError:
self._legacy = LegacyMetadata(mapping=mapping, scheme=scheme)
self.validate()
else:
data = None
if path:
with open(path, 'rb') as f:
data = f.read()
elif fileobj:
data = fileobj.read()
if data is None:
# Initialised with no args - to be added
self._data = {
'metadata_version': self.METADATA_VERSION,
'generator': self.GENERATOR,
}
else:
if not isinstance(data, text_type):
data = data.decode('utf-8')
try:
self._data = json.loads(data)
self._validate_mapping(self._data, scheme)
except ValueError:
# Note: MetadataUnrecognizedVersionError does not
# inherit from ValueError (it's a DistlibException,
# which should not inherit from ValueError).
# The ValueError comes from the json.load - if that
# succeeds and we get a validation error, we want
# that to propagate
self._legacy = LegacyMetadata(fileobj=StringIO(data),
scheme=scheme)
self.validate()
common_keys = set(('name', 'version', 'license', 'keywords', 'summary'))
none_list = (None, list)
none_dict = (None, dict)
mapped_keys = {
'run_requires': ('Requires-Dist', list),
'build_requires': ('Setup-Requires-Dist', list),
'dev_requires': none_list,
'test_requires': none_list,
'meta_requires': none_list,
'extras': ('Provides-Extra', list),
'modules': none_list,
'namespaces': none_list,
'exports': none_dict,
'commands': none_dict,
'classifiers': ('Classifier', list),
'source_url': ('Download-URL', None),
'metadata_version': ('Metadata-Version', None),
}
del none_list, none_dict
def __getattribute__(self, key):
common = object.__getattribute__(self, 'common_keys')
mapped = object.__getattribute__(self, 'mapped_keys')
if key in mapped:
lk, maker = mapped[key]
if self._legacy:
if lk is None:
result = None if maker is None else maker()
else:
result = self._legacy.get(lk)
else:
value = None if maker is None else maker()
result = self._data.get(key, value)
elif key not in common:
result = object.__getattribute__(self, key)
elif self._legacy:
result = self._legacy.get(key)
else:
result = self._data.get(key)
return result
def _validate_value(self, key, value, scheme=None):
if key in self.SYNTAX_VALIDATORS:
pattern, exclusions = self.SYNTAX_VALIDATORS[key]
if (scheme or self.scheme) not in exclusions:
m = pattern.match(value)
if not m:
raise MetadataInvalidError('%r is an invalid value for '
'the %r property' % (value,
key))
def __setattr__(self, key, value):
self._validate_value(key, value)
common = object.__getattribute__(self, 'common_keys')
mapped = object.__getattribute__(self, 'mapped_keys')
if key in mapped:
lk, _ = mapped[key]
if self._legacy:
if lk is None:
raise NotImplementedError
self._legacy[lk] = value
else:
self._data[key] = value
elif key not in common:
object.__setattr__(self, key, value)
else:
if key == 'keywords':
if isinstance(value, string_types):
value = value.strip()
if value:
value = value.split()
else:
value = []
if self._legacy:
self._legacy[key] = value
else:
self._data[key] = value
@property
def name_and_version(self):
return _get_name_and_version(self.name, self.version, True)
@property
def provides(self):
if self._legacy:
result = self._legacy['Provides-Dist']
else:
result = self._data.setdefault('provides', [])
s = '%s (%s)' % (self.name, self.version)
if s not in result:
result.append(s)
return result
@provides.setter
def provides(self, value):
if self._legacy:
self._legacy['Provides-Dist'] = value
else:
self._data['provides'] = value
def get_requirements(self, reqts, extras=None, env=None):
"""
Base method to get dependencies, given a set of extras
to satisfy and an optional environment context.
:param reqts: A list of sometimes-wanted dependencies,
perhaps dependent on extras and environment.
:param extras: A list of optional components being requested.
:param env: An optional environment for marker evaluation.
"""
if self._legacy:
result = reqts
else:
result = []
extras = get_extras(extras or [], self.extras)
for d in reqts:
if 'extra' not in d and 'environment' not in d:
# unconditional
include = True
else:
if 'extra' not in d:
# Not extra-dependent - only environment-dependent
include = True
else:
include = d.get('extra') in extras
if include:
# Not excluded because of extras, check environment
marker = d.get('environment')
if marker:
include = interpret(marker, env)
if include:
result.extend(d['requires'])
for key in ('build', 'dev', 'test'):
e = ':%s:' % key
if e in extras:
extras.remove(e)
# A recursive call, but it should terminate since the meta
# extra just processed has been removed from the extras
reqts = self._data.get('%s_requires' % key, [])
result.extend(self.get_requirements(reqts, extras=extras,
env=env))
return result
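
A sketch of the requirement entries this method consumes; the dict shapes
('requires', 'extra', 'environment') come straight from the code above, and
the package names are illustrative:

    from pip.vendor.distlib.metadata import Metadata

    md = Metadata(mapping={'metadata_version': '2.0', 'name': 'demo',
                           'version': '0.1', 'summary': 'A demo package'})
    reqts = [
        {'requires': ['requests (>= 1.0)']},              # unconditional
        {'requires': ['pytest'], 'extra': 'testing'},     # only with [testing]
        {'requires': ['pywin32'],
         'environment': 'sys_platform == "win32"'},       # marker-gated
    ]
    # 'testing' is undeclared in md.extras, so a warning is logged,
    # but the requested extra is still honoured
    md.get_requirements(reqts, extras=['testing'])
    # -> ['requests (>= 1.0)', 'pytest'], plus 'pywin32' on Windows only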
@property
def dictionary(self):
if self._legacy:
return self._from_legacy()
return self._data
@property
def dependencies(self):
if self._legacy:
raise NotImplementedError
else:
return extract_by_key(self._data, self.DEPENDENCY_KEYS)
@dependencies.setter
def dependencies(self, value):
if self._legacy:
raise NotImplementedError
else:
self._data.update(value)
def _validate_mapping(self, mapping, scheme):
if mapping.get('metadata_version') != self.METADATA_VERSION:
raise MetadataUnrecognizedVersionError()
missing = []
for key, exclusions in self.MANDATORY_KEYS.items():
if key not in mapping:
if scheme not in exclusions:
missing.append(key)
if missing:
msg = 'Missing metadata items: %s' % ', '.join(missing)
raise MetadataMissingError(msg)
for k, v in mapping.items():
self._validate_value(k, v, scheme)
def validate(self):
if self._legacy:
missing, warnings = self._legacy.check(True)
if missing or warnings:
logger.warning('Metadata: missing: %s, warnings: %s',
missing, warnings)
else:
self._validate_mapping(self._data, self.scheme)
def todict(self):
if self._legacy:
return self._legacy.todict(True)
else:
result = extract_by_key(self._data, self.INDEX_KEYS)
return result
def _from_legacy(self):
assert self._legacy and not self._data
result = {
'metadata_version': self.METADATA_VERSION,
'generator': self.GENERATOR,
}
lmd = self._legacy.todict(True) # skip missing ones
for k in ('name', 'version', 'license', 'summary', 'description',
'classifier'):
if k in lmd:
if k == 'classifier':
nk = 'classifiers'
else:
nk = k
result[nk] = lmd[k]
kw = lmd.get('Keywords', [])
if kw == ['']:
kw = []
result['keywords'] = kw
keys = (('requires_dist', 'run_requires'),
('setup_requires_dist', 'build_requires'))
for ok, nk in keys:
if ok in lmd and lmd[ok]:
result[nk] = [{'requires': lmd[ok]}]
result['provides'] = self.provides
author = {}
maintainer = {}
return result
LEGACY_MAPPING = {
'name': 'Name',
'version': 'Version',
'license': 'License',
'summary': 'Summary',
'description': 'Description',
'classifiers': 'Classifier',
}
def _to_legacy(self):
def process_entries(entries):
reqts = set()
for e in entries:
extra = e.get('extra')
env = e.get('environment')
rlist = e['requires']
for r in rlist:
if not env and not extra:
reqts.add(r)
else:
marker = ''
if extra:
marker = 'extra == "%s"' % extra
if env:
if marker:
marker = '(%s) and %s' % (env, marker)
else:
marker = env
reqts.add(';'.join((r, marker)))
return reqts
assert self._data and not self._legacy
result = LegacyMetadata()
nmd = self._data
for nk, ok in self.LEGACY_MAPPING.items():
if nk in nmd:
result[ok] = nmd[nk]
r1 = process_entries(self.run_requires + self.meta_requires)
r2 = process_entries(self.build_requires + self.dev_requires)
if self.extras:
result['Provides-Extra'] = sorted(self.extras)
result['Requires-Dist'] = sorted(r1)
result['Setup-Requires-Dist'] = sorted(r2)
# TODO: other fields such as contacts
return result
def write(self, path=None, fileobj=None, legacy=False, skip_unknown=True):
if [path, fileobj].count(None) != 1:
raise ValueError('Exactly one of path and fileobj is needed')
self.validate()
if legacy:
if self._legacy:
legacy_md = self._legacy
else:
legacy_md = self._to_legacy()
if path:
legacy_md.write(path, skip_unknown=skip_unknown)
else:
legacy_md.write_file(fileobj, skip_unknown=skip_unknown)
else:
if self._legacy:
d = self._from_legacy()
else:
d = self._data
if fileobj:
json.dump(d, fileobj, ensure_ascii=True, indent=2,
sort_keys=True)
else:
with codecs.open(path, 'w', 'utf-8') as f:
json.dump(d, f, ensure_ascii=True, indent=2,
sort_keys=True)
def add_requirements(self, requirements):
if self._legacy:
self._legacy.add_requirements(requirements)
else:
self._data.setdefault('run_requires', []).extend(requirements)
def __repr__(self):
name = self.name or '(no name)'
version = self.version or 'no version'
return '<%s %s %s (%s)>' % (self.__class__.__name__,
self.metadata_version, name, version)
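
To make the dual-format behaviour concrete, a hedged sketch: a mapping that
declares metadata_version 2.0 is handled natively, while anything else falls
back to a wrapped LegacyMetadata (file names and field values are
illustrative; LegacyMetadata's attribute-style access is assumed unchanged
from upstream):

    from pip.vendor.distlib.compat import StringIO
    from pip.vendor.distlib.metadata import Metadata

    md = Metadata(mapping={'metadata_version': '2.0', 'name': 'demo',
                           'version': '0.1', 'summary': 'A demo package'})
    md.name_and_version    # 'demo-0.1' (filename-escaped)
    md.provides            # ['demo (0.1)'], synthesised when absent
    md.write(path='pydist.json')              # JSON (METADATA_FILENAME) form
    md.write(path='PKG-INFO', legacy=True)    # converted via _to_legacy()

    legacy = Metadata(fileobj=StringIO('Metadata-Version: 1.1\n'
                                       'Name: demo\nVersion: 0.1\n'))
    legacy.name            # 'demo', served by the wrapped LegacyMetadata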

View file

@ -10,8 +10,10 @@ import bisect
import io
import logging
import os
import pkgutil
import shutil
import sys
import types
import zipimport
from . import DistlibException
@ -19,6 +21,7 @@ from .util import cached_property, get_cache_base, path_to_cache_dir
logger = logging.getLogger(__name__)
class Cache(object):
"""
A class implementing a cache for resources that need to live in the file system
@ -101,42 +104,51 @@ class Cache(object):
cache = Cache()
class ResourceBase(object):
def __init__(self, finder, name):
self.finder = finder
self.name = name
class Resource(ResourceBase):
"""
A class representing an in-package resource, such as a data file. This is
not normally instantiated by user code, but rather by a
:class:`ResourceFinder` which manages the resource.
"""
is_container = False # Backwards compatibility
is_container = False # Backwards compatibility
def as_stream(self):
"Get the resource as a stream. Not a property, as not idempotent."
"""
Get the resource as a stream.
This is not a property to make it obvious that it returns a new stream
each time.
"""
return self.finder.get_stream(self)
@cached_property
def file_path(self):
return cache.get(self)
@cached_property
def bytes(self):
return self.finder.get_bytes(self)
@cached_property
def size(self):
return self.finder.get_size(self)
class ResourceContainer(ResourceBase):
is_container = True # Backwards compatibility
is_container = True # Backwards compatibility
@cached_property
def resources(self):
return self.finder.get_resources(self)
class ResourceFinder(object):
"""
Resource finder for file system resources.
@ -149,7 +161,7 @@ class ResourceFinder(object):
def _make_path(self, resource_name):
parts = resource_name.split('/')
parts.insert(0, self.base)
return os.path.join(*parts)
return os.path.realpath(os.path.join(*parts))
def _find(self, path):
return os.path.exists(path)
@ -186,9 +198,10 @@ class ResourceFinder(object):
def is_container(self, resource):
return self._is_directory(resource.path)
_is_directory = staticmethod(os.path.isdir)
class ZipResourceFinder(ResourceFinder):
"""
Resource finder for resources in .zip files.
@ -209,7 +222,7 @@ class ZipResourceFinder(ResourceFinder):
if path in self._files:
result = True
else:
if path[-1] != os.sep:
if path and path[-1] != os.sep:
path = path + os.sep
i = bisect.bisect(self.index, path)
try:
@ -239,7 +252,7 @@ class ZipResourceFinder(ResourceFinder):
def get_resources(self, resource):
path = resource.path[self.prefix_len:]
if path[-1] != os.sep:
if path and path[-1] != os.sep:
path += os.sep
plen = len(path)
result = set()
@ -254,7 +267,7 @@ class ZipResourceFinder(ResourceFinder):
def _is_directory(self, path):
path = path[self.prefix_len:]
if path[-1] != os.sep:
if path and path[-1] != os.sep:
path += os.sep
i = bisect.bisect(self.index, path)
try:
@ -271,14 +284,17 @@ _finder_registry = {
try:
import _frozen_importlib
_finder_registry[_frozen_importlib.SourceFileLoader] = ResourceFinder
_finder_registry[_frozen_importlib.FileFinder] = ResourceFinder
except (ImportError, AttributeError):
pass
def register_finder(loader, finder_maker):
_finder_registry[type(loader)] = finder_maker
_finder_cache = {}
def finder(package):
"""
Return a resource finder for a package.
@ -302,3 +318,26 @@ def finder(package):
result = finder_maker(module)
_finder_cache[package] = result
return result
_dummy_module = types.ModuleType(str('__dummy__'))
def finder_for_path(path):
"""
Return a resource finder for a path, which should represent a container.
:param path: The path.
:return: A :class:`ResourceFinder` instance for the path.
"""
result = None
# calls any path hooks, gets importer into cache
pkgutil.get_importer(path)
loader = sys.path_importer_cache.get(path)
finder = _finder_registry.get(type(loader))
if finder:
module = _dummy_module
module.__file__ = os.path.join(path, '')
module.__loader__ = loader
result = finder(module)
return result
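
A short sketch of both entry points; finder() works from a package name,
while the new finder_for_path() works from a filesystem path such as a zip
container (the archive path is hypothetical; ResourceFinder.find and the
Resource properties are assumed unchanged from upstream):

    from pip.vendor.distlib.resources import finder, finder_for_path

    f = finder('pip')                  # finder for an importable package
    r = f.find('baseparser.py')        # a Resource, or None when absent
    if r is not None:
        size, data = r.size, r.bytes

    zf = finder_for_path('/path/to/archive.zip')    # hypothetical archive
    if zf is not None:
        member = zf.find('some/member.txt')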

View file

@ -4,22 +4,40 @@
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
from io import BytesIO
import logging
import os
import re
import struct
import sys
from . import DistlibException
from .compat import sysconfig, fsencode, detect_encoding
from .compat import sysconfig, fsencode, detect_encoding, ZipFile
from .resources import finder
from .util import FileOperator, get_export_entry, convert_path, get_executable
logger = logging.getLogger(__name__)
_DEFAULT_MANIFEST = '''
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
<assemblyIdentity version="1.0.0.0"
processorArchitecture="X86"
name="%s"
type="win32"/>
<!-- Identify the application security requirements. -->
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
<security>
<requestedPrivileges>
<requestedExecutionLevel level="asInvoker" uiAccess="false"/>
</requestedPrivileges>
</security>
</trustInfo>
</assembly>'''.strip()
# check if Python is called on the first line with this expression
FIRST_LINE_RE = re.compile(b'^#!.*pythonw?[0-9.]*([ \t].*)?$')
SCRIPT_TEMPLATE = '''%(shebang)s
SCRIPT_TEMPLATE = '''# -*- coding: utf-8 -*-
if __name__ == '__main__':
import sys, re
@ -33,7 +51,7 @@ if __name__ == '__main__':
return result
try:
sys.argv[0] = re.sub('-script.pyw?$', '', sys.argv[0])
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
func = _resolve('%(module)s', '%(func)s')
rc = func() # None interpreted as 0
@ -59,32 +77,33 @@ class ScriptMaker(object):
self.target_dir = target_dir
self.add_launchers = add_launchers
self.force = False
self.clobber = False
self.set_mode = False
self.variants = set(('', 'X.Y'))
self._fileop = fileop or FileOperator(dry_run)
def _get_alternate_executable(self, executable, flags):
if 'gui' in flags and os.name == 'nt':
def _get_alternate_executable(self, executable, options):
if options.get('gui', False) and os.name == 'nt':
dn, fn = os.path.split(executable)
fn = fn.replace('python', 'pythonw')
executable = os.path.join(dn, fn)
return executable
def _get_shebang(self, encoding, post_interp=b'', flags=None):
def _get_shebang(self, encoding, post_interp=b'', options=None):
if self.executable:
executable = self.executable
elif not sysconfig.is_python_build():
executable = get_executable()
elif hasattr(sys, 'base_prefix') and sys.prefix != sys.base_prefix:
executable = os.path.join(
sysconfig.get_path('scripts'),
'python%s' % sysconfig.get_config_var('EXE'))
executable = os.path.join(sysconfig.get_path('scripts'),
'python%s' % sysconfig.get_config_var('EXE'))
else:
executable = os.path.join(
sysconfig.get_config_var('BINDIR'),
'python%s%s' % (sysconfig.get_config_var('VERSION'),
sysconfig.get_config_var('EXE')))
if flags:
executable = self._get_alternate_executable(executable, flags)
if options:
executable = self._get_alternate_executable(executable, options)
executable = fsencode(executable)
shebang = b'#!' + executable + post_interp + b'\n'
@ -110,39 +129,87 @@ class ScriptMaker(object):
'from the script encoding (%r)' % (shebang, encoding))
return shebang
def _get_script_text(self, shebang, entry):
return self.script_template % dict(shebang=shebang,
module=entry.prefix,
def _get_script_text(self, entry):
return self.script_template % dict(module=entry.prefix,
func=entry.suffix)
def _make_script(self, entry, filenames):
shebang = self._get_shebang('utf-8', flags=entry.flags).decode('utf-8')
script = self._get_script_text(shebang, entry)
outname = os.path.join(self.target_dir, entry.name)
manifest = _DEFAULT_MANIFEST
def get_manifest(self, exename):
base = os.path.basename(exename)
return self.manifest % base
def _write_script(self, names, shebang, script_bytes, filenames, ext):
use_launcher = self.add_launchers and os.name == 'nt'
if use_launcher:
exename = '%s.exe' % outname
if 'gui' in entry.flags:
ext = 'pyw'
launcher = self._get_launcher('w')
else:
ext = 'py'
linesep = os.linesep.encode('utf-8')
if not use_launcher:
script_bytes = shebang + linesep + script_bytes
else:
if ext == 'py':
launcher = self._get_launcher('t')
outname = '%s-script.%s' % (outname, ext)
self._fileop.write_text_file(outname, script, 'utf-8')
if self.set_mode:
self._fileop.set_executable_mode([outname])
filenames.append(outname)
if use_launcher:
self._fileop.write_binary_file(exename, launcher)
filenames.append(exename)
else:
launcher = self._get_launcher('w')
stream = BytesIO()
with ZipFile(stream, 'w') as zf:
zf.writestr('__main__.py', script_bytes)
zip_data = stream.getvalue()
script_bytes = launcher + shebang + linesep + zip_data
for name in names:
outname = os.path.join(self.target_dir, name)
if use_launcher:
n, e = os.path.splitext(outname)
if e.startswith('.py'):
outname = n
outname = '%s.exe' % outname
try:
self._fileop.write_binary_file(outname, script_bytes)
except Exception:
# Failed writing an executable - it might be in use.
logger.warning('Failed to write executable - trying to '
'use .deleteme logic')
dfname = '%s.deleteme' % outname
if os.path.exists(dfname):
os.remove(dfname) # Not allowed to fail here
os.rename(outname, dfname) # nor here
self._fileop.write_binary_file(outname, script_bytes)
logger.debug('Able to replace executable using '
'.deleteme logic')
try:
os.remove(dfname)
except Exception:
pass # still in use - ignore error
else:
if os.name == 'nt' and not outname.endswith('.' + ext):
outname = '%s.%s' % (outname, ext)
if os.path.exists(outname) and not self.clobber:
logger.warning('Skipping existing file %s', outname)
continue
self._fileop.write_binary_file(outname, script_bytes)
if self.set_mode:
self._fileop.set_executable_mode([outname])
filenames.append(outname)
def _make_script(self, entry, filenames, options=None):
shebang = self._get_shebang('utf-8', options=options)
script = self._get_script_text(entry).encode('utf-8')
name = entry.name
scriptnames = set()
if '' in self.variants:
scriptnames.add(name)
if 'X' in self.variants:
scriptnames.add('%s%s' % (name, sys.version[0]))
if 'X.Y' in self.variants:
scriptnames.add('%s-%s' % (name, sys.version[:3]))
if options and options.get('gui', False):
ext = 'pyw'
else:
ext = 'py'
self._write_script(scriptnames, shebang, script, filenames, ext)
def _copy_script(self, script, filenames):
adjust = False
script = convert_path(script)
script = os.path.join(self.source_dir, convert_path(script))
outname = os.path.join(self.target_dir, os.path.basename(script))
filenames.append(outname)
script = os.path.join(self.source_dir, script)
if not self.force and not self._fileop.newer(script, outname):
logger.debug('not copying %s (up-to-date)', script)
return
@ -174,31 +241,22 @@ class ScriptMaker(object):
if f:
f.close()
self._fileop.copy_file(script, outname)
if self.set_mode:
self._fileop.set_executable_mode([outname])
filenames.append(outname)
else:
logger.info('copying and adjusting %s -> %s', script,
self.target_dir)
if not self._fileop.dry_run:
shebang = self._get_shebang(encoding, post_interp)
use_launcher = self.add_launchers and os.name == 'nt'
if use_launcher:
n, e = os.path.splitext(outname)
exename = n + '.exe'
if b'pythonw' in first_line:
launcher = self._get_launcher('w')
suffix = '-script.pyw'
else:
launcher = self._get_launcher('t')
suffix = '-script.py'
outname = n + suffix
filenames[-1] = outname
self._fileop.write_binary_file(outname, shebang + f.read())
if use_launcher:
self._fileop.write_binary_file(exename, launcher)
filenames.append(exename)
if b'pythonw' in first_line:
ext = 'pyw'
else:
ext = 'py'
n = os.path.basename(outname)
self._write_script([n], shebang, f.read(), filenames, ext)
if f:
f.close()
if self.set_mode:
self._fileop.set_executable_mode([outname])
@property
def dry_run(self):
@ -223,7 +281,7 @@ class ScriptMaker(object):
# Public API follows
def make(self, specification):
def make(self, specification, options=None):
"""
Make a script.
@ -231,17 +289,18 @@ class ScriptMaker(object):
entry specification (to make a script from a
callable) or a filename (to make a script by
copying from a source location).
:return: A list of all absolute pathnames written to,
:param options: A dictionary of options controlling script generation.
:return: A list of all absolute pathnames written to.
"""
filenames = []
entry = get_export_entry(specification)
if entry is None:
self._copy_script(specification, filenames)
else:
self._make_script(entry, filenames)
self._make_script(entry, filenames, options=options)
return filenames
def make_multiple(self, specifications):
def make_multiple(self, specifications, options=None):
"""
Take a list of specifications and make scripts from them.
:param specifications: A list of specifications.
@ -249,5 +308,5 @@ class ScriptMaker(object):
"""
filenames = []
for specification in specifications:
filenames.extend(self.make(specification))
filenames.extend(self.make(specification, options))
return filenames
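
A sketch of the reworked API: the old flags argument became an options dict,
and the variants set controls which script names are written (on Windows,
add_launchers now appends a zipped __main__.py to the launcher stub). The
paths and entry specification below are illustrative:

    from pip.vendor.distlib.scripts import ScriptMaker

    maker = ScriptMaker(source_dir=None, target_dir='/tmp/bin')
    maker.variants = set(['', 'X.Y'])    # write e.g. 'tool' and 'tool-2.7'
    maker.clobber = True                 # overwrite any existing scripts
    written = maker.make('tool = toolpkg.cli:main', options={'gui': False})
    print(written)                       # absolute paths of all files written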

BIN  pip/vendor/distlib/t32.exe vendored (new binary file, not shown)

BIN  pip/vendor/distlib/t64.exe vendored (new binary file, not shown)

View file

@ -16,27 +16,22 @@ import re
import shutil
import socket
import ssl
import subprocess
import sys
import tarfile
import tempfile
import threading
import time
import zipfile
from . import DistlibException
from .compat import (string_types, text_type, shutil, raw_input,
from .compat import (string_types, text_type, shutil, raw_input, StringIO,
cache_from_source, urlopen, httplib, xmlrpclib, splittype,
HTTPHandler, HTTPSHandler as BaseHTTPSHandler,
URLError, match_hostname, CertificateError)
BaseConfigurator, valid_ident, Container, configparser,
URLError, match_hostname, CertificateError, ZipFile)
logger = logging.getLogger(__name__)
class Container(object):
"""
A generic container for when multiple values need to be returned
"""
def __init__(self, **kwargs):
self.__dict__.update(kwargs)
#
# Requirement parsing code for name + optional constraints + optional extras
#
@ -50,21 +45,26 @@ COMMA = r'\s*,\s*'
COMMA_RE = re.compile(COMMA)
IDENT = r'(\w|[.-])+'
RELOP = '([<>=!]=)|[<>]'
EXTRA_IDENT = r'(\*|:(\*|\w+):|' + IDENT + ')'
VERSPEC = IDENT + r'\*?'
RELOP = '([<>=!~]=)|[<>]'
#
# The first relop is optional - if absent, will be taken as '=='
# The first relop is optional - if absent, will be taken as '~='
#
BARE_CONSTRAINTS = ('(' + RELOP + r')?\s*(' + IDENT + ')(' + COMMA + '(' +
RELOP + r')\s*(' + IDENT + '))*')
BARE_CONSTRAINTS = ('(' + RELOP + r')?\s*(' + VERSPEC + ')(' + COMMA + '(' +
RELOP + r')\s*(' + VERSPEC + '))*')
DIRECT_REF = '(from\s+(?P<diref>.*))'
#
# Either the bare constraints or the bare constraints in parentheses
#
CONSTRAINTS = (r'\(\s*(?P<c1>' + BARE_CONSTRAINTS + r')\s*\)|(?P<c2>' +
BARE_CONSTRAINTS + '\s*)')
CONSTRAINTS = (r'\(\s*(?P<c1>' + BARE_CONSTRAINTS + '|' + DIRECT_REF +
r')\s*\)|(?P<c2>' + BARE_CONSTRAINTS + '\s*)')
EXTRA_LIST = IDENT + '(' + COMMA + IDENT + ')*'
EXTRA_LIST = EXTRA_IDENT + '(' + COMMA + EXTRA_IDENT + ')*'
EXTRAS = r'\[\s*(?P<ex>' + EXTRA_LIST + r')?\s*\]'
REQUIREMENT = ('(?P<dn>' + IDENT + r')\s*(' + EXTRAS + r'\s*)?(\s*' +
CONSTRAINTS + ')?$')
@ -73,7 +73,7 @@ REQUIREMENT_RE = re.compile(REQUIREMENT)
#
# Used to scan through the constraints
#
RELOP_IDENT = '(?P<op>' + RELOP + r')\s*(?P<vn>' + IDENT + ')'
RELOP_IDENT = '(?P<op>' + RELOP + r')\s*(?P<vn>' + VERSPEC + ')'
RELOP_IDENT_RE = re.compile(RELOP_IDENT)
def parse_requirement(s):
@ -88,13 +88,19 @@ def parse_requirement(s):
d = m.groupdict()
name = d['dn']
cons = d['c1'] or d['c2']
if not d['diref']:
url = None
else:
# direct reference
cons = None
url = d['diref'].strip()
if not cons:
cons = None
constr = ''
rs = d['dn']
else:
if cons[0] not in '<>!=':
cons = '==' + cons
cons = '~=' + cons
iterator = RELOP_IDENT_RE.finditer(cons)
cons = [get_constraint(m) for m in iterator]
rs = '%s (%s)' % (name, ', '.join(['%s %s' % con for con in cons]))
@ -103,7 +109,7 @@ def parse_requirement(s):
else:
extras = COMMA_RE.split(d['ex'])
result = Container(name=name, constraints=cons, extras=extras,
requirement=rs, source=s)
requirement=rs, source=s, url=url)
return result
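
The practical upshot of the grammar changes, sketched against the Container
fields returned above (name, constraints, extras, requirement, source, url);
the import path assumes the vendored layout:

    from pip.vendor.distlib.util import parse_requirement

    r = parse_requirement('foo[bar] (>= 1.0, != 1.3, < 2.0)')
    r.name, r.extras    # 'foo', ['bar']
    r.constraints       # [('>=', '1.0'), ('!=', '1.3'), ('<', '2.0')]

    r = parse_requirement('foo (1.0)')
    r.constraints       # [('~=', '1.0')]: a bare version now means
                        # 'compatible with', no longer '=='

    r = parse_requirement('foo (from http://example.com/foo-1.0.tar.gz)')
    r.url               # the direct reference; constraints is None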
@ -168,6 +174,71 @@ def proceed(prompt, allowed_chars, error_prompt=None, default=None):
p = '%c: %s\n%s' % (c, error_prompt, prompt)
return c
def extract_by_key(d, keys):
if isinstance(keys, string_types):
keys = keys.split()
result = {}
for key in keys:
if key in d:
result[key] = d[key]
return result
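
For example:

    extract_by_key({'name': 'demo', 'version': '0.1', 'license': 'MIT'},
                   'name version')    # -> {'name': 'demo', 'version': '0.1'}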
def read_exports(stream):
if sys.version_info[0] >= 3:
# needs to be a text stream
stream = codecs.getreader('utf-8')(stream)
# Try to load as JSON, falling back on legacy format
data = stream.read()
stream = StringIO(data)
try:
data = json.load(stream)
result = data['exports']
for group, entries in result.items():
for k, v in entries.items():
s = '%s = %s' % (k, v)
entry = get_export_entry(s)
assert entry is not None
entries[k] = entry
return result
except Exception:
stream.seek(0, 0)
cp = configparser.ConfigParser()
if hasattr(cp, 'read_file'):
cp.read_file(stream)
else:
cp.readfp(stream)
result = {}
for key in cp.sections():
result[key] = entries = {}
for name, value in cp.items(key):
s = '%s = %s' % (name, value)
entry = get_export_entry(s)
assert entry is not None
#entry.dist = self
entries[name] = entry
return result
def write_exports(exports, stream):
if sys.version_info[0] >= 3:
# needs to be a text stream
stream = codecs.getwriter('utf-8')(stream)
cp = configparser.ConfigParser()
for k, v in exports.items():
# TODO check k, v for valid values
cp.add_section(k)
for entry in v.values():
if entry.suffix is None:
s = entry.prefix
else:
s = '%s:%s' % (entry.prefix, entry.suffix)
if entry.flags:
s = '%s [%s]' % (s, ', '.join(entry.flags))
cp.set(k, entry.name, s)
cp.write(stream)
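
A round-trip sketch of the two helpers (ExportEntry's name/prefix/suffix
attributes are used exactly as in write_exports above; the entry itself is
illustrative):

    import io
    from pip.vendor.distlib.util import (get_export_entry, read_exports,
                                         write_exports)

    exports = {'console_scripts':
               {'tool': get_export_entry('tool = toolpkg.cli:main')}}
    buf = io.BytesIO()
    write_exports(exports, buf)    # INI form, one section per export group
    buf.seek(0)
    entry = read_exports(buf)['console_scripts']['tool']
    entry.prefix, entry.suffix     # ('toolpkg.cli', 'main')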
@contextlib.contextmanager
def tempdir():
td = tempfile.mkdtemp()
@ -202,7 +273,7 @@ class cached_property(object):
#for attr in ('__name__', '__module__', '__doc__'):
# setattr(self, attr, getattr(func, attr, None))
def __get__(self, obj, type=None):
def __get__(self, obj, cls=None):
if obj is None:
return self
value = self.func(obj)
@ -272,16 +343,22 @@ class FileOperator(object):
return os.stat(source).st_mtime > os.stat(target).st_mtime
def copy_file(self, infile, outfile):
def copy_file(self, infile, outfile, check=True):
"""Copy a file respecting dry-run and force flags.
"""
assert not os.path.isdir(outfile)
self.ensure_dir(os.path.dirname(outfile))
logger.info('Copying %s to %s', infile, outfile)
if not self.dry_run:
msg = None
if check:
if os.path.islink(outfile):
msg = '%s is a symlink' % outfile
elif os.path.exists(outfile) and not os.path.isfile(outfile):
msg = '%s is a non-regular file' % outfile
if msg:
raise ValueError(msg + ' which would be overwritten')
shutil.copyfile(infile, outfile)
if self.record:
self.files_written.add(outfile)
self.record_as_written(outfile)
def copy_stream(self, instream, outfile, encoding=None):
assert not os.path.isdir(outfile)
@ -296,24 +373,21 @@ class FileOperator(object):
shutil.copyfileobj(instream, outstream)
finally:
outstream.close()
if self.record:
self.files_written.add(outfile)
self.record_as_written(outfile)
def write_binary_file(self, path, data):
self.ensure_dir(os.path.dirname(path))
if not self.dry_run:
with open(path, 'wb') as f:
f.write(data)
if self.record:
self.files_written.add(path)
self.record_as_written(path)
def write_text_file(self, path, data, encoding):
self.ensure_dir(os.path.dirname(path))
if not self.dry_run:
with open(path, 'wb') as f:
f.write(data.encode(encoding))
if self.record:
self.files_written.add(path)
self.record_as_written(path)
def set_mode(self, bits, mask, files):
if os.name == 'posix':
@ -351,9 +425,8 @@ class FileOperator(object):
else:
assert path.startswith(prefix)
diagpath = path[len(prefix):]
py_compile.compile(path, dpath, diagpath, True) # raise on error
if self.record:
self.files_written.add(dpath)
py_compile.compile(path, dpath, diagpath, True) # raise error
self.record_as_written(dpath)
return dpath
def ensure_removed(self, path):
@ -414,7 +487,7 @@ class FileOperator(object):
assert flist == ['__pycache__']
sd = os.path.join(d, flist[0])
os.rmdir(sd)
os.rmdir(d) # should fail if non-empty
os.rmdir(d) # should fail if non-empty
self._init_record()
def resolve(module_name, dotted_path):
@ -577,8 +650,8 @@ def is_string_sequence(seq):
return result
PROJECT_NAME_AND_VERSION = re.compile('([a-z0-9_]+([.-][a-z_][a-z0-9_]*)*)-'
'([0-9][a-z0-9_.+-]*)', re.I)
PYTHON_VERSION = re.compile(r'-py(\d\.?\d?)$')
'([a-z0-9_.+-]+)', re.I)
PYTHON_VERSION = re.compile(r'-py(\d\.?\d?)')
def split_filename(filename, project_name=None):
@ -604,6 +677,46 @@ def split_filename(filename, project_name=None):
result = m.group(1), m.group(3), pyver
return result
# Allow spaces in name because of legacy dists like "Twisted Core"
NAME_VERSION_RE = re.compile(r'(?P<name>[\w .-]+)\s*'
r'\(\s*(?P<ver>[^\s)]+)\)$')
def parse_name_and_version(p):
"""
A utility function used to get a name and version from a string
such as a Provides-Dist value.
:param p: A value in the form 'foo (1.0)'
:return: The name and version as a tuple.
"""
m = NAME_VERSION_RE.match(p)
if not m:
raise DistlibException('Ill-formed name/version string: \'%s\'' % p)
d = m.groupdict()
return d['name'].strip().lower(), d['ver']
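
For example:

    parse_name_and_version('Twisted Core (12.0.1)')    # ('twisted core', '12.0.1')
    parse_name_and_version('zope.interface (4.0.5)')   # ('zope.interface', '4.0.5')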
def get_extras(requested, available):
result = set()
requested = set(requested or [])
available = set(available or [])
if '*' in requested:
requested.remove('*')
result |= available
for r in requested:
if r == '-':
result.add(r)
elif r.startswith('-'):
unwanted = r[1:]
if unwanted not in available:
logger.warning('undeclared extra: %s' % unwanted)
if unwanted in result:
result.remove(unwanted)
else:
if r not in available:
logger.warning('undeclared extra: %s' % r)
result.add(r)
return result
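
For example ('*' requests everything declared, and a '-' prefix subtracts):

    get_extras(['*', '-tests'], ['docs', 'tests'])    # -> set(['docs'])
    get_extras(['other'], ['docs'])    # warns 'undeclared extra', but
                                       # still returns set(['other'])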
#
# Extended metadata functionality
#
@ -634,39 +747,11 @@ def get_project_data(name):
result = _get_external_data(url)
return result
def get_package_data(dist):
name, version = dist.name, dist.version
def get_package_data(name, version):
url = ('https://www.red-dove.com/pypi/projects/'
'%s/%s/package-%s.json' % (name[0].upper(), name, version))
result = _get_external_data(url)
if 'metadata' in result and dist.metadata:
update_metadata(dist.metadata, result)
return result
return _get_external_data(url)
RENAMES = { # Temporary
'classifiers': 'Classifier',
'use_2to3': None,
'use_2to3_fixers': None,
'test_suite': None,
}
def update_metadata(metadata, pkginfo):
# update dist's metadata from received package data
assert metadata
assert 'metadata' in pkginfo
for k, v in pkginfo['metadata'].items():
k = k.replace('-', '_')
k = RENAMES.get(k, k)
if k is not None:
metadata[k] = v
metadata.set_metadata_version()
if 'requirements' in pkginfo:
metadata.dependencies = pkginfo['requirements']
#
# Simple event pub/sub
#
class EventMixin(object):
"""
@ -744,13 +829,26 @@ class Sequencer(object):
def __init__(self):
self._preds = {}
self._succs = {}
self._nodes = set() # nodes with no preds/succs
self._nodes = set() # nodes with no preds/succs
def add_node(self, node):
self._nodes.add(node)
def remove_node(self, node):
self._nodes.remove(node)
def remove_node(self, node, edges=False):
if node in self._nodes:
self._nodes.remove(node)
if edges:
for p in set(self._preds.get(node, ())):
self.remove(p, node)
for s in set(self._succs.get(node, ())):
self.remove(node, s)
# Remove empties
for k, v in list(self._preds.items()):
if not v:
del self._preds[k]
for k, v in list(self._succs.items()):
if not v:
del self._succs[k]
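
A quick sketch of the extended remove_node (add() is the upstream Sequencer
method for introducing an edge, assumed unchanged here):

    from pip.vendor.distlib.util import Sequencer

    seq = Sequencer()
    seq.add('build', 'test')
    seq.add('test', 'release')
    # drop the node and both of its edges; emptied predecessor/successor
    # entries are pruned so the graph stays clean
    seq.remove_node('test', edges=True)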
def add(self, pred, succ):
assert pred != succ
@ -897,7 +995,7 @@ def unarchive(archive_filename, dest_dir, format=None, check=True):
raise ValueError('Unknown format for %r' % archive_filename)
try:
if format == 'zip':
archive = zipfile.ZipFile(archive_filename, 'r')
archive = ZipFile(archive_filename, 'r')
if check:
names = archive.namelist()
for name in names:
@ -927,7 +1025,7 @@ def zip_dir(directory):
"""zip a directory tree into a BytesIO object"""
result = io.BytesIO()
dlen = len(directory)
with zipfile.ZipFile(result, "w") as zf:
with ZipFile(result, "w") as zf:
for root, dirs, files in os.walk(directory):
for name in files:
full = os.path.join(root, name)
@ -942,6 +1040,7 @@ def zip_dir(directory):
UNITS = ('', 'K', 'M', 'G','T','P')
class Progress(object):
unknown = 'UNKNOWN'
@ -1075,8 +1174,8 @@ def _iglob(path_glob):
radical = radical.lstrip('\\')
for path, dir, files in os.walk(prefix):
path = os.path.normpath(path)
for file in _iglob(os.path.join(path, radical)):
yield file
for fn in _iglob(os.path.join(path, radical)):
yield fn
@ -1120,6 +1219,7 @@ class HTTPSConnection(httplib.HTTPSConnection):
if self.ca_certs and self.check_domain:
try:
match_hostname(self.sock.getpeercert(), self.host)
logger.debug('Host verified: %s', self.host)
except CertificateError:
self.sock.shutdown(socket.SHUT_RDWR)
self.sock.close()
@ -1273,7 +1373,7 @@ class CSVBase(object):
class CSVReader(CSVBase):
def __init__(self, fn, **kwargs):
def __init__(self, **kwargs):
if 'stream' in kwargs:
stream = kwargs['stream']
if sys.version_info[0] >= 3:
@ -1281,7 +1381,7 @@ class CSVReader(CSVBase):
stream = codecs.getreader('utf-8')(stream)
self.stream = stream
else:
self.stream = _csv_open(fn, 'r')
self.stream = _csv_open(kwargs['path'], 'r')
self.reader = csv.reader(self.stream, **self.defaults)
def __iter__(self):
@ -1311,3 +1411,107 @@ class CSVWriter(CSVBase):
r.append(item)
row = r
self.writer.writerow(row)
#
# Configurator functionality
#
class Configurator(BaseConfigurator):
value_converters = dict(BaseConfigurator.value_converters)
value_converters['inc'] = 'inc_convert'
def __init__(self, config, base=None):
super(Configurator, self).__init__(config)
self.base = base or os.getcwd()
def configure_custom(self, config):
def convert(o):
if isinstance(o, (list, tuple)):
result = type(o)([convert(i) for i in o])
elif isinstance(o, dict):
if '()' in o:
result = self.configure_custom(o)
else:
result = {}
for k in o:
result[k] = convert(o[k])
else:
result = self.convert(o)
return result
c = config.pop('()')
if not callable(c):
c = self.resolve(c)
props = config.pop('.', None)
# Check for valid identifiers
args = config.pop('[]', ())
if args:
args = tuple([convert(o) for o in args])
items = [(k, convert(config[k])) for k in config if valid_ident(k)]
kwargs = dict(items)
result = c(*args, **kwargs)
if props:
for n, v in props.items():
setattr(result, n, convert(v))
return result
def __getitem__(self, key):
result = self.config[key]
if isinstance(result, dict) and '()' in result:
self.config[key] = result = self.configure_custom(result)
return result
def inc_convert(self, value):
"""Default converter for the inc:// protocol."""
if not os.path.isabs(value):
value = os.path.join(self.base, value)
with codecs.open(value, 'r', encoding='utf-8') as f:
result = json.load(f)
return result
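
How the pieces fit together: '()' names a factory resolved by dotted name,
and '[]' (an extension over the stdlib BaseConfigurator) supplies positional
arguments. A hedged sketch, with an arbitrary factory:

    from pip.vendor.distlib.util import Configurator

    config = {
        'main': {
            '()': 'collections.OrderedDict',    # factory, resolved lazily
            '[]': [[('a', 1), ('b', 2)]],       # positional arguments
        }
    }
    cfg = Configurator(config)
    cfg['main']    # -> OrderedDict([('a', 1), ('b', 2)]), built on first access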
#
# Mixin for running subprocesses and capturing their output
#
class SubprocessMixin(object):
def __init__(self, verbose=False, progress=None):
self.verbose = verbose
self.progress = progress
def reader(self, stream, context):
"""
Read lines from a subprocess' output stream and either pass to a progress
callable (if specified) or write progress information to sys.stderr.
"""
progress = self.progress
verbose = self.verbose
while True:
s = stream.readline()
if not s:
break
if progress is not None:
progress(s, context)
else:
if not verbose:
sys.stderr.write('.')
else:
sys.stderr.write(s.decode('utf-8'))
sys.stderr.flush()
stream.close()
def run_command(self, cmd, **kwargs):
p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, **kwargs)
t1 = threading.Thread(target=self.reader, args=(p.stdout, 'stdout'))
t1.start()
t2 = threading.Thread(target=self.reader, args=(p.stderr, 'stderr'))
t2.start()
p.wait()
t1.join()
t2.join()
if self.progress is not None:
self.progress('done.', 'main')
elif self.verbose:
sys.stderr.write('done.\n')
return p
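
A minimal sketch of the mixin in use; with verbose=False each output line is
reduced to a dot on sys.stderr, and the finished Popen object is returned:

    import sys
    from pip.vendor.distlib.util import SubprocessMixin

    class Runner(SubprocessMixin):
        pass

    r = Runner(verbose=False)
    p = r.run_command([sys.executable, '--version'])
    print(p.returncode)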

View file

@ -8,6 +8,7 @@ Implementation of a flexible versioning scheme providing support for PEP-386,
distribute-compatible and semantic versioning.
"""
import logging
import re
from .compat import string_types
@ -15,37 +16,17 @@ from .compat import string_types
__all__ = ['NormalizedVersion', 'NormalizedMatcher',
'LegacyVersion', 'LegacyMatcher',
'SemanticVersion', 'SemanticMatcher',
'AdaptiveVersion', 'AdaptiveMatcher',
'UnsupportedVersionError', 'HugeMajorVersionError',
'suggest_normalized_version', 'suggest_semantic_version',
'suggest_adaptive_version',
'normalized_key', 'legacy_key', 'semantic_key', 'adaptive_key',
'get_scheme']
'UnsupportedVersionError', 'get_scheme']
class UnsupportedVersionError(Exception):
logger = logging.getLogger(__name__)
class UnsupportedVersionError(ValueError):
"""This is an unsupported version."""
pass
class HugeMajorVersionError(UnsupportedVersionError):
"""An irrational version because the major version number is huge
(often because a year or date was used).
See `error_on_huge_major_num` option in `NormalizedVersion` for details.
This guard can be disabled by setting that option False.
"""
pass
class _Common(object):
def __repr__(self):
return "%s(%r)" % (self.__class__.__name__, self._string)
def __str__(self):
return self._string
class Version(_Common):
class Version(object):
def __init__(self, s):
self._string = s = s.strip()
self._parts = parts = self.parse(s)
@ -83,30 +64,41 @@ class Version(_Common):
def __hash__(self):
return hash(self._parts)
def __repr__(self):
return "%s('%s')" % (self.__class__.__name__, self._string)
def __str__(self):
return self._string
@property
def is_prerelease(self):
raise NotImplementedError('Please implement in subclasses.')
class Matcher(_Common):
class Matcher(object):
version_class = None
predicate_re = re.compile(r"^(\w[\s\w'.-]*)(\((.*)\))?")
constraint_re = re.compile(r'^(<=|>=|<|>|!=|==)?\s*([^\s,]+)$')
dist_re = re.compile(r"^(\w[\s\w'.-]*)(\((.*)\))?")
comp_re = re.compile(r'^(<=|>=|<|>|!=|==|~=)?\s*([^\s,]+)$')
num_re = re.compile(r'^\d+(\.\d+)*$')
# value is either a callable or the name of a method
_operators = {
"<": lambda x, y: x < y,
">": lambda x, y: x > y,
"<=": lambda x, y: x == y or x < y,
">=": lambda x, y: x == y or x > y,
"==": lambda x, y: x == y,
"!=": lambda x, y: x != y,
'<': lambda v, c, p: v < c,
'>': lambda v, c, p: v > c,
'<=': lambda v, c, p: v == c or v < c,
'>=': lambda v, c, p: v == c or v > c,
'==': lambda v, c, p: v == c,
# by default, compatible => >=.
'~=': lambda v, c, p: v == c or v > c,
'!=': lambda v, c, p: v != c,
}
def __init__(self, s):
if self.version_class is None:
raise ValueError('Please specify a version class')
self._string = s = s.strip()
m = self.predicate_re.match(s)
m = self.dist_re.match(s)
if not m:
raise ValueError('Not valid: %r' % s)
groups = m.groups('')
@ -116,19 +108,47 @@ class Matcher(_Common):
if groups[2]:
constraints = [c.strip() for c in groups[2].split(',')]
for c in constraints:
m = self.constraint_re.match(c)
m = self.comp_re.match(c)
if not m:
raise ValueError('Invalid %r in %r' % (c, s))
groups = m.groups('==')
clist.append((groups[0], self.version_class(groups[1])))
groups = m.groups()
op = groups[0] or '~='
s = groups[1]
if s.endswith('.*'):
if op not in ('==', '!='):
raise ValueError('\'.*\' not allowed for '
'%r constraints' % op)
# Could be a partial version (e.g. for '2.*') which
# won't parse as a version, so keep it as a string
vn, prefix = s[:-2], True
if not self.num_re.match(vn):
# Just to check that vn is a valid version
self.version_class(vn)
else:
# Should parse as a version, so we can create an
# instance for the comparison
vn, prefix = self.version_class(s), False
clist.append((op, vn, prefix))
self._parts = tuple(clist)
def match(self, version):
"""Check if the provided version matches the constraints."""
"""
Check if the provided version matches the constraints.
:param version: The version to match against this instance.
:type version: String or :class:`Version` instance.
"""
if isinstance(version, string_types):
version = self.version_class(version)
for operator, constraint in self._parts:
if not self._operators[operator](version, constraint):
for operator, constraint, prefix in self._parts:
f = self._operators.get(operator)
if isinstance(f, string_types):
f = getattr(self, f)
if not f:
msg = ('%r not implemented '
'for %s' % (operator, self.__class__.__name__))
raise NotImplementedError(msg)
if not f(version, constraint, prefix):
return False
return True
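
A hedged sketch of the new constraint forms (this assumes the normalized
scheme's matcher dispatches '==' and '~=' through the prefix-aware helpers
added below, as in upstream distlib):

    from pip.vendor.distlib.version import get_scheme

    scheme = get_scheme('normalized')
    m = scheme.matcher('foo (== 1.4.*)')    # '.*' only valid with == and !=
    m.match('1.4.2')    # True: shares the '1.4' prefix
    m.match('1.5.0')    # False
    # a bare version now defaults to '~=' (compatible), not '==':
    scheme.matcher('foo (1.4.1)').match('1.4.2')    # True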
@ -145,7 +165,7 @@ class Matcher(_Common):
def __eq__(self, other):
self._check_compatible(other)
return (self.key == other.key and self._parts == other._parts)
return self.key == other.key and self._parts == other._parts
def __ne__(self, other):
return not self.__eq__(other)
@ -154,104 +174,18 @@ class Matcher(_Common):
def __hash__(self):
return hash(self.key) + hash(self._parts)
# A marker used in the second and third parts of the `parts` tuple, for
# versions that don't have those segments, to sort properly. An example
# of versions in sort order ('highest' last):
# 1.0b1 ((1,0), ('b',1), ('z',))
# 1.0.dev345 ((1,0), ('z',), ('dev', 345))
# 1.0 ((1,0), ('z',), ('z',))
# 1.0.post256.dev345 ((1,0), ('z',), ('z', 'post', 256, 'dev', 345))
# 1.0.post345 ((1,0), ('z',), ('z', 'post', 345, 'z'))
# ^ ^ ^
# 'b' < 'z' ---------------------/ | |
# | |
# 'dev' < 'z' ----------------------------/ |
# |
# 'dev' < 'z' ----------------------------------------------/
# 'f' for 'final' would be kind of nice, but due to bugs in the support of
# 'rc' we must use 'z'
_FINAL_MARKER = ('z',)
def __repr__(self):
return "%s(%r)" % (self.__class__.__name__, self._string)
_VERSION_RE = re.compile(r'''
^
(?P<version>\d+\.\d+(\.\d+)*) # minimum 'N.N'
(?:
(?P<prerel>[abc]|rc) # 'a'=alpha, 'b'=beta, 'c'=release candidate
# 'rc'= alias for release candidate
(?P<prerelversion>\d+(?:\.\d+)*)
)?
(?P<postdev>(\.post(?P<post>\d+))?(\.dev(?P<dev>\d+))?)?
$''', re.VERBOSE)
def __str__(self):
return self._string
def _parse_numdots(s, full_ver, drop_zeroes=False, min_length=0):
"""Parse 'N.N.N' sequences, return a list of ints.
@param s {str} 'N.N.N...' sequence to be parsed
@param full_ver_str {str} The full version string from which this
comes. Used for error strings.
@param min_length {int} The length to which to pad the
returned list with zeros, if necessary. Default 0.
"""
result = []
for n in s.split("."):
#if len(n) > 1 and n[0] == '0':
# raise UnsupportedVersionError("cannot have leading zero in "
# "version number segment: '%s' in %r" % (n, full_ver))
result.append(int(n))
if drop_zeroes:
while (result and result[-1] == 0 and
(1 + len(result)) > min_length):
result.pop()
return result
def pep386_key(s, fail_on_huge_major_ver=True):
"""Parses a string version into parts using PEP-386 logic."""
match = _VERSION_RE.search(s)
if not match:
raise UnsupportedVersionError(s)
groups = match.groupdict()
parts = []
# main version
block = _parse_numdots(groups['version'], s, min_length=2)
parts.append(tuple(block))
# prerelease
prerel = groups.get('prerel')
if prerel is not None:
block = [prerel]
block += _parse_numdots(groups.get('prerelversion'), s, min_length=1)
parts.append(tuple(block))
else:
parts.append(_FINAL_MARKER)
# postdev
if groups.get('postdev'):
post = groups.get('post')
dev = groups.get('dev')
postdev = []
if post is not None:
postdev.extend((_FINAL_MARKER[0], 'post', int(post)))
if dev is None:
postdev.append(_FINAL_MARKER[0])
if dev is not None:
postdev.extend(('dev', int(dev)))
parts.append(tuple(postdev))
else:
parts.append(_FINAL_MARKER)
if fail_on_huge_major_ver and parts[0][0] > 1980:
raise HugeMajorVersionError("huge major version number, %r, "
"which might cause future problems: %r" % (parts[0][0], s))
return tuple(parts)
PEP426_VERSION_RE = re.compile(r'^(\d+\.\d+(\.\d+)*)((a|b|c|rc)(\d+))?'
r'(\.(post)(\d+))?(\.(dev)(\d+))?$')
PEP426_VERSION_RE = re.compile('^(\d+\.\d+(\.\d+)*)((a|b|c|rc)(\d+))?'
'(\.(post)(\d+))?(\.(dev)(\d+))?$')
def pep426_key(s, _=None):
def _pep426_key(s):
s = s.strip()
m = PEP426_VERSION_RE.match(s)
if not m:
@ -280,9 +214,9 @@ def pep426_key(s, _=None):
# either before pre-release, or final release and after
if not post and dev:
# before pre-release
pre = ('a', -1) # to sort before a0
pre = ('a', -1) # to sort before a0
else:
pre = ('z',) # to sort after all pre-releases
pre = ('z',) # to sort after all pre-releases
# now look at the state of post and dev.
if not post:
post = ('_',) # sort before 'a'
@ -293,7 +227,8 @@ def pep426_key(s, _=None):
return nums, pre, post, dev
normalized_key = pep426_key
_normalized_key = _pep426_key
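The sentinel values give PEP 426-style ordering; a hedged spot-check, assuming NormalizedVersion (defined below) compares via this key:

    vs = ['1.0.dev1', '1.0a1', '1.0rc1', '1.0', '1.0.post1']
    sorted(vs, key=NormalizedVersion) == vs   # True: dev < pre-release < final < post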
class NormalizedVersion(Version):
"""A rational version.
@ -313,7 +248,16 @@ class NormalizedVersion(Version):
1.2a # release level must have a release serial
1.2.3b
"""
def parse(self, s): return normalized_key(s)
def parse(self, s):
result = _normalized_key(s)
# _normalized_key loses trailing zeroes in the release
# clause, since that's needed to ensure that X.Y == X.Y.0 == X.Y.0.0
# However, PEP 440 prefix matching needs it: for example,
# (~= 1.4.5.0) matches differently to (~= 1.4.5.0.0).
m = PEP426_VERSION_RE.match(s) # must succeed
groups = m.groups()
self._release_clause = tuple(int(v) for v in groups[0].split('.'))
return result
PREREL_TAGS = set(['a', 'b', 'c', 'rc', 'dev'])
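A hedged illustration of the release-clause caveat noted in parse() above:

    NormalizedVersion('1.4.5.0')._release_clause   # (1, 4, 5, 0)
    # so '~= 1.4.5.0' prefix-matches on '1.4.5', while '~= 1.4.5'
    # prefix-matches on '1.4' -- see _match_compatible below.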
@ -321,31 +265,76 @@ class NormalizedVersion(Version):
def is_prerelease(self):
return any(t[0] in self.PREREL_TAGS for t in self._parts)
class UnlimitedMajorVersion(Version):
def parse(self, s): return normalized_key(s, False)
# We want '2.5' to match '2.5.4' but not '2.50'.
def _match_at_front(x, y):
if x == y:
return True
def _match_prefix(x, y):
x = str(x)
y = str(y)
if x == y:
return True
if not x.startswith(y):
return False
n = len(y)
return x[n] == '.'
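For example, using the values from the comment above:

    _match_prefix('2.5.4', '2.5')   # True: character after the prefix is '.'
    _match_prefix('2.50', '2.5')    # False: '2.50' is not in the 2.5 series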
class NormalizedMatcher(Matcher):
version_class = NormalizedVersion
_operators = dict(Matcher._operators)
_operators.update({
"<=": lambda x, y: _match_at_front(x, y) or x < y,
">=": lambda x, y: _match_at_front(x, y) or x > y,
"==": lambda x, y: _match_at_front(x, y),
"!=": lambda x, y: not _match_at_front(x, y),
})
# value is either a callable or the name of a method
_operators = {
'~=': '_match_compatible',
'<': '_match_lt',
'>': '_match_gt',
'<=': '_match_le',
'>=': '_match_ge',
'==': '_match_eq',
'!=': '_match_ne',
}
def _match_lt(self, version, constraint, prefix):
if version >= constraint:
return False
release_clause = constraint._release_clause
pfx = '.'.join([str(i) for i in release_clause])
return not _match_prefix(version, pfx)
def _match_gt(self, version, constraint, prefix):
if version <= constraint:
return False
release_clause = constraint._release_clause
pfx = '.'.join([str(i) for i in release_clause])
return not _match_prefix(version, pfx)
def _match_le(self, version, constraint, prefix):
return version <= constraint
def _match_ge(self, version, constraint, prefix):
return version >= constraint
def _match_eq(self, version, constraint, prefix):
if not prefix:
result = (version == constraint)
else:
result = _match_prefix(version, constraint)
return result
def _match_ne(self, version, constraint, prefix):
if not prefix:
result = (version != constraint)
else:
result = not _match_prefix(version, constraint)
return result
def _match_compatible(self, version, constraint, prefix):
if version == constraint:
return True
if version < constraint:
return False
release_clause = constraint._release_clause
if len(release_clause) > 1:
release_clause = release_clause[:-1]
pfx = '.'.join([str(i) for i in release_clause])
return _match_prefix(version, pfx)
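A hedged sketch of the resulting '~=' (compatible release) semantics:

    m = NormalizedMatcher('foo (~= 1.4.5)')
    m.match('1.4.9')   # True: >= 1.4.5 and within the '1.4' prefix
    m.match('1.5.0')   # False: outside the '1.4' series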
_REPLACEMENTS = (
(re.compile('[.+-]$'), ''), # remove trailing puncts
@ -363,7 +352,7 @@ _REPLACEMENTS = (
_SUFFIX_REPLACEMENTS = (
(re.compile('^[:~._+-]+'), ''), # remove leading puncts
(re.compile('[,*")([\]]'), ''), # remove unwanted chars
(re.compile('[,*")([\]]'), ''), # remove unwanted chars
(re.compile('[~:+_ -]'), '.'), # replace illegal chars
(re.compile('[.]{2,}'), '.'), # multiple runs of '.'
(re.compile(r'\.$'), ''), # trailing '.'
@ -371,10 +360,11 @@ _SUFFIX_REPLACEMENTS = (
_NUMERIC_PREFIX = re.compile(r'(\d+(\.\d+)*)')
def suggest_semantic_version(s):
def _suggest_semantic_version(s):
"""
Try to suggest a semantic form for a version for which
suggest_normalized_version couldn't come up with anything.
_suggest_normalized_version couldn't come up with anything.
"""
result = s.strip().lower()
for pat, repl in _REPLACEMENTS:
@ -417,7 +407,7 @@ def suggest_semantic_version(s):
return result
def suggest_normalized_version(s):
def _suggest_normalized_version(s):
"""Suggest a normalized version close to the given version string.
If you have a version string that isn't rational (i.e. NormalizedVersion
@ -435,7 +425,7 @@ def suggest_normalized_version(s):
@returns A rational version string, or None, if couldn't determine one.
"""
try:
normalized_key(s)
_normalized_key(s)
return s # already rational
except UnsupportedVersionError:
pass
@ -522,37 +512,34 @@ def suggest_normalized_version(s):
rs = re.sub(r"p(\d+)$", r".post\1", rs)
try:
normalized_key(rs)
_normalized_key(rs)
except UnsupportedVersionError:
rs = None
return rs
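Two hedged examples (the first follows directly from the early return above; the second assumes no earlier rewrite interferes with the p(\d+)$ substitution):

    _suggest_normalized_version('1.0')     # '1.0': already rational, returned as-is
    _suggest_normalized_version('1.0p1')   # likely '1.0.post1', via the 'p1' -> '.post1' rewrite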
def suggest_adaptive_version(s):
return suggest_normalized_version(s) or suggest_semantic_version(s)
#
# Legacy version processing (distribute-compatible)
#
_VERSION_PART = re.compile(r'([a-z]+|\d+|[\.-])', re.I)
_VERSION_REPLACE = {
'pre':'c',
'preview':'c',
'-':'final-',
'rc':'c',
'dev':'@',
'pre': 'c',
'preview': 'c',
'-': 'final-',
'rc': 'c',
'dev': '@',
'': None,
'.': None,
}
def legacy_key(s):
def _legacy_key(s):
def get_parts(s):
result = []
for p in _VERSION_PART.split(s.lower()):
p = _VERSION_REPLACE.get(p, p)
if p:
if '0' <= p[:1] <= '9':
if '0' <= p[:1] <= '9':
p = p.zfill(8)
else:
p = '*' + p
@ -571,8 +558,10 @@ def legacy_key(s):
result.append(p)
return tuple(result)
class LegacyVersion(Version):
def parse(self, s): return legacy_key(s)
def parse(self, s):
return _legacy_key(s)
PREREL_TAGS = set(
['*a', '*alpha', '*b', '*beta', '*c', '*rc', '*r', '*@', '*pre']
@ -582,9 +571,28 @@ class LegacyVersion(Version):
def is_prerelease(self):
return any(x in self.PREREL_TAGS for x in self._parts)
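The zfill(8) in get_parts() makes string comparison behave numerically; a quick hedged check:

    _legacy_key('10.0') > _legacy_key('9.0')   # True: '00000010' > '00000009'
    # without the padding, '10' would sort before '9' lexicographically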
class LegacyMatcher(Matcher):
version_class = LegacyVersion
_operators = dict(Matcher._operators)
_operators['~='] = '_match_compatible'
numeric_re = re.compile('^(\d+(\.\d+)*)')
def _match_compatible(self, version, constraint, prefix):
if version < constraint:
return False
m = self.numeric_re.match(str(constraint))
if not m:
logger.warning('Cannot compute compatible match for version %s '
'and constraint %s', version, constraint)
return True
s = m.groups()[0]
if '.' in s:
s = s.rsplit('.', 1)[0]
return _match_prefix(version, s)
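A hedged sketch of the legacy '~=' behavior implemented above (the prefix is computed by stripping the last numeric component of the constraint):

    m = LegacyMatcher('foo (~= 2.4)')
    m.match('2.9')   # True: not below 2.4, and within the computed '2' prefix
    m.match('3.0')   # False: outside the '2' series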
#
# Semantic versioning
#
@ -593,10 +601,12 @@ _SEMVER_RE = re.compile(r'^(\d+)\.(\d+)\.(\d+)'
r'(-[a-z0-9]+(\.[a-z0-9-]+)*)?'
r'(\+[a-z0-9]+(\.[a-z0-9-]+)*)?$', re.I)
def is_semver(s):
return _SEMVER_RE.match(s)
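For instance (truthiness of the returned match object):

    bool(is_semver('1.0.0-alpha.1+build.11'))   # True
    bool(is_semver('1.0'))                      # False: three numeric components required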
def semantic_key(s):
def _semantic_key(s):
def make_tuple(s, absent):
if s is None:
result = (absent,)
@ -607,7 +617,6 @@ def semantic_key(s):
result = tuple([p.zfill(8) if p.isdigit() else p for p in parts])
return result
result = None
m = is_semver(s)
if not m:
raise UnsupportedVersionError(s)
@ -615,11 +624,12 @@ def semantic_key(s):
major, minor, patch = [int(i) for i in groups[:3]]
# choose the '|' and '*' so that versions sort correctly
pre, build = make_tuple(groups[3], '|'), make_tuple(groups[5], '*')
return ((major, minor, patch), pre, build)
return (major, minor, patch), pre, build
class SemanticVersion(Version):
def parse(self, s): return semantic_key(s)
def parse(self, s):
return _semantic_key(s)
@property
def is_prerelease(self):
@ -629,42 +639,6 @@ class SemanticVersion(Version):
class SemanticMatcher(Matcher):
version_class = SemanticVersion
#
# Adaptive versioning. When handed a legacy version string, tries to
# determine a suggested normalized version, and work with that.
#
def adaptive_key(s):
try:
result = normalized_key(s, False)
except UnsupportedVersionError:
ss = suggest_normalized_version(s)
if ss is not None:
result = normalized_key(ss) # "guaranteed" to work
else:
ss = s # suggest_semantic_version(s) or s
result = semantic_key(ss) # let's hope ...
return result
class AdaptiveVersion(NormalizedVersion):
def parse(self, s): return adaptive_key(s)
@property
def is_prerelease(self):
try:
normalized_key(self._string)
not_sem = True
except UnsupportedVersionError:
ss = suggest_normalized_version(self._string)
not_sem = ss is not None
if not_sem:
return any(t[0] in self.PREREL_TAGS for t in self._parts)
return self._parts[1][0] != '|'
class AdaptiveMatcher(NormalizedMatcher):
version_class = AdaptiveVersion
class VersionScheme(object):
def __init__(self, key, matcher, suggester=None):
@ -702,16 +676,15 @@ class VersionScheme(object):
return result
_SCHEMES = {
'normalized': VersionScheme(normalized_key, NormalizedMatcher,
suggest_normalized_version),
'legacy': VersionScheme(legacy_key, LegacyMatcher, lambda self, s: s),
'semantic': VersionScheme(semantic_key, SemanticMatcher,
suggest_semantic_version),
'adaptive': VersionScheme(adaptive_key, AdaptiveMatcher,
suggest_adaptive_version),
'normalized': VersionScheme(_normalized_key, NormalizedMatcher,
_suggest_normalized_version),
'legacy': VersionScheme(_legacy_key, LegacyMatcher, lambda self, s: s),
'semantic': VersionScheme(_semantic_key, SemanticMatcher,
_suggest_semantic_version),
}
_SCHEMES['default'] = _SCHEMES['adaptive']
_SCHEMES['default'] = _SCHEMES['normalized']
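With the adaptive scheme dropped, a hedged usage sketch (assuming get_scheme returns the registered VersionScheme):

    scheme = get_scheme('default')                  # now the 'normalized' scheme
    scheme.matcher('foo (>= 1.0)').match('1.2')     # True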
def get_scheme(name):
if name not in _SCHEMES:

BIN
pip/vendor/distlib/w32.exe vendored Normal file

Binary file not shown.

BIN
pip/vendor/distlib/w64.exe vendored Normal file

Binary file not shown.

View file

@ -23,13 +23,12 @@ import sys
import tempfile
import zipfile
from . import DistlibException
from . import __version__, DistlibException
from .compat import sysconfig, ZipFile, fsdecode, text_type, filter
from .database import DistributionPath, InstalledDistribution
from .metadata import Metadata
from .scripts import ScriptMaker
from .database import InstalledDistribution
from .metadata import Metadata, METADATA_FILENAME
from .util import (FileOperator, convert_path, CSVReader, CSVWriter,
cached_property, get_cache_base)
cached_property, get_cache_base, read_exports)
logger = logging.getLogger(__name__)
@ -112,7 +111,9 @@ class Mounter(object):
raise ImportError('unable to find extension for %s' % fullname)
result = imp.load_dynamic(fullname, self.libs[fullname])
result.__loader__ = self
result.__package__, _ = fullname.rsplit('.', 1)
parts = fullname.rsplit('.', 1)
if len(parts) > 1:
result.__package__ = parts[0]
return result
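The new guard matters for top-level extension modules; this is plain str.rsplit behavior:

    'a.b.c'.rsplit('.', 1)   # ['a.b', 'c'] -> __package__ = 'a.b'
    'ext'.rsplit('.', 1)     # ['ext']      -> no parent package to record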
_hook = Mounter()
@ -123,7 +124,7 @@ class Wheel(object):
Class to build and install from Wheel files (PEP 427).
"""
wheel_version = (1, 0)
wheel_version = (1, 1)
hash_kind = 'sha256'
def __init__(self, filename=None, sign=False, verify=False):
@ -193,13 +194,16 @@ class Wheel(object):
pathname = os.path.join(self.dirname, self.filename)
name_ver = '%s-%s' % (self.name, self.version)
info_dir = '%s.dist-info' % name_ver
metadata_filename = posixpath.join(info_dir, 'METADATA')
wrapper = codecs.getreader('utf-8')
metadata_filename = posixpath.join(info_dir, METADATA_FILENAME)
with ZipFile(pathname, 'r') as zf:
with zf.open(metadata_filename) as bf:
wf = wrapper(bf)
result = Metadata()
result.read_file(wf)
try:
with zf.open(metadata_filename) as bf:
wf = wrapper(bf)
result = Metadata(fileobj=wf)
except KeyError:
raise ValueError('Invalid wheel, because %s is '
'missing' % METADATA_FILENAME)
return result
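A hedged usage sketch (hypothetical filename; assumes the code above backs the Wheel.metadata property):

    from distlib.wheel import Wheel
    w = Wheel('foo-1.0-py27-none-any.whl')
    md = w.metadata   # Metadata read from foo-1.0.dist-info/<METADATA_FILENAME>
    # raises ValueError if METADATA_FILENAME (pydist.json here) is missing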
@cached_property
@ -251,7 +255,7 @@ class Wheel(object):
p = to_posix(os.path.relpath(record_path, base))
writer.writerow((p, '', ''))
def build(self, paths, tags=None):
def build(self, paths, tags=None, wheel_version=None):
"""
Build a wheel from files in specified paths, and use any specified tags
when determining the name of the wheel.
@ -334,11 +338,9 @@ class Wheel(object):
ap = to_posix(os.path.join(info_dir, fn))
archive_paths.append((ap, p))
import distlib
wheel_metadata = [
'Wheel-Version: %d.%d' % self.wheel_version,
'Generator: distlib %s' % distlib.__version__,
'Wheel-Version: %d.%d' % (wheel_version or self.wheel_version),
'Generator: distlib %s' % __version__,
'Root-Is-Purelib: %s' % is_pure,
]
for pyver, abi, arch in self.tags:
@ -372,21 +374,31 @@ class Wheel(object):
zf.write(p, ap)
return pathname
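A hedged build sketch along the lines of distlib's documented usage (all paths hypothetical):

    w = Wheel()
    w.name, w.version, w.dirname = 'foo', '0.1', '/tmp'
    path = w.build({'purelib': '/path/to/purelib/files'})   # wheel_version defaults to (1, 1)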
def install(self, paths, dry_run=False, executable=None, warner=None):
def install(self, paths, maker, **kwargs):
"""
Install a wheel to the specified paths. If ``executable`` is specified,
it should be the Unicode absolute path to the executable written
into the shebang lines of any scripts installed. If ``warner`` is
Install a wheel to the specified paths. If kwarg ``warner`` is
specified, it should be a callable, which will be called with two
tuples indicating the wheel version of this software and the wheel
version in the file, if there is a discrepancy in the versions.
This can be used to issue any warnings or raise any exceptions.
If kwarg ``lib_only`` is True, only the purelib/platlib files are
installed, and the headers, scripts, data and dist-info metadata are
not written.
The return value is an :class:`InstalledDistribution` instance unless
``lib_only`` is True, in which case the return value is ``None``.
"""
dry_run = maker.dry_run
warner = kwargs.get('warner')
lib_only = kwargs.get('lib_only', False)
pathname = os.path.join(self.dirname, self.filename)
name_ver = '%s-%s' % (self.name, self.version)
data_dir = '%s.data' % name_ver
info_dir = '%s.dist-info' % name_ver
metadata_name = posixpath.join(info_dir, METADATA_FILENAME)
wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
record_name = posixpath.join(info_dir, 'RECORD')
@ -405,16 +417,20 @@ class Wheel(object):
libdir = paths['purelib']
else:
libdir = paths['platlib']
records = {}
with zf.open(record_name) as bf:
with CSVReader(record_name, stream=bf) as reader:
with CSVReader(stream=bf) as reader:
for row in reader:
p = row[0]
records[p] = row
data_pfx = posixpath.join(data_dir, '')
info_pfx = posixpath.join(info_dir, '')
script_pfx = posixpath.join(data_dir, 'scripts', '')
# make a new instance rather than a copy of maker's,
# as we mutate it
fileop = FileOperator(dry_run=dry_run)
fileop.record = True # so we can rollback if needed
@ -427,9 +443,8 @@ class Wheel(object):
# set target dir later
# we default add_launchers to False, as the
# Python Launcher should be used instead
maker = ScriptMaker(workdir, None, fileop=fileop,
add_launchers=False)
maker.executable = executable
maker.source_dir = workdir
maker.target_dir = None
try:
for zinfo in zf.infolist():
arcname = zinfo.filename
@ -437,6 +452,10 @@ class Wheel(object):
u_arcname = arcname
else:
u_arcname = arcname.decode('utf-8')
# The signature file won't be in RECORD,
# and we don't currently do anything with it
if u_arcname.endswith('/RECORD.jws'):
continue
row = records[u_arcname]
if row[2] and str(zinfo.file_size) != row[2]:
raise DistlibException('size mismatch for '
@ -450,6 +469,9 @@ class Wheel(object):
raise DistlibException('digest mismatch for '
'%s' % arcname)
if lib_only and u_arcname.startswith((info_pfx, data_pfx)):
logger.debug('lib_only: skipping %s', u_arcname)
continue
is_script = (u_arcname.startswith(script_pfx)
and not u_arcname.endswith('.exe'))
@ -495,22 +517,83 @@ class Wheel(object):
fileop.set_executable_mode(filenames)
outfiles.extend(filenames)
p = os.path.join(libdir, info_dir)
dist = InstalledDistribution(p)
if lib_only:
logger.debug('lib_only: returning None')
dist = None
else:
# Generate scripts
# Write SHARED
paths = dict(paths) # don't change passed in dict
del paths['purelib']
del paths['platlib']
paths['lib'] = libdir
p = dist.write_shared_locations(paths, dry_run)
outfiles.append(p)
# Try to get pydist.json so we can see if there are
# any commands to generate. If this fails (e.g. because
# of a legacy wheel), log a warning but don't give up.
commands = None
file_version = self.info['Wheel-Version']
if file_version == '1.0':
# Use legacy info
ep = posixpath.join(info_dir, 'entry_points.txt')
try:
with zf.open(ep) as bwf:
epdata = read_exports(bwf)
commands = {}
for key in ('console', 'gui'):
k = '%s_scripts' % key
if k in epdata:
commands['wrap_%s' % key] = d = {}
for v in epdata[k].values():
s = '%s:%s' % (v.prefix, v.suffix)
if v.flags:
s += ' %s' % v.flags
d[v.name] = s
except Exception:
logger.warning('Unable to read legacy script '
'metadata, so cannot generate '
'scripts')
else:
try:
with zf.open(metadata_name) as bwf:
wf = wrapper(bwf)
commands = json.load(wf).get('commands')
except Exception:
logger.warning('Unable to read JSON metadata, so '
'cannot generate scripts')
if commands:
console_scripts = commands.get('wrap_console', {})
gui_scripts = commands.get('wrap_gui', {})
if console_scripts or gui_scripts:
script_dir = paths.get('scripts', '')
if not os.path.isdir(script_dir):
raise ValueError('Valid script path not '
'specified')
maker.target_dir = script_dir
for k, v in console_scripts.items():
script = '%s = %s' % (k, v)
filenames = maker.make(script)
fileop.set_executable_mode(filenames)
# Write RECORD
dist.write_installed_files(outfiles, paths['prefix'],
dry_run)
if gui_scripts:
options = {'gui': True }
for k, v in gui_scripts.items():
script = '%s = %s' % (k, v)
filenames = maker.make(script, options)
fileop.set_executable_mode(filenames)
p = os.path.join(libdir, info_dir)
dist = InstalledDistribution(p)
# Write SHARED
paths = dict(paths) # don't change passed in dict
del paths['purelib']
del paths['platlib']
paths['lib'] = libdir
p = dist.write_shared_locations(paths, dry_run)
if p:
outfiles.append(p)
# Write RECORD
dist.write_installed_files(outfiles, paths['prefix'],
dry_run)
return dist
except Exception as e: # pragma: no cover
except Exception: # pragma: no cover
logger.exception('installation failed.')
fileop.rollback()
raise
@ -518,7 +601,7 @@ class Wheel(object):
shutil.rmtree(workdir)
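A hedged install sketch tying the pieces together (paths hypothetical; a ScriptMaker now supplies dry_run, with source_dir and target_dir filled in by install() itself):

    from distlib.scripts import ScriptMaker
    maker = ScriptMaker(None, None)   # dirs are set by install()
    paths = {'prefix': '/tmp/t', 'purelib': '/tmp/t/lib', 'platlib': '/tmp/t/lib',
             'scripts': '/tmp/t/bin', 'headers': '/tmp/t/hdr', 'data': '/tmp/t/data'}
    dist = w.install(paths, maker, warner=None)   # InstalledDistribution, or None with lib_only=True
    # script generation reads the 'commands' key of pydist.json; the shape
    # inferred from the code above is roughly (names hypothetical):
    #   {"wrap_console": {"foo": "foo.cli:main"},
    #    "wrap_gui":     {"foo-gui": "foo.gui:main"}}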
def _get_dylib_cache(self):
result = os.path.join(get_cache_base(), 'dylib-cache')
result = os.path.join(get_cache_base(), 'dylib-cache', sys.version[:3])
if not os.path.isdir(result):
os.makedirs(result)
return result
@ -625,6 +708,7 @@ COMPATIBLE_TAGS = compatible_tags()
del compatible_tags
def is_compatible(wheel, tags=None):
if not isinstance(wheel, Wheel):
wheel = Wheel(wheel) # assume it's a filename

pip/vendor/vendor.txt vendored
View file

@ -1,5 +1,5 @@
distlib==0.1.1
html5lib==1.0b1
six==1.3.0
colorama==0.2.7
requests==2.0.0
distlib==0.1.3
html5lib==1.0b1
six==1.3.0
colorama==0.2.7
requests==2.0.0