
Update package resource API to 49.6.0 (3d404fd).

tags/release_0.25.1
JackDandy, 5 years ago
commit 338b582327
  1. CHANGES.md (1)
  2. lib/pkg_resources/__init__.py (167)
  3. lib/pkg_resources/_vendor/packaging/__about__.py (16)
  4. lib/pkg_resources/_vendor/packaging/__init__.py (20)
  5. lib/pkg_resources/_vendor/packaging/_compat.py (16)
  6. lib/pkg_resources/_vendor/packaging/_structures.py (28)
  7. lib/pkg_resources/_vendor/packaging/_typing.py (48)
  8. lib/pkg_resources/_vendor/packaging/markers.py (139)
  9. lib/pkg_resources/_vendor/packaging/py.typed (0)
  10. lib/pkg_resources/_vendor/packaging/requirements.py (50)
  11. lib/pkg_resources/_vendor/packaging/specifiers.py (251)
  12. lib/pkg_resources/_vendor/packaging/tags.py (751)
  13. lib/pkg_resources/_vendor/packaging/utils.py (53)
  14. lib/pkg_resources/_vendor/packaging/version.py (290)
  15. lib/pkg_resources/_vendor/vendored.txt (4)
  16. lib/pkg_resources/api_tests.txt (401)
  17. lib/pkg_resources/extern/__init__.py (7)
  18. lib/pkg_resources/py31compat.py (23)

CHANGES.md (1)

@ -50,6 +50,7 @@
* Update hachoir_py3 3.0a6 (5b9e05a) to 3.1.2 (f739b43)
* Update hachoir_py2 2.0a6 (5b9e05a) to 2.1.2
* Update Js2Py 0.70 (f297498) to 0.70 (92250a4)
* Update package resource API to 49.6.0 (3d404fd)
* Update profilehooks module 1.11.2 (d72cc2b) to 1.12.0 (3ee1f60)
* Update Requests library 2.24.0 (1b41763) to 2.24.0 (967a05b)
* Update soupsieve_py3 2.0.0.final (e66c311) to 2.0.2.dev (05086ef)

lib/pkg_resources/__init__.py (167)

@ -55,7 +55,7 @@ except NameError:
FileExistsError = OSError
from pkg_resources.extern import six
from pkg_resources.extern.six.moves import urllib, map, filter
from pkg_resources.extern.six.moves import map, filter
# capture these to bypass sandboxing
from os import utime
@ -76,7 +76,6 @@ try:
except ImportError:
importlib_machinery = None
from . import py31compat
from pkg_resources.extern import appdirs
from pkg_resources.extern import packaging
__import__('pkg_resources.extern.packaging.version')
@ -88,8 +87,8 @@ __import__('pkg_resources.extern.packaging.markers')
__metaclass__ = type
if (3, 0) < sys.version_info < (3, 4):
raise RuntimeError("Python 3.4 or later is required")
if (3, 0) < sys.version_info < (3, 5):
raise RuntimeError("Python 3.5 or later is required")
if six.PY2:
# Those builtin exceptions are only defined in Python 3
@ -178,10 +177,10 @@ def get_supported_platform():
"""Return this platform's maximum compatible version.
distutils.util.get_platform() normally reports the minimum version
of Mac OS X that would be required to *use* extensions produced by
of macOS that would be required to *use* extensions produced by
distutils. But what we want when checking compatibility is to know the
version of Mac OS X that we are *running*. To allow usage of packages that
explicitly require a newer version of Mac OS X, we must also know the
version of macOS that we are *running*. To allow usage of packages that
explicitly require a newer version of macOS, we must also know the
current version of the OS.
If this condition occurs for any other platform with a version in its
@ -191,9 +190,9 @@ def get_supported_platform():
m = macosVersionString.match(plat)
if m is not None and sys.platform == "darwin":
try:
plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3))
plat = 'macosx-%s-%s' % ('.'.join(_macos_vers()[:2]), m.group(3))
except ValueError:
# not Mac OS X
# not macOS
pass
return plat
@ -333,7 +332,7 @@ class UnknownExtra(ResolutionError):
_provider_factories = {}
PY_MAJOR = sys.version[:3]
PY_MAJOR = '{}.{}'.format(*sys.version_info)
EGG_DIST = 3
BINARY_DIST = 2
SOURCE_DIST = 1
@ -364,7 +363,7 @@ def get_provider(moduleOrReq):
return _find_adapter(_provider_factories, loader)(module)
def _macosx_vers(_cache=[]):
def _macos_vers(_cache=[]):
if not _cache:
version = platform.mac_ver()[0]
# fallback for MacPorts
@ -380,7 +379,7 @@ def _macosx_vers(_cache=[]):
return _cache[0]
def _macosx_arch(machine):
def _macos_arch(machine):
return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine)
@ -388,18 +387,18 @@ def get_build_platform():
"""Return this platform's string for platform-specific distributions
XXX Currently this is the same as ``distutils.util.get_platform()``, but it
needs some hacks for Linux and Mac OS X.
needs some hacks for Linux and macOS.
"""
from sysconfig import get_platform
plat = get_platform()
if sys.platform == "darwin" and not plat.startswith('macosx-'):
try:
version = _macosx_vers()
version = _macos_vers()
machine = os.uname()[4].replace(" ", "_")
return "macosx-%d.%d-%s" % (
int(version[0]), int(version[1]),
_macosx_arch(machine),
_macos_arch(machine),
)
except ValueError:
# if someone is running a non-Mac darwin system, this will fall
@ -425,7 +424,7 @@ def compatible_platforms(provided, required):
# easy case
return True
# Mac OS X special cases
# macOS special cases
reqMac = macosVersionString.match(required)
if reqMac:
provMac = macosVersionString.match(provided)
@ -434,7 +433,7 @@ def compatible_platforms(provided, required):
if not provMac:
# this is backwards compatibility for packages built before
# setuptools 0.6. All packages built after this point will
# use the new macosx designation.
# use the new macOS designation.
provDarwin = darwinVersionString.match(provided)
if provDarwin:
dversion = int(provDarwin.group(1))
@ -442,7 +441,7 @@ def compatible_platforms(provided, required):
if dversion == 7 and macosversion >= "10.3" or \
dversion == 8 and macosversion >= "10.4":
return True
# egg isn't macosx or legacy darwin
# egg isn't macOS or legacy darwin
return False
# are they the same major version and machine type?
@ -1234,12 +1233,13 @@ class ResourceManager:
mode = os.stat(path).st_mode
if mode & stat.S_IWOTH or mode & stat.S_IWGRP:
msg = (
"%s is writable by group/others and vulnerable to attack "
"when "
"used with get_resource_filename. Consider a more secure "
"Extraction path is writable by group/others "
"and vulnerable to attack when "
"used with get_resource_filename ({path}). "
"Consider a more secure "
"location (set with .set_extraction_path or the "
"PYTHON_EGG_CACHE environment variable)." % path
)
"PYTHON_EGG_CACHE environment variable)."
).format(**locals())
warnings.warn(msg, UserWarning)
def postprocess(self, tempname, filename):
@ -1416,8 +1416,17 @@ class NullProvider:
def get_metadata(self, name):
if not self.egg_info:
return ""
value = self._get(self._fn(self.egg_info, name))
return value.decode('utf-8') if six.PY3 else value
path = self._get_metadata_path(name)
value = self._get(path)
if six.PY2:
return value
try:
return value.decode('utf-8')
except UnicodeDecodeError as exc:
# Include the path in the error message to simplify
# troubleshooting, and without changing the exception type.
exc.reason += ' in {} file at path: {}'.format(name, path)
raise
def get_metadata_lines(self, name):
return yield_lines(self.get_metadata(name))
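Since a UnicodeDecodeError's message is rebuilt from its attributes when the exception is rendered, the annotation above changes what the traceback reports without changing the exception type. A minimal sketch, using a made-up metadata name and path:

    try:
        b"\xff".decode('utf-8')
    except UnicodeDecodeError as exc:
        # Same trick as get_metadata(): extend reason, keep the exception type.
        exc.reason += ' in PKG-INFO file at path: /tmp/example.egg-info/PKG-INFO'
        print(exc)  # ... invalid start byte in PKG-INFO file at path: /tmp/...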
@ -1448,7 +1457,8 @@ class NullProvider:
script_filename = self._fn(self.egg_info, script)
namespace['__file__'] = script_filename
if os.path.exists(script_filename):
source = open(script_filename).read()
with open(script_filename) as fid:
source = fid.read()
code = compile(source, script_filename, 'exec')
exec(code, namespace, namespace)
else:
@ -1566,6 +1576,17 @@ is not allowed.
register_loader_type(object, NullProvider)
def _parents(path):
"""
yield all parents of path including path
"""
last = None
while path != last:
yield path
last = path
path, _ = os.path.split(path)
class EggProvider(NullProvider):
"""Provider based on a virtual filesystem"""
@ -1574,18 +1595,16 @@ class EggProvider(NullProvider):
self._setup_prefix()
def _setup_prefix(self):
# we assume here that our metadata may be nested inside a "basket"
# of multiple eggs; that's why we use module_path instead of .archive
path = self.module_path
old = None
while path != old:
if _is_egg_path(path):
self.egg_name = os.path.basename(path)
self.egg_info = os.path.join(path, 'EGG-INFO')
self.egg_root = path
break
old = path
path, base = os.path.split(path)
# Assume that metadata may be nested inside a "basket"
# of multiple eggs and use module_path instead of .archive.
eggs = filter(_is_egg_path, _parents(self.module_path))
egg = next(eggs, None)
egg and self._set_egg(egg)
def _set_egg(self, path):
self.egg_name = os.path.basename(path)
self.egg_info = os.path.join(path, 'EGG-INFO')
self.egg_root = path
class DefaultProvider(EggProvider):
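For reference, the new `_parents` helper simply walks a path upward until it stops changing, and `_setup_prefix` takes the first parent that looks like an egg. A minimal sketch of that walk, assuming a POSIX-style, purely hypothetical path:

    import os

    def _parents(path):
        # yield all parents of path, including path itself
        last = None
        while path != last:
            yield path
            last = path
            path, _ = os.path.split(path)

    print(list(_parents('/opt/basket.egg/nested/mod')))
    # ['/opt/basket.egg/nested/mod', '/opt/basket.egg/nested',
    #  '/opt/basket.egg', '/opt', '/']

Here `filter(_is_egg_path, _parents(module_path))` would pick '/opt/basket.egg', provided it actually exists on disk, since the reworked `_is_egg_path` below also checks the filesystem.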
@ -2037,7 +2056,10 @@ def find_on_path(importer, path_item, only=False):
)
return
entries = safe_listdir(path_item)
entries = (
os.path.join(path_item, child)
for child in safe_listdir(path_item)
)
# for performance, before sorting by version,
# screen entries for only those that will yield
@ -2058,11 +2080,14 @@ def find_on_path(importer, path_item, only=False):
def dist_factory(path_item, entry, only):
"""
Return a dist_factory for a path_item and entry
"""
"""Return a dist_factory for the given entry."""
lower = entry.lower()
is_meta = any(map(lower.endswith, ('.egg-info', '.dist-info')))
is_egg_info = lower.endswith('.egg-info')
is_dist_info = (
lower.endswith('.dist-info') and
os.path.isdir(os.path.join(path_item, entry))
)
is_meta = is_egg_info or is_dist_info
return (
distributions_from_metadata
if is_meta else
@ -2186,10 +2211,14 @@ def _handle_ns(packageName, path_item):
if importer is None:
return None
# capture warnings due to #1111
with warnings.catch_warnings():
warnings.simplefilter("ignore")
loader = importer.find_module(packageName)
# use find_spec (PEP 451) and fall-back to find_module (PEP 302)
try:
loader = importer.find_spec(packageName).loader
except AttributeError:
# capture warnings due to #1111
with warnings.catch_warnings():
warnings.simplefilter("ignore")
loader = importer.find_module(packageName)
if loader is None:
return None
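The block above prefers the PEP 451 finder API and falls back to the legacy PEP 302 one when `find_spec` is missing or returns nothing with a usable loader. A minimal sketch with a hypothetical legacy importer:

    class LegacyImporter(object):
        # Hypothetical importer that only implements the PEP 302 API.
        def find_module(self, name):
            return None  # pretend nothing was found

    importer = LegacyImporter()
    try:
        loader = importer.find_spec('example_pkg').loader   # PEP 451
    except AttributeError:
        loader = importer.find_module('example_pkg')        # PEP 302 fallback
    print(loader)  # None, so _handle_ns() would simply return None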
@ -2319,7 +2348,8 @@ register_namespace_handler(object, null_ns_handler)
def normalize_path(filename):
"""Normalize a file/dir name for comparison purposes"""
return os.path.normcase(os.path.realpath(os.path.normpath(_cygwin_patch(filename))))
return os.path.normcase(os.path.realpath(os.path.normpath(
_cygwin_patch(filename))))
def _cygwin_patch(filename): # pragma: nocover
@ -2345,7 +2375,15 @@ def _is_egg_path(path):
"""
Determine if given path appears to be an egg.
"""
return path.lower().endswith('.egg')
return _is_zip_egg(path) or _is_unpacked_egg(path)
def _is_zip_egg(path):
return (
path.lower().endswith('.egg') and
os.path.isfile(path) and
zipfile.is_zipfile(path)
)
def _is_unpacked_egg(path):
@ -2353,7 +2391,7 @@ def _is_unpacked_egg(path):
Determine if given path appears to be an unpacked egg.
"""
return (
_is_egg_path(path) and
path.lower().endswith('.egg') and
os.path.isfile(os.path.join(path, 'EGG-INFO', 'PKG-INFO'))
)
@ -2527,15 +2565,6 @@ class EntryPoint:
return maps
def _remove_md5_fragment(location):
if not location:
return ''
parsed = urllib.parse.urlparse(location)
if parsed[-1].startswith('md5='):
return urllib.parse.urlunparse(parsed[:-1] + ('',))
return location
def _version_from_file(lines):
"""
Given an iterable of lines from a Metadata file, return
@ -2592,7 +2621,7 @@ class Distribution:
self.parsed_version,
self.precedence,
self.key,
_remove_md5_fragment(self.location),
self.location,
self.py_version or '',
self.platform or '',
)
@ -3058,11 +3087,6 @@ def issue_warning(*args, **kw):
warnings.warn(stacklevel=level + 1, *args, **kw)
class RequirementParseError(ValueError):
def __str__(self):
return ' '.join(self.args)
def parse_requirements(strs):
"""Yield ``Requirement`` objects for each specification in `strs`
@ -3085,13 +3109,14 @@ def parse_requirements(strs):
yield Requirement(line)
class RequirementParseError(packaging.requirements.InvalidRequirement):
"Compatibility wrapper for InvalidRequirement"
class Requirement(packaging.requirements.Requirement):
def __init__(self, requirement_string):
"""DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
try:
super(Requirement, self).__init__(requirement_string)
except packaging.requirements.InvalidRequirement as e:
raise RequirementParseError(str(e))
super(Requirement, self).__init__(requirement_string)
self.unsafe_name = self.name
project_name = safe_name(self.name)
self.project_name, self.key = project_name, project_name.lower()
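With this change, parse failures propagate packaging's InvalidRequirement, and RequirementParseError remains only as a compatibility subclass of it. A minimal sketch, assuming the vendored packaging is reachable through pkg_resources.extern as it is elsewhere in this file:

    import pkg_resources
    from pkg_resources.extern.packaging.requirements import InvalidRequirement

    req = pkg_resources.Requirement.parse('example-dist[extra]>=1.0')
    print(req.key, str(req.specifier), req.extras)  # example-dist >=1.0 ('extra',)

    try:
        pkg_resources.Requirement.parse('example-dist (=1.0)')  # "=" is not an operator
    except InvalidRequirement as exc:  # also catches the compatibility subclass
        print('rejected:', exc)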
@ -3100,6 +3125,7 @@ class Requirement(packaging.requirements.Requirement):
self.extras = tuple(map(safe_extra, self.extras))
self.hashCmp = (
self.key,
self.url,
self.specifier,
frozenset(self.extras),
str(self.marker) if self.marker else None,
)
@ -3160,7 +3186,7 @@ def _find_adapter(registry, ob):
def ensure_directory(path):
"""Ensure that the parent directory of `path` exists"""
dirname = os.path.dirname(path)
py31compat.makedirs(dirname, exist_ok=True)
os.makedirs(dirname, exist_ok=True)
def _bypass_ensure_directory(path):
@ -3277,6 +3303,7 @@ def _initialize_master_working_set():
list(map(working_set.add_entry, sys.path))
globals().update(locals())
class PkgResourcesDeprecationWarning(Warning):
"""
Base class for warning about deprecations in ``pkg_resources``

lib/pkg_resources/_vendor/packaging/__about__.py (16)

@ -4,18 +4,24 @@
from __future__ import absolute_import, division, print_function
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
"__title__",
"__summary__",
"__uri__",
"__version__",
"__author__",
"__email__",
"__license__",
"__copyright__",
]
__title__ = "packaging"
__summary__ = "Core utilities for Python packages"
__uri__ = "https://github.com/pypa/packaging"
__version__ = "16.8"
__version__ = "20.4"
__author__ = "Donald Stufft and individual contributors"
__email__ = "donald@stufft.io"
__license__ = "BSD or Apache License, Version 2.0"
__copyright__ = "Copyright 2014-2016 %s" % __author__
__license__ = "BSD-2-Clause or Apache-2.0"
__copyright__ = "Copyright 2014-2019 %s" % __author__

lib/pkg_resources/_vendor/packaging/__init__.py (20)

@ -4,11 +4,23 @@
from __future__ import absolute_import, division, print_function
from .__about__ import (
__author__, __copyright__, __email__, __license__, __summary__, __title__,
__uri__, __version__
__author__,
__copyright__,
__email__,
__license__,
__summary__,
__title__,
__uri__,
__version__,
)
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
"__title__",
"__summary__",
"__uri__",
"__version__",
"__author__",
"__email__",
"__license__",
"__copyright__",
]

lib/pkg_resources/_vendor/packaging/_compat.py (16)

@ -5,6 +5,11 @@ from __future__ import absolute_import, division, print_function
import sys
from ._typing import TYPE_CHECKING
if TYPE_CHECKING: # pragma: no cover
from typing import Any, Dict, Tuple, Type
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
@ -12,19 +17,22 @@ PY3 = sys.version_info[0] == 3
# flake8: noqa
if PY3:
string_types = str,
string_types = (str,)
else:
string_types = basestring,
string_types = (basestring,)
def with_metaclass(meta, *bases):
# type: (Type[Any], Tuple[Type[Any], ...]) -> Any
"""
Create a base class with a metaclass.
"""
# This requires a bit of explanation: the basic idea is to make a dummy
# metaclass for one level of class instantiation that replaces itself with
# the actual metaclass.
class metaclass(meta):
class metaclass(meta): # type: ignore
def __new__(cls, name, this_bases, d):
# type: (Type[Any], str, Tuple[Any], Dict[Any, Any]) -> Any
return meta(name, bases, d)
return type.__new__(metaclass, 'temporary_class', (), {})
return type.__new__(metaclass, "temporary_class", (), {})

lib/pkg_resources/_vendor/packaging/_structures.py (28)

@ -4,65 +4,83 @@
from __future__ import absolute_import, division, print_function
class Infinity(object):
class InfinityType(object):
def __repr__(self):
# type: () -> str
return "Infinity"
def __hash__(self):
# type: () -> int
return hash(repr(self))
def __lt__(self, other):
# type: (object) -> bool
return False
def __le__(self, other):
# type: (object) -> bool
return False
def __eq__(self, other):
# type: (object) -> bool
return isinstance(other, self.__class__)
def __ne__(self, other):
# type: (object) -> bool
return not isinstance(other, self.__class__)
def __gt__(self, other):
# type: (object) -> bool
return True
def __ge__(self, other):
# type: (object) -> bool
return True
def __neg__(self):
# type: (object) -> NegativeInfinityType
return NegativeInfinity
Infinity = Infinity()
Infinity = InfinityType()
class NegativeInfinity(object):
class NegativeInfinityType(object):
def __repr__(self):
# type: () -> str
return "-Infinity"
def __hash__(self):
# type: () -> int
return hash(repr(self))
def __lt__(self, other):
# type: (object) -> bool
return True
def __le__(self, other):
# type: (object) -> bool
return True
def __eq__(self, other):
# type: (object) -> bool
return isinstance(other, self.__class__)
def __ne__(self, other):
# type: (object) -> bool
return not isinstance(other, self.__class__)
def __gt__(self, other):
# type: (object) -> bool
return False
def __ge__(self, other):
# type: (object) -> bool
return False
def __neg__(self):
# type: (object) -> InfinityType
return Infinity
NegativeInfinity = NegativeInfinity()
NegativeInfinity = NegativeInfinityType()

lib/pkg_resources/_vendor/packaging/_typing.py (48)

@ -0,0 +1,48 @@
"""For neatly implementing static typing in packaging.
`mypy` - the static type analysis tool we use - uses the `typing` module, which
provides core functionality fundamental to mypy's functioning.
Generally, `typing` would be imported at runtime and used in that fashion -
it acts as a no-op at runtime and does not have any run-time overhead by
design.
As it turns out, `typing` is not vendorable - it uses separate sources for
Python 2/Python 3. Thus, this codebase can not expect it to be present.
To work around this, mypy allows the typing import to be behind a False-y
optional to prevent it from running at runtime and type-comments can be used
to remove the need for the types to be accessible directly during runtime.
This module provides the False-y guard in a nicely named fashion so that a
curious maintainer can reach here to read this.
In packaging, all static-typing related imports should be guarded as follows:
from packaging._typing import TYPE_CHECKING
if TYPE_CHECKING:
from typing import ...
Ref: https://github.com/python/mypy/issues/3216
"""
__all__ = ["TYPE_CHECKING", "cast"]
# The TYPE_CHECKING constant defined by the typing module is False at runtime
# but True while type checking.
if False: # pragma: no cover
from typing import TYPE_CHECKING
else:
TYPE_CHECKING = False
# typing's cast syntax requires calling typing.cast at runtime, but we don't
# want to import typing at runtime. Here, we inform the type checkers that
# we're importing `typing.cast` as `cast` and re-implement typing.cast's
# runtime behavior in a block that is ignored by type checkers.
if TYPE_CHECKING: # pragma: no cover
# not executed at runtime
from typing import cast
else:
# executed at runtime
def cast(type_, value): # noqa
return value
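To show the guard in use, here is a self-contained sketch that mirrors the module above; the helper function is hypothetical. The type checker sees the typing import, while the interpreter never executes it:

    # Guard mirroring packaging._typing at this version.
    if False:  # pragma: no cover
        from typing import TYPE_CHECKING
    else:
        TYPE_CHECKING = False

    if TYPE_CHECKING:  # pragma: no cover
        from typing import List, Optional

    def split_words(text, limit=None):
        # type: (str, Optional[int]) -> List[str]
        return text.split()[:limit]

    print(split_words('update package resource API', 2))  # ['update', 'package']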

lib/pkg_resources/_vendor/packaging/markers.py (139)

@ -13,12 +13,21 @@ from pkg_resources.extern.pyparsing import ZeroOrMore, Group, Forward, QuotedStr
from pkg_resources.extern.pyparsing import Literal as L # noqa
from ._compat import string_types
from ._typing import TYPE_CHECKING
from .specifiers import Specifier, InvalidSpecifier
if TYPE_CHECKING: # pragma: no cover
from typing import Any, Callable, Dict, List, Optional, Tuple, Union
Operator = Callable[[str, str], bool]
__all__ = [
"InvalidMarker", "UndefinedComparison", "UndefinedEnvironmentName",
"Marker", "default_environment",
"InvalidMarker",
"UndefinedComparison",
"UndefinedEnvironmentName",
"Marker",
"default_environment",
]
@ -42,77 +51,73 @@ class UndefinedEnvironmentName(ValueError):
class Node(object):
def __init__(self, value):
# type: (Any) -> None
self.value = value
def __str__(self):
# type: () -> str
return str(self.value)
def __repr__(self):
# type: () -> str
return "<{0}({1!r})>".format(self.__class__.__name__, str(self))
def serialize(self):
# type: () -> str
raise NotImplementedError
class Variable(Node):
def serialize(self):
# type: () -> str
return str(self)
class Value(Node):
def serialize(self):
# type: () -> str
return '"{0}"'.format(self)
class Op(Node):
def serialize(self):
# type: () -> str
return str(self)
VARIABLE = (
L("implementation_version") |
L("platform_python_implementation") |
L("implementation_name") |
L("python_full_version") |
L("platform_release") |
L("platform_version") |
L("platform_machine") |
L("platform_system") |
L("python_version") |
L("sys_platform") |
L("os_name") |
L("os.name") | # PEP-345
L("sys.platform") | # PEP-345
L("platform.version") | # PEP-345
L("platform.machine") | # PEP-345
L("platform.python_implementation") | # PEP-345
L("python_implementation") | # undocumented setuptools legacy
L("extra")
L("implementation_version")
| L("platform_python_implementation")
| L("implementation_name")
| L("python_full_version")
| L("platform_release")
| L("platform_version")
| L("platform_machine")
| L("platform_system")
| L("python_version")
| L("sys_platform")
| L("os_name")
| L("os.name") # PEP-345
| L("sys.platform") # PEP-345
| L("platform.version") # PEP-345
| L("platform.machine") # PEP-345
| L("platform.python_implementation") # PEP-345
| L("python_implementation") # undocumented setuptools legacy
| L("extra") # PEP-508
)
ALIASES = {
'os.name': 'os_name',
'sys.platform': 'sys_platform',
'platform.version': 'platform_version',
'platform.machine': 'platform_machine',
'platform.python_implementation': 'platform_python_implementation',
'python_implementation': 'platform_python_implementation'
"os.name": "os_name",
"sys.platform": "sys_platform",
"platform.version": "platform_version",
"platform.machine": "platform_machine",
"platform.python_implementation": "platform_python_implementation",
"python_implementation": "platform_python_implementation",
}
VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0])))
VERSION_CMP = (
L("===") |
L("==") |
L(">=") |
L("<=") |
L("!=") |
L("~=") |
L(">") |
L("<")
L("===") | L("==") | L(">=") | L("<=") | L("!=") | L("~=") | L(">") | L("<")
)
MARKER_OP = VERSION_CMP | L("not in") | L("in")
@ -139,6 +144,7 @@ MARKER = stringStart + MARKER_EXPR + stringEnd
def _coerce_parse_result(results):
# type: (Union[ParseResults, List[Any]]) -> List[Any]
if isinstance(results, ParseResults):
return [_coerce_parse_result(i) for i in results]
else:
@ -146,14 +152,19 @@ def _coerce_parse_result(results):
def _format_marker(marker, first=True):
# type: (Union[List[str], Tuple[Node, ...], str], Optional[bool]) -> str
assert isinstance(marker, (list, tuple, string_types))
# Sometimes we have a structure like [[...]] which is a single item list
# where the single item is itself it's own list. In that case we want skip
# the rest of this function so that we don't get extraneous () on the
# outside.
if (isinstance(marker, list) and len(marker) == 1 and
isinstance(marker[0], (list, tuple))):
if (
isinstance(marker, list)
and len(marker) == 1
and isinstance(marker[0], (list, tuple))
):
return _format_marker(marker[0])
if isinstance(marker, list):
@ -177,10 +188,11 @@ _operators = {
"!=": operator.ne,
">=": operator.ge,
">": operator.gt,
}
} # type: Dict[str, Operator]
def _eval_op(lhs, op, rhs):
# type: (str, Op, str) -> bool
try:
spec = Specifier("".join([op.serialize(), rhs]))
except InvalidSpecifier:
@ -188,7 +200,7 @@ def _eval_op(lhs, op, rhs):
else:
return spec.contains(lhs)
oper = _operators.get(op.serialize())
oper = _operators.get(op.serialize()) # type: Optional[Operator]
if oper is None:
raise UndefinedComparison(
"Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs)
@ -197,13 +209,18 @@ def _eval_op(lhs, op, rhs):
return oper(lhs, rhs)
_undefined = object()
class Undefined(object):
pass
_undefined = Undefined()
def _get_env(environment, name):
value = environment.get(name, _undefined)
# type: (Dict[str, str], str) -> str
value = environment.get(name, _undefined) # type: Union[str, Undefined]
if value is _undefined:
if isinstance(value, Undefined):
raise UndefinedEnvironmentName(
"{0!r} does not exist in evaluation environment.".format(name)
)
@ -212,7 +229,8 @@ def _get_env(environment, name):
def _evaluate_markers(markers, environment):
groups = [[]]
# type: (List[Any], Dict[str, str]) -> bool
groups = [[]] # type: List[List[bool]]
for marker in markers:
assert isinstance(marker, (list, tuple, string_types))
@ -239,20 +257,25 @@ def _evaluate_markers(markers, environment):
def format_full_version(info):
version = '{0.major}.{0.minor}.{0.micro}'.format(info)
# type: (sys._version_info) -> str
version = "{0.major}.{0.minor}.{0.micro}".format(info)
kind = info.releaselevel
if kind != 'final':
if kind != "final":
version += kind[0] + str(info.serial)
return version
def default_environment():
if hasattr(sys, 'implementation'):
iver = format_full_version(sys.implementation.version)
implementation_name = sys.implementation.name
# type: () -> Dict[str, str]
if hasattr(sys, "implementation"):
# Ignoring the `sys.implementation` reference for type checking due to
# mypy not liking that the attribute doesn't exist in Python 2.7 when
# run with the `--py27` flag.
iver = format_full_version(sys.implementation.version) # type: ignore
implementation_name = sys.implementation.name # type: ignore
else:
iver = '0'
implementation_name = ''
iver = "0"
implementation_name = ""
return {
"implementation_name": implementation_name,
@ -264,28 +287,32 @@ def default_environment():
"platform_version": platform.version(),
"python_full_version": platform.python_version(),
"platform_python_implementation": platform.python_implementation(),
"python_version": platform.python_version()[:3],
"python_version": ".".join(platform.python_version_tuple()[:2]),
"sys_platform": sys.platform,
}
class Marker(object):
def __init__(self, marker):
# type: (str) -> None
try:
self._markers = _coerce_parse_result(MARKER.parseString(marker))
except ParseException as e:
err_str = "Invalid marker: {0!r}, parse error at {1!r}".format(
marker, marker[e.loc:e.loc + 8])
marker, marker[e.loc : e.loc + 8]
)
raise InvalidMarker(err_str)
def __str__(self):
# type: () -> str
return _format_marker(self._markers)
def __repr__(self):
# type: () -> str
return "<Marker({0!r})>".format(str(self))
def evaluate(self, environment=None):
# type: (Optional[Dict[str, str]]) -> bool
"""Evaluate a marker.
Return the boolean from evaluating the given marker against the

lib/pkg_resources/_vendor/packaging/py.typed (0)

lib/pkg_resources/_vendor/packaging/requirements.py (50)

@ -11,9 +11,13 @@ from pkg_resources.extern.pyparsing import ZeroOrMore, Word, Optional, Regex, Co
from pkg_resources.extern.pyparsing import Literal as L # noqa
from pkg_resources.extern.six.moves.urllib import parse as urlparse
from ._typing import TYPE_CHECKING
from .markers import MARKER_EXPR, Marker
from .specifiers import LegacySpecifier, Specifier, SpecifierSet
if TYPE_CHECKING: # pragma: no cover
from typing import List
class InvalidRequirement(ValueError):
"""
@ -38,8 +42,8 @@ IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END))
NAME = IDENTIFIER("name")
EXTRA = IDENTIFIER
URI = Regex(r'[^ ]+')("url")
URL = (AT + URI)
URI = Regex(r"[^ ]+")("url")
URL = AT + URI
EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA)
EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras")
@ -48,28 +52,31 @@ VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE)
VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE)
VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY
VERSION_MANY = Combine(VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE),
joinString=",", adjacent=False)("_raw_spec")
VERSION_MANY = Combine(
VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), joinString=",", adjacent=False
)("_raw_spec")
_VERSION_SPEC = Optional(((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY))
_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or '')
_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or "")
VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier")
VERSION_SPEC.setParseAction(lambda s, l, t: t[1])
MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker")
MARKER_EXPR.setParseAction(
lambda s, l, t: Marker(s[t._original_start:t._original_end])
lambda s, l, t: Marker(s[t._original_start : t._original_end])
)
MARKER_SEPERATOR = SEMICOLON
MARKER = MARKER_SEPERATOR + MARKER_EXPR
MARKER_SEPARATOR = SEMICOLON
MARKER = MARKER_SEPARATOR + MARKER_EXPR
VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER)
URL_AND_MARKER = URL + Optional(MARKER)
NAMED_REQUIREMENT = \
NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER)
NAMED_REQUIREMENT = NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER)
REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd
# pkg_resources.extern.pyparsing isn't thread safe during initialization, so we do it eagerly, see
# issue #104
REQUIREMENT.parseString("x[]")
class Requirement(object):
@ -86,19 +93,26 @@ class Requirement(object):
# TODO: Can we normalize the name and extra name?
def __init__(self, requirement_string):
# type: (str) -> None
try:
req = REQUIREMENT.parseString(requirement_string)
except ParseException as e:
raise InvalidRequirement(
"Invalid requirement, parse error at \"{0!r}\"".format(
requirement_string[e.loc:e.loc + 8]))
'Parse error at "{0!r}": {1}'.format(
requirement_string[e.loc : e.loc + 8], e.msg
)
)
self.name = req.name
if req.url:
parsed_url = urlparse.urlparse(req.url)
if not (parsed_url.scheme and parsed_url.netloc) or (
not parsed_url.scheme and not parsed_url.netloc):
raise InvalidRequirement("Invalid URL given")
if parsed_url.scheme == "file":
if urlparse.urlunparse(parsed_url) != req.url:
raise InvalidRequirement("Invalid URL given")
elif not (parsed_url.scheme and parsed_url.netloc) or (
not parsed_url.scheme and not parsed_url.netloc
):
raise InvalidRequirement("Invalid URL: {0}".format(req.url))
self.url = req.url
else:
self.url = None
@ -107,7 +121,8 @@ class Requirement(object):
self.marker = req.marker if req.marker else None
def __str__(self):
parts = [self.name]
# type: () -> str
parts = [self.name] # type: List[str]
if self.extras:
parts.append("[{0}]".format(",".join(sorted(self.extras))))
@ -117,6 +132,8 @@ class Requirement(object):
if self.url:
parts.append("@ {0}".format(self.url))
if self.marker:
parts.append(" ")
if self.marker:
parts.append("; {0}".format(self.marker))
@ -124,4 +141,5 @@ class Requirement(object):
return "".join(parts)
def __repr__(self):
# type: () -> str
return "<Requirement({0!r})>".format(str(self))

lib/pkg_resources/_vendor/packaging/specifiers.py (251)

@ -9,8 +9,27 @@ import itertools
import re
from ._compat import string_types, with_metaclass
from ._typing import TYPE_CHECKING
from .utils import canonicalize_version
from .version import Version, LegacyVersion, parse
if TYPE_CHECKING: # pragma: no cover
from typing import (
List,
Dict,
Union,
Iterable,
Iterator,
Optional,
Callable,
Tuple,
FrozenSet,
)
ParsedVersion = Union[Version, LegacyVersion]
UnparsedVersion = Union[Version, LegacyVersion, str]
CallableOperator = Callable[[ParsedVersion, str], bool]
class InvalidSpecifier(ValueError):
"""
@ -18,10 +37,10 @@ class InvalidSpecifier(ValueError):
"""
class BaseSpecifier(with_metaclass(abc.ABCMeta, object)):
class BaseSpecifier(with_metaclass(abc.ABCMeta, object)): # type: ignore
@abc.abstractmethod
def __str__(self):
# type: () -> str
"""
Returns the str representation of this Specifier like object. This
should be representative of the Specifier itself.
@ -29,12 +48,14 @@ class BaseSpecifier(with_metaclass(abc.ABCMeta, object)):
@abc.abstractmethod
def __hash__(self):
# type: () -> int
"""
Returns a hash value for this Specifier like object.
"""
@abc.abstractmethod
def __eq__(self, other):
# type: (object) -> bool
"""
Returns a boolean representing whether or not the two Specifier like
objects are equal.
@ -42,6 +63,7 @@ class BaseSpecifier(with_metaclass(abc.ABCMeta, object)):
@abc.abstractmethod
def __ne__(self, other):
# type: (object) -> bool
"""
Returns a boolean representing whether or not the two Specifier like
objects are not equal.
@ -49,6 +71,7 @@ class BaseSpecifier(with_metaclass(abc.ABCMeta, object)):
@abc.abstractproperty
def prereleases(self):
# type: () -> Optional[bool]
"""
Returns whether or not pre-releases as a whole are allowed by this
specifier.
@ -56,6 +79,7 @@ class BaseSpecifier(with_metaclass(abc.ABCMeta, object)):
@prereleases.setter
def prereleases(self, value):
# type: (bool) -> None
"""
Sets whether or not pre-releases as a whole are allowed by this
specifier.
@ -63,12 +87,14 @@ class BaseSpecifier(with_metaclass(abc.ABCMeta, object)):
@abc.abstractmethod
def contains(self, item, prereleases=None):
# type: (str, Optional[bool]) -> bool
"""
Determines if the given item is contained within this specifier.
"""
@abc.abstractmethod
def filter(self, iterable, prereleases=None):
# type: (Iterable[UnparsedVersion], Optional[bool]) -> Iterable[UnparsedVersion]
"""
Takes an iterable of items and filters them so that only items which
are contained within this specifier are allowed in it.
@ -77,9 +103,10 @@ class BaseSpecifier(with_metaclass(abc.ABCMeta, object)):
class _IndividualSpecifier(BaseSpecifier):
_operators = {}
_operators = {} # type: Dict[str, str]
def __init__(self, spec="", prereleases=None):
# type: (str, Optional[bool]) -> None
match = self._regex.search(spec)
if not match:
raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec))
@ -87,45 +114,51 @@ class _IndividualSpecifier(BaseSpecifier):
self._spec = (
match.group("operator").strip(),
match.group("version").strip(),
)
) # type: Tuple[str, str]
# Store whether or not this Specifier should accept prereleases
self._prereleases = prereleases
def __repr__(self):
# type: () -> str
pre = (
", prereleases={0!r}".format(self.prereleases)
if self._prereleases is not None
else ""
)
return "<{0}({1!r}{2})>".format(
self.__class__.__name__,
str(self),
pre,
)
return "<{0}({1!r}{2})>".format(self.__class__.__name__, str(self), pre)
def __str__(self):
# type: () -> str
return "{0}{1}".format(*self._spec)
@property
def _canonical_spec(self):
# type: () -> Tuple[str, Union[Version, str]]
return self._spec[0], canonicalize_version(self._spec[1])
def __hash__(self):
return hash(self._spec)
# type: () -> int
return hash(self._canonical_spec)
def __eq__(self, other):
# type: (object) -> bool
if isinstance(other, string_types):
try:
other = self.__class__(other)
other = self.__class__(str(other))
except InvalidSpecifier:
return NotImplemented
elif not isinstance(other, self.__class__):
return NotImplemented
return self._spec == other._spec
return self._canonical_spec == other._canonical_spec
def __ne__(self, other):
# type: (object) -> bool
if isinstance(other, string_types):
try:
other = self.__class__(other)
other = self.__class__(str(other))
except InvalidSpecifier:
return NotImplemented
elif not isinstance(other, self.__class__):
@ -134,52 +167,67 @@ class _IndividualSpecifier(BaseSpecifier):
return self._spec != other._spec
def _get_operator(self, op):
return getattr(self, "_compare_{0}".format(self._operators[op]))
# type: (str) -> CallableOperator
operator_callable = getattr(
self, "_compare_{0}".format(self._operators[op])
) # type: CallableOperator
return operator_callable
def _coerce_version(self, version):
# type: (UnparsedVersion) -> ParsedVersion
if not isinstance(version, (LegacyVersion, Version)):
version = parse(version)
return version
@property
def operator(self):
# type: () -> str
return self._spec[0]
@property
def version(self):
# type: () -> str
return self._spec[1]
@property
def prereleases(self):
# type: () -> Optional[bool]
return self._prereleases
@prereleases.setter
def prereleases(self, value):
# type: (bool) -> None
self._prereleases = value
def __contains__(self, item):
# type: (str) -> bool
return self.contains(item)
def contains(self, item, prereleases=None):
# type: (UnparsedVersion, Optional[bool]) -> bool
# Determine if prereleases are to be allowed or not.
if prereleases is None:
prereleases = self.prereleases
# Normalize item to a Version or LegacyVersion, this allows us to have
# a shortcut for ``"2.0" in Specifier(">=2")
item = self._coerce_version(item)
normalized_item = self._coerce_version(item)
# Determine if we should be supporting prereleases in this specifier
# or not, if we do not support prereleases than we can short circuit
# logic if this version is a prereleases.
if item.is_prerelease and not prereleases:
if normalized_item.is_prerelease and not prereleases:
return False
# Actually do the comparison to determine if this item is contained
# within this Specifier or not.
return self._get_operator(self.operator)(item, self.version)
operator_callable = self._get_operator(self.operator) # type: CallableOperator
return operator_callable(normalized_item, self.version)
def filter(self, iterable, prereleases=None):
# type: (Iterable[UnparsedVersion], Optional[bool]) -> Iterable[UnparsedVersion]
yielded = False
found_prereleases = []
@ -194,11 +242,12 @@ class _IndividualSpecifier(BaseSpecifier):
# If our version is a prerelease, and we were not set to allow
# prereleases, then we'll store it for later incase nothing
# else matches this specifier.
if (parsed_version.is_prerelease and not
(prereleases or self.prereleases)):
if parsed_version.is_prerelease and not (
prereleases or self.prereleases
):
found_prereleases.append(version)
# Either this is not a prerelease, or we should have been
# accepting prereleases from the begining.
# accepting prereleases from the beginning.
else:
yielded = True
yield version
@ -213,8 +262,7 @@ class _IndividualSpecifier(BaseSpecifier):
class LegacySpecifier(_IndividualSpecifier):
_regex_str = (
r"""
_regex_str = r"""
(?P<operator>(==|!=|<=|>=|<|>))
\s*
(?P<version>
@ -225,10 +273,8 @@ class LegacySpecifier(_IndividualSpecifier):
# them, and a comma since it's a version separator.
)
"""
)
_regex = re.compile(
r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
_regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
_operators = {
"==": "equal",
@ -240,42 +286,53 @@ class LegacySpecifier(_IndividualSpecifier):
}
def _coerce_version(self, version):
# type: (Union[ParsedVersion, str]) -> LegacyVersion
if not isinstance(version, LegacyVersion):
version = LegacyVersion(str(version))
return version
def _compare_equal(self, prospective, spec):
# type: (LegacyVersion, str) -> bool
return prospective == self._coerce_version(spec)
def _compare_not_equal(self, prospective, spec):
# type: (LegacyVersion, str) -> bool
return prospective != self._coerce_version(spec)
def _compare_less_than_equal(self, prospective, spec):
# type: (LegacyVersion, str) -> bool
return prospective <= self._coerce_version(spec)
def _compare_greater_than_equal(self, prospective, spec):
# type: (LegacyVersion, str) -> bool
return prospective >= self._coerce_version(spec)
def _compare_less_than(self, prospective, spec):
# type: (LegacyVersion, str) -> bool
return prospective < self._coerce_version(spec)
def _compare_greater_than(self, prospective, spec):
# type: (LegacyVersion, str) -> bool
return prospective > self._coerce_version(spec)
def _require_version_compare(fn):
def _require_version_compare(
fn # type: (Callable[[Specifier, ParsedVersion, str], bool])
):
# type: (...) -> Callable[[Specifier, ParsedVersion, str], bool]
@functools.wraps(fn)
def wrapped(self, prospective, spec):
# type: (Specifier, ParsedVersion, str) -> bool
if not isinstance(prospective, Version):
return False
return fn(self, prospective, spec)
return wrapped
class Specifier(_IndividualSpecifier):
_regex_str = (
r"""
_regex_str = r"""
(?P<operator>(~=|==|!=|<=|>=|<|>|===))
(?P<version>
(?:
@ -367,10 +424,8 @@ class Specifier(_IndividualSpecifier):
)
)
"""
)
_regex = re.compile(
r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
_regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
_operators = {
"~=": "compatible",
@ -385,6 +440,8 @@ class Specifier(_IndividualSpecifier):
@_require_version_compare
def _compare_compatible(self, prospective, spec):
# type: (ParsedVersion, str) -> bool
# Compatible releases have an equivalent combination of >= and ==. That
# is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
# implement this in terms of the other specifiers instead of
@ -397,8 +454,7 @@ class Specifier(_IndividualSpecifier):
prefix = ".".join(
list(
itertools.takewhile(
lambda x: (not x.startswith("post") and not
x.startswith("dev")),
lambda x: (not x.startswith("post") and not x.startswith("dev")),
_version_split(spec),
)
)[:-1]
@ -407,61 +463,81 @@ class Specifier(_IndividualSpecifier):
# Add the prefix notation to the end of our string
prefix += ".*"
return (self._get_operator(">=")(prospective, spec) and
self._get_operator("==")(prospective, prefix))
return self._get_operator(">=")(prospective, spec) and self._get_operator("==")(
prospective, prefix
)
@_require_version_compare
def _compare_equal(self, prospective, spec):
# type: (ParsedVersion, str) -> bool
# We need special logic to handle prefix matching
if spec.endswith(".*"):
# In the case of prefix matching we want to ignore local segment.
prospective = Version(prospective.public)
# Split the spec out by dots, and pretend that there is an implicit
# dot in between a release segment and a pre-release segment.
spec = _version_split(spec[:-2]) # Remove the trailing .*
split_spec = _version_split(spec[:-2]) # Remove the trailing .*
# Split the prospective version out by dots, and pretend that there
# is an implicit dot in between a release segment and a pre-release
# segment.
prospective = _version_split(str(prospective))
split_prospective = _version_split(str(prospective))
# Shorten the prospective version to be the same length as the spec
# so that we can determine if the specifier is a prefix of the
# prospective version or not.
prospective = prospective[:len(spec)]
shortened_prospective = split_prospective[: len(split_spec)]
# Pad out our two sides with zeros so that they both equal the same
# length.
spec, prospective = _pad_version(spec, prospective)
padded_spec, padded_prospective = _pad_version(
split_spec, shortened_prospective
)
return padded_prospective == padded_spec
else:
# Convert our spec string into a Version
spec = Version(spec)
spec_version = Version(spec)
# If the specifier does not have a local segment, then we want to
# act as if the prospective version also does not have a local
# segment.
if not spec.local:
if not spec_version.local:
prospective = Version(prospective.public)
return prospective == spec
return prospective == spec_version
@_require_version_compare
def _compare_not_equal(self, prospective, spec):
# type: (ParsedVersion, str) -> bool
return not self._compare_equal(prospective, spec)
@_require_version_compare
def _compare_less_than_equal(self, prospective, spec):
return prospective <= Version(spec)
# type: (ParsedVersion, str) -> bool
# NB: Local version identifiers are NOT permitted in the version
# specifier, so local version labels can be universally removed from
# the prospective version.
return Version(prospective.public) <= Version(spec)
@_require_version_compare
def _compare_greater_than_equal(self, prospective, spec):
return prospective >= Version(spec)
# type: (ParsedVersion, str) -> bool
# NB: Local version identifiers are NOT permitted in the version
# specifier, so local version labels can be universally removed from
# the prospective version.
return Version(prospective.public) >= Version(spec)
@_require_version_compare
def _compare_less_than(self, prospective, spec):
def _compare_less_than(self, prospective, spec_str):
# type: (ParsedVersion, str) -> bool
# Convert our spec to a Version instance, since we'll want to work with
# it as a version.
spec = Version(spec)
spec = Version(spec_str)
# Check to see if the prospective version is less than the spec
# version. If it's not we can short circuit and just return False now
@ -483,10 +559,12 @@ class Specifier(_IndividualSpecifier):
return True
@_require_version_compare
def _compare_greater_than(self, prospective, spec):
def _compare_greater_than(self, prospective, spec_str):
# type: (ParsedVersion, str) -> bool
# Convert our spec to a Version instance, since we'll want to work with
# it as a version.
spec = Version(spec)
spec = Version(spec_str)
# Check to see if the prospective version is greater than the spec
# version. If it's not we can short circuit and just return False now
@ -503,7 +581,7 @@ class Specifier(_IndividualSpecifier):
return False
# Ensure that we do not allow a local version of the version mentioned
# in the specifier, which is techincally greater than, to match.
# in the specifier, which is technically greater than, to match.
if prospective.local is not None:
if Version(prospective.base_version) == Version(spec.base_version):
return False
@ -514,10 +592,13 @@ class Specifier(_IndividualSpecifier):
return True
def _compare_arbitrary(self, prospective, spec):
# type: (Version, str) -> bool
return str(prospective).lower() == str(spec).lower()
@property
def prereleases(self):
# type: () -> bool
# If there is an explicit prereleases set for this, then we'll just
# blindly use that.
if self._prereleases is not None:
@ -542,6 +623,7 @@ class Specifier(_IndividualSpecifier):
@prereleases.setter
def prereleases(self, value):
# type: (bool) -> None
self._prereleases = value
@ -549,7 +631,8 @@ _prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
def _version_split(version):
result = []
# type: (str) -> List[str]
result = [] # type: List[str]
for item in version.split("."):
match = _prefix_regex.search(item)
if match:
@ -560,6 +643,7 @@ def _version_split(version):
def _pad_version(left, right):
# type: (List[str], List[str]) -> Tuple[List[str], List[str]]
left_split, right_split = [], []
# Get the release segment of our versions
@ -567,36 +651,28 @@ def _pad_version(left, right):
right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
# Get the rest of our versions
left_split.append(left[len(left_split[0]):])
right_split.append(right[len(right_split[0]):])
left_split.append(left[len(left_split[0]) :])
right_split.append(right[len(right_split[0]) :])
# Insert our padding
left_split.insert(
1,
["0"] * max(0, len(right_split[0]) - len(left_split[0])),
)
right_split.insert(
1,
["0"] * max(0, len(left_split[0]) - len(right_split[0])),
)
left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0])))
right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0])))
return (
list(itertools.chain(*left_split)),
list(itertools.chain(*right_split)),
)
return (list(itertools.chain(*left_split)), list(itertools.chain(*right_split)))
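For reference, a worked example of the helpers above; `_version_split` and `_pad_version` are private to the vendored module, so this is purely illustrative:

    from pkg_resources.extern.packaging.specifiers import _pad_version, _version_split

    left = _version_split('1.0')      # ['1', '0']
    right = _version_split('1.0.0')   # ['1', '0', '0']
    print(_pad_version(left, right))  # (['1', '0', '0'], ['1', '0', '0'])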
class SpecifierSet(BaseSpecifier):
def __init__(self, specifiers="", prereleases=None):
# Split on , to break each indidivual specifier into it's own item, and
# type: (str, Optional[bool]) -> None
# Split on , to break each individual specifier into it's own item, and
# strip each item to remove leading/trailing whitespace.
specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]
split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]
# Parsed each individual specifier, attempting first to make it a
# Specifier and falling back to a LegacySpecifier.
parsed = set()
for specifier in specifiers:
for specifier in split_specifiers:
try:
parsed.add(Specifier(specifier))
except InvalidSpecifier:
@ -610,6 +686,7 @@ class SpecifierSet(BaseSpecifier):
self._prereleases = prereleases
def __repr__(self):
# type: () -> str
pre = (
", prereleases={0!r}".format(self.prereleases)
if self._prereleases is not None
@ -619,12 +696,15 @@ class SpecifierSet(BaseSpecifier):
return "<SpecifierSet({0!r}{1})>".format(str(self), pre)
def __str__(self):
# type: () -> str
return ",".join(sorted(str(s) for s in self._specs))
def __hash__(self):
# type: () -> int
return hash(self._specs)
def __and__(self, other):
# type: (Union[SpecifierSet, str]) -> SpecifierSet
if isinstance(other, string_types):
other = SpecifierSet(other)
elif not isinstance(other, SpecifierSet):
@ -648,9 +728,8 @@ class SpecifierSet(BaseSpecifier):
return specifier
def __eq__(self, other):
if isinstance(other, string_types):
other = SpecifierSet(other)
elif isinstance(other, _IndividualSpecifier):
# type: (object) -> bool
if isinstance(other, (string_types, _IndividualSpecifier)):
other = SpecifierSet(str(other))
elif not isinstance(other, SpecifierSet):
return NotImplemented
@ -658,9 +737,8 @@ class SpecifierSet(BaseSpecifier):
return self._specs == other._specs
def __ne__(self, other):
if isinstance(other, string_types):
other = SpecifierSet(other)
elif isinstance(other, _IndividualSpecifier):
# type: (object) -> bool
if isinstance(other, (string_types, _IndividualSpecifier)):
other = SpecifierSet(str(other))
elif not isinstance(other, SpecifierSet):
return NotImplemented
@ -668,13 +746,17 @@ class SpecifierSet(BaseSpecifier):
return self._specs != other._specs
def __len__(self):
# type: () -> int
return len(self._specs)
def __iter__(self):
# type: () -> Iterator[FrozenSet[_IndividualSpecifier]]
return iter(self._specs)
@property
def prereleases(self):
# type: () -> Optional[bool]
# If we have been given an explicit prerelease modifier, then we'll
# pass that through here.
if self._prereleases is not None:
@ -692,12 +774,16 @@ class SpecifierSet(BaseSpecifier):
@prereleases.setter
def prereleases(self, value):
# type: (bool) -> None
self._prereleases = value
def __contains__(self, item):
# type: (Union[ParsedVersion, str]) -> bool
return self.contains(item)
def contains(self, item, prereleases=None):
# type: (Union[ParsedVersion, str], Optional[bool]) -> bool
# Ensure that our item is a Version or LegacyVersion instance.
if not isinstance(item, (LegacyVersion, Version)):
item = parse(item)
@ -721,12 +807,15 @@ class SpecifierSet(BaseSpecifier):
# given version is contained within all of them.
# Note: This use of all() here means that an empty set of specifiers
# will always return True, this is an explicit design decision.
return all(
s.contains(item, prereleases=prereleases)
for s in self._specs
)
return all(s.contains(item, prereleases=prereleases) for s in self._specs)
def filter(
self,
iterable, # type: Iterable[Union[ParsedVersion, str]]
prereleases=None, # type: Optional[bool]
):
# type: (...) -> Iterable[Union[ParsedVersion, str]]
def filter(self, iterable, prereleases=None):
# Determine if we're forcing a prerelease or not, if we're not forcing
# one for this particular filter call, then we'll use whatever the
# SpecifierSet thinks for whether or not we should support prereleases.
@ -744,8 +833,8 @@ class SpecifierSet(BaseSpecifier):
# which will filter out any pre-releases, unless there are no final
# releases, and which will filter out LegacyVersion in general.
else:
filtered = []
found_prereleases = []
filtered = [] # type: List[Union[ParsedVersion, str]]
found_prereleases = [] # type: List[Union[ParsedVersion, str]]
for item in iterable:
# Ensure that we some kind of Version class for this item.
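A minimal usage sketch for SpecifierSet, assuming the vendored import path:

    from pkg_resources.extern.packaging.specifiers import SpecifierSet

    spec = SpecifierSet('>=1.0,<2.0')
    print('1.4' in spec)   # True
    print('2.1' in spec)   # False
    print(list(spec.filter(['0.9', '1.0', '1.5rc1', '1.9', '2.0'])))
    # ['1.0', '1.9'] -- the pre-release is dropped unless prereleases=True is passed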

lib/pkg_resources/_vendor/packaging/tags.py (751)

@ -0,0 +1,751 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import
import distutils.util
try:
from importlib.machinery import EXTENSION_SUFFIXES
except ImportError: # pragma: no cover
import imp
EXTENSION_SUFFIXES = [x[0] for x in imp.get_suffixes()]
del imp
import logging
import os
import platform
import re
import struct
import sys
import sysconfig
import warnings
from ._typing import TYPE_CHECKING, cast
if TYPE_CHECKING: # pragma: no cover
from typing import (
Dict,
FrozenSet,
IO,
Iterable,
Iterator,
List,
Optional,
Sequence,
Tuple,
Union,
)
PythonVersion = Sequence[int]
MacVersion = Tuple[int, int]
GlibcVersion = Tuple[int, int]
logger = logging.getLogger(__name__)
INTERPRETER_SHORT_NAMES = {
"python": "py", # Generic.
"cpython": "cp",
"pypy": "pp",
"ironpython": "ip",
"jython": "jy",
} # type: Dict[str, str]
_32_BIT_INTERPRETER = sys.maxsize <= 2 ** 32
class Tag(object):
"""
A representation of the tag triple for a wheel.
Instances are considered immutable and thus are hashable. Equality checking
is also supported.
"""
__slots__ = ["_interpreter", "_abi", "_platform"]
def __init__(self, interpreter, abi, platform):
# type: (str, str, str) -> None
self._interpreter = interpreter.lower()
self._abi = abi.lower()
self._platform = platform.lower()
@property
def interpreter(self):
# type: () -> str
return self._interpreter
@property
def abi(self):
# type: () -> str
return self._abi
@property
def platform(self):
# type: () -> str
return self._platform
def __eq__(self, other):
# type: (object) -> bool
if not isinstance(other, Tag):
return NotImplemented
return (
(self.platform == other.platform)
and (self.abi == other.abi)
and (self.interpreter == other.interpreter)
)
def __hash__(self):
# type: () -> int
return hash((self._interpreter, self._abi, self._platform))
def __str__(self):
# type: () -> str
return "{}-{}-{}".format(self._interpreter, self._abi, self._platform)
def __repr__(self):
# type: () -> str
return "<{self} @ {self_id}>".format(self=self, self_id=id(self))
def parse_tag(tag):
# type: (str) -> FrozenSet[Tag]
"""
Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances.
Returning a set is required due to the possibility that the tag is a
compressed tag set.
"""
tags = set()
interpreters, abis, platforms = tag.split("-")
for interpreter in interpreters.split("."):
for abi in abis.split("."):
for platform_ in platforms.split("."):
tags.add(Tag(interpreter, abi, platform_))
return frozenset(tags)
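A quick sketch of parse_tag on a compressed tag set (vendored import path assumed):

    from pkg_resources.extern.packaging.tags import parse_tag

    for tag in sorted(parse_tag('py2.py3-none-any'), key=str):
        print(tag)
    # py2-none-any
    # py3-none-any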
def _warn_keyword_parameter(func_name, kwargs):
# type: (str, Dict[str, bool]) -> bool
"""
Backwards-compatibility with Python 2.7 to allow treating 'warn' as keyword-only.
"""
if not kwargs:
return False
elif len(kwargs) > 1 or "warn" not in kwargs:
kwargs.pop("warn", None)
arg = next(iter(kwargs.keys()))
raise TypeError(
"{}() got an unexpected keyword argument {!r}".format(func_name, arg)
)
return kwargs["warn"]
def _get_config_var(name, warn=False):
# type: (str, bool) -> Union[int, str, None]
value = sysconfig.get_config_var(name)
if value is None and warn:
logger.debug(
"Config variable '%s' is unset, Python ABI tag may be incorrect", name
)
return value
def _normalize_string(string):
# type: (str) -> str
return string.replace(".", "_").replace("-", "_")
def _abi3_applies(python_version):
# type: (PythonVersion) -> bool
"""
Determine if the Python version supports abi3.
PEP 384 was first implemented in Python 3.2.
"""
return len(python_version) > 1 and tuple(python_version) >= (3, 2)
def _cpython_abis(py_version, warn=False):
# type: (PythonVersion, bool) -> List[str]
py_version = tuple(py_version) # To allow for version comparison.
abis = []
version = _version_nodot(py_version[:2])
debug = pymalloc = ucs4 = ""
with_debug = _get_config_var("Py_DEBUG", warn)
has_refcount = hasattr(sys, "gettotalrefcount")
# Windows doesn't set Py_DEBUG, so checking for support of debug-compiled
# extension modules is the best option.
# https://github.com/pypa/pip/issues/3383#issuecomment-173267692
has_ext = "_d.pyd" in EXTENSION_SUFFIXES
if with_debug or (with_debug is None and (has_refcount or has_ext)):
debug = "d"
if py_version < (3, 8):
with_pymalloc = _get_config_var("WITH_PYMALLOC", warn)
if with_pymalloc or with_pymalloc is None:
pymalloc = "m"
if py_version < (3, 3):
unicode_size = _get_config_var("Py_UNICODE_SIZE", warn)
if unicode_size == 4 or (
unicode_size is None and sys.maxunicode == 0x10FFFF
):
ucs4 = "u"
elif debug:
# Debug builds can also load "normal" extension modules.
# We can also assume no UCS-4 or pymalloc requirement.
abis.append("cp{version}".format(version=version))
abis.insert(
0,
"cp{version}{debug}{pymalloc}{ucs4}".format(
version=version, debug=debug, pymalloc=pymalloc, ucs4=ucs4
),
)
return abis
def cpython_tags(
python_version=None, # type: Optional[PythonVersion]
abis=None, # type: Optional[Iterable[str]]
platforms=None, # type: Optional[Iterable[str]]
**kwargs # type: bool
):
# type: (...) -> Iterator[Tag]
"""
Yields the tags for a CPython interpreter.
The tags consist of:
- cp<python_version>-<abi>-<platform>
- cp<python_version>-abi3-<platform>
- cp<python_version>-none-<platform>
- cp<less than python_version>-abi3-<platform> # Older Python versions down to 3.2.
If python_version only specifies a major version then user-provided ABIs and
the 'none' ABItag will be used.
If 'abi3' or 'none' are specified in 'abis' then they will be yielded at
their normal position and not at the beginning.
"""
warn = _warn_keyword_parameter("cpython_tags", kwargs)
if not python_version:
python_version = sys.version_info[:2]
interpreter = "cp{}".format(_version_nodot(python_version[:2]))
if abis is None:
if len(python_version) > 1:
abis = _cpython_abis(python_version, warn)
else:
abis = []
abis = list(abis)
# 'abi3' and 'none' are explicitly handled later.
for explicit_abi in ("abi3", "none"):
try:
abis.remove(explicit_abi)
except ValueError:
pass
platforms = list(platforms or _platform_tags())
for abi in abis:
for platform_ in platforms:
yield Tag(interpreter, abi, platform_)
if _abi3_applies(python_version):
for tag in (Tag(interpreter, "abi3", platform_) for platform_ in platforms):
yield tag
for tag in (Tag(interpreter, "none", platform_) for platform_ in platforms):
yield tag
if _abi3_applies(python_version):
for minor_version in range(python_version[1] - 1, 1, -1):
for platform_ in platforms:
interpreter = "cp{version}".format(
version=_version_nodot((python_version[0], minor_version))
)
yield Tag(interpreter, "abi3", platform_)
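# Illustrative example (not from the upstream source): with explicit arguments
# the priority order is easy to see -- version-specific ABI first, then abi3,
# then none, then abi3 tags for older CPython releases.
# >>> [str(t) for t in cpython_tags((3, 8), ["cp38"], ["manylinux1_x86_64"])][:4]
# ['cp38-cp38-manylinux1_x86_64', 'cp38-abi3-manylinux1_x86_64',
#  'cp38-none-manylinux1_x86_64', 'cp37-abi3-manylinux1_x86_64']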
def _generic_abi():
# type: () -> Iterator[str]
abi = sysconfig.get_config_var("SOABI")
if abi:
yield _normalize_string(abi)
def generic_tags(
interpreter=None, # type: Optional[str]
abis=None, # type: Optional[Iterable[str]]
platforms=None, # type: Optional[Iterable[str]]
**kwargs # type: bool
):
# type: (...) -> Iterator[Tag]
"""
Yields the tags for a generic interpreter.
The tags consist of:
- <interpreter>-<abi>-<platform>
The "none" ABI will be added if it was not explicitly provided.
"""
warn = _warn_keyword_parameter("generic_tags", kwargs)
if not interpreter:
interp_name = interpreter_name()
interp_version = interpreter_version(warn=warn)
interpreter = "".join([interp_name, interp_version])
if abis is None:
abis = _generic_abi()
platforms = list(platforms or _platform_tags())
abis = list(abis)
if "none" not in abis:
abis.append("none")
for abi in abis:
for platform_ in platforms:
yield Tag(interpreter, abi, platform_)
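# Illustrative example (not from the upstream source): "none" is appended when
# the caller does not supply it explicitly.
# >>> [str(t) for t in generic_tags("pp36", ["pypy36_pp73"], ["any"])]
# ['pp36-pypy36_pp73-any', 'pp36-none-any']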
def _py_interpreter_range(py_version):
# type: (PythonVersion) -> Iterator[str]
"""
Yields Python versions in descending order.
After the latest version, the major-only version will be yielded, and then
all previous versions of that major version.
"""
if len(py_version) > 1:
yield "py{version}".format(version=_version_nodot(py_version[:2]))
yield "py{major}".format(major=py_version[0])
if len(py_version) > 1:
for minor in range(py_version[1] - 1, -1, -1):
yield "py{version}".format(version=_version_nodot((py_version[0], minor)))
def compatible_tags(
python_version=None, # type: Optional[PythonVersion]
interpreter=None, # type: Optional[str]
platforms=None, # type: Optional[Iterable[str]]
):
# type: (...) -> Iterator[Tag]
"""
Yields the sequence of tags that are compatible with a specific version of Python.
The tags consist of:
- py*-none-<platform>
- <interpreter>-none-any # ... if `interpreter` is provided.
- py*-none-any
"""
if not python_version:
python_version = sys.version_info[:2]
platforms = list(platforms or _platform_tags())
for version in _py_interpreter_range(python_version):
for platform_ in platforms:
yield Tag(version, "none", platform_)
if interpreter:
yield Tag(interpreter, "none", "any")
for version in _py_interpreter_range(python_version):
yield Tag(version, "none", "any")
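# Illustrative example (not from the upstream source): the most specific
# "pyXY" tags come first, followed by the major-only tag and older minors.
# >>> [str(t) for t in compatible_tags((3, 8), "cp38", ["any"])][:3]
# ['py38-none-any', 'py3-none-any', 'py37-none-any']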
def _mac_arch(arch, is_32bit=_32_BIT_INTERPRETER):
# type: (str, bool) -> str
if not is_32bit:
return arch
if arch.startswith("ppc"):
return "ppc"
return "i386"
def _mac_binary_formats(version, cpu_arch):
# type: (MacVersion, str) -> List[str]
formats = [cpu_arch]
if cpu_arch == "x86_64":
if version < (10, 4):
return []
formats.extend(["intel", "fat64", "fat32"])
elif cpu_arch == "i386":
if version < (10, 4):
return []
formats.extend(["intel", "fat32", "fat"])
elif cpu_arch == "ppc64":
# TODO: Need to care about 32-bit PPC for ppc64 through 10.2?
if version > (10, 5) or version < (10, 4):
return []
formats.append("fat64")
elif cpu_arch == "ppc":
if version > (10, 6):
return []
formats.extend(["fat32", "fat"])
formats.append("universal")
return formats
def mac_platforms(version=None, arch=None):
# type: (Optional[MacVersion], Optional[str]) -> Iterator[str]
"""
Yields the platform tags for a macOS system.
The `version` parameter is a two-item tuple specifying the macOS version to
generate platform tags for. The `arch` parameter is the CPU architecture to
generate platform tags for. Both parameters default to the appropriate value
for the current system.
"""
version_str, _, cpu_arch = platform.mac_ver() # type: ignore
if version is None:
version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
else:
version = version
if arch is None:
arch = _mac_arch(cpu_arch)
else:
arch = arch
for minor_version in range(version[1], -1, -1):
compat_version = version[0], minor_version
binary_formats = _mac_binary_formats(compat_version, arch)
for binary_format in binary_formats:
yield "macosx_{major}_{minor}_{binary_format}".format(
major=compat_version[0],
minor=compat_version[1],
binary_format=binary_format,
)
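# Illustrative example (not from the upstream source): the exact architecture
# is yielded first, then the fat/universal formats, for each minor release
# counting down to 0.
# >>> list(mac_platforms((10, 15), "x86_64"))[:3]
# ['macosx_10_15_x86_64', 'macosx_10_15_intel', 'macosx_10_15_fat64']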
# From PEP 513.
def _is_manylinux_compatible(name, glibc_version):
# type: (str, GlibcVersion) -> bool
# Check for presence of _manylinux module.
try:
import _manylinux # noqa
return bool(getattr(_manylinux, name + "_compatible"))
except (ImportError, AttributeError):
# Fall through to heuristic check below.
pass
return _have_compatible_glibc(*glibc_version)
def _glibc_version_string():
# type: () -> Optional[str]
# Returns glibc version string, or None if not using glibc.
return _glibc_version_string_confstr() or _glibc_version_string_ctypes()
def _glibc_version_string_confstr():
# type: () -> Optional[str]
"""
Primary implementation of glibc_version_string using os.confstr.
"""
# os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
# to be broken or missing. This strategy is used in the standard library
# platform module.
# https://github.com/python/cpython/blob/fcf1d003bf4f0100c9d0921ff3d70e1127ca1b71/Lib/platform.py#L175-L183
try:
# os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17".
version_string = os.confstr( # type: ignore[attr-defined] # noqa: F821
"CS_GNU_LIBC_VERSION"
)
assert version_string is not None
_, version = version_string.split() # type: Tuple[str, str]
except (AssertionError, AttributeError, OSError, ValueError):
# os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
return None
return version
def _glibc_version_string_ctypes():
# type: () -> Optional[str]
"""
Fallback implementation of glibc_version_string using ctypes.
"""
try:
import ctypes
except ImportError:
return None
# ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
# manpage says, "If filename is NULL, then the returned handle is for the
# main program". This way we can let the linker do the work to figure out
# which libc our process is actually using.
#
# Note: typeshed is wrong here so we are ignoring this line.
process_namespace = ctypes.CDLL(None) # type: ignore
try:
gnu_get_libc_version = process_namespace.gnu_get_libc_version
except AttributeError:
# Symbol doesn't exist -> therefore, we are not linked to
# glibc.
return None
# Call gnu_get_libc_version, which returns a string like "2.5"
gnu_get_libc_version.restype = ctypes.c_char_p
version_str = gnu_get_libc_version() # type: str
# py2 / py3 compatibility:
if not isinstance(version_str, str):
version_str = version_str.decode("ascii")
return version_str
# Separated out from have_compatible_glibc for easier unit testing.
def _check_glibc_version(version_str, required_major, minimum_minor):
# type: (str, int, int) -> bool
# Parse string and check against requested version.
#
# We use a regexp instead of str.split because we want to discard any
# random junk that might come after the minor version -- this might happen
# in patched/forked versions of glibc (e.g. Linaro's version of glibc
# uses version strings like "2.20-2014.11"). See gh-3588.
m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
if not m:
warnings.warn(
"Expected glibc version with 2 components major.minor,"
" got: %s" % version_str,
RuntimeWarning,
)
return False
return (
int(m.group("major")) == required_major
and int(m.group("minor")) >= minimum_minor
)
def _have_compatible_glibc(required_major, minimum_minor):
# type: (int, int) -> bool
version_str = _glibc_version_string()
if version_str is None:
return False
return _check_glibc_version(version_str, required_major, minimum_minor)
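# Illustrative example (not from the upstream source): the regex-based parser
# tolerates vendor suffixes such as Linaro's "2.20-2014.11".
# >>> _check_glibc_version("2.20-2014.11", 2, 17)
# True
# >>> _check_glibc_version("2.5", 2, 17)
# False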
# Python does not provide platform information at sufficient granularity to
# identify the architecture of the running executable in some cases, so we
# determine it dynamically by reading the information from the running
# process. This only applies on Linux, which uses the ELF format.
class _ELFFileHeader(object):
# https://en.wikipedia.org/wiki/Executable_and_Linkable_Format#File_header
class _InvalidELFFileHeader(ValueError):
"""
An invalid ELF file header was found.
"""
ELF_MAGIC_NUMBER = 0x7F454C46
ELFCLASS32 = 1
ELFCLASS64 = 2
ELFDATA2LSB = 1
ELFDATA2MSB = 2
EM_386 = 3
EM_S390 = 22
EM_ARM = 40
EM_X86_64 = 62
EF_ARM_ABIMASK = 0xFF000000
EF_ARM_ABI_VER5 = 0x05000000
EF_ARM_ABI_FLOAT_HARD = 0x00000400
def __init__(self, file):
# type: (IO[bytes]) -> None
def unpack(fmt):
# type: (str) -> int
try:
(result,) = struct.unpack(
fmt, file.read(struct.calcsize(fmt))
) # type: (int, )
except struct.error:
raise _ELFFileHeader._InvalidELFFileHeader()
return result
self.e_ident_magic = unpack(">I")
if self.e_ident_magic != self.ELF_MAGIC_NUMBER:
raise _ELFFileHeader._InvalidELFFileHeader()
self.e_ident_class = unpack("B")
if self.e_ident_class not in {self.ELFCLASS32, self.ELFCLASS64}:
raise _ELFFileHeader._InvalidELFFileHeader()
self.e_ident_data = unpack("B")
if self.e_ident_data not in {self.ELFDATA2LSB, self.ELFDATA2MSB}:
raise _ELFFileHeader._InvalidELFFileHeader()
self.e_ident_version = unpack("B")
self.e_ident_osabi = unpack("B")
self.e_ident_abiversion = unpack("B")
self.e_ident_pad = file.read(7)
format_h = "<H" if self.e_ident_data == self.ELFDATA2LSB else ">H"
format_i = "<I" if self.e_ident_data == self.ELFDATA2LSB else ">I"
format_q = "<Q" if self.e_ident_data == self.ELFDATA2LSB else ">Q"
format_p = format_i if self.e_ident_class == self.ELFCLASS32 else format_q
self.e_type = unpack(format_h)
self.e_machine = unpack(format_h)
self.e_version = unpack(format_i)
self.e_entry = unpack(format_p)
self.e_phoff = unpack(format_p)
self.e_shoff = unpack(format_p)
self.e_flags = unpack(format_i)
self.e_ehsize = unpack(format_h)
self.e_phentsize = unpack(format_h)
self.e_phnum = unpack(format_h)
self.e_shentsize = unpack(format_h)
self.e_shnum = unpack(format_h)
self.e_shstrndx = unpack(format_h)
def _get_elf_header():
# type: () -> Optional[_ELFFileHeader]
try:
with open(sys.executable, "rb") as f:
elf_header = _ELFFileHeader(f)
except (IOError, OSError, TypeError, _ELFFileHeader._InvalidELFFileHeader):
return None
return elf_header
def _is_linux_armhf():
# type: () -> bool
# hard-float ABI can be detected from the ELF header of the running
# process
# https://static.docs.arm.com/ihi0044/g/aaelf32.pdf
elf_header = _get_elf_header()
if elf_header is None:
return False
result = elf_header.e_ident_class == elf_header.ELFCLASS32
result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB
result &= elf_header.e_machine == elf_header.EM_ARM
result &= (
elf_header.e_flags & elf_header.EF_ARM_ABIMASK
) == elf_header.EF_ARM_ABI_VER5
result &= (
elf_header.e_flags & elf_header.EF_ARM_ABI_FLOAT_HARD
) == elf_header.EF_ARM_ABI_FLOAT_HARD
return result
def _is_linux_i686():
# type: () -> bool
elf_header = _get_elf_header()
if elf_header is None:
return False
result = elf_header.e_ident_class == elf_header.ELFCLASS32
result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB
result &= elf_header.e_machine == elf_header.EM_386
return result
def _have_compatible_manylinux_abi(arch):
# type: (str) -> bool
if arch == "armv7l":
return _is_linux_armhf()
if arch == "i686":
return _is_linux_i686()
return True
def _linux_platforms(is_32bit=_32_BIT_INTERPRETER):
# type: (bool) -> Iterator[str]
linux = _normalize_string(distutils.util.get_platform())
if is_32bit:
if linux == "linux_x86_64":
linux = "linux_i686"
elif linux == "linux_aarch64":
linux = "linux_armv7l"
manylinux_support = []
_, arch = linux.split("_", 1)
if _have_compatible_manylinux_abi(arch):
if arch in {"x86_64", "i686", "aarch64", "armv7l", "ppc64", "ppc64le", "s390x"}:
manylinux_support.append(
("manylinux2014", (2, 17))
) # CentOS 7 w/ glibc 2.17 (PEP 599)
if arch in {"x86_64", "i686"}:
manylinux_support.append(
("manylinux2010", (2, 12))
) # CentOS 6 w/ glibc 2.12 (PEP 571)
manylinux_support.append(
("manylinux1", (2, 5))
) # CentOS 5 w/ glibc 2.5 (PEP 513)
manylinux_support_iter = iter(manylinux_support)
for name, glibc_version in manylinux_support_iter:
if _is_manylinux_compatible(name, glibc_version):
yield linux.replace("linux", name)
break
# Support for a later manylinux implies support for an earlier version.
for name, _ in manylinux_support_iter:
yield linux.replace("linux", name)
yield linux
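# Illustrative note (not from the upstream source): on an x86_64 interpreter
# with glibc >= 2.17 and no overriding _manylinux module, the yielded order is
# manylinux2014_x86_64, manylinux2010_x86_64, manylinux1_x86_64, linux_x86_64,
# since compatibility with a newer manylinux tag implies the older ones.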
def _generic_platforms():
# type: () -> Iterator[str]
yield _normalize_string(distutils.util.get_platform())
def _platform_tags():
# type: () -> Iterator[str]
"""
Provides the platform tags for this installation.
"""
if platform.system() == "Darwin":
return mac_platforms()
elif platform.system() == "Linux":
return _linux_platforms()
else:
return _generic_platforms()
def interpreter_name():
# type: () -> str
"""
Returns the name of the running interpreter.
"""
try:
name = sys.implementation.name # type: ignore
except AttributeError: # pragma: no cover
# Python 2.7 compatibility.
name = platform.python_implementation().lower()
return INTERPRETER_SHORT_NAMES.get(name) or name
def interpreter_version(**kwargs):
# type: (bool) -> str
"""
Returns the version of the running interpreter.
"""
warn = _warn_keyword_parameter("interpreter_version", kwargs)
version = _get_config_var("py_version_nodot", warn=warn)
if version:
version = str(version)
else:
version = _version_nodot(sys.version_info[:2])
return version
def _version_nodot(version):
# type: (PythonVersion) -> str
if any(v >= 10 for v in version):
sep = "_"
else:
sep = ""
return sep.join(map(str, version))
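# Illustrative example (not from the upstream source): the underscore separator
# only appears once a component reaches two digits (e.g. Python 3.10).
# >>> _version_nodot((3, 9))
# '39'
# >>> _version_nodot((3, 10))
# '3_10'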
def sys_tags(**kwargs):
# type: (bool) -> Iterator[Tag]
"""
Returns the sequence of tag triples for the running interpreter.
The order of the sequence corresponds to priority order for the
interpreter, from most to least important.
"""
warn = _warn_keyword_parameter("sys_tags", kwargs)
interp_name = interpreter_name()
if interp_name == "cp":
for tag in cpython_tags(warn=warn):
yield tag
else:
for tag in generic_tags():
yield tag
for tag in compatible_tags():
yield tag
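# Illustrative example (not from the upstream source): the first tag yielded is
# the most specific one for the running interpreter, which is what wheel
# installers use to rank candidate files.
# >>> best = next(sys_tags())
# >>> isinstance(best, Tag)
# True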

53
lib/pkg_resources/_vendor/packaging/utils.py

@@ -5,10 +5,61 @@ from __future__ import absolute_import, division, print_function
import re
from ._typing import TYPE_CHECKING, cast
from .version import InvalidVersion, Version
if TYPE_CHECKING: # pragma: no cover
from typing import NewType, Union
NormalizedName = NewType("NormalizedName", str)
_canonicalize_regex = re.compile(r"[-_.]+")
def canonicalize_name(name):
# type: (str) -> NormalizedName
# This is taken from PEP 503.
return _canonicalize_regex.sub("-", name).lower()
value = _canonicalize_regex.sub("-", name).lower()
return cast("NormalizedName", value)
def canonicalize_version(_version):
# type: (str) -> Union[Version, str]
"""
This is very similar to Version.__str__, but has one subtle difference
with the way it handles the release segment.
"""
try:
version = Version(_version)
except InvalidVersion:
# Legacy versions cannot be normalized
return _version
parts = []
# Epoch
if version.epoch != 0:
parts.append("{0}!".format(version.epoch))
# Release segment
# NB: This strips trailing '.0's to normalize
parts.append(re.sub(r"(\.0)+$", "", ".".join(str(x) for x in version.release)))
# Pre-release
if version.pre is not None:
parts.append("".join(str(x) for x in version.pre))
# Post-release
if version.post is not None:
parts.append(".post{0}".format(version.post))
# Development release
if version.dev is not None:
parts.append(".dev{0}".format(version.dev))
# Local version segment
if version.local is not None:
parts.append("+{0}".format(version.local))
return "".join(parts)

290
lib/pkg_resources/_vendor/packaging/version.py

@@ -7,21 +7,46 @@ import collections
import itertools
import re
from ._structures import Infinity
__all__ = [
"parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"
]
from ._structures import Infinity, NegativeInfinity
from ._typing import TYPE_CHECKING
if TYPE_CHECKING: # pragma: no cover
from typing import Callable, Iterator, List, Optional, SupportsInt, Tuple, Union
from ._structures import InfinityType, NegativeInfinityType
InfiniteTypes = Union[InfinityType, NegativeInfinityType]
PrePostDevType = Union[InfiniteTypes, Tuple[str, int]]
SubLocalType = Union[InfiniteTypes, int, str]
LocalType = Union[
NegativeInfinityType,
Tuple[
Union[
SubLocalType,
Tuple[SubLocalType, str],
Tuple[NegativeInfinityType, SubLocalType],
],
...,
],
]
CmpKey = Tuple[
int, Tuple[int, ...], PrePostDevType, PrePostDevType, PrePostDevType, LocalType
]
LegacyCmpKey = Tuple[int, Tuple[str, ...]]
VersionComparisonMethod = Callable[
[Union[CmpKey, LegacyCmpKey], Union[CmpKey, LegacyCmpKey]], bool
]
__all__ = ["parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"]
_Version = collections.namedtuple(
"_Version",
["epoch", "release", "dev", "pre", "post", "local"],
"_Version", ["epoch", "release", "dev", "pre", "post", "local"]
)
def parse(version):
# type: (str) -> Union[LegacyVersion, Version]
"""
Parse the given version string and return either a :class:`Version` object
or a :class:`LegacyVersion` object depending on if the given version is
@@ -40,29 +65,38 @@ class InvalidVersion(ValueError):
class _BaseVersion(object):
_key = None # type: Union[CmpKey, LegacyCmpKey]
def __hash__(self):
# type: () -> int
return hash(self._key)
def __lt__(self, other):
# type: (_BaseVersion) -> bool
return self._compare(other, lambda s, o: s < o)
def __le__(self, other):
# type: (_BaseVersion) -> bool
return self._compare(other, lambda s, o: s <= o)
def __eq__(self, other):
# type: (object) -> bool
return self._compare(other, lambda s, o: s == o)
def __ge__(self, other):
# type: (_BaseVersion) -> bool
return self._compare(other, lambda s, o: s >= o)
def __gt__(self, other):
# type: (_BaseVersion) -> bool
return self._compare(other, lambda s, o: s > o)
def __ne__(self, other):
# type: (object) -> bool
return self._compare(other, lambda s, o: s != o)
def _compare(self, other, method):
# type: (object, VersionComparisonMethod) -> Union[bool, NotImplemented]
if not isinstance(other, _BaseVersion):
return NotImplemented
@@ -70,48 +104,88 @@ class _BaseVersion(object):
class LegacyVersion(_BaseVersion):
def __init__(self, version):
# type: (str) -> None
self._version = str(version)
self._key = _legacy_cmpkey(self._version)
def __str__(self):
# type: () -> str
return self._version
def __repr__(self):
# type: () -> str
return "<LegacyVersion({0})>".format(repr(str(self)))
@property
def public(self):
# type: () -> str
return self._version
@property
def base_version(self):
# type: () -> str
return self._version
@property
def epoch(self):
# type: () -> int
return -1
@property
def release(self):
# type: () -> None
return None
@property
def pre(self):
# type: () -> None
return None
@property
def post(self):
# type: () -> None
return None
@property
def dev(self):
# type: () -> None
return None
@property
def local(self):
# type: () -> None
return None
@property
def is_prerelease(self):
# type: () -> bool
return False
@property
def is_postrelease(self):
# type: () -> bool
return False
@property
def is_devrelease(self):
# type: () -> bool
return False
_legacy_version_component_re = re.compile(
r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE,
)
_legacy_version_component_re = re.compile(r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE)
_legacy_version_replacement_map = {
"pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@",
"pre": "c",
"preview": "c",
"-": "final-",
"rc": "c",
"dev": "@",
}
def _parse_version_parts(s):
# type: (str) -> Iterator[str]
for part in _legacy_version_component_re.split(s):
part = _legacy_version_replacement_map.get(part, part)
@@ -129,6 +203,8 @@ def _parse_version_parts(s):
def _legacy_cmpkey(version):
# type: (str) -> LegacyCmpKey
# We hardcode an epoch of -1 here. A PEP 440 version can only have an epoch
# greater than or equal to 0. This will effectively put the LegacyVersion,
# which uses the defacto standard originally implemented by setuptools,
@@ -137,7 +213,7 @@ def _legacy_cmpkey(version):
# This scheme is taken from pkg_resources.parse_version of setuptools prior to
# its adoption of the packaging library.
parts = []
parts = [] # type: List[str]
for part in _parse_version_parts(version.lower()):
if part.startswith("*"):
# remove "-" before a prerelease tag
@@ -150,9 +226,9 @@ def _legacy_cmpkey(version):
parts.pop()
parts.append(part)
parts = tuple(parts)
return epoch, parts
return epoch, tuple(parts)
# Deliberately not anchored to the start and end of the string, to make it
# easier for 3rd party code to reuse
@@ -190,12 +266,11 @@ VERSION_PATTERN = r"""
class Version(_BaseVersion):
_regex = re.compile(
r"^\s*" + VERSION_PATTERN + r"\s*$",
re.VERBOSE | re.IGNORECASE,
)
_regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
def __init__(self, version):
# type: (str) -> None
# Validate the version and parse it into pieces
match = self._regex.search(version)
if not match:
@@ -205,18 +280,11 @@ class Version(_BaseVersion):
self._version = _Version(
epoch=int(match.group("epoch")) if match.group("epoch") else 0,
release=tuple(int(i) for i in match.group("release").split(".")),
pre=_parse_letter_version(
match.group("pre_l"),
match.group("pre_n"),
),
pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
post=_parse_letter_version(
match.group("post_l"),
match.group("post_n1") or match.group("post_n2"),
),
dev=_parse_letter_version(
match.group("dev_l"),
match.group("dev_n"),
match.group("post_l"), match.group("post_n1") or match.group("post_n2")
),
dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
local=_parse_local_version(match.group("local")),
)
@@ -231,71 +299,130 @@
)
def __repr__(self):
# type: () -> str
return "<Version({0})>".format(repr(str(self)))
def __str__(self):
# type: () -> str
parts = []
# Epoch
if self._version.epoch != 0:
parts.append("{0}!".format(self._version.epoch))
if self.epoch != 0:
parts.append("{0}!".format(self.epoch))
# Release segment
parts.append(".".join(str(x) for x in self._version.release))
parts.append(".".join(str(x) for x in self.release))
# Pre-release
if self._version.pre is not None:
parts.append("".join(str(x) for x in self._version.pre))
if self.pre is not None:
parts.append("".join(str(x) for x in self.pre))
# Post-release
if self._version.post is not None:
parts.append(".post{0}".format(self._version.post[1]))
if self.post is not None:
parts.append(".post{0}".format(self.post))
# Development release
if self._version.dev is not None:
parts.append(".dev{0}".format(self._version.dev[1]))
if self.dev is not None:
parts.append(".dev{0}".format(self.dev))
# Local version segment
if self._version.local is not None:
parts.append(
"+{0}".format(".".join(str(x) for x in self._version.local))
)
if self.local is not None:
parts.append("+{0}".format(self.local))
return "".join(parts)
@property
def epoch(self):
# type: () -> int
_epoch = self._version.epoch # type: int
return _epoch
@property
def release(self):
# type: () -> Tuple[int, ...]
_release = self._version.release # type: Tuple[int, ...]
return _release
@property
def pre(self):
# type: () -> Optional[Tuple[str, int]]
_pre = self._version.pre # type: Optional[Tuple[str, int]]
return _pre
@property
def post(self):
# type: () -> Optional[Tuple[str, int]]
return self._version.post[1] if self._version.post else None
@property
def dev(self):
# type: () -> Optional[Tuple[str, int]]
return self._version.dev[1] if self._version.dev else None
@property
def local(self):
# type: () -> Optional[str]
if self._version.local:
return ".".join(str(x) for x in self._version.local)
else:
return None
@property
def public(self):
# type: () -> str
return str(self).split("+", 1)[0]
@property
def base_version(self):
# type: () -> str
parts = []
# Epoch
if self._version.epoch != 0:
parts.append("{0}!".format(self._version.epoch))
if self.epoch != 0:
parts.append("{0}!".format(self.epoch))
# Release segment
parts.append(".".join(str(x) for x in self._version.release))
parts.append(".".join(str(x) for x in self.release))
return "".join(parts)
@property
def local(self):
version_string = str(self)
if "+" in version_string:
return version_string.split("+", 1)[1]
@property
def is_prerelease(self):
return bool(self._version.dev or self._version.pre)
# type: () -> bool
return self.dev is not None or self.pre is not None
@property
def is_postrelease(self):
return bool(self._version.post)
# type: () -> bool
return self.post is not None
@property
def is_devrelease(self):
# type: () -> bool
return self.dev is not None
@property
def major(self):
# type: () -> int
return self.release[0] if len(self.release) >= 1 else 0
@property
def minor(self):
# type: () -> int
return self.release[1] if len(self.release) >= 2 else 0
@property
def micro(self):
# type: () -> int
return self.release[2] if len(self.release) >= 3 else 0
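# Illustrative example (not from the upstream source): the convenience
# properties provided by this release.
# >>> v = Version("1.2.3.post1+local.1")
# >>> (v.major, v.minor, v.micro, v.post, v.local)
# (1, 2, 3, 1, 'local.1')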
def _parse_letter_version(
letter, # type: str
number, # type: Union[str, bytes, SupportsInt]
):
# type: (...) -> Optional[Tuple[str, int]]
def _parse_letter_version(letter, number):
if letter:
# We consider there to be an implicit 0 in a pre-release if there is
# not a numeral associated with it.
@@ -325,34 +452,42 @@ def _parse_letter_version(letter, number):
return letter, int(number)
return None
_local_version_seperators = re.compile(r"[\._-]")
_local_version_separators = re.compile(r"[\._-]")
def _parse_local_version(local):
# type: (str) -> Optional[LocalType]
"""
Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
"""
if local is not None:
return tuple(
part.lower() if not part.isdigit() else int(part)
for part in _local_version_seperators.split(local)
for part in _local_version_separators.split(local)
)
return None
def _cmpkey(epoch, release, pre, post, dev, local):
def _cmpkey(
epoch, # type: int
release, # type: Tuple[int, ...]
pre, # type: Optional[Tuple[str, int]]
post, # type: Optional[Tuple[str, int]]
dev, # type: Optional[Tuple[str, int]]
local, # type: Optional[Tuple[SubLocalType]]
):
# type: (...) -> CmpKey
# When we compare a release version, we want to compare it with all of the
# trailing zeros removed. So we'll reverse the list, drop all the now-leading
# zeros until we come to something non-zero, then take the rest, re-reverse it
# back into the correct order, make it a tuple, and use that for our sorting
# key.
release = tuple(
reversed(list(
itertools.dropwhile(
lambda x: x == 0,
reversed(release),
)
))
_release = tuple(
reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
)
# We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
@@ -360,23 +495,31 @@ def _cmpkey(epoch, release, pre, post, dev, local):
# if there is not a pre or a post segment. If we have one of those then
# the normal sorting rules will handle this case correctly.
if pre is None and post is None and dev is not None:
pre = -Infinity
_pre = NegativeInfinity # type: PrePostDevType
# Versions without a pre-release (except as noted above) should sort after
# those with one.
elif pre is None:
pre = Infinity
_pre = Infinity
else:
_pre = pre
# Versions without a post segment should sort before those with one.
if post is None:
post = -Infinity
_post = NegativeInfinity # type: PrePostDevType
else:
_post = post
# Versions without a development segment should sort after those with one.
if dev is None:
dev = Infinity
_dev = Infinity # type: PrePostDevType
else:
_dev = dev
if local is None:
# Versions without a local segment should sort before those with one.
local = -Infinity
_local = NegativeInfinity # type: LocalType
else:
# Versions with a local segment need that segment parsed to implement
# the sorting rules in PEP440.
@@ -385,9 +528,8 @@ def _cmpkey(epoch, release, pre, post, dev, local):
# - Numeric segments sort numerically
# - Shorter versions sort before longer versions when the prefixes
# match exactly
local = tuple(
(i, "") if isinstance(i, int) else (-Infinity, i)
for i in local
_local = tuple(
(i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
)
return epoch, release, pre, post, dev, local
return epoch, _release, _pre, _post, _dev, _local

4
lib/pkg_resources/_vendor/vendored.txt

@@ -0,0 +1,4 @@
packaging==20.4
pyparsing==2.2.1
six==1.10.0
appdirs==1.4.3

401
lib/pkg_resources/api_tests.txt

@@ -0,0 +1,401 @@
Pluggable Distributions of Python Software
==========================================
Distributions
-------------
A "Distribution" is a collection of files that represent a "Release" of a
"Project" as of a particular point in time, denoted by a
"Version"::
>>> import sys, pkg_resources
>>> from pkg_resources import Distribution
>>> Distribution(project_name="Foo", version="1.2")
Foo 1.2
Distributions have a location, which can be a filename, URL, or really anything
else you care to use::
>>> dist = Distribution(
... location="http://example.com/something",
... project_name="Bar", version="0.9"
... )
>>> dist
Bar 0.9 (http://example.com/something)
Distributions have various introspectable attributes::
>>> dist.location
'http://example.com/something'
>>> dist.project_name
'Bar'
>>> dist.version
'0.9'
>>> dist.py_version == '{}.{}'.format(*sys.version_info)
True
>>> print(dist.platform)
None
Including various computed attributes::
>>> from pkg_resources import parse_version
>>> dist.parsed_version == parse_version(dist.version)
True
>>> dist.key # case-insensitive form of the project name
'bar'
Distributions are compared (and hashed) by version first::
>>> Distribution(version='1.0') == Distribution(version='1.0')
True
>>> Distribution(version='1.0') == Distribution(version='1.1')
False
>>> Distribution(version='1.0') < Distribution(version='1.1')
True
but also by project name (case-insensitive), platform, Python version,
location, etc.::
>>> Distribution(project_name="Foo",version="1.0") == \
... Distribution(project_name="Foo",version="1.0")
True
>>> Distribution(project_name="Foo",version="1.0") == \
... Distribution(project_name="foo",version="1.0")
True
>>> Distribution(project_name="Foo",version="1.0") == \
... Distribution(project_name="Foo",version="1.1")
False
>>> Distribution(project_name="Foo",py_version="2.3",version="1.0") == \
... Distribution(project_name="Foo",py_version="2.4",version="1.0")
False
>>> Distribution(location="spam",version="1.0") == \
... Distribution(location="spam",version="1.0")
True
>>> Distribution(location="spam",version="1.0") == \
... Distribution(location="baz",version="1.0")
False
Hash and compare distribution by prio/plat
Get version from metadata
provider capabilities
egg_name()
as_requirement()
from_location, from_filename (w/path normalization)
Releases may have zero or more "Requirements", which indicate
what releases of another project the release requires in order to
function. A Requirement names the other project, expresses some criteria
as to what releases of that project are acceptable, and lists any "Extras"
that the requiring release may need from that project. (An Extra is an
optional feature of a Release, that can only be used if its additional
Requirements are satisfied.)
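For example (an illustrative sketch, reusing the ``dist`` object for Bar 0.9
created above), a Requirement can be parsed from a string and tested against a
Distribution using the ``in`` operator::
>>> from pkg_resources import Requirement
>>> req = Requirement.parse("Bar>=0.9")
>>> dist in req
True
>>> Distribution(project_name="Foo", version="1.0") in req
False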
The Working Set
---------------
A collection of active distributions is called a Working Set. Note that a
Working Set can contain any importable distribution, not just pluggable ones.
For example, the Python standard library is an importable distribution that
will usually be part of the Working Set, even though it is not pluggable.
Similarly, when you are doing development work on a project, the files you are
editing are also a Distribution. (And, with a little attention to the
directory names used, and including some additional metadata, such a
"development distribution" can be made pluggable as well.)
>>> from pkg_resources import WorkingSet
A working set's entries are the sys.path entries that correspond to the active
distributions. By default, the working set's entries are the items on
``sys.path``::
>>> ws = WorkingSet()
>>> ws.entries == sys.path
True
But you can also create an empty working set explicitly, and add distributions
to it::
>>> ws = WorkingSet([])
>>> ws.add(dist)
>>> ws.entries
['http://example.com/something']
>>> dist in ws
True
>>> Distribution('foo',version="") in ws
False
And you can iterate over its distributions::
>>> list(ws)
[Bar 0.9 (http://example.com/something)]
Adding the same distribution more than once is a no-op::
>>> ws.add(dist)
>>> list(ws)
[Bar 0.9 (http://example.com/something)]
For that matter, adding multiple distributions for the same project also does
nothing, because a working set can only hold one active distribution per
project -- the first one added to it::
>>> ws.add(
... Distribution(
... 'http://example.com/something', project_name="Bar",
... version="7.2"
... )
... )
>>> list(ws)
[Bar 0.9 (http://example.com/something)]
You can append a path entry to a working set using ``add_entry()``::
>>> ws.entries
['http://example.com/something']
>>> ws.add_entry(pkg_resources.__file__)
>>> ws.entries
['http://example.com/something', '...pkg_resources...']
Multiple additions result in multiple entries, even if the entry is already in
the working set (because ``sys.path`` can contain the same entry more than
once)::
>>> ws.add_entry(pkg_resources.__file__)
>>> ws.entries
['...example.com...', '...pkg_resources...', '...pkg_resources...']
And you can specify the path entry a distribution was found under, using the
optional second parameter to ``add()``::
>>> ws = WorkingSet([])
>>> ws.add(dist,"foo")
>>> ws.entries
['foo']
But even if a distribution is found under multiple path entries, it still only
shows up once when iterating the working set:
>>> ws.add_entry(ws.entries[0])
>>> list(ws)
[Bar 0.9 (http://example.com/something)]
You can ask a WorkingSet to ``find()`` a distribution matching a requirement::
>>> from pkg_resources import Requirement
>>> print(ws.find(Requirement.parse("Foo==1.0"))) # no match, return None
None
>>> ws.find(Requirement.parse("Bar==0.9")) # match, return distribution
Bar 0.9 (http://example.com/something)
Note that asking for a conflicting version of a distribution already in a
working set triggers a ``pkg_resources.VersionConflict`` error:
>>> try:
... ws.find(Requirement.parse("Bar==1.0"))
... except pkg_resources.VersionConflict as exc:
... print(str(exc))
... else:
... raise AssertionError("VersionConflict was not raised")
(Bar 0.9 (http://example.com/something), Requirement.parse('Bar==1.0'))
You can subscribe a callback function to receive notifications whenever a new
distribution is added to a working set. The callback is immediately invoked
once for each existing distribution in the working set, and then is called
again for new distributions added thereafter::
>>> def added(dist): print("Added %s" % dist)
>>> ws.subscribe(added)
Added Bar 0.9
>>> foo12 = Distribution(project_name="Foo", version="1.2", location="f12")
>>> ws.add(foo12)
Added Foo 1.2
Note, however, that only the first distribution added for a given project name
will trigger a callback, even during the initial ``subscribe()`` callback::
>>> foo14 = Distribution(project_name="Foo", version="1.4", location="f14")
>>> ws.add(foo14) # no callback, because Foo 1.2 is already active
>>> ws = WorkingSet([])
>>> ws.add(foo12)
>>> ws.add(foo14)
>>> ws.subscribe(added)
Added Foo 1.2
And adding a callback more than once has no effect, either::
>>> ws.subscribe(added) # no callbacks
# and no double-callbacks on subsequent additions, either
>>> just_a_test = Distribution(project_name="JustATest", version="0.99")
>>> ws.add(just_a_test)
Added JustATest 0.99
Finding Plugins
---------------
``WorkingSet`` objects can be used to figure out what plugins in an
``Environment`` can be loaded without any resolution errors::
>>> from pkg_resources import Environment
>>> plugins = Environment([]) # normally, a list of plugin directories
>>> plugins.add(foo12)
>>> plugins.add(foo14)
>>> plugins.add(just_a_test)
In the simplest case, we just get the newest version of each distribution in
the plugin environment::
>>> ws = WorkingSet([])
>>> ws.find_plugins(plugins)
([JustATest 0.99, Foo 1.4 (f14)], {})
But if there's a problem with a version conflict or missing requirements, the
method falls back to older versions, and the error info dict will contain an
exception instance for each unloadable plugin::
>>> ws.add(foo12) # this will conflict with Foo 1.4
>>> ws.find_plugins(plugins)
([JustATest 0.99, Foo 1.2 (f12)], {Foo 1.4 (f14): VersionConflict(...)})
But if you disallow fallbacks, the failed plugin will be skipped instead of
trying older versions::
>>> ws.find_plugins(plugins, fallback=False)
([JustATest 0.99], {Foo 1.4 (f14): VersionConflict(...)})
Platform Compatibility Rules
----------------------------
On the Mac, there are potential compatibility issues for modules compiled
on newer versions of macOS than what the user is running. Additionally,
macOS has had two CPU architectures to contend with: Intel and PowerPC.
Basic equality works as on other platforms::
>>> from pkg_resources import compatible_platforms as cp
>>> reqd = 'macosx-10.4-ppc'
>>> cp(reqd, reqd)
True
>>> cp("win32", reqd)
False
Distributions made on other machine types are not compatible::
>>> cp("macosx-10.4-i386", reqd)
False
Distributions made on earlier versions of the OS are compatible, as
long as they are from the same top-level version. The patchlevel version
number does not matter::
>>> cp("macosx-10.4-ppc", reqd)
True
>>> cp("macosx-10.3-ppc", reqd)
True
>>> cp("macosx-10.5-ppc", reqd)
False
>>> cp("macosx-9.5-ppc", reqd)
False
Backwards compatibility for packages made via earlier versions of
setuptools is provided as well::
>>> cp("darwin-8.2.0-Power_Macintosh", reqd)
True
>>> cp("darwin-7.2.0-Power_Macintosh", reqd)
True
>>> cp("darwin-8.2.0-Power_Macintosh", "macosx-10.3-ppc")
False
Environment Markers
-------------------
>>> from pkg_resources import invalid_marker as im, evaluate_marker as em
>>> import os
>>> print(im("sys_platform"))
Invalid marker: 'sys_platform', parse error at ''
>>> print(im("sys_platform=="))
Invalid marker: 'sys_platform==', parse error at ''
>>> print(im("sys_platform=='win32'"))
False
>>> print(im("sys=='x'"))
Invalid marker: "sys=='x'", parse error at "sys=='x'"
>>> print(im("(extra)"))
Invalid marker: '(extra)', parse error at ')'
>>> print(im("(extra"))
Invalid marker: '(extra', parse error at ''
>>> print(im("os.open('foo')=='y'"))
Invalid marker: "os.open('foo')=='y'", parse error at 'os.open('
>>> print(im("'x'=='y' and os.open('foo')=='y'")) # no short-circuit!
Invalid marker: "'x'=='y' and os.open('foo')=='y'", parse error at 'and os.o'
>>> print(im("'x'=='x' or os.open('foo')=='y'")) # no short-circuit!
Invalid marker: "'x'=='x' or os.open('foo')=='y'", parse error at 'or os.op'
>>> print(im("'x' < 'y' < 'z'"))
Invalid marker: "'x' < 'y' < 'z'", parse error at "< 'z'"
>>> print(im("r'x'=='x'"))
Invalid marker: "r'x'=='x'", parse error at "r'x'=='x"
>>> print(im("'''x'''=='x'"))
Invalid marker: "'''x'''=='x'", parse error at "'x'''=='"
>>> print(im('"""x"""=="x"'))
Invalid marker: '"""x"""=="x"', parse error at '"x"""=="'
>>> print(im(r"x\n=='x'"))
Invalid marker: "x\\n=='x'", parse error at "x\\n=='x'"
>>> print(im("os.open=='y'"))
Invalid marker: "os.open=='y'", parse error at 'os.open='
>>> em("sys_platform=='win32'") == (sys.platform=='win32')
True
>>> em("python_version >= '2.7'")
True
>>> em("python_version > '2.6'")
True
>>> im("implementation_name=='cpython'")
False
>>> im("platform_python_implementation=='CPython'")
False
>>> im("implementation_version=='3.5.1'")
False

7
lib/pkg_resources/extern/__init__.py

@@ -43,13 +43,6 @@ class VendorImporter:
__import__(extant)
mod = sys.modules[extant]
sys.modules[fullname] = mod
# mysterious hack:
# Remove the reference to the extant package/module
# on later Python versions to cause relative imports
# in the vendor package to resolve the same modules
# as those going through this importer.
if prefix and sys.version_info > (3, 3):
del sys.modules[extant]
return mod
except ImportError:
pass

23
lib/pkg_resources/py31compat.py

@@ -1,23 +0,0 @@
import os
import errno
import sys
from .extern import six
def _makedirs_31(path, exist_ok=False):
try:
os.makedirs(path)
except OSError as exc:
if not exist_ok or exc.errno != errno.EEXIST:
raise
# rely on compatibility behavior until mode considerations
# and exists_ok considerations are disentangled.
# See https://github.com/pypa/setuptools/pull/1083#issuecomment-315168663
needs_makedirs = (
six.PY2 or
(3, 4) <= sys.version_info < (3, 4, 1)
)
makedirs = _makedirs_31 if needs_makedirs else os.makedirs